From 8958455a12f243bc99bc9b394fcd7c6ead994d6e Mon Sep 17 00:00:00 2001 From: Oracle Public Cloud User Date: Wed, 4 Mar 2026 15:53:40 +0000 Subject: [PATCH] Initial scaffold: FocusFlow ADHD Task Manager backend Dart Shelf API with modules: auth (JWT + PBKDF2), tasks (CRUD + dopamine scorer), streaks (forgiveness + freeze), rewards (variable reward engine), time perception, sync (offline-first push/pull), rooms (body doubling placeholder). Includes DB migration (001_initial_schema.sql) and Docker Compose. Co-Authored-By: Claude Opus 4.6 --- .dockerignore | 9 + .env.example | 23 + .gitignore | 3 + CHANGELOG.md | 3 + Dockerfile | 21 + README.md | 49 ++ analysis_options.yaml | 30 + bin/migrate.dart | 189 +++++ bin/server.dart | 144 ++++ docker-compose.yml | 61 ++ lib/src/config/database.dart | 61 ++ lib/src/config/env.dart | 67 ++ lib/src/config/redis_config.dart | 36 + lib/src/middleware/auth_middleware.dart | 59 ++ lib/src/middleware/cors_middleware.dart | 28 + lib/src/middleware/error_handler.dart | 70 ++ lib/src/middleware/logging_middleware.dart | 38 + lib/src/middleware/rate_limit_middleware.dart | 67 ++ lib/src/modules/auth/auth_routes.dart | 103 +++ lib/src/modules/auth/auth_service.dart | 176 +++++ lib/src/modules/auth/password_hasher.dart | 88 +++ lib/src/modules/auth/token_service.dart | 51 ++ lib/src/modules/rewards/reward_engine.dart | 160 ++++ lib/src/modules/rewards/reward_routes.dart | 64 ++ lib/src/modules/rewards/reward_service.dart | 101 +++ lib/src/modules/rooms/room_routes.dart | 57 ++ lib/src/modules/rooms/room_service.dart | 36 + .../modules/streaks/streak_repository.dart | 160 ++++ lib/src/modules/streaks/streak_routes.dart | 98 +++ lib/src/modules/streaks/streak_service.dart | 230 ++++++ lib/src/modules/sync/sync_routes.dart | 65 ++ lib/src/modules/sync/sync_service.dart | 117 +++ lib/src/modules/tasks/dopamine_scorer.dart | 114 +++ lib/src/modules/tasks/task_repository.dart | 237 ++++++ lib/src/modules/tasks/task_routes.dart | 138 ++++ 
lib/src/modules/tasks/task_service.dart | 168 +++++ lib/src/modules/time/time_routes.dart | 79 ++ lib/src/modules/time/time_service.dart | 143 ++++ lib/src/shared/api_response.dart | 100 +++ lib/src/shared/pagination.dart | 27 + migrations/001_initial_schema.sql | 385 ++++++++++ pubspec.lock | 701 ++++++++++++++++++ pubspec.yaml | 30 + test/server_test.dart | 39 + 44 files changed, 4625 insertions(+) create mode 100644 .dockerignore create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 CHANGELOG.md create mode 100644 Dockerfile create mode 100644 README.md create mode 100644 analysis_options.yaml create mode 100644 bin/migrate.dart create mode 100644 bin/server.dart create mode 100644 docker-compose.yml create mode 100644 lib/src/config/database.dart create mode 100644 lib/src/config/env.dart create mode 100644 lib/src/config/redis_config.dart create mode 100644 lib/src/middleware/auth_middleware.dart create mode 100644 lib/src/middleware/cors_middleware.dart create mode 100644 lib/src/middleware/error_handler.dart create mode 100644 lib/src/middleware/logging_middleware.dart create mode 100644 lib/src/middleware/rate_limit_middleware.dart create mode 100644 lib/src/modules/auth/auth_routes.dart create mode 100644 lib/src/modules/auth/auth_service.dart create mode 100644 lib/src/modules/auth/password_hasher.dart create mode 100644 lib/src/modules/auth/token_service.dart create mode 100644 lib/src/modules/rewards/reward_engine.dart create mode 100644 lib/src/modules/rewards/reward_routes.dart create mode 100644 lib/src/modules/rewards/reward_service.dart create mode 100644 lib/src/modules/rooms/room_routes.dart create mode 100644 lib/src/modules/rooms/room_service.dart create mode 100644 lib/src/modules/streaks/streak_repository.dart create mode 100644 lib/src/modules/streaks/streak_routes.dart create mode 100644 lib/src/modules/streaks/streak_service.dart create mode 100644 lib/src/modules/sync/sync_routes.dart create mode 100644 
lib/src/modules/sync/sync_service.dart create mode 100644 lib/src/modules/tasks/dopamine_scorer.dart create mode 100644 lib/src/modules/tasks/task_repository.dart create mode 100644 lib/src/modules/tasks/task_routes.dart create mode 100644 lib/src/modules/tasks/task_service.dart create mode 100644 lib/src/modules/time/time_routes.dart create mode 100644 lib/src/modules/time/time_service.dart create mode 100644 lib/src/shared/api_response.dart create mode 100644 lib/src/shared/pagination.dart create mode 100644 migrations/001_initial_schema.sql create mode 100644 pubspec.lock create mode 100644 pubspec.yaml create mode 100644 test/server_test.dart diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..21504f8 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,9 @@ +.dockerignore +Dockerfile +build/ +.dart_tool/ +.git/ +.github/ +.gitignore +.idea/ +.packages diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..ed2b7f9 --- /dev/null +++ b/.env.example @@ -0,0 +1,23 @@ +# Server configuration +PORT=8081 +APP_ENV=development # development | staging | production + +# PostgreSQL connection string +DATABASE_URL=postgresql://focusflow:focusflow@localhost:5433/focusflow + +# Redis connection string (for rate limiting, caching, sessions) +REDIS_URL=redis://localhost:6380 + +# JWT secret key — generate a strong random string for production +JWT_SECRET=change-me-to-a-secure-random-string + +# JWT token lifetimes +ACCESS_TOKEN_EXPIRY_MINUTES=15 +REFRESH_TOKEN_EXPIRY_DAYS=30 + +# Rate limiting +RATE_LIMIT_REQUESTS=100 +RATE_LIMIT_WINDOW_SECONDS=60 + +# Logging level: ALL, FINEST, FINER, FINE, CONFIG, INFO, WARNING, SEVERE, SHOUT, OFF +LOG_LEVEL=INFO diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3a85790 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +# https://dart.dev/guides/libraries/private-files +# Created by `dart pub` +.dart_tool/ diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 
0000000..effe43c --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,3 @@ +## 1.0.0 + +- Initial version. diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..c333dee --- /dev/null +++ b/Dockerfile @@ -0,0 +1,21 @@ +# Use latest stable channel SDK. +FROM dart:stable AS build + +# Resolve app dependencies. +WORKDIR /app +COPY pubspec.* ./ +RUN dart pub get + +# Copy app source code (except anything in .dockerignore) and AOT compile app. +COPY . . +RUN dart compile exe bin/server.dart -o bin/server + +# Build minimal serving image from AOT-compiled `/server` +# and the pre-built AOT-runtime in the `/runtime/` directory of the base image. +FROM scratch +COPY --from=build /runtime/ / +COPY --from=build /app/bin/server /app/bin/ + +# Start server. +EXPOSE 8080 +CMD ["/app/bin/server"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..e695d9d --- /dev/null +++ b/README.md @@ -0,0 +1,49 @@ +A server app built using [Shelf](https://pub.dev/packages/shelf), +configured to enable running with [Docker](https://www.docker.com/). + +This sample code handles HTTP GET requests to `/` and `/echo/` + +# Running the sample + +## Running with the Dart SDK + +You can run the example with the [Dart SDK](https://dart.dev/get-dart) +like this: + +``` +$ dart run bin/server.dart +Server listening on port 8080 +``` + +And then from a second terminal: +``` +$ curl http://0.0.0.0:8080 +Hello, World! +$ curl http://0.0.0.0:8080/echo/I_love_Dart +I_love_Dart +``` + +## Running with Docker + +If you have [Docker Desktop](https://www.docker.com/get-started) installed, you +can build and run with the `docker` command: + +``` +$ docker build . -t myserver +$ docker run -it -p 8080:8080 myserver +Server listening on port 8080 +``` + +And then from a second terminal: +``` +$ curl http://0.0.0.0:8080 +Hello, World! 
+$ curl http://0.0.0.0:8080/echo/I_love_Dart +I_love_Dart +``` + +You should see the logging printed in the first terminal: +``` +2021-05-06T15:47:04.620417 0:00:00.000158 GET [200] / +2021-05-06T15:47:08.392928 0:00:00.001216 GET [200] /echo/I_love_Dart +``` diff --git a/analysis_options.yaml b/analysis_options.yaml new file mode 100644 index 0000000..dee8927 --- /dev/null +++ b/analysis_options.yaml @@ -0,0 +1,30 @@ +# This file configures the static analysis results for your project (errors, +# warnings, and lints). +# +# This enables the 'recommended' set of lints from `package:lints`. +# This set helps identify many issues that may lead to problems when running +# or consuming Dart code, and enforces writing Dart using a single, idiomatic +# style and format. +# +# If you want a smaller set of lints you can change this to specify +# 'package:lints/core.yaml'. These are just the most critical lints +# (the recommended set includes the core lints). +# The core lints are also what is used by pub.dev for scoring packages. + +include: package:lints/recommended.yaml + +# Uncomment the following section to specify additional rules. + +# linter: +# rules: +# - camel_case_types + +# analyzer: +# exclude: +# - path/to/excluded/files/** + +# For more information about the core and recommended set of lints, see +# https://dart.dev/go/core-lints + +# For additional information about configuring this file, see +# https://dart.dev/guides/language/analysis-options diff --git a/bin/migrate.dart b/bin/migrate.dart new file mode 100644 index 0000000..8a291f6 --- /dev/null +++ b/bin/migrate.dart @@ -0,0 +1,189 @@ +import 'dart:io'; + +import 'package:logging/logging.dart'; + +import 'package:focusflow_api/src/config/database.dart'; +import 'package:focusflow_api/src/config/env.dart'; + +final _log = Logger('Migrate'); + +/// Run database migrations. 
+/// +/// Usage: dart run bin/migrate.dart +Future main() async { + Logger.root.level = Level.ALL; + Logger.root.onRecord.listen((record) { + // ignore: avoid_print + print( + '${record.time} [${record.level.name}] ${record.loggerName}: ' + '${record.message}', + ); + }); + + Env.init(); + await Database.init(); + + _log.info('Running migrations...'); + + try { + // ── Users ────────────────────────────────────────────────────────── + await Database.query(''' + CREATE TABLE IF NOT EXISTS users ( + id TEXT PRIMARY KEY, + email TEXT NOT NULL UNIQUE, + password_hash TEXT NOT NULL, + display_name TEXT NOT NULL, + deleted_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ); + '''); + _log.info(' users: OK'); + + // ── Refresh tokens ──────────────────────────────────────────────── + await Database.query(''' + CREATE TABLE IF NOT EXISTS refresh_tokens ( + id TEXT PRIMARY KEY, + user_id TEXT NOT NULL REFERENCES users(id), + token TEXT NOT NULL UNIQUE, + expires_at TIMESTAMPTZ NOT NULL, + revoked BOOLEAN NOT NULL DEFAULT FALSE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ); + '''); + _log.info(' refresh_tokens: OK'); + + // ── Tasks ───────────────────────────────────────────────────────── + await Database.query(''' + CREATE TABLE IF NOT EXISTS tasks ( + id TEXT PRIMARY KEY, + user_id TEXT NOT NULL REFERENCES users(id), + title TEXT NOT NULL, + description TEXT, + status TEXT NOT NULL DEFAULT 'pending', + priority TEXT NOT NULL DEFAULT 'medium', + energy_level INT NOT NULL DEFAULT 3, + estimated_minutes INT DEFAULT 25, + actual_minutes INT, + due_date TIMESTAMPTZ, + tags TEXT[] DEFAULT '{}', + times_postponed INT NOT NULL DEFAULT 0, + last_interacted_at TIMESTAMPTZ, + completed_at TIMESTAMPTZ, + deleted_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ); + '''); + _log.info(' tasks: OK'); + + // ── Streaks 
─────────────────────────────────────────────────────── + await Database.query(''' + CREATE TABLE IF NOT EXISTS streaks ( + id TEXT PRIMARY KEY, + user_id TEXT NOT NULL REFERENCES users(id), + name TEXT NOT NULL, + description TEXT, + frequency TEXT NOT NULL DEFAULT 'daily', + grace_days INT NOT NULL DEFAULT 1, + current_count INT NOT NULL DEFAULT 0, + longest_count INT NOT NULL DEFAULT 0, + frozen_until TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ); + '''); + _log.info(' streaks: OK'); + + // ── Streak entries ──────────────────────────────────────────────── + await Database.query(''' + CREATE TABLE IF NOT EXISTS streak_entries ( + id TEXT PRIMARY KEY, + streak_id TEXT NOT NULL REFERENCES streaks(id), + entry_date DATE NOT NULL, + entry_type TEXT NOT NULL DEFAULT 'completion', + note TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE(streak_id, entry_date) + ); + '''); + _log.info(' streak_entries: OK'); + + // ── Rewards ─────────────────────────────────────────────────────── + await Database.query(''' + CREATE TABLE IF NOT EXISTS rewards ( + id TEXT PRIMARY KEY, + user_id TEXT NOT NULL REFERENCES users(id), + task_id TEXT REFERENCES tasks(id), + points INT NOT NULL, + magnitude DOUBLE PRECISION NOT NULL, + visual_type TEXT NOT NULL, + is_surprise BOOLEAN NOT NULL DEFAULT FALSE, + breakdown TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ); + '''); + _log.info(' rewards: OK'); + + // ── Time entries ────────────────────────────────────────────────── + await Database.query(''' + CREATE TABLE IF NOT EXISTS time_entries ( + id TEXT PRIMARY KEY, + user_id TEXT NOT NULL REFERENCES users(id), + task_id TEXT NOT NULL REFERENCES tasks(id), + estimated_minutes INT, + actual_minutes INT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE(task_id) + ); + '''); + _log.info(' time_entries: OK'); + + // ── Sync log 
────────────────────────────────────────────────────── + await Database.query(''' + CREATE TABLE IF NOT EXISTS sync_log ( + id TEXT PRIMARY KEY, + user_id TEXT NOT NULL REFERENCES users(id), + entity_type TEXT NOT NULL, + entity_id TEXT NOT NULL, + operation TEXT NOT NULL, + data TEXT, + version INT NOT NULL DEFAULT 0, + synced_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE(entity_type, entity_id, user_id) + ); + '''); + _log.info(' sync_log: OK'); + + // ── Indexes ─────────────────────────────────────────────────────── + await Database.query(''' + CREATE INDEX IF NOT EXISTS idx_tasks_user_status + ON tasks(user_id, status) WHERE deleted_at IS NULL; + '''); + await Database.query(''' + CREATE INDEX IF NOT EXISTS idx_tasks_user_due + ON tasks(user_id, due_date) WHERE deleted_at IS NULL; + '''); + await Database.query(''' + CREATE INDEX IF NOT EXISTS idx_streaks_user + ON streaks(user_id); + '''); + await Database.query(''' + CREATE INDEX IF NOT EXISTS idx_rewards_user + ON rewards(user_id, created_at); + '''); + await Database.query(''' + CREATE INDEX IF NOT EXISTS idx_sync_log_user_time + ON sync_log(user_id, synced_at); + '''); + _log.info(' indexes: OK'); + + _log.info('All migrations complete!'); + } catch (e, st) { + _log.severe('Migration failed', e, st); + } finally { + await Database.close(); + exit(0); + } +} diff --git a/bin/server.dart b/bin/server.dart new file mode 100644 index 0000000..8b7d264 --- /dev/null +++ b/bin/server.dart @@ -0,0 +1,144 @@ +import 'dart:async'; +import 'dart:convert'; +import 'dart:io'; + +import 'package:logging/logging.dart'; +import 'package:shelf/shelf.dart'; +import 'package:shelf/shelf_io.dart' as shelf_io; +import 'package:shelf_router/shelf_router.dart'; + +import 'package:focusflow_api/src/config/database.dart'; +import 'package:focusflow_api/src/config/env.dart'; +import 'package:focusflow_api/src/middleware/auth_middleware.dart'; +import 'package:focusflow_api/src/middleware/cors_middleware.dart'; +import 
'package:focusflow_api/src/middleware/error_handler.dart'; +import 'package:focusflow_api/src/middleware/logging_middleware.dart'; +import 'package:focusflow_api/src/middleware/rate_limit_middleware.dart'; +import 'package:focusflow_api/src/modules/auth/auth_routes.dart'; +import 'package:focusflow_api/src/modules/auth/auth_service.dart'; +import 'package:focusflow_api/src/modules/auth/password_hasher.dart'; +import 'package:focusflow_api/src/modules/auth/token_service.dart'; +import 'package:focusflow_api/src/modules/rewards/reward_engine.dart'; +import 'package:focusflow_api/src/modules/rewards/reward_routes.dart'; +import 'package:focusflow_api/src/modules/rewards/reward_service.dart'; +import 'package:focusflow_api/src/modules/rooms/room_routes.dart'; +import 'package:focusflow_api/src/modules/rooms/room_service.dart'; +import 'package:focusflow_api/src/modules/streaks/streak_repository.dart'; +import 'package:focusflow_api/src/modules/streaks/streak_routes.dart'; +import 'package:focusflow_api/src/modules/streaks/streak_service.dart'; +import 'package:focusflow_api/src/modules/sync/sync_routes.dart'; +import 'package:focusflow_api/src/modules/sync/sync_service.dart'; +import 'package:focusflow_api/src/modules/tasks/dopamine_scorer.dart'; +import 'package:focusflow_api/src/modules/tasks/task_repository.dart'; +import 'package:focusflow_api/src/modules/tasks/task_routes.dart'; +import 'package:focusflow_api/src/modules/tasks/task_service.dart'; +import 'package:focusflow_api/src/modules/time/time_routes.dart'; +import 'package:focusflow_api/src/modules/time/time_service.dart'; + +final _log = Logger('Server'); + +Future main() async { + // ── Logging ───────────────────────────────────────────────────────── + Logger.root.level = Level.ALL; + Logger.root.onRecord.listen((record) { + // ignore: avoid_print + print( + '${record.time} [${record.level.name}] ${record.loggerName}: ' + '${record.message}', + ); + }); + + // ── Environment 
───────────────────────────────────────────────────── + Env.init(); + _log.info('Starting FocusFlow API (${Env.appEnv})'); + + // ── Database ──────────────────────────────────────────────────────── + await Database.init(); + _log.info('Database pool opened'); + + // ── Services (dependency injection) ───────────────────────────────── + final tokenService = TokenService(); + final passwordHasher = PasswordHasher(); + final authService = AuthService( + hasher: passwordHasher, + tokenService: tokenService, + ); + + final rewardEngine = RewardEngine(); + final dopamineScorer = DopamineScorer(); + final taskRepository = TaskRepository(); + final taskService = TaskService( + repository: taskRepository, + scorer: dopamineScorer, + rewardEngine: rewardEngine, + ); + + final streakRepository = StreakRepository(); + final streakService = StreakService(repository: streakRepository); + + final rewardService = RewardService(engine: rewardEngine); + final timeService = TimeService(); + final syncService = SyncService(); + final roomService = RoomService(); + + // ── Route modules ─────────────────────────────────────────────────── + final authRoutes = AuthRoutes(authService); + final taskRoutes = TaskRoutes(taskService); + final streakRoutes = StreakRoutes(streakService); + final rewardRoutes = RewardRoutes(rewardService); + final timeRoutes = TimeRoutes(timeService); + final syncRoutes = SyncRoutes(syncService); + final roomRoutes = RoomRoutes(roomService); + + // ── Router ────────────────────────────────────────────────────────── + final app = Router(); + + // Health check (public) + app.get('/health', (Request request) { + return Response.ok( + jsonEncode({'status': 'ok'}), + headers: {'Content-Type': 'application/json'}, + ); + }); + + // Mount module routers + app.mount('/api/v1/auth/', authRoutes.router.call); + app.mount('/api/v1/tasks/', taskRoutes.router.call); + app.mount('/api/v1/streaks/', streakRoutes.router.call); + app.mount('/api/v1/rewards/', 
rewardRoutes.router.call); + app.mount('/api/v1/time/', timeRoutes.router.call); + app.mount('/api/v1/sync/', syncRoutes.router.call); + app.mount('/api/v1/rooms/', roomRoutes.router.call); + + // ── Pipeline ──────────────────────────────────────────────────────── + final handler = const Pipeline() + .addMiddleware(corsMiddleware()) + .addMiddleware(loggingMiddleware()) + .addMiddleware(errorHandlerMiddleware()) + .addMiddleware(rateLimitMiddleware()) + .addMiddleware(authMiddleware(tokenService)) + .addHandler(app.call); + + // ── Start server ──────────────────────────────────────────────────── + final port = Env.port; + final server = + await shelf_io.serve(handler, InternetAddress.anyIPv4, port); + _log.info('Listening on http://${server.address.host}:${server.port}'); + + // ── Graceful shutdown ─────────────────────────────────────────────── + late final StreamSubscription sigintSub; + late final StreamSubscription sigtermSub; + + Future shutdown() async { + _log.info('Shutting down...'); + await server.close(force: false); + await Database.close(); + _log.info('Server stopped'); + await sigintSub.cancel(); + await sigtermSub.cancel(); + exit(0); + } + + sigintSub = ProcessSignal.sigint.watch().listen((_) => shutdown()); + sigtermSub = ProcessSignal.sigterm.watch().listen((_) => shutdown()); +} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..5c8d710 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,61 @@ +services: + api: + build: + context: . 
+ dockerfile: Dockerfile + ports: + - "8081:8081" + environment: + - PORT=8081 + - DATABASE_URL=postgresql://focusflow:focusflow@postgres:5432/focusflow + - REDIS_URL=redis://redis:6379 + - JWT_SECRET=${JWT_SECRET:-dev-secret-change-in-production} + - APP_ENV=development + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + restart: unless-stopped + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8081/health"] + interval: 30s + timeout: 5s + retries: 3 + start_period: 10s + + postgres: + image: postgres:16-alpine + ports: + - "5433:5432" + environment: + POSTGRES_DB: focusflow + POSTGRES_USER: focusflow + POSTGRES_PASSWORD: focusflow + volumes: + - pg_data:/var/lib/postgresql/data + - ./migrations:/docker-entrypoint-initdb.d + healthcheck: + test: ["CMD-SHELL", "pg_isready -U focusflow -d focusflow"] + interval: 10s + timeout: 5s + retries: 5 + restart: unless-stopped + + redis: + image: redis:7-alpine + ports: + - "6380:6379" + volumes: + - redis_data:/data + command: redis-server --appendonly yes + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 5 + restart: unless-stopped + +volumes: + pg_data: + redis_data: diff --git a/lib/src/config/database.dart b/lib/src/config/database.dart new file mode 100644 index 0000000..e07d7ae --- /dev/null +++ b/lib/src/config/database.dart @@ -0,0 +1,61 @@ +import 'package:postgres/postgres.dart'; + +import 'env.dart'; + +/// Manages the PostgreSQL connection pool. +class Database { + Database._(); + + static Pool? _pool; + + /// The active connection pool. + static Pool get pool { + if (_pool == null) { + throw StateError('Database not initialized. Call Database.init() first.'); + } + return _pool!; + } + + /// Open the connection pool. Call once at startup. 
+ static Future init() async { + final endpoint = _parseEndpoint(Env.databaseUrl); + _pool = Pool.withEndpoints( + [endpoint], + settings: PoolSettings( + maxConnectionCount: 10, + sslMode: SslMode.disable, + ), + ); + } + + /// Execute a query against the pool and return result rows. + static Future query( + String sql, { + Map? parameters, + }) async { + return pool.execute(Sql.named(sql), parameters: parameters ?? {}); + } + + /// Close the pool gracefully. + static Future close() async { + await _pool?.close(); + _pool = null; + } + + // ── helpers ────────────────────────────────────────────────────────── + + static Endpoint _parseEndpoint(String url) { + final uri = Uri.parse(url); + return Endpoint( + host: uri.host, + port: uri.port != 0 ? uri.port : 5432, + database: uri.pathSegments.isNotEmpty ? uri.pathSegments.first : 'focusflow_db', + username: uri.userInfo.contains(':') + ? uri.userInfo.split(':').first + : uri.userInfo, + password: uri.userInfo.contains(':') + ? uri.userInfo.split(':').last + : null, + ); + } +} diff --git a/lib/src/config/env.dart b/lib/src/config/env.dart new file mode 100644 index 0000000..2e96a36 --- /dev/null +++ b/lib/src/config/env.dart @@ -0,0 +1,67 @@ +import 'dart:io'; + +import 'package:dotenv/dotenv.dart'; + +/// Centralized environment configuration. +/// +/// Loads values from a `.env` file (if present) and falls back to +/// system environment variables. +class Env { + Env._(); + + static late final DotEnv _dotEnv; + static bool _initialized = false; + + /// Initialize the environment — call once at startup. + static void init() { + if (_initialized) return; + _dotEnv = DotEnv(includePlatformEnvironment: true); + if (File('.env').existsSync()) { + _dotEnv.load(['.env']); + } + _initialized = true; + } + + // ── helpers ────────────────────────────────────────────────────────── + + static String _get(String key, [String? fallback]) { + final value = _dotEnv.getOrElse(key, () => fallback ?? 
''); + if (value.isEmpty) { + throw StateError('Missing required environment variable: $key'); + } + return value; + } + + static int _getInt(String key, int fallback) { + final raw = _dotEnv.getOrElse(key, () => fallback.toString()); + return int.tryParse(raw) ?? fallback; + } + + // ── public accessors ──────────────────────────────────────────────── + + static int get port => _getInt('PORT', 8080); + + static String get databaseUrl => + _get('DATABASE_URL', 'postgres://focusflow:password@localhost:5432/focusflow_db'); + + static String get redisUrl => _get('REDIS_URL', 'redis://localhost:6379'); + + static String get jwtSecret => _get('JWT_SECRET', 'dev-secret-do-not-use-in-production'); + + static String get appEnv => _get('APP_ENV', 'development'); + + static bool get isProduction => appEnv == 'production'; + + static int get accessTokenExpiryMinutes => + _getInt('ACCESS_TOKEN_EXPIRY_MINUTES', 15); + + static int get refreshTokenExpiryDays => + _getInt('REFRESH_TOKEN_EXPIRY_DAYS', 30); + + static int get rateLimitRequests => _getInt('RATE_LIMIT_REQUESTS', 100); + + static int get rateLimitWindowSeconds => + _getInt('RATE_LIMIT_WINDOW_SECONDS', 60); + + static String get logLevel => _get('LOG_LEVEL', 'INFO'); +} diff --git a/lib/src/config/redis_config.dart b/lib/src/config/redis_config.dart new file mode 100644 index 0000000..2e9774e --- /dev/null +++ b/lib/src/config/redis_config.dart @@ -0,0 +1,36 @@ +import 'package:redis/redis.dart'; + +import 'env.dart'; + +/// Manages the Redis connection for caching, rate‑limiting, and sessions. +class RedisConfig { + RedisConfig._(); + + static RedisConnection? _connection; + static Command? _command; + + /// The active Redis command interface. + static Command get command { + if (_command == null) { + throw StateError('Redis not initialized. Call RedisConfig.init() first.'); + } + return _command!; + } + + /// Open a connection to Redis. Call once at startup. 
+ static Future init() async { + final uri = Uri.parse(Env.redisUrl); + final host = uri.host.isNotEmpty ? uri.host : 'localhost'; + final port = uri.port != 0 ? uri.port : 6379; + + _connection = RedisConnection(); + _command = await _connection!.connect(host, port); + } + + /// Close the Redis connection. + static Future close() async { + await _connection?.close(); + _connection = null; + _command = null; + } +} diff --git a/lib/src/middleware/auth_middleware.dart b/lib/src/middleware/auth_middleware.dart new file mode 100644 index 0000000..76e8d21 --- /dev/null +++ b/lib/src/middleware/auth_middleware.dart @@ -0,0 +1,59 @@ +import 'package:shelf/shelf.dart'; + +import '../modules/auth/token_service.dart'; +import '../shared/api_response.dart'; + +/// Routes that do NOT require authentication. +const _publicPaths = { + 'api/v1/auth/register', + 'api/v1/auth/login', + 'api/v1/auth/refresh', + 'health', +}; + +/// JWT authentication middleware. +/// +/// Skips verification for public routes and OPTIONS (preflight) requests. +/// On success, injects `userId` into the request context. 
+Middleware authMiddleware(TokenService tokenService) { + return (Handler innerHandler) { + return (Request request) async { + // Always allow preflight + if (request.method == 'OPTIONS') { + return innerHandler(request); + } + + // Allow public paths + final path = request.url.path; + if (_publicPaths.contains(path)) { + return innerHandler(request); + } + + // Extract Bearer token + final authHeader = request.headers['authorization']; + if (authHeader == null || !authHeader.startsWith('Bearer ')) { + return ApiResponse.unauthorized('Missing or invalid Authorization header'); + } + + final token = authHeader.substring(7); + final payload = tokenService.verifyAccessToken(token); + + if (payload == null) { + return ApiResponse.unauthorized('Invalid or expired access token'); + } + + final userId = payload['sub'] as String?; + if (userId == null) { + return ApiResponse.unauthorized('Malformed token payload'); + } + + // Inject userId into request context + final updatedRequest = request.change(context: { + 'userId': userId, + ...request.context, + }); + + return innerHandler(updatedRequest); + }; + }; +} diff --git a/lib/src/middleware/cors_middleware.dart b/lib/src/middleware/cors_middleware.dart new file mode 100644 index 0000000..08e2f83 --- /dev/null +++ b/lib/src/middleware/cors_middleware.dart @@ -0,0 +1,28 @@ +import 'package:shelf/shelf.dart'; + +/// CORS middleware that allows all origins in development +/// and restricts in production. 
+Middleware corsMiddleware() { + return (Handler innerHandler) { + return (Request request) async { + // Handle preflight + if (request.method == 'OPTIONS') { + return Response.ok( + '', + headers: _corsHeaders, + ); + } + + final response = await innerHandler(request); + return response.change(headers: _corsHeaders); + }; + }; +} + +const _corsHeaders = { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, PATCH, OPTIONS', + 'Access-Control-Allow-Headers': + 'Origin, Content-Type, Accept, Authorization, X-Requested-With', + 'Access-Control-Max-Age': '86400', +}; diff --git a/lib/src/middleware/error_handler.dart b/lib/src/middleware/error_handler.dart new file mode 100644 index 0000000..5264f14 --- /dev/null +++ b/lib/src/middleware/error_handler.dart @@ -0,0 +1,70 @@ +import 'dart:convert'; + +import 'package:logging/logging.dart'; +import 'package:shelf/shelf.dart'; + +final _log = Logger('ErrorHandler'); + +// ── ApiException ────────────────────────────────────────────────────── + +/// Typed exception that maps directly to an HTTP status code. 
+class ApiException implements Exception { + final int statusCode; + final String message; + final dynamic errors; + + const ApiException(this.statusCode, this.message, {this.errors}); + + // Factory constructors for common HTTP errors + factory ApiException.badRequest(String message, {dynamic errors}) => + ApiException(400, message, errors: errors); + + factory ApiException.unauthorized([String message = 'Unauthorized']) => + ApiException(401, message); + + factory ApiException.forbidden([String message = 'Forbidden']) => + ApiException(403, message); + + factory ApiException.notFound([String message = 'Resource not found']) => + ApiException(404, message); + + factory ApiException.conflict([String message = 'Conflict']) => + ApiException(409, message); + + factory ApiException.tooManyRequests( + [String message = 'Too many requests']) => + ApiException(429, message); + + @override + String toString() => 'ApiException($statusCode): $message'; +} + +// ── Middleware ───────────────────────────────────────────────────────── + +/// Catches all exceptions and returns a consistent JSON error response. 
+Middleware errorHandlerMiddleware() { + return (Handler innerHandler) { + return (Request request) async { + try { + return await innerHandler(request); + } on ApiException catch (e) { + return _jsonError(e.statusCode, e.message, errors: e.errors); + } catch (e, st) { + _log.severe('Unhandled exception', e, st); + return _jsonError(500, 'Internal server error'); + } + }; + }; +} + +Response _jsonError(int statusCode, String message, {dynamic errors}) { + return Response( + statusCode, + body: jsonEncode({ + 'success': false, + 'message': message, + if (errors != null) 'errors': errors, + }), + headers: {'Content-Type': 'application/json'}, + ); +} diff --git a/lib/src/middleware/logging_middleware.dart b/lib/src/middleware/logging_middleware.dart new file mode 100644 index 0000000..3add5e0 --- /dev/null +++ b/lib/src/middleware/logging_middleware.dart @@ -0,0 +1,38 @@ +import 'package:logging/logging.dart'; +import 'package:shelf/shelf.dart'; + +final _log = Logger('HTTP'); + +/// Logs every request with method, path, status, and duration. 
/// Logs every request with method, path, status code, and duration.
///
/// Errors are logged (with duration) and rethrown so the error-handler
/// middleware can still produce a response.
Middleware loggingMiddleware() {
  return (Handler innerHandler) {
    return (Request request) async {
      final stopwatch = Stopwatch()..start();
      final method = request.method;
      final path = '/${request.url.path}';

      Response response;
      try {
        response = await innerHandler(request);
      } catch (e) {
        stopwatch.stop();
        _log.severe(
            '$method $path — ERROR in ${stopwatch.elapsedMilliseconds}ms: $e');
        rethrow;
      }

      stopwatch.stop();
      final ms = stopwatch.elapsedMilliseconds;
      final status = response.statusCode;

      // Log level mirrors response severity: 5xx severe, 4xx warning.
      if (status >= 500) {
        _log.severe('$method $path $status ${ms}ms');
      } else if (status >= 400) {
        _log.warning('$method $path $status ${ms}ms');
      } else {
        _log.info('$method $path $status ${ms}ms');
      }

      return response;
    };
  };
}

/// Simple in-memory sliding-window rate limiter keyed by client IP.
///
/// NOTE(review): per-process only — swap for a Redis-backed implementation
/// when running multiple instances behind a load balancer.
Middleware rateLimitMiddleware() {
  // Fix vs. original draft: the bucket map is typed, and idle buckets are
  // periodically evicted so the map does not grow without bound as client
  // IPs churn (previously a slow memory leak).
  final buckets = <String, _Bucket>{};

  return (Handler innerHandler) {
    return (Request request) async {
      // Health checks are exempt so orchestrator probes are never throttled.
      if (request.url.path == 'health') {
        return innerHandler(request);
      }

      // Trust proxy-provided client IP headers; absent both, all clients
      // share a single 'unknown' bucket.
      final ip = request.headers['x-forwarded-for'] ??
          request.headers['x-real-ip'] ??
          'unknown';

      final maxRequests = Env.rateLimitRequests;
      final windowSeconds = Env.rateLimitWindowSeconds;
      final now = DateTime.now();

      final bucket = buckets.putIfAbsent(ip, () => _Bucket());
      bucket.prune(now, windowSeconds);

      // Bound memory: once the map is large, sweep out buckets whose
      // windows have fully expired.
      if (buckets.length > 10000) {
        buckets.removeWhere((_, b) {
          b.prune(now, windowSeconds);
          return b.count == 0;
        });
      }

      if (bucket.count >= maxRequests) {
        return Response(
          429,
          body: jsonEncode({
            'success': false,
            'message': 'Too many requests. Try again later.',
          }),
          headers: {
            'Content-Type': 'application/json',
            'Retry-After': windowSeconds.toString(),
          },
        );
      }

      bucket.add(now);

      final response = await innerHandler(request);
      return response.change(headers: {
        'X-RateLimit-Limit': maxRequests.toString(),
        'X-RateLimit-Remaining': (maxRequests - bucket.count).toString(),
      });
    };
  };
}

/// Sliding window of request timestamps for a single client.
class _Bucket {
  final List<DateTime> _timestamps = [];

  /// Requests currently inside the window.
  int get count => _timestamps.length;

  void add(DateTime time) => _timestamps.add(time);

  /// Drops timestamps older than the window.
  void prune(DateTime now, int windowSeconds) {
    final cutoff = now.subtract(Duration(seconds: windowSeconds));
    _timestamps.removeWhere((t) => t.isBefore(cutoff));
  }
}
/// Auth module routes: register, login, refresh, logout, delete account.
class AuthRoutes {
  final AuthService _authService;

  AuthRoutes(this._authService);

  /// Router exposing the auth endpoints (mounted under the auth prefix).
  Router get router => Router()
    ..post('/register', _register)
    ..post('/login', _login)
    ..post('/refresh', _refresh)
    ..post('/logout', _logout)
    ..delete('/account', _deleteAccount);

  /// Decodes the request body as a JSON object.
  Future<Map> _readBody(Request request) async =>
      jsonDecode(await request.readAsString()) as Map;

  // ── Handlers ──────────────────────────────────────────────────────────

  /// POST /register — create an account and return a token pair.
  Future<Response> _register(Request request) async {
    final body = await _readBody(request);

    final email = body['email'] as String?;
    final password = body['password'] as String?;
    final displayName = body['display_name'] as String?;

    if (email == null || password == null || displayName == null) {
      throw ApiException.badRequest(
        'Missing required fields: email, password, display_name',
      );
    }
    if (password.length < 8) {
      throw ApiException.badRequest('Password must be at least 8 characters');
    }

    final tokens = await _authService.register(
      email: email,
      password: password,
      displayName: displayName,
    );
    return ApiResponse.created(tokens, message: 'Account created');
  }

  /// POST /login — verify credentials and return a token pair.
  Future<Response> _login(Request request) async {
    final body = await _readBody(request);

    final email = body['email'] as String?;
    final password = body['password'] as String?;

    if (email == null || password == null) {
      throw ApiException.badRequest('Missing required fields: email, password');
    }

    final tokens = await _authService.login(
      email: email,
      password: password,
    );
    return ApiResponse.success(tokens, message: 'Login successful');
  }

  /// POST /refresh — exchange a refresh token for a fresh token pair.
  Future<Response> _refresh(Request request) async {
    final body = await _readBody(request);

    final refreshToken = body['refresh_token'] as String?;
    if (refreshToken == null) {
      throw ApiException.badRequest('Missing required field: refresh_token');
    }

    final tokens = await _authService.refresh(refreshToken);
    return ApiResponse.success(tokens);
  }

  /// POST /logout — revoke the supplied refresh token.
  Future<Response> _logout(Request request) async {
    final body = await _readBody(request);

    final refreshToken = body['refresh_token'] as String?;
    if (refreshToken == null) {
      throw ApiException.badRequest('Missing required field: refresh_token');
    }

    await _authService.logout(refreshToken);
    return ApiResponse.success(null, message: 'Logged out successfully');
  }

  /// DELETE /account — soft-delete the authenticated user's account.
  /// `userId` is injected into the request context by the auth middleware.
  Future<Response> _deleteAccount(Request request) async {
    final userId = request.context['userId'] as String;
    await _authService.deleteAccount(userId);
    return ApiResponse.success(null, message: 'Account deleted');
  }
}
/// Handles authentication business logic: register, login, refresh, logout.
class AuthService {
  final PasswordHasher _hasher;
  final TokenService _tokenService;
  final _uuid = const Uuid();

  /// Pre-computed hash used to equalise login timing when the email is
  /// unknown, so response time does not reveal whether an account exists.
  late final String _dummyHash = _hasher.hash('timing-equalisation-dummy');

  AuthService({
    required PasswordHasher hasher,
    required TokenService tokenService,
  })  : _hasher = hasher,
        _tokenService = tokenService;

  // ── Register ──────────────────────────────────────────────────────

  /// Creates a user and returns an access/refresh token pair.
  ///
  /// Throws a 409 [ApiException] when the email is already registered.
  /// NOTE(review): the SELECT-then-INSERT is not atomic — confirm a unique
  /// index on users.email backs this check at the database level.
  Future<Map<String, dynamic>> register({
    required String email,
    required String password,
    required String displayName,
  }) async {
    final existing = await Database.query(
      'SELECT id FROM users WHERE email = @email',
      parameters: {'email': email},
    );
    if (existing.isNotEmpty) {
      throw ApiException.conflict('Email already registered');
    }

    final userId = _uuid.v4();
    final now = DateTime.now().toUtc();

    await Database.query(
      '''
      INSERT INTO users (id, email, password_hash, display_name, created_at, updated_at)
      VALUES (@id, @email, @password_hash, @display_name, @created_at, @updated_at)
      ''',
      parameters: {
        'id': userId,
        'email': email,
        'password_hash': _hasher.hash(password),
        'display_name': displayName,
        'created_at': now,
        'updated_at': now,
      },
    );

    return _issueTokens(userId, email);
  }

  // ── Login ─────────────────────────────────────────────────────────

  /// Verifies credentials and returns a token pair.
  ///
  /// Uses one generic 401 message for both unknown email and bad password,
  /// and burns an equivalent PBKDF2 verification when the email is unknown
  /// so the two failure modes are not distinguishable by timing.
  Future<Map<String, dynamic>> login({
    required String email,
    required String password,
  }) async {
    final result = await Database.query(
      'SELECT id, password_hash FROM users WHERE email = @email',
      parameters: {'email': email},
    );

    if (result.isEmpty) {
      _hasher.verify(password, _dummyHash); // timing equalisation
      throw ApiException.unauthorized('Invalid email or password');
    }

    final row = result.first;
    final userId = row[0] as String;
    final storedHash = row[1] as String;

    if (!_hasher.verify(password, storedHash)) {
      throw ApiException.unauthorized('Invalid email or password');
    }

    return _issueTokens(userId, email);
  }

  // ── Refresh ───────────────────────────────────────────────────────

  /// Exchanges a valid refresh token for a new token pair, revoking the
  /// old token (rotation) so a stolen token can be used at most once.
  Future<Map<String, dynamic>> refresh(String refreshToken) async {
    final result = await Database.query(
      '''
      SELECT user_id FROM refresh_tokens
      WHERE token = @token AND expires_at > NOW() AND revoked = false
      ''',
      parameters: {'token': refreshToken},
    );

    if (result.isEmpty) {
      throw ApiException.unauthorized('Invalid or expired refresh token');
    }

    final userId = result.first[0] as String;

    // Revoke old token (rotation).
    await Database.query(
      'UPDATE refresh_tokens SET revoked = true WHERE token = @token',
      parameters: {'token': refreshToken},
    );

    // Email is embedded in the access token; tolerate a missing user row
    // (e.g. account deleted between queries) by issuing without it.
    final userResult = await Database.query(
      'SELECT email FROM users WHERE id = @id',
      parameters: {'id': userId},
    );
    final email = userResult.isNotEmpty ? userResult.first[0] as String : null;

    return _issueTokens(userId, email);
  }

  // ── Logout ────────────────────────────────────────────────────────

  /// Revokes a single refresh token. Idempotent.
  Future<void> logout(String refreshToken) async {
    await Database.query(
      'UPDATE refresh_tokens SET revoked = true WHERE token = @token',
      parameters: {'token': refreshToken},
    );
  }

  // ── Delete account ────────────────────────────────────────────────

  /// Revokes all of the user's refresh tokens, then soft-deletes the user.
  /// The email is suffixed so the address can be re-registered later.
  Future<void> deleteAccount(String userId) async {
    await Database.query(
      'UPDATE refresh_tokens SET revoked = true WHERE user_id = @user_id',
      parameters: {'user_id': userId},
    );

    await Database.query(
      '''
      UPDATE users
      SET deleted_at = NOW(), email = CONCAT(email, ':deleted:', @user_id)
      WHERE id = @user_id
      ''',
      parameters: {'user_id': userId},
    );
  }

  // ── Helpers ───────────────────────────────────────────────────────

  /// Issues an access token, persists a new refresh token, and returns the
  /// OAuth-style response body.
  Future<Map<String, dynamic>> _issueTokens(
      String userId, String? email) async {
    final accessToken = _tokenService.generateAccessToken(userId, email: email);
    final refreshToken = _tokenService.generateRefreshToken();

    await Database.query(
      '''
      INSERT INTO refresh_tokens (id, user_id, token, expires_at, revoked, created_at)
      VALUES (@id, @user_id, @token, @expires_at, false, NOW())
      ''',
      parameters: {
        'id': _uuid.v4(),
        'user_id': userId,
        'token': refreshToken,
        'expires_at': _tokenService.refreshTokenExpiry(),
      },
    );

    return {
      'access_token': accessToken,
      'refresh_token': refreshToken,
      'token_type': 'Bearer',
      'expires_in': Env.accessTokenExpiryMinutes * 60,
      'user_id': userId,
    };
  }
}
/// PBKDF2-based password hashing with per-user salts.
///
/// Stored format: `iterations:base64(salt):base64(derivedKey)`.
class PasswordHasher {
  static const int _iterations = 100000;
  static const int _keyLength = 32; // 256-bit derived key
  static const int _saltLength = 32;

  /// Hashes [password] with a fresh random salt and returns the storable
  /// `iterations:base64salt:base64hash` string.
  String hash(String password) {
    final salt = _randomSalt();
    final key = _deriveKey(password, salt, _iterations, _keyLength);
    return '$_iterations:${base64Encode(salt)}:${base64Encode(key)}';
  }

  /// Verifies [password] against a stored [hashString].
  ///
  /// Returns false when the stored string does not have the expected
  /// three-part shape. The hash comparison is constant-time.
  bool verify(String password, String hashString) {
    final parts = hashString.split(':');
    if (parts.length != 3) return false;

    final iterations = int.tryParse(parts[0]);
    if (iterations == null) return false;

    final salt = base64Decode(parts[1]);
    final expected = base64Decode(parts[2]);
    final actual = _deriveKey(password, salt, iterations, expected.length);

    // Constant-time comparison: always scan the full length.
    if (actual.length != expected.length) return false;
    var diff = 0;
    for (var i = 0; i < expected.length; i++) {
      diff |= actual[i] ^ expected[i];
    }
    return diff == 0;
  }

  // ── internals ─────────────────────────────────────────────────────

  /// Cryptographically random salt of [_saltLength] bytes.
  Uint8List _randomSalt() {
    final rng = Random.secure();
    final bytes = Uint8List(_saltLength);
    for (var i = 0; i < _saltLength; i++) {
      bytes[i] = rng.nextInt(256);
    }
    return bytes;
  }

  /// PBKDF2 key derivation using HMAC-SHA256 (RFC 8018 §5.2).
  Uint8List _deriveKey(
    String password,
    List<int> salt,
    int iterations,
    int keyLength,
  ) {
    final prf = Hmac(sha256, utf8.encode(password));
    final blockCount = (keyLength / 32).ceil(); // SHA-256 output = 32 bytes
    final out = BytesBuilder();

    for (var blockIndex = 1; blockIndex <= blockCount; blockIndex++) {
      // U1 = PRF(salt || INT_32_BE(blockIndex))
      final counter = ByteData(4)..setUint32(0, blockIndex);
      var u = prf.convert([...salt, ...counter.buffer.asUint8List()]).bytes;
      final acc = Uint8List.fromList(u);

      // Uj = PRF(U(j-1)); accumulator is the XOR of all Uj.
      for (var i = 1; i < iterations; i++) {
        u = prf.convert(u).bytes;
        for (var j = 0; j < acc.length; j++) {
          acc[j] ^= u[j];
        }
      }
      out.add(acc);
    }

    return Uint8List.fromList(out.toBytes().sublist(0, keyLength));
  }
}
/// Manages JWT access tokens and opaque refresh tokens.
class TokenService {
  /// Signs a short-lived access token for [userId], optionally embedding
  /// the user's email. Expiry comes from [Env.accessTokenExpiryMinutes].
  String generateAccessToken(String userId, {String? email}) {
    final claims = <String, dynamic>{
      'sub': userId,
      if (email != null) 'email': email,
      'type': 'access',
    };
    return JWT(claims, issuer: 'focusflow_api').sign(
      SecretKey(Env.jwtSecret),
      expiresIn: Duration(minutes: Env.accessTokenExpiryMinutes),
    );
  }

  /// Verifies [token] and returns its payload, or `null` when the token is
  /// invalid, expired, or not of type 'access' (e.g. a refresh token).
  Map<String, dynamic>? verifyAccessToken(String token) {
    try {
      final payload = JWT.verify(token, SecretKey(Env.jwtSecret)).payload
          as Map<String, dynamic>;
      return payload['type'] == 'access' ? payload : null;
    } catch (_) {
      return null;
    }
  }

  /// Generates an opaque refresh token: 48 cryptographically random bytes,
  /// hex-encoded (96 characters).
  String generateRefreshToken() {
    final rng = Random.secure();
    final buffer = StringBuffer();
    for (var i = 0; i < 48; i++) {
      buffer.write(rng.nextInt(256).toRadixString(16).padLeft(2, '0'));
    }
    return buffer.toString();
  }

  /// UTC expiry instant for a refresh token issued now.
  DateTime refreshTokenExpiry() =>
      DateTime.now().toUtc().add(Duration(days: Env.refreshTokenExpiryDays));
}
/// Variable reward generation engine inspired by behavioural psychology.
///
/// Points are the sum of five additive components:
///  1. base reward (task energy), 2. streak bonus, 3. novelty bonus,
///  4. surprise roll, 5. combo bonus.
/// A visual reward type is also chosen by weighted random draw, never
/// repeating the previously issued type.
class RewardEngine {
  final Random _random;

  /// Visual type issued on the previous call (repeat-avoidance state).
  String? _lastVisualType;

  /// Calls since the last surprise payout; drives surprise probability.
  int _tasksSinceLastSurprise = 0;

  /// Completion timestamps within the last 30 minutes (combo detection).
  final List<DateTime> _recentCompletions = [];

  RewardEngine({Random? random}) : _random = random ?? Random();

  /// Generates a reward for a completed task.
  ///
  /// [lastRewardType] (the persisted previous visual type) seeds the
  /// repeat-avoidance state on the first call after process start.
  ///
  /// Bug fix vs. the original draft: the novelty bonus now compares the
  /// *newly selected* visual type against the previous one, and the tracked
  /// previous type is no longer clobbered with the stale caller-supplied
  /// value after selection (which defeated the no-repeat guard).
  Map<String, dynamic> generate({
    required int taskEnergyLevel,
    required int currentStreak,
    String? lastRewardType,
  }) {
    final now = DateTime.now();

    // Track recent completions for the combo bonus.
    _recentCompletions.add(now);
    _recentCompletions.removeWhere((t) => now.difference(t).inMinutes > 30);

    _tasksSinceLastSurprise++;

    // Seed repeat-avoidance state from persistence on the first call only.
    _lastVisualType ??= lastRewardType;

    final base = _baseReward(taskEnergyLevel);
    final streakBonus = _streakBonus(currentStreak);

    // Select the new visual type first so novelty can compare new vs. old.
    final previousType = _lastVisualType;
    final visualType = _selectVisualType();
    _lastVisualType = visualType;

    final noveltyBonus = _noveltyBonus(previousType, visualType);
    final surprise = _surpriseRoll();
    final comboBonus = _comboBonus();

    final totalPoints =
        base + streakBonus + noveltyBonus + surprise + comboBonus;

    return {
      'points': totalPoints,
      'magnitude': _betaMagnitude(),
      'visual_type': visualType,
      'is_surprise': surprise > 0,
      'breakdown': <String, int>{
        'base': base,
        'streak_bonus': streakBonus,
        'novelty_bonus': noveltyBonus,
        'surprise': surprise,
        'combo_bonus': comboBonus,
      },
    };
  }

  // ── Component implementations ─────────────────────────────────────

  /// Energy level 1–5 mapped linearly to 6–30 points.
  int _baseReward(int energyLevel) =>
      (energyLevel.clamp(1, 5) * 6).clamp(1, 30);

  /// 2 points per streak day, capped at 20.
  int _streakBonus(int currentStreak) => min(currentStreak * 2, 20);

  /// 10 on the very first reward, 15 when the type changed, 0 on a repeat
  /// (only possible when the candidate pool collapses to one entry).
  int _noveltyBonus(String? previousType, String newType) {
    if (previousType == null) return 10;
    return newType != previousType ? 15 : 0;
  }

  /// Surprise roll: 5% base chance + 5% per task since the last surprise,
  /// capped at 50%. Pays 20–50 points and resets the counter on a hit.
  int _surpriseRoll() {
    final probability =
        (0.05 + 0.05 * _tasksSinceLastSurprise).clamp(0.0, 0.50);
    if (_random.nextDouble() < probability) {
      _tasksSinceLastSurprise = 0;
      return 20 + _random.nextInt(31); // 20–50 inclusive
    }
    return 0;
  }

  /// 3 points per additional completion in the last 30 minutes, capped at 10.
  int _comboBonus() {
    final count = _recentCompletions.length;
    return count <= 1 ? 0 : min((count - 1) * 3, 10);
  }

  /// Irwin–Hall (n=2) pseudo-beta: mean of two uniforms, peaking at 0.5.
  double _betaMagnitude() =>
      (_random.nextDouble() + _random.nextDouble()) / 2.0;

  /// Weighted random visual type, excluding the previously issued type so
  /// the same animation never plays twice in a row.
  String _selectVisualType() {
    const types = <String, double>{
      'animation': 0.30,
      'badge': 0.20,
      'message': 0.25,
      'unlock': 0.10,
      'surprise': 0.15,
    };

    final candidates = Map<String, double>.from(types);
    if (_lastVisualType != null && candidates.length > 1) {
      candidates.remove(_lastVisualType);
    }

    // Weighted draw over the renormalised candidate pool.
    final totalWeight = candidates.values.fold(0.0, (a, b) => a + b);
    final roll = _random.nextDouble() * totalWeight;

    var cumulative = 0.0;
    for (final entry in candidates.entries) {
      cumulative += entry.value;
      if (roll <= cumulative) return entry.key;
    }
    return candidates.keys.last;
  }
}
/// Reward module route definitions.
class RewardRoutes {
  final RewardService _service;

  RewardRoutes(this._service);

  /// Router exposing reward generation and history endpoints.
  Router get router => Router()
    ..post('/generate', _generate)
    ..get('/history', _history);

  // ── Handlers ──────────────────────────────────────────────────────

  /// POST /generate — roll and persist a reward for a completed task.
  Future<Response> _generate(Request request) async {
    final userId = request.context['userId'] as String;
    final body =
        jsonDecode(await request.readAsString()) as Map<String, dynamic>;

    final taskId = body['task_id'] as String?;
    final energyLevel = body['task_energy_level'] as int?;
    if (taskId == null || energyLevel == null) {
      throw ApiException.badRequest(
        'Missing required fields: task_id, task_energy_level',
      );
    }

    final reward = await _service.generateReward(
      userId: userId,
      taskId: taskId,
      taskEnergyLevel: energyLevel,
      currentStreak: (body['current_streak'] as int?) ?? 0,
    );
    return ApiResponse.created(reward, message: 'Reward generated!');
  }

  /// GET /history — paginated reward history, newest first.
  Future<Response> _history(Request request) async {
    final userId = request.context['userId'] as String;
    final paging = Pagination.fromRequest(request);

    final rewards = await _service.getHistory(
      userId,
      limit: paging.perPage,
      offset: paging.offset,
    );
    return ApiResponse.success(rewards);
  }
}
/// Business logic for the rewards module.
class RewardService {
  final RewardEngine _engine;
  final _uuid = const Uuid();

  RewardService({required RewardEngine engine}) : _engine = engine;

  /// Generates a reward for a completed task, persists it, and returns it
  /// (with its new `id`) for immediate display.
  Future<Map<String, dynamic>> generateReward({
    required String userId,
    required String taskId,
    required int taskEnergyLevel,
    required int currentStreak,
  }) async {
    // The engine's novelty bonus needs the most recently issued visual type.
    final lastResult = await Database.query(
      '''
      SELECT visual_type FROM rewards
      WHERE user_id = @user_id
      ORDER BY created_at DESC
      LIMIT 1
      ''',
      parameters: {'user_id': userId},
    );
    final lastType =
        lastResult.isEmpty ? null : lastResult.first[0] as String?;

    final reward = _engine.generate(
      taskEnergyLevel: taskEnergyLevel,
      currentStreak: currentStreak,
      lastRewardType: lastType,
    );

    final id = _uuid.v4();
    await Database.query(
      '''
      INSERT INTO rewards (id, user_id, task_id, points, magnitude, visual_type,
                           is_surprise, breakdown, created_at)
      VALUES (@id, @user_id, @task_id, @points, @magnitude, @visual_type,
              @is_surprise, @breakdown, NOW())
      ''',
      parameters: {
        'id': id,
        'user_id': userId,
        'task_id': taskId,
        'points': reward['points'],
        'magnitude': reward['magnitude'],
        'visual_type': reward['visual_type'],
        'is_surprise': reward['is_surprise'],
        // NOTE(review): Map.toString() is not valid JSON — if the breakdown
        // column is json/jsonb this should be jsonEncode(...). Confirm
        // against 001_initial_schema.sql before changing.
        'breakdown': reward['breakdown'].toString(),
      },
    );

    return {'id': id, ...reward};
  }

  /// Fetches a page of the user's reward history, newest first.
  Future<List<Map<String, dynamic>>> getHistory(
    String userId, {
    int limit = 50,
    int offset = 0,
  }) async {
    final result = await Database.query(
      '''
      SELECT id, user_id, task_id, points, magnitude, visual_type,
             is_surprise, breakdown, created_at
      FROM rewards
      WHERE user_id = @user_id
      ORDER BY created_at DESC
      LIMIT @limit OFFSET @offset
      ''',
      parameters: {
        'user_id': userId,
        'limit': limit,
        'offset': offset,
      },
    );

    // Timestamps are serialised to ISO-8601 for the JSON response.
    dynamic iso(dynamic v) => v is DateTime ? v.toIso8601String() : v;

    return [
      for (final row in result)
        <String, dynamic>{
          'id': row[0],
          'user_id': row[1],
          'task_id': row[2],
          'points': row[3],
          'magnitude': row[4],
          'visual_type': row[5],
          'is_surprise': row[6],
          'breakdown': row[7],
          'created_at': iso(row[8]),
        },
    ];
  }
}
/// Body-doubling room route definitions (placeholder).
class RoomRoutes {
  final RoomService _service;

  RoomRoutes(this._service);

  /// Router exposing room endpoints.
  ///
  /// Fix vs. the visible draft: the join/leave routes carry an `<id>`
  /// parameter segment so shelf_router can bind the `(Request, String id)`
  /// handlers (the parameter was missing from the registered paths).
  Router get router {
    final router = Router();

    router.get('/', _listRooms);
    router.post('/', _createRoom);
    router.post('/<id>/join', _joinRoom);
    router.post('/<id>/leave', _leaveRoom);

    return router;
  }

  // ── Handlers ──────────────────────────────────────────────────────

  /// GET / — list rooms visible to the authenticated user.
  Future<Response> _listRooms(Request request) async {
    final userId = request.context['userId'] as String;
    final rooms = await _service.listRooms(userId);
    return ApiResponse.success(rooms);
  }

  /// POST / — create a room; the body is optional and defaults to {}.
  Future<Response> _createRoom(Request request) async {
    final userId = request.context['userId'] as String;
    Map<String, dynamic> body;
    try {
      body = jsonDecode(await request.readAsString()) as Map<String, dynamic>;
    } catch (_) {
      body = {}; // empty/malformed body is tolerated for this placeholder
    }
    final result = await _service.createRoom(userId, body);
    return ApiResponse.success(result, message: 'TODO: Room creation');
  }

  /// POST /<id>/join — join a room.
  Future<Response> _joinRoom(Request request, String id) async {
    final userId = request.context['userId'] as String;
    final result = await _service.joinRoom(id, userId);
    return ApiResponse.success(result, message: 'TODO: Join room');
  }

  /// POST /<id>/leave — leave a room.
  Future<Response> _leaveRoom(Request request, String id) async {
    final userId = request.context['userId'] as String;
    final result = await _service.leaveRoom(id, userId);
    return ApiResponse.success(result, message: 'TODO: Leave room');
  }
}
/// Placeholder service for body-doubling rooms.
///
/// The full WebSocket implementation (channels, presence) is planned for a
/// future iteration; every mutating method currently returns a stub payload.
class RoomService {
  /// Shared stub payload for the unimplemented operations.
  Map<String, dynamic> _todo(String message) => {
        'message': message,
        'status': 'todo',
      };

  /// Lists rooms visible to [userId]. Always empty until implemented.
  Future<List<Map<String, dynamic>>> listRooms(String userId) async => [];

  /// TODO: create a room backed by a WebSocket channel.
  Future<Map<String, dynamic>> createRoom(
    String userId,
    Map<String, dynamic> data,
  ) async =>
      _todo('Room creation not yet implemented');

  /// TODO: WebSocket join flow.
  Future<Map<String, dynamic>> joinRoom(String roomId, String userId) async =>
      _todo('Room join not yet implemented');

  /// TODO: WebSocket leave flow.
  Future<Map<String, dynamic>> leaveRoom(String roomId, String userId) async =>
      _todo('Room leave not yet implemented');
}
/// Data access layer for streaks and streak entries.
class StreakRepository {
  /// Columns callers may set via [update]. Keys are interpolated into the
  /// SQL SET clause, so anything outside this set is rejected to rule out
  /// SQL injection through map keys.
  static const Set<String> _updatableColumns = {
    'name',
    'description',
    'frequency',
    'grace_days',
    'current_count',
    'longest_count',
    'frozen_until',
  };

  // ── Create ────────────────────────────────────────────────────────

  /// Inserts a new streak (counts start at zero, not frozen) and returns
  /// the created row.
  Future<Map<String, dynamic>> create(Map<String, dynamic> data) async {
    final result = await Database.query(
      '''
      INSERT INTO streaks (
        id, user_id, name, description, frequency, grace_days,
        current_count, longest_count, frozen_until,
        created_at, updated_at
      ) VALUES (
        @id, @user_id, @name, @description, @frequency, @grace_days,
        0, 0, NULL,
        NOW(), NOW()
      )
      RETURNING *
      ''',
      parameters: data,
    );
    return _rowToMap(result.first);
  }

  // ── Read ──────────────────────────────────────────────────────────

  /// Fetches one streak scoped to [userId]; null when absent or not owned.
  Future<Map<String, dynamic>?> findById(String id, String userId) async {
    final result = await Database.query(
      '''
      SELECT * FROM streaks
      WHERE id = @id AND user_id = @user_id
      ''',
      parameters: {'id': id, 'user_id': userId},
    );
    if (result.isEmpty) return null;
    return _rowToMap(result.first);
  }

  /// All streaks for a user, newest first.
  Future<List<Map<String, dynamic>>> findAll(String userId) async {
    final result = await Database.query(
      '''
      SELECT * FROM streaks
      WHERE user_id = @user_id
      ORDER BY created_at DESC
      ''',
      parameters: {'user_id': userId},
    );
    return result.map(_rowToMap).toList();
  }

  // ── Update ────────────────────────────────────────────────────────

  /// Applies a partial update and returns the updated row, or null when
  /// the streak does not exist or is not owned by [userId].
  ///
  /// Throws [ArgumentError] for keys outside [_updatableColumns].
  Future<Map<String, dynamic>?> update(
    String id,
    String userId,
    Map<String, dynamic> data,
  ) async {
    final setClauses = <String>[];
    final params = <String, dynamic>{'id': id, 'user_id': userId};

    data.forEach((key, value) {
      if (!_updatableColumns.contains(key)) {
        throw ArgumentError.value(key, 'data', 'Not an updatable column');
      }
      setClauses.add('$key = @$key');
      params[key] = value;
    });
    setClauses.add('updated_at = NOW()');

    final result = await Database.query(
      '''
      UPDATE streaks SET ${setClauses.join(', ')}
      WHERE id = @id AND user_id = @user_id
      RETURNING *
      ''',
      parameters: params,
    );
    if (result.isEmpty) return null;
    return _rowToMap(result.first);
  }

  // ── Entries ───────────────────────────────────────────────────────

  /// Records one entry per (streak, date); ON CONFLICT DO NOTHING makes
  /// completing twice in the same day harmless.
  Future<void> addEntry(Map<String, dynamic> data) async {
    await Database.query(
      '''
      INSERT INTO streak_entries (id, streak_id, entry_date, entry_type, note, created_at)
      VALUES (@id, @streak_id, @entry_date, @entry_type, @note, NOW())
      ON CONFLICT (streak_id, entry_date) DO NOTHING
      ''',
      parameters: data,
    );
  }

  /// Last 90 entries for a streak; ownership is enforced via the join.
  Future<List<Map<String, dynamic>>> getHistory(
    String streakId,
    String userId,
  ) async {
    final result = await Database.query(
      '''
      SELECT se.* FROM streak_entries se
      JOIN streaks s ON se.streak_id = s.id
      WHERE se.streak_id = @streak_id AND s.user_id = @user_id
      ORDER BY se.entry_date DESC
      LIMIT 90
      ''',
      parameters: {'streak_id': streakId, 'user_id': userId},
    );
    return result
        .map((row) => <String, dynamic>{
              'id': row[0],
              'streak_id': row[1],
              'entry_date': row[2] is DateTime
                  ? (row[2] as DateTime).toIso8601String()
                  : row[2],
              'entry_type': row[3],
              'note': row[4],
              'created_at': row[5] is DateTime
                  ? (row[5] as DateTime).toIso8601String()
                  : row[5],
            })
        .toList();
  }

  /// Date of the most recent 'completion' entry, or null when there is none.
  Future<DateTime?> lastCompletionDate(String streakId) async {
    final result = await Database.query(
      '''
      SELECT MAX(entry_date) FROM streak_entries
      WHERE streak_id = @streak_id AND entry_type = 'completion'
      ''',
      parameters: {'streak_id': streakId},
    );
    if (result.isEmpty || result.first[0] == null) return null;
    final value = result.first[0];
    if (value is DateTime) return value;
    return DateTime.tryParse(value.toString());
  }

  // ── Row mapper ────────────────────────────────────────────────────

  /// Maps a `SELECT *` row to a JSON-safe map.
  /// NOTE(review): relies on positional column order matching the streaks
  /// table definition in 001_initial_schema.sql — verify on schema changes.
  Map<String, dynamic> _rowToMap(dynamic row) {
    const columns = [
      'id',
      'user_id',
      'name',
      'description',
      'frequency',
      'grace_days',
      'current_count',
      'longest_count',
      'frozen_until',
      'created_at',
      'updated_at',
    ];
    final map = <String, dynamic>{};
    for (var i = 0; i < columns.length; i++) {
      final value = row[i];
      map[columns[i]] = value is DateTime ? value.toIso8601String() : value;
    }
    return map;
  }
}
await _service.completeToday(id, userId); + return ApiResponse.success(result, message: result['message'] as String); + } + + Future _forgive(Request request, String id) async { + final userId = request.context['userId'] as String; + + String? note; + try { + final body = + jsonDecode(await request.readAsString()) as Map; + note = body['note'] as String?; + } catch (_) { + // body optional + } + + final result = await _service.forgive(id, userId, note: note); + return ApiResponse.success(result, message: result['message'] as String); + } + + Future _freeze(Request request, String id) async { + final userId = request.context['userId'] as String; + final body = + jsonDecode(await request.readAsString()) as Map; + + final days = body['days'] as int?; + if (days == null) { + throw ApiException.badRequest('Field "days" is required'); + } + + final result = await _service.freeze(id, userId, days: days); + return ApiResponse.success(result, message: result['message'] as String); + } + + Future _getHistory(Request request, String id) async { + final userId = request.context['userId'] as String; + final history = await _service.getHistory(id, userId); + return ApiResponse.success(history); + } +} diff --git a/lib/src/modules/streaks/streak_service.dart b/lib/src/modules/streaks/streak_service.dart new file mode 100644 index 0000000..487dbb4 --- /dev/null +++ b/lib/src/modules/streaks/streak_service.dart @@ -0,0 +1,230 @@ +import 'package:uuid/uuid.dart'; + +import '../../middleware/error_handler.dart'; +import 'streak_repository.dart'; + +/// Business logic for the streaks module with ADHD-friendly forgiveness. 
/// Business logic for the streaks module with ADHD-friendly forgiveness.
class StreakService {
  final StreakRepository _repo;
  final _uuid = const Uuid();

  StreakService({required StreakRepository repository}) : _repo = repository;

  // ── CRUD ──────────────────────────────────────────────────────────

  /// Creates a streak owned by [userId].
  ///
  /// Defaults: empty description, 'daily' frequency, 1 grace day.
  Future<Map<String, dynamic>> createStreak(
    String userId,
    Map<String, dynamic> data,
  ) async {
    final streakData = {
      'id': _uuid.v4(),
      'user_id': userId,
      'name': data['name'],
      'description': data['description'] ?? '',
      'frequency': data['frequency'] ?? 'daily',
      'grace_days': data['grace_days'] ?? 1,
    };
    return _repo.create(streakData);
  }

  /// Returns the streak, or throws 404 when it does not exist or is not
  /// owned by [userId].
  Future<Map<String, dynamic>> getStreak(String id, String userId) async {
    final streak = await _repo.findById(id, userId);
    if (streak == null) throw ApiException.notFound('Streak not found');
    return streak;
  }

  Future<List<Map<String, dynamic>>> listStreaks(String userId) {
    return _repo.findAll(userId);
  }

  // ── Complete today ────────────────────────────────────────────────

  /// Logs a completion for today (UTC day) and updates the streak count.
  ///
  /// Repeated completions on the same day are harmless: the repository
  /// inserts entries with ON CONFLICT DO NOTHING.
  Future<Map<String, dynamic>> completeToday(
    String id,
    String userId,
  ) async {
    final streak = await _repo.findById(id, userId);
    if (streak == null) throw ApiException.notFound('Streak not found');

    // Completions are rejected while the streak is frozen.
    // BUG FIX: the original did `DateTime.parse(frozenUntil as String)`,
    // which throws a TypeError (surfacing as HTTP 500) if the value is a
    // DateTime rather than an ISO string. Parse tolerantly instead.
    final frozenDate = _asDateTime(streak['frozen_until']);
    if (frozenDate != null && DateTime.now().isBefore(frozenDate)) {
      throw ApiException.badRequest(
        'Streak is frozen until ${frozenDate.toIso8601String()}',
      );
    }

    final today = DateTime.now().toUtc();
    final entryDate = DateTime.utc(today.year, today.month, today.day);

    await _repo.addEntry({
      'id': _uuid.v4(),
      'streak_id': id,
      'entry_date': entryDate,
      'entry_type': 'completion',
      'note': null,
    });

    // Recalculate and roll the longest-count high-water mark forward.
    final newCount = await calculateStreak(id, userId);
    final currentLongest = streak['longest_count'] as int? ?? 0;
    final longest = newCount > currentLongest ? newCount : currentLongest;

    final updated = await _repo.update(id, userId, {
      'current_count': newCount,
      'longest_count': longest,
    });

    final message = getReengagementMessage(newCount, null);

    return {
      'streak': updated,
      'message': message,
    };
  }

  // ── Forgive ───────────────────────────────────────────────────────

  /// Uses a grace day to forgive yesterday — no streak penalty.
  ///
  /// The 'forgiven' entry counts like a completion in [calculateStreak],
  /// so the chain stays unbroken.
  Future<Map<String, dynamic>> forgive(
    String id,
    String userId, {
    String? note,
  }) async {
    final streak = await _repo.findById(id, userId);
    if (streak == null) throw ApiException.notFound('Streak not found');

    final yesterday = DateTime.now().toUtc().subtract(const Duration(days: 1));
    final entryDate =
        DateTime.utc(yesterday.year, yesterday.month, yesterday.day);

    await _repo.addEntry({
      'id': _uuid.v4(),
      'streak_id': id,
      'entry_date': entryDate,
      'entry_type': 'forgiven',
      'note': note ?? 'Grace day used',
    });

    // Recalculate with the forgiveness entry in place.
    final newCount = await calculateStreak(id, userId);
    final updated = await _repo.update(id, userId, {
      'current_count': newCount,
    });

    return {
      'streak': updated,
      'message': 'Grace day applied. You are doing great!',
    };
  }

  // ── Freeze ────────────────────────────────────────────────────────

  /// Freezes a streak so missed days do not count against it.
  ///
  /// [days] must be 1–30. Input is validated before the ownership lookup
  /// so obviously-bad requests never cost a DB round trip (the original
  /// queried first and validated second).
  Future<Map<String, dynamic>> freeze(
    String id,
    String userId, {
    required int days,
  }) async {
    if (days < 1 || days > 30) {
      throw ApiException.badRequest('Freeze duration must be 1–30 days');
    }

    final streak = await _repo.findById(id, userId);
    if (streak == null) throw ApiException.notFound('Streak not found');

    final until = DateTime.now().toUtc().add(Duration(days: days));
    final updated = await _repo.update(id, userId, {
      'frozen_until': until,
    });

    return {
      'streak': updated,
      'message': 'Streak frozen until ${until.toIso8601String()}',
    };
  }

  // ── History ───────────────────────────────────────────────────────

  /// Returns recent entries for the streak after verifying ownership.
  Future<List<Map<String, dynamic>>> getHistory(
    String id,
    String userId,
  ) async {
    // Verify ownership before exposing entries.
    final streak = await _repo.findById(id, userId);
    if (streak == null) throw ApiException.notFound('Streak not found');
    return _repo.getHistory(id, userId);
  }

  // ── Streak calculation ────────────────────────────────────────────

  /// Recomputes the current streak count. Both 'completion' and
  /// 'forgiven' entries count as kept days; gaps within the grace window
  /// consume grace days, and an on-time day replenishes them.
  ///
  /// NOTE: the repository returns at most the latest 90 entries, so the
  /// computed count is effectively capped near 90 unless callers widen
  /// that window.
  Future<int> calculateStreak(String id, String userId) async {
    final history = await _repo.getHistory(id, userId);
    if (history.isEmpty) return 0;

    final streak = await _repo.findById(id, userId);
    final graceDays = (streak?['grace_days'] as int?) ?? 1;

    // History is already sorted DESC by entry_date; walk backwards from
    // today, tolerating gaps up to the remaining grace allowance.
    var count = 0;
    var expectedDate = DateTime.now().toUtc();
    expectedDate =
        DateTime.utc(expectedDate.year, expectedDate.month, expectedDate.day);

    var gracesUsed = 0;

    for (final entry in history) {
      // Defensive: the repo normally emits ISO strings, but accept
      // DateTime too instead of crashing on a hard cast.
      final raw = entry['entry_date'];
      final entryDate = raw is DateTime ? raw : DateTime.parse(raw as String);
      final entryDay =
          DateTime.utc(entryDate.year, entryDate.month, entryDate.day);
      final diff = expectedDate.difference(entryDay).inDays;

      if (diff == 0) {
        // Exact match — an on-time day resets the grace allowance.
        count++;
        expectedDate = expectedDate.subtract(const Duration(days: 1));
        gracesUsed = 0;
      } else if (diff <= graceDays - gracesUsed) {
        // Gap fits in the remaining grace window.
        gracesUsed += diff;
        count++;
        expectedDate = entryDay.subtract(const Duration(days: 1));
      } else {
        // Gap too large — streak broken.
        break;
      }
    }

    return count;
  }

  // ── Re-engagement messages ────────────────────────────────────────

  /// Returns a warm, encouraging message based on streak length.
  ///
  /// [lastActive] is currently unused — reserved for future tailoring of
  /// "welcome back" copy to the length of the absence.
  String getReengagementMessage(int currentCount, DateTime? lastActive) {
    if (currentCount == 0) {
      return 'Every journey starts with a single step. Welcome back!';
    }
    if (currentCount == 1) {
      return 'Day 1 — you showed up, and that matters.';
    }
    if (currentCount <= 3) {
      return '$currentCount days strong! Momentum is building.';
    }
    if (currentCount <= 7) {
      return '$currentCount-day streak! Your brain is starting to build the habit loop.';
    }
    if (currentCount <= 14) {
      return 'Incredible — $currentCount days! You are in the groove.';
    }
    if (currentCount <= 30) {
      return '$currentCount days! This is no longer luck — it is who you are.';
    }
    return '$currentCount days! You are an absolute legend. Keep going!';
  }

  /// Parses a value that may be a DateTime, an ISO-8601 string, or null.
  DateTime? _asDateTime(dynamic value) {
    if (value == null) return null;
    if (value is DateTime) return value;
    return DateTime.tryParse(value.toString());
  }
}
import 'dart:convert';

import 'package:shelf/shelf.dart';
import 'package:shelf_router/shelf_router.dart';

import '../../middleware/error_handler.dart';
import '../../shared/api_response.dart';
import 'sync_service.dart';

/// Sync module route definitions.
///
/// Exposes two endpoints: `POST /push` uploads a batch of client-side
/// changes, `GET /pull` downloads server-side changes since a timestamp.
class SyncRoutes {
  final SyncService _service;

  SyncRoutes(this._service);

  Router get router {
    final router = Router();
    router.post('/push', _push);
    router.get('/pull', _pull);
    return router;
  }

  // ── Handlers ──────────────────────────────────────────────────────

  Future<Response> _push(Request request) async {
    final userId = request.context['userId'] as String;
    final payload =
        jsonDecode(await request.readAsString()) as Map<String, dynamic>;

    final rawChanges = payload['changes'] as List<dynamic>?;
    if (rawChanges == null || rawChanges.isEmpty) {
      throw ApiException.badRequest('Missing or empty "changes" array');
    }

    // Re-key each change as Map<String, dynamic> for the service layer.
    final parsedChanges = [
      for (final change in rawChanges)
        Map<String, dynamic>.from(change as Map),
    ];

    final result =
        await _service.push(userId: userId, changes: parsedChanges);
    return ApiResponse.success(result, message: 'Sync push complete');
  }

  Future<Response> _pull(Request request) async {
    final userId = request.context['userId'] as String;
    final rawSince = request.url.queryParameters['since'];

    if (rawSince == null) {
      throw ApiException.badRequest('Query parameter "since" is required');
    }

    final since = DateTime.tryParse(rawSince);
    if (since == null) {
      throw ApiException.badRequest(
        '"since" must be a valid ISO-8601 timestamp',
      );
    }

    final result = await _service.pull(userId: userId, since: since);
    return ApiResponse.success(result);
  }
}
import 'dart:convert';

import 'package:uuid/uuid.dart';

import '../../config/database.dart';

/// Offline-first sync service.
///
/// Clients push batches of changes (create / update / delete) and pull
/// changes since a given timestamp. Conflict resolution uses
/// last-write-wins (LWW) based on the `version` field.
class SyncService {
  final _uuid = const Uuid();

  /// Process a batch of changes pushed by the client.
  ///
  /// Each change is an object with:
  ///  - entity_type: "task" | "streak" | "time_entry"
  ///  - entity_id: the entity's UUID
  ///  - operation: "create" | "update" | "delete"
  ///  - data: the entity payload (for create / update)
  ///  - version: monotonically increasing client version
  ///
  /// Returns applied/conflict counters, per-change errors, and the
  /// server time clients should use as the cursor for their next pull.
  Future<Map<String, dynamic>> push({
    required String userId,
    required List<Map<String, dynamic>> changes,
  }) async {
    var applied = 0;
    var conflicts = 0;
    final errors = <Map<String, dynamic>>[];

    for (final change in changes) {
      final entityType = change['entity_type'] as String;
      final entityId = change['entity_id'] as String;
      final operation = change['operation'] as String;
      final version = change['version'] as int? ?? 0;
      final data = change['data'] as Map<String, dynamic>?;

      try {
        // Upsert into sync_log. The WHERE clause implements LWW: a stale
        // incoming version leaves the stored row untouched, in which case
        // RETURNING yields no rows.
        //
        // BUG FIX (1): the payload was stored via `data?.toString() ?? ''`,
        // which produces a Dart map literal, not JSON, and cannot be
        // decoded on pull. Encode with jsonEncode instead.
        // BUG FIX (2): stale-version no-ops were counted as "applied";
        // RETURNING id lets us detect and report them as conflicts.
        final result = await Database.query(
          '''
          INSERT INTO sync_log (id, user_id, entity_type, entity_id, operation,
                                data, version, synced_at)
          VALUES (@id, @user_id, @entity_type, @entity_id, @operation,
                  @data, @version, NOW())
          ON CONFLICT (entity_type, entity_id, user_id)
          DO UPDATE SET
            operation = EXCLUDED.operation,
            data = EXCLUDED.data,
            version = EXCLUDED.version,
            synced_at = NOW()
          WHERE sync_log.version < EXCLUDED.version
          RETURNING id
          ''',
          parameters: {
            'id': _uuid.v4(),
            'user_id': userId,
            'entity_type': entityType,
            'entity_id': entityId,
            'operation': operation,
            'data': data == null ? null : jsonEncode(data),
            'version': version,
          },
        );
        if (result.isEmpty) {
          conflicts++; // LWW kept the newer server-side version.
        } else {
          applied++;
        }
      } catch (e) {
        conflicts++;
        errors.add({
          'entity_type': entityType,
          'entity_id': entityId,
          'error': e.toString(),
        });
      }
    }

    return {
      'applied': applied,
      'conflicts': conflicts,
      'errors': errors,
      'server_time': DateTime.now().toUtc().toIso8601String(),
    };
  }

  /// Return all changes since [since] for the given user, oldest first.
  Future<Map<String, dynamic>> pull({
    required String userId,
    required DateTime since,
  }) async {
    final result = await Database.query(
      '''
      SELECT entity_type, entity_id, operation, data, version, synced_at
      FROM sync_log
      WHERE user_id = @user_id AND synced_at > @since
      ORDER BY synced_at ASC
      ''',
      parameters: {
        'user_id': userId,
        'since': since,
      },
    );

    final changes = result
        .map((row) => <String, dynamic>{
              'entity_type': row[0],
              'entity_id': row[1],
              'operation': row[2],
              // Decode the JSON payload so clients receive an object, not
              // an encoded string.
              'data': _decodePayload(row[3]),
              'version': row[4],
              'synced_at': row[5] is DateTime
                  ? (row[5] as DateTime).toIso8601String()
                  : row[5],
            })
        .toList();

    return {
      'changes': changes,
      'server_time': DateTime.now().toUtc().toIso8601String(),
    };
  }

  /// Decodes a stored JSON payload. Rows written before the jsonEncode
  /// fix (legacy Map.toString() residue) or non-string columns are
  /// returned unchanged rather than failing the whole pull.
  dynamic _decodePayload(dynamic raw) {
    if (raw is! String || raw.isEmpty) return raw;
    try {
      return jsonDecode(raw);
    } on FormatException {
      return raw;
    }
  }
}
import 'dart:math';

/// Heuristic scoring engine that ranks tasks to maximise dopamine-driven
/// engagement for users with ADHD.
///
/// The score is a weighted combination of five factors:
///  - Urgency (0.30)      — how close the deadline is
///  - Energy match (0.25) — does the task energy level match the user's
///                          current estimated energy?
///  - Novelty (0.20)      — how long since the user last interacted with
///                          this task
///  - Brevity (0.15)      — shorter tasks are preferred to reduce inertia
///  - Jitter (0.10)       — random element to prevent deterministic
///                          staleness
class DopamineScorer {
  final _random = Random();

  /// Scores a single task and returns a value in the range [0, 1].
  ///
  /// [userEnergyOverride] bypasses the circadian estimate when the client
  /// reports the user's actual energy (1–5).
  double scoreTask({
    required DateTime? dueDate,
    required int taskEnergyLevel,
    required int estimatedMinutes,
    required DateTime? lastInteractedAt,
    int? userEnergyOverride,
  }) {
    final now = DateTime.now();
    final userEnergy = userEnergyOverride ?? estimateCurrentEnergy(now);

    final urgency = _urgencyNormalized(dueDate, now);
    final energyMatch = _energyMatch(taskEnergyLevel, userEnergy);
    final novelty = _noveltyFactor(lastInteractedAt, now);
    final brevity = _estimatedBrevity(estimatedMinutes);
    final jitter = _randomJitter();

    return (0.30 * urgency) +
        (0.25 * energyMatch) +
        (0.20 * novelty) +
        (0.15 * brevity) +
        (0.10 * jitter);
  }

  /// Sorts a list of task maps by dopamine score (descending).
  ///
  /// Each map may contain `due_date`, `energy_level`, `estimated_minutes`
  /// and `last_interacted_at`; missing values fall back to defaults.
  /// The returned maps carry an internal `_score` key that callers strip
  /// before serialisation.
  List<Map<String, dynamic>> rankTasks(
    List<Map<String, dynamic>> tasks, {
    int? userEnergyOverride,
  }) {
    final scored = tasks.map((t) {
      final score = scoreTask(
        dueDate: t['due_date'] as DateTime?,
        taskEnergyLevel: (t['energy_level'] as int?) ?? 3,
        estimatedMinutes: (t['estimated_minutes'] as int?) ?? 30,
        lastInteractedAt: t['last_interacted_at'] as DateTime?,
        userEnergyOverride: userEnergyOverride,
      );
      return {...t, '_score': score};
    }).toList();

    scored.sort(
        (a, b) => (b['_score'] as double).compareTo(a['_score'] as double));

    return scored;
  }

  /// Estimates the user's current energy on a 1–5 scale based on
  /// circadian rhythm heuristics (time of day).
  ///
  /// This is a rough default; a real implementation would learn from user
  /// feedback over time.
  int estimateCurrentEnergy(DateTime now) {
    final hour = now.hour;
    if (hour >= 9 && hour < 12) return 5; // Morning peak
    if (hour >= 12 && hour < 14) return 3; // Post-lunch dip
    if (hour >= 14 && hour < 17) return 4; // Afternoon recovery
    if (hour >= 17 && hour < 20) return 3; // Early evening
    if (hour >= 20 && hour < 23) return 2; // Winding down
    return 1; // Late night / early morning
  }

  // ── private scoring components ────────────────────────────────────

  /// 0.1 = far away, 1 = overdue; no deadline gets a mild 0.3 default.
  ///
  /// BUG FIX: the original returned 0.1 for deadlines a week or more out
  /// but let the linear decay drop below 0.1 just inside the week
  /// (167 h → ~0.006), so a *closer* deadline could score lower than a
  /// farther one. Clamping the decay to [0.1, 1.0] keeps urgency
  /// monotonically decreasing with distance and preserves the old values
  /// everywhere else.
  double _urgencyNormalized(DateTime? dueDate, DateTime now) {
    if (dueDate == null) return 0.3; // mild default urgency
    final hoursUntilDue = dueDate.difference(now).inMinutes / 60.0;
    if (hoursUntilDue <= 0) return 1.0; // overdue
    // Linear decay over one week, floored at the far-future baseline.
    return (1.0 - (hoursUntilDue / 168.0)).clamp(0.1, 1.0);
  }

  /// 1.0 when task and user energy match perfectly, tapers to 0.
  double _energyMatch(int taskEnergy, int userEnergy) {
    final diff = (taskEnergy - userEnergy).abs();
    return (1.0 - diff / 4.0).clamp(0.0, 1.0);
  }

  /// Tasks unseen for longer get a novelty boost (up to 1.0 after 7 days).
  double _noveltyFactor(DateTime? lastInteractedAt, DateTime now) {
    if (lastInteractedAt == null) return 0.8; // never seen → high novelty
    final hoursSince = now.difference(lastInteractedAt).inHours;
    return (hoursSince / 168.0).clamp(0.0, 1.0); // 168 h = 7 days
  }

  /// Shorter tasks score higher to help overcome task-initiation inertia.
  double _estimatedBrevity(int estimatedMinutes) {
    if (estimatedMinutes <= 5) return 1.0;
    if (estimatedMinutes >= 120) return 0.1;
    return (1.0 - (estimatedMinutes - 5) / 115.0).clamp(0.0, 1.0);
  }

  /// A small random jitter in [0, 1).
  double _randomJitter() => _random.nextDouble();
}
import '../../config/database.dart';

/// Data access layer for tasks.
///
/// All values are parameterised; the only dynamic SQL is column names,
/// which are restricted to [_mutableColumns].
class TaskRepository {
  /// Columns callers may set through [update].
  ///
  /// Guarding the SET clause here (defence in depth — the service layer
  /// also whitelists) prevents SQL injection via map keys, since key
  /// names are interpolated into the statement.
  static const Set<String> _mutableColumns = {
    'title',
    'description',
    'status',
    'priority',
    'energy_level',
    'estimated_minutes',
    'actual_minutes',
    'due_date',
    'tags',
    'times_postponed',
    'last_interacted_at',
  };

  // ── Create ────────────────────────────────────────────────────────

  /// Inserts a task row and returns it fully populated.
  Future<Map<String, dynamic>> create(Map<String, dynamic> data) async {
    final result = await Database.query(
      '''
      INSERT INTO tasks (
        id, user_id, title, description, status, priority,
        energy_level, estimated_minutes, due_date, tags,
        times_postponed, created_at, updated_at
      ) VALUES (
        @id, @user_id, @title, @description, @status, @priority,
        @energy_level, @estimated_minutes, @due_date, @tags,
        0, NOW(), NOW()
      )
      RETURNING *
      ''',
      parameters: data,
    );
    return _rowToMap(result.first);
  }

  // ── Read ──────────────────────────────────────────────────────────

  /// Returns the task, or null when it does not exist, belongs to another
  /// user, or is soft-deleted.
  Future<Map<String, dynamic>?> findById(String id, String userId) async {
    final result = await Database.query(
      '''
      SELECT * FROM tasks
      WHERE id = @id AND user_id = @user_id AND deleted_at IS NULL
      ''',
      parameters: {'id': id, 'user_id': userId},
    );
    if (result.isEmpty) return null;
    return _rowToMap(result.first);
  }

  /// Lists tasks for [userId] with optional filters, newest first.
  ///
  /// [dueBefore] is expected to be an ISO-8601 timestamp string (it is
  /// parsed with DateTime.parse; invalid input throws FormatException).
  Future<List<Map<String, dynamic>>> findAll(
    String userId, {
    int limit = 20,
    int offset = 0,
    String? status,
    String? dueBefore,
    String? tag,
    int? energyLevel,
  }) async {
    final where = StringBuffer('user_id = @user_id AND deleted_at IS NULL');
    final params = <String, dynamic>{
      'user_id': userId,
      'limit': limit,
      'offset': offset,
    };

    if (status != null) {
      where.write(' AND status = @status');
      params['status'] = status;
    }
    if (dueBefore != null) {
      where.write(' AND due_date <= @due_before');
      params['due_before'] = DateTime.parse(dueBefore);
    }
    if (tag != null) {
      where.write(' AND @tag = ANY(tags)');
      params['tag'] = tag;
    }
    if (energyLevel != null) {
      where.write(' AND energy_level = @energy_level');
      params['energy_level'] = energyLevel;
    }

    final result = await Database.query(
      '''
      SELECT * FROM tasks
      WHERE $where
      ORDER BY created_at DESC
      LIMIT @limit OFFSET @offset
      ''',
      parameters: params,
    );
    return result.map(_rowToMap).toList();
  }

  /// Counts non-deleted tasks for [userId], optionally filtered by status.
  Future<int> count(String userId, {String? status}) async {
    final where = StringBuffer('user_id = @user_id AND deleted_at IS NULL');
    final params = <String, dynamic>{'user_id': userId};

    if (status != null) {
      where.write(' AND status = @status');
      params['status'] = status;
    }

    final result = await Database.query(
      'SELECT COUNT(*) FROM tasks WHERE $where',
      parameters: params,
    );
    return result.first[0] as int;
  }

  /// Fetches all active (non-completed, non-deleted) tasks for dopamine
  /// ranking.
  Future<List<Map<String, dynamic>>> findActive(String userId) async {
    final result = await Database.query(
      '''
      SELECT * FROM tasks
      WHERE user_id = @user_id
        AND deleted_at IS NULL
        AND status NOT IN ('completed', 'cancelled')
      ORDER BY created_at DESC
      ''',
      parameters: {'user_id': userId},
    );
    return result.map(_rowToMap).toList();
  }

  // ── Update ────────────────────────────────────────────────────────

  /// Updates the given columns and returns the fresh row, or null when
  /// the task does not exist for [userId].
  ///
  /// Throws [ArgumentError] if [data] contains a key outside
  /// [_mutableColumns]. The original interpolated arbitrary keys into the
  /// SET clause, which is an identifier-injection hazard if any caller
  /// ever forwards unsanitised input.
  Future<Map<String, dynamic>?> update(
    String id,
    String userId,
    Map<String, dynamic> data,
  ) async {
    final setClauses = <String>[];
    final params = <String, dynamic>{'id': id, 'user_id': userId};

    data.forEach((key, value) {
      if (!_mutableColumns.contains(key)) {
        throw ArgumentError.value(key, 'data', 'not an updatable column');
      }
      setClauses.add('$key = @$key');
      params[key] = value;
    });

    setClauses.add('updated_at = NOW()');

    final result = await Database.query(
      '''
      UPDATE tasks SET ${setClauses.join(', ')}
      WHERE id = @id AND user_id = @user_id AND deleted_at IS NULL
      RETURNING *
      ''',
      parameters: params,
    );
    if (result.isEmpty) return null;
    return _rowToMap(result.first);
  }

  // ── Soft delete ───────────────────────────────────────────────────

  /// Soft-deletes the task; returns false when nothing matched.
  Future<bool> softDelete(String id, String userId) async {
    final result = await Database.query(
      '''
      UPDATE tasks SET deleted_at = NOW(), updated_at = NOW()
      WHERE id = @id AND user_id = @user_id AND deleted_at IS NULL
      RETURNING id
      ''',
      parameters: {'id': id, 'user_id': userId},
    );
    return result.isNotEmpty;
  }

  // ── Complete ──────────────────────────────────────────────────────

  /// Marks the task completed, optionally recording [actualMinutes].
  /// Returns the updated row, or null when the task was not found.
  Future<Map<String, dynamic>?> markCompleted(
    String id,
    String userId, {
    int? actualMinutes,
  }) async {
    final params = <String, dynamic>{'id': id, 'user_id': userId};
    var extraSet = '';

    if (actualMinutes != null) {
      extraSet = ', actual_minutes = @actual_minutes';
      params['actual_minutes'] = actualMinutes;
    }

    final result = await Database.query(
      '''
      UPDATE tasks
      SET status = 'completed',
          completed_at = NOW(),
          updated_at = NOW()$extraSet
      WHERE id = @id AND user_id = @user_id AND deleted_at IS NULL
      RETURNING *
      ''',
      parameters: params,
    );
    if (result.isEmpty) return null;
    return _rowToMap(result.first);
  }

  // ── Skip ──────────────────────────────────────────────────────────

  /// Marks the task skipped and bumps the postponement counter.
  Future<Map<String, dynamic>?> markSkipped(String id, String userId) async {
    final result = await Database.query(
      '''
      UPDATE tasks
      SET status = 'skipped',
          times_postponed = times_postponed + 1,
          last_interacted_at = NOW(),
          updated_at = NOW()
      WHERE id = @id AND user_id = @user_id AND deleted_at IS NULL
      RETURNING *
      ''',
      parameters: {'id': id, 'user_id': userId},
    );
    if (result.isEmpty) return null;
    return _rowToMap(result.first);
  }

  // ── Row mapper ────────────────────────────────────────────────────

  /// Maps a positional `tasks` row to a JSON-friendly map, converting
  /// DateTime values to ISO-8601 strings. The column list must stay in
  /// sync with the `tasks` table order in 001_initial_schema.sql.
  Map<String, dynamic> _rowToMap(dynamic row) {
    final columns = [
      'id',
      'user_id',
      'title',
      'description',
      'status',
      'priority',
      'energy_level',
      'estimated_minutes',
      'actual_minutes',
      'due_date',
      'tags',
      'times_postponed',
      'last_interacted_at',
      'completed_at',
      'deleted_at',
      'created_at',
      'updated_at',
    ];
    final map = <String, dynamic>{};
    for (var i = 0; i < columns.length; i++) {
      final value = row[i];
      map[columns[i]] = value is DateTime ? value.toIso8601String() : value;
    }
    return map;
  }
}
import 'dart:convert';

import 'package:shelf/shelf.dart';
import 'package:shelf_router/shelf_router.dart';

import '../../middleware/error_handler.dart';
import '../../shared/api_response.dart';
import '../../shared/pagination.dart';
import 'task_service.dart';

/// Task module route definitions.
///
/// Static segments ('/next', '/dopamine-ordered') are registered before
/// the '/<id>' wildcard so they are not shadowed by it.
class TaskRoutes {
  final TaskService _service;

  TaskRoutes(this._service);

  Router get router {
    final router = Router();

    router.get('/', _listTasks);
    router.post('/', _createTask);
    router.get('/next', _getNextTask);
    router.get('/dopamine-ordered', _getDopamineOrdered);
    router.get('/<id>', _getTask);
    router.put('/<id>', _updateTask);
    router.delete('/<id>', _deleteTask);
    router.post('/<id>/complete', _completeTask);
    router.post('/<id>/skip', _skipTask);

    return router;
  }

  // ── Handlers ──────────────────────────────────────────────────────

  Future<Response> _listTasks(Request request) async {
    final userId = request.context['userId'] as String;
    final query = request.url.queryParameters;
    final paging = Pagination.fromRequest(request);

    final energyRaw = query['energy'];
    final tasks = await _service.listTasks(
      userId,
      limit: paging.perPage,
      offset: paging.offset,
      status: query['status'],
      dueBefore: query['due_before'],
      tag: query['tag'],
      energyLevel: energyRaw == null ? null : int.tryParse(energyRaw),
    );

    final total = await _service.countTasks(userId, status: query['status']);

    return ApiResponse.paginated(
      data: tasks,
      page: paging.page,
      perPage: paging.perPage,
      total: total,
    );
  }

  Future<Response> _createTask(Request request) async {
    final userId = request.context['userId'] as String;
    final payload =
        jsonDecode(await request.readAsString()) as Map<String, dynamic>;

    // Title is the only mandatory field.
    if (payload['title'] == null || (payload['title'] as String).isEmpty) {
      throw ApiException.badRequest('Title is required');
    }

    final created = await _service.createTask(userId, payload);
    return ApiResponse.created(created);
  }

  Future<Response> _getNextTask(Request request) async {
    final userId = request.context['userId'] as String;
    final next = await _service.getNextTask(userId);

    return next == null
        ? ApiResponse.success(null, message: 'No tasks available')
        : ApiResponse.success(next, message: 'Just do the next thing');
  }

  Future<Response> _getDopamineOrdered(Request request) async {
    final userId = request.context['userId'] as String;
    final ordered = await _service.getDopamineOrdered(userId);
    return ApiResponse.success(ordered);
  }

  Future<Response> _getTask(Request request, String id) async {
    final userId = request.context['userId'] as String;
    return ApiResponse.success(await _service.getTask(id, userId));
  }

  Future<Response> _updateTask(Request request, String id) async {
    final userId = request.context['userId'] as String;
    final payload =
        jsonDecode(await request.readAsString()) as Map<String, dynamic>;
    final updated = await _service.updateTask(id, userId, payload);
    return ApiResponse.success(updated);
  }

  Future<Response> _deleteTask(Request request, String id) async {
    final userId = request.context['userId'] as String;
    await _service.deleteTask(id, userId);
    return ApiResponse.success(null, message: 'Task deleted');
  }

  Future<Response> _completeTask(Request request, String id) async {
    final userId = request.context['userId'] as String;

    // The body is optional; absent or malformed JSON means no timing data.
    int? actualMinutes;
    try {
      final payload =
          jsonDecode(await request.readAsString()) as Map<String, dynamic>;
      actualMinutes = payload['actual_minutes'] as int?;
    } catch (_) {
      // body is optional
    }

    final result = await _service.completeTask(
      id,
      userId,
      actualMinutes: actualMinutes,
    );

    return ApiResponse.success(result, message: 'Task completed!');
  }

  Future<Response> _skipTask(Request request, String id) async {
    final userId = request.context['userId'] as String;
    final skipped = await _service.skipTask(id, userId);
    return ApiResponse.success(skipped, message: 'Task skipped — no worries!');
  }
}
import 'package:uuid/uuid.dart';

import '../../middleware/error_handler.dart';
import '../rewards/reward_engine.dart';
import 'dopamine_scorer.dart';
import 'task_repository.dart';

/// Business logic for the tasks module.
class TaskService {
  final TaskRepository _repo;
  final DopamineScorer _scorer;
  final RewardEngine _rewardEngine;
  final _uuid = const Uuid();

  /// Fields a client is allowed to change through [updateTask].
  static const _editableFields = [
    'title',
    'description',
    'status',
    'priority',
    'energy_level',
    'estimated_minutes',
    'due_date',
    'tags',
  ];

  TaskService({
    required TaskRepository repository,
    required DopamineScorer scorer,
    required RewardEngine rewardEngine,
  })  : _repo = repository,
        _scorer = scorer,
        _rewardEngine = rewardEngine;

  // ── CRUD ──────────────────────────────────────────────────────────

  /// Creates a task for [userId], filling sensible defaults for any
  /// omitted fields (pending / medium / energy 3 / 25 minutes).
  Future<Map<String, dynamic>> createTask(
    String userId,
    Map<String, dynamic> data,
  ) async {
    final rawDueDate = data['due_date'];
    final rawTags = data['tags'];

    final record = <String, dynamic>{
      'id': _uuid.v4(),
      'user_id': userId,
      'title': data['title'],
      'description': data['description'],
      'status': data['status'] ?? 'pending',
      'priority': data['priority'] ?? 'medium',
      'energy_level': data['energy_level'] ?? 3,
      'estimated_minutes': data['estimated_minutes'] ?? 25,
      'due_date':
          rawDueDate == null ? null : DateTime.parse(rawDueDate as String),
      'tags': rawTags is List ? rawTags.cast<String>() : <String>[],
    };
    return _repo.create(record);
  }

  /// Returns the task or throws 404 when it is missing / not owned.
  Future<Map<String, dynamic>> getTask(String id, String userId) async {
    final task = await _repo.findById(id, userId);
    if (task == null) throw ApiException.notFound('Task not found');
    return task;
  }

  Future<List<Map<String, dynamic>>> listTasks(
    String userId, {
    int limit = 20,
    int offset = 0,
    String? status,
    String? dueBefore,
    String? tag,
    int? energyLevel,
  }) {
    return _repo.findAll(
      userId,
      limit: limit,
      offset: offset,
      status: status,
      dueBefore: dueBefore,
      tag: tag,
      energyLevel: energyLevel,
    );
  }

  Future<int> countTasks(String userId, {String? status}) {
    return _repo.count(userId, status: status);
  }

  /// Applies a whitelisted partial update; `due_date` strings are parsed
  /// into DateTime before hitting the repository.
  Future<Map<String, dynamic>> updateTask(
    String id,
    String userId,
    Map<String, dynamic> data,
  ) async {
    final sanitised = <String, dynamic>{
      for (final field in _editableFields)
        if (data.containsKey(field))
          field: field == 'due_date' && data[field] != null
              ? DateTime.parse(data[field] as String)
              : data[field],
    };

    final updated = await _repo.update(id, userId, sanitised);
    if (updated == null) throw ApiException.notFound('Task not found');
    return updated;
  }

  Future<void> deleteTask(String id, String userId) async {
    final deleted = await _repo.softDelete(id, userId);
    if (!deleted) throw ApiException.notFound('Task not found');
  }

  // ── ADHD-specific operations ──────────────────────────────────────

  /// "Just do the next thing" — returns the single best task right now,
  /// or null when there are no active tasks.
  Future<Map<String, dynamic>?> getNextTask(String userId) async {
    final active = await _repo.findActive(userId);
    if (active.isEmpty) return null;

    // Top-ranked task, with the internal score key stripped.
    return _scorer.rankTasks(active).first..remove('_score');
  }

  /// Returns all active tasks ordered by dopamine score (best first).
  Future<List<Map<String, dynamic>>> getDopamineOrdered(
      String userId) async {
    final active = await _repo.findActive(userId);
    if (active.isEmpty) return [];

    final ranked = _scorer.rankTasks(active);
    for (final task in ranked) {
      task.remove('_score');
    }
    return ranked;
  }

  /// Marks a task completed, records actual time, and generates a reward.
  Future<Map<String, dynamic>> completeTask(
    String id,
    String userId, {
    int? actualMinutes,
  }) async {
    final completed =
        await _repo.markCompleted(id, userId, actualMinutes: actualMinutes);
    if (completed == null) throw ApiException.notFound('Task not found');

    final reward = _rewardEngine.generate(
      taskEnergyLevel: (completed['energy_level'] as int?) ?? 3,
      currentStreak: 0, // caller can enhance with real streak
      lastRewardType: null,
    );

    return {
      'task': completed,
      'reward': reward,
    };
  }

  /// Skips a task without guilt — increments times_postponed.
  Future<Map<String, dynamic>> skipTask(String id, String userId) async {
    final skipped = await _repo.markSkipped(id, userId);
    if (skipped == null) throw ApiException.notFound('Task not found');
    return skipped;
  }
}
class TimeRoutes {
  final TimeService _service;

  TimeRoutes(this._service);

  /// Router exposing the time-perception endpoints.
  Router get router => Router()
    ..post('/estimate', _logEstimate)
    ..post('/actual', _logActual)
    ..get('/accuracy', _getAccuracy);

  // ── Handlers ──────────────────────────────────────────────────────── 

  /// POST /estimate — record how long the user thinks a task will take.
  Future<Response> _logEstimate(Request request) async {
    final userId = request.context['userId'] as String;
    final body =
        jsonDecode(await request.readAsString()) as Map<String, dynamic>;

    final taskId = body['task_id'] as String?;
    final estimated = body['estimated_minutes'] as int?;
    if (taskId == null || estimated == null) {
      throw ApiException.badRequest(
        'Missing required fields: task_id, estimated_minutes',
      );
    }

    final entry = await _service.logEstimate(
      userId: userId,
      taskId: taskId,
      estimatedMinutes: estimated,
    );
    return ApiResponse.created(entry, message: 'Estimate recorded');
  }

  /// POST /actual — record how long the task actually took.
  Future<Response> _logActual(Request request) async {
    final userId = request.context['userId'] as String;
    final body =
        jsonDecode(await request.readAsString()) as Map<String, dynamic>;

    final taskId = body['task_id'] as String?;
    final actual = body['actual_minutes'] as int?;
    if (taskId == null || actual == null) {
      throw ApiException.badRequest(
        'Missing required fields: task_id, actual_minutes',
      );
    }

    final entry = await _service.logActual(
      userId: userId,
      taskId: taskId,
      actualMinutes: actual,
    );
    return ApiResponse.success(entry, message: 'Actual time recorded');
  }

  /// GET /accuracy — estimation-accuracy statistics for the caller.
  Future<Response> _getAccuracy(Request request) async {
    final userId = request.context['userId'] as String;
    return ApiResponse.success(await _service.getAccuracy(userId));
  }
}
import 'package:uuid/uuid.dart';

import '../../config/database.dart';
import '../../middleware/error_handler.dart';

/// Business logic for time perception tracking.
///
/// Helps users improve their time estimation skills by recording
/// estimated vs. actual durations and computing accuracy metrics.
///
/// NOTE(review): the original queries targeted a `time_entries` table, but
/// migration 001 creates `time_estimates` (with no `updated_at` column).
/// Queries below are aligned to the migration's table and columns.
class TimeService {
  final _uuid = const Uuid();

  /// Log a time estimate for a task (upsert — re-estimating overwrites).
  ///
  /// NOTE(review): `ON CONFLICT (task_id)` requires a unique constraint on
  /// `time_estimates.task_id` — confirm the migration provides one.
  Future<Map<String, dynamic>> logEstimate({
    required String userId,
    required String taskId,
    required int estimatedMinutes,
  }) async {
    final id = _uuid.v4();
    await Database.query(
      '''
      INSERT INTO time_estimates (id, user_id, task_id, estimated_minutes, created_at)
      VALUES (@id, @user_id, @task_id, @estimated_minutes, NOW())
      ON CONFLICT (task_id) DO UPDATE SET
        estimated_minutes = @estimated_minutes
      ''',
      parameters: {
        'id': id,
        'user_id': userId,
        'task_id': taskId,
        'estimated_minutes': estimatedMinutes,
      },
    );
    return {
      'id': id,
      'task_id': taskId,
      'estimated_minutes': estimatedMinutes,
    };
  }

  /// Log the actual time spent on a task.
  ///
  /// Also materialises `accuracy_ratio` (actual / estimated, per the schema
  /// comment) and stamps `completed_at`.
  ///
  /// Throws [ApiException.notFound] when no estimate was logged first.
  Future<Map<String, dynamic>> logActual({
    required String userId,
    required String taskId,
    required int actualMinutes,
  }) async {
    final result = await Database.query(
      '''
      UPDATE time_estimates
      SET actual_minutes = @actual_minutes,
          accuracy_ratio = ROUND(@actual_minutes::numeric / NULLIF(estimated_minutes, 0), 2),
          completed_at = NOW()
      WHERE task_id = @task_id AND user_id = @user_id
      RETURNING id, task_id, estimated_minutes, actual_minutes
      ''',
      parameters: {
        'user_id': userId,
        'task_id': taskId,
        'actual_minutes': actualMinutes,
      },
    );

    if (result.isEmpty) {
      throw ApiException.notFound('No estimate found for this task');
    }

    final row = result.first;
    return {
      'id': row[0],
      'task_id': row[1],
      'estimated_minutes': row[2],
      'actual_minutes': row[3],
    };
  }

  /// Compute accuracy statistics over time.
  ///
  /// Returns:
  /// - average_accuracy: ratio of estimated/actual (1.0 = perfect)
  /// - total_entries: number of completed time entries
  /// - tendency: "overestimate", "underestimate", or "accurate"
  /// - recent_entries: last 10 completed entries
  Future<Map<String, dynamic>> getAccuracy(String userId) async {
    final result = await Database.query(
      '''
      SELECT estimated_minutes, actual_minutes, task_id, created_at
      FROM time_estimates
      WHERE user_id = @user_id
        AND estimated_minutes IS NOT NULL
        AND actual_minutes IS NOT NULL
      ORDER BY created_at DESC
      ''',
      parameters: {'user_id': userId},
    );

    if (result.isEmpty) {
      return {
        'average_accuracy': null,
        'total_entries': 0,
        'tendency': 'insufficient_data',
        'recent_entries': <Map<String, dynamic>>[],
      };
    }

    var totalRatio = 0.0;
    final recentEntries = <Map<String, dynamic>>[];

    for (var i = 0; i < result.length; i++) {
      final estimated = result[i][0] as int;
      final actual = result[i][1] as int;
      // Guard against division by zero for a recorded 0-minute actual.
      final ratio = actual > 0 ? estimated / actual : 1.0;
      totalRatio += ratio;

      if (i < 10) {
        recentEntries.add({
          'task_id': result[i][2],
          'estimated_minutes': estimated,
          'actual_minutes': actual,
          'accuracy_ratio': double.parse(ratio.toStringAsFixed(2)),
          'created_at': result[i][3] is DateTime
              ? (result[i][3] as DateTime).toIso8601String()
              : result[i][3],
        });
      }
    }

    final avgAccuracy = totalRatio / result.length;
    String tendency;
    if (avgAccuracy > 1.15) {
      tendency = 'overestimate';
    } else if (avgAccuracy < 0.85) {
      tendency = 'underestimate';
    } else {
      tendency = 'accurate';
    }

    return {
      'average_accuracy': double.parse(avgAccuracy.toStringAsFixed(2)),
      'total_entries': result.length,
      'tendency': tendency,
      'recent_entries': recentEntries,
    };
  }
}
class ApiResponse {
  ApiResponse._();

  static final _jsonHeaders = {'Content-Type': 'application/json'};

  /// Builds a JSON [Response] with the given status and payload.
  static Response _envelope(int statusCode, Map<String, dynamic> payload) =>
      Response(statusCode, body: jsonEncode(payload), headers: _jsonHeaders);

  // ── success helpers ───────────────────────────────────────────────── 

  static Response success(dynamic data, {String? message}) => _envelope(200, {
        'success': true,
        if (message != null) 'message': message,
        'data': data,
      });

  static Response created(dynamic data, {String? message}) => _envelope(201, {
        'success': true,
        'message': message ?? 'Created successfully',
        'data': data,
      });

  static Response noContent() => Response(204);

  static Response paginated({
    required List data,
    required int page,
    required int perPage,
    required int total,
  }) =>
      _envelope(200, {
        'success': true,
        'data': data,
        'pagination': {
          'page': page,
          'per_page': perPage,
          'total': total,
          'total_pages': (total / perPage).ceil(),
        },
      });

  // ── error helpers ─────────────────────────────────────────────────── 

  static Response error(
    String message, {
    int statusCode = 400,
    dynamic errors,
  }) =>
      _envelope(statusCode, {
        'success': false,
        'message': message,
        if (errors != null) 'errors': errors,
      });

  static Response badRequest(String message, {dynamic errors}) =>
      error(message, statusCode: 400, errors: errors);

  static Response unauthorized([String message = 'Unauthorized']) =>
      error(message, statusCode: 401);

  static Response forbidden([String message = 'Forbidden']) =>
      error(message, statusCode: 403);

  static Response notFound([String message = 'Resource not found']) =>
      error(message, statusCode: 404);

  static Response conflict([String message = 'Conflict']) =>
      error(message, statusCode: 409);

  static Response tooManyRequests([String message = 'Too many requests']) =>
      error(message, statusCode: 429);

  static Response internalError(
          [String message = 'Internal server error']) =>
      error(message, statusCode: 500);
}
class Pagination {
  /// 1-based page number requested by the client.
  final int page;

  /// Items per page, clamped to [1, 100].
  final int perPage;

  /// Row offset derived from [page] and [perPage].
  final int offset;

  Pagination._({required this.page, required this.perPage})
      : offset = (page - 1) * perPage;

  /// Extract pagination from a [Request]'s query parameters.
  ///
  /// Defaults: page=1, perPage=20, max perPage=100.
  factory Pagination.fromRequest(Request request) {
    final query = request.url.queryParameters;

    final rawPage = int.tryParse(query['page'] ?? '') ?? 1;
    final rawPerPage = int.tryParse(query['per_page'] ?? '') ?? 20;

    return Pagination._(
      page: rawPage < 1 ? 1 : rawPage,
      perPage: rawPerPage < 1 ? 1 : (rawPerPage > 100 ? 100 : rawPerPage),
    );
  }
}
created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW(), + deleted_at TIMESTAMPTZ +); + +CREATE INDEX idx_users_email ON users(email) WHERE deleted_at IS NULL; + +CREATE TABLE refresh_tokens ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + token VARCHAR(255) NOT NULL, + expires_at TIMESTAMPTZ NOT NULL, + revoked BOOLEAN DEFAULT FALSE, + created_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE INDEX idx_refresh_tokens_user ON refresh_tokens(user_id) WHERE revoked = FALSE; +CREATE INDEX idx_refresh_tokens_token ON refresh_tokens(token) WHERE revoked = FALSE; + +-- ============================================================ +-- TASKS +-- ============================================================ + +CREATE TABLE tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + title VARCHAR(300) NOT NULL, + description TEXT, + status VARCHAR(20) DEFAULT 'pending', -- pending | in_progress | completed | skipped | archived + priority SMALLINT DEFAULT 3 CHECK (priority BETWEEN 1 AND 5), + energy_level SMALLINT DEFAULT 3 CHECK (energy_level BETWEEN 1 AND 5), + -- Time estimation + estimated_minutes INTEGER, + actual_minutes INTEGER, + -- Dopamine scoring inputs + dopamine_score NUMERIC(5,4) DEFAULT 0, + novelty_factor NUMERIC(3,2) DEFAULT 0.5, + times_postponed INTEGER DEFAULT 0, + last_interacted_at TIMESTAMPTZ, + -- Organization + category VARCHAR(100), + tags TEXT[] DEFAULT '{}', + due_date TIMESTAMPTZ, + -- Recurrence + is_recurring BOOLEAN DEFAULT FALSE, + recurrence_rule VARCHAR(200), -- RRULE format + parent_task_id UUID REFERENCES tasks(id), + -- Completion + completed_at TIMESTAMPTZ, + -- Sync + version INTEGER DEFAULT 1, + -- Timestamps + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW(), + deleted_at TIMESTAMPTZ +); + +CREATE INDEX idx_tasks_user_status ON tasks(user_id, status) WHERE deleted_at 
IS NULL; +CREATE INDEX idx_tasks_user_due ON tasks(user_id, due_date) WHERE deleted_at IS NULL AND status = 'pending'; +CREATE INDEX idx_tasks_dopamine ON tasks(user_id, dopamine_score DESC) WHERE deleted_at IS NULL AND status = 'pending'; +CREATE INDEX idx_tasks_tags ON tasks USING GIN(tags); +CREATE INDEX idx_tasks_parent ON tasks(parent_task_id) WHERE parent_task_id IS NOT NULL; + +-- ============================================================ +-- STREAKS +-- ============================================================ + +CREATE TABLE streaks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID UNIQUE NOT NULL REFERENCES users(id) ON DELETE CASCADE, + current_count INTEGER DEFAULT 0, + longest_count INTEGER DEFAULT 0, + -- Forgiveness mechanics + grace_days SMALLINT DEFAULT 2, + grace_used SMALLINT DEFAULT 0, + frozen_until DATE, + -- Decay instead of reset (premium) + decay_enabled BOOLEAN DEFAULT FALSE, + -- Tracking + last_completed_date DATE, + started_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE TABLE streak_entries ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + streak_id UUID NOT NULL REFERENCES streaks(id) ON DELETE CASCADE, + entry_date DATE NOT NULL, + tasks_done INTEGER DEFAULT 0, + was_forgiven BOOLEAN DEFAULT FALSE, + was_frozen BOOLEAN DEFAULT FALSE, + -- Points earned on this day + points_earned INTEGER DEFAULT 0, + created_at TIMESTAMPTZ DEFAULT NOW(), + UNIQUE(streak_id, entry_date) +); + +CREATE INDEX idx_streak_entries_date ON streak_entries(streak_id, entry_date DESC); + +-- ============================================================ +-- REWARDS +-- ============================================================ + +CREATE TABLE rewards ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + task_id UUID REFERENCES tasks(id), + -- Reward details + reward_type VARCHAR(20) NOT NULL, -- points | badge | animation | message | unlock 
| surprise + magnitude NUMERIC(8,2) NOT NULL DEFAULT 1.0, + is_surprise BOOLEAN DEFAULT FALSE, + -- Content + title VARCHAR(200), + description TEXT, + animation_key VARCHAR(100), -- Lottie animation identifier + -- Metadata + created_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE INDEX idx_rewards_user ON rewards(user_id, created_at DESC); +CREATE INDEX idx_rewards_type ON rewards(user_id, reward_type); + +-- User point balance (materialized for fast reads) +CREATE TABLE user_points ( + user_id UUID PRIMARY KEY REFERENCES users(id) ON DELETE CASCADE, + total INTEGER DEFAULT 0, + level INTEGER DEFAULT 1, + updated_at TIMESTAMPTZ DEFAULT NOW() +); + +-- ============================================================ +-- TIME PERCEPTION TRACKING +-- ============================================================ + +CREATE TABLE time_estimates ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + task_id UUID REFERENCES tasks(id), + -- Estimate vs actual + estimated_minutes INTEGER NOT NULL, + actual_minutes INTEGER, + accuracy_ratio NUMERIC(5,2), -- actual / estimated + -- Context + category VARCHAR(100), + energy_level SMALLINT, + -- Timestamps + started_at TIMESTAMPTZ, + completed_at TIMESTAMPTZ, + created_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE INDEX idx_time_estimates_user ON time_estimates(user_id, created_at DESC); +CREATE INDEX idx_time_estimates_category ON time_estimates(user_id, category); + +-- ============================================================ +-- BODY DOUBLING / CO-WORKING ROOMS (Phase 2) +-- ============================================================ + +CREATE TABLE coworking_rooms ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name VARCHAR(100) NOT NULL, + description TEXT, + host_id UUID NOT NULL REFERENCES users(id), + is_public BOOLEAN DEFAULT TRUE, + max_participants INTEGER DEFAULT 10, + ambient_sound VARCHAR(50) DEFAULT 'none', -- none | cafe | rain | lofi | forest + status 
VARCHAR(20) DEFAULT 'active', -- active | ended + started_at TIMESTAMPTZ DEFAULT NOW(), + ended_at TIMESTAMPTZ +); + +CREATE TABLE room_participants ( + room_id UUID NOT NULL REFERENCES coworking_rooms(id) ON DELETE CASCADE, + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + current_task VARCHAR(200), + joined_at TIMESTAMPTZ DEFAULT NOW(), + left_at TIMESTAMPTZ, + PRIMARY KEY (room_id, user_id) +); + +CREATE INDEX idx_room_participants_active ON room_participants(room_id) WHERE left_at IS NULL; + +-- ============================================================ +-- ACCOUNTABILITY PARTNERS (Phase 2) +-- ============================================================ + +CREATE TABLE accountability_partners ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + partner_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + status VARCHAR(20) DEFAULT 'pending', -- pending | active | blocked + created_at TIMESTAMPTZ DEFAULT NOW(), + UNIQUE(user_id, partner_id) +); + +CREATE TABLE nudges ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + from_id UUID NOT NULL REFERENCES users(id), + to_id UUID NOT NULL REFERENCES users(id), + message TEXT, + nudge_type VARCHAR(20) DEFAULT 'gentle', -- gentle | encouraging | celebratory + read_at TIMESTAMPTZ, + created_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE INDEX idx_nudges_to ON nudges(to_id, created_at DESC) WHERE read_at IS NULL; + +-- ============================================================ +-- COACHING MARKETPLACE (Phase 3) +-- ============================================================ + +CREATE TABLE coaches ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID UNIQUE NOT NULL REFERENCES users(id), + bio TEXT, + specializations TEXT[] DEFAULT '{}', + hourly_rate NUMERIC(8,2), + currency VARCHAR(3) DEFAULT 'USD', + rating_avg NUMERIC(3,2) DEFAULT 0, + rating_count INTEGER DEFAULT 0, + verified BOOLEAN DEFAULT FALSE, + status 
VARCHAR(20) DEFAULT 'pending', -- pending | active | suspended + created_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE TABLE coaching_sessions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + coach_id UUID NOT NULL REFERENCES coaches(id), + client_id UUID NOT NULL REFERENCES users(id), + status VARCHAR(20) DEFAULT 'scheduled', -- scheduled | in_progress | completed | cancelled + scheduled_at TIMESTAMPTZ NOT NULL, + duration_min INTEGER DEFAULT 30, + notes TEXT, + rating SMALLINT CHECK (rating BETWEEN 1 AND 5), + amount NUMERIC(8,2), + commission NUMERIC(8,2), + created_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE INDEX idx_coaching_sessions_coach ON coaching_sessions(coach_id, scheduled_at); +CREATE INDEX idx_coaching_sessions_client ON coaching_sessions(client_id, scheduled_at); + +-- ============================================================ +-- PUSH NOTIFICATIONS +-- ============================================================ + +CREATE TABLE push_tokens ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + token VARCHAR(500) NOT NULL, + platform VARCHAR(20) NOT NULL, -- ios | android | web + created_at TIMESTAMPTZ DEFAULT NOW(), + UNIQUE(user_id, token) +); + +CREATE TABLE notification_log ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id), + notification_type VARCHAR(50) NOT NULL, + title VARCHAR(200), + body TEXT, + data_json JSONB, + sent_at TIMESTAMPTZ DEFAULT NOW(), + read_at TIMESTAMPTZ +); + +CREATE INDEX idx_notification_log_user ON notification_log(user_id, sent_at DESC); + +-- ============================================================ +-- SYNC TRACKING +-- ============================================================ + +CREATE TABLE sync_changelog ( + id BIGSERIAL PRIMARY KEY, + entity_type VARCHAR(50) NOT NULL, -- task | streak | reward | time_estimate + entity_id UUID NOT NULL, + action VARCHAR(20) NOT NULL, -- insert | update | delete + 
changed_at TIMESTAMPTZ DEFAULT NOW(), + changed_by UUID REFERENCES users(id) +); + +CREATE INDEX idx_sync_changelog_type_time ON sync_changelog(entity_type, changed_at); +CREATE INDEX idx_sync_changelog_user ON sync_changelog(changed_by, changed_at); + +-- ============================================================ +-- SUBSCRIPTIONS +-- ============================================================ + +CREATE TABLE subscriptions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + provider VARCHAR(20) NOT NULL, -- stripe | revenuecat | apple | google + provider_subscription_id VARCHAR(200), + plan VARCHAR(50) NOT NULL, -- premium_monthly | premium_yearly | lifetime + status VARCHAR(20) DEFAULT 'active', -- active | cancelled | expired | past_due + current_period_start TIMESTAMPTZ, + current_period_end TIMESTAMPTZ, + cancelled_at TIMESTAMPTZ, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE INDEX idx_subscriptions_user ON subscriptions(user_id); +CREATE INDEX idx_subscriptions_provider ON subscriptions(provider, provider_subscription_id); + +-- ============================================================ +-- UPDATED_AT TRIGGER FUNCTION +-- ============================================================ + +CREATE OR REPLACE FUNCTION update_updated_at_column() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = NOW(); + RETURN NEW; +END; +$$ language 'plpgsql'; + +-- Apply trigger to tables with updated_at +CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users + FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); + +CREATE TRIGGER update_tasks_updated_at BEFORE UPDATE ON tasks + FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); + +CREATE TRIGGER update_streaks_updated_at BEFORE UPDATE ON streaks + FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); + +CREATE TRIGGER update_subscriptions_updated_at BEFORE UPDATE ON subscriptions + FOR EACH ROW 
EXECUTE FUNCTION update_updated_at_column();

-- ============================================================
-- SYNC TRIGGER: Auto-log changes to sync_changelog
-- ============================================================

CREATE OR REPLACE FUNCTION log_sync_change()
RETURNS TRIGGER AS $$
BEGIN
  IF TG_OP = 'DELETE' THEN
    INSERT INTO sync_changelog (entity_type, entity_id, action, changed_by)
    VALUES (TG_TABLE_NAME, OLD.id, 'delete', OLD.user_id);
    RETURN OLD;
  ELSE
    -- TG_OP is upper-case ('INSERT'/'UPDATE'); lower() keeps the action
    -- column consistent with its documented values (insert | update |
    -- delete) and with the literal 'delete' written above.
    INSERT INTO sync_changelog (entity_type, entity_id, action, changed_by)
    VALUES (TG_TABLE_NAME, NEW.id, lower(TG_OP), NEW.user_id);
    RETURN NEW;
  END IF;
END;
$$ LANGUAGE plpgsql;

-- Track task changes for offline sync
CREATE TRIGGER sync_tasks_changes AFTER INSERT OR UPDATE OR DELETE ON tasks
  FOR EACH ROW EXECUTE FUNCTION log_sync_change();

COMMIT;
sha256: "758e6d74e971c3e5aceb4110bfd6698efc7f501675bcfe0c775459a8140750eb" + url: "https://pub.dev" + source: hosted + version: "2.13.0" + boolean_selector: + dependency: transitive + description: + name: boolean_selector + sha256: "8aab1771e1243a5063b8b0ff68042d67334e3feab9e95b9490f9a6ebf73b42ea" + url: "https://pub.dev" + source: hosted + version: "2.1.2" + buffer: + dependency: transitive + description: + name: buffer + sha256: "389da2ec2c16283c8787e0adaede82b1842102f8c8aae2f49003a766c5c6b3d1" + url: "https://pub.dev" + source: hosted + version: "1.2.3" + build: + dependency: transitive + description: + name: build + sha256: "51dc711996cbf609b90cbe5b335bbce83143875a9d58e4b5c6d3c4f684d3dda7" + url: "https://pub.dev" + source: hosted + version: "2.5.4" + build_config: + dependency: transitive + description: + name: build_config + sha256: "4ae2de3e1e67ea270081eaee972e1bd8f027d459f249e0f1186730784c2e7e33" + url: "https://pub.dev" + source: hosted + version: "1.1.2" + build_daemon: + dependency: transitive + description: + name: build_daemon + sha256: bf05f6e12cfea92d3c09308d7bcdab1906cd8a179b023269eed00c071004b957 + url: "https://pub.dev" + source: hosted + version: "4.1.1" + build_resolvers: + dependency: transitive + description: + name: build_resolvers + sha256: ee4257b3f20c0c90e72ed2b57ad637f694ccba48839a821e87db762548c22a62 + url: "https://pub.dev" + source: hosted + version: "2.5.4" + build_runner: + dependency: "direct dev" + description: + name: build_runner + sha256: "382a4d649addbfb7ba71a3631df0ec6a45d5ab9b098638144faf27f02778eb53" + url: "https://pub.dev" + source: hosted + version: "2.5.4" + build_runner_core: + dependency: transitive + description: + name: build_runner_core + sha256: "85fbbb1036d576d966332a3f5ce83f2ce66a40bea1a94ad2d5fc29a19a0d3792" + url: "https://pub.dev" + source: hosted + version: "9.1.2" + built_collection: + dependency: transitive + description: + name: built_collection + sha256: 
"376e3dd27b51ea877c28d525560790aee2e6fbb5f20e2f85d5081027d94e2100" + url: "https://pub.dev" + source: hosted + version: "5.1.1" + built_value: + dependency: transitive + description: + name: built_value + sha256: "6ae8a6435a8c6520c7077b107e77f1fb4ba7009633259a4d49a8afd8e7efc5e9" + url: "https://pub.dev" + source: hosted + version: "8.12.4" + charcode: + dependency: transitive + description: + name: charcode + sha256: fb0f1107cac15a5ea6ef0a6ef71a807b9e4267c713bb93e00e92d737cc8dbd8a + url: "https://pub.dev" + source: hosted + version: "1.4.0" + checked_yaml: + dependency: transitive + description: + name: checked_yaml + sha256: feb6bed21949061731a7a75fc5d2aa727cf160b91af9a3e464c5e3a32e28b5ff + url: "https://pub.dev" + source: hosted + version: "2.0.3" + cli_config: + dependency: transitive + description: + name: cli_config + sha256: ac20a183a07002b700f0c25e61b7ee46b23c309d76ab7b7640a028f18e4d99ec + url: "https://pub.dev" + source: hosted + version: "0.2.0" + clock: + dependency: transitive + description: + name: clock + sha256: fddb70d9b5277016c77a80201021d40a2247104d9f4aa7bab7157b7e3f05b84b + url: "https://pub.dev" + source: hosted + version: "1.1.2" + code_builder: + dependency: transitive + description: + name: code_builder + sha256: "6a6cab2ba4680d6423f34a9b972a4c9a94ebe1b62ecec4e1a1f2cba91fd1319d" + url: "https://pub.dev" + source: hosted + version: "4.11.1" + collection: + dependency: transitive + description: + name: collection + sha256: "2f5709ae4d3d59dd8f7cd309b4e023046b57d8a6c82130785d2b0e5868084e76" + url: "https://pub.dev" + source: hosted + version: "1.19.1" + convert: + dependency: transitive + description: + name: convert + sha256: b30acd5944035672bc15c6b7a8b47d773e41e2f17de064350988c5d02adb1c68 + url: "https://pub.dev" + source: hosted + version: "3.1.2" + coverage: + dependency: transitive + description: + name: coverage + sha256: "5da775aa218eaf2151c721b16c01c7676fbfdd99cebba2bf64e8b807a28ff94d" + url: "https://pub.dev" + source: hosted + version: 
"1.15.0" + crypto: + dependency: "direct main" + description: + name: crypto + sha256: c8ea0233063ba03258fbcf2ca4d6dadfefe14f02fab57702265467a19f27fadf + url: "https://pub.dev" + source: hosted + version: "3.0.7" + dart_jsonwebtoken: + dependency: "direct main" + description: + name: dart_jsonwebtoken + sha256: "00a0812d2aeaeb0d30bcbc4dd3cee57971dbc0ab2216adf4f0247f37793f15ef" + url: "https://pub.dev" + source: hosted + version: "2.17.0" + dart_style: + dependency: transitive + description: + name: dart_style + sha256: "8a0e5fba27e8ee025d2ffb4ee820b4e6e2cf5e4246a6b1a477eb66866947e0bb" + url: "https://pub.dev" + source: hosted + version: "3.1.1" + dotenv: + dependency: "direct main" + description: + name: dotenv + sha256: "379e64b6fc82d3df29461d349a1796ecd2c436c480d4653f3af6872eccbc90e1" + url: "https://pub.dev" + source: hosted + version: "4.2.0" + ed25519_edwards: + dependency: transitive + description: + name: ed25519_edwards + sha256: "6ce0112d131327ec6d42beede1e5dfd526069b18ad45dcf654f15074ad9276cd" + url: "https://pub.dev" + source: hosted + version: "0.3.1" + file: + dependency: transitive + description: + name: file + sha256: a3b4f84adafef897088c160faf7dfffb7696046cb13ae90b508c2cbc95d3b8d4 + url: "https://pub.dev" + source: hosted + version: "7.0.1" + fixnum: + dependency: transitive + description: + name: fixnum + sha256: b6dc7065e46c974bc7c5f143080a6764ec7a4be6da1285ececdc37be96de53be + url: "https://pub.dev" + source: hosted + version: "1.1.1" + frontend_server_client: + dependency: transitive + description: + name: frontend_server_client + sha256: f64a0333a82f30b0cca061bc3d143813a486dc086b574bfb233b7c1372427694 + url: "https://pub.dev" + source: hosted + version: "4.0.0" + glob: + dependency: transitive + description: + name: glob + sha256: c3f1ee72c96f8f78935e18aa8cecced9ab132419e8625dc187e1c2408efc20de + url: "https://pub.dev" + source: hosted + version: "2.1.3" + graphs: + dependency: transitive + description: + name: graphs + sha256: 
"741bbf84165310a68ff28fe9e727332eef1407342fca52759cb21ad8177bb8d0" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + http: + dependency: "direct main" + description: + name: http + sha256: "87721a4a50b19c7f1d49001e51409bddc46303966ce89a65af4f4e6004896412" + url: "https://pub.dev" + source: hosted + version: "1.6.0" + http_methods: + dependency: transitive + description: + name: http_methods + sha256: "6bccce8f1ec7b5d701e7921dca35e202d425b57e317ba1a37f2638590e29e566" + url: "https://pub.dev" + source: hosted + version: "1.1.1" + http_multi_server: + dependency: transitive + description: + name: http_multi_server + sha256: aa6199f908078bb1c5efb8d8638d4ae191aac11b311132c3ef48ce352fb52ef8 + url: "https://pub.dev" + source: hosted + version: "3.2.2" + http_parser: + dependency: transitive + description: + name: http_parser + sha256: "178d74305e7866013777bab2c3d8726205dc5a4dd935297175b19a23a2e66571" + url: "https://pub.dev" + source: hosted + version: "4.1.2" + io: + dependency: transitive + description: + name: io + sha256: dfd5a80599cf0165756e3181807ed3e77daf6dd4137caaad72d0b7931597650b + url: "https://pub.dev" + source: hosted + version: "1.0.5" + js: + dependency: transitive + description: + name: js + sha256: "53385261521cc4a0c4658fd0ad07a7d14591cf8fc33abbceae306ddb974888dc" + url: "https://pub.dev" + source: hosted + version: "0.7.2" + json_annotation: + dependency: "direct main" + description: + name: json_annotation + sha256: "1ce844379ca14835a50d2f019a3099f419082cfdd231cd86a142af94dd5c6bb1" + url: "https://pub.dev" + source: hosted + version: "4.9.0" + json_serializable: + dependency: "direct dev" + description: + name: json_serializable + sha256: c50ef5fc083d5b5e12eef489503ba3bf5ccc899e487d691584699b4bdefeea8c + url: "https://pub.dev" + source: hosted + version: "6.9.5" + lints: + dependency: "direct dev" + description: + name: lints + sha256: c35bb79562d980e9a453fc715854e1ed39e24e7d0297a880ef54e17f9874a9d7 + url: "https://pub.dev" + source: hosted 
+ version: "5.1.1" + logging: + dependency: "direct main" + description: + name: logging + sha256: c8245ada5f1717ed44271ed1c26b8ce85ca3228fd2ffdb75468ab01979309d61 + url: "https://pub.dev" + source: hosted + version: "1.3.0" + matcher: + dependency: transitive + description: + name: matcher + sha256: dc58c723c3c24bf8d3e2d3ad3f2f9d7bd9cf43ec6feaa64181775e60190153f2 + url: "https://pub.dev" + source: hosted + version: "0.12.17" + meta: + dependency: transitive + description: + name: meta + sha256: "9f29b9bcc8ee287b1a31e0d01be0eae99a930dbffdaecf04b3f3d82a969f296f" + url: "https://pub.dev" + source: hosted + version: "1.18.1" + mime: + dependency: transitive + description: + name: mime + sha256: "41a20518f0cb1256669420fdba0cd90d21561e560ac240f26ef8322e45bb7ed6" + url: "https://pub.dev" + source: hosted + version: "2.0.0" + node_preamble: + dependency: transitive + description: + name: node_preamble + sha256: "6e7eac89047ab8a8d26cf16127b5ed26de65209847630400f9aefd7cd5c730db" + url: "https://pub.dev" + source: hosted + version: "2.0.2" + package_config: + dependency: transitive + description: + name: package_config + sha256: f096c55ebb7deb7e384101542bfba8c52696c1b56fca2eb62827989ef2353bbc + url: "https://pub.dev" + source: hosted + version: "2.2.0" + path: + dependency: transitive + description: + name: path + sha256: "75cca69d1490965be98c73ceaea117e8a04dd21217b37b292c9ddbec0d955bc5" + url: "https://pub.dev" + source: hosted + version: "1.9.1" + pointycastle: + dependency: transitive + description: + name: pointycastle + sha256: "4be0097fcf3fd3e8449e53730c631200ebc7b88016acecab2b0da2f0149222fe" + url: "https://pub.dev" + source: hosted + version: "3.9.1" + pool: + dependency: transitive + description: + name: pool + sha256: "978783255c543aa3586a1b3c21f6e9d720eb315376a915872c61ef8b5c20177d" + url: "https://pub.dev" + source: hosted + version: "1.5.2" + postgres: + dependency: "direct main" + description: + name: postgres + sha256: 
"013c6dc668eaab9771c4d3f5fc3e87ed4b3cd4ab3587ac6943cc1f38509ff723" + url: "https://pub.dev" + source: hosted + version: "3.5.7" + pub_semver: + dependency: transitive + description: + name: pub_semver + sha256: "5bfcf68ca79ef689f8990d1160781b4bad40a3bd5e5218ad4076ddb7f4081585" + url: "https://pub.dev" + source: hosted + version: "2.2.0" + pubspec_parse: + dependency: transitive + description: + name: pubspec_parse + sha256: "0560ba233314abbed0a48a2956f7f022cce7c3e1e73df540277da7544cad4082" + url: "https://pub.dev" + source: hosted + version: "1.5.0" + redis: + dependency: "direct main" + description: + name: redis + sha256: "32e28eb1ba2e0fe2af50bbd06e675e4dfdce4f0ba95c5bc885c72383a1b0b47e" + url: "https://pub.dev" + source: hosted + version: "3.1.0" + sasl_scram: + dependency: transitive + description: + name: sasl_scram + sha256: "5c27fd6058d53075c032539ba3cc7fa95006bb1d51a0db63a81b05756c265a83" + url: "https://pub.dev" + source: hosted + version: "0.1.2" + saslprep: + dependency: transitive + description: + name: saslprep + sha256: "3d421d10be9513bf4459c17c5e70e7b8bc718c9fc5ad4ba5eb4f5fd27396f740" + url: "https://pub.dev" + source: hosted + version: "1.0.3" + shelf: + dependency: "direct main" + description: + name: shelf + sha256: e7dd780a7ffb623c57850b33f43309312fc863fb6aa3d276a754bb299839ef12 + url: "https://pub.dev" + source: hosted + version: "1.4.2" + shelf_cors_headers: + dependency: "direct main" + description: + name: shelf_cors_headers + sha256: a127c80f99bbef3474293db67a7608e3a0f1f0fcdb171dad77fa9bd2cd123ae4 + url: "https://pub.dev" + source: hosted + version: "0.1.5" + shelf_packages_handler: + dependency: transitive + description: + name: shelf_packages_handler + sha256: "89f967eca29607c933ba9571d838be31d67f53f6e4ee15147d5dc2934fee1b1e" + url: "https://pub.dev" + source: hosted + version: "3.0.2" + shelf_router: + dependency: "direct main" + description: + name: shelf_router + sha256: f5e5d492440a7fb165fe1e2e1a623f31f734d3370900070b2b1e0d0428d59864 + 
url: "https://pub.dev" + source: hosted + version: "1.1.4" + shelf_static: + dependency: transitive + description: + name: shelf_static + sha256: c87c3875f91262785dade62d135760c2c69cb217ac759485334c5857ad89f6e3 + url: "https://pub.dev" + source: hosted + version: "1.1.3" + shelf_web_socket: + dependency: "direct main" + description: + name: shelf_web_socket + sha256: cc36c297b52866d203dbf9332263c94becc2fe0ceaa9681d07b6ef9807023b67 + url: "https://pub.dev" + source: hosted + version: "2.0.1" + source_gen: + dependency: transitive + description: + name: source_gen + sha256: "35c8150ece9e8c8d263337a265153c3329667640850b9304861faea59fc98f6b" + url: "https://pub.dev" + source: hosted + version: "2.0.0" + source_helper: + dependency: transitive + description: + name: source_helper + sha256: a447acb083d3a5ef17f983dd36201aeea33fedadb3228fa831f2f0c92f0f3aca + url: "https://pub.dev" + source: hosted + version: "1.3.7" + source_map_stack_trace: + dependency: transitive + description: + name: source_map_stack_trace + sha256: c0713a43e323c3302c2abe2a1cc89aa057a387101ebd280371d6a6c9fa68516b + url: "https://pub.dev" + source: hosted + version: "2.1.2" + source_maps: + dependency: transitive + description: + name: source_maps + sha256: "190222579a448b03896e0ca6eca5998fa810fda630c1d65e2f78b3f638f54812" + url: "https://pub.dev" + source: hosted + version: "0.10.13" + source_span: + dependency: transitive + description: + name: source_span + sha256: "56a02f1f4cd1a2d96303c0144c93bd6d909eea6bee6bf5a0e0b685edbd4c47ab" + url: "https://pub.dev" + source: hosted + version: "1.10.2" + stack_trace: + dependency: transitive + description: + name: stack_trace + sha256: "8b27215b45d22309b5cddda1aa2b19bdfec9df0e765f2de506401c071d38d1b1" + url: "https://pub.dev" + source: hosted + version: "1.12.1" + stream_channel: + dependency: transitive + description: + name: stream_channel + sha256: "969e04c80b8bcdf826f8f16579c7b14d780458bd97f56d107d3950fdbeef059d" + url: "https://pub.dev" + source: hosted + 
version: "2.1.4" + stream_transform: + dependency: transitive + description: + name: stream_transform + sha256: ad47125e588cfd37a9a7f86c7d6356dde8dfe89d071d293f80ca9e9273a33871 + url: "https://pub.dev" + source: hosted + version: "2.1.1" + string_scanner: + dependency: transitive + description: + name: string_scanner + sha256: "921cd31725b72fe181906c6a94d987c78e3b98c2e205b397ea399d4054872b43" + url: "https://pub.dev" + source: hosted + version: "1.4.1" + term_glyph: + dependency: transitive + description: + name: term_glyph + sha256: "7f554798625ea768a7518313e58f83891c7f5024f88e46e7182a4558850a4b8e" + url: "https://pub.dev" + source: hosted + version: "1.2.2" + test: + dependency: "direct dev" + description: + name: test + sha256: "75906bf273541b676716d1ca7627a17e4c4070a3a16272b7a3dc7da3b9f3f6b7" + url: "https://pub.dev" + source: hosted + version: "1.26.3" + test_api: + dependency: transitive + description: + name: test_api + sha256: ab2726c1a94d3176a45960b6234466ec367179b87dd74f1611adb1f3b5fb9d55 + url: "https://pub.dev" + source: hosted + version: "0.7.7" + test_core: + dependency: transitive + description: + name: test_core + sha256: "0cc24b5ff94b38d2ae73e1eb43cc302b77964fbf67abad1e296025b78deb53d0" + url: "https://pub.dev" + source: hosted + version: "0.6.12" + timing: + dependency: transitive + description: + name: timing + sha256: "62ee18aca144e4a9f29d212f5a4c6a053be252b895ab14b5821996cff4ed90fe" + url: "https://pub.dev" + source: hosted + version: "1.0.2" + typed_data: + dependency: transitive + description: + name: typed_data + sha256: f9049c039ebfeb4cf7a7104a675823cd72dba8297f264b6637062516699fa006 + url: "https://pub.dev" + source: hosted + version: "1.4.0" + unorm_dart: + dependency: transitive + description: + name: unorm_dart + sha256: "0c69186b03ca6addab0774bcc0f4f17b88d4ce78d9d4d8f0619e30a99ead58e7" + url: "https://pub.dev" + source: hosted + version: "0.3.2" + uuid: + dependency: "direct main" + description: + name: uuid + sha256: 
"1fef9e8e11e2991bb773070d4656b7bd5d850967a2456cfc83cf47925ba79489" + url: "https://pub.dev" + source: hosted + version: "4.5.3" + vm_service: + dependency: transitive + description: + name: vm_service + sha256: "45caa6c5917fa127b5dbcfbd1fa60b14e583afdc08bfc96dda38886ca252eb60" + url: "https://pub.dev" + source: hosted + version: "15.0.2" + watcher: + dependency: transitive + description: + name: watcher + sha256: "1398c9f081a753f9226febe8900fce8f7d0a67163334e1c94a2438339d79d635" + url: "https://pub.dev" + source: hosted + version: "1.2.1" + web: + dependency: transitive + description: + name: web + sha256: "868d88a33d8a87b18ffc05f9f030ba328ffefba92d6c127917a2ba740f9cfe4a" + url: "https://pub.dev" + source: hosted + version: "1.1.1" + web_socket: + dependency: transitive + description: + name: web_socket + sha256: "34d64019aa8e36bf9842ac014bb5d2f5586ca73df5e4d9bf5c936975cae6982c" + url: "https://pub.dev" + source: hosted + version: "1.0.1" + web_socket_channel: + dependency: "direct main" + description: + name: web_socket_channel + sha256: d645757fb0f4773d602444000a8131ff5d48c9e47adfe9772652dd1a4f2d45c8 + url: "https://pub.dev" + source: hosted + version: "3.0.3" + webkit_inspection_protocol: + dependency: transitive + description: + name: webkit_inspection_protocol + sha256: "87d3f2333bb240704cd3f1c6b5b7acd8a10e7f0bc28c28dcf14e782014f4a572" + url: "https://pub.dev" + source: hosted + version: "1.2.1" + yaml: + dependency: transitive + description: + name: yaml + sha256: b9da305ac7c39faa3f030eccd175340f968459dae4af175130b3fc47e40d76ce + url: "https://pub.dev" + source: hosted + version: "3.1.3" +sdks: + dart: ">=3.7.0 <4.0.0" diff --git a/pubspec.yaml b/pubspec.yaml new file mode 100644 index 0000000..777aa06 --- /dev/null +++ b/pubspec.yaml @@ -0,0 +1,30 @@ +name: focusflow_api +description: FocusFlow ADHD Task Manager Backend API +version: 0.1.0 +publish_to: 'none' + +environment: + sdk: ^3.7.0 + +dependencies: + shelf: ^1.4.0 + shelf_router: ^1.1.0 + 
shelf_cors_headers: ^0.1.0 + shelf_web_socket: ^2.0.0 + postgres: ^3.4.0 + redis: ^3.0.0 + dart_jsonwebtoken: ^2.14.0 + crypto: ^3.0.0 + uuid: ^4.5.0 + dotenv: ^4.2.0 + args: ^2.0.0 + http: ^1.0.0 + logging: ^1.2.0 + json_annotation: ^4.9.0 + web_socket_channel: ^3.0.0 + +dev_dependencies: + build_runner: ^2.4.0 + json_serializable: ^6.8.0 + lints: ^5.0.0 + test: ^1.24.0 diff --git a/test/server_test.dart b/test/server_test.dart new file mode 100644 index 0000000..3081d87 --- /dev/null +++ b/test/server_test.dart @@ -0,0 +1,39 @@
import 'dart:async';
import 'dart:convert';
import 'dart:io';

import 'package:http/http.dart';
import 'package:test/test.dart';

/// End-to-end smoke tests: boot `bin/server.dart` as a child process and
/// exercise its HTTP surface over loopback.
void main() {
  const port = '8080';
  // Connect via loopback. 0.0.0.0 is a bind-only wildcard address, not a
  // portable connect target (connecting to it fails on Windows).
  final host = 'http://localhost:$port';
  late Process p;

  setUp(() async {
    p = await Process.start(
      'dart',
      ['run', 'bin/server.dart'],
      environment: {'PORT': port},
    );

    // Surface server-side failures instead of hanging with no diagnostics.
    p.stderr.transform(utf8.decoder).listen(stderr.write);

    // Wait for the server's first line on stdout (its startup message),
    // but keep the stream subscribed afterwards so the child can never
    // block on a full stdout pipe buffer. Bound the wait so a server that
    // fails to start produces a test failure rather than an infinite hang.
    final started = Completer<void>();
    p.stdout.transform(utf8.decoder).listen((chunk) {
      if (!started.isCompleted) started.complete();
    });
    await started.future.timeout(const Duration(seconds: 30));
  });

  tearDown(() => p.kill());

  test('Root', () async {
    final response = await get(Uri.parse('$host/'));
    expect(response.statusCode, 200);
    expect(response.body, 'Hello, World!\n');
  });

  test('Echo', () async {
    final response = await get(Uri.parse('$host/echo/hello'));
    expect(response.statusCode, 200);
    expect(response.body, 'hello\n');
  });

  test('404', () async {
    final response = await get(Uri.parse('$host/foobar'));
    expect(response.statusCode, 404);
  });
}