diff --git a/.cursor/commands/create-module.md b/.cursor/commands/create-module.md new file mode 100644 index 0000000..2d72554 --- /dev/null +++ b/.cursor/commands/create-module.md @@ -0,0 +1,180 @@ +# Create a new module (tbk CLI) + +## Overview + +Scaffold a fully-typed module with controller, service, router, schema, and model files, then wire it into the application routing system. + +## Inputs + +- **moduleName**: module folder/name in `src/modules/` (e.g., `product`) +- **apiPath**: optional API root path; defaults to `/api` (e.g., `/api/v1`) + +## Steps + +1. **Generate module files** + + ```bash + pnpm tbk generate:module --path + ``` + + Examples: + + ```bash + pnpm tbk generate:module product --path /api + pnpm tbk generate:module product --path /api/v1 + ``` + + This creates: + + - `src/modules//.dto.ts` + - `src/modules//.model.ts` + - `src/modules//.schema.ts` + - `src/modules//.services.ts` + - `src/modules//.controller.ts` + - `src/modules//.router.ts` (exports `_ROUTER_ROOT` and default router) + +2. **Register router in routes** + Add an import and `router.use(...)` in `src/routes/routes.ts`: + + ```ts + // add with other imports + import Router, { _ROUTER_ROOT } from '../modules//.router'; + + // add with other router.use calls + router.use(_ROUTER_ROOT, Router); + ``` + + - Replace `` with your actual module name (e.g., `product`) + - Replace `` with the uppercased module name (e.g., `PRODUCT`) + + Example for `product`: + + ```ts + import productRouter, { + PRODUCT_ROUTER_ROOT, + } from '../modules/product/product.router'; + router.use(PRODUCT_ROUTER_ROOT, productRouter); + ``` + +3. **Rebuild OpenAPI documentation** + + ```bash + pnpm openapi + ``` + + Auto-generates Swagger from MagicRouter + Zod schemas. + +4. **Typecheck and lint** + + ```bash + pnpm typecheck && pnpm lint + ``` + +5. **Register with admin dashboard** (if needed) + + Add your module to `src/plugins/admin/registry.ts`: + + ```ts + import Model from '../modules//.model'; + + export const adminResources: AdminResource[] = [ + // ... existing resources + { + name: 's', + label: 's', + model: Model, + readOnlyFields: ['_id', 'createdAt', 'updatedAt'] + }, + ]; + ``` + + - Replace `` with your module name (e.g., `product`) + - Replace `` with PascalCase version (e.g., `Product`) + - Adjust `readOnlyFields` as needed for your module + +6. 
**Optional: Create seeder and factory** + ```bash + pnpm tbk make:factory / + pnpm tbk make:seeder / + ``` + +## Module Checklist + +- [ ] Module files generated successfully +- [ ] Router registered in `src/routes/routes.ts` +- [ ] Module registered in admin dashboard (`src/plugins/admin/registry.ts`) (if needed) +- [ ] OpenAPI documentation rebuilt +- [ ] Code passes typecheck and lint +- [ ] Environment variables added to `src/config/env.ts` and `.env.sample` (if needed) +- [ ] Committed with Conventional Commits format + +## Response Validation + +Generated modules automatically use the **response validation system**: + +- **Response schemas** defined in schema files using `R.success()`, `R.paginated()`, `R.error()` helpers +- **Response types** exported from schema files for type-safe controllers +- **Typed response helpers** (`res.ok()`, `res.created()`, `res.noContent()`) in controllers +- **OpenAPI documentation** includes accurate per-status response schemas +- **Runtime validation** ensures responses match schemas (configurable via `RESPONSE_VALIDATION` env var) + +### Example from generated code: + +**Schema file (module.schema.ts):** + +```typescript +import { R } from '../../openapi/response.builders'; +import { itemOutSchema } from './item.dto'; + +// Response schemas +export const getItemsResponseSchema = R.paginated(itemOutSchema); +export const createItemResponseSchema = R.success(itemOutSchema); + +// Response types +export type GetItemsResponseSchema = z.infer; +export type CreateItemResponseSchema = z.infer; +``` + +**Router (module.router.ts):** + +```typescript +import { getItemsResponseSchema } from './module.schema'; + +router.get( + '/', + { + requestType: { query: getItemsSchema }, + responses: { + 200: getItemsResponseSchema, + }, + }, + canAccess(), + handleGetItems, +); +``` + +**Controller (module.controller.ts):** + +```typescript +import type { ResponseExtended } from '../../types'; +import type { GetItemsResponseSchema } from './module.schema'; + +export const handleGetItems = async ( + req: Request, + res: ResponseExtended, +) => { + const { results, paginatorInfo } = await getItems(req.query); + return res.ok?.({ + success: true, + data: { items: results, paginator: paginatorInfo }, + }); +}; +``` + +## Notes + +- Routes must use `MagicRouter`; the generator already sets this up and defines `_ROUTER_ROOT` using the `--path` you pass +- Generated code uses **new response validation pattern** - see `docs/RESPONSE_VALIDATION.md` for details +- Legacy `successResponse()` still works but new pattern is recommended +- Keep environment configs valid, and update `src/config/env.ts` and `.env.sample` if you introduce new variables +- Commit with Conventional Commits (e.g., `feat(): add `) diff --git a/.cursor/commands/create-seeder.md b/.cursor/commands/create-seeder.md new file mode 100644 index 0000000..a639d4d --- /dev/null +++ b/.cursor/commands/create-seeder.md @@ -0,0 +1,131 @@ +# Create a module-tied seeder + factory (tbk CLI) + +## Overview + +Scaffold a seeder and its factory for a module, register it in the database seeder, and populate your database with test data. + +## Inputs + +- **module**: existing module folder in `src/modules/` (e.g., `user`) +- **name**: base name used for both factory and seeder (CLI appends `Seeder`) + +## Steps + +1. 
**Generate factory and seeder files** + + ```bash + # 1) Factory (used inside the seeder) + pnpm tbk make:factory / + + # 2) Seeder (will import and use the factory) + pnpm tbk make:seeder / + ``` + + Examples: + + ```bash + pnpm tbk make:factory payment/Payment && pnpm tbk make:seeder payment/Payment + pnpm tbk make:factory user/User && pnpm tbk make:seeder user/User + ``` + + This creates: + + - `src/modules//factories/.factory.ts` (lowercased file; exports `Factory`) + - `src/modules//seeders/Seeder.ts` + +2. **Implement seeder logic** + Edit `src/modules//seeders/Seeder.ts` to import the factory and insert documents via the module model: + + ```ts + import from '../.model'; + import { Factory } from '../factories/.factory'; + + export const Seeder = { + name: 'Seeder', + groups: ['dev'], + // collections help --fresh drop only the affected collections + collections: [''], + async run(ctx) { + ctx.logger.info('Running Seeder'); + + const docs = Array.from({ length: 10 }, (_, i) => + Factory.build(i + 1), + ); + + if (!ctx.env.dryRun) { + await .insertMany(docs); + } + + // share references across seeders if needed + ctx.refs.set(':seeded', docs.map((d) => d._id)); + }, + }; + ``` + + - Replace `` with your module's mongoose model (e.g., `Payment`) + - Replace ``/`` with the factory export/file (e.g., `paymentFactory`/`payment`) + - Replace `` with the underlying collection name + +3. **Register seeder** + Add your seeder to `src/seeders/registry.ts`: + + ```ts + import { Seeder } from '../modules//seeders/Seeder'; + + export const seeders = [ + // existing seeders... + Seeder, + ]; + ``` + +4. **Run seeders** + + ```bash + # default: group=dev, transactions enabled + pnpm tbk seed + + # choose specific group + pnpm tbk seed --group dev + pnpm tbk seed --group test + pnpm tbk seed --group demo + + # run specific seeders only (comma-separated names) + pnpm tbk seed --only Seeder,OtherSeeder + + # drop involved collections before seeding + pnpm tbk seed --fresh + + # dry run (log only, no writes) + pnpm tbk seed --dry-run + + # set random seed value + pnpm tbk seed --seed 42 + + # disable transactions globally + pnpm tbk seed --no-transaction + + # allow in production (blocked unless forced) + pnpm tbk seed --force + ``` + +## Seeder Checklist + +- [ ] Factory file generated in `src/modules//factories/` +- [ ] Seeder file generated in `src/modules//seeders/` +- [ ] Factory implements `build(i, overrides)` method +- [ ] Seeder registered in `src/seeders/registry.ts` +- [ ] Collections specified in seeder config +- [ ] MongoDB service running (`docker compose up -d`) +- [ ] Seeder tested with `--dry-run` flag + +## Advanced Options + +- **Order dependencies**: Use `dependsOn` to order seeders (names must match other seeders' `name`) +- **Share data**: Use `ctx.refs.set()` and `ctx.refs.get()` to pass data between seeders +- **Group targeting**: Assign seeders to groups (`dev`, `test`, `demo`) for different environments + +## Notes + +- Factories live in `src/modules//factories/` and export `Factory` with a `build(i, overrides)` helper +- Ensure MongoDB is reachable before seeding (`docker compose up -d`) +- Use transactions by default for data consistency; disable with `--no-transaction` if needed diff --git a/.cursor/rules/architecture.mdc b/.cursor/rules/architecture.mdc new file mode 100644 index 0000000..f436a10 --- /dev/null +++ b/.cursor/rules/architecture.mdc @@ -0,0 +1,79 @@ +--- +alwaysApply: true +description: Core architecture and patterns for the TypeScript backend toolkit +--- + 
+# Architecture Overview + +This is a TypeScript Express.js backend toolkit with a modular, type-safe architecture. + +## Core Patterns + +### MagicRouter System + +- All routes MUST use MagicRouter from [router.ts](mdc:src/plugins/magic/router.ts) +- MagicRouter automatically generates OpenAPI/Swagger documentation from Zod schemas +- Never use plain Express `app.get()` or `router.get()` - always use MagicRouter + +### Module Structure + +Modules live in [src/modules/](mdc:src/modules/) and follow this structure: + +``` +module-name/ + ├── module.controller.ts # Business logic handlers + ├── module.router.ts # MagicRouter route definitions + ├── module.service.ts # Database and external service interactions + ├── module.schema.ts # Zod schemas for validation + ├── module.model.ts # Mongoose models + └── module.dto.ts # TypeScript types/interfaces +``` + +### Validation & Type Safety + +- ALWAYS use Zod schemas for request/response validation +- Runtime validation via [validate-zod-schema.ts](mdc:src/middlewares/validate-zod-schema.ts) +- Extend Zod with OpenAPI metadata using `.openapi()` method from [zod-extend.ts](mdc:src/plugins/magic/zod-extend.ts) +- Use TypeScript strict mode - no `any` types + +### Configuration + +- All config in [env.ts](mdc:src/config/env.ts) +- Environment variables validated with Zod +- Time values are in milliseconds (converted from strings like "1d" or "7d") + +### Database + +- MongoDB with Mongoose ODM +- Connection managed in [database.ts](mdc:src/lib/database.ts) +- Models defined per module (e.g., [user.model.ts](mdc:src/modules/user/user.model.ts)) + +### Background Jobs & Queues + +- BullMQ with Redis for all background jobs +- Email queue in [email.queue.ts](mdc:src/queues/email.queue.ts) +- Admin dashboard at `/queues` + +### Error Handling + +- Global error handler in [error-handler.ts](mdc:src/middlewares/error-handler.ts) +- Throw errors with proper HTTP status codes +- Errors are automatically caught and formatted + +## Technology Stack + +- **Runtime**: Node.js with TypeScript +- **Framework**: Express.js +- **Validation**: Zod +- **Database**: MongoDB + Mongoose +- **Cache/Queue**: Redis + BullMQ +- **Auth**: JWT (with optional OTP) +- **Storage**: AWS S3 +- **Email**: React Email + Mailgun +- **Real-time**: Socket.io +- **API Docs**: Swagger/OpenAPI (auto-generated) +- **Logger**: Pino + +## Package Manager + +ALWAYS use `pnpm` - never npm or yarn diff --git a/.cursor/rules/controllers.mdc b/.cursor/rules/controllers.mdc new file mode 100644 index 0000000..2cf7034 --- /dev/null +++ b/.cursor/rules/controllers.mdc @@ -0,0 +1,464 @@ +--- +globs: *.controller.ts +description: Controller patterns for handling business logic +--- + +# Controller Patterns + +## Core Principle + +Controllers are async functions that handle validated requests and return responses. They should be thin - delegate complex logic to services. + +## Controller Template + +```typescript +import type { Request } from 'express'; +import type { ResponseExtended } from '@/types'; +import type { JwtPayload } from '@/utils/jwt.utils'; +import type { + CreateItemSchemaType, + GetItemsSchemaType, + CreateItemResponseSchema, + GetItemsResponseSchema, +} from './module.schema'; +import { + createItem, + deleteItem, + findById, + getItems, + updateItem, +} from './module.service'; + +/** + * Description of what this controller does + */ +export const handleAction = async ( + req: Request, + res: ResponseExtended, +) => { + // 1. 
Extract validated data (already validated by Zod middleware) + const { email, name } = req.body; // From body schema + const { id } = req.params; // From params schema + const { page = 1, limit = 10 } = req.query; // From query schema + + // 2. Access JWT payload (if route uses canAccess middleware) + const userId = req.user?.sub; + + // 3. Call service layer for business logic + const result = await createItem({ email, name, userId }); + + // 4. Return response using typed helper (NEW PATTERN - RECOMMENDED) + return res.created?.({ + success: true, + message: 'Item created successfully', + data: result, + }); +}; + +/** + * Example: Get single item (NEW PATTERN) + */ +export const handleGetById = async ( + req: Request<{ id: string }, unknown, unknown>, + res: ResponseExtended, +) => { + const { id } = req.params; + + const item = await findById(id); + + return res.ok?.({ + success: true, + data: item, + }); +}; + +/** + * Example: List with pagination (NEW PATTERN) + */ +export const handleGetItems = async ( + req: Request, + res: ResponseExtended, +) => { + const { results, paginatorInfo } = await getItems(req.query); + + return res.ok?.({ + success: true, + data: { + items: results, + paginator: paginatorInfo, + }, + }); +}; + +/** + * Example: Create new item + */ +export const handleCreate = async ( + req: Request, + res: Response, +) => { + const data = req.body; + const userId = req.user?.sub; + + const item = await createItem({ ...data, createdBy: userId }); + + return successResponse(res, 'Item created', item, StatusCodes.CREATED); +}; + +/** + * Example: Update item + */ +export const handleUpdate = async ( + req: Request<{ id: string }, unknown, UpdateItemSchemaType>, + res: Response, +) => { + const { id } = req.params; + const data = req.body; + const userId = req.user?.sub; + + const item = await updateItem(id, data, userId); + + if (!item) { + return successResponse( + res, + 'Item not found', + undefined, + StatusCodes.NOT_FOUND, + ); + } + + return successResponse(res, 'Item updated', item); +}; + +/** + * Example: Delete item + */ +export const handleDelete = async ( + req: Request<{ id: string }, unknown, unknown>, + res: Response, +) => { + const { id } = req.params; + + await deleteItem(id); + + return successResponse(res, 'Item deleted successfully'); +}; + +/** + * Example: Controller with no request params (unused) + */ +export const handlePublicAction = async (_: Request, res: Response) => { + const result = await performAction(); + + return successResponse(res, 'Action completed', result); +}; +``` + +## Key Points + +### TypeScript Request Typing + +Always use TypeScript generics for type-safe requests: + +```typescript +Request; + +// Examples: +Request<{ id: string }, unknown, unknown>; // params only +Request; // body only +Request; // query only +Request<{ id: string }, unknown, UpdateUserSchemaType>; // params + body +``` + +### Request Data Access + +- `req.body` - Request body (validated by Zod) +- `req.params` - URL parameters (validated by Zod) +- `req.query` - Query parameters (validated by Zod) +- `req.user` - JWT token payload (if using extractJwt middleware) +- `req.file` / `req.files` - Uploaded files (if using multer middleware) + +### JWT Payload Access + +When route uses `extractJwt` middleware from [extract-jwt-schema.ts](mdc:src/middlewares/extract-jwt-schema.ts): + +```typescript +import type { JwtPayload } from '@/utils/jwt.utils'; + +// Access JWT payload via req.user +const userId = req.user?.sub; // User ID +const email = req.user?.email; 
// User email +const username = req.user?.username; // Username +const role = req.user?.role; // User role + +// Type assertion if needed +const payload = req.user as JwtPayload; +``` + +**JwtPayload Type:** + +```typescript +type JwtPayload = { + sub: string; // User ID + email?: string | null; + phoneNo?: string | null; + username: string; + role: RoleType; +}; +``` + +### File Upload Access + +When route uses multer middleware from [multer-s3.ts](mdc:src/middlewares/multer-s3.ts): + +```typescript +const file = req.file; // For single file +const files = req.files; // For multiple files +const url = (req.file as any).location; // S3 URL +``` + +### Response Pattern + +**NEW (RECOMMENDED): Use typed response helpers** from ResponseExtended: + +```typescript +import type { ResponseExtended } from '@/types'; + +// 200 OK response +return res.ok?.({ + success: true, + message: 'Success message', + data: item, +}); + +// 201 Created response +return res.created?.({ + success: true, + message: 'Item created', + data: newItem, +}); + +// 204 No Content response +return res.noContent?.(); + +// Paginated list response +return res.ok?.({ + success: true, + data: { + items: results, + paginator: paginatorInfo, + }, +}); +``` + +**LEGACY (STILL SUPPORTED): `successResponse()` helper** from [response.utils.ts](mdc:src/utils/response.utils.ts): + +```typescript +import { successResponse } from '@/utils/response.utils'; +import { StatusCodes } from '@/plugins/magic/status-codes'; + +// Basic success (200 OK) +return successResponse(res, 'Success message'); + +// Success with data +return successResponse(res, 'User created', user); + +// Success with custom status code +return successResponse(res, 'Created', item, StatusCodes.CREATED); + +// Success with data but no message +return successResponse(res, undefined, { results, paginatorInfo }); +``` + +**Response Helpers Benefits:** + +- ✅ Type-safe response structure +- ✅ Runtime validation (configurable via `RESPONSE_VALIDATION` env var) +- ✅ Accurate OpenAPI documentation +- ✅ Consistent response format across your API +- ✅ Better IDE autocomplete and error detection + +**Response Format:** + +```json +{ + "success": true, + "message": "Optional message", + "data": { + /* Optional payload */ + } +} +``` + +### Cookie Management + +For authentication tokens: + +```typescript +import { AUTH_COOKIE_KEY, COOKIE_CONFIG } from './auth.constants'; + +// Set auth cookie +res.cookie(AUTH_COOKIE_KEY, token, COOKIE_CONFIG); + +// Clear cookie on logout +res.cookie(AUTH_COOKIE_KEY, undefined, COOKIE_CONFIG); +``` + +### Error Handling + +- Controllers don't need try-catch blocks +- Global error handler in [error-handler.ts](mdc:src/middlewares/error-handler.ts) catches all errors +- Just throw errors - they'll be handled automatically: + +```typescript +// Simple error (500) +throw new Error('Something went wrong'); + +// Not found - use successResponse with 404 +if (!item) { + return successResponse( + res, + 'Item not found', + undefined, + StatusCodes.NOT_FOUND, + ); +} + +// Or throw with custom status +const error = new Error('Not found') as any; +error.statusCode = 404; +throw error; +``` + +### Status Codes + +Use `@/plugins/magic/status-codes` for type-safe status codes: + +```typescript +import { StatusCodes } from '@/plugins/magic/status-codes'; + +StatusCodes.OK; // 200 +StatusCodes.CREATED; // 201 +StatusCodes.BAD_REQUEST; // 400 +StatusCodes.UNAUTHORIZED; // 401 +StatusCodes.FORBIDDEN; // 403 +StatusCodes.NOT_FOUND; // 404 
+StatusCodes.INTERNAL_SERVER_ERROR; // 500 +``` + +### Logging + +Use Pino logger from [logger.ts](mdc:src/plugins/observability/logger.ts): + +```typescript +import logger from '@/plugins/observability/logger'; + +logger.info('Action performed', { userId, action: 'create' }); +logger.error('Error occurred', { error: error.message, userId }); +logger.warn('Warning message', { data }); +``` + +## Service Layer Pattern + +Controllers should delegate to services in `module.service.ts`: + +- **Controllers**: Handle HTTP concerns (req/res, cookies, response formatting) +- **Services**: Handle business logic, database operations, external APIs + +Import individual service functions: + +```typescript +// ✅ DO: Import specific functions +import { createUser, deleteUser, getUsers } from './user.service'; + +// ❌ DON'T: Use namespace imports +import * as userService from './user.service'; +``` + +## Naming Conventions + +- Controller functions: `handle` + `PascalCase` action + - `handleGetUsers` + - `handleCreateUser` + - `handleDeleteUser` + - `handleLoginByEmail` + - `handleGetCurrentUser` + +## Real-World Examples + +### Authentication Controller + +```typescript +export const handleLoginByEmail = async ( + req: Request, + res: Response, +) => { + const token = await loginUserByEmail(req.body); + + if (config.SET_SESSION) { + res.cookie(AUTH_COOKIE_KEY, token, COOKIE_CONFIG); + } + + return successResponse(res, 'Login successful', { token }); +}; + +export const handleLogout = async (_: Request, res: Response) => { + res.cookie(AUTH_COOKIE_KEY, undefined, COOKIE_CONFIG); + + return successResponse(res, 'Logout successful'); +}; +``` + +### Protected Route with JWT + +```typescript +export const handleChangePassword = async ( + req: Request, + res: Response, +) => { + const userId = (req.user as JwtPayload).sub; + + await changePassword(userId, req.body); + + return successResponse(res, 'Password successfully changed'); +}; +``` + +### Paginated List + +```typescript +export const handleGetUsers = async ( + req: Request, + res: Response, +) => { + const { results, paginatorInfo } = await getUsers( + { id: req.user.sub }, + req.query, + ); + + return successResponse(res, undefined, { results, paginatorInfo }); +}; +``` + +## Common Mistakes to Avoid + +❌ DON'T use direct `res.status().json()` +✅ DO use `successResponse()` helper + +❌ DON'T use `req.jwtPayload` +✅ DO use `req.user` for JWT payload + +❌ DON'T put business logic in controllers +✅ DO move complex logic to services + +❌ DON'T validate data in controllers (Zod does this) +✅ DO trust validated data from req.body/params/query + +❌ DON'T use try-catch everywhere +✅ DO let global error handler catch errors + +❌ DON'T send multiple responses +✅ DO return single response per request + +❌ DON'T use namespace imports for services +✅ DO import individual service functions diff --git a/.cursor/rules/development.mdc b/.cursor/rules/development.mdc new file mode 100644 index 0000000..04a5dfc --- /dev/null +++ b/.cursor/rules/development.mdc @@ -0,0 +1,353 @@ +--- +description: Development workflow and commands +--- + +# Development Workflow + +## Setup + +### Initial Setup + +```bash +# 1. Install dependencies +pnpm install + +# 2. Start Docker services (MongoDB + Redis) +docker compose up -d + +# 3. Copy environment template +cp .env.sample .env + +# 4. Edit .env with your values +nano .env + +# 5. (Optional) Seed database +pnpm run seeder + +# 6. 
Start development server +pnpm run dev +``` + +### Prerequisites + +- Node.js (v18+) +- pnpm (package manager) +- Docker and Docker Compose +- MongoDB (via Docker or local) +- Redis (via Docker or local) + +## Development Commands + +### Running the Server + +```bash +# Development with hot reload +pnpm run dev + +# Backend only (without email template server) +pnpm run start:dev + +# Production build + start +pnpm run build && pnpm run start:prod + +# Local production (uses .env.local) +pnpm run start:local +``` + +### Building + +```bash +# Build TypeScript to dist/ +pnpm run build + +# Build uses tsup (configured in build.ts) +``` + +### Linting + +```bash +# Check for linting errors +pnpm run lint + +# Auto-fix linting errors +pnpm run lint:fix +``` + +### Database + +```bash +# Run database seeder +pnpm run seeder +``` + +### Email Development + +```bash +# Start email template development server +pnpm run email:dev + +# Access at: http://localhost:3001 +``` + +## Project Structure + +``` +src/ +├── main.ts # Application entry point +├── config/ # Configuration management +├── lib/ # Core libraries (DB, Redis, AWS, etc.) +├── modules/ # Feature modules (auth, user, etc.) +│ └── module-name/ +│ ├── module.model.ts +│ ├── module.controller.ts +│ ├── module.service.ts +│ ├── module.router.ts +│ ├── module.schema.ts +│ └── module.dto.ts +├── middlewares/ # Express middlewares +├── queues/ # BullMQ background jobs +├── routes/ # Route registration +├── email/ # Email templates (React Email) +└── utils/ # Utility functions +``` + +## Key Endpoints + +### API Documentation + +- Swagger UI: `http://localhost:3000/docs` +- OpenAPI JSON: `http://localhost:3000/openapi.yml` + +### Queue Dashboard + +- BullMQ Admin: `http://localhost:3000/queues` + +### Health Check + +- `GET http://localhost:3000/api/health` + +## Development Workflow + +### Creating a New Feature + +1. Create new module in `src/modules/feature-name/` +2. Create model, controller, service, router, schema files +3. Register router in `src/routes/routes.ts` +4. Test in Swagger UI +5. (Optional) Add seeder + +See [new-module.mdc](mdc:.cursor/rules/new-module.mdc) for detailed steps. + +### Making Changes + +1. Edit files (hot reload enabled in dev mode) +2. Check for linter errors: `pnpm run lint` +3. Fix errors: `pnpm run lint:fix` +4. Test changes in Swagger UI or API client +5. Commit changes + +### Adding Dependencies + +```bash +# Add runtime dependency +pnpm add package-name + +# Add dev dependency +pnpm add -D package-name +``` + +## Testing the API + +### Using Swagger UI + +1. Navigate to `http://localhost:3000/docs` +2. Expand endpoint +3. Click "Try it out" +4. Fill in parameters +5. Execute request +6. View response + +### Using curl + +```bash +# Public endpoint +curl http://localhost:3000/api/health + +# Protected endpoint (requires JWT) +curl -H "Authorization: Bearer YOUR_JWT_TOKEN" \ + http://localhost:3000/api/user/profile + +# POST request +curl -X POST http://localhost:3000/api/auth/login \ + -H "Content-Type: application/json" \ + -d '{"email":"user@example.com","password":"password123"}' +``` + +### Using Postman/Insomnia + +1. Import OpenAPI spec from `http://localhost:3000/docs.json` +2. All endpoints auto-configured +3. 
Set Authorization header for protected routes + +## Debugging + +### Logging + +Logs use Pino logger from [logger.ts](mdc:src/plugins/observability/logger.ts): + +```typescript +import { logger } from '@/plugins/observability/logger'; + +logger.info('Info message', { data }); +logger.error('Error message', { error }); +logger.debug('Debug message', { data }); +``` + +### VS Code Debugging + +Add to `.vscode/launch.json`: + +```json +{ + "type": "node", + "request": "launch", + "name": "Debug Dev Server", + "runtimeExecutable": "pnpm", + "runtimeArgs": ["run", "dev"], + "skipFiles": ["/**"] +} +``` + +### MongoDB Debugging + +```bash +# Connect to MongoDB +docker exec -it mongodb mongosh + +# List databases +show dbs + +# Use database +use your-db-name + +# List collections +show collections + +# Query data +db.users.find() +``` + +### Redis Debugging + +```bash +# Connect to Redis +docker exec -it redis redis-cli + +# List all keys +KEYS * + +# Get value +GET key-name + +# Monitor commands +MONITOR +``` + +## Common Issues + +### Port Already in Use + +```bash +# Find process using port 3000 +lsof -i :3000 + +# Kill process +kill -9 PID +``` + +### MongoDB Connection Failed + +- Check Docker is running: `docker ps` +- Check connection string in `.env` +- Restart MongoDB: `docker compose restart mongodb` + +### Redis Connection Failed + +- Check Docker is running: `docker ps` +- Check Redis config in `.env` +- Restart Redis: `docker compose restart redis` + +### TypeScript Errors + +```bash +# Check TypeScript errors +npx tsc --noEmit + +# Clean build and rebuild +rm -rf dist && pnpm run build +``` + +### Module Not Found + +```bash +# Clear node_modules and reinstall +rm -rf node_modules pnpm-lock.yaml +pnpm install +``` + +## Production Deployment + +### Build + +```bash +pnpm run build +``` + +### Start Production Server + +```bash +# Using .env.production +pnpm run start:prod + +# Using PM2 (recommended) +pm2 start ecosystem.config.js +``` + +### Environment Variables + +- Set all required variables in production environment +- Use strong secrets (min 32 characters) +- Enable production mode: `NODE_ENV=production` + +## Best Practices + +### Code Style + +- Use TypeScript strict mode +- No `any` types +- Use Zod for validation +- Follow ESLint rules +- Use async/await (not callbacks) + +### Git Workflow + +- Create feature branches +- Write descriptive commit messages +- Keep commits focused +- Review changes before committing +- Never commit `.env` files + +### Performance + +- Use `.lean()` for Mongoose queries when not modifying +- Add database indexes for queried fields +- Use background jobs for heavy operations +- Cache frequently accessed data in Redis + +### Security + +- Never log sensitive data (passwords, tokens) +- Validate all inputs with Zod +- Use JWT for authentication +- Rate limit API endpoints (if configured) +- Keep dependencies updated diff --git a/.cursor/rules/email.mdc b/.cursor/rules/email.mdc new file mode 100644 index 0000000..804b5e4 --- /dev/null +++ b/.cursor/rules/email.mdc @@ -0,0 +1,461 @@ +--- +globs: src/email/**/*,email.queue.ts +description: Email system using React Email and Mailgun with queue-based sending +--- + +# Email System + +## Architecture + +- **Templates**: React Email components in [src/email/templates/](mdc:src/email/templates/) +- **Service**: Email service in [src/email/email.service.ts](mdc:src/email/email.service.ts) +- **Provider**: Email provider abstraction in [src/lib/email.ts](mdc:src/lib/email.ts) (supports Mailgun & SMTP) +- 
**Queue**: Background sending via [src/queues/email.queue.ts](mdc:src/queues/email.queue.ts) +- **Development**: Preview server for templates + +## Email Configuration + +### Environment Variables + +```bash +# Option 1: Mailgun (Recommended) +MAILGUN_API_KEY=your-mailgun-api-key +MAILGUN_DOMAIN=your-domain.com +MAILGUN_FROM_EMAIL=noreply@your-domain.com + +# Option 2: SMTP (Fallback) +SMTP_HOST=smtp.gmail.com +SMTP_PORT=587 +SMTP_USERNAME=your-email@gmail.com +SMTP_PASSWORD=your-app-password +SMTP_FROM=noreply@your-domain.com +EMAIL_FROM=noreply@your-domain.com + +# Note: Provider auto-selects Mailgun if configured, otherwise SMTP +``` + +## Creating Email Templates + +### Step 1: Create React Component + +Create new file in `src/email/templates/TemplateName.tsx`: + +```typescript +import { + Html, + Head, + Body, + Container, + Section, + Text, + Button, + Hr, + Img, +} from "@react-email/components"; + +interface TemplateNameProps { + name: string; + actionUrl: string; +} + +export default function TemplateName({ name, actionUrl }: TemplateNameProps) { + return ( + + + + +
+    <Html>
+      <Head />
+      <Body style={styles.body}>
+        <Container style={styles.container}>
+          <Section style={styles.section}>
+            <Img
+              src="https://example.com/logo.png"
+              alt="Logo"
+              width="120"
+              style={styles.logo}
+            />
+            <Text style={styles.heading}>Hello, {name}!</Text>
+            <Text style={styles.text}>
+              Your email content goes here.
+            </Text>
+            <Button href={actionUrl} style={styles.button}>
+              Take Action
+            </Button>
+            <Hr style={styles.hr} />
+            <Text style={styles.footer}>
+              © 2025 Your Company. All rights reserved.
+            </Text>
+          </Section>
+        </Container>
+      </Body>
+    </Html>
+ + + ); +} + +const styles = { + body: { + backgroundColor: "#f6f9fc", + fontFamily: "-apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif", + }, + container: { + margin: "0 auto", + padding: "20px 0", + }, + section: { + backgroundColor: "#ffffff", + borderRadius: "8px", + padding: "40px", + }, + logo: { + margin: "0 auto 20px", + display: "block", + }, + heading: { + fontSize: "24px", + fontWeight: "bold", + margin: "20px 0", + color: "#1a1a1a", + }, + text: { + fontSize: "16px", + lineHeight: "24px", + color: "#525252", + margin: "16px 0", + }, + button: { + backgroundColor: "#007bff", + color: "#ffffff", + padding: "12px 32px", + borderRadius: "6px", + textDecoration: "none", + display: "inline-block", + margin: "20px 0", + }, + hr: { + borderColor: "#e6e6e6", + margin: "30px 0", + }, + footer: { + fontSize: "14px", + color: "#8c8c8c", + textAlign: "center" as const, + }, +}; + +// Preview props for development +TemplateName.PreviewProps = { + name: "John Doe", + actionUrl: "https://example.com/action", +} as TemplateNameProps; +``` + +### Step 2: Test Template + +```bash +# Start email development server +pnpm run email:dev + +# Open browser to preview +# http://localhost:3001 +``` + +## Sending Emails + +### Method 1: Direct Send (Simple) + +```typescript +import { sendEmail } from '@/email/email.service'; + +await sendEmail({ + to: 'user@example.com', + subject: 'Welcome!', + template: 'TemplateName', + data: { + name: 'John Doe', + actionUrl: 'https://example.com/verify', + }, +}); +``` + +### Method 2: Queue-based (Recommended) + +```typescript +import { emailQueue } from '@/queues/email.queue'; + +await emailQueue.add('sendEmail', { + to: 'user@example.com', + subject: 'Welcome!', + template: 'TemplateName', + data: { + name: 'John Doe', + actionUrl: 'https://example.com/verify', + }, +}); +``` + +## Email Service Usage + +The email service in [email.service.ts](mdc:src/email/email.service.ts) handles: + +- Template rendering +- HTML/text generation +- Queue job creation + +### Function Signature + +```typescript +interface SendEmailOptions { + to: string | string[]; // Recipient(s) + subject: string; + template: string; // Template name (without .tsx) + data: Record; // Props for template + from?: string; // Optional: override default sender + replyTo?: string; // Optional: reply-to address + attachments?: Array<{ + filename: string; + content: Buffer | string; + contentType?: string; + }>; +} + +export const sendEmail = async (options: SendEmailOptions): Promise; +``` + +## Queue System + +Email queue in [email.queue.ts](mdc:src/queues/email.queue.ts) provides: + +- Async sending (doesn't block API response) +- Automatic retries on failure +- Queue monitoring via dashboard + +### Queue Configuration + +```typescript +// Default options +{ + attempts: 3, // Retry up to 3 times + backoff: { + type: "exponential", + delay: 1000, // Start with 1 second delay + }, +} +``` + +### Custom Queue Options + +```typescript +await emailQueue.add( + 'sendEmail', + { to, subject, template, data }, + { + delay: 60000, // Send after 1 minute + attempts: 5, // Retry up to 5 times + priority: 1, // Higher priority (default: 0) + }, +); +``` + +## Common Email Templates + +### Welcome Email + +```typescript +await sendEmail({ + to: user.email, + subject: 'Welcome to Our Platform!', + template: 'Welcome', + data: { + name: user.name, + verifyUrl: `${config.FRONTEND_URL}/verify?token=${token}`, + }, +}); +``` + +### Password Reset + +```typescript +await sendEmail({ + to: user.email, + 
subject: 'Reset Your Password', + template: 'ResetPassword', + data: { + name: user.name, + resetUrl: `${config.FRONTEND_URL}/reset-password?token=${token}`, + expiresIn: '1 hour', + }, +}); +``` + +### OTP Verification + +```typescript +await sendEmail({ + to: user.email, + subject: 'Your Verification Code', + template: 'OTP', + data: { + name: user.name, + otp: otpCode, + expiresIn: '10 minutes', + }, +}); +``` + +### Notification + +```typescript +await sendEmail({ + to: user.email, + subject: 'New Activity', + template: 'Notification', + data: { + name: user.name, + message: 'You have a new message', + actionUrl: `${config.FRONTEND_URL}/messages`, + }, +}); +``` + +## React Email Components + +### Available Components + +- `Html` - Root HTML element +- `Head` - Head section +- `Body` - Body section +- `Container` - Main container +- `Section` - Content section +- `Text` - Text paragraph +- `Heading` - Heading element +- `Button` - Button/link +- `Hr` - Horizontal rule +- `Img` - Image +- `Link` - Hyperlink +- `Row` / `Column` - Grid layout + +### Styling + +```typescript +// Inline styles (required for email compatibility) +const styles = { + element: { + backgroundColor: "#ffffff", + padding: "20px", + fontSize: "16px", + }, +}; + +Content +``` + +## Monitoring + +### Queue Dashboard + +Access BullMQ dashboard at: `http://localhost:3000/queues` + +View: + +- Queued emails +- Processing status +- Failed emails +- Retry attempts + +### Logs + +Check email sending logs: + +```typescript +import { logger } from '@/plugins/observability/logger'; + +// Logs are automatically added by email service +logger.info('Email sent', { to, template }); +logger.error('Email failed', { to, template, error }); +``` + +## Testing Emails + +### Development Mode + +Set `MAILGUN_TO_OVERRIDE` to redirect all emails: + +```bash +MAILGUN_TO_OVERRIDE=dev@example.com +``` + +All emails will be sent to this address instead of actual recipients. 
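+
+A rough sketch of how the provider can apply the override (assuming the variable is exposed as `config.MAILGUN_TO_OVERRIDE` via `src/config/env.ts`; the exact field name in your provider may differ):
+
+```typescript
+import config from '@/config/env';
+
+// Assumption: MAILGUN_TO_OVERRIDE is declared as an optional string in env.ts.
+// When it is set (development), every message goes to the override address;
+// when it is unset (production), the real recipients are used.
+export const resolveRecipients = (to: string | string[]): string | string[] =>
+  config.MAILGUN_TO_OVERRIDE ? config.MAILGUN_TO_OVERRIDE : to;
+
+// Usage inside the provider, before handing the message to Mailgun/SMTP:
+// const recipients = resolveRecipients(options.to);
+```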
+ +### Preview in Browser + +```bash +# Start dev server +pnpm run email:dev + +# Visit http://localhost:3001 +# All templates listed with previews +``` + +### Manual Testing + +```bash +# In development console or test file +import { sendEmail } from "@/email/email.service"; + +await sendEmail({ + to: "test@example.com", + subject: "Test Email", + template: "TemplateName", + data: { /* test data */ }, +}); +``` + +## Best Practices + +### Template Design + +- Keep templates simple and clean +- Use inline styles (required for email clients) +- Test in multiple email clients +- Provide plain text fallback +- Include unsubscribe link (if applicable) +- Use responsive design +- Optimize images (small file sizes) + +### Sending + +- Always use queue for production (async) +- Set appropriate retry attempts +- Handle failures gracefully +- Log all email operations +- Rate limit sending if needed +- Verify email addresses before sending + +### Content + +- Personalize with user data +- Clear subject lines +- Brief and actionable content +- Include clear call-to-action +- Mobile-friendly design +- Avoid spam trigger words + +## Common Mistakes to Avoid + +❌ DON'T send emails synchronously in API handlers +✅ DO use queue for background sending + +❌ DON'T use external CSS +✅ DO use inline styles + +❌ DON'T forget to handle email failures +✅ DO set retry logic and monitor queue + +❌ DON'T send sensitive data in emails +✅ DO send links to secure pages instead + +❌ DON'T spam users +✅ DO respect user preferences and rate limits diff --git a/.cursor/rules/environment.mdc b/.cursor/rules/environment.mdc new file mode 100644 index 0000000..21a2da5 --- /dev/null +++ b/.cursor/rules/environment.mdc @@ -0,0 +1,221 @@ +--- +globs: .env*,config.service.ts +description: Environment configuration and secrets management +--- + +# Environment Configuration + +## Configuration Files + +- `.env.sample` - Template with all available variables +- `.env` - Local development (gitignored) +- `.env.local` - Local production build (gitignored) +- `.env.production` - Production environment (gitignored) +- [src/config/env.ts](mdc:src/config/env.ts) - Type-safe config with Zod validation + +## Configuration Pattern + +All environment variables are validated and typed in [env.ts](mdc:src/config/env.ts): + +```typescript +import { z } from 'zod'; + +const configSchema = z.object({ + NODE_ENV: z.enum(['development', 'production', 'test']), + PORT: z.string().transform(Number), + DATABASE_URL: z.string().url(), + JWT_SECRET: z.string().min(32), + // ... more config +}); + +export type Config = z.infer; + +export const config: Config = configSchema.parse({ + NODE_ENV: process.env.NODE_ENV || 'development', + PORT: process.env.PORT || '3000', + DATABASE_URL: process.env.DATABASE_URL, + JWT_SECRET: process.env.JWT_SECRET, + // ... 
more config +}); +``` + +## Time Duration Format + +All time-based config values use milliseconds internally: + +```typescript +// In .env +JWT_EXPIRES_IN=7d +OTP_EXPIRES_IN=10m + +// In env.ts - convert to milliseconds +import ms from "ms"; + +JWT_EXPIRES_IN: z.string().transform((val) => ms(val)), +// Converts "7d" → 604800000ms +``` + +## Required Environment Variables + +### Core + +```bash +NODE_ENV=development +PORT=3000 +``` + +### Database + +```bash +DATABASE_URL=mongodb://localhost:27017/your-db +``` + +### Authentication + +```bash +JWT_SECRET=your-super-secret-key-at-least-32-characters +JWT_EXPIRES_IN=7d +OTP_EXPIRES_IN=10m +OTP_SECRET=your-otp-secret-key +``` + +### Redis + +```bash +REDIS_HOST=localhost +REDIS_PORT=6379 +REDIS_PASSWORD= +``` + +### AWS S3 (File Uploads) + +```bash +AWS_REGION=us-east-1 +AWS_ACCESS_KEY_ID=your-access-key +AWS_SECRET_ACCESS_KEY=your-secret-key +AWS_S3_BUCKET=your-bucket-name +``` + +### Email (Mailgun) + +```bash +MAILGUN_API_KEY=your-mailgun-api-key +MAILGUN_DOMAIN=your-domain.com +MAILGUN_FROM=noreply@your-domain.com +``` + +### OAuth (Google) + +```bash +GOOGLE_CLIENT_ID=your-google-client-id +GOOGLE_CLIENT_SECRET=your-google-client-secret +GOOGLE_CALLBACK_URL=http://localhost:3000/api/auth/google/callback +``` + +### Session + +```bash +SESSION_SECRET=your-session-secret-key +``` + +### Frontend URL + +```bash +FRONTEND_URL=http://localhost:5173 +``` + +## Adding New Config Variables + +### Step 1: Add to `.env.sample` + +```bash +# New Feature Config +NEW_API_KEY=your-api-key +NEW_API_TIMEOUT=30s +``` + +### Step 2: Add to config schema + +```typescript +// In src/config/env.ts +const configSchema = z.object({ + // ... existing config + NEW_API_KEY: z.string().min(1), + NEW_API_TIMEOUT: z.string().transform((val) => ms(val)), +}); +``` + +### Step 3: Parse from environment + +```typescript +export const config: Config = configSchema.parse({ + // ... 
existing config + NEW_API_KEY: process.env.NEW_API_KEY, + NEW_API_TIMEOUT: process.env.NEW_API_TIMEOUT || '30s', +}); +``` + +### Step 4: Use in code + +```typescript +import config from '@/config/env'; + +const apiKey = config.NEW_API_KEY; +const timeout = config.NEW_API_TIMEOUT; // In milliseconds +``` + +## Best Practices + +### Security + +- NEVER commit actual `.env` files to git +- Keep secrets in environment variables, not hardcoded +- Use different secrets for development and production +- Rotate secrets regularly + +### Validation + +- Always validate with Zod in env.ts +- Fail fast if required config is missing +- Provide sensible defaults where appropriate +- Use type inference for type safety + +### Documentation + +- Document all variables in `.env.sample` +- Add comments explaining what each variable does +- Provide example values +- Indicate which variables are required vs optional + +### Time Values + +- Always use human-readable format in .env (e.g., "7d", "10m", "30s") +- Convert to milliseconds in env.ts using `ms` package +- Never use raw milliseconds in .env files + +## Docker Setup + +For local development with Docker: + +```bash +# Start services +docker compose up -d + +# Services included: +# - MongoDB (port 27017) +# - Redis (port 6379) +``` + +## Common Mistakes to Avoid + +❌ DON'T access `process.env` directly in code +✅ DO import from `env.ts` + +❌ DON'T use hardcoded values +✅ DO use environment variables + +❌ DON'T forget to validate new config variables +✅ DO add Zod validation in env.ts + +❌ DON'T commit `.env` files +✅ DO commit `.env.sample` as template diff --git a/.cursor/rules/models.mdc b/.cursor/rules/models.mdc new file mode 100644 index 0000000..2b88386 --- /dev/null +++ b/.cursor/rules/models.mdc @@ -0,0 +1,415 @@ +--- +globs: *.model.ts +description: Mongoose model patterns for MongoDB schemas +--- + +# Mongoose Model Patterns + +## Core Principle + +Models define MongoDB schemas using Mongoose. Keep them simple and focused on data structure. 
+ +## Model Template + +```typescript +import { Schema, model, type Document } from 'mongoose'; + +// TypeScript interface +export interface IModel extends Document { + name: string; + email: string; + status: 'active' | 'inactive'; + metadata?: Record; + createdBy?: Schema.Types.ObjectId; + createdAt: Date; + updatedAt: Date; +} + +// Mongoose schema +const schema = new Schema( + { + name: { + type: String, + required: [true, 'Name is required'], + trim: true, + minlength: [2, 'Name must be at least 2 characters'], + maxlength: [100, 'Name must not exceed 100 characters'], + }, + email: { + type: String, + required: [true, 'Email is required'], + unique: true, + lowercase: true, + trim: true, + match: [/^\S+@\S+\.\S+$/, 'Please provide a valid email'], + }, + status: { + type: String, + enum: { + values: ['active', 'inactive'], + message: 'Status must be either active or inactive', + }, + default: 'active', + }, + metadata: { + type: Schema.Types.Mixed, + default: {}, + }, + createdBy: { + type: Schema.Types.ObjectId, + ref: 'User', + }, + }, + { + timestamps: true, // Adds createdAt and updatedAt automatically + collection: 'models', // Optional: specify collection name + }, +); + +// Indexes for query performance +schema.index({ email: 1 }); // Single field index +schema.index({ status: 1, createdAt: -1 }); // Compound index +schema.index({ name: 'text' }); // Text index for search + +// Virtual properties +schema.virtual('displayName').get(function () { + return `${this.name} (${this.email})`; +}); + +// Instance methods +schema.methods.isActive = function () { + return this.status === 'active'; +}; + +schema.methods.toJSON = function () { + const obj = this.toObject(); + delete obj.__v; // Remove version key + return obj; +}; + +// Static methods +schema.statics.findActive = function () { + return this.find({ status: 'active' }); +}; + +schema.statics.findByEmail = function (email: string) { + return this.findOne({ email: email.toLowerCase() }); +}; + +// Pre-save hook +schema.pre('save', async function (next) { + // Example: Normalize email + if (this.isModified('email')) { + this.email = this.email.toLowerCase().trim(); + } + next(); +}); + +// Post-save hook +schema.post('save', function (doc) { + // Example: Log creation + console.log('Document saved:', doc._id); +}); + +// Pre-remove hook +schema.pre('remove', async function (next) { + // Example: Clean up related data + await RelatedModel.deleteMany({ modelId: this._id }); + next(); +}); + +// Create and export model +export const Model = model('Model', schema); +``` + +## Common Field Types + +### Basic Types + +```typescript +{ + stringField: { type: String }, + numberField: { type: Number }, + booleanField: { type: Boolean }, + dateField: { type: Date }, + bufferField: { type: Buffer }, + mixedField: { type: Schema.Types.Mixed }, +} +``` + +### References + +```typescript +{ + userId: { + type: Schema.Types.ObjectId, + ref: "User", // Reference to User model + required: true, + }, +} +``` + +### Arrays + +```typescript +{ + tags: [String], // Array of strings + items: [{ // Array of subdocuments + name: String, + quantity: Number, + }], + userIds: [{ + type: Schema.Types.ObjectId, + ref: "User", + }], +} +``` + +### Enums + +```typescript +{ + status: { + type: String, + enum: { + values: ["pending", "active", "inactive"], + message: "Invalid status value", + }, + default: "pending", + }, +} +``` + +### Nested Objects + +```typescript +{ + address: { + street: String, + city: String, + country: String, + zipCode: String, + }, 
+} +``` + +## Field Options + +### Common Options + +```typescript +{ + field: { + type: String, + required: [true, "Error message"], // or just true + unique: true, // Creates unique index + index: true, // Creates index + default: "value", // or function: () => Date.now() + lowercase: true, // Auto-lowercase (String only) + uppercase: true, // Auto-uppercase (String only) + trim: true, // Remove whitespace (String only) + minlength: 5, // Min length (String only) + maxlength: 100, // Max length (String only) + min: 0, // Min value (Number/Date only) + max: 100, // Max value (Number/Date only) + match: /regex/, // Regex validation (String only) + validate: { // Custom validator + validator: (v) => v > 0, + message: "Must be positive", + }, + }, +} +``` + +## Indexes + +Add indexes for frequently queried fields: + +```typescript +// Single field index +schema.index({ email: 1 }); // 1 = ascending, -1 = descending + +// Compound index +schema.index({ status: 1, createdAt: -1 }); + +// Text index for search +schema.index({ name: 'text', description: 'text' }); + +// Unique compound index +schema.index({ userId: 1, itemId: 1 }, { unique: true }); + +// Sparse index (only for documents with the field) +schema.index({ optionalField: 1 }, { sparse: true }); + +// TTL index (auto-delete after time) +schema.index({ expireAt: 1 }, { expireAfterSeconds: 0 }); +``` + +## Hooks (Middleware) + +### Pre hooks + +```typescript +// Before save +schema.pre('save', async function (next) { + // this = document being saved + if (this.isModified('password')) { + // Hash password + } + next(); +}); + +// Before remove +schema.pre('remove', async function (next) { + // Clean up related data + next(); +}); + +// Before findOneAndUpdate +schema.pre('findOneAndUpdate', function (next) { + // this = query object + this.set({ updatedAt: new Date() }); + next(); +}); +``` + +### Post hooks + +```typescript +// After save +schema.post('save', function (doc) { + // Log or trigger events +}); + +// After find +schema.post('find', function (docs) { + // Process results +}); +``` + +## Virtual Properties + +```typescript +// Getter +schema.virtual('fullName').get(function () { + return `${this.firstName} ${this.lastName}`; +}); + +// Setter +schema.virtual('fullName').set(function (value: string) { + const [firstName, lastName] = value.split(' '); + this.firstName = firstName; + this.lastName = lastName; +}); + +// Include virtuals in JSON +schema.set('toJSON', { virtuals: true }); +schema.set('toObject', { virtuals: true }); + +// Virtual populate +schema.virtual('posts', { + ref: 'Post', + localField: '_id', + foreignField: 'userId', +}); +``` + +## Methods + +### Instance Methods + +```typescript +schema.methods.methodName = function () { + // this = document instance + return this.field; +}; + +// Usage: const result = await document.methodName(); +``` + +### Static Methods + +```typescript +schema.statics.methodName = function () { + // this = model + return this.find({ ... 
}); +}; + +// Usage: const result = await Model.methodName(); +``` + +### Query Helpers + +```typescript +schema.query.byStatus = function (status: string) { + return this.where({ status }); +}; + +// Usage: await Model.find().byStatus("active"); +``` + +## Common Patterns + +### Soft Delete + +```typescript +{ + isDeleted: { + type: Boolean, + default: false, + }, + deletedAt: Date, +} + +schema.pre(/^find/, function (next) { + this.where({ isDeleted: { $ne: true } }); + next(); +}); +``` + +### Timestamps + +```typescript +// Option 1: Automatic (recommended) +{ timestamps: true } // in schema options + +// Option 2: Manual +{ + createdAt: { type: Date, default: Date.now }, + updatedAt: { type: Date, default: Date.now }, +} + +schema.pre("save", function (next) { + this.updatedAt = new Date(); + next(); +}); +``` + +### User Reference + +```typescript +{ + createdBy: { + type: Schema.Types.ObjectId, + ref: "User", + }, + updatedBy: { + type: Schema.Types.ObjectId, + ref: "User", + }, +} +``` + +## Common Mistakes to Avoid + +❌ DON'T use arrow functions in methods/hooks (breaks `this`) +✅ DO use regular functions + +❌ DON'T forget to create indexes for queried fields +✅ DO add indexes for performance + +❌ DON'T validate in models AND Zod schemas (duplication) +✅ DO use Zod for API validation, Mongoose for data integrity + +❌ DON'T put business logic in models +✅ DO keep models simple, logic in services diff --git a/.cursor/rules/new-module.mdc b/.cursor/rules/new-module.mdc new file mode 100644 index 0000000..d6426fa --- /dev/null +++ b/.cursor/rules/new-module.mdc @@ -0,0 +1,290 @@ +--- +description: Step-by-step guide for creating a new module using the tbk CLI +--- + +# Creating a New Module + +This guide shows how to create a new module using the `tbk` CLI tool and customize it according to project patterns. + +## Quick Start + +### Step 1: Generate Module Scaffolding + +Use the `tbk` CLI to generate all module files automatically: + +```bash +pnpm exec tbk generate:module +``` + +Or with custom API path prefix: + +```bash +pnpm exec tbk generate:module --path /api/v1 +``` + +**Example:** + +```bash +pnpm exec tbk generate:module product +# Creates: src/modules/product/ with all required files +``` + +This creates a complete module structure: + +``` +src/modules// +├── .dto.ts # TypeScript types and Zod schemas +├── .model.ts # Mongoose model +├── .schema.ts # Request/response validation schemas +├── .services.ts # Business logic and database operations +├── .controller.ts # HTTP request handlers +└── .router.ts # MagicRouter route definitions +``` + +### Step 2: Customize Module Files + +The generated files follow project patterns but need customization for your specific use case. Refer to these rules for detailed patterns: + +#### 2.1 Update Model (`.model.ts`) + +- **Rule:** `@models` +- Add/modify fields in the Mongoose schema +- Define indexes, virtuals, and methods +- Configure schema options (timestamps, etc.) 
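+
+For example, a minimal customization of a generated `product.model.ts` might look like this (the `name`/`price` fields and indexes are illustrative, not part of the generated scaffold):
+
+```typescript
+import { Schema, model, type Document } from 'mongoose';
+
+export interface IProduct extends Document {
+  name: string;
+  price: number;
+  createdAt: Date;
+  updatedAt: Date;
+}
+
+const productSchema = new Schema<IProduct>(
+  {
+    name: { type: String, required: true, trim: true },
+    price: { type: Number, required: true, min: 0 },
+  },
+  { timestamps: true },
+);
+
+// Index the fields you expect to filter or sort on
+productSchema.index({ name: 'text' });
+productSchema.index({ price: 1, createdAt: -1 });
+
+export const Product = model<IProduct>('Product', productSchema);
+```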
+ +#### 2.2 Update DTOs (`.dto.ts`) + +- Define input/output types using Zod +- Use `definePaginatedResponse` from `common.utils` for list endpoints +- Export type definitions for type safety + +#### 2.3 Update Validation Schemas (`.schema.ts`) + +- **Rule:** `@schemas` +- Add/modify Zod validation for create/update operations +- Configure query parameter validation (pagination, search, filters) +- Define proper error messages and transformations + +#### 2.4 Update Services (`.services.ts`) + +- **Rule:** `@services` +- Implement business logic +- Handle database operations using the model +- Use proper error handling (throw errors with descriptive messages) +- Optimize queries with proper filtering, pagination, and sorting + +#### 2.5 Update Controller (`.controller.ts`) + +- **Rule:** `@controllers` +- Handle HTTP request/response +- Use `successResponse` from `@/utils/response.utils` +- Use proper HTTP status codes from `@/openapi/status-codes` +- Keep controllers thin - delegate logic to services + +#### 2.6 Update Router (`.router.ts`) + +- **Rule:** `@routing` +- Configure MagicRouter routes +- Add proper middleware (authentication, authorization) +- Use `canAccess()` for protected routes +- Define request validation schemas + +### Step 3: Register Router + +Add the router to `src/routes/routes.ts`: + +```typescript +import Router from '@/modules//.router'; + +// In the registerRoutes function or where routes are registered +app.use(Router); +``` + +### Step 4: Test the Module + +1. Start development server: + + ```bash + pnpm dev + ``` + +2. Visit Swagger UI: + + ``` + http://localhost:3000/docs + ``` + +3. Test all endpoints using the interactive API documentation + +4. Verify: + - All CRUD operations work correctly + - Validation catches invalid inputs + - Error responses are properly formatted + - OpenAPI documentation is accurate + +## Module File Responsibilities + +### 1. DTO (`*.dto.ts`) + +- Zod schemas for input/output validation +- TypeScript type definitions +- Paginated response schemas + +### 2. Model (`*.model.ts`) + +- Mongoose schema definition +- Database field types and constraints +- Indexes and virtuals +- Model interface extending Document + +### 3. Schema (`*.schema.ts`) + +- Request validation schemas (create, update, query) +- Zod transformations and refinements +- Type exports for controllers + +### 4. Services (`*.services.ts`) + +- Business logic implementation +- Database operations (CRUD) +- Data transformation +- Error handling + +### 5. Controller (`*.controller.ts`) + +- HTTP request/response handling +- Call service methods +- Return standardized responses +- Handle HTTP status codes + +### 6. 
Router (`*.router.ts`) + +- Route definitions using MagicRouter +- Middleware configuration +- Request validation binding +- OpenAPI metadata + +## Best Practices + +### Follow Project Patterns + +- **Always** use MagicRouter for automatic OpenAPI generation +- **Never** use plain Express `app.get()` or `router.get()` +- **Always** validate requests with Zod schemas +- **Always** use TypeScript strict mode - no `any` types + +### Error Handling + +- Throw descriptive errors in services +- Let global error handler format responses +- Use proper HTTP status codes + +### Type Safety + +- Export and use TypeScript types from DTOs +- Use Zod's `.infer` for type generation +- Keep runtime validation and TypeScript types in sync + +### Code Organization + +- Keep controllers thin - delegate to services +- Put business logic in services +- Use common utilities for shared functionality +- Follow the single responsibility principle + +## Advanced Customization + +### Adding Authentication + +Use `canAccess()` middleware in router: + +```typescript +import { canAccess } from '@/middlewares/can-access'; + +router.post( + '/', + { requestType: { body: createSchema } }, + canAccess(), // Add authentication + handleCreate, +); +``` + +### Adding Custom Middleware + +```typescript +router.get( + '/:id', + {}, + canAccess(), + customMiddleware, // Your custom middleware + handleGetById, +); +``` + +### Adding Indexes + +In model file: + +```typescript +schema.index({ field1: 1, field2: -1 }); +schema.index({ searchField: 'text' }); // Text search +``` + +### Adding Relationships + +```typescript +// In model +field: { type: Schema.Types.ObjectId, ref: 'OtherModel' } + +// In service +const result = await Model.find().populate('field'); +``` + +## Optional: Add Seeder + +Create `.seeder.ts` for test data: + +```typescript +import Model from './.model'; + +export const seed = async () => { + const count = await Model.countDocuments(); + if (count > 0) return; + + await Model.create([ + { /* seed data */ }, + ]); + + console.log(' seeded'); +}; +``` + +## Checklist + +- [ ] Generated module using `tbk generate:module ` +- [ ] Customized model with proper fields and indexes +- [ ] Updated validation schemas for your use case +- [ ] Implemented business logic in services +- [ ] Added proper error handling +- [ ] Configured authentication/authorization if needed +- [ ] Registered router in `routes.ts` +- [ ] Tested all endpoints in Swagger UI +- [ ] Verified OpenAPI documentation +- [ ] (Optional) Created seeder for test data + +## Common Commands + +```bash +# Generate new module +pnpm exec tbk generate:module + +# Generate with custom path +pnpm exec tbk generate:module --path /api/v2 + +# Aliases also work +pnpm exec tbk g:module + +# Other generators +pnpm exec tbk generate:plugin +pnpm exec tbk generate:middleware +``` diff --git a/.cursor/rules/routing.mdc b/.cursor/rules/routing.mdc new file mode 100644 index 0000000..e222b72 --- /dev/null +++ b/.cursor/rules/routing.mdc @@ -0,0 +1,440 @@ +--- +globs: *.router.ts,*.routes.ts +description: Routing patterns using MagicRouter for automatic OpenAPI generation +--- + +# Routing with MagicRouter + +## Core Principle + +NEVER use plain Express routing. ALWAYS use MagicRouter from [router.ts](mdc:src/plugins/magic/router.ts). 
+ +## Pattern Template + +```typescript +import MagicRouter from '@/plugins/magic/router'; +import { canAccess } from '@/middlewares/can-access'; +import { + handleAction, + handleGetById, + handleCreate, + handleSearch, +} from './module.controller'; +import { + actionSchema, + createSchema, + idParamsSchema, + searchQuerySchema, + actionResponseSchema, + getMeResponseSchema, + createItemResponseSchema, + getItemByIdResponseSchema, + searchItemsResponseSchema, +} from './module.schema'; + +export const MODULE_ROUTER_ROOT = '/module'; + +const moduleRouter = new MagicRouter(MODULE_ROUTER_ROOT); + +// Public route with schema validation and response config +moduleRouter.post( + '/action', + { + requestType: { body: actionSchema }, + responses: { + 200: actionResponseSchema, + }, + }, + handleAction, +); + +// Protected route with authentication +moduleRouter.get( + '/me', + { + responses: { + 200: getMeResponseSchema, + }, + }, + canAccess(), + handleGetById, +); + +// Protected route with schema, auth, and response config +moduleRouter.post( + '/create', + { + requestType: { body: createSchema }, + responses: { + 201: createItemResponseSchema, + }, + }, + canAccess(), + handleCreate, +); + +// Route with params +moduleRouter.get( + '/:id', + { + requestType: { params: idParamsSchema }, + responses: { + 200: getItemByIdResponseSchema, + }, + }, + handleGetById, +); + +// Route with query params (paginated) +moduleRouter.get( + '/search', + { + requestType: { query: searchQuerySchema }, + responses: { + 200: searchItemsResponseSchema, + }, + }, + handleSearch, +); + +export default moduleRouter.getRouter(); +``` + +## MagicRouter API + +### Router Instantiation + +```typescript +const router = new MagicRouter(ROUTER_ROOT); +``` + +- Create router instance with root path (e.g., `/auth`, `/user`) +- Root path used for route grouping and OpenAPI tag generation + +### Route Definition Signature + +```typescript +router.method(path, requestType, ...handlers); +``` + +**Parameters:** + +1. `path`: Route path string (e.g., `/login`, `/:id`) +2. `requestType`: Schema configuration object +3. `...handlers`: Middleware functions and controller (spread arguments) + +### Request Type Object + +```typescript +{ + requestType?: { + body?: ZodSchema, // Request body validation + params?: ZodSchema, // URL params validation + query?: ZodSchema, // Query string validation + }, + responses?: { // NEW: Response schemas per status code + 200?: ResponseSchema, // Success response + 201?: ResponseSchema, // Created response + 404?: ResponseSchema, // Not found response + // ... other status codes + }, + contentType?: string, // 'application/json' | 'multipart/form-data' | etc. 
+} +``` + +- Use empty object `{}` when no validation needed +- Can combine `body`, `params`, and `query` in same route +- **NEW**: Add `responses` object for response schemas (RECOMMENDED) + +### Response Configuration (NEW - RECOMMENDED) + +**BEST PRACTICE:** Define response schemas in your schema file and import them: + +```typescript +// In module.schema.ts +import { R } from '@/plugins/magic/response.builders'; +import { itemOutSchema } from './module.dto'; + +export const createItemResponseSchema = R.success(itemOutSchema); +export const getItemsResponseSchema = R.paginated(itemOutSchema); + +export type CreateItemResponseSchema = z.infer; +export type GetItemsResponseSchema = z.infer; + +// In module.router.ts +import { createItemResponseSchema, getItemsResponseSchema } from './module.schema'; + +responses: { 201: createItemResponseSchema } +responses: { 200: getItemsResponseSchema } +``` + +**Alternative (inline):** Use response builders directly in router: + +```typescript +import { R } from '@/plugins/magic/response.builders'; + +// Standard success response +responses: { 200: R.success(itemSchema) } + +// Paginated list response +responses: { 200: R.paginated(itemSchema) } + +// Created response +responses: { 201: R.success(itemSchema) } + +// No content response +responses: { 204: R.noContent() } + +// Error response +responses: { 404: R.error() } + +// Multiple status codes +responses: { + 200: R.success(itemSchema), + 404: R.error(), +} + +// Raw response (non-envelope) +responses: { 200: R.raw(customSchema) } +``` + +**Response Builders:** + +- `R.success(schema)` - Standard envelope: `{ success, message?, data? }` +- `R.paginated(itemSchema)` - Paginated list: `{ success, message?, data: { items, paginator } }` +- `R.noContent()` - Empty 204 response +- `R.error(schema?)` - Error envelope (optional custom schema) +- `R.raw(schema)` - Non-envelope response (e.g., healthcheck) + +**Why define in schema files?** + +- ✅ Type-safe controller responses with `ResponseExtended` +- ✅ Centralized response definitions +- ✅ Easier to maintain and update +- ✅ Better code organization + +### Handler Order + +The last handler in the spread is treated as the **controller**. All preceding handlers are **middleware**. 
+ +```typescript +// Public route +router.post('/action', { requestType: { body: schema } }, controller); + +// With one middleware +router.get('/me', {}, canAccess(), controller); + +// With multiple middleware +router.post('/upload', {}, middleware1(), middleware2(), controller); +``` + +## Authentication + +### Public Routes + +No authentication required - just pass the controller: + +```typescript +router.post('/login', { requestType: { body: loginSchema } }, handleLogin); +``` + +### Protected Routes + +Add `canAccess()` middleware before the controller: + +```typescript +import { canAccess } from '@/middlewares/can-access'; + +router.get('/me', {}, canAccess(), handleGetCurrentUser); +``` + +- Security is auto-detected in OpenAPI by presence of `canAccess()` middleware +- JWT payload available as `req.jwtPayload` in controller (via `canAccess()`) + +## Common Route Patterns + +### Body Validation + +```typescript +router.post('/create', { requestType: { body: createSchema } }, handleCreate); +``` + +### Params Validation + +```typescript +router.get('/:id', { requestType: { params: idParamsSchema } }, handleGetById); +``` + +### Query Validation + +```typescript +router.get( + '/search', + { requestType: { query: searchQuerySchema } }, + handleSearch, +); +``` + +### Combined Validation + +```typescript +router.put( + '/:id', + { + requestType: { + params: idParamsSchema, + body: updateSchema, + }, + }, + canAccess(), + handleUpdate, +); +``` + +### No Validation + +```typescript +router.post('/logout', {}, handleLogout); +``` + +## File Uploads + +Add multer middleware before controller: + +```typescript +import { multerS3 } from '@/middlewares/multer-s3'; + +router.post( + '/upload', + { contentType: 'multipart/form-data' }, + canAccess(), + multerS3.single('file'), + handleUpload, +); +``` + +## Available HTTP Methods + +- `router.get()` +- `router.post()` +- `router.put()` +- `router.patch()` +- `router.delete()` + +## Route Organization + +### File Structure + +``` +module/ + ├── module.controller.ts # Export named controller functions + ├── module.router.ts # Define routes + ├── module.schema.ts # Zod schemas + ├── module.service.ts # Business logic + └── module.model.ts # Database models +``` + +### Router Export Pattern + +```typescript +export const MODULE_ROUTER_ROOT = '/module'; +const moduleRouter = new MagicRouter(MODULE_ROUTER_ROOT); + +// ... define routes ... 
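// getRouter() returns the mountable router instance that routes.ts registers on the app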
+ +export default moduleRouter.getRouter(); +``` + +### Register in Routes + +Add router to [routes.ts](mdc:src/routes/routes.ts): + +```typescript +import moduleRouter from './modules/module/module.router'; + +app.use(moduleRouter); +``` + +## OpenAPI Generation + +MagicRouter automatically generates OpenAPI documentation: + +- **Tags**: Auto-generated from router root path +- **Summary**: Auto-generated from controller function name +- **Security**: Auto-detected from `canAccess()` middleware +- **Schemas**: Generated from Zod schemas in `requestType` +- **Responses**: Per-status schemas from `responses` config (NEW) + - If `responses` provided: Uses your schemas per status code + - If not provided: Defaults to 200, 400, 404, 500 + - Default errors (400/404/500) added unless overridden + +## Common Mistakes to Avoid + +❌ **DON'T** use plain Express routing + +```typescript +router.get('/path', handler); // Wrong +``` + +✅ **DO** use MagicRouter signature + +```typescript +router.get('/path', {}, handler); // Correct +``` + +❌ **DON'T** forget the request type object + +```typescript +router.post('/create', handleCreate); // Wrong +``` + +✅ **DO** always include it (use `{}` if no validation) + +```typescript +router.post('/create', {}, handleCreate); // Correct +``` + +❌ **DON'T** use array syntax for handlers + +```typescript +router.get('/me', {}, [canAccess(), handler]); // Wrong +``` + +✅ **DO** use spread arguments + +```typescript +router.get('/me', {}, canAccess(), handler); // Correct +``` + +❌ **DON'T** forget to call `.getRouter()` + +```typescript +export default moduleRouter; // Wrong +``` + +✅ **DO** call `.getRouter()` on export + +```typescript +export default moduleRouter.getRouter(); // Correct +``` + +❌ **DON'T** use wrong schema object structure + +```typescript +{ + schema: bodySchema; +} // Wrong +{ + body: bodySchema; +} // Wrong +``` + +✅ **DO** use correct nesting + +```typescript +{ + requestType: { + body: bodySchema; + } +} // Correct +``` diff --git a/.cursor/rules/schemas.mdc b/.cursor/rules/schemas.mdc new file mode 100644 index 0000000..a3061f2 --- /dev/null +++ b/.cursor/rules/schemas.mdc @@ -0,0 +1,349 @@ +--- +globs: *.schema.ts +description: Zod schema patterns for validation and OpenAPI documentation +--- + +# Zod Schema Patterns + +## Core Principle + +Every module should have a schema file that defines request/response validation using Zod schemas. 
+ +## Import Pattern + +```typescript +import validator from 'validator'; +import z from 'zod'; +// OR +import * as z from 'zod'; +``` + +## Schema Structure + +Schemas are exported directly, NOT wrapped in request/response objects: + +```typescript +import validator from 'validator'; +import z from 'zod'; + +export const createItemSchema = z.object({ + name: z.string({ required_error: 'Name is required' }).min(1).max(100), + description: z + .string({ required_error: 'Description is required' }) + .min(10) + .max(500), + status: z.enum(['active', 'inactive']).default('active'), + categoryId: z + .string({ required_error: 'Category ID is required' }) + .refine((value) => validator.isMongoId(value), 'Category ID must be valid'), +}); + +export const updateItemSchema = z.object({ + name: z.string().min(1).max(100).optional(), + description: z.string().min(10).max(500).optional(), + status: z.enum(['active', 'inactive']).optional(), +}); +``` + +## Common Patterns + +### String Validation with Required Error + +```typescript +z.string({ required_error: 'Field name is required' }).min(1).max(64); +``` + +### Email Validation + +```typescript +z.string({ required_error: 'Email is required' }).email({ + message: 'Email is not valid', +}); +``` + +### MongoDB ObjectId Validation + +Use validator package, NOT regex: + +```typescript +z.string({ required_error: 'ID is required' }) + .min(1) + .refine((value) => validator.isMongoId(value), 'ID must be valid'); +``` + +### Alphanumeric Validation + +```typescript +z.string({ required_error: 'Code is required' }) + .min(4) + .max(4) + .refine((value) => validator.isAlphanumeric(value), 'Code must be valid'); +``` + +### Query Parameters with Transform + +```typescript +export const listItemsQuerySchema = z.object({ + searchString: z.string().optional(), + limitParam: z + .string() + .default('10') + .refine( + (value) => !Number.isNaN(Number(value)) && Number(value) >= 0, + 'Input must be positive integer', + ) + .transform(Number), + pageParam: z + .string() + .default('1') + .refine( + (value) => !Number.isNaN(Number(value)) && Number(value) >= 0, + 'Input must be positive integer', + ) + .transform(Number), + filterByStatus: z.enum(['active', 'inactive', 'archived']).optional(), +}); +``` + +### Enum Validation + +```typescript +// From enum object keys +z.enum(Object.keys(STATUS_ENUM) as [StatusType]).optional(); + +// Direct enum values +z.enum(['pending', 'approved', 'rejected']).optional(); +``` + +## Schema Composition + +### Merging Schemas + +```typescript +// Base schema +export const baseItemSchema = z.object({ + name: z.string({ required_error: 'Name is required' }).min(1), + description: z.string().optional(), +}); + +// Extended schema +export const createItemSchema = z + .object({ + categoryId: z.string().refine((value) => validator.isMongoId(value)), + tags: z.array(z.string()).optional(), + }) + .merge(baseItemSchema) + .strict(); +``` + +### Cross-Field Validation with .refine() + +```typescript +export const createItemWithConfirmationSchema = z + .object({ + price: z.number().positive(), + confirmPrice: z.number().positive(), + discountPrice: z.number().positive().optional(), + }) + .refine( + ({ price, confirmPrice }) => price === confirmPrice, + 'Price and confirm price must match', + ) + .refine( + ({ price, discountPrice }) => !discountPrice || discountPrice < price, + 'Discount price must be less than original price', + ); +``` + +### Strict Mode + +Use `.strict()` to disallow extra properties: + +```typescript +z.object({ + 
name: z.string(), + email: z.string().email(), +}).strict(); +``` + +## Reusable Schema Patterns + +### Password Validation Function + +Define in `common.schema.ts`: + +```typescript +export const passwordValidationSchema = (fieldName: string) => + z + .string({ required_error: `${fieldName} is required` }) + .min(8) + .max(64) + .refine( + (value) => + validator.isStrongPassword(value, { + minLength: 8, + minLowercase: 1, + minNumbers: 1, + minUppercase: 1, + minSymbols: 1, + }), + 'Password is too weak', + ); +``` + +### MongoDB ID Schema + +```typescript +export const mongoIdSchema = z.object({ + id: z.string().refine((value) => validator.isMongoId(value)), +}); +``` + +### Response Schemas + +```typescript +export const successResponseSchema = z.object({ + success: z.boolean().default(true), + message: z.string().optional(), + data: z.record(z.string(), z.any()).optional(), +}); + +export const errorResponseSchema = z.object({ + message: z.string(), + success: z.boolean().default(false), + data: z.record(z.string(), z.any()), + stack: z.string().optional(), +}); +``` + +### Paginator Schema + +```typescript +export const paginatorSchema = z.object({ + skip: z.number().min(0), + limit: z.number().min(1), + currentPage: z.number().min(1), + pages: z.number().min(0), + hasNextPage: z.boolean(), + totalRecords: z.number().min(0), + pageSize: z.number().min(1), +}); + +export const paginatedResponseSchema = z.object({ + success: z.boolean().default(true), + message: z.string().optional(), + data: z + .object({ + items: z.array(z.unknown()), + paginator: paginatorSchema, + }) + .optional(), +}); +``` + +## Custom Validators + +Use `.refine()` with custom validation functions: + +```typescript +export const createItemSchema = z.object({ + slug: z + .string({ required_error: 'Slug is required' }) + .min(1) + .refine((value) => isValidSlug(value), 'Slug must be valid'), + email: z + .string({ required_error: 'Email is required' }) + .refine((value) => validator.isEmail(value), 'Email must be valid'), +}); +``` + +## Response Schemas (NEW - RECOMMENDED) + +Define response schemas in your schema file using response builders: + +```typescript +import { R } from '@/plugins/magic/response.builders'; +import { itemOutSchema } from './item.dto'; + +// Response schemas +export const createItemResponseSchema = R.success(itemOutSchema); +export const getItemsResponseSchema = R.paginated(itemOutSchema); +export const getItemByIdResponseSchema = R.success(itemOutSchema); +export const updateItemResponseSchema = R.success(itemOutSchema); + +// Custom response schemas +export const customActionResponseSchema = R.success( + z.object({ + success: z.boolean(), + message: z.string(), + data: z.object({ + actionId: z.string(), + status: z.enum(['pending', 'completed']), + }), + }), +); + +// Response types +export type CreateItemResponseSchema = z.infer; +export type GetItemsResponseSchema = z.infer; +export type GetItemByIdResponseSchema = z.infer< + typeof getItemByIdResponseSchema +>; +export type UpdateItemResponseSchema = z.infer; +export type CustomActionResponseSchema = z.infer< + typeof customActionResponseSchema +>; +``` + +**Benefits:** + +- ✅ Type-safe responses in controllers +- ✅ Accurate OpenAPI documentation +- ✅ Runtime validation (configurable) +- ✅ Centralized response structure +- ✅ Better IDE autocomplete + +**Response Builders:** + +- `R.success(schema)` - Standard envelope: `{ success, message?, data? 
}` +- `R.paginated(itemSchema)` - Paginated list: `{ success, message?, data: { items, paginator } }` +- `R.noContent()` - Empty 204 response +- `R.error(schema?)` - Error envelope +- `R.raw(schema)` - Non-envelope response + +## Type Inference + +Export TypeScript types from schemas: + +```typescript +// Request types +export type CreateItemSchemaType = z.infer; +export type UpdateItemSchemaType = z.infer; +export type ListItemsQuerySchemaType = z.infer; +export type ItemParamsSchemaType = z.infer; + +// Response types (NEW) +export type CreateItemResponseSchema = z.infer; +export type GetItemsResponseSchema = z.infer; +``` + +## Key Patterns to Follow + +✅ DO import validator from "validator" +✅ DO use `{ required_error: "message" }` for required fields +✅ DO use `.min(1)` for required strings +✅ DO use `.refine()` with validator functions +✅ DO export schemas directly (not wrapped in objects) +✅ DO export types using `z.infer` +✅ DO use `.merge()` to compose schemas +✅ DO use `.strict()` to disallow extra properties +✅ DO use `.refine()` for cross-field validation +✅ DO create reusable schema functions in common.schema.ts +✅ DO define response schemas using R.success() / R.paginated() (NEW) +✅ DO export response types from response schemas (NEW) + +❌ DON'T use `.openapi()` method in schema files +❌ DON'T wrap schemas in request/response objects (that's for routers) +❌ DON'T use regex for MongoDB IDs (use validator.isMongoId) +❌ DON'T forget to handle query parameter transforms with .transform(Number) +❌ DON'T define response schemas inline in routers (define in schema files) diff --git a/.cursor/rules/services.mdc b/.cursor/rules/services.mdc new file mode 100644 index 0000000..cfc04a8 --- /dev/null +++ b/.cursor/rules/services.mdc @@ -0,0 +1,382 @@ +--- +globs: *.service.ts,*.services.ts +description: Service layer patterns for business logic and data access +--- + +# Service Layer Patterns + +## Core Principle + +Services contain business logic, database operations, external API calls, and complex computations. They should be framework-agnostic (no Express req/res). + +## Service Template + +```typescript +import { Model } from './module.model'; +import { logger } from '@/plugins/observability/logger'; +import type { CreateInput, UpdateInput } from './module.dto'; + +/** + * Find item by ID + */ +export const findById = async (id: string) => { + const item = await Model.findById(id); + return item; +}; + +/** + * Find all items with pagination + */ +export const findAll = async (options: { + page: number; + limit: number; + search?: string; +}) => { + const { page, limit, search } = options; + const skip = (page - 1) * limit; + + const query = search ? 
{ name: { $regex: search, $options: 'i' } } : {}; + + const [items, total] = await Promise.all([ + Model.find(query).skip(skip).limit(limit).lean(), + Model.countDocuments(query), + ]); + + return { + data: items, + pagination: { + page, + limit, + total, + totalPages: Math.ceil(total / limit), + }, + }; +}; + +/** + * Create new item + */ +export const create = async (data: CreateInput) => { + const item = await Model.create(data); + + logger.info('Item created', { itemId: item._id }); + + return item.toObject(); +}; + +/** + * Update item + */ +export const update = async ( + id: string, + data: UpdateInput, + userId?: string, +) => { + const item = await Model.findById(id); + + if (!item) { + return null; + } + + // Business logic: Check permissions + if (item.createdBy?.toString() !== userId) { + const error = new Error('Forbidden') as any; + error.statusCode = 403; + throw error; + } + + Object.assign(item, data); + await item.save(); + + logger.info('Item updated', { itemId: id, userId }); + + return item.toObject(); +}; + +/** + * Delete item + */ +export const remove = async (id: string, userId?: string) => { + const item = await Model.findById(id); + + if (!item) { + return false; + } + + // Business logic: Check permissions + if (item.createdBy?.toString() !== userId) { + const error = new Error('Forbidden') as any; + error.statusCode = 403; + throw error; + } + + await item.deleteOne(); + + logger.info('Item deleted', { itemId: id, userId }); + + return true; +}; + +/** + * Complex business logic example + */ +export const performComplexOperation = async (input: { + userId: string; + data: any; +}) => { + // 1. Validate business rules + const user = await UserModel.findById(input.userId); + if (!user) { + throw new Error('User not found'); + } + + // 2. Perform operations + const result = await Model.create({ + ...input.data, + userId: input.userId, + }); + + // 3. Trigger background jobs if needed + await triggerEmailJob(user.email, result); + + // 4. 
Return result + return result; +}; + +/** + * Trigger background job + */ +const triggerEmailJob = async (email: string, data: any) => { + const { emailQueue } = await import('@/queues/email.queue'); + await emailQueue.add('sendNotification', { email, data }); +}; +``` + +## Key Patterns + +### Database Operations + +Use Mongoose models from `module.model.ts`: + +```typescript +// Find +const item = await Model.findById(id); +const items = await Model.find({ status: 'active' }); + +// Create +const item = await Model.create({ name: 'Test' }); + +// Update +const item = await Model.findByIdAndUpdate(id, { name: 'New' }, { new: true }); + +// Delete +await Model.findByIdAndDelete(id); + +// Count +const count = await Model.countDocuments({ status: 'active' }); + +// Use .lean() for better performance (returns plain objects) +const items = await Model.find().lean(); +``` + +### Pagination Helper + +Use pagination utility from [pagination.utils.ts](mdc:src/utils/pagination.utils.ts): + +```typescript +import { getPaginator } from '@/utils/pagination.utils'; + +const paginatorInfo = getPaginator(limit, page, totalRecords); +const items = await Model.find() + .skip(paginatorInfo.skip) + .limit(paginatorInfo.limit); +``` + +Or implement manually: + +```typescript +const skip = (page - 1) * limit; +const items = await Model.find().skip(skip).limit(limit); +const total = await Model.countDocuments(); +``` + +### Background Jobs + +Queue background tasks using BullMQ: + +```typescript +import { emailQueue } from '@/queues/email.queue'; + +await emailQueue.add( + 'jobName', + { data }, + { + delay: 5000, // Optional: delay in ms + attempts: 3, // Optional: retry attempts + }, +); +``` + +### Email Sending + +Send emails through queue system: + +```typescript +import { sendEmail } from '@/email/email.service'; + +await sendEmail({ + to: user.email, + subject: 'Welcome', + template: 'Welcome', + data: { name: user.name }, +}); +``` + +### File Storage (S3) + +Use storage service from [storage.ts](mdc:src/lib/storage.ts): + +```typescript +import { uploadFile, deleteFile, getFileUrl } from '@/lib/storage'; + +// Upload file (usually in controller, after file is uploaded) +const { url, key } = await uploadFile({ + file: uploadedFile, + key: `uploads/${userId}/${filename}`, +}); + +// Delete file +await deleteFile(fileKey); + +// Get file URL +const url = getFileUrl(fileKey); +``` + +### Authentication & Tokens + +Use auth utilities from the src/utils folder: + +```typescript +import { signToken, verifyToken } from '@/utils/jwt.utils'; +import { hashPassword, compareHash } from '@/utils/password.utils'; +import { generateOtp } from '@/utils/otp.utils'; + +// Generate JWT +const token = await signToken({ + sub: user._id, + email: user.email, + username: user.username, + role: user.role, +}); + +// Verify JWT +const payload = await verifyToken(token); + +// Hash password +const hashed = await hashPassword(plainPassword); + +// Compare password +const isValid = await compareHash(hashedPassword, plainPassword); + +// Generate OTP +const otp = generateOtp({ length: 6, charset: 'numeric' }); +``` + +### Error Handling + +Throw errors with status codes: + +```typescript +// Not found +const error = new Error('Item not found') as any; +error.statusCode = 404; +throw error; + +// Forbidden +const error = new Error('Insufficient permissions') as any; +error.statusCode = 403; +throw error; + +// Bad request +const error = new Error('Invalid input') as any; +error.statusCode = 400; +throw error; + +// Internal server 
error (default) +throw new Error('Something went wrong'); // 500 +``` + +### Logging + +Use Pino logger: + +```typescript +import { logger } from '@/plugins/observability/logger'; + +logger.info('Operation performed', { userId, itemId }); +logger.error('Error occurred', { error: err.message, stack: err.stack }); +logger.warn('Warning', { data }); +logger.debug('Debug info', { data }); +``` + +## Service Organization + +- One service file per module: `module.service.ts` +- Export individual functions (not a class) +- Keep functions focused and single-purpose +- Use TypeScript types from `module.dto.ts` + +## Common Patterns + +### Transaction Support (if needed) + +```typescript +import { startSession } from 'mongoose'; + +const session = await startSession(); +session.startTransaction(); + +try { + await Model1.create([data1], { session }); + await Model2.create([data2], { session }); + + await session.commitTransaction(); +} catch (error) { + await session.abortTransaction(); + throw error; +} finally { + session.endSession(); +} +``` + +### Caching with Redis + +```typescript +import { cacheClient } from '@/lib/cache'; + +// Get from cache +const cached = await cacheClient.get(`key:${id}`); +if (cached) { + return JSON.parse(cached); +} + +// Set cache +await cacheClient.set(`key:${id}`, JSON.stringify(data), 'EX', 3600); // 1 hour + +// Delete from cache +await cacheClient.del(`key:${id}`); +``` + +## Common Mistakes to Avoid + +❌ DON'T import Express types (Request, Response) +✅ DO keep services framework-agnostic + +❌ DON'T handle HTTP status codes in services (except throwing errors) +✅ DO let controllers handle HTTP concerns + +❌ DON'T perform heavy operations synchronously +✅ DO use background jobs for heavy tasks + +❌ DON'T forget to log important operations +✅ DO log creates, updates, deletes, and errors diff --git a/.cursorignore b/.cursorignore new file mode 100644 index 0000000..6a7b41b --- /dev/null +++ b/.cursorignore @@ -0,0 +1 @@ +.dump \ No newline at end of file diff --git a/.env.development b/.env.development index 2c2fc36..3ce60c5 100644 --- a/.env.development +++ b/.env.development @@ -6,7 +6,7 @@ CLIENT_SIDE_URL="http://localhost:3001" # JWT JWT_SECRET="some-secret" -JWT_EXPIRES_IN=3600 +JWT_EXPIRES_IN=86400 # NODE_ENV NODE_ENV="development" @@ -20,7 +20,7 @@ SET_PASSWORD_TOKEN_EXPIRES_IN=86400 SET_SESSION=1 # DATABSES -REDIS_URL="redis://localhost:6380" +REDIS_URL="redis://localhost:6379" MONGO_DATABASE_URL="mongodb://root:example@localhost:27017/typescript-backend-toolkit?authSource=admin" # Mailgun Configuration (dummy values for development) diff --git a/.env.production b/.env.production index 33fd396..a722da7 100644 --- a/.env.production +++ b/.env.production @@ -1,21 +1,36 @@ # APP CONFIG -PORT="3000" +PORT="3002" # FOR CORS AND EMAILS -CLIENT_SIDE_URL="http://localhost:3000" +CLIENT_SIDE_URL="http://localhost:3001" # JWT JWT_SECRET="some-secret" -JWT_EXPIRES_IN="1h" +JWT_EXPIRES_IN=86400 + +# NODE_ENV +NODE_ENV="production" # SESSION -SESSION_EXPIRES_IN="1d" +SESSION_EXPIRES_IN=86400 # AUTH -PASSWORD_RESET_TOKEN_EXPIRES_IN="1d" -SET_PASSWORD_TOKEN_EXPIRES_IN="1d" -SET_SESSION=0 +PASSWORD_RESET_TOKEN_EXPIRES_IN=86400 +SET_PASSWORD_TOKEN_EXPIRES_IN=86400 +SET_SESSION=1 # DATABSES -REDIS_URL="redis://localhost:6380" -MONGO_DATABASE_URL="mongodb://localhost:27018/typescript-backend-toolkit" \ No newline at end of file +REDIS_URL="redis://localhost:6379" +MONGO_DATABASE_URL="mongodb://root:example@localhost:27017/typescript-backend-toolkit?authSource=admin" + +# 
Mailgun Configuration (dummy values for development) +MAILGUN_API_KEY="dummy-key" +MAILGUN_DOMAIN="example.com" +MAILGUN_FROM_EMAIL="no-reply@example.com" + +# ADMIN +ADMIN_EMAIL="admin@example.com" +ADMIN_PASSWORD="password" + +# USER +OTP_VERIFICATION_ENABLED=0 diff --git a/.env.sample b/.env.sample index e35a7ad..97fcf6b 100644 --- a/.env.sample +++ b/.env.sample @@ -2,8 +2,8 @@ PORT="" NODE_ENV="" -# EMAIL (Choose either SMTP or Mailgun configuration) -# SMTP Configuration (Legacy) +# EMAIL (Choose one: Resend, Mailgun, or SMTP) +# SMTP Configuration SMTP_HOST="" SMTP_PORT="" SMTP_USERNAME="" @@ -11,7 +11,12 @@ EMAIL_FROM="" SMTP_FROM="" SMTP_PASSWORD="" -# Mailgun Configuration (Recommended) +# Resend Configuration (Recommended) +RESEND_API_KEY="" +# Optional: uses EMAIL_FROM if not set +RESEND_FROM_EMAIL="" + +# Mailgun Configuration MAILGUN_API_KEY="" MAILGUN_DOMAIN="" MAILGUN_FROM_EMAIL="" @@ -36,6 +41,30 @@ GOOGLE_CLIENT_ID="" GOOGLE_CLIENT_SECRET='' GOOGLE_REDIRECT_URI = '' +# STORAGE CONFIGURATION +# Choose storage provider: 's3' (AWS S3), 'r2' (Cloudflare R2), or 'local' (Local filesystem) +STORAGE_PROVIDER="s3" + +# AWS S3 Configuration (used when STORAGE_PROVIDER='s3') +AWS_REGION="us-east-1" +AWS_S3_BUCKET="" +# Optional: Leave empty to use default AWS credentials chain (IAM roles, ~/.aws/credentials, etc.) +AWS_ACCESS_KEY_ID="" +AWS_SECRET_ACCESS_KEY="" + +# Cloudflare R2 Configuration (used when STORAGE_PROVIDER='r2') +R2_ACCOUNT_ID="" +R2_ACCESS_KEY_ID="" +R2_SECRET_ACCESS_KEY="" +R2_BUCKET="" +# Optional: Custom domain or R2 public URL (e.g., https://cdn.example.com) +R2_PUBLIC_URL="" + +# Local Storage Configuration (used when STORAGE_PROVIDER='local') +LOCAL_STORAGE_PATH="./uploads" +# Optional: Base URL for serving uploaded files (e.g., http://localhost:3000) +LOCAL_STORAGE_BASE_URL="" + # DATABSES REDIS_URL="" MONGO_DATABASE_URL="" @@ -44,5 +73,13 @@ MONGO_DATABASE_URL="" ADMIN_EMAIL="admin@example.com" ADMIN_PASSWORD="password" +# ADMIN PANEL AUTH (separate from app auth) +ADMIN_AUTH_ENABLED=1 +ADMIN_USERNAME="admin" +ADMIN_PANEL_PASSWORD="your-secure-admin-password" +ADMIN_SESSION_SECRET="your-32-char-min-secret-key-here" +ADMIN_SESSION_TTL=86400 +ADMIN_COOKIE_NAME="admin_session" + # USER OTP_VERIFICATION_ENABLED=0 diff --git a/.gitignore b/.gitignore index 7d44f91..ab62e96 100644 --- a/.gitignore +++ b/.gitignore @@ -146,4 +146,4 @@ Api.ts .windsurf #Ignore cursor AI rules -.cursor/rules/codacy.mdc \ No newline at end of file +.cursor/rules/codacy.mdc diff --git a/.vscode/extensions.json b/.vscode/extensions.json deleted file mode 100644 index 627f28b..0000000 --- a/.vscode/extensions.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "recommendations": [ - "mattpocock.ts-error-translator", - "yoavbls.pretty-ts-errors", - "prisma.prisma", - "esbenp.prettier-vscode", - "christian-kohler.path-intellisense", - "dbaeumer.vscode-eslint", - "ms-azuretools.vscode-docker", - "digitalbrainstem.javascript-ejs-support", - "j69.ejs-beautify", - "dbaeumer.vscode-eslint" - ] -} diff --git a/.vscode/settings.json b/.vscode/settings.json index 543e746..5f6fc78 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,14 +1,4 @@ { - "eslint.useFlatConfig": true, "typescript.preferences.importModuleSpecifier": "relative", - "git.ignoreLimitWarning": true, - "prettier.useEditorConfig": false, - "totalTypeScript.hideAllTips": true, - "typescript.tsdk": "node_modules/typescript/lib", - "emmet.includeLanguages": { - "ejs": "html" - }, - "[html]": { - "editor.defaultFormatter": 
"j69.ejs-beautify" - } + "typescript.tsdk": "node_modules/typescript/lib" } diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..a9b41b1 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,40 @@ +# Repository Guidelines + +## Project Structure & Module Organization + +- `src/` holds runtime code: `app/` (Express setup), `modules/` (domain logic), `lib/` (DB, queues, email, storage), `routes/` (HTTP wiring). +- Shared helpers: `src/common/`, `src/utils/`, `src/observability/` — prefer reuse over new utilities. +- Assets: emails in `src/email/templates/`, static files in `public/`. +- Configuration: `src/config/` with environment schema in `src/config/env.ts`. + +## Build, Test & Development Commands + +- `docker compose up -d` — start MongoDB and Redis locally. +- `pnpm dev` — watch server and run the email previewer. +- `pnpm build` — compile TypeScript via `tsup` to `dist/`. +- `pnpm start` | `pnpm start:dev|:prod|:local` — run compiled or env-specific entry via `dotenv-cli`. +- `pnpm typecheck` — strict type checks without emit. `pnpm lint` | `pnpm lint:fix` — ESLint with Prettier. +- Useful: `pnpm openapi` (generate spec), `pnpm seed` (dev seed), `pnpm email:dev` (preview emails). + +## Coding Style & Naming Conventions + +- TypeScript, 2-space indentation, organized imports; prefer named exports from shared modules. +- Naming: camelCase (vars/functions), PascalCase (classes), kebab-case (feature files). +- Linting: ESLint + Prettier; avoid `any`, remove unused code before PRs. + +## Testing Guidelines + +- No test runner is configured yet. If adding tests, prefer colocated specs (`*.spec.ts`) near source or `__tests__/` under `src/`. +- Focus on unit tests for `modules/` and `utils/`; mock integrations from `lib/`. +- Aim for meaningful coverage on controllers, services, and schema validation. Keep tests fast and deterministic. + +## Commit & Pull Request Guidelines + +- Use Conventional Commits: `feat:`, `fix:`, `refactor:`, `chore:` — one logical change per commit. +- PRs must describe problem, solution, and rollout (migrations, flags, ops). Link issues; include logs/screens for ops-facing changes. +- Document new env vars and update `.env.sample` and `src/config/env.ts` together. + +## Security & Configuration Tips + +- Never commit secrets. Source config from env; validate via `src/config/env.ts`. +- Protect admin surfaces (e.g., `/queues`, `/ops/*`) behind auth in production; document access controls when changing them. diff --git a/CLAUDE.md b/CLAUDE.md deleted file mode 100644 index f7fa39c..0000000 --- a/CLAUDE.md +++ /dev/null @@ -1,68 +0,0 @@ -# CLAUDE.md - -This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 
- -## Development Commands - -- **Development server**: `pnpm run dev` - Starts both backend and email template development server -- **Backend only**: `pnpm run start:dev` - Starts just the backend with hot reload -- **Build**: `pnpm run build` - Builds the project using tsup -- **Production start**: `pnpm run start:prod` - Starts production build with .env.production -- **Local start**: `pnpm run start:local` - Starts production build with .env.local -- **Database seeder**: `pnpm run seeder` - Runs database seeding scripts -- **Linting**: `pnpm run lint` - Runs ESLint, `pnpm run lint:fix` - Auto-fixes linting issues -- **Email templates**: `pnpm run email:dev` - Starts email template development server - -## Architecture Overview - -This is a TypeScript Express.js backend toolkit with the following key architectural components: - -### Core Architecture -- **MagicRouter System**: Custom routing ([src/openapi/magic-router.ts](src/openapi/magic-router.ts)) that automatically generates OpenAPI/Swagger documentation from Zod schemas -- **Module-based structure**: Features organized in modules under [src/modules/](src/modules/) (auth, user) -- **Configuration management**: Type-safe config using Zod validation in [src/config/config.service.ts](src/config/config.service.ts) -- **Database**: MongoDB with Mongoose ODM, connection managed in [src/lib/database.ts](src/lib/database.ts) - -### Key Features -- **Authentication**: JWT-based with optional OTP verification, Google OAuth support -- **File Uploads**: Multer with S3 integration via [src/lib/aws.service.ts](src/lib/aws.service.ts) -- **Email System**: React Email templates with Mailgun provider, queue-based sending -- **Real-time**: Socket.io integration with Redis adapter -- **Background Jobs**: BullMQ with Redis for email queues and other background tasks -- **API Documentation**: Auto-generated Swagger docs at `/api-docs` from MagicRouter -- **Queue Dashboard**: BullMQ admin dashboard at `/admin/queues` - -### Middleware Stack -- Request validation with Zod schemas ([src/middlewares/validate-zod-schema.middleware.ts](src/middlewares/validate-zod-schema.middleware.ts)) -- JWT extraction and authorization ([src/middlewares/extract-jwt-schema.middleware.ts](src/middlewares/extract-jwt-schema.middleware.ts)) -- File upload handling with S3 ([src/middlewares/multer-s3.middleware.ts](src/middlewares/multer-s3.middleware.ts)) -- Access control middleware ([src/middlewares/can-access.middleware.ts](src/middlewares/can-access.middleware.ts)) - -### Environment Setup -1. Start Docker services: `docker compose up -d` (MongoDB + Redis) -2. Install dependencies: `pnpm i` -3. 
Configure environment variables using `.env.sample` as template - -### Key Patterns -- **MagicRouter**: All API routes use MagicRouter for automatic OpenAPI generation -- **Zod Schemas**: Every route uses Zod for request/response validation -- **Service Layer**: Business logic separated into service files -- **Queue-based**: Email sending and background jobs use BullMQ queues -- **Type Safety**: Full TypeScript coverage with Zod for runtime validation - -### File Upload & Storage -- Multer middleware handles file uploads -- AWS S3 integration for file storage -- File upload routes in [src/upload/](src/upload/) - -### Email System -- React Email for template development -- Mailgun for email delivery -- Queue-based sending system in [src/queues/email.queue.ts](src/queues/email.queue.ts) - -## Important Notes -- All expiration times in config are in milliseconds (converted from strings) -- The project uses pnpm as package manager -- Database seeding is available via the seeder script -- Global error handling in [src/utils/globalErrorHandler.ts](src/utils/globalErrorHandler.ts) -- Logging uses Pino logger with pretty printing in development \ No newline at end of file diff --git a/README.md b/README.md index 770e556..8e64048 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ Before you get started, make sure you have the following installed on your machi ## What's Included -- **OpenAPI Autogenerated Swagger Docs** : Automatically generated Swagger docs through MagicRouter API and Zod, accessible at `/api-docs`. +- **OpenAPI Autogenerated Swagger Docs** : Automatically generated Swagger docs through MagicRouter API and Zod, accessible at `/docs`. - **Auth Module**: Includes Google Sign-In support for easy authentication. - **User Management**: Comprehensive user management functionality. - **File Upload**: Handles file uploads with Multer and Amazon S3, with full OpenAPI/Swagger documentation support. @@ -65,97 +65,6 @@ Before you get started, make sure you have the following installed on your machi ``` - **PM2 Support**: Out-of-the-box support for PM2 to manage your production processes. 
-## Folder Structure - -```plaintext -├── build.ts -├── docker-compose.yml -├── docs -│   └── EMAIL.md -├── ecosystem.config.js -├── eslint.config.mjs -├── LICENSE -├── logo.webp -├── modules.d.ts -├── package.json -├── pnpm-lock.yaml -├── public -│   ├── index.html -│   ├── logo.webp -│   ├── script.js -│   └── styles.css -├── README.md -├── src -│   ├── common -│   │   ├── common.schema.ts -│   │   └── common.utils.ts -│   ├── config -│   │   └── config.service.ts -│   ├── email -│   │   ├── email.service.ts -│   │   └── templates -│   │   └── ResetPassword.tsx -│   ├── enums.ts -│   ├── healthcheck -│   │   ├── healthcheck.controller.ts -│   │   └── healthcheck.routes.ts -│   ├── lib -│   │   ├── aws.service.ts -│   │   ├── common.schema.ts -│   │   ├── database.ts -│   │   ├── email.server.ts -│   │   ├── logger.service.ts -│   │   ├── mailgun.server.ts -│   │   ├── queue.server.ts -│   │   ├── realtime.server.ts -│   │   ├── redis.server.ts -│   │   └── session.store.ts -│   ├── main.ts -│   ├── middlewares -│   │   ├── can-access.middleware.ts -│   │   ├── extract-jwt-schema.middleware.ts -│   │   ├── multer-s3.middleware.ts -│   │   └── validate-zod-schema.middleware.ts -│   ├── modules -│   │   ├── auth -│   │   │   ├── auth.constants.ts -│   │   │   ├── auth.controller.ts -│   │   │   ├── auth.router.ts -│   │   │   ├── auth.schema.ts -│   │   │   └── auth.service.ts -│   │   └── user -│   │   ├── user.controller.ts -│   │   ├── user.dto.ts -│   │   ├── user.model.ts -│   │   ├── user.router.ts -│   │   ├── user.schema.ts -│   │   └── user.services.ts -│   ├── openapi -│   │   ├── magic-router.ts -│   │   ├── openapi.utils.ts -│   │   ├── swagger-doc-generator.ts -│   │   ├── swagger-instance.ts -│   │   └── zod-extend.ts -│   ├── queues -│   │   └── email.queue.ts -│   ├── routes -│   │   └── routes.ts -│   ├── types.ts -│   ├── upload -│   │   ├── upload.controller.ts -│   │   └── upload.router.ts -│   └── utils -│   ├── api.utils.ts -│   ├── auth.utils.ts -│   ├── common.utils.ts -│   ├── email.utils.ts -│   ├── getPaginator.ts -│   ├── globalErrorHandler.ts -│   ├── isUsername.ts -│   └── responseInterceptor.ts -└── tsconfig.json -``` - ## File Upload with MagicRouter MagicRouter now supports multipart/form-data file uploads with automatic OpenAPI documentation generation. This feature works seamlessly with Multer middleware. @@ -168,7 +77,7 @@ MagicRouter now supports multipart/form-data file uploads with automatic OpenAPI import { z } from 'zod'; import MagicRouter from './openapi/magic-router'; import { zFile } from './openapi/zod-extend'; -import { uploadMiddleware } from './middlewares/multer-s3.middleware'; +import { uploadMiddleware } from './middlewares/multer-s3'; const router = new MagicRouter('/api'); diff --git a/bin/actions/makeFactory.ts b/bin/actions/makeFactory.ts new file mode 100644 index 0000000..28cf17a --- /dev/null +++ b/bin/actions/makeFactory.ts @@ -0,0 +1,143 @@ +import path from 'path'; +import fs from 'fs/promises'; +import { introspectModel, findCreateFunctionName, resolveModulePaths, type FieldInfo } from '../utils/introspectModel'; + +function pascalCase(str: string) { + return str + .replace(/[-_\s]+(.)?/g, (_, c) => (c ? 
c.toUpperCase() : '')) + .replace(/^(.)/, (m) => m.toUpperCase()); +} + +function defaultValueForField(f: FieldInfo, iVar = 'i'): string { + const name = f.path; + const idx = iVar; + if (name === '_id') { + if (f.type === 'string') return "new Types.ObjectId().toString()"; + if (f.type === 'objectid') return 'new Types.ObjectId() as any'; + } + if (f.enumValues && f.enumValues.length) { + return `${JSON.stringify(f.enumValues)}[(${idx}-1) % ${f.enumValues.length}]`; + } + if (f.refModelName) return 'new Types.ObjectId() as any'; + switch (f.type) { + case 'string': { + if (/email/i.test(name)) return '`'+name+'${'+idx+'}@example.com`'; + if (/username/i.test(name)) return '`'+name+'${'+idx+'}`'; + if (/password/i.test(name)) return '`password${'+idx+'}`'; + if (/name/i.test(name)) return '`'+pascalCase(name)+' ${'+idx+'}`'; + if (/description/i.test(name)) return '`Description ${'+idx+'}`'; + if (/url|avatar/i.test(name)) return '`https://example.com/'+name+'/${'+idx+'}`'; + return '`'+name+'${'+idx+'}`'; + } + case 'number': + return idx; + case 'boolean': + return `(${idx} % 2 === 0)`; + case 'date': + return 'new Date()'; + case 'array': + return '[] as any[]'; + case 'objectid': + return 'new Types.ObjectId() as any'; + default: + return 'undefined as any'; + } +} + +export const createMakeFactoryAction = async (fullName: string, opts?: { model?: string; use?: 'service'|'model'; 'id-type'?: string }) => { + try { + const [module, rawName] = String(fullName).split('/'); + const baseName = rawName.replace(/\.factory$/i, ''); + const factoryVar = `${baseName.charAt(0).toLowerCase()}${baseName.slice(1)}Factory`; + + const info = await introspectModel(module, baseName, { modelExport: opts?.model }); + const _hasObjectId = info.fields.some((f) => f.type === 'objectid' || f.refModelName); + + // DTO/service checks + const dtoTypeModel = `${pascalCase(baseName)}ModelType`; + const dtoTypeOut = `${pascalCase(baseName)}Type`; + let useDTO = false; + if (info.dtoFilePath) { + const dtoSrc = await fs.readFile(info.dtoFilePath, 'utf-8'); + useDTO = dtoSrc.includes(`export type ${dtoTypeModel}`) || dtoSrc.includes(`export interface ${dtoTypeModel}`); + } + + const { servicesFilePath } = await resolveModulePaths(module, baseName); + let createFnName: string | undefined; + if (servicesFilePath) { + const serviceSrc = await fs.readFile(servicesFilePath, 'utf-8'); + createFnName = findCreateFunctionName(baseName, serviceSrc); + } + + const buildFields: string[] = []; + for (const f of info.fields) { + // skip internal + if (f.path === '__v' || f.path === 'createdAt' || f.path === 'updatedAt') continue; + // include required fields and common optional initials (name/description/email) + const shouldInclude = f.required || /^(name|email|username|description|title|password|avatar|url|role)$/i.test(f.path) || f.path === '_id'; + if (!shouldInclude) continue; + buildFields.push(` ${JSON.stringify(f.path).replace(/"/g,'')}: ${defaultValueForField(f)}`); + } + + const payloadType = useDTO ? `${dtoTypeModel}` : 'Record'; + const returnType = useDTO ? `${dtoTypeOut}` : 'any'; + + const lines: string[] = []; + lines.push(`import ${info.modelExport === 'default' ? '' : `{ ${info.modelExport} as ` + info.modelName + `Model } from `} '../${baseName}.model';`); + // The above is messy; prefer default import as in repo styles + lines.length = 0; // reset + lines.push(`import { Types } from 'mongoose';`); + if (!createFnName) { + lines.push(`import ${info.modelExport === 'default' ? 
`${info.modelName}` : `{ ${info.modelExport} as ${info.modelName} }`} from '../${baseName}.model';`); + } + if (useDTO) lines.push(`import type { ${dtoTypeModel}, ${dtoTypeOut} } from '../${baseName}.dto';`); + if (createFnName) lines.push(`import { ${createFnName} } from '../${baseName}.services';`); + lines.push(''); + lines.push(`type Overrides = Partial<${payloadType}> & Record;`); + lines.push(''); + lines.push(`export const ${factoryVar} = {`); + lines.push(` build(i = 1, overrides: Overrides = {}): ${payloadType} {`); + lines.push(' return {'); + if (buildFields.length) lines.push(buildFields.join(',\n')); + lines.push(' , ...overrides'); + lines.push(' } as unknown as ' + payloadType + ';'); + lines.push(' },'); + lines.push(''); + lines.push(` async create(i = 1, overrides: Overrides = {}): Promise<${returnType}> {`); + lines.push(' const payload = this.build(i, overrides);'); + if (createFnName) { + lines.push(` // Prefer service function when available`); + lines.push(` return await ${createFnName}(payload as any);`); + } else { + lines.push(` const doc = await ${info.modelName}.create(payload as any);`); + lines.push(' // @ts-expect-error toObject present on mongoose doc'); + lines.push(' return (doc.toObject ? doc.toObject() : doc) as ' + returnType + ';'); + } + lines.push(' },'); + lines.push(''); + lines.push(' async createMany(count: number, overrides: Overrides = {}): Promise<' + returnType + '[]> {'); + lines.push(' const out: ' + returnType + '[] = [];'); + lines.push(' for (let i = 1; i <= count; i += 1) out.push(await this.create(i, overrides));'); + lines.push(' return out;'); + lines.push(' },'); + lines.push('};'); + + const content = lines.join('\n'); + + const outputPath = path.join( + process.cwd(), + 'src', + 'modules', + module, + 'factories', + ); + const filePath = path.join(outputPath, `${baseName.toLowerCase()}.factory.ts`); + + await fs.mkdir(outputPath, { recursive: true }); + await fs.writeFile(filePath, content, 'utf-8'); + console.log(`✓ Factory created: ${filePath}`); + } catch (error) { + console.error('Failed to create factory:', error); + process.exit(1); + } +}; diff --git a/bin/actions/makeSeeder.ts b/bin/actions/makeSeeder.ts new file mode 100644 index 0000000..a86b4e5 --- /dev/null +++ b/bin/actions/makeSeeder.ts @@ -0,0 +1,109 @@ +import path from 'path'; +import fs from 'fs/promises'; +import { introspectModel } from '../utils/introspectModel'; + +function pascalCase(str: string) { + return str + .replace(/[-_\s]+(.)?/g, (_, c) => (c ? c.toUpperCase() : '')) + .replace(/^(.)/, (m) => m.toUpperCase()); +} + +export const createMakeSeederAction = async ( + fullName: string, + opts?: { count?: string; unique?: string; 'depends-on'?: string; model?: string }, +) => { + try { + const [module, rawName] = String(fullName).split('/'); + const base = rawName.replace(/Seeder$/i, ''); + const seederName = rawName.endsWith('Seeder') ? rawName : `${base}Seeder`; + const baseName = base.toLowerCase(); + + const info = await introspectModel(module, baseName, { modelExport: opts?.model }); + + const seedCount = Number(opts?.count ?? 
5); + const uniqueField = opts?.unique || info.fields.find((f) => f.unique)?.path; + + const refFields = info.fields.filter((f) => f.refModelName && !f.isArray); + const dependsOnAuto = refFields.map((f) => `${f.refModelName}Seeder`); + const dependsOnManual = (opts?.['depends-on'] || '') + .split(',') + .map((s) => s.trim()) + .filter(Boolean); + const dependsOn = Array.from(new Set([...dependsOnAuto, ...dependsOnManual])); + + // Pick a primary string field for detecting existing fixtures + const primaryStrField = ['name', 'email', 'username', 'title', 'description'] + .map((n) => info.fields.find((f) => f.path === n && f.type === 'string')) + .find(Boolean) || info.fields.find((f) => f.type === 'string'); + + // Build override for a single ref if available using ctx.refs + const refOverrideCode = refFields.length + ? `{ + ${refFields + .map((f) => `${f.path}: (ctx.refs.has('${String(f.refModelName).toLowerCase()}:seeded') ? ctx.refs.get('${String(f.refModelName).toLowerCase()}:seeded')[0] : undefined) as any`) + .join(',\n ')} + }` + : '{}'; + + const seederLines: string[] = []; + seederLines.push(`import type { Seeder } from '@/seeders/types';`); + seederLines.push(`import ${info.modelExport === 'default' ? `${info.modelName}` : `{ ${info.modelExport} as ${info.modelName} }`} from '../${baseName}.model';`); + seederLines.push(`import { ${baseName}Factory } from '../factories/${baseName}.factory';`); + seederLines.push(''); + seederLines.push(`export const ${seederName}: Seeder = {`); + seederLines.push(` name: '${seederName}',`); + seederLines.push(` groups: ['base','dev','test'],`); + seederLines.push(` dependsOn: ${JSON.stringify(dependsOn)},`); + seederLines.push(` collections: [${info.modelName}.collection.collectionName],`); + seederLines.push(' async run(ctx) {'); + if (uniqueField) { + const uniqVal = primaryStrField?.path === 'email' ? 
`'${baseName}0@example.com'` : `'${pascalCase(baseName)} 0'`; + seederLines.push(` const existing = await ${info.modelName}.findOne({ ${uniqueField}: ${uniqVal} });`); + seederLines.push(' if (!existing) {'); + seederLines.push(` await ${baseName}Factory.create(0, { ${uniqueField}: ${uniqVal} });`); + seederLines.push(' }'); + } + seederLines.push(` if (ctx.env.group === 'dev' || ctx.env.group === 'test') {`); + if (primaryStrField) { + let regex: string; + if (primaryStrField.path === 'email') { + regex = `^${primaryStrField.path}\\d+@example\\.com$`; + } else if (primaryStrField.path === 'name' || primaryStrField.path === 'title' || primaryStrField.path === 'description' || primaryStrField.path === 'username') { + regex = `^${pascalCase(primaryStrField.path)} \\d+$`; + } else { + regex = `^${primaryStrField.path}\\d+$`; + } + seederLines.push(` const existing = await ${info.modelName}.countDocuments({ ${primaryStrField.path}: { $regex: /${regex}/ } });`); + } else { + seederLines.push(` const existing = await ${info.modelName}.estimatedDocumentCount();`); + } + seederLines.push(' if (existing === 0) {'); + seederLines.push(` const docs = await ${baseName}Factory.createMany(${seedCount}, ${refOverrideCode});`); + seederLines.push(` ctx.refs.set('${baseName}:seeded', docs.map((d: any) => String(d._id)));`); + seederLines.push(' } else {'); + seederLines.push(` const ids = (await ${info.modelName}.find({}).select('_id').lean()).map((d: any) => String(d._id));`); + seederLines.push(` ctx.refs.set('${baseName}:seeded', ids);`); + seederLines.push(' }'); + seederLines.push(' }'); + seederLines.push(' },'); + seederLines.push('};'); + + const content = seederLines.join('\n'); + + const outputPath = path.join( + process.cwd(), + 'src', + 'modules', + module, + 'seeders', + ); + const filePath = path.join(outputPath, `${seederName}.ts`); + + await fs.mkdir(outputPath, { recursive: true }); + await fs.writeFile(filePath, content, 'utf-8'); + console.log(`✓ Seeder created: ${filePath}`); + } catch (error) { + console.error('Failed to create seeder:', error); + process.exit(1); + } +}; diff --git a/bin/actions/middleware.ts b/bin/actions/middleware.ts new file mode 100644 index 0000000..056eb46 --- /dev/null +++ b/bin/actions/middleware.ts @@ -0,0 +1,75 @@ +import path from 'path'; +import fs from 'fs/promises'; + +const toKebabCase = (s: string) => + s + .replace(/([a-z0-9])([A-Z])/g, '$1-$2') + .replace(/\s+/g, '-') + .replace(/_+/g, '-') + .replace(/-+/g, '-') + .toLowerCase(); + +const toCamelCase = (s: string) => + toKebabCase(s) + .split('-') + .map((p, i) => (i === 0 ? 
p : p.charAt(0).toUpperCase() + p.slice(1))) + .join(''); + +const toPascalCase = (s: string) => { + const c = toCamelCase(s); + return c.charAt(0).toUpperCase() + c.slice(1); +}; + +export const createMiddlewareAction = async (rawName: string) => { + const base = rawName.replace(/-?middleware$/i, ''); + + const fileBase = toKebabCase(base); + const exportBase = toCamelCase(base); + const optionsType = `${toPascalCase(base)}Options`; + + const dir = path.join(process.cwd(), 'src', 'middlewares'); + const outputPath = path.join(dir, `${fileBase}.ts`); + + const middlewareContent = `import type { NextFunction } from 'express'; +import { StatusCodes } from '@/plugins/magic/status-codes'; +import { errorResponse } from '@/utils/response.utils'; +import type { RequestAny, ResponseAny } from '@/plugins/magic/router'; + +export type ${optionsType} = Record; + +export const ${exportBase} = (options?: ${optionsType}) => + async (req: RequestAny, res: ResponseAny, next: NextFunction) => { + try { + // TODO: implement ${fileBase} middleware logic + } catch (err) { + return errorResponse( + res, + (err as Error).message, + StatusCodes.INTERNAL_SERVER_ERROR, + ); + } + + return next(); + }; + +export default ${exportBase}; +`; + + try { + await fs.mkdir(dir, { recursive: true }); + + try { + await fs.access(outputPath); + console.error(`✗ Middleware already exists: ${outputPath}`); + process.exit(1); + } catch { + // file does not exist, continue + } + + await fs.writeFile(outputPath, middlewareContent, 'utf-8'); + console.log(`✓ Middleware created: ${outputPath}`); + } catch (error) { + console.error('Failed to create middleware:', error); + process.exit(1); + } +}; diff --git a/bin/actions/module.ts b/bin/actions/module.ts new file mode 100644 index 0000000..01f7ab4 --- /dev/null +++ b/bin/actions/module.ts @@ -0,0 +1,423 @@ +import path from 'path'; +import fs from 'fs/promises'; + +export const createModuleAction = async ( + name: string, + options: { path: string }, +) => { + const moduleName = name.toLowerCase(); + const className = name.charAt(0).toUpperCase() + name.slice(1); + const moduleDir = path.join(process.cwd(), 'src', 'modules', moduleName); + + try { + // Create module directory + await fs.mkdir(moduleDir, { recursive: true }); + + // 1. DTO file + const dtoContent = `import { z } from "zod"; +import { definePaginatedResponse } from "@/common/common.utils"; + +export const ${moduleName}OutSchema = z.object({ + name: z.string(), + description: z.string().optional(), + createdAt: z.date().optional(), + updatedAt: z.date().optional(), +}); + +export const ${moduleName}Schema = ${moduleName}OutSchema.extend({ + // Add internal fields here +}); + +export const ${moduleName}sPaginatedSchema = definePaginatedResponse(${moduleName}OutSchema); + +export type ${className}ModelType = z.infer; +export type ${className}Type = z.infer & { id: string; _id: string }; +export type ${className}PaginatedType = z.infer; +`; + + // 2. Model file + const modelContent = `import mongoose, { type Document, Schema } from "mongoose"; +import type { ${className}ModelType, ${className}Type } from "./${moduleName}.dto"; + +const ${className}Schema: Schema<${className}Type> = new Schema( + { + name: { type: String, required: true }, + description: { type: String }, + }, + { timestamps: true }, +); + +export interface I${className}Document extends Document, ${className}ModelType {} +const ${className} = mongoose.model<${className}Type>("${className}", ${className}Schema); +export default ${className}; +`; + + // 3. 
+    const schemaContent = `import { z } from "zod";
+import { R } from "@/plugins/magic/response.builders";
+import { ${moduleName}OutSchema } from "./${moduleName}.dto";
+
+export const create${className}Schema = z.object({
+  name: z.string({ required_error: "Name is required" }).min(1),
+  description: z.string().optional(),
+});
+
+export const update${className}Schema = z.object({
+  name: z.string().min(1).optional(),
+  description: z.string().optional(),
+});
+
+export const get${className}sSchema = z.object({
+  searchString: z.string().optional(),
+  limitParam: z
+    .string()
+    .default("10")
+    .refine(
+      (value) => !Number.isNaN(Number(value)) && Number(value) >= 0,
+      "Input must be positive integer",
+    )
+    .transform(Number),
+  pageParam: z
+    .string()
+    .default("1")
+    .refine(
+      (value) => !Number.isNaN(Number(value)) && Number(value) >= 0,
+      "Input must be positive integer",
+    )
+    .transform(Number),
+});
+
+export type Create${className}SchemaType = z.infer<typeof create${className}Schema>;
+export type Update${className}SchemaType = z.infer<typeof update${className}Schema>;
+export type Get${className}sSchemaType = z.infer<typeof get${className}sSchema>;
+
+// Response schemas
+export const create${className}ResponseSchema = R.success(${moduleName}OutSchema);
+export const get${className}sResponseSchema = R.paginated(${moduleName}OutSchema);
+export const get${className}ByIdResponseSchema = R.success(${moduleName}OutSchema);
+export const update${className}ResponseSchema = R.success(${moduleName}OutSchema);
+export const delete${className}ResponseSchema = R.success(z.object({
+  success: z.boolean(),
+  message: z.string(),
+}));
+
+// Response types
+export type Create${className}ResponseSchema = z.infer<typeof create${className}ResponseSchema>;
+export type Get${className}sResponseSchema = z.infer<typeof get${className}sResponseSchema>;
+export type Get${className}ByIdResponseSchema = z.infer<typeof get${className}ByIdResponseSchema>;
+export type Update${className}ResponseSchema = z.infer<typeof update${className}ResponseSchema>;
+export type Delete${className}ResponseSchema = z.infer<typeof delete${className}ResponseSchema>;
+`;
+
+    // 4. Services file
+    const servicesContent = `import type { FilterQuery } from "mongoose";
+import type { MongoIdSchemaType } from "@/common/common.schema";
+import { getPaginator } from "@/utils/pagination.utils";
+import type { ${className}Type } from "./${moduleName}.dto";
+import ${className}, { type I${className}Document } from "./${moduleName}.model";
+import type { Create${className}SchemaType, Get${className}sSchemaType, Update${className}SchemaType } from "./${moduleName}.schema";
+
+export const create${className} = async (
+  payload: Create${className}SchemaType,
+): Promise<${className}Type> => {
+  const created${className} = await ${className}.create(payload);
+  return created${className}.toObject();
+};
+
+export const get${className}ById = async (${moduleName}Id: string): Promise<${className}Type> => {
+  const ${moduleName} = await ${className}.findById(${moduleName}Id);
+
+  if (!${moduleName}) {
+    throw new Error("${className} not found");
+  }
+
+  return ${moduleName}.toObject();
+};
+
+export const update${className} = async (
+  ${moduleName}Id: string,
+  payload: Update${className}SchemaType,
+): Promise<${className}Type> => {
+  const ${moduleName} = await ${className}.findByIdAndUpdate(
+    ${moduleName}Id,
+    { $set: payload },
+    { new: true },
+  );
+
+  if (!${moduleName}) {
+    throw new Error("${className} not found");
+  }
+
+  return ${moduleName}.toObject();
+};
+
+export const delete${className} = async (${moduleName}Id: MongoIdSchemaType): Promise<void> => {
+  const ${moduleName} = await ${className}.findByIdAndDelete(${moduleName}Id.id);
+
+  if (!${moduleName}) {
+    throw new Error("${className} not found");
+  }
+};
+
+export const get${className}s = async (
+  payload: Get${className}sSchemaType,
+) => {
+  const conditions: FilterQuery<I${className}Document> = {};
+
+  if (payload.searchString) {
+    conditions.$or = [
+      { name: { $regex: payload.searchString, $options: "i" } },
+      { description: { $regex: payload.searchString, $options: "i" } },
+    ];
+  }
+
+  const totalRecords = await ${className}.countDocuments(conditions);
+  const paginatorInfo = getPaginator(
+    payload.limitParam,
+    payload.pageParam,
+    totalRecords,
+  );
+
+  const results = await ${className}.find(conditions)
+    .limit(paginatorInfo.limit)
+    .skip(paginatorInfo.skip)
+    .exec();
+
+  return {
+    results,
+    paginatorInfo,
+  };
+};
+`;
+
+    // 5. Controller file
+    const controllerContent = `import type { Request } from "express";
+import type { MongoIdSchemaType } from "@/common/common.schema";
+import type { ResponseExtended } from "@/types";
+import type {
+  Create${className}SchemaType,
+  Get${className}sSchemaType,
+  Update${className}SchemaType,
+  Create${className}ResponseSchema,
+  Get${className}sResponseSchema,
+  Get${className}ByIdResponseSchema,
+  Update${className}ResponseSchema,
+  Delete${className}ResponseSchema,
+} from "./${moduleName}.schema";
+import { create${className}, delete${className}, get${className}ById, get${className}s, update${className} } from "./${moduleName}.services";
+
+// Using new res.created() helper
+export const handleCreate${className} = async (
+  req: Request,
+  res: ResponseExtended<Create${className}ResponseSchema>,
+) => {
+  const ${moduleName} = await create${className}(req.body);
+  return res.created?.({
+    success: true,
+    message: "${className} created successfully",
+    data: ${moduleName},
+  });
+};
+
+// Using new res.ok() helper with paginated response
+export const handleGet${className}s = async (
+  req: Request,
+  res: ResponseExtended<Get${className}sResponseSchema>,
+) => {
+  const { results, paginatorInfo } = await get${className}s(req.query);
+  return res.ok?.({
+    success: true,
+    data: {
+      items: results,
+      paginator: paginatorInfo,
+    },
+  });
+};
+
+// Using new res.ok() helper
+export const handleGet${className}ById = async (
+  req: Request,
+  res: ResponseExtended<Get${className}ByIdResponseSchema>,
+) => {
+  const ${moduleName} = await get${className}ById(req.params.id);
+  return res.ok?.({
+    success: true,
+    data: ${moduleName},
+  });
+};
+
+// Using new res.ok() helper
+export const handleUpdate${className} = async (
+  req: Request,
+  res: ResponseExtended<Update${className}ResponseSchema>,
+) => {
+  const ${moduleName} = await update${className}(req.params.id, req.body);
+  return res.ok?.({
+    success: true,
+    message: "${className} updated successfully",
+    data: ${moduleName},
+  });
+};
+
+// Using new res.ok() helper
+export const handleDelete${className} = async (
+  req: Request,
+  res: ResponseExtended<Delete${className}ResponseSchema>,
+) => {
+  await delete${className}({ id: req.params.id });
+  return res.ok?.({
+    success: true,
+    message: "${className} deleted successfully",
+  });
+};
+`;
+
+    // 6.
Router file + const routerContent = `import { mongoIdSchema } from "@/common/common.schema"; +import { canAccess } from "@/middlewares/can-access"; +import MagicRouter from "@/plugins/magic/router"; +import { + handleCreate${className}, + handleDelete${className}, + handleGet${className}ById, + handleGet${className}s, + handleUpdate${className}, +} from "./${moduleName}.controller"; +import { + create${className}Schema, + get${className}sSchema, + update${className}Schema, + create${className}ResponseSchema, + get${className}sResponseSchema, + get${className}ByIdResponseSchema, + update${className}ResponseSchema, + delete${className}ResponseSchema, +} from "./${moduleName}.schema"; + +export const ${moduleName.toUpperCase()}_ROUTER_ROOT = "${options.path}/${moduleName}s"; + +const ${moduleName}Router = new MagicRouter(${moduleName.toUpperCase()}_ROUTER_ROOT); + +// List ${moduleName}s with pagination +${moduleName}Router.get( + "/", + { + requestType: { query: get${className}sSchema }, + responses: { + 200: get${className}sResponseSchema, + }, + }, + canAccess(), + handleGet${className}s, +); + +// Create ${moduleName} +${moduleName}Router.post( + "/", + { + requestType: { body: create${className}Schema }, + responses: { + 201: create${className}ResponseSchema, + }, + }, + canAccess(), + handleCreate${className}, +); + +// Get ${moduleName} by ID +${moduleName}Router.get( + "/:id", + { + requestType: { params: mongoIdSchema }, + responses: { + 200: get${className}ByIdResponseSchema, + }, + }, + canAccess(), + handleGet${className}ById, +); + +// Update ${moduleName} +${moduleName}Router.patch( + "/:id", + { + requestType: { + params: mongoIdSchema, + body: update${className}Schema, + }, + responses: { + 200: update${className}ResponseSchema, + }, + }, + canAccess(), + handleUpdate${className}, +); + +// Delete ${moduleName} +${moduleName}Router.delete( + "/:id", + { + requestType: { params: mongoIdSchema }, + responses: { + 200: delete${className}ResponseSchema, + }, + }, + canAccess(), + handleDelete${className}, +); + +export default ${moduleName}Router.getRouter(); +`; + + // Write all files + await Promise.all([ + fs.writeFile( + path.join(moduleDir, `${moduleName}.dto.ts`), + dtoContent, + 'utf-8', + ), + fs.writeFile( + path.join(moduleDir, `${moduleName}.model.ts`), + modelContent, + 'utf-8', + ), + fs.writeFile( + path.join(moduleDir, `${moduleName}.schema.ts`), + schemaContent, + 'utf-8', + ), + fs.writeFile( + path.join(moduleDir, `${moduleName}.services.ts`), + servicesContent, + 'utf-8', + ), + fs.writeFile( + path.join(moduleDir, `${moduleName}.controller.ts`), + controllerContent, + 'utf-8', + ), + fs.writeFile( + path.join(moduleDir, `${moduleName}.router.ts`), + routerContent, + 'utf-8', + ), + ]); + + console.log(`✓ Module created: ${moduleDir}`); + console.log(` ├── ${moduleName}.dto.ts`); + console.log(` ├── ${moduleName}.model.ts`); + console.log(` ├── ${moduleName}.schema.ts`); + console.log(` ├── ${moduleName}.services.ts`); + console.log(` ├── ${moduleName}.controller.ts`); + console.log(` └── ${moduleName}.router.ts`); + console.log(); + console.log(`Next steps:`); + console.log(` 1. Register the router in your main app file`); + console.log(` 2. Customize the model fields in ${moduleName}.model.ts`); + console.log(` 3. Update validation schemas in ${moduleName}.schema.ts`); + console.log(` 4. 
Add business logic to ${moduleName}.services.ts`); + } catch (error) { + console.error('Failed to create module:', error); + process.exit(1); + } +}; diff --git a/bin/actions/plugin.ts b/bin/actions/plugin.ts new file mode 100644 index 0000000..45e7379 --- /dev/null +++ b/bin/actions/plugin.ts @@ -0,0 +1,62 @@ +import path from 'path'; +import fs from 'fs/promises'; + +export const createPluginAction = async (name: string) => { + const pluginName = name.toLowerCase(); + const className = name.charAt(0).toUpperCase() + name.slice(1); + + const pluginContent = `import type { ToolkitPlugin, PluginFactory } from '@/plugins/types'; + +export interface ${className}Options { + enabled?: boolean; +} + +export const ${pluginName}Plugin: PluginFactory<${className}Options> = ( + options = {}, +): ToolkitPlugin<${className}Options> => { + const { enabled = true } = options; + + return { + name: '${pluginName}', + priority: 50, + options, + + register({ app }) { + if (!enabled) { + return; + } + + // Plugin implementation here + console.log('${className} plugin registered'); + }, + + onShutdown: async () => { + // Cleanup logic here + console.log('${className} plugin shutdown'); + }, + }; +}; + +export default ${pluginName}Plugin; +`; + + await fs.mkdir(path.join(process.cwd(), 'src', 'plugins', pluginName), { + recursive: true, + }); + + const outputPath = path.join( + process.cwd(), + 'src', + 'plugins', + pluginName, + 'index.ts', + ); + + try { + await fs.writeFile(outputPath, pluginContent, 'utf-8'); + console.log(`✓ Plugin created: ${outputPath}`); + } catch (error) { + console.error('Failed to create plugin:', error); + process.exit(1); + } +}; diff --git a/bin/actions/seed.ts b/bin/actions/seed.ts new file mode 100644 index 0000000..c9eeb28 --- /dev/null +++ b/bin/actions/seed.ts @@ -0,0 +1,34 @@ +import { seeders } from '../../src/seeders/registry'; +import { runSeeders } from '../../src/seeders/runner'; + +export const createSeedAction = async (opts: { + group: string; + only: string; + fresh: boolean; + force: boolean; + dryRun: boolean; + seed: number; + transaction: boolean; +}) => { + const only = opts.only + ? String(opts.only) + .split(',') + .map((s) => s.trim()) + : undefined; + + try { + await runSeeders(seeders, { + group: opts.group, + only, + fresh: Boolean(opts.fresh), + force: Boolean(opts.force), + dryRun: Boolean(opts.dryRun), + seed: Number(opts.seed) || 1, + transaction: opts.transaction ?? 
true,
+    });
+    process.exit(0);
+  } catch (error) {
+    console.error('Seeding failed:', error);
+    process.exit(1);
+  }
+};
diff --git a/bin/tbk.ts b/bin/tbk.ts
new file mode 100755
index 0000000..60063bc
--- /dev/null
+++ b/bin/tbk.ts
@@ -0,0 +1,72 @@
+import { Command } from 'commander';
+import { createPluginAction } from './actions/plugin';
+import { createMiddlewareAction } from './actions/middleware';
+import { createModuleAction } from './actions/module';
+import { createSeedAction } from './actions/seed';
+import { createMakeSeederAction } from './actions/makeSeeder';
+import { createMakeFactoryAction } from './actions/makeFactory';
+
+const program = new Command();
+
+program
+  .name('tbk')
+  .description('TypeScript Backend Toolkit CLI')
+  .version('1.0.0');
+
+program
+  .command('generate:plugin <name>')
+  .alias('g:plugin')
+  .description('Generate a new plugin')
+  .action(createPluginAction);
+
+program
+  .command('generate:middleware <name>')
+  .alias('g:middleware')
+  .description('Generate a new middleware')
+  .action(createMiddlewareAction);
+
+program
+  .command('generate:module <name>')
+  .alias('g:module')
+  .description(
+    'Generate a complete module with all files (dto, model, schema, services, controller, router)',
+  )
+  .option('-p, --path <path>', 'API path prefix', '/api')
+  .action(createModuleAction);
+
+// Seeder commands
+program
+  .command('seed')
+  .description('Run database seeders')
+  .option('-g, --group <group>', 'Group to run (base|dev|test|demo)', 'dev')
+  .option('--only <names>', 'Comma separated seeder names')
+  .option('--fresh', 'Drop involved collections before seeding')
+  .option('--force', 'Force run in production')
+  .option('--dry-run', 'Do not write, only log actions')
+  .option(
+    '--seed <number>',
+    'Random seed for data generation',
+    (v) => Number(v),
+    1,
+  )
+  .option('--no-transaction', 'Disable transactions')
+  .action(createSeedAction);
+
+program
+  .command('make:seeder <module>/<name>')
+  .description('Scaffold a new module seeder')
+  .option('-c, --count <n>', 'Default count for dev/test', '5')
+  .option('-u, --unique <field>', 'Unique field to upsert by')
+  .option('-d, --depends-on <list>', 'Comma-separated additional dependencies')
+  .option('--model <name>', 'Model export name when not default')
+  .action(createMakeSeederAction);
+
+program
+  .command('make:factory <module>/<name>')
+  .description('Scaffold a new module factory')
+  .option('--model <name>', 'Model export name when not default')
+  .option('--use <mode>', 'Prefer using service create function when present', 'service')
+  .option('--id-type <type>', 'Hint for _id type when ambiguous')
+  .action(createMakeFactoryAction);
+
+program.parse();
diff --git a/bin/utils/introspectModel.ts b/bin/utils/introspectModel.ts
new file mode 100644
index 0000000..4d1977d
--- /dev/null
+++ b/bin/utils/introspectModel.ts
@@ -0,0 +1,139 @@
+import path from 'path';
+import fs from 'fs/promises';
+
+export type FieldInfo = {
+  path: string;
+  type: string; // string|number|boolean|date|objectId|array|subdocument|mixed
+  required: boolean;
+  enumValues?: string[];
+  isArray?: boolean;
+  refModelName?: string;
+  unique?: boolean;
+};
+
+export type ModelInfo = {
+  modelName: string;
+  modelExport: string; // variable to import
+  modelFilePath: string; // absolute
+  dtoFilePath?: string; // absolute if exists
+  servicesFilePath?: string; // absolute if exists
+  fields: FieldInfo[];
+};
+
+function pascalCase(str: string) {
+  return str
+    .replace(/[-_\s]+(.)?/g, (_, c) => (c ? c.toUpperCase() : ''))
+    .replace(/^(.)/, (m) => m.toUpperCase());
+}
+
+export async function resolveModulePaths(moduleName: string, baseName: string) {
+  const projectRoot = process.cwd();
+  const moduleDir = path.join(projectRoot, 'src', 'modules', moduleName);
+  const modelFilePath = path.join(moduleDir, `${baseName}.model.ts`);
+  const dtoFilePath = path.join(moduleDir, `${baseName}.dto.ts`);
+  const servicesFilePath = path.join(moduleDir, `${baseName}.services.ts`);
+  return {
+    moduleDir,
+    modelFilePath,
+    dtoFilePath: await exists(dtoFilePath) ? dtoFilePath : undefined,
+    servicesFilePath: await exists(servicesFilePath) ? servicesFilePath : undefined,
+  };
+}
+
+async function exists(p: string) {
+  try {
+    await fs.stat(p);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+export async function introspectModel(
+  moduleName: string,
+  baseName: string,
+  options?: { modelExport?: string },
+): Promise<ModelInfo> {
+  const { modelFilePath, dtoFilePath, servicesFilePath } = await resolveModulePaths(
+    moduleName,
+    baseName,
+  );
+
+  const { pathToFileURL } = await import('url');
+  // Dynamic import of the TS file via tsx runtime using file URL
+  const mod = await import(pathToFileURL(modelFilePath).href);
+
+  const exportName = options?.modelExport || 'default';
+  const model: any = exportName === 'default' ? mod.default : mod[exportName];
+  if (!model || !model.schema) {
+    throw new Error(`Could not load model from ${modelFilePath}. Export: ${exportName}`);
+  }
+
+  const fields: FieldInfo[] = [];
+  const schemaPaths = (model.schema as any).paths || {};
+  for (const [p, schemaType] of Object.entries(schemaPaths)) {
+    if (p === '__v') continue;
+    const instance: string | undefined = schemaType.instance;
+    const optionsObj: any = schemaType.options || {};
+    const enumValues: string[] | undefined = optionsObj.enum;
+    let required = false;
+    try {
+      required = typeof schemaType.isRequired === 'function' ?
!!schemaType.isRequired() : !!optionsObj.required; + } catch { + required = !!optionsObj.required; + } + const unique = !!optionsObj.unique; + let type = (instance || 'mixed').toLowerCase(); + const isArray = instance === 'Array'; + let refModelName: string | undefined; + + // Detect refs (single) + if ( + optionsObj && + optionsObj.ref && + (instance === 'ObjectId' || instance === 'ObjectID') + ) { + refModelName = String(optionsObj.ref); + type = 'objectid'; + } + + // Arrays + const caster: any = schemaType.caster || schemaType.$embeddedSchemaType; + if (isArray) { + if (caster && caster.options && caster.options.ref && (caster.instance === 'ObjectId' || caster.instance === 'ObjectID')) { + refModelName = String(caster.options.ref); + type = 'objectid'; + } else if (caster && caster.instance) { + type = String(caster.instance).toLowerCase(); + } else { + type = 'array'; + } + } + + fields.push({ path: p, type, required, enumValues, isArray, refModelName, unique }); + } + + const modelExport = options?.modelExport || 'default'; + const modelName = model.modelName || pascalCase(baseName); + + return { + modelName, + modelExport, + modelFilePath, + dtoFilePath, + servicesFilePath, + fields, + }; +} + +export function findCreateFunctionName(baseName: string, servicesSource: string): string | undefined { + const pascal = pascalCase(baseName); + const candidates = [ + `export const create${pascal}`, + `export function create${pascal}`, + ]; + for (const c of candidates) { + if (servicesSource.includes(c)) return `create${pascal}`; + } + return undefined; +} diff --git a/build.ts b/build.ts index fd08a6e..0d4cf56 100644 --- a/build.ts +++ b/build.ts @@ -10,4 +10,5 @@ export default defineConfig({ platform: 'node', tsconfig: 'tsconfig.json', keepNames: true, + external: ['readline/promises'], }); diff --git a/docker-compose.yml b/docker-compose.yml index e725df1..bb52529 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,7 +13,7 @@ services: redis: image: redis:latest ports: - - 6380:6379 + - 6379:6379 volumes: - redis_ts_toolkit:/data diff --git a/docs/EMAIL.md b/docs/EMAIL.md deleted file mode 100644 index 4a2867b..0000000 --- a/docs/EMAIL.md +++ /dev/null @@ -1,120 +0,0 @@ -# Email Service Documentation - -This document outlines the email service implementation using React Email for templating and Mailgun for delivery. - -## Overview - -The email service provides a robust, type-safe way to send transactional emails using: -- [React Email](https://react.email/) for building and maintaining email templates -- [Mailgun](https://www.mailgun.com/) for reliable email delivery -- TypeScript for type safety and better developer experience - -## Configuration - -### Environment Variables - -Add the following variables to your `.env` file: - -```env -MAILGUN_API_KEY="your-mailgun-api-key" -MAILGUN_DOMAIN="your-mailgun-domain" -MAILGUN_FROM_EMAIL="noreply@yourdomain.com" -``` - -## Email Templates - -Email templates are built using React Email components and are located in `src/email/templates/`. Each template is a React component that accepts typed props for the dynamic content. - -### Available Templates - -1. 
**Reset Password Email** (`ResetPassword.tsx`) - ```typescript - interface ResetPasswordEmailProps { - userName: string; - resetLink: string; - } - ``` - -## Usage - -### Sending Reset Password Email - -```typescript -import { sendResetPasswordEmail } from '../email/email.service'; - -await sendResetPasswordEmail({ - email: 'user@example.com', - userName: 'John Doe', - resetLink: 'https://yourdomain.com/reset-password?token=xyz' -}); -``` - -### Creating New Email Templates - -1. Create a new template in `src/email/templates/` -2. Use React Email components for consistent styling -3. Export the template component with proper TypeScript interfaces -4. Add a new method in `EmailService` class to send the email - -Example: -```typescript -// src/email/templates/WelcomeEmail.tsx -import * as React from 'react'; -import { Button, Container, Head, Html, Preview, Text } from '@react-email/components'; - -interface WelcomeEmailProps { - userName: string; -} - -export const WelcomeEmail = ({ userName }: WelcomeEmailProps) => ( - - - Welcome to our platform - - Welcome {userName}! - - - -); - -export default WelcomeEmail; -``` - -## Error Handling - -The email service includes comprehensive error handling: - -- Custom `EmailError` class for email-specific errors -- Detailed error logging using the application logger -- Type-safe error propagation - -## Benefits - -1. **Type Safety**: Full TypeScript support for templates and service methods -2. **Maintainable Templates**: React components for building and maintaining email templates -3. **Reliable Delivery**: Mailgun integration for professional email delivery -4. **Error Handling**: Comprehensive error handling and logging -5. **Developer Experience**: Easy to create and modify email templates using React - -## Migration from Nodemailer - -The service maintains backward compatibility with the previous Nodemailer implementation through exported functions. The internal implementation has been updated to use React Email and Mailgun while keeping the same interface. - -## Testing Emails - -To test emails in development: - -1. Set up a Mailgun sandbox domain (free) -2. Use the sandbox domain and API key in your `.env.development` -3. Add verified recipient emails in Mailgun sandbox settings -4. Use these verified emails for testing - -## Best Practices - -1. Always use TypeScript interfaces for template props -2. Include proper error handling in your email sending logic -3. Use React Email components for consistent styling -4. Test emails with different email clients -5. 
Keep templates simple and mobile-responsive diff --git a/ecosystem.config.js b/ecosystem.config.js index 1337079..04618e1 100644 --- a/ecosystem.config.js +++ b/ecosystem.config.js @@ -3,6 +3,21 @@ module.exports = { { name: 'typescript-backend-toolkit', script: './dist/main.js', + instances: 'max', + exec_mode: 'cluster', + env_file: '.env', + env: { + NODE_ENV: 'production', + }, + watch: false, + autorestart: true, + max_memory_restart: '512M', + exp_backoff_restart_delay: 100, + merge_logs: true, + time: true, + kill_timeout: 30000, + instance_var: 'INSTANCE_ID', + node_args: ['--enable-source-maps'], }, ], }; diff --git a/eslint.config.mjs b/eslint.config.mjs index c3c6d61..a0aeb64 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -16,13 +16,17 @@ export default [ 'dist', '.database', '.database/*', + 'public/*', + 'public/**/*', + 'src/plugins/admin/*', + 'src/plugins/magic/*', ], }, pluginJs.configs.recommended, ...tseslint.configs.recommended, { rules: { - "@typescript-eslint/no-explicit-any": "warn", + '@typescript-eslint/no-explicit-any': 'warn', '@typescript-eslint/no-unused-vars': [ 'error', { diff --git a/modules.d.ts b/modules.d.ts index 9c7c238..ddc7710 100644 --- a/modules.d.ts +++ b/modules.d.ts @@ -1,12 +1,19 @@ import { Server } from 'socket.io'; -import { JwtPayload } from '../utils/auth.utils'; -import { Config } from './src/config/config.service'; +import { JwtPayload } from '@/utils/jwt.utils'; +import { Config } from '@/config/env'; +import { SessionRecord } from '@/modules/auth/session/session.types'; +import { SessionManager } from '@/modules/auth/session/session.manager'; declare global { namespace Express { export interface Request { user: JwtPayload; io: Server; + session?: SessionRecord; + } + + export interface Locals { + sessionManager?: SessionManager; } } diff --git a/package.json b/package.json index 35b0732..6a8fea6 100644 --- a/package.json +++ b/package.json @@ -4,29 +4,32 @@ "description": "", "main": "dist/main.js", "scripts": { + "dev": "concurrently \"pnpm start:dev\" \"pnpm email:dev\"", "start:dev": "dotenv -e .env.development -- tsx --watch ./src/main.ts", - "seeder": "tsx ./src/seeder.ts", "build": "tsup --config build.ts", "start:prod": "dotenv -e .env.production -- node ./dist/main.js", "start:local": "dotenv -e .env.local -- node ./dist/main.js", + "typecheck": "tsc --noEmit", "lint": "eslint", "lint:fix": "eslint --fix", + "openapi": "dotenv -e .env.development -- tsx scripts/gen-openapi.ts", + "seed": "dotenv -e .env.development -- tsx scripts/seed.ts", "email:dev": "email dev --dir ./src/email/templates", - "dev": "concurrently \"pnpm start:dev\" \"pnpm email:dev\"" + "tbk": "dotenv -e .env.development -- tsx bin/tbk", + "gen-sdk": "npx swagger-typescript-api generate --path ./public/openapi.yml --output ./src/generated" }, "devDependencies": { "@eslint/js": "^9.4.0", "@types/cookie-parser": "^1.4.3", "@types/cors": "^2.8.13", "@types/express": "^4.17.15", - "@types/express-session": "^1.17.5", + "@types/express-rate-limit": "^6.0.2", + "@types/formidable": "^3.4.6", "@types/helmet": "^4.0.0", "@types/http-status-codes": "^1.2.0", "@types/jsonwebtoken": "^9.0.6", "@types/memory-cache": "^0.2.2", "@types/morgan": "^1.9.4", - "@types/multer": "^1.4.7", - "@types/multer-s3": "^3.0.3", "@types/node": "^18.11.18", "@types/nodemailer": "^6.4.8", "@types/passport": "^1.0.11", @@ -34,6 +37,7 @@ "@types/validator": "^13.7.17", "@typescript-eslint/eslint-plugin": "^5.62.0", "@typescript-eslint/parser": "^7.11.0", + "commander": "^14.0.1", 
"concurrently": "^9.1.0", "esbuild": "^0.19.8", "eslint": "~9.4.0", @@ -60,7 +64,6 @@ "axios": "^1.4.0", "bullmq": "^5.7.6", "compression": "^1.7.4", - "connect-redis": "^7.1.1", "cookie-parser": "^1.4.6", "cors": "^2.8.5", "cross-env": "^7.0.3", @@ -68,16 +71,15 @@ "dotenv-cli": "^7.4.2", "express": "^4.19.2", "express-async-handler": "^1.2.0", - "express-session": "^1.18.0", + "express-rate-limit": "^8.1.0", + "form-data": "^4.0.4", + "formidable": "^3.5.4", "helmet": "^6.0.1", - "http-status-codes": "^2.3.0", "ioredis": "^5.3.2", "jsonwebtoken": "^9.0.2", "mailgun.js": "^10.2.4", "mongoose": "^8.5.1", "morgan": "^1.10.0", - "multer": "^1.4.5-lts.1", - "multer-s3": "^3.0.1", "nanoid": "^3.3.7", "nodemailer": "^6.9.13", "openapi3-ts": "^4.3.3", @@ -86,8 +88,10 @@ "pino": "^9.1.0", "pino-http": "^10.1.0", "pino-pretty": "^11.1.0", + "prom-client": "^15.1.3", "react": "^18.3.1", "react-email": "^3.0.2", + "resend": "^4.0.0", "redis": "^4.6.11", "socket.io": "^4.7.5", "swagger-ui-express": "^5.0.1", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 383fe70..a132240 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -44,9 +44,6 @@ importers: compression: specifier: ^1.7.4 version: 1.8.0 - connect-redis: - specifier: ^7.1.1 - version: 7.1.1(express-session@1.18.1) cookie-parser: specifier: ^1.4.6 version: 1.4.7 @@ -68,15 +65,18 @@ importers: express-async-handler: specifier: ^1.2.0 version: 1.2.0 - express-session: - specifier: ^1.18.0 - version: 1.18.1 + express-rate-limit: + specifier: ^8.1.0 + version: 8.1.0(express@4.21.2) + form-data: + specifier: ^4.0.4 + version: 4.0.4 + formidable: + specifier: ^3.5.4 + version: 3.5.4 helmet: specifier: ^6.0.1 version: 6.2.0 - http-status-codes: - specifier: ^2.3.0 - version: 2.3.0 ioredis: specifier: ^5.3.2 version: 5.5.0 @@ -92,12 +92,6 @@ importers: morgan: specifier: ^1.10.0 version: 1.10.0 - multer: - specifier: ^1.4.5-lts.1 - version: 1.4.5-lts.1 - multer-s3: - specifier: ^3.0.1 - version: 3.0.1(@aws-sdk/client-s3@3.750.0) nanoid: specifier: ^3.3.7 version: 3.3.8 @@ -122,6 +116,9 @@ importers: pino-pretty: specifier: ^11.1.0 version: 11.3.0 + prom-client: + specifier: ^15.1.3 + version: 15.1.3 react: specifier: ^18.3.1 version: 18.3.1 @@ -131,6 +128,9 @@ importers: redis: specifier: ^4.6.11 version: 4.7.0 + resend: + specifier: ^4.0.0 + version: 4.8.0(react-dom@19.0.0(react@18.3.1))(react@18.3.1) socket.io: specifier: ^4.7.5 version: 4.8.1 @@ -159,9 +159,12 @@ importers: '@types/express': specifier: ^4.17.15 version: 4.17.21 - '@types/express-session': - specifier: ^1.17.5 - version: 1.18.1 + '@types/express-rate-limit': + specifier: ^6.0.2 + version: 6.0.2(express@4.21.2) + '@types/formidable': + specifier: ^3.4.6 + version: 3.4.6 '@types/helmet': specifier: ^4.0.0 version: 4.0.0 @@ -177,12 +180,6 @@ importers: '@types/morgan': specifier: ^1.9.4 version: 1.9.9 - '@types/multer': - specifier: ^1.4.7 - version: 1.4.12 - '@types/multer-s3': - specifier: ^3.0.3 - version: 3.0.3 '@types/node': specifier: ^18.11.18 version: 18.19.76 @@ -204,6 +201,9 @@ importers: '@typescript-eslint/parser': specifier: ^7.11.0 version: 7.18.0(eslint@9.4.0)(typescript@5.7.3) + commander: + specifier: ^14.0.1 + version: 14.0.1 concurrently: specifier: ^9.1.0 version: 9.1.2 @@ -221,7 +221,7 @@ importers: version: 2.31.0(@typescript-eslint/parser@7.18.0(eslint@9.4.0)(typescript@5.7.3))(eslint@9.4.0) eslint-plugin-prettier: specifier: ^5.1.3 - version: 5.2.3(eslint-config-prettier@9.1.0(eslint@9.4.0))(eslint@9.4.0)(prettier@3.4.2) + version: 
5.2.3(eslint-config-prettier@9.1.0(eslint@9.4.0))(eslint@9.4.0)(prettier@3.6.2) globals: specifier: ^15.3.0 version: 15.15.0 @@ -315,12 +315,6 @@ packages: resolution: {integrity: sha512-Nz8zs3YJ+GOTSrq+LyzbbC1Ffpt7pK38gcOyNZv76pP5MswKTUKNYBJehqwa+i7FcFQHsCk3TdhR8MT1ZR23uA==} engines: {node: '>=18.0.0'} - '@aws-sdk/lib-storage@3.750.0': - resolution: {integrity: sha512-2IHbhUzlKtiAZVW7S5jkJfVDj5pJC9TldHGJLYRAR9GReG9HhK6mI7kLnYE9jf3GchWfe/Bn3wqSwh3BIf0OZQ==} - engines: {node: '>=18.0.0'} - peerDependencies: - '@aws-sdk/client-s3': ^3.750.0 - '@aws-sdk/middleware-bucket-endpoint@3.734.0': resolution: {integrity: sha512-etC7G18aF7KdZguW27GE/wpbrNmYLVT755EsFc8kXpZj8D6AFKxc7OuveinJmiy0bYXAMspJUWsF6CrGpOw6CQ==} engines: {node: '>=18.0.0'} @@ -1329,6 +1323,10 @@ packages: cpu: [x64] os: [win32] + '@noble/hashes@1.8.0': + resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==} + engines: {node: ^14.21.3 || >=16} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -1348,6 +1346,9 @@ packages: resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} engines: {node: '>=8.0.0'} + '@paralleldrive/cuid2@2.2.2': + resolution: {integrity: sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==} + '@phc/format@1.0.0': resolution: {integrity: sha512-m7X9U6BG2+J+R1lSOdCiITLLrxm+cWlNI3HUFA92oLO77ObGNzaKdh8pMLqdZcshtkKuV84olNNXDfMc4FezBQ==} engines: {node: '>=10'} @@ -1468,6 +1469,13 @@ packages: react: ^18.0 || ^19.0 || ^19.0.0-rc react-dom: ^18.0 || ^19.0 || ^19.0.0-rc + '@react-email/render@1.1.2': + resolution: {integrity: sha512-RnRehYN3v9gVlNMehHPHhyp2RQo7+pSkHDtXPvg3s0GbzM9SQMW4Qrf8GRNvtpLC4gsI+Wt0VatNRUFqjvevbw==} + engines: {node: '>=18.0.0'} + peerDependencies: + react: ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^18.0 || ^19.0 || ^19.0.0-rc + '@react-email/row@0.0.11': resolution: {integrity: sha512-ra09h7BMoGa14ds3vh7KVuj1N3astTstEC1YbMdCiHcx/nxylglNaT7qJXU74ZTzyHiGabyiNuyabTS+HLoMCA==} engines: {node: '>=18.0.0'} @@ -1866,15 +1874,19 @@ packages: '@types/estree@1.0.6': resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} + '@types/express-rate-limit@6.0.2': + resolution: {integrity: sha512-e1xZLOOlxCDvplAGq7rDcXtbdBu2CWRsMjaIu1LVqGxWtKvwr884YE5mPs3IvHeG/OMDhf24oTaqG5T1bV3rBQ==} + deprecated: This is a stub types definition. express-rate-limit provides its own type definitions, so you do not need this installed. + '@types/express-serve-static-core@4.19.6': resolution: {integrity: sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==} - '@types/express-session@1.18.1': - resolution: {integrity: sha512-S6TkD/lljxDlQ2u/4A70luD8/ZxZcrU5pQwI1rVXCiaVIywoFgbA+PIUNDjPhQpPdK0dGleLtYc/y7XWBfclBg==} - '@types/express@4.17.21': resolution: {integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==} + '@types/formidable@3.4.6': + resolution: {integrity: sha512-LI4Hk+KNsM5q7br4oMVoaWeb+gUqJpz1N8+Y2Q6Cz9cVH33ybahRKUWaRmMboVlkwSbOUGgwc/pEkS7yMSzoWg==} + '@types/helmet@4.0.0': resolution: {integrity: sha512-ONIn/nSNQA57yRge3oaMQESef/6QhoeX7llWeDli0UZIfz8TQMkfNPTXA8VnnyeA1WUjG2pGqdjEIueYonMdfQ==} deprecated: This is a stub types definition. 
helmet provides its own type definitions, so you do not need this installed. @@ -1907,12 +1919,6 @@ packages: '@types/ms@2.1.0': resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} - '@types/multer-s3@3.0.3': - resolution: {integrity: sha512-VgWygI9UwyS7loLithUUi0qAMIDWdNrERS2Sb06UuPYiLzKuIFn2NgL7satyl4v8sh/LLoU7DiPanvbQaRg9Yg==} - - '@types/multer@1.4.12': - resolution: {integrity: sha512-pQ2hoqvXiJt2FP9WQVLPRO+AmiIm/ZYkavPlIQnx282u4ZrVdztx0pkh3jjpQt0Kz+YI0YhSG264y08UJKoUQg==} - '@types/node@18.19.76': resolution: {integrity: sha512-yvR7Q9LdPz2vGpmpJX5LolrgRdWvB67MJKDPSgIIzpFbaf9a1j/f5DnLp5VDyHGMR0QZHlTr1afsD87QCXFHKw==} @@ -2112,9 +2118,6 @@ packages: any-promise@1.3.0: resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} - append-field@1.0.0: - resolution: {integrity: sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==} - aproba@2.0.0: resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==} @@ -2161,6 +2164,9 @@ packages: resolution: {integrity: sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==} engines: {node: '>= 0.4'} + asap@2.0.6: + resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} + async-function@1.0.0: resolution: {integrity: sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==} engines: {node: '>= 0.4'} @@ -2199,6 +2205,9 @@ packages: resolution: {integrity: sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==} engines: {node: '>= 0.8'} + bintrees@1.0.2: + resolution: {integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==} + bl@4.1.0: resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} @@ -2231,12 +2240,6 @@ packages: buffer-equal-constant-time@1.0.1: resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} - buffer-from@1.1.2: - resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} - - buffer@5.6.0: - resolution: {integrity: sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw==} - buffer@5.7.1: resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} @@ -2351,6 +2354,10 @@ packages: resolution: {integrity: sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==} engines: {node: '>=16'} + commander@14.0.1: + resolution: {integrity: sha512-2JkV3gUZUVrbNA+1sjBOYLsMZ5cEEl8GTFP2a4AVz5hvasAMCQ1D2l2le/cX+pV4N6ZU17zjUahLpIXRrnWL8A==} + engines: {node: '>=20'} + commander@4.1.1: resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} engines: {node: '>= 6'} @@ -2366,10 +2373,6 @@ packages: concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} - concat-stream@1.6.2: - resolution: {integrity: sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==} - engines: {'0': node 
>= 0.8} - concurrently@9.1.2: resolution: {integrity: sha512-H9MWcoPsYddwbOGM6difjVwVZHl63nwMEwDJG/L7VGtuaJhb12h2caPG2tVPWs7emuYix252iGfqOyrz1GczTQ==} engines: {node: '>=18'} @@ -2378,12 +2381,6 @@ packages: config-chain@1.1.13: resolution: {integrity: sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==} - connect-redis@7.1.1: - resolution: {integrity: sha512-M+z7alnCJiuzKa8/1qAYdGUXHYfDnLolOGAUjOioB07pP39qxjG+X9ibsud7qUBc4jMV5Mcy3ugGv8eFcgamJQ==} - engines: {node: '>=16'} - peerDependencies: - express-session: '>=1' - consola@3.4.0: resolution: {integrity: sha512-EiPU8G6dQG0GFHNR8ljnZFki/8a+cQwEQ+7wpxdChl02Q8HXlwEZWD5lqAF8vC2sEC3Tehr8hy7vErz88LHyUA==} engines: {node: ^14.18.0 || >=16.10.0} @@ -2409,9 +2406,6 @@ packages: cookie-signature@1.0.6: resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} - cookie-signature@1.0.7: - resolution: {integrity: sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==} - cookie@0.7.1: resolution: {integrity: sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==} engines: {node: '>= 0.6'} @@ -2420,9 +2414,6 @@ packages: resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} engines: {node: '>= 0.6'} - core-util-is@1.0.3: - resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} - cors@2.8.5: resolution: {integrity: sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==} engines: {node: '>= 0.10'} @@ -2537,6 +2528,9 @@ packages: resolution: {integrity: sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==} engines: {node: '>=8'} + dezalgo@1.0.4: + resolution: {integrity: sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==} + dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} @@ -2798,9 +2792,11 @@ packages: express-async-handler@1.2.0: resolution: {integrity: sha512-rCSVtPXRmQSW8rmik/AIb2P0op6l7r1fMW538yyvTMltCO4xQEWMmobfrIxN2V1/mVrgxB8Az3reYF6yUZw37w==} - express-session@1.18.1: - resolution: {integrity: sha512-a5mtTqEaZvBCL9A9aqkrtfz+3SMDhOVUnjafjo+s7A9Txkq+SVX2DLvSp1Zrv4uCXa3lMSK3viWnh9Gg07PBUA==} - engines: {node: '>= 0.8.0'} + express-rate-limit@8.1.0: + resolution: {integrity: sha512-4nLnATuKupnmwqiJc27b4dCFmB/T60ExgmtDD7waf4LdrbJ8CPZzZRHYErDYNhoz+ql8fUdYwM/opf90PoPAQA==} + engines: {node: '>= 16'} + peerDependencies: + express: '>= 4.11' express@4.21.2: resolution: {integrity: sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==} @@ -2854,10 +2850,6 @@ packages: resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} engines: {node: '>=16.0.0'} - file-type@3.9.0: - resolution: {integrity: sha512-RLoqTXE8/vPmMuTI88DAzhMYC99I8BWv7zYP4A1puo5HIjEJ5EX48ighy4ZyKMG9EDXxBgW6e++cn7d1xuFghA==} - engines: {node: '>=0.10.0'} - filelist@1.0.4: resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==} @@ -2897,10 +2889,14 @@ packages: resolution: {integrity: 
sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==} engines: {node: '>=14'} - form-data@4.0.2: - resolution: {integrity: sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==} + form-data@4.0.4: + resolution: {integrity: sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==} engines: {node: '>= 6'} + formidable@3.5.4: + resolution: {integrity: sha512-YikH+7CUTOtP44ZTnUhR7Ic2UASBPOqmaRkRKxRbywPTe5VxF7RRCck4af9wutiZ/QKM5nME9Bie2fFaPz5Gug==} + engines: {node: '>=14.0.0'} + forwarded@0.2.0: resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} engines: {node: '>= 0.6'} @@ -3048,9 +3044,6 @@ packages: help-me@5.0.0: resolution: {integrity: sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==} - html-comment-regex@1.1.2: - resolution: {integrity: sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ==} - html-to-text@9.0.5: resolution: {integrity: sha512-qY60FjREgVZL03vJU6IfMV4GDjGBIoOyvuFdpBDIX9yTlDw0TjxVBQp+P8NvpdIXNJvfWBTNul7fsAQJq2FNpg==} engines: {node: '>=14'} @@ -3106,6 +3099,10 @@ packages: resolution: {integrity: sha512-7CutT89g23FfSa8MDoIFs2GYYa0PaNiW/OrT+nRyjRXHDZd17HmIgy+reOQ/yhh72NznNjGuS8kbCAcA4Ro4mw==} engines: {node: '>=12.22.0'} + ip-address@10.0.1: + resolution: {integrity: sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==} + engines: {node: '>= 12'} + ipaddr.js@1.9.1: resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} engines: {node: '>= 0.10'} @@ -3225,9 +3222,6 @@ packages: resolution: {integrity: sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==} engines: {node: '>= 0.4'} - isarray@1.0.0: - resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} - isarray@2.0.5: resolution: {integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==} @@ -3481,10 +3475,6 @@ packages: resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} engines: {node: '>= 8'} - mkdirp@0.5.6: - resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} - hasBin: true - mkdirp@1.0.4: resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} engines: {node: '>=10'} @@ -3549,16 +3539,6 @@ packages: msgpackr@1.11.2: resolution: {integrity: sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g==} - multer-s3@3.0.1: - resolution: {integrity: sha512-BFwSO80a5EW4GJRBdUuSHblz2jhVSAze33ZbnGpcfEicoT0iRolx4kWR+AJV07THFRCQ78g+kelKFdjkCCaXeQ==} - engines: {node: '>= 12.0.0'} - peerDependencies: - '@aws-sdk/client-s3': ^3.0.0 - - multer@1.4.5-lts.1: - resolution: {integrity: sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==} - engines: {node: '>= 6.0.0'} - mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} @@ -3848,13 +3828,15 @@ packages: engines: {node: '>=14'} hasBin: true + prettier@3.6.2: + resolution: {integrity: 
sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==} + engines: {node: '>=14'} + hasBin: true + prismjs@1.29.0: resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==} engines: {node: '>=6'} - process-nextick-args@2.0.1: - resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} - process-warning@4.0.1: resolution: {integrity: sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q==} @@ -3862,6 +3844,10 @@ packages: resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} engines: {node: '>= 0.6.0'} + prom-client@15.1.3: + resolution: {integrity: sha512-6ZiOBfCywsD4k1BN9IX0uZhF+tJkV8q8llP64G5Hajs4JOeVLPCwpPVcpXy3BwYiUGgyJzsJJQeOIv7+hDSq8g==} + engines: {node: ^16 || ^18 || >=20} + proto-list@1.2.4: resolution: {integrity: sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==} @@ -3889,10 +3875,6 @@ packages: quick-format-unescaped@4.0.4: resolution: {integrity: sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==} - random-bytes@1.0.0: - resolution: {integrity: sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==} - engines: {node: '>= 0.8'} - range-parser@1.2.1: resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} engines: {node: '>= 0.6'} @@ -3918,9 +3900,6 @@ packages: resolution: {integrity: sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} engines: {node: '>=0.10.0'} - readable-stream@2.3.8: - resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} - readable-stream@3.6.2: resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} engines: {node: '>= 6'} @@ -3963,6 +3942,10 @@ packages: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} engines: {node: '>=0.10.0'} + resend@4.8.0: + resolution: {integrity: sha512-R8eBOFQDO6dzRTDmaMEdpqrkmgSjPpVXt4nGfWsZdYOet0kqra0xgbvTES6HmCriZEXbmGk3e0DiGIaLFTFSHA==} + engines: {node: '>=18'} + resolve-from@4.0.0: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} @@ -4161,9 +4144,6 @@ packages: resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} engines: {node: '>= 0.8'} - stream-browserify@3.0.0: - resolution: {integrity: sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==} - streamsearch@1.1.0: resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} engines: {node: '>=10.0.0'} @@ -4188,9 +4168,6 @@ packages: resolution: {integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==} engines: {node: '>= 0.4'} - string_decoder@1.1.1: - resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} - string_decoder@1.3.0: resolution: {integrity: 
sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} @@ -4260,6 +4237,9 @@ packages: resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} engines: {node: '>=10'} + tdigest@0.1.2: + resolution: {integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==} + text-table@0.2.0: resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} @@ -4374,9 +4354,6 @@ packages: resolution: {integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==} engines: {node: '>= 0.4'} - typedarray@0.0.6: - resolution: {integrity: sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==} - typescript-eslint@7.18.0: resolution: {integrity: sha512-PonBkP603E3tt05lDkbOMyaxJjvKqQrXsnow72sVeOFINDE/qNmnnd+f9b4N+U7W6MXnnYyrhtmF2t08QWwUbA==} engines: {node: ^18.18.0 || >=20.0.0} @@ -4392,10 +4369,6 @@ packages: engines: {node: '>=14.17'} hasBin: true - uid-safe@2.1.5: - resolution: {integrity: sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==} - engines: {node: '>= 0.8'} - unbox-primitive@1.1.0: resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} engines: {node: '>= 0.4'} @@ -4512,10 +4485,6 @@ packages: utf-8-validate: optional: true - xtend@4.0.2: - resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} - engines: {node: '>=0.4'} - y18n@5.0.8: resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} engines: {node: '>=10'} @@ -4812,17 +4781,6 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/lib-storage@3.750.0(@aws-sdk/client-s3@3.750.0)': - dependencies: - '@aws-sdk/client-s3': 3.750.0 - '@smithy/abort-controller': 4.0.1 - '@smithy/middleware-endpoint': 4.0.5 - '@smithy/smithy-client': 4.1.5 - buffer: 5.6.0 - events: 3.3.0 - stream-browserify: 3.0.0 - tslib: 2.8.1 - '@aws-sdk/middleware-bucket-endpoint@3.734.0': dependencies: '@aws-sdk/types': 3.734.0 @@ -5648,6 +5606,8 @@ snapshots: '@next/swc-win32-x64-msvc@15.1.2': optional: true + '@noble/hashes@1.8.0': {} + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -5662,8 +5622,11 @@ snapshots: '@one-ini/wasm@0.1.1': {} - '@opentelemetry/api@1.9.0': - optional: true + '@opentelemetry/api@1.9.0': {} + + '@paralleldrive/cuid2@2.2.2': + dependencies: + '@noble/hashes': 1.8.0 '@phc/format@1.0.0': {} @@ -5776,6 +5739,14 @@ snapshots: react-dom: 19.0.0(react@18.3.1) react-promise-suspense: 0.3.4 + '@react-email/render@1.1.2(react-dom@19.0.0(react@18.3.1))(react@18.3.1)': + dependencies: + html-to-text: 9.0.5 + prettier: 3.6.2 + react: 18.3.1 + react-dom: 19.0.0(react@18.3.1) + react-promise-suspense: 0.3.4 + '@react-email/row@0.0.11(react@18.3.1)': dependencies: react: 18.3.1 @@ -6246,6 +6217,12 @@ snapshots: '@types/estree@1.0.6': {} + '@types/express-rate-limit@6.0.2(express@4.21.2)': + dependencies: + express-rate-limit: 8.1.0(express@4.21.2) + transitivePeerDependencies: + - express + '@types/express-serve-static-core@4.19.6': dependencies: '@types/node': 18.19.76 @@ -6253,10 +6230,6 @@ snapshots: '@types/range-parser': 1.2.7 '@types/send': 0.17.4 - '@types/express-session@1.18.1': - 
dependencies: - '@types/express': 4.17.21 - '@types/express@4.17.21': dependencies: '@types/body-parser': 1.19.5 @@ -6264,6 +6237,10 @@ snapshots: '@types/qs': 6.9.18 '@types/serve-static': 1.15.7 + '@types/formidable@3.4.6': + dependencies: + '@types/node': 18.19.76 + '@types/helmet@4.0.0': dependencies: helmet: 6.2.0 @@ -6293,18 +6270,6 @@ snapshots: '@types/ms@2.1.0': {} - '@types/multer-s3@3.0.3': - dependencies: - '@aws-sdk/client-s3': 3.750.0 - '@types/multer': 1.4.12 - '@types/node': 18.19.76 - transitivePeerDependencies: - - aws-crt - - '@types/multer@1.4.12': - dependencies: - '@types/express': 4.17.21 - '@types/node@18.19.76': dependencies: undici-types: 5.26.5 @@ -6551,8 +6516,6 @@ snapshots: any-promise@1.3.0: {} - append-field@1.0.0: {} - aproba@2.0.0: {} are-we-there-yet@2.0.0: @@ -6622,6 +6585,8 @@ snapshots: get-intrinsic: 1.2.7 is-array-buffer: 3.0.5 + asap@2.0.6: {} + async-function@1.0.0: {} async@3.2.6: {} @@ -6637,7 +6602,7 @@ snapshots: axios@1.7.9: dependencies: follow-redirects: 1.15.9 - form-data: 4.0.2 + form-data: 4.0.4 proxy-from-env: 1.1.0 transitivePeerDependencies: - debug @@ -6654,6 +6619,8 @@ snapshots: dependencies: safe-buffer: 5.1.2 + bintrees@1.0.2: {} + bl@4.1.0: dependencies: buffer: 5.7.1 @@ -6703,13 +6670,6 @@ snapshots: buffer-equal-constant-time@1.0.1: {} - buffer-from@1.1.2: {} - - buffer@5.6.0: - dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - buffer@5.7.1: dependencies: base64-js: 1.5.1 @@ -6825,6 +6785,8 @@ snapshots: commander@11.1.0: {} + commander@14.0.1: {} + commander@4.1.1: {} compressible@2.0.18: @@ -6845,13 +6807,6 @@ snapshots: concat-map@0.0.1: {} - concat-stream@1.6.2: - dependencies: - buffer-from: 1.1.2 - inherits: 2.0.4 - readable-stream: 2.3.8 - typedarray: 0.0.6 - concurrently@9.1.2: dependencies: chalk: 4.1.2 @@ -6867,10 +6822,6 @@ snapshots: ini: 1.3.8 proto-list: 1.2.4 - connect-redis@7.1.1(express-session@1.18.1): - dependencies: - express-session: 1.18.1 - consola@3.4.0: {} console-control-strings@1.1.0: {} @@ -6890,14 +6841,10 @@ snapshots: cookie-signature@1.0.6: {} - cookie-signature@1.0.7: {} - cookie@0.7.1: {} cookie@0.7.2: {} - core-util-is@1.0.3: {} - cors@2.8.5: dependencies: object-assign: 4.1.1 @@ -6989,6 +6936,11 @@ snapshots: detect-libc@2.0.3: {} + dezalgo@1.0.4: + dependencies: + asap: 2.0.6 + wrappy: 1.0.2 + dir-glob@3.0.1: dependencies: path-type: 4.0.0 @@ -7330,10 +7282,10 @@ snapshots: - eslint-import-resolver-webpack - supports-color - eslint-plugin-prettier@5.2.3(eslint-config-prettier@9.1.0(eslint@9.4.0))(eslint@9.4.0)(prettier@3.4.2): + eslint-plugin-prettier@5.2.3(eslint-config-prettier@9.1.0(eslint@9.4.0))(eslint@9.4.0)(prettier@3.6.2): dependencies: eslint: 9.4.0 - prettier: 3.4.2 + prettier: 3.6.2 prettier-linter-helpers: 1.0.0 synckit: 0.9.2 optionalDependencies: @@ -7420,18 +7372,10 @@ snapshots: express-async-handler@1.2.0: {} - express-session@1.18.1: + express-rate-limit@8.1.0(express@4.21.2): dependencies: - cookie: 0.7.2 - cookie-signature: 1.0.7 - debug: 2.6.9 - depd: 2.0.0 - on-headers: 1.0.2 - parseurl: 1.3.3 - safe-buffer: 5.2.1 - uid-safe: 2.1.5 - transitivePeerDependencies: - - supports-color + express: 4.21.2 + ip-address: 10.0.1 express@4.21.2: dependencies: @@ -7509,8 +7453,6 @@ snapshots: dependencies: flat-cache: 4.0.1 - file-type@3.9.0: {} - filelist@1.0.4: dependencies: minimatch: 5.1.6 @@ -7554,13 +7496,20 @@ snapshots: cross-spawn: 7.0.6 signal-exit: 4.1.0 - form-data@4.0.2: + form-data@4.0.4: dependencies: asynckit: 0.4.0 combined-stream: 1.0.8 es-set-tostringtag: 2.1.0 
+ hasown: 2.0.2 mime-types: 2.1.35 + formidable@3.5.4: + dependencies: + '@paralleldrive/cuid2': 2.2.2 + dezalgo: 1.0.4 + once: 1.4.0 + forwarded@0.2.0: {} fresh@0.5.2: {} @@ -7719,8 +7668,6 @@ snapshots: help-me@5.0.0: {} - html-comment-regex@1.1.2: {} - html-to-text@9.0.5: dependencies: '@selderee/plugin-htmlparser2': 0.11.0 @@ -7797,6 +7744,8 @@ snapshots: transitivePeerDependencies: - supports-color + ip-address@10.0.1: {} + ipaddr.js@1.9.1: {} is-array-buffer@3.0.5: @@ -7915,8 +7864,6 @@ snapshots: call-bound: 1.0.3 get-intrinsic: 1.2.7 - isarray@1.0.0: {} - isarray@2.0.5: {} isexe@2.0.0: {} @@ -8139,10 +8086,6 @@ snapshots: minipass: 3.3.6 yallist: 4.0.0 - mkdirp@0.5.6: - dependencies: - minimist: 1.2.8 - mkdirp@1.0.4: {} mongodb-connection-string-url@3.0.2: @@ -8213,24 +8156,6 @@ snapshots: optionalDependencies: msgpackr-extract: 3.0.3 - multer-s3@3.0.1(@aws-sdk/client-s3@3.750.0): - dependencies: - '@aws-sdk/client-s3': 3.750.0 - '@aws-sdk/lib-storage': 3.750.0(@aws-sdk/client-s3@3.750.0) - file-type: 3.9.0 - html-comment-regex: 1.1.2 - run-parallel: 1.2.0 - - multer@1.4.5-lts.1: - dependencies: - append-field: 1.0.0 - busboy: 1.6.0 - concat-stream: 1.6.2 - mkdirp: 0.5.6 - object-assign: 4.1.1 - type-is: 1.6.18 - xtend: 4.0.2 - mz@2.7.0: dependencies: any-promise: 1.3.0 @@ -8524,14 +8449,19 @@ snapshots: prettier@3.4.2: {} - prismjs@1.29.0: {} + prettier@3.6.2: {} - process-nextick-args@2.0.1: {} + prismjs@1.29.0: {} process-warning@4.0.1: {} process@0.11.10: {} + prom-client@15.1.3: + dependencies: + '@opentelemetry/api': 1.9.0 + tdigest: 0.1.2 + proto-list@1.2.4: {} proxy-addr@2.0.7: @@ -8556,8 +8486,6 @@ snapshots: quick-format-unescaped@4.0.4: {} - random-bytes@1.0.0: {} - range-parser@1.2.1: {} raw-body@2.5.2: @@ -8608,16 +8536,6 @@ snapshots: dependencies: loose-envify: 1.4.0 - readable-stream@2.3.8: - dependencies: - core-util-is: 1.0.3 - inherits: 2.0.4 - isarray: 1.0.0 - process-nextick-args: 2.0.1 - safe-buffer: 5.1.2 - string_decoder: 1.1.1 - util-deprecate: 1.0.2 - readable-stream@3.6.2: dependencies: inherits: 2.0.4 @@ -8677,6 +8595,13 @@ snapshots: require-directory@2.1.1: {} + resend@4.8.0(react-dom@19.0.0(react@18.3.1))(react@18.3.1): + dependencies: + '@react-email/render': 1.1.2(react-dom@19.0.0(react@18.3.1))(react@18.3.1) + transitivePeerDependencies: + - react + - react-dom + resolve-from@4.0.0: {} resolve-from@5.0.0: {} @@ -8955,11 +8880,6 @@ snapshots: statuses@2.0.1: {} - stream-browserify@3.0.0: - dependencies: - inherits: 2.0.4 - readable-stream: 3.6.2 - streamsearch@1.1.0: {} string-width@4.2.3: @@ -8997,10 +8917,6 @@ snapshots: define-properties: 1.2.1 es-object-atoms: 1.1.1 - string_decoder@1.1.1: - dependencies: - safe-buffer: 5.1.2 - string_decoder@1.3.0: dependencies: safe-buffer: 5.2.1 @@ -9069,6 +8985,10 @@ snapshots: mkdirp: 1.0.4 yallist: 4.0.0 + tdigest@0.1.2: + dependencies: + bintrees: 1.0.2 + text-table@0.2.0: {} thenify-all@1.6.0: @@ -9206,8 +9126,6 @@ snapshots: possible-typed-array-names: 1.1.0 reflect.getprototypeof: 1.0.10 - typedarray@0.0.6: {} - typescript-eslint@7.18.0(eslint@9.4.0)(typescript@5.7.3): dependencies: '@typescript-eslint/eslint-plugin': 7.18.0(@typescript-eslint/parser@7.18.0(eslint@9.4.0)(typescript@5.7.3))(eslint@9.4.0)(typescript@5.7.3) @@ -9221,10 +9139,6 @@ snapshots: typescript@5.7.3: {} - uid-safe@2.1.5: - dependencies: - random-bytes: 1.0.0 - unbox-primitive@1.1.0: dependencies: call-bound: 1.0.3 @@ -9350,8 +9264,6 @@ snapshots: ws@8.17.1: {} - xtend@4.0.2: {} - y18n@5.0.8: {} yallist@3.1.1: {} diff --git 
a/public/admin/index.html b/public/admin/index.html new file mode 100644 index 0000000..d1e7db0 --- /dev/null +++ b/public/admin/index.html @@ -0,0 +1,334 @ + [Admin Dashboard page: 334 lines of HTML markup; tags were stripped during extraction and are not recoverable]
+ + + + diff --git a/public/admin/login.html b/public/admin/login.html new file mode 100644 index 0000000..4ad4ab9 --- /dev/null +++ b/public/admin/login.html @@ -0,0 +1,206 @@ + + + + + + Admin Login + + + + + + + + + diff --git a/public/logo.webp b/public/assets/images/logo.webp similarity index 100% rename from public/logo.webp rename to public/assets/images/logo.webp diff --git a/public/assets/scripts/admin.js b/public/assets/scripts/admin.js new file mode 100644 index 0000000..c55f9bb --- /dev/null +++ b/public/assets/scripts/admin.js @@ -0,0 +1,1151 @@ +document.addEventListener('DOMContentLoaded', () => { + const state = { + resources: [], + current: null, + fields: [], + fileFields: [], + page: 1, + limit: 10, + total: 0, + data: [], + selectedIds: new Set(), + // cache relation labels: key => label, where key = `${resource}:${id}` + labelCache: Object.create(null), + }; + + const $ = (sel) => document.querySelector(sel); + const el = (tag, props = {}, children = []) => { + const e = document.createElement(tag); + Object.assign(e, props); + children.forEach((c) => + e.appendChild(typeof c === 'string' ? document.createTextNode(c) : c), + ); + return e; + }; + + function openModal(title) { + const m = $('#modal'); + const t = $('#modalTitle'); + if (t) t.textContent = title || ''; + if (m) m.classList.remove('hidden'); + } + + function closeModal() { + const m = $('#modal'); + const f = $('#form'); + if (f) f.innerHTML = ''; + if (m) m.classList.add('hidden'); + } + + async function api(path, opts = {}) { + const isFormData = + opts && + opts.body && + typeof FormData !== 'undefined' && + opts.body instanceof FormData; + const baseHeaders = isFormData + ? {} + : { 'Content-Type': 'application/json' }; + const headers = { ...baseHeaders, ...(opts.headers || {}) }; + const res = await fetch(`/admin/api${path}`, { ...opts, headers }); + + // Handle unauthorized - redirect to login + if (res.status === 401) { + window.location.href = + '/login?next=' + + encodeURIComponent(window.location.pathname); + throw new Error('Unauthorized'); + } + + if (!res.ok) throw new Error((await res.json()).error || res.statusText); + return res.json(); + } + + function toDatetimeLocal(value) { + if (!value) return ''; + const d = new Date(value); + if (isNaN(d.getTime())) return ''; + const pad = (n) => String(n).padStart(2, '0'); + const yyyy = d.getFullYear(); + const mm = pad(d.getMonth() + 1); + const dd = pad(d.getDate()); + const hh = pad(d.getHours()); + const mi = pad(d.getMinutes()); + return `${yyyy}-${mm}-${dd}T${hh}:${mi}`; + } + + function fromDatetimeLocal(value) { + if (!value) return undefined; + const d = new Date(value); + if (isNaN(d.getTime())) return undefined; + return d.toISOString(); + } + + async function loadResources() { + const { resources } = await api('/meta'); + state.resources = resources; + const wrap = $('#resources'); + wrap.innerHTML = ''; + // Prepare concurrent count fetches (used for header & sidebar badges) + const counts = Object.create(null); + try { + const countPromises = resources.map(async (r) => { + try { + const resp = await api(`/${r.name}?page=1&limit=1`); + return { name: r.name, total: resp.total || 0 }; + } catch { + return { name: r.name, total: null }; + } + }); + Object.assign( + counts, + Object.fromEntries( + (await Promise.all(countPromises)).map((c) => [c.name, c.total]), + ), + ); + } catch {} + resources.forEach((r) => { + const item = el('div', { + className: `resource${state.current === r.name ? 
' active' : ''}`, + }); + const row = el( + 'div', + { style: 'display:flex; align-items:center; gap:8px;' }, + [ + el('span', { textContent: r.label || r.name }), + el( + 'span', + { + className: 'muted', + style: + 'font-size:11px; padding:2px 6px; border:1px solid var(--border); border-radius:10px; background: var(--panel);', + }, + [counts[r.name] == null ? '…' : String(counts[r.name])], + ), + ], + ); + item.appendChild(row); + item.onclick = () => selectResource(r.name); + wrap.appendChild(item); + }); + } + + async function loadFields(resource) { + const { fields, fileFields } = await api(`/${resource}/meta`); + state.fields = fields; + state.fileFields = fileFields || []; + } + + function renderList() { + const list = $('#list'); + if (!state.data.length) { + list.innerHTML = + '
No data
'; + return; + } + const cols = [ + '_id', + ...state.fields.filter((f) => f.path !== '_id').map((f) => f.path), + ].slice(0, 6); + const fieldByPath = Object.fromEntries( + state.fields.map((f) => [f.path, f]), + ); + const table = el('table'); + const thead = el('thead'); + const trh = el('tr'); + // Select-all checkbox header + const selectAll = el('input', { type: 'checkbox' }); + selectAll.onchange = (e) => { + const checked = !!selectAll.checked; + if (checked) { + state.data.forEach((row) => state.selectedIds.add(String(row._id))); + } else state.selectedIds.clear(); + renderList(); + updateBulkBtnState(); + }; + const thSelect = el('th'); + thSelect.appendChild(selectAll); + trh.appendChild(thSelect); + cols + .concat(['actions']) + .forEach((c) => trh.appendChild(el('th', { textContent: c }))); + thead.appendChild(trh); + table.appendChild(thead); + const tbody = el('tbody'); + state.data.forEach((row) => { + const tr = el('tr'); + // Row select checkbox + const tdChk = el('td'); + const cb = el('input', { type: 'checkbox' }); + cb.checked = state.selectedIds.has(String(row._id)); + cb.onchange = () => { + const id = String(row._id); + if (cb.checked) state.selectedIds.add(id); + else state.selectedIds.delete(id); + updateBulkBtnState(); + }; + tdChk.appendChild(cb); + tr.appendChild(tdChk); + cols.forEach((c) => { + const f = fieldByPath[c]; + tr.appendChild(el('td', { textContent: formatCell(row[c], f) })); + }); + const actions = el('td', { style: 'display: flex;' }); + const editBtn = el('button', { textContent: 'Edit' }); + editBtn.onclick = () => showForm(row); + const delBtn = el('button', { + textContent: 'Delete', + style: 'margin-left:6px', + }); + delBtn.onclick = async () => { + if (confirm('Delete record?')) { + await api(`/${state.current}/${row._id}`, { method: 'DELETE' }); + await refresh(); + } + }; + actions.appendChild(editBtn); + actions.appendChild(delBtn); + tr.appendChild(actions); + tbody.appendChild(tr); + }); + table.appendChild(tbody); + list.innerHTML = ''; + list.appendChild(table); + $('#pageInfo').textContent = `Page ${state.page} — ${state.total} total`; + // Reflect header select-all state (all rows selected) + const allSelected = state.data.every((r) => + state.selectedIds.has(String(r._id)), + ); + selectAll.checked = allSelected && state.data.length > 0; + } + + function formatVal(v) { + if (v == null) return ''; + if (typeof v === 'object') return JSON.stringify(v); + return String(v); + } + + function formatCell(v, field) { + if (!field) return formatVal(v); + if (field.type === 'relation' && field.relation) { + if (v == null) return ''; + const res = field.relation.resource; + if (Array.isArray(v)) { + const labels = v + .map((id) => state.labelCache[`${res}:${id}`] || String(id)) + .filter(Boolean); + return labels.slice(0, 3).join(', ') + (labels.length > 3 ? ' …' : ''); + } + return state.labelCache[`${res}:${v}`] || String(v); + } + return formatVal(v); + } + + function showForm(row) { + const form = $('#form'); + openModal(row ? 
'Edit record' : 'Create record'); + const fields = state.fields.filter((f) => !['_id', '__v'].includes(f.path)); + const readOnly = new Set(['_id', 'createdAt', 'updatedAt', 'password']); + form.innerHTML = ''; + const grid = el('div', { className: 'form-grid' }); + fields.forEach((f) => { + const label = el('label', { textContent: f.path }); + const type = f.type; + let input; + const isFile = + Array.isArray(state.fileFields) && state.fileFields.includes(f.path); + if (isFile) { + input = el('input', { type: 'file' }); + } else if (type === 'relation' && f.relation) { + input = createRelationEditor(f, row ? row[f.path] : undefined); + } else if (type === 'subdocument' && Array.isArray(f.children)) { + if (f.isArray) { + input = createSubdocArrayEditor(f, row ? row[f.path] : undefined); + } else { + input = createSubdocEditor(f, row ? row[f.path] : undefined); + } + } else if (f.enumValues && f.enumValues.length) { + input = el('select'); + input.appendChild(el('option', { value: '', textContent: '' })); + f.enumValues.forEach((opt) => + input.appendChild(el('option', { value: opt, textContent: opt })), + ); + } else if (type === 'boolean') { + input = el('select'); + ['false', 'true'].forEach((opt) => + input.appendChild(el('option', { value: opt, textContent: opt })), + ); + } else if (type === 'number') { + input = el('input', { type: 'number', step: 'any' }); + } else if (type === 'date') { + input = el('input', { type: 'datetime-local' }); + } else if (type === 'array') { + input = el('textarea', { rows: 3, placeholder: '[ ... ]' }); + } else if (type === 'mixed') { + input = el('input', { type: 'text' }); + } else { + input = el('input', { type: 'text' }); + } + + const rawVal = row ? row[f.path] : undefined; + let val = ''; + if (rawVal != null) { + if (isFile) { + val = ''; + } else if (type === 'relation' && f.relation) { + // Value is controlled by relation editor (hidden input maintains value) + val = ''; + } else if (f.enumValues && f.enumValues.length) val = String(rawVal); + else if (type === 'boolean') val = rawVal ? 'true' : 'false'; + else if (type === 'number') val = String(rawVal); + else if (type === 'date') val = toDatetimeLocal(rawVal); + else if (type === 'array') + val = Array.isArray(rawVal) ? JSON.stringify(rawVal, null, 2) : ''; + else if (type === 'mixed') + val = + typeof rawVal === 'object' + ? JSON.stringify(rawVal) + : String(rawVal); + else val = String(rawVal); + } + if (type === 'relation' && f.relation) { + // disable search when readOnly + if (readOnly.has(f.path)) { + const controls = input.querySelectorAll('input,button'); + controls.forEach((c) => (c.disabled = true)); + } + } else if (type === 'subdocument' && Array.isArray(f.children)) { + if (readOnly.has(f.path)) { + const controls = input.querySelectorAll( + 'input,button,select,textarea', + ); + controls.forEach((c) => (c.disabled = true)); + } + } else { + input.value = val; + input.disabled = readOnly.has(f.path); + input.dataset.path = f.path; + input.dataset.type = type; + input.dataset.isArray = f.isArray ? '1' : '0'; + input.dataset.isFile = isFile ? '1' : '0'; + } + grid.appendChild(label); + grid.appendChild(input); + if (isFile && row && typeof rawVal === 'string' && rawVal) { + grid.appendChild( + el('div', { className: 'muted', style: 'grid-column: 1 / -1' }, [ + el('small', { textContent: `Current: ${rawVal}` }), + ]), + ); + } + }); + const save = el('button', { textContent: row ? 
'Update' : 'Create' }); + save.onclick = async () => { + const payload = {}; + const fileInputs = Array.from( + grid.querySelectorAll('[data-is-file="1"]'), + ); + const anyFileSelected = fileInputs.some( + (inp) => inp.files && inp.files.length > 0, + ); + const useMultipart = anyFileSelected; + const formData = useMultipart ? new FormData() : null; + try { + grid.querySelectorAll('[data-path]').forEach((inp) => { + const isFile = inp.dataset.isFile === '1'; + const t = inp.dataset.type; + const isArr = inp.dataset.isArray === '1'; + const path = inp.dataset.path; + if (isFile) { + if (useMultipart && inp.files && inp.files[0]) { + formData.append(path, inp.files[0]); + } + return; + } + const raw = inp.value; + const v = parseByType(t, isArr, raw); + if (useMultipart) { + if (v !== undefined) + formData.append( + path, + typeof v === 'string' ? v : JSON.stringify(v), + ); + } else { + if (v !== undefined) payload[path] = v; + } + }); + } catch (e) { + $('#error').textContent = e.message || 'Invalid input'; + return; + } + try { + if (row) { + if (useMultipart) + await api(`/${state.current}/${row._id}`, { + method: 'PUT', + body: formData, + }); + else + await api(`/${state.current}/${row._id}`, { + method: 'PUT', + body: JSON.stringify(payload), + }); + } else { + if (useMultipart) + await api(`/${state.current}`, { method: 'POST', body: formData }); + else + await api(`/${state.current}`, { + method: 'POST', + body: JSON.stringify(payload), + }); + } + $('#error').textContent = ''; + await refresh(); + closeModal(); + } catch (e) { + $('#error').textContent = e.message || 'Failed'; + } + }; + const cancel = el('button', { + textContent: 'Cancel', + style: + 'margin-left:8px; background: transparent; color: var(--text); border: 1px solid var(--border);', + }); + cancel.onclick = () => closeModal(); + form.appendChild(grid); + form.appendChild(el('div', { style: 'margin-top:8px' }, [save, cancel])); + } + + function parseByType(type, isArray, v) { + if (v === '') return undefined; + if (type === 'boolean') return v === 'true'; + if (type === 'number') { + const n = Number(v); + return Number.isNaN(n) ? v : n; + } + if (type === 'date') { + const iso = fromDatetimeLocal(v); + return iso ?? v; + } + if (type === 'array' || isArray) { + try { + const parsed = JSON.parse(v); + if (!Array.isArray(parsed)) throw new Error('Array expected'); + return parsed; + } catch { + throw new Error('Invalid JSON for array field'); + } + } + if (type === 'subdocument') { + try { + return JSON.parse(v); + } catch { + throw new Error('Invalid JSON for subdocument field'); + } + } + if (type === 'mixed' || type === 'relation') return v; // keep as-is (relation handled upstream) + if (v === 'true') return true; + if (v === 'false') return false; + const n = Number(v); + if (!Number.isNaN(n) && String(n) === v) return n; + return v; + } + + async function refresh() { + if (!state.current) return; + const q = $('#search').value.trim(); + const sort = getSort(state.current); + const { data, total } = await api( + `/${state.current}?page=${state.page}&limit=${state.limit}${q ? `&q=${encodeURIComponent(q)}` : ''}${sort ? 
`&sort=${encodeURIComponent(sort)}` : ''}`, + ); + state.data = data; + state.total = total; + // Keep only selected ids that are present on the current page + const idsOnPage = new Set(data.map((r) => String(r._id))); + state.selectedIds = new Set( + Array.from(state.selectedIds).filter((id) => idsOnPage.has(id)), + ); + await batchLoadRelationLabels(); + renderList(); + updateBulkBtnState(); + } + + async function selectResource(name) { + state.current = name; + state.page = 1; + state.selectedIds.clear(); + $('#search').value = ''; + // Sync toolbar sort select with saved preference for this resource + try { + const s = $('#sortSelect'); + if (s) { + const savedSort = + localStorage.getItem(`admin.sort.${name}`) || '-createdAt'; + s.value = savedSort; + } + } catch {} + try { + localStorage.setItem('admin.currentResource', name); + } catch {} + await loadFields(name); + await refresh(); + loadResources(); + } + + $('#refresh').onclick = refresh; + $('#new').onclick = () => showForm(null); + $('#logoutBtn').onclick = async () => { + if (confirm('Are you sure you want to logout?')) { + try { + await fetch('/admin/logout', { + method: 'POST', + headers: { accept: 'application/json' }, + }); + window.location.href = './login'; + } catch (e) { + alert('Logout failed. Please try again.'); + } + } + }; + // Sorting select next to Clear All + (function initToolbarSort() { + const s = $('#sortSelect'); + if (!s) return; + try { + if (state.current) { + const saved = localStorage.getItem(`admin.sort.${state.current}`); + if (saved) s.value = saved; + } + } catch {} + s.onchange = async () => { + if (!state.current) return; + try { + localStorage.setItem(`admin.sort.${state.current}`, s.value); + } catch {} + state.page = 1; + await refresh(); + }; + })(); + $('#bulkDelete').onclick = async () => { + if (!state.current) return; + const ids = Array.from(state.selectedIds); + if (ids.length === 0) return; + if (!confirm(`Delete ${ids.length} selected record(s)?`)) return; + try { + await api(`/${state.current}/bulk-delete`, { + method: 'POST', + body: JSON.stringify({ ids }), + }); + state.selectedIds.clear(); + await refresh(); + } catch (e) { + $('#error').textContent = e.message || 'Bulk delete failed'; + } + }; + $('#clearAll').onclick = async () => { + if (!state.current) return; + if ( + !confirm( + 'This will delete ALL records for this resource. This cannot be undone. 
Continue?', + ) + ) + return; + try { + await api(`/${state.current}/clear`, { method: 'POST' }); + state.selectedIds.clear(); + await refresh(); + await loadResources(); + } catch (e) { + $('#error').textContent = e.message || 'Clear all failed'; + } + }; + $('#prev').onclick = async () => { + if (state.page > 1) { + state.page--; + await refresh(); + } + }; + $('#next').onclick = async () => { + const max = Math.ceil(state.total / state.limit) || 1; + if (state.page < max) { + state.page++; + await refresh(); + } + }; + $('#search').onkeydown = (e) => { + if (e.key === 'Enter') refresh(); + }; + + const modalClose = $('#modalClose'); + if (modalClose) modalClose.onclick = () => closeModal(); + const modalBackdrop = document.querySelector('#modal .modal-backdrop'); + if (modalBackdrop) modalBackdrop.onclick = () => closeModal(); + window.addEventListener('keydown', (e) => { + if (e.key === 'Escape' && !$('#modal').classList.contains('hidden')) + closeModal(); + }); + + loadResources().then(() => { + const saved = (() => { + try { + return localStorage.getItem('admin.currentResource'); + } catch { + return null; + } + })(); + const names = state.resources.map((r) => r.name); + const pick = + saved && names.includes(saved) + ? saved + : state.resources[0] && state.resources[0].name; + if (pick) selectResource(pick); + }); + + function updateBulkBtnState() { + const btn = $('#bulkDelete'); + if (!btn) return; + btn.disabled = state.selectedIds.size === 0; + } + + function getSort(resource) { + try { + return localStorage.getItem(`admin.sort.${resource}`) || '-createdAt'; + } catch { + return '-createdAt'; + } + } + + // Helpers for relation fields + function debounce(fn, ms) { + let t; + return (...args) => { + clearTimeout(t); + t = setTimeout(() => fn.apply(null, args), ms); + }; + } + + async function batchLoadRelationLabels() { + const relFields = state.fields.filter( + (f) => f.type === 'relation' && f.relation, + ); + const tasks = relFields.map(async (f) => { + const ids = new Set(); + for (const row of state.data) { + const v = row[f.path]; + if (Array.isArray(v)) v.forEach((id) => ids.add(String(id))); + else if (v != null) ids.add(String(v)); + } + const missing = Array.from(ids).filter( + (id) => !state.labelCache[`${f.relation.resource}:${id}`], + ); + if (!missing.length) return; + const resp = await api( + `/${state.current}/lookup/${encodeURIComponent(f.path)}?ids=${missing.join(',')}`, + ); + (resp.options || []).forEach((opt) => { + state.labelCache[`${f.relation.resource}:${opt._id}`] = opt.label; + }); + }); + await Promise.all(tasks); + } + + function createRelationEditor(field, rawVal) { + const isMulti = !!field.isArray; + const container = el('div', { + style: 'display:flex; flex-direction: column; gap:6px;', + }); + const hidden = el('input', { type: 'hidden' }); + // Set dataset on the hidden input so payload builder can read it + hidden.dataset.path = field.path; + hidden.dataset.type = 'relation'; + hidden.dataset.isArray = isMulti ? 
'1' : '0'; + hidden.dataset.isFile = '0'; + + const search = el('input', { type: 'text', placeholder: 'Search…' }); + const results = el('div', { + style: + 'border:1px solid var(--border); background: var(--bg); border-radius: 6px; display:none;', + }); + const recentBox = el('div', { + style: + 'border:1px solid var(--border); background: var(--bg); border-radius: 6px; display:none; margin-top:4px;', + }); + const chips = el('div', { + style: 'display:flex; gap:6px; flex-wrap:wrap;', + }); + // Will be assigned later (after the row is created). Used to toggle the Clear row visibility + let updateRowVisibility = () => {}; + + function setHidden(val) { + if (isMulti) hidden.value = JSON.stringify(val); + else hidden.value = val || ''; + } + + function renderChips(items) { + chips.innerHTML = ''; + items.forEach((it) => { + const chip = el( + 'span', + { + style: + 'padding:4px 8px; border:1px solid var(--border); border-radius:12px; background: var(--panel);', + }, + [`${it.label} `], + ); + const btn = el('button', { + textContent: '×', + style: + 'margin-left:6px; background: transparent; color: var(--muted); border: 1px solid var(--border); padding:0 6px;', + }); + btn.onclick = () => { + selected = selected.filter((s) => s._id !== it._id); + setHidden(selected.map((s) => s._id)); + renderChips(selected); + }; + chip.appendChild(btn); + chips.appendChild(chip); + }); + } + + function showResults(items) { + results.innerHTML = ''; + items.forEach((opt) => { + const row = el( + 'div', + { + style: + 'padding:8px 10px; cursor:pointer; border-bottom:1px solid var(--border);', + }, + [opt.label], + ); + row.onclick = () => { + if (isMulti) { + if (!selected.find((s) => s._id === opt._id)) selected.push(opt); + setHidden(selected.map((s) => s._id)); + renderChips(selected); + } else { + selected = [opt]; + setHidden(opt._id); + selectedLabel.textContent = opt.label; + } + results.style.display = 'none'; + search.value = ''; + // Toggle Clear button visibility when selection changes + updateRowVisibility(); + }; + results.appendChild(row); + }); + results.style.display = items.length ? 'block' : 'none'; + } + + function showRecent(items) { + recentBox.innerHTML = ''; + if (!items.length) { + recentBox.style.display = 'none'; + return; + } + // Header + recentBox.appendChild( + el( + 'div', + { className: 'muted', style: 'padding:6px 10px; font-size: 12px;' }, + ['Recent'], + ), + ); + items.forEach((opt) => { + const row = el( + 'div', + { + style: + 'padding:8px 10px; cursor:pointer; border-top:1px solid var(--border);', + }, + [opt.label], + ); + row.onclick = () => { + if (isMulti) { + if (!selected.find((s) => s._id === opt._id)) selected.push(opt); + setHidden(selected.map((s) => s._id)); + renderChips(selected); + } else { + selected = [opt]; + setHidden(opt._id); + selectedLabel.textContent = opt.label; + } + recentBox.style.display = 'none'; + search.value = ''; + updateRowVisibility(); + }; + recentBox.appendChild(row); + }); + recentBox.style.display = 'block'; + } + + const selectedLabel = el('div', { + className: 'muted', + style: + 'color: var(--muted); white-space: nowrap; overflow: hidden; text-overflow: ellipsis; max-width: 100%;', + }); + let selected = []; + + // Initialize from raw value + (async () => { + if (rawVal == null) { + setHidden(isMulti ? 
[] : ''); + // ensure UI reflects empty selection + queueMicrotask(() => updateRowVisibility()); + return; + } + if (isMulti && Array.isArray(rawVal)) { + const ids = rawVal.map(String); + const missing = ids.filter( + (id) => !state.labelCache[`${field.relation.resource}:${id}`], + ); + if (missing.length) { + const resp = await api( + `/${state.current}/lookup/${encodeURIComponent(field.path)}?ids=${missing.join(',')}`, + ); + (resp.options || []).forEach((opt) => { + state.labelCache[`${field.relation.resource}:${opt._id}`] = + opt.label; + }); + } + selected = ids.map((id) => ({ + _id: id, + label: state.labelCache[`${field.relation.resource}:${id}`] || id, + })); + renderChips(selected); + setHidden(ids); + } else if (!isMulti && typeof rawVal === 'string') { + const id = String(rawVal); + if (!state.labelCache[`${field.relation.resource}:${id}`]) { + const resp = await api( + `/${state.current}/lookup/${encodeURIComponent(field.path)}?ids=${id}`, + ); + (resp.options || []).forEach((opt) => { + state.labelCache[`${field.relation.resource}:${opt._id}`] = + opt.label; + }); + } + const label = + state.labelCache[`${field.relation.resource}:${id}`] || id; + selected = [{ _id: id, label }]; + selectedLabel.textContent = label; + setHidden(id); + } + // defer until the row exists in DOM + queueMicrotask(() => updateRowVisibility()); + })(); + + const doSearch = debounce(async () => { + const q = search.value.trim(); + if (!q) { + // Only show recent when the input is focused and empty; otherwise hide + results.style.display = 'none'; + results.innerHTML = ''; + if (document.activeElement === search) { + try { + const resp = await api( + `/${state.current}/lookup/${encodeURIComponent(field.path)}?recent=1&limit=10`, + ); + showRecent(resp.options || []); + } catch { + recentBox.style.display = 'none'; + } + } else { + recentBox.style.display = 'none'; + } + return; + } + try { + const resp = await api( + `/${state.current}/lookup/${encodeURIComponent(field.path)}?q=${encodeURIComponent(q)}`, + ); + showResults(resp.options || []); + // When search results are shown, hide recent suggestions + recentBox.style.display = 'none'; + } catch { + results.style.display = 'none'; + } + }, 250); + search.oninput = doSearch; + + if (isMulti) { + container.appendChild(chips); + } else { + const clearBtn = el('button', { + textContent: 'Clear', + style: + 'width:max-content; background: transparent; color: var(--text); border: 1px solid var(--border);', + }); + clearBtn.onclick = () => { + selected = []; + selectedLabel.textContent = ''; + setHidden(''); + updateRowVisibility(); + }; + const row = el( + 'div', + { style: 'display:none; gap:8px; align-items:center;' }, + [selectedLabel, clearBtn], + ); + // Assign the toggler now that row exists + updateRowVisibility = () => { + row.style.display = selected.length ? 
'flex' : 'none'; + }; + container.appendChild(row); + // Ensure correct initial visibility after any async initialization + queueMicrotask(() => updateRowVisibility()); + } + container.appendChild(search); + // Show recent only when input gains focus and is empty + search.onfocus = async () => { + if (!search.value.trim()) { + try { + const resp = await api( + `/${state.current}/lookup/${encodeURIComponent(field.path)}?recent=1&limit=10`, + ); + showRecent(resp.options || []); + } catch { + recentBox.style.display = 'none'; + } + } + }; + // Hide recent shortly after blur to allow click selection + search.onblur = () => { + setTimeout(() => { + if (document.activeElement !== search) { + recentBox.style.display = 'none'; + } + }, 150); + }; + container.appendChild(results); + container.appendChild(recentBox); + container.appendChild(hidden); + return container; + } + + // Subdocument (single) editor + function createSubdocEditor(field, rawVal) { + const container = el('div', { + style: + 'display:flex; flex-direction: column; gap:8px; border:1px solid var(--border); padding:10px; border-radius:6px;', + }); + const hidden = el('input', { type: 'hidden' }); + hidden.dataset.path = field.path; + hidden.dataset.type = 'subdocument'; + hidden.dataset.isArray = '0'; + hidden.dataset.isFile = '0'; + + const childWrap = el('div', { className: 'form-grid' }); + const value = rawVal && typeof rawVal === 'object' ? rawVal : {}; + + const childControls = []; + field.children.forEach((cf) => { + const label = el('label', { textContent: cf.path }); + let input; + if (cf.type === 'relation' && cf.relation) + input = createRelationEditor( + { ...cf, path: `${field.path}.${cf.path}` }, + value[cf.path], + ); + else if (cf.type === 'subdocument' && Array.isArray(cf.children)) { + if (cf.isArray) + input = createSubdocArrayEditor( + { ...cf, path: `${field.path}.${cf.path}` }, + value[cf.path], + ); + else + input = createSubdocEditor( + { ...cf, path: `${field.path}.${cf.path}` }, + value[cf.path], + ); + } else if (cf.enumValues && cf.enumValues.length) { + input = el('select'); + input.appendChild(el('option', { value: '', textContent: '' })); + cf.enumValues.forEach((opt) => + input.appendChild(el('option', { value: opt, textContent: opt })), + ); + input.value = value[cf.path] != null ? String(value[cf.path]) : ''; + } else if (cf.type === 'boolean') { + input = el('select'); + ['false', 'true'].forEach((opt) => + input.appendChild(el('option', { value: opt, textContent: opt })), + ); + input.value = value[cf.path] ? 'true' : 'false'; + } else if (cf.type === 'number') { + input = el('input', { + type: 'number', + step: 'any', + value: value[cf.path] != null ? String(value[cf.path]) : '', + }); + } else if (cf.type === 'date') { + input = el('input', { + type: 'datetime-local', + value: value[cf.path] ? toDatetimeLocal(value[cf.path]) : '', + }); + } else if (cf.type === 'array') { + input = el('textarea', { + rows: 3, + placeholder: '[ ... ]', + value: Array.isArray(value[cf.path]) + ? JSON.stringify(value[cf.path], null, 2) + : '', + }); + } else { + input = el('input', { + type: 'text', + value: value[cf.path] != null ? String(value[cf.path]) : '', + }); + } + const childPath = `${field.path}.${cf.path}`; + if (!(cf.type === 'relation' && cf.relation)) { + input.dataset.path = childPath; + input.dataset.type = cf.type; + input.dataset.isArray = cf.isArray ? 
'1' : '0'; + input.dataset.isFile = '0'; + } + childControls.push({ def: cf, elem: input, path: childPath }); + childWrap.appendChild(label); + childWrap.appendChild(input); + }); + + function syncHidden() { + const obj = {}; + for (const item of childControls) { + const cf = item.def; + const ctrl = item.elem; + const p = item.path.split('.').slice(-1)[0]; + if (cf.type === 'relation') { + const hid = + ctrl.querySelector && + ctrl.querySelector( + `input[type="hidden"][data-path="${item.path}"]`, + ); + obj[p] = hid ? hid.value : undefined; + } else if ( + ctrl.tagName === 'TEXTAREA' && + (cf.type === 'array' || (cf.isArray && cf.type !== 'subdocument')) + ) { + try { + const parsed = JSON.parse(ctrl.value || '[]'); + obj[p] = parsed; + } catch {} + } else if (ctrl.type === 'number') + obj[p] = ctrl.value === '' ? undefined : Number(ctrl.value); + else if (ctrl.type === 'datetime-local') + obj[p] = ctrl.value ? fromDatetimeLocal(ctrl.value) : undefined; + else if (ctrl.tagName === 'SELECT' && cf.type === 'boolean') + obj[p] = ctrl.value === 'true'; + else if (cf.type === 'subdocument') { + const hid = + ctrl.querySelector && + ctrl.querySelector('input[type="hidden"][data-type="subdocument"]'); + if (hid) { + try { + obj[p] = JSON.parse(hid.value || '{}'); + } catch {} + } + } else obj[p] = ctrl.value === '' ? undefined : ctrl.value; + } + hidden.value = JSON.stringify(obj); + } + + childWrap.addEventListener('input', syncHidden, true); + syncHidden(); + + container.appendChild(childWrap); + container.appendChild(hidden); + return container; + } + + // Subdocument array (repeater) editor + function createSubdocArrayEditor(field, rawVal) { + const container = el('div', { + style: + 'display:flex; flex-direction: column; gap:8px; border:1px dashed var(--border); padding:10px; border-radius:6px;', + }); + const hidden = el('input', { type: 'hidden' }); + hidden.dataset.path = field.path; + hidden.dataset.type = 'subdocument'; + hidden.dataset.isArray = '1'; + hidden.dataset.isFile = '0'; + + const itemsWrap = el('div', { + style: 'display:flex; flex-direction: column; gap:8px;', + }); + const addBtn = el('button', { + textContent: 'Add item', + style: 'width:max-content;', + }); + + let items = Array.isArray(rawVal) ? 
rawVal : []; + + function render() { + itemsWrap.innerHTML = ''; + items.forEach((it, idx) => { + const row = el('div', { + style: + 'border:1px solid var(--border); border-radius:6px; padding:8px;', + }); + const header = el( + 'div', + { + style: + 'display:flex; justify-content: space-between; align-items:center; margin-bottom:6px;', + }, + [ + el('span', { className: 'muted', textContent: `Item ${idx + 1}` }), + (() => { + const btn = el('button', { + textContent: 'Remove', + style: + 'background: transparent; color: var(--danger); border:1px solid var(--border);', + }); + btn.onclick = () => { + items.splice(idx, 1); + syncHidden(); + render(); + }; + return btn; + })(), + ], + ); + const editor = createSubdocEditor({ ...field, isArray: false }, it); + row.appendChild(header); + row.appendChild(editor); + itemsWrap.appendChild(row); + }); + } + + function syncHidden() { + try { + const arr = []; + const rows = itemsWrap.querySelectorAll( + 'input[type="hidden"][data-type="subdocument"][data-is-array="0"]', + ); + rows.forEach((h) => { + try { + const val = JSON.parse(h.value || '{}'); + arr.push(val); + } catch {} + }); + hidden.value = JSON.stringify(arr); + } catch {} + } + + itemsWrap.addEventListener('input', syncHidden, true); + + addBtn.onclick = () => { + items.push({}); + render(); + syncHidden(); + }; + + render(); + syncHidden(); + container.appendChild(itemsWrap); + container.appendChild(addBtn); + container.appendChild(hidden); + return container; + } +}); diff --git a/public/assets/scripts/bullboard.js b/public/assets/scripts/bullboard.js new file mode 100644 index 0000000..9da5340 --- /dev/null +++ b/public/assets/scripts/bullboard.js @@ -0,0 +1,108 @@ +// BullBoard Logout Button Injector +(function () { + 'use strict'; + + // Wait for DOM to be ready + function init() { + injectLogoutButton(); + } + + function injectLogoutButton() { + // Create logout button + const logoutBtn = document.createElement('button'); + logoutBtn.id = 'bullboard-logout-btn'; + logoutBtn.textContent = 'Logout'; + logoutBtn.setAttribute('aria-label', 'Logout from queue dashboard'); + + // Apply styles matching admin dashboard logout button + Object.assign(logoutBtn.style, { + position: 'fixed', + bottom: '20px', + left: '20px', + zIndex: '9999', + padding: '8px 16px', + fontSize: '13px', + fontWeight: '600', + fontFamily: 'inherit', + color: '#f85149', + background: 'rgba(248, 81, 73, 0.1)', + border: '1px solid rgba(248, 81, 73, 0.3)', + borderRadius: '6px', + cursor: 'pointer', + transition: 'all 0.15s cubic-bezier(0.4, 0, 0.2, 1)', + boxShadow: '0 1px 3px rgba(0, 0, 0, 0.12)', + }); + + // Hover effect + logoutBtn.addEventListener('mouseenter', () => { + logoutBtn.style.background = 'rgba(248, 81, 73, 0.2)'; + logoutBtn.style.borderColor = '#f85149'; + logoutBtn.style.transform = 'translateY(-1px)'; + logoutBtn.style.boxShadow = '0 2px 8px rgba(0, 0, 0, 0.2)'; + }); + + logoutBtn.addEventListener('mouseleave', () => { + if (!logoutBtn.disabled) { + logoutBtn.style.background = 'rgba(248, 81, 73, 0.1)'; + logoutBtn.style.borderColor = 'rgba(248, 81, 73, 0.3)'; + logoutBtn.style.transform = 'translateY(0)'; + logoutBtn.style.boxShadow = '0 1px 3px rgba(0, 0, 0, 0.12)'; + } + }); + + // Click handler + logoutBtn.addEventListener('click', handleLogout); + + // Append to body + document.body.appendChild(logoutBtn); + } + + async function handleLogout() { + const btn = document.getElementById('bullboard-logout-btn'); + if (!btn || btn.disabled) return; + + // Set loading state + btn.disabled = 
true; + const originalText = btn.textContent; + btn.textContent = 'Logging out...'; + btn.style.opacity = '0.6'; + btn.style.cursor = 'not-allowed'; + + try { + // Call logout endpoint + const response = await fetch('/queues/logout', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Accept: 'application/json', + }, + credentials: 'same-origin', + }); + + if (response.ok) { + // Redirect to login page + window.location.href = '/queues/login'; + } else { + throw new Error('Logout failed'); + } + } catch (error) { + console.error('Logout error:', error); + // Reset button state on error + btn.disabled = false; + btn.textContent = originalText; + btn.style.opacity = '1'; + btn.style.cursor = 'pointer'; + + // Show error message + alert('Failed to logout. Please try again.'); + } + } + + // Initialize when DOM is ready + if (document.readyState === 'loading') { + document.addEventListener('DOMContentLoaded', init); + } else { + init(); + } +})(); + diff --git a/public/assets/scripts/login.js b/public/assets/scripts/login.js new file mode 100644 index 0000000..8cb80b4 --- /dev/null +++ b/public/assets/scripts/login.js @@ -0,0 +1,68 @@ +document.addEventListener('DOMContentLoaded', () => { + const form = document.getElementById('loginForm'); + const submitBtn = document.getElementById('submitBtn'); + const errorDiv = document.getElementById('error'); + + function showError(message) { + errorDiv.textContent = message; + errorDiv.classList.add('show'); + } + + function hideError() { + errorDiv.classList.remove('show'); + } + + function setLoading(loading) { + submitBtn.disabled = loading; + if (loading) { + submitBtn.innerHTML = 'Signing in...'; + } else { + submitBtn.innerHTML = 'Sign In'; + } + } + + form.addEventListener('submit', async (e) => { + e.preventDefault(); + hideError(); + setLoading(true); + + const formData = new FormData(form); + const username = formData.get('username'); + const password = formData.get('password'); + + try { + const response = await fetch('/admin/login', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Accept: 'application/json', + }, + body: JSON.stringify({ username, password }), + }); + + await response.json(); + + if (response.ok) { + const urlParams = new URLSearchParams(window.location.search); + const next = urlParams.get('next') || './'; + window.location.href = next; + } else { + // Handle errors + if (response.status === 429) { + showError('Too many login attempts. Please try again later.'); + } else if (response.status === 401) { + showError('Invalid username or password.'); + } else { + showError('An error occurred. Please try again.'); + } + setLoading(false); + } + } catch (error) { + showError('Network error. 
Please check your connection.'); + setLoading(false); + } + }); + + // Auto-focus username field + document.getElementById('username').focus(); +}); diff --git a/public/script.js b/public/assets/scripts/main.js similarity index 100% rename from public/script.js rename to public/assets/scripts/main.js diff --git a/public/assets/scripts/queue-login.js b/public/assets/scripts/queue-login.js new file mode 100644 index 0000000..45bc6e9 --- /dev/null +++ b/public/assets/scripts/queue-login.js @@ -0,0 +1,68 @@ +document.addEventListener('DOMContentLoaded', () => { + const form = document.getElementById('loginForm'); + const submitBtn = document.getElementById('submitBtn'); + const errorDiv = document.getElementById('error'); + + function showError(message) { + errorDiv.textContent = message; + errorDiv.classList.add('show'); + } + + function hideError() { + errorDiv.classList.remove('show'); + } + + function setLoading(loading) { + submitBtn.disabled = loading; + if (loading) { + submitBtn.innerHTML = 'Signing in...'; + } else { + submitBtn.innerHTML = 'Sign In'; + } + } + + form.addEventListener('submit', async (e) => { + e.preventDefault(); + hideError(); + setLoading(true); + + const formData = new FormData(form); + const username = formData.get('username'); + const password = formData.get('password'); + + try { + const response = await fetch('/queues/login', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Accept: 'application/json', + }, + body: JSON.stringify({ username, password }), + }); + + await response.json(); + + if (response.ok) { + const urlParams = new URLSearchParams(window.location.search); + const next = urlParams.get('next') || './'; + window.location.href = next; + } else { + if (response.status === 429) { + showError('Too many login attempts. Please try again later.'); + } else if (response.status === 401) { + showError('Invalid username or password.'); + } else { + showError('An error occurred. Please try again.'); + } + setLoading(false); + } + } catch (error) { + showError('Network error. 
Please check your connection.'); + setLoading(false); + } + }); + + document.getElementById('username').focus(); +}); + + diff --git a/public/assets/scripts/realtime.js b/public/assets/scripts/realtime.js new file mode 100644 index 0000000..be22379 --- /dev/null +++ b/public/assets/scripts/realtime.js @@ -0,0 +1,607 @@ +document.addEventListener('DOMContentLoaded', () => { + const $ = (id) => document.getElementById(id); + const statusEl = $('status'); + const sidEl = $('sid'); + const transportEl = $('transport'); + const logsEl = $('logs'); + + let socket = null; + + const STORAGE_KEY = 'rt.session.v1'; + const LOGS_CAP = 1000; + + const defaultState = () => ({ + settings: { + ns: '/', + path: '/socket.io', + token: '', + transports: { websocket: true, polling: true }, + }, + payload: { type: 'json', jsonText: '', strText: '' }, + emitTarget: { target: 'socket', room: '' }, + listeners: [], // { id, pattern, enabled } + rooms: [], // ['room1'] + logs: [], // { dir, event, payload, ts } + filters: { showOnlyEnabled: false, selectedListenerId: null }, + autoReconnect: true, + connected: false, + }); + + function loadState() { + try { + const raw = localStorage.getItem(STORAGE_KEY); + if (!raw) return defaultState(); + const obj = JSON.parse(raw); + return { ...defaultState(), ...obj }; + } catch (_) { + return defaultState(); + } + } + + let state = loadState(); + function saveState() { + try { + localStorage.setItem(STORAGE_KEY, JSON.stringify(state)); + } catch (_) { + // ignore + } + } + + function resetSession() { + localStorage.removeItem(STORAGE_KEY); + location.reload(); + } + + function setStatus(connected) { + if (connected) { + statusEl.textContent = 'Connected'; + statusEl.className = 'badge success'; + } else { + statusEl.textContent = 'Disconnected'; + statusEl.className = 'badge danger'; + } + $('disconnect').disabled = !connected; + $('connect').disabled = !!connected; + state.connected = !!connected; + saveState(); + } + + function log(direction, event, payload, options) { + const persist = options?.persist !== false; + const ts = options?.ts ?? Date.now(); + const time = new Date(ts).toLocaleTimeString(); + const item = document.createElement('div'); + item.className = `log ${direction}`; + const pretty = + typeof payload === 'string' ? payload : JSON.stringify(payload, null, 2); + item.innerHTML = `${time} ${direction} ${event}
${pretty}
`; + item.dataset.event = event; + logsEl.prepend(item); + + // persist log (cap by LOGS_CAP) + if (persist) { + try { + state.logs.push({ dir: direction, event, payload, ts }); + if (state.logs.length > LOGS_CAP) + state.logs.splice(0, state.logs.length - LOGS_CAP); + saveState(); + } catch (_) {} + } + + // highlight if matched by any enabled listener + const matches = getMatchingListeners(event); + if (matches.length) { + item.classList.add('matched'); + } + // apply filters to reflect current visibility rules + applyLogFilters(); + } + + function currentOptions() { + const transports = []; + if ($('t-websocket').checked) transports.push('websocket'); + if ($('t-polling').checked) transports.push('polling'); + const path = $('path').value || '/socket.io'; + const token = $('token').value.trim(); + const opts = { path, transports }; + if (token) opts.auth = { token }; + return opts; + } + + function connect() { + const ns = $('ns').value || '/'; + const opts = currentOptions(); + const url = undefined; // same-origin + socket = window.io(ns, opts); + + socket.on('connect', () => { + setStatus(true); + sidEl.textContent = socket.id; + transportEl.textContent = socket.io.engine.transport.name; + log('in', 'connect', { id: socket.id }); + + // auto-join rooms from state + try { + (state.rooms || []).forEach((r) => { + if (r) socket.emit('room:join', { room: r }); + }); + } catch (_) {} + }); + + socket.on('disconnect', (reason) => { + setStatus(false); + sidEl.textContent = '—'; + transportEl.textContent = '—'; + log('in', 'disconnect', { reason }); + }); + + socket.io.engine.on('upgrade', (transport) => { + transportEl.textContent = transport.name; + log('in', 'transport-upgrade', { transport: transport.name }); + }); + + socket.on('connect_error', (err) => { + log('in', 'connect_error', { message: err.message }); + }); + + socket.on('reconnect_attempt', (n) => + log('in', 'reconnect_attempt', { attempt: n }), + ); + socket.on('reconnect_failed', () => log('in', 'reconnect_failed')); + + socket.on('pong', (data) => log('in', 'pong', data)); + + // Rooms acks + socket.on('room:joined', (data) => { + log('in', 'room:joined', data); + if (data?.room && !state.rooms.includes(data.room)) { + state.rooms.push(data.room); + saveState(); + renderRooms(); + } + }); + socket.on('room:left', (data) => { + log('in', 'room:left', data); + if (data?.room) { + state.rooms = state.rooms.filter((r) => r !== data.room); + saveState(); + renderRooms(); + } + }); + + socket.onAny((event, ...args) => { + if (event === 'pong') return; // already logged + log('in', event, args.length > 1 ? 
args : args[0]); + // Post-log highlight already handled in log(); nothing else to do + }); + } + + function disconnect() { + if (socket) { + socket.disconnect(); + socket = null; + } + } + + $('connect').addEventListener('click', () => { + if (socket) socket.disconnect(); + connect(); + }); + $('disconnect').addEventListener('click', () => disconnect()); + $('clear').addEventListener('click', () => { + logsEl.innerHTML = ''; + state.logs = []; + saveState(); + }); + $('reset-session').addEventListener('click', () => resetSession()); + + $('ping').addEventListener('click', () => { + if (!socket) return log('out', 'ping', 'not connected'); + const payload = { ts: Date.now() }; + log('out', 'ping', payload); + socket.emit('ping', payload); + }); + + $('emit').addEventListener('click', () => { + if (!socket) return log('out', 'emit', 'not connected'); + const event = $('event').value.trim(); + if (!event) return log('out', 'emit', 'missing event name'); + const type = + document.querySelector('input[name="payload-type"]:checked')?.value || + 'json'; + let payloadToSend = null; + if (type === 'json') { + const text = $('payload').value.trim(); + if (text) { + try { + payloadToSend = JSON.parse(text); + } catch (e) { + return log('out', event, { error: 'invalid JSON' }); + } + } + } else { + payloadToSend = $('payload-str').value; + } + // Determine emit target + const target = + document.querySelector('input[name="emit-target"]:checked')?.value || + 'socket'; + if (target === 'room') { + const room = $('target-room-name').value.trim(); + if (!room) return log('out', 'emit', { error: 'missing room name' }); + log('out', event, { to: `room:${room}`, payload: payloadToSend }); + socket.emit('room:broadcast', { room, event, payload: payloadToSend }); + } else { + log('out', event, payloadToSend); + socket.emit(event, payloadToSend); + } + }); + + // initial state + setStatus(false); + + // payload type toggle + const toggleVisibility = () => { + const type = + document.querySelector('input[name="payload-type"]:checked')?.value || + 'json'; + const isJson = type === 'json'; + const jsonRow = $('row-payload-json'); + const stringRow = $('row-payload-string'); + if (jsonRow) jsonRow.classList.toggle('hidden', !isJson); + if (stringRow) stringRow.classList.toggle('hidden', isJson); + }; + ['pt-json', 'pt-string'].forEach((id) => { + const el = $(id); + if (el) el.addEventListener('change', toggleVisibility); + }); + toggleVisibility(); + + // emit target toggle + const toggleTargetVisibility = () => { + const target = + document.querySelector('input[name="emit-target"]:checked')?.value || + 'socket'; + const row = $('row-target-room'); + if (row) row.classList.toggle('hidden', target !== 'room'); + }; + ['target-socket', 'target-room'].forEach((id) => { + const el = $(id); + if (el) el.addEventListener('change', toggleTargetVisibility); + }); + toggleTargetVisibility(); + + // listeners management + const listenersEl = $('listeners'); + function globToRegExp(glob) { + const escaped = glob + .replace(/[.+^${}()|\\]/g, '\\$&') + .replace(/\*/g, '.*') + .replace(/\?/g, '.'); + return new RegExp('^' + escaped + '$'); + } + function getMatchingListeners(event) { + return (state.listeners || []).filter( + (l) => l.enabled && globToRegExp(l.pattern).test(event), + ); + } + function applyLogFilters() { + const showOnlyEnabled = !!state.filters?.showOnlyEnabled; + const selectedId = state.filters?.selectedListenerId || null; + const selected = selectedId + ? 
(state.listeners || []).find((l) => l.id === selectedId) + : null; + + const nodes = logsEl?.querySelectorAll('.log') || []; + for (const node of nodes) { + if (!(node instanceof HTMLElement) || !node.classList.contains('log')) + continue; + const eventName = node.dataset.event || ''; + let visible = true; + if (showOnlyEnabled) { + visible = getMatchingListeners(eventName).length > 0; + } + if (visible && selected) { + visible = + selected.enabled && globToRegExp(selected.pattern).test(eventName); + } + node.classList.toggle('hidden', !visible); + } + } + function renderListeners() { + if (!listenersEl) return; + listenersEl.innerHTML = ''; + (state.listeners || []).forEach((l) => { + const row = document.createElement('div'); + row.className = 'listener-item'; + row.dataset.id = l.id; + const enabledAttr = l.enabled ? 'checked' : ''; + row.innerHTML = ` +
${l.pattern}
+ <input type="checkbox" class="listener-toggle" ${enabledAttr} />
+ <button type="button" class="listener-remove">Remove</button>
+ `; + listenersEl.appendChild(row); + }); + // refresh filter dropdown options when listeners change + renderFilterOptions(); + // re-apply log filters in case enabled flags or patterns changed + applyLogFilters(); + } + function addListener(pattern, enabled = true) { + if (!pattern) return; + const exists = (state.listeners || []).some((l) => l.pattern === pattern); + if (exists) return; // avoid duplicates + const id = String(Date.now()) + Math.random().toString(36).slice(2, 7); + state.listeners.push({ id, pattern, enabled }); + saveState(); + renderListeners(); + } + function removeListener(id) { + state.listeners = (state.listeners || []).filter((l) => l.id !== id); + saveState(); + renderListeners(); + } + $('listener-add')?.addEventListener('click', () => { + const pattern = $('listener-pattern').value.trim(); + if (!pattern) return; + addListener(pattern, true); + $('listener-pattern').value = ''; + }); + $('listeners-clear')?.addEventListener('click', () => { + state.listeners = []; + saveState(); + renderListeners(); + renderFilterOptions(); + applyLogFilters(); + }); + listenersEl?.addEventListener('change', (e) => { + const target = e.target; + if (!(target instanceof HTMLInputElement)) return; + if (!target.classList.contains('listener-toggle')) return; + const row = target.closest('.listener-item'); + const id = row?.dataset.id; + if (!id) return; + const l = state.listeners.find((x) => x.id === id); + if (!l) return; + l.enabled = !!target.checked; + saveState(); + // update filters and visibility + renderFilterOptions(); + applyLogFilters(); + }); + listenersEl?.addEventListener('click', (e) => { + const btn = e.target; + if (!(btn instanceof HTMLElement)) return; + if (!btn.classList.contains('listener-remove')) return; + const row = btn.closest('.listener-item'); + const id = row?.dataset.id; + if (!id) return; + removeListener(id); + }); + + function renderFilterOptions() { + const sel = document.getElementById('filter-listener'); + if (!sel) return; + const selectedId = state.filters?.selectedListenerId || ''; + // if previously selected listener id no longer exists, reset + const stillExists = (state.listeners || []).some( + (l) => l.id === selectedId, + ); + const effectiveSelectedId = stillExists ? selectedId : ''; + if (!stillExists && selectedId) { + state.filters.selectedListenerId = null; + saveState(); + } + sel.innerHTML = ''; + const optAll = document.createElement('option'); + optAll.value = ''; + optAll.textContent = 'All listeners'; + sel.appendChild(optAll); + (state.listeners || []).forEach((l) => { + const opt = document.createElement('option'); + opt.value = l.id; + opt.textContent = l.pattern + (l.enabled ? 
'' : ' (disabled)'); + if (l.id === effectiveSelectedId) opt.selected = true; + sel.appendChild(opt); + }); + } + + // logs click-to-add listener + logsEl.addEventListener('click', (e) => { + const target = e.target; + if (!(target instanceof HTMLElement)) return; + if (!target.classList.contains('event')) return; + const row = target.closest('.log'); + if (!row || !row.classList.contains('in')) return; // only from inbound events + const eventName = target.textContent?.trim(); + if (!eventName) return; + addListener(eventName, true); + }); + + // rooms UI + const roomsEl = $('rooms'); + function renderRooms() { + if (!roomsEl) return; + roomsEl.innerHTML = ''; + (state.rooms || []).forEach((room) => { + const badge = document.createElement('span'); + badge.className = 'room-badge'; + badge.innerHTML = `${room} ×`; + badge.querySelector('.remove')?.addEventListener('click', () => { + // leave via server if connected + if (socket) socket.emit('room:leave', { room }); + state.rooms = state.rooms.filter((r) => r !== room); + saveState(); + renderRooms(); + }); + roomsEl.appendChild(badge); + }); + } + $('room-join')?.addEventListener('click', () => { + const room = $('room-input').value.trim(); + if (!room) return; + if (!state.rooms.includes(room)) state.rooms.push(room); + saveState(); + renderRooms(); + if (socket) socket.emit('room:join', { room }); + $('room-input').value = ''; + }); + $('room-leave')?.addEventListener('click', () => { + const room = $('room-input').value.trim(); + if (!room) return; + state.rooms = state.rooms.filter((r) => r !== room); + saveState(); + renderRooms(); + if (socket) socket.emit('room:leave', { room }); + $('room-input').value = ''; + }); + + // persist and restore settings/payload/emit target + function restoreUI() { + // settings + $('ns').value = state.settings.ns || '/'; + $('path').value = state.settings.path || '/socket.io'; + $('token').value = state.settings.token || ''; + $('t-websocket').checked = !!state.settings.transports.websocket; + $('t-polling').checked = !!state.settings.transports.polling; + // payload + if (state.payload.type === 'string') $('pt-string').checked = true; + else $('pt-json').checked = true; + $('payload').value = state.payload.jsonText || ''; + $('payload-str').value = state.payload.strText || ''; + toggleVisibility(); + // emit target + if (state.emitTarget.target === 'room') $('target-room').checked = true; + else $('target-socket').checked = true; + $('target-room-name').value = state.emitTarget.room || ''; + toggleTargetVisibility(); + // listeners & rooms + renderListeners(); + renderRooms(); + // filters UI state + renderFilterOptions(); + const chk = document.getElementById('filter-enabled'); + if (chk && chk instanceof HTMLInputElement) { + chk.checked = !!state.filters?.showOnlyEnabled; + chk.addEventListener('change', (e) => { + const target = e.target; + if (!(target instanceof HTMLInputElement)) return; + state.filters.showOnlyEnabled = !!target.checked; + saveState(); + applyLogFilters(); + }); + } + const sel = document.getElementById('filter-listener'); + if (sel && sel instanceof HTMLSelectElement) { + sel.value = state.filters?.selectedListenerId || ''; + sel.addEventListener('change', (e) => { + const target = e.target; + if (!(target instanceof HTMLSelectElement)) return; + state.filters.selectedListenerId = target.value || null; + saveState(); + applyLogFilters(); + }); + } + // logs + if (state.logs?.length) { + // render from oldest to newest so prepend makes correct order + const items = 
state.logs; + for ( + let i = Math.max(0, items.length - LOGS_CAP); + i < items.length; + i++ + ) { + const entry = items[i]; + log(entry.dir, entry.event, entry.payload, { + persist: false, + ts: entry.ts, + }); + } + } + // apply filters after logs restored + applyLogFilters(); + } + restoreUI(); + + // drawer toggle + const drawer = document.getElementById('drawer'); + document.getElementById('drawer-toggle')?.addEventListener('click', () => { + drawer?.classList.add('open'); + }); + document.getElementById('drawer-close')?.addEventListener('click', () => { + drawer?.classList.remove('open'); + }); + + // emit panel collapse + const emitBody = document.getElementById('emit-body'); + const emitToggle = document.getElementById('emit-collapse'); + let emitCollapsed = false; + emitToggle?.addEventListener('click', () => { + emitCollapsed = !emitCollapsed; + if (emitCollapsed) { + emitBody?.classList.add('hidden'); + emitToggle.textContent = 'Show'; + } else { + emitBody?.classList.remove('hidden'); + emitToggle.textContent = 'Hide'; + } + }); + + // persist on changes + ['ns', 'path', 'token'].forEach((id) => { + $(id)?.addEventListener('input', () => { + state.settings.ns = $('ns').value; + state.settings.path = $('path').value; + state.settings.token = $('token').value; + saveState(); + }); + }); + ['t-websocket', 't-polling'].forEach((id) => { + $(id)?.addEventListener('change', () => { + state.settings.transports.websocket = $('t-websocket').checked; + state.settings.transports.polling = $('t-polling').checked; + saveState(); + }); + }); + ['pt-json', 'pt-string'].forEach((id) => { + $(id)?.addEventListener('change', () => { + const type = + document.querySelector('input[name="payload-type"]:checked')?.value || + 'json'; + state.payload.type = type; + saveState(); + }); + }); + $('payload')?.addEventListener('input', () => { + state.payload.jsonText = $('payload').value; + saveState(); + }); + $('payload-str')?.addEventListener('input', () => { + state.payload.strText = $('payload-str').value; + saveState(); + }); + ['target-socket', 'target-room'].forEach((id) => { + $(id)?.addEventListener('change', () => { + const target = + document.querySelector('input[name="emit-target"]:checked')?.value || + 'socket'; + state.emitTarget.target = target; + saveState(); + }); + }); + $('target-room-name')?.addEventListener('input', () => { + state.emitTarget.room = $('target-room-name').value; + saveState(); + }); + + // auto-reconnect to previous session + if (state.autoReconnect && state.connected) { + // populate UI inputs already done; just connect + connect(); + } +}); diff --git a/public/assets/styles/bullboard-theme.css b/public/assets/styles/bullboard-theme.css new file mode 100644 index 0000000..8f6741f --- /dev/null +++ b/public/assets/styles/bullboard-theme.css @@ -0,0 +1,562 @@ +/* BullBoard Dark Theme - Matches shared-styles.css design system */ + +:root { + /* Import color palette from shared-styles.css */ + --bg: #0d1117 !important; + --panel: #161b22 !important; + --border: #30363d !important; + --text: #c9d1d9 !important; + --muted: #8b949e !important; + --primary: #1f6feb !important; + --primary-hover: #2f81f7 !important; + --success: #2ea043 !important; + --success-hover: #3fb950 !important; + --danger: #f85149 !important; + --danger-hover: #ff6b6b !important; + --warning: #d29922 !important; + --radius: 6px !important; + --transition: 0.15s cubic-bezier(0.4, 0, 0.2, 1) !important; +} + +/* Force dark mode globally */ +*, +*::before, +*::after { + border-color: var(--border) 
!important; +} + +/* Global overrides */ +html, +body { + background: var(--bg) !important; + color: var(--text) !important; + font-family: + 'Inter', + ui-sans-serif, + system-ui, + -apple-system, + Segoe UI, + Roboto, + 'Helvetica Neue', + Arial !important; +} + +/* Root and wrapper elements */ +#root, +[id*='root'], +[class*='App'], +[class*='app'] { + background: var(--bg) !important; + color: var(--text) !important; +} + +/* Main container */ +main, +[role='main'], +[class*='Main'], +[class*='main'], +[class*='Container'], +[class*='container'] { + background: var(--bg) !important; + color: var(--text) !important; +} + +/* Sections and divs - aggressive override */ +section, +div { + background-color: transparent !important; +} + +div[style*='background'] { + background: var(--bg) !important; +} + +/* Headers */ +header, +[role='banner'], +[class*='Header'], +[class*='header'] { + background: var(--panel) !important; + border-bottom: 1px solid var(--border) !important; + color: var(--text) !important; +} + +h1, +h2, +h3, +h4, +h5, +h6 { + color: var(--text) !important; +} + +/* Panels, Cards, and Wrappers */ +[class*='Card'], +[class*='card'], +[class*='Panel'], +[class*='panel'], +[class*='Box'], +[class*='box'], +[class*='Wrapper'], +[class*='wrapper'], +[class*='Content'], +[class*='content'] { + background: var(--panel) !important; + background-color: var(--panel) !important; + border-color: var(--border) !important; + color: var(--text) !important; +} + +/* Job items and list items - very specific for BullBoard */ +[class*='Job'], +[class*='job'], +article, +[role='article'], +li[class*='css-'] { + background: var(--panel) !important; + background-color: var(--panel) !important; + border-color: var(--border) !important; + color: var(--text) !important; +} + +/* Tables */ +table { + background: var(--panel) !important; + background-color: var(--panel) !important; + border-color: var(--border) !important; +} + +thead { + background: var(--panel) !important; + background-color: var(--panel) !important; + border-color: var(--border) !important; +} + +th { + background: var(--panel) !important; + background-color: var(--panel) !important; + color: var(--muted) !important; + border-color: var(--border) !important; + font-weight: 600 !important; + text-transform: uppercase !important; + font-size: 11px !important; + letter-spacing: 0.06em !important; +} + +td { + background: var(--bg) !important; + background-color: var(--bg) !important; + color: var(--text) !important; + border-color: rgba(48, 54, 61, 0.5) !important; +} + +tbody tr { + background: var(--bg) !important; + background-color: var(--bg) !important; +} + +tr:hover, +tr:hover td { + background: rgba(22, 27, 34, 0.6) !important; + background-color: rgba(22, 27, 34, 0.6) !important; +} + +/* Buttons - all variants */ +button, +[role='button'], +[type='button'], +[type='submit'] { + background: var(--primary) !important; + background-color: var(--primary) !important; + color: #f0f6fc !important; + border: 1px solid transparent !important; + border-radius: var(--radius) !important; + transition: all var(--transition) !important; + font-weight: 600 !important; +} + +svg { + fill: var(--text) !important; +} + +button[role='switch'] > span { + background-color: var(--text) !important; +} + +button:hover:not(:disabled), +[role='button']:hover:not(:disabled) { + background: var(--primary-hover) !important; + background-color: var(--primary-hover) !important; + transform: translateY(-1px); +} + +/* Secondary/outline buttons */ 
+button[data-variant='outline'], +button[data-variant='ghost'], +button[class*='secondary'], +button[class*='Secondary'] { + background: transparent !important; + background-color: transparent !important; + color: var(--text) !important; + border: 1px solid var(--border) !important; +} + +button[data-variant='outline']:hover, +button[data-variant='ghost']:hover { + background: var(--panel) !important; + background-color: var(--panel) !important; + border-color: var(--primary) !important; +} + +/* Tabs */ +[role='tab'], +[class*='Tab'], +[class*='tab'], +button[role='tab'] { + color: var(--muted) !important; + background: transparent !important; + background-color: transparent !important; + border-color: transparent !important; +} + +[role='tab'][aria-selected='true'], +[role='tab'][data-selected='true'], +[class*='Tab'][data-selected], +button[role='tab'][aria-selected='true'] { + color: var(--text) !important; + background: var(--panel) !important; + background-color: var(--panel) !important; + border-bottom-color: var(--primary) !important; + font-weight: 600 !important; +} + +[role='tablist'], +[class*='TabList'] { + background: var(--bg) !important; + background-color: var(--bg) !important; + border-bottom: 1px solid var(--border) !important; +} + +/* Inputs */ +input, +select, +textarea { + background: var(--bg) !important; + background-color: var(--bg) !important; + color: var(--text) !important; + border: 1px solid var(--border) !important; + border-radius: var(--radius) !important; +} + +input::placeholder, +textarea::placeholder { + color: #6e7681 !important; +} + +input:focus, +select:focus, +textarea:focus { + border-color: var(--primary) !important; + box-shadow: 0 0 0 3px rgba(31, 111, 235, 0.12) !important; + outline: none !important; +} + +/* Badges and Tags */ +[class*='Badge'], +[class*='badge'], +[class*='Tag'], +[class*='tag'], +span[class*='css-'][class*='badge'] { + border-radius: 999px !important; + font-weight: 600 !important; + font-size: 12px !important; + padding: 4px 10px !important; +} + +/* Status badges - completed/success */ +[class*='badge'][data-status='completed'], +[class*='badge'][data-status='success'], +[data-status='completed'], +[colorscheme='green'], +span[class*='success'], +span[class*='Success'] { + background: var(--success) !important; + background-color: var(--success) !important; + color: #fff !important; +} + +/* Status badges - failed/error */ +[class*='badge'][data-status='failed'], +[class*='badge'][data-status='error'], +[data-status='failed'], +[colorscheme='red'], +span[class*='error'], +span[class*='Error'], +span[class*='failed'], +span[class*='Failed'] { + background: var(--danger) !important; + background-color: var(--danger) !important; + color: #fff !important; +} + +/* Status badges - waiting/pending */ +[class*='badge'][data-status='waiting'], +[class*='badge'][data-status='pending'], +[data-status='waiting'], +[data-status='pending'], +[colorscheme='yellow'], +span[class*='waiting'], +span[class*='Waiting'], +span[class*='pending'], +span[class*='Pending'] { + background: var(--warning) !important; + background-color: var(--warning) !important; + color: #fff !important; +} + +/* Status badges - active/processing */ +[class*='badge'][data-status='active'], +[data-status='active'], +[colorscheme='blue'], +span[class*='active'], +span[class*='Active'] { + background: var(--primary) !important; + background-color: var(--primary) !important; + color: #fff !important; +} + +/* Links */ +a { + color: var(--primary) !important; + 
transition: color var(--transition) !important; +} + +a:hover { + color: var(--primary-hover) !important; +} + +/* Modals and Dialogs */ +[role='dialog'], +[class*='Modal'], +[class*='modal'], +[class*='Dialog'], +[class*='dialog'] { + background: var(--panel) !important; + background-color: var(--panel) !important; + border: 1px solid var(--border) !important; + border-radius: 8px !important; + box-shadow: 0 4px 16px rgba(0, 0, 0, 0.3) !important; + color: var(--text) !important; +} + +/* Modal overlay */ +[class*='Overlay'], +[class*='overlay'], +[role='dialog']::backdrop { + background: rgba(0, 0, 0, 0.65) !important; + background-color: rgba(0, 0, 0, 0.65) !important; +} + +/* Code blocks */ +code, +pre { + background: var(--bg) !important; + background-color: var(--bg) !important; + color: var(--text) !important; + border: 1px solid var(--border) !important; + border-radius: var(--radius) !important; + font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, + 'Liberation Mono', 'Courier New', monospace !important; +} + +/* Dividers */ +hr, +[class*='Divider'], +[class*='divider'] { + border-color: var(--border) !important; + background: var(--border) !important; +} + +/* Tooltips */ +[role='tooltip'], +[class*='Tooltip'], +[class*='tooltip'] { + background: var(--panel) !important; + background-color: var(--panel) !important; + color: var(--text) !important; + border: 1px solid var(--border) !important; + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.2) !important; +} + +/* Menus */ +[role='menu'], +[class*='Menu'], +[class*='menu'] { + background: var(--panel) !important; + background-color: var(--panel) !important; + border: 1px solid var(--border) !important; + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.2) !important; + color: var(--text) !important; +} + +main > div > section > ul > li > div > div > a { + color: var(--text) !important; +} + +aside nav a { + color: var(--text) !important; +} + +a[role='progressbar'] { + color: var(--bg) !important; + font-weight: 600 !important; +} + +section a span:nth-child(2) { + color: var(--text) !important; + background-color: var(--primary) !important; +} + +[role='menuitem'] { + color: var(--text) !important; +} + +[role='menuitem']:hover, +[class*='MenuItem']:hover { + background: rgba(31, 111, 235, 0.1) !important; + background-color: rgba(31, 111, 235, 0.1) !important; +} + +/* Scrollbar */ +::-webkit-scrollbar { + width: 10px; + height: 10px; +} + +::-webkit-scrollbar-thumb { + background: #30363d !important; + border-radius: 10px; + border: 2px solid var(--bg); +} + +::-webkit-scrollbar-track { + background: transparent !important; +} + +/* Specific BullBoard Layout elements */ +[class*='Layout'], +nav, +aside, +[class*='Sidebar'], +[class*='sidebar'] { + background: var(--bg) !important; + background-color: var(--bg) !important; + border-color: var(--border) !important; +} + +/* Stats and metrics */ +[class*='Stats'], +[class*='stats'], +[class*='Metric'], +[class*='metric'] { + background: var(--panel) !important; + background-color: var(--panel) !important; + border: 1px solid var(--border) !important; + border-radius: 8px !important; + color: var(--text) !important; +} + +/* Progress bars */ +[class*='Progress'], +progress { + background: var(--border) !important; + background-color: var(--border) !important; +} + +[class*='Progress'] > div, +progress::-webkit-progress-value { + background: var(--primary) !important; + background-color: var(--primary) !important; +} + +/* Text colors - force override */ +p, +span, +label, +small { + 
color: var(--text) !important; +} + +[class*='text-gray'], +[class*='text-muted'], +[class*='muted'] { + color: var(--muted) !important; +} + +/* Empty states */ +[class*='Empty'], +[class*='empty'] { + color: var(--muted) !important; + background: var(--panel) !important; + background-color: var(--panel) !important; +} + +/* Lists */ +ul, +ol { + background: transparent !important; +} + +li { + background: var(--panel) !important; + background-color: var(--panel) !important; + color: var(--text) !important; +} + +/* Specific Chakra UI overrides */ +.chakra-ui-light, +.chakra-ui-dark, +[data-theme='light'], +[data-theme='dark'] { + background: var(--bg) !important; + background-color: var(--bg) !important; + color: var(--text) !important; +} + +/* Override any inline white backgrounds */ +[style*='background: white'], +[style*='background: #fff'], +[style*='background-color: white'], +[style*='background-color: #fff'], +[style*='background: rgb(255, 255, 255)'], +[style*='background-color: rgb(255, 255, 255)'] { + background: var(--panel) !important; + background-color: var(--panel) !important; +} + +/* Override any inline light colors */ +[style*='color: #000'], +[style*='color: black'], +[style*='color: rgb(0, 0, 0)'] { + color: var(--text) !important; +} + +/* Form labels */ +label { + color: var(--muted) !important; + font-weight: 500 !important; +} + +/* Disabled states */ +:disabled, +[disabled], +[aria-disabled='true'] { + opacity: 0.5 !important; + cursor: not-allowed !important; +} + +/* Focus states */ +:focus-visible { + outline: 2px solid var(--primary) !important; + outline-offset: 2px !important; +} diff --git a/public/styles.css b/public/assets/styles/main.css similarity index 100% rename from public/styles.css rename to public/assets/styles/main.css diff --git a/public/assets/styles/realtime.css b/public/assets/styles/realtime.css new file mode 100644 index 0000000..65b6bcb --- /dev/null +++ b/public/assets/styles/realtime.css @@ -0,0 +1,441 @@ +/* Realtime Tester Specific Styles */ + +/* Sidebar customizations */ +#sidebar { + width: 280px; +} + +/* Topbar */ +.topbar { + position: sticky; + top: 0; + z-index: 5; + display: flex; + gap: 12px; + align-items: center; + padding: 10px 16px; + margin: -8px -8px 8px; + background: var(--panel); + border: 1px solid var(--border); + border-radius: var(--radius-lg); +} + +.topbar .topbar-section { + display: flex; + align-items: center; + gap: 8px; +} + +.topbar .topbar-spacer { + flex: 1; +} + +/* Right Drawer */ +.drawer { + position: fixed; + top: 0; + right: 0; + height: 100vh; + width: 340px; + background: var(--bg); + border-left: 1px solid var(--border); + box-shadow: var(--shadow-lg); + transform: translateX(100%); + transition: transform var(--transition); + display: flex; + flex-direction: column; + z-index: 99999; +} + +.drawer.open { + transform: translateX(0); +} + +.drawer-header { + display: flex; + align-items: center; + justify-content: space-between; + padding: 12px 16px; + border-bottom: 1px solid var(--border); + background: var(--panel); +} + +.drawer-section { + padding: 12px 16px; + border-bottom: 1px solid var(--border); +} + +.sidebar-section { + padding: 0 16px 20px; + border-bottom: 1px solid var(--border); +} + +.sidebar-section:last-child { + border-bottom: none; +} + +h3 { + margin: 0; + padding: 20px 0 12px; + font-size: 11px; + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.08em; + color: var(--muted); +} + +/* Status info in sidebar */ +.status-info { + display: flex; + 
flex-direction: column; + gap: 12px; +} + +.status-row { + display: flex; + justify-content: space-between; + align-items: center; + gap: 8px; +} + +.status-row .label { + font-size: 12px; + color: var(--muted); + min-width: 80px; +} + +.status-row code { + flex: 1; + text-align: right; + font-size: 12px; +} + +/* Sidebar form */ +.sidebar-form { + display: flex; + flex-direction: column; + gap: 12px; +} + +.sidebar-form-row { + display: flex; + flex-direction: column; + gap: 6px; +} + +.sidebar-form-row label:not(.checkbox-label) { + font-size: 11px; + color: var(--muted); + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.05em; +} + +.sidebar-form-row input[type='text'] { + width: 100%; + font-size: 12px; + padding: 8px 10px; +} + +.sidebar-form-row .checkbox-group { + display: flex; + gap: 8px; + flex-wrap: wrap; +} + +.sidebar-form-row .checkbox-label { + font-size: 12px; + padding: 6px 10px; +} + +/* Action buttons in sidebar */ +.action-buttons { + display: flex; + gap: 8px; +} + +.action-buttons button { + width: 100%; +} + +/* Content area - use flexbox for better height distribution */ +#content { + display: flex; + flex-direction: column; + gap: 12px; + overflow: auto; +} + +/* Panel customizations */ +.panel.padded { + flex-shrink: 0; +} + +.logs-panel { + flex: 1; + display: flex; + flex-direction: column; + min-height: 0; +} + +/* Panel header with actions */ +.panel-header { + flex-shrink: 0; + display: flex; + justify-content: space-between; + align-items: center; + padding: 12px 16px; + border-bottom: 1px solid var(--border); +} + +.panel-actions { + display: flex; + gap: 8px; +} + +.panel-header h2 { + margin: 0; +} + +.panel-body { + padding: 12px 16px 16px; +} + +button.small { + padding: 6px 12px; + font-size: 12px; +} + +/* Panel headings */ +h2 { + margin: 0 0 12px; + font-size: 16px; + font-weight: 600; + letter-spacing: -0.01em; + color: var(--text); +} + +/* Form layouts */ +.form-row { + margin-bottom: 12px; +} + +.form-row:last-child { + margin-bottom: 0; +} + +.form-row label:not(.checkbox-label):not(.radio-label) { + display: block; + margin-bottom: 8px; +} + +.form-row input[type='text'], +.form-row textarea { + width: 100%; +} + +.form-actions { + margin-top: 12px; + display: flex; + gap: 8px; +} + +/* Checkbox and radio groups */ +.checkbox-group, +.radio-group { + display: flex; + gap: 12px; + flex-wrap: wrap; +} + +.checkbox-label, +.radio-label { + display: inline-flex; + align-items: center; + gap: 8px; + padding: 8px 12px; + background: var(--bg); + border: 1px solid var(--border); + border-radius: var(--radius); + cursor: pointer; + user-select: none; + transition: all var(--transition); + margin: 0; + font-size: 13px; + color: var(--text); +} + +.checkbox-label:hover, +.radio-label:hover { + border-color: var(--primary); + background: var(--panel); +} + +.checkbox-label input, +.radio-label input { + margin: 0; + cursor: pointer; + accent-color: var(--primary); +} + +/* Logs container */ +.logs { + flex: 1; + background: var(--bg); + padding: 12px; + overflow-y: auto; + font-size: 13px; + min-height: 0; +} + +/* Listeners */ +.listeners { + display: flex; + flex-direction: column; + gap: 8px; + margin-top: 12px; +} + +.listener-item { + display: grid; + grid-template-columns: 1fr auto auto; + gap: 8px; + align-items: center; + padding: 8px 10px; + border: 1px solid var(--border); + border-radius: var(--radius); + background: var(--panel); +} + +.listener-item .pattern { + font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, 
Consolas, + 'Liberation Mono', 'Courier New', monospace; + font-size: 12px; +} + +.listener-item .controls { + display: flex; + gap: 6px; +} + +/* Rooms */ +.rooms-list { + display: flex; + flex-wrap: wrap; + gap: 6px; +} + +.room-badge { + display: inline-flex; + align-items: center; + gap: 6px; + font-size: 12px; + padding: 4px 8px; + border: 1px solid var(--border); + border-radius: var(--radius-sm); + background: var(--panel); +} + +.room-badge .remove { + cursor: pointer; + color: var(--muted); +} + +/* Matched logs highlight */ +.log.matched { + background: rgba(38, 166, 91, 0.12); +} + +.logs:empty::after { + content: 'No logs yet. Connect and emit events to see logs here.'; + display: block; + text-align: center; + padding: 40px 20px; + color: var(--muted); + font-size: 14px; +} + +/* Log entries */ +.log { + display: grid; + grid-template-columns: 90px 60px 1fr; + gap: 8px; + padding: 8px 8px; + border-bottom: 1px solid rgba(48, 54, 61, 0.5); + align-items: start; + transition: background var(--transition); +} + +/* Ensure filtered logs are hidden even with .log display styles */ +.log.hidden { + display: none; +} + +.log:hover { + background: rgba(22, 27, 34, 0.4); +} + +.log:last-child { + border-bottom: none; +} + +.log .time { + color: var(--muted); + font-variant-numeric: tabular-nums; + font-size: 12px; + font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, + 'Liberation Mono', 'Courier New', monospace; +} + +.log .dir { + font-size: 10px; + font-weight: 700; + text-transform: uppercase; + letter-spacing: 0.05em; + padding: 3px 6px; + border-radius: var(--radius-sm); + text-align: center; +} + +.log.in .dir { + color: var(--warning); + background: rgba(210, 153, 34, 0.15); + border: 1px solid rgba(210, 153, 34, 0.3); +} + +.log.out .dir { + color: var(--primary); + background: rgba(31, 111, 235, 0.15); + border: 1px solid rgba(31, 111, 235, 0.3); +} + +.log .event { + color: var(--text); + font-weight: 600; + font-size: 13px; +} + +.log pre { + grid-column: 1 / -1; + margin: 8px 0 0; + padding: 10px; + background: rgba(13, 17, 23, 0.8); + border: 1px solid rgba(48, 54, 61, 0.8); + border-radius: var(--radius); + font-size: 12px; + line-height: 1.5; + color: #a5d8ff; + overflow-x: auto; +} + +/* Responsive adjustments */ +@media (max-width: 960px) { + .log { + grid-template-columns: 1fr; + gap: 6px; + } + + .log .dir { + width: fit-content; + } +} diff --git a/public/assets/styles/shared-styles.css b/public/assets/styles/shared-styles.css new file mode 100644 index 0000000..4779cba --- /dev/null +++ b/public/assets/styles/shared-styles.css @@ -0,0 +1,343 @@ +/* Shared Design System for Developer UIs */ + +:root { + /* Color Palette - GitHub-inspired */ + --bg: #0d1117; + --panel: #161b22; + --border: #30363d; + --text: #c9d1d9; + --muted: #8b949e; + --primary: #1f6feb; + --primary-hover: #2f81f7; + --success: #2ea043; + --success-hover: #3fb950; + --danger: #f85149; + --danger-hover: #ff6b6b; + --warning: #d29922; + + /* Spacing Scale */ + --space-xs: 4px; + --space-sm: 8px; + --space-md: 12px; + --space-lg: 16px; + --space-xl: 20px; + --space-2xl: 24px; + + /* Border Radius */ + --radius: 6px; + --radius-sm: 3px; + --radius-lg: 8px; + --radius-full: 999px; + + /* Shadows */ + --shadow-sm: 0 1px 3px rgba(0, 0, 0, 0.12); + --shadow-md: 0 2px 8px rgba(0, 0, 0, 0.2); + --shadow-lg: 0 4px 16px rgba(0, 0, 0, 0.3); + + /* Transitions */ + --transition: 0.15s cubic-bezier(0.4, 0, 0.2, 1); +} + +/* Reset & Base */ +* { + box-sizing: border-box; +} + +html, +body { + 
height: 100%; +} + +body { + margin: 0; + display: flex; + height: 100vh; + font-family: + 'Inter', + ui-sans-serif, + system-ui, + -apple-system, + Segoe UI, + Roboto, + 'Helvetica Neue', + Arial, + 'Apple Color Emoji', + 'Segoe UI Emoji'; + font-size: 14px; + line-height: 1.5; + color: var(--text); + background: var(--bg); + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +/* Sidebar */ +#sidebar { + width: 280px; + padding: 0; + border-right: 1px solid var(--border); + background: var(--bg); + overflow: auto; + display: flex; + flex-direction: column; +} + +#sidebar .sidebar-header { + display: block; + padding: 24px 20px 20px; + font-size: 20px; + font-weight: 700; + letter-spacing: -0.02em; + color: var(--text); + border-bottom: 1px solid var(--border); + background: var(--bg); +} + +/* Content Area */ +#content { + flex: 1; + padding: 24px 28px; + overflow: auto; + display: flex; + flex-direction: column; + gap: 20px; +} + +/* Buttons */ +button { + position: relative; + padding: 4px 10px; + font-size: 13px; + font-weight: 600; + font-family: inherit; + color: #f0f6fc; + background: var(--primary); + border: 1px solid transparent; + letter-spacing: 0.01em; + border-radius: var(--radius); + cursor: pointer; + box-shadow: var(--shadow-sm); + transition: all var(--transition); +} + +button:hover { + background: var(--primary-hover); + box-shadow: var(--shadow-md); + transform: translateY(-1px); +} + +button:active { + transform: translateY(0); + box-shadow: var(--shadow-sm); +} + +button:disabled { + opacity: 0.5; + cursor: not-allowed; + transform: none; +} + +button.secondary { + background: transparent; + color: var(--text); + border: 1px solid var(--border); +} + +button.secondary:hover { + background: var(--panel); + border-color: var(--primary); + box-shadow: var(--shadow-sm); +} + +button.danger { + background: var(--danger); +} + +button.danger:hover { + background: var(--danger-hover); +} + +button.success { + background: var(--success); +} + +button.success:hover { + background: var(--success-hover); +} + +/* Inputs */ +input, +select, +textarea { + padding: 10px 14px; + font-size: 13px; + font-family: inherit; + color: var(--text); + background: var(--bg); + border: 1px solid var(--border); + border-radius: var(--radius); + outline: none; + transition: all var(--transition); +} + +input::placeholder, +textarea::placeholder { + color: #6e7681; +} + +input:focus, +select:focus, +textarea:focus { + border-color: var(--primary); + box-shadow: 0 0 0 3px rgba(31, 111, 235, 0.12); +} + +textarea { + resize: vertical; + min-height: 80px; + font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, + 'Liberation Mono', 'Courier New', monospace; +} + +/* Labels */ +label { + display: block; + margin-bottom: 6px; + font-size: 12px; + font-weight: 500; + letter-spacing: 0.01em; + color: var(--muted); +} + +/* Cards/Panels */ +.card, +.panel { + background: var(--panel); + border: 1px solid var(--border); + border-radius: var(--radius-lg); + padding: 0; + box-shadow: var(--shadow-md); + overflow: hidden; +} + +.card.padded, +.panel.padded { + padding: 16px 20px; +} + +/* Badges */ +.badge { + display: inline-block; + padding: 6px 12px; + font-size: 12px; + font-weight: 600; + border-radius: var(--radius-full); + border: 1px solid transparent; +} + +.badge.success { + background: var(--success); + color: #fff; + box-shadow: 0 2px 8px rgba(46, 160, 67, 0.3); +} + +.badge.danger { + background: var(--danger); + color: #fff; + box-shadow: 0 2px 8px 
rgba(248, 81, 73, 0.3); +} + +.badge.warning { + background: var(--warning); + color: #fff; + box-shadow: 0 2px 8px rgba(210, 153, 34, 0.3); +} + +.badge.muted { + background: var(--panel); + color: var(--muted); + border-color: var(--border); +} + +/* Code/Monospace */ +code, +pre, +.mono { + font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, + 'Liberation Mono', 'Courier New', monospace; + font-size: 13px; +} + +code { + padding: 2px 6px; + background: var(--panel); + border: 1px solid var(--border); + border-radius: var(--radius-sm); +} + +pre { + padding: 12px; + background: var(--bg); + border: 1px solid var(--border); + border-radius: var(--radius); + overflow-x: auto; + white-space: pre-wrap; + word-break: break-word; +} + +/* Utilities */ +.muted { + color: var(--muted); + font-size: 12px; +} + +.hidden { + display: none; +} + +/* Animations */ +@keyframes fadeIn { + from { + opacity: 0; + } + to { + opacity: 1; + } +} + +@keyframes scaleIn { + from { + opacity: 0; + transform: scale(0.95); + } + to { + opacity: 1; + transform: scale(1); + } +} + +/* Responsive */ +@media (max-width: 960px) { + #sidebar { + display: none; + } + #content { + padding: 16px; + } +} + +/* Scrollbar Styling */ +::-webkit-scrollbar { + width: 10px; + height: 10px; +} + +::-webkit-scrollbar-thumb { + background: #30363d; + border-radius: 10px; + border: 2px solid var(--bg); +} + +::-webkit-scrollbar-track { + background: transparent; +} diff --git a/public/favicon.ico b/public/favicon.ico new file mode 100644 index 0000000..085a18a Binary files /dev/null and b/public/favicon.ico differ diff --git a/public/index.html b/public/index.html index e87a251..1e7eb7a 100644 --- a/public/index.html +++ b/public/index.html @@ -1,57 +1,73 @@ - + - - - + + + TypeScript Backend Toolkit - - - - - + + + + +
- +
-
-
-

TypeScript Backend Toolkit

-

- A robust backend boilerplate designed for scalability, flexibility, and ease of development. - Packed with modern technologies and best practices to kickstart your next backend project. -

- +
+
+

TypeScript Backend Toolkit

+

+ A robust backend boilerplate designed for scalability, flexibility, + and ease of development. Packed with modern technologies and best + practices to kickstart your next backend project. +

+ +
+
+
+
+ + +
-
-
-
- - - -
-
# Start MongoDB and Redis
+            
# Start MongoDB and Redis
 docker compose up -d
 
 # Install dependencies
@@ -61,125 +77,163 @@ 

TypeScript Backend Toolkit

pnpm run dev 🚀 Server running at http://localhost:3000 -📚 API Docs: http://localhost:3000/api-docs -📊 Queue Dashboard: http://localhost:3000/admin/queues
-
-
-
+📚 API Docs: http://localhost:3000/docs +📊 Queue Dashboard: http://localhost:3000/queues +
+ +
-
-

What's Included

-
-
- -

OpenAPI Docs

-

Auto-generated Swagger documentation through MagicRouter API and Zod for perfect type safety

-
-
- -

Auth Module

-

Complete authentication system with Google Sign-In support and JWT handling

-
-
- -

User Management

-

Comprehensive user management with role-based access control and profile handling

-
-
- -

File Upload

-

Seamless file uploads with Multer and Amazon S3 integration for scalable storage

-
-
- -

Data Validation

-

Type-safe data validation and serialization powered by Zod

-
-
- -

Config Management

-

Environment configuration with dotenv-cli and Zod validation for type safety

-
-
-
+
+

What's Included

+
+
+ +

OpenAPI Docs

+

+ Auto-generated Swagger documentation through MagicRouter API and + Zod for perfect type safety +

+
+
+ +

Auth Module

+

+ Complete authentication system with Google Sign-In support and JWT + handling +

+
+
+ +

User Management

+

+ Comprehensive user management with role-based access control and + profile handling +

+
+
+ +

File Upload

+

+ Seamless file uploads with Multer and Amazon S3 integration for + scalable storage +

+
+
+ +

Data Validation

+

Type-safe data validation and serialization powered by Zod

+
+
+ +

Config Management

+

+ Environment configuration with dotenv-cli and Zod validation for + type safety +

+
+
+
-
-

Before You Start

-

- To ensure a smooth development experience, make sure you have these essential tools installed. - They form the foundation of your development environment and are crucial for running the project efficiently. +

+

Before You Start

+

+ To ensure a smooth development experience, make sure you have these + essential tools installed. They form the foundation of your + development environment and are crucial for running the project + efficiently. +

+
+
+ +

Docker + Docker Compose

+

+ Required for running MongoDB and Redis services in isolated + containers

-
-
- -

Docker + Docker Compose

-

Required for running MongoDB and Redis services in isolated containers

-
-
- -

PNPM

-

Fast, disk space efficient package manager for managing dependencies

-
-
- -

Node.js 20+ (LTS)

-

Latest LTS version for optimal performance and security

-
-
-
+ +
+ +

PNPM

+

+ Fast, disk space efficient package manager for managing + dependencies +

+
+
+ +

Node.js 20+ (LTS)

+

Latest LTS version for optimal performance and security

+
+ +
-
-

What's Next

-

- Our development roadmap outlines exciting features and improvements planned for future releases. - These additions will enhance scalability, developer experience, and deployment options. +

+

What's Next

+

+ Our development roadmap outlines exciting features and improvements + planned for future releases. These additions will enhance scalability, + developer experience, and deployment options. +

+
+
+ +

Real-time Support

+

+ Socket.io integration with Redis adapter for scalable real-time + communication

-
-
- -

Real-time Support

-

Socket.io integration with Redis adapter for scalable real-time communication

-
-
- -

Advanced Notifications

-

FCM & Novu integration for powerful, multi-channel notifications

-
-
- -

Server Automation

-

Ansible playbooks for automated server provisioning and configuration

-
-
- -

Cloud Infrastructure

-

AWS CDK support for infrastructure as code and easy cloud deployment

-
-
- -

Monorepo Structure

-

Turborepo integration for efficient monorepo management

-
-
- -

Serverless Ready

-

Support for AWS Lambda and Cloudflare Workers deployment

-
-
-
+ +
+ +

Advanced Notifications

+

+ FCM & Novu integration for powerful, multi-channel notifications +

+
+
+ +

Server Automation

+

+ Ansible playbooks for automated server provisioning and + configuration +

+
+
+ +

Cloud Infrastructure

+

+ AWS CDK support for infrastructure as code and easy cloud + deployment +

+
+
+ +

Monorepo Structure

+

Turborepo integration for efficient monorepo management

+
+
+ +

Serverless Ready

+

Support for AWS Lambda and Cloudflare Workers deployment

+
+ +
-
- - - \ No newline at end of file + + + diff --git a/public/queues/login.html b/public/queues/login.html new file mode 100644 index 0000000..5a10d0a --- /dev/null +++ b/public/queues/login.html @@ -0,0 +1,60 @@ + + + + + + Queues Login + + + + + + + + + + + diff --git a/public/realtime/index.html b/public/realtime/index.html new file mode 100644 index 0000000..1577294 --- /dev/null +++ b/public/realtime/index.html @@ -0,0 +1,242 @@ + + + + + + Realtime Tester + + + + + + + + +
+
+
+ Status +
Disconnected
+
+
+ Socket ID + +
+
+ Transport + +
+
+ +
+ +
+
+

Custom Emit

+
+ +
+
+
+
+ + +
+
+
+ +
+ + +
+
+
+ +
+ + +
+
+
+ +
+ + +
+ +
+ +
+
+
+ + +
+
+

Event Logs

+
+ + + + +
+
+
+
+
+ + +
+
+

Listeners & Rooms

+ +
+
+

Listeners

+
+
+
+

Rooms

+
+
+
+ + + + + diff --git a/public/robots.txt b/public/robots.txt new file mode 100644 index 0000000..1f53798 --- /dev/null +++ b/public/robots.txt @@ -0,0 +1,2 @@ +User-agent: * +Disallow: / diff --git a/scripts/gen-openapi.ts b/scripts/gen-openapi.ts new file mode 100644 index 0000000..8d03144 --- /dev/null +++ b/scripts/gen-openapi.ts @@ -0,0 +1,30 @@ +#!/usr/bin/env tsx + +import fs from 'fs/promises'; +import path from 'path'; +import '@/plugins/magic/zod-extend'; +import '@/routes/routes'; +import { convertDocumentationToYaml } from '@/plugins/magic/swagger-doc-generator'; + +async function generateOpenApiSpec() { + try { + console.log('Generating OpenAPI specification...'); + + const yamlContent = convertDocumentationToYaml( + "Robust backend boilerplate designed for scalability, flexibility, and ease of development. It's packed with modern technologies and best practices to kickstart your next backend project", + [{ url: '/api' }], + ); + + const outputPath = path.join(process.cwd(), 'public', 'openapi.yml'); + await fs.writeFile(outputPath, yamlContent, 'utf-8'); + + console.log(`✓ OpenAPI spec generated successfully at: ${outputPath}`); + + process.exit(0); + } catch (error) { + console.error('Failed to generate OpenAPI spec:', error); + process.exit(1); + } +} + +generateOpenApiSpec(); diff --git a/scripts/seed.ts b/scripts/seed.ts new file mode 100644 index 0000000..4e724af --- /dev/null +++ b/scripts/seed.ts @@ -0,0 +1,41 @@ +#!/usr/bin/env tsx + +import { Command } from 'commander'; +import { seeders } from '../src/seeders/registry'; +import { runSeeders } from '../src/seeders/runner'; + +const program = new Command(); + +program + .name('seed') + .description('Run database seeders') + .option('-g, --group ', 'Group to run (base|dev|test|demo)', 'dev') + .option('--only ', 'Comma separated seeder names') + .option('--fresh', 'Drop involved collections before seeding') + .option('--force', 'Force run in production') + .option('--dry-run', 'Do not write, only log actions') + .option('--seed ', 'Random seed for data generation', (v) => Number(v), 1) + .option('--no-transaction', 'Disable transactions') + .action(async (opts) => { + const only = opts.only ? String(opts.only).split(',').map((s: string) => s.trim()) : undefined; + + try { + await runSeeders(seeders, { + group: opts.group, + only, + fresh: Boolean(opts.fresh), + force: Boolean(opts.force), + dryRun: Boolean(opts.dryRun), + seed: Number(opts.seed) || 1, + transaction: opts.transaction ?? 
true, + }); + process.exit(0); + } catch (e) { + + console.error((e as Error).message); + process.exit(1); + } + }); + +program.parse(); + diff --git a/src/app/app.ts b/src/app/app.ts new file mode 100644 index 0000000..6145a84 --- /dev/null +++ b/src/app/app.ts @@ -0,0 +1,73 @@ +import express from 'express'; +import compression from 'compression'; +import path from 'path'; +import { createApp } from './createApp'; +import config from '../config/env'; +import { securityPlugin } from '@/plugins/security'; +import { observabilityPlugin } from '@/plugins/observability'; +import { magicRouterPlugin } from '@/plugins/magic'; +import { authPlugin } from '@/plugins/auth'; +import { realtimePlugin } from '@/plugins/realtime'; +import { lifecyclePlugin } from '@/plugins/lifecycle'; +import { adminDashboardPlugin } from '@/plugins/admin'; +import { bullboardPlugin } from '@/plugins/bullboard'; +import { basicParserPlugin } from '@/plugins/basicParser'; + +export async function initializeApp(port: number) { + const { app, server, plugins } = await createApp({ + plugins: [ + basicParserPlugin({ + enabled: true, + }), + authPlugin({ + session: { + enabled: config.SET_SESSION, + driver: 'mongo', + debug: false, + }, + }), + securityPlugin({ + corsEnabled: config.CORS_ENABLED, + corsOrigins: [config.CLIENT_SIDE_URL], + corsCredentials: true, + helmetEnabled: config.NODE_ENV === 'production', + rateLimitEnabled: config.RATE_LIMIT_ENABLED, + rateLimitWindowMs: config.RATE_LIMIT_WINDOW_MS, + rateLimitMax: config.RATE_LIMIT_MAX_REQUESTS, + trustProxy: config.TRUST_PROXY, + }), + observabilityPlugin({ + requestId: true, + logging: true, + metrics: config.METRICS_ENABLED, + }), + realtimePlugin(), + magicRouterPlugin({ + path: '/docs', + description: + "Robust backend boilerplate designed for scalability, flexibility, and ease of development. 
It's packed with modern technologies and best practices to kickstart your next backend project", + servers: [{ url: '/api' }], + }), + lifecyclePlugin({ + gracefulShutdownTimeout: 30000, + }), + adminDashboardPlugin({ adminPath: '/admin', authGuard: true }), + bullboardPlugin({ + path: '/queues', + }), + ], + config: config, + port, + }); + + app.use( + '/assets', + express.static(path.join(process.cwd(), 'public', 'assets')), + ); + + app.use(compression({ threshold: 1024 * 10 })); + + return { app, server, plugins }; +} + +export default initializeApp; diff --git a/src/app/createApp.ts b/src/app/createApp.ts new file mode 100644 index 0000000..cc8d3aa --- /dev/null +++ b/src/app/createApp.ts @@ -0,0 +1,63 @@ +import express, { type Application } from 'express'; +import { createServer, type Server } from 'http'; +import type { AppContext, ToolkitPlugin } from '../plugins/types'; +import { createChildLogger } from '@/plugins/observability/logger'; +import config from '@/config/env'; + +const logger = createChildLogger({ + context: 'CreateApp - Plugins Registration', +}); + +export interface CreateAppOptions { + plugins?: ToolkitPlugin[]; + config?: Record; + port: number; +} + +export async function createApp( + options: CreateAppOptions = { port: config.PORT }, +): Promise<{ + app: Application; + server: Server; + plugins: ToolkitPlugin[]; +}> { + const { plugins = [], config = {}, port } = options; + + const app = express(); + const server = createServer(app); + + const context: AppContext = { + app, + server, + config, + port, + }; + + const sortedPlugins = [...plugins].sort( + (a, b) => (b.priority || 0) - (a.priority || 0), + ); + + for (const plugin of sortedPlugins) { + try { + const urls = await plugin.register(context); + + if (urls) { + for (const url of urls) { + logger.info(`${plugin.name}: ${url}`); + } + } else { + logger.info(`${plugin.name}`); + } + } catch (error) { + logger.error( + { err: error, plugin: plugin.name }, + `Failed to register plugin: ${plugin.name}`, + ); + throw error; + } + } + + return { app, server, plugins: sortedPlugins }; +} + +export default createApp; diff --git a/src/config/config.service.ts b/src/config/config.service.ts deleted file mode 100644 index 8c52967..0000000 --- a/src/config/config.service.ts +++ /dev/null @@ -1,51 +0,0 @@ -import dotenv from "dotenv"; -import { z } from "zod"; - -dotenv.config(); - -// Remove .optional() from requried schema properties - -const configSchema = z.object({ - REDIS_URL: z.string().url(), - PORT: z.string().regex(/^\d+$/).transform(Number), - MONGO_DATABASE_URL: z.string().url(), - SMTP_HOST: z.string().min(1).optional(), - SMTP_PORT: z.string().regex(/^\d+$/).transform(Number).optional(), - SMTP_USERNAME: z.string().email().optional(), - EMAIL_FROM: z.string().email().optional(), - SMTP_FROM: z.string().min(1).optional(), - SMTP_PASSWORD: z.string().min(1).optional(), - CLIENT_SIDE_URL: z.string().url(), - JWT_SECRET: z.string().min(1), - JWT_EXPIRES_IN: z.string().default("86400").transform(Number), - SESSION_EXPIRES_IN: z.string().default("86400").transform(Number), - PASSWORD_RESET_TOKEN_EXPIRES_IN: z.string().default("86400").transform(Number), - SET_PASSWORD_TOKEN_EXPIRES_IN: z.string().default("86400").transform(Number), - STATIC_OTP: z.enum(["1", "0"]).transform(Number).optional(), - NODE_ENV: z - .union([z.literal("production"), z.literal("development")]) - .default("development") - .optional(), - SET_SESSION: z - .string() - .transform((value) => !!Number(value)) - .optional(), - GOOGLE_CLIENT_ID: 
z.string().optional(), - GOOGLE_CLIENT_SECRET: z.string().optional(), - GOOGLE_REDIRECT_URI: z.string().optional(), - APP_NAME: z.string().default("API V1"), - APP_VERSION: z.string().default("1.0.0"), - // Mailgun configuration - MAILGUN_API_KEY: z.string().min(1), - MAILGUN_DOMAIN: z.string().min(1), - MAILGUN_FROM_EMAIL: z.string().email(), - ADMIN_EMAIL: z.string().email(), - ADMIN_PASSWORD: z.string().min(1), - OTP_VERIFICATION_ENABLED: z.string().transform((value) => !!Number(value)), -}); - -export type Config = z.infer; - -const config = configSchema.parse(process.env); - -export default config; diff --git a/src/config/env.ts b/src/config/env.ts new file mode 100644 index 0000000..c0e3210 --- /dev/null +++ b/src/config/env.ts @@ -0,0 +1,140 @@ +import dotenv from 'dotenv'; +import { z } from 'zod'; + +dotenv.config(); + +const booleanString = z + .string() + .transform((value) => value === 'true' || value === '1') + .pipe(z.boolean()); + +const configSchema = z.object({ + NODE_ENV: z + .enum(['production', 'development', 'test']) + .default('development'), + + PORT: z.string().regex(/^\d+$/).transform(Number).default('3000'), + + REDIS_URL: z.string().url(), + MONGO_DATABASE_URL: z.string().url(), + + CLIENT_SIDE_URL: z.string().url(), + + JWT_SECRET: z.string().min(1), + JWT_EXPIRES_IN: z.string().default('86400').transform(Number), + SESSION_EXPIRES_IN: z.string().default('86400').transform(Number), + PASSWORD_RESET_TOKEN_EXPIRES_IN: z + .string() + .default('86400') + .transform(Number), + SET_PASSWORD_TOKEN_EXPIRES_IN: z.string().default('86400').transform(Number), + SET_SESSION: booleanString.default('true'), + + SESSION_DRIVER: z.enum(['mongo', 'redis']).default('mongo'), + SESSION_MAX_PER_USER: z.string().transform(Number).default('5'), + SESSION_IDLE_TTL: z.string().transform(Number).optional(), + SESSION_ABSOLUTE_TTL: z.string().transform(Number).optional(), + SESSION_ROTATION: booleanString.default('false'), + SESSION_COOKIE_NAME: z.string().default('session_id'), + SESSION_DEBUG: booleanString.default('false'), + SESSION_CLEANUP_ENABLED: booleanString.default('true'), + SESSION_CLEANUP_CRON: z.string().default('0 * * * *'), + + SMTP_HOST: z.string().min(1).optional(), + SMTP_PORT: z.string().regex(/^\d+$/).transform(Number).optional(), + SMTP_USERNAME: z.string().email().optional(), + SMTP_PASSWORD: z.string().min(1).optional(), + SMTP_FROM: z.string().min(1).optional(), + EMAIL_FROM: z.string().email().optional(), + + // Resend Configuration + RESEND_API_KEY: z.string().min(1).optional(), + RESEND_FROM_EMAIL: z.string().email().optional(), + + // Mailgun Configuration + MAILGUN_API_KEY: z.string().min(1).optional(), + MAILGUN_DOMAIN: z.string().min(1).optional(), + MAILGUN_FROM_EMAIL: z.string().email().optional(), + + ADMIN_EMAIL: z.string().email(), + ADMIN_PASSWORD: z.string().min(1), + + // Admin panel authentication (separate from app auth) + ADMIN_AUTH_ENABLED: booleanString.default('true'), + ADMIN_USERNAME: z.string().min(1).default('admin'), + ADMIN_PANEL_PASSWORD: z.string().min(1).default("admin"), + ADMIN_SESSION_SECRET: z.string().min(32).default("z2fvHbkFRXlK3n7G10nmMm2wwjPTQhZ7jp2uNwoRhJc="), + ADMIN_SESSION_TTL: z.string().transform(Number).default('86400'), + ADMIN_COOKIE_NAME: z.string().default('admin_session'), + + // Queue (BullBoard) authentication (separate from admin) + QUEUE_AUTH_ENABLED: booleanString.default('true'), + QUEUE_USERNAME: z.string().min(1).default('admin'), + QUEUE_PANEL_PASSWORD: z.string().min(1).default('admin'), + 
QUEUE_SESSION_SECRET: z + .string() + .min(32) + .default('H0vd1IYc8b1U1cX7QKk1q9cN8zZp3aB4tYw2rS9mV6xP0eL3jD'), + QUEUE_SESSION_TTL: z.string().transform(Number).default('86400'), + QUEUE_COOKIE_NAME: z.string().default('queue_session'), + + OTP_VERIFICATION_ENABLED: booleanString, + STATIC_OTP: z.enum(['1', '0']).transform(Number).optional(), + + GOOGLE_CLIENT_ID: z.string().optional(), + GOOGLE_CLIENT_SECRET: z.string().optional(), + GOOGLE_REDIRECT_URI: z.string().optional(), + + // Storage Configuration + STORAGE_PROVIDER: z.enum(['s3', 'r2', 'local']).default('s3'), + + // AWS S3 Configuration + AWS_REGION: z.string().default('us-east-1'), + AWS_S3_BUCKET: z.string().min(1).default("default"), + AWS_ACCESS_KEY_ID: z.string().optional(), + AWS_SECRET_ACCESS_KEY: z.string().optional(), + + // Cloudflare R2 Configuration + R2_ACCOUNT_ID: z.string().optional(), + R2_ACCESS_KEY_ID: z.string().optional(), + R2_SECRET_ACCESS_KEY: z.string().optional(), + R2_BUCKET: z.string().optional(), + R2_PUBLIC_URL: z.string().url().optional(), + + // Local Storage Configuration + LOCAL_STORAGE_PATH: z.string().default('./uploads'), + LOCAL_STORAGE_BASE_URL: z.string().url().optional(), + + APP_NAME: z.string().default('API V1'), + APP_VERSION: z.string().default('1.0.0'), + + LOG_LEVEL: z + .enum(['trace', 'debug', 'info', 'warn', 'error', 'fatal']) + .optional(), + METRICS_ENABLED: booleanString.default('true'), + HEALTH_ENABLED: booleanString.default('true'), + + // Cache Configuration + CACHE_PROVIDER: z.enum(['redis', 'memory']).default('redis'), + CACHE_ENABLED: booleanString.default('true'), + CACHE_PREFIX: z.string().default('app:'), + CACHE_DEFAULT_TTL: z.string().transform(Number).default('3600'), + CACHE_COMPRESSION_ENABLED: booleanString.default('false'), + CACHE_COMPRESSION_THRESHOLD: z.string().transform(Number).default('1024'), + + CORS_ENABLED: booleanString.default('true'), + RATE_LIMIT_ENABLED: booleanString.default('false'), + RATE_LIMIT_WINDOW_MS: z.string().transform(Number).default('900000'), + RATE_LIMIT_MAX_REQUESTS: z.string().transform(Number).default('100'), + + TRUST_PROXY: booleanString.default('false'), + HTTPS_ENABLED: booleanString.default('false'), + + RESPONSE_VALIDATION: z.enum(['strict', 'warn', 'off']).default('strict'), +}); + +export type Config = z.infer; + +const config = configSchema.parse(process.env); + +export default config; diff --git a/src/email/email.service.ts b/src/email/email.service.ts index eb0a97a..a3cf6ec 100644 --- a/src/email/email.service.ts +++ b/src/email/email.service.ts @@ -1,95 +1,79 @@ -import { render } from "@react-email/render"; -import config from "../config/config.service"; -import logger from "../lib/logger.service"; -import mailgunClient from "../lib/mailgun.server"; -import ResetPasswordEmail from "./templates/ResetPassword"; +import { render } from '@react-email/render'; +import logger from '@/plugins/observability/logger'; +import { emailProvider } from '../lib/email'; +import { EmailError } from '../lib/errors'; +import ResetPasswordEmail from './templates/ResetPassword'; export type SendResetPasswordTypePayload = { - email: string; - resetLink: string; - userName: string; + email: string; + resetLink: string; + userName: string; }; -class EmailError extends Error { - constructor( - message: string, - public readonly cause?: unknown, - ) { - super(message); - this.name = "EmailError"; - } -} - // Utility functions for sending emails export const sendEmail = async ({ - to, - subject, - html, + to, + subject, + html, }: { - to: 
string; - subject: string; - html: string; + to: string; + subject: string; + html: string; }) => { - try { - const messageData = { - from: config.MAILGUN_FROM_EMAIL, - to, - subject, - html, - }; - - const result = await mailgunClient.messages.create( - config.MAILGUN_DOMAIN, - messageData, - ); + try { + const result = await emailProvider.send({ + to, + subject, + html, + }); - logger.info({ - msg: "Email sent successfully", - id: result.id, - to, - subject, - }); + logger.info({ + msg: 'Email sent successfully', + id: result.id, + to, + subject, + }); - return result; - } catch (error) { - logger.error({ - msg: "Failed to send email", - error, - to, - subject, - }); + return result; + } catch (error) { + logger.error({ + msg: 'Failed to send email', + error, + to, + subject, + }); - throw new EmailError("Failed to send email", error); - } + throw new EmailError('Failed to send email', error); + } }; export const sendResetPasswordEmail = async ( - payload: SendResetPasswordTypePayload, + payload: SendResetPasswordTypePayload, ) => { - const { email, resetLink, userName } = payload; + const { email, resetLink, userName } = payload; - try { - // Render the React email template to HTML - const emailHtml = await render( - ResetPasswordEmail({ - resetLink, - userName, - }), - ); + try { + // Render the React email template to HTML + const emailHtml = await render( + ResetPasswordEmail({ + resetLink, + userName, + }), + ); - // Send the email with the rendered HTML - await sendEmail({ - to: email, - subject: "Reset Your Password", - html: emailHtml, - }); - } catch (error) { - logger.error({ - msg: "Failed to send reset password email", - error, - email, - }); + // Send the email with the rendered HTML + await sendEmail({ + to: email, + subject: 'Reset Your Password', + html: emailHtml, + }); + } catch (error) { + logger.error({ + msg: 'Failed to send reset password email', + error, + email, + }); - throw new EmailError("Failed to send reset password email", error); - } + throw new EmailError('Failed to send reset password email', error); + } }; diff --git a/src/email/templates/ResetPassword.tsx b/src/email/templates/ResetPassword.tsx index 257b909..8a09af0 100644 --- a/src/email/templates/ResetPassword.tsx +++ b/src/email/templates/ResetPassword.tsx @@ -1,108 +1,108 @@ import { - Body, - Button, - Container, - Head, - Heading, - Html, - Preview, - Section, - Text, -} from "@react-email/components"; -import * as React from "react"; + Body, + Button, + Container, + Head, + Heading, + Html, + Preview, + Section, + Text, +} from '@react-email/components'; +import * as React from 'react'; interface ResetPasswordEmailProps { - userName: string; - resetLink: string; + userName: string; + resetLink: string; } export const ResetPasswordEmail = ({ - userName, - resetLink, + userName, + resetLink, }: ResetPasswordEmailProps) => { - return ( - - - Reset your password - - - Password Reset Request - Hi {userName}, - - We received a request to reset your password. Click the button below - to create a new password: - -
- -
- - If you didn't request this password reset, you can safely ignore - this email. - - - This link will expire in 1 hour for security reasons. - - - If you're having trouble clicking the button, copy and paste this - URL into your web browser: {resetLink} - -
- - - ); + return ( + + + Reset your password + + + Password Reset Request + Hi {userName}, + + We received a request to reset your password. Click the button below + to create a new password: + +
+ +
+ + If you didn't request this password reset, you can safely ignore + this email. + + + This link will expire in 1 hour for security reasons. + + + If you're having trouble clicking the button, copy and paste this + URL into your web browser: {resetLink} + +
+ + + ); }; const main = { - backgroundColor: "#f6f9fc", - fontFamily: - '-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Ubuntu,sans-serif', + backgroundColor: '#f6f9fc', + fontFamily: + '-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Ubuntu,sans-serif', }; const container = { - backgroundColor: "#ffffff", - margin: "0 auto", - padding: "20px 0 48px", - marginBottom: "64px", + backgroundColor: '#ffffff', + margin: '0 auto', + padding: '20px 0 48px', + marginBottom: '64px', }; const heading = { - fontSize: "24px", - letterSpacing: "-0.5px", - lineHeight: "1.3", - fontWeight: "400", - color: "#484848", - padding: "17px 0 0", + fontSize: '24px', + letterSpacing: '-0.5px', + lineHeight: '1.3', + fontWeight: '400', + color: '#484848', + padding: '17px 0 0', }; const text = { - margin: "0 0 12px", - fontSize: "16px", - lineHeight: "24px", - color: "#484848", + margin: '0 0 12px', + fontSize: '16px', + lineHeight: '24px', + color: '#484848', }; const buttonContainer = { - padding: "27px 0 27px", + padding: '27px 0 27px', }; const button = { - backgroundColor: "#5469d4", - borderRadius: "4px", - color: "#ffffff", - fontSize: "16px", - textDecoration: "none", - textAlign: "center" as const, - display: "block", - padding: "12px 20px", + backgroundColor: '#5469d4', + borderRadius: '4px', + color: '#ffffff', + fontSize: '16px', + textDecoration: 'none', + textAlign: 'center' as const, + display: 'block', + padding: '12px 20px', }; const footer = { - fontSize: "13px", - lineHeight: "24px", - color: "#777", - padding: "0 20px", + fontSize: '13px', + lineHeight: '24px', + color: '#777', + padding: '0 20px', }; export default ResetPasswordEmail; diff --git a/src/extras/port-resolver.ts b/src/extras/port-resolver.ts new file mode 100644 index 0000000..1ed331b --- /dev/null +++ b/src/extras/port-resolver.ts @@ -0,0 +1,212 @@ +import net from 'net'; +import readline from 'readline/promises'; +import logger from '@/plugins/observability/logger'; + +/** + * Check if a port is available on the given host + */ +export async function isPortFree( + port: number, + host: string = '0.0.0.0', +): Promise { + // Map ambiguous hosts to concrete probe targets + const probeHosts = + host === '0.0.0.0' || host === '::' || host === 'localhost' + ? 
['127.0.0.1', '::1'] + : [host]; + + // If any probe connects, the port is in use + for (const h of probeHosts) { + const inUse = await new Promise((resolve) => { + const socket = net.createConnection({ port, host: h }); + + const done = (result: boolean) => { + socket.removeAllListeners(); + socket.destroy(); + resolve(result); + }; + + socket.once('connect', () => done(true)); // someone is listening + socket.once('error', (err: NodeJS.ErrnoException) => { + // ECONNREFUSED ⇒ nothing listening there; treat as free for this host + if (err.code === 'ECONNREFUSED' || err.code === 'EHOSTUNREACH' || err.code === 'ENOTFOUND') { + done(false); + } else { + // Conservative: any other error ⇒ consider "in use" + done(true); + } + }); + socket.setTimeout(1000, () => done(false)); // avoid hangs + }); + + if (inUse) return false; + } + + return true; +} + +/** + * Find the next available port starting from a given port + */ +export async function findNextFreePort( + startPort: number, + maxAttempts: number = 50, + host: string = '0.0.0.0', +): Promise { + for (let i = 0; i < maxAttempts; i++) { + const port = startPort + i; + if (port > 65535) break; + + const isFree = await isPortFree(port, host); + if (isFree) { + return port; + } + } + return null; +} + +/** + * Interactive prompt to resolve port conflicts + */ +async function promptForPort( + busyPort: number, + suggestedPort: number, +): Promise { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + }); + + try { + console.log(`\n⚠️ Port ${busyPort} is already in use.\n`); + + const answer = await rl.question( + `Would you like to use port ${suggestedPort} instead? [Y/n] or enter a custom port (q to quit): `, + ); + + const trimmed = answer.trim().toLowerCase(); + + // Quit + if (trimmed === 'q' || trimmed === 'quit') { + console.log('❌ Server startup cancelled by user.'); + return null; + } + + // Accept suggestion (empty, 'y', 'yes') + if (trimmed === '' || trimmed === 'y' || trimmed === 'yes') { + return suggestedPort; + } + + // Reject suggestion ('n', 'no') + if (trimmed === 'n' || trimmed === 'no') { + const customAnswer = await rl.question('Enter a custom port number: '); + const customPort = parseInt(customAnswer.trim(), 10); + + if (isNaN(customPort) || customPort < 1 || customPort > 65535) { + console.log('❌ Invalid port number. Must be between 1 and 65535.'); + return null; + } + + return customPort; + } + + // Direct numeric input + const directPort = parseInt(trimmed, 10); + if (!isNaN(directPort)) { + if (directPort < 1 || directPort > 65535) { + console.log('❌ Invalid port number. Must be between 1 and 65535.'); + return null; + } + return directPort; + } + + console.log('❌ Invalid input. 
Please try again.'); + return null; + } finally { + rl.close(); + } +} + +export interface ResolvePortOptions { + desiredPort: number; + host?: string; + interactive?: boolean; + maxAttempts?: number; +} + +/** + * Resolve an available port, prompting the user if the desired port is busy + * Returns the selected port or throws if resolution fails + */ +export async function resolvePort( + options: ResolvePortOptions, +): Promise { + const { + desiredPort, + host = '0.0.0.0', + interactive = process.stdout.isTTY && !process.env.CI, + maxAttempts = 50, + } = options; + + // Check if desired port is available + const isDesiredPortFree = await isPortFree(desiredPort, host); + + if (isDesiredPortFree) { + return desiredPort; + } + + logger.warn({ port: desiredPort }, `Port ${desiredPort} is not available`); + + // Non-interactive mode: auto-find next port + if (!interactive) { + logger.info( + 'Running in non-interactive mode, finding next available port...', + ); + const nextPort = await findNextFreePort(desiredPort + 1, maxAttempts, host); + + if (nextPort === null) { + throw new Error( + `Could not find an available port after checking ${maxAttempts} ports starting from ${desiredPort}`, + ); + } + + logger.info({ port: nextPort }, `Using port ${nextPort} instead`); + return nextPort; + } + + // Interactive mode: prompt user + let selectedPort: number | null = null; + + while (selectedPort === null) { + const suggestedPort = await findNextFreePort( + desiredPort + 1, + maxAttempts, + host, + ); + + if (suggestedPort === null) { + throw new Error( + `Could not find an available port after checking ${maxAttempts} ports starting from ${desiredPort}`, + ); + } + + const userChoice = await promptForPort(desiredPort, suggestedPort); + + if (userChoice === null) { + throw new Error('Port resolution cancelled by user'); + } + + // Validate the user's choice + const isChoiceFree = await isPortFree(userChoice, host); + + if (isChoiceFree) { + selectedPort = userChoice; + console.log(`✅ Using port ${selectedPort}\n`); + } else { + console.log(`❌ Port ${userChoice} is also in use. 
Let's try again.\n`); + // Loop will continue with a new suggestion + } + } + + return selectedPort; +} diff --git a/src/healthcheck/healthcheck.controller.ts b/src/healthcheck/healthcheck.controller.ts deleted file mode 100644 index 0ac57eb..0000000 --- a/src/healthcheck/healthcheck.controller.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type { Request, Response } from "express"; -import { StatusCodes } from "http-status-codes"; - -export const handleHealthCheck = async (_: Request, res: Response) => { - const healthCheck = { - uptime: process.uptime(), - responseTime: process.hrtime(), - message: "OK", - timestamp: Date.now(), - }; - - try { - res.send(healthCheck); - } catch (error) { - healthCheck.message = (error as Error).message; - - res.status(StatusCodes.SERVICE_UNAVAILABLE).send(healthCheck); - } -}; diff --git a/src/healthcheck/healthcheck.routes.ts b/src/healthcheck/healthcheck.routes.ts deleted file mode 100644 index 3109e7b..0000000 --- a/src/healthcheck/healthcheck.routes.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Router } from "express"; -import { handleHealthCheck } from "./healthcheck.controller"; - -export const HEALTH_ROUTER_ROOT = "/healthcheck"; - -const healthCheckRouter = Router(); - -healthCheckRouter.get("/", handleHealthCheck); - -export default healthCheckRouter; diff --git a/src/lib/aws.service.ts b/src/lib/aws.service.ts deleted file mode 100644 index 681fa1f..0000000 --- a/src/lib/aws.service.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { S3Client } from "@aws-sdk/client-s3"; - -export const BUCKET_NAME = "your-bucket-name"; - -const s3 = new S3Client(); - -export default s3; diff --git a/src/lib/cache.ts b/src/lib/cache.ts new file mode 100644 index 0000000..a471602 --- /dev/null +++ b/src/lib/cache.ts @@ -0,0 +1,574 @@ +import type { RedisOptions } from 'ioredis'; +import Redis from 'ioredis'; +import config from '../config/env'; +import logger from '@/plugins/observability/logger'; +import { CacheError } from './errors'; + +/** + * Batch set entry for cache providers + */ +export type CacheBatchSetEntry = { + key: string; + value: string; + ttl?: number; +}; + +/** + * Abstract cache provider interface + * All cache providers must implement this interface + */ +export interface CacheProvider { + // Basic operations + get(key: string): Promise<string | null>; + set(key: string, value: string, ttl?: number): Promise<void>; + del(key: string): Promise<void>; + exists(key: string): Promise<boolean>; + + // Bulk operations + mget(keys: string[]): Promise<(string | null)[]>; + mset(entries: CacheBatchSetEntry[]): Promise<void>; + mdel(keys: string[]): Promise<void>; + + // Advanced operations + incr(key: string): Promise<number>; + decr(key: string): Promise<number>; + expire(key: string, ttl: number): Promise<void>; + ttl(key: string): Promise<number>; + keys(pattern: string): Promise<string[]>; + clear(pattern?: string): Promise<void>; + + // Health check + healthCheck(): Promise<boolean>; +} + +/** + * Redis cache provider implementation + * Production-ready cache provider using ioredis + */ +export class RedisProvider implements CacheProvider { + private client: Redis; + + constructor() { + const redisOptions: RedisOptions = { + maxRetriesPerRequest: null, + enableReadyCheck: false, + lazyConnect: false, + }; + + this.client = new Redis(config.REDIS_URL || '', redisOptions); + + // Connection event listeners + this.client.on('connect', () => { + logger.info({ provider: 'redis' }, 'Cache client connected to Redis'); + }); + + this.client.on('ready', () => { + logger.info({ provider: 'redis' }, 'Cache client ready'); + }); + + this.client.on('error', (err) => { 
logger.error({ provider: 'redis', err }, 'Cache client error'); + }); + + this.client.on('close', () => { + logger.warn({ provider: 'redis' }, 'Cache client connection closed'); + }); + + this.client.on('reconnecting', () => { + logger.info({ provider: 'redis' }, 'Cache client reconnecting to Redis'); + }); + } + + async get(key: string): Promise { + try { + return await this.client.get(key); + } catch (err) { + logger.error({ provider: 'redis', key, err }, 'Failed to get cache key'); + throw new CacheError('Failed to get cache key', err); + } + } + + async set(key: string, value: string, ttl?: number): Promise { + try { + if (ttl) { + await this.client.setex(key, ttl, value); + } else { + await this.client.set(key, value); + } + } catch (err) { + logger.error( + { provider: 'redis', key, ttl, err }, + 'Failed to set cache key', + ); + throw new CacheError('Failed to set cache key', err); + } + } + + async del(key: string): Promise { + try { + await this.client.del(key); + } catch (err) { + logger.error( + { provider: 'redis', key, err }, + 'Failed to delete cache key', + ); + throw new CacheError('Failed to delete cache key', err); + } + } + + async exists(key: string): Promise { + try { + const result = await this.client.exists(key); + return result === 1; + } catch (err) { + logger.error( + { provider: 'redis', key, err }, + 'Failed to check cache key existence', + ); + throw new CacheError('Failed to check cache key existence', err); + } + } + + async mget(keys: string[]): Promise<(string | null)[]> { + try { + if (keys.length === 0) return []; + return await this.client.mget(...keys); + } catch (err) { + logger.error( + { provider: 'redis', keys, err }, + 'Failed to get multiple cache keys', + ); + throw new CacheError('Failed to get multiple cache keys', err); + } + } + + async mset(entries: CacheBatchSetEntry[]): Promise { + try { + if (entries.length === 0) return; + + // Use pipeline for better performance + const pipeline = this.client.pipeline(); + + for (const entry of entries) { + if (entry.ttl) { + pipeline.setex(entry.key, entry.ttl, entry.value); + } else { + pipeline.set(entry.key, entry.value); + } + } + + await pipeline.exec(); + } catch (err) { + logger.error( + { provider: 'redis', entries: entries.length, err }, + 'Failed to set multiple cache keys', + ); + throw new CacheError('Failed to set multiple cache keys', err); + } + } + + async mdel(keys: string[]): Promise { + try { + if (keys.length === 0) return; + await this.client.del(...keys); + } catch (err) { + logger.error( + { provider: 'redis', keys, err }, + 'Failed to delete multiple cache keys', + ); + throw new CacheError('Failed to delete multiple cache keys', err); + } + } + + async incr(key: string): Promise { + try { + return await this.client.incr(key); + } catch (err) { + logger.error( + { provider: 'redis', key, err }, + 'Failed to increment cache key', + ); + throw new CacheError('Failed to increment cache key', err); + } + } + + async decr(key: string): Promise { + try { + return await this.client.decr(key); + } catch (err) { + logger.error( + { provider: 'redis', key, err }, + 'Failed to decrement cache key', + ); + throw new CacheError('Failed to decrement cache key', err); + } + } + + async expire(key: string, ttl: number): Promise { + try { + await this.client.expire(key, ttl); + } catch (err) { + logger.error( + { provider: 'redis', key, ttl, err }, + 'Failed to set cache key expiration', + ); + throw new CacheError('Failed to set cache key expiration', err); + } + } + + async ttl(key: string): 
Promise { + try { + return await this.client.ttl(key); + } catch (err) { + logger.error( + { provider: 'redis', key, err }, + 'Failed to get cache key TTL', + ); + throw new CacheError('Failed to get cache key TTL', err); + } + } + + async keys(pattern: string): Promise { + try { + return await this.client.keys(pattern); + } catch (err) { + logger.error( + { provider: 'redis', pattern, err }, + 'Failed to get cache keys by pattern', + ); + throw new CacheError('Failed to get cache keys by pattern', err); + } + } + + async clear(pattern?: string): Promise { + try { + if (pattern) { + const keys = await this.keys(pattern); + if (keys.length > 0) { + await this.mdel(keys); + } + } else { + await this.client.flushdb(); + } + } catch (err) { + logger.error( + { provider: 'redis', pattern, err }, + 'Failed to clear cache', + ); + throw new CacheError('Failed to clear cache', err); + } + } + + async healthCheck(): Promise { + try { + const result = await this.client.ping(); + return result === 'PONG'; + } catch (err) { + logger.error({ provider: 'redis', err }, 'Cache health check failed'); + return false; + } + } + + /** + * Get raw Redis client for advanced operations + */ + getClient(): Redis { + return this.client; + } +} + +/** + * In-memory cache provider implementation + * Uses LRU eviction and TTL cleanup for development/testing + */ +export class MemoryProvider implements CacheProvider { + private cache: Map; + private accessOrder: Map; // For LRU tracking + private maxSize: number; + private cleanupInterval: NodeJS.Timeout | null; + private accessCounter: number; + + constructor(maxSize = 1000, cleanupIntervalMs = 60000) { + this.cache = new Map(); + this.accessOrder = new Map(); + this.maxSize = maxSize; + this.cleanupInterval = null; + this.accessCounter = 0; + + // Start periodic cleanup of expired entries + this.startCleanup(cleanupIntervalMs); + + logger.info( + { provider: 'memory', maxSize, cleanupIntervalMs }, + 'Memory cache provider initialized', + ); + } + + private startCleanup(intervalMs: number): void { + this.cleanupInterval = setInterval(() => { + this.cleanupExpired(); + }, intervalMs); + } + + private cleanupExpired(): void { + const now = Date.now(); + let cleaned = 0; + + for (const [key, entry] of this.cache.entries()) { + if (entry.expiresAt !== null && entry.expiresAt <= now) { + this.cache.delete(key); + this.accessOrder.delete(key); + cleaned++; + } + } + + if (cleaned > 0) { + logger.debug( + { provider: 'memory', cleaned }, + 'Cleaned up expired cache entries', + ); + } + } + + private evictLRU(): void { + if (this.cache.size === 0) return; + + // Find the least recently used key + let lruKey: string | null = null; + let lruAccess = Number.POSITIVE_INFINITY; + + for (const [key, access] of this.accessOrder.entries()) { + if (access < lruAccess) { + lruAccess = access; + lruKey = key; + } + } + + if (lruKey) { + this.cache.delete(lruKey); + this.accessOrder.delete(lruKey); + logger.debug( + { provider: 'memory', key: lruKey }, + 'Evicted LRU cache entry', + ); + } + } + + private updateAccess(key: string): void { + this.accessOrder.set(key, ++this.accessCounter); + } + + private isExpired(entry: { + value: string; + expiresAt: number | null; + }): boolean { + return entry.expiresAt !== null && entry.expiresAt <= Date.now(); + } + + async get(key: string): Promise { + const entry = this.cache.get(key); + + if (!entry) { + return null; + } + + if (this.isExpired(entry)) { + this.cache.delete(key); + this.accessOrder.delete(key); + return null; + } + + 
this.updateAccess(key); + return entry.value; + } + + async set(key: string, value: string, ttl?: number): Promise { + // Check if we need to evict + if (!this.cache.has(key) && this.cache.size >= this.maxSize) { + this.evictLRU(); + } + + const expiresAt = ttl ? Date.now() + ttl * 1000 : null; + this.cache.set(key, { value, expiresAt }); + this.updateAccess(key); + } + + async del(key: string): Promise { + this.cache.delete(key); + this.accessOrder.delete(key); + } + + async exists(key: string): Promise { + const entry = this.cache.get(key); + + if (!entry) { + return false; + } + + if (this.isExpired(entry)) { + this.cache.delete(key); + this.accessOrder.delete(key); + return false; + } + + return true; + } + + async mget(keys: string[]): Promise<(string | null)[]> { + return Promise.all(keys.map((key) => this.get(key))); + } + + async mset(entries: CacheBatchSetEntry[]): Promise { + for (const entry of entries) { + await this.set(entry.key, entry.value, entry.ttl); + } + } + + async mdel(keys: string[]): Promise { + for (const key of keys) { + await this.del(key); + } + } + + async incr(key: string): Promise { + const current = await this.get(key); + const value = current ? Number.parseInt(current, 10) : 0; + const newValue = value + 1; + await this.set(key, String(newValue)); + return newValue; + } + + async decr(key: string): Promise { + const current = await this.get(key); + const value = current ? Number.parseInt(current, 10) : 0; + const newValue = value - 1; + await this.set(key, String(newValue)); + return newValue; + } + + async expire(key: string, ttl: number): Promise { + const entry = this.cache.get(key); + if (entry) { + entry.expiresAt = Date.now() + ttl * 1000; + } + } + + async ttl(key: string): Promise { + const entry = this.cache.get(key); + + if (!entry) { + return -2; // Key does not exist + } + + if (entry.expiresAt === null) { + return -1; // Key has no expiration + } + + const remaining = Math.ceil((entry.expiresAt - Date.now()) / 1000); + return remaining > 0 ? 
remaining : -2; // Return -2 if expired + } + + async keys(pattern: string): Promise { + // Simple pattern matching (* wildcard) + const regex = new RegExp(`^${pattern.replace(/\*/g, '.*')}$`); + const matchingKeys: string[] = []; + + for (const key of this.cache.keys()) { + if (regex.test(key)) { + const entry = this.cache.get(key); + if (entry && !this.isExpired(entry)) { + matchingKeys.push(key); + } + } + } + + return matchingKeys; + } + + async clear(pattern?: string): Promise { + if (pattern) { + const keys = await this.keys(pattern); + await this.mdel(keys); + } else { + this.cache.clear(); + this.accessOrder.clear(); + } + } + + async healthCheck(): Promise { + return true; // Memory provider is always healthy + } + + /** + * Get cache statistics + */ + getStats(): { size: number; maxSize: number } { + return { + size: this.cache.size, + maxSize: this.maxSize, + }; + } + + /** + * Cleanup and stop periodic cleanup + */ + destroy(): void { + if (this.cleanupInterval) { + clearInterval(this.cleanupInterval); + this.cleanupInterval = null; + } + this.cache.clear(); + this.accessOrder.clear(); + } +} + +/** + * Factory function to create the appropriate cache provider + * Supports Redis (production) and Memory (development/testing) + */ +const createCacheProvider = (): CacheProvider => { + const provider = config.CACHE_PROVIDER || 'redis'; + + logger.info( + { provider }, + `Initializing ${provider.toUpperCase()} cache provider`, + ); + + switch (provider) { + case 'redis': + return new RedisProvider(); + + case 'memory': + return new MemoryProvider(); + + default: + throw new CacheError(`Unsupported cache provider: ${provider}`); + } +}; + +/** + * Auto-initialized cache provider singleton + */ +export const cacheProvider = createCacheProvider(); + +/** + * Legacy export for backward compatibility + * @deprecated Use cacheProvider instead + */ +export const cacheClient = + cacheProvider instanceof RedisProvider ? 
cacheProvider.getClient() : null; + +/** + * Health check function for cache connection + * Returns a function compatible with HealthCheck interface + */ +export const checkCacheHealth = () => { + return async (): Promise => { + try { + return await cacheProvider.healthCheck(); + } catch (err) { + logger.error({ err }, 'Cache health check failed'); + return false; + } + }; +}; diff --git a/src/lib/common.schema.ts b/src/lib/common.schema.ts deleted file mode 100644 index 55db8ba..0000000 --- a/src/lib/common.schema.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { StatusCodes } from "http-status-codes"; -import z from "zod"; - -export const searchAndPaginationSchema = z.object({ - search: z.string().optional(), - page: z.string().default("1").transform(Number).optional(), - limit: z.string().default("10").transform(Number).optional(), -}); - -export const returnMessageSchema = z.object({ - status: z - .number() - .refine((value) => Object.values(StatusCodes).includes(value)), - message: z.string(), -}); - -export type ReturnMessageSchemaType = z.infer; diff --git a/src/lib/database.ts b/src/lib/database.ts index 8fe0d2c..3a03ab5 100644 --- a/src/lib/database.ts +++ b/src/lib/database.ts @@ -1,19 +1,78 @@ -import mongoose from "mongoose"; -import config from "../config/config.service"; -import logger from "./logger.service"; - -export const connectDatabase = async () => { - try { - logger.info("Connecting database..."); - await mongoose.connect(config.MONGO_DATABASE_URL); - logger.info("Database connected"); - } catch (err) { - logger.error((err as Error).message); - process.exit(1); - } +import mongoose from 'mongoose'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; +import { DatabaseError } from './errors'; + +const MAX_RETRIES = 3; +const RETRY_DELAY_MS = 1000; + +/** + * Connect to MongoDB with retry logic + */ +export const connectDatabase = async (): Promise => { + let retries = 0; + + while (retries < MAX_RETRIES) { + try { + logger.info('Connecting database...'); + await mongoose.connect(config.MONGO_DATABASE_URL); + logger.info('Database connected'); + return; + } catch (err) { + retries++; + logger.error({ err, retries }, `Database connection attempt ${retries} failed`); + + if (retries >= MAX_RETRIES) { + throw new DatabaseError( + `Failed to connect to database after ${MAX_RETRIES} attempts`, + err, + ); + } + + // Exponential backoff + const delay = RETRY_DELAY_MS * Math.pow(2, retries - 1); + logger.info(`Retrying in ${delay}ms...`); + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } +}; + +/** + * Disconnect from MongoDB + */ +export const disconnectDatabase = async (): Promise => { + try { + await mongoose.disconnect(); + logger.info('Database disconnected'); + } catch (err) { + logger.error({ err }, 'Error disconnecting database'); + throw new DatabaseError('Failed to disconnect from database', err); + } }; -export const disconnectDatabase = async () => { - await mongoose.disconnect(); - logger.info("Database disconnected"); +/** + * Health check function for database connection + * Returns a function compatible with HealthCheck interface + */ +export const checkDatabaseHealth = () => { + return async (): Promise => { + try { + // Check if mongoose is connected + if (mongoose.connection.readyState !== 1) { + logger.warn('Database health check failed: not connected'); + return false; + } + + // Ping the database to ensure it's responsive + if (!mongoose.connection.db) { + logger.warn('Database health check failed: db 
not available'); + return false; + } + await mongoose.connection.db.admin().ping(); + return true; + } catch (err) { + logger.error({ err }, 'Database health check failed'); + return false; + } + }; }; diff --git a/src/lib/email.server.ts b/src/lib/email.server.ts deleted file mode 100644 index e028b5b..0000000 --- a/src/lib/email.server.ts +++ /dev/null @@ -1,14 +0,0 @@ -import nodemailer from "nodemailer"; -import type SMTPTransport from "nodemailer/lib/smtp-transport"; -import config from "../config/config.service"; - -const mailer = nodemailer.createTransport({ - host: config.SMTP_HOST, - port: config.SMTP_PORT, - auth: { - user: config.SMTP_USERNAME, - pass: config.SMTP_PASSWORD, - }, -} as SMTPTransport.Options); - -export default mailer; diff --git a/src/lib/email.ts b/src/lib/email.ts new file mode 100644 index 0000000..ccbb612 --- /dev/null +++ b/src/lib/email.ts @@ -0,0 +1,280 @@ +import formData from 'form-data'; +import Mailgun from 'mailgun.js'; +import nodemailer from 'nodemailer'; +import type SMTPTransport from 'nodemailer/lib/smtp-transport'; +import { Resend } from 'resend'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; +import { EmailError } from './errors'; + +/** + * Email parameters for sending emails + */ +export type EmailParams = { + to: string; + subject: string; + html: string; + from?: string; +}; + +/** + * Email send result + */ +export type EmailResult = { + id: string; + message?: string; +}; + +/** + * Abstract email provider interface + */ +export interface EmailProvider { + send(params: EmailParams): Promise; + healthCheck(): Promise; +} + +/** + * Mailgun email provider implementation + */ +export class MailgunProvider implements EmailProvider { + private client: ReturnType; + private domain: string; + private fromEmail: string; + + constructor() { + const mailgun = new Mailgun(formData); + this.client = mailgun.client({ + username: 'api', + key: config.MAILGUN_API_KEY as string, + }); + this.domain = config.MAILGUN_DOMAIN as string; + this.fromEmail = config.MAILGUN_FROM_EMAIL as string; + } + + async send(params: EmailParams): Promise { + try { + const messageData = { + from: params.from as string || this.fromEmail, + to: params.to, + subject: params.subject, + html: params.html, + }; + + const result = await this.client.messages.create(this.domain, messageData); + + logger.info({ + provider: 'mailgun', + id: result.id, + to: params.to, + subject: params.subject, + }, 'Email sent successfully'); + + return { + id: result.id || 'unknown', + message: result.message || 'Email sent', + }; + } catch (err) { + logger.error({ + provider: 'mailgun', + to: params.to, + subject: params.subject, + err, + }, 'Failed to send email'); + + throw new EmailError('Failed to send email via Mailgun', err); + } + } + + async healthCheck(): Promise { + try { + // Mailgun doesn't have a ping endpoint, so we just check if credentials exist + return !!(config.MAILGUN_API_KEY && config.MAILGUN_DOMAIN); + } catch (err) { + logger.error({ err }, 'Mailgun health check failed'); + return false; + } + } +} + +/** + * Nodemailer (SMTP) email provider implementation + */ +export class NodemailerProvider implements EmailProvider { + private transporter: nodemailer.Transporter; + private fromEmail: string; + + constructor() { + this.transporter = nodemailer.createTransport({ + host: config.SMTP_HOST, + port: config.SMTP_PORT, + auth: { + user: config.SMTP_USERNAME, + pass: config.SMTP_PASSWORD, + }, + } as SMTPTransport.Options); + + 
this.fromEmail = config.SMTP_FROM || config.EMAIL_FROM || 'noreply@example.com'; + } + + async send(params: EmailParams): Promise { + try { + const result = await this.transporter.sendMail({ + from: params.from || this.fromEmail, + to: params.to, + subject: params.subject, + html: params.html, + }); + + logger.info({ + provider: 'nodemailer', + messageId: result.messageId, + to: params.to, + subject: params.subject, + }, 'Email sent successfully'); + + return { + id: result.messageId, + message: result.response, + }; + } catch (err) { + logger.error({ + provider: 'nodemailer', + to: params.to, + subject: params.subject, + err, + }, 'Failed to send email'); + + throw new EmailError('Failed to send email via SMTP', err); + } + } + + async healthCheck(): Promise { + try { + await this.transporter.verify(); + return true; + } catch (err) { + logger.error({ err }, 'Nodemailer health check failed'); + return false; + } + } +} + +/** + * Resend email provider implementation + */ +export class ResendProvider implements EmailProvider { + private client: Resend; + private fromEmail: string; + + constructor() { + this.client = new Resend(config.RESEND_API_KEY); + this.fromEmail = + config.RESEND_FROM_EMAIL || config.EMAIL_FROM || 'noreply@example.com'; + } + + async send(params: EmailParams): Promise { + try { + const { data, error } = await this.client.emails.send({ + from: params.from || this.fromEmail, + to: params.to, + subject: params.subject, + html: params.html, + }); + + if (error) { + throw error; + } + + const id = (data as { id?: string } | null)?.id || 'unknown'; + + logger.info( + { + provider: 'resend', + id, + to: params.to, + subject: params.subject, + }, + 'Email sent successfully', + ); + + return { + id, + message: 'Email sent', + }; + } catch (err) { + logger.error( + { + provider: 'resend', + to: params.to, + subject: params.subject, + err, + }, + 'Failed to send email', + ); + + throw new EmailError('Failed to send email via Resend', err); + } + } + + async healthCheck(): Promise { + try { + return !!config.RESEND_API_KEY; + } catch (err) { + logger.error({ err }, 'Resend health check failed'); + return false; + } + } +} + +/** + * Factory function to create the appropriate email provider + * Priority: Resend > Mailgun > SMTP + */ +const createEmailProvider = (): EmailProvider => { + // Prefer Resend if configured + if (config.RESEND_API_KEY) { + logger.info('Using Resend email provider'); + return new ResendProvider(); + } + + // Prefer Mailgun if configured + if (config.MAILGUN_API_KEY && config.MAILGUN_DOMAIN) { + logger.info('Using Mailgun email provider'); + return new MailgunProvider(); + } + + // Fallback to SMTP if configured + if (config.SMTP_HOST && config.SMTP_PORT) { + logger.info('Using Nodemailer (SMTP) email provider'); + return new NodemailerProvider(); + } + + throw new EmailError('No email provider configured. 
Please set either Mailgun or SMTP credentials.'); +}; + +/** + * Auto-initialized email provider singleton + */ +export const emailProvider = createEmailProvider(); + +/** + * Convenience function to send emails using the configured provider + */ +export const sendEmail = async (params: EmailParams): Promise => { + return emailProvider.send(params); +}; + +/** + * Health check function for email service + * Returns a function compatible with HealthCheck interface + */ +export const checkEmailHealth = () => { + return async (): Promise => { + try { + return await emailProvider.healthCheck(); + } catch (err) { + logger.error({ err }, 'Email health check failed'); + return false; + } + }; +}; diff --git a/src/lib/errors.ts b/src/lib/errors.ts new file mode 100644 index 0000000..3415aac --- /dev/null +++ b/src/lib/errors.ts @@ -0,0 +1,63 @@ +/** + * Base error class for all library errors + * Follows the existing EmailError pattern with cause tracking + */ +export class LibraryError extends Error { + constructor( + message: string, + public readonly cause?: unknown, + ) { + super(message); + this.name = 'LibraryError'; + } +} + +/** + * Database-related errors (Mongoose, MongoDB) + */ +export class DatabaseError extends LibraryError { + constructor(message: string, cause?: unknown) { + super(message, cause); + this.name = 'DatabaseError'; + } +} + +/** + * Cache-related errors (Redis) + */ +export class CacheError extends LibraryError { + constructor(message: string, cause?: unknown) { + super(message, cause); + this.name = 'CacheError'; + } +} + +/** + * Queue-related errors (BullMQ) + */ +export class QueueError extends LibraryError { + constructor(message: string, cause?: unknown) { + super(message, cause); + this.name = 'QueueError'; + } +} + +/** + * Email-related errors (Mailgun, Nodemailer) + */ +export class EmailError extends LibraryError { + constructor(message: string, cause?: unknown) { + super(message, cause); + this.name = 'EmailError'; + } +} + +/** + * Storage-related errors (S3, file uploads) + */ +export class StorageError extends LibraryError { + constructor(message: string, cause?: unknown) { + super(message, cause); + this.name = 'StorageError'; + } +} diff --git a/src/lib/logger.service.ts b/src/lib/logger.service.ts deleted file mode 100644 index c263cec..0000000 --- a/src/lib/logger.service.ts +++ /dev/null @@ -1,15 +0,0 @@ -import pino from "pino"; -import pinohttpLogger from "pino-http"; - -const logger = pino({ - transport: { - target: "pino-pretty", - options: { - colorize: true, - }, - }, -}); - -export const httpLogger = pinohttpLogger({ logger: logger }); - -export default logger; diff --git a/src/lib/mailgun.server.ts b/src/lib/mailgun.server.ts deleted file mode 100644 index 3d8dde8..0000000 --- a/src/lib/mailgun.server.ts +++ /dev/null @@ -1,12 +0,0 @@ -import formData from "form-data"; -import Mailgun from "mailgun.js"; -import config from "../config/config.service"; - -const mailgun = new Mailgun(formData); - -const mailgunClient = mailgun.client({ - username: "api", - key: config.MAILGUN_API_KEY, -}); - -export default mailgunClient; diff --git a/src/lib/queue.server.ts b/src/lib/queue.server.ts deleted file mode 100644 index aecd4e4..0000000 --- a/src/lib/queue.server.ts +++ /dev/null @@ -1,41 +0,0 @@ -import type { Processor } from "bullmq"; -import { Queue as BullQueue, Worker } from "bullmq"; - -import logger from "./logger.service"; -import redisClient from "./redis.server"; - -type RegisteredQueue = { - queue: BullQueue; - worker: Worker; -}; - 
-declare global { - // eslint-disable-next-line no-var - var __registeredQueues: Record | undefined; -} - -if (!global.__registeredQueues) { - global.__registeredQueues = {}; -} -const registeredQueues = global.__registeredQueues; - -export function Queue( - name: string, - handler: Processor, -): BullQueue { - if (registeredQueues[name]) { - return registeredQueues[name].queue as BullQueue; - } - - const queue = new BullQueue(name, { connection: redisClient }); - - const worker = new Worker(name, handler, { - connection: redisClient, - }); - - registeredQueues[name] = { queue, worker }; - - logger.info({ name: "Queue" }, `${name}: Initialize`); - - return queue; -} diff --git a/src/lib/queue.ts b/src/lib/queue.ts new file mode 100644 index 0000000..308c152 --- /dev/null +++ b/src/lib/queue.ts @@ -0,0 +1,128 @@ +import type { Processor, QueueOptions, WorkerOptions } from 'bullmq'; +import { Queue as BullQueue, Worker } from 'bullmq'; + +import logger from '@/plugins/observability/logger'; +import { cacheProvider, RedisProvider } from './cache'; +import { QueueError } from './errors'; + +type RegisteredQueue = { + queue: BullQueue; + worker: Worker; +}; + +export const registeredQueues: Record = {}; + +/** + * Create and register a queue with its worker + * Auto-initialized singleton pattern - returns existing queue if already registered + */ +export function Queue( + name: string, + handler: Processor, + queueOptions?: QueueOptions, + workerOptions?: WorkerOptions, +): BullQueue { + if (registeredQueues[name]) { + return registeredQueues[name].queue as BullQueue; + } + + try { + // Get Redis client for queue connection + if (!(cacheProvider instanceof RedisProvider)) { + throw new QueueError( + 'Queue requires Redis cache provider. Set CACHE_PROVIDER=redis', + ); + } + + const redisClient = cacheProvider.getClient(); + + const queue = new BullQueue(name, { + connection: redisClient, + ...queueOptions, + }); + + const worker = new Worker(name, handler, { + connection: redisClient, + ...workerOptions, + }); + + // Log worker events + worker.on('completed', (job) => { + logger.debug({ queueName: name, jobId: job.id }, 'Job completed'); + }); + + worker.on('failed', (job, err) => { + logger.error({ queueName: name, jobId: job?.id, err }, 'Job failed'); + }); + + registeredQueues[name] = { queue, worker }; + + logger.info({ name }, 'Queue initialized'); + + return queue; + } catch (err) { + throw new QueueError(`Failed to create queue: ${name}`, err); + } +} + +/** + * Health check for all registered queues + * Returns a function compatible with HealthCheck interface + */ +export const checkQueueHealth = () => { + return async (): Promise => { + try { + // If no queues registered, consider it healthy + if (Object.keys(registeredQueues).length === 0) { + return true; + } + + // Check if all queues are connected (via Redis) + const healthChecks = await Promise.all( + Object.entries(registeredQueues).map(async ([name, { queue }]) => { + try { + // Try to get queue metrics as a health check + await queue.getJobCounts(); + return true; + } catch (err) { + logger.error({ queueName: name, err }, 'Queue health check failed'); + return false; + } + }), + ); + + return healthChecks.every((healthy) => healthy); + } catch (err) { + logger.error({ err }, 'Queue health check failed'); + return false; + } + }; +}; + +/** + * Gracefully close all registered queues and workers + * For use with LifecycleManager + */ +export const closeAllQueues = async (): Promise => { + try { + logger.info('Closing all 
queues...'); + + await Promise.all( + Object.entries(registeredQueues).map( + async ([name, { queue, worker }]) => { + try { + await worker.close(); + await queue.close(); + logger.debug({ queueName: name }, 'Queue closed'); + } catch (err) { + logger.error({ queueName: name, err }, 'Error closing queue'); + } + }, + ), + ); + + logger.info('All queues closed'); + } catch (err) { + throw new QueueError('Failed to close queues', err); + } +}; diff --git a/src/lib/realtime.server.ts b/src/lib/realtime.server.ts deleted file mode 100644 index 9efcd72..0000000 --- a/src/lib/realtime.server.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type { Server as IServer } from "node:http"; -import { Server as RealtimeServer } from "socket.io"; - -export const useSocketIo = (server: IServer): RealtimeServer => { - const io = new RealtimeServer(server, { - transports: ["polling", "websocket"], - cors: { - origin: "*", - methods: ["GET", "POST"], - }, - }); - - return io; -}; diff --git a/src/lib/redis.server.ts b/src/lib/redis.server.ts deleted file mode 100644 index 89524dc..0000000 --- a/src/lib/redis.server.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { RedisOptions } from "ioredis"; -import Redis from "ioredis"; -import config from "../config/config.service"; - -const redisOptions: RedisOptions = { - maxRetriesPerRequest: null, - enableReadyCheck: false, - host: "redis", -}; - -const redisClient = new Redis(config.REDIS_URL || "", redisOptions); - -export default redisClient; diff --git a/src/lib/session.store.ts b/src/lib/session.store.ts deleted file mode 100644 index 4e5e214..0000000 --- a/src/lib/session.store.ts +++ /dev/null @@ -1,8 +0,0 @@ -import RedisStore from "connect-redis"; -import redisClient from "./redis.server"; - -const redisStore = new RedisStore({ - client: redisClient, -}); - -export default redisStore; diff --git a/src/lib/storage.ts b/src/lib/storage.ts new file mode 100644 index 0000000..c165380 --- /dev/null +++ b/src/lib/storage.ts @@ -0,0 +1,449 @@ +import { PutObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3'; +import { S3Client } from '@aws-sdk/client-s3'; +import { createReadStream, promises as fs } from 'node:fs'; +import { join, dirname } from 'node:path'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; +import type { FormFile } from '../types'; +import { StorageError } from './errors'; + +/** + * Upload parameters + */ +export type UploadParams = { + file: FormFile; + key: string; +}; + +/** + * Upload result + */ +export type UploadResult = { + url: string; + key: string; +}; + +/** + * Abstract storage provider interface + */ +export interface StorageProvider { + upload(params: UploadParams): Promise; + delete(key: string): Promise; + getUrl(key: string): string; + healthCheck(): Promise; +} + +/** + * S3 storage provider implementation + */ +export class S3StorageProvider implements StorageProvider { + private client: S3Client; + private bucket: string; + + constructor() { + const s3Config: { + region: string; + credentials?: { accessKeyId: string; secretAccessKey: string }; + } = { + region: config.AWS_REGION, + }; + + // Only set explicit credentials if provided in env + if (config.AWS_ACCESS_KEY_ID && config.AWS_SECRET_ACCESS_KEY) { + s3Config.credentials = { + accessKeyId: config.AWS_ACCESS_KEY_ID, + secretAccessKey: config.AWS_SECRET_ACCESS_KEY, + }; + } + + this.client = new S3Client(s3Config); + this.bucket = config.AWS_S3_BUCKET; + } + + async upload(params: UploadParams): Promise { + try { + const 
fileStream = createReadStream(params.file.filepath); + + const command = new PutObjectCommand({ + Bucket: this.bucket, + Key: params.key, + Body: fileStream, + ContentType: params.file.mimetype || 'application/octet-stream', + }); + + await this.client.send(command); + + const url = this.getUrl(params.key); + + logger.info( + { + provider: 's3', + key: params.key, + size: params.file.size, + mimetype: params.file.mimetype, + }, + 'File uploaded successfully', + ); + + return { url, key: params.key }; + } catch (err) { + logger.error( + { + provider: 's3', + key: params.key, + err, + }, + 'Failed to upload file', + ); + + throw new StorageError('Failed to upload file to S3', err); + } + } + + async delete(key: string): Promise { + try { + const command = new DeleteObjectCommand({ + Bucket: this.bucket, + Key: key, + }); + + await this.client.send(command); + + logger.info( + { + provider: 's3', + key, + }, + 'File deleted successfully', + ); + } catch (err) { + logger.error( + { + provider: 's3', + key, + err, + }, + 'Failed to delete file', + ); + + throw new StorageError('Failed to delete file from S3', err); + } + } + + getUrl(key: string): string { + // Construct public URL (adjust based on your bucket configuration) + return `https://${this.bucket}.s3.amazonaws.com/${key}`; + } + + async healthCheck(): Promise { + try { + // Simple check: verify credentials and bucket are configured + return !!(this.bucket && config.AWS_REGION); + } catch (err) { + logger.error({ err }, 'S3 health check failed'); + return false; + } + } +} + +/** + * Cloudflare R2 storage provider implementation + * R2 is S3-compatible, so we use the same S3Client + */ +export class R2StorageProvider implements StorageProvider { + private client: S3Client; + private bucket: string; + private publicUrl?: string; + + constructor() { + if ( + !config.R2_ACCOUNT_ID || + !config.R2_ACCESS_KEY_ID || + !config.R2_SECRET_ACCESS_KEY || + !config.R2_BUCKET + ) { + throw new StorageError('Missing required R2 configuration'); + } + + // R2 endpoint format: https://.r2.cloudflarestorage.com + const endpoint = `https://${config.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`; + + this.client = new S3Client({ + region: 'auto', // R2 uses 'auto' for region + endpoint, + credentials: { + accessKeyId: config.R2_ACCESS_KEY_ID, + secretAccessKey: config.R2_SECRET_ACCESS_KEY, + }, + }); + + this.bucket = config.R2_BUCKET; + this.publicUrl = config.R2_PUBLIC_URL; + } + + async upload(params: UploadParams): Promise { + try { + const fileStream = createReadStream(params.file.filepath); + + const command = new PutObjectCommand({ + Bucket: this.bucket, + Key: params.key, + Body: fileStream, + ContentType: params.file.mimetype || 'application/octet-stream', + }); + + await this.client.send(command); + + const url = this.getUrl(params.key); + + logger.info( + { + provider: 'r2', + key: params.key, + size: params.file.size, + mimetype: params.file.mimetype, + }, + 'File uploaded successfully', + ); + + return { url, key: params.key }; + } catch (err) { + logger.error( + { + provider: 'r2', + key: params.key, + err, + }, + 'Failed to upload file', + ); + + throw new StorageError('Failed to upload file to R2', err); + } + } + + async delete(key: string): Promise { + try { + const command = new DeleteObjectCommand({ + Bucket: this.bucket, + Key: key, + }); + + await this.client.send(command); + + logger.info( + { + provider: 'r2', + key, + }, + 'File deleted successfully', + ); + } catch (err) { + logger.error( + { + provider: 'r2', + key, + err, + }, + 
'Failed to delete file', + ); + + throw new StorageError('Failed to delete file from R2', err); + } + } + + getUrl(key: string): string { + // Use custom public URL if configured (e.g., custom domain) + // Otherwise use R2 public bucket URL format + if (this.publicUrl) { + return `${this.publicUrl}/${key}`; + } + return `https://${this.bucket}.${config.R2_ACCOUNT_ID}.r2.cloudflarestorage.com/${key}`; + } + + async healthCheck(): Promise { + try { + return !!( + this.bucket && + config.R2_ACCOUNT_ID && + config.R2_ACCESS_KEY_ID && + config.R2_SECRET_ACCESS_KEY + ); + } catch (err) { + logger.error({ err }, 'R2 health check failed'); + return false; + } + } +} + +/** + * Local file system storage provider implementation + */ +export class LocalStorageProvider implements StorageProvider { + private storagePath: string; + private baseUrl?: string; + + constructor() { + this.storagePath = config.LOCAL_STORAGE_PATH; + this.baseUrl = config.LOCAL_STORAGE_BASE_URL; + } + + async upload(params: UploadParams): Promise { + try { + // Ensure storage directory exists + const targetDir = join(this.storagePath, dirname(params.key)); + await fs.mkdir(targetDir, { recursive: true }); + + // Copy file to storage location + const targetPath = join(this.storagePath, params.key); + await fs.copyFile(params.file.filepath, targetPath); + + const url = this.getUrl(params.key); + + logger.info( + { + provider: 'local', + key: params.key, + path: targetPath, + size: params.file.size, + mimetype: params.file.mimetype, + }, + 'File uploaded successfully', + ); + + return { url, key: params.key }; + } catch (err) { + logger.error( + { + provider: 'local', + key: params.key, + err, + }, + 'Failed to upload file', + ); + + throw new StorageError('Failed to upload file to local storage', err); + } + } + + async delete(key: string): Promise { + try { + const targetPath = join(this.storagePath, key); + await fs.unlink(targetPath); + + logger.info( + { + provider: 'local', + key, + path: targetPath, + }, + 'File deleted successfully', + ); + } catch (err) { + logger.error( + { + provider: 'local', + key, + err, + }, + 'Failed to delete file', + ); + + throw new StorageError('Failed to delete file from local storage', err); + } + } + + getUrl(key: string): string { + // Use base URL if configured, otherwise return file path + if (this.baseUrl) { + return `${this.baseUrl}/${key}`; + } + return `/uploads/${key}`; + } + + async healthCheck(): Promise { + try { + // Check if storage directory exists and is writable + await fs.access(this.storagePath); + return true; + } catch { + // Try to create directory if it doesn't exist + try { + await fs.mkdir(this.storagePath, { recursive: true }); + return true; + } catch (createErr) { + logger.error({ err: createErr }, 'Local storage health check failed'); + return false; + } + } + } +} + +/** + * Factory function to create the appropriate storage provider + * Supports S3, Cloudflare R2, and local file system storage + */ +const createStorageProvider = (): StorageProvider => { + const provider = config.STORAGE_PROVIDER; + + logger.info( + { provider }, + `Initializing ${provider.toUpperCase()} storage provider`, + ); + + switch (provider) { + case 's3': + return new S3StorageProvider(); + + case 'r2': + return new R2StorageProvider(); + + case 'local': + return new LocalStorageProvider(); + + default: + throw new StorageError(`Unsupported storage provider: ${provider}`); + } +}; + +/** + * Auto-initialized storage provider singleton + */ +export const storageProvider = 
createStorageProvider(); + +/** + * Convenience function to upload a file + */ +export const uploadFile = async ( + params: UploadParams, +): Promise => { + return storageProvider.upload(params); +}; + +/** + * Convenience function to delete a file + */ +export const deleteFile = async (key: string): Promise => { + return storageProvider.delete(key); +}; + +/** + * Convenience function to get file URL + */ +export const getFileUrl = (key: string): string => { + return storageProvider.getUrl(key); +}; + +/** + * Health check function for storage service + * Returns a function compatible with HealthCheck interface + */ +export const checkStorageHealth = () => { + return async (): Promise => { + try { + return await storageProvider.healthCheck(); + } catch (err) { + logger.error({ err }, 'Storage health check failed'); + return false; + } + }; +}; diff --git a/src/main.ts b/src/main.ts index b3a2c3d..c1f44ae 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,136 +1,40 @@ -import "./openapi/zod-extend"; - -import { createServer } from "node:http"; -import path from "node:path"; -import process from "node:process"; -import { createBullBoard } from "@bull-board/api"; -import { BullMQAdapter } from "@bull-board/api/bullMQAdapter"; -import { ExpressAdapter } from "@bull-board/express"; -import compression from "compression"; -import cookieParser from "cookie-parser"; -import cors from "cors"; -import express from "express"; -import session from "express-session"; -import helmet from "helmet"; -import morgan from "morgan"; -import config from "./config/config.service"; -import { connectDatabase, disconnectDatabase } from "./lib/database"; -import logger, { httpLogger } from "./lib/logger.service"; -import { useSocketIo } from "./lib/realtime.server"; -import redisStore from "./lib/session.store"; -import { extractJwt } from "./middlewares/extract-jwt-schema.middleware"; -import apiRoutes from "./routes/routes"; - -import swaggerUi from "swagger-ui-express"; - -import YAML from "yaml"; -import { convertDocumentationToYaml } from "./openapi/swagger-doc-generator"; -import globalErrorHandler from "./utils/globalErrorHandler"; - -const app = express(); - -app.set("trust proxy", true); - -const server = createServer(app); - -const io = useSocketIo(server); - -const boostrapServer = async () => { - await connectDatabase(); - - app.use((req, _, next) => { - req.io = io; - next(); - }); - - app.use( - cors({ - origin: [config.CLIENT_SIDE_URL], - optionsSuccessStatus: 200, - credentials: true, - }), - ); - - if (config.NODE_ENV === "development") { - app.use(morgan("dev")); - } else { - app.use(httpLogger); - } - - app.use(express.json()); - app.use(express.urlencoded({ extended: false })); - - app.use( - session({ - secret: config.JWT_SECRET, - resave: false, - saveUninitialized: true, - cookie: { secure: true }, - store: redisStore, - }), - ); - - // Middleware to serve static files - app.use(express.static(path.join(__dirname, "..", "public"))); - - app.use(cookieParser()); - - app.use(compression()); - - app.use(extractJwt); - - if (config.NODE_ENV === "production") { - app.use(helmet()); - } - - app.use("/api", apiRoutes); - - const swaggerDocument = YAML.parse(convertDocumentationToYaml()); - app.use("/api-docs", swaggerUi.serve, swaggerUi.setup(swaggerDocument)); - - const serverAdapter = new ExpressAdapter(); - serverAdapter.setBasePath("/admin/queues"); - - createBullBoard({ - queues: Object.entries(global.__registeredQueues || {}).map( - ([, values]) => new BullMQAdapter(values.queue), - ), - 
serverAdapter, - }); - - // Dashbaord for BullMQ - app.use("/admin/queues", serverAdapter.getRouter()); - - // Global Error Handler - app.use(globalErrorHandler); - - server.listen(config.PORT, () => { - logger.info(`Server is running on http://localhost:${config.PORT}`); - logger.info(`RESTful API: http://localhost:${config.PORT}/api`); - logger.info(`Swagger API Docs: http://localhost:${config.PORT}/api-docs`); - logger.info(`BullBoard: http://localhost:${config.PORT}/admin/queues`); - logger.info(`Client-side url set to: ${config.CLIENT_SIDE_URL}`); - }); +import '@/plugins/magic/zod-extend'; +import { initializeApp } from '@/app/app'; +import config from '@/config/env'; +import { connectDatabase } from '@/lib/database'; +import logger from '@/plugins/observability/logger'; +import apiRoutes from '@/routes/routes'; +import errorHandler from '@/middlewares/error-handler'; + +import { resolvePort } from '@/extras/port-resolver'; +import { ErrorRequestHandler } from 'express'; + +const bootstrapServer = async () => { + // Resolve port availability (dev-only interactive prompt) + const selectedPort = + config.NODE_ENV === 'development' + ? await resolvePort({ desiredPort: config.PORT }) + : config.PORT; + + await connectDatabase(); + + const { app, server } = await initializeApp(selectedPort); + + app.use('/api', apiRoutes); + + app.use(errorHandler as unknown as ErrorRequestHandler); + + server.listen(selectedPort, () => { + logger.info(`Server is running on http://localhost:${selectedPort}`); + logger.info(`RESTful API: http://localhost:${selectedPort}/api`); + logger.info(`Client-side url set to: ${config.CLIENT_SIDE_URL}`); + logger.info( + `Socket Testing Suite: http://localhost:${selectedPort}/realtime`, + ); + }); }; -boostrapServer().catch((err) => { - logger.error(err.message); - process.exit(1); -}); - -for (const signal of ["SIGINT", "SIGTERM"]) { - process.on(signal, async () => { - await disconnectDatabase(); - logger.info("Server is shutting down..."); - io.disconnectSockets(true); - logger.info("Server disconnected from sockets"); - server.close(); - logger.info("Server closed"); - process.exit(0); - }); -} - -process.on("uncaughtException", (err) => { - logger.error(err.message); - process.exit(1); +bootstrapServer().catch((err) => { + logger.error({ err }, 'Failed to bootstrap server'); + process.exit(1); }); diff --git a/src/middlewares/can-access.middleware.ts b/src/middlewares/can-access.middleware.ts deleted file mode 100644 index 12f9588..0000000 --- a/src/middlewares/can-access.middleware.ts +++ /dev/null @@ -1,91 +0,0 @@ -import type { NextFunction } from "express"; -import { StatusCodes } from "http-status-codes"; -import type { RoleType } from "../enums"; -import { getUserById } from "../modules/user/user.services"; -import { errorResponse } from "../utils/api.utils"; -import type { JwtPayload } from "../utils/auth.utils"; -import type { RequestAny, ResponseAny } from "../openapi/magic-router"; - -export type CanAccessByType = "roles"; - -export type CanAccessOptions = { - roles: RoleType | "*"; -}; - -export const canAccess = - (by?: T, access?: CanAccessOptions[T][]) => - async (req: RequestAny, res: ResponseAny, next?: NextFunction) => { - try { - const requestUser = req?.user as JwtPayload; - - if (!requestUser) { - return errorResponse( - res, - "token isn't attached or expired", - StatusCodes.UNAUTHORIZED, - ); - } - const currentUser = await getUserById(requestUser.sub); - - if (!currentUser) { - return errorResponse(res, "Login again", 
StatusCodes.UNAUTHORIZED); - } - - if (currentUser.otp !== null) { - return errorResponse( - res, - "Your account is not verified", - StatusCodes.UNAUTHORIZED, - ); - } - - let can = false; - - const accessorsToScanFor = access; - - if (by === "roles" && accessorsToScanFor) { - can = (accessorsToScanFor as RoleType[]).includes( - currentUser.role as RoleType, - ); - } - - if (!accessorsToScanFor) { - can = Boolean(currentUser.email); - } - - if (!can && by === "roles") { - return errorResponse( - res, - "User is not authorized to perform this action", - StatusCodes.UNAUTHORIZED, - { [`${by}_required`]: access }, - ); - } - - if (currentUser && !by && !access) { - can = true; - } - - if (!can) { - return errorResponse( - res, - "User is not authenticated", - StatusCodes.UNAUTHORIZED, - access, - ); - } - - if (currentUser) { - req.user = { ...currentUser, sub: currentUser._id }; - } - } catch (err) { - return errorResponse( - res, - (err as Error).message, - StatusCodes.UNAUTHORIZED, - access, - ); - } - - next?.(); - }; diff --git a/src/middlewares/can-access.ts b/src/middlewares/can-access.ts new file mode 100644 index 0000000..d06e874 --- /dev/null +++ b/src/middlewares/can-access.ts @@ -0,0 +1,71 @@ +import type { NextFunction } from 'express'; +import { StatusCodes } from '@/plugins/magic/status-codes'; +import type { RoleType } from '@/enums'; +import { errorResponse } from '@/utils/response.utils'; +import type { JwtPayload } from '@/utils/jwt.utils'; +import type { RequestAny, ResponseAny } from '@/plugins/magic/router'; + +export type CanAccessByType = 'roles'; + +export type CanAccessOptions = { + roles: RoleType | '*'; +}; + +export const canAccess = + (by?: T, access?: CanAccessOptions[T][]) => + async (req: RequestAny, res: ResponseAny, next: NextFunction) => { + try { + const requestUser = req?.user as JwtPayload | undefined; + + if (!requestUser) { + return errorResponse( + res, + "token isn't attached or expired", + StatusCodes.UNAUTHORIZED, + ); + } + + let can = false; + + const accessorsToScanFor = access as (RoleType | '*')[] | undefined; + + if (by === 'roles' && accessorsToScanFor && accessorsToScanFor.length) { + if ((accessorsToScanFor as (RoleType | '*')[]).includes('*')) { + can = true; + } else { + can = (accessorsToScanFor as RoleType[]).includes(requestUser.role); + } + } + + if (!accessorsToScanFor && !by) { + can = true; // Authenticated since JWT is present + } + + if (!can && by === 'roles') { + return errorResponse( + res, + 'User is not authorized to perform this action', + StatusCodes.UNAUTHORIZED, + { [`${by}_required`]: access }, + ); + } + + if (!can) { + return errorResponse( + res, + 'User is not authenticated', + StatusCodes.UNAUTHORIZED, + access, + ); + } + } catch (err) { + return errorResponse( + res, + (err as Error).message, + StatusCodes.UNAUTHORIZED, + access, + ); + } + + next(); + }; diff --git a/src/middlewares/error-handler.ts b/src/middlewares/error-handler.ts new file mode 100644 index 0000000..c2125af --- /dev/null +++ b/src/middlewares/error-handler.ts @@ -0,0 +1,39 @@ +import type { NextFunction, Request, Response } from 'express'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; +import type { ResponseExtended } from '@/types'; +import { errorResponse } from '@/utils/response.utils'; +import { StatusCodesValues } from '@/plugins/magic/status-codes'; + +interface CustomError extends Error { + status?: number; + message: string; +} + +/** + * Global error handler middleware for Express + * 
Catches all errors and sends a standardized error response + */ +export const errorHandler = ( + err: CustomError, + _: Request, + res: ResponseExtended | Response, + __: NextFunction, +): void => { + const statusCode = err.status || 500; + const errorMessage = err.message || 'Internal Server Error'; + + logger.error(`${statusCode}: ${errorMessage}`); + + errorResponse( + res as ResponseExtended, + errorMessage, + statusCode as StatusCodesValues, + undefined, + config.NODE_ENV === 'development' ? err.stack : undefined, + ); + + return; +}; + +export default errorHandler; diff --git a/src/middlewares/extract-jwt-schema.middleware.ts b/src/middlewares/extract-jwt-schema.middleware.ts deleted file mode 100644 index c32334d..0000000 --- a/src/middlewares/extract-jwt-schema.middleware.ts +++ /dev/null @@ -1,25 +0,0 @@ -import type { NextFunction } from "express"; -import { type JwtPayload, verifyToken } from "../utils/auth.utils"; -import type { RequestAny, ResponseAny } from "../openapi/magic-router"; - -export const extractJwt = async ( - req: RequestAny, - _: ResponseAny, - next: NextFunction, -) => { - try { - const token = - req.cookies?.accessToken ?? req.headers.authorization?.split(" ")[1]; - - if (!token) { - return next(); - } - - const decode = await verifyToken(token); - - req.user = decode; - return next(); - } catch { - return next(); - } -}; diff --git a/src/middlewares/extract-jwt.ts b/src/middlewares/extract-jwt.ts new file mode 100644 index 0000000..8ccfc7c --- /dev/null +++ b/src/middlewares/extract-jwt.ts @@ -0,0 +1,71 @@ +import type { NextFunction } from 'express'; +import { type JwtPayload, verifyToken } from '@/utils/jwt.utils'; +import type { RequestAny, ResponseAny } from '@/plugins/magic/router'; +import config from '@/config/env'; + +import { createChildLogger } from '@/plugins/observability/logger'; + +const logger = createChildLogger({ context: 'extract-jwt' }); + +export const extractJwt = async ( + req: RequestAny, + _: ResponseAny, + next: NextFunction, +) => { + try { + logger.debug('Starting JWT extraction'); + const token = + req.cookies?.accessToken ?? req.headers.authorization?.split(' ')[1]; + + if (!token) { + logger.debug('No JWT token found in cookies or headers'); + return next(); + } + + logger.debug( + { token: token ? 
'[REDACTED]' : undefined }, + 'JWT token found, verifying', + ); + const decode = await verifyToken(token); + + if (config.SET_SESSION && req.app.locals.sessionManager) { + logger.debug('Session management enabled, validating session'); + const sessionManager = req.app.locals.sessionManager; + + if (!decode.sid) { + logger.warn('JWT does not contain a session id (sid)'); + return next(); + } + + const validation = await sessionManager.validateSession( + decode.sid, + token, + ); + + if (!validation.isValid) { + logger.warn( + { sid: decode.sid, reason: validation.reason }, + 'Session validation failed', + ); + return next(); + } + + logger.debug( + { sid: decode.sid, userId: validation.session?.userId }, + 'Session validated successfully', + ); + req.session = validation.session; + } + + req.user = decode; + logger.debug( + { userId: decode.sub, sid: decode.sid }, + 'JWT decoded and user attached to request', + ); + + return next(); + } catch (err) { + logger.error({ err }, 'Error extracting or verifying JWT'); + return next(); + } +}; diff --git a/src/middlewares/multer-s3.middleware.ts b/src/middlewares/multer-s3.middleware.ts deleted file mode 100644 index a885cb7..0000000 --- a/src/middlewares/multer-s3.middleware.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type { NextFunction } from 'express'; -import { StatusCodes } from 'http-status-codes'; -import multer from 'multer'; -import multerS3 from 'multer-s3'; -import s3, { BUCKET_NAME } from '../lib/aws.service'; -import { errorResponse } from '../utils/api.utils'; -import { checkFiletype } from '../utils/common.utils'; -import type { RequestAny, ResponseAny } from '../openapi/magic-router'; - -const storageEngineProfile: multer.StorageEngine = multerS3({ - s3: s3, - bucket: BUCKET_NAME, - metadata: (_, file, cb) => { - cb(null, { fieldName: file.fieldname }); - }, - key: (req: RequestAny, file, cb) => { - const key = `user-${req.user.id}/profile/${file.originalname}`; - - if (checkFiletype(file)) { - cb(null, key); - } else { - cb('File format is not valid', key); - } - }, -}); - -export const uploadProfile = ( - req: RequestAny, - res: ResponseAny, - next?: NextFunction, -) => { - const upload = multer({ - storage: storageEngineProfile, - limits: { fileSize: 1000000 * 10 }, - }).single('avatar'); - - upload(req, res, (err) => { - if (err) { - return errorResponse( - res, - (err as Error).message, - StatusCodes.BAD_REQUEST, - err, - ); - } - - next?.(); - }); -}; diff --git a/src/middlewares/response-validator.ts b/src/middlewares/response-validator.ts new file mode 100644 index 0000000..ba94f21 --- /dev/null +++ b/src/middlewares/response-validator.ts @@ -0,0 +1,144 @@ +import type { NextFunction, Request } from 'express'; +import { StatusCodes } from '@/plugins/magic/status-codes'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; +import type { ResponseExtended } from '@/types'; + +type ValidationMode = 'strict' | 'warn' | 'off'; + +/** + * Response validation middleware + * + * Validates outgoing responses against schemas defined in the route configuration. + * Also injects ergonomic response helpers (res.ok, res.created, res.noContent). + * + * Modes: + * - strict: Returns 500 error if response doesn't match schema + * - warn: Logs warning but sends response anyway + * - off: No validation (helpers still available) + * + * Configure via RESPONSE_VALIDATION env var. 
+ */ +export const responseValidator = ( + _req: Request, + res: ResponseExtended, + next: NextFunction, +) => { + const mode: ValidationMode = config.RESPONSE_VALIDATION as ValidationMode; + const schemas = res.locals.responseSchemas; + + /** + * Core validation and send logic + * Validates response body against schema for given status code + */ + const validateAndSend = (status: number, body: unknown): void => { + // Skip validation if no schemas configured or validation is off + if (mode === 'off' || !schemas) { + res.status(status).json(body); + return; + } + + const entry = schemas.get(status); + + // No schema for this status - send as-is + if (!entry) { + res.status(status).json(body); + return; + } + + // Skip validation for non-JSON content types + const contentType = entry.contentType || 'application/json'; + if (contentType !== 'application/json') { + res.status(status).json(body); + return; + } + + // Validate response body + const parsed = entry.schema.safeParse(body); + + if (!parsed.success) { + const issues = parsed.error.format(); + + if (mode === 'strict') { + logger.error( + { issues, status, body }, + 'Response validation failed - schema mismatch', + ); + res.status(StatusCodes.INTERNAL_SERVER_ERROR).json({ + success: false, + message: 'Response validation failed', + data: + config.NODE_ENV === 'development' + ? { issues, originalBody: body } + : undefined, + }); + return; + } + + if (mode === 'warn') { + logger.warn( + { issues, status, body }, + 'Response validation warning - schema mismatch', + ); + } + } + + // Send validated or original body (depending on mode) + res.status(status).json(parsed.success ? parsed.data : body); + }; + + /** + * res.ok() - Send 200 OK response + * @param payload - Response body + * + * @example + * return res.ok({ success: true, data: user }); + */ + res.ok = (payload: T): void => { + validateAndSend(StatusCodes.OK, payload); + }; + + /** + * res.created() - Send 201 Created response + * @param payload - Response body + * + * @example + * return res.created({ success: true, data: newItem }); + */ + res.created = (payload: T): void => { + validateAndSend(StatusCodes.CREATED, payload); + }; + + /** + * res.noContent() - Send 204 No Content response + * Use for successful operations that don't return data + * + * @example + * return res.noContent(); + */ + res.noContent = (): void => { + // Check if 204 is configured in schemas + const entry = schemas?.get(StatusCodes.NO_CONTENT); + + if (entry) { + // Validate empty response if schema exists + const parsed = entry.schema.safeParse(undefined); + + if (!parsed.success && mode === 'strict') { + logger.error( + { issues: parsed.error.format() }, + 'Response validation failed for 204 No Content', + ); + res.status(StatusCodes.INTERNAL_SERVER_ERROR).json({ + success: false, + message: 'Response validation failed', + }); + return; + } + } + + res.status(StatusCodes.NO_CONTENT).end(); + }; + + next(); +}; diff --git a/src/middlewares/validate-zod-schema.middleware.ts b/src/middlewares/validate-zod-schema.middleware.ts deleted file mode 100644 index 9e118bb..0000000 --- a/src/middlewares/validate-zod-schema.middleware.ts +++ /dev/null @@ -1,41 +0,0 @@ -import type { NextFunction } from "express"; -import { StatusCodes } from "http-status-codes"; -import { ZodError, type ZodSchema } from "zod"; -import type { RequestZodSchemaType } from "../types"; -import { errorResponse } from "../utils/api.utils"; -import { sanitizeRecord } from "../utils/common.utils"; -import type { RequestAny, ResponseAny } 
from "../openapi/magic-router"; - -export const validateZodSchema = - (payload: RequestZodSchemaType) => - (req: RequestAny, res: ResponseAny, next?: NextFunction) => { - let error: ZodError | null = null; - - for (const [key, value] of Object.entries(payload)) { - const typedProp = [key, value] as [keyof RequestZodSchemaType, ZodSchema]; - const [typedKey, typedValue] = typedProp; - - const parsed = typedValue.safeParse(req[typedKey]); - - if (!parsed.success) { - if (error instanceof ZodError) { - error.addIssues(parsed.error.issues); - } else { - error = parsed.error; - } - } - - req[typedKey] = sanitizeRecord(parsed.data); - } - - if (error) { - return errorResponse( - res, - "Invalid input", - StatusCodes.BAD_REQUEST, - error, - ); - } - - next?.(); - }; diff --git a/src/middlewares/validate-zod-schema.ts b/src/middlewares/validate-zod-schema.ts new file mode 100644 index 0000000..5d85cda --- /dev/null +++ b/src/middlewares/validate-zod-schema.ts @@ -0,0 +1,41 @@ +import type { NextFunction } from 'express'; +import { StatusCodes } from '@/plugins/magic/status-codes'; +import { ZodError, type ZodSchema } from 'zod'; +import type { RequestZodSchemaType } from '@/types'; +import { errorResponse } from '@/utils/response.utils'; +import { sanitizeRecord } from '@/utils/record.utils'; +import type { RequestAny, ResponseAny } from '@/plugins/magic/router'; + +export const validateZodSchema = + (payload: RequestZodSchemaType) => + (req: RequestAny, res: ResponseAny, next: NextFunction) => { + let error: ZodError | null = null; + + for (const [key, value] of Object.entries(payload)) { + const typedProp = [key, value] as [keyof RequestZodSchemaType, ZodSchema]; + const [typedKey, typedValue] = typedProp; + + const parsed = typedValue.safeParse(req[typedKey]); + + if (!parsed.success) { + if (error instanceof ZodError) { + error.addIssues(parsed.error.issues); + } else { + error = parsed.error; + } + } + + req[typedKey] = sanitizeRecord(parsed.data); + } + + if (error) { + return errorResponse( + res, + 'Invalid input', + StatusCodes.BAD_REQUEST, + error, + ); + } + + next(); + }; diff --git a/src/modules/auth/auth.constants.ts b/src/modules/auth/auth.constants.ts index fcf889c..feab68a 100644 --- a/src/modules/auth/auth.constants.ts +++ b/src/modules/auth/auth.constants.ts @@ -1,14 +1,14 @@ -import type { CookieOptions } from "express"; -import config from "../../config/config.service"; +import type { CookieOptions } from 'express'; +import config from '../../config/env'; const clientSideUrl = new URL(config.CLIENT_SIDE_URL); -export const AUTH_COOKIE_KEY = "accessToken"; +export const AUTH_COOKIE_KEY = 'accessToken'; export const COOKIE_CONFIG: CookieOptions = { - httpOnly: true, - sameSite: "lax", - secure: config.NODE_ENV === "production", - maxAge: config.SESSION_EXPIRES_IN * 1000, - domain: clientSideUrl.hostname, + httpOnly: true, + sameSite: 'lax', + secure: config.NODE_ENV === 'production', + maxAge: config.SESSION_EXPIRES_IN * 1000, + domain: clientSideUrl.hostname, }; diff --git a/src/modules/auth/auth.controller.ts b/src/modules/auth/auth.controller.ts index d0e1f31..8938c0f 100644 --- a/src/modules/auth/auth.controller.ts +++ b/src/modules/auth/auth.controller.ts @@ -1,109 +1,262 @@ -import type { Request, Response } from "express"; -import config from "../../config/config.service"; -import type { GoogleCallbackQuery } from "../../types"; -import { successResponse } from "../../utils/api.utils"; -import type { JwtPayload } from "../../utils/auth.utils"; -import { AUTH_COOKIE_KEY, 
COOKIE_CONFIG } from "./auth.constants"; +import type { Request } from 'express'; +import config from '@/config/env'; +import type { ResponseExtended } from '@/types'; +import { successResponse } from '@/utils/response.utils'; +import type { JwtPayload } from '@/utils/jwt.utils'; +import { AUTH_COOKIE_KEY, COOKIE_CONFIG } from './auth.constants'; import type { - ChangePasswordSchemaType, - ForgetPasswordSchemaType, - LoginUserByEmailSchemaType, - RegisterUserByEmailSchemaType, - ResetPasswordSchemaType, -} from "./auth.schema"; + ChangePasswordSchemaType, + ForgetPasswordSchemaType, + LoginUserByEmailSchemaType, + RegisterUserByEmailSchemaType, + ResetPasswordSchemaType, + ResetPasswordResponseSchema, + ForgetPasswordResponseSchema, + ChangePasswordResponseSchema, + LogoutResponseSchema, + LoginResponseSchema, + GetCurrentUserResponseSchema, + ListSessionsResponseSchema, + RevokeSessionResponseSchema, + RevokeAllSessionsResponseSchema, + GoogleLoginResponseSchema, + GoogleCallbackResponseSchema, + GoogleCallbackSchemaType, +} from './auth.schema'; import { - changePassword, - forgetPassword, - googleLogin, - loginUserByEmail, - registerUserByEmail, - resetPassword, -} from "./auth.service"; + changePassword, + forgetPassword, + googleLogin, + loginUserByEmail, + registerUserByEmail, + resetPassword, +} from './auth.service'; +import { generateGoogleAuthUrl } from '@/utils/google-oauth.utils'; +// Using new res.ok() helper export const handleResetPassword = async ( - req: Request, - res: Response, + req: Request, + res: ResponseExtended, ) => { - await resetPassword(req.body); + await resetPassword(req.body); - return successResponse(res, "Password successfully reset"); + return res.ok?.({ + success: true, + message: 'Password successfully reset', + }); }; +// Using new res.ok() helper export const handleForgetPassword = async ( - req: Request, - res: Response, + req: Request, + res: ResponseExtended, ) => { - const user = await forgetPassword(req.body); + const user = await forgetPassword(req.body); - return successResponse(res, "Code has been sent", { userId: user._id }); + return res.ok?.({ + success: true, + message: 'Code has been sent', + data: { userId: user._id }, + }); }; +// Using new res.ok() helper export const handleChangePassword = async ( - req: Request, - res: Response, + req: Request, + res: ResponseExtended, ) => { - await changePassword((req.user as JwtPayload).sub, req.body); + await changePassword((req.user as JwtPayload).sub, req.body); - return successResponse(res, "Password successfully changed"); + return res.ok?.({ + success: true, + message: 'Password successfully changed', + }); }; +// Using legacy successResponse (register doesn't return token directly) export const handleRegisterUser = async ( - req: Request, - res: Response, + req: Request, + res: ResponseExtended, ) => { - const user = await registerUserByEmail(req.body); + const user = await registerUserByEmail(req.body); - if (config.OTP_VERIFICATION_ENABLED) { - return successResponse(res, "Please check your email for OTP", user); - } + if (config.OTP_VERIFICATION_ENABLED) { + return successResponse(res, 'Please check your email for OTP', user); + } - return successResponse(res, "User has been reigstered", user); + return successResponse(res, 'User has been registered', user); }; -export const handleLogout = async (_: Request, res: Response) => { - res.cookie(AUTH_COOKIE_KEY, undefined, COOKIE_CONFIG); +// Using new res.ok() helper +export const handleLogout = async ( + req: Request, + res: 
ResponseExtended, +) => { + if (config.SET_SESSION && req.session && req.app.locals.sessionManager) { + const sessionManager = req.app.locals.sessionManager; + await sessionManager.revokeSession(req.session.sessionId); + } + + res.clearCookie(AUTH_COOKIE_KEY, COOKIE_CONFIG); - return successResponse(res, "Logout successful"); + return res.ok?.({ + success: true, + message: 'Logout successful', + }); }; +// Using new res.ok() helper (login uses 200, not 201) export const handleLoginByEmail = async ( - req: Request, - res: Response, + req: Request, + res: ResponseExtended, ) => { - const token = await loginUserByEmail(req.body); - if (config.SET_SESSION) { - res.cookie(AUTH_COOKIE_KEY, token, COOKIE_CONFIG); - } - return successResponse(res, "Login successful", { token: token }); + const metadata = { + userAgent: req.headers['user-agent'], + ipAddress: + req.ip || + (req.headers['x-forwarded-for'] as string) || + req.connection?.remoteAddress, + }; + + const result = await loginUserByEmail(req.body, metadata); + + if (config.SET_SESSION) { + res.cookie(AUTH_COOKIE_KEY, result.token, COOKIE_CONFIG); + } + + return res.ok?.({ + success: true, + message: 'Login successful', + data: { + token: result.token, + }, + }); }; -export const handleGetCurrentUser = async (req: Request, res: Response) => { - const user = req.user; +// Using new res.ok() helper +export const handleGetCurrentUser = async ( + req: Request, + res: ResponseExtended, +) => { + const user = req.user; - return successResponse(res, undefined, user); + return res.ok?.({ + success: true, + data: { + sub: user?.sub, + email: user.email || '', + username: user.username || '', + role: user.role || '', + phoneNo: user.phoneNo || '', + }, + }); }; -export const handleGoogleLogin = async (_: Request, res: Response) => { - if (!config.GOOGLE_CLIENT_ID || !config.GOOGLE_REDIRECT_URI) { - throw new Error("Google credentials are not set"); - } - const googleAuthURL = `https://accounts.google.com/o/oauth2/v2/auth?response_type=code&client_id=${config.GOOGLE_CLIENT_ID}&redirect_uri=${config.GOOGLE_REDIRECT_URI}&scope=email profile`; +export const handleGoogleLogin = async ( + _: Request, + res: ResponseExtended, +) => { + if (!config.GOOGLE_CLIENT_ID || !config.GOOGLE_REDIRECT_URI) { + throw new Error('Google credentials are not set'); + } + + const googleAuthURL = generateGoogleAuthUrl({ + clientId: config.GOOGLE_CLIENT_ID, + redirectUri: config.GOOGLE_REDIRECT_URI, + scope: 'email profile', + }); - res.redirect(googleAuthURL); + return res.ok?.({ + success: true, + message: 'Google auth URL retrieved', + data: { + url: googleAuthURL, + }, + }); }; + export const handleGoogleCallback = async ( - req: Request, - res: Response, + req: Request, + res: ResponseExtended, +) => { + const metadata = { + userAgent: req.headers['user-agent'], + ipAddress: + req.ip || + (req.headers['x-forwarded-for'] as string) || + req.connection?.remoteAddress, + }; + + const result = await googleLogin(req.query, metadata); + + if (!result.user) throw new Error('Failed to login'); + + if (config.SET_SESSION) { + res.cookie(AUTH_COOKIE_KEY, result.token, COOKIE_CONFIG); + } + + return res.ok?.({ + success: true, + message: 'Logged in successfully', + data: { + token: result.token, + sessionId: result.sessionId, + }, + }); +}; + +// Using new res.ok() helper +export const handleListSessions = async ( + req: Request, + res: ResponseExtended, +) => { + if (!config.SET_SESSION || !req.app.locals.sessionManager) { + throw new Error('Session management is not enabled'); + 
} + + const userId = (req.user as JwtPayload).sub; + const sessionManager = req.app.locals.sessionManager; + const sessions = await sessionManager.listUserSessions(userId); + + return res.ok?.({ + success: true, + data: sessions, + }); +}; + +// Using new res.ok() helper +export const handleRevokeSession = async ( + req: Request<{ sessionId: string }>, + res: ResponseExtended, ) => { - const user = await googleLogin(req.query); - if (!user) throw new Error("Failed to login"); - res.cookie( - AUTH_COOKIE_KEY, - user.socialAccount?.[0]?.accessToken, - COOKIE_CONFIG, - ); - - return successResponse(res, "Logged in successfully", { - token: user.socialAccount?.[0]?.accessToken, - }); + if (!config.SET_SESSION || !req.app.locals.sessionManager) { + throw new Error('Session management is not enabled'); + } + + const sessionManager = req.app.locals.sessionManager; + await sessionManager.revokeSession(req.params.sessionId); + + return res.ok?.({ + success: true, + message: 'Session revoked successfully', + }); +}; + +// Using new res.ok() helper +export const handleRevokeAllSessions = async ( + req: Request, + res: ResponseExtended, +) => { + if (!config.SET_SESSION || !req.app.locals.sessionManager) { + throw new Error('Session management is not enabled'); + } + + const userId = (req.user as JwtPayload).sub; + const sessionManager = req.app.locals.sessionManager; + await sessionManager.revokeAllUserSessions(userId); + + return res.ok?.({ + success: true, + message: 'All sessions revoked successfully', + }); }; diff --git a/src/modules/auth/auth.router.ts b/src/modules/auth/auth.router.ts index 5cf02d5..5d05bbe 100644 --- a/src/modules/auth/auth.router.ts +++ b/src/modules/auth/auth.router.ts @@ -1,64 +1,164 @@ -import { canAccess } from "../../middlewares/can-access.middleware"; -import MagicRouter from "../../openapi/magic-router"; +import { canAccess } from '@/middlewares/can-access'; +import MagicRouter from '@/plugins/magic/router'; import { - handleChangePassword, - handleForgetPassword, - handleGetCurrentUser, - handleGoogleCallback, - handleGoogleLogin, - handleLoginByEmail, - handleLogout, - handleRegisterUser, - handleResetPassword, -} from "./auth.controller"; + handleChangePassword, + handleForgetPassword, + handleGetCurrentUser, + handleGoogleCallback, + handleGoogleLogin, + handleListSessions, + handleLoginByEmail, + handleLogout, + handleRegisterUser, + handleResetPassword, + handleRevokeAllSessions, + handleRevokeSession, +} from './auth.controller'; import { - changePasswordSchema, - forgetPasswordSchema, - loginUserByEmailSchema, - registerUserByEmailSchema, - resetPasswordSchema, -} from "./auth.schema"; + changePasswordSchema, + forgetPasswordSchema, + loginUserByEmailSchema, + registerUserByEmailSchema, + resetPasswordSchema, + loginResponseSchema, + registerResponseSchema, + logoutResponseSchema, + getCurrentUserResponseSchema, + forgetPasswordResponseSchema, + changePasswordResponseSchema, + resetPasswordResponseSchema, + listSessionsResponseSchema, + revokeSessionResponseSchema, + revokeAllSessionsResponseSchema, + googleLoginResponseSchema, + googleCallbackSchema, + googleCallbackResponseSchema, +} from './auth.schema'; -export const AUTH_ROUTER_ROOT = "/auth"; +export const AUTH_ROUTER_ROOT = '/auth'; const authRouter = new MagicRouter(AUTH_ROUTER_ROOT); +// Login with email authRouter.post( - "/login/email", - { requestType: { body: loginUserByEmailSchema } }, - handleLoginByEmail, + '/login/email', + { + requestType: { body: loginUserByEmailSchema }, + responses: { + 
200: loginResponseSchema, + }, + }, + handleLoginByEmail, ); +// Register with email authRouter.post( - "/register/email", - { requestType: { body: registerUserByEmailSchema } }, - handleRegisterUser, + '/register/email', + { + requestType: { body: registerUserByEmailSchema }, + responses: { + 201: registerResponseSchema, + }, + }, + handleRegisterUser, ); -authRouter.post("/logout", {}, handleLogout); +// Logout +authRouter.post( + '/logout', + { + responses: { + 200: logoutResponseSchema, + }, + }, + handleLogout, +); -authRouter.get("/me", {}, canAccess(), handleGetCurrentUser); +// Get current user +authRouter.get( + '/me', + { + responses: { + 200: getCurrentUserResponseSchema, + }, + }, + canAccess(), + handleGetCurrentUser, +); +// Forget password authRouter.post( - "/forget-password", - { requestType: { body: forgetPasswordSchema } }, - handleForgetPassword, + '/forget-password', + { + requestType: { body: forgetPasswordSchema }, + responses: { + 200: forgetPasswordResponseSchema, + }, + }, + handleForgetPassword, ); +// Change password authRouter.post( - "/change-password", - { requestType: { body: changePasswordSchema } }, - canAccess(), - handleChangePassword, + '/change-password', + { + requestType: { body: changePasswordSchema }, + responses: { + 200: changePasswordResponseSchema, + }, + }, + canAccess(), + handleChangePassword, ); +// Reset password authRouter.post( - "/reset-password", - { requestType: { body: resetPasswordSchema } }, - handleResetPassword, + '/reset-password', + { + requestType: { body: resetPasswordSchema }, + responses: { + 200: resetPasswordResponseSchema, + }, + }, + handleResetPassword, +); + +authRouter.get('/google', { responses: { 200: googleLoginResponseSchema } }, handleGoogleLogin); + +authRouter.get('/google/callback', { requestType: { query: googleCallbackSchema }, responses: { 200: googleCallbackResponseSchema } }, handleGoogleCallback); + +// Session management +authRouter.get( + '/sessions', + { + responses: { + 200: listSessionsResponseSchema, + }, + }, + canAccess(), + handleListSessions, ); -authRouter.get("/google", {}, handleGoogleLogin); -authRouter.get("/google/callback", {}, handleGoogleCallback); +authRouter.delete( + '/sessions/:sessionId', + { + responses: { + 200: revokeSessionResponseSchema, + }, + }, + canAccess(), + handleRevokeSession, +); + +authRouter.delete( + '/sessions', + { + responses: { + 200: revokeAllSessionsResponseSchema, + }, + }, + canAccess(), + handleRevokeAllSessions, +); export default authRouter.getRouter(); diff --git a/src/modules/auth/auth.schema.ts b/src/modules/auth/auth.schema.ts index dc74e17..d365a61 100644 --- a/src/modules/auth/auth.schema.ts +++ b/src/modules/auth/auth.schema.ts @@ -1,60 +1,157 @@ -import validator from "validator"; -import z from "zod"; -import { passwordValidationSchema } from "../../common/common.schema"; -import { baseCreateUser } from "../user/user.schema"; +import validator from 'validator'; +import z from 'zod'; +import { passwordValidationSchema } from '@/common/common.schema'; +import { R } from '@/plugins/magic/response.builders'; +import { baseCreateUser } from '@/modules/user/user.schema'; +import { userOutSchema } from '@/modules/user/user.dto'; +import { sessionRecordSchema } from '@/modules/auth/session/session.schema'; export const resetPasswordSchema = z.object({ - userId: z - .string({ required_error: "userId is required" }) - .min(1) - .refine((value) => validator.isMongoId(value), "userId must be valid"), - code: z - .string({ required_error: "code is 
required" }) - .min(4) - .max(4) - .refine((value) => validator.isAlphanumeric(value), "code must be valid"), - password: passwordValidationSchema("Password"), - confirmPassword: passwordValidationSchema("Confirm password"), + userId: z + .string({ required_error: 'userId is required' }) + .min(1) + .refine((value) => validator.isMongoId(value), 'userId must be valid'), + code: z + .string({ required_error: 'code is required' }) + .min(4) + .max(4) + .refine((value) => validator.isAlphanumeric(value), 'code must be valid'), + password: passwordValidationSchema('Password'), + confirmPassword: passwordValidationSchema('Confirm password'), }); export const changePasswordSchema = z.object({ - currentPassword: passwordValidationSchema("Current password"), - newPassword: passwordValidationSchema("New password"), + currentPassword: passwordValidationSchema('Current password'), + newPassword: passwordValidationSchema('New password'), }); export const forgetPasswordSchema = z.object({ - email: z - .string({ required_error: "Email is required" }) - .email("Email must be valid"), + email: z + .string({ required_error: 'Email is required' }) + .email('Email must be valid'), }); export const registerUserByEmailSchema = z - .object({ - name: z.string({ required_error: "Name is required" }).min(1), - confirmPassword: passwordValidationSchema("Confirm Password"), - }) - .merge(baseCreateUser) - .strict() - .refine(({ password, confirmPassword }) => { - if (password !== confirmPassword) { - return false; - } - - return true; - }, "Password and confirm password must be same"); + .object({ + name: z.string({ required_error: 'Name is required' }).min(1), + confirmPassword: passwordValidationSchema('Confirm Password'), + }) + .merge(baseCreateUser) + .strict() + .refine(({ password, confirmPassword }) => { + if (password !== confirmPassword) { + return false; + } + + return true; + }, 'Password and confirm password must be same'); export const loginUserByEmailSchema = z.object({ - email: z - .string({ required_error: "Email is required" }) - .email({ message: "Email is not valid" }), - password: z.string().min(1, "Password is required"), + email: z + .string({ required_error: 'Email is required' }) + .email({ message: 'Email is not valid' }), + password: z.string().min(1, 'Password is required'), +}); + +export const googleCallbackSchema = z.object({ + code: z.string({ required_error: 'Code is required' }), + error: z.string().optional(), }); export type RegisterUserByEmailSchemaType = z.infer< - typeof registerUserByEmailSchema + typeof registerUserByEmailSchema >; export type LoginUserByEmailSchemaType = z.infer; export type ChangePasswordSchemaType = z.infer; export type ForgetPasswordSchemaType = z.infer; export type ResetPasswordSchemaType = z.infer; +export type GoogleCallbackSchemaType = z.infer; + +// Response schemas +export const loginResponseSchema = R.success(z.object({ token: z.string() })); +export const registerResponseSchema = R.success( + z.object({ token: z.string() }), +); +export const logoutResponseSchema = R.success( + z.object({ + success: z.boolean(), + message: z.string(), + }), +); +export const getCurrentUserResponseSchema = R.success(userOutSchema); +export const forgetPasswordResponseSchema = R.success( + z.object({ userId: z.string() }), +); +export const changePasswordResponseSchema = R.success( + z.object({ + success: z.boolean(), + message: z.string(), + }), +); +export const resetPasswordResponseSchema = R.success( + z.object({ + success: z.boolean(), + message: z.string(), + 
}), +); +export const listSessionsResponseSchema = R.success( + z.array(sessionRecordSchema), +); +export const revokeSessionResponseSchema = R.success( + z.object({ + success: z.boolean(), + message: z.string(), + }), +); +export const revokeAllSessionsResponseSchema = R.success( + z.object({ + success: z.boolean(), + message: z.string(), + }), +); + +export const googleLoginResponseSchema = R.success( + z.object({ + url: z.string(), + }), +); + +export const googleCallbackResponseSchema = R.success( + z.object({ + token: z.string(), + sessionId: z.string().optional(), + }), +); + +// Response types +export type LoginResponseSchema = z.infer; +export type RegisterResponseSchema = z.infer; +export type LogoutResponseSchema = z.infer; +export type GetCurrentUserResponseSchema = z.infer< + typeof getCurrentUserResponseSchema +>; +export type ForgetPasswordResponseSchema = z.infer< + typeof forgetPasswordResponseSchema +>; +export type ChangePasswordResponseSchema = z.infer< + typeof changePasswordResponseSchema +>; +export type ResetPasswordResponseSchema = z.infer< + typeof resetPasswordResponseSchema +>; +export type ListSessionsResponseSchema = z.infer< + typeof listSessionsResponseSchema +>; +export type RevokeSessionResponseSchema = z.infer< + typeof revokeSessionResponseSchema +>; +export type RevokeAllSessionsResponseSchema = z.infer< + typeof revokeAllSessionsResponseSchema +>; +export type GoogleLoginResponseSchema = z.infer< + typeof googleLoginResponseSchema +>; +export type GoogleCallbackResponseSchema = z.infer< + typeof googleCallbackResponseSchema +>; diff --git a/src/modules/auth/auth.service.ts b/src/modules/auth/auth.service.ts index 99ca864..a1f6853 100644 --- a/src/modules/auth/auth.service.ts +++ b/src/modules/auth/auth.service.ts @@ -1,187 +1,256 @@ -import config from "../../config/config.service"; -import { ROLE_ENUM, type RoleType, SOCIAL_ACCOUNT_ENUM } from "../../enums"; -import type { GoogleCallbackQuery } from "../../types"; +import config from '@/config/env'; +import { ROLE_ENUM, type RoleType, SOCIAL_ACCOUNT_ENUM } from '@/enums'; +import type { JwtPayload } from '@/utils/jwt.utils'; +import { signToken } from '@/utils/jwt.utils'; +import { compareHash, hashPassword } from '@/utils/password.utils'; +import { fetchGoogleTokens, getUserInfo } from '@/utils/google-oauth.utils'; +import { generateOtp } from '@/utils/otp.utils'; +import type { UserType } from '../user/user.dto'; import { - type JwtPayload, - compareHash, - fetchGoogleTokens, - generateOTP, - getUserInfo, - hashPassword, - signToken, -} from "../../utils/auth.utils"; -import { generateRandomNumbers } from "../../utils/common.utils"; -import type { UserType } from "../user/user.dto"; -import { - createUser, - getUserByEmail, - getUserById, - updateUser, -} from "../user/user.services"; + createUser, + getUserByEmail, + getUserById, + updateUser, +} from '../user/user.services'; import type { - ChangePasswordSchemaType, - ForgetPasswordSchemaType, - LoginUserByEmailSchemaType, - RegisterUserByEmailSchemaType, - ResetPasswordSchemaType, -} from "./auth.schema"; + ChangePasswordSchemaType, + ForgetPasswordSchemaType, + LoginUserByEmailSchemaType, + RegisterUserByEmailSchemaType, + ResetPasswordSchemaType, + GoogleCallbackSchemaType, +} from './auth.schema'; +import { getSessionManager } from './session/session.manager'; export const resetPassword = async (payload: ResetPasswordSchemaType) => { - const user = await getUserById(payload.userId); + const user = await getUserById(payload.userId); + + if 
(!user || user.passwordResetCode !== payload.code) { + throw new Error('token is not valid or expired, please try again'); + } - if (!user || user.passwordResetCode !== payload.code) { - throw new Error("token is not valid or expired, please try again"); - } + if (payload.confirmPassword !== payload.password) { + throw new Error('Password and confirm password must be same'); + } - if (payload.confirmPassword !== payload.password) { - throw new Error("Password and confirm password must be same"); - } + const hashedPassword = await hashPassword(payload.password); - const hashedPassword = await hashPassword(payload.password); + await updateUser(payload.userId, { + password: hashedPassword, + passwordResetCode: null, + }); - await updateUser(payload.userId, { - password: hashedPassword, - passwordResetCode: null, - }); + if (config.SET_SESSION) { + const sessionManager = getSessionManager(); + await sessionManager.revokeAllUserSessions(payload.userId); + } }; export const forgetPassword = async ( - payload: ForgetPasswordSchemaType, + payload: ForgetPasswordSchemaType, ): Promise => { - const user = await getUserByEmail(payload.email); + const user = await getUserByEmail(payload.email); - if (!user) { - throw new Error("user doesn't exists"); - } + if (!user) { + throw new Error("user doesn't exists"); + } - const code = generateRandomNumbers(4); + const code = generateOtp({ length: 4, charset: 'numeric' }); - await updateUser(user._id, { passwordResetCode: code }); + await updateUser(user._id, { passwordResetCode: code }); - return user; + return user; }; export const changePassword = async ( - userId: string, - payload: ChangePasswordSchemaType, + userId: string, + payload: ChangePasswordSchemaType, ): Promise => { - const user = await getUserById(userId, "+password"); + const user = await getUserById(userId, '+password'); + + if (!user || !user.password) { + throw new Error('User is not found'); + } - if (!user || !user.password) { - throw new Error("User is not found"); - } + const isCurrentPassowordCorrect = await compareHash( + user.password, + payload.currentPassword, + ); - const isCurrentPassowordCorrect = await compareHash( - user.password, - payload.currentPassword, - ); + if (!isCurrentPassowordCorrect) { + throw new Error('current password is not valid'); + } - if (!isCurrentPassowordCorrect) { - throw new Error("current password is not valid"); - } + const hashedPassword = await hashPassword(payload.newPassword); - const hashedPassword = await hashPassword(payload.newPassword); + await updateUser(userId, { password: hashedPassword }); - await updateUser(userId, { password: hashedPassword }); + if (config.SET_SESSION) { + const sessionManager = getSessionManager(); + await sessionManager.revokeAllUserSessions(userId); + } }; export const registerUserByEmail = async ( - payload: RegisterUserByEmailSchemaType, + payload: RegisterUserByEmailSchemaType, ): Promise => { - const userExistByEmail = await getUserByEmail(payload.email); + const userExistByEmail = await getUserByEmail(payload.email); - if (userExistByEmail) { - throw new Error("Account already exist with same email address"); - } + if (userExistByEmail) { + throw new Error('Account already exist with same email address'); + } - const { confirmPassword, ...rest } = payload; + const { confirmPassword, ...rest } = payload; - const otp = config.OTP_VERIFICATION_ENABLED ? generateOTP() : null; + const otp = config.OTP_VERIFICATION_ENABLED ? 
generateOtp({ length: 6, charset: 'hex' }) : null; - const user = await createUser( - { ...rest, role: "DEFAULT_USER", otp }, - false, - ); + const user = await createUser({ ...rest, role: 'DEFAULT_USER', otp }, false); - return user; + return user; }; export const loginUserByEmail = async ( - payload: LoginUserByEmailSchemaType, -): Promise => { - const user = await getUserByEmail(payload.email, "+password"); + payload: LoginUserByEmailSchemaType, + metadata?: { userAgent?: string; ipAddress?: string }, +): Promise<{ token: string; sessionId?: string }> => { + const user = await getUserByEmail(payload.email, '+password'); + + if (!user || !(await compareHash(String(user.password), payload.password))) { + throw new Error('Invalid email or password'); + } + + if (config.OTP_VERIFICATION_ENABLED && user.otp !== null) { + throw new Error('Your account is not verified'); + } + + const jwtPayload: JwtPayload = { + sub: String(user._id), + email: user?.email, + phoneNo: user?.phoneNo, + role: String(user.role) as RoleType, + username: user.username, + }; - if (!user || !(await compareHash(String(user.password), payload.password))) { - throw new Error("Invalid email or password"); - } + let sessionId: string | undefined; - const jwtPayload: JwtPayload = { - sub: String(user._id), - email: user?.email, - phoneNo: user?.phoneNo, - role: String(user.role) as RoleType, - username: user.username, - }; + if (config.SET_SESSION) { + const sessionManager = getSessionManager(); - const token = await signToken(jwtPayload); + // Lazy cleanup: remove user's expired/revoked sessions + sessionManager.cleanupUserSessions(String(user._id)).then(); - return token; + // Step 1: Create session without token (store empty token temporarily) + const session = await sessionManager.createSession({ + userId: String(user._id), + token: '', // Placeholder empty token + metadata, + }); + + sessionId = session.sessionId; + + // Step 2: Generate token once with real session ID + jwtPayload.sid = sessionId; + const token = await signToken(jwtPayload); + + // Step 3: Update session with actual token hash + await sessionManager.updateSessionToken(sessionId, token); + + return { token, sessionId }; + } + + const token = await signToken(jwtPayload); + return { token }; }; export const googleLogin = async ( - payload: GoogleCallbackQuery, -): Promise => { - const { code, error } = payload; - - if (error) { - throw new Error(error); - } - - if (!code) { - throw new Error("Code Not Provided"); - } - const tokenResponse = await fetchGoogleTokens({ code }); - - const { access_token, refresh_token, expires_in } = tokenResponse; - - const userInfoResponse = await getUserInfo(access_token); - - const { id, email, name, picture } = userInfoResponse; - - const user = await getUserByEmail(email); - - if (!user) { - const newUser = await createUser({ - email, - username: name, - avatar: picture, - role: ROLE_ENUM.DEFAULT_USER, - password: generateRandomNumbers(4), - socialAccount: [ - { - refreshToken: refresh_token, - tokenExpiry: new Date(Date.now() + expires_in * 1000), - accountType: SOCIAL_ACCOUNT_ENUM.GOOGLE, - accessToken: access_token, - accountID: id, - }, - ], - }); - - return newUser; - } - - const updatedUser = await updateUser(user._id, { - socialAccount: [ - { - refreshToken: refresh_token, - tokenExpiry: new Date(Date.now() + expires_in * 1000), - accountType: SOCIAL_ACCOUNT_ENUM.GOOGLE, - accessToken: access_token, - accountID: id, - }, - ], - }); - - return updatedUser; + payload: GoogleCallbackSchemaType, + metadata?: { 
userAgent?: string; ipAddress?: string }, +): Promise<{ user: UserType; token: string; sessionId?: string }> => { + const { code, error } = payload; + + if (error) { + throw new Error(error); + } + + if (!code) { + throw new Error('Code Not Provided'); + } + const tokenResponse = await fetchGoogleTokens({ code }); + + const { access_token, refresh_token, expires_in } = tokenResponse; + + const userInfoResponse = await getUserInfo(access_token); + + const { id, email, name, picture } = userInfoResponse; + + let user = await getUserByEmail(email); + + if (!user) { + user = await createUser({ + email, + username: name, + avatar: picture, + role: ROLE_ENUM.DEFAULT_USER, + password: generateOtp({ length: 16, charset: 'alphanumeric' }), + socialAccount: [ + { + refreshToken: refresh_token, + tokenExpiry: new Date(Date.now() + expires_in * 1000), + accountType: SOCIAL_ACCOUNT_ENUM.GOOGLE, + accessToken: access_token, + accountID: id, + }, + ], + }); + } else { + user = await updateUser(user._id, { + socialAccount: [ + { + refreshToken: refresh_token, + tokenExpiry: new Date(Date.now() + expires_in * 1000), + accountType: SOCIAL_ACCOUNT_ENUM.GOOGLE, + accessToken: access_token, + accountID: id, + }, + ], + }); + } + + const jwtPayload: JwtPayload = { + sub: String(user._id), + email: user.email, + phoneNo: user.phoneNo, + role: String(user.role) as RoleType, + username: user.username, + }; + + let sessionId: string | undefined; + + if (config.SET_SESSION) { + const sessionManager = getSessionManager(); + + // Lazy cleanup: remove user's expired/revoked sessions + sessionManager.cleanupUserSessions(String(user._id)).then(); + + // Step 1: Create session without token (store empty token temporarily) + const session = await sessionManager.createSession({ + userId: String(user._id), + token: '', // Placeholder empty token + metadata, + }); + + sessionId = session.sessionId; + + // Step 2: Generate token once with real session ID + jwtPayload.sid = sessionId; + const token = await signToken(jwtPayload); + + // Step 3: Update session with actual token hash + await sessionManager.updateSessionToken(sessionId, token); + + return { user, token, sessionId }; + } + + const token = await signToken(jwtPayload); + return { user, token }; }; diff --git a/src/modules/auth/password-links.ts b/src/modules/auth/password-links.ts new file mode 100644 index 0000000..c49f354 --- /dev/null +++ b/src/modules/auth/password-links.ts @@ -0,0 +1,19 @@ +import config from '@/config/env'; + +/** + * Generate a password reset link with the given token + * @param token - Password reset token + * @returns Full URL for password reset + */ +export const generateResetPasswordLink = (token: string): string => { + return `${config.CLIENT_SIDE_URL}/reset-password?token=${token}`; +}; + +/** + * Generate a set password link with the given token + * @param token - Set password token + * @returns Full URL for setting password + */ +export const generateSetPasswordLink = (token: string): string => { + return `${config.CLIENT_SIDE_URL}/set-password?token=${token}`; +}; diff --git a/src/modules/auth/session/index.ts b/src/modules/auth/session/index.ts new file mode 100644 index 0000000..554e8ca --- /dev/null +++ b/src/modules/auth/session/index.ts @@ -0,0 +1,7 @@ +export * from './session.types'; +export * from './session.schema'; +export * from './session.utils'; +export * from './session.manager'; +export * from './mongo.session.store'; +export * from './redis.session.store'; +export { SessionModel } from './session.model'; diff --git 
a/src/modules/auth/session/mongo.session.store.ts b/src/modules/auth/session/mongo.session.store.ts new file mode 100644 index 0000000..7073dae --- /dev/null +++ b/src/modules/auth/session/mongo.session.store.ts @@ -0,0 +1,153 @@ +import type { SessionStore, CreateSessionInput, SessionRecord } from './session.types'; +import { SessionModel } from './session.model'; +import { hashToken, calculateExpiresAt } from './session.utils'; +import { createChildLogger } from '@/plugins/observability/logger'; + +const logger = createChildLogger({ context: 'MongoSessionStore' }); + +export class MongoSessionStore implements SessionStore { + async create(input: CreateSessionInput): Promise { + const tokenHash = hashToken(input.token); + const expiresAt = calculateExpiresAt(input.expiresIn); + + const session = await SessionModel.create({ + userId: input.userId, + tokenHash, + metadata: input.metadata, + lastSeen: new Date(), + expiresAt, + isRevoked: false, + }); + + logger.debug({ sessionId: session._id, userId: input.userId }, 'Session created'); + + return { + sessionId: session._id.toString(), + userId: session.userId, + tokenHash: session.tokenHash, + metadata: session.metadata, + createdAt: session.createdAt!, + lastSeen: session.lastSeen, + expiresAt: session.expiresAt, + isRevoked: session.isRevoked, + }; + } + + async get(sessionId: string): Promise { + const session = await SessionModel.findById(sessionId); + + if (!session) { + return null; + } + + return { + sessionId: session._id.toString(), + userId: session.userId, + tokenHash: session.tokenHash, + metadata: session.metadata, + createdAt: session.createdAt!, + lastSeen: session.lastSeen, + expiresAt: session.expiresAt, + isRevoked: session.isRevoked, + }; + } + + async listByUser(userId: string): Promise { + const sessions = await SessionModel.find({ userId, isRevoked: false }) + .sort({ createdAt: -1 }) + .lean(); + + return sessions.map(session => ({ + sessionId: session._id.toString(), + userId: session.userId, + tokenHash: session.tokenHash, + metadata: session.metadata, + createdAt: session.createdAt!, + lastSeen: session.lastSeen, + expiresAt: session.expiresAt, + isRevoked: session.isRevoked, + })); + } + + async touch(sessionId: string): Promise { + await SessionModel.findByIdAndUpdate(sessionId, { + lastSeen: new Date(), + }); + } + + async updateTokenHash(sessionId: string, token: string): Promise { + const tokenHash = hashToken(token); + await SessionModel.findByIdAndUpdate(sessionId, { + tokenHash, + }); + + logger.debug({ sessionId }, 'Session token hash updated'); + } + + async revoke(sessionId: string): Promise { + await SessionModel.findByIdAndUpdate(sessionId, { + isRevoked: true, + }); + + logger.debug({ sessionId }, 'Session revoked'); + } + + async revokeAllForUser(userId: string): Promise { + await SessionModel.updateMany( + { userId, isRevoked: false }, + { isRevoked: true }, + ); + + logger.debug({ userId }, 'All sessions revoked for user'); + } + + async pruneExpired(): Promise { + const result = await SessionModel.deleteMany({ + expiresAt: { $lt: new Date() }, + }); + + if (result.deletedCount && result.deletedCount > 0) { + logger.debug({ count: result.deletedCount }, 'Expired sessions pruned'); + } + } + + async deleteRevoked(): Promise { + const result = await SessionModel.deleteMany({ isRevoked: true }); + const count = result.deletedCount || 0; + + if (count > 0) { + logger.info({ count }, 'Deleted revoked sessions'); + } + + return count; + } + + async deleteExpired(): Promise { + const result = await 
SessionModel.deleteMany({ + expiresAt: { $lt: new Date() }, + }); + const count = result.deletedCount || 0; + + if (count > 0) { + logger.info({ count }, 'Deleted expired sessions (TTL backup)'); + } + + return count; + } + + async deleteUserExpiredSessions(userId: string): Promise { + const result = await SessionModel.deleteMany({ + userId, + $or: [ + { isRevoked: true }, + { expiresAt: { $lt: new Date() } }, + ], + }); + + return result.deletedCount || 0; + } + + async close(): Promise { + // MongoDB connection is managed globally, no specific cleanup needed + } +} diff --git a/src/modules/auth/session/redis.session.store.ts b/src/modules/auth/session/redis.session.store.ts new file mode 100644 index 0000000..81bb473 --- /dev/null +++ b/src/modules/auth/session/redis.session.store.ts @@ -0,0 +1,321 @@ +import type { Redis } from 'ioredis'; +import type { + SessionStore, + CreateSessionInput, + SessionRecord, +} from './session.types'; +import { + generateSessionId, + hashToken, + calculateExpiresAt, +} from './session.utils'; +import { createChildLogger } from '@/plugins/observability/logger'; + +const logger = createChildLogger({ context: 'RedisSessionStore' }); + +const SESSION_PREFIX = 'session:'; +const USER_SESSIONS_PREFIX = 'user_sessions:'; + +export class RedisSessionStore implements SessionStore { + constructor(private redis: Redis) {} + + private getSessionKey(sessionId: string): string { + return `${SESSION_PREFIX}${sessionId}`; + } + + private getUserSessionsKey(userId: string): string { + return `${USER_SESSIONS_PREFIX}${userId}`; + } + + async create(input: CreateSessionInput): Promise { + const sessionId = generateSessionId(); + const tokenHash = hashToken(input.token); + const now = new Date(); + const expiresAt = calculateExpiresAt(input.expiresIn); + + const session: SessionRecord = { + sessionId, + userId: input.userId, + tokenHash, + metadata: input.metadata, + createdAt: now, + lastSeen: now, + expiresAt, + isRevoked: false, + }; + + const sessionKey = this.getSessionKey(sessionId); + const userSessionsKey = this.getUserSessionsKey(input.userId); + const ttl = Math.floor((expiresAt.getTime() - now.getTime()) / 1000); + + await this.redis + .multi() + .set(sessionKey, JSON.stringify(session), 'EX', ttl) + .zadd(userSessionsKey, now.getTime(), sessionId) + .expire(userSessionsKey, ttl) + .exec(); + + logger.debug({ sessionId, userId: input.userId }, 'Session created'); + + return session; + } + + async get(sessionId: string): Promise { + const sessionKey = this.getSessionKey(sessionId); + const data = await this.redis.get(sessionKey); + + if (!data) { + return null; + } + + return JSON.parse(data, (key, value) => { + if (key === 'createdAt' || key === 'lastSeen' || key === 'expiresAt') { + return new Date(value); + } + return value; + }); + } + + async listByUser(userId: string): Promise { + const userSessionsKey = this.getUserSessionsKey(userId); + const sessionIds = await this.redis.zrevrange(userSessionsKey, 0, -1); + + if (!sessionIds.length) { + return []; + } + + const pipeline = this.redis.pipeline(); + for (const sessionId of sessionIds) { + pipeline.get(this.getSessionKey(sessionId)); + } + + const results = await pipeline.exec(); + const sessions: SessionRecord[] = []; + + if (!results) return sessions; + + for (const [err, data] of results) { + if (!err && data) { + const session = JSON.parse(data as string, (key, value) => { + if ( + key === 'createdAt' || + key === 'lastSeen' || + key === 'expiresAt' + ) { + return new Date(value); + } + return value; + 
}); + if (!session.isRevoked) { + sessions.push(session); + } + } + } + + return sessions; + } + + async touch(sessionId: string): Promise { + const session = await this.get(sessionId); + if (!session) return; + + session.lastSeen = new Date(); + const sessionKey = this.getSessionKey(sessionId); + const ttl = await this.redis.ttl(sessionKey); + + if (ttl > 0) { + await this.redis.set(sessionKey, JSON.stringify(session), 'EX', ttl); + } + } + + async updateTokenHash(sessionId: string, token: string): Promise { + const session = await this.get(sessionId); + if (!session) return; + + const tokenHash = hashToken(token); + session.tokenHash = tokenHash; + + const sessionKey = this.getSessionKey(sessionId); + const ttl = await this.redis.ttl(sessionKey); + + if (ttl > 0) { + await this.redis.set(sessionKey, JSON.stringify(session), 'EX', ttl); + } + + logger.debug({ sessionId }, 'Session token hash updated'); + } + + async revoke(sessionId: string): Promise { + const session = await this.get(sessionId); + if (!session) return; + + session.isRevoked = true; + const sessionKey = this.getSessionKey(sessionId); + const ttl = await this.redis.ttl(sessionKey); + + if (ttl > 0) { + await this.redis.set(sessionKey, JSON.stringify(session), 'EX', ttl); + } + + logger.debug({ sessionId }, 'Session revoked'); + } + + async revokeAllForUser(userId: string): Promise { + const sessions = await this.listByUser(userId); + + if (!sessions.length) return; + + const pipeline = this.redis.pipeline(); + + for (const session of sessions) { + session.isRevoked = true; + const sessionKey = this.getSessionKey(session.sessionId); + const ttl = Math.floor((session.expiresAt.getTime() - Date.now()) / 1000); + + if (ttl > 0) { + pipeline.set(sessionKey, JSON.stringify(session), 'EX', ttl); + } + } + + await pipeline.exec(); + + logger.debug({ userId }, 'All sessions revoked for user'); + } + + async pruneExpired(): Promise { + // Redis automatically handles expiration via TTL, no manual pruning needed + logger.debug('Redis handles expiration automatically via TTL'); + } + + async deleteRevoked(): Promise { + let deletedCount = 0; + let cursor = '0'; + + do { + const [newCursor, keys] = await this.redis.scan( + cursor, + 'MATCH', + `${USER_SESSIONS_PREFIX}*`, + 'COUNT', + 1000, + ); + cursor = newCursor; + + for (const userKey of keys) { + const sessionIds = await this.redis.zrange(userKey, 0, -1); + + const pipeline = this.redis.pipeline(); + for (const sessionId of sessionIds) { + const session = await this.get(sessionId); + if (session?.isRevoked) { + pipeline.del(this.getSessionKey(sessionId)); + pipeline.zrem(userKey, sessionId); + deletedCount++; + } + } + await pipeline.exec(); + } + } while (cursor !== '0'); + + if (deletedCount > 0) { + logger.info({ count: deletedCount }, 'Deleted revoked sessions'); + } + + return deletedCount; + } + + async deleteExpired(): Promise { + let deletedCount = 0; + let cursor = '0'; + + do { + const [newCursor, keys] = await this.redis.scan( + cursor, + 'MATCH', + `${USER_SESSIONS_PREFIX}*`, + 'COUNT', + 1000, + ); + cursor = newCursor; + + for (const userKey of keys) { + const sessionIds = await this.redis.zrange(userKey, 0, -1); + + const pipeline = this.redis.pipeline(); + for (const sessionId of sessionIds) { + const exists = await this.redis.exists(this.getSessionKey(sessionId)); + if (!exists) { + pipeline.zrem(userKey, sessionId); + deletedCount++; + } + } + await pipeline.exec(); + } + } while (cursor !== '0'); + + if (deletedCount > 0) { + logger.info( + { count: deletedCount 
}, + 'Cleaned up expired session references', + ); + } + + return deletedCount; + } + + async deleteUserExpiredSessions(userId: string): Promise { + const userKey = this.getUserSessionsKey(userId); + const sessionIds = await this.redis.zrange(userKey, 0, -1); + let deletedCount = 0; + + for (const sessionId of sessionIds) { + const session = await this.get(sessionId); + if (!session || session.isRevoked || new Date() > session.expiresAt) { + await this.redis.del(this.getSessionKey(sessionId)); + await this.redis.zrem(userKey, sessionId); + deletedCount++; + } + } + + return deletedCount; + } + + async cleanupOrphanedKeys(): Promise { + let deletedCount = 0; + let cursor = '0'; + + do { + const [newCursor, keys] = await this.redis.scan( + cursor, + 'MATCH', + `${USER_SESSIONS_PREFIX}*`, + 'COUNT', + 1000, + ); + cursor = newCursor; + + const pipeline = this.redis.pipeline(); + for (const userKey of keys) { + const count = await this.redis.zcard(userKey); + if (count === 0) { + pipeline.del(userKey); + deletedCount++; + } + } + await pipeline.exec(); + } while (cursor !== '0'); + + if (deletedCount > 0) { + logger.info( + { count: deletedCount }, + 'Deleted orphaned user session keys', + ); + } + + return deletedCount; + } + + async close(): Promise { + // Redis connection is managed globally, no specific cleanup needed + } +} diff --git a/src/modules/auth/session/session.manager.ts b/src/modules/auth/session/session.manager.ts new file mode 100644 index 0000000..247fea1 --- /dev/null +++ b/src/modules/auth/session/session.manager.ts @@ -0,0 +1,241 @@ +import type { + SessionStore, + SessionStoreConfig, + CreateSessionInput, + SessionRecord, + SessionValidationResult, + CleanupStats, +} from './session.types'; +import { MongoSessionStore } from './mongo.session.store'; +import { RedisSessionStore } from './redis.session.store'; +import { hashToken, isSessionExpired } from './session.utils'; +import { createChildLogger } from '@/plugins/observability/logger'; +import { cacheProvider, RedisProvider } from '@/lib/cache'; +import config from '@/config/env'; + +const logger = createChildLogger({ context: 'SessionManager' }); + +export class SessionManager { + private store: SessionStore; + private config: SessionStoreConfig; + + constructor(storeConfig?: Partial) { + this.config = { + driver: storeConfig?.driver || config.SESSION_DRIVER, + maxPerUser: storeConfig?.maxPerUser || config.SESSION_MAX_PER_USER, + idleTTL: storeConfig?.idleTTL || config.SESSION_IDLE_TTL, + absoluteTTL: storeConfig?.absoluteTTL || config.SESSION_ABSOLUTE_TTL, + rotation: storeConfig?.rotation ?? config.SESSION_ROTATION, + debug: storeConfig?.debug ?? config.SESSION_DEBUG, + }; + + this.store = this.createStore(); + + if (this.config.debug) { + logger.info({ config: this.config }, 'SessionManager initialized'); + } + } + + private createStore(): SessionStore { + if (this.config.driver === 'redis') { + if (!(cacheProvider instanceof RedisProvider)) { + throw new Error( + 'Redis session driver requires Redis cache provider. Set CACHE_PROVIDER=redis', + ); + } + return new RedisSessionStore(cacheProvider.getClient()); + } + return new MongoSessionStore(); + } + + async createSession(input: CreateSessionInput): Promise { + // Note: This check-then-revoke pattern has a race condition where concurrent + // createSession calls can bypass the maxPerUser limit. For production use, + // consider implementing atomic store-level eviction (e.g., using Redis Lua + // scripts or MongoDB transactions with proper locking). 
This implementation
+    // provides best-effort enforcement.
+
+    const sessions = await this.store.listByUser(input.userId);
+
+    // Evict oldest sessions if at or over limit
+    while (sessions.length >= this.config.maxPerUser) {
+      const oldestSession = sessions.pop();
+      if (oldestSession) {
+        await this.store.revoke(oldestSession.sessionId);
+        if (this.config.debug) {
+          logger.debug(
+            { userId: input.userId, revokedSessionId: oldestSession.sessionId },
+            'Evicted oldest session due to max limit',
+          );
+        }
+      }
+    }
+
+    const session = await this.store.create(input);
+
+    if (this.config.debug) {
+      logger.info(
+        { sessionId: session.sessionId, userId: input.userId },
+        'Session created',
+      );
+    }
+
+    return session;
+  }
+
+  async getSession(sessionId: string): Promise<SessionRecord | null> {
+    return this.store.get(sessionId);
+  }
+
+  async validateSession(
+    sessionId: string,
+    token: string,
+  ): Promise<SessionValidationResult> {
+    const session = await this.store.get(sessionId);
+
+    if (!session) {
+      return { isValid: false, reason: 'not_found' };
+    }
+
+    if (session.isRevoked) {
+      return { isValid: false, session, reason: 'revoked' };
+    }
+
+    if (isSessionExpired(session.expiresAt)) {
+      return { isValid: false, session, reason: 'expired' };
+    }
+
+    const tokenHash = hashToken(token);
+
+    if (session.tokenHash !== tokenHash) {
+      return { isValid: false, session, reason: 'invalid' };
+    }
+
+    await this.store.touch(sessionId);
+
+    return { isValid: true, session };
+  }
+
+  async touchSession(sessionId: string): Promise<void> {
+    await this.store.touch(sessionId);
+  }
+
+  async updateSessionToken(sessionId: string, token: string): Promise<void> {
+    await this.store.updateTokenHash(sessionId, token);
+
+    if (this.config.debug) {
+      logger.info({ sessionId }, 'Session token updated');
+    }
+  }
+
+  async revokeSession(sessionId: string): Promise<void> {
+    await this.store.revoke(sessionId);
+
+    if (this.config.debug) {
+      logger.info({ sessionId }, 'Session revoked');
+    }
+  }
+
+  async revokeAllUserSessions(userId: string): Promise<void> {
+    await this.store.revokeAllForUser(userId);
+
+    if (this.config.debug) {
+      logger.info({ userId }, 'All user sessions revoked');
+    }
+  }
+
+  async listUserSessions(userId: string): Promise<SessionRecord[]> {
+    return this.store.listByUser(userId);
+  }
+
+  async pruneExpiredSessions(): Promise<void> {
+    await this.store.pruneExpired();
+  }
+
+  async cleanupSessions(
+    type: 'full' | 'revoked' | 'expired',
+  ): Promise<CleanupStats> {
+    const stats: CleanupStats = {
+      revokedDeleted: 0,
+      expiredDeleted: 0,
+      orphanedKeysDeleted: 0,
+      totalProcessed: 0,
+    };
+
+    if (type === 'full' || type === 'revoked') {
+      stats.revokedDeleted = await this.store.deleteRevoked();
+    }
+
+    if (type === 'full' || type === 'expired') {
+      stats.expiredDeleted = await this.store.deleteExpired();
+    }
+
+    if (type === 'full' && this.config.driver === 'redis') {
+      stats.orphanedKeysDeleted =
+        (await this.store.cleanupOrphanedKeys?.()) || 0;
+    }
+
+    stats.totalProcessed =
+      stats.revokedDeleted +
+      stats.expiredDeleted +
+      (stats.orphanedKeysDeleted || 0);
+
+    if (this.config.debug) {
+      logger.info({ stats }, 'Session cleanup stats');
+    }
+
+    return stats;
+  }
+
+  async cleanupUserSessions(userId: string): Promise<number> {
+    return this.store.deleteUserExpiredSessions?.(userId) || 0;
+  }
+
+  async cleanup(): Promise<void> {
+    await this.store.close();
+  }
+
+  getConfig(): SessionStoreConfig {
+    return { ...this.config };
+  }
+}
+
+let sessionManagerInstance: SessionManager | null = null;
+let initPromise: Promise<SessionManager> | null = null;
+
+export async function initializeSessionManager(
+  config?: Partial<SessionStoreConfig>,
+): Promise<SessionManager> {
+  // If already initialized, return the existing instance
+  if (sessionManagerInstance) {
+    return sessionManagerInstance;
+  }
+
+  // If initialization is in progress, wait for it
+  if (initPromise) {
+    return initPromise;
+  }
+
+  // Start initialization
+  initPromise = Promise.resolve()
+    .then(() => {
+      if (!sessionManagerInstance) {
+        sessionManagerInstance = new SessionManager(config);
+      }
+      return sessionManagerInstance;
+    })
+    .finally(() => {
+      initPromise = null;
+    });
+
+  return initPromise;
+}
+
+export function getSessionManager(): SessionManager {
+  if (!sessionManagerInstance) {
+    throw new Error(
+      'SessionManager not initialized. Call initializeSessionManager first.',
+    );
+  }
+  return sessionManagerInstance;
+}
diff --git a/src/modules/auth/session/session.model.ts b/src/modules/auth/session/session.model.ts
new file mode 100644
index 0000000..092a9e2
--- /dev/null
+++ b/src/modules/auth/session/session.model.ts
@@ -0,0 +1,64 @@
+import mongoose, { Schema, type Document } from 'mongoose';
+import type { SessionRecord, SessionMetadata } from './session.types';
+
+export interface SessionDocument
+  extends Omit<SessionRecord, 'sessionId'>,
+    Document {
+  _id: string;
+}
+
+const sessionMetadataSchema = new Schema<SessionMetadata>(
+  {
+    userAgent: { type: String },
+    ipAddress: { type: String },
+    deviceType: { type: String },
+    browser: { type: String },
+    os: { type: String },
+  },
+  { _id: false },
+);
+
+const sessionSchema = new Schema<SessionDocument>(
+  {
+    userId: {
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      type: Schema.Types.ObjectId as any,
+      ref: 'User',
+      required: true,
+      index: true,
+    },
+    tokenHash: {
+      type: String,
+      required: true,
+      unique: true,
+    },
+    metadata: {
+      type: sessionMetadataSchema,
+    },
+    lastSeen: {
+      type: Date,
+      required: true,
+      default: Date.now,
+    },
+    expiresAt: {
+      type: Date,
+      required: true,
+      index: true,
+    },
+    isRevoked: {
+      type: Boolean,
+      default: false,
+    },
+  },
+  {
+    timestamps: true,
+  },
+);
+
+sessionSchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });
+sessionSchema.index({ userId: 1, createdAt: -1 });
+
+export const SessionModel = mongoose.model<SessionDocument>(
+  'Session',
+  sessionSchema,
+);
diff --git a/src/modules/auth/session/session.schema.ts b/src/modules/auth/session/session.schema.ts
new file mode 100644
index 0000000..f6918bf
--- /dev/null
+++ b/src/modules/auth/session/session.schema.ts
@@ -0,0 +1,52 @@
+import { z } from 'zod';
+
+export const sessionMetadataSchema = z.object({
+  userAgent: z.string().optional(),
+  ipAddress: z.string().optional(),
+  deviceType: z.string().optional(),
+  browser: z.string().optional(),
+  os: z.string().optional(),
+});
+
+export const createSessionSchema = z.object({
+  userId: z.string().min(1),
+  token: z.string().min(1),
+  metadata: sessionMetadataSchema.optional(),
+  expiresIn: z.number().positive().optional(),
+});
+
+export const sessionRecordSchema = z.object({
+  sessionId: z.string(),
+  userId: z.string(),
+  tokenHash: z.string(),
+  metadata: sessionMetadataSchema.optional(),
+  createdAt: z.date(),
+  lastSeen: z.date(),
+  expiresAt: z.date(),
+  isRevoked: z.boolean().optional(),
+});
+
+export const sessionStoreConfigSchema = z.object({
+  driver: z.enum(['mongo', 'redis']),
+  maxPerUser: z.number().positive().default(5),
+  idleTTL: z.number().positive().optional(),
+  absoluteTTL: z.number().positive().optional(),
+  rotation: z.boolean().default(false),
+  debug: z.boolean().default(false),
+});
+
+export const sessionPluginOptionsSchema = z.object({
+  enabled: z.boolean().default(true),
+  driver: z.enum(['mongo', 'redis']).optional(),
+  maxPerUser: z.number().positive().optional(),
+  idleTTL: z.number().positive().optional(),
+  absoluteTTL: z.number().positive().optional(),
+  rotation: z.boolean().optional(),
+  debug: z.boolean().optional(),
+});
+
+export type SessionMetadataSchemaType = z.infer<typeof sessionMetadataSchema>;
+export type CreateSessionSchemaType = z.infer<typeof createSessionSchema>;
+export type SessionRecordSchemaType = z.infer<typeof sessionRecordSchema>;
+export type SessionStoreConfigSchemaType = z.infer<typeof sessionStoreConfigSchema>;
+export type SessionPluginOptionsSchemaType = z.infer<typeof sessionPluginOptionsSchema>;
diff --git a/src/modules/auth/session/session.types.ts b/src/modules/auth/session/session.types.ts
new file mode 100644
index 0000000..450a77b
--- /dev/null
+++ b/src/modules/auth/session/session.types.ts
@@ -0,0 +1,63 @@
+export interface SessionMetadata {
+  userAgent?: string;
+  ipAddress?: string;
+  deviceType?: string;
+  browser?: string;
+  os?: string;
+}
+
+export interface SessionRecord {
+  sessionId: string;
+  userId: string;
+  tokenHash: string;
+  metadata?: SessionMetadata;
+  createdAt: Date;
+  lastSeen: Date;
+  expiresAt: Date;
+  isRevoked?: boolean;
+}
+
+export interface SessionValidationResult {
+  isValid: boolean;
+  session?: SessionRecord;
+  reason?: 'expired' | 'revoked' | 'not_found' | 'invalid';
+}
+
+export interface SessionStoreConfig {
+  driver: 'mongo' | 'redis';
+  maxPerUser: number;
+  idleTTL?: number;
+  absoluteTTL?: number;
+  rotation: boolean;
+  debug: boolean;
+}
+
+export interface CreateSessionInput {
+  userId: string;
+  token: string;
+  metadata?: SessionMetadata;
+  expiresIn?: number;
+}
+
+export interface SessionStore {
+  create(input: CreateSessionInput): Promise<SessionRecord>;
+  get(sessionId: string): Promise<SessionRecord | null>;
+  listByUser(userId: string): Promise<SessionRecord[]>;
+  touch(sessionId: string): Promise<void>;
+  revoke(sessionId: string): Promise<void>;
+  revokeAllForUser(userId: string): Promise<void>;
+  pruneExpired(): Promise<void>;
+  close(): Promise<void>;
+  updateTokenHash(sessionId: string, token: string): Promise<void>;
+  deleteRevoked(): Promise<number>;
+  deleteExpired(): Promise<number>;
+  deleteUserExpiredSessions?(userId: string): Promise<number>;
+  cleanupOrphanedKeys?(): Promise<number>;
+}
+
+export interface CleanupStats {
+  revokedDeleted: number;
+  expiredDeleted: number;
+  orphanedKeysDeleted?: number;
+  totalProcessed: number;
+}
diff --git a/src/modules/auth/session/session.utils.ts b/src/modules/auth/session/session.utils.ts
new file mode 100644
index 0000000..d080da0
--- /dev/null
+++ b/src/modules/auth/session/session.utils.ts
@@ -0,0 +1,49 @@
+import crypto from 'node:crypto';
+import type { CookieOptions } from 'express';
+import config from '../../../config/env';
+
+export function generateSessionId(): string {
+  return crypto.randomUUID();
+}
+
+export function hashToken(token: string): string {
+  return crypto.createHash('sha256').update(token).digest('hex');
+}
+
+export function buildSessionCookieOptions(): CookieOptions {
+  const isProduction = config.NODE_ENV === 'production';
+
+  return {
+    httpOnly: true,
+    secure: isProduction || config.HTTPS_ENABLED,
+    sameSite: 'lax',
+    maxAge: config.SESSION_EXPIRES_IN * 1000,
+    path: '/',
+  };
+}
+
+export function extractMetadataFromRequest(req: {
+  headers?: Record<string, string | string[] | undefined>;
+  ip?: string;
+  connection?: { remoteAddress?: string };
+}): {
+  userAgent?: string;
+  ipAddress?: string;
+} {
+  const userAgent = req.headers?.['user-agent'];
+  const xForwardedFor = req.headers?.['x-forwarded-for'];
+
+  return {
+    userAgent: Array.isArray(userAgent) ? userAgent[0] : userAgent,
+    ipAddress: req.ip || (Array.isArray(xForwardedFor) ?
xForwardedFor[0] : xForwardedFor) || req.connection?.remoteAddress, + }; +} + +export function isSessionExpired(expiresAt: Date): boolean { + return new Date() > expiresAt; +} + +export function calculateExpiresAt(expiresIn?: number): Date { + const ttl = expiresIn || config.SESSION_EXPIRES_IN; + return new Date(Date.now() + ttl * 1000); +} diff --git a/src/modules/blog/blog.controller.ts b/src/modules/blog/blog.controller.ts new file mode 100644 index 0000000..c5581a2 --- /dev/null +++ b/src/modules/blog/blog.controller.ts @@ -0,0 +1,82 @@ +import type { Request } from 'express'; +import type { MongoIdSchemaType } from '@/common/common.schema'; +import type { ResponseExtended } from '@/types'; +import { successResponse } from '@/utils/response.utils'; +import type { + CreateBlogSchemaType, + GetBlogsSchemaType, + UpdateBlogSchemaType, + CreateBlogResponseSchema, + GetBlogsResponseSchema, + GetBlogByIdResponseSchema, + UpdateBlogResponseSchema, +} from './blog.schema'; +import { + createBlog, + deleteBlog, + getBlogById, + getBlogs, + updateBlog, +} from './blog.services'; + +// Using new res.created() helper +export const handleCreateBlog = async ( + req: Request, + res: ResponseExtended, +) => { + const blog = await createBlog(req.body); + return res.created?.({ + success: true, + message: 'Blog created successfully', + data: blog, + }); +}; + +// Using new res.ok() helper with paginated response +export const handleGetBlogs = async ( + req: Request, + res: ResponseExtended, +) => { + const { results, paginatorInfo } = await getBlogs(req.query); + return res.ok?.({ + success: true, + data: { + items: results, + paginator: paginatorInfo, + }, + }); +}; + +// Using new res.ok() helper +export const handleGetBlogById = async ( + req: Request, + res: ResponseExtended, +) => { + const blog = await getBlogById(req.params.id); + return res.ok?.({ + success: true, + data: blog, + }); +}; + +// Using new res.ok() helper +export const handleUpdateBlog = async ( + req: Request, + res: ResponseExtended, +) => { + const blog = await updateBlog(req.params.id, req.body); + return res.ok?.({ + success: true, + message: 'Blog updated successfully', + data: blog, + }); +}; + +// Keeping legacy pattern for comparison +export const handleDeleteBlog = async ( + req: Request, + res: ResponseExtended, +) => { + await deleteBlog({ id: req.params.id }); + return successResponse(res, 'Blog deleted successfully'); +}; diff --git a/src/modules/blog/blog.dto.ts b/src/modules/blog/blog.dto.ts new file mode 100644 index 0000000..2184b6d --- /dev/null +++ b/src/modules/blog/blog.dto.ts @@ -0,0 +1,19 @@ +import { z } from 'zod'; +import { definePaginatedResponse } from '../../common/common.utils'; + +export const blogOutSchema = z.object({ + name: z.string(), + description: z.string().optional(), + createdAt: z.date().optional(), + updatedAt: z.date().optional(), +}); + +export const blogSchema = blogOutSchema.extend({ + user: z.any(), +}); + +export const blogsPaginatedSchema = definePaginatedResponse(blogOutSchema); + +export type BlogModelType = z.infer; +export type BlogType = z.infer & { id: string; _id: string }; +export type BlogPaginatedType = z.infer; diff --git a/src/modules/blog/blog.model.ts b/src/modules/blog/blog.model.ts new file mode 100644 index 0000000..e65fc00 --- /dev/null +++ b/src/modules/blog/blog.model.ts @@ -0,0 +1,15 @@ +import mongoose, { type Document, Schema } from 'mongoose'; +import type { BlogModelType, BlogType } from './blog.dto'; + +const BlogSchema: Schema = new Schema( + { + name: { 
type: String, required: true }, + description: { type: String }, + user: { type: Schema.Types.ObjectId, ref: 'User', required: true }, + }, + { timestamps: true }, +); + +export interface IBlogDocument extends Document, BlogModelType {} +const Blog = mongoose.model('Blog', BlogSchema); +export default Blog; diff --git a/src/modules/blog/blog.router.ts b/src/modules/blog/blog.router.ts new file mode 100644 index 0000000..ffafb5c --- /dev/null +++ b/src/modules/blog/blog.router.ts @@ -0,0 +1,83 @@ +import { mongoIdSchema } from '../../common/common.schema'; +import { canAccess } from '@/middlewares/can-access'; +import MagicRouter from '@/plugins/magic/router'; +import { + handleCreateBlog, + handleDeleteBlog, + handleGetBlogById, + handleGetBlogs, + handleUpdateBlog, +} from './blog.controller'; +import { + createBlogSchema, + getBlogsSchema, + updateBlogSchema, + createBlogResponseSchema, + getBlogsResponseSchema, + getBlogByIdResponseSchema, + updateBlogResponseSchema, +} from './blog.schema'; + +export const BLOG_ROUTER_ROOT = '/blogs'; + +const blogRouter = new MagicRouter(BLOG_ROUTER_ROOT); + +// List blogs with pagination (using new response system) +blogRouter.get( + '/', + { + requestType: { query: getBlogsSchema }, + responses: { + 200: getBlogsResponseSchema, + }, + }, + canAccess(), + handleGetBlogs, +); + +// Create blog (using new response system) +blogRouter.post( + '/', + { + requestType: { body: createBlogSchema }, + responses: { + 201: createBlogResponseSchema, + }, + }, + canAccess(), + handleCreateBlog, +); + +// Get blog by ID (using new response system) +blogRouter.get( + '/:id', + { + requestType: { params: mongoIdSchema }, + responses: { + 200: getBlogByIdResponseSchema, + }, + }, + canAccess(), + handleGetBlogById, +); + +// Update blog (using new response system) +blogRouter.patch( + '/:id', + { + requestType: { + params: mongoIdSchema, + body: updateBlogSchema, + }, + responses: { + 200: updateBlogResponseSchema, + }, + }, + canAccess(), + handleUpdateBlog, +); + +// Delete blog (keeping legacy pattern for comparison) +blogRouter.delete('/:id', {}, canAccess(), handleDeleteBlog); + +export default blogRouter.getRouter(); diff --git a/src/modules/blog/blog.schema.ts b/src/modules/blog/blog.schema.ts new file mode 100644 index 0000000..9b0b09e --- /dev/null +++ b/src/modules/blog/blog.schema.ts @@ -0,0 +1,51 @@ +import { z } from 'zod'; +import { R } from '@/plugins/magic/response.builders'; +import { blogOutSchema } from './blog.dto'; + +export const createBlogSchema = z.object({ + name: z.string({ required_error: 'Name is required' }).min(1), + description: z.string().optional(), +}); + +export const updateBlogSchema = z.object({ + name: z.string().min(1).optional(), + description: z.string().optional(), +}); + +export const getBlogsSchema = z.object({ + searchString: z.string().optional(), + limitParam: z + .string() + .default('10') + .refine( + (value) => !Number.isNaN(Number(value)) && Number(value) >= 0, + 'Input must be positive integer', + ) + .transform(Number), + pageParam: z + .string() + .default('1') + .refine( + (value) => !Number.isNaN(Number(value)) && Number(value) >= 0, + 'Input must be positive integer', + ) + .transform(Number), +}); + +export type CreateBlogSchemaType = z.infer; +export type UpdateBlogSchemaType = z.infer; +export type GetBlogsSchemaType = z.infer; + +// Response schemas +export const createBlogResponseSchema = R.success(blogOutSchema); +export const getBlogsResponseSchema = R.paginated(blogOutSchema); +export const 
getBlogByIdResponseSchema = R.success(blogOutSchema);
+export const updateBlogResponseSchema = R.success(blogOutSchema);
+
+// Response types
+export type CreateBlogResponseSchema = z.infer<typeof createBlogResponseSchema>;
+export type GetBlogsResponseSchema = z.infer<typeof getBlogsResponseSchema>;
+export type GetBlogByIdResponseSchema = z.infer<
+  typeof getBlogByIdResponseSchema
+>;
+export type UpdateBlogResponseSchema = z.infer<typeof updateBlogResponseSchema>;
diff --git a/src/modules/blog/blog.services.ts b/src/modules/blog/blog.services.ts
new file mode 100644
index 0000000..aaea30e
--- /dev/null
+++ b/src/modules/blog/blog.services.ts
@@ -0,0 +1,78 @@
+import type { FilterQuery } from "mongoose";
+import type { MongoIdSchemaType } from "@/common/common.schema";
+import { getPaginator } from "@/utils/pagination.utils";
+import type { BlogType } from "./blog.dto";
+import Blog, { type IBlogDocument } from "./blog.model";
+import type { CreateBlogSchemaType, GetBlogsSchemaType, UpdateBlogSchemaType } from "./blog.schema";
+
+export const createBlog = async (
+  payload: CreateBlogSchemaType,
+): Promise<BlogType> => {
+  const createdBlog = await Blog.create(payload);
+  return createdBlog.toObject();
+};
+
+export const getBlogById = async (blogId: string): Promise<BlogType> => {
+  const blog = await Blog.findById(blogId);
+
+  if (!blog) {
+    throw new Error("Blog not found");
+  }
+
+  return blog.toObject();
+};
+
+export const updateBlog = async (
+  blogId: string,
+  payload: UpdateBlogSchemaType,
+): Promise<BlogType> => {
+  const blog = await Blog.findByIdAndUpdate(
+    blogId,
+    { $set: payload },
+    { new: true },
+  );
+
+  if (!blog) {
+    throw new Error("Blog not found");
+  }
+
+  return blog.toObject();
+};
+
+export const deleteBlog = async (blogId: MongoIdSchemaType): Promise<void> => {
+  const blog = await Blog.findByIdAndDelete(blogId.id);
+
+  if (!blog) {
+    throw new Error("Blog not found");
+  }
+};
+
+export const getBlogs = async (
+  payload: GetBlogsSchemaType,
+) => {
+  const conditions: FilterQuery<IBlogDocument> = {};
+
+  if (payload.searchString) {
+    conditions.$or = [
+      { name: { $regex: payload.searchString, $options: "i" } },
+      { description: { $regex: payload.searchString, $options: "i" } },
+    ];
+  }
+
+  const totalRecords = await Blog.countDocuments(conditions);
+  const paginatorInfo = getPaginator(
+    payload.limitParam,
+    payload.pageParam,
+    totalRecords,
+  );
+
+  const results = await Blog.find(conditions)
+    .limit(paginatorInfo.limit)
+    .skip(paginatorInfo.skip)
+    .exec();
+
+  return {
+    results,
+    paginatorInfo,
+  };
+};
diff --git a/src/modules/blog/factories/blog.factory.ts b/src/modules/blog/factories/blog.factory.ts
new file mode 100644
index 0000000..3efd5c4
--- /dev/null
+++ b/src/modules/blog/factories/blog.factory.ts
@@ -0,0 +1,29 @@
+import { Types } from 'mongoose';
+import type { BlogModelType, BlogType } from '../blog.dto';
+import { createBlog } from '../blog.services';
+
+type Overrides = Partial<BlogModelType> & Record<string, unknown>;
+
+export const blogFactory = {
+  build(i = 1, overrides: Overrides = {}): BlogModelType {
+    return {
+      name: `Name ${i}`,
+      description: `Description ${i}`,
+      user: new Types.ObjectId() as any,
+      _id: new Types.ObjectId() as any,
+      ...overrides,
+    } as unknown as BlogModelType;
+  },
+
+  async create(i = 1, overrides: Overrides = {}): Promise<BlogType> {
+    const payload = this.build(i, overrides);
+    // Prefer service function when available
+    return await createBlog(payload as any);
+  },
+
+  async createMany(count: number, overrides: Overrides = {}): Promise<BlogType[]> {
+    const out: BlogType[] = [];
+    for (let i = 1; i <= count; i += 1) out.push(await this.create(i, overrides));
+    return out;
+  },
+};
\ No
newline at end of file diff --git a/src/modules/blog/seeders/BlogSeeder.ts b/src/modules/blog/seeders/BlogSeeder.ts new file mode 100644 index 0000000..cfa362e --- /dev/null +++ b/src/modules/blog/seeders/BlogSeeder.ts @@ -0,0 +1,24 @@ +import type { Seeder } from '@/seeders/types'; +import Blog from '../blog.model'; +import { blogFactory } from '../factories/blog.factory'; + +export const BlogSeeder: Seeder = { + name: 'BlogSeeder', + groups: ['base','dev','test'], + dependsOn: ["UserSeeder"], + collections: [Blog.collection.collectionName], + async run(ctx) { + if (ctx.env.group === 'dev' || ctx.env.group === 'test') { + const existing = await Blog.countDocuments({ name: { $regex: /^Name \d+$/ } }); + if (existing === 0) { + const docs = await blogFactory.createMany(5, { + user: (ctx.refs.has('user:seeded') ? ctx.refs.get('user:seeded')[0] : undefined) as any + }); + ctx.refs.set('blog:seeded', docs.map((d: any) => String(d._id))); + } else { + const ids = (await Blog.find({}).select('_id').lean()).map((d: any) => String(d._id)); + ctx.refs.set('blog:seeded', ids); + } + } + }, +}; \ No newline at end of file diff --git a/src/modules/healthcheck/healthcheck.controller.ts b/src/modules/healthcheck/healthcheck.controller.ts new file mode 100644 index 0000000..88ff5a8 --- /dev/null +++ b/src/modules/healthcheck/healthcheck.controller.ts @@ -0,0 +1,33 @@ +import type { Request } from 'express'; +import { StatusCodes } from '@/plugins/magic/status-codes'; +import type { ResponseExtended } from '@/types'; +import { errorResponse } from '@/utils/response.utils'; + +// Healthcheck uses raw response (not the standard envelope) +export const handleHealthCheck = async ( + _: Request, + res: ResponseExtended, +): Promise => { + const healthCheck = { + uptime: process.uptime(), + responseTime: process.hrtime(), + message: 'OK', + timestamp: Date.now(), + }; + + try { + // Direct JSON response for healthcheck (no envelope) + res.status(StatusCodes.OK).json(healthCheck); + return; + } catch (error) { + healthCheck.message = (error as Error).message; + + errorResponse( + res, + (error as Error).message, + StatusCodes.SERVICE_UNAVAILABLE, + healthCheck, + ); + return; + } +}; diff --git a/src/modules/healthcheck/healthcheck.routes.ts b/src/modules/healthcheck/healthcheck.routes.ts new file mode 100644 index 0000000..90cb407 --- /dev/null +++ b/src/modules/healthcheck/healthcheck.routes.ts @@ -0,0 +1,28 @@ +import { z } from 'zod'; +import MagicRouter from '@/plugins/magic/router'; +import { R } from '@/plugins/magic/response.builders'; +import { handleHealthCheck } from './healthcheck.controller'; + +export const HEALTH_ROUTER_ROOT = '/healthcheck'; + +const healthCheckRouter = new MagicRouter(HEALTH_ROUTER_ROOT); + +// Healthcheck endpoint +healthCheckRouter.get( + '/', + { + responses: { + 200: R.raw( + z.object({ + uptime: z.number(), + responseTime: z.tuple([z.number(), z.number()]), + message: z.string(), + timestamp: z.number(), + }), + ), + }, + }, + handleHealthCheck, +); + +export default healthCheckRouter.getRouter(); diff --git a/src/modules/upload/upload.controller.ts b/src/modules/upload/upload.controller.ts new file mode 100644 index 0000000..93ecca8 --- /dev/null +++ b/src/modules/upload/upload.controller.ts @@ -0,0 +1,41 @@ +import type { Request } from 'express'; +import { uploadFile } from '@/lib/storage'; +import type { ResponseExtended } from '@/types'; +import { errorResponse } from '@/utils/response.utils'; +import type { UserType } from '../user/user.dto'; +import { updateUser } 
from '../user/user.services'; +import { UploadSchema, UploadResponseSchema } from './upload.schema'; + +export const handleProfileUpload = async ( + req: Request, + res: ResponseExtended, +) => { + try { + const avatar = req.body.avatar; + const multipleFiles = req.body.multipleFiles; + const currentUser = req.user as unknown as UserType; + + if (!avatar) { + return errorResponse(res, 'File not uploaded, Please try again'); + } + + // Upload to S3 + const key = `user-${currentUser._id}/profile/${avatar.originalFilename}`; + const { url } = await uploadFile({ file: avatar, key }); + + // Update user profile + await updateUser(String(currentUser._id), { + avatar: url, + }); + return res.created?.({ + success: true, + message: 'File uploaded successfully', + data: { + key: avatar, + multipleFiles, + }, + }); + } catch (err) { + return errorResponse(res, (err as Error).message); + } +}; diff --git a/src/modules/upload/upload.router.ts b/src/modules/upload/upload.router.ts new file mode 100644 index 0000000..855b732 --- /dev/null +++ b/src/modules/upload/upload.router.ts @@ -0,0 +1,25 @@ +import { canAccess } from '@/middlewares/can-access'; +import MagicRouter from '@/plugins/magic/router'; +import { handleProfileUpload } from './upload.controller'; +import { uploadResponseSchema, uploadSchema } from './upload.schema'; + +export const UPLOAD_ROUTER_ROOT = '/upload'; + +const uploadRouter = new MagicRouter(UPLOAD_ROUTER_ROOT); + +// Upload profile picture +uploadRouter.post( + '/profile', + { + requestType: { body: uploadSchema }, + contentType: 'multipart/form-data', + multipart: true, + responses: { + "201": uploadResponseSchema, + }, + }, + canAccess(), + handleProfileUpload, +); + +export default uploadRouter.getRouter(); diff --git a/src/modules/upload/upload.schema.ts b/src/modules/upload/upload.schema.ts new file mode 100644 index 0000000..263c695 --- /dev/null +++ b/src/modules/upload/upload.schema.ts @@ -0,0 +1,24 @@ +import { z } from 'zod'; +import { zFile, zFiles, MIME_GROUPS } from '@/plugins/magic/zod-extend'; +import { R } from '@/plugins/magic/response.builders'; + +export const uploadSchema = z.object({ + avatar: zFile({ + maxSize: 5 * 1024 * 1024, // 5MB + allowedTypes: MIME_GROUPS.IMAGES, + }), + multipleFiles: zFiles({ + maxSize: 2 * 1024 * 1024, // 2MB per file + allowedTypes: MIME_GROUPS.IMAGES, + }).optional(), +}); + +export const uploadResponseSchema = R.success( + z.object({ + key: zFile(), + multipleFiles: zFiles().optional(), + }), +); + +export type UploadSchema = z.infer; +export type UploadResponseSchema = z.infer; diff --git a/src/modules/user/factories/user.factory.ts b/src/modules/user/factories/user.factory.ts new file mode 100644 index 0000000..18b29fe --- /dev/null +++ b/src/modules/user/factories/user.factory.ts @@ -0,0 +1,35 @@ +import { ROLE_ENUM } from '../../../enums'; +import type { UserModelType, UserType } from '../user.dto'; +import { createUser } from '../user.services'; + +type Overrides = Partial & { password?: string }; + +const base = (i = 1): UserModelType & { password: string } => ({ + email: `user${i}@example.com`, + username: `user${i}`, + name: `User ${i}`, + role: ROLE_ENUM.DEFAULT_USER, + password: 'password123', +}); + +export const userFactory = { + build(i = 1, overrides: Overrides = {}): UserModelType & { password: string } { + return { ...base(i), ...overrides } as UserModelType & { password: string }; + }, + + async create(i = 1, overrides: Overrides = {}): Promise { + const payload = this.build(i, overrides); + return 
createUser(payload); + }, + + async createMany(count: number, overrides: Overrides = {}): Promise { + const result: UserType[] = []; + for (let i = 1; i <= count; i += 1) { + + const u = await this.create(i, overrides); + result.push(u); + } + return result; + }, +}; + diff --git a/src/modules/user/seeders/UserSeeder.ts b/src/modules/user/seeders/UserSeeder.ts new file mode 100644 index 0000000..f1ef5a0 --- /dev/null +++ b/src/modules/user/seeders/UserSeeder.ts @@ -0,0 +1,56 @@ +import type { Seeder } from '../../../seeders/types'; +import User from '../user.model'; +import { userFactory } from '../factories/user.factory'; +import config from '../../../config/env'; + +export const UserSeeder: Seeder = { + name: 'UserSeeder', + groups: ['base', 'dev', 'test'], + dependsOn: [], + collections: ['users'], + async run(ctx) { + // Ensure admin user (idempotent by email) + const adminEmail = config.ADMIN_EMAIL; + const adminPassword = config.ADMIN_PASSWORD; + + const existingAdmin = await User.findOne({ email: adminEmail }); + if (!existingAdmin) { + const admin = await userFactory.create(0, { + email: adminEmail, + username: 'admin', + name: 'Admin', + password: adminPassword, + role: 'SUPER_ADMIN', + }); + ctx.refs.set('user.admin.id', String(admin._id)); + } else { + ctx.refs.set('user.admin.id', String(existingAdmin._id)); + } + + // Dev fixtures + if (ctx.env.group === 'dev') { + const count = await User.countDocuments({ + email: { $regex: /^user\d+@example\.com$/ }, + }); + if (count === 0) { + const users = await userFactory.createMany(5); + // Store user IDs for other seeders to use + ctx.refs.set( + 'user:seeded', + users.map((u) => String(u._id)), + ); + } else { + // If users already exist, fetch and store their IDs + const existingUsers = await User.find({ + email: { $regex: /^user\d+@example\.com$/ }, + }) + .select('_id') + .lean(); + ctx.refs.set( + 'user:seeded', + existingUsers.map((u) => String(u._id)), + ); + } + } + }, +}; diff --git a/src/modules/user/user.controller.ts b/src/modules/user/user.controller.ts index 693ecea..9879d40 100644 --- a/src/modules/user/user.controller.ts +++ b/src/modules/user/user.controller.ts @@ -1,74 +1,86 @@ -import type { Request, Response } from "express"; -import { StatusCodes } from "http-status-codes"; -import type { MongoIdSchemaType } from "../../common/common.schema"; -import config from "../../config/config.service"; -import { successResponse } from "../../utils/api.utils"; -import { generateRandomPassword } from "../../utils/auth.utils"; -import type { CreateUserSchemaType, GetUsersSchemaType } from "./user.schema"; -import { createUser, deleteUser, getUsers } from "./user.services"; +import type { Request } from 'express'; +import type { MongoIdSchemaType } from '../../common/common.schema'; +import config from '../../config/env'; +import type { ResponseExtended } from '../../types'; +import { successResponse } from '../../utils/response.utils'; +import { generateRandomPassword } from '../../utils/otp.utils'; +import type { + CreateUserSchemaType, + GetUsersSchemaType, + CreateUserResponseSchema, + GetUsersResponseSchema, + CreateSuperAdminResponseSchema, +} from './user.schema'; +import { createUser, deleteUser, getUsers } from './user.services'; export const handleDeleteUser = async ( - req: Request, - res: Response, + req: Request, + res: ResponseExtended, ) => { - await deleteUser({ id: req.params.id }); + await deleteUser({ id: req.params.id }); - return successResponse(res, "User has been deleted"); + return successResponse(res, 
'User has been deleted'); }; +// Using new res.created() helper export const handleCreateUser = async ( - req: Request, - res: Response, + req: Request, + res: ResponseExtended, ) => { - const data = req.body; + const data = req.body; - const user = await createUser({ - ...data, - password: generateRandomPassword(), - role: "DEFAULT_USER", - }); + const user = await createUser({ + ...data, + password: generateRandomPassword(), + role: 'DEFAULT_USER', + }); - return successResponse( - res, - "Email has been sent to the user", - user, - StatusCodes.CREATED, - ); + return res.created?.({ + success: true, + message: 'Email has been sent to the user', + data: user, + }); }; +// Using new res.created() helper export const handleCreateSuperAdmin = async ( - _: Request, - res: Response, + _: Request, + res: ResponseExtended, ) => { - - const user = await createUser({ - email: config.ADMIN_EMAIL, - name: "Super Admin", - username: "super_admin", - password: config.ADMIN_PASSWORD, - role: "SUPER_ADMIN", - phoneNo: "123456789", - otp: null, - }); + const user = await createUser({ + email: config.ADMIN_EMAIL, + name: 'Super Admin', + username: 'super_admin', + password: config.ADMIN_PASSWORD, + role: 'SUPER_ADMIN', + phoneNo: '123456789', + otp: null, + }); - return successResponse( - res, - "Super Admin has been created", - { email: user.email, password: config.ADMIN_PASSWORD }, - StatusCodes.CREATED, - ); + return res.created?.({ + success: true, + message: 'Super Admin has been created', + data: { email: user.email, password: config.ADMIN_PASSWORD }, + }); }; +// Using new res.ok() helper with paginated response export const handleGetUsers = async ( - req: Request, - res: Response, + req: Request, + res: ResponseExtended, ) => { - const { results, paginatorInfo } = await getUsers( - { - id: req.user.sub, - }, - req.query, - ); + const { results, paginatorInfo } = await getUsers( + { + id: req.user.sub, + }, + req.query, + ); - return successResponse(res, undefined, { results, paginatorInfo }); + return res.ok?.({ + success: true, + data: { + items: results, + paginator: paginatorInfo, + }, + }); }; diff --git a/src/modules/user/user.dto.ts b/src/modules/user/user.dto.ts index a473401..959aba2 100644 --- a/src/modules/user/user.dto.ts +++ b/src/modules/user/user.dto.ts @@ -1,42 +1,43 @@ -import z from "zod"; -import { definePaginatedResponse } from "../../common/common.utils"; +import z from 'zod'; +import { definePaginatedResponse } from '../../common/common.utils'; import { - ROLE_ENUM, - type RoleType, - SOCIAL_ACCOUNT_ENUM, - type SocialAccountType, -} from "../../enums"; + ROLE_ENUM, + type RoleType, + SOCIAL_ACCOUNT_ENUM, + type SocialAccountType, +} from '../../enums'; export const SocialAccountTypeZ = z.enum( - Object.keys(SOCIAL_ACCOUNT_ENUM) as [SocialAccountType], + Object.keys(SOCIAL_ACCOUNT_ENUM) as [SocialAccountType], ); export const RoleTypeZ = z.enum(Object.keys(ROLE_ENUM) as [RoleType]); export const socialAccountInfoSchema = z.object({ - accountType: SocialAccountTypeZ, - accessToken: z.string(), - tokenExpiry: z.date(), - refreshToken: z.string().optional(), - accountID: z.string(), + accountType: SocialAccountTypeZ, + accessToken: z.string(), + tokenExpiry: z.date(), + refreshToken: z.string().optional(), + accountID: z.string(), }); export const userOutSchema = z.object({ - email: z.string().email(), - avatar: z.string().url().optional(), - name: z.string().optional(), - username: z.string(), - role: RoleTypeZ, - phoneNo: z.string().optional(), - socialAccount: 
z.array(socialAccountInfoSchema).optional(), - updatedAt: z.date().optional(), - createdAt: z.date().optional(), + sub: z.string().optional(), + email: z.string().email(), + avatar: z.string().url().optional(), + name: z.string().optional(), + username: z.string(), + role: RoleTypeZ, + phoneNo: z.string().optional(), + socialAccount: z.array(socialAccountInfoSchema).optional(), + updatedAt: z.date().optional(), + createdAt: z.date().optional(), }); export const userSchema = userOutSchema.extend({ - otp: z.string().nullable().optional(), - password: z.string(), - passwordResetCode: z.string().optional().nullable(), + otp: z.string().nullable().optional(), + password: z.string(), + passwordResetCode: z.string().optional().nullable(), }); export const usersPaginatedSchema = definePaginatedResponse(userOutSchema); diff --git a/src/modules/user/user.model.ts b/src/modules/user/user.model.ts index 7aaf23f..6a14831 100644 --- a/src/modules/user/user.model.ts +++ b/src/modules/user/user.model.ts @@ -20,6 +20,7 @@ const SocialAccountSchema = new Schema({ const UserSchema: Schema = new Schema( { + _id: { type: String, required: true }, email: { type: String, unique: true, required: true }, avatar: { type: String }, username: { type: String, required: true, unique: true }, @@ -41,6 +42,8 @@ const UserSchema: Schema = new Schema( export interface ISocialAccountDocument extends SocialAccountInfoType, Document {} + export interface IUserDocument extends Document, UserModelType {} + const User = mongoose.model("User", UserSchema); export default User; diff --git a/src/modules/user/user.router.ts b/src/modules/user/user.router.ts index 6863d99..49b127e 100644 --- a/src/modules/user/user.router.ts +++ b/src/modules/user/user.router.ts @@ -1,32 +1,57 @@ -import { canAccess } from "../../middlewares/can-access.middleware"; -import MagicRouter from "../../openapi/magic-router"; +import { canAccess } from '@/middlewares/can-access'; +import MagicRouter from '@/plugins/magic/router'; import { - handleCreateSuperAdmin, - handleCreateUser, - handleGetUsers, -} from "./user.controller"; -import { createUserSchema, getUsersSchema } from "./user.schema"; + handleCreateSuperAdmin, + handleCreateUser, + handleGetUsers, +} from './user.controller'; +import { + createUserSchema, + getUsersSchema, + createUserResponseSchema, + getUsersResponseSchema, + createSuperAdminResponseSchema, +} from './user.schema'; -export const USER_ROUTER_ROOT = "/users"; +export const USER_ROUTER_ROOT = '/users'; const userRouter = new MagicRouter(USER_ROUTER_ROOT); +// List users with pagination userRouter.get( - "/", - { - requestType: { query: getUsersSchema }, - }, - canAccess(), - handleGetUsers, + '/', + { + requestType: { query: getUsersSchema }, + responses: { + 200: getUsersResponseSchema, + }, + }, + canAccess(), + handleGetUsers, ); +// Create user (admin only) userRouter.post( - "/user", - { requestType: { body: createUserSchema } }, - canAccess("roles", ["SUPER_ADMIN"]), - handleCreateUser, + '/user', + { + requestType: { body: createUserSchema }, + responses: { + 201: createUserResponseSchema, + }, + }, + canAccess('roles', ['SUPER_ADMIN']), + handleCreateUser, ); -userRouter.post("/_super-admin", {}, handleCreateSuperAdmin); +// Create super admin (initial setup) +userRouter.post( + '/_super-admin', + { + responses: { + 201: createSuperAdminResponseSchema, + }, + }, + handleCreateSuperAdmin, +); export default userRouter.getRouter(); diff --git a/src/modules/user/user.schema.ts b/src/modules/user/user.schema.ts index 
7b3c7bb..8615971 100644 --- a/src/modules/user/user.schema.ts +++ b/src/modules/user/user.schema.ts @@ -1,45 +1,68 @@ -import * as z from "zod"; -import { passwordValidationSchema } from "../../common/common.schema"; -import { ROLE_ENUM, type RoleType } from "../../enums"; -import { isValidUsername } from "../../utils/isUsername"; +import * as z from 'zod'; +import { passwordValidationSchema } from '@/common/common.schema'; +import { ROLE_ENUM, type RoleType } from '@/enums'; +import { R } from '@/plugins/magic/response.builders'; +import { userOutSchema } from './user.dto'; + +const usernameRegex = /^[a-zA-Z0-9_]{3,16}$/; + +export const isValidUsername = (username: string) => + usernameRegex.test(username); export const baseCreateUser = z.object({ - email: z - .string({ required_error: "Email is required" }) - .email({ message: "Email is not valid" }), - password: passwordValidationSchema("Password"), - username: z - .string({ required_error: "Username is required" }) - .min(1) - .refine((value) => isValidUsername(value), "Username must be valid"), + email: z + .string({ required_error: 'Email is required' }) + .email({ message: 'Email is not valid' }), + password: passwordValidationSchema('Password'), + username: z + .string({ required_error: 'Username is required' }) + .min(1) + .refine((value) => isValidUsername(value), 'Username must be valid'), }); export const createUserSchema = z - .object({ - name: z.string({ required_error: "First name is required" }).min(1), - }) - .merge(baseCreateUser); + .object({ + name: z.string({ required_error: 'First name is required' }).min(1), + }) + .merge(baseCreateUser); export const getUsersSchema = z.object({ - searchString: z.string().optional(), - limitParam: z - .string() - .default("10") - .refine( - (value) => !Number.isNaN(Number(value)) && Number(value) >= 0, - "Input must be positive integer", - ) - .transform(Number), - pageParam: z - .string() - .default("1") - .refine( - (value) => !Number.isNaN(Number(value)) && Number(value) >= 0, - "Input must be positive integer", - ) - .transform(Number), - filterByRole: z.enum(Object.keys(ROLE_ENUM) as [RoleType]).optional(), + searchString: z.string().optional(), + limitParam: z + .string() + .default('10') + .refine( + (value) => !Number.isNaN(Number(value)) && Number(value) >= 0, + 'Input must be positive integer', + ) + .transform(Number), + pageParam: z + .string() + .default('1') + .refine( + (value) => !Number.isNaN(Number(value)) && Number(value) >= 0, + 'Input must be positive integer', + ) + .transform(Number), + filterByRole: z.enum(Object.keys(ROLE_ENUM) as [RoleType]).optional(), }); export type CreateUserSchemaType = z.infer; export type GetUsersSchemaType = z.infer; + +// Response schemas +export const createUserResponseSchema = R.success(userOutSchema); +export const getUsersResponseSchema = R.paginated(userOutSchema); +export const createSuperAdminResponseSchema = R.success( + z.object({ + email: z.string().email(), + password: z.string(), + }), +); + +// Response types +export type CreateUserResponseSchema = z.infer; +export type GetUsersResponseSchema = z.infer; +export type CreateSuperAdminResponseSchema = z.infer< + typeof createSuperAdminResponseSchema +>; diff --git a/src/modules/user/user.services.ts b/src/modules/user/user.services.ts index dc07f11..735e8e0 100644 --- a/src/modules/user/user.services.ts +++ b/src/modules/user/user.services.ts @@ -1,7 +1,7 @@ import type { FilterQuery } from "mongoose"; import type { MongoIdSchemaType } from "../../common/common.schema"; 
-import { hashPassword } from "../../utils/auth.utils"; -import { getPaginator } from "../../utils/getPaginator"; +import { hashPassword } from "../../utils/password.utils"; +import { getPaginator } from "../../utils/pagination.utils"; import type { UserModelType, UserType } from "./user.dto"; import User, { type IUserDocument } from "./user.model"; import type { GetUsersSchemaType } from "./user.schema"; diff --git a/src/openapi/magic-router.ts b/src/openapi/magic-router.ts deleted file mode 100644 index 2457ecf..0000000 --- a/src/openapi/magic-router.ts +++ /dev/null @@ -1,291 +0,0 @@ -import { - type NextFunction, - type Request, - type Response, - Router, -} from 'express'; -import asyncHandler from 'express-async-handler'; -import type { ZodTypeAny } from 'zod'; -import { - errorResponseSchema, - successResponseSchema, -} from '../common/common.schema'; -import { canAccess } from '../middlewares/can-access.middleware'; -import { validateZodSchema } from '../middlewares/validate-zod-schema.middleware'; -import type { - RequestExtended, - RequestZodSchemaType, - ResponseExtended, -} from '../types'; -import responseInterceptor from '../utils/responseInterceptor'; -import { - camelCaseToTitleCase, - parseRouteString, - routeToClassName, -} from './openapi.utils'; -import { bearerAuth, registry } from './swagger-instance'; - -type Method = - | 'get' - | 'post' - | 'put' - | 'delete' - | 'patch' - | 'head' - | 'options' - | 'trace'; - -// biome-ignore lint/suspicious/noExplicitAny: -export type IDontKnow = unknown | never | any; -export type MaybePromise = void | Promise; -export type RequestAny = Request; -export type ResponseAny = Response>; -export type MagicPathType = `/${string}`; -export type MagicRoutePType = PathSet extends true - ? [reqAndRes: RequestAndResponseType, ...handlers: MagicMiddleware[]] - : [ - path: MagicPathType, - reqAndRes: RequestAndResponseType, - ...handlers: MagicMiddleware[], - ]; -export type MagicRouteRType = Omit< - MagicRouter, - 'route' | 'getRouter' | 'use' ->; -export type MagicMiddleware = ( - req: RequestAny, - res: ResponseAny, - next?: NextFunction, -) => MaybePromise; - -export type RequestAndResponseType = { - requestType?: RequestZodSchemaType; - responseModel?: ZodTypeAny; - contentType?: - | 'application/json' - | 'multipart/form-data' - | 'application/x-www-form-urlencoded'; -}; - -export class MagicRouter { - private router: Router; - private rootRoute: string; - private currentPath?: MagicPathType; - - constructor(rootRoute: string, currentPath?: MagicPathType) { - this.router = Router(); - this.rootRoute = rootRoute; - this.currentPath = currentPath; - } - - private getPath(path: string) { - return this.rootRoute + parseRouteString(path); - } - - private wrapper( - method: Method, - path: MagicPathType, - requestAndResponseType: RequestAndResponseType, - ...middlewares: Array - ): void { - const bodyType = requestAndResponseType.requestType?.body; - const paramsType = requestAndResponseType.requestType?.params; - const queryType = requestAndResponseType.requestType?.query; - const responseType = - requestAndResponseType.responseModel ?? successResponseSchema; - - const className = routeToClassName(this.rootRoute); - const title = camelCaseToTitleCase( - middlewares[middlewares.length - 1]?.name, - ); - - const bodySchema = bodyType - ? 
registry.register(`${title} Input`, bodyType) - : null; - - const hasSecurity = middlewares.some((m) => m.name === canAccess().name); - - const attachResponseModelMiddleware = ( - _: RequestAny, - res: ResponseAny, - next: NextFunction, - ) => { - res.locals.validateSchema = requestAndResponseType.responseModel; - next(); - }; - - const contentType = - requestAndResponseType.contentType ?? 'application/json'; - - registry.registerPath({ - method: method, - tags: [className], - path: this.getPath(path), - security: hasSecurity ? [{ [bearerAuth.name]: ['bearer'] }] : [], - description: title, - summary: title, - request: { - params: paramsType, - query: queryType, - ...(bodySchema - ? { - body: { - content: { - [contentType]: { - schema: bodySchema, - }, - }, - }, - } - : {}), - }, - responses: { - 200: { - description: '', - content: { - 'application/json': { - schema: responseType, - }, - }, - }, - 400: { - description: 'API Error Response', - content: { - 'application/json': { - schema: errorResponseSchema, - }, - }, - }, - 404: { - description: 'API Error Response', - content: { - 'application/json': { - schema: errorResponseSchema, - }, - }, - }, - 500: { - description: 'API Error Response', - content: { - 'application/json': { - schema: errorResponseSchema, - }, - }, - }, - }, - }); - - const requestType = requestAndResponseType.requestType ?? {}; - - const controller = asyncHandler(middlewares[middlewares.length - 1]); - - const responseInterceptorWrapper = ( - req: RequestAny | RequestExtended, - res: ResponseAny | ResponseExtended, - next: NextFunction, - ) => { - return responseInterceptor( - req as RequestExtended, - res as ResponseExtended, - next, - ); - }; - - middlewares.pop(); - - if (Object.keys(requestType).length) { - this.router[method]( - path, - attachResponseModelMiddleware, - responseInterceptorWrapper, - validateZodSchema(requestType), - ...middlewares, - controller, - ); - } else { - this.router[method]( - path, - attachResponseModelMiddleware, - ...middlewares, - responseInterceptorWrapper, - controller, - ); - } - } - - public get(...args: MagicRoutePType): MagicRouteRType { - return this.routeHandler('get', ...args); - } - - public post(...args: MagicRoutePType): MagicRouteRType { - return this.routeHandler('post', ...args); - } - - public delete(...args: MagicRoutePType): MagicRouteRType { - return this.routeHandler('delete', ...args); - } - - public patch(...args: MagicRoutePType): MagicRouteRType { - return this.routeHandler('patch', ...args); - } - - public put(...args: MagicRoutePType): MagicRouteRType { - return this.routeHandler('put', ...args); - } - - public use(...args: Parameters): void { - this.router.use(...args); - } - - public route(path: MagicPathType): MagicRouteRType { - // Create a proxy object that will use the same router instance - const proxy = { - get: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { - this.wrapper('get', path, ...args); - return proxy; - }, - post: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { - this.wrapper('post', path, ...args); - return proxy; - }, - put: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { - this.wrapper('put', path, ...args); - return proxy; - }, - delete: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { - this.wrapper('delete', path, ...args); - return proxy; - }, - patch: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { - this.wrapper('patch', path, ...args); - return proxy; - }, - }; - return proxy; - } - - private 
routeHandler(method: Method, ...args: MagicRoutePType) { - if (this.currentPath) { - const [reqAndRes, ...handlers] = args as [ - RequestAndResponseType, - ...MagicMiddleware[], - ]; - this.wrapper(method, this.currentPath, reqAndRes, ...handlers); - } else { - const [path, reqAndRes, ...handlers] = args as [ - MagicPathType, - RequestAndResponseType, - ...MagicMiddleware[], - ]; - this.wrapper(method, path, reqAndRes, ...handlers); - } - return this; - } - - // Method to get the router instance - public getRouter(): Router { - return this.router; - } -} - -export default MagicRouter; diff --git a/src/openapi/swagger-doc-generator.ts b/src/openapi/swagger-doc-generator.ts deleted file mode 100644 index cfed439..0000000 --- a/src/openapi/swagger-doc-generator.ts +++ /dev/null @@ -1,38 +0,0 @@ -import fs from "node:fs/promises"; -import { OpenApiGeneratorV3 } from "@asteasolutions/zod-to-openapi"; -import * as yaml from "yaml"; - -import type { OpenAPIObject } from "openapi3-ts/oas30"; -import config from "../config/config.service"; -import { registry } from "./swagger-instance"; - -export const getOpenApiDocumentation = (): OpenAPIObject => { - const generator = new OpenApiGeneratorV3(registry.definitions); - - return generator.generateDocument({ - openapi: "3.0.0", - info: { - version: config.APP_VERSION, - title: config.APP_NAME, - description: - "Robust backend boilerplate designed for scalability, flexibility, and ease of development. It's packed with modern technologies and best practices to kickstart your next backend project", - }, - servers: [{ url: "/api" }], - }); -}; - -export const convertDocumentationToYaml = (): string => { - const docs = getOpenApiDocumentation(); - - const fileContent = yaml.stringify(docs); - - return fileContent; -}; - -export const writeDocumentationToDisk = async (): Promise => { - const fileContent = convertDocumentationToYaml(); - - await fs.writeFile(`${__dirname}/openapi-docs.yml`, fileContent, { - encoding: "utf-8", - }); -}; diff --git a/src/openapi/zod-extend.ts b/src/openapi/zod-extend.ts deleted file mode 100644 index 8e29e81..0000000 --- a/src/openapi/zod-extend.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { extendZodWithOpenApi } from '@asteasolutions/zod-to-openapi'; -import { z } from 'zod'; -extendZodWithOpenApi(z); - -/** - * Helper to describe a single file upload field in OpenAPI spec. - * For use with multipart/form-data endpoints. - * @example - * z.object({ avatar: zFile() }) - */ -export const zFile = () => - z.any().openapi({ type: 'string', format: 'binary' }); - -/** - * Helper to describe multiple file upload fields in OpenAPI spec. - * For use with multipart/form-data endpoints. 
- * @example - * z.object({ images: zFiles() }) - */ -export const zFiles = () => - z - .array(z.any()) - .openapi({ type: 'array', items: { type: 'string', format: 'binary' } }); diff --git a/src/plugins/admin/admin-auth.ts b/src/plugins/admin/admin-auth.ts new file mode 100644 index 0000000..fbaf67d --- /dev/null +++ b/src/plugins/admin/admin-auth.ts @@ -0,0 +1,263 @@ +import crypto from 'crypto'; +import type { Request, Response, NextFunction } from 'express'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; + +// ───────────────────────────────────────────────────────────────────────────── +// Types +// ───────────────────────────────────────────────────────────────────────────── + +interface AdminSessionPayload { + sub: string; // username + iat: number; // issued at (unix timestamp) + exp: number; // expires at (unix timestamp) +} + +// ───────────────────────────────────────────────────────────────────────────── +// Token signing and verification (HMAC-based) +// ───────────────────────────────────────────────────────────────────────────── + +/** + * Sign an admin session token using HMAC-SHA256. + * Format: base64url(JSON payload) + '.' + base64url(HMAC) + */ +export function signAdminSession(username: string): string { + const now = Math.floor(Date.now() / 1000); + const payload: AdminSessionPayload = { + sub: username, + iat: now, + exp: now + config.ADMIN_SESSION_TTL, + }; + + const payloadJson = JSON.stringify(payload); + const payloadB64 = base64UrlEncode(Buffer.from(payloadJson, 'utf8')); + + const hmac = crypto + .createHmac('sha256', config.ADMIN_SESSION_SECRET) + .update(payloadB64) + .digest(); + const signature = base64UrlEncode(hmac); + + return `${payloadB64}.${signature}`; +} + +/** + * Verify an admin session token and return the payload if valid. + * Returns null if invalid or expired. 
+ */ +export function verifyAdminSession(token: string): AdminSessionPayload | null { + try { + const parts = token.split('.'); + if (parts.length !== 2) return null; + + const [payloadB64, signature] = parts; + + // Verify signature + const expectedHmac = crypto + .createHmac('sha256', config.ADMIN_SESSION_SECRET) + .update(payloadB64) + .digest(); + const expectedSignature = base64UrlEncode(expectedHmac); + + if (!timingSafeEqual(signature, expectedSignature)) return null; + + // Decode payload + const payloadJson = Buffer.from(payloadB64, 'base64url').toString('utf8'); + const payload = JSON.parse(payloadJson) as AdminSessionPayload; + + // Check expiration + const now = Math.floor(Date.now() / 1000); + if (payload.exp < now) return null; + + return payload; + } catch { + return null; + } +} + +// ───────────────────────────────────────────────────────────────────────────── +// Cookie helpers +// ───────────────────────────────────────────────────────────────────────────── + +export function setAdminCookie(res: Response, token: string): void { + const isSecure = config.NODE_ENV === 'production' && config.HTTPS_ENABLED; + res.cookie(config.ADMIN_COOKIE_NAME, token, { + httpOnly: true, + secure: isSecure, + sameSite: 'lax', + maxAge: config.ADMIN_SESSION_TTL * 1000, + path: '/', + }); +} + +export function clearAdminCookie(res: Response): void { + res.clearCookie(config.ADMIN_COOKIE_NAME, { path: '/' }); +} + +// ───────────────────────────────────────────────────────────────────────────── +// Credential verification +// ───────────────────────────────────────────────────────────────────────────── + +/** + * Compare provided credentials against environment variables. + * Uses constant-time comparison to prevent timing attacks. + */ +export function compareCredentials( + username: string, + password: string, +): boolean { + const validUsername = config.ADMIN_USERNAME; + const validPassword = config.ADMIN_PANEL_PASSWORD; + + const usernameMatch = timingSafeEqual(username, validUsername); + const passwordMatch = timingSafeEqual(password, validPassword); + + return usernameMatch && passwordMatch; +} + +// ───────────────────────────────────────────────────────────────────────────── +// Middleware guards +// ───────────────────────────────────────────────────────────────────────────── + +/** + * Guard for admin API routes (/admin/api/*). + * Returns 401 JSON if unauthorized. + */ +export function adminAuthGuardApi( + req: Request, + res: Response, + next: NextFunction, +): void { + if (!config.ADMIN_AUTH_ENABLED) { + return next(); + } + + const token = req.cookies?.[config.ADMIN_COOKIE_NAME]; + if (!token) { + res.status(401).json({ error: 'unauthorized' }); + return; + } + + const payload = verifyAdminSession(token); + if (!payload) { + clearAdminCookie(res); + res.status(401).json({ error: 'unauthorized' }); + return; + } + + // Attach admin user to request for logging/audit + (req as any).adminUser = payload.sub; + next(); +} + +/** + * Guard for admin UI routes (/admin). + * Redirects to /admin/login if unauthorized. 
+ */ +export const adminAuthGuardUI = + (adminPath: string = '/admin') => + (req: Request, res: Response, next: NextFunction) => { + if (!config.ADMIN_AUTH_ENABLED) { + return next(); + } + + const token = req.cookies?.[config.ADMIN_COOKIE_NAME]; + if (!token) { + const nextUrl = encodeURIComponent(req.originalUrl); + res.redirect(`${adminPath}/login?next=${nextUrl}`); + return; + } + + const payload = verifyAdminSession(token); + if (!payload) { + clearAdminCookie(res); + const nextUrl = encodeURIComponent(req.originalUrl); + res.redirect(`${adminPath}/login?next=${nextUrl}`); + return; + } + + // Attach admin user to request + (req as any).adminUser = payload.sub; + next(); + }; + +// ───────────────────────────────────────────────────────────────────────────── +// Rate limiting (in-memory, simple) +// ───────────────────────────────────────────────────────────────────────────── + +interface RateLimitEntry { + count: number; + resetAt: number; +} + +const rateLimitStore = new Map(); + +const RATE_LIMIT_WINDOW_MS = 5 * 60 * 1000; // 5 minutes +const RATE_LIMIT_MAX_ATTEMPTS = 10; + +/** + * Simple in-memory rate limiter for admin login. + * Returns true if request should be allowed, false if rate limited. + */ +export function checkAdminLoginRateLimit(identifier: string): boolean { + const now = Date.now(); + const entry = rateLimitStore.get(identifier); + + if (!entry || entry.resetAt < now) { + // New window + rateLimitStore.set(identifier, { + count: 1, + resetAt: now + RATE_LIMIT_WINDOW_MS, + }); + return true; + } + + if (entry.count >= RATE_LIMIT_MAX_ATTEMPTS) { + logger.warn( + { identifier, attempts: entry.count }, + 'Admin login rate limit exceeded', + ); + return false; + } + + entry.count += 1; + return true; +} + +// Cleanup old entries periodically +setInterval(() => { + const now = Date.now(); + for (const [key, entry] of rateLimitStore.entries()) { + if (entry.resetAt < now) { + rateLimitStore.delete(key); + } + } +}, 60 * 1000); // Every minute + +// ───────────────────────────────────────────────────────────────────────────── +// Utilities +// ───────────────────────────────────────────────────────────────────────────── + +function base64UrlEncode(buffer: Buffer): string { + return buffer + .toString('base64') + .replace(/\+/g, '-') + .replace(/\//g, '_') + .replace(/=/g, ''); +} + +/** + * Constant-time string comparison to prevent timing attacks. 
+ */ +function timingSafeEqual(a: string, b: string): boolean { + const bufA = Buffer.from(a, 'utf8'); + const bufB = Buffer.from(b, 'utf8'); + + if (bufA.length !== bufB.length) { + // Still compare to prevent early exit timing leak + crypto.timingSafeEqual(bufA, bufA); + return false; + } + + return crypto.timingSafeEqual(bufA, bufB); +} diff --git a/src/plugins/admin/index.ts b/src/plugins/admin/index.ts new file mode 100644 index 0000000..72e28c4 --- /dev/null +++ b/src/plugins/admin/index.ts @@ -0,0 +1,110 @@ +import type { ToolkitPlugin, PluginFactory } from '@/plugins/types'; +import express from 'express'; +import { + adminAuthGuardApi, + adminAuthGuardUI, + signAdminSession, + setAdminCookie, + clearAdminCookie, + compareCredentials, + checkAdminLoginRateLimit, +} from './admin-auth'; + +import path from 'path'; +import logger from '@/plugins/observability/logger'; + +import { adminApiRouter, registerAdminUI } from './router'; +import cookieParser from 'cookie-parser'; + +export interface AdminDashboardOptions { + adminPath: string; + authGuard: boolean; +} + +export const adminDashboardPlugin: PluginFactory = ( + options, +): ToolkitPlugin => { + const { adminPath, authGuard } = options as AdminDashboardOptions; + + return { + name: 'admin-dashboard', + priority: 50, + options, + + register({ app, port }) { + + app.use(express.json()); + app.use(express.urlencoded({ extended: true })); + app.use(cookieParser()) + + app.get(`/admin/login`, (req, res) => { + const loginPath = path.join( + process.cwd(), + 'public', + 'admin', + 'login.html', + ); + res.sendFile(loginPath); + }); + + // Admin authentication routes + app.post(`/admin/login`, (req, res) => { + const { username, password } = req.body; + const identifier = req.ip || 'unknown'; + + // Rate limiting + if (!checkAdminLoginRateLimit(identifier)) { + logger.warn({ identifier }, 'Admin login rate limit exceeded'); + return res.status(429).json({ error: 'too_many_attempts' }); + } + + // Validate credentials + if (!username || !password || !compareCredentials(username, password)) { + logger.warn( + { username, ip: identifier }, + 'Failed admin login attempt', + ); + return res.status(401).json({ error: 'invalid_credentials' }); + } + + // Create session + const token = signAdminSession(username); + setAdminCookie(res, token); + + logger.info({ username, ip: identifier }, 'Admin login successful'); + + // Redirect or return JSON based on Accept header + const acceptsJson = req.headers.accept?.includes('application/json'); + if (acceptsJson) { + return res.json({ ok: true }); + } + + const next = + typeof req.query.next === 'string' ? req.query.next : '/admin'; + res.redirect(next); + }); + + app.post(`/admin/logout`, (req, res) => { + clearAdminCookie(res); + const acceptsJson = req.headers.accept?.includes('application/json'); + if (acceptsJson) { + return res.json({ ok: true }); + } + + }); + + app.use(`/admin/api`, adminAuthGuardApi, adminApiRouter); + + // Admin dashboard (CRUD) — UI and JSON API (protected) + registerAdminUI( + app, + adminPath, + authGuard ? 
adminAuthGuardUI(adminPath) : undefined, + ); + + return [`http://localhost:${port}${adminPath}`]; + }, + }; +}; + +export default adminDashboardPlugin; diff --git a/src/plugins/admin/registry.ts b/src/plugins/admin/registry.ts new file mode 100644 index 0000000..acc30fe --- /dev/null +++ b/src/plugins/admin/registry.ts @@ -0,0 +1,50 @@ +import User from '@/modules/user/user.model'; +import { SessionModel } from '@/modules/auth/session/session.model'; +import Blog from '@/modules/blog/blog.model'; +import type { AdminResource } from './types'; + +export const adminResources: AdminResource[] = [ + { + name: 'users', + label: 'Users', + model: User, + readOnlyFields: ['_id', 'createdAt', 'updatedAt', 'password'], + fileFields: ['avatar'], + displayField: 'email', + }, + { + name: 'sessions', + label: 'Sessions', + model: SessionModel, + readOnlyFields: ['_id', 'createdAt', 'updatedAt'], + displayField: 'tokenHash', + }, + { + name: 'blogs', + label: 'Blogs', + model: Blog, + readOnlyFields: ['_id', 'createdAt', 'updatedAt'], + displayField: 'name', + }, +]; + +export function getResource(name: string): AdminResource | undefined { + return adminResources.find((r) => r.name === name); +} + +// Helper: map mongoose modelName -> admin resource +const modelNameToResource = new Map(); +for (const res of adminResources) { + try { + const modelName = res.model.modelName; + if (modelName) modelNameToResource.set(modelName, res); + } catch { + // ignore + } +} + +export function getResourceByModelName( + modelName: string, +): AdminResource | undefined { + return modelNameToResource.get(modelName); +} diff --git a/src/plugins/admin/router.ts b/src/plugins/admin/router.ts new file mode 100644 index 0000000..9eafe3d --- /dev/null +++ b/src/plugins/admin/router.ts @@ -0,0 +1,568 @@ +import path from 'path'; +import { type Application, Router, type RequestHandler } from 'express'; +import formidable from 'formidable'; +import type { FilterQuery } from 'mongoose'; +import { adminResources, getResource } from './registry'; +import { buildSearchQuery, getFields } from './utils/schema-introspection'; +import type { AdminField } from './types'; +import { LocalStorageProvider } from '@/lib/storage'; +import type { FormFile } from '@/types'; +import logger from '@/plugins/observability/logger'; +import fs from 'fs'; + +export const adminApiRouter = Router(); + +// Admin module uses local storage by default for simplicity +// Files are stored in public/uploads/admin and served at /uploads/admin +const adminStorageProvider = new LocalStorageProvider(); + +/** + * Middleware to handle file uploads for admin resources + * Uses the configured storage provider (S3, R2, or local) + */ +async function uploadForResource(req: any, res: any, next: any) { + const resource = getResource(req.params.resource); + if (!resource || !resource.fileFields || resource.fileFields.length === 0) + return next(); + const ct = String(req.headers['content-type'] || ''); + if (!ct.startsWith('multipart/form-data')) return next(); + + const form = formidable({ + keepExtensions: true, + maxFileSize: 10 * 1024 * 1024, // 10MB + filename: (_name, _ext, part) => { + const safe = (part.originalFilename || 'file').replace( + /[^a-zA-Z0-9._-]+/g, + '-', + ); + return `${Date.now()}-${safe}`; + }, + }); + + form.parse( + req, + async ( + err: Error | null, + fields: formidable.Fields, + files: formidable.Files, + ) => { + if (err) { + return res.status(400).json({ + error: 'Failed to parse multipart data', + details: err.message, + }); + } + + try { 
+ // Normalize fields + const normalizedFields: Record = {}; + for (const [key, value] of Object.entries(fields)) { + normalizedFields[key] = + Array.isArray(value) && value.length === 1 ? value[0] : value; + } + + // Upload files to storage provider and get URLs + const uploadedFiles: Record = {}; + for (const [key, value] of Object.entries(files)) { + if (!resource.fileFields?.includes(key)) continue; + + const fileArray = Array.isArray(value) ? value : [value]; + const uploadResults = []; + + for (const file of fileArray) { + if (!file) continue; + + // Convert formidable.File to FormFile format + const formFile: FormFile = { + filepath: file.filepath, + originalFilename: file.originalFilename || 'file', + mimetype: file.mimetype || 'application/octet-stream', + size: file.size, + }; + + // Generate unique key for storage + const originalName = formFile.originalFilename || 'file'; + const ext = path.extname(originalName); + const basename = path.basename(originalName, ext); + const safeBasename = basename.replace(/[^a-zA-Z0-9._-]+/g, '-'); + const storageKey = `admin/${resource.name}/${Date.now()}-${safeBasename}${ext}`; + + // Upload to storage provider + const result = await adminStorageProvider.upload({ + file: formFile, + key: storageKey, + }); + + uploadResults.push({ + url: result.url, + key: result.key, + filename: formFile.originalFilename, + size: formFile.size, + mimetype: formFile.mimetype, + }); + } + + // Store results + if (Array.isArray(value)) { + uploadedFiles[key] = uploadResults; + } else { + uploadedFiles[key] = uploadResults[0]; + } + } + + // Merge into req.body + req.body = { ...normalizedFields }; + req.uploadedFiles = uploadedFiles; + + next(); + } catch (uploadErr: any) { + logger.error( + { err: uploadErr, resource: resource.name }, + 'Failed to upload files', + ); + return res.status(500).json({ + error: 'Failed to upload files', + details: uploadErr.message, + }); + } + }, + ); +} + +adminApiRouter.get('/meta', (_req, res) => { + const resources = adminResources.map((r) => ({ + name: r.name, + label: r.label ?? r.name, + })); + res.json({ resources }); +}); + +adminApiRouter.get('/:resource/meta', (req, res) => { + const resource = getResource(req.params.resource); + if (!resource) return res.status(404).json({ error: 'resource_not_found' }); + const fields = getFields(resource.model, resource.fields); + res.json({ + name: resource.name, + label: resource.label ?? resource.name, + fields, + fileFields: resource.fileFields || [], + }); +}); + +adminApiRouter.get('/:resource', async (req, res) => { + const resource = getResource(req.params.resource); + if (!resource) return res.status(404).json({ error: 'resource_not_found' }); + + const page = Math.max(parseInt(String(req.query.page || '1'), 10) || 1, 1); + const limit = Math.min( + Math.max(parseInt(String(req.query.limit || '10'), 10) || 10, 1), + 100, + ); + const sort = String(req.query.sort || '-createdAt'); + const q = typeof req.query.q === 'string' ? req.query.q : undefined; + + const allowedFields = + resource.fields && resource.fields.length ? resource.fields : undefined; + const fieldsMeta = getFields(resource.model, allowedFields); + const searchQuery = buildSearchQuery(q, fieldsMeta); + const query: FilterQuery = { ...(searchQuery as object) }; + + const projection = allowedFields + ? 
Object.fromEntries([...allowedFields, '_id'].map((f) => [f, 1])) + : undefined; + + const [data, total] = await Promise.all([ + resource.model + .find(query, projection) + .sort(sort) + .skip((page - 1) * limit) + .limit(limit) + .lean(), + resource.model.countDocuments(query), + ]); + + res.json({ data, page, limit, total }); +}); + +// Relation lookup endpoint: search or batch by ids to retrieve label options +adminApiRouter.get('/:resource/lookup/:field', async (req, res) => { + const resource = getResource(req.params.resource); + if (!resource) return res.status(404).json({ error: 'resource_not_found' }); + + const fieldsMeta = getFields(resource.model, resource.fields); + const field = findFieldByPath(fieldsMeta, req.params.field); + if (!field || field.type !== 'relation' || !field.relation) + return res.status(404).json({ error: 'relation_field_not_found' }); + + // Resolve target resource/model and display field + const target = adminResources.find( + (r) => r.name === field.relation!.resource, + ); + if (!target) + return res.status(404).json({ error: 'target_resource_not_found' }); + const labelField = + field.relation!.displayField || target.displayField || 'name'; + + const idsParam = + typeof req.query.ids === 'string' ? req.query.ids : undefined; + const q = typeof req.query.q === 'string' ? req.query.q : undefined; + const page = Math.max(parseInt(String(req.query.page || '1'), 10) || 1, 1); + const limit = Math.min( + Math.max(parseInt(String(req.query.limit || '10'), 10) || 10, 1), + 100, + ); + const recent = req.query.recent === '1' || req.query.recent === 'true'; + + try { + if (idsParam) { + const ids = idsParam + .split(',') + .map((s) => s.trim()) + .filter(Boolean); + if (ids.length === 0) return res.json({ options: [] }); + const docs = await target.model + .find({ _id: { $in: ids } }, { _id: 1, [labelField]: 1 }) + .limit(100) + .lean(); + const label = (d: any) => + d && (d[labelField] ?? d.name ?? d.title ?? d.email ?? String(d._id)); + const options = docs.map((d: any) => ({ + _id: String(d._id), + label: String(label(d)), + })); + return res.json({ options }); + } + + if (q) { + const query: any = { [labelField]: { $regex: q, $options: 'i' } }; + const docs = await target.model + .find(query, { _id: 1, [labelField]: 1 }) + .sort({ [labelField]: 1 }) + .skip((page - 1) * limit) + .limit(limit) + .lean(); + const label = (d: any) => + d && (d[labelField] ?? d.name ?? d.title ?? d.email ?? String(d._id)); + const options = docs.map((d: any) => ({ + _id: String(d._id), + label: String(label(d)), + })); + return res.json({ options, page, limit }); + } + + // Recent items mode (no q or ids). Prefer newest first based on timestamps if available. + if (recent || (!idsParam && !q)) { + const sortBy: any = target.model.schema?.paths?.createdAt + ? { createdAt: -1 } + : { _id: -1 }; + const docs = await target.model + .find({}, { _id: 1, [labelField]: 1 }) + .sort(sortBy) + .limit(limit) + .lean(); + const label = (d: any) => + d && (d[labelField] ?? d.name ?? d.title ?? d.email ?? 
String(d._id)); + const options = docs.map((d: any) => ({ + _id: String(d._id), + label: String(label(d)), + })); + return res.json({ options, page: 1, limit }); + } + + return res + .status(400) + .json({ error: 'missing_query', details: 'Provide ids, q, or recent=1' }); + } catch (err: any) { + return res + .status(400) + .json({ error: 'lookup_failed', details: err?.message }); + } +}); + +adminApiRouter.get('/:resource/:id', async (req, res) => { + const resource = getResource(req.params.resource); + if (!resource) return res.status(404).json({ error: 'resource_not_found' }); + const allowedFields = + resource.fields && resource.fields.length ? resource.fields : undefined; + const projection = allowedFields + ? Object.fromEntries([...allowedFields, '_id'].map((f) => [f, 1])) + : undefined; + const doc = await resource.model.findById(req.params.id, projection).lean(); + if (!doc) return res.status(404).json({ error: 'not_found' }); + res.json({ data: doc }); +}); + +adminApiRouter.post('/:resource', uploadForResource, async (req: any, res) => { + const resource = getResource(req.params.resource); + if (!resource) return res.status(404).json({ error: 'resource_not_found' }); + try { + const body = { ...(req.body || {}) } as Record; + + // Handle uploaded files - store the URL from storage provider + if (resource.fileFields && req.uploadedFiles) { + for (const field of resource.fileFields) { + const uploadedFile = req.uploadedFiles[field]; + if (uploadedFile) { + // Store the URL (and optionally the key for deletion) + if (Array.isArray(uploadedFile)) { + body[field] = uploadedFile[0]?.url; + // Store key for future deletion + body[`${field}_key`] = uploadedFile[0]?.key; + } else { + body[field] = uploadedFile.url; + // Store key for future deletion + body[`${field}_key`] = uploadedFile.key; + } + } + } + } + + const created = await resource.model.create(body); + res.status(201).json({ data: created }); + } catch (err: any) { + res.status(400).json({ error: 'validation_error', details: err?.message }); + } +}); + +adminApiRouter.put( + '/:resource/:id', + uploadForResource, + async (req: any, res) => { + const resource = getResource(req.params.resource); + if (!resource) return res.status(404).json({ error: 'resource_not_found' }); + const readOnly = new Set([...(resource.readOnlyFields || []), '_id']); + const payload: Record = {}; + + // Copy non-readonly fields + for (const [k, v] of Object.entries(req.body || {})) { + if (!readOnly.has(k)) payload[k] = v; + } + + // Handle uploaded files + if (resource.fileFields && req.uploadedFiles) { + // First, get the existing document to delete old files + const existing = (await resource.model + .findById(req.params.id) + .lean()) as any; + + for (const field of resource.fileFields) { + if (readOnly.has(field)) continue; + + const uploadedFile = req.uploadedFiles[field]; + if (uploadedFile) { + // Delete old file if it exists + const oldKey = existing?.[`${field}_key`]; + if (oldKey) { + try { + await adminStorageProvider.delete(oldKey); + logger.info( + { key: oldKey, field, resource: resource.name }, + 'Deleted old file during update', + ); + } catch (deleteErr: any) { + logger.error( + { err: deleteErr, key: oldKey }, + 'Failed to delete old file', + ); + } + } + + // Store new file URL and key + if (Array.isArray(uploadedFile)) { + payload[field] = uploadedFile[0]?.url; + payload[`${field}_key`] = uploadedFile[0]?.key; + } else { + payload[field] = uploadedFile.url; + payload[`${field}_key`] = uploadedFile.key; + } + } + } + } + + try { + 
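+      // Note (added comment): `new: true` makes findByIdAndUpdate return the updated
+      // document and `runValidators: true` re-runs schema validation on the payload;
+      // read-only fields were already stripped from `payload` above, so they cannot
+      // be overwritten by this update.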
const updated = await resource.model.findByIdAndUpdate( + req.params.id, + payload, + { + new: true, + runValidators: true, + }, + ); + if (!updated) return res.status(404).json({ error: 'not_found' }); + res.json({ data: updated }); + } catch (err: any) { + res + .status(400) + .json({ error: 'validation_error', details: err?.message }); + } + }, +); + +adminApiRouter.delete('/:resource/:id', async (req, res) => { + const resource = getResource(req.params.resource); + if (!resource) return res.status(404).json({ error: 'resource_not_found' }); + + // Get document before deletion to access file keys + const doc = (await resource.model.findById(req.params.id).lean()) as any; + if (!doc) return res.status(404).json({ error: 'not_found' }); + + // Delete the document + const deleted = await resource.model.findByIdAndDelete(req.params.id); + if (!deleted) return res.status(404).json({ error: 'not_found' }); + + // Delete associated files from storage + if (resource.fileFields && resource.fileFields.length > 0) { + for (const field of resource.fileFields) { + const fileKey = doc[`${field}_key`]; + if (fileKey) { + try { + await adminStorageProvider.delete(fileKey); + logger.info( + { key: fileKey, field, resource: resource.name, id: req.params.id }, + 'Deleted file after resource deletion', + ); + } catch (deleteErr: any) { + logger.error( + { err: deleteErr, key: fileKey, field }, + 'Failed to delete file from storage', + ); + } + } + } + } + + res.json({ ok: true }); +}); + +// Bulk delete by ids +adminApiRouter.post('/:resource/bulk-delete', async (req, res) => { + const resource = getResource(req.params.resource); + if (!resource) return res.status(404).json({ error: 'resource_not_found' }); + + const ids = Array.isArray(req.body?.ids) + ? (req.body.ids as unknown[]).map(String).filter(Boolean) + : []; + if (ids.length === 0) + return res + .status(400) + .json({ error: 'invalid_request', details: 'ids[] required' }); + + try { + // Get documents before deletion to access file keys + const docs = + resource.fileFields && resource.fileFields.length > 0 + ? ((await resource.model.find({ _id: { $in: ids } }).lean()) as any[]) + : []; + + // Delete documents from database + const result = await resource.model.deleteMany({ _id: { $in: ids } }); + + // Delete associated files from storage + if (docs.length > 0 && resource.fileFields) { + for (const doc of docs) { + for (const field of resource.fileFields) { + const fileKey = doc[`${field}_key`]; + if (fileKey) { + try { + await adminStorageProvider.delete(fileKey); + logger.info( + { key: fileKey, field, resource: resource.name, id: doc._id }, + 'Deleted file during bulk delete', + ); + } catch (deleteErr: any) { + logger.error( + { err: deleteErr, key: fileKey, field }, + 'Failed to delete file from storage', + ); + } + } + } + } + } + + return res.json({ deletedCount: result?.deletedCount ?? 0 }); + } catch (err: any) { + return res + .status(400) + .json({ error: 'bulk_delete_failed', details: err?.message }); + } +}); + +// Clear all documents for a resource +adminApiRouter.post('/:resource/clear', async (req, res) => { + const resource = getResource(req.params.resource); + if (!resource) return res.status(404).json({ error: 'resource_not_found' }); + try { + // Get all documents before deletion to access file keys + const docs = + resource.fileFields && resource.fileFields.length > 0 + ? 
((await resource.model.find({}).lean()) as any[]) + : []; + + // Delete all documents from database + const result = await resource.model.deleteMany({}); + + // Delete associated files from storage + if (docs.length > 0 && resource.fileFields) { + for (const doc of docs) { + for (const field of resource.fileFields) { + const fileKey = doc[`${field}_key`]; + if (fileKey) { + try { + await adminStorageProvider.delete(fileKey); + logger.info( + { key: fileKey, field, resource: resource.name, id: doc._id }, + 'Deleted file during clear', + ); + } catch (deleteErr: any) { + logger.error( + { err: deleteErr, key: fileKey, field }, + 'Failed to delete file from storage', + ); + } + } + } + } + } + + return res.json({ deletedCount: result?.deletedCount ?? 0 }); + } catch (err: any) { + return res + .status(400) + .json({ error: 'clear_failed', details: err?.message }); + } +}); + +export function registerAdminUI( + app: Application, + adminPath: string = '/admin', + guard?: RequestHandler, +) { + const handlers: RequestHandler[] = []; + if (guard) handlers.push(guard); + handlers.push((_req, res) => { + const indexPath = path.join(process.cwd(), 'public', 'admin', 'index.html'); + res.sendFile(indexPath); + }); + + app.get(adminPath, ...handlers); +} + +function findFieldByPath( + fields: AdminField[], + dotted: string, +): AdminField | undefined { + const parts = dotted.split('.'); + let currentFields = fields; + let field: AdminField | undefined; + for (let i = 0; i < parts.length; i++) { + const seg = parts[i]; + field = currentFields.find((f) => f.path === seg); + if (!field) return undefined; + if (i < parts.length - 1) { + if (field.type !== 'subdocument' || !field.children) return undefined; + currentFields = field.children; + } + } + return field; +} diff --git a/src/plugins/admin/types.ts b/src/plugins/admin/types.ts new file mode 100644 index 0000000..b69cba9 --- /dev/null +++ b/src/plugins/admin/types.ts @@ -0,0 +1,32 @@ +import type { Model } from 'mongoose'; + +export type AdminResource = { + name: string; + label?: string; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + model: Model; + fields?: string[]; + readOnlyFields?: string[]; + fileFields?: string[]; // fields that should be uploaded via multipart; values stored as URL strings + // Display field for this resource (used as label in relation lookups) + displayField?: string; +}; + +export type AdminField = { + path: string; + type: string; + required: boolean; + enumValues?: string[]; + isArray?: boolean; + // Present when type === 'relation' + relation?: { + // Mongoose modelName of the referenced model + model: string; + // Admin resource name of the referenced resource + resource: string; + // Field to display as label for the referenced resource + displayField: string; + }; + // Present when type === 'subdocument' + children?: AdminField[]; +}; diff --git a/src/plugins/admin/utils/schema-introspection.ts b/src/plugins/admin/utils/schema-introspection.ts new file mode 100644 index 0000000..6fa6f38 --- /dev/null +++ b/src/plugins/admin/utils/schema-introspection.ts @@ -0,0 +1,172 @@ +import type { Model, Schema as MongooseSchema } from 'mongoose'; +import type { AdminField } from '../types'; +import { getResourceByModelName } from '../registry'; + +function mapType(instance?: string): string { + switch (instance) { + case 'String': + return 'string'; + case 'Number': + return 'number'; + case 'Boolean': + return 'boolean'; + case 'Date': + return 'date'; + case 'ObjectId': + case 'ObjectID': + return 
'objectId'; + case 'Array': + return 'array'; + case 'Mixed': + case 'Map': + return 'mixed'; + default: + return (instance || 'mixed').toLowerCase(); + } +} + +export function getFields(model: Model, only?: string[]): AdminField[] { + return extractFieldsFromSchema(model.schema, only, 0); +} + +function extractFieldsFromSchema( + schema: MongooseSchema, + only: string[] | undefined, + depth: number, +): AdminField[] { + const fields: AdminField[] = []; + if (depth > 3) return fields; // avoid deep recursion + for (const [path, schemaType] of Object.entries((schema as any).paths)) { + if (path === '__v') continue; + if (only && only.length && !only.includes(path)) continue; + const instance = (schemaType as any).instance as string | undefined; + const options = (schemaType as any).options || {}; + const enumValues: string[] | undefined = options.enum; + let required = false; + try { + required = + typeof (schemaType as any).isRequired === 'function' + ? !!(schemaType as any).isRequired() + : !!options.required; + } catch { + required = !!options.required; + } + const isArray = instance === 'Array'; + + // Array of subdocuments (DocumentArrayPath) — detect before single to avoid misclassification + const caster: any = + (schemaType as any).caster || (schemaType as any).$embeddedSchemaType; + const maybeSubArraySchema: MongooseSchema | undefined = + caster?.schema || (isArray ? (schemaType as any).schema : undefined); + if (isArray && maybeSubArraySchema) { + const children = extractFieldsFromSchema( + maybeSubArraySchema, + undefined, + depth + 1, + ); + fields.push({ + path, + type: 'subdocument', + required, + enumValues, + isArray: true, + children, + }); + continue; + } + + // Subdocument (single) + const subSchema: MongooseSchema | undefined = (schemaType as any) + .schema; + if (subSchema) { + const children = extractFieldsFromSchema(subSchema, undefined, depth + 1); + fields.push({ + path, + type: 'subdocument', + required, + enumValues, + isArray: false, + children, + }); + continue; + } + + // Detect relations + let refModelName: string | undefined; + if ( + options && + options.ref && + (instance === 'ObjectId' || instance === 'ObjectID') + ) { + refModelName = String(options.ref); + } else if (isArray) { + const casterForRef: any = + (schemaType as any).caster || (schemaType as any).$embeddedSchemaType; + if ( + casterForRef && + (casterForRef.instance === 'ObjectId' || + casterForRef.instance === 'ObjectID') && + casterForRef.options && + casterForRef.options.ref + ) { + refModelName = String(casterForRef.options.ref); + } + } + + if (refModelName) { + const res = getResourceByModelName(refModelName); + const displayField = res?.displayField || guessDisplayField(); + fields.push({ + path, + type: 'relation', + required, + enumValues, + isArray, + relation: res + ? { + model: refModelName, + resource: res.name, + displayField, + } + : undefined, + }); + continue; + } + + fields.push({ + path, + type: mapType(instance), + required, + enumValues, + isArray, + }); + } + return fields; +} + +export function buildSearchQuery(q: string | undefined, fields: AdminField[]) { + if (!q) return {}; + const searchables: string[] = []; + const walk = (fs: AdminField[], prefix?: string) => { + for (const f of fs) { + const full = prefix ? 
`${prefix}.${f.path}` : f.path; + if (f.type === 'string') searchables.push(full); + if ( + f.type === 'subdocument' && + Array.isArray(f.children) && + f.children.length + ) { + walk(f.children, full); + } + } + }; + walk(fields); + if (!searchables.length) return {}; + return { + $or: searchables.map((p) => ({ [p]: { $regex: q, $options: 'i' } })), + } as Record; +} + +function guessDisplayField() { + return 'name'; +} diff --git a/src/plugins/auth/index.ts b/src/plugins/auth/index.ts new file mode 100644 index 0000000..5c32821 --- /dev/null +++ b/src/plugins/auth/index.ts @@ -0,0 +1,66 @@ +import type { ToolkitPlugin, PluginFactory } from '@/plugins/types'; +import { + initializeSessionManager, + type SessionManager, +} from '@/modules/auth/session/session.manager'; +import type { SessionStoreConfig } from '@/modules/auth/session/session.types'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; +import { scheduleSessionCleanup } from '../../queues/session-cleanup.queue'; +import { extractJwt } from '../../middlewares/extract-jwt'; + +export interface AuthOptions { + jwtSecret?: string; + jwtExpiration?: string; + sessionSecret?: string; + session?: Partial & { enabled?: boolean }; +} + +export const authPlugin: PluginFactory = ( + options = {}, +): ToolkitPlugin => { + let sessionManager: SessionManager | null = null; + + return { + name: 'auth', + priority: 70, + options, + + async register({ app }) { + app.use(extractJwt); + app.set('auth:configured', true); + + if (options.jwtSecret) { + app.set('auth:jwt:secret', options.jwtSecret); + } + if (options.jwtExpiration) { + app.set('auth:jwt:expiration', options.jwtExpiration); + } + + if (config.SET_SESSION && options.session?.enabled) { + sessionManager = await initializeSessionManager(options.session); + app.locals.sessionManager = sessionManager; + app.set('auth:session:enabled', true); + + try { + const stats = await sessionManager.cleanupSessions('revoked'); + if (options.session?.debug) { + logger.debug({ stats }, 'Startup session cleanup completed'); + } + } catch (err) { + logger.warn({ err }, 'Startup session cleanup failed'); + } + + await scheduleSessionCleanup(); + } + }, + + async onShutdown() { + if (sessionManager) { + await sessionManager.cleanup(); + } + }, + }; +}; + +export default authPlugin; diff --git a/src/plugins/basicparser/index.ts b/src/plugins/basicparser/index.ts new file mode 100644 index 0000000..275b9c8 --- /dev/null +++ b/src/plugins/basicparser/index.ts @@ -0,0 +1,39 @@ +import type { ToolkitPlugin, PluginFactory } from '@/plugins/types'; +import cookieParser from 'cookie-parser'; +import express from 'express'; + +export interface BasicParserOptions { + enabled?: boolean; +} + +export const basicParserPlugin: PluginFactory = ( + options = {}, +): ToolkitPlugin => { + const { enabled = true } = options; + + return { + name: 'basicParser', + priority: 100, + options, + + register({ app }) { + if (!enabled) { + return; + } + + app.use(express.json()); + app.use(express.urlencoded({ extended: true })); + app.use(cookieParser()) + + // Plugin implementation here + console.log('BasicParser plugin registered'); + }, + + onShutdown: async () => { + // Cleanup logic here + console.log('BasicParser plugin shutdown'); + }, + }; +}; + +export default basicParserPlugin; diff --git a/src/plugins/bullboard/index.ts b/src/plugins/bullboard/index.ts new file mode 100644 index 0000000..1e638a8 --- /dev/null +++ b/src/plugins/bullboard/index.ts @@ -0,0 +1,155 @@ +import type { 
ToolkitPlugin, PluginFactory } from '@/plugins/types'; +import type { Request, Response, NextFunction } from 'express'; +import { createBullBoard } from '@bull-board/api'; +import { ExpressAdapter } from '@bull-board/express'; +import { registeredQueues } from '@/lib/queue'; +import { BullMQAdapter } from '@bull-board/api/bullMQAdapter'; +import pathLib from 'path'; +import { + queueAuthGuardAdaptive, + signQueueSession, + setQueueCookie, + compareQueueCredentials, + checkQueueLoginRateLimit, + clearQueueCookie, +} from './queue-auth'; + +export interface BullboardOptions { + path: string; + authGuard?: boolean; +} + +/** + * Middleware to inject custom CSS and JS into BullBoard HTML responses + */ +function injectAssetsMiddleware( + _req: Request, + res: Response, + next: NextFunction, +): void { + const originalSend = res.send; + + res.send = function (data: any): Response { + // Only modify HTML responses + if ( + typeof data === 'string' && + data.includes('') && + data.includes('') + ) { + // Inject custom CSS before + data = data.replace( + '', + '', + ); + + // Inject custom JS before + data = data.replace( + '', + '', + ); + } + + // Call original send with modified data + return originalSend.call(this, data); + }; + + next(); +} + +export const bullboardPlugin: PluginFactory = ( + options = { path: '/queues', authGuard: true }, +): ToolkitPlugin => { + const { path, authGuard = true } = options; + + return { + name: 'bullboard', + priority: 50, + options, + + register({ app, port }) { + const serverAdapter = new ExpressAdapter(); + serverAdapter.setBasePath(path); + + createBullBoard({ + queues: Object.entries(registeredQueues || {}).map( + ([, values]) => new BullMQAdapter(values.queue), + ), + options: { + uiConfig: { + boardTitle: 'Queues Manager', + boardLogo: { + path: '/assets/images/logo.webp', + width: '30px', + height: '30px', + }, + }, + }, + serverAdapter, + }); + + // Queues login page + app.get(`${path}/login`, (_req, res) => { + const loginPath = pathLib.join( + process.cwd(), + 'public', + 'queues', + 'login.html', + ); + res.sendFile(loginPath); + }); + + // Queues login + app.post(`${path}/login`, (req, res) => { + const { username, password } = req.body || {}; + const identifier = req.ip || 'unknown'; + + if (!checkQueueLoginRateLimit(identifier)) { + return res.status(429).json({ error: 'too_many_attempts' }); + } + + if ( + !username || + !password || + !compareQueueCredentials(username, password) + ) { + return res.status(401).json({ error: 'invalid_credentials' }); + } + + const token = signQueueSession(username); + setQueueCookie(res, token); + + const acceptsJson = req.headers.accept?.includes('application/json'); + if (acceptsJson) { + return res.json({ ok: true }); + } + + const nextUrl = + typeof req.query.next === 'string' ? 
req.query.next : path; + return res.redirect(nextUrl); + }); + + // Queues logout + app.post(`${path}/logout`, (req, res) => { + clearQueueCookie(res); + const acceptsJson = req.headers.accept?.includes('application/json'); + if (acceptsJson) { + return res.json({ ok: true }); + } + return res.redirect(`${path}/login`); + }); + + // Mount BullBoard with asset injection and optional auth + const middlewares = [injectAssetsMiddleware]; + if (authGuard) { + middlewares.push(queueAuthGuardAdaptive(path)); + } + app.use(path, ...middlewares, serverAdapter.getRouter()); + + return [`http://localhost:${port}${path}`]; + }, + + onShutdown: async () => {}, + }; +}; + +export default bullboardPlugin; diff --git a/src/plugins/bullboard/queue-auth.ts b/src/plugins/bullboard/queue-auth.ts new file mode 100644 index 0000000..765dbba --- /dev/null +++ b/src/plugins/bullboard/queue-auth.ts @@ -0,0 +1,225 @@ +import crypto from 'crypto'; +import type { Request, Response, NextFunction } from 'express'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; + +interface QueueSessionPayload { + sub: string; // username + iat: number; // issued at (unix timestamp) + exp: number; // expires at (unix timestamp) +} + +export function signQueueSession(username: string): string { + const now = Math.floor(Date.now() / 1000); + const payload: QueueSessionPayload = { + sub: username, + iat: now, + exp: now + config.QUEUE_SESSION_TTL, + }; + + const payloadJson = JSON.stringify(payload); + const payloadB64 = base64UrlEncode(Buffer.from(payloadJson, 'utf8')); + + const hmac = crypto + .createHmac('sha256', config.QUEUE_SESSION_SECRET) + .update(payloadB64) + .digest(); + const signature = base64UrlEncode(hmac); + + return `${payloadB64}.${signature}`; +} + +export function verifyQueueSession(token: string): QueueSessionPayload | null { + try { + const parts = token.split('.'); + if (parts.length !== 2) return null; + + const [payloadB64, signature] = parts; + + const expectedHmac = crypto + .createHmac('sha256', config.QUEUE_SESSION_SECRET) + .update(payloadB64) + .digest(); + const expectedSignature = base64UrlEncode(expectedHmac); + + if (!timingSafeEqual(signature, expectedSignature)) return null; + + const payloadJson = Buffer.from(payloadB64, 'base64url').toString('utf8'); + const payload = JSON.parse(payloadJson) as QueueSessionPayload; + + const now = Math.floor(Date.now() / 1000); + if (payload.exp < now) return null; + + return payload; + } catch { + return null; + } +} + +export function setQueueCookie(res: Response, token: string): void { + const isSecure = config.NODE_ENV === 'production' && config.HTTPS_ENABLED; + res.cookie(config.QUEUE_COOKIE_NAME, token, { + httpOnly: true, + secure: isSecure, + sameSite: 'lax', + maxAge: config.QUEUE_SESSION_TTL * 1000, + path: '/', + }); +} + +export function clearQueueCookie(res: Response): void { + res.clearCookie(config.QUEUE_COOKIE_NAME, { path: '/' }); +} + +export function compareQueueCredentials(username: string, password: string): boolean { + const validUsername = config.QUEUE_USERNAME; + const validPassword = config.QUEUE_PANEL_PASSWORD; + + const usernameMatch = timingSafeEqual(username, validUsername); + const passwordMatch = timingSafeEqual(password, validPassword); + + return usernameMatch && passwordMatch; +} + +export function queueAuthGuardApi( + req: Request, + res: Response, + next: NextFunction, +): void { + if (!config.QUEUE_AUTH_ENABLED) { + return next(); + } + + const token = 
req.cookies?.[config.QUEUE_COOKIE_NAME]; + if (!token) { + res.status(401).json({ error: 'unauthorized' }); + return; + } + + const payload = verifyQueueSession(token); + if (!payload) { + clearQueueCookie(res); + res.status(401).json({ error: 'unauthorized' }); + return; + } + + (req as any).queueUser = payload.sub; + next(); +} + +export const queueAuthGuardUI = (basePath: string = '/queues') => ( + req: Request, + res: Response, + next: NextFunction, +) => { + if (!config.QUEUE_AUTH_ENABLED) { + return next(); + } + + const token = req.cookies?.[config.QUEUE_COOKIE_NAME]; + if (!token) { + const nextUrl = encodeURIComponent(req.originalUrl); + res.redirect(`${basePath}/login?next=${nextUrl}`); + return; + } + + const payload = verifyQueueSession(token); + if (!payload) { + clearQueueCookie(res); + const nextUrl = encodeURIComponent(req.originalUrl); + res.redirect(`${basePath}/login?next=${nextUrl}`); + return; + } + + (req as any).queueUser = payload.sub; + next(); +}; + +export const queueAuthGuardAdaptive = (basePath: string = '/queues') => ( + req: Request, + res: Response, + next: NextFunction, +) => { + if (!config.QUEUE_AUTH_ENABLED) { + return next(); + } + + const token = req.cookies?.[config.QUEUE_COOKIE_NAME]; + const payload = token && verifyQueueSession(token); + if (payload) { + (req as any).queueUser = payload.sub; + return next(); + } + + clearQueueCookie(res); + const wantsJson = req.headers.accept?.includes('application/json'); + if (wantsJson) { + return res.status(401).json({ error: 'unauthorized' }); + } + const nextUrl = encodeURIComponent(req.originalUrl); + return res.redirect(`${basePath}/login?next=${nextUrl}`); +}; + +interface RateLimitEntry { + count: number; + resetAt: number; +} + +const rateLimitStore = new Map(); +const RATE_LIMIT_WINDOW_MS = 5 * 60 * 1000; // 5 minutes +const RATE_LIMIT_MAX_ATTEMPTS = 10; + +export function checkQueueLoginRateLimit(identifier: string): boolean { + const now = Date.now(); + const entry = rateLimitStore.get(identifier); + + if (!entry || entry.resetAt < now) { + rateLimitStore.set(identifier, { + count: 1, + resetAt: now + RATE_LIMIT_WINDOW_MS, + }); + return true; + } + + if (entry.count >= RATE_LIMIT_MAX_ATTEMPTS) { + logger.warn( + { identifier, attempts: entry.count }, + 'Queue login rate limit exceeded', + ); + return false; + } + + entry.count += 1; + return true; +} + +setInterval(() => { + const now = Date.now(); + for (const [key, entry] of rateLimitStore.entries()) { + if (entry.resetAt < now) { + rateLimitStore.delete(key); + } + } +}, 60 * 1000); + +function base64UrlEncode(buffer: Buffer): string { + return buffer + .toString('base64') + .replace(/\+/g, '-') + .replace(/\//g, '_') + .replace(/=/g, ''); +} + +function timingSafeEqual(a: string, b: string): boolean { + const bufA = Buffer.from(a, 'utf8'); + const bufB = Buffer.from(b, 'utf8'); + + if (bufA.length !== bufB.length) { + crypto.timingSafeEqual(bufA, bufA); + return false; + } + + return crypto.timingSafeEqual(bufA, bufB); +} + + diff --git a/src/plugins/cache/cache.middleware.ts b/src/plugins/cache/cache.middleware.ts new file mode 100644 index 0000000..35dba25 --- /dev/null +++ b/src/plugins/cache/cache.middleware.ts @@ -0,0 +1,526 @@ +import type { NextFunction } from 'express'; +import crypto from 'node:crypto'; +import type { + RequestAny, + ResponseAny, + MagicMiddleware, +} from '@/plugins/magic/router'; +import logger from '@/plugins/observability/logger'; +import type { + CacheMiddlewareOptions, + InvalidateOptions, + CacheKeyGenerator, + 
CacheTagResolver, + CachePatternResolver, + CacheKeyResolver, +} from './types'; +import { CacheService } from './cache.service'; +import type { JwtPayload } from '@/utils/jwt.utils'; + +/** + * Global cache service instance + * Will be initialized by the plugin + */ +let cacheService: CacheService | null = null; + +/** + * Initialize cache middleware with cache service + */ +export function initializeCacheMiddleware(service: CacheService): void { + cacheService = service; +} + +/** + * Generate cache key from request and options + */ +async function generateCacheKey( + req: RequestAny, + options: CacheMiddlewareOptions, +): Promise { + // Custom key generator (has full request access) + if (typeof options.key === 'function') { + return await (options.key as CacheKeyGenerator)(req); + } + + // Static key + if (typeof options.key === 'string') { + return options.key; + } + + // Auto-generate from request + const parts: string[] = []; + + // Always include the path + parts.push(req.path); + + // Add varyBy fields + const varyBy = options.varyBy || []; + + if (varyBy.includes('method')) { + parts.push(req.method); + } + + if (varyBy.includes('userId')) { + const userId = (req.user as JwtPayload)?.sub || 'anonymous'; + parts.push(`user:${userId}`); + } + + if (varyBy.includes('url')) { + parts.push(req.originalUrl || req.url); + } + + if (varyBy.includes('query')) { + const queryStr = JSON.stringify(req.query); + parts.push(`q:${crypto.createHash('md5').update(queryStr).digest('hex')}`); + } + + if (varyBy.includes('params')) { + const paramsStr = JSON.stringify(req.params); + parts.push(`p:${crypto.createHash('md5').update(paramsStr).digest('hex')}`); + } + + if (varyBy.includes('headers') && options.varyByHeaders) { + const headerValues = options.varyByHeaders + .map((h) => `${h}:${req.headers[h.toLowerCase()] || ''}`) + .join(','); + parts.push( + `h:${crypto.createHash('md5').update(headerValues).digest('hex')}`, + ); + } + + // Handle private option (shorthand for varyBy userId) + if (options.private && !varyBy.includes('userId')) { + const userId = (req.user as JwtPayload)?.sub || 'anonymous'; + parts.push(`user:${userId}`); + } + + return parts.join(':'); +} + +/** + * Resolve tags from options and request + */ +async function resolveTags( + req: RequestAny, + tags?: string[] | CacheTagResolver, +): Promise { + if (!tags) return []; + + if (typeof tags === 'function') { + return await (tags as CacheTagResolver)(req); + } + + return tags; +} + +/** + * Resolve patterns from options and request + */ +async function resolvePatterns( + req: RequestAny, + patterns?: string[] | CachePatternResolver, +): Promise { + if (!patterns) return []; + + if (typeof patterns === 'function') { + return await (patterns as CachePatternResolver)(req); + } + + return patterns; +} + +/** + * Resolve keys from options and request + */ +async function resolveKeys( + req: RequestAny, + keys?: string[] | CacheKeyResolver, +): Promise { + if (!keys) return []; + + if (typeof keys === 'function') { + return await (keys as CacheKeyResolver)(req); + } + + return keys; +} + +/** + * Response caching middleware for MagicRouter + * Caches GET request responses with configurable options + * + * @example + * router.get('/users/:id', + * { requestType: { params: userIdSchema } }, + * cacheResponse({ + * ttl: 300, + * key: (req) => `user:${req.params.id}`, + * tags: (req) => [`user:${req.params.id}`, 'users'] + * }), + * getUser + * ); + */ +export const cacheResponse = ( + options: CacheMiddlewareOptions = {}, +): 
MagicMiddleware => { + return async ( + req: RequestAny, + res: ResponseAny, + next: NextFunction, + ): Promise => { + // Check if cache service is initialized + if (!cacheService) { + logger.warn('Cache middleware used but cache service not initialized'); + next(); + return; + } + + // Skip if disabled + if (options.skip) { + next(); + return; + } + + // Only cache GET requests by default + if (req.method !== 'GET') { + next(); + return; + } + + // Check condition (if provided) + if (options.condition) { + try { + const shouldCache = await options.condition(req); + if (!shouldCache) { + next(); + return; + } + } catch (err) { + logger.warn({ err }, 'Cache condition check failed'); + next(); + return; + } + } + + try { + // Generate cache key dynamically from request + const cacheKey = await generateCacheKey(req, options); + const ttl = options.ttl; + + logger.debug( + { cacheKey, method: req.method, path: req.path }, + 'Checking cache', + ); + + // Try to get from cache + const cached = await cacheService.get(cacheKey); + + if (cached !== null) { + // Handle stale-while-revalidate + if (options.staleWhileRevalidate && options.staleTime) { + const ttlRemaining = await cacheService.ttl(cacheKey); + const effectiveTtl = ttl || 3600; + const isStale = + ttlRemaining > 0 && ttlRemaining < effectiveTtl - options.staleTime; + + if (isStale) { + logger.debug( + { cacheKey }, + 'Serving stale cache, revalidating in background', + ); + // Continue to serve from cache, but mark for revalidation + res.setHeader('X-Cache-Status', 'STALE'); + } else { + res.setHeader('X-Cache-Status', 'HIT'); + } + } else { + res.setHeader('X-Cache-Status', 'HIT'); + } + + // Set cache headers + if (ttl) { + res.setHeader('Cache-Control', `max-age=${ttl}`); + } + + // Set age header + const ttlRemaining = await cacheService.ttl(cacheKey); + if (ttlRemaining > 0) { + const age = (ttl || 3600) - ttlRemaining; + res.setHeader('Age', age.toString()); + } + + logger.debug( + { cacheKey, method: req.method, path: req.path }, + 'Cache hit', + ); + + res.json(cached); + return; + } + + // Cache miss - intercept res.json to cache the response + res.setHeader('X-Cache-Status', 'MISS'); + + const originalJson = res.json.bind(res); + const originalSend = res.send.bind(res); + + let responseSent = false; + + // Override res.json + res.json = function (data: unknown) { + if (responseSent) return originalJson(data); + responseSent = true; + + // Cache in background (don't block response) + if (cacheService) { + (async () => { + try { + const tags = await resolveTags(req, options.tags); + + if (tags.length > 0) { + await cacheService!.setWithTags(cacheKey, data, tags, ttl); + } else { + await cacheService!.set(cacheKey, data, ttl); + } + + logger.debug({ cacheKey, tags, ttl }, 'Response cached'); + } catch (err) { + logger.warn({ cacheKey, err }, 'Failed to cache response'); + } + })(); + } + + // Set cache headers + if (ttl) { + res.setHeader('Cache-Control', `max-age=${ttl}`); + } + + return originalJson(data); + }; + + // Override res.send for non-JSON responses + res.send = function (data: unknown) { + if (responseSent) return originalSend(data); + responseSent = true; + + // Only cache if it's likely JSON + if ( + cacheService && + (typeof data === 'object' || typeof data === 'string') + ) { + (async () => { + try { + const tags = await resolveTags(req, options.tags); + const cacheData = + typeof data === 'string' ? 
JSON.parse(data) : data; + + if (tags.length > 0) { + await cacheService!.setWithTags(cacheKey, cacheData, tags, ttl); + } else { + await cacheService!.set(cacheKey, cacheData, ttl); + } + + logger.debug({ cacheKey, tags, ttl }, 'Response cached'); + } catch (err) { + logger.debug( + { cacheKey, err }, + 'Skipped caching non-JSON response', + ); + } + })(); + } + + // Set cache headers + if (ttl) { + res.setHeader('Cache-Control', `max-age=${ttl}`); + } + + return originalSend(data); + }; + + next(); + } catch (err) { + logger.error({ err, path: req.path }, 'Cache middleware error'); + next(); + } + }; +}; + +/** + * Cache invalidation middleware for MagicRouter + * Invalidates cache based on tags, patterns, or specific keys + * + * @example + * router.put('/users/:id', + * { requestType: { params: userIdSchema } }, + * invalidateCache({ + * tags: (req) => [`user:${req.params.id}`, 'users'], + * patterns: ['dashboard:*'], + * timing: 'after' + * }), + * updateUser + * ); + */ +export const invalidateCache = ( + options: InvalidateOptions = {}, +): MagicMiddleware => { + return async ( + req: RequestAny, + res: ResponseAny, + next: NextFunction, + ): Promise => { + // Check if cache service is initialized + if (!cacheService) { + logger.warn( + 'Cache invalidation middleware used but cache service not initialized', + ); + next(); + return; + } + + const timing = options.timing || 'after'; + + // Check condition (if provided) + if (options.condition) { + try { + const shouldInvalidate = await options.condition(req); + if (!shouldInvalidate) { + next(); + return; + } + } catch (err) { + logger.warn({ err }, 'Cache invalidation condition check failed'); + next(); + return; + } + } + + const performInvalidation = async () => { + try { + // Resolve tags dynamically from request + if (options.tags && cacheService) { + const tags = await resolveTags(req, options.tags); + if (tags.length > 0) { + await cacheService.invalidateByTags(tags); + logger.debug( + { tags, method: req.method, path: req.path }, + 'Invalidated cache by tags', + ); + } + } + + // Resolve patterns dynamically from request + if (options.patterns && cacheService) { + const patterns = await resolvePatterns(req, options.patterns); + for (const pattern of patterns) { + await cacheService.invalidateByPattern(pattern); + logger.debug( + { pattern, method: req.method, path: req.path }, + 'Invalidated cache by pattern', + ); + } + } + + // Resolve specific keys dynamically from request + if (options.keys && cacheService) { + const keys = await resolveKeys(req, options.keys); + if (keys.length > 0) { + await cacheService.deleteMany(keys); + logger.debug( + { keys, method: req.method, path: req.path }, + 'Invalidated cache keys', + ); + } + } + } catch (err) { + logger.error( + { err, method: req.method, path: req.path }, + 'Cache invalidation failed', + ); + // Don't throw - invalidation failure shouldn't break the request + } + }; + + if (timing === 'before') { + await performInvalidation(); + next(); + return; + } + + // Invalidate after response (only on success) + res.on('finish', () => { + if (res.statusCode >= 200 && res.statusCode < 300) { + performInvalidation().catch((err) => + logger.error({ err }, 'Post-response cache invalidation failed'), + ); + } + }); + + next(); + }; +}; + +/** + * Convenience middleware to cache based on ETags + * Automatically generates ETags and handles conditional requests + */ +export const cacheWithETag = ( + options: Omit = {}, +): MagicMiddleware => { + return async ( + req: RequestAny, + res: 
ResponseAny, + next: NextFunction, + ): Promise => { + if (!cacheService) { + next(); + return; + } + + // Only for GET requests + if (req.method !== 'GET') { + next(); + return; + } + + const cacheKey = await generateCacheKey(req, { + ...options, + varyBy: ['url', 'query'], + }); + const etagKey = `etag:${cacheKey}`; + + // Check if client sent If-None-Match header + const clientETag = req.headers['if-none-match']; + + if (clientETag) { + const storedETag = await cacheService.get(etagKey); + + if (storedETag && storedETag === clientETag) { + // ETag matches - return 304 Not Modified + res.setHeader('ETag', storedETag); + res.setHeader('X-Cache-Status', 'NOT_MODIFIED'); + res.status(304).end(); + return; + } + } + + // Intercept response to generate ETag + const originalJson = res.json.bind(res); + + res.json = function (data: unknown) { + const etag = `"${crypto.createHash('md5').update(JSON.stringify(data)).digest('hex')}"`; + + // Store ETag + cacheService + ?.set(etagKey, etag, options.ttl) + .catch((err) => logger.warn({ err }, 'Failed to store ETag')); + + res.setHeader('ETag', etag); + res.setHeader('X-Cache-Status', 'MISS'); + + return originalJson(data); + }; + + next(); + }; +}; diff --git a/src/plugins/cache/cache.service.ts b/src/plugins/cache/cache.service.ts new file mode 100644 index 0000000..09999b6 --- /dev/null +++ b/src/plugins/cache/cache.service.ts @@ -0,0 +1,598 @@ +import { promisify } from 'node:util'; +import { gzip, gunzip } from 'node:zlib'; +import { cacheProvider } from '@/lib/cache'; +import logger from '@/plugins/observability/logger'; +import { metricsCollector } from '@/plugins/observability/metrics'; +import { CacheError } from '@/lib/errors'; +import type { + CacheWrapOptions, + CacheServiceOptions, + CacheStats, + CacheWarmEntry, +} from './types'; + +const gzipAsync = promisify(gzip); +const gunzipAsync = promisify(gunzip); + +/** + * High-level cache service with advanced features + * Wraps the cache provider with convenience methods and additional functionality + */ +export class CacheService { + private prefix: string; + private defaultTtl: number; + private compressionEnabled: boolean; + private compressionThreshold: number; + private enableMetrics: boolean; + private stats: { hits: number; misses: number }; + private tagKeyPrefix = '__tag__:'; + + constructor(options: CacheServiceOptions = {}) { + this.prefix = options.prefix || ''; + this.defaultTtl = options.defaultTtl || 3600; + this.compressionEnabled = options.compressionEnabled || false; + this.compressionThreshold = options.compressionThreshold || 1024; // 1KB + this.enableMetrics = options.enableMetrics !== false; + this.stats = { hits: 0, misses: 0 }; + } + + /** + * Get prefixed key + */ + private getKey(key: string): string { + return this.prefix ? 
`${this.prefix}${key}` : key; + } + + /** + * Get tag key for tag-based invalidation + */ + private getTagKey(tag: string): string { + return this.getKey(`${this.tagKeyPrefix}${tag}`); + } + + /** + * Compress data if it exceeds the threshold + */ + private async maybeCompress( + data: string, + ): Promise<{ data: string; compressed: boolean }> { + if (!this.compressionEnabled || data.length < this.compressionThreshold) { + return { data, compressed: false }; + } + + try { + const compressed = await gzipAsync(Buffer.from(data, 'utf-8')); + return { data: compressed.toString('base64'), compressed: true }; + } catch (err) { + logger.warn( + { err }, + 'Failed to compress cache data, storing uncompressed', + ); + return { data, compressed: false }; + } + } + + /** + * Decompress data if it was compressed + */ + private async maybeDecompress( + data: string, + compressed: boolean, + ): Promise { + if (!compressed) { + return data; + } + + try { + const decompressed = await gunzipAsync(Buffer.from(data, 'base64')); + return decompressed.toString('utf-8'); + } catch (err) { + logger.error({ err }, 'Failed to decompress cache data'); + throw new CacheError('Failed to decompress cache data', err); + } + } + + /** + * Record cache hit for metrics + */ + private recordHit(key: string): void { + this.stats.hits++; + if (this.enableMetrics) { + metricsCollector.incrementCacheHits(key); + } + } + + /** + * Record cache miss for metrics + */ + private recordMiss(key: string): void { + this.stats.misses++; + if (this.enableMetrics) { + metricsCollector.incrementCacheMisses(key); + } + } + + /** + * Get a value from cache with automatic JSON deserialization + */ + async get(key: string): Promise { + const prefixedKey = this.getKey(key); + + try { + const raw = await cacheProvider.get(prefixedKey); + + if (raw === null) { + this.recordMiss(key); + return null; + } + + this.recordHit(key); + + // Check if data is compressed (starts with metadata marker) + const isCompressed = raw.startsWith('__COMPRESSED__:'); + const data = isCompressed ? raw.substring(15) : raw; + + const decompressed = await this.maybeDecompress(data, isCompressed); + return JSON.parse(decompressed) as T; + } catch (err) { + logger.error({ key, err }, 'Failed to get cache value'); + this.recordMiss(key); + return null; + } + } + + /** + * Set a value in cache with automatic JSON serialization + */ + async set(key: string, value: T, ttl?: number): Promise { + const prefixedKey = this.getKey(key); + const effectiveTtl = ttl || this.defaultTtl; + + try { + const serialized = JSON.stringify(value); + const { data, compressed } = await this.maybeCompress(serialized); + + // Add compression marker if compressed + const finalData = compressed ? 
`__COMPRESSED__:${data}` : data; + + await cacheProvider.set(prefixedKey, finalData, effectiveTtl); + } catch (err) { + logger.error( + { key, ttl: effectiveTtl, err }, + 'Failed to set cache value', + ); + throw new CacheError('Failed to set cache value', err); + } + } + + /** + * Delete a value from cache + */ + async del(key: string): Promise { + const prefixedKey = this.getKey(key); + + try { + await cacheProvider.del(prefixedKey); + } catch (err) { + logger.error({ key, err }, 'Failed to delete cache value'); + throw new CacheError('Failed to delete cache value', err); + } + } + + /** + * Check if a key exists in cache + */ + async exists(key: string): Promise { + const prefixedKey = this.getKey(key); + + try { + return await cacheProvider.exists(prefixedKey); + } catch (err) { + logger.error({ key, err }, 'Failed to check cache key existence'); + return false; + } + } + + /** + * Get multiple values from cache + */ + async getMany(keys: string[]): Promise> { + if (keys.length === 0) return new Map(); + + const prefixedKeys = keys.map((k) => this.getKey(k)); + const result = new Map(); + + try { + const values = await cacheProvider.mget(prefixedKeys); + + for (let i = 0; i < keys.length; i++) { + const raw = values[i]; + if (raw !== null) { + try { + const isCompressed = raw.startsWith('__COMPRESSED__:'); + const data = isCompressed ? raw.substring(15) : raw; + const decompressed = await this.maybeDecompress(data, isCompressed); + result.set(keys[i], JSON.parse(decompressed) as T); + this.recordHit(keys[i]); + } catch (err) { + logger.warn({ key: keys[i], err }, 'Failed to parse cached value'); + this.recordMiss(keys[i]); + } + } else { + this.recordMiss(keys[i]); + } + } + + return result; + } catch (err) { + logger.error({ keys, err }, 'Failed to get multiple cache values'); + return result; + } + } + + /** + * Set multiple values in cache + */ + async setMany( + entries: Map, + ttl?: number, + ): Promise { + if (entries.size === 0) return; + + const effectiveTtl = ttl || this.defaultTtl; + const batchEntries: Array<{ key: string; value: string; ttl: number }> = []; + + try { + for (const [key, value] of entries.entries()) { + const serialized = JSON.stringify(value); + const { data, compressed } = await this.maybeCompress(serialized); + const finalData = compressed ? 
`__COMPRESSED__:${data}` : data;
+
+        batchEntries.push({
+          key: this.getKey(key),
+          value: finalData,
+          ttl: effectiveTtl,
+        });
+      }
+
+      await cacheProvider.mset(batchEntries);
+    } catch (err) {
+      logger.error(
+        { entries: entries.size, ttl: effectiveTtl, err },
+        'Failed to set multiple cache values',
+      );
+      throw new CacheError('Failed to set multiple cache values', err);
+    }
+  }
+
+  /**
+   * Delete multiple values from cache
+   */
+  async deleteMany(keys: string[]): Promise<void> {
+    if (keys.length === 0) return;
+
+    const prefixedKeys = keys.map((k) => this.getKey(k));
+
+    try {
+      await cacheProvider.mdel(prefixedKeys);
+    } catch (err) {
+      logger.error({ keys, err }, 'Failed to delete multiple cache values');
+      throw new CacheError('Failed to delete multiple cache values', err);
+    }
+  }
+
+  /**
+   * Cache-aside pattern: get from cache or execute function and cache result
+   */
+  async wrap<T>(
+    key: string,
+    fn: () => Promise<T>,
+    options: CacheWrapOptions = {},
+  ): Promise<T> {
+    const { ttl, tags, staleTime, forceRefresh } = options;
+
+    // Check if we should force refresh
+    if (forceRefresh) {
+      const value = await fn();
+      await this.setWithTags(key, value, tags || [], ttl);
+      return value;
+    }
+
+    // Try to get from cache
+    const cached = await this.get<T>(key);
+
+    if (cached !== null) {
+      // Handle stale-while-revalidate
+      if (staleTime) {
+        const ttlRemaining = await cacheProvider.ttl(this.getKey(key));
+        const effectiveTtl = ttl || this.defaultTtl;
+        const isStale =
+          ttlRemaining > 0 && ttlRemaining < effectiveTtl - staleTime;
+
+        if (isStale) {
+          // Revalidate in background
+          fn()
+            .then((value) => this.setWithTags(key, value, tags || [], ttl))
+            .catch((err) =>
+              logger.error({ key, err }, 'Failed to revalidate stale cache'),
+            );
+        }
+      }
+
+      return cached;
+    }
+
+    // Cache miss - execute function and cache result
+    const value = await fn();
+    await this.setWithTags(key, value, tags || [], ttl);
+    return value;
+  }
+
+  /**
+   * Set a value with tags for invalidation
+   */
+  async setWithTags<T>(
+    key: string,
+    value: T,
+    tags: string[],
+    ttl?: number,
+  ): Promise<void> {
+    // Set the actual value
+    await this.set(key, value, ttl);
+
+    // Store the key in each tag's set
+    if (tags.length > 0) {
+      const effectiveTtl = ttl || this.defaultTtl;
+
+      for (const tag of tags) {
+        const tagKey = this.getTagKey(tag);
+
+        try {
+          // Get current tag keys
+          const currentKeys = await cacheProvider.get(tagKey);
+          const keySet = currentKeys
+            ? new Set(JSON.parse(currentKeys))
+            : new Set();
+
+          // Add this key to the set
+          keySet.add(key);
+
+          // Save back with same TTL as the data (add some buffer)
+          await cacheProvider.set(
+            tagKey,
+            JSON.stringify([...keySet]),
+            effectiveTtl + 300, // Add 5 minutes buffer
+          );
+        } catch (err) {
+          logger.warn({ tag, key, err }, 'Failed to update tag mapping');
+        }
+      }
+    }
+  }
+
+  /**
+   * Invalidate cache entries by tags
+   */
+  async invalidateByTags(tags: string[]): Promise<void> {
+    if (tags.length === 0) return;
+
+    const keysToDelete = new Set<string>();
+
+    for (const tag of tags) {
+      const tagKey = this.getTagKey(tag);
+
+      try {
+        const raw = await cacheProvider.get(tagKey);
+        if (raw) {
+          const keys = JSON.parse(raw) as string[];
+          for (const key of keys) {
+            keysToDelete.add(key);
+          }
+
+          // Delete the tag key itself
+          await cacheProvider.del(tagKey);
+        }
+      } catch (err) {
+        logger.warn({ tag, err }, 'Failed to invalidate by tag');
+      }
+    }
+
+    if (keysToDelete.size > 0) {
+      await this.deleteMany([...keysToDelete]);
+      logger.info(
+        { tags, keys: keysToDelete.size },
+        'Invalidated cache by tags',
+      );
+    }
+  }
+
+  /**
+   * Invalidate cache entries by pattern
+   */
+  async invalidateByPattern(pattern: string): Promise<void> {
+    try {
+      const prefixedPattern = this.getKey(pattern);
+      const keys = await cacheProvider.keys(prefixedPattern);
+
+      if (keys.length > 0) {
+        // Remove prefix from keys for deletion
+        const unprefixedKeys = keys.map((k) =>
+          this.prefix && k.startsWith(this.prefix)
+            ? k.substring(this.prefix.length)
+            : k,
+        );
+
+        await this.deleteMany(unprefixedKeys);
+        logger.info(
+          { pattern, keys: keys.length },
+          'Invalidated cache by pattern',
+        );
+      }
+    } catch (err) {
+      logger.error({ pattern, err }, 'Failed to invalidate by pattern');
+      throw new CacheError('Failed to invalidate by pattern', err);
+    }
+  }
+
+  /**
+   * Increment a counter
+   */
+  async increment(key: string, by = 1): Promise<number> {
+    const prefixedKey = this.getKey(key);
+
+    try {
+      if (by === 1) {
+        return await cacheProvider.incr(prefixedKey);
+      }
+
+      // For custom increment values
+      const current = (await this.get<number>(key)) || 0;
+      const newValue = current + by;
+      await this.set(key, newValue);
+      return newValue;
+    } catch (err) {
+      logger.error({ key, by, err }, 'Failed to increment cache value');
+      throw new CacheError('Failed to increment cache value', err);
+    }
+  }
+
+  /**
+   * Decrement a counter
+   */
+  async decrement(key: string, by = 1): Promise<number> {
+    const prefixedKey = this.getKey(key);
+
+    try {
+      if (by === 1) {
+        return await cacheProvider.decr(prefixedKey);
+      }
+
+      // For custom decrement values
+      const current = (await this.get<number>(key)) || 0;
+      const newValue = current - by;
+      await this.set(key, newValue);
+      return newValue;
+    } catch (err) {
+      logger.error({ key, by, err }, 'Failed to decrement cache value');
+      throw new CacheError('Failed to decrement cache value', err);
+    }
+  }
+
+  /**
+   * Set expiration on a key
+   */
+  async expire(key: string, ttl: number): Promise<void> {
+    const prefixedKey = this.getKey(key);
+
+    try {
+      await cacheProvider.expire(prefixedKey, ttl);
+    } catch (err) {
+      logger.error({ key, ttl, err }, 'Failed to set expiration');
+      throw new CacheError('Failed to set expiration', err);
+    }
+  }
+
+  /**
+   * Get TTL of a key
+   */
+  async ttl(key: string): Promise<number> {
+    const prefixedKey = this.getKey(key);
+
+    try {
+      return await cacheProvider.ttl(prefixedKey);
+    } catch (err) {
+      logger.error({ key, err }, 'Failed to get TTL');
+      throw new CacheError('Failed to get TTL', err);
+    }
+  }
+
+  /**
+   * Get all keys matching a pattern
+   */
+  async keys(pattern: string): Promise<string[]> {
+    try {
+      const prefixedPattern = this.getKey(pattern);
+      const keys = await cacheProvider.keys(prefixedPattern);
+
+      // Remove prefix from returned keys
+      return keys.map((k) =>
+        this.prefix && k.startsWith(this.prefix)
+          ? k.substring(this.prefix.length)
+          : k,
+      );
+    } catch (err) {
+      logger.error({ pattern, err }, 'Failed to get keys');
+      throw new CacheError('Failed to get keys', err);
+    }
+  }
+
+  /**
+   * Clear all cache or by pattern
+   */
+  async clear(pattern?: string): Promise<void> {
+    try {
+      const prefixedPattern = pattern ? this.getKey(pattern) : undefined;
+      await cacheProvider.clear(prefixedPattern);
+      logger.info({ pattern }, 'Cleared cache');
+    } catch (err) {
+      logger.error({ pattern, err }, 'Failed to clear cache');
+      throw new CacheError('Failed to clear cache', err);
+    }
+  }
+
+  /**
+   * Warm cache with multiple entries
+   */
+  async warm(entries: CacheWarmEntry[]): Promise<void> {
+    logger.info({ entries: entries.length }, 'Warming cache');
+
+    for (const entry of entries) {
+      try {
+        if (entry.tags && entry.tags.length > 0) {
+          await this.setWithTags(entry.key, entry.value, entry.tags, entry.ttl);
+        } else {
+          await this.set(entry.key, entry.value, entry.ttl);
+        }
+      } catch (err) {
+        logger.warn({ key: entry.key, err }, 'Failed to warm cache entry');
+      }
+    }
+
+    logger.info({ entries: entries.length }, 'Cache warming completed');
+  }
+
+  /**
+   * Create a new cache service with a different prefix
+   */
+  withPrefix(prefix: string): CacheService {
+    return new CacheService({
+      prefix: this.prefix + prefix,
+      defaultTtl: this.defaultTtl,
+      compressionEnabled: this.compressionEnabled,
+      compressionThreshold: this.compressionThreshold,
+      enableMetrics: this.enableMetrics,
+    });
+  }
+
+  /**
+   * Get cache statistics
+   */
+  getStats(): CacheStats {
+    const total = this.stats.hits + this.stats.misses;
+    const hitRate = total > 0 ?
this.stats.hits / total : 0; + + return { + hits: this.stats.hits, + misses: this.stats.misses, + hitRate, + }; + } + + /** + * Reset statistics + */ + resetStats(): void { + this.stats.hits = 0; + this.stats.misses = 0; + } +} diff --git a/src/plugins/cache/index.ts b/src/plugins/cache/index.ts new file mode 100644 index 0000000..a7c24de --- /dev/null +++ b/src/plugins/cache/index.ts @@ -0,0 +1,183 @@ +import type { ToolkitPlugin, PluginFactory } from '../types'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; +import { CacheService } from './cache.service'; +import { initializeCacheMiddleware } from './cache.middleware'; + +/** + * Cache plugin options + */ +export interface CachePluginOptions { + /** + * Enable/disable caching + * @default true + */ + enabled?: boolean; + + /** + * Cache provider ('redis' or 'memory') + * Defaults to config.CACHE_PROVIDER + */ + provider?: 'redis' | 'memory'; + + /** + * Global cache key prefix + * Defaults to config.CACHE_PREFIX + */ + prefix?: string; + + /** + * Default TTL in seconds + * Defaults to config.CACHE_DEFAULT_TTL + */ + ttl?: number; + + /** + * Enable compression for large values + * Defaults to config.CACHE_COMPRESSION_ENABLED + */ + compression?: boolean; + + /** + * Minimum bytes to trigger compression + * Defaults to config.CACHE_COMPRESSION_THRESHOLD + */ + compressionThreshold?: number; + + /** + * Enable metrics collection + * @default true + */ + enableMetrics?: boolean; +} + +/** + * Global cache service instance + */ +let globalCacheService: CacheService | null = null; + +/** + * Get the global cache service instance + * @throws Error if cache service is not initialized + */ +export function getCacheService(): CacheService { + if (!globalCacheService) { + throw new Error( + 'Cache service not initialized. 
Ensure cache plugin is registered.', + ); + } + return globalCacheService; +} + +/** + * Cache plugin for TypeScript Backend Toolkit + * + * Provides comprehensive caching capabilities with: + * - Multiple providers (Redis, Memory) + * - Tag-based invalidation + * - Compression support + * - Stale-while-revalidate + * - MagicRouter middleware integration + * - Metrics tracking + * + * @example + * ```ts + * import { cachePlugin } from './plugins/cache'; + * + * app.use(cachePlugin({ + * enabled: true, + * prefix: 'myapp:', + * ttl: 3600, + * compression: true + * })); + * ``` + */ +export const cachePlugin: PluginFactory = ( + options = {}, +): ToolkitPlugin => { + const { + enabled = config.CACHE_ENABLED !== false, + provider = config.CACHE_PROVIDER, + prefix = config.CACHE_PREFIX, + ttl = config.CACHE_DEFAULT_TTL, + compression = config.CACHE_COMPRESSION_ENABLED, + compressionThreshold = config.CACHE_COMPRESSION_THRESHOLD, + enableMetrics = true, + } = options; + + return { + name: 'cache', + priority: 50, + options, + + register({ app }) { + if (!enabled) { + logger.info('Cache plugin is disabled'); + return; + } + + logger.info( + { + provider, + prefix, + ttl, + compression, + compressionThreshold, + }, + 'Initializing cache plugin', + ); + + // Create cache service instance + const cacheService = new CacheService({ + prefix, + defaultTtl: ttl, + compressionEnabled: compression, + compressionThreshold, + enableMetrics, + }); + + // Set global instance + globalCacheService = cacheService; + + // Initialize middleware with service + initializeCacheMiddleware(cacheService); + + // Make cache service available on app + app.set('cache', cacheService); + + logger.info( + { + provider, + prefix, + }, + 'Cache plugin initialized successfully', + ); + }, + }; +}; + +// Re-export types and middleware +export type { + CacheMiddlewareOptions, + InvalidateOptions, + CacheKeyGenerator, + CacheTagResolver, + CachePatternResolver, + CacheKeyResolver, + CacheCondition, + VaryByField, + CacheWrapOptions, + CacheServiceOptions, + CacheStats, + CacheWarmEntry, + BatchSetEntry, +} from './types'; + +export { CacheService } from './cache.service'; +export { + cacheResponse, + invalidateCache, + cacheWithETag, +} from './cache.middleware'; + +export default cachePlugin; diff --git a/src/plugins/cache/types.ts b/src/plugins/cache/types.ts new file mode 100644 index 0000000..9e7a426 --- /dev/null +++ b/src/plugins/cache/types.ts @@ -0,0 +1,225 @@ +import type { RequestAny } from '@/plugins/magic/router'; + +/** + * Cache key generator function type + * Receives full request object for dynamic key generation + */ +export type CacheKeyGenerator = (req: RequestAny) => string | Promise; + +/** + * Cache tag resolver function type + * Receives full request object for dynamic tag generation + */ +export type CacheTagResolver = ( + req: RequestAny, +) => string[] | Promise; + +/** + * Cache pattern resolver function type + * Receives full request object for dynamic pattern generation + */ +export type CachePatternResolver = ( + req: RequestAny, +) => string[] | Promise; + +/** + * Cache key resolver function type + * Receives full request object for dynamic key generation + */ +export type CacheKeyResolver = ( + req: RequestAny, +) => string[] | Promise; + +/** + * Cache condition function type + * Determines whether to cache based on request + */ +export type CacheCondition = (req: RequestAny) => boolean | Promise; + +/** + * Fields that can be used to vary cache keys + */ +export type VaryByField = + | 'userId' + 
| 'url' + | 'query' + | 'params' + | 'headers' + | 'method'; + +/** + * Options for response caching middleware + */ +export type CacheMiddlewareOptions = { + /** + * Time to live in seconds + * @default 3600 + */ + ttl?: number; + + /** + * Cache key - can be static string or dynamic function with request access + * If not provided, auto-generated from URL and varyBy fields + */ + key?: string | CacheKeyGenerator; + + /** + * Vary cache by specific request fields + * Auto-generates cache keys based on these fields + */ + varyBy?: VaryByField[]; + + /** + * Specific headers to vary by (e.g., ['accept-language']) + */ + varyByHeaders?: string[]; + + /** + * Tags for cache invalidation + * Can be static array or dynamic function with request access + */ + tags?: string[] | CacheTagResolver; + + /** + * Conditional caching based on request + * Return false to skip caching + */ + condition?: CacheCondition; + + /** + * Enable stale-while-revalidate pattern + * Serves stale data while fetching fresh data in background + * @default false + */ + staleWhileRevalidate?: boolean; + + /** + * Time in seconds before cache is considered stale (for stale-while-revalidate) + * @default ttl / 2 + */ + staleTime?: number; + + /** + * Compress cached data if larger than threshold + * @default false + */ + compress?: boolean; + + /** + * Include user context in cache key + * Shorthand for varyBy: ['userId'] + * @default false + */ + private?: boolean; + + /** + * Skip cache if true (useful for debugging) + * @default false + */ + skip?: boolean; +}; + +/** + * Timing for cache invalidation + */ +export type InvalidationTiming = 'before' | 'after'; + +/** + * Options for cache invalidation middleware + */ +export type InvalidateOptions = { + /** + * Tags to invalidate + * Can be static array or dynamic function with request access + */ + tags?: string[] | CacheTagResolver; + + /** + * Patterns to invalidate (e.g., 'users:*') + * Can be static array or dynamic function with request access + */ + patterns?: string[] | CachePatternResolver; + + /** + * Specific keys to invalidate + * Can be static array or dynamic function with request access + */ + keys?: string[] | CacheKeyResolver; + + /** + * When to perform invalidation + * - 'before': Invalidate before handler executes + * - 'after': Invalidate after successful response (status 2xx) + * @default 'after' + */ + timing?: InvalidationTiming; + + /** + * Only invalidate if condition is met + */ + condition?: CacheCondition; +}; + +/** + * Cache entry with metadata + */ +export type CacheEntry = { + value: T; + createdAt: number; + expiresAt: number; + tags?: string[]; + compressed?: boolean; +}; + +/** + * Cache statistics + */ +export type CacheStats = { + hits: number; + misses: number; + hitRate: number; + size?: number; + keys?: number; +}; + +/** + * Cache warming entry + */ +export type CacheWarmEntry = { + key: string; + value: T; + ttl?: number; + tags?: string[]; +}; + +/** + * Options for cache.wrap() method + */ +export type CacheWrapOptions = { + ttl?: number; + tags?: string[]; + staleTime?: number; + compress?: boolean; // not implemented + forceRefresh?: boolean; +}; + +/** + * Options for cache service initialization + */ +export type CacheServiceOptions = { + prefix?: string; + defaultTtl?: number; + compressionEnabled?: boolean; + compressionThreshold?: number; + enableMetrics?: boolean; +}; + +/** + * Batch set entry + */ +export type BatchSetEntry = { + key: string; + value: T; + ttl?: number; + tags?: string[]; +}; diff --git 
a/src/plugins/lifecycle/index.ts b/src/plugins/lifecycle/index.ts new file mode 100644 index 0000000..f7203c1 --- /dev/null +++ b/src/plugins/lifecycle/index.ts @@ -0,0 +1,38 @@ +import type { ToolkitPlugin, PluginFactory } from '@/plugins/types'; +import { LifecycleManager, type LifecycleOptions } from './lifecycle-manager'; +import { disconnectDatabase } from '@/lib/database'; +import { Server as SocketServer } from 'socket.io'; +import { cacheProvider, RedisProvider } from '@/lib/cache'; +import { closeAllQueues } from '@/lib/queue'; + +export const lifecyclePlugin: PluginFactory = ( + options = {}, +): ToolkitPlugin => { + return { + name: 'lifecycle', + priority: 10, + options, + + register({ app, server }) { + const lifecycle = new LifecycleManager({ + gracefulShutdownTimeout: 30000, + }); + lifecycle.registerServer(server); + + lifecycle.registerCleanup(async () => { + await disconnectDatabase(); + await closeAllQueues(); + // Disconnect cache if using Redis + if (cacheProvider instanceof RedisProvider) { + await cacheProvider.getClient().quit(); + } + const io = app.locals?.io as SocketServer | undefined; + io?.disconnectSockets(true); + }); + + lifecycle.setupSignalHandlers(); + }, + }; +}; + +export default lifecyclePlugin; diff --git a/src/plugins/lifecycle/lifecycle-manager.ts b/src/plugins/lifecycle/lifecycle-manager.ts new file mode 100644 index 0000000..b39c98f --- /dev/null +++ b/src/plugins/lifecycle/lifecycle-manager.ts @@ -0,0 +1,105 @@ +import type { Server } from 'http'; +import logger from '@/plugins/observability/logger'; + +export type CleanupFunction = () => Promise | void; + +export interface LifecycleOptions { + gracefulShutdownTimeout?: number; +} + +export class LifecycleManager { + private cleanupHandlers: CleanupFunction[] = []; + private server?: Server; + private shuttingDown = false; + private gracefulShutdownTimeout = 30000; + + constructor(options?: LifecycleOptions) { + if (options?.gracefulShutdownTimeout) { + this.gracefulShutdownTimeout = options.gracefulShutdownTimeout; + } + } + + registerServer(server: Server): void { + this.server = server; + } + + registerCleanup(handler: CleanupFunction): void { + this.cleanupHandlers.push(handler); + } + + setupSignalHandlers(): void { + const signals: NodeJS.Signals[] = ['SIGTERM', 'SIGINT']; + + for (const signal of signals) { + process.on(signal, () => { + logger.info(`Received ${signal}, starting graceful shutdown...`); + this.gracefulShutdown().catch((err) => { + logger.error({ err }, 'Error during graceful shutdown'); + process.exit(1); + }); + }); + } + + process.on('uncaughtException', (err) => { + logger.error({ err }, 'Uncaught exception'); + process.exit(1); + }); + + process.on('unhandledRejection', (reason, promise) => { + logger.error({ reason, promise }, 'Unhandled rejection'); + }); + } + + private async gracefulShutdown(): Promise { + if (this.shuttingDown) { + logger.warn('Shutdown already in progress'); + return; + } + + this.shuttingDown = true; + + const shutdownTimer = setTimeout(() => { + logger.error('Graceful shutdown timeout exceeded, forcing exit'); + process.exit(1); + }, this.gracefulShutdownTimeout); + + try { + if (this.server) { + logger.info('Closing HTTP server...'); + await new Promise((resolve, reject) => { + this.server!.close((err) => { + if (err) { + logger.error({ err }, 'Error closing HTTP server'); + reject(err); + } else { + logger.info('HTTP server closed'); + resolve(); + } + }); + }); + } + + logger.info('Running cleanup handlers...'); + await Promise.all( + 
this.cleanupHandlers.map(async (handler, index) => { + try { + await handler(); + logger.debug(`Cleanup handler ${index + 1} completed`); + } catch (err) { + logger.error({ err, index }, `Cleanup handler ${index + 1} failed`); + } + }), + ); + + clearTimeout(shutdownTimer); + logger.info('Graceful shutdown completed'); + process.exit(0); + } catch (err) { + clearTimeout(shutdownTimer); + logger.error({ err }, 'Error during graceful shutdown'); + process.exit(1); + } + } +} + +export default LifecycleManager; diff --git a/src/plugins/magic/index.ts b/src/plugins/magic/index.ts new file mode 100644 index 0000000..1807143 --- /dev/null +++ b/src/plugins/magic/index.ts @@ -0,0 +1,34 @@ +import swaggerUi from 'swagger-ui-express'; +import YAML from 'yaml'; +import type { ToolkitPlugin, PluginFactory } from '@/plugins/types'; +import { convertDocumentationToYaml } from './swagger-doc-generator'; +import { ServerObject } from 'openapi3-ts/oas30'; + +export interface OpenApiOptions { + description: string; + servers: ServerObject[]; + path: string; +} + +export const magicRouterPlugin: PluginFactory = ( + options, +): ToolkitPlugin => { + const { path, description, servers } = options as OpenApiOptions; + + return { + name: 'magic-router', + priority: 20, + options, + + register({ app, port }) { + const swaggerDocument = YAML.parse( + convertDocumentationToYaml(description, servers), + ); + app.use(path, swaggerUi.serve, swaggerUi.setup(swaggerDocument)); + + return [`http://localhost:${port}${path}`]; + }, + }; +}; + +export default magicRouterPlugin; diff --git a/src/openapi/openapi.utils.ts b/src/plugins/magic/openapi.utils.ts similarity index 100% rename from src/openapi/openapi.utils.ts rename to src/plugins/magic/openapi.utils.ts diff --git a/src/plugins/magic/registry.ts b/src/plugins/magic/registry.ts new file mode 100644 index 0000000..5fc7cf5 --- /dev/null +++ b/src/plugins/magic/registry.ts @@ -0,0 +1 @@ +export { registry, bearerAuth } from './swagger-instance'; diff --git a/src/plugins/magic/response.builders.ts b/src/plugins/magic/response.builders.ts new file mode 100644 index 0000000..bd8aa9e --- /dev/null +++ b/src/plugins/magic/response.builders.ts @@ -0,0 +1,98 @@ +import { z } from 'zod'; +import { errorResponseSchema, paginatorSchema } from '@/common/common.schema'; + +/** + * Response builders (R) - helpers for creating consistent response schemas + * + * These builders wrap data schemas in standard envelope formats that match + * your API's response structure. Use them in router `responses` configuration. 
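+ *
+ * @example
+ * // Additional sketch pairing R.paginated, R.noContent and R.error; the schema
+ * // and handler names (getUsersSchema, userSchema, idSchema, handleGetUsers,
+ * // handleDeleteUser) are illustrative, not part of this module:
+ * router.get('/', {
+ *   requestType: { query: getUsersSchema },
+ *   responses: { 200: R.paginated(userSchema), 400: R.error() },
+ * }, handleGetUsers);
+ *
+ * router.delete('/:id', {
+ *   requestType: { params: idSchema },
+ *   responses: { 204: R.noContent(), 404: R.error() },
+ * }, handleDeleteUser);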
+ *
+ * @example
+ * // In a router:
+ * router.get('/:id', {
+ *   requestType: { params: idSchema },
+ *   responses: {
+ *     200: R.success(userSchema),
+ *     404: R.error(),
+ *   }
+ * }, handler);
+ */
+export const R = {
+  /**
+   * Standard success response envelope
+   * @param data - Zod schema for the data payload
+   * @returns Schema matching { success: true, message?: string, data?: T }
+   *
+   * @example
+   * R.success(z.object({ user: userSchema }))
+   */
+  success: <T extends z.ZodTypeAny>(data: T) =>
+    z.object({
+      success: z.literal(true),
+      message: z.string().optional(),
+      data: data.optional(),
+    }),
+
+  /**
+   * Paginated response envelope for list endpoints
+   * @param item - Zod schema for individual items in the array
+   * @returns Schema with items array and paginator metadata
+   *
+   * @example
+   * R.paginated(userSchema) // for list of users with pagination
+   */
+  paginated: <T extends z.ZodTypeAny>(item: T) =>
+    z.object({
+      success: z.literal(true),
+      message: z.string().optional(),
+      data: z.object({
+        items: z.array(item),
+        paginator: paginatorSchema,
+      }),
+    }),
+
+  /**
+   * No content response (204)
+   * Use for successful operations that don't return data
+   *
+   * @example
+   * responses: { 204: R.noContent() }
+   */
+  noContent: () => z.undefined(),
+
+  /**
+   * Error response envelope
+   * @param schema - Optional custom error schema (defaults to standard error)
+   * @returns Error schema matching { success: false, message: string, data: any, stack?: string }
+   *
+   * @example
+   * R.error() // uses default error schema
+   * R.error(customErrorSchema) // override with custom schema
+   */
+  error: <T extends z.ZodTypeAny>(schema?: T) =>
+    schema ?? errorResponseSchema,
+
+  /**
+   * Raw schema passthrough (for non-standard responses)
+   * Use when you need a response that doesn't fit the envelope pattern
+   *
+   * @example
+   * R.raw(z.string()) // for raw string responses
+   * R.raw(z.object({ customField: z.string() })) // custom structure
+   */
+  raw: <T extends z.ZodTypeAny>(schema: T) => schema,
+};
+
+/**
+ * Type helper to extract the data type from a success response schema
+ * @example
+ * type UserData = ExtractSuccessData<typeof getUserResponseSchema>;
+ */
+export type ExtractSuccessData<T> =
+  T extends z.ZodObject<infer Shape>
+    ? Shape extends { data: infer D }
+      ? D extends z.ZodOptional<infer U>
+        ?
U + : D + : never + : never; diff --git a/src/plugins/magic/router.ts b/src/plugins/magic/router.ts new file mode 100644 index 0000000..00a73b0 --- /dev/null +++ b/src/plugins/magic/router.ts @@ -0,0 +1,497 @@ +import express, { + type NextFunction, + type Request, + type Response, + Router, +} from 'express'; +import asyncHandler from 'express-async-handler'; +import formidable from 'formidable'; +import type { ZodTypeAny } from 'zod'; +import type { FormFile } from '@/types'; +import { errorResponse } from '@/utils/response.utils'; +import { + errorResponseSchema, + successResponseSchema, +} from '@/common/common.schema'; +import { canAccess } from '@/middlewares/can-access'; +import { responseValidator } from '@/middlewares/response-validator'; +import { validateZodSchema } from '@/middlewares/validate-zod-schema'; +import cookieParser from 'cookie-parser'; +import type { + RequestZodSchemaType, + ResponseExtended, + ResponseSchemaEntry, +} from '@/types'; +import { + camelCaseToTitleCase, + parseRouteString, + routeToClassName, +} from './openapi.utils'; +import { bearerAuth, registry } from './swagger-instance'; +import { StatusCodes, StatusCodesValues } from './status-codes'; + +type Method = + | 'get' + | 'post' + | 'put' + | 'delete' + | 'patch' + | 'head' + | 'options' + | 'trace'; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type IDontKnow = unknown | never | any; +export type MaybePromise = void | Promise; +export type RequestAny = Request; +export type ResponseAny = Response>; +export type MagicPathType = `/${string}`; +export type MagicRoutePType = PathSet extends true + ? [reqAndRes: RequestAndResponseType, ...handlers: MagicMiddleware[]] + : [ + path: MagicPathType, + reqAndRes: RequestAndResponseType, + ...handlers: MagicMiddleware[], + ]; +export type MagicRouteRType = Omit< + MagicRouter, + 'route' | 'getRouter' | 'use' +>; +export type MagicMiddleware = ( + req: RequestAny, + res: ResponseAny, + next: NextFunction, +) => MaybePromise; + +// Response configuration types +export type ResponseEntry = + | ZodTypeAny + | { + schema: ZodTypeAny; + description?: string; + contentType?: string; + headers?: Record; + examples?: Record; + }; + +export type ResponsesConfig = Partial>; + +// Multipart configuration options for formidable +export type MultipartOptions = { + maxFileSize?: number; // bytes + allowEmptyFiles?: boolean; + multiples?: boolean; // allow multiple files per field + keepExtensions?: boolean; + uploadDir?: string; // optional temp dir + maxFields?: number; + maxFiles?: number; +}; + +export type RequestAndResponseType = { + requestType?: RequestZodSchemaType; + // Legacy: treated as 200 response if provided + responseModel?: ZodTypeAny; + // New: supports multiple status codes with detailed config + responses?: ResponsesConfig; + contentType?: + | 'application/json' + | 'multipart/form-data' + | 'application/x-www-form-urlencoded'; + // Per-route multipart configuration + multipart?: true | MultipartOptions; + // Enable cookie parsing for this route even without canAccess middleware + useCookieParser?: boolean; +}; + +export class MagicRouter { + private router: Router; + private rootRoute: string; + private currentPath?: MagicPathType; + + constructor(rootRoute: string, currentPath?: MagicPathType) { + this.router = Router(); + this.rootRoute = rootRoute; + this.currentPath = currentPath; + } + + private getPath(path: string) { + return this.rootRoute + parseRouteString(path); + } + + /** + * Normalize response configuration to a 
Map of status -> ResponseSchemaEntry + * Handles backward compatibility with responseModel + */ + private normalizeResponses( + requestAndResponseType: RequestAndResponseType, + ): Map { + const normalized = new Map(); + + // New responses config takes priority + if (requestAndResponseType.responses) { + for (const [status, entry] of Object.entries( + requestAndResponseType.responses, + )) { + const statusCode = Number(status); + + if (typeof entry === 'object' && 'schema' in entry) { + // Full ResponseEntry object + normalized.set(statusCode, { + schema: entry.schema, + contentType: entry.contentType || 'application/json', + description: entry.description, + }); + } else { + // Just a Zod schema + normalized.set(statusCode, { + schema: entry as ZodTypeAny, + contentType: 'application/json', + }); + } + } + } else if (requestAndResponseType.responseModel) { + // Legacy: responseModel treated as 200 response + normalized.set(200, { + schema: requestAndResponseType.responseModel, + contentType: 'application/json', + }); + } else { + // Default: successResponseSchema for 200 + normalized.set(200, { + schema: successResponseSchema, + contentType: 'application/json', + }); + } + + return normalized; + } + + private wrapper( + method: Method, + path: MagicPathType, + requestAndResponseType: RequestAndResponseType, + ...middlewares: Array + ): void { + const bodyType = requestAndResponseType.requestType?.body; + const paramsType = requestAndResponseType.requestType?.params; + const queryType = requestAndResponseType.requestType?.query; + + // Normalize responses (handles backward compatibility) + const normalizedResponses = this.normalizeResponses(requestAndResponseType); + + const className = routeToClassName(this.rootRoute); + const title = camelCaseToTitleCase( + middlewares[middlewares.length - 1]?.name, + ); + + const bodySchema = bodyType + ? registry.register(`${title} Input`, bodyType) + : null; + + const hasSecurity = middlewares.some((m) => m.name === canAccess().name); + const shouldUseCookieParser = hasSecurity || requestAndResponseType.useCookieParser === true; + + const contentType = + requestAndResponseType.contentType ?? 
'application/json'; + const needsJsonParser = + contentType === 'application/json' && + (method === 'post' || method === 'put' || method === 'patch'); + const needsUrlencodedParser = + contentType === 'application/x-www-form-urlencoded' && + (method === 'post' || method === 'put' || method === 'patch'); + + // Middleware to attach response schemas to res.locals + const attachResponseSchemasMiddleware: MagicMiddleware = ( + _: RequestAny, + res: ResponseAny, + next: NextFunction, + ) => { + const extRes = res as ResponseExtended; + extRes.locals.responseSchemas = normalizedResponses; + // Legacy support + extRes.locals.validateSchema = requestAndResponseType.responseModel; + next(); + }; + + // Multipart parser middleware for formidable + const multipartParser: MagicMiddleware = ( + req: RequestAny, + res: ResponseAny, + next: NextFunction, + ) => { + // Only parse if content-type is multipart/form-data + const ct = String(req.headers['content-type'] || ''); + if (!ct.startsWith('multipart/form-data')) { + return next(); + } + + // Build formidable options from route config + const multipartConfig = requestAndResponseType.multipart; + const options: formidable.Options = { + maxFileSize: 10 * 1024 * 1024, // 10MB default + allowEmptyFiles: false, + multiples: true, + keepExtensions: true, + }; + + // Merge user options if provided + if (multipartConfig && typeof multipartConfig === 'object') { + Object.assign(options, multipartConfig); + } + + const form = formidable(options); + + form.parse( + req, + ( + err: Error | null, + fields: formidable.Fields, + files: formidable.Files, + ) => { + if (err) { + return errorResponse( + res, + 'Failed to parse multipart data', + StatusCodes.BAD_REQUEST, + err, + ); + } + + // Normalize fields: convert single-element arrays to values + const normalizedFields: Record = {}; + for (const [key, value] of Object.entries(fields)) { + if (Array.isArray(value)) { + normalizedFields[key] = value.length === 1 ? value[0] : value; + } else { + normalizedFields[key] = value; + } + } + + // Normalize files: convert formidable.File to FormFile + const normalizedFiles: Record = {}; + for (const [key, value] of Object.entries(files)) { + if (Array.isArray(value)) { + const formFiles = value.map((f: FormFile) => ({ + filepath: f.filepath, + originalFilename: f.originalFilename, + mimetype: f.mimetype, + size: f.size, + hash: f.hash, + lastModifiedDate: f.lastModifiedDate, + })); + normalizedFiles[key] = + formFiles.length === 1 ? 
formFiles[0] : formFiles; + } else if (value) { + const file = value as FormFile; + normalizedFiles[key] = { + filepath: file.filepath, + originalFilename: file.originalFilename, + mimetype: file.mimetype, + size: file.size, + hash: file.hash, + lastModifiedDate: file.lastModifiedDate, + }; + } + } + + // Merge fields and files into req.body + req.body = { ...normalizedFields, ...normalizedFiles }; + + // Set req.files for compatibility + req.files = normalizedFiles; + + // Set req.file if there's exactly one file field with a single file + const fileKeys = Object.keys(normalizedFiles); + if (fileKeys.length === 1) { + const singleFile = normalizedFiles[fileKeys[0]]; + if (!Array.isArray(singleFile)) { + req.file = singleFile; + } + } + + next(); + }, + ); + }; + + // Build OpenAPI responses from normalized config + const openapiResponses: Record< + string, + { + description: string; + content: Record; + } + > = {}; + + for (const [status, entry] of normalizedResponses) { + const statusStr = String(status); + const ct = entry.contentType || 'application/json'; + + openapiResponses[statusStr] = { + description: entry.description || '', + content: { + [ct]: { + schema: entry.schema, + }, + }, + }; + } + + // Add default error responses if not already configured + const defaultErrors = [400, 404, 500]; + for (const errorStatus of defaultErrors) { + if (!normalizedResponses.has(errorStatus)) { + openapiResponses[String(errorStatus)] = { + description: 'API Error Response', + content: { + 'application/json': { + schema: errorResponseSchema, + }, + }, + }; + } + } + + registry.registerPath({ + method: method, + tags: [className], + path: this.getPath(path), + security: hasSecurity ? [{ [bearerAuth.name]: ['bearer'] }] : [], + description: title, + summary: title, + request: { + params: paramsType, + query: queryType, + ...(bodySchema + ? { + body: { + content: { + [contentType]: { + schema: bodySchema, + }, + }, + }, + } + : {}), + }, + responses: openapiResponses as never, + }); + + const requestType = requestAndResponseType.requestType ?? {}; + + const controller = asyncHandler(middlewares[middlewares.length - 1]); + + middlewares.pop(); + + // Determine if multipart parsing is needed + const needsMultipart = + contentType === 'multipart/form-data' && requestAndResponseType.multipart; + + if (Object.keys(requestType).length) { + this.router[method]( + path, + attachResponseSchemasMiddleware, + responseValidator, + ...(shouldUseCookieParser ? [cookieParser()] : []), + ...(needsMultipart ? [multipartParser] : []), + ...(needsJsonParser ? [express.json()] : []), + ...(needsUrlencodedParser + ? [express.urlencoded({ extended: false })] + : []), + validateZodSchema(requestType), + ...middlewares, + controller, + ); + } else { + this.router[method]( + path, + attachResponseSchemasMiddleware, + responseValidator, + ...(shouldUseCookieParser ? [cookieParser()] : []), + ...(needsMultipart ? [multipartParser] : []), + ...(needsJsonParser ? [express.json()] : []), + ...(needsUrlencodedParser + ? 
[express.urlencoded({ extended: false })] + : []), + ...middlewares, + controller, + ); + } + } + + public get(...args: MagicRoutePType): MagicRouteRType { + return this.routeHandler('get', ...args); + } + + public post(...args: MagicRoutePType): MagicRouteRType { + return this.routeHandler('post', ...args); + } + + public delete(...args: MagicRoutePType): MagicRouteRType { + return this.routeHandler('delete', ...args); + } + + public patch(...args: MagicRoutePType): MagicRouteRType { + return this.routeHandler('patch', ...args); + } + + public put(...args: MagicRoutePType): MagicRouteRType { + return this.routeHandler('put', ...args); + } + + public use(...args: Parameters): void { + this.router.use(...args); + } + + public route(path: MagicPathType): MagicRouteRType { + // Create a proxy object that will use the same router instance + const proxy = { + get: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { + this.wrapper('get', path, ...args); + return proxy; + }, + post: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { + this.wrapper('post', path, ...args); + return proxy; + }, + put: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { + this.wrapper('put', path, ...args); + return proxy; + }, + delete: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { + this.wrapper('delete', path, ...args); + return proxy; + }, + patch: (...args: [RequestAndResponseType, ...MagicMiddleware[]]) => { + this.wrapper('patch', path, ...args); + return proxy; + }, + }; + return proxy; + } + + private routeHandler(method: Method, ...args: MagicRoutePType) { + if (this.currentPath) { + const [reqAndRes, ...handlers] = args as [ + RequestAndResponseType, + ...MagicMiddleware[], + ]; + this.wrapper(method, this.currentPath, reqAndRes, ...handlers); + } else { + const [path, reqAndRes, ...handlers] = args as [ + MagicPathType, + RequestAndResponseType, + ...MagicMiddleware[], + ]; + this.wrapper(method, path, reqAndRes, ...handlers); + } + return this; + } + + // Method to get the router instance + public getRouter(): Router { + return this.router; + } +} + +export default MagicRouter; diff --git a/src/plugins/magic/status-codes.ts b/src/plugins/magic/status-codes.ts new file mode 100644 index 0000000..56e6e76 --- /dev/null +++ b/src/plugins/magic/status-codes.ts @@ -0,0 +1,358 @@ +export const StatusCodes = { + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.2.1 + * + * This interim response indicates that everything so far is OK and that the client should continue with the request or ignore it if it is already finished. + */ + CONTINUE: 100, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.2.2 + * + * This code is sent in response to an Upgrade request header by the client, and indicates the protocol the server is switching too. + */ + SWITCHING_PROTOCOLS: 101, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc2518#section-10.1 + * + * This code indicates that the server has received and is processing the request, but no response is available yet. + */ + PROCESSING: 102, + /** + * Official Documentation @ https://www.rfc-editor.org/rfc/rfc8297#page-3 + * + * This code indicates to the client that the server is likely to send a final response with the header fields included in the informational response. + */ + EARLY_HINTS: 103, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.3.1 + * + * The request has succeeded. 
The meaning of a success varies depending on the HTTP method: + * GET: The resource has been fetched and is transmitted in the message body. + * HEAD: The entity headers are in the message body. + * POST: The resource describing the result of the action is transmitted in the message body. + * TRACE: The message body contains the request message as received by the server + */ + OK: 200, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.3.2 + * + * The request has succeeded and a new resource has been created as a result of it. This is typically the response sent after a PUT request. + */ + CREATED: 201, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.3.3 + * + * The request has been received but not yet acted upon. It is non-committal, meaning that there is no way in HTTP to later send an asynchronous response indicating the outcome of processing the request. It is intended for cases where another process or server handles the request, or for batch processing. + */ + ACCEPTED: 202, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.3.4 + * + * This response code means returned meta-information set is not exact set as available from the origin server, but collected from a local or a third party copy. Except this condition, 200 OK response should be preferred instead of this response. + */ + NON_AUTHORITATIVE_INFORMATION: 203, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.3.5 + * + * There is no content to send for this request, but the headers may be useful. The user-agent may update its cached headers for this resource with the new ones. + */ + NO_CONTENT: 204, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.3.6 + * + * This response code is sent after accomplishing request to tell user agent reset document view which sent this request. + */ + RESET_CONTENT: 205, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7233#section-4.1 + * + * This response code is used because of range header sent by the client to separate download into multiple streams. + */ + PARTIAL_CONTENT: 206, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc2518#section-10.2 + * + * A Multi-Status response conveys information about multiple resources in situations where multiple status codes might be appropriate. + */ + MULTI_STATUS: 207, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.4.1 + * + * The request has more than one possible responses. User-agent or user should choose one of them. There is no standardized way to choose one of the responses. + */ + MULTIPLE_CHOICES: 300, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.4.2 + * + * This response code means that URI of requested resource has been changed. Probably, new URI would be given in the response. + */ + MOVED_PERMANENTLY: 301, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.4.3 + * + * This response code means that URI of requested resource has been changed temporarily. New changes in the URI might be made in the future. Therefore, this same URI should be used by the client in future requests. + */ + MOVED_TEMPORARILY: 302, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.4.4 + * + * Server sent this response to directing client to get requested resource to another URI with an GET request. 
+ */ + SEE_OTHER: 303, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7232#section-4.1 + * + * This is used for caching purposes. It is telling to client that response has not been modified. So, client can continue to use same cached version of response. + */ + NOT_MODIFIED: 304, + /** + * @deprecated + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.4.6 + * + * Was defined in a previous version of the HTTP specification to indicate that a requested response must be accessed by a proxy. It has been deprecated due to security concerns regarding in-band configuration of a proxy. + */ + USE_PROXY: 305, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.4.7 + * + * Server sent this response to directing client to get requested resource to another URI with same method that used prior request. This has the same semantic than the 302 Found HTTP response code, with the exception that the user agent must not change the HTTP method used: if a POST was used in the first request, a POST must be used in the second request. + */ + TEMPORARY_REDIRECT: 307, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7538#section-3 + * + * This means that the resource is now permanently located at another URI, specified by the Location: HTTP Response header. This has the same semantics as the 301 Moved Permanently HTTP response code, with the exception that the user agent must not change the HTTP method used: if a POST was used in the first request, a POST must be used in the second request. + */ + PERMANENT_REDIRECT: 308, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.1 + * + * This response means that server could not understand the request due to invalid syntax. + */ + BAD_REQUEST: 400, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7235#section-3.1 + * + * Although the HTTP standard specifies "unauthorized", semantically this response means "unauthenticated". That is, the client must authenticate itself to get the requested response. + */ + UNAUTHORIZED: 401, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.2 + * + * This response code is reserved for future use. Initial aim for creating this code was using it for digital payment systems however this is not used currently. + */ + PAYMENT_REQUIRED: 402, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.3 + * + * The client does not have access rights to the content, i.e. they are unauthorized, so server is rejecting to give proper response. Unlike 401, the client's identity is known to the server. + */ + FORBIDDEN: 403, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.4 + * + * The server can not find requested resource. In the browser, this means the URL is not recognized. In an API, this can also mean that the endpoint is valid but the resource itself does not exist. Servers may also send this response instead of 403 to hide the existence of a resource from an unauthorized client. This response code is probably the most famous one due to its frequent occurence on the web. + */ + NOT_FOUND: 404, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.5 + * + * The request method is known by the server but has been disabled and cannot be used. For example, an API may forbid DELETE-ing a resource. 
The two mandatory methods, GET and HEAD, must never be disabled and should not return this error code. + */ + METHOD_NOT_ALLOWED: 405, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.6 + * + * This response is sent when the web server, after performing server-driven content negotiation, doesn't find any content following the criteria given by the user agent. + */ + NOT_ACCEPTABLE: 406, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7235#section-3.2 + * + * This is similar to 401 but authentication is needed to be done by a proxy. + */ + PROXY_AUTHENTICATION_REQUIRED: 407, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.7 + * + * This response is sent on an idle connection by some servers, even without any previous request by the client. It means that the server would like to shut down this unused connection. This response is used much more since some browsers, like Chrome, Firefox 27+, or IE9, use HTTP pre-connection mechanisms to speed up surfing. Also note that some servers merely shut down the connection without sending this message. + */ + REQUEST_TIMEOUT: 408, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.8 + * + * This response is sent when a request conflicts with the current state of the server. + */ + CONFLICT: 409, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.9 + * + * This response would be sent when the requested content has been permenantly deleted from server, with no forwarding address. Clients are expected to remove their caches and links to the resource. The HTTP specification intends this status code to be used for "limited-time, promotional services". APIs should not feel compelled to indicate resources that have been deleted with this status code. + */ + GONE: 410, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.10 + * + * The server rejected the request because the Content-Length header field is not defined and the server requires it. + */ + LENGTH_REQUIRED: 411, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7232#section-4.2 + * + * The client has indicated preconditions in its headers which the server does not meet. + */ + PRECONDITION_FAILED: 412, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.11 + * + * Request entity is larger than limits defined by server; the server might close the connection or return an Retry-After header field. + */ + REQUEST_TOO_LONG: 413, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.12 + * + * The URI requested by the client is longer than the server is willing to interpret. + */ + REQUEST_URI_TOO_LONG: 414, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.13 + * + * The media format of the requested data is not supported by the server, so the server is rejecting the request. + */ + UNSUPPORTED_MEDIA_TYPE: 415, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7233#section-4.4 + * + * The range specified by the Range header field in the request can't be fulfilled; it's possible that the range is outside the size of the target URI's data. + */ + REQUESTED_RANGE_NOT_SATISFIABLE: 416, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.5.14 + * + * This response code means the expectation indicated by the Expect request header field can't be met by the server. 
+ */ + EXPECTATION_FAILED: 417, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc2324#section-2.3.2 + * + * Any attempt to brew coffee with a teapot should result in the error code "418 I'm a teapot". The resulting entity body MAY be short and stout. + */ + IM_A_TEAPOT: 418, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc2518#section-10.6 + * + * The 507 (Insufficient Storage) status code means the method could not be performed on the resource because the server is unable to store the representation needed to successfully complete the request. This condition is considered to be temporary. If the request which received this status code was the result of a user action, the request MUST NOT be repeated until it is requested by a separate user action. + */ + INSUFFICIENT_SPACE_ON_RESOURCE: 419, + /** + * @deprecated + * Official Documentation @ https://tools.ietf.org/rfcdiff?difftype=--hwdiff&url2=draft-ietf-webdav-protocol-06.txt + * + * A deprecated response used by the Spring Framework when a method has failed. + */ + METHOD_FAILURE: 420, + /** + * Official Documentation @ https://datatracker.ietf.org/doc/html/rfc7540#section-9.1.2 + * + * Defined in the specification of HTTP/2 to indicate that a server is not able to produce a response for the combination of scheme and authority that are included in the request URI. + */ + MISDIRECTED_REQUEST: 421, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc2518#section-10.3 + * + * The request was well-formed but was unable to be followed due to semantic errors. + */ + UNPROCESSABLE_ENTITY: 422, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc2518#section-10.4 + * + * The resource that is being accessed is locked. + */ + LOCKED: 423, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc2518#section-10.5 + * + * The request failed due to failure of a previous request. + */ + FAILED_DEPENDENCY: 424, + /** + * Official Documentation @ https://datatracker.ietf.org/doc/html/rfc7231#section-6.5.15 + * + * The server refuses to perform the request using the current protocol but might be willing to do so after the client upgrades to a different protocol. + */ + UPGRADE_REQUIRED: 426, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc6585#section-3 + * + * The origin server requires the request to be conditional. Intended to prevent the 'lost update' problem, where a client GETs a resource's state, modifies it, and PUTs it back to the server, when meanwhile a third party has modified the state on the server, leading to a conflict. + */ + PRECONDITION_REQUIRED: 428, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc6585#section-4 + * + * The user has sent too many requests in a given amount of time ("rate limiting"). + */ + TOO_MANY_REQUESTS: 429, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc6585#section-5 + * + * The server is unwilling to process the request because its header fields are too large. The request MAY be resubmitted after reducing the size of the request header fields. + */ + REQUEST_HEADER_FIELDS_TOO_LARGE: 431, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7725 + * + * The user-agent requested a resource that cannot legally be provided, such as a web page censored by a government. 
+ */ + UNAVAILABLE_FOR_LEGAL_REASONS: 451, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.6.1 + * + * The server encountered an unexpected condition that prevented it from fulfilling the request. + */ + INTERNAL_SERVER_ERROR: 500, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.6.2 + * + * The request method is not supported by the server and cannot be handled. The only methods that servers are required to support (and therefore that must not return this code) are GET and HEAD. + */ + NOT_IMPLEMENTED: 501, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.6.3 + * + * This error response means that the server, while working as a gateway to get a response needed to handle the request, got an invalid response. + */ + BAD_GATEWAY: 502, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.6.4 + * + * The server is not ready to handle the request. Common causes are a server that is down for maintenance or that is overloaded. Note that together with this response, a user-friendly page explaining the problem should be sent. This responses should be used for temporary conditions and the Retry-After: HTTP header should, if possible, contain the estimated time before the recovery of the service. The webmaster must also take care about the caching-related headers that are sent along with this response, as these temporary condition responses should usually not be cached. + */ + SERVICE_UNAVAILABLE: 503, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.6.5 + * + * This error response is given when the server is acting as a gateway and cannot get a response in time. + */ + GATEWAY_TIMEOUT: 504, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc7231#section-6.6.6 + * + * The HTTP version used in the request is not supported by the server. + */ + HTTP_VERSION_NOT_SUPPORTED: 505, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc2518#section-10.6 + * + * The server has an internal configuration error: the chosen variant resource is configured to engage in transparent content negotiation itself, and is therefore not a proper end point in the negotiation process. + */ + INSUFFICIENT_STORAGE: 507, + /** + * Official Documentation @ https://tools.ietf.org/html/rfc6585#section-6 + * + * The 511 status code indicates that the client needs to authenticate to gain network access. 
+ */ + NETWORK_AUTHENTICATION_REQUIRED: 511 +} as const + +export type StatusCodesValues = typeof StatusCodes[keyof typeof StatusCodes]; diff --git a/src/plugins/magic/swagger-doc-generator.ts b/src/plugins/magic/swagger-doc-generator.ts new file mode 100644 index 0000000..101601a --- /dev/null +++ b/src/plugins/magic/swagger-doc-generator.ts @@ -0,0 +1,55 @@ +import fs from 'node:fs/promises'; +import { OpenApiGeneratorV3 } from '@asteasolutions/zod-to-openapi'; +import * as yaml from 'yaml'; + +import type { OpenAPIObject, ServerObject } from 'openapi3-ts/oas30'; +import config from '@/config/env'; +import { registry } from './swagger-instance'; +import path from 'node:path'; + +export const getOpenApiDocumentation = ( + description: string, + servers: ServerObject[], +): OpenAPIObject => { + const generator = new OpenApiGeneratorV3(registry.definitions); + + return generator.generateDocument({ + openapi: '3.0.0', + externalDocs: { + url: '/openapi.yml', + description: 'OpenAPI documentation for the API', + }, + info: { + version: config.APP_VERSION, + title: config.APP_NAME, + description: description, + }, + servers: servers, + }); +}; + +export const convertDocumentationToYaml = ( + description: string, + servers: ServerObject[], +): string => { + const docs = getOpenApiDocumentation(description, servers); + + const fileContent = yaml.stringify(docs); + + return fileContent; +}; + +export const writeDocumentationToDisk = async ( + description: string, + servers: ServerObject[], +): Promise => { + const fileContent = convertDocumentationToYaml(description, servers); + + await fs.writeFile( + path.join(process.cwd(), 'public', 'openapi.yml'), + fileContent, + { + encoding: 'utf-8', + }, + ); +}; diff --git a/src/openapi/swagger-instance.ts b/src/plugins/magic/swagger-instance.ts similarity index 100% rename from src/openapi/swagger-instance.ts rename to src/plugins/magic/swagger-instance.ts diff --git a/src/plugins/magic/zod-extend.ts b/src/plugins/magic/zod-extend.ts new file mode 100644 index 0000000..ec62e26 --- /dev/null +++ b/src/plugins/magic/zod-extend.ts @@ -0,0 +1,218 @@ +import { extendZodWithOpenApi } from '@asteasolutions/zod-to-openapi'; +import { z } from 'zod'; +import type { FormFile } from '@/types'; + +extendZodWithOpenApi(z); + +/** + * File validation options for zFile and zFiles + */ +export type FileValidationOptions = { + /** Maximum file size in bytes */ + maxSize?: number; + /** Allowed MIME types (e.g., ['image/jpeg', 'image/png']) */ + allowedTypes?: readonly string[] | string[]; +}; + +/** + * Common MIME type constants for convenience + */ +export const MIME_TYPES = { + // Images + JPEG: 'image/jpeg', + JPG: 'image/jpg', + PNG: 'image/png', + GIF: 'image/gif', + WEBP: 'image/webp', + SVG: 'image/svg+xml', + + // Documents + PDF: 'application/pdf', + DOC: 'application/msword', + DOCX: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + XLS: 'application/vnd.ms-excel', + XLSX: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + + // Archives + ZIP: 'application/zip', + RAR: 'application/x-rar-compressed', + + // Text + TEXT: 'text/plain', + CSV: 'text/csv', +} as const; + +/** + * Common MIME type groups + */ +export const MIME_GROUPS = { + IMAGES: [MIME_TYPES.JPEG, MIME_TYPES.JPG, MIME_TYPES.PNG, MIME_TYPES.WEBP], + IMAGES_WITH_GIF: [ + MIME_TYPES.JPEG, + MIME_TYPES.JPG, + MIME_TYPES.PNG, + MIME_TYPES.GIF, + MIME_TYPES.WEBP, + ], + DOCUMENTS: [MIME_TYPES.PDF, MIME_TYPES.DOC, MIME_TYPES.DOCX], + SPREADSHEETS: 
[MIME_TYPES.XLS, MIME_TYPES.XLSX, MIME_TYPES.CSV], +} as const; + +/** + * Convert bytes to human-readable format + */ +const formatBytes = (bytes: number): string => { + if (bytes === 0) return '0 Bytes'; + const k = 1024; + const sizes = ['Bytes', 'KB', 'MB', 'GB']; + const i = Math.floor(Math.log(bytes) / Math.log(k)); + return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))}${sizes[i]}`; +}; + +/** + * Type guard to check if a value is a FormFile + */ +export const isFormFile = (value: unknown): value is FormFile => { + if (!value || typeof value !== 'object') return false; + const file = value as Record; + return ( + typeof file.filepath === 'string' && + typeof file.size === 'number' && + (file.mimetype === undefined || + file.mimetype === null || + typeof file.mimetype === 'string') && + (file.originalFilename === undefined || + file.originalFilename === null || + typeof file.originalFilename === 'string') + ); +}; + +/** + * Validate FormFile against options + */ +const validateFormFile = ( + file: FormFile, + options?: FileValidationOptions, +): { valid: boolean; errors: string[] } => { + const errors: string[] = []; + + // Check if it's a valid FormFile structure + if (!isFormFile(file)) { + errors.push('Invalid file structure'); + return { valid: false, errors }; + } + + // Validate file size + if (options?.maxSize !== undefined && file.size > options.maxSize) { + errors.push( + `File size ${formatBytes(file.size)} exceeds maximum allowed size of ${formatBytes(options.maxSize)}`, + ); + } + + // Validate MIME type + if (options?.allowedTypes && options.allowedTypes.length > 0) { + const fileMimeType = file.mimetype?.toLowerCase(); + const allowedTypes = [...options.allowedTypes].map((t) => t.toLowerCase()); + + if (!fileMimeType || !allowedTypes.includes(fileMimeType)) { + errors.push( + `File type '${file.mimetype || 'unknown'}' is not allowed. Allowed types: ${[...options.allowedTypes].join(', ')}`, + ); + } + } + + return { valid: errors.length === 0, errors }; +}; + +/** + * Helper to describe a single file upload field in OpenAPI spec. + * For use with multipart/form-data endpoints. + * Validates that the value is a FormFile at runtime. + * + * @param options - Optional validation constraints + * @param options.maxSize - Maximum file size in bytes + * @param options.allowedTypes - Array of allowed MIME types + * + * @example + * // No validation + * z.object({ avatar: zFile() }) + * + * @example + * // With size and type validation + * z.object({ + * avatar: zFile({ + * maxSize: 5 * 1024 * 1024, // 5MB + * allowedTypes: ['image/jpeg', 'image/png'] + * }) + * }) + * + * @example + * // Using MIME type constants + * z.object({ + * avatar: zFile({ + * maxSize: 5 * 1024 * 1024, + * allowedTypes: MIME_GROUPS.IMAGES + * }) + * }) + */ +export const zFile = (options?: FileValidationOptions) => + z + .custom( + (value) => { + const validation = validateFormFile(value as FormFile, options); + return validation.valid; + }, + (value) => { + const validation = validateFormFile(value as FormFile, options); + return { + message: + validation.errors.length > 0 + ? `File validation failed: ${validation.errors.join('; ')}` + : 'Expected a file upload (FormFile)', + }; + }, + ) + .openapi({ type: 'string', format: 'binary' }); + +/** + * Helper to describe multiple file upload fields in OpenAPI spec. + * For use with multipart/form-data endpoints. + * Each file in the array is validated individually. 
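+ *
+ * @example
+ * // Sketch of a multipart MagicRouter route wired to zFiles; the path, handler
+ * // and field names ('/gallery', handleUploadGallery, images) are illustrative,
+ * // not part of this module:
+ * router.post('/gallery', {
+ *   contentType: 'multipart/form-data',
+ *   multipart: { maxFiles: 5, maxFileSize: 2 * 1024 * 1024 },
+ *   requestType: {
+ *     body: z.object({ images: zFiles({ allowedTypes: MIME_GROUPS.IMAGES }) }),
+ *   },
+ * }, handleUploadGallery);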
+ * + * @param options - Optional validation constraints applied to each file + * @param options.maxSize - Maximum file size in bytes per file + * @param options.allowedTypes - Array of allowed MIME types per file + * + * @example + * // No validation + * z.object({ images: zFiles() }) + * + * @example + * // Each file must be under 2MB and be an image + * z.object({ + * images: zFiles({ + * maxSize: 2 * 1024 * 1024, + * allowedTypes: MIME_GROUPS.IMAGES + * }) + * }) + */ +export const zFiles = (options?: FileValidationOptions) => + z + .array( + z.custom( + (value) => { + const validation = validateFormFile(value as FormFile, options); + return validation.valid; + }, + (value) => { + const validation = validateFormFile(value as FormFile, options); + return { + message: + validation.errors.length > 0 + ? `File validation failed: ${validation.errors.join('; ')}` + : 'Expected a file upload (FormFile)', + }; + }, + ), + ) + .openapi({ type: 'array', items: { type: 'string', format: 'binary' } }); diff --git a/src/plugins/observability/index.ts b/src/plugins/observability/index.ts new file mode 100644 index 0000000..e6b7c45 --- /dev/null +++ b/src/plugins/observability/index.ts @@ -0,0 +1,65 @@ +import type { ToolkitPlugin, PluginFactory } from '../types'; +import { requestIdMiddleware } from './requestId.middleware'; +import { httpLogger } from '@/plugins/observability/logger'; +import { metricsMiddleware } from './observability.middleware'; +import { checkEmailHealth } from '@/lib/email'; +import { checkStorageHealth } from '@/lib/storage'; +import { checkCacheHealth } from '@/lib/cache'; +import { checkDatabaseHealth } from '@/lib/database'; +import { checkQueueHealth } from '@/lib/queue'; +import createOpsRoutes from './ops'; +import config from '@/config/env'; + +export interface ObservabilityOptions { + requestId?: boolean; + logging?: boolean; + metrics?: boolean; +} + +export const observabilityPlugin: PluginFactory = ( + options = {}, +): ToolkitPlugin => { + const { requestId = true, logging = true, metrics = true } = options; + + return { + name: 'observability', + priority: 90, + options, + + register({ app, port }) { + const opsRoutes = createOpsRoutes({ + healthChecks: [ + { name: 'database', check: checkDatabaseHealth() }, + { name: 'cache', check: checkCacheHealth() }, + { name: 'queues', check: checkQueueHealth() }, + { name: 'email', check: checkEmailHealth() }, + { name: 'storage', check: checkStorageHealth() }, + ], + metricsEnabled: config.METRICS_ENABLED, + }); + + const urls = []; + + app.use('/ops', opsRoutes); + urls.push(`http://localhost:${port}/ops/health`); + urls.push(`http://localhost:${port}/ops/readiness`); + + if (requestId) { + app.use(requestIdMiddleware); + } + + if (logging) { + app.use(httpLogger); + } + + if (metrics) { + app.use(metricsMiddleware); + urls.push(`http://localhost:${port}/ops/metrics`); + } + + return urls; + }, + }; +}; + +export default observabilityPlugin; diff --git a/src/plugins/observability/logger.ts b/src/plugins/observability/logger.ts new file mode 100644 index 0000000..d56bc58 --- /dev/null +++ b/src/plugins/observability/logger.ts @@ -0,0 +1,97 @@ +import pino from 'pino'; +import pinoHttp from 'pino-http'; +import type { RequestExtended } from '../../types'; +import { ServerResponse as ResponseHTTP } from 'node:http'; + +const isDevelopment = process.env.NODE_ENV === 'development'; +const logLevel = process.env.LOG_LEVEL || (isDevelopment ? 
'debug' : 'info'); + +export const logger = pino({ + level: logLevel, + transport: isDevelopment + ? { + target: 'pino-pretty', + options: { + colorize: true, + translateTime: 'HH:MM:ss Z', + ignore: 'pid,hostname', + }, + } + : undefined, + formatters: { + level: (label) => { + return { level: label.toUpperCase() }; + }, + }, +}); + +export const httpLogger = pinoHttp({ + logger, + customLogLevel: (_req, res, err) => { + if (res.statusCode >= 500 || err) { + return 'error'; + } + if (res.statusCode >= 400) { + return 'warn'; + } + return 'info'; + }, + customSuccessMessage: (req, res) => { + return `${req.method} ${req.url} ${res.statusCode}`; + }, + customErrorMessage: (req, res, err) => { + return `${req.method} ${req.url} ${res.statusCode} - ${err.message}`; + }, + customAttributeKeys: { + req: 'request', + res: 'response', + err: 'error', + responseTime: 'duration', + }, + serializers: { + req: (req) => { + const extended = req as RequestExtended; + return { + id: extended.id, + method: req.method, + url: req.url, + path: req.path, + headers: { + host: req.headers.host, + 'user-agent': req.headers['user-agent'], + 'x-request-id': req.headers['x-request-id'], + }, + remoteAddress: req.remoteAddress, + remotePort: req.remotePort, + }; + }, + res: (res: unknown) => ({ + statusCode: + res instanceof Response + ? res.status + : res instanceof ResponseHTTP + ? res.statusCode + : 200, + headers: { + 'content-type': + res instanceof Response + ? res.headers.get('content-type') + : res instanceof ResponseHTTP + ? res.getHeader('content-type') + : 'application/json', + 'content-length': + res instanceof Response + ? res.headers.get('content-length') + : res instanceof ResponseHTTP + ? res.getHeader('content-length') + : '100', + }, + }), + }, +}); + +export function createChildLogger(context: Record) { + return logger.child(context, { msgPrefix: `[${context.context}] ` }); +} + +export default logger; diff --git a/src/plugins/observability/metrics.ts b/src/plugins/observability/metrics.ts new file mode 100644 index 0000000..76c1cf9 --- /dev/null +++ b/src/plugins/observability/metrics.ts @@ -0,0 +1,97 @@ +import { + Registry, + Counter, + Histogram, + Gauge, + collectDefaultMetrics, +} from 'prom-client'; + +export class MetricsCollector { + public readonly register: Registry; + private httpRequestDuration: Histogram; + private httpRequestTotal: Counter; + private httpRequestsInProgress: Gauge; + private cacheHits: Counter; + private cacheMisses: Counter; + + constructor() { + this.register = new Registry(); + + collectDefaultMetrics({ + register: this.register, + prefix: 'nodejs_', + }); + + this.httpRequestDuration = new Histogram({ + name: 'http_request_duration_seconds', + help: 'Duration of HTTP requests in seconds', + labelNames: ['method', 'route', 'status_code'], + buckets: [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5], + registers: [this.register], + }); + + this.httpRequestTotal = new Counter({ + name: 'http_requests_total', + help: 'Total number of HTTP requests', + labelNames: ['method', 'route', 'status_code'], + registers: [this.register], + }); + + this.httpRequestsInProgress = new Gauge({ + name: 'http_requests_in_progress', + help: 'Number of HTTP requests currently in progress', + labelNames: ['method', 'route'], + registers: [this.register], + }); + + this.cacheHits = new Counter({ + name: 'cache_hits_total', + help: 'Total number of cache hits', + labelNames: ['key'], + registers: [this.register], + }); + + this.cacheMisses = new Counter({ + name: 'cache_misses_total', + help: 
'Total number of cache misses', + labelNames: ['key'], + registers: [this.register], + }); + } + + recordRequest( + method: string, + route: string, + statusCode: number, + duration: number, + ): void { + const labels = { method, route, status_code: statusCode.toString() }; + + this.httpRequestDuration.observe(labels, duration / 1000); + this.httpRequestTotal.inc(labels); + } + + startRequest(method: string, route: string): void { + this.httpRequestsInProgress.inc({ method, route }); + } + + endRequest(method: string, route: string): void { + this.httpRequestsInProgress.dec({ method, route }); + } + + incrementCacheHits(key: string): void { + this.cacheHits.inc({ key }); + } + + incrementCacheMisses(key: string): void { + this.cacheMisses.inc({ key }); + } + + async getMetrics(): Promise { + return this.register.metrics(); + } +} + +export const metricsCollector = new MetricsCollector(); + +export default metricsCollector; diff --git a/src/plugins/observability/observability.middleware.ts b/src/plugins/observability/observability.middleware.ts new file mode 100644 index 0000000..43108c1 --- /dev/null +++ b/src/plugins/observability/observability.middleware.ts @@ -0,0 +1,34 @@ +import type { Request, Response, NextFunction } from 'express'; +import metricsCollector from './metrics'; + +export function metricsMiddleware( + req: Request, + res: Response, + next: NextFunction, +): void { + const route = req.route?.path || req.path || 'unknown'; + const method = req.method; + + const start = Date.now(); + let ended = false; + + metricsCollector.startRequest(method, route); + + res.on('finish', () => { + const duration = Date.now() - start; + metricsCollector.recordRequest(method, route, res.statusCode, duration); + if (!ended) { + metricsCollector.endRequest(method, route); + ended = true; + } + }); + + res.on('close', () => { + if (!res.writableEnded && !ended) { + metricsCollector.endRequest(method, route); + ended = true; + } + }); + + next(); +} diff --git a/src/plugins/observability/ops.ts b/src/plugins/observability/ops.ts new file mode 100644 index 0000000..10887e1 --- /dev/null +++ b/src/plugins/observability/ops.ts @@ -0,0 +1,82 @@ +import { Router, type Request, type Response } from 'express'; +import { metricsCollector } from './metrics'; + +export type HealthCheck = { + name: string; + check: () => Promise; +}; + +export interface OpsRoutesOptions { + healthChecks?: HealthCheck[]; + metricsEnabled?: boolean; +} + +export function createOpsRoutes(options: OpsRoutesOptions = {}): Router { + const router = Router(); + const { healthChecks = [], metricsEnabled = true } = options; + + router.get('/health', async (_req: Request, res: Response) => { + const status = { + status: 'ok', + timestamp: new Date().toISOString(), + uptime: process.uptime(), + }; + + res.status(200).json(status); + }); + + router.get('/readiness', async (_req: Request, res: Response) => { + try { + const checks = await Promise.all( + healthChecks.map(async ({ name, check }) => { + try { + const healthy = await check(); + return { name, healthy, error: null }; + } catch (error) { + return { + name, + healthy: false, + error: error instanceof Error ? error.message : 'Unknown error', + }; + } + }), + ); + + const allHealthy = checks.every((c) => c.healthy); + const status = { + status: allHealthy ? 'ready' : 'not_ready', + timestamp: new Date().toISOString(), + checks, + }; + + res.status(allHealthy ? 
200 : 503).json(status); + } catch (error) { + res.status(503).json({ + status: 'error', + timestamp: new Date().toISOString(), + error: error instanceof Error ? error.message : 'Unknown error', + }); + } + }); + + if (metricsEnabled) { + router.get('/metrics', async (_req: Request, res: Response) => { + try { + const metrics = await metricsCollector.getMetrics(); + res.set('Content-Type', metricsCollector.register.contentType); + res.send(metrics); + } catch (error) { + res.status(500).json({ + error: + error instanceof Error + ? error.message + : 'Failed to collect metrics', + }); + } + }); + } + + return router; +} + +export default createOpsRoutes; diff --git a/src/plugins/observability/requestId.middleware.ts b/src/plugins/observability/requestId.middleware.ts new file mode 100644 index 0000000..8f5949d --- /dev/null +++ b/src/plugins/observability/requestId.middleware.ts @@ -0,0 +1,19 @@ +import type { Request, Response, NextFunction } from 'express'; +import { nanoid } from 'nanoid'; + +export function requestIdMiddleware( + req: Request, + res: Response, + next: NextFunction, +): void { + const requestId = + (req.headers['x-request-id'] as string) || + (req.headers['x-correlation-id'] as string) || + nanoid(); + + (req as Request & { id?: string }).id = requestId; + + res.setHeader('X-Request-ID', requestId); + + next(); +} diff --git a/src/plugins/realtime/handlers.ts b/src/plugins/realtime/handlers.ts new file mode 100644 index 0000000..54b7ae9 --- /dev/null +++ b/src/plugins/realtime/handlers.ts @@ -0,0 +1,49 @@ +import type { Server } from 'socket.io'; +import logger from '@/plugins/observability/logger'; + +export function registerRealtimeHandlers(io: Server) { + io.on('connection', (socket) => { + logger.info({ id: socket.id }, 'socket connected'); + + socket.on('ping', (data) => { + socket.emit('pong', data ?? 'pong'); + }); + + // Rooms: join a room + socket.on('room:join', (payload) => { + const room = typeof payload === 'string' ? payload : payload?.room; + if (!room || typeof room !== 'string') return; + const trimmed = room.trim(); + if (!trimmed) return; + socket.join(trimmed); + socket.emit('room:joined', { room: trimmed }); + }); + + // Rooms: leave a room + socket.on('room:leave', (payload) => { + const room = typeof payload === 'string' ? 
payload : payload?.room; + if (!room || typeof room !== 'string') return; + const trimmed = room.trim(); + if (!trimmed) return; + socket.leave(trimmed); + socket.emit('room:left', { room: trimmed }); + }); + + // Rooms: broadcast to a room (from client via server) + socket.on('room:broadcast', (payload) => { + const room = payload?.room; + const event = payload?.event; + if (!room || typeof room !== 'string') return; + if (!event || typeof event !== 'string') return; + const trimmed = room.trim(); + if (!trimmed) return; + io.to(trimmed).emit(event, payload?.payload); + }); + + socket.on('disconnect', (reason) => { + logger.info({ id: socket.id, reason }, 'socket disconnected'); + }); + }); +} + +export default registerRealtimeHandlers; diff --git a/src/plugins/realtime/index.ts b/src/plugins/realtime/index.ts new file mode 100644 index 0000000..9710460 --- /dev/null +++ b/src/plugins/realtime/index.ts @@ -0,0 +1,81 @@ +import type { PluginFactory } from '../types'; +import config from '@/config/env'; +import logger from '@/plugins/observability/logger'; +import { registerRealtimeHandlers } from './handlers'; +import type { Server as IServer } from 'node:http'; +import { Server as RealtimeServer } from 'socket.io'; +import path from 'node:path'; + +export type RealtimeOptions = { + path?: string; + transports?: Array<'websocket' | 'polling'>; + cors?: { + origin: string | string[]; + methods?: string[]; + credentials?: boolean; + }; +}; + +export const setupSocketIo = ( + server: IServer, + options: RealtimeOptions = {}, +): RealtimeServer => { + const io = new RealtimeServer(server, { + path: options.path ?? '/socket.io', + transports: options.transports ?? ['websocket', 'polling'], + cors: { + origin: options.cors?.origin ?? '*', + methods: options.cors?.methods ?? ['GET', 'POST'], + credentials: options.cors?.credentials ?? true, + }, + }); + + return io; +}; + +export const realtimePlugin: PluginFactory = (opts = {}) => { + let io: RealtimeServer | undefined; + + return { + name: 'realtime', + priority: 85, + options: opts, + + register({ app, server, port }) { + + app.get(`/realtime`, (_req, res) => { + const realtimePath = path.join( + process.cwd(), + 'public', + 'realtime', + 'index.html', + ); + res.sendFile(realtimePath); + }); + + if (!server) { + logger.warn('Realtime plugin: HTTP server not available'); + return; + } + + io = setupSocketIo(server, { + path: opts.path ?? 
'/socket.io', + transports: opts.transports, + cors: { + origin: [config.CLIENT_SIDE_URL], + methods: ['GET', 'POST'], + credentials: true, + }, + }); + + app.locals.io = io; + + registerRealtimeHandlers(io); + logger.info('Realtime server initialized'); + + return [`http://localhost:${port}/socket.io`, `http://localhost:${port}/realtime`]; + }, + }; +}; + +export default realtimePlugin; diff --git a/src/plugins/security/index.ts b/src/plugins/security/index.ts new file mode 100644 index 0000000..6261693 --- /dev/null +++ b/src/plugins/security/index.ts @@ -0,0 +1,18 @@ +import type { ToolkitPlugin, PluginFactory } from '@/plugins/types'; +import { applySecurity, type SecurityOptions } from './security'; + +export const securityPlugin: PluginFactory<SecurityOptions> = ( + options = {}, +): ToolkitPlugin<SecurityOptions> => { + return { + name: 'security', + priority: 100, + options, + + register({ app }) { + applySecurity(app, options); + }, + }; +}; + +export default securityPlugin; diff --git a/src/plugins/security/security.ts b/src/plugins/security/security.ts new file mode 100644 index 0000000..b73f220 --- /dev/null +++ b/src/plugins/security/security.ts @@ -0,0 +1,75 @@ +import cors from 'cors'; +import helmet from 'helmet'; +import rateLimit from 'express-rate-limit'; +import type { Application } from 'express'; + +export interface SecurityOptions { + corsEnabled?: boolean; + corsOrigins?: string | string[]; + corsCredentials?: boolean; + + helmetEnabled?: boolean; + helmetOptions?: Parameters<typeof helmet>[0]; + + rateLimitEnabled?: boolean; + rateLimitWindowMs?: number; + rateLimitMax?: number; + rateLimitMessage?: string; + + trustProxy?: boolean; +} + +export function applySecurity( + app: Application, + options: SecurityOptions = {}, +): void { + const { + corsEnabled = true, + corsOrigins = '*', + corsCredentials = false, + + helmetEnabled = true, + helmetOptions = {}, + + rateLimitEnabled = true, + rateLimitWindowMs = 15 * 60 * 1000, // 15 minutes + rateLimitMax = 1000, // 1000 requests per window + rateLimitMessage = 'Too many requests from this IP, please try again later.', + + trustProxy = false, + } = options; + + if (trustProxy) { + app.set('trust proxy', true); + } + + if (helmetEnabled) { + app.use(helmet(helmetOptions)); + } + + if (corsEnabled) { + const corsOptions = { + origin: corsOrigins, + credentials: corsCredentials, + optionsSuccessStatus: 200, + }; + app.use(cors(corsOptions)); + } + + if (rateLimitEnabled) { + const limiter = rateLimit({ + windowMs: rateLimitWindowMs, + max: rateLimitMax, + message: rateLimitMessage, + standardHeaders: true, + legacyHeaders: false, + skip: (req) => { + const healthPaths = ['/health', '/readiness', '/metrics']; + return healthPaths.some((path) => req.path.endsWith(path)); + }, + }); + app.use(limiter); + } +} + +export default applySecurity; diff --git a/src/plugins/types.ts b/src/plugins/types.ts new file mode 100644 index 0000000..4a51acd --- /dev/null +++ b/src/plugins/types.ts @@ -0,0 +1,28 @@ +import type { Application } from 'express'; +import type { Server } from 'http'; + +export interface AppContext { + app: Application; + server: Server; + config: Record<string, unknown>; + port: number; +} + +export interface ToolkitPlugin<TOptions = Record<string, unknown>> { + name: string; + priority?: number; + options?: TOptions; + + register(context: AppContext): Promise<void | string[]> | void | string[]; + + onShutdown?: () => Promise<void | string[]> | void | string[]; +} + +export type PluginFactory<TOptions = Record<string, unknown>> = ( + options?: TOptions, +) => ToolkitPlugin<TOptions>; + +export interface PluginRegistration { + plugin: ToolkitPlugin; + enabled: boolean; +} diff --git
a/src/queues/email.queue.ts b/src/queues/email.queue.ts index ab081b4..8b11be5 100644 --- a/src/queues/email.queue.ts +++ b/src/queues/email.queue.ts @@ -1,25 +1,25 @@ import { - type SendResetPasswordTypePayload, - sendResetPasswordEmail, -} from "../email/email.service"; -import logger from "../lib/logger.service"; -import { Queue } from "../lib/queue.server"; + type SendResetPasswordTypePayload, + sendResetPasswordEmail, +} from '../email/email.service'; +import logger from '@/plugins/observability/logger'; +import { Queue } from '../lib/queue'; export const ResetPasswordQueue = Queue( - "ResetPasswordQueue", - async (job) => { - try { - const { data } = job; + 'ResetPasswordQueue', + async (job) => { + try { + const { data } = job; - await sendResetPasswordEmail({ - ...data, - }); + await sendResetPasswordEmail({ + ...data, + }); - return true; - } catch (err) { - if (err instanceof Error) logger.error(err.message); + return true; + } catch (err) { + if (err instanceof Error) logger.error(err.message); - throw err; - } - }, + throw err; + } + }, ); diff --git a/src/queues/session-cleanup.queue.ts b/src/queues/session-cleanup.queue.ts new file mode 100644 index 0000000..57055b3 --- /dev/null +++ b/src/queues/session-cleanup.queue.ts @@ -0,0 +1,81 @@ +import { Queue } from '../lib/queue'; +import { + getSessionManager, + SessionManager, +} from '../modules/auth/session/session.manager'; +import { createChildLogger } from '@/plugins/observability/logger'; +import config from '../config/env'; + +const logger = createChildLogger({ context: 'SessionCleanupQueue' }); + +interface SessionCleanupPayload { + type: 'full' | 'revoked' | 'expired'; +} + +export const SessionCleanupQueue = Queue( + 'SessionCleanupQueue', + async (job) => { + if (!config.SET_SESSION) { + logger.debug('Session management disabled, skipping cleanup'); + return { skipped: true }; + } + + let sessionManager: SessionManager | null = null; + + try { + const { data } = job; + try { + sessionManager = getSessionManager(); + } catch { + return { skipped: true, reason: 'Session manager not initialized' }; + } + + logger.info({ type: data.type }, 'Starting session cleanup'); + + const startTime = Date.now(); + const stats = await sessionManager.cleanupSessions(data.type); + const duration = Date.now() - startTime; + + logger.info( + { + ...stats, + duration, + type: data.type, + }, + 'Session cleanup completed', + ); + + return stats; + } catch (err) { + logger.error({ err }, 'Session cleanup failed'); + throw err; + } + }, +); + +export async function scheduleSessionCleanup(): Promise { + if (!config.SET_SESSION || !config.SESSION_CLEANUP_ENABLED) { + logger.info('Session cleanup disabled, skipping schedule'); + return; + } + + try { + await SessionCleanupQueue.add( + 'recurring-cleanup', + { type: 'full' }, + { + repeat: { + pattern: config.SESSION_CLEANUP_CRON, + }, + jobId: 'session-cleanup-recurring', + }, + ); + + logger.info( + { pattern: config.SESSION_CLEANUP_CRON }, + 'Session cleanup job scheduled', + ); + } catch (err) { + logger.error({ err }, 'Failed to schedule session cleanup job'); + } +} diff --git a/src/routes/routes.ts b/src/routes/routes.ts index ce98d71..bc7d0e1 100644 --- a/src/routes/routes.ts +++ b/src/routes/routes.ts @@ -1,17 +1,21 @@ -import express from "express"; -import authRouter, { AUTH_ROUTER_ROOT } from "../modules/auth/auth.router"; +import express from 'express'; +import authRouter, { AUTH_ROUTER_ROOT } from '../modules/auth/auth.router'; +import blogRouter, { BLOG_ROUTER_ROOT } from 
'../modules/blog/blog.router'; +import userRouter, { USER_ROUTER_ROOT } from '../modules/user/user.router'; +import uploadRouter, { + UPLOAD_ROUTER_ROOT, +} from '../modules/upload/upload.router'; import healthCheckRouter, { - HEALTH_ROUTER_ROOT, -} from "../healthcheck/healthcheck.routes"; -import userRouter, { USER_ROUTER_ROOT } from "../modules/user/user.router"; -import uploadRouter, { UPLOAD_ROUTER_ROOT } from "../upload/upload.router"; + HEALTH_ROUTER_ROOT, +} from '../modules/healthcheck/healthcheck.routes'; const router = express.Router(); router.use(HEALTH_ROUTER_ROOT, healthCheckRouter); router.use(USER_ROUTER_ROOT, userRouter); router.use(AUTH_ROUTER_ROOT, authRouter); +router.use(BLOG_ROUTER_ROOT, blogRouter); router.use(UPLOAD_ROUTER_ROOT, uploadRouter); export default router; diff --git a/src/seeders/registry.ts b/src/seeders/registry.ts new file mode 100644 index 0000000..a2c8bdf --- /dev/null +++ b/src/seeders/registry.ts @@ -0,0 +1,4 @@ +import type { Seeder } from './types'; +import { UserSeeder } from '../modules/user/seeders/UserSeeder'; + +export const seeders: Seeder[] = [UserSeeder]; diff --git a/src/seeders/runner.ts b/src/seeders/runner.ts new file mode 100644 index 0000000..58f85be --- /dev/null +++ b/src/seeders/runner.ts @@ -0,0 +1,148 @@ +import mongoose from 'mongoose'; +import { connectDatabase, disconnectDatabase } from '../lib/database'; +import config from '../config/env'; +import logger from '@/plugins/observability/logger'; +import type { Seeder, SeederContext } from './types'; + +type RunOptions = { + group?: string; + only?: string[]; + modules?: string[]; // reserved for future module filtering + fresh?: boolean; + force?: boolean; + dryRun?: boolean; + seed?: number; + transaction?: boolean; // global override +}; + +export const runSeeders = async ( + seeders: Seeder[], + options: RunOptions = {}, +): Promise => { + const group = options.group ?? process.env.SEED_DEFAULT_GROUP ?? 'dev'; + const seed = options.seed ?? (Number(process.env.SEED_SEED) || 1); + const dryRun = Boolean(options.dryRun ?? false); + const useTransactions = options.transaction ?? true; + const fresh = Boolean(options.fresh ?? false); + const force = Boolean(options.force ?? false); + + if (process.env.NODE_ENV === 'production' && !force) { + throw new Error( + 'Seeding in production is blocked. Use --force or set ALLOW_SEED_IN_PROD=true.', + ); + } + + // Filter by group and explicit selection + let list = seeders.filter((s) => !s.groups || s.groups.includes(group)); + if (options.only && options.only.length) { + const onlySet = new Set(options.only.map((n) => n.toLowerCase())); + list = list.filter((s) => onlySet.has(s.name.toLowerCase())); + } + + // Topological sort according to dependsOn + const byName = new Map(list.map((s) => [s.name, s] as const)); + const inDegree = new Map(); + const edges = new Map(); + for (const s of list) { + inDegree.set(s.name, 0); + edges.set(s.name, []); + } + for (const s of list) { + for (const dep of s.dependsOn ?? []) { + if (!byName.has(dep)) { + throw new Error( + `Seeder ${s.name} depends on missing seeder ${dep} in group ${group}`, + ); + } + edges.get(dep)!.push(s.name); + inDegree.set(s.name, (inDegree.get(s.name) ?? 0) + 1); + } + } + const queue: string[] = []; + for (const [name, deg] of inDegree) if (deg === 0) queue.push(name); + const ordered: Seeder[] = []; + while (queue.length) { + const n = queue.shift()!; + ordered.push(byName.get(n)!); + for (const m of edges.get(n) ?? []) { + const d = (inDegree.get(m) ?? 
0) - 1; + inDegree.set(m, d); + if (d === 0) queue.push(m); + } + } + if (ordered.length !== list.length) { + throw new Error('Circular dependency detected among seeders'); + } + + // Connect DB + await connectDatabase(); + + try { + const db = mongoose.connection; + + const refs = new Map(); + const ctx: SeederContext = { + db, + config, + logger, + refs: { + set: (k, v) => refs.set(k, v), + get: (k: string) => refs.get(k) as T, + has: (k) => refs.has(k), + keys: () => Array.from(refs.keys()), + }, + env: { group, dryRun, seed, now: new Date() }, + }; + + // Fresh: drop involved collections + if (fresh) { + const toDrop = new Set(); + for (const s of ordered) + for (const c of s.collections ?? []) toDrop.add(c); + if (toDrop.size) { + logger.warn( + `Fresh mode: dropping collections: ${Array.from(toDrop).join(', ')}`, + ); + for (const coll of toDrop) { + try { + const exists = + (await db.db!.listCollections({ name: coll }).toArray()).length > + 0; + if (exists && !dryRun) await db.dropCollection(coll); + } catch (e) { + logger.warn( + `Failed to drop collection ${coll}: ${(e as Error).message}`, + ); + } + } + } + } + + // Execute seeders + for (const seeder of ordered) { + const shouldTx = seeder.transaction ?? true; + logger.info(`→ Running ${seeder.name} (group=${group})`); + + if (dryRun) { + logger.info(`[dry-run] Skipping execution of ${seeder.name}`); + continue; + } + + if (useTransactions && shouldTx) { + const session = await db.startSession(); + try { + await session.withTransaction(async () => { + await seeder.run(ctx); + }); + } finally { + await session.endSession(); + } + } else { + await seeder.run(ctx); + } + logger.info(`✓ Completed ${seeder.name}`); + } + } finally { + await disconnectDatabase(); + } +}; diff --git a/src/seeders/types.ts b/src/seeders/types.ts new file mode 100644 index 0000000..e1968a6 --- /dev/null +++ b/src/seeders/types.ts @@ -0,0 +1,30 @@ +import type mongoose from 'mongoose'; +import type config from '../config/env'; +import logger from '@/plugins/observability/logger'; + +export type SeederContext = { + db: mongoose.Connection; + config: typeof config; + logger: typeof logger; + refs: { + set: (key: string, value: unknown) => void; + get: (key: string) => T; + has: (key: string) => boolean; + keys: () => string[]; + }; + env: { + group: string; + dryRun: boolean; + seed: number; + now: Date; + }; +}; + +export type Seeder = { + name: string; + run: (ctx: SeederContext) => Promise; + dependsOn?: string[]; + groups?: string[]; + transaction?: boolean; // default: true + collections?: string[]; // for --fresh drop targeting +}; diff --git a/src/types.ts b/src/types.ts index 9eddd68..09e1fb3 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,32 +1,67 @@ -import type { Request, Response } from "express"; -import type { Server } from "socket.io"; -import type { AnyZodObject, ZodEffects, ZodSchema } from "zod"; -import type { JwtPayload } from "./utils/auth.utils"; +import type { Request, Response } from 'express'; +import type { Server } from 'socket.io'; +import type { AnyZodObject, ZodEffects, ZodSchema } from 'zod'; +import type { JwtPayload } from '@/utils/jwt.utils'; +import { SessionRecord } from '@/modules/auth/session/session.types'; export type ZodObjectWithEffect = - | AnyZodObject - | ZodEffects; + | AnyZodObject + | ZodEffects; -export interface GoogleCallbackQuery { - code: string; - error?: string; -} +/** + * Represents a file parsed by formidable from multipart/form-data requests. + * Used for type-safe file handling in controllers. 
+ */ +export type FormFile = { + /** Absolute path to the temporary file on disk */ + filepath: string; + /** Original filename from the client */ + originalFilename?: string | null; + /** MIME type of the file */ + mimetype?: string | null; + /** File size in bytes */ + size: number; + /** Hash of the file content (if enabled in formidable options) */ + hash?: string | null; + /** Last modified date of the file */ + lastModifiedDate?: Date | null; +}; export type RequestZodSchemaType = { - params?: ZodObjectWithEffect; - query?: ZodObjectWithEffect; - body?: ZodSchema; + params?: ZodObjectWithEffect; + query?: ZodObjectWithEffect; + body?: ZodSchema; }; export interface RequestExtended extends Request { - user: JwtPayload; - io: Server; + user: JwtPayload; + io: Server; + session?: SessionRecord; + file?: FormFile; + files?: Record<string, FormFile | FormFile[]>; +} + +export type ResponseSchemaEntry = { + schema: ZodSchema; + contentType?: string; + description?: string; +}; + +export interface ResponseLocals extends Record<string, unknown> { + validateSchema?: ZodSchema; + responseSchemas?: Map<number, ResponseSchemaEntry>; +} + +export interface ResponseExtended<T extends Record<string, unknown> = Record<string, unknown>> extends Response { + ok?: (payload: T) => void; + created?: (payload: T) => void; + noContent?: () => void; } -export interface ResponseExtended extends Response { - locals: { - validateSchema?: ZodSchema; - }; - jsonValidate: Response["json"]; - sendValidate: Response["send"]; +// Extend Express Request globally to include formidable file properties +declare module "express" { + interface Request { + file?: FormFile; + files?: Record<string, FormFile | FormFile[]>; + } } diff --git a/src/upload/upload.controller.ts b/src/upload/upload.controller.ts deleted file mode 100644 index c63fdb7..0000000 --- a/src/upload/upload.controller.ts +++ /dev/null @@ -1,24 +0,0 @@ -import type { Request, Response } from 'express'; -import type { UserType } from '../modules/user/user.dto'; -import { updateUser } from '../modules/user/user.services'; -import { errorResponse, successResponse } from '../utils/api.utils'; - -export const handleProfileUpload = async (req: Request, res: Response) => { - try { - const file = req.file; - - const currentUser = req.user as UserType; - - if ((file && !('location' in file)) || !file) { - return errorResponse(res, 'File not uploaded, Please try again'); - } - - const user = await updateUser(String(currentUser._id), { - avatar: String(file.location), - }); - - return successResponse(res, 'Profile picture has been uploaded', user); - } catch (err) { - return errorResponse(res, (err as Error).message); - } -}; diff --git a/src/upload/upload.router.ts b/src/upload/upload.router.ts deleted file mode 100644 index 15ae91a..0000000 --- a/src/upload/upload.router.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { z } from 'zod'; -import { canAccess } from '../middlewares/can-access.middleware'; -import { uploadProfile } from '../middlewares/multer-s3.middleware'; -import MagicRouter from '../openapi/magic-router'; -import { zFile } from '../openapi/zod-extend'; -import { handleProfileUpload } from './upload.controller'; - -export const UPLOAD_ROUTER_ROOT = '/upload'; - -const uploadRouter = new MagicRouter(UPLOAD_ROUTER_ROOT); - -uploadRouter.post( - '/profile', - { - requestType: { body: z.object({ avatar: zFile() }) }, - contentType: 'multipart/form-data', - }, - canAccess(), - uploadProfile, - handleProfileUpload, -); - -export default uploadRouter.getRouter(); diff --git a/src/utils/api.utils.ts b/src/utils/api.utils.ts deleted file mode 100644 index 4bee0fc..0000000 --- a/src/utils/api.utils.ts +++ /dev/null @@
-1,70 +0,0 @@ -import type { Response } from "express"; -import { StatusCodes } from "http-status-codes"; -import config from "../config/config.service"; -import logger from "../lib/logger.service"; -import type { ResponseExtended } from "../types"; - -export const errorResponse = ( - res: ResponseExtended | Response, - message?: string, - statusCode?: StatusCodes, - payload?: unknown, - stack?: string, -): void => { - try { - if ("jsonValidate" in res) { - (res as ResponseExtended) - .status(statusCode ?? StatusCodes.BAD_REQUEST) - .jsonValidate({ - success: false, - message: message, - data: payload, - stack: stack, - }); - } else { - (res as ResponseExtended) - .status(statusCode ?? StatusCodes.BAD_REQUEST) - .json({ - success: false, - message: message, - data: payload, - stack: stack, - }); - } - - return; - } catch (err) { - logger.error(err); - } -}; - -export const successResponse = ( - res: ResponseExtended | Response, - message?: string, - payload?: Record, - statusCode: StatusCodes = StatusCodes.OK, -): void => { - try { - if ("jsonValidate" in res) { - (res as ResponseExtended) - .status(statusCode) - .jsonValidate({ success: true, message: message, data: payload }); - } else { - (res as ResponseExtended) - .status(statusCode) - .json({ success: true, message: message, data: payload }); - } - - return; - } catch (err) { - logger.error(err); - } -}; - -export const generateResetPasswordLink = (token: string) => { - return `${config.CLIENT_SIDE_URL}/reset-password?token=${token}`; -}; - -export const generateSetPasswordLink = (token: string) => { - return `${config.CLIENT_SIDE_URL}/set-password?token=${token}`; -}; diff --git a/src/utils/auth.utils.ts b/src/utils/auth.utils.ts deleted file mode 100644 index f1e09be..0000000 --- a/src/utils/auth.utils.ts +++ /dev/null @@ -1,152 +0,0 @@ -import crypto from "node:crypto"; -import argon2 from "argon2"; -import { JsonWebTokenError, sign, verify } from "jsonwebtoken"; -import config from "../config/config.service"; -import type { RoleType } from "../enums"; -import logger from "../lib/logger.service"; - -export interface GoogleTokenResponse { - access_token: string; - expires_in: number; - id_token: string; - refresh_token?: string; - scope: string; - token_type: string; -} - -export interface GoogleTokensRequestParams { - code: string; -} - -export type JwtPayload = { - sub: string; - email?: string | null; - phoneNo?: string | null; - username: string; - role: RoleType; -}; - -export type PasswordResetTokenPayload = { - email: string; - userId: string; -}; - -export type SetPasswordTokenPayload = { - email: string; - userId: string; -}; - -export const hashPassword = async (password: string): Promise => { - return argon2.hash(password); -}; - -export const compareHash = async ( - hashed: string, - plainPassword: string, -): Promise => { - return argon2.verify(hashed, plainPassword); -}; -export const signToken = async (payload: JwtPayload): Promise => { - return sign(payload, String(config.JWT_SECRET), { - expiresIn: Number(config.JWT_EXPIRES_IN) * 1000, - }); -}; - -export const signPasswordResetToken = async ( - payload: PasswordResetTokenPayload, -) => { - return sign(payload, String(config.JWT_SECRET), { - expiresIn: config.PASSWORD_RESET_TOKEN_EXPIRES_IN * 1000, - }); -}; - -export const signSetPasswordToken = async ( - payload: SetPasswordTokenPayload, -) => { - return sign(payload, String(config.JWT_SECRET), { - expiresIn: config.SET_PASSWORD_TOKEN_EXPIRES_IN, - }); -}; - -export const verifyToken = async < - T extends 
JwtPayload | PasswordResetTokenPayload | SetPasswordTokenPayload, ->( - token: string, -): Promise => { - try { - return verify(token, String(config.JWT_SECRET)) as T; - } catch (err) { - if (err instanceof Error) { - throw new Error(err.message); - } - - if (err instanceof JsonWebTokenError) { - throw new Error(err.message); - } - - logger.error("verifyToken", { err }); - throw err; - } -}; - -export const generateRandomPassword = (length = 16): string => { - return crypto.randomBytes(length).toString("hex"); -}; -export const fetchGoogleTokens = async ( - params: GoogleTokensRequestParams, -): Promise => { - if ( - !config.GOOGLE_CLIENT_ID || - !config.GOOGLE_CLIENT_SECRET || - !config.GOOGLE_REDIRECT_URI - ) { - throw new Error("Google credentials are not set"); - } - - const url = "https://oauth2.googleapis.com/token"; - const response = await fetch(url, { - method: "POST", - headers: { "Content-Type": "application/x-www-form-urlencoded" }, - body: new URLSearchParams({ - code: params.code, - client_id: config.GOOGLE_CLIENT_ID, - client_secret: config.GOOGLE_CLIENT_SECRET, - redirect_uri: config.GOOGLE_REDIRECT_URI, - grant_type: "authorization_code", - }), - }); - - if (!response.ok) { - throw new Error("Failed to exchange code for tokens"); - } - - const data: GoogleTokenResponse = await response.json(); - return data; -}; -export interface GoogleUserInfo { - id: string; - email: string; - verified_email: boolean; - name: string; - given_name: string; - family_name: string; - picture: string; - locale: string; -} - -export const getUserInfo = async (accessToken: string) => { - const userInfoResponse = await fetch( - "https://www.googleapis.com/oauth2/v2/userinfo", - { - headers: { Authorization: `Bearer ${accessToken}` }, - }, - ); - if (!userInfoResponse.ok) { - throw new Error("Error fetching user info"); - } - return userInfoResponse.json(); -}; - -export const generateOTP = (length = 6): string => { - return crypto.randomBytes(length).toString("hex").slice(0, length); -}; diff --git a/src/utils/boolean.utils.ts b/src/utils/boolean.utils.ts new file mode 100644 index 0000000..1d7aee7 --- /dev/null +++ b/src/utils/boolean.utils.ts @@ -0,0 +1,38 @@ +const TRUTHY_VALUES = ['true', 't', '1']; +const FALSY_VALUES = ['false', 'f', '0']; + +export const transformableToBooleanError = `Value must be one of ${TRUTHY_VALUES.join(', ')} or ${FALSY_VALUES.join(', ')} (case-insensitive)`; + +/** + * Convert a string to a boolean value + * Supports: 'true', 't', '1' (case-insensitive) -> true + * 'false', 'f', '0' (case-insensitive) -> false + * @param value - String value to convert + * @returns Boolean value + * @throws Error if value cannot be converted to boolean + */ +export const stringToBoolean = (value: string): boolean => { + const normalized = value.trim().toLowerCase(); + + if (TRUTHY_VALUES.includes(normalized)) { + return true; + } + + if (FALSY_VALUES.includes(normalized)) { + return false; + } + + throw new Error( + `Value "${value}" is not transformable to boolean. 
${transformableToBooleanError}`, + ); +}; + +/** + * Check if a string value can be converted to a boolean + * @param value - String value to check + * @returns True if value can be converted to boolean, false otherwise + */ +export const isTransformableToBoolean = (value: string): boolean => { + const normalized = value.trim().toLowerCase(); + return TRUTHY_VALUES.includes(normalized) || FALSY_VALUES.includes(normalized); +}; diff --git a/src/utils/common.utils.ts b/src/utils/common.utils.ts deleted file mode 100644 index f4929fd..0000000 --- a/src/utils/common.utils.ts +++ /dev/null @@ -1,85 +0,0 @@ -import path from "node:path"; -import { customAlphabet } from "nanoid"; -import config from "../config/config.service"; - -export const customNanoId = customAlphabet("0123456789", 4); - -const transformableToBooleanTruthy = ["true", "TRUE", "t", "T", "1"]; -const transformableToBooleanFalsy = ["false", "FALSE", "f", "F", "0"]; - -export const transformableToBooleanError = `Value must be one of ${transformableToBooleanTruthy.join(", ")} or ${transformableToBooleanFalsy.join(", ")}`; - -export const stringToBoolean = (value: string): boolean => { - if (transformableToBooleanTruthy.includes(value)) { - return true; - } - - if (transformableToBooleanFalsy.includes(value)) { - return false; - } - - throw new Error("Value is not transformable to boolean"); -}; - -export const isTransformableToBoolean = (value: string) => { - if ( - !transformableToBooleanTruthy.includes(value) && - !transformableToBooleanFalsy.includes(value) - ) { - return false; - } - - return true; -}; - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export const sanitizeRecord = >( - record: T, -): T => { - try { - return Object.fromEntries( - Object.entries(record).filter( - ([_, value]) => value !== null && value !== undefined, - ), - ) as T; - } catch { - return record; - } -}; - -export const checkRecordForEmptyArrays = >( - record: T, -): T => { - try { - return Object.fromEntries( - Object.entries(record).filter( - ([_, value]) => Array.isArray(value) && !!value.length, - ), - ) as T; - } catch { - return record; - } -}; - -export const generateRandomNumbers = (length: number): string => { - let id = ""; - - if (config.STATIC_OTP) { - id = "1234"; - } else { - id = customNanoId(length); - } - - return id; -}; - -export const checkFiletype = (file: Express.Multer.File): boolean => { - const filetypes = /jpeg|jpg|png/; - - const checkExtname = filetypes.test( - path.extname(file.originalname).toLowerCase(), - ); - const checkMimetype = filetypes.test(file.mimetype); - - return checkExtname && checkMimetype; -}; diff --git a/src/utils/getPaginator.ts b/src/utils/getPaginator.ts deleted file mode 100644 index 4409507..0000000 --- a/src/utils/getPaginator.ts +++ /dev/null @@ -1,40 +0,0 @@ -export type GetPaginatorReturnType = { - skip: number; - limit: number; - currentPage: number; - pages: number; - hasNextPage: boolean; - totalRecords: number; - pageSize: number; -}; - -export const getPaginator = ( - limitParam: number, - pageParam: number, - totalRecords: number, -): GetPaginatorReturnType => { - let skip = pageParam; - const limit = limitParam; - - if (pageParam <= 1) { - skip = 0; - } else { - skip = limit * (pageParam - 1); - } - - const currentPage = Math.max(1, pageParam as number); - - const pages = Math.ceil(totalRecords / Number(limit)); - - const hasNextPage = pages > currentPage; - - return { - skip, - limit, - currentPage, - pages, - hasNextPage, - totalRecords, - pageSize: limit, - }; -}; 
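Note: the boolean helpers and `getPaginator` deleted above (from `src/utils/common.utils.ts` and `src/utils/getPaginator.ts`) are superseded by the new `src/utils/boolean.utils.ts` and `src/utils/pagination.utils.ts` modules elsewhere in this diff. A minimal usage sketch of the new boolean helpers (illustrative only, not part of the patch; `raw` is a hypothetical input value):

```ts
import {
  isTransformableToBoolean,
  stringToBoolean,
} from '@/utils/boolean.utils';

const raw = ' TRUE '; // hypothetical query-string flag

// The new helpers trim the input and match case-insensitively.
const enabled = isTransformableToBoolean(raw) ? stringToBoolean(raw) : false; // true

// Anything outside 'true'/'t'/'1' and 'false'/'f'/'0' throws, with
// transformableToBooleanError included in the error message.
// stringToBoolean('yes'); // would throw
```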
diff --git a/src/utils/globalErrorHandler.ts b/src/utils/globalErrorHandler.ts deleted file mode 100644 index 3f3a51e..0000000 --- a/src/utils/globalErrorHandler.ts +++ /dev/null @@ -1,34 +0,0 @@ -import type { NextFunction, Request, Response } from "express"; -import config from "../config/config.service"; -import logger from "../lib/logger.service"; -import type { RequestExtended, ResponseExtended } from "../types"; -import { errorResponse } from "./api.utils"; - -interface CustomError extends Error { - status?: number; - message: string; -} - -export const globalErrorHandler = ( - err: CustomError, - _: RequestExtended | Request, - res: ResponseExtended | Response, - __: NextFunction, -): void => { - const statusCode = err.status || 500; - const errorMessage = err.message || "Internal Server Error"; - - logger.error(`${statusCode}: ${errorMessage}`); - - errorResponse( - res as ResponseExtended, - errorMessage, - statusCode, - err, - config.NODE_ENV === "development" ? err.stack : undefined, - ); - - return; -}; - -export default globalErrorHandler; diff --git a/src/utils/google-oauth.utils.ts b/src/utils/google-oauth.utils.ts new file mode 100644 index 0000000..31f3790 --- /dev/null +++ b/src/utils/google-oauth.utils.ts @@ -0,0 +1,104 @@ +import config from '../config/env'; + +export interface GoogleTokenResponse { + access_token: string; + expires_in: number; + id_token: string; + refresh_token?: string; + scope: string; + token_type: string; +} + +export interface GoogleTokensRequestParams { + code: string; +} + +export interface GoogleUserInfo { + id: string; + email: string; + verified_email: boolean; + name: string; + given_name: string; + family_name: string; + picture: string; + locale: string; +} + +export interface GoogleAuthUrlParams { + clientId: string; + redirectUri: string; + scope?: string; + responseType?: string; +} + +/** + * Exchange Google OAuth authorization code for access tokens + * @param params - Request parameters containing the authorization code + * @returns Google token response with access and refresh tokens + * @throws Error if Google credentials are not configured or token exchange fails + */ +export const fetchGoogleTokens = async ( + params: GoogleTokensRequestParams, +): Promise => { + if ( + !config.GOOGLE_CLIENT_ID || + !config.GOOGLE_CLIENT_SECRET || + !config.GOOGLE_REDIRECT_URI + ) { + throw new Error('Google credentials are not set'); + } + + const url = 'https://oauth2.googleapis.com/token'; + const response = await fetch(url, { + method: 'POST', + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + body: new URLSearchParams({ + code: params.code, + client_id: config.GOOGLE_CLIENT_ID, + client_secret: config.GOOGLE_CLIENT_SECRET, + redirect_uri: config.GOOGLE_REDIRECT_URI, + grant_type: 'authorization_code', + }), + }); + + if (!response.ok) { + throw new Error('Failed to exchange code for tokens'); + } + + const data: GoogleTokenResponse = await response.json(); + return data; +}; + +/** + * Fetch Google user information using an access token + * @param accessToken - Google OAuth access token + * @returns Google user information + * @throws Error if fetching user info fails + */ +export const getUserInfo = async ( + accessToken: string, +): Promise => { + const userInfoResponse = await fetch( + 'https://www.googleapis.com/oauth2/v2/userinfo', + { + headers: { Authorization: `Bearer ${accessToken}` }, + }, + ); + + if (!userInfoResponse.ok) { + throw new Error('Error fetching user info'); + } + + return userInfoResponse.json(); 
+}; + +export const generateGoogleAuthUrl = (params: GoogleAuthUrlParams) => { + const googleURL = new URL('https://accounts.google.com/o/oauth2/v2/auth'); + googleURL.searchParams.set('response_type', params.responseType ?? 'code'); + googleURL.searchParams.set('client_id', params.clientId); + googleURL.searchParams.set('redirect_uri', params.redirectUri); + googleURL.searchParams.set('scope', params.scope ?? 'email profile'); + + return googleURL.toString(); +} + diff --git a/src/utils/isUsername.ts b/src/utils/isUsername.ts deleted file mode 100644 index 968b884..0000000 --- a/src/utils/isUsername.ts +++ /dev/null @@ -1,5 +0,0 @@ -const usernameRegex = /^[a-zA-Z0-9_]{3,16}$/; - -// Usage -export const isValidUsername = (username: string) => - usernameRegex.test(username); diff --git a/src/utils/jwt.utils.ts b/src/utils/jwt.utils.ts new file mode 100644 index 0000000..f0e77d9 --- /dev/null +++ b/src/utils/jwt.utils.ts @@ -0,0 +1,83 @@ +import { sign, verify } from 'jsonwebtoken'; +import config from '../config/env'; +import type { RoleType } from '../enums'; +import logger from '@/plugins/observability/logger'; + +export type JwtPayload = { + sub: string; + email?: string | null; + phoneNo?: string | null; + username: string; + role: RoleType; + sid?: string; +}; + +export type PasswordResetTokenPayload = { + email: string; + userId: string; +}; + +export type SetPasswordTokenPayload = { + email: string; + userId: string; +}; + +/** + * Sign a JWT token with the given payload + * @param payload - JWT payload + * @returns Signed JWT token + */ +export const signToken = async (payload: JwtPayload): Promise => { + return sign(payload, String(config.JWT_SECRET), { + expiresIn: Number(config.JWT_EXPIRES_IN), + }); +}; + +/** + * Sign a password reset token + * @param payload - Password reset token payload + * @returns Signed password reset token + */ +export const signPasswordResetToken = async ( + payload: PasswordResetTokenPayload, +): Promise => { + return sign(payload, String(config.JWT_SECRET), { + expiresIn: config.PASSWORD_RESET_TOKEN_EXPIRES_IN, + }); +}; + +/** + * Sign a set password token + * @param payload - Set password token payload + * @returns Signed set password token + */ +export const signSetPasswordToken = async ( + payload: SetPasswordTokenPayload, +): Promise => { + return sign(payload, String(config.JWT_SECRET), { + expiresIn: config.SET_PASSWORD_TOKEN_EXPIRES_IN, + }); +}; + +/** + * Verify a JWT token and return the decoded payload + * @param token - JWT token to verify + * @returns Decoded token payload + * @throws Error if token is invalid or expired + */ +export const verifyToken = async < + T extends JwtPayload | PasswordResetTokenPayload | SetPasswordTokenPayload, +>( + token: string, +): Promise => { + try { + return verify(token, String(config.JWT_SECRET)) as T; + } catch (err) { + if (err instanceof Error) { + logger.error('verifyToken failed', { error: err.message }); + throw err; + } + logger.error('verifyToken failed with unknown error', { err }); + throw new Error('Token verification failed'); + } +}; diff --git a/src/utils/otp.utils.ts b/src/utils/otp.utils.ts new file mode 100644 index 0000000..1c0eeca --- /dev/null +++ b/src/utils/otp.utils.ts @@ -0,0 +1,56 @@ +import { customAlphabet } from 'nanoid'; +import config from '../config/env'; + +export const numeric = '0123456789'; +export const hexChars = '0123456789abcdef'; +export const alphanumericChars = + '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'; + +export type OtpCharset = 
'numeric' | 'hex' | 'alphanumeric'; + +export interface GenerateOtpOptions { + length?: number; + charset?: OtpCharset; +} + +/** + * Generate a one-time password (OTP) with configurable length and character set + * @param options - OTP generation options + * @param options.length - Length of the OTP (default: 6) + * @param options.charset - Character set to use: 'numeric', 'hex', or 'alphanumeric' (default: 'numeric') + * @returns Generated OTP string + */ +export const generateOtp = (options: GenerateOtpOptions = {}): string => { + const { length = 6, charset = 'numeric' } = options; + + // Return static OTP for testing if configured + if (config.STATIC_OTP) { + return '1234'.padEnd(length, '4').slice(0, length); + } + + switch (charset) { + case 'numeric': { + const generator = customAlphabet(numeric, length); + return generator(); + } + case 'hex': { + const generator = customAlphabet(hexChars, length); + return generator(); + } + case 'alphanumeric': { + const generator = customAlphabet(alphanumericChars, length); + return generator(); + } + default: + throw new Error(`Unsupported charset: ${charset}`); + } +}; + +/** + * Generate a random password with specified length + * @param length - Length of the password (default: 16) + * @returns Random password string + */ +export const generateRandomPassword = (length = 16): string => { + return customAlphabet(alphanumericChars, length)(); +}; diff --git a/src/utils/pagination.utils.ts b/src/utils/pagination.utils.ts new file mode 100644 index 0000000..eedacb3 --- /dev/null +++ b/src/utils/pagination.utils.ts @@ -0,0 +1,47 @@ +export type GetPaginatorReturnType = { + skip: number; + limit: number; + currentPage: number; + pages: number; + hasNextPage: boolean; + totalRecords: number; + pageSize: number; +}; + +/** + * Calculate pagination metadata + * @param limitParam - Number of items per page + * @param pageParam - Current page number (1-indexed) + * @param totalRecords - Total number of records + * @returns Pagination metadata including skip, limit, pages, etc. 
+ */ +export const getPaginator = ( + limitParam: number, + pageParam: number, + totalRecords: number, +): GetPaginatorReturnType => { + // Ensure positive limit with fallback + const limit = Math.max(1, limitParam || 10); + + // Ensure page is at least 1 + const currentPage = Math.max(1, pageParam || 1); + + // Calculate skip based on current page + const skip = (currentPage - 1) * limit; + + // Calculate total pages + const pages = Math.ceil(totalRecords / limit); + + // Check if there's a next page + const hasNextPage = currentPage < pages; + + return { + skip, + limit, + currentPage, + pages, + hasNextPage, + totalRecords, + pageSize: limit, + }; +}; diff --git a/src/utils/password.utils.ts b/src/utils/password.utils.ts new file mode 100644 index 0000000..9117521 --- /dev/null +++ b/src/utils/password.utils.ts @@ -0,0 +1,23 @@ +import argon2 from 'argon2'; + +/** + * Hash a password using Argon2 + * @param password - Plain text password + * @returns Hashed password + */ +export const hashPassword = async (password: string): Promise => { + return argon2.hash(password); +}; + +/** + * Compare a plain text password with a hashed password + * @param hashed - Hashed password + * @param plainPassword - Plain text password to compare + * @returns True if passwords match, false otherwise + */ +export const compareHash = async ( + hashed: string, + plainPassword: string, +): Promise => { + return argon2.verify(hashed, plainPassword); +}; diff --git a/src/utils/record.utils.ts b/src/utils/record.utils.ts new file mode 100644 index 0000000..206b298 --- /dev/null +++ b/src/utils/record.utils.ts @@ -0,0 +1,41 @@ +/** + * Remove null and undefined values from a record + * @param record - Record to sanitize + * @returns New record with null/undefined values removed + */ +export const sanitizeRecord = >( + record: T, +): T => { + try { + return Object.fromEntries( + Object.entries(record).filter( + ([_, value]) => value !== null && value !== undefined, + ), + ) as T; + } catch { + return record; + } +}; + +/** + * Remove entries with empty arrays from a record + * Keeps all non-array fields and non-empty arrays + * @param record - Record to filter + * @returns New record with empty array entries removed + */ +export const removeEmptyArrays = >( + record: T, +): T => { + try { + return Object.fromEntries( + Object.entries(record).filter(([_, value]) => { + // Keep non-array values + if (!Array.isArray(value)) return true; + // Keep non-empty arrays + return value.length > 0; + }), + ) as T; + } catch { + return record; + } +}; diff --git a/src/utils/response.utils.ts b/src/utils/response.utils.ts new file mode 100644 index 0000000..e589f07 --- /dev/null +++ b/src/utils/response.utils.ts @@ -0,0 +1,51 @@ +import type { Response } from 'express'; +import { StatusCodes, StatusCodesValues } from '@/plugins/magic/status-codes'; +import config from '@/config/env'; +import type { ResponseExtended } from '@/types'; + +/** + * Send an error response + * @param res - Express response object + * @param message - Error message + * @param statusCode - HTTP status code (default: 400) + * @param payload - Optional error payload + * @param stack - Optional stack trace (only included in development) + */ +export const errorResponse = ( + res: ResponseExtended | Response, + message?: string, + statusCode?: StatusCodesValues, + payload?: unknown, + stack?: string, +): void => { + const isDevelopment = config.NODE_ENV === 'development'; + + res.status(statusCode ?? 
StatusCodes.BAD_REQUEST).json({ + success: false, + message: message, + data: isDevelopment ? payload : undefined, + stack: isDevelopment ? stack : undefined, + }); + + return; +}; + +/** + * Send a success response + * @param res - Express response object + * @param message - Success message + * @param payload - Response data payload + * @param statusCode - HTTP status code (default: 200) + */ +export const successResponse = ( + res: ResponseExtended | Response, + message?: string, + payload?: Record, + statusCode: StatusCodesValues = StatusCodes.OK, +): void => { + res + .status(statusCode) + .json({ success: true, message: message, data: payload }); + + return; +}; diff --git a/src/utils/responseInterceptor.ts b/src/utils/responseInterceptor.ts deleted file mode 100644 index 5bde030..0000000 --- a/src/utils/responseInterceptor.ts +++ /dev/null @@ -1,61 +0,0 @@ -import type { NextFunction } from "express"; -import { ZodError } from "zod"; -import type { RequestExtended, ResponseExtended } from "../types"; - -const responseInterceptor = ( - _: RequestExtended, - res: ResponseExtended, - next: NextFunction, -) => { - const originalJson = res.json; - const originalSend = res.send; - const validateSchema = res.locals.validateSchema ?? null; - - res.jsonValidate = function (body) { - if (validateSchema) { - try { - validateSchema.parse(body); - } catch (err) { - if (err instanceof ZodError) { - return originalJson.call(this, { - success: false, - message: "Response Validation Error - Server Error", - data: err.errors, - stack: err.stack, - }); - } - } - } - - return originalJson.call( - this, - validateSchema ? validateSchema.parse(body) : body, - ); - }; - - res.sendValidate = function (body) { - if (validateSchema) { - try { - validateSchema.parse(body); - } catch (err) { - if (err instanceof ZodError) { - return originalSend.call(this, { - success: false, - message: "Response Validation Error - Server Error", - data: err.errors, - stack: err.stack, - }); - } - } - } - - return originalSend.call( - this, - validateSchema ? validateSchema.parse(body) : body, - ); - }; - - next(); -}; - -export default responseInterceptor; diff --git a/tsconfig.json b/tsconfig.json index 7afec89..516f30f 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -31,7 +31,9 @@ "rootDir": "." /* Specify the root folder within your source files. */, // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ "baseUrl": "." /* Specify the base directory to resolve non-relative module names. */, - "paths": {} /* Specify a set of entries that re-map imports to additional lookup locations. */, + "paths": { + "@/*": ["./src/*"], + } /* Specify a set of entries that re-map imports to additional lookup locations. */, // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ // "types": [], /* Specify type package names to be included without being referenced in a source file. */ @@ -101,5 +103,5 @@ /* Completeness */ // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ "skipLibCheck": true /* Skip type checking all .d.ts files. */ - } + }, }
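Note on the `tsconfig.json` change: the new `@/*` path alias is what the `@/config/env`, `@/plugins/...`, and `@/utils/...` imports introduced throughout this diff rely on. A minimal sketch of how the mapping resolves under `"baseUrl": "."` (illustrative only; whether a runtime alias resolver such as `tsc-alias` or `tsconfig-paths` is wired into the build is not shown in this diff):

```ts
// "paths": { "@/*": ["./src/*"] } lets the compiler resolve alias imports:
import config from '@/config/env'; // -> ./src/config/env.ts
import logger from '@/plugins/observability/logger'; // -> ./src/plugins/observability/logger.ts
import { getPaginator } from '@/utils/pagination.utils'; // -> ./src/utils/pagination.utils.ts

// The alias only affects module resolution; the imported values behave as usual.
logger.info(
  { app: config.APP_NAME, page: getPaginator(10, 1, 42) },
  'alias imports resolved',
);
```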