diff --git a/plan.md b/plan.md
new file mode 100644
index 00000000..9382dc72
--- /dev/null
+++ b/plan.md
@@ -0,0 +1,461 @@
+# Dumbo Database Schema Builder Implementation Plan (Simplified)
+
+## Overview
+
+Create a simple builder API for defining database schemas in dumbo, following the PongoSchema pattern but using const functions and reusing existing schema components.
+
+## Key Principles
+
+- **No overengineering** - Keep it simple
+- **Use const instead of function** - Follow the existing pattern
+- **Object parameters** - No function overloading, use objects with optional fields
+- **No useless comments**
+
+## Implementation Steps
+
+### Step 1: Create Constants File
+
+**File**: `/src/packages/dumbo/src/core/schema/dumboSchema/constants.ts`
+
+```typescript
+// Special key for default schema
+export const DEFAULT_SCHEMA = Symbol.for("dumbo.defaultSchema");
+
+// Database-specific defaults (for reference/future use)
+export const DATABASE_DEFAULTS = {
+  PostgreSQL: { defaultDatabase: "postgres", defaultSchema: "public" },
+  MySQL: { defaultDatabase: null, defaultSchema: null },
+  SQLite: { defaultDatabase: null, defaultSchema: "main" },
+  SqlServer: { defaultDatabase: "master", defaultSchema: "dbo" },
+} as const;
+```
+
+**Validation**:
+
+- [ ] Compiles
+- [ ] No linter errors
+
+### Step 2: Create Builder Functions
+
+**File**: `/src/packages/dumbo/src/core/schema/dumboSchema/index.ts`
+
+```typescript
+import {
+  columnSchemaComponent,
+  type ColumnSchemaComponent,
+  indexSchemaComponent,
+  type IndexSchemaComponent,
+  tableSchemaComponent,
+  type TableSchemaComponent,
+  databaseSchemaSchemaComponent,
+  type DatabaseSchemaSchemaComponent,
+  databaseSchemaComponent,
+  type DatabaseSchemaComponent,
+} from "../components";
+import type { SchemaComponentOptions } from "../schemaComponent";
+import { DEFAULT_SCHEMA, DATABASE_DEFAULTS } from "./constants";
+
+// Column builder - simple const
+const dumboColumn = (
+  name: string,
+  options?: SchemaComponentOptions,
+): ColumnSchemaComponent =>
+  columnSchemaComponent({
+    columnName: name,
+    ...options,
+  });
+
+// Index builder - simple const
+const dumboIndex = (
+  name: string,
+  columnNames: string[],
+  options?: { unique?: boolean } & SchemaComponentOptions,
+): IndexSchemaComponent => {
+  // Pull `unique` out so it is not spread into the component options
+  const { unique, ...componentOptions } = options ?? {};
+  return indexSchemaComponent({
+    indexName: name,
+    columnNames,
+    isUnique: unique ?? false,
+    ...componentOptions,
+  });
+};
+
+// Table builder - takes columns and indexes as objects
+const dumboTable = (
+  name: string,
+  definition: {
+    columns?: Record<string, ColumnSchemaComponent>;
+    indexes?: Record<string, IndexSchemaComponent>;
+  } & SchemaComponentOptions,
+): TableSchemaComponent => {
+  const { columns, indexes, ...options } = definition;
+
+  const components = [
+    ...(columns ? Object.values(columns) : []),
+    ...(indexes ? Object.values(indexes) : []),
+  ];
+
+  return tableSchemaComponent({
+    tableName: name,
+    components,
+    ...options,
+  });
+};
+
+// Schema builder - name is optional for default schema
+const dumboDatabaseSchema = (
+  nameOrTables: string | Record<string, TableSchemaComponent>,
+  tables?: Record<string, TableSchemaComponent>,
+  options?: SchemaComponentOptions,
+): DatabaseSchemaSchemaComponent => {
+  if (typeof nameOrTables === "string") {
+    // Named schema: dumboDatabaseSchema('public', {...})
+    const tableComponents = Object.values(tables || {});
+    return databaseSchemaSchemaComponent({
+      schemaName: nameOrTables,
+      components: tableComponents,
+      ...options,
+    });
+  } else {
+    // Default schema: dumboDatabaseSchema({...})
+    const tableComponents = Object.values(nameOrTables || {});
+    return databaseSchemaSchemaComponent({
+      schemaName: "", // Will be replaced with default
+      components: tableComponents,
+    });
+  }
+};
+
+// Helper for creating from array of names
+dumboDatabaseSchema.from = (
+  schemaName: string | undefined,
+  tableNames: string[],
+): DatabaseSchemaSchemaComponent => {
+  const tables = tableNames.reduce(
+    (acc, tableName) => {
+      acc[tableName] = dumboTable(tableName, {});
+      return acc;
+    },
+    {} as Record<string, TableSchemaComponent>,
+  );
+
+  return schemaName
+    ? dumboDatabaseSchema(schemaName, tables)
+    : dumboDatabaseSchema(tables);
+};
+
+// Database builder - name is optional for default database
+const dumboDatabase = (
+  nameOrSchemas:
+    | string
+    | Record<string | symbol, DatabaseSchemaSchemaComponent>,
+  schemas?: Record<string | symbol, DatabaseSchemaSchemaComponent>,
+  options?: { defaultSchemaName?: string } & SchemaComponentOptions,
+): DatabaseSchemaComponent => {
+  let databaseName: string;
+  let schemaMap: Record<string | symbol, DatabaseSchemaSchemaComponent>;
+  let dbOptions: typeof options;
+
+  if (typeof nameOrSchemas === "string") {
+    // Named database: dumboDatabase('myapp', {...})
+    databaseName = nameOrSchemas;
+    schemaMap = schemas || {};
+    dbOptions = options;
+  } else {
+    // Default database: dumboDatabase({...})
+    databaseName = "database"; // Default name
+    schemaMap = nameOrSchemas;
+    dbOptions = schemas as typeof options;
+  }
+
+  // Process schemas, handling DEFAULT_SCHEMA.
+  // Object.entries skips symbol keys, so iterate Reflect.ownKeys and
+  // compare against the DEFAULT_SCHEMA symbol directly.
+  const schemaComponents: DatabaseSchemaSchemaComponent[] = [];
+
+  for (const key of Reflect.ownKeys(schemaMap)) {
+    const schemaComponent = schemaMap[key];
+    if (key === DEFAULT_SCHEMA) {
+      // This is the default schema - replace its name
+      const defaultSchemaName = dbOptions?.defaultSchemaName || "public";
+      schemaComponents.push(
+        databaseSchemaSchemaComponent({
+          schemaName: defaultSchemaName,
+          components: Array.from(schemaComponent.components.values()),
+          migrations: schemaComponent.migrations,
+        }),
+      );
+    } else {
+      schemaComponents.push(schemaComponent);
+    }
+  }
+
+  return databaseSchemaComponent({
+    databaseName,
+    components: schemaComponents,
+    ...dbOptions,
+  });
+};
+
+// Helper for creating from array of names
+dumboDatabase.from = (
+  databaseName: string | undefined,
+  schemaNames: string[],
+): DatabaseSchemaComponent => {
+  const schemas = schemaNames.reduce(
+    (acc, schemaName) => {
+      acc[schemaName] = dumboDatabaseSchema(schemaName, {});
+      return acc;
+    },
+    {} as Record<string, DatabaseSchemaSchemaComponent>,
+  );
+
+  return databaseName
+    ?
dumboDatabase(databaseName, schemas) + : dumboDatabase(schemas); +}; + +// Main export - similar to pongoSchema +export const dumboSchema = { + database: dumboDatabase, + schema: dumboDatabaseSchema, + table: dumboTable, + column: dumboColumn, + index: dumboIndex, + DEFAULT_SCHEMA, + DATABASE_DEFAULTS, +}; +``` + +**Reference files**: + +- `/src/packages/pongo/src/core/schema/index.ts` (pattern to follow) +- `/src/packages/dumbo/src/core/schema/components/` (components to reuse) + +**Validation**: + +- [ ] Compiles +- [ ] No linter errors +- [ ] Exports work correctly + +### Step 3: Update Main Schema Export + +**File**: `/src/packages/dumbo/src/core/schema/index.ts` + +Add to existing exports: + +```typescript +export * from "./dumboSchema"; +``` + +**Validation**: + +- [ ] Export is accessible +- [ ] No circular dependencies + +### Step 4: Create Unit Tests + +**File**: `/src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.unit.spec.ts` + +```typescript +import { describe, it } from "node:test"; +import assert from "node:assert"; +import { dumboSchema } from "./index"; + +describe("dumboSchema", () => { + it("should create a column", () => { + const col = dumboSchema.column("id"); + assert.strictEqual(col.columnName, "id"); + }); + + it("should create an index", () => { + const idx = dumboSchema.index("idx_email", ["email"]); + assert.strictEqual(idx.indexName, "idx_email"); + assert.strictEqual(idx.isUnique, false); + }); + + it("should create a unique index", () => { + const idx = dumboSchema.index("idx_email", ["email"], { unique: true }); + assert.strictEqual(idx.indexName, "idx_email"); + assert.strictEqual(idx.isUnique, true); + }); + + it("should create a table with columns and indexes", () => { + const tbl = dumboSchema.table("users", { + columns: { + id: dumboSchema.column("id"), + email: dumboSchema.column("email"), + }, + indexes: { + idx_email: dumboSchema.index("idx_email", ["email"]), + }, + }); + + assert.strictEqual(tbl.tableName, "users"); + assert.strictEqual(tbl.columns.size, 2); + assert.strictEqual(tbl.indexes.size, 1); + assert.ok(tbl.columns.has("id")); + assert.ok(tbl.columns.has("email")); + assert.ok(tbl.indexes.has("idx_email")); + }); + + it("should create a named schema", () => { + const sch = dumboSchema.schema("public", { + users: dumboSchema.table("users", { + columns: { + id: dumboSchema.column("id"), + }, + }), + }); + + assert.strictEqual(sch.schemaName, "public"); + assert.strictEqual(sch.tables.size, 1); + assert.ok(sch.tables.has("users")); + }); + + it("should create a default schema without name", () => { + const sch = dumboSchema.schema({ + users: dumboSchema.table("users", { + columns: { + id: dumboSchema.column("id"), + }, + }), + }); + + assert.strictEqual(sch.schemaName, ""); + assert.strictEqual(sch.tables.size, 1); + }); + + it("should create a named database", () => { + const db = dumboSchema.database("myapp", { + public: dumboSchema.schema("public", { + users: dumboSchema.table("users", { + columns: { + id: dumboSchema.column("id"), + }, + }), + }), + }); + + assert.strictEqual(db.databaseName, "myapp"); + assert.strictEqual(db.schemas.size, 1); + assert.ok(db.schemas.has("public")); + }); + + it("should handle DEFAULT_SCHEMA", () => { + const db = dumboSchema.database( + "myapp", + { + [dumboSchema.DEFAULT_SCHEMA]: dumboSchema.schema({ + users: dumboSchema.table("users", { + columns: { + id: dumboSchema.column("id"), + }, + }), + }), + }, + { + defaultSchemaName: "main", + } + ); + + assert.strictEqual(db.databaseName, "myapp"); + 
assert.strictEqual(db.schemas.size, 1); + assert.ok(db.schemas.has("main")); + }); + + it("should create schema from table names", () => { + const sch = dumboSchema.schema.from("public", ["users", "posts"]); + assert.strictEqual(sch.schemaName, "public"); + assert.strictEqual(sch.tables.size, 2); + }); + + it("should create database from schema names", () => { + const db = dumboSchema.database.from("myapp", ["public", "analytics"]); + assert.strictEqual(db.databaseName, "myapp"); + assert.strictEqual(db.schemas.size, 2); + }); +}); +``` + +**Validation**: + +- [ ] All tests pass +- [ ] Tests cover main functionality + +### Step 5: Usage Example + +**File**: `/src/packages/dumbo/src/core/schema/dumboSchema/README.md` + +```typescript +import { dumboSchema } from "@event-driven-io/dumbo"; + +// Simple database with tables in default schema +const simpleDb = dumboSchema.database( + "myapp", + { + [dumboSchema.DEFAULT_SCHEMA]: dumboSchema.schema({ + users: dumboSchema.table("users", { + columns: { + id: dumboSchema.column("id"), + email: dumboSchema.column("email"), + name: dumboSchema.column("name"), + }, + indexes: { + idx_email: dumboSchema.index("idx_email", ["email"], { + unique: true, + }), + }, + }), + }), + }, + { + defaultSchemaName: "public", // PostgreSQL default + } +); + +// Database with multiple schemas +const multiSchemaDb = dumboSchema.database("myapp", { + public: dumboSchema.schema("public", { + users: dumboSchema.table("users", { + columns: { + id: dumboSchema.column("id"), + email: dumboSchema.column("email"), + }, + }), + }), + analytics: dumboSchema.schema("analytics", { + events: dumboSchema.table("events", { + columns: { + id: dumboSchema.column("id"), + user_id: dumboSchema.column("user_id"), + timestamp: dumboSchema.column("timestamp"), + }, + }), + }), +}); + +// Access using name-based maps +const publicSchema = multiSchemaDb.schemas.get("public"); +const usersTable = publicSchema?.tables.get("users"); +const emailColumn = usersTable?.columns.get("email"); +``` + +## Execution Plan + +### Setup Subagents + +1. **Build Agent**: `npm run build:ts:watch` from `/home/oskar/Repos/Pongo/src` +2. **Linter Agent**: `npm run fix` on demand from `/home/oskar/Repos/Pongo/src` +3. **Test Agent**: `npm run test:unit` from `/home/oskar/Repos/Pongo/src` + +### Implementation Process + +1. Start all subagents +2. Create constants.ts → Check compilation → Run linter → **Ask for confirmation** ✅ +3. Create index.ts → Check compilation → Run linter → **Ask for confirmation** ✅ +4. Update exports → Check compilation → Run linter → **Ask for confirmation** ✅ +5. Create tests → Ensure tests pass → **Ask for confirmation** ✅ +6. Don't commit after step. User will do it when confirming. + +## Success Criteria + +1. ✅ Code compiles +2. ✅ Linter passes +3. ✅ Unit tests pass +4. ✅ Fix issues, don't just report them + +## Questions to Ask If Unsure + +- Should column/index builders accept additional metadata beyond what's in the current components? Not for now. +- Should we validate that DEFAULT_SCHEMA isn't used as an actual schema name? Yes. +- Should the default database name be configurable or just use 'database'? 
Same as schema diff --git a/src/packages/dumbo/src/core/schema/MIGRATION_UNIFICATION_PLAN.md b/src/packages/dumbo/src/core/schema/MIGRATION_UNIFICATION_PLAN.md new file mode 100644 index 00000000..9299d4c5 --- /dev/null +++ b/src/packages/dumbo/src/core/schema/MIGRATION_UNIFICATION_PLAN.md @@ -0,0 +1,1731 @@ +# Schema Component Migration Unification Plan + +## Overview + +This plan details the unification of SQL migrations and TypeScript schema definitions in the Dumbo schema component system. The goal is to support both SQL-first and TypeScript-first workflows while maintaining a clear source of truth and enabling strongly-typed query builders for Pongo. + +## Context and Current State + +### Key Files + +- **Core Schema Component**: [src/packages/dumbo/src/core/schema/schemaComponent.ts](src/packages/dumbo/src/core/schema/schemaComponent.ts) - Lines 3-146 +- **Component Implementations**: + - [src/packages/dumbo/src/core/schema/components/databaseSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/databaseSchemaComponent.ts) + - [src/packages/dumbo/src/core/schema/components/databaseSchemaSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/databaseSchemaSchemaComponent.ts) + - [src/packages/dumbo/src/core/schema/components/tableSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/tableSchemaComponent.ts) + - [src/packages/dumbo/src/core/schema/components/columnSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/columnSchemaComponent.ts) + - [src/packages/dumbo/src/core/schema/components/indexSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/indexSchemaComponent.ts) +- **Migration System**: + - [src/packages/dumbo/src/core/schema/sqlMigration.ts](src/packages/dumbo/src/core/schema/sqlMigration.ts) + - [src/packages/dumbo/src/core/schema/migrators/schemaComponentMigrator.ts](src/packages/dumbo/src/core/schema/migrators/schemaComponentMigrator.ts) + - [src/packages/dumbo/src/core/schema/migrators/migrator.ts](src/packages/dumbo/src/core/schema/migrators/migrator.ts) +- **Builder API**: [src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.ts](src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.ts) +- **Tests**: [src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.unit.spec.ts](src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.unit.spec.ts) +- **Pongo Integration Target**: [src/packages/pongo/src/core/schema/index.ts](src/packages/pongo/src/core/schema/index.ts) + +### Current Problem + +The system cannot distinguish between: + +1. **Initial schema definitions** (CREATE TABLE) +2. **Schema evolution** (ALTER TABLE) +3. **Data migrations** (UPDATE/INSERT) + +All migrations are stored in a single `migrations` array and bubble up through the component hierarchy (lines 79-83 in schemaComponent.ts), making it impossible to know the intent or current state. + +## Agreed Solution + +### Core Principle: TypeScript as Source of Truth (When Present) + +**The Rule**: When TypeScript definitions exist, they represent the desired schema state. Migrations are either: + +1. **Generated** from TypeScript definitions (if no migrations provided) +2. **Validated** against TypeScript definitions (if migrations provided) +3. **Trusted** completely (if no TypeScript definitions provided - SQL-first mode) + +### Three Usage Modes + +#### 1. 
TypeScript-First (Generates Migrations) + +```typescript +const table = tableSchemaComponent({ + tableName: 'users', + columns: { + id: column('id', { type: 'serial', primaryKey: true }), + email: column('email', { type: 'varchar', length: 255 }), + }, + // No migrations = will generate CREATE TABLE IF NOT EXISTS +}); +``` + +#### 2. SQL-First (Migrations as Source of Truth) + +```typescript +const table = tableSchemaComponent({ + tableName: 'users', + migrations: [sqlMigration('001_create', [SQL`CREATE TABLE users ...`])], + // No columns = trust the SQL completely +}); +``` + +#### 3. Hybrid with Validation + +```typescript +const table = tableSchemaComponent({ + tableName: 'users', + columns: { + id: column('id', { type: 'serial' }), + email: column('email', { type: 'varchar' }), + }, + migrations: [sqlMigration('001_create', [SQL`CREATE TABLE users ...`])], + // Both provided = TypeScript is truth, validate migrations match +}); +``` + +## Implementation Plan + +### Phase 1: Enhanced Column Definition Types + +**File to modify**: [src/packages/dumbo/src/core/schema/components/columnSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/columnSchemaComponent.ts) + +```typescript +export type ColumnDefinition = { + type: + | 'serial' + | 'varchar' + | 'int' + | 'bigint' + | 'text' + | 'timestamp' + | 'boolean' + | 'uuid' + | 'json' + | 'jsonb'; + nullable?: boolean; + primaryKey?: boolean; + unique?: boolean; + length?: number; + precision?: number; + scale?: number; + defaultValue?: unknown; + references?: { + table: string; + column: string; + onDelete?: 'CASCADE' | 'SET NULL' | 'RESTRICT'; + onUpdate?: 'CASCADE' | 'SET NULL' | 'RESTRICT'; + }; +}; + +export type ColumnSchemaComponent = SchemaComponent< + ColumnURN, + Readonly<{ + columnName: string; + definition?: ColumnDefinition; // NEW: Optional definition for TypeScript-first + generateMigration?: () => SQL; // NEW: Self-contained migration generation + }> +>; + +// Column knows how to generate its own SQL +export const generateColumnSQL = (column: ColumnSchemaComponent): string => { + if (!column.definition) return `${column.columnName} TEXT`; // fallback + + const { type, nullable, primaryKey, unique, length, defaultValue } = + column.definition; + let sql = `${column.columnName} ${type.toUpperCase()}`; + + if (length) sql += `(${length})`; + if (primaryKey) sql += ' PRIMARY KEY'; + if (unique) sql += ' UNIQUE'; + if (!nullable) sql += ' NOT NULL'; + if (defaultValue !== undefined) sql += ` DEFAULT ${defaultValue}`; + + return sql; +}; +``` + +### Phase 2: Nested Schema Snapshot System + +**Complete directory structure**: + +``` +.dumbo/ + snapshots/ # Current state + migration tracking + myapp.snapshot.ts # Root database snapshot + schemas/ + public.snapshot.ts # Schema-level snapshot + analytics.snapshot.ts + public/ + users.snapshot.ts # Table-level snapshots + posts.snapshot.ts + comments.snapshot.ts + analytics/ + events.snapshot.ts + metrics.snapshot.ts + migrations/ # All SQL migrations (generated or provided) + 2024-01-15-001-create_schema_public.sql + 2024-01-15-002-create_table_users.sql + 2024-01-15-003-create_index_idx_users_email.sql + 2024-01-16-001-add_email_to_users.sql +``` + +Note: Applied migrations are tracked in the database `migrations` table (see `migrator.ts` lines 167-189), not in files. 
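+
+For reference, that tracking table could look roughly like the sketch below. This is a hypothetical shape shown only to make the note above concrete; the authoritative definition lives in `migrator.ts`:
+
+```sql
+-- Hypothetical sketch of the migrations tracking table
+-- (see migrator.ts for the actual definition)
+CREATE TABLE IF NOT EXISTS migrations (
+  id SERIAL PRIMARY KEY,
+  name VARCHAR(255) NOT NULL UNIQUE, -- migration name, e.g. '001_create'
+  applied_at TIMESTAMP NOT NULL DEFAULT now()
+);
+```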
+ +**Migration SQL file examples**: + +```sql +-- .dumbo/migrations/2024-01-15-002-create_table_users.sql +-- Source: generated (from TypeScript definitions) +-- Component: sc:dumbo:table:users +-- Generated at: 2024-01-15T10:30:00Z +CREATE TABLE IF NOT EXISTS users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL +); + +-- .dumbo/migrations/2024-01-16-001-custom_users_setup.sql +-- Source: provided (from component migrations array) +-- Component: sc:dumbo:table:users +-- Migration name: 001_custom_setup +CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255), + internal_field JSONB +); +``` + +**Snapshot file examples with migration tracking**: + +```typescript +// SCENARIO 1: TypeScript-first (no migrations provided) +// Component: tableSchemaComponent({ +// tableName: 'users', +// columns: { id: column('id', { type: 'serial', primaryKey: true }) } +// }) + +// .dumbo/snapshots/schemas/public/users.snapshot.ts +export const usersSnapshot = { + tableName: 'users', + columns: { + id: { type: 'serial' as const, primaryKey: true }, + email: { type: 'varchar' as const, length: 255, nullable: false }, + }, + indexes: { + idx_users_email: { columns: ['email'], unique: false }, + }, + migrations: [], // No migrations provided, will be generated +} as const; + +// SCENARIO 2: SQL-first (only migrations provided) +// Component: tableSchemaComponent({ +// tableName: 'orders', +// migrations: [ +// sqlMigration('001_create', [SQL`CREATE TABLE orders ...`]), +// sqlMigration('002_add_user', [SQL`ALTER TABLE orders ...`]) +// ] +// }) + +// .dumbo/snapshots/schemas/public/orders.snapshot.ts +export const ordersSnapshot = { + tableName: 'orders', + columns: { + // Introspected from database after applying migrations + id: { type: 'serial' as const, primaryKey: false }, + total: { type: 'decimal' as const, precision: 10, scale: 2 }, + user_id: { type: 'int' as const, nullable: true }, + }, + indexes: {}, + migrations: [ + { + name: '001_create', + sqls: ['CREATE TABLE orders (id SERIAL, total DECIMAL(10,2))'], + }, + { + name: '002_add_user', + sqls: ['ALTER TABLE orders ADD COLUMN user_id INT'], + }, + ], +} as const; + +// SCENARIO 3: Hybrid (TypeScript definitions + SQL migrations) +// Component: tableSchemaComponent({ +// tableName: 'posts', +// columns: { id: column('id', ...), title: column('title', ...) 
},
+// migrations: [sqlMigration('001_custom', [SQL`CREATE TABLE posts ...`])]
+// })
+
+// .dumbo/snapshots/schemas/public/posts.snapshot.ts
+export const postsSnapshot = {
+  tableName: 'posts',
+  columns: {
+    // TypeScript definitions as source of truth
+    id: { type: 'serial' as const, primaryKey: true },
+    title: { type: 'varchar' as const, length: 255, nullable: false },
+  },
+  indexes: {},
+  migrations: [
+    {
+      name: '001_custom',
+      sqls: [
+        'CREATE TABLE posts (id SERIAL PRIMARY KEY, title VARCHAR(255), internal_field JSONB)',
+      ],
+    },
+  ],
+} as const;
+
+// .dumbo/snapshots/schemas/public.snapshot.ts
+import { usersSnapshot } from './public/users.snapshot';
+import { postsSnapshot } from './public/posts.snapshot';
+import { commentsSnapshot } from './public/comments.snapshot';
+
+export const publicSnapshot = {
+  schemaName: 'public',
+  tables: {
+    users: usersSnapshot,
+    posts: postsSnapshot,
+    comments: commentsSnapshot,
+  },
+  migrations: [], // Schema-level migrations, none in this example
+} as const;
+
+// .dumbo/snapshots/myapp.snapshot.ts
+import { publicSnapshot } from './schemas/public.snapshot';
+import { analyticsSnapshot } from './schemas/analytics.snapshot';
+
+export const databaseSnapshot = {
+  databaseName: 'myapp',
+  version: '2024-01-15-001',
+  timestamp: '2024-01-15T10:30:00Z',
+  schemas: {
+    public: publicSnapshot,
+    analytics: analyticsSnapshot,
+  },
+  migrations: [], // Database-level migrations, none in this example
+} as const;
+```
+
+**Benefits of nested structure**:
+
+- Minimizes git merge conflicts (each table in separate file)
+- Clear ownership and change tracking
+- Natural composition mirroring schema hierarchy
+- Type-safe imports with TypeScript checking
+
+**New file to create**: `src/packages/dumbo/src/core/schema/snapshot/schemaSnapshot.ts`
+
+```typescript
+import type { SQLMigration } from '../sqlMigration';
+import type { ColumnDefinition } from '../components/columnSchemaComponent';
+
+export type ColumnSnapshot = {
+  type: ColumnDefinition['type'];
+  nullable?: boolean;
+  primaryKey?: boolean;
+  unique?: boolean;
+  length?: number;
+  precision?: number;
+  scale?: number;
+  defaultValue?: unknown;
+  references?: {
+    table: string;
+    column: string;
+    onDelete?: 'CASCADE' | 'SET NULL' | 'RESTRICT';
+    onUpdate?: 'CASCADE' | 'SET NULL' | 'RESTRICT';
+  };
+};
+
+export type IndexSnapshot = {
+  columns: string[];
+  unique: boolean;
+  where?: string;
+};
+
+export type TableSnapshot = {
+  tableName: string;
+  columns: Record<string, ColumnSnapshot>;
+  indexes: Record<string, IndexSnapshot>;
+  migrations: SQLMigration[]; // Track migrations defined in component
+};
+
+export type SchemaSnapshot = {
+  schemaName: string;
+  tables: Record<string, TableSnapshot>;
+  migrations: SQLMigration[]; // Schema-level migrations
+};
+
+export type DatabaseSnapshot = {
+  databaseName: string;
+  version: string;
+  timestamp: string;
+  schemas: Record<string, SchemaSnapshot>;
+  migrations: SQLMigration[]; // Database-level migrations
+};
+```
+
+### Phase 3: Component-Specific Migration Generation
+
+**IMPORTANT**: Each component is responsible for generating its own migrations when none are provided.
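+
+To make that contract concrete, here is a minimal sketch of the shared shape each component could implement. The `MigrationGenerating` name and its snapshot type parameter are illustrative assumptions, not existing types in the codebase:
+
+```typescript
+import type { SQLMigration } from '../sqlMigration';
+
+// Hypothetical shared contract: a component either returns its user-provided
+// migrations verbatim (SQL-first) or derives migrations by diffing its
+// TypeScript definition against an optional snapshot.
+export type MigrationGenerating<TSnapshot> = {
+  generateMigrations: (snapshot?: TSnapshot) => SQLMigration[];
+};
+```
+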
+ +#### Table Component Migration Generation + +**File to modify**: [src/packages/dumbo/src/core/schema/components/tableSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/tableSchemaComponent.ts) + +```typescript +// Add to tableSchemaComponent.ts +export const generateTableMigrations = ( + table: TableSchemaComponent, + snapshot?: TableSnapshot, +): SQLMigration[] => { + // If migrations already provided, return them (SQL-first mode) + if (table.migrations.length > 0) { + return table.migrations; + } + + // If no columns defined in TypeScript, nothing to generate + if (table.columns.size === 0) { + return []; + } + + const migrations: SQLMigration[] = []; + + if (!snapshot) { + // No snapshot = first time creation + const columnDefinitions = Array.from(table.columns.values()) + .map((col) => generateColumnSQL(col)) + .join(',\n '); + + migrations.push( + sqlMigration(`create_table_${table.tableName}`, [ + SQL`CREATE TABLE IF NOT EXISTS ${table.tableName} ( + ${columnDefinitions} +)`, + ]), + ); + } else { + // Generate ALTER statements based on diff with snapshot + const changes = diffTableWithSnapshot(table, snapshot); + + for (const newColumn of changes.addedColumns) { + migrations.push( + sqlMigration(`add_${newColumn.columnName}_to_${table.tableName}`, [ + SQL`ALTER TABLE ${table.tableName} ADD COLUMN IF NOT EXISTS ${generateColumnSQL(newColumn)}`, + ]), + ); + } + } + + // Indexes are generated as separate migrations + for (const [name, index] of table.indexes) { + if (!snapshot || !snapshot.indexes[name]) { + migrations.push(generateIndexMigration(table.tableName, index)); + } + } + + return migrations; +}; + +// Table component enhanced constructor +export const tableSchemaComponent = ( + options: TableOptions, +): TableSchemaComponent => { + // ... existing implementation ... + + const component = { + // ... existing properties ... + generateMigrations: (snapshot?: TableSnapshot) => + generateTableMigrations(component, snapshot), + }; + + return component; +}; +``` + +#### Index Component Migration Generation + +**File to modify**: [src/packages/dumbo/src/core/schema/components/indexSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/indexSchemaComponent.ts) + +```typescript +export const generateIndexMigration = ( + tableName: string, + index: IndexSchemaComponent, +): SQLMigration => { + const uniqueClause = index.unique ? 
'UNIQUE ' : ''; + const columns = index.columns.join(', '); + + return sqlMigration(`create_index_${index.indexName}`, [ + SQL`CREATE ${uniqueClause}INDEX IF NOT EXISTS ${index.indexName} + ON ${tableName}(${columns})`, + ]); +}; +``` + +#### Database Schema Component Migration Generation + +**File to modify**: [src/packages/dumbo/src/core/schema/components/databaseSchemaSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/databaseSchemaSchemaComponent.ts) + +```typescript +export const generateSchemaMigrations = ( + schema: DatabaseSchemaSchemaComponent, + snapshot?: SchemaSnapshot, +): SQLMigration[] => { + if (schema.migrations.length > 0) { + return schema.migrations; + } + + const migrations: SQLMigration[] = []; + + // Create schema if doesn't exist + migrations.push( + sqlMigration(`create_schema_${schema.schemaName}`, [ + SQL`CREATE SCHEMA IF NOT EXISTS ${schema.schemaName}`, + ]), + ); + + // Tables will generate their own migrations + // But we collect them here for ordering + for (const table of schema.tables.values()) { + migrations.push( + ...table.generateMigrations(snapshot?.tables[table.tableName]), + ); + } + + return migrations; +}; +``` + +### Phase 4: Tree Traversal Migration Collection + +**File to modify**: [src/packages/dumbo/src/core/schema/components/databaseSchemaComponent.ts](src/packages/dumbo/src/core/schema/components/databaseSchemaComponent.ts) + +The database component is the root and orchestrates migration generation by traversing the tree in the correct order: + +```typescript +export const collectAllMigrations = ( + database: DatabaseSchemaComponent, + snapshot?: DatabaseSnapshot, +): SQLMigration[] => { + const migrations: SQLMigration[] = []; + + // Order matters! Follow this sequence: + // 1. Database-level migrations + // 2. Schema creation + // 3. Table creation (without foreign keys) + // 4. Column additions/modifications + // 5. Indexes + // 6. Foreign keys (future) + // 7. Other constraints (future) + + // If database has explicit migrations, use them + if (database.migrations.length > 0) { + return database.migrations; // SQL-first mode + } + + // 1. Database-level setup + migrations.push( + sqlMigration(`setup_database_${database.databaseName}`, [ + SQL`-- Database setup for ${database.databaseName}`, + ]), + ); + + // 2. Traverse schemas + for (const schema of database.schemas.values()) { + const schemaSnapshot = snapshot?.schemas[schema.schemaName]; + + // Create schema + migrations.push( + sqlMigration(`create_schema_${schema.schemaName}`, [ + SQL`CREATE SCHEMA IF NOT EXISTS ${schema.schemaName}`, + ]), + ); + + // 3. 
Collect all tables first (structure only, no foreign keys) + const tableCreations: SQLMigration[] = []; + const indexCreations: SQLMigration[] = []; + + for (const table of schema.tables.values()) { + const tableSnapshot = schemaSnapshot?.tables[table.tableName]; + + if (table.migrations.length > 0) { + // Table has explicit migrations, use them + tableCreations.push(...table.migrations); + } else if (table.columns.size > 0) { + // Generate from TypeScript definitions + const tableMigrations = table.generateMigrations(tableSnapshot); + + // Separate table creation from index creation + tableMigrations.forEach((m) => { + if (m.name.includes('index')) { + indexCreations.push(m); + } else { + tableCreations.push(m); + } + }); + } + } + + // Add in correct order + migrations.push(...tableCreations); // All tables first + migrations.push(...indexCreations); // Then all indexes + } + + // Future: Foreign key constraints would go here + // Future: Other constraints, triggers, etc. + + return migrations; +}; + +// Enhanced database component +export const databaseSchemaComponent = ( + options: DatabaseOptions, +): DatabaseSchemaComponent => { + // ... existing implementation ... + + const component = { + // ... existing properties ... + collectAllMigrations: (snapshot?: DatabaseSnapshot) => + collectAllMigrations(component, snapshot), + }; + + return component; +}; +``` + +### Phase 5: Schema Differ + +**New file to create**: `src/packages/dumbo/src/core/schema/differ/schemaDiffer.ts` + +```typescript +export type TableChanges = { + addedColumns: ColumnSchemaComponent[]; + removedColumns: string[]; + modifiedColumns: Array<{ + name: string; + from: ColumnSnapshot; + to: ColumnDefinition; + }>; +}; + +export const diffTableWithSnapshot = ( + table: TableSchemaComponent, + snapshot: TableSnapshot, +): TableChanges => { + const changes: TableChanges = { + addedColumns: [], + removedColumns: [], + modifiedColumns: [], + }; + + // Find added columns + for (const [name, column] of table.columns) { + if (!snapshot.columns[name]) { + changes.addedColumns.push(column); + } + } + + // Find removed columns (might want to warn instead of remove) + for (const name in snapshot.columns) { + if (!table.columns.has(name)) { + changes.removedColumns.push(name); + } + } + + // Find modified columns + for (const [name, column] of table.columns) { + const snapshotColumn = snapshot.columns[name]; + if (snapshotColumn && !columnsEqual(column.definition, snapshotColumn)) { + changes.modifiedColumns.push({ + name, + from: snapshotColumn, + to: column.definition, + }); + } + } + + return changes; +}; + +const columnsEqual = ( + def: ColumnDefinition | undefined, + snap: ColumnSnapshot, +): boolean => { + if (!def) return false; + return ( + def.type === snap.type && + def.nullable === snap.nullable && + def.primaryKey === snap.primaryKey && + def.unique === snap.unique && + def.length === snap.length + ); +}; +``` + +### Phase 6: Type Generation from Schema + +**New file to create**: `src/packages/dumbo/src/core/schema/generators/typeGenerator.ts` + +```typescript +export const generateTypesFromSnapshot = ( + snapshot: DatabaseSnapshot, +): string => { + const lines: string[] = []; + + lines.push('// Auto-generated database types from schema snapshot'); + lines.push('// Do not edit manually - use dumbo schema:generate-types'); + lines.push(''); + + lines.push('export type DatabaseSchema = {'); + + for (const [schemaName, schema] of Object.entries(snapshot.schemas)) { + lines.push(` ${schemaName}: {`); + + for (const 
[tableName, table] of Object.entries(schema.tables)) {
+      lines.push(`    ${tableName}: {`);
+
+      for (const [columnName, column] of Object.entries(table.columns)) {
+        const tsType = sqlTypeToTypeScript(column);
+        lines.push(`      ${columnName}: ${tsType};`);
+      }
+
+      lines.push('    };');
+    }
+
+    lines.push('  };');
+  }
+
+  lines.push('};');
+
+  return lines.join('\n');
+};
+
+const sqlTypeToTypeScript = (column: ColumnSnapshot): string => {
+  let baseType: string;
+
+  switch (column.type) {
+    case 'serial':
+    case 'int':
+    case 'bigint':
+      baseType = 'number';
+      break;
+    case 'varchar':
+    case 'text':
+    case 'uuid':
+      baseType = 'string';
+      break;
+    case 'boolean':
+      baseType = 'boolean';
+      break;
+    case 'timestamp':
+      baseType = 'Date';
+      break;
+    case 'json':
+    case 'jsonb':
+      baseType = 'unknown';
+      break;
+    default:
+      baseType = 'unknown';
+  }
+
+  return column.nullable ? `${baseType} | null` : baseType;
+};
+
+// Example generated output:
+// export type DatabaseSchema = {
+//   public: {
+//     users: {
+//       id: number;
+//       email: string;
+//       name: string | null;
+//     };
+//     posts: {
+//       id: number;
+//       userId: number;
+//       title: string;
+//       content: string | null;
+//     };
+//   };
+// };
+```
+
+### Phase 7: Migration Immutability Validation
+
+**New file to create**: `src/packages/dumbo/src/core/schema/validators/migrationImmutabilityValidator.ts`
+
+```typescript
+import { SQLMigration } from '../sqlMigration';
+
+export type ImmutabilityValidationResult = {
+  valid: boolean;
+  error?: string;
+};
+
+export const validateMigrationImmutability = (
+  currentMigrations: ReadonlyArray<SQLMigration>,
+  snapshotMigrations: ReadonlyArray<SQLMigration>,
+): ImmutabilityValidationResult => {
+  // Check that no migrations were removed
+  for (const snapshotMigration of snapshotMigrations) {
+    const currentMigration = currentMigrations.find(
+      (m) => m.name === snapshotMigration.name,
+    );
+
+    if (!currentMigration) {
+      return {
+        valid: false,
+        error:
+          `Migration '${snapshotMigration.name}' was removed from component!\n` +
+          `Migrations cannot be deleted once defined.\n` +
+          `If you need to undo a migration, add a new migration that reverses it.`,
+      };
+    }
+
+    // Check that migration content hasn't changed
+    const snapshotSqls = snapshotMigration.sqls
+      .map((sql) => sql.toString())
+      .join('\n');
+    const currentSqls = currentMigration.sqls
+      .map((sql) => sql.toString())
+      .join('\n');
+
+    if (snapshotSqls !== currentSqls) {
+      return {
+        valid: false,
+        error:
+          `Migration '${snapshotMigration.name}' has been modified!\n` +
+          `Original SQL:\n${snapshotSqls}\n\n` +
+          `Current SQL:\n${currentSqls}\n\n` +
+          `Migrations must be immutable once defined. Add a new migration instead.`,
+      };
+    }
+  }
+
+  return { valid: true };
+};
+
+// Helper to detect new migrations
+export const getNewMigrations = (
+  currentMigrations: ReadonlyArray<SQLMigration>,
+  snapshotMigrations: ReadonlyArray<SQLMigration>,
+): SQLMigration[] => {
+  const snapshotNames = new Set(snapshotMigrations.map((m) => m.name));
+  return currentMigrations.filter((m) => !snapshotNames.has(m.name));
+};
+```
+
+### Phase 8: Schema Validation System
+
+**New file to create**: `src/packages/dumbo/src/core/schema/validators/migrationValidator.ts`
+
+```typescript
+export type ValidationResult = {
+  valid: boolean;
+  errors: Array<ValidationError>;
+  warnings: Array<ValidationWarning>;
+};
+
+export type ValidationError = {
+  type:
+    | 'missing_column'
+    | 'type_mismatch'
+    | 'constraint_mismatch'
+    | 'missing_table';
+  message: string;
+  location: {
+    component: string;
+    file?: string;
+    line?: number;
+  };
+  expected: string;
+  actual: string;
+};
+
+export type ValidationWarning = {
+  type: 'extra_column' | 'extra_index' | 'deprecated_type';
+  message: string;
+};
+
+export const validateMigrationsAgainstSchema = (
+  component: SchemaComponent,
+  options?: { strict: boolean },
+): ValidationResult => {
+  const result: ValidationResult = {
+    valid: true,
+    errors: [],
+    warnings: [],
+  };
+
+  // Only validate if component has both TypeScript definitions AND migrations
+  if (
+    !hasTypeScriptDefinitions(component) ||
+    component.migrations.length === 0
+  ) {
+    return result;
+  }
+
+  // Simulate migrations in memory to get resulting schema
+  const resultingSchema = simulateMigrations(component.migrations);
+
+  // Compare with TypeScript definitions
+  const comparison = compareSchemas(component, resultingSchema, options);
+
+  // Generate detailed error messages
+  for (const mismatch of comparison.mismatches) {
+    result.errors.push({
+      type: mismatch.type,
+      message: formatErrorMessage(mismatch),
+      location: {
+        component: component.schemaComponentKey,
+        file: mismatch.file,
+        line: mismatch.line,
+      },
+      expected: mismatch.expected,
+      actual: mismatch.actual,
+    });
+    result.valid = false;
+  }
+
+  return result;
+};
+
+const formatErrorMessage = (mismatch: SchemaMismatch): string => {
+  return `Column '${mismatch.column}' mismatch in table '${mismatch.table}':
+  Expected: ${mismatch.expected} (from ${mismatch.tsFile}:${mismatch.tsLine})
+  Actual: ${mismatch.actual} (from ${mismatch.sqlFile}:${mismatch.sqlLine})
+  Fix: Either update TypeScript definition or add migration to match`;
+};
+```
+
+### Phase 9: CLI Commands
+
+**New CLI commands to implement**:
+
+```bash
+# Compare current TypeScript definitions with snapshot
+dumbo schema:diff
+
+# Generate migrations from diff
+dumbo schema:generate [name]
+# Example: dumbo schema:generate add-email-to-users
+
+# Update snapshot to current state
+dumbo schema:snapshot
+
+# Generate TypeScript types from snapshot
+dumbo schema:generate-types
+
+# Validate migrations against TypeScript definitions
+dumbo schema:validate
+
+# Show current schema state
+dumbo schema:status
+
+# Collect and run all migrations from database component
+dumbo schema:migrate
+```
+
+### Phase 10: Migration Orchestrator
+
+**New file to create**: `src/packages/dumbo/src/core/schema/orchestrator/migrationOrchestrator.ts`
+
+```typescript
+export class MigrationOrchestrator {
+  constructor(
+    private database: DatabaseSchemaComponent,
+    private snapshotPath: string = '.dumbo/snapshots',
+    private migrationsPath: string = '.dumbo/migrations',
+  ) {}
+
+  async processMigrations(): Promise<{
+    userProvided: SQLMigration[];
+    generated: SQLMigration[];
+    all: SQLMigration[];
+  }> {
+    // Load latest snapshot if exists
+    const snapshot = await this.loadSnapshot();
+
+    // Validate migration immutability
+    const validation = await this.validateMigrations(snapshot);
+    if (!validation.valid) {
+      throw new Error(validation.error);
+    }
+
+    // Process user-provided migrations
+    const userProvidedMigrations =
+      await this.processUserProvidedMigrations(snapshot);
+
+    // Generate migrations for TypeScript-defined components
+    const generatedMigrations =
+      await this.generateMigrationsFromDefinitions(snapshot);
+
+    return {
+      userProvided: userProvidedMigrations,
+      generated: generatedMigrations,
+      all: [...userProvidedMigrations, ...generatedMigrations],
+    };
+  }
+
+  private async processUserProvidedMigrations(
+    snapshot: DatabaseSnapshot | null,
+  ): Promise<SQLMigration[]> {
+    const newMigrations: SQLMigration[] = [];
+
+    // Process database-level migrations
+    if (this.database.migrations.length > 0) {
+      const snapshotMigrations = snapshot?.migrations || [];
+      const newDbMigrations = getNewMigrations(
+        this.database.migrations,
+        snapshotMigrations,
+      );
+
+      for (const migration of newDbMigrations) {
+        // Write to migrations folder
+        await this.writeMigrationToFile(
+          migration,
+          'database',
+          this.database.schemaComponentKey,
+        );
+        newMigrations.push(migration);
+      }
+    }
+
+    // Traverse and process schema and table migrations
+    for (const schema of this.database.schemas.values()) {
+      const schemaSnapshot = snapshot?.schemas[schema.schemaName];
+
+      // Process schema migrations
+      if (schema.migrations.length > 0) {
+        const snapshotMigrations = schemaSnapshot?.migrations || [];
+        const newSchemaMigrations = getNewMigrations(
+          schema.migrations,
+          snapshotMigrations,
+        );
+
+        for (const migration of newSchemaMigrations) {
+          await this.writeMigrationToFile(
+            migration,
+            'schema',
+            schema.schemaComponentKey,
+          );
+          newMigrations.push(migration);
+        }
+      }
+
+      // Process table migrations
+      for (const table of schema.tables.values()) {
+        const tableSnapshot = schemaSnapshot?.tables[table.tableName];
+
+        if (table.migrations.length > 0) {
+          const snapshotMigrations = tableSnapshot?.migrations || [];
+          const newTableMigrations = getNewMigrations(
+            table.migrations,
+            snapshotMigrations,
+          );
+
+          for (const migration of newTableMigrations) {
+            await this.writeMigrationToFile(
+              migration,
+              'table',
+              table.schemaComponentKey,
+            );
+            newMigrations.push(migration);
+          }
+        }
+      }
+    }
+
+    return newMigrations;
+  }
+
+  private async writeMigrationToFile(
+    migration: SQLMigration,
+    type: 'database' | 'schema' | 'table',
+    componentKey: string,
+  ): Promise<void> {
+    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+    const filename = `${timestamp}-${migration.name}.sql`;
+
+    const header = [
+      `-- Source: provided (from component migrations array)`,
+      `-- Component: ${componentKey}`,
+      `-- Type: ${type}`,
+      `-- Migration name: ${migration.name}`,
+      `-- Written at: ${new Date().toISOString()}`,
+      '',
+      '',
+    ].join('\n');
+
+    const content =
+      header + migration.sqls.map((sql) => sql.toString()).join(';\n') + ';';
+
+    await writeFile(`${this.migrationsPath}/${filename}`, content);
+  }
+
+  private async validateMigrations(
+    snapshot: DatabaseSnapshot | null,
+  ): Promise<ImmutabilityValidationResult> {
+    if (!snapshot) return { valid: true };
+
+    // Validate database migrations
+    const dbValidation = validateMigrationImmutability(
+      this.database.migrations,
+      snapshot.migrations || [],
+    );
+    if (!dbValidation.valid) return dbValidation;
+
+    // Validate schema and table migrations
+    for (const schema of this.database.schemas.values()) {
+      const schemaSnapshot = snapshot.schemas[schema.schemaName];
+      if (!schemaSnapshot) continue;
+
+      const schemaValidation = validateMigrationImmutability(
+        schema.migrations,
+        schemaSnapshot.migrations || [],
+      );
+      if (!schemaValidation.valid) return schemaValidation;
+
+      for (const table of schema.tables.values()) {
+        const tableSnapshot = schemaSnapshot.tables[table.tableName];
+        if (!tableSnapshot) continue;
+
+        const tableValidation = validateMigrationImmutability(
+          table.migrations,
+          tableSnapshot.migrations || [],
+        );
+        if (!tableValidation.valid) return tableValidation;
+      }
+    }
+
+    return { valid: true };
+  }
+
+  async runMigrations(dumbo: Dumbo): Promise<void> {
+    const processed = await this.processMigrations();
+
+    // Group migrations by type for correct ordering
+    const grouped = this.groupMigrations(processed.all);
+
+    // Run in correct order
+    await this.runMigrationGroup(dumbo, grouped.schemas);
+    await this.runMigrationGroup(dumbo, grouped.tables);
+    await this.runMigrationGroup(dumbo, grouped.columns);
+    await this.runMigrationGroup(dumbo, grouped.indexes);
+    // Future: grouped.foreignKeys, grouped.constraints
+
+    // Update snapshot after successful migration
+    await this.updateSnapshot();
+  }
+
+  private async loadSnapshot(): Promise<DatabaseSnapshot | null> {
+    try {
+      // Load root snapshot which imports all nested snapshots
+      const module = await import(
+        `${this.snapshotPath}/${this.database.databaseName}.snapshot.ts`
+      );
+      return module.databaseSnapshot;
+    } catch {
+      // No snapshot found, will generate everything from scratch
+      return null;
+    }
+  }
+
+  private async updateSnapshot(): Promise<void> {
+    // Generate new snapshots for each component
+    await this.generateDatabaseSnapshot(this.database);
+  }
+
+  private async generateDatabaseSnapshot(
+    database: DatabaseSchemaComponent,
+  ): Promise<void> {
+    // Generate nested snapshot files
+    for (const schema of database.schemas.values()) {
+      await this.generateSchemaSnapshot(schema);
+    }
+
+    // Generate root database snapshot that imports schemas
+    const imports = Array.from(database.schemas.values())
+      .map(
+        (s) =>
+          `import { ${s.schemaName}Snapshot } from './schemas/${s.schemaName}.snapshot';`,
+      )
+      .join('\n');
+
+    const migrationsStr = this.formatMigrationsForSnapshot(database.migrations);
+
+    const content = `${imports}
+
+export const databaseSnapshot = {
+  databaseName: '${database.databaseName}',
+  version: '${new Date().toISOString().split('T')[0]}-001',
+  timestamp: '${new Date().toISOString()}',
+  schemas: {
+    ${Array.from(database.schemas.values())
+      .map((s) => `${s.schemaName}: ${s.schemaName}Snapshot`)
+      .join(',\n    ')}
+  },
+  migrations: ${migrationsStr}
+} as const;`;
+
+    await writeFile(
+      `${this.snapshotPath}/${database.databaseName}.snapshot.ts`,
+      content,
+    );
+  }
+
+  private async generateSchemaSnapshot(
+    schema: DatabaseSchemaSchemaComponent,
+  ): Promise<void> {
+    // Generate table snapshots first
+    for (const table of schema.tables.values()) {
+      await this.generateTableSnapshot(schema.schemaName, table);
+    }
+
+    // Generate schema snapshot that imports tables
+    const imports = Array.from(schema.tables.values())
+      .map(
+        (t) =>
+          `import { ${t.tableName}Snapshot } from './${schema.schemaName}/${t.tableName}.snapshot';`,
+      )
+      .join('\n');
+
+    const migrationsStr = this.formatMigrationsForSnapshot(schema.migrations);
+
+    const content = `${imports}
+
+export const ${schema.schemaName}Snapshot = {
+  schemaName: '${schema.schemaName}',
+  tables: {
+    ${Array.from(schema.tables.values())
+      .map((t) => `${t.tableName}: ${t.tableName}Snapshot`)
+      .join(',\n    ')}
+  },
+  migrations: ${migrationsStr}
+} as const;`;
+
+    await writeFile(
+      `${this.snapshotPath}/schemas/${schema.schemaName}.snapshot.ts`,
+      content,
+    );
+  }
+
+  private async generateTableSnapshot(
+    schemaName: string,
+    table: TableSchemaComponent,
+  ): Promise<void> {
+    const migrationsStr = this.formatMigrationsForSnapshot(table.migrations);
+
+    const content = `export const ${table.tableName}Snapshot = {
+  tableName: '${table.tableName}',
+  columns: {
+    ${Array.from(table.columns.entries())
+      .map(([name, col]) => this.generateColumnSnapshot(name, col))
+      .join(',\n    ')}
+  },
+  indexes: {
+    ${Array.from(table.indexes.entries())
+      .map(([name, idx]) => this.generateIndexSnapshot(name, idx))
+      .join(',\n    ')}
+  },
+  migrations: ${migrationsStr}
+} as const;`;
+
+    await writeFile(
+      `${this.snapshotPath}/schemas/${schemaName}/${table.tableName}.snapshot.ts`,
+      content,
+    );
+  }
+
+  private formatMigrationsForSnapshot(
+    migrations: ReadonlyArray<SQLMigration>,
+  ): string {
+    if (migrations.length === 0) return '[]';
+
+    const migrationStrs = migrations.map((m) => {
+      const sqlsStr = m.sqls
+        .map((sql) => `'${sql.toString().replace(/'/g, "\\'")}'`)
+        .join(', ');
+      return `{ name: '${m.name}', sqls: [${sqlsStr}] }`;
+    });
+
+    return `[\n    ${migrationStrs.join(',\n    ')}\n  ]`;
+  }
+
+  private groupMigrations(migrations: SQLMigration[]) {
+    return {
+      schemas: migrations.filter((m) => m.name.includes('schema')),
+      tables: migrations.filter((m) => m.name.includes('create_table')),
+      columns: migrations.filter(
+        (m) => m.name.includes('add_') || m.name.includes('alter_'),
+      ),
+      indexes: migrations.filter((m) => m.name.includes('index')),
+    };
+  }
+}
+```
+
+## Testing Strategy
+
+### Unit Tests
+
+Following the pattern in [src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.unit.spec.ts](src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.unit.spec.ts):
+
+```typescript
+// src/packages/dumbo/src/core/schema/generators/migrationGenerator.unit.spec.ts
+import { describe, it } from 'node:test';
+import assert from 'node:assert';
+import { tableSchemaComponent } from '../components/tableSchemaComponent';
+import { column } from '../components/columnSchemaComponent';
+
+describe('generateTableMigrations', () => {
+  it('should generate CREATE TABLE from TypeScript definition', () => {
+    const table = tableSchemaComponent({
+      tableName: 'users',
+      columns: {
+        id: column('id', { type: 'serial', primaryKey: true }),
+        email: column('email', {
+          type: 'varchar',
+          length: 255,
+          nullable: false,
+        }),
+      },
+    });
+
+    const migrations = table.generateMigrations();
+
+    assert.equal(migrations.length, 1);
+    assert.match(migrations[0].sqls[0].toString(), /CREATE TABLE IF NOT EXISTS users/);
+    assert.match(migrations[0].sqls[0].toString(), /id SERIAL PRIMARY KEY/);
+    assert.match(migrations[0].sqls[0].toString(), /email VARCHAR\(255\) NOT NULL/);
+  });
+
+  it('should detect added columns when comparing with snapshot', () => {
+    const snapshot = {
+      tableName: 'users',
+      columns: {
+        id: { type: 'serial' as const, primaryKey: true },
+      },
+      indexes: {},
+      migrations: [],
+    };
+
+    const table = tableSchemaComponent({
+      tableName: 'users',
+      columns: {
+        id: column('id', { type: 'serial', primaryKey: true }),
+        email: column('email', { type: 'varchar', length: 255 }),
+      },
+    });
+
+    const migrations = table.generateMigrations(snapshot);
+
+    assert.equal(migrations.length, 1);
+    assert.match(
+      migrations[0].sqls[0].toString(),
+      /ALTER TABLE users ADD COLUMN IF NOT EXISTS email/,
+    );
+  });
+});
+```
+
+### Integration Tests
+
+```typescript
+//
src/packages/dumbo/src/core/schema/orchestrator/migrationOrchestrator.integration.spec.ts +import { describe, it, assert, beforeEach } from 'vitest'; +import { MigrationOrchestrator } from './migrationOrchestrator'; +import { createTestDatabase } from '../../test/utils'; + +describe('MigrationOrchestrator integration', () => { + let db: Dumbo; + + beforeEach(async () => { + db = await createTestDatabase(); + }); + + it('should apply schema changes in correct order', async () => { + const schema = databaseSchemaComponent({ + databaseName: 'test', + schemas: { + public: databaseSchemaSchemaComponent({ + schemaName: 'public', + tables: { + users: tableSchemaComponent({ + tableName: 'users', + columns: { + id: column('id', { type: 'serial', primaryKey: true }), + email: column('email', { type: 'varchar', length: 255 }), + }, + }), + posts: tableSchemaComponent({ + tableName: 'posts', + columns: { + id: column('id', { type: 'serial', primaryKey: true }), + userId: column('userId', { + type: 'int', + references: { table: 'users', column: 'id' }, + }), + }, + }), + }, + }), + }, + }); + + const orchestrator = new MigrationOrchestrator(schema); + await orchestrator.runMigrations(db); + + // Verify tables were created + const tables = await db.query(` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + `); + + assert.deepEqual(tables.map((t) => t.table_name).sort(), [ + 'posts', + 'users', + ]); + + // Verify columns exist + const columns = await db.query(` + SELECT column_name, data_type + FROM information_schema.columns + WHERE table_name = 'users' + `); + + assert.equal(columns.length, 2); + }); +}); +``` + +### End-to-End Tests + +```typescript +// src/packages/dumbo/src/core/schema/e2e/schemaEvolution.e2e.spec.ts +describe('Schema evolution E2E', () => { + it('should handle complete schema lifecycle', async () => { + // 1. Initial schema definition + const v1Schema = createSchema({ version: 1 }); + await orchestrator.runMigrations(v1Schema); + await orchestrator.updateSnapshot(); + + // 2. Schema modification + const v2Schema = createSchema({ + version: 2, + addColumn: { table: 'users', column: 'phone' }, + }); + + // 3. Diff detection + const diff = await orchestrator.diff(v2Schema); + assert.equal(diff.changes.length, 1); + assert.equal(diff.changes[0].type, 'add_column'); + + // 4. Migration generation + const migrations = await orchestrator.generateMigrations(); + assert.equal(migrations.length, 1); + + // 5. Migration application + await orchestrator.runMigrations(v2Schema); + + // 6. Snapshot update + await orchestrator.updateSnapshot(); + + // 7. Type generation + const types = await generateTypesFromSnapshot(); + assert.match(types, /phone: string | null/); + + // 8. 
Validation + const validation = await validateMigrationsAgainstSchema(v2Schema); + assert.equal(validation.valid, true); + }); +}); +``` + +## Complete Flow Example + +This section demonstrates how all pieces work together in practice: + +### Step 1: Initial Component Definition with SQL Migration + +```typescript +// User defines component with custom SQL migration +const usersTable = tableSchemaComponent({ + tableName: 'users', + columns: { + id: column('id', { type: 'serial', primaryKey: true }), + email: column('email', { type: 'varchar', length: 255 }), + }, + migrations: [ + sqlMigration('001_custom_create', [ + SQL`CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) UNIQUE, + internal_tracking JSONB DEFAULT '{}' + )`, + ]), + ], +}); +``` + +### Step 2: First Orchestrator Run + +```typescript +const orchestrator = new MigrationOrchestrator(database); + +// Process migrations (validates, diffs, writes new ones) +await orchestrator.processMigrations(); +// - No snapshot exists, so no validation needed +// - Detects '001_custom_create' as new +// - Writes to: .dumbo/migrations/2024-01-15-001-custom_create.sql +``` + +### Step 3: Generated Files After First Run + +```sql +-- .dumbo/migrations/2024-01-15-001-custom_create.sql +-- Source: provided (from component migrations array) +-- Component: sc:dumbo:table:users +-- Type: table +-- Migration name: 001_custom_create +-- Written at: 2024-01-15T10:30:00Z + +CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) UNIQUE, + internal_tracking JSONB DEFAULT '{}' +); +``` + +```typescript +// .dumbo/snapshots/schemas/public/users.snapshot.ts +export const usersSnapshot = { + tableName: 'users', + columns: { + id: { type: 'serial' as const, primaryKey: true }, + email: { type: 'varchar' as const, length: 255, nullable: false }, + }, + indexes: {}, + migrations: [ + { + name: '001_custom_create', + sqls: [ + "CREATE TABLE users (id SERIAL PRIMARY KEY, email VARCHAR(255) UNIQUE, internal_tracking JSONB DEFAULT '{}')", + ], + }, + ], +} as const; +``` + +### Step 4: User Adds New Column and Migration + +```typescript +// User updates component +const usersTable = tableSchemaComponent({ + tableName: 'users', + columns: { + id: column('id', { type: 'serial', primaryKey: true }), + email: column('email', { type: 'varchar', length: 255 }), + name: column('name', { type: 'varchar', length: 100 }), // NEW! + }, + migrations: [ + sqlMigration('001_custom_create', [ + SQL`CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) UNIQUE, + internal_tracking JSONB DEFAULT '{}' + )`, + ]), + sqlMigration('002_add_name', [ + // NEW! + SQL`ALTER TABLE users ADD COLUMN name VARCHAR(100)`, + ]), + ], +}); +``` + +### Step 5: Second Orchestrator Run + +```typescript +await orchestrator.processMigrations(); +// 1. Validates '001_custom_create' hasn't changed ✓ +// 2. Detects '002_add_name' as new +// 3. Writes new migration to file +// 4. NO generated migration for 'name' column (user provided SQL) +``` + +### Step 6: Error Case - Modified Migration + +```typescript +// User accidentally modifies existing migration (BAD!) +const usersTable = tableSchemaComponent({ + tableName: 'users', + migrations: [ + sqlMigration('001_custom_create', [ + SQL`CREATE TABLE users ( + id BIGSERIAL PRIMARY KEY, // Changed from SERIAL! 
+ email VARCHAR(255) UNIQUE, + internal_tracking JSONB DEFAULT '{}' + )`, + ]), + sqlMigration('002_add_name', [ + SQL`ALTER TABLE users ADD COLUMN name VARCHAR(100)`, + ]), + ], +}); + +await orchestrator.processMigrations(); +// Throws error: +// Migration '001_custom_create' has been modified! +// Original SQL: +// CREATE TABLE users (id SERIAL PRIMARY KEY, email VARCHAR(255) UNIQUE, internal_tracking JSONB DEFAULT '{}') +// +// Current SQL: +// CREATE TABLE users (id BIGSERIAL PRIMARY KEY, email VARCHAR(255) UNIQUE, internal_tracking JSONB DEFAULT '{}') +// +// Migrations must be immutable once defined. Add a new migration instead. +``` + +### Step 7: Mixed Mode - TypeScript + SQL + +```typescript +// Another table with TypeScript-first approach +const postsTable = tableSchemaComponent({ + tableName: 'posts', + columns: { + id: column('id', { type: 'serial', primaryKey: true }), + title: column('title', { type: 'varchar', length: 255 }), + userId: column('userId', { + type: 'int', + references: { table: 'users', column: 'id' }, + }), + }, + // No migrations provided - will be generated! +}); + +await orchestrator.processMigrations(); +// Generates and writes: .dumbo/migrations/2024-01-15-002-create_table_posts.sql +// Content: +// -- Source: generated (from TypeScript definitions) +// -- Component: sc:dumbo:table:posts +// -- Generated at: 2024-01-15T10:45:00Z +// CREATE TABLE IF NOT EXISTS posts ( +// id SERIAL PRIMARY KEY, +// title VARCHAR(255) NOT NULL, +// userId INT REFERENCES users(id) +// ); +``` + +### Step 8: Type Generation + +```typescript +// Generate types from snapshots +const types = await generateTypesFromSnapshot(databaseSnapshot); + +// Results in: +export type DatabaseSchema = { + public: { + users: { + id: number; + email: string; + name: string | null; + }; + posts: { + id: number; + title: string; + userId: number; + }; + }; +}; + +// Can be used in Pongo query builder: +const result = await pongo + .from('users') + .where('email', '=', 'test@example.com') + .select(['id', 'name']); +// TypeScript knows result is { id: number; name: string | null }[] +``` + +## Migration Strategy for Existing Code + +1. **Backward Compatible**: Existing SQL-first code continues to work unchanged +2. **Progressive Enhancement**: Can add TypeScript definitions gradually +3. **Opt-in Validation**: Validation only runs when explicitly enabled +4. **Safe Defaults**: Use `CREATE IF NOT EXISTS` when no snapshot found + +## Success Criteria + +1. ✅ Each component can generate its own migrations +2. ✅ Database component traverses tree in correct order +3. ✅ Can use raw SQL migrations without TypeScript (backward compatible) +4. ✅ Can validate SQL migrations against TypeScript definitions +5. ✅ Nested snapshot system minimizes merge conflicts +6. ✅ Types can be generated from schema for Pongo integration +7. ✅ CLI provides clear workflow for schema evolution +8. ✅ Type safety maintained throughout +9. ✅ No breaking changes to existing API + +## Key Architectural Decisions + +1. **Each component generates its own migrations** - Encapsulation and single responsibility +2. **Database component orchestrates collection** - Single entry point for migration generation +3. **Tree traversal follows dependency order** - Schemas → Tables → Columns → Indexes → Foreign Keys +4. **TypeScript definitions are source of truth when present** - Override SQL for intended state +5. **Nested snapshot structure** - Each table/schema in separate file to minimize conflicts +6. 
**TypeScript snapshot format** - Type safety and IDE support over JSON simplicity +7. **Snapshots track component migrations** - Enable detection of new user-provided migrations +8. **All migrations go to `.dumbo/migrations/`** - Both generated and user-provided, uniform handling +9. **Migration immutability enforced** - Once in snapshot, migrations cannot be modified +10. **CREATE IF NOT EXISTS as default** - Safe behavior when no snapshot exists +11. **Indexes generate as separate migrations** - Clean separation of concerns +12. **Validation is opt-in** - No breaking changes, teams choose their strictness level +13. **Type generation from snapshots** - Enable strongly-typed Pongo query builders + +## Implementation Order + +1. **Phase 1**: Column definitions with SQL generation +2. **Phase 2**: Nested snapshot system structure with migration tracking +3. **Phase 3**: Component-specific migration generation (each component file) +4. **Phase 4**: Tree traversal in database component +5. **Phase 5**: Schema differ for change detection +6. **Phase 6**: Type generation from schema snapshots +7. **Phase 7**: Migration immutability validation +8. **Phase 8**: Schema validation system with detailed errors +9. **Phase 9**: CLI commands for workflow +10. **Phase 10**: Migration orchestrator with diffing and validation + +Each phase builds on the previous one and can be tested independently. The key insight is that each component knows how to generate its own SQL, and the database component knows how to traverse the tree in the correct order to collect all migrations. + +## Future Enhancements (Out of Scope) + +1. **Query Builder Integration**: Use generated types for strongly-typed Pongo queries +2. **Schema Introspection**: Read current schema from database and generate TypeScript +3. **Migration Rollback**: Track down migrations and generate reversal scripts +4. **Multi-Database Support**: PostgreSQL, MySQL, SQLite-specific features +5. **Partial Schema Definitions**: Allow defining subset of columns for gradual adoption +6. **Smart Merge Tool**: CLI command to help resolve snapshot conflicts +7. **Performance Optimization**: Cache component trees for large schemas + +## Summary + +This plan unifies SQL migrations and TypeScript schema definitions through: + +### Core Innovations + +1. **Dual-purpose snapshots** - Track both current schema state AND component migrations +2. **Migration diffing** - Detect new user-provided migrations by comparing with snapshots +3. **Immutability enforcement** - Prevent dangerous modification of existing migrations +4. **Unified migration storage** - All migrations (generated/provided) go to `.dumbo/migrations/` +5. **Nested snapshot structure** - Minimize git conflicts by separating tables into files + +### Key Benefits + +- **Flexibility** - Support SQL-first, TypeScript-first, or hybrid approaches +- **Safety** - Migration immutability prevents accidental schema corruption +- **Type Safety** - Generate types from snapshots for Pongo query builders +- **Gradual Adoption** - Add TypeScript definitions progressively to SQL-first projects +- **Clear Audit Trail** - Track all migrations with source metadata + +### Migration Workflow + +1. Component defines schema (TypeScript) and/or migrations (SQL) +2. Orchestrator validates immutability against snapshot +3. New user-provided migrations are written to files +4. TypeScript definitions generate missing migrations +5. Snapshots update to reflect current state + migrations +6. 
Types are generated for strongly-typed queries

This architecture provides a robust foundation for schema evolution while maintaining backwards compatibility and enabling progressive enhancement.

## References

- Component hierarchy uses URN system with `sc:dumbo:database:{name}` pattern
- Migration bubbling happens at lines 79-83 in schemaComponent.ts
- Recent refactoring in commit 03fb40a changed to use `Exclude` instead of `Omit`
- Test patterns follow existing dumboSchema.unit.spec.ts structure
- Integration target is Pongo schema system in src/packages/pongo/src/core/schema/index.ts

diff --git a/src/packages/dumbo/src/core/schema/REVIEW.MD b/src/packages/dumbo/src/core/schema/REVIEW.MD
new file mode 100644
index 00000000..771dda1b
--- /dev/null
+++ b/src/packages/dumbo/src/core/schema/REVIEW.MD
@@ -0,0 +1,201 @@

Deep Analysis of the Architecture

After thoroughly rereading the plan, here's my multi-perspective analysis:

From a Regular User's Perspective (Storage Library Consumer)

Strengths:
- Progressive Disclosure of Complexity - Users can start simple (SQL-only or TypeScript-only) and gradually adopt more features. This is excellent API design.
- Familiar Patterns - The API resembles popular ORMs (Prisma, TypeORM), making adoption easier.
- Clear Error Messages - The immutability validation provides actionable feedback rather than cryptic errors.
- Type Safety Benefits - The generated types for query builders provide immediate value that developers can feel in their IDE.

Potential Pain Points:
- Hidden Complexity in Snapshots - The dual-purpose snapshots (schema state + migration tracking) might be confusing initially. Users might not understand why snapshots contain their migrations.
- Directory Proliferation - The .dumbo/ folder with nested snapshots could feel overwhelming for small projects.
- Mental Model Shift - Understanding when TypeScript is truth vs when SQL is truth requires careful documentation and good examples.
- Migration Immutability - While safer, this might frustrate users coming from Rails/Django where you can edit migrations before deploying.

From a Software Architect's Perspective

Architectural Strengths:
- Separation of Concerns - Each component generating its own SQL is textbook single responsibility principle. This makes the system highly testable and maintainable.
- Command-Query Separation - The clear distinction between:
  - Commands (processMigrations, runMigrations)
  - Queries (loadSnapshot, validateMigrations)
  - Pure functions (generateMigrations, diffTableWithSnapshot)
- Temporal Decoupling - Snapshots act as a temporal boundary, allowing the system to reason about "then vs now" without querying the database.
- Composability - The tree structure with proper traversal order handles complex dependencies elegantly.

Architectural Concerns:
- Snapshot as Dual Source of Truth - Using snapshots for BOTH schema state AND migration tracking violates single responsibility. Consider splitting into:
  - Schema snapshots (current state for diffing)
  - Migration manifest (tracking what's been processed)
- File System as Database - The .dumbo/migrations/ directory becomes a critical data store. What happens with:
  - Concurrent writes?
  - File system permissions?
  - CI/CD environments?
- Type Generation Timing - Generating types from snapshots rather than from the database means types might not reflect reality if migrations fail partially.
- Validation vs Enforcement - The system validates but doesn't enforce schema consistency. A team member could still manually edit the database.
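To make the suggested snapshot/manifest split concrete, here is a minimal sketch of the two artifacts as separate types. All names here (TableSchemaSnapshot, MigrationManifestEntry, and their fields) are illustrative, not part of the existing API:

```typescript
// Illustrative shapes only; nothing here exists in dumbo today.

// Schema snapshot: pure state, used for diffing (what IS).
type TableSchemaSnapshot = Readonly<{
  tableName: string;
  columns: Record<
    string,
    { type: string; primaryKey?: boolean; length?: number; nullable?: boolean }
  >;
  indexes: Record<string, { columnNames: string[]; unique: boolean }>;
}>;

// Migration manifest: pure history, used for immutability checks (what WAS DONE).
type MigrationManifestEntry = Readonly<{
  component: string; // e.g. 'sc:dumbo:table:users'
  name: string; // e.g. '001_custom_create'
  hash: string; // content hash of the SQL at processing time
  processedAt: string; // ISO timestamp
  source: 'provided' | 'generated';
}>;

type MigrationManifest = Readonly<{ entries: MigrationManifestEntry[] }>;
```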
Architecture as Code Analysis

Elegant Design Patterns:
- Builder Pattern in the dumboSchema API:

```typescript
dumboSchema.database('myapp', {
  public: dumboSchema.schema('public', {
    users: dumboSchema.table('users', {...}),
  }),
});
```

- Template Method Pattern in migration generation - each component implements its own generateMigrations() but follows the same structure.
- Visitor Pattern in tree traversal - the orchestrator visits each component in dependency order.
- Memento Pattern - Snapshots act as mementos capturing component state at a point in time.

Code Smells to Address:
- Large Parameter Lists - The orchestrator constructor and several functions have many parameters that could be grouped into configuration objects.
- String Manipulation for SQL - Building SQL through string concatenation (even with template literals) is risky. Consider a SQL builder abstraction.
- Implicit Ordering Dependencies - The migration grouping relies on naming conventions (includes('schema'), includes('create_table')). This is fragile.
- Mixed Abstraction Levels - The orchestrator handles both high-level workflow and low-level file I/O. Consider extracting a MigrationWriter service.

Philosophical Considerations

The Snapshot Paradox: The snapshots serve as both historical record (what migrations were defined) and current state (what the schema looks like). This dual nature is philosophically interesting but practically complex. It's like Git storing both diffs AND the current state in the same structure.

Migration Immutability Trade-off: Enforcing immutability is philosophically sound (history should be append-only) but practically challenging. Real-world schema evolution is messy, and sometimes you need to fix a broken migration.

TypeScript as Truth: The decision to make TypeScript the source of truth when present is bold. It prioritizes developer experience over database reality, which could lead to drift.

Recommendations for Improvement
- Split Snapshot Responsibilities:
  - Keep schema snapshots for state
  - Add a separate .dumbo/manifest.json for tracking processed migrations
- Add Schema Verification:
  - After running migrations, query information_schema to verify the result matches expectations
  - Generate types from actual database state, not snapshots
- Implement Conflict Resolution:
  - Add a dumbo schema:merge command for snapshot conflicts
  - Use three-way merge logic (base, mine, theirs)
- Consider Event Sourcing:
  - Treat migrations as events in an event store
  - Snapshots become projections of those events
  - Enables time-travel debugging
- Add Dry-Run Capability:
  - Let users preview what migrations would be generated/applied
  - Essential for production deployments

Overall Assessment

This is a sophisticated and well-thought-out architecture that successfully bridges the gap between SQL-first and TypeScript-first workflows. The use of snapshots for migration tracking is innovative, though it adds complexity. The system would benefit from:
- Clearer separation between state tracking and migration manifest
- Runtime verification that generated types match database reality
- Better tooling for snapshot conflict resolution
- More explicit handling of edge cases (partial migration failure, concurrent modifications)

The architecture shows maturity in its backwards compatibility and progressive enhancement approach. It's clearly designed by someone who understands both the technical challenges and the human factors of schema management.
Grade: B+. Strong foundation with room for refinement in the snapshot dual-responsibility and runtime verification aspects.

#########

What if snapshots were stored as JSON? Would that help in understanding the split?

Excellent question! Let me think through how JSON snapshots would affect the architecture, particularly around the split of responsibilities.

JSON Snapshots: Impact Analysis

How JSON Would Help with the Split

Current Issue with TypeScript Snapshots:

```typescript
// .dumbo/snapshots/schemas/public/users.snapshot.ts
export const usersSnapshot = {
  tableName: 'users',
  columns: { /* schema state */ },
  migrations: [ /* migration tracking */ ], // Mixed responsibility!
} as const;
```

With JSON + Split Responsibilities:

```json
// .dumbo/snapshots/schemas/public/users.schema.json
{
  "tableName": "users",
  "columns": {
    "id": { "type": "serial", "primaryKey": true },
    "email": { "type": "varchar", "length": 255 }
  },
  "indexes": {}
}
```

```json
// .dumbo/manifests/schemas/public/users.manifest.json
{
  "component": "sc:dumbo:table:users",
  "processedMigrations": [
    {
      "name": "001_custom_create",
      "hash": "sha256:abcd1234...",
      "processedAt": "2024-01-15T10:30:00Z",
      "source": "provided"
    }
  ]
}
```

Benefits of JSON Format
- Clear Separation of Concerns
  - *.schema.json = Current state (for diffing)
  - *.manifest.json = Migration history (for tracking)
  - No temptation to mix responsibilities in a single file
- Better Git Diffs

```diff
 {
   "columns": {
     "id": { "type": "serial" },
-    "email": { "type": "varchar", "length": 255 }
   }
 }
```

  Much cleaner than TypeScript object diffs with imports and type annotations.
- Language Agnostic
  - Other tools (Python scripts, CI/CD, database admin tools) can read/write
  - No TypeScript compilation needed to access snapshot data
- Simpler Merge Resolution
  - JSON merge tools are mature and widespread
  - Can use jq or similar for command-line manipulation
  - Easier to write custom merge strategies

Drawbacks of JSON
- Loss of Type Safety
  - No compile-time checking of snapshot structure
  - Need runtime validation (though you need this anyway)
- No Imports/Composition
  - Can't use TypeScript's import system for nested structure
  - Would need to manually resolve file references
- Verbosity for Complex Structures
  - No as const assertions
  - More verbose without TypeScript's type inference

Hybrid Approach: Best of Both Worlds

What if we used JSON for storage but TypeScript for types?
```typescript
// .dumbo/snapshots/types.ts (generated)
import usersSchema from './schemas/public/users.schema.json';
import postsSchema from './schemas/public/posts.schema.json';

export type DatabaseSchema = {
  public: {
    users: typeof usersSchema;
    posts: typeof postsSchema;
  };
};
```

Recommended Structure with JSON

```
.dumbo/
  schemas/                    # Current state (JSON)
    database.json
    public/
      schema.json
      tables/
        users.json
        posts.json
  manifests/                  # Migration tracking (JSON)
    database.json
    public/
      schema.json
      tables/
        users.json
        posts.json
  migrations/                 # SQL files
    2024-01-15-001-create.sql
  generated/                  # Generated TypeScript
    types.ts                  # Generated from schemas/
    manifest-types.ts         # Types for manifest structure
```

Impact on the Architecture
- Cleaner Responsibilities:
  - Schema JSON: pure state representation
  - Manifest JSON: pure migration history
  - Generated TS: type safety without mixing concerns
- Better Tooling Opportunities:
  - dumbo schema:diff - Compare JSON files
  - dumbo manifest:verify - Check manifest against database
  - dumbo schema:validate - JSON schema validation
- Simpler Mental Model:
  - "Schemas describe what IS"
  - "Manifests describe what WAS DONE"
  - "Migrations describe HOW TO DO IT"

My Recommendation

Yes, JSON would significantly help with understanding and implementing the split. Specifically:
- Use JSON for snapshots (both schema and manifest)
- Keep them in separate directories
- Generate TypeScript types from the JSON for type safety
- Store the actual SQL in migrations as a third, distinct concern

This gives you:
- Clear separation of concerns
- Better git diffs and merge resolution
- Language-agnostic data files
- Type safety through code generation
- Simpler mental model

The only real loss is the elegance of TypeScript imports for composition, but that's a worthwhile trade-off for the clarity gained. Would you like me to revise the plan to use this JSON-based approach with properly separated concerns?
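As a concrete follow-up, here is a minimal sketch of how the manifest could drive the immutability check described in the plan. hashMigration and assertImmutable are hypothetical helpers, not existing dumbo APIs; only node:crypto is a real dependency:

```typescript
import { createHash } from 'node:crypto';

// Hypothetical shapes matching the manifest example above.
type ManifestEntry = { name: string; hash: string };

const hashMigration = (sqls: string[]): string =>
  'sha256:' + createHash('sha256').update(sqls.join('\n')).digest('hex');

// Throws when a migration recorded in the manifest no longer matches
// the SQL currently defined on the component.
const assertImmutable = (
  entries: ManifestEntry[],
  current: { name: string; sqls: string[] }[],
): void => {
  const recorded = new Map(entries.map((e) => [e.name, e.hash]));
  for (const migration of current) {
    const expected = recorded.get(migration.name);
    if (expected !== undefined && expected !== hashMigration(migration.sqls)) {
      throw new Error(
        `Migration '${migration.name}' has been modified! ` +
          `Migrations must be immutable once defined. Add a new migration instead.`,
      );
    }
  }
};
```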
diff --git a/src/packages/dumbo/src/core/schema/components/columnSchemaComponent.ts b/src/packages/dumbo/src/core/schema/components/columnSchemaComponent.ts new file mode 100644 index 00000000..b5d8ee36 --- /dev/null +++ b/src/packages/dumbo/src/core/schema/components/columnSchemaComponent.ts @@ -0,0 +1,37 @@ +import type { SQLColumnToken } from '../../sql'; +import { + schemaComponent, + type SchemaComponent, + type SchemaComponentOptions, +} from '../schemaComponent'; + +export type ColumnURNType = 'sc:dumbo:column'; +export type ColumnURN = `${ColumnURNType}:${string}`; + +export const ColumnURNType: ColumnURNType = 'sc:dumbo:column'; +export const ColumnURN = ({ name }: { name: string }): ColumnURN => + `${ColumnURNType}:${name}`; + +export type ColumnSchemaComponent = SchemaComponent< + ColumnURN, + Readonly<{ + columnName: string; + }> +>; +export const columnSchemaComponent = ({ + columnName, + ...migrationsOrComponents +}: { + columnName: string; +} & SchemaComponentOptions & + Omit): ColumnSchemaComponent => { + const sc = schemaComponent( + ColumnURN({ name: columnName }), + migrationsOrComponents, + ); + + return { + ...sc, + columnName, + }; +}; diff --git a/src/packages/dumbo/src/core/schema/components/databaseSchemaComponent.ts b/src/packages/dumbo/src/core/schema/components/databaseSchemaComponent.ts new file mode 100644 index 00000000..0f6da357 --- /dev/null +++ b/src/packages/dumbo/src/core/schema/components/databaseSchemaComponent.ts @@ -0,0 +1,66 @@ +import { + mapSchemaComponentsOfType, + schemaComponent, + type SchemaComponent, + type SchemaComponentOptions, +} from '../schemaComponent'; +import { + DatabaseSchemaURNType, + databaseSchemaSchemaComponent, + type DatabaseSchemaSchemaComponent, +} from './databaseSchemaSchemaComponent'; + +export type DatabaseURNType = 'sc:dumbo:database'; +export type DatabaseURN = `${DatabaseURNType}:${string}`; + +export const DatabaseURNType: DatabaseURNType = 'sc:dumbo:database'; +export const DatabaseURN = ({ name }: { name: string }): DatabaseURN => + `${DatabaseURNType}:${name}`; + +export type DatabaseSchemaComponent = SchemaComponent< + DatabaseURN, + Readonly<{ + databaseName: string; + schemas: ReadonlyMap; + addSchema: ( + schema: string | DatabaseSchemaSchemaComponent, + ) => DatabaseSchemaSchemaComponent; + }> +>; + +export const databaseSchemaComponent = ({ + databaseName, + schemaNames, + ...migrationsOrComponents +}: { + databaseName: string; + schemaNames?: string[]; +} & SchemaComponentOptions): DatabaseSchemaComponent => { + const schemas = + schemaNames?.map((schemaName) => + databaseSchemaSchemaComponent({ schemaName }), + ) ?? []; + + const base = schemaComponent(DatabaseURN({ name: databaseName }), { + migrations: migrationsOrComponents.migrations ?? [], + components: [...(migrationsOrComponents.components ?? []), ...schemas], + }); + + return { + ...base, + databaseName, + get schemas() { + return mapSchemaComponentsOfType( + base.components, + DatabaseSchemaURNType, + (c) => c.schemaName, + ); + }, + addSchema: (schema: string | DatabaseSchemaSchemaComponent) => + base.addComponent( + typeof schema === 'string' + ? 
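// a bare schema name is promoted to an empty database schema component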
databaseSchemaSchemaComponent({ schemaName: schema }) + : schema, + ), + }; +}; diff --git a/src/packages/dumbo/src/core/schema/components/databaseSchemaSchemaComponent.ts b/src/packages/dumbo/src/core/schema/components/databaseSchemaSchemaComponent.ts new file mode 100644 index 00000000..20bdafdc --- /dev/null +++ b/src/packages/dumbo/src/core/schema/components/databaseSchemaSchemaComponent.ts @@ -0,0 +1,66 @@ +import { + mapSchemaComponentsOfType, + schemaComponent, + type SchemaComponent, + type SchemaComponentOptions, +} from '../schemaComponent'; +import { + TableURNType, + tableSchemaComponent, + type TableSchemaComponent, +} from './tableSchemaComponent'; + +export type DatabaseSchemaURNType = 'sc:dumbo:database_schema'; +export type DatabaseSchemaURN = `${DatabaseSchemaURNType}:${string}`; + +export const DatabaseSchemaURNType: DatabaseSchemaURNType = + 'sc:dumbo:database_schema'; +export const DatabaseSchemaURN = ({ + name, +}: { + name: string; +}): DatabaseSchemaURN => `${DatabaseSchemaURNType}:${name}`; + +export type DatabaseSchemaSchemaComponent = SchemaComponent< + DatabaseSchemaURN, + Readonly<{ + schemaName: string; + tables: ReadonlyMap; + addTable: (table: string | TableSchemaComponent) => TableSchemaComponent; + }> +>; + +export const databaseSchemaSchemaComponent = ({ + schemaName, + tableNames, + ...migrationsOrComponents +}: { + schemaName: string; + tableNames?: string[]; +} & SchemaComponentOptions): DatabaseSchemaSchemaComponent => { + const tables = + tableNames?.map((tableName) => tableSchemaComponent({ tableName })) ?? []; + + const base = schemaComponent(DatabaseSchemaURN({ name: schemaName }), { + migrations: migrationsOrComponents.migrations ?? [], + components: [...(migrationsOrComponents.components ?? []), ...tables], + }); + + return { + ...base, + schemaName, + get tables() { + return mapSchemaComponentsOfType( + base.components, + TableURNType, + (c) => c.tableName, + ); + }, + addTable: (table: string | TableSchemaComponent) => + base.addComponent( + typeof table === 'string' + ? 
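// likewise, a bare table name is promoted to an empty table component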
tableSchemaComponent({ tableName: table }) + : table, + ), + }; +}; diff --git a/src/packages/dumbo/src/core/schema/components/index.ts b/src/packages/dumbo/src/core/schema/components/index.ts new file mode 100644 index 00000000..fe39d7a8 --- /dev/null +++ b/src/packages/dumbo/src/core/schema/components/index.ts @@ -0,0 +1,23 @@ +import { ColumnURN } from './columnSchemaComponent'; +import { DatabaseURN } from './databaseSchemaComponent'; +import { DatabaseSchemaURN } from './databaseSchemaSchemaComponent'; +import { IndexURN } from './indexSchemaComponent'; +import { TableURN } from './tableSchemaComponent'; + +export * from './columnSchemaComponent'; +export * from './databaseSchemaComponent'; +export * from './databaseSchemaSchemaComponent'; +export * from './indexSchemaComponent'; +export * from './tableSchemaComponent'; + +export const schemaComponentURN = { + database: DatabaseURN, + schema: DatabaseSchemaURN, + table: TableURN, + column: ColumnURN, + index: IndexURN, + extractName: (urn: string): string => { + const parts = urn.split(':'); + return parts[parts.length - 1] || ''; + }, +} as const; diff --git a/src/packages/dumbo/src/core/schema/components/indexSchemaComponent.ts b/src/packages/dumbo/src/core/schema/components/indexSchemaComponent.ts new file mode 100644 index 00000000..6dc80afd --- /dev/null +++ b/src/packages/dumbo/src/core/schema/components/indexSchemaComponent.ts @@ -0,0 +1,50 @@ +import { + schemaComponent, + type SchemaComponent, + type SchemaComponentOptions, +} from '../schemaComponent'; +import { type ColumnSchemaComponent } from './columnSchemaComponent'; + +export type IndexURNType = 'sc:dumbo:index'; +export type IndexURN = `${IndexURNType}:${string}`; + +export type IndexSchemaComponent = SchemaComponent< + IndexURN, + Readonly<{ + indexName: string; + columnNames: ReadonlyArray; + isUnique: boolean; + addColumn: (column: string | ColumnSchemaComponent) => void; + }> +>; + +export const IndexURNType: IndexURNType = 'sc:dumbo:index'; +export const IndexURN = ({ name }: { name: string }): IndexURN => + `${IndexURNType}:${name}`; + +export const indexSchemaComponent = ({ + indexName, + columnNames, + isUnique, + ...migrationsOrComponents +}: { + indexName: string; + columnNames: string[]; + isUnique: boolean; +} & SchemaComponentOptions): IndexSchemaComponent => { + const sc = schemaComponent(IndexURN({ name: indexName }), { + migrations: migrationsOrComponents.migrations ?? [], + components: [...(migrationsOrComponents.components ?? [])], + }); + + return { + ...sc, + indexName, + get columnNames() { + return columnNames; + }, + addColumn: (column: string | ColumnSchemaComponent) => + columnNames.push(typeof column === 'string' ? 
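// note: pushes the name onto the captured columnNames array in place;
// unlike table columns, the column is not registered via addComponent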
column : column.columnName), + isUnique, + }; +}; diff --git a/src/packages/dumbo/src/core/schema/components/tableSchemaComponent.ts b/src/packages/dumbo/src/core/schema/components/tableSchemaComponent.ts new file mode 100644 index 00000000..425066c4 --- /dev/null +++ b/src/packages/dumbo/src/core/schema/components/tableSchemaComponent.ts @@ -0,0 +1,69 @@ +import { + mapSchemaComponentsOfType, + schemaComponent, + type SchemaComponent, + type SchemaComponentOptions, +} from '../schemaComponent'; +import { + ColumnURNType, + type ColumnSchemaComponent, +} from './columnSchemaComponent'; +import { + IndexURNType, + type IndexSchemaComponent, +} from './indexSchemaComponent'; + +export type TableURNType = 'sc:dumbo:table'; +export type TableURN = `${TableURNType}:${string}`; + +export const TableURNType: TableURNType = 'sc:dumbo:table'; +export const TableURN = ({ name }: { name: string }): TableURN => + `${TableURNType}:${name}`; + +export type TableSchemaComponent = SchemaComponent< + TableURN, + Readonly<{ + tableName: string; + columns: ReadonlyMap; + indexes: ReadonlyMap; + addColumn: (column: ColumnSchemaComponent) => ColumnSchemaComponent; + addIndex: (index: IndexSchemaComponent) => IndexSchemaComponent; + }> +>; + +export const tableSchemaComponent = ({ + tableName, + columns, + ...migrationsOrComponents +}: { + tableName: string; + columns?: ColumnSchemaComponent[]; +} & SchemaComponentOptions): TableSchemaComponent => { + columns ??= []; + + const base = schemaComponent(TableURN({ name: tableName }), { + migrations: migrationsOrComponents.migrations ?? [], + components: [...(migrationsOrComponents.components ?? []), ...columns], + }); + + return { + ...base, + tableName, + get columns() { + return mapSchemaComponentsOfType( + base.components, + ColumnURNType, + (c) => c.columnName, + ); + }, + get indexes() { + return mapSchemaComponentsOfType( + base.components, + IndexURNType, + (c) => c.indexName, + ); + }, + addColumn: (column: ColumnSchemaComponent) => base.addComponent(column), + addIndex: (index: IndexSchemaComponent) => base.addComponent(index), + }; +}; diff --git a/src/packages/dumbo/src/core/schema/dumboFeature.md b/src/packages/dumbo/src/core/schema/dumboFeature.md new file mode 100644 index 00000000..eaccab5b --- /dev/null +++ b/src/packages/dumbo/src/core/schema/dumboFeature.md @@ -0,0 +1,1606 @@ +read src/packages/dumbo/src/core/schema/MIGRATION_UNIFICATION_PLAN.md first, then tell me how would you recommend to join it with src/packages/pongo/src/core/schema/index.ts where pongo database could be either database or database per database schema. Also how would that fit Architecture as Code defined as: + +export type EmmettRelationshipType = string; + +export type PortRequirements = Record; + +export type EmmettArchModule< +Requires extends PortRequirements | undefined = undefined, +Exposes extends PortRequirements | undefined = undefined, + +> = { +> name: string; +> } & (Requires extends undefined +> ? Exposes extends undefined + + ? { ports?: undefined } // both undefined: ports optional + : { ports: { exposes: Exposes } } // only Exposes defined + +: Exposes extends undefined +? 
{ ports: { requires: Requires } } // only Requires defined +: { ports: { requires: Requires; exposes: Exposes } }); // both defined + +export type AnyEmmettArchModule = EmmettArchModule; + +export interface EmmettRelationship< +Source extends AnyEmmettArchModule = AnyEmmettArchModule, +Target extends AnyEmmettArchModule = AnyEmmettArchModule, + +> { +> source: Source['name']; +> target: Target['name']; +> type: EmmettRelationshipType; +> description?: string; +> } + +export type EmmettRelationshipsMap< +Source extends AnyEmmettArchModule = AnyEmmettArchModule, +Target extends AnyEmmettArchModule = AnyEmmettArchModule, + +> = Record>; + +export type EmmettComponent< +NestedComponents extends +// eslint-disable-next-line @typescript-eslint/no-explicit-any +Record> | undefined = undefined, +Requires extends PortRequirements | undefined = undefined, +Exposes extends PortRequirements | undefined = undefined, + +> = EmmettArchModule & +> (NestedComponents extends undefined + + ? { components?: undefined } + : { components: NestedComponents }); + +export interface EmmettContainer< +T extends Record = Record, + +> extends EmmettArchModule { +> components?: T; +> } + +export interface EmmettSystem< +T extends Record = Record, + +> extends EmmettArchModule { +> containers?: T; +> } + +// export type EmmettComponentsMap> = { +// [K in keyof T]: EmmettComponent< +// T[K] extends EmmettComponent ? U : unknown +// >; +// }; + +export type EmmettContainersMap> = { +[K in keyof T]: EmmettContainer< +T[K] extends EmmettContainer ? U : Record + +> ; +> }; + +export type EmmettSystemsMap> = { +[K in keyof T]: EmmettSystem< +T[K] extends EmmettSystem ? U : Record + +> ; +> }; + +// const emmettComponent = , 'name'>>( +// name: string, +// config?: T, +// ) => { +// return { name, ...config } satisfies EmmettComponent; +// }; + +export type ComponentsOf = T extends { +components: infer M; +} +? M +: undefined; + +export function emmettComponent< +const Config extends { +// eslint-disable-next-line @typescript-eslint/no-explicit-any +components?: Record>; +ports?: { +requires?: PortRequirements; +exposes?: PortRequirements; +}; +}, + +> ( +> name: string, +> config?: Config, +> ): { +> name: string; +> components: Config extends { components: infer C } ? C : undefined; +> ports: Config extends { ports: infer P } ? P : undefined; +> } { +> return { + + name, + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any + components: config?.components as any, + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any + ports: config?.ports as any, + +}; +} + +// ({ +// name, +// relationships: config?.relationships ? 
config.relationships : undefined, +// }) satisfies EmmettComponent as unknown as TReturn; + +function emmettContainer>( +name: string, +components?: T, +): EmmettContainer { +return { +name, +components, +}; +} + +const emmettSystem = >( +name: string, +containers?: T, +): EmmettSystem => ({ +name, +containers, +}); + +const emmettRelationship = < +Source extends AnyEmmettArchModule, +Target extends AnyEmmettArchModule, + +> ( +> source: Source, +> type: EmmettRelationshipType, +> target: Target, +> bundle?: ( + + target: Target['ports']['exposes'], + +) => Partial, +description?: string, +): EmmettRelationship => ({ +source: source.name, +target: target.name, +type, +description, +}); + +type ModuleBuilder = { +relationship: ( +target: Target, +type: EmmettRelationshipType, +description?: string, +) => EmmettRelationship; +}; + +export const moduleBuilder = ( +ctx: Source, +): ModuleBuilder => ({ +relationship: ( +target: Target, +type: EmmettRelationshipType, +description?: string, +) => emmettRelationship(ctx, type, target, undefined, description), +}); + +export const emmettArch = { +system: emmettSystem, +container: emmettContainer, +component: emmettComponent, +relationship: emmettRelationship, +}; + +and + +import { emmettArch } from '.'; + +const { component, container, relationship } = emmettArch; + +const query = +() => +(\_input: Input) => +Promise.resolve({} as Output); + +const getGuestByExternalId = (\_externalId: string): Promise => +Promise.resolve(\_externalId); + +const guests = component('guests', { +components: {}, +ports: { +requires: {}, +exposes: { +queries: { +getGuestByExternalId, +}, +}, +}, +}); + +const pricing = component('pricing'); + +const groupReservations = component('group-reservations'); + +const reservations = component('reservations', { +components: { groupReservations }, +ports: { +requires: { +guests: { +getGuestByExternalId: query(), +}, +}, +exposes: {}, +}, +}); + +const reservationsToGuests = relationship( +reservations, +'provides guest information to', +guests, +({ queries: { getGuestByExternalId } }) => ({ +guests: { +getGuestByExternalId, +}, +}), +); + +const hotelManagement = container('hotel-management', { +guests, +reservations, +pricing, +}); + +Where we could define container and pass dumboSchema to define modules inside e.g. postgresqlContainer (in the c4 model sense), + +Also how could I define also event store having multiple tables and functions, e.g. 
+ +import type { SQLiteConnection } from '../../connection'; +import { +globalTag, +messagesTable, +streamsTable, +subscriptionsTable, +} from './typing'; + +export const sql = (sql: string) => sql; + +export const streamsTableSQL = sql( +`CREATE TABLE IF NOT EXISTS ${streamsTable.name}( + stream_id TEXT NOT NULL, + stream_position BIGINT NOT NULL DEFAULT 0, + partition TEXT NOT NULL DEFAULT '${globalTag}', + stream_type TEXT NOT NULL, + stream_metadata JSONB NOT NULL, + is_archived BOOLEAN NOT NULL DEFAULT FALSE, + PRIMARY KEY (stream_id, partition, is_archived), + UNIQUE (stream_id, partition, is_archived) + );`, +); + +export const messagesTableSQL = sql( +`CREATE TABLE IF NOT EXISTS ${messagesTable.name}( + stream_id TEXT NOT NULL, + stream_position BIGINT NOT NULL, + partition TEXT NOT NULL DEFAULT '${globalTag}', + message_kind CHAR(1) NOT NULL DEFAULT 'E', + message_data JSONB NOT NULL, + message_metadata JSONB NOT NULL, + message_schema_version TEXT NOT NULL, + message_type TEXT NOT NULL, + message_id TEXT NOT NULL, + is_archived BOOLEAN NOT NULL DEFAULT FALSE, + global_position INTEGER PRIMARY KEY, + created DATETIME DEFAULT CURRENT_TIMESTAMP, + UNIQUE (stream_id, stream_position, partition, is_archived) + ); +`, +); + +export const subscriptionsTableSQL = sql( +` CREATE TABLE IF NOT EXISTS ${subscriptionsTable.name}( + subscription_id TEXT NOT NULL, + version INTEGER NOT NULL DEFAULT 1, + partition TEXT NOT NULL DEFAULT '${globalTag}', + last_processed_position BIGINT NOT NULL, + PRIMARY KEY (subscription_id, partition, version) + );`, +); + +export const schemaSQL: string[] = [ +streamsTableSQL, +messagesTableSQL, +subscriptionsTableSQL, +]; + +export const createEventStoreSchema = async ( +db: SQLiteConnection, +): Promise => { +for (const sql of schemaSQL) { +await db.command(sql); +} +}; + +export const appendToStreamSQL = rawSql( +`CREATE OR REPLACE FUNCTION emt_append_to_stream( +v_message_ids text[], +v_messages_data jsonb[], +v_messages_metadata jsonb[], +v_message_schema_versions text[], +v_message_types text[], +v_message_kinds text[], +v_stream_id text, +v_stream_type text, +v_expected_stream_position bigint DEFAULT NULL, +v_partition text DEFAULT emt_sanitize_name('default_partition') +) RETURNS TABLE ( +success boolean, +next_stream_position bigint, +global_positions bigint[], +transaction_id xid8 +) LANGUAGE plpgsql +AS $$ +DECLARE +v_next_stream_position bigint; +v_position bigint; +v_updated_rows int; +v_transaction_id xid8; +v_global_positions bigint[]; +BEGIN +v_transaction_id := pg_current_xact_id(); + + IF v_expected_stream_position IS NULL THEN + SELECT COALESCE( + (SELECT stream_position + FROM ${streamsTable.name} + WHERE stream_id = v_stream_id + AND partition = v_partition + AND is_archived = FALSE + LIMIT 1), + 0 + ) INTO v_expected_stream_position; + END IF; + + v_next_stream_position := v_expected_stream_position + array_upper(v_messages_data, 1); + + IF v_expected_stream_position = 0 THEN + INSERT INTO ${streamsTable.name} + (stream_id, stream_position, partition, stream_type, stream_metadata, is_archived) + VALUES + (v_stream_id, v_next_stream_position, v_partition, v_stream_type, '{}', FALSE); + ELSE + UPDATE ${streamsTable.name} as s + SET stream_position = v_next_stream_position + WHERE stream_id = v_stream_id AND stream_position = v_expected_stream_position AND partition = v_partition AND is_archived = FALSE; + + get diagnostics v_updated_rows = row_count; + + IF v_updated_rows = 0 THEN + RETURN QUERY SELECT FALSE, NULL::bigint, NULL::bigint[], 
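-- the position-guarded UPDATE matched no rows: the expected stream position
-- was stale, so report an optimistic concurrency failure to the caller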
NULL::xid8; + RETURN; + END IF; + END IF; + + WITH ev AS ( + SELECT row_number() OVER () + v_expected_stream_position AS stream_position, + message_data, + message_metadata, + schema_version, + message_id, + message_type, + message_kind + FROM ( + SELECT * + FROM + unnest(v_message_ids, v_messages_data, v_messages_metadata, v_message_schema_versions, v_message_types, v_message_kinds) + AS message(message_id, message_data, message_metadata, schema_version, message_type, message_kind) + ) AS message + ), + all_messages_insert AS ( + INSERT INTO ${messagesTable.name} + (stream_id, stream_position, partition, message_data, message_metadata, message_schema_version, message_type, message_kind, message_id, transaction_id) + SELECT + v_stream_id, ev.stream_position, v_partition, ev.message_data, ev.message_metadata, ev.schema_version, ev.message_type, ev.message_kind, ev.message_id, v_transaction_id + FROM ev + RETURNING global_position + ) + SELECT + array_agg(global_position ORDER BY global_position) INTO v_global_positions + FROM + all_messages_insert; + + RETURN QUERY SELECT TRUE, v_next_stream_position, v_global_positions, v_transaction_id; + +END; + +$$ +; +`, +); + +Potentially in different schemas (e.g. event store in one schema, read models using pongo in others. + +How could this be explicit? would we introduce some databaseFeatureSchemaComponent or databaseSchemaFeatureSchemaComponent? + +How to make it explicit that we're adding feature that can have multiple types of components but also be able to locate it in the specific schema or database? + +If I'm not clear enough, please ask for more. Ultrathink +[Show more] +I'll read the migration plan first to understand the context, then analyze how to integrate it with Pongo's schema system and the Architecture as Code concept. +Read MIGRATION_UNIFICATION_PLAN.md, index.ts +Now let me understand the question better by looking at some examples and clarifying a few points. I can see: +Dumbo's migration unification plan - A comprehensive system for managing database schemas with migrations, snapshots, and TypeScript definitions +Pongo's schema system - A higher-level abstraction focused on MongoDB-like collections within databases +Emmett Architecture as Code - A C4-model-inspired system for defining components, containers, and systems with ports and relationships +The user is asking about how to integrate these three concepts, specifically: +How to connect Dumbo's schema components with Pongo's database/collection abstractions +How to fit this into the Emmett Architecture as Code model +How to make it explicit when features span multiple schemas or databases +Dealing with the fact that Pongo can be "database or database per database schema" +Let me search for more context about event stores and how they're currently used: +Globpattern: "**/event*store*schema*.ts" +⎿ +Found 1 file +Grep "eventStoreSchema|event.*store.*component" +⎿ +1 line of output +Grep "databaseFeature|schemaFeature" +⎿ +1 line of output +Let me look for how dumbo schema components are currently structured: +Read dumboSchema.ts +Globpattern: "**/examples/**/*.ts" +⎿ +Found 1 file +Now I have a comprehensive understanding. Let me analyze the integration challenge and provide recommendations. 
The question involves several complex integration points: +Dumbo schema components - Low-level SQL schema definition with migrations +Pongo schema - MongoDB-like abstraction over SQL databases +Emmett Architecture as Code - C4-model components with ports and relationships +Physical deployment - Different database schemas or databases +Let me formulate my recommendations: +Claude’s Plan +Integration Plan: Dumbo + Pongo + Emmett Architecture as Code +Context Analysis +After reviewing the migration plan and existing code, I see three layers that need integration: +Dumbo Layer: Low-level database schema with SQL migrations (database → schema → table → column) +Pongo Layer: MongoDB-like abstraction (client → db → collection) +Emmett Layer: C4-model architecture components with ports/relationships +Key Challenge: "Pongo can be database OR database-per-schema" +This is the critical mapping issue: +PostgreSQL mode: One database, multiple schemas → Each Pongo DB maps to a PostgreSQL schema +SQLite mode: One database, no schema support → Each Pongo DB is just a logical grouping +Multi-tenant mode: Multiple databases → Each Pongo DB maps to a separate physical database +Recommended Architecture +1. Introduce databaseFeatureComponent +This bridges Dumbo's physical schema and Emmett's logical components: +export type DatabaseFeatureComponent< +NestedComponents extends Record> | undefined = undefined, +Requires extends PortRequirements | undefined = undefined, +Exposes extends PortRequirements | undefined = undefined, +> = EmmettComponent & { +// Physical database mapping +deployment: { + mode: 'database' | 'schema' | 'collection-prefix'; + database?: string; // Physical database name + schema?: string; // PostgreSQL schema name + prefix?: string; // For collection prefixing in shared DB +}; + +// Dumbo schema definition +dumboSchema: DatabaseSchemaComponent | DatabaseSchemaSchemaComponent; + +// Pongo schema definition +pongoSchema: PongoDbSchema; +}; +2. Event Store Example Integration +const eventStoreFeature = emmettArch.component('event-store', { +components: { + streams: emmettArch.component('streams-table'), + messages: emmettArch.component('messages-table'), + subscriptions: emmettArch.component('subscriptions-table'), + appendToStream: emmettArch.component('append-to-stream-function'), +}, +ports: { + exposes: { + commands: { + appendToStream: (/* ... */) => Promise.resolve(), + }, + queries: { + readStream: (/* ... */) => Promise.resolve([]), + }, + }, +}, +deployment: { + mode: 'schema', + database: 'main_db', + schema: 'event_store', +}, +dumboSchema: dumboDatabaseSchema('event_store', { + streams: dumboTable('streams', { /* ... */ }), + messages: dumboTable('messages', { /* ... */ }), + subscriptions: dumboTable('subscriptions', { /* ... */ }), +}), +pongoSchema: pongoSchema.db('event_store', { + streams: pongoSchema.collection('streams'), + messages: pongoSchema.collection('messages'), + subscriptions: pongoSchema.collection('subscriptions'), +}), +}); +3. 
Multi-Schema Container +For your hotel management example with separate schemas: +const hotelManagementContainer = emmettArch.container('hotel-management', { +// Event store in dedicated schema +eventStore: databaseFeatureComponent('event-store', { + deployment: { mode: 'schema', schema: 'event_store' }, + dumboSchema: eventStoreDumboSchema, + pongoSchema: eventStorePongoSchema, +}), + +// Read models in another schema +readModels: databaseFeatureComponent('read-models', { + deployment: { mode: 'schema', schema: 'read_models' }, + dumboSchema: readModelsDumboSchema, + pongoSchema: readModelsPongoSchema, + components: { + guests: emmettArch.component('guests'), + reservations: emmettArch.component('reservations'), + }, +}), +}); +4. Type-Safe Schema Navigation +Add helper to navigate from Emmett component to its schemas: +export const getComponentSchemas = ( +component: T +) => ({ +dumbo: component.dumboSchema, +pongo: component.pongoSchema, +deployment: component.deployment, + +// Get fully qualified table names +tables: () => { + const { mode, database, schema, prefix } = component.deployment; + // Return table references with correct qualifiers +}, +}); +5. Migration Integration +The orchestrator should understand deployment modes: +export class MultiTenantMigrationOrchestrator { +async deployFeature(feature: DatabaseFeatureComponent) { + const { mode, database, schema } = feature.deployment; + + switch (mode) { + case 'database': + // Deploy to separate database + await this.migrateDatabase(database!, feature.dumboSchema); + break; + case 'schema': + // Deploy to schema within database + await this.migrateSchema(database!, schema!, feature.dumboSchema); + break; + case 'collection-prefix': + // Just prefix collections, no migration needed + break; + } +} +} +Questions for Clarification +Naming: Should we call it databaseFeatureComponent or dataStoreComponent or persistenceComponent? +Nested features: If a feature has multiple schemas (e.g., event_store + projections), should each be a separate component or nested? +Cross-schema relationships: How should we express that read models depend on event store when they're in different schemas? +Collection prefixing: Should we support a "prefix" mode where multiple features share a schema but use prefixed collections? +Function/procedure components: Should stored procedures be first-class components or just part of table components? +Next Steps +Define DatabaseFeatureComponent type with deployment metadata +Extend Emmett arch builder with .databaseFeature() method +Create mapping helpers between Dumbo/Pongo/Emmett schemas +Update migration orchestrator to handle deployment modes +Add examples showing event store + read models in separate schemas +Stayed in plan mode +No, event store schema component should define all on their own. Potentially it can take pongoSchema or dumboSchema for inline projections. Read https://event-driven.io/en/emmett_projections_testing/ + +PongoSchema defines collections like mongo db, not eventstroe read: https://event-driven.io/en/pongo_strongly_typed_client/ + +I'd likesomeone to need to define all dumboSchema, pongo Schema etc. I'd like to be able to either wrap dumbo schema, or be able to pass those features to tell, e.g. like we have tables then including event store. or pongo collection. 
+ +The obvious option is to include dumbodatabase schema into event store and pongoschema and build it with wrappers, but I'd like to avoid it if possible to just compose it as other schemaComponents +Ah, I understand much better now! Let me re-read those articles and rethink this. +Web Fetchhttps://event-driven.io/en/emmett_projections_testing/ +Fetched from https://event-driven.io/en/emmett_projections_testing/ +Web Fetchhttps://event-driven.io/en/pongo_strongly_typed_client/ +Fetched from https://event-driven.io/en/pongo_strongly_typed_client/ +Let me search for event store and projection examples in the codebase: +Grep "inline.*projection|projection.*inline" +⎿ +1 line of output +Grep "eventStore|event-store|EventStore" +⎿ +2 lines of output +Globpattern: "**/emmett**/projection*.ts" +⎿ +Found 1 file +Grep "class.*EventStore|function.*eventStore|const.*eventStore" +⎿ +1 line of output +Globpattern: "**/eventStore*.ts" +⎿ +Found 1 file +Globpattern: "**/event-store*.ts" +⎿ +Found 1 file +Let me look at the schema component structure to understand how components compose: +Read schemaComponent.ts, tableSchemaComponent.ts +If you don't know something then ask, don't hallucinate. I provided you a lot. Event Store is in emmett, not in this repo, that's why I've sent you code samples and articles... +If you need some more definition please ask. +Perfect! Now I understand the pattern. Let me think through what you're asking: Your key insights: +Event store should define its own schema components (tables, functions, etc.) +Pongo schema is for MongoDB-like collections, NOT for event store +You want to compose features like event store AS schema components, not wrap them +You want to avoid having to define dumbo/pongo schemas separately inside each feature +The pattern I see: +TableSchemaComponent extends SchemaComponent with table-specific data +It can be added to a DatabaseSchemaSchemaComponent as a component +You want something similar for "event store" or "pongo collections" +Let me propose a solution based on this understanding: +Proposed Solution: Feature Schema Components +1. EventStoreSchemaComponent (Dumbo side) +export type EventStoreURNType = 'sc:dumbo:event_store'; +export type EventStoreURN = `${EventStoreURNType}:${string}`; + +export type EventStoreSchemaComponent = SchemaComponent< +EventStoreURN, +Readonly<{ + eventStoreName: string; + tables: ReadonlyMap; + functions: ReadonlyMap; +}> +>; + +export const eventStoreSchemaComponent = ({ +eventStoreName, +...options +}: { +eventStoreName: string; +} & SchemaComponentOptions): EventStoreSchemaComponent => { +// Define the event store tables +const streamsTable = tableSchemaComponent({ + tableName: 'streams', + migrations: [/* SQL for streams table */] +}); + +const messagesTable = tableSchemaComponent({ + tableName: 'messages', + migrations: [/* SQL for messages table */] +}); + +const subscriptionsTable = tableSchemaComponent({ + tableName: 'subscriptions', + migrations: [/* SQL for subscriptions table */] +}); + +const appendFunction = functionSchemaComponent({ + functionName: 'emt_append_to_stream', + migrations: [/* SQL for function */] +}); + + const base = schemaComponent( + `sc:dumbo:event_store:${eventStoreName}` as EventStoreURN, + { + migrations: options.migrations ?? [], + components: [ + streamsTable, + messagesTable, + subscriptionsTable, + appendFunction, + ...(options.components ?? 
[]), + ], + } +); + + return { + ...base, + eventStoreName, + get tables() { + return mapSchemaComponentsOfType( + base.components, + 'sc:dumbo:table', + (c) => c.tableName, + ); + }, + get functions() { + return mapSchemaComponentsOfType( + base.components, + 'sc:dumbo:function', + (c) => c.functionName, + ); + }, +}; +}; +2. PongoCollectionSetSchemaComponent (Pongo side) +For Pongo, you'd have a similar component that represents a set of collections: +export type PongoCollectionSetURNType = 'sc:pongo:collection_set'; +export type PongoCollectionSetURN = `${PongoCollectionSetURNType}:${string}`; + +export type PongoCollectionSetSchemaComponent = SchemaComponent< +PongoCollectionSetURN, +Readonly<{ + name: string; + pongoSchema: PongoDbSchema; + collections: ReadonlyMap; +}> +>; + +export const pongoCollectionSetSchemaComponent = ({ +name, +collections, +...options +}: { +name: string; +collections: Record; +} & SchemaComponentOptions): PongoCollectionSetSchemaComponent => { +const pongoDbSchema = pongoSchema.db(collections); + +const base = schemaComponent( + `sc:pongo:collection_set:${name}` as PongoCollectionSetURN, + { + migrations: options.migrations ?? [], + components: options.components ?? [], + } +); + + return { + ...base, + name, + pongoSchema: pongoDbSchema, + get collections() { + // Could map from pongoSchema.collections + return new Map(); + }, +}; +}; +3. Usage: Composing into DatabaseSchema +// Define event store as a schema component +const eventStore = eventStoreSchemaComponent({ +eventStoreName: 'default', +}); + +// Define read models as Pongo collections wrapped in dumbo tables +const readModelsSchema = dumboDatabaseSchema('read_models', { +guests: dumboTable('guests', { + columns: { + id: dumboColumn('id'), + name: dumboColumn('name'), + } +}), +reservations: dumboTable('reservations', { + columns: { + id: dumboColumn('id'), + guestId: dumboColumn('guest_id'), + } +}), +}); + +// Compose them together +const hotelDatabase = dumboDatabase('hotel', { +event_store: dumboDatabaseSchema('event_store', { + // Add event store as components! Tables are automatically included +}).addComponent(eventStore), // <-- Event store adds its tables + +read_models: readModelsSchema, +}); +Wait, that's still awkward. Let me reconsider... +Better Approach: Event Store IS a DatabaseSchemaSchemaComponent +export const eventStoreSchema = ( +schemaName: string = 'event_store', +options?: SchemaComponentOptions +): DatabaseSchemaSchemaComponent => { +return dumboDatabaseSchema(schemaName, { + streams: dumboTable('streams', { + migrations: [sqlMigration('create_streams', [streamsTableSQL])], + }), + messages: dumboTable('messages', { + migrations: [sqlMigration('create_messages', [messagesTableSQL])], + }), + subscriptions: dumboTable('subscriptions', { + migrations: [sqlMigration('create_subscriptions', [subscriptionsTableSQL])], + }), +}, options); +}; + +// Then you can use it directly: +const hotelDatabase = dumboDatabase('hotel', { +event_store: eventStoreSchema('event_store'), +read_models: dumboDatabaseSchema('read_models', { + guests: dumboTable('guests', { /* ... */ }), + reservations: dumboTable('reservations', { /* ... */ }), +}), +}); +But you also want inline projections... 
Let me think about that: +With Inline Projections +export const eventStoreSchema = ({ +schemaName = 'event_store', +inlineProjections = {}, +...options +}: { +schemaName?: string; +inlineProjections?: Record; +} & SchemaComponentOptions = {}): DatabaseSchemaSchemaComponent => { +const coreEventStoreTables = { + streams: dumboTable('streams', { + migrations: [sqlMigration('create_streams', [streamsTableSQL])], + }), + messages: dumboTable('messages', { + migrations: [sqlMigration('create_messages', [messagesTableSQL])], + }), + subscriptions: dumboTable('subscriptions', { + migrations: [sqlMigration('create_subscriptions', [subscriptionsTableSQL])], + }), +}; + +// Convert inline projections to dumbo tables if needed +const projectionTables: Record = {}; +for (const [name, projection] of Object.entries(inlineProjections)) { + if ('tableName' in projection) { + // It's already a TableSchemaComponent + projectionTables[name] = projection; + } else { + // It's a PongoCollectionSchema, create a table for it + projectionTables[name] = dumboTable(projection.name, {}); + } +} + +return dumboDatabaseSchema(schemaName, { + ...coreEventStoreTables, + ...projectionTables, +}, options); +}; + +// Usage: +const hotelDatabase = dumboDatabase('hotel', { +event_store: eventStoreSchema({ + schemaName: 'event_store', + inlineProjections: { + guest_summary: pongoSchema.collection('guest_summary'), + reservation_summary: dumboTable('reservation_summary', { /* ... */ }), + }, +}), +read_models: dumboDatabaseSchema('read_models', { + guests: dumboTable('guests', { /* ... */ }), +}), +}); +For Emmett Architecture Integration +Now, how does this relate to Emmett components? +const eventStoreComponent = emmettArch.component('event-store', { +ports: { + exposes: { + commands: { + appendToStream: (/* ... */) => Promise.resolve(), + }, + }, +}, +}); + +const guestsComponent = emmettArch.component('guests', { +ports: { + exposes: { + queries: { + getGuestByExternalId: query(), + }, + }, +}, +}); + +// The container ties Emmett components to database schemas +const hotelManagementContainer = emmettArch.container('hotel-management', { +// Emmett components +eventStore: eventStoreComponent, +guests: guestsComponent, +reservations: reservationsComponent, +}); + +// Separately, define the database schema +const hotelDatabaseSchema = dumboDatabase('hotel', { +event_store: eventStoreSchema({ schemaName: 'event_store' }), +read_models: dumboDatabaseSchema('read_models', { + guests: dumboTable('guests', { /* ... */ }), + reservations: dumboTable('reservations', { /* ... */ }), +}), +}); +But how do we link them? +Key Questions: +Should Emmett components contain schema definitions? Or should they be separate? +Should we have a mapping layer that says "event-store component uses event_store schema"? +Do you want deployment flexibility where the same Emmett component can be deployed to different schemas/databases? +For the "explicit location" problem: Do you want something like: +// Query that knows it's in event_store schema +const streams = database.schema('event_store').table('streams'); + +// Or from Emmett component perspective +const component = container.components.eventStore; +const schema = getSchemaFor(component); // Returns event_store schema +Let me ask some clarifying questions to nail down the design: Clarifying Questions: +Separation of concerns: Should Emmett components (business logic) be completely separate from dumbo schemas (persistence), or should they reference each other? 
+Event store as primitive: Should eventStoreSchema() be a first-class schema component type (like dumboTable), or just a factory function that creates a DatabaseSchemaSchemaComponent? +Inline projections: When you say "event store can take pongoSchema for inline projections", do you mean: +a) Event store schema should accept PongoCollectionSchema and convert them to tables +b) Event store runtime should support Pongo query API on projection tables +c) Both? +Multi-schema deployment: If you have event_store in one schema and read_models in another, how should the Emmett component know which schema to query? Should there be a deployment config that maps components to schemas? +Function/Stored Procedures: Should SQL functions (like emt_append_to_stream) be first-class schema components, or just part of table/schema migrations? +Please clarify these points so I can provide the right solution! +You're absolutely right - I apologize for making assumptions. Let me ask clarifying questions based on what you've actually provided: +Clarifying Questions +Based on the code samples you provided and the migration plan, I need to understand: +1. Event Store Structure +From your SQL example, I can see the event store has: +Tables: streams, messages, subscriptions +Functions: emt_append_to_stream (PostgreSQL stored procedure) +These are defined via raw SQL (streamsTableSQL, messagesTableSQL, etc.) +Question: Should event store be represented as: +Option A: A factory function that returns a DatabaseSchemaSchemaComponent with all tables pre-configured? +const eventStore = eventStoreSchema('event_store'); // Returns DatabaseSchemaSchemaComponent +Option B: A new schema component type (like EventStoreSchemaComponent extends SchemaComponent)? +Option C: Just a collection of table components that you manually compose? +2. Inline Projections +You mentioned "event store can take pongoSchema or dumboSchema for inline projections" Question: What does this mean in practice? +Do inline projections create additional tables in the same schema as the event store? +Should I be able to do something like: +const eventStore = eventStoreSchema('event_store', { +inlineProjections: { + guestSummary: pongoSchema.collection('guest_summary'), + // This creates a 'guest_summary' table in the event_store schema? +} +}); +3. Pongo vs Dumbo Relationship +From pongo/src/core/schema/index.ts: +Pongo schema defines client -> db -> collections (MongoDB-like) +It's a logical abstraction over the physical database +From the migration plan: +Dumbo schema defines database -> schema -> table -> column (SQL) +It's the physical structure with migrations +Question: How should these relate? +Can a single Pongo db map to multiple Dumbo schemas? +If I have event_store in one schema and read_models in another, do I create: +Two separate Pongo dbs? +One Pongo db that somehow spans both schemas? +4. Stored Procedures / Functions +Your emt_append_to_stream example is a PostgreSQL function. Question: Should functions be: +Option A: Part of table migrations (stored in table component)? +Option B: A new FunctionSchemaComponent type (like TableSchemaComponent)? +Option C: Part of schema-level migrations (stored in DatabaseSchemaSchemaComponent)? +5. Emmett Component Integration +You showed Emmett Architecture as Code with components, containers, ports. 
Question: What's the relationship between: +Emmett component (business logic with ports) +Dumbo DatabaseSchemaSchemaComponent (physical schema) +Pongo PongoDbSchema (logical collections) +Should they: +Option A: Be completely separate (define them independently, link via configuration)? +Option B: Have explicit references (Emmett component has .schema property)? +Option C: Use a mapping/registry pattern? +6. Multiple Schemas in One Database +Your question mentions "pongo database could be either database or database per database schema" Question: Can you explain this with a concrete example? +// Scenario 1: PostgreSQL with multiple schemas +const db = pongoClient.db('hotel'); // Maps to PostgreSQL database 'hotel' +// This database has schemas: event_store, read_models, analytics + +// Scenario 2: SQLite (no schemas) +const db = pongoClient.db('hotel'); // Maps to SQLite database 'hotel' +// No schema concept, all tables in one namespace + +// How should the schema definition work for both? +7. "Explicit Location" Requirement +You said: "How to make it explicit that we're adding feature that can have multiple types of components but also be able to locate it in the specific schema or database?" Question: What does "locate it" mean? +At definition time: "This feature's tables go in schema X"? +At runtime: "Query this component and know which schema it's in"? +At architecture documentation time: "Show which schema each component uses"? +8. Composition Pattern +You said: "I'd like to just compose it as other schemaComponents" Question: What does the ideal composition look like to you? +// Example A: Flat composition +const hotelDb = dumboDatabase('hotel', { +event_store: eventStoreSchema(), +read_models: readModelsSchema(), +}); + +// Example B: Nested composition +const hotelDb = dumboDatabase('hotel') +.addSchema(eventStoreSchema()) +.addSchema(readModelsSchema()); + +// Example C: Component-based +const hotelDb = dumboDatabase('hotel', { +features: { + eventStore: eventStoreFeature, // This internally creates schemas + readModels: readModelsFeature, +} +}); +Please clarify these points and I'll design the right solution based on what you actually need, not assumptions! +Thank you for your patience. Let me ask the essential questions I need to understand your requirements: +Essential Questions +1. Event Store Package Location & API +You mentioned "Event Store is in emmett, not in this repo" and provided SQL samples. Question: In the Emmett package, does the event store: +Export a function/class that creates the schema definition? +Provide just the SQL strings (like your streamsTableSQL, messagesTableSQL examples)? +Provide a complete schema component I can import and use? +What I need to know: If I want to use event store in this codebase, what exactly do I import from Emmett and how do I turn it into a Dumbo schema component? +2. The Core Integration Problem +You want to avoid wrapping dumbo/pongo schemas inside feature definitions, but still compose them. Question: Starting from your event store SQL example with streams, messages, subscriptions tables and the emt_append_to_stream function - if I want to: +Use this as a Dumbo schema component (for migrations) +Also expose it as a Pongo schema (for querying) +Include it in an Emmett architecture component (for documentation/ports) +Deploy it to a specific PostgreSQL schema (e.g., event_store schema) +What should the code look like? Can you show me a pseudocode example of how you envision defining this? +3. 
Inline Projections Clarification
+You said event store "can take pongoSchema or dumboSchema for inline projections" Question: When you define an inline projection:
+// Pseudocode - is this the right direction?
+eventStore({
+  inlineProjections: {
+    guestSummary: ??? // What goes here?
+  }
+})
+Does guestSummary become a table in the same PostgreSQL schema as streams/messages?
+Is it a Pongo collection definition that gets converted to a table?
+Is it a Dumbo table definition?
+Something else?
+4. Physical Deployment Mapping
+Regarding "pongo database could be either database or database per database schema": Question: In these two scenarios, how should the schemas be defined? Scenario A - PostgreSQL (one DB, multiple schemas):
+-- Physical structure:
+-- Database: hotel_db
+--   Schema: event_store (tables: streams, messages, subscriptions)
+--   Schema: read_models (tables: guests, reservations)
+Scenario B - SQLite (one DB, no schema support):
+-- Physical structure:
+-- Database: hotel.db
+--   Tables: streams, messages, subscriptions, guests, reservations
+Should the Dumbo/Pongo schema definition be:
+The same for both, with deployment config handling the difference?
+Different for each database type?
+Something else?
+5. Function/Stored Procedure Components
+Your emt_append_to_stream example is a PostgreSQL function. Question: Should I create a new FunctionSchemaComponent type (similar to TableSchemaComponent), or should functions just be SQL migrations attached to schema/table components? If you want FunctionSchemaComponent, should it be:
+const appendFunction = dumboFunction('emt_append_to_stream', {
+  migrations: [sqlMigration('create_append', [appendToStreamSQL])],
+});
+
+// Then added to schema
+const eventStoreSchema = dumboDatabaseSchema('event_store', {
+  tables: { streams, messages, subscriptions },
+  functions: { appendToStream: appendFunction }, // ???
+});
+6. Emmett Component Connection
+Looking at your Emmett Architecture example with emmettArch.component(), emmettArch.container(): Question: Should Emmett components:
+Option A: Have no knowledge of Dumbo/Pongo schemas (purely architectural documentation)?
+Option B: Reference which schema they use (e.g., eventStoreComponent.schema = eventStoreDumboSchema)?
+Option C: Be created FROM schema definitions (e.g., emmettArch.component.fromSchema(eventStoreDumboSchema))?
+7. Concrete Use Case
+Can you provide a concrete example of what you want to build? For instance:
+// I want to define:
+// 1. Event store in its own PostgreSQL schema
+// 2. Guest read model in another schema
+// 3. Both accessible via Pongo API
+// 4. Both documented in Emmett architecture
+// 5. The event store should have an inline projection for guest summaries
+
+// What should this code look like?
+const ??? = ???
+If you can show me the desired API/syntax you want to write, I can design the implementation to support it.
+Please answer whichever questions are relevant to clarify your vision. I want to understand what you're trying to achieve before proposing a solution.
+I don't like that, as some just want to keep the event store in the same schema as the read models.
+
+The idea in general is to be able to also run migrations together for a database that contains both the event store and read models. And yes, by default I don't want the user to know the internal structure of the event store, same for pongo collections. They should be opaque, but in the future allow some customisation.
+Ah! Now I understand much better.
Let me clarify what I'm hearing: +What I Understand Now +Flexibility in deployment: Event store and read models can be in: +Same schema (simpler deployments) +Different schemas (separation of concerns) +User's choice, not forced +Opaque internals: Users shouldn't need to know that event store has streams, messages, subscriptions tables - that's an implementation detail +Run migrations together: Whether event store is in the same schema or different, migrations should run together as one cohesive database setup +Future customization: Later, allow power users to customize event store internals, but not required +Key Question: What Does the API Look Like? +Let me propose what I think you want and you tell me if I'm on the right track: +Scenario 1: Event Store + Read Models in SAME schema +// User just wants everything in 'public' schema +const hotelDb = dumboDatabase('hotel', { +public: dumboDatabaseSchema('public', { + // Event store tables are added transparently + ...eventStoreSchemaComponents(), // Returns { streams: table, messages: table, ... } + + // User's read model tables + guests: dumboTable('guests', { /* ... */ }), + reservations: dumboTable('reservations', { /* ... */ }), +}) +}); +Scenario 2: Event Store + Read Models in DIFFERENT schemas +// User wants separation +const hotelDb = dumboDatabase('hotel', { +event_store: eventStoreSchema('event_store'), // Opaque, returns DatabaseSchemaSchemaComponent + +read_models: dumboDatabaseSchema('read_models', { + guests: dumboTable('guests', { /* ... */ }), + reservations: dumboTable('reservations', { /* ... */ }), +}) +}); +Scenario 3: With Inline Projections +const hotelDb = dumboDatabase('hotel', { +event_store: eventStoreSchema('event_store', { + // Inline projections become tables in the SAME schema as event store + inlineProjections: { + guest_summary: pongoSchema.collection('guest_summary'), // Creates guest_summary table + } +}), + +read_models: dumboDatabaseSchema('read_models', { + guests: dumboTable('guests', { /* ... */ }), +}) +}); +Critical Questions: +Q1: Event Store Schema Component API +Should eventStoreSchema() return a DatabaseSchemaSchemaComponent that contains all the tables internally? +// Option A: Opaque schema component +export const eventStoreSchema = ( +schemaName: string, +options?: { inlineProjections?: Record } +): DatabaseSchemaSchemaComponent => { +// Internally creates streams, messages, subscriptions tables +// User doesn't see them +}; + +// Option B: Spread/merge pattern +export const eventStoreSchemaComponents = (): Record => ({ +streams: dumboTable('streams', { migrations: [streamsSQL] }), +messages: dumboTable('messages', { migrations: [messagesSQL] }), +subscriptions: dumboTable('subscriptions', { migrations: [subscriptionsSQL] }), +}); +Which pattern do you prefer? +Q2: Pongo Schema Relationship +If event store and read models are in the same schema, how does Pongo schema map? +// If both in 'public' schema: +const pongoDb = pongoSchema.db('hotel', { +// Do we expose event store collections? +streams: pongoSchema.collection('streams'), // ??? +messages: pongoSchema.collection('messages'), // ??? + +// Or only read models? +guests: pongoSchema.collection('guests'), +reservations: pongoSchema.collection('reservations'), +}); +Should event store tables be exposed via Pongo, or hidden? +Q3: Migration Composition +When running migrations, should event store migrations: +Be part of the normal migration flow (just more tables)? +Run separately/first (event store setup, then read models)? 
Be configurable?
+Q4: Stored Functions
+The emt_append_to_stream PostgreSQL function - should it be:
+// Option A: Part of schema migrations
+const eventStoreSchema = (schemaName: string) =>
+  dumboDatabaseSchema(schemaName, {
+    streams: dumboTable('streams', { /* ... */ }),
+    messages: dumboTable('messages', { /* ... */ }),
+  }, {
+    migrations: [
+      sqlMigration('create_functions', [appendToStreamSQL])
+    ]
+  });
+
+// Option B: First-class function component
+const eventStoreSchema = (schemaName: string) =>
+  dumboDatabaseSchema(schemaName, {
+    streams: dumboTable('streams', { /* ... */ }),
+    appendToStream: dumboFunction('emt_append_to_stream', { /* ... */ }),
+  });
+Which approach?
+Please tell me:
+Which API scenarios (1, 2, 3 above) match what you want?
+Which of the options for each question is closer to your vision?
+Any corrections to my understanding?
+This will help me propose the right design!
+I don't like this ...eventStoreSchemaComponents() - I'd prefer to just use eventStoreSchema(), which could also take inline projections with references to pongo collections. Still, a pongo collection could also be defined inside a schema just like we do for a table, and pongoDatabase could act both as a database and as a schema.
+
+We should be able to make eventStoreSchema and pongo-related schemas extend some base feature schema component, and then detect it, right or not?
+YES! Now I get it! Let me confirm my understanding:
+What You Want
+1. Event Store as Opaque Component
+// User just uses it, doesn't see internal tables
+const hotelDb = dumboDatabase('hotel', {
+  public: dumboDatabaseSchema('public', {
+    eventStore: eventStoreSchema(), // NOT spreading, just add it as a component
+    guests: dumboTable('guests', { /* ... */ }),
+    reservations: dumboTable('reservations', { /* ... */ }),
+  })
+});
+2. Feature Schema Component Pattern
+// Base type that event store, pongo collections, etc. extend
+export type FeatureSchemaComponent = SchemaComponent<
+  string,
+  {
+    featureType: 'event_store' | 'pongo_collections' | 'custom';
+  }
+>;
+
+// Event store extends this
+export type EventStoreSchemaComponent = SchemaComponent<
+  'sc:dumbo:event_store',
+  {
+    featureType: 'event_store';
+    eventStoreName: string;
+    // Internal: tables, functions - user doesn't see
+    internalTables: Map<string, TableSchemaComponent>;
+  }
+>;
+
+// Pongo collection set extends this
+export type PongoCollectionSetSchemaComponent = SchemaComponent<
+  'sc:dumbo:pongo_collections',
+  {
+    featureType: 'pongo_collections';
+    pongoSchema: PongoDbSchema;
+  }
+>;
+3. Detection & Composition
+// When adding to schema, detect what type it is
+dumboDatabaseSchema('public', {
+  // This is detected as EventStoreSchemaComponent
+  eventStore: eventStoreSchema(),
+
+  // This is detected as TableSchemaComponent
+  guests: dumboTable('guests', { /* ... */ }),
+
+  // This is detected as PongoCollectionSetSchemaComponent
+  readModels: pongoCollectionsSchema({
+    guests: pongoSchema.collection('guests'),
+    reservations: pongoSchema.collection('reservations'),
+  }),
+});
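+
+A minimal sketch of how that detection could work at runtime - the featureType discriminator and guard names follow the proposal above and are not a final API:
+const isFeatureComponent = (
+  component: AnySchemaComponent,
+): component is FeatureSchemaComponent =>
+  'featureType' in component;
+
+const isEventStoreComponent = (
+  component: AnySchemaComponent,
+): component is EventStoreSchemaComponent =>
+  isFeatureComponent(component) && component.featureType === 'event_store';
+
+// e.g., a migration orchestrator could route components by kind:
+declare const schema: DatabaseSchemaSchemaComponent; // any schema component
+for (const component of schema.components.values()) {
+  if (isEventStoreComponent(component)) {
+    // expand internal tables for migrations
+  }
+}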
+4. Inline Projections with Pongo Collections
+const hotelDb = dumboDatabase('hotel', {
+  event_store: dumboDatabaseSchema('event_store', {
+    // Event store with inline projections
+    eventStore: eventStoreSchema({
+      inlineProjections: {
+        // Reference to pongo collection - creates table automatically
+        guestSummary: pongoSchema.collection('guest_summary'),
+      }
+    }),
+  }),
+
+  read_models: dumboDatabaseSchema('read_models', {
+    // Or define pongo collections directly in schema
+    readModels: pongoCollectionsSchema({
+      guests: pongoSchema.collection('guests'),
+      reservations: pongoSchema.collection('reservations'),
+    }),
+  }),
+});
+Key Questions to Confirm:
+Q1: Feature Schema Component Base
+Should there be a common base or marker for "feature components" that:
+Contain multiple internal components (tables, functions, etc.)
+Can be detected via isSchemaComponentOfType(comp, 'feature') or similar
+Are opaque (internal structure hidden from user)
+export const isFeatureSchemaComponent = (comp: AnySchemaComponent): boolean => {
+  return comp.schemaComponentKey.startsWith('sc:dumbo:feature:');
+};
+Q2: Pongo Collections as Schema Component
+Should pongoCollectionsSchema() create tables automatically?
+// This creates:
+// - A schema component
+// - Dumbo tables for each collection (for migrations)
+// - Pongo schema for querying
+const readModels = pongoCollectionsSchema({
+  guests: pongoSchema.collection('guests'),
+  reservations: pongoSchema.collection('reservations'),
+});
+
+// Internally generates:
+// - dumboTable('guests', {})
+// - dumboTable('reservations', {})
+// - pongoSchema.db({ guests, reservations })
+Is this correct?
+Q3: Same Schema Flexibility
+All of these should work, right?
+// A: Everything in one schema
+dumboDatabase('hotel', {
+  public: dumboDatabaseSchema('public', {
+    eventStore: eventStoreSchema(),
+    guests: dumboTable('guests', {}),
+  })
+});
+
+// B: Separated schemas
+dumboDatabase('hotel', {
+  event_store: dumboDatabaseSchema('event_store', {
+    eventStore: eventStoreSchema(),
+  }),
+  read_models: dumboDatabaseSchema('read_models', {
+    guests: dumboTable('guests', {}),
+  })
+});
+
+// C: Event store + pongo collections in same schema
+dumboDatabase('hotel', {
+  public: dumboDatabaseSchema('public', {
+    eventStore: eventStoreSchema(),
+    readModels: pongoCollectionsSchema({
+      guests: pongoSchema.collection('guests'),
+    }),
+  })
+});
+Q4: Detection in Migration Orchestrator
+When collecting migrations, detect feature components:
+// In migration orchestrator
+for (const component of schema.components.values()) {
+  if (isEventStoreComponent(component)) {
+    // Handle event store migrations specially if needed
+  } else if (isPongoCollectionsComponent(component)) {
+    // Generate table migrations from pongo collections
+  } else if (isTableComponent(component)) {
+    // Standard table migration
+  }
+}
+Is this the right approach?
+Am I understanding correctly now? The key insight is:
+Feature components (event store, pongo collections) are first-class schema components
+They compose like tables do
+They're detected by type checking
+They encapsulate internal structure
+Should I proceed with this design?
+For the user, a feature should be opaque - they don't need to know how the sausages are made. Still, features should be visible later in the generated type system and added to the dumbo schema definition so they're strongly typed and migrations work fine.
+
+Ad Q3:
+
+A looks weird, as it suggests that eventStore is a table, which it is not.
+Perfect! This clarifies a lot. Let me refine my understanding:
+Key Insights
+1. Feature Components are NOT Tables
+You're right - in scenario A, putting eventStore: eventStoreSchema() alongside tables is confusing because it's not a table. Better API:
+// Option A-revised: Feature at schema level
+const hotelDb = dumboDatabase('hotel', {
+  public: dumboDatabaseSchema('public', {
+    features: {
+      eventStore: eventStoreSchema(),
+    },
+    tables: {
+      guests: dumboTable('guests', {}),
+      reservations: dumboTable('reservations', {}),
+    }
+  })
+});
+
+// OR Option A-alternative: Feature creates its own implicit schema content
+const hotelDb = dumboDatabase('hotel', {
+  public: dumboDatabaseSchema('public')
+    .withFeature(eventStoreSchema())
+    .withTable(dumboTable('guests', {}))
+});
+
+// OR Option A-simpler: Features and tables mixed but clearly typed
+const hotelDb = dumboDatabase('hotel', {
+  public: dumboDatabaseSchema('public', {
+    eventStore: eventStoreSchema(), // Type system knows this is FeatureSchemaComponent
+    guests: dumboTable('guests', {}), // Type system knows this is TableSchemaComponent
+  })
+});
+Question: Which API feels right to you? I'm leaning toward the last one (A-simpler) because:
+Type system prevents confusion
+Simpler API
+Components compose uniformly
+2. Strongly Typed Generated System
+You want the generated types to expose the feature's tables:
+// After running schema generation from:
+const hotelDb = dumboDatabase('hotel', {
+  public: dumboDatabaseSchema('public', {
+    eventStore: eventStoreSchema(),
+    guests: dumboTable('guests', {}),
+  })
+});
+
+// Generated types should include:
+export type HotelSchema = {
+  public: {
+    // Event store tables exposed (even though user didn't define them)
+    streams: { stream_id: string, stream_position: number, ... },
+    messages: { stream_id: string, message_data: unknown, ... },
+    subscriptions: { subscription_id: string, ... },
+
+    // User's tables
+    guests: { id: string, name: string, ... },
+  }
+};
+Question: Should the generated types expose event store internals, or keep them hidden?
+3. Pongo Schema Generation
+For Pongo collections defined via pongoCollectionsSchema():
+const hotelDb = dumboDatabase('hotel', {
+  read_models: dumboDatabaseSchema('read_models', {
+    collections: pongoCollectionsSchema({
+      guests: pongoSchema.collection('guests'),
+      reservations: pongoSchema.collection('reservations'),
+    }),
+  })
+});
+
+// Should generate:
+// 1. Dumbo tables: guests, reservations
+// 2. Pongo schema: PongoDbSchema with those collections
+// 3. Migrations: CREATE TABLE for each collection
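+
+A rough sketch of the migration (3) might emit per collection - the document-store layout (_id + document) is only one of the options discussed below, not a settled design:
+const guestsCollectionMigration = sqlMigration('create_collection_guests', [
+  SQL`CREATE TABLE IF NOT EXISTS guests (
+    _id TEXT PRIMARY KEY,
+    document JSONB NOT NULL
+  );`,
+]);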
+Proposed Architecture
+Feature Schema Component Base
+// Marker interface
+export type FeatureSchemaComponent<
+  URN extends string = string,
+  AdditionalData extends Record<string, unknown> = Record<string, unknown>,
+> = SchemaComponent<
+  URN,
+  AdditionalData & {
+    __featureMarker: true;
+    internalComponents: ReadonlyMap<string, AnySchemaComponent>; // Hidden from user
+  }
+>;
+
+// Type guard
+export const isFeatureSchemaComponent = (
+  comp: AnySchemaComponent
+): comp is FeatureSchemaComponent => {
+  return '__featureMarker' in comp && comp.__featureMarker === true;
+};
+Event Store Schema Component
+export type EventStoreSchemaComponent = FeatureSchemaComponent<
+  'sc:dumbo:feature:event_store',
+  {
+    eventStoreName?: string;
+    inlineProjections?: Record<string, PongoCollectionSchema>;
+  }
+>;
+
+export const eventStoreSchema = (
+  options?: {
+    inlineProjections?: Record<string, PongoCollectionSchema>;
+  }
+): EventStoreSchemaComponent => {
+  // Create internal tables
+  const streams = dumboTable('streams', {
+    migrations: [sqlMigration('create_streams', [streamsTableSQL])]
+  });
+  const messages = dumboTable('messages', {
+    migrations: [sqlMigration('create_messages', [messagesTableSQL])]
+  });
+  const subscriptions = dumboTable('subscriptions', {
+    migrations: [sqlMigration('create_subscriptions', [subscriptionsTableSQL])]
+  });
+
+  // Handle inline projections
+  const projectionTables = Object.entries(options?.inlineProjections ?? {}).map(
+    ([name, collectionSchema]) => dumboTable(collectionSchema.name, {})
+  );
+
+  const allComponents = [streams, messages, subscriptions, ...projectionTables];
+
+  const base = schemaComponent('sc:dumbo:feature:event_store', {
+    components: allComponents,
+  });
+
+  return {
+    ...base,
+    __featureMarker: true as const,
+    eventStoreName: 'event_store',
+    inlineProjections: options?.inlineProjections,
+    internalComponents: new Map(allComponents.map(c => [c.schemaComponentKey, c])),
+  };
+};
+Pongo Collections Schema Component
+export type PongoCollectionsSchemaComponent = FeatureSchemaComponent<
+  'sc:dumbo:feature:pongo_collections',
+  {
+    pongoSchema: PongoDbSchema;
+  }
+>;
+
+export const pongoCollectionsSchema = (
+  collections: Record<string, PongoCollectionSchema>
+): PongoCollectionsSchemaComponent => {
+  // Create dumbo table for each pongo collection
+  const tables = Object.entries(collections).map(([_name, collectionSchema]) =>
+    dumboTable(collectionSchema.name, {
+      // Auto-generate basic table structure for Pongo
+      // Could add _id, document JSONB columns, etc.
+    })
+  );
+
+  const base = schemaComponent('sc:dumbo:feature:pongo_collections', {
+    components: tables,
+  });
+
+  return {
+    ...base,
+    __featureMarker: true as const,
+    pongoSchema: pongoSchema.db(collections),
+    internalComponents: new Map(tables.map(t => [t.schemaComponentKey, t])),
+  };
+};
+Migration Collection
+// In migration orchestrator
+export const collectAllMigrations = (
+  database: DatabaseSchemaComponent,
+): SQLMigration[] => {
+  const migrations: SQLMigration[] = [];
+
+  for (const schema of database.schemas.values()) {
+    for (const component of schema.components.values()) {
+      if (isFeatureSchemaComponent(component)) {
+        // Feature component: extract internal components
+        for (const internalComp of component.internalComponents.values()) {
+          if (isTableComponent(internalComp)) {
+            migrations.push(...internalComp.migrations);
+          }
+        }
+      } else if (isTableComponent(component)) {
+        // Regular table
+        migrations.push(...component.migrations);
+      }
+    }
+  }
+
+  return migrations;
+};
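+
+Usage would then be a one-liner feeding the migrator shown later in this diff - the wiring is a sketch; runSQLMigrations takes a Dumbo pool instance as in its definition below:
+declare const dumbo: Dumbo; // connection pool
+const migrations = collectAllMigrations(hotelDb);
+await runSQLMigrations(dumbo, migrations);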
+Final Clarifying Questions
+Q1: Component Placement
+Which feels most natural?
+// A: Mixed (type system differentiates)
+dumboDatabaseSchema('public', {
+  eventStore: eventStoreSchema(),
+  guests: dumboTable('guests', {}),
+})
+
+// B: Separated
+dumboDatabaseSchema('public', {
+  features: { eventStore: eventStoreSchema() },
+  tables: { guests: dumboTable('guests', {}) },
+})
+
+// C: Builder pattern
+dumboDatabaseSchema('public')
+  .withFeature(eventStoreSchema())
+  .withTable(dumboTable('guests', {}))
+Q2: Generated Types Visibility
+Should generated types expose event store internals?
+// Option A: Expose everything
+type Schema = {
+  public: {
+    streams: { ... }, // From event store
+    messages: { ... }, // From event store
+    guests: { ... }, // From user
+  }
+}
+
+// Option B: Keep features opaque
+type Schema = {
+  public: {
+    guests: { ... }, // Only user-defined tables
+  }
+}
+Q3: Pongo Collections Table Structure
+When pongoCollectionsSchema() creates tables, what should the table structure be?
+// Option A: Document store (MongoDB-like)
+CREATE TABLE guests (
+  _id TEXT PRIMARY KEY,
+  document JSONB NOT NULL
+)
+
+// Option B: Let user define structure
+pongoCollectionsSchema({
+  guests: pongoSchema.collection('guests', {
+    tableDefinition: dumboTable('guests', {
+      columns: { id, name, email, ... }
+    })
+  })
+})
+
+// Option C: Infer from Pongo usage later
+Please confirm these choices and I'll build the complete solution!
diff --git a/src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.ts b/src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.ts
new file mode 100644
index 00000000..642a2153
--- /dev/null
+++ b/src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.ts
@@ -0,0 +1,203 @@
+import type { SQLColumnToken, SQLToken } from '../../sql';
+import {
+  columnSchemaComponent,
+  type ColumnSchemaComponent,
+  databaseSchemaComponent,
+  type DatabaseSchemaComponent,
+  databaseSchemaSchemaComponent,
+  type DatabaseSchemaSchemaComponent,
+  indexSchemaComponent,
+  type IndexSchemaComponent,
+  tableSchemaComponent,
+  type TableSchemaComponent,
+} from '../components';
+import {
+  type AnySchemaComponent,
+  isSchemaComponentOfType,
+  type SchemaComponentOptions,
+} from '../schemaComponent';
+
+const DEFAULT_DATABASE_NAME = '__default_database__';
+const DEFAULT_DATABASE_SCHEMA_NAME = '__default_database_schema__';
+
+const dumboColumn = (
+  name: string,
+  type: string | SQLToken,
+  options: SchemaComponentOptions &
+    Omit<SQLColumnToken, 'name' | 'type'> = {},
+): ColumnSchemaComponent =>
+  columnSchemaComponent({
+    columnName: name,
+    type,
+    ...options,
+  });
+
+const dumboIndex = (
+  name: string,
+  columnNames: string[],
+  options?: { unique?: boolean } & SchemaComponentOptions,
+): IndexSchemaComponent =>
+  indexSchemaComponent({
+    indexName: name,
+    columnNames,
+    isUnique: options?.unique ?? false,
+    ...options,
+  });
+
+const dumboTable = (
+  name: string,
+  definition: {
+    columns?: Record<string, ColumnSchemaComponent>;
+    indexes?: Record<string, IndexSchemaComponent>;
+  } & SchemaComponentOptions,
+): TableSchemaComponent => {
+  const { columns, indexes, ...options } = definition;
+
+  const components = [
+    ...(columns ? Object.values(columns) : []),
+    ...(indexes ? 
Object.values(indexes) : []), + ]; + + return tableSchemaComponent({ + tableName: name, + components, + ...options, + }); +}; + +function dumboDatabaseSchema( + tables: Record, +): DatabaseSchemaSchemaComponent; +function dumboDatabaseSchema( + schemaName: string, + tables: Record, + options?: SchemaComponentOptions, +): DatabaseSchemaSchemaComponent; +function dumboDatabaseSchema( + nameOrTables: string | Record, + tables?: Record, + options?: SchemaComponentOptions, +): DatabaseSchemaSchemaComponent { + const schemaName = + typeof nameOrTables === 'string' + ? nameOrTables + : DEFAULT_DATABASE_SCHEMA_NAME; + const components = Object.values( + (typeof nameOrTables === 'string' ? tables : nameOrTables) ?? {}, + ); + + return databaseSchemaSchemaComponent({ + schemaName, + components, + ...options, + }); +} + +dumboDatabaseSchema.from = ( + schemaName: string | undefined, + tableNames: string[], +): DatabaseSchemaSchemaComponent => { + const tables = tableNames.reduce( + (acc, tableName) => { + acc[tableName] = dumboTable(tableName, {}); + return acc; + }, + {} as Record, + ); + + return schemaName + ? dumboDatabaseSchema(schemaName, tables) + : dumboDatabaseSchema(tables); +}; + +function dumboDatabase( + schemas: Record, +): DatabaseSchemaComponent; +function dumboDatabase( + schema: DatabaseSchemaSchemaComponent, +): DatabaseSchemaComponent; +function dumboDatabase( + databaseName: string, + schemas: Record, + options?: SchemaComponentOptions, +): DatabaseSchemaComponent; +function dumboDatabase( + databaseName: string, + schema: DatabaseSchemaSchemaComponent, + options?: SchemaComponentOptions, +): DatabaseSchemaComponent; +function dumboDatabase( + nameOrSchemas: + | string + | DatabaseSchemaSchemaComponent + | Record, + schemasOrOptions?: + | DatabaseSchemaSchemaComponent + | Record + | SchemaComponentOptions, + options?: SchemaComponentOptions, +): DatabaseSchemaComponent { + const databaseName = + typeof nameOrSchemas === 'string' ? nameOrSchemas : DEFAULT_DATABASE_NAME; + + const schemasOrSchema = + typeof nameOrSchemas === 'string' + ? (schemasOrOptions ?? {}) + : nameOrSchemas; + const schemaMap: Record = + 'schemaComponentKey' in schemasOrSchema && + isSchemaComponentOfType( + schemasOrSchema as AnySchemaComponent, + 'sc:dumbo:database_schema', + ) + ? { + [DEFAULT_DATABASE_SCHEMA_NAME]: + schemasOrSchema as DatabaseSchemaSchemaComponent, + } + : (schemasOrSchema as Record); + + const dbOptions: typeof options = + typeof nameOrSchemas === 'string' + ? options + : (schemasOrOptions as typeof options); + + const schemaComponents: DatabaseSchemaSchemaComponent[] = []; + + for (const [_, schemaComponent] of Object.entries(schemaMap)) { + schemaComponents.push(schemaComponent); + } + + return databaseSchemaComponent({ + databaseName, + components: schemaComponents, + ...dbOptions, + }); +} + +dumboDatabase.from = ( + databaseName: string | undefined, + schemaNames: string[], +): DatabaseSchemaComponent => { + const schemas = schemaNames.reduce( + (acc, schemaName) => { + acc[schemaName] = dumboDatabaseSchema(schemaName, {}); + return acc; + }, + {} as Record, + ); + + return databaseName + ? 
dumboDatabase(databaseName, schemas) + : dumboDatabase(schemas); +}; + +dumboDatabase.defaultName = DEFAULT_DATABASE_NAME; +dumboDatabaseSchema.defaultName = DEFAULT_DATABASE_SCHEMA_NAME; + +export const dumboSchema = { + database: dumboDatabase, + schema: dumboDatabaseSchema, + table: dumboTable, + column: dumboColumn, + index: dumboIndex, +}; diff --git a/src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.unit.spec.ts b/src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.unit.spec.ts new file mode 100644 index 00000000..04a918fc --- /dev/null +++ b/src/packages/dumbo/src/core/schema/dumboSchema/dumboSchema.unit.spec.ts @@ -0,0 +1,178 @@ +import assert from 'node:assert'; +import { describe, it } from 'node:test'; +import { SQL } from '../../sql'; +import { dumboSchema } from './index'; + +const { database, schema, table, column, index } = dumboSchema; +const { Varchar } = SQL.column.type; + +void describe('dumboSchema', () => { + void it('should create a column', () => { + const col = column('id', Varchar('max')); + assert.strictEqual(col.columnName, 'id'); + }); + + void it('should create an index', () => { + const idx = index('idx_email', ['email']); + assert.strictEqual(idx.indexName, 'idx_email'); + assert.strictEqual(idx.isUnique, false); + }); + + void it('should create a unique index', () => { + const idx = index('idx_email', ['email'], { unique: true }); + assert.strictEqual(idx.indexName, 'idx_email'); + assert.strictEqual(idx.isUnique, true); + }); + + void it('should create a table with columns and indexes', () => { + const tbl = table('users', { + columns: { + id: column('id', Varchar('max')), + email: column('email', Varchar('max')), + }, + indexes: { + idx_email: index('idx_email', ['email']), + }, + }); + + assert.strictEqual(tbl.tableName, 'users'); + assert.strictEqual(tbl.columns.size, 2); + assert.strictEqual(tbl.indexes.size, 1); + assert.ok(tbl.columns.has('id')); + assert.ok(tbl.columns.has('email')); + assert.ok(tbl.indexes.has('idx_email')); + }); + + void it('should create a named schema', () => { + const sch = schema('public', { + users: table('users', { + columns: { + id: column('id', Varchar('max')), + }, + }), + }); + + assert.strictEqual(sch.schemaName, 'public'); + assert.strictEqual(sch.tables.size, 1); + assert.ok(sch.tables.has('users')); + }); + + void it('should create a default schema without name', () => { + const sch = schema({ + users: table('users', { + columns: { + id: column('id', Varchar('max')), + }, + }), + }); + + assert.strictEqual(sch.schemaName, dumboSchema.schema.defaultName); + assert.strictEqual(sch.tables.size, 1); + }); + + void it('should create a default database', () => { + const db = database({ + public: schema('public', { + users: table('users', { + columns: { + id: column('id', Varchar('max')), + }, + }), + }), + }); + + assert.strictEqual(db.databaseName, dumboSchema.database.defaultName); + assert.strictEqual(db.schemas.size, 1); + assert.ok(db.schemas.has('public')); + }); + + void it('should create a named database', () => { + const db = database('myapp', { + public: schema('public', { + users: table('users', { + columns: { + id: column('id', Varchar('max')), + }, + }), + }), + }); + + assert.strictEqual(db.databaseName, 'myapp'); + assert.strictEqual(db.schemas.size, 1); + assert.ok(db.schemas.has('public')); + }); + + void it('should handle DEFAULT_SCHEMA', () => { + const db = database( + 'myapp', + schema({ + users: table('users', { + columns: { + id: column('id', Varchar('max')), + }, + }), + }), + ); + 
+ assert.strictEqual(db.databaseName, 'myapp'); + assert.strictEqual(db.schemas.size, 1); + assert.ok(db.schemas.has(dumboSchema.schema.defaultName)); + }); + + void it('should create schema from table names', () => { + const sch = schema.from('public', ['users', 'posts']); + assert.strictEqual(sch.schemaName, 'public'); + assert.strictEqual(sch.tables.size, 2); + }); + + void it('should create database from schema names', () => { + const db = database.from('myapp', ['public', 'analytics']); + assert.strictEqual(db.databaseName, 'myapp'); + assert.strictEqual(db.schemas.size, 2); + }); +}); + +// Samples + +// Simple database with tables in default schema + +const users = table('users', { + columns: { + id: column('id', Varchar('max')), + email: column('email', Varchar('max')), + name: column('name', Varchar('max')), + }, +}); + +export const simpleDb = database( + 'myapp', + schema({ + users, + }), +); + +// Database with multiple schemas +const multiSchemaDb = database('myapp', { + public: schema('public', { + users: table('users', { + columns: { + id: column('id', Varchar('max')), + email: column('email', Varchar('max')), + }, + }), + }), + analytics: schema('analytics', { + events: table('events', { + columns: { + id: column('id', Varchar('max')), + userId: column('user_id', Varchar('max')), + timestamp: column('timestamp', Varchar('max')), + }, + }), + }), +}); + +// Access using name-based maps +const publicSchema = multiSchemaDb.schemas.get('public'); +const usersTable = publicSchema?.tables.get('users'); +export const emailColumn = usersTable?.columns.get('email'); diff --git a/src/packages/dumbo/src/core/schema/dumboSchema/index.ts b/src/packages/dumbo/src/core/schema/dumboSchema/index.ts new file mode 100644 index 00000000..5ad75ea3 --- /dev/null +++ b/src/packages/dumbo/src/core/schema/dumboSchema/index.ts @@ -0,0 +1 @@ +export * from './dumboSchema'; diff --git a/src/packages/dumbo/src/core/schema/index.ts b/src/packages/dumbo/src/core/schema/index.ts index 582a76eb..31539d0b 100644 --- a/src/packages/dumbo/src/core/schema/index.ts +++ b/src/packages/dumbo/src/core/schema/index.ts @@ -1,2 +1,5 @@ -export * from './migrations'; +export * from './components'; +export * from './dumboSchema'; +export * from './migrators'; export * from './schemaComponent'; +export * from './sqlMigration'; diff --git a/src/packages/dumbo/src/core/schema/migrators/index.ts b/src/packages/dumbo/src/core/schema/migrators/index.ts new file mode 100644 index 00000000..58982d62 --- /dev/null +++ b/src/packages/dumbo/src/core/schema/migrators/index.ts @@ -0,0 +1,2 @@ +export * from './migrator'; +export * from './schemaComponentMigrator'; diff --git a/src/packages/dumbo/src/core/schema/migrations.ts b/src/packages/dumbo/src/core/schema/migrators/migrator.ts similarity index 79% rename from src/packages/dumbo/src/core/schema/migrations.ts rename to src/packages/dumbo/src/core/schema/migrators/migrator.ts index 2286e00a..69c8f451 100644 --- a/src/packages/dumbo/src/core/schema/migrations.ts +++ b/src/packages/dumbo/src/core/schema/migrators/migrator.ts @@ -1,56 +1,20 @@ -import type { Dumbo } from '..'; -import { type DatabaseType, fromDatabaseDriverType } from '../drivers'; -import type { SQLExecutor } from '../execute'; +import { type Dumbo } from '../..'; +import { type DatabaseType, fromDatabaseDriverType } from '../../drivers'; +import type { SQLExecutor } from '../../execute'; import { type DatabaseLock, type DatabaseLockOptions, NoDatabaseLock, -} from '../locks'; -import { mapToCamelCase, 
singleOrNull } from '../query'; -import { getFormatter, SQL, type SQLFormatter } from '../sql'; -import { tracer } from '../tracing'; -import { schemaComponent, type SchemaComponent } from './schemaComponent'; +} from '../../locks'; +import { mapToCamelCase, singleOrNull } from '../../query'; +import { SQL, SQLFormatter, getFormatter } from '../../sql'; +import { tracer } from '../../tracing'; +import { type SchemaComponent } from '../schemaComponent'; +import type { MigrationRecord, SQLMigration } from '../sqlMigration'; +import { migrationTableSchemaComponent } from './schemaComponentMigrator'; -export type MigrationStyle = 'None' | 'CreateOrUpdate'; - -export type SQLMigration = { - name: string; - sqls: SQL[]; -}; - -export const sqlMigration = (name: string, sqls: SQL[]): SQLMigration => ({ - name, - sqls, -}); - -export type MigrationRecord = { - id: number; - name: string; - application: string; - sqlHash: string; - timestamp: Date; -}; export const MIGRATIONS_LOCK_ID = 999956789; -const { AutoIncrement, Varchar, Timestamp } = SQL.column.type; - -const migrationTableSQL = SQL` - CREATE TABLE IF NOT EXISTS migrations ( - id ${AutoIncrement({ primaryKey: true })}, - name ${Varchar(255)} NOT NULL UNIQUE, - application ${Varchar(255)} NOT NULL DEFAULT 'default', - sql_hash ${Varchar(64)} NOT NULL, - timestamp ${Timestamp} NOT NULL DEFAULT CURRENT_TIMESTAMP - ); -`; - -export const migrationTableSchemaComponent = schemaComponent( - 'dumbo:schema-component:migrations-table', - { - migrations: [sqlMigration('dumbo:migrationTable:001', [migrationTableSQL])], - }, -); - declare global { var defaultMigratorOptions: Record; } diff --git a/src/packages/dumbo/src/core/schema/migrators/schemaComponentMigrator.ts b/src/packages/dumbo/src/core/schema/migrators/schemaComponentMigrator.ts new file mode 100644 index 00000000..1ae2db4a --- /dev/null +++ b/src/packages/dumbo/src/core/schema/migrators/schemaComponentMigrator.ts @@ -0,0 +1,59 @@ +import type { Dumbo } from '../..'; +import type { DatabaseDriverType } from '../../drivers'; +import { SQL } from '../../sql'; +import { schemaComponent, type SchemaComponent } from '../schemaComponent'; +import { sqlMigration } from '../sqlMigration'; +import { type MigratorOptions, runSQLMigrations } from './migrator'; + +const { AutoIncrement, Varchar, Timestamp } = SQL.column.type; + +const migrationTableSQL = SQL` + CREATE TABLE IF NOT EXISTS migrations ( + id ${AutoIncrement({ primaryKey: true })}, + name ${Varchar(255)} NOT NULL UNIQUE, + application ${Varchar(255)} NOT NULL DEFAULT 'default', + sql_hash ${Varchar(64)} NOT NULL, + timestamp ${Timestamp} NOT NULL DEFAULT CURRENT_TIMESTAMP + ); +`; + +export const migrationTableSchemaComponent = schemaComponent( + 'dumbo:schema-component:migrations-table', + { + migrations: [sqlMigration('dumbo:migrationTable:001', [migrationTableSQL])], + }, +); + +export type SchemaComponentMigrator = { + component: SchemaComponent; + run: (options?: Partial) => Promise; +}; + +export const SchemaComponentMigrator = ( + component: SchemaComponent, + dumbo: Dumbo, +): SchemaComponentMigrator => { + const completedMigrations: string[] = []; + + return { + component, + run: async (options) => { + const pendingMigrations = component.migrations.filter( + (m) => + !completedMigrations.includes( + `${component.schemaComponentKey}:${m.name}`, + ), + ); + + if (pendingMigrations.length === 0) return; + + await runSQLMigrations(dumbo, pendingMigrations, options); + + completedMigrations.push( + ...pendingMigrations.map( + (m) => 
`${component.schemaComponentKey}:${m.name}`, + ), + ); + }, + }; +}; diff --git a/src/packages/dumbo/src/core/schema/schemaComponent.ts b/src/packages/dumbo/src/core/schema/schemaComponent.ts index bebf5768..78d4b713 100644 --- a/src/packages/dumbo/src/core/schema/schemaComponent.ts +++ b/src/packages/dumbo/src/core/schema/schemaComponent.ts @@ -1,37 +1,145 @@ -import { type SQLMigration } from './migrations'; +import { type SQLMigration } from './sqlMigration'; -export type SchemaComponent = { - schemaComponentType: ComponentType; - components: ReadonlyArray; +export type SchemaComponent< + ComponentKey extends string = string, + AdditionalData extends + | Exclude< + Record, + | 'schemaComponentKey' + | 'components' + | 'migrations' + | 'addComponent' + | 'addMigration' + > + | undefined = undefined, +> = { + schemaComponentKey: ComponentKey; + components: ReadonlyMap; migrations: ReadonlyArray; -}; -export type SchemaComponentOptions = - | { - migrations: ReadonlyArray; - components?: never; - } - | { - migrations: ReadonlyArray; - components: ReadonlyArray; - } - | { - migrations?: never; - components: ReadonlyArray; - }; - -export const schemaComponent = ( - type: ComponentType, - migrationsOrComponents: SchemaComponentOptions, -): SchemaComponent => { - const components = migrationsOrComponents.components ?? []; - const migrations = migrationsOrComponents.migrations ?? []; + addComponent: < + SchemaComponentType extends SchemaComponent< + string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + Record + > = SchemaComponent< + string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + Record + >, + >( + component: SchemaComponentType, + ) => SchemaComponentType; + addMigration: (migration: SQLMigration) => void; +} & Exclude< + // eslint-disable-next-line @typescript-eslint/no-empty-object-type + AdditionalData extends undefined ? {} : AdditionalData, + | 'schemaComponentKey' + | 'components' + | 'migrations' + | 'addComponent' + | 'addMigration' +>; + +export type ExtractAdditionalData = + T extends SchemaComponent ? Data : never; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type AnySchemaComponent = SchemaComponent>; + +export type AnySchemaComponentOfType = + // eslint-disable-next-line @typescript-eslint/no-explicit-any + SchemaComponent; + +export type SchemaComponentOptions< + AdditionalOptions extends Record = Record, +> = { + migrations?: ReadonlyArray; + components?: ReadonlyArray; +} & Omit; + +export type SchemaComponentType = `sc:${Kind}`; + +export type DumboSchemaComponentType = + SchemaComponentType<`dumbo:${Kind}`>; + +export const schemaComponent = ( + key: ComponentKey, + options: SchemaComponentOptions, +): SchemaComponent => { + const componentsMap = new Map( + options.components?.map((comp) => [comp.schemaComponentKey, comp]), + ); + + const migrations: SQLMigration[] = [...(options.migrations ?? 
[])]; return { - schemaComponentType: type, - components, + schemaComponentKey: key, + components: componentsMap, get migrations(): SQLMigration[] { - return [...migrations, ...components.flatMap((c) => c.migrations)]; + return [ + ...migrations, + ...Array.from(componentsMap.values()).flatMap((c) => c.migrations), + ]; + }, + addComponent: < + SchemaComponentType extends AnySchemaComponent = AnySchemaComponent, + >( + component: SchemaComponentType, + ): SchemaComponentType => { + componentsMap.set(component.schemaComponentKey, component); + migrations.push(...component.migrations); + return component; }, + addMigration: (migration: SQLMigration) => { + migrations.push(migration); + }, + }; +}; + +export const isSchemaComponentOfType = < + SchemaComponentOfType extends AnySchemaComponent = AnySchemaComponent, +>( + component: AnySchemaComponent, + prefix: string, +): component is SchemaComponentOfType => + component.schemaComponentKey.startsWith(prefix); + +export const filterSchemaComponentsOfType = ( + components: ReadonlyMap, + prefix: string, +): ReadonlyMap => mapSchemaComponentsOfType(components, prefix); + +export const mapSchemaComponentsOfType = ( + components: ReadonlyMap, + prefix: string, + keyMapper?: (component: T) => string, +): ReadonlyMap => + new Map( + Array.from(components.entries()) + .filter(([urn]) => urn.startsWith(prefix)) + .map(([urn, component]) => [ + keyMapper ? keyMapper(component as T) : urn, + component as T, + ]), + ); + +export const findSchemaComponentsOfType = ( + root: AnySchemaComponent, + prefix: string, +): T[] => { + const results: T[] = []; + + const traverse = (component: AnySchemaComponent) => { + if (component.schemaComponentKey.startsWith(prefix)) { + results.push(component as T); + } + for (const child of component.components.values()) { + traverse(child); + } }; + + traverse(root); + + return results; }; diff --git a/src/packages/dumbo/src/core/schema/schema_feature.md b/src/packages/dumbo/src/core/schema/schema_feature.md new file mode 100644 index 00000000..9de5d3a3 --- /dev/null +++ b/src/packages/dumbo/src/core/schema/schema_feature.md @@ -0,0 +1,1345 @@ +# Schema Feature Component Design + +## Overview + +This document defines the design for **Feature Schema Components** - a composable, opaque abstraction layer for complex database features like event stores, Pongo collections, and custom application features that can be integrated seamlessly into Dumbo's schema system. + +## Problem Statement + +Current Dumbo schema components (`database`, `schema`, `table`, `column`, `index`) work well for explicit table definitions, but pose challenges for: + +1. **Complex Features**: Features like event stores have multiple tables, functions, and internal structure that users shouldn't need to understand +2. **Framework Integration**: Pongo collections need both Dumbo tables (for migrations) and Pongo schema (for querying) +3. **Multi-Database Support**: Same schema definition should work across PostgreSQL (with schemas), SQLite (without schemas), and other databases +4. **Multi-Tenancy**: Need flexible sharding strategies applicable at different levels (table, schema, database) +5. **System Metadata**: Migration tracking and schema introspection tables need consistent, configurable placement + +## Design Principles + +1. **Opaque by Default**: Users don't need to know internal structure of features (e.g., event store tables) +2. **Composable**: Features compose like other schema components +3. 
**Strongly Typed**: Generated types expose internal structure for type-safe queries +4. **Database Agnostic**: Schema definitions are portable; migration generation is database-specific +5. **Extensible**: Generic sharding mechanism supports multi-tenancy and custom partitioning strategies +6. **Explicit When Needed**: Power users can customize feature internals and placement + +## Core Concepts + +### 1. Feature Schema Component + +A feature is an opaque schema component that encapsulates internal components (tables, functions, etc.). + +```typescript +// Base marker interface for all features +export type FeatureSchemaComponent< + URN extends string = string, + AdditionalData extends Record = Record, +> = SchemaComponent< + URN, + AdditionalData & { + __featureMarker: true; + internalComponents: ReadonlyMap; // Hidden from user API + } +>; + +// Type guard +export const isFeatureSchemaComponent = ( + comp: AnySchemaComponent, +): comp is FeatureSchemaComponent => { + return '__featureMarker' in comp && comp.__featureMarker === true; +}; +``` + +### 2. Event Store Schema Component + +Event store as a first-class feature component. + +```typescript +export type EventStoreSchemaComponent = FeatureSchemaComponent< + 'sc:dumbo:feature:event_store', + { + eventStoreName?: string; + inlineProjections?: Record; + } +>; + +export const eventStoreSchema = (options?: { + inlineProjections?: Record; +}): EventStoreSchemaComponent => { + // Create internal tables (opaque to user) + const streams = dumboTable('streams', { + migrations: [sqlMigration('create_streams', [streamsTableSQL])], + }); + + const messages = dumboTable('messages', { + migrations: [sqlMigration('create_messages', [messagesTableSQL])], + }); + + const subscriptions = dumboTable('subscriptions', { + migrations: [sqlMigration('create_subscriptions', [subscriptionsTableSQL])], + }); + + // Create functions + const appendFunction = dumboFunction('emt_append_to_stream', { + migrations: [sqlMigration('create_append_function', [appendToStreamSQL])], + }); + + // Handle inline projections (create tables from Pongo collections) + const projectionTables = Object.entries(options?.inlineProjections ?? {}).map( + ([name, collectionSchema]) => + dumboTable(collectionSchema.name, { + // Auto-generate table structure for Pongo collection + migrations: [ + sqlMigration(`create_projection_${name}`, [ + // Generate CREATE TABLE with _id and document columns + ]), + ], + }), + ); + + const allComponents = [ + streams, + messages, + subscriptions, + appendFunction, + ...projectionTables, + ]; + + const base = schemaComponent('sc:dumbo:feature:event_store', { + components: allComponents, + }); + + return { + ...base, + __featureMarker: true as const, + eventStoreName: 'event_store', + inlineProjections: options?.inlineProjections, + internalComponents: new Map( + allComponents.map((c) => [c.schemaComponentKey, c]), + ), + }; +}; +``` + +### 3. Pongo Collections Schema Component + +Bridges Pongo's collection abstraction with Dumbo's table-based migrations. 
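+
+For orientation, usage might look like this - a sketch reusing `pongoSchema.collection` and the component defined just below; not a final API commitment:
+
+```typescript
+// Declare collections once; backing tables and the Pongo schema are derived
+const readModels = pongoCollectionsSchema({
+  guests: pongoSchema.collection('guests'),
+  reservations: pongoSchema.collection('reservations'),
+});
+
+readModels.pongoSchema; // PongoDbSchema - used for querying via Pongo
+readModels.migrations; // SQLMigration[] - used by Dumbo to create the tables
+```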
+ +```typescript +export type PongoCollectionsSchemaComponent = FeatureSchemaComponent< + 'sc:dumbo:feature:pongo_collections', + { + pongoSchema: PongoDbSchema; + } +>; + +export const pongoCollectionsSchema = ( + collections: Record, +): PongoCollectionsSchemaComponent => { + // Create Dumbo table for each Pongo collection + const tables = Object.entries(collections).map(([_name, collectionSchema]) => + dumboTable(collectionSchema.name, { + migrations: [ + sqlMigration(`create_collection_${collectionSchema.name}`, [ + // Auto-generate table structure: + // CREATE TABLE {name} ( + // _id TEXT PRIMARY KEY, + // document JSONB NOT NULL, + // created_at TIMESTAMP DEFAULT NOW(), + // updated_at TIMESTAMP DEFAULT NOW() + // ) + ]), + ], + }), + ); + + const base = schemaComponent('sc:dumbo:feature:pongo_collections', { + components: tables, + }); + + return { + ...base, + __featureMarker: true as const, + pongoSchema: pongoSchema.db(collections), + internalComponents: new Map(tables.map((t) => [t.schemaComponentKey, t])), + }; +}; +``` + +### 4. System Schema Component + +Contains framework metadata tables (migrations, pongo metadata, schema introspection). + +```typescript +export type SystemSchemaComponent = FeatureSchemaComponent< + 'sc:dumbo:feature:system', + { + systemTables: { + migrationTracking: boolean; + pongoMetadata: boolean; + schemaIntrospection: boolean; + }; + } +>; + +export function systemSchema(): SystemSchemaComponent; +export function systemSchema(schemaName: string): DatabaseSchemaSchemaComponent; +export function systemSchema(options: { + migrationTracking?: boolean; + pongoMetadata?: boolean; + schemaIntrospection?: boolean; +}): SystemSchemaComponent; +export function systemSchema( + nameOrOptions?: + | string + | { + migrationTracking?: boolean; + pongoMetadata?: boolean; + schemaIntrospection?: boolean; + }, +): SystemSchemaComponent | DatabaseSchemaSchemaComponent { + const options = + typeof nameOrOptions === 'string' + ? 
{ schemaName: nameOrOptions } + : nameOrOptions; + + const tables: TableSchemaComponent[] = []; + + if (options?.migrationTracking !== false) { + tables.push( + dumboTable('__migrations', { + migrations: [ + sqlMigration('create_migrations_table', [ + SQL`CREATE TABLE IF NOT EXISTS __migrations ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL UNIQUE, + applied_at TIMESTAMP DEFAULT NOW() + )`, + ]), + ], + }), + ); + } + + if (options?.pongoMetadata !== false) { + tables.push( + dumboTable('__pongo_collections', { + migrations: [ + sqlMigration('create_pongo_metadata_table', [ + SQL`CREATE TABLE IF NOT EXISTS __pongo_collections ( + collection_name TEXT PRIMARY KEY, + json_schema JSONB, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() + )`, + ]), + ], + }), + ); + } + + if (options?.schemaIntrospection !== false) { + tables.push( + dumboTable('__schema_metadata', { + migrations: [ + sqlMigration('create_schema_metadata_table', [ + SQL`CREATE TABLE IF NOT EXISTS __schema_metadata ( + component_type TEXT NOT NULL, + component_name TEXT NOT NULL, + component_key TEXT PRIMARY KEY, + definition JSONB NOT NULL, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() + )`, + ]), + ], + }), + ); + } + + const base = schemaComponent('sc:dumbo:feature:system', { + components: tables, + }); + + const component = { + ...base, + __featureMarker: true as const, + systemTables: { + migrationTracking: options?.migrationTracking !== false, + pongoMetadata: options?.pongoMetadata !== false, + schemaIntrospection: options?.schemaIntrospection !== false, + }, + internalComponents: new Map(tables.map((t) => [t.schemaComponentKey, t])), + } satisfies SystemSchemaComponent; + + // If schemaName provided, wrap in schema component + if ('schemaName' in options && options.schemaName) { + return dumboDatabaseSchema(options.schemaName, {}).addComponent(component); + } + + return component; +} + +export const isSystemSchemaComponent = ( + comp: AnySchemaComponent, +): comp is SystemSchemaComponent => { + return comp.schemaComponentKey === 'sc:dumbo:feature:system'; +}; +``` + +### 5. Function Schema Component + +First-class support for stored procedures/functions. + +```typescript +export type FunctionURNType = 'sc:dumbo:function'; +export type FunctionURN = `${FunctionURNType}:${string}`; + +export type FunctionSchemaComponent = SchemaComponent< + FunctionURN, + Readonly<{ + functionName: string; + language?: 'plpgsql' | 'sql' | 'javascript'; // Database-specific + }> +>; + +export const dumboFunction = ( + name: string, + options: SchemaComponentOptions & { + language?: 'plpgsql' | 'sql' | 'javascript'; + }, +): FunctionSchemaComponent => { + const base = schemaComponent(`sc:dumbo:function:${name}` as FunctionURN, { + migrations: options.migrations ?? [], + components: options.components ?? 
[], + }); + + return { + ...base, + functionName: name, + language: options.language, + }; +}; +``` + +## Usage Examples + +### Example 1: Simple Single-Schema Application + +```typescript +// Everything in one schema, system tables auto-added +const simpleApp = dumboDatabase('my_app', { + public: dumboDatabaseSchema('public', { + users: dumboTable('users', { + columns: { + id: dumboColumn('id', { type: 'serial', primaryKey: true }), + email: dumboColumn('email', { type: 'varchar', length: 255 }), + }, + }), + posts: dumboTable('posts', { + columns: { + id: dumboColumn('id', { type: 'serial', primaryKey: true }), + userId: dumboColumn('user_id', { type: 'int' }), + }, + }), + }), +}); + +// System tables (__migrations, __pongo_collections, __schema_metadata) +// are automatically added to the 'public' schema +``` + +### Example 2: Event Store with Read Models + +```typescript +// Event store and read models in same schema +const hotelApp = dumboDatabase('hotel', { + public: dumboDatabaseSchema('public', { + // Event store feature (opaque - user doesn't see internal tables) + eventStore: eventStoreSchema(), + + // User-defined tables + guests: dumboTable('guests', { + columns: { + id: dumboColumn('id', { type: 'serial', primaryKey: true }), + name: dumboColumn('name', { type: 'varchar', length: 255 }), + }, + }), + + reservations: dumboTable('reservations', { + columns: { + id: dumboColumn('id', { type: 'serial', primaryKey: true }), + guestId: dumboColumn('guest_id', { type: 'int' }), + }, + }), + }), +}); + +// Generated migrations include: +// - System tables: __migrations, __pongo_collections, __schema_metadata +// - Event store tables: streams, messages, subscriptions +// - Event store function: emt_append_to_stream +// - User tables: guests, reservations +``` + +### Example 3: Separated Schemas + +```typescript +// Event store and read models in separate schemas +const hotelApp = dumboDatabase('hotel', { + // Dedicated system schema + system: systemSchema('system'), + + // Event store in its own schema + event_store: dumboDatabaseSchema('event_store', { + eventStore: eventStoreSchema(), + }), + + // Read models in separate schema + read_models: dumboDatabaseSchema('read_models', { + guests: dumboTable('guests', { + /* ... */ + }), + reservations: dumboTable('reservations', { + /* ... 
*/ + }), + }), +}); + +// PostgreSQL generates: +// CREATE SCHEMA system; +// CREATE TABLE system.__migrations (...); +// CREATE SCHEMA event_store; +// CREATE TABLE event_store.streams (...); +// CREATE SCHEMA read_models; +// CREATE TABLE read_models.guests (...); +``` + +### Example 4: Event Store with Inline Projections + +```typescript +const hotelApp = dumboDatabase('hotel', { + event_store: dumboDatabaseSchema('event_store', { + eventStore: eventStoreSchema({ + // Inline projections become tables in same schema + inlineProjections: { + guestSummary: pongoSchema.collection('guest_summary'), + reservationSummary: pongoSchema.collection('reservation_summary'), + }, + }), + }), + + read_models: dumboDatabaseSchema('read_models', { + // Or define Pongo collections as a feature + collections: pongoCollectionsSchema({ + guests: pongoSchema.collection('guests'), + reservations: pongoSchema.collection('reservations'), + }), + }), +}); + +// event_store schema contains: +// - streams, messages, subscriptions (event store core) +// - guest_summary, reservation_summary (inline projections) +// +// read_models schema contains: +// - guests, reservations (Pongo collections → Dumbo tables) +``` + +### Example 5: Multiple Databases with Database Group + +```typescript +// Database group for organizing related databases +const hotelSystemGroup = databaseGroup( + 'hotel_system', + { + // Operational database + operational: dumboDatabase('hotel_operational', { + event_store: dumboDatabaseSchema('event_store', { + eventStore: eventStoreSchema(), + }), + read_models: dumboDatabaseSchema('read_models', { + guests: dumboTable('guests', { + /* ... */ + }), + reservations: dumboTable('reservations', { + /* ... */ + }), + }), + }), + + // Analytics database + analytics: dumboDatabase('hotel_analytics', { + public: dumboDatabaseSchema('public', { + events: dumboTable('events', { + /* ... */ + }), + metrics: dumboTable('metrics', { + /* ... */ + }), + }), + }), + + // Reporting database + reporting: dumboDatabase('hotel_reporting', { + public: dumboDatabaseSchema('public', { + reports: dumboTable('reports', { + /* ... */ + }), + }), + }), + }, + { + // Shared system schema across all databases + shared: { + systemSchema: systemSchema('shared_system'), + }, + }, +); +``` + +## System Schema Placement Strategy + +### Single Schema + +When database has exactly one schema, system tables are automatically added to that schema. + +```typescript +const db = dumboDatabase('app', { + public: dumboDatabaseSchema('public', { + /* ... */ + }), +}); +// ✅ System tables → 'public' schema +``` + +### Multiple Schemas Without Explicit System Schema + +Default behavior: Use database type's default schema convention. + +```typescript +const db = dumboDatabase('app', { + event_store: dumboDatabaseSchema('event_store', { + /* ... */ + }), + read_models: dumboDatabaseSchema('read_models', { + /* ... 
*/
+  }),
+});
+
+// PostgreSQL: Check for 'public' schema, otherwise use first schema
+// ✅ If 'public' exists → system tables go there
+// ✅ Otherwise → system tables go to 'event_store' (first schema)
+
+// SQLite: Use first schema (schema names become table prefixes)
+// ✅ System tables → 'event_store' schema (prefixed: event_store___migrations)
+```
+
+### Explicit System Schema
+
+Users can explicitly place the system schema anywhere:
+
+```typescript
+// Option A: Dedicated system schema
+const db = dumboDatabase('app', {
+  system: systemSchema('admin'), // Returns DatabaseSchemaSchemaComponent
+  event_store: dumboDatabaseSchema('event_store', {
+    /* ... */
+  }),
+  read_models: dumboDatabaseSchema('read_models', {
+    /* ... */
+  }),
+});
+
+// Option B: System schema within existing schema
+const db = dumboDatabase('app', {
+  event_store: dumboDatabaseSchema('event_store', {
+    system: systemSchema(), // Just the component
+    eventStore: eventStoreSchema(),
+  }),
+  read_models: dumboDatabaseSchema('read_models', {
+    /* ... */
+  }),
+});
+```
+
+## Generic Sharding Strategy
+
+### Sharding Configuration
+
+```typescript
+export type ShardingStrategy<TShardKey extends string = string> = {
+  // How to identify this shard dimension
+  shardKey: TShardKey;
+
+  // What values this shard can have
+  shardValues: readonly string[] | 'dynamic';
+
+  // Naming pattern for the sharded resource
+  namingPattern: string | ((shard: string, original: string) => string);
+
+  // Which resources should NOT be sharded (shared across all shards)
+  exclude?: string[] | ((resourceName: string) => boolean);
+};
+
+export type ShardingLevel = 'table' | 'schema' | 'database' | 'database-group';
+
+export type ShardingConfig = {
+  level: ShardingLevel;
+  strategy: ShardingStrategy;
+
+  // Optional: Apply sharding only to specific components
+  applyTo?: string[];
+};
+```
+
+### Predefined Sharding Strategies
+
+```typescript
+export const shardingStrategies = {
+  // Multi-tenancy: Shard by tenant ID
+  multiTenant: (tenants: string[] | 'dynamic'): ShardingStrategy => ({
+    shardKey: 'tenant_id',
+    shardValues: tenants,
+    namingPattern: '{shard}_{resource}',
+    exclude: ['system'], // Don't shard system schema
+  }),
+
+  // Geographic regions
+  region: (regions: string[]): ShardingStrategy => ({
+    shardKey: 'region',
+    shardValues: regions,
+    namingPattern: '{resource}_{shard}',
+  }),
+
+  // Time-based partitioning
+  timePartition: (periods: string[]): ShardingStrategy => ({
+    shardKey: 'period',
+    shardValues: periods, // e.g., ['2024_01', '2024_02', ...]
+    namingPattern: '{resource}_{shard}',
+  }),
+
+  // Customer-based sharding
+  customerId: (customerIds: string[] | 'dynamic'): ShardingStrategy => ({
+    shardKey: 'customer_id',
+    shardValues: customerIds,
+    namingPattern: 'cust_{shard}_{resource}',
+  }),
+};
+```
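+
+Both `namingPattern` forms can be applied with a tiny resolver. Below is a
+minimal sketch, assuming a hypothetical `resolveShardName` helper that is not
+part of the planned API; it only illustrates how the string template and the
+function form could be handled uniformly:
+
+```typescript
+// Hypothetical helper (not part of the plan's API): resolves the final
+// resource name from either naming-pattern form of ShardingStrategy.
+const resolveShardName = (
+  namingPattern: string | ((shard: string, original: string) => string),
+  shard: string,
+  original: string,
+): string =>
+  typeof namingPattern === 'function'
+    ? namingPattern(shard, original)
+    : namingPattern.replace('{shard}', shard).replace('{resource}', original);
+
+// resolveShardName('{shard}_{resource}', 'hilton', 'event_store')
+//   → 'hilton_event_store'
+```
+
+### Sharding Examples
+
+#### Schema-Level Sharding (Multi-Tenancy)
+
+```typescript
+const hotelApp = dumboDatabase(
+  'hotel',
+  {
+    system: systemSchema('system'), // Not sharded
+
+    event_store: dumboDatabaseSchema('event_store', {
+      eventStore: eventStoreSchema(),
+    }),
+
+    read_models: dumboDatabaseSchema('read_models', {
+      guests: dumboTable('guests', {
+        /* ... */
+      }),
+      reservations: dumboTable('reservations', {
+        /* ... 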
*/ + }), + }), + }, + { + sharding: { + level: 'schema', + strategy: shardingStrategies.multiTenant(['hilton', 'marriott', 'hyatt']), + }, + }, +); + +// PostgreSQL generates: +// CREATE SCHEMA system; -- Not sharded +// CREATE SCHEMA hilton_event_store; +// CREATE TABLE hilton_event_store.streams (...); +// CREATE SCHEMA hilton_read_models; +// CREATE TABLE hilton_read_models.guests (...); +// CREATE SCHEMA marriott_event_store; +// CREATE TABLE marriott_event_store.streams (...); +// CREATE SCHEMA marriott_read_models; +// CREATE TABLE marriott_read_models.guests (...); +// ... +``` + +#### Database-Level Sharding + +```typescript +const hotelApp = dumboDatabase( + 'hotel', + { + event_store: dumboDatabaseSchema('event_store', { + /* ... */ + }), + read_models: dumboDatabaseSchema('read_models', { + /* ... */ + }), + }, + { + sharding: { + level: 'database', + strategy: shardingStrategies.multiTenant(['hilton', 'marriott']), + }, + }, +); + +// PostgreSQL generates separate databases: +// CREATE DATABASE hilton_hotel; +// CREATE DATABASE marriott_hotel; + +// SQLite generates separate files: +// hilton_hotel.db +// marriott_hotel.db +``` + +#### Table-Level Sharding (Time Partitioning) + +```typescript +const analyticsDb = dumboDatabase( + 'analytics', + { + public: dumboDatabaseSchema('public', { + events: dumboTable('events', { + /* ... */ + }), + metrics: dumboTable('metrics', { + /* ... */ + }), + }), + }, + { + sharding: { + level: 'table', + strategy: shardingStrategies.timePartition([ + '2024_01', + '2024_02', + '2024_03', + ]), + applyTo: ['events'], // Only shard events table + }, + }, +); + +// Generates: +// CREATE TABLE events_2024_01 (...); +// CREATE TABLE events_2024_02 (...); +// CREATE TABLE events_2024_03 (...); +// CREATE TABLE metrics (...); -- Not sharded +``` + +#### Database Group Sharding + +```typescript +const hotelSystemGroup = databaseGroup( + 'hotel_system', + { + operational: dumboDatabase('hotel_operational', { + /* ... */ + }), + analytics: dumboDatabase('hotel_analytics', { + /* ... */ + }), + reporting: dumboDatabase('hotel_reporting', { + /* ... */ + }), + }, + { + sharding: { + level: 'database', + strategy: shardingStrategies.multiTenant(['hilton', 'marriott']), + }, + shared: { + systemSchema: systemSchema('shared_system'), + }, + }, +); + +// Generates for each tenant: +// Tenant 'hilton': +// Database: hilton_hotel_operational +// Database: hilton_hotel_analytics +// Database: hilton_hotel_reporting +// Shared: shared_system schema (referenced from all databases) +// +// Tenant 'marriott': +// Database: marriott_hotel_operational +// Database: marriott_hotel_analytics +// Database: marriott_hotel_reporting +// Shared: shared_system schema (referenced from all databases) +``` + +### Component-Level Sharding Override + +```typescript +// Different sharding strategies for different schemas +const hotelApp = dumboDatabase('hotel', { + system: systemSchema('system'), // No sharding + + event_store: dumboDatabaseSchema( + 'event_store', + { + eventStore: eventStoreSchema(), + }, + { + // Override: shard event store by tenant + sharding: { + level: 'schema', + strategy: shardingStrategies.multiTenant(['hilton', 'marriott']), + }, + }, + ), + + analytics: dumboDatabaseSchema( + 'analytics', + { + events: dumboTable('events', { + /* ... 
*/
+      }),
+    },
+    {
+      // Override: shard analytics by region
+      sharding: {
+        level: 'table',
+        strategy: shardingStrategies.region(['us_east', 'us_west', 'eu']),
+      },
+    },
+  ),
+});
+
+// Generates:
+// CREATE SCHEMA system;
+// CREATE SCHEMA hilton_event_store;
+// CREATE SCHEMA marriott_event_store;
+// CREATE SCHEMA analytics;
+// CREATE TABLE analytics.events_us_east (...);
+// CREATE TABLE analytics.events_us_west (...);
+// CREATE TABLE analytics.events_eu (...);
+```
+
+## Database Group Design
+
+### Database Group Component
+
+```typescript
+export type DatabaseGroup<
+  Databases extends Record<string, DatabaseSchemaComponent> = Record<
+    string,
+    DatabaseSchemaComponent
+  >,
+> = {
+  groupName: string;
+  databases: Databases;
+
+  // Group-level sharding configuration
+  sharding?: ShardingConfig;
+
+  // Resources shared across all databases in group
+  shared?: {
+    systemSchema?: SystemSchemaComponent;
+  };
+};
+
+export const databaseGroup = <
+  T extends Record<string, DatabaseSchemaComponent>,
+>(
+  groupName: string,
+  databases: T,
+  options?: {
+    sharding?: ShardingConfig;
+    shared?: {
+      systemSchema?: SystemSchemaComponent;
+    };
+  },
+): DatabaseGroup<T> => ({
+  groupName,
+  databases,
+  sharding: options?.sharding,
+  shared: options?.shared,
+});
+```
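+
+Group-level sharding composes with the strategies defined earlier. A minimal
+sketch of how the provisioned database names could be enumerated, assuming
+the hypothetical `resolveShardName` helper from the earlier sketch and that
+`DatabaseSchemaComponent` exposes its `databaseName`:
+
+```typescript
+// Hypothetical expansion (not part of the planned API): lists the physical
+// database names a statically sharded group would provision.
+// Assumptions: DatabaseSchemaComponent exposes `databaseName`, and
+// resolveShardName is the sketch above; exclusion handling is omitted.
+const expandGroupDatabases = (
+  group: DatabaseGroup,
+  config: ShardingConfig,
+): string[] =>
+  config.strategy.shardValues === 'dynamic'
+    ? [] // dynamic shards are provisioned at runtime instead
+    : config.strategy.shardValues.flatMap((shard) =>
+        Object.values(group.databases).map((database) =>
+          resolveShardName(
+            config.strategy.namingPattern,
+            shard,
+            database.databaseName,
+          ),
+        ),
+      );
+
+// For the hotel_system group sharded by ['hilton', 'marriott'] this yields
+// hilton_hotel_operational, hilton_hotel_analytics, ...,
+// marriott_hotel_reporting (matching the generated names shown earlier).
+```
+
+### Use Cases for Database Groups
+
+1. **Logical Organization**: Group related databases for documentation and architecture diagrams
+2. **Shared Sharding**: Apply the same sharding strategy across multiple databases
+3. **Shared Resources**: Single system schema referenced by multiple databases
+4. **CQRS/Event Sourcing**: Separate databases for commands, queries, and events
+5. **Polyglot Persistence**: Different databases for different concerns (operational, analytics, reporting)
+
+## Migration Generation
+
+### Database-Agnostic Schema, Database-Specific Migrations
+
+Schema definitions are portable; migration generation considers the target database type:
+
+```typescript
+// Schema definition (database-agnostic)
+const hotelSchema = dumboDatabase('hotel', {
+  event_store: dumboDatabaseSchema('event_store', {
+    eventStore: eventStoreSchema(),
+  }),
+  read_models: dumboDatabaseSchema('read_models', {
+    guests: dumboTable('guests', {
+      /* ... 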
*/
+    }),
+  }),
+});
+
+// PostgreSQL migration generation
+const pgMigrations = generateMigrations(hotelSchema, {
+  databaseType: 'postgresql',
+});
+// Generates:
+// CREATE SCHEMA event_store;
+// CREATE TABLE event_store.streams (...);
+// CREATE SCHEMA read_models;
+// CREATE TABLE read_models.guests (...);
+
+// SQLite migration generation
+const sqliteMigrations = generateMigrations(hotelSchema, {
+  databaseType: 'sqlite',
+  sqliteStrategy: 'prefix-tables', // or 'separate-files'
+});
+// prefix-tables generates:
+// CREATE TABLE event_store_streams (...);
+// CREATE TABLE read_models_guests (...);
+//
+// separate-files generates:
+// File: event_store.db → CREATE TABLE streams (...);
+// File: read_models.db → CREATE TABLE guests (...);
+```
+
+### Migration Collection from Features
+
+The migration orchestrator detects feature components and extracts their internal migrations:
+
+```typescript
+export const collectAllMigrations = (
+  database: DatabaseSchemaComponent,
+): SQLMigration[] => {
+  const migrations: SQLMigration[] = [];
+
+  for (const schema of database.schemas.values()) {
+    for (const component of schema.components.values()) {
+      if (isFeatureSchemaComponent(component)) {
+        // Feature component: extract internal components
+        for (const internalComp of component.internalComponents.values()) {
+          if (isTableComponent(internalComp)) {
+            migrations.push(...internalComp.migrations);
+          } else if (isFunctionComponent(internalComp)) {
+            migrations.push(...internalComp.migrations);
+          }
+        }
+      } else if (isTableComponent(component)) {
+        // Regular table
+        migrations.push(...component.migrations);
+      } else if (isFunctionComponent(component)) {
+        // Regular function
+        migrations.push(...component.migrations);
+      }
+    }
+  }
+
+  return migrations;
+};
+```
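+
+The collector above leans on the `isFeatureSchemaComponent()` type guard
+scheduled for Phase 1 of the checklist. A minimal sketch of one possible
+shape, assuming feature components are recognizable by the
+`internalComponents` map the collector reads; the real discriminator is an
+open implementation detail:
+
+```typescript
+// Hedged sketch (assumption): a feature component is anything exposing the
+// `internalComponents` map used by collectAllMigrations. The final
+// implementation may use an explicit discriminator field instead.
+const isFeatureSchemaComponent = (
+  component: unknown,
+): component is { internalComponents: Map<string, unknown> } =>
+  typeof component === 'object' &&
+  component !== null &&
+  'internalComponents' in component;
+```
+
+## Type Generation
+
+### Exposing Feature Internals in Generated Types
+
+While features are opaque at definition time, generated types expose internal structure for type-safe queries:
+
+```typescript
+// Schema definition (event store is opaque)
+const hotelSchema = dumboDatabase('hotel', {
+  public: dumboDatabaseSchema('public', {
+    eventStore: eventStoreSchema(),
+    guests: dumboTable('guests', {
+      /* ... 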
*/
+    }),
+  }),
+});
+
+// Generated types expose all tables (including event store internals)
+export type HotelSchema = {
+  public: {
+    // Event store tables (exposed for type-safe queries)
+    streams: {
+      stream_id: string;
+      stream_position: number;
+      partition: string;
+      stream_type: string;
+      stream_metadata: unknown;
+      is_archived: boolean;
+    };
+    messages: {
+      stream_id: string;
+      stream_position: number;
+      partition: string;
+      message_kind: string;
+      message_data: unknown;
+      message_metadata: unknown;
+      message_schema_version: string;
+      message_type: string;
+      message_id: string;
+      is_archived: boolean;
+      global_position: number;
+      created: Date;
+    };
+    subscriptions: {
+      subscription_id: string;
+      version: number;
+      partition: string;
+      last_processed_position: number;
+    };
+
+    // User-defined tables
+    guests: {
+      id: number;
+      name: string;
+      email: string;
+    };
+  };
+};
+
+// Usage with type-safe queries
+const stream = await db
+  .from('streams') // TypeScript knows this exists
+  .where('stream_id', '=', 'guest-123')
+  .select(['stream_position', 'stream_type']);
+// Type: { stream_position: number, stream_type: string }[]
+```
+
+## Integration with Emmett Architecture
+
+Feature components are separate from Emmett architectural components but can be mapped to them:
+
+```typescript
+// Emmett: Logical architecture (business components with ports)
+const hotelManagementContainer = emmettArch.container('hotel-management', {
+  eventStore: emmettArch.component('event-store', {
+    ports: {
+      exposes: {
+        commands: {
+          appendToStream: (/* ... */) => Promise.resolve(),
+        },
+      },
+    },
+  }),
+
+  guests: emmettArch.component('guests', {
+    ports: {
+      exposes: {
+        queries: {
+          getGuestByExternalId: query(),
+        },
+      },
+    },
+  }),
+
+  reservations: emmettArch.component('reservations', {
+    ports: {
+      requires: {
+        guests: {
+          getGuestByExternalId: query(),
+        },
+      },
+    },
+  }),
+});
+
+// Dumbo: Physical schema (database structure)
+const hotelSchema = dumboDatabase('hotel', {
+  event_store: dumboDatabaseSchema('event_store', {
+    eventStore: eventStoreSchema(),
+  }),
+
+  read_models: dumboDatabaseSchema('read_models', {
+    guests: dumboTable('guests', {
+      /* ... */
+    }),
+    reservations: dumboTable('reservations', {
+      /* ... */
+    }),
+  }),
+});
+
+// Mapping layer (optional - for documentation/tooling)
+const deployment = {
+  architecture: hotelManagementContainer,
+  schema: hotelSchema,
+  mapping: {
+    // Map Emmett components to Dumbo schemas/tables
+    'event-store': 'event_store.eventStore',
+    guests: 'read_models.guests',
+    reservations: 'read_models.reservations',
+  },
+};
+```
+
+## Design Decisions
+
+### 1. Why Feature Components Extend SchemaComponent?
+
+- **Uniform Composition**: Features compose like tables, indexes, etc.
+- **Consistent API**: Same `addComponent()` pattern everywhere
+- **Type Safety**: Generic `SchemaComponent` infrastructure works for features
+- **Migration System**: Features participate in migration collection automatically
+
+### 2. Why System Schema is a Feature Component?
+
+- **Consistent Placement**: Same composition rules as other features
+- **Flexible Location**: Can be in a dedicated schema or mixed with application schemas
+- **Opaque Internals**: Users don't need to know about the `__migrations` and `__pongo_collections` tables
+- **Customizable**: Power users can configure which system tables to include
+
+### 3. Why Generic Sharding vs. Hardcoded Multi-Tenancy?
+
+- **Extensibility**: Supports time partitioning, regional sharding, custom strategies
+- **Composition**: Can combine multiple sharding dimensions in the future
+- **Flexibility**: Same mechanism for different use cases (multi-tenancy, scaling, compliance)
+- **Simplicity**: Single concept to learn instead of multiple special cases
+
+### 4. Why Database Groups?
+
+- **Organization**: Large systems have multiple related databases
+- **Shared Resources**: System schema can be shared across databases
+- **Consistent Sharding**: Apply the same tenant strategy to command/query/event databases
+- **Documentation**: Architecture diagrams show logical database groupings
+
+### 5. Why Portable Schema Definitions?
+
+- **Developer Experience**: Write once, deploy to PostgreSQL or SQLite
+- **Testing**: Test with SQLite, deploy to PostgreSQL
+- **Flexibility**: Change database type without rewriting the schema
+- **Separation of Concerns**: Schema = logical structure, migrations = physical implementation
+
+## Future Enhancements
+
+### 1. Composable Sharding (Out of Scope for Initial Implementation)
+
+Support multiple sharding dimensions:
+
+```typescript
+{
+  sharding: [
+    { level: 'database', strategy: shardingStrategies.multiTenant(['hilton']) },
+    { level: 'schema', strategy: shardingStrategies.region(['us', 'eu']) },
+    { level: 'table', strategy: shardingStrategies.timePartition(['2024_01']) },
+  ],
+}
+// Generates: Database hilton_hotel → Schema us_event_store → Table events_2024_01
+```
+
+### 2. Pattern-Based Exclusion (Future)
+
+Support patterns in sharding exclusion:
+
+```typescript
+{
+  sharding: {
+    strategy: tenantSharding,
+    exclude: [
+      'system', // Exact match
+      '__*', // Glob pattern: all system tables
+      /^temp_/, // Regex: temporary tables
+      (name) => name.startsWith('cache_'), // Function
+    ]
+  }
+}
+```
+
+### 3. Dynamic Shard Provisioning (Out of Scope for Schema Definition)
+
+Runtime provisioning of new shards (tenants, regions, etc.):
+
+```typescript
+// This is a runtime concern, not schema definition
+const provisioner = createShardProvisioner(schema, config);
+await provisioner.provisionShard(connection, 'new_tenant_id');
+```
+
+### 4. Cross-Database Relationships (Future)
+
+Support foreign keys across sharded databases:
+
+```typescript
+// Define a relationship that spans databases
+const relationship = crossDatabaseForeignKey(
+  'read_models.reservations.guest_id',
+  'read_models.guests.id',
+  { onDelete: 'CASCADE' },
+);
+```
+
+### 5. Migration Dependency Graph (Future)
+
+Explicit migration dependencies for complex scenarios:
+
+```typescript
+const createUsersMigration = sqlMigration('create_users', [
+  /* ... */
+]);
+const createPostsMigration = sqlMigration(
+  'create_posts',
+  [
+    /* ... */
+  ],
+  {
+    dependsOn: [createUsersMigration], // Posts table needs the users table first
+  },
+);
+```
+
+## Open Questions
+
+### Q1: Function Component Placement
+
+Should functions be:
+
+- Top-level components in a schema (like tables)?
+- Nested within table components?
+- Only within feature components?
+ +**Current decision**: Top-level and within features (flexible) + +### Q2: SQLite Multiple Schema Strategy Default + +For SQLite with multiple schemas, default to: + +- Table prefixing (simpler, single file) +- Separate database files (stronger isolation) +- Error (force explicit choice) + +**Current decision**: Table prefixing (simpler default, user can override) + +### Q3: System Schema Naming Convention + +Should system schema default name be: + +- `__dumbo_system` (clear it's framework) +- `_system` (shorter) +- `system` (clean but might conflict) + +**Current decision**: `system` for dedicated schema, auto-add to first schema otherwise + +### Q4: Sharding Naming Pattern Syntax + +Support only string templates, or also functions? + +- String: `'{shard}_{resource}'` +- Function: `(shard, resource) => ...` + +**Current decision**: Both (string for simplicity, function for flexibility) + +### Q5: Feature Component Registration + +Should there be a registry for custom feature components? + +```typescript +// Register custom feature +registerFeatureComponent('custom_feature', { + detect: (comp) => comp.schemaComponentKey.startsWith('sc:custom:'), + extractMigrations: (comp) => { + /* ... */ + }, +}); +``` + +**Current decision**: Not yet - keep simple, add if needed + +## Implementation Checklist + +### Phase 1: Core Feature Components + +- [ ] `FeatureSchemaComponent` base type +- [ ] `isFeatureSchemaComponent()` type guard +- [ ] `EventStoreSchemaComponent` implementation +- [ ] `PongoCollectionsSchemaComponent` implementation +- [ ] `SystemSchemaComponent` implementation +- [ ] `FunctionSchemaComponent` implementation + +### Phase 2: System Schema Placement + +- [ ] Auto-detection logic for single schema +- [ ] Database type default schema logic (PostgreSQL 'public', etc.) 
+- [ ] Explicit system schema placement +- [ ] System schema component tests + +### Phase 3: Generic Sharding + +- [ ] `ShardingStrategy` type definition +- [ ] `ShardingConfig` type definition +- [ ] Predefined strategies (`multiTenant`, `region`, `timePartition`) +- [ ] Sharding at database level +- [ ] Sharding at schema level +- [ ] Sharding at table level +- [ ] Exclusion logic for shared resources + +### Phase 4: Database Groups + +- [ ] `DatabaseGroup` type definition +- [ ] `databaseGroup()` factory function +- [ ] Shared system schema across databases +- [ ] Group-level sharding +- [ ] Migration generation for groups + +### Phase 5: Migration Generation + +- [ ] Detect feature components in migration collector +- [ ] Extract internal migrations from features +- [ ] Database-specific migration generation (PostgreSQL vs SQLite) +- [ ] SQLite table prefixing strategy +- [ ] SQLite separate files strategy +- [ ] Sharding-aware migration generation + +### Phase 6: Type Generation + +- [ ] Extract internal structure from features +- [ ] Generate types for event store tables +- [ ] Generate types for Pongo collections +- [ ] Generate types for custom features +- [ ] Exclude system tables from main types (optional) + +### Phase 7: Integration & Documentation + +- [ ] Dumbo schema API updates +- [ ] Pongo schema integration +- [ ] Emmett architecture mapping examples +- [ ] Unit tests for all components +- [ ] Integration tests with PostgreSQL +- [ ] Integration tests with SQLite +- [ ] Documentation and examples + +## Related Files + +- [MIGRATION_UNIFICATION_PLAN.md](./MIGRATION_UNIFICATION_PLAN.md) - Migration system design +- [schemaComponent.ts](./schemaComponent.ts) - Base schema component implementation +- [dumboSchema.ts](./dumboSchema/dumboSchema.ts) - Dumbo schema builder API +- [pongo/schema/index.ts](../../../pongo/src/core/schema/index.ts) - Pongo schema system +- Emmett Architecture (external package) - Component/container/system definitions + +## References + +- [Dumbo Migration Unification Plan](./MIGRATION_UNIFICATION_PLAN.md) - Context on migration system +- [Pongo Strongly Typed Client](https://event-driven.io/en/pongo_strongly_typed_client/) - Pongo collection schema +- [Emmett Projections Testing](https://event-driven.io/en/emmett_projections_testing/) - Event store and projections +- C4 Model - Architectural component hierarchy (system → container → component) + +--- + +**Document Version**: 1.0 +**Last Updated**: 2025-10-31 +**Status**: Design Proposal +**Authors**: Based on collaborative design discussion diff --git a/src/packages/dumbo/src/core/schema/sqlMigration.ts b/src/packages/dumbo/src/core/schema/sqlMigration.ts new file mode 100644 index 00000000..590472c4 --- /dev/null +++ b/src/packages/dumbo/src/core/schema/sqlMigration.ts @@ -0,0 +1,21 @@ +import { SQL } from '../sql'; + +export type MigrationStyle = 'None' | 'CreateOrUpdate'; + +export type SQLMigration = { + name: string; + sqls: SQL[]; +}; + +export const sqlMigration = (name: string, sqls: SQL[]): SQLMigration => ({ + name, + sqls, +}); + +export type MigrationRecord = { + id: number; + name: string; + application: string; + sqlHash: string; + timestamp: Date; +}; diff --git a/src/packages/dumbo/src/core/sql/processors/columnProcessors.ts b/src/packages/dumbo/src/core/sql/processors/columnProcessors.ts index 78766385..9245fa8b 100644 --- a/src/packages/dumbo/src/core/sql/processors/columnProcessors.ts +++ b/src/packages/dumbo/src/core/sql/processors/columnProcessors.ts @@ -1,9 +1,9 @@ import type { 
   BigIntegerToken,
   DefaultSQLColumnToken,
+  SQLColumnTypeTokens,
   SQLToken,
 } from '../tokens';
-import { SQLColumnTokens } from '../tokens';
 import { SQLProcessor, type SQLProcessorContext } from './sqlProcessor';
 
 type ExtractTokenType<T> = T extends (...args: never[]) => infer R
@@ -15,8 +15,8 @@ type ExtractTokenType<T> = T extends (...args: never[]) => infer R
   ? R
   : never;
 
 export type DefaultSQLColumnProcessors = {
-  [key in keyof SQLColumnTokens]: SQLProcessor<
-    ExtractTokenType<(typeof SQLColumnTokens)[key]>
+  [key in keyof SQLColumnTypeTokens]: SQLProcessor<
+    ExtractTokenType<(typeof SQLColumnTypeTokens)[key]>
   >;
 };
diff --git a/src/packages/dumbo/src/core/sql/sql.ts b/src/packages/dumbo/src/core/sql/sql.ts
index e4f317e7..2588acfb 100644
--- a/src/packages/dumbo/src/core/sql/sql.ts
+++ b/src/packages/dumbo/src/core/sql/sql.ts
@@ -8,7 +8,7 @@ import type { ParametrizedSQL } from './parametrizedSQL';
 import { isTokenizedSQL, TokenizedSQL } from './tokenizedSQL';
 import {
   SQLColumnToken,
-  SQLColumnTokens,
+  SQLColumnTypeTokens,
   SQLIdentifier,
   SQLIn,
   SQLPlain,
@@ -99,10 +99,10 @@ SQL.check = {
   isSQLIn: SQLIn.check,
 };
 
-const columnFactory: SQLColumnToken & { type: typeof SQLColumnTokens } =
+const columnFactory: SQLColumnToken & { type: typeof SQLColumnTypeTokens } =
   SQLColumnToken as unknown as SQLColumnToken & {
-    type: typeof SQLColumnTokens;
+    type: typeof SQLColumnTypeTokens;
   };
-columnFactory.type = SQLColumnTokens;
+columnFactory.type = SQLColumnTypeTokens;
 
 SQL.column = columnFactory;
diff --git a/src/packages/dumbo/src/core/sql/tokens/columnTokens.ts b/src/packages/dumbo/src/core/sql/tokens/columnTokens.ts
index 34b06247..d619d0a3 100644
--- a/src/packages/dumbo/src/core/sql/tokens/columnTokens.ts
+++ b/src/packages/dumbo/src/core/sql/tokens/columnTokens.ts
@@ -67,7 +67,7 @@ export type AutoIncrementSQLColumnToken = SQLToken<
 export const AutoIncrementSQLColumnToken =
   SQLToken('SQL_COLUMN_AUTO_INCREMENT');
 
-export const SQLColumnTokens = {
+export const SQLColumnTypeTokens = {
   AutoIncrement: AutoIncrementSQLColumnToken.from,
   BigInteger: BigIntegerToken.from(undefined!),
   BigSerial: BigSerialToken.from(undefined!),
@@ -78,7 +78,7 @@
   Timestamptz: TimestamptzToken.from(undefined!),
   Varchar: VarcharToken.from,
 };
-export type SQLColumnTokens = typeof SQLColumnTokens;
+export type SQLColumnTypeTokens = typeof SQLColumnTypeTokens;
 
 export type DefaultSQLColumnToken =
   | AutoIncrementSQLColumnToken
diff --git a/src/packages/pongo/src/core/database/pongoDatabaseSchemaComponent.ts b/src/packages/pongo/src/core/database/pongoDatabaseSchemaComponent.ts
index 42aaab37..a01f0f48 100644
--- a/src/packages/pongo/src/core/database/pongoDatabaseSchemaComponent.ts
+++ b/src/packages/pongo/src/core/database/pongoDatabaseSchemaComponent.ts
@@ -30,13 +30,6 @@ export type PongoDatabaseSchemaComponent<
   ) => PongoCollectionSchemaComponent;
 };
 
-export type PongoDatabaseSchemaComponentFactory = <
-  DriverType extends DatabaseDriverType = DatabaseDriverType,
->(
-  driverType: DriverType,
-  existingCollections: PongoCollectionSchemaComponent[],
-) => PongoDatabaseSchemaComponent;
-
 export type PongoDatabaseSchemaComponentOptions<
   DriverType extends DatabaseDriverType = DatabaseDriverType,
   T extends Record = Record<