diff --git a/.changeset/add-migration-installer-api.md b/.changeset/add-migration-installer-api.md new file mode 100644 index 000000000..efd201736 --- /dev/null +++ b/.changeset/add-migration-installer-api.md @@ -0,0 +1,5 @@ +--- +'@pgflow/edge-worker': patch +--- + +Add migration installer API endpoints for no-CLI platforms diff --git a/package.json b/package.json index 8c3462853..15512e676 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,6 @@ "private": true, "devDependencies": { "@axhxrx/nx-deno": "^1.0.2", - "supabase": "^2.62.10", "@changesets/cli": "^2.28.1", "@decimalturn/toml-patch": "0.3.7", "@eslint/js": "^9.8.0", @@ -45,7 +44,9 @@ "netlify-cli": "^22.1.3", "nx": "21.2.1", "prettier": "^2.6.2", + "supabase": "^2.62.10", "tslib": "^2.3.0", + "tsx": "^4.20.6", "typescript": "5.8.3", "typescript-eslint": "8.34.1", "vite": "6.3.5", diff --git a/pkgs/cli/project.json b/pkgs/cli/project.json index a6d3a8051..cf9bbdc90 100644 --- a/pkgs/cli/project.json +++ b/pkgs/cli/project.json @@ -61,7 +61,7 @@ "serve:functions:e2e": { "executor": "nx:run-commands", "continuous": true, - "dependsOn": ["^build"], + "dependsOn": ["^build", "edge-worker:build"], "local": true, "cache": false, "options": { diff --git a/pkgs/core/.gitignore b/pkgs/core/.gitignore deleted file mode 100644 index 96b1c1fe7..000000000 --- a/pkgs/core/.gitignore +++ /dev/null @@ -1 +0,0 @@ -atlas/schema.sql diff --git a/pkgs/edge-worker/.gitignore b/pkgs/edge-worker/.gitignore index 9e5aa8338..e1fcdedcf 100644 --- a/pkgs/edge-worker/.gitignore +++ b/pkgs/edge-worker/.gitignore @@ -1,3 +1,4 @@ supabase/migrations/* supabase/functions/_dist/ supabase/seed.sql +src/_generated/ diff --git a/pkgs/edge-worker/deno.lock b/pkgs/edge-worker/deno.lock deleted file mode 100644 index 11cf1a23e..000000000 --- a/pkgs/edge-worker/deno.lock +++ /dev/null @@ -1,159 +0,0 @@ -{ - "version": "4", - "specifiers": { - "jsr:@deno-library/progress@*": "1.5.1", - "jsr:@henrygd/queue@^1.0.7": "1.2.0", - "jsr:@oscar6echo/postgres@3.4.5-d": "3.4.5-d", - "jsr:@std/assert@0.224": "0.224.0", - "jsr:@std/async@0.224": "0.224.2", - "jsr:@std/crypto@0.224": "0.224.0", - "jsr:@std/fmt@0.224": "0.224.0", - "jsr:@std/fmt@1.0.3": "1.0.3", - "jsr:@std/internal@0.224": "0.224.0", - "jsr:@std/io@0.225.0": "0.225.0", - "jsr:@std/testing@0.224": "0.224.0", - "npm:@supabase/supabase-js@^2.39.0": "2.86.0", - "npm:@types/node@*": "22.5.4" - }, - "jsr": { - "@deno-library/progress@1.5.1": { - "integrity": "966611826b8bb27baae73ab1c4fa4317cd4edd2abb99750cd6f8488d22d5b121", - "dependencies": [ - "jsr:@std/fmt@1.0.3", - "jsr:@std/io" - ] - }, - "@henrygd/queue@1.2.0": { - "integrity": "3e6c968f9bd56e3e7dfbd9952d0b7173a4ecdb7d4df041b3a0f4dfc896efeede" - }, - "@oscar6echo/postgres@3.4.5-d": { - "integrity": "0648f637e9ad2533637b7c377aa62aa4fcacad86f497664edd5962ab0f997c03" - }, - "@std/assert@0.224.0": { - "integrity": "8643233ec7aec38a940a8264a6e3eed9bfa44e7a71cc6b3c8874213ff401967f", - "dependencies": [ - "jsr:@std/fmt@0.224", - "jsr:@std/internal" - ] - }, - "@std/async@0.224.2": { - "integrity": "4d277d6e165df43d5e061ba0ef3edfddb8e8d558f5b920e3e6b1d2614b44d074" - }, - "@std/crypto@0.224.0": { - "integrity": "154ef3ff08ef535562ef1a718718c5b2c5fc3808f0f9100daad69e829bfcdf2d", - "dependencies": [ - "jsr:@std/assert" - ] - }, - "@std/fmt@0.224.0": { - "integrity": "e20e9a2312a8b5393272c26191c0a68eda8d2c4b08b046bad1673148f1d69851" - }, - "@std/fmt@1.0.3": { - "integrity": "97765c16aa32245ff4e2204ecf7d8562496a3cb8592340a80e7e554e0bb9149f" - }, - "@std/internal@0.224.0": 
{ - "integrity": "afc50644f9cdf4495eeb80523a8f6d27226b4b36c45c7c195dfccad4b8509291", - "dependencies": [ - "jsr:@std/fmt@0.224" - ] - }, - "@std/io@0.225.0": { - "integrity": "c1db7c5e5a231629b32d64b9a53139445b2ca640d828c26bf23e1c55f8c079b3" - }, - "@std/testing@0.224.0": { - "integrity": "371b8a929aa7132240d5dd766a439be8f780ef5c176ab194e0bcab72370c761e", - "dependencies": [ - "jsr:@std/assert" - ] - } - }, - "npm": { - "@supabase/auth-js@2.86.0": { - "integrity": "sha512-3xPqMvBWC6Haqpr6hEWmSUqDq+6SA1BAEdbiaHdAZM9QjZ5uiQJ+6iD9pZOzOa6MVXZh4GmwjhC9ObIG0K1NcA==", - "dependencies": [ - "tslib" - ] - }, - "@supabase/functions-js@2.86.0": { - "integrity": "sha512-AlOoVfeaq9XGlBFIyXTmb+y+CZzxNO4wWbfgRM6iPpNU5WCXKawtQYSnhivi3UVxS7GA0rWovY4d6cIAxZAojA==", - "dependencies": [ - "tslib" - ] - }, - "@supabase/postgrest-js@2.86.0": { - "integrity": "sha512-QVf+wIXILcZJ7IhWhWn+ozdf8B+oO0Ulizh2AAPxD/6nQL+x3r9lJ47a+fpc/jvAOGXMbkeW534Kw6jz7e8iIA==", - "dependencies": [ - "tslib" - ] - }, - "@supabase/realtime-js@2.86.0": { - "integrity": "sha512-dyS8bFoP29R/sj5zLi0AP3JfgG8ar1nuImcz5jxSx7UIW7fbFsXhUCVrSY2Ofo0+Ev6wiATiSdBOzBfWaiFyPA==", - "dependencies": [ - "@types/phoenix", - "@types/ws", - "tslib", - "ws" - ] - }, - "@supabase/storage-js@2.86.0": { - "integrity": "sha512-PM47jX/Mfobdtx7NNpoj9EvlrkapAVTQBZgGGslEXD6NS70EcGjhgRPBItwHdxZPM5GwqQ0cGMN06uhjeY2mHQ==", - "dependencies": [ - "iceberg-js", - "tslib" - ] - }, - "@supabase/supabase-js@2.86.0": { - "integrity": "sha512-BaC9sv5+HGNy1ulZwY8/Ev7EjfYYmWD4fOMw9bDBqTawEj6JHAiOHeTwXLRzVaeSay4p17xYLN2NSCoGgXMQnw==", - "dependencies": [ - "@supabase/auth-js", - "@supabase/functions-js", - "@supabase/postgrest-js", - "@supabase/realtime-js", - "@supabase/storage-js" - ] - }, - "@types/node@22.5.4": { - "integrity": "sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==", - "dependencies": [ - "undici-types" - ] - }, - "@types/phoenix@1.6.6": { - "integrity": "sha512-PIzZZlEppgrpoT2QgbnDU+MMzuR6BbCjllj0bM70lWoejMeNJAxCchxnv7J3XFkI8MpygtRpzXrIlmWUBclP5A==" - }, - "@types/ws@8.18.1": { - "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", - "dependencies": [ - "@types/node" - ] - }, - "iceberg-js@0.8.0": { - "integrity": "sha512-kmgmea2nguZEvRqW79gDqNXyxA3OS5WIgMVffrHpqXV4F/J4UmNIw2vstixioLTNSkd5rFB8G0s3Lwzogm6OFw==" - }, - "tslib@2.8.1": { - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" - }, - "undici-types@6.19.8": { - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" - }, - "ws@8.18.3": { - "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==" - } - }, - "remote": { - "https://deno.land/std@0.224.0/fmt/colors.ts": "508563c0659dd7198ba4bbf87e97f654af3c34eb56ba790260f252ad8012e1c5" - }, - "workspace": { - "dependencies": [ - "jsr:@henrygd/queue@^1.0.7", - "jsr:@oscar6echo/postgres@3.4.5-d", - "jsr:@std/assert@0.224", - "jsr:@std/async@0.224", - "jsr:@std/crypto@0.224", - "jsr:@std/log@~0.224.13", - "jsr:@std/testing@0.224", - "npm:@supabase/supabase-js@^2.39.0", - "npm:@teidesu/deno-types@1.42.4" - ] - } -} diff --git a/pkgs/edge-worker/jsr.json b/pkgs/edge-worker/jsr.json index 4560a3683..1874823c6 100644 --- a/pkgs/edge-worker/jsr.json +++ b/pkgs/edge-worker/jsr.json @@ -21,6 +21,7 @@ "src/**/*.ts" ], "exclude": [ + "!src/_generated", "__tests__/**/*", "tests/", "supabase/", diff 
--git a/pkgs/edge-worker/project.json b/pkgs/edge-worker/project.json index 07a908abd..84812507f 100644 --- a/pkgs/edge-worker/project.json +++ b/pkgs/edge-worker/project.json @@ -7,9 +7,19 @@ "supabase:ci-marker": { "executor": "nx:noop" }, + "generate-migrations": { + "executor": "nx:run-commands", + "cache": true, + "inputs": ["{workspaceRoot}/pkgs/core/supabase/migrations/*.sql", "{projectRoot}/scripts/generate-migrations.ts"], + "outputs": ["{projectRoot}/src/_generated/migrations"], + "options": { + "cwd": "{projectRoot}", + "command": "tsx scripts/generate-migrations.ts" + } + }, "build": { "executor": "nx:noop", - "dependsOn": ["^build"] + "dependsOn": ["generate-migrations", "^build"] }, "_build_disabled": { "executor": "@nx/js:tsc", @@ -134,8 +144,18 @@ "parallel": false } }, + "db-clean:ensure": { + "executor": "nx:run-commands", + "local": true, + "cache": false, + "options": { + "cwd": "pkgs/edge-worker", + "commands": ["./scripts/ensure-db-clean"], + "parallel": false + } + }, "test:unit": { - "dependsOn": ["^build"], + "dependsOn": ["build", "^build"], "executor": "nx:run-commands", "local": true, "inputs": ["default", "^production"], @@ -148,7 +168,7 @@ } }, "test:integration": { - "dependsOn": ["db:ensure", "^build"], + "dependsOn": ["db:ensure", "build", "^build"], "executor": "nx:run-commands", "local": true, "inputs": ["default", "^production"], @@ -160,9 +180,22 @@ "parallel": false } }, + "test:migrations": { + "dependsOn": ["db-clean:ensure", "build", "^build"], + "executor": "nx:run-commands", + "local": true, + "inputs": ["default", "^production"], + "options": { + "cwd": "pkgs/edge-worker", + "commands": [ + "deno test --config deno.test.json --allow-all --env=supabase/functions/.env tests/migrations/" + ], + "parallel": false + } + }, "serve:functions:e2e": { "executor": "nx:run-commands", - "dependsOn": ["^build"], + "dependsOn": ["build", "^build"], "continuous": true, "local": true, "cache": false, @@ -188,12 +221,12 @@ }, "test": { "inputs": ["default", "^production"], - "dependsOn": ["test:types", "test:unit"] + "dependsOn": ["test:types", "test:unit", "test:migrations"] }, "test:types": { "executor": "nx:run-commands", "cache": true, - "dependsOn": ["^build"], + "dependsOn": ["build", "^build"], "inputs": ["production", "^production"], "options": { "cwd": "{projectRoot}", diff --git a/pkgs/edge-worker/scripts/ensure-db-clean b/pkgs/edge-worker/scripts/ensure-db-clean new file mode 100755 index 000000000..714cd25a6 --- /dev/null +++ b/pkgs/edge-worker/scripts/ensure-db-clean @@ -0,0 +1,20 @@ +#!/bin/bash +set -e + +# Clean up any existing containers +echo "Shutting down existing clean database containers..." +docker compose -f tests/db-clean/compose.yaml down --volumes --remove-orphans + +# Start fresh containers +echo "Starting clean database..." +docker compose -f tests/db-clean/compose.yaml up --detach + +# Wait for database to be ready +echo "Waiting for database to be available..." +./scripts/wait-for-localhost 5433 + +# Additional pause to ensure database is fully initialized +echo "Waiting for database initialization..." +sleep 3 + +echo "Clean database is ready on port 5433!" diff --git a/pkgs/edge-worker/scripts/generate-migrations.ts b/pkgs/edge-worker/scripts/generate-migrations.ts new file mode 100644 index 000000000..bfceb63bf --- /dev/null +++ b/pkgs/edge-worker/scripts/generate-migrations.ts @@ -0,0 +1,167 @@ +/** + * Migration Generator Script for edge-worker + * + * Generates TypeScript modules from SQL migration files at build time. 
+ * This allows static imports of migration content that work across all environments + * (local dev, JSR, npm, Supabase Edge Functions). + * + * The migrations are sourced from the core package and bundled directly into + * edge-worker, making it fully self-contained without cross-package imports. + * + * Usage: tsx scripts/generate-migrations.ts + */ + +import * as fs from 'node:fs'; +import * as path from 'node:path'; + +const MIGRATIONS_DIR = '../core/supabase/migrations'; +const OUTPUT_DIR = 'src/_generated/migrations'; +const GENERATED_DIR = `${OUTPUT_DIR}/_generated`; +const INDEX_FILE = `${OUTPUT_DIR}/index.ts`; +const TYPES_FILE = `${OUTPUT_DIR}/types.ts`; + +interface MigrationFile { + timestamp: string; + filename: string; + content: string; +} + +function escapeBackticks(str: string): string { + // Escape backticks and ${} template expressions + return str.replace(/\\/g, '\\\\').replace(/`/g, '\\`').replace(/\$\{/g, '\\${'); +} + +function getMigrationFiles(): MigrationFile[] { + const files = fs.readdirSync(MIGRATIONS_DIR); + const migrations: MigrationFile[] = []; + + for (const filename of files) { + // Skip non-SQL files and atlas files + if (!filename.endsWith('.sql') || filename.startsWith('atlas')) { + continue; + } + + // Extract 14-digit timestamp from filename (e.g., "20250429164909_...") + const match = filename.match(/^(\d{14})_/); + if (!match) { + console.warn(`Skipping ${filename}: no valid timestamp found`); + continue; + } + + const content = fs.readFileSync(path.join(MIGRATIONS_DIR, filename), 'utf-8'); + migrations.push({ + timestamp: match[1], + filename, + content, + }); + } + + // Sort by timestamp (ascending) + migrations.sort((a, b) => a.timestamp.localeCompare(b.timestamp)); + + return migrations; +} + +function generateMigrationModule(migration: MigrationFile): string { + const escapedContent = escapeBackticks(migration.content); + return `import type { Migration } from '../types.ts'; + +export const migration: Migration = { + timestamp: '${migration.timestamp}', + filename: '${migration.filename}', + content: \`${escapedContent}\`, +}; +`; +} + +function generateTypesFile(): string { + return `/** + * Migration types for edge-worker + * + * Auto-generated by scripts/generate-migrations.ts + * Do not edit manually. + */ + +/** + * Represents a migration file loaded from @pgflow/core + */ +export interface Migration { + /** 14-digit timestamp from filename, e.g., "20250429164909" (PK) */ + timestamp: string; + /** Full filename, e.g., "20250429164909_pgflow_initial.sql" */ + filename: string; + /** Full SQL content of the migration */ + content: string; +} +`; +} + +function generateIndexFile(migrations: MigrationFile[]): string { + const imports = migrations + .map((m) => `import { migration as m_${m.timestamp} } from './_generated/m_${m.timestamp}.ts';`) + .join('\n'); + + const arrayItems = migrations.map((m) => ` m_${m.timestamp},`).join('\n'); + + return `/** + * Auto-generated migration exports for edge-worker + * + * This file is regenerated by scripts/generate-migrations.ts + * Do not edit manually. 
+ */ + +import type { Migration } from './types.ts'; +${imports} + +export const migrations: Migration[] = [ +${arrayItems} +]; + +export function getMigrations(): Migration[] { + return migrations; +} + +export type { Migration }; +`; +} + +function main(): void { + console.log('Generating migration modules for edge-worker...'); + + // Ensure output directories exist + fs.mkdirSync(GENERATED_DIR, { recursive: true }); + + // Clean existing generated migration files + if (fs.existsSync(GENERATED_DIR)) { + const existingFiles = fs.readdirSync(GENERATED_DIR); + for (const file of existingFiles) { + fs.unlinkSync(path.join(GENERATED_DIR, file)); + } + } + + // Get and process migrations + const migrations = getMigrationFiles(); + console.log(`Found ${migrations.length} migrations`); + + // Generate types file + const typesContent = generateTypesFile(); + fs.writeFileSync(TYPES_FILE, typesContent); + console.log(`Generated ${TYPES_FILE}`); + + // Generate individual module files + for (const migration of migrations) { + const moduleContent = generateMigrationModule(migration); + const outputPath = path.join(GENERATED_DIR, `m_${migration.timestamp}.ts`); + fs.writeFileSync(outputPath, moduleContent); + console.log(` Generated ${path.basename(outputPath)}`); + } + + // Generate index file + const indexContent = generateIndexFile(migrations); + fs.writeFileSync(INDEX_FILE, indexContent); + console.log(`Generated ${INDEX_FILE}`); + + console.log('Done!'); +} + +main(); diff --git a/pkgs/edge-worker/src/control-plane/migrations/MigrationRunner.ts b/pkgs/edge-worker/src/control-plane/migrations/MigrationRunner.ts new file mode 100644 index 000000000..77ca552d1 --- /dev/null +++ b/pkgs/edge-worker/src/control-plane/migrations/MigrationRunner.ts @@ -0,0 +1,131 @@ +/** + * MigrationRunner - Applies pgflow migrations to a database + * + * This class handles: + * - Creating the pgflow_installer schema and tracking table + * - Listing migration status (pending/applied) + * - Applying pending migrations with advisory locking + */ + +import type postgres from 'postgres'; +import { getMigrations } from './loader.ts'; +import type { MigrationWithStatus, ApplyResult, MigrationApplyResult } from './types.ts'; + +// Advisory lock key for migration serialization +// Using a fixed key to ensure only one migration runs at a time +const MIGRATION_LOCK_KEY = 0x706766_6c6f77; // 'pgflow' in hex + +export class MigrationRunner { + constructor(private sql: postgres.Sql) {} + + /** + * Lists all migrations with their current status (pending/applied) + */ + async list(): Promise<MigrationWithStatus[]> { + const migrations = await getMigrations(); + await this.ensureInstallerSchema(); + const appliedSet = await this.getAppliedTimestamps(); + + return migrations.map((m) => ({ + timestamp: m.timestamp, + filename: m.filename, + status: appliedSet.has(m.timestamp) ?
'applied' : 'pending', + })); + } + + /** + * Applies all pending migrations + * Each migration runs in its own transaction with advisory locking + */ + async up(): Promise { + const migrations = await getMigrations(); + await this.ensureInstallerSchema(); + const appliedSet = await this.getAppliedTimestamps(); + + const results: MigrationApplyResult[] = []; + let applied = 0; + let skipped = 0; + + for (const migration of migrations) { + if (appliedSet.has(migration.timestamp)) { + results.push({ timestamp: migration.timestamp, status: 'skipped' }); + skipped++; + continue; + } + + try { + // Run each migration in its own transaction with advisory lock + await this.sql.begin(async (tx) => { + // Acquire advisory lock (released at transaction end) + await tx`SELECT pg_advisory_xact_lock(${MIGRATION_LOCK_KEY})`; + + // Double-check migration hasn't been applied (race condition protection) + const existing = await tx` + SELECT 1 FROM pgflow_installer.migrations + WHERE timestamp = ${migration.timestamp} + `; + + if (existing.length > 0) { + // Another process applied it while we were waiting for lock + return; + } + + // Execute migration SQL + // SECURITY: migration.content comes from @pgflow/core package bundled at build time. + // This is trusted code from our own package. Do NOT allow user-provided migration content. + await tx.unsafe(migration.content); + + // Record that migration was applied + await tx` + INSERT INTO pgflow_installer.migrations (timestamp) + VALUES (${migration.timestamp}) + `; + }); + + results.push({ timestamp: migration.timestamp, status: 'applied' }); + applied++; + } catch (error) { + // Migration failed - return partial results with error + return { + success: false, + applied, + skipped, + total: migrations.length, + results, + error: `Failed at ${migration.timestamp}: ${error instanceof Error ? error.message : 'Unknown error'}`, + }; + } + } + + return { + success: true, + applied, + skipped, + total: migrations.length, + results, + }; + } + + /** + * Ensures the installer schema and tracking table exist + */ + private async ensureInstallerSchema(): Promise { + await this.sql`CREATE SCHEMA IF NOT EXISTS pgflow_installer`; + await this.sql` + CREATE TABLE IF NOT EXISTS pgflow_installer.migrations ( + timestamp TEXT PRIMARY KEY, + applied_at TIMESTAMPTZ NOT NULL DEFAULT now() + ) + `; + } + + /** + * Gets the set of already-applied migration timestamps + */ + private async getAppliedTimestamps(): Promise> { + const rows = await this.sql<{ timestamp: string }[]>` + SELECT timestamp FROM pgflow_installer.migrations + `; + return new Set(rows.map((r) => r.timestamp)); + } +} diff --git a/pkgs/edge-worker/src/control-plane/migrations/index.ts b/pkgs/edge-worker/src/control-plane/migrations/index.ts new file mode 100644 index 000000000..d712e55eb --- /dev/null +++ b/pkgs/edge-worker/src/control-plane/migrations/index.ts @@ -0,0 +1,9 @@ +export { MigrationRunner } from './MigrationRunner.ts'; +export { getMigrations } from './loader.ts'; +export type { + Migration, + MigrationStatus, + MigrationWithStatus, + MigrationApplyResult, + ApplyResult, +} from './types.ts'; diff --git a/pkgs/edge-worker/src/control-plane/migrations/loader.ts b/pkgs/edge-worker/src/control-plane/migrations/loader.ts new file mode 100644 index 000000000..2b1455539 --- /dev/null +++ b/pkgs/edge-worker/src/control-plane/migrations/loader.ts @@ -0,0 +1,8 @@ +/** + * Migration Loader + * + * Re-exports migrations bundled directly in edge-worker. 
+ * Static imports work across all environments (local dev, JSR, npm, Supabase Edge). + */ + +export { getMigrations, type Migration } from '../../_generated/migrations/index.ts'; diff --git a/pkgs/edge-worker/src/control-plane/migrations/types.ts b/pkgs/edge-worker/src/control-plane/migrations/types.ts new file mode 100644 index 000000000..8bf743316 --- /dev/null +++ b/pkgs/edge-worker/src/control-plane/migrations/types.ts @@ -0,0 +1,36 @@ +// Re-export Migration from local generated migrations +export type { Migration } from '../../_generated/migrations/types.ts'; + +/** + * Status of a migration in the installer tracking table + */ +export type MigrationStatus = 'pending' | 'applied'; + +/** + * Migration with its current status + */ +export interface MigrationWithStatus { + timestamp: string; + filename: string; + status: MigrationStatus; +} + +/** + * Result of applying a single migration + */ +export interface MigrationApplyResult { + timestamp: string; + status: 'applied' | 'skipped'; +} + +/** + * Result of the up() operation + */ +export interface ApplyResult { + success: boolean; + applied: number; + skipped: number; + total: number; + results: MigrationApplyResult[]; + error?: string; +} diff --git a/pkgs/edge-worker/src/control-plane/server.ts b/pkgs/edge-worker/src/control-plane/server.ts index 443265f55..8a6101349 100644 --- a/pkgs/edge-worker/src/control-plane/server.ts +++ b/pkgs/edge-worker/src/control-plane/server.ts @@ -1,5 +1,12 @@ import type { AnyFlow } from '@pgflow/dsl'; import { compileFlow } from '@pgflow/dsl'; +import postgres from 'postgres'; +import { MigrationRunner } from './migrations/MigrationRunner.ts'; +import { + validateServiceRoleAuth, + createUnauthorizedResponse, +} from '../shared/authValidation.ts'; +import { getSanitizedErrorMessage } from '../shared/errorSanitization.ts'; /** * Response type for the /flows/:slug endpoint */ @@ -75,6 +82,21 @@ export function createControlPlaneHandler(flowsInput: FlowInput) { return handleGetFlow(registry, slug); } + // Handle GET /migrations/list + if (pathname === '/migrations/list' && req.method === 'GET') { + return handleMigrationsList(req); + } + + // Handle POST /migrations/up + if (pathname === '/migrations/up' && req.method === 'POST') { + return handleMigrationsUp(req); + } + + // Handle POST /secrets/configure + if (pathname === '/secrets/configure' && req.method === 'POST') { + return handleSecretsConfig(req); + } + // 404 for unknown routes return jsonResponse( { @@ -146,3 +168,176 @@ function jsonResponse(data: unknown, status: number): Response { }, }); } + +/** + * Gets environment variables, preferring Deno.env + */ +function getEnv(): Record<string, string | undefined> { + return { + SUPABASE_ANON_KEY: Deno.env.get('SUPABASE_ANON_KEY'), + SUPABASE_DB_URL: Deno.env.get('SUPABASE_DB_URL'), + SUPABASE_SERVICE_ROLE_KEY: Deno.env.get('SUPABASE_SERVICE_ROLE_KEY'), + SUPABASE_URL: Deno.env.get('SUPABASE_URL'), + }; +} + +/** + * Creates a postgres connection for migrations + * Uses SUPABASE_DB_URL for direct connection (not pooler - better for DDL) + */ +function createInstallerSql(): postgres.Sql { + const dbUrl = Deno.env.get('SUPABASE_DB_URL'); + if (!dbUrl) { + throw new Error('SUPABASE_DB_URL environment variable is required'); + } + return postgres(dbUrl, { prepare: false }); +} + +/** + * Handles GET /migrations/list + * Returns list of all migrations with their status (pending/applied) + */ +async function handleMigrationsList(req: Request): Promise<Response> { + const env = getEnv(); + const authResult = validateServiceRoleAuth(req,
env); + if (!authResult.valid) { + return createUnauthorizedResponse(); + } + + let sql: postgres.Sql | null = null; + try { + sql = createInstallerSql(); + const runner = new MigrationRunner(sql); + const migrations = await runner.list(); + + return jsonResponse({ migrations }, 200); + } catch (error) { + const sanitizedMessage = getSanitizedErrorMessage(error, env); + console.error('Error listing migrations:', sanitizedMessage); + return jsonResponse( + { + error: 'Migration Error', + message: sanitizedMessage, + }, + 500 + ); + } finally { + if (sql) await sql.end(); + } +} + +/** + * Handles POST /migrations/up + * Applies all pending migrations + */ +async function handleMigrationsUp(req: Request): Promise<Response> { + const env = getEnv(); + const authResult = validateServiceRoleAuth(req, env); + if (!authResult.valid) { + return createUnauthorizedResponse(); + } + + let sql: postgres.Sql | null = null; + try { + sql = createInstallerSql(); + const runner = new MigrationRunner(sql); + const result = await runner.up(); + + // Sanitize any error message in the result (from MigrationRunner) + if (result.error) { + result.error = getSanitizedErrorMessage(new Error(result.error), env); + } + + return jsonResponse(result, result.success ? 200 : 500); + } catch (error) { + const sanitizedMessage = getSanitizedErrorMessage(error, env); + console.error('Error applying migrations:', sanitizedMessage); + return jsonResponse( + { + error: 'Migration Error', + message: sanitizedMessage, + }, + 500 + ); + } finally { + if (sql) await sql.end(); + } +} + +/** + * Handles POST /secrets/configure + * Configures vault secrets needed for pgflow worker management + */ +async function handleSecretsConfig(req: Request): Promise<Response> { + const env = getEnv(); + const authResult = validateServiceRoleAuth(req, env); + if (!authResult.valid) { + return createUnauthorizedResponse(); + } + + let sql: postgres.Sql | null = null; + try { + sql = createInstallerSql(); + + // Extract project ID from SUPABASE_URL (e.g., https://abc123.supabase.co -> abc123) + const supabaseUrl = env.SUPABASE_URL; + const serviceRoleKey = env.SUPABASE_SERVICE_ROLE_KEY; + + if (!supabaseUrl || !serviceRoleKey) { + return jsonResponse( + { + error: 'Configuration Error', + message: 'Missing SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY', + }, + 500 + ); + } + + const projectId = extractProjectId(supabaseUrl); + if (!projectId) { + return jsonResponse( + { + error: 'Configuration Error', + message: 'Could not extract project ID from SUPABASE_URL', + }, + 500 + ); + } + + // Upsert secrets (delete + create pattern) in single transaction + await sql.begin(async (tx) => { + await tx`DELETE FROM vault.secrets WHERE name = 'supabase_project_id'`; + await tx`SELECT vault.create_secret(${projectId}, 'supabase_project_id')`; + + await tx`DELETE FROM vault.secrets WHERE name = 'supabase_service_role_key'`; + await tx`SELECT vault.create_secret(${serviceRoleKey}, 'supabase_service_role_key')`; + }); + + return jsonResponse({ + success: true, + configured: ['supabase_project_id', 'supabase_service_role_key'], + }, 200); + } catch (error) { + const sanitizedMessage = getSanitizedErrorMessage(error, env); + console.error('Error configuring secrets:', sanitizedMessage); + return jsonResponse( + { + error: 'Secret Configuration Error', + message: sanitizedMessage, + }, + 500 + ); + } finally { + if (sql) await sql.end(); + } +} + +/** + * Extracts project ID from Supabase URL + * @example https://abc123.supabase.co -> abc123 + * @example https://abc123.supabase.green
-> abc123 + */ +export function extractProjectId(supabaseUrl: string): string | null { + const match = supabaseUrl.match(/https:\/\/([^.]+)\.supabase\./); + return match ? match[1] : null; +} diff --git a/pkgs/edge-worker/src/shared/errorSanitization.ts b/pkgs/edge-worker/src/shared/errorSanitization.ts new file mode 100644 index 000000000..a04d6e59c --- /dev/null +++ b/pkgs/edge-worker/src/shared/errorSanitization.ts @@ -0,0 +1,48 @@ +/** + * Sanitizes error messages by redacting sensitive values. + * + * Defense-in-depth measure to prevent accidental leakage of secrets + * (like SUPABASE_DB_URL or SUPABASE_SERVICE_ROLE_KEY) in error messages + * returned to clients or written to logs. + */ + +/** + * Replaces known secrets in error messages with [REDACTED] + * @param message - The error message to sanitize + * @param env - Environment variables containing potential secrets + * @returns Sanitized message with secrets redacted + */ +export function sanitizeErrorMessage( + message: string, + env: Record<string, string | undefined> +): string { + let sanitized = message; + + const secrets = [ + env.SUPABASE_DB_URL, + env.SUPABASE_SERVICE_ROLE_KEY, + ]; + + for (const secret of secrets) { + // Only redact non-trivial secrets (avoid false positives) + if (secret && secret.length > 8) { + sanitized = sanitized.replaceAll(secret, '[REDACTED]'); + } + } + + return sanitized; +} + +/** + * Extracts and sanitizes an error message from an unknown error value + * @param error - The caught error (unknown type) + * @param env - Environment variables containing potential secrets + * @returns Sanitized error message + */ +export function getSanitizedErrorMessage( + error: unknown, + env: Record<string, string | undefined> +): string { + const rawMessage = error instanceof Error ? error.message : 'Unknown error'; + return sanitizeErrorMessage(rawMessage, env); +} diff --git a/pkgs/edge-worker/supabase/functions/secrets_test/index.ts b/pkgs/edge-worker/supabase/functions/secrets_test/index.ts new file mode 100644 index 000000000..6e97f2202 --- /dev/null +++ b/pkgs/edge-worker/supabase/functions/secrets_test/index.ts @@ -0,0 +1,21 @@ +import { ControlPlane } from '@pgflow/edge-worker'; + +// Production-like values for testing secrets/configure endpoint +// These must match the values used in tests/e2e/secrets-configure.test.ts +export const TEST_PROJECT_ID = 'test-secrets-project'; +export const TEST_SUPABASE_URL = `https://${TEST_PROJECT_ID}.supabase.co`; +export const TEST_ANON_KEY = 'test-production-anon-key-for-secrets'; +export const TEST_SERVICE_ROLE_KEY = 'test-production-service-role-key-for-secrets'; + +// Mock Deno.env.get to return production-style values +// Supabase blocks Deno.env.set(), so we override .get() instead +const originalGet = Deno.env.get.bind(Deno.env); +Deno.env.get = (key: string): string | undefined => { + if (key === 'SUPABASE_URL') return TEST_SUPABASE_URL; + if (key === 'SUPABASE_ANON_KEY') return TEST_ANON_KEY; + if (key === 'SUPABASE_SERVICE_ROLE_KEY') return TEST_SERVICE_ROLE_KEY; + return originalGet(key); +}; + +// Serve control plane with no flows (we only need /secrets/configure) +ControlPlane.serve([]); diff --git a/pkgs/edge-worker/tests/config.ts b/pkgs/edge-worker/tests/config.ts index ef75da72c..71182df94 100644 --- a/pkgs/edge-worker/tests/config.ts +++ b/pkgs/edge-worker/tests/config.ts @@ -42,3 +42,14 @@ export const integrationConfig = { return 'postgresql://postgres:postgres@127.0.0.1:5432/postgres'; }, }; + +/** + * Clean database test configuration + * Uses Docker Compose database WITHOUT pgflow migrations
(port 5433) + * For testing migration installer functionality + */ +export const cleanDbConfig = { + get dbUrl() { + return 'postgresql://postgres:postgres@127.0.0.1:5433/postgres'; + }, +}; diff --git a/pkgs/edge-worker/tests/db-clean/compose.yaml b/pkgs/edge-worker/tests/db-clean/compose.yaml new file mode 100644 index 000000000..ca1c71b74 --- /dev/null +++ b/pkgs/edge-worker/tests/db-clean/compose.yaml @@ -0,0 +1,14 @@ +services: + db: + # Same image as regular test DB + image: jumski/atlas-postgres-pgflow:latest + ports: + - '5433:5432' + volumes: + # Mount baseline schema (realtime, pgsodium, etc.) but NOT pgflow migrations + - ../../../core/atlas/supabase-baseline-schema.sql:/docker-entrypoint-initdb.d/950_baseline.sql + environment: + POSTGRES_DB: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_HOST: /var/run/postgresql + POSTGRES_PORT: 5432 diff --git a/pkgs/edge-worker/tests/e2e/secrets-configure.test.ts b/pkgs/edge-worker/tests/e2e/secrets-configure.test.ts new file mode 100644 index 000000000..9356b17a0 --- /dev/null +++ b/pkgs/edge-worker/tests/e2e/secrets-configure.test.ts @@ -0,0 +1,163 @@ +import { assertEquals, assertExists } from '@std/assert'; +import { e2eConfig } from '../config.ts'; +import { log } from './_helpers.ts'; +import postgres from 'postgres'; + +const API_URL = e2eConfig.apiUrl; +// Use secrets_test function which has mocked production-style SUPABASE_URL +const BASE_URL = `${API_URL}/functions/v1/secrets_test`; +const SECRET_NAMES = ['supabase_project_id', 'supabase_service_role_key']; + +// Expected values from secrets_test edge function (must match index.ts) +const EXPECTED_PROJECT_ID = 'test-secrets-project'; +const EXPECTED_SERVICE_ROLE_KEY = 'test-production-service-role-key-for-secrets'; + +// Auth header required for production-mode endpoints +const AUTH_HEADERS = { + Authorization: `Bearer ${EXPECTED_SERVICE_ROLE_KEY}`, +}; + +function createSql() { + return postgres(e2eConfig.dbUrl, { prepare: false }); +} + +async function cleanupSecrets(sql: postgres.Sql) { + await sql`DELETE FROM vault.secrets WHERE name = ANY(${SECRET_NAMES})`; +} + +/** + * Helper to ensure the secrets_test function is responsive + * Makes initial request and retries until server is ready + */ +async function ensureServerReady() { + log('Ensuring secrets_test function is ready...'); + + const maxRetries = e2eConfig.serverReadyMaxRetries; + const retryDelayMs = e2eConfig.serverReadyRetryDelayMs; + + for (let i = 0; i < maxRetries; i++) { + try { + // Try to hit the /secrets/configure endpoint to wake up the function + const response = await fetch(`${BASE_URL}/secrets/configure`, { + method: 'POST', + headers: AUTH_HEADERS, + signal: AbortSignal.timeout(5000), + }); + + // 502/503 means edge function is still initializing - retry + if (response.status === 502 || response.status === 503) { + await response.body?.cancel(); + log( + `Retry ${i + 1}/${maxRetries}: Server returned ${response.status}, waiting...` + ); + await new Promise((resolve) => setTimeout(resolve, retryDelayMs)); + continue; + } + + // Any other response (2xx, 4xx, etc.) 
means the function is running properly + if (response.status > 0) { + // Consume the response body to avoid resource leaks + await response.body?.cancel(); + log('secrets_test function is ready!'); + return; + } + } catch (error) { + if (i === maxRetries - 1) { + throw new Error(`Server not ready after ${maxRetries} retries: ${error}`); + } + log(`Retry ${i + 1}/${maxRetries}: Server not ready yet, waiting...`); + await new Promise((resolve) => setTimeout(resolve, retryDelayMs)); + } + } + + throw new Error(`Server not ready after ${maxRetries} retries`); +} + +Deno.test('E2E ControlPlane - POST /secrets/configure creates secrets when none exist', async () => { + await ensureServerReady(); + + const sql = createSql(); + try { + // Clean slate - ensure no secrets exist + await cleanupSecrets(sql); + + // Configure + const response = await fetch(`${BASE_URL}/secrets/configure`, { + method: 'POST', + headers: AUTH_HEADERS, + }); + const data = await response.json(); + + assertEquals(response.status, 200); + assertEquals(data.success, true); + assertExists(data.configured); + assertEquals(data.configured.includes('supabase_project_id'), true); + assertEquals(data.configured.includes('supabase_service_role_key'), true); + + // Verify secrets were created with expected values + const [projectId] = await sql` + SELECT decrypted_secret FROM vault.decrypted_secrets + WHERE name = 'supabase_project_id' + `; + assertEquals(projectId.decrypted_secret, EXPECTED_PROJECT_ID); + + const [serviceRoleKey] = await sql` + SELECT decrypted_secret FROM vault.decrypted_secrets + WHERE name = 'supabase_service_role_key' + `; + assertEquals(serviceRoleKey.decrypted_secret, EXPECTED_SERVICE_ROLE_KEY); + + log('Successfully configured secrets with expected values'); + } finally { + await cleanupSecrets(sql); + await sql.end(); + } +}); + +Deno.test('E2E ControlPlane - POST /secrets/configure is idempotent', async () => { + const sql = createSql(); + try { + // Clean slate + await cleanupSecrets(sql); + + // First call - creates secrets + const response1 = await fetch(`${BASE_URL}/secrets/configure`, { + method: 'POST', + headers: AUTH_HEADERS, + }); + await response1.json(); // Consume body to avoid leak + assertEquals(response1.status, 200); + + // Second call - should succeed without error (idempotent) + const response2 = await fetch(`${BASE_URL}/secrets/configure`, { + method: 'POST', + headers: AUTH_HEADERS, + }); + const data2 = await response2.json(); + + assertEquals(response2.status, 200); + assertEquals(data2.success, true); + + // Still only 2 secrets (not duplicated) + const secrets = await sql` + SELECT name FROM vault.decrypted_secrets + WHERE name = ANY(${SECRET_NAMES}) + `; + assertEquals(secrets.length, 2); + + log('Secrets configure is idempotent'); + } finally { + await cleanupSecrets(sql); + await sql.end(); + } +}); + +Deno.test('E2E ControlPlane - GET /secrets/configure returns 404 (wrong method)', async () => { + const response = await fetch(`${BASE_URL}/secrets/configure`); + const data = await response.json(); + + assertEquals(response.status, 404); + assertEquals(data.error, 'Not Found'); + + log('404 error correctly returned for wrong HTTP method'); +}); diff --git a/pkgs/edge-worker/tests/migrations/MigrationRunner.test.ts b/pkgs/edge-worker/tests/migrations/MigrationRunner.test.ts new file mode 100644 index 000000000..5fdb96c6b --- /dev/null +++ b/pkgs/edge-worker/tests/migrations/MigrationRunner.test.ts @@ -0,0 +1,120 @@ +import { assertEquals } from '@std/assert'; +import postgres from 
'postgres'; +import { cleanDbConfig } from '../config.ts'; +import { MigrationRunner } from '../../src/control-plane/migrations/MigrationRunner.ts'; +import { getMigrations } from '../../src/control-plane/migrations/loader.ts'; + +/** + * Creates a postgres connection for the clean database. + * Each test gets a fresh connection. + */ +function createCleanDbSql() { + return postgres(cleanDbConfig.dbUrl, { + prepare: false, + onnotice(_: unknown) { + // no-op to silence notices + }, + }); +} + +/** + * Test wrapper that ensures schemas are cleaned up before each test. + * We can't use transactions here because migrations create schemas + * and we need to verify their creation. + */ +function withCleanDb(callback: (sql: postgres.Sql) => Promise<void>) { + const sql = createCleanDbSql(); + + return async () => { + try { + // Clean up any existing schemas before test + await sql`DROP SCHEMA IF EXISTS pgflow_installer CASCADE`; + await sql`DROP SCHEMA IF EXISTS pgflow CASCADE`; + await sql`DROP SCHEMA IF EXISTS pgmq CASCADE`; + + await callback(sql); + } finally { + // Clean up after test + await sql`DROP SCHEMA IF EXISTS pgflow_installer CASCADE`; + await sql`DROP SCHEMA IF EXISTS pgflow CASCADE`; + await sql`DROP SCHEMA IF EXISTS pgmq CASCADE`; + await sql.end(); + } + }; +} + +Deno.test( + 'list() returns all migrations as pending on clean DB', + withCleanDb(async (sql) => { + const migrations = await getMigrations(); + const runner = new MigrationRunner(sql); + const result = await runner.list(); + + assertEquals(result.length, migrations.length); + assertEquals( + result.every((m) => m.status === 'pending'), + true, + 'All migrations should be pending' + ); + }) +); + +Deno.test( + 'up() applies all migrations on clean DB', + withCleanDb(async (sql) => { + const migrations = await getMigrations(); + const runner = new MigrationRunner(sql); + const result = await runner.up(); + + assertEquals(result.success, true, `Migration failed: ${result.error}`); + assertEquals(result.applied, migrations.length); + assertEquals(result.skipped, 0); + }) +); + +Deno.test( + 'up() is idempotent - second call skips all', + withCleanDb(async (sql) => { + const migrations = await getMigrations(); + const runner = new MigrationRunner(sql); + + // First call applies all + await runner.up(); + + // Second call should skip all + const result = await runner.up(); + + assertEquals(result.success, true); + assertEquals(result.applied, 0); + assertEquals(result.skipped, migrations.length); + }) +); + +Deno.test( + 'list() shows applied after up()', + withCleanDb(async (sql) => { + const runner = new MigrationRunner(sql); + + await runner.up(); + const result = await runner.list(); + + assertEquals( + result.every((m) => m.status === 'applied'), + true, + 'All migrations should be applied' + ); + }) +); + +Deno.test( + 'list() returns migrations sorted by timestamp', + withCleanDb(async (sql) => { + const runner = new MigrationRunner(sql); + const result = await runner.list(); + + const timestamps = result.map((m) => m.timestamp); + const sorted = [...timestamps].sort(); + + assertEquals(timestamps, sorted, 'Migrations should be sorted by timestamp'); + }) +); diff --git a/pkgs/edge-worker/tests/unit/control-plane/server.test.ts b/pkgs/edge-worker/tests/unit/control-plane/server.test.ts index dc85f5435..ffd536587 100644 --- a/pkgs/edge-worker/tests/unit/control-plane/server.test.ts +++ b/pkgs/edge-worker/tests/unit/control-plane/server.test.ts @@ -1,6 +1,6 @@ import { assertEquals, assertMatch } from '@std/assert'; import
{ Flow, compileFlow } from '@pgflow/dsl'; -import { createControlPlaneHandler } from '../../../src/control-plane/server.ts'; +import { createControlPlaneHandler, extractProjectId } from '../../../src/control-plane/server.ts'; // Test flows covering different DSL features const FlowWithSingleStep = new Flow({ slug: 'flow_single_step' }) @@ -229,3 +229,120 @@ Deno.test('ControlPlane Handler - empty array creates handler with no flows', as const data = await response.json(); assertEquals(data.error, 'Flow Not Found'); }); + +// ============================================================ +// Migration endpoints - Auth tests +// ============================================================ + +Deno.test('ControlPlane Handler - GET /migrations/list returns 401 without auth', async () => { + const handler = createControlPlaneHandler([]); + + const request = new Request('http://localhost/pgflow/migrations/list'); + const response = await handler(request); + + assertEquals(response.status, 401); + const data = await response.json(); + assertEquals(data.error, 'Unauthorized'); +}); + +Deno.test('ControlPlane Handler - POST /migrations/up returns 401 without auth', async () => { + const handler = createControlPlaneHandler([]); + + const request = new Request('http://localhost/pgflow/migrations/up', { + method: 'POST', + }); + const response = await handler(request); + + assertEquals(response.status, 401); + const data = await response.json(); + assertEquals(data.error, 'Unauthorized'); +}); + +Deno.test('ControlPlane Handler - GET /migrations/list returns 401 with invalid auth', async () => { + const handler = createControlPlaneHandler([]); + + const request = new Request('http://localhost/pgflow/migrations/list', { + headers: { Authorization: 'Bearer invalid-token' }, + }); + const response = await handler(request); + + assertEquals(response.status, 401); + const data = await response.json(); + assertEquals(data.error, 'Unauthorized'); +}); + +Deno.test('ControlPlane Handler - POST /migrations/up returns 401 with invalid auth', async () => { + const handler = createControlPlaneHandler([]); + + const request = new Request('http://localhost/pgflow/migrations/up', { + method: 'POST', + headers: { Authorization: 'Bearer invalid-token' }, + }); + const response = await handler(request); + + assertEquals(response.status, 401); + const data = await response.json(); + assertEquals(data.error, 'Unauthorized'); +}); + +// ============================================================ +// Secrets endpoint - Auth tests +// ============================================================ + +Deno.test('ControlPlane Handler - POST /secrets/configure returns 401 without auth', async () => { + const handler = createControlPlaneHandler([]); + + const request = new Request('http://localhost/pgflow/secrets/configure', { + method: 'POST', + }); + const response = await handler(request); + + assertEquals(response.status, 401); + const data = await response.json(); + assertEquals(data.error, 'Unauthorized'); +}); + +Deno.test('ControlPlane Handler - POST /secrets/configure returns 401 with invalid auth', async () => { + const handler = createControlPlaneHandler([]); + + const request = new Request('http://localhost/pgflow/secrets/configure', { + method: 'POST', + headers: { Authorization: 'Bearer invalid-token' }, + }); + const response = await handler(request); + + assertEquals(response.status, 401); + const data = await response.json(); + assertEquals(data.error, 'Unauthorized'); +}); + +// 
============================================================ +// extractProjectId - URL parsing tests +// ============================================================ + +Deno.test('extractProjectId - extracts project ID from .supabase.co URL', () => { + assertEquals(extractProjectId('https://abc123.supabase.co'), 'abc123'); +}); + +Deno.test('extractProjectId - extracts project ID from .supabase.green URL', () => { + assertEquals(extractProjectId('https://xyz789.supabase.green'), 'xyz789'); +}); + +Deno.test('extractProjectId - handles complex project IDs', () => { + assertEquals( + extractProjectId('https://my-project-123.supabase.co'), + 'my-project-123' + ); +}); + +Deno.test('extractProjectId - returns null for invalid URL', () => { + assertEquals(extractProjectId('https://example.com'), null); +}); + +Deno.test('extractProjectId - returns null for empty string', () => { + assertEquals(extractProjectId(''), null); +}); + +Deno.test('extractProjectId - returns null for non-https URL', () => { + assertEquals(extractProjectId('http://abc123.supabase.co'), null); +}); diff --git a/pkgs/website/astro.config.mjs b/pkgs/website/astro.config.mjs index 8f85d3bf7..225263ed6 100644 --- a/pkgs/website/astro.config.mjs +++ b/pkgs/website/astro.config.mjs @@ -459,6 +459,12 @@ export default defineConfig({ directory: 'tutorials/ai-web-scraper/', }, }, + { + label: 'Lovable', + autogenerate: { + directory: 'tutorials/lovable/', + }, + }, ], }, { diff --git a/pkgs/website/src/content/docs/tutorials/index.mdx b/pkgs/website/src/content/docs/tutorials/index.mdx index a2eb5b527..83a53e57b 100644 --- a/pkgs/website/src/content/docs/tutorials/index.mdx +++ b/pkgs/website/src/content/docs/tutorials/index.mdx @@ -21,4 +21,9 @@ Learn pgflow through hands-on examples. These tutorials guide you through buildi description="Create a workflow that scrapes webpages, analyzes content with OpenAI, and stores results in Postgres" href="/tutorials/ai-web-scraper/" /> + diff --git a/pkgs/website/src/content/docs/tutorials/lovable/01-install-pgflow.mdx b/pkgs/website/src/content/docs/tutorials/lovable/01-install-pgflow.mdx new file mode 100644 index 000000000..724ca91b3 --- /dev/null +++ b/pkgs/website/src/content/docs/tutorials/lovable/01-install-pgflow.mdx @@ -0,0 +1,73 @@ +--- +title: "Step 1: Install Control Plane" +description: Create the pgflow Edge Function that manages workflow compilation and execution +sidebar: + order: 10 +--- + +import { Aside, Steps } from '@astrojs/starlight/components'; +import { FileTree } from '@astrojs/starlight/components'; + +The first step is to install the pgflow Control Plane - an Edge Function that manages your workflow definitions and compilation. + +## What gets created + +After pasting the prompt below, Lovable will create: + + +- supabase/ + - flows/ + - index.ts (empty - you'll add flows here later) + - functions/ + - pgflow/ + - index.ts (the Control Plane function) + + +## Installation prompt + +Copy and paste this entire prompt into Lovable: + +````text +I need to install pgflow - a workflow engine for Supabase Edge Functions. + +Please create the following files: + +1. Create folder `supabase/flows/` and file `supabase/flows/index.ts` with this content: +```ts +// Flow definitions will be added here +// Example: export { default as MyFlow } from './my-flow.ts'; +``` + +2. 
Create Edge Function `supabase/functions/pgflow/index.ts` with this exact content: +```ts +import { ControlPlane } from 'jsr:@pgflow/edge-worker'; +import * as flows from '../../flows/index.ts'; + +ControlPlane.serve(flows); +``` + +After creating these files, confirm that both were created successfully. +```` + +## What to expect + + +1. **Paste the prompt** into Lovable's chat +2. **Wait for confirmation** that the files were created +3. **Verify the structure** - Lovable should confirm both the `flows` folder and `pgflow` function exist + + + + +## Next step + +Once Lovable confirms the files are created, proceed to run the database migrations: + +[Step 2: Run Migrations →](/tutorials/lovable/02-install-migrations/) diff --git a/pkgs/website/src/content/docs/tutorials/lovable/02-install-migrations.mdx b/pkgs/website/src/content/docs/tutorials/lovable/02-install-migrations.mdx new file mode 100644 index 000000000..9e53a0124 --- /dev/null +++ b/pkgs/website/src/content/docs/tutorials/lovable/02-install-migrations.mdx @@ -0,0 +1,228 @@ +--- +title: "Step 2: Run Migrations" +description: Apply pgflow database migrations and configure vault secrets using a temporary installer +sidebar: + order: 20 +--- + +import { Aside, Steps } from '@astrojs/starlight/components'; + +This step creates a temporary Edge Function that securely runs pgflow migrations and configures vault secrets. The installer uses your project's service role key internally - it's never exposed to your frontend. + + + +## How it works + + +1. You paste a prompt that creates a temporary installer Edge Function +2. Lovable shows you a link to invoke the function +3. You click the link - the installer runs migrations and configures secrets +4. You copy the JSON result and paste it back to Lovable +5. Lovable confirms success and deletes the temporary installer + + +## Installation prompt + +Copy and paste this entire prompt into Lovable: + +````text +I need to run pgflow database migrations. This requires a temporary installer Edge Function. + +**STEP 1: Create the installer** + +Create Edge Function `supabase/functions/pgflow-installer/index.ts` with this exact content: + +```ts +const SUPABASE_URL = Deno.env.get('SUPABASE_URL')!; +const SERVICE_ROLE_KEY = Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!; + +interface StepResult { + success: boolean; + status: number; + data?: unknown; + error?: string; +} + +interface InstallerResult { + success: boolean; + secrets: StepResult; + migrations: StepResult; + message: string; +} + +async function callControlPlane( + endpoint: string, + method: 'GET' | 'POST' +): Promise<StepResult> { + try { + const response = await fetch(`${SUPABASE_URL}/functions/v1/pgflow${endpoint}`, { + method, + headers: { + 'Authorization': `Bearer ${SERVICE_ROLE_KEY}`, + 'Content-Type': 'application/json', + }, + }); + + const data = await response.json(); + return { + success: response.ok, + status: response.status, + data, + }; + } catch (error) { + return { + success: false, + status: 0, + error: error instanceof Error ? error.message : 'Unknown error', + }; + } +} + +Deno.serve(async (_req: Request) => { + console.log('pgflow installer starting...'); + + // Step 1: Configure vault secrets + console.log('Configuring vault secrets...'); + const secrets = await callControlPlane('/secrets/configure', 'POST'); + + if (!secrets.success) { + const result: InstallerResult = { + success: false, + secrets, + migrations: { success: false, status: 0, error: 'Skipped - secrets failed' }, + message: 'Failed to configure vault secrets.
Check the pgflow Control Plane is deployed.', + }; + return new Response(JSON.stringify(result, null, 2), { + status: 500, + headers: { 'Content-Type': 'application/json' }, + }); + } + + // Step 2: Run migrations + console.log('Running migrations...'); + const migrations = await callControlPlane('/migrations/up', 'POST'); + + const result: InstallerResult = { + success: secrets.success && migrations.success, + secrets, + migrations, + message: migrations.success + ? 'pgflow installed successfully! Vault secrets configured and migrations applied.' + : 'Secrets configured but migrations failed. Check the error details.', + }; + + console.log('Installer complete:', result.message); + + return new Response(JSON.stringify(result, null, 2), { + status: result.success ? 200 : 500, + headers: { 'Content-Type': 'application/json' }, + }); +}); +``` + +**STEP 2: Configure to skip JWT verification** + +Add to `supabase/config.toml`: + +```toml +[functions.pgflow-installer] +verify_jwt = false +``` + +**STEP 3: After the function is deployed, show me a link to invoke it** + +The link format is: `https://[PROJECT_REF].supabase.co/functions/v1/pgflow-installer` + +Present this as a clickable link so I can invoke the installer. + +**STEP 4: Wait for me to paste the result** + +After I click the link, I will copy the JSON response and paste it here. + +The response will look like this if successful: +```json +{ + "success": true, + "secrets": { "success": true, "status": 200, "data": {...} }, + "migrations": { "success": true, "status": 200, "data": {...} }, + "message": "pgflow installed successfully! Vault secrets configured and migrations applied." +} +``` + +Or like this if there's an error: +```json +{ + "success": false, + "secrets": { "success": false, "status": 500, "error": "..." }, + "migrations": {...}, + "message": "Failed to configure vault secrets..." +} +``` + +**STEP 5: Interpret the result** + +When I paste the JSON result: +- If `"success": true` - confirm that pgflow is installed and ready to use +- If `"success": false` - explain what went wrong based on the error messages + +**STEP 6: Clean up** + +After confirming success (or discussing the error), delete the temporary installer: +- Remove `supabase/functions/pgflow-installer/` folder +- Remove the `[functions.pgflow-installer]` section from `config.toml` + +This keeps the project clean and removes the unauthenticated endpoint. +```` + +## What to expect + +After pasting the prompt: + + +1. **Lovable creates the installer** - You'll see the function code being added +2. **Lovable shows you a link** - Click it to run the installer +3. **Copy the JSON result** - Select all the JSON output from the browser +4. **Paste it back to Lovable** - Lovable interprets whether it succeeded +5. **Lovable cleans up** - The temporary function is deleted + + + + +## Troubleshooting + +### "Failed to configure vault secrets" + +This usually means the pgflow Control Plane function isn't deployed yet. Make sure you completed [Step 1: Install Control Plane](/tutorials/lovable/01-install-pgflow/) first. + +### "Migrations failed" + +Check the `migrations.data` field in the response for specific error messages. Common issues: +- Missing database permissions +- Conflicting table names +- Network timeout (try again) + +### The link doesn't work + +Ensure the function was deployed to Supabase. You may need to ask Lovable to deploy the Edge Functions to your connected Supabase project. + +## Next steps + +Congratulations! 
pgflow is now installed in your Lovable project. You can now: + +- **Create workflows** - Ask Lovable to create flow definitions in `supabase/flows/` +- **Define tasks** - Build task functions that call AI APIs, process data, etc. +- **Run flows** - Start workflow executions from your app + +Check out the [AI Web Scraper tutorial](/tutorials/ai-web-scraper/) for an example of building a complete workflow. diff --git a/pkgs/website/src/content/docs/tutorials/lovable/index.mdx b/pkgs/website/src/content/docs/tutorials/lovable/index.mdx new file mode 100644 index 000000000..cca3ff5bd --- /dev/null +++ b/pkgs/website/src/content/docs/tutorials/lovable/index.mdx @@ -0,0 +1,61 @@ +--- +title: Using pgflow with Lovable +description: Build multi-step AI workflows in your Lovable apps using pgflow - a Postgres-native workflow engine +sidebar: + order: 0 +--- + +import { Steps, LinkCard, CardGrid, Aside } from '@astrojs/starlight/components'; + +**Build reliable, multi-step AI workflows in your Lovable apps.** + +pgflow lets you create complex background processes - like AI pipelines, data processing, and multi-step operations - directly in your Supabase backend. When combined with Lovable's AI-powered development, you can build sophisticated apps with robust backend workflows. + +## What pgflow enables + +With pgflow integrated into your Lovable app, you can: + +- **Chain multiple AI operations** - Scrape content, then summarize, then extract tags - all as a single reliable workflow +- **Run steps in parallel** - Process independent operations simultaneously for faster results +- **Handle failures gracefully** - Automatic retries with exponential backoff when APIs fail +- **Track progress** - Full observability into workflow state in your Postgres database +- **Scale horizontally** - Edge Workers auto-scale with your Supabase project + +## Installation overview + +Installing pgflow in a Lovable project requires a few prompts that you paste into Lovable's chat. The process is designed to be simple and safe. + + +1. **Install the Control Plane** - Create the pgflow Edge Function that manages flows +2. **Run Migrations** - Apply database migrations and configure secrets via a temporary installer + + + + +## Get started + + + + + + +## After installation + +Once pgflow is installed, you can ask Lovable to: + +- Create new workflow definitions in `supabase/flows/` +- Define task functions that call AI APIs +- Compile flows and apply new migrations +- Start workflow runs from your app's frontend + +Each workflow you create becomes a reliable, observable background process that Lovable can help you build and iterate on. diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ccd320b6a..7061a01f6 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -102,6 +102,9 @@ importers: tslib: specifier: ^2.3.0 version: 2.8.1 + tsx: + specifier: ^4.20.6 + version: 4.20.6 typescript: specifier: 5.8.3 version: 5.8.3
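Usage sketch for the new installer endpoints added in `src/control-plane/server.ts`. This is a minimal, hedged example rather than part of the change itself: it assumes the Control Plane is deployed as the `pgflow` Edge Function (so the routes resolve under `/functions/v1/pgflow`, as in the Lovable installer prompt above), that the caller has `SUPABASE_URL` and `SUPABASE_SERVICE_ROLE_KEY` available, and that the service role key is accepted as the bearer token by `validateServiceRoleAuth`. Response shapes follow `MigrationWithStatus` and `ApplyResult` from `src/control-plane/migrations/types.ts`.

```ts
// Minimal client sketch for the migration installer API (Deno/TypeScript).
// Assumptions: pgflow Control Plane deployed at /functions/v1/pgflow,
// service role key used as the Authorization bearer token.
const SUPABASE_URL = Deno.env.get('SUPABASE_URL')!;
const SERVICE_ROLE_KEY = Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!;

interface MigrationWithStatus {
  timestamp: string;
  filename: string;
  status: 'pending' | 'applied';
}

async function callPgflow(path: string, method: 'GET' | 'POST'): Promise<unknown> {
  const response = await fetch(`${SUPABASE_URL}/functions/v1/pgflow${path}`, {
    method,
    headers: { Authorization: `Bearer ${SERVICE_ROLE_KEY}` },
  });
  if (!response.ok) {
    throw new Error(`pgflow ${path} failed with status ${response.status}`);
  }
  return response.json();
}

// 1. Inspect migration status: GET /migrations/list returns { migrations: MigrationWithStatus[] }
const { migrations } = (await callPgflow('/migrations/list', 'GET')) as {
  migrations: MigrationWithStatus[];
};
const pending = migrations.filter((m) => m.status === 'pending');
console.log(`${pending.length} pending migrations`);

// 2. Apply pending migrations: POST /migrations/up returns an ApplyResult
//    with success, applied, skipped, total, and per-migration results.
const applyResult = await callPgflow('/migrations/up', 'POST');
console.log(applyResult);

// 3. Store vault secrets used for worker management: POST /secrets/configure
const secretsResult = await callPgflow('/secrets/configure', 'POST');
console.log(secretsResult);
```

The same three calls are what the temporary `pgflow-installer` function in the Lovable tutorial performs server-side, so this sketch is mainly useful for scripted installs on other no-CLI platforms.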