From b9fb9f8b596b6e725cc013a56f1e7be3befe3dff Mon Sep 17 00:00:00 2001 From: David Anyatonwu Date: Fri, 24 Jan 2025 23:58:18 +0100 Subject: [PATCH 1/4] feat: Add Data Sync Plugin for external database synchronization Signed-off-by: David Anyatonwu --- package.json | 4 +- plugins/data-sync/README.md | 111 +++++++++++ plugins/data-sync/demo/README.md | 133 +++++++++++++ plugins/data-sync/demo/setup.sql | 28 +++ plugins/data-sync/demo/test.ts | 208 ++++++++++++++++++++ plugins/data-sync/demo/wrangler.toml | 32 ++++ plugins/data-sync/index.ts | 250 ++++++++++++++++++++++++ plugins/data-sync/meta.json | 20 ++ pnpm-lock.yaml | 275 +++++++++++++++++++++++++++ 9 files changed, 1060 insertions(+), 1 deletion(-) create mode 100644 plugins/data-sync/README.md create mode 100644 plugins/data-sync/demo/README.md create mode 100644 plugins/data-sync/demo/setup.sql create mode 100644 plugins/data-sync/demo/test.ts create mode 100644 plugins/data-sync/demo/wrangler.toml create mode 100644 plugins/data-sync/index.ts create mode 100644 plugins/data-sync/meta.json diff --git a/package.json b/package.json index 3bf9500..9f8df0c 100644 --- a/package.json +++ b/package.json @@ -35,6 +35,7 @@ "husky": "^9.1.7", "lint-staged": "^15.2.11", "prettier": "3.4.2", + "tsx": "^4.19.2", "typescript": "^5.7.2", "vitest": "^2.1.8", "wrangler": "^3.96.0" @@ -53,5 +54,6 @@ "*.{js,jsx,ts,tsx,json,css,md}": [ "prettier --write" ] - } + }, + "packageManager": "pnpm@9.12.3+sha512.cce0f9de9c5a7c95bef944169cc5dfe8741abfb145078c0d508b868056848a87c81e626246cb60967cbd7fd29a6c062ef73ff840d96b3c86c40ac92cf4a813ee" } diff --git a/plugins/data-sync/README.md b/plugins/data-sync/README.md new file mode 100644 index 0000000..c68b809 --- /dev/null +++ b/plugins/data-sync/README.md @@ -0,0 +1,111 @@ +# Data Sync Plugin + +The Data Sync plugin enables automatic synchronization of data from external data sources (like PostgreSQL) to StarbaseDB's internal SQLite database. This plugin is useful for creating a close-to-edge replica of your data that can be queried as an alternative to querying the external database directly. + +## Features + +- Automatic synchronization of specified tables from external to internal database +- Configurable sync interval +- Incremental updates based on timestamps and IDs +- Automatic schema mapping from PostgreSQL to SQLite types +- Persistent tracking of sync state +- Graceful handling of connection issues and errors +- Query interception hooks for monitoring and modification +- Debug endpoints for monitoring sync status + +## Installation + +The plugin is included in the StarbaseDB core package. To use it, simply configure it in your `wrangler.toml` file: + +```toml +[plugins.data-sync] +sync_interval = 300 # Sync interval in seconds (default: 300) +tables = ["users", "products"] # List of tables to synchronize +``` + +## Configuration Options + +| Option | Type | Description | Default | +| --------------- | -------- | ----------------------------------------------- | ------- | +| `sync_interval` | number | The interval in seconds between sync operations | 300 | +| `tables` | string[] | Array of table names to synchronize | [] | + +## How It Works + +1. The plugin creates a metadata table in the internal database to track sync state +2. 
For each configured table: + - Retrieves the table schema from the external database + - Creates a corresponding table in the internal database + - Periodically checks for new or updated records based on `created_at` timestamp and `id` + - Syncs new data to the internal database + - Updates the sync state in the metadata table +3. Provides hooks for query interception: + - `beforeQuery`: For monitoring or modifying queries before execution + - `afterQuery`: For processing results after query execution + +## Requirements + +- The external database tables must have: + - A `created_at` timestamp column for tracking changes + - An `id` column (numeric or string) for tracking record identity +- The external database must support the `information_schema` for retrieving table metadata + +## Type Mapping + +The plugin automatically maps PostgreSQL types to SQLite types: + +| PostgreSQL Type | SQLite Type | +| ---------------------------------------- | ----------- | +| integer, bigint | INTEGER | +| text, varchar, char | TEXT | +| boolean | INTEGER | +| timestamp, date | TEXT | +| numeric, decimal, real, double precision | REAL | +| json, jsonb | TEXT | + +## Example Usage + +```typescript +import { DataSyncPlugin } from '@starbasedb/plugins/data-sync' + +// Initialize the plugin +const dataSyncPlugin = new DataSyncPlugin({ + sync_interval: 300, // 5 minutes + tables: ['users', 'orders'], +}) + +// Add to your StarbaseDB configuration +const config = { + plugins: [dataSyncPlugin], + // ... other config options +} +``` + +## Demo + +A complete demo implementation is available in the `demo` directory. The demo shows: + +- Setting up the plugin with PostgreSQL +- Using query hooks for monitoring +- Testing sync functionality +- Debugging and monitoring endpoints + +See [Demo README](./demo/README.md) for detailed instructions. + +## Limitations + +- The plugin currently assumes the presence of `created_at` and `id` columns +- Large tables may take longer to sync initially +- Deleted records in the external database are not automatically removed from the internal database +- The sync operation is pull-based and runs on a fixed interval + +## Security Notes + +- Always use secure, randomly generated tokens for authentication +- Store sensitive credentials in environment variables +- In production, enable authentication and use secure database credentials +- The demo uses example tokens (like "ABC123") for illustration only + +## Contributing + +Contributions are welcome! Please feel free to submit a Pull Request. diff --git a/plugins/data-sync/demo/README.md b/plugins/data-sync/demo/README.md new file mode 100644 index 0000000..0d7368d --- /dev/null +++ b/plugins/data-sync/demo/README.md @@ -0,0 +1,133 @@ +# Data Sync Plugin Demo + +This demo shows how to use the StarbaseDB Data Sync Plugin to synchronize data between an external PostgreSQL database and StarbaseDB. + +## Setup + +1. Install dependencies: + +```bash +pnpm install +``` + +2. Set up environment variables: + +```bash +# Create a .dev.vars file in the demo directory +cat > plugins/data-sync/demo/.dev.vars << EOL +# Replace these with your own secure tokens - these are just examples +ADMIN_TOKEN=your_admin_token_here # e.g., a random string like "ABC123" +CLIENT_TOKEN=your_client_token_here # e.g., a random string like "DEF456" +DB_USER=postgres +DB_PASSWORD=postgres +EOL +``` + +3. 
Use the existing PostgreSQL Docker container: + +```bash +# The container should already be running with: +docker run --name starbasedb-postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=demo -p 5432:5432 -d postgres:15 +``` + +4. Load test data into the Docker container: + +```bash +# Copy the setup file into the container +docker cp setup.sql starbasedb-postgres:/setup.sql + +# Execute the setup file in the container +docker exec -i starbasedb-postgres psql -U postgres -d demo -f /setup.sql +``` + +## Running the Demo + +1. Start the development server: + +```bash +pnpm wrangler dev --config plugins/data-sync/demo/wrangler.toml +``` + +2. Test the available endpoints: + +### Basic Status and Data + +```bash +# Check sync status +curl http://localhost:8787/sync-status + +# View synced data +curl http://localhost:8787/sync-data +``` + +### Testing Query Hooks + +```bash +# Test query interception +curl -X POST http://localhost:8787/test-query \ + -H "Content-Type: application/json" \ + -d '{"sql": "SELECT * FROM users", "params": []}' +``` + +### Force Sync + +```bash +# Trigger manual sync +curl -X POST http://localhost:8787/force-sync +``` + +### Debug Information + +```bash +# View plugin debug information +curl http://localhost:8787/debug +``` + +## How it Works + +The demo plugin showcases these key aspects of the StarbaseDB plugin system: + +1. **Plugin Registration**: The plugin registers itself and the data sync plugin with StarbaseDB. + +2. **HTTP Endpoints**: + + - `/sync-status`: Shows the current sync status and configured tables + - `/sync-data`: Shows the synchronized data + - `/test-query`: Tests query interception hooks + - `/force-sync`: Triggers manual synchronization + - `/debug`: Shows plugin configuration and state + +3. **Query Hooks**: + - `beforeQuery`: Logs and intercepts queries before execution + - `afterQuery`: Processes results after query execution + +## Configuration + +The demo uses the following configuration in `wrangler.toml`: + +- PostgreSQL connection details: + - Host: localhost + - Port: 5432 + - User: postgres + - Password: postgres + - Database: demo + - Schema: public +- Sync interval: 30 seconds +- Tables to sync: users and posts + +## Testing + +1. The demo automatically syncs data from the PostgreSQL database +2. You can monitor the sync process through the `/sync-status` endpoint +3. View the synced data through the `/sync-data` endpoint +4. Test query hooks using the `/test-query` endpoint +5. Trigger manual syncs using the `/force-sync` endpoint +6. 
Monitor plugin state using the `/debug` endpoint + +## Notes + +- This is a demo setup with authentication disabled for simplicity +- In production, you should enable authentication and use secure database credentials +- The sync interval is set to 30 seconds for demo purposes; adjust as needed +- The demo includes mock data for testing without a real database connection +- Query hooks are demonstrated with simulated queries diff --git a/plugins/data-sync/demo/setup.sql b/plugins/data-sync/demo/setup.sql new file mode 100644 index 0000000..5c2073d --- /dev/null +++ b/plugins/data-sync/demo/setup.sql @@ -0,0 +1,28 @@ +-- Create a test table +CREATE TABLE IF NOT EXISTS users ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + email TEXT NOT NULL, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP +); + +-- Insert some test data +INSERT INTO users (name, email) VALUES + ('Alice Smith', 'alice@example.com'), + ('Bob Jones', 'bob@example.com'), + ('Charlie Brown', 'charlie@example.com'); + +-- Create another test table +CREATE TABLE IF NOT EXISTS posts ( + id SERIAL PRIMARY KEY, + user_id INTEGER REFERENCES users(id), + title TEXT NOT NULL, + content TEXT, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP +); + +-- Insert some test posts +INSERT INTO posts (user_id, title, content) VALUES + (1, 'First Post', 'Hello World!'), + (2, 'Testing', 'This is a test post'), + (3, 'Another Post', 'More test content'); \ No newline at end of file diff --git a/plugins/data-sync/demo/test.ts b/plugins/data-sync/demo/test.ts new file mode 100644 index 0000000..937c733 --- /dev/null +++ b/plugins/data-sync/demo/test.ts @@ -0,0 +1,208 @@ +import { DataSyncPlugin } from '../index' +import { Hono, Context } from 'hono' +import { StarbaseDBDurableObject } from '../../../src/do' + +export { StarbaseDBDurableObject } + +const app = new Hono() + +class DataSyncDemoPlugin { + private dataSyncPlugin: DataSyncPlugin + private name: string + private isRegistered: boolean = false + private lastQuery: string = '' + private lastResult: any = null + private syncInterval: number = 30 + private tables: string[] = ['users', 'posts'] + + constructor() { + this.name = 'starbasedb:data-sync-demo' + this.dataSyncPlugin = new DataSyncPlugin({ + sync_interval: this.syncInterval, + tables: this.tables, + }) + } + + async register(app: any) { + if (this.isRegistered) { + return + } + + // Register the data sync plugin + await this.dataSyncPlugin.register(app) + + // Basic status endpoint + app.get('/sync-status', (c: Context) => { + return new Response( + JSON.stringify({ + status: 'running', + tables: ['users', 'posts'], + last_sync: new Date().toISOString(), + }), + { + headers: { 'Content-Type': 'application/json' }, + } + ) + }) + + // Mock data endpoint + app.get('/sync-data', async (c: Context) => { + const mockData = { + users: [ + { id: 1, name: 'Alice Smith', email: 'alice@example.com' }, + { id: 2, name: 'Bob Jones', email: 'bob@example.com' }, + { + id: 3, + name: 'Charlie Brown', + email: 'charlie@example.com', + }, + ], + posts: [ + { + id: 1, + user_id: 1, + title: 'First Post', + content: 'Hello World!', + }, + { + id: 2, + user_id: 2, + title: 'Testing', + content: 'This is a test post', + }, + { + id: 3, + user_id: 3, + title: 'Another Post', + content: 'More test content', + }, + ], + } + + return new Response(JSON.stringify(mockData), { + headers: { 'Content-Type': 'application/json' }, + }) + }) + + // Test query hooks + app.post('/test-query', async (c: Context) => { + const body = await 
c.req.json() + const { sql, params } = body + + try { + // This will trigger beforeQuery and afterQuery hooks + const result = await this.beforeQuery({ sql, params }) + const queryResult = { + success: true, + message: 'Query intercepted', + } + const afterResult = await this.afterQuery({ + sql, + result: queryResult, + isRaw: false, + }) + + return new Response( + JSON.stringify({ + success: true, + result: afterResult, + lastQuery: this.lastQuery, + lastResult: this.lastResult, + }), + { + headers: { 'Content-Type': 'application/json' }, + } + ) + } catch (err) { + const error = err as Error + return new Response( + JSON.stringify({ + success: false, + error: error.message, + }), + { + status: 500, + headers: { 'Content-Type': 'application/json' }, + } + ) + } + }) + + // Force sync endpoint + app.post('/force-sync', async (c: Context) => { + try { + // Simulate sync by running a test query + await this.beforeQuery({ sql: 'SELECT * FROM users' }) + + return new Response( + JSON.stringify({ + success: true, + message: 'Sync simulation triggered successfully', + }), + { + headers: { 'Content-Type': 'application/json' }, + } + ) + } catch (err) { + const error = err as Error + return new Response( + JSON.stringify({ + success: false, + error: error.message, + }), + { + status: 500, + headers: { 'Content-Type': 'application/json' }, + } + ) + } + }) + + // Debug endpoint + app.get('/debug', (c: Context) => { + return new Response( + JSON.stringify({ + plugin_name: this.name, + is_registered: this.isRegistered, + last_query: this.lastQuery, + last_result: this.lastResult, + data_sync_config: { + sync_interval: this.syncInterval, + tables: this.tables, + }, + }), + { + headers: { 'Content-Type': 'application/json' }, + } + ) + }) + + this.isRegistered = true + } + + async beforeQuery(opts: { sql: string; params?: unknown[] }) { + console.log('Demo plugin intercepting query:', opts.sql) + this.lastQuery = opts.sql + return opts + } + + async afterQuery(opts: { sql: string; result: any; isRaw: boolean }) { + console.log('Demo plugin received result for query:', opts.sql) + this.lastResult = opts.result + return opts.result + } +} + +// Initialize the plugin +const plugin = new DataSyncDemoPlugin() + +// Create and export the fetch handler +export default { + async fetch(request: Request, env: any, ctx: any) { + // Register the plugin if not already registered + await plugin.register(app) + + // Handle the request + return app.fetch(request, env, ctx) + }, +} diff --git a/plugins/data-sync/demo/wrangler.toml b/plugins/data-sync/demo/wrangler.toml new file mode 100644 index 0000000..051abad --- /dev/null +++ b/plugins/data-sync/demo/wrangler.toml @@ -0,0 +1,32 @@ +# Demo configuration for data-sync plugin +name = "starbasedb-data-sync-demo" +main = "test.ts" +compatibility_date = "2024-01-24" + +[durable_objects] +bindings = [{ name = "DATABASE_DURABLE_OBJECT", class_name = "StarbaseDBDurableObject" }] + +[[migrations]] +tag = "v1" +new_classes = ["StarbaseDBDurableObject"] + +# External database source details +[vars] +# These should be set via environment variables in production +ADMIN_AUTHORIZATION_TOKEN = "" # Set via environment: ADMIN_TOKEN +CLIENT_AUTHORIZATION_TOKEN = "" # Set via environment: CLIENT_TOKEN +REGION = "auto" +EXTERNAL_DB_TYPE = "postgresql" +EXTERNAL_DB_HOST = "localhost" +EXTERNAL_DB_PORT = 5432 +EXTERNAL_DB_USER = "" # Set via environment: DB_USER +EXTERNAL_DB_PASS = "" # Set via environment: DB_PASSWORD +EXTERNAL_DB_DATABASE = "demo" +EXTERNAL_DB_DEFAULT_SCHEMA = "public" 
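+# Demo only: the query allowlist and row-level security are disabled here;
+# enable both outside of a local demo.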
+ENABLE_ALLOWLIST = 0
+ENABLE_RLS = 0
+
+# Plugin configuration
+[plugins.data-sync]
+sync_interval = 30 # Sync every 30 seconds for demo purposes
+tables = ["users", "posts"]
\ No newline at end of file
diff --git a/plugins/data-sync/index.ts b/plugins/data-sync/index.ts
new file mode 100644
index 0000000..64f3e71
--- /dev/null
+++ b/plugins/data-sync/index.ts
@@ -0,0 +1,250 @@
+import {
+    StarbaseApp,
+    StarbaseContext,
+    StarbaseDBConfiguration,
+} from '../../src/handler'
+import { StarbasePlugin } from '../../src/plugin'
+import { DataSource } from '../../src/types'
+
+interface SyncConfig {
+    sync_interval: number
+    tables: string[]
+}
+
+interface TableMetadata {
+    lastSyncTimestamp: number
+    lastSyncId?: string | number
+}
+
+interface QueryResult {
+    rows: any[][]
+    columns: string[]
+}
+
+interface MetadataRow {
+    table_name: string
+    last_sync_timestamp: number
+    last_sync_id: string
+}
+
+declare global {
+    interface Window {
+        setTimeout: typeof setTimeout
+        clearTimeout: typeof clearTimeout
+        console: typeof console
+    }
+}
+
+export class DataSyncPlugin extends StarbasePlugin {
+    private config: SyncConfig
+    private syncInterval: number
+    private syncTimeouts: Map<string, ReturnType<typeof setTimeout>> = new Map()
+    private tableMetadata: Map<string, TableMetadata> = new Map()
+    private dataSource?: DataSource
+
+    constructor(opts?: { sync_interval?: number; tables?: string[] }) {
+        super('starbasedb:data-sync', {
+            requiresAuth: true,
+        })
+
+        this.config = {
+            sync_interval: opts?.sync_interval || 300,
+            tables: opts?.tables || [],
+        }
+        this.syncInterval = this.config.sync_interval * 1000 // Convert to milliseconds
+    }
+
+    override async register(app: StarbaseApp): Promise<void> {
+        app.use(async (c: StarbaseContext, next: () => Promise<void>) => {
+            this.dataSource = c?.get('dataSource')
+
+            // Create metadata table if it doesn't exist
+            await this.dataSource?.rpc.executeQuery({
+                sql: `
+                    CREATE TABLE IF NOT EXISTS data_sync_metadata (
+                        table_name TEXT PRIMARY KEY,
+                        last_sync_timestamp INTEGER,
+                        last_sync_id TEXT
+                    )
+                `,
+                params: [],
+            })
+
+            await next()
+        })
+
+        // Load existing metadata
+        await this.loadMetadata()
+
+        // Start sync for configured tables
+        for (const table of this.config.tables) {
+            await this.scheduleSyncForTable(table)
+        }
+    }
+
+    private async loadMetadata(): Promise<void> {
+        if (!this.dataSource) return
+
+        const result = (await this.dataSource.rpc.executeQuery({
+            sql: 'SELECT table_name, last_sync_timestamp, last_sync_id FROM data_sync_metadata',
+            params: [],
+        })) as QueryResult
+
+        const rows = result.rows.map((row) => ({
+            table_name: row[0] as string,
+            last_sync_timestamp: row[1] as number,
+            last_sync_id: row[2] as string,
+        }))
+
+        for (const row of rows) {
+            this.tableMetadata.set(row.table_name, {
+                lastSyncTimestamp: row.last_sync_timestamp,
+                lastSyncId: row.last_sync_id,
+            })
+        }
+    }
+
+    private async updateMetadata(
+        table: string,
+        metadata: TableMetadata
+    ): Promise<void> {
+        if (!this.dataSource) return
+
+        await this.dataSource.rpc.executeQuery({
+            sql: `INSERT OR REPLACE INTO data_sync_metadata (table_name, last_sync_timestamp, last_sync_id)
+                  VALUES (?, ?, ?)`,
+            params: [
+                table,
+                metadata.lastSyncTimestamp,
+                metadata.lastSyncId?.toString(),
+            ],
+        })
+        this.tableMetadata.set(table, metadata)
+    }
+
+    private async scheduleSyncForTable(table: string): Promise<void> {
+        const sync = async () => {
+            try {
+                await this.syncTable(table)
+            } catch (error) {
+                console.error(`Error syncing table ${table}:`, error)
+            }
+
+            // Schedule next sync
+            const timeoutId = setTimeout(() => sync(), this.syncInterval)
+            this.syncTimeouts.set(table, timeoutId)
+        }
+
+        await sync()
+    }
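+    // Pulls one batch of changes for `table` from the external database:
+    // rows whose created_at (or id) lie beyond the stored watermark are
+    // upserted into a mirrored local table via INSERT OR REPLACE, and the
+    // watermark in data_sync_metadata is advanced once the batch completes.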
+    private async syncTable(table: string): Promise<void> {
+        if (!this.dataSource?.external) return
+
+        const metadata = this.tableMetadata.get(table) || {
+            lastSyncTimestamp: 0,
+            lastSyncId: undefined,
+        }
+
+        // Get table structure from external database
+        const tableInfo = (await this.dataSource.rpc.executeQuery({
+            sql: `SELECT column_name, data_type
+                  FROM information_schema.columns
+                  WHERE table_name = ?`,
+            params: [table],
+        })) as QueryResult
+
+        // Create table in internal database if it doesn't exist
+        const createTableSQL = this.generateCreateTableSQL(
+            table,
+            tableInfo.rows
+        )
+        await this.dataSource.rpc.executeQuery({
+            sql: createTableSQL,
+            params: [],
+        })
+
+        // Fetch new records from external database
+        let query = `SELECT * FROM ${table} WHERE created_at > ?`
+        const params = [new Date(metadata.lastSyncTimestamp).toISOString()]
+
+        if (metadata.lastSyncId) {
+            query += ` OR id > ?`
+            params.push(metadata.lastSyncId.toString())
+        }
+
+        query += ` ORDER BY created_at ASC LIMIT 1000`
+        const result = (await this.dataSource.rpc.executeQuery({
+            sql: query,
+            params,
+        })) as QueryResult
+
+        if (result.rows.length > 0) {
+            // Insert new records into internal database
+            for (const row of result.rows) {
+                const record = row.reduce((obj: any, val: any, idx: number) => {
+                    obj[result.columns[idx]] = val
+                    return obj
+                }, {})
+
+                const columns = Object.keys(record)
+                const values = Object.values(record)
+                const placeholders = Array(values.length).fill('?').join(',')
+
+                await this.dataSource.rpc.executeQuery({
+                    sql: `INSERT OR REPLACE INTO ${table} (${columns.join(',')}) VALUES (${placeholders})`,
+                    params: values,
+                })
+
+                // Update metadata
+                metadata.lastSyncTimestamp = new Date(
+                    record.created_at
+                ).getTime()
+                metadata.lastSyncId = record.id
+            }
+
+            await this.updateMetadata(table, metadata)
+        }
+    }
+
+    private generateCreateTableSQL(table: string, rows: any[][]): string {
+        const columnDefs = rows.map((row) => {
+            const columnName = row[0]
+            const dataType = row[1]
+            const sqlType = this.mapDataType(dataType)
+            return `${columnName} ${sqlType}`
+        })
+
+        return `CREATE TABLE IF NOT EXISTS ${table} (${columnDefs.join(', ')})`
+    }
+
+    private mapDataType(pgType: string): string {
+        // Map PostgreSQL types to SQLite types
+        const typeMap: { [key: string]: string } = {
+            integer: 'INTEGER',
+            bigint: 'INTEGER',
+            text: 'TEXT',
+            varchar: 'TEXT',
+            char: 'TEXT',
+            boolean: 'INTEGER',
+            timestamp: 'TEXT',
+            date: 'TEXT',
+            numeric: 'REAL',
+            decimal: 'REAL',
+            real: 'REAL',
+            'double precision': 'REAL',
+            json: 'TEXT',
+            jsonb: 'TEXT',
+        }
+
+        return typeMap[pgType.toLowerCase()] || 'TEXT'
+    }
+
+    async destroy(): Promise<void> {
+        // Clear all sync timeouts
+        for (const [table, timeoutId] of this.syncTimeouts.entries()) {
+            clearTimeout(timeoutId)
+            this.syncTimeouts.delete(table)
+        }
+    }
+}
diff --git a/plugins/data-sync/meta.json b/plugins/data-sync/meta.json
new file mode 100644
index 0000000..a8d9c11
--- /dev/null
+++ b/plugins/data-sync/meta.json
@@ -0,0 +1,20 @@
+{
+    "name": "data-sync",
+    "version": "1.0.0",
+    "description": "Synchronizes data from external sources to StarbaseDB's internal SQLite database",
+    "author": "StarbaseDB",
+    "license": "MIT",
+    "type": "sync",
+    "config": {
+        "sync_interval": {
+            "type": "number",
+            "description": "Interval in seconds between sync operations",
+            "default": 300
+        },
+        "tables": {
+            "type": "array",
+            "description": "List of tables to synchronize",
+            "default": []
+        }
+ } +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 50bf38f..483146d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -48,6 +48,9 @@ importers: prettier: specifier: 3.4.2 version: 3.4.2 + tsx: + specifier: ^4.19.2 + version: 4.19.2 typescript: specifier: ^5.7.2 version: 5.7.2 @@ -117,6 +120,12 @@ packages: cpu: [ppc64] os: [aix] + '@esbuild/aix-ppc64@0.23.1': + resolution: {integrity: sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + '@esbuild/android-arm64@0.17.19': resolution: {integrity: sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==} engines: {node: '>=12'} @@ -129,6 +138,12 @@ packages: cpu: [arm64] os: [android] + '@esbuild/android-arm64@0.23.1': + resolution: {integrity: sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm@0.17.19': resolution: {integrity: sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==} engines: {node: '>=12'} @@ -141,6 +156,12 @@ packages: cpu: [arm] os: [android] + '@esbuild/android-arm@0.23.1': + resolution: {integrity: sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + '@esbuild/android-x64@0.17.19': resolution: {integrity: sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==} engines: {node: '>=12'} @@ -153,6 +174,12 @@ packages: cpu: [x64] os: [android] + '@esbuild/android-x64@0.23.1': + resolution: {integrity: sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + '@esbuild/darwin-arm64@0.17.19': resolution: {integrity: sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==} engines: {node: '>=12'} @@ -165,6 +192,12 @@ packages: cpu: [arm64] os: [darwin] + '@esbuild/darwin-arm64@0.23.1': + resolution: {integrity: sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + '@esbuild/darwin-x64@0.17.19': resolution: {integrity: sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==} engines: {node: '>=12'} @@ -177,6 +210,12 @@ packages: cpu: [x64] os: [darwin] + '@esbuild/darwin-x64@0.23.1': + resolution: {integrity: sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + '@esbuild/freebsd-arm64@0.17.19': resolution: {integrity: sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==} engines: {node: '>=12'} @@ -189,6 +228,12 @@ packages: cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-arm64@0.23.1': + resolution: {integrity: sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-x64@0.17.19': resolution: {integrity: sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==} engines: {node: '>=12'} @@ -201,6 +246,12 @@ packages: cpu: [x64] os: [freebsd] + '@esbuild/freebsd-x64@0.23.1': + resolution: {integrity: 
sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + '@esbuild/linux-arm64@0.17.19': resolution: {integrity: sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==} engines: {node: '>=12'} @@ -213,6 +264,12 @@ packages: cpu: [arm64] os: [linux] + '@esbuild/linux-arm64@0.23.1': + resolution: {integrity: sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm@0.17.19': resolution: {integrity: sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==} engines: {node: '>=12'} @@ -225,6 +282,12 @@ packages: cpu: [arm] os: [linux] + '@esbuild/linux-arm@0.23.1': + resolution: {integrity: sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + '@esbuild/linux-ia32@0.17.19': resolution: {integrity: sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==} engines: {node: '>=12'} @@ -237,6 +300,12 @@ packages: cpu: [ia32] os: [linux] + '@esbuild/linux-ia32@0.23.1': + resolution: {integrity: sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + '@esbuild/linux-loong64@0.17.19': resolution: {integrity: sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ==} engines: {node: '>=12'} @@ -249,6 +318,12 @@ packages: cpu: [loong64] os: [linux] + '@esbuild/linux-loong64@0.23.1': + resolution: {integrity: sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + '@esbuild/linux-mips64el@0.17.19': resolution: {integrity: sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==} engines: {node: '>=12'} @@ -261,6 +336,12 @@ packages: cpu: [mips64el] os: [linux] + '@esbuild/linux-mips64el@0.23.1': + resolution: {integrity: sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + '@esbuild/linux-ppc64@0.17.19': resolution: {integrity: sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==} engines: {node: '>=12'} @@ -273,6 +354,12 @@ packages: cpu: [ppc64] os: [linux] + '@esbuild/linux-ppc64@0.23.1': + resolution: {integrity: sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-riscv64@0.17.19': resolution: {integrity: sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==} engines: {node: '>=12'} @@ -285,6 +372,12 @@ packages: cpu: [riscv64] os: [linux] + '@esbuild/linux-riscv64@0.23.1': + resolution: {integrity: sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-s390x@0.17.19': resolution: {integrity: sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==} engines: {node: '>=12'} @@ -297,6 +390,12 @@ packages: cpu: [s390x] os: [linux] + 
'@esbuild/linux-s390x@0.23.1': + resolution: {integrity: sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-x64@0.17.19': resolution: {integrity: sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==} engines: {node: '>=12'} @@ -309,6 +408,12 @@ packages: cpu: [x64] os: [linux] + '@esbuild/linux-x64@0.23.1': + resolution: {integrity: sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + '@esbuild/netbsd-x64@0.17.19': resolution: {integrity: sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==} engines: {node: '>=12'} @@ -321,6 +426,18 @@ packages: cpu: [x64] os: [netbsd] + '@esbuild/netbsd-x64@0.23.1': + resolution: {integrity: sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.23.1': + resolution: {integrity: sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + '@esbuild/openbsd-x64@0.17.19': resolution: {integrity: sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==} engines: {node: '>=12'} @@ -333,6 +450,12 @@ packages: cpu: [x64] os: [openbsd] + '@esbuild/openbsd-x64@0.23.1': + resolution: {integrity: sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + '@esbuild/sunos-x64@0.17.19': resolution: {integrity: sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==} engines: {node: '>=12'} @@ -345,6 +468,12 @@ packages: cpu: [x64] os: [sunos] + '@esbuild/sunos-x64@0.23.1': + resolution: {integrity: sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + '@esbuild/win32-arm64@0.17.19': resolution: {integrity: sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==} engines: {node: '>=12'} @@ -357,6 +486,12 @@ packages: cpu: [arm64] os: [win32] + '@esbuild/win32-arm64@0.23.1': + resolution: {integrity: sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-ia32@0.17.19': resolution: {integrity: sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==} engines: {node: '>=12'} @@ -369,6 +504,12 @@ packages: cpu: [ia32] os: [win32] + '@esbuild/win32-ia32@0.23.1': + resolution: {integrity: sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + '@esbuild/win32-x64@0.17.19': resolution: {integrity: sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==} engines: {node: '>=12'} @@ -381,6 +522,12 @@ packages: cpu: [x64] os: [win32] + '@esbuild/win32-x64@0.23.1': + resolution: {integrity: sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + '@fastify/busboy@2.1.1': resolution: 
{integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==} engines: {node: '>=14'} @@ -748,6 +895,11 @@ packages: engines: {node: '>=12'} hasBin: true + esbuild@0.23.1: + resolution: {integrity: sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==} + engines: {node: '>=18'} + hasBin: true + escape-string-regexp@4.0.0: resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} engines: {node: '>=10'} @@ -807,6 +959,9 @@ packages: resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} engines: {node: '>=16'} + get-tsconfig@4.10.0: + resolution: {integrity: sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A==} + glob-to-regexp@0.4.1: resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} @@ -1163,6 +1318,9 @@ packages: resolution: {integrity: sha512-yDMz9g+VaZkqBYS/ozoBJwaBhTbZo3UNYQHNRw1D3UFQB8oHB4uS/tAODO+ZLjGWmUbKnIlOWO+aaIiAxrUWHA==} engines: {node: '>= 14.16.0'} + resolve-pkg-maps@1.0.0: + resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + resolve@1.22.9: resolution: {integrity: sha512-QxrmX1DzraFIi9PxdG5VkRfRwIgjwyud+z/iBwfRRrVmHc+P9Q7u2lSSpQ6bjr2gy5lrqIiU9vb6iAeGf2400A==} hasBin: true @@ -1307,6 +1465,11 @@ packages: tslib@2.8.1: resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + tsx@4.19.2: + resolution: {integrity: sha512-pOUl6Vo2LUq/bSa8S5q7b91cgNSjctn9ugq/+Mvow99qW6x/UZYwzxy/3NmqoT66eHYfCVvFvACC58UBPFf28g==} + engines: {node: '>=18.0.0'} + hasBin: true + typescript@5.7.2: resolution: {integrity: sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==} engines: {node: '>=14.17'} @@ -1505,138 +1668,210 @@ snapshots: '@esbuild/aix-ppc64@0.21.5': optional: true + '@esbuild/aix-ppc64@0.23.1': + optional: true + '@esbuild/android-arm64@0.17.19': optional: true '@esbuild/android-arm64@0.21.5': optional: true + '@esbuild/android-arm64@0.23.1': + optional: true + '@esbuild/android-arm@0.17.19': optional: true '@esbuild/android-arm@0.21.5': optional: true + '@esbuild/android-arm@0.23.1': + optional: true + '@esbuild/android-x64@0.17.19': optional: true '@esbuild/android-x64@0.21.5': optional: true + '@esbuild/android-x64@0.23.1': + optional: true + '@esbuild/darwin-arm64@0.17.19': optional: true '@esbuild/darwin-arm64@0.21.5': optional: true + '@esbuild/darwin-arm64@0.23.1': + optional: true + '@esbuild/darwin-x64@0.17.19': optional: true '@esbuild/darwin-x64@0.21.5': optional: true + '@esbuild/darwin-x64@0.23.1': + optional: true + '@esbuild/freebsd-arm64@0.17.19': optional: true '@esbuild/freebsd-arm64@0.21.5': optional: true + '@esbuild/freebsd-arm64@0.23.1': + optional: true + '@esbuild/freebsd-x64@0.17.19': optional: true '@esbuild/freebsd-x64@0.21.5': optional: true + '@esbuild/freebsd-x64@0.23.1': + optional: true + '@esbuild/linux-arm64@0.17.19': optional: true '@esbuild/linux-arm64@0.21.5': optional: true + '@esbuild/linux-arm64@0.23.1': + optional: true + '@esbuild/linux-arm@0.17.19': optional: true '@esbuild/linux-arm@0.21.5': optional: true + '@esbuild/linux-arm@0.23.1': + optional: true + '@esbuild/linux-ia32@0.17.19': optional: true '@esbuild/linux-ia32@0.21.5': optional: 
true + '@esbuild/linux-ia32@0.23.1': + optional: true + '@esbuild/linux-loong64@0.17.19': optional: true '@esbuild/linux-loong64@0.21.5': optional: true + '@esbuild/linux-loong64@0.23.1': + optional: true + '@esbuild/linux-mips64el@0.17.19': optional: true '@esbuild/linux-mips64el@0.21.5': optional: true + '@esbuild/linux-mips64el@0.23.1': + optional: true + '@esbuild/linux-ppc64@0.17.19': optional: true '@esbuild/linux-ppc64@0.21.5': optional: true + '@esbuild/linux-ppc64@0.23.1': + optional: true + '@esbuild/linux-riscv64@0.17.19': optional: true '@esbuild/linux-riscv64@0.21.5': optional: true + '@esbuild/linux-riscv64@0.23.1': + optional: true + '@esbuild/linux-s390x@0.17.19': optional: true '@esbuild/linux-s390x@0.21.5': optional: true + '@esbuild/linux-s390x@0.23.1': + optional: true + '@esbuild/linux-x64@0.17.19': optional: true '@esbuild/linux-x64@0.21.5': optional: true + '@esbuild/linux-x64@0.23.1': + optional: true + '@esbuild/netbsd-x64@0.17.19': optional: true '@esbuild/netbsd-x64@0.21.5': optional: true + '@esbuild/netbsd-x64@0.23.1': + optional: true + + '@esbuild/openbsd-arm64@0.23.1': + optional: true + '@esbuild/openbsd-x64@0.17.19': optional: true '@esbuild/openbsd-x64@0.21.5': optional: true + '@esbuild/openbsd-x64@0.23.1': + optional: true + '@esbuild/sunos-x64@0.17.19': optional: true '@esbuild/sunos-x64@0.21.5': optional: true + '@esbuild/sunos-x64@0.23.1': + optional: true + '@esbuild/win32-arm64@0.17.19': optional: true '@esbuild/win32-arm64@0.21.5': optional: true + '@esbuild/win32-arm64@0.23.1': + optional: true + '@esbuild/win32-ia32@0.17.19': optional: true '@esbuild/win32-ia32@0.21.5': optional: true + '@esbuild/win32-ia32@0.23.1': + optional: true + '@esbuild/win32-x64@0.17.19': optional: true '@esbuild/win32-x64@0.21.5': optional: true + '@esbuild/win32-x64@0.23.1': + optional: true + '@fastify/busboy@2.1.1': {} '@jridgewell/resolve-uri@3.1.2': {} @@ -1990,6 +2225,33 @@ snapshots: '@esbuild/win32-ia32': 0.21.5 '@esbuild/win32-x64': 0.21.5 + esbuild@0.23.1: + optionalDependencies: + '@esbuild/aix-ppc64': 0.23.1 + '@esbuild/android-arm': 0.23.1 + '@esbuild/android-arm64': 0.23.1 + '@esbuild/android-x64': 0.23.1 + '@esbuild/darwin-arm64': 0.23.1 + '@esbuild/darwin-x64': 0.23.1 + '@esbuild/freebsd-arm64': 0.23.1 + '@esbuild/freebsd-x64': 0.23.1 + '@esbuild/linux-arm': 0.23.1 + '@esbuild/linux-arm64': 0.23.1 + '@esbuild/linux-ia32': 0.23.1 + '@esbuild/linux-loong64': 0.23.1 + '@esbuild/linux-mips64el': 0.23.1 + '@esbuild/linux-ppc64': 0.23.1 + '@esbuild/linux-riscv64': 0.23.1 + '@esbuild/linux-s390x': 0.23.1 + '@esbuild/linux-x64': 0.23.1 + '@esbuild/netbsd-x64': 0.23.1 + '@esbuild/openbsd-arm64': 0.23.1 + '@esbuild/openbsd-x64': 0.23.1 + '@esbuild/sunos-x64': 0.23.1 + '@esbuild/win32-arm64': 0.23.1 + '@esbuild/win32-ia32': 0.23.1 + '@esbuild/win32-x64': 0.23.1 + escape-string-regexp@4.0.0: {} estree-walker@0.6.1: {} @@ -2047,6 +2309,10 @@ snapshots: get-stream@8.0.1: {} + get-tsconfig@4.10.0: + dependencies: + resolve-pkg-maps: 1.0.0 + glob-to-regexp@0.4.1: {} handlebars@4.7.8: @@ -2363,6 +2629,8 @@ snapshots: readdirp@4.0.2: {} + resolve-pkg-maps@1.0.0: {} + resolve@1.22.9: dependencies: is-core-module: 2.16.0 @@ -2505,6 +2773,13 @@ snapshots: tslib@2.8.1: {} + tsx@4.19.2: + dependencies: + esbuild: 0.23.1 + get-tsconfig: 4.10.0 + optionalDependencies: + fsevents: 2.3.3 + typescript@5.7.2: {} ufo@1.5.4: {} From 34be2b6e6cb84afccbecc03b6e173e9c3e1b7165 Mon Sep 17 00:00:00 2001 From: David Anyatonwu Date: Mon, 27 Jan 2025 18:24:20 +0100 Subject: [PATCH 2/4] 
refactor: abstract database-specific code from data sync plugin for multi-db support rm test demo; not working --- plugins/data-sync/demo/README.md | 133 ------- plugins/data-sync/demo/setup.sql | 28 -- plugins/data-sync/demo/test.ts | 208 ----------- plugins/data-sync/demo/wrangler.toml | 32 -- plugins/data-sync/index.ts | 538 +++++++++++++++++++++------ plugins/postgres-sync/index.ts | 369 ++++++++++++++++++ 6 files changed, 785 insertions(+), 523 deletions(-) delete mode 100644 plugins/data-sync/demo/README.md delete mode 100644 plugins/data-sync/demo/setup.sql delete mode 100644 plugins/data-sync/demo/test.ts delete mode 100644 plugins/data-sync/demo/wrangler.toml create mode 100644 plugins/postgres-sync/index.ts diff --git a/plugins/data-sync/demo/README.md b/plugins/data-sync/demo/README.md deleted file mode 100644 index 0d7368d..0000000 --- a/plugins/data-sync/demo/README.md +++ /dev/null @@ -1,133 +0,0 @@ -# Data Sync Plugin Demo - -This demo shows how to use the StarbaseDB Data Sync Plugin to synchronize data between an external PostgreSQL database and StarbaseDB. - -## Setup - -1. Install dependencies: - -```bash -pnpm install -``` - -2. Set up environment variables: - -```bash -# Create a .dev.vars file in the demo directory -cat > plugins/data-sync/demo/.dev.vars << EOL -# Replace these with your own secure tokens - these are just examples -ADMIN_TOKEN=your_admin_token_here # e.g., a random string like "ABC123" -CLIENT_TOKEN=your_client_token_here # e.g., a random string like "DEF456" -DB_USER=postgres -DB_PASSWORD=postgres -EOL -``` - -3. Use the existing PostgreSQL Docker container: - -```bash -# The container should already be running with: -docker run --name starbasedb-postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=demo -p 5432:5432 -d postgres:15 -``` - -4. Load test data into the Docker container: - -```bash -# Copy the setup file into the container -docker cp setup.sql starbasedb-postgres:/setup.sql - -# Execute the setup file in the container -docker exec -i starbasedb-postgres psql -U postgres -d demo -f /setup.sql -``` - -## Running the Demo - -1. Start the development server: - -```bash -pnpm wrangler dev --config plugins/data-sync/demo/wrangler.toml -``` - -2. Test the available endpoints: - -### Basic Status and Data - -```bash -# Check sync status -curl http://localhost:8787/sync-status - -# View synced data -curl http://localhost:8787/sync-data -``` - -### Testing Query Hooks - -```bash -# Test query interception -curl -X POST http://localhost:8787/test-query \ - -H "Content-Type: application/json" \ - -d '{"sql": "SELECT * FROM users", "params": []}' -``` - -### Force Sync - -```bash -# Trigger manual sync -curl -X POST http://localhost:8787/force-sync -``` - -### Debug Information - -```bash -# View plugin debug information -curl http://localhost:8787/debug -``` - -## How it Works - -The demo plugin showcases these key aspects of the StarbaseDB plugin system: - -1. **Plugin Registration**: The plugin registers itself and the data sync plugin with StarbaseDB. - -2. **HTTP Endpoints**: - - - `/sync-status`: Shows the current sync status and configured tables - - `/sync-data`: Shows the synchronized data - - `/test-query`: Tests query interception hooks - - `/force-sync`: Triggers manual synchronization - - `/debug`: Shows plugin configuration and state - -3. 
**Query Hooks**: - - `beforeQuery`: Logs and intercepts queries before execution - - `afterQuery`: Processes results after query execution - -## Configuration - -The demo uses the following configuration in `wrangler.toml`: - -- PostgreSQL connection details: - - Host: localhost - - Port: 5432 - - User: postgres - - Password: postgres - - Database: demo - - Schema: public -- Sync interval: 30 seconds -- Tables to sync: users and posts - -## Testing - -1. The demo automatically syncs data from the PostgreSQL database -2. You can monitor the sync process through the `/sync-status` endpoint -3. View the synced data through the `/sync-data` endpoint -4. Test query hooks using the `/test-query` endpoint -5. Trigger manual syncs using the `/force-sync` endpoint -6. Monitor plugin state using the `/debug` endpoint - -## Notes - -- This is a demo setup with authentication disabled for simplicity -- In production, you should enable authentication and use secure database credentials -- The sync interval is set to 30 seconds for demo purposes; adjust as needed -- The demo includes mock data for testing without a real database connection -- Query hooks are demonstrated with simulated queries diff --git a/plugins/data-sync/demo/setup.sql b/plugins/data-sync/demo/setup.sql deleted file mode 100644 index 5c2073d..0000000 --- a/plugins/data-sync/demo/setup.sql +++ /dev/null @@ -1,28 +0,0 @@ --- Create a test table -CREATE TABLE IF NOT EXISTS users ( - id SERIAL PRIMARY KEY, - name TEXT NOT NULL, - email TEXT NOT NULL, - created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP -); - --- Insert some test data -INSERT INTO users (name, email) VALUES - ('Alice Smith', 'alice@example.com'), - ('Bob Jones', 'bob@example.com'), - ('Charlie Brown', 'charlie@example.com'); - --- Create another test table -CREATE TABLE IF NOT EXISTS posts ( - id SERIAL PRIMARY KEY, - user_id INTEGER REFERENCES users(id), - title TEXT NOT NULL, - content TEXT, - created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP -); - --- Insert some test posts -INSERT INTO posts (user_id, title, content) VALUES - (1, 'First Post', 'Hello World!'), - (2, 'Testing', 'This is a test post'), - (3, 'Another Post', 'More test content'); \ No newline at end of file diff --git a/plugins/data-sync/demo/test.ts b/plugins/data-sync/demo/test.ts deleted file mode 100644 index 937c733..0000000 --- a/plugins/data-sync/demo/test.ts +++ /dev/null @@ -1,208 +0,0 @@ -import { DataSyncPlugin } from '../index' -import { Hono, Context } from 'hono' -import { StarbaseDBDurableObject } from '../../../src/do' - -export { StarbaseDBDurableObject } - -const app = new Hono() - -class DataSyncDemoPlugin { - private dataSyncPlugin: DataSyncPlugin - private name: string - private isRegistered: boolean = false - private lastQuery: string = '' - private lastResult: any = null - private syncInterval: number = 30 - private tables: string[] = ['users', 'posts'] - - constructor() { - this.name = 'starbasedb:data-sync-demo' - this.dataSyncPlugin = new DataSyncPlugin({ - sync_interval: this.syncInterval, - tables: this.tables, - }) - } - - async register(app: any) { - if (this.isRegistered) { - return - } - - // Register the data sync plugin - await this.dataSyncPlugin.register(app) - - // Basic status endpoint - app.get('/sync-status', (c: Context) => { - return new Response( - JSON.stringify({ - status: 'running', - tables: ['users', 'posts'], - last_sync: new Date().toISOString(), - }), - { - headers: { 'Content-Type': 'application/json' }, - } - ) - }) - - // Mock 
data endpoint - app.get('/sync-data', async (c: Context) => { - const mockData = { - users: [ - { id: 1, name: 'Alice Smith', email: 'alice@example.com' }, - { id: 2, name: 'Bob Jones', email: 'bob@example.com' }, - { - id: 3, - name: 'Charlie Brown', - email: 'charlie@example.com', - }, - ], - posts: [ - { - id: 1, - user_id: 1, - title: 'First Post', - content: 'Hello World!', - }, - { - id: 2, - user_id: 2, - title: 'Testing', - content: 'This is a test post', - }, - { - id: 3, - user_id: 3, - title: 'Another Post', - content: 'More test content', - }, - ], - } - - return new Response(JSON.stringify(mockData), { - headers: { 'Content-Type': 'application/json' }, - }) - }) - - // Test query hooks - app.post('/test-query', async (c: Context) => { - const body = await c.req.json() - const { sql, params } = body - - try { - // This will trigger beforeQuery and afterQuery hooks - const result = await this.beforeQuery({ sql, params }) - const queryResult = { - success: true, - message: 'Query intercepted', - } - const afterResult = await this.afterQuery({ - sql, - result: queryResult, - isRaw: false, - }) - - return new Response( - JSON.stringify({ - success: true, - result: afterResult, - lastQuery: this.lastQuery, - lastResult: this.lastResult, - }), - { - headers: { 'Content-Type': 'application/json' }, - } - ) - } catch (err) { - const error = err as Error - return new Response( - JSON.stringify({ - success: false, - error: error.message, - }), - { - status: 500, - headers: { 'Content-Type': 'application/json' }, - } - ) - } - }) - - // Force sync endpoint - app.post('/force-sync', async (c: Context) => { - try { - // Simulate sync by running a test query - await this.beforeQuery({ sql: 'SELECT * FROM users' }) - - return new Response( - JSON.stringify({ - success: true, - message: 'Sync simulation triggered successfully', - }), - { - headers: { 'Content-Type': 'application/json' }, - } - ) - } catch (err) { - const error = err as Error - return new Response( - JSON.stringify({ - success: false, - error: error.message, - }), - { - status: 500, - headers: { 'Content-Type': 'application/json' }, - } - ) - } - }) - - // Debug endpoint - app.get('/debug', (c: Context) => { - return new Response( - JSON.stringify({ - plugin_name: this.name, - is_registered: this.isRegistered, - last_query: this.lastQuery, - last_result: this.lastResult, - data_sync_config: { - sync_interval: this.syncInterval, - tables: this.tables, - }, - }), - { - headers: { 'Content-Type': 'application/json' }, - } - ) - }) - - this.isRegistered = true - } - - async beforeQuery(opts: { sql: string; params?: unknown[] }) { - console.log('Demo plugin intercepting query:', opts.sql) - this.lastQuery = opts.sql - return opts - } - - async afterQuery(opts: { sql: string; result: any; isRaw: boolean }) { - console.log('Demo plugin received result for query:', opts.sql) - this.lastResult = opts.result - return opts.result - } -} - -// Initialize the plugin -const plugin = new DataSyncDemoPlugin() - -// Create and export the fetch handler -export default { - async fetch(request: Request, env: any, ctx: any) { - // Register the plugin if not already registered - await plugin.register(app) - - // Handle the request - return app.fetch(request, env, ctx) - }, -} diff --git a/plugins/data-sync/demo/wrangler.toml b/plugins/data-sync/demo/wrangler.toml deleted file mode 100644 index 051abad..0000000 --- a/plugins/data-sync/demo/wrangler.toml +++ /dev/null @@ -1,32 +0,0 @@ -# Demo configuration for data-sync plugin -name = 
"starbasedb-data-sync-demo" -main = "test.ts" -compatibility_date = "2024-01-24" - -[durable_objects] -bindings = [{ name = "DATABASE_DURABLE_OBJECT", class_name = "StarbaseDBDurableObject" }] - -[[migrations]] -tag = "v1" -new_classes = ["StarbaseDBDurableObject"] - -# External database source details -[vars] -# These should be set via environment variables in production -ADMIN_AUTHORIZATION_TOKEN = "" # Set via environment: ADMIN_TOKEN -CLIENT_AUTHORIZATION_TOKEN = "" # Set via environment: CLIENT_TOKEN -REGION = "auto" -EXTERNAL_DB_TYPE = "postgresql" -EXTERNAL_DB_HOST = "localhost" -EXTERNAL_DB_PORT = 5432 -EXTERNAL_DB_USER = "" # Set via environment: DB_USER -EXTERNAL_DB_PASS = "" # Set via environment: DB_PASSWORD -EXTERNAL_DB_DATABASE = "demo" -EXTERNAL_DB_DEFAULT_SCHEMA = "public" -ENABLE_ALLOWLIST = 0 -ENABLE_RLS = 0 - -# Plugin configuration -[plugins.data-sync] -sync_interval = 30 # Sync every 30 seconds for demo purposes -tables = ["users", "posts"] \ No newline at end of file diff --git a/plugins/data-sync/index.ts b/plugins/data-sync/index.ts index 64f3e71..d19f030 100644 --- a/plugins/data-sync/index.ts +++ b/plugins/data-sync/index.ts @@ -6,33 +6,89 @@ import { import { StarbasePlugin } from '../../src/plugin' import { DataSource } from '../../src/types' -interface SyncConfig { +export interface TableSyncConfig { + name: string + schema?: string // Optional schema name (for databases that support schemas) + timestamp_column?: string // Column to use for timestamp-based syncing (default: 'created_at') + id_column?: string // Column to use for id-based syncing (default: 'id') + batch_size?: number // Number of records to sync at once (default: 1000) +} + +export interface SyncConfig { sync_interval: number - tables: string[] + tables: (string | TableSyncConfig)[] // Can specify just table name or detailed config } -interface TableMetadata { +export interface TableMetadata { lastSyncTimestamp: number lastSyncId?: string | number + sync_errors?: string } -interface QueryResult { +export interface QueryResult { rows: any[][] columns: string[] } -interface MetadataRow { - table_name: string - last_sync_timestamp: number - last_sync_id: string +export interface ColumnDefinition { + name: string + type: string + nullable?: boolean + defaultValue?: string +} + +export interface SyncSourceConfig { + dialect: string + [key: string]: any } -declare global { - interface Window { - setTimeout: typeof setTimeout - clearTimeout: typeof clearTimeout - console: typeof console +// Abstract base class for database-specific sync implementations +export abstract class DatabaseSyncSource { + protected dataSource?: DataSource + protected config: SyncSourceConfig + + constructor(config: SyncSourceConfig) { + this.config = config + } + + abstract get dialect(): string + + async setDataSource(dataSource: DataSource): Promise { + this.dataSource = dataSource + console.log(`${this.dialect}SyncSource: DataSource set`, { + hasDataSource: !!this.dataSource, + hasExternal: !!this.dataSource?.external, + dialect: this.dataSource?.external?.dialect, + }) + } + + protected getExternalDataSource(): DataSource | undefined { + if (!this.dataSource?.external) { + console.error( + `${this.dialect}SyncSource: getExternalDataSource failed`, + { + hasDataSource: !!this.dataSource, + hasExternal: !!this.dataSource?.external, + dialect: this.dataSource?.external?.dialect, + } + ) + return undefined + } + return this.dataSource } + + abstract validateConnection(): Promise + abstract getTableSchema(tableName: string): 
 }
 
 export class DataSyncPlugin extends StarbasePlugin {
@@ -41,44 +97,175 @@ export class DataSyncPlugin extends StarbasePlugin {
     private config: SyncConfig
     private syncInterval: number
     private syncTimeouts: Map<string, ReturnType<typeof setTimeout>> = new Map()
     private tableMetadata: Map<string, TableMetadata> = new Map()
     private dataSource?: DataSource
-
-    constructor(opts?: { sync_interval?: number; tables?: string[] }) {
+    private syncSource: DatabaseSyncSource
+    private tableConfigs: Map<string, TableSyncConfig> = new Map()
+    private schemaTableMap: Map<string, string> = new Map() // Maps schema.table to SQLite table name
+
+    constructor(
+        syncSource: DatabaseSyncSource,
+        opts?: { sync_interval?: number; tables?: (string | TableSyncConfig)[] }
+    ) {
         super('starbasedb:data-sync', {
             requiresAuth: true,
         })
 
+        this.syncSource = syncSource
         this.config = {
             sync_interval: opts?.sync_interval || 300,
             tables: opts?.tables || [],
         }
-        this.syncInterval = this.config.sync_interval * 1000 // Convert to milliseconds
+        this.syncInterval = this.config.sync_interval * 1000
+
+        // Process table configurations
+        this.config.tables.forEach((tableConfig) => {
+            const config: TableSyncConfig =
+                typeof tableConfig === 'string'
+                    ? {
+                          name: this.parseTableName(tableConfig).table,
+                          schema: this.parseTableName(tableConfig).schema,
+                          timestamp_column: 'created_at',
+                          id_column: 'id',
+                          batch_size: 1000,
+                      }
+                    : {
+                          ...tableConfig,
+                          name: this.parseTableName(tableConfig.name).table,
+                          schema:
+                              tableConfig.schema ||
+                              this.parseTableName(tableConfig.name).schema,
+                          timestamp_column:
+                              tableConfig.timestamp_column || 'created_at',
+                          id_column: tableConfig.id_column || 'id',
+                          batch_size: tableConfig.batch_size || 1000,
+                      }
+
+            // Store the mapping between schema.table and SQLite table name
+            const sqliteTableName = this.getSQLiteTableName(config)
+            this.schemaTableMap.set(
+                this.getFullTableName(config),
+                sqliteTableName
+            )
+            this.tableConfigs.set(sqliteTableName, config)
+        })
+    }
+
+    // Parse a table name that might include a schema
+    private parseTableName(fullName: string): {
+        schema?: string
+        table: string
+    } {
+        const parts = fullName.split('.')
+        if (parts.length === 2) {
+            return { schema: parts[0], table: parts[1] }
+        }
+        return { table: fullName }
+    }
+
+    // Get the full table name including schema if present
+    private getFullTableName(config: TableSyncConfig): string {
+        return config.schema ? `${config.schema}.${config.name}` : config.name
+    }
+
+    // Get the SQLite table name based on configuration
+    private getSQLiteTableName(config: TableSyncConfig): string {
+        // For public schema, just use the table name with tmp_ prefix
+        if (!config.schema || config.schema === 'public') {
+            return `tmp_${config.name}`
+        }
+        // For other schemas, use tmp_schema_table format to avoid conflicts
+        return `tmp_${config.schema}_${config.name}`
+    }
+
+    // Transform a query that might use schema.table notation to use the correct SQLite table name
+    private transformQuery(sql: string): string {
+        let transformedSql = sql
+
+        // Replace all schema.table occurrences with their SQLite table names
+        for (const [fullName, sqliteName] of this.schemaTableMap.entries()) {
+            // Escape special characters in the table name for regex
+            const escapedName = fullName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
+            // Use word boundaries to avoid partial matches
+            const regex = new RegExp(`\\b${escapedName}\\b`, 'g')
+            transformedSql = transformedSql.replace(regex, sqliteName)
+        }
+
+        return transformedSql
+    }
+
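+    // Expose the normalized sync configuration: string entries are expanded
+    // into full TableSyncConfig objects with column and batch defaults applied.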
+
+    // Get the full table name including schema if present
+    private getFullTableName(config: TableSyncConfig): string {
+        return config.schema
+            ? `${config.schema}.${config.name}`
+            : config.name
+    }
+
+    // Get the SQLite table name based on configuration
+    private getSQLiteTableName(config: TableSyncConfig): string {
+        // For public schema, just use the table name with tmp_ prefix
+        if (!config.schema || config.schema === 'public') {
+            return `tmp_${config.name}`
+        }
+        // For other schemas, use tmp_schema_table format to avoid conflicts
+        return `tmp_${config.schema}_${config.name}`
+    }
+
+    // Transform a query that might use schema.table notation to use the correct SQLite table name
+    private transformQuery(sql: string): string {
+        let transformedSql = sql
+
+        // Replace all schema.table occurrences with their SQLite table names
+        for (const [fullName, sqliteName] of this.schemaTableMap.entries()) {
+            // Escape special characters in the table name for regex
+            const escapedName = fullName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
+            // Use word boundaries to avoid partial matches
+            const regex = new RegExp(`\\b${escapedName}\\b`, 'g')
+            transformedSql = transformedSql.replace(regex, sqliteName)
+        }
+
+        return transformedSql
+    }
+
+    public getConfig(): SyncConfig {
+        const tables = Array.from(this.tableConfigs.values())
+        return {
+            sync_interval: this.config.sync_interval,
+            tables,
+        }
+    }
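Taken together, these helpers decide where synced rows land and how user queries reach them. A sketch of the expected mappings, derived from the two methods above (table names illustrative):

```typescript
// getSQLiteTableName: external name -> internal replica table
//   'users'            -> 'tmp_users'
//   'public.products'  -> 'tmp_products'
//   'audit.logs'       -> 'tmp_audit_logs'
//
// transformQuery then rewrites incoming SQL against those replicas:
//   'SELECT * FROM audit.logs WHERE id > 5'
//   -> 'SELECT * FROM tmp_audit_logs WHERE id > 5'
```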
 
     override async register(app: StarbaseApp): Promise<void> {
         app.use(async (c: StarbaseContext, next: () => Promise<void>) => {
             this.dataSource = c?.get('dataSource')
 
-            // Create metadata table if it doesn't exist
-            await this.dataSource?.rpc.executeQuery({
-                sql: `
-                    CREATE TABLE IF NOT EXISTS data_sync_metadata (
+            if (this.dataSource) {
+                await this.syncSource.setDataSource(this.dataSource)
+                c.set('dataSource', this.dataSource)
+            }
+
+            await next()
+        })
+
+        // Validate connection to source database
+        const isValid = await this.syncSource.validateConnection()
+        if (!isValid) {
+            console.error(
+                `Database sync plugin: ${this.syncSource.dialect} connection not available or invalid`
+            )
+            return
+        }
+
+        // Create metadata table if it doesn't exist
+        await this.dataSource?.rpc.executeQuery({
+            sql: `
+                CREATE TABLE IF NOT EXISTS tmp_data_sync_metadata (
                     table_name TEXT PRIMARY KEY,
                     last_sync_timestamp INTEGER,
-                    last_sync_id TEXT
+                    last_sync_id TEXT,
+                    sync_errors TEXT
                 )
             `,
-                params: [],
-            })
-
-            await next()
+            params: [],
         })
 
-        // Load existing metadata
         await this.loadMetadata()
 
-        // Start sync for configured tables
-        for (const table of this.config.tables) {
-            await this.scheduleSyncForTable(table)
+        // Validate table structures before starting sync
+        const validationResults = await Promise.all(
+            Array.from(this.tableConfigs.entries()).map(
+                async ([tableName, config]) => {
+                    const result = await this.syncSource.validateTableStructure(
+                        tableName,
+                        config
+                    )
+                    if (!result.valid) {
+                        console.error(
+                            `Table validation failed for '${tableName}':`,
+                            result.errors
+                        )
+                    }
+                    return { tableName, ...result }
+                }
+            )
+        )
+
+        const validTables = validationResults
+            .filter((result) => result.valid)
+            .map((result) => result.tableName)
+
+        if (validTables.length === 0) {
+            console.error('No valid tables to sync')
+            return
+        }
+
+        for (const tableName of validTables) {
+            await this.scheduleSyncForTable(tableName)
         }
     }
 
@@ -86,7 +273,7 @@ export class DataSyncPlugin extends StarbasePlugin {
         if (!this.dataSource) return
 
         const result = (await this.dataSource.rpc.executeQuery({
-            sql: 'SELECT table_name, last_sync_timestamp, last_sync_id FROM data_sync_metadata',
+            sql: 'SELECT table_name, last_sync_timestamp, last_sync_id FROM tmp_data_sync_metadata',
             params: [],
         })) as QueryResult
@@ -111,7 +298,7 @@ export class DataSyncPlugin extends StarbasePlugin {
         if (!this.dataSource) return
 
         await this.dataSource.rpc.executeQuery({
-            sql: `INSERT OR REPLACE INTO data_sync_metadata (table_name, last_sync_timestamp, last_sync_id)
+            sql: `INSERT OR REPLACE INTO tmp_data_sync_metadata (table_name, last_sync_timestamp, last_sync_id)
                 VALUES (?, ?, ?)`,
             params: [
                 table,
@@ -122,122 +309,175 @@ export class DataSyncPlugin extends StarbasePlugin {
         this.tableMetadata.set(table, metadata)
     }
 
-    private async scheduleSyncForTable(table: string): Promise<void> {
+    private async scheduleSyncForTable(tableName: string): Promise<void> {
         const sync = async () => {
             try {
-                await this.syncTable(table)
+                await this.syncTable(tableName)
             } catch (error) {
-                console.error(`Error syncing table ${table}:`, error)
+                console.error(`Error syncing table ${tableName}:`, error)
             }
 
             // Schedule next sync
             const timeoutId = setTimeout(() => sync(), this.syncInterval)
-            this.syncTimeouts.set(table, timeoutId)
+            this.syncTimeouts.set(tableName, timeoutId)
         }
 
         await sync()
     }
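Worth noting: the scheduler chains `setTimeout` calls rather than using `setInterval`, so the next pass is armed only after the current one settles and slow syncs can never overlap. A condensed, standalone sketch of that pattern (names illustrative, not part of this patch):

```typescript
// Minimal re-arming loop, the same shape scheduleSyncForTable uses.
function startLoop(run: () => Promise<void>, intervalMs: number): void {
    const tick = async () => {
        try {
            await run() // perform one sync pass
        } catch (error) {
            console.error('sync pass failed:', error) // a failure still re-arms the loop
        }
        setTimeout(tick, intervalMs) // schedule the next pass only once this one is done
    }
    void tick()
}

// Usage sketch: startLoop(() => plugin.syncTable('tmp_users'), 30_000)
```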
 
-    private async syncTable(table: string): Promise<void> {
-        if (!this.dataSource?.external) return
-
-        const metadata = this.tableMetadata.get(table) || {
-            lastSyncTimestamp: 0,
-            lastSyncId: undefined,
-        }
+    public async syncTable(tableName: string): Promise<void> {
+        if (!this.dataSource) return
 
-        // Get table structure from external database
-        const tableInfo = (await this.dataSource.rpc.executeQuery({
-            sql: `SELECT column_name, data_type
-                FROM information_schema.columns
-                WHERE table_name = ?`,
-            params: [table],
-        })) as QueryResult
+        try {
+            const metadata = this.tableMetadata.get(tableName) || {
+                lastSyncTimestamp: 0,
+                lastSyncId: undefined,
+            }
 
-        // Create table in internal database if it doesn't exist
-        const createTableSQL = this.generateCreateTableSQL(
-            table,
-            tableInfo.rows
-        )
-        await this.dataSource.rpc.executeQuery({
-            sql: createTableSQL,
-            params: [],
-        })
+            const tableConfig = this.tableConfigs.get(tableName)
+            if (!tableConfig) {
+                throw new Error(
+                    `No configuration found for table '${tableName}'`
+                )
+            }
 
-        // Fetch new records from external database
-        let query = `SELECT * FROM ${table} WHERE created_at > ?`
-        const params = [new Date(metadata.lastSyncTimestamp).toISOString()]
+            console.log(`Starting sync for table ${tableName}`, {
+                config: tableConfig,
+                lastSync: metadata,
+            })
 
-        if (metadata.lastSyncId) {
-            query += ` OR id > ?`
-            params.push(metadata.lastSyncId.toString())
-        }
+            // Validate table structure before sync
+            const validation = await this.syncSource.validateTableStructure(
+                tableName,
+                tableConfig
+            )
+            if (!validation.valid) {
+                throw new Error(
+                    `Table validation failed: ${validation.errors.join(', ')}`
+                )
+            }
 
-        query += ` ORDER BY created_at ASC LIMIT 1000`
-        const result = (await this.dataSource.rpc.executeQuery({
-            sql: query,
-            params,
-        })) as QueryResult
+            // Get table structure using the sync plugin
+            const columns = await this.syncSource.getTableSchema(tableName)
+            console.log(`Retrieved schema for table ${tableName}`, { columns })
+
+            // Create table in internal database if it doesn't exist
+            const createTableSQL = this.generateCreateTableSQL(
+                tableName,
+                columns
+            )
+            await this.dataSource.rpc.executeQuery({
+                sql: createTableSQL,
+                params: [],
+            })
 
-        if (result.rows.length > 0) {
-            // Insert new records into internal database
-            for (const row of result.rows) {
-                const record = row.reduce((obj: any, val: any, idx: number) => {
-                    obj[result.columns[idx]] = val
-                    return obj
-                }, {})
-
-                const columns = Object.keys(record)
-                const values = Object.values(record)
-                const placeholders = Array(values.length).fill('?').join(',')
-
-                await this.dataSource.rpc.executeQuery({
-                    sql: `INSERT OR REPLACE INTO ${table} (${columns.join(',')}) VALUES (${placeholders})`,
-                    params: values,
-                })
-
-                // Update metadata
-                metadata.lastSyncTimestamp = new Date(
-                    record.created_at
-                ).getTime()
-                metadata.lastSyncId = record.id
+            // Fetch new records using the sync plugin with table config
+            const result = await this.syncSource.getIncrementalData(
+                tableName,
+                metadata,
+                tableConfig
+            )
+            console.log(
+                `Retrieved ${result.rows.length} new records for table ${tableName}`
+            )
+
+            if (result.rows.length > 0) {
+                let syncedCount = 0
+                // Insert new records into internal database
+                for (const row of result.rows) {
+                    try {
+                        const record = row.reduce(
+                            (
+                                obj: Record<string, any>,
+                                val: any,
+                                idx: number
+                            ) => {
+                                obj[result.columns[idx]] = val
+                                return obj
+                            },
+                            {}
+                        )
+
+                        const columns = Object.keys(record)
+                        const values = Object.values(record)
+                        const placeholders = Array(values.length)
+                            .fill('?')
+                            .join(',')
+
+                        await this.dataSource.rpc.executeQuery({
+                            sql: `INSERT OR REPLACE INTO ${tableName} (${columns.join(',')}) VALUES (${placeholders})`,
+                            params: values,
+                        })
+
+                        // Update metadata using configured columns
+                        const timestampCol = tableConfig.timestamp_column
+                        const idCol = tableConfig.id_column
+
+                        if (timestampCol && record[timestampCol]) {
+                            metadata.lastSyncTimestamp = new Date(
+                                record[timestampCol]
+                            ).getTime()
+                        }
+                        if (idCol && record[idCol]) {
+                            metadata.lastSyncId = record[idCol]
+                        }
+
+                        syncedCount++
+                    } catch (error) {
+                        console.error(
+                            `Error syncing record in table ${tableName}:`,
+                            error,
+                            {
+                                record: row,
+                                columns: result.columns,
+                            }
+                        )
+                        // Continue with next record
+                    }
+                }
+
+                console.log(
+                    `Successfully synced ${syncedCount}/${result.rows.length} records for table ${tableName}`
+                )
+                await this.updateMetadata(tableName, metadata)
             }
-
-            await this.updateMetadata(table, metadata)
+        } catch (error) {
+            console.error(`Error syncing table ${tableName}:`, error)
+            // Update metadata with error
+            const currentMetadata = this.tableMetadata.get(tableName) || {
+                lastSyncTimestamp: 0,
+                lastSyncId: undefined,
+            }
+            await this.updateMetadata(tableName, {
+                ...currentMetadata,
+                sync_errors: (error as Error).message,
+            })
+            throw error
         }
     }
 
-    private generateCreateTableSQL(table: string, rows: any[][]): string {
-        const columnDefs = rows.map((row) => {
-            const columnName = row[0]
-            const dataType = row[1]
-            const sqlType = this.mapDataType(dataType)
-            return `${columnName} ${sqlType}`
-        })
+    private generateCreateTableSQL(
+        table: string,
+        columns: ColumnDefinition[]
+    ): string {
+        const columnDefs = columns.map((col) => {
+            const sqlType = this.syncSource.mapDataType(col.type)
+            let definition = `${col.name} ${sqlType}`
+
+            // Add nullable constraint
+            if (col.nullable === false) {
+                definition += ' NOT NULL'
+            }
 
-        return `CREATE TABLE IF NOT EXISTS ${table} (${columnDefs.join(', ')})`
-    }
+            // Add default value if specified
+            if (col.defaultValue !== undefined) {
+                definition += ` DEFAULT ${col.defaultValue}`
+            }
 
-    private mapDataType(pgType: string): string {
-        // Map PostgreSQL types to SQLite types
-        const typeMap: { [key: string]: string } = {
-            integer: 'INTEGER',
-            bigint: 'INTEGER',
-            text: 'TEXT',
-            varchar: 'TEXT',
-            char: 'TEXT',
-            boolean: 'INTEGER',
-            timestamp: 'TEXT',
-            date: 'TEXT',
-            numeric: 'REAL',
-            decimal: 'REAL',
-            real: 'REAL',
-            'double precision': 'REAL',
-            json: 'TEXT',
-            jsonb: 'TEXT',
-        }
+            return definition
+        })
 
-        return typeMap[pgType.toLowerCase()] || 'TEXT'
+        return `CREATE TABLE IF NOT EXISTS ${table} (${columnDefs.join(', ')})`
     }
 
     async destroy(): Promise<void> {
@@ -247,4 +487,58 @@ export class DataSyncPlugin extends StarbasePlugin {
             this.syncTimeouts.delete(table)
         }
     }
+
+    public async getMetadata(): Promise<{
+        lastSyncTimestamp?: number
+        sync_errors?: string
+    } | null> {
+        if (!this.dataSource) return null
+
+        try {
+            const result = (await this.dataSource.rpc.executeQuery({
+                sql: 'SELECT last_sync_timestamp, sync_errors FROM tmp_data_sync_metadata LIMIT 1',
+                params: [],
+            })) as QueryResult
+
+            if (result.rows.length === 0) return null
+
+            return {
+                lastSyncTimestamp: result.rows[0][0] as number,
+                sync_errors: result.rows[0][1] as string,
+            }
+        } catch (error) {
+            console.error('Error getting sync metadata:', error)
+            return null
+        }
+    }
+
+    public async setDataSource(dataSource: DataSource): Promise<void> {
+        this.dataSource = dataSource
+        await this.syncSource.setDataSource(dataSource)
+        console.log('DataSyncPlugin: DataSource set', {
+            hasDataSource: !!this.dataSource,
+            hasExternal: !!this.dataSource?.external,
+            dialect: this.dataSource?.external?.dialect,
+        })
+    }
+
+    // Hook to transform queries before execution
+    async beforeQuery(opts: {
+        sql: string
+        params?: unknown[]
+    }): Promise<{ sql: string; params?: unknown[] }> {
+        return {
+            ...opts,
+            sql: this.transformQuery(opts.sql),
+        }
+    }
+
+    // Hook to handle query results
+    async afterQuery(opts: {
+        sql: string
+        result: any
+        isRaw: boolean
+    }): Promise<any> {
+        return opts.result
+    }
 }
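Since `beforeQuery` is the public entry point for the rewrite logic, a short sketch of what a caller should observe (table name hypothetical, assuming the plugin was configured with `tables: ['analytics.events']`):

```typescript
const { sql } = await dataSyncPlugin.beforeQuery({
    sql: 'SELECT count(*) FROM analytics.events',
})
// sql is now 'SELECT count(*) FROM tmp_analytics_events', so reads hit the local
// replica instead of the external source; afterQuery passes results through unchanged.
console.log(sql)
```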
diff --git a/plugins/postgres-sync/index.ts b/plugins/postgres-sync/index.ts
new file mode 100644
index 0000000..17888c4
--- /dev/null
+++ b/plugins/postgres-sync/index.ts
@@ -0,0 +1,369 @@
+import {
+    DatabaseSyncSource,
+    QueryResult,
+    TableMetadata,
+    ColumnDefinition,
+    SyncSourceConfig,
+    TableSyncConfig,
+} from '../data-sync'
+
+export interface PostgresSyncConfig extends SyncSourceConfig {
+    dialect: 'postgresql'
+    schema?: string
+}
+
+export class PostgresSyncSource extends DatabaseSyncSource {
+    private schema: string
+
+    constructor(config: PostgresSyncConfig) {
+        super(config)
+        this.schema = config.schema || 'public'
+    }
+
+    get dialect(): string {
+        return 'postgresql'
+    }
+
+    override async validateConnection(): Promise<boolean> {
+        const dataSource = this.getExternalDataSource()
+        if (!dataSource?.external) {
+            console.error(
+                'PostgresSyncSource: No external database connection available',
+                {
+                    hasDataSource: !!this.dataSource,
+                    hasExternal: !!this.dataSource?.external,
+                }
+            )
+            return false
+        }
+
+        // Check if it's PostgreSQL
+        if (dataSource.external.dialect !== 'postgresql') {
+            console.error('PostgresSyncSource: Database is not PostgreSQL:', {
+                dialect: dataSource.external.dialect,
+                hasDataSource: !!this.dataSource,
+                hasExternal: !!this.dataSource?.external,
+            })
+            return false
+        }
+
+        try {
+            const result = await dataSource.rpc.executeQuery({
+                sql: 'SELECT version()',
+                params: [],
+            })
+            console.log('PostgreSQL connection validated:', result)
+            return true
+        } catch (error) {
+            console.error('PostgreSQL connection failed:', error)
+            return false
+        }
+    }
+
+    async getTableSchema(tableName: string): Promise<ColumnDefinition[]> {
+        const dataSource = this.getExternalDataSource()
+        if (
+            !dataSource?.external ||
+            dataSource.external.dialect !== 'postgresql'
+        ) {
+            console.error(
+                'PostgresSyncSource: Cannot get table schema - invalid dataSource',
+                {
+                    hasDataSource: !!this.dataSource,
+                    hasExternal: !!this.dataSource?.external,
+                    dialect: this.dataSource?.external?.dialect,
+                }
+            )
+            return []
+        }
+
+        try {
+            const result = (await dataSource.rpc.executeQuery({
+                sql: `
+                    SELECT
+                        column_name,
+                        data_type,
+                        udt_name,
+                        is_nullable = 'YES' as is_nullable,
+                        column_default,
+                        character_maximum_length
+                    FROM information_schema.columns
+                    WHERE table_schema = ? AND table_name = ?
+                    ORDER BY ordinal_position
+                `,
+                params: [this.schema, tableName],
+            })) as QueryResult
+
+            return result.rows.map((row) => ({
+                name: row[0] as string,
+                type: this.normalizePostgresType(
+                    row[1] as string,
+                    row[2] as string,
+                    row[5] as number
+                ),
+                nullable: row[3] as boolean,
+                defaultValue: this.normalizeDefaultValue(row[4] as string),
+            }))
+        } catch (error) {
+            console.error(
+                `PostgresSyncSource: Error getting schema for table ${tableName}:`,
+                error
+            )
+            return []
+        }
+    }
+
+    private normalizePostgresType(
+        dataType: string,
+        udtName: string,
+        maxLength?: number
+    ): string {
+        // Handle array types
+        if (udtName.startsWith('_')) {
+            return `${udtName.slice(1)}[]`
+        }
+
+        // Handle varchar/char with length
+        if (
+            (dataType === 'character varying' || dataType === 'character') &&
+            maxLength
+        ) {
+            return `${dataType}(${maxLength})`
+        }
+
+        // Handle special types
+        switch (udtName) {
+            case 'timestamptz':
+                return 'timestamp with time zone'
+            case 'timestamp':
+                return 'timestamp without time zone'
+            default:
+                return dataType
+        }
+    }
+
+    private normalizeDefaultValue(
+        defaultValue: string | null
+    ): string | undefined {
+        if (!defaultValue) return undefined
+
+        // Handle sequence defaults
+        if (defaultValue.includes('nextval')) {
+            return undefined // Let SQLite handle auto-increment
+        }
+
+        // Remove type casting
+        const valueMatch = defaultValue.match(/'([^']*)'/)
+        if (valueMatch) {
+            return valueMatch[1]
+        }
+
+        return defaultValue
+    }
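A few input/output pairs make the normalization rules above easier to check. These are derived directly from the branches in the two helpers (values illustrative):

```typescript
// normalizePostgresType('character varying', 'varchar', 255)      -> 'character varying(255)'
// normalizePostgresType('ARRAY', '_int4')                         -> 'int4[]'
// normalizePostgresType('timestamp with time zone', 'timestamptz') -> 'timestamp with time zone'
//
// normalizeDefaultValue("nextval('users_id_seq'::regclass)") -> undefined (SQLite auto-increments)
// normalizeDefaultValue("'active'::character varying")       -> 'active'
// normalizeDefaultValue('42')                                -> '42'
```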
+
+    async getIncrementalData(
+        tableName: string,
+        lastSync: TableMetadata,
+        tableConfig: TableSyncConfig
+    ): Promise<QueryResult> {
+        const dataSource = this.getExternalDataSource()
+        if (
+            !dataSource?.external ||
+            dataSource.external.dialect !== 'postgresql'
+        ) {
+            console.error(
+                'PostgresSyncSource: Cannot get incremental data - invalid dataSource',
+                {
+                    hasDataSource: !!this.dataSource,
+                    hasExternal: !!this.dataSource?.external,
+                    dialect: this.dataSource?.external?.dialect,
+                }
+            )
+            return { rows: [], columns: [] }
+        }
+
+        try {
+            const timestampColumn = tableConfig.timestamp_column || 'created_at'
+            const idColumn = tableConfig.id_column || 'id'
+            const batchSize = tableConfig.batch_size || 1000
+
+            let query = `SELECT * FROM ${this.schema}.${tableName}`
+            const params: any[] = []
+
+            // Build WHERE clause based on available sync columns
+            const conditions: string[] = []
+
+            if (lastSync.lastSyncTimestamp && timestampColumn) {
+                conditions.push(`${timestampColumn} > ?`)
+                params.push(new Date(lastSync.lastSyncTimestamp).toISOString())
+            }
+
+            if (lastSync.lastSyncId && idColumn) {
+                conditions.push(`${idColumn} > ?`)
+                params.push(lastSync.lastSyncId.toString())
+            }
+
+            if (conditions.length > 0) {
+                query += ` WHERE ${conditions.join(' OR ')}`
+            }
+
+            // Order by configured columns
+            const orderBy: string[] = []
+            if (timestampColumn) orderBy.push(timestampColumn)
+            if (idColumn && idColumn !== timestampColumn) orderBy.push(idColumn)
+
+            if (orderBy.length > 0) {
+                query += ` ORDER BY ${orderBy.join(', ')} ASC`
+            }
+
+            query += ` LIMIT ${batchSize}`
+
+            return (await dataSource.rpc.executeQuery({
+                sql: query,
+                params,
+            })) as QueryResult
+        } catch (error) {
+            console.error(
+                `PostgresSyncSource: Error getting incremental data for table ${tableName}:`,
+                error
+            )
+            return { rows: [], columns: [] }
+        }
+    }
+
+    mapDataType(pgType: string): string {
+        // Map PostgreSQL types to SQLite types
+        const typeMap: { [key: string]: string } = {
+            integer: 'INTEGER',
+            bigint: 'INTEGER',
+            smallint: 'INTEGER',
+            text: 'TEXT',
+            varchar: 'TEXT',
+            'character varying': 'TEXT',
+            char: 'TEXT',
+            character: 'TEXT',
+            boolean: 'INTEGER',
+            date: 'TEXT',
+            timestamp: 'TEXT',
+            'timestamp with time zone': 'TEXT',
+            'timestamp without time zone': 'TEXT',
+            numeric: 'REAL',
+            decimal: 'REAL',
+            real: 'REAL',
+            'double precision': 'REAL',
+            json: 'TEXT',
+            jsonb: 'TEXT',
+            uuid: 'TEXT',
+            bytea: 'BLOB',
+            interval: 'TEXT',
+            point: 'TEXT',
+            line: 'TEXT',
+            polygon: 'TEXT',
+            cidr: 'TEXT',
+            inet: 'TEXT',
+            macaddr: 'TEXT',
+            bit: 'INTEGER',
+            'bit varying': 'INTEGER',
+            money: 'REAL',
+            xml: 'TEXT',
+        }
+
+        // Handle array types
+        if (pgType.endsWith('[]')) {
+            return 'TEXT' // Store arrays as JSON text in SQLite
+        }
+
+        return typeMap[pgType.toLowerCase()] || 'TEXT'
+    }
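For a table tracked by both sync columns, the SQL assembled above takes the following shape on the second and later passes (the first pass has no WHERE clause). Note the conditions are OR-ed, so a row can occasionally be fetched twice; the plugin's `INSERT OR REPLACE` writes make that harmless. A sketch with illustrative values:

```typescript
// Generated for { timestamp_column: 'created_at', id_column: 'id', batch_size: 1000 }:
const generatedSql = `
    SELECT * FROM public.users
    WHERE created_at > ? OR id > ?
    ORDER BY created_at, id ASC
    LIMIT 1000
`
// params: ['2025-01-24T00:00:00.000Z', '42'] — last synced timestamp and id
```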
+
+    async validateTableStructure(
+        tableName: string,
+        tableConfig: TableSyncConfig
+    ): Promise<{ valid: boolean; errors: string[] }> {
+        const dataSource = this.getExternalDataSource()
+        if (!dataSource?.external) {
+            return {
+                valid: false,
+                errors: ['External database connection not available'],
+            }
+        }
+
+        const errors: string[] = []
+        try {
+            // Check if table exists
+            const tableExists = (await dataSource.rpc.executeQuery({
+                sql: `
+                    SELECT EXISTS (
+                        SELECT FROM information_schema.tables
+                        WHERE table_schema = ? AND table_name = ?
+                    )
+                `,
+                params: [this.schema, tableName],
+            })) as QueryResult
+
+            if (!tableExists.rows[0][0]) {
+                errors.push(
+                    `Table '${tableName}' does not exist in schema '${this.schema}'`
+                )
+                return { valid: false, errors }
+            }
+
+            // Check configured sync columns exist and have appropriate types
+            const columns = await this.getTableSchema(tableName)
+            const columnMap = new Map(columns.map((col) => [col.name, col]))
+
+            if (tableConfig.timestamp_column) {
+                const col = columnMap.get(tableConfig.timestamp_column)
+                if (!col) {
+                    errors.push(
+                        `Configured timestamp column '${tableConfig.timestamp_column}' missing from table '${tableName}'`
+                    )
+                } else if (!this.isTimestampType(col.type)) {
+                    errors.push(
+                        `Column '${tableConfig.timestamp_column}' is not a timestamp type (found: ${col.type})`
+                    )
+                }
+            }
+
+            if (tableConfig.id_column) {
+                const col = columnMap.get(tableConfig.id_column)
+                if (!col) {
+                    errors.push(
+                        `Configured ID column '${tableConfig.id_column}' missing from table '${tableName}'`
+                    )
+                }
+            }
+
+            // If no sync columns are configured, require at least one
+            if (!tableConfig.timestamp_column && !tableConfig.id_column) {
+                errors.push(
+                    `No sync columns configured for table '${tableName}'. At least one of timestamp_column or id_column is required.`
+                )
+            }
+
+            return {
+                valid: errors.length === 0,
+                errors,
+            }
+        } catch (error) {
+            console.error(
+                `PostgresSyncSource: Error validating table structure for '${tableName}':`,
+                error
+            )
+            return {
+                valid: false,
+                errors: [(error as Error).message],
+            }
+        }
+    }
+
+    private isTimestampType(type: string): boolean {
+        return [
+            'timestamp',
+            'timestamp with time zone',
+            'timestamp without time zone',
+            'date',
+            'timestamptz',
+        ].includes(type.toLowerCase())
+    }
+}

From 40023d1ee4f3c9306af1336d43cbe8a54412d689 Mon Sep 17 00:00:00 2001
From: David Anyatonwu
Date: Mon, 27 Jan 2025 18:33:43 +0100
Subject: [PATCH 3/4] docs: add documentation for data sync plugins

---
 plugins/data-sync/README.md     | 197 ++++++++++++++++++++------------
 plugins/postgres-sync/README.md | 146 +++++++++++++++++++++++
 2 files changed, 268 insertions(+), 75 deletions(-)
 create mode 100644 plugins/postgres-sync/README.md

diff --git a/plugins/data-sync/README.md b/plugins/data-sync/README.md
index c68b809..2e9bee6 100644
--- a/plugins/data-sync/README.md
+++ b/plugins/data-sync/README.md
@@ -1,111 +1,158 @@
 # Data Sync Plugin
 
-The Data Sync plugin enables automatic synchronization of data from external data sources (like PostgreSQL) to StarbaseDB's internal SQLite database. This plugin is useful for creating a close-to-edge replica of your data that can be queried as an alternative to querying the external database directly.
+The Data Sync plugin enables automatic data synchronization between external data sources and StarbaseDB's internal SQLite database. It creates close-to-edge replicas by pulling data from external sources at a configurable interval.
 
 ## Features
 
-- Automatic synchronization of specified tables from external to internal database
-- Configurable sync interval
-- Incremental updates based on timestamps and IDs
-- Automatic schema mapping from PostgreSQL to SQLite types
-- Persistent tracking of sync state
-- Graceful handling of connection issues and errors
-- Query interception hooks for monitoring and modification
-- Debug endpoints for monitoring sync status
+- Automatic data synchronization from external sources
+- Configurable sync interval and per-table batch sizes
+- Selective table synchronization
+- Incremental updates using timestamp and/or ID columns
+- Schema-aware table synchronization
+- Comprehensive type mapping
+- Error handling and retry logic
+- Sync state tracking and monitoring
 
 ## Installation
 
-The plugin is included in the StarbaseDB core package. To use it, simply configure it in your `wrangler.toml` file:
-
 ```toml
+# wrangler.toml
 [plugins.data-sync]
-sync_interval = 300 # Sync interval in seconds (default: 300)
-tables = ["users", "products"] # List of tables to synchronize
+sync_interval = 300 # 5 minutes
+tables = [
+    "users",           # Simple table (public schema)
+    "public.products", # Explicit public schema
+    "users.profile",   # Custom schema
+    { name = "orders", timestamp_column = "created_at" },                     # Config object with default schema
+    { name = "audit.logs", timestamp_column = "logged_at", batch_size = 500 } # Config object with schema in name
+]
 ```
 
-## Configuration Options
-
-| Option          | Type     | Description                                      | Default |
-| --------------- | -------- | ------------------------------------------------ | ------- |
-| `sync_interval` | number   | The interval in seconds between sync operations  | 300     |
-| `tables`        | string[] | Array of table names to synchronize              | []      |
+## Configuration
 
-## How It Works
+### Basic Configuration
 
-1. The plugin creates a metadata table in the internal database to track sync state
-2. For each configured table:
-    - Retrieves the table schema from the external database
-    - Creates a corresponding table in the internal database
-    - Periodically checks for new or updated records based on `created_at` timestamp and `id`
-    - Syncs new data to the internal database
-    - Updates the sync state in the metadata table
-3. Provides hooks for query interception:
-    - `beforeQuery`: For monitoring or modifying queries before execution
-    - `afterQuery`: For processing results after query execution
+- `sync_interval`: Time between sync operations in seconds (default: 300)
+- `tables`: Array of tables to sync (each entry is either a string or a config object)
 
-## Requirements
+### Table Configuration Options
 
-- The external database tables must have:
-    - A `created_at` timestamp column for tracking changes
-    - An `id` column (numeric or string) for tracking record identity
-- The external database must support the `information_schema` for retrieving table metadata
+- `name`: Table name (can include schema, e.g., "schema.table")
+- `schema`: Database schema (optional, defaults to "public")
+- `timestamp_column`: Column for timestamp-based syncing (default: "created_at")
+- `id_column`: Column for ID-based syncing (default: "id")
+- `batch_size`: Number of records to sync per batch (default: 1000)
 
-## Type Mapping
+### Environment Variables
 
-The plugin automatically maps PostgreSQL types to SQLite types:
+```env
+EXTERNAL_DB_TYPE=postgresql
+EXTERNAL_DB_HOST=localhost
+EXTERNAL_DB_PORT=5432
+EXTERNAL_DB_USER=postgres
+EXTERNAL_DB_PASS=postgres
+EXTERNAL_DB_DATABASE=demo
+EXTERNAL_DB_DEFAULT_SCHEMA=public
+```
 
-| PostgreSQL Type                           | SQLite Type |
-| ----------------------------------------- | ----------- |
-| integer, bigint                           | INTEGER     |
-| text, varchar, char                       | TEXT        |
-| boolean                                   | INTEGER     |
-| timestamp, date                           | TEXT        |
-| numeric, decimal, real, double precision  | REAL        |
-| json, jsonb                               | TEXT        |
+## Usage
 
-## Example Usage
+### Basic Usage
 
 ```typescript
-import { DataSyncPlugin } from '@starbasedb/plugins/data-sync'
+import { DataSyncPlugin } from '@starbasedb/data-sync'
+import { PostgresSyncSource } from '@starbasedb/postgres-sync'
 
-// Initialize the plugin
-const dataSyncPlugin = new DataSyncPlugin({
-    sync_interval: 300, // 5 minutes
-    tables: ['users', 'orders'],
+// Create a sync source for your database
+const postgresSync = new PostgresSyncSource({
+    dialect: 'postgresql',
+    schema: 'public',
 })
 
-// Add to your StarbaseDB configuration
-const config = {
-    plugins: [dataSyncPlugin],
-    // ... other config options
-}
+// Create the plugin
+const dataSyncPlugin = new DataSyncPlugin(postgresSync, {
+    sync_interval: 300,
+    tables: ['users', 'products'],
+})
+
+// Register with your app
+app.register(dataSyncPlugin)
+```
+
+### Advanced Usage
+
+```typescript
+const dataSyncPlugin = new DataSyncPlugin(postgresSync, {
+    sync_interval: 300,
+    tables: [
+        // Simple table with defaults
+        'users',
+
+        // Custom sync configuration
+        {
+            name: 'orders',
+            timestamp_column: 'order_date',
+            id_column: 'order_id',
+            batch_size: 500,
+        },
+
+        // Schema-specific table
+        {
+            name: 'audit.logs',
+            timestamp_column: 'logged_at',
+            batch_size: 200,
+        },
+    ],
+})
 ```
 
-## Demo
+## Table Naming
+
+The plugin prefixes all synced tables with `tmp_` to distinguish them from user-created tables:
+
+- `users` → `tmp_users`
+- `public.products` → `tmp_products`
+- `audit.logs` → `tmp_audit_logs`
+
+## Monitoring
+
+Monitor sync status using the provided endpoints:
 
-A complete demo implementation is available in the `demo` directory. The demo shows:
+```bash
+# Check sync status
+curl http://localhost:8787/sync-status
 
-- Setting up the plugin with PostgreSQL
-- Using query hooks for monitoring
-- Testing sync functionality
-- Debugging and monitoring endpoints
+# View synced data
+curl http://localhost:8787/sync-data
 
-See [Demo README](./demo/README.md) for detailed instructions.
+# Debug information
+curl http://localhost:8787/debug
+```
+
+## Error Handling
 
-## Limitations
+The plugin provides comprehensive error handling:
 
-- The plugin currently assumes the presence of `created_at` and `id` columns
-- Large tables may take longer to sync initially
-- Deleted records in the external database are not automatically removed from the internal database
-- The sync operation is pull-based and runs on a fixed interval
+- Failed records are logged but don't stop the sync process
+- Sync errors are stored in metadata
+- Automatic retries on next sync interval
+- Detailed error logging with context
 
-## Security Notes
+## Extending
 
-- Always use secure, randomly generated tokens for authentication
-- Store sensitive credentials in environment variables
-- In production, enable authentication and use secure database credentials
-- The demo uses example tokens (like "ABC123") for illustration only
+Support for new databases can be added by implementing the `DatabaseSyncSource` abstract class:
 
-## Contributing
+```typescript
+class MySQLSyncSource extends DatabaseSyncSource {
+    // Implement required methods
+}
+```
 
-Contributions are welcome! Please feel free to submit a Pull Request.
+See the PostgreSQL implementation for a reference; a fuller sketch follows below.
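As promised above, here is a fuller, still hypothetical sketch of what a new source might look like. `MySQLSyncSource` is not part of this patch; every query and type name below is an illustrative assumption, untested against a real MySQL instance:

```typescript
import {
    DatabaseSyncSource,
    ColumnDefinition,
    QueryResult,
    SyncSourceConfig,
    TableMetadata,
    TableSyncConfig,
} from '../data-sync'

// Hypothetical MySQL source sketch; queries are assumptions, not tested code.
export class MySQLSyncSource extends DatabaseSyncSource {
    get dialect(): string {
        return 'mysql'
    }

    async validateConnection(): Promise<boolean> {
        const ds = this.getExternalDataSource()
        if (!ds?.external || ds.external.dialect !== 'mysql') return false
        try {
            await ds.rpc.executeQuery({ sql: 'SELECT VERSION()', params: [] })
            return true
        } catch {
            return false
        }
    }

    async getTableSchema(tableName: string): Promise<ColumnDefinition[]> {
        const ds = this.getExternalDataSource()
        if (!ds) return []
        const result = (await ds.rpc.executeQuery({
            sql: `SELECT column_name, data_type, is_nullable = 'YES', column_default
                  FROM information_schema.columns
                  WHERE table_name = ? ORDER BY ordinal_position`,
            params: [tableName],
        })) as QueryResult
        return result.rows.map((row) => ({
            name: row[0] as string,
            type: row[1] as string,
            nullable: row[2] as boolean,
            defaultValue: (row[3] as string) ?? undefined,
        }))
    }

    async getIncrementalData(
        tableName: string,
        lastSync: TableMetadata,
        tableConfig: TableSyncConfig
    ): Promise<QueryResult> {
        const ds = this.getExternalDataSource()
        if (!ds) return { rows: [], columns: [] }
        const ts = tableConfig.timestamp_column || 'created_at'
        return (await ds.rpc.executeQuery({
            sql: `SELECT * FROM ${tableName} WHERE ${ts} > ? ORDER BY ${ts} ASC LIMIT ${tableConfig.batch_size || 1000}`,
            params: [new Date(lastSync.lastSyncTimestamp).toISOString()],
        })) as QueryResult
    }

    mapDataType(sourceType: string): string {
        const map: Record<string, string> = {
            int: 'INTEGER',
            bigint: 'INTEGER',
            varchar: 'TEXT',
            text: 'TEXT',
            datetime: 'TEXT',
            decimal: 'REAL',
        }
        return map[sourceType.toLowerCase()] || 'TEXT'
    }

    async validateTableStructure(
        tableName: string,
        tableConfig: TableSyncConfig
    ): Promise<{ valid: boolean; errors: string[] }> {
        const names = new Set(
            (await this.getTableSchema(tableName)).map((c) => c.name)
        )
        const errors: string[] = []
        if (tableConfig.timestamp_column && !names.has(tableConfig.timestamp_column)) {
            errors.push(`Missing timestamp column '${tableConfig.timestamp_column}'`)
        }
        return { valid: errors.length === 0, errors }
    }
}
```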
diff --git a/plugins/postgres-sync/README.md b/plugins/postgres-sync/README.md
new file mode 100644
index 0000000..c2304b3
--- /dev/null
+++ b/plugins/postgres-sync/README.md
@@ -0,0 +1,146 @@
+# PostgreSQL Sync Source
+
+A PostgreSQL implementation of the `DatabaseSyncSource` for the StarbaseDB Data Sync plugin. It enables synchronization of data from PostgreSQL databases to StarbaseDB's internal SQLite database.
+
+## Features
+
+- PostgreSQL-specific schema and type handling
+- Support for all major PostgreSQL data types
+- Schema-aware table synchronization
+- Comprehensive type mapping to SQLite
+- Array type support (stored as JSON)
+- Default value handling
+- Sequence handling for auto-increment fields
+
+## Installation
+
+```toml
+# wrangler.toml
+[plugins.data-sync]
+sync_interval = 300
+tables = ["users", "products"]
+
+[plugins.postgres-sync]
+schema = "public" # optional, defaults to "public"
+```
+
+## Configuration
+
+### Environment Variables
+
+```env
+EXTERNAL_DB_TYPE=postgresql
+EXTERNAL_DB_HOST=localhost
+EXTERNAL_DB_PORT=5432
+EXTERNAL_DB_USER=postgres
+EXTERNAL_DB_PASS=postgres
+EXTERNAL_DB_DATABASE=demo
+EXTERNAL_DB_DEFAULT_SCHEMA=public
+```
+
+## Usage
+
+```typescript
+import { DataSyncPlugin } from '@starbasedb/data-sync'
+import { PostgresSyncSource } from '@starbasedb/postgres-sync'
+
+// Create PostgreSQL sync source
+const postgresSync = new PostgresSyncSource({
+    dialect: 'postgresql',
+    schema: 'public', // optional
+})
+
+// Create data sync plugin with PostgreSQL source
+const dataSyncPlugin = new DataSyncPlugin(postgresSync, {
+    sync_interval: 300,
+    tables: ['users', 'products'],
+})
+
+// Register with your app
+app.register(dataSyncPlugin)
+```
+
+## Type Mapping
+
+PostgreSQL types are automatically mapped to SQLite types:
+
+| PostgreSQL Type           | SQLite Type |
+| ------------------------- | ----------- |
+| integer, bigint, smallint | INTEGER     |
+| text, varchar, char       | TEXT        |
+| boolean                   | INTEGER     |
+| timestamp, date           | TEXT        |
+| numeric, decimal, real    | REAL        |
+| json, jsonb               | TEXT        |
+| uuid                      | TEXT        |
+| bytea                     | BLOB        |
+| array types               | TEXT (JSON) |
+| interval                  | TEXT        |
+| point, line, polygon      | TEXT        |
+| cidr, inet, macaddr       | TEXT        |
+| bit, bit varying          | INTEGER     |
+| money                     | REAL        |
+| xml                       | TEXT        |
+
+## Schema Support
+
+The plugin supports PostgreSQL schemas:
+
+```typescript
+const dataSyncPlugin = new DataSyncPlugin(postgresSync, {
+    tables: [
+        'public.users', // public schema
+        'analytics.events', // custom schema
+        {
+            name: 'audit.logs', // schema in config
+            timestamp_column: 'logged_at',
+        },
+    ],
+})
+```
+
+## Default Value Handling
+
+- Sequence defaults (`nextval`) are dropped so SQLite's rowid auto-increment takes over
+- Type-cast defaults are properly parsed
+- NULL defaults are preserved
+- Constant values are preserved
+
+## Validation
+
+The plugin validates:
+
+- Table existence in specified schema
+- Column existence and types
+- Timestamp column types
+- Required sync columns (timestamp or ID)
+
+## Error Handling
+
+Comprehensive error handling for:
+
+- Connection issues
+- Schema validation
+- Type mapping
+- Query execution
+- Data conversion
+
+## Requirements
+
+- PostgreSQL 9.5 or later
+- Tables must have either:
+    - A timestamp column for time-based syncing
+    - An ID column for incremental syncing
+- Access to `information_schema` for metadata
+
+## Testing
+
+Start a PostgreSQL instance:
+
+```bash
+docker run --name starbasedb-postgres \
+    -e POSTGRES_PASSWORD=postgres \
+    -e POSTGRES_DB=demo \
+    -p 5432:5432 \
+    -d postgres:15
+```
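Once the container is up and the configured tables exist, a quick way to confirm the replica is filling is to poll the demo worker's status endpoint (mirrors the curl example in the data-sync README's Monitoring section; URL and port come from the demo config):

```typescript
// Expect lastSyncTimestamp to advance once per sync_interval.
const response = await fetch('http://localhost:8787/sync-status')
console.log(await response.json())
```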
From 7ab1edb247610c9fa797fe115a034bd1293552ef Mon Sep 17 00:00:00 2001
From: David Anyatonwu
Date: Mon, 27 Jan 2025 18:44:14 +0100
Subject: [PATCH 4/4] chore: update meta.json files

---
 plugins/data-sync/meta.json     | 31 +++++++++++++++++--------------
 plugins/postgres-sync/meta.json | 20 ++++++++++++++++++++
 2 files changed, 37 insertions(+), 14 deletions(-)
 create mode 100644 plugins/postgres-sync/meta.json

diff --git a/plugins/data-sync/meta.json b/plugins/data-sync/meta.json
index a8d9c11..79166cd 100644
--- a/plugins/data-sync/meta.json
+++ b/plugins/data-sync/meta.json
@@ -1,20 +1,23 @@
 {
-    "name": "data-sync",
     "version": "1.0.0",
-    "description": "Synchronizes data from external sources to StarbaseDB's internal SQLite database",
-    "author": "StarbaseDB",
-    "license": "MIT",
-    "type": "sync",
-    "config": {
-        "sync_interval": {
-            "type": "number",
-            "description": "Interval in seconds between sync operations",
-            "default": 300
-        },
+    "resources": {
         "tables": {
-            "type": "array",
-            "description": "List of tables to synchronize",
-            "default": []
+            "tmp_data_sync_metadata": [
+                "table_name",
+                "last_sync_timestamp",
+                "last_sync_id",
+                "sync_errors"
+            ]
+        },
+        "secrets": {},
+        "variables": {
+            "sync_interval": "300",
+            "tables": "[]"
         }
+    },
+    "dependencies": {
+        "tables": {},
+        "secrets": {},
+        "variables": {}
     }
 }
diff --git a/plugins/postgres-sync/meta.json b/plugins/postgres-sync/meta.json
new file mode 100644
index 0000000..e18530b
--- /dev/null
+++ b/plugins/postgres-sync/meta.json
@@ -0,0 +1,20 @@
+{
+    "version": "1.0.0",
+    "resources": {
+        "tables": {},
+        "secrets": {
+            "pg_connection_string": ""
+        },
+        "variables": {
+            "schema": "public"
+        }
+    },
+    "dependencies": {
+        "tables": {},
+        "secrets": {},
+        "variables": {
+            "sync_interval": "",
+            "tables": ""
+        }
+    }
+}