@autonoma-ai/sdk 0.1.0 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,88 @@
1
+ # @autonoma-ai/sdk
2
+
3
+ Core protocol layer for the Autonoma Environment Factory. Handles HMAC verification, JWT-signed teardown tokens, template resolution, FK graph ordering, and the `discover`/`up`/`down` request lifecycle.
4
+
5
+ This package is the shared dependency of all ORM and server adapters — you don't need to install it directly unless you're building a custom adapter.
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ pnpm add @autonoma-ai/sdk
11
+ ```
12
+
13
+ ## What's exported
14
+
15
+ ### `handleRequest(config, request)`
16
+
17
+ Main entry point. Routes `discover`, `up`, and `down` actions, verifies HMAC, and delegates to the ORM adapter.
18
+
19
+ ```typescript
20
+ import { handleRequest } from '@autonoma-ai/sdk'
21
+
22
+ const response = await handleRequest(config, { body, headers })
23
+ // { status: 200, body: { ... } }
24
+ ```
25
+
26
+ ### `checkScenario(adapter, scenario)`
27
+
28
+ Dry-run a scenario against a real database — full create-then-teardown cycle. Use this in integration tests to validate scenario data before deploying.
29
+
30
+ ```typescript
31
+ import { checkScenario } from '@autonoma-ai/sdk'
32
+
33
+ const result = await checkScenario(adapter, {
34
+ create: {
35
+ Organization: [{
36
+ name: 'Test [{{testRunId}}]',
37
+ slug: 'test-{{testRunId}}',
38
+ users: [{ email: 'admin-{{testRunId}}@test.com', name: 'Admin' }],
39
+ }],
40
+ },
41
+ })
42
+
43
+ // result.valid → true/false
44
+ // result.phase → 'ok' | 'up' | 'down'
45
+ // result.errors → [{ message, fix }]
46
+ // result.timing → { upMs, downMs }
47
+ ```
48
+
49
+ ### `checkAllScenarios(adapter, scenarios)`
50
+
51
+ Runs `checkScenario` for each scenario definition and returns all results.
52
+
53
+ ### Graph utilities (`@autonoma-ai/sdk/graph`)
54
+
55
+ Exported from the `/graph` subpath for use in ORM adapters:
56
+
57
+ ```typescript
58
+ import { topoSort, findDeferrableEdge } from '@autonoma-ai/sdk/graph'
59
+ ```
60
+
61
+ - `topoSort(edges)` — Kahn's algorithm + Tarjan's SCC for FK-ordered entity creation
62
+ - `findDeferrableEdge(scc, edges)` — finds a nullable FK in a cycle to break it
63
+
64
+ ### Other exports
65
+
66
+ | Export | Use |
67
+ |--------|-----|
68
+ | `signBody` / `verifySignature` | HMAC-SHA256 signing for request auth |
69
+ | `signRefs` / `verifyRefs` | JWT-like token for signing teardown refs |
70
+ | `resolveTemplate` | Resolve `{{testRunId}}`, `{{index}}`, `{{cycle(...)}}`, etc. |
71
+ | `resolveTree` | Nested scenario tree → flat entity list with auto-wired FKs |
72
+ | `fingerprint` | Deterministic hash of scenario definitions |
73
+
74
+ ## CLI
75
+
76
+ The package ships an `autonoma` CLI:
77
+
78
+ ```bash
79
+ # Validate a scenario against a schema
80
+ npx autonoma validate <schema.json> <scenario.json>
81
+
82
+ # Convert Prisma DMMF to autonoma-schema.json
83
+ npx autonoma schema convert <dmmf.json> --scope-field <field>
84
+ ```
85
+
86
+ ## Documentation
87
+
88
+ Full docs: [docs/](../../docs/) — start with [overview](../../docs/overview.txt) or read [everything in one file](../../docs/llms-full.txt).
package/dist/cli.js CHANGED
@@ -95,7 +95,7 @@ function convertDMMFToSchema(dmmf, scopeField) {
95
95
  });
96
96
  }
97
97
  }
98
- models.push({ name: model.name, fields });
98
+ models.push({ name: model.name, tableName: model.dbName ?? model.name, fields });
99
99
  }
100
100
  return { models, edges, relations: [], scopeField };
101
101
  }
package/dist/cli.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/cli.ts"],"sourcesContent":["#!/usr/bin/env node\n\nimport { readFile, writeFile } from 'node:fs/promises'\nimport { resolve } from 'node:path'\nimport type { SchemaInfo } from './types'\n\nconst HELP = `\nautonoma — Autonoma SDK CLI\n\nCommands:\n autonoma schema convert <dmmf.json> Convert Prisma DMMF to autonoma schema\n\nOptions:\n --scope-field <name> Scope field name (default: \"testRunId\")\n --pretty Pretty-print output\n -o, --output <path> Write output to file instead of stdout\n -h, --help Show this help\n`.trim()\n\nasync function main() {\n const args = process.argv.slice(2)\n\n if (args.length === 0 || args.includes('-h') || args.includes('--help')) {\n console.log(HELP)\n process.exit(0)\n }\n\n const command = args[0]\n\n switch (command) {\n case 'schema':\n return await cmdSchema(args.slice(1))\n default:\n console.error(`Unknown command: ${command}`)\n console.log(HELP)\n process.exit(1)\n }\n}\n\n// ── schema convert ────────────────────────────────────────────────────────\n\nasync function cmdSchema(args: string[]) {\n const subcommand = args[0]\n\n if (subcommand !== 'convert') {\n console.error('Usage: autonoma schema convert <dmmf.json> --scope-field <name>')\n process.exit(1)\n }\n\n const flags = parseFlags(args.slice(1))\n const positional = flags._positional\n\n if (positional.length < 1) {\n console.error('Usage: autonoma schema convert <dmmf.json> --scope-field <name>')\n process.exit(1)\n }\n\n const dmmfPath = resolve(positional[0]!)\n const scopeField = (flags['--scope-field'] as string) ?? 'testRunId'\n const dmmf = await readJSON<DMMFInput>(dmmfPath)\n\n const schema = convertDMMFToSchema(dmmf, scopeField)\n\n const json = JSON.stringify(schema, null, 2)\n\n if (flags['-o'] || flags['--output']) {\n const outPath = resolve((flags['-o'] ?? 
flags['--output']) as string)\n await writeFile(outPath, json + '\\n')\n console.error(`Schema written to ${outPath}`)\n console.error(` ${schema.models.length} models, ${schema.edges.length} FK edges, scopeField: \"${scopeField}\"`)\n } else {\n console.log(json)\n }\n}\n\n// ── DMMF conversion ───────────────────────────────────────────────────────\n\ninterface DMMFInput {\n models: Record<string, DMMFModel> | DMMFModel[]\n datamodel?: { models: DMMFModel[] }\n}\n\ninterface DMMFModel {\n name: string\n fields: DMMFField[]\n}\n\ninterface DMMFField {\n name: string\n type: string\n kind: string\n isRequired: boolean\n isId: boolean\n hasDefaultValue: boolean\n relationFromFields?: string[]\n relationToFields?: string[]\n}\n\nfunction convertDMMFToSchema(dmmf: DMMFInput, scopeField: string): SchemaInfo {\n let dmmfModels: DMMFModel[]\n\n if (dmmf.datamodel?.models) {\n dmmfModels = dmmf.datamodel.models\n } else if (Array.isArray(dmmf.models)) {\n dmmfModels = dmmf.models\n } else {\n dmmfModels = Object.entries(dmmf.models).map(\n ([name, model]) => ({ ...model, name }),\n )\n }\n\n const models: SchemaInfo['models'] = []\n const edges: SchemaInfo['edges'] = []\n\n for (const model of dmmfModels) {\n const fields: SchemaInfo['models'][number]['fields'] = []\n\n for (const field of model.fields) {\n if (field.kind === 'object') {\n if (field.relationFromFields?.length) {\n edges.push({\n from: model.name,\n to: field.type,\n localField: field.relationFromFields[0]!,\n foreignField: field.relationToFields?.[0] ?? 
'id',\n nullable: !field.isRequired,\n })\n }\n continue\n }\n\n if (field.kind === 'scalar' || field.kind === 'enum') {\n fields.push({\n name: field.name,\n type: field.type,\n isRequired: field.isRequired,\n isId: field.isId,\n hasDefault: field.hasDefaultValue,\n })\n }\n }\n\n models.push({ name: model.name, fields })\n }\n\n return { models, edges, relations: [], scopeField }\n}\n\n// ── Utilities ─────────────────────────────────────────────────────────────\n\nasync function readJSON<T>(path: string): Promise<T> {\n try {\n const content = await readFile(path, 'utf-8')\n return JSON.parse(content) as T\n } catch (err) {\n if ((err as NodeJS.ErrnoException).code === 'ENOENT') {\n console.error(`File not found: ${path}`)\n } else if (err instanceof SyntaxError) {\n console.error(`Invalid JSON in ${path}: ${err.message}`)\n } else {\n console.error(`Error reading ${path}: ${err}`)\n }\n process.exit(1)\n }\n}\n\ninterface ParsedFlags {\n [key: string]: string | boolean | string[]\n _positional: string[]\n}\n\nfunction parseFlags(args: string[]): ParsedFlags {\n const result: ParsedFlags = { _positional: [] }\n let i = 0\n while (i < args.length) {\n const arg = args[i]!\n if (arg.startsWith('-')) {\n const next = args[i + 1]\n if (next && !next.startsWith('-')) {\n result[arg] = next\n i += 2\n } else {\n result[arg] = true\n i++\n }\n } else {\n result._positional.push(arg)\n i++\n }\n }\n return result\n}\n\nmain().catch((err) => {\n console.error('Fatal:', err instanceof Error ? 
err.message : err)\n process.exit(1)\n})\n"],"mappings":";;;AAEA,SAAS,UAAU,iBAAiB;AACpC,SAAS,eAAe;AAGxB,IAAM,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWX,KAAK;AAEP,eAAe,OAAO;AACpB,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AAEjC,MAAI,KAAK,WAAW,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,QAAQ,GAAG;AACvE,YAAQ,IAAI,IAAI;AAChB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,UAAU,KAAK,CAAC;AAEtB,UAAQ,SAAS;AAAA,IACf,KAAK;AACH,aAAO,MAAM,UAAU,KAAK,MAAM,CAAC,CAAC;AAAA,IACtC;AACE,cAAQ,MAAM,oBAAoB,OAAO,EAAE;AAC3C,cAAQ,IAAI,IAAI;AAChB,cAAQ,KAAK,CAAC;AAAA,EAClB;AACF;AAIA,eAAe,UAAU,MAAgB;AACvC,QAAM,aAAa,KAAK,CAAC;AAEzB,MAAI,eAAe,WAAW;AAC5B,YAAQ,MAAM,iEAAiE;AAC/E,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,QAAQ,WAAW,KAAK,MAAM,CAAC,CAAC;AACtC,QAAM,aAAa,MAAM;AAEzB,MAAI,WAAW,SAAS,GAAG;AACzB,YAAQ,MAAM,iEAAiE;AAC/E,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,WAAW,QAAQ,WAAW,CAAC,CAAE;AACvC,QAAM,aAAc,MAAM,eAAe,KAAgB;AACzD,QAAM,OAAO,MAAM,SAAoB,QAAQ;AAE/C,QAAM,SAAS,oBAAoB,MAAM,UAAU;AAEnD,QAAM,OAAO,KAAK,UAAU,QAAQ,MAAM,CAAC;AAE3C,MAAI,MAAM,IAAI,KAAK,MAAM,UAAU,GAAG;AACpC,UAAM,UAAU,QAAS,MAAM,IAAI,KAAK,MAAM,UAAU,CAAY;AACpE,UAAM,UAAU,SAAS,OAAO,IAAI;AACpC,YAAQ,MAAM,qBAAqB,OAAO,EAAE;AAC5C,YAAQ,MAAM,KAAK,OAAO,OAAO,MAAM,YAAY,OAAO,MAAM,MAAM,2BAA2B,UAAU,GAAG;AAAA,EAChH,OAAO;AACL,YAAQ,IAAI,IAAI;AAAA,EAClB;AACF;AAyBA,SAAS,oBAAoB,MAAiB,YAAgC;AAC5E,MAAI;AAEJ,MAAI,KAAK,WAAW,QAAQ;AAC1B,iBAAa,KAAK,UAAU;AAAA,EAC9B,WAAW,MAAM,QAAQ,KAAK,MAAM,GAAG;AACrC,iBAAa,KAAK;AAAA,EACpB,OAAO;AACL,iBAAa,OAAO,QAAQ,KAAK,MAAM,EAAE;AAAA,MACvC,CAAC,CAAC,MAAM,KAAK,OAAO,EAAE,GAAG,OAAO,KAAK;AAAA,IACvC;AAAA,EACF;AAEA,QAAM,SAA+B,CAAC;AACtC,QAAM,QAA6B,CAAC;AAEpC,aAAW,SAAS,YAAY;AAC9B,UAAM,SAAiD,CAAC;AAExD,eAAW,SAAS,MAAM,QAAQ;AAChC,UAAI,MAAM,SAAS,UAAU;AAC3B,YAAI,MAAM,oBAAoB,QAAQ;AACpC,gBAAM,KAAK;AAAA,YACT,MAAM,MAAM;AAAA,YACZ,IAAI,MAAM;AAAA,YACV,YAAY,MAAM,mBAAmB,CAAC;AAAA,YACtC,cAAc,MAAM,mBAAmB,CAAC,KAAK;AAAA,YAC7C,UAAU,CAAC,MAAM;AAAA,UACnB,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,YAAY,MAAM,SAAS,QAAQ;AACpD,eAAO,KAAK;AAAA,UACV,MAAM,MAAM;AAAA,UACZ,MAAM,MAAM;AAAA
,UACZ,YAAY,MAAM;AAAA,UAClB,MAAM,MAAM;AAAA,UACZ,YAAY,MAAM;AAAA,QACpB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,KAAK,EAAE,MAAM,MAAM,MAAM,OAAO,CAAC;AAAA,EAC1C;AAEA,SAAO,EAAE,QAAQ,OAAO,WAAW,CAAC,GAAG,WAAW;AACpD;AAIA,eAAe,SAAY,MAA0B;AACnD,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,MAAM,OAAO;AAC5C,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,SAAS,KAAK;AACZ,QAAK,IAA8B,SAAS,UAAU;AACpD,cAAQ,MAAM,mBAAmB,IAAI,EAAE;AAAA,IACzC,WAAW,eAAe,aAAa;AACrC,cAAQ,MAAM,mBAAmB,IAAI,KAAK,IAAI,OAAO,EAAE;AAAA,IACzD,OAAO;AACL,cAAQ,MAAM,iBAAiB,IAAI,KAAK,GAAG,EAAE;AAAA,IAC/C;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAOA,SAAS,WAAW,MAA6B;AAC/C,QAAM,SAAsB,EAAE,aAAa,CAAC,EAAE;AAC9C,MAAI,IAAI;AACR,SAAO,IAAI,KAAK,QAAQ;AACtB,UAAM,MAAM,KAAK,CAAC;AAClB,QAAI,IAAI,WAAW,GAAG,GAAG;AACvB,YAAM,OAAO,KAAK,IAAI,CAAC;AACvB,UAAI,QAAQ,CAAC,KAAK,WAAW,GAAG,GAAG;AACjC,eAAO,GAAG,IAAI;AACd,aAAK;AAAA,MACP,OAAO;AACL,eAAO,GAAG,IAAI;AACd;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO,YAAY,KAAK,GAAG;AAC3B;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEA,KAAK,EAAE,MAAM,CAAC,QAAQ;AACpB,UAAQ,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,GAAG;AAChE,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":[]}
1
+ {"version":3,"sources":["../src/cli.ts"],"sourcesContent":["#!/usr/bin/env node\n\nimport { readFile, writeFile } from 'node:fs/promises'\nimport { resolve } from 'node:path'\nimport type { SchemaInfo } from './types'\n\nconst HELP = `\nautonoma — Autonoma SDK CLI\n\nCommands:\n autonoma schema convert <dmmf.json> Convert Prisma DMMF to autonoma schema\n\nOptions:\n --scope-field <name> Scope field name (default: \"testRunId\")\n --pretty Pretty-print output\n -o, --output <path> Write output to file instead of stdout\n -h, --help Show this help\n`.trim()\n\nasync function main() {\n const args = process.argv.slice(2)\n\n if (args.length === 0 || args.includes('-h') || args.includes('--help')) {\n console.log(HELP)\n process.exit(0)\n }\n\n const command = args[0]\n\n switch (command) {\n case 'schema':\n return await cmdSchema(args.slice(1))\n default:\n console.error(`Unknown command: ${command}`)\n console.log(HELP)\n process.exit(1)\n }\n}\n\n// ── schema convert ────────────────────────────────────────────────────────\n\nasync function cmdSchema(args: string[]) {\n const subcommand = args[0]\n\n if (subcommand !== 'convert') {\n console.error('Usage: autonoma schema convert <dmmf.json> --scope-field <name>')\n process.exit(1)\n }\n\n const flags = parseFlags(args.slice(1))\n const positional = flags._positional\n\n if (positional.length < 1) {\n console.error('Usage: autonoma schema convert <dmmf.json> --scope-field <name>')\n process.exit(1)\n }\n\n const dmmfPath = resolve(positional[0]!)\n const scopeField = (flags['--scope-field'] as string) ?? 'testRunId'\n const dmmf = await readJSON<DMMFInput>(dmmfPath)\n\n const schema = convertDMMFToSchema(dmmf, scopeField)\n\n const json = JSON.stringify(schema, null, 2)\n\n if (flags['-o'] || flags['--output']) {\n const outPath = resolve((flags['-o'] ?? 
flags['--output']) as string)\n await writeFile(outPath, json + '\\n')\n console.error(`Schema written to ${outPath}`)\n console.error(` ${schema.models.length} models, ${schema.edges.length} FK edges, scopeField: \"${scopeField}\"`)\n } else {\n console.log(json)\n }\n}\n\n// ── DMMF conversion ───────────────────────────────────────────────────────\n\ninterface DMMFInput {\n models: Record<string, DMMFModel> | DMMFModel[]\n datamodel?: { models: DMMFModel[] }\n}\n\ninterface DMMFModel {\n name: string\n dbName?: string | null\n fields: DMMFField[]\n}\n\ninterface DMMFField {\n name: string\n type: string\n kind: string\n isRequired: boolean\n isId: boolean\n hasDefaultValue: boolean\n relationFromFields?: string[]\n relationToFields?: string[]\n}\n\nfunction convertDMMFToSchema(dmmf: DMMFInput, scopeField: string): SchemaInfo {\n let dmmfModels: DMMFModel[]\n\n if (dmmf.datamodel?.models) {\n dmmfModels = dmmf.datamodel.models\n } else if (Array.isArray(dmmf.models)) {\n dmmfModels = dmmf.models\n } else {\n dmmfModels = Object.entries(dmmf.models).map(\n ([name, model]) => ({ ...model, name }),\n )\n }\n\n const models: SchemaInfo['models'] = []\n const edges: SchemaInfo['edges'] = []\n\n for (const model of dmmfModels) {\n const fields: SchemaInfo['models'][number]['fields'] = []\n\n for (const field of model.fields) {\n if (field.kind === 'object') {\n if (field.relationFromFields?.length) {\n edges.push({\n from: model.name,\n to: field.type,\n localField: field.relationFromFields[0]!,\n foreignField: field.relationToFields?.[0] ?? 'id',\n nullable: !field.isRequired,\n })\n }\n continue\n }\n\n if (field.kind === 'scalar' || field.kind === 'enum') {\n fields.push({\n name: field.name,\n type: field.type,\n isRequired: field.isRequired,\n isId: field.isId,\n hasDefault: field.hasDefaultValue,\n })\n }\n }\n\n models.push({ name: model.name, tableName: model.dbName ?? 
model.name, fields })\n }\n\n return { models, edges, relations: [], scopeField }\n}\n\n// ── Utilities ─────────────────────────────────────────────────────────────\n\nasync function readJSON<T>(path: string): Promise<T> {\n try {\n const content = await readFile(path, 'utf-8')\n return JSON.parse(content) as T\n } catch (err) {\n if ((err as NodeJS.ErrnoException).code === 'ENOENT') {\n console.error(`File not found: ${path}`)\n } else if (err instanceof SyntaxError) {\n console.error(`Invalid JSON in ${path}: ${err.message}`)\n } else {\n console.error(`Error reading ${path}: ${err}`)\n }\n process.exit(1)\n }\n}\n\ninterface ParsedFlags {\n [key: string]: string | boolean | string[]\n _positional: string[]\n}\n\nfunction parseFlags(args: string[]): ParsedFlags {\n const result: ParsedFlags = { _positional: [] }\n let i = 0\n while (i < args.length) {\n const arg = args[i]!\n if (arg.startsWith('-')) {\n const next = args[i + 1]\n if (next && !next.startsWith('-')) {\n result[arg] = next\n i += 2\n } else {\n result[arg] = true\n i++\n }\n } else {\n result._positional.push(arg)\n i++\n }\n }\n return result\n}\n\nmain().catch((err) => {\n console.error('Fatal:', err instanceof Error ? 
err.message : err)\n process.exit(1)\n})\n"],"mappings":";;;AAEA,SAAS,UAAU,iBAAiB;AACpC,SAAS,eAAe;AAGxB,IAAM,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWX,KAAK;AAEP,eAAe,OAAO;AACpB,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AAEjC,MAAI,KAAK,WAAW,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,QAAQ,GAAG;AACvE,YAAQ,IAAI,IAAI;AAChB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,UAAU,KAAK,CAAC;AAEtB,UAAQ,SAAS;AAAA,IACf,KAAK;AACH,aAAO,MAAM,UAAU,KAAK,MAAM,CAAC,CAAC;AAAA,IACtC;AACE,cAAQ,MAAM,oBAAoB,OAAO,EAAE;AAC3C,cAAQ,IAAI,IAAI;AAChB,cAAQ,KAAK,CAAC;AAAA,EAClB;AACF;AAIA,eAAe,UAAU,MAAgB;AACvC,QAAM,aAAa,KAAK,CAAC;AAEzB,MAAI,eAAe,WAAW;AAC5B,YAAQ,MAAM,iEAAiE;AAC/E,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,QAAQ,WAAW,KAAK,MAAM,CAAC,CAAC;AACtC,QAAM,aAAa,MAAM;AAEzB,MAAI,WAAW,SAAS,GAAG;AACzB,YAAQ,MAAM,iEAAiE;AAC/E,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,WAAW,QAAQ,WAAW,CAAC,CAAE;AACvC,QAAM,aAAc,MAAM,eAAe,KAAgB;AACzD,QAAM,OAAO,MAAM,SAAoB,QAAQ;AAE/C,QAAM,SAAS,oBAAoB,MAAM,UAAU;AAEnD,QAAM,OAAO,KAAK,UAAU,QAAQ,MAAM,CAAC;AAE3C,MAAI,MAAM,IAAI,KAAK,MAAM,UAAU,GAAG;AACpC,UAAM,UAAU,QAAS,MAAM,IAAI,KAAK,MAAM,UAAU,CAAY;AACpE,UAAM,UAAU,SAAS,OAAO,IAAI;AACpC,YAAQ,MAAM,qBAAqB,OAAO,EAAE;AAC5C,YAAQ,MAAM,KAAK,OAAO,OAAO,MAAM,YAAY,OAAO,MAAM,MAAM,2BAA2B,UAAU,GAAG;AAAA,EAChH,OAAO;AACL,YAAQ,IAAI,IAAI;AAAA,EAClB;AACF;AA0BA,SAAS,oBAAoB,MAAiB,YAAgC;AAC5E,MAAI;AAEJ,MAAI,KAAK,WAAW,QAAQ;AAC1B,iBAAa,KAAK,UAAU;AAAA,EAC9B,WAAW,MAAM,QAAQ,KAAK,MAAM,GAAG;AACrC,iBAAa,KAAK;AAAA,EACpB,OAAO;AACL,iBAAa,OAAO,QAAQ,KAAK,MAAM,EAAE;AAAA,MACvC,CAAC,CAAC,MAAM,KAAK,OAAO,EAAE,GAAG,OAAO,KAAK;AAAA,IACvC;AAAA,EACF;AAEA,QAAM,SAA+B,CAAC;AACtC,QAAM,QAA6B,CAAC;AAEpC,aAAW,SAAS,YAAY;AAC9B,UAAM,SAAiD,CAAC;AAExD,eAAW,SAAS,MAAM,QAAQ;AAChC,UAAI,MAAM,SAAS,UAAU;AAC3B,YAAI,MAAM,oBAAoB,QAAQ;AACpC,gBAAM,KAAK;AAAA,YACT,MAAM,MAAM;AAAA,YACZ,IAAI,MAAM;AAAA,YACV,YAAY,MAAM,mBAAmB,CAAC;AAAA,YACtC,cAAc,MAAM,mBAAmB,CAAC,KAAK;AAAA,YAC7C,UAAU,CAAC,MAAM;AAAA,UACnB,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,YAAY,MAAM,SAAS,QAAQ;AACpD,eAAO,KAAK;AAAA,UACV,MAAM,MAAM;AAAA,UACZ,MAAM,MAAM;AAAA
,UACZ,YAAY,MAAM;AAAA,UAClB,MAAM,MAAM;AAAA,UACZ,YAAY,MAAM;AAAA,QACpB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,KAAK,EAAE,MAAM,MAAM,MAAM,WAAW,MAAM,UAAU,MAAM,MAAM,OAAO,CAAC;AAAA,EACjF;AAEA,SAAO,EAAE,QAAQ,OAAO,WAAW,CAAC,GAAG,WAAW;AACpD;AAIA,eAAe,SAAY,MAA0B;AACnD,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,MAAM,OAAO;AAC5C,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,SAAS,KAAK;AACZ,QAAK,IAA8B,SAAS,UAAU;AACpD,cAAQ,MAAM,mBAAmB,IAAI,EAAE;AAAA,IACzC,WAAW,eAAe,aAAa;AACrC,cAAQ,MAAM,mBAAmB,IAAI,KAAK,IAAI,OAAO,EAAE;AAAA,IACzD,OAAO;AACL,cAAQ,MAAM,iBAAiB,IAAI,KAAK,GAAG,EAAE;AAAA,IAC/C;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAOA,SAAS,WAAW,MAA6B;AAC/C,QAAM,SAAsB,EAAE,aAAa,CAAC,EAAE;AAC9C,MAAI,IAAI;AACR,SAAO,IAAI,KAAK,QAAQ;AACtB,UAAM,MAAM,KAAK,CAAC;AAClB,QAAI,IAAI,WAAW,GAAG,GAAG;AACvB,YAAM,OAAO,KAAK,IAAI,CAAC;AACvB,UAAI,QAAQ,CAAC,KAAK,WAAW,GAAG,GAAG;AACjC,eAAO,GAAG,IAAI;AACd,aAAK;AAAA,MACP,OAAO;AACL,eAAO,GAAG,IAAI;AACd;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO,YAAY,KAAK,GAAG;AAC3B;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEA,KAAK,EAAE,MAAM,CAAC,QAAQ;AACpB,UAAQ,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,GAAG;AAChE,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":[]}
@@ -1,11 +1,13 @@
1
- /** ORM adapter interface — implemented by @autonoma-ai/sdk-prisma, @autonoma-ai/sdk-drizzle, etc. */
2
- interface OrmAdapter {
3
- /** Return schema metadata for discover (models, fields, relationships) */
4
- getSchema(): SchemaInfo;
5
- /** Create entities from a resolved spec, return created records keyed by model */
6
- createEntities(spec: Record<string, ResolvedEntitySpec>, context: CreateContext): Promise<Record<string, Record<string, unknown>[]>>;
7
- /** Delete all data scoped to a value. Refs are provided for targeted cleanup of un-scoped models. */
8
- teardown(scopeValue: string, refs?: Record<string, Record<string, unknown>[]>): Promise<void>;
1
+ /** Minimal SQL executor — wrap your DB connection (pg Pool, Prisma, Drizzle, etc.) into this. */
2
+ interface SQLExecutor {
3
+ /** Execute a SQL query with parameterized values. Returns rows as plain objects. */
4
+ query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<T[]>;
5
+ /**
6
+ * Execute a block within a transaction.
7
+ * The callback receives an executor scoped to the transaction.
8
+ * If the callback throws, the transaction is rolled back.
9
+ */
10
+ transaction<T>(fn: (tx: SQLExecutor) => Promise<T>): Promise<T>;
9
11
  }
10
12
  interface SchemaInfo {
11
13
  models: ModelInfo[];
@@ -22,6 +24,7 @@ interface SchemaRelation {
22
24
  }
23
25
  interface ModelInfo {
24
26
  name: string;
27
+ tableName: string;
25
28
  fields: FieldInfo[];
26
29
  }
27
30
  interface FieldInfo {
@@ -52,14 +55,36 @@ interface ScenarioDefinition {
52
55
  /** Nested tree: model name → array of node objects with nested children */
53
56
  create: Record<string, Record<string, unknown>[]>;
54
57
  }
58
+ interface SdkInfo {
59
+ language: string;
60
+ orm: string;
61
+ server: string;
62
+ }
55
63
  interface HandlerConfig {
56
- adapter: OrmAdapter;
64
+ /** SQL executor wrapping your database connection */
65
+ executor: SQLExecutor;
66
+ /** Scope field name (camelCase), e.g., 'organizationId' */
67
+ scopeField: string;
68
+ /** Database dialect. Defaults to 'postgres'. */
69
+ dialect?: 'postgres' | 'mysql' | 'sqlite';
70
+ /** DB schema name. Defaults to 'public' for Postgres. */
71
+ dbSchema?: string;
72
+ /**
73
+ * Map scenario model names to DB table names.
74
+ * Keys are model names (PascalCase), values are DB table names.
75
+ * If omitted, auto-detected from information_schema with PascalCase inference.
76
+ */
77
+ tableNameMap?: Record<string, string>;
78
+ /** Tables to exclude from introspection. Defaults to ['_prisma_migrations']. */
79
+ excludeTables?: string[];
57
80
  /** Shared secret — known by both you and Autonoma. Used to verify HMAC signatures on incoming requests. */
58
81
  sharedSecret: string;
59
82
  /** Internal secret — only you know this. Used to sign the refs JWT token. Autonoma never sees it. */
60
83
  signingSecret: string;
61
84
  allowProduction?: boolean;
62
85
  auth?: (user: Record<string, unknown>) => Promise<AuthResult> | AuthResult;
86
+ /** SDK identity metadata. Server and ORM adapters populate this. */
87
+ sdk?: Partial<SdkInfo>;
63
88
  }
64
89
  interface AuthResult {
65
90
  token: string;
@@ -104,4 +129,4 @@ declare function topoSort(nodes: string[], edges: FKEdge[]): TopoSortResult;
104
129
  */
105
130
  declare function findDeferrableEdge(cycle: string[], edges: FKEdge[]): FKEdge | null;
106
131
 
107
- export { type AuthResult as A, type CreateContext as C, type DiscoverResponse as D, type FKEdge as F, type HandlerConfig as H, type ModelInfo as M, type OrmAdapter as O, type ResolvedEntitySpec as R, type SchemaInfo as S, type TopoSortResult as T, type UpResponse as U, type HandlerRequest as a, type HandlerResponse as b, type ScenarioDefinition as c, type DownResponse as d, type FieldInfo as e, type SchemaRelation as f, findDeferrableEdge as g, topoSort as t };
132
+ export { type AuthResult as A, type CreateContext as C, type DiscoverResponse as D, type FKEdge as F, type HandlerConfig as H, type ModelInfo as M, type ResolvedEntitySpec as R, type SchemaInfo as S, type TopoSortResult as T, type UpResponse as U, type HandlerRequest as a, type HandlerResponse as b, type SQLExecutor as c, type ScenarioDefinition as d, type DownResponse as e, type FieldInfo as f, type SchemaRelation as g, type SdkInfo as h, findDeferrableEdge as i, topoSort as t };
package/dist/graph.d.ts CHANGED
@@ -1 +1 @@
1
- export { F as FKEdge, T as TopoSortResult, g as findDeferrableEdge, t as topoSort } from './graph-DpqVvKaD.js';
1
+ export { F as FKEdge, T as TopoSortResult, i as findDeferrableEdge, t as topoSort } from './graph-gkFzydIb.js';
package/dist/index.d.ts CHANGED
@@ -1,6 +1,7 @@
1
- import { H as HandlerConfig, a as HandlerRequest, b as HandlerResponse, S as SchemaInfo, O as OrmAdapter, c as ScenarioDefinition } from './graph-DpqVvKaD.js';
2
- export { A as AuthResult, C as CreateContext, D as DiscoverResponse, d as DownResponse, F as FKEdge, e as FieldInfo, M as ModelInfo, R as ResolvedEntitySpec, f as SchemaRelation, U as UpResponse, g as findDeferrableEdge, t as topoSort } from './graph-DpqVvKaD.js';
1
+ import { H as HandlerConfig, a as HandlerRequest, b as HandlerResponse, S as SchemaInfo, c as SQLExecutor, d as ScenarioDefinition, R as ResolvedEntitySpec, C as CreateContext } from './graph-gkFzydIb.js';
2
+ export { A as AuthResult, D as DiscoverResponse, e as DownResponse, F as FKEdge, f as FieldInfo, M as ModelInfo, g as SchemaRelation, h as SdkInfo, U as UpResponse, i as findDeferrableEdge, t as topoSort } from './graph-gkFzydIb.js';
3
3
 
4
+ declare const PROTOCOL_VERSION = "1.0";
4
5
  declare function handleRequest(config: HandlerConfig, req: HandlerRequest): Promise<HandlerResponse>;
5
6
 
6
7
  declare function signBody(body: string, secret: string): string;
@@ -43,9 +44,24 @@ interface CreateOp {
43
44
  tempId: string;
44
45
  batch: boolean;
45
46
  }
47
+ /**
48
+ * A deferred FK update — emitted when a _ref points to a node that hasn't
49
+ * been created yet (circular dependency). Resolved after all creates.
50
+ */
51
+ interface DeferredUpdate {
52
+ /** Temp ID of the record that needs to be updated */
53
+ targetTempId: string;
54
+ /** Model name of the record to update */
55
+ model: string;
56
+ /** Field on the record that holds the deferred FK */
57
+ field: string;
58
+ /** Alias that will resolve to the FK value once created */
59
+ refAlias: string;
60
+ }
46
61
  /** Result of resolving a tree scenario */
47
62
  interface ResolvedTree {
48
63
  ops: CreateOp[];
64
+ deferredUpdates: DeferredUpdate[];
49
65
  aliases: Map<string, string>;
50
66
  }
51
67
  /** A resolved reference to another node's id */
@@ -59,6 +75,10 @@ interface RefNode {
59
75
  * Handles both directions:
60
76
  * - FK on child (Application.organizationId → Organization): set child FK to parent ID
61
77
  * - FK on parent (Member.userId → User): create child first, set parent FK to child ID
78
+ *
79
+ * Circular FK cycles (e.g. Application.mainBranchId ↔ Branch.applicationId) are handled
80
+ * transparently: the nullable FK is omitted on the first create and emitted as a
81
+ * DeferredUpdate to be applied via UPDATE after all records exist.
62
82
  */
63
83
  declare function resolveTree(create: Record<string, Record<string, unknown>[]>, schema: SchemaInfo, testRunId: string): ResolvedTree;
64
84
 
@@ -80,7 +100,11 @@ interface CheckError {
80
100
  * Dry-run a scenario against a real database.
81
101
  * Runs the full up → down cycle and returns structured errors.
82
102
  */
83
- declare function checkScenario(adapter: OrmAdapter, scenario: ScenarioDefinition, options?: {
103
+ declare function checkScenario(executor: SQLExecutor, scenario: ScenarioDefinition, options?: {
104
+ scopeField: string;
105
+ dialect?: HandlerConfig['dialect'];
106
+ dbSchema?: string;
107
+ tableNameMap?: Record<string, string>;
84
108
  sharedSecret?: string;
85
109
  signingSecret?: string;
86
110
  auth?: HandlerConfig['auth'];
@@ -88,10 +112,83 @@ declare function checkScenario(adapter: OrmAdapter, scenario: ScenarioDefinition
88
112
  /**
89
113
  * Check multiple scenarios sequentially.
90
114
  */
91
- declare function checkAllScenarios(adapter: OrmAdapter, scenarios: ScenarioDefinition[], options?: {
115
+ declare function checkAllScenarios(executor: SQLExecutor, scenarios: ScenarioDefinition[], options?: {
116
+ scopeField: string;
117
+ dialect?: HandlerConfig['dialect'];
118
+ dbSchema?: string;
119
+ tableNameMap?: Record<string, string>;
92
120
  sharedSecret?: string;
93
121
  signingSecret?: string;
94
122
  auth?: HandlerConfig['auth'];
95
123
  }): Promise<CheckResult[]>;
96
124
 
97
- export { type CheckError, type CheckResult, type CreateOp, HandlerConfig, HandlerRequest, HandlerResponse, OrmAdapter, type RefNode, type ResolvedTree, ScenarioDefinition, SchemaInfo, type TemplateContext, checkAllScenarios, checkScenario, fingerprint, handleRequest, resolveTemplate, resolveTree, signBody, signRefs, verifyRefs, verifySignature };
125
+ /** Database dialect abstraction — generates dialect-specific SQL strings. */
126
+ interface Dialect {
127
+ readonly name: 'postgres' | 'mysql' | 'sqlite';
128
+ /** Parameter placeholder for index (1-based). Postgres: $1, MySQL/SQLite: ? */
129
+ param(index: number): string;
130
+ /** Quote an identifier. Postgres: "name", MySQL: `name` */
131
+ quoteId(name: string): string;
132
+ /** Whether INSERT ... RETURNING is supported */
133
+ readonly supportsReturning: boolean;
134
+ /** SQL to list all base tables in a schema/database */
135
+ tablesSQL(schema: string): string;
136
+ /** SQL to list all columns for all tables in a schema/database */
137
+ columnsSQL(schema: string): string;
138
+ /** SQL to list primary key columns */
139
+ primaryKeysSQL(schema: string): string;
140
+ /** SQL to list foreign key relationships */
141
+ foreignKeysSQL(schema: string): string;
142
+ /** SQL to list enum types and their values */
143
+ enumsSQL(schema: string): string;
144
+ }
145
+ declare function getDialect(name?: 'postgres' | 'mysql' | 'sqlite'): Dialect;
146
+
147
+ /** Internal result including name mapping tables */
148
+ interface IntrospectionResult {
149
+ schema: SchemaInfo;
150
+ /** model name → DB table name */
151
+ tableMap: Map<string, string>;
152
+ /** model name → (field name → DB column name) */
153
+ columnMaps: Map<string, Map<string, string>>;
154
+ /** model name → (field name → Postgres enum type name). Only populated for Postgres. */
155
+ enumTypeMaps: Map<string, Map<string, string>>;
156
+ }
157
+ /**
158
+ * Introspect a database via information_schema to build SchemaInfo.
159
+ *
160
+ * Auto-maps DB names (snake_case) to model names (PascalCase) and
161
+ * field names (camelCase). Override with `tableNameMap`.
162
+ */
163
+ declare function introspectDatabase(executor: SQLExecutor, dialect: Dialect, config: {
164
+ scopeField: string;
165
+ schema?: string;
166
+ tableNameMap?: Record<string, string>;
167
+ excludeTables?: string[];
168
+ }): Promise<IntrospectionResult>;
169
+
170
+ /**
171
+ * Create entities via raw SQL INSERT.
172
+ *
173
+ * Entities arrive pre-sorted by FK order (handler does topo-sort via tree.ts).
174
+ * Each model in `spec` is inserted sequentially; within a model, batch mode
175
+ * uses a single multi-row INSERT while normal mode inserts one row at a time.
176
+ *
177
+ * For dialects with RETURNING (Postgres): INSERT ... RETURNING *
178
+ * For dialects without (MySQL): INSERT then SELECT via LAST_INSERT_ID()
179
+ */
180
+ declare function createEntities(executor: SQLExecutor, dialect: Dialect, tableMap: Map<string, string>, columnMaps: Map<string, Map<string, string>>, spec: Record<string, ResolvedEntitySpec>, _context: CreateContext, enumTypeMaps?: Map<string, Map<string, string>>): Promise<Record<string, Record<string, unknown>[]>>;
181
+
182
+ /**
183
+ * Tear down all data scoped to a value, in reverse topological order.
184
+ *
185
+ * Strategy:
186
+ * 1. Find the scope root model (e.g. Organization) from FK edges
187
+ * 2. Any model with a FK pointing to the scope root is "scoped"
188
+ * 3. Delete scoped models by their FK = scopeValue
189
+ * 4. Delete non-scoped models by their record IDs from refs
190
+ * 5. Delete the scope root entity last by id = scopeValue
191
+ */
192
+ declare function teardown(executor: SQLExecutor, dialect: Dialect, tableMap: Map<string, string>, columnMaps: Map<string, Map<string, string>>, schema: SchemaInfo, scopeValue: string, refs?: Record<string, Record<string, unknown>[]>): Promise<void>;
193
+
194
+ export { type CheckError, type CheckResult, CreateContext, type CreateOp, type Dialect, HandlerConfig, HandlerRequest, HandlerResponse, type IntrospectionResult, PROTOCOL_VERSION, type RefNode, ResolvedEntitySpec, type ResolvedTree, SQLExecutor, ScenarioDefinition, SchemaInfo, type TemplateContext, checkAllScenarios, checkScenario, createEntities, fingerprint, getDialect, handleRequest, introspectDatabase, resolveTemplate, resolveTree, signBody, signRefs, teardown, verifyRefs, verifySignature };