@danceroutine/tango-testing 0.1.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +85 -0
- package/dist/aDBClient-W6eXsK3X.js +21 -0
- package/dist/aDBClient-W6eXsK3X.js.map +1 -0
- package/dist/assertions/index.js +1 -1
- package/dist/{assertions-CN6KxXhH.js → assertions-CCFZ53Y-.js} +1 -1
- package/dist/assertions-CCFZ53Y-.js.map +1 -0
- package/dist/express/anExpressRequest.d.ts +24 -0
- package/dist/express/anExpressResponse.d.ts +9 -0
- package/dist/express/index.d.ts +3 -0
- package/dist/express/index.js +3 -0
- package/dist/express-Czpfz_Ay.js +68 -0
- package/dist/express-Czpfz_Ay.js.map +1 -0
- package/dist/factories/ModelDataFactory.d.ts +16 -1
- package/dist/factories/index.js +1 -1
- package/dist/{factories-CCAZ6E-g.js → factories-Cl_CAzbj.js} +19 -4
- package/dist/factories-Cl_CAzbj.js.map +1 -0
- package/dist/index.d.ts +5 -3
- package/dist/index.js +8 -11
- package/dist/integration/HarnessStrategyRegistry.d.ts +15 -0
- package/dist/integration/TestHarness.d.ts +23 -2
- package/dist/integration/anIntegrationHarness.d.ts +5 -0
- package/dist/integration/config.d.ts +4 -0
- package/dist/integration/conformance/index.d.ts +1 -0
- package/dist/integration/conformance/runDialectConformanceSuite.d.ts +11 -0
- package/dist/integration/domain/Dialect.d.ts +5 -4
- package/dist/integration/domain/ResetMode.d.ts +6 -5
- package/dist/integration/index.d.ts +8 -1
- package/dist/integration/index.js +3 -2
- package/dist/integration/migrations/ApplyAndVerifyMigrations.d.ts +3 -0
- package/dist/integration/migrations/AssertMigrationPlan.d.ts +3 -0
- package/dist/integration/migrations/IntrospectSchema.d.ts +3 -0
- package/dist/integration/orm/createQuerySetFixture.d.ts +10 -0
- package/dist/integration/orm/expectQueryResult.d.ts +4 -0
- package/dist/integration/orm/index.d.ts +6 -0
- package/dist/integration/orm/seedTable.d.ts +5 -0
- package/dist/integration/runtime/aTangoConfig.d.ts +8 -0
- package/dist/integration/runtime/index.d.ts +6 -0
- package/dist/integration/runtime/setupTestTangoRuntime.d.ts +6 -0
- package/dist/integration/smoke/AppProcessHarness.d.ts +83 -0
- package/dist/integration/smoke/index.d.ts +4 -0
- package/dist/integration/strategies/PostgresHarnessStrategy.d.ts +9 -0
- package/dist/integration/strategies/SqliteHarnessStrategy.d.ts +9 -0
- package/dist/integration-BrJw6NzG.js +747 -0
- package/dist/integration-BrJw6NzG.js.map +1 -0
- package/dist/mocks/DBClient.d.ts +1 -9
- package/dist/mocks/MockQuerySetResult.d.ts +5 -12
- package/dist/mocks/aDBClient.d.ts +21 -0
- package/dist/mocks/aManager.d.ts +17 -0
- package/dist/mocks/aQueryExecutor.d.ts +14 -0
- package/dist/mocks/aQueryResult.d.ts +5 -0
- package/dist/mocks/aQuerySet.d.ts +8 -0
- package/dist/mocks/aRequestContext.d.ts +22 -0
- package/dist/mocks/index.d.ts +9 -4
- package/dist/mocks/index.js +4 -6
- package/dist/mocks-BkwkXQQt.js +136 -0
- package/dist/mocks-BkwkXQQt.js.map +1 -0
- package/dist/vitest/index.js +3 -2
- package/dist/vitest/registerVitestTango.d.ts +3 -3
- package/dist/{vitest-PxMJue7R.js → vitest-37qN8D93.js} +4 -4
- package/dist/vitest-37qN8D93.js.map +1 -0
- package/package.json +81 -68
- package/dist/assertions/assertions.js +0 -8
- package/dist/assertions-CN6KxXhH.js.map +0 -1
- package/dist/factories/ModelDataFactory.js +0 -33
- package/dist/factories-CCAZ6E-g.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/integration/orm.d.ts +0 -9
- package/dist/integration/orm.js +0 -39
- package/dist/integration/strategies/PostgresHarnessStrategy.js +0 -95
- package/dist/integration-CDdpboYz.js +0 -378
- package/dist/integration-CDdpboYz.js.map +0 -1
- package/dist/mocks/DBClient.js +0 -1
- package/dist/mocks/MockQuerySetResult.js +0 -1
- package/dist/mocks/RepositoryLike.d.ts +0 -12
- package/dist/mocks/RepositoryLike.js +0 -1
- package/dist/mocks/aMockDBClient.d.ts +0 -2
- package/dist/mocks/aMockDBClient.js +0 -13
- package/dist/mocks/aMockQuerySet.d.ts +0 -2
- package/dist/mocks/aMockQuerySet.js +0 -15
- package/dist/mocks/aMockRepository.d.ts +0 -2
- package/dist/mocks/aMockRepository.js +0 -20
- package/dist/mocks/types.d.ts +0 -33
- package/dist/mocks-qo-1vCez.js +0 -72
- package/dist/mocks-qo-1vCez.js.map +0 -1
- package/dist/version.d.ts +0 -1
- package/dist/vitest/registerVitestTango.js +0 -90
- package/dist/vitest-PxMJue7R.js.map +0 -1
|
@@ -1,378 +0,0 @@
|
|
|
1
|
-
import { __export } from "./chunk-BkvOhyD0.js";
|
|
2
|
-
import { MigrationRunner, MigrationRunner as MigrationRunner$1, createDefaultIntrospectorStrategy } from "@danceroutine/tango-migrations";
|
|
3
|
-
import { PostgresAdapter, SqliteAdapter } from "@danceroutine/tango-orm/connection";
|
|
4
|
-
import { loadConfig } from "@danceroutine/tango-config";
|
|
5
|
-
import { rm } from "node:fs/promises";
|
|
6
|
-
import { Repository } from "@danceroutine/tango-orm";
|
|
7
|
-
|
|
8
|
-
//#region src/integration/domain/Dialect.ts
|
|
9
|
-
let Dialect = function(Dialect$1) {
|
|
10
|
-
Dialect$1["Sqlite"] = "sqlite";
|
|
11
|
-
Dialect$1["Postgres"] = "postgres";
|
|
12
|
-
return Dialect$1;
|
|
13
|
-
}({});
|
|
14
|
-
|
|
15
|
-
//#endregion
|
|
16
|
-
//#region src/integration/domain/ResetMode.ts
|
|
17
|
-
let ResetMode = function(ResetMode$1) {
|
|
18
|
-
ResetMode$1["Transaction"] = "transaction";
|
|
19
|
-
ResetMode$1["Truncate"] = "truncate";
|
|
20
|
-
ResetMode$1["DropSchema"] = "drop-schema";
|
|
21
|
-
return ResetMode$1;
|
|
22
|
-
}({});
|
|
23
|
-
|
|
24
|
-
//#endregion
|
|
25
|
-
//#region src/integration/domain/index.ts
|
|
26
|
-
var domain_exports = {};
|
|
27
|
-
__export(domain_exports, {
|
|
28
|
-
Dialect: () => Dialect,
|
|
29
|
-
ResetMode: () => ResetMode
|
|
30
|
-
});
|
|
31
|
-
|
|
32
|
-
//#endregion
|
|
33
|
-
//#region src/integration/migrations/AssertMigrationPlan.ts
|
|
34
|
-
async function assertMigrationPlan(harness, options) {
|
|
35
|
-
const runner = harness.migrationRunner(options.migrationsDir);
|
|
36
|
-
const plan = await runner.plan();
|
|
37
|
-
for (const snippet of options.expectSqlContains ?? []) if (!plan.includes(snippet)) throw new Error(`Expected migration plan to contain: ${snippet}`);
|
|
38
|
-
return plan;
|
|
39
|
-
}
|
|
40
|
-
|
|
41
|
-
//#endregion
|
|
42
|
-
//#region src/integration/migrations/ApplyAndVerifyMigrations.ts
|
|
43
|
-
async function applyAndVerifyMigrations(harness, options) {
|
|
44
|
-
const runner = harness.migrationRunner(options.migrationsDir);
|
|
45
|
-
await runner.apply(options.toId);
|
|
46
|
-
const statuses = await runner.status();
|
|
47
|
-
for (const id of options.expectedAppliedIds ?? []) {
|
|
48
|
-
const row = statuses.find((status) => status.id === id);
|
|
49
|
-
if (!row || !row.applied) throw new Error(`Expected migration ${id} to be applied`);
|
|
50
|
-
}
|
|
51
|
-
return { statuses };
|
|
52
|
-
}
|
|
53
|
-
|
|
54
|
-
//#endregion
|
|
55
|
-
//#region src/integration/migrations/IntrospectSchema.ts
|
|
56
|
-
const introspectorStrategy = createDefaultIntrospectorStrategy();
|
|
57
|
-
async function introspectSchema(harness) {
|
|
58
|
-
if (harness.dialect !== Dialect.Postgres && harness.dialect !== Dialect.Sqlite) throw new Error(`No introspector registered for dialect: ${String(harness.dialect)}`);
|
|
59
|
-
const dialect = harness.dialect === Dialect.Postgres ? "postgres" : "sqlite";
|
|
60
|
-
return introspectorStrategy.introspect(dialect, harness.dbClient);
|
|
61
|
-
}
|
|
62
|
-
|
|
63
|
-
//#endregion
|
|
64
|
-
//#region src/integration/migrations/index.ts
|
|
65
|
-
var migrations_exports = {};
|
|
66
|
-
__export(migrations_exports, {
|
|
67
|
-
applyAndVerifyMigrations: () => applyAndVerifyMigrations,
|
|
68
|
-
assertMigrationPlan: () => assertMigrationPlan,
|
|
69
|
-
introspectSchema: () => introspectSchema
|
|
70
|
-
});
|
|
71
|
-
|
|
72
|
-
//#endregion
|
|
73
|
-
//#region src/integration/HarnessStrategyRegistry.ts
|
|
74
|
-
var HarnessStrategyRegistry = class HarnessStrategyRegistry {
|
|
75
|
-
static BRAND = "tango.testing.harness_strategy_registry";
|
|
76
|
-
__tangoBrand = HarnessStrategyRegistry.BRAND;
|
|
77
|
-
strategies = new Map();
|
|
78
|
-
static isHarnessStrategyRegistry(value) {
|
|
79
|
-
return typeof value === "object" && value !== null && value.__tangoBrand === HarnessStrategyRegistry.BRAND;
|
|
80
|
-
}
|
|
81
|
-
register(strategy) {
|
|
82
|
-
this.strategies.set(String(strategy.dialect), strategy);
|
|
83
|
-
return this;
|
|
84
|
-
}
|
|
85
|
-
get(dialect) {
|
|
86
|
-
const strategy = this.strategies.get(String(dialect));
|
|
87
|
-
if (!strategy) throw new Error(`No harness strategy registered for dialect: ${String(dialect)}`);
|
|
88
|
-
return strategy;
|
|
89
|
-
}
|
|
90
|
-
list() {
|
|
91
|
-
return [...this.strategies.values()];
|
|
92
|
-
}
|
|
93
|
-
};
|
|
94
|
-
|
|
95
|
-
//#endregion
|
|
96
|
-
//#region src/integration/config.ts
|
|
97
|
-
function readNumber(value) {
|
|
98
|
-
if (!value) return undefined;
|
|
99
|
-
const parsed = Number(value);
|
|
100
|
-
return Number.isFinite(parsed) ? parsed : undefined;
|
|
101
|
-
}
|
|
102
|
-
function resolveAdapterConfig(dialect, opts) {
|
|
103
|
-
const fromOptions = opts.config ?? {};
|
|
104
|
-
if (opts.tangoConfigLoader) {
|
|
105
|
-
const loaded = loadConfig(opts.tangoConfigLoader);
|
|
106
|
-
const current = loaded.current.db;
|
|
107
|
-
const merged = {
|
|
108
|
-
url: fromOptions.url ?? current.url,
|
|
109
|
-
host: fromOptions.host ?? current.host,
|
|
110
|
-
port: fromOptions.port ?? current.port,
|
|
111
|
-
database: fromOptions.database ?? current.database,
|
|
112
|
-
user: fromOptions.user ?? current.user,
|
|
113
|
-
password: fromOptions.password ?? current.password,
|
|
114
|
-
filename: fromOptions.filename ?? current.filename,
|
|
115
|
-
maxConnections: fromOptions.maxConnections ?? current.maxConnections
|
|
116
|
-
};
|
|
117
|
-
if (dialect === Dialect.Sqlite) merged.filename = opts.sqliteFile ?? merged.filename ?? ":memory:";
|
|
118
|
-
return merged;
|
|
119
|
-
}
|
|
120
|
-
if (dialect === Dialect.Postgres) return {
|
|
121
|
-
url: fromOptions.url ?? process.env.TANGO_DATABASE_URL ?? process.env.DATABASE_URL,
|
|
122
|
-
host: fromOptions.host ?? process.env.TANGO_DB_HOST,
|
|
123
|
-
port: fromOptions.port ?? readNumber(process.env.TANGO_DB_PORT),
|
|
124
|
-
database: fromOptions.database ?? process.env.TANGO_DB_NAME,
|
|
125
|
-
user: fromOptions.user ?? process.env.TANGO_DB_USER,
|
|
126
|
-
password: fromOptions.password ?? process.env.TANGO_DB_PASSWORD,
|
|
127
|
-
maxConnections: fromOptions.maxConnections ?? 10
|
|
128
|
-
};
|
|
129
|
-
return {
|
|
130
|
-
filename: opts.sqliteFile ?? fromOptions.filename ?? process.env.TANGO_SQLITE_FILENAME ?? ":memory:",
|
|
131
|
-
maxConnections: fromOptions.maxConnections ?? 1
|
|
132
|
-
};
|
|
133
|
-
}
|
|
134
|
-
|
|
135
|
-
//#endregion
|
|
136
|
-
//#region src/integration/strategies/PostgresHarnessStrategy.ts
|
|
137
|
-
var PostgresHarnessStrategy = class PostgresHarnessStrategy {
|
|
138
|
-
static BRAND = "tango.testing.postgres_harness_strategy";
|
|
139
|
-
__tangoBrand = PostgresHarnessStrategy.BRAND;
|
|
140
|
-
dialect = Dialect.Postgres;
|
|
141
|
-
capabilities = {
|
|
142
|
-
transactionalDDL: true,
|
|
143
|
-
supportsSchemas: true,
|
|
144
|
-
supportsConcurrentIndex: true,
|
|
145
|
-
supportsDeferredFkValidation: true,
|
|
146
|
-
supportsJsonb: true
|
|
147
|
-
};
|
|
148
|
-
static isPostgresHarnessStrategy(value) {
|
|
149
|
-
return typeof value === "object" && value !== null && value.__tangoBrand === PostgresHarnessStrategy.BRAND;
|
|
150
|
-
}
|
|
151
|
-
static buildSchemaName(explicitSchema) {
|
|
152
|
-
if (explicitSchema) return explicitSchema;
|
|
153
|
-
const random = Math.random().toString(36).slice(2, 8);
|
|
154
|
-
return `tango_test_${Date.now()}_${random}`;
|
|
155
|
-
}
|
|
156
|
-
async create(options = {}) {
|
|
157
|
-
const config = resolveAdapterConfig(Dialect.Postgres, {
|
|
158
|
-
config: options.config,
|
|
159
|
-
tangoConfigLoader: options.tangoConfigLoader
|
|
160
|
-
});
|
|
161
|
-
const adapter = new PostgresAdapter();
|
|
162
|
-
const schemaName = PostgresHarnessStrategy.buildSchemaName(options.schema);
|
|
163
|
-
const resetMode = options.resetMode ?? ResetMode.DropSchema;
|
|
164
|
-
let client = null;
|
|
165
|
-
const ensureSearchPath = async () => {
|
|
166
|
-
if (!client) throw new Error("Postgres harness not initialized. Call setup() first.");
|
|
167
|
-
await client.query(`CREATE SCHEMA IF NOT EXISTS \"${schemaName}\"`);
|
|
168
|
-
await client.query(`SET search_path TO \"${schemaName}\"`);
|
|
169
|
-
};
|
|
170
|
-
const recreateSchema = async () => {
|
|
171
|
-
if (!client) throw new Error("Postgres harness not initialized. Call setup() first.");
|
|
172
|
-
await client.query(`DROP SCHEMA IF EXISTS \"${schemaName}\" CASCADE`);
|
|
173
|
-
await client.query(`CREATE SCHEMA \"${schemaName}\"`);
|
|
174
|
-
await client.query(`SET search_path TO \"${schemaName}\"`);
|
|
175
|
-
};
|
|
176
|
-
const harness = {
|
|
177
|
-
dialect: Dialect.Postgres,
|
|
178
|
-
capabilities: this.capabilities,
|
|
179
|
-
resetMode,
|
|
180
|
-
get dbClient() {
|
|
181
|
-
if (!client) throw new Error("Postgres harness not initialized. Call setup() first.");
|
|
182
|
-
return client;
|
|
183
|
-
},
|
|
184
|
-
async setup() {
|
|
185
|
-
client = await adapter.connect(config);
|
|
186
|
-
await ensureSearchPath();
|
|
187
|
-
},
|
|
188
|
-
async reset() {
|
|
189
|
-
if (!client) throw new Error("Postgres harness not initialized. Call setup() first.");
|
|
190
|
-
if (resetMode === ResetMode.DropSchema || resetMode === ResetMode.Transaction) {
|
|
191
|
-
await recreateSchema();
|
|
192
|
-
return;
|
|
193
|
-
}
|
|
194
|
-
const { rows } = await client.query(`SELECT table_name FROM information_schema.tables WHERE table_schema = $1 AND table_type = 'BASE TABLE'`, [schemaName]);
|
|
195
|
-
for (const row of rows) await client.query(`TRUNCATE TABLE \"${schemaName}\".\"${row.table_name}\" RESTART IDENTITY CASCADE`);
|
|
196
|
-
await client.query(`SET search_path TO \"${schemaName}\"`);
|
|
197
|
-
},
|
|
198
|
-
async teardown() {
|
|
199
|
-
if (!client) return;
|
|
200
|
-
try {
|
|
201
|
-
await client.query(`DROP SCHEMA IF EXISTS \"${schemaName}\" CASCADE`);
|
|
202
|
-
} finally {
|
|
203
|
-
await client.close();
|
|
204
|
-
client = null;
|
|
205
|
-
}
|
|
206
|
-
},
|
|
207
|
-
migrationRunner(migrationsDir) {
|
|
208
|
-
if (!client) throw new Error("Postgres harness not initialized. Call setup() first.");
|
|
209
|
-
return new MigrationRunner$1(client, "postgres", migrationsDir);
|
|
210
|
-
}
|
|
211
|
-
};
|
|
212
|
-
return harness;
|
|
213
|
-
}
|
|
214
|
-
};
|
|
215
|
-
|
|
216
|
-
//#endregion
|
|
217
|
-
//#region src/integration/strategies/SqliteHarnessStrategy.ts
|
|
218
|
-
var SqliteHarnessStrategy = class SqliteHarnessStrategy {
|
|
219
|
-
static BRAND = "tango.testing.sqlite_harness_strategy";
|
|
220
|
-
__tangoBrand = SqliteHarnessStrategy.BRAND;
|
|
221
|
-
dialect = Dialect.Sqlite;
|
|
222
|
-
capabilities = {
|
|
223
|
-
transactionalDDL: true,
|
|
224
|
-
supportsSchemas: false,
|
|
225
|
-
supportsConcurrentIndex: false,
|
|
226
|
-
supportsDeferredFkValidation: false,
|
|
227
|
-
supportsJsonb: false
|
|
228
|
-
};
|
|
229
|
-
static isSqliteHarnessStrategy(value) {
|
|
230
|
-
return typeof value === "object" && value !== null && value.__tangoBrand === SqliteHarnessStrategy.BRAND;
|
|
231
|
-
}
|
|
232
|
-
static async dropAllTables(client) {
|
|
233
|
-
const { rows } = await client.query("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'");
|
|
234
|
-
for (const row of rows) await client.query(`DROP TABLE IF EXISTS ${row.name}`);
|
|
235
|
-
}
|
|
236
|
-
async create(options = {}) {
|
|
237
|
-
const config = resolveAdapterConfig(Dialect.Sqlite, {
|
|
238
|
-
config: options.config,
|
|
239
|
-
tangoConfigLoader: options.tangoConfigLoader,
|
|
240
|
-
sqliteFile: options.sqliteFile
|
|
241
|
-
});
|
|
242
|
-
const adapter = new SqliteAdapter();
|
|
243
|
-
const resetMode = options.resetMode ?? ResetMode.DropSchema;
|
|
244
|
-
let client = null;
|
|
245
|
-
const reconnect = async () => {
|
|
246
|
-
client = await adapter.connect(config);
|
|
247
|
-
return client;
|
|
248
|
-
};
|
|
249
|
-
const harness = {
|
|
250
|
-
dialect: Dialect.Sqlite,
|
|
251
|
-
capabilities: this.capabilities,
|
|
252
|
-
resetMode,
|
|
253
|
-
get dbClient() {
|
|
254
|
-
if (!client) throw new Error("Sqlite harness not initialized. Call setup() first.");
|
|
255
|
-
return client;
|
|
256
|
-
},
|
|
257
|
-
async setup() {
|
|
258
|
-
await reconnect();
|
|
259
|
-
},
|
|
260
|
-
async reset() {
|
|
261
|
-
if (!client) throw new Error("Sqlite harness not initialized. Call setup() first.");
|
|
262
|
-
if (resetMode === ResetMode.DropSchema && config.filename && config.filename !== ":memory:") {
|
|
263
|
-
await client.close();
|
|
264
|
-
await rm(config.filename, { force: true });
|
|
265
|
-
await reconnect();
|
|
266
|
-
return;
|
|
267
|
-
}
|
|
268
|
-
await SqliteHarnessStrategy.dropAllTables(client);
|
|
269
|
-
},
|
|
270
|
-
async teardown() {
|
|
271
|
-
if (client) {
|
|
272
|
-
await client.close();
|
|
273
|
-
client = null;
|
|
274
|
-
}
|
|
275
|
-
if (config.filename && config.filename !== ":memory:") await rm(config.filename, { force: true });
|
|
276
|
-
},
|
|
277
|
-
migrationRunner(migrationsDir) {
|
|
278
|
-
if (!client) throw new Error("Sqlite harness not initialized. Call setup() first.");
|
|
279
|
-
return new MigrationRunner(client, "sqlite", migrationsDir);
|
|
280
|
-
}
|
|
281
|
-
};
|
|
282
|
-
return harness;
|
|
283
|
-
}
|
|
284
|
-
};
|
|
285
|
-
|
|
286
|
-
//#endregion
|
|
287
|
-
//#region src/integration/TestHarness.ts
|
|
288
|
-
var TestHarness = class TestHarness {
|
|
289
|
-
static BRAND = "tango.testing.test_harness";
|
|
290
|
-
__tangoBrand = TestHarness.BRAND;
|
|
291
|
-
static defaultRegistry = null;
|
|
292
|
-
static isTestHarness(value) {
|
|
293
|
-
return typeof value === "object" && value !== null && value.__tangoBrand === TestHarness.BRAND;
|
|
294
|
-
}
|
|
295
|
-
static ensureRegistry() {
|
|
296
|
-
if (this.defaultRegistry) return this.defaultRegistry;
|
|
297
|
-
const registry = new HarnessStrategyRegistry();
|
|
298
|
-
registry.register(new SqliteHarnessStrategy());
|
|
299
|
-
registry.register(new PostgresHarnessStrategy());
|
|
300
|
-
this.defaultRegistry = registry;
|
|
301
|
-
return registry;
|
|
302
|
-
}
|
|
303
|
-
static registerStrategy(strategy) {
|
|
304
|
-
this.ensureRegistry().register(strategy);
|
|
305
|
-
}
|
|
306
|
-
static getRegistry() {
|
|
307
|
-
return this.ensureRegistry();
|
|
308
|
-
}
|
|
309
|
-
static async forDialect(args, registry) {
|
|
310
|
-
const selectedRegistry = registry ?? this.ensureRegistry();
|
|
311
|
-
const strategy = selectedRegistry.get(args.dialect);
|
|
312
|
-
return strategy.create(args.options);
|
|
313
|
-
}
|
|
314
|
-
static async sqlite(options) {
|
|
315
|
-
return this.forDialect({
|
|
316
|
-
dialect: Dialect.Sqlite,
|
|
317
|
-
options
|
|
318
|
-
});
|
|
319
|
-
}
|
|
320
|
-
static async postgres(options) {
|
|
321
|
-
return this.forDialect({
|
|
322
|
-
dialect: Dialect.Postgres,
|
|
323
|
-
options
|
|
324
|
-
});
|
|
325
|
-
}
|
|
326
|
-
};
|
|
327
|
-
|
|
328
|
-
//#endregion
|
|
329
|
-
//#region src/integration/orm.ts
|
|
330
|
-
async function seedTable(harness, table, rows) {
|
|
331
|
-
if (rows.length === 0) return;
|
|
332
|
-
const columns = Object.keys(rows[0] ?? {});
|
|
333
|
-
if (columns.length === 0) return;
|
|
334
|
-
for (const row of rows) {
|
|
335
|
-
const values = columns.map((column) => {
|
|
336
|
-
const value = row[column];
|
|
337
|
-
if (harness.dialect === Dialect.Sqlite && typeof value === "boolean") return value ? 1 : 0;
|
|
338
|
-
return value;
|
|
339
|
-
});
|
|
340
|
-
const placeholders = harness.dialect === Dialect.Postgres ? columns.map((_, index) => `$${index + 1}`).join(", ") : columns.map(() => "?").join(", ");
|
|
341
|
-
await harness.dbClient.query(`INSERT INTO ${table} (${columns.join(", ")}) VALUES (${placeholders})`, values);
|
|
342
|
-
}
|
|
343
|
-
}
|
|
344
|
-
function createRepositoryFixture(input) {
|
|
345
|
-
class RepositoryFixture extends Repository {
|
|
346
|
-
meta = input.meta;
|
|
347
|
-
constructor() {
|
|
348
|
-
super(input.harness.dbClient, input.harness.dialect);
|
|
349
|
-
}
|
|
350
|
-
}
|
|
351
|
-
return new RepositoryFixture();
|
|
352
|
-
}
|
|
353
|
-
async function expectQueryResult(actual, expected) {
|
|
354
|
-
const resolved = await actual;
|
|
355
|
-
if (JSON.stringify(resolved) !== JSON.stringify(expected)) throw new Error(`Expected query result ${JSON.stringify(expected)}, got ${JSON.stringify(resolved)}`);
|
|
356
|
-
}
|
|
357
|
-
|
|
358
|
-
//#endregion
|
|
359
|
-
//#region src/integration/index.ts
|
|
360
|
-
var integration_exports = {};
|
|
361
|
-
__export(integration_exports, {
|
|
362
|
-
Dialect: () => Dialect,
|
|
363
|
-
HarnessStrategyRegistry: () => HarnessStrategyRegistry,
|
|
364
|
-
ResetMode: () => ResetMode,
|
|
365
|
-
TestHarness: () => TestHarness,
|
|
366
|
-
applyAndVerifyMigrations: () => applyAndVerifyMigrations,
|
|
367
|
-
assertMigrationPlan: () => assertMigrationPlan,
|
|
368
|
-
createRepositoryFixture: () => createRepositoryFixture,
|
|
369
|
-
domain: () => domain_exports,
|
|
370
|
-
expectQueryResult: () => expectQueryResult,
|
|
371
|
-
introspectSchema: () => introspectSchema,
|
|
372
|
-
migrations: () => migrations_exports,
|
|
373
|
-
seedTable: () => seedTable
|
|
374
|
-
});
|
|
375
|
-
|
|
376
|
-
//#endregion
|
|
377
|
-
export { Dialect, HarnessStrategyRegistry, ResetMode, TestHarness, applyAndVerifyMigrations, assertMigrationPlan, createRepositoryFixture, domain_exports, expectQueryResult, integration_exports, introspectSchema, migrations_exports, seedTable };
|
|
378
|
-
//# sourceMappingURL=integration-CDdpboYz.js.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"integration-CDdpboYz.js","names":["harness: IntegrationHarness","options: AssertMigrationPlanOptions","harness: IntegrationHarness","options: ApplyAndVerifyMigrationsOptions","status: MigrationStatus","harness: IntegrationHarness","value: unknown","strategy: HarnessStrategy","dialect: Dialect | string","value: string | undefined","dialect: Dialect","opts: {\n config?: Partial<AdapterConfig>;\n tangoConfigLoader?: () => unknown;\n sqliteFile?: string;\n }","merged: AdapterConfig","value: unknown","explicitSchema?: string","options: HarnessOptions","client: DBClient | null","harness: IntegrationHarness","migrationsDir: string","value: unknown","client: DBClient","options: HarnessOptions","client: DBClient | null","harness: IntegrationHarness","migrationsDir: string","value: unknown","strategy: HarnessStrategy","args: { dialect: Dialect | string; options?: HarnessOptions }","registry?: HarnessStrategyRegistry","options?: HarnessOptions","harness: IntegrationHarness","table: string","rows: T[]","input: {\n harness: IntegrationHarness;\n meta: RepoMeta;\n}","actual: Promise<T> | T","expected: T"],"sources":["../src/integration/domain/Dialect.ts","../src/integration/domain/ResetMode.ts","../src/integration/domain/index.ts","../src/integration/migrations/AssertMigrationPlan.ts","../src/integration/migrations/ApplyAndVerifyMigrations.ts","../src/integration/migrations/IntrospectSchema.ts","../src/integration/migrations/index.ts","../src/integration/HarnessStrategyRegistry.ts","../src/integration/config.ts","../src/integration/strategies/PostgresHarnessStrategy.ts","../src/integration/strategies/SqliteHarnessStrategy.ts","../src/integration/TestHarness.ts","../src/integration/orm.ts","../src/integration/index.ts"],"sourcesContent":["export enum Dialect {\n Sqlite = 'sqlite',\n Postgres = 'postgres',\n}\n","export enum ResetMode {\n Transaction = 'transaction',\n Truncate = 'truncate',\n DropSchema = 'drop-schema',\n}\n","/**\n * Domain boundary barrel: 
centralizes this subdomain's public contract.\n */\n\nexport { Dialect } from './Dialect';\nexport type { HarnessOptions, HarnessStrategy } from './HarnessStrategy';\nexport type { DialectTestCapabilities, IntegrationHarness } from './IntegrationHarness';\nexport { ResetMode } from './ResetMode';\n","import type { IntegrationHarness } from '../domain';\n\nexport type AssertMigrationPlanOptions = {\n migrationsDir: string;\n expectSqlContains?: string[];\n};\n\nexport async function assertMigrationPlan(\n harness: IntegrationHarness,\n options: AssertMigrationPlanOptions\n): Promise<string> {\n const runner = harness.migrationRunner(options.migrationsDir);\n const plan = await runner.plan();\n\n for (const snippet of options.expectSqlContains ?? []) {\n if (!plan.includes(snippet)) {\n throw new Error(`Expected migration plan to contain: ${snippet}`);\n }\n }\n\n return plan;\n}\n","import type { IntegrationHarness } from '../domain';\n\nexport type ApplyAndVerifyMigrationsOptions = {\n migrationsDir: string;\n toId?: string;\n expectedAppliedIds?: string[];\n};\n\nexport type MigrationStatus = { id: string; applied: boolean };\n\nexport async function applyAndVerifyMigrations(\n harness: IntegrationHarness,\n options: ApplyAndVerifyMigrationsOptions\n): Promise<{ statuses: MigrationStatus[] }> {\n const runner = harness.migrationRunner(options.migrationsDir);\n await runner.apply(options.toId);\n const statuses = await runner.status();\n\n for (const id of options.expectedAppliedIds ?? 
[]) {\n const row = statuses.find((status: MigrationStatus) => status.id === id);\n if (!row || !row.applied) {\n throw new Error(`Expected migration ${id} to be applied`);\n }\n }\n\n return { statuses };\n}\n","import { createDefaultIntrospectorStrategy } from '@danceroutine/tango-migrations';\nimport type { Dialect as MigrationDialect } from '@danceroutine/tango-migrations';\nimport { Dialect, type IntegrationHarness } from '../domain';\n\nconst introspectorStrategy = createDefaultIntrospectorStrategy();\n\nexport async function introspectSchema(harness: IntegrationHarness): Promise<unknown> {\n if (harness.dialect !== Dialect.Postgres && harness.dialect !== Dialect.Sqlite) {\n throw new Error(`No introspector registered for dialect: ${String(harness.dialect)}`);\n }\n const dialect = harness.dialect === Dialect.Postgres ? 'postgres' : 'sqlite';\n return introspectorStrategy.introspect(dialect as unknown as MigrationDialect, harness.dbClient);\n}\n","/**\n * Domain boundary barrel: centralizes this subdomain's public contract.\n */\n\nexport { assertMigrationPlan, type AssertMigrationPlanOptions } from './AssertMigrationPlan';\nexport {\n applyAndVerifyMigrations,\n type ApplyAndVerifyMigrationsOptions,\n type MigrationStatus,\n} from './ApplyAndVerifyMigrations';\nexport { introspectSchema } from './IntrospectSchema';\n","import type { Dialect, HarnessStrategy } from './domain';\n\nexport class HarnessStrategyRegistry {\n static readonly BRAND = 'tango.testing.harness_strategy_registry' as const;\n readonly __tangoBrand: typeof HarnessStrategyRegistry.BRAND = HarnessStrategyRegistry.BRAND;\n private readonly strategies = new Map<string, HarnessStrategy>();\n\n static isHarnessStrategyRegistry(value: unknown): value is HarnessStrategyRegistry {\n return (\n typeof value === 'object' &&\n value !== null &&\n (value as { __tangoBrand?: unknown }).__tangoBrand === HarnessStrategyRegistry.BRAND\n );\n }\n\n register(strategy: HarnessStrategy): this {\n 
this.strategies.set(String(strategy.dialect), strategy);\n return this;\n }\n\n get(dialect: Dialect | string): HarnessStrategy {\n const strategy = this.strategies.get(String(dialect));\n if (!strategy) {\n throw new Error(`No harness strategy registered for dialect: ${String(dialect)}`);\n }\n return strategy;\n }\n\n list(): readonly HarnessStrategy[] {\n return [...this.strategies.values()];\n }\n}\n","import type { AdapterConfig } from '@danceroutine/tango-orm';\nimport { loadConfig } from '@danceroutine/tango-config';\nimport { Dialect } from './domain';\n\nfunction readNumber(value: string | undefined): number | undefined {\n if (!value) return undefined;\n const parsed = Number(value);\n return Number.isFinite(parsed) ? parsed : undefined;\n}\n\nexport function resolveAdapterConfig(\n dialect: Dialect,\n opts: {\n config?: Partial<AdapterConfig>;\n tangoConfigLoader?: () => unknown;\n sqliteFile?: string;\n }\n): AdapterConfig {\n const fromOptions = opts.config ?? {};\n\n if (opts.tangoConfigLoader) {\n const loaded = loadConfig(opts.tangoConfigLoader);\n const current = loaded.current.db;\n const merged: AdapterConfig = {\n url: fromOptions.url ?? current.url,\n host: fromOptions.host ?? current.host,\n port: fromOptions.port ?? current.port,\n database: fromOptions.database ?? current.database,\n user: fromOptions.user ?? current.user,\n password: fromOptions.password ?? current.password,\n filename: fromOptions.filename ?? current.filename,\n maxConnections: fromOptions.maxConnections ?? current.maxConnections,\n };\n if (dialect === Dialect.Sqlite) {\n merged.filename = opts.sqliteFile ?? merged.filename ?? ':memory:';\n }\n return merged;\n }\n\n if (dialect === Dialect.Postgres) {\n return {\n url: fromOptions.url ?? process.env.TANGO_DATABASE_URL ?? process.env.DATABASE_URL,\n host: fromOptions.host ?? process.env.TANGO_DB_HOST,\n port: fromOptions.port ?? readNumber(process.env.TANGO_DB_PORT),\n database: fromOptions.database ?? 
process.env.TANGO_DB_NAME,\n user: fromOptions.user ?? process.env.TANGO_DB_USER,\n password: fromOptions.password ?? process.env.TANGO_DB_PASSWORD,\n maxConnections: fromOptions.maxConnections ?? 10,\n };\n }\n\n return {\n filename: opts.sqliteFile ?? fromOptions.filename ?? process.env.TANGO_SQLITE_FILENAME ?? ':memory:',\n maxConnections: fromOptions.maxConnections ?? 1,\n };\n}\n","import { MigrationRunner } from '@danceroutine/tango-migrations';\nimport type { Dialect as MigrationDialect } from '@danceroutine/tango-migrations';\nimport { PostgresAdapter } from '@danceroutine/tango-orm/connection';\nimport type { DBClient } from '@danceroutine/tango-orm';\nimport { resolveAdapterConfig } from '../config';\nimport {\n Dialect,\n ResetMode,\n type DialectTestCapabilities,\n type HarnessOptions,\n type HarnessStrategy,\n type IntegrationHarness,\n} from '../domain';\nexport class PostgresHarnessStrategy implements HarnessStrategy {\n static readonly BRAND = 'tango.testing.postgres_harness_strategy' as const;\n readonly __tangoBrand: typeof PostgresHarnessStrategy.BRAND = PostgresHarnessStrategy.BRAND;\n readonly dialect: Dialect = Dialect.Postgres;\n readonly capabilities: DialectTestCapabilities = {\n transactionalDDL: true,\n supportsSchemas: true,\n supportsConcurrentIndex: true,\n supportsDeferredFkValidation: true,\n supportsJsonb: true,\n };\n\n static isPostgresHarnessStrategy(value: unknown): value is PostgresHarnessStrategy {\n return (\n typeof value === 'object' &&\n value !== null &&\n (value as { __tangoBrand?: unknown }).__tangoBrand === PostgresHarnessStrategy.BRAND\n );\n }\n\n private static buildSchemaName(explicitSchema?: string): string {\n if (explicitSchema) return explicitSchema;\n const random = Math.random().toString(36).slice(2, 8);\n return `tango_test_${Date.now()}_${random}`;\n }\n\n async create(options: HarnessOptions = {}): Promise<IntegrationHarness> {\n const config = resolveAdapterConfig(Dialect.Postgres, {\n config: 
options.config,\n tangoConfigLoader: options.tangoConfigLoader,\n });\n\n const adapter = new PostgresAdapter();\n const schemaName = PostgresHarnessStrategy.buildSchemaName(options.schema);\n const resetMode = options.resetMode ?? ResetMode.DropSchema;\n let client: DBClient | null = null;\n\n const ensureSearchPath = async (): Promise<void> => {\n if (!client) {\n throw new Error('Postgres harness not initialized. Call setup() first.');\n }\n await client.query(`CREATE SCHEMA IF NOT EXISTS \\\"${schemaName}\\\"`);\n await client.query(`SET search_path TO \\\"${schemaName}\\\"`);\n };\n\n const recreateSchema = async (): Promise<void> => {\n if (!client) {\n throw new Error('Postgres harness not initialized. Call setup() first.');\n }\n await client.query(`DROP SCHEMA IF EXISTS \\\"${schemaName}\\\" CASCADE`);\n await client.query(`CREATE SCHEMA \\\"${schemaName}\\\"`);\n await client.query(`SET search_path TO \\\"${schemaName}\\\"`);\n };\n\n const harness: IntegrationHarness = {\n dialect: Dialect.Postgres,\n capabilities: this.capabilities,\n resetMode,\n get dbClient(): DBClient {\n if (!client) {\n throw new Error('Postgres harness not initialized. Call setup() first.');\n }\n return client;\n },\n async setup(): Promise<void> {\n client = await adapter.connect(config);\n await ensureSearchPath();\n },\n async reset(): Promise<void> {\n if (!client) {\n throw new Error('Postgres harness not initialized. 
Call setup() first.');\n }\n if (resetMode === ResetMode.DropSchema || resetMode === ResetMode.Transaction) {\n await recreateSchema();\n return;\n }\n\n const { rows } = await client.query<{ table_name: string }>(\n `SELECT table_name FROM information_schema.tables WHERE table_schema = $1 AND table_type = 'BASE TABLE'`,\n [schemaName]\n );\n for (const row of rows) {\n await client.query(\n `TRUNCATE TABLE \\\"${schemaName}\\\".\\\"${row.table_name}\\\" RESTART IDENTITY CASCADE`\n );\n }\n await client.query(`SET search_path TO \\\"${schemaName}\\\"`);\n },\n async teardown(): Promise<void> {\n if (!client) return;\n try {\n await client.query(`DROP SCHEMA IF EXISTS \\\"${schemaName}\\\" CASCADE`);\n } finally {\n await client.close();\n client = null;\n }\n },\n migrationRunner(migrationsDir: string): MigrationRunner {\n if (!client) {\n throw new Error('Postgres harness not initialized. Call setup() first.');\n }\n return new MigrationRunner(client, 'postgres' as MigrationDialect, migrationsDir);\n },\n };\n\n return harness;\n }\n}\n","import { rm } from 'node:fs/promises';\nimport { MigrationRunner } from '@danceroutine/tango-migrations';\nimport type { Dialect as MigrationDialect } from '@danceroutine/tango-migrations';\nimport { SqliteAdapter } from '@danceroutine/tango-orm/connection';\nimport type { DBClient } from '@danceroutine/tango-orm';\nimport { resolveAdapterConfig } from '../config';\nimport {\n Dialect,\n ResetMode,\n type DialectTestCapabilities,\n type HarnessOptions,\n type HarnessStrategy,\n type IntegrationHarness,\n} from '../domain';\nexport class SqliteHarnessStrategy implements HarnessStrategy {\n static readonly BRAND = 'tango.testing.sqlite_harness_strategy' as const;\n readonly __tangoBrand: typeof SqliteHarnessStrategy.BRAND = SqliteHarnessStrategy.BRAND;\n readonly dialect: Dialect = Dialect.Sqlite;\n readonly capabilities: DialectTestCapabilities = {\n transactionalDDL: true,\n supportsSchemas: false,\n supportsConcurrentIndex: 
false,\n supportsDeferredFkValidation: false,\n supportsJsonb: false,\n };\n\n static isSqliteHarnessStrategy(value: unknown): value is SqliteHarnessStrategy {\n return (\n typeof value === 'object' &&\n value !== null &&\n (value as { __tangoBrand?: unknown }).__tangoBrand === SqliteHarnessStrategy.BRAND\n );\n }\n\n private static async dropAllTables(client: DBClient): Promise<void> {\n const { rows } = await client.query<{ name: string }>(\n \"SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'\"\n );\n for (const row of rows) {\n await client.query(`DROP TABLE IF EXISTS ${row.name}`);\n }\n }\n\n async create(options: HarnessOptions = {}): Promise<IntegrationHarness> {\n const config = resolveAdapterConfig(Dialect.Sqlite, {\n config: options.config,\n tangoConfigLoader: options.tangoConfigLoader,\n sqliteFile: options.sqliteFile,\n });\n\n const adapter = new SqliteAdapter();\n const resetMode = options.resetMode ?? ResetMode.DropSchema;\n let client: DBClient | null = null;\n\n const reconnect = async (): Promise<DBClient> => {\n client = await adapter.connect(config);\n return client;\n };\n\n const harness: IntegrationHarness = {\n dialect: Dialect.Sqlite,\n capabilities: this.capabilities,\n resetMode,\n get dbClient(): DBClient {\n if (!client) {\n throw new Error('Sqlite harness not initialized. Call setup() first.');\n }\n return client;\n },\n async setup(): Promise<void> {\n await reconnect();\n },\n async reset(): Promise<void> {\n if (!client) {\n throw new Error('Sqlite harness not initialized. 
Call setup() first.');\n }\n\n if (resetMode === ResetMode.DropSchema && config.filename && config.filename !== ':memory:') {\n await client.close();\n await rm(config.filename, { force: true });\n await reconnect();\n return;\n }\n\n await SqliteHarnessStrategy.dropAllTables(client);\n },\n async teardown(): Promise<void> {\n if (client) {\n await client.close();\n client = null;\n }\n if (config.filename && config.filename !== ':memory:') {\n await rm(config.filename, { force: true });\n }\n },\n migrationRunner(migrationsDir: string): MigrationRunner {\n if (!client) {\n throw new Error('Sqlite harness not initialized. Call setup() first.');\n }\n return new MigrationRunner(client, 'sqlite' as MigrationDialect, migrationsDir);\n },\n };\n\n return harness;\n }\n}\n","import { HarnessStrategyRegistry } from './HarnessStrategyRegistry';\nimport { Dialect, type HarnessOptions, type HarnessStrategy, type IntegrationHarness } from './domain';\nimport { PostgresHarnessStrategy } from './strategies/PostgresHarnessStrategy';\nimport { SqliteHarnessStrategy } from './strategies/SqliteHarnessStrategy';\n\nexport class TestHarness {\n static readonly BRAND = 'tango.testing.test_harness' as const;\n readonly __tangoBrand: typeof TestHarness.BRAND = TestHarness.BRAND;\n private static defaultRegistry: HarnessStrategyRegistry | null = null;\n\n static isTestHarness(value: unknown): value is TestHarness {\n return (\n typeof value === 'object' &&\n value !== null &&\n (value as { __tangoBrand?: unknown }).__tangoBrand === TestHarness.BRAND\n );\n }\n\n private static ensureRegistry(): HarnessStrategyRegistry {\n if (this.defaultRegistry) return this.defaultRegistry;\n\n const registry = new HarnessStrategyRegistry();\n registry.register(new SqliteHarnessStrategy());\n registry.register(new PostgresHarnessStrategy());\n this.defaultRegistry = registry;\n return registry;\n }\n\n static registerStrategy(strategy: HarnessStrategy): void {\n 
this.ensureRegistry().register(strategy);\n }\n\n static getRegistry(): HarnessStrategyRegistry {\n return this.ensureRegistry();\n }\n\n static async forDialect(\n args: { dialect: Dialect | string; options?: HarnessOptions },\n registry?: HarnessStrategyRegistry\n ): Promise<IntegrationHarness> {\n const selectedRegistry = registry ?? this.ensureRegistry();\n const strategy = selectedRegistry.get(args.dialect);\n return strategy.create(args.options);\n }\n\n static async sqlite(options?: HarnessOptions): Promise<IntegrationHarness> {\n return this.forDialect({ dialect: Dialect.Sqlite, options });\n }\n\n static async postgres(options?: HarnessOptions): Promise<IntegrationHarness> {\n return this.forDialect({ dialect: Dialect.Postgres, options });\n }\n}\n","import { Repository } from '@danceroutine/tango-orm';\nimport type { RepoMeta } from '@danceroutine/tango-orm/query';\nimport { Dialect, type IntegrationHarness } from './domain';\n\nexport async function seedTable<T extends Record<string, unknown>>(\n harness: IntegrationHarness,\n table: string,\n rows: T[]\n): Promise<void> {\n if (rows.length === 0) {\n return;\n }\n\n const columns = Object.keys(rows[0] ?? {});\n if (columns.length === 0) {\n return;\n }\n\n for (const row of rows) {\n const values = columns.map((column) => {\n const value = row[column];\n if (harness.dialect === Dialect.Sqlite && typeof value === 'boolean') {\n return value ? 1 : 0;\n }\n return value;\n });\n const placeholders =\n harness.dialect === Dialect.Postgres\n ? 
columns.map((_, index) => `$${index + 1}`).join(', ')\n : columns.map(() => '?').join(', ');\n\n await harness.dbClient.query(\n `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`,\n values as unknown[]\n );\n }\n}\n\nexport function createRepositoryFixture<TModel extends Record<string, any>>(input: {\n harness: IntegrationHarness;\n meta: RepoMeta;\n}): Repository<TModel> {\n class RepositoryFixture extends Repository<TModel> {\n meta = input.meta;\n\n constructor() {\n super(input.harness.dbClient, input.harness.dialect as Dialect);\n }\n }\n\n return new RepositoryFixture();\n}\n\nexport async function expectQueryResult<T>(actual: Promise<T> | T, expected: T): Promise<void> {\n const resolved = await actual;\n\n if (JSON.stringify(resolved) !== JSON.stringify(expected)) {\n throw new Error(`Expected query result ${JSON.stringify(expected)}, got ${JSON.stringify(resolved)}`);\n }\n}\n","/**\n * Domain boundary barrel: exposes namespaced exports for Django-style drill-down\n * imports and curated flat exports for TS-native ergonomics.\n */\n\nexport * as domain from './domain/index';\nexport * as migrations from './migrations/index';\n\nexport * from './domain/index';\nexport * from './HarnessStrategyRegistry';\nexport * from './TestHarness';\nexport * from './migrations/index';\nexport * from 
'./orm';\n"],"mappings":";;;;;;;;IAAY,UAAA,SAAA,WAAL;AACH,WAAA,YAAA;AACA,WAAA,cAAA;AAAA,QAAA;AACH,EAAA,CAAA,EAAA;;;;ICHW,YAAA,SAAA,aAAL;AACH,aAAA,iBAAA;AACA,aAAA,cAAA;AACA,aAAA,gBAAA;AAAA,QAAA;AACH,EAAA,CAAA,EAAA;;;;;;;;;;;;ACGM,eAAe,oBAClBA,SACAC,SACe;CACf,MAAM,SAAS,QAAQ,gBAAgB,QAAQ,cAAc;CAC7D,MAAM,OAAO,MAAM,OAAO,MAAM;AAEhC,MAAK,MAAM,WAAW,QAAQ,qBAAqB,CAAE,EACjD,MAAK,KAAK,SAAS,QAAQ,CACvB,OAAM,IAAI,OAAO,sCAAsC,QAAQ;AAIvE,QAAO;AACV;;;;ACXM,eAAe,yBAClBC,SACAC,SACwC;CACxC,MAAM,SAAS,QAAQ,gBAAgB,QAAQ,cAAc;AAC7D,OAAM,OAAO,MAAM,QAAQ,KAAK;CAChC,MAAM,WAAW,MAAM,OAAO,QAAQ;AAEtC,MAAK,MAAM,MAAM,QAAQ,sBAAsB,CAAE,GAAE;EAC/C,MAAM,MAAM,SAAS,KAAK,CAACC,WAA4B,OAAO,OAAO,GAAG;AACxE,OAAK,QAAQ,IAAI,QACb,OAAM,IAAI,OAAO,qBAAqB,GAAG;CAEhD;AAED,QAAO,EAAE,SAAU;AACtB;;;;ACtBD,MAAM,uBAAuB,mCAAmC;AAEzD,eAAe,iBAAiBC,SAA+C;AAClF,KAAI,QAAQ,YAAY,QAAQ,YAAY,QAAQ,YAAY,QAAQ,OACpE,OAAM,IAAI,OAAO,0CAA0C,OAAO,QAAQ,QAAQ,CAAC;CAEvF,MAAM,UAAU,QAAQ,YAAY,QAAQ,WAAW,aAAa;AACpE,QAAO,qBAAqB,WAAW,SAAwC,QAAQ,SAAS;AACnG;;;;;;;;;;;;;ICVY,0BAAN,MAAM,wBAAwB;CACjC,OAAgB,QAAQ;CACxB,eAA8D,wBAAwB;CACtF,aAA8B,IAAI;CAElC,OAAO,0BAA0BC,OAAkD;AAC/E,gBACW,UAAU,YACjB,UAAU,QACT,MAAqC,iBAAiB,wBAAwB;CAEtF;CAED,SAASC,UAAiC;AACtC,OAAK,WAAW,IAAI,OAAO,SAAS,QAAQ,EAAE,SAAS;AACvD,SAAO;CACV;CAED,IAAIC,SAA4C;EAC5C,MAAM,WAAW,KAAK,WAAW,IAAI,OAAO,QAAQ,CAAC;AACrD,OAAK,SACD,OAAM,IAAI,OAAO,8CAA8C,OAAO,QAAQ,CAAC;AAEnF,SAAO;CACV;CAED,OAAmC;AAC/B,SAAO,CAAC,GAAG,KAAK,WAAW,QAAQ,AAAC;CACvC;AACJ;;;;AC3BD,SAAS,WAAWC,OAA+C;AAC/D,MAAK,MAAO,QAAO;CACnB,MAAM,SAAS,OAAO,MAAM;AAC5B,QAAO,OAAO,SAAS,OAAO,GAAG,SAAS;AAC7C;AAEM,SAAS,qBACZC,SACAC,MAKa;CACb,MAAM,cAAc,KAAK,UAAU,CAAE;AAErC,KAAI,KAAK,mBAAmB;EACxB,MAAM,SAAS,WAAW,KAAK,kBAAkB;EACjD,MAAM,UAAU,OAAO,QAAQ;EAC/B,MAAMC,SAAwB;GAC1B,KAAK,YAAY,OAAO,QAAQ;GAChC,MAAM,YAAY,QAAQ,QAAQ;GAClC,MAAM,YAAY,QAAQ,QAAQ;GAClC,UAAU,YAAY,YAAY,QAAQ;GAC1C,MAAM,YAAY,QAAQ,QAAQ;GAClC,UAAU,YAAY,YAAY,QAAQ;GAC1C,UAAU,YAAY,YAAY,QAAQ;GAC1C,gBAAgB,YAAY,kBAAkB,QAAQ;EACzD;AACD,MAAI,YAAY,QAAQ,OACpB,QAAO,WAAW,KAAK,cAAc,OAAO,YAAY;AAE5D,SAAO;CACV;AAED,KAAI,
YAAY,QAAQ,SACpB,QAAO;EACH,KAAK,YAAY,OAAO,QAAQ,IAAI,sBAAsB,QAAQ,IAAI;EACtE,MAAM,YAAY,QAAQ,QAAQ,IAAI;EACtC,MAAM,YAAY,QAAQ,WAAW,QAAQ,IAAI,cAAc;EAC/D,UAAU,YAAY,YAAY,QAAQ,IAAI;EAC9C,MAAM,YAAY,QAAQ,QAAQ,IAAI;EACtC,UAAU,YAAY,YAAY,QAAQ,IAAI;EAC9C,gBAAgB,YAAY,kBAAkB;CACjD;AAGL,QAAO;EACH,UAAU,KAAK,cAAc,YAAY,YAAY,QAAQ,IAAI,yBAAyB;EAC1F,gBAAgB,YAAY,kBAAkB;CACjD;AACJ;;;;IC1CY,0BAAN,MAAM,wBAAmD;CAC5D,OAAgB,QAAQ;CACxB,eAA8D,wBAAwB;CACtF,UAA4B,QAAQ;CACpC,eAAiD;EAC7C,kBAAkB;EAClB,iBAAiB;EACjB,yBAAyB;EACzB,8BAA8B;EAC9B,eAAe;CAClB;CAED,OAAO,0BAA0BC,OAAkD;AAC/E,gBACW,UAAU,YACjB,UAAU,QACT,MAAqC,iBAAiB,wBAAwB;CAEtF;CAED,OAAe,gBAAgBC,gBAAiC;AAC5D,MAAI,eAAgB,QAAO;EAC3B,MAAM,SAAS,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,GAAG,EAAE;AACrD,UAAQ,aAAa,KAAK,KAAK,CAAC,GAAG,OAAO;CAC7C;CAED,MAAM,OAAOC,UAA0B,CAAE,GAA+B;EACpE,MAAM,SAAS,qBAAqB,QAAQ,UAAU;GAClD,QAAQ,QAAQ;GAChB,mBAAmB,QAAQ;EAC9B,EAAC;EAEF,MAAM,UAAU,IAAI;EACpB,MAAM,aAAa,wBAAwB,gBAAgB,QAAQ,OAAO;EAC1E,MAAM,YAAY,QAAQ,aAAa,UAAU;EACjD,IAAIC,SAA0B;EAE9B,MAAM,mBAAmB,YAA2B;AAChD,QAAK,OACD,OAAM,IAAI,MAAM;AAEpB,SAAM,OAAO,OAAO,gCAAgC,WAAW,IAAI;AACnE,SAAM,OAAO,OAAO,uBAAuB,WAAW,IAAI;EAC7D;EAED,MAAM,iBAAiB,YAA2B;AAC9C,QAAK,OACD,OAAM,IAAI,MAAM;AAEpB,SAAM,OAAO,OAAO,0BAA0B,WAAW,YAAY;AACrE,SAAM,OAAO,OAAO,kBAAkB,WAAW,IAAI;AACrD,SAAM,OAAO,OAAO,uBAAuB,WAAW,IAAI;EAC7D;EAED,MAAMC,UAA8B;GAChC,SAAS,QAAQ;GACjB,cAAc,KAAK;GACnB;GACA,IAAI,WAAqB;AACrB,SAAK,OACD,OAAM,IAAI,MAAM;AAEpB,WAAO;GACV;GACD,MAAM,QAAuB;AACzB,aAAS,MAAM,QAAQ,QAAQ,OAAO;AACtC,UAAM,kBAAkB;GAC3B;GACD,MAAM,QAAuB;AACzB,SAAK,OACD,OAAM,IAAI,MAAM;AAEpB,QAAI,cAAc,UAAU,cAAc,cAAc,UAAU,aAAa;AAC3E,WAAM,gBAAgB;AACtB;IACH;IAED,MAAM,EAAE,MAAM,GAAG,MAAM,OAAO,OACzB,yGACD,CAAC,UAAW,EACf;AACD,SAAK,MAAM,OAAO,KACd,OAAM,OAAO,OACR,mBAAmB,WAAW,OAAO,IAAI,WAAW,6BACxD;AAEL,UAAM,OAAO,OAAO,uBAAuB,WAAW,IAAI;GAC7D;GACD,MAAM,WAA0B;AAC5B,SAAK,OAAQ;AACb,QAAI;AACA,WAAM,OAAO,OAAO,0BAA0B,WAAW,YAAY;IACxE,UAAS;AACN,WAAM,OAAO,OAAO;AACpB,cAAS;IACZ;GACJ;GACD,gBAAgBC,eAAwC;AACpD,SAAK,OACD,OAAM,IAAI,MAAM;AAEpB,WAAO,IAAI,kBAAgB,QAAQ,YAAgC;GACtE;EACJ;AA
ED,SAAO;CACV;AACJ;;;;IC1GY,wBAAN,MAAM,sBAAiD;CAC1D,OAAgB,QAAQ;CACxB,eAA4D,sBAAsB;CAClF,UAA4B,QAAQ;CACpC,eAAiD;EAC7C,kBAAkB;EAClB,iBAAiB;EACjB,yBAAyB;EACzB,8BAA8B;EAC9B,eAAe;CAClB;CAED,OAAO,wBAAwBC,OAAgD;AAC3E,gBACW,UAAU,YACjB,UAAU,QACT,MAAqC,iBAAiB,sBAAsB;CAEpF;CAED,aAAqB,cAAcC,QAAiC;EAChE,MAAM,EAAE,MAAM,GAAG,MAAM,OAAO,MAC1B,iFACH;AACD,OAAK,MAAM,OAAO,KACd,OAAM,OAAO,OAAO,uBAAuB,IAAI,KAAK,EAAE;CAE7D;CAED,MAAM,OAAOC,UAA0B,CAAE,GAA+B;EACpE,MAAM,SAAS,qBAAqB,QAAQ,QAAQ;GAChD,QAAQ,QAAQ;GAChB,mBAAmB,QAAQ;GAC3B,YAAY,QAAQ;EACvB,EAAC;EAEF,MAAM,UAAU,IAAI;EACpB,MAAM,YAAY,QAAQ,aAAa,UAAU;EACjD,IAAIC,SAA0B;EAE9B,MAAM,YAAY,YAA+B;AAC7C,YAAS,MAAM,QAAQ,QAAQ,OAAO;AACtC,UAAO;EACV;EAED,MAAMC,UAA8B;GAChC,SAAS,QAAQ;GACjB,cAAc,KAAK;GACnB;GACA,IAAI,WAAqB;AACrB,SAAK,OACD,OAAM,IAAI,MAAM;AAEpB,WAAO;GACV;GACD,MAAM,QAAuB;AACzB,UAAM,WAAW;GACpB;GACD,MAAM,QAAuB;AACzB,SAAK,OACD,OAAM,IAAI,MAAM;AAGpB,QAAI,cAAc,UAAU,cAAc,OAAO,YAAY,OAAO,aAAa,YAAY;AACzF,WAAM,OAAO,OAAO;AACpB,WAAM,GAAG,OAAO,UAAU,EAAE,OAAO,KAAM,EAAC;AAC1C,WAAM,WAAW;AACjB;IACH;AAED,UAAM,sBAAsB,cAAc,OAAO;GACpD;GACD,MAAM,WAA0B;AAC5B,QAAI,QAAQ;AACR,WAAM,OAAO,OAAO;AACpB,cAAS;IACZ;AACD,QAAI,OAAO,YAAY,OAAO,aAAa,WACvC,OAAM,GAAG,OAAO,UAAU,EAAE,OAAO,KAAM,EAAC;GAEjD;GACD,gBAAgBC,eAAwC;AACpD,SAAK,OACD,OAAM,IAAI,MAAM;AAEpB,WAAO,IAAI,gBAAgB,QAAQ,UAA8B;GACpE;EACJ;AAED,SAAO;CACV;AACJ;;;;ICpGY,cAAN,MAAM,YAAY;CACrB,OAAgB,QAAQ;CACxB,eAAkD,YAAY;CAC9D,OAAe,kBAAkD;CAEjE,OAAO,cAAcC,OAAsC;AACvD,gBACW,UAAU,YACjB,UAAU,QACT,MAAqC,iBAAiB,YAAY;CAE1E;CAED,OAAe,iBAA0C;AACrD,MAAI,KAAK,gBAAiB,QAAO,KAAK;EAEtC,MAAM,WAAW,IAAI;AACrB,WAAS,SAAS,IAAI,wBAAwB;AAC9C,WAAS,SAAS,IAAI,0BAA0B;AAChD,OAAK,kBAAkB;AACvB,SAAO;CACV;CAED,OAAO,iBAAiBC,UAAiC;AACrD,OAAK,gBAAgB,CAAC,SAAS,SAAS;CAC3C;CAED,OAAO,cAAuC;AAC1C,SAAO,KAAK,gBAAgB;CAC/B;CAED,aAAa,WACTC,MACAC,UAC2B;EAC3B,MAAM,mBAAmB,YAAY,KAAK,gBAAgB;EAC1D,MAAM,WAAW,iBAAiB,IAAI,KAAK,QAAQ;AACnD,SAAO,SAAS,OAAO,KAAK,QAAQ;CACvC;CAED,aAAa,OAAOC,SAAuD;AACvE,SAAO,KAAK,WAAW;GAAE,SAAS,QAAQ;GAAQ;EAAS,EAAC;CAC/D;CAED,aAAa,SAASA,SAAuD;AACzE,SAAO,KAAK,WAAW;GAA
E,SAAS,QAAQ;GAAU;EAAS,EAAC;CACjE;AACJ;;;;AChDM,eAAe,UAClBC,SACAC,OACAC,MACa;AACb,KAAI,KAAK,WAAW,EAChB;CAGJ,MAAM,UAAU,OAAO,KAAK,KAAK,MAAM,CAAE,EAAC;AAC1C,KAAI,QAAQ,WAAW,EACnB;AAGJ,MAAK,MAAM,OAAO,MAAM;EACpB,MAAM,SAAS,QAAQ,IAAI,CAAC,WAAW;GACnC,MAAM,QAAQ,IAAI;AAClB,OAAI,QAAQ,YAAY,QAAQ,iBAAiB,UAAU,UACvD,QAAO,QAAQ,IAAI;AAEvB,UAAO;EACV,EAAC;EACF,MAAM,eACF,QAAQ,YAAY,QAAQ,WACtB,QAAQ,IAAI,CAAC,GAAG,WAAW,GAAG,QAAQ,EAAE,EAAE,CAAC,KAAK,KAAK,GACrD,QAAQ,IAAI,MAAM,IAAI,CAAC,KAAK,KAAK;AAE3C,QAAM,QAAQ,SAAS,OAClB,cAAc,MAAM,IAAI,QAAQ,KAAK,KAAK,CAAC,YAAY,aAAa,IACrE,OACH;CACJ;AACJ;AAEM,SAAS,wBAA4DC,OAGrD;CACnB,MAAM,0BAA0B,WAAmB;EAC/C,OAAO,MAAM;EAEb,cAAc;AACV,SAAM,MAAM,QAAQ,UAAU,MAAM,QAAQ,QAAmB;EAClE;CACJ;AAED,QAAO,IAAI;AACd;AAEM,eAAe,kBAAqBC,QAAwBC,UAA4B;CAC3F,MAAM,WAAW,MAAM;AAEvB,KAAI,KAAK,UAAU,SAAS,KAAK,KAAK,UAAU,SAAS,CACrD,OAAM,IAAI,OAAO,wBAAwB,KAAK,UAAU,SAAS,CAAC,QAAQ,KAAK,UAAU,SAAS,CAAC;AAE1G"}
|
package/dist/mocks/DBClient.js
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|
|
@@ -1,12 +0,0 @@
|
|
|
1
|
-
export type RepositoryLike<TModel extends Record<string, unknown>> = {
|
|
2
|
-
meta: {
|
|
3
|
-
table: string;
|
|
4
|
-
pk: keyof TModel & string;
|
|
5
|
-
columns: Record<string, string>;
|
|
6
|
-
};
|
|
7
|
-
query(): unknown;
|
|
8
|
-
findById(id: string): Promise<TModel | null>;
|
|
9
|
-
create(input: Partial<TModel>): Promise<TModel>;
|
|
10
|
-
update(id: string, patch: Partial<TModel>): Promise<TModel>;
|
|
11
|
-
delete(id: string): Promise<void>;
|
|
12
|
-
};
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|
|
@@ -1,13 +0,0 @@
|
|
|
1
|
-
export function aMockDBClient(overrides = {}) {
|
|
2
|
-
const client = {
|
|
3
|
-
query: async () => ({ rows: [] }),
|
|
4
|
-
begin: async () => { },
|
|
5
|
-
commit: async () => { },
|
|
6
|
-
rollback: async () => { },
|
|
7
|
-
close: async () => { },
|
|
8
|
-
};
|
|
9
|
-
return {
|
|
10
|
-
...client,
|
|
11
|
-
...overrides,
|
|
12
|
-
};
|
|
13
|
-
}
|
|
@@ -1,15 +0,0 @@
|
|
|
1
|
-
export function aMockQuerySet(overrides = {}) {
|
|
2
|
-
const queryset = {
|
|
3
|
-
filter: () => queryset,
|
|
4
|
-
orderBy: () => queryset,
|
|
5
|
-
limit: () => queryset,
|
|
6
|
-
offset: () => queryset,
|
|
7
|
-
fetch: async () => ({ results: [], nextCursor: null }),
|
|
8
|
-
fetchOne: async () => null,
|
|
9
|
-
count: async () => 0,
|
|
10
|
-
};
|
|
11
|
-
return {
|
|
12
|
-
...queryset,
|
|
13
|
-
...overrides,
|
|
14
|
-
};
|
|
15
|
-
}
|
|
@@ -1,20 +0,0 @@
|
|
|
1
|
-
import { aMockQuerySet } from './aMockQuerySet';
|
|
2
|
-
export function aMockRepository(overrides = {}) {
|
|
3
|
-
const defaultQuerySet = aMockQuerySet();
|
|
4
|
-
const repository = {
|
|
5
|
-
meta: {
|
|
6
|
-
table: 'mock_table',
|
|
7
|
-
pk: 'id',
|
|
8
|
-
columns: {},
|
|
9
|
-
},
|
|
10
|
-
query: () => defaultQuerySet,
|
|
11
|
-
findById: async () => null,
|
|
12
|
-
create: async (input) => input,
|
|
13
|
-
update: async (_id, patch) => patch,
|
|
14
|
-
delete: async () => { },
|
|
15
|
-
};
|
|
16
|
-
return {
|
|
17
|
-
...repository,
|
|
18
|
-
...overrides,
|
|
19
|
-
};
|
|
20
|
-
}
|
package/dist/mocks/types.d.ts
DELETED
|
@@ -1,33 +0,0 @@
|
|
|
1
|
-
export interface DBClient {
|
|
2
|
-
query<T = unknown>(sql: string, params?: unknown[]): Promise<{
|
|
3
|
-
rows: T[];
|
|
4
|
-
}>;
|
|
5
|
-
begin(): Promise<void>;
|
|
6
|
-
commit(): Promise<void>;
|
|
7
|
-
rollback(): Promise<void>;
|
|
8
|
-
close(): Promise<void>;
|
|
9
|
-
}
|
|
10
|
-
export type RepositoryLike<TModel extends Record<string, unknown>> = {
|
|
11
|
-
meta: {
|
|
12
|
-
table: string;
|
|
13
|
-
pk: keyof TModel & string;
|
|
14
|
-
columns: Record<string, string>;
|
|
15
|
-
};
|
|
16
|
-
query(): unknown;
|
|
17
|
-
findById(id: string): Promise<TModel | null>;
|
|
18
|
-
create(input: Partial<TModel>): Promise<TModel>;
|
|
19
|
-
update(id: string, patch: Partial<TModel>): Promise<TModel>;
|
|
20
|
-
delete(id: string): Promise<void>;
|
|
21
|
-
};
|
|
22
|
-
export type MockQuerySetResult<TModel extends Record<string, unknown>> = {
|
|
23
|
-
filter: (input: unknown) => MockQuerySetResult<TModel>;
|
|
24
|
-
orderBy: (...tokens: Array<keyof TModel | `-${string & keyof TModel}`>) => MockQuerySetResult<TModel>;
|
|
25
|
-
limit: (n: number) => MockQuerySetResult<TModel>;
|
|
26
|
-
offset: (n: number) => MockQuerySetResult<TModel>;
|
|
27
|
-
fetch: () => Promise<{
|
|
28
|
-
results: TModel[];
|
|
29
|
-
nextCursor: string | null;
|
|
30
|
-
}>;
|
|
31
|
-
fetchOne: () => Promise<TModel | null>;
|
|
32
|
-
count: () => Promise<number>;
|
|
33
|
-
};
|
package/dist/mocks-qo-1vCez.js
DELETED
|
@@ -1,72 +0,0 @@
|
|
|
1
|
-
import { __export } from "./chunk-BkvOhyD0.js";
|
|
2
|
-
|
|
3
|
-
//#region src/mocks/aMockDBClient.ts
|
|
4
|
-
function aMockDBClient(overrides = {}) {
|
|
5
|
-
const client = {
|
|
6
|
-
query: async () => ({ rows: [] }),
|
|
7
|
-
begin: async () => {},
|
|
8
|
-
commit: async () => {},
|
|
9
|
-
rollback: async () => {},
|
|
10
|
-
close: async () => {}
|
|
11
|
-
};
|
|
12
|
-
return {
|
|
13
|
-
...client,
|
|
14
|
-
...overrides
|
|
15
|
-
};
|
|
16
|
-
}
|
|
17
|
-
|
|
18
|
-
//#endregion
|
|
19
|
-
//#region src/mocks/aMockQuerySet.ts
|
|
20
|
-
function aMockQuerySet(overrides = {}) {
|
|
21
|
-
const queryset = {
|
|
22
|
-
filter: () => queryset,
|
|
23
|
-
orderBy: () => queryset,
|
|
24
|
-
limit: () => queryset,
|
|
25
|
-
offset: () => queryset,
|
|
26
|
-
fetch: async () => ({
|
|
27
|
-
results: [],
|
|
28
|
-
nextCursor: null
|
|
29
|
-
}),
|
|
30
|
-
fetchOne: async () => null,
|
|
31
|
-
count: async () => 0
|
|
32
|
-
};
|
|
33
|
-
return {
|
|
34
|
-
...queryset,
|
|
35
|
-
...overrides
|
|
36
|
-
};
|
|
37
|
-
}
|
|
38
|
-
|
|
39
|
-
//#endregion
|
|
40
|
-
//#region src/mocks/aMockRepository.ts
|
|
41
|
-
function aMockRepository(overrides = {}) {
|
|
42
|
-
const defaultQuerySet = aMockQuerySet();
|
|
43
|
-
const repository = {
|
|
44
|
-
meta: {
|
|
45
|
-
table: "mock_table",
|
|
46
|
-
pk: "id",
|
|
47
|
-
columns: {}
|
|
48
|
-
},
|
|
49
|
-
query: () => defaultQuerySet,
|
|
50
|
-
findById: async () => null,
|
|
51
|
-
create: async (input) => input,
|
|
52
|
-
update: async (_id, patch) => patch,
|
|
53
|
-
delete: async () => {}
|
|
54
|
-
};
|
|
55
|
-
return {
|
|
56
|
-
...repository,
|
|
57
|
-
...overrides
|
|
58
|
-
};
|
|
59
|
-
}
|
|
60
|
-
|
|
61
|
-
//#endregion
|
|
62
|
-
//#region src/mocks/index.ts
// Namespace object for the mocks subdomain's public contract.
// __export (bundler helper from chunk-BkvOhyD0.js) registers each name as a
// thunk on mocks_exports — presumably getter-based lazy bindings; the helper's
// body is not visible here, so confirm against the chunk if it matters.
var mocks_exports = {};
__export(mocks_exports, {
	aMockDBClient: () => aMockDBClient,
	aMockQuerySet: () => aMockQuerySet,
	aMockRepository: () => aMockRepository
});

//#endregion
export { aMockDBClient, aMockQuerySet, aMockRepository, mocks_exports };
//# sourceMappingURL=mocks-qo-1vCez.js.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"mocks-qo-1vCez.js","names":["overrides: Partial<DBClient>","client: DBClient","overrides: Partial<MockQuerySetResult<TModel>>","queryset: MockQuerySetResult<TModel>","overrides: Partial<RepositoryLike<TModel>>","repository: Partial<RepositoryLike<TModel>>","input: Partial<TModel>","_id: string","patch: Partial<TModel>"],"sources":["../src/mocks/aMockDBClient.ts","../src/mocks/aMockQuerySet.ts","../src/mocks/aMockRepository.ts","../src/mocks/index.ts"],"sourcesContent":["import type { DBClient } from './types';\n\nexport function aMockDBClient(overrides: Partial<DBClient> = {}): DBClient {\n const client: DBClient = {\n query: async () => ({ rows: [] }),\n begin: async () => {},\n commit: async () => {},\n rollback: async () => {},\n close: async () => {},\n };\n\n return {\n ...client,\n ...overrides,\n };\n}\n","import type { MockQuerySetResult } from './types';\n\nexport function aMockQuerySet<TModel extends Record<string, unknown>>(\n overrides: Partial<MockQuerySetResult<TModel>> = {}\n): MockQuerySetResult<TModel> {\n const queryset: MockQuerySetResult<TModel> = {\n filter: () => queryset,\n orderBy: () => queryset,\n limit: () => queryset,\n offset: () => queryset,\n fetch: async () => ({ results: [], nextCursor: null }),\n fetchOne: async () => null,\n count: async () => 0,\n };\n\n return {\n ...queryset,\n ...overrides,\n };\n}\n","import { aMockQuerySet } from './aMockQuerySet';\nimport type { RepositoryLike } from './types';\n\nexport function aMockRepository<TModel extends Record<string, unknown>>(\n overrides: Partial<RepositoryLike<TModel>> = {}\n): RepositoryLike<TModel> {\n const defaultQuerySet = aMockQuerySet<TModel>();\n\n const repository: Partial<RepositoryLike<TModel>> = {\n meta: {\n table: 'mock_table',\n pk: 'id' as keyof TModel & string,\n columns: {},\n },\n query: () => defaultQuerySet,\n findById: async () => null,\n create: async (input: Partial<TModel>) => input as TModel,\n update: async (_id: string, patch: 
Partial<TModel>) => patch as TModel,\n delete: async () => {},\n };\n\n return {\n ...repository,\n ...overrides,\n } as RepositoryLike<TModel>;\n}\n","/**\n * Domain boundary barrel: centralizes this subdomain's public contract.\n */\n\nexport { aMockDBClient } from './aMockDBClient';\nexport { aMockQuerySet } from './aMockQuerySet';\nexport { aMockRepository } from './aMockRepository';\nexport type { DBClient, MockQuerySetResult, RepositoryLike } from './types';\n"],"mappings":";;;AAEO,SAAS,cAAcA,YAA+B,CAAE,GAAY;CACvE,MAAMC,SAAmB;EACrB,OAAO,aAAa,EAAE,MAAM,CAAE,EAAE;EAChC,OAAO,YAAY,CAAE;EACrB,QAAQ,YAAY,CAAE;EACtB,UAAU,YAAY,CAAE;EACxB,OAAO,YAAY,CAAE;CACxB;AAED,QAAO;EACH,GAAG;EACH,GAAG;CACN;AACJ;;;;ACbM,SAAS,cACZC,YAAiD,CAAE,GACzB;CAC1B,MAAMC,WAAuC;EACzC,QAAQ,MAAM;EACd,SAAS,MAAM;EACf,OAAO,MAAM;EACb,QAAQ,MAAM;EACd,OAAO,aAAa;GAAE,SAAS,CAAE;GAAE,YAAY;EAAM;EACrD,UAAU,YAAY;EACtB,OAAO,YAAY;CACtB;AAED,QAAO;EACH,GAAG;EACH,GAAG;CACN;AACJ;;;;AChBM,SAAS,gBACZC,YAA6C,CAAE,GACzB;CACtB,MAAM,kBAAkB,eAAuB;CAE/C,MAAMC,aAA8C;EAChD,MAAM;GACF,OAAO;GACP,IAAI;GACJ,SAAS,CAAE;EACd;EACD,OAAO,MAAM;EACb,UAAU,YAAY;EACtB,QAAQ,OAAOC,UAA2B;EAC1C,QAAQ,OAAOC,KAAaC,UAA2B;EACvD,QAAQ,YAAY,CAAE;CACzB;AAED,QAAO;EACH,GAAG;EACH,GAAG;CACN;AACJ"}
|
package/dist/version.d.ts
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export declare const VERSION = "0.1.0";
|