prisma-sql 1.53.0 → 1.55.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/generator.cjs +283 -80
- package/dist/generator.cjs.map +1 -1
- package/dist/generator.js +283 -80
- package/dist/generator.js.map +1 -1
- package/dist/index.cjs +504 -73
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.mts +72 -61
- package/dist/index.d.ts +72 -61
- package/dist/index.js +499 -74
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
|
@@ -705,6 +705,9 @@ function assertNoControlChars(label, s) {
|
|
|
705
705
|
);
|
|
706
706
|
}
|
|
707
707
|
}
|
|
708
|
+
function quoteRawIdent(id) {
|
|
709
|
+
return `"${id.replace(/"/g, '""')}"`;
|
|
710
|
+
}
|
|
708
711
|
function isIdentCharCode(c) {
|
|
709
712
|
return c >= 48 && c <= 57 || c >= 65 && c <= 90 || c >= 97 && c <= 122 || c === 95;
|
|
710
713
|
}
|
|
@@ -726,33 +729,33 @@ function parseQuotedPart(input, start) {
|
|
|
726
729
|
}
|
|
727
730
|
if (!sawAny) {
|
|
728
731
|
throw new Error(
|
|
729
|
-
`
|
|
732
|
+
`qualified name has empty quoted identifier part: ${JSON.stringify(input)}`
|
|
730
733
|
);
|
|
731
734
|
}
|
|
732
735
|
return i + 1;
|
|
733
736
|
}
|
|
734
737
|
if (c === 10 || c === 13 || c === 0) {
|
|
735
738
|
throw new Error(
|
|
736
|
-
`
|
|
739
|
+
`qualified name contains invalid characters: ${JSON.stringify(input)}`
|
|
737
740
|
);
|
|
738
741
|
}
|
|
739
742
|
sawAny = true;
|
|
740
743
|
i++;
|
|
741
744
|
}
|
|
742
745
|
throw new Error(
|
|
743
|
-
`
|
|
746
|
+
`qualified name has unterminated quoted identifier: ${JSON.stringify(input)}`
|
|
744
747
|
);
|
|
745
748
|
}
|
|
746
749
|
function parseUnquotedPart(input, start) {
|
|
747
750
|
const n = input.length;
|
|
748
751
|
let i = start;
|
|
749
752
|
if (i >= n) {
|
|
750
|
-
throw new Error(`
|
|
753
|
+
throw new Error(`qualified name is invalid: ${JSON.stringify(input)}`);
|
|
751
754
|
}
|
|
752
755
|
const c0 = input.charCodeAt(i);
|
|
753
756
|
if (!isIdentStartCharCode(c0)) {
|
|
754
757
|
throw new Error(
|
|
755
|
-
`
|
|
758
|
+
`qualified name must use identifiers (or quoted identifiers). Got: ${JSON.stringify(input)}`
|
|
756
759
|
);
|
|
757
760
|
}
|
|
758
761
|
i++;
|
|
@@ -761,15 +764,15 @@ function parseUnquotedPart(input, start) {
|
|
|
761
764
|
if (c === 46) break;
|
|
762
765
|
if (!isIdentCharCode(c)) {
|
|
763
766
|
throw new Error(
|
|
764
|
-
`
|
|
767
|
+
`qualified name contains invalid identifier characters: ${JSON.stringify(input)}`
|
|
765
768
|
);
|
|
766
769
|
}
|
|
767
770
|
i++;
|
|
768
771
|
}
|
|
769
772
|
return i;
|
|
770
773
|
}
|
|
771
|
-
function assertSafeQualifiedName(
|
|
772
|
-
const raw = String(
|
|
774
|
+
function assertSafeQualifiedName(input) {
|
|
775
|
+
const raw = String(input);
|
|
773
776
|
const trimmed = raw.trim();
|
|
774
777
|
if (trimmed.length === 0) {
|
|
775
778
|
throw new Error("tableName/tableRef is required and cannot be empty");
|
|
@@ -843,7 +846,7 @@ function quote2(id) {
|
|
|
843
846
|
);
|
|
844
847
|
}
|
|
845
848
|
if (needsQuoting(id)) {
|
|
846
|
-
return
|
|
849
|
+
return quoteRawIdent(id);
|
|
847
850
|
}
|
|
848
851
|
return id;
|
|
849
852
|
}
|
|
@@ -2482,28 +2485,24 @@ function buildOperator(expr, op, val, ctx, mode, fieldType) {
|
|
|
2482
2485
|
function toSafeSqlIdentifier(input) {
|
|
2483
2486
|
const raw = String(input);
|
|
2484
2487
|
const n = raw.length;
|
|
2488
|
+
if (n === 0) return "_t";
|
|
2485
2489
|
let out = "";
|
|
2486
2490
|
for (let i = 0; i < n; i++) {
|
|
2487
2491
|
const c = raw.charCodeAt(i);
|
|
2488
2492
|
const isAZ = c >= 65 && c <= 90 || c >= 97 && c <= 122;
|
|
2489
2493
|
const is09 = c >= 48 && c <= 57;
|
|
2490
2494
|
const isUnderscore = c === 95;
|
|
2491
|
-
|
|
2492
|
-
out += raw[i];
|
|
2493
|
-
} else {
|
|
2494
|
-
out += "_";
|
|
2495
|
-
}
|
|
2495
|
+
out += isAZ || is09 || isUnderscore ? raw[i] : "_";
|
|
2496
2496
|
}
|
|
2497
|
-
if (out.length === 0) out = "_t";
|
|
2498
2497
|
const c0 = out.charCodeAt(0);
|
|
2499
2498
|
const startsOk = c0 >= 65 && c0 <= 90 || c0 >= 97 && c0 <= 122 || c0 === 95;
|
|
2500
|
-
|
|
2501
|
-
const lowered = out.toLowerCase();
|
|
2499
|
+
const lowered = (startsOk ? out : `_${out}`).toLowerCase();
|
|
2502
2500
|
return ALIAS_FORBIDDEN_KEYWORDS.has(lowered) ? `_${lowered}` : lowered;
|
|
2503
2501
|
}
|
|
2504
2502
|
function createAliasGenerator(maxAliases = 1e4) {
|
|
2505
2503
|
let counter = 0;
|
|
2506
2504
|
const usedAliases = /* @__PURE__ */ new Set();
|
|
2505
|
+
const maxLen = 63;
|
|
2507
2506
|
return {
|
|
2508
2507
|
next(baseName) {
|
|
2509
2508
|
if (usedAliases.size >= maxAliases) {
|
|
@@ -2513,14 +2512,13 @@ function createAliasGenerator(maxAliases = 1e4) {
|
|
|
2513
2512
|
}
|
|
2514
2513
|
const base = toSafeSqlIdentifier(baseName);
|
|
2515
2514
|
const suffix = `_${counter}`;
|
|
2516
|
-
const maxLen = 63;
|
|
2517
2515
|
const baseMax = Math.max(1, maxLen - suffix.length);
|
|
2518
2516
|
const trimmedBase = base.length > baseMax ? base.slice(0, baseMax) : base;
|
|
2519
2517
|
const alias = `${trimmedBase}${suffix}`;
|
|
2520
2518
|
counter += 1;
|
|
2521
2519
|
if (usedAliases.has(alias)) {
|
|
2522
2520
|
throw new Error(
|
|
2523
|
-
`CRITICAL: Duplicate alias '${alias}' at counter=${counter}
|
|
2521
|
+
`CRITICAL: Duplicate alias '${alias}' at counter=${counter}.`
|
|
2524
2522
|
);
|
|
2525
2523
|
}
|
|
2526
2524
|
usedAliases.add(alias);
|
|
@@ -2572,24 +2570,19 @@ function normalizeDynamicNameOrThrow(dynamicName, index) {
|
|
|
2572
2570
|
}
|
|
2573
2571
|
return dn;
|
|
2574
2572
|
}
|
|
2575
|
-
function assertUniqueDynamicName(dn, seen) {
|
|
2576
|
-
if (seen.has(dn)) {
|
|
2577
|
-
throw new Error(`CRITICAL: Duplicate dynamic param name in mappings: ${dn}`);
|
|
2578
|
-
}
|
|
2579
|
-
seen.add(dn);
|
|
2580
|
-
}
|
|
2581
|
-
function validateMappingEntry(m, expectedIndex, seenDynamic) {
|
|
2582
|
-
assertSequentialIndex(m.index, expectedIndex);
|
|
2583
|
-
assertExactlyOneOfDynamicOrValue(m);
|
|
2584
|
-
if (typeof m.dynamicName === "string") {
|
|
2585
|
-
const dn = normalizeDynamicNameOrThrow(m.dynamicName, m.index);
|
|
2586
|
-
assertUniqueDynamicName(dn, seenDynamic);
|
|
2587
|
-
}
|
|
2588
|
-
}
|
|
2589
2573
|
function validateMappings(mappings) {
|
|
2590
2574
|
const seenDynamic = /* @__PURE__ */ new Set();
|
|
2591
2575
|
for (let i = 0; i < mappings.length; i++) {
|
|
2592
|
-
|
|
2576
|
+
const m = mappings[i];
|
|
2577
|
+
assertSequentialIndex(m.index, i + 1);
|
|
2578
|
+
assertExactlyOneOfDynamicOrValue(m);
|
|
2579
|
+
if (typeof m.dynamicName === "string") {
|
|
2580
|
+
const dn = normalizeDynamicNameOrThrow(m.dynamicName, m.index);
|
|
2581
|
+
if (seenDynamic.has(dn)) {
|
|
2582
|
+
throw new Error(`CRITICAL: Duplicate dynamic param name: ${dn}`);
|
|
2583
|
+
}
|
|
2584
|
+
seenDynamic.add(dn);
|
|
2585
|
+
}
|
|
2593
2586
|
}
|
|
2594
2587
|
}
|
|
2595
2588
|
function validateState(params, mappings, index) {
|
|
@@ -2601,16 +2594,19 @@ function validateState(params, mappings, index) {
|
|
|
2601
2594
|
}
|
|
2602
2595
|
function createStoreInternal(startIndex, initialParams = [], initialMappings = []) {
|
|
2603
2596
|
let index = startIndex;
|
|
2604
|
-
const params = initialParams.length > 0 ?
|
|
2605
|
-
const mappings = initialMappings.length > 0 ?
|
|
2597
|
+
const params = initialParams.length > 0 ? initialParams.slice() : [];
|
|
2598
|
+
const mappings = initialMappings.length > 0 ? initialMappings.slice() : [];
|
|
2606
2599
|
const dynamicNameToIndex = /* @__PURE__ */ new Map();
|
|
2607
|
-
for (
|
|
2600
|
+
for (let i = 0; i < mappings.length; i++) {
|
|
2601
|
+
const m = mappings[i];
|
|
2608
2602
|
if (typeof m.dynamicName === "string") {
|
|
2609
2603
|
dynamicNameToIndex.set(m.dynamicName.trim(), m.index);
|
|
2610
2604
|
}
|
|
2611
2605
|
}
|
|
2612
2606
|
let dirty = true;
|
|
2613
2607
|
let cachedSnapshot = null;
|
|
2608
|
+
let frozenParams = null;
|
|
2609
|
+
let frozenMappings = null;
|
|
2614
2610
|
function assertCanAdd() {
|
|
2615
2611
|
if (index > MAX_PARAM_INDEX) {
|
|
2616
2612
|
throw new Error(
|
|
@@ -2662,13 +2658,17 @@ function createStoreInternal(startIndex, initialParams = [], initialMappings = [
|
|
|
2662
2658
|
}
|
|
2663
2659
|
function snapshot() {
|
|
2664
2660
|
if (!dirty && cachedSnapshot) return cachedSnapshot;
|
|
2661
|
+
if (!frozenParams) frozenParams = Object.freeze(params.slice());
|
|
2662
|
+
if (!frozenMappings) frozenMappings = Object.freeze(mappings.slice());
|
|
2665
2663
|
const snap = {
|
|
2666
2664
|
index,
|
|
2667
|
-
params,
|
|
2668
|
-
mappings
|
|
2665
|
+
params: frozenParams,
|
|
2666
|
+
mappings: frozenMappings
|
|
2669
2667
|
};
|
|
2670
2668
|
cachedSnapshot = snap;
|
|
2671
2669
|
dirty = false;
|
|
2670
|
+
frozenParams = null;
|
|
2671
|
+
frozenMappings = null;
|
|
2672
2672
|
return snap;
|
|
2673
2673
|
}
|
|
2674
2674
|
return {
|
|
@@ -2692,11 +2692,11 @@ function createParamStore(startIndex = 1) {
|
|
|
2692
2692
|
return createStoreInternal(startIndex);
|
|
2693
2693
|
}
|
|
2694
2694
|
function createParamStoreFrom(existingParams, existingMappings, nextIndex) {
|
|
2695
|
-
validateState(
|
|
2695
|
+
validateState(existingParams, existingMappings, nextIndex);
|
|
2696
2696
|
return createStoreInternal(
|
|
2697
2697
|
nextIndex,
|
|
2698
|
-
|
|
2699
|
-
|
|
2698
|
+
existingParams.slice(),
|
|
2699
|
+
existingMappings.slice()
|
|
2700
2700
|
);
|
|
2701
2701
|
}
|
|
2702
2702
|
|
|
@@ -2878,7 +2878,7 @@ function getRelationTableReference(relModel, dialect) {
|
|
|
2878
2878
|
dialect
|
|
2879
2879
|
);
|
|
2880
2880
|
}
|
|
2881
|
-
function resolveRelationOrThrow(model,
|
|
2881
|
+
function resolveRelationOrThrow(model, schemaByName, relName) {
|
|
2882
2882
|
const field = model.fields.find((f) => f.name === relName);
|
|
2883
2883
|
if (!isNotNullish(field)) {
|
|
2884
2884
|
throw new Error(
|
|
@@ -2932,8 +2932,9 @@ function validateOrderByForModel(model, orderBy) {
|
|
|
2932
2932
|
throw new Error("orderBy array entries must have exactly one field");
|
|
2933
2933
|
}
|
|
2934
2934
|
const fieldName = String(entries[0][0]).trim();
|
|
2935
|
-
if (fieldName.length === 0)
|
|
2935
|
+
if (fieldName.length === 0) {
|
|
2936
2936
|
throw new Error("orderBy field name cannot be empty");
|
|
2937
|
+
}
|
|
2937
2938
|
if (!scalarSet.has(fieldName)) {
|
|
2938
2939
|
throw new Error(
|
|
2939
2940
|
`orderBy references unknown or non-scalar field '${fieldName}' on model ${model.name}`
|
|
@@ -2992,8 +2993,9 @@ function extractRelationPaginationConfig(relArgs) {
|
|
|
2992
2993
|
function maybeReverseNegativeTake(takeVal, hasOrderBy, orderByInput) {
|
|
2993
2994
|
if (typeof takeVal !== "number") return { takeVal, orderByInput };
|
|
2994
2995
|
if (takeVal >= 0) return { takeVal, orderByInput };
|
|
2995
|
-
if (!hasOrderBy)
|
|
2996
|
+
if (!hasOrderBy) {
|
|
2996
2997
|
throw new Error("Negative take requires orderBy for deterministic results");
|
|
2998
|
+
}
|
|
2997
2999
|
return {
|
|
2998
3000
|
takeVal: Math.abs(takeVal),
|
|
2999
3001
|
orderByInput: reverseOrderByInput(orderByInput)
|
|
@@ -3003,9 +3005,7 @@ function finalizeOrderByForInclude(args) {
|
|
|
3003
3005
|
if (args.hasOrderBy && isNotNullish(args.orderByInput)) {
|
|
3004
3006
|
validateOrderByForModel(args.relModel, args.orderByInput);
|
|
3005
3007
|
}
|
|
3006
|
-
if (!args.hasPagination)
|
|
3007
|
-
return args.orderByInput;
|
|
3008
|
-
}
|
|
3008
|
+
if (!args.hasPagination) return args.orderByInput;
|
|
3009
3009
|
return ensureDeterministicOrderByInput({
|
|
3010
3010
|
orderBy: args.hasOrderBy ? args.orderByInput : void 0,
|
|
3011
3011
|
model: args.relModel,
|
|
@@ -3066,7 +3066,9 @@ function buildOrderBySql(finalOrderByInput, relAlias, dialect, relModel) {
|
|
|
3066
3066
|
return isNotNullish(finalOrderByInput) ? buildOrderBy(finalOrderByInput, relAlias, dialect, relModel) : "";
|
|
3067
3067
|
}
|
|
3068
3068
|
function buildBaseSql(args) {
|
|
3069
|
-
|
|
3069
|
+
const joins = args.joins ? ` ${args.joins}` : "";
|
|
3070
|
+
const where = `${SQL_TEMPLATES.WHERE} ${args.joinPredicate}${args.whereClause}`;
|
|
3071
|
+
return `${SQL_TEMPLATES.SELECT} ${args.selectExpr} ${SQL_TEMPLATES.FROM} ${args.relTable} ${args.relAlias}${joins} ` + where;
|
|
3070
3072
|
}
|
|
3071
3073
|
function buildOneToOneIncludeSql(args) {
|
|
3072
3074
|
const objExpr = jsonBuildObject(args.relSelect, args.ctx.dialect);
|
|
@@ -3078,9 +3080,7 @@ function buildOneToOneIncludeSql(args) {
|
|
|
3078
3080
|
joinPredicate: args.joinPredicate,
|
|
3079
3081
|
whereClause: args.whereClause
|
|
3080
3082
|
});
|
|
3081
|
-
if (args.orderBySql) {
|
|
3082
|
-
sql += ` ${SQL_TEMPLATES.ORDER_BY} ${args.orderBySql}`;
|
|
3083
|
-
}
|
|
3083
|
+
if (args.orderBySql) sql += ` ${SQL_TEMPLATES.ORDER_BY} ${args.orderBySql}`;
|
|
3084
3084
|
if (isNotNullish(args.takeVal)) {
|
|
3085
3085
|
return appendLimitOffset(
|
|
3086
3086
|
sql,
|
|
@@ -3133,7 +3133,7 @@ function buildListIncludeSpec(args) {
|
|
|
3133
3133
|
`include.${args.relName}`
|
|
3134
3134
|
);
|
|
3135
3135
|
const selectExpr = jsonAgg("row", args.ctx.dialect);
|
|
3136
|
-
const sql = `${SQL_TEMPLATES.SELECT} ${selectExpr} ${SQL_TEMPLATES.FROM} (${base}) ${rowAlias}`;
|
|
3136
|
+
const sql = `${SQL_TEMPLATES.SELECT} ${selectExpr} ${SQL_TEMPLATES.FROM} (${base}) ${SQL_TEMPLATES.AS} ${rowAlias}`;
|
|
3137
3137
|
return Object.freeze({ name: args.relName, sql, isOneToOne: false });
|
|
3138
3138
|
}
|
|
3139
3139
|
function buildSingleInclude(relName, relArgs, field, relModel, ctx) {
|
|
@@ -3231,12 +3231,7 @@ function buildIncludeSqlInternal(args, model, schemas, schemaByName, parentAlias
|
|
|
3231
3231
|
`Query complexity limit exceeded: ${stats.totalSubqueries} subqueries generated. Maximum allowed: ${MAX_TOTAL_SUBQUERIES}. This indicates exponential include nesting. Stats: depth=${stats.maxDepth}, includes=${stats.totalIncludes}. Path: ${visitPath.join(" -> ")}. Simplify your include structure or split into multiple queries.`
|
|
3232
3232
|
);
|
|
3233
3233
|
}
|
|
3234
|
-
const resolved = resolveRelationOrThrow(
|
|
3235
|
-
model,
|
|
3236
|
-
schemas,
|
|
3237
|
-
schemaByName,
|
|
3238
|
-
relName
|
|
3239
|
-
);
|
|
3234
|
+
const resolved = resolveRelationOrThrow(model, schemaByName, relName);
|
|
3240
3235
|
const relationPath = `${model.name}.${relName}`;
|
|
3241
3236
|
const currentPath = [...visitPath, relationPath];
|
|
3242
3237
|
if (visitPath.includes(relationPath)) {
|
|
@@ -3292,7 +3287,7 @@ function buildIncludeSql(args, model, schemas, parentAlias, params, dialect) {
|
|
|
3292
3287
|
stats
|
|
3293
3288
|
);
|
|
3294
3289
|
}
|
|
3295
|
-
function resolveCountRelationOrThrow(relName, model,
|
|
3290
|
+
function resolveCountRelationOrThrow(relName, model, schemaByName) {
|
|
3296
3291
|
const relationSet = getRelationFieldSet(model);
|
|
3297
3292
|
if (!relationSet.has(relName)) {
|
|
3298
3293
|
throw new Error(
|
|
@@ -3300,10 +3295,11 @@ function resolveCountRelationOrThrow(relName, model, schemas, schemaByName) {
|
|
|
3300
3295
|
);
|
|
3301
3296
|
}
|
|
3302
3297
|
const field = model.fields.find((f) => f.name === relName);
|
|
3303
|
-
if (!field)
|
|
3298
|
+
if (!field) {
|
|
3304
3299
|
throw new Error(
|
|
3305
3300
|
`_count.${relName} references unknown relation on model ${model.name}`
|
|
3306
3301
|
);
|
|
3302
|
+
}
|
|
3307
3303
|
if (!isValidRelationField(field)) {
|
|
3308
3304
|
throw new Error(
|
|
3309
3305
|
`_count.${relName} has invalid relation metadata on model ${model.name}`
|
|
@@ -3331,8 +3327,9 @@ function defaultReferencesForCount(fkCount) {
|
|
|
3331
3327
|
}
|
|
3332
3328
|
function resolveCountKeyPairs(field) {
|
|
3333
3329
|
const fkFields = normalizeKeyList(field.foreignKey);
|
|
3334
|
-
if (fkFields.length === 0)
|
|
3330
|
+
if (fkFields.length === 0) {
|
|
3335
3331
|
throw new Error("Relation count requires foreignKey");
|
|
3332
|
+
}
|
|
3336
3333
|
const refsRaw = field.references;
|
|
3337
3334
|
const refs = normalizeKeyList(refsRaw);
|
|
3338
3335
|
const refFields = refs.length > 0 ? refs : defaultReferencesForCount(fkFields.length);
|
|
@@ -3408,12 +3405,7 @@ function buildRelationCountSql(countSelect, model, schemas, parentAlias, _params
|
|
|
3408
3405
|
for (const m of schemas) schemaByName.set(m.name, m);
|
|
3409
3406
|
for (const [relName, shouldCount] of Object.entries(countSelect)) {
|
|
3410
3407
|
if (!shouldCount) continue;
|
|
3411
|
-
const resolved = resolveCountRelationOrThrow(
|
|
3412
|
-
relName,
|
|
3413
|
-
model,
|
|
3414
|
-
schemas,
|
|
3415
|
-
schemaByName
|
|
3416
|
-
);
|
|
3408
|
+
const resolved = resolveCountRelationOrThrow(relName, model, schemaByName);
|
|
3417
3409
|
const built = buildCountJoinAndPair({
|
|
3418
3410
|
relName,
|
|
3419
3411
|
field: resolved.field,
|
|
@@ -5141,6 +5133,304 @@ function buildSQLWithCache(model, models, method, args, dialect) {
|
|
|
5141
5133
|
queryCache.size;
|
|
5142
5134
|
return result;
|
|
5143
5135
|
}
|
|
5136
|
+
|
|
5137
|
+
// src/batch.ts
|
|
5138
|
+
function assertNoControlChars2(label, s) {
|
|
5139
|
+
for (let i = 0; i < s.length; i++) {
|
|
5140
|
+
const c = s.charCodeAt(i);
|
|
5141
|
+
if (c <= 31 || c === 127) {
|
|
5142
|
+
throw new Error(`${label} contains control characters`);
|
|
5143
|
+
}
|
|
5144
|
+
}
|
|
5145
|
+
}
|
|
5146
|
+
function quoteIdent(id) {
|
|
5147
|
+
const raw = String(id);
|
|
5148
|
+
if (raw.length === 0) throw new Error("Identifier cannot be empty");
|
|
5149
|
+
assertNoControlChars2("Identifier", raw);
|
|
5150
|
+
return `"${raw.replace(/"/g, '""')}"`;
|
|
5151
|
+
}
|
|
5152
|
+
function reindexParams(sql, params, offset) {
|
|
5153
|
+
if (!Number.isInteger(offset) || offset < 0) {
|
|
5154
|
+
throw new Error(`Invalid param offset: ${offset}`);
|
|
5155
|
+
}
|
|
5156
|
+
const newParams = [];
|
|
5157
|
+
const paramMap = /* @__PURE__ */ new Map();
|
|
5158
|
+
const reindexed = sql.replace(/\$(\d+)/g, (_match, num) => {
|
|
5159
|
+
const oldIndex = Number(num);
|
|
5160
|
+
if (!Number.isInteger(oldIndex) || oldIndex < 1) {
|
|
5161
|
+
throw new Error(`Invalid param placeholder: $${num}`);
|
|
5162
|
+
}
|
|
5163
|
+
const existing = paramMap.get(oldIndex);
|
|
5164
|
+
if (existing !== void 0) return `$${existing}`;
|
|
5165
|
+
const pos = oldIndex - 1;
|
|
5166
|
+
if (pos >= params.length) {
|
|
5167
|
+
throw new Error(
|
|
5168
|
+
`Param placeholder $${oldIndex} exceeds params length (${params.length})`
|
|
5169
|
+
);
|
|
5170
|
+
}
|
|
5171
|
+
const newIndex = offset + newParams.length + 1;
|
|
5172
|
+
paramMap.set(oldIndex, newIndex);
|
|
5173
|
+
newParams.push(params[pos]);
|
|
5174
|
+
return `$${newIndex}`;
|
|
5175
|
+
});
|
|
5176
|
+
return { sql: reindexed, params: newParams };
|
|
5177
|
+
}
|
|
5178
|
+
function wrapQueryForMethod(method, cteName, resultKey) {
|
|
5179
|
+
const outKey = quoteIdent(resultKey);
|
|
5180
|
+
switch (method) {
|
|
5181
|
+
case "findMany":
|
|
5182
|
+
case "groupBy":
|
|
5183
|
+
return `(SELECT COALESCE(json_agg(row_to_json(t)), '[]'::json) FROM ${cteName} t) AS ${outKey}`;
|
|
5184
|
+
case "findFirst":
|
|
5185
|
+
case "findUnique":
|
|
5186
|
+
return `(SELECT row_to_json(t) FROM ${cteName} t LIMIT 1) AS ${outKey}`;
|
|
5187
|
+
case "count":
|
|
5188
|
+
return `(SELECT * FROM ${cteName}) AS ${outKey}`;
|
|
5189
|
+
case "aggregate":
|
|
5190
|
+
return `(SELECT row_to_json(t) FROM ${cteName} t) AS ${outKey}`;
|
|
5191
|
+
default:
|
|
5192
|
+
throw new Error(`Unsupported batch method: ${method}`);
|
|
5193
|
+
}
|
|
5194
|
+
}
|
|
5195
|
+
function buildBatchSql(queries, modelMap, models, dialect) {
|
|
5196
|
+
const keys = Object.keys(queries);
|
|
5197
|
+
if (keys.length === 0) {
|
|
5198
|
+
throw new Error("buildBatchSql requires at least one query");
|
|
5199
|
+
}
|
|
5200
|
+
if (dialect !== "postgres") {
|
|
5201
|
+
throw new Error("Batch queries are only supported for postgres dialect");
|
|
5202
|
+
}
|
|
5203
|
+
const ctes = new Array(keys.length);
|
|
5204
|
+
const selects = new Array(keys.length);
|
|
5205
|
+
const allParams = [];
|
|
5206
|
+
for (let i = 0; i < keys.length; i++) {
|
|
5207
|
+
const key = keys[i];
|
|
5208
|
+
const query = queries[key];
|
|
5209
|
+
const model = modelMap.get(query.model);
|
|
5210
|
+
if (!model) {
|
|
5211
|
+
throw new Error(
|
|
5212
|
+
`Model '${query.model}' not found. Available: ${[...modelMap.keys()].join(", ")}`
|
|
5213
|
+
);
|
|
5214
|
+
}
|
|
5215
|
+
const { sql: querySql, params: queryParams } = buildSQLWithCache(
|
|
5216
|
+
model,
|
|
5217
|
+
models,
|
|
5218
|
+
query.method,
|
|
5219
|
+
query.args || {},
|
|
5220
|
+
dialect
|
|
5221
|
+
);
|
|
5222
|
+
const { sql: reindexedSql, params: reindexedParams } = reindexParams(
|
|
5223
|
+
querySql,
|
|
5224
|
+
queryParams,
|
|
5225
|
+
allParams.length
|
|
5226
|
+
);
|
|
5227
|
+
for (let p = 0; p < reindexedParams.length; p++) {
|
|
5228
|
+
allParams.push(reindexedParams[p]);
|
|
5229
|
+
}
|
|
5230
|
+
const cteName = `batch_${i}`;
|
|
5231
|
+
ctes[i] = `${cteName} AS (${reindexedSql})`;
|
|
5232
|
+
selects[i] = wrapQueryForMethod(query.method, cteName, key);
|
|
5233
|
+
}
|
|
5234
|
+
const sql = `WITH ${ctes.join(", ")} SELECT ${selects.join(", ")}`;
|
|
5235
|
+
return { sql, params: allParams, keys };
|
|
5236
|
+
}
|
|
5237
|
+
function buildBatchCountSql(queries, modelMap, models, dialect) {
|
|
5238
|
+
if (queries.length === 0) {
|
|
5239
|
+
throw new Error("buildBatchCountSql requires at least one query");
|
|
5240
|
+
}
|
|
5241
|
+
if (dialect !== "postgres") {
|
|
5242
|
+
throw new Error(
|
|
5243
|
+
"Batch count queries are only supported for postgres dialect"
|
|
5244
|
+
);
|
|
5245
|
+
}
|
|
5246
|
+
const ctes = new Array(queries.length);
|
|
5247
|
+
const selects = new Array(queries.length);
|
|
5248
|
+
const allParams = [];
|
|
5249
|
+
for (let i = 0; i < queries.length; i++) {
|
|
5250
|
+
const query = queries[i];
|
|
5251
|
+
const model = modelMap.get(query.model);
|
|
5252
|
+
if (!model) {
|
|
5253
|
+
throw new Error(
|
|
5254
|
+
`Model '${query.model}' not found. Available: ${[...modelMap.keys()].join(", ")}`
|
|
5255
|
+
);
|
|
5256
|
+
}
|
|
5257
|
+
const { sql: querySql, params: queryParams } = buildSQLWithCache(
|
|
5258
|
+
model,
|
|
5259
|
+
models,
|
|
5260
|
+
"count",
|
|
5261
|
+
query.args || {},
|
|
5262
|
+
dialect
|
|
5263
|
+
);
|
|
5264
|
+
const { sql: reindexedSql, params: reindexedParams } = reindexParams(
|
|
5265
|
+
querySql,
|
|
5266
|
+
queryParams,
|
|
5267
|
+
allParams.length
|
|
5268
|
+
);
|
|
5269
|
+
for (let p = 0; p < reindexedParams.length; p++) {
|
|
5270
|
+
allParams.push(reindexedParams[p]);
|
|
5271
|
+
}
|
|
5272
|
+
const cteName = `count_${i}`;
|
|
5273
|
+
const resultKey = `count_${i}`;
|
|
5274
|
+
ctes[i] = `${cteName} AS (${reindexedSql})`;
|
|
5275
|
+
selects[i] = `(SELECT * FROM ${cteName}) AS ${quoteIdent(resultKey)}`;
|
|
5276
|
+
}
|
|
5277
|
+
const sql = `WITH ${ctes.join(", ")} SELECT ${selects.join(", ")}`;
|
|
5278
|
+
return { sql, params: allParams };
|
|
5279
|
+
}
|
|
5280
|
+
function looksLikeJsonString(s) {
|
|
5281
|
+
const t = s.trim();
|
|
5282
|
+
if (t.length === 0) return false;
|
|
5283
|
+
const c0 = t.charCodeAt(0);
|
|
5284
|
+
const cN = t.charCodeAt(t.length - 1);
|
|
5285
|
+
if (c0 === 123 && cN === 125) return true;
|
|
5286
|
+
if (c0 === 91 && cN === 93) return true;
|
|
5287
|
+
if (t === "null" || t === "true" || t === "false") return true;
|
|
5288
|
+
return false;
|
|
5289
|
+
}
|
|
5290
|
+
function parseJsonValue(value) {
|
|
5291
|
+
if (typeof value !== "string") return value;
|
|
5292
|
+
if (!looksLikeJsonString(value)) return value;
|
|
5293
|
+
try {
|
|
5294
|
+
return JSON.parse(value);
|
|
5295
|
+
} catch (e) {
|
|
5296
|
+
return value;
|
|
5297
|
+
}
|
|
5298
|
+
}
|
|
5299
|
+
function parseCountValue(value) {
|
|
5300
|
+
if (value === null || value === void 0) return 0;
|
|
5301
|
+
if (typeof value === "number") return value;
|
|
5302
|
+
if (typeof value === "string") {
|
|
5303
|
+
const n = Number.parseInt(value, 10);
|
|
5304
|
+
return Number.isFinite(n) ? n : 0;
|
|
5305
|
+
}
|
|
5306
|
+
if (typeof value === "object") {
|
|
5307
|
+
const obj = value;
|
|
5308
|
+
const countKey = Object.prototype.hasOwnProperty.call(obj, "count") ? "count" : Object.prototype.hasOwnProperty.call(obj, "_count") ? "_count" : Object.keys(obj).find((k) => k.endsWith("_count"));
|
|
5309
|
+
if (countKey !== void 0) {
|
|
5310
|
+
const v = obj[countKey];
|
|
5311
|
+
if (typeof v === "number") return v;
|
|
5312
|
+
if (typeof v === "string") {
|
|
5313
|
+
const n = Number.parseInt(v, 10);
|
|
5314
|
+
return Number.isFinite(n) ? n : 0;
|
|
5315
|
+
}
|
|
5316
|
+
}
|
|
5317
|
+
}
|
|
5318
|
+
return 0;
|
|
5319
|
+
}
|
|
5320
|
+
function parseBatchCountResults(row, count) {
|
|
5321
|
+
const results = [];
|
|
5322
|
+
for (let i = 0; i < count; i++) {
|
|
5323
|
+
const key = `count_${i}`;
|
|
5324
|
+
const value = row[key];
|
|
5325
|
+
results.push(parseCountValue(value));
|
|
5326
|
+
}
|
|
5327
|
+
return results;
|
|
5328
|
+
}
|
|
5329
|
+
function parseBatchResults(row, keys, queries) {
|
|
5330
|
+
const results = {};
|
|
5331
|
+
for (let i = 0; i < keys.length; i++) {
|
|
5332
|
+
const key = keys[i];
|
|
5333
|
+
const rawValue = row[key];
|
|
5334
|
+
const query = queries[key];
|
|
5335
|
+
switch (query.method) {
|
|
5336
|
+
case "findMany": {
|
|
5337
|
+
const parsed = parseJsonValue(rawValue);
|
|
5338
|
+
results[key] = Array.isArray(parsed) ? parsed : [];
|
|
5339
|
+
break;
|
|
5340
|
+
}
|
|
5341
|
+
case "findFirst":
|
|
5342
|
+
case "findUnique": {
|
|
5343
|
+
const parsed = parseJsonValue(rawValue);
|
|
5344
|
+
results[key] = parsed != null ? parsed : null;
|
|
5345
|
+
break;
|
|
5346
|
+
}
|
|
5347
|
+
case "count": {
|
|
5348
|
+
results[key] = parseCountValue(rawValue);
|
|
5349
|
+
break;
|
|
5350
|
+
}
|
|
5351
|
+
case "aggregate": {
|
|
5352
|
+
const parsed = parseJsonValue(rawValue);
|
|
5353
|
+
const obj = parsed != null ? parsed : {};
|
|
5354
|
+
results[key] = transformQueryResults("aggregate", [obj]);
|
|
5355
|
+
break;
|
|
5356
|
+
}
|
|
5357
|
+
case "groupBy": {
|
|
5358
|
+
const parsed = parseJsonValue(rawValue);
|
|
5359
|
+
const arr = Array.isArray(parsed) ? parsed : [];
|
|
5360
|
+
results[key] = transformQueryResults("groupBy", arr);
|
|
5361
|
+
break;
|
|
5362
|
+
}
|
|
5363
|
+
default:
|
|
5364
|
+
results[key] = rawValue;
|
|
5365
|
+
}
|
|
5366
|
+
}
|
|
5367
|
+
return results;
|
|
5368
|
+
}
|
|
5369
|
+
|
|
5370
|
+
// src/transaction.ts
|
|
5371
|
+
function isolationLevelToPostgresKeyword(level) {
|
|
5372
|
+
switch (level) {
|
|
5373
|
+
case "ReadCommitted":
|
|
5374
|
+
return "read committed";
|
|
5375
|
+
case "RepeatableRead":
|
|
5376
|
+
return "repeatable read";
|
|
5377
|
+
case "Serializable":
|
|
5378
|
+
return "serializable";
|
|
5379
|
+
default:
|
|
5380
|
+
return void 0;
|
|
5381
|
+
}
|
|
5382
|
+
}
|
|
5383
|
+
function createTransactionExecutor(deps) {
|
|
5384
|
+
const { modelMap, allModels, dialect, executeRaw, postgresClient } = deps;
|
|
5385
|
+
return {
|
|
5386
|
+
execute(queries, options) {
|
|
5387
|
+
return __async(this, null, function* () {
|
|
5388
|
+
if (queries.length === 0) return [];
|
|
5389
|
+
if (dialect !== "postgres") {
|
|
5390
|
+
throw new Error("$transaction is only supported for postgres dialect");
|
|
5391
|
+
}
|
|
5392
|
+
if (!postgresClient) {
|
|
5393
|
+
throw new Error("postgresClient is required for transactions");
|
|
5394
|
+
}
|
|
5395
|
+
const transactionCallback = (sql) => __async(null, null, function* () {
|
|
5396
|
+
const results = [];
|
|
5397
|
+
const isolationLevel = isolationLevelToPostgresKeyword(
|
|
5398
|
+
options == null ? void 0 : options.isolationLevel
|
|
5399
|
+
);
|
|
5400
|
+
if (isolationLevel) {
|
|
5401
|
+
yield sql.unsafe(
|
|
5402
|
+
`SET TRANSACTION ISOLATION LEVEL ${isolationLevel.toUpperCase()}`
|
|
5403
|
+
);
|
|
5404
|
+
}
|
|
5405
|
+
if (options == null ? void 0 : options.timeout) {
|
|
5406
|
+
yield sql.unsafe(
|
|
5407
|
+
`SET LOCAL statement_timeout = ${Math.floor(options.timeout)}`
|
|
5408
|
+
);
|
|
5409
|
+
}
|
|
5410
|
+
for (const q of queries) {
|
|
5411
|
+
const model = modelMap.get(q.model);
|
|
5412
|
+
if (!model) {
|
|
5413
|
+
throw new Error(
|
|
5414
|
+
`Model '${q.model}' not found. Available: ${[...modelMap.keys()].join(", ")}`
|
|
5415
|
+
);
|
|
5416
|
+
}
|
|
5417
|
+
const { sql: sqlStr, params } = buildSQLWithCache(
|
|
5418
|
+
model,
|
|
5419
|
+
allModels,
|
|
5420
|
+
q.method,
|
|
5421
|
+
q.args || {},
|
|
5422
|
+
dialect
|
|
5423
|
+
);
|
|
5424
|
+
const rawResults = yield sql.unsafe(sqlStr, params);
|
|
5425
|
+
results.push(transformQueryResults(q.method, rawResults));
|
|
5426
|
+
}
|
|
5427
|
+
return results;
|
|
5428
|
+
});
|
|
5429
|
+
return yield postgresClient.begin(transactionCallback);
|
|
5430
|
+
});
|
|
5431
|
+
}
|
|
5432
|
+
};
|
|
5433
|
+
}
|
|
5144
5434
|
var ACCELERATED_METHODS = /* @__PURE__ */ new Set([
|
|
5145
5435
|
"findMany",
|
|
5146
5436
|
"findFirst",
|
|
@@ -5257,6 +5547,58 @@ function handleMethodCall(ctx, method, args, deps) {
|
|
|
5257
5547
|
}
|
|
5258
5548
|
});
|
|
5259
5549
|
}
|
|
5550
|
+
var DeferredQuery = class {
|
|
5551
|
+
constructor(model, method, args) {
|
|
5552
|
+
this.model = model;
|
|
5553
|
+
this.method = method;
|
|
5554
|
+
this.args = args;
|
|
5555
|
+
}
|
|
5556
|
+
then(onfulfilled, onrejected) {
|
|
5557
|
+
throw new Error(
|
|
5558
|
+
"Cannot await a batch query. Batch queries must not be awaited inside the $batch callback."
|
|
5559
|
+
);
|
|
5560
|
+
}
|
|
5561
|
+
};
|
|
5562
|
+
function createBatchProxy(modelMap, allowedModels) {
|
|
5563
|
+
return new Proxy(
|
|
5564
|
+
{},
|
|
5565
|
+
{
|
|
5566
|
+
get(_target, modelName) {
|
|
5567
|
+
if (typeof modelName === "symbol") return void 0;
|
|
5568
|
+
const model = modelMap.get(modelName);
|
|
5569
|
+
if (!model) {
|
|
5570
|
+
throw new Error(
|
|
5571
|
+
`Model '${modelName}' not found. Available: ${[...modelMap.keys()].join(", ")}`
|
|
5572
|
+
);
|
|
5573
|
+
}
|
|
5574
|
+
if (allowedModels && !allowedModels.includes(modelName)) {
|
|
5575
|
+
throw new Error(
|
|
5576
|
+
`Model '${modelName}' not allowed. Allowed: ${allowedModels.join(", ")}`
|
|
5577
|
+
);
|
|
5578
|
+
}
|
|
5579
|
+
return new Proxy(
|
|
5580
|
+
{},
|
|
5581
|
+
{
|
|
5582
|
+
get(_target2, method) {
|
|
5583
|
+
if (!ACCELERATED_METHODS.has(method)) {
|
|
5584
|
+
throw new Error(
|
|
5585
|
+
`Method '${method}' not supported in batch. Supported: ${[...ACCELERATED_METHODS].join(", ")}`
|
|
5586
|
+
);
|
|
5587
|
+
}
|
|
5588
|
+
return (args) => {
|
|
5589
|
+
return new DeferredQuery(
|
|
5590
|
+
modelName,
|
|
5591
|
+
method,
|
|
5592
|
+
args
|
|
5593
|
+
);
|
|
5594
|
+
};
|
|
5595
|
+
}
|
|
5596
|
+
}
|
|
5597
|
+
);
|
|
5598
|
+
}
|
|
5599
|
+
}
|
|
5600
|
+
);
|
|
5601
|
+
}
|
|
5260
5602
|
function speedExtension(config) {
|
|
5261
5603
|
const {
|
|
5262
5604
|
postgres,
|
|
@@ -5281,9 +5623,7 @@ function speedExtension(config) {
|
|
|
5281
5623
|
} else if (dmmf) {
|
|
5282
5624
|
models = schemaParser.convertDMMFToModels(dmmf.datamodel);
|
|
5283
5625
|
} else {
|
|
5284
|
-
throw new Error(
|
|
5285
|
-
'speedExtension requires either models or dmmf parameter.\n\n\u26A0\uFE0F RECOMMENDED APPROACH:\n Use the generated extension for zero runtime overhead:\n\n import { speedExtension } from "./generated/sql"\n const prisma = new PrismaClient().$extends(\n speedExtension({ postgres: sql })\n )\n\n 1. Add generator to schema.prisma:\n generator sql {\n provider = "prisma-sql-generator"\n }\n\n 2. Run: npx prisma generate\n\n 3. Import from generated file\n\n\u274C RUNTIME-ONLY MODE:\n If you cannot use the generator, provide models or dmmf:\n\n import { speedExtension } from "prisma-sql"\n import { MODELS } from "./generated/sql"\n const prisma = new PrismaClient().$extends(\n speedExtension({ postgres: sql, models: MODELS })\n )\n\n Or with DMMF (auto-converts on startup):\n\n import { Prisma } from "@prisma/client"\n const prisma = new PrismaClient().$extends(\n speedExtension({ postgres: sql, dmmf: Prisma.dmmf })\n )'
|
|
5286
|
-
);
|
|
5626
|
+
throw new Error("speedExtension requires either models or dmmf parameter.");
|
|
5287
5627
|
}
|
|
5288
5628
|
if (!Array.isArray(models) || models.length === 0) {
|
|
5289
5629
|
throw new Error("speedExtension: models array is empty or invalid");
|
|
@@ -5314,10 +5654,86 @@ function speedExtension(config) {
|
|
|
5314
5654
|
for (const method of ACCELERATED_METHODS) {
|
|
5315
5655
|
methodHandlers[method] = createMethodHandler(method);
|
|
5316
5656
|
}
|
|
5657
|
+
// Minimal raw-SQL runner handed to the transaction executor. Only the
// postgres driver exposes an unsafe() escape hatch; there is no sqlite
// equivalent wired up here, so non-postgres dialects reject immediately.
const executeRaw = (rawSql, rawParams) => __async(null, null, function* () {
  if (dialect !== "postgres") {
    throw new Error("Raw execution for sqlite not supported in transactions");
  }
  return yield client.unsafe(rawSql, rawParams);
});
// Executor that turns a list of deferred queries into a single transaction.
const txExecutor = createTransactionExecutor({
  modelMap,
  allModels: models,
  dialect,
  executeRaw,
  postgresClient: postgres
});
|
|
5670
|
+
// Implements client.$batch: the callback receives a proxy that records
// queries as DeferredQuery placeholders; all of them are then compiled into
// one SQL statement, executed once, and the per-key results are returned.
// Reports a single aggregate onQuery event ("_batch") with the total duration.
function batch(callback) {
  return __async(this, null, function* () {
    const proxy = createBatchProxy(modelMap, allowedModels);
    const collected = yield callback(proxy);
    const batchQueries = {};
    for (const [key, entry] of Object.entries(collected)) {
      // Anything other than a DeferredQuery means the callback awaited (and
      // thereby consumed) the placeholder — reject with a pointed message.
      if (!(entry instanceof DeferredQuery)) {
        throw new Error(
          `Batch query '${key}' must be a deferred query. Did you await it?`
        );
      }
      batchQueries[key] = {
        model: entry.model,
        method: entry.method,
        args: entry.args || {}
      };
    }
    const startedAt = Date.now();
    const { sql, params, keys } = buildBatchSql(batchQueries, modelMap, models, dialect);
    if (debug) {
      console.log(`[${dialect}] $batch (${keys.length} queries)`);
      console.log("SQL:", sql);
      console.log("Params:", params);
    }
    // The combined statement yields a single row holding every sub-result.
    const rows = yield executeQuery(sql, params);
    const results = parseBatchResults(rows[0], keys, batchQueries);
    const duration = Date.now() - startedAt;
    onQuery == null ? void 0 : onQuery({
      model: "_batch",
      method: "batch",
      sql,
      params,
      duration
    });
    return results;
  });
}
|
|
5713
|
+
// Implements client.$transaction: delegates the ordered query list to the
// transaction executor and emits one aggregate onQuery event for the whole
// transaction (sql is a synthetic "TRANSACTION(n)" marker, not real SQL).
function transaction(queries, options) {
  return __async(this, null, function* () {
    const t0 = Date.now();
    if (debug) {
      console.log(`[${dialect}] $transaction (${queries.length} queries)`);
    }
    const results = yield txExecutor.execute(queries, options);
    const elapsed = Date.now() - t0;
    // NOTE(review): method is reported as "count" here — presumably a
    // placeholder value; confirm against whatever consumes onQuery events.
    onQuery == null ? void 0 : onQuery({
      model: "_transaction",
      method: "count",
      sql: `TRANSACTION(${queries.length})`,
      params: [],
      duration: elapsed
    });
    return results;
  });
}
|
|
5317
5731
|
return prisma.$extends({
|
|
5318
5732
|
name: "prisma-sql-speed",
|
|
5319
5733
|
client: {
|
|
5320
|
-
$original: prisma
|
|
5734
|
+
$original: prisma,
|
|
5735
|
+
$batch: batch,
|
|
5736
|
+
$transaction: transaction
|
|
5321
5737
|
},
|
|
5322
5738
|
model: {
|
|
5323
5739
|
$allModels: methodHandlers
|
|
@@ -5325,6 +5741,10 @@ function speedExtension(config) {
|
|
|
5325
5741
|
});
|
|
5326
5742
|
};
|
|
5327
5743
|
}
|
|
5744
|
+
// Convenience wrapper: builds the speed extension from config and applies it
// to the given Prisma client in one call.
function extendPrisma(prisma, config) {
  return speedExtension(config)(prisma);
}
|
|
5328
5748
|
function createToSQLFunction(models, dialect) {
|
|
5329
5749
|
if (!models || !Array.isArray(models) || models.length === 0) {
|
|
5330
5750
|
throw new Error("createToSQL requires non-empty models array");
|
|
@@ -5359,13 +5779,18 @@ function createPrismaSQL(config) {
|
|
|
5359
5779
|
throw new Error("createPrismaSQL: models array is empty or invalid");
|
|
5360
5780
|
}
|
|
5361
5781
|
const toSQL = createToSQLFunction(models, dialect);
|
|
5782
|
+
const modelMap = new Map(models.map((m) => [m.name, m]));
|
|
5362
5783
|
// Compiles a (model, method, args) call to SQL via toSQL and executes it on
// the configured client. args defaults to an empty object when omitted.
function query(_0, _1) {
  return __async(this, arguments, function* (model, method, args = {}) {
    const built = toSQL(model, method, args);
    return execute(client, built.sql, built.params);
  });
}
|
|
5368
|
-
|
|
5789
|
+
// Compiles a keyed set of batch queries into a single SQL string plus its
// parameter list, without executing anything (keys metadata is dropped).
function batchSql(queries) {
  const built = buildBatchSql(queries, modelMap, models, dialect);
  return { sql: built.sql, params: built.params };
}
|
|
5793
|
+
return { toSQL, query, batchSql, client };
|
|
5369
5794
|
}
|
|
5370
5795
|
function generateSQL2(directive) {
|
|
5371
5796
|
return generateSQL(directive);
|
|
@@ -5411,13 +5836,19 @@ Object.defineProperty(exports, "convertDMMFToModels", {
|
|
|
5411
5836
|
enumerable: true,
|
|
5412
5837
|
get: function () { return schemaParser.convertDMMFToModels; }
|
|
5413
5838
|
});
|
|
5839
|
+
exports.buildBatchCountSql = buildBatchCountSql;
|
|
5840
|
+
exports.buildBatchSql = buildBatchSql;
|
|
5414
5841
|
exports.buildSQL = buildSQL;
|
|
5415
5842
|
exports.createPrismaSQL = createPrismaSQL;
|
|
5416
5843
|
exports.createToSQL = createToSQL;
|
|
5844
|
+
exports.createTransactionExecutor = createTransactionExecutor;
|
|
5845
|
+
exports.extendPrisma = extendPrisma;
|
|
5417
5846
|
exports.generateAllSQL = generateAllSQL;
|
|
5418
5847
|
exports.generateSQL = generateSQL2;
|
|
5419
5848
|
exports.generateSQLByModel = generateSQLByModel;
|
|
5420
5849
|
exports.getGlobalDialect = getGlobalDialect;
|
|
5850
|
+
exports.parseBatchCountResults = parseBatchCountResults;
|
|
5851
|
+
exports.parseBatchResults = parseBatchResults;
|
|
5421
5852
|
exports.setGlobalDialect = setGlobalDialect;
|
|
5422
5853
|
exports.speedExtension = speedExtension;
|
|
5423
5854
|
exports.transformQueryResults = transformQueryResults;
|