@mikro-orm/sql 7.1.0-dev.1 → 7.1.0-dev.11

This diff shows the content of publicly available package versions released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
1
1
  import { type Dictionary, type Transaction } from '@mikro-orm/core';
2
2
  import { SchemaHelper } from '../../schema/SchemaHelper.js';
3
3
  import type { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
4
- import type { CheckDef, Column, ForeignKey, IndexDef, Table, TableDifference } from '../../typings.js';
4
+ import type { CheckDef, Column, ForeignKey, IndexDef, Table, TableDifference, SqlTriggerDef } from '../../typings.js';
5
5
  import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
6
6
  import type { DatabaseTable } from '../../schema/DatabaseTable.js';
7
7
  export declare class PostgreSqlSchemaHelper extends SchemaHelper {
@@ -49,6 +49,13 @@ export declare class PostgreSqlSchemaHelper extends SchemaHelper {
49
49
  items: string[];
50
50
  }>, ctx?: Transaction): Promise<Dictionary<Column[]>>;
51
51
  getAllChecks(connection: AbstractSqlConnection, tablesBySchemas: Map<string | undefined, Table[]>, ctx?: Transaction): Promise<Dictionary<CheckDef[]>>;
52
+ /** Generates SQL to create a PostgreSQL trigger and its associated function. */
53
+ createTrigger(table: DatabaseTable, trigger: SqlTriggerDef): string;
54
+ /** Generates SQL to drop a PostgreSQL trigger and its associated function. */
55
+ dropTrigger(table: DatabaseTable, trigger: SqlTriggerDef): string;
56
+ private getSchemaQualifiedTriggerFnName;
57
+ getAllTriggers(connection: AbstractSqlConnection, tablesBySchemas: Map<string | undefined, Table[]>): Promise<Dictionary<SqlTriggerDef[]>>;
58
+ private getTriggersSQL;
52
59
  getAllForeignKeys(connection: AbstractSqlConnection, tablesBySchemas: Map<string | undefined, Table[]>, ctx?: Transaction): Promise<Dictionary<Dictionary<ForeignKey>>>;
53
60
  getNativeEnumDefinitions(connection: AbstractSqlConnection, schemas: string[], ctx?: Transaction): Promise<Dictionary<{
54
61
  name: string;
@@ -118,6 +118,7 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {
118
118
  const indexes = await this.getAllIndexes(connection, tables, ctx);
119
119
  const checks = await this.getAllChecks(connection, tablesBySchema, ctx);
120
120
  const fks = await this.getAllForeignKeys(connection, tablesBySchema, ctx);
121
+ const triggers = await this.getAllTriggers(connection, tablesBySchema);
121
122
  for (const t of tables) {
122
123
  const key = this.getTableKey(t);
123
124
  const table = schema.addTable(t.table_name, t.schema_name, t.table_comment);
@@ -126,6 +127,9 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {
126
127
  if (columns[key]) {
127
128
  table.init(columns[key], indexes[key], checks[key], pks, fks[key], enums);
128
129
  }
130
+ if (triggers[key]) {
131
+ table.setTriggers(triggers[key]);
132
+ }
129
133
  }
130
134
  }
131
135
  async getAllIndexes(connection, tables, ctx) {
@@ -375,6 +379,95 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {
375
379
  }
376
380
  return ret;
377
381
  }
382
+ /** Generates SQL to create a PostgreSQL trigger and its associated function. */
383
+ createTrigger(table, trigger) {
384
+ if (trigger.expression) {
385
+ return trigger.expression;
386
+ }
387
+ const timing = trigger.timing.toUpperCase();
388
+ const events = trigger.events.map(e => e.toUpperCase()).join(' OR ');
389
+ const forEach = trigger.forEach === 'statement' ? 'STATEMENT' : 'ROW';
390
+ const when = trigger.when ? `\n when (${trigger.when})` : '';
391
+ const fnName = this.getSchemaQualifiedTriggerFnName(table, trigger);
392
+ const triggerName = this.platform.quoteIdentifier(trigger.name);
393
+ const fnSql = `create or replace function ${fnName}() returns trigger as $$ begin ${trigger.body}; end; $$ language plpgsql`;
394
+ const triggerSql = `create trigger ${triggerName} ${timing} ${events} on ${table.getQuotedName()} for each ${forEach}${when} execute function ${fnName}()`;
395
+ return `${fnSql};\n${triggerSql}`;
396
+ }
397
+ /** Generates SQL to drop a PostgreSQL trigger and its associated function. */
398
+ dropTrigger(table, trigger) {
399
+ const triggerName = this.platform.quoteIdentifier(trigger.name);
400
+ const fnName = this.getSchemaQualifiedTriggerFnName(table, trigger);
401
+ return `drop trigger if exists ${triggerName} on ${table.getQuotedName()};\ndrop function if exists ${fnName}()`;
402
+ }
403
+ getSchemaQualifiedTriggerFnName(table, trigger) {
404
+ const rawName = `${table.name}_${trigger.name}_fn`;
405
+ const defaultSchema = this.platform.getDefaultSchemaName();
406
+ if (table.schema && table.schema !== defaultSchema) {
407
+ return `${this.platform.quoteIdentifier(table.schema)}.${this.platform.quoteIdentifier(rawName)}`;
408
+ }
409
+ return this.platform.quoteIdentifier(rawName);
410
+ }
411
+ async getAllTriggers(connection, tablesBySchemas) {
412
+ const sql = this.getTriggersSQL(tablesBySchemas);
413
+ const allTriggers = await connection.execute(sql);
414
+ const ret = {};
415
+ const triggerMap = new Map();
416
+ for (const row of allTriggers) {
417
+ const key = this.getTableKey(row);
418
+ const dedupeKey = `${key}:${row.trigger_name}`;
419
+ if (triggerMap.has(dedupeKey)) {
420
+ // Same trigger with multiple events — merge events
421
+ const existing = triggerMap.get(dedupeKey);
422
+ const event = row.event.toLowerCase();
423
+ if (!existing.events.includes(event)) {
424
+ existing.events.push(event);
425
+ }
426
+ continue;
427
+ }
428
+ ret[key] ??= [];
429
+ // prosrc includes the full function body between $$ delimiters (e.g. " begin RETURN NEW; end;")
430
+ // Strip the begin/end wrapper to get just the trigger body for round-trip comparison
431
+ let body = (row.function_body ?? '').trim();
432
+ const beginEndMatch = /^\s*begin\s+([\s\S]*?)\s*end;?\s*$/i.exec(body);
433
+ if (beginEndMatch) {
434
+ body = beginEndMatch[1].trim().replace(/;\s*$/, '');
435
+ }
436
+ const trigger = {
437
+ name: row.trigger_name,
438
+ timing: row.timing.toLowerCase(),
439
+ events: [row.event.toLowerCase()],
440
+ forEach: row.for_each.toLowerCase(),
441
+ body,
442
+ when: row.when_clause ?? undefined,
443
+ };
444
+ ret[key].push(trigger);
445
+ triggerMap.set(dedupeKey, trigger);
446
+ }
447
+ return ret;
448
+ }
449
+ getTriggersSQL(tablesBySchemas) {
450
+ const conditions = [];
451
+ for (const [schema, tables] of tablesBySchemas) {
452
+ const names = tables.map(t => this.platform.quoteValue(t.table_name)).join(', ');
453
+ const schemaName = this.platform.quoteValue(schema ?? this.platform.getDefaultSchemaName());
454
+ conditions.push(`(t.event_object_schema = ${schemaName} and t.event_object_table in (${names}))`);
455
+ }
456
+ // Function lookup uses the '{table}_{trigger}_fn' convention from createTrigger().
457
+ // External triggers with different function names will have NULL body;
458
+ // use the `expression` escape hatch for those.
459
+ return `select t.trigger_name, t.event_object_schema as schema_name, t.event_object_table as table_name,
460
+ t.event_manipulation as event, t.action_timing as timing,
461
+ t.action_orientation as for_each,
462
+ t.action_condition as when_clause,
463
+ pg_get_functiondef(p.oid) as function_def,
464
+ p.prosrc as function_body
465
+ from information_schema.triggers t
466
+ left join pg_namespace n on n.nspname = t.event_object_schema
467
+ left join pg_proc p on p.proname = t.event_object_table || '_' || t.trigger_name || '_fn' and p.pronamespace = n.oid
468
+ where (${conditions.join(' or ')})
469
+ order by t.trigger_name, t.event_manipulation`;
470
+ }
378
471
  async getAllForeignKeys(connection, tablesBySchemas, ctx) {
379
472
  const sql = `select nsp1.nspname schema_name, cls1.relname table_name, nsp2.nspname referenced_schema_name,
380
473
  cls2.relname referenced_table_name, a.attname column_name, af.attname referenced_column_name, conname constraint_name,
@@ -1,7 +1,7 @@
1
1
  import { type Connection, type Transaction } from '@mikro-orm/core';
2
2
  import type { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
3
3
  import { SchemaHelper } from '../../schema/SchemaHelper.js';
4
- import type { Column, IndexDef, Table, TableDifference } from '../../typings.js';
4
+ import type { Column, IndexDef, Table, TableDifference, SqlTriggerDef } from '../../typings.js';
5
5
  import type { DatabaseTable } from '../../schema/DatabaseTable.js';
6
6
  import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
7
7
  export declare class SqliteSchemaHelper extends SchemaHelper {
@@ -64,5 +64,9 @@ export declare class SqliteSchemaHelper extends SchemaHelper {
64
64
  */
65
65
  getReferencedTableName(referencedTableName: string, schema?: string): string;
66
66
  alterTable(diff: TableDifference, safe?: boolean): string[];
67
+ /** Generates SQL to create SQLite triggers. SQLite requires one trigger per event. */
68
+ createTrigger(table: DatabaseTable, trigger: SqlTriggerDef): string;
69
+ private getTableTriggers;
70
+ private parseTriggerDDL;
67
71
  private getAlterTempTableSQL;
68
72
  }
@@ -101,7 +101,9 @@ export class SqliteSchemaHelper extends SchemaHelper {
101
101
  const pks = await this.getPrimaryKeys(connection, indexes, table.name, table.schema, ctx);
102
102
  const fks = await this.getForeignKeys(connection, table.name, table.schema, ctx);
103
103
  const enums = await this.getEnumDefinitions(connection, table.name, table.schema, ctx);
104
+ const triggers = await this.getTableTriggers(connection, table.name);
104
105
  table.init(cols, indexes, checks, pks, fks, enums);
106
+ table.setTriggers(triggers);
105
107
  }
106
108
  }
107
109
  createTable(table, alter) {
@@ -140,6 +142,9 @@ export class SqliteSchemaHelper extends SchemaHelper {
140
142
  for (const index of table.getIndexes()) {
141
143
  this.append(ret, this.createIndex(index, table));
142
144
  }
145
+ for (const trigger of table.getTriggers()) {
146
+ this.append(ret, this.createTrigger(table, trigger));
147
+ }
143
148
  return ret;
144
149
  }
145
150
  createTableColumn(column, table, _changedProperties) {
@@ -501,8 +506,102 @@ export class SqliteSchemaHelper extends SchemaHelper {
501
506
  this.append(ret, this.getRenameIndexSQL(diff.name, index, oldIndexName));
502
507
  }
503
508
  }
509
+ for (const trigger of Object.values(diff.removedTriggers)) {
510
+ this.append(ret, this.dropTrigger(diff.toTable, trigger));
511
+ }
512
+ for (const trigger of Object.values(diff.changedTriggers)) {
513
+ this.append(ret, this.dropTrigger(diff.toTable, trigger));
514
+ this.append(ret, this.createTrigger(diff.toTable, trigger));
515
+ }
516
+ for (const trigger of Object.values(diff.addedTriggers)) {
517
+ this.append(ret, this.createTrigger(diff.toTable, trigger));
518
+ }
504
519
  return ret;
505
520
  }
521
+ /** Generates SQL to create SQLite triggers. SQLite requires one trigger per event. */
522
+ createTrigger(table, trigger) {
523
+ if (trigger.expression) {
524
+ return trigger.expression;
525
+ }
526
+ const timing = trigger.timing.toUpperCase();
527
+ const forEach = trigger.forEach === 'statement' ? 'STATEMENT' : 'ROW';
528
+ const ret = [];
529
+ for (const event of trigger.events) {
530
+ const name = trigger.events.length > 1 ? `${trigger.name}_${event}` : trigger.name;
531
+ const when = trigger.when ? `\n when ${trigger.when}` : '';
532
+ ret.push(`create trigger ${this.quote(name)} ${timing} ${event.toUpperCase()} on ${table.getQuotedName()} for each ${forEach}${when} begin ${trigger.body}; end`);
533
+ }
534
+ return ret.join(';\n');
535
+ }
536
+ async getTableTriggers(connection, tableName) {
537
+ const rows = await connection.execute(`select name, sql from sqlite_master where type = 'trigger' and tbl_name = ?`, [tableName]);
538
+ // First pass: parse all triggers and collect names to detect multi-event groups
539
+ const parsedRows = [];
540
+ for (const row of rows) {
541
+ /* v8 ignore next 3 */
542
+ if (!row.sql) {
543
+ continue;
544
+ }
545
+ const parsed = this.parseTriggerDDL(row.sql, row.name);
546
+ if (parsed) {
547
+ parsedRows.push({ name: row.name, parsed });
548
+ }
549
+ }
550
+ const allNames = parsedRows.map(r => r.name);
551
+ const triggers = [];
552
+ const triggerMap = new Map();
553
+ for (const { name, parsed } of parsedRows) {
554
+ // Only strip event suffix when another trigger with the same base exists
555
+ const eventLower = parsed.events[0];
556
+ const candidateBase = name.endsWith(`_${eventLower}`) ? name.slice(0, -eventLower.length - 1) : null;
557
+ const baseName = candidateBase && allNames.some(n => n !== name && n.startsWith(`${candidateBase}_`)) ? candidateBase : name;
558
+ if (triggerMap.has(baseName)) {
559
+ const existing = triggerMap.get(baseName);
560
+ if (!existing.events.includes(parsed.events[0])) {
561
+ existing.events.push(parsed.events[0]);
562
+ }
563
+ continue;
564
+ }
565
+ const trigger = { ...parsed, name: baseName };
566
+ triggers.push(trigger);
567
+ triggerMap.set(baseName, trigger);
568
+ }
569
+ return triggers;
570
+ }
571
+ parseTriggerDDL(sql, name) {
572
+ // Split at the last top-level BEGIN to separate header from body,
573
+ // so that a WHEN clause containing the word "begin" in a string literal doesn't confuse parsing.
574
+ const beginIdx = sql.search(/\bbegin\b(?=[^]*$)/i);
575
+ /* v8 ignore next 3 */
576
+ if (beginIdx === -1) {
577
+ return null;
578
+ }
579
+ const header = sql.slice(0, beginIdx);
580
+ const bodyPart = sql.slice(beginIdx);
581
+ const headerMatch = /create\s+trigger\s+["`]?\w+["`]?\s+(before|after|instead\s+of)\s+(insert|update|delete)\s+on\s+["`]?\w+["`]?\s*(?:for\s+each\s+(row|statement))?\s*(?:when\s+([\s\S]*?))?\s*$/i.exec(header);
582
+ /* v8 ignore next 3 */
583
+ if (!headerMatch) {
584
+ return null;
585
+ }
586
+ const bodyMatch = /^begin\s+([\s\S]*?)\s*end/i.exec(bodyPart);
587
+ /* v8 ignore next 3 */
588
+ if (!bodyMatch) {
589
+ return null;
590
+ }
591
+ const timing = headerMatch[1].toLowerCase();
592
+ const event = headerMatch[2].toLowerCase();
593
+ const forEach = (headerMatch[3]?.toLowerCase() ?? 'row');
594
+ const when = headerMatch[4]?.trim() || undefined;
595
+ const body = bodyMatch[1].trim().replace(/;\s*$/, '');
596
+ return {
597
+ name,
598
+ timing,
599
+ events: [event],
600
+ forEach,
601
+ body,
602
+ when,
603
+ };
604
+ }
506
605
  getAlterTempTableSQL(changedTable) {
507
606
  const tempName = `${changedTable.toTable.name}__temp_alter`;
508
607
  const quotedName = this.quote(changedTable.toTable.name);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mikro-orm/sql",
3
- "version": "7.1.0-dev.1",
3
+ "version": "7.1.0-dev.11",
4
4
  "description": "TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, MySQL, PostgreSQL and SQLite databases as well as usage with vanilla JavaScript.",
5
5
  "keywords": [
6
6
  "data-mapper",
@@ -53,7 +53,7 @@
53
53
  "@mikro-orm/core": "^7.0.11"
54
54
  },
55
55
  "peerDependencies": {
56
- "@mikro-orm/core": "7.1.0-dev.1"
56
+ "@mikro-orm/core": "7.1.0-dev.11"
57
57
  },
58
58
  "engines": {
59
59
  "node": ">= 22.17.0"
@@ -37,6 +37,11 @@ interface Options {
37
37
  limit?: number;
38
38
  offset?: number;
39
39
  data?: Dictionary;
40
+ insertSubQuery?: {
41
+ sql: string;
42
+ params: unknown[];
43
+ columns: string[];
44
+ };
40
45
  onConflict?: OnConflictClause;
41
46
  lockMode?: LockMode;
42
47
  lockTables?: string[];
@@ -105,6 +110,7 @@ export declare class NativeQueryBuilder implements Subquery {
105
110
  limit(limit: number): this;
106
111
  offset(offset: number): this;
107
112
  insert(data: Dictionary): this;
113
+ insertSelect(columns: string[], subQuery: NativeQueryBuilder | RawQueryFragment): this;
108
114
  update(data: Dictionary): this;
109
115
  delete(): this;
110
116
  truncate(): this;
@@ -215,6 +215,12 @@ export class NativeQueryBuilder {
215
215
  this.options.data = data;
216
216
  return this;
217
217
  }
218
+ insertSelect(columns, subQuery) {
219
+ this.type = QueryType.INSERT;
220
+ const { sql, params } = subQuery instanceof NativeQueryBuilder ? subQuery.compile() : { sql: subQuery.sql, params: [...subQuery.params] };
221
+ this.options.insertSubQuery = { sql, params, columns: columns.map(c => this.quote(c)) };
222
+ return this;
223
+ }
218
224
  update(data) {
219
225
  this.type = QueryType.UPDATE;
220
226
  this.options.data ??= {};
@@ -352,12 +358,21 @@ export class NativeQueryBuilder {
352
358
  return fields;
353
359
  }
354
360
  compileInsert() {
355
- if (!this.options.data) {
361
+ if (!this.options.data && !this.options.insertSubQuery) {
356
362
  throw new Error('No data provided');
357
363
  }
358
364
  this.parts.push('insert');
359
365
  this.addHintComment();
360
366
  this.parts.push(`into ${this.getTableName()}`);
367
+ if (this.options.insertSubQuery) {
368
+ if (this.options.insertSubQuery.columns.length) {
369
+ this.parts.push(`(${this.options.insertSubQuery.columns.join(', ')})`);
370
+ }
371
+ this.addOutputClause('inserted');
372
+ this.parts.push(this.options.insertSubQuery.sql);
373
+ this.params.push(...this.options.insertSubQuery.params);
374
+ return;
375
+ }
361
376
  if (Object.keys(this.options.data).length === 0) {
362
377
  this.addOutputClause('inserted');
363
378
  this.parts.push('default values');
@@ -158,6 +158,8 @@ export interface QBState<Entity extends object> {
158
158
  schema?: string;
159
159
  cond: Dictionary;
160
160
  data?: Dictionary;
161
+ insertSubQuery?: QueryBuilder<any>;
162
+ insertColumns?: string[];
161
163
  orderBy: QueryOrderMap<Entity>[];
162
164
  groupBy: InternalField<Entity>[];
163
165
  having: Dictionary;
@@ -191,6 +193,11 @@ export interface QBState<Entity extends object> {
191
193
  })[];
192
194
  tptJoinsApplied: boolean;
193
195
  autoJoinedPaths: string[];
196
+ partitionLimit?: {
197
+ partitionBy: string;
198
+ limit: number;
199
+ offset?: number;
200
+ };
194
201
  }
195
202
  /**
196
203
  * SQL query builder with fluent interface.
@@ -324,6 +331,35 @@ export declare class QueryBuilder<Entity extends object = AnyEntity, RootAlias e
324
331
  * ```
325
332
  */
326
333
  insert(data: RequiredEntityData<Entity> | RequiredEntityData<Entity>[]): InsertQueryBuilder<Entity, RootAlias, Context>;
334
+ /**
335
+ * Creates an INSERT ... SELECT query that copies rows from the source query.
336
+ *
337
+ * Column resolution (3 tiers):
338
+ * 1. No explicit select on source, no explicit columns → all cloneable columns derived from entity metadata
339
+ * 2. Explicit select on source, no explicit columns → columns derived from selected field names
340
+ * 3. Explicit `columns` option → user-provided column list
341
+ *
342
+ * @example
343
+ * ```ts
344
+ * // Clone all fields (columns auto-derived from metadata)
345
+ * const source = em.createQueryBuilder(User).where({ id: 1 });
346
+ * await em.createQueryBuilder(User).insertFrom(source).execute();
347
+ *
348
+ * // Clone with overrides via raw() aliases
349
+ * const source = em.createQueryBuilder(User)
350
+ * .select(['name', raw("'new@email.com'").as('email')])
351
+ * .where({ id: 1 });
352
+ * await em.createQueryBuilder(User).insertFrom(source).execute();
353
+ *
354
+ * // Explicit columns for full control
355
+ * await em.createQueryBuilder(User)
356
+ * .insertFrom(source, { columns: ['name', 'email'] })
357
+ * .execute();
358
+ * ```
359
+ */
360
+ insertFrom(subQuery: QueryBuilder<any>, options?: {
361
+ columns?: Field<Entity, RootAlias, Context>[];
362
+ }): InsertQueryBuilder<Entity, RootAlias, Context>;
327
363
  /**
328
364
  * Creates an UPDATE query with the given data.
329
365
  * Use `where()` to specify which rows to update.
@@ -644,6 +680,12 @@ export declare class QueryBuilder<Entity extends object = AnyEntity, RootAlias e
644
680
  setFlag(flag: QueryFlag): this;
645
681
  unsetFlag(flag: QueryFlag): this;
646
682
  hasFlag(flag: QueryFlag): boolean;
683
+ /** @internal */
684
+ setPartitionLimit(opts: {
685
+ partitionBy: string;
686
+ limit: number;
687
+ offset?: number;
688
+ }): this;
647
689
  cache(config?: boolean | number | [string, number]): this;
648
690
  /**
649
691
  * Adds index hint to the FROM clause.
@@ -877,6 +919,16 @@ export declare class QueryBuilder<Entity extends object = AnyEntity, RootAlias e
877
919
  protected resolveNestedPath(field: string): string | string[];
878
920
  protected init(type: QueryType, data?: any, cond?: any): this;
879
921
  private getQueryBase;
922
+ /**
923
+ * Resolves the INSERT column list for `insertFrom()`.
924
+ *
925
+ * Tier 1: Explicit `insertColumns` from `options.columns` → map property names to field names
926
+ * Tier 2: Source QB has explicit select fields → derive from those
927
+ * Tier 3: Derive from target entity metadata (all cloneable columns), auto-populate source select
928
+ */
929
+ private resolveInsertFromColumns;
930
+ /** Returns properties that are safe to clone (persistable, non-PK, non-generated). */
931
+ private getCloneableProps;
880
932
  private applyDiscriminatorCondition;
881
933
  /**
882
934
  * Ensures TPT joins are applied. Can be called early before finalize() to populate
@@ -911,6 +963,17 @@ export declare class QueryBuilder<Entity extends object = AnyEntity, RootAlias e
911
963
  private processNestedJoins;
912
964
  private hasToManyJoins;
913
965
  protected wrapPaginateSubQuery(meta: EntityMetadata): void;
966
+ /**
967
+ * Wraps the inner query (which has ROW_NUMBER in SELECT) with an outer query
968
+ * that filters by the __rn column to apply per-parent limiting.
969
+ */
970
+ protected wrapPartitionLimitSubQuery(innerQb: NativeQueryBuilder): NativeQueryBuilder;
971
+ /**
972
+ * Adds ROW_NUMBER() OVER (PARTITION BY ...) to the SELECT list and prepares
973
+ * the query state for per-parent limiting. The actual wrapping into a subquery
974
+ * with __rn filtering happens in getNativeQuery().
975
+ */
976
+ protected preparePartitionLimit(): void;
914
977
  /**
915
978
  * Computes the set of populate paths from the _populate hints.
916
979
  */