@mikro-orm/sql 7.0.0-dev.321 → 7.0.0-dev.322

This diff shows the changes between publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
@@ -60,7 +60,7 @@ export class AbstractSqlDriver extends DatabaseDriver {
60
60
  this.validateSqlOptions(options);
61
61
  const { first, last, before, after } = options;
62
62
  const isCursorPagination = [first, last, before, after].some(v => v != null);
63
- qb.__populateWhere = options._populateWhere;
63
+ qb.state.resolvedPopulateWhere = options._populateWhere;
64
64
  qb.select(fields)
65
65
  // only add populateWhere if we are populate-joining, as this will be used to add `on` conditions
66
66
  .populate(populate, joinedProps.length > 0 ? populateWhere : undefined, joinedProps.length > 0 ? options.populateFilter : undefined)
@@ -270,7 +270,7 @@ export class AbstractSqlDriver extends DatabaseDriver {
270
270
  * and need to be renamed back to `column_name` for the result mapper to work.
271
271
  */
272
272
  mapTPTColumns(result, meta, qb) {
273
- const tptAliases = qb._tptAlias;
273
+ const tptAliases = qb.state.tptAlias;
274
274
  // Walk up the TPT hierarchy
275
275
  let parentMeta = meta.tptParent;
276
276
  while (parentMeta) {
@@ -528,7 +528,7 @@ export class AbstractSqlDriver extends DatabaseDriver {
528
528
  this.buildFields(meta, populate, joinedProps, qb, qb.alias, options, schema);
529
529
  }
530
530
  this.validateSqlOptions(options);
531
- qb.__populateWhere = options._populateWhere;
531
+ qb.state.resolvedPopulateWhere = options._populateWhere;
532
532
  qb.indexHint(options.indexHint)
533
533
  .collation(options.collation)
534
534
  .comment(options.comments)
@@ -1538,7 +1538,7 @@ export class AbstractSqlDriver extends DatabaseDriver {
1538
1538
  * @internal
1539
1539
  */
1540
1540
  findTPTChildAlias(qb, childMeta) {
1541
- const joins = qb._joins;
1541
+ const joins = qb.state.joins;
1542
1542
  for (const key of Object.keys(joins)) {
1543
1543
  if (joins[key].table === childMeta.tableName && key.includes('[tpt]')) {
1544
1544
  return joins[key].alias;
@@ -1,17 +1,8 @@
1
1
  import { type Dictionary, type EntityMetadata, type EntityProperty, type Primary, type Transaction } from '@mikro-orm/core';
2
2
  import { type AbstractSqlDriver } from './AbstractSqlDriver.js';
3
3
  export declare class PivotCollectionPersister<Entity extends object> {
4
- private readonly meta;
5
- private readonly driver;
6
- private readonly ctx?;
7
- private readonly schema?;
8
- private readonly loggerContext?;
9
- private readonly inserts;
10
- private readonly upserts;
11
- private readonly deletes;
12
- private readonly batchSize;
13
- private order;
14
- constructor(meta: EntityMetadata<Entity>, driver: AbstractSqlDriver, ctx?: Transaction | undefined, schema?: string | undefined, loggerContext?: Dictionary | undefined);
4
+ #private;
5
+ constructor(meta: EntityMetadata<Entity>, driver: AbstractSqlDriver, ctx?: Transaction, schema?: string, loggerContext?: Dictionary);
15
6
  enqueueUpdate(prop: EntityProperty<Entity>, insertDiff: Primary<Entity>[][], deleteDiff: Primary<Entity>[][] | boolean, pks: Primary<Entity>[], isInitialized?: boolean): void;
16
7
  private enqueueInsert;
17
8
  private enqueueUpsert;
@@ -1,55 +1,55 @@
1
1
  class InsertStatement {
2
- keys;
3
- data;
4
2
  order;
3
+ #keys;
4
+ #data;
5
5
  constructor(keys, data, order) {
6
- this.keys = keys;
7
- this.data = data;
8
6
  this.order = order;
7
+ this.#keys = keys;
8
+ this.#data = data;
9
9
  }
10
10
  getHash() {
11
- return JSON.stringify(this.data);
11
+ return JSON.stringify(this.#data);
12
12
  }
13
13
  getData() {
14
14
  const data = {};
15
- this.keys.forEach((key, idx) => (data[key] = this.data[idx]));
15
+ this.#keys.forEach((key, idx) => (data[key] = this.#data[idx]));
16
16
  return data;
17
17
  }
18
18
  }
19
19
  class DeleteStatement {
20
- keys;
21
- cond;
20
+ #keys;
21
+ #cond;
22
22
  constructor(keys, cond) {
23
- this.keys = keys;
24
- this.cond = cond;
23
+ this.#keys = keys;
24
+ this.#cond = cond;
25
25
  }
26
26
  getHash() {
27
- return JSON.stringify(this.cond);
27
+ return JSON.stringify(this.#cond);
28
28
  }
29
29
  getCondition() {
30
30
  const cond = {};
31
- this.keys.forEach((key, idx) => (cond[key] = this.cond[idx]));
31
+ this.#keys.forEach((key, idx) => (cond[key] = this.#cond[idx]));
32
32
  return cond;
33
33
  }
34
34
  }
35
35
  export class PivotCollectionPersister {
36
- meta;
37
- driver;
38
- ctx;
39
- schema;
40
- loggerContext;
41
- inserts = new Map();
42
- upserts = new Map();
43
- deletes = new Map();
44
- batchSize;
45
- order = 0;
36
+ #inserts = new Map();
37
+ #upserts = new Map();
38
+ #deletes = new Map();
39
+ #batchSize;
40
+ #order = 0;
41
+ #meta;
42
+ #driver;
43
+ #ctx;
44
+ #schema;
45
+ #loggerContext;
46
46
  constructor(meta, driver, ctx, schema, loggerContext) {
47
- this.meta = meta;
48
- this.driver = driver;
49
- this.ctx = ctx;
50
- this.schema = schema;
51
- this.loggerContext = loggerContext;
52
- this.batchSize = this.driver.config.get('batchSize');
47
+ this.#meta = meta;
48
+ this.#driver = driver;
49
+ this.#ctx = ctx;
50
+ this.#schema = schema;
51
+ this.#loggerContext = loggerContext;
52
+ this.#batchSize = this.#driver.config.get('batchSize');
53
53
  }
54
54
  enqueueUpdate(prop, insertDiff, deleteDiff, pks, isInitialized = true) {
55
55
  if (insertDiff.length) {
@@ -68,8 +68,8 @@ export class PivotCollectionPersister {
68
68
  for (const fks of insertDiff) {
69
69
  const statement = this.createInsertStatement(prop, fks, pks);
70
70
  const hash = statement.getHash();
71
- if (prop.owner || !this.inserts.has(hash)) {
72
- this.inserts.set(hash, statement);
71
+ if (prop.owner || !this.#inserts.has(hash)) {
72
+ this.#inserts.set(hash, statement);
73
73
  }
74
74
  }
75
75
  }
@@ -77,26 +77,26 @@ export class PivotCollectionPersister {
77
77
  for (const fks of insertDiff) {
78
78
  const statement = this.createInsertStatement(prop, fks, pks);
79
79
  const hash = statement.getHash();
80
- if (prop.owner || !this.upserts.has(hash)) {
81
- this.upserts.set(hash, statement);
80
+ if (prop.owner || !this.#upserts.has(hash)) {
81
+ this.#upserts.set(hash, statement);
82
82
  }
83
83
  }
84
84
  }
85
85
  createInsertStatement(prop, fks, pks) {
86
86
  const { data, keys } = this.buildPivotKeysAndData(prop, fks, pks);
87
- return new InsertStatement(keys, data, this.order++);
87
+ return new InsertStatement(keys, data, this.#order++);
88
88
  }
89
89
  enqueueDelete(prop, deleteDiff, pks) {
90
90
  if (deleteDiff === true) {
91
91
  const { data, keys } = this.buildPivotKeysAndData(prop, [], pks, true);
92
92
  const statement = new DeleteStatement(keys, data);
93
- this.deletes.set(statement.getHash(), statement);
93
+ this.#deletes.set(statement.getHash(), statement);
94
94
  return;
95
95
  }
96
96
  for (const fks of deleteDiff) {
97
97
  const { data, keys } = this.buildPivotKeysAndData(prop, fks, pks);
98
98
  const statement = new DeleteStatement(keys, data);
99
- this.deletes.set(statement.getHash(), statement);
99
+ this.#deletes.set(statement.getHash(), statement);
100
100
  }
101
101
  }
102
102
  /**
@@ -130,46 +130,46 @@ export class PivotCollectionPersister {
130
130
  return items.filter(Boolean);
131
131
  }
132
132
  async execute() {
133
- if (this.deletes.size > 0) {
134
- const deletes = [...this.deletes.values()];
135
- for (let i = 0; i < deletes.length; i += this.batchSize) {
136
- const chunk = deletes.slice(i, i + this.batchSize);
133
+ if (this.#deletes.size > 0) {
134
+ const deletes = [...this.#deletes.values()];
135
+ for (let i = 0; i < deletes.length; i += this.#batchSize) {
136
+ const chunk = deletes.slice(i, i + this.#batchSize);
137
137
  const cond = { $or: [] };
138
138
  for (const item of chunk) {
139
139
  cond.$or.push(item.getCondition());
140
140
  }
141
- await this.driver.nativeDelete(this.meta.class, cond, {
142
- ctx: this.ctx,
143
- schema: this.schema,
144
- loggerContext: this.loggerContext,
141
+ await this.#driver.nativeDelete(this.#meta.class, cond, {
142
+ ctx: this.#ctx,
143
+ schema: this.#schema,
144
+ loggerContext: this.#loggerContext,
145
145
  });
146
146
  }
147
147
  }
148
- if (this.inserts.size > 0) {
149
- const filtered = this.collectStatements(this.inserts);
150
- for (let i = 0; i < filtered.length; i += this.batchSize) {
151
- const chunk = filtered.slice(i, i + this.batchSize);
152
- await this.driver.nativeInsertMany(this.meta.class, chunk, {
153
- ctx: this.ctx,
154
- schema: this.schema,
148
+ if (this.#inserts.size > 0) {
149
+ const filtered = this.collectStatements(this.#inserts);
150
+ for (let i = 0; i < filtered.length; i += this.#batchSize) {
151
+ const chunk = filtered.slice(i, i + this.#batchSize);
152
+ await this.#driver.nativeInsertMany(this.#meta.class, chunk, {
153
+ ctx: this.#ctx,
154
+ schema: this.#schema,
155
155
  convertCustomTypes: false,
156
156
  processCollections: false,
157
- loggerContext: this.loggerContext,
157
+ loggerContext: this.#loggerContext,
158
158
  });
159
159
  }
160
160
  }
161
- if (this.upserts.size > 0) {
162
- const filtered = this.collectStatements(this.upserts);
163
- for (let i = 0; i < filtered.length; i += this.batchSize) {
164
- const chunk = filtered.slice(i, i + this.batchSize);
165
- await this.driver.nativeUpdateMany(this.meta.class, [], chunk, {
166
- ctx: this.ctx,
167
- schema: this.schema,
161
+ if (this.#upserts.size > 0) {
162
+ const filtered = this.collectStatements(this.#upserts);
163
+ for (let i = 0; i < filtered.length; i += this.#batchSize) {
164
+ const chunk = filtered.slice(i, i + this.#batchSize);
165
+ await this.#driver.nativeUpdateMany(this.#meta.class, [], chunk, {
166
+ ctx: this.#ctx,
167
+ schema: this.#schema,
168
168
  convertCustomTypes: false,
169
169
  processCollections: false,
170
170
  upsert: true,
171
171
  onConflictAction: 'ignore',
172
- loggerContext: this.loggerContext,
172
+ loggerContext: this.#loggerContext,
173
173
  });
174
174
  }
175
175
  }
@@ -5,7 +5,7 @@ import { SchemaHelper } from '../../schema/SchemaHelper.js';
5
5
  import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
6
6
  import type { DatabaseTable } from '../../schema/DatabaseTable.js';
7
7
  export declare class MySqlSchemaHelper extends SchemaHelper {
8
- private readonly _cache;
8
+ #private;
9
9
  static readonly DEFAULT_VALUES: {
10
10
  'now()': string[];
11
11
  'current_timestamp(?)': string[];
@@ -1,7 +1,7 @@
1
1
  import { EnumType, StringType, TextType } from '@mikro-orm/core';
2
2
  import { SchemaHelper } from '../../schema/SchemaHelper.js';
3
3
  export class MySqlSchemaHelper extends SchemaHelper {
4
- _cache = {};
4
+ #cache = {};
5
5
  static DEFAULT_VALUES = {
6
6
  'now()': ['now()', 'current_timestamp'],
7
7
  'current_timestamp(?)': ['current_timestamp(?)'],
@@ -344,12 +344,12 @@ export class MySqlSchemaHelper extends SchemaHelper {
344
344
  }, {});
345
345
  }
346
346
  async supportsCheckConstraints(connection) {
347
- if (this._cache.supportsCheckConstraints != null) {
348
- return this._cache.supportsCheckConstraints;
347
+ if (this.#cache.supportsCheckConstraints != null) {
348
+ return this.#cache.supportsCheckConstraints;
349
349
  }
350
350
  const sql = `select 1 from information_schema.tables where table_name = 'CHECK_CONSTRAINTS' and table_schema = 'information_schema'`;
351
351
  const res = await connection.execute(sql);
352
- return (this._cache.supportsCheckConstraints = res.length > 0);
352
+ return (this.#cache.supportsCheckConstraints = res.length > 0);
353
353
  }
354
354
  getChecksSQL(tables) {
355
355
  return `select cc.constraint_schema as table_schema, tc.table_name as table_name, cc.constraint_name as name, cc.check_clause as expression
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mikro-orm/sql",
3
- "version": "7.0.0-dev.321",
3
+ "version": "7.0.0-dev.322",
4
4
  "description": "TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, MySQL, PostgreSQL and SQLite databases as well as usage with vanilla JavaScript.",
5
5
  "keywords": [
6
6
  "data-mapper",
@@ -53,7 +53,7 @@
53
53
  "@mikro-orm/core": "^6.6.9"
54
54
  },
55
55
  "peerDependencies": {
56
- "@mikro-orm/core": "7.0.0-dev.321"
56
+ "@mikro-orm/core": "7.0.0-dev.322"
57
57
  },
58
58
  "engines": {
59
59
  "node": ">= 22.17.0"
package/plugin/index.d.ts CHANGED
@@ -33,9 +33,7 @@ export interface MikroKyselyPluginOptions {
33
33
  convertValues?: boolean;
34
34
  }
35
35
  export declare class MikroKyselyPlugin implements KyselyPlugin {
36
- private readonly options;
37
- private static queryNodeCache;
38
- private readonly transformer;
36
+ #private;
39
37
  constructor(em: SqlEntityManager, options?: MikroKyselyPluginOptions);
40
38
  transformQuery(args: PluginTransformQueryArgs): RootOperationNode;
41
39
  transformResult(args: PluginTransformResultArgs): Promise<QueryResult<UnknownRow>>;
package/plugin/index.js CHANGED
@@ -1,39 +1,39 @@
1
1
  import { SelectQueryNode as SelectQueryNodeClass, InsertQueryNode as InsertQueryNodeClass, UpdateQueryNode as UpdateQueryNodeClass, DeleteQueryNode as DeleteQueryNodeClass, } from 'kysely';
2
2
  import { MikroTransformer } from './transformer.js';
3
3
  export class MikroKyselyPlugin {
4
- options;
5
- static queryNodeCache = new WeakMap();
6
- transformer;
4
+ static #queryNodeCache = new WeakMap();
5
+ #transformer;
6
+ #options;
7
7
  constructor(em, options = {}) {
8
- this.options = options;
9
- this.transformer = new MikroTransformer(em, options);
8
+ this.#options = options;
9
+ this.#transformer = new MikroTransformer(em, options);
10
10
  }
11
11
  transformQuery(args) {
12
- this.transformer.reset();
13
- const result = this.transformer.transformNode(args.node, args.queryId);
12
+ this.#transformer.reset();
13
+ const result = this.#transformer.transformNode(args.node, args.queryId);
14
14
  // Cache the entity map if it is one we can process (for use in transformResult)
15
15
  if (SelectQueryNodeClass.is(args.node) ||
16
16
  InsertQueryNodeClass.is(args.node) ||
17
17
  UpdateQueryNodeClass.is(args.node) ||
18
18
  DeleteQueryNodeClass.is(args.node)) {
19
19
  // clone the entityMap because the transformer's internal map will be cleared and reused by the next query
20
- const entityMap = new Map(this.transformer.getOutputEntityMap());
21
- MikroKyselyPlugin.queryNodeCache.set(args.queryId, { entityMap });
20
+ const entityMap = new Map(this.#transformer.getOutputEntityMap());
21
+ MikroKyselyPlugin.#queryNodeCache.set(args.queryId, { entityMap });
22
22
  }
23
23
  return result;
24
24
  }
25
25
  async transformResult(args) {
26
26
  // Only transform results if columnNamingStrategy is 'property' or convertValues is true
27
- if (this.options.columnNamingStrategy !== 'property' && !this.options.convertValues) {
27
+ if (this.#options.columnNamingStrategy !== 'property' && !this.#options.convertValues) {
28
28
  return args.result;
29
29
  }
30
30
  // Retrieve the cached query node and metadata
31
- const cache = MikroKyselyPlugin.queryNodeCache.get(args.queryId);
31
+ const cache = MikroKyselyPlugin.#queryNodeCache.get(args.queryId);
32
32
  if (!cache) {
33
33
  return args.result;
34
34
  }
35
35
  // Transform the result rows using the transformer
36
- const transformedRows = this.transformer.transformResult(args.result.rows ?? [], cache.entityMap);
36
+ const transformedRows = this.#transformer.transformResult(args.result.rows ?? [], cache.entityMap);
37
37
  return {
38
38
  ...args.result,
39
39
  rows: transformedRows ?? [],
@@ -3,29 +3,14 @@ import { type CommonTableExpressionNameNode, type DeleteQueryNode, type Identifi
3
3
  import type { MikroKyselyPluginOptions } from './index.js';
4
4
  import type { SqlEntityManager } from '../SqlEntityManager.js';
5
5
  export declare class MikroTransformer extends OperationNodeTransformer {
6
- private readonly em;
7
- private readonly options;
8
- /**
9
- * Context stack to support nested queries (subqueries, CTEs)
10
- * Each level of query scope has its own Map of table aliases/names to EntityMetadata
11
- * Top of stack (highest index) is the current scope
12
- */
13
- private readonly contextStack;
14
- /**
15
- * Subquery alias map: maps subquery/CTE alias to its source table metadata
16
- * Used to resolve columns from subqueries/CTEs to their original table definitions
17
- */
18
- private readonly subqueryAliasMap;
19
- private readonly metadata;
20
- private readonly platform;
21
- /**
22
- * Global map of all entities involved in the query.
23
- * Populated during AST transformation and used for result transformation.
24
- */
25
- private readonly entityMap;
6
+ #private;
26
7
  constructor(em: SqlEntityManager, options?: MikroKyselyPluginOptions);
27
8
  reset(): void;
28
9
  getOutputEntityMap(): Map<string, EntityMetadata>;
10
+ /** @internal */
11
+ getContextStack(): Map<string, EntityMetadata | undefined>[];
12
+ /** @internal */
13
+ getSubqueryAliasMap(): Map<string, EntityMetadata | undefined>;
29
14
  transformSelectQuery(node: SelectQueryNode, queryId: QueryId): SelectQueryNode;
30
15
  transformInsertQuery(node: InsertQueryNode, queryId?: QueryId): InsertQueryNode;
31
16
  transformUpdateQuery(node: UpdateQueryNode, queryId?: QueryId): UpdateQueryNode;