@tanstack/db 0.0.4 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102)
  1. package/dist/cjs/collection.cjs +113 -94
  2. package/dist/cjs/collection.cjs.map +1 -1
  3. package/dist/cjs/collection.d.cts +38 -11
  4. package/dist/cjs/index.cjs +1 -0
  5. package/dist/cjs/index.cjs.map +1 -1
  6. package/dist/cjs/proxy.cjs +87 -248
  7. package/dist/cjs/proxy.cjs.map +1 -1
  8. package/dist/cjs/proxy.d.cts +5 -5
  9. package/dist/cjs/query/compiled-query.cjs +23 -14
  10. package/dist/cjs/query/compiled-query.cjs.map +1 -1
  11. package/dist/cjs/query/compiled-query.d.cts +3 -1
  12. package/dist/cjs/query/evaluators.cjs +20 -20
  13. package/dist/cjs/query/evaluators.cjs.map +1 -1
  14. package/dist/cjs/query/evaluators.d.cts +3 -2
  15. package/dist/cjs/query/extractors.cjs +20 -20
  16. package/dist/cjs/query/extractors.cjs.map +1 -1
  17. package/dist/cjs/query/extractors.d.cts +3 -3
  18. package/dist/cjs/query/group-by.cjs +12 -15
  19. package/dist/cjs/query/group-by.cjs.map +1 -1
  20. package/dist/cjs/query/group-by.d.cts +7 -7
  21. package/dist/cjs/query/joins.cjs +41 -55
  22. package/dist/cjs/query/joins.cjs.map +1 -1
  23. package/dist/cjs/query/joins.d.cts +3 -3
  24. package/dist/cjs/query/order-by.cjs +37 -84
  25. package/dist/cjs/query/order-by.cjs.map +1 -1
  26. package/dist/cjs/query/order-by.d.cts +2 -2
  27. package/dist/cjs/query/pipeline-compiler.cjs +13 -18
  28. package/dist/cjs/query/pipeline-compiler.cjs.map +1 -1
  29. package/dist/cjs/query/pipeline-compiler.d.cts +2 -1
  30. package/dist/cjs/query/query-builder.cjs +0 -12
  31. package/dist/cjs/query/query-builder.cjs.map +1 -1
  32. package/dist/cjs/query/query-builder.d.cts +4 -8
  33. package/dist/cjs/query/schema.d.cts +1 -6
  34. package/dist/cjs/query/select.cjs +35 -24
  35. package/dist/cjs/query/select.cjs.map +1 -1
  36. package/dist/cjs/query/select.d.cts +2 -2
  37. package/dist/cjs/query/types.d.cts +1 -0
  38. package/dist/cjs/transactions.cjs +17 -8
  39. package/dist/cjs/transactions.cjs.map +1 -1
  40. package/dist/cjs/types.d.cts +41 -7
  41. package/dist/esm/collection.d.ts +38 -11
  42. package/dist/esm/collection.js +113 -94
  43. package/dist/esm/collection.js.map +1 -1
  44. package/dist/esm/index.js +2 -1
  45. package/dist/esm/proxy.d.ts +5 -5
  46. package/dist/esm/proxy.js +87 -248
  47. package/dist/esm/proxy.js.map +1 -1
  48. package/dist/esm/query/compiled-query.d.ts +3 -1
  49. package/dist/esm/query/compiled-query.js +23 -14
  50. package/dist/esm/query/compiled-query.js.map +1 -1
  51. package/dist/esm/query/evaluators.d.ts +3 -2
  52. package/dist/esm/query/evaluators.js +21 -21
  53. package/dist/esm/query/evaluators.js.map +1 -1
  54. package/dist/esm/query/extractors.d.ts +3 -3
  55. package/dist/esm/query/extractors.js +20 -20
  56. package/dist/esm/query/extractors.js.map +1 -1
  57. package/dist/esm/query/group-by.d.ts +7 -7
  58. package/dist/esm/query/group-by.js +14 -17
  59. package/dist/esm/query/group-by.js.map +1 -1
  60. package/dist/esm/query/joins.d.ts +3 -3
  61. package/dist/esm/query/joins.js +42 -56
  62. package/dist/esm/query/joins.js.map +1 -1
  63. package/dist/esm/query/order-by.d.ts +2 -2
  64. package/dist/esm/query/order-by.js +39 -86
  65. package/dist/esm/query/order-by.js.map +1 -1
  66. package/dist/esm/query/pipeline-compiler.d.ts +2 -1
  67. package/dist/esm/query/pipeline-compiler.js +14 -19
  68. package/dist/esm/query/pipeline-compiler.js.map +1 -1
  69. package/dist/esm/query/query-builder.d.ts +4 -8
  70. package/dist/esm/query/query-builder.js +0 -12
  71. package/dist/esm/query/query-builder.js.map +1 -1
  72. package/dist/esm/query/schema.d.ts +1 -6
  73. package/dist/esm/query/select.d.ts +2 -2
  74. package/dist/esm/query/select.js +36 -25
  75. package/dist/esm/query/select.js.map +1 -1
  76. package/dist/esm/query/types.d.ts +1 -0
  77. package/dist/esm/transactions.js +17 -8
  78. package/dist/esm/transactions.js.map +1 -1
  79. package/dist/esm/types.d.ts +41 -7
  80. package/package.json +2 -2
  81. package/src/collection.ts +174 -121
  82. package/src/proxy.ts +141 -358
  83. package/src/query/compiled-query.ts +30 -15
  84. package/src/query/evaluators.ts +22 -21
  85. package/src/query/extractors.ts +24 -21
  86. package/src/query/group-by.ts +24 -22
  87. package/src/query/joins.ts +88 -75
  88. package/src/query/order-by.ts +56 -106
  89. package/src/query/pipeline-compiler.ts +34 -37
  90. package/src/query/query-builder.ts +9 -23
  91. package/src/query/schema.ts +1 -10
  92. package/src/query/select.ts +44 -32
  93. package/src/query/types.ts +1 -0
  94. package/src/transactions.ts +22 -13
  95. package/src/types.ts +48 -7
  96. package/dist/cjs/query/key-by.cjs +0 -43
  97. package/dist/cjs/query/key-by.cjs.map +0 -1
  98. package/dist/cjs/query/key-by.d.cts +0 -3
  99. package/dist/esm/query/key-by.d.ts +0 -3
  100. package/dist/esm/query/key-by.js +0 -43
  101. package/dist/esm/query/key-by.js.map +0 -1
  102. package/src/query/key-by.ts +0 -61
package/src/query/query-builder.ts CHANGED

@@ -438,6 +438,7 @@ export class BaseQueryBuilder<TContext extends Context<Schema>> {
  Input
  >
  }
+ hasJoin: true
  }
  >
  >
@@ -474,6 +475,7 @@ export class BaseQueryBuilder<TContext extends Context<Schema>> {
  schema: TContext[`schema`] & {
  [K in T]: RemoveIndexSignature<TContext[`baseSchema`][T]>
  }
+ hasJoin: true
  }
  >
  >
@@ -513,6 +515,7 @@ export class BaseQueryBuilder<TContext extends Context<Schema>> {
  schema: TContext[`schema`] & {
  [K in TAs]: RemoveIndexSignature<TContext[`baseSchema`][TFrom]>
  }
+ hasJoin: true
  }
  >
  >
@@ -754,25 +757,6 @@ export class BaseQueryBuilder<TContext extends Context<Schema>> {
  return newBuilder as QueryBuilder<TContext>
  }

- /**
- * Specify which column(s) to use as keys in the output keyed stream.
- *
- * @param keyBy The column(s) to use as keys
- * @returns A new QueryBuilder with the keyBy clause set
- */
- keyBy(
- keyBy: PropertyReference<TContext> | Array<PropertyReference<TContext>>
- ): QueryBuilder<TContext> {
- // Create a new builder with a copy of the current query
- const newBuilder = new BaseQueryBuilder<TContext>()
- Object.assign(newBuilder.query, this.query)
-
- // Set the keyBy clause
- newBuilder.query.keyBy = keyBy
-
- return newBuilder as QueryBuilder<TContext>
- }
-
  /**
  * Add a groupBy clause to group the results by one or more columns.
  *
@@ -883,10 +867,12 @@ export function queryBuilder<TBaseSchema extends Schema = {}>() {

  export type ResultsFromContext<TContext extends Context<Schema>> = Flatten<
  TContext[`result`] extends object
- ? TContext[`result`]
- : TContext[`result`] extends undefined
- ? TContext[`schema`]
- : object
+ ? TContext[`result`] // If there is a select we will have a result type
+ : TContext[`hasJoin`] extends true
+ ? TContext[`schema`] // If there is a join, the query returns the namespaced schema
+ : TContext[`default`] extends keyof TContext[`schema`]
+ ? TContext[`schema`][TContext[`default`]] // If there is no join we return the flat default schema
+ : never // Should never happen
  >

  export type ResultFromQueryBuilder<TQueryBuilder> = Flatten<
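In practice this means: an explicit select fixes the result type; a query with a join but no select yields the namespaced schema (one property per table alias); and a single-table query yields the flat row type of the default table. A standalone sketch of that resolution logic, using a simplified Ctx shape rather than the package's actual Context type (illustrative only, not the package's exports):

type Ctx = {
  schema: Record<string, object>
  default?: string
  result?: Record<string, unknown>
  hasJoin?: boolean
}

// Mirrors the new ResultsFromContext conditional above
type Results<C extends Ctx> = C[`result`] extends object
  ? C[`result`] // a select clause determines the result type
  : C[`hasJoin`] extends true
    ? C[`schema`] // joins return the namespaced schema
    : C[`default`] extends infer D extends keyof C[`schema`]
      ? C[`schema`][D] // no join: the flat default-table row
      : never

// e.g. { schema: { issues: Issue }, default: `issues` } resolves to Issue,
// while adding hasJoin: true makes it resolve to the whole { issues: Issue } namespace.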
package/src/query/schema.ts CHANGED

@@ -169,7 +169,6 @@ export interface JoinClause<TContext extends Context = Context> {
  from: string
  as?: string
  on: Condition<TContext>
- where?: Condition<TContext>
  }

  // The orderBy clause can be a string, an object mapping a column to "asc" or "desc",
@@ -217,7 +216,7 @@ export interface BaseQuery<TContext extends Context = Context> {
  // to expressions. Plain strings starting with "@" denote column references.
  // Plain string "@*" denotes all columns from all tables.
  // Plain string "@table.*" denotes all columns from a specific table.
- select: Array<Select<TContext>>
+ select?: Array<Select<TContext>>
  as?: As<TContext>
  from: From<TContext>
  join?: Array<JoinClause<TContext>>
@@ -232,7 +231,6 @@ export interface BaseQuery<TContext extends Context = Context> {
  // The top-level query interface.
  export interface Query<TContext extends Context = Context>
  extends BaseQuery<TContext> {
- keyBy?: PropertyReference<TContext> | Array<PropertyReference<TContext>>
  with?: Array<WithQuery<TContext>>
  collections?: {
  [K: string]: Collection<any>
@@ -246,10 +244,3 @@ export interface WithQuery<TContext extends Context = Context>
  extends BaseQuery<TContext> {
  as: string
  }
-
- // A keyed query is a query that has a keyBy clause, and so the result is always
- // a keyed stream.
- export interface KeyedQuery<TContext extends Context = Context>
- extends Query<TContext> {
- keyBy: PropertyReference<TContext> | Array<PropertyReference<TContext>>
- }
package/src/query/select.ts CHANGED

@@ -1,41 +1,48 @@
  import { map } from "@electric-sql/d2ts"
  import {
- evaluateOperandOnNestedRow,
- extractValueFromNestedRow,
+ evaluateOperandOnNamespacedRow,
+ extractValueFromNamespacedRow,
  } from "./extractors"
- import type { IStreamBuilder } from "@electric-sql/d2ts"
  import type { ConditionOperand, Query } from "./schema"
+ import type { KeyedStream, NamespacedAndKeyedStream } from "../types"

  export function processSelect(
- pipeline: IStreamBuilder<Record<string, unknown>>,
+ pipeline: NamespacedAndKeyedStream,
  query: Query,
  mainTableAlias: string,
- inputs: Record<string, IStreamBuilder<Record<string, unknown>>>
- ) {
+ inputs: Record<string, KeyedStream>
+ ): KeyedStream {
  return pipeline.pipe(
- map((nestedRow: Record<string, unknown>) => {
+ map(([key, namespacedRow]) => {
  const result: Record<string, unknown> = {}

  // Check if this is a grouped result (has no nested table structure)
  // If it's a grouped result, we need to handle it differently
  const isGroupedResult =
  query.groupBy &&
- Object.keys(nestedRow).some(
- (key) =>
- !Object.keys(inputs).includes(key) &&
- typeof nestedRow[key] !== `object`
+ Object.keys(namespacedRow).some(
+ (namespaceKey) =>
+ !Object.keys(inputs).includes(namespaceKey) &&
+ typeof namespacedRow[namespaceKey] !== `object`
  )

+ if (!query.select) {
+ throw new Error(`Cannot process missing SELECT clause`)
+ }
+
  for (const item of query.select) {
  if (typeof item === `string`) {
  // Handle wildcard select - all columns from all tables
  if ((item as string) === `@*`) {
  // For grouped results, just return the row as is
  if (isGroupedResult) {
- Object.assign(result, nestedRow)
+ Object.assign(result, namespacedRow)
  } else {
  // Extract all columns from all tables
- Object.assign(result, extractAllColumnsFromAllTables(nestedRow))
+ Object.assign(
+ result,
+ extractAllColumnsFromAllTables(namespacedRow)
+ )
  }
  continue
  }
@@ -56,7 +63,7 @@ export function processSelect(
  // Extract all columns from the specified table
  Object.assign(
  result,
- extractAllColumnsFromTable(nestedRow, tableAlias)
+ extractAllColumnsFromTable(namespacedRow, tableAlias)
  )
  }
  continue
@@ -68,12 +75,12 @@ export function processSelect(
  const alias = columnRef

  // For grouped results, check if the column is directly in the row first
- if (isGroupedResult && columnRef in nestedRow) {
- result[alias] = nestedRow[columnRef]
+ if (isGroupedResult && columnRef in namespacedRow) {
+ result[alias] = namespacedRow[columnRef]
  } else {
  // Extract the value from the nested structure
- result[alias] = extractValueFromNestedRow(
- nestedRow,
+ result[alias] = extractValueFromNamespacedRow(
+ namespacedRow,
  columnRef,
  mainTableAlias,
  undefined
@@ -95,12 +102,12 @@ export function processSelect(
  const columnRef = (expr as string).substring(1)

  // For grouped results, check if the column is directly in the row first
- if (isGroupedResult && columnRef in nestedRow) {
- result[alias] = nestedRow[columnRef]
+ if (isGroupedResult && columnRef in namespacedRow) {
+ result[alias] = namespacedRow[columnRef]
  } else {
  // Extract the value from the nested structure
- result[alias] = extractValueFromNestedRow(
- nestedRow,
+ result[alias] = extractValueFromNamespacedRow(
+ namespacedRow,
  columnRef,
  mainTableAlias,
  undefined
@@ -108,12 +115,14 @@ export function processSelect(
  }
  } else if (typeof expr === `object`) {
  // For grouped results, the aggregate results are already in the row
- if (isGroupedResult && alias in nestedRow) {
- result[alias] = nestedRow[alias]
+ if (isGroupedResult && alias in namespacedRow) {
+ result[alias] = namespacedRow[alias]
+ } else if ((expr as { ORDER_INDEX: unknown }).ORDER_INDEX) {
+ result[alias] = namespacedRow[mainTableAlias]![alias]
  } else {
  // This might be a function call
- result[alias] = evaluateOperandOnNestedRow(
- nestedRow,
+ result[alias] = evaluateOperandOnNamespacedRow(
+ namespacedRow,
  expr as ConditionOperand,
  mainTableAlias,
  undefined
@@ -124,23 +133,26 @@ export function processSelect(
  }
  }

- return result
+ return [key, result] as [string, typeof result]
  })
  )
  }

  // Helper function to extract all columns from all tables in a nested row
  function extractAllColumnsFromAllTables(
- nestedRow: Record<string, unknown>
+ namespacedRow: Record<string, unknown>
  ): Record<string, unknown> {
  const result: Record<string, unknown> = {}

  // Process each table in the nested row
- for (const [tableAlias, tableData] of Object.entries(nestedRow)) {
+ for (const [tableAlias, tableData] of Object.entries(namespacedRow)) {
  if (tableData && typeof tableData === `object`) {
  // Add all columns from this table to the result
  // If there are column name conflicts, the last table's columns will overwrite previous ones
- Object.assign(result, extractAllColumnsFromTable(nestedRow, tableAlias))
+ Object.assign(
+ result,
+ extractAllColumnsFromTable(namespacedRow, tableAlias)
+ )
  }
  }

@@ -149,13 +161,13 @@ function extractAllColumnsFromAllTables(

  // Helper function to extract all columns from a table in a nested row
  function extractAllColumnsFromTable(
- nestedRow: Record<string, unknown>,
+ namespacedRow: Record<string, unknown>,
  tableAlias: string
  ): Record<string, unknown> {
  const result: Record<string, unknown> = {}

  // Get the table data
- const tableData = nestedRow[tableAlias] as
+ const tableData = namespacedRow[tableAlias] as
  | Record<string, unknown>
  | null
  | undefined
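The practical effect of these changes is that rows keep their key as they flow through processSelect: the pipeline now maps [key, namespacedRow] tuples to [key, selectedRow] tuples instead of bare objects. A rough sketch of the shapes involved; the concrete values are illustrative, and the select syntax shown in the comment is inferred from the surrounding code rather than from package documentation:

// Shapes copied from the new aliases added in src/types.ts (shown later in this diff)
type NamespacedRow = Record<string, Record<string, unknown>>
type KeyedNamespacedRow = [unknown, NamespacedRow]

// Entering processSelect: a key plus one object per table alias
const incoming: KeyedNamespacedRow = [
  `issue-1`,
  {
    issues: { id: 1, title: `Fix flaky test`, authorId: 7 },
    users: { id: 7, name: `Ada` },
  },
]

// With a select such as [`@issues.title`, { author: `@users.name` }] the mapper
// emits a flat row under the same key
const outgoing: [unknown, Record<string, unknown>] = [
  `issue-1`,
  { title: `Fix flaky test`, author: `Ada` },
]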
package/src/query/types.ts CHANGED

@@ -20,6 +20,7 @@ export type Context<
  schema: TSchema
  default?: keyof TSchema
  result?: Record<string, unknown>
+ hasJoin?: boolean
  }

  // Helper types
package/src/transactions.ts CHANGED

@@ -24,6 +24,7 @@ function generateUUID() {
  }

  const transactions: Array<Transaction> = []
+ let transactionStack: Array<Transaction> = []

  export function createTransaction(config: TransactionConfig): Transaction {
  if (typeof config.mutationFn === `undefined`) {
@@ -35,13 +36,12 @@ export function createTransaction(config: TransactionConfig): Transaction {
  transactionId = generateUUID()
  }
  const newTransaction = new Transaction({ ...config, id: transactionId })
+
  transactions.push(newTransaction)

  return newTransaction
  }

- let transactionStack: Array<Transaction> = []
-
  export function getActiveTransaction(): Transaction | undefined {
  if (transactionStack.length > 0) {
  return transactionStack.slice(-1)[0]
@@ -58,6 +58,13 @@ function unregisterTransaction(tx: Transaction) {
  transactionStack = transactionStack.filter((t) => t.id !== tx.id)
  }

+ function removeFromPendingList(tx: Transaction) {
+ const index = transactions.findIndex((t) => t.id === tx.id)
+ if (index !== -1) {
+ transactions.splice(index, 1)
+ }
+ }
+
  export class Transaction {
  public id: string
  public state: TransactionState
@@ -85,6 +92,10 @@ export class Transaction {

  setState(newState: TransactionState) {
  this.state = newState
+
+ if (newState === `completed` || newState === `failed`) {
+ removeFromPendingList(this)
+ }
  }

  mutate(callback: () => void): Transaction {
@@ -130,22 +141,20 @@ export class Transaction {

  this.setState(`failed`)

- // See if there's any other transactions w/ mutations on the same keys
+ // See if there's any other transactions w/ mutations on the same ids
  // and roll them back as well.
  if (!isSecondaryRollback) {
- const mutationKeys = new Set()
- this.mutations.forEach((m) => mutationKeys.add(m.key))
- transactions.forEach(
- (t) =>
- t.state === `pending` &&
- t.mutations.some((m) => mutationKeys.has(m.key)) &&
+ const mutationIds = new Set()
+ this.mutations.forEach((m) => mutationIds.add(m.key))
+ for (const t of transactions) {
+ t.state === `pending` &&
+ t.mutations.some((m) => mutationIds.has(m.key)) &&
  t.rollback({ isSecondaryRollback: true })
- )
+ }
  }

  // Reject the promise
  this.isPersisted.reject(this.error?.error)
-
  this.touchCollection()

  return this
@@ -154,13 +163,13 @@ export class Transaction {
  // Tell collection that something has changed with the transaction
  touchCollection(): void {
  const hasCalled = new Set()
- this.mutations.forEach((mutation) => {
+ for (const mutation of this.mutations) {
  if (!hasCalled.has(mutation.collection.id)) {
  mutation.collection.transactions.setState((state) => state)
  mutation.collection.commitPendingTransactions()
  hasCalled.add(mutation.collection.id)
  }
- })
+ }
  }

  async commit(): Promise<Transaction> {
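Taken together, these changes mean a transaction now removes itself from the module-level transactions array as soon as it settles, so the rollback cascade only ever scans genuinely pending transactions. A hedged usage sketch follows; it assumes createTransaction is exported from the package root, and the mutationFn call signature shown is a guess, not something this diff confirms:

import { createTransaction } from "@tanstack/db"

// mutationFn is required (createTransaction throws without it); its exact
// signature is assumed here
const tx = createTransaction({
  mutationFn: async () => {
    // persist the optimistic mutations to the backend
  },
})

// Writes performed inside mutate() are attributed to this transaction via the
// internal transactionStack (see getActiveTransaction above)
tx.mutate(() => {
  // collection insert/update/delete calls would go here
})

// On success setState(`completed`) — and on error setState(`failed`) — now also
// calls removeFromPendingList, so settled transactions no longer linger in the
// list that rollback() walks when cascading secondary rollbacks.
await tx.commit()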
package/src/types.ts CHANGED
@@ -1,3 +1,4 @@
+ import type { IStreamBuilder } from "@electric-sql/d2ts"
  import type { Collection } from "./collection"
  import type { StandardSchemaV1 } from "@standard-schema/spec"
  import type { Transaction } from "./transactions"
@@ -13,7 +14,7 @@ export interface PendingMutation<T extends object = Record<string, unknown>> {
  original: Record<string, unknown>
  modified: Record<string, unknown>
  changes: Record<string, unknown>
- key: string
+ key: any
  type: OperationType
  metadata: unknown
  syncMetadata: Record<string, unknown>
@@ -51,7 +52,7 @@ type Value<TExtensions = never> =
  | null
  | TExtensions
  | Array<Value<TExtensions>>
- | { [key: string]: Value<TExtensions> }
+ | { [key: string | number | symbol]: Value<TExtensions> }

  export type Row<TExtensions = never> = Record<string, Value<TExtensions>>

@@ -61,19 +62,19 @@ export interface SyncConfig<T extends object = Record<string, unknown>> {
  sync: (params: {
  collection: Collection<T>
  begin: () => void
- write: (message: ChangeMessage<T>) => void
+ write: (message: Omit<ChangeMessage<T>, `key`>) => void
  commit: () => void
  }) => void

  /**
  * Get the sync metadata for insert operations
- * @returns Record containing primaryKey and relation information
+ * @returns Record containing relation information
  */
  getSyncMetadata?: () => Record<string, unknown>
  }

  export interface ChangeMessage<T extends object = Record<string, unknown>> {
- key: string
+ key: any
  value: T
  previousValue?: T
  type: OperationType
@@ -110,16 +111,56 @@ export interface OperationConfig {
  }

  export interface InsertConfig {
- key?: string | Array<string | undefined>
  metadata?: Record<string, unknown>
  }

  export interface CollectionConfig<T extends object = Record<string, unknown>> {
- id: string
+ // If an id isn't passed in, a UUID will be
+ // generated for it.
+ id?: string
  sync: SyncConfig<T>
  schema?: StandardSchema<T>
+ /**
+ * Function to extract the ID from an object
+ * This is required for update/delete operations which now only accept IDs
+ * @param item The item to extract the ID from
+ * @returns The ID string for the item
+ * @example
+ * // For a collection with a 'uuid' field as the primary key
+ * getId: (item) => item.uuid
+ */
+ getId: (item: T) => any
  }

  export type ChangesPayload<T extends object = Record<string, unknown>> = Array<
  ChangeMessage<T>
  >
+
+ /**
+ * An input row from a collection
+ */
+ export type InputRow = [unknown, Record<string, unknown>]
+
+ /**
+ * A keyed stream is a stream of rows
+ * This is used as the inputs from a collection to a query
+ */
+ export type KeyedStream = IStreamBuilder<InputRow>
+
+ /**
+ * A namespaced row is a row withing a pipeline that had each table wrapped in its alias
+ */
+ export type NamespacedRow = Record<string, Record<string, unknown>>
+
+ /**
+ * A keyed namespaced row is a row with a key and a namespaced row
+ * This is the main representation of a row in a query pipeline
+ */
+ export type KeyedNamespacedRow = [unknown, NamespacedRow]
+
+ /**
+ * A namespaced and keyed stream is a stream of rows
+ * This is used throughout a query pipeline and as the output from a query without
+ * a `select` clause.
+ */
+ export type NamespacedAndKeyedStream = IStreamBuilder<KeyedNamespacedRow>
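The CollectionConfig change affects how collections are constructed: id becomes optional, the per-insert key option is gone, and a getId function is now required so that update/delete (and sync writes, which no longer pass a key) can derive keys from the items themselves. A hedged sketch of a config under the new shape; the Todo type and sync body are invented, the `insert` literal for OperationType and the export of CollectionConfig from the package root are assumptions, and the write() fields shown are only those visible in this diff:

import type { CollectionConfig } from "@tanstack/db"

type Todo = { uuid: string; title: string; completed: boolean }

const todosConfig: CollectionConfig<Todo> = {
  // `id` is optional now; a UUID is generated when it is omitted
  sync: {
    sync: ({ begin, write, commit }) => {
      begin()
      // `write` takes Omit<ChangeMessage<Todo>, `key`> — no key is passed;
      // the collection derives it with getId
      write({
        type: `insert`,
        value: { uuid: `t1`, title: `Ship 0.0.5`, completed: false },
      })
      commit()
    },
  },
  // Required in 0.0.5: update/delete operate on ids extracted by this function
  getId: (item) => item.uuid,
}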
@@ -1,43 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
3
- const d2ts = require("@electric-sql/d2ts");
4
- function processKeyBy(resultPipeline, query) {
5
- if (!query.keyBy) {
6
- return resultPipeline;
7
- }
8
- const keyByParam = query.keyBy;
9
- resultPipeline = resultPipeline.pipe(
10
- d2ts.keyBy((row) => {
11
- if (Array.isArray(keyByParam)) {
12
- const keyValues = {};
13
- for (const keyColumn of keyByParam) {
14
- const columnName = keyColumn.startsWith(`@`) ? keyColumn.substring(1) : keyColumn;
15
- if (columnName in row) {
16
- keyValues[columnName] = row[columnName];
17
- } else {
18
- throw new Error(
19
- `Key column "${columnName}" not found in result set. Make sure it's included in the select clause.`
20
- );
21
- }
22
- }
23
- return JSON.stringify(keyValues);
24
- } else {
25
- const columnName = keyByParam.startsWith(`@`) ? keyByParam.substring(1) : keyByParam;
26
- if (!(columnName in row)) {
27
- throw new Error(
28
- `Key column "${columnName}" not found in result set. Make sure it's included in the select clause.`
29
- );
30
- }
31
- const keyValue = row[columnName];
32
- if (typeof keyValue === `string` || typeof keyValue === `number`) {
33
- return keyValue;
34
- } else {
35
- return JSON.stringify(keyValue);
36
- }
37
- }
38
- })
39
- );
40
- return resultPipeline;
41
- }
42
- exports.processKeyBy = processKeyBy;
43
- //# sourceMappingURL=key-by.cjs.map
package/dist/cjs/query/key-by.cjs.map DELETED

@@ -1 +0,0 @@
- {"version":3,"file":"key-by.cjs","sources":["../../../src/query/key-by.ts"],"sourcesContent":["import { keyBy } from \"@electric-sql/d2ts\"\nimport type { IStreamBuilder } from \"@electric-sql/d2ts\"\nimport type { Query } from \"./schema\"\n\nexport function processKeyBy(\n resultPipeline: IStreamBuilder<\n Record<string, unknown> | [string | number, Record<string, unknown>]\n >,\n query: Query\n) {\n if (!query.keyBy) {\n return resultPipeline\n }\n const keyByParam = query.keyBy\n\n resultPipeline = resultPipeline.pipe(\n keyBy((row: Record<string, unknown>) => {\n if (Array.isArray(keyByParam)) {\n // Multiple columns - extract values and JSON stringify\n const keyValues: Record<string, unknown> = {}\n for (const keyColumn of keyByParam) {\n // Remove @ prefix if present\n const columnName = (keyColumn as string).startsWith(`@`)\n ? (keyColumn as string).substring(1)\n : (keyColumn as string)\n\n if (columnName in row) {\n keyValues[columnName] = row[columnName]\n } else {\n throw new Error(\n `Key column \"${columnName}\" not found in result set. Make sure it's included in the select clause.`\n )\n }\n }\n return JSON.stringify(keyValues)\n } else {\n // Single column\n // Remove @ prefix if present\n const columnName = (keyByParam as string).startsWith(`@`)\n ? (keyByParam as string).substring(1)\n : (keyByParam as string)\n\n if (!(columnName in row)) {\n throw new Error(\n `Key column \"${columnName}\" not found in result set. Make sure it's included in the select clause.`\n )\n }\n\n const keyValue = row[columnName]\n // Use the value directly if it's a string or number, otherwise JSON stringify\n if (typeof keyValue === `string` || typeof keyValue === `number`) {\n return keyValue\n } else {\n return JSON.stringify(keyValue)\n }\n }\n })\n )\n\n return resultPipeline\n}\n"],"names":["keyBy"],"mappings":";;;AAIgB,SAAA,aACd,gBAGA,OACA;AACI,MAAA,CAAC,MAAM,OAAO;AACT,WAAA;AAAA,EAAA;AAET,QAAM,aAAa,MAAM;AAEzB,mBAAiB,eAAe;AAAA,IAC9BA,KAAA,MAAM,CAAC,QAAiC;AAClC,UAAA,MAAM,QAAQ,UAAU,GAAG;AAE7B,cAAM,YAAqC,CAAC;AAC5C,mBAAW,aAAa,YAAY;AAE5B,gBAAA,aAAc,UAAqB,WAAW,GAAG,IAClD,UAAqB,UAAU,CAAC,IAChC;AAEL,cAAI,cAAc,KAAK;AACX,sBAAA,UAAU,IAAI,IAAI,UAAU;AAAA,UAAA,OACjC;AACL,kBAAM,IAAI;AAAA,cACR,eAAe,UAAU;AAAA,YAC3B;AAAA,UAAA;AAAA,QACF;AAEK,eAAA,KAAK,UAAU,SAAS;AAAA,MAAA,OAC1B;AAGC,cAAA,aAAc,WAAsB,WAAW,GAAG,IACnD,WAAsB,UAAU,CAAC,IACjC;AAED,YAAA,EAAE,cAAc,MAAM;AACxB,gBAAM,IAAI;AAAA,YACR,eAAe,UAAU;AAAA,UAC3B;AAAA,QAAA;AAGI,cAAA,WAAW,IAAI,UAAU;AAE/B,YAAI,OAAO,aAAa,YAAY,OAAO,aAAa,UAAU;AACzD,iBAAA;AAAA,QAAA,OACF;AACE,iBAAA,KAAK,UAAU,QAAQ;AAAA,QAAA;AAAA,MAChC;AAAA,IAEH,CAAA;AAAA,EACH;AAEO,SAAA;AACT;;"}
package/dist/cjs/query/key-by.d.cts DELETED

@@ -1,3 +0,0 @@
- import { IStreamBuilder } from '@electric-sql/d2ts';
- import { Query } from './schema.cjs';
- export declare function processKeyBy(resultPipeline: IStreamBuilder<Record<string, unknown> | [string | number, Record<string, unknown>]>, query: Query): IStreamBuilder<Record<string, unknown> | [string | number, Record<string, unknown>]>;
package/dist/esm/query/key-by.d.ts DELETED

@@ -1,3 +0,0 @@
- import { IStreamBuilder } from '@electric-sql/d2ts';
- import { Query } from './schema.js';
- export declare function processKeyBy(resultPipeline: IStreamBuilder<Record<string, unknown> | [string | number, Record<string, unknown>]>, query: Query): IStreamBuilder<Record<string, unknown> | [string | number, Record<string, unknown>]>;
package/dist/esm/query/key-by.js DELETED

@@ -1,43 +0,0 @@
- import { keyBy } from "@electric-sql/d2ts";
- function processKeyBy(resultPipeline, query) {
- if (!query.keyBy) {
- return resultPipeline;
- }
- const keyByParam = query.keyBy;
- resultPipeline = resultPipeline.pipe(
- keyBy((row) => {
- if (Array.isArray(keyByParam)) {
- const keyValues = {};
- for (const keyColumn of keyByParam) {
- const columnName = keyColumn.startsWith(`@`) ? keyColumn.substring(1) : keyColumn;
- if (columnName in row) {
- keyValues[columnName] = row[columnName];
- } else {
- throw new Error(
- `Key column "${columnName}" not found in result set. Make sure it's included in the select clause.`
- );
- }
- }
- return JSON.stringify(keyValues);
- } else {
- const columnName = keyByParam.startsWith(`@`) ? keyByParam.substring(1) : keyByParam;
- if (!(columnName in row)) {
- throw new Error(
- `Key column "${columnName}" not found in result set. Make sure it's included in the select clause.`
- );
- }
- const keyValue = row[columnName];
- if (typeof keyValue === `string` || typeof keyValue === `number`) {
- return keyValue;
- } else {
- return JSON.stringify(keyValue);
- }
- }
- })
- );
- return resultPipeline;
- }
- export {
- processKeyBy
- };
- //# sourceMappingURL=key-by.js.map
package/dist/esm/query/key-by.js.map DELETED

@@ -1 +0,0 @@
- {"version":3,"file":"key-by.js","sources":["../../../src/query/key-by.ts"],"sourcesContent":["import { keyBy } from \"@electric-sql/d2ts\"\nimport type { IStreamBuilder } from \"@electric-sql/d2ts\"\nimport type { Query } from \"./schema\"\n\nexport function processKeyBy(\n resultPipeline: IStreamBuilder<\n Record<string, unknown> | [string | number, Record<string, unknown>]\n >,\n query: Query\n) {\n if (!query.keyBy) {\n return resultPipeline\n }\n const keyByParam = query.keyBy\n\n resultPipeline = resultPipeline.pipe(\n keyBy((row: Record<string, unknown>) => {\n if (Array.isArray(keyByParam)) {\n // Multiple columns - extract values and JSON stringify\n const keyValues: Record<string, unknown> = {}\n for (const keyColumn of keyByParam) {\n // Remove @ prefix if present\n const columnName = (keyColumn as string).startsWith(`@`)\n ? (keyColumn as string).substring(1)\n : (keyColumn as string)\n\n if (columnName in row) {\n keyValues[columnName] = row[columnName]\n } else {\n throw new Error(\n `Key column \"${columnName}\" not found in result set. Make sure it's included in the select clause.`\n )\n }\n }\n return JSON.stringify(keyValues)\n } else {\n // Single column\n // Remove @ prefix if present\n const columnName = (keyByParam as string).startsWith(`@`)\n ? (keyByParam as string).substring(1)\n : (keyByParam as string)\n\n if (!(columnName in row)) {\n throw new Error(\n `Key column \"${columnName}\" not found in result set. Make sure it's included in the select clause.`\n )\n }\n\n const keyValue = row[columnName]\n // Use the value directly if it's a string or number, otherwise JSON stringify\n if (typeof keyValue === `string` || typeof keyValue === `number`) {\n return keyValue\n } else {\n return JSON.stringify(keyValue)\n }\n }\n })\n )\n\n return resultPipeline\n}\n"],"names":[],"mappings":";AAIgB,SAAA,aACd,gBAGA,OACA;AACI,MAAA,CAAC,MAAM,OAAO;AACT,WAAA;AAAA,EAAA;AAET,QAAM,aAAa,MAAM;AAEzB,mBAAiB,eAAe;AAAA,IAC9B,MAAM,CAAC,QAAiC;AAClC,UAAA,MAAM,QAAQ,UAAU,GAAG;AAE7B,cAAM,YAAqC,CAAC;AAC5C,mBAAW,aAAa,YAAY;AAE5B,gBAAA,aAAc,UAAqB,WAAW,GAAG,IAClD,UAAqB,UAAU,CAAC,IAChC;AAEL,cAAI,cAAc,KAAK;AACX,sBAAA,UAAU,IAAI,IAAI,UAAU;AAAA,UAAA,OACjC;AACL,kBAAM,IAAI;AAAA,cACR,eAAe,UAAU;AAAA,YAC3B;AAAA,UAAA;AAAA,QACF;AAEK,eAAA,KAAK,UAAU,SAAS;AAAA,MAAA,OAC1B;AAGC,cAAA,aAAc,WAAsB,WAAW,GAAG,IACnD,WAAsB,UAAU,CAAC,IACjC;AAED,YAAA,EAAE,cAAc,MAAM;AACxB,gBAAM,IAAI;AAAA,YACR,eAAe,UAAU;AAAA,UAC3B;AAAA,QAAA;AAGI,cAAA,WAAW,IAAI,UAAU;AAE/B,YAAI,OAAO,aAAa,YAAY,OAAO,aAAa,UAAU;AACzD,iBAAA;AAAA,QAAA,OACF;AACE,iBAAA,KAAK,UAAU,QAAQ;AAAA,QAAA;AAAA,MAChC;AAAA,IAEH,CAAA;AAAA,EACH;AAEO,SAAA;AACT;"}
package/src/query/key-by.ts DELETED

@@ -1,61 +0,0 @@
- import { keyBy } from "@electric-sql/d2ts"
- import type { IStreamBuilder } from "@electric-sql/d2ts"
- import type { Query } from "./schema"
-
- export function processKeyBy(
- resultPipeline: IStreamBuilder<
- Record<string, unknown> | [string | number, Record<string, unknown>]
- >,
- query: Query
- ) {
- if (!query.keyBy) {
- return resultPipeline
- }
- const keyByParam = query.keyBy
-
- resultPipeline = resultPipeline.pipe(
- keyBy((row: Record<string, unknown>) => {
- if (Array.isArray(keyByParam)) {
- // Multiple columns - extract values and JSON stringify
- const keyValues: Record<string, unknown> = {}
- for (const keyColumn of keyByParam) {
- // Remove @ prefix if present
- const columnName = (keyColumn as string).startsWith(`@`)
- ? (keyColumn as string).substring(1)
- : (keyColumn as string)
-
- if (columnName in row) {
- keyValues[columnName] = row[columnName]
- } else {
- throw new Error(
- `Key column "${columnName}" not found in result set. Make sure it's included in the select clause.`
- )
- }
- }
- return JSON.stringify(keyValues)
- } else {
- // Single column
- // Remove @ prefix if present
- const columnName = (keyByParam as string).startsWith(`@`)
- ? (keyByParam as string).substring(1)
- : (keyByParam as string)
-
- if (!(columnName in row)) {
- throw new Error(
- `Key column "${columnName}" not found in result set. Make sure it's included in the select clause.`
- )
- }
-
- const keyValue = row[columnName]
- // Use the value directly if it's a string or number, otherwise JSON stringify
- if (typeof keyValue === `string` || typeof keyValue === `number`) {
- return keyValue
- } else {
- return JSON.stringify(keyValue)
- }
- }
- })
- )
-
- return resultPipeline
- }