alepha 0.10.0 → 0.10.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +189 -5
- package/batch.d.ts +2 -2
- package/bucket.d.ts +1 -1
- package/cache/redis.d.ts +1 -1
- package/cache.d.ts +1 -1
- package/command.d.ts +48 -9
- package/core.d.ts +20 -11
- package/datetime.d.ts +3 -2
- package/email.d.ts +6 -6
- package/lock/redis.d.ts +1 -1
- package/lock.d.ts +1 -1
- package/logger.d.ts +1 -1
- package/package.json +44 -44
- package/postgres.d.ts +77 -382
- package/queue/redis.d.ts +1 -1
- package/queue.d.ts +1 -1
- package/react/auth.d.ts +1 -1
- package/react/form.d.ts +1 -1
- package/react/head.d.ts +1 -1
- package/react/i18n.d.ts +1 -1
- package/react.d.ts +28 -28
- package/redis.d.ts +1 -1
- package/scheduler.d.ts +1 -1
- package/security.d.ts +5 -5
- package/server/cache.d.ts +1 -1
- package/server/compress.d.ts +5 -5
- package/server/cookies.d.ts +1 -1
- package/server/cors.d.ts +1 -1
- package/server/health.d.ts +1 -1
- package/server/helmet.d.ts +1 -1
- package/server/links.d.ts +1 -1
- package/server/metrics.d.ts +1 -1
- package/server/multipart.d.ts +1 -1
- package/server/proxy.d.ts +1 -1
- package/server/security.d.ts +1 -1
- package/server/static.d.ts +1 -1
- package/server/swagger.d.ts +14 -1
- package/server.d.ts +35 -390
- package/topic/redis.d.ts +1 -1
- package/topic.d.ts +1 -1
package/postgres.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import * as _alepha_core1 from "alepha";
 import { Alepha, AlephaError, Descriptor, KIND, Service, Static, TArray, TBigInt, TBoolean, TInteger, TKeysToIndexer, TNull, TNumber, TNumberOptions, TObject, TObjectOptions, TOptional, TOptionalAdd, TPick, TRecord, TSchema as TSchema$1, TString, TStringOptions, TUnion } from "alepha";
-import * as
+import * as drizzle_orm0 from "drizzle-orm";
 import { BuildColumns, BuildExtraConfigColumns, SQL, SQLWrapper, TableConfig, sql } from "drizzle-orm";
 import * as pg$1 from "drizzle-orm/pg-core";
 import { AnyPgColumn, LockConfig, LockStrength, PgColumn, PgColumnBuilderBase, PgDatabase, PgInsertValue, PgSequenceOptions, PgTableExtraConfigValue, PgTableWithColumns, PgTransaction, PgTransactionConfig, SelectedFields, TableConfig as TableConfig$1, UpdateDeleteAction } from "drizzle-orm/pg-core";
@@ -10,7 +10,7 @@ import * as _alepha_lock0 from "alepha/lock";
 import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
 import postgres from "postgres";
 import * as _alepha_retry0 from "alepha/retry";
-import * as
+import * as typebox0 from "typebox";
 import { PgTransactionConfig as PgTransactionConfig$1 } from "drizzle-orm/pg-core/session";
 import * as DrizzleKit from "drizzle-kit/api";
 import { MigrationConfig } from "drizzle-orm/migrator";
@@ -91,6 +91,7 @@ declare const insertSchema: <T extends TObject>(obj: T) => TObjectInsert<T>;
  * After: { name?: string | null; age: number; }
  */
 type TObjectUpdate<T extends TObject> = TObject<{ [K in keyof T["properties"]]: T["properties"][K] extends TOptional<infer U> ? TOptional<TUnion<[U, TNull]>> : T["properties"][K] }>;
+declare const updateSchema: <T extends TObject>(schema: T) => TObjectUpdate<T>;
 //#endregion
 //#region src/helpers/schemaToPgColumns.d.ts
 /**
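The new `updateSchema` helper is the update-side counterpart of `insertSchema`: per the `TObjectUpdate` mapping above, optional properties additionally accept `null`, while required properties are left untouched. A minimal sketch of how it could be called, assuming schemas are built with TypeBox's `Type` helpers (the exact builder alepha exposes is not shown in this diff):

```ts
import { Type } from "@sinclair/typebox"; // assumed builder; alepha's own re-export may differ
import { updateSchema } from "alepha/postgres";

// Before: { name?: string; age: number; }
const userSchema = Type.Object({
  name: Type.Optional(Type.String()),
  age: Type.Integer(),
});

// After (per TObjectUpdate): { name?: string | null; age: number; }
const userUpdateSchema = updateSchema(userSchema);
```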
@@ -104,14 +105,14 @@ declare const schemaToPgColumns: <T extends TObject>(schema: T) => FromSchema<T>
  * @param value The value of the field.
  * @returns The PG column.
  */
-declare const mapFieldToColumn: (name: string, value: TSchema$1) => pg$1.PgSerialBuilderInitial<string> | pg$1.PgIntegerBuilderInitial<string> |
+declare const mapFieldToColumn: (name: string, value: TSchema$1) => pg$1.PgSerialBuilderInitial<string> | pg$1.PgIntegerBuilderInitial<string> | drizzle_orm0.IsIdentity<pg$1.PgBigInt64BuilderInitial<"">, "byDefault"> | drizzle_orm0.IsIdentity<pg$1.PgBigInt64BuilderInitial<"">, "always"> | pg$1.PgBigInt53BuilderInitial<string> | pg$1.PgNumericBuilderInitial<string> | pg$1.PgTimestampBuilderInitial<string> | pg$1.PgUUIDBuilderInitial<string> | pg$1.PgCustomColumnBuilder<{
   name: string;
   dataType: "custom";
   columnType: "PgCustomColumn";
   data: Buffer<ArrayBufferLike>;
   driverParam: unknown;
   enumValues: undefined;
-}> | pg$1.PgTimestampStringBuilderInitial<string> | pg$1.PgDateStringBuilderInitial<string> | pg$1.PgTextBuilderInitial<string, [string, ...string[]]> | pg$1.PgBooleanBuilderInitial<string> |
+}> | pg$1.PgTimestampStringBuilderInitial<string> | pg$1.PgDateStringBuilderInitial<string> | pg$1.PgTextBuilderInitial<string, [string, ...string[]]> | pg$1.PgBooleanBuilderInitial<string> | drizzle_orm0.$Type<pg$1.PgCustomColumnBuilder<{
   name: string;
   dataType: "custom";
   columnType: "PgCustomColumn";
@@ -126,7 +127,14 @@ declare const mapFieldToColumn: (name: string, value: TSchema$1) => pg$1.PgSeria
   [x: string]: unknown;
   [x: number]: unknown;
   [x: symbol]: unknown;
-}> |
+}> | drizzle_orm0.$Type<pg$1.PgCustomColumnBuilder<{
+  name: string;
+  dataType: "custom";
+  columnType: "PgCustomColumn";
+  data: Record<string, unknown>;
+  driverParam: string;
+  enumValues: undefined;
+}>, Record<string, unknown>> | drizzle_orm0.$Type<pg$1.PgCustomColumnBuilder<{
   name: string;
   dataType: "custom";
   columnType: "PgCustomColumn";
@@ -1188,12 +1196,30 @@ type PgQueryWhere<T extends object> = { [Key in keyof T]?: FilterOperators<T[Key
 };
 //#endregion
 //#region src/interfaces/PgQuery.d.ts
+/**
+ * Order direction for sorting
+ */
+type OrderDirection = "asc" | "desc";
+/**
+ * Single order by clause with column and direction
+ */
+interface OrderByClause<T> {
+  column: keyof T;
+  direction?: OrderDirection;
+}
+/**
+ * Order by parameter - supports 3 modes:
+ * 1. String: orderBy: "name" (defaults to ASC)
+ * 2. Single object: orderBy: { column: "name", direction: "desc" }
+ * 3. Array: orderBy: [{ column: "name", direction: "asc" }, { column: "age", direction: "desc" }]
+ */
+type OrderBy<T> = keyof T | OrderByClause<T> | Array<OrderByClause<T>>;
 interface PgQuery<T extends TObject> {
   distinct?: boolean;
   where?: PgQueryWhereOrSQL<Static<T>>;
   limit?: number;
   offset?: number;
-
+  orderBy?: OrderBy<Static<T>>;
   groupBy?: (keyof Static<T>)[];
 }
 type PgQueryResult<T extends TObject, Select extends (keyof Static<T>)[]> = TPick<T, TKeysToIndexer<Select>>;
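The `orderBy` member added to `PgQuery` accepts any of the three documented shapes. A hedged sketch of the call-site difference, assuming `this.users` is a `$repository` instance over an entity with `name`, `age`, and `createdAt` columns (the repository definition itself is not part of this diff):

```ts
// Mode 1: bare column name, ascending by default.
await this.users.find({ orderBy: "name" });

// Mode 2: a single clause object.
await this.users.find({ orderBy: { column: "createdAt", direction: "desc" } });

// Mode 3: an array of clauses, applied in order.
await this.users.find({
  where: { name: { ilike: "%doe%" } },
  orderBy: [
    { column: "name", direction: "asc" },
    { column: "age", direction: "desc" },
  ],
  limit: 20,
});
```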
@@ -1209,10 +1235,10 @@ declare abstract class PostgresProvider {
 }
 //#endregion
 //#region src/schemas/pageQuerySchema.d.ts
-declare const pageQuerySchema:
-page:
-size:
-sort:
+declare const pageQuerySchema: typebox0.TObject<{
+  page: typebox0.TOptional<typebox0.TInteger>;
+  size: typebox0.TOptional<typebox0.TInteger>;
+  sort: typebox0.TOptional<typebox0.TString>;
 }>;
 type PageQuery = Static<typeof pageQuerySchema>;
 //#endregion
@@ -1372,7 +1398,7 @@ type Page<T> = {
  * filters.searchTerm ? { name: { ilike: `%${filters.searchTerm}%` } } : {}
  * ]
  * },
- *
+ * orderBy: [{ column: "createdAt", direction: "desc" }]
  * });
  *
  * return await this.products.paginate({ page, size }, query, { count: true });
@@ -1497,7 +1523,7 @@ type Page<T> = {
  * // Automatically excludes soft-deleted records
  * return await this.documents.find({
  * where: { authorId: { isNotNull: true } },
- *
+ * orderBy: [{ column: "updatedAt", direction: "desc" }]
  * });
  * }
  *
@@ -1659,7 +1685,7 @@ declare class RepositoryDescriptor<EntityTableConfig extends TableConfig, Entity
 /**
  * Getter for the database connection from the database provider.
  */
-protected get db(): PgDatabase<any, Record<string, never>,
+protected get db(): PgDatabase<any, Record<string, never>, drizzle_orm0.ExtractTablesWithRelations<Record<string, never>>>;
 /**
  * Execute a SQL query.
  */
@@ -1684,10 +1710,10 @@ declare class RepositoryDescriptor<EntityTableConfig extends TableConfig, Entity
  *
  * @returns The SELECT query builder.
  */
-protected select(opts?: StatementOptions): pg$1.PgSelectBase<string, Record<string, PgColumn<
+protected select(opts?: StatementOptions): pg$1.PgSelectBase<string, Record<string, PgColumn<drizzle_orm0.ColumnBaseConfig<drizzle_orm0.ColumnDataType, string>, {}, {}>>, "single", Record<string, "not-null">, false, never, {
   [x: string]: unknown;
 }[], {
-[x: string]: PgColumn<
+  [x: string]: PgColumn<drizzle_orm0.ColumnBaseConfig<drizzle_orm0.ColumnDataType, string>, {}, {}>;
 }>;
 protected selectDistinct(opts: StatementOptions | undefined, fields: SelectedFields): pg$1.PgSelectBase<string, SelectedFields, "partial", Record<string, "not-null">, false, never, {
   [x: string]: unknown;
@@ -1859,6 +1885,36 @@ declare class RepositoryDescriptor<EntityTableConfig extends TableConfig, Entity
  * @returns The cleaned row.
  */
 protected clean<T extends TObject = EntitySchema>(row: any, schema?: T): Static<T>;
+/**
+ * Parse pagination sort string to orderBy format.
+ * Format: "firstName,-lastName" -> [{ column: "firstName", direction: "asc" }, { column: "lastName", direction: "desc" }]
+ * - Columns separated by comma
+ * - Prefix with '-' for DESC direction
+ *
+ * @param sort Pagination sort string
+ * @returns OrderBy array or single object
+ */
+protected parsePaginationSort(sort: string): Array<{
+  column: string;
+  direction: "asc" | "desc";
+}> | {
+  column: string;
+  direction: "asc" | "desc";
+};
+/**
+ * Normalize orderBy parameter to array format.
+ * Supports 3 modes:
+ * 1. String: "name" -> [{ column: "name", direction: "asc" }]
+ * 2. Object: { column: "name", direction: "desc" } -> [{ column: "name", direction: "desc" }]
+ * 3. Array: [{ column: "name" }, { column: "age", direction: "desc" }] -> normalized array
+ *
+ * @param orderBy The orderBy parameter
+ * @returns Normalized array of order by clauses
+ */
+protected normalizeOrderBy(orderBy: any): Array<{
+  column: string;
+  direction: "asc" | "desc";
+}>;
 /**
  * Get the where clause for an ID.
  *
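`parsePaginationSort` ties the `sort` string from `pageQuerySchema` to the new `orderBy` shape: columns are comma-separated, and a leading `-` flips the direction to descending. A hedged sketch of the equivalence, reusing the `paginate` call shape shown in the JSDoc above (whether `paginate` forwards `sort` from the page query itself is an assumption here):

```ts
// A PageQuery as validated by pageQuerySchema: { page?, size?, sort? }.
const pageQuery = { page: 0, size: 25, sort: "lastName,-createdAt" };

// "lastName,-createdAt" parses to:
//   [{ column: "lastName", direction: "asc" },
//    { column: "createdAt", direction: "desc" }]
// i.e. the array form of OrderBy described earlier.
const page = await this.users.paginate(
  pageQuery, // passing `sort` through the page query is assumed, not shown in this diff
  { where: { name: { ilike: "%doe%" } } },
  { count: true },
);
```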
@@ -1874,7 +1930,7 @@ declare class RepositoryDescriptor<EntityTableConfig extends TableConfig, Entity
  */
 protected getPrimaryKey(schema: TObject): {
   key: string;
-col: PgColumn<
+  col: PgColumn<drizzle_orm0.ColumnBaseConfig<drizzle_orm0.ColumnDataType, string>, {}, {}>;
   type: TSchema$1;
 };
 }
@@ -2280,373 +2336,12 @@ declare class SequenceDescriptor extends Descriptor<SequenceDescriptorOptions> {
  * It integrates seamlessly with the repository pattern and provides built-in handling
  * for optimistic locking scenarios with automatic retry on version mismatches.
  *
- * **Key Features**
- *
- * - **ACID Compliance**: Full transaction support with commit/rollback functionality
- * - **Automatic Retry Logic**: Built-in retry for optimistic locking conflicts
- * - **Type Safety**: Full TypeScript support with generic parameters
- * - **Isolation Levels**: Configurable transaction isolation levels
- * - **Error Handling**: Automatic rollback on errors with proper error propagation
- * - **Repository Integration**: Seamless integration with $repository operations
- * - **Performance**: Efficient transaction management with connection reuse
- *
- * **Use Cases**
- *
- * Essential for operations requiring atomicity and consistency:
- * - Financial transactions and accounting operations
- * - Complex business workflows with multiple database operations
- * - Data migrations and bulk operations
- * - E-commerce order processing with inventory updates
- * - User registration with related data creation
- * - Audit trail creation with business operations
- *
- * @example
- * **Basic transaction for financial operations:**
- * ```ts
- * import { $transaction } from "alepha/postgres";
- *
- * class BankingService {
- * transfer = $transaction({
- * handler: async (tx, fromAccountId: string, toAccountId: string, amount: number) => {
- * // All operations within this transaction are atomic
- * console.log(`Processing transfer: $${amount} from ${fromAccountId} to ${toAccountId}`);
- *
- * // Get current account balances
- * const fromAccount = await this.accounts.findById(fromAccountId, { tx });
- * const toAccount = await this.accounts.findById(toAccountId, { tx });
- *
- * // Validate sufficient balance
- * if (fromAccount.balance < amount) {
- * throw new Error(`Insufficient funds. Balance: $${fromAccount.balance}, Required: $${amount}`);
- * }
- *
- * // Update account balances atomically
- * const updatedFromAccount = await this.accounts.updateById(
- * fromAccountId,
- * { balance: fromAccount.balance - amount },
- * { tx }
- * );
- *
- * const updatedToAccount = await this.accounts.updateById(
- * toAccountId,
- * { balance: toAccount.balance + amount },
- * { tx }
- * );
- *
- * // Create transaction record
- * const transactionRecord = await this.transactions.create({
- * id: generateUUID(),
- * fromAccountId,
- * toAccountId,
- * amount,
- * type: 'transfer',
- * status: 'completed',
- * processedAt: new Date().toISOString()
- * }, { tx });
- *
- * console.log(`Transfer completed successfully: ${transactionRecord.id}`);
- *
- * return {
- * transactionId: transactionRecord.id,
- * fromBalance: updatedFromAccount.balance,
- * toBalance: updatedToAccount.balance
- * };
- * }
- * });
- *
- * async transferFunds(fromAccountId: string, toAccountId: string, amount: number) {
- * // This will automatically retry if there's a version mismatch (optimistic locking)
- * return await this.transfer.run(fromAccountId, toAccountId, amount);
- * }
- * }
- * ```
- *
- * @example
- * **E-commerce order processing with inventory management:**
- * ```ts
- * class OrderService {
- * processOrder = $transaction({
- * config: {
- * isolationLevel: 'serializable' // Highest isolation for critical operations
- * },
- * handler: async (tx, orderData: {
- * customerId: string;
- * items: Array<{ productId: string; quantity: number; price: number }>;
- * shippingAddress: Address;
- * paymentMethodId: string;
- * }) => {
- * console.log(`Processing order for customer ${orderData.customerId}`);
- *
- * let totalAmount = 0;
- * const orderItems = [];
- *
- * // Process each item and update inventory atomically
- * for (const itemData of orderData.items) {
- * const product = await this.products.findById(itemData.productId, { tx });
- *
- * // Check inventory availability
- * if (product.stockQuantity < itemData.quantity) {
- * throw new Error(`Insufficient stock for ${product.name}. Available: ${product.stockQuantity}, Requested: ${itemData.quantity}`);
- * }
- *
- * // Update product inventory with optimistic locking
- * await this.products.save({
- * ...product,
- * stockQuantity: product.stockQuantity - itemData.quantity
- * }, { tx });
- *
- * // Calculate totals
- * const lineTotal = itemData.price * itemData.quantity;
- * totalAmount += lineTotal;
- *
- * orderItems.push({
- * id: generateUUID(),
- * productId: itemData.productId,
- * quantity: itemData.quantity,
- * unitPrice: itemData.price,
- * lineTotal
- * });
- * }
- *
- * // Create the main order record
- * const order = await this.orders.create({
- * id: generateUUID(),
- * customerId: orderData.customerId,
- * status: 'pending',
- * totalAmount,
- * shippingAddress: orderData.shippingAddress,
- * createdAt: new Date().toISOString()
- * }, { tx });
- *
- * // Create order items
- * for (const itemData of orderItems) {
- * await this.orderItems.create({
- * ...itemData,
- * orderId: order.id
- * }, { tx });
- * }
- *
- * // Process payment
- * const paymentResult = await this.paymentService.processPayment({
- * orderId: order.id,
- * amount: totalAmount,
- * paymentMethodId: orderData.paymentMethodId,
- * customerId: orderData.customerId
- * }, { tx });
- *
- * if (!paymentResult.success) {
- * throw new Error(`Payment failed: ${paymentResult.error}`);
- * }
- *
- * // Update order status
- * const completedOrder = await this.orders.updateById(
- * order.id,
- * {
- * status: 'paid',
- * paymentId: paymentResult.paymentId,
- * paidAt: new Date().toISOString()
- * },
- * { tx }
- * );
- *
- * console.log(`Order processed successfully: ${order.id}`);
- *
- * return {
- * orderId: order.id,
- * totalAmount,
- * paymentId: paymentResult.paymentId,
- * itemCount: orderItems.length
- * };
- * }
- * });
- * }
- * ```
- *
- * @example
- * **User registration with related data creation:**
- * ```ts
- * class UserService {
- * registerUser = $transaction({
- * handler: async (tx, registrationData: {
- * email: string;
- * password: string;
- * profile: {
- * firstName: string;
- * lastName: string;
- * dateOfBirth: string;
- * };
- * preferences: {
- * notifications: boolean;
- * newsletter: boolean;
- * };
- * }) => {
- * console.log(`Registering new user: ${registrationData.email}`);
- *
- * // Check if email already exists
- * const existingUser = await this.users.find(
- * { where: { email: registrationData.email } },
- * { tx }
- * );
- *
- * if (existingUser.length > 0) {
- * throw new Error(`User with email ${registrationData.email} already exists`);
- * }
- *
- * // Hash password
- * const hashedPassword = await this.hashPassword(registrationData.password);
- *
- * // Create user record
- * const user = await this.users.create({
- * id: generateUUID(),
- * email: registrationData.email,
- * passwordHash: hashedPassword,
- * isActive: true,
- * emailVerified: false
- * }, { tx });
- *
- * // Create user profile
- * const profile = await this.userProfiles.create({
- * id: generateUUID(),
- * userId: user.id,
- * firstName: registrationData.profile.firstName,
- * lastName: registrationData.profile.lastName,
- * dateOfBirth: registrationData.profile.dateOfBirth
- * }, { tx });
- *
- * // Create user preferences
- * const preferences = await this.userPreferences.create({
- * id: generateUUID(),
- * userId: user.id,
- * notifications: registrationData.preferences.notifications,
- * newsletter: registrationData.preferences.newsletter
- * }, { tx });
- *
- * // Create audit log entry
- * await this.auditLogs.create({
- * id: generateUUID(),
- * userId: user.id,
- * action: 'user_registered',
- * details: { email: user.email },
- * timestamp: new Date().toISOString()
- * }, { tx });
- *
- * console.log(`User registration completed: ${user.id}`);
- *
- * return {
- * userId: user.id,
- * email: user.email,
- * profile: {
- * firstName: profile.firstName,
- * lastName: profile.lastName
- * }
- * };
- * }
- * });
- * }
- * ```
- *
- * @example
- * **Data migration with progress tracking:**
- * ```ts
- * class MigrationService {
- * migrateUserData = $transaction({
- * config: {
- * isolationLevel: 'read_committed',
- * accessMode: 'read_write'
- * },
- * handler: async (tx, batchSize: number = 1000) => {
- * console.log(`Starting data migration with batch size ${batchSize}`);
- *
- * let totalMigrated = 0;
- * let hasMore = true;
- * let offset = 0;
- *
- * while (hasMore) {
- * // Get batch of users to migrate
- * const users = await this.legacyUsers.find({
- * limit: batchSize,
- * offset,
- * sort: { id: 'asc' }
- * }, { tx });
- *
- * if (users.length === 0) {
- * hasMore = false;
- * break;
- * }
- *
- * // Process each user in the batch
- * for (const legacyUser of users) {
- * try {
- * // Transform legacy data to new format
- * const newUser = {
- * id: generateUUID(),
- * email: legacyUser.email_address,
- * firstName: legacyUser.first_name,
- * lastName: legacyUser.last_name,
- * createdAt: legacyUser.created_date,
- * isActive: legacyUser.status === 'active'
- * };
- *
- * // Create new user record
- * await this.users.create(newUser, { tx });
- *
- * // Mark legacy user as migrated
- * await this.legacyUsers.updateById(
- * legacyUser.id,
- * {
- * migrated: true,
- * migratedAt: new Date().toISOString(),
- * newUserId: newUser.id
- * },
- * { tx }
- * );
- *
- * totalMigrated++;
- *
- * } catch (error) {
- * console.error(`Failed to migrate user ${legacyUser.id}:`, error.message);
- *
- * // Log failed migration
- * await this.migrationErrors.create({
- * id: generateUUID(),
- * legacyUserId: legacyUser.id,
- * error: error.message,
- * attemptedAt: new Date().toISOString()
- * }, { tx });
- * }
- * }
- *
- * offset += batchSize;
- * console.log(`Migrated ${totalMigrated} users so far...`);
- * }
- *
- * // Update migration status
- * await this.migrationStatus.updateById(
- * 'user_migration',
- * {
- * totalMigrated,
- * completedAt: new Date().toISOString(),
- * status: 'completed'
- * },
- * { tx }
- * );
- *
- * console.log(`Migration completed. Total migrated: ${totalMigrated}`);
- *
- * return { totalMigrated };
- * }
- * });
- * }
- * ```
- *
  * **Important Notes**:
  * - All operations within the transaction handler are atomic
  * - Automatic retry on `PgVersionMismatchError` for optimistic locking
  * - Pass `{ tx }` option to all repository operations within the transaction
  * - Transactions are automatically rolled back on any unhandled error
  * - Use appropriate isolation levels based on your consistency requirements
- *
- * @stability 2
  */
 declare const $transaction: <T extends any[], R>(opts: TransactionDescriptorOptions<T, R>) => _alepha_retry0.RetryDescriptorFn<(...args: T) => Promise<R>>;
 interface TransactionDescriptorOptions<T extends any[], R> {
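The surviving "Important Notes" still state the `$transaction` contract: the handler runs atomically, `{ tx }` must be forwarded to every repository call, version mismatches are retried, and any unhandled error rolls the work back. A minimal hedged sketch of that contract (the repository field and its schema are illustrative only):

```ts
import { $transaction } from "alepha/postgres";

class AccountService {
  // `this.accounts` is assumed to be a $repository instance defined elsewhere.
  debit = $transaction({
    handler: async (tx, accountId: string, amount: number) => {
      // Forward { tx } so both calls join the same transaction; a thrown error
      // rolls everything back, and PgVersionMismatchError triggers the
      // automatic retry mentioned in the notes above.
      const account = await this.accounts.findById(accountId, { tx });
      return await this.accounts.updateById(
        accountId,
        { balance: account.balance - amount },
        { tx },
      );
    },
  });

  async withdraw(accountId: string, amount: number) {
    // Descriptors are invoked through .run(), as in the removed examples.
    return await this.debit.run(accountId, amount);
  }
}
```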
@@ -2973,7 +2668,7 @@ declare class PostgresTypeProvider {
  * This is used to mark rows as deleted without actually removing them from the database.
  * The column is nullable - NULL means not deleted, timestamp means deleted.
  */
-readonly deletedAt: (options?: TStringOptions) => PgAttr<
+readonly deletedAt: (options?: TStringOptions) => PgAttr<typebox0.TOptional<TString>, typeof PG_DELETED_AT>;
 /**
  * Creates a reference to another table or schema. Basically a foreign key.
  */
@@ -2993,13 +2688,13 @@ declare const pg: PostgresTypeProvider;
 /**
  * @deprecated Use `pg.primaryKey()` instead.
  */
-declare const legacyIdSchema: PgAttr<PgAttr<PgAttr<
+declare const legacyIdSchema: PgAttr<PgAttr<PgAttr<typebox0.TInteger, typeof PG_PRIMARY_KEY>, typeof PG_SERIAL>, typeof PG_DEFAULT>;
 //#endregion
 //#region src/types/schema.d.ts
 /**
  * Postgres schema type.
  */
-declare const schema: <TDocument extends TSchema$1>(name: string, document: TDocument) =>
+declare const schema: <TDocument extends TSchema$1>(name: string, document: TDocument) => drizzle_orm0.$Type<pg$1.PgCustomColumnBuilder<{
   name: string;
   dataType: "custom";
   columnType: "PgCustomColumn";
@@ -3049,7 +2744,7 @@ declare const schema: <TDocument extends TSchema$1>(name: string, document: TDoc
  * @see {@link $transaction}
  * @module alepha.postgres
  */
-declare const AlephaPostgres: _alepha_core1.Service<_alepha_core1.Module
+declare const AlephaPostgres: _alepha_core1.Service<_alepha_core1.Module<{}>>;
 //#endregion
-export { $entity, $repository, $sequence, $transaction, AlephaPostgres, DrizzleKitProvider, Entity, EntityDescriptorOptions, FilterOperators, FromSchema, NodePostgresProvider, NodePostgresProviderOptions, PG_CREATED_AT, PG_DEFAULT, PG_DELETED_AT, PG_IDENTITY, PG_PRIMARY_KEY, PG_REF, PG_SCHEMA, PG_SERIAL, PG_UPDATED_AT, PG_VERSION, Page, PageQuery, PgDefault, PgEntityNotFoundError, PgIdentityOptions, PgPrimaryKey, PgQuery, PgQueryResult, PgQueryWhere, PgQueryWhereOrSQL, PgRef, PgRefOptions, PgSymbolKeys, PgSymbols, PgTableConfig, PgTableWithColumnsAndSchema, PostgresProvider, PostgresTypeProvider, RepositoryDescriptor, RepositoryDescriptorOptions, RepositoryProvider, SQLLike, SequenceDescriptor, SequenceDescriptorOptions, StatementOptions, TObjectInsert, TPage, TransactionContext, TransactionDescriptorOptions, camelToSnakeCase,
+export { $entity, $repository, $sequence, $transaction, AlephaPostgres, DrizzleKitProvider, Entity, EntityDescriptorOptions, FilterOperators, FromSchema, NodePostgresProvider, NodePostgresProviderOptions, OrderBy, OrderByClause, OrderDirection, PG_CREATED_AT, PG_DEFAULT, PG_DELETED_AT, PG_IDENTITY, PG_PRIMARY_KEY, PG_REF, PG_SCHEMA, PG_SERIAL, PG_UPDATED_AT, PG_VERSION, Page, PageQuery, PgDefault, PgEntityNotFoundError, PgIdentityOptions, PgPrimaryKey, PgQuery, PgQueryResult, PgQueryWhere, PgQueryWhereOrSQL, PgRef, PgRefOptions, PgSymbolKeys, PgSymbols, PgTableConfig, PgTableWithColumnsAndSchema, PostgresProvider, PostgresTypeProvider, RepositoryDescriptor, RepositoryDescriptorOptions, RepositoryProvider, SQLLike, SequenceDescriptor, SequenceDescriptorOptions, StatementOptions, TObjectInsert, TObjectUpdate, TPage, TransactionContext, TransactionDescriptorOptions, camelToSnakeCase, drizzle_orm0 as drizzle, insertSchema, legacyIdSchema, mapFieldToColumn, mapStringToColumn, pageQuerySchema, pageSchema, pg, schema, schemaToPgColumns, sql, updateSchema };
 //# sourceMappingURL=index.d.ts.map
package/queue/redis.d.ts
CHANGED
@@ -23,7 +23,7 @@ declare class RedisQueueProvider implements QueueProvider {
  * @see {@link RedisQueueProvider}
  * @module alepha.queue.redis
  */
-declare const AlephaQueueRedis: _alepha_core0.Service<_alepha_core0.Module
+declare const AlephaQueueRedis: _alepha_core0.Service<_alepha_core0.Module<{}>>;
 //#endregion
 export { AlephaQueueRedis, RedisQueueProvider };
 //# sourceMappingURL=index.d.ts.map
package/queue.d.ts
CHANGED
@@ -754,7 +754,7 @@ declare class ConsumerDescriptor<T extends TSchema> extends Descriptor<ConsumerD
  * @see {@link $consumer}
  * @module alepha.queue
  */
-declare const AlephaQueue: _alepha_core1.Service<_alepha_core1.Module
+declare const AlephaQueue: _alepha_core1.Service<_alepha_core1.Module<{}>>;
 //#endregion
 export { $consumer, $queue, AlephaQueue, ConsumerDescriptor, ConsumerDescriptorOptions, MemoryQueueProvider, QueueDescriptor, QueueDescriptorOptions, QueueMessage, QueueMessageSchema, QueueProvider };
 //# sourceMappingURL=index.d.ts.map
package/react/auth.d.ts
CHANGED
@@ -488,7 +488,7 @@ declare module "alepha/react" {
  * @see {@link ReactAuthProvider}
  * @module alepha.react.auth
  */
-declare const AlephaReactAuth: _alepha_core4.Service<_alepha_core4.Module
+declare const AlephaReactAuth: _alepha_core4.Service<_alepha_core4.Module<{}>>;
 //#endregion
 export { $auth, AccessToken, AlephaReactAuth, AuthDescriptor, AuthDescriptorOptions, AuthExternal, AuthInternal, CredentialsOptions, OAuth2Options, OAuth2Profile, OidcOptions, ReactAuth, ReactAuthProvider, SessionExpiredError, useAuth };
 //# sourceMappingURL=index.d.ts.map
package/react/form.d.ts
CHANGED
@@ -171,7 +171,7 @@ declare module "alepha" {
  * @see {@link useForm}
  * @module alepha.react.form
  */
-declare const AlephaReactForm: _alepha_core0.Service<_alepha_core0.Module
+declare const AlephaReactForm: _alepha_core0.Service<_alepha_core0.Module<{}>>;
 //#endregion
 export { AlephaReactForm, FormCtrlOptions, FormEventLike, FormModel, FormState, FormStateEvent, InputField, InputHTMLAttributesLike, SchemaToInput, UseFormStateReturn, useForm, useFormState };
 //# sourceMappingURL=index.d.ts.map
package/react/head.d.ts
CHANGED
@@ -114,7 +114,7 @@ declare module "alepha/react" {
  * @see {@link ServerHeadProvider}
  * @module alepha.react.head
  */
-declare const AlephaReactHead: _alepha_core1.Service<_alepha_core1.Module
+declare const AlephaReactHead: _alepha_core1.Service<_alepha_core1.Module<{}>>;
 //#endregion
 export { $head, AlephaReactHead, Head, HeadDescriptor, HeadDescriptorOptions, ServerHeadProvider, SimpleHead, UseHeadOptions, UseHeadReturn, useHead };
 //# sourceMappingURL=index.d.ts.map
package/react/i18n.d.ts
CHANGED
@@ -109,7 +109,7 @@ declare module "alepha" {
  *
  * @module alepha.react.i18n
  */
-declare const AlephaReactI18n: _alepha_core1.Service<_alepha_core1.Module
+declare const AlephaReactI18n: _alepha_core1.Service<_alepha_core1.Module<{}>>;
 //#endregion
 export { $dictionary, AlephaReactI18n, DictionaryDescriptor, DictionaryDescriptorOptions, I18nProvider, ServiceDictionary, useI18n };
 //# sourceMappingURL=index.d.ts.map