@develit-io/backend-sdk 5.22.0 → 5.24.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.cts +11 -10
- package/dist/index.d.mts +11 -10
- package/dist/index.d.ts +11 -10
- package/dist/infrastructure/index.cjs +20 -12
- package/dist/infrastructure/index.d.cts +16 -8
- package/dist/infrastructure/index.d.mts +16 -8
- package/dist/infrastructure/index.d.ts +16 -8
- package/dist/infrastructure/index.mjs +13 -5
- package/dist/node/index.cjs +6 -2
- package/dist/node/index.d.cts +15 -4
- package/dist/node/index.d.mts +15 -4
- package/dist/node/index.d.ts +15 -4
- package/dist/node/index.mjs +3 -1
- package/dist/shared/backend-sdk.B5vcbB2_.d.cts +3 -0
- package/dist/shared/backend-sdk.B5vcbB2_.d.mts +3 -0
- package/dist/shared/backend-sdk.B5vcbB2_.d.ts +3 -0
- package/dist/shared/backend-sdk.D5vSybcI.d.cts +3 -0
- package/dist/shared/backend-sdk.D5vSybcI.d.mts +3 -0
- package/dist/shared/backend-sdk.D5vSybcI.d.ts +3 -0
- package/dist/shared/{backend-sdk.CxCzOVpU.mjs → backend-sdk.G6WixINe.mjs} +20 -2
- package/dist/shared/backend-sdk.Ntg58c34.d.cts +12 -0
- package/dist/shared/backend-sdk.Ntg58c34.d.mts +12 -0
- package/dist/shared/backend-sdk.Ntg58c34.d.ts +12 -0
- package/dist/shared/{backend-sdk.CpwGFVDb.cjs → backend-sdk.wIHFRZjD.cjs} +21 -1
- package/package.json +1 -1
- package/dist/shared/backend-sdk._l2mbzzF.d.cts +0 -5
- package/dist/shared/backend-sdk._l2mbzzF.d.mts +0 -5
- package/dist/shared/backend-sdk._l2mbzzF.d.ts +0 -5
package/dist/index.d.cts
CHANGED
@@ -5,7 +5,8 @@ import { ExtractTablesWithRelations, DBQueryConfig, BuildQueryResult } from 'dri
 import * as drizzle_orm_sqlite_core from 'drizzle-orm/sqlite-core';
 import { AnySQLiteTable } from 'drizzle-orm/sqlite-core';
 import * as z from 'zod/v4/core';
-export { E as Environment
+export { E as Environment } from './shared/backend-sdk.D5vSybcI.cjs';
+export { P as Project } from './shared/backend-sdk.B5vcbB2_.cjs';
 import { StatusCodes, ReasonPhrases } from 'http-status-codes';
 export { ReasonPhrases as InternalResponsePhrase, StatusCodes as InternalResponseStatus } from 'http-status-codes';
 import { Queue } from '@cloudflare/workers-types';
@@ -236,7 +237,7 @@ type AuditLogWriter<TAuditAction = string> = (logs: AuditLogPayload<TAuditAction
  */
 declare function createAuditLogWriter<TAuditAction = string>(table: AuditLogTable): AuditLogWriter<TAuditAction>;

-declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string):
+declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string): string;
 declare const getD1DatabaseIdFromWrangler: () => string | undefined;
 declare const getD1Credentials: () => {
     driver?: undefined;
@@ -244,14 +245,14 @@ declare const getD1Credentials: () => {
 } | {
     driver: string;
     dbCredentials: {
-        accountId:
+        accountId: string | undefined;
         databaseId: string;
-        token:
+        token: string | undefined;
         url?: undefined;
     };
 } | {
     dbCredentials: {
-        url:
+        url: string | undefined;
         accountId?: undefined;
         databaseId?: undefined;
         token?: undefined;
@@ -267,9 +268,9 @@ declare const getDrizzleD1Config: () => {
 } | {
     driver: string;
     dbCredentials: {
-        accountId:
+        accountId: string | undefined;
         databaseId: string;
-        token:
+        token: string | undefined;
         url?: undefined;
     };
     schema: string;
@@ -277,7 +278,7 @@ declare const getDrizzleD1Config: () => {
     dialect: "sqlite";
 } | {
     dbCredentials: {
-        url:
+        url: string | undefined;
         accountId?: undefined;
         databaseId?: undefined;
         token?: undefined;
@@ -335,7 +336,7 @@ declare const getPgLocalConnectionString: (id: string) => string;
 declare const getPgDatabaseIdFromWrangler: () => string | undefined;
 declare const getPgCredentials: (serviceName?: string) => {
     dbCredentials: {
-        url:
+        url: string;
     };
 };
 declare const getDrizzlePgConfig: () => {
@@ -344,7 +345,7 @@ declare const getDrizzlePgConfig: () => {
         schema: string;
     };
     dbCredentials: {
-        url:
+        url: string;
     };
     schema: string;
     out: string;
package/dist/index.d.mts
CHANGED
@@ -5,7 +5,8 @@ import { ExtractTablesWithRelations, DBQueryConfig, BuildQueryResult } from 'dri
 import * as drizzle_orm_sqlite_core from 'drizzle-orm/sqlite-core';
 import { AnySQLiteTable } from 'drizzle-orm/sqlite-core';
 import * as z from 'zod/v4/core';
-export { E as Environment
+export { E as Environment } from './shared/backend-sdk.D5vSybcI.mjs';
+export { P as Project } from './shared/backend-sdk.B5vcbB2_.mjs';
 import { StatusCodes, ReasonPhrases } from 'http-status-codes';
 export { ReasonPhrases as InternalResponsePhrase, StatusCodes as InternalResponseStatus } from 'http-status-codes';
 import { Queue } from '@cloudflare/workers-types';
@@ -236,7 +237,7 @@ type AuditLogWriter<TAuditAction = string> = (logs: AuditLogPayload<TAuditAction
  */
 declare function createAuditLogWriter<TAuditAction = string>(table: AuditLogTable): AuditLogWriter<TAuditAction>;

-declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string):
+declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string): string;
 declare const getD1DatabaseIdFromWrangler: () => string | undefined;
 declare const getD1Credentials: () => {
     driver?: undefined;
@@ -244,14 +245,14 @@ declare const getD1Credentials: () => {
 } | {
     driver: string;
     dbCredentials: {
-        accountId:
+        accountId: string | undefined;
         databaseId: string;
-        token:
+        token: string | undefined;
         url?: undefined;
     };
 } | {
     dbCredentials: {
-        url:
+        url: string | undefined;
         accountId?: undefined;
         databaseId?: undefined;
         token?: undefined;
@@ -267,9 +268,9 @@ declare const getDrizzleD1Config: () => {
 } | {
     driver: string;
     dbCredentials: {
-        accountId:
+        accountId: string | undefined;
         databaseId: string;
-        token:
+        token: string | undefined;
         url?: undefined;
     };
     schema: string;
@@ -277,7 +278,7 @@ declare const getDrizzleD1Config: () => {
     dialect: "sqlite";
 } | {
     dbCredentials: {
-        url:
+        url: string | undefined;
         accountId?: undefined;
         databaseId?: undefined;
         token?: undefined;
@@ -335,7 +336,7 @@ declare const getPgLocalConnectionString: (id: string) => string;
 declare const getPgDatabaseIdFromWrangler: () => string | undefined;
 declare const getPgCredentials: (serviceName?: string) => {
     dbCredentials: {
-        url:
+        url: string;
     };
 };
 declare const getDrizzlePgConfig: () => {
@@ -344,7 +345,7 @@ declare const getDrizzlePgConfig: () => {
         schema: string;
     };
     dbCredentials: {
-        url:
+        url: string;
     };
     schema: string;
     out: string;
package/dist/index.d.ts
CHANGED
@@ -5,7 +5,8 @@ import { ExtractTablesWithRelations, DBQueryConfig, BuildQueryResult } from 'dri
 import * as drizzle_orm_sqlite_core from 'drizzle-orm/sqlite-core';
 import { AnySQLiteTable } from 'drizzle-orm/sqlite-core';
 import * as z from 'zod/v4/core';
-export { E as Environment
+export { E as Environment } from './shared/backend-sdk.D5vSybcI.js';
+export { P as Project } from './shared/backend-sdk.B5vcbB2_.js';
 import { StatusCodes, ReasonPhrases } from 'http-status-codes';
 export { ReasonPhrases as InternalResponsePhrase, StatusCodes as InternalResponseStatus } from 'http-status-codes';
 import { Queue } from '@cloudflare/workers-types';
@@ -236,7 +237,7 @@ type AuditLogWriter<TAuditAction = string> = (logs: AuditLogPayload<TAuditAction
  */
 declare function createAuditLogWriter<TAuditAction = string>(table: AuditLogTable): AuditLogWriter<TAuditAction>;

-declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string):
+declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string): string;
 declare const getD1DatabaseIdFromWrangler: () => string | undefined;
 declare const getD1Credentials: () => {
     driver?: undefined;
@@ -244,14 +245,14 @@ declare const getD1Credentials: () => {
 } | {
     driver: string;
     dbCredentials: {
-        accountId:
+        accountId: string | undefined;
         databaseId: string;
-        token:
+        token: string | undefined;
         url?: undefined;
     };
 } | {
     dbCredentials: {
-        url:
+        url: string | undefined;
         accountId?: undefined;
         databaseId?: undefined;
         token?: undefined;
@@ -267,9 +268,9 @@ declare const getDrizzleD1Config: () => {
 } | {
     driver: string;
     dbCredentials: {
-        accountId:
+        accountId: string | undefined;
         databaseId: string;
-        token:
+        token: string | undefined;
         url?: undefined;
     };
     schema: string;
@@ -277,7 +278,7 @@ declare const getDrizzleD1Config: () => {
     dialect: "sqlite";
 } | {
     dbCredentials: {
-        url:
+        url: string | undefined;
         accountId?: undefined;
         databaseId?: undefined;
         token?: undefined;
@@ -335,7 +336,7 @@ declare const getPgLocalConnectionString: (id: string) => string;
 declare const getPgDatabaseIdFromWrangler: () => string | undefined;
 declare const getPgCredentials: (serviceName?: string) => {
     dbCredentials: {
-        url:
+        url: string;
     };
 };
 declare const getDrizzlePgConfig: () => {
@@ -344,7 +345,7 @@ declare const getDrizzlePgConfig: () => {
         schema: string;
     };
     dbCredentials: {
-        url:
+        url: string;
     };
     schema: string;
     out: string;
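The declaration changes above amount to two things: the `Environment` and `Project` re-exports now come from dedicated shared chunks, and several previously truncated return types are spelled out in full (`durableObjectNamespaceIdFromName` returns `string`; the D1 credential helpers expose `accountId`, `token`, and `url` as `string | undefined`, while the Postgres helpers keep `url: string`). A minimal consumer sketch of the drizzle config helper follows; the file name is a conventional drizzle-kit assumption, not part of this package:

// drizzle.config.ts (hypothetical consumer; only the import is from the SDK)
import { getDrizzleD1Config } from '@develit-io/backend-sdk';

// The declared return type now spells out dialect: "sqlite" and dbCredentials
// with accountId/token as `string | undefined`, so missing Cloudflare
// credentials show up as explicit undefined checks at the call site.
export default getDrizzleD1Config();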
package/dist/infrastructure/index.cjs
CHANGED
@@ -1,15 +1,16 @@
 'use strict';

-const
+const worker = require('../shared/backend-sdk.wIHFRZjD.cjs');
 const cloudflare = require('alchemy/cloudflare');
 const environment_consts = require('../shared/backend-sdk.BdcrYpFD.cjs');
+require('@std/path');

 const composeD1Arguments = ({
   resourceName
 }) => {
   return {
     name: resourceName,
-    primaryLocationHint:
+    primaryLocationHint: worker.D1_LOCATION_HINT
   };
 };

@@ -23,8 +24,8 @@ const composeKvArguments = ({

 const composeQueueArguments = ({
   resourceName,
-  deliveryDelay =
-  messageRetentionPeriod =
+  deliveryDelay = worker.QUEUE_DELIVERY_DELAY,
+  messageRetentionPeriod = worker.QUEUE_MESSAGE_RETENTION_PERIOD
 }) => {
   return {
     name: resourceName,
@@ -37,12 +38,12 @@ const composeQueueArguments = ({

 const composeR2Arguments = ({
   resourceName,
-  storageClass =
+  storageClass = worker.R2_STORAGE_CLASS
 }) => {
   return {
     name: resourceName,
-    jurisdiction:
-    locationHint:
+    jurisdiction: worker.R2_JURISDICTION,
+    locationHint: worker.R2_LOCATION_HINT,
     storageClass
   };
 };
@@ -142,20 +143,26 @@ class Infrastructure {
    * Creates an instance of Cloudflare Worker.
    */
   async worker(options) {
-    const { resourceName,
+    const { resourceName, resource, path, crons, bindings, eventSources } = options;
     const identifierName = composeIdentifierName({
       resourceName,
-      resource: "worker"
+      resource: resource || "worker"
     });
+    const workerConfig = await worker.loadWorkerConfig({ path });
     return await cloudflare.Worker(
       identifierName,
-
+      worker.composeWorkerArguments({
         resourceName: composeResourceName({
           project: this.project,
           environment: this.environment,
           resourceName: identifierName
         }),
-
+        // TODO: Convert to util
+        entrypoint: `${path}/src/index.ts`,
+        crons: crons || worker.extractWorkerTriggers({
+          workerConfig,
+          environment: this.environment
+        }),
         bindings,
         eventSources
       })
@@ -168,8 +175,9 @@ class Infrastructure {
     const { resourceName, bindings, eventSources } = options;
     return await this.worker({
       resourceName,
+      resource: "service",
       // TODO: Convert to util
-
+      path: `./services/${resourceName}`,
       bindings,
       eventSources
     });
package/dist/infrastructure/index.d.cts
CHANGED
@@ -1,7 +1,7 @@
-import { P as Project
+import { P as Project } from '../shared/backend-sdk.B5vcbB2_.cjs';
+import { E as Environment } from '../shared/backend-sdk.D5vSybcI.cjs';
 import { KVNamespace, D1Database, Queue, R2Bucket, Bindings, Worker } from 'alchemy/cloudflare';
-
-type Resource = 'kv' | 'd1' | 'queue' | 'r2' | 'worker' | 'service' | 'orchestrator';
+import { W as WorkerType$1, R as Resource } from '../shared/backend-sdk.Ntg58c34.cjs';

 declare class Infrastructure {
     private project;
@@ -67,13 +67,21 @@ declare class Infrastructure {
      */
     resourceName: string;
     /**
-     *
+     * Type of the Worker.
+     */
+    resource?: WorkerType$1;
+    /**
+     * Path to the root of the Worker.
+     */
+    path: string;
+    /**
+     * Cron expressions for the trigger of the Worker.
      */
-
+    crons?: string[];
     /**
      * Bindings of the Worker.
      */
-    bindings
+    bindings?: Bindings;
     /**
      * Event sources for the service to consume.
      */
@@ -90,7 +98,7 @@ declare class Infrastructure {
     /**
      * Bindings of the Worker.
      */
-    bindings
+    bindings?: Bindings;
     /**
      * Event sources for the service to consume.
      */
@@ -134,7 +142,7 @@ declare const composeR2Arguments: ({ resourceName, storageClass, }: {
 };

 declare const composeIdentifierName: ({ resource, resourceName, }: {
-    resource: Resource;
+    resource: Resource | WorkerType;
     resourceName: string;
 }) => string;
 declare const composeResourceName: ({ project, environment, resourceName, }: {
package/dist/infrastructure/index.d.mts
CHANGED
@@ -1,7 +1,7 @@
-import { P as Project
+import { P as Project } from '../shared/backend-sdk.B5vcbB2_.mjs';
+import { E as Environment } from '../shared/backend-sdk.D5vSybcI.mjs';
 import { KVNamespace, D1Database, Queue, R2Bucket, Bindings, Worker } from 'alchemy/cloudflare';
-
-type Resource = 'kv' | 'd1' | 'queue' | 'r2' | 'worker' | 'service' | 'orchestrator';
+import { W as WorkerType$1, R as Resource } from '../shared/backend-sdk.Ntg58c34.mjs';

 declare class Infrastructure {
     private project;
@@ -67,13 +67,21 @@ declare class Infrastructure {
      */
     resourceName: string;
     /**
-     *
+     * Type of the Worker.
+     */
+    resource?: WorkerType$1;
+    /**
+     * Path to the root of the Worker.
+     */
+    path: string;
+    /**
+     * Cron expressions for the trigger of the Worker.
      */
-
+    crons?: string[];
     /**
      * Bindings of the Worker.
      */
-    bindings
+    bindings?: Bindings;
     /**
      * Event sources for the service to consume.
      */
@@ -90,7 +98,7 @@ declare class Infrastructure {
     /**
      * Bindings of the Worker.
      */
-    bindings
+    bindings?: Bindings;
     /**
      * Event sources for the service to consume.
      */
@@ -134,7 +142,7 @@ declare const composeR2Arguments: ({ resourceName, storageClass, }: {
 };

 declare const composeIdentifierName: ({ resource, resourceName, }: {
-    resource: Resource;
+    resource: Resource | WorkerType;
     resourceName: string;
 }) => string;
 declare const composeResourceName: ({ project, environment, resourceName, }: {
package/dist/infrastructure/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
-import { P as Project
+import { P as Project } from '../shared/backend-sdk.B5vcbB2_.js';
+import { E as Environment } from '../shared/backend-sdk.D5vSybcI.js';
 import { KVNamespace, D1Database, Queue, R2Bucket, Bindings, Worker } from 'alchemy/cloudflare';
-
-type Resource = 'kv' | 'd1' | 'queue' | 'r2' | 'worker' | 'service' | 'orchestrator';
+import { W as WorkerType$1, R as Resource } from '../shared/backend-sdk.Ntg58c34.js';

 declare class Infrastructure {
     private project;
@@ -67,13 +67,21 @@ declare class Infrastructure {
      */
     resourceName: string;
     /**
-     *
+     * Type of the Worker.
+     */
+    resource?: WorkerType$1;
+    /**
+     * Path to the root of the Worker.
+     */
+    path: string;
+    /**
+     * Cron expressions for the trigger of the Worker.
      */
-
+    crons?: string[];
     /**
      * Bindings of the Worker.
      */
-    bindings
+    bindings?: Bindings;
     /**
      * Event sources for the service to consume.
      */
@@ -90,7 +98,7 @@ declare class Infrastructure {
     /**
      * Bindings of the Worker.
      */
-    bindings
+    bindings?: Bindings;
     /**
      * Event sources for the service to consume.
      */
@@ -134,7 +142,7 @@ declare const composeR2Arguments: ({ resourceName, storageClass, }: {
 };

 declare const composeIdentifierName: ({ resource, resourceName, }: {
-    resource: Resource;
+    resource: Resource | WorkerType;
     resourceName: string;
 }) => string;
 declare const composeResourceName: ({ project, environment, resourceName, }: {
package/dist/infrastructure/index.mjs
CHANGED
@@ -1,6 +1,7 @@
-import { D as D1_LOCATION_HINT, Q as QUEUE_MESSAGE_RETENTION_PERIOD, a as QUEUE_DELIVERY_DELAY, R as R2_STORAGE_CLASS, b as R2_LOCATION_HINT,
+import { D as D1_LOCATION_HINT, Q as QUEUE_MESSAGE_RETENTION_PERIOD, a as QUEUE_DELIVERY_DELAY, R as R2_STORAGE_CLASS, b as R2_LOCATION_HINT, d as R2_JURISDICTION, l as loadWorkerConfig, c as composeWorkerArguments, e as extractWorkerTriggers } from '../shared/backend-sdk.G6WixINe.mjs';
 import { KVNamespace, D1Database, Queue, R2Bucket, Worker } from 'alchemy/cloudflare';
 import { E as ENVIRONMENT } from '../shared/backend-sdk.DXRpnctc.mjs';
+import '@std/path';

 const composeD1Arguments = ({
   resourceName
@@ -140,11 +141,12 @@ class Infrastructure {
    * Creates an instance of Cloudflare Worker.
    */
   async worker(options) {
-    const { resourceName,
+    const { resourceName, resource, path, crons, bindings, eventSources } = options;
     const identifierName = composeIdentifierName({
       resourceName,
-      resource: "worker"
+      resource: resource || "worker"
     });
+    const workerConfig = await loadWorkerConfig({ path });
     return await Worker(
       identifierName,
       composeWorkerArguments({
@@ -153,7 +155,12 @@ class Infrastructure {
         environment: this.environment,
         resourceName: identifierName
       }),
-
+      // TODO: Convert to util
+      entrypoint: `${path}/src/index.ts`,
+      crons: crons || extractWorkerTriggers({
+        workerConfig,
+        environment: this.environment
+      }),
       bindings,
       eventSources
     })
@@ -166,8 +173,9 @@ class Infrastructure {
     const { resourceName, bindings, eventSources } = options;
     return await this.worker({
       resourceName,
+      resource: "service",
       // TODO: Convert to util
-
+      path: `./services/${resourceName}`,
       bindings,
       eventSources
     });
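Taken together, the infrastructure changes extend `Infrastructure.worker()` with an optional `resource` kind (`'service' | 'orchestrator'`), a required `path` to the Worker's root, and optional `crons`; when `crons` is omitted, triggers are read from `<path>/wrangler.jsonc` via the new `loadWorkerConfig`/`extractWorkerTriggers` helpers, and `service()` now forwards `resource: "service"` and `path: './services/<resourceName>'`. A hedged usage sketch follows; the subpath import, the way the instance is obtained, and all names are illustrative assumptions:

// Hypothetical deployment script; the '/infrastructure' subpath and the
// resource names below are assumptions for illustration.
import { Infrastructure } from '@develit-io/backend-sdk/infrastructure';

declare const infra: Infrastructure; // however your project constructs it

// Explicit cron triggers override whatever wrangler.jsonc declares.
await infra.worker({
  resourceName: 'billing',
  resource: 'orchestrator',
  path: './workers/billing',
  crons: ['*/5 * * * *'],
});

// service() resolves the path itself (./services/<name>) and, with no crons
// given, worker() falls back to the triggers in that service's wrangler.jsonc.
await infra.service({ resourceName: 'emails' });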
package/dist/node/index.cjs
CHANGED
@@ -1,7 +1,11 @@
 'use strict';

-const
+const worker = require('../shared/backend-sdk.wIHFRZjD.cjs');
+require('../shared/backend-sdk.BdcrYpFD.cjs');
+require('@std/path');



-exports.composeWorkerArguments =
+exports.composeWorkerArguments = worker.composeWorkerArguments;
+exports.extractWorkerTriggers = worker.extractWorkerTriggers;
+exports.loadWorkerConfig = worker.loadWorkerConfig;
package/dist/node/index.d.cts
CHANGED
@@ -1,9 +1,19 @@
 import { Bindings, Queue } from 'alchemy/cloudflare';
+import { E as Environment } from '../shared/backend-sdk.D5vSybcI.cjs';
+import { a as WorkerConfig } from '../shared/backend-sdk.Ntg58c34.cjs';

-declare const
+declare const loadWorkerConfig: ({ path }: {
+    path: string;
+}) => Promise<WorkerConfig>;
+declare const extractWorkerTriggers: ({ workerConfig, environment, }: {
+    workerConfig: WorkerConfig;
+    environment: Environment;
+}) => string[];
+declare const composeWorkerArguments: ({ resourceName, entrypoint, crons, bindings, eventSources, }: {
     resourceName: string;
     entrypoint: string;
-
+    crons?: string[];
+    bindings?: Bindings;
     eventSources?: Queue[];
 }) => {
     name: string;
@@ -15,7 +25,8 @@ declare const composeWorkerArguments: ({ resourceName, entrypoint, bindings, eve
     };
     url: false;
     eventSources: Queue[];
-    bindings: Bindings;
+    bindings: Bindings | undefined;
+    crons: string[];
 };

-export { composeWorkerArguments };
+export { composeWorkerArguments, extractWorkerTriggers, loadWorkerConfig };
package/dist/node/index.d.mts
CHANGED
@@ -1,9 +1,19 @@
 import { Bindings, Queue } from 'alchemy/cloudflare';
+import { E as Environment } from '../shared/backend-sdk.D5vSybcI.mjs';
+import { a as WorkerConfig } from '../shared/backend-sdk.Ntg58c34.mjs';

-declare const
+declare const loadWorkerConfig: ({ path }: {
+    path: string;
+}) => Promise<WorkerConfig>;
+declare const extractWorkerTriggers: ({ workerConfig, environment, }: {
+    workerConfig: WorkerConfig;
+    environment: Environment;
+}) => string[];
+declare const composeWorkerArguments: ({ resourceName, entrypoint, crons, bindings, eventSources, }: {
     resourceName: string;
     entrypoint: string;
-
+    crons?: string[];
+    bindings?: Bindings;
     eventSources?: Queue[];
 }) => {
     name: string;
@@ -15,7 +25,8 @@ declare const composeWorkerArguments: ({ resourceName, entrypoint, bindings, eve
     };
     url: false;
     eventSources: Queue[];
-    bindings: Bindings;
+    bindings: Bindings | undefined;
+    crons: string[];
 };

-export { composeWorkerArguments };
+export { composeWorkerArguments, extractWorkerTriggers, loadWorkerConfig };
package/dist/node/index.d.ts
CHANGED
@@ -1,9 +1,19 @@
 import { Bindings, Queue } from 'alchemy/cloudflare';
+import { E as Environment } from '../shared/backend-sdk.D5vSybcI.js';
+import { a as WorkerConfig } from '../shared/backend-sdk.Ntg58c34.js';

-declare const
+declare const loadWorkerConfig: ({ path }: {
+    path: string;
+}) => Promise<WorkerConfig>;
+declare const extractWorkerTriggers: ({ workerConfig, environment, }: {
+    workerConfig: WorkerConfig;
+    environment: Environment;
+}) => string[];
+declare const composeWorkerArguments: ({ resourceName, entrypoint, crons, bindings, eventSources, }: {
     resourceName: string;
     entrypoint: string;
-
+    crons?: string[];
+    bindings?: Bindings;
     eventSources?: Queue[];
 }) => {
     name: string;
@@ -15,7 +25,8 @@ declare const composeWorkerArguments: ({ resourceName, entrypoint, bindings, eve
     };
     url: false;
     eventSources: Queue[];
-    bindings: Bindings;
+    bindings: Bindings | undefined;
+    crons: string[];
 };

-export { composeWorkerArguments };
+export { composeWorkerArguments, extractWorkerTriggers, loadWorkerConfig };
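The node entry now exposes `loadWorkerConfig` and `extractWorkerTriggers` next to `composeWorkerArguments`, which itself gains optional `crons` and `bindings` inputs and echoes `crons` in its result. A sketch of how the three compose outside the `Infrastructure` class; the '/node' subpath, the environment value, and the paths are assumptions, and `loadWorkerConfig` relies on `Bun.file`, so a Bun runtime is implied:

// Hypothetical standalone use of the node helpers (Bun runtime assumed).
import {
  composeWorkerArguments,
  extractWorkerTriggers,
  loadWorkerConfig,
} from '@develit-io/backend-sdk/node';
import type { Environment } from '@develit-io/backend-sdk';

const path = './services/emails';
const environment = 'dev' as Environment; // illustrative value

const workerConfig = await loadWorkerConfig({ path });   // parses <path>/wrangler.jsonc
const crons = extractWorkerTriggers({ workerConfig, environment });

const workerArgs = composeWorkerArguments({
  resourceName: 'my-project-dev-emails',
  entrypoint: `${path}/src/index.ts`,
  crons,          // new optional field; defaults to [] and is passed through
  // bindings and eventSources stay optional
});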
package/dist/node/index.mjs
CHANGED
@@ -1 +1,3 @@
-export {
+export { c as composeWorkerArguments, e as extractWorkerTriggers, l as loadWorkerConfig } from '../shared/backend-sdk.G6WixINe.mjs';
+import '../shared/backend-sdk.DXRpnctc.mjs';
+import '@std/path';
package/dist/shared/{backend-sdk.CxCzOVpU.mjs → backend-sdk.G6WixINe.mjs}
RENAMED
@@ -1,3 +1,6 @@
+import { E as ENVIRONMENT } from './backend-sdk.DXRpnctc.mjs';
+import { join } from '@std/path';
+
 const COMPATIBILITY_DATE = "2025-06-04";
 const COMPATIBILITY_FLAGS = ["nodejs_compat"];
 const QUEUE_DELIVERY_DELAY = 5;
@@ -7,9 +10,23 @@ const R2_STORAGE_CLASS = "Standard";
 const R2_JURISDICTION = "eu";
 const R2_LOCATION_HINT = "weur";

+const loadWorkerConfig = async ({ path }) => {
+  const workerConfigFile = Bun.file(join(path, "./wrangler.jsonc"));
+  return await workerConfigFile.json();
+};
+const extractWorkerTriggers = ({
+  workerConfig,
+  environment
+}) => {
+  if (!ENVIRONMENT.includes(String(environment))) {
+    return workerConfig.triggers;
+  }
+  return workerConfig.env[environment].triggers;
+};
 const composeWorkerArguments = ({
   resourceName,
   entrypoint,
+  crons = [],
   bindings,
   eventSources = []
 }) => {
@@ -23,8 +40,9 @@ const composeWorkerArguments = ({
     },
     url: false,
     eventSources,
-    bindings
+    bindings,
+    crons
   };
 };

-export { D1_LOCATION_HINT as D, QUEUE_MESSAGE_RETENTION_PERIOD as Q, R2_STORAGE_CLASS as R, QUEUE_DELIVERY_DELAY as a, R2_LOCATION_HINT as b,
+export { D1_LOCATION_HINT as D, QUEUE_MESSAGE_RETENTION_PERIOD as Q, R2_STORAGE_CLASS as R, QUEUE_DELIVERY_DELAY as a, R2_LOCATION_HINT as b, composeWorkerArguments as c, R2_JURISDICTION as d, extractWorkerTriggers as e, loadWorkerConfig as l };
package/dist/shared/backend-sdk.Ntg58c34.d.cts
ADDED
@@ -0,0 +1,12 @@
+type Resource = 'kv' | 'd1' | 'queue' | 'r2' | 'worker' | 'service' | 'orchestrator';
+type WorkerType = 'service' | 'orchestrator';
+interface WorkerConfig {
+    triggers: string[];
+    env: {
+        [key: string]: {
+            triggers: string[];
+        };
+    };
+}
+
+export type { Resource as R, WorkerType as W, WorkerConfig as a };
package/dist/shared/backend-sdk.Ntg58c34.d.mts
ADDED
@@ -0,0 +1,12 @@
+type Resource = 'kv' | 'd1' | 'queue' | 'r2' | 'worker' | 'service' | 'orchestrator';
+type WorkerType = 'service' | 'orchestrator';
+interface WorkerConfig {
+    triggers: string[];
+    env: {
+        [key: string]: {
+            triggers: string[];
+        };
+    };
+}
+
+export type { Resource as R, WorkerType as W, WorkerConfig as a };
package/dist/shared/backend-sdk.Ntg58c34.d.ts
ADDED
@@ -0,0 +1,12 @@
+type Resource = 'kv' | 'd1' | 'queue' | 'r2' | 'worker' | 'service' | 'orchestrator';
+type WorkerType = 'service' | 'orchestrator';
+interface WorkerConfig {
+    triggers: string[];
+    env: {
+        [key: string]: {
+            triggers: string[];
+        };
+    };
+}
+
+export type { Resource as R, WorkerType as W, WorkerConfig as a };
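The new shared `WorkerConfig` type describes the slice of `wrangler.jsonc` that `extractWorkerTriggers` reads: a top-level `triggers` array plus per-environment overrides under `env`, with the environment entry winning whenever the given environment is one the SDK recognizes. An illustrative object of that shape (values invented; the type is only exported from the hashed chunk, so it is restated structurally here):

// Shape of wrangler.jsonc as this SDK models it (illustrative values only).
const exampleWorkerConfig = {
  triggers: ['0 3 * * *'],            // fallback when the environment is unknown
  env: {
    dev: { triggers: [] },            // per-environment cron triggers
    prod: { triggers: ['*/15 * * * *'] },
  },
} satisfies {
  triggers: string[];
  env: { [key: string]: { triggers: string[] } };
};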
package/dist/shared/{backend-sdk.CpwGFVDb.cjs → backend-sdk.wIHFRZjD.cjs}
RENAMED
@@ -1,5 +1,8 @@
 'use strict';

+const environment_consts = require('./backend-sdk.BdcrYpFD.cjs');
+const path = require('@std/path');
+
 const COMPATIBILITY_DATE = "2025-06-04";
 const COMPATIBILITY_FLAGS = ["nodejs_compat"];
 const QUEUE_DELIVERY_DELAY = 5;
@@ -9,9 +12,23 @@ const R2_STORAGE_CLASS = "Standard";
 const R2_JURISDICTION = "eu";
 const R2_LOCATION_HINT = "weur";

+const loadWorkerConfig = async ({ path: path$1 }) => {
+  const workerConfigFile = Bun.file(path.join(path$1, "./wrangler.jsonc"));
+  return await workerConfigFile.json();
+};
+const extractWorkerTriggers = ({
+  workerConfig,
+  environment
+}) => {
+  if (!environment_consts.ENVIRONMENT.includes(String(environment))) {
+    return workerConfig.triggers;
+  }
+  return workerConfig.env[environment].triggers;
+};
 const composeWorkerArguments = ({
   resourceName,
   entrypoint,
+  crons = [],
   bindings,
   eventSources = []
 }) => {
@@ -25,7 +42,8 @@ const composeWorkerArguments = ({
     },
     url: false,
     eventSources,
-    bindings
+    bindings,
+    crons
   };
 };

@@ -36,3 +54,5 @@ exports.R2_JURISDICTION = R2_JURISDICTION;
 exports.R2_LOCATION_HINT = R2_LOCATION_HINT;
 exports.R2_STORAGE_CLASS = R2_STORAGE_CLASS;
 exports.composeWorkerArguments = composeWorkerArguments;
+exports.extractWorkerTriggers = extractWorkerTriggers;
+exports.loadWorkerConfig = loadWorkerConfig;
package/package.json
CHANGED