@develit-io/backend-sdk 5.23.0 → 5.24.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.cts +11 -10
- package/dist/index.d.mts +11 -10
- package/dist/index.d.ts +11 -10
- package/dist/infrastructure/index.cjs +22 -21
- package/dist/infrastructure/index.d.cts +6 -10
- package/dist/infrastructure/index.d.mts +6 -10
- package/dist/infrastructure/index.d.ts +6 -10
- package/dist/infrastructure/index.mjs +13 -15
- package/dist/node/index.cjs +6 -2
- package/dist/node/index.d.cts +3 -23
- package/dist/node/index.d.mts +3 -23
- package/dist/node/index.d.ts +3 -23
- package/dist/node/index.mjs +3 -1
- package/dist/shared/backend-sdk.B5vcbB2_.d.cts +3 -0
- package/dist/shared/backend-sdk.B5vcbB2_.d.mts +3 -0
- package/dist/shared/backend-sdk.B5vcbB2_.d.ts +3 -0
- package/dist/shared/backend-sdk.BpktX9HT.d.mts +43 -0
- package/dist/shared/backend-sdk.C9hX3xcw.d.ts +43 -0
- package/dist/shared/backend-sdk.CigZ8gpt.d.cts +43 -0
- package/dist/shared/backend-sdk.D5vSybcI.d.cts +3 -0
- package/dist/shared/backend-sdk.D5vSybcI.d.mts +3 -0
- package/dist/shared/backend-sdk.D5vSybcI.d.ts +3 -0
- package/dist/shared/{backend-sdk.BsaVoxCw.mjs → backend-sdk.G6WixINe.mjs} +17 -1
- package/dist/shared/{backend-sdk.CwYK8F7O.cjs → backend-sdk.wIHFRZjD.cjs} +18 -0
- package/package.json +1 -1
- package/dist/shared/backend-sdk._l2mbzzF.d.cts +0 -5
- package/dist/shared/backend-sdk._l2mbzzF.d.mts +0 -5
- package/dist/shared/backend-sdk._l2mbzzF.d.ts +0 -5
package/dist/index.d.cts
CHANGED
@@ -5,7 +5,8 @@ import { ExtractTablesWithRelations, DBQueryConfig, BuildQueryResult } from 'dri
 import * as drizzle_orm_sqlite_core from 'drizzle-orm/sqlite-core';
 import { AnySQLiteTable } from 'drizzle-orm/sqlite-core';
 import * as z from 'zod/v4/core';
-export { E as Environment
+export { E as Environment } from './shared/backend-sdk.D5vSybcI.cjs';
+export { P as Project } from './shared/backend-sdk.B5vcbB2_.cjs';
 import { StatusCodes, ReasonPhrases } from 'http-status-codes';
 export { ReasonPhrases as InternalResponsePhrase, StatusCodes as InternalResponseStatus } from 'http-status-codes';
 import { Queue } from '@cloudflare/workers-types';
@@ -236,7 +237,7 @@ type AuditLogWriter<TAuditAction = string> = (logs: AuditLogPayload<TAuditAction
 */
 declare function createAuditLogWriter<TAuditAction = string>(table: AuditLogTable): AuditLogWriter<TAuditAction>;
 
-declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string):
+declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string): string;
 declare const getD1DatabaseIdFromWrangler: () => string | undefined;
 declare const getD1Credentials: () => {
 driver?: undefined;
@@ -244,14 +245,14 @@ declare const getD1Credentials: () => {
 } | {
 driver: string;
 dbCredentials: {
-accountId:
+accountId: string | undefined;
 databaseId: string;
-token:
+token: string | undefined;
 url?: undefined;
 };
 } | {
 dbCredentials: {
-url:
+url: string | undefined;
 accountId?: undefined;
 databaseId?: undefined;
 token?: undefined;
@@ -267,9 +268,9 @@ declare const getDrizzleD1Config: () => {
 } | {
 driver: string;
 dbCredentials: {
-accountId:
+accountId: string | undefined;
 databaseId: string;
-token:
+token: string | undefined;
 url?: undefined;
 };
 schema: string;
@@ -277,7 +278,7 @@ declare const getDrizzleD1Config: () => {
 dialect: "sqlite";
 } | {
 dbCredentials: {
-url:
+url: string | undefined;
 accountId?: undefined;
 databaseId?: undefined;
 token?: undefined;
@@ -335,7 +336,7 @@ declare const getPgLocalConnectionString: (id: string) => string;
 declare const getPgDatabaseIdFromWrangler: () => string | undefined;
 declare const getPgCredentials: (serviceName?: string) => {
 dbCredentials: {
-url:
+url: string;
 };
 };
 declare const getDrizzlePgConfig: () => {
@@ -344,7 +345,7 @@ declare const getDrizzlePgConfig: () => {
 schema: string;
 };
 dbCredentials: {
-url:
+url: string;
 };
 schema: string;
 out: string;
package/dist/index.d.mts
CHANGED
@@ -5,7 +5,8 @@ import { ExtractTablesWithRelations, DBQueryConfig, BuildQueryResult } from 'dri
 import * as drizzle_orm_sqlite_core from 'drizzle-orm/sqlite-core';
 import { AnySQLiteTable } from 'drizzle-orm/sqlite-core';
 import * as z from 'zod/v4/core';
-export { E as Environment
+export { E as Environment } from './shared/backend-sdk.D5vSybcI.mjs';
+export { P as Project } from './shared/backend-sdk.B5vcbB2_.mjs';
 import { StatusCodes, ReasonPhrases } from 'http-status-codes';
 export { ReasonPhrases as InternalResponsePhrase, StatusCodes as InternalResponseStatus } from 'http-status-codes';
 import { Queue } from '@cloudflare/workers-types';
@@ -236,7 +237,7 @@ type AuditLogWriter<TAuditAction = string> = (logs: AuditLogPayload<TAuditAction
 */
 declare function createAuditLogWriter<TAuditAction = string>(table: AuditLogTable): AuditLogWriter<TAuditAction>;
 
-declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string):
+declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string): string;
 declare const getD1DatabaseIdFromWrangler: () => string | undefined;
 declare const getD1Credentials: () => {
 driver?: undefined;
@@ -244,14 +245,14 @@ declare const getD1Credentials: () => {
 } | {
 driver: string;
 dbCredentials: {
-accountId:
+accountId: string | undefined;
 databaseId: string;
-token:
+token: string | undefined;
 url?: undefined;
 };
 } | {
 dbCredentials: {
-url:
+url: string | undefined;
 accountId?: undefined;
 databaseId?: undefined;
 token?: undefined;
@@ -267,9 +268,9 @@ declare const getDrizzleD1Config: () => {
 } | {
 driver: string;
 dbCredentials: {
-accountId:
+accountId: string | undefined;
 databaseId: string;
-token:
+token: string | undefined;
 url?: undefined;
 };
 schema: string;
@@ -277,7 +278,7 @@ declare const getDrizzleD1Config: () => {
 dialect: "sqlite";
 } | {
 dbCredentials: {
-url:
+url: string | undefined;
 accountId?: undefined;
 databaseId?: undefined;
 token?: undefined;
@@ -335,7 +336,7 @@ declare const getPgLocalConnectionString: (id: string) => string;
 declare const getPgDatabaseIdFromWrangler: () => string | undefined;
 declare const getPgCredentials: (serviceName?: string) => {
 dbCredentials: {
-url:
+url: string;
 };
 };
 declare const getDrizzlePgConfig: () => {
@@ -344,7 +345,7 @@ declare const getDrizzlePgConfig: () => {
 schema: string;
 };
 dbCredentials: {
-url:
+url: string;
 };
 schema: string;
 out: string;
package/dist/index.d.ts
CHANGED
@@ -5,7 +5,8 @@ import { ExtractTablesWithRelations, DBQueryConfig, BuildQueryResult } from 'dri
 import * as drizzle_orm_sqlite_core from 'drizzle-orm/sqlite-core';
 import { AnySQLiteTable } from 'drizzle-orm/sqlite-core';
 import * as z from 'zod/v4/core';
-export { E as Environment
+export { E as Environment } from './shared/backend-sdk.D5vSybcI.js';
+export { P as Project } from './shared/backend-sdk.B5vcbB2_.js';
 import { StatusCodes, ReasonPhrases } from 'http-status-codes';
 export { ReasonPhrases as InternalResponsePhrase, StatusCodes as InternalResponseStatus } from 'http-status-codes';
 import { Queue } from '@cloudflare/workers-types';
@@ -236,7 +237,7 @@ type AuditLogWriter<TAuditAction = string> = (logs: AuditLogPayload<TAuditAction
 */
 declare function createAuditLogWriter<TAuditAction = string>(table: AuditLogTable): AuditLogWriter<TAuditAction>;
 
-declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string):
+declare function durableObjectNamespaceIdFromName(uniqueKey: string, name: string): string;
 declare const getD1DatabaseIdFromWrangler: () => string | undefined;
 declare const getD1Credentials: () => {
 driver?: undefined;
@@ -244,14 +245,14 @@ declare const getD1Credentials: () => {
 } | {
 driver: string;
 dbCredentials: {
-accountId:
+accountId: string | undefined;
 databaseId: string;
-token:
+token: string | undefined;
 url?: undefined;
 };
 } | {
 dbCredentials: {
-url:
+url: string | undefined;
 accountId?: undefined;
 databaseId?: undefined;
 token?: undefined;
@@ -267,9 +268,9 @@ declare const getDrizzleD1Config: () => {
 } | {
 driver: string;
 dbCredentials: {
-accountId:
+accountId: string | undefined;
 databaseId: string;
-token:
+token: string | undefined;
 url?: undefined;
 };
 schema: string;
@@ -277,7 +278,7 @@ declare const getDrizzleD1Config: () => {
 dialect: "sqlite";
 } | {
 dbCredentials: {
-url:
+url: string | undefined;
 accountId?: undefined;
 databaseId?: undefined;
 token?: undefined;
@@ -335,7 +336,7 @@ declare const getPgLocalConnectionString: (id: string) => string;
 declare const getPgDatabaseIdFromWrangler: () => string | undefined;
 declare const getPgCredentials: (serviceName?: string) => {
 dbCredentials: {
-url:
+url: string;
 };
 };
 declare const getDrizzlePgConfig: () => {
@@ -344,7 +345,7 @@ declare const getDrizzlePgConfig: () => {
 schema: string;
 };
 dbCredentials: {
-url:
+url: string;
 };
 schema: string;
 out: string;
package/dist/infrastructure/index.cjs
CHANGED
@@ -1,15 +1,16 @@
 'use strict';
 
-const
+const worker = require('../shared/backend-sdk.wIHFRZjD.cjs');
 const cloudflare = require('alchemy/cloudflare');
 const environment_consts = require('../shared/backend-sdk.BdcrYpFD.cjs');
+require('@std/path');
 
 const composeD1Arguments = ({
 resourceName
 }) => {
 return {
 name: resourceName,
-primaryLocationHint:
+primaryLocationHint: worker.D1_LOCATION_HINT
 };
 };
 
@@ -23,8 +24,8 @@ const composeKvArguments = ({
 
 const composeQueueArguments = ({
 resourceName,
-deliveryDelay =
-messageRetentionPeriod =
+deliveryDelay = worker.QUEUE_DELIVERY_DELAY,
+messageRetentionPeriod = worker.QUEUE_MESSAGE_RETENTION_PERIOD
 }) => {
 return {
 name: resourceName,
@@ -37,12 +38,12 @@ const composeQueueArguments = ({
 
 const composeR2Arguments = ({
 resourceName,
-storageClass =
+storageClass = worker.R2_STORAGE_CLASS
 }) => {
 return {
 name: resourceName,
-jurisdiction:
-locationHint:
+jurisdiction: worker.R2_JURISDICTION,
+locationHint: worker.R2_LOCATION_HINT,
 storageClass
 };
 };
@@ -142,28 +143,26 @@ class Infrastructure {
 * Creates an instance of Cloudflare Worker.
 */
 async worker(options) {
-const {
-resourceName,
-resource,
-entrypoint,
-crons,
-bindings,
-eventSources
-} = options;
+const { resourceName, resource, path, crons, bindings, eventSources } = options;
 const identifierName = composeIdentifierName({
 resourceName,
 resource: resource || "worker"
 });
+const workerConfig = await worker.loadWorkerConfig({ path });
 return await cloudflare.Worker(
 identifierName,
-
+worker.composeWorkerArguments({
 resourceName: composeResourceName({
 project: this.project,
 environment: this.environment,
 resourceName: identifierName
 }),
-
-
+// TODO: Convert to util
+entrypoint: `${path}/src/index.ts`,
+crons: crons || worker.extractWorkerTriggers({
+workerConfig,
+environment: this.environment
+}) || [],
 bindings,
 eventSources
 })
@@ -173,13 +172,12 @@ class Infrastructure {
 * Creates an instance of Cloudflare Worker as a service.
 */
 async service(options) {
-const { resourceName,
+const { resourceName, bindings, eventSources } = options;
 return await this.worker({
 resourceName,
 resource: "service",
 // TODO: Convert to util
-
-crons,
+path: `./services/${resourceName}`,
 bindings,
 eventSources
 });
@@ -193,6 +191,9 @@ const validateEnvironment = (environment) => {
 return Number(environment);
 };
 
+exports.composeWorkerArguments = worker.composeWorkerArguments;
+exports.extractWorkerTriggers = worker.extractWorkerTriggers;
+exports.loadWorkerConfig = worker.loadWorkerConfig;
 exports.Infrastructure = Infrastructure;
 exports.composeD1Arguments = composeD1Arguments;
 exports.composeIdentifierName = composeIdentifierName;
package/dist/infrastructure/index.d.cts
CHANGED
@@ -1,8 +1,8 @@
-import { P as Project
+import { P as Project } from '../shared/backend-sdk.B5vcbB2_.cjs';
+import { E as Environment } from '../shared/backend-sdk.D5vSybcI.cjs';
 import { KVNamespace, D1Database, Queue, R2Bucket, Bindings, Worker } from 'alchemy/cloudflare';
-
-
-type WorkerType$1 = 'service' | 'orchestrator';
+import { W as WorkerType$1, R as Resource } from '../shared/backend-sdk.CigZ8gpt.cjs';
+export { c as composeWorkerArguments, e as extractWorkerTriggers, l as loadWorkerConfig } from '../shared/backend-sdk.CigZ8gpt.cjs';
 
 declare class Infrastructure {
 private project;
@@ -72,9 +72,9 @@ declare class Infrastructure {
 */
 resource?: WorkerType$1;
 /**
-*
+* Path to the root of the Worker.
 */
-
+path: string;
 /**
 * Cron expressions for the trigger of the Worker.
 */
@@ -96,10 +96,6 @@ declare class Infrastructure {
 * Name of the service. Do not include the 'service' prefix.
 */
 resourceName: string;
-/**
-* Cron expressions for the trigger of the Worker.
-*/
-crons?: string[];
 /**
 * Bindings of the Worker.
 */
package/dist/infrastructure/index.d.mts
CHANGED
@@ -1,8 +1,8 @@
-import { P as Project
+import { P as Project } from '../shared/backend-sdk.B5vcbB2_.mjs';
+import { E as Environment } from '../shared/backend-sdk.D5vSybcI.mjs';
 import { KVNamespace, D1Database, Queue, R2Bucket, Bindings, Worker } from 'alchemy/cloudflare';
-
-
-type WorkerType$1 = 'service' | 'orchestrator';
+import { W as WorkerType$1, R as Resource } from '../shared/backend-sdk.BpktX9HT.mjs';
+export { c as composeWorkerArguments, e as extractWorkerTriggers, l as loadWorkerConfig } from '../shared/backend-sdk.BpktX9HT.mjs';
 
 declare class Infrastructure {
 private project;
@@ -72,9 +72,9 @@ declare class Infrastructure {
 */
 resource?: WorkerType$1;
 /**
-*
+* Path to the root of the Worker.
 */
-
+path: string;
 /**
 * Cron expressions for the trigger of the Worker.
 */
@@ -96,10 +96,6 @@ declare class Infrastructure {
 * Name of the service. Do not include the 'service' prefix.
 */
 resourceName: string;
-/**
-* Cron expressions for the trigger of the Worker.
-*/
-crons?: string[];
 /**
 * Bindings of the Worker.
 */
package/dist/infrastructure/index.d.ts
CHANGED
@@ -1,8 +1,8 @@
-import { P as Project
+import { P as Project } from '../shared/backend-sdk.B5vcbB2_.js';
+import { E as Environment } from '../shared/backend-sdk.D5vSybcI.js';
 import { KVNamespace, D1Database, Queue, R2Bucket, Bindings, Worker } from 'alchemy/cloudflare';
-
-
-type WorkerType$1 = 'service' | 'orchestrator';
+import { W as WorkerType$1, R as Resource } from '../shared/backend-sdk.C9hX3xcw.js';
+export { c as composeWorkerArguments, e as extractWorkerTriggers, l as loadWorkerConfig } from '../shared/backend-sdk.C9hX3xcw.js';
 
 declare class Infrastructure {
 private project;
@@ -72,9 +72,9 @@ declare class Infrastructure {
 */
 resource?: WorkerType$1;
 /**
-*
+* Path to the root of the Worker.
 */
-
+path: string;
 /**
 * Cron expressions for the trigger of the Worker.
 */
@@ -96,10 +96,6 @@ declare class Infrastructure {
 * Name of the service. Do not include the 'service' prefix.
 */
 resourceName: string;
-/**
-* Cron expressions for the trigger of the Worker.
-*/
-crons?: string[];
 /**
 * Bindings of the Worker.
 */
package/dist/infrastructure/index.mjs
CHANGED
@@ -1,6 +1,7 @@
-import { D as D1_LOCATION_HINT, Q as QUEUE_MESSAGE_RETENTION_PERIOD, a as QUEUE_DELIVERY_DELAY, R as R2_STORAGE_CLASS, b as R2_LOCATION_HINT,
+import { D as D1_LOCATION_HINT, Q as QUEUE_MESSAGE_RETENTION_PERIOD, a as QUEUE_DELIVERY_DELAY, R as R2_STORAGE_CLASS, b as R2_LOCATION_HINT, d as R2_JURISDICTION, l as loadWorkerConfig, c as composeWorkerArguments, e as extractWorkerTriggers } from '../shared/backend-sdk.G6WixINe.mjs';
 import { KVNamespace, D1Database, Queue, R2Bucket, Worker } from 'alchemy/cloudflare';
 import { E as ENVIRONMENT } from '../shared/backend-sdk.DXRpnctc.mjs';
+import '@std/path';
 
 const composeD1Arguments = ({
 resourceName
@@ -140,18 +141,12 @@ class Infrastructure {
 * Creates an instance of Cloudflare Worker.
 */
 async worker(options) {
-const {
-resourceName,
-resource,
-entrypoint,
-crons,
-bindings,
-eventSources
-} = options;
+const { resourceName, resource, path, crons, bindings, eventSources } = options;
 const identifierName = composeIdentifierName({
 resourceName,
 resource: resource || "worker"
 });
+const workerConfig = await loadWorkerConfig({ path });
 return await Worker(
 identifierName,
 composeWorkerArguments({
@@ -160,8 +155,12 @@ class Infrastructure {
 environment: this.environment,
 resourceName: identifierName
 }),
-
-
+// TODO: Convert to util
+entrypoint: `${path}/src/index.ts`,
+crons: crons || extractWorkerTriggers({
+workerConfig,
+environment: this.environment
+}) || [],
 bindings,
 eventSources
 })
@@ -171,13 +170,12 @@ class Infrastructure {
 * Creates an instance of Cloudflare Worker as a service.
 */
 async service(options) {
-const { resourceName,
+const { resourceName, bindings, eventSources } = options;
 return await this.worker({
 resourceName,
 resource: "service",
 // TODO: Convert to util
-
-crons,
+path: `./services/${resourceName}`,
 bindings,
 eventSources
 });
@@ -191,4 +189,4 @@ const validateEnvironment = (environment) => {
 return Number(environment);
 };
 
-export { Infrastructure, composeD1Arguments, composeIdentifierName, composeKvArguments, composeQueueArguments, composeR2Arguments, composeResourceName, validateEnvironment };
+export { Infrastructure, composeD1Arguments, composeIdentifierName, composeKvArguments, composeQueueArguments, composeR2Arguments, composeResourceName, composeWorkerArguments, extractWorkerTriggers, loadWorkerConfig, validateEnvironment };
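Practical effect of the Infrastructure changes above: worker() no longer accepts an explicit entrypoint and service() no longer accepts crons. Both now take a path to the Worker's root: the entrypoint is derived as `${path}/src/index.ts`, crons are read from that directory's wrangler.jsonc via loadWorkerConfig()/extractWorkerTriggers() (an explicitly passed crons array still wins), and service() hard-codes path to `./services/${resourceName}`. A minimal before/after sketch in TypeScript, assuming an already-constructed Infrastructure instance named infra (its constructor is not shown in this diff) and hypothetical resource names and cron strings:

// 5.23.x - entrypoint and crons were passed explicitly (hypothetical values)
await infra.worker({
  resourceName: 'billing',
  entrypoint: './services/billing/src/index.ts',
  crons: ['0 3 * * *'],
});

// 5.24.x - pass the Worker root instead; entrypoint and crons are derived from it
await infra.worker({
  resourceName: 'billing',
  path: './services/billing',
});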
package/dist/node/index.cjs
CHANGED
@@ -1,7 +1,11 @@
 'use strict';
 
-const
+const worker = require('../shared/backend-sdk.wIHFRZjD.cjs');
+require('../shared/backend-sdk.BdcrYpFD.cjs');
+require('@std/path');
 
 
 
-exports.composeWorkerArguments =
+exports.composeWorkerArguments = worker.composeWorkerArguments;
+exports.extractWorkerTriggers = worker.extractWorkerTriggers;
+exports.loadWorkerConfig = worker.loadWorkerConfig;
package/dist/node/index.d.cts
CHANGED
@@ -1,23 +1,3 @@
-
-
-
-resourceName: string;
-entrypoint: string;
-crons?: string[];
-bindings?: Bindings;
-eventSources?: Queue[];
-}) => {
-name: string;
-entrypoint: string;
-compatibilityFlags: string[];
-compatibilityDate: string;
-observability: {
-enabled: true;
-};
-url: false;
-eventSources: Queue[];
-bindings: Bindings | undefined;
-crons: string[];
-};
-
-export { composeWorkerArguments };
+export { c as composeWorkerArguments, e as extractWorkerTriggers, l as loadWorkerConfig } from '../shared/backend-sdk.CigZ8gpt.cjs';
+import 'alchemy/cloudflare';
+import '../shared/backend-sdk.D5vSybcI.cjs';
package/dist/node/index.d.mts
CHANGED
@@ -1,23 +1,3 @@
-
-
-
-resourceName: string;
-entrypoint: string;
-crons?: string[];
-bindings?: Bindings;
-eventSources?: Queue[];
-}) => {
-name: string;
-entrypoint: string;
-compatibilityFlags: string[];
-compatibilityDate: string;
-observability: {
-enabled: true;
-};
-url: false;
-eventSources: Queue[];
-bindings: Bindings | undefined;
-crons: string[];
-};
-
-export { composeWorkerArguments };
+export { c as composeWorkerArguments, e as extractWorkerTriggers, l as loadWorkerConfig } from '../shared/backend-sdk.BpktX9HT.mjs';
+import 'alchemy/cloudflare';
+import '../shared/backend-sdk.D5vSybcI.mjs';
package/dist/node/index.d.ts
CHANGED
@@ -1,23 +1,3 @@
-
-
-
-resourceName: string;
-entrypoint: string;
-crons?: string[];
-bindings?: Bindings;
-eventSources?: Queue[];
-}) => {
-name: string;
-entrypoint: string;
-compatibilityFlags: string[];
-compatibilityDate: string;
-observability: {
-enabled: true;
-};
-url: false;
-eventSources: Queue[];
-bindings: Bindings | undefined;
-crons: string[];
-};
-
-export { composeWorkerArguments };
+export { c as composeWorkerArguments, e as extractWorkerTriggers, l as loadWorkerConfig } from '../shared/backend-sdk.C9hX3xcw.js';
+import 'alchemy/cloudflare';
+import '../shared/backend-sdk.D5vSybcI.js';
package/dist/node/index.mjs
CHANGED
@@ -1 +1,3 @@
-export {
+export { c as composeWorkerArguments, e as extractWorkerTriggers, l as loadWorkerConfig } from '../shared/backend-sdk.G6WixINe.mjs';
+import '../shared/backend-sdk.DXRpnctc.mjs';
+import '@std/path';
package/dist/shared/backend-sdk.BpktX9HT.d.mts
ADDED
@@ -0,0 +1,43 @@
+import { Bindings, Queue } from 'alchemy/cloudflare';
+import { E as Environment } from './backend-sdk.D5vSybcI.mjs';
+
+type Resource = 'kv' | 'd1' | 'queue' | 'r2' | 'worker' | 'service' | 'orchestrator';
+type WorkerType = 'service' | 'orchestrator';
+interface WorkerConfig {
+triggers: string[];
+env: {
+[key: string]: {
+triggers: string[];
+};
+};
+}
+
+declare const loadWorkerConfig: ({ path }: {
+path: string;
+}) => Promise<WorkerConfig>;
+declare const extractWorkerTriggers: ({ workerConfig, environment, }: {
+workerConfig: WorkerConfig;
+environment: Environment;
+}) => string[];
+declare const composeWorkerArguments: ({ resourceName, entrypoint, crons, bindings, eventSources, }: {
+resourceName: string;
+entrypoint: string;
+crons?: string[];
+bindings?: Bindings;
+eventSources?: Queue[];
+}) => {
+name: string;
+entrypoint: string;
+compatibilityFlags: string[];
+compatibilityDate: string;
+observability: {
+enabled: true;
+};
+url: false;
+eventSources: Queue[];
+bindings: Bindings | undefined;
+crons: string[];
+};
+
+export { composeWorkerArguments as c, extractWorkerTriggers as e, loadWorkerConfig as l };
+export type { Resource as R, WorkerType as W };
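These declarations describe the new helper flow end to end: loadWorkerConfig reads a Worker's wrangler.jsonc from disk, extractWorkerTriggers picks the cron list for a given environment, and composeWorkerArguments builds the options object handed to alchemy's Worker(). A short sketch of that flow against the signatures above; the '@develit-io/backend-sdk/node' import specifier and all concrete values are assumptions, since the package.json exports map and the Environment values are not part of this diff:

import { loadWorkerConfig, extractWorkerTriggers, composeWorkerArguments } from '@develit-io/backend-sdk/node'; // assumed subpath export
import type { Environment } from '@develit-io/backend-sdk'; // Environment is re-exported from the main entry per the index.d.* hunks above

const path = './services/billing';                      // hypothetical Worker root
const workerConfig = await loadWorkerConfig({ path });  // parses `${path}/wrangler.jsonc` via Bun.file, so a Bun runtime is implied
const environment = 1 as unknown as Environment;        // hypothetical value; the Environment type's shape is not shown in this diff
const crons = extractWorkerTriggers({ workerConfig, environment });
const workerArgs = composeWorkerArguments({
  resourceName: 'billing-service',                      // hypothetical resource name
  entrypoint: `${path}/src/index.ts`,
  crons,
});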
package/dist/shared/backend-sdk.C9hX3xcw.d.ts
ADDED
@@ -0,0 +1,43 @@
+import { Bindings, Queue } from 'alchemy/cloudflare';
+import { E as Environment } from './backend-sdk.D5vSybcI.js';
+
+type Resource = 'kv' | 'd1' | 'queue' | 'r2' | 'worker' | 'service' | 'orchestrator';
+type WorkerType = 'service' | 'orchestrator';
+interface WorkerConfig {
+triggers: string[];
+env: {
+[key: string]: {
+triggers: string[];
+};
+};
+}
+
+declare const loadWorkerConfig: ({ path }: {
+path: string;
+}) => Promise<WorkerConfig>;
+declare const extractWorkerTriggers: ({ workerConfig, environment, }: {
+workerConfig: WorkerConfig;
+environment: Environment;
+}) => string[];
+declare const composeWorkerArguments: ({ resourceName, entrypoint, crons, bindings, eventSources, }: {
+resourceName: string;
+entrypoint: string;
+crons?: string[];
+bindings?: Bindings;
+eventSources?: Queue[];
+}) => {
+name: string;
+entrypoint: string;
+compatibilityFlags: string[];
+compatibilityDate: string;
+observability: {
+enabled: true;
+};
+url: false;
+eventSources: Queue[];
+bindings: Bindings | undefined;
+crons: string[];
+};
+
+export { composeWorkerArguments as c, extractWorkerTriggers as e, loadWorkerConfig as l };
+export type { Resource as R, WorkerType as W };
package/dist/shared/backend-sdk.CigZ8gpt.d.cts
ADDED
@@ -0,0 +1,43 @@
+import { Bindings, Queue } from 'alchemy/cloudflare';
+import { E as Environment } from './backend-sdk.D5vSybcI.cjs';
+
+type Resource = 'kv' | 'd1' | 'queue' | 'r2' | 'worker' | 'service' | 'orchestrator';
+type WorkerType = 'service' | 'orchestrator';
+interface WorkerConfig {
+triggers: string[];
+env: {
+[key: string]: {
+triggers: string[];
+};
+};
+}
+
+declare const loadWorkerConfig: ({ path }: {
+path: string;
+}) => Promise<WorkerConfig>;
+declare const extractWorkerTriggers: ({ workerConfig, environment, }: {
+workerConfig: WorkerConfig;
+environment: Environment;
+}) => string[];
+declare const composeWorkerArguments: ({ resourceName, entrypoint, crons, bindings, eventSources, }: {
+resourceName: string;
+entrypoint: string;
+crons?: string[];
+bindings?: Bindings;
+eventSources?: Queue[];
+}) => {
+name: string;
+entrypoint: string;
+compatibilityFlags: string[];
+compatibilityDate: string;
+observability: {
+enabled: true;
+};
+url: false;
+eventSources: Queue[];
+bindings: Bindings | undefined;
+crons: string[];
+};
+
+export { composeWorkerArguments as c, extractWorkerTriggers as e, loadWorkerConfig as l };
+export type { Resource as R, WorkerType as W };
package/dist/shared/{backend-sdk.BsaVoxCw.mjs → backend-sdk.G6WixINe.mjs}
CHANGED
@@ -1,3 +1,6 @@
+import { E as ENVIRONMENT } from './backend-sdk.DXRpnctc.mjs';
+import { join } from '@std/path';
+
 const COMPATIBILITY_DATE = "2025-06-04";
 const COMPATIBILITY_FLAGS = ["nodejs_compat"];
 const QUEUE_DELIVERY_DELAY = 5;
@@ -7,6 +10,19 @@ const R2_STORAGE_CLASS = "Standard";
 const R2_JURISDICTION = "eu";
 const R2_LOCATION_HINT = "weur";
 
+const loadWorkerConfig = async ({ path }) => {
+const workerConfigFile = Bun.file(join(path, "./wrangler.jsonc"));
+return await workerConfigFile.json();
+};
+const extractWorkerTriggers = ({
+workerConfig,
+environment
+}) => {
+if (!ENVIRONMENT.includes(String(environment))) {
+return workerConfig.triggers;
+}
+return workerConfig.env[environment].triggers;
+};
 const composeWorkerArguments = ({
 resourceName,
 entrypoint,
@@ -29,4 +45,4 @@ const composeWorkerArguments = ({
 };
 };
 
-export { D1_LOCATION_HINT as D, QUEUE_MESSAGE_RETENTION_PERIOD as Q, R2_STORAGE_CLASS as R, QUEUE_DELIVERY_DELAY as a, R2_LOCATION_HINT as b,
+export { D1_LOCATION_HINT as D, QUEUE_MESSAGE_RETENTION_PERIOD as Q, R2_STORAGE_CLASS as R, QUEUE_DELIVERY_DELAY as a, R2_LOCATION_HINT as b, composeWorkerArguments as c, R2_JURISDICTION as d, extractWorkerTriggers as e, loadWorkerConfig as l };
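The branch in extractWorkerTriggers above works as follows: when the passed environment is not one of the known ENVIRONMENT values (that array lives in backend-sdk.DXRpnctc.mjs and is not shown in this diff), the Worker's top-level triggers list is returned; otherwise the per-environment list under env[environment].triggers takes precedence. A small sketch against a hand-written object matching the internal WorkerConfig shape (the interface is declared in the new shared .d.* files but not exported); environment keys and cron strings are hypothetical:

const workerConfig = {
  triggers: ['0 * * * *'],            // default cron triggers
  env: {
    '1': { triggers: ['0 3 * * *'] }, // hypothetical per-environment override
  },
};

// Assuming '1' is listed in ENVIRONMENT:
//   extractWorkerTriggers({ workerConfig, environment: 1 })  -> ['0 3 * * *']
// Assuming 99 is not listed in ENVIRONMENT:
//   extractWorkerTriggers({ workerConfig, environment: 99 }) -> ['0 * * * *']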
package/dist/shared/{backend-sdk.CwYK8F7O.cjs → backend-sdk.wIHFRZjD.cjs}
CHANGED
@@ -1,5 +1,8 @@
 'use strict';
 
+const environment_consts = require('./backend-sdk.BdcrYpFD.cjs');
+const path = require('@std/path');
+
 const COMPATIBILITY_DATE = "2025-06-04";
 const COMPATIBILITY_FLAGS = ["nodejs_compat"];
 const QUEUE_DELIVERY_DELAY = 5;
@@ -9,6 +12,19 @@ const R2_STORAGE_CLASS = "Standard";
 const R2_JURISDICTION = "eu";
 const R2_LOCATION_HINT = "weur";
 
+const loadWorkerConfig = async ({ path: path$1 }) => {
+const workerConfigFile = Bun.file(path.join(path$1, "./wrangler.jsonc"));
+return await workerConfigFile.json();
+};
+const extractWorkerTriggers = ({
+workerConfig,
+environment
+}) => {
+if (!environment_consts.ENVIRONMENT.includes(String(environment))) {
+return workerConfig.triggers;
+}
+return workerConfig.env[environment].triggers;
+};
 const composeWorkerArguments = ({
 resourceName,
 entrypoint,
@@ -38,3 +54,5 @@ exports.R2_JURISDICTION = R2_JURISDICTION;
 exports.R2_LOCATION_HINT = R2_LOCATION_HINT;
 exports.R2_STORAGE_CLASS = R2_STORAGE_CLASS;
 exports.composeWorkerArguments = composeWorkerArguments;
+exports.extractWorkerTriggers = extractWorkerTriggers;
+exports.loadWorkerConfig = loadWorkerConfig;
package/package.json
CHANGED