@rpcbase/worker 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,7 @@
1
+ export * from './queue';
2
+ export * from './queueListener';
3
+ export * from './taskMap';
4
+ export * from './taskNames';
5
+ export { default as queue } from './queue';
6
+ export type { Job, JobsOptions as JobOptions } from 'bullmq';
7
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,SAAS,CAAA;AACvB,cAAc,iBAAiB,CAAA;AAC/B,cAAc,WAAW,CAAA;AACzB,cAAc,aAAa,CAAA;AAC3B,OAAO,EAAE,OAAO,IAAI,KAAK,EAAE,MAAM,SAAS,CAAA;AAC1C,YAAY,EAAE,GAAG,EAAE,WAAW,IAAI,UAAU,EAAE,MAAM,QAAQ,CAAA"}
package/dist/index.js ADDED
@@ -0,0 +1,317 @@
1
+ import { Queue, Worker } from "bullmq";
2
+ import { MongoClient } from "mongodb";
3
+ import mongoose from "mongoose";
4
// Default BullMQ queue name used when RB_QUEUE_NAME is not configured.
const DEFAULT_QUEUE_NAME = "rb-queue-default";
// Default number of jobs the worker processes in parallel (see getConcurrency).
const DEFAULT_CONCURRENCY = 2;
// Registry of task handlers keyed by task name. Created without a prototype
// so arbitrary task names cannot collide with Object.prototype members.
const tasksByName = /* @__PURE__ */ Object.create(null);
// Lazily created module-level singletons (see ensureQueue / start / close).
let queueInstance = null;
let workerInstance = null;
// Guards start() so the queue/worker wiring happens only once per process.
let hasStarted = false;
10
// Reads and trims REDIS_URL from the environment.
// Throws when the variable is missing or blank, since the queue cannot
// operate without a Redis endpoint.
const getRedisUrl = () => {
  const configured = process.env.REDIS_URL?.trim();
  if (configured) return configured;
  throw new Error("Missing REDIS_URL (required for @rpcbase/worker queue)");
};
17
// Queue name: RB_QUEUE_NAME (trimmed) when set and non-blank, else the default.
const getQueueName = () => {
  const configured = process.env.RB_QUEUE_NAME?.trim();
  return configured || DEFAULT_QUEUE_NAME;
};
18
// Worker concurrency: RB_QUEUE_CONCURRENCY when it parses to a positive
// finite number (floored to an integer), otherwise DEFAULT_CONCURRENCY.
const getConcurrency = () => {
  const raw = process.env.RB_QUEUE_CONCURRENCY?.trim();
  if (!raw) return DEFAULT_CONCURRENCY;
  const parsed = Number(raw);
  const isUsable = Number.isFinite(parsed) && parsed > 0;
  return isUsable ? Math.floor(parsed) : DEFAULT_CONCURRENCY;
};
24
// Builds the BullMQ connection descriptor; the Redis URL is passed through
// as-is so BullMQ handles parsing.
const getConnection = () => {
  return { url: getRedisUrl() };
};
25
// Returns the singleton BullMQ Queue, creating it on first use with the
// configured name and Redis connection.
const ensureQueue = () => {
  if (queueInstance) return queueInstance;
  queueInstance = new Queue(getQueueName(), { connection: getConnection() });
  return queueInstance;
};
30
// Registers (or overwrites) the handler invoked for jobs named `name`.
// Handlers receive (job.data, job) — see the worker processor in start().
function registerTask(name, handler) {
  tasksByName[name] = handler;
}
// Exposes the live handler registry (not a copy), keyed by task name.
const getTasks = () => tasksByName;
34
// Enqueues a job on the shared queue; `options` are BullMQ job options.
function add(taskName, payload, options) {
  return ensureQueue().add(taskName, payload, options);
}
// Looks up a job by id; normalizes BullMQ's `undefined` result to null.
const getJob = async (jobId) => await ensureQueue().getJob(jobId) ?? null;
// Pass-through to Queue#getJobs (types / start / end / asc).
const getJobs = async (...args) => ensureQueue().getJobs(...args);
// Direct access to the underlying Queue singleton.
const getInstance = () => ensureQueue();
40
// Boots the shared queue plus a Worker that dispatches each job, by name,
// to the handler registered via registerTask. Idempotent: subsequent calls
// return the existing queue. Resolves once the worker's Redis connection
// is ready.
const start = async () => {
  if (hasStarted) return ensureQueue();
  hasStarted = true;
  const queue = ensureQueue();
  const concurrency = getConcurrency();
  console.log("start worker queue", { queue: queue.name, redisUrl: getRedisUrl(), concurrency });
  queue.on("error", (err) => {
    console.log(`queue error: ${err.message}`);
  });
  if (!workerInstance) {
    workerInstance = new Worker(
      queue.name,
      // Job processor: route by job name to the registered handler.
      async (job) => {
        const taskName = job.name;
        const handler = tasksByName[taskName];
        if (!handler) {
          // Unknown task names fail the job so BullMQ records the error.
          throw new Error(`No task registered for '${taskName}'`);
        }
        return await handler(job.data, job);
      },
      {
        concurrency,
        connection: getConnection()
      }
    );
  }
  // NOTE(review): these listeners are attached outside the creation guard
  // above, so a start() call that found an existing workerInstance would
  // register them a second time. The hasStarted guard currently prevents
  // that path, and close() resets both flags together — verify if either
  // invariant changes.
  workerInstance.on("error", (err) => {
    console.log(`worker error: ${err.message}`);
  });
  workerInstance.on("stalled", (jobId) => {
    console.log(`job ${jobId} stalled`);
  });
  workerInstance.on("failed", (job, err) => {
    console.log(`job ${job?.id ?? "unknown"} failed`, err);
  });
  await workerInstance.waitUntilReady();
  return queue;
};
78
// Shuts down the worker and then the queue (worker first, so job pickup
// stops before the queue's connection goes away) and resets module state
// so start() can run again. No-op when nothing was ever created.
//
// Fix: previously a failure while closing the worker skipped closing the
// queue entirely, leaking the queue's Redis connection. Both closes are
// now attempted; the worker's error is rethrown afterwards so callers
// still observe it.
const close = async () => {
  if (!queueInstance && !workerInstance) return;
  let workerCloseError = null;
  try {
    try {
      await workerInstance?.close();
    } catch (err) {
      workerCloseError = err;
    }
    await queueInstance?.close();
    if (workerCloseError) throw workerCloseError;
  } finally {
    // Always reset the singletons, even when a close failed, matching the
    // original semantics.
    workerInstance = null;
    queueInstance = null;
    hasStarted = false;
  }
};
89
// Public accessor for the Redis URL the queue is configured against.
const getUrl = () => getRedisUrl();
// Enqueues a repeatable job. When the caller supplies no jobId, a stable
// "schedule|<taskName>" id is used so re-scheduling the same task replaces
// rather than duplicates it.
async function scheduleTask(taskName, payload, options) {
  const { jobId: explicitJobId, repeat, ...jobOptions } = options;
  const jobId = explicitJobId ?? `schedule|${taskName}`;
  return add(taskName, payload, { ...jobOptions, jobId, repeat });
}
98
// Aggregate facade re-exported as the package's default `queue` API surface.
const queueApi = {
  start,
  close,
  registerTask,
  getTasks,
  add,
  scheduleTask,
  getJob,
  getJobs,
  getInstance,
  getUrl
};
110
// Bounds for the change-stream listener's exponential retry backoff.
const RETRY_MAXIMUM_DELAY_MS = 3e3;
const RETRY_MINIMUM_DELAY_MS = 50;
// Resolves after `ms` milliseconds.
// Fix: dropped the needless `async` — the function already constructs and
// returns a Promise, so `async` only double-wrapped it.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
113
// Resolves the Mongo connection string. Preference order: the first defined
// of MONGODB_URL / MONGO_URL / MONGODB_URI / DB_URL (trimmed, if non-blank),
// otherwise a URL assembled from DB_HOST (default "localhost") and DB_PORT.
// Throws when no usable configuration exists.
const getMongoUrl = () => {
  const explicitUrl =
    process.env.MONGODB_URL ??
    process.env.MONGO_URL ??
    process.env.MONGODB_URI ??
    process.env.DB_URL;
  const trimmedUrl = explicitUrl?.trim();
  if (trimmedUrl) return trimmedUrl;
  const port = process.env.DB_PORT?.trim();
  if (!port) {
    throw new Error("Missing Mongo connection details (MONGODB_URL/MONGO_URL/MONGODB_URI/DB_URL/DB_PORT)");
  }
  const host = process.env.DB_HOST?.trim() || "localhost";
  return `mongodb://${host}:${port}`;
};
121
// Backslash-escapes every regex metacharacter in `value` so the result can
// be embedded verbatim inside a dynamic RegExp pattern.
const escapeRegex = (value) => {
  const metaChars = /[.*+?^${}()|[\]\\]/g;
  return value.replace(metaChars, "\\$&");
};
122
// Scans every registered mongoose model and returns the name of the first
// one whose backing collection matches `collName`; null when no model maps
// to that collection.
const resolveModelNameFromCollection = (collName) => {
  const registeredModels = mongoose.models;
  const matchName = Object.keys(registeredModels).find((modelName) => {
    const model = registeredModels[modelName];
    const backingName = model?.collection?.collectionName ?? model?.collection?.name;
    return backingName === collName;
  });
  return matchName ?? null;
};
133
// Annotates a Mongo change-event updateDescription with the list of updated
// field keys (`updatedFieldsKeys`). Non-object inputs, and descriptions
// without an object-valued `updatedFields`, are returned unchanged (same
// reference).
const normalizeUpdateDescription = (updateDescription) => {
  const isPlainObjectLike = (value) => Boolean(value) && typeof value === "object";
  if (!isPlainObjectLike(updateDescription)) return updateDescription;
  const { updatedFields } = updateDescription;
  if (!isPlainObjectLike(updatedFields)) return updateDescription;
  return {
    ...updateDescription,
    updatedFieldsKeys: Object.keys(updatedFields)
  };
};
143
// Mongo "replace" events are surfaced to task handlers as "update";
// every other operation name passes through unchanged.
const normalizeOpForTaskName = (op) => {
  if (op === "replace") return "update";
  return op;
};
144
// Translates a Mongo change event into a queue job named "on-<op>-<Model>",
// but only when a handler for that exact task name is registered (otherwise
// the event is dropped silently).
const dispatchWorkerQueue = async ({
  dbName,
  modelName,
  op,
  doc,
  updateDescription
}) => {
  const tasks = queueApi.getTasks();
  const taskOp = normalizeOpForTaskName(op);
  const handlerName = `on-${taskOp}-${modelName}`;
  if (!tasks[handlerName]) return;
  const normalizedUpdateDescription = normalizeUpdateDescription(updateDescription);
  // jobId includes db + op + document id, which deduplicates repeat events
  // for the same document while an identical job is still pending.
  await queueApi.add(handlerName, { doc, updateDescription: normalizedUpdateDescription }, {
    jobId: `${dbName}|${taskOp}-${doc?._id ?? "unknown"}`,
    removeOnComplete: true,
    removeOnFail: true
  });
};
162
// GridFS backing collections (".files" / ".chunks" suffixes) never produce
// queue jobs.
const shouldSkipCollection = (collName) => {
  const skippedSuffixes = [".files", ".chunks"];
  return skippedSuffixes.some((suffix) => collName.endsWith(suffix));
};
163
// Internal bookkeeping models/collections that must never produce queue
// jobs. NOTE(review): "Rts" presumably stands for real-time sync — confirm
// against the packages that define RBRtsChange / RBRtsCounter.
const INTERNAL_IGNORED_MODEL_NAMES = /* @__PURE__ */ new Set(["RBRtsChange", "RBRtsCounter"]);
const INTERNAL_IGNORED_COLLECTION_NAMES = /* @__PURE__ */ new Set(["rtschanges", "rtscounters"]);
165
// Watches, via a cluster-wide Mongo change stream, every database whose name
// matches `<APP_NAME>-*-db`, and forwards insert/update/replace events to the
// queue (see dispatchWorkerQueue). Reconnects with capped exponential backoff
// when the stream closes, resuming from the last successfully dispatched
// event's resume token. Returns a handle whose close() tears everything down.
const registerQueueListener = async (options = {}) => {
  // At least one retry; fractional values are floored.
  const maxRetries = Math.max(1, Math.floor(options.maxRetries ?? 20));
  const appName = process.env.APP_NAME?.trim();
  if (!appName) {
    throw new Error("Missing APP_NAME (required to configure the worker DB change listener)");
  }
  const mongoUrl = getMongoUrl();
  let stopped = false;
  let retryCounter = 0;
  let client = null;
  let stream = null;
  // Resume token of the last change that was dispatched successfully; only
  // advanced after dispatch, so a restart re-delivers (at-least-once).
  let resumeAfter = null;
  // Serializes change handling: each event chains onto this promise.
  let processing = Promise.resolve();
  // Detects the class of errors after which the saved resume token is no
  // longer usable (server code 286 plus known message variants).
  const isChangeStreamHistoryLost = (err) => {
    const anyErr = err;
    const code = typeof anyErr?.code === "number" ? anyErr.code : null;
    const codeName = typeof anyErr?.codeName === "string" ? anyErr.codeName : "";
    const message = err instanceof Error ? err.message : String(err ?? "");
    return code === 286 || codeName === "ChangeStreamHistoryLost" || message.includes("ChangeStreamHistoryLost") || message.includes("resume token") || message.includes("Resume token") || message.includes("resume of change stream was not possible") || message.includes("cannot resume");
  };
  // Permanent shutdown: flips `stopped` first so the stream's "close" handler
  // does not schedule a reconnect, then best-effort closes stream and client.
  const close2 = async () => {
    stopped = true;
    try {
      stream?.removeAllListeners();
      await stream?.close();
    } catch {
    }
    stream = null;
    try {
      await client?.close();
    } catch {
    }
    client = null;
  };
  // (Re)establishes the client and change stream; called initially and after
  // every stream close while not stopped.
  const startStream = async () => {
    if (stopped) return;
    // Tear down any previous stream/client before reconnecting.
    if (stream) {
      try {
        stream.removeAllListeners();
        await stream.close();
      } catch {
      }
      stream = null;
    }
    if (client) {
      try {
        await client.close();
      } catch {
      }
      client = null;
    }
    client = new MongoClient(mongoUrl, {
      family: 4,
      serverSelectionTimeoutMS: 2e3,
      connectTimeoutMS: 2e3
    });
    await client.connect();
    // Only databases named "<APP_NAME>-...-db" are observed.
    const dbMatch = { "ns.db": { $regex: `^${escapeRegex(appName)}-.*-db$` } };
    const pipeline = [
      { $match: dbMatch },
      {
        $match: {
          operationType: { $in: ["insert", "update", "replace"] }
        }
      },
      { $match: { "ns.coll": { $nin: Array.from(INTERNAL_IGNORED_COLLECTION_NAMES) } } }
    ];
    stream = client.watch(pipeline, {
      // updateLookup makes update events carry the full post-image document.
      fullDocument: "updateLookup",
      ...resumeAfter ? { resumeAfter } : {}
    });
    stream.on("change", (change) => {
      // Captured so the error path closes the stream this event came from,
      // even if `stream` has been replaced by a reconnect meanwhile.
      const streamRef = stream;
      processing = processing.then(async () => {
        const dbName = String(change?.ns?.db ?? "");
        if (!dbName) return;
        const collName = String(change?.ns?.coll ?? "");
        if (!collName) return;
        if (shouldSkipCollection(collName)) return;
        if (INTERNAL_IGNORED_COLLECTION_NAMES.has(collName)) return;
        const modelName = resolveModelNameFromCollection(collName);
        if (!modelName) return;
        if (INTERNAL_IGNORED_MODEL_NAMES.has(modelName)) return;
        const op = String(change.operationType ?? "");
        if (!op) return;
        const normalizedOp = normalizeOpForTaskName(op);
        const doc = change.fullDocument;
        if (!doc) return;
        try {
          await dispatchWorkerQueue({
            dbName,
            modelName,
            op: normalizedOp,
            doc,
            updateDescription: change.updateDescription
          });
          // Advance the resume point only after a successful dispatch.
          resumeAfter = change?._id ?? resumeAfter;
        } catch (err) {
          console.warn("queue listener failed to dispatch change", err);
          // Closing the stream triggers the "close" handler's retry path.
          try {
            await streamRef?.close();
          } catch {
          }
        }
      }).catch((err) => {
        console.warn("queue listener change handler failed", err);
      });
    });
    stream.on("error", (err) => {
      console.warn("queue listener change stream error", err);
      if (stopped) return;
      // Drop an unusable resume token so the reconnect starts fresh.
      if (resumeAfter && isChangeStreamHistoryLost(err)) {
        resumeAfter = null;
      }
      // NOTE(review): `void` discards the close() promise, so a rejection
      // here would surface as an unhandled rejection — consider .catch(()=>{}).
      try {
        void stream?.close();
      } catch {
      }
    });
    stream.on("close", () => {
      if (stopped) return;
      retryCounter += 1;
      // NOTE(review): retryCounter is never reset after a successful
      // reconnect, so the budget is cumulative over the process lifetime —
      // confirm that is intended.
      if (retryCounter > maxRetries) {
        console.error("queue listener reached max retries, exiting with failure");
        process.exit(1);
      }
      // Exponential backoff: min + 10 * 2^retries, capped at the maximum.
      const timeoutMs = Math.min(
        RETRY_MAXIMUM_DELAY_MS,
        RETRY_MINIMUM_DELAY_MS + 10 * Math.pow(2, retryCounter)
      );
      console.log("queue listener closed, retrying in", timeoutMs);
      // NOTE(review): if startStream() rejects here (e.g. connect failure),
      // the rejection is unhandled and the retry loop stops — a .catch that
      // logs and re-schedules would make reconnection robust.
      void sleep(timeoutMs).then(() => startStream());
    });
  };
  await startStream();
  return { close: close2 };
};
302
// Builds the canonical DB-event task name, e.g. dbEventTaskName("insert",
// "User") -> "on-insert-User".
const dbEventTaskName = (op, modelName) => ["on", op, modelName].join("-");
303
+ export {
304
+ add,
305
+ close,
306
+ dbEventTaskName,
307
+ getInstance,
308
+ getJob,
309
+ getJobs,
310
+ getTasks,
311
+ getUrl,
312
+ queueApi as queue,
313
+ registerQueueListener,
314
+ registerTask,
315
+ scheduleTask,
316
+ start
317
+ };
@@ -0,0 +1,34 @@
1
import { Queue, Job, JobsOptions as JobOptions } from 'bullmq';
import { WorkerTaskMap } from './taskMap';
/** Handler invoked for a job: receives the payload and the BullMQ Job. */
export type TaskHandler<TPayload = unknown> = (payload: TPayload, job: Job) => unknown | Promise<unknown>;
/**
 * Registers the handler for a task name. The first overload gives typed
 * payloads for names declared in WorkerTaskMap; the second accepts any name.
 */
export declare function registerTask<TName extends keyof WorkerTaskMap & string>(name: TName, handler: TaskHandler<WorkerTaskMap[TName]>): void;
export declare function registerTask(name: string, handler: TaskHandler<any>): void;
/** The live registry of handlers, keyed by task name. */
export declare const getTasks: () => Record<string, TaskHandler<any>>;
/** Enqueues a job; typed payload for WorkerTaskMap names, untyped otherwise. */
export declare function add<TName extends keyof WorkerTaskMap & string>(taskName: TName, payload: WorkerTaskMap[TName], options?: JobOptions): Promise<Job>;
export declare function add(taskName: string, payload: unknown, options?: JobOptions): Promise<Job>;
/** Looks up a job by id; resolves null when not found. */
export declare const getJob: (jobId: string) => Promise<Job | null>;
/** Pass-through to Queue#getJobs. */
export declare const getJobs: (...args: Parameters<Queue["getJobs"]>) => Promise<Job[]>;
/** The underlying Queue singleton. */
export declare const getInstance: () => Queue;
/** Starts the queue and its worker; resolves with the Queue. */
export declare const start: () => Promise<Queue>;
/** Closes worker and queue and resets internal state. */
export declare const close: () => Promise<void>;
/** The configured Redis URL. */
export declare const getUrl: () => string;
/** Options for scheduleTask: standard job options plus a mandatory repeat rule. */
export type ScheduleTaskOptions = Omit<JobOptions, "repeat"> & {
    jobId?: string;
    repeat: NonNullable<JobOptions["repeat"]>;
};
/** Enqueues a repeatable job; jobId defaults to a stable per-task id. */
export declare function scheduleTask<TName extends keyof WorkerTaskMap & string>(taskName: TName, payload: WorkerTaskMap[TName], options: ScheduleTaskOptions): Promise<Job>;
export declare function scheduleTask(taskName: string, payload: unknown, options: ScheduleTaskOptions): Promise<Job>;
/** Aggregate facade; also the module's default export. */
declare const queueApi: {
    start: () => Promise<Queue>;
    close: () => Promise<void>;
    registerTask: typeof registerTask;
    getTasks: () => Record<string, TaskHandler<any>>;
    add: typeof add;
    scheduleTask: typeof scheduleTask;
    getJob: (jobId: string) => Promise<Job | null>;
    getJobs: (types?: import('bullmq').JobType | import('bullmq').JobType[] | undefined, start?: number | undefined, end?: number | undefined, asc?: boolean | undefined) => Promise<Job[]>;
    getInstance: () => Queue;
    getUrl: () => string;
};
export default queueApi;
//# sourceMappingURL=queue.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"queue.d.ts","sourceRoot":"","sources":["../src/queue.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAU,KAAK,GAAG,EAAE,KAAK,WAAW,IAAI,UAAU,EAAE,MAAM,QAAQ,CAAA;AAEhF,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,WAAW,CAAA;AAG9C,MAAM,MAAM,WAAW,CAAC,QAAQ,GAAG,OAAO,IAAI,CAAC,OAAO,EAAE,QAAQ,EAAE,GAAG,EAAE,GAAG,KAAK,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;AAoCzG,wBAAgB,YAAY,CAAC,KAAK,SAAS,MAAM,aAAa,GAAG,MAAM,EACrE,IAAI,EAAE,KAAK,EACX,OAAO,EAAE,WAAW,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,GACzC,IAAI,CAAA;AACP,wBAAgB,YAAY,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,CAAC,GAAG,CAAC,GAAG,IAAI,CAAA;AAK3E,eAAO,MAAM,QAAQ,QAAO,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,GAAG,CAAC,CAAgB,CAAA;AAE3E,wBAAgB,GAAG,CAAC,KAAK,SAAS,MAAM,aAAa,GAAG,MAAM,EAC5D,QAAQ,EAAE,KAAK,EACf,OAAO,EAAE,aAAa,CAAC,KAAK,CAAC,EAC7B,OAAO,CAAC,EAAE,UAAU,GACnB,OAAO,CAAC,GAAG,CAAC,CAAA;AACf,wBAAgB,GAAG,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,CAAA;AAK3F,eAAO,MAAM,MAAM,GAAU,OAAO,MAAM,KAAG,OAAO,CAAC,GAAG,GAAG,IAAI,CAAgD,CAAA;AAE/G,eAAO,MAAM,OAAO,GAAU,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,KAAG,OAAO,CAAC,GAAG,EAAE,CAC/B,CAAA;AAEpD,eAAO,MAAM,WAAW,QAAO,KAAsB,CAAA;AAErD,eAAO,MAAM,KAAK,QAAa,OAAO,CAAC,KAAK,CAgD3C,CAAA;AAED,eAAO,MAAM,KAAK,QAAa,OAAO,CAAC,IAAI,CAU1C,CAAA;AAED,eAAO,MAAM,MAAM,QAAO,MAAuB,CAAA;AAEjD,MAAM,MAAM,mBAAmB,GAAG,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,GAAG;IAC7D,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,WAAW,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAA;CAC1C,CAAA;AAED,wBAAgB,YAAY,CAAC,KAAK,SAAS,MAAM,aAAa,GAAG,MAAM,EACrE,QAAQ,EAAE,KAAK,EACf,OAAO,EAAE,aAAa,CAAC,KAAK,CAAC,EAC7B,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,GAAG,CAAC,CAAA;AACf,wBAAgB,YAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,OAAO,EAAE,mBAAmB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAA;AAU5G,QAAA,MAAM,QAAQ;iBApFiB,OAAO,CAAC,KAAK,CAAC;iBAkDd,OAAO,CAAC,IAAI,CAAC;;oBArEhB,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC;;;oBAYxB,MAAM,KAAG,OAAO,CAAC,GAAG,GAAG,IAAI,CAAC;6KAEM,OAAO,CAAC,GAAG,EAAE,CAAC;uBAGrD,KAAK;kBAgEV,MAAM;CAiC/B,CAAA;AAED,eAAe,QAAQ,CAAA"}
@@ -0,0 +1,9 @@
1
/** Handle returned by registerQueueListener; close() tears the listener down. */
type Closeable = {
    close: () => Promise<void>;
};
/** Options for the DB change listener. */
type QueueListenerOptions = {
    /** Maximum reconnect attempts before the process exits (default 20). */
    maxRetries?: number;
};
/** Starts the Mongo change-stream listener that feeds DB events to the queue. */
export declare const registerQueueListener: (options?: QueueListenerOptions) => Promise<Closeable>;
export {};
//# sourceMappingURL=queueListener.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"queueListener.d.ts","sourceRoot":"","sources":["../src/queueListener.ts"],"names":[],"mappings":"AAMA,KAAK,SAAS,GAAG;IACf,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAA;CAC3B,CAAA;AAED,KAAK,oBAAoB,GAAG;IAC1B,UAAU,CAAC,EAAE,MAAM,CAAA;CACpB,CAAA;AA4FD,eAAO,MAAM,qBAAqB,GAAU,UAAS,oBAAyB,KAAG,OAAO,CAAC,SAAS,CAmLjG,CAAA"}
@@ -0,0 +1,3 @@
1
/**
 * Map from task name to payload type, consumed by the typed overloads of
 * registerTask/add/scheduleTask. Empty here; consumers are presumably meant
 * to extend it via declaration merging — confirm against package docs.
 */
export interface WorkerTaskMap {
}
//# sourceMappingURL=taskMap.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"taskMap.d.ts","sourceRoot":"","sources":["../src/taskMap.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,aAAa;CAAG"}
@@ -0,0 +1,8 @@
1
/** Operations that can be surfaced as DB-event tasks ("replace" maps to "update"). */
export type DbEventOp = "insert" | "update";
/** Template for DB-event task names, e.g. "on-insert-User". */
export type DbEventTaskName<TOp extends DbEventOp = DbEventOp, TModelName extends string = string> = `on-${TOp}-${TModelName}`;
/** Payload delivered to DB-event task handlers. */
export type DbEventTaskPayload<TDoc = unknown, TUpdateDescription = unknown> = {
    doc: TDoc;
    updateDescription?: TUpdateDescription;
};
/** Builds the canonical DB-event task name for an op/model pair. */
export declare const dbEventTaskName: <TOp extends DbEventOp, TModelName extends string>(op: TOp, modelName: TModelName) => DbEventTaskName<TOp, TModelName>;
//# sourceMappingURL=taskNames.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"taskNames.d.ts","sourceRoot":"","sources":["../src/taskNames.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,SAAS,GAAG,QAAQ,GAAG,QAAQ,CAAA;AAE3C,MAAM,MAAM,eAAe,CAAC,GAAG,SAAS,SAAS,GAAG,SAAS,EAAE,UAAU,SAAS,MAAM,GAAG,MAAM,IAC/F,MAAM,GAAG,IAAI,UAAU,EAAE,CAAA;AAE3B,MAAM,MAAM,kBAAkB,CAAC,IAAI,GAAG,OAAO,EAAE,kBAAkB,GAAG,OAAO,IAAI;IAC7E,GAAG,EAAE,IAAI,CAAA;IACT,iBAAiB,CAAC,EAAE,kBAAkB,CAAA;CACvC,CAAA;AAED,eAAO,MAAM,eAAe,GAAI,GAAG,SAAS,SAAS,EAAE,UAAU,SAAS,MAAM,EAC9E,IAAI,GAAG,EACP,WAAW,UAAU,KACpB,eAAe,CAAC,GAAG,EAAE,UAAU,CAA4B,CAAA"}
package/package.json ADDED
@@ -0,0 +1,77 @@
1
+ {
2
+ "name": "@rpcbase/worker",
3
+ "version": "0.0.0",
4
+ "type": "module",
5
+ "files": [
6
+ "dist"
7
+ ],
8
+ "main": "./dist/index.js",
9
+ "types": "./dist/index.d.ts",
10
+ "exports": {
11
+ ".": {
12
+ "types": "./dist/index.d.ts",
13
+ "import": "./dist/index.js",
14
+ "default": "./dist/index.js"
15
+ }
16
+ },
17
+ "scripts": {
18
+ "build": "wireit",
19
+ "test": "wireit",
20
+ "release": "wireit"
21
+ },
22
+ "wireit": {
23
+ "build": {
24
+ "command": "../../node_modules/.bin/vite build",
25
+ "files": [
26
+ "src/**/*",
27
+ "../../tsconfig.json",
28
+ "../../scripts/tsconfig.pkg.json",
29
+ "./tsconfig.json",
30
+ "./vite.config.js",
31
+ "../../scripts/vite.config-pkg.js"
32
+ ],
33
+ "output": [
34
+ "dist/"
35
+ ]
36
+ },
37
+ "build-watch": {
38
+ "command": "../../node_modules/.bin/vite build --watch",
39
+ "service": true
40
+ },
41
+ "test": {
42
+ "command": "../../node_modules/.bin/vitest run --config ../../pkg/test/src/vitest.config.mjs --passWithNoTests",
43
+ "files": [
44
+ "src/**/*",
45
+ "tsconfig.json",
46
+ "../../scripts/tsconfig.pkg.json"
47
+ ],
48
+ "dependencies": []
49
+ },
50
+ "release": {
51
+ "command": "../../scripts/publish.js",
52
+ "dependencies": [
53
+ "build"
54
+ ],
55
+ "files": [
56
+ "../../scripts/publish.js",
57
+ "package.json",
58
+ "dist/**/*"
59
+ ],
60
+ "output": [
61
+ "publish-output.txt"
62
+ ],
63
+ "env": {
64
+ "NPM_RELEASE_CHANNEL": {
65
+ "external": true
66
+ }
67
+ }
68
+ }
69
+ },
70
+ "peerDependencies": {
71
+ "mongoose": "^9"
72
+ },
73
+ "dependencies": {
74
+ "bullmq": "5.66.1",
75
+ "mongodb": "7.0.0"
76
+ }
77
+ }