@package-broker/adapter-node 0.2.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +39 -0
- package/src/drivers/fs-driver.ts +64 -0
- package/src/drivers/redis-driver.ts +65 -0
- package/src/drivers/sqlite-driver.ts +23 -0
- package/src/index.ts +118 -0
- package/src/mocks/cloudflare-workers.ts +3 -0
- package/tsconfig.json +28 -0
- package/tsup.config.ts +20 -0
package/package.json
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@package-broker/adapter-node",
|
|
3
|
+
"version": "0.2.15",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"keywords": [],
|
|
6
|
+
"scripts": {
|
|
7
|
+
"lint": "echo 'no linting configured'",
|
|
8
|
+
"typecheck": "tsc --noEmit",
|
|
9
|
+
"dev": "tsx watch src/index.ts",
|
|
10
|
+
"build": "tsup",
|
|
11
|
+
"start": "node dist/index.js",
|
|
12
|
+
"clean": "rm -rf dist"
|
|
13
|
+
},
|
|
14
|
+
"dependencies": {
|
|
15
|
+
"@hono/node-server": "^1.11.1",
|
|
16
|
+
"@package-broker/core": "*",
|
|
17
|
+
"@package-broker/shared": "*",
|
|
18
|
+
"better-sqlite3": "^9.4.3",
|
|
19
|
+
"dotenv": "^16.4.5",
|
|
20
|
+
"drizzle-orm": "^0.29.0",
|
|
21
|
+
"hono": "^4.3.7",
|
|
22
|
+
"ioredis": "^5.4.1"
|
|
23
|
+
},
|
|
24
|
+
"devDependencies": {
|
|
25
|
+
"@types/better-sqlite3": "^7.6.9",
|
|
26
|
+
"@types/node": "^20.12.7",
|
|
27
|
+
"tsup": "^8.5.1",
|
|
28
|
+
"tsx": "^4.7.2",
|
|
29
|
+
"typescript": "^5.4.5"
|
|
30
|
+
},
|
|
31
|
+
"repository": {
|
|
32
|
+
"type": "git",
|
|
33
|
+
"url": "https://github.com/package-broker/server",
|
|
34
|
+
"directory": "packages/adapter-node"
|
|
35
|
+
},
|
|
36
|
+
"publishConfig": {
|
|
37
|
+
"access": "public"
|
|
38
|
+
}
|
|
39
|
+
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
|
|
2
|
+
import fs from 'node:fs/promises';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import { createReadStream, createWriteStream } from 'node:fs';
|
|
5
|
+
import { type StorageDriver } from '@package-broker/core';
|
|
6
|
+
import { Readable } from 'node:stream';
|
|
7
|
+
import { pipeline } from 'node:stream/promises';
|
|
8
|
+
|
|
9
|
+
export class FileSystemDriver implements StorageDriver {
|
|
10
|
+
private basePath: string;
|
|
11
|
+
|
|
12
|
+
constructor(basePath: string) {
|
|
13
|
+
this.basePath = basePath;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
private getPath(key: string): string {
|
|
17
|
+
// Prevent directory traversal
|
|
18
|
+
const safeKey = key.replace(/\.\./g, '');
|
|
19
|
+
return path.join(this.basePath, safeKey);
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
async get(key: string): Promise<ReadableStream | null> {
|
|
23
|
+
const filePath = this.getPath(key);
|
|
24
|
+
try {
|
|
25
|
+
await fs.access(filePath);
|
|
26
|
+
// Convert Node.js Readable to Web ReadableStream
|
|
27
|
+
const nodeStream = createReadStream(filePath);
|
|
28
|
+
return Readable.toWeb(nodeStream) as unknown as ReadableStream;
|
|
29
|
+
} catch {
|
|
30
|
+
return null;
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
async put(key: string, data: ReadableStream | ArrayBuffer | Uint8Array): Promise<void> {
|
|
35
|
+
const filePath = this.getPath(key);
|
|
36
|
+
const dir = path.dirname(filePath);
|
|
37
|
+
await fs.mkdir(dir, { recursive: true });
|
|
38
|
+
|
|
39
|
+
if (data instanceof ReadableStream) {
|
|
40
|
+
const nodeStream = Readable.fromWeb(data as any);
|
|
41
|
+
const writeStream = createWriteStream(filePath);
|
|
42
|
+
await pipeline(nodeStream, writeStream);
|
|
43
|
+
} else {
|
|
44
|
+
await fs.writeFile(filePath, Buffer.from(data as any));
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
async delete(key: string): Promise<void> {
|
|
49
|
+
try {
|
|
50
|
+
await fs.unlink(this.getPath(key));
|
|
51
|
+
} catch (e: any) {
|
|
52
|
+
if (e.code !== 'ENOENT') throw e;
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
async exists(key: string): Promise<boolean> {
|
|
57
|
+
try {
|
|
58
|
+
await fs.access(this.getPath(key));
|
|
59
|
+
return true;
|
|
60
|
+
} catch {
|
|
61
|
+
return false;
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
|
|
2
|
+
import { Redis } from 'ioredis';
|
|
3
|
+
import type { CachePort, QueuePort } from '@package-broker/core';
|
|
4
|
+
|
|
5
|
+
export class RedisDriver implements CachePort, QueuePort {
|
|
6
|
+
private redis: Redis;
|
|
7
|
+
|
|
8
|
+
constructor(url: string) {
|
|
9
|
+
this.redis = new Redis(url);
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
// CachePort implementation
|
|
13
|
+
async get(key: string): Promise<string | null> {
|
|
14
|
+
return this.redis.get(key);
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
async getJson<T>(key: string): Promise<T | null> {
|
|
18
|
+
const value = await this.redis.get(key);
|
|
19
|
+
if (!value) return null;
|
|
20
|
+
try {
|
|
21
|
+
return JSON.parse(value);
|
|
22
|
+
} catch {
|
|
23
|
+
return null;
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
async put(key: string, value: string | ReadableStream | ArrayBuffer | FormData, options?: { expirationTtl?: number }): Promise<void> {
|
|
28
|
+
let stringValue: string;
|
|
29
|
+
|
|
30
|
+
if (typeof value === 'string') {
|
|
31
|
+
stringValue = value;
|
|
32
|
+
} else {
|
|
33
|
+
// Fallback for complex types - stringify if object/generic, or warn
|
|
34
|
+
try {
|
|
35
|
+
stringValue = JSON.stringify(value);
|
|
36
|
+
} catch {
|
|
37
|
+
stringValue = String(value);
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
if (options?.expirationTtl) {
|
|
42
|
+
await this.redis.set(key, stringValue, 'EX', options.expirationTtl);
|
|
43
|
+
} else {
|
|
44
|
+
await this.redis.set(key, stringValue);
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
async delete(key: string): Promise<void> {
|
|
49
|
+
await this.redis.del(key);
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// QueuePort implementation
|
|
53
|
+
async send(message: any): Promise<void> {
|
|
54
|
+
await this.redis.rpush('jobs_queue', JSON.stringify(message));
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
async sendBatch(messages: any[]): Promise<void> {
|
|
58
|
+
if (messages.length === 0) return;
|
|
59
|
+
const pipeline = this.redis.pipeline();
|
|
60
|
+
for (const msg of messages) {
|
|
61
|
+
pipeline.rpush('jobs_queue', JSON.stringify(msg));
|
|
62
|
+
}
|
|
63
|
+
await pipeline.exec();
|
|
64
|
+
}
|
|
65
|
+
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
|
|
2
|
+
import Database from 'better-sqlite3';
|
|
3
|
+
import { drizzle } from 'drizzle-orm/better-sqlite3';
|
|
4
|
+
import { migrate } from 'drizzle-orm/better-sqlite3/migrator';
|
|
5
|
+
import { schema, type DatabasePort } from '@package-broker/core';
|
|
6
|
+
|
|
7
|
+
export function createSqliteDatabase(dbPath: string): DatabasePort {
|
|
8
|
+
const sqlite = new Database(dbPath);
|
|
9
|
+
const db = drizzle(sqlite, { schema });
|
|
10
|
+
|
|
11
|
+
// Create method to satisfy the DatabasePort interface if strictly typed,
|
|
12
|
+
// but usually generic ORM usages work fine.
|
|
13
|
+
// Note: DatabasePort in core is currently aliased to DrizzleD1Database<typeof schema>.
|
|
14
|
+
// We need to make sure core/db/index.ts types are flexible enough.
|
|
15
|
+
// Ideally, core should export a generic Drizzle type.
|
|
16
|
+
|
|
17
|
+
return db as any;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/**
 * Applies pending Drizzle migrations from `migrationsFolder` to `db`.
 * Intended for one-shot use at process startup; in production deployments
 * migrations are typically run as a separate step instead.
 */
export async function migrateSqliteDatabase(db: ReturnType<typeof drizzle>, migrationsFolder: string) {
  // This helper runs migrations on startup
  await migrate(db, { migrationsFolder });
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
|
|
2
|
+
import { serve } from '@hono/node-server';
|
|
3
|
+
import { createApp, type AppInstance } from '@package-broker/core';
|
|
4
|
+
import { config } from 'dotenv';
|
|
5
|
+
import { createSqliteDatabase, migrateSqliteDatabase } from './drivers/sqlite-driver.js';
|
|
6
|
+
import { FileSystemDriver } from './drivers/fs-driver.js';
|
|
7
|
+
import { RedisDriver } from './drivers/redis-driver.js';
|
|
8
|
+
import { MemoryCacheDriver, MemoryQueueDriver } from '@package-broker/core';
|
|
9
|
+
import path from 'node:path';
|
|
10
|
+
import { fileURLToPath } from 'node:url';
|
|
11
|
+
import { serveStatic } from '@hono/node-server/serve-static';
|
|
12
|
+
import { readFile } from 'node:fs/promises';
|
|
13
|
+
import type { Context, Next } from 'hono';
|
|
14
|
+
|
|
15
|
+
// Load environment variables
config();

// Resolve this module's directory (ESM provides no built-in __dirname).
// NOTE(review): __dirname is currently unused in this module — confirm
// whether it is still needed.
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Configuration
// Every driver/value is overridable via environment variables; the defaults
// give a zero-config local setup (SQLite + filesystem storage + in-memory
// cache and queue).
const PORT = Number(process.env.PORT) || 3000;
const DB_DRIVER = process.env.DB_DRIVER || 'sqlite';
const DB_URL = process.env.DB_URL || 'db.sqlite';
const STORAGE_DRIVER = process.env.STORAGE_DRIVER || 'fs';
const STORAGE_PATH = process.env.STORAGE_PATH || './storage';
const CACHE_DRIVER = process.env.CACHE_DRIVER || 'memory';
// CACHE_URL also serves as the Redis queue connection string when
// QUEUE_DRIVER=redis (see start()).
const CACHE_URL = process.env.CACHE_URL || 'redis://localhost:6379';
const QUEUE_DRIVER = process.env.QUEUE_DRIVER || 'memory';

console.log('Starting PACKAGE.broker Node Adapter...');
console.log(`Configuration: DB=${DB_DRIVER}, STORAGE=${STORAGE_DRIVER}, CACHE=${CACHE_DRIVER}, QUEUE=${QUEUE_DRIVER}`);
|
|
32
|
+
|
|
33
|
+
async function start() {
|
|
34
|
+
// Initialize Drivers
|
|
35
|
+
let database;
|
|
36
|
+
if (DB_DRIVER === 'sqlite') {
|
|
37
|
+
console.log(`Initializing SQLite at ${DB_URL}`);
|
|
38
|
+
database = createSqliteDatabase(DB_URL);
|
|
39
|
+
// Auto-migrate on start (simplified for MVP)
|
|
40
|
+
// In real deployment, migrations should be separate step
|
|
41
|
+
} else {
|
|
42
|
+
throw new Error(`Unsupported DB_DRIVER: ${DB_DRIVER}`);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
let storage;
|
|
46
|
+
if (STORAGE_DRIVER === 'fs') {
|
|
47
|
+
console.log(`Initializing FS Storage at ${STORAGE_PATH}`);
|
|
48
|
+
storage = new FileSystemDriver(STORAGE_PATH);
|
|
49
|
+
} else {
|
|
50
|
+
// TODO: Add S3 support
|
|
51
|
+
throw new Error(`Unsupported STORAGE_DRIVER: ${STORAGE_DRIVER}`);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
let cache;
|
|
55
|
+
if (CACHE_DRIVER === 'redis') {
|
|
56
|
+
console.log(`Initializing Redis Cache at ${CACHE_URL}`);
|
|
57
|
+
cache = new RedisDriver(CACHE_URL);
|
|
58
|
+
} else {
|
|
59
|
+
console.log('Initializing Memory Cache');
|
|
60
|
+
cache = new MemoryCacheDriver();
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
let queue;
|
|
64
|
+
if (QUEUE_DRIVER === 'redis') {
|
|
65
|
+
if (CACHE_DRIVER === 'redis') {
|
|
66
|
+
queue = cache as any; // RedisDriver implements both
|
|
67
|
+
} else {
|
|
68
|
+
console.log(`Initializing Redis Queue at ${CACHE_URL}`);
|
|
69
|
+
queue = new RedisDriver(CACHE_URL);
|
|
70
|
+
}
|
|
71
|
+
} else {
|
|
72
|
+
console.log('Initializing Memory Queue');
|
|
73
|
+
queue = new MemoryQueueDriver();
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
// Create App
|
|
77
|
+
const app = createApp({
|
|
78
|
+
database,
|
|
79
|
+
storage,
|
|
80
|
+
onInit: (appInstance: AppInstance) => {
|
|
81
|
+
// Inject non-standard drivers if needed or custom middleware
|
|
82
|
+
appInstance.use('*', async (c: Context, next: Next) => {
|
|
83
|
+
// We already passed database/storage to createApp, but we can set extra vars here
|
|
84
|
+
// Note: createApp factory handles database/storage injection if passed in options
|
|
85
|
+
await next();
|
|
86
|
+
});
|
|
87
|
+
|
|
88
|
+
// Serve config.js dynamically
|
|
89
|
+
app.get('/config.js', (c: Context) => {
|
|
90
|
+
return c.text(`window.env = { API_URL: "${process.env.API_URL || '/'}" };`, 200, {
|
|
91
|
+
'Content-Type': 'application/javascript',
|
|
92
|
+
});
|
|
93
|
+
});
|
|
94
|
+
|
|
95
|
+
if (process.env.PUBLIC_DIR) {
|
|
96
|
+
console.log(`Serving static files from ${process.env.PUBLIC_DIR}`);
|
|
97
|
+
app.use('/*', serveStatic({ root: process.env.PUBLIC_DIR }));
|
|
98
|
+
|
|
99
|
+
// SPA Fallback
|
|
100
|
+
app.get('*', async (c: Context) => {
|
|
101
|
+
try {
|
|
102
|
+
return c.html(await readFile(path.join(process.env.PUBLIC_DIR!, 'index.html'), 'utf-8'));
|
|
103
|
+
} catch (e) {
|
|
104
|
+
return c.text('Not Found', 404);
|
|
105
|
+
}
|
|
106
|
+
});
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
});
|
|
110
|
+
|
|
111
|
+
console.log(`Server listening on port ${PORT}`);
|
|
112
|
+
serve({
|
|
113
|
+
fetch: app.fetch,
|
|
114
|
+
port: PORT
|
|
115
|
+
});
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
// Kick off the server; surface any fatal startup error on the console
// rather than failing with an unhandled rejection.
start().catch(console.error);
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
{
|
|
2
|
+
"extends": "../../tsconfig.json",
|
|
3
|
+
"compilerOptions": {
|
|
4
|
+
"target": "ES2022",
|
|
5
|
+
"module": "ESNext",
|
|
6
|
+
"moduleResolution": "bundler",
|
|
7
|
+
"outDir": "./dist",
|
|
8
|
+
"types": [
|
|
9
|
+
"node"
|
|
10
|
+
],
|
|
11
|
+
"baseUrl": ".",
|
|
12
|
+
"paths": {
|
|
13
|
+
"@package-broker/core": [
|
|
14
|
+
"../core/src"
|
|
15
|
+
],
|
|
16
|
+
"@package-broker/shared": [
|
|
17
|
+
"../shared/src"
|
|
18
|
+
]
|
|
19
|
+
}
|
|
20
|
+
},
|
|
21
|
+
"include": [
|
|
22
|
+
"src/**/*"
|
|
23
|
+
],
|
|
24
|
+
"exclude": [
|
|
25
|
+
"node_modules",
|
|
26
|
+
"**/*.test.ts"
|
|
27
|
+
]
|
|
28
|
+
}
|
package/tsup.config.ts
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { defineConfig } from 'tsup';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
|
|
4
|
+
// esbuild plugin that redirects any import of the 'cloudflare:workers'
// builtin — presumably pulled in by one of the bundled workspace packages —
// to a local no-op mock so the resulting bundle can run under plain Node.
const cloudflarePlugin = {
  name: 'cloudflare-workers-mock',
  setup(build) {
    build.onResolve({ filter: /^cloudflare:workers$/ }, args => {
      // NOTE(review): __dirname assumes the config executes as CJS, but
      // package.json sets "type": "module" — confirm tsup still provides it.
      return { path: path.resolve(__dirname, 'src/mocks/cloudflare-workers.ts') }
    });
  },
};
|
|
12
|
+
|
|
13
|
+
// Bundle the server entry as ESM targeting Node 20. The workspace packages
// are inlined via noExternal so the published artifact does not depend on
// unpublished workspace references at runtime.
export default defineConfig({
  entry: ['src/index.ts'],
  format: ['esm'],
  target: 'node20',
  noExternal: ['@package-broker/core', '@package-broker/shared'],
  clean: true,
  esbuildPlugins: [cloudflarePlugin],
});
|