@package-broker/main 0.2.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts ADDED
@@ -0,0 +1,45 @@
+ import { PackageStorageWorkflow } from '@package-broker/core';
+ export { PackageStorageWorkflow };
+ export interface WorkerConfig {
+ storage: 'r2' | 's3';
+ s3Config?: {
+ endpoint: string;
+ region: string;
+ accessKeyId: string;
+ secretAccessKey: string;
+ bucket: string;
+ };
+ }
+ export interface Env {
+ DB: D1Database;
+ KV?: KVNamespace;
+ QUEUE?: Queue;
+ PACKAGE_STORAGE_WORKFLOW?: Workflow;
+ R2_BUCKET: R2Bucket;
+ ASSETS?: Fetcher;
+ ENCRYPTION_KEY: string;
+ ADMIN_TOKEN?: string;
+ INITIAL_ADMIN_EMAIL?: string;
+ INITIAL_ADMIN_PASSWORD?: string;
+ S3_ENDPOINT?: string;
+ S3_REGION?: string;
+ S3_ACCESS_KEY_ID?: string;
+ S3_SECRET_ACCESS_KEY?: string;
+ S3_BUCKET?: string;
+ LOG_LEVEL?: 'debug' | 'info' | 'warn' | 'error';
+ ANALYTICS?: AnalyticsEngineDataset;
+ SMTP_HOST?: string;
+ SMTP_PORT?: string;
+ SMTP_USER?: string;
+ SMTP_PASS?: string;
+ SMTP_FROM?: string;
+ }
+ /**
+ * Create the PACKAGE.broker worker
+ */
+ export declare function createWorker(config?: WorkerConfig, env?: Env): import("@package-broker/core").AppInstance;
+ declare const _default: {
+ fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response>;
+ };
+ export default _default;
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAUA,OAAO,EAqDL,sBAAsB,EAIvB,MAAM,sBAAsB,CAAC;AAG9B,OAAO,EAAE,sBAAsB,EAAE,CAAC;AAElC,MAAM,WAAW,YAAY;IAC3B,OAAO,EAAE,IAAI,GAAG,IAAI,CAAC;IACrB,QAAQ,CAAC,EAAE;QACT,QAAQ,EAAE,MAAM,CAAC;QACjB,MAAM,EAAE,MAAM,CAAC;QACf,WAAW,EAAE,MAAM,CAAC;QACpB,eAAe,EAAE,MAAM,CAAC;QACxB,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;CACH;AAED,MAAM,WAAW,GAAG;IAClB,EAAE,EAAE,UAAU,CAAC;IACf,EAAE,CAAC,EAAE,WAAW,CAAC;IACjB,KAAK,CAAC,EAAE,KAAK,CAAC;IACd,wBAAwB,CAAC,EAAE,QAAQ,CAAC;IACpC,SAAS,EAAE,QAAQ,CAAC;IACpB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,cAAc,EAAE,MAAM,CAAC;IACvB,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAEhC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB,SAAS,CAAC,EAAE,OAAO,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;IAEhD,SAAS,CAAC,EAAE,sBAAsB,CAAC;IAEnC,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;GAEG;AACH,wBAAgB,YAAY,CAAC,MAAM,GAAE,YAAgC,EAAE,GAAG,CAAC,EAAE,GAAG,8CA6E/E;;mBAMsB,OAAO,OAAO,GAAG,OAAO,gBAAgB,GAAG,OAAO,CAAC,QAAQ,CAAC;;AADnF,wBAeE"}
package/dist/index.js ADDED
@@ -0,0 +1,94 @@
+ /*
+ * PACKAGE.broker
+ * Copyright (C) 2025 Łukasz Bajsarowicz
+ * Licensed under AGPL-3.0
+ */
+ import { R2Driver, S3Driver, getLogger, initAnalytics, PackageStorageWorkflow, createD1Database, createApp, } from '@package-broker/core';
+ // Re-export the Workflow class for Cloudflare to find it
+ export { PackageStorageWorkflow };
+ /**
+ * Create the PACKAGE.broker worker
+ */
+ export function createWorker(config = { storage: 'r2' }, env) {
+ // Initialize logger with log level from environment
+ const logLevel = (env?.LOG_LEVEL || 'info');
+ // Logger initialization is side-effect, handled by getLogger singleton, can be configured globally
+ getLogger(logLevel);
+ // Initialize analytics
+ initAnalytics(env?.ANALYTICS);
+ // Define drivers initialization logic
+ // Since createApp supports injection, we'll wrap it
+ const app = createApp({
+ onInit: (app) => {
+ // Check for API token
+ // (Auth middleware handles this, but here we can add platform-specific middleware if needed)
+ // Database middleware
+ app.use('*', async (c, next) => {
+ if (c.env.DB) {
+ c.set('database', createD1Database(c.env.DB));
+ }
+ await next();
+ });
+ // Storage middleware
+ app.use('*', async (c, next) => {
+ if (config.storage === 's3' && config.s3Config) {
+ c.set('storage', new S3Driver(config.s3Config));
+ }
+ else if (c.env.S3_ENDPOINT) {
+ // Use environment variables for S3
+ c.set('storage', new S3Driver({
+ endpoint: c.env.S3_ENDPOINT,
+ region: c.env.S3_REGION || 'auto',
+ accessKeyId: c.env.S3_ACCESS_KEY_ID || '',
+ secretAccessKey: c.env.S3_SECRET_ACCESS_KEY || '',
+ bucket: c.env.S3_BUCKET || '',
+ }));
+ }
+ else {
+ // Default to R2
+ c.set('storage', new R2Driver({ bucket: c.env.R2_BUCKET }));
+ }
+ await next();
+ });
+ // Additional Cloudflare bindings (KV, Queue) can be added here if factory supports them being absent
+ // The current factory implementation assumes they are passed in Bindings or handled by specific routes
+ }
+ });
+ // Serve static assets (UI)
+ // Register this AFTER createApp so it doesn't shadow API routes
+ app.get('*', async (c) => {
+ if (c.env.ASSETS) {
+ // Try to serve the asset
+ const response = await c.env.ASSETS.fetch(c.req.raw);
+ if (response.status < 400) {
+ return response;
+ }
+ // SPA Fallback: If 404 and accepting html, serve index.html
+ // safely check accept header
+ const accept = c.req.header('accept');
+ if (response.status === 404 && accept && accept.includes('text/html')) {
+ const indexResponse = await c.env.ASSETS.fetch(new URL('/index.html', c.req.url).toString(), c.req.raw);
+ return indexResponse;
+ }
+ return response;
+ }
+ return c.text('UI Assets not available (ASSETS binding missing)', 404);
+ });
+ return app;
+ }
+ // Export default worker for Cloudflare
+ export default {
+ async fetch(request, env, ctx) {
+ // Initialize worker and logger
+ const logLevel = (env?.LOG_LEVEL || 'info');
+ const logger = getLogger(logLevel);
+ // Log worker initialization (only once per worker instance, but helpful for debugging)
+ logger.debug('Worker processing request', {
+ method: request.method,
+ url: request.url,
+ });
+ const app = createWorker({ storage: 'r2' }, env);
+ return app.fetch(request, env, ctx);
+ },
+ };
+ //# sourceMappingURL=index.js.map
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAMH,OAAO,EAgDL,QAAQ,EACR,QAAQ,EAER,SAAS,EACT,aAAa,EACb,sBAAsB,EACtB,gBAAgB,EAEhB,SAAS,GACV,MAAM,sBAAsB,CAAC;AAE9B,yDAAyD;AACzD,OAAO,EAAE,sBAAsB,EAAE,CAAC;AA2ClC;;GAEG;AACH,MAAM,UAAU,YAAY,CAAC,SAAuB,EAAE,OAAO,EAAE,IAAI,EAAE,EAAE,GAAS;IAC9E,oDAAoD;IACpD,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE,SAAS,IAAI,MAAM,CAAwC,CAAC;IACnF,mGAAmG;IACnG,SAAS,CAAC,QAAQ,CAAC,CAAC;IAEpB,uBAAuB;IACvB,aAAa,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC;IAE9B,sCAAsC;IACtC,oDAAoD;IAEpD,MAAM,GAAG,GAAG,SAAS,CAAC;QACpB,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE;YACd,sBAAsB;YACtB,6FAA6F;YAE7F,sBAAsB;YACtB,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,EAAE;gBAC7B,IAAI,CAAC,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC;oBACb,CAAC,CAAC,GAAG,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;gBAChD,CAAC;gBACD,MAAM,IAAI,EAAE,CAAC;YACf,CAAC,CAAC,CAAC;YAEH,qBAAqB;YACrB,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,EAAE;gBAC7B,IAAI,MAAM,CAAC,OAAO,KAAK,IAAI,IAAI,MAAM,CAAC,QAAQ,EAAE,CAAC;oBAC/C,CAAC,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,QAAQ,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAClD,CAAC;qBAAM,IAAI,CAAC,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC;oBAC7B,mCAAmC;oBACnC,CAAC,CAAC,GAAG,CACH,SAAS,EACT,IAAI,QAAQ,CAAC;wBACX,QAAQ,EAAE,CAAC,CAAC,GAAG,CAAC,WAAW;wBAC3B,MAAM,EAAE,CAAC,CAAC,GAAG,CAAC,SAAS,IAAI,MAAM;wBACjC,WAAW,EAAE,CAAC,CAAC,GAAG,CAAC,gBAAgB,IAAI,EAAE;wBACzC,eAAe,EAAE,CAAC,CAAC,GAAG,CAAC,oBAAoB,IAAI,EAAE;wBACjD,MAAM,EAAE,CAAC,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE;qBAC9B,CAAC,CACH,CAAC;gBACJ,CAAC;qBAAM,CAAC;oBACN,gBAAgB;oBAChB,CAAC,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,GAAG,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;gBAC9D,CAAC;gBACD,MAAM,IAAI,EAAE,CAAC;YACf,CAAC,CAAC,CAAC;YAEH,qGAAqG;YACrG,uGAAuG;QACzG,CAAC;KACF,CAAC,CAAC;IAEH,2BAA2B;IAC3B,gEAAgE;IAChE,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QACvB,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC;YACjB,yBAAyB;YACzB,MAAM,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACrD,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,EAAE,CAAC;gBAC1B,OAAO,QAAQ,CAAC;YAClB,CAAC;YAED,4DAA4D;YAC5D,6BAA6B;YAC7B,MAAM,MAAM,GAAG,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;YACtC,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,IAAI,MAAM,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;gBACtE,MAAM,aAAa,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,aAAa,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;gBACxG,OAAO,aAAa,CAAC;YACvB,CAAC;YAED,OAAO,QAAQ,CAAC;QAClB,CAAC;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,kDAAkD,EAAE,GAAG,CAAC,CAAC;IACzE,CAAC,CAAC,CAAC;IAEH,OAAO,GAAG,CAAC;AACb,CAAC;AAID,uCAAuC;AACvC,eAAe;IACb,KAAK,CAAC,KAAK,CAAC,OAAgB,EAAE,GAAQ,EAAE,GAAqB;QAC3D,+BAA+B;QAC/B,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE,SAAS,IAAI,MAAM,CAAwC,CAAC;QACnF,MAAM,MAAM,GAAG,SAAS,CAAC,QAAQ,CAAC,CAAC;QAEnC,uFAAuF;QACvF,MAAM,CAAC,KAAK,CAAC,2BAA2B,EAAE;YACxC,MAAM,EAAE,OAAO,CAAC,MAAM;YACtB,GAAG,EAAE,OAAO,CAAC,GAAG;SACjB,CAAC,CAAC;QAEH,MAAM,GAAG,GAAG,YAAY,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC;QACjD,OAAO,GAAG,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IACtC,CAAC;CACF,CAAC"}
@@ -0,0 +1,62 @@
+ -- Create repositories table
+ CREATE TABLE IF NOT EXISTS repositories (
+ id TEXT PRIMARY KEY,
+ url TEXT NOT NULL,
+ vcs_type TEXT NOT NULL,
+ credential_type TEXT NOT NULL,
+ auth_credentials TEXT NOT NULL,
+ composer_json_path TEXT,
+ status TEXT DEFAULT 'pending',
+ error_message TEXT,
+ last_synced_at INTEGER,
+ created_at INTEGER NOT NULL
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_repositories_status ON repositories(status);
+ CREATE INDEX IF NOT EXISTS idx_repositories_vcs_type ON repositories(vcs_type);
+
+ -- Create tokens table
+ CREATE TABLE IF NOT EXISTS tokens (
+ id TEXT PRIMARY KEY,
+ description TEXT NOT NULL,
+ token_hash TEXT NOT NULL,
+ rate_limit_max INTEGER DEFAULT 1000,
+ created_at INTEGER NOT NULL,
+ expires_at INTEGER,
+ last_used_at INTEGER
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_tokens_token_hash ON tokens(token_hash);
+
+ -- Create packages table
+ CREATE TABLE IF NOT EXISTS packages (
+ id TEXT PRIMARY KEY,
+ repo_id TEXT NOT NULL REFERENCES repositories(id) ON DELETE CASCADE,
+ name TEXT NOT NULL,
+ version TEXT NOT NULL,
+ dist_url TEXT NOT NULL,
+ released_at INTEGER,
+ readme_content TEXT,
+ created_at INTEGER NOT NULL,
+ UNIQUE(name, version)
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_packages_repo_id ON packages(repo_id);
+ CREATE INDEX IF NOT EXISTS idx_packages_name ON packages(name);
+
+ -- Create artifacts table
+ CREATE TABLE IF NOT EXISTS artifacts (
+ id TEXT PRIMARY KEY,
+ repo_id TEXT NOT NULL REFERENCES repositories(id) ON DELETE CASCADE,
+ package_name TEXT NOT NULL,
+ version TEXT NOT NULL,
+ file_key TEXT,
+ size INTEGER,
+ download_count INTEGER DEFAULT 0,
+ last_downloaded_at INTEGER,
+ created_at INTEGER NOT NULL,
+ UNIQUE(repo_id, package_name, version)
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_artifacts_repo_id ON artifacts(repo_id);
+ CREATE INDEX IF NOT EXISTS idx_artifacts_package_name ON artifacts(package_name);
@@ -0,0 +1,89 @@
+ -- Initial schema migration
+
+ -- Repositories table
+ CREATE TABLE IF NOT EXISTS repositories (
+ id TEXT PRIMARY KEY,
+ url TEXT NOT NULL,
+ vcs_type TEXT NOT NULL,
+ credential_type TEXT NOT NULL,
+ auth_credentials TEXT NOT NULL,
+ composer_json_path TEXT,
+ status TEXT DEFAULT 'pending',
+ error_message TEXT,
+ last_synced_at INTEGER,
+ created_at INTEGER NOT NULL
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_repositories_status ON repositories(status);
+ CREATE INDEX IF NOT EXISTS idx_repositories_last_synced ON repositories(last_synced_at);
+ CREATE INDEX IF NOT EXISTS idx_repositories_credential_type ON repositories(credential_type);
+ CREATE INDEX IF NOT EXISTS idx_repositories_vcs_type ON repositories(vcs_type);
+
+ -- Tokens table
+ CREATE TABLE IF NOT EXISTS tokens (
+ id TEXT PRIMARY KEY,
+ description TEXT NOT NULL,
+ token_hash TEXT NOT NULL,
+ rate_limit_max INTEGER DEFAULT 1000,
+ created_at INTEGER NOT NULL,
+ expires_at INTEGER,
+ last_used_at INTEGER
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_tokens_token_hash ON tokens(token_hash);
+
+ -- Artifacts table
+ CREATE TABLE IF NOT EXISTS artifacts (
+ id TEXT PRIMARY KEY,
+ repo_id TEXT NOT NULL REFERENCES repositories(id) ON DELETE CASCADE,
+ package_name TEXT NOT NULL,
+ version TEXT NOT NULL,
+ file_key TEXT NOT NULL,
+ size INTEGER,
+ download_count INTEGER DEFAULT 0,
+ created_at INTEGER NOT NULL,
+ last_downloaded_at INTEGER
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_artifacts_repo_id ON artifacts(repo_id);
+ CREATE INDEX IF NOT EXISTS idx_artifacts_package_name ON artifacts(package_name);
+ CREATE INDEX IF NOT EXISTS idx_artifacts_last_downloaded ON artifacts(last_downloaded_at);
+ CREATE UNIQUE INDEX IF NOT EXISTS artifacts_repo_package_version ON artifacts(repo_id, package_name, version);
+
+ -- Packages table
+ CREATE TABLE IF NOT EXISTS packages (
+ id TEXT PRIMARY KEY,
+ repo_id TEXT NOT NULL REFERENCES repositories(id) ON DELETE CASCADE,
+ name TEXT NOT NULL,
+ version TEXT NOT NULL,
+ dist_url TEXT NOT NULL,
+ released_at INTEGER,
+ readme_content TEXT,
+ created_at INTEGER NOT NULL
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_packages_repo_id ON packages(repo_id);
+ CREATE INDEX IF NOT EXISTS idx_packages_name ON packages(name);
+ CREATE UNIQUE INDEX IF NOT EXISTS packages_name_version_unique ON packages(name, version);
+
+ -- Admin users table
+ -- Users table
+ CREATE TABLE IF NOT EXISTS users (
+ id TEXT PRIMARY KEY,
+ email TEXT NOT NULL UNIQUE,
+ password_hash TEXT NOT NULL,
+ role TEXT DEFAULT 'admin' NOT NULL,
+ status TEXT DEFAULT 'active' NOT NULL,
+ two_factor_secret TEXT,
+ two_factor_enabled BOOLEAN DEFAULT FALSE,
+ recovery_codes TEXT,
+ invite_token TEXT,
+ invite_expires_at INTEGER,
+ created_at INTEGER NOT NULL,
+ last_login_at INTEGER
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
+ CREATE INDEX IF NOT EXISTS idx_users_invite_token ON users(invite_token);
+
+
@@ -0,0 +1,6 @@
+ -- Add package_filter column for filtering packages to sync
+ -- Used by repositories with provider-includes (like Magento Marketplace)
+ -- Contains comma-separated list of package names to sync
+
+ ALTER TABLE repositories ADD COLUMN package_filter TEXT;
+
@@ -0,0 +1,2 @@
+ -- Add source_dist_url column to packages table for on-demand artifact mirroring
+ ALTER TABLE packages ADD COLUMN source_dist_url TEXT;
@@ -0,0 +1,2 @@
+ -- Add file_key column to artifacts table for R2/S3 storage key tracking
+ ALTER TABLE artifacts ADD COLUMN file_key TEXT;
@@ -0,0 +1,6 @@
+ -- Add package metadata fields: description, license, package_type, homepage
+ ALTER TABLE packages ADD COLUMN description TEXT;
+ ALTER TABLE packages ADD COLUMN license TEXT;
+ ALTER TABLE packages ADD COLUMN package_type TEXT;
+ ALTER TABLE packages ADD COLUMN homepage TEXT;
+
@@ -0,0 +1,6 @@
+ -- Add permissions column to tokens table
+ ALTER TABLE tokens ADD COLUMN permissions TEXT NOT NULL DEFAULT 'readonly';
+
+ -- Set all existing tokens to 'write' for backwards compatibility
+ UPDATE tokens SET permissions = 'write' WHERE permissions = 'readonly';
+
@@ -0,0 +1,2 @@
+ -- Add metadata column to store complete upstream package metadata as JSON
+ ALTER TABLE packages ADD COLUMN metadata TEXT;
package/package.json ADDED
@@ -0,0 +1,44 @@
+ {
+ "name": "@package-broker/main",
+ "version": "0.2.15",
+ "type": "module",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
+ "exports": {
+ ".": {
+ "types": "./dist/index.d.ts",
+ "import": "./dist/index.js",
+ "default": "./dist/index.js"
+ }
+ },
+ "files": [
+ "dist",
+ "migrations",
+ "wrangler.example.toml",
+ "package.json",
+ "README.md"
+ ],
+ "scripts": {
+ "lint": "echo 'no linting configured'",
+ "build": "tsc -p tsconfig.build.json",
+ "clean": "rm -rf dist",
+ "typecheck": "tsc --noEmit",
+ "prepublishOnly": "npm run clean && npm run build"
+ },
+ "dependencies": {
+ "@package-broker/core": "*",
+ "@package-broker/ui": "*"
+ },
+ "devDependencies": {
+ "typescript": "^5.3.3",
+ "@cloudflare/workers-types": "^4.20240125.0"
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/package-broker/server",
+ "directory": "packages/main"
+ },
+ "publishConfig": {
+ "access": "public"
+ }
+ }
package/wrangler.example.toml ADDED
@@ -0,0 +1,60 @@
+ # PACKAGE.broker - Example wrangler.toml
+ # Copy this file to wrangler.toml and fill in your values
+
+ name = "package-broker"
+ main = "packages/main/src/index.ts"
+ compatibility_date = "2024-09-23"
+ compatibility_flags = ["nodejs_compat"]
+
+ [vars]
+ # Generate with: openssl rand -base64 32
+ ENCRYPTION_KEY = "REPLACE_WITH_YOUR_32_BYTE_KEY_BASE64"
+
+ # Optional: Set to restrict admin API access
+ # ADMIN_TOKEN = "your-admin-token"
+
+ # D1 Database
+ [[d1_databases]]
+ binding = "DB"
+ database_name = "package-broker-db"
+ database_id = "REPLACE_WITH_YOUR_DATABASE_ID"
+
+ # KV Namespace for caching
+ [[kv_namespaces]]
+ binding = "KV"
+ id = "REPLACE_WITH_YOUR_KV_NAMESPACE_ID"
+
+ # R2 Bucket for artifacts
+ [[r2_buckets]]
+ binding = "R2_BUCKET"
+ bucket_name = "package-broker-artifacts"
+
+ # Queue for async operations
+ [[queues.producers]]
+ binding = "QUEUE"
+ queue = "package-broker-queue"
+
+ [[queues.consumers]]
+ queue = "package-broker-queue"
+ max_batch_size = 10
+ max_batch_timeout = 30
+
+ # Analytics Engine (OPTIONAL - free tier: 100k events/day)
+ # Analytics Engine datasets are automatically created on first write.
+ # To enable Analytics Engine:
+ # 1. Uncomment the lines below
+ # 2. Deploy: npx wrangler deploy
+ # 3. The dataset will be created automatically when first event is written
+ #
+ # [[analytics_engine_datasets]]
+ # binding = "ANALYTICS"
+ # dataset = "package-broker-analytics"
+
+ # Optional: Use S3 instead of R2
+ # [vars]
+ # S3_ENDPOINT = "https://s3.amazonaws.com"
+ # S3_REGION = "us-east-1"
+ # S3_ACCESS_KEY_ID = "your-access-key"
+ # S3_SECRET_ACCESS_KEY = "your-secret-key"
+ # S3_BUCKET = "your-bucket-name"
+