@package-broker/core 0.2.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cache/index.d.ts +2 -0
- package/dist/cache/index.d.ts.map +1 -0
- package/dist/cache/index.js +2 -0
- package/dist/cache/index.js.map +1 -0
- package/dist/cache/memory-driver.d.ts +15 -0
- package/dist/cache/memory-driver.d.ts.map +1 -0
- package/dist/cache/memory-driver.js +56 -0
- package/dist/cache/memory-driver.js.map +1 -0
- package/dist/db/d1-driver.d.ts +3 -0
- package/dist/db/d1-driver.d.ts.map +1 -0
- package/dist/db/d1-driver.js +7 -0
- package/dist/db/d1-driver.js.map +1 -0
- package/dist/db/index.d.ts +5 -0
- package/dist/db/index.d.ts.map +1 -0
- package/dist/db/index.js +4 -0
- package/dist/db/index.js.map +1 -0
- package/dist/db/schema.d.ts +696 -0
- package/dist/db/schema.d.ts.map +1 -0
- package/dist/db/schema.js +99 -0
- package/dist/db/schema.js.map +1 -0
- package/dist/factory.d.ts +34 -0
- package/dist/factory.d.ts.map +1 -0
- package/dist/factory.js +121 -0
- package/dist/factory.js.map +1 -0
- package/dist/index.d.ts +16 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +17 -0
- package/dist/index.js.map +1 -0
- package/dist/jobs/index.d.ts +2 -0
- package/dist/jobs/index.d.ts.map +1 -0
- package/dist/jobs/index.js +7 -0
- package/dist/jobs/index.js.map +1 -0
- package/dist/jobs/processor.d.ts +49 -0
- package/dist/jobs/processor.d.ts.map +1 -0
- package/dist/jobs/processor.js +118 -0
- package/dist/jobs/processor.js.map +1 -0
- package/dist/middleware/auth.d.ts +52 -0
- package/dist/middleware/auth.d.ts.map +1 -0
- package/dist/middleware/auth.js +300 -0
- package/dist/middleware/auth.js.map +1 -0
- package/dist/middleware/composer-version.d.ts +7 -0
- package/dist/middleware/composer-version.d.ts.map +1 -0
- package/dist/middleware/composer-version.js +18 -0
- package/dist/middleware/composer-version.js.map +1 -0
- package/dist/middleware/error-handler.d.ts +7 -0
- package/dist/middleware/error-handler.d.ts.map +1 -0
- package/dist/middleware/error-handler.js +45 -0
- package/dist/middleware/error-handler.js.map +1 -0
- package/dist/middleware/index.d.ts +5 -0
- package/dist/middleware/index.d.ts.map +1 -0
- package/dist/middleware/index.js +6 -0
- package/dist/middleware/index.js.map +1 -0
- package/dist/middleware/request-id.d.ts +9 -0
- package/dist/middleware/request-id.d.ts.map +1 -0
- package/dist/middleware/request-id.js +36 -0
- package/dist/middleware/request-id.js.map +1 -0
- package/dist/ports.d.ts +32 -0
- package/dist/ports.d.ts.map +1 -0
- package/dist/ports.js +4 -0
- package/dist/ports.js.map +1 -0
- package/dist/queue/consumer.d.ts +18 -0
- package/dist/queue/consumer.d.ts.map +1 -0
- package/dist/queue/consumer.js +82 -0
- package/dist/queue/consumer.js.map +1 -0
- package/dist/queue/index.d.ts +2 -0
- package/dist/queue/index.d.ts.map +1 -0
- package/dist/queue/index.js +2 -0
- package/dist/queue/index.js.map +1 -0
- package/dist/queue/memory-driver.d.ts +13 -0
- package/dist/queue/memory-driver.d.ts.map +1 -0
- package/dist/queue/memory-driver.js +22 -0
- package/dist/queue/memory-driver.js.map +1 -0
- package/dist/queue/types.d.ts +19 -0
- package/dist/queue/types.d.ts.map +1 -0
- package/dist/queue/types.js +3 -0
- package/dist/queue/types.js.map +1 -0
- package/dist/routes/api/artifacts.d.ts +25 -0
- package/dist/routes/api/artifacts.d.ts.map +1 -0
- package/dist/routes/api/artifacts.js +57 -0
- package/dist/routes/api/artifacts.js.map +1 -0
- package/dist/routes/api/auth.d.ts +50 -0
- package/dist/routes/api/auth.d.ts.map +1 -0
- package/dist/routes/api/auth.js +268 -0
- package/dist/routes/api/auth.js.map +1 -0
- package/dist/routes/api/index.d.ts +9 -0
- package/dist/routes/api/index.d.ts.map +1 -0
- package/dist/routes/api/index.js +10 -0
- package/dist/routes/api/index.js.map +1 -0
- package/dist/routes/api/packages.d.ts +47 -0
- package/dist/routes/api/packages.d.ts.map +1 -0
- package/dist/routes/api/packages.js +671 -0
- package/dist/routes/api/packages.js.map +1 -0
- package/dist/routes/api/repositories.d.ts +56 -0
- package/dist/routes/api/repositories.d.ts.map +1 -0
- package/dist/routes/api/repositories.js +317 -0
- package/dist/routes/api/repositories.js.map +1 -0
- package/dist/routes/api/settings.d.ts +28 -0
- package/dist/routes/api/settings.d.ts.map +1 -0
- package/dist/routes/api/settings.js +81 -0
- package/dist/routes/api/settings.js.map +1 -0
- package/dist/routes/api/stats.d.ts +21 -0
- package/dist/routes/api/stats.d.ts.map +1 -0
- package/dist/routes/api/stats.js +52 -0
- package/dist/routes/api/stats.js.map +1 -0
- package/dist/routes/api/tokens.d.ts +39 -0
- package/dist/routes/api/tokens.d.ts.map +1 -0
- package/dist/routes/api/tokens.js +191 -0
- package/dist/routes/api/tokens.js.map +1 -0
- package/dist/routes/api/users.d.ts +5 -0
- package/dist/routes/api/users.d.ts.map +1 -0
- package/dist/routes/api/users.js +125 -0
- package/dist/routes/api/users.js.map +1 -0
- package/dist/routes/composer.d.ts +133 -0
- package/dist/routes/composer.d.ts.map +1 -0
- package/dist/routes/composer.js +1179 -0
- package/dist/routes/composer.js.map +1 -0
- package/dist/routes/dist.d.ts +32 -0
- package/dist/routes/dist.d.ts.map +1 -0
- package/dist/routes/dist.js +761 -0
- package/dist/routes/dist.js.map +1 -0
- package/dist/routes/health.d.ts +7 -0
- package/dist/routes/health.d.ts.map +1 -0
- package/dist/routes/health.js +22 -0
- package/dist/routes/health.js.map +1 -0
- package/dist/routes/index.d.ts +5 -0
- package/dist/routes/index.d.ts.map +1 -0
- package/dist/routes/index.js +6 -0
- package/dist/routes/index.js.map +1 -0
- package/dist/services/EmailService.d.ts +20 -0
- package/dist/services/EmailService.d.ts.map +1 -0
- package/dist/services/EmailService.js +27 -0
- package/dist/services/EmailService.js.map +1 -0
- package/dist/services/UserService.d.ts +27 -0
- package/dist/services/UserService.d.ts.map +1 -0
- package/dist/services/UserService.js +164 -0
- package/dist/services/UserService.js.map +1 -0
- package/dist/storage/driver.d.ts +65 -0
- package/dist/storage/driver.d.ts.map +1 -0
- package/dist/storage/driver.js +59 -0
- package/dist/storage/driver.js.map +1 -0
- package/dist/storage/index.d.ts +4 -0
- package/dist/storage/index.d.ts.map +1 -0
- package/dist/storage/index.js +5 -0
- package/dist/storage/index.js.map +1 -0
- package/dist/storage/r2-driver.d.ts +16 -0
- package/dist/storage/r2-driver.d.ts.map +1 -0
- package/dist/storage/r2-driver.js +28 -0
- package/dist/storage/r2-driver.js.map +1 -0
- package/dist/storage/s3-driver.d.ts +22 -0
- package/dist/storage/s3-driver.d.ts.map +1 -0
- package/dist/storage/s3-driver.js +66 -0
- package/dist/storage/s3-driver.js.map +1 -0
- package/dist/sync/github-sync.d.ts +15 -0
- package/dist/sync/github-sync.d.ts.map +1 -0
- package/dist/sync/github-sync.js +39 -0
- package/dist/sync/github-sync.js.map +1 -0
- package/dist/sync/index.d.ts +5 -0
- package/dist/sync/index.d.ts.map +1 -0
- package/dist/sync/index.js +6 -0
- package/dist/sync/index.js.map +1 -0
- package/dist/sync/repository-sync.d.ts +18 -0
- package/dist/sync/repository-sync.d.ts.map +1 -0
- package/dist/sync/repository-sync.js +214 -0
- package/dist/sync/repository-sync.js.map +1 -0
- package/dist/sync/strategies/composer-repo.d.ts +11 -0
- package/dist/sync/strategies/composer-repo.d.ts.map +1 -0
- package/dist/sync/strategies/composer-repo.js +269 -0
- package/dist/sync/strategies/composer-repo.js.map +1 -0
- package/dist/sync/strategies/github-api.d.ts +6 -0
- package/dist/sync/strategies/github-api.d.ts.map +1 -0
- package/dist/sync/strategies/github-api.js +137 -0
- package/dist/sync/strategies/github-api.js.map +1 -0
- package/dist/sync/strategies/github-packages.d.ts +7 -0
- package/dist/sync/strategies/github-packages.d.ts.map +1 -0
- package/dist/sync/strategies/github-packages.js +66 -0
- package/dist/sync/strategies/github-packages.js.map +1 -0
- package/dist/sync/strategies/index.d.ts +4 -0
- package/dist/sync/strategies/index.d.ts.map +1 -0
- package/dist/sync/strategies/index.js +5 -0
- package/dist/sync/strategies/index.js.map +1 -0
- package/dist/sync/types.d.ts +60 -0
- package/dist/sync/types.d.ts.map +1 -0
- package/dist/sync/types.js +3 -0
- package/dist/sync/types.js.map +1 -0
- package/dist/utils/analytics.d.ts +142 -0
- package/dist/utils/analytics.d.ts.map +1 -0
- package/dist/utils/analytics.js +229 -0
- package/dist/utils/analytics.js.map +1 -0
- package/dist/utils/download.d.ts +10 -0
- package/dist/utils/download.d.ts.map +1 -0
- package/dist/utils/download.js +34 -0
- package/dist/utils/download.js.map +1 -0
- package/dist/utils/encryption.d.ts +20 -0
- package/dist/utils/encryption.d.ts.map +1 -0
- package/dist/utils/encryption.js +76 -0
- package/dist/utils/encryption.js.map +1 -0
- package/dist/utils/index.d.ts +5 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +6 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/logger.d.ts +78 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/dist/utils/logger.js +134 -0
- package/dist/utils/logger.js.map +1 -0
- package/dist/utils/upstream-fetch.d.ts +15 -0
- package/dist/utils/upstream-fetch.d.ts.map +1 -0
- package/dist/utils/upstream-fetch.js +108 -0
- package/dist/utils/upstream-fetch.js.map +1 -0
- package/dist/workflows/index.d.ts +3 -0
- package/dist/workflows/index.d.ts.map +1 -0
- package/dist/workflows/index.js +8 -0
- package/dist/workflows/index.js.map +1 -0
- package/dist/workflows/package-storage.d.ts +47 -0
- package/dist/workflows/package-storage.d.ts.map +1 -0
- package/dist/workflows/package-storage.js +136 -0
- package/dist/workflows/package-storage.js.map +1 -0
- package/package.json +62 -0
|
@@ -0,0 +1,671 @@
|
|
|
1
|
+
// Packages API routes
|
|
2
|
+
import { packages, artifacts, repositories } from '../../db/schema';
|
|
3
|
+
import { eq, like, and } from 'drizzle-orm';
|
|
4
|
+
import { unzipSync, strFromU8 } from 'fflate';
|
|
5
|
+
import { buildStorageKey, buildReadmeStorageKey, buildChangelogStorageKey } from '../../storage/driver';
|
|
6
|
+
import { downloadFromSource } from '../../utils/download';
|
|
7
|
+
import { decryptCredentials } from '../../utils/encryption';
|
|
8
|
+
import { nanoid } from 'nanoid';
|
|
9
|
+
import { COMPOSER_USER_AGENT } from '@package-broker/shared';
|
|
10
|
+
import { isPackagistMirroringEnabled } from './settings';
|
|
11
|
+
/**
 * GET /api/packages
 * List all packages, optionally filtered by the `search` query parameter
 * (substring match on the package name), ordered by name.
 *
 * @param {object} c - Hono request context; expects `database` in context vars.
 * @returns {Response} JSON array of package rows.
 */
export async function listPackages(c) {
    const db = c.get('database');
    const searchTerm = c.req.query('search');
    // Build the base query once; only one branch below ever executes it.
    const baseQuery = db.select().from(packages);
    const rows = searchTerm
        ? await baseQuery
            .where(like(packages.name, `%${searchTerm}%`))
            .orderBy(packages.name)
        : await baseQuery.orderBy(packages.name);
    return c.json(rows);
}
|
|
31
|
+
/**
 * GET /api/packages/:name
 * Get a single package with all of its versions, ordered by release time.
 *
 * The `:name` path parameter is URL-decoded so vendor-prefixed names
 * containing a slash (e.g. amasty/cron-schedule-list) resolve correctly.
 *
 * @param {object} c - Hono request context; expects `database` in context vars.
 * @returns {Response} 404 JSON error when no version exists, otherwise
 *   `{ name, versions }` where `versions` is the list of matching rows.
 */
export async function getPackage(c) {
    // Decode URL-encoded package name (handles slashes like amasty/cron-schedule-list)
    const name = decodeURIComponent(c.req.param('name'));
    const db = c.get('database');
    const versionRows = await db
        .select()
        .from(packages)
        .where(eq(packages.name, name))
        .orderBy(packages.released_at);
    // Guard clause: unknown package -> 404.
    if (versionRows.length === 0) {
        return c.json({ error: 'Not Found', message: 'Package not found' }, 404);
    }
    return c.json({ name, versions: versionRows });
}
|
|
53
|
+
/**
 * Extract README.md or README.mdown from a ZIP archive.
 *
 * Scans every entry in the archive and returns the decoded text of the first
 * file whose basename is "readme.md" (any letter casing); if none exists,
 * falls back to "readme.mdown" (any casing). The original implementation
 * enumerated a fixed list of casings (README.md, readme.md, README.MD,
 * Readme.md, ...) and therefore missed variants such as "ReadMe.md" or
 * "readme.MD"; matching on the lowercased basename is a strict superset of
 * that behavior while keeping the .md-before-.mdown preference.
 *
 * @param {Uint8Array} zipData - Raw ZIP archive bytes.
 * @returns {string|null} README text, or null when absent or the archive
 *   cannot be unzipped (errors are logged, never thrown).
 */
function extractReadme(zipData) {
    try {
        const files = unzipSync(zipData);
        // Prefer .md over .mdown if both exist; compare case-insensitively.
        for (const wanted of ['readme.md', 'readme.mdown']) {
            for (const [path, content] of Object.entries(files)) {
                const filename = (path.split('/').pop() || '').toLowerCase();
                if (filename === wanted) {
                    return strFromU8(content);
                }
            }
        }
        return null;
    }
    catch (error) {
        // Corrupt/unsupported archives are treated as "no README" rather
        // than failing the request.
        console.error('Error extracting README from ZIP:', error);
        return null;
    }
}
|
|
86
|
+
/**
 * Extract CHANGELOG.md or CHANGELOG.mdown from a ZIP archive.
 *
 * Scans every entry in the archive and returns the decoded text of the first
 * file whose basename is "changelog.md" (any letter casing); if none exists,
 * falls back to "changelog.mdown" (any casing). The original implementation
 * enumerated a fixed list of casings (CHANGELOG.md, changelog.md, ...) and
 * therefore missed variants such as "ChangeLog.md"; matching on the
 * lowercased basename is a strict superset of that behavior while keeping
 * the .md-before-.mdown preference.
 *
 * @param {Uint8Array} zipData - Raw ZIP archive bytes.
 * @returns {string|null} CHANGELOG text, or null when absent or the archive
 *   cannot be unzipped (errors are logged, never thrown).
 */
function extractChangelog(zipData) {
    try {
        const files = unzipSync(zipData);
        // Prefer .md over .mdown if both exist; compare case-insensitively.
        for (const wanted of ['changelog.md', 'changelog.mdown']) {
            for (const [path, content] of Object.entries(files)) {
                const filename = (path.split('/').pop() || '').toLowerCase();
                if (filename === wanted) {
                    return strFromU8(content);
                }
            }
        }
        return null;
    }
    catch (error) {
        // Corrupt/unsupported archives are treated as "no CHANGELOG" rather
        // than failing the request.
        console.error('Error extracting CHANGELOG from ZIP:', error);
        return null;
    }
}
|
|
119
|
+
/**
 * GET /api/packages/:name/:version/readme
 * Get README.md content for a specific package version.
 *
 * Flow: serve a cached copy from R2/S3 storage when present (a stored
 * "NOT_FOUND" sentinel short-circuits to 404); otherwise locate the version's
 * ZIP artifact — downloading it on demand from the upstream source when no
 * artifact exists yet — extract the README, cache it (or the sentinel) back
 * into storage, and return the markdown with immutable CDN cache headers.
 *
 * @param {object} c - Hono request context; expects `database` and `storage`
 *   in context vars and `ENCRYPTION_KEY` in env.
 * @returns {Response} text/markdown on success; JSON error with 400/404/500
 *   otherwise.
 */
export async function getPackageReadme(c) {
    const nameParam = c.req.param('name');
    const version = c.req.param('version');
    // Decode URL-encoded package name (handles slashes like amasty/cron-schedule-list)
    const name = decodeURIComponent(nameParam);
    if (!name || !version) {
        return c.json({ error: 'Bad Request', message: 'Missing package name or version' }, 400);
    }
    // 1. Get package from database to find repo_id
    const db = c.get('database');
    const [pkg] = await db
        .select()
        .from(packages)
        .where(and(eq(packages.name, name), eq(packages.version, version)))
        .limit(1);
    if (!pkg) {
        return c.json({ error: 'Not Found', message: 'Package version not found' }, 404);
    }
    // 2. Determine storage type (public for Packagist, private for others)
    const storageType = pkg.repo_id === 'packagist' ? 'public' : 'private';
    const readmeStorageKey = buildReadmeStorageKey(storageType, pkg.repo_id, name, version);
    const storage = c.var.storage;
    // 3. Check if README already exists in R2/S3 storage.
    // storage.get() yields a ReadableStream (read via getReader() below).
    const existingReadme = await storage.get(readmeStorageKey);
    if (existingReadme) {
        // Read the stream fully into memory to check if it's a "NOT_FOUND" marker
        const chunks = [];
        const reader = existingReadme.getReader();
        while (true) {
            const { done, value } = await reader.read();
            if (done)
                break;
            if (value) {
                chunks.push(value);
            }
        }
        // Concatenate the chunks into one contiguous buffer before decoding.
        const totalSize = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
        const content = new Uint8Array(totalSize);
        let offset = 0;
        for (const chunk of chunks) {
            content.set(chunk, offset);
            offset += chunk.length;
        }
        const textContent = new TextDecoder().decode(content);
        // A stored NOT_FOUND sentinel means a prior request already proved the
        // archive has no README — return 404 without re-extracting.
        if (textContent === 'NOT_FOUND') {
            return c.json({
                error: 'Not Found',
                message: 'No README file exists in this package version'
            }, 404);
        }
        // Return cached README with aggressive CDN caching (immutable: the
        // content of a released version never changes).
        return new Response(textContent, {
            headers: {
                'Content-Type': 'text/markdown; charset=utf-8',
                'Cache-Control': 'public, max-age=31536000, immutable',
                'X-README-Source': 'storage',
            },
        });
    }
    // 4. README not in storage - need to extract from ZIP.
    // Get artifact row to find the ZIP's storage key.
    let [artifact] = await db
        .select()
        .from(artifacts)
        .where(and(eq(artifacts.repo_id, pkg.repo_id), eq(artifacts.package_name, name), eq(artifacts.version, version)))
        .limit(1);
    let zipData = null;
    // 5. If artifact doesn't exist, try on-demand download from the upstream source
    if (!artifact) {
        // Check if we can download from source
        if (!pkg.source_dist_url) {
            return c.json({ error: 'Not Found', message: 'Artifact not found and source URL unavailable. Package may need to be downloaded first.' }, 404);
        }
        // Validate it's actually a URL (stale metadata may hold non-URL values)
        if (!pkg.source_dist_url.startsWith('http://') && !pkg.source_dist_url.startsWith('https://')) {
            return c.json({ error: 'Not Found', message: 'Invalid source URL. Please re-sync the repository to update package metadata.' }, 404);
        }
        // Get repository row for its encrypted credentials
        const [repo] = await db
            .select()
            .from(repositories)
            .where(eq(repositories.id, pkg.repo_id))
            .limit(1);
        if (!repo) {
            return c.json({ error: 'Not Found', message: 'Repository not found' }, 404);
        }
        try {
            // Decrypt credentials (stored as an encrypted JSON string)
            const credentialsJson = await decryptCredentials(repo.auth_credentials, c.env.ENCRYPTION_KEY);
            const credentials = JSON.parse(credentialsJson);
            // Download from source with authentication
            const sourceResponse = await downloadFromSource(pkg.source_dist_url, repo.credential_type, credentials);
            // Read the response body
            const sourceStream = sourceResponse.body;
            if (!sourceStream) {
                throw new Error('Source response has no body');
            }
            // Read all chunks into memory, tracking the total byte count
            const chunks = [];
            const reader = sourceStream.getReader();
            let totalSize = 0;
            while (true) {
                const { done, value } = await reader.read();
                if (done)
                    break;
                if (value) {
                    chunks.push(value);
                    totalSize += value.length;
                }
            }
            // Combine chunks into a single Uint8Array
            zipData = new Uint8Array(totalSize);
            let offset = 0;
            for (const chunk of chunks) {
                zipData.set(chunk, offset);
                offset += chunk.length;
            }
            // Store the downloaded artifact in storage for future requests
            const storageType = pkg.repo_id === 'packagist' ? 'public' : 'private';
            const storageKey = buildStorageKey(storageType, pkg.repo_id, name, version);
            // Convert to ArrayBuffer (not SharedArrayBuffer) for storage
            const arrayBuffer = zipData.buffer.slice(zipData.byteOffset, zipData.byteOffset + zipData.byteLength);
            try {
                await storage.put(storageKey, arrayBuffer);
                console.log(`Successfully stored artifact for README extraction: ${storageKey} (${totalSize} bytes)`);
            }
            catch (err) {
                // Best-effort cache write: a storage failure is logged, not fatal.
                console.error(`Error storing artifact ${storageKey}:`, err);
                // Continue - we can still extract README from zipData
            }
            // Create artifact record so later requests find the stored ZIP
            const artifactId = nanoid();
            const now = Math.floor(Date.now() / 1000);
            try {
                await db.insert(artifacts).values({
                    id: artifactId,
                    repo_id: pkg.repo_id,
                    package_name: name,
                    version: version,
                    file_key: storageKey,
                    size: totalSize,
                    download_count: 0,
                    created_at: now,
                });
                // Mirror the inserted row in memory so step 6 can use it
                // without a re-query.
                artifact = {
                    id: artifactId,
                    repo_id: pkg.repo_id,
                    package_name: name,
                    version: version,
                    file_key: storageKey,
                    size: totalSize,
                    download_count: 0,
                    created_at: now,
                    last_downloaded_at: null,
                };
            }
            catch (err) {
                // Best-effort insert (e.g. a concurrent request may have won);
                // logged, not fatal.
                console.error(`Error creating artifact record:`, err);
                // Continue - we can still extract README from zipData
            }
        }
        catch (error) {
            console.error(`Error downloading artifact from source:`, error);
            return c.json({
                error: 'Internal Server Error',
                message: error instanceof Error ? error.message : 'Failed to download artifact',
            }, 500);
        }
    }
    // 6. Get ZIP from storage if we don't already have it in memory
    // (i.e. the artifact row existed, so the on-demand download was skipped)
    if (!zipData) {
        if (!artifact) {
            return c.json({ error: 'Not Found', message: 'Artifact not found' }, 404);
        }
        const zipStream = await storage.get(artifact.file_key);
        if (!zipStream) {
            return c.json({ error: 'Not Found', message: 'Artifact file not found in storage' }, 404);
        }
        // Read ZIP into memory
        const zipChunks = [];
        const zipReader = zipStream.getReader();
        while (true) {
            const { done, value } = await zipReader.read();
            if (done)
                break;
            if (value) {
                zipChunks.push(value);
            }
        }
        // Combine chunks
        const totalSize = zipChunks.reduce((sum, chunk) => sum + chunk.length, 0);
        zipData = new Uint8Array(totalSize);
        let offset = 0;
        for (const chunk of zipChunks) {
            zipData.set(chunk, offset);
            offset += chunk.length;
        }
    }
    // 7. Extract README from ZIP
    const readmeContent = extractReadme(zipData);
    if (!readmeContent) {
        // Store NOT_FOUND marker so future requests skip the expensive
        // ZIP-fetch-and-unzip path (see step 3).
        const notFoundMarker = new TextEncoder().encode('NOT_FOUND');
        await storage.put(readmeStorageKey, notFoundMarker).catch((err) => {
            console.error(`Error storing NOT_FOUND marker for ${readmeStorageKey}:`, err);
        });
        return c.json({
            error: 'Not Found',
            message: 'No README file exists in this package version'
        }, 404);
    }
    // 8. Store README in R2/S3 for future requests (best-effort)
    const readmeBytes = new TextEncoder().encode(readmeContent);
    await storage.put(readmeStorageKey, readmeBytes).catch((err) => {
        console.error(`Error storing README for ${readmeStorageKey}:`, err);
        // Continue even if storage fails - we'll still return the content
    });
    // 9. Return with aggressive CDN caching headers
    return new Response(readmeContent, {
        headers: {
            'Content-Type': 'text/markdown; charset=utf-8',
            'Cache-Control': 'public, max-age=31536000, immutable',
            'X-README-Source': 'extracted',
        },
    });
}
|
|
351
|
+
/**
|
|
352
|
+
* GET /api/packages/:name/:version/changelog
|
|
353
|
+
* Get CHANGELOG.md content for a specific package version
|
|
354
|
+
* Uses R2/S3 storage instead of KV for better scalability
|
|
355
|
+
*/
|
|
356
|
+
export async function getPackageChangelog(c) {
|
|
357
|
+
const nameParam = c.req.param('name');
|
|
358
|
+
const version = c.req.param('version');
|
|
359
|
+
// Decode URL-encoded package name (handles slashes like amasty/cron-schedule-list)
|
|
360
|
+
const name = decodeURIComponent(nameParam);
|
|
361
|
+
if (!name || !version) {
|
|
362
|
+
return c.json({ error: 'Bad Request', message: 'Missing package name or version' }, 400);
|
|
363
|
+
}
|
|
364
|
+
// 1. Get package from database to find repo_id
|
|
365
|
+
const db = c.get('database');
|
|
366
|
+
const [pkg] = await db
|
|
367
|
+
.select()
|
|
368
|
+
.from(packages)
|
|
369
|
+
.where(and(eq(packages.name, name), eq(packages.version, version)))
|
|
370
|
+
.limit(1);
|
|
371
|
+
if (!pkg) {
|
|
372
|
+
return c.json({ error: 'Not Found', message: 'Package version not found' }, 404);
|
|
373
|
+
}
|
|
374
|
+
// 2. Determine storage type (public for Packagist, private for others)
|
|
375
|
+
const storageType = pkg.repo_id === 'packagist' ? 'public' : 'private';
|
|
376
|
+
const changelogStorageKey = buildChangelogStorageKey(storageType, pkg.repo_id, name, version);
|
|
377
|
+
const storage = c.var.storage;
|
|
378
|
+
// 3. Check if CHANGELOG already exists in R2/S3 storage
|
|
379
|
+
const existingChangelog = await storage.get(changelogStorageKey);
|
|
380
|
+
if (existingChangelog) {
|
|
381
|
+
// Read the stream to check if it's a "NOT_FOUND" marker
|
|
382
|
+
const chunks = [];
|
|
383
|
+
const reader = existingChangelog.getReader();
|
|
384
|
+
while (true) {
|
|
385
|
+
const { done, value } = await reader.read();
|
|
386
|
+
if (done)
|
|
387
|
+
break;
|
|
388
|
+
if (value) {
|
|
389
|
+
chunks.push(value);
|
|
390
|
+
}
|
|
391
|
+
}
|
|
392
|
+
const totalSize = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
|
|
393
|
+
const content = new Uint8Array(totalSize);
|
|
394
|
+
let offset = 0;
|
|
395
|
+
for (const chunk of chunks) {
|
|
396
|
+
content.set(chunk, offset);
|
|
397
|
+
offset += chunk.length;
|
|
398
|
+
}
|
|
399
|
+
const textContent = new TextDecoder().decode(content);
|
|
400
|
+
// If it's a NOT_FOUND marker, return 404
|
|
401
|
+
if (textContent === 'NOT_FOUND') {
|
|
402
|
+
return c.json({
|
|
403
|
+
error: 'Not Found',
|
|
404
|
+
message: 'No CHANGELOG file exists in this package version'
|
|
405
|
+
}, 404);
|
|
406
|
+
}
|
|
407
|
+
// Return cached CHANGELOG with aggressive CDN caching
|
|
408
|
+
return new Response(textContent, {
|
|
409
|
+
headers: {
|
|
410
|
+
'Content-Type': 'text/markdown; charset=utf-8',
|
|
411
|
+
'Cache-Control': 'public, max-age=31536000, immutable',
|
|
412
|
+
'X-CHANGELOG-Source': 'storage',
|
|
413
|
+
},
|
|
414
|
+
});
|
|
415
|
+
}
|
|
416
|
+
// 4. CHANGELOG not in storage - need to extract from ZIP
|
|
417
|
+
// Get artifact to find ZIP storage key
|
|
418
|
+
let [artifact] = await db
|
|
419
|
+
.select()
|
|
420
|
+
.from(artifacts)
|
|
421
|
+
.where(and(eq(artifacts.repo_id, pkg.repo_id), eq(artifacts.package_name, name), eq(artifacts.version, version)))
|
|
422
|
+
.limit(1);
|
|
423
|
+
let zipData = null;
|
|
424
|
+
// 5. If artifact doesn't exist, try on-demand download
|
|
425
|
+
if (!artifact) {
|
|
426
|
+
// Check if we can download from source
|
|
427
|
+
if (!pkg.source_dist_url) {
|
|
428
|
+
return c.json({ error: 'Not Found', message: 'Artifact not found and source URL unavailable. Package may need to be downloaded first.' }, 404);
|
|
429
|
+
}
|
|
430
|
+
// Validate it's actually a URL
|
|
431
|
+
if (!pkg.source_dist_url.startsWith('http://') && !pkg.source_dist_url.startsWith('https://')) {
|
|
432
|
+
return c.json({ error: 'Not Found', message: 'Invalid source URL. Please re-sync the repository to update package metadata.' }, 404);
|
|
433
|
+
}
|
|
434
|
+
// Get repository for credentials
|
|
435
|
+
const [repo] = await db
|
|
436
|
+
.select()
|
|
437
|
+
.from(repositories)
|
|
438
|
+
.where(eq(repositories.id, pkg.repo_id))
|
|
439
|
+
.limit(1);
|
|
440
|
+
if (!repo) {
|
|
441
|
+
return c.json({ error: 'Not Found', message: 'Repository not found' }, 404);
|
|
442
|
+
}
|
|
443
|
+
try {
|
|
444
|
+
// Decrypt credentials
|
|
445
|
+
const credentialsJson = await decryptCredentials(repo.auth_credentials, c.env.ENCRYPTION_KEY);
|
|
446
|
+
const credentials = JSON.parse(credentialsJson);
|
|
447
|
+
// Download from source with authentication
|
|
448
|
+
const sourceResponse = await downloadFromSource(pkg.source_dist_url, repo.credential_type, credentials);
|
|
449
|
+
// Read the response body
|
|
450
|
+
const sourceStream = sourceResponse.body;
|
|
451
|
+
if (!sourceStream) {
|
|
452
|
+
throw new Error('Source response has no body');
|
|
453
|
+
}
|
|
454
|
+
// Read all chunks into memory
|
|
455
|
+
const chunks = [];
|
|
456
|
+
const reader = sourceStream.getReader();
|
|
457
|
+
let totalSize = 0;
|
|
458
|
+
while (true) {
|
|
459
|
+
const { done, value } = await reader.read();
|
|
460
|
+
if (done)
|
|
461
|
+
break;
|
|
462
|
+
if (value) {
|
|
463
|
+
chunks.push(value);
|
|
464
|
+
totalSize += value.length;
|
|
465
|
+
}
|
|
466
|
+
}
|
|
467
|
+
// Combine chunks into a single Uint8Array
|
|
468
|
+
zipData = new Uint8Array(totalSize);
|
|
469
|
+
let offset = 0;
|
|
470
|
+
for (const chunk of chunks) {
|
|
471
|
+
zipData.set(chunk, offset);
|
|
472
|
+
offset += chunk.length;
|
|
473
|
+
}
|
|
474
|
+
// Store artifact in storage
|
|
475
|
+
const storageKey = buildStorageKey(storageType, pkg.repo_id, name, version);
|
|
476
|
+
// Convert to ArrayBuffer (not SharedArrayBuffer) for storage
|
|
477
|
+
const arrayBuffer = zipData.buffer.slice(zipData.byteOffset, zipData.byteOffset + zipData.byteLength);
|
|
478
|
+
try {
|
|
479
|
+
await storage.put(storageKey, arrayBuffer);
|
|
480
|
+
console.log(`Successfully stored artifact for CHANGELOG extraction: ${storageKey} (${totalSize} bytes)`);
|
|
481
|
+
}
|
|
482
|
+
catch (err) {
|
|
483
|
+
console.error(`Error storing artifact ${storageKey}:`, err);
|
|
484
|
+
// Continue - we can still extract CHANGELOG from zipData
|
|
485
|
+
}
|
|
486
|
+
// Create artifact record
|
|
487
|
+
const artifactId = nanoid();
|
|
488
|
+
const now = Math.floor(Date.now() / 1000);
|
|
489
|
+
try {
|
|
490
|
+
await db.insert(artifacts).values({
|
|
491
|
+
id: artifactId,
|
|
492
|
+
repo_id: pkg.repo_id,
|
|
493
|
+
package_name: name,
|
|
494
|
+
version: version,
|
|
495
|
+
file_key: storageKey,
|
|
496
|
+
size: totalSize,
|
|
497
|
+
download_count: 0,
|
|
498
|
+
created_at: now,
|
|
499
|
+
});
|
|
500
|
+
artifact = {
|
|
501
|
+
id: artifactId,
|
|
502
|
+
repo_id: pkg.repo_id,
|
|
503
|
+
package_name: name,
|
|
504
|
+
version: version,
|
|
505
|
+
file_key: storageKey,
|
|
506
|
+
size: totalSize,
|
|
507
|
+
download_count: 0,
|
|
508
|
+
created_at: now,
|
|
509
|
+
last_downloaded_at: null,
|
|
510
|
+
};
|
|
511
|
+
}
|
|
512
|
+
catch (err) {
|
|
513
|
+
console.error(`Error creating artifact record:`, err);
|
|
514
|
+
// Continue - we can still extract CHANGELOG from zipData
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
catch (error) {
|
|
518
|
+
console.error(`Error downloading artifact from source:`, error);
|
|
519
|
+
return c.json({
|
|
520
|
+
error: 'Internal Server Error',
|
|
521
|
+
message: error instanceof Error ? error.message : 'Failed to download artifact',
|
|
522
|
+
}, 500);
|
|
523
|
+
}
|
|
524
|
+
}
|
|
525
|
+
// 6. Get ZIP from storage if we don't already have it in memory
|
|
526
|
+
if (!zipData) {
|
|
527
|
+
if (!artifact) {
|
|
528
|
+
return c.json({ error: 'Not Found', message: 'Artifact not found' }, 404);
|
|
529
|
+
}
|
|
530
|
+
const zipStream = await storage.get(artifact.file_key);
|
|
531
|
+
if (!zipStream) {
|
|
532
|
+
return c.json({ error: 'Not Found', message: 'Artifact file not found in storage' }, 404);
|
|
533
|
+
}
|
|
534
|
+
// Read ZIP into memory
|
|
535
|
+
const zipChunks = [];
|
|
536
|
+
const zipReader = zipStream.getReader();
|
|
537
|
+
while (true) {
|
|
538
|
+
const { done, value } = await zipReader.read();
|
|
539
|
+
if (done)
|
|
540
|
+
break;
|
|
541
|
+
if (value) {
|
|
542
|
+
zipChunks.push(value);
|
|
543
|
+
}
|
|
544
|
+
}
|
|
545
|
+
// Combine chunks
|
|
546
|
+
const totalSize = zipChunks.reduce((sum, chunk) => sum + chunk.length, 0);
|
|
547
|
+
zipData = new Uint8Array(totalSize);
|
|
548
|
+
let offset = 0;
|
|
549
|
+
for (const chunk of zipChunks) {
|
|
550
|
+
zipData.set(chunk, offset);
|
|
551
|
+
offset += chunk.length;
|
|
552
|
+
}
|
|
553
|
+
}
|
|
554
|
+
// 7. Extract CHANGELOG from ZIP
|
|
555
|
+
const changelogContent = extractChangelog(zipData);
|
|
556
|
+
if (!changelogContent) {
|
|
557
|
+
// Store NOT_FOUND marker to avoid repeated extraction attempts
|
|
558
|
+
const notFoundMarker = new TextEncoder().encode('NOT_FOUND');
|
|
559
|
+
await storage.put(changelogStorageKey, notFoundMarker).catch((err) => {
|
|
560
|
+
console.error(`Error storing NOT_FOUND marker for ${changelogStorageKey}:`, err);
|
|
561
|
+
});
|
|
562
|
+
return c.json({
|
|
563
|
+
error: 'Not Found',
|
|
564
|
+
message: 'No CHANGELOG file exists in this package version'
|
|
565
|
+
}, 404);
|
|
566
|
+
}
|
|
567
|
+
// 8. Store CHANGELOG in R2/S3 for future requests
|
|
568
|
+
const changelogBytes = new TextEncoder().encode(changelogContent);
|
|
569
|
+
await storage.put(changelogStorageKey, changelogBytes).catch((err) => {
|
|
570
|
+
console.error(`Error storing CHANGELOG for ${changelogStorageKey}:`, err);
|
|
571
|
+
// Continue even if storage fails - we'll still return the content
|
|
572
|
+
});
|
|
573
|
+
// 9. Return with aggressive CDN caching headers
|
|
574
|
+
return new Response(changelogContent, {
|
|
575
|
+
headers: {
|
|
576
|
+
'Content-Type': 'text/markdown; charset=utf-8',
|
|
577
|
+
'Cache-Control': 'public, max-age=31536000, immutable',
|
|
578
|
+
'X-CHANGELOG-Source': 'extracted',
|
|
579
|
+
},
|
|
580
|
+
});
|
|
581
|
+
}
|
|
582
|
+
/**
 * POST /api/packages/add-from-mirror
 * Manually fetch and store packages from a selected mirror repository.
 *
 * Expected JSON body: { repository_id: string, package_names: string[] }.
 * Returns 400 for a malformed/incomplete body or when Packagist mirroring
 * is disabled, 404 for an unknown repository, 501 for non-Packagist
 * composer repositories, otherwise 200 with per-package { results }.
 */
export async function addPackagesFromMirror(c) {
    // Guard the body parse: c.req.json() throws on malformed JSON, which
    // would otherwise surface as an unhandled error instead of a 400.
    let body;
    try {
        body = await c.req.json();
    }
    catch {
        return c.json({ error: 'Bad Request', message: 'Request body must be valid JSON' }, 400);
    }
    if (!body.repository_id || !Array.isArray(body.package_names) || body.package_names.length === 0) {
        return c.json({ error: 'Bad Request', message: 'repository_id and package_names array are required' }, 400);
    }
    const db = c.get('database');
    const url = new URL(c.req.url);
    const baseUrl = `${url.protocol}//${url.host}`;
    const results = [];
    // Handle Packagist repository
    if (body.repository_id === 'packagist') {
        const mirroringEnabled = await isPackagistMirroringEnabled(c.env.KV);
        if (!mirroringEnabled) {
            return c.json({ error: 'Bad Request', message: 'Packagist mirroring is not enabled' }, 400);
        }
        // Both helpers come from the same module; import once here instead of
        // re-importing transformPackageDistUrls on every loop iteration.
        const { ensurePackagistRepository, transformPackageDistUrls } = await import('../composer');
        await ensurePackagistRepository(db, c.env.ENCRYPTION_KEY, c.env.KV);
        // Fetch each package from Packagist (sequentially, to avoid hammering
        // the upstream registry with a burst of parallel requests).
        for (const packageName of body.package_names) {
            try {
                const packagistUrl = `https://repo.packagist.org/p2/${packageName}.json`;
                const response = await fetch(packagistUrl, {
                    headers: {
                        'User-Agent': COMPOSER_USER_AGENT,
                    },
                });
                if (!response.ok) {
                    results.push({
                        package: packageName,
                        success: false,
                        error: response.status === 404 ? 'Package not found' : `HTTP ${response.status}`,
                    });
                    continue;
                }
                const packageData = await response.json();
                const { storedCount, errors } = await transformPackageDistUrls(packageData, 'packagist', baseUrl, db);
                if (storedCount > 0) {
                    results.push({ package: packageName, success: true, versions: storedCount });
                }
                else {
                    results.push({ package: packageName, success: false, error: errors.join('; ') || 'No versions stored' });
                }
            }
            catch (error) {
                results.push({
                    package: packageName,
                    success: false,
                    error: error instanceof Error ? error.message : 'Unknown error',
                });
            }
        }
    }
    else {
        // Handle other Composer repositories
        const [repo] = await db
            .select()
            .from(repositories)
            .where(eq(repositories.id, body.repository_id))
            .limit(1);
        if (!repo) {
            return c.json({ error: 'Not Found', message: 'Repository not found' }, 404);
        }
        if (repo.vcs_type !== 'composer') {
            return c.json({ error: 'Bad Request', message: 'Only Composer repositories can be used for manual package addition' }, 400);
        }
        if (repo.status !== 'active') {
            return c.json({ error: 'Bad Request', message: 'Repository is not active' }, 400);
        }
        // TODO: Implement manual package addition for other composer repositories
        // This requires fetching the package metadata from the source repository
        // which is more complex than just fetching from Packagist
        return c.json({ error: 'Not Implemented', message: 'Manual package addition is currently only supported for Packagist' }, 501);
    }
    return c.json({ results });
}
|
|
662
|
+
/**
 * POST /packages/cleanup-numeric-versions
 * Temporary utility to fix versioning issues.
 *
 * Stub: this adapter build does not perform the cleanup (which would
 * normalize numeric versions such as x.y.z.0); it only reports that the
 * operation is unavailable. Kept exported so route wiring stays intact.
 */
export async function cleanupNumericVersions(c) {
    const payload = { message: 'Cleanup not implemented in this adapter version' };
    return c.json(payload);
}
|
|
671
|
+
//# sourceMappingURL=packages.js.map
|