openclawmp 0.1.2 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api.js +56 -22
- package/lib/archive.js +322 -0
- package/lib/commands/info.js +4 -4
- package/lib/commands/install.js +6 -47
- package/lib/commands/publish.js +2 -2
- package/lib/commands/search.js +6 -6
- package/package.json +1 -1
package/lib/api.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
// ============================================================================
|
|
2
2
|
// api.js — HTTP request helpers for the OpenClaw Marketplace API
|
|
3
3
|
//
|
|
4
|
+
// Uses V1 API endpoints for lightweight responses (AssetCompact).
|
|
4
5
|
// Uses Node.js built-in fetch (available since Node 18)
|
|
5
6
|
// ============================================================================
|
|
6
7
|
|
|
@@ -22,7 +23,7 @@ function authHeaders() {
|
|
|
22
23
|
|
|
23
24
|
/**
|
|
24
25
|
* Make a GET request to the API
|
|
25
|
-
* @param {string} apiPath - API path (e.g., '/api/assets')
|
|
26
|
+
* @param {string} apiPath - API path (e.g., '/api/v1/assets')
|
|
26
27
|
* @param {object} [params] - Query parameters
|
|
27
28
|
* @returns {Promise<object>} Parsed JSON response
|
|
28
29
|
*/
|
|
@@ -107,40 +108,53 @@ async function download(apiPath) {
|
|
|
107
108
|
}
|
|
108
109
|
|
|
109
110
|
/**
|
|
110
|
-
* Search assets
|
|
111
|
+
* Search assets via V1 API (returns lightweight AssetCompact items).
|
|
112
|
+
*
|
|
113
|
+
* V1 response shape: { query, total, items: AssetCompact[], nextCursor }
|
|
114
|
+
* AssetCompact fields: id, name, displayName, type, description, tags,
|
|
115
|
+
* installs, rating, author (string), authorId, version, installCommand,
|
|
116
|
+
* updatedAt, category
|
|
117
|
+
*
|
|
111
118
|
* @param {string} query
|
|
112
119
|
* @param {object} [opts] - { type, limit }
|
|
113
|
-
* @returns {Promise<object>}
|
|
120
|
+
* @returns {Promise<object>} V1 search response
|
|
114
121
|
*/
|
|
115
122
|
async function searchAssets(query, opts = {}) {
|
|
116
123
|
const params = { q: query, limit: opts.limit || 20 };
|
|
117
124
|
if (opts.type) params.type = opts.type;
|
|
118
|
-
return get('/api/
|
|
125
|
+
return get('/api/v1/search', params);
|
|
119
126
|
}
|
|
120
127
|
|
|
121
128
|
/**
|
|
122
|
-
* Find an asset by type and slug (with optional author filter)
|
|
129
|
+
* Find an asset by type and slug (with optional author filter).
|
|
130
|
+
* Uses V1 list endpoint for lightweight data.
|
|
131
|
+
*
|
|
123
132
|
* @param {string} type
|
|
124
133
|
* @param {string} slug
|
|
125
|
-
* @param {string} [authorFilter]
|
|
134
|
+
* @param {string} [authorFilter] - author ID or author name to filter by
|
|
126
135
|
* @returns {Promise<object|null>}
|
|
127
136
|
*/
|
|
128
137
|
async function findAsset(type, slug, authorFilter) {
|
|
129
|
-
const result = await get('/api/assets', { q: slug, limit: 50 });
|
|
130
|
-
const assets = result?.
|
|
138
|
+
const result = await get('/api/v1/assets', { q: slug, type, limit: 50 });
|
|
139
|
+
const assets = result?.items || [];
|
|
131
140
|
|
|
132
|
-
// Exact match on
|
|
133
|
-
let matches = assets.filter(a => a.
|
|
141
|
+
// Exact match on name
|
|
142
|
+
let matches = assets.filter(a => a.name === slug);
|
|
134
143
|
if (authorFilter) {
|
|
135
|
-
|
|
144
|
+
// authorFilter could be an authorId or author name
|
|
145
|
+
const authorMatches = matches.filter(a =>
|
|
146
|
+
a.authorId === authorFilter || a.author === authorFilter
|
|
147
|
+
);
|
|
136
148
|
if (authorMatches.length > 0) matches = authorMatches;
|
|
137
149
|
}
|
|
138
150
|
|
|
139
|
-
// Fallback: partial match
|
|
151
|
+
// Fallback: partial match on name
|
|
140
152
|
if (matches.length === 0) {
|
|
141
|
-
matches = assets.filter(a => a.
|
|
153
|
+
matches = assets.filter(a => a.name.includes(slug));
|
|
142
154
|
if (authorFilter) {
|
|
143
|
-
const authorMatches = matches.filter(a =>
|
|
155
|
+
const authorMatches = matches.filter(a =>
|
|
156
|
+
a.authorId === authorFilter || a.author === authorFilter
|
|
157
|
+
);
|
|
144
158
|
if (authorMatches.length > 0) matches = authorMatches;
|
|
145
159
|
}
|
|
146
160
|
}
|
|
@@ -148,10 +162,26 @@ async function findAsset(type, slug, authorFilter) {
|
|
|
148
162
|
if (matches.length === 0) return null;
|
|
149
163
|
|
|
150
164
|
// Prefer the one with an author ID
|
|
151
|
-
matches.sort((a, b) => (b.
|
|
165
|
+
matches.sort((a, b) => (b.authorId || '').localeCompare(a.authorId || ''));
|
|
152
166
|
return matches[0];
|
|
153
167
|
}
|
|
154
168
|
|
|
169
|
+
/**
 * Fetch full asset detail (readme, files, versions) by ID from the V1 API.
 *
 * @param {string} id - Asset ID
 * @returns {Promise<object|null>} the asset detail, or null when not found
 */
async function getAssetById(id) {
  try {
    const detail = await get(`/api/v1/assets/${id}`);
    return detail;
  } catch (e) {
    // A 404 from the API means "no such asset" — report null, not a crash.
    if (e.message.includes('404')) {
      return null;
    }
    throw e;
  }
}
|
|
184
|
+
|
|
155
185
|
/**
|
|
156
186
|
* Make a DELETE request to the API
|
|
157
187
|
* @param {string} apiPath
|
|
@@ -178,18 +208,21 @@ async function del(apiPath, body) {
|
|
|
178
208
|
/**
|
|
179
209
|
* Resolve an asset reference to a full asset object.
|
|
180
210
|
* Accepts:
|
|
181
|
-
* - Direct ID: "tr-fc617094de29f938"
|
|
211
|
+
* - Direct ID: "s-abc123", "tr-fc617094de29f938"
|
|
182
212
|
* - type/@author/slug: "trigger/@xiaoyue/pdf-watcher"
|
|
213
|
+
*
|
|
214
|
+
* Uses V1 API: GET /api/v1/assets/:id for ID lookups,
|
|
215
|
+
* findAsset() (V1 search) for type/slug lookups.
|
|
216
|
+
*
|
|
183
217
|
* @param {string} ref
|
|
184
218
|
* @returns {Promise<object>} asset object with at least { id, name, ... }
|
|
185
219
|
*/
|
|
186
220
|
async function resolveAssetRef(ref) {
|
|
187
|
-
// Direct ID pattern:
|
|
188
|
-
if (/^
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
return result?.data?.asset || result?.data || result;
|
|
221
|
+
// Direct ID pattern: prefix + dash + hex
|
|
222
|
+
if (/^[a-z]+-[0-9a-f]{8,}$/.test(ref)) {
|
|
223
|
+
const result = await getAssetById(ref);
|
|
224
|
+
if (!result) throw new Error(`Asset not found: ${ref}`);
|
|
225
|
+
return result;
|
|
193
226
|
}
|
|
194
227
|
|
|
195
228
|
// type/@author/slug format
|
|
@@ -223,5 +256,6 @@ module.exports = {
|
|
|
223
256
|
download,
|
|
224
257
|
searchAssets,
|
|
225
258
|
findAsset,
|
|
259
|
+
getAssetById,
|
|
226
260
|
resolveAssetRef,
|
|
227
261
|
};
|
package/lib/archive.js
ADDED
|
@@ -0,0 +1,322 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const zlib = require('zlib');
|
|
6
|
+
|
|
7
|
+
// ---------------------------------------------------------------------------
// Archive constants and low-level tar header field writers.
// ---------------------------------------------------------------------------

// POSIX tar works in fixed 512-byte blocks; an archive ends with two zero blocks.
const TAR_BLOCK_SIZE = 512;
const TAR_END_BLOCKS = Buffer.alloc(TAR_BLOCK_SIZE * 2, 0);

// Zip record signatures ("PK\x03\x04", "PK\x01\x02", "PK\x05\x06"), little-endian.
const ZIP_LOCAL_FILE_HEADER = 0x04034b50;
const ZIP_CENTRAL_DIRECTORY_HEADER = 0x02014b50;
const ZIP_END_OF_CENTRAL_DIRECTORY = 0x06054b50;

/**
 * Convert an OS-specific path to forward-slash form for use inside archives.
 * @param {string} filePath
 * @returns {string}
 */
function normalizeArchivePath(filePath) {
  if (path.sep === '/') return filePath;
  return filePath.split(path.sep).join('/');
}

/**
 * Prefix an archive path with "./" (the tar convention used by this module).
 * @param {string} filePath
 * @returns {string}
 */
function withDotPrefix(filePath) {
  const slashPath = normalizeArchivePath(filePath);
  if (!slashPath || slashPath === '.') {
    return './';
  }
  if (slashPath.startsWith('./')) {
    return slashPath;
  }
  return `./${slashPath}`;
}

/**
 * Permission bits (including setuid/setgid/sticky) from an fs.Stats mode.
 * @param {fs.Stats} stat
 * @returns {number}
 */
function posixModeForStat(stat) {
  return stat.mode & 0o7777;
}

/**
 * Copy a UTF-8 string into a fixed-width header field, truncating if needed.
 * Bytes past the string remain whatever the buffer already held.
 */
function writeString(buffer, value, offset, length) {
  const bytes = Buffer.from(value, 'utf8');
  const count = Math.min(bytes.length, length);
  bytes.copy(buffer, offset, 0, count);
}

/**
 * Write a numeric header field as zero-padded octal, NUL+space terminated
 * (the classic tar field encoding). Negative values are clamped to 0.
 */
function writeOctal(buffer, value, offset, length) {
  const digits = Math.max(0, value).toString(8).padStart(length - 2, '0');
  writeString(buffer, `${digits}\0 `, offset, length);
}

/**
 * Fill the tar checksum field (bytes 148-155) with spaces, as the format
 * requires while the header checksum is being computed.
 */
function writeChecksumPlaceholder(buffer) {
  buffer.fill(0x20, 148, 156);
}

/**
 * Sum every byte of the header and store the result as octal at offset 148.
 * Must be called after all other fields (including the space placeholder)
 * are in place.
 */
function finalizeChecksum(buffer) {
  let total = 0;
  for (let i = 0; i < buffer.length; i++) {
    total += buffer[i];
  }
  writeString(buffer, `${total.toString(8).padStart(6, '0')}\0 `, 148, 8);
}
|
|
48
|
+
|
|
49
|
+
/**
 * Build one 512-byte ustar header block for an archive entry.
 *
 * Names longer than 100 bytes are split at a "/" into the ustar name
 * (<=100 bytes) and prefix (<=155 bytes) fields; paths that cannot be split
 * that way are rejected rather than silently truncated.
 *
 * @param {string} name - archive path of the entry (e.g. "./dir/file")
 * @param {fs.Stats} stat - source stats (mode, size, mtime are used)
 * @param {string} typeflag - '0' for a regular file, '5' for a directory
 * @returns {Buffer} a 512-byte header block with a valid checksum
 * @throws {Error} when the path cannot fit the ustar name+prefix fields
 */
function buildTarHeader(name, stat, typeflag) {
  const header = Buffer.alloc(TAR_BLOCK_SIZE, 0);
  const normalizedName = normalizeArchivePath(name);
  // Candidate split point: last "/" within the first 101 characters.
  const prefixCut = normalizedName.length > 100 ? normalizedName.lastIndexOf('/', 100) : -1;
  let entryName = normalizedName;
  let prefix = '';

  if (normalizedName.length > 100) {
    // The tail after the cut must fit the 100-byte name field and the leading
    // part must fit the 155-byte prefix field; otherwise the path is unusable.
    if (prefixCut <= 0 || normalizedName.length - prefixCut - 1 > 100 || prefixCut > 155) {
      throw new Error(`Path too long for tar header: ${normalizedName}`);
    }
    prefix = normalizedName.slice(0, prefixCut);
    entryName = normalizedName.slice(prefixCut + 1);
  }

  // ustar field layout: name@0, mode@100, uid@108, gid@116, size@124,
  // mtime@136, checksum@148, typeflag@156, magic@257, version@263, prefix@345.
  writeString(header, entryName, 0, 100);
  writeOctal(header, posixModeForStat(stat), 100, 8);
  writeOctal(header, 0, 108, 8); // uid: always 0
  writeOctal(header, 0, 116, 8); // gid: always 0
  writeOctal(header, typeflag === '5' ? 0 : stat.size, 124, 12); // directories record size 0
  writeOctal(header, Math.floor(stat.mtimeMs / 1000), 136, 12); // mtime in whole seconds
  writeChecksumPlaceholder(header);
  writeString(header, typeflag, 156, 1);
  writeString(header, 'ustar', 257, 6);
  writeString(header, '00', 263, 2);
  if (prefix) writeString(header, prefix, 345, 155);
  finalizeChecksum(header); // must run last: the checksum covers the whole block
  return header;
}
|
|
78
|
+
|
|
79
|
+
/**
 * Recursively list the files and directories beneath `rootDir`, depth-first,
 * children sorted by name for deterministic archive output.
 *
 * Symlinks and other special entries are skipped (lstat, so links are not
 * followed).
 *
 * @param {string} rootDir - tree root; entry paths are relative to it
 * @param {string} [currentDir] - directory being walked (internal recursion)
 * @param {Array} [entries] - accumulator (internal recursion)
 * @returns {Array<{path: string, stat: fs.Stats, typeflag: string}>}
 */
function collectEntries(rootDir, currentDir = rootDir, entries = []) {
  const dirents = fs
    .readdirSync(currentDir, { withFileTypes: true })
    .sort((left, right) => left.name.localeCompare(right.name));

  for (const dirent of dirents) {
    const absolute = path.join(currentDir, dirent.name);
    const relative = path.relative(rootDir, absolute);
    const stat = fs.lstatSync(absolute);

    if (stat.isDirectory()) {
      entries.push({ path: relative, stat, typeflag: '5' });
      collectEntries(rootDir, absolute, entries);
    } else if (stat.isFile()) {
      entries.push({ path: relative, stat, typeflag: '0' });
    }
  }

  return entries;
}
|
|
98
|
+
|
|
99
|
+
/**
 * Serialize a directory tree into an in-memory POSIX (ustar) tar archive.
 * Entries are rooted at "./" and file data is padded to 512-byte blocks;
 * the archive ends with the standard two zero blocks.
 *
 * @param {string} sourceDir
 * @returns {Buffer} complete tar archive
 */
function createTarBuffer(sourceDir) {
  const pieces = [buildTarHeader('./', fs.lstatSync(sourceDir), '5')];

  for (const entry of collectEntries(sourceDir)) {
    const dotted = withDotPrefix(entry.path);
    const isDir = entry.typeflag === '5';
    pieces.push(buildTarHeader(isDir ? `${dotted}/` : dotted, entry.stat, entry.typeflag));

    if (!isDir) {
      const data = fs.readFileSync(path.join(sourceDir, entry.path));
      pieces.push(data);
      const tail = data.length % TAR_BLOCK_SIZE;
      if (tail !== 0) {
        pieces.push(Buffer.alloc(TAR_BLOCK_SIZE - tail, 0));
      }
    }
  }

  pieces.push(TAR_END_BLOCKS);
  return Buffer.concat(pieces);
}
|
|
124
|
+
|
|
125
|
+
/**
 * Pack `sourceDir` into a gzip-compressed tar archive written to `targetFile`.
 * @param {string} sourceDir - directory whose contents are archived
 * @param {string} targetFile - destination .tar.gz path (overwritten)
 */
function createTarGzFromDirectory(sourceDir, targetFile) {
  fs.writeFileSync(targetFile, zlib.gzipSync(createTarBuffer(sourceDir)));
}
|
|
130
|
+
|
|
131
|
+
/**
 * Heuristic tar detection: a ustar archive carries the magic string "ustar"
 * at byte offset 257 of its first header block.
 * @param {Buffer} buffer
 * @returns {boolean}
 */
function isTarBuffer(buffer) {
  return buffer.length >= 265 && buffer.toString('utf8', 257, 262) === 'ustar';
}

/**
 * Split an archive member path into its meaningful components, dropping empty
 * segments and "." entries. Handles both "/" and the OS path separator.
 * @param {string} filePath
 * @returns {string[]}
 */
function pathSegments(filePath) {
  const slashPath = filePath.split(path.sep).join('/');
  return slashPath.split('/').filter(part => part !== '' && part !== '.');
}
|
|
140
|
+
|
|
141
|
+
/**
 * If every entry lives under a single top-level directory (the typical
 * "pkg-1.0/..." tarball layout), drop that directory from all entry paths.
 * Entries are returned unchanged (same array) when there is no unambiguous
 * wrapping directory.
 *
 * @param {Array<{name: string, segments: string[], type: string}>} entries
 * @returns {Array} entries, possibly re-rooted (shallow copies when stripped)
 */
function stripCommonRoot(entries) {
  const named = entries.filter(entry => entry.name && entry.segments.length > 0);
  if (named.length === 0) return entries;

  const root = named[0].segments[0];
  if (!root) return entries;

  const allShareRoot = named.every(entry => entry.segments[0] === root);
  if (!allShareRoot) return entries;

  // A non-directory entry sitting directly at the top level means the "root"
  // is a file, not a wrapping directory — nothing to strip.
  const rootIsFile = named.some(entry => entry.segments.length === 1 && entry.type !== 'directory');
  if (rootIsFile) return entries;

  return entries.map(entry => ({ ...entry, segments: entry.segments.slice(1) }));
}
|
|
157
|
+
|
|
158
|
+
/**
 * Resolve archive path segments against `targetDir` and reject any entry
 * that would escape it (zip-slip / "../" traversal guard).
 *
 * @param {string} targetDir
 * @param {string[]} segments - archive-relative path components
 * @returns {string} absolute destination path inside targetDir
 * @throws {Error} when the resolved path falls outside targetDir
 */
function ensureWithinTarget(targetDir, segments) {
  const root = path.resolve(targetDir);
  const destination = path.resolve(root, ...segments);
  const inside = destination === root || destination.startsWith(root + path.sep);
  if (!inside) {
    throw new Error(`Archive entry escapes target directory: ${segments.join('/')}`);
  }
  return destination;
}
|
|
166
|
+
|
|
167
|
+
/**
 * Best-effort chmod of an extracted entry. Skipped entirely on Windows;
 * failures are ignored so extraction never dies on permission quirks.
 *
 * @param {string} destination - path of the extracted file or directory
 * @param {number} mode - mode bits from the archive (0/falsy → use fallback)
 * @param {boolean} isDirectory - selects the 0755/0644 fallback
 */
function applyMode(destination, mode, isDirectory) {
  if (process.platform === 'win32') return;
  const defaultMode = isDirectory ? 0o755 : 0o644;
  try {
    fs.chmodSync(destination, mode || defaultMode);
  } catch {
    // Intentionally ignored: archive modes are advisory for extracted files.
  }
}
|
|
174
|
+
|
|
175
|
+
/**
 * Materialize parsed archive entries under `targetDir`.
 *
 * Strips a single common root directory (npm-style tarballs), guards every
 * destination against path traversal, and restores modes best-effort.
 *
 * @param {Array<{segments: string[], type: string, mode: number, content?: Buffer}>} entries
 * @param {string} targetDir - created (recursively) if missing
 * @returns {boolean} true when at least one entry was written
 */
function extractEntries(entries, targetDir) {
  fs.mkdirSync(targetDir, { recursive: true });
  const usable = stripCommonRoot(entries).filter(entry => entry.segments.length > 0);

  for (const entry of usable) {
    const destination = ensureWithinTarget(targetDir, entry.segments);

    if (entry.type === 'directory') {
      fs.mkdirSync(destination, { recursive: true });
      applyMode(destination, entry.mode, true);
    } else {
      fs.mkdirSync(path.dirname(destination), { recursive: true });
      fs.writeFileSync(destination, entry.content);
      applyMode(destination, entry.mode, false);
    }
  }

  return usable.length > 0;
}
|
|
194
|
+
|
|
195
|
+
/**
 * Parse a NUL/space-padded octal field from a tar header.
 *
 * @param {Buffer} buffer
 * @param {number} offset - start of the field
 * @param {number} length - field width in bytes
 * @returns {number} parsed value, or 0 for an empty field
 */
function parseTarOctal(buffer, offset, length) {
  const field = buffer.toString('utf8', offset, offset + length);
  const text = field.replace(/\0.*$/, '').trim();
  if (!text) return 0;
  return parseInt(text, 8);
}
|
|
199
|
+
|
|
200
|
+
/**
 * Parse a tar buffer and extract its file and directory entries into
 * targetDir.
 *
 * Only regular files (typeflag '0' or NUL) and directories ('5') are
 * materialized; other entry types (symlinks, GNU long-name records, …) are
 * skipped. Extraction itself (traversal guard, root stripping, modes) is
 * delegated to extractEntries().
 *
 * @param {Buffer} buffer - raw (already-gunzipped) tar data
 * @param {string} targetDir
 * @returns {boolean} true when at least one entry was extracted
 */
function extractTar(buffer, targetDir) {
  const entries = [];
  let offset = 0;

  while (offset + TAR_BLOCK_SIZE <= buffer.length) {
    const header = buffer.subarray(offset, offset + TAR_BLOCK_SIZE);
    // An all-zero block marks the end of the archive.
    if (header.every(byte => byte === 0)) break;

    const name = header.toString('utf8', 0, 100).replace(/\0.*$/, '');
    // ustar prefix field: 155 bytes at offset 345 (345 + 155 = 500).
    const prefix = header.toString('utf8', 345, 500).replace(/\0.*$/, '');
    const fullName = prefix ? `${prefix}/${name}` : name;
    // A NUL typeflag byte survives toString as '\0' (truthy), so the || '0'
    // fallback only fires for an empty slice.
    const typeflag = header.toString('utf8', 156, 157) || '0';
    const size = parseTarOctal(header, 124, 12);
    const mode = parseTarOctal(header, 100, 8);
    offset += TAR_BLOCK_SIZE;

    const content = buffer.subarray(offset, offset + size);
    const segments = pathSegments(fullName);
    if (typeflag === '5') {
      entries.push({ name: fullName, segments, type: 'directory', mode });
    } else if (typeflag === '0' || typeflag === '\0') {
      // Copy out of the shared subarray so each entry owns its data.
      entries.push({ name: fullName, segments, type: 'file', mode, content: Buffer.from(content) });
    }

    // File data is padded up to the next 512-byte block boundary.
    offset += Math.ceil(size / TAR_BLOCK_SIZE) * TAR_BLOCK_SIZE;
  }

  return extractEntries(entries, targetDir);
}
|
|
229
|
+
|
|
230
|
+
/**
 * Locate the zip End Of Central Directory (EOCD) record by scanning backward
 * from the end of the buffer. The EOCD sits at most 22 + 65535 bytes before
 * the end of the file (fixed 22-byte record plus a variable-length comment).
 *
 * @param {Buffer} buffer
 * @returns {number} byte offset of the EOCD signature
 * @throws {Error} when the buffer is too small or contains no EOCD record
 */
function locateZipEnd(buffer) {
  // A valid zip must at least hold the fixed-size EOCD record. Without this
  // guard, `buffer.length - 22` goes negative for tiny buffers and
  // readUInt32LE throws a RangeError instead of the intended zip error.
  if (buffer.length < 22) {
    throw new Error('Invalid zip: end of central directory not found');
  }
  const minimumOffset = Math.max(0, buffer.length - 0xffff - 22);
  for (let offset = buffer.length - 22; offset >= minimumOffset; offset--) {
    if (buffer.readUInt32LE(offset) === ZIP_END_OF_CENTRAL_DIRECTORY) {
      return offset;
    }
  }
  throw new Error('Invalid zip: end of central directory not found');
}
|
|
239
|
+
|
|
240
|
+
/**
 * Extract a zip archive buffer into targetDir.
 *
 * Walks the central directory (located via the EOCD record), reads each
 * member's data through its local file header, and supports only STORE (0)
 * and DEFLATE (8) compression. Uses 32-bit offsets/sizes only — no zip64.
 * Extraction itself (traversal guard, root stripping, modes) is delegated
 * to extractEntries().
 *
 * @param {Buffer} buffer
 * @param {string} targetDir
 * @returns {boolean} true when at least one entry was extracted
 * @throws {Error} on malformed records or unsupported compression methods
 */
function extractZip(buffer, targetDir) {
  const eocdOffset = locateZipEnd(buffer);
  const centralDirectorySize = buffer.readUInt32LE(eocdOffset + 12);
  const centralDirectoryOffset = buffer.readUInt32LE(eocdOffset + 16);
  const entries = [];

  let offset = centralDirectoryOffset;
  const end = centralDirectoryOffset + centralDirectorySize;

  while (offset < end) {
    if (buffer.readUInt32LE(offset) !== ZIP_CENTRAL_DIRECTORY_HEADER) {
      throw new Error('Invalid zip: malformed central directory');
    }

    // Central directory record fields (offsets relative to the signature).
    const compressionMethod = buffer.readUInt16LE(offset + 10);
    const compressedSize = buffer.readUInt32LE(offset + 20);
    const uncompressedSize = buffer.readUInt32LE(offset + 24);
    const fileNameLength = buffer.readUInt16LE(offset + 28);
    const extraLength = buffer.readUInt16LE(offset + 30);
    const commentLength = buffer.readUInt16LE(offset + 32);
    const externalAttributes = buffer.readUInt32LE(offset + 38);
    const localHeaderOffset = buffer.readUInt32LE(offset + 42);
    const fileName = buffer.toString('utf8', offset + 46, offset + 46 + fileNameLength);
    // Zip convention: directory entries end with "/".
    const isDirectory = fileName.endsWith('/');
    // Unix permission bits live in the high 16 bits of the external attributes.
    const mode = (externalAttributes >>> 16) & 0xffff;

    if (buffer.readUInt32LE(localHeaderOffset) !== ZIP_LOCAL_FILE_HEADER) {
      throw new Error('Invalid zip: missing local file header');
    }

    // The local header's own name/extra lengths may differ from the central
    // directory's, so re-read them to find where the member data starts.
    const localNameLength = buffer.readUInt16LE(localHeaderOffset + 26);
    const localExtraLength = buffer.readUInt16LE(localHeaderOffset + 28);
    const dataOffset = localHeaderOffset + 30 + localNameLength + localExtraLength;
    const compressedData = buffer.subarray(dataOffset, dataOffset + compressedSize);

    let content = Buffer.alloc(0);
    if (!isDirectory) {
      if (compressionMethod === 0) {
        // Method 0: stored (no compression) — copy out of the shared buffer.
        content = Buffer.from(compressedData);
      } else if (compressionMethod === 8) {
        // Method 8: raw DEFLATE stream (no zlib wrapper).
        content = zlib.inflateRawSync(compressedData);
      } else {
        throw new Error(`Unsupported zip compression method: ${compressionMethod}`);
      }

      if (content.length !== uncompressedSize) {
        throw new Error('Invalid zip: uncompressed size mismatch');
      }
    }

    entries.push({
      name: fileName,
      segments: pathSegments(fileName),
      type: isDirectory ? 'directory' : 'file',
      mode,
      content,
    });

    // Advance past this record: 46 fixed bytes + the three variable fields.
    offset += 46 + fileNameLength + extraLength + commentLength;
  }

  return extractEntries(entries, targetDir);
}
|
|
303
|
+
|
|
304
|
+
/**
 * Extract a downloaded package buffer into `targetDir`, auto-detecting the
 * format: zip (by local-file-header signature), gzipped tar, or bare tar.
 *
 * @param {Buffer} buffer
 * @param {string} targetDir
 * @returns {boolean} true when extraction produced at least one entry
 */
function extractPackage(buffer, targetDir) {
  if (buffer.length >= 4 && buffer.readUInt32LE(0) === ZIP_LOCAL_FILE_HEADER) {
    return extractZip(buffer, targetDir);
  }

  // Not a zip: assume tar, transparently gunzipping when possible.
  let tarBuffer = buffer;
  try {
    tarBuffer = zlib.gunzipSync(buffer);
  } catch {
    // Not gzip-compressed — treat the buffer as a bare tar archive.
  }

  return isTarBuffer(tarBuffer) ? extractTar(tarBuffer, targetDir) : false;
}
|
|
318
|
+
|
|
319
|
+
module.exports = {
|
|
320
|
+
createTarGzFromDirectory,
|
|
321
|
+
extractPackage,
|
|
322
|
+
};
|
package/lib/commands/info.js
CHANGED
|
@@ -27,8 +27,9 @@ async function run(args) {
|
|
|
27
27
|
process.exit(1);
|
|
28
28
|
}
|
|
29
29
|
|
|
30
|
-
|
|
31
|
-
const
|
|
30
|
+
// V1 AssetCompact: author is a string, authorId is separate
|
|
31
|
+
const authorName = asset.author || 'unknown';
|
|
32
|
+
const authorId = asset.authorId || '';
|
|
32
33
|
const tags = (asset.tags || []).join(', ');
|
|
33
34
|
|
|
34
35
|
console.log('');
|
|
@@ -38,8 +39,7 @@ async function run(args) {
|
|
|
38
39
|
console.log(` Package: ${asset.name}`);
|
|
39
40
|
console.log(` Version: ${asset.version}`);
|
|
40
41
|
console.log(` Author: ${c('cyan', authorName)} ${c('dim', `(${authorId})`)}`);
|
|
41
|
-
console.log(`
|
|
42
|
-
console.log(` Downloads: ${asset.downloads || 0}`);
|
|
42
|
+
console.log(` Installs: ${asset.installs || 0}`);
|
|
43
43
|
if (tags) {
|
|
44
44
|
console.log(` Tags: ${tags}`);
|
|
45
45
|
}
|
package/lib/commands/install.js
CHANGED
|
@@ -6,8 +6,8 @@
|
|
|
6
6
|
|
|
7
7
|
const fs = require('fs');
|
|
8
8
|
const path = require('path');
|
|
9
|
-
const { execSync } = require('child_process');
|
|
10
9
|
const api = require('../api.js');
|
|
10
|
+
const { extractPackage } = require('../archive.js');
|
|
11
11
|
const config = require('../config.js');
|
|
12
12
|
const { fish, info, ok, warn, err, c, detail } = require('../ui.js');
|
|
13
13
|
|
|
@@ -42,48 +42,6 @@ function parseSpec(spec) {
|
|
|
42
42
|
return { type, slug, authorFilter };
|
|
43
43
|
}
|
|
44
44
|
|
|
45
|
-
/**
|
|
46
|
-
* Extract a tar.gz or zip buffer to a directory
|
|
47
|
-
*/
|
|
48
|
-
function extractPackage(buffer, targetDir) {
|
|
49
|
-
const tmpFile = path.join(require('os').tmpdir(), `openclawmp-pkg-${process.pid}-${Date.now()}`);
|
|
50
|
-
fs.writeFileSync(tmpFile, buffer);
|
|
51
|
-
|
|
52
|
-
try {
|
|
53
|
-
// Try tar first
|
|
54
|
-
try {
|
|
55
|
-
execSync(`tar xzf "${tmpFile}" -C "${targetDir}" --strip-components=1 2>/dev/null`, { stdio: 'pipe' });
|
|
56
|
-
return true;
|
|
57
|
-
} catch {
|
|
58
|
-
// Try without --strip-components
|
|
59
|
-
try {
|
|
60
|
-
execSync(`tar xzf "${tmpFile}" -C "${targetDir}" 2>/dev/null`, { stdio: 'pipe' });
|
|
61
|
-
return true;
|
|
62
|
-
} catch {
|
|
63
|
-
// Try unzip
|
|
64
|
-
try {
|
|
65
|
-
execSync(`unzip -o -q "${tmpFile}" -d "${targetDir}" 2>/dev/null`, { stdio: 'pipe' });
|
|
66
|
-
// If single subdirectory, move contents up
|
|
67
|
-
const entries = fs.readdirSync(targetDir);
|
|
68
|
-
const dirs = entries.filter(e => fs.statSync(path.join(targetDir, e)).isDirectory());
|
|
69
|
-
if (dirs.length === 1 && entries.length === 1) {
|
|
70
|
-
const subdir = path.join(targetDir, dirs[0]);
|
|
71
|
-
for (const f of fs.readdirSync(subdir)) {
|
|
72
|
-
fs.renameSync(path.join(subdir, f), path.join(targetDir, f));
|
|
73
|
-
}
|
|
74
|
-
fs.rmdirSync(subdir);
|
|
75
|
-
}
|
|
76
|
-
return true;
|
|
77
|
-
} catch {
|
|
78
|
-
return false;
|
|
79
|
-
}
|
|
80
|
-
}
|
|
81
|
-
}
|
|
82
|
-
} finally {
|
|
83
|
-
try { fs.unlinkSync(tmpFile); } catch {}
|
|
84
|
-
}
|
|
85
|
-
}
|
|
86
|
-
|
|
87
45
|
/**
|
|
88
46
|
* Count files recursively in a directory
|
|
89
47
|
*/
|
|
@@ -106,7 +64,8 @@ function writeManifest(asset, targetDir, hasPackage) {
|
|
|
106
64
|
name: asset.name,
|
|
107
65
|
displayName: asset.displayName || '',
|
|
108
66
|
version: asset.version,
|
|
109
|
-
author: asset.author,
|
|
67
|
+
author: asset.author || '',
|
|
68
|
+
authorId: asset.authorId || '',
|
|
110
69
|
description: asset.description || '',
|
|
111
70
|
tags: asset.tags || [],
|
|
112
71
|
category: asset.category || '',
|
|
@@ -167,8 +126,8 @@ async function run(args, flags) {
|
|
|
167
126
|
|
|
168
127
|
const displayName = asset.displayName || asset.name;
|
|
169
128
|
const version = asset.version;
|
|
170
|
-
const authorName = asset.author
|
|
171
|
-
const authorId = asset.
|
|
129
|
+
const authorName = asset.author || 'unknown';
|
|
130
|
+
const authorId = asset.authorId || '';
|
|
172
131
|
|
|
173
132
|
console.log(` ${c('bold', displayName)} ${c('dim', `v${version}`)}`);
|
|
174
133
|
console.log(` by ${c('cyan', authorName)} ${c('dim', `(${authorId})`)}`);
|
|
@@ -203,7 +162,7 @@ async function run(args, flags) {
|
|
|
203
162
|
|
|
204
163
|
// Try downloading the actual package
|
|
205
164
|
let hasPackage = false;
|
|
206
|
-
const pkgBuffer = await api.download(`/api/assets/${asset.id}/download`);
|
|
165
|
+
const pkgBuffer = await api.download(`/api/v1/assets/${asset.id}/download`);
|
|
207
166
|
|
|
208
167
|
if (pkgBuffer && pkgBuffer.length > 0) {
|
|
209
168
|
info('📦 Downloading package from registry...');
|
package/lib/commands/publish.js
CHANGED
|
@@ -6,8 +6,8 @@
|
|
|
6
6
|
|
|
7
7
|
const fs = require('fs');
|
|
8
8
|
const path = require('path');
|
|
9
|
-
const { execSync } = require('child_process');
|
|
10
9
|
const api = require('../api.js');
|
|
10
|
+
const { createTarGzFromDirectory } = require('../archive.js');
|
|
11
11
|
const config = require('../config.js');
|
|
12
12
|
const { fish, info, ok, warn, err, c, detail } = require('../ui.js');
|
|
13
13
|
|
|
@@ -268,7 +268,7 @@ async function run(args, flags) {
|
|
|
268
268
|
// Create tarball
|
|
269
269
|
const tarball = path.join(require('os').tmpdir(), `openclawmp-publish-${Date.now()}.tar.gz`);
|
|
270
270
|
try {
|
|
271
|
-
|
|
271
|
+
createTarGzFromDirectory(skillDir, tarball);
|
|
272
272
|
} catch (e) {
|
|
273
273
|
err('Failed to create package tarball');
|
|
274
274
|
process.exit(1);
|
package/lib/commands/search.js
CHANGED
|
@@ -18,8 +18,8 @@ async function run(args) {
|
|
|
18
18
|
console.log('');
|
|
19
19
|
|
|
20
20
|
const result = await api.searchAssets(query);
|
|
21
|
-
const assets = result?.
|
|
22
|
-
const total = result?.
|
|
21
|
+
const assets = result?.items || [];
|
|
22
|
+
const total = result?.total || 0;
|
|
23
23
|
|
|
24
24
|
if (assets.length === 0) {
|
|
25
25
|
console.log(' No results found.');
|
|
@@ -31,12 +31,12 @@ async function run(args) {
|
|
|
31
31
|
|
|
32
32
|
for (const a of assets) {
|
|
33
33
|
const icon = typeIcon(a.type);
|
|
34
|
-
const
|
|
35
|
-
const author = a.author
|
|
36
|
-
const authorId = a.
|
|
34
|
+
const installs = a.installs || 0;
|
|
35
|
+
const author = a.author || 'unknown';
|
|
36
|
+
const authorId = a.authorId || 'unknown';
|
|
37
37
|
|
|
38
38
|
console.log(` ${icon} ${c('bold', a.displayName)}`);
|
|
39
|
-
console.log(` ${a.type}/@${authorId}/${a.name} • v${a.version} • by ${c('cyan', author)} •
|
|
39
|
+
console.log(` ${a.type}/@${authorId}/${a.name} • v${a.version} • by ${c('cyan', author)} • Installs: ${installs}`);
|
|
40
40
|
|
|
41
41
|
const desc = (a.description || '').slice(0, 80);
|
|
42
42
|
if (desc) {
|