cognitive-modules-cli 2.2.5 → 2.2.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -88,14 +88,19 @@ export interface RegistryEntryV2 {
     };
     dependencies: {
         runtime_min: string;
-        modules: string[];
+        modules: Array<{
+            name: string;
+            version?: string;
+            optional?: boolean;
+        }>;
     };
     distribution: {
-        tarball?: string;
-        checksum?: string;
+        tarball: string;
+        checksum: string;
         size_bytes?: number;
         files?: string[];
-        source?: string;
+        signature?: string;
+        signing_key?: string;
     };
     timestamps?: {
         created_at?: string;
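
With 2.2.7, a registry dependency becomes a structured object rather than a bare module name, `tarball` and `checksum` become required in `distribution`, and the legacy `source` field is replaced by optional `signature`/`signing_key`. A minimal sketch of an entry fragment conforming to the new shape (all names and values here are hypothetical, not taken from the real registry):

    const entryFragment = {
        dependencies: {
            runtime_min: '2.2.0',
            modules: [
                { name: 'core-utils' },                                   // required, any version
                { name: 'telemetry', version: '1.4.0', optional: true },
            ],
        },
        distribution: {
            tarball: 'https://example.com/modules/core-utils.tar.gz',     // now required
            checksum: '9f86d081884c7d65...',                              // now required (placeholder digest)
            size_bytes: 10240,
            signature: 'base64-signature-blob',                           // new, optional
            signing_key: 'key-id-or-fingerprint',                         // new, optional
        },
    };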
@@ -110,6 +115,8 @@ export interface ModuleInfo {
     description: string;
     author: string;
     source: string;
+    tarball?: string;
+    checksum?: string;
     keywords: string[];
     tier?: string;
     namespace?: string;
@@ -189,6 +196,7 @@ export declare class RegistryClient {
     */
    getDownloadUrl(moduleName: string): Promise<{
        url: string;
+       checksum?: string;
        isGitHub: boolean;
        githubInfo?: {
            org: string;
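
`getDownloadUrl` now surfaces the registry checksum alongside the URL, so callers can verify a download without re-reading the registry. Note that the implementation hunks below only plumb `checksum` through the direct-URL branch, so it may be absent for GitHub-hosted sources. A consumer sketch (the module name is hypothetical, construction is shown without arguments for brevity, and error handling is elided):

    const client = new RegistryClient();
    const { url, checksum, isGitHub, githubInfo } = await client.getDownloadUrl('core-utils');
    if (isGitHub && githubInfo) {
        // Resolve via githubInfo.org and the repo metadata.
    } else if (checksum) {
        // Direct HTTP(S) tarball: verify the downloaded bytes against `checksum`.
    }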
@@ -16,7 +16,7 @@ import { createHash } from 'node:crypto';
 // =============================================================================
 // Constants
 // =============================================================================
-const DEFAULT_REGISTRY_URL = 'https://raw.githubusercontent.com/ziel-io/cognitive-modules/main/cognitive-registry.json';
+const DEFAULT_REGISTRY_URL = 'https://raw.githubusercontent.com/Cognary/cognitive/main/cognitive-registry.v2.json';
 const CACHE_DIR = join(homedir(), '.cognitive', 'cache');
 const CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes
 const REGISTRY_FETCH_TIMEOUT_MS = 10_000; // 10s
@@ -172,7 +172,9 @@ export class RegistryClient {
            version: v2.identity.version,
            description: v2.metadata.description,
            author: v2.metadata.author,
-           source: v2.distribution.source || v2.distribution.tarball || '',
+           source: v2.distribution.tarball,
+           tarball: v2.distribution.tarball,
+           checksum: v2.distribution.checksum,
            keywords: v2.metadata.keywords || [],
            tier: v2.metadata.tier,
            namespace: v2.identity.namespace,
@@ -335,6 +337,7 @@ export class RegistryClient {
        if (source.startsWith('http://') || source.startsWith('https://')) {
            return {
                url: source,
+               checksum: module.checksum,
                isGitHub: false,
            };
        }
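
Only the direct HTTP(S) branch gains `checksum` here, matching the optional `checksum?` in the declaration above. Since the client file already imports createHash from node:crypto (see the @@ -16,7 hunk context), a caller can verify downloaded bytes along these lines; a sketch that assumes the checksum is a hex-encoded SHA-256 digest, which the diff itself does not confirm:

    import { createHash } from 'node:crypto';

    function verifyChecksum(data, expected) {
        // Assumption: hex SHA-256; adjust if the registry encodes checksums differently.
        const actual = createHash('sha256').update(data).digest('hex');
        if (actual !== expected) {
            throw new Error(`Checksum mismatch: expected ${expected}, got ${actual}`);
        }
    }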
@@ -0,0 +1,8 @@
+export interface ExtractTarGzOptions {
+    maxFiles?: number;
+    maxTotalBytes?: number;
+    maxSingleFileBytes?: number;
+    maxTarBytes?: number;
+}
+export declare function extractTarGzFile(tarGzPath: string, destDir: string, options?: ExtractTarGzOptions): Promise<string[]>;
+export declare function extractTarGzBuffer(gzBuffer: Buffer, destDir: string, options?: ExtractTarGzOptions): Promise<string[]>;
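
The new tar-extract module (typings above, implementation below) exposes two entry points whose safety limits are all caller-tunable. A usage sketch with hypothetical paths; when options are omitted, the defaults visible in the implementation apply (5,000 files, 50MB total, 20MB per file, 100MB decompressed tar):

    import { extractTarGzFile } from './tar-extract.js'; // hypothetical import path

    const files = await extractTarGzFile('/tmp/module.tar.gz', '/tmp/module', {
        maxFiles: 1_000,                  // cap on archive entries
        maxTotalBytes: 10 * 1024 * 1024,  // cap on total extracted bytes
    });
    console.log(`extracted ${files.length} files`);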
@@ -0,0 +1,353 @@
+import { mkdir } from 'node:fs/promises';
+import { createReadStream, createWriteStream } from 'node:fs';
+import { resolve, sep, dirname } from 'node:path';
+import { createGunzip } from 'node:zlib';
+import { Readable, Transform } from 'node:stream';
+import { finished } from 'node:stream/promises';
+function isPathWithinRoot(rootDir, targetPath) {
+    const root = resolve(rootDir);
+    const target = resolve(targetPath);
+    return target === root || target.startsWith(root + sep);
+}
+function parseOctal(buf) {
+    const raw = buf.toString('utf-8').replace(/\0/g, '').trim();
+    if (!raw)
+        return 0;
+    // Tar uses octal ASCII. Some implementations pad with spaces or NUL.
+    return parseInt(raw, 8);
+}
+function parseHeader(block) {
+    // End of archive: two consecutive zero blocks. Caller handles the second.
+    if (block.every((b) => b === 0)) {
+        return null;
+    }
+    const name = block.subarray(0, 100).toString('utf-8').replace(/\0/g, '');
+    const size = parseOctal(block.subarray(124, 136));
+    const typeflag = block.subarray(156, 157).toString('utf-8') || '\0';
+    const prefix = block.subarray(345, 500).toString('utf-8').replace(/\0/g, '');
+    return { name, size, typeflag, prefix };
+}
+function normalizeTarPath(path) {
+    const normalized = path.replace(/\\/g, '/');
+    if (!normalized || normalized.includes('\0')) {
+        throw new Error('Unsafe tar entry (empty or NUL)');
+    }
+    if (normalized.startsWith('/') || /^[a-zA-Z]:\//.test(normalized)) {
+        throw new Error(`Unsafe tar entry (absolute path): ${path}`);
+    }
+    const parts = normalized.split('/');
+    if (parts.includes('..')) {
+        throw new Error(`Unsafe tar entry (path traversal): ${path}`);
+    }
+    // Collapse `.` segments without allowing traversal.
+    const collapsed = parts.filter((p) => p !== '.' && p !== '').join('/');
+    if (!collapsed) {
+        throw new Error(`Unsafe tar entry (empty after normalize): ${path}`);
+    }
+    return collapsed;
+}
+function parsePaxAttributes(payload) {
+    // PAX format: "<len> <key>=<value>\n"
+    const text = payload.toString('utf-8');
+    const attrs = {};
+    let i = 0;
+    while (i < text.length) {
+        const space = text.indexOf(' ', i);
+        if (space === -1)
+            break;
+        const lenStr = text.slice(i, space);
+        const len = Number(lenStr);
+        if (!Number.isFinite(len) || len <= 0)
+            break;
+        const record = text.slice(i, i + len);
+        const eq = record.indexOf('=');
+        if (eq !== -1) {
+            const key = record.slice(record.indexOf(' ') + 1, eq).trim();
+            const value = record.slice(eq + 1).trimEnd();
+            // Strip the trailing newline if present.
+            attrs[key] = value.endsWith('\n') ? value.slice(0, -1) : value;
+        }
+        i += len;
+    }
+    return attrs;
+}
+function createByteLimitTransform(maxBytes) {
+    let seen = 0;
+    return new Transform({
+        transform(chunk, _encoding, callback) {
+            seen += chunk.length;
+            if (seen > maxBytes) {
+                callback(new Error(`Tar stream too large after decompression (max ${maxBytes} bytes)`));
+                return;
+            }
+            callback(null, chunk);
+        },
+    });
+}
+async function writeChunk(stream, chunk) {
+    if (!chunk.length)
+        return;
+    const ok = stream.write(chunk);
+    if (ok)
+        return;
+    await new Promise((resolveDrain, rejectDrain) => {
+        const onDrain = () => {
+            cleanup();
+            resolveDrain();
+        };
+        const onError = (err) => {
+            cleanup();
+            rejectDrain(err);
+        };
+        const cleanup = () => {
+            stream.off('drain', onDrain);
+            stream.off('error', onError);
+        };
+        stream.once('drain', onDrain);
+        stream.once('error', onError);
+    });
+}
+async function extractTarStream(tarStream, destDir, options = {}) {
+    const maxEntries = options.maxFiles ?? 5_000;
+    const maxTotalBytes = options.maxTotalBytes ?? 50 * 1024 * 1024; // 50MB extracted content cap
+    const maxSingleFileBytes = options.maxSingleFileBytes ?? 20 * 1024 * 1024; // 20MB per file cap
+    // Guard metadata record sizes so we don't buffer huge PAX/longname payloads into memory.
+    const MAX_META_BYTES = 1024 * 1024; // 1MB
+    let extractedBytes = 0;
+    let entriesSeen = 0;
+    const written = [];
+    let pendingPax = null;
+    let pendingLongName = null;
+    let buf = Buffer.alloc(0);
+    let pending = null;
+    let pendingPayload = Buffer.alloc(0); // only used for pax/longname/globalPax
+    let padRemaining = 0;
+    let ended = false;
+    const closePendingFile = async () => {
+        if (!pending || pending.kind !== 'file')
+            return;
+        const ws = pending.stream;
+        ws.end?.();
+        try {
+            await finished(ws);
+        }
+        catch {
+            // ignore (best-effort cleanup)
+        }
+    };
+    try {
+        for await (const chunk of tarStream) {
+            if (!chunk?.length)
+                continue;
+            buf = buf.length ? Buffer.concat([buf, chunk]) : Buffer.from(chunk);
+            while (true) {
+                if (ended)
+                    break;
+                // Drain padding to 512-byte boundary after each entry payload.
+                if (padRemaining > 0) {
+                    if (buf.length === 0)
+                        break;
+                    const take = Math.min(padRemaining, buf.length);
+                    buf = buf.subarray(take);
+                    padRemaining -= take;
+                    continue;
+                }
+                // If we are currently consuming an entry payload, continue that first.
+                if (pending) {
+                    if (pending.kind === 'file') {
+                        if (pending.remaining === 0) {
+                            // Zero-length file: create empty file and continue.
+                            await closePendingFile();
+                            written.push(pending.pathRel);
+                            pending = null;
+                            pendingPayload = Buffer.alloc(0);
+                            continue;
+                        }
+                        if (buf.length === 0)
+                            break;
+                        const take = Math.min(pending.remaining, buf.length);
+                        const slice = buf.subarray(0, take);
+                        await writeChunk(pending.stream, slice);
+                        pending.remaining -= take;
+                        buf = buf.subarray(take);
+                        if (pending.remaining > 0)
+                            continue;
+                        padRemaining = (512 - (pending.originalSize % 512)) % 512;
+                        extractedBytes += pending.originalSize;
+                        if (extractedBytes > maxTotalBytes) {
+                            throw new Error(`Tar extracted content too large (max ${maxTotalBytes} bytes)`);
+                        }
+                        await closePendingFile();
+                        written.push(pending.pathRel);
+                        pending = null;
+                        pendingPayload = Buffer.alloc(0);
+                        continue;
+                    }
+                    // Non-file payloads (PAX/longname/globalPax/skip).
+                    if (pending.remaining === 0) {
+                        padRemaining = (512 - (pending.originalSize % 512)) % 512;
+                        if (pending.kind === 'pax') {
+                            pendingPax = parsePaxAttributes(pendingPayload);
+                        }
+                        else if (pending.kind === 'longname') {
+                            const longName = pendingPayload.toString('utf-8').replace(/\0/g, '').trim();
+                            if (longName)
+                                pendingLongName = longName;
+                        }
+                        else if (pending.kind === 'globalPax') {
+                            // ignored
+                        }
+                        else if (pending.kind === 'skip') {
+                            // nothing
+                        }
+                        pending = null;
+                        pendingPayload = Buffer.alloc(0);
+                        continue;
+                    }
+                    if (buf.length === 0)
+                        break;
+                    const take = Math.min(pending.remaining, buf.length);
+                    const slice = buf.subarray(0, take);
+                    buf = buf.subarray(take);
+                    pending.remaining -= take;
+                    if (pending.kind !== 'skip') {
+                        pendingPayload = pendingPayload.length ? Buffer.concat([pendingPayload, slice]) : slice;
+                        if (pendingPayload.length > MAX_META_BYTES) {
+                            throw new Error(`Tar metadata entry too large (max ${MAX_META_BYTES} bytes)`);
+                        }
+                    }
+                    if (pending.remaining > 0)
+                        continue;
+                    padRemaining = (512 - (pending.originalSize % 512)) % 512;
+                    if (pending.kind === 'pax') {
+                        pendingPax = parsePaxAttributes(pendingPayload);
+                    }
+                    else if (pending.kind === 'longname') {
+                        const longName = pendingPayload.toString('utf-8').replace(/\0/g, '').trim();
+                        if (longName)
+                            pendingLongName = longName;
+                    }
+                    pending = null;
+                    pendingPayload = Buffer.alloc(0);
+                    continue;
+                }
+                // Need a header block.
+                if (buf.length < 512)
+                    break;
+                const headerBlock = buf.subarray(0, 512);
+                buf = buf.subarray(512);
+                const header = parseHeader(headerBlock);
+                if (!header) {
+                    ended = true;
+                    break;
+                }
+                entriesSeen += 1;
+                if (entriesSeen > maxEntries) {
+                    throw new Error(`Tar contains too many entries (max ${maxEntries})`);
+                }
+                let entryName = header.prefix ? `${header.prefix}/${header.name}` : header.name;
+                if (pendingLongName) {
+                    entryName = pendingLongName;
+                    pendingLongName = null;
+                }
+                if (pendingPax?.path) {
+                    entryName = pendingPax.path;
+                }
+                pendingPax = null;
+                const size = header.size;
+                // Reject symlinks/hardlinks/devices/etc. Only support files + dirs + metadata.
+                if (header.typeflag === '2' || header.typeflag === '1') {
+                    throw new Error(`Refusing to extract link entry: ${entryName}`);
+                }
+                if (header.typeflag === 'x') {
+                    if (size > MAX_META_BYTES) {
+                        throw new Error(`Tar metadata entry too large (max ${MAX_META_BYTES} bytes)`);
+                    }
+                    pending = { kind: 'pax', remaining: size, originalSize: size };
+                    padRemaining = 0;
+                    pendingPayload = Buffer.alloc(0);
+                    continue;
+                }
+                if (header.typeflag === 'g') {
+                    if (size > MAX_META_BYTES) {
+                        throw new Error(`Tar metadata entry too large (max ${MAX_META_BYTES} bytes)`);
+                    }
+                    pending = { kind: 'globalPax', remaining: size, originalSize: size };
+                    padRemaining = 0;
+                    pendingPayload = Buffer.alloc(0);
+                    continue;
+                }
+                if (header.typeflag === 'L') {
+                    if (size > MAX_META_BYTES) {
+                        throw new Error(`Tar metadata entry too large (max ${MAX_META_BYTES} bytes)`);
+                    }
+                    pending = { kind: 'longname', remaining: size, originalSize: size };
+                    padRemaining = 0;
+                    pendingPayload = Buffer.alloc(0);
+                    continue;
+                }
+                if (header.typeflag !== '0' && header.typeflag !== '\0' && header.typeflag !== '5') {
+                    throw new Error(`Unsupported tar entry type '${header.typeflag}' for ${entryName}`);
+                }
+                const rel = normalizeTarPath(entryName);
+                const outPath = resolve(destDir, rel);
+                if (!isPathWithinRoot(destDir, outPath)) {
+                    throw new Error(`Unsafe tar entry (outside dest): ${rel}`);
+                }
+                if (header.typeflag === '5') {
+                    // Directory entry.
+                    await mkdir(outPath, { recursive: true });
+                    // Directories may still have payload bytes (unusual); skip them safely.
+                    if (size > 0) {
+                        pending = { kind: 'skip', remaining: size, originalSize: size };
+                        padRemaining = 0;
+                        pendingPayload = Buffer.alloc(0);
+                    }
+                    continue;
+                }
+                // File entry.
+                if (size > maxSingleFileBytes) {
+                    throw new Error(`Tar entry too large: ${rel} (${size} bytes)`);
+                }
+                await mkdir(dirname(outPath), { recursive: true });
+                const ws = createWriteStream(outPath, { flags: 'w', mode: 0o644 });
+                // Track file size and padding. Keep originalSize so we can compute padding after consumption.
+                pending = {
+                    kind: 'file',
+                    pathRel: rel,
+                    outPath,
+                    remaining: size,
+                    originalSize: size,
+                    stream: ws,
+                };
+                padRemaining = 0;
+                pendingPayload = Buffer.alloc(0);
+            }
+        }
+    }
+    finally {
+        // Ensure pending file stream is closed on error.
+        await closePendingFile();
+    }
+    if (pending) {
+        throw new Error('Unexpected end of tar stream (truncated archive)');
+    }
+    if (padRemaining > 0) {
+        throw new Error('Unexpected end of tar stream (truncated padding)');
+    }
+    return written;
+}
+async function extractTarGzReadable(gzReadable, destDir, options = {}) {
+    const maxTarBytes = options.maxTarBytes ?? 100 * 1024 * 1024; // 100MB decompressed TAR cap
+    const gunzip = createGunzip();
+    const limited = gzReadable.pipe(gunzip).pipe(createByteLimitTransform(maxTarBytes));
+    return extractTarStream(limited, destDir, options);
+}
+export async function extractTarGzFile(tarGzPath, destDir, options = {}) {
+    const rs = createReadStream(tarGzPath);
+    return extractTarGzReadable(rs, destDir, options);
+}
+export async function extractTarGzBuffer(gzBuffer, destDir, options = {}) {
+    const rs = Readable.from([gzBuffer]);
+    return extractTarGzReadable(rs, destDir, options);
+}
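
Taken together, 2.2.7 moves the CLI from trusting a raw `source` URL to a verify-then-extract pipeline: required tarball and checksum in the registry, the checksum surfaced through getDownloadUrl, and an extractor that rejects links, absolute paths, traversal, and oversized or truncated archives. An end-to-end sketch under the same hex-SHA-256 assumption as above (fetch requires Node 18+; the import path is hypothetical):

    import { createHash } from 'node:crypto';
    import { extractTarGzBuffer } from './tar-extract.js'; // hypothetical import path

    async function installModule(client, name, destDir) {
        const { url, checksum } = await client.getDownloadUrl(name);
        const buf = Buffer.from(await (await fetch(url)).arrayBuffer());
        if (checksum) {
            const digest = createHash('sha256').update(buf).digest('hex');
            if (digest !== checksum)
                throw new Error(`Checksum mismatch for ${name}`);
        }
        // The extractor enforces its own entry-count and size limits.
        return extractTarGzBuffer(buf, destDir);
    }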