sharetribe-cli 1.15.1 → 1.16.0

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "sharetribe-cli",
-  "version": "1.15.1",
+  "version": "1.16.0",
   "description": "Unofficial Sharetribe CLI - 100% compatible with flex-cli",
   "type": "module",
   "main": "./dist/index.js",
@@ -41,12 +41,14 @@
     "commander": "^12.1.0",
     "inquirer": "^9.2.23",
     "jsedn": "^0.4.1",
-    "sharetribe-flex-build-sdk": "^1.15.1",
-    "yargs": "^18.0.0"
+    "sharetribe-flex-build-sdk": "^1.15.2",
+    "yargs": "^18.0.0",
+    "yauzl": "^3.2.0"
   },
   "devDependencies": {
     "@types/inquirer": "^9.0.7",
     "@types/node": "^20.17.10",
+    "@types/yauzl": "^2.10.3",
     "@typescript-eslint/eslint-plugin": "^8.18.2",
     "@typescript-eslint/parser": "^8.18.2",
     "esbuild": "^0.27.2",
@@ -4,15 +4,31 @@
 
 import { Command } from 'commander';
 import {
-  pullAssets as sdkPullAssets,
   pushAssets as sdkPushAssets,
+  stageAsset as sdkStageAsset,
+  getApiBaseUrl,
+  readAuth,
 } from 'sharetribe-flex-build-sdk';
 import { printError } from '../../util/output.js';
-import { readFileSync, writeFileSync, existsSync, mkdirSync, readdirSync, statSync, unlinkSync } from 'node:fs';
+import {
+  readFileSync,
+  writeFileSync,
+  existsSync,
+  mkdirSync,
+  readdirSync,
+  statSync,
+  unlinkSync,
+  createWriteStream,
+} from 'node:fs';
 import { join, dirname } from 'node:path';
 import { createHash } from 'node:crypto';
+import * as http from 'node:http';
+import * as https from 'node:https';
+import { tmpdir } from 'node:os';
+import { pipeline } from 'node:stream/promises';
 import chalk from 'chalk';
 import edn from 'jsedn';
+import yauzl from 'yauzl';
 
 
 interface AssetMetadata {
@@ -20,21 +36,15 @@ interface AssetMetadata {
   assets: Array<{ path: string; 'content-hash': string }>;
 }
 
-/**
- * Reads asset metadata from .flex-cli/asset-meta.edn
- */
-function readAssetMetadata(basePath: string): AssetMetadata | null {
-  const metaPath = join(basePath, '.flex-cli', 'asset-meta.edn');
-  if (!existsSync(metaPath)) {
-    return null;
-  }
+const ASSET_META_FILENAME = 'meta/asset-meta.edn';
+const ASSETS_DIR = 'assets/';
+const CLEAR_LINE = '\x1b[K';
+const CARRIAGE_RETURN = '\r';
 
+function parseAssetMetadataEdn(content: string): AssetMetadata | null {
   try {
-    const content = readFileSync(metaPath, 'utf-8');
     const parsed = edn.parse(content);
-
-    // Convert EDN map to JavaScript object
-    const version = parsed.at(edn.kw(':version'));
+    const version = parsed.at(edn.kw(':version')) || parsed.at(edn.kw(':aliased-version'));
     const assets = parsed.at(edn.kw(':assets'));
 
     const assetList: Array<{ path: string; 'content-hash': string }> = [];
@@ -47,12 +57,33 @@ function readAssetMetadata(basePath: string): AssetMetadata | null {
       }
     }
 
+    if (!version) {
+      return null;
+    }
+
     return { version, assets: assetList };
   } catch {
     return null;
   }
 }
 
+/**
+ * Reads asset metadata from .flex-cli/asset-meta.edn
+ */
+function readAssetMetadata(basePath: string): AssetMetadata | null {
+  const metaPath = join(basePath, '.flex-cli', 'asset-meta.edn');
+  if (!existsSync(metaPath)) {
+    return null;
+  }
+
+  try {
+    const content = readFileSync(metaPath, 'utf-8');
+    return parseAssetMetadataEdn(content);
+  } catch {
+    return null;
+  }
+}
+
 /**
  * Writes asset metadata to .flex-cli/asset-meta.edn
  */
@@ -79,7 +110,8 @@ function writeAssetMetadata(basePath: string, metadata: AssetMetadata): void {
 }
 
 /**
- * Calculates SHA-1 hash of file content
+ * Calculates SHA-1 hash of file content matching backend convention
+ * Content is prefixed with `${byte-count}|` before hashing
  */
 function calculateHash(data: Buffer): string {
   const prefix = Buffer.from(`${data.length}|`, 'utf-8');
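For reference, the convention documented in this hunk can be reproduced on its own; a minimal sketch, assuming the hash is SHA-1 over the length-prefixed bytes as the comment describes (the sample content is illustrative, not part of the package):

    import { createHash } from 'node:crypto';

    // Hash input is "<byte length>|<content>", e.g. "5|hello" for a 5-byte file.
    function contentHash(data: Buffer): string {
      const prefixed = Buffer.concat([Buffer.from(`${data.length}|`, 'utf-8'), data]);
      return createHash('sha1').update(prefixed).digest('hex');
    }

    console.log(contentHash(Buffer.from('hello', 'utf-8')));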
@@ -97,6 +129,7 @@ function readLocalAssets(basePath: string): Array<{ path: string; data: Buffer;
 
     for (const entry of entries) {
       if (entry === '.flex-cli') continue; // Skip metadata directory
+      if (entry === '.DS_Store') continue; // Skip .DS_Store files
 
       const fullPath = join(dir, entry);
       const relPath = relativePath ? join(relativePath, entry) : entry;
@@ -116,6 +149,35 @@ function readLocalAssets(basePath: string): Array<{ path: string; data: Buffer;
   return assets;
 }
 
+/**
+ * Lists local asset paths without reading file data
+ */
+function listLocalAssetPaths(basePath: string): string[] {
+  const assets: string[] = [];
+
+  function scanDir(dir: string, relativePath: string = '') {
+    const entries = readdirSync(dir);
+
+    for (const entry of entries) {
+      if (entry === '.flex-cli') continue;
+      if (entry === '.DS_Store') continue;
+
+      const fullPath = join(dir, entry);
+      const relPath = relativePath ? join(relativePath, entry) : entry;
+      const stat = statSync(fullPath);
+
+      if (stat.isDirectory()) {
+        scanDir(fullPath, relPath);
+      } else if (stat.isFile()) {
+        assets.push(relPath);
+      }
+    }
+  }
+
+  scanDir(basePath);
+  return assets;
+}
+
 /**
  * Validates JSON files
  */
@@ -131,6 +193,186 @@ function validateJsonAssets(assets: Array<{ path: string; data: Buffer }>): void
   }
 }
 
+function formatDownloadProgress(bytes: number): string {
+  const mb = bytes / 1024 / 1024;
+  return `${CARRIAGE_RETURN}${CLEAR_LINE}Downloaded ${mb.toFixed(2)}MB`;
+}
+
+function printDownloadProgress(stream: NodeJS.ReadableStream): void {
+  let downloaded = 0;
+  const printProgress = (): void => {
+    process.stderr.write(formatDownloadProgress(downloaded));
+  };
+  const interval = setInterval(printProgress, 100);
+
+  stream.on('data', (chunk: Buffer) => {
+    downloaded += chunk.length;
+  });
+
+  stream.on('end', () => {
+    clearInterval(interval);
+    printProgress();
+    process.stderr.write('\nFinished downloading assets\n');
+  });
+}
+
+function getApiKeyOrThrow(): string {
+  const auth = readAuth();
+  if (!auth?.apiKey) {
+    throw new Error('Not logged in. Please provide apiKey or run: sharetribe-cli login');
+  }
+  return auth.apiKey;
+}
+
+function getAssetsPullUrl(marketplace: string, version?: string): URL {
+  const url = new URL(getApiBaseUrl() + '/assets/pull');
+  url.searchParams.set('marketplace', marketplace);
+  if (version) {
+    url.searchParams.set('version', version);
+  } else {
+    url.searchParams.set('version-alias', 'latest');
+  }
+  return url;
+}
+
+function getErrorMessage(body: string, statusCode: number): string {
+  try {
+    const parsed = JSON.parse(body) as { errors?: Array<{ message?: string }> };
+    const message = parsed.errors?.[0]?.message;
+    if (message) {
+      return message;
+    }
+  } catch {
+    // Ignore JSON parse errors
+  }
+  return body || `HTTP ${statusCode}`;
+}
+
+async function getAssetsZipStream(
+  marketplace: string,
+  version?: string
+): Promise<http.IncomingMessage> {
+  const url = getAssetsPullUrl(marketplace, version);
+  const apiKey = getApiKeyOrThrow();
+  const isHttps = url.protocol === 'https:';
+  const client = isHttps ? https : http;
+
+  return new Promise((resolve, reject) => {
+    const req = client.request(
+      {
+        method: 'GET',
+        hostname: url.hostname,
+        port: url.port || (isHttps ? 443 : 80),
+        path: url.pathname + url.search,
+        headers: {
+          Authorization: `Apikey ${apiKey}`,
+          Accept: 'application/zip',
+        },
+      },
+      (res) => {
+        const statusCode = res.statusCode || 0;
+        if (statusCode < 200 || statusCode >= 300) {
+          const chunks: Buffer[] = [];
+          res.on('data', (chunk: Buffer) => chunks.push(chunk));
+          res.on('end', () => {
+            const body = Buffer.concat(chunks).toString('utf-8');
+            reject(new Error(getErrorMessage(body, statusCode)));
+          });
+          return;
+        }
+        resolve(res);
+      }
+    );
+
+    req.setTimeout(120000, () => {
+      req.destroy(new Error('Request timeout'));
+    });
+    req.on('error', reject);
+    req.end();
+  });
+}
+
+function createTempZipPath(): string {
+  return join(tmpdir(), `assets-${Date.now()}.zip`);
+}
+
+function removeAssetsDir(filename: string): string {
+  if (filename.startsWith(ASSETS_DIR)) {
+    return filename.slice(ASSETS_DIR.length);
+  }
+  return filename;
+}
+
+function readStreamToString(stream: NodeJS.ReadableStream): Promise<string> {
+  return new Promise((resolve, reject) => {
+    const chunks: Buffer[] = [];
+    stream.on('data', (chunk: Buffer) => chunks.push(chunk));
+    stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf-8')));
+    stream.on('error', reject);
+  });
+}
+
+async function unzipAssets(zipPath: string, basePath: string): Promise<AssetMetadata> {
+  return new Promise((resolve, reject) => {
+    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
+      if (err || !zipfile) {
+        reject(err || new Error('Failed to open zip file'));
+        return;
+      }
+
+      let assetMeta: AssetMetadata | null = null;
+
+      zipfile.on('error', reject);
+      zipfile.on('end', () => {
+        if (!assetMeta) {
+          reject(new Error('Asset metadata not found in zip'));
+          return;
+        }
+        resolve(assetMeta);
+      });
+
+      zipfile.readEntry();
+      zipfile.on('entry', (entry) => {
+        if (entry.fileName.endsWith('/')) {
+          zipfile.readEntry();
+          return;
+        }
+
+        zipfile.openReadStream(entry, (streamErr, readStream) => {
+          if (streamErr || !readStream) {
+            reject(streamErr || new Error('Failed to read zip entry'));
+            return;
+          }
+
+          if (entry.fileName === ASSET_META_FILENAME) {
+            readStreamToString(readStream)
+              .then((content) => {
+                assetMeta = parseAssetMetadataEdn(content);
+                if (!assetMeta) {
+                  reject(new Error('Invalid asset metadata'));
+                  return;
+                }
+                zipfile.readEntry();
+              })
+              .catch(reject);
+            return;
+          }
+
+          const assetPath = join(basePath, removeAssetsDir(entry.fileName));
+          const assetDir = dirname(assetPath);
+          if (!existsSync(assetDir)) {
+            mkdirSync(assetDir, { recursive: true });
+          }
+
+          pipeline(readStream, createWriteStream(assetPath))
+            .then(() => zipfile.readEntry())
+            .catch(reject);
+        });
+      });
+    });
+  });
+}
+
 /**
  * Pulls assets from remote
  */
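For orientation, a rough sketch of the pull URL the new getAssetsPullUrl helper builds; the base URL below is made up, the real value comes from getApiBaseUrl():

    // Illustrative only: shows the query-string shape when no explicit version is given.
    const url = new URL('https://api.example.com' + '/assets/pull');
    url.searchParams.set('marketplace', 'my-marketplace-dev');
    url.searchParams.set('version-alias', 'latest');
    console.log(url.toString());
    // https://api.example.com/assets/pull?marketplace=my-marketplace-dev&version-alias=latest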
@@ -141,7 +383,7 @@ async function pullAssets(
   prune?: boolean
 ): Promise<void> {
   try {
-    // Validate path
+    // Create directory if it doesn't exist
     if (!existsSync(path)) {
       mkdirSync(path, { recursive: true });
     }
@@ -151,57 +393,48 @@ async function pullAssets(
       throw new Error(`${path} is not a directory`);
     }
 
-    // Fetch assets from API
-    const result = await sdkPullAssets(undefined, marketplace, version ? { version } : undefined);
-    const remoteVersion = result.version;
-
-    // Read current metadata
+    const localAssets = prune ? listLocalAssetPaths(path) : [];
     const currentMeta = readAssetMetadata(path);
+    const tempZipPath = createTempZipPath();
 
-    // Check if up to date
-    if (currentMeta && currentMeta.version === remoteVersion && result.assets.length === currentMeta.assets.length) {
-      console.log('Assets are up to date.');
-      return;
-    }
+    try {
+      const zipStream = await getAssetsZipStream(marketplace, version);
+      printDownloadProgress(zipStream);
+      await pipeline(zipStream, createWriteStream(tempZipPath));
 
-    // Write assets to disk
-    const newAssets: Array<{ path: string; 'content-hash': string }> = [];
-    for (const asset of result.assets) {
-      const assetPath = join(path, asset.path);
-      const assetDir = dirname(assetPath);
+      const newAssetMeta = await unzipAssets(tempZipPath, path);
+      const remoteVersion = newAssetMeta.version;
 
-      if (!existsSync(assetDir)) {
-        mkdirSync(assetDir, { recursive: true });
-      }
+      const deletedPaths = prune
+        ? new Set(localAssets.filter(p => !newAssetMeta.assets.some(a => a.path === p)))
+        : new Set<string>();
 
-      // Decode base64 data
-      const data = Buffer.from(asset.dataRaw, 'base64');
-      writeFileSync(assetPath, data);
+      const updated = currentMeta?.version !== remoteVersion;
+      const shouldReportUpdate = updated || deletedPaths.size > 0;
 
-      const hash = calculateHash(data);
-      newAssets.push({ path: asset.path, 'content-hash': asset.contentHash || hash });
-    }
-
-    // Prune deleted assets if requested
-    if (prune && currentMeta) {
-      const remotePaths = new Set(result.assets.map(a => a.path));
-      for (const localAsset of currentMeta.assets) {
-        if (!remotePaths.has(localAsset.path)) {
-          const assetPath = join(path, localAsset.path);
-          if (existsSync(assetPath)) {
-            unlinkSync(assetPath);
+      if (deletedPaths.size > 0) {
+        for (const assetPath of deletedPaths) {
+          const fullPath = join(path, assetPath);
+          if (existsSync(fullPath)) {
+            unlinkSync(fullPath);
           }
         }
       }
-    }
 
-    // Update metadata
-    writeAssetMetadata(path, {
-      version: remoteVersion,
-      assets: newAssets,
-    });
-
-    console.log(`Version ${remoteVersion} successfully pulled.`);
+      if (shouldReportUpdate) {
+        writeAssetMetadata(path, {
+          version: remoteVersion,
+          assets: newAssetMeta.assets,
+        });
+        console.log(`Version ${remoteVersion} successfully pulled.`);
+      } else {
+        console.log('Assets are up to date.');
+      }
+    } finally {
+      if (existsSync(tempZipPath)) {
+        unlinkSync(tempZipPath);
+      }
+    }
   } catch (error) {
     if (error && typeof error === 'object' && 'message' in error) {
       printError(error.message as string);
@@ -212,6 +445,22 @@ async function pullAssets(
   }
 }
 
+/**
+ * Filters assets to only those that have changed
+ */
+function filterChangedAssets(
+  existingMeta: Array<{ path: string; 'content-hash': string }>,
+  localAssets: Array<{ path: string; hash: string }>
+): Array<{ path: string; data: Buffer; hash: string }> {
+  const hashByPath = new Map(existingMeta.map(a => [a.path, a['content-hash']]));
+
+  return localAssets.filter(asset => {
+    const storedHash = hashByPath.get(asset.path);
+    // Assets without stored metadata are treated as changed
+    return !storedHash || storedHash !== asset.hash;
+  });
+}
+
 /**
  * Pushes assets to remote
  */
@@ -236,33 +485,23 @@ async function pushAssets(
     // Validate JSON files
     validateJsonAssets(localAssets);
 
-    // Build operations
-    const operations: Array<{
-      path: string;
-      op: 'upsert' | 'delete';
-      data?: Buffer;
-    }> = [];
+    // Filter to only changed assets
+    const changedAssets = filterChangedAssets(currentMeta?.assets || [], localAssets);
 
-    // Find assets to upsert (new or changed)
-    const localAssetMap = new Map(localAssets.map(a => [a.path, a]));
-    const currentAssetMap = new Map((currentMeta?.assets || []).map(a => [a.path, a['content-hash']]));
-
-    for (const [assetPath, asset] of localAssetMap) {
-      const currentHash = currentAssetMap.get(assetPath);
-      if (!currentHash || currentHash !== asset.hash) {
-        operations.push({
-          path: assetPath,
-          op: 'upsert',
-          data: asset.data,
-        });
-      }
-    }
+    // Separate JSON and non-JSON assets
+    const isJsonAsset = (assetPath: string): boolean => {
+      return assetPath.toLowerCase().endsWith('.json');
+    };
+
+    const stageableAssets = changedAssets.filter(a => !isJsonAsset(a.path));
 
     // Find assets to delete (if prune enabled)
+    const localAssetMap = new Map(localAssets.map(a => [a.path, a]));
+    const deleteOperations: Array<{ path: string; op: 'delete' }> = [];
     if (prune && currentMeta) {
       for (const currentAsset of currentMeta.assets) {
         if (!localAssetMap.has(currentAsset.path)) {
-          operations.push({
+          deleteOperations.push({
            path: currentAsset.path,
            op: 'delete',
          });
@@ -271,20 +510,62 @@ async function pushAssets(
       }
     }
     // Check if there are any changes
-    if (operations.length === 0) {
+    const noOps = changedAssets.length === 0 && deleteOperations.length === 0;
+    if (noOps) {
       console.log('Assets are up to date.');
       return;
     }
 
-    const changedAssetPaths = operations
-      .filter(op => op.op === 'upsert')
-      .map(op => op.path);
-    if (changedAssetPaths.length > 0) {
-      console.log(chalk.green(`Uploading changed assets: ${changedAssetPaths.join(', ')}`));
+    // Log changed assets
+    if (changedAssets.length > 0) {
+      const paths = changedAssets.map(a => a.path).join(', ');
+      console.log(chalk.green(`Uploading changed assets: ${paths}`));
     }
 
+    // Stage non-JSON assets
+    const stagedByPath = new Map<string, string>();
+    if (stageableAssets.length > 0) {
+      const paths = stageableAssets.map(a => a.path).join(', ');
+      console.log(chalk.green(`Staging assets: ${paths}`));
+
+      for (const asset of stageableAssets) {
+        try {
+          const stagingResult = await sdkStageAsset(
+            undefined,
+            marketplace,
+            asset.data,
+            asset.path
+          );
+          stagedByPath.set(asset.path, stagingResult.stagingId);
+        } catch (error) {
+          if (error && typeof error === 'object' && 'code' in error && error.code === 'asset-invalid-content') {
+            const detail = 'message' in error ? error.message : 'The file is missing or uses an unsupported format.';
+            throw new Error(`Failed to stage image ${asset.path}: ${detail}\nFix the file and rerun assets push to retry staging.`);
+          }
+          throw error;
+        }
+      }
+    }
+
+    // Build upsert operations
+    const upsertOperations = changedAssets.map(asset => {
+      const stagingId = stagedByPath.get(asset.path);
+      return {
+        path: asset.path,
+        op: 'upsert' as const,
+        ...(stagingId
+          ? { stagingId }
+          : { data: asset.data, filename: asset.path }),
+      };
+    });
+
     // Upload to API
-    const result = await sdkPushAssets(undefined, marketplace, currentVersion, operations);
+    const result = await sdkPushAssets(
+      undefined,
+      marketplace,
+      currentVersion,
+      [...upsertOperations, ...deleteOperations]
+    );
 
     // Update local metadata
     writeAssetMetadata(path, {
@@ -345,3 +626,9 @@ export function registerAssetsCommands(program: Command): void {
       await pushAssets(marketplace, opts.path, opts.prune);
     });
 }
+
+export const __test__ = {
+  formatDownloadProgress,
+  removeAssetsDir,
+  parseAssetMetadataEdn,
+};
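A small sketch of how these new __test__ exports might be exercised in a unit test; the import path and assertion style are illustrative and not part of the package:

    import { strict as assert } from 'node:assert';
    import { __test__ } from './assets.js'; // hypothetical path to the module shown above

    // removeAssetsDir strips the leading "assets/" folder from zip entry names.
    assert.equal(__test__.removeAssetsDir('assets/content/translations.json'), 'content/translations.json');
    assert.equal(__test__.removeAssetsDir('meta/asset-meta.edn'), 'meta/asset-meta.edn');

    // formatDownloadProgress renders a carriage-return-prefixed progress line in megabytes.
    assert.ok(__test__.formatDownloadProgress(5 * 1024 * 1024).endsWith('Downloaded 5.00MB'));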
@@ -2,7 +2,6 @@
  * Debug command - display config and auth info
  */
 
-import edn from 'jsedn';
 import { getConfigMap, readAuth } from 'sharetribe-flex-build-sdk';
 
 function maskLast4(value: string): string {
@@ -12,25 +11,16 @@ function maskLast4(value: string): string {
   return `...${value.slice(-4)}`;
 }
 
-function toEdnMap(record: Record<string, string>): edn.Map {
-  const entries: Array<unknown> = [];
-  for (const [key, value] of Object.entries(record)) {
-    entries.push(edn.kw(`:${key}`), value);
-  }
-  return new edn.Map(entries);
-}
-
 export function debug(): void {
   const auth = readAuth();
   const apiKey = auth?.apiKey ? maskLast4(auth.apiKey) : 'No API key set';
   const confMap = getConfigMap();
 
-  const payload = new edn.Map([
-    edn.kw(':api-key'),
-    apiKey,
-    edn.kw(':conf-map'),
-    toEdnMap(confMap),
-  ]);
+  const confMapEntries = Object.keys(confMap)
+    .sort()
+    .map((key) => `:${key} ${confMap[key]}`)
+    .join(' ');
+  const confMapFormatted = confMapEntries ? `{${confMapEntries}}` : '{}';
 
-  console.log(edn.encode(payload));
+  console.log(`{:api-key ${apiKey}, :conf-map ${confMapFormatted}}`);
 }
@@ -208,12 +208,18 @@ export function registerSearchCommands(program: Command): void {
   searchCmd
     .command('set')
     .description('set search schema')
-    .requiredOption('--key <KEY>', 'schema key')
-    .requiredOption('--scope <SCOPE>', 'schema scope')
-    .requiredOption('--type <TYPE>', 'value type (enum, multi-enum, boolean, long, or text)')
+    .requiredOption('--key <KEY>', 'key name')
+    .requiredOption(
+      '--scope <SCOPE>',
+      'extended data scope (either metadata or public for listing schema, metadata, private, protected or public for userProfile schema, metadata or protected for transaction schema)'
+    )
+    .requiredOption('--type <TYPE>', 'value type (either enum, multi-enum, boolean, long or text)')
     .option('--doc <DOC>', 'description of the schema')
     .option('--default <DEFAULT>', 'default value for search if value is not set')
-    .option('--schema-for <SCHEMA_FOR>', 'subject of the schema (listing, userProfile, or transaction)')
+    .option(
+      '--schema-for <SCHEMA_FOR>',
+      'Subject of the schema (either listing, userProfile or transaction, defaults to listing)'
+    )
     .option('-m, --marketplace <MARKETPLACE_ID>', 'marketplace identifier')
     .action(async (opts) => {
       const marketplace = opts.marketplace || program.opts().marketplace;
@@ -235,9 +241,15 @@ export function registerSearchCommands(program: Command): void {
   searchCmd
     .command('unset')
     .description('unset search schema')
-    .requiredOption('--key <KEY>', 'schema key')
-    .requiredOption('--scope <SCOPE>', 'schema scope')
-    .option('--schema-for <SCHEMA_FOR>', 'subject of the schema (listing, userProfile, or transaction)')
+    .requiredOption('--key <KEY>', 'key name')
+    .requiredOption(
+      '--scope <SCOPE>',
+      'extended data scope (either metadata or public for listing schema, metadata, private, protected or public for userProfile schema, metadata or protected for transaction schema)'
+    )
+    .option(
+      '--schema-for <SCHEMA_FOR>',
+      'Subject of the schema (either listing, userProfile or transaction, defaults to listing)'
+    )
     .option('-m, --marketplace <MARKETPLACE_ID>', 'marketplace identifier')
     .action(async (opts) => {
       const marketplace = opts.marketplace || program.opts().marketplace;
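To close, a pair of example invocations consistent with the updated option descriptions above; the marketplace ID and schema key are illustrative, not taken from the package:

    sharetribe-cli search set --key category --scope public --type enum --schema-for listing -m my-marketplace-dev
    sharetribe-cli search unset --key category --scope public --schema-for listing -m my-marketplace-dev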