@omen.foundation/node-microservice-runtime 0.1.65 → 0.1.67

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,1119 +0,0 @@
1
- #!/usr/bin/env node
2
-
3
- import fs from 'node:fs/promises';
4
- import fssync from 'node:fs';
5
- import path from 'node:path';
6
- import os from 'node:os';
7
- import crypto from 'node:crypto';
8
- import { fileURLToPath } from 'node:url';
9
- import { fetch } from 'undici';
10
- import tar from 'tar-stream';
11
- import dotenv from 'dotenv';
12
- import { runCommand } from './lib/cli-utils.mjs';
13
-
14
- const __dirname = path.dirname(fileURLToPath(import.meta.url));
15
- const DEFAULT_NODE_VERSION = '20';
16
- const MANIFEST_MEDIA_TYPE = 'application/vnd.docker.distribution.manifest.v2+json';
17
- const CONFIG_MEDIA_TYPE = 'application/vnd.docker.container.image.v1+json';
18
- const LAYER_MEDIA_TYPE = 'application/vnd.docker.image.rootfs.diff.tar.gzip';
19
-
20
// ANSI escape sequences used to colorize terminal output.
const colors = {
  reset: '\x1b[0m',
  bright: '\x1b[1m',
  dim: '\x1b[2m',
  red: '\x1b[31m',
  green: '\x1b[32m',
  yellow: '\x1b[33m',
  blue: '\x1b[34m',
  magenta: '\x1b[35m',
  cyan: '\x1b[36m',
};
32
-
33
// Progress bar manager: renders a single in-place terminal line with a
// step-based percentage, the current task label, and elapsed seconds, plus
// glyph helpers for success/info/error status lines.
class ProgressBar {
  /**
   * @param {number} totalSteps - Expected number of start() calls; used to
   *   compute the percentage shown by update().
   */
  constructor(totalSteps) {
    this.totalSteps = totalSteps;
    this.currentStep = 0;
    this.currentTask = '';
    this.startTime = Date.now();
  }

  // Begin a new step: advance the counter and redraw the bar.
  start(task) {
    this.currentTask = task;
    this.currentStep++;
    this.update();
  }

  // Redraw with a final task label, then terminate the progress line.
  complete(task) {
    this.currentTask = task;
    this.update();
    process.stdout.write('\n');
  }

  // Redraw the bar in place (carriage return, no newline).
  update() {
    // Guard totalSteps <= 0 (would be NaN) and clamp to [0, 100] so that
    // calling start() more times than totalSteps cannot produce a filled
    // width > barWidth — previously that made '░'.repeat(negative) throw.
    const ratio = this.totalSteps > 0 ? this.currentStep / this.totalSteps : 0;
    const percentage = Math.min(100, Math.max(0, Math.round(ratio * 100)));
    const barWidth = 30;
    const filled = Math.round((percentage / 100) * barWidth);
    const empty = barWidth - filled;
    const bar = '█'.repeat(filled) + '░'.repeat(empty);

    const elapsed = ((Date.now() - this.startTime) / 1000).toFixed(1);

    // Clear line and write progress
    process.stdout.write(`\r${colors.cyan}${bar}${colors.reset} ${colors.bright}${percentage}%${colors.reset} ${colors.dim}${this.currentTask}${colors.reset} ${colors.dim}(${elapsed}s)${colors.reset}`);
  }

  success(message) {
    process.stdout.write(`\r${colors.green}✓${colors.reset} ${message}\n`);
  }

  info(message) {
    process.stdout.write(`\r${colors.blue}ℹ${colors.reset} ${message}\n`);
  }

  error(message) {
    process.stdout.write(`\r${colors.red}✗${colors.reset} ${message}\n`);
  }
}
79
-
80
/**
 * Parses CLI arguments for the publish tool. Environment variables provide
 * defaults; explicit flags override them.
 *
 * @param {string[]} argv - Arguments after the script name.
 * @returns {object} Parsed options (entry, openapi, cid, pid, token, ...).
 * @throws {Error} on an unknown flag, or when a value-taking flag is the
 *   last token (previously that silently stored `undefined`).
 */
function parseArgs(argv) {
  const args = {
    entry: 'dist/main.js',
    openapi: 'beam_openApi.json',
    envFile: undefined,
    cid: process.env.BEAMABLE_CID || process.env.CID,
    pid: process.env.BEAMABLE_PID || process.env.PID,
    host: process.env.BEAMABLE_HOST || process.env.HOST,
    namePrefix: process.env.BEAMABLE_NAME_PREFIX || process.env.NAME_PREFIX,
    token: process.env.BEAMABLE_TOKEN || process.env.BEAMABLE_ACCESS_TOKEN || process.env.ACCESS_TOKEN,
    gamePid: process.env.BEAMABLE_GAME_PID,
    comments: process.env.BEAMABLE_PUBLISH_COMMENTS,
    service: process.env.BEAMABLE_SERVICE_ID,
    dockerTag: process.env.BEAMABLE_DOCKER_TAG,
    nodeVersion: process.env.BEAMABLE_NODE_VERSION || DEFAULT_NODE_VERSION,
    skipValidate: false,
    apiHost: process.env.BEAMABLE_API_HOST,
  };

  // Flag -> args key for every flag that consumes a value. A Map (rather
  // than a plain object) avoids accidental hits on prototype keys such as
  // '--toString'. '--routing-key' is a documented alias of '--name-prefix'.
  const valueFlags = new Map([
    ['--entry', 'entry'],
    ['--openapi', 'openapi'],
    ['--env-file', 'envFile'],
    ['--cid', 'cid'],
    ['--pid', 'pid'],
    ['--host', 'host'],
    ['--routing-key', 'namePrefix'],
    ['--name-prefix', 'namePrefix'],
    ['--token', 'token'],
    ['--game-pid', 'gamePid'],
    ['--comments', 'comments'],
    ['--service', 'service'],
    ['--docker-tag', 'dockerTag'],
    ['--node-version', 'nodeVersion'],
    ['--api-host', 'apiHost'],
  ]);

  const queue = [...argv];
  while (queue.length > 0) {
    const current = queue.shift();
    if (current === '--skip-validate') {
      args.skipValidate = true;
      continue;
    }
    const key = valueFlags.get(current);
    if (key === undefined) {
      throw new Error(`Unknown argument: ${current}`);
    }
    if (queue.length === 0) {
      throw new Error(`Missing value for argument: ${current}`);
    }
    args[key] = queue.shift();
  }

  // npm passes --env-file through as npm_config_env_file.
  if (!args.envFile && process.env.npm_config_env_file) {
    args.envFile = process.env.npm_config_env_file;
  }

  return args;
}
160
-
161
/**
 * Asserts that `value` is truthy and passes it through unchanged.
 * @throws {Error} carrying `message` when the value is falsy.
 */
function ensure(value, message) {
  if (value) {
    return value;
  }
  throw new Error(message);
}
167
-
168
/** Hex-encoded MD5 digest of `input` (used for IDs, not for security). */
function md5Hex(input) {
  const hasher = crypto.createHash('md5');
  hasher.update(input);
  return hasher.digest('hex');
}
171
-
172
/** OCI/Docker-style digest string `sha256:<hex>` for the given buffer. */
function sha256Digest(buffer) {
  const hex = crypto.createHash('sha256').update(buffer).digest('hex');
  return 'sha256:' + hex;
}
175
-
176
/**
 * Shortens a digest such as `sha256:<hex>` to the first 12 characters of
 * the hash part. Bare hashes (no colon) are shortened directly; falsy input
 * yields an empty string.
 */
function shortDigest(fullDigest) {
  if (!fullDigest) {
    return '';
  }
  // Mirrors `split(':')[1] ?? [0]`: the segment right after the first colon.
  const [head, tail] = fullDigest.split(':');
  const hash = tail === undefined ? head : tail;
  return hash.slice(0, 12);
}
184
-
185
/**
 * Converts a websocket host (ws:// or wss://, optionally ending in /socket)
 * into the equivalent http(s) API base URL; for plain hosts only a single
 * trailing slash is stripped. Returns undefined for falsy input.
 */
function normalizeApiHost(host) {
  if (!host) {
    return undefined;
  }
  const schemePairs = [
    ['wss://', 'https://'],
    ['ws://', 'http://'],
  ];
  for (const [wsScheme, httpScheme] of schemePairs) {
    if (host.startsWith(wsScheme)) {
      const rest = host.slice(wsScheme.length).replace(/\/socket$/, '');
      return httpScheme + rest;
    }
  }
  return host.replace(/\/$/, '');
}
197
-
198
/** Reads `filePath` as UTF-8 text and returns it parsed as JSON. */
async function readJson(filePath) {
  return JSON.parse(await fs.readFile(filePath, 'utf8'));
}
202
-
203
/**
 * Recursively copies regular files from `source` into `destination`,
 * creating directories as needed. Entries that are neither files nor
 * directories (symlinks, sockets, ...) are deliberately skipped.
 * Note: the source is listed before the destination is created, so a
 * missing source fails without leaving an empty destination behind.
 */
async function copyDirectory(source, destination) {
  const entries = await fs.readdir(source, { withFileTypes: true });
  await fs.mkdir(destination, { recursive: true });
  for (const entry of entries) {
    const from = path.join(source, entry.name);
    const to = path.join(destination, entry.name);
    if (entry.isDirectory()) {
      await copyDirectory(from, to);
    } else if (entry.isFile()) {
      await fs.copyFile(from, to);
    }
  }
}
216
-
217
/**
 * Loads a `docker save` tarball fully into memory and returns its first
 * manifest entry, the image config blob, and every layer blob in manifest
 * order.
 * @throws when manifest.json, the config blob, or any layer is missing, or
 *   when manifest.json is not a non-empty array.
 */
async function readDockerImageTar(tarPath) {
  const archiveFiles = new Map();
  const extract = tar.extract();

  await new Promise((resolve, reject) => {
    extract.on('entry', (header, stream, next) => {
      const pieces = [];
      stream.on('data', (piece) => pieces.push(piece));
      stream.on('end', () => {
        archiveFiles.set(header.name, Buffer.concat(pieces));
        next();
      });
      stream.on('error', reject);
    });
    extract.on('finish', resolve);
    extract.on('error', reject);

    fssync.createReadStream(tarPath).pipe(extract);
  });

  const rawManifest = archiveFiles.get('manifest.json');
  if (!rawManifest) {
    throw new Error('Docker image archive missing manifest.json');
  }

  const parsedManifest = JSON.parse(rawManifest.toString());
  if (!Array.isArray(parsedManifest) || parsedManifest.length === 0) {
    throw new Error('Unexpected manifest.json structure.');
  }

  // Only the first image entry is consumed (single-image archives).
  const [manifestEntry] = parsedManifest;
  const configName = manifestEntry.Config || manifestEntry.config;
  const layerNames = manifestEntry.Layers || manifestEntry.layers;
  if (!configName || !layerNames) {
    throw new Error('Manifest entry missing Config or Layers.');
  }

  const configBuffer = archiveFiles.get(configName);
  if (!configBuffer) {
    throw new Error(`Config blob missing in archive: ${configName}`);
  }

  const layers = layerNames.map((layerName) => {
    const buffer = archiveFiles.get(layerName);
    if (!buffer) {
      throw new Error(`Layer missing in archive: ${layerName}`);
    }
    return { name: layerName, buffer };
  });

  return { manifestEntry, configBuffer, layers };
}
270
-
271
/**
 * HEAD-checks whether `digest` already exists under the registry `baseUrl`,
 * following 307 redirects by hand (automatic redirects would drop the
 * custom auth headers).
 * @returns {Promise<boolean>} true only for a final 200 response.
 */
async function checkBlobExists(baseUrl, digest, headers) {
  const target = new URL(`blobs/${digest}`, baseUrl);
  const response = await fetch(target, { method: 'HEAD', headers, redirect: 'manual' });

  const location = response.headers.get('location');
  if (response.status === 307 && location) {
    const nextBase = location.startsWith('http') ? location : new URL(location, baseUrl).href;
    return checkBlobExists(nextBase, digest, headers);
  }

  return response.status === 200;
}
283
-
284
/**
 * POSTs to the registry's blob-upload endpoint and returns the `Location`
 * header to PUT the blob to (may be null if the registry omits it).
 *
 * The request deliberately mirrors the C# CLI's StringContent(""): an empty
 * text/plain body with an explicit zero Content-Length.
 * @throws on network failure or any non-2xx response.
 */
async function prepareUploadLocation(baseUrl, headers) {
  const uploadsUrl = new URL('blobs/uploads/', baseUrl);
  const debugEnabled = process.env.BEAMO_DEBUG === '1' || process.env.BEAMO_NODE_DEBUG === '1';

  let response;
  try {
    response = await fetch(uploadsUrl, {
      method: 'POST',
      headers: {
        ...headers,
        'Content-Type': 'text/plain; charset=utf-8',
        'Content-Length': '0',
      },
      body: '', // Empty body
    });
  } catch (error) {
    // fetch() rejects before any HTTP response on network/SSL failures.
    const errorMsg = error instanceof Error ? error.message : String(error);
    if (debugEnabled) {
      console.error('[beamo-node] Network error preparing upload location:', {
        url: uploadsUrl.toString(),
        error: errorMsg,
        ...(error instanceof Error && error.stack ? { stack: error.stack } : {}),
        ...(error instanceof Error && error.cause ? { cause: error.cause } : {}),
      });
    }
    throw new Error(`Network error preparing upload location: ${errorMsg}. URL: ${uploadsUrl.toString()}`);
  }

  if (!response.ok) {
    const text = await response.text();
    if (debugEnabled) {
      console.error('[beamo-node] Upload location failed', {
        status: response.status,
        statusText: response.statusText,
        headers: Object.fromEntries(response.headers.entries()),
        body: text.substring(0, 500),
      });
    }
    throw new Error(`Failed to prepare upload location: ${response.status} ${text}`);
  }

  return response.headers.get('location');
}
328
-
329
/**
 * Uploads one blob (config or layer) to the registry unless it already
 * exists, using the two-step flow: POST for an upload location, then PUT
 * with `?digest=`. The upload location is normalized like the C# CLI's
 * NormalizeWithDigest: HTTPS forced, explicit port dropped.
 * @returns {Promise<{digest: string, size: number}>} descriptor for the blob.
 */
async function uploadBlob(baseUrl, digest, buffer, headers) {
  const descriptor = { digest, size: buffer.length };

  if (await checkBlobExists(baseUrl, digest, headers)) {
    return descriptor;
  }

  const location = await prepareUploadLocation(baseUrl, headers);
  if (!location) {
    throw new Error('Registry did not provide an upload location.');
  }

  // Resolve a possibly-relative location, then force HTTPS on the scheme's
  // default port and attach the digest query parameter.
  const absolute = location.startsWith('http') ? location : new URL(location, baseUrl).href;
  const uploadUrl = new URL(absolute);
  uploadUrl.protocol = 'https:';
  uploadUrl.port = ''; // empty string = default port for the scheme
  uploadUrl.searchParams.set('digest', digest);

  const response = await fetch(uploadUrl, {
    method: 'PUT',
    headers: { ...headers, 'Content-Type': 'application/octet-stream' },
    body: buffer,
  });

  if (!response.ok) {
    const text = await response.text();
    throw new Error(`Failed to upload blob ${digest}: ${response.status} ${text}`);
  }

  return descriptor;
}
360
-
361
/**
 * PUTs the image manifest to the registry, tagged with the short image id —
 * the backend resolves images by that tag (matching the C# CLI).
 * @throws on any non-2xx registry response.
 */
async function uploadManifest(baseUrl, manifestJson, shortImageId, headers) {
  const target = new URL(`manifests/${shortImageId}`, baseUrl);
  const payload = JSON.stringify(manifestJson);

  const response = await fetch(target, {
    method: 'PUT',
    headers: { ...headers, 'Content-Type': MANIFEST_MEDIA_TYPE },
    body: payload,
  });

  if (response.ok) {
    return;
  }
  const text = await response.text();
  throw new Error(`Failed to upload manifest: ${response.status} ${text}`);
}
380
-
381
/**
 * fetch() wrapper that resolves with the parsed JSON body, or throws an
 * Error (carrying `.status`) for any non-2xx response.
 */
async function fetchJson(url, options = {}) {
  const response = await fetch(url, options);
  if (response.ok) {
    return response.json();
  }
  const text = await response.text();
  const error = new Error(`Request failed ${response.status}: ${text}`);
  error.status = response.status;
  throw error;
}
391
-
392
/**
 * Resolves the root ("game") PID for a realm by walking parent links in the
 * project list returned by /basic/realms/game. An explicitly supplied game
 * PID short-circuits the lookup; any failure falls back to `pid` itself.
 */
async function resolveGamePid(apiHost, token, cid, pid, explicitGamePid) {
  if (explicitGamePid) {
    return explicitGamePid;
  }

  const scope = pid ? `${cid}.${pid}` : cid;
  try {
    const url = new URL(`/basic/realms/game`, apiHost);
    url.searchParams.set('rootPID', pid);
    const body = await fetchJson(url, {
      headers: {
        Authorization: `Bearer ${token}`,
        Accept: 'application/json',
        ...(scope ? { 'X-BEAM-SCOPE': scope } : {}),
      },
    });

    const projects = Array.isArray(body?.projects) ? body.projects : [];
    if (projects.length === 0) {
      return pid;
    }

    // Climb from this realm toward the root project, guarding against
    // parent-link cycles.
    const projectsByPid = new Map(projects.map((project) => [project.pid, project]));
    const seen = new Set();
    let node = projectsByPid.get(pid);
    while (node && node.parent && !seen.has(node.parent)) {
      seen.add(node.pid);
      node = projectsByPid.get(node.parent);
    }
    return node?.pid ?? pid;
  } catch {
    // Best effort: the realm PID itself is an acceptable fallback.
    return pid;
  }
}
429
-
430
/**
 * Exchanges a refresh token for a scoped access token via /basic/auth/token.
 * When no refresh token is supplied, or the exchange fails for any reason,
 * the caller's fallback token (and original refresh token) are returned.
 * @returns {Promise<{accessToken: string|undefined, refreshToken: string|undefined}>}
 */
async function getScopedAccessToken(apiHost, cid, pid, refreshToken, fallbackToken) {
  const fallback = { accessToken: fallbackToken, refreshToken };
  if (!refreshToken) {
    return fallback;
  }

  const scope = pid ? `${cid}.${pid}` : cid;
  try {
    const response = await fetch(new URL('/basic/auth/token', apiHost), {
      method: 'POST',
      headers: {
        Accept: 'application/json',
        'Content-Type': 'application/json',
        ...(scope ? { 'X-BEAM-SCOPE': scope } : {}),
      },
      body: JSON.stringify({
        grant_type: 'refresh_token',
        refresh_token: refreshToken,
      }),
    });

    if (!response.ok) {
      const text = await response.text();
      throw new Error(`Refresh token request failed: ${response.status} ${text}`);
    }

    const body = await response.json();
    return {
      accessToken: body.access_token ?? fallbackToken,
      refreshToken: body.refresh_token ?? refreshToken,
    };
  } catch {
    // Best effort: keep using the caller-supplied token.
    return fallback;
  }
}
464
-
465
/**
 * Fetches the realm's Docker registry endpoint and normalizes it to
 * `scheme://host/v2/`. The port is intentionally dropped to match the C#
 * CLI's GetDockerImageRegistryUri (Uri.Host strips the port there).
 * @throws when the response lacks a uri/registry/url field.
 */
async function getRegistryUrl(apiHost, token, cid, pid) {
  const scope = pid ? `${cid}.${pid}` : cid;
  const body = await fetchJson(new URL('/basic/beamo/registry', apiHost), {
    headers: {
      Authorization: `Bearer ${token}`,
      Accept: 'application/json',
      ...(scope ? { 'X-BEAM-SCOPE': scope } : {}),
    },
  });

  const uri = body.uri || body.registry || body.url;
  if (!uri) {
    throw new Error('Registry URI response missing "uri" field.');
  }

  const withScheme = uri.includes('://') ? uri : `https://${uri}`;
  const parsed = new URL(withScheme);
  return `${parsed.protocol}//${parsed.hostname}/v2/`;
}
484
-
485
/**
 * Pushes a locally-built image tarball to the realm's Docker registry:
 * config blob first, then each layer, then a schema-2 manifest tagged with
 * the short image id. When `progress` is truthy a single status line is
 * redrawn in place for each phase.
 */
async function uploadDockerImage({
  apiHost,
  registryUrl,
  cid,
  pid,
  gamePid,
  token,
  serviceId,
  uniqueName,
  imageTarPath,
  fullImageId,
  progress,
}) {
  const baseUrl = `${registryUrl}${uniqueName}/`;
  // Registry auth mirrors the C# CLI exactly: lowercase x-ks-* headers with
  // the CID, the realm PID (ctx.Pid — not gamePid), and the login token.
  const headers = {
    'x-ks-clientid': cid,
    'x-ks-projectid': pid,
    'x-ks-token': token,
  };

  // Redraws the single status line when progress output is enabled.
  const status = (text) => {
    if (progress) {
      process.stdout.write(`\r${colors.blue}↑${colors.reset} ${text}`);
    }
  };

  const { configBuffer, layers } = await readDockerImageTar(imageTarPath);

  status('Uploading config...');
  const configDescriptor = await uploadBlob(baseUrl, sha256Digest(configBuffer), configBuffer, headers);

  const layerDescriptors = [];
  for (let index = 0; index < layers.length; index++) {
    status(`Uploading layers (${index + 1}/${layers.length})...`);
    const { digest, size } = await uploadBlob(baseUrl, sha256Digest(layers[index].buffer), layers[index].buffer, headers);
    layerDescriptors.push({ digest, size, mediaType: LAYER_MEDIA_TYPE });
  }

  const manifest = {
    schemaVersion: 2,
    mediaType: MANIFEST_MEDIA_TYPE,
    config: {
      mediaType: CONFIG_MEDIA_TYPE,
      digest: configDescriptor.digest,
      size: configDescriptor.size,
    },
    layers: layerDescriptors,
  };

  // The backend looks images up by this short-id tag (C# CLI behavior).
  status('Uploading manifest...');
  await uploadManifest(baseUrl, manifest, shortDigest(fullImageId), headers);
  if (progress) {
    process.stdout.write('\r');
  }
}
550
-
551
/**
 * GETs the realm's current Beamo manifest. A 404 means no manifest exists
 * yet (first publish) and yields null; any other non-2xx status throws.
 */
async function fetchCurrentManifest(apiHost, token, cid, pid) {
  const response = await fetch(new URL('/api/beamo/manifests/current', apiHost), {
    headers: {
      Authorization: `Bearer ${token}`,
      Accept: 'application/json',
      'X-BEAM-SCOPE': `${cid}.${pid}`,
    },
  });

  if (response.status === 404) {
    return null;
  }
  if (!response.ok) {
    const text = await response.text();
    throw new Error(`Failed to fetch current manifest: ${response.status} ${text}`);
  }
  return response.json();
}
569
-
570
/**
 * Scans TypeScript sources under `srcDir` (default 'src') for
 * `@StorageObject('Name')` decorators and returns one storage reference per
 * unique name. Any failure is swallowed and yields an empty list — the
 * publish flow then keeps whatever the existing manifest already has.
 */
async function discoverStorageObjects(srcDir) {
  const storageObjects = [];
  try {
    const root = path.resolve(srcDir || 'src');
    const seen = new Set();
    for (const file of await getAllTypeScriptFiles(root)) {
      const content = await fs.readFile(file, 'utf-8');
      const decoratorPattern = /@StorageObject\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
      for (const match of content.matchAll(decoratorPattern)) {
        const storageName = match[1];
        if (storageName && !seen.has(storageName)) {
          seen.add(storageName);
          storageObjects.push({
            id: storageName,
            enabled: true,
            checksum: null,
            archived: false,
          });
        }
      }
    }
  } catch {
    // Best effort; discovery failure is not fatal.
  }
  return storageObjects;
}
599
-
600
/**
 * Scans TypeScript sources for `@FederatedInventory({ identity: SomeClass })`
 * decorators, resolves each identity class's getUniqueName() string literal
 * (from the same file or any other scanned file), and returns component ids
 * of the form `IFederatedInventory/<name>` and `IFederatedLogin/<name>`.
 * Any failure is swallowed — existing components are kept in that case.
 */
async function discoverFederationComponents(srcDir) {
  const components = [];
  const addComponent = (id) => {
    if (!components.includes(id)) {
      components.push(id);
    }
  };

  try {
    const root = path.resolve(srcDir || 'src');
    const files = await getAllTypeScriptFiles(root);

    for (const file of files) {
      const content = await fs.readFile(file, 'utf-8');

      const decoratorPattern = /@FederatedInventory\s*\(\s*\{\s*identity:\s*(\w+)\s*\}\s*\)/g;
      for (const decorator of content.matchAll(decoratorPattern)) {
        const identityClassName = decorator[1];

        // Locate the identity class body (possibly spanning many lines) and
        // capture the string literal returned by getUniqueName().
        const classPattern = new RegExp(
          `class\\s+${identityClassName}[^{]*\\{[\\s\\S]*?getUniqueName\\(\\)[\\s\\S]*?return\\s+['"]([^'"]+)['"]`,
          's'
        );
        let identityMatch = classPattern.exec(content);

        // Not defined in this file? Search the remaining sources.
        if (!identityMatch) {
          for (const otherFile of files) {
            if (otherFile === file) {
              continue;
            }
            identityMatch = classPattern.exec(await fs.readFile(otherFile, 'utf-8'));
            if (identityMatch) {
              break;
            }
          }
        }

        if (identityMatch) {
          const identityName = identityMatch[1];
          // Each federated identity implies both inventory and login.
          addComponent(`IFederatedInventory/${identityName}`);
          addComponent(`IFederatedLogin/${identityName}`);
        }
      }
    }
  } catch {
    // Best effort; discovery failure is not fatal.
  }
  return components;
}
657
-
658
/**
 * Recursively collects .ts/.tsx file paths under `dir`, skipping
 * node_modules and dot-directories. Unreadable directories contribute no
 * entries instead of failing the walk.
 */
async function getAllTypeScriptFiles(dir) {
  const found = [];
  let entries;
  try {
    entries = await fs.readdir(dir, { withFileTypes: true });
  } catch {
    return found;
  }
  for (const entry of entries) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      if (entry.name === 'node_modules' || entry.name.startsWith('.')) {
        continue;
      }
      found.push(...(await getAllTypeScriptFiles(fullPath)));
    } else if (entry.isFile() && (entry.name.endsWith('.ts') || entry.name.endsWith('.tsx'))) {
      found.push(fullPath);
    }
  }
  return found;
}
675
-
676
/**
 * Publishes an updated Beamo manifest: merges the discovered storage
 * objects, federation components, and dependencies for `serviceId` into the
 * existing manifest (other services are carried over untouched) and POSTs
 * the result with autoDeploy enabled.
 *
 * Values coming back from the API may arrive wrapped as `{ Value: ... }`;
 * the local `unwrap` helper normalizes both shapes.
 * @throws when the publish request returns a non-2xx status.
 */
async function updateManifest({
  apiHost,
  token,
  cid,
  pid,
  serviceId,
  shortImageId,
  comments,
  existingManifest,
  discoveredStorage,
  discoveredComponents,
  discoveredDependencies,
}) {
  const unwrap = (value) => value?.Value ?? value;

  const serviceReferences = unwrap(existingManifest?.serviceReferences)
    ?? existingManifest?.manifest
    ?? [];
  const rawStorageRefs = unwrap(existingManifest?.storageReferences) ?? [];
  const existingStorage = Array.isArray(rawStorageRefs)
    ? rawStorageRefs.map((reference) => ({
        id: unwrap(reference.id),
        storageType: unwrap(reference.storageType) ?? 'mongov1',
        enabled: unwrap(reference.enabled) ?? true,
        checksum: unwrap(reference.checksum),
        archived: unwrap(reference.archived) ?? false,
      }))
    : [];

  // Merge storage references by id. Existing entries win (they carry
  // checksums), but legacy 'mongo' is normalized to 'mongov1' and every
  // discovered id is pinned to 'mongov1' (all discovered storage is MongoDB).
  const storageById = new Map();
  for (const entry of existingStorage) {
    storageById.set(entry.id, {
      ...entry,
      storageType: entry.storageType === 'mongo' ? 'mongov1' : (entry.storageType || 'mongov1'),
    });
  }
  for (const entry of discoveredStorage) {
    const existing = storageById.get(entry.id);
    storageById.set(entry.id, existing
      ? { ...existing, storageType: 'mongov1' }
      : { ...entry, storageType: 'mongov1' });
  }
  const storageReferences = [...storageById.values()];

  // Pull this service's existing components/dependencies so discovery can
  // extend rather than replace them.
  const existingServiceRef = serviceReferences.find(
    (reference) => unwrap(reference.serviceName) === serviceId
  );
  const existingComponents = unwrap(existingServiceRef?.components) ?? [];
  const existingDependencies = unwrap(existingServiceRef?.dependencies) ?? [];

  // Components are `{ name }` records keyed by name; discovered names win.
  const componentsByName = new Map();
  if (Array.isArray(existingComponents)) {
    for (const component of existingComponents) {
      const name = unwrap(component.name) ?? component;
      if (typeof name === 'string') {
        componentsByName.set(name, { name });
      }
    }
  }
  for (const name of discoveredComponents || []) {
    componentsByName.set(name, { name });
  }
  const components = [...componentsByName.values()];

  // Dependencies are `{ id, storageType }` records keyed by id, with legacy
  // 'mongo' normalized to 'mongov1'; discovered entries win.
  const dependenciesById = new Map();
  if (Array.isArray(existingDependencies)) {
    for (const dependency of existingDependencies) {
      const id = unwrap(dependency.id) ?? dependency;
      if (typeof id !== 'string') {
        continue;
      }
      const rawType = unwrap(dependency.storageType) ?? 'mongov1';
      dependenciesById.set(id, {
        id,
        storageType: rawType === 'mongo' ? 'mongov1' : rawType,
      });
    }
  }
  for (const dependency of discoveredDependencies || []) {
    dependenciesById.set(dependency.id, dependency);
  }
  const dependencies = [...dependenciesById.values()];

  // Rebuild the service list: this service gets the new image id plus the
  // merged components/dependencies; every other service is carried over
  // with defaults filled in.
  let replacedExisting = false;
  const mappedServices = serviceReferences.map((reference) => {
    const name = unwrap(reference.serviceName);
    if (name === serviceId) {
      replacedExisting = true;
      return {
        serviceName: serviceId,
        enabled: true,
        templateId: unwrap(reference.templateId) ?? 'small',
        containerHealthCheckPort: unwrap(reference.containerHealthCheckPort) ?? 6565,
        imageId: shortImageId,
        imageCpuArch: unwrap(reference.imageCpuArch) ?? 'linux/amd64',
        logProvider: unwrap(reference.logProvider) ?? 'Clickhouse',
        dependencies,
        components,
      };
    }
    return {
      serviceName: name,
      enabled: unwrap(reference.enabled) ?? true,
      templateId: unwrap(reference.templateId) ?? 'small',
      containerHealthCheckPort: unwrap(reference.containerHealthCheckPort) ?? 6565,
      imageId: unwrap(reference.imageId) ?? shortImageId,
      imageCpuArch: unwrap(reference.imageCpuArch) ?? 'linux/amd64',
      logProvider: unwrap(reference.logProvider) ?? 'Clickhouse',
      dependencies: unwrap(reference.dependencies) ?? [],
      components: unwrap(reference.components) ?? [],
    };
  });

  if (!replacedExisting) {
    mappedServices.push({
      serviceName: serviceId,
      enabled: true,
      templateId: 'small',
      containerHealthCheckPort: 6565,
      imageId: shortImageId,
      imageCpuArch: 'linux/amd64',
      logProvider: 'Clickhouse',
      dependencies,
      components,
    });
  }

  const requestBody = {
    autoDeploy: true,
    comments: comments ?? '',
    manifest: mappedServices,
    storageReferences,
  };

  const response = await fetch(new URL('/api/beamo/manifests', apiHost), {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${token}`,
      Accept: 'application/json',
      'Content-Type': 'application/json',
      'X-BEAM-SCOPE': `${cid}.${pid}`,
    },
    body: JSON.stringify(requestBody),
  });

  if (!response.ok) {
    const text = await response.text();
    throw new Error(`Failed to publish manifest: ${response.status} ${text}`);
  }
}
853
-
854
/**
 * Assembles a temporary Docker build context for the service: copies the
 * package manifests, compiled `dist` output, and the OpenAPI document into
 * `<tmp>/context/app`, then writes a Dockerfile into `<tmp>/context`.
 *
 * @param {object} opts
 * @param {string} opts.entry - Entry point path. NOTE(review): currently
 *   unused here — the generated start script always runs dist/main.js.
 * @param {string} opts.distDir - Directory with compiled output to copy.
 * @param {string} opts.openapiPath - OpenAPI JSON to bundle; an empty `{}`
 *   document is written when the file is missing.
 * @param {string} opts.packageJson - Path to the service package.json.
 * @param {string} opts.packageLock - Path to package-lock.json (may not exist).
 * @param {string} opts.nodeVersion - Node base-image version for the Dockerfile.
 * @returns {Promise<{tempRoot: string, contextDir: string}>} The caller is
 *   responsible for removing `tempRoot` when done.
 */
async function prepareDockerContext({ entry, distDir, openapiPath, packageJson, packageLock, nodeVersion }) {
  const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'beam-node-ms-'));
  const contextDir = path.join(tempRoot, 'context');
  const appDir = path.join(contextDir, 'app');
  await fs.mkdir(appDir, { recursive: true });

  // The runtime is published to npm, so package.json needs no rewriting of
  // file: dependencies anymore — copy it verbatim. (A previous revision
  // parsed and cloned it into an unused `modifiedPkg`; that dead code has
  // been removed.)
  await fs.copyFile(packageJson, path.join(appDir, 'package.json'));

  try {
    await fs.copyFile(packageLock, path.join(appDir, 'package-lock.json'));
  } catch {
    // ignore missing package-lock
  }

  await copyDirectory(distDir, path.join(appDir, 'dist'));
  try {
    await fs.copyFile(openapiPath, path.join(appDir, 'beam_openApi.json'));
  } catch {
    // No OpenAPI document generated — ship an empty one.
    await fs.writeFile(path.join(appDir, 'beam_openApi.json'), '{}\n');
  }

  const dockerfile = `# syntax=docker/dockerfile:1
ARG NODE_VERSION=${nodeVersion}
FROM node:${nodeVersion}-alpine

WORKDIR /beam/service

COPY app/package*.json ./
# Install dependencies (runtime is now on npm, so no special handling needed)
RUN npm install --omit=dev && npm cache clean --force

COPY app/dist ./dist
COPY app/beam_openApi.json ./beam_openApi.json

# Expose health check port (matches C# microservice behavior)
EXPOSE 6565

ENV NODE_ENV=production

# Add startup script to log what's happening and catch errors
RUN echo '#!/bin/sh' > /beam/service/start.sh && \\
echo 'echo "Starting Node.js microservice..."' >> /beam/service/start.sh && \\
echo 'echo "Working directory: $(pwd)"' >> /beam/service/start.sh && \\
echo 'echo "Node version: $(node --version)"' >> /beam/service/start.sh && \\
echo 'echo "Files in dist:"' >> /beam/service/start.sh && \\
echo 'ls -la dist/ || echo "dist directory not found!"' >> /beam/service/start.sh && \\
echo 'echo "Starting main.js..."' >> /beam/service/start.sh && \\
echo 'exec node dist/main.js' >> /beam/service/start.sh && \\
chmod +x /beam/service/start.sh

# Use ENTRYPOINT with startup script to ensure we see what's happening
ENTRYPOINT ["/beam/service/start.sh"]

# Debug option: uncomment the line below and comment the ENTRYPOINT above
# to keep the container alive for debugging (like C# Dockerfile does)
# ENTRYPOINT ["tail", "-f", "/dev/null"]
`;
  await fs.writeFile(path.join(contextDir, 'Dockerfile'), dockerfile, 'utf8');

  return { tempRoot, contextDir };
}
921
-
922
/**
 * Publish pipeline entry point. Steps, in order:
 *  1. build (+ optional validation), 2. prepare Docker context, 3. docker build,
 *  4. docker save, 5. authenticate/resolve registry, 6. upload image,
 *  7. discover storage/components, 8. publish manifest.
 * Configuration precedence: CLI args > package.json "beamable" block > env vars.
 * The temp build context is always removed in the finally block.
 */
async function main() {
  const args = parseArgs(process.argv.slice(2));

  // Load extra environment variables before reading process.env below.
  if (args.envFile) {
    dotenv.config({ path: path.resolve(args.envFile) });
  }

  const pkg = await readJson(path.resolve('package.json'));
  const beamableConfig = pkg.beamable || {};

  const serviceId = args.service || beamableConfig.beamoId || pkg.name;
  ensure(serviceId, 'Service identifier is required. Provide --service or set beamable.beamoId in package.json.');

  const cid = ensure(args.cid || beamableConfig.cid || process.env.CID, 'CID is required (set CID env var or --cid).');
  const pid = ensure(args.pid || beamableConfig.pid || process.env.PID, 'PID is required (set PID env var or --pid).');
  const host = args.host || beamableConfig.host || process.env.HOST || 'wss://api.beamable.com/socket';
  const apiHost = normalizeApiHost(args.apiHost || beamableConfig.apiHost || process.env.BEAMABLE_API_HOST || host);
  const token = ensure(args.token || process.env.ACCESS_TOKEN || process.env.BEAMABLE_TOKEN, 'Access token is required (set BEAMABLE_TOKEN env var or --token).');

  const configuredGamePid = args.gamePid || beamableConfig.gamePid || process.env.BEAMABLE_GAME_PID;
  // NOTE(review): refreshToken is resolved but never used in this function — confirm
  // whether a downstream helper was supposed to receive it.
  const refreshToken = args.refreshToken || process.env.BEAMABLE_REFRESH_TOKEN || process.env.REFRESH_TOKEN;

  if (!apiHost) {
    throw new Error('API host could not be determined. Set BEAMABLE_API_HOST or provide --api-host.');
  }

  // Initialize progress bar (8 main steps)
  const progress = new ProgressBar(8);
  console.log(`${colors.bright}${colors.cyan}Publishing ${serviceId}...${colors.reset}\n`);

  // Step 1: Build
  progress.start('Building project');
  if (!args.skipValidate) {
    // Validation runs in a child Node process against the built output,
    // so the build must happen first and the validator skips its own build.
    const validateScript = path.resolve(__dirname, 'validate-service.mjs');
    const validateArgs = ['--entry', args.entry, '--output', args.openapi, '--cid', cid, '--pid', pid, '--host', host];
    if (args.envFile) {
      validateArgs.push('--env-file', args.envFile);
    }
    if (args.namePrefix) {
      validateArgs.push('--routing-key', args.namePrefix);
    }
    validateArgs.push('--skip-build');
    await runCommand('npm', ['run', 'build'], { silent: true });
    await runCommand(process.execPath, [validateScript, ...validateArgs], { shell: false, silent: true });
  } else {
    await runCommand('npm', ['run', 'build'], { silent: true });
  }
  progress.complete('Build complete');

  const packageJsonPath = path.resolve('package.json');
  const packageLockPath = path.resolve('package-lock.json');
  const distDir = path.resolve('dist');
  const openapiPath = path.resolve(args.openapi);
  const entryFile = path.resolve(args.entry);

  // Fail fast if the build produced nothing to package.
  await fs.access(entryFile);
  await fs.access(distDir);

  let tempRoot;
  try {
    // Step 2: Prepare Docker context
    progress.start('Preparing Docker context');
    const context = await prepareDockerContext({
      entry: entryFile,
      distDir,
      openapiPath,
      packageJson: packageJsonPath,
      packageLock: packageLockPath,
      nodeVersion: args.nodeVersion,
    });
    tempRoot = context.tempRoot;
    const { contextDir } = context;
    progress.complete('Docker context prepared');

    // Step 3: Build Docker image
    progress.start('Building Docker image');
    // Default tag: sanitized lowercase service id + timestamp, so repeat
    // publishes never collide with a previous local tag.
    const dockerTag = args.dockerTag || `${serviceId.toLowerCase().replace(/[^a-z0-9-_]/g, '-')}:${Date.now()}`;
    await runCommand('docker', ['build', '-t', dockerTag, contextDir], { cwd: contextDir, silent: true });
    progress.complete('Docker image built');

    // Step 4: Extract image ID and save
    progress.start('Preparing image for upload');
    const inspect = await runCommand('docker', ['image', 'inspect', '--format', '{{.Id}}', dockerTag], { capture: true });
    const fullImageId = inspect.stdout.trim();
    const imageTarPath = path.join(tempRoot, `${serviceId.replace(/[^a-z0-9-_]/gi, '_')}.tar`);
    await runCommand('docker', ['image', 'save', dockerTag, '-o', imageTarPath], { silent: true });
    progress.complete('Image prepared');

    // Step 5: Authenticate and get registry
    progress.start('Authenticating');
    const resolvedGamePid = await resolveGamePid(apiHost, token, cid, pid, configuredGamePid);

    // Verify token is valid (401 means invalid token, 403 might just be permission issue)
    // NOTE(review): this check is best-effort — non-401 failures (network errors,
    // other statuses) are deliberately swallowed by the catch below so a flaky
    // probe cannot block a publish. Only a 401 aborts with a re-login hint.
    try {
      const testUrl = new URL('/basic/accounts/me', apiHost);
      const testResponse = await fetch(testUrl, {
        headers: {
          Authorization: `Bearer ${token}`,
          Accept: 'application/json',
          'X-BEAM-SCOPE': `${cid}.${pid}`,
        },
      });
      if (testResponse.status === 401) {
        throw new Error(`Token validation failed: ${testResponse.status} ${await testResponse.text()}`);
      }
    } catch (error) {
      if (error.message.includes('401')) {
        throw new Error(`Token validation failed: ${error.message}. Please run "beamo-node login" again.`);
      }
    }

    const registryUrl = await getRegistryUrl(apiHost, token, cid, resolvedGamePid);
    // Registry repository name: first 30 hex chars of md5(cid_gamePid_serviceId).
    const uniqueName = md5Hex(`${cid}_${resolvedGamePid}_${serviceId}`).substring(0, 30);
    progress.complete('Authenticated');

    // Step 6: Upload Docker image
    progress.start('Uploading Docker image to registry');
    await uploadDockerImage({
      apiHost,
      registryUrl,
      cid,
      pid,
      gamePid: resolvedGamePid,
      token,
      serviceId,
      uniqueName,
      imageTarPath,
      fullImageId,
      progress,
    });
    progress.complete('Image uploaded');

    // Step 7: Discover storage, components, and dependencies
    progress.start('Discovering storage objects and components');
    const shortImageId = shortDigest(fullImageId);
    const existingManifest = await fetchCurrentManifest(apiHost, token, cid, pid);
    const discoveredStorage = await discoverStorageObjects('src');
    const discoveredComponents = await discoverFederationComponents('src');
    // Dependencies are ServiceDependencyReference objects with id and storageType
    // storageType should be "mongov1" for MongoDB storage objects (matching C# microservices)
    const discoveredDependencies = discoveredStorage.map(s => ({
      id: s.id,
      storageType: 'mongov1', // MongoDB storage type (matches ServiceStorageReference in backend)
    }));
    progress.complete('Storage and components discovered');

    // Step 8: Publish manifest
    progress.start('Publishing manifest');
    await updateManifest({
      apiHost,
      token,
      cid,
      pid,
      serviceId,
      shortImageId,
      comments: args.comments,
      existingManifest,
      discoveredStorage,
      discoveredComponents,
      discoveredDependencies,
    });
    progress.complete('Manifest published');

    // Success message
    console.log(`\n${colors.green}${colors.bright}✓ Publish complete!${colors.reset}`);
    console.log(`${colors.dim} Service:${colors.reset} ${serviceId}`);
    console.log(`${colors.dim} Image ID:${colors.reset} ${fullImageId}`);
    console.log(`${colors.dim} Registry:${colors.reset} ${registryUrl}${uniqueName}`);
  } finally {
    // Always remove the temp context/tarball, even on failure mid-pipeline.
    if (tempRoot) {
      await fs.rm(tempRoot, { recursive: true, force: true });
    }
  }
}
1096
-
1097
// Top-level failure handler: print a concise banner, the error message, and
// (only when a debug env flag is set) full diagnostics, then exit non-zero.
// Fix: the callback was marked `async` but never awaited anything.
main().catch((error) => {
  // Show clean error message
  console.error(`\n${colors.red}${colors.bright}✗ Publish failed${colors.reset}`);

  if (!(error instanceof Error)) {
    // Non-Error throwables (strings, objects) are printed verbatim.
    console.error(`${colors.red}${error}${colors.reset}`);
    process.exit(1);
  }

  console.error(`${colors.red}${error.message}${colors.reset}`);

  // Verbose diagnostics are opt-in via either debug flag.
  if (process.env.BEAMO_DEBUG === '1' || process.env.BEAMO_NODE_DEBUG === '1') {
    console.error(`\n${colors.dim}Stack:${colors.reset} ${error.stack}`);
    if (error.cause) {
      console.error(`${colors.dim}Cause:${colors.reset} ${error.cause}`);
    }
    // runCommand-style errors may carry captured child-process output.
    if (error.stdout) {
      console.error(`${colors.dim}stdout:${colors.reset} ${error.stdout}`);
    }
    if (error.stderr) {
      console.error(`${colors.dim}stderr:${colors.reset} ${error.stderr}`);
    }
    // Serialize non-enumerable props (message, stack, ...) too.
    console.error(`\n${colors.dim}Full error:${colors.reset}`, JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
  }
  process.exit(1);
});