@series-inc/stowkit-cli 0.6.17 → 0.6.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/app/disk-project.d.ts +24 -1
- package/dist/app/disk-project.js +7 -0
- package/dist/app/process-cache.js +1 -1
- package/dist/app/state.d.ts +2 -1
- package/dist/app/state.js +2 -1
- package/dist/app/stowmeta-io.d.ts +6 -3
- package/dist/app/stowmeta-io.js +62 -21
- package/dist/app/thumbnail-cache.d.ts +29 -0
- package/dist/app/thumbnail-cache.js +137 -0
- package/dist/assets-package.d.ts +64 -0
- package/dist/assets-package.js +80 -0
- package/dist/cleanup.js +11 -2
- package/dist/cli.js +47 -0
- package/dist/core/constants.d.ts +4 -2
- package/dist/core/constants.js +4 -2
- package/dist/core/types.d.ts +5 -0
- package/dist/core/types.js +5 -0
- package/dist/encoders/basis-encoder.js +2 -1
- package/dist/format/metadata.js +12 -7
- package/dist/gcs.d.ts +10 -0
- package/dist/gcs.js +158 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +5 -0
- package/dist/node-fs.d.ts +4 -0
- package/dist/node-fs.js +14 -0
- package/dist/orchestrator.js +37 -10
- package/dist/pipeline.js +1 -0
- package/dist/publish.d.ts +27 -0
- package/dist/publish.js +399 -0
- package/dist/server.js +567 -20
- package/dist/store.d.ts +48 -0
- package/dist/store.js +300 -0
- package/package.json +2 -2
- package/skill.md +63 -0
package/dist/cli.js
CHANGED
|
@@ -10,6 +10,8 @@ import { cleanupProject } from './cleanup.js';
|
|
|
10
10
|
import { createMaterial } from './create-material.js';
|
|
11
11
|
import { renameAsset, moveAsset, deleteAsset, setStringId } from './asset-commands.js';
|
|
12
12
|
import { inspectPack } from './inspect.js';
|
|
13
|
+
import { publishPackage } from './publish.js';
|
|
14
|
+
import { storeSearch, storeList, storeInfo } from './store.js';
|
|
13
15
|
const args = process.argv.slice(2);
|
|
14
16
|
const thisDir = path.dirname(fileURLToPath(import.meta.url));
|
|
15
17
|
const STOWKIT_PACKAGES = [
|
|
@@ -89,6 +91,10 @@ Usage:
|
|
|
89
91
|
stowkit delete <path> Delete an asset and its sidecar files
|
|
90
92
|
stowkit set-id <path> <id> Change an asset's stringId
|
|
91
93
|
stowkit inspect <file.stow> Show manifest of a built .stow pack
|
|
94
|
+
stowkit publish [dir] Publish asset packs to GCS bucket
|
|
95
|
+
stowkit store search <query> Search the asset store
|
|
96
|
+
stowkit store list List all packages in the store
|
|
97
|
+
stowkit store info <package> Show package details and assets
|
|
92
98
|
stowkit update Update CLI to latest version and refresh skill files
|
|
93
99
|
stowkit version Show installed version
|
|
94
100
|
stowkit packer [dir] Open the packer GUI
|
|
@@ -100,6 +106,10 @@ Options:
|
|
|
100
106
|
--verbose Detailed output
|
|
101
107
|
--port Server port (default 3210)
|
|
102
108
|
--schema Material schema template: pbr (default), unlit, or custom name
|
|
109
|
+
--bucket GCS bucket for publish/store (overrides default)
|
|
110
|
+
--dry-run Show what would be published without uploading
|
|
111
|
+
--json Output store results as JSON (for AI agents)
|
|
112
|
+
--type Filter store search by asset type
|
|
103
113
|
--help Show this help message
|
|
104
114
|
`.trim());
|
|
105
115
|
}
|
|
@@ -151,6 +161,12 @@ async function main() {
|
|
|
151
161
|
const verbose = args.includes('--verbose') || args.includes('-v');
|
|
152
162
|
const portIdx = args.indexOf('--port');
|
|
153
163
|
const port = portIdx >= 0 ? parseInt(args[portIdx + 1]) : 3210;
|
|
164
|
+
const dryRun = args.includes('--dry-run');
|
|
165
|
+
const jsonOutput = args.includes('--json');
|
|
166
|
+
const bucketIdx = args.indexOf('--bucket');
|
|
167
|
+
const bucket = bucketIdx >= 0 ? args[bucketIdx + 1] : undefined;
|
|
168
|
+
const typeIdx = args.indexOf('--type');
|
|
169
|
+
const typeFilter = typeIdx >= 0 ? args[typeIdx + 1] : undefined;
|
|
154
170
|
const opts = { force, verbose };
|
|
155
171
|
try {
|
|
156
172
|
switch (command) {
|
|
@@ -280,6 +296,37 @@ async function main() {
|
|
|
280
296
|
await inspectPack(stowPath, { verbose });
|
|
281
297
|
break;
|
|
282
298
|
}
|
|
299
|
+
case 'publish':
|
|
300
|
+
await publishPackage(projectDir, { force, dryRun, bucket, verbose });
|
|
301
|
+
break;
|
|
302
|
+
case 'store': {
|
|
303
|
+
const subCmd = args[1];
|
|
304
|
+
const storeOpts = { json: jsonOutput, bucket };
|
|
305
|
+
if (subCmd === 'search') {
|
|
306
|
+
const query = args.filter(a => !a.startsWith('-') && a !== 'store' && a !== 'search').join(' ');
|
|
307
|
+
if (!query) {
|
|
308
|
+
console.error('Usage: stowkit store search <query> [--type <type>] [--json]');
|
|
309
|
+
process.exit(1);
|
|
310
|
+
}
|
|
311
|
+
await storeSearch(query, { ...storeOpts, type: typeFilter });
|
|
312
|
+
}
|
|
313
|
+
else if (subCmd === 'list') {
|
|
314
|
+
await storeList(storeOpts);
|
|
315
|
+
}
|
|
316
|
+
else if (subCmd === 'info') {
|
|
317
|
+
const pkgName = args.find(a => !a.startsWith('-') && a !== 'store' && a !== 'info');
|
|
318
|
+
if (!pkgName) {
|
|
319
|
+
console.error('Usage: stowkit store info <package-name> [--json]');
|
|
320
|
+
process.exit(1);
|
|
321
|
+
}
|
|
322
|
+
await storeInfo(pkgName, storeOpts);
|
|
323
|
+
}
|
|
324
|
+
else {
|
|
325
|
+
console.error('Usage: stowkit store <search|list|info> [args]');
|
|
326
|
+
process.exit(1);
|
|
327
|
+
}
|
|
328
|
+
break;
|
|
329
|
+
}
|
|
283
330
|
case 'packer': {
|
|
284
331
|
if (await isStowKitRunning(port)) {
|
|
285
332
|
console.log(`\n Packer already running: http://localhost:${port}\n`);
|
package/dist/core/constants.d.ts
CHANGED
|
@@ -12,8 +12,10 @@ export declare const DATA_ALIGNMENT = 16;
|
|
|
12
12
|
export declare const MAX_PATH_LENGTH = 512;
|
|
13
13
|
/** Size of the string_id field in metadata structs (bytes) */
|
|
14
14
|
export declare const STRING_ID_SIZE = 128;
|
|
15
|
-
/** Size of TextureMetadata on disk (bytes) */
|
|
16
|
-
export declare const TEXTURE_METADATA_SIZE = 144;
|
|
15
|
+
/** Size of TextureMetadata on disk (bytes) — v1.1: appended filtering uint32 */
|
|
16
|
+
export declare const TEXTURE_METADATA_SIZE = 148;
|
|
17
|
+
/** Size of legacy TextureMetadata without filtering field (bytes) */
|
|
18
|
+
export declare const LEGACY_TEXTURE_METADATA_SIZE = 144;
|
|
17
19
|
/** Size of AudioMetadata on disk (bytes) */
|
|
18
20
|
export declare const AUDIO_METADATA_SIZE = 140;
|
|
19
21
|
/** Size of MeshGeometryInfo on disk (bytes) */
|
package/dist/core/constants.js
CHANGED
|
@@ -12,8 +12,10 @@ export const DATA_ALIGNMENT = 16;
|
|
|
12
12
|
export const MAX_PATH_LENGTH = 512;
|
|
13
13
|
/** Size of the string_id field in metadata structs (bytes) */
|
|
14
14
|
export const STRING_ID_SIZE = 128;
|
|
15
|
-
/** Size of TextureMetadata on disk (bytes) */
|
|
16
|
-
export const TEXTURE_METADATA_SIZE = 144;
|
|
15
|
+
/** Size of TextureMetadata on disk (bytes) — v1.1: appended filtering uint32 */
|
|
16
|
+
export const TEXTURE_METADATA_SIZE = 148;
|
|
17
|
+
/** Size of legacy TextureMetadata without filtering field (bytes) */
|
|
18
|
+
export const LEGACY_TEXTURE_METADATA_SIZE = 144;
|
|
17
19
|
/** Size of AudioMetadata on disk (bytes) */
|
|
18
20
|
export const AUDIO_METADATA_SIZE = 140;
|
|
19
21
|
/** Size of MeshGeometryInfo on disk (bytes) */
|
package/dist/core/types.d.ts
CHANGED
|
@@ -26,6 +26,10 @@ export declare enum TextureResize {
|
|
|
26
26
|
Quarter = 2,
|
|
27
27
|
Eighth = 3
|
|
28
28
|
}
|
|
29
|
+
export declare enum TextureFilterMode {
|
|
30
|
+
Linear = 0,
|
|
31
|
+
Nearest = 1
|
|
32
|
+
}
|
|
29
33
|
export declare enum MaterialFieldType {
|
|
30
34
|
Texture = 0,
|
|
31
35
|
Color = 1,
|
|
@@ -86,6 +90,7 @@ export interface TextureMetadata {
|
|
|
86
90
|
channels: number;
|
|
87
91
|
channelFormat: TextureChannelFormat;
|
|
88
92
|
stringId: string;
|
|
93
|
+
filtering: TextureFilterMode;
|
|
89
94
|
}
|
|
90
95
|
export interface AudioMetadata {
|
|
91
96
|
stringId: string;
|
package/dist/core/types.js
CHANGED
|
@@ -32,6 +32,11 @@ export var TextureResize;
|
|
|
32
32
|
TextureResize[TextureResize["Quarter"] = 2] = "Quarter";
|
|
33
33
|
TextureResize[TextureResize["Eighth"] = 3] = "Eighth";
|
|
34
34
|
})(TextureResize || (TextureResize = {}));
|
|
35
|
+
export var TextureFilterMode;
|
|
36
|
+
(function (TextureFilterMode) {
|
|
37
|
+
TextureFilterMode[TextureFilterMode["Linear"] = 0] = "Linear";
|
|
38
|
+
TextureFilterMode[TextureFilterMode["Nearest"] = 1] = "Nearest";
|
|
39
|
+
})(TextureFilterMode || (TextureFilterMode = {}));
|
|
35
40
|
// ─── Material Enums ─────────────────────────────────────────────────────────
|
|
36
41
|
export var MaterialFieldType;
|
|
37
42
|
(function (MaterialFieldType) {
|
|
@@ -2,7 +2,7 @@ import * as fs from 'node:fs';
|
|
|
2
2
|
import * as path from 'node:path';
|
|
3
3
|
import { fileURLToPath } from 'node:url';
|
|
4
4
|
import { createRequire } from 'node:module';
|
|
5
|
-
import { KTX2Quality, TextureChannelFormat } from '../core/types.js';
|
|
5
|
+
import { KTX2Quality, TextureChannelFormat, TextureFilterMode } from '../core/types.js';
|
|
6
6
|
const QUALITY_TO_LEVEL = {
|
|
7
7
|
[KTX2Quality.Fastest]: 1,
|
|
8
8
|
[KTX2Quality.Fast]: 64,
|
|
@@ -106,6 +106,7 @@ export class NodeBasisEncoder {
|
|
|
106
106
|
channels: useUastc ? 4 : 3,
|
|
107
107
|
channelFormat: useUastc ? TextureChannelFormat.RGBA : TextureChannelFormat.RGB,
|
|
108
108
|
stringId: '',
|
|
109
|
+
filtering: TextureFilterMode.Linear,
|
|
109
110
|
};
|
|
110
111
|
return { data: ktx2Data, metadata };
|
|
111
112
|
}
|
package/dist/format/metadata.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { BinaryReader, BinaryWriter } from '../core/binary.js';
|
|
2
2
|
import { TEXTURE_METADATA_SIZE, AUDIO_METADATA_SIZE, STRING_ID_SIZE, MESH_GEOMETRY_INFO_SIZE, SCENE_NODE_SIZE, MATERIAL_DATA_FIXED_SIZE, MATERIAL_PROPERTY_VALUE_SIZE, MESH_METADATA_FIXED_SIZE, NODE_NAME_SIZE, MATERIAL_NAME_SIZE, MATERIAL_SCHEMA_ID_SIZE, MATERIAL_FIELD_NAME_SIZE, MATERIAL_SCHEMA_NAME_SIZE, MATERIAL_SCHEMA_DEFAULT_TEXTURE_ID_SIZE, MATERIAL_SCHEMA_METADATA_FIXED_SIZE, MATERIAL_SCHEMA_FIELD_SIZE, BONE_NAME_SIZE, SKINNED_MESH_GEOMETRY_INFO_SIZE, SKINNED_MESH_METADATA_FIXED_SIZE, BONE_SIZE, ANIMATION_TRACK_DESCRIPTOR_SIZE, ANIMATION_CLIP_METADATA_FIXED_SIZE, TRACK_NAME_SIZE, ANIMATION_METADATA_VERSION, } from '../core/constants.js';
|
|
3
|
+
import { TextureFilterMode } from '../core/types.js';
|
|
3
4
|
// ─── Texture Metadata ───────────────────────────────────────────────────────
|
|
4
5
|
export function serializeTextureMetadata(meta) {
|
|
5
6
|
const w = new BinaryWriter(TEXTURE_METADATA_SIZE);
|
|
@@ -8,17 +9,21 @@ export function serializeTextureMetadata(meta) {
|
|
|
8
9
|
w.writeUint32(meta.channels);
|
|
9
10
|
w.writeUint32(meta.channelFormat);
|
|
10
11
|
w.writeFixedString(meta.stringId, STRING_ID_SIZE);
|
|
12
|
+
w.writeUint32(meta.filtering);
|
|
11
13
|
return w.getUint8Array();
|
|
12
14
|
}
|
|
13
15
|
export function deserializeTextureMetadata(data) {
|
|
14
16
|
const r = new BinaryReader(data);
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
17
|
+
const width = r.readUint32();
|
|
18
|
+
const height = r.readUint32();
|
|
19
|
+
const channels = r.readUint32();
|
|
20
|
+
const channelFormat = r.readUint32();
|
|
21
|
+
const stringId = r.readFixedString(STRING_ID_SIZE);
|
|
22
|
+
// Old packs have 144-byte texture metadata (no filtering field)
|
|
23
|
+
const filtering = data.length >= TEXTURE_METADATA_SIZE
|
|
24
|
+
? r.readUint32()
|
|
25
|
+
: TextureFilterMode.Linear;
|
|
26
|
+
return { width, height, channels, channelFormat, stringId, filtering };
|
|
22
27
|
}
|
|
23
28
|
// ─── Audio Metadata ─────────────────────────────────────────────────────────
|
|
24
29
|
export function serializeAudioMetadata(meta) {
|
package/dist/gcs.d.ts
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
export interface GCSClient {
|
|
2
|
+
upload(objectPath: string, data: Uint8Array | string, contentType?: string): Promise<void>;
|
|
3
|
+
download(objectPath: string): Promise<string | null>;
|
|
4
|
+
downloadWithGeneration(objectPath: string): Promise<{
|
|
5
|
+
data: string;
|
|
6
|
+
generation: string;
|
|
7
|
+
} | null>;
|
|
8
|
+
uploadWithGeneration(objectPath: string, data: string, generation: string | null, contentType?: string): Promise<void>;
|
|
9
|
+
}
|
|
10
|
+
export declare function createGCSClient(projectDir: string, bucketUri: string): Promise<GCSClient>;
|
package/dist/gcs.js
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
import * as fs from 'node:fs/promises';
|
|
2
|
+
import * as path from 'node:path';
|
|
3
|
+
import * as crypto from 'node:crypto';
|
|
4
|
+
// ─── JWT Auth ────────────────────────────────────────────────────────────────
|
|
5
|
+
function base64url(data) {
|
|
6
|
+
const buf = typeof data === 'string' ? Buffer.from(data) : data;
|
|
7
|
+
return buf.toString('base64url');
|
|
8
|
+
}
|
|
9
|
+
function createJWT(sa) {
|
|
10
|
+
const now = Math.floor(Date.now() / 1000);
|
|
11
|
+
const header = { alg: 'RS256', typ: 'JWT' };
|
|
12
|
+
const payload = {
|
|
13
|
+
iss: sa.client_email,
|
|
14
|
+
scope: 'https://www.googleapis.com/auth/devstorage.read_write',
|
|
15
|
+
aud: 'https://oauth2.googleapis.com/token',
|
|
16
|
+
iat: now,
|
|
17
|
+
exp: now + 3600,
|
|
18
|
+
};
|
|
19
|
+
const segments = `${base64url(JSON.stringify(header))}.${base64url(JSON.stringify(payload))}`;
|
|
20
|
+
const sign = crypto.createSign('RSA-SHA256');
|
|
21
|
+
sign.update(segments);
|
|
22
|
+
const signature = sign.sign(sa.private_key);
|
|
23
|
+
return `${segments}.${base64url(signature)}`;
|
|
24
|
+
}
|
|
25
|
+
async function getAccessToken(sa) {
|
|
26
|
+
const jwt = createJWT(sa);
|
|
27
|
+
const res = await fetch('https://oauth2.googleapis.com/token', {
|
|
28
|
+
method: 'POST',
|
|
29
|
+
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
|
|
30
|
+
body: `grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer&assertion=${jwt}`,
|
|
31
|
+
});
|
|
32
|
+
if (!res.ok) {
|
|
33
|
+
const text = await res.text();
|
|
34
|
+
throw new Error(`GCS auth failed (${res.status}): ${text}`);
|
|
35
|
+
}
|
|
36
|
+
const data = await res.json();
|
|
37
|
+
return data.access_token;
|
|
38
|
+
}
|
|
39
|
+
// ─── Credential Resolution ───────────────────────────────────────────────────
|
|
40
|
+
async function loadServiceAccount(projectDir) {
|
|
41
|
+
// Search order: project dir, cwd, GOOGLE_APPLICATION_CREDENTIALS env
|
|
42
|
+
const candidates = [
|
|
43
|
+
path.join(projectDir, 'service_account.json'),
|
|
44
|
+
path.join(process.cwd(), 'service_account.json'),
|
|
45
|
+
];
|
|
46
|
+
for (const candidate of candidates) {
|
|
47
|
+
try {
|
|
48
|
+
const text = await fs.readFile(candidate, 'utf-8');
|
|
49
|
+
return JSON.parse(text);
|
|
50
|
+
}
|
|
51
|
+
catch { /* not found */ }
|
|
52
|
+
}
|
|
53
|
+
// Fall back to GOOGLE_APPLICATION_CREDENTIALS
|
|
54
|
+
const envPath = process.env.GOOGLE_APPLICATION_CREDENTIALS;
|
|
55
|
+
if (envPath) {
|
|
56
|
+
try {
|
|
57
|
+
const text = await fs.readFile(envPath, 'utf-8');
|
|
58
|
+
return JSON.parse(text);
|
|
59
|
+
}
|
|
60
|
+
catch {
|
|
61
|
+
throw new Error(`Could not read service account from GOOGLE_APPLICATION_CREDENTIALS: ${envPath}`);
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
throw new Error('No GCS credentials found. Place service_account.json in project root, ' +
|
|
65
|
+
'current directory, or set GOOGLE_APPLICATION_CREDENTIALS environment variable.');
|
|
66
|
+
}
|
|
67
|
+
// ─── Bucket Name Parsing ─────────────────────────────────────────────────────
|
|
68
|
+
function parseBucket(bucketUri) {
|
|
69
|
+
// Accept "gs://bucket-name" or just "bucket-name"
|
|
70
|
+
if (bucketUri.startsWith('gs://'))
|
|
71
|
+
return bucketUri.slice(5).replace(/\/$/, '');
|
|
72
|
+
return bucketUri.replace(/\/$/, '');
|
|
73
|
+
}
|
|
74
|
+
// ─── GCS Client Factory ─────────────────────────────────────────────────────
|
|
75
|
+
export async function createGCSClient(projectDir, bucketUri) {
|
|
76
|
+
const sa = await loadServiceAccount(projectDir);
|
|
77
|
+
const token = await getAccessToken(sa);
|
|
78
|
+
const bucket = parseBucket(bucketUri);
|
|
79
|
+
const apiBase = `https://storage.googleapis.com`;
|
|
80
|
+
return {
|
|
81
|
+
async upload(objectPath, data, contentType = 'application/octet-stream') {
|
|
82
|
+
const encoded = encodeURIComponent(objectPath);
|
|
83
|
+
const url = `${apiBase}/upload/storage/v1/b/${bucket}/o?uploadType=media&name=${encoded}`;
|
|
84
|
+
const body = typeof data === 'string' ? data : Buffer.from(data);
|
|
85
|
+
const res = await fetch(url, {
|
|
86
|
+
method: 'POST',
|
|
87
|
+
headers: {
|
|
88
|
+
Authorization: `Bearer ${token}`,
|
|
89
|
+
'Content-Type': contentType,
|
|
90
|
+
},
|
|
91
|
+
body,
|
|
92
|
+
});
|
|
93
|
+
if (!res.ok) {
|
|
94
|
+
const text = await res.text();
|
|
95
|
+
throw new Error(`GCS upload failed for ${objectPath} (${res.status}): ${text}`);
|
|
96
|
+
}
|
|
97
|
+
},
|
|
98
|
+
async download(objectPath) {
|
|
99
|
+
const encoded = encodeURIComponent(objectPath);
|
|
100
|
+
const url = `${apiBase}/storage/v1/b/${bucket}/o/${encoded}?alt=media`;
|
|
101
|
+
const res = await fetch(url, {
|
|
102
|
+
headers: { Authorization: `Bearer ${token}` },
|
|
103
|
+
});
|
|
104
|
+
if (res.status === 404)
|
|
105
|
+
return null;
|
|
106
|
+
if (!res.ok) {
|
|
107
|
+
const text = await res.text();
|
|
108
|
+
throw new Error(`GCS download failed for ${objectPath} (${res.status}): ${text}`);
|
|
109
|
+
}
|
|
110
|
+
return res.text();
|
|
111
|
+
},
|
|
112
|
+
async downloadWithGeneration(objectPath) {
|
|
113
|
+
const encoded = encodeURIComponent(objectPath);
|
|
114
|
+
const url = `${apiBase}/storage/v1/b/${bucket}/o/${encoded}?alt=media`;
|
|
115
|
+
const res = await fetch(url, {
|
|
116
|
+
headers: { Authorization: `Bearer ${token}` },
|
|
117
|
+
});
|
|
118
|
+
if (res.status === 404)
|
|
119
|
+
return null;
|
|
120
|
+
if (!res.ok) {
|
|
121
|
+
const text = await res.text();
|
|
122
|
+
throw new Error(`GCS download failed for ${objectPath} (${res.status}): ${text}`);
|
|
123
|
+
}
|
|
124
|
+
const data = await res.text();
|
|
125
|
+
const generation = res.headers.get('x-goog-generation') ?? '0';
|
|
126
|
+
return { data, generation };
|
|
127
|
+
},
|
|
128
|
+
async uploadWithGeneration(objectPath, data, generation, contentType = 'application/json') {
|
|
129
|
+
const encoded = encodeURIComponent(objectPath);
|
|
130
|
+
let url = `${apiBase}/upload/storage/v1/b/${bucket}/o?uploadType=media&name=${encoded}`;
|
|
131
|
+
const headers = {
|
|
132
|
+
Authorization: `Bearer ${token}`,
|
|
133
|
+
'Content-Type': contentType,
|
|
134
|
+
};
|
|
135
|
+
// Optimistic concurrency: if we have a generation, require it to match
|
|
136
|
+
if (generation) {
|
|
137
|
+
headers['x-goog-if-generation-match'] = generation;
|
|
138
|
+
}
|
|
139
|
+
else {
|
|
140
|
+
// Object should not exist yet
|
|
141
|
+
headers['x-goog-if-generation-match'] = '0';
|
|
142
|
+
}
|
|
143
|
+
const res = await fetch(url, {
|
|
144
|
+
method: 'POST',
|
|
145
|
+
headers,
|
|
146
|
+
body: data,
|
|
147
|
+
});
|
|
148
|
+
if (res.status === 412) {
|
|
149
|
+
throw new Error(`Registry was modified by another publish while uploading. ` +
|
|
150
|
+
`Please retry the publish command.`);
|
|
151
|
+
}
|
|
152
|
+
if (!res.ok) {
|
|
153
|
+
const text = await res.text();
|
|
154
|
+
throw new Error(`GCS upload failed for ${objectPath} (${res.status}): ${text}`);
|
|
155
|
+
}
|
|
156
|
+
},
|
|
157
|
+
};
|
|
158
|
+
}
|
package/dist/index.d.ts
CHANGED
|
@@ -27,3 +27,8 @@ export type { ServerOptions } from './server.js';
|
|
|
27
27
|
export { initProject } from './init.js';
|
|
28
28
|
export { cleanupProject } from './cleanup.js';
|
|
29
29
|
export { syncRuntimeAssets } from './sync-runtime-assets.js';
|
|
30
|
+
export { publishPackage } from './publish.js';
|
|
31
|
+
export type { PublishOptions, PublishResult } from './publish.js';
|
|
32
|
+
export * from './assets-package.js';
|
|
33
|
+
export { fetchRegistry, searchAssets, listPackages, resolveAssetDeps } from './store.js';
|
|
34
|
+
export type { SearchResult, PackageInfo } from './store.js';
|
package/dist/index.js
CHANGED
|
@@ -35,3 +35,8 @@ export { initProject } from './init.js';
|
|
|
35
35
|
export { cleanupProject } from './cleanup.js';
|
|
36
36
|
// Runtime asset sync
|
|
37
37
|
export { syncRuntimeAssets } from './sync-runtime-assets.js';
|
|
38
|
+
// Publish
|
|
39
|
+
export { publishPackage } from './publish.js';
|
|
40
|
+
export * from './assets-package.js';
|
|
41
|
+
// Store
|
|
42
|
+
export { fetchRegistry, searchAssets, listPackages, resolveAssetDeps } from './store.js';
|
package/dist/node-fs.d.ts
CHANGED
|
@@ -6,6 +6,10 @@ export declare function renameFile(basePath: string, oldRelative: string, newRel
|
|
|
6
6
|
export declare function deleteFile(basePath: string, relativePath: string): Promise<void>;
|
|
7
7
|
export declare function fileExists(basePath: string, relativePath: string): Promise<boolean>;
|
|
8
8
|
export declare function getFileSnapshot(basePath: string, relativePath: string): Promise<FileSnapshot | null>;
|
|
9
|
+
export declare function probeImageDimensions(srcArtDir: string, relativePath: string): Promise<{
|
|
10
|
+
width: number;
|
|
11
|
+
height: number;
|
|
12
|
+
} | null>;
|
|
9
13
|
export interface ScanResult {
|
|
10
14
|
sourceFiles: FileSnapshot[];
|
|
11
15
|
metaFiles: FileSnapshot[];
|
package/dist/node-fs.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import * as fs from 'node:fs/promises';
|
|
2
2
|
import * as path from 'node:path';
|
|
3
|
+
import sharp from 'sharp';
|
|
3
4
|
// ─── File I/O ────────────────────────────────────────────────────────────────
|
|
4
5
|
export async function readFile(basePath, relativePath) {
|
|
5
6
|
try {
|
|
@@ -68,6 +69,19 @@ export async function getFileSnapshot(basePath, relativePath) {
|
|
|
68
69
|
return null;
|
|
69
70
|
}
|
|
70
71
|
}
|
|
72
|
+
export async function probeImageDimensions(srcArtDir, relativePath) {
|
|
73
|
+
try {
|
|
74
|
+
const absPath = path.join(srcArtDir, relativePath);
|
|
75
|
+
const metadata = await sharp(absPath).metadata();
|
|
76
|
+
if (metadata.width && metadata.height) {
|
|
77
|
+
return { width: metadata.width, height: metadata.height };
|
|
78
|
+
}
|
|
79
|
+
return null;
|
|
80
|
+
}
|
|
81
|
+
catch {
|
|
82
|
+
return null;
|
|
83
|
+
}
|
|
84
|
+
}
|
|
71
85
|
// ─── Directory Scanning ──────────────────────────────────────────────────────
|
|
72
86
|
const SOURCE_EXTENSIONS = new Set([
|
|
73
87
|
'png', 'jpg', 'jpeg', 'bmp', 'tga', 'webp', 'gif',
|
package/dist/orchestrator.js
CHANGED
|
@@ -4,8 +4,8 @@ import { AssetType } from './core/types.js';
|
|
|
4
4
|
import { cleanupProject } from './cleanup.js';
|
|
5
5
|
import { defaultAssetSettings } from './app/state.js';
|
|
6
6
|
import { BlobStore } from './app/blob-store.js';
|
|
7
|
-
import { readProjectConfig, scanDirectory, readFile, getFileSnapshot, } from './node-fs.js';
|
|
8
|
-
import { readStowmeta, writeStowmeta, stowmetaToAssetSettings, generateDefaultStowmeta, glbChildToAssetSettings, generateDefaultGlbChild, } from './app/stowmeta-io.js';
|
|
7
|
+
import { readProjectConfig, scanDirectory, readFile, getFileSnapshot, probeImageDimensions, } from './node-fs.js';
|
|
8
|
+
import { detectAssetType, readStowmeta, writeStowmeta, stowmetaToAssetSettings, generateDefaultStowmeta, glbChildToAssetSettings, generateDefaultGlbChild, } from './app/stowmeta-io.js';
|
|
9
9
|
import { parseGlb, pbrToMaterialConfig } from './encoders/glb-loader.js';
|
|
10
10
|
import { readStowmat, stowmatToMaterialConfig } from './app/stowmat-io.js';
|
|
11
11
|
import { readCacheBlobs, writeCacheBlobs, buildCacheStamp, isCacheValid, } from './app/process-cache.js';
|
|
@@ -21,7 +21,11 @@ export async function scanProject(projectDir, opts) {
|
|
|
21
21
|
for (const file of scan.sourceFiles) {
|
|
22
22
|
if (!existingMeta.has(file.relativePath)) {
|
|
23
23
|
newFiles.push(file.relativePath);
|
|
24
|
-
const
|
|
24
|
+
const type = detectAssetType(file.relativePath);
|
|
25
|
+
const imageDimensions = type === AssetType.Texture2D
|
|
26
|
+
? await probeImageDimensions(config.srcArtDir, file.relativePath)
|
|
27
|
+
: null;
|
|
28
|
+
const meta = generateDefaultStowmeta(file.relativePath, type, config.config.defaults, imageDimensions);
|
|
25
29
|
await writeStowmeta(config.srcArtDir, file.relativePath, meta);
|
|
26
30
|
if (verbose)
|
|
27
31
|
console.log(` [new] ${file.relativePath} → .stowmeta (${meta.type})`);
|
|
@@ -60,7 +64,11 @@ export async function fullBuild(projectDir, opts) {
|
|
|
60
64
|
const id = file.relativePath;
|
|
61
65
|
let meta = await readStowmeta(config.srcArtDir, id);
|
|
62
66
|
if (!meta) {
|
|
63
|
-
|
|
67
|
+
const type = detectAssetType(id);
|
|
68
|
+
const imageDimensions = type === AssetType.Texture2D
|
|
69
|
+
? await probeImageDimensions(config.srcArtDir, id)
|
|
70
|
+
: null;
|
|
71
|
+
meta = generateDefaultStowmeta(id, type, config.config.defaults, imageDimensions);
|
|
64
72
|
await writeStowmeta(config.srcArtDir, id, meta);
|
|
65
73
|
}
|
|
66
74
|
const { type, settings } = stowmetaToAssetSettings(meta);
|
|
@@ -115,7 +123,7 @@ export async function fullBuild(projectDir, opts) {
|
|
|
115
123
|
const baseName = fileName.replace(/\.[^.]+$/, '');
|
|
116
124
|
let meta = await readStowmeta(config.srcArtDir, id);
|
|
117
125
|
if (!meta) {
|
|
118
|
-
meta = generateDefaultStowmeta(id, AssetType.MaterialSchema);
|
|
126
|
+
meta = generateDefaultStowmeta(id, AssetType.MaterialSchema, config.config.defaults);
|
|
119
127
|
await writeStowmeta(config.srcArtDir, id, meta);
|
|
120
128
|
}
|
|
121
129
|
const asset = {
|
|
@@ -147,12 +155,12 @@ export async function fullBuild(projectDir, opts) {
|
|
|
147
155
|
const existingChildren = new Map((containerMeta.children ?? []).map(c => [c.name, c]));
|
|
148
156
|
const childrenManifest = [];
|
|
149
157
|
for (const tex of extract.textures) {
|
|
150
|
-
childrenManifest.push(existingChildren.get(tex.name) ?? generateDefaultGlbChild(tex.name, 'texture'));
|
|
158
|
+
childrenManifest.push(existingChildren.get(tex.name) ?? generateDefaultGlbChild(tex.name, 'texture', config.config.defaults));
|
|
151
159
|
BlobStore.setSource(`${container.id}/${tex.name}`, tex.data);
|
|
152
160
|
}
|
|
153
161
|
for (const mesh of extract.meshes) {
|
|
154
162
|
const typeName = mesh.hasSkeleton ? 'skinnedMesh' : 'staticMesh';
|
|
155
|
-
const meshChild = existingChildren.get(mesh.name) ?? generateDefaultGlbChild(mesh.name, typeName);
|
|
163
|
+
const meshChild = existingChildren.get(mesh.name) ?? generateDefaultGlbChild(mesh.name, typeName, config.config.defaults);
|
|
156
164
|
// Store scene node names so AI agents can see the hierarchy in the stowmeta
|
|
157
165
|
if (mesh.imported.nodes.length > 1) {
|
|
158
166
|
meshChild.sceneNodeNames = mesh.imported.nodes.map(n => n.name);
|
|
@@ -169,7 +177,7 @@ export async function fullBuild(projectDir, opts) {
|
|
|
169
177
|
childrenManifest.push(existing);
|
|
170
178
|
}
|
|
171
179
|
else {
|
|
172
|
-
const child = generateDefaultGlbChild(matName, 'materialSchema');
|
|
180
|
+
const child = generateDefaultGlbChild(matName, 'materialSchema', config.config.defaults);
|
|
173
181
|
const matConfig = pbrToMaterialConfig(mat.pbrConfig, container.id);
|
|
174
182
|
child.materialConfig = {
|
|
175
183
|
schemaId: matConfig.schemaId,
|
|
@@ -189,7 +197,7 @@ export async function fullBuild(projectDir, opts) {
|
|
|
189
197
|
}
|
|
190
198
|
}
|
|
191
199
|
for (const anim of extract.animations) {
|
|
192
|
-
childrenManifest.push(existingChildren.get(anim.name) ?? generateDefaultGlbChild(anim.name, 'animationClip'));
|
|
200
|
+
childrenManifest.push(existingChildren.get(anim.name) ?? generateDefaultGlbChild(anim.name, 'animationClip', config.config.defaults));
|
|
193
201
|
}
|
|
194
202
|
// Update stowmeta with inline children
|
|
195
203
|
containerMeta.children = childrenManifest;
|
|
@@ -408,7 +416,26 @@ export async function fullBuild(projectDir, opts) {
|
|
|
408
416
|
}
|
|
409
417
|
// 4. Clean orphaned caches/metas
|
|
410
418
|
await cleanupProject(config.projectDir, { verbose });
|
|
411
|
-
// 5. Build packs
|
|
419
|
+
// 5. Validate unique stringIds
|
|
420
|
+
const idCounts = new Map();
|
|
421
|
+
for (const a of assets) {
|
|
422
|
+
if (a.status !== 'ready' || a.type === AssetType.GlbContainer || a.settings.excluded)
|
|
423
|
+
continue;
|
|
424
|
+
const files = idCounts.get(a.stringId) ?? [];
|
|
425
|
+
files.push(a.id);
|
|
426
|
+
idCounts.set(a.stringId, files);
|
|
427
|
+
}
|
|
428
|
+
const duplicates = [...idCounts.entries()].filter(([, files]) => files.length > 1);
|
|
429
|
+
if (duplicates.length > 0) {
|
|
430
|
+
console.error('Build failed: duplicate stringIds found. Each asset must have a unique stringId.');
|
|
431
|
+
for (const [id, files] of duplicates) {
|
|
432
|
+
console.error(` "${id}" used by:`);
|
|
433
|
+
for (const f of files)
|
|
434
|
+
console.error(` - ${f}`);
|
|
435
|
+
}
|
|
436
|
+
throw new Error(`Cannot build: ${duplicates.length} duplicate stringId(s) found.`);
|
|
437
|
+
}
|
|
438
|
+
// 6. Build packs
|
|
412
439
|
const packs = config.config.packs ?? [{ name: 'default' }];
|
|
413
440
|
const cdnDir = path.resolve(config.projectDir, config.config.cdnAssetsPath ?? 'public/cdn-assets');
|
|
414
441
|
await fs.mkdir(cdnDir, { recursive: true });
|
package/dist/pipeline.js
CHANGED
|
@@ -54,6 +54,7 @@ export async function processAsset(id, sourceData, type, stringId, settings, ctx
|
|
|
54
54
|
log('encoding KTX2...');
|
|
55
55
|
const result = await ctx.textureEncoder.encode(pixels, width, height, 4, settings.quality, TextureChannelFormat.RGBA, settings.generateMipmaps);
|
|
56
56
|
result.metadata.stringId = stringId;
|
|
57
|
+
result.metadata.filtering = settings.filtering;
|
|
57
58
|
BlobStore.setProcessed(id, result.data);
|
|
58
59
|
log(`KTX2 encoded (${(result.data.length / 1024).toFixed(0)} KB)`);
|
|
59
60
|
return { metadata: result.metadata, processedSize: result.data.length };
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
export interface PublishProgress {
|
|
2
|
+
phase: 'auth' | 'files' | 'thumbnails' | 'registry';
|
|
3
|
+
done: number;
|
|
4
|
+
total: number;
|
|
5
|
+
message: string;
|
|
6
|
+
}
|
|
7
|
+
export interface PublishOptions {
|
|
8
|
+
force?: boolean;
|
|
9
|
+
dryRun?: boolean;
|
|
10
|
+
bucket?: string;
|
|
11
|
+
verbose?: boolean;
|
|
12
|
+
/** Thumbnails keyed by stringId (from packer GUI) */
|
|
13
|
+
thumbnails?: Record<string, {
|
|
14
|
+
data: string;
|
|
15
|
+
format: 'png' | 'webp' | 'webm';
|
|
16
|
+
}>;
|
|
17
|
+
onProgress?: (progress: PublishProgress) => void;
|
|
18
|
+
}
|
|
19
|
+
export interface PublishResult {
|
|
20
|
+
ok: boolean;
|
|
21
|
+
packageName: string;
|
|
22
|
+
version: string;
|
|
23
|
+
assetCount: number;
|
|
24
|
+
fileCount: number;
|
|
25
|
+
thumbnailCount: number;
|
|
26
|
+
}
|
|
27
|
+
export declare function publishPackage(projectDir: string, opts?: PublishOptions): Promise<PublishResult>;
|