@tiangong-lca/mcp-server 0.0.28 → 0.0.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,6 +2,7 @@ import { createClient, FunctionRegion } from '@supabase/supabase-js';
2
2
  import { z } from 'zod';
3
3
  import { supabase_base_url, supabase_publishable_key } from '../_shared/config.js';
4
4
  import { resolveSupabaseAccessToken } from '../_shared/supabase_session.js';
5
+ import { prepareLifecycleModelFile } from './life_cycle_model_file_tools.js';
5
6
  const allowedTables = ['contacts', 'flows', 'lifecyclemodels', 'processes', 'sources'];
6
7
  const tableSchema = z.enum(allowedTables);
7
8
  const UPDATE_FUNCTION_NAME = 'update_data';
@@ -50,7 +51,7 @@ const toolParamsSchema = {
50
51
  jsonOrdered: z
51
52
  .unknown()
52
53
  .optional()
53
- .describe('JSON value persisted into json_ordered (required for insert/update; omit for select/delete).'),
54
+ .describe('JSON value persisted into json_ordered (required for insert/update; omit for select/delete). For lifecyclemodels, native files, platform bundles, raw records, or a single-item array of those are accepted; json_tg and rule_verification are derived automatically before write.'),
54
55
  };
55
56
  const refinedInputSchema = z
56
57
  .object(toolParamsSchema)
@@ -175,6 +176,51 @@ async function validateJsonOrdered(table, jsonOrdered) {
175
176
  throw error;
176
177
  }
177
178
  }
179
/**
 * Restrict lifecycle-model rows to the id/version/json_ordered triple.
 * Entries that are not plain objects (null, arrays, primitives) are treated
 * as empty records, so every field falls back to null.
 */
function sanitizeLifecycleModelRows(rows) {
  const sanitized = [];
  for (const entry of rows) {
    const source =
      entry && typeof entry === 'object' && !Array.isArray(entry) ? entry : {};
    sanitized.push({
      id: source.id ?? null,
      version: source.version ?? null,
      json_ordered: source.json_ordered ?? null,
    });
  }
  return sanitized;
}
191
/**
 * Apply the lifecycle-model column restriction only for the
 * 'lifecyclemodels' table; all other tables pass through unchanged.
 */
function sanitizeRowsForOutput(table, rows) {
  if (table === 'lifecyclemodels') {
    return sanitizeLifecycleModelRows(rows);
  }
  return rows;
}
194
/**
 * Validate a write payload and resolve the row identity before insert/update.
 *
 * For every table except 'lifecyclemodels' the payload is schema-checked by
 * validateJsonOrdered and written through unchanged, keeping the caller's
 * id/version. For 'lifecyclemodels' the payload is handed to
 * prepareLifecycleModelFile, which derives json_tg and rule_verification and
 * extracts the model's own UUID/version; a caller-supplied id/version, when
 * present, must match the extracted values or an Error is thrown.
 *
 * @param {string} table - Target table name.
 * @param {unknown} jsonOrdered - Raw payload from the tool input.
 * @param {string|undefined} inputId - Caller-supplied row id (optional).
 * @param {string|undefined} inputVersion - Caller-supplied row version (optional).
 * @param {unknown} bearerKey - Auth material forwarded to prepareLifecycleModelFile.
 * @returns {Promise<{payload: object, resolvedId: string|undefined, resolvedVersion: string|undefined}>}
 * @throws {Error} When validation fails or id/version disagree with the model content.
 */
async function prepareWritePayload(table, jsonOrdered, inputId, inputVersion, bearerKey) {
  if (table !== 'lifecyclemodels') {
    await validateJsonOrdered(table, jsonOrdered);
    return {
      payload: {
        json_ordered: jsonOrdered,
      },
      resolvedId: inputId,
      resolvedVersion: inputVersion,
    };
  }
  const prepared = await prepareLifecycleModelFile({
    payload: jsonOrdered,
  }, bearerKey);
  // Guard against writing a row whose key disagrees with the model content.
  if (inputId && inputId !== prepared.lifecycleModelId) {
    throw new Error(`Provided id (${inputId}) does not match lifecycle model UUID (${prepared.lifecycleModelId}).`);
  }
  if (inputVersion && inputVersion !== prepared.lifecycleModelVersion) {
    throw new Error(`Provided version (${inputVersion}) does not match lifecycle model version (${prepared.lifecycleModelVersion}).`);
  }
  return {
    payload: {
      json_ordered: prepared.jsonOrdered,
      json_tg: prepared.jsonTg,
      rule_verification: prepared.ruleVerification,
    },
    resolvedId: prepared.lifecycleModelId,
    resolvedVersion: prepared.lifecycleModelVersion,
  };
}
178
224
  async function createSupabaseClient(bearerKey) {
179
225
  const { session: normalizedSession, accessToken: bearerToken } = resolveSupabaseAccessToken(bearerKey);
180
226
  const supabase = createClient(supabase_base_url, supabase_publishable_key, {
@@ -206,7 +252,8 @@ async function createSupabaseClient(bearerKey) {
206
252
  async function handleSelect(supabase, input) {
207
253
  const { table, limit, id, version, filters } = input;
208
254
  const keyColumn = getPrimaryKeyColumn(table);
209
- let queryBuilder = supabase.from(table).select('*');
255
+ const selectColumns = table === 'lifecyclemodels' ? 'id, version, json_ordered' : '*';
256
+ let queryBuilder = supabase.from(table).select(selectColumns);
210
257
  if (filters) {
211
258
  for (const [column, value] of Object.entries(filters)) {
212
259
  if (value !== null && value !== undefined) {
@@ -228,10 +275,11 @@ async function handleSelect(supabase, input) {
228
275
  console.error('Error querying the database:', error);
229
276
  throw error;
230
277
  }
231
- return JSON.stringify({ data: data ?? [], count: data?.length ?? 0 });
278
+ const rows = sanitizeRowsForOutput(table, (data ?? []));
279
+ return JSON.stringify({ data: rows, count: rows.length });
232
280
  }
233
- async function handleInsert(supabase, input) {
234
- const { table, jsonOrdered, id } = input;
281
+ async function handleInsert(supabase, input, bearerKey) {
282
+ const { table, jsonOrdered, id, version } = input;
235
283
  if (jsonOrdered === undefined) {
236
284
  throw new Error('jsonOrdered is required for insert operations.');
237
285
  }
@@ -239,19 +287,28 @@ async function handleInsert(supabase, input) {
239
287
  throw new Error('id is required for insert operations.');
240
288
  }
241
289
  const jsonOrderedValue = jsonOrdered;
242
- await validateJsonOrdered(table, jsonOrderedValue);
290
+ const preparedWrite = await prepareWritePayload(table, jsonOrderedValue, id, version, bearerKey);
291
+ const resolvedId = preparedWrite.resolvedId ?? id;
292
+ const resolvedVersion = preparedWrite.resolvedVersion ?? version;
243
293
  const keyColumn = getPrimaryKeyColumn(table);
244
294
  const { data, error } = await supabase
245
295
  .from(table)
246
- .insert([{ [keyColumn]: id, json_ordered: jsonOrderedValue }])
296
+ .insert([
297
+ {
298
+ [keyColumn]: resolvedId,
299
+ ...(resolvedVersion !== undefined ? { version: resolvedVersion } : {}),
300
+ ...preparedWrite.payload,
301
+ },
302
+ ])
247
303
  .select();
248
304
  if (error) {
249
305
  console.error('Error inserting into the database:', error);
250
306
  throw error;
251
307
  }
252
- return JSON.stringify({ id, data: data ?? [] });
308
+ const rows = sanitizeRowsForOutput(table, (data ?? []));
309
+ return JSON.stringify({ id: resolvedId, version: resolvedVersion, data: rows });
253
310
  }
254
- async function handleUpdate(supabase, accessToken, input) {
311
+ async function handleUpdate(supabase, accessToken, input, bearerKey) {
255
312
  const { table, id, version, jsonOrdered } = input;
256
313
  if (id === undefined) {
257
314
  throw new Error('id is required for update operations.');
@@ -263,13 +320,18 @@ async function handleUpdate(supabase, accessToken, input) {
263
320
  throw new Error('jsonOrdered is required for update operations.');
264
321
  }
265
322
  const jsonOrderedValue = jsonOrdered;
266
- await validateJsonOrdered(table, jsonOrderedValue);
323
+ const preparedWrite = await prepareWritePayload(table, jsonOrderedValue, id, version, bearerKey);
267
324
  const token = requireAccessToken(accessToken);
268
325
  const { data: functionPayload, error } = await supabase.functions.invoke(UPDATE_FUNCTION_NAME, {
269
326
  headers: {
270
327
  Authorization: `Bearer ${token}`,
271
328
  },
272
- body: { id, version, table, data: { json_ordered: jsonOrderedValue } },
329
+ body: {
330
+ id: preparedWrite.resolvedId ?? id,
331
+ version: preparedWrite.resolvedVersion ?? version,
332
+ table,
333
+ data: preparedWrite.payload,
334
+ },
273
335
  region: FunctionRegion.UsEast1,
274
336
  });
275
337
  if (error) {
@@ -284,8 +346,12 @@ async function handleUpdate(supabase, accessToken, input) {
284
346
  throw new Error(message);
285
347
  }
286
348
  const keyColumn = getPrimaryKeyColumn(table);
287
- const rows = ensureRows(updatedRows, `Update affected 0 rows for table "${table}"; verify the provided ${keyColumn} (${id}) and version (${version}) exist and are accessible.`);
288
- return JSON.stringify({ id, version, data: rows });
349
+ const rows = ensureRows(updatedRows, `Update affected 0 rows for table "${table}"; verify the provided ${keyColumn} (${preparedWrite.resolvedId ?? id}) and version (${preparedWrite.resolvedVersion ?? version}) exist and are accessible.`);
350
+ return JSON.stringify({
351
+ id: preparedWrite.resolvedId ?? id,
352
+ version: preparedWrite.resolvedVersion ?? version,
353
+ data: sanitizeRowsForOutput(table, rows),
354
+ });
289
355
  }
290
356
  async function handleDelete(supabase, input) {
291
357
  const { table, id, version } = input;
@@ -307,7 +373,7 @@ async function handleDelete(supabase, input) {
307
373
  throw error;
308
374
  }
309
375
  const rows = ensureRows(data, `Delete affected 0 rows for table "${table}"; verify the provided ${keyColumn} (${id}) and version (${version}) exist and are accessible.`);
310
- return JSON.stringify({ id, version, data: rows });
376
+ return JSON.stringify({ id, version, data: sanitizeRowsForOutput(table, rows) });
311
377
  }
312
378
  async function performCrud(input, bearerKey) {
313
379
  try {
@@ -316,9 +382,9 @@ async function performCrud(input, bearerKey) {
316
382
  case 'select':
317
383
  return handleSelect(supabase, input);
318
384
  case 'insert':
319
- return handleInsert(supabase, input);
385
+ return handleInsert(supabase, input, bearerKey);
320
386
  case 'update':
321
- return handleUpdate(supabase, accessToken, input);
387
+ return handleUpdate(supabase, accessToken, input, bearerKey);
322
388
  case 'delete':
323
389
  return handleDelete(supabase, input);
324
390
  default: {
@@ -333,7 +399,7 @@ async function performCrud(input, bearerKey) {
333
399
  }
334
400
  }
335
401
  export function regCrudTool(server, bearerKey) {
336
- server.tool('Database_CRUD_Tool', 'Perform select/insert/update/delete against allowed Supabase tables (insert needs jsonOrdered, update/delete need id and version).', toolParamsSchema, async (rawInput) => {
402
+ server.tool('Database_CRUD_Tool', 'Perform select/insert/update/delete against allowed Supabase tables (insert needs jsonOrdered, update/delete need id and version). lifecyclemodels insert/update automatically validate the payload, derive platform json_tg, compute rule_verification, and then write the row; lifecyclemodels select returns id/version/json_ordered only.', toolParamsSchema, async (rawInput) => {
337
403
  const input = refinedInputSchema.parse(rawInput);
338
404
  const result = await performCrud(input, bearerKey);
339
405
  return {
@@ -0,0 +1,857 @@
1
+ import dagre from '@dagrejs/dagre';
2
+ import { createClient } from '@supabase/supabase-js';
3
+ import { createLifeCycleModel } from '@tiangong-lca/tidas-sdk/core';
4
+ import { supabase_base_url, supabase_publishable_key } from '../_shared/config.js';
5
+ import { resolveSupabaseAccessToken } from '../_shared/supabase_session.js';
6
+ const MAX_VALIDATION_ERROR_LENGTH = 4_000;
7
+ const NODE_WIDTH = 350;
8
+ const NODE_MIN_HEIGHT = 100;
9
+ const PORT_START_Y = 65;
10
+ const PORT_STEP_Y = 20;
11
+ const PAIRED_INPUT_START_Y = 58;
12
+ const PAIRED_OUTPUT_START_Y = 78;
13
+ const PAIRED_PORT_STEP_Y = 40;
14
+ const MIN_NODE_SIZE = 1;
15
+ const DAGRE_RANKDIR = 'LR';
16
+ const DAGRE_NODESEP = 88;
17
+ const DAGRE_EDGESEP = 24;
18
+ const DAGRE_RANKSEP = 170;
19
+ const DAGRE_MARGIN_X = 36;
20
+ const DAGRE_MARGIN_Y = 36;
21
+ const PRIMARY_COLOR = '#5c246a';
22
+ const BACKGROUND_COLOR = '#ffffff';
23
+ const MUTED_TEXT_COLOR = 'rgba(0,0,0,0.45)';
24
+ const BODY_TEXT_COLOR = '#000';
25
/**
 * Coerce a value to an array: null/undefined become [], arrays are returned
 * as-is (same reference), anything else is wrapped in a one-element array.
 */
function ensureArray(value) {
  if (value == null) {
    return [];
  }
  if (Array.isArray(value)) {
    return value;
  }
  return [value];
}
31
// Deep-copies a JSON-serializable value via stringify/parse round-trip.
// NOTE(review): this drops undefined/function properties and does not preserve
// Dates/Maps — callers here appear to pass plain JSON data only; confirm
// before reusing for richer values.
function cloneJson(value) {
  return JSON.parse(JSON.stringify(value));
}
34
/**
 * Narrow a value to a plain-object record; anything else (null, undefined,
 * arrays, primitives) collapses to a fresh empty object.
 */
function asRecord(value) {
  const isPlainObject =
    Boolean(value) && typeof value === 'object' && !Array.isArray(value);
  return isPlainObject ? value : {};
}
39
/**
 * Assert that a value is a plain-object record and return it unchanged;
 * throws an Error with the supplied message otherwise.
 */
function toJsonRecord(value, message) {
  const isRecord =
    Boolean(value) && typeof value === 'object' && !Array.isArray(value);
  if (isRecord) {
    return value;
  }
  throw new Error(message);
}
45
/**
 * Parse a payload that may arrive either as a JSON string or as an
 * already-decoded value; non-strings are returned untouched.
 */
function parsePayload(value) {
  return typeof value === 'string' ? JSON.parse(value) : value;
}
51
/**
 * Produce a short human-readable summary of an arbitrary thrown value.
 * Error instances yield their message; other values are JSON-serialized and
 * truncated to MAX_VALIDATION_ERROR_LENGTH characters, falling back to
 * String(...) when serialization fails or yields nothing.
 */
function summarizeError(error) {
  if (error instanceof Error) {
    return error.message;
  }
  try {
    const serialized = JSON.stringify(error);
    if (!serialized) {
      return String(error);
    }
    if (serialized.length > MAX_VALIDATION_ERROR_LENGTH) {
      return `${serialized.slice(0, MAX_VALIDATION_ERROR_LENGTH)}...`;
    }
    return serialized;
  }
  catch {
    return String(error);
  }
}
68
/**
 * Accept the several lifecycle-model payload shapes and normalize them to
 * { jsonOrdered, providedJsonTg?, sourceFormat }.
 *
 * Strings are JSON.parse'd first; a one-item array is unwrapped (longer
 * arrays are rejected). Accepted shapes, checked in order:
 *  - { json_ordered, json_tg? }           -> sourceFormat 'raw_record'
 *  - { jsonOrdered, jsonTg? }             -> sourceFormat 'direct_fields'
 *  - { lifeCycleModelDataSet, json_tg? }  -> 'platform_bundle' when json_tg
 *                                            is present, else 'native'
 * Anything else throws with a usage message.
 */
function normalizeLifecycleModelPayload(rawPayload) {
  const parsed = parsePayload(rawPayload);
  const singlePayload = Array.isArray(parsed) ? parsed[0] : parsed;
  if (Array.isArray(parsed) && parsed.length !== 1) {
    throw new Error('Lifecycle model file import currently supports exactly one lifecycle model object per request.');
  }
  const payload = asRecord(singlePayload);
  if (payload.json_ordered) {
    return {
      jsonOrdered: toJsonRecord(payload.json_ordered, 'payload.json_ordered must be an object.'),
      providedJsonTg: payload.json_tg
        ? toJsonRecord(payload.json_tg, 'payload.json_tg must be an object.')
        : undefined,
      sourceFormat: 'raw_record',
    };
  }
  if (payload.jsonOrdered) {
    return {
      jsonOrdered: toJsonRecord(payload.jsonOrdered, 'payload.jsonOrdered must be an object.'),
      providedJsonTg: payload.jsonTg
        ? toJsonRecord(payload.jsonTg, 'payload.jsonTg must be an object.')
        : undefined,
      sourceFormat: 'direct_fields',
    };
  }
  if (payload.lifeCycleModelDataSet) {
    // Native/platform shape: json_tg (if any) is peeled off, the rest of the
    // payload IS the ordered model document.
    const { json_tg, ...jsonOrdered } = payload;
    return {
      jsonOrdered: toJsonRecord(jsonOrdered, 'payload must contain a lifeCycleModelDataSet object.'),
      providedJsonTg: json_tg
        ? toJsonRecord(json_tg, 'payload.json_tg must be an object.')
        : undefined,
      sourceFormat: json_tg ? 'platform_bundle' : 'native',
    };
  }
  throw new Error('Unsupported lifecycle model payload. Provide { lifeCycleModelDataSet }, { lifeCycleModelDataSet, json_tg }, { json_ordered, json_tg }, or an array containing exactly one of those objects.');
}
105
/**
 * Return jsonOrdered.lifeCycleModelDataSet, requiring it to be a plain
 * object; throws when missing, non-object, or an array.
 */
function getModelDataSet(jsonOrdered) {
  const dataSet = jsonOrdered.lifeCycleModelDataSet;
  const isRecord =
    Boolean(dataSet) && typeof dataSet === 'object' && !Array.isArray(dataSet);
  if (!isRecord) {
    throw new Error('jsonOrdered.lifeCycleModelDataSet is required.');
  }
  return dataSet;
}
112
/**
 * Read the model's own UUID from
 * lifeCycleModelInformation.dataSetInformation['common:UUID'];
 * throws when the value is absent or not a non-empty string.
 */
function getModelUuid(jsonOrdered) {
  const dataSet = getModelDataSet(jsonOrdered);
  const uuid = asRecord(asRecord(asRecord(dataSet.lifeCycleModelInformation).dataSetInformation))['common:UUID'];
  if (typeof uuid !== 'string' || uuid.length === 0) {
    throw new Error('lifeCycleModelInformation.dataSetInformation.common:UUID is required in the lifecycle model.');
  }
  return uuid;
}
120
/**
 * Read the model's dataset version from
 * administrativeInformation.publicationAndOwnership['common:dataSetVersion'];
 * throws when the value is absent or not a non-empty string.
 */
function getModelVersion(jsonOrdered) {
  const dataSet = getModelDataSet(jsonOrdered);
  const version = asRecord(asRecord(dataSet.administrativeInformation).publicationAndOwnership)['common:dataSetVersion'];
  if (typeof version !== 'string' || version.length === 0) {
    throw new Error('administrativeInformation.publicationAndOwnership.common:dataSetVersion is required in the lifecycle model.');
  }
  return version;
}
128
// Wrap the tidas-sdk factory in strict mode; the returned object's validate()
// is consumed by validateLifecycleModelStrict below.
function createLifecycleModelValidator(jsonOrdered) {
  return createLifeCycleModel(jsonOrdered, { mode: 'strict' });
}
131
/**
 * Run the validator and throw a summarized Error when validation fails;
 * returns nothing on success.
 */
function validateLifecycleModelStrict(validator) {
  const result = validator.validate();
  if (result.success) {
    return;
  }
  const details = summarizeError(result.error);
  throw new Error(`Lifecycle model validation failed: ${details}`);
}
138
/**
 * Normalize a multilingual field to an array of { '@xml:lang', '#text' }
 * entries. Object entries are deep-copied; a bare non-empty string becomes a
 * single English entry; everything else yields [].
 */
function langEntries(value) {
  const objectEntries = ensureArray(value).filter(
    (item) => item && typeof item === 'object'
  );
  if (objectEntries.length > 0) {
    return cloneJson(objectEntries);
  }
  const trimmed = typeof value === 'string' ? value.trim() : '';
  if (trimmed.length > 0) {
    return [{ '@xml:lang': 'en', '#text': trimmed }];
  }
  return [];
}
148
/**
 * Pick one display string from a multilingual field, preferring Chinese
 * variants ('zh', 'zh-cn', 'zh-hans'), then 'en', then the first entry's
 * text; returns '' when no entry has a '#text'.
 */
function preferredText(value) {
  const entries = langEntries(value);
  const preferredOrder = ['zh', 'zh-cn', 'zh-hans', 'en'];
  for (const lang of preferredOrder) {
    const match = entries.find((item) => (item['@xml:lang'] || '').toLowerCase() === lang);
    if (match?.['#text']) {
      return match['#text'];
    }
  }
  return entries[0]?.['#text'] ?? '';
}
159
/**
 * Build a minimal process-name record from a short description: baseName
 * carries the multilingual entries, the remaining name parts are empty.
 */
function buildSyntheticName(shortDescription) {
  return {
    baseName: langEntries(shortDescription),
    treatmentStandardsRoutes: [],
    mixAndLocationTypes: [],
    functionalUnitFlowProperties: [],
  };
}
167
/**
 * Flatten a process-name record into one multilingual summary entry per
 * language: for each language, the non-empty texts of baseName,
 * treatmentStandardsRoutes, mixAndLocationTypes, and
 * functionalUnitFlowProperties are joined with '; ' (entries without a
 * language tag default to 'en'). Languages whose joined text is empty are
 * dropped.
 */
function buildNameSummary(name) {
  const nameRecord = asRecord(name);
  const partMap = new Map();
  const keys = [
    'baseName',
    'treatmentStandardsRoutes',
    'mixAndLocationTypes',
    'functionalUnitFlowProperties',
  ];
  for (const key of keys) {
    for (const item of langEntries(nameRecord[key])) {
      const lang = (item['@xml:lang'] || 'en').toLowerCase();
      const text = item['#text'] || '';
      if (!text) {
        continue;
      }
      if (!partMap.has(lang)) {
        partMap.set(lang, []);
      }
      partMap.get(lang).push(text);
    }
  }
  return Array.from(partMap.entries())
    .map(([lang, parts]) => ({
      '@xml:lang': lang,
      '#text': parts.filter(Boolean).join('; '),
    }))
    .filter((item) => item['#text'].length > 0);
}
196
/**
 * Extract an internal id that may be given as a bare string or as an object
 * carrying '@id'; anything else (including a non-string '@id') yields ''.
 */
function extractInternalId(value) {
  if (typeof value === 'string') {
    return value;
  }
  const record =
    value && typeof value === 'object' && !Array.isArray(value) ? value : {};
  const candidate = record['@id'];
  return typeof candidate === 'string' ? candidate : '';
}
203
// Returns lifeCycleModelInformation.technology.processes.processInstance as
// an array of plain-object records; missing or oddly shaped intermediate
// nodes collapse to empty via asRecord/ensureArray.
function processInstancesFromModel(jsonOrdered) {
  const dataSet = getModelDataSet(jsonOrdered);
  return ensureArray(asRecord(asRecord(asRecord(asRecord(dataSet.lifeCycleModelInformation).technology).processes))
    .processInstance).map((item) => asRecord(item));
}
208
/**
 * Read a process instance's '@dataSetInternalID' as a trimmed string;
 * missing values yield ''.
 */
function processInstanceInternalId(instance) {
  const rawId = instance['@dataSetInternalID'];
  return String(rawId ?? '').trim();
}
211
/**
 * Return the model's process instances for graph generation, requiring every
 * instance to carry '@dataSetInternalID'; throws listing the indexes of any
 * instances missing one.
 */
function graphProcessInstancesFromModel(jsonOrdered) {
  const processInstances = processInstancesFromModel(jsonOrdered);
  const missingInternalIdIndexes = processInstances.flatMap((instance, index) => processInstanceInternalId(instance) ? [] : [index]);
  if (missingInternalIdIndexes.length > 0) {
    throw new Error(`Lifecycle model graph generation requires processInstance.@dataSetInternalID for every process. Missing values at indexes: ${missingInternalIdIndexes.join(', ')}.`);
  }
  return processInstances;
}
219
// Resolve the internal id of the model's reference process from
// lifeCycleModelInformation.quantitativeReference.referenceToReferenceProcess;
// '' when absent (extractInternalId's fallback).
function referenceProcessInternalIdFromModel(jsonOrdered) {
  const dataSet = getModelDataSet(jsonOrdered);
  const quantitativeReference = asRecord(asRecord(dataSet.lifeCycleModelInformation).quantitativeReference);
  return extractInternalId(quantitativeReference.referenceToReferenceProcess);
}
224
/**
 * Derive the unique directed edges of the model graph from each instance's
 * connections.outputExchange entries. An edge records the source instance,
 * the downstream instance, and the connecting flow UUID; duplicates (same
 * src/dst/flow) are emitted once, in first-seen order.
 */
function modelEdgesFromConnections(processInstances) {
  const collected = [];
  const visitedKeys = new Set();
  for (const instance of processInstances) {
    const srcInternalId = processInstanceInternalId(instance);
    if (!srcInternalId) {
      continue;
    }
    for (const rawOutput of ensureArray(asRecord(instance.connections).outputExchange)) {
      const output = asRecord(rawOutput);
      const flowUuid = String(output['@flowUUID'] ?? '').trim();
      if (!flowUuid) {
        continue;
      }
      for (const rawDownstream of ensureArray(output.downstreamProcess)) {
        const dstInternalId = String(asRecord(rawDownstream)['@id'] ?? '').trim();
        if (!dstInternalId) {
          continue;
        }
        const edgeKey = [srcInternalId, dstInternalId, flowUuid].join('|');
        if (!visitedKeys.has(edgeKey)) {
          visitedKeys.add(edgeKey);
          collected.push({ srcInternalId, dstInternalId, flowUuid });
        }
      }
    }
  }
  return collected;
}
255
/**
 * Build a Supabase client for the lifecycle-model file tools.
 *
 * Resolves the bearer key into a session/access token; when a token exists it
 * is attached as a global Authorization header. When the session carries a
 * refresh token, auto-refresh is enabled and the session is installed via
 * auth.setSession — a failure there is only warned about, not fatal.
 *
 * @param {unknown} bearerKey - Raw auth material passed to resolveSupabaseAccessToken.
 * @returns {Promise<{supabase: object}>} The configured client.
 */
async function createSupabaseClient(bearerKey) {
  const { session: normalizedSession, accessToken: bearerToken } = resolveSupabaseAccessToken(bearerKey);
  const supabase = createClient(supabase_base_url, supabase_publishable_key, {
    auth: {
      persistSession: false,
      autoRefreshToken: Boolean(normalizedSession?.refresh_token),
    },
    ...(bearerToken
      ? {
        global: {
          headers: {
            Authorization: `Bearer ${bearerToken}`,
          },
        },
      }
      : {}),
  });
  if (normalizedSession?.refresh_token) {
    const { error } = await supabase.auth.setSession({
      access_token: normalizedSession.access_token,
      refresh_token: normalizedSession.refresh_token,
    });
    if (error) {
      // Best-effort: the Authorization header above still authenticates requests.
      console.warn('Failed to set Supabase session for lifecycle model file tools:', error.message);
    }
  }
  return { supabase };
}
283
/**
 * Build a lookup entry for a process whose full dataset row is unavailable,
 * using only the reference's common:shortDescription for label/summary and an
 * empty exchange map. referenceExchange is intentionally left undefined.
 */
function buildFallbackProcessLookup(processId, version, referenceToProcess) {
  const fallbackShortDescription = langEntries(referenceToProcess['common:shortDescription']);
  const fallbackLabel = buildSyntheticName(fallbackShortDescription);
  const fallbackSummary = buildNameSummary(fallbackLabel);
  return {
    processId,
    version,
    shortDescription: fallbackSummary,
    label: fallbackLabel,
    shortSummary: fallbackSummary,
    exchangeByDirectionAndFlow: new Map(),
  };
}
296
/**
 * Composite map key for a (processId, version) pair; '@@' separates the two
 * parts, and an empty version is preserved as an empty suffix.
 */
function processSelectionKey(processId, version) {
  return [processId, version].join('@@');
}
299
/**
 * Pull json_ordered.processDataSet out of a database row, collapsing any
 * missing or non-record level to an empty object.
 */
function extractProcessDataSet(row) {
  const toRecord = (value) =>
    value && typeof value === 'object' && !Array.isArray(value) ? value : {};
  return toRecord(toRecord(row?.json_ordered).processDataSet);
}
302
/**
 * Batch-load json_ordered.processDataSet for every process referenced by the
 * model's process instances.
 *
 * References with an explicit '@version' are grouped per version and fetched
 * with one IN query each; references without a version are fetched together
 * ordered by version descending, and only the first (newest) row per id is
 * kept. All batches run in parallel via Promise.all.
 *
 * @returns {Promise<Map<string, object>>} Map keyed by
 *   processSelectionKey(id, version) — version is '' for unversioned refs.
 * @throws {Error} When any Supabase query fails.
 */
async function loadReferencedProcessDataSets(supabase, processInstances) {
  const versionedIdsByVersion = new Map();
  const unversionedIds = new Set();
  for (const instance of processInstances) {
    const referenceToProcess = asRecord(instance.referenceToProcess);
    const processId = String(referenceToProcess['@refObjectId'] ?? '').trim();
    const version = String(referenceToProcess['@version'] ?? '').trim();
    if (!processId) {
      continue;
    }
    if (version) {
      if (!versionedIdsByVersion.has(version)) {
        versionedIdsByVersion.set(version, new Set());
      }
      versionedIdsByVersion.get(version).add(processId);
      continue;
    }
    unversionedIds.add(processId);
  }
  const processDataSetBySelection = new Map();
  const batchFetches = [];
  for (const [version, processIds] of versionedIdsByVersion.entries()) {
    const ids = Array.from(processIds);
    if (ids.length === 0) {
      continue;
    }
    batchFetches.push((async () => {
      const { data, error } = await supabase
        .from('processes')
        .select('id, version, json_ordered')
        .eq('version', version)
        .in('id', ids);
      if (error) {
        throw new Error(`Failed to load referenced processes for version ${version}: ${error.message}`);
      }
      for (const row of (data ?? [])) {
        const processId = String(row.id ?? '').trim();
        if (!processId) {
          continue;
        }
        processDataSetBySelection.set(processSelectionKey(processId, version), extractProcessDataSet(row));
      }
    })());
  }
  if (unversionedIds.size > 0) {
    batchFetches.push((async () => {
      const { data, error } = await supabase
        .from('processes')
        .select('id, version, json_ordered')
        .in('id', Array.from(unversionedIds))
        .order('version', { ascending: false });
      if (error) {
        throw new Error(`Failed to load referenced processes without version: ${error.message}`);
      }
      for (const row of (data ?? [])) {
        const processId = String(row.id ?? '').trim();
        // Rows arrive newest-version-first; keep only the first row per id.
        if (!processId || processDataSetBySelection.has(processSelectionKey(processId, ''))) {
          continue;
        }
        processDataSetBySelection.set(processSelectionKey(processId, ''), extractProcessDataSet(row));
      }
    })());
  }
  await Promise.all(batchFetches);
  return processDataSetBySelection;
}
368
/**
 * Build a lookup per process-instance internal id with everything the graph
 * builder needs: process id/version, display label + multilingual summary,
 * the quantitative-reference exchange, and a Map of exchanges keyed by
 * `${direction}:${flowId}`.
 *
 * When the referenced process row is missing (or its dataset is empty), the
 * fallback built from the reference's common:shortDescription is used.
 * The reference exchange is chosen either as the first exchange flagged
 * quantitativeReference === true or the one whose '@dataSetInternalID'
 * matches quantitativeReference.referenceToReferenceFlow.
 *
 * @returns {Promise<Map<string, object>>} Keyed by instance internal id;
 *   instances without an internal id are skipped.
 */
async function fetchProcessLookups(supabase, processInstances) {
  const lookups = new Map();
  const processDataSetBySelection = await loadReferencedProcessDataSets(supabase, processInstances);
  const fetches = processInstances.map(async (instance) => {
    const referenceToProcess = asRecord(instance.referenceToProcess);
    const processId = String(referenceToProcess['@refObjectId'] ?? '').trim();
    const version = String(referenceToProcess['@version'] ?? '').trim();
    const internalId = processInstanceInternalId(instance);
    if (!internalId) {
      return;
    }
    if (!processId) {
      lookups.set(internalId, buildFallbackProcessLookup(processId, version, referenceToProcess));
      return;
    }
    const fallbackLookup = buildFallbackProcessLookup(processId, version, referenceToProcess);
    const processDataSet = processDataSetBySelection.get(processSelectionKey(processId, version));
    if (Object.keys(processDataSet ?? {}).length === 0) {
      lookups.set(internalId, fallbackLookup);
      return;
    }
    const info = asRecord(asRecord(asRecord(processDataSet).processInformation).dataSetInformation);
    const label = Object.keys(asRecord(info.name)).length > 0
      ? cloneJson(asRecord(info.name))
      : fallbackLookup.label;
    const shortSummary = buildNameSummary(label);
    const exchangeByDirectionAndFlow = new Map();
    let referenceExchange;
    const refExchangeInternalId = String(asRecord(asRecord(asRecord(processDataSet).processInformation).quantitativeReference)
      .referenceToReferenceFlow ?? '').trim();
    for (const exchange of ensureArray(asRecord(processDataSet).exchanges?.exchange).map((item) => asRecord(item))) {
      const flowRef = asRecord(exchange.referenceToFlowDataSet);
      const flowId = String(flowRef['@refObjectId'] ?? '').trim();
      const direction = String(exchange.exchangeDirection ?? '').trim();
      if (flowId && direction) {
        exchangeByDirectionAndFlow.set(`${direction}:${flowId}`, exchange);
      }
      if ((!referenceExchange && exchange.quantitativeReference === true) ||
        (refExchangeInternalId &&
          String(exchange['@dataSetInternalID'] ?? '').trim() === refExchangeInternalId)) {
        referenceExchange = exchange;
      }
    }
    lookups.set(internalId, {
      processId,
      version,
      shortDescription: shortSummary.length > 0
        ? shortSummary
        : langEntries(referenceToProcess['common:shortDescription']),
      label,
      shortSummary: shortSummary.length > 0
        ? shortSummary
        : langEntries(referenceToProcess['common:shortDescription']),
      referenceExchange,
      exchangeByDirectionAndFlow,
    });
  });
  await Promise.all(fetches);
  return lookups;
}
428
/**
 * Fallback port label when a flow has no short description: a single English
 * entry whose text is the flow UUID itself.
 */
function flowPortFallback(flowUuid) {
  const fallbackEntry = { '@xml:lang': 'en', '#text': flowUuid };
  return [fallbackEntry];
}
431
/**
 * Render an exchange amount as a string, preferring meanAmount, then
 * resultingAmount, then meanValue; '' when the exchange is missing or no
 * candidate is set (null/undefined are skipped, but 0 is kept).
 */
function exchangeAmount(exchange) {
  if (!exchange) {
    return '';
  }
  const candidates = [exchange.meanAmount, exchange.resultingAmount, exchange.meanValue];
  const amount = candidates.find((value) => value !== undefined && value !== null);
  return amount === undefined ? '' : String(amount);
}
438
/**
 * Compute a left-to-right dagre layout for the graph nodes.
 *
 * Nodes are registered with their measured width/height (clamped to
 * MIN_NODE_SIZE); edges are added as a multigraph keyed by
 * source|target|flow|srcInternal|dstInternal so parallel flows survive.
 * Self-loops and edges whose endpoints are unknown are skipped.
 *
 * @returns {Map<string, {x: number, y: number}>} Top-left position per node
 *   internalId (dagre reports centers; width/height halves are subtracted).
 *   Nodes dagre did not place fall back to the margin origin.
 */
function dagreLayout(nodes, edges) {
  const dagreGraph = new dagre.graphlib.Graph({ multigraph: true });
  dagreGraph.setGraph({
    rankdir: DAGRE_RANKDIR,
    nodesep: DAGRE_NODESEP,
    edgesep: DAGRE_EDGESEP,
    ranksep: DAGRE_RANKSEP,
    marginx: DAGRE_MARGIN_X,
    marginy: DAGRE_MARGIN_Y,
    acyclicer: 'greedy',
    ranker: 'network-simplex',
  });
  const nodeIdByInternalId = new Map(nodes.map((node) => [node.internalId, node.nodeId]));
  for (const node of nodes) {
    dagreGraph.setNode(node.nodeId, {
      width: Math.max(node.width, MIN_NODE_SIZE),
      height: Math.max(node.height, MIN_NODE_SIZE),
    });
  }
  for (const edge of edges) {
    const sourceNodeId = nodeIdByInternalId.get(edge.srcInternalId);
    const targetNodeId = nodeIdByInternalId.get(edge.dstInternalId);
    if (!sourceNodeId || !targetNodeId || sourceNodeId === targetNodeId) {
      continue;
    }
    if (!dagreGraph.hasNode(sourceNodeId) || !dagreGraph.hasNode(targetNodeId)) {
      continue;
    }
    dagreGraph.setEdge(sourceNodeId, targetNodeId, {
      minlen: 1,
      weight: 2,
    }, `${sourceNodeId}|${targetNodeId}|${edge.flowUuid}|${edge.srcInternalId}|${edge.dstInternalId}`);
  }
  dagre.layout(dagreGraph);
  const positions = new Map();
  for (const node of nodes) {
    const layoutNode = dagreGraph.node(node.nodeId);
    if (!layoutNode) {
      positions.set(node.internalId, { x: DAGRE_MARGIN_X, y: DAGRE_MARGIN_Y });
      continue;
    }
    // dagre positions are node centers; convert to top-left coordinates.
    positions.set(node.internalId, {
      x: layoutNode.x - layoutNode.width / 2,
      y: layoutNode.y - layoutNode.height / 2,
    });
  }
  return positions;
}
486
/**
 * Materialize a port spec into a graph port object at vertical offset y.
 * INPUT ports attach to the left edge (x: 0) in 'groupInput'; everything else
 * goes to the right edge (x: '100%') in 'groupOutput'. Quantitative-reference
 * ports render bold in the primary color.
 */
function buildPortItem(spec, y) {
  return {
    id: `${spec.side}:${spec.flowUuid}`,
    group: spec.side === 'INPUT' ? 'groupInput' : 'groupOutput',
    args: {
      x: spec.side === 'INPUT' ? 0 : '100%',
      y,
    },
    attrs: {
      text: {
        text: spec.displayText,
        title: spec.displayText,
        cursor: 'pointer',
        fill: spec.quantitativeReference ? PRIMARY_COLOR : MUTED_TEXT_COLOR,
        'font-weight': spec.quantitativeReference ? 'bold' : 'normal',
      },
    },
    data: {
      textLang: spec.textLang,
      flowId: spec.flowUuid,
      flowVersion: spec.flowVersion,
      quantitativeReference: spec.quantitativeReference,
      allocations: spec.allocations,
    },
    tools: [{ id: 'portTool' }],
  };
}
513
/**
 * Build the spec for one node port from an exchange's flow reference.
 * The multilingual label comes from the flow's common:shortDescription,
 * falling back to the flow UUID; displayText picks the preferred language.
 * quantitativeReference can be forced via the override flag (defaults off).
 */
function buildPortSpec(side, flowUuid, exchange, overrideQuantitativeReference = false) {
  const flowRef = asRecord(exchange?.referenceToFlowDataSet);
  const textLang = langEntries(flowRef['common:shortDescription']).length
    ? langEntries(flowRef['common:shortDescription'])
    : flowPortFallback(flowUuid);
  return {
    side,
    flowUuid,
    flowVersion: String(flowRef['@version'] ?? '').trim(),
    textLang,
    displayText: preferredText(textLang) || flowUuid,
    quantitativeReference: overrideQuantitativeReference || exchange?.quantitativeReference === true,
    allocations: exchange?.allocations,
  };
}
528
/**
 * Combine the generated json_tg with a caller-provided one.
 *
 * When no provided value exists or preferProvidedJsonTg is false, the
 * generated graph wins (source 'generated', returned by reference).
 * Otherwise provided keys override generated ones via spread, except xflow
 * and submodels fall back to the generated versions when the provided ones
 * are empty (source 'merged'); all merged parts are deep-copied.
 */
function mergeJsonTg(generated, provided, preferProvidedJsonTg) {
  if (!provided || !preferProvidedJsonTg) {
    return { jsonTg: generated, source: 'generated' };
  }
  const providedXflow = asRecord(provided.xflow);
  const providedSubmodels = ensureArray(provided.submodels);
  const jsonTg = {
    ...cloneJson(generated),
    ...cloneJson(provided),
    xflow: Object.keys(providedXflow).length > 0
      ? cloneJson(providedXflow)
      : cloneJson(asRecord(generated.xflow)),
    submodels: providedSubmodels.length > 0
      ? cloneJson(providedSubmodels)
      : cloneJson(ensureArray(generated.submodels)),
  };
  return { jsonTg, source: 'merged' };
}
546
// Derives the full json_tg graph representation for a lifecycle model:
// positioned nodes + connection edges under `xflow`, plus a `submodels`
// descriptor identifying the primary/reference process.
// NOTE(review): the node/edge payload shape (shape/attrs/ports/zIndex, magnet
// circles) looks like AntV X6 cell JSON consumed by the TianGong web editor —
// confirm against the UI before relying on field semantics.
function generateJsonTg(jsonOrdered, processInstances, processLookups) {
    // Resolve the reference process: the one declared by the model, falling
    // back to the last process instance when the model names none.
    const referenceProcessInternalId = referenceProcessInternalIdFromModel(jsonOrdered);
    const fallbackReferenceProcessInstance = processInstances[processInstances.length - 1];
    const resolvedReferenceProcessInternalId = referenceProcessInternalId ||
        String(fallbackReferenceProcessInstance?.['@dataSetInternalID'] ?? '').trim();
    // Index connection edges by source and destination internal id so port
    // construction below is O(1) per node.
    const edges = modelEdgesFromConnections(processInstances);
    const outgoingEdges = new Map();
    const incomingEdges = new Map();
    for (const edge of edges) {
        if (!outgoingEdges.has(edge.srcInternalId)) {
            outgoingEdges.set(edge.srcInternalId, []);
        }
        outgoingEdges.get(edge.srcInternalId).push(edge);
        if (!incomingEdges.has(edge.dstInternalId)) {
            incomingEdges.set(edge.dstInternalId, []);
        }
        incomingEdges.get(edge.dstInternalId).push(edge);
    }
    // Phase 1: build one abstract node spec per process instance (identity,
    // labels, deduplicated ports, computed size).
    const nodeIdCounts = new Map();
    const nodeSpecs = processInstances.map((instance) => {
        const internalId = processInstanceInternalId(instance);
        const multiplicationFactor = String(instance['@multiplicationFactor'] ?? '1');
        const referenceToProcess = asRecord(instance.referenceToProcess);
        const lookup = processLookups.get(internalId);
        // Prefer fetched process metadata; fall back to what the model embeds.
        const label = lookup?.label ?? buildSyntheticName(referenceToProcess['common:shortDescription']);
        const shortSummary = lookup?.shortSummary && lookup.shortSummary.length > 0
            ? lookup.shortSummary
            : buildNameSummary(label);
        const processId = lookup?.processId || String(referenceToProcess['@refObjectId'] ?? '').trim();
        const processVersion = lookup?.version || String(referenceToProcess['@version'] ?? '').trim();
        // Node ids default to the process id; a process instantiated more than
        // once gets a "::<internalId>" suffix to keep cell ids unique.
        const baseNodeId = processId || internalId;
        const occurrence = (nodeIdCounts.get(baseNodeId) ?? 0) + 1;
        nodeIdCounts.set(baseNodeId, occurrence);
        const nodeId = occurrence === 1 ? baseNodeId : `${baseNodeId}::${internalId}`;
        // Ports are deduplicated by (side, flowUuid); first registration wins,
        // so the reference exchange below takes precedence over edge-derived ports.
        const portMap = new Map();
        const registerPort = (spec) => {
            const key = `${spec.side}:${spec.flowUuid}`;
            if (!portMap.has(key)) {
                portMap.set(key, spec);
            }
        };
        // The process's own reference exchange becomes a port; it carries the
        // quantitative-reference flag only on the resolved reference process.
        if (lookup?.referenceExchange) {
            const direction = String(lookup.referenceExchange.exchangeDirection ?? '').toUpperCase();
            const flowId = String(asRecord(lookup.referenceExchange.referenceToFlowDataSet)['@refObjectId'] ?? '').trim();
            if (flowId && (direction === 'INPUT' || direction === 'OUTPUT')) {
                registerPort(buildPortSpec(direction, flowId, lookup.referenceExchange, internalId === resolvedReferenceProcessInternalId));
            }
        }
        // Every connected flow also needs a port on the matching side.
        for (const edge of incomingEdges.get(internalId) ?? []) {
            registerPort(buildPortSpec('INPUT', edge.flowUuid, lookup?.exchangeByDirectionAndFlow.get(`Input:${edge.flowUuid}`)));
        }
        for (const edge of outgoingEdges.get(internalId) ?? []) {
            registerPort(buildPortSpec('OUTPUT', edge.flowUuid, lookup?.exchangeByDirectionAndFlow.get(`Output:${edge.flowUuid}`)));
        }
        const inputPorts = Array.from(portMap.values()).filter((item) => item.side === 'INPUT');
        const outputPorts = Array.from(portMap.values()).filter((item) => item.side === 'OUTPUT');
        const hasInputs = inputPorts.length > 0;
        const hasOutputs = outputPorts.length > 0;
        const bothSides = hasInputs && hasOutputs;
        // Node height grows with the port count; two-sided nodes lay ports out
        // in paired rows, single-sided nodes in a simple column.
        const pairCount = Math.max(inputPorts.length, outputPorts.length);
        const height = bothSides
            ? Math.max(NODE_MIN_HEIGHT + 10, 110 + Math.max(pairCount - 1, 0) * PAIRED_PORT_STEP_Y)
            : Math.max(inputPorts.length, outputPorts.length, 2) * PORT_STEP_Y + 60;
        return {
            internalId,
            nodeId,
            processId: processId || nodeId,
            processVersion,
            label,
            shortSummary,
            multiplicationFactor,
            inputPorts,
            outputPorts,
            width: NODE_WIDTH,
            height: Math.max(NODE_MIN_HEIGHT, height),
            isReferenceProcess: internalId === resolvedReferenceProcessInternalId,
        };
    });
    const nodeSpecByInternalId = new Map(nodeSpecs.map((node) => [node.internalId, node]));
    // Phase 2: compute layout coordinates and materialize concrete graph cells.
    const positions = dagreLayout(nodeSpecs, edges);
    const nodes = nodeSpecs.map((nodeSpec) => {
        const bothSides = nodeSpec.inputPorts.length > 0 && nodeSpec.outputPorts.length > 0;
        const position = positions.get(nodeSpec.internalId) ?? { x: DAGRE_MARGIN_X, y: DAGRE_MARGIN_Y };
        const labelText = preferredText(nodeSpec.shortSummary);
        // Vertical offset of the index-th port on a side; paired layout when the
        // node has ports on both sides, single-column layout otherwise.
        const portY = (side, index) => {
            if (bothSides) {
                return side === 'INPUT'
                    ? PAIRED_INPUT_START_Y + index * PAIRED_PORT_STEP_Y
                    : PAIRED_OUTPUT_START_Y + index * PAIRED_PORT_STEP_Y;
            }
            return PORT_START_Y + index * PORT_STEP_Y;
        };
        return {
            id: nodeSpec.nodeId,
            shape: 'rect',
            position,
            size: {
                width: nodeSpec.width,
                height: nodeSpec.height,
            },
            attrs: {
                body: {
                    stroke: PRIMARY_COLOR,
                    strokeWidth: 1,
                    fill: BACKGROUND_COLOR,
                    rx: 6,
                    ry: 6,
                },
                label: {
                    fill: BODY_TEXT_COLOR,
                    refX: 0.5,
                    refY: 8,
                    text: labelText,
                    textAnchor: 'middle',
                    textVerticalAnchor: 'top',
                },
                text: {
                    fill: BODY_TEXT_COLOR,
                    text: labelText,
                },
            },
            isMyProcess: true,
            // Editor-facing metadata; '1'/'0' and targetAmount strings mirror the
            // values the UI stores for the reference process.
            data: {
                id: nodeSpec.processId,
                version: nodeSpec.processVersion,
                index: nodeSpec.internalId,
                label: nodeSpec.label,
                shortDescription: nodeSpec.shortSummary,
                quantitativeReference: nodeSpec.isReferenceProcess ? '1' : '0',
                targetAmount: nodeSpec.isReferenceProcess ? '1' : '',
                multiplicationFactor: nodeSpec.multiplicationFactor,
            },
            ports: {
                groups: {
                    groupInput: {
                        position: { name: 'absolute' },
                        label: { position: { name: 'right' } },
                        attrs: {
                            circle: {
                                stroke: PRIMARY_COLOR,
                                fill: BACKGROUND_COLOR,
                                strokeWidth: 1,
                                r: 4,
                                magnet: true,
                            },
                            text: { fill: MUTED_TEXT_COLOR, fontSize: 14 },
                        },
                    },
                    groupOutput: {
                        position: { name: 'absolute' },
                        label: { position: { name: 'left' } },
                        attrs: {
                            circle: {
                                stroke: PRIMARY_COLOR,
                                fill: BACKGROUND_COLOR,
                                strokeWidth: 1,
                                r: 4,
                                magnet: true,
                            },
                            text: { fill: MUTED_TEXT_COLOR, fontSize: 14 },
                        },
                    },
                },
                items: [
                    ...nodeSpec.inputPorts.map((item, index) => buildPortItem(item, portY('INPUT', index))),
                    ...nodeSpec.outputPorts.map((item, index) => buildPortItem(item, portY('OUTPUT', index))),
                ],
            },
            tools: { name: null, items: [] },
            visible: true,
            zIndex: 1,
        };
    });
    const instanceMap = new Map(processInstances.map((instance) => [processInstanceInternalId(instance), asRecord(instance)]));
    // Phase 3: one edge cell per model connection, wired source-output-port to
    // target-input-port by flow UUID.
    const xflowEdges = edges.map((edge) => {
        const sourceNode = nodeSpecByInternalId.get(edge.srcInternalId);
        const targetNode = nodeSpecByInternalId.get(edge.dstInternalId);
        const sourceLookup = processLookups.get(edge.srcInternalId);
        const targetLookup = processLookups.get(edge.dstInternalId);
        const targetExchange = targetLookup?.exchangeByDirectionAndFlow.get(`Input:${edge.flowUuid}`);
        // Process ids fall back from node spec -> lookup -> raw model reference.
        const sourceProcessId = sourceNode?.processId ??
            sourceLookup?.processId ??
            String(asRecord(instanceMap.get(edge.srcInternalId)?.referenceToProcess)['@refObjectId'] ?? '');
        const targetProcessId = targetNode?.processId ??
            targetLookup?.processId ??
            String(asRecord(instanceMap.get(edge.dstInternalId)?.referenceToProcess)['@refObjectId'] ?? '');
        return {
            id: crypto.randomUUID(),
            shape: 'edge',
            // Port ids follow the "<SIDE>:<flowUuid>" convention used for ports.
            source: { cell: sourceNode?.nodeId ?? edge.srcInternalId, port: `OUTPUT:${edge.flowUuid}` },
            target: { cell: targetNode?.nodeId ?? edge.dstInternalId, port: `INPUT:${edge.flowUuid}` },
            labels: [],
            attrs: {
                line: {
                    stroke: PRIMARY_COLOR,
                },
            },
            data: {
                connection: {
                    outputExchange: {
                        '@flowUUID': edge.flowUuid,
                        downstreamProcess: {
                            '@id': edge.dstInternalId,
                            '@flowUUID': edge.flowUuid,
                        },
                    },
                    // Generated edges are recorded as balanced by construction.
                    isBalanced: true,
                    unbalancedAmount: 0,
                    exchangeAmount: exchangeAmount(targetExchange),
                },
                node: {
                    sourceNodeID: sourceNode?.nodeId ?? edge.srcInternalId,
                    sourceProcessId,
                    sourceProcessVersion: sourceNode?.processVersion ??
                        processLookups.get(edge.srcInternalId)?.version ??
                        String(asRecord(instanceMap.get(edge.srcInternalId)?.referenceToProcess)['@version'] ?? ''),
                    targetNodeID: targetNode?.nodeId ?? edge.dstInternalId,
                    targetProcessId,
                    targetProcessVersion: targetNode?.processVersion ??
                        processLookups.get(edge.dstInternalId)?.version ??
                        String(asRecord(instanceMap.get(edge.dstInternalId)?.referenceToProcess)['@version'] ?? ''),
                },
            },
            zIndex: 4,
        };
    });
    // Phase 4: describe the primary submodel, anchored on the reference process
    // and (when available) its reference exchange, else the first outgoing edge.
    const modelDataSet = getModelDataSet(jsonOrdered);
    const dataSetInformation = asRecord(asRecord(modelDataSet.lifeCycleModelInformation).dataSetInformation);
    const referenceToResultingProcess = asRecord(dataSetInformation.referenceToResultingProcess);
    const referenceProcessInstance = processInstances.find((instance) => String(instance['@dataSetInternalID'] ?? '').trim() === resolvedReferenceProcessInternalId) ?? fallbackReferenceProcessInstance;
    const referenceProcessRef = asRecord(referenceProcessInstance?.referenceToProcess);
    const referenceProcessLookup = processLookups.get(resolvedReferenceProcessInternalId);
    const referenceNodeSpec = nodeSpecByInternalId.get(resolvedReferenceProcessInternalId);
    const referenceExchange = referenceProcessLookup?.referenceExchange;
    const fallbackEdge = (outgoingEdges.get(resolvedReferenceProcessInternalId) ?? [])[0];
    const finalId = {
        nodeId: referenceNodeSpec?.nodeId ||
            String(referenceProcessRef['@refObjectId'] ?? '') ||
            resolvedReferenceProcessInternalId,
        processId: referenceNodeSpec?.processId || String(referenceProcessRef['@refObjectId'] ?? ''),
    };
    if (referenceExchange) {
        finalId.allocatedExchangeFlowId = String(asRecord(referenceExchange.referenceToFlowDataSet)['@refObjectId'] ?? '');
        finalId.allocatedExchangeDirection = String(referenceExchange.exchangeDirection ?? '');
    }
    else if (fallbackEdge) {
        finalId.referenceToFlowDataSet = {
            '@refObjectId': fallbackEdge.flowUuid,
            '@exchangeDirection': 'Output',
        };
    }
    const submodels = [
        {
            id: String(referenceToResultingProcess['@refObjectId'] ?? getModelUuid(jsonOrdered)),
            type: 'primary',
            finalId,
        },
    ];
    return {
        xflow: {
            nodes,
            edges: xflowEdges,
        },
        submodels,
    };
}
812
// Runs the validator's enhanced validation and reduces it to a rule-verification
// summary. Issues whose path touches "validation" or "compliance" are excluded
// from the count; rule verification passes when no other issues remain.
// Returns { ruleVerification, issueCount, filteredIssues }.
function deriveRuleVerification(validator) {
    const outcome = validator.validateEnhanced();
    // Fully successful validation short-circuits with a clean summary.
    if (outcome.success) {
        return { ruleVerification: true, issueCount: 0, filteredIssues: [] };
    }
    // Keep only issues rooted outside the validation/compliance sections.
    const relevantIssues = [];
    for (const rawIssue of ensureArray(asRecord(outcome.error).issues)) {
        const pathParts = ensureArray(asRecord(rawIssue).path).map((segment) => String(segment));
        const excluded = pathParts.includes('validation') || pathParts.includes('compliance');
        if (!excluded) {
            relevantIssues.push(rawIssue);
        }
    }
    return {
        ruleVerification: relevantIssues.length === 0,
        issueCount: relevantIssues.length,
        filteredIssues: relevantIssues,
    };
}
828
// Prepares a lifecycle model payload for persistence: normalizes the input,
// validates it strictly, generates the json_tg graph (optionally merged with a
// caller-provided json_tg), and derives the rule-verification summary.
// `input` carries `payload` plus optional `id`, `version`, `preferProvidedJsonTg`;
// `bearerKey` is forwarded to the Supabase client factory. Returns a summary
// record together with the normalized `jsonOrdered` and final `jsonTg`.
export async function prepareLifecycleModelFile(input, bearerKey) {
    const preferProvidedJsonTg = input.preferProvidedJsonTg ?? false;
    const normalized = normalizeLifecycleModelPayload(input.payload);
    const { jsonOrdered } = normalized;
    const instances = graphProcessInstancesFromModel(jsonOrdered);
    // Explicit id/version on the input win over values embedded in the model.
    const lifecycleModelId = input.id ?? getModelUuid(jsonOrdered);
    const lifecycleModelVersion = input.version ?? getModelVersion(jsonOrdered);
    // Strict validation runs before any network round-trips so bad payloads
    // fail fast.
    const validator = createLifecycleModelValidator(jsonOrdered);
    validateLifecycleModelStrict(validator);
    const { supabase } = await createSupabaseClient(bearerKey);
    const lookups = await fetchProcessLookups(supabase, instances);
    const generated = generateJsonTg(jsonOrdered, instances, lookups);
    const { jsonTg, source } = mergeJsonTg(generated, normalized.providedJsonTg, preferProvidedJsonTg);
    const { ruleVerification, issueCount, filteredIssues } = deriveRuleVerification(validator);
    const xflow = asRecord(jsonTg.xflow);
    return {
        sourceFormat: normalized.sourceFormat,
        lifecycleModelId,
        lifecycleModelVersion,
        jsonTgSource: source,
        processCount: instances.length,
        nodeCount: ensureArray(xflow.nodes).length,
        edgeCount: ensureArray(xflow.edges).length,
        submodelCount: ensureArray(jsonTg.submodels).length,
        ruleVerification,
        validationIssueCount: issueCount,
        validationIssues: filteredIssues,
        jsonOrdered,
        jsonTg,
    };
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tiangong-lca/mcp-server",
3
- "version": "0.0.28",
3
+ "version": "0.0.29",
4
4
  "description": "TianGong LCA MCP Server",
5
5
  "license": "MIT",
6
6
  "author": "Nan LI",
@@ -28,19 +28,20 @@
28
28
  "ncu:update": "npx npm-check-updates -u"
29
29
  },
30
30
  "dependencies": {
31
- "@modelcontextprotocol/sdk": "^1.26.0",
32
- "@supabase/supabase-js": "^2.95.3",
31
+ "@dagrejs/dagre": "^2.0.4",
32
+ "@modelcontextprotocol/sdk": "^1.27.1",
33
+ "@supabase/supabase-js": "^2.99.1",
33
34
  "@tiangong-lca/tidas-sdk": "^0.1.30",
34
35
  "@types/express": "^5.0.6",
35
- "@upstash/redis": "^1.36.2",
36
+ "@upstash/redis": "^1.37.0",
36
37
  "aws-jwt-verify": "^5.1.1",
37
38
  "olca-ipc": "^2.2.1",
38
39
  "zod": "^4.3.6"
39
40
  },
40
41
  "devDependencies": {
41
- "@modelcontextprotocol/inspector": "^0.19.0",
42
+ "@modelcontextprotocol/inspector": "^0.21.1",
42
43
  "dotenv-cli": "^11.0.0",
43
- "npm-check-updates": "^19.3.2",
44
+ "npm-check-updates": "^19.6.3",
44
45
  "prettier": "^3.8.1",
45
46
  "prettier-plugin-organize-imports": "^4.3.0",
46
47
  "shx": "^0.4.0",