primitive-admin 1.0.39 → 1.0.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/assets/skill/skills/primitive-platform/SKILL.md +1 -9
- package/dist/bin/primitive.js +132 -16
- package/dist/bin/primitive.js.map +1 -1
- package/dist/src/commands/admins.js +107 -0
- package/dist/src/commands/admins.js.map +1 -1
- package/dist/src/commands/blob-buckets.js +354 -0
- package/dist/src/commands/blob-buckets.js.map +1 -0
- package/dist/src/commands/collections.js +18 -4
- package/dist/src/commands/collections.js.map +1 -1
- package/dist/src/commands/cron-triggers.js +364 -0
- package/dist/src/commands/cron-triggers.js.map +1 -0
- package/dist/src/commands/email-templates.js +19 -5
- package/dist/src/commands/email-templates.js.map +1 -1
- package/dist/src/commands/env.js +260 -0
- package/dist/src/commands/env.js.map +1 -0
- package/dist/src/commands/init.js +90 -2
- package/dist/src/commands/init.js.map +1 -1
- package/dist/src/commands/sync.js +330 -7
- package/dist/src/commands/sync.js.map +1 -1
- package/dist/src/lib/api-client.js +134 -1
- package/dist/src/lib/api-client.js.map +1 -1
- package/dist/src/lib/config.js +51 -53
- package/dist/src/lib/config.js.map +1 -1
- package/dist/src/lib/credentials-store.js +307 -0
- package/dist/src/lib/credentials-store.js.map +1 -0
- package/dist/src/lib/env-resolver.js +121 -0
- package/dist/src/lib/env-resolver.js.map +1 -0
- package/dist/src/lib/paginate.js +42 -0
- package/dist/src/lib/paginate.js.map +1 -0
- package/dist/src/lib/project-config.js +209 -0
- package/dist/src/lib/project-config.js.map +1 -0
- package/dist/src/lib/sync-paths.js +102 -0
- package/dist/src/lib/sync-paths.js.map +1 -0
- package/dist/src/lib/version-check.js +5 -2
- package/dist/src/lib/version-check.js.map +1 -1
- package/package.json +1 -1
|
@@ -124,6 +124,62 @@ function serializeWebhook(webhook) {
|
|
|
124
124
|
});
|
|
125
125
|
return TOML.stringify(data);
|
|
126
126
|
}
|
|
127
|
+
/**
 * Serialize a cron trigger record into a TOML document for the local
 * config directory (`cron-triggers/{key}.toml`).
 *
 * @param {object} trigger - Cron trigger as returned by the API
 *   (triggerKey, displayName, cron, workflowKey, optional description,
 *   timezone, overlapPolicy, state, rootInput, inputMapping).
 * @returns {string} TOML text with a `[cronTrigger]` table plus optional
 *   `rootInput` / `inputMapping` tables.
 */
function serializeCronTrigger(trigger) {
    const section = {
        key: trigger.triggerKey,
        displayName: trigger.displayName,
        description: trigger.description || undefined,
        cron: trigger.cron,
        timezone: trigger.timezone || "UTC",
        workflowKey: trigger.workflowKey,
        overlapPolicy: trigger.overlapPolicy || "skip",
        state: trigger.state,
    };
    // Strip undefined values before handing the data to the TOML serializer.
    for (const field of Object.keys(section)) {
        if (section[field] === undefined) {
            delete section[field];
        }
    }
    const data = { cronTrigger: section };
    // rootInput / inputMapping are stored as JSON strings on the server;
    // parse them back into structured tables, falling back to a { raw }
    // wrapper when the stored string is not valid JSON.
    if (trigger.rootInput) {
        try {
            data.rootInput = JSON.parse(trigger.rootInput);
        }
        catch {
            data.rootInput = { raw: trigger.rootInput };
        }
    }
    if (trigger.inputMapping) {
        try {
            data.inputMapping = JSON.parse(trigger.inputMapping);
        }
        catch {
            data.inputMapping = { raw: trigger.inputMapping };
        }
    }
    return TOML.stringify(data);
}
|
|
163
|
+
/**
 * Serialize a blob bucket record into a TOML document for the local
 * config directory (`blob-buckets/{key}.toml`).
 *
 * @param {object} bucket - Blob bucket as returned by the API
 *   (bucketKey, name, ttlTier, accessPolicy, optional description,
 *   ruleSetId).
 * @returns {string} TOML text containing a single `[bucket]` table.
 */
function serializeBlobBucket(bucket) {
    const section = {
        key: bucket.bucketKey,
        name: bucket.name,
        description: bucket.description || undefined,
        ttlTier: bucket.ttlTier,
        accessPolicy: bucket.accessPolicy,
    };
    if (bucket.ruleSetId) {
        section.ruleSetId = bucket.ruleSetId;
    }
    // Strip undefined values before handing the data to the TOML serializer.
    for (const field of Object.keys(section)) {
        if (section[field] === undefined) {
            delete section[field];
        }
    }
    return TOML.stringify({ bucket: section });
}
|
|
127
183
|
function serializePrompt(prompt) {
|
|
128
184
|
const data = {
|
|
129
185
|
prompt: {
|
|
@@ -770,6 +826,8 @@ Directory Structure:
|
|
|
770
826
|
app.toml # App settings
|
|
771
827
|
integrations/*.toml # Integration configs
|
|
772
828
|
webhooks/*.toml # Webhook configs
|
|
829
|
+
cron-triggers/*.toml # Cron trigger configs
|
|
830
|
+
blob-buckets/*.toml # Blob bucket configs
|
|
773
831
|
prompts/*.toml # Prompt configs
|
|
774
832
|
prompts/{key}.tests/*.toml # Prompt test cases
|
|
775
833
|
workflows/*.toml # Workflow definitions
|
|
@@ -792,6 +850,8 @@ Directory Structure:
|
|
|
792
850
|
ensureDir(configDir);
|
|
793
851
|
ensureDir(join(configDir, "integrations"));
|
|
794
852
|
ensureDir(join(configDir, "webhooks"));
|
|
853
|
+
ensureDir(join(configDir, "cron-triggers"));
|
|
854
|
+
ensureDir(join(configDir, "blob-buckets"));
|
|
795
855
|
ensureDir(join(configDir, "prompts"));
|
|
796
856
|
ensureDir(join(configDir, "workflows"));
|
|
797
857
|
ensureDir(join(configDir, "database-types"));
|
|
@@ -876,6 +936,8 @@ Directory Structure:
|
|
|
876
936
|
ensureDir(configDir);
|
|
877
937
|
ensureDir(join(configDir, "integrations"));
|
|
878
938
|
ensureDir(join(configDir, "webhooks"));
|
|
939
|
+
ensureDir(join(configDir, "cron-triggers"));
|
|
940
|
+
ensureDir(join(configDir, "blob-buckets"));
|
|
879
941
|
ensureDir(join(configDir, "prompts"));
|
|
880
942
|
ensureDir(join(configDir, "workflows"));
|
|
881
943
|
ensureDir(join(configDir, "database-types"));
|
|
@@ -918,6 +980,46 @@ Directory Structure:
|
|
|
918
980
|
};
|
|
919
981
|
}
|
|
920
982
|
info(` Pulled ${webhooks.length} webhook(s)`);
|
|
983
|
+
// Pull cron triggers
|
|
984
|
+
let cronTriggerItems = [];
|
|
985
|
+
try {
|
|
986
|
+
const cronResult = await client.listCronTriggers(resolvedAppId);
|
|
987
|
+
cronTriggerItems = cronResult.items || [];
|
|
988
|
+
}
|
|
989
|
+
catch {
|
|
990
|
+
// Cron triggers may not be available on older servers
|
|
991
|
+
}
|
|
992
|
+
const cronTriggersDir = join(configDir, "cron-triggers");
|
|
993
|
+
mkdirSync(cronTriggersDir, { recursive: true });
|
|
994
|
+
const cronTriggerEntities = {};
|
|
995
|
+
for (const trigger of cronTriggerItems) {
|
|
996
|
+
const filename = `${trigger.triggerKey}.toml`;
|
|
997
|
+
const filePath = join(cronTriggersDir, filename);
|
|
998
|
+
writeFileSync(filePath, serializeCronTrigger(trigger));
|
|
999
|
+
cronTriggerEntities[trigger.triggerKey] = {
|
|
1000
|
+
id: trigger.triggerId,
|
|
1001
|
+
modifiedAt: trigger.modifiedAt || new Date().toISOString(),
|
|
1002
|
+
contentHash: computeFileHash(filePath),
|
|
1003
|
+
};
|
|
1004
|
+
}
|
|
1005
|
+
info(` Pulled ${cronTriggerItems.length} cron trigger(s)`);
|
|
1006
|
+
// Pull blob buckets
|
|
1007
|
+
const blobBucketsResult = await client.listBlobBuckets(resolvedAppId).catch(() => ({ items: [] }));
|
|
1008
|
+
const blobBucketItems = blobBucketsResult.items || [];
|
|
1009
|
+
const blobBucketsDir = join(configDir, "blob-buckets");
|
|
1010
|
+
mkdirSync(blobBucketsDir, { recursive: true });
|
|
1011
|
+
const blobBucketEntities = {};
|
|
1012
|
+
for (const bucket of blobBucketItems) {
|
|
1013
|
+
const filename = `${bucket.bucketKey}.toml`;
|
|
1014
|
+
const filePath = join(blobBucketsDir, filename);
|
|
1015
|
+
writeFileSync(filePath, serializeBlobBucket(bucket));
|
|
1016
|
+
blobBucketEntities[bucket.bucketKey] = {
|
|
1017
|
+
id: bucket.bucketId,
|
|
1018
|
+
modifiedAt: bucket.modifiedAt || new Date().toISOString(),
|
|
1019
|
+
contentHash: computeFileHash(filePath),
|
|
1020
|
+
};
|
|
1021
|
+
}
|
|
1022
|
+
info(` Pulled ${blobBucketItems.length} blob bucket(s)`);
|
|
921
1023
|
// Write prompts
|
|
922
1024
|
const promptEntities = {};
|
|
923
1025
|
for (const prompt of prompts) {
|
|
@@ -1068,6 +1170,8 @@ Directory Structure:
|
|
|
1068
1170
|
app: settings ? { modifiedAt: new Date().toISOString(), contentHash: computeFileHash(join(configDir, "app.toml")) } : undefined,
|
|
1069
1171
|
integrations: integrationEntities,
|
|
1070
1172
|
webhooks: webhookEntities,
|
|
1173
|
+
cronTriggers: Object.keys(cronTriggerEntities).length > 0 ? cronTriggerEntities : undefined,
|
|
1174
|
+
blobBuckets: Object.keys(blobBucketEntities).length > 0 ? blobBucketEntities : undefined,
|
|
1071
1175
|
prompts: promptEntities,
|
|
1072
1176
|
workflows: workflowEntities,
|
|
1073
1177
|
emailTemplates: Object.keys(emailTemplateEntities).length > 0 ? emailTemplateEntities : undefined,
|
|
@@ -1082,6 +1186,8 @@ Directory Structure:
|
|
|
1082
1186
|
success(`Pulled configuration to ${configDir}`);
|
|
1083
1187
|
keyValue("Integrations", integrations.length);
|
|
1084
1188
|
keyValue("Webhooks", webhooks.length);
|
|
1189
|
+
keyValue("Cron Triggers", cronTriggerItems.length);
|
|
1190
|
+
keyValue("Blob Buckets", blobBucketItems.length);
|
|
1085
1191
|
keyValue("Prompts", prompts.length);
|
|
1086
1192
|
keyValue("Workflows", workflows.length);
|
|
1087
1193
|
keyValue("Email Templates", emailTemplates.length);
|
|
@@ -1425,6 +1531,152 @@ Directory Structure:
|
|
|
1425
1531
|
}
|
|
1426
1532
|
}
|
|
1427
1533
|
}
|
|
1534
|
+
// Process cron triggers
|
|
1535
|
+
const cronTriggersDir = join(configDir, "cron-triggers");
|
|
1536
|
+
if (existsSync(cronTriggersDir)) {
|
|
1537
|
+
const files = readdirSync(cronTriggersDir).filter((f) => f.endsWith(".toml"));
|
|
1538
|
+
for (const file of files) {
|
|
1539
|
+
const filePath = join(cronTriggersDir, file);
|
|
1540
|
+
const tomlData = parseTomlFile(filePath);
|
|
1541
|
+
const cronTrigger = tomlData.cronTrigger || {};
|
|
1542
|
+
const key = cronTrigger.key || basename(file, ".toml");
|
|
1543
|
+
const existingId = syncState?.entities?.cronTriggers?.[key]?.id;
|
|
1544
|
+
if (!options.force && existingId &&
|
|
1545
|
+
!shouldPushFile(filePath, syncState?.entities?.cronTriggers?.[key]?.contentHash)) {
|
|
1546
|
+
skippedCount++;
|
|
1547
|
+
continue;
|
|
1548
|
+
}
|
|
1549
|
+
const payload = {
|
|
1550
|
+
triggerKey: key,
|
|
1551
|
+
displayName: cronTrigger.displayName || key,
|
|
1552
|
+
description: cronTrigger.description,
|
|
1553
|
+
cron: cronTrigger.cron,
|
|
1554
|
+
timezone: cronTrigger.timezone,
|
|
1555
|
+
workflowKey: cronTrigger.workflowKey,
|
|
1556
|
+
overlapPolicy: cronTrigger.overlapPolicy,
|
|
1557
|
+
state: cronTrigger.state,
|
|
1558
|
+
};
|
|
1559
|
+
// Handle JSON fields
|
|
1560
|
+
if (tomlData.rootInput) {
|
|
1561
|
+
payload.rootInput = JSON.stringify(tomlData.rootInput);
|
|
1562
|
+
}
|
|
1563
|
+
if (tomlData.inputMapping) {
|
|
1564
|
+
payload.inputMapping = JSON.stringify(tomlData.inputMapping);
|
|
1565
|
+
}
|
|
1566
|
+
if (existingId) {
|
|
1567
|
+
changes.push({ type: "cron-trigger", action: "update", key });
|
|
1568
|
+
if (!options.dryRun) {
|
|
1569
|
+
try {
|
|
1570
|
+
const updated = await client.updateCronTrigger(resolvedAppId, existingId, payload);
|
|
1571
|
+
info(` Updated cron trigger: ${key}`);
|
|
1572
|
+
if (syncState?.entities?.cronTriggers?.[key] && updated?.modifiedAt) {
|
|
1573
|
+
syncState.entities.cronTriggers[key].modifiedAt = updated.modifiedAt;
|
|
1574
|
+
syncState.entities.cronTriggers[key].contentHash = computeFileHash(filePath);
|
|
1575
|
+
}
|
|
1576
|
+
}
|
|
1577
|
+
catch (err) {
|
|
1578
|
+
throw err;
|
|
1579
|
+
}
|
|
1580
|
+
}
|
|
1581
|
+
}
|
|
1582
|
+
else {
|
|
1583
|
+
changes.push({ type: "cron-trigger", action: "create", key });
|
|
1584
|
+
if (!options.dryRun) {
|
|
1585
|
+
const created = await client.createCronTrigger(resolvedAppId, payload);
|
|
1586
|
+
info(` Created cron trigger: ${key}`);
|
|
1587
|
+
if (syncState && created?.triggerId && created?.modifiedAt) {
|
|
1588
|
+
if (!syncState.entities.cronTriggers) {
|
|
1589
|
+
syncState.entities.cronTriggers = {};
|
|
1590
|
+
}
|
|
1591
|
+
syncState.entities.cronTriggers[key] = {
|
|
1592
|
+
id: created.triggerId,
|
|
1593
|
+
modifiedAt: created.modifiedAt,
|
|
1594
|
+
contentHash: computeFileHash(filePath),
|
|
1595
|
+
};
|
|
1596
|
+
}
|
|
1597
|
+
}
|
|
1598
|
+
}
|
|
1599
|
+
}
|
|
1600
|
+
}
|
|
1601
|
+
// Process blob buckets
|
|
1602
|
+
const blobBucketsPushDir = join(configDir, "blob-buckets");
|
|
1603
|
+
if (existsSync(blobBucketsPushDir)) {
|
|
1604
|
+
const files = readdirSync(blobBucketsPushDir).filter((f) => f.endsWith(".toml"));
|
|
1605
|
+
for (const file of files) {
|
|
1606
|
+
const filePath = join(blobBucketsPushDir, file);
|
|
1607
|
+
const tomlData = parseTomlFile(filePath);
|
|
1608
|
+
const bucket = tomlData.bucket || {};
|
|
1609
|
+
const key = bucket.key || basename(file, ".toml");
|
|
1610
|
+
const existingId = syncState?.entities?.blobBuckets?.[key]?.id;
|
|
1611
|
+
if (!options.force && existingId &&
|
|
1612
|
+
!shouldPushFile(filePath, syncState?.entities?.blobBuckets?.[key]?.contentHash)) {
|
|
1613
|
+
skippedCount++;
|
|
1614
|
+
continue;
|
|
1615
|
+
}
|
|
1616
|
+
if (existingId) {
|
|
1617
|
+
// Blob buckets don't have an update API - skip if already exists
|
|
1618
|
+
info(` Blob bucket already exists, skipping: ${key}`);
|
|
1619
|
+
if (syncState?.entities?.blobBuckets?.[key]) {
|
|
1620
|
+
syncState.entities.blobBuckets[key].contentHash = computeFileHash(filePath);
|
|
1621
|
+
}
|
|
1622
|
+
}
|
|
1623
|
+
else {
|
|
1624
|
+
const payload = {
|
|
1625
|
+
bucketKey: key,
|
|
1626
|
+
name: bucket.name || key,
|
|
1627
|
+
ttlTier: bucket.ttlTier,
|
|
1628
|
+
accessPolicy: bucket.accessPolicy,
|
|
1629
|
+
};
|
|
1630
|
+
if (bucket.description)
|
|
1631
|
+
payload.description = bucket.description;
|
|
1632
|
+
if (bucket.ruleSetId)
|
|
1633
|
+
payload.ruleSetId = bucket.ruleSetId;
|
|
1634
|
+
changes.push({ type: "blob-bucket", action: "create", key });
|
|
1635
|
+
if (!options.dryRun) {
|
|
1636
|
+
try {
|
|
1637
|
+
const created = await client.createBlobBucket(resolvedAppId, payload);
|
|
1638
|
+
info(` Created blob bucket: ${key}`);
|
|
1639
|
+
if (syncState) {
|
|
1640
|
+
if (!syncState.entities.blobBuckets) {
|
|
1641
|
+
syncState.entities.blobBuckets = {};
|
|
1642
|
+
}
|
|
1643
|
+
syncState.entities.blobBuckets[key] = {
|
|
1644
|
+
id: created.bucketId,
|
|
1645
|
+
modifiedAt: created.modifiedAt || new Date().toISOString(),
|
|
1646
|
+
contentHash: computeFileHash(filePath),
|
|
1647
|
+
};
|
|
1648
|
+
}
|
|
1649
|
+
}
|
|
1650
|
+
catch (err) {
|
|
1651
|
+
const msg = String(err?.message || err);
|
|
1652
|
+
if (msg.includes("already exists") || err.statusCode === 409) {
|
|
1653
|
+
info(` Blob bucket already exists on server: ${key}`);
|
|
1654
|
+
// Fetch the existing bucket to get its ID
|
|
1655
|
+
try {
|
|
1656
|
+
const existing = await client.getBlobBucket(resolvedAppId, key);
|
|
1657
|
+
if (syncState && existing?.bucketId) {
|
|
1658
|
+
if (!syncState.entities.blobBuckets) {
|
|
1659
|
+
syncState.entities.blobBuckets = {};
|
|
1660
|
+
}
|
|
1661
|
+
syncState.entities.blobBuckets[key] = {
|
|
1662
|
+
id: existing.bucketId,
|
|
1663
|
+
modifiedAt: existing.modifiedAt || new Date().toISOString(),
|
|
1664
|
+
contentHash: computeFileHash(filePath),
|
|
1665
|
+
};
|
|
1666
|
+
}
|
|
1667
|
+
}
|
|
1668
|
+
catch {
|
|
1669
|
+
// Ignore fetch errors
|
|
1670
|
+
}
|
|
1671
|
+
}
|
|
1672
|
+
else {
|
|
1673
|
+
throw err;
|
|
1674
|
+
}
|
|
1675
|
+
}
|
|
1676
|
+
}
|
|
1677
|
+
}
|
|
1678
|
+
}
|
|
1679
|
+
}
|
|
1428
1680
|
// Process prompts
|
|
1429
1681
|
const promptsDir = join(configDir, "prompts");
|
|
1430
1682
|
if (existsSync(promptsDir)) {
|
|
@@ -1779,8 +2031,10 @@ Directory Structure:
|
|
|
1779
2031
|
continue;
|
|
1780
2032
|
}
|
|
1781
2033
|
if (existingEntry) {
|
|
1782
|
-
// Update existing type config
|
|
1783
|
-
|
|
2034
|
+
// Update existing type config — only if there are type-level fields to update.
|
|
2035
|
+
// Operations are handled separately below, so skipping the PATCH here when
|
|
2036
|
+
// updateData is empty avoids sending an empty body (which the server rejects
|
|
2037
|
+
// with HTTP 400).
|
|
1784
2038
|
if (!options.dryRun) {
|
|
1785
2039
|
const expectedModifiedAt = options.force
|
|
1786
2040
|
? undefined
|
|
@@ -1793,11 +2047,14 @@ Directory Structure:
|
|
|
1793
2047
|
updateData.triggers = typeConfig.triggers || null;
|
|
1794
2048
|
if ("metadataAccess" in typeConfig)
|
|
1795
2049
|
updateData.metadataAccess = typeConfig.metadataAccess || null;
|
|
1796
|
-
|
|
1797
|
-
|
|
1798
|
-
|
|
1799
|
-
|
|
1800
|
-
syncState
|
|
2050
|
+
if (Object.keys(updateData).length > 0) {
|
|
2051
|
+
changes.push({ type: "database-type", action: "update", key: dbType });
|
|
2052
|
+
const updated = await client.updateDatabaseTypeConfig(resolvedAppId, dbType, updateData, expectedModifiedAt);
|
|
2053
|
+
info(` Updated database type: ${dbType}`);
|
|
2054
|
+
if (syncState?.entities?.databaseTypes?.[dbType] && updated?.modifiedAt) {
|
|
2055
|
+
syncState.entities.databaseTypes[dbType].modifiedAt = updated.modifiedAt;
|
|
2056
|
+
syncState.entities.databaseTypes[dbType].contentHash = computeFileHash(filePath);
|
|
2057
|
+
}
|
|
1801
2058
|
}
|
|
1802
2059
|
}
|
|
1803
2060
|
catch (err) {
|
|
@@ -1814,6 +2071,15 @@ Directory Structure:
|
|
|
1814
2071
|
}
|
|
1815
2072
|
}
|
|
1816
2073
|
}
|
|
2074
|
+
else {
|
|
2075
|
+
// In dry-run mode, still report the change iff we would actually PATCH.
|
|
2076
|
+
const wouldUpdate = "ruleSetId" in typeConfig ||
|
|
2077
|
+
"triggers" in typeConfig ||
|
|
2078
|
+
"metadataAccess" in typeConfig;
|
|
2079
|
+
if (wouldUpdate) {
|
|
2080
|
+
changes.push({ type: "database-type", action: "update", key: dbType });
|
|
2081
|
+
}
|
|
2082
|
+
}
|
|
1817
2083
|
}
|
|
1818
2084
|
else {
|
|
1819
2085
|
// Create new type config
|
|
@@ -2151,8 +2417,19 @@ Directory Structure:
|
|
|
2151
2417
|
client.listEmailTemplates(resolvedAppId).catch(() => ({ templates: [] })),
|
|
2152
2418
|
]);
|
|
2153
2419
|
const webhookItems = await fetchAll((p) => client.listWebhooks(resolvedAppId, p));
|
|
2420
|
+
let cronTriggerItemsDiff = [];
|
|
2421
|
+
try {
|
|
2422
|
+
const cronResult = await client.listCronTriggers(resolvedAppId);
|
|
2423
|
+
cronTriggerItemsDiff = cronResult.items || [];
|
|
2424
|
+
}
|
|
2425
|
+
catch {
|
|
2426
|
+
// Cron triggers may not be available on older servers
|
|
2427
|
+
}
|
|
2428
|
+
const blobBucketsDiffResult = await client.listBlobBuckets(resolvedAppId).catch(() => ({ items: [] }));
|
|
2154
2429
|
const remoteIntegrations = new Set(integrationItems.map((i) => i.integrationKey));
|
|
2155
2430
|
const remoteWebhooks = new Set(webhookItems.map((w) => w.webhookKey));
|
|
2431
|
+
const remoteCronTriggers = new Set(cronTriggerItemsDiff.map((t) => t.triggerKey));
|
|
2432
|
+
const remoteBlobBuckets = new Set((blobBucketsDiffResult.items || []).map((b) => b.bucketKey));
|
|
2156
2433
|
const remotePrompts = new Set(promptItems.map((p) => p.promptKey));
|
|
2157
2434
|
const remoteWorkflows = new Set(workflowItems.map((w) => w.workflowKey));
|
|
2158
2435
|
const remoteEmailTemplates = new Set((emailTemplatesResult.templates || [])
|
|
@@ -2161,6 +2438,8 @@ Directory Structure:
|
|
|
2161
2438
|
// Get local files
|
|
2162
2439
|
const localIntegrations = new Set();
|
|
2163
2440
|
const localWebhooks = new Set();
|
|
2441
|
+
const localCronTriggers = new Set();
|
|
2442
|
+
const localBlobBuckets = new Set();
|
|
2164
2443
|
const localPrompts = new Set();
|
|
2165
2444
|
const localWorkflows = new Set();
|
|
2166
2445
|
const localEmailTemplates = new Set();
|
|
@@ -2180,6 +2459,22 @@ Directory Structure:
|
|
|
2180
2459
|
localWebhooks.add(key);
|
|
2181
2460
|
}
|
|
2182
2461
|
}
|
|
2462
|
+
const cronTriggersDirPath = join(configDir, "cron-triggers");
|
|
2463
|
+
if (existsSync(cronTriggersDirPath)) {
|
|
2464
|
+
for (const file of readdirSync(cronTriggersDirPath).filter((f) => f.endsWith(".toml"))) {
|
|
2465
|
+
const tomlData = parseTomlFile(join(cronTriggersDirPath, file));
|
|
2466
|
+
const key = tomlData.cronTrigger?.key || basename(file, ".toml");
|
|
2467
|
+
localCronTriggers.add(key);
|
|
2468
|
+
}
|
|
2469
|
+
}
|
|
2470
|
+
const blobBucketsDiffDir = join(configDir, "blob-buckets");
|
|
2471
|
+
if (existsSync(blobBucketsDiffDir)) {
|
|
2472
|
+
for (const file of readdirSync(blobBucketsDiffDir).filter((f) => f.endsWith(".toml"))) {
|
|
2473
|
+
const tomlData = parseTomlFile(join(blobBucketsDiffDir, file));
|
|
2474
|
+
const key = tomlData.bucket?.key || basename(file, ".toml");
|
|
2475
|
+
localBlobBuckets.add(key);
|
|
2476
|
+
}
|
|
2477
|
+
}
|
|
2183
2478
|
const promptsDir = join(configDir, "prompts");
|
|
2184
2479
|
if (existsSync(promptsDir)) {
|
|
2185
2480
|
for (const file of readdirSync(promptsDir).filter((f) => f.endsWith(".toml"))) {
|
|
@@ -2234,6 +2529,34 @@ Directory Structure:
|
|
|
2234
2529
|
differences.push({ type: "webhook", key, status: "remote only" });
|
|
2235
2530
|
}
|
|
2236
2531
|
}
|
|
2532
|
+
// Cron Triggers
|
|
2533
|
+
for (const key of localCronTriggers) {
|
|
2534
|
+
if (!remoteCronTriggers.has(key)) {
|
|
2535
|
+
differences.push({ type: "cron-trigger", key, status: "local only" });
|
|
2536
|
+
}
|
|
2537
|
+
else {
|
|
2538
|
+
differences.push({ type: "cron-trigger", key, status: "exists" });
|
|
2539
|
+
}
|
|
2540
|
+
}
|
|
2541
|
+
for (const key of remoteCronTriggers) {
|
|
2542
|
+
if (!localCronTriggers.has(key)) {
|
|
2543
|
+
differences.push({ type: "cron-trigger", key, status: "remote only" });
|
|
2544
|
+
}
|
|
2545
|
+
}
|
|
2546
|
+
// Blob Buckets
|
|
2547
|
+
for (const key of localBlobBuckets) {
|
|
2548
|
+
if (!remoteBlobBuckets.has(key)) {
|
|
2549
|
+
differences.push({ type: "blob-bucket", key, status: "local only" });
|
|
2550
|
+
}
|
|
2551
|
+
else {
|
|
2552
|
+
differences.push({ type: "blob-bucket", key, status: "exists" });
|
|
2553
|
+
}
|
|
2554
|
+
}
|
|
2555
|
+
for (const key of remoteBlobBuckets) {
|
|
2556
|
+
if (!localBlobBuckets.has(key)) {
|
|
2557
|
+
differences.push({ type: "blob-bucket", key, status: "remote only" });
|
|
2558
|
+
}
|
|
2559
|
+
}
|
|
2237
2560
|
// Prompts
|
|
2238
2561
|
for (const key of localPrompts) {
|
|
2239
2562
|
if (!remotePrompts.has(key)) {
|