windmill-cli 1.694.0 → 1.696.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/main.js +597 -325
- package/package.json +2 -2
package/esm/main.js
CHANGED
@@ -16710,7 +16710,7 @@ var init_OpenAPI = __esm(() => {
     PASSWORD: undefined,
     TOKEN: getEnv3("WM_TOKEN"),
     USERNAME: undefined,
-    VERSION: "1.694.0",
+    VERSION: "1.696.0",
     WITH_CREDENTIALS: true,
     interceptors: {
       request: new Interceptors,

@@ -27428,6 +27428,29 @@ async function listRemote(_opts) {
     await requireLogin(_opts);
     remote = workspace.remote;
   }
+  if (_opts.asSuperadmin) {
+    const perPage = 100;
+    let page = 1;
+    const all = [];
+    while (true) {
+      const batch = await listWorkspacesAsSuperAdmin({ page, perPage });
+      all.push(...batch);
+      if (batch.length < perPage)
+        break;
+      page++;
+    }
+    const hasForks2 = all.some((x) => x.parent_workspace_id);
+    const headers2 = hasForks2 ? ["id", "name", "owner", "fork of"] : ["id", "name", "owner"];
+    new Table2().header(headers2).padding(2).border(true).body(all.map((x) => {
+      const row = [x.id, x.name, x.owner];
+      if (hasForks2)
+        row.push(x.parent_workspace_id ?? "-");
+      return row;
+    })).render();
+    info(`Remote: ${colors.bold(remote)}`);
+    info(`Total workspaces: ${colors.green.bold(all.length.toString())} (superadmin)`);
+    return;
+  }
   const userWorkspaces = await listUserWorkspaces();
   const hasForks = userWorkspaces.workspaces.some((x) => x.parent_workspace_id);
   const headers = hasForks ? ["id", "name", "username", "fork of", "disabled"] : ["id", "name", "username", "disabled"];

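The new `--as-superadmin` branch above (exposed on the CLI as `wmill workspace list-remote --as-superadmin`) pages through `listWorkspacesAsSuperAdmin` 100 entries at a time until a short batch signals the last page. A minimal self-contained sketch of the same pagination pattern, with a hypothetical `fetchPage` standing in for the generated API client:

```typescript
// Sketch of the page-based pagination loop used by the superadmin listing.
// `fetchPage` is a placeholder for listWorkspacesAsSuperAdmin({ page, perPage }).
async function collectAllPages<T>(
  fetchPage: (page: number, perPage: number) => Promise<T[]>,
  perPage = 100
): Promise<T[]> {
  const all: T[] = [];
  for (let page = 1; ; page++) {
    const batch = await fetchPage(page, perPage);
    all.push(...batch);
    // A batch shorter than perPage means the server has no further pages.
    if (batch.length < perPage) break;
  }
  return all;
}
```
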
@@ -27638,7 +27661,7 @@ var init_workspace = __esm(async () => {
   ]);
   command2 = new Command().alias("profile").description("workspace related commands").action(list3).command("switch").complete("workspace", async () => (await allWorkspaces()).map((x) => x.name)).description("Switch to another workspace").arguments("<workspace_name:string:workspace>").action(switchC).command("add").description("Add a workspace").arguments("[workspace_name:string] [workspace_id:string] [remote:string]").option("-c --create", "Create the workspace if it does not exist").option("--create-workspace-name <workspace_name:string>", "Specify the workspace name. Ignored if --create is not specified or the workspace already exists. Will default to the workspace id.").option("--create-username <username:string>", "Specify your own username in the newly created workspace. Ignored if --create is not specified, the workspace already exists or automatic username creation is enabled on the instance.", {
     default: "admin"
-  }).action(add).command("remove").description("Remove a workspace").arguments("<workspace_name:string>").action(remove).command("whoami").description("Show the currently active user").action(whoami2).command("list").description("List local workspace profiles").action(list3).command("list-remote").description("List workspaces on the remote server that you have access to").action(listRemote).command("list-forks").description("List forked workspaces on the remote server").action(listForks).command("bind").description("Create or update a workspace entry in wmill.yaml from the active profile").option("--workspace <name:string>", "Workspace name (default: current branch or workspaceId)").option("--branch <branch:string>", "Git branch to associate (default: workspace name)").action((opts) => bind(opts, true)).command("unbind").description("Remove baseUrl and workspaceId from a workspace entry").option("--workspace <name:string>", "Workspace to unbind").action((opts) => bind(opts, false)).command("fork").description("Create a forked workspace").arguments("[workspace_name:string] [workspace_id:string]").option("--create-workspace-name <workspace_name:string>", "Specify the workspace name. Ignored if --create is not specified or the workspace already exists. Will default to the workspace id.").option("--color <color:string>", "Workspace color (hex code, e.g. #ff0000)").option("--datatable-behavior <behavior:string>", "How to handle datatables: skip, schema_only, or schema_and_data (default: interactive prompt)").option("-y --yes", "Skip interactive prompts (defaults datatable behavior to 'skip')").action(createWorkspaceFork2).command("delete-fork").description("Delete a forked workspace and git branch").arguments("<fork_name:string>").option("-y --yes", "Skip confirmation prompt").action(deleteWorkspaceFork).command("merge").description("Compare and deploy changes between a fork and its parent workspace").option("--direction <direction:string>", "Deploy direction: to-parent or to-fork").option("--all", "Deploy all changed items including conflicts").option("--skip-conflicts", "Skip items modified in both workspaces").option("--include <items:string>", "Comma-separated kind:path items to include (e.g. script:f/test/main,flow:f/my/flow)").option("--exclude <items:string>", "Comma-separated kind:path items to exclude").option("--preserve-on-behalf-of", "Preserve original on_behalf_of/permissioned_as values").option("-y --yes", "Non-interactive mode (deploy without prompts)").action(mergeWorkspaces).command("connect-slack").description("Non-interactively connect Slack to the active workspace using a pre-minted bot token (xoxb-...). Produces the same artifacts as the UI OAuth flow: workspace_settings fields, g/slack group, f/slack_bot folder, and the encrypted bot token variable + resource at f/slack_bot/bot_token.").option("--bot-token <bot_token:string>", "Slack bot token (xoxb-...)", { required: true }).option("--team-id <team_id:string>", "Slack team id", { required: true }).option("--team-name <team_name:string>", "Slack team name", { required: true }).action(connectSlack2).command("disconnect-slack").description("Clear slack_team_id / slack_name on the active workspace (marks the workspace as disconnected). Does NOT remove the bot token variable/resource/folder/group — delete those from the local sync folder and run 'wmill sync push' to tear them down. Does NOT remove the workspace-level OAuth override — set slack_oauth_client_id/_secret to '' in settings.yaml and push.").action(disconnectSlack2);
+  }).action(add).command("remove").description("Remove a workspace").arguments("<workspace_name:string>").action(remove).command("whoami").description("Show the currently active user").action(whoami2).command("list").description("List local workspace profiles").action(list3).command("list-remote").description("List workspaces on the remote server that you have access to").option("--as-superadmin", "List ALL workspaces on the instance (requires the token to belong to a superadmin/devops user)").action(listRemote).command("list-forks").description("List forked workspaces on the remote server").action(listForks).command("bind").description("Create or update a workspace entry in wmill.yaml from the active profile").option("--workspace <name:string>", "Workspace name (default: current branch or workspaceId)").option("--branch <branch:string>", "Git branch to associate (default: workspace name)").action((opts) => bind(opts, true)).command("unbind").description("Remove baseUrl and workspaceId from a workspace entry").option("--workspace <name:string>", "Workspace to unbind").action((opts) => bind(opts, false)).command("fork").description("Create a forked workspace").arguments("[workspace_name:string] [workspace_id:string]").option("--create-workspace-name <workspace_name:string>", "Specify the workspace name. Ignored if --create is not specified or the workspace already exists. Will default to the workspace id.").option("--color <color:string>", "Workspace color (hex code, e.g. #ff0000)").option("--datatable-behavior <behavior:string>", "How to handle datatables: skip, schema_only, or schema_and_data (default: interactive prompt)").option("-y --yes", "Skip interactive prompts (defaults datatable behavior to 'skip')").action(createWorkspaceFork2).command("delete-fork").description("Delete a forked workspace and git branch").arguments("<fork_name:string>").option("-y --yes", "Skip confirmation prompt").action(deleteWorkspaceFork).command("merge").description("Compare and deploy changes between a fork and its parent workspace").option("--direction <direction:string>", "Deploy direction: to-parent or to-fork").option("--all", "Deploy all changed items including conflicts").option("--skip-conflicts", "Skip items modified in both workspaces").option("--include <items:string>", "Comma-separated kind:path items to include (e.g. script:f/test/main,flow:f/my/flow)").option("--exclude <items:string>", "Comma-separated kind:path items to exclude").option("--preserve-on-behalf-of", "Preserve original on_behalf_of/permissioned_as values").option("-y --yes", "Non-interactive mode (deploy without prompts)").action(mergeWorkspaces).command("connect-slack").description("Non-interactively connect Slack to the active workspace using a pre-minted bot token (xoxb-...). Produces the same artifacts as the UI OAuth flow: workspace_settings fields, g/slack group, f/slack_bot folder, and the encrypted bot token variable + resource at f/slack_bot/bot_token.").option("--bot-token <bot_token:string>", "Slack bot token (xoxb-...)", { required: true }).option("--team-id <team_id:string>", "Slack team id", { required: true }).option("--team-name <team_name:string>", "Slack team name", { required: true }).action(connectSlack2).command("disconnect-slack").description("Clear slack_team_id / slack_name on the active workspace (marks the workspace as disconnected). Does NOT remove the bot token variable/resource/folder/group — delete those from the local sync folder and run 'wmill sync push' to tear them down. Does NOT remove the workspace-level OAuth override — set slack_oauth_client_id/_secret to '' in settings.yaml and push.").action(disconnectSlack2);
   workspace_default = command2;
 });

@@ -61583,6 +61606,282 @@ var init_tar = __esm(() => {
   init_tar_stream();
 });

+// src/utils/dependency_tree.ts
+var exports_dependency_tree = {};
+__export(exports_dependency_tree, {
+  uploadScripts: () => uploadScripts,
+  DoubleLinkedDependencyTree: () => DoubleLinkedDependencyTree
+});
+async function uploadScripts(tree, workspace) {
+  const scriptHashes = {};
+  const workspaceDeps = [];
+  for (const path7 of tree.allPaths()) {
+    const content = tree.getContent(path7);
+    const itemType = tree.getItemType(path7);
+    if (itemType === "dependencies") {
+      if (content === undefined)
+        continue;
+      const info2 = workspaceDependenciesPathToLanguageAndFilename(path7);
+      if (info2) {
+        const hash2 = await generateHash(content);
+        workspaceDeps.push({ path: path7, language: info2.language, name: info2.name, hash: hash2 });
+      }
+    } else if (itemType === "script") {
+      if (!content)
+        continue;
+      const hash2 = await generateHash(content);
+      scriptHashes[path7] = hash2;
+    }
+  }
+  if (Object.keys(scriptHashes).length === 0 && workspaceDeps.length === 0)
+    return;
+  const mismatched = await diffRawScriptsWithDeployed({
+    workspace: workspace.workspaceId,
+    requestBody: {
+      scripts: scriptHashes,
+      workspace_deps: workspaceDeps
+    }
+  });
+  for (const path7 of mismatched) {
+    const content = tree.getContent(path7);
+    const itemType = tree.getItemType(path7);
+    if (itemType === "dependencies") {
+      if (content !== undefined) {
+        tree.setContentHash(path7, "mismatched");
+      }
+    } else if (content) {
+      const hash2 = await storeRawScriptTemp({
+        workspace: workspace.workspaceId,
+        requestBody: content
+      });
+      tree.setContentHash(path7, hash2);
+    }
+  }
+}
+
+class DoubleLinkedDependencyTree {
+  nodes = new Map;
+  workspaceDeps = {};
+  setWorkspaceDeps(deps) {
+    this.workspaceDeps = deps;
+  }
+  async addNode(path7, content, language, metadata, imports, itemType, folder, originalPath, isDirectlyStale, isRawApp) {
+    const hasWorkspaceDeps = itemType === "script" || itemType === "inline_script";
+    const filteredDeps = hasWorkspaceDeps ? filterWorkspaceDependencies(this.workspaceDeps, content, language) : {};
+    const stalenessHash = await generateScriptHash({}, content, metadata);
+    if (!this.nodes.has(path7)) {
+      this.nodes.set(path7, {
+        content: "",
+        stalenessHash: "",
+        language: "deno",
+        metadata: "",
+        imports: new Set,
+        importedBy: new Set,
+        itemType: "script",
+        folder: "",
+        originalPath: "",
+        isDirectlyStale: false
+      });
+    }
+    const node = this.nodes.get(path7);
+    node.content = content;
+    node.stalenessHash = stalenessHash;
+    node.language = language;
+    node.metadata = metadata;
+    node.itemType = itemType;
+    node.folder = folder;
+    node.originalPath = originalPath;
+    node.isDirectlyStale = isDirectlyStale;
+    node.isRawApp = isRawApp;
+    const filteredDepsPaths = Object.keys(filteredDeps);
+    for (const depsPath of filteredDepsPaths) {
+      if (!this.nodes.has(depsPath)) {
+        const depsInfo = workspaceDependenciesPathToLanguageAndFilename(depsPath);
+        const contentHash = await generateHash(filteredDeps[depsPath] + depsPath);
+        const isUpToDate = await checkifMetadataUptodate(depsPath, contentHash, undefined);
+        this.nodes.set(depsPath, {
+          content: filteredDeps[depsPath],
+          stalenessHash: "",
+          language: depsInfo?.language ?? "deno",
+          metadata: "",
+          imports: new Set,
+          importedBy: new Set,
+          itemType: "dependencies",
+          folder: "",
+          originalPath: depsPath,
+          isDirectlyStale: !isUpToDate
+        });
+      }
+    }
+    const allImports = [...imports, ...filteredDepsPaths];
+    for (const importPath of allImports) {
+      node.imports.add(importPath);
+      if (!this.nodes.has(importPath)) {
+        this.nodes.set(importPath, {
+          content: "",
+          stalenessHash: "",
+          language: "deno",
+          metadata: "",
+          imports: new Set,
+          importedBy: new Set,
+          itemType: "script",
+          folder: "",
+          originalPath: "",
+          isDirectlyStale: false
+        });
+      }
+      this.nodes.get(importPath).importedBy.add(path7);
+    }
+  }
+  getContent(path7) {
+    return this.nodes.get(path7)?.content;
+  }
+  getStalenessHash(path7) {
+    return this.nodes.get(path7)?.stalenessHash;
+  }
+  getContentHash(path7) {
+    return this.nodes.get(path7)?.contentHash;
+  }
+  setContentHash(path7, hash2) {
+    const node = this.nodes.get(path7);
+    if (node) {
+      node.contentHash = hash2;
+    }
+  }
+  getLanguage(path7) {
+    return this.nodes.get(path7)?.language;
+  }
+  getMetadata(path7) {
+    return this.nodes.get(path7)?.metadata;
+  }
+  getStaleReason(path7) {
+    return this.nodes.get(path7)?.staleReason;
+  }
+  getItemType(path7) {
+    return this.nodes.get(path7)?.itemType;
+  }
+  getFolder(path7) {
+    return this.nodes.get(path7)?.folder;
+  }
+  getIsRawApp(path7) {
+    return this.nodes.get(path7)?.isRawApp;
+  }
+  getIsDirectlyStale(path7) {
+    return this.nodes.get(path7)?.isDirectlyStale ?? false;
+  }
+  getOriginalPath(path7) {
+    return this.nodes.get(path7)?.originalPath;
+  }
+  getImports(path7) {
+    return this.nodes.get(path7)?.imports;
+  }
+  isStale(path7) {
+    return this.nodes.get(path7)?.staleReason !== undefined;
+  }
+  propagateStaleness() {
+    const directlyStale = new Set;
+    for (const [path7, node] of this.nodes.entries()) {
+      if (node.isDirectlyStale) {
+        directlyStale.add(path7);
+        node.staleReason = "content changed";
+      }
+    }
+    const allStale = new Set(directlyStale);
+    const queue = [...directlyStale];
+    const visited = new Set;
+    while (queue.length > 0) {
+      const scriptPath = queue.shift();
+      if (visited.has(scriptPath))
+        continue;
+      visited.add(scriptPath);
+      const node = this.nodes.get(scriptPath);
+      if (!node)
+        continue;
+      for (const importer of node.importedBy) {
+        if (!allStale.has(importer)) {
+          allStale.add(importer);
+          queue.push(importer);
+          const importerNode = this.nodes.get(importer);
+          if (importerNode)
+            importerNode.staleReason = `depends on ${scriptPath}`;
+        }
+      }
+    }
+  }
+  traverseTransitive(scriptPath, callback) {
+    const queue = [scriptPath];
+    const visited = new Set;
+    while (queue.length > 0) {
+      const current = queue.shift();
+      if (visited.has(current))
+        continue;
+      visited.add(current);
+      const node = this.nodes.get(current);
+      if (!node)
+        continue;
+      for (const importPath of node.imports) {
+        const importNode = this.nodes.get(importPath);
+        if (importNode) {
+          const stop = callback(importPath, importNode);
+          if (!stop) {
+            queue.push(importPath);
+          }
+        }
+      }
+    }
+  }
+  allPaths() {
+    return this.nodes.keys();
+  }
+  *stalePaths() {
+    for (const [path7, node] of this.nodes.entries()) {
+      if (node.staleReason) {
+        yield path7;
+      }
+    }
+  }
+  has(path7) {
+    return this.nodes.has(path7);
+  }
+  getMismatchedWorkspaceDeps() {
+    const result = {};
+    for (const [path7, node] of this.nodes.entries()) {
+      if (node.itemType === "dependencies" && node.contentHash && node.content !== undefined) {
+        result[path7] = node.content;
+      }
+    }
+    return result;
+  }
+  getTempScriptRefs(scriptPath) {
+    const result = {};
+    this.traverseTransitive(scriptPath, (_path, node) => {
+      if (node.contentHash) {
+        result[_path] = node.contentHash;
+      }
+    });
+    return result;
+  }
+  async persistDepsHashes(depsPaths) {
+    for (const path7 of depsPaths) {
+      const node = this.nodes.get(path7);
+      if (node?.itemType === "dependencies" && node.content !== undefined) {
+        const hash2 = await generateHash(node.content + path7);
+        await updateMetadataGlobalLock(path7, hash2);
+      }
+    }
+  }
+  get size() {
+    return this.nodes.size;
+  }
+}
+var init_dependency_tree = __esm(async () => {
+  init_services_gen();
+  await __promiseAll([
+    init_metadata(),
+    init_utils()
+  ]);
+});
+
 // src/commands/script/script.ts
 import { writeFile as writeFile5, stat as stat4, mkdir as mkdir3 } from "node:fs/promises";
 import { Buffer as Buffer4 } from "node:buffer";

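The core of the new `src/utils/dependency_tree.ts` module is `propagateStaleness`: every node keeps both forward `imports` and reverse `importedBy` edges, so a breadth-first walk starting from the directly changed nodes marks each transitive importer stale and records why. A self-contained sketch of just that reverse-edge walk (the node shape is simplified and the names are illustrative, not the bundle's):

```typescript
// Reverse-edge staleness propagation: once a path's content changed, every
// script that (transitively) imports it must have its lock regenerated too.
type Node = { importedBy: Set<string>; staleReason?: string };

function propagateStaleness(nodes: Map<string, Node>, changed: string[]): void {
  for (const path of changed) {
    const node = nodes.get(path);
    if (node) node.staleReason = "content changed";
  }
  const queue = [...changed];
  const visited = new Set<string>();
  while (queue.length > 0) {
    const current = queue.shift()!;
    if (visited.has(current)) continue;
    visited.add(current);
    for (const importer of nodes.get(current)?.importedBy ?? []) {
      const importerNode = nodes.get(importer);
      if (importerNode && !importerNode.staleReason) {
        importerNode.staleReason = `depends on ${current}`;
        queue.push(importer); // keep walking up the importer chain
      }
    }
  }
}
```
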
@@ -62346,8 +62645,20 @@ async function generateMetadata(opts, scriptPath) {
       info(colors.green.bold("No metadata to update"));
       return;
     }
+    const { DoubleLinkedDependencyTree: DoubleLinkedDependencyTree2, uploadScripts: uploadScripts2 } = await init_dependency_tree().then(() => exports_dependency_tree);
+    const tree = new DoubleLinkedDependencyTree2;
+    tree.setWorkspaceDeps(rawWorkspaceDependencies);
+    for (const e of Object.keys(elems)) {
+      await generateScriptMetadataInternal(e, workspace, opts, true, true, rawWorkspaceDependencies, codebases, false, tree);
+    }
+    tree.propagateStaleness();
+    try {
+      await uploadScripts2(tree, workspace);
+    } catch (e) {
+      warn(colors.yellow(`Failed to upload scripts to temp storage (backend may be too old): ${e}. ` + `Locks will be generated using deployed script versions only — locally modified ` + `relative imports may not be reflected.`));
+    }
     for (const e of Object.keys(elems)) {
-      await generateScriptMetadataInternal(e, workspace, opts, false, true, rawWorkspaceDependencies, codebases, false);
+      await generateScriptMetadataInternal(e, workspace, opts, false, true, rawWorkspaceDependencies, codebases, false, tree);
     }
   }
 }

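`generateMetadata` now makes two passes over the changed scripts: a first pass that only feeds the dependency tree, then `propagateStaleness` plus the `uploadScripts2` call, then the real lock-generation pass with the tree attached. The upload step is hash-first: the client sends content digests via `diffRawScriptsWithDeployed` and only uploads the bodies the server reports as mismatched. A self-contained sketch of that handshake (the digest algorithm is an assumption, this hunk does not show what `generateHash` uses, and both endpoint parameters are placeholders for the generated API clients):

```typescript
import { createHash } from "node:crypto";

// Hash-first sync: send digests, receive the paths whose deployed content
// differs, upload only those bodies to temp storage and keep the returned refs.
function digest(content: string): string {
  // SHA-256 is assumed here; the bundle's generateHash may differ.
  return createHash("sha256").update(content).digest("hex");
}

async function syncChanged(
  files: Record<string, string>,
  diffWithDeployed: (hashes: Record<string, string>) => Promise<string[]>,
  storeTemp: (content: string) => Promise<string>
): Promise<Record<string, string>> {
  const hashes = Object.fromEntries(
    Object.entries(files).map(([path, content]) => [path, digest(content)])
  );
  const refs: Record<string, string> = {};
  for (const path of await diffWithDeployed(hashes)) {
    refs[path] = await storeTemp(files[path]); // temp-storage ref for this body
  }
  return refs;
}
```
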
@@ -64272,281 +64583,11 @@ var init_flow_metadata = __esm(async () => {
   import_yaml9 = __toESM(require_dist(), 1);
 });

-// src/utils/dependency_tree.ts
-async function uploadScripts(tree, workspace) {
-  const scriptHashes = {};
-  const workspaceDeps = [];
-  for (const path11 of tree.allPaths()) {
-    const content = tree.getContent(path11);
-    const itemType = tree.getItemType(path11);
-    if (itemType === "dependencies") {
-      if (content === undefined)
-        continue;
-      const info2 = workspaceDependenciesPathToLanguageAndFilename(path11);
-      if (info2) {
-        const hash2 = await generateHash(content);
-        workspaceDeps.push({ path: path11, language: info2.language, name: info2.name, hash: hash2 });
-      }
-    } else if (itemType === "script") {
-      if (!content)
-        continue;
-      const hash2 = await generateHash(content);
-      scriptHashes[path11] = hash2;
-    }
-  }
-  if (Object.keys(scriptHashes).length === 0 && workspaceDeps.length === 0)
-    return;
-  const mismatched = await diffRawScriptsWithDeployed({
-    workspace: workspace.workspaceId,
-    requestBody: {
-      scripts: scriptHashes,
-      workspace_deps: workspaceDeps
-    }
-  });
-  for (const path11 of mismatched) {
-    const content = tree.getContent(path11);
-    const itemType = tree.getItemType(path11);
-    if (itemType === "dependencies") {
-      if (content !== undefined) {
-        tree.setContentHash(path11, "mismatched");
-      }
-    } else if (content) {
-      const hash2 = await storeRawScriptTemp({
-        workspace: workspace.workspaceId,
-        requestBody: content
-      });
-      tree.setContentHash(path11, hash2);
-    }
-  }
-}
-
-class DoubleLinkedDependencyTree {
-  nodes = new Map;
-  workspaceDeps = {};
-  setWorkspaceDeps(deps) {
-    this.workspaceDeps = deps;
-  }
-  async addNode(path11, content, language, metadata, imports, itemType, folder, originalPath, isDirectlyStale, isRawApp) {
-    const hasWorkspaceDeps = itemType === "script" || itemType === "inline_script";
-    const filteredDeps = hasWorkspaceDeps ? filterWorkspaceDependencies(this.workspaceDeps, content, language) : {};
-    const stalenessHash = await generateScriptHash({}, content, metadata);
-    if (!this.nodes.has(path11)) {
-      this.nodes.set(path11, {
-        content: "",
-        stalenessHash: "",
-        language: "deno",
-        metadata: "",
-        imports: new Set,
-        importedBy: new Set,
-        itemType: "script",
-        folder: "",
-        originalPath: "",
-        isDirectlyStale: false
-      });
-    }
-    const node = this.nodes.get(path11);
-    node.content = content;
-    node.stalenessHash = stalenessHash;
-    node.language = language;
-    node.metadata = metadata;
-    node.itemType = itemType;
-    node.folder = folder;
-    node.originalPath = originalPath;
-    node.isDirectlyStale = isDirectlyStale;
-    node.isRawApp = isRawApp;
-    const filteredDepsPaths = Object.keys(filteredDeps);
-    for (const depsPath of filteredDepsPaths) {
-      if (!this.nodes.has(depsPath)) {
-        const depsInfo = workspaceDependenciesPathToLanguageAndFilename(depsPath);
-        const contentHash = await generateHash(filteredDeps[depsPath] + depsPath);
-        const isUpToDate = await checkifMetadataUptodate(depsPath, contentHash, undefined);
-        this.nodes.set(depsPath, {
-          content: filteredDeps[depsPath],
-          stalenessHash: "",
-          language: depsInfo?.language ?? "deno",
-          metadata: "",
-          imports: new Set,
-          importedBy: new Set,
-          itemType: "dependencies",
-          folder: "",
-          originalPath: depsPath,
-          isDirectlyStale: !isUpToDate
-        });
-      }
-    }
-    const allImports = [...imports, ...filteredDepsPaths];
-    for (const importPath of allImports) {
-      node.imports.add(importPath);
-      if (!this.nodes.has(importPath)) {
-        this.nodes.set(importPath, {
-          content: "",
-          stalenessHash: "",
-          language: "deno",
-          metadata: "",
-          imports: new Set,
-          importedBy: new Set,
-          itemType: "script",
-          folder: "",
-          originalPath: "",
-          isDirectlyStale: false
-        });
-      }
-      this.nodes.get(importPath).importedBy.add(path11);
-    }
-  }
-  getContent(path11) {
-    return this.nodes.get(path11)?.content;
-  }
-  getStalenessHash(path11) {
-    return this.nodes.get(path11)?.stalenessHash;
-  }
-  getContentHash(path11) {
-    return this.nodes.get(path11)?.contentHash;
-  }
-  setContentHash(path11, hash2) {
-    const node = this.nodes.get(path11);
-    if (node) {
-      node.contentHash = hash2;
-    }
-  }
-  getLanguage(path11) {
-    return this.nodes.get(path11)?.language;
-  }
-  getMetadata(path11) {
-    return this.nodes.get(path11)?.metadata;
-  }
-  getStaleReason(path11) {
-    return this.nodes.get(path11)?.staleReason;
-  }
-  getItemType(path11) {
-    return this.nodes.get(path11)?.itemType;
-  }
-  getFolder(path11) {
-    return this.nodes.get(path11)?.folder;
-  }
-  getIsRawApp(path11) {
-    return this.nodes.get(path11)?.isRawApp;
-  }
-  getIsDirectlyStale(path11) {
-    return this.nodes.get(path11)?.isDirectlyStale ?? false;
-  }
-  getOriginalPath(path11) {
-    return this.nodes.get(path11)?.originalPath;
-  }
-  getImports(path11) {
-    return this.nodes.get(path11)?.imports;
-  }
-  isStale(path11) {
-    return this.nodes.get(path11)?.staleReason !== undefined;
-  }
-  propagateStaleness() {
-    const directlyStale = new Set;
-    for (const [path11, node] of this.nodes.entries()) {
-      if (node.isDirectlyStale) {
-        directlyStale.add(path11);
-        node.staleReason = "content changed";
-      }
-    }
-    const allStale = new Set(directlyStale);
-    const queue = [...directlyStale];
-    const visited = new Set;
-    while (queue.length > 0) {
-      const scriptPath = queue.shift();
-      if (visited.has(scriptPath))
-        continue;
-      visited.add(scriptPath);
-      const node = this.nodes.get(scriptPath);
-      if (!node)
-        continue;
-      for (const importer of node.importedBy) {
-        if (!allStale.has(importer)) {
-          allStale.add(importer);
-          queue.push(importer);
-          const importerNode = this.nodes.get(importer);
-          if (importerNode)
-            importerNode.staleReason = `depends on ${scriptPath}`;
-        }
-      }
-    }
-  }
-  traverseTransitive(scriptPath, callback) {
-    const queue = [scriptPath];
-    const visited = new Set;
-    while (queue.length > 0) {
-      const current = queue.shift();
-      if (visited.has(current))
-        continue;
-      visited.add(current);
-      const node = this.nodes.get(current);
-      if (!node)
-        continue;
-      for (const importPath of node.imports) {
-        const importNode = this.nodes.get(importPath);
-        if (importNode) {
-          const stop = callback(importPath, importNode);
-          if (!stop) {
-            queue.push(importPath);
-          }
-        }
-      }
-    }
-  }
-  allPaths() {
-    return this.nodes.keys();
-  }
-  *stalePaths() {
-    for (const [path11, node] of this.nodes.entries()) {
-      if (node.staleReason) {
-        yield path11;
-      }
-    }
-  }
-  has(path11) {
-    return this.nodes.has(path11);
-  }
-  getMismatchedWorkspaceDeps() {
-    const result = {};
-    for (const [path11, node] of this.nodes.entries()) {
-      if (node.itemType === "dependencies" && node.contentHash && node.content !== undefined) {
-        result[path11] = node.content;
-      }
-    }
-    return result;
-  }
-  getTempScriptRefs(scriptPath) {
-    const result = {};
-    this.traverseTransitive(scriptPath, (_path, node) => {
-      if (node.contentHash) {
-        result[_path] = node.contentHash;
-      }
-    });
-    return result;
-  }
-  async persistDepsHashes(depsPaths) {
-    for (const path11 of depsPaths) {
-      const node = this.nodes.get(path11);
-      if (node?.itemType === "dependencies" && node.content !== undefined) {
-        const hash2 = await generateHash(node.content + path11);
-        await updateMetadataGlobalLock(path11, hash2);
-      }
-    }
-  }
-  get size() {
-    return this.nodes.size;
-  }
-}
-var init_dependency_tree = __esm(async () => {
-  init_services_gen();
-  await __promiseAll([
-    init_metadata(),
-    init_utils()
-  ]);
-});
-
 // src/commands/generate-metadata/generate-metadata.ts
 var exports_generate_metadata = {};
 __export(exports_generate_metadata, {
   rehashOnly: () => rehashOnly,
+  generateMetadata: () => generateMetadata2,
   default: () => generate_metadata_default
 });
 import { sep as SEP8 } from "node:path";

@@ -66754,12 +66795,65 @@ Push aborted: ${lockIssues.length} script(s) missing locks.`));
   const staleScripts = [];
   const staleFlows = [];
   const staleApps = [];
+  const tree = autoRegenerate ? new DoubleLinkedDependencyTree : undefined;
+  if (tree)
+    tree.setWorkspaceDeps(rawWorkspaceDependencies);
   for (const change of tracker.scripts) {
-    const stale = await generateScriptMetadataInternal(change, workspace, opts,
-    if (stale) {
+    const stale = await generateScriptMetadataInternal(change, workspace, opts, true, true, rawWorkspaceDependencies, codebases, false, tree);
+    if (!autoRegenerate && stale) {
       staleScripts.push(stale);
     }
   }
+  for (const change of tracker.flows) {
+    const stale = await generateFlowLockInternal(change, true, workspace, opts, false, true, tree);
+    if (!autoRegenerate && stale) {
+      staleFlows.push(stale);
+    }
+  }
+  for (const change of tracker.apps) {
+    const stale = await generateAppLocksInternal(change, false, true, workspace, opts, true, true, tree);
+    if (!autoRegenerate && stale) {
+      staleApps.push(stale);
+    }
+  }
+  for (const change of tracker.rawApps) {
+    const stale = await generateAppLocksInternal(change, true, true, workspace, opts, true, true, tree);
+    if (!autoRegenerate && stale) {
+      staleApps.push(stale);
+    }
+  }
+  if (autoRegenerate && tree) {
+    tree.propagateStaleness();
+    try {
+      await uploadScripts(tree, workspace);
+    } catch (e) {
+      warn(colors.yellow(`Failed to upload scripts to temp storage (backend may be too old): ${e}. ` + `Locks will be generated using deployed script versions only — locally modified ` + `relative imports may not be reflected.`));
+    }
+    for (const change of tracker.scripts) {
+      const generated = await generateScriptMetadataInternal(change, workspace, opts, false, true, rawWorkspaceDependencies, codebases, false, tree);
+      if (generated) {
+        staleScripts.push(generated);
+      }
+    }
+    for (const change of tracker.flows) {
+      const generated = await generateFlowLockInternal(change, false, workspace, opts, false, true, tree);
+      if (generated) {
+        staleFlows.push(generated);
+      }
+    }
+    for (const change of tracker.apps) {
+      const generated = await generateAppLocksInternal(change, false, false, workspace, opts, true, true, tree);
+      if (generated) {
+        staleApps.push(generated);
+      }
+    }
+    for (const change of tracker.rawApps) {
+      const generated = await generateAppLocksInternal(change, true, false, workspace, opts, true, true, tree);
+      if (generated) {
+        staleApps.push(generated);
+      }
+    }
+  }
   if (staleScripts.length > 0) {
     info("");
     if (autoRegenerate) {

@@ -66776,12 +66870,6 @@ Push aborted: ${lockIssues.length} script(s) missing locks.`));
     }
     info("");
   }
-  for (const change of tracker.flows) {
-    const stale = await generateFlowLockInternal(change, !autoRegenerate, workspace, opts, false, true);
-    if (stale) {
-      staleFlows.push(stale);
-    }
-  }
   if (staleFlows.length > 0) {
     if (autoRegenerate) {
       info("Auto-regenerated locks for stale flows:");

@@ -66797,18 +66885,6 @@ Push aborted: ${lockIssues.length} script(s) missing locks.`));
     }
     info("");
   }
-  for (const change of tracker.apps) {
-    const stale = await generateAppLocksInternal(change, false, !autoRegenerate, workspace, opts, true, true);
-    if (stale) {
-      staleApps.push(stale);
-    }
-  }
-  for (const change of tracker.rawApps) {
-    const stale = await generateAppLocksInternal(change, true, !autoRegenerate, workspace, opts, true, true);
-    if (stale) {
-      staleApps.push(stale);
-    }
-  }
   if (staleApps.length > 0) {
     if (autoRegenerate) {
       info("Auto-regenerated locks for stale apps:");

@@ -67511,6 +67587,7 @@ var init_sync = __esm(async () => {
     init_types(),
     init_codebase(),
     init_metadata(),
+    init_dependency_tree(),
     init_resource(),
     init_flow_metadata(),
     init_app(),

@@ -79022,6 +79099,37 @@ Name the parameters by adding comments before the statement:
 -- @name2 (int64) = 0
 SELECT * FROM users WHERE name = @name1 AND age > @name2;
 \`\`\`
+
+## Receiving an S3Object as a script parameter
+
+Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+it, downloads the file, and binds it as a \`STRING\` JSON parameter — Parquet/CSV
+files are decoded server-side into a JSON array of records, JSON/JSONL pass
+through. Consume with \`JSON_EXTRACT_ARRAY\` / \`JSON_VALUE\`:
+
+\`\`\`sql
+-- @file (s3object)
+SELECT
+  CAST(JSON_VALUE(row, '$.id') AS INT64) AS id,
+  JSON_VALUE(row, '$.name') AS name
+FROM UNNEST(JSON_EXTRACT_ARRAY(@file)) AS row;
+\`\`\`
+
+## Streaming query results to S3
+
+Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+as the script result.
+
+\`\`\`sql
+-- s3 prefix=exports/users format=parquet
+SELECT id, name FROM users;
+\`\`\`
+
+All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+\`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+being buffered, bypassing the 10000-row return cap.
 `,
 "write-script-bun": `---
 name: write-script-bun

@@ -79145,19 +79253,20 @@ export async function preprocessor(event: Event) {

 ## S3 Object Operations

-Windmill provides built-in support for S3-compatible storage operations.
-
-### S3Object Type
+Windmill provides built-in support for S3-compatible storage operations. The \`wmill.S3Object\` type covers both the \`s3://storage/key\` URI form (\`s3:///key\` for the workspace default storage) and the \`{ s3, storage? }\` record form — always use it instead of redefining your own.

-
+### Receiving an S3Object as a script parameter

 \`\`\`typescript
-
-
-
+import * as wmill from "windmill-client";
+
+export async function main(file: wmill.S3Object) {
+  const content = await wmill.loadS3File(file);
+  // ...
+}
 \`\`\`

-
+### S3 operations

 \`\`\`typescript
 import * as wmill from "windmill-client";

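The rewritten prompt text states that `wmill.S3Object` accepts either the record form or the `s3://` URI string form. A small illustration of both, assuming the union type behaves exactly as the prompt describes (the paths here are made up):

```typescript
import * as wmill from "windmill-client";

// Record form: object key plus an optional named storage.
const byRecord: wmill.S3Object = { s3: "exports/users.parquet", storage: "archive" };

// URI form: "s3://<storage>/<key>"; an empty storage segment ("s3:///key")
// targets the workspace default storage.
const byUri: wmill.S3Object = "s3://archive/exports/users.parquet";
const onDefaultStorage: wmill.S3Object = "s3:///exports/users.parquet";
```
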
@@ -79169,7 +79278,7 @@ const content: Uint8Array = await wmill.loadS3File(s3object);
 const blob: Blob = await wmill.loadS3FileStream(s3object);

 // Write file to S3
-const result: S3Object = await wmill.writeS3File(
+const result: wmill.S3Object = await wmill.writeS3File(
   s3object, // Target path (or undefined to auto-generate)
   fileContent, // string or Blob
   s3ResourcePath // Optional: specific S3 resource to use

@@ -79835,19 +79944,20 @@ export async function preprocessor(event: Event) {

 ## S3 Object Operations

-Windmill provides built-in support for S3-compatible storage operations.
+Windmill provides built-in support for S3-compatible storage operations. The \`wmill.S3Object\` type covers both the \`s3://storage/key\` URI form (\`s3:///key\` for the workspace default storage) and the \`{ s3, storage? }\` record form — always use it instead of redefining your own.

-### S3Object
-
-The S3Object type represents a file in S3 storage:
+### Receiving an S3Object as a script parameter

 \`\`\`typescript
-
-
-
+import * as wmill from "windmill-client";
+
+export async function main(file: wmill.S3Object) {
+  const content = await wmill.loadS3File(file);
+  // ...
+}
 \`\`\`

-
+### S3 operations

 \`\`\`typescript
 import * as wmill from "windmill-client";

@@ -79859,7 +79969,7 @@ const content: Uint8Array = await wmill.loadS3File(s3object);
 const blob: Blob = await wmill.loadS3FileStream(s3object);

 // Write file to S3
-const result: S3Object = await wmill.writeS3File(
+const result: wmill.S3Object = await wmill.writeS3File(
   s3object, // Target path (or undefined to auto-generate)
   fileContent, // string or Blob
   s3ResourcePath // Optional: specific S3 resource to use

@@ -80613,19 +80723,20 @@ export async function preprocessor(event: Event) {

 ## S3 Object Operations

-Windmill provides built-in support for S3-compatible storage operations.
-
-### S3Object Type
+Windmill provides built-in support for S3-compatible storage operations. The \`wmill.S3Object\` type covers both the \`s3://storage/key\` URI form (\`s3:///key\` for the workspace default storage) and the \`{ s3, storage? }\` record form — always use it instead of redefining your own.

-
+### Receiving an S3Object as a script parameter

 \`\`\`typescript
-
-
-
+import * as wmill from "windmill-client";
+
+export async function main(file: wmill.S3Object) {
+  const content = await wmill.loadS3File(file);
+  // ...
+}
 \`\`\`

-
+### S3 operations

 \`\`\`typescript
 import * as wmill from "windmill-client";

@@ -80637,7 +80748,7 @@ const content: Uint8Array = await wmill.loadS3File(s3object);
 const blob: Blob = await wmill.loadS3FileStream(s3object);

 // Write file to S3
-const result: S3Object = await wmill.writeS3File(
+const result: wmill.S3Object = await wmill.writeS3File(
   s3object, // Target path (or undefined to auto-generate)
   fileContent, // string or Blob
   s3ResourcePath // Optional: specific S3 resource to use

@@ -81274,6 +81385,30 @@ SELECT * FROM read_parquet('s3:///path/to/file.parquet');
 -- JSON files
 SELECT * FROM read_json('s3:///path/to/file.json');
 \`\`\`
+
+### Receiving an S3Object as a script parameter
+
+Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for it
+and binds the arg as the bare \`s3://storage/key\` URI, which DuckDB's reader
+functions consume directly:
+
+\`\`\`sql
+-- $file (s3object)
+SELECT * FROM read_parquet($file);
+\`\`\`
+
+Works with any DuckDB reader: \`read_csv($file)\`, \`read_json($file)\`, etc.
+
+### Writing query results to S3
+
+DuckDB writes to S3 natively via \`COPY ... TO\`:
+
+\`\`\`sql
+COPY (SELECT * FROM users) TO 's3:///exports/users.parquet' (FORMAT PARQUET);
+\`\`\`
+
+Use this instead of the \`-- s3\` streaming directive supported by the other SQL
+dialects — that directive is not available in DuckDB.
 `,
 "write-script-go": `---
 name: write-script-go

@@ -81590,6 +81725,36 @@ Name the parameters by adding comments before the statement:
 -- @P2 name2 (int) = 0
 SELECT * FROM users WHERE name = @P1 AND age > @P2;
 \`\`\`
+
+## Receiving an S3Object as a script parameter
+
+Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+it, downloads the file, and binds it as \`nvarchar(max)\` JSON text — Parquet/CSV
+files are decoded server-side into a JSON array of records, JSON/JSONL pass
+through. Consume with \`OPENJSON\`:
+
+\`\`\`sql
+-- @P1 file (s3object)
+SELECT id, name
+FROM OPENJSON(@P1)
+WITH (id INT, name NVARCHAR(200));
+\`\`\`
+
+## Streaming query results to S3
+
+Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+as the script result.
+
+\`\`\`sql
+-- s3 prefix=exports/users format=parquet
+SELECT id, name FROM users;
+\`\`\`
+
+All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+\`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+being buffered as the script return value.
 `,
 "write-script-mysql": `---
 name: write-script-mysql

@@ -81642,6 +81807,37 @@ Name the parameters by adding comments before the statement:
 -- ? name2 (int) = 0
 SELECT * FROM users WHERE name = ? AND age > ?;
 \`\`\`
+
+## Receiving an S3Object as a script parameter
+
+Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+it, downloads the file, and binds it as JSON text — Parquet/CSV files are
+decoded server-side into a JSON array of records, JSON/JSONL pass through.
+Consume with \`JSON_TABLE\`:
+
+\`\`\`sql
+-- ? file (s3object)
+SELECT id, name
+FROM JSON_TABLE(?, '$[*]'
+  COLUMNS (id INT PATH '$.id', name VARCHAR(200) PATH '$.name')
+) AS r;
+\`\`\`
+
+## Streaming query results to S3
+
+Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+as the script result.
+
+\`\`\`sql
+-- s3 prefix=exports/users format=parquet
+SELECT id, name FROM users;
+\`\`\`
+
+All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+\`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+being buffered as the script return value.
 `,
 "write-script-nativets": `---
 name: write-script-nativets

@@ -82449,6 +82645,35 @@ Name the parameters by adding comments at the beginning of the script (without s
 -- $2 name2 = default_value
 SELECT * FROM users WHERE name = $1::TEXT AND age > $2::INT;
 \`\`\`
+
+## Receiving an S3Object as a script parameter
+
+Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+it, downloads the file, and binds it as a \`jsonb\` parameter — Parquet/CSV files
+are decoded server-side into a JSON array of records, JSON/JSONL pass through.
+Consume with \`jsonb_to_recordset\` (or any \`jsonb\` API):
+
+\`\`\`sql
+-- $1 file (s3object)
+SELECT *
+FROM jsonb_to_recordset($1::jsonb) AS r(id INT, name TEXT);
+\`\`\`
+
+## Streaming query results to S3
+
+Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+as the script result.
+
+\`\`\`sql
+-- s3 prefix=exports/users format=parquet
+SELECT id, name FROM users;
+\`\`\`
+
+All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+\`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+being buffered as the script return value.
 `,
 "write-script-powershell": `---
 name: write-script-powershell

@@ -82686,6 +82911,21 @@ def preprocessor(event: Event):

 Windmill provides built-in support for S3-compatible storage operations.

+### Receiving an S3Object as a script parameter
+
+To accept a file from S3 as input to a script, type the parameter with \`S3Object\` (imported from \`wmill\`):
+
+\`\`\`python
+import wmill
+from wmill import S3Object
+
+def main(file: S3Object):
+    content = wmill.load_s3_file(file)
+    # ...
+\`\`\`
+
+### S3 operations
+
 \`\`\`python
 import wmill

@@ -83690,6 +83930,37 @@ Name the parameters by adding comments before the statement:
 -- ? name2 (number) = 0
 SELECT * FROM users WHERE name = ? AND age > ?;
 \`\`\`
+
+## Receiving an S3Object as a script parameter
+
+Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+it, downloads the file, and binds it as JSON text — Parquet/CSV files are
+decoded server-side into a JSON array of records, JSON/JSONL pass through.
+Wrap the bind with \`PARSE_JSON(?)\` and walk it with \`LATERAL FLATTEN\`:
+
+\`\`\`sql
+-- ? file (s3object)
+SELECT
+  v.value:id::NUMBER AS id,
+  v.value:name::STRING AS name
+FROM LATERAL FLATTEN(input => PARSE_JSON(?)) v;
+\`\`\`
+
+## Streaming query results to S3
+
+Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+as the script result.
+
+\`\`\`sql
+-- s3 prefix=exports/users format=parquet
+SELECT id, name FROM users;
+\`\`\`
+
+All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+\`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+being buffered, bypassing the 10000-row return cap.
 `,
 "write-flow": `---
 name: write-flow

@@ -84813,12 +85084,12 @@ Use this guide when writing or modifying Windmill Workflow-as-Code (WAC) scripts
 WAC is authored as a Windmill script and deployed with the normal script workflow. It is not an OpenFlow YAML flow.

 Supported WAC authoring targets:
-- TypeScript scripts that import from \`windmill-client\`
+- Bun TypeScript scripts that import from \`windmill-client\`
 - Python 3 scripts that import from \`wmill\`

 ## File Shape

-TypeScript:
+Bun TypeScript:

 \`\`\`typescript
 import {

@@ -84860,7 +85131,7 @@ async def main(x: str):

 Rules:
 - Do not call \`main\`.
-- TypeScript should export the workflow entrypoint, preferably \`export const main = workflow(async (...) => { ... })\`.
+- Bun TypeScript should export the workflow entrypoint, preferably \`export const main = workflow(async (...) => { ... })\`.
 - Python must use \`@workflow\` on an async top-level function, usually \`main\`.
 - Define task functions and \`taskScript\`/\`task_script\` or \`taskFlow\`/\`task_flow\` assignments at module top level with stable names.
 - Use the exact SDK names. Do not alias \`workflow\`, \`task\`, \`taskScript\`, \`taskFlow\`, \`step\`, \`sleep\`, \`waitForApproval\`, \`task_script\`, \`task_flow\`, or \`wait_for_approval\`; the WAC parser recognizes these names directly.

@@ -85823,6 +86094,7 @@ workspace related commands
 - \`workspace whoami\` - Show the currently active user
 - \`workspace list\` - List local workspace profiles
 - \`workspace list-remote\` - List workspaces on the remote server that you have access to
+  - \`--as-superadmin\` - List ALL workspaces on the instance (requires the token to belong to a superadmin/devops user)
 - \`workspace list-forks\` - List forked workspaces on the remote server
 - \`workspace bind\` - Create or update a workspace entry in wmill.yaml from the active profile
   - \`--workspace <name:string>\` - Workspace name (default: current branch or workspaceId)

@@ -88745,7 +89017,7 @@ var config_default = command35;

 // src/main.ts
 await init_context();
-var VERSION = "1.694.0";
+var VERSION = "1.696.0";
 async function checkVersionSafe(cmd) {
   const mainCommand = cmd.getMainCommand();
   const upgradeCommand = mainCommand.getCommand("upgrade");

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "windmill-cli",
-  "version": "1.694.0",
+  "version": "1.696.0",
   "description": "CLI for Windmill",
   "license": "Apache 2.0",
   "type": "module",

@@ -18,7 +18,7 @@
   "dependencies": {
     "esbuild": "0.28.0",
     "windmill-parser-wasm-py": "1.693.1",
-    "windmill-parser-wasm-ts": "1.
+    "windmill-parser-wasm-ts": "1.695.0",
     "windmill-parser-wasm-regex": "1.692.0",
     "windmill-parser-wasm-go": "1.510.1",
     "windmill-parser-wasm-php": "1.647.1",