windmill-cli 1.606.1 → 1.608.0

This diff shows the content of publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between the two versions as published.
@@ -38,7 +38,7 @@ var __defProp = Object.defineProperty, __getOwnPropDesc = Object.getOwnPropertyD
  __spread: () => __spread,
  __spreadArrays: () => __spreadArrays,
  __values: () => __values
- });
+ }, 1);
  function __extends(c, L) {
  extendStatics(c, L);
  function R() {
@@ -535,7 +535,7 @@ function toUtf8$1(c) {
  var init_whatwgEncodingApi = __esmMin((() => { })), dist_es_exports = /* @__PURE__ */ __export({
  fromUtf8: () => fromUtf8$3,
  toUtf8: () => toUtf8$2
- }), fromUtf8$3, toUtf8$2, init_dist_es = __esmMin((() => {
+ }, 1), fromUtf8$3, toUtf8$2, init_dist_es = __esmMin((() => {
  init_pureJs(), init_whatwgEncodingApi(), fromUtf8$3 = (c) => typeof TextEncoder == "function" ? fromUtf8$2(c) : fromUtf8$1(c), toUtf8$2 = (c) => typeof TextDecoder == "function" ? toUtf8$1(c) : toUtf8(c);
  })), require_convertToBuffer = /* @__PURE__ */ __commonJSMin(((c) => {
  Object.defineProperty(c, "__esModule", { value: !0 }), c.convertToBuffer = void 0;
@@ -783,25 +783,135 @@ function getIntrospectionQuery(c) {
  let ColumnIdentity = /* @__PURE__ */ function (c) {
  return c.ByDefault = "By Default", c.Always = "Always", c.No = "No", c;
  }({});
- getIntrospectionQuery();
- function buildVisibleFieldList(c, L) {
- return c.filter((c) => c && c.ignored !== !0).map((c) => {
- switch (L) {
- case "postgresql": return `"${c?.field}"`;
- case "ms_sql_server": return `[${c?.field}]`;
- case "mysql": return `\`${c?.field}\``;
- case "snowflake": return `"${c?.field}"`;
- case "bigquery": return `\`${c?.field}\``;
- case "duckdb": return `"${c?.field}"`;
- default: throw Error("Unsupported database type: " + L);
+ var legacyScripts = {
+ postgresql: {
+ code: "SELECT table_name, column_name, udt_name, column_default, is_nullable, table_schema FROM information_schema.columns WHERE table_schema != 'pg_catalog' AND table_schema != 'information_schema'",
+ processingFn: (c) => {
+ let L = c.reduce((c, L) => {
+ let R = L.table_schema;
+ return delete L.table_schema, c[R] = c[R] || [], (L.table_name || L.column_name) && c[R].push(L), c;
+ }, {}), R = {};
+ for (let c in L)
+ R[c] = L[c].reduce((c, L) => {
+ let R = L.table_name;
+ delete L.table_name, c[R] = c[R] || {};
+ let z = {
+ type: L.udt_name,
+ required: L.is_nullable === "NO"
+ };
+ return L.column_default && (z.default = L.column_default), c[R][L.column_name] = z, c;
+ }, {});
+ return R;
+ },
+ lang: "postgresql",
+ argName: "database"
+ },
+ mysql: {
+ code: "SELECT DATABASE() AS default_db_name, TABLE_SCHEMA, TABLE_NAME, DATA_TYPE, COLUMN_NAME, COLUMN_DEFAULT FROM information_schema.columns WHERE table_schema = DATABASE() OR table_schema NOT IN ('information_schema', 'mysql', 'performance_schema', 'sys', '_vt');",
+ processingFn: (c) => {
+ let L = c.reduce((c, L) => {
+ let R = L.TABLE_SCHEMA;
+ return delete L.TABLE_SCHEMA, c[R] = c[R] || [], c[R].push(L), c;
+ }, {}), R = {};
+ for (let c in L)
+ R[c] = L[c].reduce((c, L) => {
+ let R = L.TABLE_NAME;
+ delete L.TABLE_NAME, c[R] = c[R] || {};
+ let z = {
+ type: L.DATA_TYPE,
+ required: L.is_nullable === "NO"
+ };
+ return L.column_default && (z.default = L.COLUMN_DEFAULT), c[R][L.COLUMN_NAME] = z, c;
+ }, {});
+ return R;
+ },
+ lang: "mysql",
+ argName: "database"
+ },
+ graphql: {
+ code: getIntrospectionQuery(),
+ lang: "graphql",
+ argName: "api"
+ },
+ bigquery: {
+ code: "import { BigQuery } from '@google-cloud/bigquery@7.5.0';\nexport async function main(args: bigquery) {\nconst bq = new BigQuery({\n credentials: args\n})\nconst [datasets] = await bq.getDatasets();\nconst schema = {}\nfor (const dataset of datasets) {\n schema[dataset.id] = {}\n const query = \"SELECT table_name, ARRAY_AGG(STRUCT(if(is_nullable = 'YES', true, false) AS required, column_name AS name, data_type AS type, if(column_default = 'NULL', null, column_default) AS `default`) ORDER BY ordinal_position) AS schema FROM `{dataset.id}`.INFORMATION_SCHEMA.COLUMNS GROUP BY table_name\".replace('{dataset.id}', dataset.id)\n const [rows] = await bq.query(query)\n for (const row of rows) {\n schema[dataset.id][row.table_name] = {}\n for (const col of row.schema) {\n const colName = col.name\n delete col.name\n if (col.default === null) {\n delete col.default\n }\n schema[dataset.id][row.table_name][colName] = col\n }\n }\n}\nreturn schema\n}",
+ lang: "bun",
+ argName: "args"
+ },
+ snowflake: {
+ code: "select TABLE_SCHEMA, TABLE_NAME, DATA_TYPE, COLUMN_NAME, COLUMN_DEFAULT, IS_NULLABLE from information_schema.columns where table_schema != 'INFORMATION_SCHEMA'",
+ lang: "snowflake",
+ processingFn: (c) => {
+ let L = {};
+ for (let R of c)
+ R.TABLE_SCHEMA in L || (L[R.TABLE_SCHEMA] = {}), R.TABLE_NAME in L[R.TABLE_SCHEMA] || (L[R.TABLE_SCHEMA][R.TABLE_NAME] = {}), L[R.TABLE_SCHEMA][R.TABLE_NAME][R.COLUMN_NAME] = {
+ type: R.DATA_TYPE,
+ required: R.IS_NULLABLE === "YES"
+ }, R.COLUMN_DEFAULT !== null && (L[R.TABLE_SCHEMA][R.TABLE_NAME][R.COLUMN_NAME].default = R.COLUMN_DEFAULT);
+ return L;
+ },
+ argName: "database"
+ },
+ snowflake_oauth: {
+ code: "select TABLE_SCHEMA, TABLE_NAME, DATA_TYPE, COLUMN_NAME, COLUMN_DEFAULT, IS_NULLABLE from information_schema.columns where table_schema != 'INFORMATION_SCHEMA'",
+ lang: "snowflake",
+ processingFn: (c) => {
+ let L = {};
+ for (let R of c)
+ R.TABLE_SCHEMA in L || (L[R.TABLE_SCHEMA] = {}), R.TABLE_NAME in L[R.TABLE_SCHEMA] || (L[R.TABLE_SCHEMA][R.TABLE_NAME] = {}), L[R.TABLE_SCHEMA][R.TABLE_NAME][R.COLUMN_NAME] = {
+ type: R.DATA_TYPE,
+ required: R.IS_NULLABLE === "YES"
+ }, R.COLUMN_DEFAULT !== null && (L[R.TABLE_SCHEMA][R.TABLE_NAME][R.COLUMN_NAME].default = R.COLUMN_DEFAULT);
+ return L;
+ },
+ argName: "database"
+ },
+ mssql: {
+ argName: "database",
+ code: "select TABLE_SCHEMA, TABLE_NAME, DATA_TYPE, COLUMN_NAME, COLUMN_DEFAULT from information_schema.columns where table_schema != 'sys'",
+ lang: "mssql",
+ processingFn: (c) => {
+ if (!c || c.length === 0)
+ return {};
+ let L = c.reduce((c, L) => {
+ let R = L.TABLE_SCHEMA;
+ return delete L.TABLE_SCHEMA, c[R] = c[R] || [], c[R].push(L), c;
+ }, {}), R = {};
+ for (let c in L)
+ R[c] = L[c].reduce((c, L) => {
+ let R = L.TABLE_NAME;
+ delete L.TABLE_NAME, c[R] = c[R] || {};
+ let z = {
+ type: L.DATA_TYPE,
+ required: L.is_nullable === "NO"
+ };
+ return L.column_default && (z.default = L.COLUMN_DEFAULT), c[R][L.COLUMN_NAME] = z, c;
+ }, {});
+ return R;
  }
- });
+ }
+ };
+ ({ ...legacyScripts }), { ...legacyScripts.postgresql };
+ function buildVisibleFieldList(c, L) {
+ return c.filter((c) => c && c.ignored !== !0).map((c) => renderDbQuotedIdentifier(c?.field, L));
+ }
+ function renderDbQuotedIdentifier(c, L) {
+ switch (L) {
+ case "postgresql": return `"${c}"`;
+ case "ms_sql_server": return `[${c}]`;
+ case "mysql": return `\`${c}\``;
+ case "snowflake": return `"${c}"`;
+ case "bigquery": return `\`${c}\``;
+ case "duckdb": return `"${c}"`;
+ default: throw Error("Unsupported database type: " + L);
+ }
  }
  function getLanguageByResourceType(c) {
  return {
  postgresql: "postgresql",
  mysql: "mysql",
  ms_sql_server: "mssql",
+ mssql: "mssql",
  snowflake: "snowflake",
  snowflake_oauth: "snowflake",
  bigquery: "bigquery",
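This hunk does two things: it extracts the inline quoting switch from buildVisibleFieldList into a shared renderDbQuotedIdentifier helper, and it inlines a legacyScripts map of per-database schema-introspection scripts. De-minified, the new quoting helper is equivalent to the sketch below (identifier names are expanded for readability; the published bundle uses single-letter variables):

```ts
// De-minified sketch of renderDbQuotedIdentifier from the hunk above.
type DbType =
  | "postgresql" | "ms_sql_server" | "mysql"
  | "snowflake" | "bigquery" | "duckdb";

function renderDbQuotedIdentifier(field: string, dbType: DbType): string {
  switch (dbType) {
    case "postgresql":
    case "snowflake":
    case "duckdb":
      return `"${field}"`;   // ANSI double quotes
    case "ms_sql_server":
      return `[${field}]`;   // T-SQL brackets
    case "mysql":
    case "bigquery":
      return `\`${field}\``; // backtick quoting
    default:
      throw Error("Unsupported database type: " + dbType);
  }
}
```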
@@ -1008,8 +1118,8 @@ function makeInsertQuery(c, L, R) {
  throw Error("Table name is required");
  let z = L.filter((c) => !c.hideInsert && !(R == "postgresql" && c.defaultvalue?.startsWith("nextval("))), B = L.filter((c) => !shouldOmitColumnInInsert(c)), V = z.concat(B), H = buildParameters(z, R);
  H += "\n";
- let U = B.length > 0, W = formatColumnNames(V), G = formatInsertValues(z, R), K = formatDefaultValues(B);
- return H += `INSERT INTO ${c} (${W}) VALUES (${G}${U ? ", " : ""}${K})`, H;
+ let U = B.length > 0, W = formatColumnNames(V), G = formatInsertValues(z, R), K = formatDefaultValues(B), q = `${G}${U ? ", " : ""}${K}`;
+ return q.trim() ? (H += `INSERT INTO ${c} (${W}) VALUES (${q})`, H) : `INSERT INTO ${c} DEFAULT VALUES`;
  }
  function getInsertInput(c, L, R) {
  let z = c.type === "ducklake" ? "duckdb" : c.resourceType, B = makeInsertQuery(L, R, z);
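The makeInsertQuery change fixes an edge case: when every column is omitted or defaulted, the old code emitted an empty VALUES list, which most engines reject. The new behavior, sketched with placeholder table and column names:

```ts
// Behavior sketch: fall back to DEFAULT VALUES when there is nothing to put
// inside VALUES (...). Table and column names here are placeholders.
function insertClause(table: string, columns: string, values: string): string {
  return values.trim()
    ? `INSERT INTO ${table} (${columns}) VALUES (${values})`
    : `INSERT INTO ${table} DEFAULT VALUES`;
}

insertClause("users", '"name"', "$1"); // INSERT INTO users ("name") VALUES ($1)
insertClause("users", "", "");         // INSERT INTO users DEFAULT VALUES
```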
@@ -1067,10 +1177,10 @@ function makeSnowflakeSelectQuery(c, L, R, z) {
  CASE WHEN ? = '${c.field}' AND ? = TRUE THEN "${c.field}" END DESC`));
  return U += ` ORDER BY ${q.join(",\n")}`, U += ` LIMIT ${B} OFFSET ${V}`, U = buildParameters(H, "snowflake") + "\n" + U, U;
  }
- function makeSelectQuery(c, L, R, z, B) {
+ function makeSelectQuery(c, L, R, z, B, V) {
  if (!c)
  throw Error("Table name is required");
- let V = "", H = buildParameters([
+ let H = "", U = buildParameters([
  {
  field: "limit",
  datatype: z === "bigquery" ? "integer" : "int"
@@ -1092,30 +1202,73 @@ function makeSelectQuery(c, L, R, z, B) {
  datatype: z === "bigquery" ? "bool" : "boolean"
  }
  ], z);
- H += "\n";
- let U = buildVisibleFieldList(L, z), W = U.join(", ");
+ U += "\n";
+ let W = buildVisibleFieldList(L, z), G = W.join(", ");
  switch (z) {
  case "mysql": {
  let z = L.map((c) => `
  CASE WHEN :order_by = '${c.field}' AND :is_desc IS false THEN \`${c.field}\` END,
  CASE WHEN :order_by = '${c.field}' AND :is_desc IS true THEN \`${c.field}\` END DESC`).join(",\n");
- V = ` (:quicksearch = '' OR CONCAT_WS(' ', ${U.join(", ")}) LIKE CONCAT('%', :quicksearch, '%'))`, H += `SELECT ${W} FROM ${c}`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}`, H += ` ORDER BY ${z}`, H += " LIMIT :limit OFFSET :offset";
+ H = ` (:quicksearch = '' OR CONCAT_WS(' ', ${W.join(", ")}) LIKE CONCAT('%', :quicksearch, '%'))`, U += `SELECT ${G} FROM ${c}`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}`, U += ` ORDER BY ${z}`, U += " LIMIT :limit OFFSET :offset";
  break;
  }
  case "postgresql": {
- let z = `
- ${L.map((c) => `
- (CASE WHEN $4 = '${c.field}' AND $5 IS false THEN "${c.field}"::text END),
- (CASE WHEN $4 = '${c.field}' AND $5 IS true THEN "${c.field}"::text END) DESC`).join(",\n")}`;
- V = `($3 = '' OR CONCAT(${U.join(", ")}) ILIKE '%' || $3 || '%')`, H += `SELECT ${U.map((c) => `${c}::text`).join(", ")} FROM ${c}\n`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}\n`, H += ` ORDER BY ${z}\n`, H += " LIMIT $1::INT OFFSET $2::INT";
+ function z({ field: c, is_desc: L = !1, text_cast: R = !1, check_is_number: z }) {
+ return `(CASE WHEN${z === !0 ? ` pg_typeof("${c}")::text IN ('integer', 'bigint', 'smallint', 'numeric', 'real', 'double precision') AND` : z === !1 ? ` pg_typeof("${c}")::text NOT IN ('integer', 'bigint', 'smallint', 'numeric', 'real', 'double precision') AND` : ""} $4 = '${c}' AND $5 IS ${L} THEN "${c}"${R ? "::text" : ""} END)${L ? " DESC" : ""}`;
+ }
+ let B = `
+ ${L.map((c) => V?.fixPgIntTypes ? `
+ ${z({
+ field: c.field,
+ is_desc: !1,
+ text_cast: !0,
+ check_is_number: !1
+ })},
+ ${z({
+ field: c.field,
+ is_desc: !1,
+ text_cast: !1,
+ check_is_number: !0
+ })},
+ ${z({
+ field: c.field,
+ is_desc: !0,
+ text_cast: !0,
+ check_is_number: !1
+ })},
+ ${z({
+ field: c.field,
+ is_desc: !0,
+ text_cast: !1,
+ check_is_number: !0
+ })}` : `
+ ${z({
+ field: c.field,
+ is_desc: !1,
+ text_cast: !0
+ })},
+ ${z({
+ field: c.field,
+ is_desc: !0,
+ text_cast: !0
+ })}`).join(",\n")}`;
+ H = `($3 = '' OR CONCAT(${W.join(", ")}) ILIKE '%' || $3 || '%')`, U += `SELECT ${W.map((c) => `${c}::text`).join(", ")} FROM ${c}\n`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}\n`, U += ` ORDER BY ${B}\n`, U += " LIMIT $1::INT OFFSET $2::INT";
  break;
  }
- case "ms_sql_server":
- let z = L.map((c) => `
+ case "ms_sql_server": {
+ let z = [
+ "text",
+ "ntext",
+ "image"
+ ], B = L.filter((c) => !z.includes(c.datatype.toLowerCase())).map((c) => `
  (CASE WHEN @p4 = '${c.field}' AND @p5 = 0 THEN ${c.field} END) ASC,
- (CASE WHEN @p4 = '${c.field}' AND @p5 = 1 THEN ${c.field} END) DESC`).join(",\n");
- V = ` (@p3 = '' OR CONCAT(${W}) LIKE '%' + @p3 + '%')`, H += `SELECT ${W} FROM ${c}`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}`, H += ` ORDER BY ${z}`, H += " OFFSET @p2 ROWS FETCH NEXT @p1 ROWS ONLY";
+ (CASE WHEN @p4 = '${c.field}' AND @p5 = 1 THEN ${c.field} END) DESC`).join(",\n"), V = W.filter((c) => {
+ let R = c.slice(1, -1), B = L.find((c) => c.field === R);
+ return !z.includes(B?.datatype.toLowerCase() ?? "");
+ }).join(", ");
+ H = V ? ` (@p3 = '' OR CONCAT(${V}) LIKE '%' + @p3 + '%')` : " (@p3 = '')", U += `SELECT ${G} FROM ${c}`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}`, U += ` ORDER BY ${B}`, U += " OFFSET @p2 ROWS FETCH NEXT @p1 ROWS ONLY";
  break;
+ }
  case "snowflake": return makeSnowflakeSelectQuery(c, L, R, B);
  case "bigquery": {
  let z = L.map((c) => c.datatype === "JSON" || c.datatype.startsWith("STRUCT") || c.datatype.startsWith("ARRAY") || c.datatype === "GEOGRAPHY" ? `
@@ -1123,10 +1276,10 @@ CASE WHEN :order_by = '${c.field}' AND :is_desc IS true THEN \`${c.field}\` END
  (CASE WHEN @order_by = '${c.field}' AND @is_desc = true THEN TO_JSON_STRING(${c.field}) END) DESC` : `
  (CASE WHEN @order_by = '${c.field}' AND @is_desc = false THEN ${c.field} END) ASC,
  (CASE WHEN @order_by = '${c.field}' AND @is_desc = true THEN ${c.field} END) DESC`).join(",\n");
- V = ` (@quicksearch = '' OR REGEXP_CONTAINS(CONCAT(${U.map((c) => {
+ H = ` (@quicksearch = '' OR REGEXP_CONTAINS(CONCAT(${W.map((c) => {
  let R = L.find((L) => L.field === c.slice(1, -1));
  return R?.datatype === "JSON" || R?.datatype.startsWith("STRUCT") || R?.datatype.startsWith("ARRAY") || R?.datatype === "GEOGRAPHY" ? `TO_JSON_STRING(${c})` : `CAST(${c} AS STRING)`;
- }).join(",")}), '(?i)' || @quicksearch))`, H += `SELECT ${W} FROM ${c}`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}`, H += ` ORDER BY ${z}`, H += " LIMIT @limit OFFSET @offset";
+ }).join(",")}), '(?i)' || @quicksearch))`, U += `SELECT ${G} FROM ${c}`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}`, U += ` ORDER BY ${z}`, U += " LIMIT @limit OFFSET @offset";
  break;
  }
  case "duckdb": {
@@ -1134,12 +1287,12 @@ CASE WHEN :order_by = '${c.field}' AND :is_desc IS true THEN \`${c.field}\` END
  ${L.map((c) => `
  (CASE WHEN $order_by = '${c.field}' AND $is_desc IS false THEN "${c.field}"::text END),
  (CASE WHEN $order_by = '${c.field}' AND $is_desc IS true THEN "${c.field}"::text END) DESC`).join(",\n")}`;
- V = `($quicksearch = '' OR CONCAT(${U.join(", ")}) ILIKE '%' || $quicksearch || '%')`, H += `SELECT ${U.join(", ")} FROM ${c}\n`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}\n`, H += ` ORDER BY ${z}\n`, H += " LIMIT $limit::INT OFFSET $offset::INT";
+ H = `($quicksearch = '' OR CONCAT(${W.join(", ")}) ILIKE '%' || $quicksearch || '%')`, U += `SELECT ${W.join(", ")} FROM ${c}\n`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}\n`, U += ` ORDER BY ${z}\n`, U += " LIMIT $limit::INT OFFSET $offset::INT";
  break;
  }
  default: throw Error("Unsupported database type");
  }
- return H;
+ return U;
  }
  function coerceToNumber(c) {
  return typeof c == "number" ? c : typeof c == "string" ? parseInt(c, 10) : 0;
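Two behavioral changes in makeSelectQuery are worth unpacking. For ms_sql_server, columns of type text, ntext, and image are now excluded from both the ORDER BY cases and the CONCAT quicksearch, since those legacy types cannot be compared or concatenated; if no searchable column remains, the WHERE clause degenerates to (@p3 = ''). For postgresql, the new options argument (V) gates a fixPgIntTypes mode that orders numeric columns (detected via pg_typeof) on their raw value instead of a ::text cast, so integers no longer sort lexicographically. For one hypothetical column "age", the generated ORDER BY fragment comes out roughly as follows ($4 is order_by, $5 is is_desc):

```ts
// Approximate ORDER BY fragment produced per column when fixPgIntTypes is
// set; "age" is a placeholder field name.
const numeric =
  "('integer', 'bigint', 'smallint', 'numeric', 'real', 'double precision')";
const fragment = `
(CASE WHEN pg_typeof("age")::text NOT IN ${numeric} AND $4 = 'age' AND $5 IS false THEN "age"::text END),
(CASE WHEN pg_typeof("age")::text IN ${numeric} AND $4 = 'age' AND $5 IS false THEN "age" END),
(CASE WHEN pg_typeof("age")::text NOT IN ${numeric} AND $4 = 'age' AND $5 IS true THEN "age"::text END) DESC,
(CASE WHEN pg_typeof("age")::text IN ${numeric} AND $4 = 'age' AND $5 IS true THEN "age" END) DESC`;
```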
@@ -1489,6 +1642,20 @@ export declare function waitJob(id: string): Promise<Job>;
  * @param id
  */
  export declare function getJob(id: string): Promise<Job>;
+
+ export type StreamUpdate = {
+ new_result_stream?: string;
+ stream_offset?: number;
+ };
+
+ /**
+ * Stream job results using SSE. Calls onUpdate for each stream update,
+ * and resolves with the final result when the job completes.
+ * @param id - The job ID to stream
+ * @param onUpdate - Optional callback for stream updates with new_result_stream data
+ * @returns Promise that resolves with the final job result
+ */
+ export declare function streamJob(id: string, onUpdate?: (data: StreamUpdate) => void): Promise<any>;
  `;
  }
  async function updateRawAppPolicy(c, L) {
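The new streamJob declaration complements waitJob/getJob in the generated wmill.d.ts. A usage sketch, assuming the raw-app frontend imports the generated module as shown (the exact import specifier depends on the app setup and is illustrative only):

```ts
import { streamJob, type StreamUpdate } from "wmill"; // specifier is an assumption

// Stream incremental output while the job runs, then use the final result.
const result = await streamJob("<job-uuid>", (update: StreamUpdate) => {
  if (update.new_result_stream !== undefined) {
    console.log(`chunk at offset ${update.stream_offset}:`, update.new_result_stream);
  }
});
console.log("job finished with", result);
```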
@@ -1515,7 +1682,7 @@ async function processRunnable(c, L, R) {
  allow_user_resources: B
  }];
  }
- var rawAppWmillTs_exports = /* @__PURE__ */ __export({ default: () => rawAppWmillTs_default }), rawAppWmillTs_default = "let reqs: Record<string, any> = {}\n\nfunction doRequest(type: string, o: object) {\n return new Promise((resolve, reject) => {\n const reqId = Math.random().toString(36)\n reqs[reqId] = { resolve, reject }\n parent.postMessage({ ...o, type, reqId }, '*')\n })\n}\n\nexport const backend = new Proxy(\n {},\n {\n get(_, runnable_id: string) {\n return (v: any) => {\n return doRequest('backend', { runnable_id, v })\n }\n }\n })\n\nexport const backendAsync = new Proxy(\n {},\n {\n get(_, runnable_id: string) {\n return (v: any) => {\n return doRequest('backendAsync', { runnable_id, v })\n }\n }\n })\n\nexport function waitJob(jobId: string) {\n return doRequest('waitJob', { jobId })\n}\n\nexport function getJob(jobId: string) {\n return doRequest('getJob', { jobId })\n}\n\n\nwindow.addEventListener('message', (e) => {\n if (e.data.type == 'backendRes' || e.data.type == 'backendAsyncRes') {\n console.log('Message from parent backend', e.data)\n let job = reqs[e.data.reqId]\n if (job) {\n const result = e.data.result\n if (e.data.error) {\n job.reject(new Error(result.stack ?? result.message))\n } else {\n job.resolve(result)\n }\n } else {\n console.error('No job found for', e.data.reqId)\n }\n }\n})";
+ var rawAppWmillTs_exports = /* @__PURE__ */ __export({ default: () => rawAppWmillTs_default }, 1), rawAppWmillTs_default = "let reqs: Record<string, any> = {}\n\nfunction doRequest(type: string, o: object) {\n return new Promise((resolve, reject) => {\n const reqId = Math.random().toString(36)\n reqs[reqId] = { resolve, reject }\n parent.postMessage({ ...o, type, reqId }, '*')\n })\n}\n\nexport const backend = new Proxy(\n {},\n {\n get(_, runnable_id: string) {\n return (v: any) => {\n return doRequest('backend', { runnable_id, v })\n }\n }\n }\n)\n\nexport const backendAsync = new Proxy(\n {},\n {\n get(_, runnable_id: string) {\n return (v: any) => {\n return doRequest('backendAsync', { runnable_id, v })\n }\n }\n }\n)\n\nexport function waitJob(jobId: string) {\n return doRequest('waitJob', { jobId })\n}\n\nexport function getJob(jobId: string) {\n return doRequest('getJob', { jobId })\n}\n\n/**\n * Stream job results using SSE. Calls onUpdate for each stream update,\n * and resolves with the final result when the job completes.\n * @param jobId - The job ID to stream\n * @param onUpdate - Callback for stream updates with new_result_stream data\n * @returns Promise that resolves with the final job result\n */\nexport function streamJob(\n jobId: string,\n onUpdate?: (data: { new_result_stream?: string; stream_offset?: number }) => void\n): Promise<any> {\n return new Promise((resolve, reject) => {\n const reqId = Math.random().toString(36)\n reqs[reqId] = { resolve, reject, onUpdate }\n parent.postMessage({ jobId, type: 'streamJob', reqId }, '*')\n })\n}\n\nwindow.addEventListener('message', (e) => {\n if (e.data.type == 'streamJobUpdate') {\n // Handle streaming update\n let job = reqs[e.data.reqId]\n if (job && job.onUpdate) {\n job.onUpdate({\n new_result_stream: e.data.new_result_stream,\n stream_offset: e.data.stream_offset\n })\n }\n } else if (e.data.type == 'streamJobRes') {\n // Handle stream completion\n let job = reqs[e.data.reqId]\n if (job) {\n if (e.data.error) {\n job.reject(new Error(e.data.result?.stack ?? e.data.result?.message ?? 'Stream error'))\n } else {\n job.resolve(e.data.result)\n }\n delete reqs[e.data.reqId]\n }\n } else if (e.data.type == 'backendRes' || e.data.type == 'backendAsyncRes') {\n console.log('Message from parent backend', e.data)\n let job = reqs[e.data.reqId]\n if (job) {\n const result = e.data.result\n if (e.data.error) {\n job.reject(new Error(result.stack ?? result.message))\n } else {\n job.resolve(result)\n }\n } else {\n console.error('No job found for', e.data.reqId)\n }\n }\n})\n";
  function capitalize(c) {
  return c ? c.charAt(0).toUpperCase() + c.slice(1) : "";
  }
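The rewritten iframe bridge adds a three-message streaming protocol alongside the existing backendRes/backendAsyncRes handling: the child frame posts a streamJob request, and the parent answers with zero or more streamJobUpdate messages followed by exactly one terminal streamJobRes. Transcribed from the embedded template string, the message shapes are:

```ts
// Message shapes of the streamJob postMessage protocol; reqId correlates a
// request with its updates and terminal response.
type StreamJobRequest = { type: "streamJob"; jobId: string; reqId: string };

type StreamJobUpdate = {
  type: "streamJobUpdate";
  reqId: string;
  new_result_stream?: string;
  stream_offset?: number;
};

type StreamJobRes = {
  type: "streamJobRes";
  reqId: string;
  error?: boolean;
  result: unknown; // final job result, or { message, stack } when error is set
};
```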
package/esm/deps.js CHANGED
@@ -44,18 +44,17 @@ export { minimatch } from "minimatch";
  export { default as JSZip } from "jszip";
  export * as express from "express";
  export * as http from "node:http";
- export { WebSocketServer, WebSocket } from "ws";
+ export { WebSocket, WebSocketServer } from "ws";
  export * as getPort from "get-port";
  export * as open from "open";
  export * as esMain from "es-main";
- export * as windmillUtils from "./deps/jsr.io/@windmill-labs/shared-utils/1.0.11/lib.es.js";
+ export * as windmillUtils from "./deps/jsr.io/@windmill-labs/shared-utils/1.0.12/lib.es.js";
  import { OpenAPI } from "./gen/index.js";
  export function setClient(token, baseUrl) {
  if (baseUrl === undefined) {
- baseUrl =
- getEnv("BASE_INTERNAL_URL") ??
- getEnv("BASE_URL") ??
- "http://localhost:8000";
+ baseUrl = getEnv("BASE_INTERNAL_URL") ??
+ getEnv("BASE_URL") ??
+ "http://localhost:8000";
  }
  if (token === undefined) {
  token = getEnv("WM_TOKEN") ?? "no_token";
@@ -32,7 +32,7 @@ export const OpenAPI = {
  PASSWORD: undefined,
  TOKEN: getEnv("WM_TOKEN"),
  USERNAME: undefined,
- VERSION: '1.606.1',
+ VERSION: '1.608.0',
  WITH_CREDENTIALS: true,
  interceptors: {
  request: new Interceptors(),
@@ -7499,6 +7499,7 @@ export const createJobSignature = (data) => {
  * @param data.id
  * @param data.resumeId
  * @param data.approver
+ * @param data.flowLevel If true, generate resume URLs for the parent flow instead of the specific step. This allows pre-approvals that can be consumed by any later suspend step in the same flow.
  * @returns unknown url endpoints
  * @throws ApiError
  */
@@ -7512,7 +7513,8 @@ export const getResumeUrls = (data) => {
  resume_id: data.resumeId
  },
  query: {
- approver: data.approver
+ approver: data.approver,
+ flow_level: data.flowLevel
  }
  });
  };
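The new flowLevel option is passed through as the flow_level query parameter. A hedged call sketch (the import path follows this package's generated client at ./gen/index.js; the id and approver values are placeholders, and the resumeId type is assumed numeric):

```ts
import { getResumeUrls } from "./gen/index.js";

// Generate resume/cancel URLs scoped to the parent flow rather than a single
// step, so one pre-approval can be consumed by any later suspend step.
const urls = await getResumeUrls({
  id: "<job-uuid>",
  resumeId: 0,
  approver: "alice",  // hypothetical approver name
  flowLevel: true,    // serialized as ?flow_level=true
});
```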
@@ -17,9 +17,41 @@ import { replaceInlineScripts } from "./app.js";
  import { APP_BACKEND_FOLDER, inferRunnableSchemaFromFile, } from "./app_metadata.js";
  import { loadRunnablesFromBackend } from "./raw_apps.js";
  import { regenerateAgentDocs } from "./generate_agents.js";
- import { hasFolderSuffix, getFolderSuffix } from "../../utils/resource_folders.js";
+ import { getFolderSuffix, hasFolderSuffix, setNonDottedPaths, } from "../../utils/resource_folders.js";
  const DEFAULT_PORT = 4000;
  const DEFAULT_HOST = "localhost";
+ /**
+ * Search for wmill.yaml by traversing upward from the current directory.
+ * Unlike the standard findWmillYaml() in conf.ts, this does not stop at
+ * the git root - it continues searching until the filesystem root.
+ * This is needed for `app dev` which runs from inside a raw_app folder
+ * that may be deeply nested within a larger git repository.
+ */
+ async function findAndLoadNonDottedPathsSetting() {
+ let currentDir = process.cwd();
+ while (true) {
+ const wmillYamlPath = path.join(currentDir, "wmill.yaml");
+ if (fs.existsSync(wmillYamlPath)) {
+ try {
+ const config = await yamlParseFile(wmillYamlPath);
+ setNonDottedPaths(config?.nonDottedPaths ?? false);
+ log.debug(`Found wmill.yaml at ${wmillYamlPath}, nonDottedPaths=${config?.nonDottedPaths ?? false}`);
+ }
+ catch (e) {
+ log.debug(`Failed to parse wmill.yaml at ${wmillYamlPath}: ${e}`);
+ }
+ return;
+ }
+ // Check if we've reached the filesystem root
+ const parentDir = path.dirname(currentDir);
+ if (parentDir === currentDir) {
+ // Reached filesystem root without finding wmill.yaml
+ log.debug("No wmill.yaml found, using default dotted paths");
+ return;
+ }
+ currentDir = parentDir;
+ }
+ }
  // HTML template with live reload and SQL migration modal
  const createHTML = (jsPath, cssPath) => `
  <!DOCTYPE html>
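The walk-up loop's termination test relies on path.dirname being a fixed point at the filesystem root (path.dirname("/") === "/", and similarly for drive roots on Windows). The same pattern in isolation, as a generic sketch:

```ts
import * as fs from "node:fs";
import * as path from "node:path";

// Generic upward search: returns the first directory at or above `start`
// containing `name`, or undefined once path.dirname stops changing (root).
function findUp(name: string, start = process.cwd()): string | undefined {
  let dir = start;
  while (true) {
    if (fs.existsSync(path.join(dir, name))) return dir;
    const parent = path.dirname(dir);
    if (parent === dir) return undefined; // reached the filesystem root
    dir = parent;
  }
}

findUp("wmill.yaml");
```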
@@ -275,6 +307,9 @@ const createHTML = (jsPath, cssPath) => `
  `;
  async function dev(opts) {
  GLOBAL_CONFIG_OPT.noCdToRoot = true;
+ // Search for wmill.yaml by traversing upward (without git root constraint)
+ // to initialize nonDottedPaths setting before using folder suffix functions
+ await findAndLoadNonDottedPathsSetting();
  // Validate that we're in a .raw_app folder
  const cwd = process.cwd();
  const currentDirName = path.basename(cwd);
@@ -356,9 +391,12 @@ async function dev(opts) {
  });
  // Provide the virtual module content
  build.onLoad({ filter: /.*/, namespace: "wmill-virtual" }, (args) => {
+ const contents = wmillTs(port);
  log.info(colors.yellow(`[wmill-virtual] Loading virtual module: ${args.path}`));
+ log.info(colors.gray(`[wmill-virtual] Exports: ${contents.match(/export (const|function) \w+/g)?.join(", ") ??
+ "none"}`));
  return {
- contents: wmillTs(port),
+ contents,
  loader: "ts",
  };
  });
@@ -745,6 +783,23 @@ async function dev(opts) {
  }
  break;
  }
+ case "streamJob": {
+ // Stream job results using SSE
+ log.info(colors.blue(`[streamJob] Streaming job: ${jobId}`));
+ try {
+ await streamJobWithSSE(workspaceId, jobId, reqId, ws, workspace.remote, workspace.token);
+ }
+ catch (error) {
+ log.error(colors.red(`[streamJob] Error: ${error.message}`));
+ ws.send(JSON.stringify({
+ type: "streamJobRes",
+ reqId,
+ error: true,
+ result: { message: error.message, stack: error.stack },
+ }));
+ }
+ break;
+ }
  case "applySqlMigration": {
  // Execute SQL migration against a datatable
  const { sql, datatable, fileName } = message;
@@ -992,6 +1047,25 @@ async function genRunnablesTs(schemaOverrides = {}) {
  log.error(colors.red(`Failed to generate wmill.d.ts: ${error.message}`));
  }
  }
+ /**
+ * Convert runnables from file format to API format.
+ * File format uses type: "script"|"hubscript"|"flow" for path-based runnables.
+ * API format uses type: "path" with runType: "script"|"hubscript"|"flow".
+ */
+ function convertRunnablesToApiFormat(runnables) {
+ for (const [runnableId, runnable] of Object.entries(runnables)) {
+ if (runnable?.type === "script" ||
+ runnable?.type === "hubscript" ||
+ runnable?.type === "flow") {
+ // Convert from file format to API format
+ // { type: "script" } -> { type: "path", runType: "script" }
+ const originalType = runnable.type;
+ runnable.runType = originalType;
+ runnable.type = "path";
+ log.debug(`Converted runnable '${runnableId}' from type='${originalType}' to type='path', runType='${originalType}'`);
+ }
+ }
+ }
  async function loadRunnables() {
  try {
  const localPath = process.cwd();
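convertRunnablesToApiFormat mutates the map in place, moving the run kind into runType and marking the entry as type "path". A before/after illustration on a hypothetical runnable map (the path value is a placeholder):

```ts
// Minimal restatement of the conversion added above, applied to sample data.
function convertRunnablesToApiFormat(runnables: Record<string, any>): void {
  for (const runnable of Object.values(runnables)) {
    if (["script", "hubscript", "flow"].includes(runnable?.type)) {
      runnable.runType = runnable.type; // e.g. "script"
      runnable.type = "path";           // API format marker
    }
  }
}

const runnables: Record<string, any> = {
  a: { type: "script", path: "f/examples/hello" },
  b: { type: "inline", inlineScript: { language: "bun", content: "..." } },
};
convertRunnablesToApiFormat(runnables);
// runnables.a -> { type: "path", runType: "script", path: "f/examples/hello" }
// runnables.b is untouched: inline runnables already match the API format
```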
@@ -1003,6 +1077,9 @@ async function loadRunnables() {
  const rawApp = (await yamlParseFile(path.join(localPath, "raw_app.yaml")));
  runnables = rawApp?.runnables ?? {};
  }
+ // Always convert path-based runnables from file format to API format
+ // This handles both backend folder runnables and raw_app.yaml runnables
+ convertRunnablesToApiFormat(runnables);
  replaceInlineScripts(runnables, backendPath + SEP, true);
  return runnables;
  }
@@ -1044,13 +1121,24 @@ async function executeRunnable(runnable, workspace, appPath, runnableId, args) {
  cache_ttl: inlineScript.cache_ttl,
  };
  }
- else if (runnable.type === "path" && runnable.runType && runnable.path) {
- // Path-based runnables have type: "path" and runType: "script"|"hubscript"|"flow"
+ else if ((runnable.type === "path" || runnable.type === "runnableByPath") &&
+ runnable.runType &&
+ runnable.path) {
+ // Path-based runnables have type: "path" (or legacy "runnableByPath") and runType: "script"|"hubscript"|"flow"
  const prefix = runnable.runType;
  requestBody.path = prefix !== "hubscript"
  ? `${prefix}/${runnable.path}`
  : `script/${runnable.path}`;
  }
+ else {
+ // Neither inline script nor valid path-based runnable
+ const debugInfo = `type=${runnable.type}, runType=${runnable.runType}, ` +
+ `path=${runnable.path}, hasInlineScript=${!!runnable
+ .inlineScript}`;
+ log.error(colors.red(`[executeRunnable] Invalid runnable configuration for '${runnableId}': ${debugInfo}`));
+ throw new Error(`Invalid runnable '${runnableId}': ${debugInfo}. ` +
+ `Must have either inlineScript (for inline type) or type="path" with runType and path fields`);
+ }
  const uuid = await wmill.executeComponent({
  workspace,
  path: appPath,
@@ -1108,3 +1196,93 @@ async function getJobStatus(workspace, jobId) {
  id: jobId,
  });
  }
+ async function streamJobWithSSE(workspace, jobId, reqId, ws, baseUrl, token) {
+ const sseUrl = `${baseUrl}api/w/${workspace}/jobs_u/getupdate_sse/${jobId}?fast=true`;
+ const response = await fetch(sseUrl, {
+ headers: {
+ Accept: "text/event-stream",
+ Authorization: `Bearer ${token}`,
+ },
+ });
+ if (!response.ok) {
+ throw new Error(`SSE request failed: ${response.status} ${response.statusText}`);
+ }
+ const reader = response.body?.getReader();
+ if (!reader) {
+ throw new Error("No response body for SSE stream");
+ }
+ const decoder = new TextDecoder();
+ let buffer = "";
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done)
+ break;
+ buffer += decoder.decode(value, { stream: true });
+ const lines = buffer.split("\n");
+ buffer = lines.pop() || "";
+ for (const line of lines) {
+ if (line.startsWith("data: ")) {
+ const data = line.slice(6);
+ try {
+ const update = JSON.parse(data);
+ const type = update.type;
+ if (type === "ping" || type === "timeout") {
+ if (type === "timeout") {
+ reader.cancel();
+ return;
+ }
+ continue;
+ }
+ if (type === "error") {
+ ws.send(JSON.stringify({
+ type: "streamJobRes",
+ reqId,
+ error: true,
+ result: { message: update.error || "SSE error" },
+ }));
+ reader.cancel();
+ return;
+ }
+ if (type === "not_found") {
+ ws.send(JSON.stringify({
+ type: "streamJobRes",
+ reqId,
+ error: true,
+ result: { message: "Job not found" },
+ }));
+ reader.cancel();
+ return;
+ }
+ // Send stream update if there's new stream data
+ if (update.new_result_stream !== undefined) {
+ ws.send(JSON.stringify({
+ type: "streamJobUpdate",
+ reqId,
+ new_result_stream: update.new_result_stream,
+ stream_offset: update.stream_offset,
+ }));
+ }
+ // Check if job is completed
+ if (update.completed) {
+ ws.send(JSON.stringify({
+ type: "streamJobRes",
+ reqId,
+ error: false,
+ result: update.only_result,
+ }));
+ reader.cancel();
+ return;
+ }
+ }
+ catch (parseErr) {
+ log.warn(`Failed to parse SSE data: ${parseErr}`);
+ }
+ }
+ }
+ }
+ }
+ finally {
+ reader.releaseLock();
+ }
+ }
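streamJobWithSSE hand-parses the event stream rather than using an SSE client library: bytes are decoded incrementally, the trailing partial line is kept in a buffer until the next chunk completes it, and only lines starting with "data: " are interpreted; ping/timeout keepalives are skipped, and a completed update ends the stream. The framing logic in isolation:

```ts
// Minimal SSE "data:" line framing, mirroring the buffering above: keep the
// trailing partial line in `buffer` until the next chunk completes it.
const decoder = new TextDecoder();
let buffer = "";

function onChunk(chunk: Uint8Array, handle: (event: unknown) => void): void {
  buffer += decoder.decode(chunk, { stream: true });
  const lines = buffer.split("\n");
  buffer = lines.pop() ?? ""; // last element may be an incomplete line
  for (const line of lines) {
    if (!line.startsWith("data: ")) continue;
    try {
      handle(JSON.parse(line.slice(6)));
    } catch {
      // malformed payloads are only logged upstream, never fatal
    }
  }
}
```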