@windmill-labs/shared-utils 1.0.3 → 1.0.12

This diff reflects the publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
package/lib.es.js CHANGED
@@ -1,20 +1,17 @@
1
- var __create = Object.create, __defProp = Object.defineProperty, __getOwnPropDesc = Object.getOwnPropertyDescriptor, __getOwnPropNames = Object.getOwnPropertyNames, __getProtoOf = Object.getPrototypeOf, __hasOwnProp = Object.prototype.hasOwnProperty, __esmMin = (c, L) => () => (c && (L = c(c = 0)), L), __commonJSMin = (c, L) => () => (L || c((L = { exports: {} }).exports, L), L.exports), __export = (c) => {
2
- let R = {};
3
- for (var z in c) __defProp(R, z, {
4
- get: c[z],
1
+ var __defProp = Object.defineProperty, __getOwnPropDesc = Object.getOwnPropertyDescriptor, __getOwnPropNames = Object.getOwnPropertyNames, __hasOwnProp = Object.prototype.hasOwnProperty, __esmMin = (c, L) => () => (c && (L = c(c = 0)), L), __commonJSMin = (c, L) => () => (L || c((L = { exports: {} }).exports, L), L.exports), __export = (L, R) => {
2
+ let z = {};
3
+ for (var B in L) __defProp(z, B, {
4
+ get: L[B],
5
5
  enumerable: !0
6
6
  });
7
- return R;
8
- }, __copyProps = (c, B, H, U) => {
9
- if (B && typeof B == "object" || typeof B == "function") for (var W = __getOwnPropNames(B), G = 0, K = W.length, q; G < K; G++) q = W[G], !__hasOwnProp.call(c, q) && q !== H && __defProp(c, q, {
10
- get: ((c) => B[c]).bind(null, q),
11
- enumerable: !(U = __getOwnPropDesc(B, q)) || U.enumerable
7
+ return R && __defProp(z, Symbol.toStringTag, { value: "Module" }), z;
8
+ }, __copyProps = (B, V, H, U) => {
9
+ if (V && typeof V == "object" || typeof V == "function") for (var W = __getOwnPropNames(V), G = 0, K = W.length, q; G < K; G++) q = W[G], !__hasOwnProp.call(B, q) && q !== H && __defProp(B, q, {
10
+ get: ((c) => V[c]).bind(null, q),
11
+ enumerable: !(U = __getOwnPropDesc(V, q)) || U.enumerable
12
12
  });
13
- return c;
14
- }, __toESM = (R, z, V) => (V = R == null ? {} : __create(__getProtoOf(R)), __copyProps(z || !R || !R.__esModule ? __defProp(V, "default", {
15
- value: R,
16
- enumerable: !0
17
- }) : V, R)), __toCommonJS = (c) => __copyProps(__defProp({}, "__esModule", { value: !0 }), c), tslib_es6_exports = /* @__PURE__ */ __export({
13
+ return B;
14
+ }, __toCommonJS = (L) => __hasOwnProp.call(L, "module.exports") ? L["module.exports"] : __copyProps(__defProp({}, "__esModule", { value: !0 }), L), tslib_es6_exports = /* @__PURE__ */ __export({
18
15
  __assign: () => __assign,
19
16
  __asyncDelegator: () => __asyncDelegator,
20
17
  __asyncGenerator: () => __asyncGenerator,
@@ -38,7 +35,7 @@ var __create = Object.create, __defProp = Object.defineProperty, __getOwnPropDes
38
35
  __spread: () => __spread,
39
36
  __spreadArrays: () => __spreadArrays,
40
37
  __values: () => __values
41
- });
38
+ }, 1);
42
39
  function __extends(c, L) {
43
40
  extendStatics(c, L);
44
41
  function R() {
@@ -323,7 +320,7 @@ var extendStatics, __assign, init_tslib_es6 = __esmMin((() => {
323
320
  }, __assign.apply(this, arguments);
324
321
  };
325
322
  })), require_constants = /* @__PURE__ */ __commonJSMin(((c) => {
326
- Object.defineProperty(c, "__esModule", { value: !0 }), c.BLOCK_SIZE = 64, c.DIGEST_LENGTH = 32, c.KEY = new Uint32Array([
323
+ Object.defineProperty(c, "__esModule", { value: !0 }), c.MAX_HASHABLE_LENGTH = c.INIT = c.KEY = c.DIGEST_LENGTH = c.BLOCK_SIZE = void 0, c.BLOCK_SIZE = 64, c.DIGEST_LENGTH = 32, c.KEY = new Uint32Array([
327
324
  1116352408,
328
325
  1899447441,
329
326
  3049323471,
@@ -399,7 +396,7 @@ var extendStatics, __assign, init_tslib_es6 = __esmMin((() => {
399
396
  1541459225
400
397
  ], c.MAX_HASHABLE_LENGTH = 2 ** 53 - 1;
401
398
  })), require_RawSha256 = /* @__PURE__ */ __commonJSMin(((c) => {
402
- Object.defineProperty(c, "__esModule", { value: !0 });
399
+ Object.defineProperty(c, "__esModule", { value: !0 }), c.RawSha256 = void 0;
403
400
  var L = require_constants();
404
401
  c.RawSha256 = function() {
405
402
  function c() {
@@ -480,10 +477,10 @@ function toUtf8$1(c) {
480
477
  var init_whatwgEncodingApi = __esmMin((() => {})), dist_es_exports = /* @__PURE__ */ __export({
481
478
  fromUtf8: () => fromUtf8$3,
482
479
  toUtf8: () => toUtf8$2
483
- }), fromUtf8$3, toUtf8$2, init_dist_es = __esmMin((() => {
480
+ }, 1), fromUtf8$3, toUtf8$2, init_dist_es = __esmMin((() => {
484
481
  init_pureJs(), init_whatwgEncodingApi(), fromUtf8$3 = (c) => typeof TextEncoder == "function" ? fromUtf8$2(c) : fromUtf8$1(c), toUtf8$2 = (c) => typeof TextDecoder == "function" ? toUtf8$1(c) : toUtf8(c);
485
482
  })), require_convertToBuffer = /* @__PURE__ */ __commonJSMin(((c) => {
486
- Object.defineProperty(c, "__esModule", { value: !0 });
483
+ Object.defineProperty(c, "__esModule", { value: !0 }), c.convertToBuffer = void 0;
487
484
  var L = (init_dist_es(), __toCommonJS(dist_es_exports)), R = typeof Buffer < "u" && Buffer.from ? function(c) {
488
485
  return Buffer.from(c, "utf8");
489
486
  } : L.fromUtf8;
@@ -492,13 +489,13 @@ var init_whatwgEncodingApi = __esmMin((() => {})), dist_es_exports = /* @__PURE_
492
489
  }
493
490
  c.convertToBuffer = z;
494
491
  })), require_isEmptyData = /* @__PURE__ */ __commonJSMin(((c) => {
495
- Object.defineProperty(c, "__esModule", { value: !0 });
492
+ Object.defineProperty(c, "__esModule", { value: !0 }), c.isEmptyData = void 0;
496
493
  function L(c) {
497
494
  return typeof c == "string" ? c.length === 0 : c.byteLength === 0;
498
495
  }
499
496
  c.isEmptyData = L;
500
497
  })), require_numToUint8 = /* @__PURE__ */ __commonJSMin(((c) => {
501
- Object.defineProperty(c, "__esModule", { value: !0 });
498
+ Object.defineProperty(c, "__esModule", { value: !0 }), c.numToUint8 = void 0;
502
499
  function L(c) {
503
500
  return new Uint8Array([
504
501
  (c & 4278190080) >> 24,
@@ -509,7 +506,7 @@ var init_whatwgEncodingApi = __esmMin((() => {})), dist_es_exports = /* @__PURE_
509
506
  }
510
507
  c.numToUint8 = L;
511
508
  })), require_uint32ArrayFrom = /* @__PURE__ */ __commonJSMin(((c) => {
512
- Object.defineProperty(c, "__esModule", { value: !0 });
509
+ Object.defineProperty(c, "__esModule", { value: !0 }), c.uint32ArrayFrom = void 0;
513
510
  function L(c) {
514
511
  if (!Uint32Array.from) {
515
512
  for (var L = new Uint32Array(c.length), R = 0; R < c.length;) L[R] = c[R], R += 1;
@@ -549,7 +546,7 @@ var init_whatwgEncodingApi = __esmMin((() => {})), dist_es_exports = /* @__PURE_
549
546
  }
550
547
  });
551
548
  })), require_jsSha256 = /* @__PURE__ */ __commonJSMin(((c) => {
552
- Object.defineProperty(c, "__esModule", { value: !0 });
549
+ Object.defineProperty(c, "__esModule", { value: !0 }), c.Sha256 = void 0;
553
550
  var L = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)), R = require_constants(), z = require_RawSha256(), B = require_build$1();
554
551
  c.Sha256 = function() {
555
552
  function c(c) {
@@ -592,8 +589,7 @@ var init_whatwgEncodingApi = __esmMin((() => {})), dist_es_exports = /* @__PURE_
592
589
  }
593
590
  })), require_build = /* @__PURE__ */ __commonJSMin(((c) => {
594
591
  Object.defineProperty(c, "__esModule", { value: !0 }), (init_tslib_es6(), __toCommonJS(tslib_es6_exports)).__exportStar(require_jsSha256(), c);
595
- }));
596
- require_build();
592
+ })), import_build$1 = require_build();
597
593
  function wrapDucklakeQuery(c, L) {
598
594
  let R = `ATTACH 'ducklake://${L}' AS dl;USE dl;\n`;
599
595
  return c.replace(/^(--.*\n)*/, (c) => c + R);
@@ -723,25 +719,129 @@ function getIntrospectionQuery(c) {
723
719
  let ColumnIdentity = /* @__PURE__ */ function(c) {
724
720
  return c.ByDefault = "By Default", c.Always = "Always", c.No = "No", c;
725
721
  }({});
726
- getIntrospectionQuery();
727
- function buildVisibleFieldList(c, L) {
728
- return c.filter((c) => c && c.ignored !== !0).map((c) => {
729
- switch (L) {
730
- case "postgresql": return `"${c?.field}"`;
731
- case "ms_sql_server": return `[${c?.field}]`;
732
- case "mysql": return `\`${c?.field}\``;
733
- case "snowflake": return `"${c?.field}"`;
734
- case "bigquery": return `\`${c?.field}\``;
735
- case "duckdb": return `"${c?.field}"`;
736
- default: throw Error("Unsupported database type: " + L);
722
+ var legacyScripts = {
723
+ postgresql: {
724
+ code: "SELECT table_name, column_name, udt_name, column_default, is_nullable, table_schema FROM information_schema.columns WHERE table_schema != 'pg_catalog' AND table_schema != 'information_schema'",
725
+ processingFn: (c) => {
726
+ let L = c.reduce((c, L) => {
727
+ let R = L.table_schema;
728
+ return delete L.table_schema, c[R] = c[R] || [], (L.table_name || L.column_name) && c[R].push(L), c;
729
+ }, {}), R = {};
730
+ for (let c in L) R[c] = L[c].reduce((c, L) => {
731
+ let R = L.table_name;
732
+ delete L.table_name, c[R] = c[R] || {};
733
+ let z = {
734
+ type: L.udt_name,
735
+ required: L.is_nullable === "NO"
736
+ };
737
+ return L.column_default && (z.default = L.column_default), c[R][L.column_name] = z, c;
738
+ }, {});
739
+ return R;
740
+ },
741
+ lang: "postgresql",
742
+ argName: "database"
743
+ },
744
+ mysql: {
745
+ code: "SELECT DATABASE() AS default_db_name, TABLE_SCHEMA, TABLE_NAME, DATA_TYPE, COLUMN_NAME, COLUMN_DEFAULT FROM information_schema.columns WHERE table_schema = DATABASE() OR table_schema NOT IN ('information_schema', 'mysql', 'performance_schema', 'sys', '_vt');",
746
+ processingFn: (c) => {
747
+ let L = c.reduce((c, L) => {
748
+ let R = L.TABLE_SCHEMA;
749
+ return delete L.TABLE_SCHEMA, c[R] = c[R] || [], c[R].push(L), c;
750
+ }, {}), R = {};
751
+ for (let c in L) R[c] = L[c].reduce((c, L) => {
752
+ let R = L.TABLE_NAME;
753
+ delete L.TABLE_NAME, c[R] = c[R] || {};
754
+ let z = {
755
+ type: L.DATA_TYPE,
756
+ required: L.is_nullable === "NO"
757
+ };
758
+ return L.column_default && (z.default = L.COLUMN_DEFAULT), c[R][L.COLUMN_NAME] = z, c;
759
+ }, {});
760
+ return R;
761
+ },
762
+ lang: "mysql",
763
+ argName: "database"
764
+ },
765
+ graphql: {
766
+ code: getIntrospectionQuery(),
767
+ lang: "graphql",
768
+ argName: "api"
769
+ },
770
+ bigquery: {
771
+ code: "import { BigQuery } from '@google-cloud/bigquery@7.5.0';\nexport async function main(args: bigquery) {\nconst bq = new BigQuery({\n credentials: args\n})\nconst [datasets] = await bq.getDatasets();\nconst schema = {}\nfor (const dataset of datasets) {\n schema[dataset.id] = {}\n const query = \"SELECT table_name, ARRAY_AGG(STRUCT(if(is_nullable = 'YES', true, false) AS required, column_name AS name, data_type AS type, if(column_default = 'NULL', null, column_default) AS `default`) ORDER BY ordinal_position) AS schema FROM `{dataset.id}`.INFORMATION_SCHEMA.COLUMNS GROUP BY table_name\".replace('{dataset.id}', dataset.id)\n const [rows] = await bq.query(query)\n for (const row of rows) {\n schema[dataset.id][row.table_name] = {}\n for (const col of row.schema) {\n const colName = col.name\n delete col.name\n if (col.default === null) {\n delete col.default\n }\n schema[dataset.id][row.table_name][colName] = col\n }\n }\n}\nreturn schema\n}",
772
+ lang: "bun",
773
+ argName: "args"
774
+ },
775
+ snowflake: {
776
+ code: "select TABLE_SCHEMA, TABLE_NAME, DATA_TYPE, COLUMN_NAME, COLUMN_DEFAULT, IS_NULLABLE from information_schema.columns where table_schema != 'INFORMATION_SCHEMA'",
777
+ lang: "snowflake",
778
+ processingFn: (c) => {
779
+ let L = {};
780
+ for (let R of c) R.TABLE_SCHEMA in L || (L[R.TABLE_SCHEMA] = {}), R.TABLE_NAME in L[R.TABLE_SCHEMA] || (L[R.TABLE_SCHEMA][R.TABLE_NAME] = {}), L[R.TABLE_SCHEMA][R.TABLE_NAME][R.COLUMN_NAME] = {
781
+ type: R.DATA_TYPE,
782
+ required: R.IS_NULLABLE === "YES"
783
+ }, R.COLUMN_DEFAULT !== null && (L[R.TABLE_SCHEMA][R.TABLE_NAME][R.COLUMN_NAME].default = R.COLUMN_DEFAULT);
784
+ return L;
785
+ },
786
+ argName: "database"
787
+ },
788
+ snowflake_oauth: {
789
+ code: "select TABLE_SCHEMA, TABLE_NAME, DATA_TYPE, COLUMN_NAME, COLUMN_DEFAULT, IS_NULLABLE from information_schema.columns where table_schema != 'INFORMATION_SCHEMA'",
790
+ lang: "snowflake",
791
+ processingFn: (c) => {
792
+ let L = {};
793
+ for (let R of c) R.TABLE_SCHEMA in L || (L[R.TABLE_SCHEMA] = {}), R.TABLE_NAME in L[R.TABLE_SCHEMA] || (L[R.TABLE_SCHEMA][R.TABLE_NAME] = {}), L[R.TABLE_SCHEMA][R.TABLE_NAME][R.COLUMN_NAME] = {
794
+ type: R.DATA_TYPE,
795
+ required: R.IS_NULLABLE === "YES"
796
+ }, R.COLUMN_DEFAULT !== null && (L[R.TABLE_SCHEMA][R.TABLE_NAME][R.COLUMN_NAME].default = R.COLUMN_DEFAULT);
797
+ return L;
798
+ },
799
+ argName: "database"
800
+ },
801
+ mssql: {
802
+ argName: "database",
803
+ code: "select TABLE_SCHEMA, TABLE_NAME, DATA_TYPE, COLUMN_NAME, COLUMN_DEFAULT from information_schema.columns where table_schema != 'sys'",
804
+ lang: "mssql",
805
+ processingFn: (c) => {
806
+ if (!c || c.length === 0) return {};
807
+ let L = c.reduce((c, L) => {
808
+ let R = L.TABLE_SCHEMA;
809
+ return delete L.TABLE_SCHEMA, c[R] = c[R] || [], c[R].push(L), c;
810
+ }, {}), R = {};
811
+ for (let c in L) R[c] = L[c].reduce((c, L) => {
812
+ let R = L.TABLE_NAME;
813
+ delete L.TABLE_NAME, c[R] = c[R] || {};
814
+ let z = {
815
+ type: L.DATA_TYPE,
816
+ required: L.is_nullable === "NO"
817
+ };
818
+ return L.column_default && (z.default = L.COLUMN_DEFAULT), c[R][L.COLUMN_NAME] = z, c;
819
+ }, {});
820
+ return R;
737
821
  }
738
- });
822
+ }
823
+ };
824
+ ({ ...legacyScripts }), { ...legacyScripts.postgresql };
825
+ function buildVisibleFieldList(c, L) {
826
+ return c.filter((c) => c && c.ignored !== !0).map((c) => renderDbQuotedIdentifier(c?.field, L));
827
+ }
828
+ function renderDbQuotedIdentifier(c, L) {
829
+ switch (L) {
830
+ case "postgresql": return `"${c}"`;
831
+ case "ms_sql_server": return `[${c}]`;
832
+ case "mysql": return `\`${c}\``;
833
+ case "snowflake": return `"${c}"`;
834
+ case "bigquery": return `\`${c}\``;
835
+ case "duckdb": return `"${c}"`;
836
+ default: throw Error("Unsupported database type: " + L);
837
+ }
739
838
  }
740
839
  function getLanguageByResourceType(c) {
741
840
  return {
742
841
  postgresql: "postgresql",
743
842
  mysql: "mysql",
744
843
  ms_sql_server: "mssql",
844
+ mssql: "mssql",
745
845
  snowflake: "snowflake",
746
846
  snowflake_oauth: "snowflake",
747
847
  bigquery: "bigquery",
@@ -816,7 +916,7 @@ function getCountInput(c, L, R, z) {
816
916
  return c.type === "ducklake" && (V = wrapDucklakeQuery(V, c.ducklake)), {
817
917
  runnable: {
818
918
  name: "AppDbExplorer",
819
- type: "runnableByName",
919
+ type: "inline",
820
920
  inlineScript: {
821
921
  content: V,
822
922
  language: getLanguageByResourceType(B),
@@ -878,7 +978,7 @@ function getDeleteInput(c, L, R) {
878
978
  return c.type === "ducklake" && (B = wrapDucklakeQuery(B, c.ducklake)), {
879
979
  runnable: {
880
980
  name: "AppDbExplorer",
881
- type: "runnableByName",
981
+ type: "inline",
882
982
  inlineScript: {
883
983
  content: B,
884
984
  language: getLanguageByResourceType(z),
@@ -937,15 +1037,15 @@ function makeInsertQuery(c, L, R) {
937
1037
  if (!c) throw Error("Table name is required");
938
1038
  let z = L.filter((c) => !c.hideInsert && !(R == "postgresql" && c.defaultvalue?.startsWith("nextval("))), B = L.filter((c) => !shouldOmitColumnInInsert(c)), V = z.concat(B), H = buildParameters(z, R);
939
1039
  H += "\n";
940
- let U = B.length > 0, W = formatColumnNames(V), G = formatInsertValues(z, R), K = formatDefaultValues(B);
941
- return H += `INSERT INTO ${c} (${W}) VALUES (${G}${U ? ", " : ""}${K})`, H;
1040
+ let U = B.length > 0, W = formatColumnNames(V), G = formatInsertValues(z, R), K = formatDefaultValues(B), q = `${G}${U ? ", " : ""}${K}`;
1041
+ return q.trim() ? (H += `INSERT INTO ${c} (${W}) VALUES (${q})`, H) : `INSERT INTO ${c} DEFAULT VALUES`;
942
1042
  }
943
1043
  function getInsertInput(c, L, R) {
944
1044
  let z = c.type === "ducklake" ? "duckdb" : c.resourceType, B = makeInsertQuery(L, R, z);
945
1045
  return c.type === "ducklake" && (B = wrapDucklakeQuery(B, c.ducklake)), {
946
1046
  runnable: {
947
1047
  name: "AppDbExplorer",
948
- type: "runnableByName",
1048
+ type: "inline",
949
1049
  inlineScript: {
950
1050
  content: B,
951
1051
  language: getLanguageByResourceType(z),
@@ -996,9 +1096,9 @@ function makeSnowflakeSelectQuery(c, L, R, z) {
996
1096
  CASE WHEN ? = '${c.field}' AND ? = TRUE THEN "${c.field}" END DESC`));
997
1097
  return U += ` ORDER BY ${q.join(",\n")}`, U += ` LIMIT ${B} OFFSET ${V}`, U = buildParameters(H, "snowflake") + "\n" + U, U;
998
1098
  }
999
- function makeSelectQuery(c, L, R, z, B) {
1099
+ function makeSelectQuery(c, L, R, z, B, V) {
1000
1100
  if (!c) throw Error("Table name is required");
1001
- let V = "", H = buildParameters([
1101
+ let H = "", U = buildParameters([
1002
1102
  {
1003
1103
  field: "limit",
1004
1104
  datatype: z === "bigquery" ? "integer" : "int"
@@ -1020,30 +1120,73 @@ function makeSelectQuery(c, L, R, z, B) {
1020
1120
  datatype: z === "bigquery" ? "bool" : "boolean"
1021
1121
  }
1022
1122
  ], z);
1023
- H += "\n";
1024
- let U = buildVisibleFieldList(L, z), W = U.join(", ");
1123
+ U += "\n";
1124
+ let W = buildVisibleFieldList(L, z), G = W.join(", ");
1025
1125
  switch (z) {
1026
1126
  case "mysql": {
1027
1127
  let z = L.map((c) => `
1028
1128
  CASE WHEN :order_by = '${c.field}' AND :is_desc IS false THEN \`${c.field}\` END,
1029
1129
  CASE WHEN :order_by = '${c.field}' AND :is_desc IS true THEN \`${c.field}\` END DESC`).join(",\n");
1030
- V = ` (:quicksearch = '' OR CONCAT_WS(' ', ${U.join(", ")}) LIKE CONCAT('%', :quicksearch, '%'))`, H += `SELECT ${W} FROM ${c}`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}`, H += ` ORDER BY ${z}`, H += " LIMIT :limit OFFSET :offset";
1130
+ H = ` (:quicksearch = '' OR CONCAT_WS(' ', ${W.join(", ")}) LIKE CONCAT('%', :quicksearch, '%'))`, U += `SELECT ${G} FROM ${c}`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}`, U += ` ORDER BY ${z}`, U += " LIMIT :limit OFFSET :offset";
1031
1131
  break;
1032
1132
  }
1033
1133
  case "postgresql": {
1034
- let z = `
1035
- ${L.map((c) => `
1036
- (CASE WHEN $4 = '${c.field}' AND $5 IS false THEN "${c.field}"::text END),
1037
- (CASE WHEN $4 = '${c.field}' AND $5 IS true THEN "${c.field}"::text END) DESC`).join(",\n")}`;
1038
- V = `($3 = '' OR CONCAT(${U.join(", ")}) ILIKE '%' || $3 || '%')`, H += `SELECT ${U.map((c) => `${c}::text`).join(", ")} FROM ${c}\n`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}\n`, H += ` ORDER BY ${z}\n`, H += " LIMIT $1::INT OFFSET $2::INT";
1134
+ function z({ field: c, is_desc: L = !1, text_cast: R = !1, check_is_number: z }) {
1135
+ return `(CASE WHEN${z === !0 ? ` pg_typeof("${c}")::text IN ('integer', 'bigint', 'smallint', 'numeric', 'real', 'double precision') AND` : z === !1 ? ` pg_typeof("${c}")::text NOT IN ('integer', 'bigint', 'smallint', 'numeric', 'real', 'double precision') AND` : ""} $4 = '${c}' AND $5 IS ${L} THEN "${c}"${R ? "::text" : ""} END)${L ? " DESC" : ""}`;
1136
+ }
1137
+ let B = `
1138
+ ${L.map((c) => V?.fixPgIntTypes ? `
1139
+ ${z({
1140
+ field: c.field,
1141
+ is_desc: !1,
1142
+ text_cast: !0,
1143
+ check_is_number: !1
1144
+ })},
1145
+ ${z({
1146
+ field: c.field,
1147
+ is_desc: !1,
1148
+ text_cast: !1,
1149
+ check_is_number: !0
1150
+ })},
1151
+ ${z({
1152
+ field: c.field,
1153
+ is_desc: !0,
1154
+ text_cast: !0,
1155
+ check_is_number: !1
1156
+ })},
1157
+ ${z({
1158
+ field: c.field,
1159
+ is_desc: !0,
1160
+ text_cast: !1,
1161
+ check_is_number: !0
1162
+ })}` : `
1163
+ ${z({
1164
+ field: c.field,
1165
+ is_desc: !1,
1166
+ text_cast: !0
1167
+ })},
1168
+ ${z({
1169
+ field: c.field,
1170
+ is_desc: !0,
1171
+ text_cast: !0
1172
+ })}`).join(",\n")}`;
1173
+ H = `($3 = '' OR CONCAT(${W.join(", ")}) ILIKE '%' || $3 || '%')`, U += `SELECT ${W.map((c) => `${c}::text`).join(", ")} FROM ${c}\n`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}\n`, U += ` ORDER BY ${B}\n`, U += " LIMIT $1::INT OFFSET $2::INT";
1039
1174
  break;
1040
1175
  }
1041
- case "ms_sql_server":
1042
- let z = L.map((c) => `
1176
+ case "ms_sql_server": {
1177
+ let z = [
1178
+ "text",
1179
+ "ntext",
1180
+ "image"
1181
+ ], B = L.filter((c) => !z.includes(c.datatype.toLowerCase())).map((c) => `
1043
1182
  (CASE WHEN @p4 = '${c.field}' AND @p5 = 0 THEN ${c.field} END) ASC,
1044
- (CASE WHEN @p4 = '${c.field}' AND @p5 = 1 THEN ${c.field} END) DESC`).join(",\n");
1045
- V = ` (@p3 = '' OR CONCAT(${W}) LIKE '%' + @p3 + '%')`, H += `SELECT ${W} FROM ${c}`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}`, H += ` ORDER BY ${z}`, H += " OFFSET @p2 ROWS FETCH NEXT @p1 ROWS ONLY";
1183
+ (CASE WHEN @p4 = '${c.field}' AND @p5 = 1 THEN ${c.field} END) DESC`).join(",\n"), V = W.filter((c) => {
1184
+ let R = c.slice(1, -1), B = L.find((c) => c.field === R);
1185
+ return !z.includes(B?.datatype.toLowerCase() ?? "");
1186
+ }).join(", ");
1187
+ H = V ? ` (@p3 = '' OR CONCAT(${V}) LIKE '%' + @p3 + '%')` : " (@p3 = '')", U += `SELECT ${G} FROM ${c}`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}`, U += ` ORDER BY ${B}`, U += " OFFSET @p2 ROWS FETCH NEXT @p1 ROWS ONLY";
1046
1188
  break;
1189
+ }
1047
1190
  case "snowflake": return makeSnowflakeSelectQuery(c, L, R, B);
1048
1191
  case "bigquery": {
1049
1192
  let z = L.map((c) => c.datatype === "JSON" || c.datatype.startsWith("STRUCT") || c.datatype.startsWith("ARRAY") || c.datatype === "GEOGRAPHY" ? `
@@ -1051,10 +1194,10 @@ CASE WHEN :order_by = '${c.field}' AND :is_desc IS true THEN \`${c.field}\` END
1051
1194
  (CASE WHEN @order_by = '${c.field}' AND @is_desc = true THEN TO_JSON_STRING(${c.field}) END) DESC` : `
1052
1195
  (CASE WHEN @order_by = '${c.field}' AND @is_desc = false THEN ${c.field} END) ASC,
1053
1196
  (CASE WHEN @order_by = '${c.field}' AND @is_desc = true THEN ${c.field} END) DESC`).join(",\n");
1054
- V = ` (@quicksearch = '' OR REGEXP_CONTAINS(CONCAT(${U.map((c) => {
1197
+ H = ` (@quicksearch = '' OR REGEXP_CONTAINS(CONCAT(${W.map((c) => {
1055
1198
  let R = L.find((L) => L.field === c.slice(1, -1));
1056
1199
  return R?.datatype === "JSON" || R?.datatype.startsWith("STRUCT") || R?.datatype.startsWith("ARRAY") || R?.datatype === "GEOGRAPHY" ? `TO_JSON_STRING(${c})` : `CAST(${c} AS STRING)`;
1057
- }).join(",")}), '(?i)' || @quicksearch))`, H += `SELECT ${W} FROM ${c}`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}`, H += ` ORDER BY ${z}`, H += " LIMIT @limit OFFSET @offset";
1200
+ }).join(",")}), '(?i)' || @quicksearch))`, U += `SELECT ${G} FROM ${c}`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}`, U += ` ORDER BY ${z}`, U += " LIMIT @limit OFFSET @offset";
1058
1201
  break;
1059
1202
  }
1060
1203
  case "duckdb": {
@@ -1062,12 +1205,12 @@ CASE WHEN :order_by = '${c.field}' AND :is_desc IS true THEN \`${c.field}\` END
1062
1205
  ${L.map((c) => `
1063
1206
  (CASE WHEN $order_by = '${c.field}' AND $is_desc IS false THEN "${c.field}"::text END),
1064
1207
  (CASE WHEN $order_by = '${c.field}' AND $is_desc IS true THEN "${c.field}"::text END) DESC`).join(",\n")}`;
1065
- V = `($quicksearch = '' OR CONCAT(${U.join(", ")}) ILIKE '%' || $quicksearch || '%')`, H += `SELECT ${U.join(", ")} FROM ${c}\n`, H += ` WHERE ${R ? `${R} AND` : ""} ${V}\n`, H += ` ORDER BY ${z}\n`, H += " LIMIT $limit::INT OFFSET $offset::INT";
1208
+ H = `($quicksearch = '' OR CONCAT(${W.join(", ")}) ILIKE '%' || $quicksearch || '%')`, U += `SELECT ${W.join(", ")} FROM ${c}\n`, U += ` WHERE ${R ? `${R} AND` : ""} ${H}\n`, U += ` ORDER BY ${z}\n`, U += " LIMIT $limit::INT OFFSET $offset::INT";
1066
1209
  break;
1067
1210
  }
1068
1211
  default: throw Error("Unsupported database type");
1069
1212
  }
1070
- return H;
1213
+ return U;
1071
1214
  }
1072
1215
  function coerceToNumber(c) {
1073
1216
  return typeof c == "number" ? c : typeof c == "string" ? parseInt(c, 10) : 0;
@@ -1078,7 +1221,7 @@ function getSelectInput(c, L, R, z, B) {
1078
1221
  return c.type === "ducklake" && (H = wrapDucklakeQuery(H, c.ducklake)), {
1079
1222
  runnable: {
1080
1223
  name: "AppDbExplorer",
1081
- type: "runnableByName",
1224
+ type: "inline",
1082
1225
  inlineScript: {
1083
1226
  content: H,
1084
1227
  language: getLanguageByResourceType(V)
@@ -1134,7 +1277,7 @@ function getUpdateInput(c, L, R, z) {
1134
1277
  return c.type === "ducklake" && (V = wrapDucklakeQuery(V, c.ducklake)), {
1135
1278
  runnable: {
1136
1279
  name: "AppDbExplorer",
1137
- type: "runnableByName",
1280
+ type: "inline",
1138
1281
  inlineScript: {
1139
1282
  content: V,
1140
1283
  language: getLanguageByResourceType(B),
@@ -1156,6 +1299,12 @@ function getUpdateInput(c, L, R, z) {
1156
1299
  fieldType: "object"
1157
1300
  };
1158
1301
  }
1302
+ function isRunnableByPath(c) {
1303
+ return c?.type == "runnableByPath" || c?.type == "path";
1304
+ }
1305
+ function isRunnableByName(c) {
1306
+ return c?.type == "runnableByName" || c?.type == "inline";
1307
+ }
1159
1308
  function findGridItemById(c, L, R) {
1160
1309
  for (let z of allItems(c, L)) if (z.id === R) return z;
1161
1310
  }
@@ -1219,7 +1368,7 @@ function computeS3FileInputPolicy(c, L) {
1219
1368
  function isPartialS3Object(c) {
1220
1369
  return typeof c == "object" && !!c && typeof c.s3 == "string";
1221
1370
  }
1222
- function computeS3ImageViewerPolicy(c) {
1371
+ function computeS3FileViewerPolicy(c) {
1223
1372
  if (c.source.type === "uploadS3" && isPartialS3Object(c.source.value)) return {
1224
1373
  s3_path: c.source.value.s3,
1225
1374
  storage: c.source.value.storage
@@ -1232,7 +1381,16 @@ function computeS3ImageViewerPolicy(c) {
1232
1381
  function collectStaticFields(c) {
1233
1382
  return Object.fromEntries(Object.entries(c ?? {}).filter(([c, L]) => L.type == "static").map(([c, L]) => [c, L.value]));
1234
1383
  }
1235
- var import_build = /* @__PURE__ */ __toESM(require_build(), 1);
1384
+ async function hash(c) {
1385
+ try {
1386
+ let L = new TextEncoder().encode(c), R = await crypto.subtle.digest("SHA-256", L);
1387
+ return Array.from(new Uint8Array(R)).map((c) => c.toString(16).padStart(2, "0")).join("");
1388
+ } catch {
1389
+ let L = new import_build$1.Sha256();
1390
+ return L.update(c ?? ""), Array.from(await L.digest()).map((c) => c.toString(16).padStart(2, "0")).join("");
1391
+ }
1392
+ }
1393
+ var import_build = require_build();
1236
1394
  async function updatePolicy(c, L) {
1237
1395
  let R = allItems(c.grid, c.subgrids), z = await Promise.all(R.flatMap((L) => {
1238
1396
  let R = L.data, z = [{
@@ -1286,22 +1444,22 @@ async function updatePolicy(c, L) {
1286
1444
  z.push(...c);
1287
1445
  }
1288
1446
  return z.filter((c) => c.input).map(async (L) => {
1289
- if (L.input?.type == "runnable") return await processRunnable(L.id, L.input.runnable, L.input.fields, c);
1447
+ if (L.input?.type == "runnable") return await processRunnable$1(L.id, L.input.runnable, L.input.fields, c);
1290
1448
  });
1291
- }).concat(Object.values(c.hiddenInlineScripts ?? {}).map(async (L, R) => await processRunnable("bg_" + R, L, L.fields, c)))), B = Object.fromEntries(z.filter(Boolean)), V = R.filter((c) => c.data.type === "s3fileinputcomponent").map((L) => {
1449
+ }).concat(Object.values(c.hiddenInlineScripts ?? {}).map(async (L, R) => await processRunnable$1("bg_" + R, L, L.fields, c)))), B = Object.fromEntries(z.filter(Boolean)), V = R.filter((c) => c.data.type === "s3fileinputcomponent").map((L) => {
1292
1450
  let R = L.data.configuration;
1293
1451
  return computeS3FileInputPolicy(R?.type?.configuration?.s3, c);
1294
1452
  }).filter(Boolean);
1295
1453
  R.findIndex((c) => {
1296
1454
  let L = c.data;
1297
1455
  if (L.type === "schemaformcomponent" || L.type === "formbuttoncomponent" || L.type === "formcomponent") {
1298
- let c = L.type === "schemaformcomponent" ? L.componentInput?.value?.properties : L.componentInput?.runnable?.type === "runnableByName" ? L.componentInput?.runnable?.inlineScript?.schema?.properties : L.componentInput?.runnable?.schema?.properties;
1456
+ let c = L.type === "schemaformcomponent" ? L.componentInput?.value?.properties : isRunnableByName(L.componentInput?.runnable) ? L.componentInput?.runnable?.inlineScript?.schema?.properties : L.componentInput?.runnable?.schema?.properties;
1299
1457
  return Object.values(c ?? {}).findIndex((c) => c?.type === "object" && c?.format === "resource-s3_object" || c?.type === "array" && (c?.items?.resourceType === "s3object" || c?.items?.resourceType === "s3_object")) !== -1;
1300
1458
  } else return !1;
1301
1459
  }) !== -1 && V.push(computeWorkspaceS3FileInputPolicy());
1302
- let H = R.filter((c) => c.data.type === "imagecomponent").map((c) => {
1460
+ let H = R.filter((c) => c.data.type === "imagecomponent" || c.data.type === "pdfcomponent" || c.data.type === "downloadcomponent").map((c) => {
1303
1461
  let L = c.data.configuration;
1304
- return computeS3ImageViewerPolicy(L);
1462
+ return computeS3FileViewerPolicy(L);
1305
1463
  }).filter(Boolean);
1306
1464
  return {
1307
1465
  ...L ?? {},
@@ -1310,22 +1468,22 @@ async function updatePolicy(c, L) {
1310
1468
  triggerables_v2: B
1311
1469
  };
1312
1470
  }
1313
- async function processRunnable(c, L, R, z) {
1471
+ async function processRunnable$1(c, L, R, z) {
1314
1472
  let B = collectStaticFields(R), V = collectOneOfFields(R, z), H = Object.entries(R).map(([c, L]) => L.allowUserResources ? c : void 0).filter(Boolean);
1315
- if (L?.type == "runnableByName") {
1316
- let R = await hash(L.inlineScript?.content);
1473
+ if (isRunnableByName(L)) {
1474
+ let R = await hash$1(L.inlineScript?.content);
1317
1475
  return console.debug("hex", R, c), [`${c}:rawscript/${R}`, {
1318
1476
  static_inputs: B,
1319
1477
  one_of_inputs: V,
1320
1478
  allow_user_resources: H
1321
1479
  }];
1322
- } else if (L?.type == "runnableByPath") return [`${c}:${L.runType === "hubscript" ? "script" : L.runType}/${L.path}`, {
1480
+ } else if (isRunnableByPath(L)) return [`${c}:${L.runType === "hubscript" ? "script" : L.runType}/${L.path}`, {
1323
1481
  static_inputs: B,
1324
1482
  one_of_inputs: V,
1325
1483
  allow_user_resources: H
1326
1484
  }];
1327
1485
  }
1328
- async function hash(c) {
1486
+ async function hash$1(c) {
1329
1487
  try {
1330
1488
  let L = new TextEncoder().encode(c), R = await crypto.subtle.digest("SHA-256", L);
1331
1489
  return Array.from(new Uint8Array(R)).map((c) => c.toString(16).padStart(2, "0")).join("");
@@ -1349,56 +1507,82 @@ function removeStaticFields(c, L) {
1349
1507
  };
1350
1508
  }
1351
1509
  function hiddenRunnableToTsType(c) {
1352
- return c?.type == "runnableByName" ? c?.inlineScript?.schema ? schemaToTsType(removeStaticFields(c?.inlineScript?.schema, c?.fields ?? {})) : "{}" : c?.type == "runnableByPath" ? schemaToTsType(removeStaticFields(c?.schema, c?.fields ?? {})) : "{}";
1510
+ return isRunnableByName(c) ? c?.inlineScript?.schema ? schemaToTsType(removeStaticFields(c?.inlineScript?.schema, c?.fields ?? {})) : "{}" : isRunnableByPath(c) ? schemaToTsType(removeStaticFields(c?.schema, c?.fields ?? {})) : "{}";
1353
1511
  }
1354
1512
  function genWmillTs(c) {
1355
1513
  return `// THIS FILE IS READ-ONLY
1356
1514
  // AND GENERATED AUTOMATICALLY FROM YOUR RUNNABLES
1357
-
1358
- ${Object.entries(c).map(([c, L]) => `export type RunBg${capitalize(c)} = ${hiddenRunnableToTsType(L)}\n`).join("\n")}
1359
1515
 
1360
- export const runBg = {
1361
- ${Object.keys(c).map((c) => ` ${c}: null as unknown as (data: RunBg${capitalize(c)}) => Promise<any>`).join(",\n")}
1362
- }
1363
-
1364
- export const runBgAsync = {
1365
- ${Object.keys(c).map((c) => ` ${c}: null as unknown as (data: RunBg${capitalize(c)}) => Promise<string>`).join(",\n")}
1366
- }
1367
-
1516
+ export declare const backend: {
1517
+ ${Object.entries(c).map(([c, L]) => ` ${c}: (args: ${hiddenRunnableToTsType(L)}) => Promise<any>;`).join("\n")}
1518
+ };
1519
+
1520
+ export declare const backendAsync: {
1521
+ ${Object.entries(c).map(([c, L]) => ` ${c}: (args: ${hiddenRunnableToTsType(L)}) => Promise<string>;`).join("\n")}
1522
+ };
1368
1523
 
1369
1524
  export type Job = {
1370
- type: 'QueuedJob' | 'CompletedJob'
1371
- id: string
1372
- created_at: number
1373
- started_at: number | undefined
1374
- duration_ms: number
1375
- success: boolean
1376
- args: any
1377
- result: any
1378
- }
1525
+ type: "QueuedJob" | "CompletedJob";
1526
+ id: string;
1527
+ created_at: number;
1528
+ started_at: number | undefined;
1529
+ duration_ms: number;
1530
+ success: boolean;
1531
+ args: any;
1532
+ result: any;
1533
+ };
1379
1534
 
1380
1535
  /**
1381
- * Execute a job and wait for it to complete and return the completed job
1382
- * @param id
1383
- */
1384
- // @ts-ignore
1385
- export function waitJob(id: string): Promise<Job> {
1386
- // implementation passed when bundling/deploying
1387
- return null as unknown as Promise<Job>
1388
- }
1536
+ * Execute a job and wait for it to complete and return the completed job
1537
+ * @param id
1538
+ */
1539
+ export declare function waitJob(id: string): Promise<Job>;
1389
1540
 
1390
1541
  /**
1391
- * Get a job by id and return immediately with the current state of the job
1392
- * @param id
1393
- */
1394
- // @ts-ignore
1395
- export function getJob(id: string): Promise<Job> {
1396
- // implementation passed when bundling/deploying
1397
- return null as unknown as Promise<Job>
1398
- }
1542
+ * Get a job by id and return immediately with the current state of the job
1543
+ * @param id
1544
+ */
1545
+ export declare function getJob(id: string): Promise<Job>;
1546
+
1547
+ export type StreamUpdate = {
1548
+ new_result_stream?: string;
1549
+ stream_offset?: number;
1550
+ };
1551
+
1552
+ /**
1553
+ * Stream job results using SSE. Calls onUpdate for each stream update,
1554
+ * and resolves with the final result when the job completes.
1555
+ * @param id - The job ID to stream
1556
+ * @param onUpdate - Optional callback for stream updates with new_result_stream data
1557
+ * @returns Promise that resolves with the final job result
1558
+ */
1559
+ export declare function streamJob(id: string, onUpdate?: (data: StreamUpdate) => void): Promise<any>;
1399
1560
  `;
1400
1561
  }
1562
+ async function updateRawAppPolicy(c, L) {
1563
+ let R = Object.fromEntries(await Promise.all(Object.entries(c).map(async ([c, L]) => await processRunnable(c, L, L?.fields ?? {}))));
1564
+ return {
1565
+ ...L,
1566
+ triggerables_v2: R
1567
+ };
1568
+ }
1569
+ async function processRunnable(c, L, R) {
1570
+ let z = collectStaticFields(R), B = Object.entries(R).map(([c, L]) => L.allowUserResources ? c : void 0).filter(Boolean);
1571
+ if (isRunnableByName(L)) {
1572
+ let R = await hash(L.inlineScript?.content);
1573
+ return console.log("hex", R, c), [`${c}:rawscript/${R}`, {
1574
+ static_inputs: z,
1575
+ one_of_inputs: {},
1576
+ allow_user_resources: B
1577
+ }];
1578
+ } else if (isRunnableByPath(L)) return [`${c}:${L.runType === "hubscript" ? "script" : L.runType}/${L.path}`, {
1579
+ static_inputs: z,
1580
+ one_of_inputs: {},
1581
+ allow_user_resources: B
1582
+ }];
1583
+ }
1584
+ var rawAppWmillTs_exports = /* @__PURE__ */ __export({ default: () => rawAppWmillTs_default }, 1), rawAppWmillTs_default = "let reqs: Record<string, any> = {}\n\nfunction doRequest(type: string, o: object) {\n return new Promise((resolve, reject) => {\n const reqId = Math.random().toString(36)\n reqs[reqId] = { resolve, reject }\n parent.postMessage({ ...o, type, reqId }, '*')\n })\n}\n\nexport const backend = new Proxy(\n {},\n {\n get(_, runnable_id: string) {\n return (v: any) => {\n return doRequest('backend', { runnable_id, v })\n }\n }\n }\n)\n\nexport const backendAsync = new Proxy(\n {},\n {\n get(_, runnable_id: string) {\n return (v: any) => {\n return doRequest('backendAsync', { runnable_id, v })\n }\n }\n }\n)\n\nexport function waitJob(jobId: string) {\n return doRequest('waitJob', { jobId })\n}\n\nexport function getJob(jobId: string) {\n return doRequest('getJob', { jobId })\n}\n\n/**\n * Stream job results using SSE. Calls onUpdate for each stream update,\n * and resolves with the final result when the job completes.\n * @param jobId - The job ID to stream\n * @param onUpdate - Callback for stream updates with new_result_stream data\n * @returns Promise that resolves with the final job result\n */\nexport function streamJob(\n jobId: string,\n onUpdate?: (data: { new_result_stream?: string; stream_offset?: number }) => void\n): Promise<any> {\n return new Promise((resolve, reject) => {\n const reqId = Math.random().toString(36)\n reqs[reqId] = { resolve, reject, onUpdate }\n parent.postMessage({ jobId, type: 'streamJob', reqId }, '*')\n })\n}\n\nwindow.addEventListener('message', (e) => {\n if (e.data.type == 'streamJobUpdate') {\n // Handle streaming update\n let job = reqs[e.data.reqId]\n if (job && job.onUpdate) {\n job.onUpdate({\n new_result_stream: e.data.new_result_stream,\n stream_offset: e.data.stream_offset\n })\n }\n } else if (e.data.type == 'streamJobRes') {\n // Handle stream completion\n let job = reqs[e.data.reqId]\n if (job) {\n if (e.data.error) {\n job.reject(new Error(e.data.result?.stack ?? e.data.result?.message ?? 'Stream error'))\n } else {\n job.resolve(e.data.result)\n }\n delete reqs[e.data.reqId]\n }\n } else if (e.data.type == 'backendRes' || e.data.type == 'backendAsyncRes') {\n console.log('Message from parent backend', e.data)\n let job = reqs[e.data.reqId]\n if (job) {\n const result = e.data.result\n if (e.data.error) {\n job.reject(new Error(result.stack ?? result.message))\n } else {\n job.resolve(result)\n }\n } else {\n console.error('No job found for', e.data.reqId)\n }\n }\n})\n";
1401
1585
  function capitalize(c) {
1402
1586
  return c ? c.charAt(0).toUpperCase() + c.slice(1) : "";
1403
1587
  }
1404
- export { capitalize, genWmillTs, updatePolicy };
1588
+ export { capitalize, genWmillTs, updatePolicy, updateRawAppPolicy, rawAppWmillTs_exports as wmillTsRaw };
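
For orientation, below is a minimal TypeScript usage sketch of the raw-app helpers exported above (`genWmillTs` plus the newly added `updateRawAppPolicy`). The import path, the runnable shape, and the field names are assumptions inferred from this diff rather than documented API; treat it as illustrative only.

// Hypothetical sketch — shapes inferred from the diff, not from documented types.
import { genWmillTs, updateRawAppPolicy } from '@windmill-labs/shared-utils'

async function main() {
  // Hidden runnables keyed by id. "inline" is the new type value; the legacy
  // "runnableByName" string is still accepted by isRunnableByName.
  const runnables: Record<string, any> = {
    fetchUsers: {
      type: 'inline',
      inlineScript: { content: 'SELECT * FROM users' },
      fields: {
        // fields with type "static" are collected into the policy's static_inputs
        database: { type: 'static', value: 'u/admin/main_db' }
      }
    }
  }

  // Read-only TypeScript source declaring `backend` / `backendAsync` for the app editor
  const wmillTs: string = genWmillTs(runnables)

  // Each runnable becomes a `<id>:rawscript/<sha256-of-content>` entry in triggerables_v2
  const policy = await updateRawAppPolicy(runnables, {})
  console.log(wmillTs, policy.triggerables_v2)
}

main()

Note that `updateRawAppPolicy` hashes each inline script's content (SHA-256 via `crypto.subtle`, with the bundled Sha256 implementation as a fallback), so the policy needs to be regenerated whenever an inline script changes.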