trellis 1.0.4 → 1.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli/tql.js +109 -24
  2. package/package.json +1 -1
package/dist/cli/tql.js CHANGED
@@ -16751,7 +16751,12 @@ var init_schema = __esm(() => {
16751
16751
  type: "string",
16752
16752
  description: "Output dataset name"
16753
16753
  },
16754
- source: { $ref: "#/definitions/httpSource" },
16754
+ source: {
16755
+ oneOf: [
16756
+ { $ref: "#/definitions/httpSource" },
16757
+ { $ref: "#/definitions/fileSource" }
16758
+ ]
16759
+ },
16755
16760
  from: { type: "string" },
16756
16761
  eqls: { type: "string" },
16757
16762
  output: { $ref: "#/definitions/output" }
@@ -16779,6 +16784,25 @@ var init_schema = __esm(() => {
16779
16784
  }
16780
16785
  ]
16781
16786
  },
16787
+ fileSource: {
16788
+ type: "object",
16789
+ properties: {
16790
+ kind: {
16791
+ const: "file",
16792
+ description: "Source kind"
16793
+ },
16794
+ path: {
16795
+ type: "string",
16796
+ description: "Path to local file, relative to the workflow file"
16797
+ },
16798
+ format: {
16799
+ enum: ["json", "csv"],
16800
+ description: "File format (defaults to json)"
16801
+ }
16802
+ },
16803
+ required: ["kind", "path"],
16804
+ additionalProperties: false
16805
+ },
16782
16806
  httpSource: {
16783
16807
  type: "object",
16784
16808
  properties: {
@@ -46443,7 +46467,8 @@ Kernel Statistics:`));
46443
46467
 
46444
46468
  // src/workflows/engine.ts
46445
46469
  init_types();
46446
- import { readFile as readFile2 } from "fs/promises";
46470
+ import { readFile as readFile3 } from "fs/promises";
46471
+ import { dirname as dirname3, resolve as resolve3 } from "path";
46447
46472
 
46448
46473
  // src/workflows/parser.ts
46449
46474
  init_dist();
@@ -46774,8 +46799,8 @@ function validateStepRequirements(step, availableDatasets) {
46774
46799
 
46775
46800
  // src/workflows/runners.ts
46776
46801
  init_types();
46777
- import { writeFile, mkdir } from "fs/promises";
46778
- import { dirname } from "path";
46802
+ import { writeFile, mkdir, readFile } from "fs/promises";
46803
+ import { dirname, resolve as resolve2 } from "path";
46779
46804
  class HttpSourceRunner {
46780
46805
  validate(spec2) {
46781
46806
  if (!spec2.url) {
@@ -46898,6 +46923,58 @@ class HttpSourceRunner {
46898
46923
  }
46899
46924
  }
46900
46925
 
46926
+ class FileSourceRunner {
46927
+ validate(spec2) {
46928
+ if (!spec2.path) {
46929
+ throw new WorkflowRuntimeError("File source requires path");
46930
+ }
46931
+ }
46932
+ async run(spec2, ctx) {
46933
+ this.validate(spec2);
46934
+ const filePath = resolve2(ctx.workingDir, spec2.path);
46935
+ ctx.log({ message: `Reading file: ${filePath}` });
46936
+ try {
46937
+ const content = await readFile(filePath, "utf-8");
46938
+ const format = spec2.format || "json";
46939
+ const rows = format === "json" ? this.parseJSON(content) : this.parseCSV(content);
46940
+ const limitedRows = ctx.dry && ctx.limit ? rows.slice(0, ctx.limit) : rows;
46941
+ return {
46942
+ name: "file_data",
46943
+ rows: limitedRows
46944
+ };
46945
+ } catch (error45) {
46946
+ throw new WorkflowRuntimeError(`Failed to read file ${spec2.path}: ${error45 instanceof Error ? error45.message : "Unknown error"}`);
46947
+ }
46948
+ }
46949
+ parseJSON(content) {
46950
+ const data = JSON.parse(content);
46951
+ if (Array.isArray(data)) {
46952
+ return data;
46953
+ }
46954
+ if (data.rows && Array.isArray(data.rows)) {
46955
+ return data.rows;
46956
+ }
46957
+ return [data];
46958
+ }
46959
+ parseCSV(content) {
46960
+ const lines = content.trim().split(`
46961
+ `);
46962
+ if (lines.length === 0)
46963
+ return [];
46964
+ const headers = lines[0].split(",").map((h) => h.trim());
46965
+ const rows = [];
46966
+ for (let i = 1;i < lines.length; i++) {
46967
+ const values = lines[i].split(",").map((v) => v.trim());
46968
+ const row = {};
46969
+ for (let j = 0;j < headers.length; j++) {
46970
+ row[headers[j]] = values[j] || "";
46971
+ }
46972
+ rows.push(row);
46973
+ }
46974
+ return rows;
46975
+ }
46976
+ }
46977
+
46901
46978
  class QueryRunner {
46902
46979
  validate(spec2) {
46903
46980
  if (!spec2.eqls || spec2.eqls.trim().length === 0) {
@@ -46930,8 +47007,8 @@ class QueryRunner {
46930
47007
  if (parseResult.errors.length > 0) {
46931
47008
  throw new WorkflowRuntimeError(`Query parsing failed: ${parseResult.errors.map((e) => e.message).join("; ")}`);
46932
47009
  }
46933
- const results = evaluator.evaluate(parseResult.query);
46934
- const rows = this.resultsToRows(results);
47010
+ const results = evaluator.evaluate(parseResult.query, parseResult.meta?.limit);
47011
+ const rows = this.resultsToRows(results, parseResult.projectionMap);
46935
47012
  const limitedRows = ctx.dry && ctx.limit ? rows.slice(0, ctx.limit) : rows;
46936
47013
  return {
46937
47014
  name: spec2.out,
@@ -46941,11 +47018,18 @@ class QueryRunner {
46941
47018
  throw new WorkflowRuntimeError(`Query execution failed: ${error45 instanceof Error ? error45.message : "Unknown error"}`);
46942
47019
  }
46943
47020
  }
46944
- resultsToRows(results) {
46945
- if (results && results.bindings && Array.isArray(results.bindings)) {
46946
- return results.bindings;
47021
+ resultsToRows(results, projectionMap) {
47022
+ if (!results || !results.bindings || !Array.isArray(results.bindings)) {
47023
+ return [];
46947
47024
  }
46948
- return [];
47025
+ return results.bindings.map((binding) => {
47026
+ const row = {};
47027
+ for (const [key, value] of Object.entries(binding)) {
47028
+ const cleanKey = projectionMap?.get(key) || key.replace(/^\?/, "");
47029
+ row[cleanKey] = value;
47030
+ }
47031
+ return row;
47032
+ });
46949
47033
  }
46950
47034
  resolveInputDatasets(spec2, ctx) {
46951
47035
  const datasets = [];
@@ -47060,6 +47144,7 @@ class OutputRunner {
47060
47144
  }
47061
47145
  var BUILTIN_RUNNERS = {
47062
47146
  "source:http": new HttpSourceRunner,
47147
+ "source:file": new FileSourceRunner,
47063
47148
  "query:eqls": new QueryRunner,
47064
47149
  "output:file": new OutputRunner,
47065
47150
  "output:stdout": new OutputRunner
@@ -47070,6 +47155,9 @@ function getRunner(stepType, stepSpec) {
47070
47155
  if (stepSpec.source?.kind === "http") {
47071
47156
  return BUILTIN_RUNNERS["source:http"];
47072
47157
  }
47158
+ if (stepSpec.source?.kind === "file") {
47159
+ return BUILTIN_RUNNERS["source:file"];
47160
+ }
47073
47161
  break;
47074
47162
  case "query":
47075
47163
  return BUILTIN_RUNNERS["query:eqls"];
@@ -47092,7 +47180,7 @@ var LOG_LEVELS = {
47092
47180
 
47093
47181
  // src/workflows/cache.ts
47094
47182
  import { createHash } from "crypto";
47095
- import { readFile, writeFile as writeFile2, mkdir as mkdir2 } from "fs/promises";
47183
+ import { readFile as readFile2, writeFile as writeFile2, mkdir as mkdir2 } from "fs/promises";
47096
47184
  import { existsSync } from "fs";
47097
47185
  import { dirname as dirname2, join } from "path";
47098
47186
  import { gzip, gunzip } from "zlib";
@@ -47111,7 +47199,7 @@ class FileCacheManager {
47111
47199
  if (!existsSync(filePath)) {
47112
47200
  return null;
47113
47201
  }
47114
- const compressed = await readFile(filePath);
47202
+ const compressed = await readFile2(filePath);
47115
47203
  const decompressed = await gunzipAsync(compressed);
47116
47204
  const dataset = JSON.parse(decompressed.toString("utf-8"));
47117
47205
  return dataset;
@@ -47263,13 +47351,15 @@ class WorkflowEngine {
47263
47351
  runId;
47264
47352
  cacheManager;
47265
47353
  events = [];
47354
+ workingDir = process.cwd();
47266
47355
  constructor(options = {}, cacheDir) {
47267
47356
  this.options = options;
47268
47357
  this.runId = `run_${Date.now()}_${Math.random().toString(36).substr(2, 6)}`;
47269
47358
  this.cacheManager = createCacheManager(options.cache || "write", cacheDir);
47270
47359
  }
47271
47360
  async executeWorkflowFile(filePath) {
47272
- const yamlContent = await readFile2(filePath, "utf-8");
47361
+ this.workingDir = dirname3(resolve3(filePath));
47362
+ const yamlContent = await readFile3(filePath, "utf-8");
47273
47363
  const spec2 = parseWorkflow(yamlContent);
47274
47364
  await this.executeWorkflow(spec2);
47275
47365
  }
@@ -47298,14 +47388,7 @@ class WorkflowEngine {
47298
47388
  if (!step) {
47299
47389
  throw new WorkflowRuntimeError(`Step not found: ${stepId}`);
47300
47390
  }
47301
- const startTime = Date.now();
47302
47391
  await this.executeStep(step, env2);
47303
- const duration3 = Date.now() - startTime;
47304
- const dataset = this.datasetsByStepId[stepId];
47305
- const count = dataset?.rows?.length || 0;
47306
- const cacheStatus = this.getCacheStatus(stepId);
47307
- const checkmark = "";
47308
- console.log(`${checkmark}${stepId} ${duration3}ms out=${count} cache:${cacheStatus}`);
47309
47392
  }
47310
47393
  this.logEvent("workflow", "completed", {});
47311
47394
  } catch (error45) {
@@ -47334,6 +47417,7 @@ class WorkflowEngine {
47334
47417
  dry: this.options.dry || false,
47335
47418
  limit: this.options.limit,
47336
47419
  cacheMode: this.options.cache || "write",
47420
+ workingDir: this.workingDir,
47337
47421
  cache: this.cacheManager,
47338
47422
  getDataset: (ref) => this.resolveDataset(ref),
47339
47423
  getDatasetByName: (name17) => this.datasetsByName[name17],
@@ -47361,6 +47445,7 @@ class WorkflowEngine {
47361
47445
  }, (event) => this.logEvent(step.id, "cache", event));
47362
47446
  if (result && step.out) {
47363
47447
  const dataset = result;
47448
+ dataset.name = step.out;
47364
47449
  this.datasetsByName[step.out] = dataset;
47365
47450
  this.datasetsByStepId[step.id] = dataset;
47366
47451
  this.stepOutputNames[step.id] = step.out;
@@ -47429,8 +47514,8 @@ class WorkflowEngine {
47429
47514
  console.log(`${LOG_LEVELS.DONE} [${timestamp}] Workflow completed`);
47430
47515
  } else {
47431
47516
  const duration4 = event.durationMs ? `${event.durationMs}ms` : "";
47432
- const inputRows = event.inputRows ? `${event.inputRows} in` : "";
47433
- const outputRows = event.outputRows ? `${event.outputRows} out` : "";
47517
+ const inputRows = event.inputRows !== undefined ? `${event.inputRows} in` : "";
47518
+ const outputRows = event.outputRows !== undefined ? `${event.outputRows} out` : "";
47434
47519
  const cache = event.cache ? `(${event.cache})` : "";
47435
47520
  const cacheKey = event.cacheKey ? `[${event.cacheKey.slice(0, 8)}]` : "";
47436
47521
  const details = [duration4, inputRows, outputRows, cache, cacheKey].filter(Boolean).join(", ");
@@ -47805,10 +47890,10 @@ workflowCommand.command("plan <file>").description("Show execution plan for a wo
47805
47890
  vars[key] = valueParts.join("=");
47806
47891
  }
47807
47892
  }
47808
- const { readFile: readFile3 } = await import("fs/promises");
47893
+ const { readFile: readFile4 } = await import("fs/promises");
47809
47894
  const { parseWorkflow: parseWorkflow3 } = await Promise.resolve().then(() => (init_parser(), exports_parser));
47810
47895
  const { createExecutionPlan: createExecutionPlan3 } = await Promise.resolve().then(() => (init_planner(), exports_planner));
47811
- const yamlContent = await readFile3(file2, "utf-8");
47896
+ const yamlContent = await readFile4(file2, "utf-8");
47812
47897
  const spec2 = parseWorkflow3(yamlContent);
47813
47898
  const plan = createExecutionPlan3(spec2);
47814
47899
  if (options.dot) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "trellis",
3
- "version": "1.0.4",
3
+ "version": "1.0.6",
4
4
  "description": "Graph database and query engine for Node.js — EAV-based Datalog with natural language support",
5
5
  "main": "./dist/index.js",
6
6
  "module": "./dist/index.js",