@platforma-open/milaboratories.immune-assay-data.workflow 1.4.3 → 1.5.1

This diff shows the changes between publicly released versions of this package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
@@ -1,12 +1,16 @@
1
1
   WARN  Issue while reading "/home/runner/work/immune-assay-data/immune-assay-data/.npmrc". Failed to replace env in config: ${NPMJS_TOKEN}
2
2
 
3
- > @platforma-open/milaboratories.immune-assay-data.workflow@1.4.3 build /home/runner/work/immune-assay-data/immune-assay-data/workflow
3
+ > @platforma-open/milaboratories.immune-assay-data.workflow@1.5.1 build /home/runner/work/immune-assay-data/immune-assay-data/workflow
4
4
  > rm -rf dist && pl-tengo check && pl-tengo build
5
5
 
6
+ Processing "src/build-outputs.tpl.tengo"...
7
+ Processing "src/extract-unique-values.tpl.tengo"...
6
8
  Processing "src/main.tpl.tengo"...
7
9
  Processing "src/run-alignment.tpl.tengo"...
8
10
  No syntax errors found.
9
11
  info: Compiling 'dist'...
12
+ info: - writing /home/runner/work/immune-assay-data/immune-assay-data/workflow/dist/tengo/tpl/build-outputs.plj.gz
13
+ info: - writing /home/runner/work/immune-assay-data/immune-assay-data/workflow/dist/tengo/tpl/extract-unique-values.plj.gz
10
14
  info: - writing /home/runner/work/immune-assay-data/immune-assay-data/workflow/dist/tengo/tpl/run-alignment.plj.gz
11
15
  info: - writing /home/runner/work/immune-assay-data/immune-assay-data/workflow/dist/tengo/tpl/main.plj.gz
12
16
  info: Template Pack build done.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,22 @@
1
1
  # @platforma-open/milaboratories.immune-assay-data.workflow
2
2
 
3
+ ## 1.5.1
4
+
5
+ ### Patch Changes
6
+
7
+ - 35cde6e: Show running state for tables and graphs
8
+ - Updated dependencies [35cde6e]
9
+ - @platforma-open/milaboratories.immune-assay-data.coverage-mode-calc@1.1.3
10
+ - @platforma-open/milaboratories.immune-assay-data.prepare-fasta@1.1.3
11
+ - @platforma-open/milaboratories.immune-assay-data.add-header@1.1.3
12
+ - @platforma-open/milaboratories.immune-assay-data.fasta-to-tsv@1.1.3
13
+
14
+ ## 1.5.0
15
+
16
+ ### Minor Changes
17
+
18
+ - b21c35e: Add isDiscreteFilter and discreteValues annotation to string pcolumn export specs
19
+
3
20
  ## 1.4.3
4
21
 
5
22
  ### Patch Changes
package/dist/index.cjs CHANGED
@@ -1,4 +1,6 @@
1
1
  module.exports = { Templates: {
2
+ 'build-outputs': { type: 'from-file', path: require.resolve('./tengo/tpl/build-outputs.plj.gz') },
3
+ 'extract-unique-values': { type: 'from-file', path: require.resolve('./tengo/tpl/extract-unique-values.plj.gz') },
2
4
  'run-alignment': { type: 'from-file', path: require.resolve('./tengo/tpl/run-alignment.plj.gz') },
3
5
  'main': { type: 'from-file', path: require.resolve('./tengo/tpl/main.plj.gz') }
4
6
  }};
package/dist/index.d.ts CHANGED
@@ -1,4 +1,4 @@
1
1
  declare type TemplateFromFile = { readonly type: "from-file"; readonly path: string; };
2
- declare type TplName = "run-alignment" | "main";
2
+ declare type TplName = "build-outputs" | "extract-unique-values" | "run-alignment" | "main";
3
3
  declare const Templates: Record<TplName, TemplateFromFile>;
4
4
  export { Templates };
package/dist/index.js CHANGED
@@ -1,5 +1,7 @@
1
1
  import { resolve } from 'node:path';
2
2
  export const Templates = {
3
+ 'build-outputs': { type: 'from-file', path: resolve(import.meta.dirname, './tengo/tpl/build-outputs.plj.gz') },
4
+ 'extract-unique-values': { type: 'from-file', path: resolve(import.meta.dirname, './tengo/tpl/extract-unique-values.plj.gz') },
3
5
  'run-alignment': { type: 'from-file', path: resolve(import.meta.dirname, './tengo/tpl/run-alignment.plj.gz') },
4
6
  'main': { type: 'from-file', path: resolve(import.meta.dirname, './tengo/tpl/main.plj.gz') }
5
7
  };
Binary file
Binary file
package/package.json CHANGED
@@ -1,24 +1,22 @@
1
1
  {
2
2
  "name": "@platforma-open/milaboratories.immune-assay-data.workflow",
3
- "version": "1.4.3",
3
+ "version": "1.5.1",
4
4
  "type": "module",
5
- "description": "Block Workflow",
5
+ "description": "Tengo-based template",
6
6
  "dependencies": {
7
- "@platforma-sdk/workflow-tengo": "^5.4.2",
7
+ "@platforma-sdk/workflow-tengo": "5.8.0",
8
8
  "@platforma-open/soedinglab.software-mmseqs2": "^1.17.2",
9
- "@platforma-open/milaboratories.immune-assay-data.add-header": "1.1.2",
10
- "@platforma-open/milaboratories.immune-assay-data.fasta-to-tsv": "1.1.2",
11
- "@platforma-open/milaboratories.immune-assay-data.coverage-mode-calc": "1.1.2",
12
- "@platforma-open/milaboratories.immune-assay-data.prepare-fasta": "1.1.2"
9
+ "@platforma-open/milaboratories.immune-assay-data.prepare-fasta": "1.1.3",
10
+ "@platforma-open/milaboratories.immune-assay-data.add-header": "1.1.3",
11
+ "@platforma-open/milaboratories.immune-assay-data.coverage-mode-calc": "1.1.3",
12
+ "@platforma-open/milaboratories.immune-assay-data.fasta-to-tsv": "1.1.3"
13
13
  },
14
14
  "devDependencies": {
15
- "@platforma-sdk/tengo-builder": "^2.3.2",
16
- "@platforma-sdk/test": "^1.44.19",
17
- "vitest": "^2.1.8"
15
+ "@platforma-sdk/tengo-builder": "2.4.8"
18
16
  },
19
17
  "scripts": {
20
18
  "build": "rm -rf dist && pl-tengo check && pl-tengo build",
21
- "test": "vitest",
22
- "format": "/usr/bin/env emacs --script ./format.el"
19
+ "format": "/usr/bin/env emacs --script ./format.el",
20
+ "do-pack": "rm -f *.tgz && pnpm pack && mv *.tgz package.tgz"
23
21
  }
24
22
  }
@@ -0,0 +1,230 @@
1
+ self := import("@platforma-sdk/workflow-tengo:tpl")
2
+ xsv := import("@platforma-sdk/workflow-tengo:pframes.xsv")
3
+ pframes := import("@platforma-sdk/workflow-tengo:pframes")
4
+ pSpec := import("@platforma-sdk/workflow-tengo:pframes.spec")
5
+ maps := import("@platforma-sdk/workflow-tengo:maps")
6
+ strings := import("@platforma-sdk/workflow-tengo:strings")
7
+
8
+ self.awaitState("uniqueValuesMap", "ResourceReady")
9
+ self.awaitState("datasetSpec", "ResourceReady")
10
+ self.awaitState("assaySequenceType", "ResourceReady")
11
+ self.awaitState("sequenceColumnInfo", "ResourceReady")
12
+
13
+ assayColumnName := func(header) {
14
+ return "pl7.app/vdj/assay-data/" + strings.substituteSpecialCharacters(header)
15
+ }
16
+
17
+ self.body(func(inputs) {
18
+ //////// Building outputs & exports ////////
19
+ assayColumns := [
20
+ {
21
+ column: "seqIdLabel",
22
+ spec: {
23
+ name: "pl7.app/label",
24
+ valueType: "String",
25
+ annotations: {
26
+ "pl7.app/label": "Sequence Id",
27
+ "pl7.app/table/fontFamily": "monospace"
28
+ }
29
+ }
30
+ },
31
+ {
32
+ column: "queryCount",
33
+ spec: {
34
+ name: "pl7.app/vdj/assay/queryCount",
35
+ valueType: "Int",
36
+ annotations: {
37
+ "pl7.app/label": "Matched Clones",
38
+ "pl7.app/table/orderPriority": "9000"
39
+ }
40
+ }
41
+ },
42
+ {
43
+ column: inputs.sequenceColumnInfo.header,
44
+ id: strings.substituteSpecialCharacters(inputs.sequenceColumnInfo.header),
45
+ spec: {
46
+ name: "pl7.app/vdj/sequence",
47
+ valueType: "String",
48
+ domain: {
49
+ "pl7.app/alphabet": inputs.assaySequenceType
50
+ },
51
+ annotations: {
52
+ "pl7.app/label": inputs.sequenceColumnInfo.header,
53
+ "pl7.app/table/fontFamily": "monospace",
54
+ "pl7.app/table/orderPriority": "10000"
55
+ }
56
+ }
57
+ }
58
+ ]
59
+
60
+ columnsToImport := inputs.importColumns
61
+ if inputs.selectedColumns != undefined && len(inputs.selectedColumns) > 0 {
62
+ selectedHeaders := {}
63
+ for header in inputs.selectedColumns {
64
+ selectedHeaders[header] = true
65
+ }
66
+
67
+ filteredColumns := []
68
+ for col in inputs.importColumns {
69
+ // Always include the main sequence column
70
+ if col.header == inputs.sequenceColumnHeader || selectedHeaders[col.header] {
71
+ filteredColumns = append(filteredColumns, col)
72
+ }
73
+ }
74
+ columnsToImport = filteredColumns
75
+ }
76
+
77
+ for h in columnsToImport {
78
+ if h.header == inputs.sequenceColumnHeader {
79
+ continue
80
+ }
81
+ assayColumns = append(assayColumns, {
82
+ column: h.header,
83
+ id: strings.substituteSpecialCharacters(h.header),
84
+ spec: {
85
+ name: assayColumnName(h.header),
86
+ valueType: h.type,
87
+ annotations: {
88
+ "pl7.app/label": h.header,
89
+ "pl7.app/table/orderPriority": "1000"
90
+ }
91
+ }
92
+ })
93
+ }
94
+
95
+ assayImportResults := xsv.importFile(inputs.assayDataTsv, "tsv", {
96
+ axes: [{
97
+ column: "seqId",
98
+ spec: {
99
+ name: "pl7.app/vdj/assay/sequenceId",
100
+ type: "String",
101
+ domain: {
102
+ "pl7.app/blockId": inputs.blockId
103
+ },
104
+ annotations: {
105
+ "pl7.app/label": "Sequence Id",
106
+ "pl7.app/table/fontFamily": "monospace"
107
+ }
108
+ }
109
+ }],
110
+ columns: assayColumns,
111
+ storageFormat: "Parquet",
112
+ partitionKeyLength: 0
113
+ }, {cpu: 1, mem: "16GiB"})
114
+
115
+ // "bits", "evalue", "pident"
116
+ cloneColumns := [
117
+ {
118
+ column: "seqIdLabel",
119
+ spec: {
120
+ name: "pl7.app/vdj/assay/sequenceIdLabel",
121
+ valueType: "String",
122
+ annotations: {
123
+ "pl7.app/label": "Assay Sequence Id",
124
+ "pl7.app/table/fontFamily": "monospace",
125
+ "pl7.app/table/visibility": "optional"
126
+ }
127
+ }
128
+ },
129
+ {
130
+ column: "bits",
131
+ spec: {
132
+ name: "pl7.app/alignment/bitScore",
133
+ valueType: "Float",
134
+ annotations: {
135
+ "pl7.app/label": "Bit Score",
136
+ "pl7.app/table/visibility": "optional"
137
+ }
138
+ }
139
+ },
140
+ {
141
+ column: "evalue",
142
+ spec: {
143
+ name: "pl7.app/alignment/evalue",
144
+ valueType: "Float",
145
+ annotations: {
146
+ "pl7.app/label": "E-value",
147
+ "pl7.app/table/visibility": "optional"
148
+ }
149
+ }
150
+ },
151
+ {
152
+ column: "pident",
153
+ spec: {
154
+ name: "pl7.app/alignment/pident",
155
+ valueType: "Float",
156
+ annotations: {
157
+ "pl7.app/label": "Percentage of identical matches",
158
+ "pl7.app/table/visibility": "optional"
159
+ }
160
+ }
161
+ }]
162
+
163
+ for h in columnsToImport {
164
+ annotations := {
165
+ "pl7.app/label": h.header,
166
+ "pl7.app/table/visibility": h.header == inputs.sequenceColumnHeader ? "optional" : "default"
167
+ }
168
+ if h.type == "String" && h.header != inputs.sequenceColumnHeader {
169
+ annotations["pl7.app/isDiscreteFilter"] = "true"
170
+ annotations["pl7.app/discreteValues"] = inputs.uniqueValuesMap[h.header]
171
+ }
172
+
173
+ cloneColumns = append(cloneColumns, {
174
+ column: h.header,
175
+ id: strings.substituteSpecialCharacters(h.header),
176
+ spec: {
177
+ name: assayColumnName(h.header),
178
+ valueType: h.type,
179
+ annotations: annotations
180
+ }
181
+ })
182
+ }
183
+
184
+ // insert domain
185
+ for col in cloneColumns {
186
+ col.spec.domain = maps.deepMerge(col.spec.domain, {
187
+ "pl7.app/blockId": inputs.blockId
188
+ })
189
+ }
190
+
191
+ cloneImportResults := xsv.importFile(
192
+ inputs.clonesDataTsv, "tsv", {
193
+ axes: [{
194
+ column: "query",
195
+ spec: inputs.datasetSpec.axesSpec[1]
196
+ }],
197
+ columns: cloneColumns,
198
+ storageFormat: "Parquet",
199
+ partitionKeyLength: 0
200
+ },
201
+ { splitDataAndSpec: true, cpu: 1, mem: "16GiB" }
202
+ )
203
+
204
+ // Create informative label with relevant matching parameters
205
+ identityStr := string(inputs.settings.identity)
206
+ coverageStr := string(inputs.settings.coverageThreshold)
207
+ similarityTypeStr := inputs.settings.similarityType == "sequence-identity" ? "Exact Match" : "BLOSUM"
208
+
209
+ traceLabel := "Assay Data (sim:" + similarityTypeStr + ", ident:" + identityStr + ", cov:" + coverageStr + ")"
210
+
211
+ trace := pSpec.makeTrace(inputs.datasetSpec,
212
+ {
213
+ type: "milaboratories.immune-assay-data",
214
+ importance: 40,
215
+ label: traceLabel
216
+ })
217
+
218
+ epfB := pframes.pFrameBuilder()
219
+ for k, v in cloneImportResults {
220
+ epfB.add(k, trace.inject(v.spec), v.data)
221
+ }
222
+ epf := epfB.build()
223
+ assayPframe := pframes.exportFrame(assayImportResults)
224
+
225
+ return {
226
+ assayPframe: assayPframe,
227
+ epf: epf
228
+ }
229
+ })
230
+
@@ -0,0 +1,32 @@
1
+ self := import("@platforma-sdk/workflow-tengo:tpl")
2
+ ll := import("@platforma-sdk/workflow-tengo:ll")
3
+ json := import("json")
4
+ text := import("text")
5
+
6
+ self.defineOutputs("uniqueValuesMap")
7
+
8
+ self.body(func(inputs) {
9
+ fileContents := inputs.fileContents
10
+ uniqueValuesMap := {}
11
+
12
+ for header, contentField in fileContents {
13
+ // In subtemplate, we can call getData() directly on exec fileContent fields
14
+ // (following clonotype-enrichment pattern)
15
+ contentBytes := contentField.getData()
16
+ content := string(contentBytes)
17
+ lines := text.split(text.trim_space(content), "\n")
18
+
19
+ if len(lines) > 1 {
20
+ // Skip header and collect values
21
+ values := lines[1:]
22
+ // JSON encode the array of strings and convert to string
23
+ encodedBytes := json.encode(values)
24
+ uniqueValuesMap[header] = string(encodedBytes)
25
+ }
26
+ }
27
+
28
+ return {
29
+ uniqueValuesMap: uniqueValuesMap
30
+ }
31
+ })
32
+
@@ -14,6 +14,8 @@ text := import("text")
14
14
  render := import("@platforma-sdk/workflow-tengo:render")
15
15
  strings := import("@platforma-sdk/workflow-tengo:strings")
16
16
  runAlignmentTpl := assets.importTemplate(":run-alignment")
17
+ extractUniqueValuesTpl := assets.importTemplate(":extract-unique-values")
18
+ buildOutputsTpl := assets.importTemplate(":build-outputs")
17
19
 
18
20
  prepareFastaSw := assets.importSoftware("@platforma-open/milaboratories.immune-assay-data.prepare-fasta:main")
19
21
  fastaToTsvSw := assets.importSoftware("@platforma-open/milaboratories.immune-assay-data.fasta-to-tsv:main")
@@ -140,10 +142,6 @@ runFastaToTsv := func(fileFasta) {
140
142
  return e.run()
141
143
  }
142
144
 
143
- assayColumnName := func(header) {
144
- return "pl7.app/vdj/assay-data/" + strings.substituteSpecialCharacters(header)
145
- }
146
-
147
145
  wf.body(func(args) {
148
146
  importFile := file.importFile(args.fileHandle)
149
147
  datasetSpec := args.columns.getSpec(args.datasetRef)
@@ -256,6 +254,7 @@ wf.body(func(args) {
256
254
  arg("-o").arg("results_with_header.tsv").
257
255
  addFile("results.tsv", mmseqsOutput).
258
256
  saveFile("results_with_header.tsv").
257
+ saveFileContent("results_with_header.tsv").
259
258
  run()
260
259
 
261
260
  mmseqsResultTsv := addHeaderRunResult.getFile("results_with_header.tsv")
@@ -266,6 +265,7 @@ wf.body(func(args) {
266
265
 
267
266
  assayPframe := undefined
268
267
  epf := undefined
268
+ uniqueValuesMap := undefined
269
269
 
270
270
  if emptyResults {
271
271
  assayPframe = pframes.emptyPFrame()
@@ -327,206 +327,60 @@ wf.body(func(args) {
327
327
  clonesDf.save("clonesData.tsv")
328
328
  ptw = ptw.run()
329
329
 
330
- //////// Building outputs & exports ////////
331
- assayColumns := [
332
- {
333
- column: "seqIdLabel",
334
- spec: {
335
- name: "pl7.app/label",
336
- valueType: "String",
337
- annotations: {
338
- "pl7.app/label": "Sequence Id",
339
- "pl7.app/table/fontFamily": "monospace"
340
- }
341
- }
342
- },
343
- {
344
- column: "queryCount",
345
- spec: {
346
- name: "pl7.app/vdj/assay/queryCount",
347
- valueType: "Int",
348
- annotations: {
349
- "pl7.app/label": "Matched Clones",
350
- "pl7.app/table/orderPriority": "9000"
351
- }
352
- }
353
- },
354
- {
355
- column: sequenceColumnInfo.header,
356
- id: strings.substituteSpecialCharacters(sequenceColumnInfo.header),
357
- spec: {
358
- name: "pl7.app/vdj/sequence",
359
- valueType: "String",
360
- domain: {
361
- "pl7.app/alphabet": assaySequenceType
362
- },
363
- annotations: {
364
- "pl7.app/label": sequenceColumnInfo.header,
365
- "pl7.app/table/fontFamily": "monospace",
366
- "pl7.app/table/orderPriority": "10000"
367
- }
368
- }
369
- }
370
- ]
371
-
372
- columnsToImport := args.importColumns
373
- if args.selectedColumns != undefined && len(args.selectedColumns) > 0 {
374
- selectedHeaders := {}
375
- for header in args.selectedColumns {
376
- selectedHeaders[header] = true
330
+ //////// Extract unique values from String columns ////////
331
+ // Find all String columns
332
+ stringColumns := []
333
+ for h in args.importColumns {
334
+ if h.type == "String" && h.header != args.sequenceColumnHeader {
335
+ stringColumns = append(stringColumns, h.header)
377
336
  }
378
-
379
- filteredColumns := []
380
- for col in args.importColumns {
381
- // Always include the main sequence column
382
- if col.header == args.sequenceColumnHeader || selectedHeaders[col.header] {
383
- filteredColumns = append(filteredColumns, col)
384
- }
385
- }
386
- columnsToImport = filteredColumns
387
337
  }
388
-
389
- for h in columnsToImport {
390
- if h.header == args.sequenceColumnHeader {
391
- continue
392
- }
393
- assayColumns = append(assayColumns, {
394
- column: h.header,
395
- id: strings.substituteSpecialCharacters(h.header),
396
- spec: {
397
- name: assayColumnName(h.header),
398
- valueType: h.type,
399
- annotations: {
400
- "pl7.app/label": h.header,
401
- "pl7.app/table/orderPriority": "1000"
402
- }
403
- }
338
+
339
+ // Extract unique values for all String columns
340
+ fileContentsMap := {}
341
+ if len(stringColumns) > 0 {
342
+ uniqueValuesWf := pt.workflow().mem("4GiB").cpu(1)
343
+ baseDf := uniqueValuesWf.frame({
344
+ file: ptw.getFile("assayData.tsv"),
345
+ xsvType: "tsv"
404
346
  })
405
- }
406
-
407
- assayImportResults := xsv.importFile(ptw.getFile("assayData.tsv"), "tsv", {
408
- axes: [{
409
- column: "seqId",
410
- spec: {
411
- name: "pl7.app/vdj/assay/sequenceId",
412
- type: "String",
413
- domain: {
414
- "pl7.app/blockId": blockId
415
- },
416
- annotations: {
417
- "pl7.app/label": "Sequence Id",
418
- "pl7.app/table/fontFamily": "monospace"
419
- }
420
- }
421
- }],
422
- columns: assayColumns,
423
- storageFormat: "Parquet",
424
- partitionKeyLength: 0
425
- }, {cpu: 1, mem: "16GiB"})
426
-
427
- // "bits", "evalue", "pident"
428
- cloneColumns := [
429
- {
430
- column: "seqIdLabel",
431
- spec: {
432
- name: "pl7.app/vdj/assay/sequenceIdLabel",
433
- valueType: "String",
434
- annotations: {
435
- "pl7.app/label": "Assay Sequence Id",
436
- "pl7.app/table/fontFamily": "monospace",
437
- "pl7.app/table/visibility": "optional"
438
- }
439
- }
440
- },
441
- {
442
- column: "bits",
443
- spec: {
444
- name: "pl7.app/alignment/bitScore",
445
- valueType: "Float",
446
- annotations: {
447
- "pl7.app/label": "Bit Score",
448
- "pl7.app/table/visibility": "optional"
449
- }
347
+ // Process each String column to extract unique values
348
+ for colHeader in stringColumns {
349
+ uniqueValuesDf := baseDf.select(pt.col(colHeader).alias("value")).groupBy("value").agg(pt.col("value").count().alias("_count"))
350
+ uniqueValuesDf = uniqueValuesDf.select("value")
351
+ fileName := "unique_values_" + strings.substituteSpecialCharacters(colHeader) + ".csv"
352
+ uniqueValuesDf.saveContent(fileName)
450
353
  }
451
- },
452
- {
453
- column: "evalue",
454
- spec: {
455
- name: "pl7.app/alignment/evalue",
456
- valueType: "Float",
457
- annotations: {
458
- "pl7.app/label": "E-value",
459
- "pl7.app/table/visibility": "optional"
460
- }
354
+ // Run once and collect all results
355
+ uniqueValuesResult := uniqueValuesWf.run()
356
+ for colHeader in stringColumns {
357
+ fileName := "unique_values_" + strings.substituteSpecialCharacters(colHeader) + ".csv"
358
+ fileContentsMap[colHeader] = uniqueValuesResult.getFileContent(fileName)
461
359
  }
462
- },
463
- {
464
- column: "pident",
465
- spec: {
466
- name: "pl7.app/alignment/pident",
467
- valueType: "Float",
468
- annotations: {
469
- "pl7.app/label": "Percentage of identical matches",
470
- "pl7.app/table/visibility": "optional"
471
- }
472
- }
473
- }]
474
-
475
- for h in columnsToImport {
476
- cloneColumns = append(cloneColumns, {
477
- column: h.header,
478
- id: strings.substituteSpecialCharacters(h.header),
479
- spec: {
480
- name: assayColumnName(h.header),
481
- valueType: h.type,
482
- annotations: {
483
- "pl7.app/label": h.header,
484
- "pl7.app/table/visibility": h.header == args.sequenceColumnHeader ? "optional" : "default"
485
- }
486
- }
487
- })
488
- }
489
-
490
- // insert domain
491
- for col in cloneColumns {
492
- col.spec.domain = maps.deepMerge(col.spec.domain, {
493
- "pl7.app/blockId": blockId
360
+
361
+ // Use subtemplate to extract content (getData() only works in subtemplates)
362
+ extractResult := render.create(extractUniqueValuesTpl, {
363
+ fileContents: fileContentsMap
494
364
  })
365
+ uniqueValuesMap = extractResult.output("uniqueValuesMap")
495
366
  }
496
-
497
- cloneImportResults := xsv.importFile(
498
- ptw.getFile("clonesData.tsv"), "tsv", {
499
- axes: [{
500
- column: "query",
501
- spec: datasetSpec.axesSpec[1]
502
- }],
503
- columns: cloneColumns,
504
- storageFormat: "Parquet",
505
- partitionKeyLength: 0
506
- },
507
- { splitDataAndSpec: true, cpu: 1, mem: "16GiB" }
508
- )
509
-
510
- // Create informative label with relevant matching parameters
511
- identityStr := string(args.settings.identity)
512
- coverageStr := string(args.settings.coverageThreshold)
513
- similarityTypeStr := args.settings.similarityType == "sequence-identity" ? "Exact Match" : "BLOSUM"
514
367
 
515
- traceLabel := "Assay Data (sim:" + similarityTypeStr + ", ident:" + identityStr + ", cov:" + coverageStr + ")"
516
-
517
- trace := pSpec.makeTrace(datasetSpec,
518
- {
519
- type: "milaboratories.immune-assay-data",
520
- importance: 40,
521
- label: traceLabel
522
- })
523
-
524
- epfB := pframes.pFrameBuilder()
525
- for k, v in cloneImportResults {
526
- epfB.add(k, trace.inject(v.spec), v.data)
527
- }
528
- epf = epfB.build()
529
- assayPframe = pframes.exportFrame(assayImportResults)
368
+ //////// Building outputs & exports ////////
369
+ buildOutputsResult := render.createEphemeral(buildOutputsTpl, {
370
+ importColumns: args.importColumns,
371
+ selectedColumns: args.selectedColumns,
372
+ sequenceColumnHeader: args.sequenceColumnHeader,
373
+ sequenceColumnInfo: sequenceColumnInfo,
374
+ assaySequenceType: assaySequenceType,
375
+ blockId: blockId,
376
+ datasetSpec: datasetSpec,
377
+ assayDataTsv: ptw.getFile("assayData.tsv"),
378
+ clonesDataTsv: ptw.getFile("clonesData.tsv"),
379
+ uniqueValuesMap: uniqueValuesMap,
380
+ settings: args.settings
381
+ })
382
+ assayPframe = buildOutputsResult.output("assayPframe")
383
+ epf = buildOutputsResult.output("epf")
530
384
  }
531
385
 
532
386
  result := {
@@ -534,8 +388,7 @@ wf.body(func(args) {
534
388
  dataImportHandle: importFile.handle,
535
389
  table: assayPframe,
536
390
  mmseqsOutput: mmseqsOutput, // @TODO tmp fix to resolve CID conflicts
537
- emptyResults: emptyResults
538
- }
391
+ emptyResults: emptyResults }
539
392
  }
540
393
 
541
394
  if !emptyResults {
@@ -20,7 +20,6 @@ self.body(func(args) {
20
20
  software(mmseqsSw).
21
21
  mem("32GiB").
22
22
  cpu(1).
23
- dontSaveStdoutOrStderr(). // important to avoid CID conflict problems coming from different stdout output on same datasets
24
23
  arg("easy-search").
25
24
  arg("clones.fasta").
26
25
  arg("assay.fasta").
package/index.d.ts DELETED
@@ -1,4 +0,0 @@
1
- declare type TemplateFromFile = { readonly type: "from-file"; readonly path: string; };
2
- declare type TplName = "main";
3
- declare const Templates: Record<TplName, TemplateFromFile>;
4
- export { Templates };
package/index.js DELETED
@@ -1,3 +0,0 @@
1
- module.exports = { Templates: {
2
- 'main': { type: 'from-file', path: require.resolve('./dist/tengo/tpl/main.plj.gz') }
3
- }}
package/tsconfig.json DELETED
@@ -1,16 +0,0 @@
1
- {
2
- "compilerOptions": {
3
- "target": "es2022",
4
- "module": "commonjs",
5
- "moduleResolution": "node",
6
- "esModuleInterop": true,
7
- "strict": true,
8
- "outDir": "./dist",
9
- "rootDir": "./src",
10
- "sourceMap": true,
11
- "declaration": true
12
- },
13
- "types": [],
14
- "include": ["src/**/*"],
15
- "exclude": ["node_modules", "dist"]
16
- }
package/vitest.config.mts DELETED
@@ -1,9 +0,0 @@
1
- import { defineConfig } from 'vitest/config';
2
-
3
- export default defineConfig({
4
- test: {
5
- watch: false,
6
- maxConcurrency: 3,
7
- testTimeout: 5000
8
- }
9
- });