@meshxdata/fops 0.1.30 → 0.1.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +372 -0
- package/package.json +2 -1
- package/src/electron/icon.png +0 -0
- package/src/electron/main.js +24 -0
- package/src/plugins/bundled/fops-plugin-embeddings/index.js +7 -0
- package/src/plugins/bundled/fops-plugin-foundation/index.js +240 -0
- package/src/plugins/bundled/fops-plugin-foundation/lib/align.js +145 -0
- package/src/plugins/bundled/fops-plugin-foundation/lib/client.js +40 -4
- package/src/plugins/bundled/fops-plugin-foundation/lib/tools-write.js +46 -0
- package/src/ui/tui/App.js +13 -5
package/CHANGELOG.md
CHANGED
|
@@ -2,6 +2,378 @@
|
|
|
2
2
|
|
|
3
3
|
All notable changes to @meshxdata/fops (Foundation Operator CLI) are documented here.
|
|
4
4
|
|
|
5
|
+
## [0.1.32] - 2026-03-04
|
|
6
|
+
|
|
7
|
+
- electron app (59ad0bb)
|
|
8
|
+
- compose and fops file plugin (1cf0e81)
|
|
9
|
+
- bump (346ffc1)
|
|
10
|
+
- localhost replaced by 127.0.0.1 (82b9f30)
|
|
11
|
+
- .29 (587b0e1)
|
|
12
|
+
- improve up down and bootstrap script (b79ebaf)
|
|
13
|
+
- checksum (22c8086)
|
|
14
|
+
- checksum (96b434f)
|
|
15
|
+
- checksum (15ed3c0)
|
|
16
|
+
- checksum (8a6543a)
|
|
17
|
+
- bump embed trino linksg (8440504)
|
|
18
|
+
- bump data (765ffd9)
|
|
19
|
+
- bump (cb8b232)
|
|
20
|
+
- broken tests (c532229)
|
|
21
|
+
- release 0.1.18, preflight checks (d902249)
|
|
22
|
+
- fix compute display bug (d10f5d9)
|
|
23
|
+
- cleanup packer files (6330f18)
|
|
24
|
+
- plan mode (cb36a8a)
|
|
25
|
+
- bump to 0.1.16 - agent ui (41ac1a2)
|
|
26
|
+
- bump to 0.1.15 - agent ui (4ebe2e1)
|
|
27
|
+
- bump to 0.1.14 (6c3a7fa)
|
|
28
|
+
- bump to 0.1.13 (8db570f)
|
|
29
|
+
- release 0.1.12 (c1c79e5)
|
|
30
|
+
- bump (11aa3b0)
|
|
31
|
+
- git keep and bump tui (be1678e)
|
|
32
|
+
- skills, index, rrf, compacted context (100k > 10k) (7b2fffd)
|
|
33
|
+
- cloudflare and token consumption, graphs indexing (0ad9eec)
|
|
34
|
+
- bump storage default (22c83ba)
|
|
35
|
+
- storage fix (68a22a0)
|
|
36
|
+
- skills update (7f56500)
|
|
37
|
+
- v9 bump (3864446)
|
|
38
|
+
- bump (c95eedc)
|
|
39
|
+
- rrf (dbf8c95)
|
|
40
|
+
- feat: warning when running predictions (95e8c52)
|
|
41
|
+
- feat: support for local predictions (45cf26b)
|
|
42
|
+
- feat: wip support for predictions + mlflow (3457052)
|
|
43
|
+
- add Reciprocal Rank Fusion (RRF) to knowledge and skill retrieval (61549bc)
|
|
44
|
+
- validate CSV headers in compute_run readiness check (a8c7a43)
|
|
45
|
+
- fix corrupted Iceberg metadata: probe tables + force cleanup on re-apply (50578af)
|
|
46
|
+
- enforce: never use foundation_apply to fix broken products (2e049bf)
|
|
47
|
+
- update SKILL.md with complete tool reference for knowledge retrieval (30b1924)
|
|
48
|
+
- add storage read, input DP table probe, and compute_run improvements (34e6c4c)
|
|
49
|
+
- skills update (1220385)
|
|
50
|
+
- skills update (bb66958)
|
|
51
|
+
- some tui improvement andd tools apply overwrite (e90c35c)
|
|
52
|
+
- skills update (e9227a1)
|
|
53
|
+
- skills update (669c4b3)
|
|
54
|
+
- fix plugin pre-flight checks (f741743)
|
|
55
|
+
- increase agent context (6479aaa)
|
|
56
|
+
- skills and init sql fixes (5fce35e)
|
|
57
|
+
- checksum (3518b56)
|
|
58
|
+
- penging job limit (a139861)
|
|
59
|
+
- checksum (575d28c)
|
|
60
|
+
- bump (92049ba)
|
|
61
|
+
- fix bug per tab status (0a33657)
|
|
62
|
+
- fix bug per tab status (50457c6)
|
|
63
|
+
- checksumming (0ad842e)
|
|
64
|
+
- shot af mardkwon overlapping (51f63b9)
|
|
65
|
+
- add spark dockerfile for multiarch builds (95abbd1)
|
|
66
|
+
- fix plugin initialization (16b9782)
|
|
67
|
+
- split index.js (50902a2)
|
|
68
|
+
- cloudflare cidr (cc4e021)
|
|
69
|
+
- cloduflare restrictions (2f6ba2d)
|
|
70
|
+
- sequential start (86b496e)
|
|
71
|
+
- sequential start (4930fe1)
|
|
72
|
+
- sequential start (353f014)
|
|
73
|
+
- qa tests (2dc6a1a)
|
|
74
|
+
- bump sha for .85 (dc2edfe)
|
|
75
|
+
- preserve env on sudo (7831227)
|
|
76
|
+
- bump sha for .84 (6c052f9)
|
|
77
|
+
- non interactive for azure vms (0aa8a2f)
|
|
78
|
+
- keep .env if present (d072450)
|
|
79
|
+
- bump (7a8e732)
|
|
80
|
+
- ensure opa is on compose if not set (f4a5228)
|
|
81
|
+
- checksum bump (a2ccc20)
|
|
82
|
+
- netrc defensive checks (a0b0ccc)
|
|
83
|
+
- netrc defensive checks (ae37403)
|
|
84
|
+
- checksum (ec45d11)
|
|
85
|
+
- update sync and fix up (7f9af72)
|
|
86
|
+
- expand test for azure and add new per app tag support (388a168)
|
|
87
|
+
- checksum on update (44005fc)
|
|
88
|
+
- cleanup for later (15e5313)
|
|
89
|
+
- cleanup for later (11c9597)
|
|
90
|
+
- switch branch feature (822fecc)
|
|
91
|
+
- add pull (d1c19ab)
|
|
92
|
+
- Bump hono from 4.11.9 to 4.12.0 in /operator-cli (ad25144)
|
|
93
|
+
- tests (f180a9a)
|
|
94
|
+
- cleanup (39c49a3)
|
|
95
|
+
- registry (7b7126a)
|
|
96
|
+
- reconcile kafka (832d0db)
|
|
97
|
+
- gh login bug (025886c)
|
|
98
|
+
- cleanup (bb96cab)
|
|
99
|
+
- strip envs from process (2421180)
|
|
100
|
+
- force use of gh creds not tokens in envs var (fff7787)
|
|
101
|
+
- resolve import between npm installs and npm link (79522e1)
|
|
102
|
+
- fix gh scope and azure states (afd846c)
|
|
103
|
+
- refactoring (da50352)
|
|
104
|
+
- split fops repo (d447638)
|
|
105
|
+
- aks (b791f8f)
|
|
106
|
+
- refactor azure (67d3bad)
|
|
107
|
+
- wildcard (391f023)
|
|
108
|
+
- azure plugin (c074074)
|
|
109
|
+
- zap (d7e6e7f)
|
|
110
|
+
- fix knock (cf89c05)
|
|
111
|
+
- azure (4adec98)
|
|
112
|
+
- Bump tar from 7.5.7 to 7.5.9 in /operator-cli (e41e98e)
|
|
113
|
+
- azure stack index.js split (de12272)
|
|
114
|
+
- Bump ajv from 8.17.1 to 8.18.0 in /operator-cli (76da21f)
|
|
115
|
+
- packer (9665fbc)
|
|
116
|
+
- remove stack api (db0fd4d)
|
|
117
|
+
- packer cleanup (fe1bf14)
|
|
118
|
+
- force refresh token (3a3d7e2)
|
|
119
|
+
- provision shell (2ad505f)
|
|
120
|
+
- azure vm management (91dcb31)
|
|
121
|
+
- azure specific (2b0cca8)
|
|
122
|
+
- azure packer (12175b8)
|
|
123
|
+
- init hashed pwd (db8523c)
|
|
124
|
+
- packer (5b5c7c4)
|
|
125
|
+
- doctor for azure vm (ed524fa)
|
|
126
|
+
- packer and 1pwd (c6d053e)
|
|
127
|
+
- split big index.js (dc85a1b)
|
|
128
|
+
- kafka volume update (21815ec)
|
|
129
|
+
- fix openai azure tools confirmation and flow (0118cd1)
|
|
130
|
+
- nighly fixx, test fix (5e0d04f)
|
|
131
|
+
- open ai training (cdc494a)
|
|
132
|
+
- openai integration in azure (1ca1475)
|
|
133
|
+
- ci (672cea9)
|
|
134
|
+
- refresh ghcr creds (4220c48)
|
|
135
|
+
- cleaned up version (1a0074f)
|
|
136
|
+
- traefik on ghcr and templates (8e31a05)
|
|
137
|
+
- apply fcl (e78911f)
|
|
138
|
+
- demo landscape (dd205fe)
|
|
139
|
+
- smarter login and schema (1af514f)
|
|
140
|
+
- no down before up unless something broke (56b1132)
|
|
141
|
+
- dai, reconcile failed containers (12907fa)
|
|
142
|
+
- reconcile dead container (7da75e4)
|
|
143
|
+
- defensive around storage buckets dir (b98871d)
|
|
144
|
+
- defensive around storage buckets dir (e86e132)
|
|
145
|
+
- gear in for multiarch (bf3fa3e)
|
|
146
|
+
- up autofix (99c7f89)
|
|
147
|
+
- autofix stale containers on up (43c7d0f)
|
|
148
|
+
- shared sessions fix (5de1359)
|
|
149
|
+
- share sessions between ui and tui (8321391)
|
|
150
|
+
- fix chat view display details (e263996)
|
|
151
|
+
- fix chat view display details (9babdda)
|
|
152
|
+
- tui up fixes (86e9f17)
|
|
153
|
+
- fix commands init (442538b)
|
|
154
|
+
- enable k3s profile (b2dcfc8)
|
|
155
|
+
- test up till job creation (656d388)
|
|
156
|
+
- tui fixes (0599779)
|
|
157
|
+
- cleanup (27731f0)
|
|
158
|
+
- train (90bf559)
|
|
159
|
+
- training (f809bf6)
|
|
160
|
+
- training (ba2b836)
|
|
161
|
+
- training (6fc5267)
|
|
162
|
+
- training (4af8ac9)
|
|
163
|
+
- fix build script (bd82836)
|
|
164
|
+
- infra test (5b79815)
|
|
165
|
+
- infra test (3a0ac05)
|
|
166
|
+
- infra test (e5c67b5)
|
|
167
|
+
- tests (ae7b621)
|
|
168
|
+
- tests (c09ae6a)
|
|
169
|
+
- update tui (4784153)
|
|
170
|
+
- training (0a5a330)
|
|
171
|
+
- tui (df4dd4a)
|
|
172
|
+
- pkg builds (4dc9993)
|
|
173
|
+
- also source env for creds (9a17d8f)
|
|
174
|
+
- fcl support (e8a5743)
|
|
175
|
+
- fcl support (8d6b6cd)
|
|
176
|
+
- fcl support (cb76a4a)
|
|
177
|
+
- bump package (df2ee85)
|
|
178
|
+
- add iam mgmt (2d3c294)
|
|
179
|
+
- fix k3s (976ae77)
|
|
180
|
+
- fix trino, add storage plugin (75cb1f4)
|
|
181
|
+
- add project root as config (a2863c6)
|
|
182
|
+
- failure learnings (637ef5c)
|
|
183
|
+
- Apple signed binaries (63a610e)
|
|
184
|
+
- send build info to apple for notary service (300c220)
|
|
185
|
+
- migration failure fixes (c7f0b2f)
|
|
186
|
+
- release to wipe clean pg on duplicate key error (a38bf4d)
|
|
187
|
+
- small fix (a26a674)
|
|
188
|
+
|
|
189
|
+
## [0.1.31] - 2026-03-04
|
|
190
|
+
|
|
191
|
+
- bump (346ffc1)
|
|
192
|
+
- localhost replaced by 127.0.0.1 (82b9f30)
|
|
193
|
+
- .29 (587b0e1)
|
|
194
|
+
- improve up down and bootstrap script (b79ebaf)
|
|
195
|
+
- checksum (22c8086)
|
|
196
|
+
- checksum (96b434f)
|
|
197
|
+
- checksum (15ed3c0)
|
|
198
|
+
- checksum (8a6543a)
|
|
199
|
+
- bump embed trino linksg (8440504)
|
|
200
|
+
- bump data (765ffd9)
|
|
201
|
+
- bump (cb8b232)
|
|
202
|
+
- broken tests (c532229)
|
|
203
|
+
- release 0.1.18, preflight checks (d902249)
|
|
204
|
+
- fix compute display bug (d10f5d9)
|
|
205
|
+
- cleanup packer files (6330f18)
|
|
206
|
+
- plan mode (cb36a8a)
|
|
207
|
+
- bump to 0.1.16 - agent ui (41ac1a2)
|
|
208
|
+
- bump to 0.1.15 - agent ui (4ebe2e1)
|
|
209
|
+
- bump to 0.1.14 (6c3a7fa)
|
|
210
|
+
- bump to 0.1.13 (8db570f)
|
|
211
|
+
- release 0.1.12 (c1c79e5)
|
|
212
|
+
- bump (11aa3b0)
|
|
213
|
+
- git keep and bump tui (be1678e)
|
|
214
|
+
- skills, index, rrf, compacted context (100k > 10k) (7b2fffd)
|
|
215
|
+
- cloudflare and token consumption, graphs indexing (0ad9eec)
|
|
216
|
+
- bump storage default (22c83ba)
|
|
217
|
+
- storage fix (68a22a0)
|
|
218
|
+
- skills update (7f56500)
|
|
219
|
+
- v9 bump (3864446)
|
|
220
|
+
- bump (c95eedc)
|
|
221
|
+
- rrf (dbf8c95)
|
|
222
|
+
- feat: warning when running predictions (95e8c52)
|
|
223
|
+
- feat: support for local predictions (45cf26b)
|
|
224
|
+
- feat: wip support for predictions + mlflow (3457052)
|
|
225
|
+
- add Reciprocal Rank Fusion (RRF) to knowledge and skill retrieval (61549bc)
|
|
226
|
+
- validate CSV headers in compute_run readiness check (a8c7a43)
|
|
227
|
+
- fix corrupted Iceberg metadata: probe tables + force cleanup on re-apply (50578af)
|
|
228
|
+
- enforce: never use foundation_apply to fix broken products (2e049bf)
|
|
229
|
+
- update SKILL.md with complete tool reference for knowledge retrieval (30b1924)
|
|
230
|
+
- add storage read, input DP table probe, and compute_run improvements (34e6c4c)
|
|
231
|
+
- skills update (1220385)
|
|
232
|
+
- skills update (bb66958)
|
|
233
|
+
- some tui improvement andd tools apply overwrite (e90c35c)
|
|
234
|
+
- skills update (e9227a1)
|
|
235
|
+
- skills update (669c4b3)
|
|
236
|
+
- fix plugin pre-flight checks (f741743)
|
|
237
|
+
- increase agent context (6479aaa)
|
|
238
|
+
- skills and init sql fixes (5fce35e)
|
|
239
|
+
- checksum (3518b56)
|
|
240
|
+
- penging job limit (a139861)
|
|
241
|
+
- checksum (575d28c)
|
|
242
|
+
- bump (92049ba)
|
|
243
|
+
- fix bug per tab status (0a33657)
|
|
244
|
+
- fix bug per tab status (50457c6)
|
|
245
|
+
- checksumming (0ad842e)
|
|
246
|
+
- shot af mardkwon overlapping (51f63b9)
|
|
247
|
+
- add spark dockerfile for multiarch builds (95abbd1)
|
|
248
|
+
- fix plugin initialization (16b9782)
|
|
249
|
+
- split index.js (50902a2)
|
|
250
|
+
- cloudflare cidr (cc4e021)
|
|
251
|
+
- cloduflare restrictions (2f6ba2d)
|
|
252
|
+
- sequential start (86b496e)
|
|
253
|
+
- sequential start (4930fe1)
|
|
254
|
+
- sequential start (353f014)
|
|
255
|
+
- qa tests (2dc6a1a)
|
|
256
|
+
- bump sha for .85 (dc2edfe)
|
|
257
|
+
- preserve env on sudo (7831227)
|
|
258
|
+
- bump sha for .84 (6c052f9)
|
|
259
|
+
- non interactive for azure vms (0aa8a2f)
|
|
260
|
+
- keep .env if present (d072450)
|
|
261
|
+
- bump (7a8e732)
|
|
262
|
+
- ensure opa is on compose if not set (f4a5228)
|
|
263
|
+
- checksum bump (a2ccc20)
|
|
264
|
+
- netrc defensive checks (a0b0ccc)
|
|
265
|
+
- netrc defensive checks (ae37403)
|
|
266
|
+
- checksum (ec45d11)
|
|
267
|
+
- update sync and fix up (7f9af72)
|
|
268
|
+
- expand test for azure and add new per app tag support (388a168)
|
|
269
|
+
- checksum on update (44005fc)
|
|
270
|
+
- cleanup for later (15e5313)
|
|
271
|
+
- cleanup for later (11c9597)
|
|
272
|
+
- switch branch feature (822fecc)
|
|
273
|
+
- add pull (d1c19ab)
|
|
274
|
+
- Bump hono from 4.11.9 to 4.12.0 in /operator-cli (ad25144)
|
|
275
|
+
- tests (f180a9a)
|
|
276
|
+
- cleanup (39c49a3)
|
|
277
|
+
- registry (7b7126a)
|
|
278
|
+
- reconcile kafka (832d0db)
|
|
279
|
+
- gh login bug (025886c)
|
|
280
|
+
- cleanup (bb96cab)
|
|
281
|
+
- strip envs from process (2421180)
|
|
282
|
+
- force use of gh creds not tokens in envs var (fff7787)
|
|
283
|
+
- resolve import between npm installs and npm link (79522e1)
|
|
284
|
+
- fix gh scope and azure states (afd846c)
|
|
285
|
+
- refactoring (da50352)
|
|
286
|
+
- split fops repo (d447638)
|
|
287
|
+
- aks (b791f8f)
|
|
288
|
+
- refactor azure (67d3bad)
|
|
289
|
+
- wildcard (391f023)
|
|
290
|
+
- azure plugin (c074074)
|
|
291
|
+
- zap (d7e6e7f)
|
|
292
|
+
- fix knock (cf89c05)
|
|
293
|
+
- azure (4adec98)
|
|
294
|
+
- Bump tar from 7.5.7 to 7.5.9 in /operator-cli (e41e98e)
|
|
295
|
+
- azure stack index.js split (de12272)
|
|
296
|
+
- Bump ajv from 8.17.1 to 8.18.0 in /operator-cli (76da21f)
|
|
297
|
+
- packer (9665fbc)
|
|
298
|
+
- remove stack api (db0fd4d)
|
|
299
|
+
- packer cleanup (fe1bf14)
|
|
300
|
+
- force refresh token (3a3d7e2)
|
|
301
|
+
- provision shell (2ad505f)
|
|
302
|
+
- azure vm management (91dcb31)
|
|
303
|
+
- azure specific (2b0cca8)
|
|
304
|
+
- azure packer (12175b8)
|
|
305
|
+
- init hashed pwd (db8523c)
|
|
306
|
+
- packer (5b5c7c4)
|
|
307
|
+
- doctor for azure vm (ed524fa)
|
|
308
|
+
- packer and 1pwd (c6d053e)
|
|
309
|
+
- split big index.js (dc85a1b)
|
|
310
|
+
- kafka volume update (21815ec)
|
|
311
|
+
- fix openai azure tools confirmation and flow (0118cd1)
|
|
312
|
+
- nighly fixx, test fix (5e0d04f)
|
|
313
|
+
- open ai training (cdc494a)
|
|
314
|
+
- openai integration in azure (1ca1475)
|
|
315
|
+
- ci (672cea9)
|
|
316
|
+
- refresh ghcr creds (4220c48)
|
|
317
|
+
- cleaned up version (1a0074f)
|
|
318
|
+
- traefik on ghcr and templates (8e31a05)
|
|
319
|
+
- apply fcl (e78911f)
|
|
320
|
+
- demo landscape (dd205fe)
|
|
321
|
+
- smarter login and schema (1af514f)
|
|
322
|
+
- no down before up unless something broke (56b1132)
|
|
323
|
+
- dai, reconcile failed containers (12907fa)
|
|
324
|
+
- reconcile dead container (7da75e4)
|
|
325
|
+
- defensive around storage buckets dir (b98871d)
|
|
326
|
+
- defensive around storage buckets dir (e86e132)
|
|
327
|
+
- gear in for multiarch (bf3fa3e)
|
|
328
|
+
- up autofix (99c7f89)
|
|
329
|
+
- autofix stale containers on up (43c7d0f)
|
|
330
|
+
- shared sessions fix (5de1359)
|
|
331
|
+
- share sessions between ui and tui (8321391)
|
|
332
|
+
- fix chat view display details (e263996)
|
|
333
|
+
- fix chat view display details (9babdda)
|
|
334
|
+
- tui up fixes (86e9f17)
|
|
335
|
+
- fix commands init (442538b)
|
|
336
|
+
- enable k3s profile (b2dcfc8)
|
|
337
|
+
- test up till job creation (656d388)
|
|
338
|
+
- tui fixes (0599779)
|
|
339
|
+
- cleanup (27731f0)
|
|
340
|
+
- train (90bf559)
|
|
341
|
+
- training (f809bf6)
|
|
342
|
+
- training (ba2b836)
|
|
343
|
+
- training (6fc5267)
|
|
344
|
+
- training (4af8ac9)
|
|
345
|
+
- fix build script (bd82836)
|
|
346
|
+
- infra test (5b79815)
|
|
347
|
+
- infra test (3a0ac05)
|
|
348
|
+
- infra test (e5c67b5)
|
|
349
|
+
- tests (ae7b621)
|
|
350
|
+
- tests (c09ae6a)
|
|
351
|
+
- update tui (4784153)
|
|
352
|
+
- training (0a5a330)
|
|
353
|
+
- tui (df4dd4a)
|
|
354
|
+
- pkg builds (4dc9993)
|
|
355
|
+
- also source env for creds (9a17d8f)
|
|
356
|
+
- fcl support (e8a5743)
|
|
357
|
+
- fcl support (8d6b6cd)
|
|
358
|
+
- fcl support (cb76a4a)
|
|
359
|
+
- bump package (df2ee85)
|
|
360
|
+
- add iam mgmt (2d3c294)
|
|
361
|
+
- fix k3s (976ae77)
|
|
362
|
+
- fix trino, add storage plugin (75cb1f4)
|
|
363
|
+
- add project root as config (a2863c6)
|
|
364
|
+
- failure learnings (637ef5c)
|
|
365
|
+
- Apple signed binaries (63a610e)
|
|
366
|
+
- send build info to apple for notary service (300c220)
|
|
367
|
+
- migration failure fixes (c7f0b2f)
|
|
368
|
+
- release to wipe clean pg on duplicate key error (a38bf4d)
|
|
369
|
+
- small fix (a26a674)
|
|
370
|
+
- cleanup (ca7405d)
|
|
371
|
+
- bump packages (59723b7)
|
|
372
|
+
|
|
373
|
+
# Changelog
|
|
374
|
+
|
|
375
|
+
All notable changes to @meshxdata/fops (Foundation Operator CLI) are documented here.
|
|
376
|
+
|
|
5
377
|
## [0.1.30] - 2026-03-04
|
|
6
378
|
|
|
7
379
|
- localhost replaced by 127.0.0.1 (82b9f30)
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@meshxdata/fops",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.32",
|
|
4
4
|
"description": "CLI to install and manage data mesh platforms",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"fops",
|
|
@@ -46,6 +46,7 @@
|
|
|
46
46
|
"chalk": "^5.3.0",
|
|
47
47
|
"commander": "^12.0.0",
|
|
48
48
|
"dataloader": "^2.2.3",
|
|
49
|
+
"electron": "^34.5.8",
|
|
49
50
|
"execa": "^9.5.2",
|
|
50
51
|
"graphql": "^16.13.0",
|
|
51
52
|
"hcl2-parser": "^1.0.3",
|
|
Binary file
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
const { app, BrowserWindow } = require("electron");
|
|
2
|
+
|
|
3
|
+
const url = process.env.FOUNDATION_URL || "http://127.0.0.1:3002";
|
|
4
|
+
|
|
5
|
+
function createWindow() {
|
|
6
|
+
const win = new BrowserWindow({
|
|
7
|
+
width: 1400,
|
|
8
|
+
height: 900,
|
|
9
|
+
title: "Foundation",
|
|
10
|
+
webPreferences: {
|
|
11
|
+
nodeIntegration: false,
|
|
12
|
+
contextIsolation: true,
|
|
13
|
+
},
|
|
14
|
+
});
|
|
15
|
+
|
|
16
|
+
win.loadURL(url);
|
|
17
|
+
win.setMenuBarVisibility(false);
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
app.whenReady().then(createWindow);
|
|
21
|
+
|
|
22
|
+
app.on("window-all-closed", () => {
|
|
23
|
+
app.quit();
|
|
24
|
+
});
|
|
@@ -260,6 +260,13 @@ export default {
|
|
|
260
260
|
return 0;
|
|
261
261
|
}
|
|
262
262
|
},
|
|
263
|
+
async embedTexts(texts) {
|
|
264
|
+
if (!texts || texts.length === 0) return [];
|
|
265
|
+
if (!embeddingClient.isModelCached()) return [];
|
|
266
|
+
try {
|
|
267
|
+
return await embeddingClient.embed(texts);
|
|
268
|
+
} catch { return []; }
|
|
269
|
+
},
|
|
263
270
|
});
|
|
264
271
|
|
|
265
272
|
// NOTE: ONNX models are loaded lazily on first query, NOT eagerly at startup.
|
|
@@ -254,6 +254,245 @@ export function register(api) {
|
|
|
254
254
|
process.exit(1);
|
|
255
255
|
}
|
|
256
256
|
});
|
|
257
|
+
|
|
258
|
+
// ── align ─────────────────────────────────────────────────────────────
|
|
259
|
+
foundation
|
|
260
|
+
.command("align <source> [target]")
|
|
261
|
+
.description("Align source column names to target schema columns (exact → embedding → levenshtein)")
|
|
262
|
+
.option("--threshold <number>", "Similarity threshold 0–1 (default 0.7)", "0.7")
|
|
263
|
+
.option("--json", "Output raw JSON")
|
|
264
|
+
.action(async (source, target, opts) => {
|
|
265
|
+
const { alignColumns } = await import("./lib/align.js");
|
|
266
|
+
const { fetchEntityColumns } = await import("./lib/tools-write.js");
|
|
267
|
+
const { statSync, readdirSync, readFileSync } = await import("node:fs");
|
|
268
|
+
const threshold = Number.parseFloat(opts.threshold) || 0.7;
|
|
269
|
+
|
|
270
|
+
// Resolve source columns — directory of CSVs, comma-separated names, or entity identifier
|
|
271
|
+
const isDirectory = (s) => { try { return statSync(s).isDirectory(); } catch { return false; } };
|
|
272
|
+
const isIdentifier = (s) => s && !/,/.test(s) && !/\s/.test(s.trim());
|
|
273
|
+
|
|
274
|
+
let sourceCols;
|
|
275
|
+
if (isDirectory(source)) {
|
|
276
|
+
const csvFiles = readdirSync(source).filter((f) => /\.csv$/i.test(f));
|
|
277
|
+
if (csvFiles.length === 0) {
|
|
278
|
+
console.error(ERR(` ✗ No CSV files found in: ${source}`));
|
|
279
|
+
process.exit(1);
|
|
280
|
+
}
|
|
281
|
+
const seen = new Set();
|
|
282
|
+
for (const file of csvFiles) {
|
|
283
|
+
try {
|
|
284
|
+
const firstLine = readFileSync(path.join(source, file), "utf8").split("\n")[0]?.trim();
|
|
285
|
+
if (!firstLine) continue;
|
|
286
|
+
for (const col of firstLine.split(",").map((c) => c.trim().replace(/^["']|["']$/g, ""))) {
|
|
287
|
+
if (col) seen.add(col);
|
|
288
|
+
}
|
|
289
|
+
} catch { /* skip unreadable file */ }
|
|
290
|
+
}
|
|
291
|
+
sourceCols = [...seen];
|
|
292
|
+
console.log(DIM(` Reading ${csvFiles.length} CSV file(s) from ${source} — ${sourceCols.length} unique columns`));
|
|
293
|
+
} else if (isIdentifier(source)) {
|
|
294
|
+
sourceCols = await fetchEntityColumns(client, source.trim(), "data_object");
|
|
295
|
+
} else {
|
|
296
|
+
sourceCols = source.split(",").map((c) => c.trim()).filter(Boolean);
|
|
297
|
+
}
|
|
298
|
+
if (!sourceCols || sourceCols.length === 0) {
|
|
299
|
+
console.error(ERR(` ✗ Could not resolve source columns from: ${source}`));
|
|
300
|
+
process.exit(1);
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
// Get embeddings service (used for both target inference and column alignment)
|
|
304
|
+
let embSvc;
|
|
305
|
+
try {
|
|
306
|
+
embSvc = typeof api.getService === "function" ? api.getService("embeddings:search") : null;
|
|
307
|
+
} catch { /* embeddings not available */ }
|
|
308
|
+
const embedTexts = embSvc?.embedTexts ? embSvc.embedTexts.bind(embSvc) : null;
|
|
309
|
+
|
|
310
|
+
// Resolve target — explicit arg, or auto-infer from landscape
|
|
311
|
+
let targetCols;
|
|
312
|
+
let resolvedTarget = target;
|
|
313
|
+
if (!target) {
|
|
314
|
+
// Try embedding search first: query = unique source column names joined
|
|
315
|
+
const query = sourceCols.join(" ");
|
|
316
|
+
let dpIdentifier = null;
|
|
317
|
+
let dpName = null;
|
|
318
|
+
if (embSvc?.search && embSvc.hasEntries("landscape")) {
|
|
319
|
+
try {
|
|
320
|
+
const hits = await embSvc.search(query, { topK: 10, source: "landscape" });
|
|
321
|
+
const dpHit = hits.find((h) => h.metadata?.entityType === "data_product");
|
|
322
|
+
if (dpHit) {
|
|
323
|
+
dpIdentifier = dpHit.metadata.identifier;
|
|
324
|
+
dpName = dpHit.title?.replace(/^product\//, "") || dpIdentifier;
|
|
325
|
+
}
|
|
326
|
+
} catch { /* fall through */ }
|
|
327
|
+
}
|
|
328
|
+
// Fallback: list all data products, auto-select if only one
|
|
329
|
+
if (!dpIdentifier) {
|
|
330
|
+
try {
|
|
331
|
+
const res = await client.get("/data/data_product/list?per_page=100");
|
|
332
|
+
const products = res?.data || res?.items || res || [];
|
|
333
|
+
if (products.length === 1) {
|
|
334
|
+
dpIdentifier = products[0].identifier;
|
|
335
|
+
dpName = products[0].name || dpIdentifier;
|
|
336
|
+
} else if (products.length > 1) {
|
|
337
|
+
console.error(ERR(" ✗ Multiple data products found — specify a target:"));
|
|
338
|
+
for (const p of products) console.error(DIM(` ${p.identifier} ${p.name || ""}`));
|
|
339
|
+
process.exit(1);
|
|
340
|
+
} else {
|
|
341
|
+
console.error(ERR(" ✗ No data products found in the landscape. Specify a target explicitly."));
|
|
342
|
+
process.exit(1);
|
|
343
|
+
}
|
|
344
|
+
} catch (e) {
|
|
345
|
+
console.error(ERR(` ✗ Could not list data products: ${e.message}`));
|
|
346
|
+
process.exit(1);
|
|
347
|
+
}
|
|
348
|
+
}
|
|
349
|
+
console.log(DIM(` Auto-selected target: ${dpName} (${dpIdentifier})`));
|
|
350
|
+
resolvedTarget = dpIdentifier;
|
|
351
|
+
targetCols = await fetchEntityColumns(client, dpIdentifier, "data_product");
|
|
352
|
+
} else if (isIdentifier(target)) {
|
|
353
|
+
targetCols = await fetchEntityColumns(client, target.trim(), "data_product");
|
|
354
|
+
} else {
|
|
355
|
+
targetCols = target.split(",").map((c) => c.trim()).filter(Boolean);
|
|
356
|
+
}
|
|
357
|
+
if (!targetCols || targetCols.length === 0) {
|
|
358
|
+
console.error(ERR(` ✗ Could not resolve target columns from: ${resolvedTarget}`));
|
|
359
|
+
process.exit(1);
|
|
360
|
+
}
|
|
361
|
+
|
|
362
|
+
const result = await alignColumns(sourceCols, targetCols, { embedTexts, threshold });
|
|
363
|
+
|
|
364
|
+
if (opts.json) {
|
|
365
|
+
console.log(JSON.stringify(result, null, 2));
|
|
366
|
+
return;
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
// Formatted table output
|
|
370
|
+
const title = `Column Alignment: ${source} → ${resolvedTarget}`;
|
|
371
|
+
banner(title);
|
|
372
|
+
console.log();
|
|
373
|
+
|
|
374
|
+
if (result.mappings.length === 0) {
|
|
375
|
+
console.log(WARN(" No mappings found above threshold."));
|
|
376
|
+
} else {
|
|
377
|
+
const COL_W = 20, EXP_W = 24, MET_W = 12;
|
|
378
|
+
console.log(DIM(` ${"Source".padEnd(COL_W)} ${"Expected".padEnd(EXP_W)} ${"Method".padEnd(MET_W)} Confidence`));
|
|
379
|
+
console.log(DIM(" " + "─".repeat(COL_W + EXP_W + MET_W + 20)));
|
|
380
|
+
for (const m of result.mappings) {
|
|
381
|
+
const conf = `${Math.round(m.confidence * 100)}%`;
|
|
382
|
+
const methodColor = m.method === "exact" ? OK : m.method === "embedding" ? ACCENT : WARN;
|
|
383
|
+
console.log(
|
|
384
|
+
` ${m.sourceColumn.slice(0, COL_W - 1).padEnd(COL_W)} ${m.expectedColumn.slice(0, EXP_W - 1).padEnd(EXP_W)} ${methodColor(m.method.padEnd(MET_W))} ${conf}`,
|
|
385
|
+
);
|
|
386
|
+
}
|
|
387
|
+
}
|
|
388
|
+
|
|
389
|
+
if (result.unmappedSource.length > 0) {
|
|
390
|
+
console.log(WARN(`\n Unmapped source: ${result.unmappedSource.join(", ")}`));
|
|
391
|
+
}
|
|
392
|
+
if (result.unmappedExpected.length > 0) {
|
|
393
|
+
console.log(WARN(` Unmapped expected: ${result.unmappedExpected.join(", ")}`));
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
if (result.suggestedTransformation) {
|
|
397
|
+
console.log(ACCENT("\n Suggested rename_column step:"));
|
|
398
|
+
console.log(` ${JSON.stringify(result.suggestedTransformation.mapping, null, 2).replace(/\n/g, "\n ")}`);
|
|
399
|
+
}
|
|
400
|
+
console.log();
|
|
401
|
+
});
|
|
402
|
+
|
|
403
|
+
// ── run ───────────────────────────────────────────────────────────────────
|
|
404
|
+
foundation
|
|
405
|
+
.command("run")
|
|
406
|
+
.description("Open the Foundation UI in an Electron window")
|
|
407
|
+
.option("--url <url>", "Override the frontend URL")
|
|
408
|
+
.action(async (opts) => {
|
|
409
|
+
const { createRequire } = await import("node:module");
|
|
410
|
+
const { spawn } = await import("node:child_process");
|
|
411
|
+
const { readFileSync } = await import("node:fs");
|
|
412
|
+
const { findComposeRoot } = await import("./lib/tools-write.js");
|
|
413
|
+
|
|
414
|
+
// Resolve frontend URL: flag → env → .env file → default
|
|
415
|
+
let frontendUrl = opts.url || process.env.FOUNDATION_PUBLIC_URL;
|
|
416
|
+
if (!frontendUrl) {
|
|
417
|
+
const root = program._fopsRoot || findComposeRoot();
|
|
418
|
+
if (root) {
|
|
419
|
+
try {
|
|
420
|
+
const envContent = readFileSync(`${root}/.env`, "utf8");
|
|
421
|
+
const m = envContent.match(/^FOUNDATION_PUBLIC_URL=(.+)$/m);
|
|
422
|
+
if (m) frontendUrl = m[1].trim();
|
|
423
|
+
} catch { /* .env not found */ }
|
|
424
|
+
}
|
|
425
|
+
}
|
|
426
|
+
frontendUrl = frontendUrl || "http://127.0.0.1:3002";
|
|
427
|
+
|
|
428
|
+
// Resolve electron binary + fops root (via realpathSync to follow node_modules symlink)
|
|
429
|
+
const { writeFileSync, realpathSync } = await import("node:fs");
|
|
430
|
+
const { tmpdir } = await import("node:os");
|
|
431
|
+
const { join, resolve, dirname } = await import("node:path");
|
|
432
|
+
|
|
433
|
+
let electronBin;
|
|
434
|
+
let iconPath;
|
|
435
|
+
try {
|
|
436
|
+
const req = createRequire(import.meta.url);
|
|
437
|
+
electronBin = req("electron");
|
|
438
|
+
// Resolve real path of electron pkg to find the fops install root
|
|
439
|
+
const realElectronPkg = realpathSync(req.resolve("electron/package.json"));
|
|
440
|
+
const fopsRoot = resolve(dirname(realElectronPkg), "../..");
|
|
441
|
+
iconPath = join(fopsRoot, "src/electron/icon.png");
|
|
442
|
+
} catch {
|
|
443
|
+
console.error(ERR(" ✗ Electron is not installed. Run: npm install -g electron"));
|
|
444
|
+
process.exit(1);
|
|
445
|
+
}
|
|
446
|
+
|
|
447
|
+
// Write main script to a temp file — the plugin runs from ~/.fops/plugins/
|
|
448
|
+
// so relative path resolution from import.meta.url is not reliable
|
|
449
|
+
const mainScript = join(tmpdir(), "fops-electron-main.js");
|
|
450
|
+
writeFileSync(mainScript, `
|
|
451
|
+
const { app, BrowserWindow, nativeImage } = require("electron");
|
|
452
|
+
const url = process.env.FOUNDATION_URL || "http://127.0.0.1:3002";
|
|
453
|
+
const iconPath = process.env.FOUNDATION_ICON;
|
|
454
|
+
|
|
455
|
+
// Enable macOS platform authenticator (passkeys + TouchID)
|
|
456
|
+
app.commandLine.appendSwitch("enable-features", "WebAuthenticationTouchId,WebAuthenticationCable");
|
|
457
|
+
|
|
458
|
+
app.whenReady().then(() => {
|
|
459
|
+
if (iconPath) {
|
|
460
|
+
try {
|
|
461
|
+
const icon = nativeImage.createFromPath(iconPath);
|
|
462
|
+
if (process.platform === "darwin") app.dock.setIcon(icon);
|
|
463
|
+
} catch {}
|
|
464
|
+
}
|
|
465
|
+
|
|
466
|
+
const win = new BrowserWindow({
|
|
467
|
+
width: 1400, height: 900, title: "Foundation",
|
|
468
|
+
icon: iconPath || undefined,
|
|
469
|
+
webPreferences: {
|
|
470
|
+
nodeIntegration: false,
|
|
471
|
+
contextIsolation: true,
|
|
472
|
+
},
|
|
473
|
+
});
|
|
474
|
+
|
|
475
|
+
// Allow Google OAuth and other auth popups to open
|
|
476
|
+
win.webContents.setWindowOpenHandler(({ url: popupUrl }) => {
|
|
477
|
+
return { action: "allow" };
|
|
478
|
+
});
|
|
479
|
+
|
|
480
|
+
win.loadURL(url);
|
|
481
|
+
win.setMenuBarVisibility(false);
|
|
482
|
+
});
|
|
483
|
+
|
|
484
|
+
app.on("window-all-closed", () => app.quit());
|
|
485
|
+
`);
|
|
486
|
+
|
|
487
|
+
console.log(ACCENT(` Opening Foundation at ${frontendUrl}`));
|
|
488
|
+
|
|
489
|
+
const child = spawn(electronBin, [mainScript], {
|
|
490
|
+
stdio: "ignore",
|
|
491
|
+
env: { ...process.env, FOUNDATION_URL: frontendUrl, FOUNDATION_ICON: iconPath || "" },
|
|
492
|
+
detached: true,
|
|
493
|
+
});
|
|
494
|
+
child.unref();
|
|
495
|
+
});
|
|
257
496
|
});
|
|
258
497
|
|
|
259
498
|
// ─── Landscape cache state (used by tools + service) ────────────────────
|
|
@@ -479,6 +718,7 @@ foundation_entity is only for: SQL query action, or if foundation_graphql return
|
|
|
479
718
|
"foundation_iam",
|
|
480
719
|
"foundation_iam_manage",
|
|
481
720
|
"foundation_graphql",
|
|
721
|
+
"foundation_align",
|
|
482
722
|
"memory",
|
|
483
723
|
],
|
|
484
724
|
maxIterations: resolveFoundationMaxIterations(),
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Column alignment: maps source CSV column names to expected schema column names.
|
|
3
|
+
* Three-pass: exact → embedding (if model available) → Levenshtein ratio fallback.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
export function normalizeColName(s) {
|
|
7
|
+
return String(s || "")
|
|
8
|
+
.toLowerCase()
|
|
9
|
+
.replace(/\s+/g, "_")
|
|
10
|
+
.replace(/[^a-z0-9_]/g, "");
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
/**
 * Cosine similarity of two equal-length numeric vectors.
 * Returns 0 when either vector has zero magnitude (avoids divide-by-zero).
 *
 * @param {number[]} a
 * @param {number[]} b
 * @returns {number} Similarity in [-1, 1] for non-degenerate inputs.
 */
function cosineSimilarity(a, b) {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  a.forEach((v, idx) => {
    const w = b[idx];
    dot += v * w;
    normA += v * v;
    normB += w * w;
  });
  const magnitude = Math.sqrt(normA) * Math.sqrt(normB);
  if (magnitude === 0) return 0;
  return dot / magnitude;
}
|
|
23
|
+
|
|
24
|
+
/**
 * Normalized Levenshtein similarity in [0, 1]: 1 means identical strings,
 * 0 means an empty side (or maximal distance). Defined as
 * 1 - editDistance(a, b) / max(len(a), len(b)).
 *
 * Uses a two-row rolling DP buffer, so memory is O(n) instead of the full
 * O(m*n) matrix the naive formulation allocates — same distances, same
 * return values.
 *
 * @param {string} a
 * @param {string} b
 * @returns {number} Similarity ratio in [0, 1].
 */
function levenshteinRatio(a, b) {
  const x = String(a || "");
  const y = String(b || "");
  if (x === y) return 1;
  const m = x.length;
  const n = y.length;
  if (m === 0 || n === 0) return 0;
  // prev[j] = edit distance between x[0..i-1] and y[0..j-1] for the previous i.
  let prev = Array.from({ length: n + 1 }, (_, j) => j);
  let curr = new Array(n + 1).fill(0);
  for (let i = 1; i <= m; i++) {
    curr[0] = i; // distance from x[0..i] to the empty prefix of y
    for (let j = 1; j <= n; j++) {
      const cost = x[i - 1] === y[j - 1] ? 0 : 1;
      curr[j] = Math.min(
        prev[j] + 1,        // deletion
        curr[j - 1] + 1,    // insertion
        prev[j - 1] + cost, // substitution / match
      );
    }
    [prev, curr] = [curr, prev]; // roll the buffers
  }
  return 1 - prev[n] / Math.max(m, n);
}
|
|
41
|
+
|
|
42
|
+
/**
 * Align source column names to expected column names using three passes:
 *   1. exact match on normalized names (confidence 1.0),
 *   2. embedding cosine similarity (only if an `embedTexts` function is
 *      supplied and it returns one vector per input text),
 *   3. Levenshtein ratio on normalized names as a final fallback.
 * Passes 2 and 3 assign greedily, highest score first, so each source and
 * expected column is used at most once.
 *
 * @param {string[]} sourceColumns - Raw source column names (e.g. CSV header).
 * @param {string[]} expectedColumns - Target schema column names.
 * @param {{ embedTexts?: Function, threshold?: number }} opts - `threshold`
 *   is the minimum score (default 0.7) a pair must reach in passes 2 and 3.
 * @returns {Promise<{ mappings, unmappedSource, unmappedExpected, suggestedTransformation? }>}
 *   `mappings` entries carry { sourceColumn, expectedColumn, confidence, method };
 *   `suggestedTransformation` is a ready-to-apply rename_column spec, present
 *   only when at least one mapped pair has differing names.
 */
export async function alignColumns(sourceColumns, expectedColumns, { embedTexts, threshold = 0.7 } = {}) {
  const mappings = [];
  // Working sets of still-unassigned columns; each pass removes what it maps.
  const unmappedSrc = new Set(sourceColumns);
  const unmappedExp = new Set(expectedColumns);

  // Pass 1 — Exact (normalized). Spread unmappedExp so deletion during
  // iteration is safe; first normalized match wins.
  for (const src of sourceColumns) {
    const normSrc = normalizeColName(src);
    for (const exp of [...unmappedExp]) {
      if (normalizeColName(exp) === normSrc) {
        mappings.push({ sourceColumn: src, expectedColumn: exp, confidence: 1.0, method: "exact" });
        unmappedSrc.delete(src);
        unmappedExp.delete(exp);
        break;
      }
    }
  }

  // Pass 2 — Embedding (if embedTexts provided)
  if (typeof embedTexts === "function" && unmappedSrc.size > 0 && unmappedExp.size > 0) {
    try {
      const srcList = [...unmappedSrc];
      const expList = [...unmappedExp];
      // Embed a human-readable form ("order id") rather than the raw name.
      const toHuman = (s) => normalizeColName(s).replace(/_/g, " ");
      const srcTexts = srcList.map(toHuman);
      const expTexts = expList.map(toHuman);

      // Single batched call: source vectors first, then expected vectors.
      const allVecs = await embedTexts([...srcTexts, ...expTexts]);
      // Only trust the result if it is one vector per input text.
      if (allVecs && allVecs.length === srcTexts.length + expTexts.length) {
        const srcVecs = allVecs.slice(0, srcTexts.length);
        const expVecs = allVecs.slice(srcTexts.length);

        // Build score matrix and collect all pairs above threshold
        const candidates = [];
        for (let i = 0; i < srcList.length; i++) {
          for (let j = 0; j < expList.length; j++) {
            const score = cosineSimilarity(srcVecs[i], expVecs[j]);
            if (score >= threshold) candidates.push({ i, j, score });
          }
        }
        // Greedy assign highest-confidence pairs first
        candidates.sort((a, b) => b.score - a.score);
        const usedSrc = new Set(), usedExp = new Set();
        for (const { i, j, score } of candidates) {
          if (usedSrc.has(i) || usedExp.has(j)) continue;
          usedSrc.add(i);
          usedExp.add(j);
          mappings.push({ sourceColumn: srcList[i], expectedColumn: expList[j], confidence: score, method: "embedding" });
          unmappedSrc.delete(srcList[i]);
          unmappedExp.delete(expList[j]);
        }
      }
    } catch { /* model not ready — fall through to pass 3 */ }
  }

  // Pass 3 — Levenshtein ratio fallback (same greedy strategy as pass 2,
  // keyed by column name instead of index).
  if (unmappedSrc.size > 0 && unmappedExp.size > 0) {
    const srcList = [...unmappedSrc];
    const expList = [...unmappedExp];
    const candidates = [];
    for (const src of srcList) {
      for (const exp of expList) {
        const score = levenshteinRatio(normalizeColName(src), normalizeColName(exp));
        if (score >= threshold) candidates.push({ src, exp, score });
      }
    }
    candidates.sort((a, b) => b.score - a.score);
    const usedSrc = new Set(), usedExp = new Set();
    for (const { src, exp, score } of candidates) {
      if (usedSrc.has(src) || usedExp.has(exp)) continue;
      usedSrc.add(src);
      usedExp.add(exp);
      mappings.push({ sourceColumn: src, expectedColumn: exp, confidence: score, method: "levenshtein" });
      unmappedSrc.delete(src);
      unmappedExp.delete(exp);
    }
  }

  const result = {
    mappings,
    unmappedSource: [...unmappedSrc],
    unmappedExpected: [...unmappedExp],
  };

  // Offer a ready-made rename_column transformation covering every mapped
  // pair whose names actually differ (identical names need no rename).
  if (mappings.length > 0) {
    const renameMapping = {};
    for (const { sourceColumn, expectedColumn } of mappings) {
      if (sourceColumn !== expectedColumn) renameMapping[sourceColumn] = expectedColumn;
    }
    if (Object.keys(renameMapping).length > 0) {
      result.suggestedTransformation = { transform: "rename_column", mapping: renameMapping };
    }
  }

  return result;
}
|
|
@@ -5,6 +5,8 @@ import path from "node:path";
|
|
|
5
5
|
|
|
6
6
|
const DEFAULT_BASE_URL = "http://127.0.0.1:9001/api";
|
|
7
7
|
const TOKEN_TTL_MS = 30 * 60 * 1000; // 30 minutes
|
|
8
|
+
const RETRY_COUNT = 3;
|
|
9
|
+
const RETRY_DELAY_MS = 10_000;
|
|
8
10
|
|
|
9
11
|
/** Resolve API base URL: config.apiUrl, FOUNDATION_API_URL, or default. Ensures path ends with /api. */
|
|
10
12
|
function resolveBaseUrl(config) {
|
|
@@ -18,6 +20,18 @@ function resolveBaseUrl(config) {
|
|
|
18
20
|
return url.endsWith("/api") ? url : `${url}/api`;
|
|
19
21
|
}
|
|
20
22
|
|
|
23
|
+
/**
 * Resolve the x-org header value. Candidates are checked in priority order —
 * config.orgId, ORG_ID (process env, then root .env), ORG_NAME (process env,
 * then root .env) — and the first one that trims to a non-empty string wins;
 * otherwise "root".
 */
function resolveOrgId(config, rootEnv = {}) {
  const candidates = [
    config?.orgId,
    process.env.ORG_ID,
    rootEnv.ORG_ID,
    process.env.ORG_NAME,
    rootEnv.ORG_NAME,
  ];
  for (const candidate of candidates) {
    const value = candidate?.trim();
    if (value) return value;
  }
  return "root";
}
|
|
34
|
+
|
|
21
35
|
function isValidJwt(token) {
|
|
22
36
|
if (!token) return false;
|
|
23
37
|
const parts = token.split(".");
|
|
@@ -47,7 +61,6 @@ function request(method, url, headers = {}, body, timeoutMs = 15_000) {
|
|
|
47
61
|
method,
|
|
48
62
|
headers: {
|
|
49
63
|
"Content-Type": "application/json",
|
|
50
|
-
"x-org": "root",
|
|
51
64
|
...headers,
|
|
52
65
|
},
|
|
53
66
|
timeout: timeoutMs,
|
|
@@ -73,6 +86,28 @@ function request(method, url, headers = {}, body, timeoutMs = 15_000) {
|
|
|
73
86
|
});
|
|
74
87
|
}
|
|
75
88
|
|
|
89
|
+
/** Promise that resolves (with no value) after `ms` milliseconds. */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
92
|
+
|
|
93
|
+
/**
 * Like request() but retries on network errors and 5xx responses.
 *
 * @param {string} method - HTTP method.
 * @param {string} url - Absolute URL to call.
 * @param {object} headers - Extra request headers.
 * @param {*} body - Request body passed through to request().
 * @param {number} timeoutMs - Per-attempt timeout.
 * @param {number} [maxRetries] - Total attempt budget (clamped to >= 1).
 * @param {number} [delayMs] - Pause between attempts.
 * @returns {Promise<object>} First non-5xx response, or the final 5xx
 *   response once the attempt budget is exhausted.
 * @throws {Error} When every attempt fails at the network level; the last
 *   underlying error is preserved on `cause`.
 */
async function requestWithRetries(method, url, headers, body, timeoutMs, maxRetries = RETRY_COUNT, delayMs = RETRY_DELAY_MS) {
  // Clamp so maxRetries <= 0 cannot skip the loop and silently resolve
  // to undefined.
  const attempts = Math.max(1, maxRetries);
  for (let attempt = 1; attempt <= attempts; attempt++) {
    let res;
    try {
      res = await request(method, url, headers, body, timeoutMs);
    } catch (e) {
      if (attempt < attempts) { await sleep(delayMs); continue; }
      // Keep the original network error available to callers via `cause`.
      throw new Error(`Request failed after ${attempts} attempts: ${e.message}`, { cause: e });
    }
    // Retry server-side failures; hand back everything else (incl. 4xx) as-is.
    if (res.status >= 500 && attempt < attempts) {
      await sleep(delayMs);
      continue;
    }
    return res;
  }
}
|
|
110
|
+
|
|
76
111
|
/**
|
|
77
112
|
* Like request() but returns response headers, does NOT follow redirects,
|
|
78
113
|
* and supports both JSON and url-encoded bodies.
|
|
@@ -251,6 +286,7 @@ export class FoundationClient {
|
|
|
251
286
|
constructor(config = {}, opts = {}) {
|
|
252
287
|
this._config = { ...config };
|
|
253
288
|
this._baseUrl = resolveBaseUrl(config);
|
|
289
|
+
this._org = resolveOrgId(config, loadRootProjectEnv());
|
|
254
290
|
this._getCredentials = opts.getCredentials || null;
|
|
255
291
|
this._token = null;
|
|
256
292
|
this._tokenExpiry = 0;
|
|
@@ -501,7 +537,7 @@ export class FoundationClient {
|
|
|
501
537
|
*/
|
|
502
538
|
async _request(method, apiPath, body) {
|
|
503
539
|
const url = apiPath.startsWith("http") ? apiPath : `${this._baseUrl}${apiPath}`;
|
|
504
|
-
const headers = {};
|
|
540
|
+
const headers = { "x-org": this._org };
|
|
505
541
|
|
|
506
542
|
await this.ensureAuth();
|
|
507
543
|
headers.Authorization = `Bearer ${this._token}`;
|
|
@@ -526,7 +562,7 @@ export class FoundationClient {
|
|
|
526
562
|
headers["CF-Access-Client-Secret"] = cfSecret;
|
|
527
563
|
}
|
|
528
564
|
|
|
529
|
-
let res = await
|
|
565
|
+
let res = await requestWithRetries(method, url, headers, body, this._timeoutMs);
|
|
530
566
|
|
|
531
567
|
if (res.status === 401) {
|
|
532
568
|
this._token = null;
|
|
@@ -537,7 +573,7 @@ export class FoundationClient {
|
|
|
537
573
|
headers.Authorization = `Bearer ${this._token}`;
|
|
538
574
|
if (this._cfJwt) headers.Cookie = `CF_Authorization=${this._cfJwt}`;
|
|
539
575
|
else delete headers.Cookie;
|
|
540
|
-
res = await
|
|
576
|
+
res = await requestWithRetries(method, url, headers, body, this._timeoutMs);
|
|
541
577
|
}
|
|
542
578
|
|
|
543
579
|
if (res.status >= 400) {
|
|
@@ -2440,6 +2440,52 @@ export function registerWriteTools(api, client) {
|
|
|
2440
2440
|
return `Unknown scope: ${raw.scope}`;
|
|
2441
2441
|
},
|
|
2442
2442
|
},
|
|
2443
|
+
// foundation_align
|
|
2444
|
+
{
|
|
2445
|
+
name: "foundation_align",
|
|
2446
|
+
description: "Align source CSV column names to expected schema column names using exact, embedding, and Levenshtein matching. Returns mappings and a ready-to-use rename_column transformation.",
|
|
2447
|
+
inputSchema: {
|
|
2448
|
+
type: "object",
|
|
2449
|
+
properties: {
|
|
2450
|
+
source_columns: { type: "array", items: { type: "string" }, description: "Raw source column names (e.g. from a CSV header)" },
|
|
2451
|
+
expected_columns: { type: "array", items: { type: "string" }, description: "Target schema column names (e.g. from a data product schema)" },
|
|
2452
|
+
source_identifier: { type: "string", description: "Entity identifier to fetch source columns from (data_object or data_product)" },
|
|
2453
|
+
target_identifier: { type: "string", description: "Entity identifier to fetch expected columns from (data_object or data_product)" },
|
|
2454
|
+
threshold: { type: "number", description: "Minimum similarity score 0–1 (default 0.7)" },
|
|
2455
|
+
},
|
|
2456
|
+
},
|
|
2457
|
+
async execute(input) {
|
|
2458
|
+
const { alignColumns } = await import("./align.js");
|
|
2459
|
+
let sourceCols = input.source_columns;
|
|
2460
|
+
let expectedCols = input.expected_columns;
|
|
2461
|
+
|
|
2462
|
+
if (!sourceCols && input.source_identifier) {
|
|
2463
|
+
const cols = await fetchEntityColumns(client, input.source_identifier, "data_object");
|
|
2464
|
+
if (!cols) return `Error: could not fetch columns for source identifier "${input.source_identifier}"`;
|
|
2465
|
+
sourceCols = cols;
|
|
2466
|
+
}
|
|
2467
|
+
if (!expectedCols && input.target_identifier) {
|
|
2468
|
+
const cols = await fetchEntityColumns(client, input.target_identifier, "data_product");
|
|
2469
|
+
if (!cols) return `Error: could not fetch columns for target identifier "${input.target_identifier}"`;
|
|
2470
|
+
expectedCols = cols;
|
|
2471
|
+
}
|
|
2472
|
+
|
|
2473
|
+
if (!sourceCols || sourceCols.length === 0) return "Error: source_columns or source_identifier required";
|
|
2474
|
+
if (!expectedCols || expectedCols.length === 0) return "Error: expected_columns or target_identifier required";
|
|
2475
|
+
|
|
2476
|
+
let embedTexts;
|
|
2477
|
+
try {
|
|
2478
|
+
const embSvc = typeof api.getService === "function" && api.getService("embeddings:search");
|
|
2479
|
+
if (embSvc?.embedTexts) embedTexts = embSvc.embedTexts.bind(embSvc);
|
|
2480
|
+
} catch { /* embeddings not available */ }
|
|
2481
|
+
|
|
2482
|
+
const result = await alignColumns(sourceCols, expectedCols, {
|
|
2483
|
+
embedTexts,
|
|
2484
|
+
threshold: typeof input.threshold === "number" ? input.threshold : 0.7,
|
|
2485
|
+
});
|
|
2486
|
+
return fmt(result);
|
|
2487
|
+
},
|
|
2488
|
+
},
|
|
2443
2489
|
];
|
|
2444
2490
|
|
|
2445
2491
|
return tools;
|
package/src/ui/tui/App.js
CHANGED
|
@@ -2904,12 +2904,17 @@ export async function launchTui(root, registry, opts = {}) {
|
|
|
2904
2904
|
}
|
|
2905
2905
|
}
|
|
2906
2906
|
};
|
|
2907
|
+
// Catch-all for mouse-move position reports that some Windows/ConPTY
|
|
2908
|
+
// terminals emit in non-standard formats (e.g. CSI with lowercase 'm',
|
|
2909
|
+
// extra semicolons, or raw coordinate bytes after CSI).
|
|
2910
|
+
const CATCHALL_MOUSE_RE = /\x1b\[<?\d+;\d+;\d+[mM]/g;
|
|
2907
2911
|
const stripMouseSequences = (str) => {
|
|
2908
2912
|
// Strip all complete mouse sequences, returning any remaining text
|
|
2909
2913
|
return str
|
|
2910
2914
|
.replace(SGR_MOUSE_RE, "")
|
|
2911
2915
|
.replace(CSI_MOUSE_RE, "")
|
|
2912
|
-
.replace(LEGACY_MOUSE_RE, "")
|
|
2916
|
+
.replace(LEGACY_MOUSE_RE, "")
|
|
2917
|
+
.replace(CATCHALL_MOUSE_RE, "");
|
|
2913
2918
|
};
|
|
2914
2919
|
const origStdinEmit = process.stdin.emit.bind(process.stdin);
|
|
2915
2920
|
process.stdin.emit = function (event, ...args) {
|
|
@@ -3118,10 +3123,13 @@ export async function launchTui(root, registry, opts = {}) {
|
|
|
3118
3123
|
// small escape sequences during idle).
|
|
3119
3124
|
// \x1b[?1000h = click/wheel tracking (basic compatibility; helps on some Windows terminals)
|
|
3120
3125
|
// \x1b[?1002h = button-event tracking (press, release, wheel)
|
|
3121
|
-
// \x1b[?1003h = any-event tracking (fallback when 1002 is unreliable)
|
|
3122
3126
|
// \x1b[?1006h = SGR extended mode (scroll wheel as button 64/65)
|
|
3123
|
-
//
|
|
3124
|
-
|
|
3127
|
+
// NOTE: ?1003h (any-event/move tracking) deliberately omitted — it floods
|
|
3128
|
+
// stdin with mouse-position reports on every cursor move. Windows SSH
|
|
3129
|
+
// terminals (Windows Terminal, PuTTY, ConPTY) often send these in formats
|
|
3130
|
+
// the strip regex cannot match, causing raw escape codes to appear in the
|
|
3131
|
+
// input box. ?1015h (URXVT decimal mode) also omitted for similar reasons.
|
|
3132
|
+
const MOUSE_ENABLE = "\x1b[?1000h\x1b[?1002h\x1b[?1006h";
|
|
3125
3133
|
origStdoutWrite(MOUSE_ENABLE);
|
|
3126
3134
|
// Re-assert mouse tracking every 5s in case a render or resize resets modes
|
|
3127
3135
|
const mouseTimerId = setInterval(() => origStdoutWrite(MOUSE_ENABLE), 5000);
|
|
@@ -3155,7 +3163,7 @@ export async function launchTui(root, registry, opts = {}) {
|
|
|
3155
3163
|
// Clear the full-screen content, restore terminal state
|
|
3156
3164
|
process.stdout.write(
|
|
3157
3165
|
"\x1b[2J\x1b[H" // clear screen, cursor to 1,1
|
|
3158
|
-
+ "\x1b[?1000l\x1b[?1002l\x1b[?1003l\x1b[?1006l\x1b[?1015l" // disable all mouse tracking modes
|
|
3166
|
+
+ "\x1b[?1000l\x1b[?1002l\x1b[?1003l\x1b[?1006l\x1b[?1015l" // disable all mouse tracking modes (include 1003/1015 in case previously enabled)
|
|
3159
3167
|
+ "\x1b[?25h" // show cursor
|
|
3160
3168
|
+ "\x1b[0m" // reset attributes
|
|
3161
3169
|
);
|