tova 0.1.1 → 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +2 -0
- package/bin/tova.js +811 -154
- package/package.json +8 -2
- package/src/analyzer/analyzer.js +297 -58
- package/src/analyzer/scope.js +38 -1
- package/src/analyzer/type-registry.js +72 -0
- package/src/analyzer/types.js +478 -0
- package/src/codegen/base-codegen.js +371 -0
- package/src/codegen/client-codegen.js +62 -10
- package/src/codegen/codegen.js +111 -2
- package/src/codegen/server-codegen.js +175 -3
- package/src/config/edit-toml.js +100 -0
- package/src/config/package-json.js +52 -0
- package/src/config/resolve.js +100 -0
- package/src/config/toml.js +209 -0
- package/src/lexer/lexer.js +2 -2
- package/src/lsp/server.js +284 -30
- package/src/parser/ast.js +105 -0
- package/src/parser/parser.js +202 -2
- package/src/runtime/ai.js +305 -0
- package/src/runtime/devtools.js +228 -0
- package/src/runtime/embedded.js +3 -1
- package/src/runtime/io.js +240 -0
- package/src/runtime/reactivity.js +264 -19
- package/src/runtime/ssr.js +196 -24
- package/src/runtime/table.js +522 -0
- package/src/stdlib/collections.js +245 -0
- package/src/stdlib/core.js +87 -0
- package/src/stdlib/datetime.js +88 -0
- package/src/stdlib/encoding.js +35 -0
- package/src/stdlib/functional.js +82 -0
- package/src/stdlib/inline.js +334 -67
- package/src/stdlib/math.js +93 -0
- package/src/stdlib/string.js +95 -0
- package/src/stdlib/url.js +33 -0
- package/src/stdlib/validation.js +29 -0
package/src/parser/ast.js
CHANGED
|
@@ -1133,3 +1133,108 @@ export class TupleTypeAnnotation {
|
|
|
1133
1133
|
this.loc = loc;
|
|
1134
1134
|
}
|
|
1135
1135
|
}
|
|
1136
|
+
|
|
1137
|
+
// ============================================================
|
|
1138
|
+
// Column expressions (for table operations)
|
|
1139
|
+
// ============================================================
|
|
1140
|
+
|
|
1141
|
+
/**
 * AST node for a bare column reference used in table operations,
 * e.g. `.age` refers to the "age" column.
 */
export class ColumnExpression {
  constructor(name, loc) {
    Object.assign(this, { type: 'ColumnExpression', name, loc });
  }
}
|
|
1148
|
+
|
|
1149
|
+
/**
 * AST node for a column assignment used in `derive`-style table
 * operations: `.target = <expression>`.
 */
export class ColumnAssignment {
  constructor(target, expression, loc) {
    Object.assign(this, { type: 'ColumnAssignment', target, expression, loc });
  }
}
|
|
1157
|
+
|
|
1158
|
+
/**
 * AST node for a column exclusion in `select`, e.g. `-.password`
 * excludes the "password" column.
 */
export class NegatedColumnExpression {
  constructor(name, loc) {
    Object.assign(this, { type: 'NegatedColumnExpression', name, loc });
  }
}
|
|
1165
|
+
|
|
1166
|
+
// ============================================================
|
|
1167
|
+
// Data block nodes
|
|
1168
|
+
// ============================================================
|
|
1169
|
+
|
|
1170
|
+
/**
 * AST node for a `data { ... }` block. `body` is an array of
 * SourceDeclaration, PipelineDeclaration, ValidateBlock, and
 * RefreshPolicy nodes.
 */
export class DataBlock {
  constructor(body, loc) {
    Object.assign(this, { type: 'DataBlock', body, loc });
  }
}
|
|
1177
|
+
|
|
1178
|
+
/**
 * AST node for a data-source declaration inside a data block:
 * `source name: Type = expression`. `typeAnnotation` may be null.
 */
export class SourceDeclaration {
  constructor(name, typeAnnotation, expression, loc) {
    Object.assign(this, {
      type: 'SourceDeclaration',
      name,
      typeAnnotation,
      expression,
      loc,
    });
  }
}
|
|
1187
|
+
|
|
1188
|
+
/**
 * AST node for a pipeline declaration inside a data block:
 * `pipeline name = expression`.
 */
export class PipelineDeclaration {
  constructor(name, expression, loc) {
    Object.assign(this, { type: 'PipelineDeclaration', name, expression, loc });
  }
}
|
|
1196
|
+
|
|
1197
|
+
/**
 * AST node for a validation block: `validate TypeName { ...rules }`.
 * `rules` is an array of expression predicates.
 */
export class ValidateBlock {
  constructor(typeName, rules, loc) {
    Object.assign(this, { type: 'ValidateBlock', typeName, rules, loc });
  }
}
|
|
1205
|
+
|
|
1206
|
+
/**
 * AST node for a source refresh policy. `interval` is either
 * `{ value, unit }` (e.g. every 15.minutes) or the string "on_demand".
 */
export class RefreshPolicy {
  constructor(sourceName, interval, loc) {
    Object.assign(this, { type: 'RefreshPolicy', sourceName, interval, loc });
  }
}
|
|
1214
|
+
|
|
1215
|
+
// ============================================================
|
|
1216
|
+
// AI configuration
|
|
1217
|
+
// ============================================================
|
|
1218
|
+
|
|
1219
|
+
/**
 * AST node for an AI configuration block: `ai { ... }` or
 * `ai "name" { ... }`. `name` is null for the default config;
 * `config` is a key -> expression map.
 */
export class AiConfigDeclaration {
  constructor(name, config, loc) {
    Object.assign(this, { type: 'AiConfigDeclaration', name, config, loc });
  }
}
|
|
1227
|
+
|
|
1228
|
+
// ============================================================
|
|
1229
|
+
// Refinement types
|
|
1230
|
+
// ============================================================
|
|
1231
|
+
|
|
1232
|
+
/**
 * AST node for a refinement type: `type Name = Base where { ... }`.
 * `baseType` is a TypeAnnotation; `predicate` is the (combined)
 * expression from the where block, which uses `it` as the implicit
 * parameter.
 */
export class RefinementType {
  constructor(name, baseType, predicate, loc) {
    Object.assign(this, {
      type: 'RefinementType',
      name,
      baseType,
      predicate,
      loc,
    });
  }
}
|
package/src/parser/parser.js
CHANGED
|
@@ -189,6 +189,10 @@ export class Parser {
|
|
|
189
189
|
if (this.check(TokenType.CLIENT)) return this.parseClientBlock();
|
|
190
190
|
if (this.check(TokenType.SHARED)) return this.parseSharedBlock();
|
|
191
191
|
if (this.check(TokenType.IMPORT)) return this.parseImport();
|
|
192
|
+
// data block: data { ... }
|
|
193
|
+
if (this.check(TokenType.IDENTIFIER) && this.current().value === 'data' && this.peek(1).type === TokenType.LBRACE) {
|
|
194
|
+
return this.parseDataBlock();
|
|
195
|
+
}
|
|
192
196
|
// test block: test "name" { ... } or test { ... }
|
|
193
197
|
if (this.check(TokenType.IDENTIFIER) && this.current().value === 'test') {
|
|
194
198
|
const next = this.peek(1);
|
|
@@ -292,6 +296,124 @@ export class Parser {
|
|
|
292
296
|
return new AST.SharedBlock(body, l, name);
|
|
293
297
|
}
|
|
294
298
|
|
|
299
|
+
// ─── Data block ────────────────────────────────────────────
|
|
300
|
+
|
|
301
|
+
  // Parses a `data { ... }` block into an AST.DataBlock.
  // Inner statements are dispatched through parseDataStatement(). A parse
  // error inside the block is recorded in this.errors and recovery is
  // attempted via _synchronizeBlock(), so one bad statement does not abort
  // parsing of the rest of the block.
  parseDataBlock() {
    const l = this.loc();
    this.advance(); // consume 'data'
    this.expect(TokenType.LBRACE, "Expected '{' after 'data'");
    const body = [];
    while (!this.check(TokenType.RBRACE) && !this.isAtEnd()) {
      try {
        const stmt = this.parseDataStatement();
        if (stmt) body.push(stmt);
      } catch (e) {
        // Record and skip to a safe token; keep parsing the block.
        this.errors.push(e);
        this._synchronizeBlock();
      }
    }
    this.expect(TokenType.RBRACE, "Expected '}' to close data block");
    return new AST.DataBlock(body, l);
  }
|
|
318
|
+
|
|
319
|
+
  // Dispatches a single statement inside a `data { ... }` block.
  // Recognizes the contextual keywords 'source', 'pipeline', 'validate',
  // and 'refresh' (they are plain identifiers in the lexer); anything else
  // falls back to an ordinary statement.
  parseDataStatement() {
    if (!this.check(TokenType.IDENTIFIER)) {
      return this.parseStatement();
    }

    const val = this.current().value;

    // source customers: Table<Customer> = read("customers.csv")
    if (val === 'source') {
      return this.parseSourceDeclaration();
    }

    // pipeline clean_customers = customers |> where(...)
    if (val === 'pipeline') {
      return this.parsePipelineDeclaration();
    }

    // validate Customer { .email |> contains("@"), ... }
    if (val === 'validate') {
      return this.parseValidateBlock();
    }

    // refresh customers every 15.minutes
    // refresh orders on_demand
    if (val === 'refresh') {
      return this.parseRefreshPolicy();
    }

    // Not a data keyword — parse as a regular statement.
    return this.parseStatement();
  }
|
|
349
|
+
|
|
350
|
+
parseSourceDeclaration() {
|
|
351
|
+
const l = this.loc();
|
|
352
|
+
this.advance(); // consume 'source'
|
|
353
|
+
const name = this.expect(TokenType.IDENTIFIER, "Expected source name").value;
|
|
354
|
+
|
|
355
|
+
// Optional type annotation: source customers: Table<Customer>
|
|
356
|
+
let typeAnnotation = null;
|
|
357
|
+
if (this.match(TokenType.COLON)) {
|
|
358
|
+
typeAnnotation = this.parseTypeAnnotation();
|
|
359
|
+
}
|
|
360
|
+
|
|
361
|
+
this.expect(TokenType.ASSIGN, "Expected '=' after source name");
|
|
362
|
+
const expression = this.parseExpression();
|
|
363
|
+
|
|
364
|
+
return new AST.SourceDeclaration(name, typeAnnotation, expression, l);
|
|
365
|
+
}
|
|
366
|
+
|
|
367
|
+
parsePipelineDeclaration() {
|
|
368
|
+
const l = this.loc();
|
|
369
|
+
this.advance(); // consume 'pipeline'
|
|
370
|
+
const name = this.expect(TokenType.IDENTIFIER, "Expected pipeline name").value;
|
|
371
|
+
this.expect(TokenType.ASSIGN, "Expected '=' after pipeline name");
|
|
372
|
+
const expression = this.parseExpression();
|
|
373
|
+
return new AST.PipelineDeclaration(name, expression, l);
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
  // Parses `validate <TypeName> { <predicate>[, <predicate> ...] }`.
  // Each rule is an arbitrary expression predicate; commas between rules
  // are optional. Returns an AST.ValidateBlock.
  parseValidateBlock() {
    const l = this.loc();
    this.advance(); // consume 'validate'
    const typeName = this.expect(TokenType.IDENTIFIER, "Expected type name after 'validate'").value;
    this.expect(TokenType.LBRACE, "Expected '{' after validate type name");

    const rules = [];
    while (!this.check(TokenType.RBRACE) && !this.isAtEnd()) {
      const rule = this.parseExpression();
      rules.push(rule);
      this.match(TokenType.COMMA); // optional comma separator
    }

    this.expect(TokenType.RBRACE, "Expected '}' to close validate block");
    return new AST.ValidateBlock(typeName, rules, l);
  }
|
|
392
|
+
|
|
393
|
+
  // Parses a refresh policy for a data source:
  //   refresh <source> every <N>.<unit>    e.g. refresh customers every 15.minutes
  //   refresh <source> on_demand
  // Returns an AST.RefreshPolicy whose interval is either { value, unit }
  // or the string 'on_demand'.
  parseRefreshPolicy() {
    const l = this.loc();
    this.advance(); // consume 'refresh'
    const sourceName = this.expect(TokenType.IDENTIFIER, "Expected source name after 'refresh'").value;

    // refresh X every N.unit OR refresh X on_demand
    if (this.check(TokenType.IDENTIFIER) && this.current().value === 'on_demand') {
      this.advance();
      return new AST.RefreshPolicy(sourceName, 'on_demand', l);
    }

    // expect 'every'
    // NOTE(review): 'every' is only consumed if present, so
    // `refresh x 15.minutes` also parses — confirm this leniency is intended.
    if (this.check(TokenType.IDENTIFIER) && this.current().value === 'every') {
      this.advance(); // consume 'every'
    }

    // Parse interval: N.unit (e.g., 15.minutes, 1.hour)
    const value = this.expect(TokenType.NUMBER, "Expected interval value").value;
    this.expect(TokenType.DOT, "Expected '.' after interval value");
    const unit = this.expect(TokenType.IDENTIFIER, "Expected time unit (minutes, hours, seconds)").value;

    return new AST.RefreshPolicy(sourceName, { value, unit }, l);
  }
|
|
416
|
+
|
|
295
417
|
// ─── Server-specific statements ───────────────────────────
|
|
296
418
|
|
|
297
419
|
parseServerStatement() {
|
|
@@ -375,6 +497,10 @@ export class Parser {
|
|
|
375
497
|
if (val === 'model' && this.peek(1).type === TokenType.IDENTIFIER) {
|
|
376
498
|
return this.parseModelDeclaration();
|
|
377
499
|
}
|
|
500
|
+
// ai { ... } or ai "name" { ... }
|
|
501
|
+
if (val === 'ai' && (this.peek(1).type === TokenType.LBRACE || this.peek(1).type === TokenType.STRING)) {
|
|
502
|
+
return this.parseAiConfig();
|
|
503
|
+
}
|
|
378
504
|
}
|
|
379
505
|
|
|
380
506
|
return this.parseStatement();
|
|
@@ -649,6 +775,29 @@ export class Parser {
|
|
|
649
775
|
return new AST.SessionDeclaration(config, l);
|
|
650
776
|
}
|
|
651
777
|
|
|
778
|
+
  // Parses an AI configuration block:
  //   ai { provider: "...", model: "...", ... }     (default config)
  //   ai "name" { ... }                             (named config)
  // Each value is a full expression; commas between entries are optional.
  // Returns an AST.AiConfigDeclaration (name is null for the default).
  parseAiConfig() {
    const l = this.loc();
    this.advance(); // consume 'ai'

    // Optional name: ai "claude" { ... }
    let name = null;
    if (this.check(TokenType.STRING)) {
      name = this.advance().value;
    }

    this.expect(TokenType.LBRACE, "Expected '{' after 'ai'");
    const config = {};
    while (!this.check(TokenType.RBRACE) && !this.isAtEnd()) {
      const key = this.expect(TokenType.IDENTIFIER, "Expected ai config key").value;
      this.expect(TokenType.COLON, "Expected ':' after ai config key");
      const value = this.parseExpression();
      config[key] = value; // later duplicate keys overwrite earlier ones
      this.match(TokenType.COMMA);
    }
    this.expect(TokenType.RBRACE, "Expected '}' to close ai config");
    return new AST.AiConfigDeclaration(name, config, l);
  }
|
|
800
|
+
|
|
652
801
|
parseDbConfig() {
|
|
653
802
|
const l = this.loc();
|
|
654
803
|
this.advance(); // consume 'db'
|
|
@@ -1514,8 +1663,32 @@ export class Parser {
|
|
|
1514
1663
|
}
|
|
1515
1664
|
|
|
1516
1665
|
// Type alias: type Name = TypeExpr
|
|
1666
|
+
// OR Refinement type: type Name = TypeExpr where { ... }
|
|
1517
1667
|
if (this.match(TokenType.ASSIGN)) {
|
|
1518
1668
|
const typeExpr = this.parseTypeAnnotation();
|
|
1669
|
+
|
|
1670
|
+
// Check for refinement type: type Email = String where { ... }
|
|
1671
|
+
if (this.check(TokenType.IDENTIFIER) && this.current().value === 'where') {
|
|
1672
|
+
this.advance(); // consume 'where'
|
|
1673
|
+
this.expect(TokenType.LBRACE, "Expected '{' after 'where'");
|
|
1674
|
+
|
|
1675
|
+
// Parse predicate block — uses 'it' as implicit parameter
|
|
1676
|
+
const predicates = [];
|
|
1677
|
+
while (!this.check(TokenType.RBRACE) && !this.isAtEnd()) {
|
|
1678
|
+
predicates.push(this.parseExpression());
|
|
1679
|
+
this.match(TokenType.COMMA); // optional comma between predicates
|
|
1680
|
+
}
|
|
1681
|
+
this.expect(TokenType.RBRACE, "Expected '}' to close where block");
|
|
1682
|
+
|
|
1683
|
+
// Combine predicates with 'and'
|
|
1684
|
+
let predicate = predicates[0];
|
|
1685
|
+
for (let i = 1; i < predicates.length; i++) {
|
|
1686
|
+
predicate = new AST.LogicalExpression('and', predicate, predicates[i], l);
|
|
1687
|
+
}
|
|
1688
|
+
|
|
1689
|
+
return new AST.RefinementType(name, typeExpr, predicate, l);
|
|
1690
|
+
}
|
|
1691
|
+
|
|
1519
1692
|
return new AST.TypeAlias(name, typeExpr, l);
|
|
1520
1693
|
}
|
|
1521
1694
|
|
|
@@ -2123,6 +2296,14 @@ export class Parser {
|
|
|
2123
2296
|
const operand = this.parseUnary();
|
|
2124
2297
|
return new AST.YieldExpression(operand, delegate, l);
|
|
2125
2298
|
}
|
|
2299
|
+
// Negated column expression: -.column (for select exclusion)
|
|
2300
|
+
if (this.check(TokenType.MINUS) && this.peek(1).type === TokenType.DOT && this.peek(2).type === TokenType.IDENTIFIER) {
|
|
2301
|
+
const l = this.loc();
|
|
2302
|
+
this.advance(); // consume -
|
|
2303
|
+
this.advance(); // consume .
|
|
2304
|
+
const name = this.advance().value;
|
|
2305
|
+
return new AST.NegatedColumnExpression(name, l);
|
|
2306
|
+
}
|
|
2126
2307
|
if (this.check(TokenType.MINUS)) {
|
|
2127
2308
|
const l = this.loc();
|
|
2128
2309
|
this.advance();
|
|
@@ -2343,13 +2524,32 @@ export class Parser {
|
|
|
2343
2524
|
return this.parseObjectOrDictComprehension();
|
|
2344
2525
|
}
|
|
2345
2526
|
|
|
2527
|
+
// Column expression: .column (for table operations)
|
|
2528
|
+
// Appears at expression-start when DOT followed by IDENTIFIER
|
|
2529
|
+
// but NOT when preceded by an expression (which would be member access)
|
|
2530
|
+
if (this.check(TokenType.DOT) && this.peek(1).type === TokenType.IDENTIFIER) {
|
|
2531
|
+
// Check this isn't inside a method pipe (|> .method) — that's handled in parsePipe
|
|
2532
|
+
// Column expressions appear in function arguments and assignments
|
|
2533
|
+
this.advance(); // consume .
|
|
2534
|
+
const name = this.advance().value; // consume identifier
|
|
2535
|
+
|
|
2536
|
+
// Check for column assignment: .col = expr (used in derive)
|
|
2537
|
+
if (this.check(TokenType.ASSIGN)) {
|
|
2538
|
+
this.advance(); // consume =
|
|
2539
|
+
const expr = this.parseExpression();
|
|
2540
|
+
return new AST.ColumnAssignment(name, expr, l);
|
|
2541
|
+
}
|
|
2542
|
+
|
|
2543
|
+
return new AST.ColumnExpression(name, l);
|
|
2544
|
+
}
|
|
2545
|
+
|
|
2346
2546
|
// Parenthesized expression or arrow lambda
|
|
2347
2547
|
if (this.check(TokenType.LPAREN)) {
|
|
2348
2548
|
return this.parseParenOrArrowLambda();
|
|
2349
2549
|
}
|
|
2350
2550
|
|
|
2351
|
-
//
|
|
2352
|
-
if (this.check(TokenType.SERVER) || this.check(TokenType.CLIENT) || this.check(TokenType.SHARED)) {
|
|
2551
|
+
// Keywords that can appear as identifiers in expression position
|
|
2552
|
+
if (this.check(TokenType.SERVER) || this.check(TokenType.CLIENT) || this.check(TokenType.SHARED) || this.check(TokenType.DERIVE)) {
|
|
2353
2553
|
const name = this.advance().value;
|
|
2354
2554
|
return new AST.Identifier(name, l);
|
|
2355
2555
|
}
|
|
@@ -0,0 +1,305 @@
|
|
|
1
|
+
// AI Runtime for Tova — Multi-provider AI client
|
|
2
|
+
// Supports: anthropic, openai, ollama, custom (OpenAI-compatible)
|
|
3
|
+
|
|
4
|
+
// ── Provider Implementations ──────────────────────────
|
|
5
|
+
|
|
6
|
+
// Applies the effective temperature (request opts take precedence over the
// client config) to an outgoing request body.
// FIX: the original used `if (opts.temperature ?? config.temperature)`,
// which silently dropped an explicit temperature of 0 (falsy). A null
// check honors 0.
function applyTemperature(body, config, opts) {
  const temperature = opts.temperature ?? config.temperature;
  if (temperature != null) body.temperature = temperature;
}

// Converts a Tova tool param spec ({ name: "String", ... }) into a
// JSON-schema `properties` object. String specs are lowercased into
// JSON-schema type names; anything else defaults to "string".
function toJsonSchemaProps(params) {
  if (!params) return {};
  return Object.fromEntries(
    Object.entries(params).map(([k, v]) => [k, { type: typeof v === 'string' ? v.toLowerCase() : 'string' }])
  );
}

// Each provider is `async (config, method, args) => result`, where method
// is one of: ask, chat, embed, extract, classify.
const providers = {
  // Anthropic Messages API.
  async anthropic(config, method, args) {
    const baseUrl = config.base_url || 'https://api.anthropic.com';
    const headers = {
      'Content-Type': 'application/json',
      'x-api-key': config.api_key,
      'anthropic-version': '2023-06-01',
      ...(config.headers || {}),
    };

    switch (method) {
      case 'ask': {
        const [prompt, opts = {}] = args;
        const body = {
          model: config.model || 'claude-sonnet-4-20250514',
          max_tokens: opts.max_tokens || config.max_tokens || 4096,
          messages: [{ role: 'user', content: prompt }],
        };
        applyTemperature(body, config, opts);
        if (opts.tools) {
          body.tools = opts.tools.map(t => ({
            name: t.name,
            description: t.description,
            input_schema: { type: 'object', properties: toJsonSchemaProps(t.params) },
          }));
        }
        const res = await fetch(`${baseUrl}/v1/messages`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`Anthropic API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        // When the model used a tool, return both the text and the tool calls.
        if (opts.tools && data.content.some(c => c.type === 'tool_use')) {
          return { text: data.content.filter(c => c.type === 'text').map(c => c.text).join(''), tool_calls: data.content.filter(c => c.type === 'tool_use') };
        }
        return data.content.map(c => c.text).join('');
      }
      case 'chat': {
        const [messages, opts = {}] = args;
        // Anthropic takes system prompts as a top-level `system` field,
        // not as messages — split them out.
        const systemMessages = messages.filter(m => m.role === 'system');
        const nonSystemMessages = messages.filter(m => m.role !== 'system');
        const body = {
          model: config.model || 'claude-sonnet-4-20250514',
          max_tokens: opts.max_tokens || config.max_tokens || 4096,
          messages: nonSystemMessages,
        };
        if (systemMessages.length > 0) body.system = systemMessages.map(m => m.content).join('\n');
        applyTemperature(body, config, opts);
        const res = await fetch(`${baseUrl}/v1/messages`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`Anthropic API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        return data.content.map(c => c.text).join('');
      }
      case 'embed': {
        // Anthropic doesn't have embeddings.
        throw new Error('Anthropic does not support embeddings. Use an OpenAI-compatible provider.');
      }
      case 'extract': {
        const [prompt, schema, opts = {}] = args;
        const body = {
          model: config.model || 'claude-sonnet-4-20250514',
          max_tokens: opts.max_tokens || config.max_tokens || 4096,
          messages: [{ role: 'user', content: `${prompt}\n\nRespond with a JSON object matching this schema: ${JSON.stringify(schema)}` }],
        };
        applyTemperature(body, config, opts);
        const res = await fetch(`${baseUrl}/v1/messages`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`Anthropic API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        const text = data.content.map(c => c.text).join('');
        // Fall back to the first {...} span if the reply wraps the JSON in prose.
        try { return JSON.parse(text); } catch { return JSON.parse(text.match(/\{[\s\S]*\}/)?.[0] || '{}'); }
      }
      case 'classify': {
        const [text, categories, opts = {}] = args;
        const catList = Array.isArray(categories) ? categories : Object.keys(categories);
        const body = {
          model: config.model || 'claude-sonnet-4-20250514',
          max_tokens: opts.max_tokens || config.max_tokens || 100,
          messages: [{ role: 'user', content: `Classify the following text into exactly one of these categories: ${catList.join(', ')}\n\nText: "${text}"\n\nRespond with only the category name, nothing else.` }],
        };
        applyTemperature(body, config, opts);
        const res = await fetch(`${baseUrl}/v1/messages`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`Anthropic API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        const result = data.content.map(c => c.text).join('').trim();
        // Match against categories (case-insensitive); fall back to raw reply.
        return catList.find(c => c.toLowerCase() === result.toLowerCase()) || result;
      }
      default:
        throw new Error(`Unknown AI method: ${method}`);
    }
  },

  // OpenAI Chat Completions / Embeddings API (also used for `custom`).
  async openai(config, method, args) {
    const baseUrl = config.base_url || 'https://api.openai.com';
    const headers = {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${config.api_key}`,
      ...(config.headers || {}),
    };

    switch (method) {
      case 'ask': {
        const [prompt, opts = {}] = args;
        const body = {
          model: config.model || 'gpt-4o',
          messages: [{ role: 'user', content: prompt }],
        };
        if (opts.max_tokens || config.max_tokens) body.max_tokens = opts.max_tokens || config.max_tokens;
        applyTemperature(body, config, opts);
        if (opts.tools) {
          body.tools = opts.tools.map(t => ({
            type: 'function',
            function: { name: t.name, description: t.description, parameters: { type: 'object', properties: toJsonSchemaProps(t.params) } },
          }));
        }
        const res = await fetch(`${baseUrl}/v1/chat/completions`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`OpenAI API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        const choice = data.choices[0];
        if (opts.tools && choice.message.tool_calls) {
          return { text: choice.message.content || '', tool_calls: choice.message.tool_calls };
        }
        return choice.message.content;
      }
      case 'chat': {
        const [messages, opts = {}] = args;
        const body = { model: config.model || 'gpt-4o', messages };
        if (opts.max_tokens || config.max_tokens) body.max_tokens = opts.max_tokens || config.max_tokens;
        applyTemperature(body, config, opts);
        const res = await fetch(`${baseUrl}/v1/chat/completions`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`OpenAI API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        return data.choices[0].message.content;
      }
      case 'embed': {
        const [input, opts = {}] = args;
        const body = { model: config.model || 'text-embedding-3-small', input };
        const res = await fetch(`${baseUrl}/v1/embeddings`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`OpenAI API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        // Array input -> array of vectors; single string -> single vector.
        if (Array.isArray(input)) return data.data.map(d => d.embedding);
        return data.data[0].embedding;
      }
      case 'extract': {
        const [prompt, schema, opts = {}] = args;
        const body = {
          model: config.model || 'gpt-4o',
          messages: [{ role: 'user', content: `${prompt}\n\nRespond with a JSON object matching this schema: ${JSON.stringify(schema)}` }],
          response_format: { type: 'json_object' },
        };
        if (opts.max_tokens || config.max_tokens) body.max_tokens = opts.max_tokens || config.max_tokens;
        const res = await fetch(`${baseUrl}/v1/chat/completions`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`OpenAI API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        return JSON.parse(data.choices[0].message.content);
      }
      case 'classify': {
        const [text, categories, opts = {}] = args;
        const catList = Array.isArray(categories) ? categories : Object.keys(categories);
        const body = {
          model: config.model || 'gpt-4o',
          messages: [{ role: 'user', content: `Classify into one of: ${catList.join(', ')}\n\nText: "${text}"\n\nRespond with only the category.` }],
        };
        if (opts.max_tokens || config.max_tokens) body.max_tokens = opts.max_tokens || config.max_tokens;
        const res = await fetch(`${baseUrl}/v1/chat/completions`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`OpenAI API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        const result = data.choices[0].message.content.trim();
        return catList.find(c => c.toLowerCase() === result.toLowerCase()) || result;
      }
      default:
        throw new Error(`Unknown AI method: ${method}`);
    }
  },

  // Local Ollama server (no auth).
  async ollama(config, method, args) {
    const baseUrl = config.base_url || 'http://localhost:11434';
    const headers = { 'Content-Type': 'application/json', ...(config.headers || {}) };

    switch (method) {
      case 'ask': {
        const [prompt, opts = {}] = args;
        const body = { model: config.model || 'llama3', messages: [{ role: 'user', content: prompt }], stream: false };
        const res = await fetch(`${baseUrl}/api/chat`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`Ollama API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        return data.message.content;
      }
      case 'chat': {
        const [messages, opts = {}] = args;
        const body = { model: config.model || 'llama3', messages, stream: false };
        const res = await fetch(`${baseUrl}/api/chat`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`Ollama API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        return data.message.content;
      }
      case 'embed': {
        const [input, opts = {}] = args;
        // FIX: the original always issued an initial request with input[0]
        // and then re-requested every item for arrays — a wasted duplicate
        // call whose per-item responses were also never checked for errors.
        if (Array.isArray(input)) {
          const results = [];
          for (const text of input) {
            const r = await fetch(`${baseUrl}/api/embeddings`, { method: 'POST', headers, body: JSON.stringify({ model: config.model || 'llama3', prompt: text }) });
            if (!r.ok) throw new Error(`Ollama API error ${r.status}: ${await r.text()}`);
            const d = await r.json();
            results.push(d.embedding);
          }
          return results;
        }
        const body = { model: config.model || 'llama3', prompt: input };
        const res = await fetch(`${baseUrl}/api/embeddings`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`Ollama API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        return data.embedding;
      }
      case 'extract': {
        const [prompt, schema] = args;
        const body = { model: config.model || 'llama3', messages: [{ role: 'user', content: `${prompt}\nRespond with JSON: ${JSON.stringify(schema)}` }], stream: false, format: 'json' };
        const res = await fetch(`${baseUrl}/api/chat`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`Ollama API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        return JSON.parse(data.message.content);
      }
      case 'classify': {
        const [text, categories] = args;
        const catList = Array.isArray(categories) ? categories : Object.keys(categories);
        const body = { model: config.model || 'llama3', messages: [{ role: 'user', content: `Classify into one of: ${catList.join(', ')}\nText: "${text}"\nRespond with only the category.` }], stream: false };
        const res = await fetch(`${baseUrl}/api/chat`, { method: 'POST', headers, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`Ollama API error ${res.status}: ${await res.text()}`);
        const data = await res.json();
        const result = data.message.content.trim();
        return catList.find(c => c.toLowerCase() === result.toLowerCase()) || result;
      }
      default:
        throw new Error(`Unknown AI method: ${method}`);
    }
  },
};

// Custom provider uses OpenAI-compatible API format
providers.custom = providers.openai;
|
|
242
|
+
|
|
243
|
+
// ── AI Client Factory ─────────────────────────────────
|
|
244
|
+
|
|
245
|
+
/**
 * Builds an AI client bound to `config`. The provider is chosen by
 * `config.provider` (anthropic | openai | ollama | custom), defaulting to
 * the OpenAI-compatible `custom` provider. Per-call `opts` are merged over
 * the client config for each request.
 */
export function createAI(config = {}) {
  const providerName = config.provider || 'custom';
  const run = providers[providerName] || providers.custom;

  // Merge one-off call options over the client-level config.
  const merged = (opts) => ({ ...config, ...opts });

  return {
    _config: config,
    _provider: run,
    ask: async (prompt, opts = {}) => run(merged(opts), 'ask', [prompt, opts]),
    chat: async (messages, opts = {}) => run(merged(opts), 'chat', [messages, opts]),
    embed: async (input, opts = {}) => run(merged(opts), 'embed', [input, opts]),
    extract: async (prompt, schema, opts = {}) => run(merged(opts), 'extract', [prompt, schema, opts]),
    classify: async (text, categories, opts = {}) => run(merged(opts), 'classify', [text, categories, opts]),
  };
}
|
|
281
|
+
|
|
282
|
+
// ── Default AI object (for one-off calls) ─────────────
|
|
283
|
+
|
|
284
|
+
/**
 * Convenience AI object for one-off calls: each method builds a throwaway
 * client from the call's own options and delegates to it.
 */
export const defaultAI = {
  async ask(prompt, opts = {}) {
    return createAI(opts).ask(prompt, opts);
  },
  async chat(messages, opts = {}) {
    return createAI(opts).chat(messages, opts);
  },
  async embed(input, opts = {}) {
    return createAI(opts).embed(input, opts);
  },
  async extract(prompt, schema, opts = {}) {
    return createAI(opts).extract(prompt, schema, opts);
  },
  async classify(text, categories, opts = {}) {
    return createAI(opts).classify(text, categories, opts);
  },
};
|