@revealui/ai 0.2.6 → 0.2.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE CHANGED
@@ -1,22 +1,109 @@
1
- MIT License
1
+ Functional Source License, Version 1.1, MIT Future License
2
2
 
3
- Copyright (c) 2025-2026 RevealUI Studio
3
+ Abbreviation
4
4
 
5
- Permission is hereby granted, free of charge, to any person obtaining a copy
6
- of this software and associated documentation files (the "Software"), to deal
7
- in the Software without restriction, including without limitation the rights
8
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
- copies of the Software, and to permit persons to whom the Software is
10
- furnished to do so, subject to the following conditions:
5
+ FSL-1.1-MIT
11
6
 
12
- The above copyright notice and this permission notice shall be included in all
13
- copies or substantial portions of the Software.
7
+ Notice
14
8
 
15
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
- SOFTWARE.
9
+ Copyright 2025-2026 RevealUI Studio (founder@revealui.com)
22
10
 
11
+ Terms and Conditions
12
+
13
+ Licensor: RevealUI Studio
14
+
15
+ Licensed Work: @revealui/ai
16
+ The Licensed Work is copyright 2025-2026 RevealUI Studio.
17
+
18
+ Change Date: 2028-04-08
19
+
20
+ Change License: MIT
21
+
22
+ For information about alternative licensing arrangements for the Licensed Work,
23
+ please contact: founder@revealui.com
24
+
25
+ License text below is the Functional Source License, Version 1.1, MIT Future
26
+ License, as published at https://fsl.software/FSL-1.1-MIT.template.md
27
+
28
+ ---
29
+
30
+ ## Terms and Conditions
31
+
32
+ ### Acceptance
33
+
34
+ In order to get any license under these terms, you must agree to them as
35
+ both strict obligations and conditions to all your licenses.
36
+
37
+ ### Copyright License
38
+
39
+ The licensor grants you a non-exclusive, royalty-free, worldwide,
40
+ non-sublicensable, non-transferable license to use, copy, distribute, make
41
+ available, and prepare derivative works of the licensed work, in each case
42
+ subject to the limitations and conditions below.
43
+
44
+ ### Limitations
45
+
46
+ You may not make the functionality of the licensed work or a modified
47
+ version available to third parties as a service, or distribute the
48
+ licensed work or a modified version in a way that makes the functionality
49
+ of the software available to third parties. Making the functionality of
50
+ the licensed work available to third parties includes, without limitation,
51
+ enabling third parties to interact with the functionality of the licensed
52
+ work remotely through a computer network, offering a service the value of
53
+ which entirely or primarily derives from the value of the licensed work,
54
+ or offering a service that accomplishes for users the primary purpose of
55
+ the licensed work or a modified version.
56
+
57
+ ### Patents
58
+
59
+ The licensor grants you a license, under any patent claims the licensor
60
+ can license, or becomes able to license, to make, have made, use, sell,
61
+ offer for sale, import and have imported the licensed work, in each case
62
+ subject to the limitations and conditions in this license. This license
63
+ does not cover any patent claims that you cause to be infringed by
64
+ modifications or additions to the licensed work. If you or your company
65
+ make any written claim that the licensed work infringes or contributes to
66
+ infringement of any patent, your patent license for the licensed work
67
+ granted under these terms ends immediately. If your company makes such a
68
+ claim, your patent license ends immediately for work on behalf of your
69
+ company.
70
+
71
+ ### Fair Use
72
+
73
+ This license is not intended to limit any right of fair use, fair
74
+ dealing, or other applicable copyright exception or limitation.
75
+
76
+ ### No Other Rights
77
+
78
+ These terms do not allow you to sublicense or transfer any of your
79
+ licenses to anyone else, or prevent the licensor from granting licenses
80
+ to anyone else. These terms do not imply any other licenses.
81
+
82
+ ### Termination
83
+
84
+ If you use the licensed work in violation of these terms, such use is
85
+ not licensed, and your licenses may be revoked if you do not cure the
86
+ violation.
87
+
88
+ The licensor may also revoke your licenses if you fail to comply with
89
+ these terms.
90
+
91
+ ### No Liability
92
+
93
+ ***As far as the law allows, the licensed work comes as is, without any
94
+ warranty or condition, and the licensor will not be liable to you for any
95
+ damages arising out of these terms or the use or nature of the licensed
96
+ work, under any kind of legal claim.***
97
+
98
+ ### Change Date
99
+
100
+ On the Change Date, or the fourth anniversary of the first publicly
101
+ available distribution of a specific version of the Licensed Work under
102
+ this License, whichever comes first, the Licensor hereby grants you
103
+ rights under the terms of the Change License, and the rights granted in
104
+ the paragraphs above terminate.
105
+
106
+ ### Change License
107
+
108
+ On the Change Date, the Licensed Work will be made available under the
109
+ Change License specified above (MIT).
package/dist/index.d.ts CHANGED
@@ -57,9 +57,8 @@ export * from './embeddings/index.js';
57
57
  export * from './ingestion/index.js';
58
58
  export * from './llm/client.js';
59
59
  export * from './llm/provider-health.js';
60
- export * from './llm/providers/anthropic.js';
61
60
  export * from './llm/providers/base.js';
62
- export * from './llm/providers/openai.js';
61
+ export * from './llm/providers/openai-compat.js';
63
62
  export * from './llm/token-counter.js';
64
63
  export * from './llm/workspace-provider-config.js';
65
64
  export * from './memory/index.js';
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8CG;AAKH;;;;GAIG;AACH,wBAAgB,cAAc,IAAI,OAAO,CASxC;AAGD,cAAc,gBAAgB,CAAC;AAE/B,cAAc,kBAAkB,CAAC;AAIjC,cAAc,uBAAuB,CAAC;AAEtC,cAAc,sBAAsB,CAAC;AAErC,cAAc,iBAAiB,CAAC;AAChC,cAAc,0BAA0B,CAAC;AACzC,cAAc,8BAA8B,CAAC;AAC7C,cAAc,yBAAyB,CAAC;AACxC,cAAc,2BAA2B,CAAC;AAC1C,cAAc,wBAAwB,CAAC;AACvC,cAAc,oCAAoC,CAAC;AAEnD,cAAc,mBAAmB,CAAC;AAElC,cAAc,0BAA0B,CAAC;AACzC,cAAc,6BAA6B,CAAC;AAC5C,cAAc,uCAAuC,CAAC;AACtD,cAAc,iCAAiC,CAAC;AAChD,cAAc,4BAA4B,CAAC;AAC3C,cAAc,sCAAsC,CAAC;AACrD,cAAc,iCAAiC,CAAC;AAEhD,cAAc,mBAAmB,CAAC;AAElC,cAAc,sBAAsB,CAAC;AAErC,cAAc,iBAAiB,CAAC;AAChC,cAAc,yBAAyB,CAAC;AACxC,cAAc,gCAAgC,CAAC;AAC/C,cAAc,wBAAwB,CAAC;AACvC,cAAc,yBAAyB,CAAC;AACxC,cAAc,qBAAqB,CAAC;AACpC,cAAc,yBAAyB,CAAC;AACxC,cAAc,sBAAsB,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8CG;AAKH;;;;GAIG;AACH,wBAAgB,cAAc,IAAI,OAAO,CASxC;AAGD,cAAc,gBAAgB,CAAC;AAE/B,cAAc,kBAAkB,CAAC;AAIjC,cAAc,uBAAuB,CAAC;AAEtC,cAAc,sBAAsB,CAAC;AAErC,cAAc,iBAAiB,CAAC;AAChC,cAAc,0BAA0B,CAAC;AACzC,cAAc,yBAAyB,CAAC;AACxC,cAAc,kCAAkC,CAAC;AACjD,cAAc,wBAAwB,CAAC;AACvC,cAAc,oCAAoC,CAAC;AAEnD,cAAc,mBAAmB,CAAC;AAElC,cAAc,0BAA0B,CAAC;AACzC,cAAc,6BAA6B,CAAC;AAC5C,cAAc,uCAAuC,CAAC;AACtD,cAAc,iCAAiC,CAAC;AAChD,cAAc,4BAA4B,CAAC;AAC3C,cAAc,sCAAsC,CAAC;AACrD,cAAc,iCAAiC,CAAC;AAEhD,cAAc,mBAAmB,CAAC;AAElC,cAAc,sBAAsB,CAAC;AAErC,cAAc,iBAAiB,CAAC;AAChC,cAAc,yBAAyB,CAAC;AACxC,cAAc,gCAAgC,CAAC;AAC/C,cAAc,wBAAwB,CAAC;AACvC,cAAc,yBAAyB,CAAC;AACxC,cAAc,qBAAqB,CAAC;AACpC,cAAc,yBAAyB,CAAC;AACxC,cAAc,sBAAsB,CAAC"}
package/dist/index.js CHANGED
@@ -73,9 +73,8 @@ export * from './ingestion/index.js';
73
73
  // Re-export LLM providers and client
74
74
  export * from './llm/client.js';
75
75
  export * from './llm/provider-health.js';
76
- export * from './llm/providers/anthropic.js';
77
76
  export * from './llm/providers/base.js';
78
- export * from './llm/providers/openai.js';
77
+ export * from './llm/providers/openai-compat.js';
79
78
  export * from './llm/token-counter.js';
80
79
  export * from './llm/workspace-provider-config.js';
81
80
  // Re-export memory system
@@ -1 +1 @@
1
- {"version":3,"file":"cache-utils.d.ts","sourceRoot":"","sources":["../../src/llm/cache-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;GAgBG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,qBAAqB,CAAC;AAEnD;;;GAGG;AACH,wBAAgB,SAAS,CAAC,OAAO,EAAE,OAAO,GAAG,OAAO,CAKnD;AAED;;;;;;;;;GASG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAK9D;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,oBAAoB,CAClC,WAAW,EAAE,MAAM,EACnB,YAAY,EAAE,MAAM,EACpB,qBAAqB,EAAE,MAAM,GAC5B,MAAM,CAeR;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,gBAAgB,CAAC,KAAK,EAAE;IACtC,YAAY,EAAE,MAAM,CAAC;IACrB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,MAAM,GAAG,IAAI,CAqBhB;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,SAAO,GAAG,OAAO,CAItE;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,wBAAwB,CAAC,MAAM,EAAE;IAC/C,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,EAAE,CAAC;IACvB,QAAQ,EAAE,OAAO,EAAE,CAAC;CACrB,GAAG,OAAO,EAAE,CAwBZ;AAED;;;GAGG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;CAmBpB,CAAC;AAEX;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE;IACxC,KAAK,EAAE,MAAM,OAAO,iBAAiB,CAAC;IACtC,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG;IAAE,KAAK,EAAE,MAAM,CAAC;IAAC,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAAC,OAAO,EAAE,MAAM,CAAA;CAAE,CAwBxE"}
1
+ {"version":3,"file":"cache-utils.d.ts","sourceRoot":"","sources":["../../src/llm/cache-utils.ts"],"names":[],"mappings":"AAEA;;;;;;;;;;;;;;;;GAgBG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,qBAAqB,CAAC;AAEnD;;;GAGG;AACH,wBAAgB,SAAS,CAAC,OAAO,EAAE,OAAO,GAAG,OAAO,CAKnD;AAED;;;;;;;;;GASG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAK9D;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,oBAAoB,CAClC,WAAW,EAAE,MAAM,EACnB,YAAY,EAAE,MAAM,EACpB,qBAAqB,EAAE,MAAM,GAC5B,MAAM,CAeR;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,gBAAgB,CAAC,KAAK,EAAE;IACtC,YAAY,EAAE,MAAM,CAAC;IACrB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,MAAM,GAAG,IAAI,CAqBhB;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,SAAO,GAAG,OAAO,CAItE;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,wBAAwB,CAAC,MAAM,EAAE;IAC/C,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,EAAE,CAAC;IACvB,QAAQ,EAAE,OAAO,EAAE,CAAC;CACrB,GAAG,OAAO,EAAE,CAwBZ;AAED;;;GAGG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;CAmBpB,CAAC;AAEX;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE;IACxC,KAAK,EAAE,MAAM,OAAO,iBAAiB,CAAC;IACtC,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG;IAAE,KAAK,EAAE,MAAM,CAAC;IAAC,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAAC,OAAO,EAAE,MAAM,CAAA;CAAE,CAwBxE"}
@@ -1,20 +1,4 @@
1
- /**
2
- * Prompt Caching Utilities
3
- *
4
- * Helper functions for optimizing Anthropic prompt caching usage.
5
- * Cache hits provide up to 90% cost reduction on input tokens.
6
- *
7
- * Cache TTL: 5 minutes
8
- * Minimum cacheable content: ~1024 tokens (~300 words)
9
- *
10
- * Best practices:
11
- * - Cache stable content that repeats across requests
12
- * - Place cached content at message boundaries
13
- * - Cache system prompts, tools, and large context documents
14
- * - Order matters: place most stable content first
15
- *
16
- * @see https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching
17
- */
1
+ /* console-allowed */
18
2
  /**
19
3
  * Mark a message for caching
20
4
  * Use this for system prompts, tool definitions, or large context that repeats
@@ -15,7 +15,7 @@ import type { ProviderHealthMonitor } from './provider-health.js';
15
15
  import type { Embedding, LLMChatOptions, LLMChunk, LLMEmbedOptions, LLMProvider, LLMResponse, LLMStreamOptions, Message } from './providers/base.js';
16
16
  import { type CacheStats, type ResponseCacheOptions } from './response-cache.js';
17
17
  import { type SemanticCacheOptions, type SemanticCacheStats } from './semantic-cache.js';
18
- export type LLMProviderType = 'openai' | 'anthropic' | 'vultr' | 'groq' | 'ollama' | 'bitnet' | 'huggingface' | 'inference-snaps';
18
+ export type LLMProviderType = 'vultr' | 'groq' | 'ollama' | 'bitnet' | 'huggingface' | 'inference-snaps';
19
19
  export interface LLMClientConfig {
20
20
  provider: LLMProviderType;
21
21
  apiKey: string;
@@ -116,11 +116,9 @@ export declare class LLMClient {
116
116
  * Create an LLM client from environment variables.
117
117
  *
118
118
  * When LLM_PROVIDER is not set, auto-detects the provider by checking env vars
119
- * in priority order: GROQ_API_KEY → OLLAMA_BASE_URL → ANTHROPIC_API_KEY.
119
+ * in priority order: INFERENCE_SNAPS → BITNET → GROQ → OLLAMA.
120
120
  *
121
- * GROQ and Ollama are preferred — they are free-tier and BYOK-friendly.
122
- * OpenAI is not in the auto-detection chain (no revenue yet — see LLM provider policy).
123
- * To use OpenAI, set LLM_PROVIDER=openai explicitly.
121
+ * All providers use OpenAI-compatible APIs. No proprietary provider SDKs.
124
122
  *
125
123
  * Provider defaults:
126
124
  * groq → llama-3.3-70b-versatile
@@ -1 +1 @@
1
- {"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../../src/llm/client.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAoBH;;;;GAIG;AACH,wBAAgB,qBAAqB,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAY3F;AAID,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAIpD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,sBAAsB,CAAC;AAElE,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,qBAAqB,CAAC;AAU7B,OAAO,EAAE,KAAK,UAAU,EAAiB,KAAK,oBAAoB,EAAE,MAAM,qBAAqB,CAAC;AAChG,OAAO,EAEL,KAAK,oBAAoB,EACzB,KAAK,kBAAkB,EACxB,MAAM,qBAAqB,CAAC;AAG7B,MAAM,MAAM,eAAe,GACvB,QAAQ,GACR,WAAW,GACX,OAAO,GACP,MAAM,GACN,QAAQ,GACR,QAAQ,GACR,aAAa,GACb,iBAAiB,CAAC;AAEtB,MAAM,WAAW,eAAe;IAC9B,QAAQ,EAAE,eAAe,CAAC;IAC1B,MAAM,EAAE,MAAM,CAAC;IACf;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,OAAO,CAAC,MAAM,CAAC,CAAC;IACjC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,WAAW,CAAC;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,gBAAgB,CAAC,EAAE,eAAe,CAAC;IACnC,SAAS,CAAC,EAAE;QACV,iBAAiB,CAAC,EAAE,MAAM,CAAC;QAC3B,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,CAAC;IACF,oFAAoF;IACpF,oBAAoB,CAAC,EAAE,OAAO,CAAC;IAC/B,wEAAwE;IACxE,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,6BAA6B;IAC7B,oBAAoB,CAAC,EAAE,oBAAoB,CAAC;IAC5C,iEAAiE;IACjE,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,6BAA6B;IAC7B,oBAAoB,CAAC,EAAE,oBAAoB,CAAC;IAC5C,0EAA0E;IAC1E,aAAa,CAAC,EAAE,qBAAqB,CAAC;CACvC;AAQD,qBAAa,SAAS;IACpB,OAAO,CAAC,QAAQ,CAAc;IAC9B,OAAO,CAAC,gBAAgB,CAAC,CAAc;IACvC,OAAO,CAAC,qBAAqB,CAAC,CAAc;IAC5C,OAAO,CAAC,MAAM,CAAkB;IAChC,OAAO,CAAC,cAAc,CAAiB;IACvC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,aAAa,CAAC,CAAwB;IAC9C,wFAAwF;IACxF,OAAO,CAAC,aAAa,CAAS;gBAElB,MAAM,EAAE,eAAe;IA8CnC,OAAO,CAAC,cAAc;IAkCtB;;;OAGG;YACW,uBAAuB;IAmBrC,OAAO,CAAC,cAAc;IAoCtB,OAAO,CAAC,aAAa;IAMf,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAmHzE,KAAK,CACT,IAAI,EAAE,MAA
M,GAAG,MAAM,EAAE,EACvB,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;IA2B5B,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IA0BvF;;;OAGG;IACH,eAAe,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,gBAAgB,EAAE,MAAM,CAAA;KAAE;IAIlF;;OAEG;IACH,gBAAgB,IAAI,qBAAqB,GAAG,SAAS;IAIrD;;;;OAIG;IACH,qBAAqB,IAAI,UAAU,GAAG,SAAS;IAI/C;;OAEG;IACH,kBAAkB,IAAI,IAAI;IAI1B;;;;OAIG;IACH,qBAAqB,IAAI,kBAAkB,GAAG,SAAS;IAIvD;;OAEG;IACH,kBAAkB,IAAI,IAAI;CAG3B;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,sBAAsB,IAAI,SAAS,CAkGlD;AAED;;;;;;;;;;;;GAYG;AACH,wBAAsB,sBAAsB,CAC1C,MAAM,EAAE,MAAM,EACd,EAAE,EAAE,QAAQ,EACZ,UAAU,CAAC,EAAE,UAAU,GACtB,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,CAiD3B"}
1
+ {"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../../src/llm/client.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAoBH;;;;GAIG;AACH,wBAAgB,qBAAqB,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAY3F;AAID,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAIpD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,sBAAsB,CAAC;AAClE,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,qBAAqB,CAAC;AAU7B,OAAO,EAAE,KAAK,UAAU,EAAiB,KAAK,oBAAoB,EAAE,MAAM,qBAAqB,CAAC;AAChG,OAAO,EAEL,KAAK,oBAAoB,EACzB,KAAK,kBAAkB,EACxB,MAAM,qBAAqB,CAAC;AAG7B,MAAM,MAAM,eAAe,GACvB,OAAO,GACP,MAAM,GACN,QAAQ,GACR,QAAQ,GACR,aAAa,GACb,iBAAiB,CAAC;AAEtB,MAAM,WAAW,eAAe;IAC9B,QAAQ,EAAE,eAAe,CAAC;IAC1B,MAAM,EAAE,MAAM,CAAC;IACf;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,OAAO,CAAC,MAAM,CAAC,CAAC;IACjC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,WAAW,CAAC;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,gBAAgB,CAAC,EAAE,eAAe,CAAC;IACnC,SAAS,CAAC,EAAE;QACV,iBAAiB,CAAC,EAAE,MAAM,CAAC;QAC3B,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,CAAC;IACF,oFAAoF;IACpF,oBAAoB,CAAC,EAAE,OAAO,CAAC;IAC/B,wEAAwE;IACxE,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,6BAA6B;IAC7B,oBAAoB,CAAC,EAAE,oBAAoB,CAAC;IAC5C,iEAAiE;IACjE,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,6BAA6B;IAC7B,oBAAoB,CAAC,EAAE,oBAAoB,CAAC;IAC5C,0EAA0E;IAC1E,aAAa,CAAC,EAAE,qBAAqB,CAAC;CACvC;AAQD,qBAAa,SAAS;IACpB,OAAO,CAAC,QAAQ,CAAc;IAC9B,OAAO,CAAC,gBAAgB,CAAC,CAAc;IACvC,OAAO,CAAC,qBAAqB,CAAC,CAAc;IAC5C,OAAO,CAAC,MAAM,CAAkB;IAChC,OAAO,CAAC,cAAc,CAAiB;IACvC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,aAAa,CAAC,CAAwB;IAC9C,wFAAwF;IACxF,OAAO,CAAC,aAAa,CAAS;gBAElB,MAAM,EAAE,eAAe;IA8CnC,OAAO,CAAC,cAAc;IA0BtB;;;OAGG;YACW,uBAAuB;IAmBrC,OAAO,CAAC,cAAc;IAoCtB,OAAO,CAAC,aAAa;IAMf,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAmHzE,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EAC
vB,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;IA2B5B,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IA0BvF;;;OAGG;IACH,eAAe,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,gBAAgB,EAAE,MAAM,CAAA;KAAE;IAIlF;;OAEG;IACH,gBAAgB,IAAI,qBAAqB,GAAG,SAAS;IAIrD;;;;OAIG;IACH,qBAAqB,IAAI,UAAU,GAAG,SAAS;IAI/C;;OAEG;IACH,kBAAkB,IAAI,IAAI;IAI1B;;;;OAIG;IACH,qBAAqB,IAAI,kBAAkB,GAAG,SAAS;IAIvD;;OAEG;IACH,kBAAkB,IAAI,IAAI;CAG3B;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,sBAAsB,IAAI,SAAS,CAwFlD;AAED;;;;;;;;;;;;GAYG;AACH,wBAAsB,sBAAsB,CAC1C,MAAM,EAAE,MAAM,EACd,EAAE,EAAE,QAAQ,EACZ,UAAU,CAAC,EAAE,UAAU,GACtB,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,CAiD3B"}
@@ -42,12 +42,10 @@ export function redactSensitiveFields(obj) {
42
42
  import { decryptApiKey } from '@revealui/db/crypto';
43
43
  import { tenantProviderConfigs, userApiKeys } from '@revealui/db/schema';
44
44
  import { and, eq } from 'drizzle-orm';
45
- import { AnthropicProvider } from './providers/anthropic.js';
46
45
  import { BitnetProvider } from './providers/bitnet.js';
47
46
  import { GroqProvider } from './providers/groq.js';
48
47
  import { InferenceSnapsProvider, } from './providers/inference-snaps.js';
49
48
  import { OllamaProvider } from './providers/ollama.js';
50
- import { OpenAIProvider } from './providers/openai.js';
51
49
  import { VultrProvider } from './providers/vultr.js';
52
50
  import { ResponseCache } from './response-cache.js';
53
51
  import { SemanticCache, } from './semantic-cache.js';
@@ -104,13 +102,6 @@ export class LLMClient {
104
102
  }
105
103
  createProvider(type, config) {
106
104
  switch (type) {
107
- case 'openai':
108
- return new OpenAIProvider(config);
109
- case 'anthropic':
110
- return new AnthropicProvider({
111
- ...config,
112
- enableCacheByDefault: this.config.enableCacheByDefault,
113
- });
114
105
  case 'vultr':
115
106
  return new VultrProvider(config);
116
107
  case 'groq':
@@ -372,11 +363,9 @@ export class LLMClient {
372
363
  * Create an LLM client from environment variables.
373
364
  *
374
365
  * When LLM_PROVIDER is not set, auto-detects the provider by checking env vars
375
- * in priority order: GROQ_API_KEY → OLLAMA_BASE_URL → ANTHROPIC_API_KEY.
366
+ * in priority order: INFERENCE_SNAPS → BITNET → GROQ → OLLAMA.
376
367
  *
377
- * GROQ and Ollama are preferred — they are free-tier and BYOK-friendly.
378
- * OpenAI is not in the auto-detection chain (no revenue yet — see LLM provider policy).
379
- * To use OpenAI, set LLM_PROVIDER=openai explicitly.
368
+ * All providers use OpenAI-compatible APIs. No proprietary provider SDKs.
380
369
  *
381
370
  * Provider defaults:
382
371
  * groq → llama-3.3-70b-versatile
@@ -400,29 +389,16 @@ export function createLLMClientFromEnv() {
400
389
  else if (process.env.OLLAMA_BASE_URL) {
401
390
  provider = 'ollama';
402
391
  }
403
- else if (process.env.ANTHROPIC_API_KEY) {
404
- provider = 'anthropic';
405
- }
406
392
  else {
407
- // No provider configured — throw a clear error. OpenAI is intentionally excluded from
408
- // auto-detection (no revenue yet). Set LLM_PROVIDER=openai explicitly if needed.
409
393
  throw new Error('No LLM provider configured. Set one of: BITNET_BASE_URL (local BitNet), ' +
410
394
  'INFERENCE_SNAPS_BASE_URL (local snap), GROQ_API_KEY (recommended cloud), ' +
411
- 'OLLAMA_BASE_URL (local Ollama), or ANTHROPIC_API_KEY. ' +
395
+ 'OLLAMA_BASE_URL (local Ollama). ' +
412
396
  'Alternatively, set LLM_PROVIDER explicitly.');
413
397
  }
414
398
  let apiKey;
415
399
  let baseURL;
416
400
  let defaultModel;
417
- if (provider === 'openai') {
418
- apiKey = process.env.OPENAI_API_KEY;
419
- baseURL = process.env.OPENAI_BASE_URL;
420
- }
421
- else if (provider === 'anthropic') {
422
- apiKey = process.env.ANTHROPIC_API_KEY;
423
- baseURL = process.env.ANTHROPIC_BASE_URL;
424
- }
425
- else if (provider === 'vultr') {
401
+ if (provider === 'vultr') {
426
402
  apiKey = process.env.VULTR_API_KEY;
427
403
  baseURL = process.env.VULTR_BASE_URL;
428
404
  }
@@ -1 +1 @@
1
- {"version":3,"file":"bitnet.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/bitnet.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAC;AAGnB,MAAM,WAAW,oBAAqB,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,2CAA2C;IAC3C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,oFAAoF;IACpF,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,qBAAa,cAAe,YAAW,WAAW;IAChD,OAAO,CAAC,KAAK,CAAiB;gBAElB,MAAM,EAAE,oBAAoB;IAUxC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAIhF,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,QAAQ,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAO9F"}
1
+ {"version":3,"file":"bitnet.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/bitnet.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAC;AAGnB,MAAM,WAAW,oBAAqB,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,2CAA2C;IAC3C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,oFAAoF;IACpF,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,qBAAa,cAAe,YAAW,WAAW;IAChD,OAAO,CAAC,KAAK,CAAuB;gBAExB,MAAM,EAAE,oBAAoB;IAUxC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAIhF,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,QAAQ,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAO9F"}
@@ -10,11 +10,11 @@
10
10
  * Note: BitNet is a generative model only. It does not expose /v1/embeddings.
11
11
  * For vector search, use Ollama (nomic-embed-text) or @xenova/transformers.
12
12
  */
13
- import { OpenAIProvider } from './openai.js';
13
+ import { OpenAICompatProvider } from './openai-compat.js';
14
14
  export class BitnetProvider {
15
15
  inner;
16
16
  constructor(config) {
17
- this.inner = new OpenAIProvider({
17
+ this.inner = new OpenAICompatProvider({
18
18
  ...config,
19
19
  // llama-server ignores the API key but the OpenAI client requires a non-empty value
20
20
  apiKey: config.apiKey ?? 'bitnet',
@@ -1 +1 @@
1
- {"version":3,"file":"groq.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/groq.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAC;AAGnB,MAAM,WAAW,kBAAmB,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IAC3E,MAAM,EAAE,MAAM,CAAC;IACf,iDAAiD;IACjD,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,0CAA0C;IAC1C,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,qBAAa,YAAa,YAAW,WAAW;IAC9C,OAAO,CAAC,KAAK,CAAiB;gBAElB,MAAM,EAAE,kBAAkB;IAQtC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAIhF,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,QAAQ,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAG9F"}
1
+ {"version":3,"file":"groq.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/groq.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAC;AAGnB,MAAM,WAAW,kBAAmB,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IAC3E,MAAM,EAAE,MAAM,CAAC;IACf,iDAAiD;IACjD,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,0CAA0C;IAC1C,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,qBAAa,YAAa,YAAW,WAAW;IAC9C,OAAO,CAAC,KAAK,CAAuB;gBAExB,MAAM,EAAE,kBAAkB;IAQtC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAIhF,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,QAAQ,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAG9F"}
@@ -5,11 +5,11 @@
5
5
  * Free tier: 6,000 TPM / 500k TPD for Llama 3.3 70B.
6
6
  * Sign up: console.groq.com
7
7
  */
8
- import { OpenAIProvider } from './openai.js';
8
+ import { OpenAICompatProvider } from './openai-compat.js';
9
9
  export class GroqProvider {
10
10
  inner;
11
11
  constructor(config) {
12
- this.inner = new OpenAIProvider({
12
+ this.inner = new OpenAICompatProvider({
13
13
  ...config,
14
14
  baseURL: config.baseURL ?? 'https://api.groq.com/openai/v1',
15
15
  model: config.model ?? 'llama-3.3-70b-versatile',
@@ -1 +1 @@
1
- {"version":3,"file":"inference-snaps.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/inference-snaps.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAC;AAGnB,MAAM,WAAW,4BAA6B,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IACrF,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,6EAA6E;IAC7E,OAAO,EAAE,MAAM,CAAC;IAChB,6FAA6F;IAC7F,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,qEAAqE;IACrE,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,qBAAa,sBAAuB,YAAW,WAAW;IACxD,OAAO,CAAC,KAAK,CAAiB;IAC9B,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,4BAA4B;IAahD,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAI1E,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,QAAQ,CAAC,EAAE,eAAe,GACzB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAsBpC"}
1
+ {"version":3,"file":"inference-snaps.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/inference-snaps.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAC;AAGnB,MAAM,WAAW,4BAA6B,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IACrF,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,6EAA6E;IAC7E,OAAO,EAAE,MAAM,CAAC;IAChB,6FAA6F;IAC7F,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,qEAAqE;IACrE,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,qBAAa,sBAAuB,YAAW,WAAW;IACxD,OAAO,CAAC,KAAK,CAAuB;IACpC,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,4BAA4B;IAahD,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAI1E,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,QAAQ,CAAC,EAAE,eAAe,GACzB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAsBpC"}
@@ -22,7 +22,7 @@
22
22
  *
23
23
  * Docs: https://documentation.ubuntu.com/inference-snaps
24
24
  */
25
- import { OpenAIProvider } from './openai.js';
25
+ import { OpenAICompatProvider } from './openai-compat.js';
26
26
  export class InferenceSnapsProvider {
27
27
  inner;
28
28
  embedModel;
@@ -31,7 +31,7 @@ export class InferenceSnapsProvider {
31
31
  this.baseURL = config.baseURL;
32
32
  // Use the same model for embeddings unless explicitly overridden
33
33
  this.embedModel = config.embedModel ?? config.model ?? 'gemma3';
34
- this.inner = new OpenAIProvider({
34
+ this.inner = new OpenAICompatProvider({
35
35
  ...config,
36
36
  // inference-snaps ignores the API key; OpenAI client requires a non-empty value
37
37
  apiKey: config.apiKey ?? 'inference-snaps',
@@ -1 +1 @@
1
- {"version":3,"file":"ollama.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/ollama.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAC;AAGnB,MAAM,WAAW,oBAAqB,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,4CAA4C;IAC5C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,gFAAgF;IAChF,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,+FAA+F;IAC/F,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,qBAAa,cAAe,YAAW,WAAW;IAChD,OAAO,CAAC,KAAK,CAAiB;IAC9B,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,oBAAoB;IAaxC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAI1E,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,QAAQ,CAAC,EAAE,eAAe,GACzB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAsBpC"}
1
+ {"version":3,"file":"ollama.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/ollama.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAC;AAGnB,MAAM,WAAW,oBAAqB,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,4CAA4C;IAC5C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,gFAAgF;IAChF,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,+FAA+F;IAC/F,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,qBAAa,cAAe,YAAW,WAAW;IAChD,OAAO,CAAC,KAAK,CAAuB;IACpC,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,oBAAoB;IAaxC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAI1E,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,QAAQ,CAAC,EAAE,eAAe,GACzB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAsBpC"}
@@ -5,7 +5,7 @@
5
5
  * No API key required. Zero cost, fully offline.
6
6
  * Install: https://ollama.com
7
7
  */
8
- import { OpenAIProvider } from './openai.js';
8
+ import { OpenAICompatProvider } from './openai-compat.js';
9
9
  export class OllamaProvider {
10
10
  inner;
11
11
  embedModel;
@@ -14,7 +14,7 @@ export class OllamaProvider {
14
14
  const baseURL = config.baseURL ?? 'http://localhost:11434/v1';
15
15
  this.baseURL = baseURL;
16
16
  this.embedModel = config.embedModel ?? 'nomic-embed-text';
17
- this.inner = new OpenAIProvider({
17
+ this.inner = new OpenAICompatProvider({
18
18
  ...config,
19
19
  // Ollama ignores the API key but the OpenAI client requires a non-empty value
20
20
  apiKey: config.apiKey ?? 'ollama',
@@ -1,19 +1,20 @@
1
1
  /**
2
- * OpenAI Provider
2
+ * OpenAI-Compatible Provider
3
3
  *
4
- * Implementation of LLMProvider for OpenAI API
4
+ * Base implementation for any LLM API that follows the OpenAI chat/completions
5
+ * format. Used by: Ollama, Groq, Inference Snaps, BitNet, Vultr.
6
+ * NOT for direct OpenAI usage — RevealUI uses open-source models only.
5
7
  */
6
8
  import type { Embedding, LLMChatOptions, LLMChunk, LLMEmbedOptions, LLMProvider, LLMProviderConfig, LLMResponse, LLMStreamOptions, Message } from './base.js';
7
- export interface OpenAIProviderConfig extends LLMProviderConfig {
8
- organization?: string;
9
+ export interface OpenAICompatConfig extends LLMProviderConfig {
9
10
  }
10
- export declare class OpenAIProvider implements LLMProvider {
11
+ export declare class OpenAICompatProvider implements LLMProvider {
11
12
  private config;
12
13
  private baseURL;
13
- constructor(config: OpenAIProviderConfig);
14
+ constructor(config: OpenAICompatConfig);
14
15
  chat(messages: Message[], options?: LLMChatOptions): Promise<LLMResponse>;
15
16
  embed(text: string | string[], options?: LLMEmbedOptions): Promise<Embedding | Embedding[]>;
16
17
  stream(messages: Message[], options?: LLMStreamOptions): AsyncIterable<LLMChunk>;
17
18
  private formatMessages;
18
19
  }
19
- //# sourceMappingURL=openai.d.ts.map
20
+ //# sourceMappingURL=openai-compat.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openai-compat.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/openai-compat.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EACV,SAAS,EAET,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EAER,MAAM,WAAW,CAAC;AAEnB,MAAM,WAAW,kBAAmB,SAAQ,iBAAiB;CAAG;AAqDhE,qBAAa,oBAAqB,YAAW,WAAW;IACtD,OAAO,CAAC,MAAM,CAAqB;IACnC,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,kBAAkB;IAUhC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IA6EzE,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;IAyC5B,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAuFvF,OAAO,CAAC,cAAc;CA4BvB"}
@@ -1,10 +1,11 @@
1
1
  /**
2
- * OpenAI Provider
2
+ * OpenAI-Compatible Provider
3
3
  *
4
- * Implementation of LLMProvider for OpenAI API
4
+ * Base implementation for any LLM API that follows the OpenAI chat/completions
5
+ * format. Used by: Ollama, Groq, Inference Snaps, BitNet, Vultr.
6
+ * NOT for direct OpenAI usage — RevealUI uses open-source models only.
5
7
  */
6
8
  const authorizationHeader = 'Authorization';
7
- const openAiOrganizationHeader = 'OpenAI-Organization';
8
9
  const maxTokensKey = 'max_tokens';
9
10
  const toolChoiceKey = 'tool_choice';
10
11
  const toolCallsKey = 'tool_calls';
@@ -27,12 +28,15 @@ const isFunctionToolCall = (call) => {
27
28
  const fn = asRecord(record.function);
28
29
  return !!fn && typeof fn.name === 'string' && typeof fn.arguments === 'string';
29
30
  };
30
- export class OpenAIProvider {
31
+ export class OpenAICompatProvider {
31
32
  config;
32
33
  baseURL;
33
34
  constructor(config) {
34
35
  this.config = config;
35
- this.baseURL = config.baseURL || 'https://api.openai.com/v1';
36
+ if (!config.baseURL) {
37
+ throw new Error('OpenAICompatProvider requires a baseURL — use a specific provider (InferenceSnapsProvider, BitNetProvider, OllamaProvider, etc.)');
38
+ }
39
+ this.baseURL = config.baseURL;
36
40
  }
37
41
  async chat(messages, options) {
38
42
  const response = await fetch(`${this.baseURL}/chat/completions`, {
@@ -40,12 +44,9 @@ export class OpenAIProvider {
40
44
  headers: {
41
45
  'Content-Type': 'application/json',
42
46
  [authorizationHeader]: `Bearer ${this.config.apiKey}`,
43
- ...(this.config.organization && {
44
- [openAiOrganizationHeader]: this.config.organization,
45
- }),
46
47
  },
47
48
  body: JSON.stringify({
48
- model: this.config.model || 'gpt-4o-mini',
49
+ model: this.config.model || 'default',
49
50
  messages: this.formatMessages(messages),
50
51
  temperature: options?.temperature ?? this.config.temperature ?? 0.7,
51
52
  [maxTokensKey]: options?.maxTokens ?? this.config.maxTokens,
@@ -111,10 +112,8 @@ export class OpenAIProvider {
111
112
  headers: {
112
113
  'Content-Type': 'application/json',
113
114
  [authorizationHeader]: `Bearer ${this.config.apiKey}`,
114
- ...(this.config.organization && {
115
- [openAiOrganizationHeader]: this.config.organization,
116
- }),
117
115
  },
116
+ // lgtm[js/file-access-to-http] — embedding providers must send text to their API by design
118
117
  body: JSON.stringify({
119
118
  model,
120
119
  input: texts,
@@ -146,12 +145,9 @@ export class OpenAIProvider {
146
145
  headers: {
147
146
  'Content-Type': 'application/json',
148
147
  [authorizationHeader]: `Bearer ${this.config.apiKey}`,
149
- ...(this.config.organization && {
150
- [openAiOrganizationHeader]: this.config.organization,
151
- }),
152
148
  },
153
149
  body: JSON.stringify({
154
- model: this.config.model || 'gpt-4o-mini',
150
+ model: this.config.model || 'default',
155
151
  messages: this.formatMessages(messages),
156
152
  temperature: options?.temperature ?? this.config.temperature ?? 0.7,
157
153
  [maxTokensKey]: options?.maxTokens ?? this.config.maxTokens,
@@ -1 +1 @@
1
- {"version":3,"file":"vultr.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/vultr.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EAER,MAAM,WAAW,CAAC;AAEnB,MAAM,WAAW,mBAAoB,SAAQ,iBAAiB;IAC5D,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAgBD,qBAAa,aAAc,YAAW,WAAW;IAC/C,OAAO,CAAC,MAAM,CAAsB;IACpC,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,mBAAmB;IAKjC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IA2DzE,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;IAuC5B,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;CAkExF"}
1
+ {"version":3,"file":"vultr.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/vultr.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EAER,MAAM,WAAW,CAAC;AAEnB,MAAM,WAAW,mBAAoB,SAAQ,iBAAiB;IAC5D,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAgBD,qBAAa,aAAc,YAAW,WAAW;IAC/C,OAAO,CAAC,MAAM,CAAsB;IACpC,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,mBAAmB;IAKjC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IA2DzE,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;IAwC5B,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;CAkExF"}
@@ -81,6 +81,7 @@ export class VultrProvider {
81
81
  [contentTypeHeader]: 'application/json',
82
82
  [authorizationHeader]: `Bearer ${this.config.apiKey}`,
83
83
  },
84
+ // lgtm[js/file-access-to-http] — embedding providers must send text to their API by design
84
85
  body: JSON.stringify({ model, input: inputs }),
85
86
  });
86
87
  if (!res.ok) {
@@ -5,12 +5,11 @@
5
5
  * Use this in API routes and server-side code.
6
6
  */
7
7
  export * from './client.js';
8
- export * from './providers/anthropic.js';
9
8
  export * from './providers/base.js';
10
9
  export * from './providers/bitnet.js';
11
10
  export * from './providers/groq.js';
12
11
  export * from './providers/inference-snaps.js';
13
12
  export * from './providers/ollama.js';
14
- export * from './providers/openai.js';
13
+ export * from './providers/openai-compat.js';
15
14
  export * from './providers/vultr.js';
16
15
  //# sourceMappingURL=server.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../../src/llm/server.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAGH,cAAc,aAAa,CAAC;AAG5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,qBAAqB,CAAC;AACpC,cAAc,uBAAuB,CAAC;AACtC,cAAc,qBAAqB,CAAC;AACpC,cAAc,gCAAgC,CAAC;AAC/C,cAAc,uBAAuB,CAAC;AACtC,cAAc,uBAAuB,CAAC;AACtC,cAAc,sBAAsB,CAAC"}
1
+ {"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../../src/llm/server.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAGH,cAAc,aAAa,CAAC;AAG5B,cAAc,qBAAqB,CAAC;AACpC,cAAc,uBAAuB,CAAC;AACtC,cAAc,qBAAqB,CAAC;AACpC,cAAc,gCAAgC,CAAC;AAC/C,cAAc,uBAAuB,CAAC;AACtC,cAAc,8BAA8B,CAAC;AAC7C,cAAc,sBAAsB,CAAC"}
@@ -7,11 +7,10 @@
7
7
  // Export LLM client and factory functions
8
8
  export * from './client.js';
9
9
  // Export provider implementations
10
- export * from './providers/anthropic.js';
11
10
  export * from './providers/base.js';
12
11
  export * from './providers/bitnet.js';
13
12
  export * from './providers/groq.js';
14
13
  export * from './providers/inference-snaps.js';
15
14
  export * from './providers/ollama.js';
16
- export * from './providers/openai.js';
15
+ export * from './providers/openai-compat.js';
17
16
  export * from './providers/vultr.js';
@@ -27,13 +27,13 @@ export declare const SkillMetadataSchema: z.ZodObject<{
27
27
  repository: z.ZodOptional<z.ZodString>;
28
28
  tags: z.ZodOptional<z.ZodArray<z.ZodString>>;
29
29
  compatibility: z.ZodOptional<z.ZodArray<z.ZodEnum<{
30
- openai: "openai";
31
- anthropic: "anthropic";
32
30
  "claude-code": "claude-code";
33
31
  cursor: "cursor";
34
32
  windsurf: "windsurf";
35
33
  cline: "cline";
36
34
  copilot: "copilot";
35
+ openai: "openai";
36
+ anthropic: "anthropic";
37
37
  universal: "universal";
38
38
  }>>>;
39
39
  allowedTools: z.ZodOptional<z.ZodArray<z.ZodString>>;
@@ -78,13 +78,13 @@ export declare const SkillSchema: z.ZodObject<{
78
78
  repository: z.ZodOptional<z.ZodString>;
79
79
  tags: z.ZodOptional<z.ZodArray<z.ZodString>>;
80
80
  compatibility: z.ZodOptional<z.ZodArray<z.ZodEnum<{
81
- openai: "openai";
82
- anthropic: "anthropic";
83
81
  "claude-code": "claude-code";
84
82
  cursor: "cursor";
85
83
  windsurf: "windsurf";
86
84
  cline: "cline";
87
85
  copilot: "copilot";
86
+ openai: "openai";
87
+ anthropic: "anthropic";
88
88
  universal: "universal";
89
89
  }>>>;
90
90
  allowedTools: z.ZodOptional<z.ZodArray<z.ZodString>>;
@@ -143,13 +143,13 @@ export declare const SkillActivationResultSchema: z.ZodObject<{
143
143
  repository: z.ZodOptional<z.ZodString>;
144
144
  tags: z.ZodOptional<z.ZodArray<z.ZodString>>;
145
145
  compatibility: z.ZodOptional<z.ZodArray<z.ZodEnum<{
146
- openai: "openai";
147
- anthropic: "anthropic";
148
146
  "claude-code": "claude-code";
149
147
  cursor: "cursor";
150
148
  windsurf: "windsurf";
151
149
  cline: "cline";
152
150
  copilot: "copilot";
151
+ openai: "openai";
152
+ anthropic: "anthropic";
153
153
  universal: "universal";
154
154
  }>>>;
155
155
  allowedTools: z.ZodOptional<z.ZodArray<z.ZodString>>;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@revealui/ai",
3
- "version": "0.2.6",
3
+ "version": "0.2.8",
4
4
  "description": "[Pro] AI system for RevealUI - memory, LLM, orchestration, and tools",
5
5
  "keywords": [
6
6
  "agent",
@@ -21,13 +21,13 @@
21
21
  "url": "https://github.com/RevealUIStudio/revealui.git",
22
22
  "directory": "packages/ai"
23
23
  },
24
- "license": "SEE LICENSE IN ../../LICENSE.commercial",
24
+ "license": "FSL-1.1-MIT",
25
25
  "dependencies": {
26
26
  "lru-cache": "^11.2.5",
27
27
  "zod": "^4.3.5",
28
- "@revealui/contracts": "1.3.4",
29
- "@revealui/core": "0.5.3",
30
- "@revealui/db": "0.3.4"
28
+ "@revealui/contracts": "1.3.6",
29
+ "@revealui/core": "0.5.5",
30
+ "@revealui/db": "0.3.6"
31
31
  },
32
32
  "devDependencies": {
33
33
  "@testing-library/react": "^16.3.2",
@@ -141,7 +141,8 @@
141
141
  }
142
142
  },
143
143
  "files": [
144
- "dist"
144
+ "dist",
145
+ "LICENSE"
145
146
  ],
146
147
  "funding": {
147
148
  "type": "commercial",
@@ -1,111 +0,0 @@
1
- RevealUI Commercial License
2
- Version 1.0, February 2026
3
-
4
- Copyright (c) 2025-2026 RevealUI Studio (founder@revealui.com)
5
-
6
- TERMS AND CONDITIONS
7
-
8
- 1. DEFINITIONS
9
-
10
- "Software" means the RevealUI source code, documentation, and associated
11
- files contained in directories and packages designated as commercial,
12
- including but not limited to: packages/ai, packages/harnesses, and any
13
- directory named "ee" within the repository.
14
-
15
- "License Key" means a valid RevealUI license key obtained through an active
16
- paid subscription at https://revealui.com.
17
-
18
- "Licensee" means the individual or organization that holds a valid License
19
- Key through an active subscription.
20
-
21
- "Production Use" means any use of the Software beyond local development and
22
- evaluation, including but not limited to: deploying the Software to serve
23
- end users, integrating the Software into a product or service, or using the
24
- Software in a revenue-generating capacity.
25
-
26
- 2. GRANT OF RIGHTS
27
-
28
- Subject to the terms of this License and a valid License Key, the Licensee
29
- is granted a non-exclusive, non-transferable, revocable license to:
30
-
31
- (a) Use the Software for internal development and Production Use.
32
- (b) Modify the Software for internal use.
33
- (c) Deploy the Software on infrastructure controlled by the Licensee.
34
-
35
- Enterprise License holders are additionally granted the right to:
36
-
37
- (d) Deploy the Software in a self-hosted environment.
38
- (e) Remove or replace RevealUI branding (white-label).
39
- (f) Use the Software for multiple tenants within the Licensee's
40
- organization or customer base.
41
-
42
- 3. RESTRICTIONS
43
-
44
- The Licensee SHALL NOT:
45
-
46
- (a) Provide the Software, or any portion of it, to third parties as a
47
- hosted or managed service that competes with RevealUI.
48
- (b) Redistribute, sublicense, sell, or otherwise transfer the Software
49
- or any portion of it to third parties.
50
- (c) Remove, alter, or circumvent the license key verification
51
- functionality of the Software.
52
- (d) Use the Software in Production without a valid License Key.
53
- (e) Share, publish, or make the License Key available to unauthorized
54
- parties.
55
-
56
- 4. EVALUATION
57
-
58
- The Software may be used for evaluation and local development purposes
59
- without a License Key. Evaluation use does not grant any rights to
60
- Production Use.
61
-
62
- 5. SUBSCRIPTION AND PAYMENT
63
-
64
- This License is valid only during the term of an active paid subscription.
65
- Upon cancellation or expiration of the subscription:
66
-
67
- (a) The License terminates automatically.
68
- (b) A grace period of fourteen (14) days is provided for the Licensee
69
- to transition away from Production Use.
70
- (c) After the grace period, the Licensee must cease all Production Use
71
- of the Software.
72
-
73
- 6. INTELLECTUAL PROPERTY
74
-
75
- The Software is and remains the intellectual property of RevealUI Studio.
76
- This License does not grant any ownership rights. Contributions to
77
- commercial portions of the Software require a Contributor License Agreement.
78
-
79
- 7. OPEN SOURCE COMPONENTS
80
-
81
- This License applies only to files and directories designated as commercial.
82
- Files under the MIT License (as indicated in the root LICENSE file) are not
83
- subject to this commercial license and may be used freely under MIT terms.
84
-
85
- 8. DISCLAIMER OF WARRANTY
86
-
87
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
88
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
89
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
90
-
91
- 9. LIMITATION OF LIABILITY
92
-
93
- IN NO EVENT SHALL REVEALUI STUDIO BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
94
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
95
- FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
96
- DEALINGS IN THE SOFTWARE, EXCEEDING THE AMOUNT PAID BY THE LICENSEE IN
97
- THE TWELVE (12) MONTHS PRECEDING THE CLAIM.
98
-
99
- 10. GOVERNING LAW
100
-
101
- This License shall be governed by the laws of the State of California,
102
- United States of America, without regard to its conflict of law provisions.
103
-
104
- 11. ENTIRE AGREEMENT
105
-
106
- This License constitutes the entire agreement between the parties with
107
- respect to the commercial portions of the Software and supersedes all
108
- prior agreements, understandings, and communications.
109
-
110
- For licensing inquiries: founder@revealui.com
111
- For pricing and subscriptions: https://revealui.com/pricing
@@ -1,31 +0,0 @@
1
- /**
2
- * Anthropic Provider
3
- *
4
- * Implementation of LLMProvider for Anthropic Claude API
5
- */
6
- import type { Embedding, LLMChatOptions, LLMChunk, LLMEmbedOptions, LLMProvider, LLMProviderConfig, LLMResponse, LLMStreamOptions, Message } from './base.js';
7
- export interface AnthropicProviderConfig extends LLMProviderConfig {
8
- apiVersion?: string;
9
- /** Enable prompt caching by default (5min TTL, 90% cost reduction on cache hits) */
10
- enableCacheByDefault?: boolean;
11
- }
12
- export declare class AnthropicProvider implements LLMProvider {
13
- private config;
14
- private baseURL;
15
- constructor(config: AnthropicProviderConfig);
16
- chat(messages: Message[], options?: LLMChatOptions): Promise<LLMResponse>;
17
- embed(text: string | string[], options?: LLMEmbedOptions): Promise<Embedding | Embedding[]>;
18
- stream(messages: Message[], options?: LLMStreamOptions): AsyncIterable<LLMChunk>;
19
- /**
20
- * Format system messages with optional caching
21
- * Caches the last system message for maximum benefit
22
- */
23
- private formatSystemMessages;
24
- /**
25
- * Format tools with optional caching
26
- * Caches the last tool definition for maximum benefit
27
- */
28
- private formatTools;
29
- private formatMessages;
30
- }
31
- //# sourceMappingURL=anthropic.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/anthropic.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EAEV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EAER,MAAM,WAAW,CAAC;AAenB,MAAM,WAAW,uBAAwB,SAAQ,iBAAiB;IAChE,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,oFAAoF;IACpF,oBAAoB,CAAC,EAAE,OAAO,CAAC;CAChC;AAqDD,qBAAa,iBAAkB,YAAW,WAAW;IACnD,OAAO,CAAC,MAAM,CAA0B;IACxC,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,uBAAuB;IAKrC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IA+F/E,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;IAUpF,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAoGvF;;;OAGG;IACH,OAAO,CAAC,oBAAoB;IA0B5B;;;OAGG;IACH,OAAO,CAAC,WAAW;IAmBnB,OAAO,CAAC,cAAc;CAkBvB"}
@@ -1,264 +0,0 @@
1
- /**
2
- * Anthropic Provider
3
- *
4
- * Implementation of LLMProvider for Anthropic Claude API
5
- */
6
- /**
7
- * Extract plain text from a message content value.
8
- * Anthropic has its own image format — for now, image parts are silently skipped
9
- * and only text parts are forwarded. Vision via Anthropic is out of scope.
10
- */
11
- function toTextContent(content) {
12
- if (typeof content === 'string')
13
- return content;
14
- return content
15
- .filter((p) => p.type === 'text')
16
- .map((p) => p.text)
17
- .join('\n');
18
- }
19
- const isRecord = (value) => typeof value === 'object' && value !== null && !Array.isArray(value);
20
- const isTextBlock = (block) => block.type === 'text' && typeof block.text === 'string';
21
- const isToolUseBlock = (block) => block.type === 'tool_use';
22
- const maxTokensKey = 'max_tokens';
23
- const inputTokensKey = 'input_tokens';
24
- const outputTokensKey = 'output_tokens';
25
- const stopReasonKey = 'stop_reason';
26
- const cacheCreationTokensKey = 'cache_creation_input_tokens';
27
- const cacheReadTokensKey = 'cache_read_input_tokens';
28
- export class AnthropicProvider {
29
- config;
30
- baseURL;
31
- constructor(config) {
32
- this.config = config;
33
- this.baseURL = config.baseURL || 'https://api.anthropic.com/v1';
34
- }
35
- async chat(messages, options) {
36
- // Anthropic API format is slightly different
37
- const systemMessages = messages.filter((m) => m.role === 'system');
38
- const conversationMessages = messages.filter((m) => m.role !== 'system');
39
- const enableCache = options?.enableCache ?? this.config.enableCacheByDefault ?? false;
40
- // Use 2024-07-15 API version for prompt caching support
41
- const apiVersion = enableCache ? '2024-07-15' : this.config.apiVersion || '2023-06-01';
42
- // Format system messages with caching
43
- const systemContent = this.formatSystemMessages(systemMessages, enableCache);
44
- // Format tools with caching (cache last tool if enabled)
45
- const tools = this.formatTools(options?.tools, enableCache);
46
- const response = await fetch(`${this.baseURL}/messages`, {
47
- method: 'POST',
48
- headers: {
49
- 'Content-Type': 'application/json',
50
- 'x-api-key': this.config.apiKey,
51
- 'anthropic-version': apiVersion,
52
- },
53
- body: JSON.stringify({
54
- model: this.config.model || 'claude-3-5-sonnet-20241022',
55
- system: systemContent,
56
- messages: this.formatMessages(conversationMessages),
57
- temperature: options?.temperature ?? this.config.temperature ?? 0.7,
58
- [maxTokensKey]: options?.maxTokens ?? this.config.maxTokens ?? 4096,
59
- tools,
60
- }),
61
- });
62
- if (!response.ok) {
63
- const errorPayload = (await response.json().catch(() => undefined));
64
- const errorMessage = isRecord(errorPayload) &&
65
- isRecord(errorPayload.error) &&
66
- typeof errorPayload.error.message === 'string'
67
- ? errorPayload.error.message
68
- : response.statusText;
69
- throw new Error(`Anthropic API error: ${errorMessage}`);
70
- }
71
- const data = (await response.json());
72
- const contentBlocks = Array.isArray(data.content)
73
- ? data.content
74
- : [];
75
- const textBlock = contentBlocks.find(isTextBlock);
76
- const toolCalls = contentBlocks.filter(isToolUseBlock).map((tc) => ({
77
- id: tc.id,
78
- type: 'function',
79
- function: {
80
- name: tc.name,
81
- arguments: JSON.stringify(tc.input),
82
- },
83
- }));
84
- const usage = data.usage && typeof data.usage === 'object'
85
- ? data.usage
86
- : undefined;
87
- const inputTokens = usage && typeof usage[inputTokensKey] === 'number' ? usage[inputTokensKey] : undefined;
88
- const outputTokens = usage && typeof usage[outputTokensKey] === 'number' ? usage[outputTokensKey] : undefined;
89
- const cacheCreationTokens = usage && typeof usage[cacheCreationTokensKey] === 'number'
90
- ? usage[cacheCreationTokensKey]
91
- : undefined;
92
- const cacheReadTokens = usage && typeof usage[cacheReadTokensKey] === 'number'
93
- ? usage[cacheReadTokensKey]
94
- : undefined;
95
- const finishReason = typeof data[stopReasonKey] === 'string'
96
- ? data[stopReasonKey]
97
- : undefined;
98
- return {
99
- content: textBlock?.text || '',
100
- role: 'assistant',
101
- toolCalls,
102
- finishReason,
103
- usage: inputTokens !== undefined && outputTokens !== undefined
104
- ? {
105
- promptTokens: inputTokens,
106
- completionTokens: outputTokens,
107
- totalTokens: inputTokens + outputTokens,
108
- cacheCreationTokens,
109
- cacheReadTokens,
110
- }
111
- : undefined,
112
- };
113
- }
114
- embed(text, options) {
115
- void text;
116
- void options;
117
- // Anthropic doesn't have a separate embeddings API
118
- // Would need to use a different provider or service
119
- return Promise.reject(new Error('Anthropic does not support embeddings. Use OpenAI provider for embeddings.'));
120
- }
121
- async *stream(messages, options) {
122
- const systemMessages = messages.filter((m) => m.role === 'system');
123
- const conversationMessages = messages.filter((m) => m.role !== 'system');
124
- const enableCache = options?.enableCache ?? this.config.enableCacheByDefault ?? false;
125
- // Use 2024-07-15 API version for prompt caching support
126
- const apiVersion = enableCache ? '2024-07-15' : this.config.apiVersion || '2023-06-01';
127
- // Format system messages with caching
128
- const systemContent = this.formatSystemMessages(systemMessages, enableCache);
129
- // Format tools with caching
130
- const tools = this.formatTools(options?.tools, enableCache);
131
- const response = await fetch(`${this.baseURL}/messages`, {
132
- method: 'POST',
133
- headers: {
134
- 'Content-Type': 'application/json',
135
- 'x-api-key': this.config.apiKey,
136
- 'anthropic-version': apiVersion,
137
- },
138
- body: JSON.stringify({
139
- model: this.config.model || 'claude-3-5-sonnet-20241022',
140
- system: systemContent,
141
- messages: this.formatMessages(conversationMessages),
142
- temperature: options?.temperature ?? this.config.temperature ?? 0.7,
143
- [maxTokensKey]: options?.maxTokens ?? this.config.maxTokens ?? 4096,
144
- tools,
145
- stream: true,
146
- }),
147
- });
148
- if (!response.ok) {
149
- const errorPayload = (await response.json().catch(() => undefined));
150
- const errorMessage = isRecord(errorPayload) &&
151
- isRecord(errorPayload.error) &&
152
- typeof errorPayload.error.message === 'string'
153
- ? errorPayload.error.message
154
- : response.statusText;
155
- throw new Error(`Anthropic API error: ${errorMessage}`);
156
- }
157
- const reader = response.body?.getReader();
158
- const decoder = new TextDecoder();
159
- if (!reader) {
160
- throw new Error('Response body is not readable');
161
- }
162
- let buffer = '';
163
- while (true) {
164
- const { done, value } = await reader.read();
165
- if (done) {
166
- yield { content: '', done: true };
167
- break;
168
- }
169
- buffer += decoder.decode(value, { stream: true });
170
- const lines = buffer.split('\n');
171
- buffer = lines.pop() || '';
172
- for (const line of lines) {
173
- if (line.startsWith('data: ')) {
174
- const data = line.slice(6);
175
- if (data === '[DONE]') {
176
- yield { content: '', done: true };
177
- return;
178
- }
179
- try {
180
- const parsed = JSON.parse(data);
181
- if (!isRecord(parsed)) {
182
- continue;
183
- }
184
- const eventType = typeof parsed.type === 'string' ? parsed.type : undefined;
185
- if (eventType === 'content_block_delta' && isRecord(parsed.delta)) {
186
- const deltaType = typeof parsed.delta.type === 'string' ? parsed.delta.type : undefined;
187
- if (deltaType === 'text_delta') {
188
- yield {
189
- content: typeof parsed.delta.text === 'string' ? parsed.delta.text : '',
190
- done: false,
191
- };
192
- }
193
- }
194
- else if (eventType === 'message_stop') {
195
- yield { content: '', done: true };
196
- return;
197
- }
198
- }
199
- catch {
200
- // Ignore parse errors for incomplete chunks
201
- }
202
- }
203
- }
204
- }
205
- }
206
- /**
207
- * Format system messages with optional caching
208
- * Caches the last system message for maximum benefit
209
- */
210
- formatSystemMessages(systemMessages, enableCache) {
211
- if (systemMessages.length === 0) {
212
- return '';
213
- }
214
- // If caching disabled, use simple string format
215
- if (!enableCache) {
216
- return systemMessages.map((m) => toTextContent(m.content)).join('\n');
217
- }
218
- // With caching, use structured format and cache the last block
219
- return systemMessages.map((msg, index) => ({
220
- type: 'text',
221
- text: toTextContent(msg.content),
222
- // Cache the last system message (most likely to be reused)
223
- ...(index === systemMessages.length - 1 && msg.cacheControl
224
- ? { cache_control: msg.cacheControl }
225
- : index === systemMessages.length - 1
226
- ? { cache_control: { type: 'ephemeral' } }
227
- : {}),
228
- }));
229
- }
230
- /**
231
- * Format tools with optional caching
232
- * Caches the last tool definition for maximum benefit
233
- */
234
- formatTools(tools, enableCache) {
235
- if (!tools || tools.length === 0) {
236
- return undefined;
237
- }
238
- return tools.map((tool, index) => ({
239
- name: tool.function.name,
240
- description: tool.function.description,
241
- input_schema: tool.function.parameters,
242
- // Cache the last tool (most likely to be reused across calls)
243
- ...(enableCache && index === tools.length - 1
244
- ? { cache_control: { type: 'ephemeral' } }
245
- : {}),
246
- }));
247
- }
248
- formatMessages(messages) {
249
- return messages
250
- .map((msg) => {
251
- if (msg.role === 'system') {
252
- // System messages are handled separately in Anthropic API
253
- return null;
254
- }
255
- const formatted = {
256
- role: msg.role === 'assistant' ? 'assistant' : 'user',
257
- // Anthropic uses a different image format; extract text only for now.
258
- content: toTextContent(msg.content),
259
- };
260
- return formatted;
261
- })
262
- .filter((message) => Boolean(message));
263
- }
264
- }
@@ -1 +0,0 @@
1
- {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/openai.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EACV,SAAS,EAET,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EAER,MAAM,WAAW,CAAC;AAEnB,MAAM,WAAW,oBAAqB,SAAQ,iBAAiB;IAC7D,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAsDD,qBAAa,cAAe,YAAW,WAAW;IAChD,OAAO,CAAC,MAAM,CAAuB;IACrC,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,oBAAoB;IAKlC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAgFzE,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;IA2C5B,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IA0FvF,OAAO,CAAC,cAAc;CA4BvB"}