graphlit-client 1.0.20250615002 → 1.0.20250615003

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/README.md +152 -14
  2. package/package.json +1 -1
package/README.md CHANGED
@@ -16,16 +16,25 @@ Graphlit is a cloud platform that handles the complex parts of building AI appli
 
  ## ✨ What's New in v1.1.0
 
- - **Real-time streaming** - Watch AI responses appear word-by-word
+ - **Real-time streaming** - Watch AI responses appear word-by-word across 9 different providers
  - **Tool calling** - Let AI execute functions and retrieve data
- - **Better performance** - Native integration with OpenAI, Anthropic, and Google
+ - **Extended provider support** - Native streaming integration with OpenAI, Anthropic, Google, Groq, Cerebras, Cohere, Mistral, AWS Bedrock, and Deepseek
+ - **Better performance** - Optimized streaming with provider-specific SDKs
+ - **Network resilience** - Automatic retry logic for transient failures
 
  ## šŸ“‹ Table of Contents
 
  - [Quick Start](#quick-start)
+ - [Installation](#installation)
+ - [Setting Up](#setting-up)
+ - [Network Resilience](#network-resilience-new-in-v111)
+ - [Streaming Provider Support](#streaming-provider-support)
  - [Basic Examples](#basic-examples)
  - [Common Use Cases](#common-use-cases)
+ - [Advanced Agent Features](#advanced-agent-features)
+ - [Advanced Workflows](#advanced-workflows)
  - [API Reference](#api-reference)
+ - [Testing & Examples](#testing--examples)
  - [Support](#support)
 
  ## Quick Start
@@ -93,7 +102,7 @@ npm install @google/generative-ai
  # For Groq streaming (OpenAI-compatible)
  npm install groq-sdk
 
- # For Cerebras streaming (uses OpenAI SDK with custom base URL)
+ # For Cerebras streaming (OpenAI-compatible)
  npm install openai
 
  # For Cohere streaming
@@ -105,7 +114,7 @@ npm install @mistralai/mistralai
  # For AWS Bedrock streaming (Claude models)
  npm install @aws-sdk/client-bedrock-runtime
 
- # For Deepseek streaming (uses OpenAI SDK with custom base URL)
+ # For Deepseek streaming (OpenAI-compatible)
  npm install openai
  ```
 
@@ -118,22 +127,22 @@ GRAPHLIT_ORGANIZATION_ID=your_org_id
  GRAPHLIT_ENVIRONMENT_ID=your_env_id
  GRAPHLIT_JWT_SECRET=your_secret
 
- # Optional: For streaming
+ # Optional: For streaming with specific providers
  OPENAI_API_KEY=your_key
  ANTHROPIC_API_KEY=your_key
  GOOGLE_API_KEY=your_key
- GROQ_API_KEY=your_key
- CEREBRAS_API_KEY=your_key
- COHERE_API_KEY=your_key
- MISTRAL_API_KEY=your_key
 
- # For AWS Bedrock (requires AWS credentials)
+ # Additional streaming providers
+ GROQ_API_KEY=your_key # For Groq models (Llama, Mixtral)
+ CEREBRAS_API_KEY=your_key # For Cerebras models
+ COHERE_API_KEY=your_key # For Cohere Command models
+ MISTRAL_API_KEY=your_key # For Mistral models
+ DEEPSEEK_API_KEY=your_key # For Deepseek models
+
+ # For AWS Bedrock streaming (requires AWS credentials)
  AWS_REGION=us-east-2
  AWS_ACCESS_KEY_ID=your_key
  AWS_SECRET_ACCESS_KEY=your_secret
-
- # For Deepseek streaming
- DEEPSEEK_API_KEY=your_key
  ```
 
  ## Network Resilience (New in v1.1.1)
@@ -206,6 +215,69 @@ const client = new Graphlit({
  });
  ```
 
+ ## Streaming Provider Support
+
+ The Graphlit SDK supports real-time streaming responses from 9 different LLM providers. Each provider requires its own SDK and API key:
+
+ ### Supported Providers
+
+ | Provider | Models | SDK Required | API Key |
+ |----------|--------|--------------|---------|
+ | **OpenAI** | GPT-4, GPT-4o, GPT-4.1, O1, O3, O4 | `openai` | `OPENAI_API_KEY` |
+ | **Anthropic** | Claude 3, Claude 3.5, Claude 3.7, Claude 4 | `@anthropic-ai/sdk` | `ANTHROPIC_API_KEY` |
+ | **Google** | Gemini 1.5, Gemini 2.0, Gemini 2.5 | `@google/generative-ai` | `GOOGLE_API_KEY` |
+ | **Groq** | Llama 4, Llama 3.3, Mixtral, Deepseek R1 | `groq-sdk` | `GROQ_API_KEY` |
+ | **Cerebras** | Llama 3.3, Llama 3.1 | `openai` | `CEREBRAS_API_KEY` |
+ | **Cohere** | Command R+, Command R, Command R7B, Command A | `cohere-ai` | `COHERE_API_KEY` |
+ | **Mistral** | Mistral Large, Medium, Small, Nemo, Pixtral | `@mistralai/mistralai` | `MISTRAL_API_KEY` |
+ | **AWS Bedrock** | Nova Premier/Pro, Claude 3.7, Llama 4 | `@aws-sdk/client-bedrock-runtime` | AWS credentials |
+ | **Deepseek** | Deepseek Chat, Deepseek Reasoner | `openai` | `DEEPSEEK_API_KEY` |
+
+ ### Setting Up Streaming
+
+ Each provider requires both the SDK installation and proper client setup:
+
+ ```typescript
+ import { Graphlit, Types } from "graphlit-client";
+
+ const client = new Graphlit();
+
+ // Example: Set up multiple streaming providers
+ if (process.env.OPENAI_API_KEY) {
+   const { OpenAI } = await import("openai");
+   client.setOpenAIClient(new OpenAI());
+ }
+
+ if (process.env.COHERE_API_KEY) {
+   const { CohereClient } = await import("cohere-ai");
+   client.setCohereClient(new CohereClient({ token: process.env.COHERE_API_KEY }));
+ }
+
+ if (process.env.GROQ_API_KEY) {
+   const { Groq } = await import("groq-sdk");
+   client.setGroqClient(new Groq({ apiKey: process.env.GROQ_API_KEY }));
+ }
+
+ // Then create specifications for any provider
+ const spec = await client.createSpecification({
+   name: "Multi-Provider Assistant",
+   type: Types.SpecificationTypes.Completion,
+   serviceType: Types.ModelServiceTypes.Cohere, // or any supported provider
+   cohere: {
+     model: Types.CohereModels.CommandRPlus,
+     temperature: 0.7
+   },
+ });
+ ```
+
+ ### Provider-Specific Notes
+
+ - **OpenAI-Compatible**: Groq, Cerebras, and Deepseek use OpenAI-compatible APIs (see the sketch below)
+ - **AWS Bedrock**: Requires AWS credentials and uses the Converse API for streaming
+ - **Cohere**: Supports both chat and tool calling with Command models
+ - **Google**: Includes advanced multimodal capabilities with Gemini models
+ - **Mistral**: Supports both text and vision models (Pixtral)
+
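+ The OpenAI-compatible providers can reuse the OpenAI SDK pointed at a different endpoint. The sketch below is illustrative only and not part of this package's documented API: the Deepseek base URL and the `setDeepseekClient` / `setBedrockClient` setter names are assumptions made by analogy with the `setOpenAIClient` pattern shown above.
+
+ ```typescript
+ import { Graphlit } from "graphlit-client";
+
+ const client = new Graphlit();
+
+ if (process.env.DEEPSEEK_API_KEY) {
+   // Reuse the OpenAI SDK against an OpenAI-compatible endpoint (assumed base URL)
+   const { OpenAI } = await import("openai");
+   const deepseek = new OpenAI({
+     apiKey: process.env.DEEPSEEK_API_KEY,
+     baseURL: "https://api.deepseek.com",
+   });
+   client.setDeepseekClient(deepseek); // assumed setter name
+ }
+
+ if (process.env.AWS_REGION) {
+   // AWS Bedrock streams via the Converse API; credentials come from the AWS environment
+   const { BedrockRuntimeClient } = await import("@aws-sdk/client-bedrock-runtime");
+   client.setBedrockClient(new BedrockRuntimeClient({ region: process.env.AWS_REGION })); // assumed setter name
+ }
+ ```
+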
  ## Basic Examples
 
  ### 1. Chat with AI
@@ -323,7 +395,73 @@ const response = await client.promptAgent(
  console.log(response.message);
  ```
 
- ### 4. Tool Calling
+ ### 4. Multiple Provider Streaming
+
+ Compare responses from different LLM providers:
+
+ ```typescript
+ import { Graphlit, Types } from "graphlit-client";
+
+ const client = new Graphlit();
+
+ // Set up multiple providers
+ if (process.env.OPENAI_API_KEY) {
+   const { OpenAI } = await import("openai");
+   client.setOpenAIClient(new OpenAI());
+ }
+
+ if (process.env.COHERE_API_KEY) {
+   const { CohereClient } = await import("cohere-ai");
+   client.setCohereClient(new CohereClient({ token: process.env.COHERE_API_KEY }));
+ }
+
+ if (process.env.GROQ_API_KEY) {
+   const { Groq } = await import("groq-sdk");
+   client.setGroqClient(new Groq({ apiKey: process.env.GROQ_API_KEY }));
+ }
+
+ // Create specifications for different providers
+ const providers = [
+   {
+     name: "OpenAI GPT-4o",
+     serviceType: Types.ModelServiceTypes.OpenAi,
+     openAI: { model: Types.OpenAiModels.Gpt4O_128K }
+   },
+   {
+     name: "Cohere Command R+",
+     serviceType: Types.ModelServiceTypes.Cohere,
+     cohere: { model: Types.CohereModels.CommandRPlus }
+   },
+   {
+     name: "Groq Llama",
+     serviceType: Types.ModelServiceTypes.Groq,
+     groq: { model: Types.GroqModels.Llama_3_3_70B }
+   }
+ ];
+
+ // Compare responses
+ for (const provider of providers) {
+   console.log(`\nšŸ¤– ${provider.name}:`);
+
+   const spec = await client.createSpecification({
+     ...provider,
+     type: Types.SpecificationTypes.Completion,
+   });
+
+   await client.streamAgent(
+     "Explain quantum computing in simple terms",
+     (event) => {
+       if (event.type === "message_update") {
+         process.stdout.write(event.message.message);
+       }
+     },
+     undefined,
+     { id: spec.createSpecification.id }
+   );
+ }
+ ```
+
+ ### 5. Tool Calling
 
  Let AI call functions to get real-time data:
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "graphlit-client",
-   "version": "1.0.20250615002",
+   "version": "1.0.20250615003",
    "description": "Graphlit API Client for TypeScript",
    "type": "module",
    "main": "./dist/client.js",