@elizaos/plugin-groq 1.0.3 → 2.0.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/index.browser.js +244 -0
- package/dist/browser/index.browser.js.map +10 -0
- package/dist/browser/index.d.ts +2 -0
- package/dist/cjs/index.d.ts +2 -0
- package/dist/cjs/index.node.cjs +276 -0
- package/dist/cjs/index.node.js.map +10 -0
- package/dist/index.d.ts +2 -5
- package/dist/node/index.d.ts +2 -0
- package/dist/node/index.node.js +244 -0
- package/dist/node/index.node.js.map +10 -0
- package/package.json +68 -27
- package/LICENSE +0 -21
- package/README.md +0 -144
- package/dist/index.js +0 -1041
- package/dist/index.js.map +0 -1
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
// index.ts
|
|
2
|
+
import { createGroq } from "@ai-sdk/groq";
|
|
3
|
+
import { logger, ModelType } from "@elizaos/core";
|
|
4
|
+
import { generateObject, generateText } from "ai";
|
|
5
|
+
// Silence AI SDK warning logs globally unless the host explicitly re-enables them.
var _globalThis = globalThis;
_globalThis.AI_SDK_LOG_WARNINGS ??= false;
// Built-in defaults; each can be overridden via runtime settings
// (GROQ_SMALL_MODEL, GROQ_LARGE_MODEL, GROQ_TTS_MODEL, GROQ_TTS_VOICE, GROQ_BASE_URL).
var DEFAULT_SMALL_MODEL = "llama-3.1-8b-instant";
var DEFAULT_LARGE_MODEL = "llama-3.3-70b-versatile";
var DEFAULT_TTS_MODEL = "playai-tts";
var DEFAULT_TTS_VOICE = "Chip-PlayAI";
var DEFAULT_TRANSCRIPTION_MODEL = "distil-whisper-large-v3-en";
// OpenAI-compatible Groq REST endpoint.
var DEFAULT_BASE_URL = "https://api.groq.com/openai/v1";
|
|
13
|
+
/**
 * Whether we are executing inside a browser-like environment (i.e. a global
 * `document` object is present). Used to gate API-key handling and the
 * Node-only audio endpoints.
 * @returns {boolean} true in browsers, false in Node and other runtimes.
 */
function isBrowser() {
  if (typeof globalThis === "undefined") {
    return false;
  }
  return typeof globalThis.document !== "undefined";
}
|
|
16
|
+
/**
 * Resolve the Groq REST base URL. A string GROQ_BASE_URL runtime setting
 * wins; anything else falls back to the public Groq endpoint.
 * @param {object} runtime - ElizaOS runtime exposing getSetting().
 * @returns {string} Base URL for REST calls.
 */
function getBaseURL(runtime) {
  const configured = runtime.getSetting("GROQ_BASE_URL");
  if (typeof configured === "string") {
    return configured;
  }
  return DEFAULT_BASE_URL;
}
|
|
20
|
+
/**
 * Pick the small (fast) text model: GROQ_SMALL_MODEL first, then the generic
 * SMALL_MODEL setting, then the built-in default. Empty strings fall through
 * to the next candidate (`||` semantics).
 * @param {object} runtime - ElizaOS runtime exposing getSetting().
 * @returns {string} Model identifier.
 */
function getSmallModel(runtime) {
  const configured =
    runtime.getSetting("GROQ_SMALL_MODEL") || runtime.getSetting("SMALL_MODEL");
  if (typeof configured === "string") {
    return configured;
  }
  return DEFAULT_SMALL_MODEL;
}
|
|
24
|
+
/**
 * Pick the large (high-quality) text model: GROQ_LARGE_MODEL first, then the
 * generic LARGE_MODEL setting, then the built-in default. Empty strings fall
 * through to the next candidate (`||` semantics).
 * @param {object} runtime - ElizaOS runtime exposing getSetting().
 * @returns {string} Model identifier.
 */
function getLargeModel(runtime) {
  const configured =
    runtime.getSetting("GROQ_LARGE_MODEL") || runtime.getSetting("LARGE_MODEL");
  if (typeof configured === "string") {
    return configured;
  }
  return DEFAULT_LARGE_MODEL;
}
|
|
28
|
+
/**
 * Construct a Groq provider instance wired to this runtime.
 *
 * Browser safety: unless GROQ_ALLOW_BROWSER_API_KEY is explicitly "true",
 * no API key is attached while running in a browser, so secrets are not
 * shipped to clients (point GROQ_BASE_URL at a server-side proxy instead).
 *
 * @param {object} runtime - ElizaOS runtime exposing getSetting() and fetch.
 * @returns Groq provider created by createGroq().
 */
function createGroqClient(runtime) {
  let allowBrowserKey = true;
  if (isBrowser()) {
    // Only consult the opt-in flag when actually in a browser, mirroring the
    // original short-circuit so getSetting is not called needlessly.
    const optIn = runtime.getSetting("GROQ_ALLOW_BROWSER_API_KEY") ?? "";
    allowBrowserKey = String(optIn).toLowerCase() === "true";
  }
  const candidateKey = allowBrowserKey ? runtime.getSetting("GROQ_API_KEY") : undefined;
  return createGroq({
    apiKey: typeof candidateKey === "string" ? candidateKey : undefined,
    fetch: runtime.fetch ?? undefined,
    baseURL: getBaseURL(runtime)
  });
}
|
|
37
|
+
/**
 * Pull the suggested wait time out of a Groq rate-limit message such as
 * "... try again in 2.5s", convert it to milliseconds, and pad it with a
 * one-second safety margin. Falls back to 10 seconds when the message
 * carries no delay hint.
 *
 * @param {string} message - Error message returned by the Groq API.
 * @returns {number} Delay in milliseconds before retrying.
 */
function extractRetryDelay(message) {
  const hinted = /try again in (\d+\.?\d*)s/i.exec(message)?.[1];
  if (!hinted) {
    return 10000;
  }
  return Math.ceil(Number.parseFloat(hinted) * 1000) + 1000;
}
|
|
44
|
+
/**
 * Run generateText once and, on a Groq "Rate limit reached" error, wait the
 * server-suggested delay and retry exactly one more time.
 *
 * @param groq   Groq provider created by createGroqClient().
 * @param model  Model identifier to invoke.
 * @param params { prompt, system, temperature, maxTokens, frequencyPenalty,
 *                 presencePenalty, stopSequences }.
 * @returns {Promise<string>} The generated text.
 * @throws Rethrows any non-rate-limit error, and a rate-limit error that
 *         persists after the single retry.
 */
async function generateWithRetry(groq, model, params) {
  const generate = () => generateText({
    model: groq.languageModel(model),
    prompt: params.prompt,
    system: params.system,
    temperature: params.temperature,
    // Fix: forward the caller-supplied token cap. Every caller passes
    // maxTokens (8000 / ?? 8192) but it was previously dropped here.
    // AI SDK v5+ names this option maxOutputTokens.
    maxOutputTokens: params.maxTokens,
    maxRetries: 3,
    frequencyPenalty: params.frequencyPenalty,
    presencePenalty: params.presencePenalty,
    stopSequences: params.stopSequences
  });
  try {
    const { text } = await generate();
    return text;
  } catch (error) {
    if (error instanceof Error && error.message.includes("Rate limit reached")) {
      const delay = extractRetryDelay(error.message);
      logger.warn(`Groq rate limit hit, retrying in ${delay}ms`);
      await new Promise((resolve) => setTimeout(resolve, delay));
      const { text } = await generate();
      return text;
    }
    throw error;
  }
}
|
|
69
|
+
/**
 * ElizaOS plugin wiring Groq-hosted models into the runtime: small/large
 * text generation, schema-free object generation, audio transcription and
 * text-to-speech. The audio handlers are Node-only and throw in browsers,
 * where a server-side proxy is expected instead.
 */
var groqPlugin = {
  name: "groq",
  description: "Groq LLM provider - fast inference with Llama and other models",
  // Fail fast when no API key is configured, but only on the server:
  // browsers are allowed through since they should use a proxy
  // (see createGroqClient).
  async init(_config, runtime) {
    const apiKey = runtime.getSetting("GROQ_API_KEY");
    if (!apiKey && !isBrowser()) {
      throw new Error("GROQ_API_KEY is required");
    }
  },
  models: {
    // Fast/cheap text model with fixed sampling defaults.
    [ModelType.TEXT_SMALL]: async (runtime, params) => {
      const groq = createGroqClient(runtime);
      const model = getSmallModel(runtime);
      return generateWithRetry(groq, model, {
        prompt: params.prompt,
        system: runtime.character.system,
        temperature: 0.7,
        maxTokens: 8000,
        frequencyPenalty: 0.7,
        presencePenalty: 0.7,
        stopSequences: params.stopSequences || []
      });
    },
    // Higher-quality text model; caller-supplied sampling params override defaults.
    [ModelType.TEXT_LARGE]: async (runtime, params) => {
      const groq = createGroqClient(runtime);
      const model = getLargeModel(runtime);
      return generateWithRetry(groq, model, {
        prompt: params.prompt,
        system: runtime.character.system,
        temperature: params.temperature ?? 0.7,
        maxTokens: params.maxTokens ?? 8192,
        frequencyPenalty: params.frequencyPenalty ?? 0.7,
        presencePenalty: params.presencePenalty ?? 0.7,
        stopSequences: params.stopSequences || []
      });
    },
    // Schema-free JSON object generation on the small model.
    [ModelType.OBJECT_SMALL]: async (runtime, params) => {
      const groq = createGroqClient(runtime);
      const model = getSmallModel(runtime);
      const { object } = await generateObject({
        model: groq.languageModel(model),
        output: "no-schema",
        prompt: params.prompt,
        temperature: params.temperature
      });
      return object;
    },
    // Schema-free JSON object generation on the large model.
    [ModelType.OBJECT_LARGE]: async (runtime, params) => {
      const groq = createGroqClient(runtime);
      const model = getLargeModel(runtime);
      const { object } = await generateObject({
        model: groq.languageModel(model),
        output: "no-schema",
        prompt: params.prompt,
        temperature: params.temperature
      });
      return object;
    },
    // Audio -> text via a multipart upload to /audio/transcriptions. Node-only.
    [ModelType.TRANSCRIPTION]: async (runtime, params) => {
      // params may be a base64 string, a Buffer, or an object carrying a
      // Uint8Array under `audioData`; this narrows the last case.
      function hasAudioData(obj) {
        return "audioData" in obj && obj.audioData instanceof Uint8Array;
      }
      if (isBrowser()) {
        throw new Error("Groq TRANSCRIPTION is not supported directly in browsers. Use a server proxy or submit a Blob/ArrayBuffer to a server.");
      }
      const hasBuffer = typeof Buffer !== "undefined" && typeof Buffer.isBuffer === "function";
      // Coerce every accepted input shape to a Buffer; unknown shapes fall
      // back to an empty buffer rather than throwing here.
      const audioBuffer = typeof params === "string" ? Buffer.from(params, "base64") : hasBuffer && Buffer.isBuffer(params) ? params : typeof params === "object" && params !== null && hasAudioData(params) ? Buffer.from(params.audioData) : Buffer.alloc(0);
      const baseURL = getBaseURL(runtime);
      const formData = new FormData;
      // NOTE(review): filename/MIME are hard-coded as mp3 regardless of the
      // actual audio format - presumably the server sniffs content; verify.
      formData.append("file", new File([audioBuffer], "audio.mp3", { type: "audio/mp3" }));
      formData.append("model", DEFAULT_TRANSCRIPTION_MODEL);
      const apiKey = runtime.getSetting("GROQ_API_KEY");
      const response = await fetch(`${baseURL}/audio/transcriptions`, {
        method: "POST",
        headers: {
          Authorization: `Bearer ${typeof apiKey === "string" ? apiKey : ""}`
        },
        body: formData
      });
      if (!response.ok) {
        throw new Error(`Transcription failed: ${response.status} ${await response.text()}`);
      }
      const data = await response.json();
      return data.text;
    },
    // Text -> audio via /audio/speech; returns raw audio bytes. Node-only.
    [ModelType.TEXT_TO_SPEECH]: async (runtime, params) => {
      if (isBrowser()) {
        throw new Error("Groq TEXT_TO_SPEECH is not supported directly in browsers. Use a server proxy.");
      }
      // Accept either a bare string or an object with a `text` property.
      const text = typeof params === "string" ? params : params.text;
      const baseURL = getBaseURL(runtime);
      const modelSetting = runtime.getSetting("GROQ_TTS_MODEL");
      const voiceSetting = runtime.getSetting("GROQ_TTS_VOICE");
      const model = typeof modelSetting === "string" ? modelSetting : DEFAULT_TTS_MODEL;
      const voice = typeof voiceSetting === "string" ? voiceSetting : DEFAULT_TTS_VOICE;
      const apiKey = runtime.getSetting("GROQ_API_KEY");
      const response = await fetch(`${baseURL}/audio/speech`, {
        method: "POST",
        headers: {
          Authorization: `Bearer ${typeof apiKey === "string" ? apiKey : ""}`,
          "Content-Type": "application/json"
        },
        body: JSON.stringify({ model, voice, input: text })
      });
      if (!response.ok) {
        throw new Error(`TTS failed: ${response.status} ${await response.text()}`);
      }
      const arrayBuffer = await response.arrayBuffer();
      return new Uint8Array(arrayBuffer);
    }
  },
  // Live smoke tests run by the ElizaOS test harness against the real API.
  tests: [
    {
      name: "groq_plugin_tests",
      tests: [
        // Hits /models to confirm the configured key authenticates.
        {
          name: "validate_api_key",
          fn: async (runtime) => {
            const baseURL = getBaseURL(runtime);
            const response = await fetch(`${baseURL}/models`, {
              headers: {
                Authorization: `Bearer ${runtime.getSetting("GROQ_API_KEY")}`
              }
            });
            if (!response.ok) {
              throw new Error(`API key validation failed: ${response.statusText}`);
            }
            const data = await response.json();
            logger.info(`Groq API validated, ${data.data.length} models available`);
          }
        },
        // Round-trips a trivial prompt through the small text model.
        {
          name: "text_small",
          fn: async (runtime) => {
            const text = await runtime.useModel(ModelType.TEXT_SMALL, {
              prompt: "Say hello in exactly 3 words."
            });
            if (!text || text.length === 0) {
              throw new Error("Empty response from TEXT_SMALL");
            }
            logger.info("TEXT_SMALL:", text);
          }
        },
        // Round-trips a trivial prompt through the large text model.
        {
          name: "text_large",
          fn: async (runtime) => {
            const text = await runtime.useModel(ModelType.TEXT_LARGE, {
              prompt: "What is 2+2? Answer with just the number."
            });
            if (!text || text.length === 0) {
              throw new Error("Empty response from TEXT_LARGE");
            }
            logger.info("TEXT_LARGE:", text);
          }
        },
        // Exercises schema-free object generation.
        {
          name: "object_generation",
          fn: async (runtime) => {
            const obj = await runtime.useModel(ModelType.OBJECT_SMALL, {
              prompt: 'Return a JSON object with name="test" and value=42',
              temperature: 0.5
            });
            logger.info("OBJECT_SMALL:", JSON.stringify(obj));
          }
        }
      ]
    }
  ]
};
|
|
238
|
+
// Expose the plugin both as a named export and as the module default.
var typescript_default = groqPlugin;
export {
  groqPlugin,
  typescript_default as default
};
|
|
243
|
+
|
|
244
|
+
//# debugId=F144FD046A3DE1B564756E2164756E21
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../index.ts"],
|
|
4
|
+
"sourcesContent": [
|
|
5
|
+
"import { createGroq } from \"@ai-sdk/groq\";\nimport type { IAgentRuntime, ObjectGenerationParams, Plugin } from \"@elizaos/core\";\nimport { type GenerateTextParams, logger, ModelType } from \"@elizaos/core\";\nimport { generateObject, generateText } from \"ai\";\n\nconst _globalThis = globalThis as typeof globalThis & { AI_SDK_LOG_WARNINGS?: boolean };\n_globalThis.AI_SDK_LOG_WARNINGS ??= false;\nconst DEFAULT_SMALL_MODEL = \"llama-3.1-8b-instant\";\nconst DEFAULT_LARGE_MODEL = \"llama-3.3-70b-versatile\";\nconst DEFAULT_TTS_MODEL = \"playai-tts\";\nconst DEFAULT_TTS_VOICE = \"Chip-PlayAI\";\nconst DEFAULT_TRANSCRIPTION_MODEL = \"distil-whisper-large-v3-en\";\nconst DEFAULT_BASE_URL = \"https://api.groq.com/openai/v1\";\n\nfunction isBrowser(): boolean {\n return (\n typeof globalThis !== \"undefined\" &&\n typeof (globalThis as { document?: Document }).document !== \"undefined\"\n );\n}\n\nfunction getBaseURL(runtime: IAgentRuntime): string {\n const url = runtime.getSetting(\"GROQ_BASE_URL\");\n return typeof url === \"string\" ? url : DEFAULT_BASE_URL;\n}\n\nfunction getSmallModel(runtime: IAgentRuntime): string {\n const setting = runtime.getSetting(\"GROQ_SMALL_MODEL\") || runtime.getSetting(\"SMALL_MODEL\");\n return typeof setting === \"string\" ? setting : DEFAULT_SMALL_MODEL;\n}\n\nfunction getLargeModel(runtime: IAgentRuntime): string {\n const setting = runtime.getSetting(\"GROQ_LARGE_MODEL\") || runtime.getSetting(\"LARGE_MODEL\");\n return typeof setting === \"string\" ? setting : DEFAULT_LARGE_MODEL;\n}\n\nfunction createGroqClient(runtime: IAgentRuntime) {\n // In browsers, default to *not* sending secrets.\n // Use a server-side proxy and configure GROQ_BASE_URL (or explicitly opt-in).\n const allowBrowserKey =\n !isBrowser() ||\n String(runtime.getSetting(\"GROQ_ALLOW_BROWSER_API_KEY\") ?? \"\").toLowerCase() === \"true\";\n const apiKey = allowBrowserKey ? 
runtime.getSetting(\"GROQ_API_KEY\") : undefined;\n return createGroq({\n apiKey: typeof apiKey === \"string\" ? apiKey : undefined,\n fetch: runtime.fetch ?? undefined,\n baseURL: getBaseURL(runtime),\n });\n}\n\nfunction extractRetryDelay(message: string): number {\n const match = message.match(/try again in (\\d+\\.?\\d*)s/i);\n if (match?.[1]) {\n return Math.ceil(Number.parseFloat(match[1]) * 1000) + 1000;\n }\n return 10000;\n}\n\nasync function generateWithRetry(\n groq: ReturnType<typeof createGroq>,\n model: string,\n params: {\n prompt: string;\n system?: string;\n temperature: number;\n maxTokens: number;\n frequencyPenalty: number;\n presencePenalty: number;\n stopSequences: string[];\n }\n): Promise<string> {\n const generate = () =>\n generateText({\n model: groq.languageModel(model),\n prompt: params.prompt,\n system: params.system,\n temperature: params.temperature,\n maxRetries: 3,\n frequencyPenalty: params.frequencyPenalty,\n presencePenalty: params.presencePenalty,\n stopSequences: params.stopSequences,\n });\n\n try {\n const { text } = await generate();\n return text;\n } catch (error) {\n if (error instanceof Error && error.message.includes(\"Rate limit reached\")) {\n const delay = extractRetryDelay(error.message);\n logger.warn(`Groq rate limit hit, retrying in ${delay}ms`);\n await new Promise((resolve) => setTimeout(resolve, delay));\n const { text } = await generate();\n return text;\n }\n throw error;\n }\n}\n\nexport const groqPlugin: Plugin = {\n name: \"groq\",\n description: \"Groq LLM provider - fast inference with Llama and other models\",\n\n async init(_config: Record<string, string>, runtime: IAgentRuntime): Promise<void> {\n const apiKey = runtime.getSetting(\"GROQ_API_KEY\");\n if (!apiKey && !isBrowser()) {\n throw new Error(\"GROQ_API_KEY is required\");\n }\n },\n\n models: {\n [ModelType.TEXT_SMALL]: async (runtime, params: GenerateTextParams) => {\n const groq = createGroqClient(runtime);\n const model = 
getSmallModel(runtime);\n\n return generateWithRetry(groq, model, {\n prompt: params.prompt,\n system: runtime.character.system,\n temperature: 0.7,\n maxTokens: 8000,\n frequencyPenalty: 0.7,\n presencePenalty: 0.7,\n stopSequences: params.stopSequences || [],\n });\n },\n\n [ModelType.TEXT_LARGE]: async (runtime, params: GenerateTextParams) => {\n const groq = createGroqClient(runtime);\n const model = getLargeModel(runtime);\n\n return generateWithRetry(groq, model, {\n prompt: params.prompt,\n system: runtime.character.system,\n temperature: params.temperature ?? 0.7,\n maxTokens: params.maxTokens ?? 8192,\n frequencyPenalty: params.frequencyPenalty ?? 0.7,\n presencePenalty: params.presencePenalty ?? 0.7,\n stopSequences: params.stopSequences || [],\n });\n },\n\n [ModelType.OBJECT_SMALL]: async (runtime, params: ObjectGenerationParams) => {\n const groq = createGroqClient(runtime);\n const model = getSmallModel(runtime);\n\n const { object } = await generateObject({\n model: groq.languageModel(model),\n output: \"no-schema\",\n prompt: params.prompt,\n temperature: params.temperature,\n });\n return object as Record<\n string,\n string | number | boolean | null | Record<string, string | number | boolean | null>\n >;\n },\n\n [ModelType.OBJECT_LARGE]: async (runtime, params: ObjectGenerationParams) => {\n const groq = createGroqClient(runtime);\n const model = getLargeModel(runtime);\n\n const { object } = await generateObject({\n model: groq.languageModel(model),\n output: \"no-schema\",\n prompt: params.prompt,\n temperature: params.temperature,\n });\n return object as Record<\n string,\n string | number | boolean | null | Record<string, string | number | boolean | null>\n >;\n },\n\n [ModelType.TRANSCRIPTION]: async (runtime, params) => {\n type AudioDataShape = { audioData: Uint8Array };\n\n function hasAudioData(obj: object): obj is AudioDataShape {\n return \"audioData\" in obj && (obj as AudioDataShape).audioData instanceof Uint8Array;\n }\n\n if 
(isBrowser()) {\n throw new Error(\n \"Groq TRANSCRIPTION is not supported directly in browsers. Use a server proxy or submit a Blob/ArrayBuffer to a server.\"\n );\n }\n\n const hasBuffer =\n typeof Buffer !== \"undefined\" &&\n typeof (Buffer as unknown as { isBuffer: (v: unknown) => boolean }).isBuffer === \"function\";\n\n const audioBuffer: Buffer =\n typeof params === \"string\"\n ? Buffer.from(params, \"base64\")\n : hasBuffer &&\n (Buffer as unknown as { isBuffer: (v: unknown) => boolean }).isBuffer(params)\n ? (params as Buffer)\n : typeof params === \"object\" && params !== null && hasAudioData(params)\n ? Buffer.from((params as AudioDataShape).audioData)\n : Buffer.alloc(0);\n const baseURL = getBaseURL(runtime);\n const formData = new FormData();\n formData.append(\n \"file\",\n new File([audioBuffer as BlobPart], \"audio.mp3\", { type: \"audio/mp3\" })\n );\n formData.append(\"model\", DEFAULT_TRANSCRIPTION_MODEL);\n\n const apiKey = runtime.getSetting(\"GROQ_API_KEY\");\n const response = await fetch(`${baseURL}/audio/transcriptions`, {\n method: \"POST\",\n headers: {\n Authorization: `Bearer ${typeof apiKey === \"string\" ? apiKey : \"\"}`,\n },\n body: formData,\n });\n\n if (!response.ok) {\n throw new Error(`Transcription failed: ${response.status} ${await response.text()}`);\n }\n\n const data = (await response.json()) as { text: string };\n return data.text;\n },\n\n [ModelType.TEXT_TO_SPEECH]: async (runtime: IAgentRuntime, params) => {\n if (isBrowser()) {\n throw new Error(\n \"Groq TEXT_TO_SPEECH is not supported directly in browsers. Use a server proxy.\"\n );\n }\n const text = typeof params === \"string\" ? params : (params as { text: string }).text;\n const baseURL = getBaseURL(runtime);\n const modelSetting = runtime.getSetting(\"GROQ_TTS_MODEL\");\n const voiceSetting = runtime.getSetting(\"GROQ_TTS_VOICE\");\n const model = typeof modelSetting === \"string\" ? 
modelSetting : DEFAULT_TTS_MODEL;\n const voice = typeof voiceSetting === \"string\" ? voiceSetting : DEFAULT_TTS_VOICE;\n\n const apiKey = runtime.getSetting(\"GROQ_API_KEY\");\n const response = await fetch(`${baseURL}/audio/speech`, {\n method: \"POST\",\n headers: {\n Authorization: `Bearer ${typeof apiKey === \"string\" ? apiKey : \"\"}`,\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({ model, voice, input: text }),\n });\n\n if (!response.ok) {\n throw new Error(`TTS failed: ${response.status} ${await response.text()}`);\n }\n\n const arrayBuffer = await response.arrayBuffer();\n return new Uint8Array(arrayBuffer);\n },\n },\n\n tests: [\n {\n name: \"groq_plugin_tests\",\n tests: [\n {\n name: \"validate_api_key\",\n fn: async (runtime) => {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: {\n Authorization: `Bearer ${runtime.getSetting(\"GROQ_API_KEY\")}`,\n },\n });\n if (!response.ok) {\n throw new Error(`API key validation failed: ${response.statusText}`);\n }\n const data = (await response.json()) as {\n data: Array<{ id: string; owned_by: string }>;\n };\n logger.info(`Groq API validated, ${data.data.length} models available`);\n },\n },\n {\n name: \"text_small\",\n fn: async (runtime) => {\n const text = await runtime.useModel(ModelType.TEXT_SMALL, {\n prompt: \"Say hello in exactly 3 words.\",\n });\n if (!text || text.length === 0) {\n throw new Error(\"Empty response from TEXT_SMALL\");\n }\n logger.info(\"TEXT_SMALL:\", text);\n },\n },\n {\n name: \"text_large\",\n fn: async (runtime) => {\n const text = await runtime.useModel(ModelType.TEXT_LARGE, {\n prompt: \"What is 2+2? 
Answer with just the number.\",\n });\n if (!text || text.length === 0) {\n throw new Error(\"Empty response from TEXT_LARGE\");\n }\n logger.info(\"TEXT_LARGE:\", text);\n },\n },\n {\n name: \"object_generation\",\n fn: async (runtime) => {\n const obj = await runtime.useModel(ModelType.OBJECT_SMALL, {\n prompt: 'Return a JSON object with name=\"test\" and value=42',\n temperature: 0.5,\n });\n logger.info(\"OBJECT_SMALL:\", JSON.stringify(obj));\n },\n },\n ],\n },\n ],\n};\n\nexport default groqPlugin;\n"
|
|
6
|
+
],
|
|
7
|
+
"mappings": ";AAAA;AAEA;AACA;AAEA,IAAM,cAAc;AACpB,YAAY,wBAAwB;AACpC,IAAM,sBAAsB;AAC5B,IAAM,sBAAsB;AAC5B,IAAM,oBAAoB;AAC1B,IAAM,oBAAoB;AAC1B,IAAM,8BAA8B;AACpC,IAAM,mBAAmB;AAEzB,SAAS,SAAS,GAAY;AAAA,EAC5B,OACE,OAAO,eAAe,eACtB,OAAQ,WAAuC,aAAa;AAAA;AAIhE,SAAS,UAAU,CAAC,SAAgC;AAAA,EAClD,MAAM,MAAM,QAAQ,WAAW,eAAe;AAAA,EAC9C,OAAO,OAAO,QAAQ,WAAW,MAAM;AAAA;AAGzC,SAAS,aAAa,CAAC,SAAgC;AAAA,EACrD,MAAM,UAAU,QAAQ,WAAW,kBAAkB,KAAK,QAAQ,WAAW,aAAa;AAAA,EAC1F,OAAO,OAAO,YAAY,WAAW,UAAU;AAAA;AAGjD,SAAS,aAAa,CAAC,SAAgC;AAAA,EACrD,MAAM,UAAU,QAAQ,WAAW,kBAAkB,KAAK,QAAQ,WAAW,aAAa;AAAA,EAC1F,OAAO,OAAO,YAAY,WAAW,UAAU;AAAA;AAGjD,SAAS,gBAAgB,CAAC,SAAwB;AAAA,EAGhD,MAAM,kBACJ,CAAC,UAAU,KACX,OAAO,QAAQ,WAAW,4BAA4B,KAAK,EAAE,EAAE,YAAY,MAAM;AAAA,EACnF,MAAM,SAAS,kBAAkB,QAAQ,WAAW,cAAc,IAAI;AAAA,EACtE,OAAO,WAAW;AAAA,IAChB,QAAQ,OAAO,WAAW,WAAW,SAAS;AAAA,IAC9C,OAAO,QAAQ,SAAS;AAAA,IACxB,SAAS,WAAW,OAAO;AAAA,EAC7B,CAAC;AAAA;AAGH,SAAS,iBAAiB,CAAC,SAAyB;AAAA,EAClD,MAAM,QAAQ,QAAQ,MAAM,4BAA4B;AAAA,EACxD,IAAI,QAAQ,IAAI;AAAA,IACd,OAAO,KAAK,KAAK,OAAO,WAAW,MAAM,EAAE,IAAI,IAAI,IAAI;AAAA,EACzD;AAAA,EACA,OAAO;AAAA;AAGT,eAAe,iBAAiB,CAC9B,MACA,OACA,QASiB;AAAA,EACjB,MAAM,WAAW,MACf,aAAa;AAAA,IACX,OAAO,KAAK,cAAc,KAAK;AAAA,IAC/B,QAAQ,OAAO;AAAA,IACf,QAAQ,OAAO;AAAA,IACf,aAAa,OAAO;AAAA,IACpB,YAAY;AAAA,IACZ,kBAAkB,OAAO;AAAA,IACzB,iBAAiB,OAAO;AAAA,IACxB,eAAe,OAAO;AAAA,EACxB,CAAC;AAAA,EAEH,IAAI;AAAA,IACF,QAAQ,SAAS,MAAM,SAAS;AAAA,IAChC,OAAO;AAAA,IACP,OAAO,OAAO;AAAA,IACd,IAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,oBAAoB,GAAG;AAAA,MAC1E,MAAM,QAAQ,kBAAkB,MAAM,OAAO;AAAA,MAC7C,OAAO,KAAK,oCAAoC,SAAS;AAAA,MACzD,MAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,KAAK,CAAC;AAAA,MACzD,QAAQ,SAAS,MAAM,SAAS;AAAA,MAChC,OAAO;AAAA,IACT;AAAA,IACA,MAAM;AAAA;AAAA;AAIH,IAAM,aAAqB;AAAA,EAChC,MAAM;AAAA,EACN,aAAa;AAAA,OAEP,KAAI,CAAC,SAAiC,SAAuC;AAAA,IACjF,MAAM,SAAS,QAAQ,WAAW,cAAc;AAAA,IAChD,IAAI,CAAC,UAAU,CAAC,UAAU,GAAG;AAAA,MAC3B,MAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAAA;AAAA,EAGF,QAAQ;AAAA,KACL,UAAU,aAAa,OAAO,SAAS,WAA+B;AAAA,MACrE,MAAM,OAAO,iBAAiB,OAAO;AAAA,MACrC,MAAM,QAAQ,cAAc,OAAO;AAA
A,MAEnC,OAAO,kBAAkB,MAAM,OAAO;AAAA,QACpC,QAAQ,OAAO;AAAA,QACf,QAAQ,QAAQ,UAAU;AAAA,QAC1B,aAAa;AAAA,QACb,WAAW;AAAA,QACX,kBAAkB;AAAA,QAClB,iBAAiB;AAAA,QACjB,eAAe,OAAO,iBAAiB,CAAC;AAAA,MAC1C,CAAC;AAAA;AAAA,KAGF,UAAU,aAAa,OAAO,SAAS,WAA+B;AAAA,MACrE,MAAM,OAAO,iBAAiB,OAAO;AAAA,MACrC,MAAM,QAAQ,cAAc,OAAO;AAAA,MAEnC,OAAO,kBAAkB,MAAM,OAAO;AAAA,QACpC,QAAQ,OAAO;AAAA,QACf,QAAQ,QAAQ,UAAU;AAAA,QAC1B,aAAa,OAAO,eAAe;AAAA,QACnC,WAAW,OAAO,aAAa;AAAA,QAC/B,kBAAkB,OAAO,oBAAoB;AAAA,QAC7C,iBAAiB,OAAO,mBAAmB;AAAA,QAC3C,eAAe,OAAO,iBAAiB,CAAC;AAAA,MAC1C,CAAC;AAAA;AAAA,KAGF,UAAU,eAAe,OAAO,SAAS,WAAmC;AAAA,MAC3E,MAAM,OAAO,iBAAiB,OAAO;AAAA,MACrC,MAAM,QAAQ,cAAc,OAAO;AAAA,MAEnC,QAAQ,WAAW,MAAM,eAAe;AAAA,QACtC,OAAO,KAAK,cAAc,KAAK;AAAA,QAC/B,QAAQ;AAAA,QACR,QAAQ,OAAO;AAAA,QACf,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,MACD,OAAO;AAAA;AAAA,KAMR,UAAU,eAAe,OAAO,SAAS,WAAmC;AAAA,MAC3E,MAAM,OAAO,iBAAiB,OAAO;AAAA,MACrC,MAAM,QAAQ,cAAc,OAAO;AAAA,MAEnC,QAAQ,WAAW,MAAM,eAAe;AAAA,QACtC,OAAO,KAAK,cAAc,KAAK;AAAA,QAC/B,QAAQ;AAAA,QACR,QAAQ,OAAO;AAAA,QACf,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,MACD,OAAO;AAAA;AAAA,KAMR,UAAU,gBAAgB,OAAO,SAAS,WAAW;AAAA,MAGpD,SAAS,YAAY,CAAC,KAAoC;AAAA,QACxD,OAAO,eAAe,OAAQ,IAAuB,qBAAqB;AAAA;AAAA,MAG5E,IAAI,UAAU,GAAG;AAAA,QACf,MAAM,IAAI,MACR,wHACF;AAAA,MACF;AAAA,MAEA,MAAM,YACJ,OAAO,WAAW,eAClB,OAAQ,OAA4D,aAAa;AAAA,MAEnF,MAAM,cACJ,OAAO,WAAW,WACd,OAAO,KAAK,QAAQ,QAAQ,IAC5B,aACG,OAA4D,SAAS,MAAM,IAC3E,SACD,OAAO,WAAW,YAAY,WAAW,QAAQ,aAAa,MAAM,IAClE,OAAO,KAAM,OAA0B,SAAS,IAChD,OAAO,MAAM,CAAC;AAAA,MACxB,MAAM,UAAU,WAAW,OAAO;AAAA,MAClC,MAAM,WAAW,IAAI;AAAA,MACrB,SAAS,OACP,QACA,IAAI,KAAK,CAAC,WAAuB,GAAG,aAAa,EAAE,MAAM,YAAY,CAAC,CACxE;AAAA,MACA,SAAS,OAAO,SAAS,2BAA2B;AAAA,MAEpD,MAAM,SAAS,QAAQ,WAAW,cAAc;AAAA,MAChD,MAAM,WAAW,MAAM,MAAM,GAAG,gCAAgC;AAAA,QAC9D,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,eAAe,UAAU,OAAO,WAAW,WAAW,SAAS;AAAA,QACjE;AAAA,QACA,MAAM;AAAA,MACR,CAAC;AAAA,MAED,IAAI,CAAC,SAAS,IAAI;AAAA,QAChB,MAAM,IAAI,MAAM,yBAAyB,SAAS,UAAU,MAAM,SAAS,KAAK,GAAG;AAAA,MACrF;AAAA,MAEA,MAAM,OAAQ,MAAM,SAAS,KAAK;AAAA,MAClC,OAAO,KAAK;AAAA;AAAA,KAGb,UAAU
,iBAAiB,OAAO,SAAwB,WAAW;AAAA,MACpE,IAAI,UAAU,GAAG;AAAA,QACf,MAAM,IAAI,MACR,gFACF;AAAA,MACF;AAAA,MACA,MAAM,OAAO,OAAO,WAAW,WAAW,SAAU,OAA4B;AAAA,MAChF,MAAM,UAAU,WAAW,OAAO;AAAA,MAClC,MAAM,eAAe,QAAQ,WAAW,gBAAgB;AAAA,MACxD,MAAM,eAAe,QAAQ,WAAW,gBAAgB;AAAA,MACxD,MAAM,QAAQ,OAAO,iBAAiB,WAAW,eAAe;AAAA,MAChE,MAAM,QAAQ,OAAO,iBAAiB,WAAW,eAAe;AAAA,MAEhE,MAAM,SAAS,QAAQ,WAAW,cAAc;AAAA,MAChD,MAAM,WAAW,MAAM,MAAM,GAAG,wBAAwB;AAAA,QACtD,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,eAAe,UAAU,OAAO,WAAW,WAAW,SAAS;AAAA,UAC/D,gBAAgB;AAAA,QAClB;AAAA,QACA,MAAM,KAAK,UAAU,EAAE,OAAO,OAAO,OAAO,KAAK,CAAC;AAAA,MACpD,CAAC;AAAA,MAED,IAAI,CAAC,SAAS,IAAI;AAAA,QAChB,MAAM,IAAI,MAAM,eAAe,SAAS,UAAU,MAAM,SAAS,KAAK,GAAG;AAAA,MAC3E;AAAA,MAEA,MAAM,cAAc,MAAM,SAAS,YAAY;AAAA,MAC/C,OAAO,IAAI,WAAW,WAAW;AAAA;AAAA,EAErC;AAAA,EAEA,OAAO;AAAA,IACL;AAAA,MACE,MAAM;AAAA,MACN,OAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AAAA,YACrB,MAAM,UAAU,WAAW,OAAO;AAAA,YAClC,MAAM,WAAW,MAAM,MAAM,GAAG,kBAAkB;AAAA,cAChD,SAAS;AAAA,gBACP,eAAe,UAAU,QAAQ,WAAW,cAAc;AAAA,cAC5D;AAAA,YACF,CAAC;AAAA,YACD,IAAI,CAAC,SAAS,IAAI;AAAA,cAChB,MAAM,IAAI,MAAM,8BAA8B,SAAS,YAAY;AAAA,YACrE;AAAA,YACA,MAAM,OAAQ,MAAM,SAAS,KAAK;AAAA,YAGlC,OAAO,KAAK,uBAAuB,KAAK,KAAK,yBAAyB;AAAA;AAAA,QAE1E;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AAAA,YACrB,MAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,cACxD,QAAQ;AAAA,YACV,CAAC;AAAA,YACD,IAAI,CAAC,QAAQ,KAAK,WAAW,GAAG;AAAA,cAC9B,MAAM,IAAI,MAAM,gCAAgC;AAAA,YAClD;AAAA,YACA,OAAO,KAAK,eAAe,IAAI;AAAA;AAAA,QAEnC;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AAAA,YACrB,MAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,cACxD,QAAQ;AAAA,YACV,CAAC;AAAA,YACD,IAAI,CAAC,QAAQ,KAAK,WAAW,GAAG;AAAA,cAC9B,MAAM,IAAI,MAAM,gCAAgC;AAAA,YAClD;AAAA,YACA,OAAO,KAAK,eAAe,IAAI;AAAA;AAAA,QAEnC;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AAAA,YACrB,MAAM,MAAM,MAAM,QAAQ,SAAS,UAAU,cAAc;AAAA,cACzD,QAAQ;AAAA,cACR,aAAa;AAAA,YACf,CAAC;AAAA,YACD,OAAO,KAAK,iBAAiB,KAAK,UAAU,GAAG,CAAC;AAAA;AAAA,QAEpD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAe;",
|
|
8
|
+
"debugId": "F144FD046A3DE1B564756E2164756E21",
|
|
9
|
+
"names": []
|
|
10
|
+
}
|
package/package.json
CHANGED
|
@@ -1,10 +1,11 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@elizaos/plugin-groq",
|
|
3
|
-
"version": "
|
|
3
|
+
"version": "2.0.0-alpha.1",
|
|
4
4
|
"type": "module",
|
|
5
|
-
"main": "dist/index.
|
|
6
|
-
"module": "dist/index.js",
|
|
7
|
-
"types": "dist/index.d.ts",
|
|
5
|
+
"main": "dist/cjs/index.node.cjs",
|
|
6
|
+
"module": "dist/node/index.node.js",
|
|
7
|
+
"types": "dist/node/index.d.ts",
|
|
8
|
+
"browser": "dist/browser/index.browser.js",
|
|
8
9
|
"repository": {
|
|
9
10
|
"type": "git",
|
|
10
11
|
"url": "git+https://github.com/elizaos-plugins/plugin-groq.git"
|
|
@@ -12,31 +13,46 @@
|
|
|
12
13
|
"exports": {
|
|
13
14
|
"./package.json": "./package.json",
|
|
14
15
|
".": {
|
|
15
|
-
"
|
|
16
|
-
|
|
17
|
-
"
|
|
18
|
-
|
|
16
|
+
"types": "./dist/node/index.d.ts",
|
|
17
|
+
"browser": {
|
|
18
|
+
"types": "./dist/browser/index.d.ts",
|
|
19
|
+
"import": "./dist/browser/index.browser.js",
|
|
20
|
+
"default": "./dist/browser/index.browser.js"
|
|
21
|
+
},
|
|
22
|
+
"node": {
|
|
23
|
+
"types": "./dist/node/index.d.ts",
|
|
24
|
+
"import": "./dist/node/index.node.js",
|
|
25
|
+
"require": "./dist/cjs/index.node.cjs",
|
|
26
|
+
"default": "./dist/node/index.node.js"
|
|
27
|
+
},
|
|
28
|
+
"default": "./dist/node/index.node.js"
|
|
19
29
|
}
|
|
20
30
|
},
|
|
31
|
+
"sideEffects": false,
|
|
21
32
|
"files": [
|
|
22
33
|
"dist"
|
|
23
34
|
],
|
|
24
35
|
"dependencies": {
|
|
25
|
-
"@ai-sdk/groq": "^
|
|
26
|
-
"@
|
|
27
|
-
"
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
"
|
|
36
|
+
"@ai-sdk/groq": "^3.0.4",
|
|
37
|
+
"@elizaos/core": "workspace:*",
|
|
38
|
+
"ai": "^6.0.0"
|
|
39
|
+
},
|
|
40
|
+
"devDependencies": {
|
|
41
|
+
"@types/node": "^25.0.3",
|
|
42
|
+
"bun-types": "^1.2.0",
|
|
43
|
+
"typescript": "^5.9.3",
|
|
44
|
+
"@biomejs/biome": "^2.3.11"
|
|
31
45
|
},
|
|
32
46
|
"scripts": {
|
|
33
|
-
"
|
|
34
|
-
"
|
|
35
|
-
"
|
|
36
|
-
"
|
|
37
|
-
"
|
|
38
|
-
"
|
|
39
|
-
"
|
|
47
|
+
"dev": "bun run build.ts --watch",
|
|
48
|
+
"typecheck": "tsc --noEmit -p tsconfig.json",
|
|
49
|
+
"clean": "rm -rf dist node_modules",
|
|
50
|
+
"format": "bunx @biomejs/biome format --write .",
|
|
51
|
+
"test": "vitest run",
|
|
52
|
+
"lint": "bunx @biomejs/biome check --write --unsafe .",
|
|
53
|
+
"lint:check": "bunx @biomejs/biome check .",
|
|
54
|
+
"build": "bun run build.ts",
|
|
55
|
+
"build:ts": "bun run build.ts"
|
|
40
56
|
},
|
|
41
57
|
"publishConfig": {
|
|
42
58
|
"access": "public"
|
|
@@ -46,15 +62,40 @@
|
|
|
46
62
|
"pluginParameters": {
|
|
47
63
|
"GROQ_API_KEY": {
|
|
48
64
|
"type": "string",
|
|
49
|
-
"description": "Groq API key
|
|
65
|
+
"description": "Groq API key",
|
|
50
66
|
"required": true,
|
|
51
67
|
"sensitive": true
|
|
68
|
+
},
|
|
69
|
+
"GROQ_BASE_URL": {
|
|
70
|
+
"type": "string",
|
|
71
|
+
"description": "Custom API base URL",
|
|
72
|
+
"required": false,
|
|
73
|
+
"default": "https://api.groq.com/openai/v1"
|
|
74
|
+
},
|
|
75
|
+
"GROQ_SMALL_MODEL": {
|
|
76
|
+
"type": "string",
|
|
77
|
+
"description": "Small model name",
|
|
78
|
+
"required": false,
|
|
79
|
+
"default": "llama-3.1-8b-instant"
|
|
80
|
+
},
|
|
81
|
+
"GROQ_LARGE_MODEL": {
|
|
82
|
+
"type": "string",
|
|
83
|
+
"description": "Large model name",
|
|
84
|
+
"required": false,
|
|
85
|
+
"default": "llama-3.3-70b-versatile"
|
|
86
|
+
},
|
|
87
|
+
"GROQ_TTS_MODEL": {
|
|
88
|
+
"type": "string",
|
|
89
|
+
"description": "TTS model name",
|
|
90
|
+
"required": false,
|
|
91
|
+
"default": "playai-tts"
|
|
92
|
+
},
|
|
93
|
+
"GROQ_TTS_VOICE": {
|
|
94
|
+
"type": "string",
|
|
95
|
+
"description": "TTS voice name",
|
|
96
|
+
"required": false,
|
|
97
|
+
"default": "Chip-PlayAI"
|
|
52
98
|
}
|
|
53
99
|
}
|
|
54
|
-
},
|
|
55
|
-
"gitHead": "646c632924826e2b75c2304a75ee56959fe4a460",
|
|
56
|
-
"devDependencies": {
|
|
57
|
-
"prettier": "3.5.3",
|
|
58
|
-
"typescript": "^5.8.2"
|
|
59
100
|
}
|
|
60
101
|
}
|
package/LICENSE
DELETED
|
@@ -1,21 +0,0 @@
|
|
|
1
|
-
MIT License
|
|
2
|
-
|
|
3
|
-
Copyright (c) 2025 Shaw Walters and elizaOS Contributors
|
|
4
|
-
|
|
5
|
-
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
-
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
-
in the Software without restriction, including without limitation the rights
|
|
8
|
-
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
-
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
-
furnished to do so, subject to the following conditions:
|
|
11
|
-
|
|
12
|
-
The above copyright notice and this permission notice shall be included in all
|
|
13
|
-
copies or substantial portions of the Software.
|
|
14
|
-
|
|
15
|
-
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
-
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
-
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
-
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
-
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
-
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
-
SOFTWARE.
|
package/README.md
DELETED
|
@@ -1,144 +0,0 @@
|
|
|
1
|
-
# Groq Plugin
|
|
2
|
-
|
|
3
|
-
This plugin provides integration with Groq Cloud through the ElizaOS platform.
|
|
4
|
-
|
|
5
|
-
## Usage
|
|
6
|
-
|
|
7
|
-
Add the plugin to your character configuration:
|
|
8
|
-
|
|
9
|
-
```json
|
|
10
|
-
"plugins": ["@elizaos-plugins/plugin-groq"]
|
|
11
|
-
```
|
|
12
|
-
|
|
13
|
-
## Configuration
|
|
14
|
-
|
|
15
|
-
The plugin requires these environment variables (can be set in .env file or character settings):
|
|
16
|
-
|
|
17
|
-
```json
|
|
18
|
-
"settings": {
|
|
19
|
-
"GROQ_API_KEY": "your_groq_api_key",
|
|
20
|
-
"GROQ_BASE_URL": "optional_custom_endpoint",
|
|
21
|
-
"GROQ_SMALL_MODEL": "llama-3.1-8b-instant",
|
|
22
|
-
"GROQ_LARGE_MODEL": "qwen-qwq-32b"
|
|
23
|
-
}
|
|
24
|
-
```
|
|
25
|
-
|
|
26
|
-
Or in `.env` file:
|
|
27
|
-
|
|
28
|
-
```
|
|
29
|
-
GROQ_API_KEY=your_groq_api_key
|
|
30
|
-
# Optional overrides:
|
|
31
|
-
GROQ_BASE_URL=optional_custom_endpoint
|
|
32
|
-
GROQ_SMALL_MODEL=llama-3.1-8b-instant
|
|
33
|
-
GROQ_LARGE_MODEL=qwen-qwq-32b
|
|
34
|
-
```
|
|
35
|
-
|
|
36
|
-
### Configuration Options
|
|
37
|
-
|
|
38
|
-
- `GROQ_API_KEY` (required): Your Groq API credentials.
|
|
39
|
-
- `GROQ_BASE_URL`: Custom API endpoint (default: https://api.groq.com/openai/v1).
|
|
40
|
-
- `GROQ_SMALL_MODEL`: Defaults to Llama 3.1 8B Instant ("llama-3.1-8b-instant").
|
|
41
|
-
- `GROQ_LARGE_MODEL`: Defaults to Qwen QWQ 32B ("qwen-qwq-32b").
|
|
42
|
-
|
|
43
|
-
The plugin provides these model classes:
|
|
44
|
-
|
|
45
|
-
- `TEXT_SMALL`: Optimized for fast, cost-effective responses (uses `GROQ_SMALL_MODEL`).
|
|
46
|
-
- `TEXT_LARGE`: For complex tasks requiring deeper reasoning (uses `GROQ_LARGE_MODEL`).
|
|
47
|
-
- `IMAGE`: Image generation.
|
|
48
|
-
- `TRANSCRIPTION`: Whisper audio transcription.
|
|
49
|
-
- `TEXT_TOKENIZER_ENCODE`: Text tokenization.
|
|
50
|
-
- `TEXT_TOKENIZER_DECODE`: Token decoding.
|
|
51
|
-
- `OBJECT_SMALL`: For generating structured JSON objects with the small model.
|
|
52
|
-
- `OBJECT_LARGE`: For generating structured JSON objects with the large model.
|
|
53
|
-
|
|
54
|
-
## Additional Features
|
|
55
|
-
|
|
56
|
-
### Text Generation (Small Model)
|
|
57
|
-
|
|
58
|
-
```javascript
|
|
59
|
-
const response = await runtime.useModel(ModelType.TEXT_SMALL, {
|
|
60
|
-
prompt: 'Explain quantum computing in simple terms.',
|
|
61
|
-
// Optional parameters:
|
|
62
|
-
// stopSequences: ["stop phrase"],
|
|
63
|
-
// maxTokens: 200,
|
|
64
|
-
// temperature: 0.7,
|
|
65
|
-
// frequencyPenalty: 0.7,
|
|
66
|
-
// presencePenalty: 0.7,
|
|
67
|
-
});
|
|
68
|
-
console.log(response);
|
|
69
|
-
```
|
|
70
|
-
|
|
71
|
-
### Text Generation (Large Model)
|
|
72
|
-
|
|
73
|
-
```javascript
|
|
74
|
-
const response = await runtime.useModel(ModelType.TEXT_LARGE, {
|
|
75
|
-
prompt: 'Write a comprehensive guide on sustainable gardening.',
|
|
76
|
-
// Optional parameters:
|
|
77
|
-
// stopSequences: ["stop phrase"],
|
|
78
|
-
// maxTokens: 1000,
|
|
79
|
-
// temperature: 0.8,
|
|
80
|
-
// frequencyPenalty: 0.5,
|
|
81
|
-
// presencePenalty: 0.5,
|
|
82
|
-
});
|
|
83
|
-
console.log(response);
|
|
84
|
-
```
|
|
85
|
-
|
|
86
|
-
### Image Generation
|
|
87
|
-
|
|
88
|
-
```javascript
|
|
89
|
-
const images = await runtime.useModel(ModelType.IMAGE, {
|
|
90
|
-
prompt: 'A futuristic cityscape at sunset',
|
|
91
|
-
n: 1, // number of images
|
|
92
|
-
size: '1024x1024', // image resolution
|
|
93
|
-
});
|
|
94
|
-
console.log(images[0].url); // Example: Accessing the URL of the first image
|
|
95
|
-
```
|
|
96
|
-
|
|
97
|
-
### Audio Transcription
|
|
98
|
-
|
|
99
|
-
```javascript
|
|
100
|
-
// Assuming 'audioBuffer' is a Buffer containing the audio data (e.g., from a file)
|
|
101
|
-
const transcription = await runtime.useModel(ModelType.TRANSCRIPTION, audioBuffer);
|
|
102
|
-
console.log(transcription);
|
|
103
|
-
```
|
|
104
|
-
|
|
105
|
-
### Text Tokenization (Encode)
|
|
106
|
-
|
|
107
|
-
```javascript
|
|
108
|
-
const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, {
|
|
109
|
-
prompt: 'Hello, world!',
|
|
110
|
-
modelType: ModelType.TEXT_SMALL, // Or ModelType.TEXT_LARGE
|
|
111
|
-
});
|
|
112
|
-
console.log(tokens);
|
|
113
|
-
```
|
|
114
|
-
|
|
115
|
-
### Text Tokenization (Decode)
|
|
116
|
-
|
|
117
|
-
```javascript
|
|
118
|
-
// Assuming 'tokens' is an array of numbers obtained from encoding
|
|
119
|
-
const text = await runtime.useModel(ModelType.TEXT_TOKENIZER_DECODE, {
|
|
120
|
-
tokens: [15339, 29871, 29991], // Example tokens for "Hello, world!" with some models
|
|
121
|
-
modelType: ModelType.TEXT_SMALL, // Or ModelType.TEXT_LARGE
|
|
122
|
-
});
|
|
123
|
-
console.log(text);
|
|
124
|
-
```
|
|
125
|
-
|
|
126
|
-
### Object Generation (Small Model)
|
|
127
|
-
|
|
128
|
-
```javascript
|
|
129
|
-
const userProfile = await runtime.useModel(ModelType.OBJECT_SMALL, {
|
|
130
|
-
prompt: 'Generate a JSON object for a user with name "Alex", age 30, and hobbies ["reading", "hiking"].',
|
|
131
|
-
temperature: 0.5,
|
|
132
|
-
});
|
|
133
|
-
console.log(userProfile);
|
|
134
|
-
```
|
|
135
|
-
|
|
136
|
-
### Object Generation (Large Model)
|
|
137
|
-
|
|
138
|
-
```javascript
|
|
139
|
-
const complexData = await runtime.useModel(ModelType.OBJECT_LARGE, {
|
|
140
|
-
prompt: 'Generate a detailed JSON object for a product listing: name "Laptop Pro", category "Electronics", price 1200, features ["16GB RAM", "512GB SSD", "15-inch Display"], and availability "in stock".',
|
|
141
|
-
temperature: 0.7,
|
|
142
|
-
});
|
|
143
|
-
console.log(complexData);
|
|
144
|
-
```
|