@ideascol/agents-generator-sdk 0.7.4 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -135,6 +135,193 @@ interface StreamCallbacks {
135
135
  }
136
136
  ```
137
137
 
138
+ ## Inline agents (Genkit-style, no persistence)
139
+
140
+ Define an agent in code and execute it without persisting the configuration
141
+ on the platform. The full agent definition travels in each request.
142
+
143
+ Two distinct credentials — do not confuse them:
144
+
145
+ - **`apiToken`** (constructor) — platform auth. A Keycloak JWT or a workspace
146
+ API key (`ack_*`). Optional. When omitted, the SDK runs in **anonymous
147
+ mode** and the backend skips workspace context.
148
+ - **`apiKey`** (`defineAgent`) — model-provider credential (OpenAI key, etc.).
149
+ Travels in the request body as `api_key`. Required in anonymous mode.
150
+
151
+ Anonymous mode rejects: `credentialId` references (agent or per-node) and
152
+ `workspaceAgentTool` nodes — both need workspace scoping.
153
+
154
+ ### Minimal — anonymous, no tools
155
+
156
+ ```ts
157
+ import { AgentClient } from '@ideascol/agents-generator-sdk';
158
+
159
+ const client = new AgentClient({
160
+ apiUrl: 'http://localhost:8000',
161
+ // no apiToken → anonymous mode
162
+ });
163
+
164
+ const agent = client.defineAgent({
165
+ name: 'haiku-bot',
166
+ instructions: 'Write short, original haikus on the topic the user gives.',
167
+ modelProvider: 'openai', // autocompletes 75 providers
168
+ modelName: 'gpt-5-mini', // autocompletes 2050 models
169
+ apiKey: process.env.OPENAI_API_KEY,
170
+ });
171
+
172
+ const r = await agent.run('the sea at dawn');
173
+ console.log(r.message);
174
+ ```
175
+
176
+ ### With local function tools
177
+
178
+ The SDK runs the callback-tool loop transparently: it executes your local
179
+ handler when the model calls the tool and posts the result back to continue
180
+ the same logical turn.
181
+
182
+ ```ts
183
+ const agent = client.defineAgent({
184
+ name: 'weather',
185
+ instructions: 'Use get_weather to answer questions.',
186
+ modelProvider: 'openai',
187
+ modelName: 'gpt-5-mini',
188
+ apiKey: process.env.OPENAI_API_KEY,
189
+ maxToolIterations: 10, // cap on tool round-trips per run
190
+ functions: [
191
+ {
192
+ name: 'get_weather',
193
+ description: 'Return current weather for a city.',
194
+ parameters: {
195
+ type: 'object',
196
+ properties: { city: { type: 'string' } },
197
+ required: ['city'],
198
+ },
199
+ execute: async ({ city }: { city: string }) => ({
200
+ city,
201
+ temp_c: 22,
202
+ conditions: 'sunny',
203
+ }),
204
+ },
205
+ ],
206
+ });
207
+
208
+ const r = await agent.run('Weather in Bogota?');
209
+ console.log(r.message);
210
+ console.log('tool calls:', r.tool_results?.length);
211
+ ```
212
+
213
+ ### Stateless with prior history
214
+
215
+ The SDK replays the full history on each request — fine for short sessions;
216
+ prefer ephemeral conversations beyond ~20 turns.
217
+
218
+ ```ts
219
+ import { InlineMessage } from '@ideascol/agents-generator-sdk';
220
+
221
+ const history: InlineMessage[] = [
222
+ { role: 'user', content: 'My name is Jairo.' },
223
+ { role: 'assistant', content: 'Nice to meet you, Jairo.' },
224
+ ];
225
+ const r = await agent.run("What's my name?", { history });
226
+ ```
227
+
228
+ ### SSE streaming — callbacks
229
+
230
+ ```ts
231
+ let prev = 0;
232
+ await agent.run('write a 6-sentence story about a robot in a forest', {
233
+ callbacks: {
234
+ onMessage: (content) => {
235
+ process.stdout.write(content.slice(prev));
236
+ prev = content.length;
237
+ },
238
+ onToolCall: (tc) => console.log('\n[tool]', tc.name),
239
+ onDone: () => console.log('\n[done]'),
240
+ onError: (err) => console.error(err.message),
241
+ },
242
+ });
243
+ ```
244
+
245
+ ### SSE streaming — AsyncIterable
246
+
247
+ ```ts
248
+ let prev = '';
249
+ for await (const ev of agent.runStream('tell me a joke')) {
250
+ if (ev.type === 'message' && typeof (ev as any).content === 'string') {
251
+ const c = (ev as any).content as string;
252
+ process.stdout.write(c.slice(prev.length));
253
+ prev = c;
254
+ } else if (ev.type === 'done') {
255
+ break;
256
+ }
257
+ }
258
+ ```
259
+
260
+ ### Ephemeral conversation (multi-turn, server-tracked, TTL)
261
+
262
+ The backend keeps state for the session under a TTL'd row. No need to
263
+ replay history on each turn. Send messages via the returned handle.
264
+
265
+ ```ts
266
+ const conv = await agent.startConversation({ ttlSeconds: 600 });
267
+ console.log('conversation_id:', conv.conversation_id);
268
+
269
+ await conv.send('My favorite color is teal.');
270
+ const r2 = await conv.send("What's my favorite color?");
271
+ console.log(r2.message); // → "Your favorite color is teal."
272
+
273
+ await conv.close(); // best-effort DELETE
274
+ ```
275
+
276
+ ### Authenticated mode (workspace credential reference)
277
+
278
+ Pass an `ack_*` workspace API key as `apiToken` to unlock workspace-scoped
279
+ features: stored credentials, workspace-agent tools, token-usage tracking.
280
+
281
+ ```ts
282
+ const client = new AgentClient({
283
+ apiUrl: 'https://api.agentsgenerator.dev',
284
+ apiToken: 'ack_xxx', // create one in the platform UI → Workspace → API Keys
285
+ });
286
+
287
+ const agent = client.defineAgent({
288
+ name: 'support',
289
+ instructions: '...',
290
+ modelProvider: 'openai',
291
+ modelName: 'gpt-5-mini',
292
+ credentialId: 'cred-uuid', // resolves against the workspace
293
+ });
294
+ await agent.run('hello');
295
+ ```
296
+
297
+ ### Typed model catalog
298
+
299
+ `ModelProvider` / `ModelName` literal unions provide autocomplete on
300
+ `modelProvider` and `modelName`. Custom strings are still allowed (for forward compatibility).
301
+ The snapshot can be regenerated from the live backend:
302
+
303
+ ```ts
304
+ import {
305
+ MODELS_BY_PROVIDER,
306
+ MODEL_INFO,
307
+ type ModelProvider,
308
+ type ModelName,
309
+ } from '@ideascol/agents-generator-sdk';
310
+
311
+ console.log(MODELS_BY_PROVIDER.anthropic); // ["claude-...", ...]
312
+ console.log(MODEL_INFO['gpt-5-mini'].max_input_tokens); // number
313
+ ```
314
+
315
+ Regenerate the snapshot:
316
+
317
+ ```bash
318
+ API_URL=https://api.agentsgenerator.dev bun run generate:models
319
+ # or against a local backend:
320
+ API_URL=http://localhost:8000 bun run generate:models
321
+ # filters:
322
+ MODELS_MODE=chat MODELS_FN_CALLING=true bun run generate:models
323
+ ```
324
+
138
325
  ## Quick start
139
326
  ```bash
140
327
  # Using npm