@ljoukov/llm 3.0.0 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -4
- package/package.json +4 -1
package/README.md
CHANGED
|
@@ -87,7 +87,16 @@ refresh-token rotation and serves short-lived access tokens.
|
|
|
87
87
|
- `CHATGPT_AUTH_API_KEY` (shared secret; sent as `Authorization: Bearer ...` and `x-chatgpt-auth: ...`)
|
|
88
88
|
- `CHATGPT_AUTH_TOKEN_PROVIDER_STORE` (`kv` or `d1`, defaults to `kv`)
|
|
89
89
|
|
|
90
|
-
This repo includes a Cloudflare Workers token provider implementation in `
|
|
90
|
+
This repo includes a Cloudflare Workers token provider implementation in `chatgpt-auth/worker/`.
|
|
91
|
+
|
|
92
|
+
To seed the worker with a fresh OAuth token set via browser login:
|
|
93
|
+
|
|
94
|
+
```bash
|
|
95
|
+
npm run chatgpt-auth:seed -- --worker-url https://chatgpt-auth.<your-domain>
|
|
96
|
+
```
|
|
97
|
+
|
|
98
|
+
The CLI opens `auth.openai.com`, captures the localhost OAuth callback, exchanges the code, calls `POST /v1/seed`,
|
|
99
|
+
then resolves a smoke model from `GET /backend-api/codex/models` and runs a post-seed inference check (disable with `--skip-smoke-check`).
|
|
91
100
|
|
|
92
101
|
If `CHATGPT_AUTH_TOKEN_PROVIDER_URL` + `CHATGPT_AUTH_API_KEY` are set, `chatgpt-*` models will fetch tokens from the
|
|
93
102
|
token provider and will not read the local Codex auth store.
|
|
@@ -457,7 +466,7 @@ const fs = createInMemoryAgentFilesystem({
|
|
|
457
466
|
});
|
|
458
467
|
|
|
459
468
|
const result = await runAgentLoop({
|
|
460
|
-
model: "chatgpt-gpt-5.3-codex
|
|
469
|
+
model: "chatgpt-gpt-5.3-codex",
|
|
461
470
|
input: "Change value from 1 to 2 using filesystem tools.",
|
|
462
471
|
filesystemTool: {
|
|
463
472
|
profile: "auto",
|
|
@@ -481,13 +490,13 @@ import {
|
|
|
481
490
|
} from "@ljoukov/llm";
|
|
482
491
|
|
|
483
492
|
const fs = createInMemoryAgentFilesystem({ "/repo/a.ts": "export const n = 1;\n" });
|
|
484
|
-
const tools = createFilesystemToolSetForModel("chatgpt-gpt-5.3-codex
|
|
493
|
+
const tools = createFilesystemToolSetForModel("chatgpt-gpt-5.3-codex", {
|
|
485
494
|
cwd: "/repo",
|
|
486
495
|
fs,
|
|
487
496
|
});
|
|
488
497
|
|
|
489
498
|
const result = await runToolLoop({
|
|
490
|
-
model: "chatgpt-gpt-5.3-codex
|
|
499
|
+
model: "chatgpt-gpt-5.3-codex",
|
|
491
500
|
input: "Update n to 2.",
|
|
492
501
|
tools,
|
|
493
502
|
});
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@ljoukov/llm",
|
|
3
|
-
"version": "3.0.0",
|
|
3
|
+
"version": "3.0.1",
|
|
4
4
|
"description": "Unified wrapper over OpenAI Responses API and Google Gemini (@google/genai), with streaming + usage/cost metrics.",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"repository": {
|
|
@@ -47,6 +47,9 @@
|
|
|
47
47
|
"bench:agent:estimate": "tsx benchmarks/agent/run.ts --estimate-only",
|
|
48
48
|
"bench:agent:latest": "tsx benchmarks/agent/run.ts --tasks all --prune-traces",
|
|
49
49
|
"bench:agent:latest:estimate": "tsx benchmarks/agent/run.ts --tasks all --estimate-only",
|
|
50
|
+
"chatgpt-auth:seed": "tsx chatgpt-auth/seed/cli.ts",
|
|
51
|
+
"chatgpt-auth:worker:dry-run": "cd chatgpt-auth/worker && npx wrangler deploy --dry-run",
|
|
52
|
+
"chatgpt-auth:worker:deploy": "cd chatgpt-auth/worker && npx wrangler deploy",
|
|
50
53
|
"test": "npm run test:unit",
|
|
51
54
|
"test:unit": "vitest run --config vitest.unit.config.ts",
|
|
52
55
|
"test:integration": "vitest run --config vitest.integration.config.ts",
|