llm.rb 4.11.1 → 4.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +70 -0
- data/README.md +124 -695
- data/lib/llm/context.rb +2 -2
- data/lib/llm/function/task.rb +7 -1
- data/lib/llm/function.rb +14 -3
- data/lib/llm/mcp/error.rb +31 -1
- data/lib/llm/mcp/rpc.rb +8 -3
- data/lib/llm/mcp/transport/http.rb +2 -1
- data/lib/llm/mcp/transport/stdio.rb +1 -0
- data/lib/llm/mcp.rb +43 -1
- data/lib/llm/provider.rb +3 -4
- data/lib/llm/providers/anthropic/request_adapter/completion.rb +8 -1
- data/lib/llm/providers/anthropic/response_adapter/completion.rb +7 -2
- data/lib/llm/providers/anthropic/stream_parser.rb +1 -1
- data/lib/llm/providers/anthropic/utils.rb +23 -0
- data/lib/llm/providers/anthropic.rb +11 -0
- data/lib/llm/providers/openai/request_adapter/respond.rb +11 -5
- data/lib/llm/providers/openai/response_adapter/responds.rb +13 -1
- data/lib/llm/providers/openai/responses/stream_parser.rb +31 -0
- data/lib/llm/stream/queue.rb +15 -2
- data/lib/llm/stream.rb +24 -10
- data/lib/llm/version.rb +1 -1
- data/llm.gemspec +17 -39
- metadata +17 -36
data/llm.gemspec
CHANGED
|
@@ -8,47 +8,25 @@ Gem::Specification.new do |spec|
|
|
|
8
8
|
spec.authors = ["Antar Azri", "0x1eef", "Christos Maris", "Rodrigo Serrano"]
|
|
9
9
|
spec.email = ["azantar@proton.me", "0x1eef@hardenedbsd.org"]
|
|
10
10
|
|
|
11
|
-
spec.summary = <<~SUMMARY
|
|
12
|
-
llm.rb is a Ruby-centric toolkit for building real LLM-powered systems — where
|
|
13
|
-
LLMs are part of your architecture, not just API calls. It gives you explicit
|
|
14
|
-
control over contexts, tools, concurrency, and providers, so you can compose
|
|
15
|
-
reliable, production-ready workflows without hidden abstractions.
|
|
16
|
-
SUMMARY
|
|
11
|
+
spec.summary = "System integration layer for LLMs, tools, MCP, and APIs in Ruby."
|
|
17
12
|
|
|
18
13
|
spec.description = <<~DESCRIPTION
|
|
19
|
-
llm.rb is a
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
- **Stdlib-only by default** — Runs on Ruby standard library, with optional features loaded only when used
|
|
36
|
-
|
|
37
|
-
## Capabilities
|
|
38
|
-
|
|
39
|
-
- Chat & Contexts with persistence
|
|
40
|
-
- Streaming responses
|
|
41
|
-
- Tool calling with JSON Schema validation
|
|
42
|
-
- Concurrent execution (threads, fibers, async tasks)
|
|
43
|
-
- Agents with auto-execution
|
|
44
|
-
- Structured outputs
|
|
45
|
-
- MCP (Model Context Protocol) support
|
|
46
|
-
- Multimodal inputs (text, images, audio, documents)
|
|
47
|
-
- Audio generation, transcription, translation
|
|
48
|
-
- Image generation and editing
|
|
49
|
-
- Files API for document processing
|
|
50
|
-
- Embeddings and vector stores
|
|
51
|
-
- Local model registry for capabilities, limits, and pricing
|
|
14
|
+
llm.rb is a runtime for building AI systems that integrate directly with your
|
|
15
|
+
application. It is not just an API wrapper. It provides a unified execution
|
|
16
|
+
model for providers, tools, MCP servers, streaming, schemas, files, and
|
|
17
|
+
state.
|
|
18
|
+
|
|
19
|
+
It is built for engineers who want control over how these systems run.
|
|
20
|
+
llm.rb stays close to Ruby, runs on the standard library by default, loads
|
|
21
|
+
optional pieces only when needed, and remains easy to extend. It also works
|
|
22
|
+
well in Rails or ActiveRecord applications, where a small wrapper around
|
|
23
|
+
context persistence is enough to save and restore long-lived conversation
|
|
24
|
+
state across requests, jobs, or retries.
|
|
25
|
+
|
|
26
|
+
Most LLM libraries stop at request/response APIs. Building real systems
|
|
27
|
+
means stitching together streaming, tools, state, persistence, and external
|
|
28
|
+
services by hand. llm.rb provides a single execution model for all of these,
|
|
29
|
+
so they compose naturally instead of becoming separate subsystems.
|
|
52
30
|
DESCRIPTION
|
|
53
31
|
|
|
54
32
|
spec.license = "0BSD"
|
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: llm.rb
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 4.11.1
|
|
4
|
+
version: 4.13.0
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Antar Azri
|
|
@@ -195,39 +195,22 @@ dependencies:
|
|
|
195
195
|
- !ruby/object:Gem::Version
|
|
196
196
|
version: '1.7'
|
|
197
197
|
description: |
|
|
198
|
-
llm.rb is a
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
198
|
+
llm.rb is a runtime for building AI systems that integrate directly with your
|
|
199
|
+
application. It is not just an API wrapper. It provides a unified execution
|
|
200
|
+
model for providers, tools, MCP servers, streaming, schemas, files, and
|
|
201
|
+
state.
|
|
202
202
|
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
203
|
+
It is built for engineers who want control over how these systems run.
|
|
204
|
+
llm.rb stays close to Ruby, runs on the standard library by default, loads
|
|
205
|
+
optional pieces only when needed, and remains easy to extend. It also works
|
|
206
|
+
well in Rails or ActiveRecord applications, where a small wrapper around
|
|
207
|
+
context persistence is enough to save and restore long-lived conversation
|
|
208
|
+
state across requests, jobs, or retries.
|
|
206
209
|
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
- **One API across providers** — Unified interface for OpenAI, Anthropic, Google, xAI, zAI, DeepSeek, Ollama, and LlamaCpp
|
|
212
|
-
- **Thread-safe where it matters** — Providers are shareable, while contexts stay isolated and stateful
|
|
213
|
-
- **Production-ready** — Cost tracking, observability, persistence, and performance tuning built in
|
|
214
|
-
- **Stdlib-only by default** — Runs on Ruby standard library, with optional features loaded only when used
|
|
215
|
-
|
|
216
|
-
## Capabilities
|
|
217
|
-
|
|
218
|
-
- Chat & Contexts with persistence
|
|
219
|
-
- Streaming responses
|
|
220
|
-
- Tool calling with JSON Schema validation
|
|
221
|
-
- Concurrent execution (threads, fibers, async tasks)
|
|
222
|
-
- Agents with auto-execution
|
|
223
|
-
- Structured outputs
|
|
224
|
-
- MCP (Model Context Protocol) support
|
|
225
|
-
- Multimodal inputs (text, images, audio, documents)
|
|
226
|
-
- Audio generation, transcription, translation
|
|
227
|
-
- Image generation and editing
|
|
228
|
-
- Files API for document processing
|
|
229
|
-
- Embeddings and vector stores
|
|
230
|
-
- Local model registry for capabilities, limits, and pricing
|
|
210
|
+
Most LLM libraries stop at request/response APIs. Building real systems
|
|
211
|
+
means stitching together streaming, tools, state, persistence, and external
|
|
212
|
+
services by hand. llm.rb provides a single execution model for all of these,
|
|
213
|
+
so they compose naturally instead of becoming separate subsystems.
|
|
231
214
|
email:
|
|
232
215
|
- azantar@proton.me
|
|
233
216
|
- 0x1eef@hardenedbsd.org
|
|
@@ -300,6 +283,7 @@ files:
|
|
|
300
283
|
- lib/llm/providers/anthropic/response_adapter/models.rb
|
|
301
284
|
- lib/llm/providers/anthropic/response_adapter/web_search.rb
|
|
302
285
|
- lib/llm/providers/anthropic/stream_parser.rb
|
|
286
|
+
- lib/llm/providers/anthropic/utils.rb
|
|
303
287
|
- lib/llm/providers/deepseek.rb
|
|
304
288
|
- lib/llm/providers/deepseek/request_adapter.rb
|
|
305
289
|
- lib/llm/providers/deepseek/request_adapter/completion.rb
|
|
@@ -417,8 +401,5 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
|
417
401
|
requirements: []
|
|
418
402
|
rubygems_version: 3.6.9
|
|
419
403
|
specification_version: 4
|
|
420
|
-
summary: llm.rb is a Ruby-centric toolkit for building real LLM-powered systems —
|
|
421
|
-
where LLMs are part of your architecture, not just API calls. It gives you explicit
|
|
422
|
-
control over contexts, tools, concurrency, and providers, so you can compose reliable,
|
|
423
|
-
production-ready workflows without hidden abstractions.
|
|
404
|
+
summary: System integration layer for LLMs, tools, MCP, and APIs in Ruby.
|
|
424
405
|
test_files: []
|