groq_ruby 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109) hide show
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +0 -0
  3. data/CHANGELOG.md +57 -0
  4. data/CLAUDE.md +103 -0
  5. data/LICENSE.txt +21 -0
  6. data/README.md +495 -0
  7. data/Rakefile +11 -0
  8. data/examples/README.md +39 -0
  9. data/examples/batch.rb +29 -0
  10. data/examples/chat_completion.rb +24 -0
  11. data/examples/chat_completion_stop.rb +19 -0
  12. data/examples/chat_completion_streaming.rb +23 -0
  13. data/examples/embedding.rb +20 -0
  14. data/examples/error_handling.rb +27 -0
  15. data/examples/file_upload.rb +23 -0
  16. data/examples/mcp_agent.rb +63 -0
  17. data/examples/mcp_chat_with_tools.rb +103 -0
  18. data/examples/mcp_resources_and_prompts.rb +89 -0
  19. data/examples/models_list.rb +16 -0
  20. data/examples/speech.rb +23 -0
  21. data/examples/transcription.rb +23 -0
  22. data/examples/translation.rb +22 -0
  23. data/lib/groq_ruby/client.rb +69 -0
  24. data/lib/groq_ruby/configuration.rb +62 -0
  25. data/lib/groq_ruby/error_mapper.rb +37 -0
  26. data/lib/groq_ruby/errors/api_connection_error.rb +8 -0
  27. data/lib/groq_ruby/errors/api_error.rb +14 -0
  28. data/lib/groq_ruby/errors/api_response_error.rb +5 -0
  29. data/lib/groq_ruby/errors/api_status_error.rb +23 -0
  30. data/lib/groq_ruby/errors/api_timeout_error.rb +8 -0
  31. data/lib/groq_ruby/errors/authentication_error.rb +4 -0
  32. data/lib/groq_ruby/errors/bad_request_error.rb +4 -0
  33. data/lib/groq_ruby/errors/configuration_error.rb +4 -0
  34. data/lib/groq_ruby/errors/conflict_error.rb +4 -0
  35. data/lib/groq_ruby/errors/error.rb +5 -0
  36. data/lib/groq_ruby/errors/internal_server_error.rb +4 -0
  37. data/lib/groq_ruby/errors/not_found_error.rb +4 -0
  38. data/lib/groq_ruby/errors/parameter_error.rb +13 -0
  39. data/lib/groq_ruby/errors/permission_denied_error.rb +4 -0
  40. data/lib/groq_ruby/errors/rate_limit_error.rb +4 -0
  41. data/lib/groq_ruby/errors/unprocessable_entity_error.rb +4 -0
  42. data/lib/groq_ruby/mcp/bridge.rb +239 -0
  43. data/lib/groq_ruby/mcp/claude_desktop_config.rb +79 -0
  44. data/lib/groq_ruby/mcp/client.rb +171 -0
  45. data/lib/groq_ruby/mcp/errors/error.rb +7 -0
  46. data/lib/groq_ruby/mcp/errors/json_rpc_error.rb +21 -0
  47. data/lib/groq_ruby/mcp/errors/protocol_error.rb +7 -0
  48. data/lib/groq_ruby/mcp/errors/timeout_error.rb +7 -0
  49. data/lib/groq_ruby/mcp/errors/transport_error.rb +6 -0
  50. data/lib/groq_ruby/mcp/errors/unknown_tool_error.rb +7 -0
  51. data/lib/groq_ruby/mcp/json_rpc.rb +51 -0
  52. data/lib/groq_ruby/mcp/prompt.rb +21 -0
  53. data/lib/groq_ruby/mcp/resource.rb +17 -0
  54. data/lib/groq_ruby/mcp/server_config.rb +22 -0
  55. data/lib/groq_ruby/mcp/tool.rb +22 -0
  56. data/lib/groq_ruby/mcp/transport.rb +32 -0
  57. data/lib/groq_ruby/mcp/transports/stdio.rb +100 -0
  58. data/lib/groq_ruby/mcp.rb +25 -0
  59. data/lib/groq_ruby/models/audio/transcription.rb +10 -0
  60. data/lib/groq_ruby/models/audio/translation.rb +8 -0
  61. data/lib/groq_ruby/models/batches/batch.rb +16 -0
  62. data/lib/groq_ruby/models/batches/batch_list.rb +10 -0
  63. data/lib/groq_ruby/models/batches/batch_request_counts.rb +8 -0
  64. data/lib/groq_ruby/models/chat/chat_completion.rb +14 -0
  65. data/lib/groq_ruby/models/chat/chat_completion_choice.rb +10 -0
  66. data/lib/groq_ruby/models/chat/chat_completion_chunk.rb +13 -0
  67. data/lib/groq_ruby/models/chat/chat_completion_chunk_choice.rb +10 -0
  68. data/lib/groq_ruby/models/chat/chat_completion_delta.rb +8 -0
  69. data/lib/groq_ruby/models/chat/chat_completion_message.rb +10 -0
  70. data/lib/groq_ruby/models/embeddings/create_embedding_response.rb +11 -0
  71. data/lib/groq_ruby/models/embeddings/embedding.rb +8 -0
  72. data/lib/groq_ruby/models/embeddings/embedding_usage.rb +8 -0
  73. data/lib/groq_ruby/models/files/file_deleted.rb +8 -0
  74. data/lib/groq_ruby/models/files/file_list.rb +10 -0
  75. data/lib/groq_ruby/models/files/file_object.rb +8 -0
  76. data/lib/groq_ruby/models/model.rb +8 -0
  77. data/lib/groq_ruby/models/model_deleted.rb +8 -0
  78. data/lib/groq_ruby/models/model_factory.rb +31 -0
  79. data/lib/groq_ruby/models/model_list.rb +10 -0
  80. data/lib/groq_ruby/models/usage.rb +11 -0
  81. data/lib/groq_ruby/multipart.rb +84 -0
  82. data/lib/groq_ruby/request.rb +13 -0
  83. data/lib/groq_ruby/resources/audio/speech.rb +32 -0
  84. data/lib/groq_ruby/resources/audio/transcriptions.rb +48 -0
  85. data/lib/groq_ruby/resources/audio/translations.rb +45 -0
  86. data/lib/groq_ruby/resources/audio.rb +26 -0
  87. data/lib/groq_ruby/resources/base.rb +33 -0
  88. data/lib/groq_ruby/resources/batches.rb +44 -0
  89. data/lib/groq_ruby/resources/chat/completions.rb +94 -0
  90. data/lib/groq_ruby/resources/chat.rb +16 -0
  91. data/lib/groq_ruby/resources/embeddings.rb +28 -0
  92. data/lib/groq_ruby/resources/files.rb +55 -0
  93. data/lib/groq_ruby/resources/models.rb +35 -0
  94. data/lib/groq_ruby/response.rb +9 -0
  95. data/lib/groq_ruby/streaming/chunk_stream.rb +58 -0
  96. data/lib/groq_ruby/streaming/event_parser.rb +23 -0
  97. data/lib/groq_ruby/transport.rb +169 -0
  98. data/lib/groq_ruby/version.rb +5 -0
  99. data/lib/groq_ruby.rb +36 -0
  100. data/lib/tasks/gem.rake +5 -0
  101. data/lib/tasks/lint/all.rake +11 -0
  102. data/lib/tasks/lint/rubocop.rake +15 -0
  103. data/lib/tasks/security.rake +11 -0
  104. data/lib/tasks/types.rake +11 -0
  105. data/sig/groq_ruby.rbs +191 -0
  106. data/sig/zeitwerk.rbs +13 -0
  107. data.tar.gz.sig +0 -0
  108. metadata +237 -0
  109. metadata.gz.sig +0 -0
data/Rakefile ADDED
# frozen_string_literal: true

require "minitest/test_task"

# Pull in every project-local rake task definition under lib/tasks.
Dir.glob("lib/tasks/**/*.rake").each do |task_file|
  load task_file
end

# Defines the standard `test` task.
Minitest::TestTask.create

desc "Run linter"
task lint: "lint:all"

task default: %i[test lint types]
@@ -0,0 +1,39 @@
1
+ # groq_ruby examples
2
+
3
+ Each script is self-contained and reads `GROQ_API_KEY` from the environment.
4
+
5
+ ```sh
6
+ export GROQ_API_KEY=gsk_...
7
+
8
+ bundle exec examples/chat_completion.rb
9
+ bundle exec examples/chat_completion_streaming.rb
10
+ bundle exec examples/chat_completion_stop.rb
11
+ bundle exec examples/embedding.rb
12
+ bundle exec examples/transcription.rb path/to/audio.mp3
13
+ bundle exec examples/translation.rb path/to/audio.mp3
14
+ bundle exec examples/speech.rb "Hello, Groq."
15
+ bundle exec examples/models_list.rb
16
+ bundle exec examples/file_upload.rb path/to/requests.jsonl
17
+ bundle exec examples/batch.rb file_abc123
18
+ bundle exec examples/error_handling.rb
19
+ bundle exec examples/mcp_agent.rb /path/to/sandbox # needs npx
20
+ bundle exec examples/mcp_chat_with_tools.rb /path/to/sandbox # needs npx
21
+ bundle exec examples/mcp_resources_and_prompts.rb /path/to/sandbox # needs npx
22
+ ```
23
+
24
+ | Script | Endpoint | What it shows |
25
+ |--------------------------------|----------------------------------------------|--------------------------------------------------|
26
+ | `chat_completion.rb` | `POST /openai/v1/chat/completions` | Buffered chat completion with usage info |
27
+ | `chat_completion_streaming.rb` | same, `stream: true` | Token-by-token Server-Sent-Events streaming |
28
+ | `chat_completion_stop.rb` | same | Custom `stop` sequence + reading `finish_reason` |
29
+ | `embedding.rb` | `POST /openai/v1/embeddings` | Generating an embedding vector |
30
+ | `transcription.rb` | `POST /openai/v1/audio/transcriptions` | Whisper STT from a local file |
31
+ | `translation.rb` | `POST /openai/v1/audio/translations` | Whisper STT translated into English |
32
+ | `speech.rb` | `POST /openai/v1/audio/speech` | TTS — saves a WAV to disk |
33
+ | `models_list.rb` | `GET /openai/v1/models` | Listing available models |
34
+ | `file_upload.rb` | `POST /openai/v1/files` (+ list/info/delete) | Multipart file upload lifecycle |
35
+ | `batch.rb` | `POST /openai/v1/batches` (+ retrieve poll) | Async batch submission and status polling |
36
+ | `error_handling.rb` | any | Catching the typed error hierarchy |
37
+ | `mcp_agent.rb` | chat.completions + MCP | Minimal agent loop — bridge tools, dispatch tool_calls back |
38
+ | `mcp_chat_with_tools.rb` | chat.completions + MCP | Heavily annotated walkthrough of the same loop, step by step |
39
+ | `mcp_resources_and_prompts.rb` | chat.completions + MCP | Full coverage: tools + resources (synthetic read_resource) + prompts |
data/examples/batch.rb ADDED
#!/usr/bin/env ruby
# Submit a batch job referencing a previously uploaded JSONL file, then poll
# until it reaches a terminal state.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/batch.rb <input_file_id>

require "bundler/setup"
require "groq_ruby"

# States after which the batch will never change again.
TERMINAL_STATUSES = %w[completed failed cancelled expired].freeze

input_file_id = ARGV.first || abort("usage: batch.rb <input_file_id>")

client = GroqRuby::Client.new

batch = client.batches.create(
  input_file_id: input_file_id,
  endpoint: "/v1/chat/completions",
  completion_window: "24h"
)
puts "submitted batch id=#{batch.id} status=#{batch.status}"

# Poll every five seconds until the batch settles.
loop do
  current = client.batches.retrieve(batch.id)
  puts "status=#{current.status}"
  break if TERMINAL_STATUSES.include?(current.status)

  sleep 5
end
#!/usr/bin/env ruby
# Buffered chat completion. Reads GROQ_API_KEY from the environment.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/chat_completion.rb

require "bundler/setup"
require "groq_ruby"

client = GroqRuby::Client.new

# A system turn to set behavior plus a single user question.
messages = [
  {role: "system", content: "You are a helpful assistant."},
  {role: "user", content: "Explain the importance of low-latency LLMs in two sentences."}
]

response = client.chat.completions.create(
  model: "llama-3.3-70b-versatile",
  messages: messages,
  temperature: 0.5,
  max_completion_tokens: 256
)

usage = response.usage
puts response.choices.first.message.content
puts
puts "tokens: prompt=#{usage.prompt_tokens} completion=#{usage.completion_tokens} total=#{usage.total_tokens}"
#!/usr/bin/env ruby
# Chat completion that halts when the model emits a custom stop sequence.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/chat_completion_stop.rb

require "bundler/setup"
require "groq_ruby"

client = GroqRuby::Client.new

response = client.chat.completions.create(
  model: "llama-3.3-70b-versatile",
  messages: [{role: "user", content: "Count from 1 to 10."}],
  stop: ", 6"
)

# finish_reason should report "stop" when the sequence is hit.
choice = response.choices.first
puts choice.message.content
puts "finish_reason: #{choice.finish_reason}"
#!/usr/bin/env ruby
# Streaming chat completion. Each chunk is yielded as it arrives.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/chat_completion_streaming.rb

require "bundler/setup"
require "groq_ruby"

client = GroqRuby::Client.new

params = {
  model: "llama-3.3-70b-versatile",
  messages: [{role: "user", content: "Write a short poem about latency."}],
  stream: true
}

# With stream: true the block receives each SSE chunk as it arrives.
client.chat.completions.create(**params) do |chunk|
  piece = chunk.choices.first.delta.content
  print piece unless piece.nil?
end

puts
#!/usr/bin/env ruby
# Generate an embedding vector for a piece of text.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/embedding.rb

require "bundler/setup"
require "groq_ruby"

client = GroqRuby::Client.new

response = client.embeddings.create(
  model: "nomic-embed-text-v1_5",
  input: "Groq makes inference very fast."
)

# The embedding may come back as a float Array or a base64 String,
# depending on the requested encoding format.
vector = response.data.first.embedding
if vector.is_a?(Array)
  puts "vector length: #{vector.size}"
  puts "first 5 dims: #{vector.first(5).inspect}"
else
  puts "vector length: (base64)"
end
puts "tokens: #{response.usage.prompt_tokens}"
#!/usr/bin/env ruby
# Demonstrates the error hierarchy. Every API failure raises a subclass of
# GroqRuby::APIError so callers can rescue the base class and degrade
# gracefully — or pattern-match on a specific status code.
#
# Usage:
#   GROQ_API_KEY=invalid bundle exec examples/error_handling.rb

require "bundler/setup"
require "groq_ruby"

# Fire a minimal request; with an invalid key this raises
# GroqRuby::AuthenticationError.
def attempt_chat(client)
  client.chat.completions.create(
    model: "llama-3.3-70b-versatile",
    messages: [{role: "user", content: "Hi"}]
  )
end

client = GroqRuby::Client.new

# Specific error classes first, then the broader status/connection bases.
begin
  attempt_chat(client)
rescue GroqRuby::AuthenticationError => e
  warn "auth failed (status #{e.status}): #{e.message}"
rescue GroqRuby::RateLimitError => e
  warn "rate limited: #{e.message}"
rescue GroqRuby::APIStatusError => e
  warn "status #{e.status}: #{e.message}"
rescue GroqRuby::APIConnectionError => e
  warn "network: #{e.message}"
end
#!/usr/bin/env ruby
# Upload a JSONL file for use with the batch API, then fetch its metadata
# and delete it. (The original header mentioned listing files, but this
# script only uploads, inspects, and deletes.)
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/file_upload.rb path/to/requests.jsonl

require "bundler/setup"
require "groq_ruby"

path = ARGV.first || abort("usage: file_upload.rb <jsonl-file>")

client = GroqRuby::Client.new

# Multipart upload — the block form of File.open guarantees the handle
# is closed even if the request raises.
uploaded = File.open(path, "rb") do |file|
  client.files.create(file: file, filename: File.basename(path), purpose: "batch")
end
puts "uploaded id=#{uploaded.id} bytes=#{uploaded.bytes}"

# Fetch server-side metadata for the file we just uploaded.
info = client.files.info(uploaded.id)
puts "info: filename=#{info.filename} purpose=#{info.purpose}"

# Clean up so the example leaves no state behind.
deleted = client.files.delete(uploaded.id)
puts "deleted=#{deleted.deleted}"
#!/usr/bin/env ruby
# Minimal MCP-powered agent loop:
#   1. Connect to one or more MCP servers via stdio
#   2. Expose their tools through chat.completions(tools:)
#   3. If the model wants to call a tool, dispatch through the bridge
#      and feed the result back as a follow-up message.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/mcp_agent.rb
#
# This example uses the official @modelcontextprotocol/server-filesystem
# MCP server. Install once with: npm install -g @modelcontextprotocol/server-filesystem

require "bundler/setup"
require "groq_ruby"
require "json"

ROOT = ARGV[0] || Dir.pwd

filesystem = GroqRuby::MCP::ServerConfig.new(
  name: "fs",
  command: "npx",
  args: ["-y", "@modelcontextprotocol/server-filesystem", ROOT]
)

bridge = GroqRuby::MCP::Bridge.new([filesystem])
groq = GroqRuby::Client.new

messages = [
  {role: "system", content: "You can read files in #{ROOT}. Use tools to answer."},
  {role: "user", content: "List the files in the directory and tell me what's there."}
]

begin
  loop do
    response = groq.chat.completions.create(
      model: "llama-3.3-70b-versatile",
      messages: messages,
      tools: bridge.tools
    )

    turn = response.choices.first.message
    calls = turn.tool_calls

    # No tool calls means the model produced its final answer.
    if calls.nil? || calls.empty?
      puts turn.content
      break
    end

    # Record the assistant turn, then answer each tool call in order.
    messages << {role: "assistant", content: turn.content, tool_calls: calls}
    calls.each do |call|
      fn = call["function"]
      messages << {
        role: "tool",
        tool_call_id: call["id"],
        content: JSON.generate(bridge.call(fn["name"], fn["arguments"]))
      }
    end
  end
ensure
  # Always tear the bridge down so the MCP child processes exit.
  bridge.stop
end
#!/usr/bin/env ruby
# Annotated walkthrough: how an MCP server's tools become a Groq
# `tools:` argument and how a returned `tool_calls` flows back to the
# right MCP server.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/mcp_chat_with_tools.rb [DIR]
#
# Requires the official filesystem MCP server. One-time install:
#   npm install -g @modelcontextprotocol/server-filesystem

require "bundler/setup"
require "groq_ruby"
require "json"

ROOT = ARGV[0] || Dir.pwd

# Step 1 — declare the MCP server. Nothing is spawned yet; ServerConfig
# is pure data. The `name` becomes the namespace prefix for this
# server's tools, so `read_file` is advertised to the model as
# `fs__read_file`.
filesystem = GroqRuby::MCP::ServerConfig.new(
  name: "fs",
  command: "npx",
  args: ["-y", "@modelcontextprotocol/server-filesystem", ROOT]
)

# Step 2 — build the Bridge. This synchronously spawns the stdio
# process, runs the MCP `initialize` handshake, fetches each server's
# tool list, and indexes the tools by namespaced name. From here on,
# `bridge.tools` yields OpenAI/Groq-shaped tool definitions ready to
# pass to chat.completions.
bridge = GroqRuby::MCP::Bridge.new([filesystem])
groq = GroqRuby::Client.new

# Show what the model will see:
puts "tools advertised to the model:"
bridge.tools.each { |t| puts " - #{t[:function][:name]}: #{t[:function][:description]}" }
puts

# Step 3 — seed the conversation. The system prompt nudges the model
# toward the tools enumerated above.
messages = [
  {role: "system", content: "You can use filesystem tools to inspect files in #{ROOT}. Prefer tools over speculation."},
  {role: "user", content: "Look at the README in #{ROOT} and summarise it in two sentences."}
]

# Step 4 — the agent loop. Each pass asks Groq for the next assistant
# turn; plain content ends the loop, tool_calls are resolved through
# the bridge and appended as `role: tool` messages.
begin
  loop do
    response = groq.chat.completions.create(
      model: "llama-3.3-70b-versatile",
      messages: messages,
      tools: bridge.tools,
      # "auto" (the default) lets Groq choose between answering and
      # calling a tool. "required" forces a tool call, and
      # {type: "function", function: {name: "fs__read_file"}} demands a
      # specific one.
      tool_choice: "auto"
    )

    turn = response.choices.first.message
    calls = turn.tool_calls

    if calls.nil? || calls.empty?
      puts "\nassistant: #{turn.content}"
      break
    end

    # Step 5 — append the assistant turn (tool_calls included) to the
    # history, then resolve each call against the bridge.
    messages << {role: "assistant", content: turn.content, tool_calls: calls}

    calls.each do |call|
      fn = call["function"]
      puts "tool call: #{fn["name"]}(#{fn["arguments"]})"

      # bridge.call takes either a Hash or the raw JSON string from
      # `function.arguments`, routes the namespaced name to the owning
      # MCP server, and returns that server's result (typically
      # `{"content" => [...]}`).
      result = bridge.call(fn["name"], fn["arguments"])

      # Tool results must be strings on the wire — JSON-encode the Hash
      # so the model sees structured data on the next turn.
      messages << {role: "tool", tool_call_id: call["id"], content: JSON.generate(result)}
    end
  end
ensure
  # Step 6 — always stop the bridge: kills each MCP server's child
  # process and closes the stdio pipes.
  bridge.stop
end
#!/usr/bin/env ruby
# Demonstrates the full MCP coverage of Bridge:
#   * tools     — surfaced as Groq function tools
#   * resources — surfaced via a synthetic <server>__read_resource tool
#                 (and inventoried in the system prompt so the LLM knows
#                 which URIs exist)
#   * prompts   — surfaced to YOU (the application), not the LLM
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/mcp_resources_and_prompts.rb [DIR]
#
# Requires the official filesystem MCP server. One-time install:
#   npm install -g @modelcontextprotocol/server-filesystem

require "bundler/setup"
require "groq_ruby"
require "json"

ROOT = ARGV[0] || Dir.pwd

filesystem = GroqRuby::MCP::ServerConfig.new(
  name: "fs",
  command: "npx",
  args: ["-y", "@modelcontextprotocol/server-filesystem", ROOT]
)

bridge = GroqRuby::MCP::Bridge.new([filesystem])
groq = GroqRuby::Client.new

# Step 1 — print what each MCP capability exposed.
puts "tools (#{bridge.tools.size}):"
bridge.tools.each { |t| puts " - #{t[:function][:name]}" }
puts
puts "resources (#{bridge.resources.size}):"
bridge.resources.each { |r| puts " - #{r[:resource].uri} (#{r[:resource].name})" }
puts
puts "prompts (#{bridge.prompts.size}):"
bridge.prompts.each { |p| puts " - #{p[:namespaced_name]}: #{p[:prompt].description}" }
puts

# Step 2 — catalogue the resources inside the system prompt so the LLM
# knows which URIs are valid arguments to the synthetic
# `<server>__read_resource` tool that Bridge injects into bridge.tools.
catalogue_lines = bridge.resources.map do |r|
  " - #{r[:resource].uri} — #{r[:resource].name || r[:resource].description}"
end
resource_catalogue = catalogue_lines.join("\n")

system_prompt = <<~PROMPT
  You can use these tools to inspect files in #{ROOT}.

  In addition to the named tools, you can call `fs__read_resource(uri)` to
  fetch any of these resources:
  #{resource_catalogue}
PROMPT

messages = [
  {role: "system", content: system_prompt},
  {role: "user", content: "What's in the README?"}
]

# Step 3 — the agent loop. Bridge.call transparently routes both real
# tool calls and the synthetic `fs__read_resource` calls.
begin
  loop do
    response = groq.chat.completions.create(
      model: "llama-3.3-70b-versatile",
      messages: messages,
      tools: bridge.tools
    )

    turn = response.choices.first.message
    calls = turn.tool_calls

    if calls.nil? || calls.empty?
      puts "\nassistant: #{turn.content}"
      break
    end

    messages << {role: "assistant", content: turn.content, tool_calls: calls}
    calls.each do |call|
      fn = call["function"]
      puts " -> #{fn["name"]}(#{fn["arguments"]})"
      result = bridge.call(fn["name"], fn["arguments"])
      messages << {role: "tool", tool_call_id: call["id"], content: JSON.generate(result)}
    end
  end
ensure
  bridge.stop
end
#!/usr/bin/env ruby
# List the models available on the Groq account.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/models_list.rb

require "bundler/setup"
require "groq_ruby"

client = GroqRuby::Client.new

# One aligned row per model: id, owner, and context window size.
client.models.list.data.each do |model|
  puts format("%-50s owned_by=%-15s context=%s", model.id, model.owned_by, model.context_window)
end
#!/usr/bin/env ruby
# Synthesise speech from text (TTS) and write it to a file.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/speech.rb "Hello, Groq."

require "bundler/setup"
require "groq_ruby"

OUTPUT_PATH = "speech.wav"

text = ARGV.first || "Hello from Groq."

client = GroqRuby::Client.new

# The speech endpoint returns the raw audio bytes directly.
audio_bytes = client.audio.speech.create(
  input: text,
  model: "playai-tts",
  voice: "Aaliyah-PlayAI",
  response_format: "wav"
)

File.binwrite(OUTPUT_PATH, audio_bytes)
puts "wrote #{OUTPUT_PATH} (#{audio_bytes.bytesize} bytes)"
#!/usr/bin/env ruby
# Transcribe a local audio file with Whisper.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/transcription.rb path/to/audio.mp3

require "bundler/setup"
require "groq_ruby"

audio_path = ARGV.first || abort("usage: transcription.rb <audio-file>")

client = GroqRuby::Client.new

# Block form of File.open guarantees the handle is closed after upload.
transcription = File.open(audio_path, "rb") do |audio|
  client.audio.transcriptions.create(
    file: audio,
    filename: File.basename(audio_path),
    model: "whisper-large-v3-turbo",
    response_format: "json"
  )
end

puts transcription.text
#!/usr/bin/env ruby
# Translate non-English speech to English text.
#
# Usage:
#   GROQ_API_KEY=gsk_... bundle exec examples/translation.rb path/to/audio.mp3

require "bundler/setup"
require "groq_ruby"

audio_path = ARGV.first || abort("usage: translation.rb <audio-file>")

client = GroqRuby::Client.new

# Block form of File.open guarantees the handle is closed after upload.
translation = File.open(audio_path, "rb") do |audio|
  client.audio.translations.create(
    file: audio,
    filename: File.basename(audio_path),
    model: "whisper-large-v3"
  )
end

puts translation.text
module GroqRuby
  # User-facing entry point. Holds the immutable {Configuration} and a
  # single {Transport} instance, and lazily constructs each resource on
  # demand. Mirrors the layout of the python SDK's `Groq` class:
  #
  #   client.chat.completions.create(...)
  #   client.embeddings.create(...)
  #   client.audio.transcriptions.create(...)
  #   client.models.list
  #   client.files.create(...)
  #   client.batches.create(...)
  #
  # @example Build from environment
  #   client = GroqRuby::Client.new
  #
  # @example Explicit configuration
  #   client = GroqRuby::Client.new(api_key: "gsk_…", base_url: "https://api.groq.com")
  class Client
    # @return [Configuration] the immutable configuration this client was built with
    attr_reader :configuration

    # @param api_key [String, nil] explicit key (falls back to `GROQ_API_KEY`)
    # @param base_url [String, nil] explicit base URL (falls back to `GROQ_BASE_URL`)
    # @param open_timeout [Numeric, nil] connect-phase timeout in seconds
    # @param read_timeout [Numeric, nil] socket-read timeout in seconds
    # @param user_agent [String, nil] override the default User-Agent header
    # @raise [ConfigurationError] if no api_key is found
    def initialize(api_key: nil, base_url: nil, open_timeout: nil, read_timeout: nil, user_agent: nil)
      @configuration = Configuration.from_env(
        api_key: api_key,
        base_url: base_url,
        open_timeout: open_timeout,
        read_timeout: read_timeout,
        user_agent: user_agent
      )
      @transport = Transport.new(@configuration)
    end

    # @return [Resources::Chat] entry point for `client.chat.completions.create(...)`
    def chat
      @chat ||= build_resource(Resources::Chat)
    end

    # @return [Resources::Embeddings] entry point for `client.embeddings.create(...)`
    def embeddings
      @embeddings ||= build_resource(Resources::Embeddings)
    end

    # @return [Resources::Audio] entry point for `.speech`, `.transcriptions`, `.translations`
    def audio
      @audio ||= build_resource(Resources::Audio)
    end

    # @return [Resources::Models] entry point for `.list`, `.retrieve`, `.delete`
    def models
      @models ||= build_resource(Resources::Models)
    end

    # @return [Resources::Files] entry point for `.create`, `.list`, `.info`, `.content`, `.delete`
    def files
      @files ||= build_resource(Resources::Files)
    end

    # @return [Resources::Batches] entry point for `.create`, `.retrieve`, `.list`, `.cancel`
    def batches
      @batches ||= build_resource(Resources::Batches)
    end

    private

    # Instantiate a resource class bound to this client's shared transport.
    def build_resource(resource_class)
      resource_class.new(@transport)
    end
  end
end
+ end