open_router_enhanced 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. checksums.yaml +7 -0
  2. data/.env.example +1 -0
  3. data/.rspec +3 -0
  4. data/.rubocop.yml +13 -0
  5. data/.rubocop_todo.yml +130 -0
  6. data/.ruby-version +1 -0
  7. data/CHANGELOG.md +41 -0
  8. data/CODE_OF_CONDUCT.md +84 -0
  9. data/CONTRIBUTING.md +384 -0
  10. data/Gemfile +22 -0
  11. data/Gemfile.lock +138 -0
  12. data/LICENSE.txt +21 -0
  13. data/MIGRATION.md +556 -0
  14. data/README.md +1660 -0
  15. data/Rakefile +334 -0
  16. data/SECURITY.md +150 -0
  17. data/VCR_CONFIGURATION.md +80 -0
  18. data/docs/model_selection.md +637 -0
  19. data/docs/observability.md +430 -0
  20. data/docs/prompt_templates.md +422 -0
  21. data/docs/streaming.md +467 -0
  22. data/docs/structured_outputs.md +466 -0
  23. data/docs/tools.md +1016 -0
  24. data/examples/basic_completion.rb +122 -0
  25. data/examples/model_selection_example.rb +141 -0
  26. data/examples/observability_example.rb +199 -0
  27. data/examples/prompt_template_example.rb +184 -0
  28. data/examples/smart_completion_example.rb +89 -0
  29. data/examples/streaming_example.rb +176 -0
  30. data/examples/structured_outputs_example.rb +191 -0
  31. data/examples/tool_calling_example.rb +149 -0
  32. data/lib/open_router/client.rb +552 -0
  33. data/lib/open_router/http.rb +118 -0
  34. data/lib/open_router/json_healer.rb +263 -0
  35. data/lib/open_router/model_registry.rb +378 -0
  36. data/lib/open_router/model_selector.rb +462 -0
  37. data/lib/open_router/prompt_template.rb +290 -0
  38. data/lib/open_router/response.rb +371 -0
  39. data/lib/open_router/schema.rb +288 -0
  40. data/lib/open_router/streaming_client.rb +210 -0
  41. data/lib/open_router/tool.rb +221 -0
  42. data/lib/open_router/tool_call.rb +180 -0
  43. data/lib/open_router/usage_tracker.rb +277 -0
  44. data/lib/open_router/version.rb +5 -0
  45. data/lib/open_router.rb +123 -0
  46. data/sig/open_router.rbs +20 -0
  47. metadata +186 -0
data/examples/smart_completion_example.rb
@@ -0,0 +1,89 @@
+ #!/usr/bin/env ruby
+ # frozen_string_literal: true
+
+ # Example demonstrating the Client's smart completion methods
+ # Run this with: ruby -I lib examples/smart_completion_example.rb
+
+ require "open_router"
+
+ # NOTE: This example shows the interface but won't make real API calls
+ # To test with real API calls, set your OPENROUTER_API_KEY environment variable
+
+ puts "🧠 Smart Completion Examples"
+ puts "=" * 40
+
+ # Create a client
+ client = OpenRouter::Client.new
+
+ # Example 1: Using the select_model helper
+ puts "\n1. Using select_model helper:"
+ selector = client.select_model
+            .optimize_for(:cost)
+            .require(:function_calling)
+            .within_budget(max_cost: 0.01)
+
+ selected_model = selector.choose
+ puts "  Selected model: #{selected_model}"
+
+ # Example 2: Smart completion with requirements
+ puts "\n2. Smart completion interface:"
+ requirements = {
+   capabilities: [:function_calling],
+   max_input_cost: 0.01,
+   providers: {
+     prefer: %w[anthropic openai],
+     avoid: ["google"]
+   }
+ }
+
+ messages = [
+   { role: "user", content: "What is the weather like today?" }
+ ]
+
+ puts "  Requirements: #{requirements}"
+ puts "  Messages: #{messages}"
+
+ # NOTE: This would make a real API call if OPENROUTER_API_KEY is set
+ # For demo purposes, we'll show what model would be selected
+ selector_for_smart = OpenRouter::ModelSelector.new
+                      .optimize_for(:cost)
+                      .require(*requirements[:capabilities])
+                      .within_budget(max_cost: requirements[:max_input_cost])
+                      .prefer_providers(*requirements[:providers][:prefer])
+                      .avoid_providers(*requirements[:providers][:avoid])
+
+ smart_model = selector_for_smart.choose
+ puts "  Would use model: #{smart_model}"
+
+ # Example 3: Smart completion with fallback
+ puts "\n3. Smart completion with fallback:"
+ fallback_requirements = {
+   capabilities: %i[function_calling vision],
+   max_input_cost: 0.005, # Very restrictive budget
+   min_context_length: 100_000
+ }
+
+ fallback_selector = OpenRouter::ModelSelector.new
+                     .optimize_for(:cost)
+                     .require(*fallback_requirements[:capabilities])
+                     .within_budget(max_cost: fallback_requirements[:max_input_cost])
+                     .min_context(fallback_requirements[:min_context_length])
+
+ fallback_models = fallback_selector.choose_with_fallbacks(limit: 3)
+ puts "  Fallback candidates: #{fallback_models}"
+
+ # Example 4: Demonstrating graceful degradation
+ puts "\n4. Graceful degradation example:"
+ degradation_model = fallback_selector.choose_with_fallback
+ puts "  Graceful fallback selected: #{degradation_model}"
+
+ # Example 5: Show the actual method signatures
+ puts "\n5. Available Client methods:"
+ puts "  - client.select_model() -> ModelSelector"
+ puts "  - client.smart_complete(messages, requirements:, optimization:)"
+ puts "  - client.smart_complete_with_fallback(messages, requirements:, max_retries:)"
+
+ puts "\n✅ Smart completion examples completed!"
+ puts "\n💡 To test with real API calls:"
+ puts "  export OPENROUTER_API_KEY=your_key_here"
+ puts "  ruby -I lib examples/smart_completion_example.rb"
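
The Client methods listed in Example 5 are printed but never exercised in this file. The sketch below shows how they would plausibly be invoked, inferred only from the printed signatures; the optimization: and max_retries: values and the use of response.content are illustrative assumptions, not verified against the gem's implementation.

    # Hypothetical usage, assuming the signatures printed in Example 5.
    # `messages` and `requirements` are the objects defined earlier in this example.
    response = client.smart_complete(
      messages,
      requirements: requirements,
      optimization: :cost
    )
    puts response.content

    # Same caveat applies; the max_retries value here is illustrative only.
    fallback_response = client.smart_complete_with_fallback(
      messages,
      requirements: requirements,
      max_retries: 2
    )
    puts fallback_response.content
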
data/examples/streaming_example.rb
@@ -0,0 +1,176 @@
+ # frozen_string_literal: true
+
+ require "open_router"
+
+ # Streaming example using OpenRouter Enhanced gem
+ #
+ # This example demonstrates:
+ # - Streaming responses
+ # - Real-time token processing
+ # - Callback-based streaming
+ # - Response accumulation
+
+ # Configure the client
+ OpenRouter.configure do |config|
+   config.access_token = ENV["OPENROUTER_API_KEY"]
+   config.site_name = "Streaming Example"
+   config.site_url = "https://github.com/yourusername/open_router_enhanced"
+ end
+
+ # Initialize streaming client
+ client = OpenRouter::StreamingClient.new
+
+ puts "=" * 60
+ puts "Streaming Example"
+ puts "=" * 60
+
+ # Example 1: Basic streaming with block
+ puts "\n1. Basic Streaming"
+ puts "-" * 60
+ puts "Assistant: "
+
+ messages = [
+   { role: "user", content: "Count from 1 to 10, saying each number on a new line." }
+ ]
+
+ accumulated = ""
+ client.stream(messages, model: "openai/gpt-4o-mini") do |chunk|
+   content = chunk.dig("choices", 0, "delta", "content")
+   if content
+     print content
+     accumulated += content
+   end
+ end
+
+ puts "\n\nFull response:\n#{accumulated}"
+
+ # Example 2: Streaming with callbacks
+ puts "\n2. Streaming with Callbacks"
+ puts "-" * 60
+
+ full_response = ""
+
+ client.on(:stream_start) do
+   puts "Stream started..."
+ end
+
+ client.on(:stream_chunk) do |chunk|
+   content = chunk.dig("choices", 0, "delta", "content")
+   if content
+     print content
+     full_response += content
+   end
+ end
+
+ client.on(:stream_end) do |response|
+   puts "\n\nStream completed!"
+   puts "Total tokens: #{response.usage&.dig("total_tokens") || "N/A"}"
+ end
+
+ client.on(:stream_error) do |error|
+   puts "\nError during streaming: #{error}"
+ end
+
+ puts "\nAssistant: "
+ client.stream(
+   [{ role: "user", content: "Write a haiku about coding" }],
+   model: "anthropic/claude-3-haiku"
+ )
+
+ # Example 3: Streaming with tool calls
+ puts "\n\n3. Streaming with Tool Calls"
+ puts "-" * 60
+
+ tools = [
+   OpenRouter::Tool.define do
+     name "get_weather"
+     description "Get current weather for a location"
+     parameters do
+       string :location, required: true, description: "City name"
+       string :units, enum: %w[celsius fahrenheit], description: "Temperature units"
+     end
+   end
+ ]
+
+ puts "Requesting weather..."
+ client.stream(
+   [{ role: "user", content: "What's the weather in Tokyo?" }],
+   model: "anthropic/claude-3.5-sonnet",
+   tools: tools
+ ) do |chunk|
+   # Handle tool calls in streaming
+   tool_calls = chunk.dig("choices", 0, "delta", "tool_calls")
+   tool_calls&.each do |tc|
+     puts "Tool call detected: #{tc.dig("function", "name")}"
+   end
+
+   # Handle content
+   content = chunk.dig("choices", 0, "delta", "content")
+   print content if content
+ end
+
+ # Example 4: Streaming with metadata collection
+ puts "\n\n4. Streaming with Metadata Collection"
+ puts "-" * 60
+
+ metadata = {
+   chunks_received: 0,
+   total_content_length: 0,
+   start_time: Time.now,
+   finish_reason: nil
+ }
+
+ accumulated_response = ""
+
+ client.stream(
+   [{ role: "user", content: "Explain async/await in JavaScript in one paragraph" }],
+   model: "openai/gpt-4o-mini"
+ ) do |chunk|
+   metadata[:chunks_received] += 1
+
+   content = chunk.dig("choices", 0, "delta", "content")
+   if content
+     metadata[:total_content_length] += content.length
+     accumulated_response += content
+     print content
+   end
+
+   finish_reason = chunk.dig("choices", 0, "finish_reason")
+   metadata[:finish_reason] = finish_reason if finish_reason
+ end
+
+ metadata[:end_time] = Time.now
+ metadata[:duration] = metadata[:end_time] - metadata[:start_time]
+
+ puts "\n\nMetadata:"
+ puts "  Chunks received: #{metadata[:chunks_received]}"
+ puts "  Content length: #{metadata[:total_content_length]} characters"
+ puts "  Duration: #{metadata[:duration].round(2)} seconds"
+ puts "  Finish reason: #{metadata[:finish_reason]}"
+
+ # Example 5: Streaming long-form content
+ puts "\n\n5. Streaming Long-Form Content"
+ puts "-" * 60
+
+ puts "Generating story..."
+ print "\n"
+
+ client.stream(
+   [
+     { role: "system", content: "You are a creative storyteller." },
+     { role: "user", content: "Write a very short 2-sentence story about a robot learning to paint." }
+   ],
+   model: "anthropic/claude-3-haiku",
+   extras: { max_tokens: 200 }
+ ) do |chunk|
+   content = chunk.dig("choices", 0, "delta", "content")
+   print content if content
+   $stdout.flush # Ensure immediate output
+ end
+
+ puts "\n\n#{"=" * 60}"
+ puts "Streaming examples completed!"
+ puts "=" * 60
+
+ # Clean up callbacks
+ client.callbacks.clear
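
Example 3 above only prints the tool name from each streamed delta. In practice, tool-call arguments arrive as string fragments spread across many chunks, so a handler has to buffer them per tool-call index before parsing. A minimal sketch, assuming the same OpenAI-style delta layout the example already reads (the "index" key and the fragment accumulation are assumptions based on that format; the buffering is not provided by the gem):

    require "json"

    pending_tool_calls = Hash.new { |h, k| h[k] = { "name" => nil, "arguments" => +"" } }

    client.stream(
      [{ role: "user", content: "What's the weather in Tokyo?" }],
      model: "anthropic/claude-3.5-sonnet",
      tools: tools
    ) do |chunk|
      deltas = chunk.dig("choices", 0, "delta", "tool_calls") || []
      deltas.each do |tc|
        entry = pending_tool_calls[tc["index"] || 0]
        entry["name"] ||= tc.dig("function", "name")
        entry["arguments"] << (tc.dig("function", "arguments") || "")
      end
    end

    pending_tool_calls.each_value do |call|
      # Argument fragments only form valid JSON once the stream has finished.
      args = begin
        JSON.parse(call["arguments"])
      rescue JSON::ParserError
        {}
      end
      puts "#{call["name"]}: #{args.inspect}"
    end
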
data/examples/structured_outputs_example.rb
@@ -0,0 +1,191 @@
+ #!/usr/bin/env ruby
+ # frozen_string_literal: true
+
+ require "bundler/setup"
+ require "open_router"
+
+ # Configure the client
+ OpenRouter.configure do |config|
+   config.access_token = ENV["OPENROUTER_API_KEY"]
+   config.site_name = "OpenRouter Ruby Gem Example"
+   config.site_url = "https://github.com/OlympiaAI/open_router"
+ end
+
+ client = OpenRouter::Client.new # referenced by the commented-out API call in Example 5 below
+
+ # Example 1: Simple structured output
+ puts "=== Example 1: Simple Weather Schema ==="
+
+ weather_schema = OpenRouter::Schema.define("weather") do
+   strict true
+
+   string :location, required: true, description: "City or location name"
+   number :temperature, required: true, description: "Temperature in Celsius"
+   string :conditions, required: true, description: "Weather conditions"
+   string :humidity, description: "Humidity percentage"
+
+   no_additional_properties
+ end
+
+ puts "Schema definition:"
+ puts weather_schema.to_json
+
+ # Example 2: Complex nested schema
+ puts "\n=== Example 2: Complex User Profile Schema ==="
+
+ user_schema = OpenRouter::Schema.define("user_profile") do
+   string :name, required: true, description: "Full name"
+   integer :age, required: true, minimum: 0, maximum: 150
+   string :email, required: true, description: "Email address"
+
+   object :address, required: true do
+     string :street, required: true
+     string :city, required: true
+     string :state, required: true
+     string :zip_code, required: true
+   end
+
+   array :hobbies do
+     string description: "A hobby or interest"
+   end
+
+   object :preferences do
+     boolean :newsletter, description: "Wants to receive newsletter"
+     string :theme, description: "UI theme preference"
+   end
+ end
+
+ puts "User schema:"
+ puts JSON.pretty_generate(user_schema.to_h)
+
+ # Example 3: Using schemas with API calls
+ puts "\n=== Example 3: Structured Output API Call ==="
+
+ # Simulate a structured response
+ mock_weather_response = {
+   "id" => "chatcmpl-123",
+   "choices" => [
+     {
+       "message" => {
+         "role" => "assistant",
+         "content" => '{"location": "San Francisco", "temperature": 22, "conditions": "Partly cloudy", "humidity": "65%"}'
+       }
+     }
+   ]
+ }
+
+ response = OpenRouter::Response.new(mock_weather_response, response_format: weather_schema)
+ puts "Parsed structured output:"
+ puts response.structured_output.inspect
+
+ # Check if output is valid
+ if response.valid_structured_output?
+   puts "✅ Output is valid according to schema"
+ else
+   puts "❌ Output validation failed:"
+   puts response.validation_errors
+ end
+
+ # Example 4: Working with different response formats
+ puts "\n=== Example 4: Different Response Format Styles ==="
+
+ # Style 1: Schema object directly
+ format1 = weather_schema
+
+ # Style 2: Hash with schema object
+ format2 = {
+   type: "json_schema",
+   json_schema: weather_schema
+ }
+
+ # Style 3: Raw hash format
+ format3 = {
+   type: "json_schema",
+   json_schema: {
+     name: "simple_weather",
+     strict: true,
+     schema: {
+       type: "object",
+       properties: {
+         temp: { type: "number" },
+         desc: { type: "string" }
+       },
+       required: %w[temp desc],
+       additionalProperties: false
+     }
+   }
+ }
+
+ puts "All three formats are supported:"
+ puts "1. Direct schema object: #{format1.class}"
+ puts "2. Hash with schema object: #{format2[:json_schema].class}"
+ puts "3. Raw hash format: #{format3[:json_schema].class}"
+
+ # Example 5: Real API call example (commented out)
+ puts "\n=== Example 5: Real API Usage ==="
+
+ # # Uncomment to make a real API call
+ # begin
+ #   response = client.complete(
+ #     [{ role: "user", content: "What's the weather like in Tokyo right now?" }],
+ #     model: "openai/gpt-4o",
+ #     response_format: weather_schema
+ #   )
+ #
+ #   if response.structured_output
+ #     weather = response.structured_output
+ #     puts "Location: #{weather['location']}"
+ #     puts "Temperature: #{weather['temperature']}°C"
+ #     puts "Conditions: #{weather['conditions']}"
+ #     puts "Humidity: #{weather['humidity']}" if weather['humidity']
+ #
+ #     if response.valid_structured_output?
+ #       puts "✅ Response validates against schema"
+ #     else
+ #       puts "❌ Validation errors:"
+ #       response.validation_errors.each { |error| puts "  - #{error}" }
+ #     end
+ #   end
+ #
+ # rescue OpenRouter::ServerError => e
+ #   puts "API Error: #{e.message}"
+ # rescue OpenRouter::StructuredOutputError => e
+ #   puts "Structured Output Error: #{e.message}"
+ # rescue => e
+ #   puts "Unexpected error: #{e.message}"
+ # end
+
+ puts "\n(Structured outputs example complete - uncomment the API call section to test with real API)"
+
+ # Example 6: Schema validation demonstration
+ puts "\n=== Example 6: Schema Validation Demo ==="
+
+ if weather_schema.validation_available?
+   puts "JSON Schema validation is available"
+
+   # Valid data
+   valid_data = {
+     "location" => "London",
+     "temperature" => 18,
+     "conditions" => "Rainy"
+   }
+
+   # Invalid data
+   invalid_data = {
+     "location" => "London",
+     "temperature" => "eighteen", # Should be number
+     "conditions" => "Rainy"
+   }
+
+   puts "Valid data validation: #{weather_schema.validate(valid_data)}"
+   puts "Invalid data validation: #{weather_schema.validate(invalid_data)}"
+
+   unless weather_schema.validate(invalid_data)
+     puts "Validation errors for invalid data:"
+     weather_schema.validation_errors(invalid_data).each do |error|
+       puts "  - #{error}"
+     end
+   end
+ else
+   puts "JSON Schema validation not available (install json-schema gem for validation)"
+ end
data/examples/tool_calling_example.rb
@@ -0,0 +1,149 @@
+ #!/usr/bin/env ruby
+ # frozen_string_literal: true
+
+ require "bundler/setup"
+ require "open_router"
+
+ # Configure the client
+ OpenRouter.configure do |config|
+   config.access_token = ENV["OPENROUTER_API_KEY"]
+   config.site_name = "OpenRouter Ruby Gem Example"
+   config.site_url = "https://github.com/OlympiaAI/open_router"
+ end
+
+ client = OpenRouter::Client.new # referenced by the commented-out API calls in Example 3 below
+
+ # Example 1: Define a tool using the DSL
+ puts "=== Example 1: Tool Definition with DSL ==="
+
+ search_tool = OpenRouter::Tool.define do
+   name "search_gutenberg_books"
+   description "Search for books in the Project Gutenberg library"
+
+   parameters do
+     array :search_terms, required: true do
+       string description: "Search term for finding books"
+     end
+     integer :max_results, description: "Maximum number of results to return"
+   end
+ end
+
+ puts "Tool definition:"
+ puts search_tool.to_json
+
+ # Example 2: Define a tool using hash format
+ puts "\n=== Example 2: Tool Definition with Hash ==="
+
+ weather_tool = OpenRouter::Tool.new({
+   name: "get_weather",
+   description: "Get current weather for a location",
+   parameters: {
+     type: "object",
+     properties: {
+       location: {
+         type: "string",
+         description: "City and state, e.g. San Francisco, CA"
+       },
+       unit: {
+         type: "string",
+         enum: %w[celsius fahrenheit],
+         description: "Temperature unit"
+       }
+     },
+     required: ["location"]
+   }
+ })
+
+ puts "Tool definition:"
+ puts weather_tool.to_json
+
+ # Example 3: Tool calling conversation
+ puts "\n=== Example 3: Tool Calling Conversation ==="
+
+ def simulate_search(_search_terms, max_results = 10)
+   # Simulate a search function
+   results = [
+     { title: "Programming Ruby", author: "Dave Thomas", year: 2004 },
+     { title: "The Ruby Programming Language", author: "David Flanagan", year: 2008 }
+   ]
+
+   results.first(max_results).to_json
+ end
+
+ def simulate_weather(location, unit = "fahrenheit")
+   # Simulate a weather API call
+   {
+     location:,
+     temperature: unit == "celsius" ? 22 : 72,
+     conditions: "Sunny",
+     unit:
+   }.to_json
+ end
+
+ # Initial message
+ messages = [
+   { role: "user", content: "Can you search for Ruby programming books and also tell me the weather in San Francisco?" }
+ ]
+
+ puts "User: #{messages.first[:content]}"
+
+ # Uncomment the following lines to make a real API call:
+ # begin
+ #   # Make the tool call request
+ #   response = client.complete(
+ #     messages,
+ #     model: "anthropic/claude-3.5-sonnet",
+ #     tools: [search_tool, weather_tool],
+ #     tool_choice: "auto"
+ #   )
+ #
+ #   puts "\nAssistant response:"
+ #   puts response.content if response.has_content?
+ #
+ #   # Handle tool calls
+ #   if response.has_tool_calls?
+ #     puts "\nTool calls requested:"
+ #
+ #     # Add the assistant's message to conversation
+ #     messages << response.to_message
+ #
+ #     # Execute each tool call
+ #     response.tool_calls.each do |tool_call|
+ #       puts "- #{tool_call.name} with arguments: #{tool_call.arguments}"
+ #
+ #       # Execute the tool based on its name
+ #       result = case tool_call.name
+ #                when "search_gutenberg_books"
+ #                  args = tool_call.arguments
+ #                  simulate_search(args["search_terms"], args["max_results"] || 10)
+ #                when "get_weather"
+ #                  args = tool_call.arguments
+ #                  simulate_weather(args["location"], args["unit"])
+ #                else
+ #                  "Unknown tool: #{tool_call.name}"
+ #                end
+ #
+ #       puts "  Result: #{result}"
+ #
+ #       # Add tool result to conversation
+ #       messages << tool_call.to_result_message(result)
+ #     end
+ #
+ #     # Get final response with tool results
+ #     final_response = client.complete(
+ #       messages,
+ #       model: "anthropic/claude-3.5-sonnet",
+ #       tools: [search_tool, weather_tool]
+ #     )
+ #
+ #     puts "\nFinal assistant response:"
+ #     puts final_response.content
+ #   end
+ #
+ # rescue OpenRouter::ServerError => e
+ #   puts "Error: #{e.message}"
+ # rescue => e
+ #   puts "Unexpected error: #{e.message}"
+ # end
+
+ puts "\n(Tool calling example complete - uncomment the API call section to test with real API)"