sentry-agents 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +26 -0
- data/LICENSE +21 -0
- data/README.md +208 -0
- data/lib/sentry/agents/configuration.rb +48 -0
- data/lib/sentry/agents/instrumentation.rb +234 -0
- data/lib/sentry/agents/serializer.rb +70 -0
- data/lib/sentry/agents/span_builder.rb +125 -0
- data/lib/sentry/agents/version.rb +7 -0
- data/lib/sentry/agents.rb +75 -0
- data/lib/sentry-agents.rb +3 -0
- metadata +127 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA256:
  metadata.gz: 7fa4b4fcf3ca85a7404b987df6ee161819a05375a4ec0045ad02fc5e221ca394
  data.tar.gz: 84a5ebb7522b169f6e0018df6095c8868fe4305c0e3d1ac91b706995bd63b9c1
SHA512:
  metadata.gz: c4ee05c5e3cac969d620e9379b25ed4b6b657dbfefb7b6263374974c60ef46ee032974a1f8ba2c34b5d86d765222e8df534e1d83c5126f5fd63d84b3cf27c36d
  data.tar.gz: ffc74b1b22eee48374460672570cdb945ee54cb74acca1473126412a4627dc83daf636d23b2af5444dc722a0fa9d6fb09f1ddfa703a6ef785995e68bec6a2048
data/CHANGELOG.md
ADDED
@@ -0,0 +1,26 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.1.0] - 2025-12-15

### Added

- Initial release
- Core `Sentry::Agents::Instrumentation` module with span helpers:
  - `with_agent_span` - Wrap agent invocations
  - `with_chat_span` - Wrap LLM chat API calls
  - `with_tool_span` - Wrap tool/function executions
  - `with_handoff_span` - Track stage transitions
- Configuration system with:
  - Configurable default LLM system/provider
  - Max string length for serialization
  - Custom data filtering hooks
  - Debug mode
- `SpanBuilder` helper class for consistent span creation
- `Serializer` utility for data serialization and truncation
- Graceful degradation when Sentry is not available
- Full backward compatibility with `SwiftTail::AiAgentInstrumentation`
data/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 SwiftTail

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,208 @@
# Sentry Agents

Sentry Gen AI instrumentation for AI/LLM agents in Ruby applications.

Provides [Sentry's AI Agents monitoring](https://docs.sentry.io/platforms/python/tracing/instrumentation/custom-instrumentation/ai-agents-module/) capabilities for Ruby, supporting multiple LLM providers (Anthropic, OpenAI, Cohere, Google Gemini, etc.).

## Installation

Add this line to your application's Gemfile:

```ruby
gem 'sentry-agents'
```

And then execute:

```bash
bundle install
```

Or install it yourself as:

```bash
gem install sentry-agents
```

## Requirements

- Ruby >= 3.1.0
- sentry-ruby >= 5.0.0
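
Spans are only recorded when `sentry-ruby` is initialized with tracing enabled and a transaction is active; otherwise the helpers simply run your block (see Graceful Degradation below). A minimal setup might look like the following sketch, where the DSN and sample rate are placeholders:

```ruby
require "sentry-ruby"

Sentry.init do |config|
  config.dsn = ENV["SENTRY_DSN"]
  # Tracing must be enabled for gen_ai spans to be captured
  config.traces_sample_rate = 1.0
end
```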

## Configuration

```ruby
Sentry::Agents.configure do |config|
  # Default LLM provider (default: "anthropic")
  config.default_system = "anthropic"

  # Maximum string length for serialized data (default: 1000)
  config.max_string_length = 1000

  # Enable debug logging (default: false)
  config.debug = false

  # Custom data filtering (optional)
  config.data_filter = ->(data) do
    # Remove sensitive keys in production
    data.delete("gen_ai.request.messages") if ENV["SENTRY_SKIP_MESSAGES"]
    data
  end
end
```
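
Configuration is global, so test suites that change it can restore the defaults with `Sentry::Agents.reset_configuration!`. A sketch using Minitest (the framework this gem develops against); the test class name is illustrative:

```ruby
require "minitest/autorun"

class AgentConfigurationTest < Minitest::Test
  def teardown
    # Restore default_system, max_string_length, debug, etc. between tests
    Sentry::Agents.reset_configuration!
  end
end
```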

## Usage

### Manual Instrumentation

Include the `Sentry::Agents::Instrumentation` module in any class:

```ruby
class MyAgent
  include Sentry::Agents::Instrumentation

  def process_request(user_message)
    with_agent_span(agent_name: "MyAgent", model: "claude-3-5-sonnet") do
      # Get LLM response
      response = with_chat_span(model: "claude-3-5-sonnet") do
        client.messages.create(
          model: "claude-3-5-sonnet-20241022",
          messages: [{ role: "user", content: user_message }]
        )
      end

      # Execute tool if needed
      if response.stop_reason == "tool_use"
        with_tool_span(
          tool_name: "search",
          tool_input: { query: response.tool_input["query"] }
        ) do
          search_service.search(response.tool_input["query"])
        end
      end

      # Track stage transition
      with_handoff_span(from_stage: "processing", to_stage: "complete") do
        update_status!(:complete)
      end

      response
    end
  end
end
```
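
The helpers attach child spans to the current Sentry span, so inside an already-instrumented web request they nest under the request transaction automatically. In a background job or standalone script there may be no active span; one option (names below are illustrative) is to start a transaction around the agent call yourself:

```ruby
transaction = Sentry.start_transaction(op: "gen_ai.agent", name: "MyAgent#process_request")
Sentry.get_current_scope.set_span(transaction) if transaction

begin
  MyAgent.new.process_request("Find me a flight to NYC")
ensure
  # Finish the transaction so the nested gen_ai spans are sent
  transaction&.finish
end
```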

### Custom Provider Override

Override the default provider on a per-span basis:

```ruby
class OpenAIAgent
  include Sentry::Agents::Instrumentation

  def process(message)
    with_chat_span(model: "gpt-4", system: "openai") do
      openai_client.chat(model: "gpt-4", messages: [message])
    end
  end
end
```

## Span Types

### Agent Invocation (`gen_ai.invoke_agent`)

Wraps the overall agent execution lifecycle.

```ruby
with_agent_span(agent_name: "Emily", model: "claude-3-5-sonnet") do
  # Full agent conversation logic
end
```

### Chat Completion (`gen_ai.chat`)

Wraps individual LLM API calls. Automatically captures:
- Token usage (input/output tokens)
- Response text

```ruby
with_chat_span(model: "claude-3-5-sonnet", messages: conversation_history) do
  llm_client.chat(messages)
end
```
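
Token usage and response text are read from the block's return value via `input_tokens`, `output_tokens`, and `content`, so clients that return plain hashes can be wrapped in a small value object. A sketch, where the HTTP client and response shape are hypothetical:

```ruby
ChatResult = Struct.new(:content, :input_tokens, :output_tokens, keyword_init: true)

with_chat_span(model: "claude-3-5-sonnet") do
  raw = http_client.post("/v1/messages", body: request_payload) # hypothetical client call
  ChatResult.new(
    content: raw["content"],
    input_tokens: raw.dig("usage", "input_tokens"),
    output_tokens: raw.dig("usage", "output_tokens")
  )
end
```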

### Tool Execution (`gen_ai.execute_tool`)

Wraps tool/function executions. Captures:
- Tool name
- Tool input
- Tool output

```ruby
with_tool_span(tool_name: "weather_lookup", tool_input: { city: "NYC" }) do
  weather_api.get_forecast("NYC")
end
```
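
The block's return value is recorded as `gen_ai.tool.output` after passing through `Serializer.serialize`: hashes and arrays are JSON-encoded, other objects are converted with `to_s`, and anything longer than `max_string_length` is truncated. A sketch of what ends up on the span for a hash-returning tool:

```ruby
result = with_tool_span(tool_name: "weather_lookup", tool_input: { city: "NYC" }) do
  { "conditions" => "sunny", "high_f" => 75 }
end

# The span's gen_ai.tool.output attribute holds the JSON string
# '{"conditions":"sunny","high_f":75}', while `result` is still the
# original Hash for your own code to use.
result["high_f"] # => 75
```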

### Handoff (`gen_ai.handoff`)

Tracks stage transitions or agent handoffs.

```ruby
with_handoff_span(from_stage: "greeting", to_stage: "qualification") do
  update_conversation_stage!
end
```

## Graceful Degradation

All instrumentation methods gracefully degrade when Sentry is not available or tracing is disabled. Your code will continue to work normally without any errors.

```ruby
# Works fine even without Sentry initialized
with_chat_span(model: "claude-3-5-sonnet") do
  llm_client.chat(messages) # Still executes, just without tracing
end
```

## Development

After checking out the repo, run:

```bash
bundle install
rake test     # Run tests
rake rubocop  # Run linter
rake          # Run both
```

## Releasing

Releases are automated via GitHub Actions. To publish a new version:

1. Update the version in `lib/sentry/agents/version.rb`
2. Update `CHANGELOG.md` with the new version's changes
3. Commit the changes:
   ```bash
   git add -A && git commit -m "Bump version to X.Y.Z"
   ```
4. Create and push a version tag:
   ```bash
   git tag vX.Y.Z
   git push origin main --tags
   ```

The release workflow will automatically:
- Run the test suite
- Build the gem
- Publish to RubyGems
- Create a GitHub Release with auto-generated release notes

## Contributing

Bug reports and pull requests are welcome on GitHub at https://github.com/sentry-agents/sentry-agents-ruby.

## License

The gem is available as open source under the terms of the [MIT License](LICENSE).
data/lib/sentry/agents/configuration.rb
ADDED
@@ -0,0 +1,48 @@
# frozen_string_literal: true

module Sentry
  module Agents
    # Configuration class for sentry-agents gem
    #
    # @example Basic configuration
    #   Sentry::Agents.configure do |config|
    #     config.default_system = "anthropic"
    #     config.max_string_length = 2000
    #   end
    #
    class Configuration
      # Default LLM provider system name (e.g., "anthropic", "openai", "cohere")
      # @return [String]
      attr_accessor :default_system

      # Enable auto-instrumentation for RubyLLM gem
      # @return [Boolean]
      attr_accessor :auto_instrument_ruby_llm

      # Enable auto-instrumentation for LangChain.rb gem
      # @return [Boolean]
      attr_accessor :auto_instrument_langchainrb

      # Maximum length for serialized strings in span attributes
      # @return [Integer]
      attr_accessor :max_string_length

      # Enable debug logging
      # @return [Boolean]
      attr_accessor :debug

      # Custom data filter hook for sanitizing span data
      # @return [Proc, nil]
      attr_accessor :data_filter

      def initialize
        @default_system = "anthropic"
        @auto_instrument_ruby_llm = false
        @auto_instrument_langchainrb = false
        @max_string_length = 1000
        @debug = false
        @data_filter = nil
      end
    end
  end
end
data/lib/sentry/agents/instrumentation.rb
ADDED
@@ -0,0 +1,234 @@
# frozen_string_literal: true

module Sentry
  module Agents
    # Core instrumentation module that provides span helper methods
    #
    # Include this module in any class that needs to create Sentry Gen AI spans.
    # All methods are designed to gracefully degrade when Sentry is not available.
    #
    # @example Basic usage
    #   class MyAgent
    #     include Sentry::Agents::Instrumentation
    #
    #     def process(message)
    #       with_agent_span(agent_name: "MyAgent", model: "claude-3-5-sonnet") do
    #         with_chat_span(model: "claude-3-5-sonnet") do
    #           llm_client.chat(message)
    #         end
    #       end
    #     end
    #   end
    #
    module Instrumentation
      # Wrap an agent invocation (e.g., full conversation lifecycle)
      #
      # Creates a gen_ai.invoke_agent span that captures the overall agent execution.
      # Token usage is automatically captured if the block result responds to
      # :input_tokens and :output_tokens.
      #
      # @param agent_name [String] name of the agent (e.g., "Emily", "CustomerService")
      # @param model [String] LLM model identifier (e.g., "claude-3-5-sonnet")
      # @param system [String, nil] override default LLM provider system name
      # @yield the agent logic
      # @return [Object] the block result
      #
      # @example
      #   with_agent_span(agent_name: "Emily", model: "claude-3-5-sonnet") do
      #     process_conversation
      #   end
      #
      def with_agent_span(agent_name:, model:, system: nil)
        return yield unless sentry_tracing_available?

        SpanBuilder.build(
          operation: :invoke_agent,
          description: "invoke_agent #{agent_name}",
          attributes: {
            "gen_ai.operation.name" => "invoke_agent",
            "gen_ai.system" => system_name(system),
            "gen_ai.request.model" => model,
            "gen_ai.agent.name" => agent_name
          }
        ) do |span|
          result = yield
          capture_token_usage(span, result)
          result
        end
      end

      # Wrap an LLM chat API call
      #
      # Creates a gen_ai.chat span that captures a single LLM API call.
      # Automatically captures token usage and response text if available.
      #
      # @param model [String] LLM model identifier
      # @param messages [Array<Hash>, nil] optional message array for the request
      # @param system [String, nil] override default LLM provider system name
      # @yield the LLM call
      # @return [Object] the block result (should respond to :input_tokens, :output_tokens, :content)
      #
      # @example
      #   with_chat_span(model: "claude-3-5-sonnet", messages: conversation_history) do
      #     llm_client.chat(messages)
      #   end
      #
      def with_chat_span(model:, messages: nil, system: nil)
        return yield unless sentry_tracing_available?

        attributes = {
          "gen_ai.operation.name" => "chat",
          "gen_ai.system" => system_name(system),
          "gen_ai.request.model" => model
        }

        attributes["gen_ai.request.messages"] = Serializer.serialize(messages) if messages

        SpanBuilder.build(
          operation: :chat,
          description: "chat #{model}",
          attributes: attributes
        ) do |span|
          result = yield
          capture_token_usage(span, result)
          capture_response_text(span, result)
          result
        end
      end

      # Wrap a tool/function execution
      #
      # Creates a gen_ai.execute_tool span that captures tool execution.
      # The tool output is automatically captured from the block result.
      #
      # @param tool_name [String] name of the tool being executed
      # @param tool_input [Hash, String, nil] tool input parameters
      # @param system [String, nil] override default LLM provider system name
      # @yield the tool execution
      # @return [Object] the block result
      #
      # @example
      #   with_tool_span(tool_name: "search", tool_input: { query: "flights" }) do
      #     search_service.search("flights")
      #   end
      #
      def with_tool_span(tool_name:, tool_input: nil, system: nil)
        return yield unless sentry_tracing_available?

        attributes = {
          "gen_ai.operation.name" => "execute_tool",
          "gen_ai.system" => system_name(system),
          "gen_ai.tool.name" => tool_name
        }

        attributes["gen_ai.tool.input"] = Serializer.serialize(tool_input) if tool_input

        SpanBuilder.build(
          operation: :execute_tool,
          description: "execute_tool #{tool_name}",
          attributes: attributes
        ) do |span|
          result = yield
          capture_tool_output(span, result)
          result
        end
      end

      # Track agent stage transitions or handoffs
      #
      # Creates a gen_ai.handoff span that captures transitions between
      # stages or handoffs between agents.
      #
      # @param from_stage [String] source stage/agent
      # @param to_stage [String] destination stage/agent
      # @param system [String, nil] override default LLM provider system name
      # @yield the transition logic
      # @return [Object] the block result
      #
      # @example
      #   with_handoff_span(from_stage: "greeting", to_stage: "qualification") do
      #     update_conversation_stage!
      #   end
      #
      def with_handoff_span(from_stage:, to_stage:, system: nil)
        return yield unless sentry_tracing_available?

        SpanBuilder.build(
          operation: :handoff,
          description: "handoff from #{from_stage} to #{to_stage}",
          attributes: {
            "gen_ai.operation.name" => "handoff",
            "gen_ai.system" => system_name(system),
            "gen_ai.handoff.from" => from_stage,
            "gen_ai.handoff.to" => to_stage
          }
        ) do |_span|
          yield
        end
      end

      private

      # Check if Sentry tracing is available
      #
      # @return [Boolean]
      #
      def sentry_tracing_available?
        SpanBuilder.sentry_available?
      end

      # Get system name (provider) from config or override
      #
      # @param override [String, nil]
      # @return [String]
      #
      def system_name(override = nil)
        override || Sentry::Agents.configuration.default_system
      end

      # Capture token usage from result if available
      # Optimized to minimize method calls in hot paths
      #
      # @param span [Sentry::Span, nil]
      # @param result [Object]
      # @return [void]
      #
      def capture_token_usage(span, result)
        return unless span && result

        # Cache values to avoid repeated method lookups
        input_tokens = result.input_tokens if result.respond_to?(:input_tokens)
        output_tokens = result.output_tokens if result.respond_to?(:output_tokens)

        span.set_data("gen_ai.usage.input_tokens", input_tokens) if input_tokens
        span.set_data("gen_ai.usage.output_tokens", output_tokens) if output_tokens
      end

      # Capture response text from result if available
      #
      # @param span [Sentry::Span, nil]
      # @param result [Object]
      # @return [void]
      #
      def capture_response_text(span, result)
        return unless span && result
        return unless result.respond_to?(:content) && result.content

        # Sentry expects response.text as JSON array
        span.set_data("gen_ai.response.text", [result.content].to_json)
      end

      # Capture tool output in span
      #
      # @param span [Sentry::Span, nil]
      # @param result [Object]
      # @return [void]
      #
      def capture_tool_output(span, result)
        return unless span && result

        span.set_data("gen_ai.tool.output", Serializer.serialize(result))
      end
    end
  end
end
data/lib/sentry/agents/serializer.rb
ADDED
@@ -0,0 +1,70 @@
# frozen_string_literal: true

require "json"

module Sentry
  module Agents
    # Handles data serialization for span attributes
    #
    # Provides utilities for converting various data types to strings
    # suitable for Sentry span attributes, with truncation and filtering.
    #
    class Serializer
      class << self
        # Serialize a value for use in span attributes
        #
        # @param value [Object] the value to serialize
        # @param max_length [Integer, nil] maximum length for the result
        # @return [String, nil] the serialized value
        #
        # @example
        #   Serializer.serialize({ key: "value" })
        #   # => '{"key":"value"}'
        #
        #   Serializer.serialize("a" * 2000, max_length: 100)
        #   # => "aaa...aaa..." (truncated to 100 chars)
        #
        def serialize(value, max_length: nil)
          max_length ||= Sentry::Agents.configuration.max_string_length

          result = case value
                   when String
                     value
                   when Hash, Array
                     value.to_json
                   when NilClass
                     return nil
                   else
                     value.to_s
                   end

          truncate(result, max_length)
        end

        # Truncate a string to the specified maximum length
        #
        # @param str [String] the string to truncate
        # @param max_length [Integer] maximum length
        # @return [String] the truncated string
        #
        def truncate(str, max_length)
          return str if str.nil? || str.length <= max_length

          "#{str[0...max_length]}..."
        end

        # Apply custom data filter if configured
        #
        # @param data [Hash] the data to filter
        # @return [Hash] the filtered data
        #
        def filter(data)
          filter_proc = Sentry::Agents.configuration.data_filter
          return data unless filter_proc

          filter_proc.call(data.dup)
        end
      end
    end
  end
end
data/lib/sentry/agents/span_builder.rb
ADDED
@@ -0,0 +1,125 @@
# frozen_string_literal: true

module Sentry
  module Agents
    # Helper class for building Sentry spans with Gen AI attributes
    #
    # Provides a consistent interface for creating spans across all
    # instrumentation methods.
    #
    class SpanBuilder
      # Mapping of operation types to Sentry span operation names
      OPERATIONS = {
        invoke_agent: "gen_ai.invoke_agent",
        chat: "gen_ai.chat",
        execute_tool: "gen_ai.execute_tool",
        handoff: "gen_ai.handoff"
      }.freeze

      class << self
        # Build and execute a Sentry span
        #
        # @param operation [Symbol] one of :invoke_agent, :chat, :execute_tool, :handoff
        # @param description [String] span description
        # @param attributes [Hash] initial span attributes
        # @yield [Sentry::Span, nil] the created span (or nil if unavailable)
        # @return [Object] the block result
        #
        # @example
        #   SpanBuilder.build(
        #     operation: :chat,
        #     description: "chat claude-3-5-sonnet",
        #     attributes: { "gen_ai.request.model" => "claude-3-5-sonnet" }
        #   ) do |span|
        #     # perform LLM call
        #   end
        #
        def build(operation:, description:, attributes: {})
          return yield(nil) unless sentry_available?

          # Only rescue errors from Sentry span creation itself, not from user code
          span = begin
            create_span(operation, description)
          rescue StandardError => e
            log_span_error(e)
            nil
          end

          # If span creation failed, execute without instrumentation
          return yield(nil) unless span

          # Execute user code within the span - let their exceptions propagate
          begin
            set_attributes(span, attributes)
            yield(span)
          ensure
            # Always finish the span, even if user code raises
            finish_span(span)
          end
        end

        # Set attributes on a span
        #
        # @param span [Sentry::Span, nil] the span to modify
        # @param attributes [Hash] attributes to set
        # @return [void]
        #
        def set_attributes(span, attributes)
          return unless span

          filtered = Serializer.filter(attributes)
          filtered.each do |key, value|
            span.set_data(key, value) if value_present?(value)
          end
        end

        # Check if Sentry tracing is available
        #
        # @return [Boolean] true if Sentry is initialized and has an active span
        #
        def sentry_available?
          defined?(Sentry) &&
            Sentry.initialized? &&
            Sentry.get_current_scope&.get_span
        end

        private

        def create_span(operation, description)
          # Start a child span manually so we control when it finishes
          parent_span = Sentry.get_current_scope&.get_span
          return nil unless parent_span

          parent_span.start_child(
            op: OPERATIONS[operation],
            description: description
          )
        end

        def finish_span(span)
          span&.finish
        rescue StandardError => e
          log_span_error(e)
        end

        def log_span_error(error)
          return unless Sentry::Agents.configuration.debug

          warn "[sentry-agents] Span error: #{error.class} - #{error.message}"
        end

        # Check if a value is present (not nil and not empty if applicable)
        #
        # @param value [Object] the value to check
        # @return [Boolean]
        #
        def value_present?(value)
          return false if value.nil?
          return !value.empty? if value.respond_to?(:empty?)

          true
        end
      end
    end
  end
end
data/lib/sentry/agents.rb
ADDED
@@ -0,0 +1,75 @@
# frozen_string_literal: true

require_relative "agents/version"
require_relative "agents/configuration"
require_relative "agents/serializer"
require_relative "agents/span_builder"
require_relative "agents/instrumentation"

module Sentry
  # Sentry Gen AI instrumentation for AI/LLM agents
  #
  # @example Basic usage
  #   class MyAgent
  #     include Sentry::Agents::Instrumentation
  #
  #     def process(message)
  #       with_agent_span(agent_name: "MyAgent", model: "claude-3-5-sonnet") do
  #         # agent logic
  #       end
  #     end
  #   end
  #
  module Agents
    class << self
      # @return [Configuration] the current configuration
      # Thread-safe lazy initialization
      def configuration
        @mutex ||= Mutex.new
        @mutex.synchronize do
          @configuration ||= Configuration.new
        end
      end

      # Configure the gem
      #
      # @yield [Configuration] the configuration object
      # @return [void]
      #
      # @example
      #   Sentry::Agents.configure do |config|
      #     config.default_system = "anthropic"
      #     config.auto_instrument_ruby_llm = true
      #   end
      #
      def configure
        yield(configuration)
        apply_auto_instrumentation
      end

      # Reset configuration to defaults (mainly for testing)
      # Thread-safe reset
      # @return [void]
      def reset_configuration!
        @mutex ||= Mutex.new
        @mutex.synchronize do
          @configuration = Configuration.new
        end
      end

      private

      def apply_auto_instrumentation
        if configuration.auto_instrument_ruby_llm && defined?(RubyLLM)
          require_relative "agents/integrations/ruby_llm"
          Integrations::RubyLLM.install
        end

        return unless configuration.auto_instrument_langchainrb && defined?(Langchain)

        require_relative "agents/integrations/langchainrb"
        Integrations::LangChainRb.install
      end
    end
  end
end
metadata
ADDED
@@ -0,0 +1,127 @@
--- !ruby/object:Gem::Specification
name: sentry-agents
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- SwiftTail
bindir: bin
cert_chain: []
date: 1980-01-02 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: sentry-ruby
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 5.0.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 5.0.0
- !ruby/object:Gem::Dependency
  name: minitest
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '5.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '5.0'
- !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '13.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '13.0'
- !ruby/object:Gem::Dependency
  name: rubocop
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.21'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.21'
- !ruby/object:Gem::Dependency
  name: rubocop-minitest
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.35'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.35'
description: |
  Provides Sentry Gen AI instrumentation for AI/LLM agents,
  supporting multiple providers (Anthropic, OpenAI, etc.)
  with auto-instrumentation for RubyLLM and LangChain.rb.
email:
- dev@flyswifttail.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- CHANGELOG.md
- LICENSE
- README.md
- lib/sentry-agents.rb
- lib/sentry/agents.rb
- lib/sentry/agents/configuration.rb
- lib/sentry/agents/instrumentation.rb
- lib/sentry/agents/serializer.rb
- lib/sentry/agents/span_builder.rb
- lib/sentry/agents/version.rb
homepage: https://github.com/sentry-agents/sentry-agents-ruby
licenses:
- MIT
metadata:
  homepage_uri: https://github.com/sentry-agents/sentry-agents-ruby
  source_code_uri: https://github.com/sentry-agents/sentry-agents-ruby
  changelog_uri: https://github.com/sentry-agents/sentry-agents-ruby/blob/main/CHANGELOG.md
  rubygems_mfa_required: 'true'
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: 3.1.0
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubygems_version: 3.6.9
specification_version: 4
summary: Sentry Gen AI instrumentation for AI/LLM agents in Ruby
test_files: []