thinkhive 0.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,81 @@
1
+ Metadata-Version: 2.4
2
+ Name: thinkhive
3
+ Version: 0.2.0
4
+ Summary: AI agent observability SDK supporting 25 trace formats
5
+ Home-page: https://github.com/thinkhive/thinkhive-python
6
+ Author: ThinkHive
7
+ Author-email: support@thinkhive.ai
8
+ Classifier: Development Status :: 3 - Alpha
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.8
14
+ Classifier: Programming Language :: Python :: 3.9
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Requires-Python: >=3.8
18
+ Description-Content-Type: text/markdown
19
+ Requires-Dist: opentelemetry-api>=1.20.0
20
+ Requires-Dist: opentelemetry-sdk>=1.20.0
21
+ Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.20.0
22
+ Dynamic: author
23
+ Dynamic: author-email
24
+ Dynamic: classifier
25
+ Dynamic: description
26
+ Dynamic: description-content-type
27
+ Dynamic: home-page
28
+ Dynamic: requires-dist
29
+ Dynamic: requires-python
30
+ Dynamic: summary
31
+
32
+ # ThinkHive Python SDK
33
+
34
+ OpenTelemetry-based observability SDK for AI agents supporting 25 trace formats including LangSmith, Langfuse, Opik, Braintrust, Datadog, MLflow, and more.
35
+
36
+ ## Installation
37
+
38
+ ```bash
39
+ pip install thinkhive
40
+ ```
41
+
42
+ ## Quick Start
43
+
44
+ ```python
45
+ import thinkhive
46
+
47
+ # Initialize SDK
48
+ thinkhive.init(
49
+ api_key="your-api-key", # or set THINKHIVE_API_KEY
50
+ service_name="my-ai-agent"
51
+ )
52
+
53
+ # Trace LLM calls
54
+ @thinkhive.trace_llm(model_name="gpt-4", provider="openai")
55
+ def call_llm(prompt):
56
+ response = openai.chat.completions.create(
57
+ model="gpt-4",
58
+ messages=[{"role": "user", "content": prompt}]
59
+ )
60
+ return response
61
+
62
+ # Trace retrieval operations
63
+ @thinkhive.trace_retrieval()
64
+ def search_documents(query):
65
+ results = vector_db.search(query)
66
+ return results
67
+
68
+ # Trace tool calls
69
+ @thinkhive.trace_tool(tool_name="web_search")
70
+ def search_web(query):
71
+ return requests.get(f"https://api.example.com/search?q={query}")
72
+ ```
73
+
74
+ ## Environment Variables
75
+
76
+ - `THINKHIVE_API_KEY`: Your ThinkHive API key
77
+ - `THINKHIVE_AGENT_ID`: Your agent ID (alternative to API key)
78
+
79
+ ## License
80
+
81
+ MIT
@@ -0,0 +1,50 @@
1
+ # ThinkHive Python SDK
2
+
3
+ OpenTelemetry-based observability SDK for AI agents supporting 25 trace formats including LangSmith, Langfuse, Opik, Braintrust, Datadog, MLflow, and more.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ pip install thinkhive
9
+ ```
10
+
11
+ ## Quick Start
12
+
13
+ ```python
14
+ import thinkhive
15
+
16
+ # Initialize SDK
17
+ thinkhive.init(
18
+ api_key="your-api-key", # or set THINKHIVE_API_KEY
19
+ service_name="my-ai-agent"
20
+ )
21
+
22
+ # Trace LLM calls
23
+ @thinkhive.trace_llm(model_name="gpt-4", provider="openai")
24
+ def call_llm(prompt):
25
+ response = openai.chat.completions.create(
26
+ model="gpt-4",
27
+ messages=[{"role": "user", "content": prompt}]
28
+ )
29
+ return response
30
+
31
+ # Trace retrieval operations
32
+ @thinkhive.trace_retrieval()
33
+ def search_documents(query):
34
+ results = vector_db.search(query)
35
+ return results
36
+
37
+ # Trace tool calls
38
+ @thinkhive.trace_tool(tool_name="web_search")
39
+ def search_web(query):
40
+ return requests.get(f"https://api.example.com/search?q={query}")
41
+ ```
42
+
43
+ ## Environment Variables
44
+
45
+ - `THINKHIVE_API_KEY`: Your ThinkHive API key
46
+ - `THINKHIVE_AGENT_ID`: Your agent ID (alternative to API key)
47
+
48
+ ## License
49
+
50
+ MIT
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,33 @@
1
from setuptools import setup, find_packages

# Use the README verbatim as the PyPI long description.
with open("README.md", "r", encoding="utf-8") as readme:
    long_description = readme.read()

setup(
    name="thinkhive",
    version="0.2.0",
    author="ThinkHive",
    author_email="support@thinkhive.ai",
    description="AI agent observability SDK supporting 25 trace formats",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/thinkhive/thinkhive-python",
    packages=find_packages(),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
    ],
    python_requires=">=3.8",
    # Runtime dependencies: OpenTelemetry API/SDK plus the OTLP HTTP exporter.
    install_requires=[
        "opentelemetry-api>=1.20.0",
        "opentelemetry-sdk>=1.20.0",
        "opentelemetry-exporter-otlp-proto-http>=1.20.0",
    ],
)
@@ -0,0 +1,229 @@
1
+ """
2
+ ThinkHive Python SDK
3
+ OpenTelemetry-based observability for AI agents
4
+ """
5
+
6
+ from opentelemetry import trace
7
+ try:
8
+ # Prefer the OTLP HTTP exporter (protobuf over HTTP; matches the declared dependency)
9
+ from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
10
+ EXPORTER_TYPE = "http+proto"
11
+ except ImportError:
12
+ # Fallback to gRPC exporter
13
+ from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
14
+ EXPORTER_TYPE = "grpc"
15
+ from opentelemetry.sdk.trace import TracerProvider
16
+ from opentelemetry.sdk.trace.export import BatchSpanProcessor
17
+ from opentelemetry.sdk.resources import Resource
18
+ import functools
19
+ from typing import Optional, Dict, Any, Callable
20
+ import os
21
+
22
# Keep in sync with the version declared in setup.py / PKG-INFO (0.2.0);
# the previous value "0.1.0" was stale.
__version__ = "0.2.0"

# Module-level singleton state: the shared tracer plus a guard so that
# init() is idempotent.
_tracer: Optional[trace.Tracer] = None
_initialized = False
27
+
28
+
29
def init(
    api_key: Optional[str] = None,
    endpoint: str = "https://thinkhivemind-h25z7pvd3q-uc.a.run.app",
    service_name: str = "my-ai-agent",
    agent_id: Optional[str] = None,
):
    """
    Initialize the ThinkHive SDK and install a global OTLP trace pipeline.

    Idempotent: calling this a second time is a no-op.

    Args:
        api_key: ThinkHive API key (falls back to the THINKHIVE_API_KEY env var)
        endpoint: Base URL of the ThinkHive collector
        service_name: Logical name of your service/agent
        agent_id: Optional agent ID (falls back to the THINKHIVE_AGENT_ID env var)

    Raises:
        ValueError: if neither an API key nor an agent ID is available.
    """
    global _tracer, _initialized

    if _initialized:
        return

    # Environment variables act as fallbacks for omitted arguments.
    resolved_key = api_key or os.getenv("THINKHIVE_API_KEY")
    resolved_agent = agent_id or os.getenv("THINKHIVE_AGENT_ID")

    if not resolved_key and not resolved_agent:
        raise ValueError("Either api_key or agent_id must be provided")

    # Authentication header: the API key takes precedence over the agent ID.
    auth_headers: Dict[str, str] = {}
    if resolved_key:
        auth_headers["Authorization"] = f"Bearer {resolved_key}"
    else:
        auth_headers["X-Agent-ID"] = resolved_agent

    # Build the provider -> batch processor -> OTLP exporter pipeline and
    # tag every span with this service's name.
    provider = TracerProvider(
        resource=Resource.create({"service.name": service_name})
    )
    provider.add_span_processor(
        BatchSpanProcessor(
            OTLPSpanExporter(
                endpoint=f"{endpoint}/v1/traces",
                headers=auth_headers,
            )
        )
    )

    # Make the provider the process-wide default, then cache our tracer.
    trace.set_tracer_provider(provider)
    _tracer = trace.get_tracer(__name__, __version__)
    _initialized = True

    print(f"✅ ThinkHive SDK initialized (endpoint: {endpoint})")
87
+
88
+
89
def get_tracer() -> trace.Tracer:
    """Return the tracer created by init().

    Raises:
        RuntimeError: if init() has not been called yet.
    """
    if not _initialized:
        raise RuntimeError("ThinkHive SDK not initialized. Call thinkhive.init() first.")
    return _tracer
95
+
96
+
97
def trace_llm(
    model_name: Optional[str] = None,
    provider: Optional[str] = None,
):
    """
    Decorator for tracing LLM calls as OpenInference "LLM" spans.

    Args:
        model_name: Model identifier recorded as ``llm.model_name``.
        provider: Provider name recorded as ``llm.provider``.

    Usage:
        @trace_llm(model_name="gpt-4", provider="openai")
        def call_llm(prompt):
            return openai.chat.completions.create(...)
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            tracer = get_tracer()
            # OTel attribute values must not be None (None attributes are
            # dropped with a warning) — only record what was provided.
            attributes: Dict[str, Any] = {"openinference.span.kind": "LLM"}
            if model_name is not None:
                attributes["llm.model_name"] = model_name
            if provider is not None:
                attributes["llm.provider"] = provider

            with tracer.start_as_current_span(
                func.__name__,
                attributes=attributes,
            ) as span:
                try:
                    result = func(*args, **kwargs)

                    # Record token counts when the result looks like an
                    # OpenAI-style response object (has a .usage attribute).
                    usage = getattr(result, "usage", None)
                    if usage is not None:
                        for attr, key in (
                            ("prompt_tokens", "llm.token_count.prompt"),
                            ("completion_tokens", "llm.token_count.completion"),
                            ("total_tokens", "llm.token_count.total"),
                        ):
                            value = getattr(usage, attr, None)
                            # Skip missing/None counts: see None-attribute note above.
                            if value is not None:
                                span.set_attribute(key, value)

                    span.set_status(trace.Status(trace.StatusCode.OK))
                    return result
                except Exception as e:
                    span.set_status(trace.Status(trace.StatusCode.ERROR, str(e)))
                    span.record_exception(e)
                    raise

        return wrapper
    return decorator
143
+
144
+
145
def trace_retrieval(query: Optional[str] = None):
    """
    Decorator for tracing retrieval/RAG operations as "RETRIEVER" spans.

    Args:
        query: Optional explicit query string. When omitted, the wrapped
            function's first positional argument is recorded instead
            (presumably the query — TODO confirm against callers).

    Usage:
        @trace_retrieval()
        def search_documents(query):
            return vector_db.search(query)
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            tracer = get_tracer()
            # OTel attribute values must not be None (None attributes are
            # dropped with a warning) — only record a query when available.
            attributes: Dict[str, Any] = {"openinference.span.kind": "RETRIEVER"}
            effective_query = query or (args[0] if args else None)
            if effective_query is not None:
                attributes["retrieval.query"] = effective_query

            with tracer.start_as_current_span(
                func.__name__,
                attributes=attributes,
            ) as span:
                try:
                    result = func(*args, **kwargs)

                    # If the result is a list of document-like objects,
                    # record id/score/content for up to the first 10.
                    if isinstance(result, list):
                        for i, doc in enumerate(result[:10]):
                            prefix = f"retrieval.documents.{i}.document"
                            if hasattr(doc, "id"):
                                span.set_attribute(f"{prefix}.id", doc.id)
                            if hasattr(doc, "score"):
                                span.set_attribute(f"{prefix}.score", doc.score)
                            if hasattr(doc, "content"):
                                # Truncate long content to keep spans small.
                                span.set_attribute(f"{prefix}.content", doc.content[:500])

                    span.set_status(trace.Status(trace.StatusCode.OK))
                    return result
                except Exception as e:
                    span.set_status(trace.Status(trace.StatusCode.ERROR, str(e)))
                    span.record_exception(e)
                    raise

        return wrapper
    return decorator
188
+
189
+
190
def trace_tool(tool_name: Optional[str] = None):
    """
    Decorator for tracing tool/function calls as "TOOL" spans.

    Args:
        tool_name: Span/tool name; defaults to the wrapped function's name.

    Usage:
        @trace_tool(tool_name="web_search")
        def search_web(query):
            return requests.get(f"https://api.example.com/search?q={query}")
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            span_name = tool_name or func.__name__
            with get_tracer().start_as_current_span(
                span_name,
                attributes={
                    "openinference.span.kind": "TOOL",
                    "tool.name": span_name,
                },
            ) as span:
                try:
                    result = func(*args, **kwargs)
                except Exception as exc:
                    span.set_status(trace.Status(trace.StatusCode.ERROR, str(exc)))
                    span.record_exception(exc)
                    raise
                span.set_status(trace.Status(trace.StatusCode.OK))
                return result

        return wrapper
    return decorator
221
+
222
+
223
+ __all__ = [
224
+ "init",
225
+ "get_tracer",
226
+ "trace_llm",
227
+ "trace_retrieval",
228
+ "trace_tool",
229
+ ]
@@ -0,0 +1,81 @@
1
+ Metadata-Version: 2.4
2
+ Name: thinkhive
3
+ Version: 0.2.0
4
+ Summary: AI agent observability SDK supporting 25 trace formats
5
+ Home-page: https://github.com/thinkhive/thinkhive-python
6
+ Author: ThinkHive
7
+ Author-email: support@thinkhive.ai
8
+ Classifier: Development Status :: 3 - Alpha
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.8
14
+ Classifier: Programming Language :: Python :: 3.9
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Requires-Python: >=3.8
18
+ Description-Content-Type: text/markdown
19
+ Requires-Dist: opentelemetry-api>=1.20.0
20
+ Requires-Dist: opentelemetry-sdk>=1.20.0
21
+ Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.20.0
22
+ Dynamic: author
23
+ Dynamic: author-email
24
+ Dynamic: classifier
25
+ Dynamic: description
26
+ Dynamic: description-content-type
27
+ Dynamic: home-page
28
+ Dynamic: requires-dist
29
+ Dynamic: requires-python
30
+ Dynamic: summary
31
+
32
+ # ThinkHive Python SDK
33
+
34
+ OpenTelemetry-based observability SDK for AI agents supporting 25 trace formats including LangSmith, Langfuse, Opik, Braintrust, Datadog, MLflow, and more.
35
+
36
+ ## Installation
37
+
38
+ ```bash
39
+ pip install thinkhive
40
+ ```
41
+
42
+ ## Quick Start
43
+
44
+ ```python
45
+ import thinkhive
46
+
47
+ # Initialize SDK
48
+ thinkhive.init(
49
+ api_key="your-api-key", # or set THINKHIVE_API_KEY
50
+ service_name="my-ai-agent"
51
+ )
52
+
53
+ # Trace LLM calls
54
+ @thinkhive.trace_llm(model_name="gpt-4", provider="openai")
55
+ def call_llm(prompt):
56
+ response = openai.chat.completions.create(
57
+ model="gpt-4",
58
+ messages=[{"role": "user", "content": prompt}]
59
+ )
60
+ return response
61
+
62
+ # Trace retrieval operations
63
+ @thinkhive.trace_retrieval()
64
+ def search_documents(query):
65
+ results = vector_db.search(query)
66
+ return results
67
+
68
+ # Trace tool calls
69
+ @thinkhive.trace_tool(tool_name="web_search")
70
+ def search_web(query):
71
+ return requests.get(f"https://api.example.com/search?q={query}")
72
+ ```
73
+
74
+ ## Environment Variables
75
+
76
+ - `THINKHIVE_API_KEY`: Your ThinkHive API key
77
+ - `THINKHIVE_AGENT_ID`: Your agent ID (alternative to API key)
78
+
79
+ ## License
80
+
81
+ MIT
@@ -0,0 +1,8 @@
1
+ README.md
2
+ setup.py
3
+ thinkhive/__init__.py
4
+ thinkhive.egg-info/PKG-INFO
5
+ thinkhive.egg-info/SOURCES.txt
6
+ thinkhive.egg-info/dependency_links.txt
7
+ thinkhive.egg-info/requires.txt
8
+ thinkhive.egg-info/top_level.txt
@@ -0,0 +1,3 @@
1
+ opentelemetry-api>=1.20.0
2
+ opentelemetry-sdk>=1.20.0
3
+ opentelemetry-exporter-otlp-proto-http>=1.20.0
@@ -0,0 +1 @@
1
+ thinkhive