traia_iatp-0.1.29-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of traia-iatp might be problematic.
- traia_iatp/README.md +368 -0
- traia_iatp/__init__.py +54 -0
- traia_iatp/cli/__init__.py +5 -0
- traia_iatp/cli/main.py +483 -0
- traia_iatp/client/__init__.py +10 -0
- traia_iatp/client/a2a_client.py +274 -0
- traia_iatp/client/crewai_a2a_tools.py +335 -0
- traia_iatp/client/d402_a2a_client.py +293 -0
- traia_iatp/client/grpc_a2a_tools.py +349 -0
- traia_iatp/client/root_path_a2a_client.py +1 -0
- traia_iatp/contracts/__init__.py +12 -0
- traia_iatp/contracts/iatp_contracts_config.py +263 -0
- traia_iatp/contracts/wallet_creator.py +255 -0
- traia_iatp/core/__init__.py +43 -0
- traia_iatp/core/models.py +172 -0
- traia_iatp/d402/__init__.py +55 -0
- traia_iatp/d402/chains.py +102 -0
- traia_iatp/d402/client.py +150 -0
- traia_iatp/d402/clients/__init__.py +7 -0
- traia_iatp/d402/clients/base.py +218 -0
- traia_iatp/d402/clients/httpx.py +219 -0
- traia_iatp/d402/common.py +114 -0
- traia_iatp/d402/encoding.py +28 -0
- traia_iatp/d402/examples/client_example.py +197 -0
- traia_iatp/d402/examples/server_example.py +171 -0
- traia_iatp/d402/facilitator.py +453 -0
- traia_iatp/d402/fastapi_middleware/__init__.py +6 -0
- traia_iatp/d402/fastapi_middleware/middleware.py +225 -0
- traia_iatp/d402/fastmcp_middleware.py +147 -0
- traia_iatp/d402/mcp_middleware.py +434 -0
- traia_iatp/d402/middleware.py +193 -0
- traia_iatp/d402/models.py +116 -0
- traia_iatp/d402/networks.py +98 -0
- traia_iatp/d402/path.py +43 -0
- traia_iatp/d402/payment_introspection.py +104 -0
- traia_iatp/d402/payment_signing.py +178 -0
- traia_iatp/d402/paywall.py +119 -0
- traia_iatp/d402/starlette_middleware.py +326 -0
- traia_iatp/d402/template.py +1 -0
- traia_iatp/d402/types.py +300 -0
- traia_iatp/mcp/__init__.py +18 -0
- traia_iatp/mcp/client.py +201 -0
- traia_iatp/mcp/d402_mcp_tool_adapter.py +361 -0
- traia_iatp/mcp/mcp_agent_template.py +481 -0
- traia_iatp/mcp/templates/Dockerfile.j2 +80 -0
- traia_iatp/mcp/templates/README.md.j2 +310 -0
- traia_iatp/mcp/templates/cursor-rules.md.j2 +520 -0
- traia_iatp/mcp/templates/deployment_params.json.j2 +20 -0
- traia_iatp/mcp/templates/docker-compose.yml.j2 +32 -0
- traia_iatp/mcp/templates/dockerignore.j2 +47 -0
- traia_iatp/mcp/templates/env.example.j2 +57 -0
- traia_iatp/mcp/templates/gitignore.j2 +77 -0
- traia_iatp/mcp/templates/mcp_health_check.py.j2 +150 -0
- traia_iatp/mcp/templates/pyproject.toml.j2 +32 -0
- traia_iatp/mcp/templates/pyrightconfig.json.j2 +22 -0
- traia_iatp/mcp/templates/run_local_docker.sh.j2 +390 -0
- traia_iatp/mcp/templates/server.py.j2 +175 -0
- traia_iatp/mcp/traia_mcp_adapter.py +543 -0
- traia_iatp/preview_diagrams.html +181 -0
- traia_iatp/registry/__init__.py +26 -0
- traia_iatp/registry/atlas_search_indexes.json +280 -0
- traia_iatp/registry/embeddings.py +298 -0
- traia_iatp/registry/iatp_search_api.py +846 -0
- traia_iatp/registry/mongodb_registry.py +771 -0
- traia_iatp/registry/readmes/ATLAS_SEARCH_INDEXES.md +252 -0
- traia_iatp/registry/readmes/ATLAS_SEARCH_SETUP.md +134 -0
- traia_iatp/registry/readmes/AUTHENTICATION_UPDATE.md +124 -0
- traia_iatp/registry/readmes/EMBEDDINGS_SETUP.md +172 -0
- traia_iatp/registry/readmes/IATP_SEARCH_API_GUIDE.md +257 -0
- traia_iatp/registry/readmes/MONGODB_X509_AUTH.md +208 -0
- traia_iatp/registry/readmes/README.md +251 -0
- traia_iatp/registry/readmes/REFACTORING_SUMMARY.md +191 -0
- traia_iatp/scripts/__init__.py +2 -0
- traia_iatp/scripts/create_wallet.py +244 -0
- traia_iatp/server/__init__.py +15 -0
- traia_iatp/server/a2a_server.py +219 -0
- traia_iatp/server/example_template_usage.py +72 -0
- traia_iatp/server/iatp_server_agent_generator.py +237 -0
- traia_iatp/server/iatp_server_template_generator.py +235 -0
- traia_iatp/server/templates/.dockerignore.j2 +48 -0
- traia_iatp/server/templates/Dockerfile.j2 +49 -0
- traia_iatp/server/templates/README.md +137 -0
- traia_iatp/server/templates/README.md.j2 +425 -0
- traia_iatp/server/templates/__init__.py +1 -0
- traia_iatp/server/templates/__main__.py.j2 +565 -0
- traia_iatp/server/templates/agent.py.j2 +94 -0
- traia_iatp/server/templates/agent_config.json.j2 +22 -0
- traia_iatp/server/templates/agent_executor.py.j2 +279 -0
- traia_iatp/server/templates/docker-compose.yml.j2 +23 -0
- traia_iatp/server/templates/env.example.j2 +84 -0
- traia_iatp/server/templates/gitignore.j2 +78 -0
- traia_iatp/server/templates/grpc_server.py.j2 +218 -0
- traia_iatp/server/templates/pyproject.toml.j2 +78 -0
- traia_iatp/server/templates/run_local_docker.sh.j2 +103 -0
- traia_iatp/server/templates/server.py.j2 +243 -0
- traia_iatp/special_agencies/__init__.py +4 -0
- traia_iatp/special_agencies/registry_search_agency.py +392 -0
- traia_iatp/utils/__init__.py +10 -0
- traia_iatp/utils/docker_utils.py +251 -0
- traia_iatp/utils/general.py +64 -0
- traia_iatp/utils/iatp_utils.py +126 -0
- traia_iatp-0.1.29.dist-info/METADATA +423 -0
- traia_iatp-0.1.29.dist-info/RECORD +107 -0
- traia_iatp-0.1.29.dist-info/WHEEL +5 -0
- traia_iatp-0.1.29.dist-info/entry_points.txt +2 -0
- traia_iatp-0.1.29.dist-info/licenses/LICENSE +21 -0
- traia_iatp-0.1.29.dist-info/top_level.txt +1 -0
traia_iatp/registry/embeddings.py
@@ -0,0 +1,298 @@
+"""Embeddings generation for vector search."""
+
+import os
+import logging
+from typing import List, Optional, Dict, Any
+from enum import Enum
+
+logger = logging.getLogger(__name__)
+
+
+# Default embedding models per provider
+DEFAULT_EMBEDDING_MODELS = {
+    "openai": "text-embedding-3-small",  # 1536 dimensions
+    "cohere": "embed-english-v3.0",  # 1024 dimensions
+    "huggingface": "all-MiniLM-L6-v2",  # 384 dimensions
+}
+
+
+class EmbeddingProvider(str, Enum):
+    """Supported embedding providers."""
+    OPENAI = "openai"
+    COHERE = "cohere"
+    HUGGINGFACE = "huggingface"
+
+
+class EmbeddingService:
+    """Service for generating embeddings from text."""
+
+    def __init__(self, provider: EmbeddingProvider = EmbeddingProvider.OPENAI):
+        """Initialize embedding service.
+
+        Args:
+            provider: Which embedding provider to use
+        """
+        self.provider = provider
+        self._client = None
+        self._initialize_client()
+
+    def _initialize_client(self):
+        """Initialize the embedding client based on provider."""
+        if self.provider == EmbeddingProvider.OPENAI:
+            try:
+                import openai
+                api_key = os.getenv("OPENAI_API_KEY")
+                if not api_key:
+                    logger.warning("OPENAI_API_KEY not set. Embedding generation will fail.")
+                self._client = openai.OpenAI(api_key=api_key)
+            except ImportError:
+                logger.error("OpenAI package not installed. Run: uv add openai")
+                raise
+        elif self.provider == EmbeddingProvider.COHERE:
+            try:
+                import cohere
+                api_key = os.getenv("COHERE_API_KEY")
+                if not api_key:
+                    logger.warning("COHERE_API_KEY not set. Embedding generation will fail.")
+                self._client = cohere.Client(api_key)
+            except ImportError:
+                logger.error("Cohere package not installed. Run: uv add cohere")
+                raise
+        else:
+            raise NotImplementedError(f"Provider {self.provider} not yet implemented")
+
+    def get_default_model(self) -> str:
+        """Get the default embedding model for the current provider.
+
+        Returns:
+            Default model name for the provider
+        """
+        return DEFAULT_EMBEDDING_MODELS.get(self.provider.value, "")
+
+    async def generate_embedding(self, text: str, model: Optional[str] = None) -> List[float]:
+        """Generate embedding for a single text.
+
+        Args:
+            text: Text to embed
+            model: Optional model override
+
+        Returns:
+            List of floats representing the embedding vector
+        """
+        if not text:
+            return []
+
+        if self.provider == EmbeddingProvider.OPENAI:
+            model = model or DEFAULT_EMBEDDING_MODELS[self.provider.value]
+            try:
+                response = self._client.embeddings.create(
+                    model=model,
+                    input=text
+                )
+                return response.data[0].embedding
+            except Exception as e:
+                logger.error(f"Failed to generate OpenAI embedding: {e}")
+                raise
+
+        elif self.provider == EmbeddingProvider.COHERE:
+            model = model or DEFAULT_EMBEDDING_MODELS[self.provider.value]
+            try:
+                response = self._client.embed(
+                    texts=[text],
+                    model=model,
+                    input_type="search_document"
+                )
+                return response.embeddings[0]
+            except Exception as e:
+                logger.error(f"Failed to generate Cohere embedding: {e}")
+                raise
+
+        else:
+            raise NotImplementedError(f"Provider {self.provider} not yet implemented")
+
+    async def generate_embeddings(self, texts: List[str], model: Optional[str] = None) -> List[List[float]]:
+        """Generate embeddings for multiple texts.
+
+        Args:
+            texts: List of texts to embed
+            model: Optional model override
+
+        Returns:
+            List of embedding vectors
+        """
+        if not texts:
+            return []
+
+        if self.provider == EmbeddingProvider.OPENAI:
+            model = model or DEFAULT_EMBEDDING_MODELS[self.provider.value]
+            try:
+                response = self._client.embeddings.create(
+                    model=model,
+                    input=texts
+                )
+                return [item.embedding for item in response.data]
+            except Exception as e:
+                logger.error(f"Failed to generate OpenAI embeddings: {e}")
+                raise
+
+        elif self.provider == EmbeddingProvider.COHERE:
+            model = model or DEFAULT_EMBEDDING_MODELS[self.provider.value]
+            try:
+                response = self._client.embed(
+                    texts=texts,
+                    model=model,
+                    input_type="search_document"
+                )
+                return response.embeddings
+            except Exception as e:
+                logger.error(f"Failed to generate Cohere embeddings: {e}")
+                raise
+
+        else:
+            raise NotImplementedError(f"Provider {self.provider} not yet implemented")
+
+    async def generate_query_embedding(self, query: str, model: Optional[str] = None) -> List[float]:
+        """Generate embedding for a search query.
+
+        Some providers have different handling for queries vs documents.
+
+        Args:
+            query: Search query text
+            model: Optional model override
+
+        Returns:
+            Embedding vector for the query
+        """
+        if self.provider == EmbeddingProvider.COHERE:
+            model = model or DEFAULT_EMBEDDING_MODELS[self.provider.value]
+            try:
+                response = self._client.embed(
+                    texts=[query],
+                    model=model,
+                    input_type="search_query"  # Different from documents
+                )
+                return response.embeddings[0]
+            except Exception as e:
+                logger.error(f"Failed to generate Cohere query embedding: {e}")
+                raise
+        else:
+            # For other providers, queries and documents are handled the same
+            return await self.generate_embedding(query, model)
+
+
+# Singleton instance
+_embedding_service = None
+
+
+def get_embedding_service(provider: Optional[EmbeddingProvider] = None) -> EmbeddingService:
+    """Get or create the embedding service singleton.
+
+    Args:
+        provider: Optional provider override
+
+    Returns:
+        EmbeddingService instance
+    """
+    global _embedding_service
+
+    if _embedding_service is None or (provider and provider != _embedding_service.provider):
+        provider = provider or EmbeddingProvider.OPENAI
+        _embedding_service = EmbeddingService(provider)
+
+    return _embedding_service
+
+
+if __name__ == "__main__":
+    import asyncio
+
+    async def test_embeddings():
+        """Test embedding generation functionality."""
+        print("=== Embedding Service Test ===\n")
+
+        # Check environment
+        provider = os.getenv("EMBEDDING_PROVIDER", "openai").lower()
+        print(f"Provider: {provider}")
+        print(f"Default model: {DEFAULT_EMBEDDING_MODELS.get(provider, 'N/A')}")
+
+        # Check API keys
+        if provider == "openai" and not os.getenv("OPENAI_API_KEY"):
+            print("\nERROR: OPENAI_API_KEY not set. Please set it to test embeddings.")
+            print("Example: export OPENAI_API_KEY=your-api-key")
+            return
+        elif provider == "cohere" and not os.getenv("COHERE_API_KEY"):
+            print("\nERROR: COHERE_API_KEY not set. Please set it to test embeddings.")
+            print("Example: export COHERE_API_KEY=your-api-key")
+            return
+
+        try:
+            # Initialize service
+            print("\n--- Initializing Embedding Service ---")
+            service = get_embedding_service()
+            print(f"✓ Service initialized with provider: {service.provider}")
+            print(f"✓ Default model: {service.get_default_model()}")
+
+            # Test single embedding
+            print("\n--- Testing Single Text Embedding ---")
+            test_text = "Weather forecasting API with real-time data"
+            embedding = await service.generate_embedding(test_text)
+            print(f"Input text: '{test_text}'")
+            print(f"✓ Embedding dimensions: {len(embedding)}")
+            print(f"✓ First 5 values: {embedding[:5]}")
+
+            # Test multiple embeddings
+            print("\n--- Testing Multiple Text Embeddings ---")
+            test_texts = [
+                "Machine learning for data analysis",
+                "API integration tools",
+                "Real-time weather monitoring"
+            ]
+            embeddings = await service.generate_embeddings(test_texts)
+            print(f"Input texts: {len(test_texts)} texts")
+            for i, text in enumerate(test_texts):
+                print(f"  {i+1}. '{text}' -> {len(embeddings[i])} dimensions")
+
+            # Test query embedding
+            print("\n--- Testing Query Embedding ---")
+            query = "climate prediction services"
+            query_embedding = await service.generate_query_embedding(query)
+            print(f"Query: '{query}'")
+            print(f"✓ Query embedding dimensions: {len(query_embedding)}")
+
+            # Test similarity (cosine similarity between doc and query)
+            print("\n--- Testing Semantic Similarity ---")
+            import numpy as np
+
+            # Calculate cosine similarity
+            def cosine_similarity(a, b):
+                return np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))
+
+            # Compare query with test texts
+            print(f"Query: '{query}'")
+            for i, (text, emb) in enumerate(zip(test_texts, embeddings)):
+                similarity = cosine_similarity(query_embedding, emb)
+                print(f"  vs '{text}': {similarity:.3f}")
+
+            print("\n✓ All tests passed!")
+
+        except ImportError as e:
+            print(f"\n✗ Import error: {e}")
+            print("Please install the required package:")
+            if "openai" in str(e):
+                print("  uv add openai")
+            elif "cohere" in str(e):
+                print("  uv add cohere")
+            elif "numpy" in str(e):
+                print("  uv add numpy")
+
+        except Exception as e:
+            print(f"\n✗ Error: {e}")
+            import traceback
+            traceback.print_exc()
+
+    # Run the test
+    print("Testing embedding service...")
+    print(f"ENABLE_EMBEDDINGS: {os.getenv('ENABLE_EMBEDDINGS', 'false')}")
+    print(f"EMBEDDING_PROVIDER: {os.getenv('EMBEDDING_PROVIDER', 'openai')}")
+    print()
+
+    asyncio.run(test_embeddings())
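
For orientation, a minimal usage sketch of the module above (not part of the release): it assumes the package is installed, the openai and numpy dependencies are available, and OPENAI_API_KEY is set; the import path follows the traia_iatp/registry/embeddings.py entry in the file list.

    import asyncio

    import numpy as np

    from traia_iatp.registry.embeddings import EmbeddingProvider, get_embedding_service


    async def main() -> None:
        # Reuses the module-level singleton; defaults to the OpenAI provider.
        service = get_embedding_service(EmbeddingProvider.OPENAI)

        # Embed a few documents and a query, then rank by cosine similarity,
        # mirroring the self-test in the module's __main__ block.
        docs = ["Weather forecasting API", "Payment middleware for FastAPI"]
        doc_vectors = await service.generate_embeddings(docs)
        query_vector = await service.generate_query_embedding("real-time weather data")

        def cosine(a, b):
            return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))

        for doc, vec in zip(docs, doc_vectors):
            print(f"{doc}: {cosine(query_vector, vec):.3f}")


    if __name__ == "__main__":
        asyncio.run(main())

The document and query strings here are illustrative only; any text can be embedded, and the query path simply falls back to generate_embedding for providers other than Cohere.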