traia-iatp 0.1.2__py3-none-any.whl → 0.1.67__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, exactly as they appear in their public registry. It is provided for informational purposes only.
Files changed (95)
  1. traia_iatp/__init__.py +105 -8
  2. traia_iatp/cli/main.py +85 -1
  3. traia_iatp/client/__init__.py +28 -3
  4. traia_iatp/client/crewai_a2a_tools.py +32 -12
  5. traia_iatp/client/d402_a2a_client.py +348 -0
  6. traia_iatp/contracts/__init__.py +11 -0
  7. traia_iatp/contracts/data/abis/contract-abis-localhost.json +4091 -0
  8. traia_iatp/contracts/data/abis/contract-abis-sepolia.json +4890 -0
  9. traia_iatp/contracts/data/addresses/contract-addresses.json +17 -0
  10. traia_iatp/contracts/data/addresses/contract-proxies.json +12 -0
  11. traia_iatp/contracts/iatp_contracts_config.py +263 -0
  12. traia_iatp/contracts/wallet_creator.py +369 -0
  13. traia_iatp/core/models.py +17 -3
  14. traia_iatp/d402/MIDDLEWARE_ARCHITECTURE.md +205 -0
  15. traia_iatp/d402/PRICE_BUILDER_USAGE.md +249 -0
  16. traia_iatp/d402/README.md +489 -0
  17. traia_iatp/d402/__init__.py +54 -0
  18. traia_iatp/d402/asgi_wrapper.py +469 -0
  19. traia_iatp/d402/chains.py +102 -0
  20. traia_iatp/d402/client.py +150 -0
  21. traia_iatp/d402/clients/__init__.py +7 -0
  22. traia_iatp/d402/clients/base.py +218 -0
  23. traia_iatp/d402/clients/httpx.py +266 -0
  24. traia_iatp/d402/common.py +114 -0
  25. traia_iatp/d402/encoding.py +28 -0
  26. traia_iatp/d402/examples/client_example.py +197 -0
  27. traia_iatp/d402/examples/server_example.py +171 -0
  28. traia_iatp/d402/facilitator.py +481 -0
  29. traia_iatp/d402/mcp_middleware.py +296 -0
  30. traia_iatp/d402/models.py +116 -0
  31. traia_iatp/d402/networks.py +98 -0
  32. traia_iatp/d402/path.py +43 -0
  33. traia_iatp/d402/payment_introspection.py +126 -0
  34. traia_iatp/d402/payment_signing.py +183 -0
  35. traia_iatp/d402/price_builder.py +164 -0
  36. traia_iatp/d402/servers/__init__.py +61 -0
  37. traia_iatp/d402/servers/base.py +139 -0
  38. traia_iatp/d402/servers/example_general_server.py +140 -0
  39. traia_iatp/d402/servers/fastapi.py +253 -0
  40. traia_iatp/d402/servers/mcp.py +304 -0
  41. traia_iatp/d402/servers/starlette.py +878 -0
  42. traia_iatp/d402/starlette_middleware.py +529 -0
  43. traia_iatp/d402/types.py +300 -0
  44. traia_iatp/mcp/D402_MCP_ADAPTER_FLOW.md +357 -0
  45. traia_iatp/mcp/__init__.py +3 -0
  46. traia_iatp/mcp/d402_mcp_tool_adapter.py +526 -0
  47. traia_iatp/mcp/mcp_agent_template.py +78 -13
  48. traia_iatp/mcp/templates/Dockerfile.j2 +27 -4
  49. traia_iatp/mcp/templates/README.md.j2 +104 -8
  50. traia_iatp/mcp/templates/cursor-rules.md.j2 +194 -0
  51. traia_iatp/mcp/templates/deployment_params.json.j2 +1 -2
  52. traia_iatp/mcp/templates/docker-compose.yml.j2 +13 -3
  53. traia_iatp/mcp/templates/env.example.j2 +60 -0
  54. traia_iatp/mcp/templates/mcp_health_check.py.j2 +2 -2
  55. traia_iatp/mcp/templates/pyproject.toml.j2 +11 -5
  56. traia_iatp/mcp/templates/pyrightconfig.json.j2 +22 -0
  57. traia_iatp/mcp/templates/run_local_docker.sh.j2 +320 -10
  58. traia_iatp/mcp/templates/server.py.j2 +174 -197
  59. traia_iatp/mcp/traia_mcp_adapter.py +182 -20
  60. traia_iatp/registry/__init__.py +47 -12
  61. traia_iatp/registry/atlas_search_indexes.json +108 -54
  62. traia_iatp/registry/iatp_search_api.py +169 -39
  63. traia_iatp/registry/mongodb_registry.py +241 -69
  64. traia_iatp/registry/readmes/EMBEDDINGS_SETUP.md +1 -1
  65. traia_iatp/registry/readmes/IATP_SEARCH_API_GUIDE.md +8 -8
  66. traia_iatp/registry/readmes/MONGODB_X509_AUTH.md +1 -1
  67. traia_iatp/registry/readmes/README.md +3 -3
  68. traia_iatp/registry/readmes/REFACTORING_SUMMARY.md +6 -6
  69. traia_iatp/scripts/__init__.py +2 -0
  70. traia_iatp/scripts/create_wallet.py +244 -0
  71. traia_iatp/server/a2a_server.py +22 -7
  72. traia_iatp/server/iatp_server_template_generator.py +23 -0
  73. traia_iatp/server/templates/.dockerignore.j2 +48 -0
  74. traia_iatp/server/templates/Dockerfile.j2 +23 -1
  75. traia_iatp/server/templates/README.md +2 -2
  76. traia_iatp/server/templates/README.md.j2 +5 -5
  77. traia_iatp/server/templates/__main__.py.j2 +374 -66
  78. traia_iatp/server/templates/agent.py.j2 +12 -11
  79. traia_iatp/server/templates/agent_config.json.j2 +3 -3
  80. traia_iatp/server/templates/agent_executor.py.j2 +45 -27
  81. traia_iatp/server/templates/env.example.j2 +32 -4
  82. traia_iatp/server/templates/gitignore.j2 +7 -0
  83. traia_iatp/server/templates/pyproject.toml.j2 +13 -12
  84. traia_iatp/server/templates/run_local_docker.sh.j2 +143 -11
  85. traia_iatp/server/templates/server.py.j2 +197 -10
  86. traia_iatp/special_agencies/registry_search_agency.py +1 -1
  87. traia_iatp/utils/iatp_utils.py +6 -6
  88. traia_iatp-0.1.67.dist-info/METADATA +320 -0
  89. traia_iatp-0.1.67.dist-info/RECORD +117 -0
  90. traia_iatp-0.1.2.dist-info/METADATA +0 -414
  91. traia_iatp-0.1.2.dist-info/RECORD +0 -72
  92. {traia_iatp-0.1.2.dist-info → traia_iatp-0.1.67.dist-info}/WHEEL +0 -0
  93. {traia_iatp-0.1.2.dist-info → traia_iatp-0.1.67.dist-info}/entry_points.txt +0 -0
  94. {traia_iatp-0.1.2.dist-info → traia_iatp-0.1.67.dist-info}/licenses/LICENSE +0 -0
  95. {traia_iatp-0.1.2.dist-info → traia_iatp-0.1.67.dist-info}/top_level.txt +0 -0
traia_iatp/server/templates/agent_executor.py.j2

@@ -17,23 +17,25 @@ from a2a.server.agent_execution import AgentExecutor, RequestContext
 from a2a.server.events.event_queue import EventQueue
 from a2a.types import Message, TextPart
 from a2a.utils import new_agent_text_message
-from crewai import Task
+from crewai import Task, LLM
 from traia_iatp.mcp import MCPServerConfig, MCPAgentBuilder, run_with_mcp_tools, MCPServerInfo

 # Import AgentOps for operation tracking
-try:
-    import agentops
-    from agentops.sdk.decorators import operation
-    AGENTOPS_AVAILABLE = True
-except ImportError:
-    AGENTOPS_AVAILABLE = False
-    agentops = None
-    # Create a no-op decorator if AgentOps is not available
-    def operation(func):
-        return func
+import agentops
+
+
+
+DEFAULT_LLM = LLM(
+    model=os.getenv("LLM_MODEL", "openai/gpt-4o-mini"),  # Better model for tool execution
+    temperature=float(os.getenv("LLM_MODEL_TEMPERATURE", "0.1")),
+    api_key=os.getenv("OPENAI_API_KEY")
+)
+current_time = datetime.utcnow()

 logger = logging.getLogger(__name__)

+logger.info(f"Current LLM model used: {os.getenv('LLM_MODEL', 'openai/gpt-4o-mini')}")
+
 # Create a thread pool for CPU-bound CrewAI operations
 executor = ThreadPoolExecutor(max_workers=10)

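The new module-level LLM setup above relies on `os` and `datetime` imports that sit outside this hunk. For reference, a minimal standalone sketch of the same env-driven configuration (assuming `crewai` is installed and `OPENAI_API_KEY` is set; the defaults mirror the template's):

```python
import os
from datetime import datetime

from crewai import LLM

# Mirrors the template's defaults; override via LLM_MODEL / LLM_MODEL_TEMPERATURE.
DEFAULT_LLM = LLM(
    model=os.getenv("LLM_MODEL", "openai/gpt-4o-mini"),
    temperature=float(os.getenv("LLM_MODEL_TEMPERATURE", "0.1")),
    api_key=os.getenv("OPENAI_API_KEY"),
)
current_time = datetime.utcnow()  # as in the template; datetime.now(timezone.utc) is the tz-aware variant
```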
@@ -68,7 +70,14 @@ class CustomEvent:


 class {{ class_name }}AgentExecutor(AgentExecutor):
-    """Agent executor for {{ agent_name }}."""
+    """Agent executor for {{ agent_name }}.
+
+    This executor supports:
+    - ✅ Concurrent requests from multiple clients (each gets fresh agent/task)
+    - ✅ Memory-enabled agents for better MCP tool usage learning
+    - ✅ D402 payment enforcement per request
+    - ✅ Streaming and non-streaming modes
+    """

     def __init__(self, mcp_config: MCPServerConfig, supports_streaming: bool = False):
         self.mcp_config = mcp_config
@@ -84,7 +93,6 @@ class {{ class_name }}AgentExecutor(AgentExecutor):
             tags=mcp_config.metadata.get("tags", [])
         )

-    @operation
     async def execute(self, context: RequestContext, event_queue: EventQueue) -> None:
         """Process a request using the {{ agent_name }} capabilities."""
         try:
@@ -94,7 +102,7 @@ class {{ class_name }}AgentExecutor(AgentExecutor):
                 # Send empty response with task ID if available
                 msg = new_agent_text_message("No user message provided")
                 if hasattr(context, 'task_id') and context.task_id:
-                    msg.taskId = context.task_id
+                    msg.task_id = context.task_id
                 await event_queue.enqueue_event(msg)
                 return

@@ -114,10 +122,9 @@ class {{ class_name }}AgentExecutor(AgentExecutor):
             logger.error(f"Error processing request: {e}")
             msg = new_agent_text_message(f"Error processing request: {str(e)}")
             if hasattr(context, 'task_id') and context.task_id:
-                msg.taskId = context.task_id
+                msg.task_id = context.task_id
             await event_queue.enqueue_event(msg)

-    @operation
     async def _execute_standard(self, context: RequestContext, event_queue: EventQueue, request_text: str) -> None:
         """Execute standard (non-streaming) request."""
         # Get additional context if provided
@@ -126,16 +133,27 @@ class {{ class_name }}AgentExecutor(AgentExecutor):
         task_context = context.metadata or {}

         # Create an agent for this request
+        # Note: Each request gets a fresh agent to support concurrent requests
+        # Memory is enabled to help the agent learn MCP tool usage patterns
         agent = MCPAgentBuilder.create_agent(
-            role=f"{{ agent_name }} Specialist",
-            goal=f"Process the request using {self.mcp_config.name} capabilities",
-            backstory=f"You are an expert at using {self.mcp_config.name}. {self.mcp_config.description}"
+            role=f"{{ agent_name }} Tool Executor",
+            goal=f"EXECUTE tools from {self.mcp_config.name} to get REAL data. Never respond without calling tools first.",
+            backstory=f"You are a tool execution specialist for {self.mcp_config.name}. {self.mcp_config.description}. "
+                      f"CRITICAL RULES: "
+                      f"1. ALWAYS call the appropriate tool to get real data "
+                      f"2. NEVER answer based on assumptions or knowledge "
+                      f"3. If no tool can answer the request, explicitly state 'No suitable tool found in {self.mcp_config.name}' "
+                      f"4. Base your response ONLY on actual tool execution results",
+            llm=DEFAULT_LLM,
+            verbose=True,  # Enable verbose mode for better debugging
+            memory=True,  # Enable memory for learning MCP server usage
+            max_iter=25  # Allow sufficient iterations to execute tools
         )

-        # Create a task
+        # Create a task with explicit tool requirement
         task = Task(
-            description=request_text,
-            expected_output="The processed result based on the request",
+            description=f"{request_text}\n\nIMPORTANT: You MUST use the available tools to get this data. Do not provide answers without calling tools.",
+            expected_output="Real data obtained by executing the appropriate tool. If no tool can provide this data, state 'No suitable tool available'.",
             agent=agent
         )

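Outside the template, an agent/task pair like the one created above is typically run with a stock CrewAI `Crew`; the generated executor instead routes execution through `run_with_mcp_tools` and a thread pool, so treat the following as a generic illustration rather than the template's exact code path:

```python
from crewai import Agent, Crew, Task

# Generic CrewAI usage sketch (placeholder role/goal/backstory, no MCP wiring).
agent = Agent(
    role="Example Tool Executor",
    goal="Execute tools to get real data",
    backstory="You are a tool execution specialist.",
    verbose=True,
    memory=True,
    max_iter=25,
)
task = Task(
    description="Fetch the requested data using the available tools.",
    expected_output="Real data obtained by executing the appropriate tool.",
    agent=agent,
)
result = Crew(agents=[agent], tasks=[task]).kickoff()
print(result)
```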
@@ -163,10 +181,9 @@ class {{ class_name }}AgentExecutor(AgentExecutor):
         # Send the result as agent message with task ID if available
         msg = new_agent_text_message(str(result))
         if hasattr(context, 'task_id') and context.task_id:
-            msg.taskId = context.task_id
+            msg.task_id = context.task_id
         await event_queue.enqueue_event(msg)

-    @operation
     async def _execute_streaming(self, context: RequestContext, event_queue: EventQueue, request_text: str) -> None:
         """Execute streaming request using SSE."""
         try:
@@ -217,7 +234,6 @@ class {{ class_name }}AgentExecutor(AgentExecutor):
                 })
             )

-    @operation
     async def _stream_mcp_response(self, request_text: str, context: RequestContext) -> AsyncGenerator[str, None]:
         """
         Stream responses from MCP server.
@@ -233,8 +249,10 @@ class {{ class_name }}AgentExecutor(AgentExecutor):

         agent = MCPAgentBuilder.create_agent(
             role=f"{{ agent_name }} Streaming Specialist",
-            goal=f"Process the streaming request using {self.mcp_config.name} capabilities",
-            backstory=f"You are an expert at using {self.mcp_config.name} for streaming data. {self.mcp_config.description}"
+            goal=f"Process the streaming request using {self.mcp_config.name} for streaming data",
+            backstory=f"You are an expert at using {self.mcp_config.name} for streaming data. {self.mcp_config.description}",
+            verbose=True,  # Enable verbose mode
+            memory=True  # Enable memory for learning MCP server usage
         )

         task = Task(
traia_iatp/server/templates/env.example.j2

@@ -9,12 +9,15 @@ HOST=0.0.0.0
 AGENTOPS_API_KEY=your-agentops-api-key-here

 # Language Model Configuration
-LLM_MODEL=openai/gpt-4.1
+LLM_MODEL=openai/gpt-4.1-nano
+LLM_MODEL_TEMPERATURE=0.7
+EMBEDDINGS_OPENAI_MODEL_NAME=text-embedding-3-small
+
 # You can also use other models:
 # LLM_MODEL=vertex_ai/gemini-2.0-flash
-# LLM_MODEL=anthropic/claude-4-sonnetf
+# LLM_MODEL=anthropic/claude-3-5-sonnet-20241022

-# API Keys (add as needed based on your LLM choice)
+# API Keys (populated by run_local_docker.sh from runtime environment)
 # OPENAI_API_KEY=your-openai-key-here
 # ANTHROPIC_API_KEY=your-anthropic-key-here
 # GOOGLE_API_KEY=your-google-key-here
@@ -22,7 +25,8 @@ LLM_MODEL=openai/gpt-4.1
 # VERTEX_LOCATION=us-central1

 # MCP Server Configuration
-MCP_SERVER_URL={{ mcp_server_url }}
+# Note: localhost is auto-replaced with host.docker.internal for Docker compatibility
+MCP_SERVER_URL={{ mcp_server_url.replace('localhost', 'host.docker.internal') }}
 {% if requires_api_key and api_keys %}
 # MCP Server API Keys (REQUIRED - The MCP server requires authentication)
 # Set one of the following environment variables:
@@ -64,6 +68,30 @@ LOG_LEVEL=INFO
 {% endfor %}
 {% endif %}

+{% if d402_enabled %}
+# D402 Payment Configuration
+UTILITY_AGENT_CONTRACT_ADDRESS={{ d402_contract_address }}
+UTILITY_AGENT_OPERATOR_PRIVATE_KEY={{ d402_operator_private_key }}
+D402_ENABLED=true
+D402_PRICE_USD={{ d402_price_usd }}
+D402_TOKEN_SYMBOL={{ d402_token_symbol }}
+D402_TOKEN_ADDRESS={{ d402_token_address }}
+D402_TOKEN_DECIMALS={{ d402_token_decimals }}
+D402_NETWORK={{ d402_network }}
+D402_FACILITATOR_URL={{ d402_facilitator_url.replace('localhost', 'host.docker.internal') }}
+D402_TESTING_MODE={{ d402_testing_mode }}
+{% else %}
+# D402 Payment Configuration (disabled)f
+# D402_ENABLED=false
+# D402_PRICE_USD=0.01
+# D402_TOKEN_SYMBOL=USDC
+# D402_TOKEN_ADDRESS=
+# D402_TOKEN_DECIMALS=6
+# D402_NETWORK=sepolia
+# D402_FACILITATOR_URL=http://localhost:7070
+# D402_TESTING_MODE=false
+{% endif %}
+
 # Additional custom environment variables
 {% for env_var in environment_variables %}
 # {{ env_var.name }}={{ env_var.value }}
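The rendered `.env` exposes the D402 payment settings as plain environment variables. A hedged sketch of how a consumer might load them into a typed config (illustrative only; the package's own `traia_iatp.d402` modules define their actual configuration handling):

```python
import os
from dataclasses import dataclass

from dotenv import load_dotenv  # python-dotenv, already a template dependency


@dataclass
class D402Settings:
    enabled: bool
    price_usd: float
    token_symbol: str
    token_address: str
    token_decimals: int
    network: str
    facilitator_url: str
    testing_mode: bool


def load_d402_settings() -> D402Settings:
    """Read the D402_* variables written to the generated .env file."""
    load_dotenv()
    return D402Settings(
        enabled=os.getenv("D402_ENABLED", "false").lower() == "true",
        price_usd=float(os.getenv("D402_PRICE_USD", "0.01")),
        token_symbol=os.getenv("D402_TOKEN_SYMBOL", "USDC"),
        token_address=os.getenv("D402_TOKEN_ADDRESS", ""),
        token_decimals=int(os.getenv("D402_TOKEN_DECIMALS", "6")),
        network=os.getenv("D402_NETWORK", "sepolia"),
        facilitator_url=os.getenv("D402_FACILITATOR_URL", "http://localhost:7070"),
        testing_mode=os.getenv("D402_TESTING_MODE", "false").lower() == "true",
    )
```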
traia_iatp/server/templates/gitignore.j2

@@ -53,6 +53,13 @@ uv.lock
 # Docker
 *.pid
 docker-compose.override.yml
+.docker-iatp/ # Local IATP copy for Docker builds
+pyproject.toml.backup # Backup during local IATP builds
+pyproject.toml.tmp # Temporary sed file
+
+# Test results
+test_results.log
+*.md # Test reports

 # {{ agent_name }} specific
 {% if additional_ignores %}
traia_iatp/server/templates/pyproject.toml.j2

@@ -5,19 +5,21 @@ description = "{{ agent_description }}"
 readme = "README.md"
 requires-python = ">=3.12"
 dependencies = [
-    "crewai>=0.130.0",
-    "crewai-tools[mcp]>=0.47.1",
-    "agentops>=0.3.0",
+    "crewai>=0.203.1",
+    "crewai-tools[mcp]>=0.76.0",
+    "agentops>=0.4.21",
     "a2a-sdk>=0.2.6",
-    "fastmcp>=2.8.1",
+    "fastmcp>=2.12.5",
     "hypercorn>=0.17.3", # HTTP/2 support via h2 dependency
     "httpx[http2]>=0.28.1", # HTTP/2 support with http2 extra
-    "starlette>=0.46.2", # For SSE streaming support
-    "sse-starlette>=2.3.6", # Server-Sent Events support
-    "pydantic>=2.11.0",
-    "python-dotenv>=1.1.0",
-    "pymongo>=4.13.0", # Required for MCP registry
-    "traia-iatp", # Core IATP functionality - always use latest version
+    "starlette>=0.48.0", # For SSE streaming support
+    "sse-starlette>=3.0.2", # Server-Sent Events support
+    "pydantic>=2.12.2",
+    "python-dotenv>=1.1.1",
+    "pymongo>=4.15.3", # Required for MCP registry
+    "traia-iatp>=0.1.67", # Core IATP functionality with d402 and RPC fallback support
+    "eth-account>=0.11.0", # For x402 signing
+    "web3>=6.15.0", # For x402 blockchain interaction
 {% for dep in additional_dependencies %}
     "{{ dep }}",
 {% endfor %}
@@ -30,8 +32,7 @@ grpc = [
     "grpcio-reflection>=1.71.0",
 ]

-[tool.uv]
-dev-dependencies = [
+dev = [
     "pytest>=7.0.0",
     "pytest-asyncio>=0.21.0",
     "black>=23.0.0",
traia_iatp/server/templates/run_local_docker.sh.j2

@@ -11,17 +11,73 @@ YELLOW='\033[1;33m'
 RED='\033[0;31m'
 NC='\033[0m' # No Color

-# Load environment variables from .env if it exists
-if [ -f .env ]; then
-    echo -e "${BLUE}📋 Loading environment variables from .env file...${NC}"
-    set -a # Export all variables
-    source .env
-    set +a # Stop exporting
+# Create .env from .env.example if it doesn't exist
+if [ ! -f .env ]; then
+    if [ -f .env.example ]; then
+        echo -e "${BLUE}📋 Creating .env from .env.example...${NC}"
+        cp .env.example .env
+        echo -e "${GREEN}✅ Created .env file${NC}"
+    else
+        echo -e "${RED}❌ No .env.example file found${NC}"
+        exit 1
+    fi
 else
-    echo -e "${YELLOW}⚠️ No .env file found. Using defaults.${NC}"
-    echo -e "${YELLOW} Copy .env.example to .env and configure as needed.${NC}"
+    echo -e "${GREEN} .env file already exists${NC}"
 fi

+# Populate missing secrets from runtime environment or main repo .env
+populate_secrets() {
+    echo -e "${BLUE}🔍 Populating secrets from runtime environment...${NC}"
+
+    # List of secret env vars to populate (prioritize runtime env, fallback to repo .env)
+    secret_vars=("OPENAI_API_KEY" "ANTHROPIC_API_KEY" "GOOGLE_API_KEY" "AGENTOPS_API_KEY")
+
+    for var in "${secret_vars[@]}"; do
+        # Check if variable is missing or commented out in local .env
+        if ! grep -q "^${var}=" .env 2>/dev/null; then
+            # First try to get value from current runtime environment
+            value="${!var:-}"
+
+            # If not in runtime env, try to find main repo .env file
+            if [ -z "$value" ]; then
+                local repo_env=""
+                if [ -f "../../../../.env" ]; then
+                    repo_env="../../../../.env"
+                elif [ -f "../../../.env" ]; then
+                    repo_env="../../../.env"
+                elif [ -f "../../.env" ]; then
+                    repo_env="../../.env"
+                fi
+
+                if [ -n "$repo_env" ] && [ -f "$repo_env" ]; then
+                    value=$(grep "^${var}=" "$repo_env" 2>/dev/null | cut -d'=' -f2-)
+                fi
+            fi
+
+            # If we found a value (from either source), add it to .env
+            if [ -n "$value" ]; then
+                echo -e "${GREEN} ✅ Adding ${var}${NC}"
+                # Replace commented placeholder with actual value
+                sed -i.bak "s|^# ${var}=.*|${var}=${value}|" .env
+            else
+                echo -e "${YELLOW} ⚠️ ${var} not found in runtime env or repo .env${NC}"
+            fi
+        else
+            echo -e "${GREEN} ✅ ${var} already set in .env${NC}"
+        fi
+    done
+}
+
+# Load environment variables from .env
+echo -e "${BLUE}📋 Loading environment variables from .env file...${NC}"
+
+# Populate secrets before loading (from runtime env or repo .env)
+populate_secrets
+
+set -a # Export all variables
+source .env
+set +a # Stop exporting
+
 # Configuration with defaults
 IMAGE_NAME="{{ agent_id }}"
 CONTAINER_NAME="{{ agent_id }}-local"
@@ -45,10 +101,86 @@ if docker ps -a | grep -q $CONTAINER_NAME; then
     docker rm $CONTAINER_NAME >/dev/null 2>&1 || true
 fi

+# Temporarily modify pyproject.toml to use /tmp/IATP for Docker build (if in development mode)
+# Check for local IATP path
+IATP_PATH=""
+for possible_iatp in "../../../../IATP" "../../../IATP" "../../IATP"; do
+    if [ -d "$possible_iatp" ] && [ -f "$possible_iatp/pyproject.toml" ]; then
+        IATP_PATH="$possible_iatp"
+        break
+    fi
+done
+
+if [ -n "$IATP_PATH" ]; then
+    echo -e "${BLUE}📦 Found local IATP at: $IATP_PATH${NC}"
+    echo -e "${BLUE}🔧 Preparing for local IATP build...${NC}"
+
+    # Backup pyproject.toml
+    cp pyproject.toml pyproject.toml.backup
+
+    # Modify to use /tmp/IATP (Docker internal path)
+    sed -i.tmp 's|"traia-iatp>=0\.1[^"]*"|"traia-iatp @ file:///tmp/IATP"|g' pyproject.toml
+    rm -f pyproject.toml.tmp
+    echo -e "${GREEN}✅ pyproject.toml configured for local IATP build${NC}"
+fi
+
 # Build the Docker image
 echo -e "${BLUE}🔨 Building Docker image...${NC}"
+
+# Check if pyproject.toml uses local IATP path (for local development)
+if grep -q "file:///tmp/IATP" pyproject.toml 2>/dev/null; then
+    # Extract IATP path from pyproject.toml
+    IATP_PATH_FROM_TOML=$(grep "file://" pyproject.toml | sed -E 's|.*file://([^"]+).*|\1|' | head -1)
+
+    # If path is /tmp/IATP (Docker path), find the actual local IATP path
+    if [ "$IATP_PATH_FROM_TOML" = "/tmp/IATP" ] || [ ! -d "$IATP_PATH_FROM_TOML" ]; then
+        # Try to find local IATP path (common locations)
+        if [ -d "/Users/eitanlavi/workspace/traia/IATP" ]; then
+            IATP_PATH="/Users/eitanlavi/workspace/traia/IATP"
+        elif [ -d "$(dirname $(pwd))/IATP" ]; then
+            IATP_PATH="$(dirname $(pwd))/IATP"
+        elif [ -d "$HOME/workspace/traia/IATP" ]; then
+            IATP_PATH="$HOME/workspace/traia/IATP"
+        elif [ -n "$LOCAL_IATP_PATH" ] && [ -d "$LOCAL_IATP_PATH" ]; then
+            IATP_PATH="$LOCAL_IATP_PATH"
+        else
+            IATP_PATH=""
+        fi
+    else
+        IATP_PATH="$IATP_PATH_FROM_TOML"
+    fi
+
+    if [ -n "$IATP_PATH" ] && [ -d "$IATP_PATH" ]; then
+        echo -e "${BLUE}📦 Using local IATP package: $IATP_PATH${NC}"
+        # Copy IATP into build context for Docker (exclude .venv to save space!)
+        mkdir -p .docker-iatp
+
+        # Use rsync if available (much better), otherwise cp with find
+        if command -v rsync &> /dev/null; then
+            rsync -a --exclude='.venv' --exclude='__pycache__' --exclude='.git' --exclude='build' --exclude='dist' --exclude='*.egg-info' "$IATP_PATH/" .docker-iatp/IATP/
+            echo -e "${BLUE} Copied IATP to build context (excluded .venv, ${GREEN}saved ~900MB${BLUE})${NC}"
+        else
+            # Fallback: use cp but warn about size
+            cp -r "$IATP_PATH" .docker-iatp/IATP
+            echo -e "${YELLOW} ⚠️ rsync not found, copied full IATP (install rsync to save ~900MB)${NC}"
+        fi
+    else
+        echo -e "${YELLOW}⚠️ Local IATP path not found: $IATP_PATH_FROM_TOML${NC}"
+        echo -e "${YELLOW} Docker build will use published package${NC}"
+    fi
+fi
+
 docker build -t $IMAGE_NAME .

+# Cleanup .docker-iatp after build
+rm -rf .docker-iatp
+
+# Restore original pyproject.toml if we modified it
+if [ -f pyproject.toml.backup ]; then
+    mv pyproject.toml.backup pyproject.toml
+    echo -e "${BLUE}🔄 Restored original pyproject.toml${NC}"
+fi
+
 # Run the container
 echo -e "${BLUE}🏃 Starting container...${NC}"
 docker run -d \
@@ -78,7 +210,7 @@ echo -e "${GREEN}✅ {{ agent_name }} is running!${NC}"
 echo
 echo -e "${BLUE}📍 Connection Information:${NC}"
 echo -e " A2A Endpoint: ${GREEN}http://localhost:${HOST_PORT}/a2a${NC}"
-echo -e " Agent Info: ${GREEN}http://localhost:${HOST_PORT}/.well-known/agent.json${NC}"
+echo -e " Agent Info: ${GREEN}http://localhost:${HOST_PORT}/.well-known/agent-card.json${NC}"
 echo -e " Container Name: ${GREEN}${CONTAINER_NAME}${NC}"
 echo -e " Container ID: ${GREEN}${CONTAINER_ID:0:12}${NC}"
 echo
@@ -91,13 +223,13 @@ echo

 # Check if the server is responding
 echo -e "${YELLOW}🔍 Checking server health...${NC}"
-if curl -s -o /dev/null -w "%{http_code}" "http://localhost:${HOST_PORT}/.well-known/agent.json" | grep -q "200"; then
+if curl -s -o /dev/null -w "%{http_code}" "http://localhost:${HOST_PORT}/.well-known/agent-card.json" | grep -q "200"; then
     echo -e "${GREEN}✅ Server is healthy!${NC}"

     # Get and display agency info
     echo
     echo -e "${BLUE}📋 Agency Information:${NC}"
-    curl -s "http://localhost:${HOST_PORT}/.well-known/agent.json" | python -m json.tool || echo "Could not fetch agency info"
+    curl -s "http://localhost:${HOST_PORT}/.well-known/agent-card.json" | python -m json.tool || echo "Could not fetch agency info"
 else
     echo -e "${YELLOW}⚠️ Server may still be starting up. Check logs with: docker logs -f ${CONTAINER_NAME}${NC}"
 fi
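The well-known discovery path changes from `/.well-known/agent.json` to `/.well-known/agent-card.json` throughout the script. A hedged Python equivalent of the curl health check above (the port is a placeholder for the script's `HOST_PORT`):

```python
import httpx

AGENT_CARD_URL = "http://localhost:8080/.well-known/agent-card.json"  # 8080 = example HOST_PORT

try:
    response = httpx.get(AGENT_CARD_URL, timeout=5.0)
    if response.status_code == 200:
        print("✅ Server is healthy!")
        print(response.json())  # agent card metadata
    else:
        print(f"⚠️ Unexpected status: {response.status_code}")
except httpx.HTTPError as exc:
    print(f"⚠️ Server may still be starting up: {exc}")
```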