reasoning_deployment_service-0.2.8-py3-none-any.whl
This diff represents the content of a publicly available package version that has been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of reasoning-deployment-service might be problematic.
- examples/programmatic_usage.py +154 -0
- reasoning_deployment_service/__init__.py +25 -0
- reasoning_deployment_service/cli_editor/__init__.py +5 -0
- reasoning_deployment_service/cli_editor/api_client.py +666 -0
- reasoning_deployment_service/cli_editor/cli_runner.py +343 -0
- reasoning_deployment_service/cli_editor/config.py +82 -0
- reasoning_deployment_service/cli_editor/google_deps.py +29 -0
- reasoning_deployment_service/cli_editor/reasoning_engine_creator.py +448 -0
- reasoning_deployment_service/gui_editor/__init__.py +5 -0
- reasoning_deployment_service/gui_editor/main.py +280 -0
- reasoning_deployment_service/gui_editor/requirements_minimal.txt +54 -0
- reasoning_deployment_service/gui_editor/run_program.sh +55 -0
- reasoning_deployment_service/gui_editor/src/__init__.py +1 -0
- reasoning_deployment_service/gui_editor/src/core/__init__.py +1 -0
- reasoning_deployment_service/gui_editor/src/core/api_client.py +647 -0
- reasoning_deployment_service/gui_editor/src/core/config.py +43 -0
- reasoning_deployment_service/gui_editor/src/core/google_deps.py +22 -0
- reasoning_deployment_service/gui_editor/src/core/reasoning_engine_creator.py +448 -0
- reasoning_deployment_service/gui_editor/src/ui/__init__.py +1 -0
- reasoning_deployment_service/gui_editor/src/ui/agent_space_view.py +312 -0
- reasoning_deployment_service/gui_editor/src/ui/authorization_view.py +280 -0
- reasoning_deployment_service/gui_editor/src/ui/reasoning_engine_view.py +354 -0
- reasoning_deployment_service/gui_editor/src/ui/reasoning_engines_view.py +204 -0
- reasoning_deployment_service/gui_editor/src/ui/ui_components.py +1221 -0
- reasoning_deployment_service/reasoning_deployment_service.py +687 -0
- reasoning_deployment_service-0.2.8.dist-info/METADATA +177 -0
- reasoning_deployment_service-0.2.8.dist-info/RECORD +29 -0
- reasoning_deployment_service-0.2.8.dist-info/WHEEL +5 -0
- reasoning_deployment_service-0.2.8.dist-info/top_level.txt +2 -0
reasoning_deployment_service/reasoning_deployment_service.py
@@ -0,0 +1,687 @@
import json, os, subprocess, yaml, sys
import urllib.parse, vertexai, google.auth
import requests as _requests
from typing import Tuple
from pathlib import Path
from dotenv import load_dotenv
from vertexai import agent_engines
from google.adk.agents import BaseAgent
from google.auth.transport.requests import Request as GoogleAuthRequest
from google.api_core.exceptions import NotFound
import logging
from datetime import datetime

DISCOVERY_ENGINE_URL = "https://discoveryengine.googleapis.com/v1alpha"

class ReasoningEngineDeploymentService:
    def __init__(self, root_agent: BaseAgent, deployment_environment: str="DEV"):
        # Setup logging
        self._setup_logging()

        self._check_required_files_exist()
        load_dotenv(dotenv_path=".env.agent", override=True)

        self.root_agent = root_agent
        self.deployment_env = deployment_environment
        self.attempt_to_use_existing_auth = False

        self._staging_bucket = None
        self._project_id = None
        self._project_number = None
        self._project_location = None
        self._oauth_client_id = None
        self._oauth_client_secret = None
        self._agent_space_engine = None


        self._load_agent_definition()
        self._load_deployment_environment_variables(deployment_environment=deployment_environment)
        self._check_requirements_file_present()

        self._http = _requests.Session()
        self._http.headers.update({"Content-Type": "application/json"})
        self.authenticate()

    def _setup_logging(self):
        os.makedirs("logs", exist_ok=True)

        # Setup logger
        self.logger = logging.getLogger("ReasoningEngineDeployment")
        self.logger.setLevel(logging.DEBUG)
        self.logger.propagate = False

        if not self.logger.handlers:
            self.log_filename = f"logs/deployment_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log"
            file_handler = logging.FileHandler(self.log_filename)
            file_handler.setLevel(logging.DEBUG)

            console_handler = logging.StreamHandler()
            console_handler.setLevel(logging.INFO)

            file_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            file_handler.setFormatter(file_formatter)

            console_formatter = logging.Formatter('%(message)s')
            console_handler.setFormatter(console_formatter)

            self.logger.addHandler(file_handler)
            self.logger.addHandler(console_handler)

        logging.getLogger('google').setLevel(logging.ERROR)
        logging.getLogger('urllib3').setLevel(logging.ERROR)
        logging.getLogger('requests').setLevel(logging.ERROR)

    def _log_record_file(self, level="ERROR"):
        """Log the current deployment record file contents to file only."""
        try:
            record = self._read_engine_deployment_record()
            if record:
                record_str = json.dumps(record, indent=2)
                # Write directly to the same log file without terminal output
                with open(self.log_filename, 'a') as f:
                    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S,%f')[:-3]
                    f.write(f"{timestamp} - ReasoningEngineDeployment - {level} - Current deployment record:\n{record_str}\n")
        except Exception as e:
            self.logger.error(f"Failed to read deployment record: {e}")

    def error(self, message: str):
        self.logger.error(f"[DEPLOYMENT SERVICE: CRITICAL FAILURE]: {message}")
        self._log_record_file("ERROR")

    def warning(self, message: str):
        self.logger.warning(f"[DEPLOYMENT SERVICE: WARNING]: {message}")

    def info(self, message: str):
        self.logger.info(f"[DEPLOYMENT SERVICE: INFO]: {message}")

    def _check_required_files_exist(self):
        end_run = False
        if not os.path.exists(".env.agent"):
            self.warning("Creating .env.agent file ... done")
            self._generate_env_agent()
            end_run = True

        if not os.path.exists("aix_agent.yaml"):
            self._generate_example_yaml_config()
            self.warning("Creating aix_agent.yaml file ... done")
            end_run = True

        self.warning("Please fill in the required values in the generated files and re-run the deployment.")

        if end_run:
            sys.exit(1)

    def _access_token(self) -> str:
        """Live: fetch ADC access token; raises if not available."""
        creds, _ = google.auth.default(scopes=["https://www.googleapis.com/auth/cloud-platform"])

        if not creds.valid or (creds.expired and creds.refresh_token):
            creds.refresh(GoogleAuthRequest())
        return creds.token

    def authenticate(self) -> bool:
        try:
            _ = self._access_token()
            return True
        except Exception:
            pass

        try:
            subprocess.run(["gcloud", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
        except Exception:
            raise RuntimeError("'gcloud' not found on PATH. Install Google Cloud SDK.")

        proc = subprocess.run(
            ["gcloud", "auth", "application-default", "login"],
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
        )
        if proc.returncode != 0:
            raise RuntimeError(f"ADC auth failed:\n{proc.stdout}")

        _ = self._access_token()
        return True

    def _get_deployment_record_path(self):
        return Path("deployments") / f"{self.deployment_env}_{self._reasoning_engine_name}.json"

    def _read_engine_deployment_record(self) -> dict:
        file_path = self._get_deployment_record_path()
        if os.path.exists(file_path):
            with open(file_path, "r") as f:
                return json.load(f) or {}
        return {}

    def _write_engine_deployment(self, json_payload: dict):
        if not os.path.exists("deployments"):
            self.info(f"Creating deployments directory")
            os.makedirs("deployments")

        file_path = self._get_deployment_record_path()
        read_engine_deployment = self._read_engine_deployment_record()

        if read_engine_deployment:
            read_engine_deployment.update(json_payload)

            with open(file_path, "w") as f:
                json.dump(read_engine_deployment, f, sort_keys=False)

            self.info(f"Successfully wrote entry {json_payload} to {file_path}")
            return

        with open(file_path, "w") as f:
            self.info(f"Successfully created {file_path}")
            json.dump(json_payload, f, sort_keys=False)
            self.info(f"Successfully wrote entry {json_payload} to {file_path}")

    def _generate_env_agent(self, path: str | Path = ".env.agent", overwrite: bool = False) -> Path:
        """
        Generate a .env.agent template file with all deployment profile
        and app environment variables, left empty for later filling.
        """
        path = Path(path)
        if path.exists() and not overwrite:
            raise FileExistsError(f"{path} already exists. Pass overwrite=True to replace it.")

        template = """#===================== **** DEPLOYMENT PROFILE **** =====================
# Development Profile
DEV_PROJECT_ID=
DEV_PROJECT_NUMBER=
DEV_PROJECT_LOCATION=
DEV_STAGING_BUCKET=
DEV_AGENT_SPACE_ENGINE=
DEV_API_TOKEN=
DEV_OAUTH_CLIENT_ID=
DEV_OAUTH_CLIENT_SECRET=

# Production Profile
PROD_PROJECT_ID=
PROD_PROJECT_NUMBER=
PROD_PROJECT_LOCATION=
PROD_STAGING_BUCKET=
PROD_AGENT_SPACE_ENGINE=
PROD_API_TOKEN=
PROD_OAUTH_CLIENT_ID=
PROD_OAUTH_CLIENT_SECRET=

REASONING_DEPLOYMENT_VERSION=0.2.4
#===================== **** DEPLOYMENT PROFILE **** =====================


#===================== **** YOUR APP ENV VARIABLES **** =====================
DEVELOPER=
#===================== **** YOUR APP ENV VARIABLES **** =====================
"""

        path.write_text(template.strip() + "\n")

        # Also update .gitignore to include logs
        self._update_gitignore()

        return path

    def _update_gitignore(self):
        """Add common ignore patterns to .gitignore."""
        gitignore_path = Path(".gitignore")
        patterns_to_add = [
            "logs/",
            "deployments/",
            ".env.agent",
            "__pycache__/",
            "*.pyc",
            ".venv/",
            "deploy_env/"
        ]

        existing_patterns = set()
        if gitignore_path.exists():
            existing_patterns = set(gitignore_path.read_text().splitlines())

        new_patterns = [p for p in patterns_to_add if p not in existing_patterns]

        if new_patterns:
            with open(gitignore_path, "a") as f:
                if existing_patterns:  # Add newline if file has content
                    f.write("\n")
                f.write("# Added by ReasoningEngineDeploymentService\n")
                for pattern in new_patterns:
                    f.write(f"{pattern}\n")

    def _generate_example_yaml_config(self, path: str | Path = "aix_agent.yaml", overwrite: bool = False) -> Path:
        """
        Create an example YAML config matching the requested schema.

        Structure:
          defaults:
            scopes: [ ... ]
            metadata: { ... }
            auth: { ... }
            environment_variables: [ ... ]
        """
        path = Path(path)
        if path.exists() and not overwrite:
            raise FileExistsError(f"{path} already exists. Pass overwrite=True to replace it.")

        config = {
            "defaults": {
                "scopes": [
                    "https://www.googleapis.com/auth/cloud-platform",
                    "https://www.googleapis.com/auth/userinfo.email",
                ],
                "metadata": {
                    "reasoning_engine_name": "reasoning-engine-dev",
                    "reasoning_engine_description": "A reasoning engine for development",
                    "agent_space_name": "Agent Space Dev Numba Three!",
                    "agent_space_description": "Agent spece description, lets go",
                    "agent_space_tool_description": "Agent space tool description",
                },
                "agent_folder": "your_agent_folder",
                "auth": {
                    "oauth_authorization_id": "test_auth_three",
                },
                "environment_variables": [
                    "DEVELOPER",
                ],
            }
        }

        path.write_text(yaml.safe_dump(config, sort_keys=False))
        return path

    def _load_agent_definition(self):
        try:
            with open("aix_agent.yaml", "r") as f:
                config = yaml.safe_load(f)
        except FileNotFoundError:
            self._generate_example_yaml_config()
            self.error("Could not locate a valid aix_agent.yaml file. Generating example file in your directory.")
            sys.exit(1)

        try:
            config = config['defaults']
            scopes = config['scopes']
            metadata = config['metadata']
            agent_folder = config['agent_folder']
            auth = config['auth']
            environment_variables = config['environment_variables']

            reasoning_engine_name = metadata['reasoning_engine_name']
            reasoning_engine_description = metadata['reasoning_engine_description']
            agent_space_name = metadata['agent_space_name']
            agent_space_description = metadata['agent_space_description']
            agent_space_tool_description = metadata["agent_space_tool_description"]

            self._required_scopes = scopes
            self._agent_folder = agent_folder
            self._reasoning_engine_name = reasoning_engine_name
            self._reasoning_engine_description = reasoning_engine_description
            self._agent_space_name = agent_space_name
            self._agent_space_description = agent_space_description
            self._agent_space_tool_description = agent_space_tool_description
            self._authorization_id = auth.get("oauth_authorization_id", None)
            self._environment_variables = environment_variables or []
        except KeyError as e:
            raise RuntimeError(f"Missing required key in aix_agent.yaml: {e}")

    def _load_deployment_environment_variables(self, deployment_environment: str):
        required_vars = ['PROJECT_ID', 'PROJECT_NUMBER', 'PROJECT_LOCATION', 'STAGING_BUCKET', 'AGENT_SPACE_ENGINE']

        for var in required_vars:
            env_var = f"{deployment_environment}_{var}"
            if env_var not in os.environ or not os.getenv(env_var):
                raise RuntimeError(f"Missing required environment variable: {env_var}.")

            setattr(self, f"_{var.lower()}", os.getenv(env_var))

        if self._authorization_id:
            required_auth_vars = ['OAUTH_CLIENT_ID', 'OAUTH_CLIENT_SECRET']

            for var in required_auth_vars:
                env_var = f"{deployment_environment}_{var}"
                if env_var not in os.environ or not os.getenv(env_var):
                    raise RuntimeError(f"Missing required environment variable: {env_var}")

                setattr(self, f"_{var.lower()}", os.getenv(env_var))

    def _check_requirements_file_present(self):
        if not os.path.exists("requirements.txt"):
            raise RuntimeError("Missing requirements.txt file")

    def _load_requirements(self):
        with open("requirements.txt", "r") as f:
            return [line.strip() for line in f if line.strip()]

    def create_reasoning_engine(self):
        vertexai.init(
            project=self._project_id,
            location=self._project_location,
            staging_bucket=self._staging_bucket,
        )

        creation = agent_engines.create(
            self.root_agent,
            display_name=self._reasoning_engine_name,
            description=self._reasoning_engine_description,
            requirements=self._load_requirements(),
            env_vars=self._environment_variables,
            extra_packages=[f"./{self._agent_folder}"]
        )

        if creation:
            self.info(f"Reasoning engine created successfully: {creation.resource_name}")

            json_payload = {"reasoning_engine_id": creation.resource_name}
            self._write_engine_deployment(json_payload)

    def update_reasoning_engine(self, reasoning_engine_id):
        vertexai.init(
            project=self._project_id,
            location=self._project_location,
            staging_bucket=self._staging_bucket,
        )

        try:
            updating = agent_engines.update(
                resource_name=reasoning_engine_id,
                agent_engine=self.root_agent,
                display_name=self._reasoning_engine_name,
                description=self._reasoning_engine_description,
                requirements=self._load_requirements(),
                env_vars=self._environment_variables,
                extra_packages=[f"./{self._agent_folder}"]
            )
        except NotFound as e:
            self.error(f"Reasoning engine {reasoning_engine_id} not found. Cannot update.")
            self.error(f"Please inspect using CLI, GUI or GCP Interface to identify root cause.")
            self.error(f"Deleting deployment record {self._get_deployment_record_path()} to allow re-creation.")
            self.error(f"System reported: {e}")
            os.remove(self._get_deployment_record_path())
            sys.exit(1)


        if updating:
            self.info(f"Reasoning engine updated successfully: {updating.resource_name}")

            self._write_engine_deployment({'reasoning_engine_id': updating.resource_name})

    def _get_agent_space_payload(self, reasoning_engine: str) -> Tuple[dict, dict]:
        headers = {
            "Authorization": f"Bearer {self._access_token()}",
            "Content-Type": "application/json",
            "X-Goog-User-Project": self._project_number,
        }
        payload = {
            "displayName": self._agent_space_name,
            "description": self._agent_space_description,
            "adk_agent_definition": {
                "tool_settings": {"tool_description": self._agent_space_tool_description},
                "provisioned_reasoning_engine": {"reasoning_engine": reasoning_engine},
            },
        }

        if self._authorization_id:
            payload["adk_agent_definition"]["authorizations"] = [
                f"projects/{self._project_number}/locations/global/authorizations/{self._authorization_id}"
            ]

        return headers, payload

    def _get_agent_space_agent_url_new(self):
        return (f"{DISCOVERY_ENGINE_URL}/projects/{self._project_number}/locations/global/collections/default_collection/"
                f"engines/{self._agent_space_engine}/assistants/default_assistant/agents")

    def _deploy_to_agent_space(self):
        get_reasoning_engine = self._read_engine_deployment_record()

        if not get_reasoning_engine or not get_reasoning_engine.get("reasoning_engine_id"):
            return ("failed", "Reasoning engine required before deploy", None)

        if get_reasoning_engine.get("agent_space_id"):
            self.warning("Agent space already exists; skipping creation.")

            return

        headers, payload = self._get_agent_space_payload(get_reasoning_engine["reasoning_engine_id"])
        url = self._get_agent_space_agent_url_new()
        r = self._http.post(url, headers=headers, json=payload, timeout=90)

        if r.status_code < 400:
            self.info("Agent space deployed successfully.")
            if self.attempt_to_use_existing_auth:
                self.warning("Using existing authorization.")
                self._write_engine_deployment({"authorization_id": self._authorization_id, 'scopes': self._required_scopes})
            self._write_engine_deployment({"agent_space_id": r.json().get("name")})
        else:
            # Log API failure details to file only
            with open(self.log_filename, 'a') as f:
                timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S,%f')[:-3]
                f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Agent space deployment failed with status {r.status_code} {r.reason}\n")
                f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - URL: {r.url}\n")
                f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Response: {r.text}\n")
                if r.headers.get('content-type', '').startswith('application/json'):
                    try:
                        error_json = r.json()
                        f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Error details: {json.dumps(error_json, indent=2)}\n")
                    except:
                        pass

            # Terminal message - simple
            self.error("Agent space deployment failed")
            # This will also log the record file

    def _update_in_agent_space(self):
        get_reasoning_engine = self._read_engine_deployment_record()

        if not get_reasoning_engine or not get_reasoning_engine.get("reasoning_engine_id"):
            return ("failed", "Reasoning engine required before update", None)

        if not get_reasoning_engine.get("agent_space_id"):
            self.warning("No agent space to update; skipping.")
            return

        headers, payload = self._get_agent_space_payload(get_reasoning_engine["reasoning_engine_id"])
        url = f'{DISCOVERY_ENGINE_URL}/' + get_reasoning_engine.get("agent_space_id")
        r = self._http.patch(url, headers=headers, json=payload, timeout=90)

        if r.status_code < 400:
            if self.attempt_to_use_existing_auth:
                self.info("Using existing authorization.")
                self._write_engine_deployment({"authorization_id": self._authorization_id, 'scopes': self._required_scopes})
            self.info("Agent space updated successfully.")
            return True
        else:
            # Log API failure details to file only
            with open(self.log_filename, 'a') as f:
                timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S,%f')[:-3]
                f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Failed to update agent space with status {r.status_code} {r.reason}\n")
                f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - URL: {r.url}\n")
                f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Response: {r.text}\n")
                if r.headers.get('content-type', '').startswith('application/json'):
                    try:
                        error_json = r.json()
                        f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Error details: {json.dumps(error_json, indent=2)}\n")
                    except:
                        pass

            # Terminal message - simple
            self.error("Failed to update agent space")
            # This will also log the record file
            return False

    def _build_authorization_uri(self, client_id: str, scopes: list[str]) -> str:
        base = "https://accounts.google.com/o/oauth2/auth"
        query = {
            "response_type": "code",
            "client_id": client_id,
            "scope": " ".join(scopes),
            "access_type": "offline",
            "prompt": "consent",
        }
        return f"{base}?{urllib.parse.urlencode(query)}"

    def _create_authorization(self) -> dict:
        read_authorizations = self._read_engine_deployment_record()

        if not self._authorization_id:
            self.warning("No authorization ID provided; skipping authorization creation.")

            return

        if read_authorizations and (read_authorizations.get("authorization_id") and read_authorizations.get("authorization_id") == self._authorization_id):
            self.warning("Authorization already exists; skipping creation.")

            return

        discovery_engine_url = "https://discoveryengine.googleapis.com/v1alpha"
        headers = {
            "Authorization": f"Bearer {self._access_token()}",
            "Content-Type": "application/json",
            "X-Goog-User-Project": self._project_number,
        }

        client_id = self._oauth_client_id or "your-client-id"
        client_secret = self._oauth_client_secret or "your-client-secret"

        payload = {
            "name": f"projects/{self._project_number}/locations/global/authorizations/{self._authorization_id}",
            "serverSideOauth2": {
                "clientId": client_id,
                "clientSecret": client_secret,
                "authorizationUri": self._build_authorization_uri(client_id, self._required_scopes),
                "tokenUri": "https://oauth2.googleapis.com/token",
            },
        }

        url = (
            f"{discovery_engine_url}/projects/{self._project_id}/locations/global/authorizations"
            f"?authorizationId={self._authorization_id}"
        )

        r = self._http.post(url, headers=headers, json=payload, timeout=60)

        if r.status_code < 400:
            payload = {"authorization_id": self._authorization_id, 'scopes': self._required_scopes}
            self._write_engine_deployment(payload)
            return True

        if r.status_code == 409:
            self.warning("Authorization conflict detected; attempting to use existing authorization.")
            self.logger.debug(f"Conflict response: {r.text}")
            self.attempt_to_use_existing_auth = True
        elif r.status_code >= 400:
            # Log API failure details to file only
            with open(self.log_filename, 'a') as f:
                timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S,%f')[:-3]
                f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Authorization creation failed with status {r.status_code} {r.reason}\n")
                f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - URL: {r.url}\n")
                f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Response: {r.text}\n")
                if r.headers.get('content-type', '').startswith('application/json'):
                    try:
                        error_json = r.json()
                        f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Error details: {json.dumps(error_json, indent=2)}\n")
                    except:
                        pass

            # Terminal message - simple
            self.error("Authorization creation failed")
            # This will also log the record file


    def _drop_authorization(self) -> bool:
        temp_auth = self._authorization_id
        self._authorization_id = None
        self._update_in_agent_space()
        self._authorization_id = temp_auth

        read_record = self._read_engine_deployment_record()
        read_record.pop("authorization_id", None)

        file_path = self._get_deployment_record_path()

        with open(file_path, "w") as f:
            json.dump(read_record, f, sort_keys=False, indent=2)

        self.info("Authorization dropped successfully")

        return True

    def _delete_authorization(self):
        if not self._authorization_id:
            self.warning("No authorization ID provided; skipping deletion.")
            return

        discovery_engine_url = "https://discoveryengine.googleapis.com/v1alpha"
        headers = {
            "Authorization": f"Bearer {self._access_token()}",
            "Content-Type": "application/json",
            "X-Goog-User-Project": self._project_number,
        }

        url = (
            f"{discovery_engine_url}/projects/{self._project_id}/locations/global/authorizations"
            f"?authorizationId={self._authorization_id}"
        )

        r = self._http.delete(url, headers=headers, timeout=60)

        if r.status_code < 400:
            self.info("Authorization deleted successfully.")
            self._authorization_id = None
            self._update_in_agent_space()
            return True

        # Log API failure details to file only
        with open(self.log_filename, 'a') as f:
            timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S,%f')[:-3]
            f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Failed to delete authorization with status {r.status_code} {r.reason}\n")
            f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - URL: {r.url}\n")
            f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Response: {r.text}\n")
            if r.headers.get('content-type', '').startswith('application/json'):
                try:
                    error_json = r.json()
                    f.write(f"{timestamp} - ReasoningEngineDeployment - ERROR - Error details: {json.dumps(error_json, indent=2)}\n")
                except:
                    pass

        # Terminal message - simple
        self.error("Failed to delete authorization")
        # This will also log the record file
        return False

    def one_deployment_with_everything_on_it(self, skip_engine_step=False):
        read_engine = self._read_engine_deployment_record()


        if not skip_engine_step:
            if read_engine and read_engine.get("reasoning_engine_id"):
                self.info("Updating reasoning engine ... ")
                self.update_reasoning_engine(read_engine["reasoning_engine_id"])
                self.info("Done!")
            else:
                self.create_reasoning_engine()
        else:
            if not read_engine or not read_engine.get("reasoning_engine_id"):
                self.warning("Skipping reasoning engine step, but no existing engine found in record.")
                self.warning("Please ensure the reasoning engine exists before proceeding. Ending Agent Space update attempt")

                sys.exit(1)

        needs_auth_reset = False

        if not read_engine or read_engine.get("authorization_id") != self._authorization_id:
            self.warning("Detected change in authorization ID")
            needs_auth_reset = True

        elif read_engine and read_engine.get("scopes") != self._required_scopes:
            self.warning("Detected change in authorization scopes")
            needs_auth_reset = True

        if needs_auth_reset:
            self.info("Resetting authorization...")
            self._drop_authorization()
            self._create_authorization()

        if not read_engine or (read_engine and not read_engine.get("agent_space_id")):
            self.info("Creating agent space ... ")
            self._deploy_to_agent_space()
        else:
            self._update_in_agent_space()