uipath 2.0.0.dev3__py3-none-any.whl → 2.0.1.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of uipath might be problematic.
- uipath/__init__.py +24 -0
- uipath/_cli/README.md +11 -0
- uipath/_cli/__init__.py +54 -0
- uipath/_cli/_auth/_auth_server.py +165 -0
- uipath/_cli/_auth/_models.py +51 -0
- uipath/_cli/_auth/_oidc_utils.py +69 -0
- uipath/_cli/_auth/_portal_service.py +163 -0
- uipath/_cli/_auth/_utils.py +51 -0
- uipath/_cli/_auth/auth_config.json +6 -0
- uipath/_cli/_auth/index.html +167 -0
- uipath/_cli/_auth/localhost.crt +25 -0
- uipath/_cli/_auth/localhost.key +27 -0
- uipath/_cli/_runtime/_contracts.py +429 -0
- uipath/_cli/_runtime/_logging.py +193 -0
- uipath/_cli/_runtime/_runtime.py +264 -0
- uipath/_cli/_templates/.psmdcp.template +9 -0
- uipath/_cli/_templates/.rels.template +5 -0
- uipath/_cli/_templates/[Content_Types].xml.template +9 -0
- uipath/_cli/_templates/main.py.template +25 -0
- uipath/_cli/_templates/package.nuspec.template +10 -0
- uipath/_cli/_utils/_common.py +24 -0
- uipath/_cli/_utils/_input_args.py +126 -0
- uipath/_cli/_utils/_parse_ast.py +542 -0
- uipath/_cli/cli_auth.py +97 -0
- uipath/_cli/cli_deploy.py +13 -0
- uipath/_cli/cli_init.py +113 -0
- uipath/_cli/cli_new.py +76 -0
- uipath/_cli/cli_pack.py +337 -0
- uipath/_cli/cli_publish.py +113 -0
- uipath/_cli/cli_run.py +133 -0
- uipath/_cli/middlewares.py +113 -0
- uipath/_config.py +6 -0
- uipath/_execution_context.py +83 -0
- uipath/_folder_context.py +62 -0
- uipath/_models/__init__.py +37 -0
- uipath/_models/action_schema.py +26 -0
- uipath/_models/actions.py +64 -0
- uipath/_models/assets.py +48 -0
- uipath/_models/connections.py +51 -0
- uipath/_models/context_grounding.py +18 -0
- uipath/_models/context_grounding_index.py +60 -0
- uipath/_models/exceptions.py +6 -0
- uipath/_models/interrupt_models.py +28 -0
- uipath/_models/job.py +66 -0
- uipath/_models/llm_gateway.py +101 -0
- uipath/_models/processes.py +48 -0
- uipath/_models/queues.py +167 -0
- uipath/_services/__init__.py +26 -0
- uipath/_services/_base_service.py +250 -0
- uipath/_services/actions_service.py +271 -0
- uipath/_services/api_client.py +89 -0
- uipath/_services/assets_service.py +257 -0
- uipath/_services/buckets_service.py +268 -0
- uipath/_services/connections_service.py +185 -0
- uipath/_services/connections_service.pyi +50 -0
- uipath/_services/context_grounding_service.py +402 -0
- uipath/_services/folder_service.py +49 -0
- uipath/_services/jobs_service.py +265 -0
- uipath/_services/llm_gateway_service.py +311 -0
- uipath/_services/processes_service.py +168 -0
- uipath/_services/queues_service.py +314 -0
- uipath/_uipath.py +98 -0
- uipath/_utils/__init__.py +17 -0
- uipath/_utils/_endpoint.py +79 -0
- uipath/_utils/_infer_bindings.py +30 -0
- uipath/_utils/_logs.py +15 -0
- uipath/_utils/_request_override.py +18 -0
- uipath/_utils/_request_spec.py +23 -0
- uipath/_utils/_user_agent.py +16 -0
- uipath/_utils/constants.py +25 -0
- uipath/py.typed +0 -0
- {uipath-2.0.0.dev3.dist-info → uipath-2.0.1.dev1.dist-info}/METADATA +2 -3
- uipath-2.0.1.dev1.dist-info/RECORD +75 -0
- uipath-2.0.0.dev3.dist-info/RECORD +0 -4
- {uipath-2.0.0.dev3.dist-info → uipath-2.0.1.dev1.dist-info}/WHEEL +0 -0
- {uipath-2.0.0.dev3.dist-info → uipath-2.0.1.dev1.dist-info}/entry_points.txt +0 -0
uipath/_cli/_runtime/_logging.py
@@ -0,0 +1,193 @@
import logging
import os
import sys
from typing import Optional, TextIO, Union, cast


class PersistentLogsHandler(logging.FileHandler):
    """A simple log handler that always writes to a single file without rotation."""

    def __init__(self, file: str):
        """Initialize the handler to write logs to a single file, appending always.

        Args:
            file (str): The file where logs should be stored.
        """
        # Open file in append mode ('a'), so logs are not overwritten
        super().__init__(file, mode="a", encoding="utf8")

        self.formatter = logging.Formatter("[%(asctime)s][%(levelname)s] %(message)s")
        self.setFormatter(self.formatter)


class LogsInterceptor:
    """Intercepts all logging and stdout/stderr, routing to either persistent log files or stdout based on whether it's running as a job or not."""

    def __init__(
        self,
        min_level: Optional[str] = "INFO",
        dir: Optional[str] = "__uipath",
        file: Optional[str] = "execution.log",
        job_id: Optional[str] = None,
    ):
        """Initialize the log interceptor.

        Args:
            min_level: Minimum logging level to capture.
            dir (str): The directory where logs should be stored.
            file (str): The log file name.
            job_id (str, optional): If provided, logs go to file; otherwise, to stdout.
        """
        min_level = min_level or "INFO"
        self.job_id = job_id

        # Convert to numeric level for consistent comparison
        self.numeric_min_level = getattr(logging, min_level.upper(), logging.INFO)

        # Store the original disable level
        self.original_disable_level = logging.root.manager.disable

        self.root_logger = logging.getLogger()
        self.original_level = self.root_logger.level
        self.original_handlers = list(self.root_logger.handlers)

        self.original_stdout: Optional[TextIO] = None
        self.original_stderr: Optional[TextIO] = None

        self.log_handler: Union[PersistentLogsHandler, logging.StreamHandler[TextIO]]

        # Create either file handler (runtime) or stdout handler (debug)
        if self.job_id:
            # Ensure directory exists for file logging
            dir = dir or "__uipath"
            file = file or "execution.log"
            os.makedirs(dir, exist_ok=True)
            log_file = os.path.join(dir, file)
            self.log_handler = PersistentLogsHandler(file=log_file)
        else:
            # Use stdout handler when not running as a job
            self.log_handler = logging.StreamHandler(sys.stdout)
            formatter = logging.Formatter("[%(asctime)s][%(levelname)s] %(message)s")
            self.log_handler.setFormatter(formatter)

        self.log_handler.setLevel(self.numeric_min_level)
        self.logger = logging.getLogger("runtime")
        self.patched_loggers: set[str] = set()

        # Store system stdout/stderr
        self.sys_stdout = cast(TextIO, sys.__stdout__)
        self.sys_stderr = cast(TextIO, sys.__stderr__)

    def _clean_all_handlers(self, logger: logging.Logger) -> None:
        """Remove ALL handlers from a logger except ours."""
        handlers_to_remove = list(logger.handlers)
        for handler in handlers_to_remove:
            logger.removeHandler(handler)

        # Now add our handler
        logger.addHandler(self.log_handler)

    def setup(self) -> None:
        """Configure logging to use our persistent handler."""
        # Use global disable to prevent all logging below our minimum level
        if self.numeric_min_level > logging.NOTSET:
            logging.disable(self.numeric_min_level - 1)

        # Set root logger level
        self.root_logger.setLevel(self.numeric_min_level)

        # Remove ALL handlers from root logger and add only ours
        self._clean_all_handlers(self.root_logger)

        # Set up propagation for all existing loggers
        for logger_name in logging.root.manager.loggerDict:
            logger = logging.getLogger(logger_name)
            logger.propagate = False  # Prevent double-logging
            self._clean_all_handlers(logger)
            self.patched_loggers.add(logger_name)

        # Set up stdout/stderr redirection
        self._redirect_stdout_stderr()

    def _redirect_stdout_stderr(self) -> None:
        """Redirect stdout and stderr to the logging system."""
        self.original_stdout = sys.stdout
        self.original_stderr = sys.stderr

        class LoggerWriter:
            def __init__(
                self,
                logger: logging.Logger,
                level: int,
                min_level: int,
                sys_file: TextIO,
            ):
                self.logger = logger
                self.level = level
                self.min_level = min_level
                self.buffer = ""
                self.sys_file = sys_file  # Store reference to system stdout/stderr

            def write(self, message: str) -> None:
                if message and message.strip() and self.level >= self.min_level:
                    self.logger.log(self.level, message.rstrip())

            def flush(self) -> None:
                pass

            def fileno(self) -> int:
                # Return the file descriptor of the original system stdout/stderr
                return self.sys_file.fileno()

        # Set up stdout and stderr loggers with propagate=False
        stdout_logger = logging.getLogger("stdout")
        stdout_logger.propagate = False
        self._clean_all_handlers(stdout_logger)

        stderr_logger = logging.getLogger("stderr")
        stderr_logger.propagate = False
        self._clean_all_handlers(stderr_logger)

        # Use the min_level in the LoggerWriter to filter messages
        sys.stdout = LoggerWriter(
            stdout_logger, logging.INFO, self.numeric_min_level, self.sys_stdout
        )
        sys.stderr = LoggerWriter(
            stderr_logger, logging.ERROR, self.numeric_min_level, self.sys_stderr
        )

    def teardown(self) -> None:
        """Restore original logging configuration."""
        # Restore the original disable level
        logging.disable(self.original_disable_level)

        if self.log_handler in self.root_logger.handlers:
            self.root_logger.removeHandler(self.log_handler)

        for logger_name in self.patched_loggers:
            logger = logging.getLogger(logger_name)
            if self.log_handler in logger.handlers:
                logger.removeHandler(self.log_handler)

        self.root_logger.setLevel(self.original_level)
        for handler in self.original_handlers:
            if handler not in self.root_logger.handlers:
                self.root_logger.addHandler(handler)

        if self.original_stdout and self.original_stderr:
            sys.stdout = self.original_stdout
            sys.stderr = self.original_stderr

        self.log_handler.close()

    def __enter__(self):
        self.setup()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            self.logger.error(
                f"Exception occurred: {exc_val}", exc_info=(exc_type, exc_val, exc_tb)
            )
        self.teardown()
        return False
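For orientation, a minimal usage sketch of the interceptor above. This is hypothetical, not taken from the package: the import path simply follows the file layout listed at the top, and the module is internal to the CLI.

import logging

from uipath._cli._runtime._logging import LogsInterceptor

# With a job_id, output is routed to __uipath/execution.log; without one,
# everything goes to stdout instead (assumed values for illustration).
with LogsInterceptor(min_level="INFO", job_id="12345"):
    print("captured from stdout")                       # intercepted by LoggerWriter
    logging.getLogger("my.module").warning("captured")  # propagates to the shared handler
# On exit, teardown() restores the original handlers, stdout, and stderr.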
uipath/_cli/_runtime/_runtime.py
@@ -0,0 +1,264 @@
"""Python script runtime implementation for executing and managing python scripts."""

import importlib.util
import inspect
import json
import logging
import os
from dataclasses import asdict, is_dataclass
from typing import Any, Dict, Optional, Type, TypeVar, cast, get_type_hints

from ._contracts import (
    UiPathBaseRuntime,
    UiPathErrorCategory,
    UiPathRuntimeError,
    UiPathRuntimeResult,
    UiPathRuntimeStatus,
)

logger = logging.getLogger(__name__)

T = TypeVar("T")


class UiPathRuntime(UiPathBaseRuntime):
    """Runtime for executing Python scripts."""

    async def execute(self) -> Optional[UiPathRuntimeResult]:
        """Execute the Python script with the provided input and configuration.

        Returns:
            Dictionary with execution results

        Raises:
            UiPathRuntimeError: If execution fails
        """
        await self.validate()

        try:
            if self.context.entrypoint is None:
                return None

            script_result = self._execute_python_script(
                self.context.entrypoint, self.context.input_json
            )

            if self.context.job_id is None:
                logger.info(script_result)

            self.context.result = UiPathRuntimeResult(
                output=script_result, status=UiPathRuntimeStatus.SUCCESSFUL
            )

            return self.context.result

        except Exception as e:
            if isinstance(e, UiPathRuntimeError):
                raise

            raise UiPathRuntimeError(
                "EXECUTION_ERROR",
                "Python script execution failed",
                f"Error: {str(e)}",
                UiPathErrorCategory.SYSTEM,
            ) from e

    async def validate(self) -> None:
        """Validate runtime inputs."""
        if not self.context.entrypoint:
            raise UiPathRuntimeError(
                "ENTRYPOINT_MISSING",
                "No entrypoint specified",
                "Please provide a path to a Python script.",
                UiPathErrorCategory.USER,
            )

        if not os.path.exists(self.context.entrypoint):
            raise UiPathRuntimeError(
                "ENTRYPOINT_NOT_FOUND",
                "Script not found",
                f"Script not found at path {self.context.entrypoint}.",
                UiPathErrorCategory.USER,
            )

        try:
            if self.context.input:
                self.context.input_json = json.loads(self.context.input)
            else:
                self.context.input_json = {}
        except json.JSONDecodeError as e:
            raise UiPathRuntimeError(
                "INPUT_INVALID_JSON",
                "Invalid JSON input",
                f"The input data is not valid JSON: {str(e)}",
                UiPathErrorCategory.USER,
            ) from e

    async def cleanup(self) -> None:
        """Cleanup runtime resources."""
        pass

    def _execute_python_script(self, script_path: str, input_data: Any) -> Any:
        """Execute the Python script with the given input."""
        spec = importlib.util.spec_from_file_location("dynamic_module", script_path)
        if not spec or not spec.loader:
            raise UiPathRuntimeError(
                "IMPORT_ERROR",
                "Module import failed",
                f"Could not load spec for {script_path}",
                UiPathErrorCategory.USER,
            )

        module = importlib.util.module_from_spec(spec)
        try:
            spec.loader.exec_module(module)
        except Exception as e:
            raise UiPathRuntimeError(
                "MODULE_EXECUTION_ERROR",
                "Module execution failed",
                f"Error executing module: {str(e)}",
                UiPathErrorCategory.USER,
            ) from e

        for func_name in ["main", "run", "execute"]:
            if hasattr(module, func_name):
                main_func = getattr(module, func_name)
                sig = inspect.signature(main_func)
                params = list(sig.parameters.values())

                # Case 1: No parameters
                if not params:
                    try:
                        result = main_func()
                        return (
                            self._convert_from_class(result)
                            if result is not None
                            else {}
                        )
                    except Exception as e:
                        raise UiPathRuntimeError(
                            "FUNCTION_EXECUTION_ERROR",
                            f"Error executing {func_name} function",
                            f"Error: {str(e)}",
                            UiPathErrorCategory.USER,
                        ) from e

                input_param = params[0]
                input_type = input_param.annotation

                # Case 2: Class or dataclass parameter
                if input_type != inspect.Parameter.empty and (
                    is_dataclass(input_type) or hasattr(input_type, "__annotations__")
                ):
                    try:
                        valid_type = cast(Type[Any], input_type)
                        typed_input = self._convert_to_class(input_data, valid_type)
                        result = main_func(typed_input)
                        return (
                            self._convert_from_class(result)
                            if result is not None
                            else {}
                        )
                    except Exception as e:
                        raise UiPathRuntimeError(
                            "FUNCTION_EXECUTION_ERROR",
                            f"Error executing {func_name} function with typed input",
                            f"Error: {str(e)}",
                            UiPathErrorCategory.USER,
                        ) from e

                # Case 3: Dict parameter
                else:
                    try:
                        result = main_func(input_data)
                        return (
                            self._convert_from_class(result)
                            if result is not None
                            else {}
                        )
                    except Exception as e:
                        raise UiPathRuntimeError(
                            "FUNCTION_EXECUTION_ERROR",
                            f"Error executing {func_name} function with dictionary input",
                            f"Error: {str(e)}",
                            UiPathErrorCategory.USER,
                        ) from e

        raise UiPathRuntimeError(
            "ENTRYPOINT_FUNCTION_MISSING",
            "No entry function found",
            f"No main function (main, run, or execute) found in {script_path}",
            UiPathErrorCategory.USER,
        )

    def _convert_to_class(self, data: Dict[str, Any], cls: Type[T]) -> T:
        """Convert a dictionary to either a dataclass or regular class instance."""
        if is_dataclass(cls):
            field_types = get_type_hints(cls)
            converted_data = {}

            for field_name, field_type in field_types.items():
                if field_name not in data:
                    continue

                value = data[field_name]
                if (
                    is_dataclass(field_type) or hasattr(field_type, "__annotations__")
                ) and isinstance(value, dict):
                    typed_field = cast(Type[Any], field_type)
                    value = self._convert_to_class(value, typed_field)
                converted_data[field_name] = value

            return cast(T, cls(**converted_data))
        else:
            sig = inspect.signature(cls.__init__)
            params = sig.parameters

            init_args = {}

            for param_name, param in params.items():
                if param_name == "self":
                    continue

                if param_name in data:
                    value = data[param_name]
                    param_type = (
                        param.annotation
                        if param.annotation != inspect.Parameter.empty
                        else Any
                    )

                    if (
                        is_dataclass(param_type)
                        or hasattr(param_type, "__annotations__")
                    ) and isinstance(value, dict):
                        typed_param = cast(Type[Any], param_type)
                        value = self._convert_to_class(value, typed_param)

                    init_args[param_name] = value
                elif param.default != inspect.Parameter.empty:
                    init_args[param_name] = param.default

            return cls(**init_args)

    def _convert_from_class(self, obj: Any) -> Dict[str, Any]:
        """Convert a class instance (dataclass or regular) to a dictionary."""
        if obj is None:
            return {}

        if is_dataclass(obj):
            # Make sure obj is an instance, not a class
            if isinstance(obj, type):
                return {}
            return asdict(obj)
        elif hasattr(obj, "__dict__"):
            result = {}
            for key, value in obj.__dict__.items():
                # Skip private attributes
                if not key.startswith("_"):
                    if hasattr(value, "__dict__") or is_dataclass(value):
                        result[key] = self._convert_from_class(value)
                    else:
                        result[key] = value
            return result
        return {} if obj is None else {str(type(obj).__name__): str(obj)}  # Fallback
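The conversion helpers above also handle nested dataclasses. A minimal, self-contained sketch of the round trip they implement, using only the standard library; the Person/Address types are made up for illustration and are not part of the package:

from dataclasses import asdict, dataclass

@dataclass
class Address:
    city: str

@dataclass
class Person:
    name: str
    address: Address

data = {"name": "Ada", "address": {"city": "London"}}
# _convert_to_class builds the typed instance field by field, recursing into
# nested dataclass fields; the effect is equivalent to:
person = Person(name=data["name"], address=Address(**data["address"]))
# _convert_from_class flattens the result back to a plain dict, as asdict does:
assert asdict(person) == data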
uipath/_cli/_templates/.psmdcp.template
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<coreProperties xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.openxmlformats.org/package/2006/metadata/core-properties">
  <dc:creator>$creator</dc:creator>
  <dc:description>$description</dc:description>
  <dc:identifier>$projectName</dc:identifier>
  <version>$packageVersion</version>
  <keywords></keywords>
  <lastModifiedBy>NuGet.Packaging, Version=6.12.1.1, Culture=neutral, PublicKeyToken=$publicKeyToken;.NET 5.0</lastModifiedBy>
</coreProperties>
uipath/_cli/_templates/.rels.template
@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<Relationships xmlns="http://schemas.openxmlformats.org/package/2006/relationships">
  <Relationship Type="http://schemas.microsoft.com/packaging/2010/07/manifest" Target="$nuspecPath" Id="$nuspecId" />
  <Relationship Type="http://schemas.openxmlformats.org/package/2006/relationships/metadata/core-properties" Target="$psmdcpPath" Id="$psmdcpId" />
</Relationships>
uipath/_cli/_templates/[Content_Types].xml.template
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<Types xmlns="http://schemas.openxmlformats.org/package/2006/content-types">
  <Default Extension="rels" ContentType="application/vnd.openxmlformats-package.relationships+xml" />
  <Default Extension="psmdcp" ContentType="application/vnd.openxmlformats-package.core-properties+xml" />
  <Default Extension="json" ContentType="application/octet" />
  <Default Extension="py" ContentType="application/octet" />
  <Default Extension="txt" ContentType="application/octet" />
  <Default Extension="nuspec" ContentType="application/octet" />
</Types>
uipath/_cli/_templates/main.py.template
@@ -0,0 +1,25 @@
from dataclasses import dataclass
from typing import Optional


@dataclass
class EchoIn:
    message: str
    repeat: Optional[int] = 1
    prefix: Optional[str] = None


@dataclass
class EchoOut:
    message: str


def main(input: EchoIn) -> EchoOut:
    result = []
    for _ in range(input.repeat):
        line = input.message
        if input.prefix:
            line = f"{input.prefix}: {line}"
        result.append(line)

    return EchoOut(message="\n".join(result))
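Called directly, the template's entry function behaves as follows (a hypothetical invocation, assuming main and EchoIn from the template are in scope):

print(main(EchoIn(message="hi", repeat=2, prefix="echo")))
# EchoOut(message='echo: hi\necho: hi')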
uipath/_cli/_templates/package.nuspec.template
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2011/10/nuspec.xsd">
  <metadata>
    <id>$packageName</id>
    <version>$packageVersion</version>
    <authors>$authors</authors>
    <requireLicenseAcceptance>false</requireLicenseAcceptance>
    <description>$description</description>
  </metadata>
</package>
uipath/_cli/_utils/_common.py
@@ -0,0 +1,24 @@
import click


def environment_options(function):
    function = click.option(
        "--alpha",
        "domain",
        flag_value="alpha",
        help="Use alpha environment",
    )(function)
    function = click.option(
        "--staging",
        "domain",
        flag_value="staging",
        help="Use staging environment",
    )(function)
    function = click.option(
        "--cloud",
        "domain",
        flag_value="cloud",
        default=True,
        help="Use production environment",
    )(function)
    return function
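A minimal sketch of how this decorator could be attached to a Click command; the command itself is hypothetical, and the import path simply follows the file layout listed above:

import click

from uipath._cli._utils._common import environment_options

@click.command()
@environment_options
def whoami(domain: str) -> None:
    # domain defaults to "cloud"; --alpha or --staging select the other environments
    click.echo(f"Selected environment: {domain}")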
uipath/_cli/_utils/_input_args.py
@@ -0,0 +1,126 @@
import importlib.util
import inspect
import sys
from dataclasses import fields, is_dataclass
from types import ModuleType
from typing import (
    Any,
    Dict,
    List,
    Literal,
    Union,
    get_args,
    get_origin,
    get_type_hints,
)

SchemaType = Literal["object", "integer", "double", "string", "boolean", "array"]

TYPE_MAP: Dict[str, SchemaType] = {
    "int": "integer",
    "float": "double",
    "str": "string",
    "bool": "boolean",
    "list": "array",
    "dict": "object",
    "List": "array",
    "Dict": "object",
}


def get_type_schema(type_hint: Any) -> Dict[str, Any]:
    """Convert a type hint to a JSON schema."""
    if type_hint is None or type_hint == inspect.Parameter.empty:
        return {"type": "object"}

    origin = get_origin(type_hint)
    args = get_args(type_hint)

    if origin is Union:
        if type(None) in args:
            real_type = next(arg for arg in args if arg is not type(None))
            return get_type_schema(real_type)
        return {"type": "object"}

    if origin in (list, List):
        item_type = args[0] if args else Any
        return {"type": "array", "items": get_type_schema(item_type)}

    if origin in (dict, Dict):
        return {"type": "object"}

    if inspect.isclass(type_hint):
        if is_dataclass(type_hint):
            properties = {}
            required = []

            for field in fields(type_hint):
                field_schema = get_type_schema(field.type)
                properties[field.name] = field_schema
                if field.default == field.default_factory:
                    required.append(field.name)

            return {"type": "object", "properties": properties, "required": required}
        elif hasattr(type_hint, "__annotations__"):
            properties = {}
            required = []

            for name, field_type in type_hint.__annotations__.items():
                field_schema = get_type_schema(field_type)
                properties[name] = field_schema
                # For regular classes, we'll consider all annotated fields as required
                # unless they have a default value in __init__
                if hasattr(type_hint, "__init__"):
                    sig = inspect.signature(type_hint.__init__)
                    if (
                        name in sig.parameters
                        and sig.parameters[name].default == inspect.Parameter.empty
                    ):
                        required.append(name)
                else:
                    required.append(name)

            return {"type": "object", "properties": properties, "required": required}

    type_name = type_hint.__name__ if hasattr(type_hint, "__name__") else str(type_hint)
    schema_type = TYPE_MAP.get(type_name, "object")

    return {"type": schema_type}


def load_module(file_path: str) -> ModuleType:
    """Load a Python module from file path."""
    spec = importlib.util.spec_from_file_location("dynamic_module", file_path)
    if not spec or not spec.loader:
        raise ImportError(f"Could not load spec for {file_path}")

    module = importlib.util.module_from_spec(spec)
    sys.modules["dynamic_module"] = module
    spec.loader.exec_module(module)
    return module


def generate_args(path: str) -> Dict[str, Dict[str, Any]]:
    """Generate input/output schema from main function type hints."""
    module = load_module(path)

    main_func = None
    for func_name in ["main", "run", "execute"]:
        if hasattr(module, func_name):
            main_func = getattr(module, func_name)
            break

    if not main_func:
        raise ValueError("No main function found in module")

    hints = get_type_hints(main_func)
    sig = inspect.signature(main_func)

    if not sig.parameters:
        return {"input": {}, "output": get_type_schema(hints.get("return", None))}

    input_param_name = next(iter(sig.parameters))
    input_schema = get_type_schema(hints.get(input_param_name))
    output_schema = get_type_schema(hints.get("return"))

    return {"input": input_schema, "output": output_schema}
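Run against the main.py template shown earlier, the generator returns an input/output schema pair roughly like the sketch below. This is a hypothetical call: the import path follows the file layout above, and the field-level type entries are elided because they depend on how the annotations resolve at runtime.

from uipath._cli._utils._input_args import generate_args

schema = generate_args("main.py")
# Approximate shape of the result for EchoIn/EchoOut:
# {
#     "input": {
#         "type": "object",
#         "properties": {"message": {...}, "repeat": {...}, "prefix": {...}},
#         "required": ["message"],
#     },
#     "output": {
#         "type": "object",
#         "properties": {"message": {...}},
#         "required": ["message"],
#     },
# }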