flyte 2.0.0b22__py3-none-any.whl → 2.0.0b30__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flyte/__init__.py +18 -2
- flyte/_bin/runtime.py +43 -5
- flyte/_cache/cache.py +4 -2
- flyte/_cache/local_cache.py +216 -0
- flyte/_code_bundle/_ignore.py +1 -1
- flyte/_code_bundle/_packaging.py +4 -4
- flyte/_code_bundle/_utils.py +14 -8
- flyte/_code_bundle/bundle.py +13 -5
- flyte/_constants.py +1 -0
- flyte/_context.py +4 -1
- flyte/_custom_context.py +73 -0
- flyte/_debug/constants.py +0 -1
- flyte/_debug/vscode.py +6 -1
- flyte/_deploy.py +223 -59
- flyte/_environment.py +5 -0
- flyte/_excepthook.py +1 -1
- flyte/_image.py +144 -82
- flyte/_initialize.py +95 -12
- flyte/_interface.py +2 -0
- flyte/_internal/controllers/_local_controller.py +65 -24
- flyte/_internal/controllers/_trace.py +1 -1
- flyte/_internal/controllers/remote/_action.py +13 -11
- flyte/_internal/controllers/remote/_client.py +1 -1
- flyte/_internal/controllers/remote/_controller.py +9 -4
- flyte/_internal/controllers/remote/_core.py +16 -16
- flyte/_internal/controllers/remote/_informer.py +4 -4
- flyte/_internal/controllers/remote/_service_protocol.py +7 -7
- flyte/_internal/imagebuild/docker_builder.py +139 -84
- flyte/_internal/imagebuild/image_builder.py +7 -13
- flyte/_internal/imagebuild/remote_builder.py +65 -13
- flyte/_internal/imagebuild/utils.py +51 -3
- flyte/_internal/resolvers/_task_module.py +5 -38
- flyte/_internal/resolvers/default.py +2 -2
- flyte/_internal/runtime/convert.py +42 -20
- flyte/_internal/runtime/entrypoints.py +24 -1
- flyte/_internal/runtime/io.py +21 -8
- flyte/_internal/runtime/resources_serde.py +20 -6
- flyte/_internal/runtime/reuse.py +1 -1
- flyte/_internal/runtime/rusty.py +20 -5
- flyte/_internal/runtime/task_serde.py +33 -27
- flyte/_internal/runtime/taskrunner.py +10 -1
- flyte/_internal/runtime/trigger_serde.py +160 -0
- flyte/_internal/runtime/types_serde.py +1 -1
- flyte/_keyring/file.py +39 -9
- flyte/_logging.py +79 -12
- flyte/_map.py +31 -12
- flyte/_module.py +70 -0
- flyte/_pod.py +2 -2
- flyte/_resources.py +213 -31
- flyte/_run.py +107 -41
- flyte/_task.py +66 -10
- flyte/_task_environment.py +96 -24
- flyte/_task_plugins.py +4 -2
- flyte/_trigger.py +1000 -0
- flyte/_utils/__init__.py +2 -1
- flyte/_utils/asyn.py +3 -1
- flyte/_utils/docker_credentials.py +173 -0
- flyte/_utils/module_loader.py +17 -2
- flyte/_version.py +3 -3
- flyte/cli/_abort.py +3 -3
- flyte/cli/_build.py +1 -3
- flyte/cli/_common.py +78 -7
- flyte/cli/_create.py +178 -3
- flyte/cli/_delete.py +23 -1
- flyte/cli/_deploy.py +49 -11
- flyte/cli/_get.py +79 -34
- flyte/cli/_params.py +8 -6
- flyte/cli/_plugins.py +209 -0
- flyte/cli/_run.py +127 -11
- flyte/cli/_serve.py +64 -0
- flyte/cli/_update.py +37 -0
- flyte/cli/_user.py +17 -0
- flyte/cli/main.py +30 -4
- flyte/config/_config.py +2 -0
- flyte/config/_internal.py +1 -0
- flyte/config/_reader.py +3 -3
- flyte/connectors/__init__.py +11 -0
- flyte/connectors/_connector.py +270 -0
- flyte/connectors/_server.py +197 -0
- flyte/connectors/utils.py +135 -0
- flyte/errors.py +10 -1
- flyte/extend.py +8 -1
- flyte/extras/_container.py +6 -1
- flyte/git/_config.py +11 -9
- flyte/io/__init__.py +2 -0
- flyte/io/_dataframe/__init__.py +2 -0
- flyte/io/_dataframe/basic_dfs.py +1 -1
- flyte/io/_dataframe/dataframe.py +12 -8
- flyte/io/_dir.py +551 -120
- flyte/io/_file.py +538 -141
- flyte/models.py +57 -12
- flyte/remote/__init__.py +6 -1
- flyte/remote/_action.py +18 -16
- flyte/remote/_client/_protocols.py +39 -4
- flyte/remote/_client/auth/_channel.py +10 -6
- flyte/remote/_client/controlplane.py +17 -5
- flyte/remote/_console.py +3 -2
- flyte/remote/_data.py +4 -3
- flyte/remote/_logs.py +3 -3
- flyte/remote/_run.py +47 -7
- flyte/remote/_secret.py +26 -17
- flyte/remote/_task.py +21 -9
- flyte/remote/_trigger.py +306 -0
- flyte/remote/_user.py +33 -0
- flyte/storage/__init__.py +6 -1
- flyte/storage/_parallel_reader.py +274 -0
- flyte/storage/_storage.py +185 -103
- flyte/types/__init__.py +16 -0
- flyte/types/_interface.py +2 -2
- flyte/types/_pickle.py +17 -4
- flyte/types/_string_literals.py +8 -9
- flyte/types/_type_engine.py +26 -19
- flyte/types/_utils.py +1 -1
- {flyte-2.0.0b22.data → flyte-2.0.0b30.data}/scripts/runtime.py +43 -5
- {flyte-2.0.0b22.dist-info → flyte-2.0.0b30.dist-info}/METADATA +8 -1
- flyte-2.0.0b30.dist-info/RECORD +192 -0
- flyte/_protos/__init__.py +0 -0
- flyte/_protos/common/authorization_pb2.py +0 -66
- flyte/_protos/common/authorization_pb2.pyi +0 -108
- flyte/_protos/common/authorization_pb2_grpc.py +0 -4
- flyte/_protos/common/identifier_pb2.py +0 -99
- flyte/_protos/common/identifier_pb2.pyi +0 -120
- flyte/_protos/common/identifier_pb2_grpc.py +0 -4
- flyte/_protos/common/identity_pb2.py +0 -48
- flyte/_protos/common/identity_pb2.pyi +0 -72
- flyte/_protos/common/identity_pb2_grpc.py +0 -4
- flyte/_protos/common/list_pb2.py +0 -36
- flyte/_protos/common/list_pb2.pyi +0 -71
- flyte/_protos/common/list_pb2_grpc.py +0 -4
- flyte/_protos/common/policy_pb2.py +0 -37
- flyte/_protos/common/policy_pb2.pyi +0 -27
- flyte/_protos/common/policy_pb2_grpc.py +0 -4
- flyte/_protos/common/role_pb2.py +0 -37
- flyte/_protos/common/role_pb2.pyi +0 -53
- flyte/_protos/common/role_pb2_grpc.py +0 -4
- flyte/_protos/common/runtime_version_pb2.py +0 -28
- flyte/_protos/common/runtime_version_pb2.pyi +0 -24
- flyte/_protos/common/runtime_version_pb2_grpc.py +0 -4
- flyte/_protos/imagebuilder/definition_pb2.py +0 -60
- flyte/_protos/imagebuilder/definition_pb2.pyi +0 -153
- flyte/_protos/imagebuilder/definition_pb2_grpc.py +0 -4
- flyte/_protos/imagebuilder/payload_pb2.py +0 -32
- flyte/_protos/imagebuilder/payload_pb2.pyi +0 -21
- flyte/_protos/imagebuilder/payload_pb2_grpc.py +0 -4
- flyte/_protos/imagebuilder/service_pb2.py +0 -29
- flyte/_protos/imagebuilder/service_pb2.pyi +0 -5
- flyte/_protos/imagebuilder/service_pb2_grpc.py +0 -66
- flyte/_protos/logs/dataplane/payload_pb2.py +0 -100
- flyte/_protos/logs/dataplane/payload_pb2.pyi +0 -177
- flyte/_protos/logs/dataplane/payload_pb2_grpc.py +0 -4
- flyte/_protos/secret/definition_pb2.py +0 -49
- flyte/_protos/secret/definition_pb2.pyi +0 -93
- flyte/_protos/secret/definition_pb2_grpc.py +0 -4
- flyte/_protos/secret/payload_pb2.py +0 -62
- flyte/_protos/secret/payload_pb2.pyi +0 -94
- flyte/_protos/secret/payload_pb2_grpc.py +0 -4
- flyte/_protos/secret/secret_pb2.py +0 -38
- flyte/_protos/secret/secret_pb2.pyi +0 -6
- flyte/_protos/secret/secret_pb2_grpc.py +0 -198
- flyte/_protos/secret/secret_pb2_grpc_grpc.py +0 -198
- flyte/_protos/validate/validate/validate_pb2.py +0 -76
- flyte/_protos/workflow/common_pb2.py +0 -27
- flyte/_protos/workflow/common_pb2.pyi +0 -14
- flyte/_protos/workflow/common_pb2_grpc.py +0 -4
- flyte/_protos/workflow/environment_pb2.py +0 -29
- flyte/_protos/workflow/environment_pb2.pyi +0 -12
- flyte/_protos/workflow/environment_pb2_grpc.py +0 -4
- flyte/_protos/workflow/node_execution_service_pb2.py +0 -26
- flyte/_protos/workflow/node_execution_service_pb2.pyi +0 -4
- flyte/_protos/workflow/node_execution_service_pb2_grpc.py +0 -32
- flyte/_protos/workflow/queue_service_pb2.py +0 -111
- flyte/_protos/workflow/queue_service_pb2.pyi +0 -168
- flyte/_protos/workflow/queue_service_pb2_grpc.py +0 -172
- flyte/_protos/workflow/run_definition_pb2.py +0 -123
- flyte/_protos/workflow/run_definition_pb2.pyi +0 -352
- flyte/_protos/workflow/run_definition_pb2_grpc.py +0 -4
- flyte/_protos/workflow/run_logs_service_pb2.py +0 -41
- flyte/_protos/workflow/run_logs_service_pb2.pyi +0 -28
- flyte/_protos/workflow/run_logs_service_pb2_grpc.py +0 -69
- flyte/_protos/workflow/run_service_pb2.py +0 -137
- flyte/_protos/workflow/run_service_pb2.pyi +0 -185
- flyte/_protos/workflow/run_service_pb2_grpc.py +0 -446
- flyte/_protos/workflow/state_service_pb2.py +0 -67
- flyte/_protos/workflow/state_service_pb2.pyi +0 -76
- flyte/_protos/workflow/state_service_pb2_grpc.py +0 -138
- flyte/_protos/workflow/task_definition_pb2.py +0 -82
- flyte/_protos/workflow/task_definition_pb2.pyi +0 -88
- flyte/_protos/workflow/task_definition_pb2_grpc.py +0 -4
- flyte/_protos/workflow/task_service_pb2.py +0 -60
- flyte/_protos/workflow/task_service_pb2.pyi +0 -59
- flyte/_protos/workflow/task_service_pb2_grpc.py +0 -138
- flyte-2.0.0b22.dist-info/RECORD +0 -250
- {flyte-2.0.0b22.data → flyte-2.0.0b30.data}/scripts/debug.py +0 -0
- {flyte-2.0.0b22.dist-info → flyte-2.0.0b30.dist-info}/WHEEL +0 -0
- {flyte-2.0.0b22.dist-info → flyte-2.0.0b30.dist-info}/entry_points.txt +0 -0
- {flyte-2.0.0b22.dist-info → flyte-2.0.0b30.dist-info}/licenses/LICENSE +0 -0
- {flyte-2.0.0b22.dist-info → flyte-2.0.0b30.dist-info}/top_level.txt +0 -0
flyte/_internal/runtime/trigger_serde.py
ADDED

@@ -0,0 +1,160 @@
+import asyncio
+from typing import Union
+
+from flyteidl2.core import interface_pb2, literals_pb2
+from flyteidl2.task import common_pb2, run_pb2, task_definition_pb2
+from google.protobuf import timestamp_pb2, wrappers_pb2
+
+import flyte.types
+from flyte import Cron, FixedRate, Trigger, TriggerTime
+
+
+def _to_schedule(m: Union[Cron, FixedRate], kickoff_arg_name: str | None = None) -> common_pb2.Schedule:
+    if isinstance(m, Cron):
+        return common_pb2.Schedule(
+            cron=common_pb2.Cron(
+                expression=m.expression,
+                timezone=m.timezone,
+            ),
+            kickoff_time_input_arg=kickoff_arg_name,
+        )
+    elif isinstance(m, FixedRate):
+        start_time = None
+        if m.start_time is not None:
+            start_time = timestamp_pb2.Timestamp()
+            start_time.FromDatetime(m.start_time)
+
+        return common_pb2.Schedule(
+            rate=common_pb2.FixedRate(
+                value=m.interval_minutes,
+                unit=common_pb2.FixedRateUnit.FIXED_RATE_UNIT_MINUTE,
+                start_time=start_time,
+            ),
+            kickoff_time_input_arg=kickoff_arg_name,
+        )
+
+
+async def process_default_inputs(
+    default_inputs: dict,
+    task_name: str,
+    task_inputs: interface_pb2.VariableMap,
+    task_default_inputs: list[common_pb2.NamedParameter],
+) -> list[common_pb2.NamedLiteral]:
+    """
+    Process default inputs and convert them to NamedLiteral objects.
+
+    Args:
+        default_inputs: Dictionary of default input values
+        task_name: Name of the task for error messages
+        task_inputs: Task input variable map
+        task_default_inputs: List of default parameters from task
+
+    Returns:
+        List of NamedLiteral objects
+    """
+    keys = []
+    literal_coros = []
+    for k, v in default_inputs.items():
+        if k not in task_inputs.variables:
+            raise ValueError(
+                f"Trigger default input '{k}' must be an input to the task, but not found in task {task_name}. "
+                f"Available inputs: {list(task_inputs.variables.keys())}"
+            )
+        else:
+            literal_coros.append(flyte.types.TypeEngine.to_literal(v, type(v), task_inputs.variables[k].type))
+            keys.append(k)
+
+    final_literals: list[literals_pb2.Literal] = await asyncio.gather(*literal_coros)
+
+    for p in task_default_inputs or []:
+        if p.name not in keys:
+            keys.append(p.name)
+            final_literals.append(p.parameter.default)
+
+    literals: list[common_pb2.NamedLiteral] = []
+    for k, lit in zip(keys, final_literals):
+        literals.append(
+            common_pb2.NamedLiteral(
+                name=k,
+                value=lit,
+            )
+        )
+
+    return literals
+
+
+async def to_task_trigger(
+    t: Trigger,
+    task_name: str,
+    task_inputs: interface_pb2.VariableMap,
+    task_default_inputs: list[common_pb2.NamedParameter],
+) -> task_definition_pb2.TaskTrigger:
+    """
+    Converts a Trigger object to a TaskTrigger protobuf object.
+    Args:
+        t:
+        task_name:
+        task_inputs:
+        task_default_inputs:
+    Returns:
+
+    """
+    env = None
+    if t.env_vars:
+        env = run_pb2.Envs()
+        for k, v in t.env_vars.items():
+            env.values.append(literals_pb2.KeyValuePair(key=k, value=v))
+
+    labels = run_pb2.Labels(values=t.labels) if t.labels else None
+
+    annotations = run_pb2.Annotations(values=t.annotations) if t.annotations else None
+
+    run_spec = run_pb2.RunSpec(
+        overwrite_cache=t.overwrite_cache,
+        envs=env,
+        interruptible=wrappers_pb2.BoolValue(value=t.interruptible) if t.interruptible is not None else None,
+        cluster=t.queue,
+        labels=labels,
+        annotations=annotations,
+    )
+
+    kickoff_arg_name = None
+    default_inputs = {}
+    if t.inputs:
+        for k, v in t.inputs.items():
+            if v is TriggerTime:
+                if k == "trigger_time" and k not in task_inputs.variables:
+                    # the 'trigger_time' input name that by default Triggers look for so it's always added.
+                    # Remove it here by skipping if it's not actually an input to the task
+                    continue
+                kickoff_arg_name = k
+            else:
+                default_inputs[k] = v
+
+    # assert that default_inputs and the kickoff_arg_name are infact in the task inputs
+    if kickoff_arg_name is not None and kickoff_arg_name not in task_inputs.variables:
+        raise ValueError(
+            f"For a scheduled trigger, the TriggerTime input '{kickoff_arg_name}' "
+            f"must be an input to the task, but not found in task {task_name}. "
+            f"Available inputs: {list(task_inputs.variables.keys())}"
+        )
+
+    literals = await process_default_inputs(default_inputs, task_name, task_inputs, task_default_inputs)
+
+    automation = _to_schedule(
+        t.automation,
+        kickoff_arg_name=kickoff_arg_name,
+    )
+
+    return task_definition_pb2.TaskTrigger(
+        name=t.name,
+        spec=task_definition_pb2.TaskTriggerSpec(
+            active=t.auto_activate,
+            run_spec=run_spec,
+            inputs=common_pb2.Inputs(literals=literals),
+        ),
+        automation_spec=common_pb2.TriggerAutomationSpec(
+            type=common_pb2.TriggerAutomationSpecType.TYPE_SCHEDULE,
+            schedule=automation,
+        ),
+    )
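
For orientation, the sketch below shows how this serializer could be driven. Only the names Trigger, Cron, TriggerTime, and to_task_trigger come from the diff; the constructor keywords and the input names ("when", "batch_size") are illustrative assumptions, not confirmed API.

import asyncio

from flyte import Cron, Trigger, TriggerTime
from flyte._internal.runtime.trigger_serde import to_task_trigger


async def build_trigger(task_inputs, task_default_inputs):
    # Hypothetical trigger: run nightly, bind the schedule's kickoff time to the
    # task input "when", and default "batch_size" to 100 (names are assumptions).
    t = Trigger(
        name="nightly",
        automation=Cron(expression="0 2 * * *"),
        inputs={"when": TriggerTime, "batch_size": 100},
    )
    # task_inputs is the task's interface_pb2.VariableMap; task_default_inputs is
    # its list of common_pb2.NamedParameter, both produced during task serde.
    return await to_task_trigger(t, "my_task", task_inputs, task_default_inputs)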
flyte/_keyring/file.py
CHANGED
@@ -10,9 +10,27 @@ _FLYTE_KEYRING_PATH: Path = Path.home() / ".flyte" / "keyring.cfg"
 
 
 class SimplePlainTextKeyring(KeyringBackend):
-    """
+    """
+    Simple plain text keyring for remote notebook environments.
 
-
+    This backend is only active when running in IPython/Jupyter notebooks.
+    For local development, the system keyring is used instead.
+    """
+
+    @property
+    def priority(self):
+        """
+        Return priority based on whether we're in a notebook environment.
+        Negative priority means this backend will be skipped by keyring.
+        """
+        from flyte._tools import ipython_check
+
+        if ipython_check():
+            # In IPython/Jupyter - use this backend
+            return 0.5
+        else:
+            # Not in IPython - skip this backend, let system keyring handle it
+            return -1
 
     def get_password(self, service: str, username: str) -> Optional[str]:
         """Get password."""

@@ -72,13 +90,25 @@ class SimplePlainTextKeyring(KeyringBackend):
 
     @property
     def file_path(self) -> Path:
-        from flyte._initialize import
-
-
-
-
-
-
+        from flyte._initialize import get_init_config, is_initialized
+        from flyte._logging import logger
+
+        # Only try to use source_config_path if flyte.init() has been called
+        if is_initialized():
+            try:
+                config = get_init_config()
+                config_path = config.source_config_path
+                if config_path and str(config_path.parent.name) == ".flyte":
+                    # if the config is in a .flyte directory, use that as the path
+                    return config_path.parent / "keyring.cfg"
+            except Exception as e:
+                # If anything fails, fall back to default path
+                logger.debug(f"Skipping config-based keyring path due to error: {e}")
+        else:
+            # flyte.init() hasn't been called, use default path
+            logger.debug("flyte.init() not called, using default keyring path")
+
+        # Default path
         return _FLYTE_KEYRING_PATH
 
     def __repr__(self):
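
The new priority property follows the keyring library's backend-selection convention: a positive priority makes a backend eligible, a negative one excludes it. Below is a minimal sketch of that gating pattern, with the environment probe stubbed out; in_notebook and EnvGatedKeyring are placeholders for illustration, not flyte APIs.

from typing import Optional

import keyring.backend


def in_notebook() -> bool:
    # Placeholder probe standing in for flyte._tools.ipython_check
    try:
        from IPython import get_ipython
        return get_ipython() is not None
    except ImportError:
        return False


class EnvGatedKeyring(keyring.backend.KeyringBackend):
    """Backend that only participates when an IPython kernel is running."""

    @property
    def priority(self):
        # keyring skips backends whose priority is negative
        return 0.5 if in_notebook() else -1

    def get_password(self, service: str, username: str) -> Optional[str]:
        return None

    def set_password(self, service: str, username: str, password: str) -> None:
        pass

    def delete_password(self, service: str, username: str) -> None:
        pass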
flyte/_logging.py
CHANGED
@@ -1,13 +1,17 @@
 from __future__ import annotations
 
+import json
 import logging
 import os
-from
+from datetime import datetime
+from typing import Literal, Optional
 
 import flyte
 
 from ._tools import ipython_check
 
+LogFormat = Literal["console", "json"]
+
 DEFAULT_LOG_LEVEL = logging.WARNING
 
 

@@ -33,11 +37,14 @@ def get_env_log_level() -> int:
     return int(os.environ.get("LOG_LEVEL", DEFAULT_LOG_LEVEL))
 
 
-def log_format_from_env() ->
+def log_format_from_env() -> LogFormat:
     """
     Get the log format from the environment variable.
     """
-
+    format_str = os.environ.get("LOG_FORMAT", "console")
+    if format_str not in ("console", "json"):
+        return "console"
+    return format_str  # type: ignore[return-value]
 
 
 def _get_console():

@@ -71,7 +78,7 @@ def get_rich_handler(log_level: int) -> Optional[logging.Handler]:
 
     handler = RichHandler(
         tracebacks_suppress=[click],
-        rich_tracebacks=
+        rich_tracebacks=False,
         omit_repeated_times=False,
         show_path=False,
         log_time_format="%H:%M:%S.%f",

@@ -86,13 +93,49 @@ def get_rich_handler(log_level: int) -> Optional[logging.Handler]:
     return handler
 
 
-
+class JSONFormatter(logging.Formatter):
+    """
+    Formatter that outputs JSON strings for each log record.
+    """
+
+    def format(self, record: logging.LogRecord) -> str:
+        log_data = {
+            "timestamp": datetime.fromtimestamp(record.created).isoformat(),
+            "level": record.levelname,
+            "logger": record.name,
+            "message": record.getMessage(),
+            "filename": record.filename,
+            "lineno": record.lineno,
+            "funcName": record.funcName,
+        }
+
+        # Add context fields if present
+        if getattr(record, "run_name", None):
+            log_data["run_name"] = record.run_name  # type: ignore[attr-defined]
+        if getattr(record, "action_name", None):
+            log_data["action_name"] = record.action_name  # type: ignore[attr-defined]
+        if getattr(record, "is_flyte_internal", False):
+            log_data["is_flyte_internal"] = True
+
+        # Add exception info if present
+        if record.exc_info:
+            log_data["exc_info"] = self.formatException(record.exc_info)
+
+        return json.dumps(log_data)
+
+
+def initialize_logger(log_level: int | None = None, log_format: LogFormat | None = None, enable_rich: bool = False):
     """
     Initializes the global loggers to the default configuration.
     When enable_rich=True, upgrades to Rich handler for local CLI usage.
     """
     global logger  # noqa: PLW0603
 
+    if log_level is None:
+        log_level = get_env_log_level()
+    if log_format is None:
+        log_format = log_format_from_env()
+
     # Clear existing handlers to reconfigure
     root = logging.getLogger()
     root.handlers.clear()

@@ -100,9 +143,16 @@ def initialize_logger(log_level: int = get_env_log_level(), enable_rich: bool =
     flyte_logger = logging.getLogger("flyte")
     flyte_logger.handlers.clear()
 
+    # Determine log format (JSON takes precedence over Rich)
+    use_json = log_format == "json"
+    use_rich = enable_rich and not use_json
+
     # Set up root logger handler
-    root_handler = None
-    if
+    root_handler: logging.Handler | None = None
+    if use_json:
+        root_handler = logging.StreamHandler()
+        root_handler.setFormatter(JSONFormatter())
+    elif use_rich:
         root_handler = get_rich_handler(log_level)
 
     if root_handler is None:

@@ -110,11 +160,16 @@ def initialize_logger(log_level: int = get_env_log_level(), enable_rich: bool =
 
     # Add context filter to root handler for all logging
     root_handler.addFilter(ContextFilter())
+    root_handler.setLevel(logging.DEBUG)
     root.addHandler(root_handler)
 
     # Set up Flyte logger handler
-    flyte_handler = None
-    if
+    flyte_handler: logging.Handler | None = None
+    if use_json:
+        flyte_handler = logging.StreamHandler()
+        flyte_handler.setLevel(log_level)
+        flyte_handler.setFormatter(JSONFormatter())
+    elif use_rich:
         flyte_handler = get_rich_handler(log_level)
 
     if flyte_handler is None:

@@ -165,13 +220,20 @@ class ContextFilter(logging.Filter):
     Applied globally to capture context for both user and Flyte internal logging.
     """
 
-    def filter(self, record):
+    def filter(self, record: logging.LogRecord) -> bool:
         from flyte._context import ctx
 
         c = ctx()
         if c:
             action = c.action
+            # Add as attributes for structured logging (JSON)
+            record.run_name = action.run_name
+            record.action_name = action.name
+            # Also modify message for console/Rich output
             record.msg = f"[{action.run_name}][{action.name}] {record.msg}"
+        else:
+            record.run_name = None
+            record.action_name = None
         return True
 
 

@@ -180,8 +242,12 @@ class FlyteInternalFilter(logging.Filter):
     A logging filter that adds [flyte] prefix to internal Flyte logging only.
     """
 
-    def filter(self, record):
-
+    def filter(self, record: logging.LogRecord) -> bool:
+        is_internal = record.name.startswith("flyte")
+        # Add as attribute for structured logging (JSON)
+        record.is_flyte_internal = is_internal
+        # Also modify message for console/Rich output
+        if is_internal:
             record.msg = f"[flyte] {record.msg}"
         return True
 

@@ -198,6 +264,7 @@ def _setup_root_logger():
     handler = logging.StreamHandler()
     # Add context filter to ALL logging
     handler.addFilter(ContextFilter())
+    handler.setLevel(logging.DEBUG)
 
     # Simple formatter since filters handle prefixes
     root.addHandler(handler)
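
A small usage sketch of the new structured-logging path: LOG_FORMAT=json selects the JSONFormatter and takes precedence over the Rich handler even when enable_rich is set. initialize_logger and logger are names taken from the diff; the log message itself is illustrative.

import logging
import os

# Read by log_format_from_env(); valid values are "console" (default) and "json".
os.environ["LOG_FORMAT"] = "json"

from flyte._logging import initialize_logger, logger

# log_format=None falls back to LOG_FORMAT; JSON wins over enable_rich=True.
initialize_logger(log_level=logging.INFO, enable_rich=True)
logger.info("task started")  # emitted as one JSON object with timestamp/level/logger/message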
flyte/_map.py
CHANGED
@@ -1,13 +1,13 @@
 import asyncio
 import functools
 import logging
-from typing import Any, AsyncGenerator, AsyncIterator, Generic, Iterable, Iterator, List, Union, cast
+from typing import Any, AsyncGenerator, AsyncIterator, Generic, Iterable, Iterator, List, Union, cast, overload
 
 from flyte.syncify import syncify
 
 from ._group import group
 from ._logging import logger
-from ._task import P, R
+from ._task import AsyncFunctionTaskTemplate, F, P, R
 
 
 class MapAsyncIterator(Generic[P, R]):

@@ -15,7 +15,7 @@ class MapAsyncIterator(Generic[P, R]):
 
     def __init__(
         self,
-        func:
+        func: AsyncFunctionTaskTemplate[P, R, F] | functools.partial[R],
         args: tuple,
         name: str,
         concurrency: int,

@@ -78,7 +78,7 @@ class MapAsyncIterator(Generic[P, R]):
 
         if isinstance(self.func, functools.partial):
             # Handle partial functions by merging bound args/kwargs with mapped args
-            base_func = cast(
+            base_func = cast(AsyncFunctionTaskTemplate, self.func.func)
             bound_args = self.func.args
             bound_kwargs = self.func.keywords or {}
 

@@ -144,7 +144,7 @@ class _Mapper(Generic[P, R]):
         :param func: partial function to validate
         :raises TypeError: if the partial function is not valid for mapping
         """
-        f = cast(
+        f = cast(AsyncFunctionTaskTemplate, func.func)
         inputs = f.native_interface.inputs
         params = list(inputs.keys())
         total_params = len(params)

@@ -172,9 +172,28 @@ class _Mapper(Generic[P, R]):
                 f"in partial function {f.name}."
             )
 
+    @overload
     def __call__(
         self,
-        func:
+        func: AsyncFunctionTaskTemplate[P, R, F] | functools.partial[R],
+        *args: Iterable[Any],
+        group_name: str | None = None,
+        concurrency: int = 0,
+    ) -> Iterator[R]: ...
+
+    @overload
+    def __call__(
+        self,
+        func: AsyncFunctionTaskTemplate[P, R, F] | functools.partial[R],
+        *args: Iterable[Any],
+        group_name: str | None = None,
+        concurrency: int = 0,
+        return_exceptions: bool = True,
+    ) -> Iterator[Union[R, Exception]]: ...
+
+    def __call__(
+        self,
+        func: AsyncFunctionTaskTemplate[P, R, F] | functools.partial[R],
         *args: Iterable[Any],
         group_name: str | None = None,
         concurrency: int = 0,

@@ -194,10 +213,10 @@ class _Mapper(Generic[P, R]):
             return
 
         if isinstance(func, functools.partial):
-            f = cast(
+            f = cast(AsyncFunctionTaskTemplate, func.func)
             self.validate_partial(func)
         else:
-            f = cast(
+            f = cast(AsyncFunctionTaskTemplate, func)
 
         name = self._get_name(f.name, group_name)
         logger.debug(f"Blocking Map for {name}")

@@ -234,7 +253,7 @@ class _Mapper(Generic[P, R]):
 
     async def aio(
         self,
-        func:
+        func: AsyncFunctionTaskTemplate[P, R, F] | functools.partial[R],
         *args: Iterable[Any],
         group_name: str | None = None,
         concurrency: int = 0,

@@ -244,10 +263,10 @@ class _Mapper(Generic[P, R]):
             return
 
         if isinstance(func, functools.partial):
-            f = cast(
+            f = cast(AsyncFunctionTaskTemplate, func.func)
             self.validate_partial(func)
         else:
-            f = cast(
+            f = cast(AsyncFunctionTaskTemplate, func)
 
         name = self._get_name(f.name, group_name)
         with group(name):

@@ -277,7 +296,7 @@ class _Mapper(Generic[P, R]):
 
 @syncify
 async def _map(
-    func:
+    func: AsyncFunctionTaskTemplate[P, R, F] | functools.partial[R],
     *args: Iterable[Any],
     name: str = "map",
     concurrency: int = 0,
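
The overloads above describe the call shapes for mapping a task, or a functools.partial of one, over an iterable. A sketch of that pattern, assuming flyte.map is the public wrapper over _map and that the task is declared with flyte.TaskEnvironment as in other flyte 2.x examples:

import functools

import flyte

env = flyte.TaskEnvironment(name="map_example")


@env.task
async def scale(x: int, factor: int) -> int:
    return x * factor


def run_all() -> list[int]:
    # Bind "factor" with functools.partial; the one remaining parameter is
    # mapped over the iterable. concurrency bounds how many actions run at once.
    doubler = functools.partial(scale, factor=2)
    return list(flyte.map(doubler, range(10), concurrency=4))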
flyte/_module.py
ADDED
@@ -0,0 +1,70 @@
+import inspect
+import os
+import pathlib
+import sys
+
+
+def extract_obj_module(obj: object, /, source_dir: pathlib.Path) -> str:
+    """
+    Extract the module from the given object. If source_dir is provided, the module will be relative to the source_dir.
+
+    Args:
+        obj: The object to extract the module from.
+        source_dir: The source directory to use for relative paths.
+
+    Returns:
+        The module name as a string.
+    """
+    if source_dir is None:
+        raise ValueError("extract_obj_module: source_dir cannot be None - specify root-dir")
+    # Get the module containing the object
+    entity_module = inspect.getmodule(obj)
+    if entity_module is None:
+        obj_name = getattr(obj, "__name__", str(obj))
+        raise ValueError(f"Object {obj_name} has no module.")
+
+    fp = entity_module.__file__
+    if fp is None:
+        obj_name = getattr(obj, "__name__", str(obj))
+        raise ValueError(f"Object {obj_name} has no module.")
+
+    file_path = pathlib.Path(fp)
+    try:
+        # Get the relative path to the current directory
+        # Will raise ValueError if the file is not in the source directory
+        relative_path = file_path.relative_to(str(pathlib.Path(source_dir).absolute()))
+
+        if relative_path == pathlib.Path("_internal/resolvers"):
+            entity_module_name = entity_module.__name__
+        else:
+            # Replace file separators with dots and remove the '.py' extension
+            dotted_path = os.path.splitext(str(relative_path))[0].replace(os.sep, ".")
+            entity_module_name = dotted_path
+    except ValueError:
+        # If source_dir is not provided or file is not in source_dir, fallback to module name
+        # File is not relative to source_dir - check if it's an installed package
+        file_path_str = str(file_path)
+        if "site-packages" in file_path_str or "dist-packages" in file_path_str:
+            # It's an installed package - use the module's __name__ directly
+            # This will be importable via importlib.import_module()
+            entity_module_name = entity_module.__name__
+        else:
+            # File is not in source_dir and not in site-packages - re-raise the error
+            obj_name = getattr(obj, "__name__", str(obj))
+            raise ValueError(
+                f"Object {obj_name} module file {file_path} is not relative to "
+                f"source directory {source_dir} and is not an installed package."
+            )
+
+    if entity_module_name == "__main__":
+        """
+        This case is for the case in which the object is run from the main module.
+        """
+        fp = sys.modules["__main__"].__file__
+        if fp is None:
+            obj_name = getattr(obj, "__name__", str(obj))
+            raise ValueError(f"Object {obj_name} has no module.")
+        main_path = pathlib.Path(fp)
+        entity_module_name = main_path.stem
+
+    return entity_module_name
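
A short sketch of what extract_obj_module computes (the file locations mentioned in the comments are hypothetical): a file under source_dir becomes a dotted module path, installed packages fall back to the module's own __name__, and objects defined in the executed script resolve to the script's stem.

import pathlib

from flyte._module import extract_obj_module


def my_task() -> None:
    ...


# If this file lived at <root>/workflows/daily.py and were imported as a module,
# extract_obj_module(my_task, source_dir=<root>) would return "workflows.daily".
# Run directly as a script, the resolved name is this file's stem instead.
print(extract_obj_module(my_task, source_dir=pathlib.Path(__file__).parent))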
flyte/_pod.py
CHANGED
@@ -2,7 +2,7 @@ from dataclasses import dataclass, field
 from typing import TYPE_CHECKING, Dict, Optional
 
 if TYPE_CHECKING:
-    from
+    from flyteidl2.core.tasks_pb2 import K8sPod
     from kubernetes.client import V1PodSpec
 
 

@@ -20,7 +20,7 @@ class PodTemplate(object):
     annotations: Optional[Dict[str, str]] = None
 
     def to_k8s_pod(self) -> "K8sPod":
-        from
+        from flyteidl2.core.tasks_pb2 import K8sObjectMetadata, K8sPod
         from kubernetes.client import ApiClient
 
         return K8sPod(