metaflow 2.13.9__py2.py3-none-any.whl → 2.14.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow/cli.py +6 -6
- metaflow/cli_args.py +2 -2
- metaflow/cmd/develop/stub_generator.py +8 -2
- metaflow/datastore/task_datastore.py +11 -32
- metaflow/flowspec.py +11 -5
- metaflow/metaflow_environment.py +3 -1
- metaflow/plugins/argo/argo_workflows.py +108 -0
- metaflow/plugins/argo/argo_workflows_cli.py +47 -7
- metaflow/plugins/metadata_providers/service.py +31 -18
- metaflow/plugins/project_decorator.py +18 -0
- metaflow/runner/click_api.py +10 -5
- metaflow/user_configs/config_decorators.py +19 -14
- metaflow/user_configs/config_options.py +1 -1
- metaflow/util.py +2 -2
- metaflow/version.py +1 -1
- {metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/METADATA +2 -2
- {metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/RECORD +21 -21
- {metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/LICENSE +0 -0
- {metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/WHEEL +0 -0
- {metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/entry_points.txt +0 -0
- {metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/top_level.txt +0 -0
metaflow/cli.py
CHANGED
@@ -206,15 +206,15 @@ def output_raw(obj, json):
     else:
         _graph = str(obj.graph)
         _msg = "Internal representation of the flow:"
-
+    echo_always(_msg, fg="magenta", bold=False)
     echo_always(_graph, err=False)


 @cli.command(help="Visualize the flow with Graphviz.")
 @click.pass_obj
 def output_dot(obj):
-
-
+    echo_always("Visualizing the flow as a GraphViz graph", fg="magenta", bold=False)
+    echo_always(
         "Try piping the output to 'dot -Tpng -o graph.png' to produce "
         "an actual image.",
         indent=True,
@@ -330,7 +330,7 @@ def start(
     event_logger=None,
     monitor=None,
     local_config_file=None,
-
+    config=None,
     config_value=None,
     **deco_options
 ):
@@ -383,7 +383,7 @@ def start(
     # When we process the options, the first one processed will return None and the
     # second one processed will return the actual options. The order of processing
    # depends on what (and in what order) the user specifies on the command line.
-    config_options =
+    config_options = config or config_value

     if (
         hasattr(ctx, "saved_args")
@@ -396,7 +396,7 @@ def start(
         # if we need to in the first place
         if getattr(ctx.obj, "has_cl_config_options", False):
             raise click.UsageError(
-                "Cannot specify --config
+                "Cannot specify --config or --config-value with 'resume'"
             )
         # We now load the config artifacts from the original run id
         run_id = None
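The two options feed the same underlying store: `--config <name> <path>` points at a configuration file while `--config-value <name> <value>` passes the value inline (for example `python flow.py --config-value settings '{"alpha": 0.5}' run`). Because Click routes both options through a shared callback, the first one processed returns None and the second returns the merged result, which is why `config or config_value` picks up whichever is populated. A small illustration with made-up values:

# Minimal illustration of the merge behavior described in the comment above; the
# option values here are placeholders, not Metaflow internals.
config = None                                   # e.g. --config was processed first
config_value = {"settings": '{"alpha": 0.5}'}   # processed second: carries the merged result
config_options = config or config_value
assert config_options == {"settings": '{"alpha": 0.5}'}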
metaflow/cli_args.py
CHANGED
@@ -72,10 +72,10 @@ class CLIArgs(object):
         # keyword in Python, so we call it 'decospecs' in click args
         if k == "decospecs":
             k = "with"
-        if k in ("
+        if k in ("config", "config_value"):
             # Special handling here since we gather them all in one option but actually
             # need to send them one at a time using --config-value <name> kv.<name>.
-            # Note it can be either
+            # Note it can be either config or config_value depending
             # on click processing order.
             for config_name in v.keys():
                 yield "--config-value"
metaflow/cmd/develop/stub_generator.py
CHANGED
@@ -1133,8 +1133,14 @@ class StubGenerator:
            result = result[1:]
        # Add doc to first and last overloads. Jedi uses the last one and pycharm
        # the first one. Go figure.
-        result[0] = (
-
+        result[0] = (
+            result[0][0],
+            docs["func_doc"] + "\nParameters\n----------\n" + docs["param_doc"],
+        )
+        result[-1] = (
+            result[-1][0],
+            docs["func_doc"] + "\nParameters\n----------\n" + docs["param_doc"],
+        )
         return result

     def _generate_function_stub(
metaflow/datastore/task_datastore.py
CHANGED
@@ -253,7 +253,7 @@ class TaskDataStore(object):

     @only_if_not_done
     @require_mode("w")
-    def save_artifacts(self, artifacts_iter,
+    def save_artifacts(self, artifacts_iter, len_hint=0):
         """
         Saves Metaflow Artifacts (Python objects) to the datastore and stores
         any relevant metadata needed to retrieve them.
@@ -269,11 +269,6 @@ class TaskDataStore(object):
         artifacts : Iterator[(string, object)]
             Iterator over the human-readable name of the object to save
             and the object itself
-        force_v4 : boolean or Dict[string -> boolean]
-            Indicates whether the artifact should be pickled using the v4
-            version of pickle. If a single boolean, applies to all artifacts.
-            If a dictionary, applies to the object named only. Defaults to False
-            if not present or not specified
         len_hint: integer
             Estimated number of items in artifacts_iter
         """
@@ -281,40 +276,24 @@ class TaskDataStore(object):

         def pickle_iter():
             for name, obj in artifacts_iter:
-
-
-                if isinstance(force_v4, bool)
-                else force_v4.get(name, False)
-                )
-                if do_v4:
-                    encode_type = "gzip+pickle-v4"
-                    if encode_type not in self._encodings:
-                        raise DataException(
-                            "Artifact *%s* requires a serialization encoding that "
-                            "requires Python 3.4 or newer." % name
-                        )
+                encode_type = "gzip+pickle-v4"
+                if encode_type in self._encodings:
                     try:
                         blob = pickle.dumps(obj, protocol=4)
                     except TypeError as e:
-                        raise UnpicklableArtifactException(name)
+                        raise UnpicklableArtifactException(name) from e
                 else:
                     try:
                         blob = pickle.dumps(obj, protocol=2)
                         encode_type = "gzip+pickle-v2"
-                    except (SystemError, OverflowError):
-
-
-
-
-
-                            "serialize large objects." % name
-                        )
-                        try:
-                            blob = pickle.dumps(obj, protocol=4)
-                        except TypeError as e:
-                            raise UnpicklableArtifactException(name)
+                    except (SystemError, OverflowError) as e:
+                        raise DataException(
+                            "Artifact *%s* is very large (over 2GB). "
+                            "You need to use Python 3.4 or newer if you want to "
+                            "serialize large objects." % name
+                        ) from e
                     except TypeError as e:
-                        raise UnpicklableArtifactException(name)
+                        raise UnpicklableArtifactException(name) from e

                 self._info[name] = {
                     "size": len(blob),
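With `force_v4` gone, artifact pickling now has a single path: protocol 4 whenever the datastore supports the `gzip+pickle-v4` encoding, otherwise protocol 2 with an explicit error for objects that protocol 2 cannot handle, and the original exceptions are chained with `from e` so the root cause is preserved. A standalone sketch of that fallback logic (illustrative only, not the datastore code; `encodings` stands in for `self._encodings`):

import pickle

def encode_artifact(name, obj, encodings=frozenset({"gzip+pickle-v4"})):
    # Prefer protocol 4 (needed for objects over 2GB) when the store supports it.
    if "gzip+pickle-v4" in encodings:
        try:
            return "gzip+pickle-v4", pickle.dumps(obj, protocol=4)
        except TypeError as e:
            raise ValueError("artifact %r is not picklable" % name) from e
    # Otherwise fall back to protocol 2 and surface the size limitation clearly.
    try:
        return "gzip+pickle-v2", pickle.dumps(obj, protocol=2)
    except (SystemError, OverflowError) as e:
        raise ValueError("artifact %r is too large for pickle protocol 2" % name) from e
    except TypeError as e:
        raise ValueError("artifact %r is not picklable" % name) from e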
metaflow/flowspec.py
CHANGED
@@ -223,17 +223,18 @@ class FlowSpec(metaclass=FlowSpecMeta):
             seen.add(norm)

     @classmethod
-    def _process_config_decorators(cls, config_options,
+    def _process_config_decorators(cls, config_options, process_configs=True):

         # Fast path for no user configurations
-        if not
-
+        if not process_configs or (
+            not cls._flow_state.get(_FlowState.CONFIG_DECORATORS)
+            and all(len(step.config_decorators) == 0 for step in cls._steps)
         ):
             # Process parameters to allow them to also use config values easily
             for var, param in cls._get_parameters():
                 if param.IS_CONFIG_PARAMETER:
                     continue
-                param.init(
+                param.init(not process_configs)
             return None

         debug.userconf_exec("Processing mutating step/flow decorators")
@@ -258,6 +259,11 @@ class FlowSpec(metaclass=FlowSpecMeta):
             debug.userconf_exec("Setting config %s to %s" % (var, str(val)))
             setattr(cls, var, val)

+        # Reset cached parameters since we have replaced configs already with ConfigValue
+        # so they are not parameters anymore to be re-evaluated when we do _get_parameters
+        if _FlowState.CACHED_PARAMETERS in cls._flow_state:
+            del cls._flow_state[_FlowState.CACHED_PARAMETERS]
+
         # Run all the decorators. Step decorators are directly in the step and
         # we will run those first and *then* we run all the flow level decorators
         for step in cls._steps:
@@ -277,7 +283,7 @@ class FlowSpec(metaclass=FlowSpecMeta):
             setattr(cls, step.name, step)

         mutable_flow = MutableFlow(cls)
-        for deco in cls._flow_state
+        for deco in cls._flow_state.get(_FlowState.CONFIG_DECORATORS, []):
             if isinstance(deco, CustomFlowDecorator):
                 # Sanity check to make sure we are applying the decorator to the right
                 # class
metaflow/metaflow_environment.py
CHANGED
@@ -125,7 +125,9 @@ class MetaflowEnvironment(object):
         )

     def _get_install_dependencies_cmd(self, datastore_type):
-        base_cmd = "{} -m pip install -qqq".format(
+        base_cmd = "{} -m pip install -qqq --no-compile --no-cache-dir --disable-pip-version-check".format(
+            self._python()
+        )

         datastore_packages = {
             "s3": ["boto3"],
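The extra pip flags skip bytecode compilation, the download cache, and the version check, which trims the per-task dependency bootstrap in remote environments. As a rough illustration (assuming `self._python()` resolves to `python` and the `s3` datastore, whose package list starts with `boto3`), the formatted command begins like this:

# Illustrative only: what the formatted install command looks like under the
# assumptions above; the exact package list is appended elsewhere in the method.
base_cmd = "{} -m pip install -qqq --no-compile --no-cache-dir --disable-pip-version-check".format("python")
print(base_cmd + " boto3")
# python -m pip install -qqq --no-compile --no-cache-dir --disable-pip-version-check boto3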
metaflow/plugins/argo/argo_workflows.py
CHANGED
@@ -111,6 +111,9 @@ class ArgoWorkflows(object):
         notify_on_success=False,
         notify_slack_webhook_url=None,
         notify_pager_duty_integration_key=None,
+        notify_incident_io_api_key=None,
+        incident_io_success_severity_id=None,
+        incident_io_error_severity_id=None,
         enable_heartbeat_daemon=True,
         enable_error_msg_capture=False,
     ):
@@ -160,6 +163,9 @@ class ArgoWorkflows(object):
         self.notify_on_success = notify_on_success
         self.notify_slack_webhook_url = notify_slack_webhook_url
         self.notify_pager_duty_integration_key = notify_pager_duty_integration_key
+        self.notify_incident_io_api_key = notify_incident_io_api_key
+        self.incident_io_success_severity_id = incident_io_success_severity_id
+        self.incident_io_error_severity_id = incident_io_error_severity_id
         self.enable_heartbeat_daemon = enable_heartbeat_daemon
         self.enable_error_msg_capture = enable_error_msg_capture
         self.parameters = self._process_parameters()
@@ -891,6 +897,17 @@ class ArgoWorkflows(object):
                     and self.notify_pager_duty_integration_key
                     else {}
                 ),
+                **(
+                    {
+                        # workflow status maps to Completed
+                        "notify-incident-io-on-success": LifecycleHook()
+                        .expression("workflow.status == 'Succeeded'")
+                        .template("notify-incident-io-on-success"),
+                    }
+                    if self.notify_on_success
+                    and self.notify_incident_io_api_key
+                    else {}
+                ),
                 **(
                     {
                         # workflow status maps to Failed or Error
@@ -918,6 +935,19 @@ class ArgoWorkflows(object):
                     and self.notify_pager_duty_integration_key
                     else {}
                 ),
+                **(
+                    {
+                        # workflow status maps to Failed or Error
+                        "notify-incident-io-on-failure": LifecycleHook()
+                        .expression("workflow.status == 'Failed'")
+                        .template("notify-incident-io-on-error"),
+                        "notify-incident-io-on-error": LifecycleHook()
+                        .expression("workflow.status == 'Error'")
+                        .template("notify-incident-io-on-error"),
+                    }
+                    if self.notify_on_error and self.notify_incident_io_api_key
+                    else {}
+                ),
                 # Warning: terrible hack to workaround a bug in Argo Workflow
                 # where the hooks listed above do not execute unless
                 # there is an explicit exit hook. as and when this
@@ -2270,9 +2300,11 @@ class ArgoWorkflows(object):
         if self.notify_on_error:
             templates.append(self._slack_error_template())
             templates.append(self._pager_duty_alert_template())
+            templates.append(self._incident_io_alert_template())
         if self.notify_on_success:
             templates.append(self._slack_success_template())
             templates.append(self._pager_duty_change_template())
+            templates.append(self._incident_io_change_template())
         if self.notify_on_error or self.notify_on_success:
             # Warning: terrible hack to workaround a bug in Argo Workflow where the
             # templates listed above do not execute unless there is an
@@ -2466,6 +2498,82 @@ class ArgoWorkflows(object):
             )
         )

+    def _incident_io_alert_template(self):
+        if self.notify_incident_io_api_key is None:
+            return None
+        if self.incident_io_error_severity_id is None:
+            raise MetaflowException(
+                "Creating incidents for errors requires a severity id."
+            )
+        return Template("notify-incident-io-on-error").http(
+            Http("POST")
+            .url("https://api.incident.io/v2/incidents")
+            .header("Content-Type", "application/json")
+            .header("Authorization", "Bearer %s" % self.notify_incident_io_api_key)
+            .body(
+                json.dumps(
+                    {
+                        "idempotency_key": "argo-{{workflow.name}}",  # use run id to deduplicate alerts.
+                        "visibility": "public",
+                        "severity_id": self.incident_io_error_severity_id,
+                        "name": "Flow %s has failed." % self.flow.name,
+                        "summary": "Metaflow run %s/argo-{{workflow.name}} failed! %s"
+                        % (self.flow.name, self._incident_io_ui_urls_for_run()),
+                        # TODO: Add support for custom field entries.
+                    }
+                )
+            )
+        )
+
+    def _incident_io_change_template(self):
+        if self.notify_incident_io_api_key is None:
+            return None
+        if self.incident_io_success_severity_id is None:
+            raise MetaflowException(
+                "Creating incidents for successes requires a severity id."
+            )
+        return Template("notify-incident-io-on-success").http(
+            Http("POST")
+            .url("https://api.incident.io/v2/incidents")
+            .header("Content-Type", "application/json")
+            .header("Authorization", "Bearer %s" % self.notify_incident_io_api_key)
+            .body(
+                json.dumps(
+                    {
+                        "idempotency_key": "argo-{{workflow.name}}",  # use run id to deduplicate alerts.
+                        "visibility": "public",
+                        "severity_id": self.incident_io_success_severity_id,
+                        # TODO: Do we need to make incident type configurable for successes? otherwise they are created as 'investigating'
+                        # "incident_type_id": ""
+                        "name": "Flow %s has succeeded." % self.flow.name,
+                        "summary": "Metaflow run %s/argo-{{workflow.name}} succeeded!%s"
+                        % (self.flow.name, self._incident_io_ui_urls_for_run()),
+                        # TODO: Add support for custom field entries.
+                    }
+                )
+            )
+        )
+
+    def _incident_io_ui_urls_for_run(self):
+        links = []
+        if UI_URL:
+            url = "[Metaflow UI](%s/%s/%s)" % (
+                UI_URL.rstrip("/"),
+                self.flow.name,
+                "argo-{{workflow.name}}",
+            )
+            links.append(url)
+        if ARGO_WORKFLOWS_UI_URL:
+            url = "[Argo UI](%s/workflows/%s/%s)" % (
+                ARGO_WORKFLOWS_UI_URL.rstrip("/"),
+                "{{workflow.namespace}}",
+                "{{workflow.name}}",
+            )
+            links.append(url)
+        if links:
+            links = ["See details for the run at: ", *links]
+        return "\n\n".join(links)
+
     def _pager_duty_change_template(self):
         # https://developer.pagerduty.com/docs/ZG9jOjExMDI5NTgy-send-a-change-event
         if self.notify_pager_duty_integration_key is None:
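The two new templates drive an Argo `http` lifecycle hook rather than code running inside the flow, but the request they emit is an ordinary incident.io `POST /v2/incidents` call keyed on the run name so repeated hook firings deduplicate into one incident. A rough equivalent using the requests library, with placeholder key, severity id, and run name:

import json
import requests

API_KEY = "ikey_placeholder"            # placeholder incident.io API key
SEVERITY_ID = "sev_placeholder"         # placeholder severity id
run_name = "argo-helloflow-abc123"      # placeholder Argo workflow name

# Equivalent of the generated "notify-incident-io-on-error" HTTP template.
resp = requests.post(
    "https://api.incident.io/v2/incidents",
    headers={
        "Content-Type": "application/json",
        "Authorization": "Bearer %s" % API_KEY,
    },
    data=json.dumps(
        {
            "idempotency_key": run_name,  # one incident per run
            "visibility": "public",
            "severity_id": SEVERITY_ID,
            "name": "Flow HelloFlow has failed.",
            "summary": "Metaflow run HelloFlow/%s failed!" % run_name,
        }
    ),
)
resp.raise_for_status()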
metaflow/plugins/argo/argo_workflows_cli.py
CHANGED
@@ -176,6 +176,21 @@ def argo_workflows(obj, name=None):
     default="",
     help="PagerDuty Events API V2 Integration key for workflow success/failure notifications.",
 )
+@click.option(
+    "--notify-incident-io-api-key",
+    default=None,
+    help="Incident.io API V2 key for workflow success/failure notifications.",
+)
+@click.option(
+    "--incident-io-success-severity-id",
+    default=None,
+    help="Incident.io severity id for success alerts.",
+)
+@click.option(
+    "--incident-io-error-severity-id",
+    default=None,
+    help="Incident.io severity id for error alerts.",
+)
 @click.option(
     "--enable-heartbeat-daemon/--no-enable-heartbeat-daemon",
     default=False,
@@ -213,6 +228,9 @@ def create(
     notify_on_success=False,
     notify_slack_webhook_url=None,
     notify_pager_duty_integration_key=None,
+    notify_incident_io_api_key=None,
+    incident_io_success_severity_id=None,
+    incident_io_error_severity_id=None,
     enable_heartbeat_daemon=True,
     deployer_attribute_file=None,
     enable_error_msg_capture=False,
@@ -268,6 +286,9 @@ def create(
         notify_on_success,
         notify_slack_webhook_url,
         notify_pager_duty_integration_key,
+        notify_incident_io_api_key,
+        incident_io_success_severity_id,
+        incident_io_error_severity_id,
         enable_heartbeat_daemon,
         enable_error_msg_capture,
     )
@@ -442,6 +463,9 @@ def make_flow(
     notify_on_success,
     notify_slack_webhook_url,
     notify_pager_duty_integration_key,
+    notify_incident_io_api_key,
+    incident_io_success_severity_id,
+    incident_io_error_severity_id,
     enable_heartbeat_daemon,
     enable_error_msg_capture,
 ):
@@ -453,17 +477,30 @@ def make_flow(
         )

     if (notify_on_error or notify_on_success) and not (
-        notify_slack_webhook_url
+        notify_slack_webhook_url
+        or notify_pager_duty_integration_key
+        or notify_incident_io_api_key
     ):
         raise MetaflowException(
-            "Notifications require specifying an incoming Slack webhook url via --notify-slack-webhook-url or
-            "
-            "notifications for your Slack workspace, follow the instructions at "
-            "https://api.slack.com/messaging/webhooks to generate a webhook url.\n
-            "generate an integration key by following the instructions at "
-            "https://support.pagerduty.com/docs/services-and-integrations#create-a-generic-events-api-integration"
+            "Notifications require specifying an incoming Slack webhook url via --notify-slack-webhook-url, PagerDuty events v2 integration key via --notify-pager-duty-integration-key or\n"
+            "Incident.io integration API key via --notify-incident-io-api-key.\n"
+            " If you would like to set up notifications for your Slack workspace, follow the instructions at "
+            "https://api.slack.com/messaging/webhooks to generate a webhook url.\n"
+            " For notifications through PagerDuty, generate an integration key by following the instructions at "
+            "https://support.pagerduty.com/docs/services-and-integrations#create-a-generic-events-api-integration\n"
+            " For notifications through Incident.io, generate an API key with a permission to create incidents."
         )

+    if notify_incident_io_api_key:
+        if notify_on_error and incident_io_error_severity_id is None:
+            raise MetaflowException(
+                "Incident.io error notifications require a severity id. Please set one with --incident-io-error-severity-id"
+            )
+
+        if notify_on_success and incident_io_success_severity_id is None:
+            raise MetaflowException(
+                "Incident.io success notifications require a severity id. Please set one with --incident-io-success-severity-id"
+            )
     # Attach @kubernetes and @environment decorator to the flow to
     # ensure that the related decorator hooks are invoked.
     decorators._attach_decorators(
@@ -507,6 +544,9 @@ def make_flow(
         notify_on_success=notify_on_success,
         notify_slack_webhook_url=notify_slack_webhook_url,
         notify_pager_duty_integration_key=notify_pager_duty_integration_key,
+        notify_incident_io_api_key=notify_incident_io_api_key,
+        incident_io_success_severity_id=incident_io_success_severity_id,
+        incident_io_error_severity_id=incident_io_error_severity_id,
         enable_heartbeat_daemon=enable_heartbeat_daemon,
         enable_error_msg_capture=enable_error_msg_capture,
     )
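On the CLI side this surfaces as three new `argo-workflows create` options, with the severity ids validated up front: deploying with `--notify-on-error` but no `--incident-io-error-severity-id` now fails before any workflow is created. A hypothetical deployment, written as a subprocess call so the flags stay visible; the flow file, key, and severity id are placeholders:

import subprocess

# Hypothetical deployment enabling incident.io error notifications.
subprocess.run(
    [
        "python", "helloflow.py", "argo-workflows", "create",
        "--notify-on-error",
        "--notify-incident-io-api-key", "ikey_placeholder",
        "--incident-io-error-severity-id", "sev_placeholder",
    ],
    check=True,  # raise if the deployment command fails
)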
metaflow/plugins/metadata_providers/service.py
CHANGED
@@ -1,23 +1,18 @@
 import os
 import random
+import time

 import requests
-import time

 from metaflow.exception import (
     MetaflowException,
-    MetaflowTaggingError,
     MetaflowInternalError,
-
-from metaflow.metaflow_config import (
-    SERVICE_RETRY_COUNT,
-    SERVICE_HEADERS,
-    SERVICE_URL,
+    MetaflowTaggingError,
 )
 from metaflow.metadata_provider import MetadataProvider
 from metaflow.metadata_provider.heartbeat import HB_URL_KEY
+from metaflow.metaflow_config import SERVICE_HEADERS, SERVICE_RETRY_COUNT, SERVICE_URL
 from metaflow.sidecar import Message, MessageTypes, Sidecar
-
 from metaflow.util import version_parse


@@ -39,6 +34,23 @@ class ServiceException(MetaflowException):
 class ServiceMetadataProvider(MetadataProvider):
     TYPE = "service"

+    _session = requests.Session()
+    _session.mount(
+        "http://",
+        requests.adapters.HTTPAdapter(
+            pool_connections=20,
+            pool_maxsize=20,
+            max_retries=0,  # Handle retries explicitly
+            pool_block=False,
+        ),
+    )
+    _session.mount(
+        "https://",
+        requests.adapters.HTTPAdapter(
+            pool_connections=20, pool_maxsize=20, max_retries=0, pool_block=False
+        ),
+    )
+
     _supports_attempt_gets = None
     _supports_tag_mutation = None

@@ -59,7 +71,9 @@ class ServiceMetadataProvider(MetadataProvider):
     def compute_info(cls, val):
         v = val.rstrip("/")
         try:
-            resp =
+            resp = cls._session.get(
+                os.path.join(v, "ping"), headers=SERVICE_HEADERS.copy()
+            )
             resp.raise_for_status()
         except:  # noqa E722
             raise ValueError("Metaflow service [%s] unreachable." % v)
@@ -412,27 +426,27 @@ class ServiceMetadataProvider(MetadataProvider):
             if method == "GET":
                 if monitor:
                     with monitor.measure("metaflow.service_metadata.get"):
-                        resp =
+                        resp = cls._session.get(url, headers=SERVICE_HEADERS.copy())
                 else:
-                    resp =
+                    resp = cls._session.get(url, headers=SERVICE_HEADERS.copy())
             elif method == "POST":
                 if monitor:
                     with monitor.measure("metaflow.service_metadata.post"):
-                        resp =
+                        resp = cls._session.post(
                             url, headers=SERVICE_HEADERS.copy(), json=data
                         )
                 else:
-                    resp =
+                    resp = cls._session.post(
                         url, headers=SERVICE_HEADERS.copy(), json=data
                     )
             elif method == "PATCH":
                 if monitor:
                     with monitor.measure("metaflow.service_metadata.patch"):
-                        resp =
+                        resp = cls._session.patch(
                             url, headers=SERVICE_HEADERS.copy(), json=data
                         )
                 else:
-                    resp =
+                    resp = cls._session.patch(
                         url, headers=SERVICE_HEADERS.copy(), json=data
                     )
             else:
@@ -475,7 +489,6 @@ class ServiceMetadataProvider(MetadataProvider):
                     resp.text,
                 )
                 time.sleep(2**i)
-
         if resp:
             raise ServiceException(
                 "Metadata request (%s) failed (code %s): %s"
@@ -499,9 +512,9 @@ class ServiceMetadataProvider(MetadataProvider):
         try:
             if monitor:
                 with monitor.measure("metaflow.service_metadata.get"):
-                    resp =
+                    resp = cls._session.get(url, headers=SERVICE_HEADERS.copy())
             else:
-                resp =
+                resp = cls._session.get(url, headers=SERVICE_HEADERS.copy())
         except:
             if monitor:
                 with monitor.count("metaflow.service_metadata.failed_request"):
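ServiceMetadataProvider now funnels every metadata call through one class-level requests.Session with explicitly sized connection pools, so keep-alive connections are reused across the many GET/POST/PATCH calls a task makes instead of being re-established per request; retries stay in the provider's own backoff loop (hence max_retries=0 on the adapter). A minimal sketch of the pattern outside Metaflow, with a placeholder URL and header:

import requests
from requests.adapters import HTTPAdapter

# One shared session with bounded connection pools; the caller handles retries.
session = requests.Session()
adapter = HTTPAdapter(pool_connections=20, pool_maxsize=20, max_retries=0, pool_block=False)
session.mount("http://", adapter)
session.mount("https://", adapter)

# Every call through `session` reuses the pooled keep-alive connections.
resp = session.get("https://example.com/ping", headers={"x-api-key": "placeholder"})
resp.raise_for_status()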
metaflow/plugins/project_decorator.py
CHANGED
@@ -26,6 +26,24 @@ class ProjectDecorator(FlowDecorator):
         projects that use the same production scheduler. The name may
         contain only lowercase alphanumeric characters and underscores.

+    branch : Optional[str], default None
+        The branch to use. If not specified, the branch is set to
+        `user.<username>` unless `production` is set to `True`. This can
+        also be set on the command line using `--branch` as a top-level option.
+        It is an error to specify `branch` in the decorator and on the command line.
+
+    production : bool, default False
+        Whether or not the branch is the production branch. This can also be set on the
+        command line using `--production` as a top-level option. It is an error to specify
+        `production` in the decorator and on the command line.
+        The project branch name will be:
+            - if `branch` is specified:
+                - if `production` is True: `prod.<branch>`
+                - if `production` is False: `test.<branch>`
+            - if `branch` is not specified:
+                - if `production` is True: `prod`
+                - if `production` is False: `user.<username>`
+
     MF Add To Current
     -----------------
     project_name -> str
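The new docstring documents `branch` and `production` as decorator-level knobs mirroring the existing `--branch` and `--production` top-level CLI options, with the branch resolution rules spelled out above. A hedged usage sketch (flow and project names are made up); per those rules this flow would resolve to the `prod.ab_test` branch:

from metaflow import FlowSpec, project, step


@project(name="demo_project", branch="ab_test", production=True)
class BranchedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    BranchedFlow()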
metaflow/runner/click_api.py
CHANGED
@@ -41,6 +41,7 @@ from metaflow.exception import MetaflowException
 from metaflow.includefile import FilePathClass
 from metaflow.metaflow_config import CLICK_API_PROCESS_CONFIG
 from metaflow.parameters import JSONTypeClass, flow_context
+from metaflow.user_configs.config_decorators import CustomFlowDecorator
 from metaflow.user_configs.config_options import (
     ConfigValue,
     ConvertDictOrStr,
@@ -252,10 +253,14 @@ def extract_flow_class_from_file(flow_file: str) -> FlowSpec:
         # Cache the loaded module
         loaded_modules[flow_file] = module

-    classes = inspect.getmembers(
+    classes = inspect.getmembers(
+        module, lambda x: inspect.isclass(x) or isinstance(x, CustomFlowDecorator)
+    )
     flow_cls = None

     for _, kls in classes:
+        if isinstance(kls, CustomFlowDecorator):
+            kls = kls._flow_cls
         if (
             kls is not FlowSpec
             and kls.__module__ == module_name
@@ -444,10 +449,10 @@ class MetaflowAPI(object):
         ds = opts.get("datastore", defaults["datastore"])
         quiet = opts.get("quiet", defaults["quiet"])
         is_default = False
-        config_file = opts.get("config
+        config_file = opts.get("config")
         if config_file is None:
             is_default = True
-            config_file = defaults.get("
+            config_file = defaults.get("config")

         if config_file:
             config_file = map(
@@ -480,7 +485,7 @@ class MetaflowAPI(object):
         # Process both configurations; the second one will return all the merged
         # configuration options properly processed.
         self._config_input.process_configs(
-            self._flow_cls.__name__, "
+            self._flow_cls.__name__, "config", config_file, quiet, ds
         )
         config_options = self._config_input.process_configs(
             self._flow_cls.__name__, "config_value", config_value, quiet, ds
@@ -493,7 +498,7 @@ class MetaflowAPI(object):
         # it will init all parameters (config_options will be None)
         # We ignore any errors if we don't check the configs in the click API.
         new_cls = self._flow_cls._process_config_decorators(
-            config_options,
+            config_options, process_configs=CLICK_API_PROCESS_CONFIG
         )
         if new_cls:
             self._flow_cls = new_cls
metaflow/user_configs/config_decorators.py
CHANGED
@@ -406,19 +406,6 @@ class CustomFlowDecorator:
         self._args = args
         self._kwargs = kwargs

-    def __get__(self, instance, owner):
-        # Required so that we "present" as a FlowSpec when the flow decorator is
-        # of the form
-        # @MyFlowDecorator
-        # class MyFlow(FlowSpec):
-        #     pass
-        #
-        # In that case, if we don't have __get__, the object is a CustomFlowDecorator
-        # and not a FlowSpec. This is more critical for steps (and CustomStepDecorator)
-        # because other parts of the code rely on steps having is_step. There are
-        # other ways to solve this but this allowed for minimal changes going forward.
-        return self()
-
     def __call__(
         self, flow_spec: Optional["metaflow.flowspec.FlowSpecMeta"] = None
     ) -> "metaflow.flowspec.FlowSpecMeta":
@@ -447,6 +434,15 @@ class CustomFlowDecorator:
             raise MetaflowException(
                 "A CustomFlowDecorator can only be applied to a FlowSpec"
             )
+        # NOTA: This returns self._flow_cls() because the object in the case of
+        # @FlowDecorator
+        # class MyFlow(FlowSpec):
+        #     pass
+        # the object is a FlowDecorator and when the main function calls it, we end up
+        # here and need to actually call the FlowSpec. This is not the case when using
+        # a decorator with arguments because in the line above, we will have returned a
+        # FlowSpec object. Previous solution was to use __get__ but this does not seem
+        # to work properly.
         return self._flow_cls()

     def _set_flow_cls(
@@ -500,7 +496,16 @@ class CustomStepDecorator:
         self._kwargs = kwargs

     def __get__(self, instance, owner):
-        #
+        # Required so that we "present" as a step when the step decorator is
+        # of the form
+        # @MyStepDecorator
+        # @step
+        # def my_step(self):
+        #     pass
+        #
+        # In that case, if we don't have __get__, the object is a CustomStepDecorator
+        # and not a step. Other parts of the code rely on steps having is_step. There are
+        # other ways to solve this but this allowed for minimal changes going forward.
         return self()

     def __call__(
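The flow-level __get__ is removed (class-level decoration is now resolved inside __call__, per the NOTA comment above), while the step-level __get__ stays and gains its explanatory comment: a decorated step is fetched as a class attribute, so the wrapper has to hand back something that still looks like the underlying step. A generic Python illustration of that descriptor trick, deliberately not Metaflow code:

class Wrapper:
    """Wraps a function; __get__ makes attribute access yield the function again."""

    def __init__(self, func):
        self._func = func

    def __get__(self, instance, owner):
        # Without __get__, `C.method` would evaluate to this Wrapper instance; with it,
        # attribute lookup hands back the wrapped function.
        return self._func


class C:
    @Wrapper
    def method(self):
        return "original"


assert callable(C.method)           # presents as a plain function again
assert C.method(C()) == "original"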
metaflow/util.py
CHANGED
@@ -307,10 +307,10 @@ def dict_to_cli_options(params):
         # keyword in Python, so we call it 'decospecs' in click args
         if k == "decospecs":
             k = "with"
-        if k in ("
+        if k in ("config", "config_value"):
             # Special handling here since we gather them all in one option but actually
             # need to send them one at a time using --config-value <name> kv.<name>
-            # Note it can be either
+            # Note it can be either config or config_value depending
             # on click processing order.
             for config_name in v.keys():
                 yield "--config-value"
metaflow/version.py
CHANGED
@@ -1 +1 @@
-metaflow_version = "2.
+metaflow_version = "2.14.0"
{metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: metaflow
-Version: 2.
+Version: 2.14.0
 Summary: Metaflow: More Data Science, Less Engineering
 Author: Metaflow Developers
 Author-email: help@metaflow.org
@@ -26,7 +26,7 @@ License-File: LICENSE
 Requires-Dist: requests
 Requires-Dist: boto3
 Provides-Extra: stubs
-Requires-Dist: metaflow-stubs==2.
+Requires-Dist: metaflow-stubs==2.14.0; extra == "stubs"
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
{metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/RECORD
CHANGED
@@ -1,8 +1,8 @@
 metaflow/R.py,sha256=CqVfIatvmjciuICNnoyyNGrwE7Va9iXfLdFbQa52hwA,3958
 metaflow/__init__.py,sha256=fbhdWiWnEoAX4KnzRHMY_iQcT-uYlMWhzrXPKvK0i5g,5832
 metaflow/cards.py,sha256=IbRmredvmFEU0V6JL7DR8wCESwVmmZJubr6x24bo7U4,442
-metaflow/cli.py,sha256=
-metaflow/cli_args.py,sha256=
+metaflow/cli.py,sha256=ASW0FwUa4QjF7qQ7XmUTiDrA9d5WMRS3VggSrpG5OLQ,21883
+metaflow/cli_args.py,sha256=hDsdWdRmfXYifVGq6b6FDfgoWxtIG2nr_lU6EBV0Pnk,3584
 metaflow/clone_util.py,sha256=LSuVbFpPUh92UW32DBcnZbL0FFw-4w3CLa0tpEbCkzk,2066
 metaflow/cmd_with_io.py,sha256=kl53HkAIyv0ecpItv08wZYczv7u3msD1VCcciqigqf0,588
 metaflow/debug.py,sha256=HEmt_16tJtqHXQXsqD9pqOFe3CWR5GZ7VwpaYQgnRdU,1466
@@ -10,7 +10,7 @@ metaflow/decorators.py,sha256=cbOCahmwVlnHklMN2O_j5DKvZA7m_Q72_6LBzzBZRhk,24131
 metaflow/event_logger.py,sha256=joTVRqZPL87nvah4ZOwtqWX8NeraM_CXKXXGVpKGD8o,780
 metaflow/events.py,sha256=ahjzkSbSnRCK9RZ-9vTfUviz_6gMvSO9DGkJ86X80-k,5300
 metaflow/exception.py,sha256=_m9ZBJM0cooHRslDqfxCPQmkChqaTh6fGxp7HvISnYI,5161
-metaflow/flowspec.py,sha256=
+metaflow/flowspec.py,sha256=GgbTeUBtG3AmZwIF-prRFMsZqFYGVysd5xBS9IPIPBs,35953
 metaflow/graph.py,sha256=cdpnWr85aEj_rRn-7EjbndWjr_i8Dt3P7-oPUW0NNpI,12393
 metaflow/includefile.py,sha256=kWKDSlzVcRVNGG9PV5eB3o2ynrzqhVsfaLtkqjshn7Q,20948
 metaflow/info_file.py,sha256=wtf2_F0M6dgiUu74AFImM8lfy5RrUw5Yj7Rgs2swKRY,686
@@ -19,7 +19,7 @@ metaflow/lint.py,sha256=x4p6tnRzYqNNniCGXyrUW0WuYfTUgnaOMRivxvnxask,11661
 metaflow/metaflow_config.py,sha256=Uu5GmiVLlAu1-VT6MTprnQ_kPC5WrRS_C2UJ-qgNi9I,23477
 metaflow/metaflow_config_funcs.py,sha256=5GlvoafV6SxykwfL8D12WXSfwjBN_NsyuKE_Q3gjGVE,6738
 metaflow/metaflow_current.py,sha256=pfkXmkyHeMJhxIs6HBJNBEaBDpcl5kz9Wx5mW6F_3qo,7164
-metaflow/metaflow_environment.py,sha256=
+metaflow/metaflow_environment.py,sha256=gs_UpYpuOKEEfFM0z0tnwje7zAVsQ5Ck7Dp2M9_1utQ,8065
 metaflow/metaflow_profile.py,sha256=jKPEW-hmAQO-htSxb9hXaeloLacAh41A35rMZH6G8pA,418
 metaflow/metaflow_version.py,sha256=duhIzfKZtcxMVMs2uiBqBvUarSHJqyWDwMhaBOQd_g0,7491
 metaflow/monitor.py,sha256=T0NMaBPvXynlJAO_avKtk8OIIRMyEuMAyF8bIp79aZU,5323
@@ -34,9 +34,9 @@ metaflow/tagging_util.py,sha256=ctyf0Q1gBi0RyZX6J0e9DQGNkNHblV_CITfy66axXB4,2346
 metaflow/task.py,sha256=xVVLWy8NH16OlLu2VoOb1OfiFzcOVVCdQldlmb1Zb_w,29691
 metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
 metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
-metaflow/util.py,sha256=
+metaflow/util.py,sha256=mJBkV5tShIyCsLDeM1zygQGeciQVMrVPm_qI8Oi33G0,14656
 metaflow/vendor.py,sha256=FchtA9tH22JM-eEtJ2c9FpUdMn8sSb1VHuQS56EcdZk,5139
-metaflow/version.py,sha256=
+metaflow/version.py,sha256=EaSK29SvOJFXVcYk1a_Ie2OHqYgRDs6H9YMj74CUnmU,28
 metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
 metaflow/_vendor/typing_extensions.py,sha256=0nUs5p1A_UrZigrAVBoOEM6TxU37zzPDUtiij1ZwpNc,110417
 metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
@@ -125,7 +125,7 @@ metaflow/cmd/main_cli.py,sha256=E546zT_jYQKysmjwfpEgzZd5QMsyirs28M2s0OPU93E,2966
 metaflow/cmd/tutorials_cmd.py,sha256=8FdlKkicTOhCIDKcBR5b0Oz6giDvS-EMY3o9skIrRqw,5156
 metaflow/cmd/util.py,sha256=jS_0rUjOnGGzPT65fzRLdGjrYAOOLA4jU2S0HJLV0oc,406
 metaflow/cmd/develop/__init__.py,sha256=p1Sy8yU1MEKSrH5ttOWOZvNcI1qYu6J6jghdTHwPgOw,689
-metaflow/cmd/develop/stub_generator.py,sha256=
+metaflow/cmd/develop/stub_generator.py,sha256=vhhEAPkHN-9wZYW4rqnMjVsjwqm4Avn_inVx-wC6DKE,65164
 metaflow/cmd/develop/stubs.py,sha256=JX2qNZDvG0upvPueAcLhoR_zyLtRranZMwY05tLdpRQ,11884
 metaflow/datastore/__init__.py,sha256=VxP6ddJt3rwiCkpiSfAhyVkUCOe1pgZZsytVEJzFmSQ,155
 metaflow/datastore/content_addressed_store.py,sha256=6T7tNqL29kpmecyMLHF35RhoSBOb-OZcExnsB65AvnI,7641
@@ -134,7 +134,7 @@ metaflow/datastore/datastore_storage.py,sha256=7V43QuiWDQ_Q4oHw9y7Z7X9lYj3GI-LV1
 metaflow/datastore/exceptions.py,sha256=r7Ab5FvHIzyFh6kwiptA1lO5nLqWg0xRBoeYGefvapA,373
 metaflow/datastore/flow_datastore.py,sha256=rDMEHdYwub1PwLp2uaK-8CHdd8hiwxqeELXzsUfuqZs,10250
 metaflow/datastore/inputs.py,sha256=i43dXr2xvgtsgKMO9allgCR18bk80GeayeQFyUTH36w,449
-metaflow/datastore/task_datastore.py,sha256=
+metaflow/datastore/task_datastore.py,sha256=VCV-5v3gYbzhmrBr9zcrrzQWJ-L7N-ZbRGbAiVE2aH4,35060
 metaflow/extension_support/__init__.py,sha256=2z0c4R8zsVmEFOMGT2Jujsl6xveDVa9KLll7moL58NE,52984
 metaflow/extension_support/_empty_file.py,sha256=HENjnM4uAfeNygxMB_feCCWORFoSat9n_QwzSx2oXPw,109
 metaflow/extension_support/cmd.py,sha256=hk8iBUUINqvKCDxInKgWpum8ThiRZtHSJP7qBASHzl8,5711
@@ -158,7 +158,7 @@ metaflow/plugins/events_decorator.py,sha256=T_YSK-DlgZhd3ge9PlpTRNaMi15GK0tKZMZl
 metaflow/plugins/logs_cli.py,sha256=77W5UNagU2mOKSMMvrQxQmBLRzvmjK-c8dWxd-Ygbqs,11410
 metaflow/plugins/package_cli.py,sha256=-J6D4cupHfWSZ4GEFo2yy9Je9oL3owRWm5pEJwaiqd4,1649
 metaflow/plugins/parallel_decorator.py,sha256=GR6LKIW7_S7AoU50Ar2_0nndVtO2epdn3LuthE0vKMQ,9127
-metaflow/plugins/project_decorator.py,sha256=
+metaflow/plugins/project_decorator.py,sha256=uhwsguEj7OM_E2OnY1ap3MoGocQHeywuJSa-qPuWn-U,7592
 metaflow/plugins/resources_decorator.py,sha256=AtoOwg4mHYHYthg-CAfbfam-QiT0ViuDLDoukoDvF6Q,1347
 metaflow/plugins/retry_decorator.py,sha256=tz_2Tq6GLg3vjDBZp0KKVTk3ADlCvqaWTSf7blmFdUw,1548
 metaflow/plugins/storage_executor.py,sha256=FqAgR0-L9MuqN8fRtTe4jjUfJL9lqt6fQkYaglAjRbk,6137
@@ -181,8 +181,8 @@ metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=iDReG-7FKnumrtQg-HY6cCUAAqN
 metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 metaflow/plugins/argo/argo_client.py,sha256=PS_cYGnPw9h4X7TP_plObDH3clMw4reOsBLkkGPTd0Y,16282
 metaflow/plugins/argo/argo_events.py,sha256=_C1KWztVqgi3zuH57pInaE9OzABc2NnncC-zdwOMZ-w,5909
-metaflow/plugins/argo/argo_workflows.py,sha256=
-metaflow/plugins/argo/argo_workflows_cli.py,sha256=
+metaflow/plugins/argo/argo_workflows.py,sha256=gAzznOrNQT8SQ2zDqo__lEd1ye5gYqaResrlzzCgKY4,181585
+metaflow/plugins/argo/argo_workflows_cli.py,sha256=kzZF2XRr5If9t135wMXXDTpIRupnWCK7mtDpvZ6K8P0,38424
 metaflow/plugins/argo/argo_workflows_decorator.py,sha256=ogCSBmwsC2C3eusydrgjuAJd4qK18f1sI4jJwA4Fd-o,7800
 metaflow/plugins/argo/argo_workflows_deployer.py,sha256=6kHxEnYXJwzNCM9swI8-0AckxtPWqwhZLerYkX8fxUM,4444
 metaflow/plugins/argo/argo_workflows_deployer_objects.py,sha256=bs0E8WJGQYXuwi6u0OiwTn_jkfeKb5DywCmuJHeRl8I,13949
@@ -298,7 +298,7 @@ metaflow/plugins/kubernetes/spot_metadata_cli.py,sha256=an0nWCxgflmqIPBCBrlb4m3D
 metaflow/plugins/kubernetes/spot_monitor_sidecar.py,sha256=zrWU-smQwPnL6MBHmzTxWyEA00R6iKKQbhhy50xFwQ8,3832
 metaflow/plugins/metadata_providers/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 metaflow/plugins/metadata_providers/local.py,sha256=9UAxe9caN6kU1lkSlIoJbRGgTqsMa62cBTnyMwhqiaA,22446
-metaflow/plugins/metadata_providers/service.py,sha256=
+metaflow/plugins/metadata_providers/service.py,sha256=XhXarANcgkQ4D5q8qhywvd8BjPN274DLGLVx1KtxWF0,20725
 metaflow/plugins/pypi/__init__.py,sha256=0YFZpXvX7HCkyBFglatual7XGifdA1RwC3U4kcizyak,1037
 metaflow/plugins/pypi/bootstrap.py,sha256=oRfJkAp99R338nYX2wq4FrV0Ax5h4QqqifNmAXrM3CY,14892
 metaflow/plugins/pypi/conda_decorator.py,sha256=piFcE4uGmWhhbGlxMK0GHd7BGEyqy6r9BFy8Mjoi80Q,15937
@@ -312,7 +312,7 @@ metaflow/plugins/secrets/__init__.py,sha256=mhJaN2eMS_ZZVewAMR2E-JdP5i0t3v9e6Dcw
 metaflow/plugins/secrets/inline_secrets_provider.py,sha256=EChmoBGA1i7qM3jtYwPpLZDBybXLergiDlN63E0u3x8,294
 metaflow/plugins/secrets/secrets_decorator.py,sha256=s-sFzPWOjahhpr5fMj-ZEaHkDYAPTO0isYXGvaUwlG8,11273
 metaflow/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-metaflow/runner/click_api.py,sha256=
+metaflow/runner/click_api.py,sha256=truFw2mbtnEKWVhfY5haIxJEyyoB6-cipoY5-ZWXFXo,23111
 metaflow/runner/deployer.py,sha256=Yas_SZCss3kfJw3hLC8_IyzgiytUFGoEGHz-l-rBBKk,8980
 metaflow/runner/deployer_impl.py,sha256=Kab9rLoA3EiBJDtTTulhPCeKzqiljW366nx2Tm0LYy0,6143
 metaflow/runner/metaflow_runner.py,sha256=L302ew_BPBPs-NnW8n92dqqbqmHwrwGL5D6kTZvl5vY,16074
@@ -357,12 +357,12 @@ metaflow/tutorials/07-worldview/worldview.ipynb,sha256=ztPZPI9BXxvW1QdS2Tfe7LBuV
 metaflow/tutorials/08-autopilot/README.md,sha256=GnePFp_q76jPs991lMUqfIIh5zSorIeWznyiUxzeUVE,1039
 metaflow/tutorials/08-autopilot/autopilot.ipynb,sha256=DQoJlILV7Mq9vfPBGW-QV_kNhWPjS5n6SJLqePjFYLY,3191
 metaflow/user_configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-metaflow/user_configs/config_decorators.py,sha256=
-metaflow/user_configs/config_options.py,sha256=
+metaflow/user_configs/config_decorators.py,sha256=qCKVAvd0NKgaCxQ2OThes5-DYHXq6A1HqURubYNeFdw,20481
+metaflow/user_configs/config_options.py,sha256=m6jccSpzI4qUJ7vyYkYBIf8G3V0Caunxg_k7zg4Zlqg,21067
 metaflow/user_configs/config_parameters.py,sha256=oeJGVKu1ao_YQX6Lg6P2FEv5k5-_F4sARLlVpTW9ezM,15502
-metaflow-2.
-metaflow-2.
-metaflow-2.
-metaflow-2.
-metaflow-2.
-metaflow-2.
+metaflow-2.14.0.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+metaflow-2.14.0.dist-info/METADATA,sha256=qxM8LtLppUZz43NgvqAQ8H2D0pRnlYoTwkGfwEdYXow,6121
+metaflow-2.14.0.dist-info/WHEEL,sha256=9Hm2OB-j1QcCUq9Jguht7ayGIIZBRTdOXD1qg9cCgPM,109
+metaflow-2.14.0.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
+metaflow-2.14.0.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
+metaflow-2.14.0.dist-info/RECORD,,
{metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/LICENSE
File without changes
{metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/WHEEL
File without changes
{metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/entry_points.txt
File without changes
{metaflow-2.13.9.dist-info → metaflow-2.14.0.dist-info}/top_level.txt
File without changes