runnable-0.22.0.tar.gz → runnable-0.24.0.tar.gz

Files changed (57)
  1. {runnable-0.22.0 → runnable-0.24.0}/PKG-INFO +1 -1
  2. {runnable-0.22.0 → runnable-0.24.0}/extensions/job_executor/__init__.py +15 -1
  3. {runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/__init__.py +19 -8
  4. {runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/argo.py +86 -44
  5. {runnable-0.22.0 → runnable-0.24.0}/pyproject.toml +1 -1
  6. {runnable-0.22.0 → runnable-0.24.0}/runnable/cli.py +8 -4
  7. {runnable-0.22.0 → runnable-0.24.0}/runnable/entrypoints.py +12 -12
  8. {runnable-0.22.0 → runnable-0.24.0}/runnable/executor.py +5 -0
  9. {runnable-0.22.0 → runnable-0.24.0}/runnable/sdk.py +4 -1
  10. {runnable-0.22.0 → runnable-0.24.0}/runnable/utils.py +7 -3
  11. {runnable-0.22.0 → runnable-0.24.0}/.gitignore +0 -0
  12. {runnable-0.22.0 → runnable-0.24.0}/LICENSE +0 -0
  13. {runnable-0.22.0 → runnable-0.24.0}/README.md +0 -0
  14. {runnable-0.22.0 → runnable-0.24.0}/extensions/README.md +0 -0
  15. {runnable-0.22.0 → runnable-0.24.0}/extensions/__init__.py +0 -0
  16. {runnable-0.22.0 → runnable-0.24.0}/extensions/catalog/README.md +0 -0
  17. {runnable-0.22.0 → runnable-0.24.0}/extensions/catalog/file_system.py +0 -0
  18. {runnable-0.22.0 → runnable-0.24.0}/extensions/catalog/pyproject.toml +0 -0
  19. {runnable-0.22.0 → runnable-0.24.0}/extensions/job_executor/README.md +0 -0
  20. {runnable-0.22.0 → runnable-0.24.0}/extensions/job_executor/k8s.py +0 -0
  21. {runnable-0.22.0 → runnable-0.24.0}/extensions/job_executor/k8s_job_spec.yaml +0 -0
  22. {runnable-0.22.0 → runnable-0.24.0}/extensions/job_executor/local.py +0 -0
  23. {runnable-0.22.0 → runnable-0.24.0}/extensions/job_executor/local_container.py +0 -0
  24. {runnable-0.22.0 → runnable-0.24.0}/extensions/job_executor/pyproject.toml +0 -0
  25. {runnable-0.22.0 → runnable-0.24.0}/extensions/nodes/README.md +0 -0
  26. {runnable-0.22.0 → runnable-0.24.0}/extensions/nodes/nodes.py +0 -0
  27. {runnable-0.22.0 → runnable-0.24.0}/extensions/nodes/pyproject.toml +0 -0
  28. {runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/README.md +0 -0
  29. {runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/local.py +0 -0
  30. {runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/local_container.py +0 -0
  31. {runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/mocked.py +0 -0
  32. {runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/pyproject.toml +0 -0
  33. {runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/retry.py +0 -0
  34. {runnable-0.22.0 → runnable-0.24.0}/extensions/run_log_store/README.md +0 -0
  35. {runnable-0.22.0 → runnable-0.24.0}/extensions/run_log_store/__init__.py +0 -0
  36. {runnable-0.22.0 → runnable-0.24.0}/extensions/run_log_store/chunked_fs.py +0 -0
  37. {runnable-0.22.0 → runnable-0.24.0}/extensions/run_log_store/db/implementation_FF.py +0 -0
  38. {runnable-0.22.0 → runnable-0.24.0}/extensions/run_log_store/db/integration_FF.py +0 -0
  39. {runnable-0.22.0 → runnable-0.24.0}/extensions/run_log_store/file_system.py +0 -0
  40. {runnable-0.22.0 → runnable-0.24.0}/extensions/run_log_store/generic_chunked.py +0 -0
  41. {runnable-0.22.0 → runnable-0.24.0}/extensions/run_log_store/pyproject.toml +0 -0
  42. {runnable-0.22.0 → runnable-0.24.0}/extensions/secrets/README.md +0 -0
  43. {runnable-0.22.0 → runnable-0.24.0}/extensions/secrets/dotenv.py +0 -0
  44. {runnable-0.22.0 → runnable-0.24.0}/extensions/secrets/pyproject.toml +0 -0
  45. {runnable-0.22.0 → runnable-0.24.0}/runnable/__init__.py +0 -0
  46. {runnable-0.22.0 → runnable-0.24.0}/runnable/catalog.py +0 -0
  47. {runnable-0.22.0 → runnable-0.24.0}/runnable/context.py +0 -0
  48. {runnable-0.22.0 → runnable-0.24.0}/runnable/datastore.py +0 -0
  49. {runnable-0.22.0 → runnable-0.24.0}/runnable/defaults.py +0 -0
  50. {runnable-0.22.0 → runnable-0.24.0}/runnable/exceptions.py +0 -0
  51. {runnable-0.22.0 → runnable-0.24.0}/runnable/graph.py +0 -0
  52. {runnable-0.22.0 → runnable-0.24.0}/runnable/names.py +0 -0
  53. {runnable-0.22.0 → runnable-0.24.0}/runnable/nodes.py +0 -0
  54. {runnable-0.22.0 → runnable-0.24.0}/runnable/parameters.py +0 -0
  55. {runnable-0.22.0 → runnable-0.24.0}/runnable/pickler.py +0 -0
  56. {runnable-0.22.0 → runnable-0.24.0}/runnable/secrets.py +0 -0
  57. {runnable-0.22.0 → runnable-0.24.0}/runnable/tasks.py +0 -0
{runnable-0.22.0 → runnable-0.24.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: runnable
-Version: 0.22.0
+Version: 0.24.0
 Summary: Add your description here
 Author-email: "Vammi, Vijay" <vijay.vammi@astrazeneca.com>
 License-File: LICENSE

{runnable-0.22.0 → runnable-0.24.0}/extensions/job_executor/__init__.py
@@ -2,7 +2,7 @@ import logging
 import os
 from typing import Dict, List, Optional
 
-from runnable import context, defaults, exceptions, parameters, utils
+from runnable import context, defaults, exceptions, parameters, task_console, utils
 from runnable.datastore import DataCatalog, JobLog, JsonParameter
 from runnable.executor import BaseJobExecutor
 
@@ -158,3 +158,17 @@ class GenericJobExecutor(BaseJobExecutor):
         data_catalogs.extend(data_catalog)
 
         return data_catalogs
+
+    def add_task_log_to_catalog(
+        self, name: str, map_variable: Dict[str, str | int | float] | None = None
+    ):
+        log_file_name = utils.make_log_file_name(
+            name=name,
+            map_variable=map_variable,
+        )
+        task_console.save_text(log_file_name)
+        # Put the log file in the catalog
+        self._context.catalog_handler.put(
+            name=log_file_name, run_id=self._context.run_id
+        )
+        os.remove(log_file_name)

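Note: `add_task_log_to_catalog` centralises the save-log/put-in-catalog/remove-file sequence that callers previously inlined. Callers are expected to invoke it from a `finally` block so the console log reaches the catalog even when the job fails. A minimal usage sketch, assuming `executor`, `job` and `catalog_settings` come from the surrounding run context (as in the sdk.py hunk below):

    try:
        executor.submit_job(job, catalog_settings=catalog_settings)
    finally:
        # "job" becomes the base of the cataloged log-file name
        executor.add_task_log_to_catalog("job")
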
{runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/__init__.py
@@ -210,6 +210,20 @@ class GenericPipelineExecutor(BasePipelineExecutor):
         """
         return int(os.environ.get(defaults.ATTEMPT_NUMBER, 1))
 
+    def add_task_log_to_catalog(
+        self, name: str, map_variable: Dict[str, str | int | float] | None = None
+    ):
+        log_file_name = utils.make_log_file_name(
+            name=name,
+            map_variable=map_variable,
+        )
+        task_console.save_text(log_file_name)
+        # Put the log file in the catalog
+        self._context.catalog_handler.put(
+            name=log_file_name, run_id=self._context.run_id
+        )
+        os.remove(log_file_name)
+
     def _execute_node(
         self,
         node: BaseNode,
@@ -336,14 +350,6 @@ class GenericPipelineExecutor(BasePipelineExecutor):
         )
         self.trigger_node_execution(node=node, map_variable=map_variable, **kwargs)
 
-        log_file_name = utils.make_log_file_name(node=node, map_variable=map_variable)
-        task_console.save_text(log_file_name, clear=True)
-
-        self._context.catalog_handler.put(
-            name=log_file_name, run_id=self._context.run_id
-        )
-        os.remove(log_file_name)
-
     def trigger_node_execution(
         self, node: BaseNode, map_variable: TypeMapVariable = None, **kwargs
     ):
@@ -482,6 +488,11 @@ class GenericPipelineExecutor(BasePipelineExecutor):
             console.print(e, style=defaults.error_style)
             logger.exception(e)
            raise
+        finally:
+            # Add task log to the catalog
+            self.add_task_log_to_catalog(
+                name=working_on.internal_name, map_variable=map_variable
+            )
 
        console.rule(style="[dark orange]")

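Note: the upload previously ran inline after `trigger_node_execution`, so a node that raised never got its console log into the catalog; moving the call into a `finally` block behind the shared helper closes that gap. A simplified sketch of the resulting control flow, with names as in the hunk above (the surrounding method is not fully shown in this diff):

    try:
        ...  # execute the node
    except Exception as e:
        logger.exception(e)
        raise
    finally:
        # runs on success and on failure alike
        self.add_task_log_to_catalog(
            name=working_on.internal_name, map_variable=map_variable
        )
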
{runnable-0.22.0 → runnable-0.24.0}/extensions/pipeline_executor/argo.py
@@ -6,7 +6,7 @@ import string
 from collections import namedtuple
 from enum import Enum
 from functools import cached_property
-from typing import Annotated, Literal, Optional
+from typing import Annotated, Any, Literal, Optional, cast
 
 from pydantic import (
     BaseModel,
@@ -20,7 +20,13 @@ from pydantic import (
 from pydantic.alias_generators import to_camel
 from ruamel.yaml import YAML
 
-from extensions.nodes.nodes import MapNode, ParallelNode, TaskNode
+from extensions.nodes.nodes import (
+    MapNode,
+    ParallelNode,
+    StubNode,
+    SuccessNode,
+    TaskNode,
+)
 from extensions.pipeline_executor import GenericPipelineExecutor
 from runnable import defaults, utils
 from runnable.defaults import TypeMapVariable
@@ -229,9 +235,8 @@ class Resources(BaseModel):
     requests: Request = Field(default=Request(), serialization_alias="requests")
 
 
-# This is what the user can override per template
-# Some are specific to container and some are specific to dag
-class TemplateDefaults(BaseModelWIthConfig):
+# Lets construct this from UserDefaults
+class ArgoTemplateDefaults(BaseModelWIthConfig):
     active_deadline_seconds: Optional[int] = Field(default=86400)  # 1 day
     fail_fast: bool = Field(default=True)
     node_selector: dict[str, str] = Field(default_factory=dict)
@@ -240,13 +245,34 @@ class TemplateDefaults(BaseModelWIthConfig):
     timeout: Optional[str] = Field(default=None)
     tolerations: Optional[list[Toleration]] = Field(default=None)
 
-    # These are in addition to what argo spec provides
-    image: str
-    image_pull_policy: Optional[ImagePullPolicy] = Field(default=ImagePullPolicy.Always)
+    model_config = ConfigDict(
+        extra="ignore",
+    )
+
+
+class CommonDefaults(BaseModelWIthConfig):
+    active_deadline_seconds: Optional[int] = Field(default=86400)  # 1 day
+    fail_fast: bool = Field(default=True)
+    node_selector: dict[str, str] = Field(default_factory=dict)
+    parallelism: Optional[int] = Field(default=None)
+    retry_strategy: Optional[RetryStrategy] = Field(default=None)
+    timeout: Optional[str] = Field(default=None)
+    tolerations: Optional[list[Toleration]] = Field(default=None)
+    image_pull_policy: ImagePullPolicy = Field(default=ImagePullPolicy.Always)
     resources: Resources = Field(default_factory=Resources)
     env: list[EnvVar | SecretEnvVar] = Field(default_factory=list, exclude=True)
 
 
+# The user provided defaults at the top level
+class UserDefaults(CommonDefaults):
+    image: str
+
+
+# Overrides need not have image
+class Overrides(CommonDefaults):
+    image: Optional[str] = Field(default=None)
+
+
 # User provides this as part of the argoSpec
 # some an be provided here or as a template default or node override
 class ArgoWorkflowSpec(BaseModelWIthConfig):
@@ -258,8 +284,8 @@ class ArgoWorkflowSpec(BaseModelWIthConfig):
     pod_gc: Optional[PodGC] = Field(default=None, serialization_alias="podGC")
     retry_strategy: Optional[RetryStrategy] = Field(default=None)
     service_account_name: Optional[str] = Field(default=None)
-    template_defaults: TemplateDefaults
     tolerations: Optional[list[Toleration]] = Field(default=None)
+    template_defaults: Optional[ArgoTemplateDefaults] = Field(default=None)
 
 
 class ArgoMetadata(BaseModelWIthConfig):
@@ -321,7 +347,6 @@ class ContainerTemplate((BaseModelWIthConfig)):
     inputs: Optional[Inputs] = Field(default=None)
     outputs: Optional[Outputs] = Field(default=None)
 
-    # The remaining can be from template defaults or node overrides
     active_deadline_seconds: Optional[int] = Field(default=86400)  # 1 day
     metadata: Optional[PodMetaData] = Field(default=None)
     node_selector: dict[str, str] = Field(default_factory=dict)
@@ -356,17 +381,10 @@ class ArgoExecutor(GenericPipelineExecutor):
         from_attributes=True,
         use_enum_values=True,
     )
-
-    argo_workflow: ArgoWorkflow
-
-    # Lets use a generic one
     pvc_for_runnable: Optional[str] = Field(default=None)
-    # pvc_for_catalog: Optional[str] = Field(default=None)
-    # pvc_for_run_log: Optional[str] = Field(default=None)
     custom_volumes: Optional[list[CustomVolume]] = Field(
         default_factory=list[CustomVolume]
     )
-    env: list[EnvVar] = Field(default_factory=list[EnvVar])
 
     expose_parameters_as_inputs: bool = True
     secret_from_k8s: Optional[str] = Field(default=None)
@@ -375,6 +393,11 @@
         default="INFO"
     )
 
+    defaults: UserDefaults
+    argo_workflow: ArgoWorkflow
+
+    overrides: dict[str, Overrides] = Field(default_factory=dict)
+
     # This should be used when we refer to run_id or log_level in the containers
     _run_id_as_parameter: str = PrivateAttr(default="{{workflow.parameters.run_id}}")
     _log_level_as_parameter: str = PrivateAttr(
@@ -388,6 +411,11 @@
     _container_catalog_location: str = PrivateAttr(default="/tmp/catalog/")
     _added_initial_container: bool = PrivateAttr(default=False)
 
+    def model_post_init(self, __context: Any) -> None:
+        self.argo_workflow.spec.template_defaults = ArgoTemplateDefaults(
+            **self.defaults.model_dump()
+        )
+
     def sanitize_name(self, name: str) -> str:
         formatted_name = name.replace(" ", "-").replace(".", "-").replace("_", "-")
         tag = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))
@@ -425,8 +453,6 @@
         parameters: Optional[list[Parameter]],
         task_name: str,
     ):
-        template_defaults = self.argo_workflow.spec.template_defaults.model_dump()
-
         map_variable: TypeMapVariable = {}
         for parameter in parameters or []:
             map_variable[parameter.name] = (  # type: ignore
@@ -442,8 +468,8 @@
 
         core_container_template = CoreContainerTemplate(
             command=shlex.split(fan_command),
-            image=template_defaults["image"],
-            image_pull_policy=template_defaults["image_pull_policy"],
+            image=self.defaults.image,
+            image_pull_policy=self.defaults.image_pull_policy,
             volume_mounts=[
                 volume_pair.volume_mount for volume_pair in self.volume_pairs
             ],
@@ -459,12 +485,17 @@
             outputs = Outputs(parameters=[OutputParameter(name="iterate-on")])
 
         container_template = ContainerTemplate(
-            container=core_container_template,
             name=task_name,
-            volumes=[volume_pair.volume for volume_pair in self.volume_pairs],
+            container=core_container_template,
             inputs=Inputs(parameters=parameters),
             outputs=outputs,
-            **template_defaults,
+            active_deadline_seconds=self.defaults.active_deadline_seconds,
+            node_selector=self.defaults.node_selector,
+            parallelism=self.defaults.parallelism,
+            retry_strategy=self.defaults.retry_strategy,
+            timeout=self.defaults.timeout,
+            tolerations=self.defaults.tolerations,
+            volumes=[volume_pair.volume for volume_pair in self.volume_pairs],
         )
 
         self._templates.append(container_template)
@@ -475,14 +506,23 @@
         task_name: str,
         inputs: Optional[Inputs] = None,
     ) -> ContainerTemplate:
-        template_defaults = self.argo_workflow.spec.template_defaults.model_dump()
+        assert (
+            isinstance(node, TaskNode)
+            or isinstance(node, StubNode)
+            or isinstance(node, SuccessNode)
+        )
 
-        node_overide = {}
-        if hasattr(node, "overides"):
-            node_overide = node.overides
+        node_override = None
+        if hasattr(node, "overrides"):
+            override_key = node.overrides.get(self.service_name, "")
+            try:
+                node_override = self.overrides.get(override_key)
+            except:  # noqa
+                raise Exception("Override not found for: ", override_key)
 
-        # update template defaults with node overrides
-        template_defaults.update(node_overide)
+        effective_settings = self.defaults.model_dump()
+        if node_override:
+            effective_settings.update(node_override.model_dump())
 
         inputs = inputs or Inputs(parameters=[])
@@ -502,8 +542,9 @@
 
         core_container_template = CoreContainerTemplate(
             command=shlex.split(command),
-            image=template_defaults["image"],
-            image_pull_policy=template_defaults["image_pull_policy"],
+            image=effective_settings["image"],
+            image_pull_policy=effective_settings["image_pull_policy"],
+            resources=effective_settings["resources"],
             volume_mounts=[
                 volume_pair.volume_mount for volume_pair in self.volume_pairs
             ],
@@ -516,15 +557,15 @@
         self._set_env_vars_to_task(node, core_container_template)
 
         container_template = ContainerTemplate(
-            container=core_container_template,
             name=task_name,
+            container=core_container_template,
             inputs=Inputs(
                 parameters=[
                     Parameter(name=param.name) for param in inputs.parameters or []
                 ]
             ),
             volumes=[volume_pair.volume for volume_pair in self.volume_pairs],
-            **template_defaults,
+            **node_override.model_dump() if node_override else {},
        )
 
         return container_template
@@ -534,16 +575,22 @@
     ):
         if not isinstance(working_on, TaskNode):
             return
+
         global_envs: dict[str, str] = {}
 
-        for env_var in self.env:
+        for env_var in self.defaults.env:
+            env_var = cast(EnvVar, env_var)
             global_envs[env_var.name] = env_var.value
 
-        node_overide = {}
-        if hasattr(working_on, "overides"):
-            node_overide = working_on.overides
+        override_key = working_on.overrides.get(self.service_name, "")
+        node_override = self.overrides.get(override_key, None)
+
+        # Update the global envs with the node overrides
+        if node_override:
+            for env_var in node_override.env:
+                env_var = cast(EnvVar, env_var)
+                global_envs[env_var.name] = env_var.value
 
-        global_envs.update(node_overide.get("env", {}))
         for key, value in global_envs.items():
             env_var_to_add = EnvVar(name=key, value=value)
             container_template.env.append(env_var_to_add)
@@ -796,11 +843,6 @@
 
         argo_workflow_dump = self.argo_workflow.model_dump(
             by_alias=True,
-            exclude={
-                "spec": {
-                    "template_defaults": {"image_pull_policy", "image", "resources"}
-                }
-            },
            exclude_none=True,
             round_trip=False,
         )

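Note on the new configuration surface: `defaults` (a `UserDefaults`, where `image` is mandatory) replaces the old user-facing `template_defaults`, and named `overrides` may omit `image`. `model_post_init` derives `spec.template_defaults` from `defaults`, while `extra="ignore"` on `ArgoTemplateDefaults` silently drops the container-only fields (`image`, `image_pull_policy`, `resources`, `env`), which is why the explicit `exclude={...}` in `model_dump` above could be removed. A sketch of per-node override resolution, following the logic in `create_container_template` (the `"argo"` service-name key is an assumption not shown in this diff):

    # node.overrides maps executor service names to an override key, e.g. {"argo": "gpu"}
    override_key = node.overrides.get("argo", "")          # "" when the node has no override
    node_override = executor.overrides.get(override_key)   # an Overrides instance, or None

    effective = executor.defaults.model_dump()             # start from the top-level defaults
    if node_override:
        effective.update(node_override.model_dump())       # override wins field-by-field

    # effective["image"], effective["image_pull_policy"] and effective["resources"]
    # then feed the CoreContainerTemplate for that node
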
{runnable-0.22.0 → runnable-0.24.0}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "runnable"
-version = "0.22.0"
+version = "0.24.0"
 description = "Add your description here"
 readme = "README.md"
 authors = [

{runnable-0.22.0 → runnable-0.24.0}/runnable/cli.py
@@ -64,8 +64,9 @@ def execute(
     tag: Annotated[str, typer.Option(help="A tag attached to the run")] = "",
     run_id: Annotated[
         str,
-        typer.Option(
-            help="An optional run_id, one would be generated if its not provided"
+        typer.Argument(
+            envvar="RUNNABLE_RUN_ID",
+            help="An optional run_id, one would be generated if its not provided",
         ),
     ] = "",
 ):
@@ -282,8 +283,11 @@ def execute_job(
     ],
     run_id: Annotated[
         str,
-        typer.Argument(help="An run_id, one would be generated if its not provided"),
-    ],
+        typer.Argument(
+            envvar="RUNNABLE_RUN_ID",
+            help="An optional run_id, one would be generated if its not provided",
+        ),
+    ] = "",
     config_file: Annotated[
         str,
         typer.Option(

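Note: `run_id` is now a positional argument that also falls back to the `RUNNABLE_RUN_ID` environment variable. A self-contained sketch of the same typer mechanism (a toy command, not runnable's actual CLI):

    from typing import Annotated

    import typer

    app = typer.Typer()

    @app.command()
    def execute(
        run_id: Annotated[
            str,
            typer.Argument(envvar="RUNNABLE_RUN_ID", help="Optional; generated if absent"),
        ] = "",
    ):
        # with no argument and RUNNABLE_RUN_ID=ci-1234 exported, prints run_id='ci-1234'
        typer.echo(f"run_id={run_id!r}")

    if __name__ == "__main__":
        app()
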
{runnable-0.22.0 → runnable-0.24.0}/runnable/entrypoints.py
@@ -345,15 +345,9 @@ def execute_single_node(
     try:
         executor.execute_node(node=node_to_execute, map_variable=map_variable_dict)
     finally:
-        log_file_name = utils.make_log_file_name(
-            node=node_to_execute,
-            map_variable=map_variable_dict,
+        run_context.executor.add_task_log_to_catalog(
+            name=node_to_execute.internal_name, map_variable=map_variable_dict
         )
-        task_console.save_text(log_file_name)
-
-        # Put the log file in the catalog
-        run_context.catalog_handler.put(name=log_file_name, run_id=run_context.run_id)
-        os.remove(log_file_name)
 
     executor.send_return_code()
@@ -408,7 +402,10 @@ def execute_job_yaml_spec(
     )
 
     assert isinstance(executor, BaseJobExecutor)
-    executor.submit_job(job, catalog_settings=catalog_config)
+    try:
+        executor.submit_job(job, catalog_settings=catalog_config)
+    finally:
+        run_context.executor.add_task_log_to_catalog("job")
 
     executor.send_return_code()
@@ -483,9 +480,12 @@ def execute_job_non_local(
         "Executing the job from the user. We are still in the caller's compute environment"
     )
 
-    run_context.executor.execute_job(
-        run_context.job, catalog_settings=run_context.job_catalog_settings
-    )
+    try:
+        run_context.executor.execute_job(
+            run_context.job, catalog_settings=run_context.job_catalog_settings
+        )
+    finally:
+        run_context.executor.add_task_log_to_catalog("job")
 
     run_context.executor.send_return_code()

{runnable-0.22.0 → runnable-0.24.0}/runnable/executor.py
@@ -84,6 +84,11 @@ class BaseExecutor(ABC, BaseModel):
         """
         ...
 
+    @abstractmethod
+    def add_task_log_to_catalog(
+        self, name: str, map_variable: Optional[TypeMapVariable] = None
+    ): ...
+
 
 class BaseJobExecutor(BaseExecutor):
     service_type: str = "job_executor"

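Note: because the hook is declared with `@abstractmethod` on `BaseExecutor`, every concrete executor must now provide it. A minimal no-op sketch for a hypothetical custom executor that opts out of persisting console logs (import paths assumed; other abstract methods elided):

    from typing import Optional

    from runnable.defaults import TypeMapVariable
    from runnable.executor import BasePipelineExecutor

    class QuietExecutor(BasePipelineExecutor):  # hypothetical subclass
        def add_task_log_to_catalog(
            self, name: str, map_variable: Optional[TypeMapVariable] = None
        ):
            pass  # deliberately skip cataloging the console log
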
{runnable-0.22.0 → runnable-0.24.0}/runnable/sdk.py
@@ -899,7 +899,10 @@ class BaseJob(BaseModel):
         job = self.get_task()
         catalog_settings = self.return_catalog_settings()
 
-        run_context.executor.submit_job(job, catalog_settings=catalog_settings)
+        try:
+            run_context.executor.submit_job(job, catalog_settings=catalog_settings)
+        finally:
+            run_context.executor.add_task_log_to_catalog("job")
 
         logger.info(
             "Executing the job from the user. We are still in the caller's compute environment"

{runnable-0.22.0 → runnable-0.24.0}/runnable/utils.py
@@ -77,7 +77,11 @@ def generate_run_id(run_id: str = "") -> str:
     Returns:
         str: A generated run_id
     """
-    # If we are not provided with a run_id, generate one
+    # If we are not provided with a run_id, check env var
+    if not run_id:
+        run_id = os.environ.get(defaults.ENV_RUN_ID, "")
+
+    # If both are not given, generate one
     if not run_id:
         now = datetime.now()
         run_id = f"{names.get_random_name()}-{now.hour:02}{now.minute:02}"
@@ -628,9 +632,9 @@ def gather_variables() -> Dict[str, str]:
     return variables
 
 
-def make_log_file_name(node: BaseNode, map_variable: TypeMapVariable) -> str:
+def make_log_file_name(name: str, map_variable: TypeMapVariable) -> str:
     random_tag = "".join(random.choices(string.ascii_uppercase + string.digits, k=3))
-    log_file_name = node.name
+    log_file_name = name
 
     if map_variable:
         for _, value in map_variable.items():

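Note: the resulting precedence in `generate_run_id` is the explicit argument first, then the environment variable named by `defaults.ENV_RUN_ID` (presumably RUNNABLE_RUN_ID, matching the CLI change above), then a generated `<random-name>-HHMM`. A sketch under that assumption:

    import os

    from runnable.utils import generate_run_id

    print(generate_run_id("my-run"))   # -> "my-run": the explicit argument wins

    os.environ["RUNNABLE_RUN_ID"] = "ci-1234"
    print(generate_run_id())           # -> "ci-1234": environment fallback

    del os.environ["RUNNABLE_RUN_ID"]
    print(generate_run_id())           # -> e.g. "quirky-shaw-0937": generated name-HHMM
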