runnable 0.21.0.tar.gz → 0.23.0.tar.gz

Files changed (57)
  1. {runnable-0.21.0 → runnable-0.23.0}/PKG-INFO +1 -1
  2. {runnable-0.21.0 → runnable-0.23.0}/extensions/job_executor/__init__.py +15 -1
  3. {runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/__init__.py +19 -8
  4. {runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/argo.py +107 -39
  5. {runnable-0.21.0 → runnable-0.23.0}/pyproject.toml +1 -1
  6. {runnable-0.21.0 → runnable-0.23.0}/runnable/entrypoints.py +12 -12
  7. {runnable-0.21.0 → runnable-0.23.0}/runnable/executor.py +5 -0
  8. {runnable-0.21.0 → runnable-0.23.0}/runnable/sdk.py +4 -1
  9. {runnable-0.21.0 → runnable-0.23.0}/runnable/utils.py +12 -4
  10. {runnable-0.21.0 → runnable-0.23.0}/.gitignore +0 -0
  11. {runnable-0.21.0 → runnable-0.23.0}/LICENSE +0 -0
  12. {runnable-0.21.0 → runnable-0.23.0}/README.md +0 -0
  13. {runnable-0.21.0 → runnable-0.23.0}/extensions/README.md +0 -0
  14. {runnable-0.21.0 → runnable-0.23.0}/extensions/__init__.py +0 -0
  15. {runnable-0.21.0 → runnable-0.23.0}/extensions/catalog/README.md +0 -0
  16. {runnable-0.21.0 → runnable-0.23.0}/extensions/catalog/file_system.py +0 -0
  17. {runnable-0.21.0 → runnable-0.23.0}/extensions/catalog/pyproject.toml +0 -0
  18. {runnable-0.21.0 → runnable-0.23.0}/extensions/job_executor/README.md +0 -0
  19. {runnable-0.21.0 → runnable-0.23.0}/extensions/job_executor/k8s.py +0 -0
  20. {runnable-0.21.0 → runnable-0.23.0}/extensions/job_executor/k8s_job_spec.yaml +0 -0
  21. {runnable-0.21.0 → runnable-0.23.0}/extensions/job_executor/local.py +0 -0
  22. {runnable-0.21.0 → runnable-0.23.0}/extensions/job_executor/local_container.py +0 -0
  23. {runnable-0.21.0 → runnable-0.23.0}/extensions/job_executor/pyproject.toml +0 -0
  24. {runnable-0.21.0 → runnable-0.23.0}/extensions/nodes/README.md +0 -0
  25. {runnable-0.21.0 → runnable-0.23.0}/extensions/nodes/nodes.py +0 -0
  26. {runnable-0.21.0 → runnable-0.23.0}/extensions/nodes/pyproject.toml +0 -0
  27. {runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/README.md +0 -0
  28. {runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/local.py +0 -0
  29. {runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/local_container.py +0 -0
  30. {runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/mocked.py +0 -0
  31. {runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/pyproject.toml +0 -0
  32. {runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/retry.py +0 -0
  33. {runnable-0.21.0 → runnable-0.23.0}/extensions/run_log_store/README.md +0 -0
  34. {runnable-0.21.0 → runnable-0.23.0}/extensions/run_log_store/__init__.py +0 -0
  35. {runnable-0.21.0 → runnable-0.23.0}/extensions/run_log_store/chunked_fs.py +0 -0
  36. {runnable-0.21.0 → runnable-0.23.0}/extensions/run_log_store/db/implementation_FF.py +0 -0
  37. {runnable-0.21.0 → runnable-0.23.0}/extensions/run_log_store/db/integration_FF.py +0 -0
  38. {runnable-0.21.0 → runnable-0.23.0}/extensions/run_log_store/file_system.py +0 -0
  39. {runnable-0.21.0 → runnable-0.23.0}/extensions/run_log_store/generic_chunked.py +0 -0
  40. {runnable-0.21.0 → runnable-0.23.0}/extensions/run_log_store/pyproject.toml +0 -0
  41. {runnable-0.21.0 → runnable-0.23.0}/extensions/secrets/README.md +0 -0
  42. {runnable-0.21.0 → runnable-0.23.0}/extensions/secrets/dotenv.py +0 -0
  43. {runnable-0.21.0 → runnable-0.23.0}/extensions/secrets/pyproject.toml +0 -0
  44. {runnable-0.21.0 → runnable-0.23.0}/runnable/__init__.py +0 -0
  45. {runnable-0.21.0 → runnable-0.23.0}/runnable/catalog.py +0 -0
  46. {runnable-0.21.0 → runnable-0.23.0}/runnable/cli.py +0 -0
  47. {runnable-0.21.0 → runnable-0.23.0}/runnable/context.py +0 -0
  48. {runnable-0.21.0 → runnable-0.23.0}/runnable/datastore.py +0 -0
  49. {runnable-0.21.0 → runnable-0.23.0}/runnable/defaults.py +0 -0
  50. {runnable-0.21.0 → runnable-0.23.0}/runnable/exceptions.py +0 -0
  51. {runnable-0.21.0 → runnable-0.23.0}/runnable/graph.py +0 -0
  52. {runnable-0.21.0 → runnable-0.23.0}/runnable/names.py +0 -0
  53. {runnable-0.21.0 → runnable-0.23.0}/runnable/nodes.py +0 -0
  54. {runnable-0.21.0 → runnable-0.23.0}/runnable/parameters.py +0 -0
  55. {runnable-0.21.0 → runnable-0.23.0}/runnable/pickler.py +0 -0
  56. {runnable-0.21.0 → runnable-0.23.0}/runnable/secrets.py +0 -0
  57. {runnable-0.21.0 → runnable-0.23.0}/runnable/tasks.py +0 -0

{runnable-0.21.0 → runnable-0.23.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: runnable
-Version: 0.21.0
+Version: 0.23.0
 Summary: Add your description here
 Author-email: "Vammi, Vijay" <vijay.vammi@astrazeneca.com>
 License-File: LICENSE

{runnable-0.21.0 → runnable-0.23.0}/extensions/job_executor/__init__.py

@@ -2,7 +2,7 @@ import logging
 import os
 from typing import Dict, List, Optional

-from runnable import context, defaults, exceptions, parameters, utils
+from runnable import context, defaults, exceptions, parameters, task_console, utils
 from runnable.datastore import DataCatalog, JobLog, JsonParameter
 from runnable.executor import BaseJobExecutor


@@ -158,3 +158,17 @@ class GenericJobExecutor(BaseJobExecutor):
             data_catalogs.extend(data_catalog)

         return data_catalogs
+
+    def add_task_log_to_catalog(
+        self, name: str, map_variable: Dict[str, str | int | float] | None = None
+    ):
+        log_file_name = utils.make_log_file_name(
+            name=name,
+            map_variable=map_variable,
+        )
+        task_console.save_text(log_file_name)
+        # Put the log file in the catalog
+        self._context.catalog_handler.put(
+            name=log_file_name, run_id=self._context.run_id
+        )
+        os.remove(log_file_name)

{runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/__init__.py

@@ -210,6 +210,20 @@ class GenericPipelineExecutor(BasePipelineExecutor):
         """
         return int(os.environ.get(defaults.ATTEMPT_NUMBER, 1))

+    def add_task_log_to_catalog(
+        self, name: str, map_variable: Dict[str, str | int | float] | None = None
+    ):
+        log_file_name = utils.make_log_file_name(
+            name=name,
+            map_variable=map_variable,
+        )
+        task_console.save_text(log_file_name)
+        # Put the log file in the catalog
+        self._context.catalog_handler.put(
+            name=log_file_name, run_id=self._context.run_id
+        )
+        os.remove(log_file_name)
+
     def _execute_node(
         self,
         node: BaseNode,

@@ -336,14 +350,6 @@ class GenericPipelineExecutor(BasePipelineExecutor):
             )
         self.trigger_node_execution(node=node, map_variable=map_variable, **kwargs)

-        log_file_name = utils.make_log_file_name(node=node, map_variable=map_variable)
-        task_console.save_text(log_file_name, clear=True)
-
-        self._context.catalog_handler.put(
-            name=log_file_name, run_id=self._context.run_id
-        )
-        os.remove(log_file_name)
-
     def trigger_node_execution(
         self, node: BaseNode, map_variable: TypeMapVariable = None, **kwargs
     ):

@@ -482,6 +488,11 @@ class GenericPipelineExecutor(BasePipelineExecutor):
             console.print(e, style=defaults.error_style)
             logger.exception(e)
             raise
+        finally:
+            # Add task log to the catalog
+            self.add_task_log_to_catalog(
+                name=working_on.internal_name, map_variable=map_variable
+            )

         console.rule(style="[dark orange]")

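
A note on the `finally:` placement above: Python runs the `finally` body even when the `except` block re-raises, so the task log reaches the catalog on both the success and the failure path. A minimal, self-contained sketch of that ordering (illustrative names, not runnable's API):

    # Minimal sketch: cleanup in `finally` runs even when `except` re-raises.
    def run_with_log_capture(execute, save_log):
        try:
            execute()                  # may raise
        except Exception as exc:
            print(f"node failed: {exc}")
            raise                      # propagate to the caller...
        finally:
            save_log()                 # ...but the log is saved first

    run_with_log_capture(lambda: None, lambda: print("log saved"))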

{runnable-0.21.0 → runnable-0.23.0}/extensions/pipeline_executor/argo.py

@@ -6,7 +6,7 @@ import string
 from collections import namedtuple
 from enum import Enum
 from functools import cached_property
-from typing import Annotated, Literal, Optional
+from typing import Annotated, Any, Literal, Optional, cast

 from pydantic import (
     BaseModel,

@@ -20,7 +20,13 @@ from pydantic import (
 from pydantic.alias_generators import to_camel
 from ruamel.yaml import YAML

-from extensions.nodes.nodes import MapNode, ParallelNode, TaskNode
+from extensions.nodes.nodes import (
+    MapNode,
+    ParallelNode,
+    StubNode,
+    SuccessNode,
+    TaskNode,
+)
 from extensions.pipeline_executor import GenericPipelineExecutor
 from runnable import defaults, utils
 from runnable.defaults import TypeMapVariable

@@ -229,9 +235,8 @@ class Resources(BaseModel):
     requests: Request = Field(default=Request(), serialization_alias="requests")


-# This is what the user can override per template
-# Some are specific to container and some are specific to dag
-class TemplateDefaults(BaseModelWIthConfig):
+# Lets construct this from UserDefaults
+class ArgoTemplateDefaults(BaseModelWIthConfig):
     active_deadline_seconds: Optional[int] = Field(default=86400)  # 1 day
     fail_fast: bool = Field(default=True)
     node_selector: dict[str, str] = Field(default_factory=dict)

@@ -240,10 +245,32 @@ class TemplateDefaults(BaseModelWIthConfig):
     timeout: Optional[str] = Field(default=None)
     tolerations: Optional[list[Toleration]] = Field(default=None)

-    # These are in addition to what argo spec provides
-    image: str
-    image_pull_policy: Optional[ImagePullPolicy] = Field(default=ImagePullPolicy.Always)
+    model_config = ConfigDict(
+        extra="ignore",
+    )
+
+
+class CommonDefaults(BaseModelWIthConfig):
+    active_deadline_seconds: Optional[int] = Field(default=86400)  # 1 day
+    fail_fast: bool = Field(default=True)
+    node_selector: dict[str, str] = Field(default_factory=dict)
+    parallelism: Optional[int] = Field(default=None)
+    retry_strategy: Optional[RetryStrategy] = Field(default=None)
+    timeout: Optional[str] = Field(default=None)
+    tolerations: Optional[list[Toleration]] = Field(default=None)
+    image_pull_policy: ImagePullPolicy = Field(default=ImagePullPolicy.Always)
     resources: Resources = Field(default_factory=Resources)
+    env: list[EnvVar | SecretEnvVar] = Field(default_factory=list, exclude=True)
+
+
+# The user provided defaults at the top level
+class UserDefaults(CommonDefaults):
+    image: str
+
+
+# Overrides need not have image
+class Overrides(CommonDefaults):
+    image: Optional[str] = Field(default=None)


 # User provides this as part of the argoSpec

@@ -257,8 +284,8 @@ class ArgoWorkflowSpec(BaseModelWIthConfig):
     pod_gc: Optional[PodGC] = Field(default=None, serialization_alias="podGC")
     retry_strategy: Optional[RetryStrategy] = Field(default=None)
     service_account_name: Optional[str] = Field(default=None)
-    template_defaults: TemplateDefaults
     tolerations: Optional[list[Toleration]] = Field(default=None)
+    template_defaults: Optional[ArgoTemplateDefaults] = Field(default=None)


 class ArgoMetadata(BaseModelWIthConfig):

@@ -320,7 +347,6 @@ class ContainerTemplate((BaseModelWIthConfig)):
     inputs: Optional[Inputs] = Field(default=None)
     outputs: Optional[Outputs] = Field(default=None)

-    # The remaining can be from template defaults or node overrides
     active_deadline_seconds: Optional[int] = Field(default=86400)  # 1 day
     metadata: Optional[PodMetaData] = Field(default=None)
     node_selector: dict[str, str] = Field(default_factory=dict)

@@ -355,13 +381,7 @@ class ArgoExecutor(GenericPipelineExecutor):
         from_attributes=True,
         use_enum_values=True,
     )
-
-    argo_workflow: ArgoWorkflow
-
-    # Lets use a generic one
     pvc_for_runnable: Optional[str] = Field(default=None)
-    # pvc_for_catalog: Optional[str] = Field(default=None)
-    # pvc_for_run_log: Optional[str] = Field(default=None)
     custom_volumes: Optional[list[CustomVolume]] = Field(
         default_factory=list[CustomVolume]
     )

@@ -373,6 +393,11 @@ class ArgoExecutor(GenericPipelineExecutor):
         default="INFO"
     )

+    defaults: UserDefaults
+    argo_workflow: ArgoWorkflow
+
+    overrides: dict[str, Overrides] = Field(default_factory=dict)
+
     # This should be used when we refer to run_id or log_level in the containers
     _run_id_as_parameter: str = PrivateAttr(default="{{workflow.parameters.run_id}}")
     _log_level_as_parameter: str = PrivateAttr(

@@ -386,6 +411,11 @@ class ArgoExecutor(GenericPipelineExecutor):
     _container_catalog_location: str = PrivateAttr(default="/tmp/catalog/")
     _added_initial_container: bool = PrivateAttr(default=False)

+    def model_post_init(self, __context: Any) -> None:
+        self.argo_workflow.spec.template_defaults = ArgoTemplateDefaults(
+            **self.defaults.model_dump()
+        )
+
     def sanitize_name(self, name: str) -> str:
         formatted_name = name.replace(" ", "-").replace(".", "-").replace("_", "-")
         tag = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))

@@ -423,8 +453,6 @@ class ArgoExecutor(GenericPipelineExecutor):
         parameters: Optional[list[Parameter]],
         task_name: str,
     ):
-        template_defaults = self.argo_workflow.spec.template_defaults.model_dump()
-
         map_variable: TypeMapVariable = {}
         for parameter in parameters or []:
             map_variable[parameter.name] = (  # type: ignore

@@ -440,8 +468,8 @@

         core_container_template = CoreContainerTemplate(
             command=shlex.split(fan_command),
-            image=template_defaults["image"],
-            image_pull_policy=template_defaults["image_pull_policy"],
+            image=self.defaults.image,
+            image_pull_policy=self.defaults.image_pull_policy,
             volume_mounts=[
                 volume_pair.volume_mount for volume_pair in self.volume_pairs
             ],

@@ -457,12 +485,17 @@
         outputs = Outputs(parameters=[OutputParameter(name="iterate-on")])

         container_template = ContainerTemplate(
-            container=core_container_template,
             name=task_name,
-            volumes=[volume_pair.volume for volume_pair in self.volume_pairs],
+            container=core_container_template,
             inputs=Inputs(parameters=parameters),
             outputs=outputs,
-            **template_defaults,
+            active_deadline_seconds=self.defaults.active_deadline_seconds,
+            node_selector=self.defaults.node_selector,
+            parallelism=self.defaults.parallelism,
+            retry_strategy=self.defaults.retry_strategy,
+            timeout=self.defaults.timeout,
+            tolerations=self.defaults.tolerations,
+            volumes=[volume_pair.volume for volume_pair in self.volume_pairs],
         )

         self._templates.append(container_template)

@@ -473,14 +506,23 @@
         task_name: str,
         inputs: Optional[Inputs] = None,
     ) -> ContainerTemplate:
-        template_defaults = self.argo_workflow.spec.template_defaults.model_dump()
+        assert (
+            isinstance(node, TaskNode)
+            or isinstance(node, StubNode)
+            or isinstance(node, SuccessNode)
+        )

-        node_overide = {}
-        if hasattr(node, "overides"):
-            node_overide = node.overides
+        node_override = None
+        if hasattr(node, "overrides"):
+            override_key = node.overrides.get(self.service_name, "")
+            try:
+                node_override = self.overrides.get(override_key)
+            except:  # noqa
+                raise Exception("Override not found for: ", override_key)

-        # update template defaults with node overrides
-        template_defaults.update(node_overide)
+        effective_settings = self.defaults.model_dump()
+        if node_override:
+            effective_settings.update(node_override.model_dump())

         inputs = inputs or Inputs(parameters=[])

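
The override resolution above is a plain two-step dict merge: dump the executor-level `defaults`, then let the named override's keys replace them, so `dict.update` gives the override the last word. Illustrated with hypothetical values:

    # Hypothetical values: update() lets the override win on shared keys.
    defaults = {"image": "python:3.11", "timeout": "3600", "parallelism": None}
    node_override = {"image": "pytorch/pytorch:latest", "timeout": "7200"}

    effective_settings = dict(defaults)       # self.defaults.model_dump()
    effective_settings.update(node_override)  # node_override.model_dump()

    print(effective_settings["image"])        # pytorch/pytorch:latest
    print(effective_settings["parallelism"])  # None, untouched by the override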

@@ -500,35 +542,66 @@

         core_container_template = CoreContainerTemplate(
             command=shlex.split(command),
-            image=template_defaults["image"],
-            image_pull_policy=template_defaults["image_pull_policy"],
+            image=effective_settings["image"],
+            image_pull_policy=effective_settings["image_pull_policy"],
+            resources=effective_settings["resources"],
             volume_mounts=[
                 volume_pair.volume_mount for volume_pair in self.volume_pairs
             ],
         )

         self._set_up_initial_container(container_template=core_container_template)
+        self._expose_secrets_to_task(
+            working_on=node, container_template=core_container_template
+        )
+        self._set_env_vars_to_task(node, core_container_template)

         container_template = ContainerTemplate(
-            container=core_container_template,
             name=task_name,
+            container=core_container_template,
             inputs=Inputs(
                 parameters=[
                     Parameter(name=param.name) for param in inputs.parameters or []
                 ]
             ),
             volumes=[volume_pair.volume for volume_pair in self.volume_pairs],
-            **template_defaults,
+            **node_override.model_dump() if node_override else {},
         )

         return container_template

+    def _set_env_vars_to_task(
+        self, working_on: BaseNode, container_template: CoreContainerTemplate
+    ):
+        if not isinstance(working_on, TaskNode):
+            return
+
+        global_envs: dict[str, str] = {}
+
+        for env_var in self.defaults.env:
+            env_var = cast(EnvVar, env_var)
+            global_envs[env_var.name] = env_var.value
+
+        override_key = working_on.overrides.get(self.service_name, "")
+        node_override = self.overrides.get(override_key, None)
+
+        # Update the global envs with the node overrides
+        if node_override:
+            for env_var in node_override.env:
+                env_var = cast(EnvVar, env_var)
+                global_envs[env_var.name] = env_var.value
+
+        for key, value in global_envs.items():
+            env_var_to_add = EnvVar(name=key, value=value)
+            container_template.env.append(env_var_to_add)
+
     def _expose_secrets_to_task(
         self,
         working_on: BaseNode,
         container_template: CoreContainerTemplate,
     ):
-        assert isinstance(working_on, TaskNode)
+        if not isinstance(working_on, TaskNode):
+            return
         secrets = working_on.executable.secrets
         for secret in secrets:
             assert self.secret_from_k8s is not None
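
`_set_env_vars_to_task` applies the same precedence to environment variables: executor-level `defaults.env` entries are collected into a dict keyed by name, then any node-override entry with the same name replaces the global one. A small sketch with made-up variables:

    # Made-up env lists: keying by name makes the override entry win.
    global_env = [("LOG_LEVEL", "INFO"), ("REGION", "eu-west-1")]
    override_env = [("LOG_LEVEL", "DEBUG")]

    merged: dict[str, str] = {}
    for name, value in global_env + override_env:
        merged[name] = value  # later (override) entries replace earlier ones

    print(merged)  # {'LOG_LEVEL': 'DEBUG', 'REGION': 'eu-west-1'}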

@@ -770,11 +843,6 @@ class ArgoExecutor(GenericPipelineExecutor):

         argo_workflow_dump = self.argo_workflow.model_dump(
             by_alias=True,
-            exclude={
-                "spec": {
-                    "template_defaults": {"image_pull_policy", "image", "resources"}
-                }
-            },
             exclude_none=True,
             round_trip=False,
         )

{runnable-0.21.0 → runnable-0.23.0}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "runnable"
-version = "0.21.0"
+version = "0.23.0"
 description = "Add your description here"
 readme = "README.md"
 authors = [

{runnable-0.21.0 → runnable-0.23.0}/runnable/entrypoints.py

@@ -345,15 +345,9 @@ def execute_single_node(
     try:
         executor.execute_node(node=node_to_execute, map_variable=map_variable_dict)
     finally:
-        log_file_name = utils.make_log_file_name(
-            node=node_to_execute,
-            map_variable=map_variable_dict,
+        run_context.executor.add_task_log_to_catalog(
+            name=node_to_execute.internal_name, map_variable=map_variable_dict
         )
-        task_console.save_text(log_file_name)
-
-        # Put the log file in the catalog
-        run_context.catalog_handler.put(name=log_file_name, run_id=run_context.run_id)
-        os.remove(log_file_name)

     executor.send_return_code()

@@ -408,7 +402,10 @@ def execute_job_yaml_spec(
     )

     assert isinstance(executor, BaseJobExecutor)
-    executor.submit_job(job, catalog_settings=catalog_config)
+    try:
+        executor.submit_job(job, catalog_settings=catalog_config)
+    finally:
+        run_context.executor.add_task_log_to_catalog("job")

     executor.send_return_code()

@@ -483,9 +480,12 @@ def execute_job_non_local(
         "Executing the job from the user. We are still in the caller's compute environment"
     )

-    run_context.executor.execute_job(
-        run_context.job, catalog_settings=run_context.job_catalog_settings
-    )
+    try:
+        run_context.executor.execute_job(
+            run_context.job, catalog_settings=run_context.job_catalog_settings
+        )
+    finally:
+        run_context.executor.add_task_log_to_catalog("job")

     run_context.executor.send_return_code()

{runnable-0.21.0 → runnable-0.23.0}/runnable/executor.py

@@ -84,6 +84,11 @@ class BaseExecutor(ABC, BaseModel):
         """
         ...

+    @abstractmethod
+    def add_task_log_to_catalog(
+        self, name: str, map_variable: Optional[TypeMapVariable] = None
+    ): ...
+

 class BaseJobExecutor(BaseExecutor):
     service_type: str = "job_executor"
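
Making `add_task_log_to_catalog` an `@abstractmethod` on `BaseExecutor` turns the new log-capture hook into an explicit contract: the concrete implementations live in `GenericJobExecutor` and `GenericPipelineExecutor` above, and a subclass that omits the method cannot be instantiated. Plain-ABC sketch of that failure mode (runnable's BaseExecutor also mixes in pydantic's BaseModel):

    from abc import ABC, abstractmethod

    class Base(ABC):
        @abstractmethod
        def add_task_log_to_catalog(self, name: str): ...

    class Incomplete(Base):
        pass

    try:
        Incomplete()
    except TypeError as exc:
        print(exc)  # Can't instantiate abstract class Incomplete ...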

{runnable-0.21.0 → runnable-0.23.0}/runnable/sdk.py

@@ -899,7 +899,10 @@ class BaseJob(BaseModel):
         job = self.get_task()
         catalog_settings = self.return_catalog_settings()

-        run_context.executor.submit_job(job, catalog_settings=catalog_settings)
+        try:
+            run_context.executor.submit_job(job, catalog_settings=catalog_settings)
+        finally:
+            run_context.executor.add_task_log_to_catalog("job")

         logger.info(
             "Executing the job from the user. We are still in the caller's compute environment"

{runnable-0.21.0 → runnable-0.23.0}/runnable/utils.py

@@ -109,8 +109,16 @@ def apply_variables(
         raise Exception("Argument Variables should be dict")

     json_d = json.dumps(apply_to)
-    transformed = str_template(json_d).substitute(**variables)
-    return json.loads(transformed)
+    string_template = str_template(json_d)
+
+    template = string_template.safe_substitute(variables)
+
+    if "$" in template:
+        logger.warning(
+            "Not all variables found in the config are found in the variables"
+        )
+
+    return json.loads(template)


 def get_module_and_attr_names(command: str) -> Tuple[str, str]:
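
The behavioral difference driving this change is in the standard library: `string.Template.substitute` raises `KeyError` for any placeholder missing from the mapping, while `safe_substitute` leaves the placeholder untouched, which is what lets the new code detect a leftover `$` and log a warning instead of crashing:

    from string import Template

    t = Template('{"image": "$image", "tag": "$tag"}')

    print(t.safe_substitute({"image": "python:3.11"}))
    # {"image": "python:3.11", "tag": "$tag"}  -- placeholder left in place

    try:
        t.substitute(image="python:3.11")
    except KeyError as exc:
        print("substitute raised KeyError for:", exc)  # 'tag'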

@@ -620,9 +628,9 @@ def gather_variables() -> Dict[str, str]:
     return variables


-def make_log_file_name(node: BaseNode, map_variable: TypeMapVariable) -> str:
+def make_log_file_name(name: str, map_variable: TypeMapVariable) -> str:
     random_tag = "".join(random.choices(string.ascii_uppercase + string.digits, k=3))
-    log_file_name = node.name
+    log_file_name = name

     if map_variable:
         for _, value in map_variable.items():