runnable 0.18.0__tar.gz → 0.19.1__tar.gz

Files changed (58)
  1. {runnable-0.18.0 → runnable-0.19.1}/PKG-INFO +1 -1
  2. {runnable-0.18.0 → runnable-0.19.1}/extensions/job_executor/k8s.py +150 -26
  3. {runnable-0.18.0 → runnable-0.19.1}/extensions/job_executor/local.py +1 -0
  4. {runnable-0.18.0 → runnable-0.19.1}/extensions/job_executor/local_container.py +1 -0
  5. {runnable-0.18.0 → runnable-0.19.1}/extensions/pipeline_executor/local_container.py +0 -1
  6. {runnable-0.18.0 → runnable-0.19.1}/pyproject.toml +2 -1
  7. {runnable-0.18.0 → runnable-0.19.1}/runnable/__init__.py +1 -0
  8. {runnable-0.18.0 → runnable-0.19.1}/runnable/catalog.py +1 -2
  9. {runnable-0.18.0 → runnable-0.19.1}/runnable/entrypoints.py +1 -5
  10. {runnable-0.18.0 → runnable-0.19.1}/runnable/executor.py +1 -1
  11. {runnable-0.18.0 → runnable-0.19.1}/runnable/parameters.py +0 -9
  12. {runnable-0.18.0 → runnable-0.19.1}/runnable/utils.py +5 -25
  13. {runnable-0.18.0 → runnable-0.19.1}/.gitignore +0 -0
  14. {runnable-0.18.0 → runnable-0.19.1}/LICENSE +0 -0
  15. {runnable-0.18.0 → runnable-0.19.1}/README.md +0 -0
  16. {runnable-0.18.0 → runnable-0.19.1}/extensions/README.md +0 -0
  17. {runnable-0.18.0 → runnable-0.19.1}/extensions/__init__.py +0 -0
  18. {runnable-0.18.0 → runnable-0.19.1}/extensions/catalog/README.md +0 -0
  19. {runnable-0.18.0 → runnable-0.19.1}/extensions/catalog/file_system.py +0 -0
  20. {runnable-0.18.0 → runnable-0.19.1}/extensions/catalog/pyproject.toml +0 -0
  21. {runnable-0.18.0 → runnable-0.19.1}/extensions/job_executor/README.md +0 -0
  22. {runnable-0.18.0 → runnable-0.19.1}/extensions/job_executor/__init__.py +0 -0
  23. {runnable-0.18.0 → runnable-0.19.1}/extensions/job_executor/k8s_job_spec.yaml +0 -0
  24. {runnable-0.18.0 → runnable-0.19.1}/extensions/job_executor/pyproject.toml +0 -0
  25. {runnable-0.18.0 → runnable-0.19.1}/extensions/nodes/README.md +0 -0
  26. {runnable-0.18.0 → runnable-0.19.1}/extensions/nodes/nodes.py +0 -0
  27. {runnable-0.18.0 → runnable-0.19.1}/extensions/nodes/pyproject.toml +0 -0
  28. {runnable-0.18.0 → runnable-0.19.1}/extensions/pipeline_executor/README.md +0 -0
  29. {runnable-0.18.0 → runnable-0.19.1}/extensions/pipeline_executor/__init__.py +0 -0
  30. {runnable-0.18.0 → runnable-0.19.1}/extensions/pipeline_executor/argo.py +0 -0
  31. {runnable-0.18.0 → runnable-0.19.1}/extensions/pipeline_executor/argo_specification.yaml +0 -0
  32. {runnable-0.18.0 → runnable-0.19.1}/extensions/pipeline_executor/local.py +0 -0
  33. {runnable-0.18.0 → runnable-0.19.1}/extensions/pipeline_executor/mocked.py +0 -0
  34. {runnable-0.18.0 → runnable-0.19.1}/extensions/pipeline_executor/pyproject.toml +0 -0
  35. {runnable-0.18.0 → runnable-0.19.1}/extensions/pipeline_executor/retry.py +0 -0
  36. {runnable-0.18.0 → runnable-0.19.1}/extensions/run_log_store/README.md +0 -0
  37. {runnable-0.18.0 → runnable-0.19.1}/extensions/run_log_store/__init__.py +0 -0
  38. {runnable-0.18.0 → runnable-0.19.1}/extensions/run_log_store/chunked_fs.py +0 -0
  39. {runnable-0.18.0 → runnable-0.19.1}/extensions/run_log_store/db/implementation_FF.py +0 -0
  40. {runnable-0.18.0 → runnable-0.19.1}/extensions/run_log_store/db/integration_FF.py +0 -0
  41. {runnable-0.18.0 → runnable-0.19.1}/extensions/run_log_store/file_system.py +0 -0
  42. {runnable-0.18.0 → runnable-0.19.1}/extensions/run_log_store/generic_chunked.py +0 -0
  43. {runnable-0.18.0 → runnable-0.19.1}/extensions/run_log_store/pyproject.toml +0 -0
  44. {runnable-0.18.0 → runnable-0.19.1}/extensions/secrets/README.md +0 -0
  45. {runnable-0.18.0 → runnable-0.19.1}/extensions/secrets/dotenv.py +0 -0
  46. {runnable-0.18.0 → runnable-0.19.1}/extensions/secrets/pyproject.toml +0 -0
  47. {runnable-0.18.0 → runnable-0.19.1}/runnable/cli.py +0 -0
  48. {runnable-0.18.0 → runnable-0.19.1}/runnable/context.py +0 -0
  49. {runnable-0.18.0 → runnable-0.19.1}/runnable/datastore.py +0 -0
  50. {runnable-0.18.0 → runnable-0.19.1}/runnable/defaults.py +0 -0
  51. {runnable-0.18.0 → runnable-0.19.1}/runnable/exceptions.py +0 -0
  52. {runnable-0.18.0 → runnable-0.19.1}/runnable/graph.py +0 -0
  53. {runnable-0.18.0 → runnable-0.19.1}/runnable/names.py +0 -0
  54. {runnable-0.18.0 → runnable-0.19.1}/runnable/nodes.py +0 -0
  55. {runnable-0.18.0 → runnable-0.19.1}/runnable/pickler.py +0 -0
  56. {runnable-0.18.0 → runnable-0.19.1}/runnable/sdk.py +0 -0
  57. {runnable-0.18.0 → runnable-0.19.1}/runnable/secrets.py +0 -0
  58. {runnable-0.18.0 → runnable-0.19.1}/runnable/tasks.py +0 -0
--- runnable-0.18.0/PKG-INFO
+++ runnable-0.19.1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: runnable
-Version: 0.18.0
+Version: 0.19.1
 Summary: Add your description here
 Author-email: "Vammi, Vijay" <vijay.vammi@astrazeneca.com>
 License-File: LICENSE
--- runnable-0.18.0/extensions/job_executor/k8s.py
+++ runnable-0.19.1/extensions/job_executor/k8s.py
@@ -101,23 +101,40 @@ class HostPath(BaseModel):
     path: str
 
 
-class Volume(BaseModel):
+class HostPathVolume(BaseModel):
     name: str
     host_path: HostPath
 
 
-class TemplateSpec(BaseModel):
+class PVCClaim(BaseModel):
+    claim_name: str
+
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        from_attributes=True,
+    )
+
+
+class PVCVolume(BaseModel):
+    name: str
+    persistent_volume_claim: PVCClaim
+
+
+class K8sTemplateSpec(BaseModel):
     active_deadline_seconds: int = Field(default=60 * 60 * 2)  # 2 hours
     node_selector: Optional[dict[str, str]] = None
     tolerations: Optional[list[dict[str, str]]] = None
-    volumes: Optional[list[Volume]] = Field(default_factory=lambda: [])
+    volumes: Optional[list[HostPathVolume | PVCVolume]] = Field(
+        default_factory=lambda: []
+    )
     service_account_name: Optional[str] = "default"
     restart_policy: RestartPolicy = RestartPolicy.NEVER
     container: Container
 
 
-class Template(BaseModel):
-    spec: TemplateSpec
+class K8sTemplate(BaseModel):
+    spec: K8sTemplateSpec
     metadata: Optional[ObjectMetaData] = None
 
 
@@ -125,32 +142,25 @@ class Spec(BaseModel):
     active_deadline_seconds: Optional[int] = Field(default=60 * 60 * 2)  # 2 hours
     backoff_limit: int = 6
     selector: Optional[LabelSelector] = None
-    template: Template
+    template: K8sTemplate
     ttl_seconds_after_finished: Optional[int] = Field(default=60 * 60 * 24)  # 24 hours
 
 
-class K8sJobExecutor(GenericJobExecutor):
+class GenericK8sJobExecutor(GenericJobExecutor):
     service_name: str = "k8s-job"
     config_path: Optional[str] = None
     job_spec: Spec
     mock: bool = False
-
-    # The location the mount of .run_log_store is mounted to in minikube
-    # ensure that minikube mount $HOME/workspace/runnable/.run_log_store:/volume/run_logs is executed first
-    # $HOME/workspace/runnable/.catalog:/volume/catalog
-    # Ensure that the docker build is done with eval $(minikube docker-env)
-    mini_k8s_run_log_location: str = Field(default="/volume/run_logs/")
-    mini_k8s_catalog_location: str = Field(default="/volume/catalog/")
+    namespace: str = Field(default="default")
 
     _is_local: bool = PrivateAttr(default=False)
+    _volume_mounts: list[VolumeMount] = PrivateAttr(default_factory=lambda: [])
+    _volumes: list[HostPathVolume | PVCVolume] = PrivateAttr(default_factory=lambda: [])
 
     _container_log_location: str = PrivateAttr(default="/tmp/run_logs/")
     _container_catalog_location: str = PrivateAttr(default="/tmp/catalog/")
     _container_secrets_location: str = PrivateAttr(default="/tmp/dotenv")
 
-    _volumes: list[Volume] = []
-    _volume_mounts: list[VolumeMount] = []
-
     model_config = ConfigDict(
         alias_generator=to_camel,
         populate_by_name=True,
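
The alias_generator=to_camel configuration above exists because the Kubernetes API expects camelCase keys while these models use snake_case fields. A minimal pydantic v2 sketch of the mechanism (the class bodies below are illustrative, not copied from k8s.py):

from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_camel


class PVCClaim(BaseModel):
    claim_name: str

    # dump snake_case fields under camelCase aliases, as the k8s API expects
    model_config = ConfigDict(
        alias_generator=to_camel,
        populate_by_name=True,
    )


class PVCVolume(BaseModel):
    name: str
    persistent_volume_claim: PVCClaim

    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)


volume = PVCVolume(
    name="runnable", persistent_volume_claim=PVCClaim(claim_name="runnable")
)
print(volume.model_dump(by_alias=True))
# {'name': 'runnable', 'persistentVolumeClaim': {'claimName': 'runnable'}}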
@@ -180,6 +190,7 @@ class K8sJobExecutor(GenericJobExecutor):
         self._use_volumes()
 
         job_log = self._context.run_log_store.get_job_log(run_id=self._context.run_id)
+        self.add_code_identities(job_log)
 
         attempt_log = job.execute_command(
             attempt_number=self.step_attempt_number,
@@ -287,14 +298,17 @@ class K8sJobExecutor(GenericJobExecutor):
         )
 
         logger.info(f"Submitting job: {job.__dict__}")
+        if self.mock:
+            print(job.__dict__)
+            return
 
         try:
             k8s_batch = self._client.BatchV1Api()
             response = k8s_batch.create_namespaced_job(
                 body=job,
-                namespace="default",
                 _preload_content=False,
                 pretty=True,
+                namespace=self.namespace,
            )
             logger.debug(f"Kubernetes job response: {response}")
         except Exception as e:
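
For reference, submitting a namespaced Job with the official kubernetes Python client looks roughly like the call above; a hedged, self-contained sketch (the manifest is illustrative, not the one the executor builds):

from kubernetes import client, config

config.load_kube_config()  # inside a cluster: config.load_incluster_config()

job_manifest = {
    "apiVersion": "batch/v1",
    "kind": "Job",
    "metadata": {"name": "runnable-job"},
    "spec": {
        "backoffLimit": 6,
        "template": {
            "spec": {
                "containers": [{"name": "runnable", "image": "app:latest"}],
                "restartPolicy": "Never",
            }
        },
    },
}

batch = client.BatchV1Api()
# the namespace is now taken from executor config instead of a hardcoded "default"
batch.create_namespaced_job(namespace="default", body=job_manifest)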
@@ -302,6 +316,43 @@ class K8sJobExecutor(GenericJobExecutor):
             print(e)
             raise
 
+    def _create_volumes(self): ...
+
+    def _use_volumes(self):
+        match self._context.run_log_store.service_name:
+            case "file-system":
+                self._context.run_log_store.log_folder = self._container_log_location
+            case "chunked-fs":
+                self._context.run_log_store.log_folder = self._container_log_location
+
+        match self._context.catalog_handler.service_name:
+            case "file-system":
+                self._context.catalog_handler.catalog_location = (
+                    self._container_catalog_location
+                )
+
+
+class MiniK8sJobExecutor(GenericK8sJobExecutor):
+    service_name: str = "k8s-job"
+    config_path: Optional[str] = None
+    job_spec: Spec
+    mock: bool = False
+
+    # The location the mount of .run_log_store is mounted to in minikube
+    # ensure that minikube mount $HOME/workspace/runnable/.run_log_store:/volume/run_logs is executed first
+    # $HOME/workspace/runnable/.catalog:/volume/catalog
+    # Ensure that the docker build is done with eval $(minikube docker-env)
+    mini_k8s_run_log_location: str = Field(default="/volume/run_logs/")
+    mini_k8s_catalog_location: str = Field(default="/volume/catalog/")
+
+    _is_local: bool = PrivateAttr(default=False)
+
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        from_attributes=True,
+    )
+
     def _create_volumes(self):
         match self._context.run_log_store.service_name:
             case "file-system":
@@ -311,7 +362,7 @@ class K8sJobExecutor(GenericJobExecutor):
                     # You then are creating a volume that is mounted to /tmp/run_logs in the container
                     # You are then referring to it.
                     # https://stackoverflow.com/questions/57411456/minikube-mounted-host-folders-are-not-working
-                    Volume(
+                    HostPathVolume(
                         name="run-logs",
                         host_path=HostPath(path=self.mini_k8s_run_log_location),
                     )
@@ -323,7 +374,7 @@ class K8sJobExecutor(GenericJobExecutor):
                 )
             case "chunked-fs":
                 self._volumes.append(
-                    Volume(
+                    HostPathVolume(
                         name="run-logs",
                         host_path=HostPath(path=self.mini_k8s_run_log_location),
                     )
@@ -337,7 +388,7 @@ class K8sJobExecutor(GenericJobExecutor):
         match self._context.catalog_handler.service_name:
             case "file-system":
                 self._volumes.append(
-                    Volume(
+                    HostPathVolume(
                         name="catalog",
                         host_path=HostPath(path=self.mini_k8s_catalog_location),
                     )
@@ -348,15 +399,88 @@ class K8sJobExecutor(GenericJobExecutor):
                 )
             )
 
-    def _use_volumes(self):
+
+class K8sJobExecutor(GenericK8sJobExecutor):
+    service_name: str = "k8s-job"
+    config_path: Optional[str] = None
+    job_spec: Spec
+    mock: bool = False
+    pvc_claim_name: str
+
+    # change the spec to pull image if not present
+    def model_post_init(self, __context):
+        self.job_spec.template.spec.container.image_pull_policy = ImagePullPolicy.ALWAYS
+
+    _is_local: bool = PrivateAttr(default=False)
+
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        from_attributes=True,
+    )
+
+    def execute_job(self, job: BaseTaskType, catalog_settings=Optional[List[str]]):
+        self._use_volumes()
+        self._set_up_run_log()
+
+        job_log = self._context.run_log_store.create_job_log()
+        self._context.run_log_store.add_job_log(
+            run_id=self._context.run_id, job_log=job_log
+        )
+
+        job_log = self._context.run_log_store.get_job_log(run_id=self._context.run_id)
+        self.add_code_identities(job_log)
+
+        attempt_log = job.execute_command(
+            attempt_number=self.step_attempt_number,
+            mock=self.mock,
+        )
+
+        job_log.status = attempt_log.status
+        job_log.attempts.append(attempt_log)
+
+        data_catalogs_put: Optional[List[DataCatalog]] = self._sync_catalog(
+            catalog_settings=catalog_settings
+        )
+        logger.debug(f"data_catalogs_put: {data_catalogs_put}")
+
+        job_log.add_data_catalogs(data_catalogs_put or [])
+
+        console.print("Summary of job")
+        console.print(job_log.get_summary())
+
+        self._context.run_log_store.add_job_log(
+            run_id=self._context.run_id, job_log=job_log
+        )
+
+    def _create_volumes(self):
+        self._volumes.append(
+            PVCVolume(
+                name=self.pvc_claim_name,
+                persistent_volume_claim=PVCClaim(claim_name=self.pvc_claim_name),
+            )
+        )
         match self._context.run_log_store.service_name:
             case "file-system":
-                self._context.run_log_store.log_folder = self._container_log_location
+                self._volume_mounts.append(
+                    VolumeMount(
+                        name=self.pvc_claim_name,
+                        mount_path=self._container_log_location,
+                    )
+                )
             case "chunked-fs":
-                self._context.run_log_store.log_folder = self._container_log_location
+                self._volume_mounts.append(
+                    VolumeMount(
+                        name=self.pvc_claim_name,
+                        mount_path=self._container_log_location,
+                    )
+                )
 
         match self._context.catalog_handler.service_name:
             case "file-system":
-                self._context.catalog_handler.catalog_location = (
-                    self._container_catalog_location
+                self._volume_mounts.append(
+                    VolumeMount(
+                        name=self.pvc_claim_name,
+                        mount_path=self._container_catalog_location,
+                    )
                 )
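
model_post_init above is pydantic v2's post-validation hook; it lets K8sJobExecutor force an image pull policy without a custom validator. A minimal sketch of the pattern, with illustrative class names:

from pydantic import BaseModel


class Container(BaseModel):
    image: str
    image_pull_policy: str = "IfNotPresent"


class Executor(BaseModel):
    container: Container

    def model_post_init(self, __context) -> None:
        # runs after validation; mirrors K8sJobExecutor forcing ALWAYS so
        # clusters re-pull images whose tags are reused during development
        self.container.image_pull_policy = "Always"


executor = Executor(container=Container(image="app:latest"))
print(executor.container.image_pull_policy)  # Always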
--- runnable-0.18.0/extensions/job_executor/local.py
+++ runnable-0.19.1/extensions/job_executor/local.py
@@ -37,6 +37,7 @@ class LocalJobExecutor(GenericJobExecutor):
         logger.info("Trying to execute job")
 
         job_log = self._context.run_log_store.get_job_log(run_id=self._context.run_id)
+        self.add_code_identities(job_log)
 
         attempt_log = job.execute_command(
             attempt_number=self.step_attempt_number,
--- runnable-0.18.0/extensions/job_executor/local_container.py
+++ runnable-0.19.1/extensions/job_executor/local_container.py
@@ -52,6 +52,7 @@ class LocalContainerJobExecutor(GenericJobExecutor):
         logger.info("Trying to execute job")
 
         job_log = self._context.run_log_store.get_job_log(run_id=self._context.run_id)
+        self.add_code_identities(job_log)
 
         attempt_log = job.execute_command(
             attempt_number=self.step_attempt_number,
--- runnable-0.18.0/extensions/pipeline_executor/local_container.py
+++ runnable-0.19.1/extensions/pipeline_executor/local_container.py
@@ -268,7 +268,6 @@ class LocalContainerExecutor(GenericPipelineExecutor):
                 f"Please provide a docker_image using executor_config of the step {node.name} or at global config"
             )
 
-        # TODO: Should consider using getpass.getuser() when running the docker container? Volume permissions
         container = client.containers.create(
             image=docker_image,
             command=command,
--- runnable-0.18.0/pyproject.toml
+++ runnable-0.19.1/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "runnable"
-version = "0.18.0"
+version = "0.19.1"
 description = "Add your description here"
 readme = "README.md"
 authors = [
@@ -98,6 +98,7 @@ include = [
 [project.entry-points.'job_executor']
 "local" = "extensions.job_executor.local:LocalJobExecutor"
 "local-container" = "extensions.job_executor.local_container:LocalContainerJobExecutor"
+"mini-k8s-job" = "extensions.job_executor.k8s:MiniK8sJobExecutor"
 "k8s-job" = "extensions.job_executor.k8s:K8sJobExecutor"
 # "argo" = "extensions.pipeline_executor.argo:ArgoExecutor"
 # "mocked" = "extensions.pipeline_executor.mocked:MockedExecutor"
--- runnable-0.18.0/runnable/__init__.py
+++ runnable-0.19.1/runnable/__init__.py
@@ -1,5 +1,6 @@
 # ruff: noqa
 
+
 import logging
 import os
 from logging.config import dictConfig
--- runnable-0.18.0/runnable/catalog.py
+++ runnable-0.19.1/runnable/catalog.py
@@ -10,8 +10,6 @@ from runnable.datastore import DataCatalog
 
 logger = logging.getLogger(defaults.LOGGER_NAME)
 
-# TODO: Should ** be allowed as glob pattern as it can potentially copy everything to catalog
-
 
 def is_catalog_out_of_sync(
     catalog, synced_catalogs=Optional[List[DataCatalog]]
@@ -170,3 +168,4 @@ class DoNothingCatalog(BaseCatalog):
         Does nothing
         """
         logger.info("Using a do-nothing catalog, doing nothing while sync between runs")
+        logger.info("Using a do-nothing catalog, doing nothing while sync between runs")
--- runnable-0.18.0/runnable/entrypoints.py
+++ runnable-0.19.1/runnable/entrypoints.py
@@ -16,9 +16,6 @@ from runnable.executor import BaseJobExecutor, BasePipelineExecutor
 logger = logging.getLogger(defaults.LOGGER_NAME)
 
 
-print("")  # removes the buffer print
-
-
 def get_default_configs() -> RunnableConfig:
     """
     User can provide extensions as part of their code base, runnable-config.yaml provides the place to put them.
@@ -128,11 +125,10 @@ def prepare_configurations(
         "job-executor", None
     )  # type: ignore
     if not job_executor_config:
-        executor_config = cast(
+        job_executor_config = cast(
            ServiceConfig,
            runnable_defaults.get("job-executor", defaults.DEFAULT_JOB_EXECUTOR),
        )
-
     assert job_executor_config, "Job executor is not provided"
     configured_executor = utils.get_provider_by_name_and_type(
         "job_executor", job_executor_config
--- runnable-0.18.0/runnable/executor.py
+++ runnable-0.19.1/runnable/executor.py
@@ -11,9 +11,9 @@ import runnable.context as context
 from runnable import defaults
 from runnable.datastore import DataCatalog, JobLog, StepLog
 from runnable.defaults import TypeMapVariable
-from runnable.graph import Graph
 
 if TYPE_CHECKING:  # pragma: no cover
+    from runnable.graph import Graph
     from runnable.nodes import BaseNode
     from runnable.tasks import BaseTaskType
 
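Moving the Graph import under TYPE_CHECKING is the standard way to keep a type-only dependency out of the runtime import graph (and so avoid a circular import). A minimal sketch:

from __future__ import annotations  # annotations resolve lazily

from typing import TYPE_CHECKING

if TYPE_CHECKING:  # pragma: no cover
    # seen by type checkers only; never imported at runtime
    from runnable.graph import Graph


def execute_graph(graph: Graph) -> None:
    """The annotation works even though Graph is absent at runtime."""
    ...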
--- runnable-0.18.0/runnable/parameters.py
+++ runnable-0.19.1/runnable/parameters.py
@@ -15,8 +15,6 @@ from runnable.utils import remove_prefix
 
 logger = logging.getLogger(defaults.LOGGER_NAME)
 
-# TODO: Revisit this, it might be a bit too complicated than required
-
 
 def get_user_set_parameters(remove: bool = False) -> Dict[str, JsonParameter]:
     """
@@ -50,13 +48,6 @@ def get_user_set_parameters(remove: bool = False) -> Dict[str, JsonParameter]:
     return parameters
 
 
-def serialize_parameter_as_str(value: Any) -> str:
-    if isinstance(value, BaseModel):
-        return json.dumps(value.model_dump())
-
-    return json.dumps(value)
-
-
 def filter_arguments_for_func(
     func: Callable[..., Any],
     params: Dict[str, Any],
--- runnable-0.18.0/runnable/utils.py
+++ runnable-0.19.1/runnable/utils.py
@@ -17,7 +17,7 @@ from ruamel.yaml import YAML
 from stevedore import driver
 
 import runnable.context as context
-from runnable import defaults, names
+from runnable import console, defaults, names
 from runnable.defaults import TypeMapVariable
 
 if TYPE_CHECKING:  # pragma: no cover
@@ -176,7 +176,7 @@ def is_a_git_repo() -> bool:
         logger.info("Found the code to be git versioned")
         return True
     except BaseException:  # pylint: disable=W0702
-        logger.error("No git repo found, unsafe hash")
+        console.print("Not a git repo", style="bold red")
 
     return False
 
@@ -195,27 +195,7 @@ def get_current_code_commit() -> Union[str, None]:
         logger.info("Found the git commit to be: %s", label)
         return label
     except BaseException:  # pylint: disable=W0702
-        logger.exception("Error getting git hash")
-        raise
-
-
-def archive_git_tracked(name: str):
-    """Generate a git archive of the tracked files.
-
-    Args:
-        name (str): The name to give the archive
-
-    Raises:
-        Exception: If its not a git repo
-    """
-    command = f"git archive -v -o {name}.tar.gz --format=tar.gz HEAD"
-
-    if not is_a_git_repo():
-        raise Exception("Not a git repo")
-    try:
-        subprocess.check_output(command.split()).strip().decode("utf-8")
-    except BaseException:  # pylint: disable=W0702
-        logger.exception("Error archiving repo")
+        console.print("Not a git repo, error getting hash", style="bold red")
         raise
 
 
@@ -234,7 +214,7 @@ def is_git_clean() -> Tuple[bool, Union[None, str]]:
             return True, None
         return False, label
     except BaseException:  # pylint: disable=W0702
-        logger.exception("Error checking if the code is git clean")
+        console.print("Not a git repo, not clean", style="bold red")
 
     return False, None
 
@@ -253,7 +233,7 @@ def get_git_remote() -> Union[str, None]:
         logger.info("Found the git remote to be: %s", label)
         return label
     except BaseException:  # pylint: disable=W0702
-        logger.exception("Error getting git remote")
+        console.print("Not a git repo, no remote", style="bold red")
         raise
 
 