dagster-k8s 0.25.4rc0.tar.gz → 0.25.6.tar.gz

This diff compares the content of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

Files changed (28)
  1. {dagster-k8s-0.25.4rc0/dagster_k8s.egg-info → dagster-k8s-0.25.6}/PKG-INFO +1 -1
  2. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/client.py +5 -1
  3. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/executor.py +1 -1
  4. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/launcher.py +14 -14
  5. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/models.py +1 -1
  6. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/ops/k8s_job_op.py +18 -1
  7. dagster-k8s-0.25.6/dagster_k8s/version.py +1 -0
  8. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6/dagster_k8s.egg-info}/PKG-INFO +1 -1
  9. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s.egg-info/requires.txt +1 -1
  10. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/setup.py +1 -1
  11. dagster-k8s-0.25.4rc0/dagster_k8s/version.py +0 -1
  12. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/LICENSE +0 -0
  13. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/MANIFEST.in +0 -0
  14. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/README.md +0 -0
  15. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/__init__.py +0 -0
  16. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/container_context.py +0 -0
  17. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/job.py +0 -0
  18. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/kubernetes_version.py +0 -0
  19. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/ops/__init__.py +0 -0
  20. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/pipes.py +0 -0
  21. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/py.typed +0 -0
  22. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/test.py +0 -0
  23. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s/utils.py +0 -0
  24. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s.egg-info/SOURCES.txt +0 -0
  25. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s.egg-info/dependency_links.txt +0 -0
  26. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s.egg-info/not-zip-safe +0 -0
  27. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/dagster_k8s.egg-info/top_level.txt +0 -0
  28. {dagster-k8s-0.25.4rc0 → dagster-k8s-0.25.6}/setup.cfg +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dagster-k8s
-Version: 0.25.4rc0
+Version: 0.25.6
 Summary: A Dagster integration for k8s
 Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-k8s
 Author: Dagster Labs
dagster_k8s/client.py
@@ -673,7 +673,6 @@ class DagsterKubernetesClient:
                 # State checks below, see:
                 # https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#containerstate-v1-core
                 state = container_status.state
-
                 if state.running is not None:
                     if wait_for_state == WaitForPodState.Ready:
                         # ready is boolean field of container status
@@ -684,6 +683,11 @@ class DagsterKubernetesClient:
                             continue
                         else:
                             ready_containers.add(container_status.name)
+                            if container_status.name in initcontainers:
+                                self.logger(
+                                    f'Init container "{container_status.name}" is ready, waiting for non-init containers...'
+                                )
+                                continue
                         if initcontainers.issubset(exited_containers | ready_containers):
                             self.logger(f'Pod "{pod_name}" is ready, done waiting')
                             break
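Note on the client.py change above: before the added `continue`, an init container that reached a running-and-ready state could satisfy the pod-ready check by itself. A minimal, illustrative sketch of the corrected bookkeeping — simplified stand-ins, not the library's actual wait_for_pod loop, which polls the Kubernetes API:

def pod_is_ready(container_states: dict, initcontainers: set) -> bool:
    # container_states maps name -> "running" | "terminated"; illustrative only.
    ready_containers, exited_containers = set(), set()
    for name, state in container_states.items():
        if state == "terminated":
            exited_containers.add(name)
        elif state == "running":
            ready_containers.add(name)
            if name in initcontainers:
                # The fix: a ready init container must not satisfy the
                # pod-ready check on its own, so skip to the next container.
                continue
            if initcontainers.issubset(exited_containers | ready_containers):
                return True  # a non-init container is ready and all inits are done
    return False

# A pod whose only ready container is an init container keeps waiting:
assert not pod_is_ready({"init-db": "running"}, {"init-db"})
assert pod_is_ready({"init-db": "running", "app": "running"}, {"init-db"})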
dagster_k8s/executor.py
@@ -332,7 +332,7 @@ class K8sStepHandler(StepHandler):
         container_context = self._get_container_context(step_handler_context)

         status = self._api_client.get_job_status(
-            namespace=container_context.namespace,
+            namespace=container_context.namespace,  # pyright: ignore[reportArgumentType]
             job_name=job_name,
         )
         if not status:
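Several hunks in this release only add `# pyright: ignore[...]` comments. For reference, a small illustrative snippet (hypothetical function, not from dagster-k8s) of pyright's rule-scoped inline suppression, which silences exactly one diagnostic rule on one line:

from typing import Optional

def get_job_status(namespace: str) -> str:
    return f"status of jobs in {namespace}"

ns: Optional[str] = None
# Without the trailing comment, pyright flags the Optional argument as
# reportArgumentType; the bracketed rule name scopes the suppression.
get_job_status(ns)  # pyright: ignore[reportArgumentType]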
dagster_k8s/launcher.py
@@ -313,6 +313,11 @@ class K8sRunLauncher(RunLauncher, ConfigurableClass):

         self._launch_k8s_job_with_args(job_name, args, run)

+    def _get_resume_attempt_number(self, run: DagsterRun) -> Optional[int]:
+        if not self.supports_run_worker_crash_recovery:
+            return None
+        return self._instance.count_resume_run_attempts(run.run_id)
+
     def terminate(self, run_id):
         check.str_param(run_id, "run_id")
         run = self._instance.get_run_by_id(run_id)
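The new `_get_resume_attempt_number` helper centralizes a check that the launcher.py hunks below previously duplicated in `terminate`, the debug-info path, and `check_run_worker_health`. Returning `None` when crash recovery is unsupported matters because the resume attempt number feeds into the job name. A hedged sketch — the naming scheme below is an assumption for illustration, not necessarily `get_job_name_from_run_id`'s real output:

from typing import Optional

def job_name_for(run_id: str, resume_attempt_number: Optional[int] = None) -> str:
    # Hypothetical mirror of get_job_name_from_run_id: an attempt number, when
    # present, becomes part of the name, so every caller must compute it the
    # same way or it will look up a job that was never created.
    name = f"dagster-run-{run_id}"
    if resume_attempt_number:
        name += f"-{resume_attempt_number}"
    return name

assert job_name_for("abc123") == "dagster-run-abc123"
assert job_name_for("abc123", 2) == "dagster-run-abc123-2"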
@@ -325,7 +330,7 @@ class K8sRunLauncher(RunLauncher, ConfigurableClass):
         container_context = self.get_container_context_for_run(run)

         job_name = get_job_name_from_run_id(
-            run_id, resume_attempt_number=self._instance.count_resume_run_attempts(run.run_id)
+            run_id, resume_attempt_number=self._get_resume_attempt_number(run)
         )

         try:
@@ -367,12 +372,10 @@ class K8sRunLauncher(RunLauncher, ConfigurableClass):
         self, run: DagsterRun, include_container_logs: Optional[bool] = True
     ) -> Optional[str]:
         container_context = self.get_container_context_for_run(run)
-        if self.supports_run_worker_crash_recovery:
-            resume_attempt_number = self._instance.count_resume_run_attempts(run.run_id)
-        else:
-            resume_attempt_number = None

-        job_name = get_job_name_from_run_id(run.run_id, resume_attempt_number=resume_attempt_number)
+        job_name = get_job_name_from_run_id(
+            run.run_id, resume_attempt_number=self._get_resume_attempt_number(run)
+        )
         namespace = container_context.namespace
         pod_names = self._api_client.get_pod_names_in_job(job_name, namespace=namespace)
         full_msg = ""
@@ -397,7 +400,7 @@ class K8sRunLauncher(RunLauncher, ConfigurableClass):
             )

         else:
-            job_debug_info = self._api_client.get_job_debug_info(job_name, namespace=namespace)
+            job_debug_info = self._api_client.get_job_debug_info(job_name, namespace=namespace)  # pyright: ignore[reportArgumentType]
             full_msg = (
                 full_msg
                 + "\n\n"
@@ -411,15 +414,12 @@ class K8sRunLauncher(RunLauncher, ConfigurableClass):
     def check_run_worker_health(self, run: DagsterRun):
         container_context = self.get_container_context_for_run(run)

-        if self.supports_run_worker_crash_recovery:
-            resume_attempt_number = self._instance.count_resume_run_attempts(run.run_id)
-        else:
-            resume_attempt_number = None
-
-        job_name = get_job_name_from_run_id(run.run_id, resume_attempt_number=resume_attempt_number)
+        job_name = get_job_name_from_run_id(
+            run.run_id, resume_attempt_number=self._get_resume_attempt_number(run)
+        )
         try:
             status = self._api_client.get_job_status(
-                namespace=container_context.namespace,
+                namespace=container_context.namespace,  # pyright: ignore[reportArgumentType]
                 job_name=job_name,
             )
         except Exception:
dagster_k8s/models.py
@@ -69,7 +69,7 @@ def _k8s_parse_value(data: Any, classname: str, attr_name: str) -> Any:
     elif klass == object:
         return data
     elif klass == datetime.date:
-        return parse(data).date()
+        return parse(data).date()  # pyright: ignore[reportAttributeAccessIssue]
     elif klass == datetime.datetime:
         return parse(data)
     else:
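Context for the models.py suppression above: python-dateutil's parse() returns a datetime for ordinary input, and .date() strips the time component. With fuzzy_with_tokens=True, parse() can instead return a tuple, which is presumably the kind of union that makes pyright raise reportAttributeAccessIssue on the .date() access. A quick example (requires python-dateutil, which dagster-k8s pulls in transitively — an assumption worth verifying in your environment):

from dateutil.parser import parse

# parse() accepts most common timestamp formats and returns a datetime;
# .date() then drops the time-of-day portion.
print(parse("2024-11-05T13:45:00Z").date())  # 2024-11-05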
dagster_k8s/ops/k8s_job_op.py
@@ -371,7 +371,10 @@ def execute_k8s_job(
         watch = kubernetes.watch.Watch()  # consider moving in to api_client

         api_client.wait_for_pod(
-            pod_to_watch, namespace, wait_timeout=timeout, start_time=start_time
+            pod_to_watch,
+            namespace,  # pyright: ignore[reportArgumentType]
+            wait_timeout=timeout,
+            start_time=start_time,  # pyright: ignore[reportArgumentType]
         )

         log_stream = watch.stream(
@@ -416,6 +419,20 @@ def execute_k8s_job(
             num_pods_to_wait_for=num_pods_to_wait_for,
         )
     except (DagsterExecutionInterruptedError, Exception) as e:
+        try:
+            pods = api_client.get_pod_names_in_job(job_name=job_name, namespace=namespace)
+            pod_debug_info = "\n\n".join(
+                [api_client.get_pod_debug_info(pod_name, namespace) for pod_name in pods]
+            )
+        except Exception:
+            context.log.exception(
+                f"Error trying to get pod debug information for failed k8s job {job_name}"
+            )
+        else:
+            context.log.error(
+                f"Debug information for failed k8s job {job_name}:\n\n{pod_debug_info}"
+            )
+
         if delete_failed_k8s_jobs:
             context.log.info(
                 f"Deleting Kubernetes job {job_name} in namespace {namespace} due to exception"
dagster-k8s-0.25.6/dagster_k8s/version.py (new file)
@@ -0,0 +1 @@
+__version__ = "0.25.6"
dagster_k8s.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dagster-k8s
-Version: 0.25.4rc0
+Version: 0.25.6
 Summary: A Dagster integration for k8s
 Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-k8s
 Author: Dagster Labs
dagster_k8s.egg-info/requires.txt
@@ -1,3 +1,3 @@
-dagster==1.9.4rc0
+dagster==1.9.6
 kubernetes<32
 google-auth!=2.23.1
setup.py
@@ -43,7 +43,7 @@ setup(
     include_package_data=True,
     python_requires=">=3.9,<3.13",
     install_requires=[
-        "dagster==1.9.4rc0",
+        "dagster==1.9.6",
         f"kubernetes<{KUBERNETES_VERSION_UPPER_BOUND}",
         # exclude a google-auth release that added an overly restrictive urllib3 pin that confuses dependency resolvers
         "google-auth!=2.23.1",
dagster-k8s-0.25.4rc0/dagster_k8s/version.py (deleted)
@@ -1 +0,0 @@
-__version__ = "0.25.4rc0"