pulpcore 3.76.1-py3-none-any.whl → 3.77.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -2,13 +2,13 @@
 from pulpcore.tasking.tasks import dispatch
 
 from pulpcore.app.tasks import (
+    ageneral_update,
     fs_publication_export,
     fs_repo_version_export,
     general_create,
     general_create_from_temp_file,
     general_delete,
     general_multi_delete,
-    general_update,
     orphan_cleanup,
     reclaim_space,
 )
@@ -16,6 +16,7 @@ from pulpcore.app.tasks.repository import add_and_remove
 
 
 __all__ = [
+    "ageneral_update",
     "dispatch",
     "fs_publication_export",
     "fs_repo_version_export",
@@ -23,7 +24,6 @@ __all__ = [
     "general_create_from_temp_file",
     "general_delete",
     "general_multi_delete",
-    "general_update",
     "orphan_cleanup",
     "reclaim_space",
     "add_and_remove",
pulpcore/pytest_plugin.py CHANGED
@@ -9,6 +9,7 @@ import shutil
 import socket
 import ssl
 import subprocess
+import sys
 import threading
 import uuid
 
@@ -1017,6 +1018,7 @@ def dispatch_task(pulpcore_bindings):
 
         assert process.returncode == 0
         task_href = process.stdout.decode().strip()
+        print(process.stderr.decode(), file=sys.stderr)
         return task_href
 
     return _dispatch_task
pulpcore/tasking/_util.py CHANGED
@@ -71,8 +71,8 @@ def write_memory_usage(stop_event, path):
             current_mb_in_use = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024
             file.write(f"{seconds}\t{current_mb_in_use:.2f}\n")
             file.flush()
-            time.sleep(5)
-            seconds += 5
+            time.sleep(2)
+            seconds += 2
 
 
 def child_signal_handler(sig, frame):
@@ -135,15 +135,21 @@ def perform_task(task_pk, task_working_dir_rel_path):
 
 def _execute_task_and_profile(task):
     with tempfile.TemporaryDirectory(dir=settings.WORKING_DIRECTORY) as temp_dir:
-        pyinstrument_func = _pyinstrument_diagnostic_decorator(temp_dir, execute_task)
-        memory_func = _memory_diagnostic_decorator(temp_dir, pyinstrument_func)
+        _execute_task = execute_task
 
-        memory_func(task)
+        if settings.TASK_DIAGNOSTICS is True or "memory" in settings.TASK_DIAGNOSTICS:
+            _execute_task = _memory_diagnostic_decorator(temp_dir, _execute_task)
+        if settings.TASK_DIAGNOSTICS is True or "pyinstrument" in settings.TASK_DIAGNOSTICS:
+            _execute_task = _pyinstrument_diagnostic_decorator(temp_dir, _execute_task)
+        if settings.TASK_DIAGNOSTICS is True or "memray" in settings.TASK_DIAGNOSTICS:
+            _execute_task = _memray_diagnostic_decorator(temp_dir, _execute_task)
+
+        _execute_task(task)
 
 
 def _memory_diagnostic_decorator(temp_dir, func):
     def __memory_diagnostic_decorator(task):
-        mem_diagnostics_file_path = os.path.join(temp_dir, "memory.datum")
+        mem_diagnostics_file_path = os.path.join(temp_dir, "memory_profile.datum")
         # It would be better to have this recording happen in the parent process instead of here
         # https://github.com/pulp/pulpcore/issues/2337
         stop_event = threading.Event()
@@ -158,10 +164,10 @@ def _memory_diagnostic_decorator(temp_dir, func):
         artifact = Artifact.init_and_validate(mem_diagnostics_file_path)
         with suppress(IntegrityError):
             artifact.save()
-
-        ProfileArtifact.objects.get_or_create(artifact=artifact, name="memory_profile", task=task)
-
-        _logger.info("Created memory diagnostic data.")
+        ProfileArtifact.objects.get_or_create(
+            artifact=artifact, name="memory_profile", task=task
+        )
+        _logger.info("Created memory diagnostic data.")
 
     return __memory_diagnostic_decorator
 
@@ -171,7 +177,7 @@ def _pyinstrument_diagnostic_decorator(temp_dir, func):
         if importlib.util.find_spec("pyinstrument") is not None:
             from pyinstrument import Profiler
 
-            with Profiler() as profiler:
+            with Profiler(interval=0.002) as profiler:
                 func(task)
 
             profile_file_path = os.path.join(temp_dir, "pyinstrument.html")
@@ -182,16 +188,40 @@ def _pyinstrument_diagnostic_decorator(temp_dir, func):
             artifact = Artifact.init_and_validate(str(profile_file_path))
             with suppress(IntegrityError):
                 artifact.save()
+            ProfileArtifact.objects.get_or_create(
+                artifact=artifact, name="pyinstrument_profile", task=task
+            )
+            _logger.info("Created pyinstrument profile data.")
+        else:
+            func(task)
 
-            ProfileArtifact.objects.get_or_create(
-                artifact=artifact, name="pyinstrument_data", task=task
-            )
+    return __pyinstrument_diagnostic_decorator
+
+
+def _memray_diagnostic_decorator(temp_dir, func):
+    def __memray_diagnostic_decorator(task):
+        if importlib.util.find_spec("memray") is not None:
+            import memray
+
+            profile_file_path = os.path.join(temp_dir, "memray_profile.bin")
+            with memray.Tracker(
+                profile_file_path,
+                native_traces=False,
+                file_format=memray.FileFormat.AGGREGATED_ALLOCATIONS,
+            ):
+                func(task)
 
-            _logger.info("Created pyinstrument profile data.")
+            artifact = Artifact.init_and_validate(str(profile_file_path))
+            with suppress(IntegrityError):
+                artifact.save()
+            ProfileArtifact.objects.get_or_create(
+                artifact=artifact, name="memray_profile", task=task
+            )
+            _logger.info("Created memray memory profile data.")
         else:
             func(task)
 
-    return __pyinstrument_diagnostic_decorator
+    return __memray_diagnostic_decorator
 
 
 def dispatch_scheduled_tasks():
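
The decorator chain in _execute_task_and_profile is now assembled per diagnostic, so the TASK_DIAGNOSTICS setting evidently accepts either True (enable everything) or a collection of diagnostic names. A minimal sketch of a settings override under that assumption; only "memory", "pyinstrument" and "memray" are named in this diff:

# settings.py (sketch) -- enable a subset of the task diagnostics
TASK_DIAGNOSTICS = ["memory", "pyinstrument"]  # or True to enable all available diagnostics
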
pulpcore/tasking/tasks.py CHANGED
@@ -7,6 +7,8 @@ import os
 import sys
 import traceback
 import tempfile
+import threading
+from asgiref.sync import sync_to_async
 from datetime import timedelta
 from gettext import gettext as _
 
@@ -16,12 +18,13 @@ from django.db.models import Model, Max
 from django_guid import get_guid
 from pulpcore.app.apps import MODULE_PLUGIN_VERSIONS
 from pulpcore.app.models import Task, TaskGroup
-from pulpcore.app.util import current_task, get_domain, get_prn
+from pulpcore.app.util import current_task, get_domain, get_prn, deprecation_logger
 from pulpcore.constants import (
     TASK_FINAL_STATES,
     TASK_INCOMPLETE_STATES,
     TASK_STATES,
     TASK_DISPATCH_LOCK,
+    IMMEDIATE_TIMEOUT,
 )
 from pulpcore.tasking.kafka import send_task_notification
 
@@ -75,11 +78,38 @@ def _execute_task(task):
         func = getattr(module, function_name)
         args = task.enc_args or ()
         kwargs = task.enc_kwargs or {}
-        result = func(*args, **kwargs)
-        if asyncio.iscoroutine(result):
+        immediate = task.immediate
+        is_coroutine_fn = asyncio.iscoroutinefunction(func)
+
+        if not is_coroutine_fn:
+            if immediate:
+                deprecation_logger.warning(
+                    "Immediate tasks must be coroutine functions. "
+                    "Support for non-coroutine immediate tasks will be dropped "
+                    "in pulpcore 3.85."
+                )
+                func = sync_to_async(func)
+                is_coroutine_fn = True
+            else:
+                func(*args, **kwargs)
+
+        if is_coroutine_fn:
             _logger.debug("Task is coroutine %s", task.pk)
+            coro = func(*args, **kwargs)
+            if immediate:
+                coro = asyncio.wait_for(coro, timeout=IMMEDIATE_TIMEOUT)
             loop = asyncio.get_event_loop()
-            loop.run_until_complete(result)
+            try:
+                loop.run_until_complete(coro)
+            except asyncio.TimeoutError:
+                _logger.info(
+                    "Immediate task %s timed out after %s seconds.", task.pk, IMMEDIATE_TIMEOUT
+                )
+                raise RuntimeError(
+                    "Immediate task timed out after {timeout} seconds.".format(
+                        timeout=IMMEDIATE_TIMEOUT,
+                    )
+                )
 
     except Exception:
         exc_type, exc, tb = sys.exc_info()
@@ -101,6 +131,14 @@
     send_task_notification(task)
 
 
+def running_from_thread_pool() -> bool:
+    # TODO: this needs an alternative approach ASAP!
+    # Currently we rely on the weak fact that ThreadPoolExecutor names threads like:
+    # "ThreadPoolExecutor-0_0"
+    thread_name = threading.current_thread().name
+    return "ThreadPoolExecutor" in thread_name
+
+
 def dispatch(
     func,
     args=None,
@@ -148,6 +186,8 @@ def dispatch(
         ValueError: When `resources` is an unsupported type.
     """
 
+    # Can't run short tasks immediately if running from thread pool
+    immediate = immediate and not running_from_thread_pool()
     assert deferred or immediate, "A task must be at least `deferred` or `immediate`."
 
     if callable(func):
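
For callers, the net effect of the hunks above is that an immediate dispatch now expects a coroutine task, is bounded by IMMEDIATE_TIMEOUT, and is forced back to deferred execution when dispatched from a thread pool. A minimal sketch of such a dispatch, reusing the test task referenced elsewhere in this diff; the resource key is an arbitrary illustration:

from uuid import uuid4
from pulpcore.tasking.tasks import dispatch

task = dispatch(
    "pulpcore.app.tasks.test.asleep",    # coroutine task that sleeps for args[0] seconds
    args=(1,),
    exclusive_resources=[str(uuid4())],  # arbitrary resource key, as in the tests in this diff
    immediate=True,                      # try to run inline, subject to IMMEDIATE_TIMEOUT
    deferred=True,                       # otherwise fall back to a task worker
)
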
@@ -10,8 +10,10 @@ from urllib.parse import urljoin
 from uuid import uuid4
 
 from pulpcore.client.pulpcore import ApiException
+from contextlib import contextmanager
 
-from pulpcore.tests.functional.utils import download_file
+from pulpcore.tests.functional.utils import download_file, PulpTaskError
+from pulpcore.constants import IMMEDIATE_TIMEOUT
 
 
 @pytest.fixture(scope="module")
@@ -445,3 +447,151 @@ def test_cancel_task_group(pulpcore_bindings, dispatch_task_group, gen_user):
 
     with gen_user(model_roles=["core.task_owner"]):
         pulpcore_bindings.TaskGroupsApi.task_groups_cancel(tgroup_href, {"state": "canceled"})
+
+
+LT_TIMEOUT = IMMEDIATE_TIMEOUT / 2
+GT_TIMEOUT = IMMEDIATE_TIMEOUT * 2
+
+
+class TestImmediateTaskWithNoResource:
+
+    @pytest.mark.parallel
+    def test_succeeds_on_api_worker(self, pulpcore_bindings, dispatch_task):
+        """
+        GIVEN a task with no resource requirements
+        AND the task IS an async function
+        WHEN dispatching a task as immediate
+        THEN the task completes with no associated worker
+        """
+        task_href = dispatch_task(
+            "pulpcore.app.tasks.test.asleep", args=(LT_TIMEOUT,), immediate=True
+        )
+        task = pulpcore_bindings.TasksApi.read(task_href)
+        assert task.state == "completed"
+        assert task.worker is None
+
+    @pytest.mark.parallel
+    def test_executes_on_api_worker_when_no_async(self, pulpcore_bindings, dispatch_task, capsys):
+        """
+        GIVEN a task with no resource requirements
+        AND the task IS NOT an async function
+        WHEN dispatching a task as immediate
+        THEN the task completes with no associated worker
+        """
+        # TODO: on 3.85 this should throw an error
+        task_href = dispatch_task(
+            "pulpcore.app.tasks.test.sleep", args=(LT_TIMEOUT,), immediate=True
+        )
+        stderr_content = capsys.readouterr().err
+        task = pulpcore_bindings.TasksApi.read(task_href)
+        assert task.state == "completed"
+        assert task.worker is None
+        assert "Support for non-coroutine immediate tasks will be dropped" in stderr_content
+
+    @pytest.mark.parallel
+    def test_timeouts_on_api_worker(self, pulpcore_bindings, dispatch_task):
+        """
+        GIVEN a task with no resource requirements
+        AND the task is an async function
+        WHEN dispatching a task as immediate
+        AND it takes longer than timeout
+        THEN the task fails with a timeout error message
+        """
+        task_href = dispatch_task(
+            "pulpcore.app.tasks.test.asleep", args=(GT_TIMEOUT,), immediate=True
+        )
+        task = pulpcore_bindings.TasksApi.read(task_href)
+        assert task.worker is None
+        assert "task timed out after" in task.error["description"]
+
+
+@pytest.fixture
+def resource_blocker(pulpcore_bindings, dispatch_task):
+
+    @contextmanager
+    def _resource_blocker(exclusive_resources: list[str], duration=20):
+        task_href = dispatch_task(
+            "pulpcore.app.tasks.test.sleep",
+            args=(duration,),
+            exclusive_resources=exclusive_resources,
+        )
+        yield
+        # Trying to cancel a finished task will return a 409 code.
+        # We can ignore if that's the case, because all we want here is to cut time down.
+        # Otherwise it might be a real error.
+        try:
+            pulpcore_bindings.TasksApi.tasks_cancel(task_href, {"state": "canceled"})
+        except ApiException as e:
+            if e.status != 409:
+                raise
+
+    return _resource_blocker
+
+
+class TestImmediateTaskWithBlockedResource:
+
+    @pytest.mark.parallel
+    def test_executes_in_task_worker(
+        self, resource_blocker, dispatch_task, monitor_task, pulpcore_bindings
+    ):
+        """
+        GIVEN an async task requiring busy resources
+        WHEN dispatching a task as immediate
+        THEN the task completes with a worker
+        """
+        COMMON_RESOURCE = str(uuid4())
+        with resource_blocker(exclusive_resources=[COMMON_RESOURCE]):
+            task_href = dispatch_task(
+                "pulpcore.app.tasks.test.asleep",
+                args=(LT_TIMEOUT,),
+                immediate=True,
+                exclusive_resources=[COMMON_RESOURCE],
+            )
+        task = monitor_task(task_href)
+        assert task.state == "completed"
+        assert task.worker is not None
+
+    @pytest.mark.parallel
+    def test_throws_when_non_deferrable(
+        self, resource_blocker, pulpcore_bindings, dispatch_task, monitor_task
+    ):
+        """
+        GIVEN an async task requiring busy resources
+        WHEN dispatching as immediate and not deferrable
+        THEN an error is raised
+        """
+        COMMON_RESOURCE = str(uuid4())
+        with resource_blocker(exclusive_resources=[COMMON_RESOURCE]):
+            task_href = dispatch_task(
+                "pulpcore.app.tasks.test.asleep",
+                args=(0,),
+                immediate=True,
+                deferred=False,
+                exclusive_resources=[COMMON_RESOURCE],
+            )
+        task = pulpcore_bindings.TasksApi.read(task_href)
+        assert task.state == "canceled"
+        assert task.worker is None
+        assert "Resources temporarily unavailable." in task.error["reason"]
+
+    @pytest.mark.parallel
+    def test_times_out_on_task_worker(
+        self, resource_blocker, pulpcore_bindings, dispatch_task, monitor_task
+    ):
+        """
+        GIVEN an async task requiring busy resources
+        WHEN dispatching a task as immediate
+        AND it takes longer than timeout
+        THEN an error is raised
+        """
+        COMMON_RESOURCE = str(uuid4())
+        with pytest.raises(PulpTaskError) as ctx:
+            with resource_blocker(exclusive_resources=[COMMON_RESOURCE]):
+                task_href = dispatch_task(
+                    "pulpcore.app.tasks.test.asleep",
+                    args=(GT_TIMEOUT,),
+                    immediate=True,
+                    exclusive_resources=[COMMON_RESOURCE],
+                )
+            monitor_task(task_href)
+        assert "task timed out after" in ctx.value.task.error["description"]
@@ -16,7 +16,7 @@ from pulpcore.tests.functional.utils import get_from_url
 @pytest.mark.parallel
 def test_full_workflow(
     file_repo_with_auto_publish,
-    basic_manifest_path,
+    duplicate_filename_paths,
     file_remote_factory,
     file_bindings,
     distribution_base_url,
@@ -37,7 +37,8 @@ def test_full_workflow(
         return r.status, r.headers.get("X-PULP-CACHE")
 
     # Sync from the remote and assert that a new repository version is created
-    remote = file_remote_factory(manifest_path=basic_manifest_path, policy="immediate")
+    manifest_1, manifest_2 = duplicate_filename_paths
+    remote = file_remote_factory(manifest_path=manifest_1, policy="immediate")
     body = RepositorySyncURL(remote=remote.pulp_href)
     monitor_task(
         file_bindings.RepositoriesFileApi.sync(file_repo_with_auto_publish.pulp_href, body).task
@@ -129,6 +130,21 @@ def test_full_workflow(
         url = urljoin(distro_base_url, file)
         assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file
 
+    # Sync a new remote with same filenames but on-demand
+    remote = file_remote_factory(manifest_path=manifest_2, policy="on_demand")
+    body = RepositorySyncURL(remote=remote.pulp_href)
+    monitor_task(
+        file_bindings.RepositoriesFileApi.sync(file_repo_with_auto_publish.pulp_href, body).task
+    )
+    repo = file_bindings.RepositoriesFileApi.read(file_repo_with_auto_publish.pulp_href)
+    assert repo.latest_version_href.endswith("/versions/3/")
+
+    # Test that cache is invalidated from sync, but on-demand responses are immediately cached
+    files = ["1.iso", "1.iso", "2.iso", "2.iso", "3.iso", "3.iso"]
+    for i, file in enumerate(files):
+        url = urljoin(distro_base_url, file)
+        assert (200, "HIT" if i % 2 == 1 else None) == _check_cache(url), file
+
     # Tests that deleting a repository invalidates the cache"""
     monitor_task(file_bindings.RepositoriesFileApi.delete(repo.pulp_href).task)
     files = ["", "PULP_MANIFEST", "2.iso"]
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pulpcore
-Version: 3.76.1
+Version: 3.77.1
 Summary: Pulp Django Application and Related Modules
 Author-email: Pulp Team <pulp-list@redhat.com>
 Project-URL: Homepage, https://pulpproject.org
@@ -21,14 +21,14 @@ Classifier: Programming Language :: Python :: 3.12
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: aiodns<=3.2.0,>=3.0
+Requires-Dist: aiodns<=3.3.0,>=3.0
 Requires-Dist: aiofiles<24.2.0,>=22.1
 Requires-Dist: aiohttp<3.12,>=3.8.1
 Requires-Dist: asyncio-throttle<=1.0.2,>=1.0
 Requires-Dist: async-timeout<4.0.4,>=4.0.3; python_version < "3.11"
 Requires-Dist: backoff<2.2.2,>=2.1.2
 Requires-Dist: click<=8.1.8,>=8.1.0
-Requires-Dist: cryptography<44.0.3,>=38.0.1
+Requires-Dist: cryptography<44.0.4,>=38.0.1
 Requires-Dist: Django~=4.2.0
 Requires-Dist: django-filter<=25.1,>=23.1
 Requires-Dist: django-guid<=3.5.1,>=3.3
@@ -52,13 +52,13 @@ Requires-Dist: opentelemetry-exporter-otlp-proto-http<1.33,>=1.27.0
 Requires-Dist: protobuf<6.0,>=4.21.1
 Requires-Dist: pulp-glue<0.33,>=0.18.0
 Requires-Dist: pygtrie<=2.5.0,>=2.5
-Requires-Dist: psycopg[binary]<=3.2.6,>=3.1.8
+Requires-Dist: psycopg[binary]<=3.2.7,>=3.1.8
 Requires-Dist: pyparsing<=3.2.3,>=3.1.0
 Requires-Dist: python-gnupg<=0.5.4,>=0.5
 Requires-Dist: PyYAML<=6.0.2,>=5.1.1
 Requires-Dist: redis<5.2.2,>=4.3
 Requires-Dist: tablib<3.6.0
-Requires-Dist: url-normalize<=2.2.0,>=1.4.3
+Requires-Dist: url-normalize<=2.2.1,>=1.4.3
 Requires-Dist: uuid6<=2024.7.10,>=2023.5.2
 Requires-Dist: whitenoise<6.10.0,>=5.0
 Requires-Dist: yarl<1.20.1,>=1.8
@@ -75,6 +75,9 @@ Requires-Dist: django-prometheus; extra == "prometheus"
 Provides-Extra: kafka
 Requires-Dist: cloudevents==1.11.0; extra == "kafka"
 Requires-Dist: confluent-kafka<2.10.0,>=2.4.0; extra == "kafka"
+Provides-Extra: diagnostics
+Requires-Dist: pyinstrument~=5.0; extra == "diagnostics"
+Requires-Dist: memray~=1.17; extra == "diagnostics"
 Dynamic: license-file
 
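
The new diagnostics extra bundles pyinstrument and memray, the two optional profilers used by the task diagnostics code above; presumably it is installed like any other extra, e.g. pip install "pulpcore[diagnostics]".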