pulpcore 3.85.1__py3-none-any.whl → 3.87.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pulpcore might be problematic; see the registry's release page for more details.

Files changed (33)
  1. pulp_certguard/app/__init__.py +1 -1
  2. pulp_file/app/__init__.py +1 -1
  3. pulpcore/app/apps.py +1 -1
  4. pulpcore/app/management/commands/optimizemigration.py +84 -0
  5. pulpcore/app/management/commands/remove-plugin.py +2 -5
  6. pulpcore/app/migrations/0139_task_app_lock.py +19 -0
  7. pulpcore/app/migrations/0140_require_appstatus_zdu.py +15 -0
  8. pulpcore/app/migrations/0141_alter_appstatus_name.py +18 -0
  9. pulpcore/app/models/status.py +20 -25
  10. pulpcore/app/models/task.py +6 -1
  11. pulpcore/app/serializers/status.py +18 -8
  12. pulpcore/app/serializers/task.py +1 -29
  13. pulpcore/app/settings.py +12 -0
  14. pulpcore/app/tasks/analytics.py +3 -4
  15. pulpcore/app/tasks/importer.py +2 -2
  16. pulpcore/app/views/status.py +5 -6
  17. pulpcore/app/viewsets/task.py +14 -5
  18. pulpcore/middleware.py +37 -10
  19. pulpcore/openapi/__init__.py +1 -1
  20. pulpcore/plugin/repo_version_utils.py +8 -2
  21. pulpcore/pytest_plugin.py +6 -4
  22. pulpcore/tasking/tasks.py +16 -15
  23. pulpcore/tasking/worker.py +59 -41
  24. pulpcore/tests/functional/api/test_tasking.py +14 -51
  25. pulpcore/tests/functional/api/test_workers.py +1 -2
  26. pulpcore/tests/unit/content/test_heartbeat.py +2 -1
  27. pulpcore/tests/unit/models/test_repository.py +84 -1
  28. {pulpcore-3.85.1.dist-info → pulpcore-3.87.0.dist-info}/METADATA +3 -3
  29. {pulpcore-3.85.1.dist-info → pulpcore-3.87.0.dist-info}/RECORD +33 -29
  30. {pulpcore-3.85.1.dist-info → pulpcore-3.87.0.dist-info}/WHEEL +0 -0
  31. {pulpcore-3.85.1.dist-info → pulpcore-3.87.0.dist-info}/entry_points.txt +0 -0
  32. {pulpcore-3.85.1.dist-info → pulpcore-3.87.0.dist-info}/licenses/LICENSE +0 -0
  33. {pulpcore-3.85.1.dist-info → pulpcore-3.87.0.dist-info}/top_level.txt +0 -0
@@ -116,9 +116,15 @@ def validate_version_paths(version):
116
116
  Raises:
117
117
  ValueError: If two artifact relative paths overlap
118
118
  """
119
- paths = ContentArtifact.objects.filter(content__pk__in=version.content).values_list(
120
- "relative_path", flat=True
119
+ # Get unique (path, artifact) pairs to allow artifacts shared across content
120
+ content_artifacts = (
121
+ ContentArtifact.objects.filter(content__pk__in=version.content)
122
+ .values_list("relative_path", "artifact")
123
+ .distinct()
121
124
  )
125
+
126
+ paths = [path for path, artifact_id in content_artifacts]
127
+
122
128
  try:
123
129
  validate_file_paths(paths)
124
130
  except ValueError as e:
pulpcore/pytest_plugin.py CHANGED
@@ -1013,23 +1013,25 @@ def dispatch_task(pulpcore_bindings):
1013
1013
  commands = (
1014
1014
  "from django_guid import set_guid; "
1015
1015
  "from pulpcore.tasking.tasks import dispatch; "
1016
- "from pulpcore.app.models import TaskGroup; "
1016
+ "from pulpcore.app.models import TaskGroup, AppStatus; "
1017
1017
  "from pulpcore.app.util import get_url, set_current_user; "
1018
1018
  "from django.contrib.auth import get_user_model; "
1019
+ f"app_status=AppStatus.objects.create(name='test-' + {cid!r},app_type='worker'); "
1019
1020
  "User = get_user_model(); "
1020
1021
  f"user = User.objects.filter(username='{username}').first(); "
1021
1022
  "set_current_user(user); "
1022
1023
  f"set_guid({cid!r}); "
1023
1024
  f"tg = {task_group_id!r} and TaskGroup.objects.filter(pk={task_group_id!r}).first(); "
1024
1025
  f"task = dispatch(*{args!r}, task_group=tg, **{kwargs!r}); "
1026
+ "app_status.delete(); "
1025
1027
  "print(get_url(task))"
1026
1028
  )
1027
1029
 
1028
1030
  process = subprocess.run(["pulpcore-manager", "shell", "-c", commands], capture_output=True)
1029
-
1030
- assert process.returncode == 0
1031
+ err_log = process.stderr.decode()
1032
+ assert process.returncode == 0, err_log
1031
1033
  task_href = process.stdout.decode().strip()
1032
- print(process.stderr.decode(), file=sys.stderr)
1034
+ print(err_log, file=sys.stderr)
1033
1035
  return task_href
1034
1036
 
1035
1037
  return _dispatch_task
pulpcore/tasking/tasks.py CHANGED
@@ -8,7 +8,6 @@ import sys
8
8
  import traceback
9
9
  import tempfile
10
10
  import threading
11
- from asgiref.sync import sync_to_async
12
11
  from gettext import gettext as _
13
12
 
14
13
  from django.conf import settings
@@ -16,8 +15,12 @@ from django.db import connection, transaction
16
15
  from django.db.models import Model
17
16
  from django_guid import get_guid
18
17
  from pulpcore.app.apps import MODULE_PLUGIN_VERSIONS
19
- from pulpcore.app.models import Task, TaskGroup
20
- from pulpcore.app.util import current_task, get_domain, get_prn, deprecation_logger
18
+ from pulpcore.app.models import Task, TaskGroup, AppStatus
19
+ from pulpcore.app.util import (
20
+ current_task,
21
+ get_domain,
22
+ get_prn,
23
+ )
21
24
  from pulpcore.constants import (
22
25
  TASK_FINAL_STATES,
23
26
  TASK_INCOMPLETE_STATES,
@@ -81,19 +84,11 @@ def _execute_task(task):
81
84
  immediate = task.immediate
82
85
  is_coroutine_fn = asyncio.iscoroutinefunction(func)
83
86
 
84
- if not is_coroutine_fn:
85
- if immediate:
86
- deprecation_logger.warning(
87
- "Immediate tasks must be coroutine functions. "
88
- "Support for non-coroutine immediate tasks will be dropped "
89
- "in pulpcore 3.85."
90
- )
91
- func = sync_to_async(func)
92
- is_coroutine_fn = True
93
- else:
94
- func(*args, **kwargs)
87
+ if immediate and not is_coroutine_fn:
88
+ raise ValueError("Immediate tasks must be async functions.")
95
89
 
96
90
  if is_coroutine_fn:
91
+ # Both regular and immediate tasks can be coroutines, but only immediate ones must time out.
97
92
  _logger.debug("Task is coroutine %s", task.pk)
98
93
  coro = func(*args, **kwargs)
99
94
  if immediate:
@@ -110,6 +105,8 @@ def _execute_task(task):
110
105
  timeout=IMMEDIATE_TIMEOUT,
111
106
  )
112
107
  )
108
+ else:
109
+ func(*args, **kwargs)
113
110
 
114
111
  except Exception:
115
112
  exc_type, exc, tb = sys.exc_info()
@@ -240,6 +237,7 @@ def dispatch(
240
237
  immediate=immediate,
241
238
  deferred=deferred,
242
239
  profile_options=x_task_diagnostics_var.get(None),
240
+ app_lock=(immediate and AppStatus.objects.current()) or None,
243
241
  )
244
242
  task.refresh_from_db() # The database may have assigned a timestamp for us.
245
243
  if immediate:
@@ -272,13 +270,16 @@ def dispatch(
272
270
  try:
273
271
  execute_task(task)
274
272
  finally:
275
- # whether the task fails or not, we should always restore the workdir
273
+ # Whether the task fails or not, we should always restore the workdir.
276
274
  os.chdir(cur_dir)
277
275
 
278
276
  if resources:
279
277
  notify_workers = True
280
278
  elif deferred:
279
+ # Resources are blocked. Let the others handle it.
281
280
  notify_workers = True
281
+ task.app_lock = None
282
+ task.save()
282
283
  else:
283
284
  task.set_canceling()
284
285
  task.set_canceled(TASK_STATES.CANCELED, "Resources temporarily unavailable.")
@@ -77,7 +77,6 @@ class PulpcoreWorker:
77
77
  self.app_status = AppStatus.objects.create(
78
78
  name=self.name, app_type="worker", versions=self.versions
79
79
  )
80
- self.worker = self.app_status._old_status
81
80
  except IntegrityError:
82
81
  _logger.error(f"A worker with name {self.name} already exists in the database.")
83
82
  exit(1)
@@ -184,15 +183,17 @@ class PulpcoreWorker:
184
183
  for app_worker in qs:
185
184
  _logger.info(_("Clean missing %s worker %s."), app_worker.app_type, app_worker.name)
186
185
  qs.delete()
187
- for cls, cls_name in (
188
- (Worker, "pulp"),
189
- (ApiAppStatus, "api"),
190
- (ContentAppStatus, "content"),
191
- ):
192
- qs = cls.objects.missing(age=timedelta(days=7))
193
- for app_worker in qs:
194
- _logger.info(_("Clean missing %s worker %s."), cls_name, app_worker.name)
195
- qs.delete()
186
+ with contextlib.suppress(DatabaseError):
187
+ # By now a migration on a newer release may have deleted these tables already.
188
+ for cls, cls_name in (
189
+ (Worker, "pulp"),
190
+ (ApiAppStatus, "api"),
191
+ (ContentAppStatus, "content"),
192
+ ):
193
+ qs = cls.objects.missing(age=timedelta(days=7))
194
+ for app_worker in qs:
195
+ _logger.info(_("Clean missing %s worker %s."), cls_name, app_worker.name)
196
+ qs.delete()
196
197
 
197
198
  def beat(self):
198
199
  if self.app_status.last_heartbeat < timezone.now() - self.heartbeat_period:
@@ -363,41 +364,60 @@ class PulpcoreWorker:
363
364
  def iter_tasks(self):
364
365
  """Iterate over ready tasks and yield each task while holding the lock."""
365
366
  while not self.shutdown_requested:
366
- # When batching this query, be sure to use "pulp_created" as a cursor
367
+ # When batching this query, be sure to use "pulp_created" as a cursor.
367
368
  for task in Task.objects.filter(
368
369
  state__in=TASK_INCOMPLETE_STATES,
369
370
  unblocked_at__isnull=False,
370
371
  ).order_by("-immediate", F("pulp_created") + Value(timedelta(seconds=8)) * Random()):
371
- # This code will only be called if we acquired the lock successfully
372
- # The lock will be automatically be released at the end of the block
372
+ # This code will only be called if we acquired the lock successfully.
373
+ # The lock will automatically be released at the end of the block.
373
374
  with contextlib.suppress(AdvisoryLockError), task:
374
- # Check if someone else changed the task before we got the lock
375
- task.refresh_from_db()
376
-
377
- if task.state == TASK_STATES.CANCELING and task.worker is None:
378
- # No worker picked this task up before being canceled
379
- if self.cancel_abandoned_task(task, TASK_STATES.CANCELED):
380
- # Continue looking for the next task without considering this
381
- # tasks resources, as we just released them
382
- continue
383
- if task.state in [TASK_STATES.RUNNING, TASK_STATES.CANCELING]:
384
- # A running task without a lock must be abandoned
385
- if self.cancel_abandoned_task(
386
- task, TASK_STATES.FAILED, "Worker has gone missing."
375
+ # We got the advisory lock (OLD) now try to get the app_lock (NEW).
376
+ rows = Task.objects.filter(pk=task.pk, app_lock=None).update(
377
+ app_lock=AppStatus.objects.current()
378
+ )
379
+ if rows == 0:
380
+ _logger.error(
381
+ "Acquired advisory lock but missed the app_lock for the task. "
382
+ "This should only happen during the upgrade phase to the new app_lock."
383
+ )
384
+ continue
385
+ try:
386
+ # Check if someone else changed the task before we got the lock.
387
+ task.refresh_from_db()
388
+
389
+ if task.state == TASK_STATES.CANCELING:
390
+ # No worker picked this task up before being canceled.
391
+ if self.cancel_abandoned_task(task, TASK_STATES.CANCELED):
392
+ # Continue looking for the next task without considering this
393
+ # tasks resources, as we just released them.
394
+ continue
395
+ if task.state == TASK_STATES.RUNNING:
396
+ # A running task without a lock must be abandoned.
397
+ if self.cancel_abandoned_task(
398
+ task, TASK_STATES.FAILED, "Worker has gone missing."
399
+ ):
400
+ # Continue looking for the next task without considering this
401
+ # tasks resources, as we just released them.
402
+ continue
403
+
404
+ # This statement is using lazy evaluation.
405
+ if (
406
+ task.state == TASK_STATES.WAITING
407
+ and task.unblocked_at is not None
408
+ and self.is_compatible(task)
387
409
  ):
388
- # Continue looking for the next task without considering this
389
- # tasks resources, as we just released them
390
- continue
391
-
392
- # This statement is using lazy evaluation
393
- if (
394
- task.state == TASK_STATES.WAITING
395
- and task.unblocked_at is not None
396
- and self.is_compatible(task)
397
- ):
398
- yield task
399
- # Start from the top of the Task list
400
- break
410
+ yield task
411
+ # Start from the top of the Task list.
412
+ break
413
+ finally:
414
+ rows = Task.objects.filter(
415
+ pk=task.pk, app_lock=AppStatus.objects.current()
416
+ ).update(app_lock=None)
417
+ if rows != 1:
418
+ raise RuntimeError(
419
+ "Something other than us is messing around with locks."
420
+ )
401
421
  else:
402
422
  # No task found in the for-loop
403
423
  break
@@ -426,8 +446,6 @@ class PulpcoreWorker:
426
446
 
427
447
  self.cancel_task = False
428
448
  self.task = task
429
- task.worker = self.worker
430
- task.save(update_fields=["worker"])
431
449
  cancel_state = None
432
450
  cancel_reason = None
433
451
  domain = task.pulp_domain
@@ -230,25 +230,6 @@ def test_retrieve_task_with_minimal_fields(task, bindings_cfg):
230
230
  assert unexpected_fields.isdisjoint(returned_fields)
231
231
 
232
232
 
233
- @pytest.mark.parallel
234
- def test_retrieve_task_using_invalid_worker(pulpcore_bindings):
235
- """Expects to raise an exception when using invalid worker value as filter."""
236
-
237
- with pytest.raises(ApiException) as ctx:
238
- pulpcore_bindings.TasksApi.list(worker=str(uuid4()))
239
-
240
- assert ctx.value.status == 400
241
-
242
-
243
- @pytest.mark.parallel
244
- def test_retrieve_task_using_valid_worker(task, pulpcore_bindings):
245
- """Expects to retrieve a task using a valid worker URI as filter."""
246
-
247
- response = pulpcore_bindings.TasksApi.list(worker=task.worker)
248
-
249
- assert response.results and response.count
250
-
251
-
252
233
  @pytest.mark.parallel
253
234
  def test_retrieve_task_using_valid_date(task, pulpcore_bindings):
254
235
  """Expects to retrieve a task using a valid date."""
@@ -278,22 +259,6 @@ def test_search_task_using_an_invalid_name(pulpcore_bindings):
278
259
  assert not search_results.results and not search_results.count
279
260
 
280
261
 
281
- @pytest.mark.parallel
282
- def test_filter_tasks_using_worker__in_filter(pulpcore_bindings, dispatch_task, monitor_task):
283
- task1_href = dispatch_task("pulpcore.app.tasks.test.sleep", args=(0,))
284
- task2_href = dispatch_task("pulpcore.app.tasks.test.sleep", args=(0,))
285
-
286
- task1 = monitor_task(task1_href)
287
- task2 = monitor_task(task2_href)
288
-
289
- search_results = pulpcore_bindings.TasksApi.list(worker__in=(task1.worker, task2.worker))
290
-
291
- tasks_hrefs = [task.pulp_href for task in search_results.results]
292
-
293
- assert task1_href in tasks_hrefs
294
- assert task2_href in tasks_hrefs
295
-
296
-
297
262
  @pytest.mark.parallel
298
263
  def test_filter_tasks_using_pulp_created_filter(pulpcore_bindings, dispatch_task):
299
264
 
@@ -487,25 +452,23 @@ class TestImmediateTaskWithNoResource:
487
452
  )
488
453
  task = pulpcore_bindings.TasksApi.read(task_href)
489
454
  assert task.state == "completed"
490
- assert task.worker is None
491
455
 
492
456
  @pytest.mark.parallel
493
- def test_executes_on_api_worker_when_no_async(self, pulpcore_bindings, dispatch_task, capsys):
457
+ def test_executes_on_api_worker_when_no_async(
458
+ self, pulpcore_bindings, dispatch_task, monitor_task
459
+ ):
494
460
  """
495
461
  GIVEN a task with no resource requirements
496
462
  AND the task IS NOT an async function
497
463
  WHEN dispatching a task as immediate
498
- THEN the task completes with no associated worker
464
+ THEN the dispatch should throw an error
499
465
  """
500
- # TODO: on 3.85 this should throw an error
501
- task_href = dispatch_task(
502
- "pulpcore.app.tasks.test.sleep", args=(LT_TIMEOUT,), immediate=True
503
- )
504
- stderr_content = capsys.readouterr().err
505
- task = pulpcore_bindings.TasksApi.read(task_href)
506
- assert task.state == "completed"
507
- assert task.worker is None
508
- assert "Support for non-coroutine immediate tasks will be dropped" in stderr_content
466
+ with pytest.raises(PulpTaskError) as ctx:
467
+ task_href = dispatch_task(
468
+ "pulpcore.app.tasks.test.sleep", args=(LT_TIMEOUT,), immediate=True
469
+ )
470
+ monitor_task(task_href)
471
+ assert "Immediate tasks must be async functions" in ctx.value.task.error["description"]
509
472
 
510
473
  @pytest.mark.parallel
511
474
  def test_timeouts_on_api_worker(self, pulpcore_bindings, dispatch_task):
@@ -520,7 +483,7 @@ class TestImmediateTaskWithNoResource:
520
483
  "pulpcore.app.tasks.test.asleep", args=(GT_TIMEOUT,), immediate=True
521
484
  )
522
485
  task = pulpcore_bindings.TasksApi.read(task_href)
523
- assert task.worker is None
486
+ assert task.state == "failed"
524
487
  assert "task timed out after" in task.error["description"]
525
488
 
526
489
 
@@ -556,7 +519,7 @@ class TestImmediateTaskWithBlockedResource:
556
519
  """
557
520
  GIVEN an async task requiring busy resources
558
521
  WHEN dispatching a task as immediate
559
- THEN the task completes with a worker
522
+ THEN the task returns as waiting first
560
523
  """
561
524
  COMMON_RESOURCE = str(uuid4())
562
525
  with resource_blocker(exclusive_resources=[COMMON_RESOURCE]):
@@ -566,9 +529,10 @@ class TestImmediateTaskWithBlockedResource:
566
529
  immediate=True,
567
530
  exclusive_resources=[COMMON_RESOURCE],
568
531
  )
532
+ task = pulpcore_bindings.TasksApi.read(task_href)
533
+ assert task.state == "waiting"
569
534
  task = monitor_task(task_href)
570
535
  assert task.state == "completed"
571
- assert task.worker is not None
572
536
 
573
537
  @pytest.mark.parallel
574
538
  def test_throws_when_non_deferrable(
@@ -590,7 +554,6 @@ class TestImmediateTaskWithBlockedResource:
590
554
  )
591
555
  task = pulpcore_bindings.TasksApi.read(task_href)
592
556
  assert task.state == "canceled"
593
- assert task.worker is None
594
557
  assert "Resources temporarily unavailable." in task.error["reason"]
595
558
 
596
559
  @pytest.mark.parallel
@@ -4,7 +4,6 @@ import pytest
4
4
  import subprocess
5
5
  import uuid
6
6
  from datetime import datetime, timedelta
7
- from random import choice
8
7
  from time import sleep
9
8
 
10
9
 
@@ -23,7 +22,7 @@ def test_worker_actions(pulpcore_bindings):
23
22
  assert val is not None
24
23
 
25
24
  # Pick a random worker to be used for the next assertions.
26
- chosen_worker = choice(workers)
25
+ chosen_worker = next(worker for worker in workers if not worker.name.startswith("test-"))
27
26
 
28
27
  # Read a worker by its pulp_href.
29
28
  read_worker = pulpcore_bindings.WorkersApi.read(chosen_worker.pulp_href)
@@ -5,7 +5,7 @@ from django.db.utils import InterfaceError, OperationalError
5
5
 
6
6
  from pulpcore.content import _heartbeat
7
7
  from pulpcore.content.handler import Handler
8
- from pulpcore.app.models.status import AppStatusManager
8
+ from pulpcore.app.models.status import AppStatus, AppStatusManager
9
9
 
10
10
 
11
11
  class MockException(Exception):
@@ -25,6 +25,7 @@ async def test_db_connection_interface_error(monkeypatch, settings, error_class)
25
25
  mock_acreate = AsyncMock()
26
26
  mock_acreate.return_value = mock_app_status
27
27
  monkeypatch.setattr(AppStatusManager, "acreate", mock_acreate)
28
+ monkeypatch.setattr(AppStatus, "objects", AppStatusManager())
28
29
  mock_reset_db = Mock()
29
30
  monkeypatch.setattr(Handler, "_reset_db_connection", mock_reset_db)
30
31
  settings.CONTENT_APP_TTL = 1
@@ -3,7 +3,8 @@ from uuid import uuid4
3
3
 
4
4
  from itertools import compress
5
5
 
6
- from pulpcore.plugin.models import Content, Repository
6
+ from pulpcore.plugin.models import Artifact, Content, ContentArtifact, Repository
7
+ from pulpcore.plugin.repo_version_utils import validate_version_paths
7
8
 
8
9
 
9
10
  def pks_of_next_qs(qs_generator):
@@ -258,3 +259,85 @@ def test_next_version_with_multiple_versions():
258
259
 
259
260
  assert repository.next_version == 4
260
261
  assert repository.latest_version().number == 1
262
+
263
+
264
+ @pytest.mark.django_db
265
+ def test_shared_artifact_same_path_validation(tmp_path):
266
+ """
267
+ Test that multiple content units can reference the same artifact with the same
268
+ relative path without causing validation errors.
269
+
270
+ This reproduces scenarios where different content units legitimately share
271
+ the same artifact (e.g. upstream source files).
272
+ """
273
+ # Create a repository
274
+ repository = Repository.objects.create(name=uuid4())
275
+ repository.CONTENT_TYPES = [Content]
276
+
277
+ # Create a shared artifact using proper test pattern
278
+ artifact_path = tmp_path / "shared_file.txt"
279
+ artifact_path.write_text("Shared content data")
280
+ shared_artifact = Artifact.init_and_validate(str(artifact_path))
281
+ shared_artifact.save()
282
+
283
+ # Create two content units (simulates any content that shares artifacts)
284
+ content1 = Content.objects.create(pulp_type="core.content")
285
+ content2 = Content.objects.create(pulp_type="core.content")
286
+
287
+ # Both content units reference the same artifact with same path
288
+ ContentArtifact.objects.create(
289
+ content=content1, artifact=shared_artifact, relative_path="shared/common_file.txt"
290
+ )
291
+ ContentArtifact.objects.create(
292
+ content=content2, artifact=shared_artifact, relative_path="shared/common_file.txt"
293
+ )
294
+
295
+ # Create a repository version with both content units
296
+ with repository.new_version() as new_version:
297
+ new_version.add_content(Content.objects.filter(pk__in=[content1.pk, content2.pk]))
298
+
299
+ # This should not raise validation errors with our fix
300
+ validate_version_paths(new_version)
301
+
302
+
303
+ @pytest.mark.django_db
304
+ def test_different_artifacts_same_path_validation_fails(tmp_path):
305
+ """
306
+ Test that different artifacts trying to use the same relative path
307
+ still fail validation (this is a real conflict that should be caught).
308
+ """
309
+ # Create a repository
310
+ repository = Repository.objects.create(name=uuid4())
311
+ repository.CONTENT_TYPES = [Content]
312
+
313
+ # Create two different artifacts using proper test pattern
314
+ artifact1_path = tmp_path / "artifact1.txt"
315
+ artifact1_path.write_text("Content of first artifact")
316
+ artifact1 = Artifact.init_and_validate(str(artifact1_path))
317
+ artifact1.save()
318
+
319
+ artifact2_path = tmp_path / "artifact2.txt"
320
+ artifact2_path.write_text("Content of second artifact") # Different content
321
+ artifact2 = Artifact.init_and_validate(str(artifact2_path))
322
+ artifact2.save()
323
+
324
+ # Create two content units with different artifacts but same path
325
+ content1 = Content.objects.create(pulp_type="core.content")
326
+ content2 = Content.objects.create(pulp_type="core.content")
327
+
328
+ ContentArtifact.objects.create(
329
+ content=content1, artifact=artifact1, relative_path="conflicting/file.txt"
330
+ )
331
+ ContentArtifact.objects.create(
332
+ content=content2,
333
+ artifact=artifact2,
334
+ relative_path="conflicting/file.txt", # Same path, different artifact
335
+ )
336
+
337
+ # Create a repository version with both content units
338
+ with repository.new_version() as new_version:
339
+ new_version.add_content(Content.objects.filter(pk__in=[content1.pk, content2.pk]))
340
+
341
+ # This should raise a validation error due to path conflict
342
+ with pytest.raises(ValueError, match="Repository version errors"):
343
+ validate_version_paths(new_version)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pulpcore
3
- Version: 3.85.1
3
+ Version: 3.87.0
4
4
  Summary: Pulp Django Application and Related Modules
5
5
  Author-email: Pulp Team <pulp-list@redhat.com>
6
6
  Project-URL: Homepage, https://pulpproject.org
@@ -45,12 +45,12 @@ Requires-Dist: PyOpenSSL<26.0
45
45
  Requires-Dist: opentelemetry-api<1.37,>=1.27.0
46
46
  Requires-Dist: opentelemetry-sdk<1.37,>=1.27.0
47
47
  Requires-Dist: opentelemetry-exporter-otlp-proto-http<1.37,>=1.27.0
48
- Requires-Dist: protobuf<6.0,>=4.21.1
48
+ Requires-Dist: protobuf<7.0,>=4.21.1
49
49
  Requires-Dist: pulp-glue<0.36,>=0.28.0
50
50
  Requires-Dist: pygtrie<=2.5.0,>=2.5
51
51
  Requires-Dist: psycopg[binary]<3.3,>=3.1.8
52
52
  Requires-Dist: pyparsing<3.3,>=3.1.0
53
- Requires-Dist: python-gnupg<=0.5.4,>=0.5
53
+ Requires-Dist: python-gnupg<0.6,>=0.5.0
54
54
  Requires-Dist: PyYAML<6.1,>=5.1.1
55
55
  Requires-Dist: redis<6.5,>=4.3.0
56
56
  Requires-Dist: tablib<3.6,>=3.5.0