plexus-python-common 1.0.56__tar.gz → 1.0.57__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as published in their respective public registries.
Files changed (93)
  1. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/PKG-INFO +1 -1
  2. plexus_python_common-1.0.56/src/plexus/common/resources/tags/unittest.tagset.yaml → plexus_python_common-1.0.57/src/plexus/common/resources/tags/unittest-1.0.0.tagset.yaml +1 -0
  3. plexus_python_common-1.0.56/src/plexus/common/resources/tags/universal.tagset.yaml → plexus_python_common-1.0.57/src/plexus/common/resources/tags/universal-1.0.0.tagset.yaml +1 -0
  4. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/bagutils.py +2 -2
  5. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/ormutils.py +13 -3
  6. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/s3utils.py +13 -10
  7. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/tagutils.py +194 -59
  8. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/testutils.py +0 -1
  9. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus_python_common.egg-info/PKG-INFO +1 -1
  10. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus_python_common.egg-info/SOURCES.txt +2 -2
  11. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/tagutils_test.py +23 -21
  12. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/.editorconfig +0 -0
  13. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/.github/workflows/pr.yml +0 -0
  14. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/.github/workflows/push.yml +0 -0
  15. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/.gitignore +0 -0
  16. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/MANIFEST.in +0 -0
  17. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/README.md +0 -0
  18. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/VERSION +0 -0
  19. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/pyproject.toml +0 -0
  20. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/jsonutils/dummy.0.jsonl +0 -0
  21. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/jsonutils/dummy.1.jsonl +0 -0
  22. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/jsonutils/dummy.2.jsonl +0 -0
  23. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/0-dummy +0 -0
  24. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/1-dummy +0 -0
  25. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/2-dummy +0 -0
  26. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.0.0.jsonl +0 -0
  27. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.0.0.vol-0.jsonl +0 -0
  28. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.0.jsonl +0 -0
  29. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.1.1.jsonl +0 -0
  30. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.1.1.vol-1.jsonl +0 -0
  31. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.1.jsonl +0 -0
  32. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.2.2.jsonl +0 -0
  33. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.2.2.vol-2.jsonl +0 -0
  34. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.2.jsonl +0 -0
  35. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.csv.part0 +0 -0
  36. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.csv.part1 +0 -0
  37. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.csv.part2 +0 -0
  38. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/pathutils/dummy.txt +0 -0
  39. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.baz/file.bar.baz +0 -0
  40. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.baz/file.foo.bar +0 -0
  41. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.baz/file.foo.baz +0 -0
  42. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.foo/dir.foo.bar/dir.foo.bar.baz/file.foo.bar.baz +0 -0
  43. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.foo/dir.foo.bar/file.bar.baz +0 -0
  44. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.foo/dir.foo.bar/file.foo.bar +0 -0
  45. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.foo/dir.foo.bar/file.foo.baz +0 -0
  46. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.foo/file.bar +0 -0
  47. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.foo/file.baz +0 -0
  48. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils/dir.foo/file.foo +0 -0
  49. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils_archive/archive.compressed.zip +0 -0
  50. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/resources/unittest/s3utils_archive/archive.uncompressed.zip +0 -0
  51. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/setup.cfg +0 -0
  52. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/setup.py +0 -0
  53. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/__init__.py +0 -0
  54. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/carto/OSMFile.py +0 -0
  55. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/carto/OSMNode.py +0 -0
  56. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/carto/OSMTags.py +0 -0
  57. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/carto/OSMWay.py +0 -0
  58. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/carto/__init__.py +0 -0
  59. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/pose.py +0 -0
  60. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/proj.py +0 -0
  61. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/resources/__init__.py +0 -0
  62. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/resources/tags/__init__.py +0 -0
  63. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/__init__.py +0 -0
  64. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/apiutils.py +0 -0
  65. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/config.py +0 -0
  66. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/datautils.py +0 -0
  67. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/dockerutils.py +0 -0
  68. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/jsonutils.py +0 -0
  69. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/pathutils.py +0 -0
  70. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/sqlutils.py +0 -0
  71. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus/common/utils/strutils.py +0 -0
  72. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus_python_common.egg-info/dependency_links.txt +0 -0
  73. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus_python_common.egg-info/not-zip-safe +0 -0
  74. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus_python_common.egg-info/requires.txt +0 -0
  75. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/src/plexus_python_common.egg-info/top_level.txt +0 -0
  76. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/__init__.py +0 -0
  77. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/__init__.py +0 -0
  78. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/carto/__init__.py +0 -0
  79. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/carto/osm_file_test.py +0 -0
  80. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/carto/osm_tags_test.py +0 -0
  81. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/pose_test.py +0 -0
  82. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/proj_test.py +0 -0
  83. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/__init__.py +0 -0
  84. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/bagutils_test.py +0 -0
  85. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/datautils_test.py +0 -0
  86. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/dockerutils_test.py +0 -0
  87. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/jsonutils_test.py +0 -0
  88. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/ormutils_test.py +0 -0
  89. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/pathutils_test.py +0 -0
  90. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/s3utils_test.py +0 -0
  91. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/strutils_test.py +0 -0
  92. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/plexus_tests/common/utils/testutils_test.py +0 -0
  93. {plexus_python_common-1.0.56 → plexus_python_common-1.0.57}/test/testenv.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: plexus-python-common
3
- Version: 1.0.56
3
+ Version: 1.0.57
4
4
  Classifier: Programming Language :: Python :: 3
5
5
  Classifier: Programming Language :: Python :: 3.12
6
6
  Classifier: Programming Language :: Python :: 3.13
@@ -1,4 +1,5 @@
1
1
  $namespace: "unittest"
2
+ $version: "1.0.0"
2
3
  # language=markdown
3
4
  $desc: !markdown |
4
5
  **Unit Test Tagset**
@@ -1,4 +1,5 @@
1
1
  $namespace: "universal"
2
+ $version: "1.0.0"
2
3
  # language=markdown
3
4
  $desc: !markdown |
4
5
  **Universal Tagset**
@@ -217,9 +217,9 @@ class BagWriter(AbstractContextManager, BagIOBase):
217
217
 
218
218
  :param bag_file_path: path to the SQLite DB file to create.
219
219
  :param overwrite: whether to overwrite the SQLite DB file if it already exists. If False and the file already
220
- exists, a FileExistsError is raised. If True, the existing file is deleted and a new one is created.
220
+ exists, a FileExistsError is raised. If True, the existing file is deleted and a new one is created.
221
221
  :param exist_ok: whether to ignore if the SQLite DB file already exists. If False and the file already exists,
222
- a FileExistsError is raised. If True, the existing file is used and no new file is created.
222
+ a FileExistsError is raised. If True, the existing file is used and no new file is created.
223
223
  """
224
224
  super().__init__(bag_file_path)
225
225
 
@@ -267,6 +267,7 @@ class ChangingModelMixinProtocol(SequenceModelMixinProtocol):
267
267
  def make_index_created_at(cls, index_name: str) -> sa.Index:
268
268
  """
269
269
  Helper to create an index on the ``created_at`` field with the given index name.
270
+
270
271
  :param index_name: Name of the index to create.
271
272
  :return: The created SQLAlchemy Index object.
272
273
  """
@@ -282,6 +283,7 @@ class SnapshotModelMixinProtocol(SequenceModelMixinProtocol):
282
283
  def make_index_created_at_expired_at(cls, index_name: str) -> sa.Index:
283
284
  """
284
285
  Helper to create an index on the ``created_at`` and ``expired_at`` fields with the given index name.
286
+
285
287
  :param index_name: Name of the index to create.
286
288
  :return: The created SQLAlchemy Index object.
287
289
  """
@@ -292,6 +294,7 @@ class SnapshotModelMixinProtocol(SequenceModelMixinProtocol):
292
294
  """
293
295
  Helper to create a unique index on the ``record_sqn`` field for active records (where ``expired_at`` is NULL).
294
296
  This ensures that there is only one active snapshot per record at any given time.
297
+
295
298
  :param index_name: Name of the index to create.
296
299
  :return: The created SQLAlchemy Index object.
297
300
  """
@@ -302,6 +305,7 @@ class SnapshotModelMixinProtocol(SequenceModelMixinProtocol):
302
305
  """
303
306
  Helper to create a non-unique index on the specified fields for active records (where ``expired_at`` is NULL).
304
307
  This allows efficient querying of active snapshots based on the specified fields.
308
+
305
309
  :param index_name: Name of the index to create.
306
310
  :param fields: Fields to include in the index.
307
311
  :return: The created SQLAlchemy Index object.
@@ -314,6 +318,7 @@ class SnapshotModelMixinProtocol(SequenceModelMixinProtocol):
314
318
  Helper to create a unique index on the specified fields for active records (where ``expired_at`` is NULL).
315
319
  This ensures that there is only one active snapshot per combination of the specified fields at any given
316
320
  time.
321
+
317
322
  :param index_name: Name of the index to create.
318
323
  :param fields: Fields to include in the unique index.
319
324
  :return: The created SQLAlchemy Index object.
@@ -333,6 +338,7 @@ class RevisionModelMixinProtocol(SequenceModelMixinProtocol):
333
338
  """
334
339
  Helper to create an index on the ``created_at``, ``updated_at``, and ``expired_at`` fields with the given
335
340
  index name.
341
+
336
342
  :param index_name: Name of the index to create.
337
343
  :return: The created SQLAlchemy Index object.
338
344
  """
@@ -343,6 +349,7 @@ class RevisionModelMixinProtocol(SequenceModelMixinProtocol):
343
349
  """
344
350
  Helper to create a unique index on the ``record_sqn`` and ``revision`` fields.
345
351
  This ensures that each revision number is unique per record.
352
+
346
353
  :param index_name: Name of the index to create.
347
354
  :return: The created SQLAlchemy Index object.
348
355
  """
@@ -353,6 +360,7 @@ class RevisionModelMixinProtocol(SequenceModelMixinProtocol):
353
360
  """
354
361
  Helper to create a unique index on the ``record_sqn`` field for active records (where ``expired_at`` is NULL).
355
362
  This ensures that there is only one active revision per record at any given time.
363
+
356
364
  :param index_name: Name of the index to create.
357
365
  :return: The created SQLAlchemy Index object.
358
366
  """
@@ -363,6 +371,7 @@ class RevisionModelMixinProtocol(SequenceModelMixinProtocol):
363
371
  """
364
372
  Helper to create a non-unique index on the specified fields for active records (where ``expired_at`` is NULL).
365
373
  This allows efficient querying of active revisions based on the specified fields.
374
+
366
375
  :param index_name: Name of the index to create.
367
376
  :param fields: Fields to include in the index.
368
377
  :return: The created SQLAlchemy Index object.
@@ -375,6 +384,7 @@ class RevisionModelMixinProtocol(SequenceModelMixinProtocol):
375
384
  Helper to create a unique index on the specified fields for active records (where ``expired_at`` is NULL).
376
385
  This ensures that there is only one active revision per combination of the specified fields at any given
377
386
  time.
387
+
378
388
  :param index_name: Name of the index to create.
379
389
  :param fields: Fields to include in the unique index.
380
390
  :return: The created SQLAlchemy Index object.
@@ -465,7 +475,7 @@ def make_changing_model_mixin(dialect: str | None = None) -> type[ChangingModelM
465
475
 
466
476
  :param dialect: The database dialect to determine the column types for the fields.
467
477
  :return: A mixin class that can be used with SQLModel models to add the common fields and validation logic for
468
- updatable records.
478
+ updatable records.
469
479
  """
470
480
 
471
481
  class ModelMixin(SQLModel):
@@ -527,7 +537,7 @@ def make_snapshot_model_mixin(dialect: str | None = None) -> type[SnapshotModelM
527
537
 
528
538
  :param dialect: The database dialect to determine the column types for the fields.
529
539
  :return: A mixin class that can be used with SQLModel models to add the common fields and validation logic for
530
- record snapshots.
540
+ record snapshots.
531
541
  """
532
542
 
533
543
  class ModelMixin(SQLModel):
@@ -622,7 +632,7 @@ def make_revision_model_mixin(dialect: str | None = None) -> type[RevisionModelM
622
632
 
623
633
  :param dialect: The database dialect to determine the column types for the fields.
624
634
  :return: A mixin class that can be used with SQLModel models to add the common fields and validation logic for
625
- record revisions.
635
+ record revisions.
626
636
  """
627
637
 
628
638
  class ModelMixin(SQLModel):
@@ -549,15 +549,16 @@ def s3_archive_member_tree(
549
549
  Directory members are recognized by names ending with a trailing slash ("/").
550
550
 
551
551
  Example:
552
- {
553
- "dir1/": (ZipInfo, {
554
- "file1.txt": (ZipInfo, None),
555
- "subdir/": (ZipInfo, {
556
- "file2.txt": (ZipInfo, None)
557
- })
558
- }),
559
- "file3.txt": (ZipInfo, None)
560
- }
552
+
553
+ >>> {
554
+ ... "dir1/": (ZipInfo, {
555
+ ... "file1.txt": (ZipInfo, None),
556
+ ... "subdir/": (ZipInfo, {
557
+ ... "file2.txt": (ZipInfo, None)
558
+ ... })
559
+ ... }),
560
+ ... "file3.txt": (ZipInfo, None)
561
+ ... }
561
562
 
562
563
  :param client: An instance of ``S3Client``.
563
564
  :param bucket: Bucket name.
@@ -602,7 +603,9 @@ def s3_archive_list_files(
602
603
  under that directory will be included in the results.
603
604
 
604
605
  Example usage:
605
- >>> archive_size, member_zip_infos, missed_members = s3_archive_list_files(client, bucket, key, members=["file1.txt", "dir1/"])
606
+
607
+ >>> archive_size, member_zip_infos, missed_members = s3_archive_list_files(
608
+ ... client, bucket, key, members=["file1.txt", "dir1/"])
606
609
  >>> for info in member_zip_infos:
607
610
  ... print(info.filename, info.file_size)
608
611
  >>> if missed_members:
@@ -71,6 +71,9 @@ class Tag(object):
71
71
  def parent_tag_name(self) -> str | None:
72
72
  return head_or_none(self.name.rsplit(":", 1))
73
73
 
74
+ def unbind(self) -> Self:
75
+ return self
76
+
74
77
  def validate(self, props: JsonType | None, *, raise_on_error: bool = False) -> bool:
75
78
  if self.schema is None or props is None:
76
79
  return True
@@ -83,18 +86,30 @@ class Tag(object):
83
86
  return False
84
87
 
85
88
 
89
+ @dataclasses.dataclass(frozen=True, eq=True, order=True)
90
+ class BoundTag(Tag):
91
+ namespace: str | None = None
92
+ version: str | None = None
93
+
94
+ def unbind(self) -> Tag:
95
+ return Tag(name=self.name, desc=self.desc, schema=self.schema)
96
+
97
+
86
98
  class Tagset(Sequence[Tag], Mapping[str, Tag]):
87
- def __init__(self, namespace: str, desc: str | RichDesc) -> None:
99
+ def __init__(self, namespace: str, version: str, desc: str | RichDesc) -> None:
88
100
  super().__init__()
89
101
  self.namespace = namespace
102
+ self.version = version
90
103
  self.desc = desc
91
104
  self.tags: list[Tag] = []
92
105
  self.tags_dict: dict[str, Tag] = {}
93
106
  self.tags_tree: dicttree[str, Tag] = {}
94
107
 
95
- def __contains__(self, item: str | Tag) -> bool:
96
- tag_name = item.name if isinstance(item, Tag) else item
97
- return tag_name in self.tags_dict
108
+ def __contains__(self, item: str | Tag | BoundTag | None) -> bool:
109
+ try:
110
+ return self.get(item) is not None
111
+ except ValueError:
112
+ return False
98
113
 
99
114
  def __len__(self) -> int:
100
115
  return len(self.tags)
@@ -111,10 +126,22 @@ class Tagset(Sequence[Tag], Mapping[str, Tag]):
111
126
  def items(self):
112
127
  return self.tags_dict.items()
113
128
 
114
- def get(self, item: str | Tag) -> Tag | None:
129
+ def get(self, item: str | Tag | BoundTag) -> Tag | None:
130
+ if item is None:
131
+ return None
132
+ if isinstance(item, BoundTag):
133
+ if item.namespace != self.namespace or item.version != self.version:
134
+ raise ValueError(f"tag bind '{item}' does not belong to this tagset")
115
135
  tag_name = item.name if isinstance(item, Tag) else item
116
136
  return self.tags_dict.get(tag_name)
117
137
 
138
+ def bind(self, tag: Tag) -> BoundTag:
139
+ return BoundTag(tag.name, tag.desc, tag.schema, self.namespace, self.version)
140
+
141
+ def get_bound(self, item: str | Tag | BoundTag) -> BoundTag | None:
142
+ tag = self.get(item)
143
+ return None if tag is None else self.bind(tag)
144
+
118
145
  def clone(self) -> Self:
119
146
  return clone_tagset(self)
120
147
 
@@ -128,22 +155,18 @@ class Tagset(Sequence[Tag], Mapping[str, Tag]):
128
155
  def tag_names(self) -> list[str]:
129
156
  return list(self.tags_dict.keys())
130
157
 
131
- def child_tags(self, parent: str | Tag) -> list[Tag]:
158
+ def child_tags(self, parent: str | Tag | BoundTag) -> list[BoundTag]:
159
+ parent = self.get(parent)
132
160
  if parent is None:
133
161
  return []
134
- if isinstance(parent, str):
135
- return self.child_tags(self.get(parent))
136
-
137
162
  subtree = dicttree_subtree(self.tags_tree, parent.tag_parts)
138
- return list(dicttree_children(subtree)) if subtree else []
163
+ return list(self.bind(tag) for tag in dicttree_children(subtree)) if subtree else []
139
164
 
140
- def parent_tags(self, child: str | Tag) -> list[Tag]:
165
+ def parent_tags(self, child: str | Tag | BoundTag) -> list[BoundTag]:
166
+ child = self.get(child)
141
167
  if child is None:
142
168
  return []
143
- if isinstance(child, str):
144
- return self.parent_tags(self.get(child))
145
-
146
- return list(dicttree_lineage(self.tags_tree, child.tag_parts[:-1]))
169
+ return list(self.bind(tag) for tag in dicttree_lineage(self.tags_tree, child.tag_parts[:-1]))
147
170
 
148
171
  def validate(self, tag_name: str, props: JsonType | None, *, raise_on_error: bool = False) -> bool:
149
172
  tag = self.get(tag_name)
@@ -161,9 +184,14 @@ class MutableTagset(Tagset):
161
184
  def frozen(self):
162
185
  return clone_tagset(self)
163
186
 
164
- def add(self, tag: Tag) -> Self:
187
+ def add(self, tag: Tag | BoundTag) -> Self:
165
188
  if tag.name in self.tags_dict:
166
189
  raise ValueError(f"duplicate tag name '{tag.name}'")
190
+ if isinstance(tag, BoundTag):
191
+ if tag.namespace != self.namespace or tag.version != self.version:
192
+ raise ValueError(f"tag bind '{tag}' does not belong to this tagset")
193
+
194
+ tag = tag.unbind()
167
195
 
168
196
  self.tags.append(tag)
169
197
  self.tags_dict[tag.name] = tag
@@ -187,7 +215,7 @@ class MutableTagset(Tagset):
187
215
 
188
216
  def clone_tagset(tagset: Tagset) -> Tagset:
189
217
  tagset = clone_mutable_tagset(tagset)
190
- new_tagset = Tagset(namespace=tagset.namespace, desc=tagset.desc)
218
+ new_tagset = Tagset(namespace=tagset.namespace, version=tagset.version, desc=tagset.desc)
191
219
  new_tagset.tags = tagset.tags
192
220
  new_tagset.tags_dict = tagset.tags_dict
193
221
  new_tagset.tags_tree = tagset.tags_tree
@@ -195,7 +223,7 @@ def clone_tagset(tagset: Tagset) -> Tagset:
195
223
 
196
224
 
197
225
  def clone_mutable_tagset(tagset: Tagset) -> MutableTagset:
198
- new_tagset = MutableTagset(namespace=tagset.namespace, desc=tagset.desc)
226
+ new_tagset = MutableTagset(namespace=tagset.namespace, version=tagset.version, desc=tagset.desc)
199
227
  for tag in tagset.tags:
200
228
  new_tagset.add(tag)
201
229
  return new_tagset
@@ -250,6 +278,14 @@ def populate_tagset(tagset_spec: JsonObject) -> Tagset:
250
278
  except ValueError as e:
251
279
  raise ValueError(f"tagset namespace '{namespace}' is not in snake case") from e
252
280
 
281
+ version = tagset_spec.get("$version")
282
+ if version is None:
283
+ raise ValueError("missing '$version' in tagset spec")
284
+ try:
285
+ validate_semver(version)
286
+ except ValueError as e:
287
+ raise ValueError(f"tagset version '{version}' is not a valid semantic version") from e
288
+
253
289
  desc = tagset_spec.get("$desc")
254
290
  if desc is None:
255
291
  raise ValueError("missing '$desc' in tagset spec")
@@ -260,7 +296,7 @@ def populate_tagset(tagset_spec: JsonObject) -> Tagset:
260
296
  if tags is None:
261
297
  raise ValueError("missing '$tags' in tagset spec")
262
298
 
263
- tagset = MutableTagset(namespace=namespace, desc=desc)
299
+ tagset = MutableTagset(namespace=namespace, version=version, desc=desc)
264
300
 
265
301
  for tag in validate_and_collect("", tags):
266
302
  tagset.add(tag)
@@ -270,10 +306,26 @@ def populate_tagset(tagset_spec: JsonObject) -> Tagset:
270
306
 
271
307
  @singleton
272
308
  def predefined_tagsets() -> dict[str, Tagset]:
309
+ from packaging.version import Version
310
+
273
311
  tagsets: dict[str, Tagset] = {}
312
+ latest_targets: dict[str, tuple[Version, Tagset]] = {}
313
+
274
314
  for _, tagset_spec in predefined_tagset_specs():
275
315
  tagset = populate_tagset(tagset_spec)
316
+ version = Version(tagset.version)
317
+ if tagset.namespace in latest_targets:
318
+ latest_version, _ = latest_targets[tagset.namespace]
319
+ if version > latest_version:
320
+ latest_targets[tagset.namespace] = (version, tagset)
321
+ else:
322
+ latest_targets[tagset.namespace] = (version, tagset)
323
+
324
+ tagsets[f"{tagset.namespace}:{tagset.version}"] = tagset
325
+
326
+ for _, tagset in latest_targets.values():
276
327
  tagsets[tagset.namespace] = tagset
328
+
277
329
  return tagsets
278
330
 
279
331
 
@@ -289,7 +341,7 @@ def render_tagset_markdown_readme(tagset: Tagset) -> str:
289
341
 
290
342
  template_str = textwrap.dedent(
291
343
  """
292
- # Tagset {{ tagset.namespace }}
344
+ # Tagset {{ tagset.namespace }} (Version {{ tagset.version }})
293
345
 
294
346
  {{ tagset.desc | render_desc }}
295
347
 
@@ -401,6 +453,16 @@ class TagRecord(BaseModel):
401
453
  sa_column=sa.Column(sa_sqlite.TIMESTAMP, nullable=False),
402
454
  description="End datetime of the tag record",
403
455
  )
456
+ tagset_namespace: str | None = Field(
457
+ sa_column=sa.Column(sa_sqlite.VARCHAR(128), nullable=True),
458
+ default=None,
459
+ description="Namespace of the tagset that the tag belongs to",
460
+ )
461
+ tagset_version: str | None = Field(
462
+ sa_column=sa.Column(sa_sqlite.VARCHAR(64), nullable=True),
463
+ default=None,
464
+ description="Version of the tagset that the tag belongs to",
465
+ )
404
466
  tag: str = Field(
405
467
  sa_column=sa.Column(sa_sqlite.VARCHAR(256), nullable=False),
406
468
  description="Tag name",
@@ -423,6 +485,20 @@ class TagRecord(BaseModel):
423
485
  validate_dt_timezone(v, allow_naive=True)
424
486
  return v
425
487
 
488
+ @pdt.field_validator("tagset_namespace", mode="after")
489
+ @classmethod
490
+ def validate_tagset_namespace(cls, v: str | None) -> str | None:
491
+ if v is not None:
492
+ validate_snake_case(v)
493
+ return v
494
+
495
+ @pdt.field_validator("tagset_version", mode="after")
496
+ @classmethod
497
+ def validate_tagset_version(cls, v: str | None) -> str | None:
498
+ if v is not None:
499
+ validate_semver(v)
500
+ return v
501
+
426
502
  @pdt.model_validator(mode="after")
427
503
  def validate_begin_dt_end_dt(self) -> Self:
428
504
  if self.begin_dt > self.end_dt:
@@ -458,6 +534,8 @@ if typing.TYPE_CHECKING:
458
534
  target_sqn: sa_orm.Mapped[int] = ...
459
535
  begin_dt: sa_orm.Mapped[datetime.datetime] = ...
460
536
  end_dt: sa_orm.Mapped[datetime.datetime] = ...
537
+ tagset_namespace: sa_orm.Mapped[str | None] = ...
538
+ tagset_version: sa_orm.Mapped[str | None] = ...
461
539
  tag: sa_orm.Mapped[str] = ...
462
540
  props: sa_orm.Mapped[JsonType | None] = ...
463
541
 
@@ -591,7 +669,9 @@ class TagCache(object):
591
669
  self,
592
670
  begin_dt: datetime.datetime | None = None,
593
671
  end_dt: datetime.datetime | None = None,
594
- tag_pattern: str | None = None,
672
+ tagset_namespace: str | None = None,
673
+ tagset_version: str | None = None,
674
+ tag_prefix: str | None = None,
595
675
  *,
596
676
  tagsets: Sequence[Tagset] | None = None,
597
677
  tagset_inverted: bool = False,
@@ -602,11 +682,12 @@ class TagCache(object):
602
682
 
603
683
  :param begin_dt: Filter by begin time (inclusive)
604
684
  :param end_dt: Filter by end time (inclusive)
605
- :param tag_pattern: Filter by tag name pattern (SQL LIKE syntax, e.g. "dummy_tag:%" to match all tags starting
606
- with "dummy_tag:")
685
+ :param tagset_namespace: Filter by tagset namespace (exact match)
686
+ :param tagset_version: Filter by tagset version (exact match)
687
+ :param tag_prefix: Filter by tag name prefix, e.g. "dummy_tag:" to match all tags starting with "dummy_tag:"
607
688
  :param tagsets: Filter by tagsets (match tags that are in any of the specified tagsets)
608
689
  :param tagset_inverted: Whether to invert the tagset filter (match tags that are NOT in any of the specified
609
- tagsets)
690
+ tagsets)
610
691
  :param batch_size: Number of records to fetch per batch from the database (for memory efficiency)
611
692
  :return: Generator of ``TagRecordTable`` instances that match the filters
612
693
  """
@@ -616,8 +697,12 @@ class TagCache(object):
616
697
  query = query.filter(TagRecordTable.end_dt >= begin_dt)
617
698
  if end_dt:
618
699
  query = query.filter(TagRecordTable.begin_dt <= end_dt)
619
- if tag_pattern:
620
- query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_pattern)}%", escape="\\"))
700
+ if tagset_namespace:
701
+ query = query.filter(TagRecordTable.tagset_namespace == tagset_namespace)
702
+ if tagset_version:
703
+ query = query.filter(TagRecordTable.tagset_version == tagset_version)
704
+ if tag_prefix:
705
+ query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_prefix)}%", escape="\\"))
621
706
  if tagsets:
622
707
  if tagset_inverted:
623
708
  query = query.filter(
@@ -633,7 +718,9 @@ class TagCache(object):
633
718
  self,
634
719
  begin_dt: datetime.datetime | None = None,
635
720
  end_dt: datetime.datetime | None = None,
636
- tag_pattern: str | None = None,
721
+ tagset_namespace: str | None = None,
722
+ tagset_version: str | None = None,
723
+ tag_prefix: str | None = None,
637
724
  *,
638
725
  target_identifier: str | None = None,
639
726
  target_tagger_name: str | None = None,
@@ -650,8 +737,9 @@ class TagCache(object):
650
737
 
651
738
  :param begin_dt: Filter by begin time (inclusive)
652
739
  :param end_dt: Filter by end time (inclusive)
653
- :param tag_pattern: Filter by tag name pattern (SQL LIKE syntax, e.g. "dummy_tag:%" to match all tags starting
654
- with "dummy_tag:")
740
+ :param tagset_namespace: Filter by tagset namespace (exact match)
741
+ :param tagset_version: Filter by tagset version (exact match)
742
+ :param tag_prefix: Filter by tag name prefix, e.g. "dummy_tag:" to match all tags starting with "dummy_tag:"
655
743
  :param target_identifier: Filter by target identifier (exact match)
656
744
  :param target_tagger_name: Filter by target tagger name (exact match)
657
745
  :param target_tagger_version: Filter by target tagger version (exact match)
@@ -660,7 +748,7 @@ class TagCache(object):
660
748
  :param target_end_dt: Filter by target end time (inclusive)
661
749
  :param tagsets: Filter by tagsets (match tags that are in any of the specified tagsets)
662
750
  :param tagset_inverted: Whether to invert the tagset filter (match tags that are NOT in any of the specified
663
- tagsets)
751
+ tagsets)
664
752
  :param batch_size: Number of records to fetch per batch from the database (for memory efficiency)
665
753
  :return: Generator of ``TagRecordTable`` instances that match the filters
666
754
  """
@@ -674,8 +762,12 @@ class TagCache(object):
674
762
  query = query.filter(TagRecordTable.end_dt >= begin_dt)
675
763
  if end_dt:
676
764
  query = query.filter(TagRecordTable.begin_dt <= end_dt)
677
- if tag_pattern:
678
- query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_pattern)}%", escape="\\"))
765
+ if tagset_namespace:
766
+ query = query.filter(TagRecordTable.tagset_namespace == tagset_namespace)
767
+ if tagset_version:
768
+ query = query.filter(TagRecordTable.tagset_version == tagset_version)
769
+ if tag_prefix:
770
+ query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_prefix)}%", escape="\\"))
679
771
  if target_identifier:
680
772
  query = query.filter(TagTargetTable.identifier == target_identifier)
681
773
  if target_tagger_name:
@@ -703,7 +795,9 @@ class TagCache(object):
703
795
  self,
704
796
  begin_dt: datetime.datetime | None = None,
705
797
  end_dt: datetime.datetime | None = None,
706
- tag_pattern: str | None = None,
798
+ tagset_namespace: str | None = None,
799
+ tagset_version: str | None = None,
800
+ tag_prefix: str | None = None,
707
801
  *,
708
802
  tagsets: Sequence[Tagset] | None = None,
709
803
  tagset_inverted: bool = False,
@@ -713,11 +807,12 @@ class TagCache(object):
713
807
 
714
808
  :param begin_dt: Filter by begin time (inclusive)
715
809
  :param end_dt: Filter by end time (inclusive)
716
- :param tag_pattern: Filter by tag name pattern (SQL LIKE syntax, e.g. "dummy_tag:%" to match all tags starting
717
- with "dummy_tag:")
810
+ :param tagset_namespace: Filter by tagset namespace (exact match)
811
+ :param tagset_version: Filter by tagset version (exact match)
812
+ :param tag_prefix: Filter by tag name prefix, e.g. "dummy_tag:" to match all tags starting with "dummy_tag:"
718
813
  :param tagsets: Filter by tagsets (match tags that are in any of the specified tagsets)
719
814
  :param tagset_inverted: Whether to invert the tagset filter (match tags that are NOT in any of the specified
720
- tagsets)
815
+ tagsets)
721
816
  """
722
817
  with self.make_session() as session:
723
818
  query = session.query(TagRecordTable)
@@ -725,8 +820,12 @@ class TagCache(object):
725
820
  query = query.filter(TagRecordTable.end_dt >= begin_dt)
726
821
  if end_dt:
727
822
  query = query.filter(TagRecordTable.begin_dt <= end_dt)
728
- if tag_pattern:
729
- query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_pattern)}%", escape="\\"))
823
+ if tagset_namespace:
824
+ query = query.filter(TagRecordTable.tagset_namespace == tagset_namespace)
825
+ if tagset_version:
826
+ query = query.filter(TagRecordTable.tagset_version == tagset_version)
827
+ if tag_prefix:
828
+ query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_prefix)}%", escape="\\"))
730
829
  if tagsets:
731
830
  if tagset_inverted:
732
831
  query = query.filter(
@@ -803,7 +902,9 @@ class TargetedTagCache(object):
803
902
  self,
804
903
  begin_dt: datetime.datetime | None = None,
805
904
  end_dt: datetime.datetime | None = None,
806
- tag_pattern: str | None = None,
905
+ tagset_namespace: str | None = None,
906
+ tagset_version: str | None = None,
907
+ tag_prefix: str | None = None,
807
908
  *,
808
909
  tagsets: Sequence[Tagset] | None = None,
809
910
  tagset_inverted: bool = False,
@@ -814,11 +915,12 @@ class TargetedTagCache(object):
814
915
 
815
916
  :param begin_dt: Filter by begin time (inclusive)
816
917
  :param end_dt: Filter by end time (inclusive)
817
- :param tag_pattern: Filter by tag name pattern (SQL LIKE syntax, e.g. "dummy_tag:%" to match all tags starting
818
- with "dummy_tag:")
918
+ :param tagset_namespace: Filter by tagset namespace (exact match)
919
+ :param tagset_version: Filter by tagset version (exact match)
920
+ :param tag_prefix: Filter by tag name prefix, e.g. "dummy_tag:" to match all tags starting with "dummy_tag:"
819
921
  :param tagsets: Filter by tagsets (match tags that are in any of the specified tagsets)
820
922
  :param tagset_inverted: Whether to invert the tagset filter (match tags that are NOT in any of the specified
821
- tagsets)
923
+ tagsets)
822
924
  :param batch_size: Number of records to fetch per batch from the database (for memory efficiency)
823
925
  :return: Generator of ``TagRecordTable`` instances that match the filters
824
926
  """
@@ -829,8 +931,12 @@ class TargetedTagCache(object):
829
931
  query = query.filter(TagRecordTable.end_dt >= begin_dt)
830
932
  if end_dt:
831
933
  query = query.filter(TagRecordTable.begin_dt <= end_dt)
832
- if tag_pattern:
833
- query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_pattern)}%", escape="\\"))
934
+ if tagset_namespace:
935
+ query = query.filter(TagRecordTable.tagset_namespace == tagset_namespace)
936
+ if tagset_version:
937
+ query = query.filter(TagRecordTable.tagset_version == tagset_version)
938
+ if tag_prefix:
939
+ query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_prefix)}%", escape="\\"))
834
940
  if tagsets:
835
941
  if tagset_inverted:
836
942
  query = query.filter(
@@ -846,17 +952,24 @@ class TargetedTagCache(object):
846
952
  self,
847
953
  begin_dt: datetime.datetime | None,
848
954
  end_dt: datetime.datetime | None,
849
- tag: str | Tag,
955
+ tag: str | Tag | BoundTag,
850
956
  props: JsonType | None = None,
957
+ tagset_namespace: str | None = None,
958
+ tagset_version: str | None = None,
851
959
  ) -> Self:
852
960
  """
853
- Add a tag record to the cache for the specified time range. If begin_dt or end_dt is None, it will default to
854
- the target's begin_dt or end_dt respectively.
961
+ Add a tag record to the cache for the specified time range. If ``begin_dt`` or ``end_dt`` is None, it will
962
+ default to the target's ``begin_dt`` or ``end_dt`` respectively.
855
963
 
856
964
  :param begin_dt: Begin datetime of the tag record
857
965
  :param end_dt: End datetime of the tag record
858
- :param tag: Tag name or Tag instance to be added (if Tag instance is provided, its name will be used)
966
+ :param tag: Tag name or ``Tag``/``BoundTag`` instance to be added. If ``Tag``/``BoundTag`` instance is provided,
967
+ its name will be used.
859
968
  :param props: Additional properties of the tag record in JSON format (optional)
969
+ :param tagset_namespace: Namespace of the tagset that the tag belongs to. If the ``tag`` parameter is a
970
+ ``BoundTag`` instance, this parameter will be ignored and the namespace from the instance will be used.
971
+ :param tagset_version: Version of the tagset that the tag belongs to. If the ``tag`` parameter is a
972
+ ``BoundTag`` instance, this parameter will be ignored and the version from the instance will be used.
860
973
  :return: Self instance for chaining
861
974
  """
862
975
  with self.make_session() as session:
@@ -864,6 +977,8 @@ class TargetedTagCache(object):
864
977
  target_sqn=self.target_info.sqn,
865
978
  begin_dt=begin_dt or self.target_info.begin_dt,
866
979
  end_dt=end_dt or self.target_info.end_dt,
980
+ tagset_namespace=tag.namespace if isinstance(tag, BoundTag) else tagset_namespace,
981
+ tagset_version=tag.version if isinstance(tag, BoundTag) else tagset_version,
867
982
  tag=tag.name if isinstance(tag, Tag) else tag,
868
983
  props=props,
869
984
  )
@@ -872,18 +987,31 @@ class TargetedTagCache(object):
872
987
 
873
988
  return self
874
989
 
875
- def add_tag(self, tag: str | Tag, props: JsonType | None = None) -> Self:
990
+ def add_tag(
991
+ self,
992
+ tag: str | Tag | BoundTag,
993
+ props: JsonType | None = None,
994
+ tagset_namespace: str | None = None,
995
+ tagset_version: str | None = None,
996
+ ) -> Self:
876
997
  """
877
998
  Add a tag record to the cache for the entire target range.
878
999
 
879
- :param tag: Tag name or Tag instance to be added (if Tag instance is provided, its name will be used)
1000
+ :param tag: Tag name or ``Tag``/``BoundTag`` instance to be added. If ``Tag``/``BoundTag`` instance is provided,
1001
+ its name will be used.
880
1002
  :param props: Additional properties of the tag record in JSON format (optional)
1003
+ :param tagset_namespace: Namespace of the tagset that the tag belongs to. If the ``tag`` parameter is a
1004
+ ``BoundTag`` instance, this parameter will be ignored and the namespace from the instance will be used.
1005
+ :param tagset_version: Version of the tagset that the tag belongs to. If the ``tag`` parameter is a
1006
+ ``BoundTag`` instance, this parameter will be ignored and the version from the instance will be used.
881
1007
  :return: Self instance for chaining
882
1008
  """
883
1009
  return self.add_ranged_tag(
884
1010
  begin_dt=self.target_info.begin_dt,
885
1011
  end_dt=self.target_info.end_dt,
886
1012
  tag=tag,
1013
+ tagset_namespace=tagset_namespace,
1014
+ tagset_version=tagset_version,
887
1015
  props=props,
888
1016
  )
889
1017
 
@@ -891,7 +1019,9 @@ class TargetedTagCache(object):
891
1019
  self,
892
1020
  begin_dt: datetime.datetime | None = None,
893
1021
  end_dt: datetime.datetime | None = None,
894
- tag_pattern: str | None = None,
1022
+ tagset_namespace: str | None = None,
1023
+ tagset_version: str | None = None,
1024
+ tag_prefix: str | None = None,
895
1025
  *,
896
1026
  tagsets: Sequence[Tagset] | None = None,
897
1027
  tagset_inverted: bool = False,
@@ -901,11 +1031,12 @@ class TargetedTagCache(object):
901
1031
 
902
1032
  :param begin_dt: Filter by begin time (inclusive)
903
1033
  :param end_dt: Filter by end time (inclusive)
904
- :param tag_pattern: Filter by tag name pattern (SQL LIKE syntax, e.g. "dummy_tag:%" to match all tags starting
905
- with "dummy_tag:")
1034
+ :param tagset_namespace: Filter by tagset namespace (exact match)
1035
+ :param tagset_version: Filter by tagset version (exact match)
1036
+ :param tag_prefix: Filter by tag name prefix, e.g. "dummy_tag:" to match all tags starting with "dummy_tag:"
906
1037
  :param tagsets: Filter by tagsets (match tags that are in any of the specified tagsets)
907
1038
  :param tagset_inverted: Whether to invert the tagset filter (match tags that are NOT in any of the specified
908
- tagsets)
1039
+ tagsets)
909
1040
  :return: Self instance for chaining
910
1041
  """
911
1042
  with self.make_session() as session:
@@ -914,8 +1045,12 @@ class TargetedTagCache(object):
914
1045
  query = query.filter(TagRecordTable.end_dt >= begin_dt)
915
1046
  if end_dt:
916
1047
  query = query.filter(TagRecordTable.begin_dt <= end_dt)
917
- if tag_pattern:
918
- query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_pattern)}%", escape="\\"))
1048
+ if tagset_namespace:
1049
+ query = query.filter(TagRecordTable.tagset_namespace == tagset_namespace)
1050
+ if tagset_version:
1051
+ query = query.filter(TagRecordTable.tagset_version == tagset_version)
1052
+ if tag_prefix:
1053
+ query = query.filter(TagRecordTable.tag.like(f"{escape_sql_like(tag_prefix)}%", escape="\\"))
919
1054
  if tagsets:
920
1055
  if tagset_inverted:
921
1056
  query = query.filter(
@@ -938,10 +1073,10 @@ def tag_cache(*, identifier: str | None = None, file_path: str | None = None) ->
938
1073
  string and return a ``TagCache`` instance associated with a file path derived from the identifier.
939
1074
 
940
1075
  :param identifier: An optional string identifier for the tag cache. If provided, it must be in snake case format
941
- and will be used to derive the file path for the tag cache. If not provided, a default file path will be used.
1076
+ and will be used to derive the file path for the tag cache. If not provided, a default file path will be used.
942
1077
  :param file_path: An optional file path for the tag cache. If provided, it will be used directly. If not provided,
943
- the file path will be derived from the identifier if it is provided, or a default file path will be used if the
944
- identifier is not provided. Note that both 'identifier' and 'file_path' cannot be specified at the same time.
1078
+ the file path will be derived from the identifier if it is provided, or a default file path will be used if the
1079
+ identifier is not provided. Note that both 'identifier' and 'file_path' cannot be specified at the same time.
945
1080
  :return: A ``TagCache`` instance associated with the specified or default file path.
946
1081
  """
947
1082
  if identifier is not None and file_path is not None:
@@ -80,7 +80,6 @@ def patched_value_factory[T](
80
80
 
81
81
  :param app: FastAPI application instance.
82
82
  :param value_factory: Original value factory to be overridden.
83
- :return:
84
83
  """
85
84
 
86
85
  class Patcher(object):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: plexus-python-common
3
- Version: 1.0.56
3
+ Version: 1.0.57
4
4
  Classifier: Programming Language :: Python :: 3
5
5
  Classifier: Programming Language :: Python :: 3.12
6
6
  Classifier: Programming Language :: Python :: 3.13
@@ -48,8 +48,8 @@ src/plexus/common/carto/OSMWay.py
48
48
  src/plexus/common/carto/__init__.py
49
49
  src/plexus/common/resources/__init__.py
50
50
  src/plexus/common/resources/tags/__init__.py
51
- src/plexus/common/resources/tags/unittest.tagset.yaml
52
- src/plexus/common/resources/tags/universal.tagset.yaml
51
+ src/plexus/common/resources/tags/unittest-1.0.0.tagset.yaml
52
+ src/plexus/common/resources/tags/universal-1.0.0.tagset.yaml
53
53
  src/plexus/common/utils/__init__.py
54
54
  src/plexus/common/utils/apiutils.py
55
55
  src/plexus/common/utils/bagutils.py
@@ -25,14 +25,16 @@ class TagUtilsTest(unittest.TestCase):
25
25
  for tag in tagset:
26
26
  self.assertIn(tag, tagset)
27
27
  self.assertIn(tag.name, tagset)
28
- self.assertEqual(tag, tagset.get(tag))
29
- self.assertEqual(tag, tagset.get(tag.name))
28
+ self.assertEqual(tag.name, tagset.get(tag).name)
29
+ self.assertEqual(tag.name, tagset.get(tag.name).name)
30
30
 
31
31
  markdown = render_tagset_markdown_readme(tagset)
32
32
  self.assertIsInstance(markdown, str)
33
33
 
34
34
  print(markdown)
35
35
 
36
+ self.assertEqual(predefined_tagsets().get("unittest"), predefined_tagsets().get("unittest:1.0.0"))
37
+
36
38
  data_tag_validate = [
37
39
  ("level_1_tag", {"dummy_property": "dummy_value"}, True,),
38
40
 
@@ -71,7 +73,7 @@ class TagUtilsTest(unittest.TestCase):
71
73
 
72
74
  @ddt.idata(data_tag_validate)
73
75
  @ddt.unpack
74
- def test_tag_validate(self, tag: str, props: JsonObject, should_pass: bool):
76
+ def test_tagset_validate(self, tag: str, props: JsonObject, should_pass: bool):
75
77
  tagset = predefined_tagsets().get("unittest")
76
78
 
77
79
  if should_pass:
@@ -82,7 +84,7 @@ class TagUtilsTest(unittest.TestCase):
82
84
  tagset.validate(tag, props, raise_on_error=True)
83
85
 
84
86
  def test_tag_cache(self):
85
- tagset = MutableTagset(namespace="tagset", desc="A dummy tagset for testing")
87
+ tagset = MutableTagset(namespace="tagset", version="1.0.0", desc="A dummy tagset for testing")
86
88
  tagset.add(Tag(name="dummy:foo", desc="A dummy tag for testing"))
87
89
  tagset.add(Tag(name="dummy:bar", desc="Another dummy tag for testing"))
88
90
 
@@ -125,21 +127,21 @@ class TagUtilsTest(unittest.TestCase):
125
127
  60 + 1)
126
128
  self.assertEqual(len(list(target_cache.iter_tags(dt_parse_iso("2020-01-01T00:00:00+00:00"),
127
129
  dt_parse_iso("2020-01-01T00:01:00+00:00"),
128
- tag_pattern="dummy:foo"))),
130
+ tag_prefix="dummy:foo"))),
129
131
  60 // len(tags) + 1)
130
132
  self.assertEqual(len(list(target_cache.iter_tags(dt_parse_iso("2020-01-01T00:00:00+00:00"),
131
133
  dt_parse_iso("2020-01-01T00:01:00+00:00"),
132
- tag_pattern="dummy:bar"))),
134
+ tag_prefix="dummy:bar"))),
133
135
  60 // len(tags))
134
136
  self.assertEqual(len(list(target_cache.iter_tags(dt_parse_iso("2020-01-01T00:00:00+00:00"),
135
137
  dt_parse_iso("2020-01-01T00:01:00+00:00"),
136
- tag_pattern="dummy"))),
138
+ tag_prefix="dummy"))),
137
139
  60 + 1)
138
- self.assertEqual(len(list(target_cache.iter_tags(tag_pattern="dummy:foo"))),
140
+ self.assertEqual(len(list(target_cache.iter_tags(tag_prefix="dummy:foo"))),
139
141
  tags_count // len(tags))
140
- self.assertEqual(len(list(target_cache.iter_tags(tag_pattern="dummy:bar"))),
142
+ self.assertEqual(len(list(target_cache.iter_tags(tag_prefix="dummy:bar"))),
141
143
  tags_count // len(tags))
142
- self.assertEqual(len(list(target_cache.iter_tags(tag_pattern="dummy"))), tags_count)
144
+ self.assertEqual(len(list(target_cache.iter_tags(tag_prefix="dummy"))), tags_count)
143
145
 
144
146
  self.assertEqual(len(list(cache.iter_tags())), tags_count)
145
147
  self.assertEqual(len(list(cache.iter_tags(tagsets=[tagset]))), tags_count // 2)
@@ -150,19 +152,19 @@ class TagUtilsTest(unittest.TestCase):
150
152
  60 + 1)
151
153
  self.assertEqual(len(list(cache.iter_tags(dt_parse_iso("2020-01-01T00:00:00+00:00"),
152
154
  dt_parse_iso("2020-01-01T00:01:00+00:00"),
153
- tag_pattern="dummy:foo"))),
155
+ tag_prefix="dummy:foo"))),
154
156
  60 // len(tags) + 1)
155
157
  self.assertEqual(len(list(cache.iter_tags(dt_parse_iso("2020-01-01T00:00:00+00:00"),
156
158
  dt_parse_iso("2020-01-01T00:01:00+00:00"),
157
- tag_pattern="dummy:bar"))),
159
+ tag_prefix="dummy:bar"))),
158
160
  60 // len(tags))
159
161
  self.assertEqual(len(list(cache.iter_tags(dt_parse_iso("2020-01-01T00:00:00+00:00"),
160
162
  dt_parse_iso("2020-01-01T00:01:00+00:00"),
161
- tag_pattern="dummy"))),
163
+ tag_prefix="dummy"))),
162
164
  60 + 1)
163
- self.assertEqual(len(list(cache.iter_tags(tag_pattern="dummy:foo"))), tags_count // len(tags))
164
- self.assertEqual(len(list(cache.iter_tags(tag_pattern="dummy:bar"))), tags_count // len(tags))
165
- self.assertEqual(len(list(cache.iter_tags(tag_pattern="dummy"))), tags_count)
165
+ self.assertEqual(len(list(cache.iter_tags(tag_prefix="dummy:foo"))), tags_count // len(tags))
166
+ self.assertEqual(len(list(cache.iter_tags(tag_prefix="dummy:bar"))), tags_count // len(tags))
167
+ self.assertEqual(len(list(cache.iter_tags(tag_prefix="dummy"))), tags_count)
166
168
 
167
169
  target_cache.remove_tags(dt_parse_iso("2020-01-01T00:00:00+00:00"), dt_parse_iso("2020-01-01T00:01:00+00:00"))
168
170
 
@@ -180,7 +182,7 @@ class TagUtilsTest(unittest.TestCase):
180
182
  self.assertEqual(len(list(cache.iter_tags())), 0)
181
183
 
182
184
  def test_tag_cache__multithread(self):
183
- tagset = MutableTagset(namespace="tagset", desc="A dummy tagset for testing")
185
+ tagset = MutableTagset(namespace="tagset", version="1.0.0", desc="A dummy tagset for testing")
184
186
  tagset.add(Tag(name="dummy:foo", desc="A dummy tag for testing"))
185
187
  tagset.add(Tag(name="dummy:bar", desc="Another dummy tag for testing"))
186
188
 
@@ -239,15 +241,15 @@ class TagUtilsTest(unittest.TestCase):
239
241
 
240
242
  for target_cache in target_caches:
241
243
  self.assertEqual(len(list(target_cache.iter_tags())), tasks_count_per_target_cache)
242
- self.assertEqual(len(list(target_cache.iter_tags(tag_pattern="dummy:bar"))),
244
+ self.assertEqual(len(list(target_cache.iter_tags(tag_prefix="dummy:bar"))),
243
245
  tasks_count_per_target_cache // len(tags))
244
246
 
245
247
  self.assertEqual(len(list(cache.iter_tag_and_targets())), total_tasks_count)
246
- self.assertEqual(len(list(cache.iter_tag_and_targets(tag_pattern="dummy:bar"))),
248
+ self.assertEqual(len(list(cache.iter_tag_and_targets(tag_prefix="dummy:bar"))),
247
249
  total_tasks_count // len(tags))
248
250
 
249
251
  def test_tag_cache__clone(self):
250
- tagset = MutableTagset(namespace="tagset", desc="A dummy tagset for testing")
252
+ tagset = MutableTagset(namespace="tagset", version="1.0.0", desc="A dummy tagset for testing")
251
253
  tagset.add(Tag(name="dummy:foo", desc="A dummy tag for testing"))
252
254
  tagset.add(Tag(name="dummy:bar", desc="Another dummy tag for testing"))
253
255
 
@@ -301,7 +303,7 @@ class TagUtilsTest(unittest.TestCase):
301
303
  tags_count // 2)
302
304
 
303
305
  def test_tag_cache__clone_same_file(self):
304
- tagset = MutableTagset(namespace="tagset", desc="A dummy tagset for testing")
306
+ tagset = MutableTagset(namespace="tagset", version="1.0.0", desc="A dummy tagset for testing")
305
307
  tagset.add(Tag(name="dummy:foo", desc="A dummy tag for testing"))
306
308
  tagset.add(Tag(name="dummy:bar", desc="Another dummy tag for testing"))
307
309