infrahub-testcontainers 1.4.12__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -152,6 +152,8 @@ class InfrahubDockerCompose(DockerCompose):
  "INFRAHUB_TESTING_TASKMGR_BACKGROUND_SVC_REPLICAS": "1",
  "PREFECT_MESSAGING_BROKER": "prefect_redis.messaging",
  "PREFECT_MESSAGING_CACHE": "prefect_redis.messaging",
+ "PREFECT_SERVER_EVENTS_CAUSAL_ORDERING": "prefect_redis.ordering",
+ "PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE": "prefect_redis.lease_storage",
  "PREFECT__SERVER_WEBSERVER_ONLY": "true",
  "PREFECT_API_DATABASE_MIGRATE_ON_START": "false",
  "PREFECT_API_BLOCKS_REGISTER_ON_START": "false",
@@ -237,7 +239,9 @@ class InfrahubDockerCompose(DockerCompose):
  for service_name, service_data in INFRAHUB_SERVICES.items()
  }

- def database_create_backup(self, backup_name: str = "neo4j_database.backup", dest_dir: Path | None = None) -> None:
+ def database_create_backup(
+ self, backup_name: str = "neo4j_database.backup", dest_dir: Path | None = None, compress: bool = False
+ ) -> None:
  assert self.use_neo4j_enterprise

  self.exec_in_container(
@@ -245,7 +249,7 @@ class InfrahubDockerCompose(DockerCompose):
  "neo4j-admin",
  "database",
  "backup",
- "--compress=false",
+ f"--compress={'true' if compress else 'false'}",
  "--to-path",
  str(self.internal_backup_dir),
  ],
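For context, the new `compress` flag simply toggles the `--compress` argument passed to `neo4j-admin database backup`. A minimal usage sketch, in which the `compose` fixture and the test function are illustrative and not part of the package:

from pathlib import Path

# `compose` stands for an already-started InfrahubDockerCompose instance with
# use_neo4j_enterprise enabled (e.g. provided by a test fixture); names here are
# illustrative only.
def test_compressed_neo4j_backup(compose, tmp_path: Path) -> None:
    # New in 1.5.0: compress=True maps to `neo4j-admin database backup --compress=true`.
    compose.database_create_backup(dest_dir=tmp_path, compress=True)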
@@ -513,3 +517,111 @@ class InfrahubDockerCompose(DockerCompose):
  )
  self.start()
  print("Database restored successfully")
+
+ def task_manager_create_backup(self, backup_name: str = "prefect.dump", dest_dir: Path | None = None) -> Path:
+ """Create a backup of the task manager PostgreSQL database using ``pg_dump``.
+
+ Args:
+ backup_name: Name of the archive file to create. Defaults to ``prefect.dump``.
+ dest_dir: Optional host directory where the backup should be copied after it is
+ produced. When omitted, the backup remains in ``external_backup_dir``.
+
+ Returns:
+ Path to the backup archive on the host filesystem.
+
+ Raises:
+ FileNotFoundError: If the pg_dump command completes but no archive is produced.
+ """
+
+ service_name = "task-manager-db"
+
+ try:
+ self.get_container(service_name=service_name)
+ except ContainerIsNotRunning:
+ self.start_container(service_name=service_name)
+
+ self.external_backup_dir.mkdir(parents=True, exist_ok=True)
+
+ internal_backup_path = self.internal_backup_dir / backup_name
+ dump_command = [
+ "pg_dump",
+ "--format=custom",
+ "--blobs",
+ "--no-owner",
+ "--no-privileges",
+ "--dbname=postgresql://postgres:postgres@localhost:5432/prefect",
+ f"--file={internal_backup_path}",
+ ]
+ self.exec_in_container(command=dump_command, service_name=service_name)
+
+ source_path = self.external_backup_dir / backup_name
+ if not source_path.exists():
+ raise FileNotFoundError(f"Backup file {source_path} was not created")
+
+ final_path = source_path
+ if dest_dir:
+ dest_dir.mkdir(parents=True, exist_ok=True)
+ if dest_dir.resolve() != self.external_backup_dir.resolve():
+ final_path = dest_dir / backup_name
+ shutil.copy(source_path, final_path)
+
+ return final_path
+
+ def task_manager_restore_backup(self, backup_file: Path) -> None:
+ """Restore the task manager PostgreSQL database from a ``pg_restore`` archive.
+
+ Args:
+ backup_file: Path to the backup archive on the host filesystem.
+
+ Raises:
+ FileNotFoundError: If the provided backup archive does not exist.
+ """
+
+ if not backup_file.exists():
+ raise FileNotFoundError(f"Backup file {backup_file} does not exist")
+
+ service_name = "task-manager-db"
+
+ try:
+ self.get_container(service_name=service_name)
+ except ContainerIsNotRunning:
+ self.start_container(service_name=service_name)
+
+ self.external_backup_dir.mkdir(parents=True, exist_ok=True)
+ target_path = self.external_backup_dir / backup_file.name
+ shutil.copy(backup_file, target_path)
+
+ admin_dsn = "postgresql://postgres:postgres@localhost:5432/postgres"
+ prefect_dsn = "postgresql://postgres:postgres@localhost:5432/prefect"
+ internal_backup_path = self.internal_backup_dir / backup_file.name
+
+ terminate_sessions_command = [
+ "psql",
+ f"--dbname={admin_dsn}",
+ "--command",
+ "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = 'prefect';",
+ ]
+ drop_database_command = [
+ "psql",
+ f"--dbname={admin_dsn}",
+ "--command",
+ "DROP DATABASE IF EXISTS prefect WITH (FORCE);",
+ ]
+ create_database_command = [
+ "psql",
+ f"--dbname={admin_dsn}",
+ "--command",
+ "CREATE DATABASE prefect OWNER postgres;",
+ ]
+ restore_command = [
+ "pg_restore",
+ "--no-owner",
+ "--role=postgres",
+ f"--dbname={prefect_dsn}",
+ str(internal_backup_path),
+ ]
+
+ self.exec_in_container(command=terminate_sessions_command, service_name=service_name)
+ self.exec_in_container(command=drop_database_command, service_name=service_name)
+ self.exec_in_container(command=create_database_command, service_name=service_name)
+ self.exec_in_container(command=restore_command, service_name=service_name)
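Taken together, these two helpers provide a pg_dump / pg_restore round trip for the Prefect database running in the `task-manager-db` service. A minimal sketch of how they might be used from a test; the `compose` fixture is illustrative and not part of the package:

from pathlib import Path

# `compose` stands for an already-started InfrahubDockerCompose instance
# (e.g. provided by a test fixture); only the two new methods are exercised.
def test_task_manager_backup_roundtrip(compose, tmp_path: Path) -> None:
    # Dump the Prefect PostgreSQL database to an archive copied into tmp_path.
    archive: Path = compose.task_manager_create_backup(dest_dir=tmp_path)

    # ... exercise the system and mutate task manager state ...

    # Drop and recreate the `prefect` database, then restore it from the archive.
    compose.task_manager_restore_backup(backup_file=archive)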
@@ -184,6 +184,8 @@ services:

  PREFECT_MESSAGING_BROKER:
  PREFECT_MESSAGING_CACHE:
+ PREFECT_SERVER_EVENTS_CAUSAL_ORDERING:
+ PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE:
  PREFECT__SERVER_WEBSERVER_ONLY:
  PREFECT_API_DATABASE_MIGRATE_ON_START:
  PREFECT_API_BLOCKS_REGISTER_ON_START:
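Environment keys listed without a value in the compose file are pass-through variables: Docker Compose forwards whatever is set in the environment (or .env file) used to launch the project. A minimal sketch of overriding the two new settings before start-up, using the 1.5.0 default values shown in the first hunk:

import os

# Pass-through override; the variable names come from the compose file above,
# the values are the defaults InfrahubDockerCompose sets in this release.
os.environ["PREFECT_SERVER_EVENTS_CAUSAL_ORDERING"] = "prefect_redis.ordering"
os.environ["PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE"] = "prefect_redis.lease_storage"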
@@ -225,6 +227,8 @@ services:
  INFRAHUB_CACHE_ADDRESS: ${INFRAHUB_TESTING_CACHE_ADDRESS}
  PREFECT_MESSAGING_BROKER: prefect_redis.messaging
  PREFECT_MESSAGING_CACHE: prefect_redis.messaging
+ PREFECT_SERVER_EVENTS_CAUSAL_ORDERING: prefect_redis.ordering
+ PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE: prefect_redis.lease_storage
  PREFECT_REDIS_MESSAGING_HOST: "${INFRAHUB_TESTING_CACHE_ADDRESS:-cache}"
  PREFECT_REDIS_MESSAGING_DB: "1"
  PREFECT_REDIS_MESSAGING_CONSUMER_MIN_IDLE_TIME: "30"
@@ -244,6 +248,7 @@ services:
  - POSTGRES_DB=prefect
  volumes:
  - workflow_db:/var/lib/postgresql/data
+ - "./${INFRAHUB_TESTING_LOCAL_DB_BACKUP_DIRECTORY}:${INFRAHUB_TESTING_INTERNAL_DB_BACKUP_DIRECTORY}"
  healthcheck:
  test: ["CMD-SHELL", "pg_isready"]
  interval: 10s
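The added volume entry bind-mounts the host backup directory into the `task-manager-db` container, which is what lets the new `task_manager_create_backup` / `task_manager_restore_backup` helpers exchange archive files with the host: `pg_dump` writes below `internal_backup_dir` inside the container and the same file then appears under `external_backup_dir` on the host. A rough sketch of that correspondence, with purely illustrative example paths:

from pathlib import Path

# Example values only; the real paths come from the
# INFRAHUB_TESTING_LOCAL_DB_BACKUP_DIRECTORY (host side) and
# INFRAHUB_TESTING_INTERNAL_DB_BACKUP_DIRECTORY (container side) variables.
host_backup_dir = Path("./db_backups")
container_backup_dir = Path("/db_backups")

# pg_dump writes container_backup_dir / "prefect.dump" inside the container;
# the bind mount makes the same bytes visible at host_backup_dir / "prefect.dump".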
@@ -95,6 +95,8 @@ services:

  PREFECT_MESSAGING_BROKER:
  PREFECT_MESSAGING_CACHE:
+ PREFECT_SERVER_EVENTS_CAUSAL_ORDERING:
+ PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE:
  PREFECT__SERVER_WEBSERVER_ONLY:
  PREFECT_API_DATABASE_MIGRATE_ON_START:
  PREFECT_API_BLOCKS_REGISTER_ON_START:
@@ -136,6 +138,8 @@ services:
  INFRAHUB_CACHE_ADDRESS: ${INFRAHUB_TESTING_CACHE_ADDRESS}
  PREFECT_MESSAGING_BROKER: prefect_redis.messaging
  PREFECT_MESSAGING_CACHE: prefect_redis.messaging
+ PREFECT_SERVER_EVENTS_CAUSAL_ORDERING: prefect_redis.ordering
+ PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE: prefect_redis.lease_storage
  PREFECT_REDIS_MESSAGING_HOST: "${INFRAHUB_TESTING_CACHE_ADDRESS:-cache}"
  PREFECT_REDIS_MESSAGING_DB: "1"
  PREFECT_REDIS_MESSAGING_CONSUMER_MIN_IDLE_TIME: "30"
@@ -155,6 +159,7 @@ services:
  - POSTGRES_DB=prefect
  volumes:
  - workflow_db:/var/lib/postgresql/data
+ - "./${INFRAHUB_TESTING_LOCAL_DB_BACKUP_DIRECTORY}:${INFRAHUB_TESTING_INTERNAL_DB_BACKUP_DIRECTORY}"
  healthcheck:
  test: ["CMD-SHELL", "pg_isready"]
  interval: 10s
@@ -1,4 +1,4 @@
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from enum import Enum
  from typing import Any

@@ -27,7 +27,7 @@ class InfrahubResultContext(BaseModel):

  class InfrahubActiveMeasurementItem(BaseModel):
  definition: MeasurementDefinition
- start_time: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
+ start_time: datetime = Field(default_factory=lambda: datetime.now(UTC))
  context: dict[str, Any] = Field(default_factory=dict)

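The same substitution is applied in `InfrahubPerformanceTest` below. `datetime.UTC`, available since Python 3.11 (the oldest version in the package classifiers), is an alias for `datetime.timezone.utc`, so these changes do not alter behaviour:

from datetime import UTC, datetime, timezone

# UTC and timezone.utc are the same object, so the two spellings are interchangeable.
assert UTC is timezone.utc
now = datetime.now(UTC)  # identical to datetime.now(timezone.utc)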
 
@@ -1,6 +1,6 @@
  import hashlib
  import json
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from types import TracebackType
  from typing import Any

@@ -35,7 +35,7 @@ class InfrahubPerformanceTest:
  self.env_vars = {}
  self.project_name = ""
  self.test_info = {}
- self.start_time = datetime.now(timezone.utc)
+ self.start_time = datetime.now(UTC)
  self.end_time: datetime | None = None
  self.results_url = results_url
  self.scraper_endpoint = ""
@@ -57,7 +57,7 @@ class InfrahubPerformanceTest:

  def finalize(self, session: pytest.Session) -> None:
  if self.initialized:
- self.end_time = datetime.now(timezone.utc)
+ self.end_time = datetime.now(UTC)
  self.extract_test_session_information(session)
  self.send_results()

@@ -129,7 +129,7 @@ class InfrahubPerformanceTest:
  if not exc_type and self.active_measurements:
  self.add_measurement(
  definition=self.active_measurements.definition,
- value=(datetime.now(timezone.utc) - self.active_measurements.start_time).total_seconds() * 1000,
+ value=(datetime.now(UTC) - self.active_measurements.start_time).total_seconds() * 1000,
  context=self.active_measurements.context,
  )

@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: infrahub-testcontainers
- Version: 1.4.12
+ Version: 1.5.0
  Summary: Testcontainers instance for Infrahub to easily build integration tests
  License: Apache-2.0
  Author: OpsMill
@@ -15,7 +15,7 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Requires-Dist: httpx (>=0.28.1,<0.29.0)
- Requires-Dist: prefect-client (==3.4.13)
+ Requires-Dist: prefect-client (==3.4.23)
  Requires-Dist: psutil
  Requires-Dist: pydantic (>=2.10.6,<3.0.0)
  Requires-Dist: pytest
@@ -0,0 +1,17 @@
+ infrahub_testcontainers/__init__.py,sha256=oPpmesGgYBSdKTg1L37FGwYBeao1EHury5SJGul-CT8,216
+ infrahub_testcontainers/constants.py,sha256=mZ4hLvcf4rKk9wC7EId4MQxAY0sk4V99deB04N0J2bg,85
+ infrahub_testcontainers/container.py,sha256=VHsTETUX6a-XlgZ4fgexJMRL7nytkF7lQXujep2STTY,24931
+ infrahub_testcontainers/docker-compose-cluster.test.yml,sha256=WFWQLDoeS0E5MfjlOejfwaFMNu8ejsJ6ay7iuLonlCc,15108
+ infrahub_testcontainers/docker-compose.test.yml,sha256=BSZ8C3tEg7g6a9Qm4TcCIqxgX1RkqSjfF8y8fcs3SZ0,11506
+ infrahub_testcontainers/haproxy.cfg,sha256=QUkG2Xu-hKoknPOeYKAkBT_xJH6U9CfIS0DTMFZJsnk,1305
+ infrahub_testcontainers/helpers.py,sha256=rGEWIeUfDg4w1wJNCzTm7_H1oA58HaMSORjVlHw1aWA,4677
+ infrahub_testcontainers/host.py,sha256=Z4_gGoGKKeM_HGVS7SdYL1FTNGyLBk8wzicdSKHpfmM,1486
+ infrahub_testcontainers/measurements.py,sha256=gR-uTasSIFCXrwvnNpIpfsQIopKftT7pBiarCgIShaQ,2214
+ infrahub_testcontainers/models.py,sha256=-TScLFW3V7SOe3eS1C8ANKmabpxGDTX5xu-V1pgtnbs,940
+ infrahub_testcontainers/performance_test.py,sha256=k14E0loMNuvhIJKEItbeKdbcOC0NBM-HhSe0RnVhJJo,6117
+ infrahub_testcontainers/plugin.py,sha256=I3RuZQ0dARyKHuqCf0y1Yj731P2Mwf3BJUehRJKeWrs,5645
+ infrahub_testcontainers/prometheus.yml,sha256=610xQEyj3xuVJMzPkC4m1fRnCrjGpiRBrXA2ytCLa54,599
+ infrahub_testcontainers-1.5.0.dist-info/METADATA,sha256=1-Xks9U--HM9DSo0MKt1-0KCEFRBRBcERVYRlrDOZOI,1029
+ infrahub_testcontainers-1.5.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+ infrahub_testcontainers-1.5.0.dist-info/entry_points.txt,sha256=gHOERdtVE0P8dYz6FHkn2KplpbXvCDJQnuWg_IP0-qQ,76
+ infrahub_testcontainers-1.5.0.dist-info/RECORD,,
@@ -1,17 +0,0 @@
- infrahub_testcontainers/__init__.py,sha256=oPpmesGgYBSdKTg1L37FGwYBeao1EHury5SJGul-CT8,216
- infrahub_testcontainers/constants.py,sha256=mZ4hLvcf4rKk9wC7EId4MQxAY0sk4V99deB04N0J2bg,85
- infrahub_testcontainers/container.py,sha256=RodXcIry-ppcpce_xnzpZuBHVinYyMq64NZ6cuCvhBE,20516
- infrahub_testcontainers/docker-compose-cluster.test.yml,sha256=3Zgupnt6rOCk5EK0PsUXJL1thmTbKzQelC3AeS3sGKA,14764
- infrahub_testcontainers/docker-compose.test.yml,sha256=EWxll1vzAdOzTdJX3_um30nIygM-bSHPskF6AelSZRk,11162
- infrahub_testcontainers/haproxy.cfg,sha256=QUkG2Xu-hKoknPOeYKAkBT_xJH6U9CfIS0DTMFZJsnk,1305
- infrahub_testcontainers/helpers.py,sha256=rGEWIeUfDg4w1wJNCzTm7_H1oA58HaMSORjVlHw1aWA,4677
- infrahub_testcontainers/host.py,sha256=Z4_gGoGKKeM_HGVS7SdYL1FTNGyLBk8wzicdSKHpfmM,1486
- infrahub_testcontainers/measurements.py,sha256=gR-uTasSIFCXrwvnNpIpfsQIopKftT7pBiarCgIShaQ,2214
- infrahub_testcontainers/models.py,sha256=ASYyvl7d_WQz_i7y8-3iab9hwwmCl3OCJavqVbe8nXU,954
- infrahub_testcontainers/performance_test.py,sha256=hvwiy6tc_lWniYqGkqfOXVGAmA_IV15VOZqbiD9ezno,6149
- infrahub_testcontainers/plugin.py,sha256=I3RuZQ0dARyKHuqCf0y1Yj731P2Mwf3BJUehRJKeWrs,5645
- infrahub_testcontainers/prometheus.yml,sha256=610xQEyj3xuVJMzPkC4m1fRnCrjGpiRBrXA2ytCLa54,599
- infrahub_testcontainers-1.4.12.dist-info/METADATA,sha256=TL6Jt6n31Z8sc-hlbkRtPFoxVfBecpu842VePWrcZ3I,1030
- infrahub_testcontainers-1.4.12.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
- infrahub_testcontainers-1.4.12.dist-info/entry_points.txt,sha256=gHOERdtVE0P8dYz6FHkn2KplpbXvCDJQnuWg_IP0-qQ,76
- infrahub_testcontainers-1.4.12.dist-info/RECORD,,