dagster-postgres 0.23.11__tar.gz → 0.27.12__tar.gz

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (33)
  1. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/LICENSE +1 -1
  2. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/MANIFEST.in +2 -0
  3. {dagster-postgres-0.23.11/dagster_postgres.egg-info → dagster_postgres-0.27.12}/PKG-INFO +15 -4
  4. dagster_postgres-0.27.12/README.md +4 -0
  5. dagster_postgres-0.27.12/dagster_postgres/__init__.py +15 -0
  6. dagster_postgres-0.27.12/dagster_postgres/event_log/__init__.py +1 -0
  7. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/event_log/event_log.py +39 -18
  8. dagster_postgres-0.27.12/dagster_postgres/run_storage/__init__.py +1 -0
  9. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/run_storage/run_storage.py +13 -11
  10. dagster_postgres-0.27.12/dagster_postgres/schedule_storage/__init__.py +3 -0
  11. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/schedule_storage/schedule_storage.py +14 -9
  12. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/storage.py +5 -5
  13. dagster_postgres-0.27.12/dagster_postgres/test_fixtures/__init__.py +75 -0
  14. dagster_postgres-0.27.12/dagster_postgres/test_fixtures/docker-compose.yml +10 -0
  15. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/utils.py +2 -1
  16. dagster_postgres-0.27.12/dagster_postgres/version.py +1 -0
  17. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12/dagster_postgres.egg-info}/PKG-INFO +15 -4
  18. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres.egg-info/SOURCES.txt +3 -1
  19. dagster_postgres-0.27.12/dagster_postgres.egg-info/requires.txt +2 -0
  20. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/setup.py +4 -5
  21. dagster-postgres-0.23.11/README.md +0 -4
  22. dagster-postgres-0.23.11/dagster_postgres/__init__.py +0 -15
  23. dagster-postgres-0.23.11/dagster_postgres/event_log/__init__.py +0 -1
  24. dagster-postgres-0.23.11/dagster_postgres/run_storage/__init__.py +0 -1
  25. dagster-postgres-0.23.11/dagster_postgres/schedule_storage/__init__.py +0 -1
  26. dagster-postgres-0.23.11/dagster_postgres/version.py +0 -1
  27. dagster-postgres-0.23.11/dagster_postgres.egg-info/requires.txt +0 -2
  28. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/alembic/alembic.ini +0 -0
  29. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/py.typed +0 -0
  30. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres.egg-info/dependency_links.txt +0 -0
  31. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres.egg-info/not-zip-safe +0 -0
  32. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres.egg-info/top_level.txt +0 -0
  33. {dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/setup.cfg +0 -0

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/LICENSE
@@ -186,7 +186,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.
 
-   Copyright 2023 Dagster Labs, Inc".
+   Copyright 2025 Dagster Labs, Inc.
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/MANIFEST.in
@@ -3,3 +3,5 @@ graft dagster_postgres/alembic
 global-exclude __pycache__
 global-exclude *.py[co]
 include dagster_postgres/py.typed
+exclude conftest.py
+include dagster_postgres/test_fixtures/docker-compose.yml

{dagster-postgres-0.23.11/dagster_postgres.egg-info → dagster_postgres-0.27.12}/PKG-INFO
@@ -1,17 +1,28 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: dagster-postgres
-Version: 0.23.11
+Version: 0.27.12
 Summary: A Dagster integration for postgres
 Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-postgres
 Author: Dagster Labs
 Author-email: hello@dagsterlabs.com
 License: Apache-2.0
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.8,<3.13
+Requires-Python: >=3.9,<3.14
 License-File: LICENSE
+Requires-Dist: dagster==1.11.12
+Requires-Dist: psycopg2-binary
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: home-page
+Dynamic: license
+Dynamic: license-file
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary

dagster_postgres-0.27.12/README.md
@@ -0,0 +1,4 @@
+# dagster-postgres
+
+The docs for `dagster-postgres` can be found
+[here](https://docs.dagster.io/api/python-api/libraries/dagster-postgres).

dagster_postgres-0.27.12/dagster_postgres/__init__.py
@@ -0,0 +1,15 @@
+from dagster_shared.libraries import DagsterLibraryRegistry
+
+from dagster_postgres.event_log import PostgresEventLogStorage
+from dagster_postgres.run_storage import PostgresRunStorage
+from dagster_postgres.schedule_storage import PostgresScheduleStorage
+from dagster_postgres.storage import DagsterPostgresStorage
+from dagster_postgres.version import __version__
+
+DagsterLibraryRegistry.register("dagster-postgres", __version__)
+__all__ = [
+    "DagsterPostgresStorage",
+    "PostgresEventLogStorage",
+    "PostgresRunStorage",
+    "PostgresScheduleStorage",
+]
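
Although the package-level module now uses absolute imports and registers itself via dagster_shared, the public names in __all__ are unchanged from 0.23.11, so existing imports keep working. A minimal check of that, grounded only in the __all__ and version shown in this diff:

import dagster_postgres
from dagster_postgres import (
    DagsterPostgresStorage,
    PostgresEventLogStorage,
    PostgresRunStorage,
    PostgresScheduleStorage,
)

# Version string comes from dagster_postgres/version.py in this release.
assert dagster_postgres.__version__ == "0.27.12"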

dagster_postgres-0.27.12/dagster_postgres/event_log/__init__.py
@@ -0,0 +1 @@
+from dagster_postgres.event_log.event_log import PostgresEventLogStorage as PostgresEventLogStorage

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/event_log/event_log.py
@@ -1,5 +1,6 @@
+from collections.abc import Iterator, Mapping, Sequence
 from contextlib import contextmanager
-from typing import Any, ContextManager, Iterator, Mapping, Optional, Sequence, cast
+from typing import Any, ContextManager, Optional, cast  # noqa: UP035
 
 import dagster._check as check
 import sqlalchemy as db
@@ -33,7 +34,7 @@ from dagster._serdes import ConfigurableClass, ConfigurableClassData, deserializ
 from sqlalchemy import event
 from sqlalchemy.engine import Connection
 
-from ..utils import (
+from dagster_postgres.utils import (
     create_pg_connection,
     pg_alembic_config,
     pg_url_from_config,
@@ -111,12 +112,15 @@ class PostgresEventLogStorage(SqlEventLogStorage, ConfigurableClass):
                 SqlEventLogStorageMetadata.create_all(conn)
                 stamp_alembic_rev(pg_alembic_config(__file__), conn)
 
-    def optimize_for_webserver(self, statement_timeout: int, pool_recycle: int) -> None:
+    def optimize_for_webserver(
+        self, statement_timeout: int, pool_recycle: int, max_overflow: int
+    ) -> None:
         # When running in dagster-webserver, hold an open connection and set statement_timeout
         kwargs = {
             "isolation_level": "AUTOCOMMIT",
             "pool_size": 1,
             "pool_recycle": pool_recycle,
+            "max_overflow": max_overflow,
         }
         existing_options = self._engine.url.query.get("options")
         if existing_options:
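
optimize_for_webserver now takes a max_overflow argument alongside statement_timeout and pool_recycle; the same change appears in run_storage and schedule_storage below. As a rough illustration of what the kwargs above amount to, here is a plain SQLAlchemy engine configured the same way — the URL and numeric values are placeholders, not the library's defaults, and the real code builds the options string from the existing engine URL as shown in the hunk:

import sqlalchemy as db

pg_url = "postgresql://test:test@localhost:5432/test"  # placeholder connection string
statement_timeout = 15000  # ms; placeholder for the value dagster-webserver passes in

engine = db.create_engine(
    pg_url,
    isolation_level="AUTOCOMMIT",
    pool_size=1,        # hold one long-lived connection for the webserver
    pool_recycle=3600,  # recycle it periodically (placeholder value)
    max_overflow=20,    # newly configurable: extra connections beyond pool_size (placeholder)
    connect_args={"options": f"-c statement_timeout={statement_timeout}"},
)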
@@ -172,7 +176,6 @@ class PostgresEventLogStorage(SqlEventLogStorage, ConfigurableClass):
             event (EventLogEntry): The event to store.
         """
         check.inst_param(event, "event", EventLogEntry)
-
         insert_event_statement = self.prepare_insert_event(event)  # from SqlEventLogStorage.py
         with self._connect() as conn:
             result = conn.execute(
@@ -208,25 +211,45 @@ class PostgresEventLogStorage(SqlEventLogStorage, ConfigurableClass):
                 self.store_asset_check_event(event, event_id)
 
     def store_event_batch(self, events: Sequence[EventLogEntry]) -> None:
+        from dagster import DagsterEventType
+
         check.sequence_param(events, "event", of_type=EventLogEntry)
 
+        event_types = {event.get_dagster_event().event_type for event in events}
+
         check.invariant(
-            all(event.get_dagster_event().event_type in BATCH_WRITABLE_EVENTS for event in events),
+            all(event_type in BATCH_WRITABLE_EVENTS for event_type in event_types),
             f"{BATCH_WRITABLE_EVENTS} are the only currently supported events for batch writes.",
         )
+        events = [
+            event
+            for event in events
+            if not event.get_dagster_event().is_asset_failed_to_materialize
+        ]
+        if len(events) == 0:
+            return
 
-        insert_event_statement = self.prepare_insert_event_batch(events)
-        with self._connect() as conn:
-            result = conn.execute(insert_event_statement.returning(SqlEventLogStorageTable.c.id))
-            event_ids = [cast(int, row[0]) for row in result.fetchall()]
+        if event_types == {DagsterEventType.ASSET_MATERIALIZATION} or event_types == {
+            DagsterEventType.ASSET_OBSERVATION
+        }:
+            insert_event_statement = self.prepare_insert_event_batch(events)
+            with self._connect() as conn:
+                result = conn.execute(
+                    insert_event_statement.returning(SqlEventLogStorageTable.c.id)
+                )
+                event_ids = [cast("int", row[0]) for row in result.fetchall()]
 
-            # We only update the asset table with the last event
-            self.store_asset_event(events[-1], event_ids[-1])
+                # We only update the asset table with the last event
+                self.store_asset_event(events[-1], event_ids[-1])
 
-            if any((event_id is None for event_id in event_ids)):
-                raise DagsterInvariantViolationError("Cannot store asset event tags for null event id.")
+                if any(event_id is None for event_id in event_ids):
+                    raise DagsterInvariantViolationError(
+                        "Cannot store asset event tags for null event id."
+                    )
 
-            self.store_asset_event_tags(events, event_ids)
+                self.store_asset_event_tags(events, event_ids)
+        else:
+            return super().store_event_batch(events)
 
     def store_asset_event(self, event: EventLogEntry, event_id: int) -> None:
         check.inst_param(event, "event", EventLogEntry)
@@ -325,13 +348,11 @@ class PostgresEventLogStorage(SqlEventLogStorage, ConfigurableClass):
 
     def has_secondary_index(self, name: str) -> bool:
         if name not in self._secondary_index_cache:
-            self._secondary_index_cache[name] = super(
-                PostgresEventLogStorage, self
-            ).has_secondary_index(name)
+            self._secondary_index_cache[name] = super().has_secondary_index(name)
         return self._secondary_index_cache[name]
 
     def enable_secondary_index(self, name: str) -> None:
-        super(PostgresEventLogStorage, self).enable_secondary_index(name)
+        super().enable_secondary_index(name)
         if name in self._secondary_index_cache:
             del self._secondary_index_cache[name]
 

dagster_postgres-0.27.12/dagster_postgres/run_storage/__init__.py
@@ -0,0 +1 @@
+from dagster_postgres.run_storage.run_storage import PostgresRunStorage as PostgresRunStorage

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/run_storage/run_storage.py
@@ -1,5 +1,6 @@
 import zlib
-from typing import ContextManager, Mapping, Optional
+from collections.abc import Mapping
+from typing import ContextManager, Optional  # noqa: UP035
 
 import dagster._check as check
 import sqlalchemy as db
@@ -24,11 +25,11 @@ from dagster._core.storage.sql import (
 )
 from dagster._daemon.types import DaemonHeartbeat
 from dagster._serdes import ConfigurableClass, ConfigurableClassData, serialize_value
-from dagster._utils import utc_datetime_from_timestamp
+from dagster._time import datetime_from_timestamp
 from sqlalchemy import event
 from sqlalchemy.engine import Connection
 
-from ..utils import (
+from dagster_postgres.utils import (
     create_pg_connection,
     pg_alembic_config,
     pg_url_from_config,
@@ -107,12 +108,15 @@ class PostgresRunStorage(SqlRunStorage, ConfigurableClass):
                 # This revision may be shared by any other dagster storage classes using the same DB
                 stamp_alembic_rev(pg_alembic_config(__file__), conn)
 
-    def optimize_for_webserver(self, statement_timeout: int, pool_recycle: int) -> None:
+    def optimize_for_webserver(
+        self, statement_timeout: int, pool_recycle: int, max_overflow: int
+    ) -> None:
         # When running in dagster-webserver, hold an open connection and set statement_timeout
         kwargs = {
             "isolation_level": "AUTOCOMMIT",
             "pool_size": 1,
             "pool_recycle": pool_recycle,
+            "max_overflow": max_overflow,
         }
         existing_options = self._engine.url.query.get("options")
         if existing_options:
@@ -133,7 +137,7 @@ class PostgresRunStorage(SqlRunStorage, ConfigurableClass):
         return pg_config()
 
     @classmethod
-    def from_config_value(
+    def from_config_value(  # pyright: ignore[reportIncompatibleMethodOverride]
         cls, inst_data: Optional[ConfigurableClassData], config_value: PostgresStorageConfig
     ):
         return PostgresRunStorage(
@@ -164,13 +168,11 @@ class PostgresRunStorage(SqlRunStorage, ConfigurableClass):
 
     def has_built_index(self, migration_name: str) -> bool:
         if migration_name not in self._index_migration_cache:
-            self._index_migration_cache[migration_name] = super(
-                PostgresRunStorage, self
-            ).has_built_index(migration_name)
+            self._index_migration_cache[migration_name] = super().has_built_index(migration_name)
         return self._index_migration_cache[migration_name]
 
     def mark_index_built(self, migration_name: str) -> None:
-        super(PostgresRunStorage, self).mark_index_built(migration_name)
+        super().mark_index_built(migration_name)
         if migration_name in self._index_migration_cache:
             del self._index_migration_cache[migration_name]
 
@@ -180,7 +182,7 @@ class PostgresRunStorage(SqlRunStorage, ConfigurableClass):
             conn.execute(
                 db_dialects.postgresql.insert(DaemonHeartbeatsTable)
                 .values(
-                    timestamp=utc_datetime_from_timestamp(daemon_heartbeat.timestamp),
+                    timestamp=datetime_from_timestamp(daemon_heartbeat.timestamp),
                     daemon_type=daemon_heartbeat.daemon_type,
                     daemon_id=daemon_heartbeat.daemon_id,
                     body=serialize_value(daemon_heartbeat),
@@ -188,7 +190,7 @@ class PostgresRunStorage(SqlRunStorage, ConfigurableClass):
                 .on_conflict_do_update(
                     index_elements=[DaemonHeartbeatsTable.c.daemon_type],
                     set_={
-                        "timestamp": utc_datetime_from_timestamp(daemon_heartbeat.timestamp),
+                        "timestamp": datetime_from_timestamp(daemon_heartbeat.timestamp),
                         "daemon_id": daemon_heartbeat.daemon_id,
                         "body": serialize_value(daemon_heartbeat),
                     },

dagster_postgres-0.27.12/dagster_postgres/schedule_storage/__init__.py
@@ -0,0 +1,3 @@
+from dagster_postgres.schedule_storage.schedule_storage import (
+    PostgresScheduleStorage as PostgresScheduleStorage,
+)

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/schedule_storage/schedule_storage.py
@@ -1,13 +1,14 @@
-from typing import ContextManager, Optional, Sequence
+from collections.abc import Sequence
+from typing import ContextManager, Optional  # noqa: UP035
 
 import dagster._check as check
-import pendulum
 import sqlalchemy as db
 import sqlalchemy.dialects as db_dialects
 import sqlalchemy.pool as db_pool
 from dagster._config.config_schema import UserConfigSchema
+from dagster._core.definitions.asset_key import EntityKey
 from dagster._core.definitions.declarative_automation.serialized_objects import (
-    AssetConditionEvaluationWithRunIds,
+    AutomationConditionEvaluationWithRunIds,
 )
 from dagster._core.scheduler.instigation import InstigatorState
 from dagster._core.storage.config import PostgresStorageConfig, pg_config
@@ -24,10 +25,11 @@ from dagster._core.storage.sql import (
     stamp_alembic_rev,
 )
 from dagster._serdes import ConfigurableClass, ConfigurableClassData, serialize_value
+from dagster._time import get_current_datetime
 from sqlalchemy import event
 from sqlalchemy.engine import Connection
 
-from ..utils import (
+from dagster_postgres.utils import (
     create_pg_connection,
     pg_alembic_config,
     pg_url_from_config,
@@ -102,12 +104,15 @@ class PostgresScheduleStorage(SqlScheduleStorage, ConfigurableClass):
             self.migrate()
             self.optimize()
 
-    def optimize_for_webserver(self, statement_timeout: int, pool_recycle: int) -> None:
+    def optimize_for_webserver(
+        self, statement_timeout: int, pool_recycle: int, max_overflow: int
+    ) -> None:
         # When running in dagster-webserver, hold an open connection and set statement_timeout
         kwargs = {
             "isolation_level": "AUTOCOMMIT",
             "pool_size": 1,
             "pool_recycle": pool_recycle,
+            "max_overflow": max_overflow,
         }
         existing_options = self._engine.url.query.get("options")
         if existing_options:
@@ -128,7 +133,7 @@ class PostgresScheduleStorage(SqlScheduleStorage, ConfigurableClass):
         return pg_config()
 
     @classmethod
-    def from_config_value(
+    def from_config_value(  # pyright: ignore[reportIncompatibleMethodOverride]
         cls, inst_data: Optional[ConfigurableClassData], config_value: PostgresStorageConfig
     ) -> "PostgresScheduleStorage":
         return PostgresScheduleStorage(
@@ -175,7 +180,7 @@ class PostgresScheduleStorage(SqlScheduleStorage, ConfigurableClass):
                         "status": state.status.value,
                         "instigator_type": state.instigator_type.value,
                         "instigator_body": serialize_value(state),
-                        "update_timestamp": pendulum.now("UTC"),
+                        "update_timestamp": get_current_datetime(),
                     },
                 )
             )
@@ -183,7 +188,7 @@ class PostgresScheduleStorage(SqlScheduleStorage, ConfigurableClass):
     def add_auto_materialize_asset_evaluations(
         self,
         evaluation_id: int,
-        asset_evaluations: Sequence[AssetConditionEvaluationWithRunIds],
+        asset_evaluations: Sequence[AutomationConditionEvaluationWithRunIds[EntityKey]],
     ):
         if not asset_evaluations:
             return
@@ -192,7 +197,7 @@ class PostgresScheduleStorage(SqlScheduleStorage, ConfigurableClass):
             [
                 {
                     "evaluation_id": evaluation_id,
-                    "asset_key": evaluation.asset_key.to_string(),
+                    "asset_key": evaluation.key.to_db_string(),
                     "asset_evaluation_body": serialize_value(evaluation),
                     "num_requested": evaluation.num_requested,
                 }

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/storage.py
@@ -9,10 +9,10 @@ from dagster._core.storage.runs import RunStorage
 from dagster._core.storage.schedules import ScheduleStorage
 from dagster._serdes import ConfigurableClass, ConfigurableClassData
 
-from .event_log import PostgresEventLogStorage
-from .run_storage import PostgresRunStorage
-from .schedule_storage import PostgresScheduleStorage
-from .utils import pg_url_from_config
+from dagster_postgres.event_log import PostgresEventLogStorage
+from dagster_postgres.run_storage import PostgresRunStorage
+from dagster_postgres.schedule_storage import PostgresScheduleStorage
+from dagster_postgres.utils import pg_url_from_config
 
 
 class DagsterPostgresStorage(DagsterStorage, ConfigurableClass):
@@ -59,7 +59,7 @@ class DagsterPostgresStorage(DagsterStorage, ConfigurableClass):
         return pg_config()
 
     @classmethod
-    def from_config_value(
+    def from_config_value(  # pyright: ignore[reportIncompatibleMethodOverride]
        cls, inst_data: Optional[ConfigurableClassData], config_value: PostgresStorageConfig
     ) -> "DagsterPostgresStorage":
         return DagsterPostgresStorage(
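
These storage classes are normally wired up through instance configuration, but they can also be constructed directly from a connection URL. A minimal sketch, assuming each class accepts the connection string as a postgres_url keyword (an assumption inferred from the postgres_url config key used elsewhere in this diff, not verified against the constructors):

from dagster_postgres import (
    PostgresEventLogStorage,
    PostgresRunStorage,
    PostgresScheduleStorage,
)

pg_url = "postgresql://test:test@localhost:5432/test"  # placeholder connection string

# Assumed constructor keyword; adjust if the actual signature differs.
run_storage = PostgresRunStorage(postgres_url=pg_url)
event_log_storage = PostgresEventLogStorage(postgres_url=pg_url)
schedule_storage = PostgresScheduleStorage(postgres_url=pg_url)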

dagster_postgres-0.27.12/dagster_postgres/test_fixtures/__init__.py
@@ -0,0 +1,75 @@
+import tempfile
+from contextlib import contextmanager
+from pathlib import Path
+
+import pytest
+from dagster._core.test_utils import instance_for_test
+from dagster._utils.merger import merge_dicts
+from dagster_test.fixtures import docker_compose_cm, network_name_from_yml
+
+from dagster_postgres.utils import get_conn_string, wait_for_connection
+
+compose_file = Path(__file__).parent / "docker-compose.yml"
+
+
+@pytest.fixture(scope="session")
+def postgres_network():
+    yield network_name_from_yml(compose_file)
+
+
+@pytest.fixture(scope="session")
+def postgres_hostname():
+    with docker_compose_cm(docker_compose_yml=compose_file) as hostnames:
+        yield hostnames["postgres"]
+
+
+@pytest.fixture(scope="session")
+def postgres_conn_str(postgres_hostname):
+    conn_str = get_conn_string(
+        username="test",
+        password="test",
+        hostname=postgres_hostname,
+        db_name="test",
+        params=dict(connect_timeout=5),
+    )
+    wait_for_connection(
+        conn_str,
+        retry_limit=10,
+        retry_wait=3,
+    )
+
+    yield conn_str
+
+
+@pytest.fixture
+def postgres_instance(postgres_conn_str):
+    @contextmanager
+    def _instance(overrides=None):
+        with tempfile.TemporaryDirectory() as temp_dir:
+            with instance_for_test(
+                temp_dir=temp_dir,
+                overrides=merge_dicts(
+                    {
+                        "run_storage": {
+                            "module": "dagster_postgres.run_storage.run_storage",
+                            "class": "PostgresRunStorage",
+                            "config": {"postgres_url": postgres_conn_str},
+                        },
+                        "event_log_storage": {
+                            "module": "dagster_postgres.event_log.event_log",
+                            "class": "PostgresEventLogStorage",
+                            "config": {"postgres_url": postgres_conn_str},
+                        },
+                        "schedule_storage": {
+                            "module": "dagster_postgres.schedule_storage.schedule_storage",
+                            "class": "PostgresScheduleStorage",
+                            "config": {"postgres_url": postgres_conn_str},
+                        },
+                    },
+                    overrides if overrides else {},
+                ),
+            ) as instance:
+                instance.wipe()
+                yield instance
+
+    return _instance
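
These fixtures now ship inside the wheel (see the MANIFEST.in and SOURCES.txt changes), so a downstream test suite can load them as a pytest plugin. A hypothetical usage sketch; the pytest_plugins wiring and the get_runs() assertion are illustrative assumptions, not part of this diff:

# conftest.py (hypothetical downstream test suite)
pytest_plugins = ["dagster_postgres.test_fixtures"]


# test_postgres_instance.py
def test_fresh_instance_has_no_runs(postgres_instance):
    # postgres_instance returns a context-manager factory, as defined in the fixture above.
    with postgres_instance() as instance:
        assert instance.get_runs() == []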

dagster_postgres-0.27.12/dagster_postgres/test_fixtures/docker-compose.yml
@@ -0,0 +1,10 @@
+services:
+  postgres:
+    image: postgres:16
+    container_name: postgres
+    ports:
+      - "5432:5432"
+    environment:
+      POSTGRES_PASSWORD: "test"
+      POSTGRES_USER: "test"
+      POSTGRES_DB: "test"

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres/utils.py
@@ -1,7 +1,8 @@
 import logging
 import time
+from collections.abc import Iterator, Mapping
 from contextlib import contextmanager
-from typing import Any, Callable, Iterator, Mapping, Optional, TypeVar
+from typing import Any, Callable, Optional, TypeVar
 from urllib.parse import quote, urlencode
 
 import alembic.config

dagster_postgres-0.27.12/dagster_postgres/version.py
@@ -0,0 +1 @@
+__version__ = "0.27.12"

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12/dagster_postgres.egg-info}/PKG-INFO
@@ -1,17 +1,28 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: dagster-postgres
-Version: 0.23.11
+Version: 0.27.12
 Summary: A Dagster integration for postgres
 Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-postgres
 Author: Dagster Labs
 Author-email: hello@dagsterlabs.com
 License: Apache-2.0
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.8,<3.13
+Requires-Python: >=3.9,<3.14
 License-File: LICENSE
+Requires-Dist: dagster==1.11.12
+Requires-Dist: psycopg2-binary
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: home-page
+Dynamic: license
+Dynamic: license-file
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/dagster_postgres.egg-info/SOURCES.txt
@@ -20,4 +20,6 @@ dagster_postgres/event_log/event_log.py
 dagster_postgres/run_storage/__init__.py
 dagster_postgres/run_storage/run_storage.py
 dagster_postgres/schedule_storage/__init__.py
-dagster_postgres/schedule_storage/schedule_storage.py
+dagster_postgres/schedule_storage/schedule_storage.py
+dagster_postgres/test_fixtures/__init__.py
+dagster_postgres/test_fixtures/docker-compose.yml

dagster_postgres-0.27.12/dagster_postgres.egg-info/requires.txt
@@ -0,0 +1,2 @@
+dagster==1.11.12
+psycopg2-binary

{dagster-postgres-0.23.11 → dagster_postgres-0.27.12}/setup.py
@@ -1,11 +1,10 @@
 from pathlib import Path
-from typing import Dict
 
 from setuptools import find_packages, setup
 
 
 def get_version() -> str:
-    version: Dict[str, str] = {}
+    version: dict[str, str] = {}
     with open(Path(__file__).parent / "dagster_postgres/version.py", encoding="utf8") as fp:
         exec(fp.read(), version)
 
@@ -24,11 +23,11 @@ setup(
     description="A Dagster integration for postgres",
     url="https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-postgres",
     classifiers=[
-        "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
         "Programming Language :: Python :: 3.11",
         "Programming Language :: Python :: 3.12",
+        "Programming Language :: Python :: 3.13",
         "License :: OSI Approved :: Apache Software License",
         "Operating System :: OS Independent",
     ],
@@ -39,7 +38,7 @@ setup(
         ]
     },
     include_package_data=True,
-    python_requires=">=3.8,<3.13",
-    install_requires=["dagster==1.7.11", "psycopg2-binary"],
+    python_requires=">=3.9,<3.14",
+    install_requires=["dagster==1.11.12", "psycopg2-binary"],
     zip_safe=False,
 )
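
The dagster pin moves from 1.7.11 to 1.11.12 and the supported Python range shifts to 3.9–3.13. After upgrading, the new pins can be sanity-checked at runtime with the standard library; a minimal sketch assuming both distributions are installed in the current environment:

from importlib.metadata import version

assert version("dagster-postgres") == "0.27.12"
assert version("dagster") == "1.11.12"  # matches the install_requires pin above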

dagster-postgres-0.23.11/README.md
@@ -1,4 +0,0 @@
-# dagster-postgres
-
-The docs for `dagster-postgres` can be found
-[here](https://docs.dagster.io/_apidocs/libraries/dagster-postgres).

dagster-postgres-0.23.11/dagster_postgres/__init__.py
@@ -1,15 +0,0 @@
-from dagster._core.libraries import DagsterLibraryRegistry
-
-from .event_log import PostgresEventLogStorage
-from .run_storage import PostgresRunStorage
-from .schedule_storage import PostgresScheduleStorage
-from .storage import DagsterPostgresStorage
-from .version import __version__
-
-DagsterLibraryRegistry.register("dagster-postgres", __version__)
-__all__ = [
-    "DagsterPostgresStorage",
-    "PostgresEventLogStorage",
-    "PostgresRunStorage",
-    "PostgresScheduleStorage",
-]

dagster-postgres-0.23.11/dagster_postgres/event_log/__init__.py
@@ -1 +0,0 @@
-from .event_log import PostgresEventLogStorage as PostgresEventLogStorage

dagster-postgres-0.23.11/dagster_postgres/run_storage/__init__.py
@@ -1 +0,0 @@
-from .run_storage import PostgresRunStorage as PostgresRunStorage

dagster-postgres-0.23.11/dagster_postgres/schedule_storage/__init__.py
@@ -1 +0,0 @@
-from .schedule_storage import PostgresScheduleStorage as PostgresScheduleStorage

dagster-postgres-0.23.11/dagster_postgres/version.py
@@ -1 +0,0 @@
-__version__ = "0.23.11"

dagster-postgres-0.23.11/dagster_postgres.egg-info/requires.txt
@@ -1,2 +0,0 @@
-dagster==1.7.11
-psycopg2-binary