scalable-pypeline 2.1.22.tar.gz → 2.1.24.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this release of scalable-pypeline has been flagged as potentially problematic.
Files changed (50)
  1. {scalable-pypeline-2.1.22/scalable_pypeline.egg-info → scalable-pypeline-2.1.24}/PKG-INFO +1 -1
  2. scalable-pypeline-2.1.24/pypeline/__init__.py +1 -0
  3. scalable-pypeline-2.1.24/pypeline/barrier.py +63 -0
  4. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/constants.py +12 -2
  5. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/dramatiq.py +29 -5
  6. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/utils/dramatiq_utils.py +9 -5
  7. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24/scalable_pypeline.egg-info}/PKG-INFO +1 -1
  8. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/scalable_pypeline.egg-info/requires.txt +1 -1
  9. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/setup.py +1 -1
  10. scalable-pypeline-2.1.22/pypeline/__init__.py +0 -1
  11. scalable-pypeline-2.1.22/pypeline/barrier.py +0 -37
  12. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/LICENSE +0 -0
  13. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/MANIFEST.in +0 -0
  14. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/README.md +0 -0
  15. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/executable_job_config_schema.py +0 -0
  16. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/extensions.py +0 -0
  17. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/flask/__init__.py +0 -0
  18. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/flask/api/__init__.py +0 -0
  19. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/flask/api/pipelines.py +0 -0
  20. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/flask/api/schedules.py +0 -0
  21. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/flask/decorators.py +0 -0
  22. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/flask/flask_pypeline.py +0 -0
  23. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipeline_config_schema.py +0 -0
  24. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipeline_settings_schema.py +0 -0
  25. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/__init__.py +0 -0
  26. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/composition/__init__.py +0 -0
  27. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/composition/parallel_pipeline_composition.py +0 -0
  28. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/composition/pypeline_composition.py +0 -0
  29. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/factory.py +0 -0
  30. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/middleware/__init__.py +0 -0
  31. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/middleware/get_active_worker_id_middleware.py +0 -0
  32. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/middleware/graceful_shutdown_middleware.py +0 -0
  33. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/middleware/parallel_pipeline_middleware.py +0 -0
  34. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pipelines/middleware/pypeline_middleware.py +0 -0
  35. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/pypeline_yaml.py +0 -0
  36. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/schedule_config_schema.py +0 -0
  37. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/utils/__init__.py +0 -0
  38. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/utils/config_utils.py +0 -0
  39. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/utils/executable_job_util.py +0 -0
  40. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/utils/graceful_shutdown_util.py +0 -0
  41. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/utils/module_utils.py +0 -0
  42. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/utils/pipeline_utils.py +0 -0
  43. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/utils/schema_utils.py +0 -0
  44. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/requirements.txt +0 -0
  45. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/scalable_pypeline.egg-info/SOURCES.txt +0 -0
  46. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/scalable_pypeline.egg-info/dependency_links.txt +0 -0
  47. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/scalable_pypeline.egg-info/entry_points.txt +0 -0
  48. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/scalable_pypeline.egg-info/top_level.txt +0 -0
  49. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/setup.cfg +0 -0
  50. {scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/tests/fixtures/__init__.py +0 -0

{scalable-pypeline-2.1.22/scalable_pypeline.egg-info → scalable-pypeline-2.1.24}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: scalable-pypeline
-Version: 2.1.22
+Version: 2.1.24
 Summary: PypeLine - Python pipelines for the Real World
 Home-page: https://gitlab.com/bravos2/pypeline
 Author: Bravos Power Corporation

scalable-pypeline-2.1.24/pypeline/__init__.py
@@ -0,0 +1 @@
+__version__ = "2.1.24"

scalable-pypeline-2.1.24/pypeline/barrier.py
@@ -0,0 +1,63 @@
+import time
+import redis
+from redis.sentinel import Sentinel
+from urllib.parse import urlparse
+from constants import (
+    REDIS_SENTINEL_MASTER_NAME,
+    DEFAULT_REDIS_SOCKET_CONNECT_TIMEOUT,
+    DEFAULT_REDIS_SOCKET_TIMEOUT,
+    DEFAULT_REDIS_RETRY_ON_TIMEOUT,
+    DEFAULT_REDIS_SOCKET_KEEPALIVE,
+    DEFAULT_REDIS_HEALTH_CHECK_INTERVAL,
+)
+
+
+class LockingParallelBarrier:
+    def __init__(self, redis_url, task_key="task_counter", lock_key="task_lock"):
+        # Connect to Redis using the provided URL
+        if REDIS_SENTINEL_MASTER_NAME is not None:
+            parsed_redis_url = urlparse(redis_url)
+            redis_sentinel = Sentinel(
+                sentinels=[(parsed_redis_url.hostname, parsed_redis_url.port)],
+            )
+            self.redis = redis_sentinel.master_for(
+                REDIS_SENTINEL_MASTER_NAME,
+                db=int(parsed_redis_url.path[1]) if parsed_redis_url.path else 0,
+                password=parsed_redis_url.password,
+                socket_connect_timeout=DEFAULT_REDIS_SOCKET_CONNECT_TIMEOUT,
+                socket_timeout=DEFAULT_REDIS_SOCKET_TIMEOUT,
+                retry_on_timeout=DEFAULT_REDIS_RETRY_ON_TIMEOUT,
+                socket_keepalive=DEFAULT_REDIS_SOCKET_KEEPALIVE,
+                health_check_interval=DEFAULT_REDIS_HEALTH_CHECK_INTERVAL,
+                decode_responses=True,
+            )
+        else:
+            self.redis = redis.StrictRedis.from_url(redis_url, decode_responses=True)
+        self.task_key = task_key
+        self.lock_key = lock_key
+
+    def acquire_lock(self, timeout=5):
+        """Acquire a lock using Redis."""
+        while True:
+            if self.redis.set(self.lock_key, "locked", nx=True, ex=timeout):
+                return True
+            time.sleep(0.1)
+
+    def release_lock(self):
+        """Release the lock in Redis."""
+        self.redis.delete(self.lock_key)
+
+    def set_task_count(self, count):
+        """Initialize the task counter in Redis."""
+        self.redis.set(self.task_key, count)
+
+    def decrement_task_count(self):
+        """Decrement the task counter in Redis."""
+        return self.redis.decr(self.task_key)
+
+    def task_exists(self):
+        return self.redis.exists(self.task_key)
+
+    def get_task_count(self):
+        """Get the current value of the task counter."""
+        return int(self.redis.get(self.task_key) or 0)
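
Usage note (not part of the diff): a minimal sketch of how the new LockingParallelBarrier is meant to be driven. One producer sets the task count, each parallel worker decrements it under the lock, and whichever worker reaches zero triggers the follow-on work. The import path, Redis URL, and key names below are assumptions (the hunk above imports from a bare constants module, so the packaged import path may differ).

from pypeline.barrier import LockingParallelBarrier  # assumed import path

REDIS_URL = "redis://localhost:6379/0"  # placeholder; normally taken from the environment

barrier = LockingParallelBarrier(
    REDIS_URL,
    task_key="pipeline-123:tasks",  # hypothetical keys scoped to one pipeline run
    lock_key="pipeline-123:lock",
)

# Producer side: record how many parallel tasks must complete.
barrier.acquire_lock(timeout=5)
try:
    barrier.set_task_count(3)
finally:
    barrier.release_lock()

# Worker side (each of the 3 tasks): decrement the counter under the lock.
barrier.acquire_lock(timeout=5)
try:
    remaining = barrier.decrement_task_count()
finally:
    barrier.release_lock()

if remaining == 0:
    print("last task finished; safe to run the callback")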

{scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/constants.py
@@ -1,5 +1,4 @@
-""" Pypeline Constants
-"""
+"""Pypeline Constants"""
 
 import os
 
@@ -15,6 +14,7 @@ DEFAULT_BROKER_CALLABLE = os.environ.get(
 # Pypeline broker connections
 RABBIT_URL = os.environ.get("RABBIT_URL", "amqp://admin:password@127.0.0.1:5672")
 REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/0")
+REDIS_SENTINEL_MASTER_NAME = os.environ.get("REDIS_SENTINEL_MASTER_NAME", None)
 
 # Pypeline task defaults
 PARALLEL_PIPELINE_CALLBACK_BARRIER_TTL = int(
@@ -39,6 +39,16 @@ DEFAULT_BROKER_BLOCKED_CONNECTION_TIMEOUT = int(
 DEFAULT_BROKER_HEARTBEAT_TIMEOUT = int(
     os.getenv("DEFAULT_BROKER_HEARTBEAT_TIMEOUT", 300000)
 )
+DEFAULT_REDIS_SOCKET_CONNECT_TIMEOUT = int(
+    os.getenv("DEFAULT_REDIS_SOCKET_CONNECT_TIMEOUT", 1)
+)
+DEFAULT_REDIS_SOCKET_TIMEOUT = int(os.getenv("DEFAULT_REDIS_SOCKET_TIMEOUT", 2))
+DEFAULT_REDIS_RETRY_ON_TIMEOUT = bool(os.getenv("DEFAULT_REDIS_RETRY_ON_TIMEOUT", True))
+DEFAULT_REDIS_SOCKET_KEEPALIVE = bool(os.getenv("DEFAULT_REDIS_SOCKET_KEEPALIVE", True))
+DEFAULT_REDIS_HEALTH_CHECK_INTERVAL = int(
+    os.getenv("DEFAULT_REDIS_HEALTH_CHECK_INTERVAL", 30)
+)
+
 MESSAGE_BROKER = os.getenv("MESSAGE_BROKER", "RABBITMQ")
 
 MS_IN_SECONDS = 1000
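
Configuration note (not part of the diff): the Sentinel code path in barrier.py and dramatiq.py is switched on purely through these environment variables; nothing changes unless REDIS_SENTINEL_MASTER_NAME is set. A hedged sketch of a plausible setup follows. The host, port, password, and master name are placeholders, and the variables must be set before pypeline.constants is imported, since they are read at import time.

import os

# Point REDIS_URL at a Sentinel endpoint; urlparse() in barrier.py/dramatiq.py pulls the
# password and db index out of this URL, while the host/port become the sentinel address.
os.environ["REDIS_URL"] = "redis://:s3cret@sentinel-host:26379/0"  # placeholder
os.environ["REDIS_SENTINEL_MASTER_NAME"] = "mymaster"              # placeholder

# Optional knobs added in this release, shown with their defaults.
os.environ.setdefault("DEFAULT_REDIS_SOCKET_CONNECT_TIMEOUT", "1")
os.environ.setdefault("DEFAULT_REDIS_SOCKET_TIMEOUT", "2")
os.environ.setdefault("DEFAULT_REDIS_HEALTH_CHECK_INTERVAL", "30")

# Caveat: DEFAULT_REDIS_RETRY_ON_TIMEOUT and DEFAULT_REDIS_SOCKET_KEEPALIVE are wrapped
# in bool(), so any non-empty string, including "False", still evaluates to True.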

{scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/dramatiq.py
@@ -8,7 +8,7 @@ import click
 from urllib.parse import urlparse
 
 from dramatiq.brokers.redis import RedisBroker
-
+from redis.sentinel import Sentinel
 from pypeline.extensions import pypeline_config
 from warnings import warn
 from apscheduler.schedulers.blocking import BlockingScheduler
@@ -30,6 +30,7 @@ from flask.cli import with_appcontext
 
 from pypeline.constants import (
     REDIS_URL,
+    REDIS_SENTINEL_MASTER_NAME,
     RABBIT_URL,
     DEFAULT_BROKER_CALLABLE,
     DEFAULT_BROKER_CONNECTION_HEARTBEAT,
@@ -37,6 +38,11 @@ from pypeline.constants import (
     DEFAULT_BROKER_CONNECTION_ATTEMPTS,
     MESSAGE_BROKER,
     DEFAULT_BROKER_HEARTBEAT_TIMEOUT,
+    DEFAULT_REDIS_SOCKET_CONNECT_TIMEOUT,
+    DEFAULT_REDIS_SOCKET_TIMEOUT,
+    DEFAULT_REDIS_RETRY_ON_TIMEOUT,
+    DEFAULT_REDIS_SOCKET_KEEPALIVE,
+    DEFAULT_REDIS_HEALTH_CHECK_INTERVAL,
 )
 from pypeline.pipelines.middleware.get_active_worker_id_middleware import (
     GetActiveWorkerIdMiddleware,
@@ -45,7 +51,8 @@ from pypeline.pipelines.middleware.parallel_pipeline_middleware import ParallelP
 from pypeline.pipelines.middleware.pypeline_middleware import PypelineMiddleware
 from pypeline.utils.config_utils import (
     retrieve_latest_schedule_config,
-    get_service_config_for_worker, retrieve_executable_job_config,
+    get_service_config_for_worker,
+    retrieve_executable_job_config,
 )
 from pypeline.utils.dramatiq_utils import (
     guess_code_directory,
@@ -62,7 +69,23 @@ logger = logging.getLogger(__name__)
 
 
 def configure_default_broker(broker: Broker = None):
-    redis_backend = RedisBackend(url=REDIS_URL)
+    redis_client = None
+    if REDIS_SENTINEL_MASTER_NAME is not None:
+        parsed_redis_url = urlparse(REDIS_URL)
+        redis_sentinel = Sentinel(
+            sentinels=[(parsed_redis_url.hostname, parsed_redis_url.port)],
+        )
+        redis_client = redis_sentinel.master_for(
+            REDIS_SENTINEL_MASTER_NAME,
+            db=int(parsed_redis_url.path[1]) if parsed_redis_url.path else 0,
+            password=parsed_redis_url.password,
+            socket_connect_timeout=DEFAULT_REDIS_SOCKET_CONNECT_TIMEOUT,
+            socket_timeout=DEFAULT_REDIS_SOCKET_TIMEOUT,
+            retry_on_timeout=DEFAULT_REDIS_RETRY_ON_TIMEOUT,
+            socket_keepalive=DEFAULT_REDIS_SOCKET_KEEPALIVE,
+            health_check_interval=DEFAULT_REDIS_HEALTH_CHECK_INTERVAL,
+        )
+    redis_backend = RedisBackend(client=redis_client, url=REDIS_URL)
 
     if MESSAGE_BROKER == "RABBITMQ":
         parsed_url = urlparse(RABBIT_URL)
@@ -85,7 +108,9 @@ def configure_default_broker(broker: Broker = None):
         broker
         if broker is not None
         else RedisBroker(
-            url=REDIS_URL, heartbeat_timeout=DEFAULT_BROKER_HEARTBEAT_TIMEOUT
+            client=redis_client,
+            url=REDIS_URL,
+            heartbeat_timeout=DEFAULT_BROKER_HEARTBEAT_TIMEOUT,
         )
     )
 
@@ -169,7 +194,6 @@ def register_actors_for_workers(broker: Broker):
             logger.exception(f"Unable to add a task to dramatiq: {e}")
 
 
-
 class Dramatiq:
     """Flask extension bridging Dramatiq broker and Flask app.
 

{scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/pypeline/utils/dramatiq_utils.py
@@ -113,11 +113,15 @@ class LazyActor(object):
         return getattr(self.actor, name)
 
     def register(self, broker):
-        self.actor = register_actor(
-            actor_name=f"{self.fn.__module__}.{self.fn.__name__}-{self.kw['queue_name']}",
-            broker=broker,
-            **self.kw,
-        )(ensure_return_value(default_value=True)(self.fn))
+        actor_name = f"{self.fn.__module__}.{self.fn.__name__}-{self.kw['queue_name']}"
+        if actor_name in broker.actors:
+            self.actor = broker.actors[actor_name]
+        else:
+            self.actor = register_actor(
+                actor_name=actor_name,
+                broker=broker,
+                **self.kw,
+            )(ensure_return_value(default_value=True)(self.fn))
 
     # Next is regular actor API.
     def send(self, *a, **kw):
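
Behavior note (not part of the diff): LazyActor.register() is now idempotent. Registering the same function/queue pair twice against one broker reuses the existing actor instead of calling register_actor() again. A small sketch of the same guard using dramatiq's StubBroker; the task function and queue name are hypothetical, and only the actor-name format comes from the hunk above.

import dramatiq
from dramatiq.brokers.stub import StubBroker

broker = StubBroker()

def my_task():  # hypothetical task function
    return True

# Actor names follow the "<module>.<function>-<queue_name>" pattern used by LazyActor.
actor_name = f"{my_task.__module__}.{my_task.__name__}-default"

if actor_name in broker.actors:
    actor = broker.actors[actor_name]  # reuse: this name is already registered
else:
    actor = dramatiq.actor(            # first registration declares it on the broker
        my_task, actor_name=actor_name, queue_name="default", broker=broker
    )

assert broker.actors[actor_name] is actor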

{scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24/scalable_pypeline.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: scalable-pypeline
-Version: 2.1.22
+Version: 2.1.24
 Summary: PypeLine - Python pipelines for the Real World
 Home-page: https://gitlab.com/bravos2/pypeline
 Author: Bravos Power Corporation

{scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/scalable_pypeline.egg-info/requires.txt
@@ -20,7 +20,7 @@ pytest-cov<3,>=2.6.1
 tox<4,>=3.14.1
 mock<2,>=1
 responses<0.11,>=0.10.16
-fakeredis<3,>=2.10.3
+fakeredis<2.31,>=2.10.3
 
 [web]
 gunicorn

{scalable-pypeline-2.1.22 → scalable-pypeline-2.1.24}/setup.py
@@ -190,7 +190,7 @@ setup(
             "tox>=3.14.1,<4",
             "mock>=1,<2",
             "responses>=0.10.16,<0.11",
-            "fakeredis>=2.10.3,<3",
+            "fakeredis>=2.10.3,<2.31",  # fakeredis version compatible with redis 4.x
         ],
     },
     entry_points={

scalable-pypeline-2.1.22/pypeline/__init__.py
@@ -1 +0,0 @@
-__version__ = "2.1.22"

scalable-pypeline-2.1.22/pypeline/barrier.py
@@ -1,37 +0,0 @@
-import time
-
-import redis
-
-
-class LockingParallelBarrier:
-    def __init__(self, redis_url, task_key="task_counter", lock_key="task_lock"):
-        # Connect to Redis using the provided URL
-        self.redis = redis.StrictRedis.from_url(redis_url, decode_responses=True)
-        self.task_key = task_key
-        self.lock_key = lock_key
-
-    def acquire_lock(self, timeout=5):
-        """Acquire a lock using Redis."""
-        while True:
-            if self.redis.set(self.lock_key, "locked", nx=True, ex=timeout):
-                return True
-            time.sleep(0.1)
-
-    def release_lock(self):
-        """Release the lock in Redis."""
-        self.redis.delete(self.lock_key)
-
-    def set_task_count(self, count):
-        """Initialize the task counter in Redis."""
-        self.redis.set(self.task_key, count)
-
-    def decrement_task_count(self):
-        """Decrement the task counter in Redis."""
-        return self.redis.decr(self.task_key)
-
-    def task_exists(self):
-        return self.redis.exists(self.task_key)
-
-    def get_task_count(self):
-        """Get the current value of the task counter."""
-        return int(self.redis.get(self.task_key) or 0)