schd 0.0.12__py3-none-any.whl → 0.0.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only; it reflects the changes between package versions as they appear in their respective public registries.
- schd/config.py +2 -0
- schd/scheduler.py +3 -2
- schd/schedulers/remote.py +31 -8
- {schd-0.0.12.dist-info → schd-0.0.13.dist-info}/METADATA +1 -1
- {schd-0.0.12.dist-info → schd-0.0.13.dist-info}/RECORD +8 -8
- {schd-0.0.12.dist-info → schd-0.0.13.dist-info}/WHEEL +0 -0
- {schd-0.0.12.dist-info → schd-0.0.13.dist-info}/entry_points.txt +0 -0
- {schd-0.0.12.dist-info → schd-0.0.13.dist-info}/top_level.txt +0 -0
schd/config.py
CHANGED
schd/scheduler.py
CHANGED
@@ -195,7 +195,7 @@ class LocalScheduler:
|
|
195
195
|
async def init(self):
|
196
196
|
pass
|
197
197
|
|
198
|
-
async def add_job(self, job: Job,
|
198
|
+
async def add_job(self, job: Job, job_name: str, job_config:JobConfig) -> None:
|
199
199
|
"""
|
200
200
|
Add a job to the scheduler.
|
201
201
|
|
@@ -205,6 +205,7 @@ class LocalScheduler:
|
|
205
205
|
"""
|
206
206
|
self._jobs[job_name] = job
|
207
207
|
try:
|
208
|
+
cron_expression = job_config.cron
|
208
209
|
cron_trigger = CronTrigger.from_crontab(cron_expression)
|
209
210
|
self.scheduler.add_job(self.execute_job, cron_trigger, kwargs={'job_name':job_name})
|
210
211
|
logger.info(f"Job '{job_name or job.__class__.__name__}' added with cron expression: {cron_expression}")
|
@@ -304,7 +305,7 @@ async def run_daemon(config_file=None):
|
|
304
305
|
job_class_name = job_config.cls
|
305
306
|
job_cron = job_config.cron
|
306
307
|
job = build_job(job_name, job_class_name, job_config)
|
307
|
-
await scheduler.add_job(job,
|
308
|
+
await scheduler.add_job(job, job_name, job_config)
|
308
309
|
logger.info('job added, %s', job_name)
|
309
310
|
|
310
311
|
logger.info('scheduler starting.')
|
schd/schedulers/remote.py
CHANGED
@@ -3,10 +3,11 @@ from contextlib import redirect_stdout
|
|
3
3
|
import io
|
4
4
|
import json
|
5
5
|
import os
|
6
|
-
from typing import Dict
|
6
|
+
from typing import Dict, Tuple
|
7
7
|
from urllib.parse import urljoin
|
8
8
|
import aiohttp
|
9
9
|
import aiohttp.client_exceptions
|
10
|
+
from schd.config import JobConfig
|
10
11
|
from schd.job import JobContext, Job
|
11
12
|
|
12
13
|
import logging
|
@@ -25,11 +26,14 @@ class RemoteApiClient:
|
|
25
26
|
response.raise_for_status()
|
26
27
|
result = await response.json()
|
27
28
|
|
28
|
-
async def register_job(self, worker_name, job_name, cron):
|
29
|
+
async def register_job(self, worker_name, job_name, cron, timezone=None):
|
29
30
|
url = urljoin(self._base_url, f'/api/workers/{worker_name}/jobs/{job_name}')
|
30
31
|
post_data = {
|
31
32
|
'cron': cron,
|
32
33
|
}
|
34
|
+
if timezone:
|
35
|
+
post_data['timezone'] = timezone
|
36
|
+
|
33
37
|
async with aiohttp.ClientSession() as session:
|
34
38
|
async with session.put(url, json=post_data) as response:
|
35
39
|
response.raise_for_status()
|
@@ -80,16 +84,23 @@ class RemoteScheduler:
|
|
80
84
|
def __init__(self, worker_name:str, remote_host:str):
|
81
85
|
self.client = RemoteApiClient(remote_host)
|
82
86
|
self._worker_name = worker_name
|
83
|
-
self._jobs:"Dict[str,Job]" = {}
|
87
|
+
self._jobs:"Dict[str,Tuple[Job,str]]" = {}
|
84
88
|
self._loop_task = None
|
85
89
|
self._loop = asyncio.get_event_loop()
|
90
|
+
self.queue_semaphores = {}
|
86
91
|
|
87
92
|
async def init(self):
|
88
93
|
await self.client.register_worker(self._worker_name)
|
89
94
|
|
90
|
-
async def add_job(self, job:Job,
|
91
|
-
|
92
|
-
|
95
|
+
async def add_job(self, job:Job, job_name:str, job_config:JobConfig):
|
96
|
+
cron = job_config.cron
|
97
|
+
queue_name = job_config.queue or ''
|
98
|
+
await self.client.register_job(self._worker_name, job_name=job_name, cron=cron, timezone=job_config.timezone)
|
99
|
+
self._jobs[job_name] = (job, queue_name)
|
100
|
+
if queue_name not in self.queue_semaphores:
|
101
|
+
# each queue has a max concurrency of 1
|
102
|
+
max_conc = 1
|
103
|
+
self.queue_semaphores[queue_name] = asyncio.Semaphore(max_conc)
|
93
104
|
|
94
105
|
async def start_main_loop(self):
|
95
106
|
while True:
|
@@ -97,11 +108,19 @@ class RemoteScheduler:
|
|
97
108
|
try:
|
98
109
|
async for event in self.client.subscribe_worker_eventstream(self._worker_name):
|
99
110
|
print(event)
|
100
|
-
|
111
|
+
job_name = event['data']['job_name']
|
112
|
+
instance_id = event['data']['id']
|
113
|
+
_, queue_name = self._jobs[job_name]
|
114
|
+
# Queue concurrency control
|
115
|
+
semaphore = self.queue_semaphores[queue_name]
|
116
|
+
self._loop.create_task(self._run_with_semaphore(semaphore, job_name, instance_id))
|
117
|
+
# await self.execute_task(event['data']['job_name'], event['data']['id'])
|
101
118
|
except aiohttp.client_exceptions.ClientPayloadError:
|
102
119
|
logger.info('connection lost')
|
120
|
+
await asyncio.sleep(1)
|
103
121
|
except aiohttp.client_exceptions.SocketTimeoutError:
|
104
122
|
logger.info('SocketTimeoutError')
|
123
|
+
await asyncio.sleep(1)
|
105
124
|
except aiohttp.client_exceptions.ClientConnectorError:
|
106
125
|
# cannot connect, try later
|
107
126
|
logger.debug('connect failed, ClientConnectorError, try later.')
|
@@ -115,7 +134,7 @@ class RemoteScheduler:
|
|
115
134
|
self._loop_task = self._loop.create_task(self.start_main_loop())
|
116
135
|
|
117
136
|
async def execute_task(self, job_name, instance_id:int):
|
118
|
-
job = self._jobs[job_name]
|
137
|
+
job, _ = self._jobs[job_name]
|
119
138
|
logfile_dir = f'joblog/{instance_id}'
|
120
139
|
if not os.path.exists(logfile_dir):
|
121
140
|
os.makedirs(logfile_dir)
|
@@ -148,3 +167,7 @@ class RemoteScheduler:
|
|
148
167
|
output_stream.close()
|
149
168
|
await self.client.commit_job_log(self._worker_name, job_name, instance_id, logfile_path)
|
150
169
|
await self.client.update_job_instance(self._worker_name, job_name, instance_id, status='COMPLETED', ret_code=ret_code)
|
170
|
+
|
171
|
+
async def _run_with_semaphore(self, semaphore, job_name, instance_id):
|
172
|
+
async with semaphore:
|
173
|
+
await self.execute_task(job_name, instance_id)
|
@@ -1,7 +1,7 @@
|
|
1
1
|
schd/__init__.py,sha256=ZOKi_EM_Ao1bVuMO_3kKsyHwO0zn1I0hNk7eirjowrA,24
|
2
|
-
schd/config.py,sha256=
|
2
|
+
schd/config.py,sha256=YBSRiXZApC_ylPYUM4cwTU7FjVqMHwG3vPy-F6SA95k,3930
|
3
3
|
schd/job.py,sha256=AoW-2W1hRY_O4nz_pKukgmEXYkjlP2XmYimyoGCs-Bw,614
|
4
|
-
schd/scheduler.py,sha256=
|
4
|
+
schd/scheduler.py,sha256=EYIU7aX8qs-RYS_eXJc-JZU3e1T-mTMzkEv51b-AXgM,12931
|
5
5
|
schd/util.py,sha256=NH4EqIns1Y01yz1Rf3cw-tlKSLc-XNuS9hHbAdq5D_I,592
|
6
6
|
schd/cmds/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
7
7
|
schd/cmds/base.py,sha256=ZnNcJozQFLbpYyNp8dhHzm3BzFQa0hm6KyCC6URfueY,122
|
@@ -10,9 +10,9 @@ schd/cmds/jobs.py,sha256=843M6rcMjqmm8yevHF4QsJxc0fw8LQfSX7OEA_OH0Dg,590
|
|
10
10
|
schd/cmds/run.py,sha256=T7WeTMf6-crmC8fD6YoMsp0nQbKemNTf4_Q9a-b789w,853
|
11
11
|
schd/cmds/schd.py,sha256=vOlfQCQT81KhLMvlP3tlymEWpxmqlBOa5vnJoeFXVlw,996
|
12
12
|
schd/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
13
|
-
schd/schedulers/remote.py,sha256=
|
14
|
-
schd-0.0.
|
15
|
-
schd-0.0.
|
16
|
-
schd-0.0.
|
17
|
-
schd-0.0.
|
18
|
-
schd-0.0.
|
13
|
+
schd/schedulers/remote.py,sha256=QMALl5VKAC1YzVZUF0JBYYF2XCuF4rwd5OBpn3mKJQY,7736
|
14
|
+
schd-0.0.13.dist-info/METADATA,sha256=-WBWRbJgcsJ-lNKcbKLJDYKLrd1f5tv6Mri5CqTxNEI,195
|
15
|
+
schd-0.0.13.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
|
16
|
+
schd-0.0.13.dist-info/entry_points.txt,sha256=VvUhIaucvHlggoz-4lWtQgXbDwoWH9x-iT0QOUHEMyI,45
|
17
|
+
schd-0.0.13.dist-info/top_level.txt,sha256=Vojim8xSOsYyQHrZBNhx7n0F7WqbEJ2SeBFAvEnrJ_U,5
|
18
|
+
schd-0.0.13.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|