schd-0.0.10-py3-none-any.whl → schd-0.0.11-py3-none-any.whl

This diff shows the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two published versions.
schd/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = '0.0.10'
+ __version__ = '0.0.11'
schd/cmds/daemon.py CHANGED
@@ -1,3 +1,4 @@
+ import asyncio
  import logging
  import sys
  from .base import CommandBase
@@ -22,4 +23,4 @@ class DaemonCommand(CommandBase):
              log_stream = sys.stdout

          logging.basicConfig(level=logging.INFO, format='%(asctime)s %(name)s %(levelname)s %(message)s', datefmt='%Y-%m-%d %H:%M:%S', stream=log_stream)
-         run_daemon(config_file)
+         asyncio.run(run_daemon(config_file))
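
run_daemon became a coroutine in this release, so the daemon command now wraps it in asyncio.run. A minimal sketch of invoking it directly, roughly equivalent to what the command does:

    import asyncio
    from schd.scheduler import run_daemon

    # run_daemon is now a coroutine; with None it resolves the config path from
    # the SCHD_CONFIG environment variable, falling back to conf/schd.yaml.
    asyncio.run(run_daemon(None))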
schd/config.py ADDED
@@ -0,0 +1,99 @@
+ from dataclasses import dataclass, field, fields, is_dataclass
+ import os
+ from typing import Any, Dict, Optional, Type, TypeVar, Union, get_args, get_origin, get_type_hints
+ import yaml
+
+ T = TypeVar("T", bound="ConfigValue")
+
+
+ class ConfigValue:
+     """
+     ConfigValue present some config settings.
+     A configvalue class should also be decorated as @dataclass.
+     A ConfigValue class contains some fields, for example:
+
+     @dataclass
+     class SimpleIntValue(ConfigValue):
+         a: int
+
+     User can call derived class 's from_dict class method to construct an instance.
+     config = SimpleIntValue.from_dict({'a': 1})
+     """
+     @classmethod
+     def from_dict(cls: Type[T], data: Dict[str, Any]) -> T:
+         """
+         Creates an instance of the class using the fields specified in the dictionary.
+         Handles nested fields that are also derived from ConfigValue.
+         """
+         type_hints = get_type_hints(cls)
+         init_data:Dict[str,Any] = {}
+         if not is_dataclass(cls):
+             raise TypeError('class %s is not dataclass' % cls)
+
+         for f in fields(cls):
+             field_name = f.name
+             json_key = f.metadata.get("json", f.name)
+             field_type = type_hints[field_name]
+             origin = get_origin(field_type)
+             args = get_args(field_type)
+
+             if json_key in data:
+                 value = data[json_key]
+                 # Handle nested ConfigValue objects
+                 if isinstance(field_type, type) and issubclass(field_type, ConfigValue):
+                     init_data[field_name] = field_type.from_dict(value)
+                 # Handle lists of ConfigValue objects List[ConfigValue]
+                 elif origin is list and issubclass(args[0], ConfigValue):
+                     nested_type = field_type.__args__[0]
+                     init_data[field_name] = [nested_type.from_dict(item) for item in value]
+                 # Handle Optional[ConfigValue]
+                 elif origin is Union and type(None) in args:
+                     actual_type = next((arg for arg in args if arg is not type(None)), None)
+                     if actual_type and issubclass(actual_type, ConfigValue):
+                         init_data[field_name] = actual_type.from_dict(value) if value is not None else None
+                     else:
+                         init_data[field_name] = value
+                 # Case 4: Dict[str, ConfigValue]
+                 elif origin is dict and issubclass(args[1], ConfigValue):
+                     value_type = args[1]
+                     init_data[field_name] = {
+                         k: value_type.from_dict(v) for k, v in value.items()
+                     }
+                 else:
+                     init_data[field_name] = value
+         return cls(**init_data)
+
+
+ @dataclass
+ class JobConfig(ConfigValue):
+     cls: str = field(metadata={"json": "class"})
+     cron: str
+     cmd: Optional[str] = None
+     params: dict = field(default_factory=dict)
+
+
+ @dataclass
+ class SchdConfig(ConfigValue):
+     jobs: Dict[str, JobConfig] = field(default_factory=dict)
+     scheduler_cls: str = 'LocalScheduler'
+     scheduler_remote_host: Optional[str] = None
+     worker_name: str = 'local'
+
+     def __getitem__(self,key):
+         # compatible to old fashion config['key']
+         if hasattr(self, key):
+             return getattr(self,key)
+         else:
+             raise KeyError(key)
+
+
+ def read_config(config_file=None) -> SchdConfig:
+     if config_file is None and 'SCHD_CONFIG' in os.environ:
+         config_file = os.environ['SCHD_CONFIG']
+
+     if config_file is None:
+         config_file = 'conf/schd.yaml'
+
+     with open(config_file, 'r', encoding='utf8') as f:
+         config = SchdConfig.from_dict(yaml.load(f, Loader=yaml.FullLoader))
+     return config
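
The new config module replaces the raw dict that yaml.load used to return with typed dataclasses. from_dict dispatches on type hints: plain values are assigned as-is, while nested ConfigValue subclasses, List[...], Optional[...], and Dict[str, ...] of ConfigValue are constructed recursively. A minimal sketch of the round trip; the job entry itself is hypothetical:

    from schd.config import SchdConfig

    # The Dict[str, JobConfig] hint on SchdConfig.jobs makes from_dict recurse
    # into each mapping value; 'class' maps to the cls field via metadata.
    config = SchdConfig.from_dict({
        'jobs': {
            'cleanup': {
                'class': 'schd.scheduler:CommandJob',
                'cron': '0 2 * * *',
                'cmd': 'echo cleanup',
            },
        },
        'worker_name': 'worker1',
    })
    assert config.jobs['cleanup'].cls == 'schd.scheduler:CommandJob'
    assert config['worker_name'] == 'worker1'   # old-style config['key'] access still works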
schd/job.py ADDED
@@ -0,0 +1,25 @@
+ from typing import Protocol, Union
+
+
+ class JobExecutionResult(Protocol):
+     def get_code(self) -> int:...
+
+
+ class JobContext:
+     def __init__(self, job_name:str, logger=None, stdout=None, stderr=None):
+         self.job_name = job_name
+         self.logger = logger
+         self.output_to_console = False
+         self.stdout = stdout
+         self.stderr = stderr
+
+
+ class Job(Protocol):
+     """
+     Protocol to represent a job structure.
+     """
+     def execute(self, context:JobContext) -> Union[JobExecutionResult, int, None]:
+         """
+         execute the job
+         """
+         pass
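
Job is a structural Protocol, so any class with a matching execute method qualifies without inheriting from it, and execute may return None (treated as exit code 0 by the schedulers), an int, or any object exposing get_code(). A minimal sketch of a conforming job; TouchJob and its path are hypothetical:

    from schd.job import Job, JobContext

    # Structural typing: no base class needed; returning None means exit code 0.
    class TouchJob:
        def __init__(self, path: str = '/tmp/heartbeat'):
            self.path = path

        def execute(self, context: JobContext) -> int:
            with open(self.path, 'w') as f:
                f.write(context.job_name)
            return 0

    job: Job = TouchJob()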
schd/scheduler.py CHANGED
@@ -1,9 +1,12 @@
  import argparse
+ import asyncio
+ from contextlib import redirect_stdout
  import logging
  import importlib
+ import io
  import os
  import sys
- from typing import Any
+ from typing import Any, Optional, Dict
  import smtplib
  from email.mime.text import MIMEText
  from email.header import Header
@@ -12,33 +15,41 @@ import tempfile
  from apscheduler.schedulers.blocking import BlockingScheduler
  from apscheduler.triggers.cron import CronTrigger
  from apscheduler.executors.pool import ThreadPoolExecutor
- import yaml
  from schd import __version__ as schd_version
+ from schd.schedulers.remote import RemoteScheduler
  from schd.util import ensure_bool
-
+ from schd.job import Job, JobContext, JobExecutionResult
+ from schd.config import JobConfig, SchdConfig, read_config

  logger = logging.getLogger(__name__)


- def build_job(job_name, job_class_name, config):
-     if not '.' in job_class_name:
+ class DefaultJobExecutionResult(JobExecutionResult):
+     def __init__(self, code:int, log:str):
+         self.code = code
+         self.log = log
+
+
+ def build_job(job_name, job_class_name, config:JobConfig)->Job:
+     if not ':' in job_class_name:
          module = sys.modules[__name__]
          job_cls = getattr(module, job_class_name)
      else:
-         module_name, cls_name = job_class_name.rsplit('.', 1)
+         # format "packagea.moduleb:ClassC"
+         module_name, cls_name = job_class_name.rsplit(':', 1)
          m = importlib.import_module(module_name)
          job_cls = getattr(m, cls_name)

      if hasattr(job_cls, 'from_settings'):
          job = job_cls.from_settings(job_name=job_name, config=config)
      else:
-         job = job_cls(**config)
+         job = job_cls(**config.params)

      return job


  class JobFailedException(Exception):
-     def __init__(self, job_name, error_message, inner_ex:"Exception"=None):
+     def __init__(self, job_name, error_message, inner_ex:"Optional[Exception]"=None):
          self.job_name = job_name
          self.error_message = error_message
          self.inner_ex = inner_ex
@@ -51,12 +62,6 @@ class CommandJobFailedException(JobFailedException):
          self.output = output


- class JobContext:
-     def __init__(self, job_name):
-         self.job_name = job_name
-         self.output_to_console = False
-
-
  class CommandJob:
      def __init__(self, cmd, job_name=None):
          self.cmd = cmd
@@ -65,9 +70,30 @@ class CommandJob:

      @classmethod
      def from_settings(cls, job_name=None, config=None, **kwargs):
-         return cls(cmd=config['cmd'], job_name=job_name)
+         # compatible with old cmd field
+         command = config.params.get('cmd') or config.cmd
+         return cls(cmd=command, job_name=job_name)

-     def __call__(self, context:"JobContext"=None, **kwds: Any) -> Any:
+     def execute(self, context:JobContext) -> int:
+         process = subprocess.Popen(
+             self.cmd,
+             shell=True,
+             env=os.environ,
+             stdout=subprocess.PIPE,
+             stderr=subprocess.PIPE,
+             text=True
+         )
+
+         stdout, stderr = process.communicate()
+         if context.stdout:
+             context.stdout.write(stdout)
+             context.stdout.write(stderr)
+
+         ret_code = process.wait()
+         return ret_code
+
+
+     def __call__(self, context:"Optional[JobContext]"=None, **kwds: Any) -> Any:
          output_to_console = False
          if context is not None:
              output_to_console = context.output_to_console
@@ -120,7 +146,6 @@ class EmailErrorNotifier:

      def __call__(self, ex:"Exception"):
          if isinstance(ex, JobFailedException):
-             ex: "JobFailedException" = ex
              job_name = ex.job_name
              error_message = str(ex)
          else:
@@ -129,9 +154,9 @@ class EmailErrorNotifier:

          mail_subject = f'Schd job failed. {job_name}'
          msg = MIMEText(error_message, 'plain', 'utf8')
-         msg['From'] = Header(self.from_addr)
-         msg['To'] = Header(self.to_addr)
-         msg['Subject'] = Header(mail_subject)
+         msg['From'] = str(Header(self.from_addr, 'utf8'))
+         msg['To'] = str(Header(self.to_addr, 'utf8'))
+         msg['Subject'] = str(Header(mail_subject, 'utf8'))

          try:
              smtp = smtplib.SMTP(self.smtp_server, self.smtp_port)
@@ -153,24 +178,100 @@ class ConsoleErrorNotifier:
          print(e)


- def read_config(config_file=None):
-     if config_file is None and 'SCHD_CONFIG' in os.environ:
-         config_file = os.environ['SCHD_CONFIG']
+ class LocalScheduler:
+     def __init__(self, max_concurrent_jobs: int = 10):
+         """
+         Initialize the LocalScheduler with support for concurrent job execution.
+
+         :param max_concurrent_jobs: Maximum number of jobs to run concurrently.
+         """
+         executors = {
+             'default': ThreadPoolExecutor(max_concurrent_jobs)
+         }
+         self.scheduler = BlockingScheduler(executors=executors)
+         self._jobs:Dict[str, Job] = {}
+         logger.info("LocalScheduler initialized in 'local' mode with concurrency support")
+
+     async def init(self):
+         pass
+
+     async def add_job(self, job: Job, cron_expression: str, job_name: str) -> None:
+         """
+         Add a job to the scheduler.
+
+         :param job: An instance of a class conforming to the Job protocol.
+         :param cron_expression: A string representing the cron schedule.
+         :param job_name: Optional name for the job.
+         """
+         self._jobs[job_name] = job
+         try:
+             cron_trigger = CronTrigger.from_crontab(cron_expression)
+             self.scheduler.add_job(self.execute_job, cron_trigger, kwargs={'job_name':job_name})
+             logger.info(f"Job '{job_name or job.__class__.__name__}' added with cron expression: {cron_expression}")
+         except Exception as e:
+             logger.error(f"Failed to add job '{job_name or job.__class__.__name__}': {str(e)}")
+             raise
+
+     def execute_job(self, job_name:str):
+         job = self._jobs[job_name]
+         output_stream = io.StringIO()
+         context = JobContext(job_name=job_name, stdout=output_stream)
+         try:
+             with redirect_stdout(output_stream):
+                 job_result = job.execute(context)
+
+             if job_result is None:
+                 ret_code = 0
+             elif isinstance(job_result, int):
+                 ret_code = job_result
+             elif hasattr(job_result, 'get_code'):
+                 ret_code = job_result.get_code()
+             else:
+                 raise ValueError('unsupported result type: %s', job_result)
+
+         except Exception as ex:
+             logger.exception('error when executing job, %s', ex)
+             ret_code = -1
+
+         logger.info('job %s execute complete: %d', job_name, ret_code)
+         logger.info('job %s process output: \n%s', job_name, output_stream.getvalue())

-     if config_file is None:
-         config_file = 'conf/schd.yaml'
+     def run(self):
+         """
+         Start the scheduler.
+         """
+         try:
+             logger.info("Starting LocalScheduler...")
+             self.scheduler.start()
+         except (KeyboardInterrupt, SystemExit):
+             logger.info("Scheduler stopped.")

-     with open(config_file, 'r', encoding='utf8') as f:
-         config = yaml.load(f, Loader=yaml.FullLoader)
+     def start(self):
+         self.scheduler.start()

-     return config
+
+ def build_scheduler(config:SchdConfig):
+     scheduler_cls = os.environ.get('SCHD_SCHEDULER_CLS') or config.scheduler_cls
+
+     if scheduler_cls == 'LocalScheduler':
+         scheduler = LocalScheduler()
+     elif scheduler_cls == 'RemoteScheduler':
+         logger.info('scheduler_cls: %s', scheduler_cls)
+         scheduler_remote_host = os.environ.get('SCHD_SCHEDULER_REMOTE_HOST') or config.scheduler_remote_host
+         assert scheduler_remote_host, 'scheduler_remote_host cannot be none'
+         logger.info('scheduler_remote_host: %s ', scheduler_remote_host)
+         scheduler = RemoteScheduler(worker_name=config.worker_name, remote_host=scheduler_remote_host)
+     else:
+         raise ValueError('invalid scheduler_cls: %s' % scheduler_cls)
+     return scheduler


- def run_daemon(config_file=None):
+ async def run_daemon(config_file=None):
      config = read_config(config_file=config_file)
-     sched = BlockingScheduler(executors={'default': ThreadPoolExecutor(10)})
+     scheduler = build_scheduler(config)
+     await scheduler.init()

-     if 'error_notifier' in config:
+     if hasattr(config, 'error_notifier'):
          error_notifier_config = config['error_notifier']
          error_notifier_type = error_notifier_config.get('type', 'console')
          if error_notifier_type == 'console':
@@ -196,24 +297,28 @@ def run_daemon(config_file=None):
      else:
          job_error_handler = ConsoleErrorNotifier()

-     for job_name, job_config in config['jobs'].items():
-         job_class_name = job_config.pop('class')
-         job_cron = job_config.pop('cron')
+     for job_name, job_config in config.jobs.items():
+         job_class_name = job_config.cls
+         job_cron = job_config.cron
          job = build_job(job_name, job_class_name, job_config)
-         job_warpped = JobExceptionWrapper(job, job_error_handler)
-         sched.add_job(job_warpped, CronTrigger.from_crontab(job_cron), id=job_name, misfire_grace_time=10)
+         await scheduler.add_job(job, job_cron, job_name=job_name)
          logger.info('job added, %s', job_name)

      logger.info('scheduler starting.')
-     sched.start()
+     scheduler.start()
+     while True:
+         await asyncio.sleep(1000)

- def main():
+
+ async def main():
      parser = argparse.ArgumentParser()
      parser.add_argument('--logfile')
      parser.add_argument('--config', '-c')
      args = parser.parse_args()
      config_file = args.config

+     logging.basicConfig(level=logging.DEBUG)
+
      print(f'starting schd, {schd_version}, config_file={config_file}')

      if args.logfile:
@@ -224,8 +329,8 @@ def main():
          log_stream = sys.stdout

      logging.basicConfig(level=logging.INFO, format='%(asctime)s %(name)s - %(levelname)s %(message)s', datefmt='%Y-%m-%d %H:%M:%S', stream=log_stream)
-     run_daemon(config_file)
+     await run_daemon(config_file)


  if __name__ == '__main__':
-     main()
+     asyncio.run(main())
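
Two behavioural changes in this file are easy to miss: build_job now expects the "package.module:ClassName" form with a colon rather than a dotted path, and plain job classes receive config.params as keyword arguments instead of the whole config mapping. A hedged sketch of the new contract; the job entry itself is hypothetical:

    from schd.config import JobConfig
    from schd.scheduler import build_job

    # Note the colon in the class path and the constructor arguments under 'params'.
    cfg = JobConfig.from_dict({
        'class': 'schd.scheduler:CommandJob',
        'cron': '*/5 * * * *',
        'params': {'cmd': 'echo hello'},
    })
    job = build_job('hello', cfg.cls, cfg)  # imports schd.scheduler, resolves CommandJob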
schd/schedulers/remote.py ADDED
@@ -0,0 +1,150 @@
+ import asyncio
+ from contextlib import redirect_stdout
+ import io
+ import json
+ import os
+ from typing import Dict
+ from urllib.parse import urljoin
+ import aiohttp
+ import aiohttp.client_exceptions
+ from schd.job import JobContext, Job
+
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+
+ class RemoteApiClient:
+     def __init__(self, base_url:str):
+         self._base_url = base_url
+
+     async def register_worker(self, name:str):
+         url = urljoin(self._base_url, f'/api/workers/{name}')
+         async with aiohttp.ClientSession() as session:
+             async with session.put(url) as response:
+                 response.raise_for_status()
+                 result = await response.json()
+
+     async def register_job(self, worker_name, job_name, cron):
+         url = urljoin(self._base_url, f'/api/workers/{worker_name}/jobs/{job_name}')
+         post_data = {
+             'cron': cron,
+         }
+         async with aiohttp.ClientSession() as session:
+             async with session.put(url, json=post_data) as response:
+                 response.raise_for_status()
+                 result = await response.json()
+
+     async def subscribe_worker_eventstream(self, worker_name):
+         url = urljoin(self._base_url, f'/api/workers/{worker_name}/eventstream')
+
+         timeout = aiohttp.ClientTimeout(sock_read=600)
+         async with aiohttp.ClientSession(timeout=timeout) as session:
+             async with session.get(url) as resp:
+                 resp.raise_for_status()
+                 async for line in resp.content:
+                     decoded = line.decode("utf-8").strip()
+                     logger.info('got event, raw data: %s', decoded)
+                     event = json.loads(decoded)
+                     event_type = event['event_type']
+                     if event_type == 'NewJobInstance':
+                         # event = JobInstanceEvent()
+                         yield event
+                     else:
+                         raise ValueError('unknown event type %s' % event_type)
+
+     async def update_job_instance(self, worker_name, job_name, job_instance_id, status, ret_code=None):
+         url = urljoin(self._base_url, f'/api/workers/{worker_name}/jobs/{job_name}/{job_instance_id}')
+         post_data = {'status':status}
+         if ret_code is not None:
+             post_data['ret_code'] = ret_code
+
+         async with aiohttp.ClientSession() as session:
+             async with session.put(url, json=post_data) as response:
+                 response.raise_for_status()
+                 result = await response.json()
+
+     async def commit_job_log(self, worker_name, job_name, job_instance_id, logfile_path):
+         upload_url = urljoin(self._base_url, f'/api/workers/{worker_name}/jobs/{job_name}/{job_instance_id}/log')
+         async with aiohttp.ClientSession() as session:
+             with open(logfile_path, 'rb') as f:
+                 data = aiohttp.FormData()
+                 data.add_field('logfile', f, filename=os.path.basename(logfile_path), content_type='application/octet-stream')
+
+                 async with session.put(upload_url, data=data) as resp:
+                     print("Status:", resp.status)
+                     print("Response:", await resp.text())
+
+
+ class RemoteScheduler:
+     def __init__(self, worker_name:str, remote_host:str):
+         self.client = RemoteApiClient(remote_host)
+         self._worker_name = worker_name
+         self._jobs:"Dict[str,Job]" = {}
+         self._loop_task = None
+         self._loop = asyncio.get_event_loop()
+
+     async def init(self):
+         await self.client.register_worker(self._worker_name)
+
+     async def add_job(self, job:Job, cron, job_name):
+         await self.client.register_job(self._worker_name, job_name=job_name, cron=cron)
+         self._jobs[job_name] = job
+
+     async def start_main_loop(self):
+         while True:
+             logger.info('start_main_loop ')
+             try:
+                 async for event in self.client.subscribe_worker_eventstream(self._worker_name):
+                     print(event)
+                     await self.execute_task(event['data']['job_name'], event['data']['id'])
+             except aiohttp.client_exceptions.ClientPayloadError:
+                 logger.info('connection lost')
+             except aiohttp.client_exceptions.SocketTimeoutError:
+                 logger.info('SocketTimeoutError')
+             except aiohttp.client_exceptions.ClientConnectorError:
+                 # cannot connect, try later
+                 logger.debug('connect failed, ClientConnectorError, try later.')
+                 await asyncio.sleep(10)
+                 continue
+             except Exception as ex:
+                 logger.error('error in start_main_loop, %s', ex, exc_info=ex)
+                 break
+
+     def start(self):
+         self._loop_task = self._loop.create_task(self.start_main_loop())
+
+     async def execute_task(self, job_name, instance_id:int):
+         job = self._jobs[job_name]
+         logfile_dir = f'joblog/{instance_id}'
+         if not os.path.exists(logfile_dir):
+             os.makedirs(logfile_dir)
+         logfile_path = os.path.join(logfile_dir, 'output.txt')
+         output_stream = io.FileIO(logfile_path, mode='w+')
+         text_stream = io.TextIOWrapper(output_stream, encoding='utf-8')
+
+         context = JobContext(job_name=job_name, stdout=text_stream)
+         await self.client.update_job_instance(self._worker_name, job_name, instance_id, status='RUNNING')
+         try:
+             with redirect_stdout(text_stream):
+                 job_result = job.execute(context)
+
+             if job_result is None:
+                 ret_code = 0
+             elif isinstance(job_result, int):
+                 ret_code = job_result
+             elif hasattr(job_result, 'get_code'):
+                 ret_code = job_result.get_code()
+             else:
+                 raise ValueError('unsupported result type: %s', job_result)
+
+         except Exception as ex:
+             logger.exception('error when executing job, %s', ex)
+             ret_code = -1
+
+         logger.info('job %s execute complete: %d, log_file: %s', job_name, ret_code, logfile_path)
+         text_stream.flush()
+         output_stream.flush()
+         output_stream.close()
+         await self.client.commit_job_log(self._worker_name, job_name, instance_id, logfile_path)
+         await self.client.update_job_instance(self._worker_name, job_name, instance_id, status='COMPLETED', ret_code=ret_code)
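
The remote backend inverts control: instead of firing cron triggers locally, the worker registers itself and its jobs over HTTP, long-polls an event stream, executes a job whenever the server emits a NewJobInstance event, and uploads the captured log afterwards. A minimal sketch of running a worker by hand; EchoJob and the server address are hypothetical:

    import asyncio
    from schd.job import JobContext
    from schd.schedulers.remote import RemoteScheduler

    class EchoJob:
        def execute(self, context: JobContext):
            print('running', context.job_name)   # captured into the instance log

    async def main():
        scheduler = RemoteScheduler(worker_name='local', remote_host='http://localhost:8080')
        await scheduler.init()                           # PUT /api/workers/local
        await scheduler.add_job(EchoJob(), '0 * * * *', 'echo')
        await scheduler.start_main_loop()                # poll events, run jobs, upload logs

    asyncio.run(main())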
schd-0.0.11.dist-info/METADATA CHANGED
@@ -1,8 +1,9 @@
  Metadata-Version: 2.1
  Name: schd
- Version: 0.0.10
+ Version: 0.0.11
  Home-page: https://github.com/kevenli/schd
  License: ApacheV2
- Requires-Dist: apscheduler <4.0
+ Requires-Dist: apscheduler<4.0
  Requires-Dist: pyaml
+ Requires-Dist: aiohttp

schd-0.0.11.dist-info/RECORD ADDED
@@ -0,0 +1,18 @@
+ schd/__init__.py,sha256=PJtUBAnQMxYDTqpslqeXmCybtsqQwT54QhX5X07AO50,24
+ schd/config.py,sha256=21KpKPOwa7sAu4m2YS7kd644M2Lxnx2HuU_Czqs-R48,3873
+ schd/job.py,sha256=AoW-2W1hRY_O4nz_pKukgmEXYkjlP2XmYimyoGCs-Bw,614
+ schd/scheduler.py,sha256=YtfF8YRdSNjQvxOig7aCjh9EaRWzjmP34ThPeccfbs0,12706
+ schd/util.py,sha256=NH4EqIns1Y01yz1Rf3cw-tlKSLc-XNuS9hHbAdq5D_I,592
+ schd/cmds/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ schd/cmds/base.py,sha256=ZnNcJozQFLbpYyNp8dhHzm3BzFQa0hm6KyCC6URfueY,122
+ schd/cmds/daemon.py,sha256=MReUf8TDyE-zEUjJ1T1v4RBD8f2edpE7TeGdooaHD6Y,889
+ schd/cmds/jobs.py,sha256=843M6rcMjqmm8yevHF4QsJxc0fw8LQfSX7OEA_OH0Dg,590
+ schd/cmds/run.py,sha256=T7WeTMf6-crmC8fD6YoMsp0nQbKemNTf4_Q9a-b789w,853
+ schd/cmds/schd.py,sha256=vOlfQCQT81KhLMvlP3tlymEWpxmqlBOa5vnJoeFXVlw,996
+ schd/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ schd/schedulers/remote.py,sha256=m9UiHE8TziBzzOGHwdCOnOraOiy-78bFAQJ-m21fOtg,6567
+ schd-0.0.11.dist-info/METADATA,sha256=DQsIwXYzT_MKtlkgf7qZ6cfeWF1biMcZRMdFbmOcb-w,195
+ schd-0.0.11.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+ schd-0.0.11.dist-info/entry_points.txt,sha256=VvUhIaucvHlggoz-4lWtQgXbDwoWH9x-iT0QOUHEMyI,45
+ schd-0.0.11.dist-info/top_level.txt,sha256=Vojim8xSOsYyQHrZBNhx7n0F7WqbEJ2SeBFAvEnrJ_U,5
+ schd-0.0.11.dist-info/RECORD,,
schd-0.0.10.dist-info/RECORD DELETED
@@ -1,14 +0,0 @@
- schd/__init__.py,sha256=5Sd9bMRrA4DcXB91Hc_yZbt-fB07LABN3xouof-5uOg,24
- schd/scheduler.py,sha256=Y1JKEB5VAZ9ziaR9Z9_QNkAVhBlMIkwrb4vRZhOF7QA,8555
- schd/util.py,sha256=NH4EqIns1Y01yz1Rf3cw-tlKSLc-XNuS9hHbAdq5D_I,592
- schd/cmds/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- schd/cmds/base.py,sha256=ZnNcJozQFLbpYyNp8dhHzm3BzFQa0hm6KyCC6URfueY,122
- schd/cmds/daemon.py,sha256=xeQlG6zG0Z_6IrY1Aod4Zm7F0CNkfVaGWX8GBGVSv4c,860
- schd/cmds/jobs.py,sha256=843M6rcMjqmm8yevHF4QsJxc0fw8LQfSX7OEA_OH0Dg,590
- schd/cmds/run.py,sha256=T7WeTMf6-crmC8fD6YoMsp0nQbKemNTf4_Q9a-b789w,853
- schd/cmds/schd.py,sha256=vOlfQCQT81KhLMvlP3tlymEWpxmqlBOa5vnJoeFXVlw,996
- schd-0.0.10.dist-info/METADATA,sha256=6EwtjSwKwdgWx-SK2z21b822IBGgbE96MXFzcmyy_SM,172
- schd-0.0.10.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
- schd-0.0.10.dist-info/entry_points.txt,sha256=VvUhIaucvHlggoz-4lWtQgXbDwoWH9x-iT0QOUHEMyI,45
- schd-0.0.10.dist-info/top_level.txt,sha256=Vojim8xSOsYyQHrZBNhx7n0F7WqbEJ2SeBFAvEnrJ_U,5
- schd-0.0.10.dist-info/RECORD,,
File without changes