zrb 0.0.117__py3-none-any.whl → 0.0.119__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,76 @@
+ from zrb.helper.typing import Optional
+ from zrb.helper.typecheck import typechecked
+
+ import asyncio
+ import time
+
+ LOG_NAME_LENGTH = 20
+
+
+ @typechecked
+ class TimeTracker():
+
+     def __init__(self):
+         self.__start_time: float = 0
+         self.__end_time: float = 0
+
+     def _start_timer(self):
+         self.__start_time = time.time()
+
+     def _end_timer(self):
+         self.__end_time = time.time()
+
+     def _get_elapsed_time(self) -> float:
+         return self.__end_time - self.__start_time
+
+
+ @typechecked
+ class AttemptTracker():
+
+     def __init__(self, retry: int = 2):
+         self.__retry = retry
+         self.__attempt: int = 1
+
+     def _get_max_attempt(self) -> int:
+         return self.__retry + 1
+
+     def _get_attempt(self) -> int:
+         return self.__attempt
+
+     def _increase_attempt(self):
+         self.__attempt += 1
+
+     def _should_attempt(self) -> bool:
+         attempt = self._get_attempt()
+         max_attempt = self._get_max_attempt()
+         return attempt <= max_attempt
+
+     def _is_last_attempt(self) -> bool:
+         attempt = self._get_attempt()
+         max_attempt = self._get_max_attempt()
+         return attempt >= max_attempt
+
+
+ @typechecked
+ class FinishTracker():
+
+     def __init__(self):
+         self.__execution_queue: Optional[asyncio.Queue] = None
+         self.__counter = 0
+
+     async def _mark_awaited(self):
+         if self.__execution_queue is None:
+             self.__execution_queue = asyncio.Queue()
+         self.__counter += 1
+
+     async def _mark_done(self):
+         # Tracker might be started several times.
+         # However, when the execution is marked as done, it applies globally.
+         # Thus, we need to send the event as many times as the counter.
+         for i in range(self.__counter):
+             await self.__execution_queue.put(True)
+
+     async def _is_done(self) -> bool:
+         while self.__execution_queue is None:
+             await asyncio.sleep(0.05)
+         return await self.__execution_queue.get()
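
The FinishTracker added above appears to back the `_mark_done()` call that the RecurringTask diff below relies on: every caller that awaits the task registers itself first (incrementing the counter), and marking the task done pushes one event per registered awaiter so none of them blocks forever. A minimal standalone sketch of that pattern (simplified names, not taken from the zrb source):

import asyncio
from typing import Optional


class FinishTracker:
    def __init__(self):
        self._queue: Optional[asyncio.Queue] = None
        self._counter = 0

    async def mark_awaited(self):
        if self._queue is None:
            self._queue = asyncio.Queue()
        self._counter += 1

    async def mark_done(self):
        # Push one "done" event per registered awaiter.
        for _ in range(self._counter):
            await self._queue.put(True)

    async def is_done(self) -> bool:
        while self._queue is None:
            await asyncio.sleep(0.05)
        return await self._queue.get()


async def main():
    tracker = FinishTracker()
    await tracker.mark_awaited()
    await tracker.mark_awaited()
    waiters = [asyncio.create_task(tracker.is_done()) for _ in range(2)]
    await tracker.mark_done()
    print(await asyncio.gather(*waiters))  # [True, True]


asyncio.run(main())
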
zrb/task/checker.py CHANGED
@@ -1,6 +1,6 @@
  from zrb.helper.typing import Any, Callable, Iterable, Optional, Union
  from zrb.helper.typecheck import typechecked
- from zrb.task.base_task import BaseTask
+ from zrb.task.base_task.base_task import BaseTask
  from zrb.task.any_task import AnyTask
  from zrb.task.any_task_event_handler import (
      OnTriggered, OnWaiting, OnSkipped, OnStarted, OnReady, OnRetry, OnFailed
@@ -87,3 +87,6 @@ class Checker(BaseTask):
              self.print_out_dark(message)
              return
          self.log_debug(message)
+
+     def __repr__(self) -> str:
+         return f'<Checker name={self._name}>'
zrb/task/cmd_task.py CHANGED
@@ -2,11 +2,12 @@ from zrb.helper.typing import (
      Any, Callable, Iterable, List, Optional, Union, TypeVar
  )
  from zrb.helper.typecheck import typechecked
+ from zrb.helper.string.conversion import to_variable_name
  from zrb.task.any_task import AnyTask
  from zrb.task.any_task_event_handler import (
      OnTriggered, OnWaiting, OnSkipped, OnStarted, OnReady, OnRetry, OnFailed
  )
- from zrb.task.base_task import BaseTask
+ from zrb.task.base_task.base_task import BaseTask
  from zrb.task_env.env import Env
  from zrb.task_env.env_file import EnvFile
  from zrb.task_group.group import Group
@@ -150,7 +151,7 @@ class CmdTask(BaseTask):
          max_error_line = max_error_line if max_error_line > 0 else 1
          self._cmd = cmd
          self._cmd_path = cmd_path
-         self._set_cwd(cwd)
+         self.__set_cwd(cwd)
          self._max_output_size = max_output_line
          self._max_error_size = max_error_line
          self._output_buffer: Iterable[str] = []
@@ -164,7 +165,10 @@ class CmdTask(BaseTask):
      def copy(self) -> TCmdTask:
          return super().copy()

-     def _set_cwd(
+     def set_cwd(self, cwd: Union[str, pathlib.Path]):
+         self.__set_cwd(cwd)
+
+     def __set_cwd(
          self, cwd: Optional[Union[str, pathlib.Path]]
      ):
          if cwd is None:
@@ -192,7 +196,7 @@ class CmdTask(BaseTask):
          super().inject_envs()
          input_map = self.get_input_map()
          for task_input in self._get_combined_inputs():
-             input_key = self._get_normalized_input_key(task_input.get_name())
+             input_key = to_variable_name(task_input.get_name())
              input_value = input_map.get(input_key)
              env_name = '_INPUT_' + input_key.upper()
              should_render = task_input.should_render()
@@ -205,7 +209,7 @@ class CmdTask(BaseTask):

      async def run(self, *args: Any, **kwargs: Any) -> CmdResult:
          cmd = self.get_cmd_script(*args, **kwargs)
-         self.print_out_dark('Run script: ' + self._get_multiline_repr(cmd))
+         self.print_out_dark('Run script: ' + self.__get_multiline_repr(cmd))
          self.print_out_dark('Working directory: ' + self._cwd)
          self._output_buffer = []
          self._error_buffer = []
@@ -225,14 +229,14 @@ class CmdTask(BaseTask):
          self._pids.append(process.pid)
          self._process = process
          try:
-             signal.signal(signal.SIGINT, self._on_kill)
-             signal.signal(signal.SIGTERM, self._on_kill)
+             signal.signal(signal.SIGINT, self.__on_kill)
+             signal.signal(signal.SIGTERM, self.__on_kill)
          except Exception as e:
              self.print_err(e)
-         atexit.register(self._on_exit)
-         await self._wait_process(process)
+         atexit.register(self.__on_exit)
+         await self.__wait_process(process)
          self.log_info('Process completed')
-         atexit.unregister(self._on_exit)
+         atexit.unregister(self.__on_exit)
          output = '\n'.join(self._output_buffer)
          error = '\n'.join(self._error_buffer)
          # get return code
@@ -254,36 +258,42 @@ class CmdTask(BaseTask):
              return True
          return super()._is_last_attempt()

-     def _on_kill(self, signum: Any, frame: Any):
+     def __on_kill(self, signum: Any, frame: Any):
          self._global_state.no_more_attempt = True
          self._global_state.is_killed_by_signal = True
          self.print_out_dark(f'Getting signal {signum}')
          for pid in self._pids:
-             self._kill_by_pid(pid)
-         self.print_out_dark(f'Exiting with signal {signum}')
+             self.__kill_by_pid(pid)
+         tasks = asyncio.all_tasks()
+         for task in tasks:
+             try:
+                 task.cancel()
+             except Exception as e:
+                 self.print_err(e)
          time.sleep(0.3)
+         self.print_out_dark(f'Exiting with signal {signum}')
          sys.exit(signum)

-     def _on_exit(self):
+     def __on_exit(self):
          self._global_state.no_more_attempt = True
-         self._kill_by_pid(self._process.pid)
+         self.__kill_by_pid(self._process.pid)

-     def _kill_by_pid(self, pid: int):
+     def __kill_by_pid(self, pid: int):
          '''
          Kill a pid, gracefully
          '''
          try:
              process_ever_exists = False
-             if self._is_process_exist(pid):
+             if self.__is_process_exist(pid):
                  process_ever_exists = True
                  self.print_out_dark(f'Send SIGTERM to process {pid}')
                  os.killpg(os.getpgid(pid), signal.SIGTERM)
                  time.sleep(0.3)
-             if self._is_process_exist(pid):
+             if self.__is_process_exist(pid):
                  self.print_out_dark(f'Send SIGINT to process {pid}')
                  os.killpg(os.getpgid(pid), signal.SIGINT)
                  time.sleep(0.3)
-             if self._is_process_exist(pid):
+             if self.__is_process_exist(pid):
                  self.print_out_dark(f'Send SIGKILL to process {pid}')
                  os.killpg(os.getpgid(pid), signal.SIGKILL)
              if process_ever_exists:
@@ -291,30 +301,30 @@ class CmdTask(BaseTask):
          except Exception:
              self.log_error(f'Cannot kill process {pid}')

-     def _is_process_exist(self, pid: int) -> bool:
+     def __is_process_exist(self, pid: int) -> bool:
          try:
              os.killpg(os.getpgid(pid), 0)
              return True
          except ProcessLookupError:
              return False

-     async def _wait_process(self, process: asyncio.subprocess.Process):
+     async def __wait_process(self, process: asyncio.subprocess.Process):
          # Create queue
          stdout_queue = asyncio.Queue()
          stderr_queue = asyncio.Queue()
          # Read from streams and put into queue
-         stdout_process = asyncio.create_task(self._queue_stream(
+         stdout_process = asyncio.create_task(self.__queue_stream(
              process.stdout, stdout_queue
          ))
-         stderr_process = asyncio.create_task(self._queue_stream(
+         stderr_process = asyncio.create_task(self.__queue_stream(
              process.stderr, stderr_queue
          ))
          # Handle messages in queue
-         stdout_log_process = asyncio.create_task(self._log_from_queue(
+         stdout_log_process = asyncio.create_task(self.__log_from_queue(
              stdout_queue, self.print_out,
              self._output_buffer, self._max_output_size
          ))
-         stderr_log_process = asyncio.create_task(self._log_from_queue(
+         stderr_log_process = asyncio.create_task(self.__log_from_queue(
              stderr_queue, self.print_err,
              self._error_buffer, self._max_error_size
          ))
@@ -338,13 +348,13 @@ class CmdTask(BaseTask):
      ) -> str:
          if not isinstance(cmd_path, str) or cmd_path != '':
              if callable(cmd_path):
-                 return self._get_rendered_cmd_path(cmd_path(*args, **kwargs))
-             return self._get_rendered_cmd_path(cmd_path)
+                 return self.__get_rendered_cmd_path(cmd_path(*args, **kwargs))
+             return self.__get_rendered_cmd_path(cmd_path)
          if callable(cmd):
-             return self._get_rendered_cmd(cmd(*args, **kwargs))
-         return self._get_rendered_cmd(cmd)
+             return self.__get_rendered_cmd(cmd(*args, **kwargs))
+         return self.__get_rendered_cmd(cmd)

-     def _get_rendered_cmd_path(
+     def __get_rendered_cmd_path(
          self, cmd_path: Union[str, Iterable[str]]
      ) -> str:
          if isinstance(cmd_path, str):
@@ -354,12 +364,12 @@ class CmdTask(BaseTask):
              for cmd_path_str in cmd_path
          ])

-     def _get_rendered_cmd(self, cmd: Union[str, Iterable[str]]) -> str:
+     def __get_rendered_cmd(self, cmd: Union[str, Iterable[str]]) -> str:
          if isinstance(cmd, str):
              return self.render_str(cmd)
          return self.render_str('\n'.join(list(cmd)))

-     async def _queue_stream(self, stream, queue: asyncio.Queue):
+     async def __queue_stream(self, stream, queue: asyncio.Queue):
          while True:
              try:
                  line = await stream.readline()
@@ -369,7 +379,7 @@ class CmdTask(BaseTask):
                  break
              await queue.put(line)

-     async def _log_from_queue(
+     async def __log_from_queue(
          self,
          queue: asyncio.Queue,
          print_log: Callable[[str], None],
@@ -381,17 +391,27 @@ class CmdTask(BaseTask):
              if not line:
                  break
              line_str = line.decode('utf-8').rstrip()
-             self._add_to_buffer(buffer, max_size, line_str)
+             self.__add_to_buffer(buffer, max_size, line_str)
              _reset_stty()
              print_log(line_str)
              _reset_stty()

-     def _add_to_buffer(
+     def __add_to_buffer(
          self, buffer: Iterable[str], max_size: int, new_line: str
      ):
          if len(buffer) >= max_size:
              buffer.pop(0)
          buffer.append(new_line)
-
+
+     def __get_multiline_repr(self, text: str) -> str:
+         lines_repr: Iterable[str] = []
+         lines = text.split('\n')
+         if len(lines) == 1:
+             return lines[0]
+         for index, line in enumerate(lines):
+             line_number_repr = str(index + 1).rjust(4, '0')
+             lines_repr.append(f' {line_number_repr} | {line}')
+         return '\n' + '\n'.join(lines_repr)
+
      def __repr__(self) -> str:
          return f'<CmdTask name={self._name}>'
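
Most of the cmd_task.py changes are mechanical renames from single-underscore helpers (_on_kill, _wait_process, ...) to double-underscore ones. As a reminder of what the rename buys (illustration only, not zrb code): double-underscore attributes are name-mangled per class, so a subclass that happens to define a helper with the same name no longer overrides or collides with the parent's internal one.

class Base:
    def run(self) -> str:
        return self.__helper()          # compiled as self._Base__helper()

    def __helper(self) -> str:
        return 'base helper'


class Child(Base):
    def __helper(self) -> str:          # stored as _Child__helper, no override
        return 'child helper'


print(Child().run())                    # prints 'base helper'
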
@@ -145,10 +145,10 @@ class DockerComposeTask(CmdTask):
          self._compose_flags = compose_flags
          self._compose_args = compose_args
          self._compose_env_prefix = compose_env_prefix
-         self._compose_template_file = self._get_compose_template_file(
+         self._compose_template_file = self.__get_compose_template_file(
              compose_file
          )
-         self._compose_runtime_file = self._get_compose_runtime_file(
+         self._compose_runtime_file = self.__get_compose_runtime_file(
              self._compose_template_file
          )
          # Flag to make mark whether service config and compose environments
@@ -160,7 +160,7 @@ class DockerComposeTask(CmdTask):
          return super().copy()

      async def run(self, *args, **kwargs: Any) -> CmdResult:
-         self._generate_compose_runtime_file()
+         self.__generate_compose_runtime_file()
          try:
              result = await super().run(*args, **kwargs)
          finally:
@@ -192,7 +192,7 @@ class DockerComposeTask(CmdTask):
          for _, service_config in self._compose_service_configs.items():
              self.insert_env_file(*service_config.get_env_files())

-     def _generate_compose_runtime_file(self):
+     def __generate_compose_runtime_file(self):
          compose_data = read_compose_file(self._compose_template_file)
          for service, service_config in self._compose_service_configs.items():
              envs: List[Env] = []
@@ -200,10 +200,12 @@ class DockerComposeTask(CmdTask):
              for env_file in env_files:
                  envs += env_file.get_envs()
              envs += service_config.get_envs()
-             compose_data = self._apply_service_env(compose_data, service, envs)
+             compose_data = self.__apply_service_env(
+                 compose_data, service, envs
+             )
          write_compose_file(self._compose_runtime_file, compose_data)

-     def _apply_service_env(
+     def __apply_service_env(
          self, compose_data: Any, service: str, envs: List[Env]
      ) -> Any:
          # service not found
@@ -213,12 +215,13 @@ class DockerComposeTask(CmdTask):
          # service has no environment definition
          if 'environment' not in compose_data['services'][service]:
              compose_data['services'][service]['environment'] = {
-                 env.name: self._get_env_compose_value(env) for env in envs
+                 env.get_name(): self.__get_env_compose_value(env)
+                 for env in envs
              }
              return compose_data
          # service environment is a map
          if isinstance(compose_data['services'][service]['environment'], dict):
-             new_env_map = self._get_service_new_env_map(
+             new_env_map = self.__get_service_new_env_map(
                  compose_data['services'][service]['environment'], envs
              )
              for key, value in new_env_map.items():
@@ -226,43 +229,44 @@ class DockerComposeTask(CmdTask):
              return compose_data
          # service environment is a list
          if isinstance(compose_data['services'][service]['environment'], list):
-             new_env_list = self._get_service_new_env_list(
+             new_env_list = self.__get_service_new_env_list(
                  compose_data['services'][service]['environment'], envs
              )
              compose_data['services'][service]['environment'] += new_env_list
              return compose_data
          return compose_data

-     def _get_service_new_env_map(
+     def __get_service_new_env_map(
          self, service_env_map: Mapping[str, str], new_envs: List[Env]
      ) -> Mapping[str, str]:
          new_service_envs: Mapping[str, str] = {}
          for env in new_envs:
-             if env.name in service_env_map:
+             env_name = env.get_name()
+             if env_name in service_env_map:
                  continue
-             new_service_envs[env.name] = self._get_env_compose_value(env)
+             new_service_envs[env_name] = self.__get_env_compose_value(env)
          return new_service_envs

-     def _get_service_new_env_list(
+     def __get_service_new_env_list(
          self, service_env_list: List[str], new_envs: List[Env]
      ) -> List[str]:
          new_service_envs: List[str] = []
          for env in new_envs:
              should_be_added = 0 == len([
                  service_env for service_env in service_env_list
-                 if service_env.startswith(env.name + '=')
+                 if service_env.startswith(env.get_name() + '=')
              ])
              if not should_be_added:
                  continue
              new_service_envs.append(
-                 env.name + '=' + self._get_env_compose_value(env)
+                 env.get_name() + '=' + self.__get_env_compose_value(env)
              )
          return new_service_envs

-     def _get_env_compose_value(self, env: Env) -> str:
-         return '${' + env.name + ':-' + env.default + '}'
+     def __get_env_compose_value(self, env: Env) -> str:
+         return '${' + env.get_name() + ':-' + env.get_default() + '}'

-     def _get_compose_runtime_file(self, compose_file_name: str) -> str:
+     def __get_compose_runtime_file(self, compose_file_name: str) -> str:
          directory, file = os.path.split(compose_file_name)
          prefix = '_' if file.startswith('.') else '._'
          runtime_prefix = self.get_cmd_name()
@@ -281,7 +285,7 @@ class DockerComposeTask(CmdTask):
          runtime_file_name = prefix + file + runtime_prefix
          return os.path.join(directory, runtime_file_name)

-     def _get_compose_template_file(self, compose_file: Optional[str]) -> str:
+     def __get_compose_template_file(self, compose_file: Optional[str]) -> str:
          if compose_file is None:
              for _compose_file in [
                  'compose.yml', 'compose.yaml',
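
The __get_env_compose_value change above keeps producing Docker Compose's "default if unset" interpolation syntax, only now reading the name and default through Env's accessor methods. A small sketch of the string it injects (hypothetical values, not from the diff):

def compose_value(name: str, default: str) -> str:
    return '${' + name + ':-' + default + '}'


print(compose_value('API_PORT', '8080'))   # ${API_PORT:-8080}
# which ends up in the generated runtime compose file roughly as:
#   environment:
#     API_PORT: ${API_PORT:-8080}
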
zrb/task/flow_task.py CHANGED
@@ -2,7 +2,7 @@ from zrb.helper.typing import (
      Callable, Iterable, List, Optional, TypeVar, Union
  )
  from zrb.helper.typecheck import typechecked
- from zrb.task.base_task import BaseTask
+ from zrb.task.base_task.base_task import BaseTask
  from zrb.task.any_task import AnyTask
  from zrb.task.any_task_event_handler import (
      OnTriggered, OnWaiting, OnSkipped, OnStarted, OnReady, OnRetry, OnFailed
zrb/task/notifier.py ADDED
@@ -0,0 +1,157 @@
+ from zrb.helper.typing import Any, Callable, Iterable, Optional, Union
+ from zrb.helper.typecheck import typechecked
+ from zrb.task.base_task.base_task import BaseTask
+ from zrb.task.any_task import AnyTask
+ from zrb.task.any_task_event_handler import (
+     OnTriggered, OnWaiting, OnSkipped, OnStarted, OnReady, OnRetry, OnFailed
+ )
+ from zrb.task_env.env import Env
+ from zrb.task_env.env_file import EnvFile
+ from zrb.task_group.group import Group
+ from zrb.task_input.any_input import AnyInput
+ from zrb.helper.accessories.icon import get_random_icon
+ from zrb.helper.string.modification import double_quote
+
+ import os
+ import subprocess
+
+ CURRENT_DIR = os.path.dirname(__file__)
+ NOTIFY_PS1_PATH = os.path.realpath(os.path.abspath(os.path.join(
+     os.path.dirname(CURRENT_DIR), 'shell-scripts', 'notify.ps1'
+ )))
+
+
+ @typechecked
+ class Notifier(BaseTask):
+     def __init__(
+         self,
+         name: str = 'port-check',
+         group: Optional[Group] = None,
+         inputs: Iterable[AnyInput] = [],
+         envs: Iterable[Env] = [],
+         env_files: Iterable[EnvFile] = [],
+         icon: Optional[str] = None,
+         color: Optional[str] = None,
+         description: str = '',
+         title: str = '',
+         message: str = '',
+         show_toast: bool = True,
+         show_stdout: bool = True,
+         upstreams: Iterable[AnyTask] = [],
+         on_triggered: Optional[OnTriggered] = None,
+         on_waiting: Optional[OnWaiting] = None,
+         on_skipped: Optional[OnSkipped] = None,
+         on_started: Optional[OnStarted] = None,
+         on_ready: Optional[OnReady] = None,
+         on_retry: Optional[OnRetry] = None,
+         on_failed: Optional[OnFailed] = None,
+         checking_interval: Union[int, float] = 0,
+         retry: int = 2,
+         retry_interval: Union[float, int] = 1,
+         should_execute: Union[bool, str, Callable[..., bool]] = True
+     ):
+         BaseTask.__init__(
+             self,
+             name=name,
+             group=group,
+             inputs=inputs,
+             envs=envs,
+             env_files=env_files,
+             icon=icon,
+             color=color,
+             description=description,
+             upstreams=upstreams,
+             on_triggered=on_triggered,
+             on_waiting=on_waiting,
+             on_skipped=on_skipped,
+             on_started=on_started,
+             on_ready=on_ready,
+             on_retry=on_retry,
+             on_failed=on_failed,
+             checkers=[],
+             checking_interval=checking_interval,
+             retry=retry,
+             retry_interval=retry_interval,
+             should_execute=should_execute,
+         )
+         self._title = title if title != '' else name
+         self._message = message if message != '' else get_random_icon()
+         self._show_toast = show_toast
+         self._show_stdout = show_stdout
+
+     async def run(self, *args: Any, **kwargs: Any) -> str:
+         title = self.render_str(self._title)
+         message = self.render_str(self._message)
+         notify_kwargs = {
+             key: value
+             for key, value in kwargs.items() if key not in ('title', 'message')
+         }
+         await self.notify(title, message, **notify_kwargs)
+         return message
+
+     async def notify(self, title: str, message: str, **kwargs: Any) -> None:
+         task: BaseTask = kwargs.get('_task')
+         if self._show_toast and _is_powershell_available():
+             cmd = [
+                 'powershell.exe',
+                 '-ExecutionPolicy', 'Bypass',
+                 '-File', NOTIFY_PS1_PATH,
+                 '-Title', title,
+                 '-Message', message
+             ]
+             subprocess.run(cmd, stdout=subprocess.DEVNULL)
+         if self._show_toast and _is_osascript_available():
+             q_message = double_quote(message)
+             q_title = double_quote(title)
+             cmd = [
+                 'osascript',
+                 '-e',
+                 f'display notification "{q_message}" with title "{q_title}"'
+             ]
+         if self._show_toast and _is_notify_send_available():
+             cmd = ['notify-send', title, message]
+             subprocess.run(cmd, stdout=subprocess.DEVNULL)
+         if self._show_stdout:
+             task.print_out(message)
+         task._play_bell()
+
+     def __repr__(self) -> str:
+         return f'<Notifier name={self._name}>'
+
+
+ def _is_powershell_available():
+     try:
+         subprocess.run(
+             ['powershell.exe', '-Command', 'echo "Checking PowerShell"'],
+             check=True, stdout=subprocess.DEVNULL,
+             stderr=subprocess.DEVNULL
+         )
+         return True
+     except (subprocess.CalledProcessError, FileNotFoundError, PermissionError):
+         return False
+
+
+ def _is_notify_send_available():
+     try:
+         subprocess.run(
+             ['notify-send', '--version'],
+             check=True,
+             stdout=subprocess.DEVNULL,
+             stderr=subprocess.DEVNULL
+         )
+         return True
+     except (subprocess.CalledProcessError, FileNotFoundError, PermissionError):
+         return False
+
+
+ def _is_osascript_available():
+     try:
+         subprocess.run(
+             ['osascript', '-e', 'return'],
+             check=True,
+             stdout=subprocess.DEVNULL,
+             stderr=subprocess.DEVNULL
+         )
+         return True
+     except (subprocess.CalledProcessError, FileNotFoundError, PermissionError):
+         return False
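
The new Notifier task wraps the platform-specific toast helpers above (notify.ps1 via PowerShell, osascript, notify-send) behind an ordinary zrb task. A hypothetical wiring sketch, assuming the usual zrb task registration (CmdTask, runner, and runner.register are not part of this diff):

from zrb import CmdTask, runner
from zrb.task.notifier import Notifier

build = CmdTask(
    name='build',
    cmd='echo building...',
)

notify_build = Notifier(
    name='notify-build',
    title='zrb',
    message='Build finished',
    upstreams=[build],    # run the build first, then notify
)
runner.register(notify_build)

# On the command line:  zrb notify-build
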
@@ -2,7 +2,7 @@ from zrb.helper.typing import (
      Any, Callable, Iterable, Mapping, Optional, Union
  )
  from zrb.helper.typecheck import typechecked
- from zrb.task.base_task import BaseTask
+ from zrb.task.base_task.base_task import BaseTask
  from zrb.task.any_task import AnyTask
  from zrb.task.any_task_event_handler import (
      OnTriggered, OnWaiting, OnSkipped, OnStarted, OnReady, OnRetry, OnFailed
@@ -46,7 +46,7 @@ class RecurringTask(BaseTask):
          should_execute: Union[bool, str, Callable[..., bool]] = True,
          return_upstream_result: bool = False
      ):
-         inputs = list(inputs) + task._get_inputs()
+         inputs = list(inputs) + task._get_combined_inputs()
          envs = list(envs) + task._get_envs()
          env_files = list(env_files) + task._get_env_files()
          BaseTask.__init__(
@@ -97,6 +97,7 @@ class RecurringTask(BaseTask):
              key: kwargs[key]
              for key in kwargs if key not in ['_task']
          }
+         is_first_time = True
          while True:
              # Create trigger functions
              trigger_functions = []
@@ -107,8 +108,12 @@ class RecurringTask(BaseTask):
                  trigger_functions.append(asyncio.create_task(
                      trigger_function(*args, **task_kwargs)
                  ))
+             self.print_out_dark('Waiting for next trigger')
+             # Mark task as done since trigger has been defined.
+             if is_first_time:
+                 await self._mark_done()
+                 is_first_time = False
              # Wait for the first task to complete
-             self.print_out_dark('Waiting for trigger')
              _, pending = await asyncio.wait(
                  trigger_functions, return_when=asyncio.FIRST_COMPLETED
              )
@@ -123,5 +128,15 @@ class RecurringTask(BaseTask):
                  is_async=True, raise_error=False, show_done_info=False
              )
              self.print_out_dark('Executing the task')
-             asyncio.create_task(fn(*args, **task_kwargs))
-             self._play_bell()
+             asyncio.create_task(
+                 self.__run_and_play_bell(fn, *args, **task_kwargs)
+             )
+
+     async def __run_and_play_bell(
+         self, fn: Callable[[Any], Any], *args: Any, **kwargs: Any
+     ):
+         await fn(*args, **kwargs)
+         self._play_bell()
+
+     def __repr__(self) -> str:
+         return f'<RecurringTask name={self._name}>'
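
The RecurringTask change above stops ringing the bell immediately after scheduling the triggered task; instead the coroutine is wrapped so the bell only plays once the task has actually finished. A small standalone illustration of that wrapper pattern (plain asyncio, not zrb code):

import asyncio


async def work():
    await asyncio.sleep(0.1)
    print('task finished')


async def run_and_ring(fn):
    await fn()
    print('\a bell')              # fires only after fn() has completed


async def main():
    asyncio.create_task(run_and_ring(work))
    print('scheduled, not waiting here')
    await asyncio.sleep(0.2)      # keep the loop alive for the demo


asyncio.run(main())
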
@@ -2,7 +2,7 @@ from zrb.helper.typing import (
      Any, Callable, Iterable, Mapping, Optional, Union, TypeVar
  )
  from zrb.helper.typecheck import typechecked
- from zrb.task.base_task import BaseTask
+ from zrb.task.base_task.base_task import BaseTask
  from zrb.task.any_task import AnyTask
  from zrb.task.any_task_event_handler import (
      OnTriggered, OnWaiting, OnSkipped, OnStarted, OnReady, OnRetry, OnFailed