datatailr 0.1.66__tar.gz → 0.1.81__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. {datatailr-0.1.66/src/datatailr.egg-info → datatailr-0.1.81}/PKG-INFO +19 -15
  2. {datatailr-0.1.66 → datatailr-0.1.81}/README.md +18 -14
  3. {datatailr-0.1.66 → datatailr-0.1.81}/pyproject.toml +6 -1
  4. {datatailr-0.1.66 → datatailr-0.1.81}/setup.py +3 -1
  5. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/__init__.py +14 -0
  6. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/acl.py +5 -5
  7. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/blob.py +9 -2
  8. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/build/image.py +6 -4
  9. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/excel/__init__.py +2 -2
  10. datatailr-0.1.81/src/datatailr/excel/addin.py +201 -0
  11. datatailr-0.1.81/src/datatailr/excel/stubs.py +37 -0
  12. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/logging.py +85 -4
  13. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/scheduler/__init__.py +8 -2
  14. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/scheduler/arguments_cache.py +8 -6
  15. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/scheduler/base.py +66 -21
  16. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/scheduler/batch.py +58 -14
  17. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/scheduler/batch_decorator.py +12 -3
  18. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/scheduler/constants.py +2 -2
  19. datatailr-0.1.81/src/datatailr/scheduler/job.py +112 -0
  20. datatailr-0.1.81/src/datatailr/scheduler/workflow.py +84 -0
  21. datatailr-0.1.81/src/datatailr/tag.py +35 -0
  22. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/wrapper.py +5 -3
  23. {datatailr-0.1.66 → datatailr-0.1.81/src/datatailr.egg-info}/PKG-INFO +19 -15
  24. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr.egg-info/SOURCES.txt +9 -0
  25. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr.egg-info/entry_points.txt +1 -0
  26. datatailr-0.1.81/src/datatailr.egg-info/top_level.txt +2 -0
  27. datatailr-0.1.81/src/datatailr_demo/README.md +112 -0
  28. datatailr-0.1.81/src/datatailr_demo/__init__.py +15 -0
  29. datatailr-0.1.81/src/datatailr_demo/examples.py +47 -0
  30. datatailr-0.1.81/src/sbin/datatailr_cli.py +195 -0
  31. datatailr-0.1.81/src/sbin/datatailr_run.py +381 -0
  32. {datatailr-0.1.66 → datatailr-0.1.81}/src/sbin/datatailr_run_excel.py +5 -3
  33. datatailr-0.1.66/src/datatailr.egg-info/top_level.txt +0 -1
  34. datatailr-0.1.66/src/sbin/datatailr_run.py +0 -198
  35. {datatailr-0.1.66 → datatailr-0.1.81}/LICENSE +0 -0
  36. {datatailr-0.1.66 → datatailr-0.1.81}/setup.cfg +0 -0
  37. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/build/__init__.py +0 -0
  38. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/dt_json.py +0 -0
  39. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/errors.py +0 -0
  40. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/group.py +0 -0
  41. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/scheduler/schedule.py +0 -0
  42. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/scheduler/utils.py +0 -0
  43. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/user.py +0 -0
  44. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/utils.py +0 -0
  45. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr/version.py +0 -0
  46. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr.egg-info/dependency_links.txt +0 -0
  47. {datatailr-0.1.66 → datatailr-0.1.81}/src/datatailr.egg-info/requires.txt +0 -0
  48. {datatailr-0.1.66 → datatailr-0.1.81}/src/sbin/datatailr_run_app.py +0 -0
  49. {datatailr-0.1.66 → datatailr-0.1.81}/src/sbin/datatailr_run_batch.py +0 -0
  50. {datatailr-0.1.66 → datatailr-0.1.81}/src/sbin/datatailr_run_service.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datatailr
- Version: 0.1.66
+ Version: 0.1.81
  Summary: Ready-to-Use Platform That Drives Business Insights
  Author-email: Datatailr <info@datatailr.com>
  License-Expression: MIT
@@ -84,25 +84,27 @@ print(datatailr.__provider__)
  The following example shows how to create a simple data pipeline using the Datatailr Python package.

  ```python
- from datatailr.scheduler import batch_job, Batch
+ from datatailr import workflow, task

- @batch_job()
+ @task()
  def func_no_args() -> str:
      return "no_args"


- @batch_job()
+ @task()
  def func_with_args(a: int, b: float) -> str:
      return f"args: {a}, {b}"

- with Batch(name="MY test DAG", local_run=True) as dag:
+ @workflow(name="MY test DAG")
+ def my_workflow():
      for n in range(2):
          res1 = func_no_args().alias(f"func_{n}")
          res2 = func_with_args(1, res1).alias(f"func_with_args_{n}")
+ my_workflow(local_run=True)
  ```

  Running this code will create a graph of jobs and execute it.
- Each node on the graph represents a job, which in turn is a call to a function decorated with `@batch_job()`.
+ Each node on the graph represents a job, which in turn is a call to a function decorated with `@task()`.

  Since this is a local run then the execution of each node will happen sequentially in the same process.

@@ -117,14 +119,14 @@ You will first need to separate your function definitions from the DAG definitio
  ```python
  # my_module.py

- from datatailr.scheduler import batch_job
+ from datatailr import task

- @batch_job()
+ @task()
  def func_no_args() -> str:
      return "no_args"


- @batch_job()
+ @task()
  def func_with_args(a: int, b: float) -> str:
      return f"args: {a}, {b}"
  ```
@@ -133,18 +135,20 @@ To use these functions in a batch job, you just need to import them and run in a

  ```python
  from my_module import func_no_args, func_with_args
- from datatailr.scheduler import Batch, Schedule
+ from datatailr import workflow

- schedule = Schedule(at_hours=0)
-
- with Batch(name="MY test DAG", schedule=schedule) as dag:
+ @workflow(name="MY test DAG")
+ def my_workflow():
      for n in range(2):
          res1 = func_no_args().alias(f"func_{n}")
          res2 = func_with_args(1, res1).alias(f"func_with_args_{n}")
+
+ schedule = Schedule(at_hours=0)
+ my_workflow(schedule=schedule)
  ```

- This will submit the entire DAG for execution, and the scheduler will take care of running the jobs in parallel and managing the resources.
- The DAG in the example above will be scheduled to run daily at 00:00.
+ This will submit the entire workflow for execution, and the scheduler will take care of running the jobs in parallel and managing the resources.
+ The workflow in the example above will be scheduled to run daily at 00:00.

  ___
  Visit [our website](https://www.datatailr.com/) for more!
@@ -47,25 +47,27 @@ print(datatailr.__provider__)
  The following example shows how to create a simple data pipeline using the Datatailr Python package.

  ```python
- from datatailr.scheduler import batch_job, Batch
+ from datatailr import workflow, task

- @batch_job()
+ @task()
  def func_no_args() -> str:
      return "no_args"


- @batch_job()
+ @task()
  def func_with_args(a: int, b: float) -> str:
      return f"args: {a}, {b}"

- with Batch(name="MY test DAG", local_run=True) as dag:
+ @workflow(name="MY test DAG")
+ def my_workflow():
      for n in range(2):
          res1 = func_no_args().alias(f"func_{n}")
          res2 = func_with_args(1, res1).alias(f"func_with_args_{n}")
+ my_workflow(local_run=True)
  ```

  Running this code will create a graph of jobs and execute it.
- Each node on the graph represents a job, which in turn is a call to a function decorated with `@batch_job()`.
+ Each node on the graph represents a job, which in turn is a call to a function decorated with `@task()`.

  Since this is a local run then the execution of each node will happen sequentially in the same process.

@@ -80,14 +82,14 @@ You will first need to separate your function definitions from the DAG definitio
  ```python
  # my_module.py

- from datatailr.scheduler import batch_job
+ from datatailr import task

- @batch_job()
+ @task()
  def func_no_args() -> str:
      return "no_args"


- @batch_job()
+ @task()
  def func_with_args(a: int, b: float) -> str:
      return f"args: {a}, {b}"
  ```
@@ -96,18 +98,20 @@ To use these functions in a batch job, you just need to import them and run in a

  ```python
  from my_module import func_no_args, func_with_args
- from datatailr.scheduler import Batch, Schedule
+ from datatailr import workflow

- schedule = Schedule(at_hours=0)
-
- with Batch(name="MY test DAG", schedule=schedule) as dag:
+ @workflow(name="MY test DAG")
+ def my_workflow():
      for n in range(2):
          res1 = func_no_args().alias(f"func_{n}")
          res2 = func_with_args(1, res1).alias(f"func_with_args_{n}")
+
+ schedule = Schedule(at_hours=0)
+ my_workflow(schedule=schedule)
  ```

- This will submit the entire DAG for execution, and the scheduler will take care of running the jobs in parallel and managing the resources.
- The DAG in the example above will be scheduled to run daily at 00:00.
+ This will submit the entire workflow for execution, and the scheduler will take care of running the jobs in parallel and managing the resources.
+ The workflow in the example above will be scheduled to run daily at 00:00.

  ___
  Visit [our website](https://www.datatailr.com/) for more!
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "datatailr"
- version = "0.1.66"
+ version = "0.1.81"
  description = "Ready-to-Use Platform That Drives Business Insights"
  readme = "README.md"
  requires-python = ">=3.9"
@@ -39,6 +39,7 @@ datatailr_run_batch = "datatailr.sbin.datatailr_run_batch:run"
  datatailr_run_app = "datatailr.sbin.datatailr_run_app:run"
  datatailr_run_excel = "datatailr.sbin.datatailr_run_excel:run"
  datatailr_run_service = "datatailr.sbin.datatailr_run_service:run"
+ datatailr = "datatailr.sbin.datatailr_cli:main"

  [project.optional-dependencies]
  dev = [
@@ -57,6 +58,10 @@ dev = [
      "myst-parser"
  ]

+ [tool.coverage.run]
+ branch = true
+ source = ["./src/datatailr"]
+
  [tool.ruff]
  src = [
      "src",
@@ -10,12 +10,14 @@ setup(
          (
              "/datatailr/sbin",
              [
+                 "src/sbin/datatailr_cli.py",
                  "src/sbin/datatailr_run.py",
                  "src/sbin/datatailr_run_batch.py",
                  "src/sbin/datatailr_run_app.py",
                  "src/sbin/datatailr_run_excel.py",
                  "src/sbin/datatailr_run_service.py",
              ],
-         )
+         ),
+         ("datatailr_demo", ["src/datatailr_demo/README.md"]),
      ],
  )
@@ -16,6 +16,14 @@ from datatailr.blob import Blob
  from datatailr.build import Image
  from datatailr.utils import Environment, is_dt_installed
  from datatailr.version import __version__
+ from datatailr.scheduler import (
+     App,
+     Service,
+     ExcelAddin,
+     workflow,
+     task,
+     set_allow_unsafe_scheduling,
+ )

  system = dt__System()
  if isinstance(system, mock_cli_tool):
@@ -33,4 +41,10 @@ __all__ = [
      "__version__",
      "__provider__",
      "is_dt_installed",
+     "App",
+     "Service",
+     "ExcelAddin",
+     "workflow",
+     "task",
+     "set_allow_unsafe_scheduling",
  ]
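
The two `__init__.py` hunks above re-export the scheduling API from the package root, so user code no longer has to import from `datatailr.scheduler`. A minimal sketch of the shortened imports, assuming a working installation (the `workflow`/`task` usage mirrors the README example earlier in this diff; `App`, `Service`, and `ExcelAddin` are shown only as importable names):

```python
# Illustrative only: these names are re-exported at the package root
# by the __init__.py hunk above.
from datatailr import App, ExcelAddin, Service, task, workflow  # noqa: F401


@task()
def greet() -> str:
    return "hello"


@workflow(name="root import example")
def example():
    greet()


example(local_run=True)  # local_run usage taken from the README hunk above
```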
@@ -25,15 +25,15 @@ class ACL:
          self,
          user: Union[User, str],
          group: Optional[Union[Group, str]] = None,
-         permissions: Optional[List[str]] = None,
+         permissions: Optional[List[str] | str] = None,
      ):
          if user is None:
              user = User.signed_user()
          self.user = user if isinstance(user, User) else User.get(user)
-         if self.user is not None:
-             self.group = (
-                 group if group and isinstance(group, Group) else self.user.primary_group
-             )
+         if group is None:
+             group = self.user.primary_group
+         group = group if isinstance(group, Group) else Group.get(str(group))
+         self.group = group
          self.permissions = permissions or "rwr---"

          self.__group_can_read = False
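
The rewritten constructor above now falls back to the user's primary group when no group is given, resolves a group passed by name via `Group.get`, and accepts `permissions` as a plain string. A hedged sketch of the resulting call patterns (the user, group, and permission values are invented; importing `ACL` from the package root is assumed, as the image.py hunk below does):

```python
from datatailr import ACL

# Illustrative only: argument handling per the acl.py hunk above.
acl_default = ACL(user="alice")                       # group -> alice's primary group
acl_named = ACL(user="alice", group="analysts")       # group -> Group.get("analysts")
acl_string = ACL(user="alice", permissions="rw-r--")  # permissions accepted as a string
```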
@@ -10,12 +10,14 @@

  from __future__ import annotations

+ import os
  import tempfile

  from datatailr.wrapper import dt__Blob

  # Datatailr Blob API Client
  __client__ = dt__Blob()
+ __user__ = os.getenv("USER", "root")


  class Blob:
@@ -81,8 +83,13 @@ class Blob:
          """
          # Since direct reading and writting of blobs is not implemented yet, we are using a temporary file.
          # This is a workaround to allow reading the blob content directly from the blob storage.
-
-         with tempfile.NamedTemporaryFile(delete=True) as temp_file:
+         temp_dir = f"/home/{__user__}/tmp"
+         if not os.path.exists(temp_dir):
+             temp_dir = "/tmp"
+         else:
+             temp_dir += "/.dt"
+             os.makedirs(temp_dir, exist_ok=True)
+         with tempfile.NamedTemporaryFile(dir=temp_dir, delete=True) as temp_file:
              self.get_file(name, temp_file.name)
              with open(temp_file.name, "r") as f:
                  return f.read()
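
The change above swaps the default temp location for a per-user directory. A standalone sketch of the directory-selection logic, pulled out of the `Blob` class for clarity (the helper name is made up and not part of the package; the paths and the `USER` fallback mirror the hunk):

```python
import os


def pick_temp_dir(user: str = os.getenv("USER", "root")) -> str:
    # Mirrors the hunk above: use /home/<user>/tmp/.dt when /home/<user>/tmp
    # exists, otherwise fall back to /tmp.
    temp_dir = f"/home/{user}/tmp"
    if not os.path.exists(temp_dir):
        return "/tmp"
    temp_dir += "/.dt"
    os.makedirs(temp_dir, exist_ok=True)
    return temp_dir
```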
@@ -10,7 +10,7 @@

  import json
  import os
- import re
+ import sys
  from typing import Optional

  from datatailr import ACL, User
@@ -26,7 +26,7 @@ class Image:
      def __init__(
          self,
          acl: Optional[ACL] = None,
-         python_version: str = "3.12",
+         python_version: str = "auto",
          python_requirements: str | list[str] = "",
          build_script_pre: str = "",
          build_script_post: str = "",
@@ -56,8 +56,10 @@ class Image:
      def python_version(self, value: str):
          if not isinstance(value, str):
              raise TypeError("python_version must be a string.")
-         if not re.match(r"^\d+\.\d+(\.\d+)?$", value):
-             raise ValueError("Invalid python_version format. Expected format: X.Y[.Z]")
+         if value.lower() == "auto":
+             value = f"{sys.version_info.major}.{sys.version_info.minor}"
+         if value not in ["3.10", "3.11", "3.12", "3.13", "3.14"]:
+             raise ValueError(f"Invalid python_version: {value}")
          self._python_version = value

      @property
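
With the default now `"auto"`, the setter above pins the image to the interpreter that builds it instead of a hard-coded "3.12". A hedged example of the resolution (constructing `Image` with only `python_version` is assumed to be valid; the supported-version list is the one in the hunk):

```python
import sys

from datatailr.build import Image  # import path as used in the __init__.py hunk above

# Illustrative only: "auto" resolves to the running interpreter, e.g. "3.12" when
# built under Python 3.12; anything outside 3.10-3.14 now raises ValueError.
image = Image(python_version="auto")
print(image.python_version)  # e.g. "3.12"
```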
@@ -2,8 +2,8 @@

  # noqa: F401
  try:
-     from dt.excel import Addin
-     from dt.excel import Queue
+     from datatailr.excel.addin import Addin
+     from datatailr.excel.addin import Queue
  except ImportError:

      class DummyAddin:
@@ -0,0 +1,201 @@
+ """
+ Copyright (c) 2025 - Datatailr Inc.
+ All Rights Reserved.
+
+ This file is part of Datatailr and subject to the terms and conditions
+ defined in 'LICENSE.txt'. Unauthorized copying and/or distribution
+ of this file, in parts or full, via any medium is strictly prohibited.
+ """
+
+ import os
+ import sys
+ import importlib
+ import subprocess
+ import inspect
+ from urllib.parse import urlparse
+
+ import numpy as np
+
+ try:
+     from dt.excel_base import Addin as AddinBase, Queue  # type: ignore
+ except ImportError as e:
+     from datatailr.excel.stubs import AddinBase, Queue
+
+
+ def __progress__(queue, stop):
+     from time import sleep
+
+     bar = ["█", "██", "███", "████", "█████", "██████", "███████"]
+
+     count = 0
+     while True:
+         if stop.is_set():
+             return
+         queue.push(bar[count % len(bar)])
+         count += 1
+         sleep(0.25)
+
+
+ def get_package_root(mod):
+     # Given module, e.g., dt.excel located at /opt/datatailr/python/dt/excel.py
+     # return entry for sys.path so it could be imported as a module.
+     # For the module above: /opt/datatailr/python
+     mod_path = os.path.abspath(mod.__file__)
+     mod_parts = mod.__name__.split(".")
+     for _ in range(len(mod_parts)):
+         mod_path = os.path.dirname(mod_path)
+     return mod_path
+
+
+ def matches_annotation(value, annotation):
+     if isinstance(value, np.ndarray):
+         return True
+     if annotation is bool:
+         return isinstance(value, bool) or (type(value) is int and value in (0, 1))
+     if annotation is float:
+         return isinstance(value, float) or (type(value) is int)
+     return isinstance(value, annotation)
+
+
+ def extract_hostname(url: str) -> str | None:
+     url = url if url else ""
+     if "://" not in url:
+         url = "//" + url
+     return urlparse(url).hostname
+
+
+ class Addin(AddinBase):
+     def __init__(self, *args, **kwargs):
+         super(Addin, self).__init__(*args, **kwargs)
+         f = inspect.currentframe().f_back
+         mod = inspect.getmodule(f)
+         if mod is not None:
+             setattr(mod, "__dt_addin__", self)
+
+     def run(self, port, ws_port, ide=True):
+         # Excel addin executable will try to import an object literally called "addin"
+         # from a module passed to dt-excel.sh as an argument. So to find which module
+         # to pass to dt-excel.sh, we walk the callstack until a module with "addin"
+         # object of type Addin is found. If not -- inform user about this requirement.
+         found_module = None
+         for frame_info in inspect.stack():
+             mod = inspect.getmodule(frame_info.frame)
+             if not mod or not hasattr(mod, "__name__"):
+                 continue
+
+             temp_path = get_package_root(mod)
+             sys.path.insert(0, temp_path)
+             try:
+                 imported_mod = importlib.import_module(mod.__name__)
+             finally:
+                 sys.path.pop(0)
+
+             addin_obj = getattr(imported_mod, "__dt_addin__", None)
+             if addin_obj is self or id(addin_obj) == id(self):
+                 found_module = mod
+                 break
+
+         if not found_module:
+             raise ValueError(
+                 "'__dt_addin__' not found."
+             )
+
+         if found_module.__name__ != "__main__":
+             # addin.run was called from the initial python script (where __name__ == "__main__")
+             module_name = found_module.__name__
+             if found_module.__file__ is None:
+                 raise ValueError(f"Module {found_module.__name__} has no __file__")
+             dir_name = os.path.dirname(os.path.abspath(found_module.__file__))
+         else:
+             # initial python script did not call addin.run() itself (e.g. it imported function that called addin.run)
+             filename = inspect.getsourcefile(found_module)
+             if filename is None:
+                 raise ValueError(f"Cannot determine filename for module {found_module}")
+             module_name = os.path.splitext(os.path.basename(filename))[0]
+             dir_name = os.path.dirname(os.path.abspath(filename))
+
+         ide_flag = "-i" if ide else ""
+         hostname = extract_hostname(os.environ.get("VSCODE_PROXY_URI"))
+
+         subprocess.run(
+             [
+                 "bash",
+                 "-c",
+                 f'PYTHONPATH="{dir_name}:$PYTHONPATH" /opt/datatailr/bin/dt-excel.sh {ide_flag} -n -H {hostname} -p {port} -w {ws_port} {module_name}',
+             ]
+         )
+
+     def expose(
+         self, description, help, volatile=False, streaming=False, progressbar=False
+     ):
+         if streaming and progressbar:
+             raise ValueError(
+                 "you cannot specify progressbar and streaming at the same time"
+             )
+
+         def decorator(func):
+             signature = inspect.signature(func)
+
+             def wrapper(*args, **kwargs):
+                 # TODO: check whether it's possible to use a kwarg instead so that a decorated function can
+                 # be called directly from python code without requiring positional argument for _id
+                 _id = args[0]
+
+                 bound = signature.bind_partial(**kwargs)
+                 bound.apply_defaults()
+                 for arg in signature.parameters.values():
+                     if streaming and arg.name == "queue":
+                         continue
+
+                     if not matches_annotation(
+                         bound.arguments[arg.name], arg.annotation
+                     ):
+                         raise ValueError(
+                             "excel/python/dt/excel.py: Got argument of wrong type, expected %s or numpy.ndarray, got %s"
+                             % (arg.annotation, type(bound.arguments[arg.name]))
+                         )
+                 queue = Queue(self.name.lower() + "." + func.__name__, _id)
+                 if not streaming:
+                     if not progressbar:
+                         result = func(**kwargs)
+                         if hasattr(result, "tolist"):
+                             result = result.tolist()
+                         return result
+
+                     from threading import Event, Thread
+
+                     error = None
+
+                     stop = Event()
+                     thread = Thread(target=__progress__, args=(queue, stop))
+                     thread.start()
+                     try:
+                         result = func(**kwargs)
+                     except Exception as exception:
+                         error = str(exception)
+
+                     stop.set()
+                     thread.join()
+
+                     if error is not None:
+                         queue.error(error)
+                     else:
+                         queue.push(result)
+                     return
+                 try:
+                     func(queue, **kwargs)
+                 except Exception as exception:
+                     queue.error(str(exception))
+
+             self.decorator_impl(
+                 signature,
+                 wrapper,
+                 func.__name__,
+                 description,
+                 help,
+                 volatile,
+                 streaming or progressbar,
+             )
+             return wrapper
+
+         return decorator
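
For orientation, `expose` above wraps a function so the add-in can validate argument types against the annotations and push results (or a progress bar) through a `Queue`, while `run` shells out to `/opt/datatailr/bin/dt-excel.sh` with the module that created the add-in. A hedged usage sketch (the add-in name, exposed function, and ports are invented; the `Addin`, `expose`, and `run` signatures are the ones defined above, and `datatailr.excel` re-exports `Addin` per the `excel/__init__.py` hunk):

```python
from datatailr.excel import Addin

# Illustrative only: the add-in name and exposed function are made up.
addin = Addin("demo")


@addin.expose(description="Add two numbers", help="Returns a + b")
def add(a: float, b: float) -> float:
    return a + b


if __name__ == "__main__":
    # Ports are placeholders; run() invokes dt-excel.sh with them.
    addin.run(port=8080, ws_port=8081)
```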
@@ -0,0 +1,37 @@
+ """
+ Copyright (c) 2025 - Datatailr Inc.
+ All Rights Reserved.
+
+ This file is part of Datatailr and subject to the terms and conditions
+ defined in 'LICENSE.txt'. Unauthorized copying and/or distribution
+ of this file, in parts or full, via any medium is strictly prohibited.
+ """
+
+
+ class AddinBase:
+     def __init__(self, name, *args, **kwargs):
+         self.name = name
+
+     def decorator_impl(
+         self,
+         signature,
+         wrapper,
+         func_name,
+         description,
+         help,
+         volatile,
+         streaming,
+     ):
+         pass
+
+
+ class Queue:
+     def __init__(self, name, _id):
+         self.name = name
+         self.id = _id
+
+     def push(self, value):
+         print(f"Queue {self.name} ({self.id}): {value}")
+
+     def error(self, message):
+         print(f"Queue {self.name} ({self.id}) Error: {message}")