datatailr 0.1.6__py3-none-any.whl → 0.1.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of datatailr has been flagged as potentially problematic.

@@ -0,0 +1,37 @@
+ #!/usr/bin/env python3
+
+ # *************************************************************************
+ #
+ # Copyright (c) 2025 - Datatailr Inc.
+ # All Rights Reserved.
+ #
+ # This file is part of Datatailr and subject to the terms and conditions
+ # defined in 'LICENSE.txt'. Unauthorized copying and/or distribution
+ # of this file, in parts or full, via any medium is strictly prohibited.
+ # *************************************************************************
+
+ import os
+ import sys
+ import runpy
+ from importlib.resources import files
+
+ from datatailr.logging import DatatailrLogger
+
+
+ logger = DatatailrLogger(os.path.abspath(__file__)).get_logger()
+
+
+ def run():
+     logger.info("Starting Datatailr app...")
+     entrypoint = os.environ.get("DATATAILR_ENTRYPOINT")
+     if entrypoint is None or ":" not in entrypoint:
+         raise ValueError(
+             "Environment variable 'DATATAILR_ENTRYPOINT' is not in the format 'module_name:file_name'."
+         )
+
+     module_name, file_name = entrypoint.split(":")
+
+     script = files(module_name).joinpath(file_name)
+     sys.argv = ["streamlit", "run", str(script), *sys.argv[1:]]
+     logger.info(f"Running entrypoint: {entrypoint}")
+     runpy.run_module("streamlit", run_name="__main__")
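
The runner above resolves the app's entry script from an installed package with `importlib.resources.files` and then re-invokes Streamlit in-process via `runpy`. A minimal sketch of the environment it expects, using hypothetical package and file names (the only documented requirement is the 'module_name:file_name' format):

    import os
    from importlib.resources import files

    # Hypothetical values; the runner requires the "module_name:file_name" form.
    os.environ["DATATAILR_ENTRYPOINT"] = "my_dashboards:home.py"

    module_name, file_name = os.environ["DATATAILR_ENTRYPOINT"].split(":")
    script = files(module_name).joinpath(file_name)  # same lookup the runner performs
    print(script)                                    # path that would be passed to `streamlit run`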
@@ -12,19 +12,19 @@
 
  import importlib
  import os
- import pickle
 
- from datatailr import dt__Blob
  from datatailr.logging import DatatailrLogger
 
  logger = DatatailrLogger(os.path.abspath(__file__)).get_logger()
 
 
- def main():
+ def run():
+     logger.info("Running Datatailr batch job")
      entry_point = os.environ.get("DATATAILR_BATCH_ENTRYPOINT")
      batch_run_id = os.environ.get("DATATAILR_BATCH_RUN_ID")
      batch_id = os.environ.get("DATATAILR_BATCH_ID")
      job_id = os.environ.get("DATATAILR_JOB_ID")
+     logger.info(f"Batch run ID: {batch_run_id}, Batch ID: {batch_id}, Job ID: {job_id}")
 
      if entry_point is None:
          raise ValueError(
@@ -44,20 +44,5 @@ def main():
          raise ValueError(
              f"The function '{func_name}' in module '{module_name}' is not callable."
          )
-     result = function()
-     result_path = f"batch-results-{batch_run_id}-{job_id}.pkl"
-     with open(result_path, "wb") as f:
-         pickle.dump(result, f)
-     blob = dt__Blob()
-     blob.cp(result_path, "blob://")
-     logger.info(f"{result_path} copied to blob storage.")
-
-
- if __name__ == "__main__":
-     try:
-         logger.debug("Starting job execution...")
-         main()
-         logger.debug("Job executed successfully.")
-     except Exception as e:
-         logger.error(f"Error during job execution: {e}")
-         raise
+     function()
+     logger.info("Datatailr batch job completed successfully.")
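
With this change the batch runner no longer pickles the function's return value and copies it to blob storage; it simply resolves the configured function and calls it. A rough sketch of the environment the runner reads, with made-up identifiers (the exact module/function format of the entrypoint is not visible in this hunk):

    import os

    # Hypothetical values for local experimentation only.
    os.environ["DATATAILR_BATCH_ENTRYPOINT"] = "my_jobs.daily"  # assumed module reference; format not shown here
    os.environ["DATATAILR_BATCH_RUN_ID"] = "run-0001"
    os.environ["DATATAILR_BATCH_ID"] = "batch-42"
    os.environ["DATATAILR_JOB_ID"] = "job-7"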
@@ -0,0 +1,34 @@
+ #!/usr/bin/env python3
+
+ # *************************************************************************
+ #
+ # Copyright (c) 2025 - Datatailr Inc.
+ # All Rights Reserved.
+ #
+ # This file is part of Datatailr and subject to the terms and conditions
+ # defined in 'LICENSE.txt'. Unauthorized copying and/or distribution
+ # of this file, in parts or full, via any medium is strictly prohibited.
+ # *************************************************************************
+
+ import os
+ import subprocess
+
+ from datatailr.logging import DatatailrLogger
+
+ logger = DatatailrLogger(os.path.abspath(__file__)).get_logger()
+
+
+ def run():
+     logger.info("Starting Datatailr excel add-in...")
+     entrypoint = os.environ.get("DATATAILR_ENTRYPOINT")
+     hostname = os.environ.get("DATATAILR_HOST")
+
+     if entrypoint is None:
+         raise ValueError("Environment variable 'DATATAILR_ENTRYPOINT' is not set.")
+
+     if hostname is None:
+         raise ValueError("Environment variable 'DATATAILR_HOST' is not set.")
+
+     entrypoint = f'./dt-excel.sh -n -H "{hostname}" -p 8080 "{entrypoint}"'
+     logger.info(f"Running entrypoint: {entrypoint}")
+     subprocess.run(entrypoint, shell=True)
@@ -0,0 +1,34 @@
+ #!/usr/bin/env python3
+
+ # *************************************************************************
+ #
+ # Copyright (c) 2025 - Datatailr Inc.
+ # All Rights Reserved.
+ #
+ # This file is part of Datatailr and subject to the terms and conditions
+ # defined in 'LICENSE.txt'. Unauthorized copying and/or distribution
+ # of this file, in parts or full, via any medium is strictly prohibited.
+ # *************************************************************************
+
+ import os
+ import importlib
+
+ from datatailr.logging import DatatailrLogger
+
+ logger = DatatailrLogger(os.path.abspath(__file__)).get_logger()
+
+
+ def run():
+     logger.info("Starting Datatailr service...")
+     entrypoint = os.environ.get("DATATAILR_ENTRYPOINT")
+     port = os.environ.get("DATATAILR_SERVICE_PORT")
+
+     if entrypoint is None:
+         raise ValueError("Environment variable 'DATATAILR_ENTRYPOINT' is not set.")
+
+     if port is None:
+         raise ValueError("Environment variable 'DATATAILR_SERVICE_PORT' is not set.")
+
+     entrypoint_module = importlib.import_module(entrypoint)
+     entrypoint_module.__service_main__(int(port))
+     logger.info(f"Running entrypoint: {entrypoint}")
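
The service runner imports the module named by DATATAILR_ENTRYPOINT and calls its `__service_main__` hook with `int(DATATAILR_SERVICE_PORT)`, so any module used as a service entrypoint has to expose that function. A minimal sketch of such a module, using Python's standard library purely for illustration (the module name and server choice are hypothetical):

    # my_service.py -- hypothetical module referenced by DATATAILR_ENTRYPOINT
    from http.server import BaseHTTPRequestHandler, HTTPServer


    class _Handler(BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_response(200)
            self.end_headers()
            self.wfile.write(b"ok")


    def __service_main__(port: int) -> None:
        # Invoked by the Datatailr service runner with the port from DATATAILR_SERVICE_PORT.
        HTTPServer(("0.0.0.0", port), _Handler).serve_forever()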
@@ -8,31 +8,47 @@
  # of this file, in parts or full, via any medium is strictly prohibited.
  # *************************************************************************
 
- from datatailr.errors import BatchJobError, DatatailrError
+ r"""
+ Datatailr Scheduler Module
+ ==========================
+
+ The `datatailr.scheduler` module provides a framework for scheduling and managing batch jobs.
+
+ The main job types are:
+ _______________________
+
+ - **Batch**: Represents a batch job that can be scheduled and executed.
+   The job can include multiple tasks which can be run in parallel or sequentially.
+ - **Service**: Represents a service job that runs continuously.
+ - **App**: Represents a web app or a dashboard, which can be built using one of the supported frameworks,
+   such as `Streamlit <https://streamlit.io/>`_, `Dash <https://dash.plotly.com/>`_, or `Panel <https://panel.holoviz.org/>`_.
+ - **Excel**: Represents an Excel add-in.
+ """
+
+ from datatailr.errors import BatchJobError
  from datatailr.scheduler.base import (
-     ACL,
      EntryPoint,
      Environment,
      Job,
      JobType,
      Resources,
-     User,
+     set_allow_unsafe_scheduling,
  )
  from datatailr.scheduler.batch import Batch, BatchJob, DuplicateJobNameError
- from datatailr.scheduler.batch_decorator import batch_decorator as batch
+ from datatailr.scheduler.batch_decorator import batch_decorator as batch_job
+ from datatailr.scheduler.schedule import Schedule
 
  __all__ = [
      "Job",
      "JobType",
      "Environment",
-     "User",
      "Resources",
-     "ACL",
      "EntryPoint",
      "Batch",
      "BatchJob",
-     "batch",
-     "DatatailrError",
+     "batch_job",
      "BatchJobError",
      "DuplicateJobNameError",
+     "set_allow_unsafe_scheduling",
+     "Schedule",
  ]
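
The module's public API shifts in this release: the decorator formerly re-exported as `batch` is now `batch_job`; `ACL`, `User`, and `DatatailrError` are no longer importable from `datatailr.scheduler`; and `Schedule` plus `set_allow_unsafe_scheduling` are new exports. Code written against 0.1.6 would need an import update along these lines (a sketch based solely on the `__all__` change above):

    # 0.1.6-style imports that 0.1.10 no longer provides from this module:
    # from datatailr.scheduler import batch, ACL, User, DatatailrError

    # 0.1.10-style imports, per the updated __all__:
    from datatailr.scheduler import batch_job, Batch, Schedule, set_allow_unsafe_scheduling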
@@ -21,17 +21,26 @@ and the inner dictionaries contain the arguments.
  This module is for internal use of the datatailr package.
  """
 
- from collections import defaultdict
+ from datatailr.dt_json import json, decode_json
+ import os
  import pickle
- from typing import Any, Dict
+ from typing import Any, Dict, Optional
 
  from datatailr import is_dt_installed, Blob
- from datatailr.scheduler import BatchJob
+ from datatailr.errors import DatatailrError
 
 
  __BLOB_STORAGE__ = Blob()
 
 
+ class CacheNotFoundError(DatatailrError):
+     """Custom error for cache operations."""
+
+     def __init__(self, message: str):
+         super().__init__(message)
+         self.message = message
+
+
  class ArgumentsCache:
      def __init__(self, use_persistent_cache: bool = is_dt_installed()):
          """
@@ -40,11 +49,12 @@ class ArgumentsCache:
          :param use_persistent_cache: If True, use the persistent cache backend. Otherwise, use in-memory cache.
          """
          self.use_persistent_cache = use_persistent_cache
-         self.in_memory_cache: Dict[str, Dict[str, Dict[str, Any]]] = defaultdict(
-             lambda: defaultdict(dict)
-         )
+         if not self.use_persistent_cache:
+             # Create a temp folder, for local caching
+             os.makedirs("/tmp/datatailr/batch/arguments", exist_ok=True)
+             os.makedirs("/tmp/datatailr/batch/results", exist_ok=True)
 
-     def add_arguments(self, batch_run_id: str, job: str, arguments: Dict[str, Any]):
+     def add_arguments(self, batch_id: str, arguments: Dict[str, Any]):
          """
          Add arguments to the cache for a specific job and batch run.
 
@@ -52,13 +62,16 @@ class ArgumentsCache:
          :param job_name: Name of the job.
          :param arguments: Dictionary of arguments to store.
          """
-         if self.use_persistent_cache and isinstance(job, str):
-             path = f"{batch_run_id}/{job}/args"
+         path = f"/tmp/datatailr/batch/arguments/{batch_id}.pkl"
+         if self.use_persistent_cache:
              self._add_to_persistent_cache(path, arguments)
          else:
-             self.in_memory_cache[batch_run_id][job]["args"] = arguments
+             with open(path, "wb") as f:
+                 pickle.dump(arguments, f)
 
-     def get_arguments(self, batch_run_id: str, job: str) -> Dict[str, Any]:
+     def get_arguments(
+         self, batch_id: str, job: str, batch_run_id: Optional[str]
+     ) -> Dict[str, Any]:
          """
          Retrieve arguments from the cache for a specific job and batch run.
 
@@ -66,27 +79,37 @@ class ArgumentsCache:
          :param job_name: Name of the job.
          :return: Dictionary of arguments.
          """
+         path = f"/tmp/datatailr/batch/arguments/{batch_id}.pkl"
          if self.use_persistent_cache and isinstance(job, str):
-             path = f"{batch_run_id}/{job}/args"
-             arg_keys = self._get_from_persistent_cache(path)
-             if not isinstance(arg_keys, dict):
-                 raise TypeError(
-                     f"Expected a dictionary for arguments, got {type(arg_keys)}"
-                 )
+             try:
+                 arg_keys = self._get_from_persistent_cache(path)
+             except RuntimeError:
+                 return {}
          else:
-             arg_keys = (
-                 self.in_memory_cache.get(batch_run_id, {})
-                 .get(job, {})
-                 .get("args", {})
-                 .items()
-             )
-         arguments = {}
-         for key, value in arg_keys:
-             if isinstance(value, BatchJob):
-                 arguments[key] = value.name
-             else:
-                 arguments[key] = value
-         return arguments
+             if not os.path.exists(path):
+                 raise CacheNotFoundError(
+                     f"Cache file not found: {path}. Ensure that the arguments have been cached."
+                 )
+             with open(path, "rb") as f:
+                 try:
+                     arg_keys = pickle.load(f)
+                 except EOFError:
+                     return {}
+         if not isinstance(arg_keys, dict):
+             raise TypeError(
+                 f"Expected a dictionary for arguments, got {type(arg_keys)}"
+             )
+         if batch_run_id is None:
+             return arg_keys[job]
+         arguments_mapping = decode_json(
+             os.getenv("DATATAILR_JOB_ARGUMENT_MAPPING", "{}")
+         )
+         arguments_mapping = {value: key for key, value in arguments_mapping.items()}
+         args = {
+             arguments_mapping.get(name, name): self.get_result(batch_run_id, value)
+             for name, value in arg_keys[job].items()
+         }
+         return args
 
      def add_result(self, batch_run_id: str, job: str, result: Any):
          """
@@ -96,13 +119,14 @@ class ArgumentsCache:
          :param job: Name of the job.
          :param result: Result of the batch job.
          """
+         path = f"/tmp/datatailr/batch/results/{batch_run_id}_{job}.pkl"
          if self.use_persistent_cache and isinstance(job, str):
-             path = f"{batch_run_id}/{job}/result"
              self._add_to_persistent_cache(path, result)
          else:
-             self.in_memory_cache[batch_run_id][job]["result"] = result
+             with open(path, "wb") as f:
+                 pickle.dump(result, f)
 
-     def get_result(self, batch_run_id: str, job: str) -> Any:
+     def get_result(self, batch_run_id: str, job: Any) -> Any:
          """
          Retrieve the result of a batch job from the cache.
 
@@ -110,10 +134,17 @@ class ArgumentsCache:
          :param job: Name of the job.
          :return: Result of the batch job.
          """
+         path = f"/tmp/datatailr/batch/results/{batch_run_id}_{job}.pkl"
          if self.use_persistent_cache and isinstance(job, str):
-             path = f"{batch_run_id}/{job}/result"
              return self._get_from_persistent_cache(path)
-         return self.in_memory_cache[batch_run_id][job].get("result")
+         else:
+             if not os.path.exists(path):
+                 return job
+             with open(path, "rb") as f:
+                 try:
+                     return pickle.load(f)
+                 except EOFError:
+                     return None
 
      def _add_to_persistent_cache(self, path: str, blob: Any):
          """
@@ -124,9 +155,8 @@ class ArgumentsCache:
          :raises TypeError: If the blob cannot be pickled.
 
          """
-         __BLOB_STORAGE__.put_blob(
-             path, pickle.dumps(blob, protocol=pickle.HIGHEST_PROTOCOL)
-         )
+         path = path.replace("/tmp/", "")
+         __BLOB_STORAGE__.put_blob(path, json.dumps(blob))
 
      def _get_from_persistent_cache(self, path: str) -> Any:
          """
@@ -134,8 +164,6 @@ class ArgumentsCache:
 
          :param path: Path in the Blob storage where the blob is stored.
          """
-         try:
-             data = __BLOB_STORAGE__.get_blob(path)
-             return pickle.loads(data)
-         except (TypeError, EOFError):
-             return {}
+         path = path.replace("/tmp/", "")
+         data = __BLOB_STORAGE__.get_blob(path)
+         return json.loads(data)
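
With the in-memory defaultdict cache removed, the non-persistent path now stores pickles under /tmp/datatailr/batch/, while the persistent backend serializes through the package's JSON helpers (`json.dumps`/`json.loads` from `datatailr.dt_json`) instead of pickle. A rough local-usage sketch, with made-up identifiers, inferred only from the signatures above (this is an internal module, and its import path is assumed here):

    from datatailr.scheduler.arguments_cache import ArgumentsCache  # module path assumed

    cache = ArgumentsCache(use_persistent_cache=False)
    cache.add_arguments("batch-42", {"job_b": {"x": "job_a"}})  # -> /tmp/datatailr/batch/arguments/batch-42.pkl
    cache.add_result("run-0001", "job_a", {"rows": 10})         # -> /tmp/datatailr/batch/results/run-0001_job_a.pkl

    cache.get_arguments("batch-42", "job_b", None)  # {'x': 'job_a'} -- raw mapping, unresolved
    cache.get_result("run-0001", "job_a")           # {'rows': 10}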