datatailr 0.1.68__tar.gz → 0.1.70__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datatailr might be problematic.

Files changed (40)
  1. {datatailr-0.1.68/src/datatailr.egg-info → datatailr-0.1.70}/PKG-INFO +1 -1
  2. {datatailr-0.1.68 → datatailr-0.1.70}/pyproject.toml +1 -1
  3. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/scheduler/arguments_cache.py +4 -1
  4. {datatailr-0.1.68 → datatailr-0.1.70/src/datatailr.egg-info}/PKG-INFO +1 -1
  5. {datatailr-0.1.68 → datatailr-0.1.70}/src/sbin/datatailr_run.py +34 -8
  6. {datatailr-0.1.68 → datatailr-0.1.70}/LICENSE +0 -0
  7. {datatailr-0.1.68 → datatailr-0.1.70}/README.md +0 -0
  8. {datatailr-0.1.68 → datatailr-0.1.70}/setup.cfg +0 -0
  9. {datatailr-0.1.68 → datatailr-0.1.70}/setup.py +0 -0
  10. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/__init__.py +0 -0
  11. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/acl.py +0 -0
  12. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/blob.py +0 -0
  13. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/build/__init__.py +0 -0
  14. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/build/image.py +0 -0
  15. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/dt_json.py +0 -0
  16. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/errors.py +0 -0
  17. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/excel/__init__.py +0 -0
  18. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/group.py +0 -0
  19. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/logging.py +0 -0
  20. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/scheduler/__init__.py +0 -0
  21. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/scheduler/base.py +0 -0
  22. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/scheduler/batch.py +0 -0
  23. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/scheduler/batch_decorator.py +0 -0
  24. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/scheduler/constants.py +0 -0
  25. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/scheduler/schedule.py +0 -0
  26. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/scheduler/utils.py +0 -0
  27. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/tag.py +0 -0
  28. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/user.py +0 -0
  29. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/utils.py +0 -0
  30. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/version.py +0 -0
  31. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/wrapper.py +0 -0
  32. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr.egg-info/SOURCES.txt +0 -0
  33. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr.egg-info/dependency_links.txt +0 -0
  34. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr.egg-info/entry_points.txt +0 -0
  35. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr.egg-info/requires.txt +0 -0
  36. {datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr.egg-info/top_level.txt +0 -0
  37. {datatailr-0.1.68 → datatailr-0.1.70}/src/sbin/datatailr_run_app.py +0 -0
  38. {datatailr-0.1.68 → datatailr-0.1.70}/src/sbin/datatailr_run_batch.py +0 -0
  39. {datatailr-0.1.68 → datatailr-0.1.70}/src/sbin/datatailr_run_excel.py +0 -0
  40. {datatailr-0.1.68 → datatailr-0.1.70}/src/sbin/datatailr_run_service.py +0 -0
{datatailr-0.1.68/src/datatailr.egg-info → datatailr-0.1.70}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datatailr
-Version: 0.1.68
+Version: 0.1.70
 Summary: Ready-to-Use Platform That Drives Business Insights
 Author-email: Datatailr <info@datatailr.com>
 License-Expression: MIT
{datatailr-0.1.68 → datatailr-0.1.70}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "datatailr"
-version = "0.1.68"
+version = "0.1.70"
 description = "Ready-to-Use Platform That Drives Business Insights"
 readme = "README.md"
 requires-python = ">=3.9"
{datatailr-0.1.68 → datatailr-0.1.70}/src/datatailr/scheduler/arguments_cache.py
@@ -49,7 +49,10 @@ class ArgumentsCache:
 
         :param use_persistent_cache: If True, use the persistent cache backend. Otherwise, use in-memory cache.
         """
-        self.__bucket_name__ = dt__Tag().get("blob_storage_prefix") + "batch"
+        try:
+            self.__bucket_name__ = dt__Tag().get("blob_storage_prefix") + "batch"
+        except Exception:
+            self.__bucket_name__ = "local-batch"
         self.use_persistent_cache = use_persistent_cache
         if not self.use_persistent_cache:
             # Create a temp folder, for local caching
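
A note on the arguments_cache.py hunk above: the blob-storage bucket name is now resolved inside a try/except, falling back to the literal "local-batch" when the tag lookup raises (for instance, when no Datatailr backend is reachable). A minimal standalone sketch of the same fallback pattern, with a hypothetical read_blob_storage_prefix() standing in for dt__Tag().get("blob_storage_prefix"):

# Sketch only — read_blob_storage_prefix is a hypothetical stand-in, not a datatailr API.
def read_blob_storage_prefix() -> str:
    raise RuntimeError("tag service not reachable in this environment")

try:
    bucket_name = read_blob_storage_prefix() + "batch"
except Exception:
    # Fall back to a local name so caching still works without the backend.
    bucket_name = "local-batch"

print(bucket_name)  # prints "local-batch" when the lookup raises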
{datatailr-0.1.68 → datatailr-0.1.70/src/datatailr.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datatailr
-Version: 0.1.68
+Version: 0.1.70
 Summary: Ready-to-Use Platform That Drives Business Insights
 Author-email: Datatailr <info@datatailr.com>
 License-Expression: MIT
{datatailr-0.1.68 → datatailr-0.1.70}/src/sbin/datatailr_run.py
@@ -37,7 +37,7 @@ import subprocess
 import os
 import shlex
 import sysconfig
-from typing import Tuple
+from typing import Optional, Tuple
 from datatailr.logging import DatatailrLogger
 from datatailr.utils import is_dt_installed
 
@@ -105,7 +105,12 @@ def prepare_command_argv(command: str | list, user: str, env_vars: dict) -> list
     return ["sudo", "-u", user, "env", *env_kv, *command]
 
 
-def run_single_command_non_blocking(command: str | list, user: str, env_vars: dict):
+def run_single_command_non_blocking(
+    command: str | list,
+    user: str,
+    env_vars: dict,
+    log_stream_name: Optional[str | None] = None,
+) -> int:
     """
     Runs a single command non-blocking and returns the exit code after it finishes.
     This is designed to be run within an Executor.
@@ -114,7 +119,16 @@ def run_single_command_non_blocking(command: str | list, user: str, env_vars: di
     cmd_label = " ".join(argv[4:]) # For logging purposes
 
     try:
-        proc = subprocess.Popen(argv)
+        if log_stream_name:
+            stdout_file_path = f"/opt/datatailr/var/log/{log_stream_name}.log"
+            stderr_file_path = f"/opt/datatailr/var/log/{log_stream_name}_error.log"
+            with (
+                open(stdout_file_path, "ab", buffering=0) as stdout_file,
+                open(stderr_file_path, "ab", buffering=0) as stderr_file,
+            ):
+                proc = subprocess.Popen(argv, stdout=stdout_file, stderr=stderr_file)
+        else:
+            proc = subprocess.Popen(argv)
         returncode = proc.wait()
 
         if returncode != 0:
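
The hunk above adds an optional log_stream_name to run_single_command_non_blocking and, when it is given, points the child process's stdout and stderr at per-stream files under /opt/datatailr/var/log. A minimal sketch of that redirection pattern, assuming a POSIX system with an echo binary and using a temporary directory in place of the real log path:

# Sketch only: append a child process's output, unbuffered, to <name>.log / <name>_error.log.
import subprocess
import tempfile
from pathlib import Path

log_dir = Path(tempfile.mkdtemp())  # stand-in for /opt/datatailr/var/log
name = "example"

with (
    open(log_dir / f"{name}.log", "ab", buffering=0) as out_file,
    open(log_dir / f"{name}_error.log", "ab", buffering=0) as err_file,
):
    proc = subprocess.Popen(["echo", "hello"], stdout=out_file, stderr=err_file)
    returncode = proc.wait()

print(returncode, (log_dir / f"{name}.log").read_bytes())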
{datatailr-0.1.68 → datatailr-0.1.70}/src/sbin/datatailr_run.py (continued)
@@ -126,18 +140,27 @@
 
 
 def run_commands_in_parallel(
-    commands: list[str | list], user: str, env_vars: dict
+    commands: list[str | list],
+    user: str,
+    env_vars: dict,
+    log_stream_names: Optional[list[str | None]] = None,
 ) -> tuple[int, int]:
     """
     Executes two commands concurrently using a ThreadPoolExecutor.
     Returns a tuple of (return_code_cmd1, return_code_cmd2).
     """
-    with concurrent.futures.ThreadPoolExecutor(max_workers=-1) as executor:
+    with concurrent.futures.ThreadPoolExecutor(max_workers=len(commands)) as executor:
         futures = []
-        for command in commands:
+        for command, log_stream_name in zip(
+            commands, log_stream_names or [None] * len(commands)
+        ):
             futures.append(
                 executor.submit(
-                    run_single_command_non_blocking, command, user, env_vars
+                    run_single_command_non_blocking,
+                    command,
+                    user,
+                    env_vars,
+                    log_stream_name,
                 )
             )
         results = [
@@ -196,6 +219,7 @@ def main():
         env["PS1"] = (
             r"""\[\e[2m\]\A\[\e[0m\]|\[\e[38;5;40m\]\u\[\e[92m\]@${DATATAILR_JOB_NAME:-datatailr}\[\e[0m\]/\[\e[94;1m\]\w\[\e[0m\]\$"""
         )
+        os.makedirs("/opt/datatailr/var/log", exist_ok=True)
         ide_command = [
             "code-server",
             "--auth=none",
@@ -210,7 +234,9 @@
             "--NotebookApp.token=''",
             "--NotebookApp.password=''",
         ]
-        run_commands_in_parallel([ide_command, jupyter_command], user, env)
+        run_commands_in_parallel(
+            [ide_command, jupyter_command], user, env, ["code-server", "jupyter"]
+        )
 
     else:
         raise ValueError(f"Unknown job type: {job_type}")
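
Taken together, the remaining datatailr_run.py hunks size the thread pool to the number of commands (replacing max_workers=-1, which ThreadPoolExecutor rejects with a ValueError), pair each command with an optional log stream name, and route code-server and Jupyter output into the newly created /opt/datatailr/var/log directory. A self-contained sketch of that fan-out pattern, assuming a POSIX system with echo on PATH and a temporary log directory, not the package's actual entry point:

# Sketch only — names and paths here are illustrative, not the datatailr entry point.
import concurrent.futures
import subprocess
import tempfile
from pathlib import Path
from typing import Optional

LOG_DIR = Path(tempfile.mkdtemp())  # stand-in for /opt/datatailr/var/log


def run_one(command: list, log_stream_name: Optional[str] = None) -> int:
    """Run one command; if a stream name is given, append its output to log files."""
    if log_stream_name:
        with (
            open(LOG_DIR / f"{log_stream_name}.log", "ab", buffering=0) as out,
            open(LOG_DIR / f"{log_stream_name}_error.log", "ab", buffering=0) as err,
        ):
            # The child gets its own copies of the descriptors, so closing the
            # parent-side file objects after spawning does not stop its writes.
            proc = subprocess.Popen(command, stdout=out, stderr=err)
    else:
        proc = subprocess.Popen(command)
    return proc.wait()


def run_in_parallel(commands: list, log_stream_names: Optional[list] = None) -> list:
    """Fan the commands out over a thread pool, one worker per command."""
    names = log_stream_names or [None] * len(commands)
    with concurrent.futures.ThreadPoolExecutor(max_workers=len(commands)) as executor:
        futures = [executor.submit(run_one, c, n) for c, n in zip(commands, names)]
        return [f.result() for f in futures]


if __name__ == "__main__":
    codes = run_in_parallel(
        [["echo", "ide"], ["echo", "jupyter"]], ["code-server", "jupyter"]
    )
    print(codes, sorted(p.name for p in LOG_DIR.iterdir()))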