datatailr 0.1.66__tar.gz → 0.1.68__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datatailr might be problematic. See the registry's advisory page for this release for more details.

Files changed (40)
  1. {datatailr-0.1.66/src/datatailr.egg-info → datatailr-0.1.68}/PKG-INFO +1 -1
  2. {datatailr-0.1.66 → datatailr-0.1.68}/pyproject.toml +5 -1
  3. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/acl.py +5 -5
  4. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/scheduler/arguments_cache.py +8 -6
  5. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/scheduler/base.py +37 -9
  6. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/scheduler/batch.py +4 -2
  7. datatailr-0.1.68/src/datatailr/tag.py +35 -0
  8. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/wrapper.py +5 -3
  9. {datatailr-0.1.66 → datatailr-0.1.68/src/datatailr.egg-info}/PKG-INFO +1 -1
  10. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr.egg-info/SOURCES.txt +1 -0
  11. {datatailr-0.1.66 → datatailr-0.1.68}/src/sbin/datatailr_run.py +63 -35
  12. {datatailr-0.1.66 → datatailr-0.1.68}/src/sbin/datatailr_run_excel.py +3 -1
  13. {datatailr-0.1.66 → datatailr-0.1.68}/LICENSE +0 -0
  14. {datatailr-0.1.66 → datatailr-0.1.68}/README.md +0 -0
  15. {datatailr-0.1.66 → datatailr-0.1.68}/setup.cfg +0 -0
  16. {datatailr-0.1.66 → datatailr-0.1.68}/setup.py +0 -0
  17. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/__init__.py +0 -0
  18. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/blob.py +0 -0
  19. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/build/__init__.py +0 -0
  20. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/build/image.py +0 -0
  21. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/dt_json.py +0 -0
  22. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/errors.py +0 -0
  23. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/excel/__init__.py +0 -0
  24. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/group.py +0 -0
  25. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/logging.py +0 -0
  26. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/scheduler/__init__.py +0 -0
  27. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/scheduler/batch_decorator.py +0 -0
  28. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/scheduler/constants.py +0 -0
  29. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/scheduler/schedule.py +0 -0
  30. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/scheduler/utils.py +0 -0
  31. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/user.py +0 -0
  32. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/utils.py +0 -0
  33. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr/version.py +0 -0
  34. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr.egg-info/dependency_links.txt +0 -0
  35. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr.egg-info/entry_points.txt +0 -0
  36. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr.egg-info/requires.txt +0 -0
  37. {datatailr-0.1.66 → datatailr-0.1.68}/src/datatailr.egg-info/top_level.txt +0 -0
  38. {datatailr-0.1.66 → datatailr-0.1.68}/src/sbin/datatailr_run_app.py +0 -0
  39. {datatailr-0.1.66 → datatailr-0.1.68}/src/sbin/datatailr_run_batch.py +0 -0
  40. {datatailr-0.1.66 → datatailr-0.1.68}/src/sbin/datatailr_run_service.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datatailr
3
- Version: 0.1.66
3
+ Version: 0.1.68
4
4
  Summary: Ready-to-Use Platform That Drives Business Insights
5
5
  Author-email: Datatailr <info@datatailr.com>
6
6
  License-Expression: MIT
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "datatailr"
7
- version = "0.1.66"
7
+ version = "0.1.68"
8
8
  description = "Ready-to-Use Platform That Drives Business Insights"
9
9
  readme = "README.md"
10
10
  requires-python = ">=3.9"
@@ -57,6 +57,10 @@ dev = [
57
57
  "myst-parser"
58
58
  ]
59
59
 
60
+ [tool.coverage.run]
61
+ branch = true
62
+ source = ["./src/datatailr"]
63
+
60
64
  [tool.ruff]
61
65
  src = [
62
66
  "src",
@@ -25,15 +25,15 @@ class ACL:
25
25
  self,
26
26
  user: Union[User, str],
27
27
  group: Optional[Union[Group, str]] = None,
28
- permissions: Optional[List[str]] = None,
28
+ permissions: Optional[List[str] | str] = None,
29
29
  ):
30
30
  if user is None:
31
31
  user = User.signed_user()
32
32
  self.user = user if isinstance(user, User) else User.get(user)
33
- if self.user is not None:
34
- self.group = (
35
- group if group and isinstance(group, Group) else self.user.primary_group
36
- )
33
+ if group is None:
34
+ group = self.user.primary_group
35
+ group = group if isinstance(group, Group) else Group.get(str(group))
36
+ self.group = group
37
37
  self.permissions = permissions or "rwr---"
38
38
 
39
39
  self.__group_can_read = False
@@ -28,6 +28,7 @@ from typing import Any, Dict, Optional
28
28
 
29
29
  from datatailr import is_dt_installed, Blob
30
30
  from datatailr.errors import DatatailrError
31
+ from datatailr.wrapper import dt__Tag
31
32
 
32
33
 
33
34
  __BLOB_STORAGE__ = Blob()
@@ -48,11 +49,12 @@ class ArgumentsCache:
48
49
 
49
50
  :param use_persistent_cache: If True, use the persistent cache backend. Otherwise, use in-memory cache.
50
51
  """
52
+ self.__bucket_name__ = dt__Tag().get("blob_storage_prefix") + "batch"
51
53
  self.use_persistent_cache = use_persistent_cache
52
54
  if not self.use_persistent_cache:
53
55
  # Create a temp folder, for local caching
54
- os.makedirs("/tmp/datatailr/batch/arguments", exist_ok=True)
55
- os.makedirs("/tmp/datatailr/batch/results", exist_ok=True)
56
+ os.makedirs(f"/tmp/{self.__bucket_name__}/arguments", exist_ok=True)
57
+ os.makedirs(f"/tmp/{self.__bucket_name__}/results", exist_ok=True)
56
58
 
57
59
  def add_arguments(self, batch_id: str, arguments: Dict[str, Any]):
58
60
  """
@@ -62,7 +64,7 @@ class ArgumentsCache:
62
64
  :param job_name: Name of the job.
63
65
  :param arguments: Dictionary of arguments to store.
64
66
  """
65
- path = f"/tmp/datatailr/batch/arguments/{batch_id}.pkl"
67
+ path = f"/tmp/{self.__bucket_name__}/arguments/{batch_id}.pkl"
66
68
  if self.use_persistent_cache:
67
69
  self._add_to_persistent_cache(path, arguments)
68
70
  else:
@@ -79,7 +81,7 @@ class ArgumentsCache:
79
81
  :param job_name: Name of the job.
80
82
  :return: Dictionary of arguments.
81
83
  """
82
- path = f"/tmp/datatailr/batch/arguments/{batch_id}.pkl"
84
+ path = f"/tmp/{self.__bucket_name__}/arguments/{batch_id}.pkl"
83
85
  if self.use_persistent_cache and isinstance(job, str):
84
86
  try:
85
87
  arg_keys = self._get_from_persistent_cache(path)
@@ -115,7 +117,7 @@ class ArgumentsCache:
115
117
  :param job: Name of the job.
116
118
  :param result: Result of the batch job.
117
119
  """
118
- path = f"/tmp/datatailr/batch/results/{batch_run_id}/{job}.pkl"
120
+ path = f"/tmp/{self.__bucket_name__}/results/{batch_run_id}/{job}.pkl"
119
121
  if self.use_persistent_cache and isinstance(job, str):
120
122
  self._add_to_persistent_cache(path, result)
121
123
  else:
@@ -131,7 +133,7 @@ class ArgumentsCache:
131
133
  :param job: Name of the job.
132
134
  :return: Result of the batch job.
133
135
  """
134
- path = f"/tmp/datatailr/batch/results/{batch_run_id}/{job}.pkl"
136
+ path = f"/tmp/{self.__bucket_name__}/results/{batch_run_id}/{job}.pkl"
135
137
  if self.use_persistent_cache and isinstance(job, str):
136
138
  return self._get_from_persistent_cache(path)
137
139
  else:
@@ -290,14 +290,30 @@ class Job:
290
290
  Returns a tuple of (branch: str, commit_hash: str).
291
291
  """
292
292
  path_to_repo = self.image.path_to_repo or "."
293
- local_commit = run_shell_command(f"cd {path_to_repo} && git rev-parse HEAD")[0]
294
- branch_name = run_shell_command(
295
- f"cd {path_to_repo} && git rev-parse --abbrev-ref HEAD"
296
- )[0]
293
+ try:
294
+ local_commit = run_shell_command(
295
+ f"cd {path_to_repo} && git rev-parse HEAD"
296
+ )[0]
297
+ branch_name = run_shell_command(
298
+ f"cd {path_to_repo} && git rev-parse --abbrev-ref HEAD"
299
+ )[0]
300
+ return_code = run_shell_command(
301
+ f"cd {path_to_repo} && git diff --exit-code"
302
+ )
303
+ except RuntimeError as e:
304
+ if "git: not found" in str(e):
305
+ logger.warning(
306
+ "Git is not installed or not found in PATH. Repository validation is not possible."
307
+ )
308
+ branch_name, local_commit, return_code = "unknown", "unknown", None
309
+ else:
310
+ raise RepoValidationError(
311
+ f"Error accessing git repository at {path_to_repo}: {e}"
312
+ ) from e
297
313
 
298
314
  if os.getenv("DATATAILR_ALLOW_UNSAFE_SCHEDULING", "false").lower() == "true":
299
315
  return branch_name, local_commit
300
- return_code = run_shell_command(f"cd {path_to_repo} && git diff --exit-code")
316
+
301
317
  is_committed = return_code is not None and return_code[1] == 0
302
318
 
303
319
  if not is_committed:
@@ -337,6 +353,13 @@ class Job:
337
353
  """
338
354
  return {}
339
355
 
356
+ def __set_existing_id__(self, job_id: dict):
357
+ if isinstance(job_id, dict) and "Ids" in job_id and len(job_id["Ids"]) > 0:
358
+ job_id = job_id["Ids"][0]
359
+ if isinstance(job_id, str):
360
+ logger.debug(f"Setting existing job ID: {job_id}")
361
+ self.__id = job_id
362
+
340
363
  def __run_command__(self, command: str) -> Tuple[bool, str]:
341
364
  """
342
365
  Run a command in the context of the job.
@@ -350,18 +373,23 @@ class Job:
350
373
  temp_file_name = self.__prepare__()
351
374
 
352
375
  if command == "run":
353
- __client__.run(f"file://{temp_file_name}", **self.get_schedule_args())
376
+ result = __client__.run(
377
+ f"file://{temp_file_name}", **self.get_schedule_args()
378
+ )
354
379
  elif command == "save":
355
- __client__.save(f"file://{temp_file_name}", **self.get_schedule_args())
380
+ result = __client__.save(
381
+ f"file://{temp_file_name}", **self.get_schedule_args()
382
+ )
356
383
  elif command == "start":
357
- __client__.start(self.name, environment=self.environment)
384
+ result = __client__.start(self.name, environment=self.environment)
358
385
  else:
359
386
  raise ValueError(f"Unknown command: {command}")
360
387
  os.remove(temp_file_name)
361
388
  except Exception as e:
362
389
  logger.error(f"Error running command '{command}': {e}")
363
390
  return False, str(e)
364
- return True, f"Job '{self.name}' {command}d successfully."
391
+ self.__set_existing_id__(result)
392
+ return True, result
365
393
 
366
394
  def save(self) -> Tuple[bool, str]:
367
395
  """
@@ -501,8 +501,10 @@ class Batch(Job):
501
501
  __ARGUMENTS_CACHE__.add_arguments(self.id, args)
502
502
 
503
503
  def save(self) -> Tuple[bool, str]:
504
- self.prepare_args()
505
- return super().save()
504
+ status = super().save()
505
+ if status[0]:
506
+ self.prepare_args()
507
+ return status
506
508
 
507
509
  def run(self) -> Tuple[bool, str]:
508
510
  self.prepare_args()
@@ -0,0 +1,35 @@
1
+ # *************************************************************************
2
+ #
3
+ # Copyright (c) 2025 - Datatailr Inc.
4
+ # All Rights Reserved.
5
+ #
6
+ # This file is part of Datatailr and subject to the terms and conditions
7
+ # defined in 'LICENSE.txt'. Unauthorized copying and/or distribution
8
+ # of this file, in parts or full, via any medium is strictly prohibited.
9
+ # *************************************************************************
10
+
11
+ from typing import Any, ClassVar
12
+
13
+
14
+ class dt__Tag:
15
+ """
16
+ Tag management for local runs in the absence of the DataTailr platform.
17
+ All instances share the same tag store.
18
+ """
19
+
20
+ # shared across all instances
21
+ tags: ClassVar[dict[str, Any]] = {
22
+ "blob_storage_prefix": "local-no-dt-",
23
+ }
24
+
25
+ def ls(self) -> dict[str, Any]:
26
+ return self.__class__.tags
27
+
28
+ def get(self, name: str) -> Any:
29
+ return self.__class__.tags.get(name)
30
+
31
+ def set(self, name: str, value: Any) -> None:
32
+ self.__class__.tags[name] = value
33
+
34
+ def rm(self, name: str) -> None:
35
+ self.__class__.tags.pop(name, None)
@@ -14,6 +14,7 @@ import subprocess
14
14
  from typing import Union
15
15
 
16
16
  from datatailr.utils import is_dt_installed
17
+ from datatailr.tag import dt__Tag as local_no_dt_Tag
17
18
 
18
19
  API_JSON_PATH: Union[str, None] = None
19
20
 
@@ -33,13 +34,14 @@ else:
33
34
 
34
35
  def type_map(arg_type):
35
36
  mapping = {
36
- "String": "str",
37
+ "string": "str",
37
38
  "file": "str",
38
39
  "boolean": "bool",
39
40
  "int": "int",
41
+ "integer": "int",
40
42
  "float": "float",
41
43
  }
42
- return mapping.get(arg_type, "str")
44
+ return mapping.get(arg_type.lower(), "str")
43
45
 
44
46
 
45
47
  def add_quotes(arg):
@@ -198,7 +200,7 @@ dt__Email = globals().get("dt__Email", mock_cli_tool)
198
200
  dt__Kv = globals().get("dt__Kv", mock_cli_tool)
199
201
  dt__Log = globals().get("dt__Log", mock_cli_tool)
200
202
  dt__Node = globals().get("dt__Node", mock_cli_tool)
201
- dt__Tag = globals().get("dt__Tag", mock_cli_tool)
203
+ dt__Tag = globals().get("dt__Tag", local_no_dt_Tag)
202
204
  dt__Registry = globals().get("dt__Registry", mock_cli_tool)
203
205
  dt__Service = globals().get("dt__Service", mock_cli_tool)
204
206
  dt__Settings = globals().get("dt__Settings", mock_cli_tool)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datatailr
3
- Version: 0.1.66
3
+ Version: 0.1.68
4
4
  Summary: Ready-to-Use Platform That Drives Business Insights
5
5
  Author-email: Datatailr <info@datatailr.com>
6
6
  License-Expression: MIT
@@ -9,6 +9,7 @@ src/datatailr/dt_json.py
9
9
  src/datatailr/errors.py
10
10
  src/datatailr/group.py
11
11
  src/datatailr/logging.py
12
+ src/datatailr/tag.py
12
13
  src/datatailr/user.py
13
14
  src/datatailr/utils.py
14
15
  src/datatailr/version.py
@@ -32,9 +32,10 @@
32
32
  # DATATAILR_JOB_ID - the unique identifier for the job.
33
33
 
34
34
 
35
+ import concurrent.futures
35
36
  import subprocess
36
37
  import os
37
- import sys
38
+ import shlex
38
39
  import sysconfig
39
40
  from typing import Tuple
40
41
  from datatailr.logging import DatatailrLogger
@@ -82,40 +83,67 @@ def create_user_and_group() -> Tuple[str, str]:
82
83
  return user, group
83
84
 
84
85
 
85
- def run_command_as_user(command: str | list, user: str, env_vars: dict):
86
- """
87
- Run a command as a specific user with the given environment variables.
88
- """
86
+ def prepare_command_argv(command: str | list, user: str, env_vars: dict) -> list[str]:
89
87
  if isinstance(command, str):
90
- command = command.split(" ")
88
+ command = shlex.split(command)
91
89
 
92
90
  python_libdir = sysconfig.get_config_var("LIBDIR")
93
-
94
91
  ld_library_path = get_env_var("LD_LIBRARY_PATH", "")
92
+
95
93
  if ld_library_path:
96
94
  python_libdir = ld_library_path + ":" + python_libdir
97
- else:
98
- python_libdir = python_libdir
99
95
 
100
- env_vars = {
96
+ # Base environment variables setup
97
+ base_env = {
101
98
  "PATH": get_env_var("PATH", ""),
102
99
  "PYTHONPATH": get_env_var("PYTHONPATH", ""),
103
100
  "LD_LIBRARY_PATH": python_libdir,
104
- } | env_vars
101
+ }
102
+
103
+ merged_env = base_env | env_vars
104
+ env_kv = [f"{k}={v}" for k, v in merged_env.items()]
105
+ return ["sudo", "-u", user, "env", *env_kv, *command]
105
106
 
106
- env_kv = [f"{k}={v}" for k, v in env_vars.items()]
107
- argv = ["sudo", "-u", user, "env", *env_kv, *command]
107
+
108
+ def run_single_command_non_blocking(command: str | list, user: str, env_vars: dict):
109
+ """
110
+ Runs a single command non-blocking and returns the exit code after it finishes.
111
+ This is designed to be run within an Executor.
112
+ """
113
+ argv = prepare_command_argv(command, user, env_vars)
114
+ cmd_label = " ".join(argv[4:]) # For logging purposes
108
115
 
109
116
  try:
110
117
  proc = subprocess.Popen(argv)
111
118
  returncode = proc.wait()
119
+
112
120
  if returncode != 0:
113
- logger.error(f"Command failed with exit code {returncode}")
114
- sys.exit(returncode)
115
- except subprocess.CalledProcessError as e:
116
- logger.error(f"Command failed with exit code {e.returncode}")
117
- logger.error(f"stderr: {e.stderr}")
118
- sys.exit(1)
121
+ logger.error(f"Command '{cmd_label}' failed with exit code {returncode}")
122
+ return returncode
123
+ except Exception as e:
124
+ logger.error(f"Execution error for '{cmd_label}': {e}")
125
+ return 1
126
+
127
+
128
+ def run_commands_in_parallel(
129
+ commands: list[str | list], user: str, env_vars: dict
130
+ ) -> tuple[int, int]:
131
+ """
132
+ Executes two commands concurrently using a ThreadPoolExecutor.
133
+ Returns a tuple of (return_code_cmd1, return_code_cmd2).
134
+ """
135
+ with concurrent.futures.ThreadPoolExecutor(max_workers=-1) as executor:
136
+ futures = []
137
+ for command in commands:
138
+ futures.append(
139
+ executor.submit(
140
+ run_single_command_non_blocking, command, user, env_vars
141
+ )
142
+ )
143
+ results = [
144
+ future.result() for future in concurrent.futures.as_completed(futures)
145
+ ]
146
+ return results[0], results[1]
119
147
 
120
148
 
121
149
  def main():
@@ -137,7 +165,7 @@ def main():
137
165
  "DATATAILR_BATCH_ID": batch_id,
138
166
  "DATATAILR_BATCH_ENTRYPOINT": entrypoint,
139
167
  } | env
140
- run_command_as_user("datatailr_run_batch", user, env)
168
+ run_single_command_non_blocking("datatailr_run_batch", user, env)
141
169
  elif job_type == "service":
142
170
  port = get_env_var("DATATAILR_SERVICE_PORT", 8080)
143
171
  entrypoint = get_env_var("DATATAILR_ENTRYPOINT")
@@ -145,13 +173,13 @@ def main():
145
173
  "DATATAILR_ENTRYPOINT": entrypoint,
146
174
  "DATATAILR_SERVICE_PORT": port,
147
175
  } | env
148
- run_command_as_user("datatailr_run_service", user, env)
176
+ run_single_command_non_blocking("datatailr_run_service", user, env)
149
177
  elif job_type == "app":
150
178
  entrypoint = get_env_var("DATATAILR_ENTRYPOINT")
151
179
  env = {
152
180
  "DATATAILR_ENTRYPOINT": entrypoint,
153
181
  } | env
154
- run_command_as_user("datatailr_run_app", user, env)
182
+ run_single_command_non_blocking("datatailr_run_app", user, env)
155
183
  elif job_type == "excel":
156
184
  host = get_env_var("DATATAILR_HOST", "")
157
185
  local = get_env_var("DATATAILR_LOCAL", "")
@@ -162,28 +190,28 @@ def main():
162
190
  "DATATAILR_HOST": host,
163
191
  "DATATAILR_LOCAL": local,
164
192
  } | env
165
- run_command_as_user("datatailr_run_excel", user, env)
166
- elif job_type == "ide":
167
- command = [
193
+ run_single_command_non_blocking("datatailr_run_excel", user, env)
194
+ elif job_type == "workspace":
195
+ # Set a custom PS1 for the IDE terminal: 17:38|user@my-ide/~/dir/path:$
196
+ env["PS1"] = (
197
+ r"""\[\e[2m\]\A\[\e[0m\]|\[\e[38;5;40m\]\u\[\e[92m\]@${DATATAILR_JOB_NAME:-datatailr}\[\e[0m\]/\[\e[94;1m\]\w\[\e[0m\]\$"""
198
+ )
199
+ ide_command = [
168
200
  "code-server",
169
201
  "--auth=none",
170
- "--bind-addr=0.0.0.0:8080",
202
+ "--bind-addr=0.0.0.0:9090",
171
203
  f'--app-name="Datatailr IDE {get_env_var("DATATAILR_USER")}"',
172
204
  ]
173
- run_command_as_user(command, user, env)
174
- elif job_type == "jupyter":
175
- command = [
176
- "uv",
177
- "run",
178
- "jupyter",
179
- "lab",
205
+ jupyter_command = [
206
+ "jupyter-lab",
180
207
  "--ip='*'",
181
- "--port=8080",
208
+ "--port=7070",
182
209
  "--no-browser",
183
210
  "--NotebookApp.token=''",
184
211
  "--NotebookApp.password=''",
185
212
  ]
186
- run_command_as_user(command, user, env)
213
+ run_commands_in_parallel([ide_command, jupyter_command], user, env)
214
+
187
215
  else:
188
216
  raise ValueError(f"Unknown job type: {job_type}")
189
217
 
@@ -35,6 +35,8 @@ def run():
35
35
  raise ValueError("Environment variable 'DATATAILR_HOST' is not set.")
36
36
  local_flag = ""
37
37
 
38
- entrypoint = f'/opt/datatailr/bin/dt-excel.sh -n -H "{hostname}" {local_flag} -p 8080 -w 8000 {entrypoint}'
38
+ module_name = entrypoint.split(":", 1)[0]
39
+
40
+ entrypoint = f'/opt/datatailr/bin/dt-excel.sh -n -H "{hostname}" {local_flag} -p 8080 -w 8000 {module_name}'
39
41
  logger.info(f"Running entrypoint: {entrypoint}")
40
42
  subprocess.run(entrypoint, shell=True)
File without changes
File without changes
File without changes
File without changes