datatailr-0.1.67-py3-none-any.whl → datatailr-0.1.68-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datatailr might be problematic. Consult the package registry's advisory page for more details.

datatailr/acl.py CHANGED
@@ -25,15 +25,15 @@ class ACL:
25
25
  self,
26
26
  user: Union[User, str],
27
27
  group: Optional[Union[Group, str]] = None,
28
- permissions: Optional[List[str]] = None,
28
+ permissions: Optional[List[str] | str] = None,
29
29
  ):
30
30
  if user is None:
31
31
  user = User.signed_user()
32
32
  self.user = user if isinstance(user, User) else User.get(user)
33
- if self.user is not None:
34
- self.group = (
35
- group if group and isinstance(group, Group) else self.user.primary_group
36
- )
33
+ if group is None:
34
+ group = self.user.primary_group
35
+ group = group if isinstance(group, Group) else Group.get(str(group))
36
+ self.group = group
37
37
  self.permissions = permissions or "rwr---"
38
38
 
39
39
  self.__group_can_read = False
@@ -32,9 +32,10 @@
32
32
  # DATATAILR_JOB_ID - the unique identifier for the job.
33
33
 
34
34
 
35
+ import concurrent.futures
35
36
  import subprocess
36
37
  import os
37
- import sys
38
+ import shlex
38
39
  import sysconfig
39
40
  from typing import Tuple
40
41
  from datatailr.logging import DatatailrLogger
@@ -82,40 +83,67 @@ def create_user_and_group() -> Tuple[str, str]:
82
83
  return user, group
83
84
 
84
85
 
85
- def run_command_as_user(command: str | list, user: str, env_vars: dict):
86
- """
87
- Run a command as a specific user with the given environment variables.
88
- """
86
+ def prepare_command_argv(command: str | list, user: str, env_vars: dict) -> list[str]:
89
87
  if isinstance(command, str):
90
- command = command.split(" ")
88
+ command = shlex.split(command)
91
89
 
92
90
  python_libdir = sysconfig.get_config_var("LIBDIR")
93
-
94
91
  ld_library_path = get_env_var("LD_LIBRARY_PATH", "")
92
+
95
93
  if ld_library_path:
96
94
  python_libdir = ld_library_path + ":" + python_libdir
97
- else:
98
- python_libdir = python_libdir
99
95
 
100
- env_vars = {
96
+ # Base environment variables setup
97
+ base_env = {
101
98
  "PATH": get_env_var("PATH", ""),
102
99
  "PYTHONPATH": get_env_var("PYTHONPATH", ""),
103
100
  "LD_LIBRARY_PATH": python_libdir,
104
- } | env_vars
101
+ }
102
+
103
+ merged_env = base_env | env_vars
104
+ env_kv = [f"{k}={v}" for k, v in merged_env.items()]
105
+ return ["sudo", "-u", user, "env", *env_kv, *command]
105
106
 
106
- env_kv = [f"{k}={v}" for k, v in env_vars.items()]
107
- argv = ["sudo", "-u", user, "env", *env_kv, *command]
107
+
108
+ def run_single_command_non_blocking(command: str | list, user: str, env_vars: dict):
109
+ """
110
+ Runs a single command non-blocking and returns the exit code after it finishes.
111
+ This is designed to be run within an Executor.
112
+ """
113
+ argv = prepare_command_argv(command, user, env_vars)
114
+ cmd_label = " ".join(argv[4:]) # For logging purposes
108
115
 
109
116
  try:
110
117
  proc = subprocess.Popen(argv)
111
118
  returncode = proc.wait()
119
+
112
120
  if returncode != 0:
113
- logger.error(f"Command failed with exit code {returncode}")
114
- sys.exit(returncode)
115
- except subprocess.CalledProcessError as e:
116
- logger.error(f"Command failed with exit code {e.returncode}")
117
- logger.error(f"stderr: {e.stderr}")
118
- sys.exit(1)
121
+ logger.error(f"Command '{cmd_label}' failed with exit code {returncode}")
122
+ return returncode
123
+ except Exception as e:
124
+ logger.error(f"Execution error for '{cmd_label}': {e}")
125
+ return 1
126
+
127
+
128
+ def run_commands_in_parallel(
129
+ commands: list[str | list], user: str, env_vars: dict
130
+ ) -> tuple[int, int]:
131
+ """
132
+ Executes two commands concurrently using a ThreadPoolExecutor.
133
+ Returns a tuple of (return_code_cmd1, return_code_cmd2).
134
+ """
135
+ with concurrent.futures.ThreadPoolExecutor(max_workers=-1) as executor:
136
+ futures = []
137
+ for command in commands:
138
+ futures.append(
139
+ executor.submit(
140
+ run_single_command_non_blocking, command, user, env_vars
141
+ )
142
+ )
143
+ results = [
144
+ future.result() for future in concurrent.futures.as_completed(futures)
145
+ ]
146
+ return results[0], results[1]
119
147
 
120
148
 
121
149
  def main():
@@ -137,7 +165,7 @@ def main():
137
165
  "DATATAILR_BATCH_ID": batch_id,
138
166
  "DATATAILR_BATCH_ENTRYPOINT": entrypoint,
139
167
  } | env
140
- run_command_as_user("datatailr_run_batch", user, env)
168
+ run_single_command_non_blocking("datatailr_run_batch", user, env)
141
169
  elif job_type == "service":
142
170
  port = get_env_var("DATATAILR_SERVICE_PORT", 8080)
143
171
  entrypoint = get_env_var("DATATAILR_ENTRYPOINT")
@@ -145,13 +173,13 @@ def main():
145
173
  "DATATAILR_ENTRYPOINT": entrypoint,
146
174
  "DATATAILR_SERVICE_PORT": port,
147
175
  } | env
148
- run_command_as_user("datatailr_run_service", user, env)
176
+ run_single_command_non_blocking("datatailr_run_service", user, env)
149
177
  elif job_type == "app":
150
178
  entrypoint = get_env_var("DATATAILR_ENTRYPOINT")
151
179
  env = {
152
180
  "DATATAILR_ENTRYPOINT": entrypoint,
153
181
  } | env
154
- run_command_as_user("datatailr_run_app", user, env)
182
+ run_single_command_non_blocking("datatailr_run_app", user, env)
155
183
  elif job_type == "excel":
156
184
  host = get_env_var("DATATAILR_HOST", "")
157
185
  local = get_env_var("DATATAILR_LOCAL", "")
@@ -162,8 +190,12 @@ def main():
162
190
  "DATATAILR_HOST": host,
163
191
  "DATATAILR_LOCAL": local,
164
192
  } | env
165
- run_command_as_user("datatailr_run_excel", user, env)
193
+ run_single_command_non_blocking("datatailr_run_excel", user, env)
166
194
  elif job_type == "workspace":
195
+ # Set a custom PS1 for the IDE terminal: 17:38|user@my-ide/~/dir/path:$
196
+ env["PS1"] = (
197
+ r"""\[\e[2m\]\A\[\e[0m\]|\[\e[38;5;40m\]\u\[\e[92m\]@${DATATAILR_JOB_NAME:-datatailr}\[\e[0m\]/\[\e[94;1m\]\w\[\e[0m\]\$"""
198
+ )
167
199
  ide_command = [
168
200
  "code-server",
169
201
  "--auth=none",
@@ -178,8 +210,7 @@ def main():
178
210
  "--NotebookApp.token=''",
179
211
  "--NotebookApp.password=''",
180
212
  ]
181
- # TODO: run both commands in parallel
182
- run_command_as_user(ide_command, user, env)
213
+ run_commands_in_parallel([ide_command, jupyter_command], user, env)
183
214
 
184
215
  else:
185
216
  raise ValueError(f"Unknown job type: {job_type}")
@@ -35,6 +35,8 @@ def run():
35
35
  raise ValueError("Environment variable 'DATATAILR_HOST' is not set.")
36
36
  local_flag = ""
37
37
 
38
- entrypoint = f'/opt/datatailr/bin/dt-excel.sh -n -H "{hostname}" {local_flag} -p 8080 -w 8000 {entrypoint}'
38
+ module_name = entrypoint.split(":", 1)[0]
39
+
40
+ entrypoint = f'/opt/datatailr/bin/dt-excel.sh -n -H "{hostname}" {local_flag} -p 8080 -w 8000 {module_name}'
39
41
  logger.info(f"Running entrypoint: {entrypoint}")
40
42
  subprocess.run(entrypoint, shell=True)
@@ -28,6 +28,7 @@ from typing import Any, Dict, Optional
28
28
 
29
29
  from datatailr import is_dt_installed, Blob
30
30
  from datatailr.errors import DatatailrError
31
+ from datatailr.wrapper import dt__Tag
31
32
 
32
33
 
33
34
  __BLOB_STORAGE__ = Blob()
@@ -48,11 +49,12 @@ class ArgumentsCache:
48
49
 
49
50
  :param use_persistent_cache: If True, use the persistent cache backend. Otherwise, use in-memory cache.
50
51
  """
52
+ self.__bucket_name__ = dt__Tag().get("blob_storage_prefix") + "batch"
51
53
  self.use_persistent_cache = use_persistent_cache
52
54
  if not self.use_persistent_cache:
53
55
  # Create a temp folder, for local caching
54
- os.makedirs("/tmp/datatailr/batch/arguments", exist_ok=True)
55
- os.makedirs("/tmp/datatailr/batch/results", exist_ok=True)
56
+ os.makedirs(f"/tmp/{self.__bucket_name__}/arguments", exist_ok=True)
57
+ os.makedirs(f"/tmp/{self.__bucket_name__}/results", exist_ok=True)
56
58
 
57
59
  def add_arguments(self, batch_id: str, arguments: Dict[str, Any]):
58
60
  """
@@ -62,7 +64,7 @@ class ArgumentsCache:
62
64
  :param job_name: Name of the job.
63
65
  :param arguments: Dictionary of arguments to store.
64
66
  """
65
- path = f"/tmp/datatailr/batch/arguments/{batch_id}.pkl"
67
+ path = f"/tmp/{self.__bucket_name__}/arguments/{batch_id}.pkl"
66
68
  if self.use_persistent_cache:
67
69
  self._add_to_persistent_cache(path, arguments)
68
70
  else:
@@ -79,7 +81,7 @@ class ArgumentsCache:
79
81
  :param job_name: Name of the job.
80
82
  :return: Dictionary of arguments.
81
83
  """
82
- path = f"/tmp/datatailr/batch/arguments/{batch_id}.pkl"
84
+ path = f"/tmp/{self.__bucket_name__}/arguments/{batch_id}.pkl"
83
85
  if self.use_persistent_cache and isinstance(job, str):
84
86
  try:
85
87
  arg_keys = self._get_from_persistent_cache(path)
@@ -115,7 +117,7 @@ class ArgumentsCache:
115
117
  :param job: Name of the job.
116
118
  :param result: Result of the batch job.
117
119
  """
118
- path = f"/tmp/datatailr/batch/results/{batch_run_id}/{job}.pkl"
120
+ path = f"/tmp/{self.__bucket_name__}/results/{batch_run_id}/{job}.pkl"
119
121
  if self.use_persistent_cache and isinstance(job, str):
120
122
  self._add_to_persistent_cache(path, result)
121
123
  else:
@@ -131,7 +133,7 @@ class ArgumentsCache:
131
133
  :param job: Name of the job.
132
134
  :return: Result of the batch job.
133
135
  """
134
- path = f"/tmp/datatailr/batch/results/{batch_run_id}/{job}.pkl"
136
+ path = f"/tmp/{self.__bucket_name__}/results/{batch_run_id}/{job}.pkl"
135
137
  if self.use_persistent_cache and isinstance(job, str):
136
138
  return self._get_from_persistent_cache(path)
137
139
  else:
@@ -290,14 +290,30 @@ class Job:
290
290
  Returns a tuple of (branch: str, commit_hash: str).
291
291
  """
292
292
  path_to_repo = self.image.path_to_repo or "."
293
- local_commit = run_shell_command(f"cd {path_to_repo} && git rev-parse HEAD")[0]
294
- branch_name = run_shell_command(
295
- f"cd {path_to_repo} && git rev-parse --abbrev-ref HEAD"
296
- )[0]
293
+ try:
294
+ local_commit = run_shell_command(
295
+ f"cd {path_to_repo} && git rev-parse HEAD"
296
+ )[0]
297
+ branch_name = run_shell_command(
298
+ f"cd {path_to_repo} && git rev-parse --abbrev-ref HEAD"
299
+ )[0]
300
+ return_code = run_shell_command(
301
+ f"cd {path_to_repo} && git diff --exit-code"
302
+ )
303
+ except RuntimeError as e:
304
+ if "git: not found" in str(e):
305
+ logger.warning(
306
+ "Git is not installed or not found in PATH. Repository validation is not possible."
307
+ )
308
+ branch_name, local_commit, return_code = "unknown", "unknown", None
309
+ else:
310
+ raise RepoValidationError(
311
+ f"Error accessing git repository at {path_to_repo}: {e}"
312
+ ) from e
297
313
 
298
314
  if os.getenv("DATATAILR_ALLOW_UNSAFE_SCHEDULING", "false").lower() == "true":
299
315
  return branch_name, local_commit
300
- return_code = run_shell_command(f"cd {path_to_repo} && git diff --exit-code")
316
+
301
317
  is_committed = return_code is not None and return_code[1] == 0
302
318
 
303
319
  if not is_committed:
@@ -337,6 +353,13 @@ class Job:
337
353
  """
338
354
  return {}
339
355
 
356
+ def __set_existing_id__(self, job_id: dict):
357
+ if isinstance(job_id, dict) and "Ids" in job_id and len(job_id["Ids"]) > 0:
358
+ job_id = job_id["Ids"][0]
359
+ if isinstance(job_id, str):
360
+ logger.debug(f"Setting existing job ID: {job_id}")
361
+ self.__id = job_id
362
+
340
363
  def __run_command__(self, command: str) -> Tuple[bool, str]:
341
364
  """
342
365
  Run a command in the context of the job.
@@ -350,18 +373,23 @@ class Job:
350
373
  temp_file_name = self.__prepare__()
351
374
 
352
375
  if command == "run":
353
- __client__.run(f"file://{temp_file_name}", **self.get_schedule_args())
376
+ result = __client__.run(
377
+ f"file://{temp_file_name}", **self.get_schedule_args()
378
+ )
354
379
  elif command == "save":
355
- __client__.save(f"file://{temp_file_name}", **self.get_schedule_args())
380
+ result = __client__.save(
381
+ f"file://{temp_file_name}", **self.get_schedule_args()
382
+ )
356
383
  elif command == "start":
357
- __client__.start(self.name, environment=self.environment)
384
+ result = __client__.start(self.name, environment=self.environment)
358
385
  else:
359
386
  raise ValueError(f"Unknown command: {command}")
360
387
  os.remove(temp_file_name)
361
388
  except Exception as e:
362
389
  logger.error(f"Error running command '{command}': {e}")
363
390
  return False, str(e)
364
- return True, f"Job '{self.name}' {command}d successfully."
391
+ self.__set_existing_id__(result)
392
+ return True, result
365
393
 
366
394
  def save(self) -> Tuple[bool, str]:
367
395
  """
@@ -501,8 +501,10 @@ class Batch(Job):
501
501
  __ARGUMENTS_CACHE__.add_arguments(self.id, args)
502
502
 
503
503
  def save(self) -> Tuple[bool, str]:
504
- self.prepare_args()
505
- return super().save()
504
+ status = super().save()
505
+ if status[0]:
506
+ self.prepare_args()
507
+ return status
506
508
 
507
509
  def run(self) -> Tuple[bool, str]:
508
510
  self.prepare_args()
datatailr/tag.py ADDED
@@ -0,0 +1,35 @@
1
+ # *************************************************************************
2
+ #
3
+ # Copyright (c) 2025 - Datatailr Inc.
4
+ # All Rights Reserved.
5
+ #
6
+ # This file is part of Datatailr and subject to the terms and conditions
7
+ # defined in 'LICENSE.txt'. Unauthorized copying and/or distribution
8
+ # of this file, in parts or full, via any medium is strictly prohibited.
9
+ # *************************************************************************
10
+
11
+ from typing import Any, ClassVar
12
+
13
+
14
+ class dt__Tag:
15
+ """
16
+ Tag management for local runs in the absence of the DataTailr platform.
17
+ All instances share the same tag store.
18
+ """
19
+
20
+ # shared across all instances
21
+ tags: ClassVar[dict[str, Any]] = {
22
+ "blob_storage_prefix": "local-no-dt-",
23
+ }
24
+
25
+ def ls(self) -> dict[str, Any]:
26
+ return self.__class__.tags
27
+
28
+ def get(self, name: str) -> Any:
29
+ return self.__class__.tags.get(name)
30
+
31
+ def set(self, name: str, value: Any) -> None:
32
+ self.__class__.tags[name] = value
33
+
34
+ def rm(self, name: str) -> None:
35
+ self.__class__.tags.pop(name, None)
datatailr/wrapper.py CHANGED
@@ -14,6 +14,7 @@ import subprocess
14
14
  from typing import Union
15
15
 
16
16
  from datatailr.utils import is_dt_installed
17
+ from datatailr.tag import dt__Tag as local_no_dt_Tag
17
18
 
18
19
  API_JSON_PATH: Union[str, None] = None
19
20
 
@@ -199,7 +200,7 @@ dt__Email = globals().get("dt__Email", mock_cli_tool)
199
200
  dt__Kv = globals().get("dt__Kv", mock_cli_tool)
200
201
  dt__Log = globals().get("dt__Log", mock_cli_tool)
201
202
  dt__Node = globals().get("dt__Node", mock_cli_tool)
202
- dt__Tag = globals().get("dt__Tag", mock_cli_tool)
203
+ dt__Tag = globals().get("dt__Tag", local_no_dt_Tag)
203
204
  dt__Registry = globals().get("dt__Registry", mock_cli_tool)
204
205
  dt__Service = globals().get("dt__Service", mock_cli_tool)
205
206
  dt__Settings = globals().get("dt__Settings", mock_cli_tool)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datatailr
3
- Version: 0.1.67
3
+ Version: 0.1.68
4
4
  Summary: Ready-to-Use Platform That Drives Business Insights
5
5
  Author-email: Datatailr <info@datatailr.com>
6
6
  License-Expression: MIT
@@ -1,33 +1,34 @@
1
1
  datatailr/__init__.py,sha256=QTTG8X76BnlQwVx5N4ZQtSbLkgFipZ9NJGAbvtfuk_g,1051
2
- datatailr/acl.py,sha256=tlDy6VlHinSy5W1FbVxcNQNi7FliWUXy3ssIbzaPp28,4157
2
+ datatailr/acl.py,sha256=7hBwF7TP_ADoDryYEFuXx2FCLavLmp3k_F0-sEXg26g,4173
3
3
  datatailr/blob.py,sha256=9lKZKm4eRKVE4_t2zy3hh6Z_Ov4iaTA3-tJFjnQKezg,3313
4
4
  datatailr/dt_json.py,sha256=3xmTqDBk68oPl2UW8UVOYPaBw4lAsVg6nDLwcen5nuo,2252
5
5
  datatailr/errors.py,sha256=p_e4ao3sFEfz1g4LvEDqw6bVzHJPJSINLjJ8H6_PqOo,751
6
6
  datatailr/group.py,sha256=AC0nCA44eEWZCJCq2klPqkFg_995mS3C_wu5uSFFLtU,4426
7
7
  datatailr/logging.py,sha256=msr0vzIaOtXmKjxsPpMw0Zl7jX9C_W0IuUwNjmqhhcA,4146
8
+ datatailr/tag.py,sha256=YdTOBbbY5tYHJjiyaTgjsOprbwHLuc6jhLKDKXRlv7k,1081
8
9
  datatailr/user.py,sha256=tjIMbQ-tb2yFhgLBl_eTb47kKiy-RoM3KtYS3zmJiwc,6736
9
10
  datatailr/utils.py,sha256=0wEVD2UqBgfMRLO7DIL271ejSt-_0FU4ig1cSwnO5PI,2010
10
11
  datatailr/version.py,sha256=N9K8ZxlwFFSz8XSgbgaTWZY4k2J0JKfj698nZ_O2pIU,536
11
- datatailr/wrapper.py,sha256=2OEkMo6ls7sTddfS-ev-R1zKbYc194vDULQy7cm4Q8M,8025
12
+ datatailr/wrapper.py,sha256=45RrMeYIFFWJAtOlQZRe1fT9daeq4vFlj6nIajbewEY,8080
12
13
  datatailr/build/__init__.py,sha256=_dA7b4L6wsaAFaSxUoYSJ1oaRqDHDMR20kqoCocSOss,487
13
14
  datatailr/build/image.py,sha256=YC8ML-l-sj6TcIBY-DCx_vaeI_7SmL9fPFhHnuxzRh0,5509
14
15
  datatailr/excel/__init__.py,sha256=IpXEPIFuu8IG3uQ8k7FDHqiHnChtyp-jgE8Plx2cWXQ,656
15
- datatailr/sbin/datatailr_run.py,sha256=8wZIeiqcbuZKC9K1HZnUo6iAbXcL0yQ7LG4YHAQ0XsM,7235
16
+ datatailr/sbin/datatailr_run.py,sha256=iQpWHhlU-Y3gxyugTNcdh4Dkh8OHa-0KXEunGU9Kbxo,8478
16
17
  datatailr/sbin/datatailr_run_app.py,sha256=itF76XC2F4RK9s6bkoEppEiYwSLHK_5Jai3yvC-kFhY,1501
17
18
  datatailr/sbin/datatailr_run_batch.py,sha256=UWnp96j_G66R_Cape7Bb-rbK6UBLF7Y5_mTlWyGJAVQ,1818
18
- datatailr/sbin/datatailr_run_excel.py,sha256=MY2XH-T5bWABR23C_15eLhjaxggGRgjRhA-ByS6CtMc,1422
19
+ datatailr/sbin/datatailr_run_excel.py,sha256=BLWmvxpKEE_8vJhs8E4VWq07FOBof5tlow-AkIEXtHw,1470
19
20
  datatailr/sbin/datatailr_run_service.py,sha256=DO9LGOpz3CVZOJJRHb4ac7AgY_mLbXHGadSyVCeIknc,1212
20
21
  datatailr/scheduler/__init__.py,sha256=qydHYVtEP6SUWd2CQ6FRdTdRWNz3SbYPJy4FK_wOvMk,1772
21
- datatailr/scheduler/arguments_cache.py,sha256=dl8IG9RTLSrJUk6g0_LYolkcuqW-LF52wdLPhs07LRI,6076
22
- datatailr/scheduler/base.py,sha256=0EN9YUiBgyYMzdsf4KFpNiQWTlCuV3NgCiauFHlYPbQ,15838
23
- datatailr/scheduler/batch.py,sha256=SBh9_G8WhQ8IkJw8NLjZ_HPbn5glIyDMQCrpaxd90Ko,17685
22
+ datatailr/scheduler/arguments_cache.py,sha256=00OE0DhobYteBOnirjulO1ltgGBRamAdCO168O3_Zes,6236
23
+ datatailr/scheduler/base.py,sha256=OWRblRCmKVe1stN43J35_g-1oKH4qteU4lrDRezyMV4,16829
24
+ datatailr/scheduler/batch.py,sha256=CQCH1wHhW1qx09J7iQNQleErJ4n0nssAbd6u9YS6FMY,17735
24
25
  datatailr/scheduler/batch_decorator.py,sha256=LqL1bsupWLn-YEQUvFJYae7R3ogrL5-VodyiiScrkRw,5806
25
26
  datatailr/scheduler/constants.py,sha256=5WWTsfwZ_BA8gVDOTa2AQX9DJ0NzfaWgtY3vrODS2-8,606
26
27
  datatailr/scheduler/schedule.py,sha256=0XJJen2nL1xplRs0Xbjwgq3T-0bFCOrJzkSALdio998,3741
27
28
  datatailr/scheduler/utils.py,sha256=up6oR2iwe6G52LkvgfO394xchXgCYNjOMGRQW3e8PQk,1082
28
- datatailr-0.1.67.dist-info/licenses/LICENSE,sha256=ikKP4_O-UD_b8FuNdKmbzTb6odd0JX085ZW_FAPN3VI,1066
29
- datatailr-0.1.67.dist-info/METADATA,sha256=Y3m4aVMQtprItJcKYof3Fne2bACIrmAOp5at5JZxcWc,5146
30
- datatailr-0.1.67.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
31
- datatailr-0.1.67.dist-info/entry_points.txt,sha256=YqXfk2At-olW4PUSRkqvy_O3Mbv7uTKCCPuAAiz3Qbg,312
32
- datatailr-0.1.67.dist-info/top_level.txt,sha256=75gntW0X_SKpqxLL6hAPipvpk28GAhJBvoyqN_HohWU,10
33
- datatailr-0.1.67.dist-info/RECORD,,
29
+ datatailr-0.1.68.dist-info/licenses/LICENSE,sha256=ikKP4_O-UD_b8FuNdKmbzTb6odd0JX085ZW_FAPN3VI,1066
30
+ datatailr-0.1.68.dist-info/METADATA,sha256=jSAMgwWq9plzAjbjmrKKGmYumhCa6dY7jqkyBBy-pRo,5146
31
+ datatailr-0.1.68.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
32
+ datatailr-0.1.68.dist-info/entry_points.txt,sha256=YqXfk2At-olW4PUSRkqvy_O3Mbv7uTKCCPuAAiz3Qbg,312
33
+ datatailr-0.1.68.dist-info/top_level.txt,sha256=75gntW0X_SKpqxLL6hAPipvpk28GAhJBvoyqN_HohWU,10
34
+ datatailr-0.1.68.dist-info/RECORD,,