datatailr 0.1.70__tar.gz → 0.1.72__tar.gz
This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release.
This version of datatailr might be problematic.
- {datatailr-0.1.70/src/datatailr.egg-info → datatailr-0.1.72}/PKG-INFO +1 -1
- {datatailr-0.1.70 → datatailr-0.1.72}/pyproject.toml +1 -1
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/blob.py +9 -2
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/excel/__init__.py +2 -2
- datatailr-0.1.72/src/datatailr/excel/addin.py +169 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/scheduler/arguments_cache.py +1 -4
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/scheduler/base.py +3 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/scheduler/batch.py +22 -6
- {datatailr-0.1.70 → datatailr-0.1.72/src/datatailr.egg-info}/PKG-INFO +1 -1
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr.egg-info/SOURCES.txt +1 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/sbin/datatailr_run.py +24 -7
- {datatailr-0.1.70 → datatailr-0.1.72}/LICENSE +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/README.md +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/setup.cfg +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/setup.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/__init__.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/acl.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/build/__init__.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/build/image.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/dt_json.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/errors.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/group.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/logging.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/scheduler/__init__.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/scheduler/batch_decorator.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/scheduler/constants.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/scheduler/schedule.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/scheduler/utils.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/tag.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/user.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/utils.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/version.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr/wrapper.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr.egg-info/dependency_links.txt +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr.egg-info/entry_points.txt +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr.egg-info/requires.txt +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/datatailr.egg-info/top_level.txt +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/sbin/datatailr_run_app.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/sbin/datatailr_run_batch.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/sbin/datatailr_run_excel.py +0 -0
- {datatailr-0.1.70 → datatailr-0.1.72}/src/sbin/datatailr_run_service.py +0 -0
src/datatailr/blob.py
@@ -10,12 +10,14 @@
 
 from __future__ import annotations
 
+import os
 import tempfile
 
 from datatailr.wrapper import dt__Blob
 
 # Datatailr Blob API Client
 __client__ = dt__Blob()
+__user__ = os.getenv("USER", "root")
 
 
 class Blob:
@@ -81,8 +83,13 @@ class Blob:
         """
         # Since direct reading and writting of blobs is not implemented yet, we are using a temporary file.
         # This is a workaround to allow reading the blob content directly from the blob storage.
-
-
+        temp_dir = f"/home/{__user__}/tmp"
+        if not os.path.exists(temp_dir):
+            temp_dir = "/tmp"
+        else:
+            temp_dir += "/.dt"
+        os.makedirs(temp_dir, exist_ok=True)
+        with tempfile.NamedTemporaryFile(dir=temp_dir, delete=True) as temp_file:
             self.get_file(name, temp_file.name)
             with open(temp_file.name, "r") as f:
                 return f.read()
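Read on its own, the new selection logic prefers a per-user scratch directory and only falls back to /tmp when the user's home is missing. A minimal standalone sketch of that behavior (pick_temp_dir is a hypothetical helper name, not part of the package):

import os
import tempfile

__user__ = os.getenv("USER", "root")

def pick_temp_dir() -> str:
    # Prefer /home/<user>/tmp/.dt; fall back to /tmp when the per-user
    # scratch directory does not exist.
    temp_dir = f"/home/{__user__}/tmp"
    if not os.path.exists(temp_dir):
        return "/tmp"
    temp_dir += "/.dt"
    os.makedirs(temp_dir, exist_ok=True)
    return temp_dir

# delete=True removes the temporary file as soon as the context exits.
with tempfile.NamedTemporaryFile(dir=pick_temp_dir(), delete=True) as temp_file:
    print(temp_file.name)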
src/datatailr/excel/addin.py (new file)
@@ -0,0 +1,169 @@
+"""
+Copyright (c) 2025 - Datatailr Inc.
+All Rights Reserved.
+
+This file is part of Datatailr and subject to the terms and conditions
+defined in 'LICENSE.txt'. Unauthorized copying and/or distribution
+of this file, in parts or full, via any medium is strictly prohibited.
+"""
+
+import os
+import sys
+import importlib
+import subprocess
+import inspect
+import numpy as np
+from dt.excel_base import Addin as AddinBase, Queue  # type: ignore
+
+
+def __progress__(queue, stop):
+    from time import sleep
+
+    bar = ["█", "██", "███", "████", "█████", "██████", "███████"]
+
+    count = 0
+    while True:
+        if stop.is_set():
+            return
+        queue.push(bar[count % len(bar)])
+        count += 1
+        sleep(0.25)
+
+
+def get_package_root(mod):
+    # Given module, e.g., dt.excel located at /opt/datatailr/python/dt/excel.py
+    # return entry for sys.path so it could be imported as a module.
+    # For the module above: /opt/datatailr/python
+    mod_path = os.path.abspath(mod.__file__)
+    mod_parts = mod.__name__.split(".")
+    for _ in range(len(mod_parts)):
+        mod_path = os.path.dirname(mod_path)
+    return mod_path
+
+
+class Addin(AddinBase):
+    def __init__(self, *args, **kwargs):
+        super(Addin, self).__init__(*args, **kwargs)
+
+    def run(self, port):
+        # Excel addin executable will try to import an object literally called "addin"
+        # from a module passed to dt-excel.sh as an argument. So to find which module
+        # to pass to dt-excel.sh, we walk the callstack until a module with "addin"
+        # object of type Addin is found. If not -- inform user about this requirement.
+        found_module = None
+        for frame_info in inspect.stack():
+            mod = inspect.getmodule(frame_info.frame)
+            if not mod or not hasattr(mod, "__name__"):
+                continue
+
+            temp_path = get_package_root(mod)
+            sys.path.insert(0, temp_path)
+            try:
+                imported_mod = importlib.import_module(mod.__name__)
+            finally:
+                sys.path.pop(0)
+
+            addin_obj = getattr(imported_mod, "addin", None)
+            if addin_obj is self or id(addin_obj) == id(self):
+                found_module = mod
+                break
+
+        if not found_module:
+            raise ValueError(
+                "'addin' not found. Please, use 'addin' as variable name for your Addin instance."
+            )
+
+        if found_module.__name__ != "__main__":
+            # addin.run was called from the initial python script (where __name__ == "__main__")
+            module_name = found_module.__name__
+            if found_module.__file__ is None:
+                raise ValueError(f"Module {found_module.__name__} has no __file__")
+            dir_name = os.path.dirname(os.path.abspath(found_module.__file__))
+        else:
+            # initial python script did not call addin.run() itself (e.g. it imported function that called addin.run)
+            filename = inspect.getsourcefile(found_module)
+            if filename is None:
+                raise ValueError(f"Cannot determine filename for module {found_module}")
+            module_name = os.path.splitext(os.path.basename(filename))[0]
+            dir_name = os.path.dirname(os.path.abspath(filename))
+
+        subprocess.run(
+            [
+                "bash",
+                "-c",
+                f'PYTHONPATH="{dir_name}:$PYTHONPATH" /opt/datatailr/bin/dt-excel.sh -n -H "localhost" -l -p {port} -w 8000 {module_name}',
+            ]
+        )
+
+    def expose(
+        self, description, help, volatile=False, streaming=False, progressbar=False
+    ):
+        if streaming and progressbar:
+            raise ValueError(
+                "you cannot specify progressbar and streaming at the same time"
+            )
+
+        def decorator(func):
+            signature = inspect.signature(func)
+
+            def wrapper(*args, **kwargs):
+                id = args[0]
+
+                for arg in signature.parameters.values():
+                    if streaming and arg.name == "queue":
+                        continue
+
+                    if not (
+                        isinstance(kwargs[arg.name], arg.annotation)
+                        or isinstance(kwargs[arg.name], np.ndarray)
+                    ):
+                        raise ValueError(
+                            "excel/python/dt/excel.py: Got argument of wrong type, expected %s or numpy.ndarray, got %s"
+                            % (arg.annotation, type(kwargs[arg.name]))
+                        )
+
+                if not streaming:
+                    if not progressbar:
+                        result = func(**kwargs)
+                        if hasattr(result, "tolist"):
+                            result = result.tolist()
+                        return result
+
+                    from threading import Event, Thread
+
+                    error = None
+                    queue = Queue(self.name.lower() + "." + func.__name__, id)
+                    stop = Event()
+                    thread = Thread(target=__progress__, args=(queue, stop))
+                    thread.start()
+                    try:
+                        result = func(**kwargs)
+                    except Exception as exception:
+                        error = str(exception)
+
+                    stop.set()
+                    thread.join()
+
+                    if error is not None:
+                        queue.error(error)
+                    else:
+                        queue.push(result)
+                    return
+
+                try:
+                    func(Queue(self.name.lower() + "." + func.__name__, id), **kwargs)
+                except Exception as exception:
+                    queue.error(str(exception))
+
+            self.decorator_impl(
+                signature,
+                wrapper,
+                func.__name__,
+                description,
+                help,
+                volatile,
+                streaming or progressbar,
+            )
+            return wrapper
+
+        return decorator
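The new module's public surface is the Addin class: run() walks the call stack for the module that defines a variable literally named addin and hands that module to dt-excel.sh, while expose() registers a type-checked worksheet function. The diff does not show AddinBase, so the constructor argument and port below are assumptions; a minimal usage sketch:

# my_addin.py -- hypothetical user module. The AddinBase constructor is not
# part of this diff, so Addin("Demo") is an assumption (expose() uses self.name).
from datatailr.excel.addin import Addin

addin = Addin("Demo")  # run() requires the instance to be named exactly "addin"

@addin.expose(description="Add two numbers", help="Returns a + b")
def add(a: float, b: float):
    # Parameter annotations are required: the generated wrapper type-checks
    # each keyword argument against its annotation (or numpy.ndarray).
    return a + b

if __name__ == "__main__":
    addin.run(port=8080)  # launches dt-excel.sh pointing at this module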
src/datatailr/scheduler/arguments_cache.py
@@ -49,10 +49,7 @@ class ArgumentsCache:
 
         :param use_persistent_cache: If True, use the persistent cache backend. Otherwise, use in-memory cache.
         """
-        try:
-            self.__bucket_name__ = dt__Tag().get("blob_storage_prefix") + "batch"
-        except Exception:
-            self.__bucket_name__ = "local-batch"
+        self.__bucket_name__ = dt__Tag().get("blob_storage_prefix") + "batch"
         self.use_persistent_cache = use_persistent_cache
         if not self.use_persistent_cache:
             # Create a temp folder, for local caching
src/datatailr/scheduler/base.py
@@ -183,6 +183,9 @@ class Job:
             build_script_pre=build_script_pre,
             build_script_post=build_script_post,
         )
+        if entrypoint is not None:
+            image.path_to_repo = entrypoint.path_to_repo
+            image.path_to_module = entrypoint.path_to_module
         self.image = image
         self.type = type if entrypoint is None else entrypoint.type
         self.entrypoint = entrypoint
src/datatailr/scheduler/batch.py
@@ -294,7 +294,7 @@ class BatchJob:
             env = {
                 "DATATAILR_BATCH_ID": str(self.dag.id),
                 "DATATAILR_JOB_ID": str(self.__id),
-                "DATATAILR_JOB_NAME": self.name,
+                "DATATAILR_JOB_NAME": f"{self.dag.name}[{self.__id}]",
             }
             self.entrypoint(env=env)
         else:
@@ -479,10 +479,26 @@ class Batch(Job):
 
     def prepare_args(self) -> None:
         def arg_name(arg: Union[BatchJob, str]) -> str:
-            return arg.
+            return f"{self.name}[{arg.id}]" if isinstance(arg, BatchJob) else arg
+
+        def adjust_mapping(mapping: Dict[str, str]) -> Dict[str, str]:
+            result = {}
+            for k, v in mapping.items():
+                if isinstance(v, BatchJob):
+                    result[k] = f"{self.name}[{v.id}]"
+                elif isinstance(v, str):
+                    job = self.get_job_by_name(v)
+                    if job is not None:
+                        result[k] = f"{self.name}[{job.id}]"
+                    else:
+                        result[k] = v
+                else:
+                    raise TypeError(
+                        f"Unsupported type in argument mapping: {type(v)} for key {k}"
+                    )
+            return result
 
         def merged(dst: dict[str, str], src: dict[str, str]) -> dict[str, str]:
-            # copy so we don't mutate the original mapping
             out = dict(dst)
             seen_vals = set(out.values())
             for k, v in src.items():
@@ -492,12 +508,12 @@ class Batch(Job):
             return out
 
         args = {
-            j.
-            j.argument_mapping
+            f"{self.name}[{j.id}]": merged(
+                adjust_mapping(j.argument_mapping),
+                {j.argument_mapping.get(k, k): arg_name(v) for k, v in j.args.items()},
             )
             for j in self.__jobs
         }
-
         __ARGUMENTS_CACHE__.add_arguments(self.id, args)
 
     def save(self) -> Tuple[bool, str]:
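Together, arg_name and adjust_mapping change how prepare_args addresses jobs: a BatchJob (or a job name known to the batch) is rewritten to the qualified form "<batch name>[<job id>]", while other strings pass through unchanged. A stubbed illustration of that rewriting (the names and ids are invented; the real code resolves them via get_job_by_name and BatchJob instances):

# Standalone sketch of the "<batch>[<id>]" naming convention.
batch_name = "nightly"
job_ids = {"extract": 0, "transform": 1}  # job name -> job id

def qualified(job_name: str) -> str:
    # How adjust_mapping rewrites a value that names a known job.
    return f"{batch_name}[{job_ids[job_name]}]"

mapping = {"input": "extract", "mode": "full"}
adjusted = {k: qualified(v) if v in job_ids else v for k, v in mapping.items()}
print(adjusted)  # {'input': 'nightly[0]', 'mode': 'full'}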
src/datatailr.egg-info/SOURCES.txt
@@ -23,6 +23,7 @@ src/datatailr.egg-info/top_level.txt
 src/datatailr/build/__init__.py
 src/datatailr/build/image.py
 src/datatailr/excel/__init__.py
+src/datatailr/excel/addin.py
 src/datatailr/scheduler/__init__.py
 src/datatailr/scheduler/arguments_cache.py
 src/datatailr/scheduler/base.py
src/sbin/datatailr_run.py
@@ -35,6 +35,7 @@
 import concurrent.futures
 import subprocess
 import os
+import stat
 import shlex
 import sysconfig
 from typing import Optional, Tuple
@@ -80,6 +81,21 @@ def create_user_and_group() -> Tuple[str, str]:
     os.system(
         f"getent passwd {user} || useradd -g {group} -s /bin/bash -m {user} -u {uid} -o"
     )
+
+    permissions = (
+        stat.S_IWOTH
+        | stat.S_IXOTH
+        | stat.S_IWUSR
+        | stat.S_IRUSR
+        | stat.S_IRGRP
+        | stat.S_IWGRP
+        | stat.S_IXUSR
+        | stat.S_IXGRP
+    )
+
+    os.makedirs(f"/home/{user}/tmp/.dt", exist_ok=True)
+    os.chmod(f"/home/{user}/tmp/.dt", permissions)
+
     return user, group
 
 
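The OR-ed stat constants are easier to read as one octal mode; a quick sanity check using the standard POSIX bit values:

import stat

permissions = (
    stat.S_IWOTH | stat.S_IXOTH | stat.S_IWUSR | stat.S_IRUSR
    | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXUSR | stat.S_IXGRP
)
# user: rwx, group: rwx, other: -wx (write/traverse but no read)
assert permissions == 0o773
print(oct(permissions))  # 0o773

So the new /home/<user>/tmp/.dt directory gets mode 0o773: full access for the owner and group, while other users can create entries and traverse the directory but cannot list it.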
@@ -88,16 +104,13 @@ def prepare_command_argv(command: str | list, user: str, env_vars: dict) -> list
         command = shlex.split(command)
 
     python_libdir = sysconfig.get_config_var("LIBDIR")
-    ld_library_path = get_env_var("LD_LIBRARY_PATH",
-
-    if ld_library_path:
-        python_libdir = ld_library_path + ":" + python_libdir
+    ld_library_path = get_env_var("LD_LIBRARY_PATH", None)
 
     # Base environment variables setup
     base_env = {
         "PATH": get_env_var("PATH", ""),
         "PYTHONPATH": get_env_var("PYTHONPATH", ""),
-        "LD_LIBRARY_PATH": python_libdir,
+        "LD_LIBRARY_PATH": ":".join(filter(None, [python_libdir, ld_library_path])),
     }
 
     merged_env = base_env | env_vars
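The rewritten LD_LIBRARY_PATH assembly uses ":".join(filter(None, ...)), so an unset variable (None) or an empty string contributes nothing instead of leaving a dangling colon:

# filter(None, ...) removes falsy entries, so an unset LD_LIBRARY_PATH
# (None) or an empty string never produces a stray ":".
python_libdir = "/usr/local/lib"

print(":".join(filter(None, [python_libdir, "/opt/lib"])))  # /usr/local/lib:/opt/lib
print(":".join(filter(None, [python_libdir, None])))        # /usr/local/lib
print(":".join(filter(None, [python_libdir, ""])))          # /usr/local/lib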
@@ -144,7 +157,7 @@ def run_commands_in_parallel(
     user: str,
     env_vars: dict,
     log_stream_names: Optional[list[str | None]] = None,
-) ->
+) -> int:
     """
     Executes two commands concurrently using a ThreadPoolExecutor.
     Returns a tuple of (return_code_cmd1, return_code_cmd2).
@@ -166,7 +179,7 @@ def run_commands_in_parallel(
     results = [
         future.result() for future in concurrent.futures.as_completed(futures)
    ]
-    return
+    return 0 if all(code == 0 for code in results) else 1
 
 
 def main():
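run_commands_in_parallel now collapses the per-command exit codes into one shell-style status (note that the docstring in the previous hunk still describes the old tuple return). The fold on its own:

# 0 only when every parallel command exited cleanly, else 1.
def aggregate(results: list[int]) -> int:
    return 0 if all(code == 0 for code in results) else 1

assert aggregate([0, 0]) == 0  # both commands succeeded
assert aggregate([0, 2]) == 1  # any nonzero exit code fails the run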
@@ -226,6 +239,7 @@ def main():
         "--bind-addr=0.0.0.0:9090",
         f'--app-name="Datatailr IDE {get_env_var("DATATAILR_USER")}"',
     ]
+    job_name = get_env_var("DATATAILR_JOB_NAME")
     jupyter_command = [
         "jupyter-lab",
         "--ip='*'",
@@ -233,6 +247,9 @@
         "--no-browser",
         "--NotebookApp.token=''",
         "--NotebookApp.password=''",
+        f"--ServerApp.base_url=/workspace/{job_name}/jupyter/",
+        f"--ServerApp.static_url_prefix=/workspace/{job_name}/jupyter/static/",
+        f"--ServerApp.root_dir=/home/{user}",
     ]
     run_commands_in_parallel(
         [ide_command, jupyter_command], user, env, ["code-server", "jupyter"]