pwact 0.3.2__tar.gz → 0.3.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pwact-0.3.2 → pwact-0.3.4}/PKG-INFO +1 -1
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/explore/run_model_md.py +2 -4
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/init_bulk/init_bulk_run.py +1 -9
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/label/labeling.py +2 -1
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/train/train_model.py +2 -1
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/resource.py +8 -8
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/constant.py +3 -3
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/slurm_script.py +55 -7
- {pwact-0.3.2 → pwact-0.3.4}/pwact.egg-info/PKG-INFO +1 -1
- {pwact-0.3.2 → pwact-0.3.4}/setup.py +1 -1
- {pwact-0.3.2 → pwact-0.3.4}/LICENSE +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/README.md +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/environment.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/explore/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/explore/select_image.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/init_bulk/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/init_bulk/aimd.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/init_bulk/direct.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/init_bulk/duplicate_scale.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/init_bulk/explore.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/init_bulk/relabel.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/init_bulk/relax.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/label/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/slurm/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/slurm/slurm.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/slurm/slurm_tool.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/test/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/test/test.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/train/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/train/dp_kpu.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/cmd_infos.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/init_bulk_input.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/iter_input.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/scf_param.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/train_param/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/train_param/model_param.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/train_param/nep_param.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/train_param/nn_feature_type.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/train_param/optimizer_param.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/train_param/train_param.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/train_param/work_file_param.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/active_learning/user_input/workdir.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/bin/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/data_format/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/data_format/configop.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/main.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/app_lib/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/app_lib/common.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/app_lib/cp2k.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/app_lib/cp2k_dp.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/app_lib/do_direct_sample.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/app_lib/do_eqv2model.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/app_lib/lammps.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/app_lib/pwmat.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/draw/__init__.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/draw/hist_model_devi.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/file_operation.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/format_input_output.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/json_operation.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/pre_al_data_util.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/process_tool.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact/utils/tmp.py +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact.egg-info/SOURCES.txt +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact.egg-info/dependency_links.txt +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact.egg-info/entry_points.txt +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/pwact.egg-info/top_level.txt +0 -0
- {pwact-0.3.2 → pwact-0.3.4}/setup.cfg +0 -0
pwact/active_learning/explore/run_model_md.py

@@ -80,7 +80,8 @@ class Explore(object):
     def back_explore(self):
         slurm_remain, slurm_success = get_slurm_job_run_info(self.real_md_dir, \
             job_patten="*-{}".format(EXPLORE_FILE_STRUCTURE.md_job), \
-            tag_patten="*-{}".format(EXPLORE_FILE_STRUCTURE.md_tag)
+            tag_patten="*-{}".format(EXPLORE_FILE_STRUCTURE.md_tag),
+            for_back = True)
         slurm_done = True if len(slurm_remain) == 0 and len(slurm_success) > 0 else False
         if slurm_done:
             # bk and do new job
@@ -180,9 +181,6 @@ class Explore(object):
             job_patten="*-{}".format(EXPLORE_FILE_STRUCTURE.md_job), \
             tag_patten="*-{}".format(EXPLORE_FILE_STRUCTURE.md_tag))
         # for slurm remain, check if tags done
-        slurm_done = True if len(slurm_remain) == 0 and len(slurm_success) > 0 else False
-        if slurm_done is False:
-            slurm_remain = recheck_slurm_by_jobtag(slurm_remain, EXPLORE_FILE_STRUCTURE.md_tag)
         if len(slurm_remain) > 0:
             #recover slurm jobs
             if len(slurm_remain) > 0:
pwact/active_learning/init_bulk/init_bulk_run.py

@@ -164,15 +164,7 @@ def do_collection(resource: Resource, input_param:InitBulkParam):
         copy_dir(bigmodel_dir, os.path.join(collection_dir, INIT_BULK.bigmodel))
 
     if len(result_save_path) > 0:
-
-        for _data_path in result_save_path:
-            if input_param.data_format == PWDATA.extxyz:
-                _path_path.append(_data_path)
-            elif input_param.data_format == PWDATA.pwmlff_npy: # */PWdata/*.npy
-                tmp = search_files(_data_path, "*/position.npy")
-                _path_path.extend([os.path.dirname(_) for _ in tmp])
-
-        result_lines = ["\"{}\",".format(_) for _ in _path_path]
+        result_lines = ["\"{}\",".format(_) for _ in result_save_path]
         result_lines = "\n".join(result_lines)
         # result_lines = result_lines[:-1] # Filter the last ','
         result_save_path = os.path.join(collection_dir, INIT_BULK.npy_format_name)
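The simplified do_collection path now writes each entry of result_save_path directly as a quoted, comma-terminated line instead of expanding pwmlff_npy directories through position.npy lookups. A minimal standalone sketch of that quoting/joining step (the paths are hypothetical, no pwact imports needed):

```python
# Minimal sketch of the new quoting step in do_collection; the paths are hypothetical.
result_save_path = [
    "/data/init_bulk/collection/set.000",
    "/data/init_bulk/collection/set.001",
]

# Each collected path becomes a quoted, comma-terminated line (matches the new +167 line).
result_lines = ["\"{}\",".format(_) for _ in result_save_path]
result_lines = "\n".join(result_lines)
print(result_lines)
# "/data/init_bulk/collection/set.000",
# "/data/init_bulk/collection/set.001",
```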
pwact/active_learning/label/labeling.py

@@ -149,7 +149,8 @@ class Labeling(object):
         if self.input_param.scf.dft_style == DFT_STYLE.bigmodel:
             slurm_remain, slurm_success = get_slurm_job_run_info(self.real_bigmodel_dir, \
                 job_patten="*-{}".format(LABEL_FILE_STRUCTURE.bigmodel_job), \
-                tag_patten="*-{}".format(LABEL_FILE_STRUCTURE.bigmodel_tag)
+                tag_patten="*-{}".format(LABEL_FILE_STRUCTURE.bigmodel_tag),
+                for_back = True)
             slurm_done = True if len(slurm_remain) == 0 and len(slurm_success) > 0 else False
         else:
             slurm_remain, slurm_success = get_slurm_job_run_info(self.real_scf_dir, \
pwact/active_learning/train/train_model.py

@@ -50,7 +50,8 @@ class ModelTrian(object):
     def back_train(self):
         slurm_remain, slurm_success = get_slurm_job_run_info(self.real_train_dir, \
             job_patten="*-{}".format(TRAIN_FILE_STRUCTUR.train_job), \
-            tag_patten="*-{}".format(TRAIN_FILE_STRUCTUR.train_tag)
+            tag_patten="*-{}".format(TRAIN_FILE_STRUCTUR.train_tag),
+            for_back = True)
         slurm_done = True if len(slurm_remain) == 0 and len(slurm_success) > 0 else False # len(slurm_remain) > 0 exist slurm jobs need to do
         if slurm_done:
             # bk and do new job
pwact/active_learning/user_input/resource.py

@@ -6,7 +6,7 @@ class Resource(object):
     # scf_style for init_bulk relabel
     def __init__(self, json_dict:dict, job_type:str=AL_WORK.run_iter, dft_style:str=None, scf_style:str=None) -> None:
         if job_type == AL_WORK.run_iter:
-            self.train_resource = self.get_resource(get_required_parameter("train", json_dict))
+            self.train_resource = self.get_resource(get_required_parameter("train", json_dict), default_groupsize=1)
             if self.train_resource.number_node > 1:
                 self.train_resource.number_node = 1
             if self.train_resource.gpu_per_node > 1:
@@ -16,28 +16,28 @@ class Resource(object):
                 print("Warining: the resouce of node, gpu per node and cpu per node in training automatically adjust to [1, 1, 1]")
             self.train_resource.command = self.train_resource.command.upper()
 
-            self.explore_resource = self.get_resource(get_required_parameter("explore", json_dict))
+            self.explore_resource = self.get_resource(get_required_parameter("explore", json_dict), default_groupsize=1)
             if "-in" in self.explore_resource.command:
                 self.explore_resource.command = self.explore_resource.command.split('-in')[0].strip()
                 self.explore_resource.command = "{} -in {} > {}".format(self.explore_resource.command, LAMMPS.input_lammps, SLURM_OUT.md_out)
         else:
             if "explore" in json_dict.keys():
-                self.explore_resource = self.get_resource(get_required_parameter("explore", json_dict))
+                self.explore_resource = self.get_resource(get_required_parameter("explore", json_dict), default_groupsize=1)
             else:
                 self.explore_resource = None
         # check dft resource
         if "dft" in json_dict.keys():
-            self.dft_resource = self.get_resource(get_required_parameter("dft", json_dict))
+            self.dft_resource = self.get_resource(get_required_parameter("dft", json_dict), default_groupsize=-1)
         else:
            self.dft_resource = ResourceDetail("mpirun -np 1 PWmat", 1, 1, 1, 1, 1, None, None, None)
 
         if "direct" in json_dict.keys():
-            self.direct_resource = self.get_resource(get_required_parameter("direct", json_dict))
+            self.direct_resource = self.get_resource(get_required_parameter("direct", json_dict), default_groupsize=1)
         else:
             self.direct_resource = None
 
         if "scf" in json_dict.keys():
-            self.scf_resource = self.get_resource(get_parameter("scf", json_dict, None))
+            self.scf_resource = self.get_resource(get_parameter("scf", json_dict, None), default_groupsize=-1)
         else:
             self.scf_resource = None
         # dftb_command = get_parameter("dftb_command", json_dict["dft"], None)
@@ -66,9 +66,9 @@ class Resource(object):
     # cls._instance = cls(json_dict)
     # return cls._instance
 
-    def get_resource(self, json_dict:dict):
+    def get_resource(self, json_dict:dict, default_groupsize=1):
         command = get_required_parameter("command", json_dict)
-        group_size = get_parameter("group_size", json_dict,
+        group_size = get_parameter("group_size", json_dict, default_groupsize)
         parallel_num = get_parameter("parallel_num", json_dict, 1)
         number_node = get_parameter("number_node", json_dict, 1)
         gpu_per_node = get_parameter("gpu_per_node", json_dict, 0)
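The new default_groupsize argument only changes what group_size falls back to when the user's resource JSON omits it: 1 for the train/explore/direct resources, -1 for dft/scf, which split_job_for_group later resolves to "all jobs in one group". A sketch of that fallback behavior, using a hypothetical stand-in for pwact's get_parameter helper:

```python
# Hypothetical stand-in for pwact's get_parameter helper: return the JSON value
# if present, otherwise the supplied default.
def get_parameter(key, json_dict, default):
    return json_dict.get(key, default)

dft_json = {"command": "mpirun -np 4 PWmat"}                    # no group_size given
train_json = {"command": "PWMLFF train.json", "group_size": 2}  # explicit group_size

# dft/scf resources now fall back to -1 (later expanded to "all jobs in one group"),
# train/explore/direct resources fall back to 1; an explicit value always wins.
print(get_parameter("group_size", dft_json, -1))    # -1
print(get_parameter("group_size", train_json, 1))   # 2
```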
pwact/utils/constant.py

@@ -10,9 +10,9 @@ class AL_WORK:
     run_iter = "run"
 
 class AL_STRUCTURE:
-    train = "train"
-    explore = "explore"
-    labeling = "label"
+    train = "00.train"
+    explore = "01.explore"
+    labeling = "02.label"
     pertub = "pertub"
     aimd = "aimd"
     collection = "collection"
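With the renamed AL_STRUCTURE constants, the per-stage work directories pick up numeric prefixes so they sort in pipeline order. A small sketch of how the new names compose into paths; the iter.0000 root below is a hypothetical example, not taken from this diff:

```python
import os

# Mirror of the new AL_STRUCTURE values in pwact/utils/constant.py (0.3.4).
class AL_STRUCTURE:
    train = "00.train"
    explore = "01.explore"
    labeling = "02.label"

# Hypothetical iteration root; pwact's actual directory layout may differ.
iter_root = "iter.0000"
for stage in (AL_STRUCTURE.train, AL_STRUCTURE.explore, AL_STRUCTURE.labeling):
    print(os.path.join(iter_root, stage))
# iter.0000/00.train
# iter.0000/01.explore
# iter.0000/02.label
```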
pwact/utils/slurm_script.py

@@ -35,11 +35,13 @@ Obtain the execution status of the slurm jobs under the 'dir'
 
    0-scf.job 1-scf.job 2-scf.job 3-scf.job 4-scf.job
    0-tag.scf.success 1-tag.scf.success 2-tag.scf.success 3-tag.scf.success 4-tag.scf.success
+new change:
+   only jugt the slurm file is done by the tag under each subwork tag. so the tag of job file is nouse maybe
 param {*} dir
 Returns:
 Author: WU Xingxing
 '''
-def get_slurm_job_run_info(dir:str, job_patten:str="*.job", tag_patten:str="tag.*.success"):
+def get_slurm_job_run_info(dir:str, job_patten:str="*.job", tag_patten:str="tag.*.success", for_back:bool=False):
     slurm_job_files = sorted(glob.glob(os.path.join(dir, job_patten)))
     slrum_job_dirs = [int(os.path.basename(_).split('-')[0]) for _ in slurm_job_files]
 
@@ -49,14 +51,39 @@ def get_slurm_job_run_info(dir:str, job_patten:str="*.job", tag_patten:str="tag.
     slurm_failed = []
     slurm_success = []
 
-
-
-
-
-
+    if for_back is False:
+        for slurm_file in slurm_job_files:
+            if slurm_job_done_by_jobtag(slurm_file):
+                slurm_success.append(slurm_file)
+            else:
+                slurm_failed.append(slurm_file)
+    else:
+        for i, d in enumerate(slrum_job_dirs):
+            if d in slrum_tag_sucess_dirs:
+                slurm_success.append(slurm_job_files[i])
+            else:
+                slurm_failed.append(slurm_job_files[i])
 
     return slurm_failed, slurm_success
 
+
+def slurm_job_done_by_jobtag(slurm_file):
+    with open(slurm_file, 'r') as f:
+        content = f.read()
+    cd_pattern = r'cd\s+([^\n]+)'
+    directories = re.findall(cd_pattern, content)
+    if not directories:
+        raise Exception("Error! There is no task in the slurm.job file {}".format(slurm_file))
+    for directory in directories:
+        directory = directory.strip()
+        success_file = glob.glob(os.path.join(directory, "*.success"))
+        if len(success_file) > 0:
+            continue
+        else:
+            return False
+    return True
+
+
 def recheck_slurm_by_jobtag(slurm_files:list[str], tag):
     remain_job = []
     for slurm_file in slurm_files:
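How a caller might use the new for_back flag: with for_back=True (as the back_* recovery paths in run_model_md.py, train_model.py, and labeling.py now pass), success is judged purely by the numbered *-tag files sitting next to the job scripts, while with the default for_back=False, slurm_job_done_by_jobtag opens each job script and checks every cd-ed task directory for a *.success file. A hedged usage sketch; the directory and patterns below are illustrative, not taken from the diff:

```python
# Usage sketch for the updated signature; directory and patterns are illustrative.
from pwact.utils.slurm_script import get_slurm_job_run_info

md_dir = "./01.explore/md"   # hypothetical work dir containing 0-md.job, 0-tag.md.success, ...

slurm_remain, slurm_success = get_slurm_job_run_info(
    md_dir,
    job_patten="*-md.job",
    tag_patten="*-tag.md.success",
    for_back=True,            # judge by the numbered tag files next to the job scripts
)
slurm_done = len(slurm_remain) == 0 and len(slurm_success) > 0
print("all explore jobs finished:", slurm_done)
```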
@@ -76,6 +103,27 @@ def recheck_slurm_by_jobtag(slurm_files:list[str], tag):
                 break
     return remain_job
 
+# def slurm_job_is_done_by_jobtag(dir:str, job_patten:str="*.job", tag_patten:str="tag.*.success"):
+#     slurm_job_files = sorted(glob.glob(os.path.join(dir, job_patten)))
+#     slurm_failed = []
+#     for slurm_file in slurm_job_files:
+#         with open(slurm_file, 'r') as f:
+#             content = f.read()
+#         cd_pattern = r'cd\s+([^\n]+)'
+#         directories = re.findall(cd_pattern, content)
+#         if not directories:
+#             raise Exception("Error! There is no task in the slurm.job file {}".format(slurm_file))
+#         for directory in directories:
+#             directory = directory.strip()
+#             success_file = os.path.join(directory, tag_patten)
+#             if os.path.exists(success_file):
+#                 continue
+#             else:
+#                 slurm_failed.append(slurm_file)
+#                 break
+#     return slurm_failed
+
+
 '''
 description:
     split the job_list with groupsize
@@ -85,7 +133,7 @@ return {*} [["job1","job2",...,"job_groupseze"], ..., [..., "job_N", "NONE",...,
 author: wuxingxing
 '''
 def split_job_for_group(groupsize:int , job_list:list[str], parallel_num=1):
-    groupsize =
+    groupsize = len(job_list) if groupsize == -1 else groupsize
     if groupsize > 1:
         groupsize_adj = ceil(groupsize/parallel_num)
         if groupsize_adj*parallel_num > groupsize:
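The functional change here is that a group_size of -1 is now resolved to len(job_list), i.e. every job lands in a single group. A standalone sketch of that resolution plus a simple chunking step; the helper below is illustrative, not pwact's split_job_for_group:

```python
# Illustrative grouping helper: same -1 resolution as the new +136 line,
# followed by plain chunking (not pwact's split_job_for_group implementation).
def group_jobs(groupsize: int, job_list: list[str]) -> list[list[str]]:
    groupsize = len(job_list) if groupsize == -1 else groupsize
    return [job_list[i:i + groupsize] for i in range(0, len(job_list), groupsize)]

jobs = ["0-scf.job", "1-scf.job", "2-scf.job", "3-scf.job", "4-scf.job"]
print(group_jobs(2, jobs))   # [['0-scf.job', '1-scf.job'], ['2-scf.job', '3-scf.job'], ['4-scf.job']]
print(group_jobs(-1, jobs))  # [['0-scf.job', '1-scf.job', '2-scf.job', '3-scf.job', '4-scf.job']]
```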
setup.py

@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
 
 setuptools.setup(
     name="pwact",
-    version="0.3.2",
+    version="0.3.4",
     author="LonxunQuantum",
     author_email="lonxun@pwmat.com",
     description="PWACT is an open-source automated active learning platform based on MatPL for efficient data sampling.",