halib 0.1.11__py3-none-any.whl → 0.1.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- halib/__init__.py +45 -0
- halib/filetype/csvfile.py +67 -47
- halib/filetype/jsonfile.py +1 -1
- halib/filetype/textfile.py +4 -4
- halib/filetype/videofile.py +40 -27
- halib/online/gdrive.py +72 -59
- halib/online/gdrive_mkdir.py +27 -15
- halib/online/gdrive_test.py +30 -16
- halib/online/projectmake.py +56 -40
- halib/plot.py +11 -9
- halib/sys/filesys.py +16 -9
- {halib-0.1.11.dist-info → halib-0.1.15.dist-info}/METADATA +7 -1
- halib-0.1.15.dist-info/RECORD +30 -0
- halib-0.1.11.dist-info/RECORD +0 -30
- {halib-0.1.11.dist-info → halib-0.1.15.dist-info}/LICENSE.txt +0 -0
- {halib-0.1.11.dist-info → halib-0.1.15.dist-info}/WHEEL +0 -0
- {halib-0.1.11.dist-info → halib-0.1.15.dist-info}/top_level.txt +0 -0
halib/__init__.py
CHANGED
@@ -0,0 +1,45 @@
+__all__ = [
+    "cmd",
+    "fs",
+    "filetype",
+    "np",
+    "pd",
+    "timebudget",
+    "tqdm",
+    "logger",
+    "inspect",
+    "rprint",
+    "console",
+    "pprint",
+    "plt",
+    "console",
+    "console_log",
+]
+
+import numpy as np
+import pandas as pd
+from .filetype import *
+from .sys import cmd
+from .sys import filesys as fs
+
+# for log
+from loguru import logger
+from rich import inspect
+from rich import print as rprint
+from rich.console import Console
+from rich.pretty import pprint
+from timebudget import timebudget
+from tqdm import tqdm
+import matplotlib.pyplot as plt
+
+console = Console()
+
+
+def console_log(func):
+    def wrapper(*args, **kwargs):
+        console.rule(f"<{func.__name__}>")
+        result = func(*args, **kwargs)
+        console.rule(f"</{func.__name__}>")
+        return result
+
+    return wrapper
halib/filetype/csvfile.py
CHANGED
@@ -9,15 +9,16 @@ from loguru import logger
 
 console = Console()
 
+
 def read(file, separator=","):
     df = pd.read_csv(file, separator)
     return df
 
 
 # for append, mode = 'a'
-def fn_write(df, outfile, mode='w', header=True, index_label=None):
-    if not outfile.endswith('.csv'):
-        outfile = f'{outfile}.csv'
+def fn_write(df, outfile, mode="w", header=True, index_label=None):
+    if not outfile.endswith(".csv"):
+        outfile = f"{outfile}.csv"
     if index_label is not None:
         df.to_csv(outfile, mode=mode, header=header, index_label=index_label)
     else:
@@ -30,84 +31,103 @@ def fn_make_df_with_columns(columns):
 
 
 def fn_insert_rows(df, singleRow_or_rowList):
-    row_data = singleRow_or_rowList if type(singleRow_or_rowList[0]) is list else [singleRow_or_rowList]
+    row_data = (
+        singleRow_or_rowList
+        if type(singleRow_or_rowList[0]) is list
+        else [singleRow_or_rowList]
+    )
     new_row_df = pd.DataFrame(row_data, columns=df.columns)
     df = pd.concat([df, new_row_df], ignore_index=True)
     return df
 
 
 def fn_display_df(df):
-    print(tabulate(df, headers='keys', tablefmt='psql',
-                   numalign='right'))
+    print(tabulate(df, headers="keys", tablefmt="psql", numalign="right"))
 
-def fn_config_display_pd(max_rows=None, max_columns=None,
-                         display_width=1000, col_header_justify='center',
-                         precision=10):
-    pd.set_option('display.max_rows', max_rows)
-    pd.set_option('display.max_columns', max_columns)
-    pd.set_option('display.width', display_width)
-    pd.set_option('display.colheader_justify', col_header_justify)
-    pd.set_option('display.precision', precision)
 
+def fn_config_display_pd(
+    max_rows=None,
+    max_columns=None,
+    display_width=1000,
+    col_header_justify="center",
+    precision=10,
+):
+    pd.set_option("display.max_rows", max_rows)
+    pd.set_option("display.max_columns", max_columns)
+    pd.set_option("display.width", display_width)
+    pd.set_option("display.colheader_justify", col_header_justify)
+    pd.set_option("display.precision", precision)
 
 
 class DFCreator(dict):
     """docstring for ClassName."""
-
-    def __init__(self, *arg, **kw):
-        super(DFCreator, self).__init__(*arg, **kw)
-        self.row_pool_dict = {}
-
+
+    def __init__(self, *arg, **kw):
+        super(DFCreator, self).__init__(*arg, **kw)
+        self.row_pool_dict = {}
+
     def create_table(self, table_name, columns):
         self[table_name] = pd.DataFrame(columns=columns)
         self.row_pool_dict[table_name] = []
-
-
+
+    """Instead of inserting to dataframe, insert to row pool for fast computation"""
+
     def insert_rows(self, table_name, singleRow_or_rowList):
-        rows_data = singleRow_or_rowList if type(singleRow_or_rowList[0]) is list else [singleRow_or_rowList]
+        rows_data = (
+            singleRow_or_rowList
+            if type(singleRow_or_rowList[0]) is list
+            else [singleRow_or_rowList]
+        )
         self.row_pool_dict[table_name].extend(rows_data)
-
-
+
+    """Fill from row pool to actual table dataframe"""
+
     def fill_table_from_row_pool(self, table_name):
        if len(self.row_pool_dict[table_name]) > 0:
            # concat row pool to table dataframe
-            self[table_name] = fn_insert_rows(self[table_name],
-                                              self.row_pool_dict[table_name])
+            self[table_name] = fn_insert_rows(
+                self[table_name], self.row_pool_dict[table_name]
+            )
+            # free the pool
            self.row_pool_dict[table_name] = []
-
-    def write_table(self, table_name, output_dir, out_file_name=None, mode='w', header=True, index_label=None):
+
+    def write_table(
+        self,
+        table_name,
+        output_dir,
+        out_file_name=None,
+        mode="w",
+        header=True,
+        index_label=None,
+    ):
         self.fill_table_from_row_pool(table_name)
-
+
         if not out_file_name:
-            outfile = f'{output_dir}/{table_name}.csv'
+            outfile = f"{output_dir}/{table_name}.csv"
         else:
-            outfile = f'{output_dir}/{out_file_name}.csv'
-
+            outfile = f"{output_dir}/{out_file_name}.csv"
+
         fn_write(self[table_name], outfile, mode, header, index_label)
-
-    def write_all_table(self, output_dir, mode='w', header=True, index_label=None):
+
+    def write_all_table(self, output_dir, mode="w", header=True, index_label=None):
         for table_name in self.keys():
-            outfile = f'{output_dir}/{table_name}.csv'
+            outfile = f"{output_dir}/{table_name}.csv"
             fn_write(self[table_name], outfile, mode, header, index_label)
-
+
     def display_table(self, table_name):
         self.fill_table_from_row_pool(table_name)
         fn_display_df(self[table_name])
-
+
     def display_table_schema(self, table_name):
         columns = list(self[table_name].columns)
-        console.print(f'TABLE {table_name}: {columns}', style='bold blue')
-
+        console.print(f"TABLE {table_name}: {columns}", style="bold blue")
+
     def display_all_table_schema(self):
-        table_names = list(self.keys())
-        for table_name in table_names:
-            self.display_table_schema(table_name)
-
+        table_names = list(self.keys())
+        for table_name in table_names:
+            self.display_table_schema(table_name)
+
     def display_all_table(self):
         for table_name in self.keys():
             console.rule(table_name)
             self.display_table(table_name)
-
-
-
-
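
The reworked DFCreator keeps a per-table row pool and only concatenates pooled rows into the backing DataFrame when a table is displayed or written, avoiding a pd.concat per insert. A short sketch of the intended flow (table and column names are hypothetical):

from halib.filetype.csvfile import DFCreator

dfc = DFCreator()
dfc.create_table("metrics", ["epoch", "loss"])
dfc.insert_rows("metrics", [[1, 0.92], [2, 0.71]])  # a list of rows...
dfc.insert_rows("metrics", [3, 0.55])               # ...or a single row
dfc.display_table("metrics")     # fills from the pool, then pretty-prints
dfc.write_table("metrics", ".")  # fills again, then writes ./metrics.csv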
halib/filetype/jsonfile.py
CHANGED
halib/filetype/textfile.py
CHANGED
@@ -1,13 +1,13 @@
 def read_line_by_line(file_path):
-    with open(file_path, 'r') as file:
+    with open(file_path, "r") as file:
         lines = file.readlines()
         lines = [line.rstrip() for line in lines]
         return lines
 
 
 def write(lines, outfile, append=False):
-    mode = 'a' if append else 'w'
-    with open(outfile, mode, encoding='utf-8') as f:
+    mode = "a" if append else "w"
+    with open(outfile, mode, encoding="utf-8") as f:
         for line in lines:
             f.write(line)
-            f.write('\n')
+            f.write("\n")
halib/filetype/videofile.py
CHANGED
@@ -10,27 +10,28 @@ from halib.filetype import textfile
 
 
 class VideoResolution(Enum):
-    VR480p = '720x480'
-    VR576p = '1280x720'
-    VR720p_hd = '1280x720'
-    VR1080p_full_hd = '1920x1080 '
-    VR4K_uhd = '3840x2160'
-    VR8K_uhd = '7680x4320'
+    VR480p = "720x480"
+    VR576p = "1280x720"
+    VR720p_hd = "1280x720"
+    VR1080p_full_hd = "1920x1080 "
+    VR4K_uhd = "3840x2160"
+    VR8K_uhd = "7680x4320"
 
     def __str__(self):
-        return '%s' % self.value
+        return "%s" % self.value
 
 
 def get_video_resolution_size(video_resolution):
-    separator = 'x'
+    separator = "x"
     resolution_str = str(video_resolution)
     info_arr = resolution_str.split(separator)
     width, height = int(info_arr[0]), int(info_arr[1])
     return width, height
 
 
-def get_videos_by_resolution(directory, video_resolution,
-                             video_ext='mp4', include_better=True):
+def get_videos_by_resolution(
+    directory, video_resolution, video_ext="mp4", include_better=True
+):
     video_paths = filesys.filter_files_by_extension(directory, video_ext)
     filtered_video_paths = []
     for path in video_paths:
@@ -62,7 +63,9 @@ progress_bar = None
 def on_progress(bytes_done, total_bytes):
     global progress_bar
     if progress_bar is None:
-        progress_bar = enlighten.get_manager().counter(total=20, desc='Downloading', unit='byte', color='blue')
+        progress_bar = enlighten.get_manager().counter(
+            total=20, desc="Downloading", unit="byte", color="blue"
+        )
 
     progress_bar.total = total_bytes
     progress_bar.count = bytes_done
@@ -74,33 +77,38 @@ def on_progress(bytes_done, total_bytes):
 
 
 def get_youtube_url(full_url_or_video_code):
-    if 'youtube' in full_url_or_video_code:
+    if "youtube" in full_url_or_video_code:
         url = full_url_or_video_code
     else:
-        url = f'https://youtube.com/watch?v={full_url_or_video_code}'
+        url = f"https://youtube.com/watch?v={full_url_or_video_code}"
     return url
 
 
-def download_yt_video(full_url_or_video_code, save_folder='./',
-                      report_progress=False, video_idx='1', total_video='1'):
+def download_yt_video(
+    full_url_or_video_code,
+    save_folder="./",
+    report_progress=False,
+    video_idx="1",
+    total_video="1",
+):
     url = get_youtube_url(full_url_or_video_code)
     filesys.make_dir(save_folder)
     filesys.change_current_dir(save_folder)
     try:
         yt = Youtube(url)
-        title_en = yt.title.encode('ascii', 'ignore')
+        title_en = yt.title.encode("ascii", "ignore")
         file_download = yt.formats.first()
         if report_progress:
-            print(f'\n[{video_idx}/{total_video}][DOWNLOAD]{title_en}')
+            print(f"\n[{video_idx}/{total_video}][DOWNLOAD]{title_en}")
         file_download.download(onprogress=on_progress, skip_existing=True)
     except TypeError:
-        print(f'[ERROR] download {url}')
+        print(f"[ERROR] download {url}")
 
 
-def download_playlist(playlist_url, save_folder='./',
-                      report_progress=False,
-                      start_pattern=None):
-    print(f'[DOWNLOAD PLAYLIST] {playlist_url}')
+def download_playlist(
+    playlist_url, save_folder="./", report_progress=False, start_pattern=None
+):
+    print(f"[DOWNLOAD PLAYLIST] {playlist_url}")
     pl = Playlist(playlist_url).videos
     total_video = len(pl)
     should_start = False
@@ -108,7 +116,7 @@ def download_playlist(playlist_url, save_folder='./',
     count = 0
     for idx, code in enumerate(pl):
         try:
-            url = f'https://youtube.com/watch?v={code}'
+            url = f"https://youtube.com/watch?v={code}"
             yt = Youtube(url)
             count += 1
             if start_pattern is None:
@@ -116,12 +124,16 @@ def download_playlist(playlist_url, save_folder='./',
             elif start_pattern in yt.title:
                 should_start = True
             if should_start:
-                download_yt_video(url, save_folder, report_progress,
-                                  video_idx=str(count),
-                                  total_video=str(total_video))
+                download_yt_video(
+                    url,
+                    save_folder,
+                    report_progress,
+                    video_idx=str(count),
+                    total_video=str(total_video),
+                )
 
         except TypeError:
-            print(f'[ERROR] download {url}')
+            print(f"[ERROR] download {url}")
     enlighten.get_manager().stop()
 
 
@@ -134,6 +146,7 @@ def download_multiple_playlist_in_files(text_file, report_progress=False):
         plUrl = folder_plUrl.split()[1]
         download_playlist(plUrl, save_folder=folder, report_progress=report_progress)
 
+
 # test code
 # pl = 'https://youtube.com/playlist?list=PLYaaU301HUe03PabLEGbMGB8nhHgq58Zr'
 # download_playlist(pl, './test', report_progress=True)
halib/online/gdrive.py
CHANGED
@@ -9,6 +9,7 @@ import requests
 import json
 
 import googleapiclient.errors
+
 # Import Google libraries
 from pydrive.auth import GoogleAuth
 from pydrive.drive import GoogleDrive
@@ -24,9 +25,9 @@ ggDrive = None
 ggAuth = None
 
 
-def get_gg_drive(settings_file='settings.yaml'):
+def get_gg_drive(settings_file="settings.yaml"):
     """
-        Authenticate to Google API
+    Authenticate to Google API
     """
     global ggDrive
     global ggAuth
@@ -46,13 +47,13 @@ def get_folder_id(gg_parent_folder_id, folder_name):
     file_list = GoogleDriveFileList()
     try:
         file_list = drive.ListFile(
-            {'q': "'{0}' in parents and trashed=false".format(gg_parent_folder_id)}
+            {"q": "'{0}' in parents and trashed=false".format(gg_parent_folder_id)}
         ).GetList()
     # Exit if the parent folder doesn't exist
     except googleapiclient.errors.HttpError as err:
         # Parse error message
-        message = ast.literal_eval(err.content)['error']['message']
-        if message == 'File not found: ':
+        message = ast.literal_eval(err.content)["error"]["message"]
+        if message == "File not found: ":
             print(message + folder_name)
             exit(1)
         # Exit with stacktrace in case of other error
@@ -61,22 +62,22 @@ def get_folder_id(gg_parent_folder_id, folder_name):
 
     # Find the the destination folder in the parent folder's files
     for file1 in file_list:
-        if file1['title'] == folder_name:
-            print('title: %s, id: %s' % (file1['title'], file1['id']))
-            return file1['id']
+        if file1["title"] == folder_name:
+            print("title: %s, id: %s" % (file1["title"], file1["id"]))
+            return file1["id"]
 
 
 def create_folder(folder_name, gg_parent_folder_id):
     """
-        Create folder on Google Drive
+    Create folder on Google Drive
     """
 
     folder_metadata = {
-        'title': folder_name,
+        "title": folder_name,
         # Define the file type as folder
-        'mimeType': 'application/vnd.google-apps.folder',
+        "mimeType": "application/vnd.google-apps.folder",
         # ID of the parent folder
-        'parents': [{'kind': 'drive#fileLink', 'id': gg_parent_folder_id}],
+        "parents": [{"kind": "drive#fileLink", "id": gg_parent_folder_id}],
     }
     drive = get_gg_drive()
     folder = drive.CreateFile(folder_metadata)
@@ -84,7 +85,7 @@ def create_folder(folder_name, gg_parent_folder_id):
 
     # Return folder information
     # print('title: %s, id: %s' % (folder['title'], folder['id']))
-    return folder['id']
+    return folder["id"]
 
 
 def is_in_ignore_list(local_path, ignore_list=None):
@@ -99,77 +100,88 @@ def is_in_ignore_list(local_path, ignore_list=None):
 
 def upload_file(local_file_path, gg_folder_id, ignore_list=None):
     """
-        Upload local file to Google Drive folder
+    Upload local file to Google Drive folder
     """
     drive = get_gg_drive()
     if not is_in_ignore_list(local_file_path, ignore_list):
         link = ""
-
-        print('uploading ' + local_file_path)
+
+        # print('uploading ' + local_file_path)
         try:
             # Upload file to folder.
             title = filesys.get_file_name(local_file_path, split_file_ext=False)
 
             # delete file if exist on gg folder
             query = f"'{gg_folder_id}' in parents and trashed=false"
-            file_list = drive.ListFile({'q': f'{query}'}).GetList()
+            file_list = drive.ListFile({"q": f"{query}"}).GetList()
             for file in file_list:
-                if file['title'] == title:
-                    print(f'[DELETE] {title} on Google Drive')
+                if file["title"] == title:
+                    # print(f'[DELETE] {title} on Google Drive')
                     file.Delete()
                     break
-            print('uploading ' + local_file_path)
+            # print('uploading ' + local_file_path)
             f = drive.CreateFile(
-                {'title': f'{title}',
-                 'parents': [{'kind': 'drive#fileLink', 'id': gg_folder_id}]})
+                {
+                    "title": f"{title}",
+                    "parents": [{"kind": "drive#fileLink", "id": gg_folder_id}],
+                }
+            )
             f.SetContentFile(local_file_path)
-            f.Upload(param={'supportsAllDrives': True})
-            access_token = ggAuth.credentials.access_token  # gauth is from drive = GoogleDrive(gauth) Please modify this for your actual script.
-            print(f'access_token {access_token}')
-            file_id = f['id']
-            url = 'https://www.googleapis.com/drive/v3/files/' + file_id + '/permissions?supportsAllDrives=true'
-            headers = {'Authorization': 'Bearer ' + access_token,
-                       'Content-Type': 'application/json'}
-            payload = {'type': 'anyone', 'value': 'anyone', 'role': 'reader'}
+            f.Upload(param={"supportsAllDrives": True})
+            access_token = (
+                ggAuth.credentials.access_token
+            )  # gauth is from drive = GoogleDrive(gauth) Please modify this for your actual script.
+            # print(f'access_token {access_token}')
+            file_id = f["id"]
+            url = (
+                "https://www.googleapis.com/drive/v3/files/"
+                + file_id
+                + "/permissions?supportsAllDrives=true"
+            )
+            headers = {
+                "Authorization": "Bearer " + access_token,
+                "Content-Type": "application/json",
+            }
+            payload = {"type": "anyone", "value": "anyone", "role": "reader"}
             res = requests.post(url, data=json.dumps(payload), headers=headers)
-            print(res.status_code)
-            print(res.content)
             # SHARABLE LINK
-            link = f['alternateLink']
+            link = f["alternateLink"]
             return link
         except Exception as e:
-            print('error uploading ' + local_file_path)
+            print("error uploading " + local_file_path)
             print(e)
             return link
     # Skip the file if it's empty
     else:
-        print('file {0} is empty or in ignore list'.format(local_file_path))
-        return ""
+        print("file {0} is empty or in ignore list".format(local_file_path))
+        return ""  # return empty string
 
 
-def recursive_walk_and_upload(local_folder_path, gg_folder_id,
-                              processed_path, ignore_list=None):
+def recursive_walk_and_upload(
+    local_folder_path, gg_folder_id, processed_path, ignore_list=None
+):
     for root, sub_folders, files in os.walk(local_folder_path):
         # already processed folder
         if root in processed_path:
-            print(f'[SKIP] already processed folder {root}')
+            print(f"[SKIP] already processed folder {root}")
             return
-        print(f'\n\n[RECURSIVE] {local_folder_path}, {gg_folder_id}')
-        print(f'[FF] {root} {sub_folders} {files}')
+        print(f"\n\n[RECURSIVE] {local_folder_path}, {gg_folder_id}")
+        print(f"[FF] {root} {sub_folders} {files}")
         if sub_folders:
             for sub_folder in sub_folders:
                 sub_folder_path = os.path.join(root, sub_folder)
-                print(f'process {sub_folder_path}')
+                print(f"process {sub_folder_path}")
                 if is_in_ignore_list(sub_folder_path, ignore_list):
                     continue
                 # Get destination folder ID
                 gg_sub_folder_id = get_folder_id(gg_folder_id, sub_folder)
                 # Create the folder if it doesn't exists
                 if not gg_sub_folder_id:
-                    print('creating folder ' + sub_folder)
+                    print("creating folder " + sub_folder)
                     gg_sub_folder_id = create_folder(sub_folder, gg_folder_id)
-                recursive_walk_and_upload(sub_folder_path, gg_sub_folder_id,
-                                          processed_path, ignore_list)
+                recursive_walk_and_upload(
+                    sub_folder_path, gg_sub_folder_id, processed_path, ignore_list
+                )
         if files:
             for file in files:
                 filePath = os.path.join(root, file)
@@ -177,15 +189,15 @@ def recursive_walk_and_upload(local_folder_path, gg_folder_id,
         processed_path.append(root)
 
 
-def upload_folder_to_drive(local_folder, gg_folder_id,
-                           content_only=True,
-                           ignore_file=None):
+def upload_folder_to_drive(
+    local_folder, gg_folder_id, content_only=True, ignore_file=None
+):
+    """
+    Upload folder to Google Drive folder
+    bool content_only: if true, we only upload files and folder inside local_folder
+    else create a folder with the same name of the local folder and upload all files and folders
+    in the local folder to it
     """
-        Upload folder to Google Drive folder
-        bool content_only: if true, we only upload files and folder inside local_folder
-        else create a folder with the same name of the local folder and upload all files and folders
-        in the local folder to it
-    """
 
     ignore_list = None
     if ignore_file:
@@ -201,17 +213,18 @@ def upload_folder_to_drive(local_folder, gg_folder_id,
 
     processed_path = []
     local_folder = os.path.normpath(local_folder)
-    recursive_walk_and_upload(local_folder, gg_folder_id_to_upload,
-                              processed_path, ignore_list)
+    recursive_walk_and_upload(
+        local_folder, gg_folder_id_to_upload, processed_path, ignore_list
+    )
 
 
 def main():
-    settingf = r'halib\online\settings.yaml'
-    folder_id = '1RXew5llcebEXclbEAKQ2IWDtTWmW0d_z'
+    settingf = r"halib\online\settings.yaml"
+    folder_id = "1RXew5llcebEXclbEAKQ2IWDtTWmW0d_z"
     get_gg_drive(settingf)
-    sharelink= upload_file(r"D:\Dev\github_proj\halib\LICENSE.txt", folder_id)
+    sharelink = upload_file(r"D:\Dev\github_proj\halib\LICENSE.txt", folder_id)
     print(sharelink)
 
 
 if __name__ == "__main__":
-    main()
+    main()
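
End to end, the module is meant to be used the way main() does: authenticate once, then upload and collect the sharable link. A hedged sketch (the settings file and folder ID are placeholders; a pydrive settings.yaml with valid credentials is required):

from halib.online import gdrive

gdrive.get_gg_drive("settings.yaml")  # authenticates and caches the client globally
link = gdrive.upload_file("report.pdf", "<google-drive-folder-id>")
print(link)  # sharable "anyone with the link" URL, or "" if skipped or failed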
halib/online/gdrive_mkdir.py
CHANGED
@@ -6,15 +6,27 @@ from halib.filetype import textfile
 
 
 def parse_args():
-    parser = ArgumentParser(
-        description='Upload local folder to Google Drive')
-    parser.add_argument('-a', '--authFile', type=str,
-                        help='authenticate file to Google Drive',
-                        default='settings.yaml')
-    parser.add_argument('-g', '--GDriveParentFolder', type=str,
-                        help='Destination parent folder ID in Google Drive')
-    parser.add_argument('-n', '--folderName', type=str,
-                        help='name of the folder which is about to be created', default='untitled')
+    parser = ArgumentParser(description="Upload local folder to Google Drive")
+    parser.add_argument(
+        "-a",
+        "--authFile",
+        type=str,
+        help="authenticate file to Google Drive",
+        default="settings.yaml",
+    )
+    parser.add_argument(
+        "-g",
+        "--GDriveParentFolder",
+        type=str,
+        help="Destination parent folder ID in Google Drive",
+    )
+    parser.add_argument(
+        "-n",
+        "--folderName",
+        type=str,
+        help="name of the folder which is about to be created",
+        default="untitled",
+    )
     return parser.parse_args()
 
 
@@ -24,17 +36,17 @@ def main():
     gDrive_parent_folder_id = args.GDriveParentFolder
     folder_name = args.folderName
 
-    if folder_name == 'untitled':
-        folder_name = datetime.today().strftime('%Y.%m.%d_%Hh%M')
+    if folder_name == "untitled":
+        folder_name = datetime.today().strftime("%Y.%m.%d_%Hh%M")
     else:
-        date_str = datetime.today().strftime('%Y.%m.%d_%Hh%M')
-        folder_name = f'{date_str}_{folder_name}'
+        date_str = datetime.today().strftime("%Y.%m.%d_%Hh%M")
+        folder_name = f"{date_str}_{folder_name}"
 
-    print(f'[GDrive] creating {folder_name} in GDrive folder {gDrive_parent_folder_id}')
+    print(f"[GDrive] creating {folder_name} in GDrive folder {gDrive_parent_folder_id}")
 
     gdrive.get_gg_drive(auth_file)
     folder_id = gdrive.create_folder(folder_name, gDrive_parent_folder_id)
-    textfile.write([folder_id], './GDriveFolder.txt')
+    textfile.write([folder_id], "./GDriveFolder.txt")
 
 
 if __name__ == "__main__":
halib/online/gdrive_test.py
CHANGED
@@ -4,18 +4,32 @@ from halib.online import gdrive
 
 
 def parse_args():
-    parser = ArgumentParser(
-        description='Upload local folder to Google Drive')
-    parser.add_argument('-a', '--authFile', type=str,
-                        help='authenticate file to Google Drive',
-                        default='settings.yaml')
-    parser.add_argument('-s', '--source', type=str, help='Folder to upload')
-    parser.add_argument('-d', '--destination', type=str, help='Destination folder ID in Google Drive')
-    parser.add_argument('-c', '--contentOnly', type=str, help='Parent Folder in Google Drive', default='True')
-    parser.add_argument(
-        '-i', '--ignoreFile', type=str,
-        help='file containing files/folders to ignore',
-        default=None)
+    parser = ArgumentParser(description="Upload local folder to Google Drive")
+    parser.add_argument(
+        "-a",
+        "--authFile",
+        type=str,
+        help="authenticate file to Google Drive",
+        default="settings.yaml",
+    )
+    parser.add_argument("-s", "--source", type=str, help="Folder to upload")
+    parser.add_argument(
+        "-d", "--destination", type=str, help="Destination folder ID in Google Drive"
+    )
+    parser.add_argument(
+        "-c",
+        "--contentOnly",
+        type=str,
+        help="Parent Folder in Google Drive",
+        default="True",
+    )
+    parser.add_argument(
+        "-i",
+        "--ignoreFile",
+        type=str,
+        help="file containing files/folders to ignore",
+        default=None,
+    )
 
     return parser.parse_args()
 
@@ -25,12 +39,12 @@ def main():
     auth_file = args.authFile
     local_folder = args.source
     gg_folder_id = args.destination
-    content_only = args.contentOnly.lower() == 'true'
+    content_only = args.contentOnly.lower() == "true"
     ignore_file = args.ignoreFile
     gdrive.get_gg_drive(auth_file)
-    gdrive.upload_folder_to_drive(local_folder, gg_folder_id,
-                                  content_only=content_only,
-                                  ignore_file=ignore_file)
+    gdrive.upload_folder_to_drive(
+        local_folder, gg_folder_id, content_only=content_only, ignore_file=ignore_file
+    )
 
 
 if __name__ == "__main__":
halib/online/projectmake.py
CHANGED
@@ -23,42 +23,48 @@ def get_curl(url, user_and_pass, verbose=True):
 
 
 def get_user_and_pass(username, appPass):
-    return f'{username}:{appPass}'
+    return f"{username}:{appPass}"
 
 
-def create_repo(username, appPass, repo_name, workspace,
-                proj_name, template_repo='py-proj-template'):
+def create_repo(
+    username, appPass, repo_name, workspace, proj_name, template_repo="py-proj-template"
+):
     buffer = BytesIO()
-    url = f'https://api.bitbucket.org/2.0/repositories/{workspace}/{repo_name}'
-    data = json.dumps({'scm': 'git', 'project': {'key': f'{proj_name}'}})
+    url = f"https://api.bitbucket.org/2.0/repositories/{workspace}/{repo_name}"
+    data = json.dumps({"scm": "git", "project": {"key": f"{proj_name}"}})
 
     user_and_pass = get_user_and_pass(username, appPass)
     c = get_curl(url, user_and_pass)
     c.setopt(pycurl.WRITEDATA, buffer)
     c.setopt(pycurl.POST, 1)
     c.setopt(pycurl.POSTFIELDS, data)
-    c.setopt(pycurl.HTTPHEADER, ['Accept: application/json'])
+    c.setopt(pycurl.HTTPHEADER, ["Accept: application/json"])
     c.perform()
     RESPOND_CODE = c.getinfo(pycurl.HTTP_CODE)
     c.close()
     # log info
     body = buffer.getvalue()
-    msg = body.decode('iso-8859-1')
-    successful = True if str(RESPOND_CODE) == '200' else False
+    msg = body.decode("iso-8859-1")
+    successful = True if str(RESPOND_CODE) == "200" else False
 
     if successful and template_repo:
-        template_repo_url = f'https://{username}:{appPass}@bitbucket.org/{workspace}/{template_repo}.git'
+        template_repo_url = f"https://{username}:{appPass}@bitbucket.org/{workspace}/{template_repo}.git"
         git_clone(template_repo_url)
-        template_folder = f'./{template_repo}'
+        template_folder = f"./{template_repo}"
 
-        created_repo_url = f'https://{username}:{appPass}@bitbucket.org/{workspace}/{repo_name}.git'
+        created_repo_url = (
+            f"https://{username}:{appPass}@bitbucket.org/{workspace}/{repo_name}.git"
+        )
         git_clone(created_repo_url)
-        created_folder = f'./{repo_name}'
-        shutil.copytree(template_folder, created_folder,
-                        dirs_exist_ok=True,
-                        ignore=shutil.ignore_patterns('.git'))
+        created_folder = f"./{repo_name}"
+        shutil.copytree(
+            template_folder,
+            created_folder,
+            dirs_exist_ok=True,
+            ignore=shutil.ignore_patterns(".git"),
+        )
         os.system('rmdir /S /Q "{}"'.format(template_folder))
-        project_folder = 'project_name'
+        project_folder = "project_name"
 
         filesys.change_current_dir(created_folder)
         filesys.rename_dir_or_file(project_folder, repo_name)
@@ -69,15 +75,20 @@ def create_repo(username, appPass, repo_name, workspace,
 
 
 def parse_args():
-    parser = ArgumentParser(
-        description='Upload local folder to Google Drive')
-    parser.add_argument('-a', '--authFile', type=str,
-                        help='authenticate file (json) to Bitbucket',
-                        default='bitbucket.json')
-    parser.add_argument('-r', '--repoName', type=str,
-                        help='Repository name', default='hahv-proj')
-    parser.add_argument('-t', '--templateRepo', type=str,
-                        help='template repo to fork', default='True')
+    parser = ArgumentParser(description="Upload local folder to Google Drive")
+    parser.add_argument(
+        "-a",
+        "--authFile",
+        type=str,
+        help="authenticate file (json) to Bitbucket",
+        default="bitbucket.json",
+    )
+    parser.add_argument(
+        "-r", "--repoName", type=str, help="Repository name", default="hahv-proj"
+    )
+    parser.add_argument(
+        "-t", "--templateRepo", type=str, help="template repo to fork", default="True"
+    )
     return parser.parse_args()
 
 
@@ -91,24 +102,29 @@ def main():
     repo_name = args.repoName
 
     authInfo = jsonfile.read(authFile)
-    username = authInfo['username']
-    appPass = authInfo['appPass']
-    workspace_id = authInfo['workspace_id']
-    project_id = authInfo['project_id']
-    use_template = args.templateRepo.lower() == 'true'
-    template_repo = authInfo['template_repo'] if use_template else ''
-
-    extra_info = f'[Use template project {template_repo}]' if use_template else ''
-    print(f'[BitBucket] creating {repo_name} Project in Bitbucket {extra_info}')
-
-    successful, msg = create_repo(username, appPass, repo_name,
-                                  workspace_id, project_id,
-                                  template_repo=template_repo)
+    username = authInfo["username"]
+    appPass = authInfo["appPass"]
+    workspace_id = authInfo["workspace_id"]
+    project_id = authInfo["project_id"]
+    use_template = args.templateRepo.lower() == "true"
+    template_repo = authInfo["template_repo"] if use_template else ""
+
+    extra_info = f"[Use template project {template_repo}]" if use_template else ""
+    print(f"[BitBucket] creating {repo_name} Project in Bitbucket {extra_info}")
+
+    successful, msg = create_repo(
+        username,
+        appPass,
+        repo_name,
+        workspace_id,
+        project_id,
+        template_repo=template_repo,
+    )
     if successful:
-        print(f'[Bitbucket] {repo_name} created successfully.{extra_info}')
+        print(f"[Bitbucket] {repo_name} created successfully.{extra_info}")
     else:
         formatted_msg = jsonfile.beautify(msg)
-        print(f'[Bitbucket] {repo_name} created failed. Details:\n{formatted_msg}')
+        print(f"[Bitbucket] {repo_name} created failed. Details:\n{formatted_msg}")
 
 
 if __name__ == "__main__":
halib/plot.py
CHANGED
@@ -3,14 +3,16 @@ import seaborn as sns
 import matplotlib
 
 
-def save_fig_latex_pgf(filename, directory='.'):
+def save_fig_latex_pgf(filename, directory="."):
     matplotlib.use("pgf")
-    matplotlib.rcParams.update({
-        'pgf.texsystem': 'pdflatex',
-        'font.family': 'serif',
-        'text.usetex': True,
-        'pgf.rcfonts': False,
-    })
-    if '.pgf' not in filename:
-        filename = f'{directory}/{filename}.pgf'
+    matplotlib.rcParams.update(
+        {
+            "pgf.texsystem": "pdflatex",
+            "font.family": "serif",
+            "text.usetex": True,
+            "pgf.rcfonts": False,
+        }
+    )
+    if ".pgf" not in filename:
+        filename = f"{directory}/{filename}.pgf"
     plt.savefig(filename)
halib/sys/filesys.py
CHANGED
@@ -38,8 +38,9 @@ def make_dir(directory):
 
 
 def copy_dir(src_dir, dst_dir, dirs_exist_ok=True, ignore_patterns=None):
-    shutil.copytree(src_dir, dst_dir,
-                    dirs_exist_ok=dirs_exist_ok, ignore=ignore_patterns)
+    shutil.copytree(
+        src_dir, dst_dir, dirs_exist_ok=dirs_exist_ok, ignore=ignore_patterns
+    )
 
 
 def delete_dir(directory):
@@ -47,14 +48,20 @@ def delete_dir(directory):
 
 
 def list_dirs(directory):
-    folders = list(filter(lambda x: os.path.isdir(os.path.join(directory, x)),
-                          os.listdir(directory)))
+    folders = list(
+        filter(
+            lambda x: os.path.isdir(os.path.join(directory, x)), os.listdir(directory)
+        )
+    )
     return folders
 
 
 def list_files(directory):
-    files = list(filter(lambda x: os.path.isfile(os.path.join(directory, x)),
-                        os.listdir(directory)))
+    files = list(
+        filter(
+            lambda x: os.path.isfile(os.path.join(directory, x)), os.listdir(directory)
+        )
+    )
     return files
 
 
@@ -66,12 +73,12 @@ def filter_files_by_extension(directory, ext, recursive=True):
     else:
         ext_list = [ext]
     if not recursive:
-        filter_pattern = f'{directory}/*'
+        filter_pattern = f"{directory}/*"
     else:
-        filter_pattern = f'{directory}/**/*'
+        filter_pattern = f"{directory}/**/*"
 
     for ext_item in ext_list:
-        ext_filter = f'{filter_pattern}.{ext_item}'
+        ext_filter = f"{filter_pattern}.{ext_item}"
         files = glob.glob(filter_pattern, recursive=True)
         files = [f for f in files if is_file(f) and f.endswith(ext_item)]
         result_files.extend(files)
{halib-0.1.11.dist-info → halib-0.1.15.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: halib
-Version: 0.1.11
+Version: 0.1.15
 Summary: Small library for common tasks
 Author: Hoang Van Ha
 Author-email: hoangvanhauit@gmail.com
@@ -19,6 +19,12 @@ Requires-Dist: loguru
 
 Helper package for coding and automation
 
+**Version 0.1.15**
+
++ `__init__.py`: add common logging library; also `console_log` decorator to log function (start and end)
+
+---
+
 **Version 0.1.10**
 
 + filesys: fix typo on "is_exit" to "is_exist"
halib-0.1.15.dist-info/RECORD
ADDED
@@ -0,0 +1,30 @@
+halib/__init__.py,sha256=_cfeDmxju_Unr9WPzk4qPIP_7hU7ncNbuEShhXGQmhc,874
+halib/csvfile.py,sha256=Eoeni0NIbNG3mB5ESWAvNwhJxOjmCaPd1qqYRHImbvk,1567
+halib/filesys.py,sha256=r1SftGKM7nyw6QbY5UmcueZLkXEIleSzhui7dQsosPw,2907
+halib/gdrive.py,sha256=-dx8hbknor1stIXhAzCnCfOHAPWm9a9z7L0epsOBHjA,6274
+halib/gdrive_mkdir.py,sha256=0Gq65i2YaWaGMdJqXX7zthhb0qZnNwrhdoHK6IBIVv8,1456
+halib/gdrive_test.py,sha256=e8yOYB5MZhdHbeLzjvTcOXbJNKk_DKNWURIRkKxHBMs,1378
+halib/jsonfile.py,sha256=9XfdFS1wcTdBRmpAGzVu2dVCcJp8RCpsSY16f6KUNts,480
+halib/listop.py,sha256=Vpa8_2fI0wySpB2-8sfTBkyi_A4FhoFVVvFiuvW8N64,339
+halib/plot.py,sha256=drLaHL_JVnIeFxsgYYXepJHIECzOmRImX40cWPJJshs,475
+halib/projectmake.py,sha256=EOj_CADQBUfQX35JNRdF3nZepnB_ruJFoQeXmwfhh0w,4045
+halib/textfile.py,sha256=EhVFrit-nRBJx18e6rtIqcE1cSbgsLnMXe_kdhi1EPI,399
+halib/videofile.py,sha256=HVRijfyPN_VJ4HnichUHNLGPWS-jhCHBhA2LHB5Xgjo,4734
+halib/filetype/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+halib/filetype/csvfile.py,sha256=mXqJYcEKyRDLbdkgy68yceJEPhfLV-L2bz5ljrOKwSM,4181
+halib/filetype/jsonfile.py,sha256=9LBdM7LV9QgJA1bzJRkq69qpWOP22HDXPGirqXTgSCw,480
+halib/filetype/textfile.py,sha256=QtuI5PdLxu4hAqSeafr3S8vCXwtvgipWV4Nkl7AzDYM,399
+halib/filetype/videofile.py,sha256=n4lRKhQH8uRxVrhvdw_NUfrnChocflv3LfGlGya0WUs,4761
+halib/online/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+halib/online/gdrive.py,sha256=tiVCJpJKci0wQ7H9iKti0K3AxsTp_q8e9uBSxPOBB6s,7794
+halib/online/gdrive_mkdir.py,sha256=Ur9-J1uzaM9qgpWF-d6md3gjkrFdPiMNLmbJtpQjXDI,1496
+halib/online/gdrive_test.py,sha256=njzCzd__exYp-0yPyGsznG0Sm3GfPWLicxgSmm4V37o,1362
+halib/online/projectmake.py,sha256=r9vPID23o_GG7TJy32PIUMvjtl2f-M9nlYJq63GL6ZA,4041
+halib/sys/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+halib/sys/cmd.py,sha256=b2x7JPcNnFjLGheIESVYvqAb-w2UwBM1PAwYxMZ5YjA,228
+halib/sys/filesys.py,sha256=ERpnELLDKJoTIIKf-AajgkY62nID4qmqmX5TkE95APU,2931
+halib-0.1.15.dist-info/LICENSE.txt,sha256=qZssdna4aETiR8znYsShUjidu-U4jUT9Q-EWNlZ9yBQ,1100
+halib-0.1.15.dist-info/METADATA,sha256=KPDZWFgV3SSkxWwsLKgyBkXDpXmzqqIPrHYUYuORoH0,1620
+halib-0.1.15.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+halib-0.1.15.dist-info/top_level.txt,sha256=7AD6PLaQTreE0Fn44mdZsoHBe_Zdd7GUmjsWPyQ7I-k,6
+halib-0.1.15.dist-info/RECORD,,
halib-0.1.11.dist-info/RECORD
DELETED
@@ -1,30 +0,0 @@
-halib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-halib/csvfile.py,sha256=Eoeni0NIbNG3mB5ESWAvNwhJxOjmCaPd1qqYRHImbvk,1567
-halib/filesys.py,sha256=r1SftGKM7nyw6QbY5UmcueZLkXEIleSzhui7dQsosPw,2907
-halib/gdrive.py,sha256=-dx8hbknor1stIXhAzCnCfOHAPWm9a9z7L0epsOBHjA,6274
-halib/gdrive_mkdir.py,sha256=0Gq65i2YaWaGMdJqXX7zthhb0qZnNwrhdoHK6IBIVv8,1456
-halib/gdrive_test.py,sha256=e8yOYB5MZhdHbeLzjvTcOXbJNKk_DKNWURIRkKxHBMs,1378
-halib/jsonfile.py,sha256=9XfdFS1wcTdBRmpAGzVu2dVCcJp8RCpsSY16f6KUNts,480
-halib/listop.py,sha256=Vpa8_2fI0wySpB2-8sfTBkyi_A4FhoFVVvFiuvW8N64,339
-halib/plot.py,sha256=3RmH9RDQN4DNrMVY-uDoNCId7eDfosLPD2Jz1zyqPHQ,439
-halib/projectmake.py,sha256=EOj_CADQBUfQX35JNRdF3nZepnB_ruJFoQeXmwfhh0w,4045
-halib/textfile.py,sha256=EhVFrit-nRBJx18e6rtIqcE1cSbgsLnMXe_kdhi1EPI,399
-halib/videofile.py,sha256=HVRijfyPN_VJ4HnichUHNLGPWS-jhCHBhA2LHB5Xgjo,4734
-halib/filetype/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-halib/filetype/csvfile.py,sha256=V3_y5KBs9_WsCCXYAEFC2hAXA5EbPE6dkpIqV97pnSA,4084
-halib/filetype/jsonfile.py,sha256=9XfdFS1wcTdBRmpAGzVu2dVCcJp8RCpsSY16f6KUNts,480
-halib/filetype/textfile.py,sha256=EhVFrit-nRBJx18e6rtIqcE1cSbgsLnMXe_kdhi1EPI,399
-halib/filetype/videofile.py,sha256=HVRijfyPN_VJ4HnichUHNLGPWS-jhCHBhA2LHB5Xgjo,4734
-halib/online/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-halib/online/gdrive.py,sha256=CdrGsLFQF3ojzOAoVR9xO13T_JIKiRkRIlq_mTbzJf8,7783
-halib/online/gdrive_mkdir.py,sha256=0Gq65i2YaWaGMdJqXX7zthhb0qZnNwrhdoHK6IBIVv8,1456
-halib/online/gdrive_test.py,sha256=e8yOYB5MZhdHbeLzjvTcOXbJNKk_DKNWURIRkKxHBMs,1378
-halib/online/projectmake.py,sha256=EOj_CADQBUfQX35JNRdF3nZepnB_ruJFoQeXmwfhh0w,4045
-halib/sys/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-halib/sys/cmd.py,sha256=b2x7JPcNnFjLGheIESVYvqAb-w2UwBM1PAwYxMZ5YjA,228
-halib/sys/filesys.py,sha256=TEvnjLPwH_7pflw10xv_4L7xC51-CcIjwuyFWALcD-s,2908
-halib-0.1.11.dist-info/LICENSE.txt,sha256=qZssdna4aETiR8znYsShUjidu-U4jUT9Q-EWNlZ9yBQ,1100
-halib-0.1.11.dist-info/METADATA,sha256=erdnxtdoQ1WOvRx6a810VOSFHO35t22GFjmvvi6Bawc,1488
-halib-0.1.11.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
-halib-0.1.11.dist-info/top_level.txt,sha256=7AD6PLaQTreE0Fn44mdZsoHBe_Zdd7GUmjsWPyQ7I-k,6
-halib-0.1.11.dist-info/RECORD,,
File without changes
|
File without changes
|
File without changes
|