halib 0.2.1__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. halib/__init__.py +3 -3
  2. halib/common/__init__.py +0 -0
  3. halib/common/common.py +178 -0
  4. halib/common/rich_color.py +285 -0
  5. halib/filetype/csvfile.py +3 -9
  6. halib/filetype/ipynb.py +3 -5
  7. halib/filetype/jsonfile.py +0 -3
  8. halib/filetype/textfile.py +0 -1
  9. halib/filetype/videofile.py +91 -2
  10. halib/filetype/yamlfile.py +3 -3
  11. halib/online/projectmake.py +7 -6
  12. halib/online/tele_noti.py +165 -0
  13. halib/research/core/__init__.py +0 -0
  14. halib/research/core/base_config.py +144 -0
  15. halib/research/core/base_exp.py +157 -0
  16. halib/research/core/param_gen.py +108 -0
  17. halib/research/core/wandb_op.py +117 -0
  18. halib/research/data/__init__.py +0 -0
  19. halib/research/data/dataclass_util.py +41 -0
  20. halib/research/data/dataset.py +208 -0
  21. halib/research/data/torchloader.py +165 -0
  22. halib/research/perf/__init__.py +0 -0
  23. halib/research/perf/flop_calc.py +190 -0
  24. halib/research/perf/gpu_mon.py +58 -0
  25. halib/research/perf/perfcalc.py +363 -0
  26. halib/research/perf/perfmetrics.py +137 -0
  27. halib/research/perf/perftb.py +778 -0
  28. halib/research/perf/profiler.py +301 -0
  29. halib/research/viz/__init__.py +0 -0
  30. halib/research/viz/plot.py +754 -0
  31. halib/system/filesys.py +60 -20
  32. halib/system/path.py +73 -0
  33. halib/utils/dict.py +9 -0
  34. halib/utils/list.py +12 -0
  35. {halib-0.2.1.dist-info → halib-0.2.2.dist-info}/METADATA +4 -1
  36. {halib-0.2.1.dist-info → halib-0.2.2.dist-info}/RECORD +39 -14
  37. {halib-0.2.1.dist-info → halib-0.2.2.dist-info}/WHEEL +0 -0
  38. {halib-0.2.1.dist-info → halib-0.2.2.dist-info}/licenses/LICENSE.txt +0 -0
  39. {halib-0.2.1.dist-info → halib-0.2.2.dist-info}/top_level.txt +0 -0
halib/__init__.py CHANGED
@@ -56,8 +56,7 @@ from .filetype.yamlfile import load_yaml
56
56
  from .system import cmd
57
57
  from .system import filesys as fs
58
58
  from .filetype import csvfile
59
- from .cuda import tcuda
60
- from .common import (
59
+ from .common.common import (
61
60
  console,
62
61
  console_log,
63
62
  ConsoleLog,
@@ -65,6 +64,7 @@ from .common import (
65
64
  norm_str,
66
65
  pprint_box,
67
66
  pprint_local_path,
67
+ tcuda
68
68
  )
69
69
 
70
70
  # for log
@@ -76,7 +76,7 @@ from timebudget import timebudget
76
76
  import omegaconf
77
77
  from omegaconf import OmegaConf
78
78
  from omegaconf.dictconfig import DictConfig
79
- from .rich_color import rcolor_str, rcolor_palette, rcolor_palette_all, rcolor_all_str
79
+ from .common.rich_color import rcolor_str, rcolor_palette, rcolor_palette_all, rcolor_all_str
80
80
 
81
81
  # for visualization
82
82
  import seaborn as sns
halib/common/__init__.py ADDED (empty file, no content changes)
halib/common/common.py ADDED
@@ -0,0 +1,178 @@
1
+ import os
2
+ import re
3
+ import arrow
4
+ import importlib
5
+
6
+ import rich
7
+ from rich import print
8
+ from rich.panel import Panel
9
+ from rich.console import Console
10
+ from rich.pretty import pprint, Pretty
11
+
12
+ from pathlib import Path, PureWindowsPath
13
+
14
+
15
+ console = Console()
16
+
17
+
18
+ def seed_everything(seed=42):
19
+ import random
20
+ import numpy as np
21
+
22
+ random.seed(seed)
23
+ np.random.seed(seed)
24
+ # import torch if it is available
25
+ try:
26
+ import torch
27
+
28
+ torch.manual_seed(seed)
29
+ torch.cuda.manual_seed(seed)
30
+ torch.cuda.manual_seed_all(seed)
31
+ torch.backends.cudnn.deterministic = True
32
+ torch.backends.cudnn.benchmark = False
33
+ except ImportError:
34
+ pprint("torch not imported, skipping torch seed_everything")
35
+ pass
36
+
37
+
38
+ def now_str(sep_date_time="."):
39
+ assert sep_date_time in [
40
+ ".",
41
+ "_",
42
+ "-",
43
+ ], "sep_date_time must be one of '.', '_', or '-'"
44
+ now_string = arrow.now().format(f"YYYYMMDD{sep_date_time}HHmmss")
45
+ return now_string
46
+
47
+
48
+ def norm_str(in_str):
49
+ # Replace one or more whitespace characters with a single underscore
50
+ norm_string = re.sub(r"\s+", "_", in_str)
51
+ # Remove leading and trailing spaces
52
+ norm_string = norm_string.strip()
53
+ return norm_string
54
+
55
+
56
+ def pprint_box(obj, title="", border_style="green"):
57
+ """
58
+ Pretty print an object in a box.
59
+ """
60
+ rich.print(
61
+ Panel(Pretty(obj, expand_all=True), title=title, border_style=border_style)
62
+ )
63
+
64
+
65
+ def console_rule(msg, do_norm_msg=True, is_end_tag=False):
66
+ msg = norm_str(msg) if do_norm_msg else msg
67
+ if is_end_tag:
68
+ console.rule(f"</{msg}>")
69
+ else:
70
+ console.rule(f"<{msg}>")
71
+
72
+
73
+ def console_log(func):
74
+ def wrapper(*args, **kwargs):
75
+ console_rule(func.__name__)
76
+ result = func(*args, **kwargs)
77
+ console_rule(func.__name__, is_end_tag=True)
78
+ return result
79
+
80
+ return wrapper
81
+
82
+
83
+ class ConsoleLog:
84
+ def __init__(self, message):
85
+ self.message = message
86
+
87
+ def __enter__(self):
88
+ console_rule(self.message)
89
+ return self
90
+
91
+ def __exit__(self, exc_type, exc_value, traceback):
92
+ console_rule(self.message, is_end_tag=True)
93
+ if exc_type is not None:
94
+ print(f"An exception of type {exc_type} occurred.")
95
+ print(f"Exception message: {exc_value}")
96
+
97
+
98
+ def linux_to_wins_path(path: str) -> str:
99
+ """
100
+ Convert a Linux-style WSL path (/mnt/c/... or /mnt/d/...) to a Windows-style path (C:\...).
101
+ """
102
+ # Handle only /mnt/<drive>/... style
103
+ if (
104
+ path.startswith("/mnt/")
105
+ and len(path) > 6
106
+ and path[5].isalpha()
107
+ and path[6] == "/"
108
+ ):
109
+ drive = path[5].upper() # Extract drive letter
110
+ win_path = f"{drive}:{path[6:]}" # Replace "/mnt/c/" with "C:/"
111
+ else:
112
+ win_path = path # Return unchanged if not a WSL-style path
113
+ # Normalize to Windows-style backslashes
114
+ return str(PureWindowsPath(win_path))
115
+
116
+
117
+ def pprint_local_path(
118
+ local_path: str, get_wins_path: bool = False, tag: str = ""
119
+ ) -> str:
120
+ """
121
+ Pretty-print a local path with emoji and clickable file:// URI.
122
+
123
+ Args:
124
+ local_path: Path to file or directory (Linux or Windows style).
125
+ get_wins_path: If True on Linux, convert WSL-style path to Windows style before printing.
126
+ tag: Optional console log tag.
127
+
128
+ Returns:
129
+ The file URI string.
130
+ """
131
+ p = Path(local_path).resolve()
132
+ type_str = "📄" if p.is_file() else "📁" if p.is_dir() else "❓"
133
+
134
+ if get_wins_path and os.name == "posix":
135
+ # Try WSL → Windows conversion
136
+ converted = linux_to_wins_path(str(p))
137
+ if converted != str(p): # Conversion happened
138
+ file_uri = str(PureWindowsPath(converted).as_uri())
139
+ else:
140
+ file_uri = p.as_uri()
141
+ else:
142
+ file_uri = p.as_uri()
143
+
144
+ content_str = f"{type_str} [link={file_uri}]{file_uri}[/link]"
145
+
146
+ if tag:
147
+ with ConsoleLog(tag):
148
+ console.print(content_str)
149
+ else:
150
+ console.print(content_str)
151
+
152
+ return file_uri
153
+
154
+
155
+ def tcuda():
156
+ NOT_INSTALLED = "Not Installed"
157
+ GPU_AVAILABLE = "GPU(s) Available"
158
+ ls_lib = ["torch", "tensorflow"]
159
+ lib_stats = {lib: NOT_INSTALLED for lib in ls_lib}
160
+ for lib in ls_lib:
161
+ spec = importlib.util.find_spec(lib)
162
+ if spec:
163
+ if lib == "torch":
164
+ import torch
165
+
166
+ lib_stats[lib] = str(torch.cuda.device_count()) + " " + GPU_AVAILABLE
167
+ elif lib == "tensorflow":
168
+ import tensorflow as tf
169
+
170
+ lib_stats[lib] = (
171
+ str(len(tf.config.list_physical_devices("GPU")))
172
+ + " "
173
+ + GPU_AVAILABLE
174
+ )
175
+ console.rule("<CUDA Library Stats>")
176
+ pprint(lib_stats)
177
+ console.rule("</CUDA Library Stats>")
178
+ return lib_stats
@@ -0,0 +1,285 @@
1
+ from rich.console import Console
2
+ from rich.pretty import pprint
3
+ from rich.table import Table
4
+ from rich.text import Text
5
+ from rich.panel import Panel
6
+
7
+ # List of colors
8
+ # ! https://rich.readthedocs.io/en/stable/appendix/colors.html
9
+ all_colors = [
10
+ "black",
11
+ "red",
12
+ "green",
13
+ "yellow",
14
+ "blue",
15
+ "magenta",
16
+ "cyan",
17
+ "white",
18
+ "bright_black",
19
+ "bright_red",
20
+ "bright_green",
21
+ "bright_yellow",
22
+ "bright_blue",
23
+ "bright_magenta",
24
+ "bright_cyan",
25
+ "bright_white",
26
+ "grey0",
27
+ "navy_blue",
28
+ "dark_blue",
29
+ "blue3",
30
+ "blue1",
31
+ "dark_green",
32
+ "deep_sky_blue4",
33
+ "dodger_blue3",
34
+ "dodger_blue2",
35
+ "green4",
36
+ "spring_green4",
37
+ "turquoise4",
38
+ "deep_sky_blue3",
39
+ "dodger_blue1",
40
+ "dark_cyan",
41
+ "light_sea_green",
42
+ "deep_sky_blue2",
43
+ "deep_sky_blue1",
44
+ "green3",
45
+ "spring_green3",
46
+ "cyan3",
47
+ "dark_turquoise",
48
+ "turquoise2",
49
+ "green1",
50
+ "spring_green2",
51
+ "spring_green1",
52
+ "medium_spring_green",
53
+ "cyan2",
54
+ "cyan1",
55
+ "purple4",
56
+ "purple3",
57
+ "blue_violet",
58
+ "grey37",
59
+ "medium_purple4",
60
+ "slate_blue3",
61
+ "royal_blue1",
62
+ "chartreuse4",
63
+ "pale_turquoise4",
64
+ "steel_blue",
65
+ "steel_blue3",
66
+ "cornflower_blue",
67
+ "dark_sea_green4",
68
+ "cadet_blue",
69
+ "sky_blue3",
70
+ "chartreuse3",
71
+ "sea_green3",
72
+ "aquamarine3",
73
+ "medium_turquoise",
74
+ "steel_blue1",
75
+ "sea_green2",
76
+ "sea_green1",
77
+ "dark_slate_gray2",
78
+ "dark_red",
79
+ "dark_magenta",
80
+ "orange4",
81
+ "light_pink4",
82
+ "plum4",
83
+ "medium_purple3",
84
+ "slate_blue1",
85
+ "wheat4",
86
+ "grey53",
87
+ "light_slate_grey",
88
+ "medium_purple",
89
+ "light_slate_blue",
90
+ "yellow4",
91
+ "dark_sea_green",
92
+ "light_sky_blue3",
93
+ "sky_blue2",
94
+ "chartreuse2",
95
+ "pale_green3",
96
+ "dark_slate_gray3",
97
+ "sky_blue1",
98
+ "chartreuse1",
99
+ "light_green",
100
+ "aquamarine1",
101
+ "dark_slate_gray1",
102
+ "deep_pink4",
103
+ "medium_violet_red",
104
+ "dark_violet",
105
+ "purple",
106
+ "medium_orchid3",
107
+ "medium_orchid",
108
+ "dark_goldenrod",
109
+ "rosy_brown",
110
+ "grey63",
111
+ "medium_purple2",
112
+ "medium_purple1",
113
+ "dark_khaki",
114
+ "navajo_white3",
115
+ "grey69",
116
+ "light_steel_blue3",
117
+ "light_steel_blue",
118
+ "dark_olive_green3",
119
+ "dark_sea_green3",
120
+ "light_cyan3",
121
+ "light_sky_blue1",
122
+ "green_yellow",
123
+ "dark_olive_green2",
124
+ "pale_green1",
125
+ "dark_sea_green2",
126
+ "pale_turquoise1",
127
+ "red3",
128
+ "deep_pink3",
129
+ "magenta3",
130
+ "dark_orange3",
131
+ "indian_red",
132
+ "hot_pink3",
133
+ "hot_pink2",
134
+ "orchid",
135
+ "orange3",
136
+ "light_salmon3",
137
+ "light_pink3",
138
+ "pink3",
139
+ "plum3",
140
+ "violet",
141
+ "gold3",
142
+ "light_goldenrod3",
143
+ "tan",
144
+ "misty_rose3",
145
+ "thistle3",
146
+ "plum2",
147
+ "yellow3",
148
+ "khaki3",
149
+ "light_yellow3",
150
+ "grey84",
151
+ "light_steel_blue1",
152
+ "yellow2",
153
+ "dark_olive_green1",
154
+ "dark_sea_green1",
155
+ "honeydew2",
156
+ "light_cyan1",
157
+ "red1",
158
+ "deep_pink2",
159
+ "deep_pink1",
160
+ "magenta2",
161
+ "magenta1",
162
+ "orange_red1",
163
+ "indian_red1",
164
+ "hot_pink",
165
+ "medium_orchid1",
166
+ "dark_orange",
167
+ "salmon1",
168
+ "light_coral",
169
+ "pale_violet_red1",
170
+ "orchid2",
171
+ "orchid1",
172
+ "orange1",
173
+ "sandy_brown",
174
+ "light_salmon1",
175
+ "light_pink1",
176
+ "pink1",
177
+ "plum1",
178
+ "gold1",
179
+ "light_goldenrod2",
180
+ "navajo_white1",
181
+ "misty_rose1",
182
+ "thistle1",
183
+ "yellow1",
184
+ "light_goldenrod1",
185
+ "khaki1",
186
+ "wheat1",
187
+ "cornsilk1",
188
+ "grey100",
189
+ "grey3",
190
+ "grey7",
191
+ "grey11",
192
+ "grey15",
193
+ "grey19",
194
+ "grey23",
195
+ "grey27",
196
+ "grey30",
197
+ "grey35",
198
+ "grey39",
199
+ "grey42",
200
+ "grey46",
201
+ "grey50",
202
+ "grey54",
203
+ "grey58",
204
+ "grey62",
205
+ "grey66",
206
+ "grey70",
207
+ "grey74",
208
+ "grey78",
209
+ "grey82",
210
+ "grey85",
211
+ "grey89",
212
+ "grey93",
213
+ ]
214
+
215
+ basic_colors = [
216
+ "black",
217
+ "red",
218
+ "green",
219
+ "yellow",
220
+ "blue",
221
+ "magenta",
222
+ "cyan",
223
+ "white",
224
+ "bright_black",
225
+ "bright_red",
226
+ "bright_green",
227
+ "bright_yellow",
228
+ "bright_blue",
229
+ "bright_magenta",
230
+ "bright_cyan",
231
+ "bright_white",
232
+ ]
233
+
234
+ def rcolor_all_str():
235
+ pprint(all_colors)
236
+
237
+ def rcolor_basic_str():
238
+ pprint(basic_colors)
239
+
240
+ def rcolor_str(in_str, color="white"):
241
+ assert color in all_colors, f"color must be one of {all_colors}"
242
+ return f"[{color}]{in_str}[/{color}]"
243
+
244
+ def rcolor_palette(color_list):
245
+ # make sure all colors are valid (in all_colors)
246
+ for color in color_list:
247
+ assert (
248
+ color in all_colors
249
+ ), f"color must be a valid color. call <rcolor_all_str()> or <rcolor_palette_all()> to see all valid colors"
250
+ # Initialize console
251
+ console = Console()
252
+
253
+ # Create a table with horizontal lines and six columns
254
+ table = Table(show_header=True, header_style="bold magenta", show_lines=True)
255
+
256
+ # Define the columns
257
+ table.add_column("Color Name 1", style="bold")
258
+ table.add_column("Sample 1", style="bold")
259
+ table.add_column("Color Name 2", style="bold")
260
+ table.add_column("Sample 2", style="bold")
261
+ table.add_column("Color Name 3", style="bold")
262
+ table.add_column("Sample 3", style="bold")
263
+
264
+ # Adjust the number of rows needed for the table
265
+ num_colors = len(color_list)
266
+ num_rows = (num_colors + 2) // 3 # Ceiling division to ensure all colors fit
267
+
268
+ # Add rows to the table
269
+ for i in range(num_rows):
270
+ color1 = color_list[i * 3] if i * 3 < num_colors else ""
271
+ color2 = color_list[i * 3 + 1] if i * 3 + 1 < num_colors else ""
272
+ color3 = color_list[i * 3 + 2] if i * 3 + 2 < num_colors else ""
273
+ filled_rect1 = Text(" " * 10, style=f"on {color1}") if color1 else ""
274
+ filled_rect2 = Text(" " * 10, style=f"on {color2}") if color2 else ""
275
+ filled_rect3 = Text(" " * 10, style=f"on {color3}") if color3 else ""
276
+ table.add_row(color1, filled_rect1, color2, filled_rect2, color3, filled_rect3)
277
+
278
+ # Print the table
279
+ console.print(table)
280
+
281
+ def rcolor_palette_basic():
282
+ rcolor_palette(basic_colors)
283
+
284
+ def rcolor_palette_all():
285
+ rcolor_palette(all_colors)
halib/filetype/csvfile.py CHANGED
@@ -1,19 +1,13 @@
1
+ import csv
2
+ import textwrap
1
3
  import pandas as pd
4
+ import pygwalker as pyg
2
5
  from tabulate import tabulate
3
6
  from rich.console import Console
4
- from rich import print as rprint
5
- from rich import inspect
6
- from rich.pretty import pprint
7
- from tqdm import tqdm
8
- from loguru import logger
9
7
  from itables import init_notebook_mode, show
10
- import pygwalker as pyg
11
- import textwrap
12
- import csv
13
8
 
14
9
  console = Console()
15
10
 
16
-
17
11
  def read(file, separator=","):
18
12
  df = pd.read_csv(file, separator)
19
13
  return df
halib/filetype/ipynb.py CHANGED
@@ -1,10 +1,8 @@
1
- from contextlib import contextmanager
2
- from pathlib import Path
3
-
4
1
  import ipynbname
2
+ from pathlib import Path
3
+ from contextlib import contextmanager
5
4
 
6
- from ..common import console, now_str
7
-
5
+ from ..common.common import now_str
8
6
 
9
7
  @contextmanager
10
8
  def gen_ipynb_name(
@@ -1,17 +1,14 @@
1
1
  import json
2
2
 
3
-
4
3
  def read(file):
5
4
  with open(file) as f:
6
5
  data = json.load(f)
7
6
  return data
8
7
 
9
-
10
8
  def write(data_dict, outfile):
11
9
  with open(outfile, "w") as json_file:
12
10
  json.dump(data_dict, json_file)
13
11
 
14
-
15
12
  def beautify(json_str):
16
13
  formatted_json = json_str
17
14
  try:
@@ -4,7 +4,6 @@ def read_line_by_line(file_path):
4
4
  lines = [line.rstrip() for line in lines]
5
5
  return lines
6
6
 
7
-
8
7
  def write(lines, outfile, append=False):
9
8
  mode = "a" if append else "w"
10
9
  with open(outfile, mode, encoding="utf-8") as f:
@@ -1,11 +1,100 @@
1
+ import os
1
2
  import cv2
2
- import textfile
3
3
  import enlighten
4
+
4
5
  from enum import Enum
5
- from ..system import filesys
6
6
  from tube_dl import Youtube, Playlist
7
7
  from moviepy.video.io.ffmpeg_tools import ffmpeg_extract_subclip
8
8
 
9
+ from . import textfile
10
+ from . import csvfile
11
+ from ..system import filesys
12
+
13
+ class VideoUtils:
14
+ @staticmethod
15
+ def _default_meta_extractor(video_path):
16
+ """Default video metadata extractor function."""
17
+ # Open the video file
18
+ cap = cv2.VideoCapture(video_path)
19
+
20
+ # Check if the video was opened successfully
21
+ if not cap.isOpened():
22
+ print(f"Error: Could not open video file {video_path}")
23
+ return None
24
+
25
+ # Get the frame count
26
+ frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
27
+
28
+ # Get the FPS
29
+ fps = cap.get(cv2.CAP_PROP_FPS)
30
+
31
+ # get frame size
32
+ width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
33
+ height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
34
+
35
+ # Release the video capture object
36
+ cap.release()
37
+
38
+ meta_dict = {
39
+ "video_path": video_path,
40
+ "width": width,
41
+ "height": height,
42
+ "frame_count": frame_count,
43
+ "fps": fps,
44
+ }
45
+ return meta_dict
46
+
47
+ @staticmethod
48
+ def get_video_meta_dict(video_path, meta_dict_extractor_func=None):
49
+ assert os.path.exists(video_path), f"Video file {video_path} does not exist"
50
+ if meta_dict_extractor_func and callable(meta_dict_extractor_func):
51
+ assert (
52
+ meta_dict_extractor_func.__code__.co_argcount == 1
53
+ ), "meta_dict_extractor_func must take exactly one argument (video_path)"
54
+ meta_dict = meta_dict_extractor_func(video_path)
55
+ assert isinstance(
56
+ meta_dict, dict
57
+ ), "meta_dict_extractor_func must return a dictionary"
58
+ assert "video_path" in meta_dict, "meta_dict must contain 'video_path'"
59
+ else:
60
+ meta_dict = VideoUtils._default_meta_extractor(video_path=video_path)
61
+ return meta_dict
62
+
63
+ @staticmethod
64
+ def get_video_dir_meta_df(
65
+ video_dir,
66
+ video_exts=[".mp4", ".avi", ".mov", ".mkv"],
67
+ search_recursive=False,
68
+ csv_outfile=None,
69
+ ):
70
+ assert os.path.exists(video_dir), f"Video directory {video_dir} does not exist"
71
+ video_files = filesys.filter_files_by_extension(
72
+ video_dir, video_exts, recursive=search_recursive
73
+ )
74
+ assert (
75
+ len(video_files) > 0
76
+ ), f"No video files found in {video_dir} with extensions {video_exts}"
77
+ video_meta_list = []
78
+ for vfile in video_files:
79
+ meta_dict = VideoUtils.get_video_meta_dict(vfile)
80
+ if meta_dict:
81
+ video_meta_list.append(meta_dict)
82
+ dfmk = csvfile.DFCreator()
83
+ columns = list(video_meta_list[0].keys())
84
+ assert len(columns) > 0, "No video metadata found"
85
+ assert "video_path" in columns, "video_path column not found in video metadata"
86
+ # move video_path to the first column
87
+ columns.remove("video_path")
88
+ columns.insert(0, "video_path")
89
+ dfmk.create_table("video_meta", columns)
90
+ rows = [[meta[col] for col in columns] for meta in video_meta_list]
91
+ dfmk.insert_rows("video_meta", rows)
92
+ dfmk.fill_table_from_row_pool("video_meta")
93
+
94
+ if csv_outfile:
95
+ dfmk["video_meta"].to_csv(csv_outfile, index=False, sep=";")
96
+ return dfmk["video_meta"].copy()
97
+
9
98
 
10
99
  class VideoResolution(Enum):
11
100
  VR480p = "720x480"
@@ -2,15 +2,15 @@ import time
2
2
  import networkx as nx
3
3
  from rich import inspect
4
4
  from rich.pretty import pprint
5
- from omegaconf import OmegaConf
6
5
  from rich.console import Console
6
+
7
+ from omegaconf import OmegaConf
7
8
  from argparse import ArgumentParser
8
9
 
9
- from ..research.mics import *
10
+ from ..system.path import *
10
11
 
11
12
  console = Console()
12
13
 
13
-
14
14
  def _load_yaml_recursively(
15
15
  yaml_file, yaml_files=[], share_nx_graph=nx.DiGraph(), log_info=False
16
16
  ):
@@ -1,17 +1,18 @@
1
1
  # coding=utf-8
2
- import json
2
+
3
3
  import os
4
+ import json
5
+ import pycurl
4
6
  import shutil
5
- from argparse import ArgumentParser
6
- from io import BytesIO
7
+ import certifi
7
8
  import subprocess
9
+ from io import BytesIO
10
+
11
+ from argparse import ArgumentParser
8
12
 
9
- import certifi
10
- import pycurl
11
13
  from ..filetype import jsonfile
12
14
  from ..system import filesys
13
15
 
14
-
15
16
  def get_curl(url, user_and_pass, verbose=True):
16
17
  c = pycurl.Curl()
17
18
  c.setopt(pycurl.VERBOSE, verbose)