halib 0.1.53__py3-none-any.whl → 0.1.55__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published in their public registries.
halib/filetype/csvfile.py CHANGED
@@ -49,7 +49,11 @@ def auto_wrap(cell, width=40):
 
 def fn_display_df(df, max_col_width=40):
     # Apply wrapping; tablefmt="psql" for PostgreSQL-like output
-    wrapped_df = df.applymap(lambda x: auto_wrap(x, width=max_col_width))
+    # wrapped_df = df.applymap(lambda x: auto_wrap(x, width=max_col_width))
+    # fix the FutureWarning raised by DataFrame.applymap
+    wrapped_df = df.apply(
+        lambda col: col.map(lambda x: auto_wrap(x, width=max_col_width))
+    )
     print(tabulate(wrapped_df, headers="keys", tablefmt="grid", numalign="right"))
 
 def showdf(df, display_mode="itable", in_jupyter=True, all_interactive=False):
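The change follows pandas' recommended migration away from DataFrame.applymap, which has emitted a FutureWarning since pandas 2.1: map each column as a Series instead. Below is a minimal standalone sketch of the same pattern; the toy DataFrame and the simplified auto_wrap stand-in are illustrative, not halib's actual code.

# Sketch of the apply + Series.map pattern used above (assumed toy data).
import textwrap
import pandas as pd

def auto_wrap(cell, width=40):
    # simplified stand-in for halib's auto_wrap
    return textwrap.fill(cell, width=width) if isinstance(cell, str) else cell

df = pd.DataFrame({
    "note": ["a fairly long sentence that should be wrapped for display", "short"],
    "score": [0.91, 0.88],
})
# df.applymap(...) would warn; mapping each column element-wise is equivalent
wrapped_df = df.apply(lambda col: col.map(lambda x: auto_wrap(x, width=20)))
print(wrapped_df)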
halib/research/perftb.py CHANGED
@@ -249,12 +249,15 @@ class PerfTB:
 
         current_our_method = -1  # Start with -1 to avoid index error
         exp_pattern_dict = {}
+        shown_legends = set()
         for row_idx, metric in enumerate(metric_list, start=1):
             metric_df = df[df["Metric"] == metric]
             list_exp = list(metric_df["Experiment"].unique())
             if custom_sort_exp_fn:
                 list_exp = custom_sort_exp_fn(list_exp)
             for exp in list_exp:
+                showlegend = exp not in shown_legends
+                shown_legends.add(exp)  # since it is a set, it will only keep unique values
                 should_highlight = (
                     custom_highlight_method_fn is not None and custom_highlight_method_fn(exp)
                 )
@@ -274,7 +277,7 @@ class PerfTB:
                         y=exp_df["Value"],
                         name=f"{exp}",
                         legendgroup=exp,
-                        showlegend=(row_idx == 1),  # Show legend only for the first row
+                        showlegend=showlegend,  # Show the legend only the first time this experiment appears
                         marker=dict(
                             color=color_map[exp],
                             pattern=(
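The previous rule, showlegend=(row_idx == 1), only emitted legend entries for traces in the first subplot row, so an experiment that first appears in a later row never got one; the new shown_legends set shows each experiment's entry on its first appearance, while legendgroup keeps that experiment's traces toggling together. A minimal standalone sketch of the pattern with hypothetical metrics and experiment names (not PerfTB's actual plotting code):

# Sketch: one legend entry per experiment across subplot rows (assumed data).
import plotly.graph_objects as go
from plotly.subplots import make_subplots

data = {  # hypothetical {metric: {experiment: value}} results
    "accuracy": {"baseline": 0.90, "ours": 0.93},
    "f1": {"baseline": 0.88, "ours": 0.92},
}
fig = make_subplots(rows=len(data), cols=1, subplot_titles=list(data))
shown_legends = set()
for row_idx, (metric, results) in enumerate(data.items(), start=1):
    for exp, value in results.items():
        showlegend = exp not in shown_legends  # legend only on first appearance
        shown_legends.add(exp)
        fig.add_trace(
            go.Bar(x=[metric], y=[value], name=exp,
                   legendgroup=exp, showlegend=showlegend),
            row=row_idx, col=1,
        )
fig.show()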
@@ -749,8 +752,19 @@ def test_mics() -> None:
         custom_sort_exp_fn=lambda exps: sorted(exps, reverse=True),
         open_plot=True,
     )
+def test_bench2():
+    perftb = PerfTB.from_csv(
+        "test/bench2.csv",
+        sep=";")
+    perftb.display()
+    perftb.plot(
+        save_path="zout/bench2_plot.svg",
+        title="Bench2 Performance Comparison",
+        open_plot=True,
+    )
 
 
 # Example usage
 if __name__ == "__main__":
-    test_mics()
+    # test_mics()
+    test_bench2()
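The new smoke test reads a semicolon-delimited benchmark file. test/bench2.csv is not shipped in the wheel, so its exact layout is unknown; the sketch below only illustrates what the sep=";" argument handles, using pandas directly on a hypothetical table.

# Sketch: reading a semicolon-delimited benchmark table (hypothetical layout).
import io
import pandas as pd

csv_text = "Experiment;accuracy;f1\nbaseline;0.90;0.88\nours;0.93;0.92\n"
df = pd.read_csv(io.StringIO(csv_text), sep=";")
print(df)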
halib-0.1.53.dist-info/METADATA → halib-0.1.55.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: halib
-Version: 0.1.53
+Version: 0.1.55
 Summary: Small library for common tasks
 Author: Hoang Van Ha
 Author-email: hoangvanhauit@gmail.com
@@ -45,7 +45,7 @@ Requires-Dist: dataclass-wizard
 
 Helper package for coding and automation
 
-**Version 0.1.53**
+**Version 0.1.55**
 
 + add `util/dataclass_util` to help dynamically create `dataclass` classes from a dictionary or a YAML file, including support for nested dataclasses. From there, we can use `dataclass_wizard` to create a list of `dataclass` classes with help from ChatGPT.
 
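The README bullet above refers to util/dataclass_util, whose API is not shown in this diff. A stdlib-only sketch of the underlying idea, generating a dataclass type from a dictionary at runtime, might look like this (names and fields are hypothetical):

# Sketch: build a dataclass dynamically from a dict (stdlib only, illustrative).
from dataclasses import make_dataclass

config = {"lr": 0.001, "batch_size": 32, "optimizer": "adam"}  # hypothetical config
TrainConfig = make_dataclass("TrainConfig", [(k, type(v)) for k, v in config.items()])
cfg = TrainConfig(**config)
print(cfg)  # TrainConfig(lr=0.001, batch_size=32, optimizer='adam')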
halib-0.1.53.dist-info/RECORD → halib-0.1.55.dist-info/RECORD CHANGED
@@ -17,7 +17,7 @@ halib/textfile.py,sha256=EhVFrit-nRBJx18e6rtIqcE1cSbgsLnMXe_kdhi1EPI,399
 halib/torchloader.py,sha256=-q9YE-AoHZE1xQX2dgNxdqtucEXYs4sQ22WXdl6EGfI,6500
 halib/videofile.py,sha256=NTLTZ-j6YD47duw2LN2p-lDQDglYFP1LpEU_0gzHLdI,4737
 halib/filetype/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-halib/filetype/csvfile.py,sha256=aH6A2Z9KhD9XVaf_iPc6C3vBDalRjgzY4acE69wYoks,5574
+halib/filetype/csvfile.py,sha256=YtJHYft72I4VmKo9QpMv6TPV_62chcwdAIyRRumJKOI,5727
 halib/filetype/jsonfile.py,sha256=9LBdM7LV9QgJA1bzJRkq69qpWOP22HDXPGirqXTgSCw,480
 halib/filetype/textfile.py,sha256=QtuI5PdLxu4hAqSeafr3S8vCXwtvgipWV4Nkl7AzDYM,399
 halib/filetype/videofile.py,sha256=4nfVAYYtoT76y8P4WYyxNna4Iv1o2iV6xaMcUzNPC4s,4736
@@ -30,7 +30,7 @@ halib/online/projectmake.py,sha256=Zrs96WgXvO4nIrwxnCOletL4aTBge-EoF0r7hpKO1w8,4
 halib/research/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 halib/research/benchquery.py,sha256=FuKnbWQtCEoRRtJAfN-zaN-jPiO_EzsakmTOMiqi7GQ,4626
 halib/research/dataset.py,sha256=QU0Hr5QFb8_XlvnOMgC9QJGIpwXAZ9lDd0RdQi_QRec,6743
-halib/research/perftb.py,sha256=BEwWIteVGtXtUkfboLYbIENxYL1vpVIVioU9eLrbqic,30007
+halib/research/perftb.py,sha256=vazU-dYBJhfc4sK4zFgxOvzeXGi-5TyPHCt20ItiWhY,30463
 halib/research/plot.py,sha256=-pDUk4z3C_GnyJ5zWmf-mGMdT4gaipVJWzIgcpIPiRk,9448
 halib/research/torchloader.py,sha256=yqUjcSiME6H5W210363HyRUrOi3ISpUFAFkTr1w4DCw,6503
 halib/research/wandb_op.py,sha256=YzLEqME5kIRxi3VvjFkW83wnFrsn92oYeqYuNwtYRkY,4188
@@ -44,8 +44,8 @@ halib/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 halib/utils/dataclass_util.py,sha256=aD5Ik9BRJzIEyfdlOCHmOddA7TM_TIAfapZfU0vMigE,1414
 halib/utils/listop.py,sha256=Vpa8_2fI0wySpB2-8sfTBkyi_A4FhoFVVvFiuvW8N64,339
 halib/utils/tele_noti.py,sha256=-4WXZelCA4W9BroapkRyIdUu9cUVrcJJhegnMs_WpGU,5928
-halib-0.1.53.dist-info/LICENSE.txt,sha256=qZssdna4aETiR8znYsShUjidu-U4jUT9Q-EWNlZ9yBQ,1100
-halib-0.1.53.dist-info/METADATA,sha256=TgyMGbS3YvF5Nm7LzNvbAZBp6P4Oc0Tk3cGQY4c6G4I,4675
-halib-0.1.53.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
-halib-0.1.53.dist-info/top_level.txt,sha256=7AD6PLaQTreE0Fn44mdZsoHBe_Zdd7GUmjsWPyQ7I-k,6
-halib-0.1.53.dist-info/RECORD,,
+halib-0.1.55.dist-info/LICENSE.txt,sha256=qZssdna4aETiR8znYsShUjidu-U4jUT9Q-EWNlZ9yBQ,1100
+halib-0.1.55.dist-info/METADATA,sha256=UTs61uUtKLYKPv7gxXjxpZ1i6tuenHKbvTz5GNRAHYQ,4675
+halib-0.1.55.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+halib-0.1.55.dist-info/top_level.txt,sha256=7AD6PLaQTreE0Fn44mdZsoHBe_Zdd7GUmjsWPyQ7I-k,6
+halib-0.1.55.dist-info/RECORD,,