BERATools 0.2.3-py3-none-any.whl → 0.2.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. beratools/__init__.py +8 -3
  2. beratools/core/{algo_footprint_rel.py → algo_canopy_footprint_exp.py} +176 -139
  3. beratools/core/algo_centerline.py +61 -77
  4. beratools/core/algo_common.py +48 -57
  5. beratools/core/algo_cost.py +18 -25
  6. beratools/core/algo_dijkstra.py +37 -45
  7. beratools/core/algo_line_grouping.py +100 -100
  8. beratools/core/algo_merge_lines.py +40 -8
  9. beratools/core/algo_split_with_lines.py +289 -304
  10. beratools/core/algo_vertex_optimization.py +25 -46
  11. beratools/core/canopy_threshold_relative.py +755 -0
  12. beratools/core/constants.py +8 -9
  13. beratools/{tools → core}/line_footprint_functions.py +411 -258
  14. beratools/core/logger.py +18 -2
  15. beratools/core/tool_base.py +17 -75
  16. beratools/gui/assets/BERALogo.ico +0 -0
  17. beratools/gui/assets/BERA_Splash.gif +0 -0
  18. beratools/gui/assets/BERA_WizardImage.png +0 -0
  19. beratools/gui/assets/beratools.json +475 -2171
  20. beratools/gui/bt_data.py +585 -234
  21. beratools/gui/bt_gui_main.py +129 -91
  22. beratools/gui/main.py +4 -7
  23. beratools/gui/tool_widgets.py +530 -354
  24. beratools/tools/__init__.py +0 -7
  25. beratools/tools/{line_footprint_absolute.py → canopy_footprint_absolute.py} +81 -56
  26. beratools/tools/canopy_footprint_exp.py +113 -0
  27. beratools/tools/centerline.py +30 -37
  28. beratools/tools/check_seed_line.py +127 -0
  29. beratools/tools/common.py +65 -586
  30. beratools/tools/{line_footprint_fixed.py → ground_footprint.py} +140 -117
  31. beratools/tools/line_footprint_relative.py +64 -35
  32. beratools/tools/tool_template.py +48 -40
  33. beratools/tools/vertex_optimization.py +20 -34
  34. beratools/utility/env_checks.py +53 -0
  35. beratools/utility/spatial_common.py +210 -0
  36. beratools/utility/tool_args.py +138 -0
  37. beratools-0.2.4.dist-info/METADATA +134 -0
  38. beratools-0.2.4.dist-info/RECORD +50 -0
  39. {beratools-0.2.3.dist-info → beratools-0.2.4.dist-info}/WHEEL +1 -1
  40. beratools-0.2.4.dist-info/entry_points.txt +3 -0
  41. beratools-0.2.4.dist-info/licenses/LICENSE +674 -0
  42. beratools/core/algo_tiler.py +0 -428
  43. beratools/gui/__init__.py +0 -11
  44. beratools/gui/batch_processing_dlg.py +0 -513
  45. beratools/gui/map_window.py +0 -162
  46. beratools/tools/Beratools_r_script.r +0 -1120
  47. beratools/tools/Ht_metrics.py +0 -116
  48. beratools/tools/batch_processing.py +0 -136
  49. beratools/tools/canopy_threshold_relative.py +0 -672
  50. beratools/tools/canopycostraster.py +0 -222
  51. beratools/tools/fl_regen_csf.py +0 -428
  52. beratools/tools/forest_line_attributes.py +0 -408
  53. beratools/tools/line_grouping.py +0 -45
  54. beratools/tools/ln_relative_metrics.py +0 -615
  55. beratools/tools/r_cal_lpi_elai.r +0 -25
  56. beratools/tools/r_generate_pd_focalraster.r +0 -101
  57. beratools/tools/r_interface.py +0 -80
  58. beratools/tools/r_point_density.r +0 -9
  59. beratools/tools/rpy_chm2trees.py +0 -86
  60. beratools/tools/rpy_dsm_chm_by.py +0 -81
  61. beratools/tools/rpy_dtm_by.py +0 -63
  62. beratools/tools/rpy_find_cellsize.py +0 -43
  63. beratools/tools/rpy_gnd_csf.py +0 -74
  64. beratools/tools/rpy_hummock_hollow.py +0 -85
  65. beratools/tools/rpy_hummock_hollow_raster.py +0 -71
  66. beratools/tools/rpy_las_info.py +0 -51
  67. beratools/tools/rpy_laz2las.py +0 -40
  68. beratools/tools/rpy_lpi_elai_lascat.py +0 -466
  69. beratools/tools/rpy_normalized_lidar_by.py +0 -56
  70. beratools/tools/rpy_percent_above_dbh.py +0 -80
  71. beratools/tools/rpy_points2trees.py +0 -88
  72. beratools/tools/rpy_vegcoverage.py +0 -94
  73. beratools/tools/tiler.py +0 -48
  74. beratools/tools/zonal_threshold.py +0 -144
  75. beratools-0.2.3.dist-info/METADATA +0 -108
  76. beratools-0.2.3.dist-info/RECORD +0 -74
  77. beratools-0.2.3.dist-info/entry_points.txt +0 -2
  78. beratools-0.2.3.dist-info/licenses/LICENSE +0 -22
beratools/core/logger.py CHANGED
@@ -12,6 +12,7 @@ Description:
 
 The purpose of this script is to provide logger functions.
 """
+
 import logging
 import logging.handlers
 import sys
@@ -20,12 +21,27 @@ from beratools.gui.bt_data import BTData
 
 bt = BTData()
 
+
 class NoParsingFilter(logging.Filter):
+    """
+    Filter to exclude log messages that start with "parsing".
+
+    This is useful to avoid cluttering the log with parsing-related messages.
+    """
+
     def filter(self, record):
         return not record.getMessage().startswith("parsing")
 
 
 class Logger(object):
+    """
+    Logger class to handle logging in the BERA Tools application.
+
+    This class sets up a logger that outputs to both the console and a file.
+    It allows for different logging levels for console and file outputs.
+    It also provides a method to print messages directly to the logger.
+    """
+
     def __init__(self, name, console_level=logging.INFO, file_level=logging.INFO):
         self.logger = logging.getLogger(name)
         self.name = name
@@ -52,7 +68,7 @@ class Logger(object):
         handler.flush()
 
     def setup_logger(self):
-        # Change root logger level from WARNING (default) to NOTSET
+        # Change root logger level from WARNING (default) to NOTSET
         # in order for all messages to be delegated.
         logging.getLogger().setLevel(logging.NOTSET)
         log_file = bt.get_logger_file_name(self.name)
@@ -66,7 +82,7 @@ class Logger(object):
 
         # Add file rotating handler, 5MB size limit, 5 backups
         rotating_handler = logging.handlers.RotatingFileHandler(
-            filename=log_file, maxBytes=5*1000*1000, backupCount=5
+            filename=log_file, maxBytes=5 * 1000 * 1000, backupCount=5
         )
         rotating_handler.setLevel(self.file_level)
 
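The logger.py changes above add class docstrings and formatter-style whitespace fixes around an existing console-plus-rotating-file setup. For reference, the following is a minimal sketch of that pattern using only the standard library; the helper name build_logger and its exact wiring are assumptions made here for illustration, not the package's actual Logger internals, although the 5 MB / 5-backup rotation and the NoParsingFilter behaviour mirror what the diff shows.

# Minimal sketch (standard library only) of the logging pattern visible above.
# build_logger and its exact wiring are assumed for illustration.
import logging
import logging.handlers


class NoParsingFilter(logging.Filter):
    def filter(self, record):
        # Drop records whose message starts with "parsing".
        return not record.getMessage().startswith("parsing")


def build_logger(name, log_file, console_level=logging.INFO, file_level=logging.INFO):
    # Root logger set to NOTSET so that level filtering is left to the handlers.
    logging.getLogger().setLevel(logging.NOTSET)
    logger = logging.getLogger(name)

    console = logging.StreamHandler()
    console.setLevel(console_level)
    console.addFilter(NoParsingFilter())

    # Rotating file handler: 5 MB per file, 5 backups, as in the diff.
    rotating = logging.handlers.RotatingFileHandler(
        filename=log_file, maxBytes=5 * 1000 * 1000, backupCount=5
    )
    rotating.setLevel(file_level)

    logger.addHandler(console)
    logger.addHandler(rotating)
    return logger


log = build_logger("centerline", "centerline.log")
log.info("tool started")
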
beratools/core/tool_base.py CHANGED
@@ -12,23 +12,18 @@ Description:
 
 The purpose of this script is to provide fundamental utilities for tools.
 """
+
 import concurrent.futures as con_futures
 import warnings
 from multiprocessing.pool import Pool
 
-# import dask.distributed as dask_dist
 import geopandas as gpd
 import pandas as pd
-
-# from dask import config as dask_cfg
 from tqdm.auto import tqdm
 
-# import ray
 import beratools.core.constants as bt_const
+from beratools.utility.tool_args import CallMode, determine_cpu_core_limit
 
-# settings for dask
-# dask_cfg.set({"distributed.scheduler.worker-ttl": None})
-# warnings.simplefilter("ignore", dask_dist.comm.core.CommClosedError)
 warnings.simplefilter(action="ignore", category=FutureWarning)
 
 
@@ -62,22 +57,31 @@ def result_is_valid(result):
 
 def print_msg(app_name, step, total_steps):
     print(f' "PROGRESS_LABEL {app_name} {step} of {total_steps}" ', flush=True)
-    print(f' %{step / total_steps * 100} ', flush=True)
+    print(f" %{step / total_steps * 100} ", flush=True)
+
+
+def parallel_mode(processes):
+    if processes <= 0:
+        processes = determine_cpu_core_limit()
+
+    if processes == 1:
+        return bt_const.ParallelMode.SEQUENTIAL, processes
+    else:
+        return bt_const.ParallelMode.MULTIPROCESSING, processes
 
 
 def execute_multiprocessing(
     in_func,
     in_data,
     app_name,
-    processes,
-    workers=1,
-    mode=bt_const.PARALLEL_MODE,
-    verbose=False,
-    scheduler_file="dask_scheduler.json",
+    processes=0,
+    call_mode=CallMode.CLI,
 ):
     out_result = []
     step = 0
     total_steps = len(in_data)
+    mode, processes = parallel_mode(processes)
+    verbose = True if call_mode == CallMode.GUI else False
 
     try:
@@ -85,7 +89,6 @@ def execute_multiprocessing(
             print("Using {} CPU cores".format(processes), flush=True)
 
             with Pool(processes) as pool:
-                # print(multiprocessing.active_children())
                 with tqdm(total=total_steps, disable=verbose) as pbar:
                     for result in pool.imap_unordered(in_func, in_data):
                         if result_is_valid(result):
@@ -128,67 +131,6 @@
                             print_msg(app_name, step, total_steps)
                         else:
                             pbar.update()
-        # elif mode == bt_const.ParallelMode.DASK:
-        #     print("Dask processing started...", flush=True)
-        #     print("Using {} CPU cores".format(processes), flush=True)
-        #     dask_client = dask_dist.Client(threads_per_worker=1, n_workers=processes)
-        #     print(f"Local Dask client: {dask_client}")
-        #     try:
-        #         print('start processing')
-        #         result = dask_client.map(in_func, in_data)
-        #         seq = dask_dist.as_completed(result)
-
-        #         with tqdm(total=total_steps, disable=verbose) as pbar:
-        #             for i in seq:
-        #                 if result_is_valid(result):
-        #                     out_result.append(i.result())
-
-        #                 step += 1
-        #                 if verbose:
-        #                     print_msg(app_name, step, total_steps)
-        #                 else:
-        #                     pbar.update()
-        #     except Exception as e:
-        #         print(f'ParallelMode.DASK: {e}')
-        #         dask_client.close()
-
-        #     dask_client.close()
-        # elif mode == bt_const.ParallelMode.SLURM:
-        #     print("Slurm Dask processing started...", flush=True)
-        #     dask_client = dask_dist.Client(scheduler_file=scheduler_file)
-        #     print(f"Slurm cluster Dask client: {dask_client}")
-        #     try:
-        #         print("start processing")
-        #         result = dask_client.map(in_func, in_data)
-        #         seq = dask_dist.as_completed(result)
-        #         dask_dist.progress(result)
-
-        #         for i in seq:
-        #             if result_is_valid(result):
-        #                 out_result.append(i.result())
-        #     except Exception as e:
-        #         print(f'ParallelMode.SLURM: {e}')
-        #         dask_client.close()
-
-        #     dask_client.close()
-        # ! important !
-        # comment temporarily, man enable later if need to use ray
-        # elif mode == bt_const.ParallelMode.RAY:
-        #     ray.init(log_to_driver=False)
-        #     process_single_line_ray = ray.remote(in_func)
-        #     result_ids = [process_single_line_ray.remote(item) for item in in_data]
-        #
-        #     while len(result_ids):
-        #         done_id, result_ids = ray.wait(result_ids)
-        #         result_item = ray.get(done_id[0])
-        #
-        #         if result_is_valid(result_item):
-        #             out_result.append(result_item)
-        #
-        #         step += 1
-        #         print_msg(app_name, step, total_steps)
-
-        #     ray.shutdown()
     except Exception as e:
         print(e)
         return None
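
With the commented-out Dask, Slurm, and Ray branches removed, execute_multiprocessing now derives its parallel mode internally: processes <= 0 is resolved through determine_cpu_core_limit(), processes == 1 falls back to ParallelMode.SEQUENTIAL, and anything larger uses the multiprocessing pool. A hypothetical caller of the new 0.2.4 signature could look like the sketch below; the worker function and input list are placeholders, and only the signature, CallMode, and the processes/call_mode semantics are taken from the diff.

# Hypothetical caller of the 0.2.4 execute_multiprocessing API shown above.
from beratools.core.tool_base import execute_multiprocessing
from beratools.utility.tool_args import CallMode


def process_item(item):
    # Worker must be a module-level function so multiprocessing can pickle it.
    return item * 2


if __name__ == "__main__":
    work_items = [1, 2, 3, 4]  # placeholder work items, one per task

    results = execute_multiprocessing(
        process_item,
        work_items,
        app_name="example_tool",
        processes=0,             # <= 0: core count picked by determine_cpu_core_limit()
        call_mode=CallMode.CLI,  # CallMode.GUI switches to PROGRESS_LABEL-style output
    )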