birdnet-analyzer 2.0.0__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. birdnet_analyzer/__init__.py +9 -8
  2. birdnet_analyzer/analyze/__init__.py +5 -5
  3. birdnet_analyzer/analyze/__main__.py +3 -4
  4. birdnet_analyzer/analyze/cli.py +25 -25
  5. birdnet_analyzer/analyze/core.py +241 -245
  6. birdnet_analyzer/analyze/utils.py +692 -701
  7. birdnet_analyzer/audio.py +368 -372
  8. birdnet_analyzer/cli.py +709 -707
  9. birdnet_analyzer/config.py +242 -242
  10. birdnet_analyzer/eBird_taxonomy_codes_2021E.json +25279 -25279
  11. birdnet_analyzer/embeddings/__init__.py +3 -4
  12. birdnet_analyzer/embeddings/__main__.py +3 -3
  13. birdnet_analyzer/embeddings/cli.py +12 -13
  14. birdnet_analyzer/embeddings/core.py +69 -70
  15. birdnet_analyzer/embeddings/utils.py +179 -193
  16. birdnet_analyzer/evaluation/__init__.py +196 -195
  17. birdnet_analyzer/evaluation/__main__.py +3 -3
  18. birdnet_analyzer/evaluation/assessment/__init__.py +0 -0
  19. birdnet_analyzer/evaluation/assessment/metrics.py +388 -0
  20. birdnet_analyzer/evaluation/assessment/performance_assessor.py +409 -0
  21. birdnet_analyzer/evaluation/assessment/plotting.py +379 -0
  22. birdnet_analyzer/evaluation/preprocessing/__init__.py +0 -0
  23. birdnet_analyzer/evaluation/preprocessing/data_processor.py +631 -0
  24. birdnet_analyzer/evaluation/preprocessing/utils.py +98 -0
  25. birdnet_analyzer/gui/__init__.py +19 -23
  26. birdnet_analyzer/gui/__main__.py +3 -3
  27. birdnet_analyzer/gui/analysis.py +175 -174
  28. birdnet_analyzer/gui/assets/arrow_down.svg +4 -4
  29. birdnet_analyzer/gui/assets/arrow_left.svg +4 -4
  30. birdnet_analyzer/gui/assets/arrow_right.svg +4 -4
  31. birdnet_analyzer/gui/assets/arrow_up.svg +4 -4
  32. birdnet_analyzer/gui/assets/gui.css +28 -28
  33. birdnet_analyzer/gui/assets/gui.js +93 -93
  34. birdnet_analyzer/gui/embeddings.py +619 -620
  35. birdnet_analyzer/gui/evaluation.py +795 -813
  36. birdnet_analyzer/gui/localization.py +75 -68
  37. birdnet_analyzer/gui/multi_file.py +245 -246
  38. birdnet_analyzer/gui/review.py +519 -527
  39. birdnet_analyzer/gui/segments.py +191 -191
  40. birdnet_analyzer/gui/settings.py +128 -129
  41. birdnet_analyzer/gui/single_file.py +267 -269
  42. birdnet_analyzer/gui/species.py +95 -95
  43. birdnet_analyzer/gui/train.py +696 -698
  44. birdnet_analyzer/gui/utils.py +810 -808
  45. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_af.txt +6522 -6522
  46. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_ar.txt +6522 -6522
  47. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_bg.txt +6522 -6522
  48. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_ca.txt +6522 -6522
  49. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_cs.txt +6522 -6522
  50. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_da.txt +6522 -6522
  51. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_de.txt +6522 -6522
  52. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_el.txt +6522 -6522
  53. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_en_uk.txt +6522 -6522
  54. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_es.txt +6522 -6522
  55. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_fi.txt +6522 -6522
  56. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_fr.txt +6522 -6522
  57. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_he.txt +6522 -6522
  58. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_hr.txt +6522 -6522
  59. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_hu.txt +6522 -6522
  60. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_in.txt +6522 -6522
  61. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_is.txt +6522 -6522
  62. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_it.txt +6522 -6522
  63. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_ja.txt +6522 -6522
  64. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_ko.txt +6522 -6522
  65. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_lt.txt +6522 -6522
  66. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_ml.txt +6522 -6522
  67. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_nl.txt +6522 -6522
  68. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_no.txt +6522 -6522
  69. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_pl.txt +6522 -6522
  70. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_pt_BR.txt +6522 -6522
  71. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_pt_PT.txt +6522 -6522
  72. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_ro.txt +6522 -6522
  73. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_ru.txt +6522 -6522
  74. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_sk.txt +6522 -6522
  75. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_sl.txt +6522 -6522
  76. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_sr.txt +6522 -6522
  77. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_sv.txt +6522 -6522
  78. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_th.txt +6522 -6522
  79. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_tr.txt +6522 -6522
  80. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_uk.txt +6522 -6522
  81. birdnet_analyzer/labels/V2.4/BirdNET_GLOBAL_6K_V2.4_Labels_zh.txt +6522 -6522
  82. birdnet_analyzer/lang/de.json +334 -334
  83. birdnet_analyzer/lang/en.json +334 -334
  84. birdnet_analyzer/lang/fi.json +334 -334
  85. birdnet_analyzer/lang/fr.json +334 -334
  86. birdnet_analyzer/lang/id.json +334 -334
  87. birdnet_analyzer/lang/pt-br.json +334 -334
  88. birdnet_analyzer/lang/ru.json +334 -334
  89. birdnet_analyzer/lang/se.json +334 -334
  90. birdnet_analyzer/lang/tlh.json +334 -334
  91. birdnet_analyzer/lang/zh_TW.json +334 -334
  92. birdnet_analyzer/model.py +1212 -1243
  93. birdnet_analyzer/playground.py +5 -0
  94. birdnet_analyzer/search/__init__.py +3 -3
  95. birdnet_analyzer/search/__main__.py +3 -3
  96. birdnet_analyzer/search/cli.py +11 -12
  97. birdnet_analyzer/search/core.py +78 -78
  98. birdnet_analyzer/search/utils.py +107 -111
  99. birdnet_analyzer/segments/__init__.py +3 -3
  100. birdnet_analyzer/segments/__main__.py +3 -3
  101. birdnet_analyzer/segments/cli.py +13 -14
  102. birdnet_analyzer/segments/core.py +81 -78
  103. birdnet_analyzer/segments/utils.py +383 -394
  104. birdnet_analyzer/species/__init__.py +3 -3
  105. birdnet_analyzer/species/__main__.py +3 -3
  106. birdnet_analyzer/species/cli.py +13 -14
  107. birdnet_analyzer/species/core.py +35 -35
  108. birdnet_analyzer/species/utils.py +74 -75
  109. birdnet_analyzer/train/__init__.py +3 -3
  110. birdnet_analyzer/train/__main__.py +3 -3
  111. birdnet_analyzer/train/cli.py +13 -14
  112. birdnet_analyzer/train/core.py +113 -113
  113. birdnet_analyzer/train/utils.py +877 -847
  114. birdnet_analyzer/translate.py +133 -104
  115. birdnet_analyzer/utils.py +426 -419
  116. {birdnet_analyzer-2.0.0.dist-info → birdnet_analyzer-2.0.1.dist-info}/METADATA +137 -129
  117. birdnet_analyzer-2.0.1.dist-info/RECORD +125 -0
  118. {birdnet_analyzer-2.0.0.dist-info → birdnet_analyzer-2.0.1.dist-info}/WHEEL +1 -1
  119. {birdnet_analyzer-2.0.0.dist-info → birdnet_analyzer-2.0.1.dist-info}/licenses/LICENSE +18 -18
  120. birdnet_analyzer-2.0.0.dist-info/RECORD +0 -117
  121. {birdnet_analyzer-2.0.0.dist-info → birdnet_analyzer-2.0.1.dist-info}/entry_points.txt +0 -0
  122. {birdnet_analyzer-2.0.0.dist-info → birdnet_analyzer-2.0.1.dist-info}/top_level.txt +0 -0
birdnet_analyzer/evaluation/preprocessing/utils.py
@@ -0,0 +1,98 @@
+ """
+ Utility Functions for Data Processing Tasks
+
+ This module provides helper functions to handle common data processing tasks, such as:
+ - Extracting recording filenames from file paths or filenames.
+ - Reading and concatenating text files from a specified directory.
+
+ It is designed to work seamlessly with pandas and file system operations.
+ """
+
+ import os
+
+ import pandas as pd
+
+
+ def extract_recording_filename(path_column: pd.Series) -> pd.Series:
+ """
+ Extract the recording filename from a path column.
+
+ This function processes a pandas Series containing file paths and extracts the base filename
+ (without the extension) for each path.
+
+ Args:
+ path_column (pd.Series): A pandas Series containing file paths.
+
+ Returns:
+ pd.Series: A pandas Series containing the extracted recording filenames.
+ """
+ # Apply a lambda function to extract the base filename without extension
+ return path_column.apply(lambda x: os.path.splitext(os.path.basename(x))[0] if isinstance(x, str) else x)
+
+
+ def extract_recording_filename_from_filename(filename_series: pd.Series) -> pd.Series:
+ """
+ Extract the recording filename from a filename Series.
+
+ This function processes a pandas Series containing filenames and extracts the base filename
+ (without the extension) for each.
+
+ Args:
+ filename_series (pd.Series): A pandas Series containing filenames.
+
+ Returns:
+ pd.Series: A pandas Series containing the extracted recording filenames.
+ """
+ # Apply a lambda function to split filenames and remove the extension
+ return filename_series.apply(lambda x: x.split(".")[0] if isinstance(x, str) else x)
+
+
+ def read_and_concatenate_files_in_directory(directory_path: str) -> pd.DataFrame:
+ """
+ Read and concatenate all .txt files in a directory into a single DataFrame.
+
+ This function scans the specified directory for all .txt files, reads each file into a DataFrame,
+ appends a 'source_file' column containing the filename, and concatenates all DataFrames into one.
+ If the files have inconsistent columns, a ValueError is raised.
+
+ Args:
+ directory_path (str): Path to the directory containing the .txt files.
+
+ Returns:
+ pd.DataFrame: A concatenated DataFrame containing the data from all .txt files,
+ or an empty DataFrame if no files are found.
+
+ Raises:
+ ValueError: If the columns in the files are inconsistent.
+ """
+ df_list: list[pd.DataFrame] = [] # List to hold individual DataFrames
+ columns_set = None # To ensure consistency in column names
+
+ # Iterate through each file in the directory
+ for filename in sorted(os.listdir(directory_path)):
+ if filename.endswith(".txt"):
+ filepath = os.path.join(directory_path, filename) # Construct the full file path
+
+ try:
+ # Attempt to read the file as a tab-separated values file with UTF-8 encoding
+ df = pd.read_csv(filepath, sep="\t", encoding="utf-8")
+ except UnicodeDecodeError:
+ # Fallback to 'latin-1' encoding if UTF-8 fails
+ df = pd.read_csv(filepath, sep="\t", encoding="latin-1")
+
+ # Check for column consistency across files
+ if columns_set is None:
+ columns_set = set(df.columns) # Initialize with the first file's columns
+ elif set(df.columns) != columns_set:
+ raise ValueError(f"File {filename} has different columns than the previous files.")
+
+ # Add a column to indicate the source file for traceability
+ df["source_file"] = filename
+
+ # Append the DataFrame to the list
+ df_list.append(df)
+
+ # Concatenate all DataFrames if any were processed, else return an empty DataFrame
+ if df_list:
+ return pd.concat(df_list, ignore_index=True)
+ return pd.DataFrame() # Return an empty DataFrame if no .txt files were found
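
The helpers added above are self-contained, so a short usage sketch may help when reading this hunk (illustrative only; the "results/" directory and the "recording" column name are hypothetical, not part of the package):

    from birdnet_analyzer.evaluation.preprocessing.utils import (
        extract_recording_filename_from_filename,
        read_and_concatenate_files_in_directory,
    )

    # Hypothetical folder of tab-separated BirdNET result tables (*.txt)
    predictions = read_and_concatenate_files_in_directory("results/")

    if not predictions.empty:
        # 'source_file' is appended by the reader; strip its extension to get the recording name
        predictions["recording"] = extract_recording_filename_from_filename(predictions["source_file"])
        print(predictions.head())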
birdnet_analyzer/gui/__init__.py
@@ -1,23 +1,19 @@
- def main():
- import birdnet_analyzer.gui.multi_file as mfa
- import birdnet_analyzer.gui.review as review
- import birdnet_analyzer.gui.segments as gs
- import birdnet_analyzer.gui.single_file as sfa
- import birdnet_analyzer.gui.species as species
- import birdnet_analyzer.gui.train as train
- import birdnet_analyzer.gui.utils as gu
- import birdnet_analyzer.gui.embeddings as embeddings
- import birdnet_analyzer.gui.evaluation as evaluation
-
- gu.open_window(
- [
- sfa.build_single_analysis_tab,
- mfa.build_multi_analysis_tab,
- train.build_train_tab,
- gs.build_segments_tab,
- review.build_review_tab,
- species.build_species_tab,
- embeddings.build_embeddings_tab,
- evaluation.build_evaluation_tab,
- ]
- )
+ def main():
+ import birdnet_analyzer.gui.multi_file as mfa
+ import birdnet_analyzer.gui.segments as gs
+ import birdnet_analyzer.gui.single_file as sfa
+ import birdnet_analyzer.gui.utils as gu
+ from birdnet_analyzer.gui import embeddings, evaluation, review, species, train
+
+ gu.open_window(
+ [
+ sfa.build_single_analysis_tab,
+ mfa.build_multi_analysis_tab,
+ train.build_train_tab,
+ gs.build_segments_tab,
+ review.build_review_tab,
+ species.build_species_tab,
+ embeddings.build_embeddings_tab,
+ evaluation.build_evaluation_tab,
+ ]
+ )
birdnet_analyzer/gui/__main__.py
@@ -1,3 +1,3 @@
- from birdnet_analyzer.gui import main
-
- main()
+ from birdnet_analyzer.gui import main
+
+ main()
birdnet_analyzer/gui/analysis.py
@@ -1,174 +1,175 @@
- import concurrent.futures
- import os
- from pathlib import Path
-
- import gradio as gr
-
- import birdnet_analyzer.config as cfg
- import birdnet_analyzer.gui.utils as gu
- import birdnet_analyzer.gui.localization as loc
- import birdnet_analyzer.model as model
-
-
- from birdnet_analyzer.analyze.utils import analyze_file, combine_results, save_analysis_params
-
- SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
- ORIGINAL_LABELS_FILE = str(Path(SCRIPT_DIR).parent / cfg.LABELS_FILE)
-
-
- def analyze_file_wrapper(entry):
- """
- Wrapper function for analyzing a file.
-
- Args:
- entry (tuple): A tuple where the first element is the file path and the
- remaining elements are arguments to be passed to the
- analyze.analyzeFile function.
-
- Returns:
- tuple: A tuple where the first element is the file path and the second
- element is the result of the analyze.analyzeFile function.
- """
- return (entry[0], analyze_file(entry))
-
-
- def run_analysis(
- input_path: str,
- output_path: str | None,
- use_top_n: bool,
- top_n: int,
- confidence: float,
- sensitivity: float,
- overlap: float,
- merge_consecutive: int,
- audio_speed: float,
- fmin: int,
- fmax: int,
- species_list_choice: str,
- species_list_file,
- lat: float,
- lon: float,
- week: int,
- use_yearlong: bool,
- sf_thresh: float,
- custom_classifier_file,
- output_types: str,
- combine_tables: bool,
- locale: str,
- batch_size: int,
- threads: int,
- input_dir: str,
- skip_existing: bool,
- save_params: bool,
- progress: gr.Progress | None,
- ):
- """Starts the analysis.
-
- Args:
- input_path: Either a file or directory.
- output_path: The output path for the result, if None the input_path is used
- confidence: The selected minimum confidence.
- sensitivity: The selected sensitivity.
- overlap: The selected segment overlap.
- merge_consecutive: The number of consecutive segments to merge into one.
- audio_speed: The selected audio speed.
- fmin: The selected minimum bandpass frequency.
- fmax: The selected maximum bandpass frequency.
- species_list_choice: The choice for the species list.
- species_list_file: The selected custom species list file.
- lat: The selected latitude.
- lon: The selected longitude.
- week: The selected week of the year.
- use_yearlong: Use yearlong instead of week.
- sf_thresh: The threshold for the predicted species list.
- custom_classifier_file: Custom classifier to be used.
- output_type: The type of result to be generated.
- output_filename: The filename for the combined output.
- locale: The translation to be used.
- batch_size: The number of samples in a batch.
- threads: The number of threads to be used.
- input_dir: The input directory.
- progress: The gradio progress bar.
- """
- if progress is not None:
- progress(0, desc=f"{loc.localize('progress-preparing')} ...")
-
- from birdnet_analyzer.analyze.core import _set_params
-
- locale = locale.lower()
- custom_classifier = custom_classifier_file if species_list_choice == gu._CUSTOM_CLASSIFIER else None
- slist = species_list_file if species_list_choice == gu._CUSTOM_SPECIES else None
- lat = lat if species_list_choice == gu._PREDICT_SPECIES else -1
- lon = lon if species_list_choice == gu._PREDICT_SPECIES else -1
- week = -1 if use_yearlong else week
-
- flist = _set_params(
- input=input_dir if input_dir else input_path,
- min_conf=confidence,
- custom_classifier=custom_classifier,
- sensitivity=min(1.25, max(0.75, float(sensitivity))),
- locale=locale,
- overlap=max(0.0, min(2.9, float(overlap))),
- merge_consecutive=max(1, int(merge_consecutive)),
- audio_speed=max(0.1, 1.0 / (audio_speed * -1)) if audio_speed < 0 else max(1.0, float(audio_speed)),
- fmin=max(0, min(cfg.SIG_FMAX, int(fmin))),
- fmax=max(cfg.SIG_FMIN, min(cfg.SIG_FMAX, int(fmax))),
- bs=max(1, int(batch_size)),
- combine_results=combine_tables,
- rtype=output_types,
- skip_existing_results=skip_existing,
- threads=max(1, int(threads)),
- labels_file=ORIGINAL_LABELS_FILE,
- sf_thresh=sf_thresh,
- lat=lat,
- lon=lon,
- week=week,
- slist=slist,
- top_n=top_n if use_top_n else None,
- output=output_path,
- )
-
- if species_list_choice == gu._CUSTOM_CLASSIFIER:
- if custom_classifier_file is None:
- raise gr.Error(loc.localize("validation-no-custom-classifier-selected"))
-
- model.reset_custom_classifier()
-
- gu.validate(cfg.FILE_LIST, loc.localize("validation-no-audio-files-found"))
-
- result_list = []
-
- if progress is not None:
- progress(0, desc=f"{loc.localize('progress-starting')} ...")
-
- # Analyze files
- if cfg.CPU_THREADS < 2:
- for entry in flist:
- result_list.append(analyze_file_wrapper(entry))
- else:
- with concurrent.futures.ProcessPoolExecutor(max_workers=cfg.CPU_THREADS) as executor:
- futures = (executor.submit(analyze_file_wrapper, arg) for arg in flist)
- for i, f in enumerate(concurrent.futures.as_completed(futures), start=1):
- if progress is not None:
- progress((i, len(flist)), total=len(flist), unit="files")
- result = f.result()
-
- result_list.append(result)
-
- # Combine results?
- if cfg.COMBINE_RESULTS:
- combine_list = [[r[1] for r in result_list if r[0] == i[0]][0] for i in flist]
- print(f"Combining results, writing to {cfg.OUTPUT_PATH}...", end="", flush=True)
- combine_results(combine_list)
- print("done!", flush=True)
-
- if save_params:
- save_analysis_params(os.path.join(cfg.OUTPUT_PATH, cfg.ANALYSIS_PARAMS_FILENAME))
-
- return (
- [[os.path.relpath(r[0], input_dir), bool(r[1])] for r in result_list]
- if input_dir
- else result_list[0][1]["csv"]
- if result_list[0][1]
- else None
- )
+ import concurrent.futures
+ import os
+ from pathlib import Path
+
+ import gradio as gr
+
+ import birdnet_analyzer.config as cfg
+ import birdnet_analyzer.gui.localization as loc
+ import birdnet_analyzer.gui.utils as gu
+ from birdnet_analyzer import model
+ from birdnet_analyzer.analyze.utils import (
+ analyze_file,
+ combine_results,
+ save_analysis_params,
+ )
+
+ SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
+ ORIGINAL_LABELS_FILE = str(Path(SCRIPT_DIR).parent / cfg.LABELS_FILE)
+
+
+ def analyze_file_wrapper(entry):
+ """
+ Wrapper function for analyzing a file.
+
+ Args:
+ entry (tuple): A tuple where the first element is the file path and the
+ remaining elements are arguments to be passed to the
+ analyze.analyzeFile function.
+
+ Returns:
+ tuple: A tuple where the first element is the file path and the second
+ element is the result of the analyze.analyzeFile function.
+ """
+ return (entry[0], analyze_file(entry))
+
+
+ def run_analysis(
+ input_path: str,
+ output_path: str | None,
+ use_top_n: bool,
+ top_n: int,
+ confidence: float,
+ sensitivity: float,
+ overlap: float,
+ merge_consecutive: int,
+ audio_speed: float,
+ fmin: int,
+ fmax: int,
+ species_list_choice: str,
+ species_list_file,
+ lat: float,
+ lon: float,
+ week: int,
+ use_yearlong: bool,
+ sf_thresh: float,
+ custom_classifier_file,
+ output_types: str,
+ combine_tables: bool,
+ locale: str,
+ batch_size: int,
+ threads: int,
+ input_dir: str,
+ skip_existing: bool,
+ save_params: bool,
+ progress: gr.Progress | None,
+ ):
+ """Starts the analysis.
+
+ Args:
+ input_path: Either a file or directory.
+ output_path: The output path for the result, if None the input_path is used
+ confidence: The selected minimum confidence.
+ sensitivity: The selected sensitivity.
+ overlap: The selected segment overlap.
+ merge_consecutive: The number of consecutive segments to merge into one.
+ audio_speed: The selected audio speed.
+ fmin: The selected minimum bandpass frequency.
+ fmax: The selected maximum bandpass frequency.
+ species_list_choice: The choice for the species list.
+ species_list_file: The selected custom species list file.
+ lat: The selected latitude.
+ lon: The selected longitude.
+ week: The selected week of the year.
+ use_yearlong: Use yearlong instead of week.
+ sf_thresh: The threshold for the predicted species list.
+ custom_classifier_file: Custom classifier to be used.
+ output_type: The type of result to be generated.
+ output_filename: The filename for the combined output.
+ locale: The translation to be used.
+ batch_size: The number of samples in a batch.
+ threads: The number of threads to be used.
+ input_dir: The input directory.
+ progress: The gradio progress bar.
+ """
+ if progress is not None:
+ progress(0, desc=f"{loc.localize('progress-preparing')} ...")
+
+ from birdnet_analyzer.analyze.core import _set_params
+
+ locale = locale.lower()
+ custom_classifier = custom_classifier_file if species_list_choice == gu._CUSTOM_CLASSIFIER else None
+ slist = species_list_file if species_list_choice == gu._CUSTOM_SPECIES else None
+ lat = lat if species_list_choice == gu._PREDICT_SPECIES else -1
+ lon = lon if species_list_choice == gu._PREDICT_SPECIES else -1
+ week = -1 if use_yearlong else week
+
+ flist = _set_params(
+ audio_input=input_dir if input_dir else input_path,
+ min_conf=confidence,
+ custom_classifier=custom_classifier,
+ sensitivity=min(1.25, max(0.75, float(sensitivity))),
+ locale=locale,
+ overlap=max(0.0, min(2.9, float(overlap))),
+ merge_consecutive=max(1, int(merge_consecutive)),
+ audio_speed=max(0.1, 1.0 / (audio_speed * -1)) if audio_speed < 0 else max(1.0, float(audio_speed)),
+ fmin=max(0, min(cfg.SIG_FMAX, int(fmin))),
+ fmax=max(cfg.SIG_FMIN, min(cfg.SIG_FMAX, int(fmax))),
+ bs=max(1, int(batch_size)),
+ combine_results=combine_tables,
+ rtype=output_types,
+ skip_existing_results=skip_existing,
+ threads=max(1, int(threads)),
+ labels_file=ORIGINAL_LABELS_FILE,
+ sf_thresh=sf_thresh,
+ lat=lat,
+ lon=lon,
+ week=week,
+ slist=slist,
+ top_n=top_n if use_top_n else None,
+ output=output_path,
+ )
+
+ if species_list_choice == gu._CUSTOM_CLASSIFIER:
+ if custom_classifier_file is None:
+ raise gr.Error(loc.localize("validation-no-custom-classifier-selected"))
+
+ model.reset_custom_classifier()
+
+ gu.validate(cfg.FILE_LIST, loc.localize("validation-no-audio-files-found"))
+
+ result_list = []
+
+ if progress is not None:
+ progress(0, desc=f"{loc.localize('progress-starting')} ...")
+
+ # Analyze files
+ if cfg.CPU_THREADS < 2:
+ result_list.extend(analyze_file_wrapper(entry) for entry in flist)
+ else:
+ with concurrent.futures.ProcessPoolExecutor(max_workers=cfg.CPU_THREADS) as executor:
+ futures = (executor.submit(analyze_file_wrapper, arg) for arg in flist)
+ for i, f in enumerate(concurrent.futures.as_completed(futures), start=1):
+ if progress is not None:
+ progress((i, len(flist)), total=len(flist), unit="files")
+ result = f.result()
+
+ result_list.append(result)
+
+ # Combine results?
+ if cfg.COMBINE_RESULTS:
+ combine_list = [[r[1] for r in result_list if r[0] == i[0]][0] for i in flist]
+ print(f"Combining results, writing to {cfg.OUTPUT_PATH}...", end="", flush=True)
+ combine_results(combine_list)
+ print("done!", flush=True)
+
+ if save_params:
+ save_analysis_params(os.path.join(cfg.OUTPUT_PATH, cfg.ANALYSIS_PARAMS_FILENAME))
+
+ return (
+ [[os.path.relpath(r[0], input_dir), bool(r[1])] for r in result_list]
+ if input_dir
+ else result_list[0][1]["csv"]
+ if result_list[0][1]
+ else None
+ )
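
One line in this hunk that is easy to misread is the audio_speed clamp passed to _set_params; a standalone sketch of just that mapping (the function name is ours, for illustration only):

    def clamp_audio_speed(audio_speed: float) -> float:
        # Negative slider values mean slow-down: -2 becomes 0.5x, floored at 0.1x
        if audio_speed < 0:
            return max(0.1, 1.0 / (audio_speed * -1))
        # Non-negative values mean speed-up and are never allowed below 1.0x
        return max(1.0, float(audio_speed))

    assert clamp_audio_speed(-2) == 0.5
    assert clamp_audio_speed(0.5) == 1.0
    assert clamp_audio_speed(3) == 3.0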
birdnet_analyzer/gui/assets/arrow_down.svg
@@ -1,4 +1,4 @@
- <svg width="50" height="50" viewBox="0 0 50 50" xmlns="http://www.w3.org/2000/svg">
- <rect x="5" y="5" width="40" height="40" rx="5" ry="5" fill="#ccc" stroke="#888" stroke-width="2"/>
- <polygon points="25,36 12,24 20,24 20,16 30,16 30,24 38,24" fill="black"/>
- </svg>
+ <svg width="50" height="50" viewBox="0 0 50 50" xmlns="http://www.w3.org/2000/svg">
+ <rect x="5" y="5" width="40" height="40" rx="5" ry="5" fill="#ccc" stroke="#888" stroke-width="2"/>
+ <polygon points="25,36 12,24 20,24 20,16 30,16 30,24 38,24" fill="black"/>
+ </svg>
birdnet_analyzer/gui/assets/arrow_left.svg
@@ -1,4 +1,4 @@
- <svg width="50" height="50" viewBox="0 0 50 50" xmlns="http://www.w3.org/2000/svg">
- <rect x="5" y="5" width="40" height="40" rx="5" ry="5" fill="#ccc" stroke="#888" stroke-width="2"/>
- <polygon points="14,25 26,12 26,20 34,20 34,30 26,30 26,38" fill="black"/>
- </svg>
+ <svg width="50" height="50" viewBox="0 0 50 50" xmlns="http://www.w3.org/2000/svg">
+ <rect x="5" y="5" width="40" height="40" rx="5" ry="5" fill="#ccc" stroke="#888" stroke-width="2"/>
+ <polygon points="14,25 26,12 26,20 34,20 34,30 26,30 26,38" fill="black"/>
+ </svg>
birdnet_analyzer/gui/assets/arrow_right.svg
@@ -1,4 +1,4 @@
- <svg width="50" height="50" viewBox="0 0 50 50" xmlns="http://www.w3.org/2000/svg">
- <rect x="5" y="5" width="40" height="40" rx="5" ry="5" fill="#ccc" stroke="#888" stroke-width="2"/>
- <polygon points="36,25 24,12 24,20 16,20 16,30 24,30 24,38" fill="black"/>
- </svg>
+ <svg width="50" height="50" viewBox="0 0 50 50" xmlns="http://www.w3.org/2000/svg">
+ <rect x="5" y="5" width="40" height="40" rx="5" ry="5" fill="#ccc" stroke="#888" stroke-width="2"/>
+ <polygon points="36,25 24,12 24,20 16,20 16,30 24,30 24,38" fill="black"/>
+ </svg>
birdnet_analyzer/gui/assets/arrow_up.svg
@@ -1,4 +1,4 @@
- <svg width="50" height="50" viewBox="0 0 50 50" xmlns="http://www.w3.org/2000/svg">
- <rect x="5" y="5" width="40" height="40" rx="5" ry="5" fill="#ccc" stroke="#888" stroke-width="2"/>
- <polygon points="25,14 12,26 20,26 20,34 30,34 30,26 38,26" fill="black"/>
- </svg>
+ <svg width="50" height="50" viewBox="0 0 50 50" xmlns="http://www.w3.org/2000/svg">
+ <rect x="5" y="5" width="40" height="40" rx="5" ry="5" fill="#ccc" stroke="#888" stroke-width="2"/>
+ <polygon points="25,14 12,26 20,26 20,34 30,34 30,26 38,26" fill="black"/>
+ </svg>
birdnet_analyzer/gui/assets/gui.css
@@ -1,29 +1,29 @@
- footer {
- display: none !important;
- }
-
- #single_file_audio,
- #single_file_audio * {
- max-height: 81.6px;
- min-height: 0;
- }
-
- #update-available a {
- text-decoration: none;
- }
-
- :root {
- --block-title-text-color: var(--neutral-800);
- --block-info-text-color: var(--neutral-500);
- }
-
- #single-file-output td:first-of-type span {
- text-align: center;
- }
-
- #embeddings-search-results {
- max-height: 1107px;
- overflow: auto;
- flex-wrap: nowrap;
- padding-right: 5px;
+ footer {
+ display: none !important;
+ }
+
+ #single_file_audio,
+ #single_file_audio * {
+ max-height: 81.6px;
+ min-height: 0;
+ }
+
+ #update-available a {
+ text-decoration: none;
+ }
+
+ :root {
+ --block-title-text-color: var(--neutral-800);
+ --block-info-text-color: var(--neutral-500);
+ }
+
+ #single-file-output td:first-of-type span {
+ text-align: center;
+ }
+
+ #embeddings-search-results {
+ max-height: 1107px;
+ overflow: auto;
+ flex-wrap: nowrap;
+ padding-right: 5px;
  }