pycoustic-0.1.12-py3-none-any.whl → pycoustic-0.1.13-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pycoustic-0.1.12.dist-info → pycoustic-0.1.13.dist-info}/METADATA +3 -3
- pycoustic-0.1.13.dist-info/RECORD +9 -0
- pycoustic/pycoustic_gui_app-ai.py +0 -635
- pycoustic/pycoustic_streamlit_gpt5.py +0 -421
- pycoustic/streamlit-ai.py +0 -492
- pycoustic/streamlit-new.py +0 -142
- pycoustic/streamlit_pycoustic_gpt5_dead.py +0 -234
- pycoustic-0.1.12.dist-info/RECORD +0 -14
- {pycoustic-0.1.12.dist-info → pycoustic-0.1.13.dist-info}/WHEEL +0 -0
pycoustic/pycoustic_streamlit_gpt5.py
@@ -1,421 +0,0 @@
-# Python
-# streamlit_pycoustic_app.py
-import io
-import json
-import tempfile
-from datetime import time
-
-import pandas as pd
-import streamlit as st
-
-# Import pycoustic classes
-from pycoustic import Log, Survey
-
-# python
-import os
-from typing import Optional
-
-class _SafeNoop:
-    """
-    Minimal no-op proxy that safely absorbs attribute access and calls.
-    Prevents AttributeError like "'str' object has no attribute ...".
-    """
-    def __init__(self, name: str = "object"):
-        self._name = name
-
-    def __getattr__(self, item):
-        return _SafeNoop(f"{self._name}.{item}")
-
-    def __call__(self, *args, **kwargs):
-        return None
-
-    def __repr__(self) -> str:
-        return f"<_SafeNoop {self._name}>"
-
-def _sanitize_session_state() -> None:
-    """
-    Replace any string left in common survey/log slots with a safe no-op proxy.
-    This avoids downstream AttributeError when code expects objects.
-    """
-    try:
-        import streamlit as st  # type: ignore
-    except Exception:
-        return
-
-    for key in ("survey", "log_obj", "log"):
-        if key in st.session_state:
-            val = st.session_state.get(key)
-            if isinstance(val, str):
-                # Preserve original label if useful for UI
-                st.session_state[f"{key}_name"] = val
-                # Install a no-op proxy in place of the string
-                st.session_state[key] = _SafeNoop(key)
-
-# Run sanitization as early as possible
-_sanitize_session_state()
-
-def _resolve_survey_like() -> Optional[object]:
-    """
-    Return the first available survey-like object from session state,
-    or None if nothing usable is present.
-    """
-    try:
-        import streamlit as st  # type: ignore
-    except Exception:
-        return None
-
-    for key in ("survey", "log_obj", "log"):
-        if key in st.session_state:
-            return st.session_state.get(key)
-    return None
-
-def _coerce_hm_tuple(val) -> tuple[int, int]:
-    """
-    Coerces an input into a (hour, minute) tuple.
-    Accepts tuples, lists, or 'HH:MM' / 'H:M' strings.
-    """
-    if isinstance(val, (tuple, list)) and len(val) == 2:
-        return int(val[0]), int(val[1])
-    if isinstance(val, str):
-        parts = val.strip().split(":")
-        if len(parts) == 2:
-            return int(parts[0]), int(parts[1])
-    # Fallback to 00:00 if invalid
-    return 0, 0
-
-def _set_periods_on_survey(day_tuple, eve_tuple, night_tuple) -> None:
-    """
-    Accepts (hour, minute) tuples and updates the Survey periods, if available.
-    Safely no-ops if a proper survey object isn't present.
-    """
-    survey = _resolve_survey_like()
-    if survey is None:
-        return
-
-    times = {
-        "day": _coerce_hm_tuple(day_tuple),
-        "evening": _coerce_hm_tuple(eve_tuple),
-        "night": _coerce_hm_tuple(night_tuple),
-    }
-
-    setter = getattr(survey, "set_periods", None)
-    if callable(setter):
-        try:
-            setter(times=times)
-        except Exception:
-            # Swallow to keep the UI responsive even if backend rejects values
-            pass
-
-def _looks_like_path(s: str) -> bool:
-    s = s.strip()
-    return (
-        s.lower().endswith(".csv")
-        or os.sep in s
-        or "/" in s
-        or "\\" in s
-    )
-
-def _usable_acoustic_obj(obj) -> bool:
-    # Consider it usable if it exposes either API used elsewhere.
-    return hasattr(obj, "set_periods") or hasattr(obj, "_leq_by_date")
-
-def _coerce_or_clear_state_key(st, key: str) -> None:
-    """
-    If st.session_state[key] is a string:
-      - If it looks like a CSV path, try to build a Log object from it.
-      - Otherwise, move it to key_name and clear the object slot to avoid attribute errors.
-    """
-    if key not in st.session_state:
-        return
-
-    val = st.session_state.get(key)
-
-    # Already usable object
-    if _usable_acoustic_obj(val):
-        return
-
-    # Try to coerce from a CSV-like path string
-    if isinstance(val, str):
-        if _looks_like_path(val):
-            try:
-                import pycoustic as pc  # Lazy import
-                st.session_state[key] = pc.Log(path=val.strip())
-                return
-            except Exception:
-                # Fall through to clearing if coercion fails
-                pass
-
-        # Preserve label for UI, clear the object slot to avoid attribute errors
-        st.session_state[f"{key}_name"] = val
-        st.session_state[key] = None
-
-def _normalize_session_state() -> None:
-    try:
-        import streamlit as st  # type: ignore
-    except Exception:
-        return
-
-    # Coerce or clear common object keys
-    for k in ("survey", "log_obj", "log"):
-        _coerce_or_clear_state_key(st, k)
-
-    # Promote first usable object into the canonical "survey" slot
-    if not _usable_acoustic_obj(st.session_state.get("survey")):
-        for k in ("log_obj", "log"):
-            candidate = st.session_state.get(k)
-            if _usable_acoustic_obj(candidate):
-                st.session_state["survey"] = candidate
-                break
-
-# Run normalization early so downstream code doesn't encounter attribute errors
-_normalize_session_state()
-
-
-# --------------- Helpers ---------------
-
-def save_upload_to_tmp(uploaded_file) -> str:
-    """Persist an uploaded CSV to a temporary file and return its path."""
-    # Create a persistent temporary file (delete later on reset)
-    with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as tmp:
-        tmp.write(uploaded_file.getbuffer())
-        return tmp.name
-
-
-def build_survey(log_map: dict, times_kwarg: dict | None = None) -> Survey:
-    """Create a Survey, attach logs, and optionally call set_periods(times=...)."""
-    survey = Survey()
-
-    # Attach logs to the Survey (simple, direct assignment to internal storage)
-    # If a public adder method exists, prefer that; fallback to internal attribute.
-    if hasattr(survey, "add_log"):
-        for key, lg in log_map.items():
-            try:
-                survey.add_log(key, lg)  # type: ignore[attr-defined]
-            except Exception:
-                # Fallback if signature differs
-                setattr(survey, "_logs", log_map)
-                break
-    else:
-        setattr(survey, "_logs", log_map)
-
-    # Apply periods if provided
-    if times_kwarg is not None:
-        try:
-            survey.set_periods(times=times_kwarg)
-        except Exception as e:
-            st.warning(f"set_periods failed with provided times: {e}")
-
-    return survey
-
-
-def flatten_columns(df: pd.DataFrame) -> pd.DataFrame:
-    """Flatten MultiIndex columns for nicer display in Streamlit."""
-    if isinstance(df.columns, pd.MultiIndex):
-        flat = df.copy()
-        flat.columns = [" / ".join(map(str, c)) for c in df.columns.to_flat_index()]
-        return flat
-    return df
-
-
-def parse_extra_kwargs(raw: str) -> dict:
-    """Parse a JSON dict from a text area. Returns {} on error."""
-    if not raw or not raw.strip():
-        return {}
-    try:
-        parsed = json.loads(raw)
-        if not isinstance(parsed, dict):
-            st.warning("Extra kwargs JSON should be an object/dict; ignoring.")
-            return {}
-        return parsed
-    except Exception as e:
-        st.warning(f"Unable to parse extra kwargs JSON. Ignoring. Error: {e}")
-        return {}
-
-
-# --------------- Streamlit App ---------------
-
-st.set_page_config(page_title="pycoustic GUI", layout="wide")
-st.title("pycoustic Streamlit GUI")
-
-# Initialize session state
-ss = st.session_state
-ss.setdefault("tmp_paths", [])  # List[str] for cleanup
-ss.setdefault("logs", {})  # Dict[str, Log]
-ss.setdefault("survey", None)  # Survey or None
-ss.setdefault("resi_df", None)  # Cached summary
-ss.setdefault("periods_times", {  # Default times for set_periods()
-    "day": (7, 0),
-    "evening": (23, 0),
-    "night": (23, 0),
-})
-ss.setdefault("lmax_n", 5)
-ss.setdefault("lmax_t", 30)
-ss.setdefault("extra_kwargs_raw", "{}")
-
-with st.expander("1) Load CSV data", expanded=True):
-    st.write("Upload one or more CSV files to create Log objects for a single Survey.")
-
-    uploaded = st.file_uploader(
-        "Select CSV files",
-        type=["csv"],
-        accept_multiple_files=True,
-        help="Each CSV should match the expected pycoustic format."
-    )
-
-    if uploaded:
-        st.caption("Assign a position name for each file (defaults to base filename).")
-
-        # Build a list of (file, default_name) for user naming
-        pos_names = []
-        for idx, f in enumerate(uploaded):
-            default_name = f.name.rsplit(".", 1)[0]
-            name = st.text_input(
-                f"Position name for file {idx + 1}: {f.name}",
-                value=default_name,
-                key=f"pos_name_{f.name}_{idx}"
-            )
-            pos_names.append((f, name.strip() or default_name))
-
-        col_l, col_r = st.columns([1, 1])
-        replace = col_l.checkbox("Replace existing survey/logs", value=True)
-        load_btn = col_r.button("Load CSVs")
-
-        if load_btn:
-            if replace:
-                # Reset previous state
-                for p in ss["tmp_paths"]:
-                    try:
-                        # Cleanup files on supported OS; not critical if fails
-                        import os
-                        os.unlink(p)
-                    except Exception:
-                        pass
-                ss["tmp_paths"] = []
-                ss["logs"] = {}
-                ss["survey"] = None
-                ss["resi_df"] = None
-
-            added = 0
-            for f, pos_name in pos_names:
-                try:
-                    tmp_path = save_upload_to_tmp(f)
-                    ss["tmp_paths"].append(tmp_path)
-                    log_obj = Log(path=tmp_path)
-                    ss["logs"][pos_name] = log_obj
-                    added += 1
-                except Exception as e:
-                    st.error(f"Failed to load {f.name}: {e}")
-
-            if added > 0:
-                st.success(f"Loaded {added} file(s) into logs.")
-            else:
-                st.warning("No files loaded. Please check the CSV format and try again.")
-
-    if ss["logs"]:
-        st.info(f"Current logs in session: {', '.join(ss['logs'].keys())}")
-
-with st.expander("2) Configure periods (survey.set_periods)", expanded=True):
-    st.write("Set daily period start times. These will be passed as times=... to set_periods().")
-
-    # Show time pickers; convert to tuples (hour, minute)
-    day_t = st.time_input("Day start", value=time(ss["periods_times"]["day"][0], ss["periods_times"]["day"][1]))
-    eve_t = st.time_input("Evening start", value=time(ss["periods_times"]["evening"][0], ss["periods_times"]["evening"][1]))
-    night_t = st.time_input("Night start", value=time(ss["periods_times"]["night"][0], ss["periods_times"]["night"][1]))
-
-    # Update in session
-    new_times = {
-        "day": (day_t.hour, day_t.minute),
-        "evening": (eve_t.hour, eve_t.minute),
-        "night": (night_t.hour, night_t.minute),
-    }
-
-    apply_periods = st.button("Apply periods to Survey")
-
-    if apply_periods:
-        if not ss["logs"]:
-            st.warning("Load logs first.")
-        else:
-            ss["periods_times"] = new_times
-            # Build or update Survey
-            ss["survey"] = build_survey(ss["logs"], times_kwarg=ss["periods_times"])
-            # Invalidate old summary
-            ss["resi_df"] = None
-            st.success("Periods applied to Survey.")
-
-with st.expander("3) Compute results (survey.resi_summary)", expanded=True):
-    st.write("Set kwargs for resi_summary(). Adjust lmax_n and lmax_t, and optionally pass extra kwargs as JSON.")
-
-    col1, col2 = st.columns([1, 1])
-    ss["lmax_n"] = col1.number_input("lmax_n", min_value=1, value=int(ss["lmax_n"]), step=1)
-    ss["lmax_t"] = col2.number_input("lmax_t", min_value=1, value=int(ss["lmax_t"]), step=1)
-
-    ss["extra_kwargs_raw"] = st.text_area(
-        "Extra kwargs (JSON object)",
-        value=ss["extra_kwargs_raw"],
-        height=120,
-        help="Example: {\"include_LAE\": true} (only pass valid kwargs for resi_summary)"
-    )
-
-    compute = st.button("Update resi_summary")

-    if compute:
-        if ss["survey"] is None:
-            if not ss["logs"]:
-                st.warning("Load logs first.")
-            else:
-                # Create Survey if missing
-                ss["survey"] = build_survey(ss["logs"], times_kwarg=ss["periods_times"])
-
-        if ss["survey"] is not None:
-            kwargs = parse_extra_kwargs(ss["extra_kwargs_raw"])
-            kwargs["lmax_n"] = int(ss["lmax_n"])
-            kwargs["lmax_t"] = int(ss["lmax_t"])
-
-            try:
-                df = ss["survey"].resi_summary(**kwargs)
-                if df is None or (hasattr(df, "empty") and df.empty):
-                    st.info("resi_summary returned no data.")
-                    ss["resi_df"] = None
-                else:
-                    ss["resi_df"] = df
-                    st.success("resi_summary updated.")
-            except Exception as e:
-                st.error(f"resi_summary failed: {e}")
-
-# --------------- Results ---------------
-st.subheader("resi_summary results")
-if ss["resi_df"] is not None:
-    show_df = flatten_columns(ss["resi_df"])
-    st.dataframe(show_df, use_container_width=True)
-
-    # Download
-    try:
-        csv_buf = io.StringIO()
-        show_df.to_csv(csv_buf)
-        st.download_button(
-            "Download CSV",
-            data=csv_buf.getvalue(),
-            file_name="resi_summary.csv",
-            mime="text/csv"
-        )
-    except Exception as e:
-        st.warning(f"Unable to prepare CSV download: {e}")
-else:
-    st.info("No results yet. Load CSVs, apply periods, and compute resi_summary.")
-
-# --------------- Utilities ---------------
-with st.sidebar:
-    st.header("Utilities")
-    if st.button("Reset session"):
-        # Clean up temp files
-        for p in ss["tmp_paths"]:
-            try:
-                import os
-                os.unlink(p)
-            except Exception:
-                pass
-        for key in list(st.session_state.keys()):
-            del st.session_state[key]
-        st.experimental_rerun()
-
-    st.caption("Tip: After uploading and loading files, set periods, then compute resi_summary.")
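For context, the removed script is a thin Streamlit wrapper around a handful of pycoustic calls. The sketch below strips away the UI and shows roughly how those calls fit together, based only on what appears in the diff; the CSV path is hypothetical, and attaching logs through the private _logs attribute mirrors the script's own fallback rather than a documented public API.

# Minimal sketch of the pycoustic calls wrapped by the removed Streamlit GUI.
# Assumptions: "data/position1.csv" is a hypothetical path to a CSV in the
# format pycoustic expects; the _logs fallback mirrors build_survey() above
# and may not be the library's public API.
from pycoustic import Log, Survey

log = Log(path="data/position1.csv")        # one measurement position
survey = Survey()
if hasattr(survey, "add_log"):
    survey.add_log("Position 1", log)       # preferred public adder, if present
else:
    survey._logs = {"Position 1": log}      # fallback used by the removed script

# Period start times as (hour, minute) tuples, matching the GUI defaults
survey.set_periods(times={"day": (7, 0), "evening": (23, 0), "night": (23, 0)})

# Residential summary with the GUI's default keyword arguments
summary = survey.resi_summary(lmax_n=5, lmax_t=30)
print(summary)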