pycoustic 0.1.8__py3-none-any.whl → 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycoustic/log.py +10 -4
- pycoustic/pycoustic_gui_app-ai.py +635 -0
- pycoustic/pycoustic_streamlit_gpt5.py +421 -0
- pycoustic/streamlit-ai.py +401 -431
- pycoustic/streamlit-new.py +142 -0
- pycoustic/streamlit_pycoustic_gpt5_dead.py +234 -0
- pycoustic/survey.py +35 -21
- {pycoustic-0.1.8.dist-info → pycoustic-0.1.9.dist-info}/METADATA +4 -2
- pycoustic-0.1.9.dist-info/RECORD +14 -0
- pycoustic-0.1.8.dist-info/RECORD +0 -10
- {pycoustic-0.1.8.dist-info → pycoustic-0.1.9.dist-info}/WHEEL +0 -0
pycoustic/streamlit-new.py
ADDED
@@ -0,0 +1,142 @@
+import os
+import tempfile
+from typing import List, Dict
+
+import pandas as pd
+import plotly.graph_objects as go
+import streamlit as st
+
+# Import pycoustic classes
+from log import *
+from survey import *
+from weather import *
+
+st.set_page_config(page_title="pycoustic GUI", layout="wide")
+st.title("pycoustic Streamlit GUI")
+
+# Initialize session state
+ss = st.session_state
+ss.setdefault("tmp_paths", [])        # List[str] for cleanup
+ss.setdefault("logs", {})             # Dict[str, Log]
+ss.setdefault("survey", None)         # Survey or None
+ss.setdefault("resi_df", None)        # Cached summary
+ss.setdefault("periods_times", {      # Default times for set_periods()
+    "day": (7, 0),
+    "evening": (23, 0),
+    "night": (23, 0),
+})
+ss.setdefault("lmax_n", 5)
+ss.setdefault("lmax_t", 30)
+ss.setdefault("extra_kwargs_raw", "{}")
+
+
+def save_upload_to_tmp(uploaded_file) -> str:
+    """Persist an uploaded CSV to a temporary file and return its path."""
+    # Create a persistent temporary file (delete later on reset)
+    with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as tmp:
+        tmp.write(uploaded_file.getbuffer())
+    return tmp.name
+
+
+def build_survey(log_map: dict, times_kwarg: dict | None = None) -> Survey:
+    """Create a Survey, attach logs, and optionally call set_periods(times=...)."""
+    survey = Survey()
+
+    # Attach logs to the Survey (simple, direct assignment to internal storage)
+    # If a public adder method exists, prefer that; fallback to internal attribute.
+    if hasattr(survey, "add_log"):
+        for key, lg in log_map.items():
+            try:
+                survey.add_log(key, lg)  # type: ignore[attr-defined]
+            except Exception:
+                # Fallback if signature differs
+                setattr(survey, "_logs", log_map)
+                break
+    else:
+        setattr(survey, "_logs", log_map)
+
+    # Apply periods if provided
+    if times_kwarg is not None:
+        try:
+            survey.set_periods(times=times_kwarg)
+        except Exception as e:
+            st.warning(f"set_periods failed with provided times: {e}")
+
+    return survey
+
+
+# File Upload in expander container
+with st.expander("1) Load CSV data", expanded=True):
+    st.write("Upload one or more CSV files to create Log objects for a single Survey.")
+
+    uploaded = st.file_uploader(
+        "Select CSV files",
+        type=["csv"],
+        accept_multiple_files=True,
+        help="Each CSV should match the expected pycoustic format."
+    )
+
+    if uploaded:
+        st.caption("Assign a position name for each file (defaults to base filename).")
+
+        # Build a list of (file, default_name) for user naming
+        pos_names = []
+        for idx, f in enumerate(uploaded):
+            default_name = f.name.rsplit(".", 1)[0]
+            name = st.text_input(
+                f"Position name for file {idx + 1}: {f.name}",
+                value=default_name,
+                key=f"pos_name_{f.name}_{idx}"
+            )
+            pos_names.append((f, name.strip() or default_name))
+
+        col_l, col_r = st.columns([1, 1])
+        replace = col_l.checkbox("Replace existing survey/logs", value=True)
+        load_btn = col_r.button("Load CSVs")
+
+        if load_btn:
+            if replace:
+                # Reset previous state
+                for p in ss["tmp_paths"]:
+                    try:
+                        # Cleanup files on supported OS; not critical if fails
+                        import os
+                        os.unlink(p)
+                    except Exception:
+                        pass
+                ss["tmp_paths"] = []
+                ss["logs"] = {}
+                ss["survey"] = None
+                ss["resi_df"] = None
+
+            added = 0
+            for f, pos_name in pos_names:
+                try:
+                    tmp_path = save_upload_to_tmp(f)
+                    ss["tmp_paths"].append(tmp_path)
+                    log_obj = Log(path=tmp_path)
+                    ss["logs"][pos_name] = log_obj
+                    added += 1
+                except Exception as e:
+                    st.error(f"Failed to load {f.name}: {e}")
+
+            if added > 0:
+                st.success(f"Loaded {added} file(s) into logs.")
+            else:
+                st.warning("No files loaded. Please check the CSV format and try again.")
+
+if ss["logs"]:
+    st.info(f"Current logs in session: {', '.join(ss['logs'].keys())}")
+
+    ss["survey"] = Survey()
+    for k in ss["logs"].keys():
+        ss["survey"].add_log(ss["survey"], name="k")
+        st.text(k)
+
+    st.text(type(ss["survey"]))
+    st.table(ss["survey"].resi_summary())
+
+with st.expander("Broadband Summary", expanded=True):
+    df = ss["survey"]._logs
+    st.text(df)
+#test
pycoustic/streamlit_pycoustic_gpt5_dead.py
ADDED
@@ -0,0 +1,234 @@
+# streamlit_pycoustic_app.py
+import ast
+import datetime as dt
+import tempfile
+from pathlib import Path
+from typing import Any, Dict, Iterable
+
+import streamlit as st
+
+from pycoustic import Log, Survey
+
+
+def _parse_kwargs(text: str) -> Dict[str, Any]:
+    """
+    Safely parse a Python dict literal from text area.
+    Returns {} if empty or invalid.
+    """
+    if not text or not text.strip():
+        return {}
+    try:
+        parsed = ast.literal_eval(text)
+        return parsed if isinstance(parsed, dict) else {}
+    except Exception:
+        return {}
+
+
+def _display_result(obj: Any):
+    """
+    Display helper to handle common return types.
+    """
+    # Plotly Figure-like
+    if hasattr(obj, "to_plotly_json"):
+        st.plotly_chart(obj, use_container_width=True)
+        return
+
+    # Pandas DataFrame-like
+    if hasattr(obj, "to_dict") and hasattr(obj, "columns"):
+        st.dataframe(obj, use_container_width=True)
+        return
+
+    # Dict/list -> JSON
+    if isinstance(obj, (dict, list)):
+        st.json(obj)
+        return
+
+    # Fallback
+    st.write(obj)
+
+
+def _ensure_state():
+    if "survey" not in st.session_state:
+        st.session_state["survey"] = None
+    if "periods" not in st.session_state:
+        st.session_state["periods"] = {"day": (7, 0), "evening": (19, 0), "night": (23, 0)}
+
+
+def _write_uploaded_to_temp(uploaded) -> str:
+    """
+    Persist an UploadedFile to a temporary file and return the path.
+    Using a real file path keeps Log(...) happy across environments.
+    """
+    suffix = Path(uploaded.name).suffix or ".csv"
+    with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as tmp:
+        tmp.write(uploaded.getbuffer())
+    return tmp.name
+
+
+def _build_survey_from_files(files) -> Survey:
+    """
+    Create a Survey and attach Log objects for each uploaded file.
+    """
+    survey = Survey()
+    for f in files:
+        # Persist to disk to ensure compatibility with pandas and any path usage in Log
+        tmp_path = _write_uploaded_to_temp(f)
+        log_obj = Log(path=tmp_path)
+
+        key = Path(f.name).stem
+        # Attach Log to survey
+        if hasattr(survey, "add_log"):
+            try:
+                survey.add_log(key, log_obj)
+            except TypeError:
+                survey.add_log(log_obj, key)
+        else:
+            # Fallback to internal storage if no public API is available
+            survey._logs[key] = log_obj  # noqa: SLF001
+    return survey
+
+
+def _apply_periods_to_all_logs(survey: Survey, times: Dict[str, tuple[int, int]]):
+    """
+    Apply set_periods to each Log attached to the Survey.
+    This avoids calling set_periods on Survey if it doesn't exist.
+    """
+    logs: Iterable[Log] = getattr(survey, "_logs", {}).values()
+    for log in logs:
+        if hasattr(log, "set_periods"):
+            log.set_periods(times=times)
+
+
+def _render_period_controls(survey: Survey):
+    st.subheader("Assessment Periods")
+
+    # Current periods from session (defaults set in _ensure_state)
+    periods = st.session_state["periods"]
+    day_h, day_m = periods["day"]
+    eve_h, eve_m = periods["evening"]
+    night_h, night_m = periods["night"]
+
+    c1, c2, c3 = st.columns(3)
+    with c1:
+        day_time = st.time_input("Day starts", value=dt.time(day_h, day_m), key="period_day_start")
+    with c2:
+        eve_time = st.time_input("Evening starts", value=dt.time(eve_h, eve_m), key="period_eve_start")
+    with c3:
+        night_time = st.time_input("Night starts", value=dt.time(night_h, night_m), key="period_night_start")
+
+    new_periods = {
+        "day": (int(day_time.hour), int(day_time.minute)),
+        "evening": (int(eve_time.hour), int(eve_time.minute)),
+        "night": (int(night_time.hour), int(night_time.minute)),
+    }
+
+    if st.button("Apply periods to all logs", key="apply_periods"):
+        try:
+            _apply_periods_to_all_logs(survey, new_periods)
+            st.session_state["periods"] = new_periods
+            st.success("Periods applied to all logs.")
+        except Exception as e:
+            st.warning(f"Could not set periods: {e}")
+
+
+def _render_method_runner(survey: Survey, method_name: str, help_text: str = ""):
+    """
+    Generic UI for running a Survey method with kwargs provided via text area.
+    """
+    with st.expander(method_name, expanded=True):
+        if help_text:
+            st.caption(help_text)
+
+        kwargs_text = st.text_area(
+            "kwargs (Python dict literal)",
+            value="{}",
+            key=f"kwargs_{method_name}",
+            placeholder='Example: {"position": "UA1", "date": "2023-06-01"}',
+            height=100,
+        )
+
+        kwargs = _parse_kwargs(kwargs_text)
+        if st.button(f"Run {method_name}", key=f"run_{method_name}"):
+            try:
+                fn = getattr(survey, method_name)
+                result = fn(**kwargs)
+                _display_result(result)
+            except AttributeError:
+                st.error(f"Survey has no method named '{method_name}'.")
+            except Exception as e:
+                st.error(f"Error running {method_name}: {e}")
+
+
+def main():
+    st.set_page_config(page_title="pycoustic GUI", layout="wide")
+    st.title("pycoustic – Streamlit GUI")
+
+    _ensure_state()
+
+    st.sidebar.header("Load CSV Logs")
+    files = st.sidebar.file_uploader(
+        "Upload one or more CSV files",
+        type=["csv"],
+        accept_multiple_files=True,
+        help="Each file becomes a Log; all Logs go into one Survey."
+    )
+
+    build = st.sidebar.button("Create / Update Survey", type="primary")
+
+    if build and files:
+        try:
+            survey = _build_survey_from_files(files)
+            # Apply default periods to all logs
+            _apply_periods_to_all_logs(survey, st.session_state["periods"])
+            st.session_state["survey"] = survey
+            st.success("Survey created/updated.")
+        except Exception as e:
+            st.error(f"Unable to create Survey: {e}")
+
+    survey: Survey = st.session_state.get("survey")
+
+    if survey is None:
+        st.info("Upload CSV files in the sidebar and click 'Create / Update Survey' to begin.")
+        return
+
+    # Period controls
+    _render_period_controls(survey)
+
+    st.markdown("---")
+    st.header("Survey Outputs")
+
+    _render_method_runner(
+        survey,
+        "resi_summary",
+        help_text="Summary results for residential assessment. Provide any optional kwargs here."
+    )
+    _render_method_runner(
+        survey,
+        "modal",
+        help_text="Run modal analysis over the survey. Provide any optional kwargs here."
+    )
+    _render_method_runner(
+        survey,
+        "leq_spectra",
+        help_text="Compute or plot Leq spectra. Provide any optional kwargs here."
+    )
+    _render_method_runner(
+        survey,
+        "lmax_spectra",
+        help_text="Compute or plot Lmax spectra. Provide any optional kwargs here."
+    )
+
+    st.markdown("---")
+    with st.expander("Loaded Logs", expanded=False):
+        try:
+            names = list(getattr(survey, "_logs", {}).keys())
+            if names:
+                st.write(", ".join(names))
+            else:
+                st.write("No logs found in survey.")
+        except Exception:
+            st.write("Unable to list logs.")
+
+
+if __name__ == "__main__":
+    main()
pycoustic/survey.py
CHANGED
@@ -39,6 +39,17 @@ class Survey:
         df.columns = new_cols
         return df

+    # def _leq_by_date(self, data, cols=None):
+    #     """
+    #     Delegate Leq-by-date computation to one of the underlying Log instances.
+    #     Assumes all logs share the same period configuration.
+    #     """
+    #     if not getattr(self, "_logs", None):
+    #         raise AttributeError("Survey has no logs available to compute _leq_by_date")
+    #     any_log = next(iter(self._logs.values()))
+    #     if not hasattr(any_log, "_leq_by_date"):
+    #         raise AttributeError("Underlying Log does not implement _leq_by_date")
+    #     return any_log._leq_by_date(data, cols=cols)
     # ###########################---PUBLIC---######################################

     def set_periods(self, times=None):
@@ -105,51 +116,54 @@ class Survey:
             leq_cols = [("Leq", "A")]
         if max_cols is None:
             max_cols = [("Lmax", "A")]
-
-
+
+        for key, lg in self._logs.items():  # changed: iterate items() to get lg directly
             combined_list = []
+            headers_for_log = []  # new: collect headers per log
+
             # Day
-            days =
+            days = lg.leq_by_date(lg.get_period(data=lg.get_antilogs(), period="days"), cols=leq_cols)
             days.sort_index(inplace=True)
             combined_list.append(days)
-
+            headers_for_log.extend(["Daytime"] * len(leq_cols))  # changed: don't reset global headers
+
             # Evening
-            if
-                evenings =
+            if lg.is_evening():
+                evenings = lg.leq_by_date(lg.get_period(data=lg.get_antilogs(), period="evenings"), cols=leq_cols)
                 evenings.sort_index(inplace=True)
                 combined_list.append(evenings)
-
-
+                headers_for_log.extend(["Evening"] * len(leq_cols))
+
             # Night Leq
-            nights =
+            nights = lg.leq_by_date(lg.get_period(data=lg.get_antilogs(), period="nights"), cols=leq_cols)
             nights.sort_index(inplace=True)
             combined_list.append(nights)
-
-
+            headers_for_log.extend(["Night-time"] * len(leq_cols))
+
             # Night max
-            maxes =
-            maxes =
-            maxes =
+            maxes = lg.as_interval(t=lmax_t)
+            maxes = lg.get_period(data=maxes, period="nights", night_idx=True)
+            maxes = lg.get_nth_high_low(n=lmax_n, data=maxes)[max_cols]
             maxes.sort_index(inplace=True)
-            # +++
-            # SS Feb2025 - Code changed to prevent exception
-            #maxes.index = maxes.index.date
             try:
                 maxes.index = pd.to_datetime(maxes.index)
                 maxes.index = maxes.index.date
             except Exception as e:
                 print(f"Error converting index to date: {e}")
-            # SSS ---
             maxes.index.name = None
             combined_list.append(maxes)
-
-
+            headers_for_log.extend(["Night-time"] * len(max_cols))
+
             summary = pd.concat(objs=combined_list, axis=1)
             summary = self._insert_multiindex(df=summary, super=key)
             combi = pd.concat(objs=[combi, summary], axis=0)
+
+            # append this log's headers to the global list
+            period_headers.extend(headers_for_log)
+
         combi = self._insert_header(df=combi, new_head_list=period_headers, header_idx=0)
         return combi
-
+    #test
     def modal(self, cols=None, by_date=False, day_t="60min", evening_t="60min", night_t="15min"):
         """
         Get a dataframe summarising Modal L90 values for each time period, as suggested by BS 4142:2014.
{pycoustic-0.1.8.dist-info → pycoustic-0.1.9.dist-info}/METADATA
CHANGED
@@ -1,11 +1,13 @@
 Metadata-Version: 2.3
 Name: pycoustic
-Version: 0.1.
+Version: 0.1.9
 Summary:
 Author: thumpercastle
 Author-email: tony.ryb@gmail.com
-Requires-Python: >=3.
+Requires-Python: >=3.10,<=3.13
 Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: numpy (==2.2.6)
pycoustic-0.1.9.dist-info/RECORD
ADDED
@@ -0,0 +1,14 @@
+pycoustic/__init__.py,sha256=jq9Tzc5nEgXh8eNf0AkAypmw3Dda9A-iSy-tyFaTksA,89
+pycoustic/log.py,sha256=e8rAy9hIYP2H-3vTDVe0-6swe_n_gXjuFCu6Q-xNiYQ,17827
+pycoustic/pycoustic_gui_app-ai.py,sha256=nEX7Q5oWzTLmtC_xqbh74vXpQak8gwuqf2ScPq1Ir7o,24432
+pycoustic/pycoustic_gui_app.py,sha256=Hs61Y8fAp7uoRONa4RLSVl0UvGXZZ96n5eJGilErlAU,11143
+pycoustic/pycoustic_streamlit_gpt5.py,sha256=gpkPPBGwADt9HFI4S7YD1U-TjpLTMVwcBUJd7wTefek,14259
+pycoustic/streamlit-ai.py,sha256=OZdrQbGwQyVvA_4Q8bTOCZUZGdSlZG9NL9z3f16W-A8,16414
+pycoustic/streamlit-new.py,sha256=AR5dwQinMXugvGcyNvI_W59bfFRGj6E90Fqah9toKto,4885
+pycoustic/streamlit_pycoustic_gpt5_dead.py,sha256=sFUxLkvNUZoh2cVzruqsJJiLIlJxOQQpYYK6oHZfPlM,7309
+pycoustic/survey.py,sha256=6gC2sd0vOusx8bEyCwqmfSR5k04VeV93Ong0OdEVVks,24071
+pycoustic/tkgui.py,sha256=YAy5f_qkXZ3yU8BvB-nIVQX1fYwPs_IkwmDEXHPMAa4,13997
+pycoustic/weather.py,sha256=q9FbDKjY0WaNvaYMHeDk7Bhbq0_Q7ehsTM_vUaCjeAk,3753
+pycoustic-0.1.9.dist-info/METADATA,sha256=2NDXL0ovNkEJKxx-P2ErBkdTHNA1AWL77RFAaKQdI6o,8515
+pycoustic-0.1.9.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+pycoustic-0.1.9.dist-info/RECORD,,
pycoustic-0.1.8.dist-info/RECORD
DELETED
@@ -1,10 +0,0 @@
-pycoustic/__init__.py,sha256=jq9Tzc5nEgXh8eNf0AkAypmw3Dda9A-iSy-tyFaTksA,89
-pycoustic/log.py,sha256=S9Vc2hgHShd2YnDAdBs6by_8hE6l1HIoPgZ0po-KT5I,17614
-pycoustic/pycoustic_gui_app.py,sha256=Hs61Y8fAp7uoRONa4RLSVl0UvGXZZ96n5eJGilErlAU,11143
-pycoustic/streamlit-ai.py,sha256=bGYPvrHmQEFua8IrJhBNAdXDzUnQvcjINaRx2-F-5Ms,18637
-pycoustic/survey.py,sha256=pvdiR4wn1Zicr7rL1fZ5IgUnls5Ikg2j1RFw36WgcB0,23471
-pycoustic/tkgui.py,sha256=YAy5f_qkXZ3yU8BvB-nIVQX1fYwPs_IkwmDEXHPMAa4,13997
-pycoustic/weather.py,sha256=q9FbDKjY0WaNvaYMHeDk7Bhbq0_Q7ehsTM_vUaCjeAk,3753
-pycoustic-0.1.8.dist-info/METADATA,sha256=AqC1SlTm94A-qFF0Sm13c2JZW4pVBdFfsYWWHBymN2o,8413
-pycoustic-0.1.8.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-pycoustic-0.1.8.dist-info/RECORD,,
{pycoustic-0.1.8.dist-info → pycoustic-0.1.9.dist-info}/WHEEL
File without changes