weac 3.0.0-py3-none-any.whl → 3.0.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
weac/utils/misc.py ADDED
@@ -0,0 +1,127 @@
+ """
+ This module contains miscellaneous utility functions.
+ """
+
+ from typing import Literal
+
+ import numpy as np
+
+ from weac.components import Layer
+ from weac.constants import G_MM_S2, LSKI_MM
+
+
+ def decompose_to_normal_tangential(f: float, phi: float) -> tuple[float, float]:
+     """
+     Resolve a gravity-type force/line load into its normal (into-slope) and
+     tangential (downslope) components with respect to an inclined surface.
+
+     Parameters
+     ----------
+     f : float
+         Vertical load magnitude, interpreted as acting straight downward
+         (negative global y direction).
+     phi : float
+         Surface dip angle in degrees, measured from horizontal.
+         Positive `phi` means the surface slopes upward in +x.
+
+     Returns
+     -------
+     f_norm, f_tan : float
+         Magnitudes of the normal (+ into-slope) and tangential (+ downslope)
+         components, respectively.
+     """
+     # Convert units
+     phi = np.deg2rad(phi)  # Convert inclination to rad
+     # Split into components
+     f_norm = f * np.cos(phi)  # Normal direction
+     f_tan = -f * np.sin(phi)  # Tangential direction
+     return f_norm, f_tan
+
+
+ def get_skier_point_load(m: float) -> float:
+     """
+     Calculate skier point load.
+
+     Arguments
+     ---------
+     m : float
+         Skier weight [kg].
+
+     Returns
+     -------
+     f : float
+         Skier load [N/mm].
+     """
+     F = 1e-3 * m * G_MM_S2 / LSKI_MM  # Total skier load distributed over the ski length
+     return F
+
+
+ def load_dummy_profile(
+     profile_id: Literal[
+         "a", "b", "c", "d", "e", "f", "h", "soft", "medium", "hard", "comp", "tested"
+     ],
+ ) -> list[Layer]:
+     """Define standard layering types for comparison."""
+     soft_layer = Layer(rho=180, h=120, E=5)
+     medium_layer = Layer(rho=270, h=120, E=30)
+     hard_layer = Layer(rho=350, h=120, E=93.8)
+
+     tested_layers = [
+         Layer(rho=350, h=120),
+         Layer(rho=270, h=120),
+         Layer(rho=180, h=120),
+     ]
+
+     # Database (top to bottom)
+     database = {
+         # Layered
+         "a": [hard_layer, medium_layer, soft_layer],
+         "b": [soft_layer, medium_layer, hard_layer],
+         "c": [hard_layer, soft_layer, hard_layer],
+         "d": [soft_layer, hard_layer, soft_layer],
+         "e": [hard_layer, soft_layer, soft_layer],
+         "f": [soft_layer, soft_layer, hard_layer],
+         "tested": tested_layers,
+         # Homogeneous
+         "h": [medium_layer, medium_layer, medium_layer],
+         "soft": [soft_layer, soft_layer, soft_layer],
+         "medium": [medium_layer, medium_layer, medium_layer],
+         "hard": [hard_layer, hard_layer, hard_layer],
+         # Comparison
+         "comp": [
+             Layer(rho=240, h=200, E=5.23),
+         ],
+     }
+
+     # Load profile
+     try:
+         profile = database[profile_id.lower()]
+     except KeyError:
+         raise ValueError(f"Profile {profile_id} is not defined.") from None
+     return profile
+
+
+ def isnotebook() -> bool:
+     """
+     Check if code is running in a Jupyter notebook environment.
+
+     Returns
+     -------
+     bool
+         True if running in Jupyter notebook, False otherwise.
+     """
+     try:
+         # Check if we're in IPython
+         from IPython import get_ipython  # pylint: disable=import-outside-toplevel
+
+         if get_ipython() is None:
+             return False
+
+         # Check if we're specifically in a notebook (not just IPython terminal)
+         if get_ipython().__class__.__name__ == "ZMQInteractiveShell":
+             return True  # Jupyter notebook
+         if get_ipython().__class__.__name__ == "TerminalInteractiveShell":
+             return False  # IPython terminal
+         return False  # Other IPython environments
+     except ImportError:
+         return False  # IPython not available
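
As a quick orientation for readers of this diff, here is a minimal usage sketch of the new helpers. It is not part of the package; the load, mass, and slope values are illustrative, and the import path simply follows the `weac/utils/misc.py ADDED` header above.

```python
# Illustrative sketch only; values are made up.
from weac.utils.misc import (
    decompose_to_normal_tangential,
    get_skier_point_load,
    load_dummy_profile,
)

# Resolve a 1.0 N/mm vertical line load on a 30-degree slope.
f_norm, f_tan = decompose_to_normal_tangential(f=1.0, phi=30.0)
print(f_norm, f_tan)  # ~0.866 into-slope, ~-0.5 (sign per f_tan = -f * sin(phi))

# Line load of an 80 kg skier distributed over the ski length [N/mm];
# the numeric result depends on the G_MM_S2 and LSKI_MM constants.
f_skier = get_skier_point_load(m=80.0)

# Standard three-layer profile "a" (hard over medium over soft).
layers = load_dummy_profile("a")
```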
@@ -0,0 +1,82 @@
+ """
+ Snow grain types and hand hardness values.
+
+ These values are used in Pydantic models for validation and correspond to the
+ parameterizations available in `geldsetzer.py`.
+ """
+
+ from enum import Enum
+
+
+ class GrainType(str, Enum):
+     """SnowPilot grain type codes (see `geldsetzer.GRAIN_TYPE`)."""
+
+     DF = "DF"
+     DFbk = "DFbk"
+     DFdc = "DFdc"
+     DH = "DH"
+     DHch = "DHch"
+     DHcp = "DHcp"
+     DHla = "DHla"
+     DHpr = "DHpr"
+     DHxr = "DHxr"
+     FC = "FC"
+     FCsf = "FCsf"
+     FCso = "FCso"
+     FCxr = "FCxr"
+     IF = "IF"
+     IFbi = "IFbi"
+     IFic = "IFic"
+     IFil = "IFil"
+     IFrc = "IFrc"
+     IFsc = "IFsc"
+     MF = "MF"
+     MFcl = "MFcl"
+     MFcr = "MFcr"
+     MFpc = "MFpc"
+     MFsl = "MFsl"
+     PP = "PP"
+     PPco = "PPco"
+     PPgp = "PPgp"
+     PPhl = "PPhl"
+     PPip = "PPip"
+     PPir = "PPir"
+     PPnd = "PPnd"
+     PPpl = "PPpl"
+     PPrm = "PPrm"
+     PPsd = "PPsd"
+     RG = "RG"
+     RGlr = "RGlr"
+     RGsr = "RGsr"
+     RGwp = "RGwp"
+     RGxf = "RGxf"
+     SH = "SH"
+     SHcv = "SHcv"
+     SHsu = "SHsu"
+     SHxr = "SHxr"
+
+
+ class HandHardness(str, Enum):
60
+ """Field hand hardness codes (see `geldsetzer.HAND_HARDNESS`).
61
+
62
+ Enum member names avoid starting with digits and special characters.
63
+ """
64
+
65
+ Fm = "F-"
66
+ F = "F"
67
+ Fp = "F+"
68
+ _4Fm = "4F-"
69
+ _4F = "4F"
70
+ _4Fp = "4F+"
71
+ _1Fm = "1F-"
72
+ _1F = "1F"
73
+ _1Fp = "1F+"
74
+ Pm = "P-"
75
+ P = "P"
76
+ Pp = "P+"
77
+ Km = "K-"
78
+ K = "K"
79
+ Kp = "K+"
80
+ Im = "I-"
81
+ I = "I"
82
+ Ip = "I+"
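
A short, hedged sketch of how these enums can be used. The import path below is an assumption, since this hunk does not show the module's file name; only the class names and member values come from the code above.

```python
# Hypothetical import path; the hunk above does not name the module file.
from weac.utils.snow_types import GrainType, HandHardness

grain = GrainType.SH           # surface hoar
hardness = HandHardness("4F")  # lookup by value; the member itself is named _4F

assert hardness is HandHardness._4F
assert grain.value == "SH"     # str-valued enums keep the SnowPilot code
```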
@@ -0,0 +1,331 @@
+ """
+ Uses the snowpylot library to convert a CAAML file into WEAC ModelInput objects.
+
+ The snowpylot library parses the CAAML file and extracts the snow pit.
+ The snow pit is then converted into a list of WEAC ModelInput objects.
+
+ Based on the stability tests performed, several scenarios are created.
+ Each scenario is a WEAC ModelInput.
+
+ The scenarios are created as follows:
+ - For each PropSawTest, a scenario is created with the cut length and a standard segment.
+ - For each ExtColumnTest, a scenario is created with a standard segment.
+ - For each ComprTest, a scenario is created with a standard segment.
+ - For each RBlockTest, a scenario is created with a standard segment.
+
+ A standard segment has a length of 1000 mm and its foundation set to True.
+
+ The cut length and the column length are taken from the PropSawTest.
+ """
+
+ import logging
+
+ import numpy as np
+ from snowpylot import caaml_parser
+ from snowpylot.layer import Layer as SnowpylotLayer
+ from snowpylot.snow_pit import SnowPit
+ from snowpylot.snow_profile import DensityObs
+
+ # Import WEAC components
+ from weac.components import (
+     Layer,
+     WeakLayer,
+ )
+ from weac.utils.geldsetzer import compute_density
+
+ logger = logging.getLogger(__name__)
+
+ convert_to_mm = {"cm": 10, "mm": 1, "m": 1000, "dm": 100}
+ convert_to_deg = {"deg": 1, "rad": 180 / np.pi}
+
+
+ class SnowPilotParser:
+     """Parser for SnowPilot files using the snowpylot library."""
+
+     def __init__(self, file_path: str):
+         self.snowpit: SnowPit = caaml_parser(file_path)
+
+     def extract_layers(self) -> tuple[list[Layer], list[str]]:
+         """Extract layers from snowpit."""
+         snowpit = self.snowpit
+         # Extract layers from snowpit: list[SnowpylotLayer]
+         sp_layers: list[SnowpylotLayer] = [
+             layer
+             for layer in snowpit.snow_profile.layers
+             if layer.depth_top is not None
+         ]
+         sp_layers = sorted(sp_layers, key=lambda x: x.depth_top[0])  # type: ignore
+
+         # Extract density layers from snowpit: list[DensityObs]
+         sp_density_layers: list[DensityObs] = [
+             layer
+             for layer in snowpit.snow_profile.density_profile
+             if layer.depth_top is not None
+         ]
+         sp_density_layers = sorted(sp_density_layers, key=lambda x: x.depth_top[0])  # type: ignore
+
+         # Populate WEAC layers: list[Layer]
+         layers: list[Layer] = []
+         density_methods: list[str] = []
+         for _i, layer in enumerate(sp_layers):
+             # Parameters
+             grain_type = None
+             grain_size = None
+             hand_hardness = None
+             density = None
+             thickness = None
+
+             # extract THICKNESS
+             if layer.thickness is not None:
+                 thickness, unit = layer.thickness
+                 thickness = thickness * convert_to_mm[unit]  # Convert to mm
+             else:
+                 raise ValueError("Thickness not found")
+
+             # extract GRAIN TYPE and SIZE
+             if layer.grain_form_primary:
+                 if layer.grain_form_primary.grain_form:
+                     grain_type = layer.grain_form_primary.grain_form
+                 if layer.grain_form_primary.grain_size_avg:
+                     grain_size = (
+                         layer.grain_form_primary.grain_size_avg[0]
+                         * convert_to_mm[layer.grain_form_primary.grain_size_avg[1]]
+                     )
+                 elif layer.grain_form_primary.grain_size_max:
+                     grain_size = (
+                         layer.grain_form_primary.grain_size_max[0]
+                         * convert_to_mm[layer.grain_form_primary.grain_size_max[1]]
+                     )
+
+             # extract DENSITY
+             # Get layer depth range in mm for density matching
+             layer_depth_top_mm = layer.depth_top[0] * convert_to_mm[layer.depth_top[1]]
+             layer_depth_bottom_mm = layer_depth_top_mm + thickness
+             # Try to find density measurement that overlaps with this layer
+             measured_density = self.get_density_for_layer_range(
+                 layer_depth_top_mm, layer_depth_bottom_mm, sp_density_layers
+             )
+
+             # Handle hardness and create layers accordingly
+             if layer.hardness_top is not None and layer.hardness_bottom is not None:
+                 hand_hardness_top = layer.hardness_top
+                 hand_hardness_bottom = layer.hardness_bottom
+
+                 # Two hardness values - split into two layers
+                 half_thickness = thickness / 2
+                 layer_mid_depth_mm = layer_depth_top_mm + half_thickness
+
+                 # Create top layer (first half)
+                 if measured_density is not None:
+                     density_top = self.get_density_for_layer_range(
+                         layer_depth_top_mm, layer_mid_depth_mm, sp_density_layers
+                     )
+                     if density_top is None:
+                         density_methods.append("geldsetzer")
+                         density_top = compute_density(grain_type, hand_hardness_top)
+                     else:
+                         density_methods.append("density_obs")
+                 else:
+                     density_methods.append("geldsetzer")
+                     density_top = compute_density(grain_type, hand_hardness_top)
+
+                 layers.append(
+                     Layer(
+                         rho=density_top,
+                         h=half_thickness,
+                         grain_type=grain_type,
+                         grain_size=grain_size,
+                         hand_hardness=hand_hardness_top,
+                     )
+                 )
+
+                 # Create bottom layer (second half)
+                 if measured_density is not None:
+                     density_bottom = self.get_density_for_layer_range(
+                         layer_mid_depth_mm, layer_depth_bottom_mm, sp_density_layers
+                     )
+                     if density_bottom is None:
+                         density_methods.append("geldsetzer")
+                         density_bottom = compute_density(
+                             grain_type, hand_hardness_bottom
+                         )
+                     else:
+                         density_methods.append("density_obs")
+                 else:
+                     try:
+                         density_methods.append("geldsetzer")
+                         density_bottom = compute_density(
+                             grain_type, hand_hardness_bottom
+                         )
+                     except Exception as exc:
+                         raise AttributeError(
+                             "Layer is missing density information; density profile, "
+                             "hand hardness and grain type are all missing. "
+                             "Excluding SnowPit from calculations."
+                         ) from exc
+
+                 layers.append(
+                     Layer(
+                         rho=density_bottom,
+                         h=half_thickness,
+                         grain_type=grain_type,
+                         grain_size=grain_size,
+                         hand_hardness=hand_hardness_bottom,
+                     )
+                 )
+             else:
+                 # Single hardness value - create one layer
+                 hand_hardness = layer.hardness
+
+                 if measured_density is not None:
+                     density = measured_density
+                     density_methods.append("density_obs")
+                 else:
+                     try:
+                         density_methods.append("geldsetzer")
+                         density = compute_density(grain_type, hand_hardness)
+                     except Exception as exc:
+                         raise AttributeError(
+                             "Layer is missing density information; density profile, "
+                             "hand hardness and grain type are all missing. "
+                             "Excluding SnowPit from calculations."
+                         ) from exc
+
+                 layers.append(
+                     Layer(
+                         rho=density,
+                         h=thickness,
+                         grain_type=grain_type,
+                         grain_size=grain_size,
+                         hand_hardness=hand_hardness,
+                     )
+                 )
+
+         if len(layers) == 0:
+             raise AttributeError(
+                 "No layers found for snowpit. Excluding SnowPit from calculations."
+             )
+         return layers, density_methods
+
+     def get_density_for_layer_range(
+         self,
+         layer_top_mm: float,
+         layer_bottom_mm: float,
+         sp_density_layers: list[DensityObs],
+     ) -> float | None:
+         """Find density measurements that overlap with the given layer depth range.
+
+         Args:
+             layer_top_mm: Top depth of layer in mm
+             layer_bottom_mm: Bottom depth of layer in mm
+             sp_density_layers: list of density observations
+
+         Returns:
+             Average density from overlapping measurements, or None if no overlap
+         """
+         if not sp_density_layers:
+             return None
+
+         overlapping_densities = []
+         overlapping_weights = []
+
+         for density_obs in sp_density_layers:
+             if density_obs.depth_top is None or density_obs.thickness is None:
+                 continue
+
+             # Convert density observation depth range to mm
+             density_top_mm = (
+                 density_obs.depth_top[0] * convert_to_mm[density_obs.depth_top[1]]
+             )
+             density_thickness_mm = (
+                 density_obs.thickness[0] * convert_to_mm[density_obs.thickness[1]]
+             )
+             density_bottom_mm = density_top_mm + density_thickness_mm
+
+             # Check for overlap between layer and density measurement
+             overlap_top = max(layer_top_mm, density_top_mm)
+             overlap_bottom = min(layer_bottom_mm, density_bottom_mm)
+
+             if overlap_top < overlap_bottom:  # There is overlap
+                 overlap_thickness = overlap_bottom - overlap_top
+
+                 # Extract density value
+                 if density_obs.density is not None:
+                     density_value = density_obs.density[0]  # (value, unit)
+
+                     overlapping_densities.append(density_value)
+                     overlapping_weights.append(overlap_thickness)
+
+         if overlapping_densities:
+             # Calculate weighted average based on overlap thickness
+             total_weight = sum(overlapping_weights)
+             if total_weight > 0:
+                 weighted_density = (
+                     sum(
+                         d * w
+                         for d, w in zip(overlapping_densities, overlapping_weights)
+                     )
+                     / total_weight
+                 )
+                 return float(weighted_density)
+         return None
+
+     def extract_weak_layer_and_layers_above(
+         self, weak_layer_depth: float, layers: list[Layer]
+     ) -> tuple[WeakLayer, list[Layer]]:
+         """Extract the weak layer and the layers above it at the given depth
+         (depth_top taken from the stability test)."""
+         depth = 0
+         layers_above = []
+         weak_layer_rho = None
+         weak_layer_hand_hardness = None
+         weak_layer_grain_type = None
+         weak_layer_grain_size = None
+         if weak_layer_depth <= 0:
+             raise ValueError(
+                 "The depth of the weak layer is not positive. "
+                 "Excluding SnowPit from calculations."
+             )
+         if weak_layer_depth > sum(layer.h for layer in layers):
+             raise ValueError(
+                 "The depth of the weak layer is below the recorded layers. "
+                 "Excluding SnowPit from calculations."
+             )
+         layers = [layer.model_copy(deep=True) for layer in layers]
+         for i, layer in enumerate(layers):
+             if depth + layer.h < weak_layer_depth:
+                 layers_above.append(layer)
+                 depth += layer.h
+             elif depth < weak_layer_depth < depth + layer.h:
+                 layer.h = weak_layer_depth - depth
+                 layers_above.append(layer)
+                 weak_layer_rho = layers[i].rho
+                 weak_layer_hand_hardness = layers[i].hand_hardness
+                 weak_layer_grain_type = layers[i].grain_type
+                 weak_layer_grain_size = layers[i].grain_size
+                 break
+             elif depth + layer.h == weak_layer_depth:
+                 if i + 1 < len(layers):
+                     layers_above.append(layer)
+                     weak_layer_rho = layers[i + 1].rho
+                     weak_layer_hand_hardness = layers[i + 1].hand_hardness
+                     weak_layer_grain_type = layers[i + 1].grain_type
+                     weak_layer_grain_size = layers[i + 1].grain_size
+                 else:
+                     weak_layer_rho = layers[i].rho
+                     weak_layer_hand_hardness = layers[i].hand_hardness
+                     weak_layer_grain_type = layers[i].grain_type
+                     weak_layer_grain_size = layers[i].grain_size
+                 break
+
+         weak_layer = WeakLayer(
+             rho=weak_layer_rho,
+             h=20.0,
+             hand_hardness=weak_layer_hand_hardness,
+             grain_type=weak_layer_grain_type,
+             grain_size=weak_layer_grain_size,
+         )
+         if len(layers_above) == 0:
+             raise ValueError("No layers above weak layer found")
+         return weak_layer, layers_above
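
A hedged usage sketch of the parser: the module path, CAAML file name, and test depth below are placeholders, and only the class and method signatures come from the hunk above.

```python
# Hypothetical module path and file name; only SnowPilotParser's API is taken
# from the diff above.
from weac.utils.snowpilot import SnowPilotParser

parser = SnowPilotParser("snowpit.caaml.xml")
layers, density_methods = parser.extract_layers()  # WEAC layers, top to bottom

# Split slab and weak layer at an (illustrative) stability-test depth of 450 mm.
weak_layer, layers_above = parser.extract_weak_layer_and_layers_above(
    weak_layer_depth=450.0, layers=layers
)
```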
@@ -1,9 +1,9 @@
  Metadata-Version: 2.4
  Name: weac
- Version: 3.0.0
+ Version: 3.0.2
  Summary: Weak layer anticrack nucleation model
  Author-email: 2phi GbR <mail@2phi.de>
- License: Proprietary
+ License-Expression: MIT
  Project-URL: Homepage, https://github.com/2phi/weac
  Project-URL: Demo, https://github.com/2phi/weac/blob/main/demo/demo.ipynb
  Project-URL: Documentation, https://2phi.github.io/weac
@@ -11,7 +11,6 @@ Project-URL: Issues and feature requests, https://github.com/2phi/weac/issues
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
- Classifier: License :: Other/Proprietary License
  Classifier: Operating System :: OS Independent
  Classifier: Topic :: Scientific/Engineering
  Requires-Python: >=3.12
@@ -36,6 +35,9 @@ Requires-Dist: traitlets>=5.14.3; extra == "interactive"
  Provides-Extra: docs
  Requires-Dist: sphinx; extra == "docs"
  Requires-Dist: sphinxawesome-theme; extra == "docs"
+ Provides-Extra: build
+ Requires-Dist: build; extra == "build"
+ Requires-Dist: twine; extra == "build"
  Provides-Extra: dev
  Requires-Dist: nbclient>=0.10.0; extra == "dev"
  Requires-Dist: nbconvert>=7.16.4; extra == "dev"
@@ -55,7 +57,7 @@ Requires-Dist: pylint>=3.2.0; extra == "dev"
  Requires-Dist: pycodestyle>=2.11.1; extra == "dev"
  Requires-Dist: black>=24.4.0; extra == "dev"
  Requires-Dist: isort>=5.13.0; extra == "dev"
- Requires-Dist: bump2version>=1.0.1; extra == "dev"
+ Requires-Dist: bump-my-version; extra == "dev"
  Dynamic: license-file

  <!-- LOGO AND TITLE-->
@@ -128,12 +130,13 @@ Dynamic: license-file

  1. [About the project](#about-the-project)
  2. [Installation](#installation)
- 3. [Usage](#usage)
- 4. [Roadmap](#roadmap)
- 5. [Release history](#release-history)
- 6. [How to contribute](#how-to-contribute)
- 7. [License](#license)
- 8. [Contact](#contact)
+ 3. [Development Setup](#development-setup)
+ 4. [Usage](#usage)
+ 5. [Roadmap](#roadmap)
+ 6. [Release history](#release-history)
+ 7. [How to contribute](#how-to-contribute)
+ 8. [License](#license)
+ 9. [Contact](#contact)

  <!-- ABOUT THE PROJECT -->
  ## About the project
@@ -189,6 +192,100 @@ Needs (runtime dependencies are declared in [pyproject.toml](https://github.com/
  - [Snowpylot](https://github.com/connellymk/snowpylot) ≥ 1.1.3


+ <!-- DEVELOPMENT SETUP -->
+ ## Development Setup
+
+ This project uses [uv](https://github.com/astral-sh/uv) for fast Python package management and project handling.
+
+ ### Installing uv
+
+ Install uv following the [official installation guide](https://github.com/astral-sh/uv#installation):
+
+ ```bash
+ # On macOS and Linux
+ curl -LsSf https://astral.sh/uv/install.sh | sh
+
+ # Using pip (alternative)
+ pip install uv
+ ```
+
+ ### Setting up the development environment
+
+ Clone the repository and set up the development environment:
+
+ ```bash
+ git clone https://github.com/2phi/weac
+ cd weac
+
+ # Install Python 3.12+ if not already available
+ # uv will automatically use the version specified in .python-version
+
+ # For basic setup (if only running the package):
+ uv sync
+
+ # For development (recommended for contributors):
+ uv sync --extra dev
+
+ # Activate the virtual environment
+ source .venv/bin/activate # On Windows: .venv\Scripts\activate
+ ```
+
+ ### Running tests
+
+ Run the test suite using uv:
+
+ ```bash
+ # Run all tests
+ uv run python tests/run_tests.py
+
+ # Or use pytest directly (if installed)
+ uv run pytest
+ ```
+
+ ### Code formatting and linting
+
+ This project uses [ruff](https://github.com/astral-sh/ruff) for fast Python linting and formatting:
+
+ ```bash
+ # Format code
+ uv run ruff format .
+
+ # Check for linting issues
+ uv run ruff check .
+
+ # Fix auto-fixable linting issues
+ uv run ruff check . --fix
+ ```
+
+ ### Building the package
+
+ Build the package for distribution:
+
+ ```bash
+ # Build wheel and source distribution
+ uv build
+
+ # Install in editable mode for development
+ uv pip install -e .
+ ```
+
+ ### Additional uv commands
+
+ ```bash
+ # Update dependencies
+ uv sync --upgrade
+
+ # Add a new dependency
+ uv add package-name
+
+ # Add a development dependency
+ uv add --dev package-name
+
+ # Show environment info
+ uv run python --version
+ uv run pip list
+ ```
+
  <!-- USAGE EXAMPLES -->
  ## Usage