tsam-xarray 0.0.1a0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. tsam_xarray-0.0.1a0/.github/dependabot.yml +10 -0
  2. tsam_xarray-0.0.1a0/.github/workflows/ci.yaml +103 -0
  3. tsam_xarray-0.0.1a0/.github/workflows/dependabot-auto-merge.yaml +39 -0
  4. tsam_xarray-0.0.1a0/.github/workflows/pr-title.yaml +32 -0
  5. tsam_xarray-0.0.1a0/.github/workflows/publish.yaml +66 -0
  6. tsam_xarray-0.0.1a0/.github/workflows/release.yaml +27 -0
  7. tsam_xarray-0.0.1a0/.gitignore +35 -0
  8. tsam_xarray-0.0.1a0/.pre-commit-config.yaml +13 -0
  9. tsam_xarray-0.0.1a0/.readthedocs.yaml +19 -0
  10. tsam_xarray-0.0.1a0/.release-please-config.json +17 -0
  11. tsam_xarray-0.0.1a0/.release-please-manifest.json +3 -0
  12. tsam_xarray-0.0.1a0/CHANGELOG.md +19 -0
  13. tsam_xarray-0.0.1a0/CLAUDE.md +51 -0
  14. tsam_xarray-0.0.1a0/PKG-INFO +79 -0
  15. tsam_xarray-0.0.1a0/README.md +68 -0
  16. tsam_xarray-0.0.1a0/docs/changelog.md +1 -0
  17. tsam_xarray-0.0.1a0/docs/examples/getting-started.ipynb +219 -0
  18. tsam_xarray-0.0.1a0/docs/examples/multi-dim.ipynb +228 -0
  19. tsam_xarray-0.0.1a0/docs/examples/segmentation.ipynb +226 -0
  20. tsam_xarray-0.0.1a0/docs/gen_ref_pages.py +11 -0
  21. tsam_xarray-0.0.1a0/docs/index.md +41 -0
  22. tsam_xarray-0.0.1a0/docs/stylesheets/extra.css +71 -0
  23. tsam_xarray-0.0.1a0/mkdocs.yml +124 -0
  24. tsam_xarray-0.0.1a0/pyproject.toml +72 -0
  25. tsam_xarray-0.0.1a0/src/tsam_xarray/__init__.py +6 -0
  26. tsam_xarray-0.0.1a0/src/tsam_xarray/_core.py +457 -0
  27. tsam_xarray-0.0.1a0/src/tsam_xarray/_result.py +226 -0
  28. tsam_xarray-0.0.1a0/src/tsam_xarray/_sample_data.py +107 -0
  29. tsam_xarray-0.0.1a0/src/tsam_xarray/_version.py +34 -0
  30. tsam_xarray-0.0.1a0/test/test_aggregate.py +634 -0
@@ -0,0 +1,10 @@
1
+ version: 2
2
+ updates:
3
+ - package-ecosystem: github-actions
4
+ directory: /
5
+ schedule:
6
+ interval: weekly
7
+ - package-ecosystem: pip
8
+ directory: /
9
+ schedule:
10
+ interval: weekly
@@ -0,0 +1,103 @@
1
+ name: CI
2
+
3
+ on:
4
+ pull_request:
5
+ workflow_call:
6
+
7
+ concurrency:
8
+ group: ${{ github.workflow }}-${{ github.ref }}
9
+ cancel-in-progress: true
10
+
11
+ jobs:
12
+ lint:
13
+ name: Lint
14
+ runs-on: ubuntu-24.04
15
+ steps:
16
+ - uses: actions/checkout@v6
17
+ - uses: astral-sh/ruff-action@v3
18
+ - run: ruff check --output-format=github
19
+ - run: ruff format --diff
20
+
21
+ typecheck:
22
+ name: Type check
23
+ runs-on: ubuntu-24.04
24
+ steps:
25
+ - uses: actions/checkout@v6
26
+ with:
27
+ fetch-depth: 0
28
+
29
+ - uses: astral-sh/setup-uv@v7
30
+ with:
31
+ enable-cache: true
32
+
33
+ - uses: actions/setup-python@v6
34
+ with:
35
+ python-version: "3.12"
36
+
37
+ - name: Install
38
+ run: uv sync --group dev
39
+
40
+ - name: Mypy
41
+ run: uv run mypy src/
42
+
43
+ test:
44
+ name: Test (Python ${{ matrix.python-version }})
45
+ runs-on: ubuntu-24.04
46
+ strategy:
47
+ fail-fast: false
48
+ matrix:
49
+ python-version: ["3.12", "3.13", "3.14"]
50
+ steps:
51
+ - uses: actions/checkout@v6
52
+ with:
53
+ fetch-depth: 0
54
+
55
+ - uses: astral-sh/setup-uv@v7
56
+ with:
57
+ enable-cache: true
58
+
59
+ - uses: actions/setup-python@v6
60
+ with:
61
+ python-version: ${{ matrix.python-version }}
62
+
63
+ - name: Install
64
+ run: uv sync --group dev
65
+
66
+ - name: Run tests
67
+ run: uv run pytest --cov --cov-report=xml --cov-report=term-missing
68
+
69
+ - uses: codecov/codecov-action@v5
70
+ if: matrix.python-version == '3.12'
71
+ with:
72
+ token: ${{ secrets.CODECOV_TOKEN }}
73
+
74
+ docs:
75
+ name: Build docs
76
+ runs-on: ubuntu-24.04
77
+ steps:
78
+ - uses: actions/checkout@v6
79
+ with:
80
+ fetch-depth: 0
81
+
82
+ - uses: astral-sh/setup-uv@v7
83
+ with:
84
+ enable-cache: true
85
+
86
+ - uses: actions/setup-python@v6
87
+ with:
88
+ python-version: "3.12"
89
+
90
+ - name: Install
91
+ run: uv sync --group docs
92
+
93
+ - name: Build MkDocs
94
+ run: uv run mkdocs build --strict
95
+
96
+ ci-success:
97
+ name: CI Success
98
+ if: always()
99
+ needs: [lint, typecheck, test, docs]
100
+ runs-on: ubuntu-24.04
101
+ steps:
102
+ - if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')
103
+ run: exit 1
@@ -0,0 +1,39 @@
1
+ name: Dependabot auto-merge
2
+
3
+ on:
4
+ pull_request:
5
+
6
+ permissions:
7
+ contents: write
8
+ pull-requests: write
9
+
10
+ jobs:
11
+ auto-merge:
12
+ name: Auto-merge patch
13
+ if: github.actor == 'dependabot[bot]'
14
+ runs-on: ubuntu-24.04
15
+ steps:
16
+ - uses: dependabot/fetch-metadata@v2
17
+ id: metadata
18
+
19
+ - name: Generate app token
20
+ if: steps.metadata.outputs.update-type == 'version-update:semver-patch'
21
+ id: app-token
22
+ uses: actions/create-github-app-token@v3
23
+ with:
24
+ app-id: ${{ secrets.APP_ID }}
25
+ private-key: ${{ secrets.APP_PRIVATE_KEY }}
26
+
27
+ - name: Approve PR
28
+ if: steps.metadata.outputs.update-type == 'version-update:semver-patch'
29
+ run: gh pr review "$PR" --approve
30
+ env:
31
+ PR: ${{ github.event.pull_request.html_url }}
32
+ GH_TOKEN: ${{ steps.app-token.outputs.token }}
33
+
34
+ - name: Enable auto-merge
35
+ if: steps.metadata.outputs.update-type == 'version-update:semver-patch'
36
+ run: gh pr merge "$PR" --auto --squash
37
+ env:
38
+ PR: ${{ github.event.pull_request.html_url }}
39
+ GH_TOKEN: ${{ github.token }}
@@ -0,0 +1,32 @@
1
+ name: PR Title
2
+
3
+ on:
4
+ push:
5
+ branches: ["release-please--**"]
6
+ pull_request:
7
+ types: [opened, edited, synchronize, reopened]
8
+
9
+ jobs:
10
+ validate:
11
+ name: Validate conventional commit
12
+ if: github.event_name == 'pull_request'
13
+ runs-on: ubuntu-24.04
14
+ permissions:
15
+ pull-requests: read
16
+ steps:
17
+ - uses: amannn/action-semantic-pull-request@v6
18
+ with:
19
+ types: |
20
+ feat
21
+ fix
22
+ refactor
23
+ test
24
+ docs
25
+ chore
26
+ ci
27
+ build
28
+ perf
29
+ revert
30
+ style
31
+ env:
32
+ GITHUB_TOKEN: ${{ github.token }}
@@ -0,0 +1,66 @@
1
+ name: Publish
2
+
3
+ on:
4
+ push:
5
+ tags: ["v*"]
6
+
7
+ concurrency:
8
+ group: publish-${{ github.ref_name }}
9
+ cancel-in-progress: false
10
+
11
+ permissions:
12
+ contents: write
13
+ id-token: write
14
+ attestations: write
15
+
16
+ jobs:
17
+ publish:
18
+ name: Build & publish to PyPI
19
+ runs-on: ubuntu-24.04
20
+ timeout-minutes: 10
21
+ environment:
22
+ name: pypi
23
+ url: https://pypi.org/project/tsam_xarray
24
+ steps:
25
+ - uses: actions/checkout@v6
26
+ with:
27
+ ref: ${{ github.ref_name }}
28
+ fetch-depth: 0
29
+
30
+ - uses: astral-sh/setup-uv@v7
31
+ with:
32
+ enable-cache: true
33
+
34
+ - uses: actions/setup-python@v6
35
+ with:
36
+ python-version: "3.12"
37
+
38
+ - name: Build
39
+ run: uv build
40
+
41
+ - uses: pypa/gh-action-pypi-publish@release/v1
42
+
43
+ github-release:
44
+ name: Create GitHub Release
45
+ needs: [publish]
46
+ if: github.event_name == 'push' && needs.publish.result == 'success'
47
+ runs-on: ubuntu-24.04
48
+ steps:
49
+ - uses: actions/checkout@v6
50
+ with:
51
+ ref: ${{ github.ref_name }}
52
+
53
+ - name: Create GitHub Release
54
+ run: |
55
+ if gh release view "$TAG" &>/dev/null; then
56
+ echo "Release $TAG already exists, skipping."
57
+ exit 0
58
+ fi
59
+ if [[ "$TAG" == *-* ]]; then
60
+ gh release create "$TAG" --generate-notes --prerelease
61
+ else
62
+ gh release create "$TAG" --generate-notes
63
+ fi
64
+ env:
65
+ GH_TOKEN: ${{ github.token }}
66
+ TAG: ${{ github.ref_name }}
@@ -0,0 +1,27 @@
1
+ name: Release
2
+
3
+ on:
4
+ push:
5
+ branches: [main]
6
+
7
+ permissions:
8
+ contents: write
9
+ pull-requests: write
10
+
11
+ jobs:
12
+ release-please:
13
+ name: Release Please
14
+ runs-on: ubuntu-24.04
15
+ steps:
16
+ - uses: actions/create-github-app-token@v3
17
+ id: app-token
18
+ with:
19
+ app-id: ${{ secrets.APP_ID }}
20
+ private-key: ${{ secrets.APP_PRIVATE_KEY }}
21
+
22
+ - uses: googleapis/release-please-action@v4
23
+ id: release
24
+ with:
25
+ token: ${{ steps.app-token.outputs.token }}
26
+ config-file: .release-please-config.json
27
+ manifest-file: .release-please-manifest.json
@@ -0,0 +1,35 @@
1
+ # Python
2
+ __pycache__/
3
+ *.py[cod]
4
+ *.egg-info/
5
+ dist/
6
+ build/
7
+
8
+ # Virtual environments
9
+ .venv/
10
+
11
+ # IDE
12
+ .idea/
13
+ .vscode/
14
+ *.swp
15
+
16
+ # Coverage
17
+ htmlcov/
18
+ .coverage
19
+ .coverage.*
20
+
21
+ # mypy
22
+ .mypy_cache/
23
+
24
+ # hatch-vcs generated
25
+ src/tsam_xarray/_version.py
26
+
27
+ # uv
28
+ uv.lock
29
+
30
+ # Docs build
31
+ site/
32
+ .cache/
33
+
34
+ # OS
35
+ .DS_Store
@@ -0,0 +1,13 @@
1
+ repos:
2
+ - repo: https://github.com/kynan/nbstripout
3
+ rev: 0.9.0
4
+ hooks:
5
+ - id: nbstripout
6
+ files: ^docs/.*\.ipynb$
7
+
8
+ - repo: https://github.com/astral-sh/ruff-pre-commit
9
+ rev: v0.11.8
10
+ hooks:
11
+ - id: ruff
12
+ args: [--fix]
13
+ - id: ruff-format
@@ -0,0 +1,19 @@
1
+ version: 2
2
+
3
+ mkdocs:
4
+ configuration: mkdocs.yml
5
+ fail_on_warning: true
6
+
7
+ build:
8
+ os: ubuntu-24.04
9
+ tools:
10
+ python: "3.13"
11
+ jobs:
12
+ pre_create_environment:
13
+ - asdf plugin add uv
14
+ - asdf install uv latest
15
+ - asdf global uv latest
16
+ create_environment:
17
+ - uv venv "${READTHEDOCS_VIRTUALENV_PATH}"
18
+ install:
19
+ - UV_PROJECT_ENVIRONMENT="${READTHEDOCS_VIRTUALENV_PATH}" uv sync --group docs
@@ -0,0 +1,17 @@
1
+ {
2
+ "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json",
3
+ "bootstrap-sha": "98e8ae9ccca7b625103fcc1b63d9671491cf420d",
4
+ "include-component-in-tag": false,
5
+ "packages": {
6
+ ".": {
7
+ "release-type": "simple",
8
+ "package-name": "tsam_xarray",
9
+ "bump-minor-for-major-pre-major": true,
10
+ "bump-patch-for-minor-pre-major": true,
11
+ "changelog-path": "CHANGELOG.md",
12
+ "prerelease": true,
13
+ "prerelease-type": "alpha",
14
+ "initial-version": "0.0.1-alpha.0"
15
+ }
16
+ }
17
+ }
@@ -0,0 +1,3 @@
1
+ {
2
+ ".": "0.0.1-alpha.0"
3
+ }
@@ -0,0 +1,19 @@
1
+ # Changelog
2
+
3
+ ## 0.0.1-alpha.0 (2026-03-25)
4
+
5
+
6
+ ### Features
7
+
8
+ * dict-based weights API ([#31](https://github.com/FBumann/tsam_xarray/issues/31)) ([1141f61](https://github.com/FBumann/tsam_xarray/commit/1141f6113448f151f87ac65950aba087a479da1b))
9
+ * implement aggregate() API with stack_dims and slice_dims ([#9](https://github.com/FBumann/tsam_xarray/issues/9)) ([dc1070c](https://github.com/FBumann/tsam_xarray/commit/dc1070c2aec930214d1896c2f86782e0e5301add))
10
+ * per-dimension weight mapping for multi-dim cluster_dim ([#26](https://github.com/FBumann/tsam_xarray/issues/26)) ([18e62f5](https://github.com/FBumann/tsam_xarray/commit/18e62f555ffe9bcf99c5156101cc903347625b38))
11
+ * segment_durations as DataArray and disaggregate() method ([#28](https://github.com/FBumann/tsam_xarray/issues/28)) ([9358696](https://github.com/FBumann/tsam_xarray/commit/9358696b81011fe7708a51cd71962a9f00e27e02))
12
+
13
+
14
+ ### Bug Fixes
15
+
16
+ * configure release-please for 0.0.1-alpha prerelease ([#29](https://github.com/FBumann/tsam_xarray/issues/29)) ([8036b30](https://github.com/FBumann/tsam_xarray/commit/8036b30041a2c6df6329481ec36cd62efb8e92bd))
17
+ * replace remaining my-package placeholder in docs/index.md ([e4f5dc0](https://github.com/FBumann/tsam_xarray/commit/e4f5dc0229d24b75828f3ba02a85db3417c17ef5))
18
+
19
+ ## Changelog
@@ -0,0 +1,51 @@
1
+ # CLAUDE.md
2
+
3
+ ## Project
4
+
5
+ tsam_xarray — lightweight xarray wrapper for [tsam](https://github.com/FZJ-IEK3-VSA/tsam) v3 time series aggregation. DataArray-based API.
6
+
7
+ ## Commands
8
+
9
+ ```bash
10
+ uv run pytest test/ -v # tests
11
+ uv run mypy src/ # type check
12
+ uv run ruff check src/ test/ # lint
13
+ uv run ruff format src/ test/ # format
14
+ uv run mkdocs serve # docs (live reload)
15
+ uv run mkdocs build # docs (build)
16
+ ```
17
+
18
+ ## Architecture
19
+
20
+ - `src/tsam_xarray/_core.py` — `aggregate()` function, internal stack/unstack, slice loop
21
+ - `src/tsam_xarray/_result.py` — `AggregationResult`, `AccuracyMetrics` dataclasses
22
+ - `src/tsam_xarray/_sample_data.py` — `sample_energy_data()` for docs/testing (private)
23
+ - `src/tsam_xarray/__init__.py` — public re-exports (this is the public API surface)
24
+ - `_version.py` is auto-generated by hatch-vcs — do not edit, excluded from ruff
25
+
26
+ ## Key design decisions
27
+
28
+ - **DataArray in, DataArray out** — no Dataset API
29
+ - **`cluster_dim`** — required; dims to cluster together; stacked internally, results auto-unstacked
30
+ - **`time_dim`** — required; the time dimension
31
+ - **Auto-slicing** — all dims not in `time_dim` or `cluster_dim` are sliced independently
32
+ - **tsam v3 API only** — `tsam.aggregate()`, never the legacy `TimeSeriesAggregation`
33
+ - **MultiIndex passthrough** — pandas MultiIndex flows through tsam natively (no string separator)
34
+ - **Slice dims are uniform** — same params across all slices; per-slice params = user loops manually
35
+
36
+ ## When making changes
37
+
38
+ - Run `ruff format` before committing — CI checks formatting diffs
39
+ - Run `mypy src/` — strict mode, use `type: ignore` sparingly with specific error codes
40
+ - `_version.py` fails ruff — this is expected (auto-generated), already excluded
41
+ - Update or add a notebook in `docs/examples/` when adding user-facing features
42
+ - Notebooks are executed at docs build time — keep them fast (use `sample_energy_data(n_days=30)`)
43
+ - Use `xarray_plotly` for plots in notebooks (`.plotly.line()`, `.plotly.imshow()`, etc.)
44
+ - Use `.to_dataframe()` to show tabular data in notebooks
45
+ - Pre-commit strips notebook outputs via nbstripout
46
+
47
+ ## Conventions
48
+
49
+ - Conventional commits: `feat:`, `fix:`, `refactor:`, `docs:`, `test:`, `chore:`
50
+ - PRs to main — branch protection requires CI Success check
51
+ - release-please handles versioning (pre-v1 alpha mode)
@@ -0,0 +1,79 @@
1
+ Metadata-Version: 2.4
2
+ Name: tsam_xarray
3
+ Version: 0.0.1a0
4
+ Summary: Lightweight xarray wrapper for tsam time series aggregation
5
+ License-Expression: MIT
6
+ Requires-Python: >=3.12
7
+ Requires-Dist: bottleneck>=1.4
8
+ Requires-Dist: tsam>=3.2.0
9
+ Requires-Dist: xarray>=2024.1
10
+ Description-Content-Type: text/markdown
11
+
12
+ # tsam_xarray
13
+
14
+ Lightweight [xarray](https://xarray.dev/) wrapper for [tsam](https://github.com/FZJ-IEK3-VSA/tsam) time series aggregation.
15
+
16
+ ## Installation
17
+
18
+ ```bash
19
+ pip install tsam_xarray
20
+ ```
21
+
22
+ ## Quick start
23
+
24
+ ```python
25
+ import numpy as np
26
+ import pandas as pd
27
+ import xarray as xr
28
+ import tsam_xarray
29
+
30
+ # Create sample data: 30 days of hourly solar and wind data
31
+ time = pd.date_range("2020-01-01", periods=30 * 24, freq="h")
32
+ da = xr.DataArray(
33
+ np.random.default_rng(42).random((len(time), 2)),
34
+ dims=["time", "variable"],
35
+ coords={"time": time, "variable": ["solar", "wind"]},
36
+ )
37
+
38
+ # Aggregate to 4 typical days
39
+ result = tsam_xarray.aggregate(
40
+ da, time_dim="time", cluster_dim="variable", n_clusters=4,
41
+ )
42
+
43
+ result.typical_periods # (cluster, timestep, variable)
44
+ result.cluster_weights # (cluster,) — days each represents
45
+ result.accuracy.rmse # (variable,) — per-variable RMSE
46
+ result.reconstructed # same shape as input
47
+ ```
48
+
49
+ ## Multi-dimensional data
50
+
51
+ ```python
52
+ # 4D data: (time, variable, region, scenario)
53
+ da = xr.DataArray(...)
54
+
55
+ # Cluster variable × region together; scenario is sliced independently
56
+ result = tsam_xarray.aggregate(
57
+ da,
58
+ time_dim="time",
59
+ cluster_dim=["variable", "region"],
60
+ n_clusters=8,
61
+ )
62
+
63
+ result.typical_periods # (scenario, cluster, timestep, variable, region)
64
+ ```
65
+
66
+ All [tsam.aggregate()](https://github.com/FZJ-IEK3-VSA/tsam) keyword arguments pass through:
67
+
68
+ ```python
69
+ from tsam import ClusterConfig, SegmentConfig
70
+
71
+ result = tsam_xarray.aggregate(
72
+ da,
73
+ time_dim="time",
74
+ cluster_dim="variable",
75
+ n_clusters=8,
76
+ cluster=ClusterConfig(method="kmeans"),
77
+ segments=SegmentConfig(n_segments=6),
78
+ )
79
+ ```
@@ -0,0 +1,68 @@
1
+ # tsam_xarray
2
+
3
+ Lightweight [xarray](https://xarray.dev/) wrapper for [tsam](https://github.com/FZJ-IEK3-VSA/tsam) time series aggregation.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ pip install tsam_xarray
9
+ ```
10
+
11
+ ## Quick start
12
+
13
+ ```python
14
+ import numpy as np
15
+ import pandas as pd
16
+ import xarray as xr
17
+ import tsam_xarray
18
+
19
+ # Create sample data: 30 days of hourly solar and wind data
20
+ time = pd.date_range("2020-01-01", periods=30 * 24, freq="h")
21
+ da = xr.DataArray(
22
+ np.random.default_rng(42).random((len(time), 2)),
23
+ dims=["time", "variable"],
24
+ coords={"time": time, "variable": ["solar", "wind"]},
25
+ )
26
+
27
+ # Aggregate to 4 typical days
28
+ result = tsam_xarray.aggregate(
29
+ da, time_dim="time", cluster_dim="variable", n_clusters=4,
30
+ )
31
+
32
+ result.typical_periods # (cluster, timestep, variable)
33
+ result.cluster_weights # (cluster,) — days each represents
34
+ result.accuracy.rmse # (variable,) — per-variable RMSE
35
+ result.reconstructed # same shape as input
36
+ ```
37
+
38
+ ## Multi-dimensional data
39
+
40
+ ```python
41
+ # 4D data: (time, variable, region, scenario)
42
+ da = xr.DataArray(...)
43
+
44
+ # Cluster variable × region together; scenario is sliced independently
45
+ result = tsam_xarray.aggregate(
46
+ da,
47
+ time_dim="time",
48
+ cluster_dim=["variable", "region"],
49
+ n_clusters=8,
50
+ )
51
+
52
+ result.typical_periods # (scenario, cluster, timestep, variable, region)
53
+ ```
54
+
55
+ All [tsam.aggregate()](https://github.com/FZJ-IEK3-VSA/tsam) keyword arguments pass through:
56
+
57
+ ```python
58
+ from tsam import ClusterConfig, SegmentConfig
59
+
60
+ result = tsam_xarray.aggregate(
61
+ da,
62
+ time_dim="time",
63
+ cluster_dim="variable",
64
+ n_clusters=8,
65
+ cluster=ClusterConfig(method="kmeans"),
66
+ segments=SegmentConfig(n_segments=6),
67
+ )
68
+ ```
@@ -0,0 +1 @@
1
+ ../CHANGELOG.md