tsam 2.3.1.tar.gz → 2.3.3.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tsam-2.3.3/.github/workflows/daily_tests.yml +43 -0
- tsam-2.3.3/.github/workflows/test_on_push_and_pull.yml +71 -0
- tsam-2.3.3/Makefile +27 -0
- {tsam-2.3.1 → tsam-2.3.3}/PKG-INFO +15 -12
- {tsam-2.3.1 → tsam-2.3.3}/README.md +12 -2
- {tsam-2.3.1 → tsam-2.3.3}/requirements.txt +2 -2
- {tsam-2.3.1 → tsam-2.3.3}/requirements.yml +1 -1
- {tsam-2.3.1 → tsam-2.3.3}/requirements_dev.txt +2 -1
- tsam-2.3.3/requirements_dev.yml +11 -0
- {tsam-2.3.1 → tsam-2.3.3}/setup.py +3 -6
- {tsam-2.3.1 → tsam-2.3.3}/test/test_assert_raises.py +1 -1
- {tsam-2.3.1 → tsam-2.3.3}/test/test_cluster_order.py +4 -4
- {tsam-2.3.1 → tsam-2.3.3}/test/test_hierarchical.py +2 -2
- {tsam-2.3.1 → tsam-2.3.3}/test/test_hypertuneAggregation.py +3 -2
- {tsam-2.3.1 → tsam-2.3.3}/test/test_k_medoids.py +2 -2
- {tsam-2.3.1 → tsam-2.3.3}/test/test_segmentation.py +2 -2
- {tsam-2.3.1 → tsam-2.3.3}/test/test_subhourlyResolution.py +1 -1
- {tsam-2.3.1 → tsam-2.3.3}/tsam/timeseriesaggregation.py +11 -9
- {tsam-2.3.1 → tsam-2.3.3}/tsam/utils/durationRepresentation.py +4 -4
- {tsam-2.3.1 → tsam-2.3.3}/tsam/utils/k_medoids_contiguity.py +7 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam/utils/k_medoids_exact.py +5 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam.egg-info/PKG-INFO +15 -12
- {tsam-2.3.1 → tsam-2.3.3}/tsam.egg-info/SOURCES.txt +3 -1
- {tsam-2.3.1 → tsam-2.3.3}/tsam.egg-info/requires.txt +2 -2
- tsam-2.3.1/.github/workflows/pytest.yml +0 -36
- tsam-2.3.1/requirements_dev.yml +0 -8
- {tsam-2.3.1 → tsam-2.3.3}/.gitignore +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/.gitlab-ci.yml +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/.readthedocs.yml +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/LICENSE.txt +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/MANIFEST.in +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/Makefile +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/make.bat +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/_static/logo.png +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/_static/tsam-logo.png +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/conf.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/durationRepresentationDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/exactKmedoidsDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/furtherReadingDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/gettingStartedDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/hypertunedaggregationDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/index.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/installationDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/integratedSoftwareDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/kmaxoidsDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/legalNoticeDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/mathematicalBackgroundDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/newsDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/periodAggregationDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/representationsDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/segmentationDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/structureOfTsamDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/docs/source/timeseriesaggregationDoc.rst +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/aggregation_example.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/aggregation_method_showcase.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/aggregation_optiinput.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/aggregation_representation.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/aggregation_segment_period_animation.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/aggregation_segment_period_building_timeseries.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/aggregation_segment_period_opti.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/aggregation_segmentation.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/example_k_maxoids.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/get_clustercenter_indices.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/predefined_sequence_example.ipynb +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/results/paretoOptimalAggregation.csv +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/results/preprocessed_wind.csv +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/results/testperiods_hierarchical.csv +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/results/testperiods_kmeans.csv +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/results/testperiods_kmedoids.csv +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/results/testperiods_predefClusterOrder.csv +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/results/testperiods_predefClusterOrderAndClusterCenters.csv +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/results/testperiods_segmentation.csv +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/examples/testdata.csv +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/setup.cfg +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_accuracyIndicators.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_adjacent_periods.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_aggregate_hiearchical.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_averaging.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_durationCurve.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_durationRepresentation.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_extremePeriods.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_k_maxoids.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_k_medoids_contiguity.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_minmaxRepresentation.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_preprocess.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_properties.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_samemean.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_subhourly_periods.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/test/test_weightingFactors.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam/__init__.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam/hyperparametertuning.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam/periodAggregation.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam/representations.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam/utils/__init__.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam/utils/k_maxoids.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam/utils/segmentation.py +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam.egg-info/dependency_links.txt +0 -0
- {tsam-2.3.1 → tsam-2.3.3}/tsam.egg-info/top_level.txt +0 -0

tsam-2.3.3/.github/workflows/daily_tests.yml
ADDED

@@ -0,0 +1,43 @@
+# This tests are run daily to check incompatibilties introduced by new versions of dependencies
+name: Daily tsam tests
+on:
+  schedule:
+    # * is a special character in YAML so you have to quote this string
+    # Some Examples for cron syntax https://crontab.guru/examples.html
+    # Schedules job at any point after 12 pm
+    - cron: '0 0 * * *'
+    # Weekly after sunday
+    # - cron: 0 0 * * 0
+
+jobs:
+  PythonAndOsTest:
+    name: Test for Python ${{matrix.python-version}} on ${{matrix.os}}
+    runs-on: ${{matrix.os}}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: ["ubuntu-latest","ubuntu-20.04", "macos-latest","macos-13","macos-12", "windows-latest","windows-2019"]
+        # os: ["ubuntu-latest"]
+        python-version: [ "3.9", "3.10", "3.11", "3.12"]
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{matrix.python-version}}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install pytest
+          pip install pytest-cov
+          pip install codecov
+          pip install -r requirements.txt
+          pip install --no-cache-dir -e .
+
+      - name: Test with pytest
+        working-directory: ./test/
+        run: |
+          pytest
+          codecov
+
tsam-2.3.3/.github/workflows/test_on_push_and_pull.yml
ADDED

@@ -0,0 +1,71 @@
+# This workflow will install Python dependencies and run tests and lint with a single version of Python
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+# Based on David Neuroth pylpg
+
+name: Test on Push and Pull
+
+on:
+  push:
+  pull_request:
+    branches: [ master ]
+
+jobs:
+  PythonAndOsTest:
+    name: Test for Python ${{matrix.python-version}} on ${{matrix.os}}
+    runs-on: ${{matrix.os}}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: ["ubuntu-latest","ubuntu-20.04", "macos-latest","macos-13","macos-12", "windows-latest","windows-2019"]
+        python-version: [ "3.9", "3.10", "3.11", "3.12"]
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{matrix.python-version}}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install pytest
+          pip install pytest-cov
+          pip install codecov
+          pip install -r requirements.txt
+          pip install --no-cache-dir -e .
+
+      - name: Test with pytest
+        working-directory: ./test/
+        run: |
+          pytest
+          codecov
+
+  NumpyTest:
+    name: Test for numpy ${{matrix.python-numpy-version.numpy}} and python ${{matrix.python-numpy-version.python}}
+    runs-on: ${{matrix.os}}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: ["ubuntu-latest","ubuntu-20.04", "macos-latest","macos-13","macos-12", "windows-latest","windows-2019"]
+        python-numpy-version: [ {python : 3.9,numpy : 1.25}, {python : 3.9,numpy : 1.26},{python : 3.9,numpy : 2.0}]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{matrix.python-numpy-version.python}}
+      - name: Install dependencies
+        run: |
+          python -m pip install numpy==${{matrix.python-numpy-version.numpy}} --upgrade pip
+          pip install pytest
+          pip install pytest-cov
+          pip install codecov
+          pip install -r requirements.txt
+          pip install --no-cache-dir -e .
+
+      - name: Test with pytest
+        working-directory: ./test/
+        run: |
+          pytest
+          codecov

tsam-2.3.3/Makefile
ADDED

@@ -0,0 +1,27 @@
+#!make
+LOCAL_VENVS_DIR=~/.virtualenvs
+PROJECT_NAME=tsam
+PYTHON=python3.11
+LOCAL_VENV_DIR := ${LOCAL_VENVS_DIR}/${PROJECT_NAME}
+
+
+test:
+	. ${LOCAL_VENV_DIR}/bin/activate; pytest
+
+sdist:
+	. ${LOCAL_VENV_DIR}/bin/activate; ${PYTHON} setup.py sdist
+
+upload:
+	twine upload dist/*
+
+clean:
+	rm dist/*
+
+dist: sdist upload clean
+
+
+
+setup_venv:
+	mkdir -p ${LOCAL_VENVS_DIR}
+	${PYTHON} -m venv ${LOCAL_VENV_DIR}
+	. ${LOCAL_VENV_DIR}/bin/activate; pip install -r requirements.txt; pip install -e .

{tsam-2.3.1 → tsam-2.3.3}/PKG-INFO

@@ -1,13 +1,11 @@
 Metadata-Version: 2.1
 Name: tsam
-Version: 2.3.1
+Version: 2.3.3
 Summary: Time series aggregation module (tsam) to create typical periods
 Home-page: https://github.com/FZJ-IEK3-VSA/tsam
 Author: Leander Kotzur, Maximilian Hoffmann
-Author-email: leander.kotzur@googlemail.com,
-License: UNKNOWN
+Author-email: leander.kotzur@googlemail.com, maximilian.hoffmann@julumni.fz-juelich.de
 Keywords: clustering,optimization
-Platform: UNKNOWN
 Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: End Users/Desktop
 Classifier: Intended Audience :: Science/Research

@@ -16,17 +14,14 @@ Classifier: Natural Language :: English
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
 Classifier: Topic :: Scientific/Engineering :: Mathematics
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE.txt
 
-[](https://github.com/FZJ-IEK3-VSA/tsam/actions) [](https://pypi.python.org/pypi/tsam) [](https://anaconda.org/conda-forge/tsam) [](https://tsam.readthedocs.io/en/latest/) []((https://github.com/FZJ-IEK3-VSA/tsam/blob/master/LICENSE.txt)) [](https://codecov.io/gh/FZJ-IEK3-VSA/tsam)
 [](https://mybinder.org/v2/gh/FZJ-IEK3-VSA/voila-tsam/HEAD?urlpath=voila/render/Time-Series-Aggregation-Module.ipynb)
 
 <a href="https://www.fz-juelich.de/en/iek/iek-3"><img src="https://www.fz-juelich.de/static/media/Logo.2ceb35fc.svg" alt="Forschungszentrum Juelich Logo" width="230px"></a>

@@ -47,10 +42,14 @@ The documentation of the tsam code can be found [**here**](https://tsam.readthed
 
 
 ## Installation
-Directly install via pip as follows:
+Directly install via pip from pypi as follows:
 
 	pip install tsam
 
+of install from conda forge with the following command:
+
+	conda install tsam -c conda-forge
+
 Alternatively, clone a local copy of the repository to your computer
 
 	git clone https://github.com/FZJ-IEK3-VSA/tsam.git

@@ -65,6 +64,12 @@ Or install directly via python as
 	python setup.py install
 
 In order to use the k-medoids clustering, make sure that you have installed a MILP solver. As default [HiGHS](https://github.com/ERGO-Code/HiGHS) is used. Nevertheless, in case you have access to a license we recommend commercial solvers (e.g. Gurobi or CPLEX) since they have a better performance.
+
+### Developer installation
+
+In order to setup a virtual environment in Linux, correct the python name in the Makefile and call
+
+	make setup_venv
 
 
 ## Examples

@@ -161,5 +166,3 @@ This work is supported by the Helmholtz Association under the Joint Initiative [
 <a href="https://www.helmholtz.de/en/"><img src="https://www.helmholtz.de/fileadmin/user_upload/05_aktuelles/Marke_Design/logos/HG_LOGO_S_ENG_RGB.jpg" alt="Helmholtz Logo" width="200px" style="float:right"></a>
 
 
-
-

{tsam-2.3.1 → tsam-2.3.3}/README.md

@@ -1,4 +1,4 @@
-[](https://github.com/FZJ-IEK3-VSA/tsam/actions) [](https://pypi.python.org/pypi/tsam) [](https://anaconda.org/conda-forge/tsam) [](https://tsam.readthedocs.io/en/latest/) []((https://github.com/FZJ-IEK3-VSA/tsam/blob/master/LICENSE.txt)) [](https://codecov.io/gh/FZJ-IEK3-VSA/tsam)
 [](https://mybinder.org/v2/gh/FZJ-IEK3-VSA/voila-tsam/HEAD?urlpath=voila/render/Time-Series-Aggregation-Module.ipynb)
 
 <a href="https://www.fz-juelich.de/en/iek/iek-3"><img src="https://www.fz-juelich.de/static/media/Logo.2ceb35fc.svg" alt="Forschungszentrum Juelich Logo" width="230px"></a>

@@ -19,10 +19,14 @@ The documentation of the tsam code can be found [**here**](https://tsam.readthed
 
 
 ## Installation
-Directly install via pip as follows:
+Directly install via pip from pypi as follows:
 
 	pip install tsam
 
+of install from conda forge with the following command:
+
+	conda install tsam -c conda-forge
+
 Alternatively, clone a local copy of the repository to your computer
 
 	git clone https://github.com/FZJ-IEK3-VSA/tsam.git

@@ -37,6 +41,12 @@ Or install directly via python as
 	python setup.py install
 
 In order to use the k-medoids clustering, make sure that you have installed a MILP solver. As default [HiGHS](https://github.com/ERGO-Code/HiGHS) is used. Nevertheless, in case you have access to a license we recommend commercial solvers (e.g. Gurobi or CPLEX) since they have a better performance.
+
+### Developer installation
+
+In order to setup a virtual environment in Linux, correct the python name in the Makefile and call
+
+	make setup_venv
 
 
 ## Examples

{tsam-2.3.1 → tsam-2.3.3}/setup.py

@@ -8,14 +8,15 @@ with open(os.path.join(dir_path, "README.md"), "r") as fh:
 
 setuptools.setup(
     name="tsam",
-    version="2.3.1",
+    version="2.3.3",
     author="Leander Kotzur, Maximilian Hoffmann",
-    author_email="leander.kotzur@googlemail.com,
+    author_email="leander.kotzur@googlemail.com, maximilian.hoffmann@julumni.fz-juelich.de",
     description="Time series aggregation module (tsam) to create typical periods",
     long_description=long_description,
     long_description_content_type="text/markdown",
     url="https://github.com/FZJ-IEK3-VSA/tsam",
     include_package_data=True,
+    python_requires='>=3.9',
     packages=setuptools.find_packages(),
     install_requires=required_packages,
     setup_requires=["setuptools-git"],

@@ -28,11 +29,7 @@ setuptools.setup(
         "Operating System :: OS Independent",
         "Programming Language :: Python",
         "Programming Language :: Python :: 2",
-        "Programming Language :: Python :: 2.7",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.4",
-        "Programming Language :: Python :: 3.5",
-        "Programming Language :: Python :: 3.6",
         "Topic :: Scientific/Engineering :: Mathematics",
         "Topic :: Software Development :: Libraries :: Python Modules",
     ],

{tsam-2.3.1 → tsam-2.3.3}/test/test_assert_raises.py

@@ -221,7 +221,7 @@ def test_assert_raises():
 
     # check erroneous dataframe containing NaN values
     rawNan = copy.deepcopy((raw))
-    rawNan.iloc[10, :] = np.NaN
+    rawNan.iloc[10, :] = np.nan
     aggregation = tsam.TimeSeriesAggregation(timeSeries=rawNan)
     np.testing.assert_raises_regex(
         ValueError,

{tsam-2.3.1 → tsam-2.3.3}/test/test_cluster_order.py

@@ -103,21 +103,21 @@ def test_cluster_order():
         orig_raw_predefClusterOrder[typPeriods_predefClusterOrder.columns]
         .unstack()
         .loc[sortedDaysOrig1, :]
-        .stack()
+        .stack(future_stack=True,)
     )
-    test1 = typPeriods_predefClusterOrder.unstack().loc[sortedDaysTest1, :].stack()
+    test1 = typPeriods_predefClusterOrder.unstack().loc[sortedDaysTest1, :].stack(future_stack=True,)
     orig2 = (
         orig_raw_predefClusterOrderAndClusterCenters[
             typPeriods_predefClusterOrderAndClusterCenters.columns
         ]
         .unstack()
         .loc[sortedDaysOrig2, :]
-        .stack()
+        .stack(future_stack=True,)
     )
     test2 = (
         typPeriods_predefClusterOrderAndClusterCenters.unstack()
         .loc[sortedDaysTest2, :]
-        .stack()
+        .stack(future_stack=True,)
     )
 
     np.testing.assert_array_almost_equal(

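A note on the recurring `.stack()` → `.stack(future_stack=True,)` change in the tests above and in the library code below: pandas 2.1 deprecated the legacy `DataFrame.stack` implementation, which silently drops rows that are entirely NaN after reshaping, in favour of a new implementation that is opted into with `future_stack=True` and is slated to become the default behaviour. A minimal, self-contained sketch of the difference (illustration only, not part of the tsam sources):

```python
# Illustration only (not tsam code): behaviour difference behind future_stack=True.
import numpy as np
import pandas as pd

df = pd.DataFrame(
    [[1.0, np.nan], [3.0, 4.0]],
    index=["day1", "day2"],
    columns=pd.MultiIndex.from_product([["GHI"], [0, 1]], names=[None, "TimeStep"]),
)

legacy = df.stack()                # deprecated path: emits a FutureWarning, drops all-NaN rows
new = df.stack(future_stack=True)  # new path: keeps the NaN entry, no warning

print(len(legacy), len(new))  # 3 4 on pandas 2.1/2.2
```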
{tsam-2.3.1 → tsam-2.3.3}/test/test_hierarchical.py

@@ -46,8 +46,8 @@ def test_hierarchical():
     sortedDaysTest = typPeriods.groupby(level=0).sum().sort_values("GHI").index
 
     # rearange their order
-    orig = orig_raw[typPeriods.columns].unstack().loc[sortedDaysOrig, :].stack()
-    test = typPeriods.unstack().loc[sortedDaysTest, :].stack()
+    orig = orig_raw[typPeriods.columns].unstack().loc[sortedDaysOrig, :].stack(future_stack=True,)
+    test = typPeriods.unstack().loc[sortedDaysTest, :].stack(future_stack=True,)
 
     np.testing.assert_array_almost_equal(orig.values, test.values, decimal=4)
 
{tsam-2.3.1 → tsam-2.3.3}/test/test_hypertuneAggregation.py

@@ -1,5 +1,6 @@
 import os
 import time
+import pytest
 
 import pandas as pd
 import numpy as np

@@ -74,7 +75,7 @@ def test_optimalPair():
     assert windPeriods * windSegments <= len(raw["Wind"])*datareduction
     assert windPeriods * windSegments >= len(raw["Wind"])*datareduction * 0.8
 
-
+@pytest.mark.skip(reason="This test is too slow")
 def test_steepest_gradient_leads_to_optima():
     """
     Based on the hint of Eva Simarik, check if the RMSE is for the optimized combination

@@ -147,7 +148,7 @@ def test_paretoOptimalAggregation():
         raw,
         hoursPerPeriod=12,
         clusterMethod="hierarchical",
-        representationMethod="
+        representationMethod="meanRepresentation",
         distributionPeriodWise=False,
         rescaleClusterPeriods=False,
         segmentation=True,

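For context on the marker introduced above: `pytest.mark.skip` skips the decorated test unconditionally at collection time and reports the given reason in the test summary. A tiny self-contained example (not from the tsam test suite):

```python
# Tiny example of an unconditional skip marker (hypothetical test, not tsam code).
import time

import pytest


@pytest.mark.skip(reason="This test is too slow")
def test_expensive_search():
    time.sleep(600)  # never runs while the marker is present; pytest reports it as skipped
    assert True
```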
{tsam-2.3.1 → tsam-2.3.3}/test/test_k_medoids.py

@@ -43,8 +43,8 @@ def test_k_medoids():
     sortedDaysTest = typPeriods.groupby(level=0).sum().sort_values("GHI").index
 
     # rearange their order
-    orig = orig_raw[typPeriods.columns].unstack().loc[sortedDaysOrig, :].stack()
-    test = typPeriods.unstack().loc[sortedDaysTest, :].stack()
+    orig = orig_raw[typPeriods.columns].unstack().loc[sortedDaysOrig, :].stack(future_stack=True,)
+    test = typPeriods.unstack().loc[sortedDaysTest, :].stack(future_stack=True,)
 
     np.testing.assert_array_almost_equal(orig.values, test.values, decimal=4)
 
{tsam-2.3.1 → tsam-2.3.3}/test/test_segmentation.py

@@ -46,8 +46,8 @@ def test_segmentation():
     sortedDaysTest = typPeriods.groupby(level=0).sum().sort_values("GHI").index
 
     # rearange their order
-    orig = orig_raw[typPeriods.columns].unstack().loc[sortedDaysOrig, :].stack()
-    test = typPeriods.unstack().loc[sortedDaysTest, :].stack()
+    orig = orig_raw[typPeriods.columns].unstack().loc[sortedDaysOrig, :].stack(future_stack=True,)
+    test = typPeriods.unstack().loc[sortedDaysTest, :].stack(future_stack=True,)
 
     np.testing.assert_array_almost_equal(orig.values, test.values, decimal=4)
 
{tsam-2.3.1 → tsam-2.3.3}/tsam/timeseriesaggregation.py

@@ -8,7 +8,6 @@ import pandas as pd
 import numpy as np
 
 from sklearn.metrics import mean_squared_error, mean_absolute_error
-from sklearn.metrics.pairwise import euclidean_distances
 from sklearn import preprocessing
 
 from tsam.periodAggregation import aggregatePeriods

@@ -27,6 +26,9 @@ TOLERANCE = 1e-6
 MIN_WEIGHT = 1e-6
 
 
+
+
+
 def unstackToPeriods(timeSeries, timeStepsPerPeriod):
     """
     Extend the timeseries to an integer multiple of the period length and

@@ -388,21 +390,21 @@ class TimeSeriesAggregation(object):
         try:
             timedelta = self.timeSeries.index[1] - self.timeSeries.index[0]
             self.resolution = float(timedelta.total_seconds()) / 3600
-        except AttributeError:
+        except AttributeError as exc:
             raise ValueError(
                 "'resolution' argument has to be nonnegative float or int"
                 + " or the given timeseries needs a datetime index"
-            )
+            ) from exc
         except TypeError:
             try:
                 self.timeSeries.index = pd.to_datetime(self.timeSeries.index)
                 timedelta = self.timeSeries.index[1] - self.timeSeries.index[0]
                 self.resolution = float(timedelta.total_seconds()) / 3600
-            except:
+            except Exception as exc:
                 raise ValueError(
                     "'resolution' argument has to be nonnegative float or int"
                     + " or the given timeseries needs a datetime index"
-                )
+                ) from exc
 
         if not (isinstance(self.resolution, int) or isinstance(self.resolution, float)):
             raise ValueError("resolution has to be nonnegative float or int")

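The `except ... as exc:` / `) from exc` edits above add explicit exception chaining: the original error raised inside the `try` block is kept as the `__cause__` of the `ValueError`, and replacing the bare `except:` with `except Exception` stops the handler from also swallowing `KeyboardInterrupt` and `SystemExit`. A short, hypothetical illustration of the same pattern (not tsam code):

```python
# Hypothetical helper illustrating the chaining pattern used above (not tsam code).
def hours_between_first_two(index):
    try:
        delta = index[1] - index[0]
        return float(delta.total_seconds()) / 3600
    except AttributeError as exc:
        # "from exc" attaches the original failure as __cause__, so the traceback
        # still shows what went wrong inside the try block.
        raise ValueError(
            "'resolution' argument has to be nonnegative float or int"
            " or the given timeseries needs a datetime index"
        ) from exc


try:
    hours_between_first_two([0, 5])  # plain ints have no .total_seconds()
except ValueError as err:
    print(type(err.__cause__).__name__)  # AttributeError
```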
@@ -870,9 +872,9 @@ class TimeSeriesAggregation(object):
             )
 
             # reset values higher than the upper sacle or less than zero
-            typicalPeriods[column].clip(lower=0, upper=scale_ub
+            typicalPeriods[column] = typicalPeriods[column].clip(lower=0, upper=scale_ub)
 
-            typicalPeriods[column].fillna(0.0
+            typicalPeriods[column] = typicalPeriods[column].fillna(0.0)
 
             # calc new sum and new diff to orig data
             sum_clu_wo_peak = np.sum(

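The two rescaling lines above now assign the results of `clip` and `fillna` back to the column. Both methods return new objects, and modifying a selected column in place (or via chained calls) is not reliable under pandas copy-on-write, so explicit reassignment is the robust form. A sketch with made-up data (not tsam code):

```python
# Sketch of the reassignment pattern with made-up data (not tsam code).
import pandas as pd

typicalPeriods = pd.DataFrame({"Load": [-5.0, 3.0, None, 12.0]})
scale_ub = 10.0

# clip/fillna return new Series; write them back instead of relying on in-place edits.
typicalPeriods["Load"] = typicalPeriods["Load"].clip(lower=0, upper=scale_ub)
typicalPeriods["Load"] = typicalPeriods["Load"].fillna(0.0)

print(typicalPeriods["Load"].tolist())  # [0.0, 3.0, 0.0, 10.0]
```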
@@ -967,7 +969,7 @@ class TimeSeriesAggregation(object):
         # check for additional cluster parameters
         if self.evalSumPeriods:
             evaluationValues = (
-                self.normalizedPeriodlyProfiles.stack(level=0)
+                self.normalizedPeriodlyProfiles.stack(future_stack=True,level=0)
                 .sum(axis=1)
                 .unstack(level=1)
             )

@@ -1237,7 +1239,7 @@ class TimeSeriesAggregation(object):
             columns=self.normalizedPeriodlyProfiles.columns,
             index=self.normalizedPeriodlyProfiles.index,
         )
-        clustered_data_df = clustered_data_df.stack(level="TimeStep")
+        clustered_data_df = clustered_data_df.stack(future_stack=True,level="TimeStep")
 
         # back in form
         self.normalizedPredictedData = pd.DataFrame(

{tsam-2.3.1 → tsam-2.3.3}/tsam/utils/durationRepresentation.py

@@ -57,7 +57,7 @@ def durationRepresentation(
             # get all the values of a certain attribute and cluster
             candidateValues = candidates.loc[indice[0], a]
             # sort all values
-            sortedAttr = candidateValues.stack().sort_values()
+            sortedAttr = candidateValues.stack(future_stack=True,).sort_values()
             # reindex and arrange such that every sorted segment gets represented by its mean
             sortedAttr.index = pd.MultiIndex.from_tuples(clean_index)
             representationValues = sortedAttr.unstack(level=0).mean(axis=1)

@@ -97,8 +97,8 @@ def durationRepresentation(
             # concat centroid values and cluster weights for all clusters
             meansAndWeights = pd.concat(
                 [
-                    pd.DataFrame(np.array(meanVals)).stack(),
-                    pd.DataFrame(np.array(clusterLengths)).stack(),
+                    pd.DataFrame(np.array(meanVals)).stack(future_stack=True,),
+                    pd.DataFrame(np.array(clusterLengths)).stack(future_stack=True,),
                 ],
                 axis=1,
             )

@@ -107,7 +107,7 @@ def durationRepresentation(
             # save order of the sorted centroid values across all clusters
             order = meansAndWeightsSorted.index
             # sort all values of the original time series
-            sortedAttr = candidates.loc[:, a].stack().sort_values().values
+            sortedAttr = candidates.loc[:, a].stack(future_stack=True,).sort_values().values
             # take mean of sections of the original duration curve according to the cluster and its weight the
             # respective section is assigned to
             representationValues = []

{tsam-2.3.1 → tsam-2.3.3}/tsam/utils/k_medoids_exact.py

@@ -5,6 +5,11 @@ import numpy as np
 from sklearn.base import BaseEstimator, ClusterMixin, TransformerMixin
 from sklearn.metrics.pairwise import PAIRWISE_DISTANCE_FUNCTIONS
 from sklearn.utils import check_array
+
+# switch to numpy 2.0
+np.float_ = np.float64
+np.complex_=np.complex128
+
 import pyomo.environ as pyomo
 import pyomo.opt as opt
 from pyomo.contrib import appsi

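The five added lines above are a compatibility shim for NumPy 2.0, which removed the `np.float_` and `np.complex_` aliases; code further down the import chain (presumably in the Pyomo/solver stack imported right below) still references them, so they are recreated as `np.float64`/`np.complex128`. A version-guarded variant of the same idea, as a sketch (assumption: only patch when the aliases are actually missing):

```python
# Sketch: restore NumPy 1.x scalar aliases only if NumPy >= 2.0 removed them.
import numpy as np

if not hasattr(np, "float_"):
    np.float_ = np.float64        # alias removed in NumPy 2.0
if not hasattr(np, "complex_"):
    np.complex_ = np.complex128   # alias removed in NumPy 2.0
```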
{tsam-2.3.1 → tsam-2.3.3}/tsam.egg-info/PKG-INFO

The changes are identical to those in {tsam-2.3.1 → tsam-2.3.3}/PKG-INFO above: version bumped to 2.3.3, the author e-mail completed, the UNKNOWN license and platform fields dropped, the Python 2.7/3.4/3.5/3.6 classifiers removed, Requires-Python: >=3.9 added, and the embedded README updated with the conda-forge install command and the developer installation section.

{tsam-2.3.1 → tsam-2.3.3}/tsam.egg-info/SOURCES.txt

@@ -3,13 +3,15 @@
 .readthedocs.yml
 LICENSE.txt
 MANIFEST.in
+Makefile
 README.md
 requirements.txt
 requirements.yml
 requirements_dev.txt
 requirements_dev.yml
 setup.py
-.github/workflows/
+.github/workflows/daily_tests.yml
+.github/workflows/test_on_push_and_pull.yml
 docs/Makefile
 docs/make.bat
 docs/source/conf.py

tsam-2.3.1/.github/workflows/pytest.yml
DELETED

@@ -1,36 +0,0 @@
-# This workflow will install Python dependencies and run tests and lint with a single version of Python
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
-
-# Based on David Neuroth pylpg
-
-name: pytest
-
-on:
-  push:
-  pull_request:
-    branches: [ master ]
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python 3.10
-      uses: actions/setup-python@v2
-      with:
-        python-version: '3.10'
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install pytest
-        pip install pytest-cov
-        pip install codecov
-        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
-        pip install --no-cache-dir -e .
-
-    - name: Test with pytest
-      working-directory: ./test/
-      run: |
-        pytest
-        codecov

tsam-2.3.1/requirements_dev.yml
DELETED

All other files listed above are carried over from tsam-2.3.1 to tsam-2.3.3 without content changes.
|