timesat_cli-1.4.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,216 @@
+ from __future__ import annotations
+ import math, os, datetime
+
+ def run(jsfile: str) -> None:
+
+     import numpy as np
+     import rasterio
+     import timesat # external dependency
+
+     from .config import load_config, build_param_array
+     from .readers import read_file_lists, open_image_data
+     from .fsutils import create_output_folders, memory_plan, close_all
+     from .writers import prepare_profiles, write_layers
+     from .dateutils import date_with_ignored_day, generate_output_timeseries_dates
+
+     VPP_NAMES = ["SOSD","SOSV","LSLOPE","EOSD","EOSV","RSLOPE","LENGTH",
+                  "MINV","MAXD","MAXV","AMPL","TPROD","SPROD"]
+
+     def _build_output_filenames(st_folder: str, vpp_folder: str, p_outindex, yrstart: int, yrend: int, p_ignoreday: int):
+         outyfitfn = []
+         outyfitqafn = []
+         for i_tv in p_outindex:
+             yfitdate = date_with_ignored_day(yrstart, int(i_tv), p_ignoreday)
+             outyfitfn.append(os.path.join(st_folder, f"TIMESAT_{yfitdate.strftime('%Y%m%d')}.tif"))
+             outyfitqafn.append(os.path.join(st_folder, f"TIMESAT_{yfitdate.strftime('%Y%m%d')}_QA.tif"))
+
+         outvppfn = []
+         outvppqafn = []
+         outnsfn = []
+         for i_yr in range(yrstart, yrend + 1):
+             for i_seas in range(2):
+                 for name in VPP_NAMES:
+                     outvppfn.append(os.path.join(vpp_folder, f"TIMESAT_{name}_{i_yr}_season_{i_seas+1}.tif"))
+                 outvppqafn.append(os.path.join(vpp_folder, f"TIMESAT_QA_{i_yr}_season_{i_seas+1}.tif"))
+             outnsfn.append(os.path.join(vpp_folder, f"TIMESAT_{i_yr}_numseason.tif"))
+         return outyfitfn, outyfitqafn, outvppfn, outvppqafn, outnsfn
+
+
+     print(jsfile)
+     cfg = load_config(jsfile)
+     s = cfg.settings
+
+     if s.outputfolder == '':
+         print('Nothing to do...')
+         return
+
+     # Precompute arrays once per block to pass into timesat
+     landuse_arr = build_param_array(s, 'landuse', 'uint8')
+     p_fitmethod_arr = build_param_array(s, 'p_fitmethod', 'uint8')
+     p_smooth_arr = build_param_array(s, 'p_smooth', 'double')
+     p_nenvi_arr = build_param_array(s, 'p_nenvi', 'uint8')
+     p_wfactnum_arr = build_param_array(s, 'p_wfactnum', 'double')
+     p_startmethod_arr = build_param_array(s, 'p_startmethod', 'uint8')
+     p_startcutoff_arr = build_param_array(s, 'p_startcutoff', 'double', shape=(2,), fortran_2d=True)
+     p_low_percentile_arr = build_param_array(s, 'p_low_percentile', 'double')
+     p_fillbase_arr = build_param_array(s, 'p_fillbase', 'uint8')
+     p_seasonmethod_arr = build_param_array(s, 'p_seasonmethod', 'uint8')
+     p_seapar_arr = build_param_array(s, 'p_seapar', 'double')
+
+
+     timevector, flist, qlist, yr, yrstart, yrend = read_file_lists(s.tv_list, s.image_file_list, s.quality_file_list)
+
+     z = len(flist)
+     print(f'num of images: {z}')
+     print('First image: ' + os.path.basename(flist[0]))
+     print('Last image: ' + os.path.basename(flist[-1]))
+     print(yrstart)
+
+     # -------load inputs----------------
+     use_s3 = getattr(s, "s3env", None)
+     if use_s3:
+         from .config_s3 import load_s3_config, build_rasterio_s3_opts, to_vsis3_paths
+         cfg_s3 = load_s3_config()
+         s3_opts = build_rasterio_s3_opts(cfg_s3)
+         flist = [to_vsis3_paths(s3_opts, cfg_s3["S3_BUCKET"], k) for k in flist]
+         qlist = [to_vsis3_paths(s3_opts, cfg_s3["S3_BUCKET"], k) for k in qlist] if qlist else []
+     else:
+         s3_opts = None
+
+     # batch_size = int(getattr(s, "read_batch_size", 32)) # recommended: 16–32 (S3), 64–128 (local SSD)
+
+     # ------load image info---------------
+     with rasterio.open(flist[0], "r") as temp:
+         img_profile = temp.profile
+
+     if sum(s.imwindow) == 0:
+         dx, dy = img_profile['width'], img_profile['height']
+     else:
+         dx, dy = int(s.imwindow[2]), int(s.imwindow[3])
+
+
+     # ------output-----------------
+     st_folder, vpp_folder = create_output_folders(s.outputfolder)
+
+     p_outindex, p_outindex_num = generate_output_timeseries_dates(s.p_st_timestep, yr, yrstart)
+
+     outyfitfn, outyfitqafn, outvppfn, outvppqafn, outnsfn = _build_output_filenames(st_folder, vpp_folder, p_outindex, yrstart, yrend, s.p_ignoreday)
+
+     img_profile_st, img_profile_vpp, img_profile_qa, img_profile_ns = prepare_profiles(img_profile, s.p_nodata, s.scale, s.offset)
+     # Open output datasets once and reuse them for all blocks
+     st_datasets = []
+     stqa_datasets = []
+     vpp_datasets = []
+     vppqa_datasets = []
+     ns_dataset = []
+
+     # VPP outputs
+     if s.outputvariables == 1:
+         for path in outvppfn:
+             ds = rasterio.open(path, "w", **img_profile_vpp)
+             vpp_datasets.append(ds)
+         for path in outvppqafn:
+             ds = rasterio.open(path, "w", **img_profile_qa)
+             vppqa_datasets.append(ds)
+         for path in outnsfn:
+             ds = rasterio.open(path, "w", **img_profile_ns)
+             ns_dataset.append(ds)
+
+     # ST (yfit) outputs
+     for path in outyfitfn:
+         ds = rasterio.open(path, "w", **img_profile_st)
+         st_datasets.append(ds)
+     for path in outyfitqafn:
+         ds = rasterio.open(path, "w", **img_profile_qa)
+         stqa_datasets.append(ds)
+
+
+     # compute blocks
+     y_slice_size, num_block = memory_plan(dx, dy, z, p_outindex_num, yr, s.max_memory_gb)
+     y_slice_end = dy % y_slice_size if (dy % y_slice_size) > 0 else y_slice_size
+     print('y_slice_size = ' + str(y_slice_size))
+
+     for iblock in range(num_block):
+         print(f'Processing block: {iblock + 1}/{num_block} starttime: {datetime.datetime.now()}')
+         x = dx
+         y = int(y_slice_size) if iblock != num_block - 1 else int(y_slice_end)
+         x_map = int(s.imwindow[0])
+         y_map = int(iblock * y_slice_size + s.imwindow[1])
+
+         # vi, qa, lc = open_image_data_batched(
+         #     x_map, y_map, x, y,
+         #     flist,
+         #     qlist,
+         #     (s.lc_file if s.lc_file else None),
+         #     img_profile['dtype'],
+         #     s.p_a,
+         #     s.p_band_id,
+         #     batch_size=batch_size,
+         #     s3_opts=s3_opts,
+         # )
+         vi, qa, lc = open_image_data(
+             x_map, y_map, x, y,
+             flist,
+             qlist,
+             (s.lc_file if s.lc_file else None),
+             img_profile['dtype'],
+             s.p_a,
+             s.p_band_id,
+         )
+
+         print('--- start TIMESAT processing --- starttime: ' + str(datetime.datetime.now()))
+
+         if s.scale != 1 or s.offset != 0:
+             vi = vi * s.scale + s.offset
+
+         vpp, vppqa, nseason, yfit, yfitqa, seasonfit, tseq = timesat.tsfprocess(
+             yr, vi, qa, timevector, lc, s.p_nclasses, landuse_arr, p_outindex,
+             s.p_ignoreday, s.p_ylu, s.p_printflag, p_fitmethod_arr, p_smooth_arr,
+             s.p_nodata, s.p_davailwin, s.p_outlier,
+             p_nenvi_arr, p_wfactnum_arr, p_startmethod_arr, p_startcutoff_arr,
+             p_low_percentile_arr, p_fillbase_arr, s.p_hrvppformat,
+             p_seasonmethod_arr, p_seapar_arr, s.outputvariables)
+
+         print('--- start writing geotif --- starttime: ' + str(datetime.datetime.now()))
+         window = (x_map, y_map, x, y)
+
+         if s.outputvariables == 1:
+             vpp = np.moveaxis(vpp, -1, 0)
+             write_layers(vpp_datasets, vpp, window)
+
+             vppqa = np.moveaxis(vppqa, -1, 0)
+             write_layers(vppqa_datasets, vppqa, window)
+
+             nseason = np.moveaxis(nseason, -1, 0)
+             write_layers(ns_dataset, nseason, window)
+
+         # Move to (t, y, x)
+         yfit = np.moveaxis(yfit, -1, 0)
+
+         nodata_val = img_profile_st.get("nodata", s.p_nodata)
+         yfit = np.nan_to_num(yfit, nan=nodata_val, posinf=nodata_val, neginf=nodata_val)
+
+         if s.scale == 1 and s.offset == 0:
+             yfit = yfit.astype(img_profile['dtype'])
+         else:
+             yfit = yfit.astype('float32')
+         write_layers(st_datasets, yfit, window)
+
+         yfitqa = np.moveaxis(yfitqa, -1, 0)
+         write_layers(stqa_datasets, yfitqa, window)
+
+         print(f'Block: {iblock + 1}/{num_block} finishedtime: {datetime.datetime.now()}')
+
+     close_all(
+         st_datasets,
+         stqa_datasets,
+     )
+
+     if s.outputvariables == 1:
+         close_all(
+             vpp_datasets,
+             vppqa_datasets,
+             ns_dataset,
+         )
+
+
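Note: in the block loop above, `memory_plan` decides `y_slice_size` and `num_block`, and the loop then walks the raster in row slices. A minimal sketch of that partitioning arithmetic, using assumed values for `dy` and `y_slice_size` rather than anything computed by the package:

```python
# Assumed numbers for illustration only; the real values come from memory_plan()
# and the settings file.
dy, y_slice_size = 1000, 300                      # raster height, rows per block
num_block = -(-dy // y_slice_size)                # ceiling division -> 4 blocks
y_slice_end = dy % y_slice_size or y_slice_size   # rows in the last block -> 100

for iblock in range(num_block):
    y = y_slice_size if iblock != num_block - 1 else y_slice_end
    y_map = iblock * y_slice_size                 # plus s.imwindow[1] when a sub-window is set
    print(iblock, y_map, y)                       # (0, 0, 300) (1, 300, 300) (2, 600, 300) (3, 900, 100)
```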
timesat_cli/qa.py ADDED
@@ -0,0 +1,25 @@
+ import numpy as np
+
+ __all__ = ["assign_qa_weight"]
+
+
+ def assign_qa_weight(p_a, qa: np.ndarray) -> np.ndarray:
+     """Map QA values to weights using rules in p_a."""
+     p_a = np.asarray(p_a)
+     if qa.size == 0:
+         return qa
+     qa_out = np.zeros_like(qa, dtype=float)
+     if p_a.size == 0:
+         return qa_out
+
+     if p_a.shape[1] == 2:
+         for qa_value, weight in p_a:
+             mask = (qa == qa_value)
+             qa_out[mask] = weight
+     elif p_a.shape[1] == 3:
+         for min_val, max_val, weight in p_a:
+             mask = (qa >= min_val) & (qa <= max_val)
+             qa_out[mask] = weight
+     else:
+         raise ValueError("p_a must have either 2 or 3 columns.")
+     return qa_out
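A quick illustration of how `assign_qa_weight` behaves for both rule shapes; the exact-match weights follow the HRVPP table in the README, while the range thresholds are made-up values:

```python
import numpy as np
from timesat_cli.qa import assign_qa_weight

qa = np.array([[1, 1025], [9999, 2049]])

# Two-column rules: [qa_value, weight]; values with no matching rule get weight 0.
print(assign_qa_weight([[1, 1.0], [1025, 0.5], [2049, 0.5]], qa))
# [[1.  0.5]
#  [0.  0.5]]

# Three-column rules: [min, max, weight] (thresholds here are illustrative only).
print(assign_qa_weight([[0, 2048, 1.0], [2049, 4096, 0.5]], qa))
# [[1.  1. ]
#  [0.  0.5]]
```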
timesat_cli/readers.py ADDED
@@ -0,0 +1,220 @@
+ from __future__ import annotations
+
+ import datetime
+ import os
+ import re
+
+ import numpy as np
+ import rasterio
+ from rasterio.windows import Window
+
+ from .qa import assign_qa_weight
+
+ __all__ = ["read_file_lists", "open_image_data", "open_image_data_batched"]
+
+ def _parse_dates_from_name(name: str) -> tuple[int, int, int]:
+     date_regex1 = r"\d{4}-\d{2}-\d{2}"
+     date_regex2 = r"\d{4}\d{2}\d{2}"
+     try:
+         dates = re.findall(date_regex1, name)
+         position = name.find(dates[0])
+         y = int(name[position : position + 4])
+         m = int(name[position + 5 : position + 7])
+         d = int(name[position + 8 : position + 10])
+         return y, m, d
+     except Exception:
+         try:
+             dates = re.findall(date_regex2, name)
+             position = name.find(dates[0])
+             y = int(name[position : position + 4])
+             m = int(name[position + 4 : position + 6])
+             d = int(name[position + 6 : position + 8])
+             return y, m, d
+         except Exception as e:
+             raise ValueError(f"No date found in filename: {name}") from e
+
+
+ def _read_time_vector(tlist: str, filepaths: list[str]):
+     """Return (timevector, yr, yrstart, yrend) in YYYYDOY format."""
+     flist = [os.path.basename(p) for p in filepaths]
+     timevector = np.ndarray(len(flist), order="F", dtype="uint32")
+     if tlist == "":
+         for i, fname in enumerate(flist):
+             y, m, d = _parse_dates_from_name(fname)
+             doy = (datetime.date(y, m, d) - datetime.date(y, 1, 1)).days + 1
+             timevector[i] = y * 1000 + doy
+     else:
+         with open(tlist, "r") as f:
+             lines = f.read().splitlines()
+         for idx, val in enumerate(lines):
+             n = len(val)
+             if n == 8:  # YYYYMMDD
+                 dt = datetime.datetime.strptime(val, "%Y%m%d")
+                 timevector[idx] = int(f"{dt.year}{dt.timetuple().tm_yday:03d}")
+             elif n == 7:  # YYYYDOY
+                 _ = datetime.datetime.strptime(val, "%Y%j")
+                 timevector[idx] = int(val)
+             else:
+                 raise ValueError(f"Unrecognized date format: {val}")
+
+     yrstart = int(np.floor(timevector.min() / 1000))
+     yrend = int(np.floor(timevector.max() / 1000))
+     yr = yrend - yrstart + 1
+     return timevector, yr, yrstart, yrend
+
+
+ def _unique_by_timevector(flist: list[str], qlist: list[str], timevector):
+     tv_unique, indices = np.unique(timevector, return_index=True)
+     flist2 = [flist[i] for i in indices]
+     qlist2 = [qlist[i] for i in indices] if qlist else []
+     return tv_unique, flist2, qlist2
+
+
+ def read_file_lists(
+     tlist: str, data_list: str, qa_list: str
+ ) -> tuple[np.ndarray, list[str], list[str], int, int, int]:
+     qlist: list[str] | str = ""
+     with open(data_list, "r") as f:
+         flist = f.read().splitlines()
+     if qa_list != "":
+         with open(qa_list, "r") as f:
+             qlist = f.read().splitlines()
+         if len(flist) != len(qlist):
+             raise ValueError("No. of Data and QA are not consistent")
+
+     timevector, yr, yrstart, yrend = _read_time_vector(tlist, flist)
+     timevector, flist, qlist = _unique_by_timevector(flist, qlist, timevector)
+     return (
+         timevector,
+         flist,
+         (qlist if isinstance(qlist, list) else []),
+         yr,
+         yrstart,
+         yrend,
+     )
+
+ def open_image_data(
+     x_map: int,
+     y_map: int,
+     x: int,
+     y: int,
+     data_files: list[str],
+     qa_files: list[str],
+     lc_file: str | None,
+     data_type: str,
+     p_a,
+     layer: int,
+ ):
+     """
+     Open each raster, read the window immediately, and close it.
+     Suitable for local paths or presigned HTTPS URLs.
+
+     NOTE: This does not use rasterio.Env (AWS options blocked in your env).
+     """
+     z = len(data_files)
+     if qa_files and len(qa_files) != z:
+         raise ValueError(f"qa_files length ({len(qa_files)}) must match data_files length ({z})")
+
+     win = Window(x_map, y_map, x, y)
+
+     # Allocate final outputs
+     vi = np.empty((y, x, z), order="F", dtype=data_type)
+     qa = np.empty((y, x, z), order="F", dtype=data_type)
+     lc = np.empty((y, x), order="F", dtype=np.uint8)
+
+     # 1) VI: open -> read -> close (per file)
+     for i, path in enumerate(data_files):
+         with rasterio.open(path, "r") as ds:
+             # Read returns (y, x) when a single band is selected
+             vi[:, :, i] = ds.read(layer, window=win)
+
+     # 2) QA: open -> read -> close (per file), or fill with ones
+     if not qa_files:
+         qa.fill(1)
+     else:
+         for i, path in enumerate(qa_files):
+             with rasterio.open(path, "r") as ds:
+                 # QA is commonly band 1; change if needed
+                 qa[:, :, i] = ds.read(1, window=win)
+     print('data read')
+     qa = assign_qa_weight(p_a, qa)
+
+     # 3) LC: open -> read -> close (once)
+     if not lc_file:
+         lc.fill(1)
+     else:
+         with rasterio.open(lc_file, "r") as ds:
+             lc[:, :] = ds.read(1, window=win)
+     if lc.dtype != np.uint8:
+         lc[:] = lc.astype(np.uint8, copy=False)
+
+     return vi, qa, lc
+
+
+ def open_image_data_batched(
+     x_map: int,
+     y_map: int,
+     x: int,
+     y: int,
+     data_files: list[str],
+     qa_files: list[str],
+     lc_file: str | None,
+     data_type: str,
+     p_a,
+     layer: int,
+     batch_size: int = 32,
+     s3_opts: dict | None = None,  # kept for API compatibility, but NOT used
+ ):
+     """
+     Read VI, QA, and LC blocks by opening datasets in small batches.
+
+     IMPORTANT:
+     - Do NOT use rasterio.Env(AWS_...) in this environment (blocked).
+     - For S3/S3-compatible, pass presigned HTTPS URLs in data_files/qa_files/lc_file.
+     """
+
+     z = len(data_files)
+     if qa_files and len(qa_files) != z:
+         raise ValueError(f"qa_files length ({len(qa_files)}) must match data_files length ({z})")
+
+     vi = np.empty((y, x, z), order="F", dtype=data_type)
+     qa = np.empty((y, x, z), order="F", dtype=data_type)
+     lc = np.empty((y, x), order="F", dtype=np.uint8)
+
+     win = Window(x_map, y_map, x, y)
+     def _read_stack(paths: list[str], out_arr: np.ndarray, band: int):
+         for j0 in range(0, z, batch_size):
+             j1 = min(z, j0 + batch_size)
+             dss = [rasterio.open(p, "r") for p in paths[j0:j1]]
+             try:
+                 for k, ds in enumerate(dss):
+                     ds.read(band, window=win, out=out_arr[:, :, j0 + k])
+             finally:
+                 for ds in dss:
+                     try:
+                         ds.close()
+                     except Exception:
+                         pass
+
+     # 1) VI
+     _read_stack(data_files, vi, band=layer)
+
+     # 2) QA
+     if not qa_files:
+         qa.fill(1)
+     else:
+         # QA is usually band 1; change if your QA files differ
+         _read_stack(qa_files, qa, band=1)
+     qa = assign_qa_weight(p_a, qa)
+
+     # 3) LC
+     if not lc_file:
+         lc.fill(1)
+     else:
+         with rasterio.open(lc_file, "r") as ds:
+             ds.read(1, window=win, out=lc)
+     if lc.dtype != np.uint8:
+         lc[:] = lc.astype(np.uint8, copy=False)
+
+     return vi, qa, lc
+
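For reference, `_read_time_vector` stores every acquisition as an integer YYYYDOY value (year * 1000 + day of year). A small worked example using a made-up filename:

```python
import datetime
from timesat_cli.readers import _parse_dates_from_name

# Any name containing a YYYY-MM-DD or YYYYMMDD date is accepted; this one is invented.
y, m, d = _parse_dates_from_name("VI_2021-03-01.tif")               # (2021, 3, 1)
doy = (datetime.date(y, m, d) - datetime.date(y, 1, 1)).days + 1    # 60
print(y * 1000 + doy)                                               # 2021060
```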
timesat_cli/writers.py ADDED
@@ -0,0 +1,47 @@
+ from __future__ import annotations
+
+ import copy
+
+ import numpy as np
+ import rasterio
+ from rasterio.windows import Window
+
+ __all__ = ["prepare_profiles", "write_layers"]
+
+
+ def prepare_profiles(img_profile, p_nodata: float, scale: float, offset: float):
+     img_profile_st = copy.deepcopy(img_profile)
+     img_profile_st.update(compress="lzw")
+     if scale != 1 or offset != 0:
+         img_profile_st.update(dtype=rasterio.float32)
+
+     img_profile_vpp = copy.deepcopy(img_profile)
+     img_profile_vpp.update(nodata=p_nodata, dtype=rasterio.float32, compress="lzw")
+
+     img_profile_qa = copy.deepcopy(img_profile)
+     img_profile_qa.update(nodata=0, dtype=rasterio.uint8, compress="lzw")
+
+     img_profile_ns = copy.deepcopy(img_profile)
+     img_profile_ns.update(nodata=255, dtype=rasterio.uint8, compress="lzw")
+
+     return img_profile_st, img_profile_vpp, img_profile_qa, img_profile_ns
+
+
+ def write_layers(
+     datasets: list[rasterio.io.DatasetWriter],
+     arrays: np.ndarray,
+     window: tuple[int, int, int, int],
+ ) -> None:
+     """
+     Write a block (window) for each array into the corresponding open dataset.
+
+     datasets : list of open rasterio DatasetWriter objects
+     arrays : np.ndarray with shape (n_layers, y, x) or iterable of 2D arrays
+     window : (x_map, y_map, x, y)
+     """
+     x_map, y_map, x, y = window
+     win = Window(x_map, y_map, x, y)
+
+     for i, arr in enumerate(arrays, 1):
+         dst = datasets[i - 1]
+         dst.write(arr, window=win, indexes=1)
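A minimal sketch of how `prepare_profiles` and `write_layers` fit together; the input path, output names, block shape, and nodata value below are hypothetical (in the package this wiring is done by the run module):

```python
import numpy as np
import rasterio
from timesat_cli.writers import prepare_profiles, write_layers

with rasterio.open("input_0.tif") as src:              # hypothetical input raster
    st, vpp, qa, ns = prepare_profiles(src.profile, p_nodata=-9999, scale=1, offset=0)

# One GeoTIFF per output layer, opened once and reused for every block.
datasets = [rasterio.open(f"TIMESAT_{i}.tif", "w", **st) for i in range(3)]
block = np.zeros((3, 256, 256), dtype=st["dtype"])     # (n_layers, y, x) for one window
write_layers(datasets, block, window=(0, 0, 256, 256))
for ds in datasets:
    ds.close()
```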
@@ -0,0 +1,199 @@
+ Metadata-Version: 2.4
+ Name: timesat-cli
+ Version: 1.4.3
+ Summary: Python-based command line interface for TIMESAT
+ Author: Zhanzhang Cai
+ License: GPL-3.0-only
+ License-File: LICENSE
+ Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Environment :: Console
+ Classifier: Intended Audience :: Science/Research
+ Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Topic :: Scientific/Engineering :: GIS
+ Requires-Python: >=3.10
+ Requires-Dist: boto3
+ Requires-Dist: dotenv
+ Requires-Dist: numpy
+ Requires-Dist: pandas
+ Requires-Dist: rasterio
+ Requires-Dist: timesat>=4.1.19
+ Description-Content-Type: text/markdown
+
+ # TIMESAT CLI
+
+ `TIMESAT CLI` is a command line interface and workflow manager for the [TIMESAT](https://pypi.org/project/timesat/) package.
+ It provides a convenient way to configure and execute TIMESAT processing pipelines directly from the command line or automated scripts.
+
+ ---
+
+ ## Requirements
+
+ Before you begin, make sure you have:
+
+ - **Miniconda** or **Anaconda** (for environment management)
+   Download: [https://docs.conda.io/en/latest/miniconda.html](https://docs.conda.io/en/latest/miniconda.html)
+ - **Python 3.10+**
+
+ ---
+
+ ## Installation
+
+ `timesat-cli` is available on **PyPI** and can be installed using **pip** or **uv**.
+ Although it is not published on Conda, you can safely install it *inside* a Conda environment.
+
+ ### Option 1 — Install inside a Conda environment
+
+ ```bash
+ conda create -n timesat-cli python=3.12
+ conda activate timesat-cli
+ pip install timesat-cli
+ ```
+
+ > This approach uses Conda only for environment isolation.
+ > The installation itself is handled by pip, which will automatically install `timesat` and all required dependencies.
+
+ ---
+
+ ### Option 2 — Install via uv (recommended for pure Python environments)
+
+ [`uv`](https://github.com/astral-sh/uv) is a modern, high-performance alternative to pip and venv.
+
+ 1. Install `uv`:
+
+ ```bash
+ pip install uv
+ # or
+ curl -LsSf https://astral.sh/uv/install.sh | sh
+ ```
+
+ 2. Create a virtual environment and install the package:
+
+ ```bash
+ uv venv .venv
+ source .venv/bin/activate
+ uv pip install timesat-cli
+ ```
+
+ > `uv` provides faster dependency resolution and caching.
+ > It will automatically install `timesat` and related dependencies.
+
+ ---
+
+ ### Option 3 — Direct installation with pip
+
+ If you already have Python 3.10+ installed:
+
+ ```bash
+ pip install timesat-cli
+ ```
+
+ ---
+
+
+ ## Running the Application
+
+ After installation, run the command line interface with:
+
+ ```bash
+ timesat-cli path/to/settings.json
+ ```
+
+ or equivalently:
+
+ ```bash
+ python -m timesat_cli path/to/settings.json
+ ```
+
+ ---
+
+ ## Advanced Usage
+
+ If you wish to customize or extend the workflow, you can also run or modify the main script directly:
+
+ ```bash
+ python timesat_run.py
+ ```
+
+ The file `timesat_run.py` contains the full example pipeline that invokes core modules from the `timesat_cli` package, including configuration loading, file management, TIMESAT processing, and output writing.
+
+ ---
+
+ ## HRVPP Notes — QFLAG2 weights
+ If you work with HRVPP quality flags (`QFLAG2`), the following weights `w` are commonly applied:
+
+ | QFLAG2 value | Weight `w` |
+ |---:|---:|
+ | 1 | 1.0 |
+ | 4097 | 1.0 |
+ | 8193 | 1.0 |
+ | 12289 | 1.0 |
+ | 1025 | 0.5 |
+ | 9217 | 0.5 |
+ | 2049 | 0.5 |
+ | 6145 | 0.5 |
+ | 3073 | 0.5 |
+
+ Example (settings.json):
+
+ ```json
+ "p_a": {
+     "value": [
+         [1, 1.0],
+         [4097, 1.0],
+         [8193, 1.0],
+         [12289, 1.0],
+         [1025, 0.5],
+         [9217, 0.5],
+         [2049, 0.5],
+         [6145, 0.5],
+         [3073, 0.5]
+     ],
+     "description": "QA weighting rules. Leave empty [] to keep original QA values. Use [qa_value, weight] for exact matches or [min, max, weight] for ranges."
+ }
+ ```
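If a QA layer is easier to describe by intervals, the same `value` field also accepts the three-column range form mentioned in the description above; the thresholds in this fragment are purely illustrative:

```json
"p_a": {
    "value": [
        [0, 2048, 1.0],
        [2049, 4096, 0.5]
    ],
    "description": "Range rules: QA values within [min, max] receive the given weight."
}
```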
+
+ ---
+
+ ## License
+
+ **TIMESAT-CLI** is released under the **GNU General Public License (GPL)**.
+ You are free to use, modify, and distribute this software under the terms of the GPL.
+
+ The GPL license applies **only to the TIMESAT-CLI source code and assets** provided in this repository.
+
+ ### 📦 Dependency Licenses
+
+ - `timesat` may install additional open-source dependencies (e.g., Flask, pandas, NumPy).
+ - Each dependency retains its own license (MIT, BSD, Apache, etc.).
+ - Before redistributing or bundling this software, review the license terms of each dependency carefully.
+
+ ### ⚖️ Summary
+
+ | Component | License Type | Notes |
+ |------------------|--------------|-------|
+ | TIMESAT-CLI | GPL v3 | Open source, modification and redistribution permitted under GPL. |
+ | TIMESAT | Proprietary | All rights reserved. Redistribution and modification prohibited without written consent. |
+ | Other Dependencies | Various (MIT/BSD/Apache) | Check individual package licenses before redistribution. |
+
+ For detailed license information, refer to the license files distributed with each installed package.
+
+ ---
+
+ ## Citation
+
+ If you use **TIMESAT**, **TIMESAT-CLI** or **TIMESAT-GUI** in your research, please cite the corresponding release on Zenodo:
+
+ > Cai, Z., Eklundh, L., & Jönsson, P. (2025). *TIMESAT4: A software package for analysing time-series of satellite sensor data* (Version 4.1.x) [Computer software]. Zenodo.
+ > [https://doi.org/10.5281/zenodo.17369757](https://doi.org/10.5281/zenodo.17369757)
+
+ ---
+
+ ## Acknowledgments
+
+ - [TIMESAT](https://www.nateko.lu.se/TIMESAT) — Original analysis framework for satellite time-series data.
+ - This project acknowledges the Swedish National Space Agency (SNSA), the European Environment Agency (EEA), and the European Space Agency (ESA) for their support and for providing access to satellite data and related resources that made this software possible.
+
+ ---
+