timewise 1.0.0a1__py3-none-any.whl → 1.0.0a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
timewise/process/interface.py CHANGED
@@ -1,6 +1,7 @@
  from pathlib import Path
  import logging
  from typing import Iterable, List, cast
+ from importlib.util import find_spec
 
  import numpy as np
  from numpy import typing as npt
@@ -8,12 +9,19 @@ import pandas as pd
  from pymongo import MongoClient, ASCENDING
  from pymongo.collection import Collection
  from pymongo.database import Database
- from ampel.cli.JobCommand import JobCommand
- from ampel.types import DataPointId, StockId
+
+ if find_spec("ampel.core"):
+     AMPEL_EXISTS = True
+     from ampel.cli.JobCommand import JobCommand
+ else:
+     AMPEL_EXISTS = False
 
 
  logger = logging.getLogger(__name__)
 
+ # copy from ampel.types
+ StockId = int | bytes | str
+
 
  class AmpelInterface:
      def __init__(
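
The guarded import above is the standard optional-dependency pattern: probe for the package with `importlib.util.find_spec` (which returns `None` instead of raising when a top-level module is absent), record the result in a module-level flag, and only import inside the guard. A minimal sketch of the same pattern, using a hypothetical optional package `fancy_lib`:

```python
from importlib.util import find_spec

# find_spec returns None for a missing top-level package,
# so this probe never raises ImportError.
if find_spec("fancy_lib"):
    FANCY_LIB_EXISTS = True
    import fancy_lib  # safe: the spec was found
else:
    FANCY_LIB_EXISTS = False


def run_fancy_feature() -> None:
    # Fail at call time instead of import time, mirroring the
    # check added to AmpelInterface.run() below.
    if not FANCY_LIB_EXISTS:
        raise ModuleNotFoundError(
            "the optional dependency 'fancy_lib' is not installed"
        )
    fancy_lib.do_something()  # hypothetical API
```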
@@ -71,6 +79,10 @@ class AmpelInterface:
          return self.make_ampel_job_file(cfg_path)
 
      def run(self, timewise_cfg_path: Path, ampel_config_path: Path):
+         if not AMPEL_EXISTS:
+             raise ModuleNotFoundError(
+                 "You are trying to run ampel but it is not installed!"
+             )
          ampel_job_path = self.prepare(timewise_cfg_path)
          cmd = JobCommand()
          parser = cmd.get_parser()
@@ -123,7 +135,7 @@ class AmpelInterface:
              index.append(ic["id"])
          return pd.DataFrame(records, index=index)
 
-     def extract_selected_datapoint_ids(self, stock_id: StockId) -> List[DataPointId]:
+     def extract_selected_datapoint_ids(self, stock_id: StockId) -> List[int]:
          d = self.t1.find_one({"stock": stock_id})
          if d is None:
              return []
timewise/process/stacking.py CHANGED
@@ -26,6 +26,8 @@ def calculate_epochs(
      visit_mask: npt.NDArray[np.int64],
      counts: npt.NDArray[np.int64],
      remove_outliers: bool,
+     outlier_threshold: float,
+     outlier_quantile: float,
      outlier_mask: npt.NDArray[np.bool_] | None = None,
  ) -> tuple[
      npt.NDArray[np.float64],
@@ -70,7 +72,7 @@ def calculate_epochs(
      # --------------------- remove outliers in the bins ---------------------- #
 
      # if we do not want to clean outliers just set the threshold to infinity
-     outlier_thresh = np.inf if not remove_outliers else 20
+     _outlier_threshold = np.inf if not remove_outliers else outlier_threshold
 
      # set up empty masks
      outlier_mask = cast(
@@ -156,15 +158,16 @@ def calculate_epochs(
      # take the maximum value of the measured single exposure errors and the standard deviation
      u = np.maximum(std, e_meas)
 
-     # calculate 90% confidence interval
-     u70 = np.zeros_like(counts, dtype=float)
-     u70[one_points_mask] = 1e-10
+     # Estimate the spread of the flux.
+     # To be robust against outliers, do that with quantiles instead of std
+     qs = np.zeros_like(counts, dtype=float)
+     qs[one_points_mask] = 1e-10
      visits_at_least_two_point = np.unique(visit_mask[~one_points_mask[visit_mask]])
-     u70[visits_at_least_two_point] = np.array(
+     qs[visits_at_least_two_point] = np.array(
          [
              np.quantile(
                  abs(f[(visit_mask == i) & use_mask] - median[i]),
-                 0.7,
+                 outlier_quantile,
                  method="interpolated_inverted_cdf",
              )
              for i in visits_at_least_two_point
@@ -173,7 +176,7 @@
 
      # --------------------- remove outliers in the bins ---------------------- #
      remaining_outliers = (
-         abs(median[visit_mask] - f) > outlier_thresh * u70[visit_mask]
+         abs(median[visit_mask] - f) > _outlier_threshold * qs[visit_mask]
      ) & ~outlier_mask
      outlier_mask |= remaining_outliers
      n_remaining_outlier = sum(remaining_outliers) if remove_outliers else 0
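
Taken together, the last two hunks make the outlier rejection configurable: the per-visit spread is estimated from a quantile of absolute deviations from the median (robust, unlike a standard deviation), and points beyond `outlier_threshold` times that spread are masked. A self-contained illustration with synthetic data, using the formerly hard-coded values 0.7 and 20:

```python
import numpy as np

rng = np.random.default_rng(seed=0)
f = rng.normal(loc=10.0, scale=0.5, size=50)  # one visit's fluxes
f[3] = 25.0  # plant an obvious outlier

median = np.median(f)

# Quantile-based spread estimate: barely affected by the planted
# outlier, whereas np.std(f) would be strongly inflated by it.
qs = np.quantile(abs(f - median), 0.7, method="interpolated_inverted_cdf")

# Flag points further than 20 spread estimates from the median.
remaining_outliers = abs(f - median) > 20 * qs
print(np.where(remaining_outliers)[0])  # -> [3]
```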
@@ -187,7 +190,12 @@
      return median, u, bin_ulim_bool, outlier_mask, use_mask, n_points
 
 
- def stack_visits(lightcurve: pd.DataFrame, clean_outliers: bool = True):
+ def stack_visits(
+     lightcurve: pd.DataFrame,
+     outlier_threshold: float,
+     outlier_quantile: float,
+     clean_outliers: bool = True,
+ ):
      """
      Combine the data by visits of the satellite of one region in the sky.
      The visits typically consist of some tens of observations. The individual visits are separated by about
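
Callers must now supply the formerly hard-coded values explicitly. A hypothetical wrapper showing the updated call; the import path is inferred from the RECORD below, and 20/0.7 are simply the constants this diff removed, not documented package defaults:

```python
import pandas as pd
from timewise.process.stacking import stack_visits  # path inferred from RECORD


def stack_with_legacy_settings(lightcurve: pd.DataFrame):
    # `lightcurve` is the single-exposure photometry expected by
    # stack_visits (it must at least carry an `mjd` column).
    return stack_visits(
        lightcurve,
        outlier_threshold=20.0,  # was hard-coded as 20
        outlier_quantile=0.7,    # was hard-coded as 0.7
        clean_outliers=True,
    )
```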
@@ -204,7 +212,7 @@ def stack_visits(lightcurve: pd.DataFrame, clean_outliers: bool = True):
      """
 
      # ------------------------- create visit mask -------------------------- #
-     visit_map = get_visit_map(lightcurve)
+     visit_map = get_visit_map(lightcurve.mjd)
      counts = np.bincount(visit_map)
 
      stacked_data: Dict[str, Any] = dict()
@@ -235,6 +243,8 @@
          counts,
          remove_outliers=remove_outliers,
          outlier_mask=outlier_mask,
+         outlier_quantile=outlier_quantile,
+         outlier_threshold=outlier_threshold,
      )
      n_outliers = np.sum(outlier_mask)
 
@@ -300,6 +310,8 @@
              counts,
              remove_outliers=False,
              outlier_mask=outlier_masks[keys.FLUX_EXT],
+             outlier_threshold=outlier_threshold,
+             outlier_quantile=outlier_quantile,
          )
      )
      stacked_data[f"{b}{keys.MEAN}{keys.FLUX_DENSITY_EXT}"] = mean_fd
timewise/query/base.py CHANGED
@@ -1,6 +1,6 @@
  import abc
- from typing import ClassVar, List
- from pydantic import BaseModel
+ from typing import ClassVar, List, Self
+ from pydantic import BaseModel, model_validator
  from hashlib import sha256
 
  from ..tables import TableType
@@ -22,6 +22,13 @@ class Query(abc.ABC, BaseModel):
      columns: List[str]
      table: TableType
 
+     @model_validator(mode="after")
+     def check_columns(self) -> Self:
+         for column in self.columns:
+             if column not in self.table.columns_dtypes:
+                 raise KeyError(f"{column} not found in table {self.table.name}")
+         return self
+
      @property
      @abc.abstractmethod
      def input_columns(self) -> dict[str, str]: ...
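
The new validator runs in pydantic's "after" mode, i.e. on the fully constructed model, so cross-field checks against `self.table` are safe there. A minimal self-contained sketch of the same pydantic v2 pattern (the `Table` model below is a hypothetical stand-in for timewise's `TableType`):

```python
from typing import List, Self
from pydantic import BaseModel, model_validator


class Table(BaseModel):  # hypothetical stand-in for TableType
    name: str
    columns_dtypes: dict[str, str]


class Query(BaseModel):
    columns: List[str]
    table: Table

    @model_validator(mode="after")
    def check_columns(self) -> Self:
        # mode="after" validators receive the constructed instance,
        # so all fields are already parsed and available.
        for column in self.columns:
            if column not in self.table.columns_dtypes:
                raise KeyError(f"{column} not found in table {self.table.name}")
        return self


table = Table(name="demo", columns_dtypes={"mjd": "float64", "flux": "float64"})
Query(columns=["mjd"], table=table)   # passes validation
# Query(columns=["ra"], table=table)  # raises KeyError
```

Note that pydantic converts only ValueError and AssertionError into a ValidationError, so the KeyError raised here propagates unchanged.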
timewise/util/visits.py CHANGED
@@ -3,31 +3,31 @@ import pandas as pd
  import numpy.typing as npt
 
 
- def get_visit_map(lightcurve: pd.DataFrame) -> npt.NDArray[np.int64]:
+ def get_visit_map(
+     mjd: npt.NDArray[np.float64] | pd.Series,
+ ) -> npt.NDArray[np.int64]:
      """
      Create a map datapoint to visit
 
-     :param lightcurve: the raw lightcurve
-     :type lightcurve: pd.DataFrame
+     :param mjd: the MJDs of the observations
+     :type mjd: npt.NDArray[float]
      :returns: visit map
      :rtype: npt.ArrayLike
      """
      # ------------------------- find epoch intervals -------------------------- #
-     sorted_mjds = np.sort(lightcurve.mjd)
+     sorted_mjds = np.sort(mjd)
      epoch_bounds_mask = (sorted_mjds[1:] - sorted_mjds[:-1]) > 100
      epoch_bins = np.array(
-         [
-             lightcurve.mjd.min() * 0.99
-         ]  # this makes sure that the first datapoint gets selected
+         [mjd.min() * 0.99]  # this makes sure that the first datapoint gets selected
          + list(
              ((sorted_mjds[1:] + sorted_mjds[:-1]) / 2)[epoch_bounds_mask]
          )  # finding the middle between
          +
          # two visits
          [
-             lightcurve.mjd.max() * 1.01
+             mjd.max() * 1.01
          ]  # this just makes sure that the last datapoint gets selected as well
      )
 
-     visit_mask = np.digitize(lightcurve.mjd, epoch_bins) - 1
+     visit_mask = np.digitize(mjd, epoch_bins) - 1
      return visit_mask
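
`get_visit_map` now takes the MJD array directly instead of a full lightcurve DataFrame. Its logic, splitting wherever consecutive sorted MJDs are more than 100 days apart and then assigning each point to a bin with `np.digitize`, can be exercised standalone on synthetic data:

```python
import numpy as np

# Two synthetic visits: a cluster of observations around MJD 59000
# and another around MJD 59180 (more than 100 days later).
mjd = np.array([59000.1, 59000.3, 59000.2, 59180.0, 59180.4])

sorted_mjds = np.sort(mjd)
epoch_bounds_mask = (sorted_mjds[1:] - sorted_mjds[:-1]) > 100
epoch_bins = np.array(
    [mjd.min() * 0.99]  # catch the first point
    + list(((sorted_mjds[1:] + sorted_mjds[:-1]) / 2)[epoch_bounds_mask])
    + [mjd.max() * 1.01]  # catch the last point
)
visit_map = np.digitize(mjd, epoch_bins) - 1
print(visit_map)  # -> [0 0 0 1 1]
```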
timewise-1.0.0a5.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: timewise
- Version: 1.0.0a1
+ Version: 1.0.0a5
  Summary: Download WISE infrared data for many objects and process them with AMPEL
  License: MIT
  License-File: LICENSE
@@ -10,12 +10,13 @@ Requires-Python: >=3.11,<3.12
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.11
+ Provides-Extra: ampel
  Provides-Extra: dev
  Provides-Extra: docs
- Requires-Dist: ampel-alerts (==0.10.3a5)
- Requires-Dist: ampel-core (>=0.10.4.post0,<0.11.0)
- Requires-Dist: ampel-photometry (>=0.10.1,<0.11.0)
- Requires-Dist: ampel-plot (>=0.9.1,<0.10.0)
+ Requires-Dist: ampel-alerts (==0.10.3a5) ; extra == "ampel"
+ Requires-Dist: ampel-core (>=0.10.4.post0,<0.11.0) ; extra == "ampel"
+ Requires-Dist: ampel-photometry (>=0.10.1,<0.11.0) ; extra == "ampel"
+ Requires-Dist: ampel-plot (>=0.9.1,<0.10.0) ; extra == "ampel"
  Requires-Dist: astropy (>=5.1,<6.0.0)
  Requires-Dist: autodoc_pydantic[erdantic] (>=2.2.0,<3.0.0) ; extra == "docs"
  Requires-Dist: backoff (>=2.1.2,<3.0.0)
@@ -36,7 +37,6 @@ Requires-Dist: ruff (>=0.13.0,<0.14.0) ; extra == "dev"
  Requires-Dist: scikit-image (>=0.19.3,<0.22.0)
  Requires-Dist: scikit-learn (>=1.3.0,<2.0.0)
  Requires-Dist: scipy-stubs (>=1.16.2.0,<2.0.0.0) ; extra == "dev"
- Requires-Dist: seaborn (>=0.11.2,<0.14.0)
  Requires-Dist: sphinx-rtd-theme (>=1.3.0,<2.0.0) ; extra == "docs"
  Requires-Dist: tqdm (>=4.64.0,<5.0.0)
  Requires-Dist: typer (>=0.19.2,<0.20.0)
@@ -60,23 +60,33 @@ Description-Content-Type: text/markdown
  This package downloads WISE data for positions on the sky and stacks single-exposure photometry per visit
 
  ## Prerequisites
+ Python version 3.11.
 
- `timewise` makes use of [AMPEL](https://ampelproject.github.io/ampelastro/) and needs a running [MongoDB](https://www.mongodb.com/).
+ If you want to not only download individual exposure photometry but also stack detections per visit (see below),
+ you must have access to a running [MongoDB](https://www.mongodb.com/)*.
+
+ <sub>* On macOS, have a look at the custom `brew` tap
+ [here](https://stackoverflow.com/questions/57856809/installing-mongodb-with-homebrew)
+ to get the MongoDB community edition.</sub>
 
  ## Installation
- The package can be installed via `pip`:
+
+ ### If you use timewise only for downloading
+ The package can be installed via `pip` (but make sure to install the v1 pre-release):
  ```bash
- pip install timewise
+ pip install --pre timewise==1.0.0a5
+ ```
+ ### If you use timewise also for stacking individual exposures
+ You must install with the `ampel` extra:
+ ```bash
+ pip install --pre timewise[ampel]==1.0.0a5
  ```
-
  To tell AMPEL which modules, aka units, to use, build the corresponding configuration file:
  ```bash
  ampel config build -distributions ampel timewise -stop-on-errors 0 -out <path-to-ampel-config-file>
  ```
 
- ## Usage
-
- ### Command line interface
+ ## Command line interface
 
  ```
  Usage: timewise [OPTIONS] COMMAND [ARGS]...
@@ -161,40 +171,50 @@ ampel:
  This configuration file will be the input to all subcommands. Downloading and stacking can be run together or separately.
 
 
- #### All-in-one:
- Run download, stacking, and export:
+ ### To only download the data:
  ```bash
- timewise run-chain <path-to-config-file> <path-to-ampel-config-file> <output-directory>
+ timewise download <path-to-config-file>
  ```
+ The photometry can be found in FITS files in the working directory specified in the configuration file,
+ along with metadata JSON files. These tell `timewise` which queries have already completed (per chunk) so the
+ download process can be interrupted and restarted at a later time.
+
+ ### Stack individual exposures by visits
+ As mentioned above, this needs installation with the `ampel` extra.
+
 
- #### Separate download and processing:
- To only download the data:
+ To **execute the stacking** after the download:
  ```bash
- timewise download <path-to-config-file>
+ timewise process <path-to-config-file> <path-to-ampel-config-file>
  ```
 
- To execute the stacking:
+ Make some **diagnostic plots** to check the datapoint selection and binning:
  ```bash
- timewise process <path-to-config-file> <path-to-ampel-config-file>
+ timewise plot <path-to-config-file> <indices-to-plot> <output-directory>
  ```
 
- #### Run AMPEL manually
- Prepare an AMPEL job file for stacking the single-exposure data:
+ As a shortcut, you can also run **download, stacking, and export in one command**:
  ```bash
- timewise prepare-ampel <path-to-config-file>
+ timewise run-chain <path-to-config-file> <path-to-ampel-config-file> <output-directory>
  ```
- The result will contain the path to the prepared AMPEL job file that can be run with
+
+ For more configuration options of the stacking, you can **run AMPEL manually**.
+
+ 1. Prepare an AMPEL job file for stacking the single-exposure data:
  ```bash
- ampel job -config <path-to-ampel-config-file> -schema <path-to-ampel-job-file>
+ timewise prepare-ampel <path-to-config-file>
  ```
+ The result will contain the path to the prepared AMPEL job file.
 
- #### Make some diagnostic plots
- To check the datapoint selection and binning, take a quick look at the data:
+ 2. Run the AMPEL job
  ```bash
- timewise plot <path-to-config-file> <indices-to-plot> <output-directory>
+ ampel job -config <path-to-ampel-config-file> -schema <path-to-ampel-job-file>
  ```
 
 
+
+
+
  ## Citation
  If you use `timewise` please make sure to cite [Necker et al. A&A 695, A228 (2025)](https://www.aanda.org/articles/aa/abs/2025/03/aa51340-24/aa51340-24.html).
  Additionally, you might want to include a reference to the specific version you are using: [![DOI](https://zenodo.org/badge/449677569.svg)](https://zenodo.org/badge/latestdoi/449677569)
timewise-1.0.0a5.dist-info/RECORD CHANGED
@@ -1,27 +1,38 @@
- timewise/__init__.py,sha256=pHLUWHD0-i3VhQGA7h59p8IgmMU_fyxItpb1wjckTmY,24
+ ampel/timewise/alert/TimewiseAlertSupplier.py,sha256=RmAWJVOfObaI1x0gt9agRt45WrVj1nhQchaMQ9QgUIA,4127
+ ampel/timewise/alert/load/TimewiseFileLoader.py,sha256=ChjOpQpyGTA-H_fjnvQGwa37IFfvfJanPmnVKORVElo,4271
+ ampel/timewise/ingest/TiCompilerOptions.py,sha256=xzGOOWAFoFsazocXLsTqtaHIqt_WUXFaacLUoBVKlbo,758
+ ampel/timewise/ingest/TiDataPointShaper.py,sha256=KbAg-J0DkMaJA9K67xMY5Pe0SIP0LNKsBNAMnTYV9zA,3180
+ ampel/timewise/ingest/TiMongoMuxer.py,sha256=YDiqaYiJ1TvyHUMAlNWHBHCpw0tH4Nvf8eyHUxMbzS8,6475
+ ampel/timewise/ingest/tags.py,sha256=ggn0Y6QUnjIwMzOlgKmPX1JPhmnvc21nUrORFhaR0uI,564
+ ampel/timewise/t1/T1HDBSCAN.py,sha256=vi1PWF0HFAedVJA4rVoR2YOXQ4jcEgEoUICtmg3rZjw,9301
+ ampel/timewise/t1/TimewiseFilter.py,sha256=ZSaXiSDKXCusknLJRMhe2Awj_YfkM74JVXeRbiZonmA,1564
+ ampel/timewise/t2/T2StackVisits.py,sha256=x7YcVLz_5fOPAfzso4fFWy4Pwn5txPfRT_Hx49PdbTo,1847
+ ampel/timewise/util/AuxDiagnosticPlotter.py,sha256=MZP4LWpFgBUPWPxlx8EEFeRc8rhI7FDO_LlbhKtkwrw,1641
+ ampel/timewise/util/pdutil.py,sha256=CJqeYqL2I6A5WnF5S342bwM4omJY-1XwT1XWY2uk3bc,1524
+ timewise/__init__.py,sha256=8aDB3NLZbXhuSHcb6tn7CHSOTJ5XB8CCwEQpahSJtxw,24
  timewise/backend/__init__.py,sha256=w79nWfCw8n9g98CkHWJELmb4j9xblWC4DGZOV3_XhH4,134
  timewise/backend/base.py,sha256=dHxRzu2q3uQ0wdGmDxnn-p68Tp19qChue7HMEu56wNA,1080
  timewise/backend/filesystem.py,sha256=GQ4Hrb6_7Q7fKOn6QUl8bqAihAmyeZoTRxElNIwPQ1Y,2465
  timewise/chunking.py,sha256=q7njvTSD84gdcvIk54SC-Ob863MsR79RPec8HS-bm4U,1668
- timewise/cli.py,sha256=l6j-M2-x1LeDEAEnuDv9tadTNmVoxxMNgfyOaS-0oAw,3641
+ timewise/cli.py,sha256=W8SXlUQo7a5aS4_kW40KTs2UkTLQS12JuhgwkRb23DY,4172
  timewise/config.py,sha256=ZTSokRZMZDqBqVFV9DxvO-47yE9E9xWF48Rcjb0QG10,1044
  timewise/io/__init__.py,sha256=S7vb0glKJnw6ztOlrD-0Wma2bQZ2RwpmXDLFJLKBMVo,35
  timewise/io/config.py,sha256=aizLxt9l4aeWQNsvcemtQdr_fW1vLmpRSofcgA3Bgvk,2239
- timewise/io/download.py,sha256=rKjeW7nh7xUtHh8llvsg_qmkx71Wzn2LggQnyHwqJOI,10719
+ timewise/io/download.py,sha256=IMNWur1FWPKJ7SNMTjevZgWqPP_eYLzXr4Pjp3XOeXI,10589
  timewise/io/stable_tap.py,sha256=jukCkBi2d7WACOo_kXTMCppzWUsN-pLVg9EDqHi3qd0,3478
  timewise/plot/__init__.py,sha256=cc00UenWC_8zAkBH-Ylhs3yCF49tAqZ2Al9MfOoXYDI,120
- timewise/plot/diagnostic.py,sha256=CKxOYd030CRz-JBkxW5lo0GoQ3nNpEWKh3WlxVdio20,8272
+ timewise/plot/diagnostic.py,sha256=GRp-OUwz2yzzDu9qdViFg_e3Mxl5t1IvUJXZHuMKB2U,8276
  timewise/plot/lightcurve.py,sha256=oK0y6RFzv7QSrO1Qqyc1wNghsTILC9QAfIjuoA8i92I,3757
  timewise/plot/panstarrs.py,sha256=X2ZULm7QT91cp4qociG0fVeI0saGLJwyKzL0141Vqis,8014
  timewise/plot/sdss.py,sha256=cc1zU-4XFkqc8xH5yqCyMsDJf9w_54B_6NeRMjr9Pt8,2622
  timewise/process/__init__.py,sha256=Yk-j1B1MnBuuaM6eFi43TxdWmFKbwFHvDsuQZt4yB_c,70
- timewise/process/config.py,sha256=0TtlxbZs5FNEUt_tv8DKpOxjxp7OoTVunLRUOSw0zPM,831
- timewise/process/interface.py,sha256=ZTUreyu_WkFTs8pOBNqFmCqgVs5OXEtPHjwpGVV1X_s,4929
+ timewise/process/config.py,sha256=d4hCvNb8JyJf2-0BTQgJaeOmHGZK_aHEDMb_xyqJgbU,1060
+ timewise/process/interface.py,sha256=JOIh1wYuE1s9XdSfBRS7qUXexlr-p5UxdwaSjKlRZxE,5207
  timewise/process/keys.py,sha256=0TEVn-BwfzHGlScU-N8AtxgkhA2mUO0wBu4_ol_ylH4,197
- timewise/process/stacking.py,sha256=X5GTkCV5TMn4JmAu8WVM0tQR6mocxNbxrMdR4OVx40Y,12256
+ timewise/process/stacking.py,sha256=5qk9SYTfDFNFsnw9KAGYW-RndGtdmENKEiIqD6mrJ6I,12707
  timewise/process/template.yml,sha256=U_xKmygDl3E-viTgZEI8pQIJwWduB52SdI2X9vy61Yo,1037
  timewise/query/__init__.py,sha256=1OA_FlLI7O0aIDOXHpBKMOyMvYLCd0kQVkzoqbxouyE,242
- timewise/query/base.py,sha256=LzG207uIQOEE_RucEKBI4-sHR_EwyILGSlSk10HlEeU,973
+ timewise/query/base.py,sha256=GjAoQZpaxVHcpCgPco1y5bpR0Jtd_sp_bYMI26WpAK0,1262
  timewise/query/positional.py,sha256=6fLfRdzat1jMqJAY7k7dEdnAZRiWyRKpWkbqpwzDKLo,1255
  timewise/tables/__init__.py,sha256=5efKk4YEhHLA_QTMYc8CQp0nQWXG-F3EAPE6935ManU,263
  timewise/tables/allwise_p3as_mep.py,sha256=NfCnTOdj_6DSFUWGmrwDyrrxIWvJFHktHWF40K_w5wQ,632
@@ -31,9 +42,9 @@ timewise/types.py,sha256=MB-aqpyos1aCS58QWBas34WcwFHiHOkrDj_d9_ZxuVc,667
  timewise/util/backoff.py,sha256=bU5yhsBO4U53XEPJ_32tgql9rq_Rzrv7w32nVQYHr64,272
  timewise/util/csv_utils.py,sha256=5i3Jd4c58-doPs1N_hyYZ8Uc2nvuk9nGwgPNZoNrlu0,298
  timewise/util/error_threading.py,sha256=uyV1Ri-wf87lpa17Xlp520B1V8DWHh3v9Mk97QrPmv0,2264
- timewise/util/visits.py,sha256=3intUo1iiSovu7PB7uUrT-IAxyuOxl58aL0mKj5dAMI,1039
- timewise-1.0.0a1.dist-info/METADATA,sha256=2KeU_-cjdIZePzr7sKNiu-nY97FeuBq087UrzXj_aI8,9172
- timewise-1.0.0a1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
- timewise-1.0.0a1.dist-info/entry_points.txt,sha256=mYh1HsUFbV7KT8kxiGqVtR3Pk0oEk6Bd-2c5FsYVhG4,45
- timewise-1.0.0a1.dist-info/licenses/LICENSE,sha256=sVoNJWiTlH-NarJx0wdsob468Pg3JE6vIIgll4lCa3E,1070
- timewise-1.0.0a1.dist-info/RECORD,,
+ timewise/util/visits.py,sha256=4ZXwH7OXmp98tUPZplvuOtykB048kZoYXyuRTwQxT7w,998
+ timewise-1.0.0a5.dist-info/METADATA,sha256=PWFFKX75JxlhRVYm6S9ypWaif_QT22vVe1b2VCQF18w,10181
+ timewise-1.0.0a5.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ timewise-1.0.0a5.dist-info/entry_points.txt,sha256=mYh1HsUFbV7KT8kxiGqVtR3Pk0oEk6Bd-2c5FsYVhG4,45
+ timewise-1.0.0a5.dist-info/licenses/LICENSE,sha256=sVoNJWiTlH-NarJx0wdsob468Pg3JE6vIIgll4lCa3E,1070
+ timewise-1.0.0a5.dist-info/RECORD,,