cubexpress 0.1.9-py3-none-any.whl → 0.1.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cubexpress might be problematic.

cubexpress/__init__.py CHANGED
@@ -1,5 +1,5 @@
 from cubexpress.conversion import lonlat2rt, geo2utm
-from cubexpress.geotyping import RasterTransform, Request, RequestSet
+from cubexpress.geotyping import RasterTransform, Request, RequestSet, GeotransformDict
 from cubexpress.cloud_utils import s2_cloud_table
 from cubexpress.cube import get_cube
 from cubexpress.request import table_to_requestset
@@ -11,6 +11,7 @@ from cubexpress.request import table_to_requestset
 __all__ = [
     "lonlat2rt",
     "RasterTransform",
+    "GeotransformDict",
     "Request",
     "RequestSet",
     "geo2utm",
@@ -19,7 +20,7 @@ __all__ = [
     "table_to_requestset"
 ]
 
-# Dynamic version import
-import importlib.metadata
+# # Dynamic version import
+# import importlib.metadata
 
-__version__ = importlib.metadata.version("cubexpress")
+# __version__ = importlib.metadata.version("cubexpress")
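With 0.1.11, `GeotransformDict` joins the public API, while the dynamic `__version__` lookup is commented out, so the module no longer exposes a version attribute. A minimal sketch of the caller-side implications:

```python
import importlib.metadata

from cubexpress import GeotransformDict  # re-exported as of 0.1.11

# __init__ no longer sets cubexpress.__version__, so read the installed
# distribution's metadata directly when a version string is needed:
print(importlib.metadata.version("cubexpress"))  # "0.1.11" once installed
```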
cubexpress/cloud_utils.py CHANGED
@@ -46,7 +46,7 @@ def _cloud_table_single_range(
     * ``id`` – Sentinel-2 ID
     * ``cs_cdf`` – Cloud Score Plus CDF (0–1)
     * ``date`` – acquisition date (YYYY-MM-DD)
-    * ``high_null_flag`` – 1 if cloud score missing
+    * ``null_flag`` – 1 if cloud score missing
 
     Notes
     -----
@@ -83,7 +83,7 @@ def _cloud_table_single_range(
     except ee.ee_exception.EEException as e:
         if "No bands in collection" in str(e):
             return pd.DataFrame(
-                columns=["id", "cs_cdf", "date", "high_null_flag"]
+                columns=["id", "cs_cdf", "date", "null_flag"]
             )
         raise
 
@@ -95,14 +95,17 @@ def _cloud_table_single_range(
         .merge(df_raw, on="id", how="left")
         .assign(
             date=lambda d: pd.to_datetime(d["id"].str[:8], format="%Y%m%d").dt.strftime("%Y-%m-%d"),
-            high_null_flag=lambda d: d["cs_cdf"].isna().astype(int),
+            null_flag=lambda d: d["cs_cdf"].isna().astype(int),
         )
         .drop(columns=["longitude", "latitude", "time"])
     )
 
     # fill missing scores with daily mean
+    df["lon"] = lon
+    df["lat"] = lat
     df["cs_cdf"] = df["cs_cdf"].fillna(df.groupby("date")["cs_cdf"].transform("mean"))
 
+
     return df
 
 
@@ -161,7 +164,7 @@ def s2_cloud_table(
     # ─── 1. Load cached data if present ────────────────────────────────────
     if cache and cache_file.exists():
         if verbose:
-            print("📂 Loading cached table …")
+            print("📂 Loading cached metadata …")
         df_cached = pd.read_parquet(cache_file)
         have_idx = pd.to_datetime(df_cached["date"], errors="coerce").dropna()
 
@@ -173,7 +176,7 @@ def s2_cloud_table(
             and dt.date.fromisoformat(end) <= cached_end
         ):
             if verbose:
-                print("✅ Served entirely from cache.")
+                print("✅ Served entirely from metadata.")
             df_full = df_cached
         else:
             # Identify missing segments and fetch only those.
@@ -192,15 +195,21 @@ def s2_cloud_table(
                         lon, lat, edge_size, a2, b2
                     )
                 )
-            df_new = pd.concat(df_new_parts, ignore_index=True)
-            df_full = (
-                pd.concat([df_cached, df_new], ignore_index=True)
-                .sort_values("date", kind="mergesort")
-            )
+            df_new_parts = [df for df in df_new_parts if not df.empty]
+
+            if df_new_parts:
+
+                df_new = pd.concat(df_new_parts, ignore_index=True)
+                df_full = (
+                    pd.concat([df_cached, df_new], ignore_index=True)
+                    .sort_values("date", kind="mergesort")
+                )
+            else:
+                df_full = df_cached
     else:
 
         if verbose:
-            msg = "Generating table (no cache found)…" if cache else "Generating table…"
+            msg = "Generating metadata (no cache found)…" if cache else "Generating metadata…"
             print("⏳", msg)
         df_full = _cloud_table_single_range(
             lon, lat, edge_size, start, end
@@ -230,4 +239,5 @@ def s2_cloud_table(
             "collection": collection
         }
     )
-    return result
+    return result
+
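The table now flags acquisitions whose Cloud Score Plus value is missing via `null_flag` and fills the gaps with the mean score of the same day (the new `lon`/`lat` columns simply record the query point). A self-contained pandas sketch of that flag-and-fill step, with invented sample values:

```python
import pandas as pd

df = pd.DataFrame({
    "date":   ["2024-01-01", "2024-01-01", "2024-01-02"],
    "cs_cdf": [0.90, None, 0.40],
})

# Flag acquisitions whose cloud score is missing, as the table does.
df["null_flag"] = df["cs_cdf"].isna().astype(int)

# Fill each gap with the mean of the available scores from the same day.
df["cs_cdf"] = df["cs_cdf"].fillna(df.groupby("date")["cs_cdf"].transform("mean"))

print(df)  # the 2024-01-01 gap becomes 0.90, and its null_flag stays 1
```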
cubexpress/cube.py CHANGED
@@ -23,13 +23,13 @@ from cubexpress.downloader import download_manifest, download_manifests
 from cubexpress.geospatial import quadsplit_manifest, calculate_cell_size
 from cubexpress.request import table_to_requestset
 import pandas as pd
+from cubexpress.geotyping import RequestSet
 
 
 def get_geotiff(
     manifest: Dict[str, Any],
     full_outname: pathlib.Path | str,
     join: bool = True,
-    eraser: bool = True,
     nworks: int = 4,
     verbose: bool = True,
 ) -> None:
@@ -52,19 +52,26 @@ def get_geotiff(
         size = manifest["grid"]["dimensions"]["width"]  # square images assumed
         cell_w, cell_h, power = calculate_cell_size(str(err), size)
         tiled = quadsplit_manifest(manifest, cell_w, cell_h, power)
-        download_manifests(tiled, full_outname, join, eraser, nworks)
+        download_manifests(
+            manifests=tiled,
+            full_outname=full_outname,
+            join=join,
+            max_workers=nworks
+        )
 
     if verbose:
         print(f"Downloaded {full_outname}")
 
 
 def get_cube(
-    table: pd.DataFrame,
+    # table: pd.DataFrame,
+    requests: pd.DataFrame | RequestSet,
     outfolder: pathlib.Path | str,
-    join: bool = True,
-    eraser: bool = True,
     mosaic: bool = True,
+    join: bool = True,
     nworks: int = 4,
+    verbose: bool = True,
+    cache: bool = True
 ) -> None:
     """Download every request in *requests* to *outfolder* using a thread pool.
 
@@ -81,20 +88,45 @@ def get_cube(
         Pool size for concurrent downloads; default **4**.
     """
 
-    requests = table_to_requestset(
-        table=table,
-        mosaic=mosaic
-    )
+    # requests = table_to_requestset(
+    #     table=table,
+    #     mosaic=mosaic
+    # )
+
+    outfolder = pathlib.Path(outfolder).expanduser().resolve()
 
     with concurrent.futures.ThreadPoolExecutor(max_workers=nworks) as pool:
         futures = []
         for _, row in requests._dataframe.iterrows():
             outname = pathlib.Path(outfolder) / f"{row.id}.tif"
+            if outname.exists() and cache:
+                continue
             outname.parent.mkdir(parents=True, exist_ok=True)
-            futures.append(pool.submit(get_geotiff, row.manifest, outname, join, eraser, nworks))
+            futures.append(
+                pool.submit(
+                    get_geotiff,
+                    row.manifest,  # manifest = row.manifest
+                    outname,       # full_outname = outname
+                    join,          # join = join
+                    nworks,        # nworks = nworks
+                    verbose        # verbose = verbose
+                )
+            )
 
         for fut in concurrent.futures.as_completed(futures):
             try:
                 fut.result()
             except Exception as exc:  # noqa: BLE001 – log and keep going
                 print(f"Download error: {exc}")
+
+    # download_df = requests._dataframe[["outname", "cs_cdf", "date"]].copy()
+    # download_df["outname"] = outfolder / requests._dataframe["outname"]
+    # download_df.rename(columns={"outname": "full_outname"}, inplace=True)
+
+    return
+
+    # manifest = row.manifest
+    # full_outname = outname
+    # join: bool = True,
+    # nworks: int = 4,
+    # verbose: bool = True,
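`get_cube` no longer builds the `RequestSet` itself: the commented-out `table_to_requestset` call moves to the caller, and with `cache=True` any `{id}.tif` already present in `outfolder` is skipped. A hedged usage sketch, assuming an initialised Earth Engine session and that `s2_cloud_table` accepts the keyword names visible in the diff:

```python
import ee
import cubexpress

ee.Initialize()  # assumes Earth Engine credentials are already configured

# Build the request set explicitly; get_cube no longer does this for you.
table = cubexpress.s2_cloud_table(
    lon=-76.9, lat=-12.05,  # hypothetical query point
    edge_size=512,
    start="2024-01-01", end="2024-02-01",
)
requests = cubexpress.table_to_requestset(table=table, mosaic=True)

# Files named {id}.tif already present under ./cube are skipped while
# cache=True, so re-runs only fetch what is missing.
cubexpress.get_cube(requests, outfolder="./cube", nworks=4, cache=True)
```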
cubexpress/downloader.py CHANGED
@@ -25,6 +25,8 @@ from rasterio.merge import merge
 from rasterio.enums import Resampling
 import os
 import shutil
+import tempfile
+from cubexpress.geospatial import merge_tifs
 
 os.environ['CPL_LOG_ERRORS'] = 'OFF'
 logging.getLogger('rasterio._env').setLevel(logging.ERROR)
@@ -56,12 +58,12 @@ def download_manifest(ulist: Dict[str, Any], full_outname: pathlib.Path) -> None
         blockxsize=256,
         blockysize=256,
         compress="ZSTD",
-        # zstd_level=13,
+        zstd_level=13,
         predictor=2,
         num_threads=20,
         nodata=65535,
         dtype="uint16",
-        count=13,
+        count=12,
         photometric="MINISBLACK"
     )
 
@@ -69,10 +71,9 @@ def download_manifest(ulist: Dict[str, Any], full_outname: pathlib.Path) -> None
         dst.write(src.read())
 
 def download_manifests(
-    manifests: List[Dict[str, Any]],
+    manifests: list[Dict[str, Any]],
     full_outname: pathlib.Path,
     join: bool = True,
-    eraser: bool = True,
     max_workers: int = 4,
 ) -> None:
     """Download every manifest in *manifests* concurrently.
@@ -81,55 +82,38 @@ def download_manifests(
     ``full_outname.parent/full_outname.stem`` with names ``000000.tif``,
     ``000001.tif`` … according to the list order.
     """
+    # full_outname = pathlib.Path("/home/contreras/Documents/GitHub/cubexpress/cubexpress_test/2017-08-19_6mfrw_18LVN.tif")
+
+    if join:
+        tmp_dir = pathlib.Path(tempfile.mkdtemp(prefix="s2tmp_"))
+        full_outname_temp = tmp_dir / full_outname.name
+
     with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
         futures = []
 
         for index, umanifest in enumerate(manifests):
-            folder = full_outname.parent / full_outname.stem
+            folder = full_outname_temp.parent / full_outname_temp.stem
             folder.mkdir(parents=True, exist_ok=True)
             outname = folder / f"{index:06d}.tif"
-            futures.append(executor.submit(download_manifest, umanifest, outname))
+            futures.append(
+                executor.submit(
+                    download_manifest,
+                    umanifest,  # ulist = umanifest
+                    outname     # full_outname = outname
+                )
+            )
 
         for fut in concurrent.futures.as_completed(futures):
             try:
                 fut.result()
             except Exception as exc:  # noqa: BLE001
                 print(f"Error in one of the downloads: {exc}")  # noqa: T201
-
-    if join:
-
-        dir_path = full_outname.parent / full_outname.stem
-        input_files = sorted(dir_path.glob("*.tif"))
-
-        if dir_path.exists() and len(input_files) > 1:
-
-            with rio.Env(GDAL_NUM_THREADS="8", NUM_THREADS="8"):
-                srcs = [rio.open(fp) for fp in input_files]
-                mosaic, out_transform = merge(
-                    srcs,
-                    nodata=65535,
-                    resampling=Resampling.nearest
-                )
-
-                meta = srcs[0].profile.copy()
-                meta["transform"] = out_transform
-                meta.update(
-                    height=mosaic.shape[1],
-                    width=mosaic.shape[2]
-                )
-
-                with rio.open(full_outname, "w", **meta) as dst:
-                    dst.write(mosaic)
-
-                for src in srcs:
-                    src.close()
-
-            if eraser:
-                # Delete the tile folder
-                shutil.rmtree(dir_path)
-
-            print("✅ Mosaic generated:", full_outname)
-            return full_outname
-
-        else:
-            return full_outname
 
+    dir_path = full_outname_temp.parent / full_outname_temp.stem
+    if dir_path.exists():
+        input_files = sorted(dir_path.glob("*.tif"))
+        merge_tifs(input_files, full_outname)
+        shutil.rmtree(dir_path)
+    else:
+        raise ValueError(f"Error in {full_outname}")
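With `eraser` removed, the join path now stages tiles in a disposable temporary directory and hands mosaicking to `geospatial.merge_tifs` (note that `full_outname_temp` is only bound when `join=True`). A minimal sketch of that stage-merge-clean-up pattern; the helper name `download_joined` is hypothetical:

```python
import pathlib
import shutil
import tempfile

from cubexpress.downloader import download_manifest
from cubexpress.geospatial import merge_tifs


def download_joined(manifests: list[dict], final: pathlib.Path) -> None:
    """Stage quadsplit tiles in a temp dir, then mosaic them into `final`."""
    tmp_dir = pathlib.Path(tempfile.mkdtemp(prefix="s2tmp_"))
    tiles = tmp_dir / final.stem
    tiles.mkdir(parents=True, exist_ok=True)
    for i, m in enumerate(manifests):
        download_manifest(m, tiles / f"{i:06d}.tif")  # one GeoTIFF per tile
    merge_tifs(sorted(tiles.glob("*.tif")), final)    # write the final mosaic
    shutil.rmtree(tmp_dir)                            # drop the staging dir
```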
cubexpress/geospatial.py CHANGED
@@ -2,6 +2,11 @@ import ee
 import re
 from copy import deepcopy
 from typing import Dict
+import pathlib
+import rasterio as rio
+from rasterio.merge import merge
+from rasterio.enums import Resampling
+
 
 
 def quadsplit_manifest(manifest: Dict, cell_width: int, cell_height: int, power: int) -> list[Dict]:
@@ -27,8 +32,6 @@ def quadsplit_manifest(manifest: Dict, cell_width: int, cell_height: int, power:
 
     return manifests
 
-
-
 def calculate_cell_size(ee_error_message: str, size: int) -> tuple[int, int]:
     match = re.findall(r'\d+', ee_error_message)
     image_pixel = int(match[0])
@@ -53,3 +56,66 @@ def _square_roi(lon: float, lat: float, edge_size: int, scale: int) -> ee.Geometry
     half = edge_size * scale / 2
     point = ee.Geometry.Point([lon, lat])
     return point.buffer(half).bounds()
+
+
+
+def merge_tifs(
+    input_files: list[pathlib.Path],
+    output_path: pathlib.Path,
+    *,
+    nodata: int = 65535,
+    gdal_threads: int = 8
+) -> None:
+    """
+    Merge a list of GeoTIFF files into a single mosaic and write it out.
+
+    Parameters
+    ----------
+    input_files : list[Path]
+        Paths to the GeoTIFF tiles to be merged.
+    output_path : Path
+        Destination path for the merged GeoTIFF.
+    nodata : int, optional
+        NoData value to assign in the mosaic (default: 65535).
+    gdal_threads : int, optional
+        Number of GDAL threads to use for reading/writing (default: 8).
+
+    Raises
+    ------
+    ValueError
+        If `input_files` is empty.
+    """
+    if not input_files:
+        raise ValueError("The input_files list is empty")
+
+    # Ensure output path is a Path object
+    output_path = pathlib.Path(output_path).expanduser().resolve()
+    output_path.parent.mkdir(parents=True, exist_ok=True)
+
+    # Set GDAL threading environment
+    with rio.Env(GDAL_NUM_THREADS=str(gdal_threads), NUM_THREADS=str(gdal_threads)):
+        # Open all source datasets
+        srcs = [rio.open(fp) for fp in input_files]
+        try:
+            # Merge sources into one mosaic
+            mosaic, out_transform = merge(
+                srcs,
+                nodata=nodata,
+                resampling=Resampling.nearest
+            )
+
+            # Copy metadata from the first source and update it
+            meta = srcs[0].profile.copy()
+            meta.update({
+                "transform": out_transform,
+                "height": mosaic.shape[1],
+                "width": mosaic.shape[2]
+            })
+
+            # Write the merged mosaic to disk
+            with rio.open(output_path, "w", **meta) as dst:
+                dst.write(mosaic)
+        finally:
+            # Always close all open datasets
+            for src in srcs:
+                src.close()
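`merge_tifs` extracts the mosaicking logic that previously lived inline in `download_manifests`. A short usage sketch, assuming a folder of downloaded uint16 tiles sharing a CRS and band layout (folder name hypothetical):

```python
import pathlib

from cubexpress.geospatial import merge_tifs

tiles = sorted(pathlib.Path("scene_tiles").glob("*.tif"))  # hypothetical tile dir
merge_tifs(tiles, pathlib.Path("scene.tif"), nodata=65535, gdal_threads=8)
```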
cubexpress/geotyping.py CHANGED
@@ -306,13 +306,17 @@ class RequestSet(BaseModel):
                         "crsCode": meta.raster_transform.crs,
                     },
                 },
+                # "cs_cdf": int(meta.id.split("_")[-1]) / 100,
+                # "date": meta.id.split("_")[0],
                 "outname": f"{meta.id}.tif",
             }
+
             for index, meta in enumerate(self.requestset)
         ]
     )
 
 
+
     def _validate_dataframe_schema(self) -> None:
         """
         Checks that the `_dataframe` contains the required columns and that each column
@@ -367,21 +371,7 @@ class RequestSet(BaseModel):
                     f"Column '{col_name}' has an invalid type in row {i}. "
                     f"Expected {expected_type}, got {type(value)}"
                 )
-
-        # B) Validation of the `manifest` column structure
-        #    - Must contain at least 'assetId' or 'expression'
-        #    - Must contain 'grid' with the minimum required sub-keys
-        #    - Example:
-        #      {
-        #        "fileFormat": "GEO_TIFF",
-        #        "bandIds": [...],
-        #        "grid": {
-        #          "dimensions": {"width": ..., "height": ...},
-        #          "affineTransform": {...},
-        #          "crsCode": ...
-        #        },
-        #        // Either "assetId" or "expression" must be here
-        #      }
+
         for i, row in self._dataframe.iterrows():
             manifest = row["manifest"]
cubexpress/request.py CHANGED
@@ -31,8 +31,7 @@ def table_to_requestset(
     If *df* is empty after filtering.
 
     """
-
-
+
     df = table.copy()
 
     if df.empty:
@@ -48,33 +47,65 @@ def table_to_requestset(
     reqs: list[Request] = []
 
     if mosaic:
-        # group all asset IDs per day
+
         grouped = (
-            df.groupby("date")["id"]  # Series with lists of ids per day
-            .apply(list)
+            df.groupby('date')
+            .agg(
+                id_list=('id', list),
+                tiles=(
+                    'id',
+                    lambda ids: ','.join(
+                        sorted({i.split('_')[-1][1:] for i in ids})
+                    )
+                ),
+                cs_cdf_mean=(
+                    'cs_cdf',
+                    lambda x: int(round(x.mean(), 2) * 100)
+                )
+            )
         )
 
-        for day, img_ids in grouped.items():
-            ee_img = ee.ImageCollection(
-                [ee.Image(f"{df.attrs['collection']}/{img}") for img in img_ids]
-            ).mosaic()
-
-            reqs.append(
-                Request(
-                    id=f"{day}_{centre_hash}",
-                    raster_transform=rt,
-                    image=ee_img,
-                    bands=df.attrs["bands"],
-                )
-            )
-    else:  # one request per asset
+        for day, row in grouped.iterrows():
+
+            img_ids = row["id_list"]
+            cdf = row["cs_cdf_mean"]
+
+            if len(img_ids) > 1:
+
+                ee_img = ee.ImageCollection(
+                    [ee.Image(f"{df.attrs['collection']}/{img}") for img in img_ids]
+                ).mosaic()
+
+                reqs.append(
+                    Request(
+                        id=f"{day}_{centre_hash}_{cdf}",
+                        raster_transform=rt,
+                        image=ee_img,
+                        bands=df.attrs["bands"],
+                    )
+                )
+            else:
+                for img_id in img_ids:
+                    # tile = img_id.split("_")[-1][1:]
+                    reqs.append(
+                        Request(
+                            # id=f"{day}_{centre_hash}_{tile}_{cdf}",
+                            id=f"{day}_{centre_hash}_{cdf}",
+                            raster_transform=rt,
+                            image=f"{df.attrs['collection']}/{img_id}",
+                            bands=df.attrs["bands"],
+                        )
+                    )
+    else:
         for _, row in df.iterrows():
             img_id = row["id"]
-            day = row["date"]
-
+            # tile = img_id.split("_")[-1][1:]
+            day = row["date"]
+            cdf = int(round(row["cs_cdf"], 2) * 100)
             reqs.append(
                 Request(
-                    id=f"{day}_{centre_hash}_{img_id}",
+                    id=f"{day}_{centre_hash}_{cdf}",
                     raster_transform=rt,
                     image=f"{df.attrs['collection']}/{img_id}",
                     bands=df.attrs["bands"],
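Request IDs change from `{date}_{centre_hash}_{img_id}` to `{date}_{centre_hash}_{cdf}`, where `cdf` is the daily mean `cs_cdf` rounded to two decimals and scaled by 100. A small pandas sketch of the new per-day aggregation (sample IDs invented; `6mfrw` stands in for the centre geohash):

```python
import pandas as pd

df = pd.DataFrame({
    "date":   ["2024-01-15", "2024-01-15"],
    "id":     ["20240115T151701_20240115T152143_T18LVN",
               "20240115T151701_20240115T152143_T18LWN"],
    "cs_cdf": [0.92, 0.88],
})

grouped = df.groupby("date").agg(
    id_list=("id", list),
    # MGRS tile codes from the last ID token, e.g. "18LVN,18LWN"
    tiles=("id", lambda ids: ",".join(sorted({i.split("_")[-1][1:] for i in ids}))),
    # daily mean score rounded to 2 decimals, then scaled: 0.90 -> 90
    cs_cdf_mean=("cs_cdf", lambda x: int(round(x.mean(), 2) * 100)),
)

day, row = next(grouped.iterrows())
print(f"{day}_6mfrw_{row['cs_cdf_mean']}")  # "2024-01-15_6mfrw_90"
```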
{cubexpress-0.1.9.dist-info → cubexpress-0.1.11.dist-info}/METADATA RENAMED
@@ -1,9 +1,8 @@
 Metadata-Version: 2.1
 Name: cubexpress
-Version: 0.1.9
+Version: 0.1.11
 Summary: Efficient processing of cubic Earth-observation (EO) data.
 Home-page: https://github.com/andesdatacube/cubexpress
-License: MIT
 Keywords: earth-engine,sentinel-2,geospatial,eo,cube
 Author: Julio Contreras
 Author-email: contrerasnetk@gmail.com
@@ -20,6 +19,7 @@ Requires-Dist: earthengine-api (>=1.5.12)
 Requires-Dist: numpy (>=2.0.2)
 Requires-Dist: pandas (>=2.2.2)
 Requires-Dist: pyarrow (>=14.0.0)
+Requires-Dist: pydantic (>=2.11.4)
 Requires-Dist: pygeohash (>=1.2.0)
 Requires-Dist: pyproj (>=3.6.0)
 Requires-Dist: rasterio (>=1.3.9)
cubexpress-0.1.11.dist-info/RECORD ADDED
@@ -0,0 +1,13 @@
+cubexpress/__init__.py,sha256=sKXcYQQPREFhVCHP81lL_5hAurUTm8MX1xVOEOMF-nA,618
+cubexpress/cache.py,sha256=EZiR2AJfplaLpqMIVFb5piCAgFqHKF1vgLIrutfz8tA,1425
+cubexpress/cloud_utils.py,sha256=BxS3HADLNj6rdFGYUjpcXA1Vvsa87JoL28YEAsu51H4,7482
+cubexpress/conversion.py,sha256=JSaMnswY-2n5E4H2zxb-oEOTJ8UPzXfMeSVCremtvTw,2520
+cubexpress/cube.py,sha256=SMN6MvezfeHipFE4v4f23dxWGk9h2t2s2aeeppD0voY,4133
+cubexpress/downloader.py,sha256=XsLDlq2ZHEccc1ET8ghnuOIYtGazVDwXohMSWBemVMw,4067
+cubexpress/geospatial.py,sha256=2DGwl3pyfNEOj8nn9gjc-tiiTXhV2ez9Bghz1I0vERs,3822
+cubexpress/geotyping.py,sha256=Fbnn7EoRvXrtjTRFTS4CPzQbxG4PA6WkfeM4YUp9iKg,16696
+cubexpress/request.py,sha256=PiDqnt3qB9tac4KkZdPIrv5VeRHqobk1u2q1VCCH2lI,3390
+cubexpress-0.1.11.dist-info/LICENSE,sha256=XjoS-d76b7Cl-VgCWhQk83tNf2dNldKBN8SrImwGc2Q,1072
+cubexpress-0.1.11.dist-info/METADATA,sha256=pxtqImmO_wIyA9P_0TWaxxps0O-95O6aVLbfEQ9GvBk,9651
+cubexpress-0.1.11.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+cubexpress-0.1.11.dist-info/RECORD,,
@@ -1,13 +0,0 @@
1
- cubexpress/__init__.py,sha256=RjyAqwiD0rU_Z5tCJTYNGKXZ1ggpfPB51wzhr0KwweY,570
2
- cubexpress/cache.py,sha256=EZiR2AJfplaLpqMIVFb5piCAgFqHKF1vgLIrutfz8tA,1425
3
- cubexpress/cloud_utils.py,sha256=aamTm-PxbPQ4ARwd5faG1a1sjKegbtkd0LxT7wYZJ60,7238
4
- cubexpress/conversion.py,sha256=JSaMnswY-2n5E4H2zxb-oEOTJ8UPzXfMeSVCremtvTw,2520
5
- cubexpress/cube.py,sha256=fwD_UdH0oBWSK-2-fMPPm3YKxcw1xxnm2g0vrZuChI8,3172
6
- cubexpress/downloader.py,sha256=NoJXxCZ7SXBMzUDcXU6DGa2vce61g716FYYfq17pH0k,4461
7
- cubexpress/geospatial.py,sha256=ZbsPIgsYQFnNFXUuQ136rJsL4b2Bf91o0Vsswby2dFc,1812
8
- cubexpress/geotyping.py,sha256=XuBcJAgNxvXCCIDmWijI70p6dEFlu6UfbqwQlWXSWQw,17155
9
- cubexpress/request.py,sha256=ZWVIXo0_rVkX1fBWREbtvvdYUSZPCv4LIcPdrMKKuLs,2270
10
- cubexpress-0.1.9.dist-info/LICENSE,sha256=XjoS-d76b7Cl-VgCWhQk83tNf2dNldKBN8SrImwGc2Q,1072
11
- cubexpress-0.1.9.dist-info/METADATA,sha256=qplHASBXni3m6kOAFIw8Jy2fBFqY1QfLDaNM3ou6cMk,9628
12
- cubexpress-0.1.9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
13
- cubexpress-0.1.9.dist-info/RECORD,,