specklia 1.9.66.tar.gz → 1.9.68.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: specklia
- Version: 1.9.66
+ Version: 1.9.68
  Summary: Python client for Specklia, a geospatial point cloud database by Earthwave.
  Home-page: https://specklia.earthwave.co.uk/
  Author: Earthwave Ltd
@@ -0,0 +1,63 @@
+ """Setup file for the Specklia client."""
+
+ import os
+
+ from setuptools import find_packages, setup
+
+ with open("README.md", "r", encoding="utf-8") as fh:
+     long_description = fh.read()
+
+ if os.path.exists("full_version.txt"):
+     with open("full_version.txt", "r", encoding="utf-8") as fh:
+         """
+         Note that this file is generated by the CI chain based on the git tag
+         (by ew_continuous_integration/define_new_version_number.py)
+         It should not be present in the repository by default.
+         """
+         version_number = fh.read()
+ else:
+     version_number = "v0.0.0"  # default value when under development
+
+ setup(
+     name="specklia",
+     version=version_number,
+     description="Python client for Specklia, a geospatial point cloud database by Earthwave.",
+     long_description=long_description,
+     long_description_content_type="text/markdown",
+     author="Earthwave Ltd",
+     author_email="support@earthwave.co.uk",
+     url="https://specklia.earthwave.co.uk/",
+     python_requires=">=3.11",
+     license="MIT",
+     packages=find_packages(),
+     # These generate the icons in the sidebar on PyPI
+     project_urls={
+         "Homepage": "https://specklia.earthwave.co.uk/",
+         "Changelog": "https://specklia.earthwave.co.uk/generated_docs/change_log.html",
+         "Documentation": "https://specklia.earthwave.co.uk/generated_docs/index.html",
+         "Twitter": "https://twitter.com/earth__wave",
+     },
+     classifiers=[
+         "Development Status :: 4 - Beta",
+         "Programming Language :: Python :: 3.11",
+         "Topic :: Database :: Front-Ends",
+         "Topic :: Scientific/Engineering :: GIS",
+         "Topic :: Software Development :: Libraries :: Python Modules",
+         "License :: OSI Approved :: MIT License",
+         "Operating System :: OS Independent",
+         "Environment :: Console",
+         "Intended Audience :: Science/Research",
+     ],
+     # note requirements listed in install_requires should be the *minimum required*
+     # in order to allow pip to resolve multiple installed packages properly.
+     # requirements.txt should contain a specific known working version instead.
+     install_requires=[
+         "blosc",
+         "geopandas",
+         "pandas",
+         "pyarrow",
+         "rasterio",
+         "requests",
+         "shapely",
+     ],
+ )
@@ -20,25 +20,24 @@ IMPORTANT: THE VERSION HERE IN THE SPECKLIA PACKAGE MUST NOT BE MADE DEPENDENT U
  IS PRIVATE BUT THIS PACKAGE IS PUBLIC!
  """

+ import struct
+ import time
  from enum import Enum
  from io import BytesIO
  from logging import Logger
- import struct
- import time
  from typing import List, Tuple, Union

+ import requests
  from geopandas import GeoDataFrame
  from geopandas import read_feather as read_geofeather
- from pandas import DataFrame
- from pandas import read_feather
- import requests
+ from pandas import DataFrame, read_feather

  log = Logger(__name__)

  CHUNK_DB_NAME = "data_transfer_chunks"
  CHUNK_METADATA_COLLECTION_NAME = "chunk_metadata"
  MAX_CHUNK_AGE_SECONDS = 3600
- MAX_CHUNK_SIZE_BYTES = 5 * 1024 ** 2  # must be small enough to fit into an HTTP GET Request
+ MAX_CHUNK_SIZE_BYTES = 5 * 1024**2  # must be small enough to fit into an HTTP GET Request
  CHUNK_DOWNLOAD_RETRIES = 10
  CHUNK_DOWNLOAD_TIMEOUT_S = 10

@@ -74,19 +73,16 @@ def upload_chunks(api_address: str, chunks: List[Tuple[int, bytes]]) -> str:
          The chunk set uuid of the uploaded chunks
      """
      # post the first chunk to start the upload
-     response = requests.post(
-         api_address + f"/chunk/upload/{chunks[0][0]}-of-{len(chunks)}",
-         data=chunks[0][1])
+     response = requests.post(api_address + f"/chunk/upload/{chunks[0][0]}-of-{len(chunks)}", data=chunks[0][1])
      response.raise_for_status()
-     log.info("response from very first /chunk/upload was '%s'", response.json())
-     chunk_set_uuid = response.json()['chunk_set_uuid']
+     log.debug("response from very first /chunk/upload was '%s'", response.json())
+     chunk_set_uuid = response.json()["chunk_set_uuid"]

      # post the rest of the chunks in a random order
      for i, chunk in chunks[1:]:
-         response = requests.post(
-             api_address + f"/chunk/upload/{chunk_set_uuid}/{i}-of-{len(chunks)}", data=chunk)
+         response = requests.post(api_address + f"/chunk/upload/{chunk_set_uuid}/{i}-of-{len(chunks)}", data=chunk)
          response.raise_for_status()
-         log.info("response from subsequent /chunk/upload/uuid call was '%s'", response.text)
+         log.debug("response from subsequent /chunk/upload/uuid call was '%s'", response.text)

      return chunk_set_uuid

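Read together with split_into_chunks further down in this diff, the reformatted upload_chunks describes a simple ordinal-based protocol: the first POST creates the chunk set and its JSON response supplies chunk_set_uuid, and every later POST references that uuid. A self-contained sketch of the URLs involved (api_address, num_chunks and the uuid are placeholder values, not taken from the diff):

    # Illustration of the URL scheme upload_chunks builds; all values here are placeholders.
    api_address = "https://api.example.invalid"
    num_chunks = 3
    chunk_set_uuid = "0000-placeholder-uuid"  # in reality returned by the first POST

    # First chunk: no uuid exists yet; the server's response supplies chunk_set_uuid.
    print(api_address + f"/chunk/upload/1-of-{num_chunks}")
    # Remaining chunks reference the uuid returned by that first call.
    for i in range(2, num_chunks + 1):
        print(api_address + f"/chunk/upload/{chunk_set_uuid}/{i}-of-{num_chunks}")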
@@ -123,31 +119,29 @@ def download_chunks(api_address: str, chunk_set_uuid: str, num_chunks: int) -> b
          while retries < CHUNK_DOWNLOAD_RETRIES and not success:
              try:
                  this_chunk_response = requests.get(
-                     f"{api_address}/chunk/download/{chunk_set_uuid}/{chunk_ordinal}",
-                     timeout=CHUNK_DOWNLOAD_TIMEOUT_S
+                     f"{api_address}/chunk/download/{chunk_set_uuid}/{chunk_ordinal}", timeout=CHUNK_DOWNLOAD_TIMEOUT_S
                  )
                  this_chunk_response.raise_for_status()
-                 ordinal = struct.unpack('i', this_chunk_response.content[:4])[0]
+                 ordinal = struct.unpack("i", this_chunk_response.content[:4])[0]
                  chunk = this_chunk_response.content[4:]
-                 assert ordinal == chunk_ordinal, (
-                     f"Chunk ordinal mismatch: expected {chunk_ordinal}, got {ordinal}")
+                 assert ordinal == chunk_ordinal, f"Chunk ordinal mismatch: expected {chunk_ordinal}, got {ordinal}"
                  chunks.append(chunk)
                  success = True
              except (requests.Timeout, requests.ConnectionError) as e:
                  retries += 1
-                 log.warning(
-                     "Request failed with %s. Retrying (%s/%s)...", e, retries, CHUNK_DOWNLOAD_RETRIES)
+                 log.warning("Request failed with %s. Retrying (%s/%s)...", e, retries, CHUNK_DOWNLOAD_RETRIES)
                  time.sleep(1)  # Small backoff before retrying
          if not success:
              error_message = (
-                 f"Failed to download from chunk set {chunk_set_uuid} after {CHUNK_DOWNLOAD_TIMEOUT_S} attempts.")
+                 f"Failed to download from chunk set {chunk_set_uuid} after {CHUNK_DOWNLOAD_TIMEOUT_S} attempts."
+             )
              log.error(error_message)
              raise RuntimeError(error_message)

      # Let the server know that we are done with this data and it can be deleted.
-     requests.delete(f'{api_address}/chunk/delete/{chunk_set_uuid}')
+     requests.delete(f"{api_address}/chunk/delete/{chunk_set_uuid}")

-     return b''.join(chunks)
+     return b"".join(chunks)


  def split_into_chunks(data: bytes, chunk_size: int = MAX_CHUNK_SIZE_BYTES) -> List[Tuple[int, bytes]]:
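The unchanged context around these edits shows the framing download_chunks expects on each response body: a 4-byte integer ordinal followed by the raw chunk bytes. How the server builds that body is not part of this diff; the struct.pack call below is simply the mirror image of the struct.unpack call the client performs, shown here as an illustration.

    # Sketch of the per-chunk framing: 4-byte int ordinal, then the chunk payload.
    import struct

    chunk_ordinal = 3
    payload = b"example chunk payload"

    body = struct.pack("i", chunk_ordinal) + payload  # what a response body would look like
    ordinal = struct.unpack("i", body[:4])[0]         # exactly what download_chunks does
    assert ordinal == chunk_ordinal
    assert body[4:] == payload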
@@ -166,8 +160,7 @@ def split_into_chunks(data: bytes, chunk_size: int = MAX_CHUNK_SIZE_BYTES) -> Li
      List[Tuple[int, bytes]]
          A list of tuples containing the ordinal number of the chunk and each chunk
      """
-     return list(
-         enumerate((data[i:i + chunk_size] for i in range(0, len(data), chunk_size)), start=1))
+     return list(enumerate((data[i : i + chunk_size] for i in range(0, len(data), chunk_size)), start=1))


  def deserialise_dataframe(data: bytes) -> Union[DataFrame, GeoDataFrame]:
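As a quick illustration of the reflowed one-liner above (the expression is copied so the example is self-contained, with a small chunk_size for readability): ordinals start at 1, the final chunk carries the remainder, and joining the payloads in order recovers the original bytes, which is what download_chunks relies on when it returns b"".join(chunks).

    # Same splitting expression as in the diff, with a tiny chunk_size for demonstration.
    def split_into_chunks(data: bytes, chunk_size: int = 4):
        return list(enumerate((data[i : i + chunk_size] for i in range(0, len(data), chunk_size)), start=1))

    chunks = split_into_chunks(b"abcdefghij")
    print(chunks)  # [(1, b'abcd'), (2, b'efgh'), (3, b'ij')]
    assert b"".join(chunk for _, chunk in chunks) == b"abcdefghij"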
@@ -211,7 +204,7 @@ def serialise_dataframe(df: Union[DataFrame, GeoDataFrame]) -> bytes:

      Parameters
      ----------
-     df : DataFrame
+     df: Union[DataFrame, GeoDataFrame]
          Input dataframe

      Returns
@@ -221,6 +214,6 @@ def serialise_dataframe(df: Union[DataFrame, GeoDataFrame]) -> bytes:
      """
      feather_buffer = BytesIO()
      # Browser implementations of feather do not support compressed feather formats.
-     df.to_feather(feather_buffer, compression='uncompressed')
+     df.to_feather(feather_buffer, compression="uncompressed")
      feather_buffer.seek(0)
      return feather_buffer.getvalue()
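For reference, a round-trip sketch of the serialisation shown above. deserialise_dataframe's body is not part of this diff, so the read side here uses pandas.read_feather directly, mirroring the module's imports; it assumes pandas and pyarrow are installed, both of which appear in install_requires.

    # Serialise a DataFrame to uncompressed feather bytes and read it back.
    from io import BytesIO

    from pandas import DataFrame, read_feather

    df = DataFrame({"x": [1.0, 2.0], "y": [3.0, 4.0]})

    feather_buffer = BytesIO()
    df.to_feather(feather_buffer, compression="uncompressed")  # same call as serialise_dataframe
    feather_buffer.seek(0)
    data = feather_buffer.getvalue()

    restored = read_feather(BytesIO(data))
    assert restored.equals(df)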