pyogrio 0.12.0__cp314-cp314t-macosx_12_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyogrio/.dylibs/libgdal.37.3.11.4.dylib +0 -0
- pyogrio/__init__.py +57 -0
- pyogrio/_compat.py +54 -0
- pyogrio/_env.py +59 -0
- pyogrio/_err.cpython-314t-darwin.so +0 -0
- pyogrio/_geometry.cpython-314t-darwin.so +0 -0
- pyogrio/_io.cpython-314t-darwin.so +0 -0
- pyogrio/_ogr.cpython-314t-darwin.so +0 -0
- pyogrio/_version.py +21 -0
- pyogrio/_vsi.cpython-314t-darwin.so +0 -0
- pyogrio/core.py +387 -0
- pyogrio/errors.py +25 -0
- pyogrio/gdal_data/GDAL-targets-release.cmake +19 -0
- pyogrio/gdal_data/GDAL-targets.cmake +106 -0
- pyogrio/gdal_data/GDALConfig.cmake +24 -0
- pyogrio/gdal_data/GDALConfigVersion.cmake +65 -0
- pyogrio/gdal_data/GDALLogoBW.svg +138 -0
- pyogrio/gdal_data/GDALLogoColor.svg +126 -0
- pyogrio/gdal_data/GDALLogoGS.svg +126 -0
- pyogrio/gdal_data/LICENSE.TXT +467 -0
- pyogrio/gdal_data/MM_m_idofic.csv +321 -0
- pyogrio/gdal_data/copyright +467 -0
- pyogrio/gdal_data/cubewerx_extra.wkt +48 -0
- pyogrio/gdal_data/default.rsc +0 -0
- pyogrio/gdal_data/ecw_cs.wkt +1453 -0
- pyogrio/gdal_data/eedaconf.json +23 -0
- pyogrio/gdal_data/epsg.wkt +1 -0
- pyogrio/gdal_data/esri_StatePlane_extra.wkt +631 -0
- pyogrio/gdal_data/gdal_algorithm.schema.json +220 -0
- pyogrio/gdal_data/gdalg.schema.json +36 -0
- pyogrio/gdal_data/gdalicon.png +0 -0
- pyogrio/gdal_data/gdalinfo_output.schema.json +390 -0
- pyogrio/gdal_data/gdalmdiminfo_output.schema.json +326 -0
- pyogrio/gdal_data/gdaltileindex.xsd +253 -0
- pyogrio/gdal_data/gdalvrt.xsd +927 -0
- pyogrio/gdal_data/gfs.xsd +246 -0
- pyogrio/gdal_data/gml_registry.xml +117 -0
- pyogrio/gdal_data/gml_registry.xsd +66 -0
- pyogrio/gdal_data/grib2_center.csv +251 -0
- pyogrio/gdal_data/grib2_process.csv +102 -0
- pyogrio/gdal_data/grib2_subcenter.csv +63 -0
- pyogrio/gdal_data/grib2_table_4_2_0_0.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_1.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_13.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_14.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_15.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_16.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_17.csv +11 -0
- pyogrio/gdal_data/grib2_table_4_2_0_18.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_19.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_190.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_191.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_2.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_20.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_21.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_3.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_4.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_5.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_6.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_0_7.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_10_0.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_10_1.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_10_191.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_10_2.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_10_3.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_10_4.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_1_0.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_1_1.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_1_2.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_20_0.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_20_1.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_20_2.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_2_0.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_2_3.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_2_4.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_2_5.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_2_6.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_3_0.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_3_1.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_3_2.csv +28 -0
- pyogrio/gdal_data/grib2_table_4_2_3_3.csv +8 -0
- pyogrio/gdal_data/grib2_table_4_2_3_4.csv +14 -0
- pyogrio/gdal_data/grib2_table_4_2_3_5.csv +11 -0
- pyogrio/gdal_data/grib2_table_4_2_3_6.csv +11 -0
- pyogrio/gdal_data/grib2_table_4_2_4_0.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_1.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_10.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_2.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_3.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_4.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_5.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_6.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_7.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_8.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_4_9.csv +261 -0
- pyogrio/gdal_data/grib2_table_4_2_local_Canada.csv +5 -0
- pyogrio/gdal_data/grib2_table_4_2_local_HPC.csv +2 -0
- pyogrio/gdal_data/grib2_table_4_2_local_MRMS.csv +175 -0
- pyogrio/gdal_data/grib2_table_4_2_local_NCEP.csv +401 -0
- pyogrio/gdal_data/grib2_table_4_2_local_NDFD.csv +38 -0
- pyogrio/gdal_data/grib2_table_4_2_local_index.csv +7 -0
- pyogrio/gdal_data/grib2_table_4_5.csv +261 -0
- pyogrio/gdal_data/grib2_table_versions.csv +3 -0
- pyogrio/gdal_data/gt_datum.csv +229 -0
- pyogrio/gdal_data/gt_ellips.csv +24 -0
- pyogrio/gdal_data/header.dxf +1124 -0
- pyogrio/gdal_data/inspire_cp_BasicPropertyUnit.gfs +57 -0
- pyogrio/gdal_data/inspire_cp_CadastralBoundary.gfs +60 -0
- pyogrio/gdal_data/inspire_cp_CadastralParcel.gfs +81 -0
- pyogrio/gdal_data/inspire_cp_CadastralZoning.gfs +161 -0
- pyogrio/gdal_data/jpfgdgml_AdmArea.gfs +59 -0
- pyogrio/gdal_data/jpfgdgml_AdmBdry.gfs +49 -0
- pyogrio/gdal_data/jpfgdgml_AdmPt.gfs +59 -0
- pyogrio/gdal_data/jpfgdgml_BldA.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_BldL.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_Cntr.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_CommBdry.gfs +49 -0
- pyogrio/gdal_data/jpfgdgml_CommPt.gfs +59 -0
- pyogrio/gdal_data/jpfgdgml_Cstline.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_ElevPt.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_GCP.gfs +94 -0
- pyogrio/gdal_data/jpfgdgml_LeveeEdge.gfs +49 -0
- pyogrio/gdal_data/jpfgdgml_RailCL.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_RdASL.gfs +44 -0
- pyogrio/gdal_data/jpfgdgml_RdArea.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_RdCompt.gfs +59 -0
- pyogrio/gdal_data/jpfgdgml_RdEdg.gfs +59 -0
- pyogrio/gdal_data/jpfgdgml_RdMgtBdry.gfs +49 -0
- pyogrio/gdal_data/jpfgdgml_RdSgmtA.gfs +59 -0
- pyogrio/gdal_data/jpfgdgml_RvrMgtBdry.gfs +49 -0
- pyogrio/gdal_data/jpfgdgml_SBAPt.gfs +49 -0
- pyogrio/gdal_data/jpfgdgml_SBArea.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_SBBdry.gfs +44 -0
- pyogrio/gdal_data/jpfgdgml_WA.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_WL.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_WStrA.gfs +54 -0
- pyogrio/gdal_data/jpfgdgml_WStrL.gfs +54 -0
- pyogrio/gdal_data/leaflet_template.html +102 -0
- pyogrio/gdal_data/nitf_spec.xml +3288 -0
- pyogrio/gdal_data/nitf_spec.xsd +171 -0
- pyogrio/gdal_data/ogr_fields_override.schema.json +125 -0
- pyogrio/gdal_data/ogrinfo_output.schema.json +528 -0
- pyogrio/gdal_data/ogrvrt.xsd +528 -0
- pyogrio/gdal_data/osmconf.ini +134 -0
- pyogrio/gdal_data/ozi_datum.csv +131 -0
- pyogrio/gdal_data/ozi_ellips.csv +35 -0
- pyogrio/gdal_data/pci_datum.txt +530 -0
- pyogrio/gdal_data/pci_ellips.txt +129 -0
- pyogrio/gdal_data/pdfcomposition.xsd +703 -0
- pyogrio/gdal_data/pds4_template.xml +65 -0
- pyogrio/gdal_data/plscenesconf.json +1985 -0
- pyogrio/gdal_data/ruian_vf_ob_v1.gfs +1455 -0
- pyogrio/gdal_data/ruian_vf_st_uvoh_v1.gfs +86 -0
- pyogrio/gdal_data/ruian_vf_st_v1.gfs +1489 -0
- pyogrio/gdal_data/ruian_vf_v1.gfs +2126 -0
- pyogrio/gdal_data/s57agencies.csv +249 -0
- pyogrio/gdal_data/s57attributes.csv +484 -0
- pyogrio/gdal_data/s57expectedinput.csv +1008 -0
- pyogrio/gdal_data/s57objectclasses.csv +287 -0
- pyogrio/gdal_data/seed_2d.dgn +0 -0
- pyogrio/gdal_data/seed_3d.dgn +0 -0
- pyogrio/gdal_data/stateplane.csv +259 -0
- pyogrio/gdal_data/template_tiles.mapml +28 -0
- pyogrio/gdal_data/tms_LINZAntarticaMapTileGrid.json +190 -0
- pyogrio/gdal_data/tms_MapML_APSTILE.json +268 -0
- pyogrio/gdal_data/tms_MapML_CBMTILE.json +346 -0
- pyogrio/gdal_data/tms_NZTM2000.json +243 -0
- pyogrio/gdal_data/trailer.dxf +434 -0
- pyogrio/gdal_data/usage +4 -0
- pyogrio/gdal_data/vcpkg-cmake-wrapper.cmake +23 -0
- pyogrio/gdal_data/vcpkg.spdx.json +291 -0
- pyogrio/gdal_data/vcpkg_abi_info.txt +45 -0
- pyogrio/gdal_data/vdv452.xml +349 -0
- pyogrio/gdal_data/vdv452.xsd +45 -0
- pyogrio/gdal_data/vicar.json +164 -0
- pyogrio/geopandas.py +978 -0
- pyogrio/proj_data/CH +22 -0
- pyogrio/proj_data/GL27 +23 -0
- pyogrio/proj_data/ITRF2000 +24 -0
- pyogrio/proj_data/ITRF2008 +94 -0
- pyogrio/proj_data/ITRF2014 +55 -0
- pyogrio/proj_data/ITRF2020 +91 -0
- pyogrio/proj_data/copyright +34 -0
- pyogrio/proj_data/deformation_model.schema.json +582 -0
- pyogrio/proj_data/nad.lst +142 -0
- pyogrio/proj_data/nad27 +810 -0
- pyogrio/proj_data/nad83 +745 -0
- pyogrio/proj_data/other.extra +53 -0
- pyogrio/proj_data/proj-config-version.cmake +44 -0
- pyogrio/proj_data/proj-config.cmake +79 -0
- pyogrio/proj_data/proj-targets-release.cmake +19 -0
- pyogrio/proj_data/proj-targets.cmake +107 -0
- pyogrio/proj_data/proj.db +0 -0
- pyogrio/proj_data/proj.ini +59 -0
- pyogrio/proj_data/proj4-targets-release.cmake +19 -0
- pyogrio/proj_data/proj4-targets.cmake +107 -0
- pyogrio/proj_data/projjson.schema.json +1174 -0
- pyogrio/proj_data/triangulation.schema.json +214 -0
- pyogrio/proj_data/usage +9 -0
- pyogrio/proj_data/vcpkg.spdx.json +203 -0
- pyogrio/proj_data/vcpkg_abi_info.txt +28 -0
- pyogrio/proj_data/world +214 -0
- pyogrio/raw.py +897 -0
- pyogrio/tests/__init__.py +0 -0
- pyogrio/tests/conftest.py +588 -0
- pyogrio/tests/fixtures/README.md +108 -0
- pyogrio/tests/fixtures/curve.gpkg +0 -0
- pyogrio/tests/fixtures/curvepolygon.gpkg +0 -0
- pyogrio/tests/fixtures/line_zm.gpkg +0 -0
- pyogrio/tests/fixtures/list_field_values_file.parquet +0 -0
- pyogrio/tests/fixtures/list_nested_struct_file.parquet +0 -0
- pyogrio/tests/fixtures/multisurface.gpkg +0 -0
- pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.cpg +1 -0
- pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.dbf +0 -0
- pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.prj +1 -0
- pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.shp +0 -0
- pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.shx +0 -0
- pyogrio/tests/fixtures/sample.osm.pbf +0 -0
- pyogrio/tests/fixtures/test_gpkg_nulls.gpkg +0 -0
- pyogrio/tests/test_arrow.py +1160 -0
- pyogrio/tests/test_core.py +702 -0
- pyogrio/tests/test_geopandas_io.py +3218 -0
- pyogrio/tests/test_path.py +374 -0
- pyogrio/tests/test_raw_io.py +1473 -0
- pyogrio/tests/test_util.py +56 -0
- pyogrio/util.py +258 -0
- pyogrio-0.12.0.dist-info/METADATA +125 -0
- pyogrio-0.12.0.dist-info/RECORD +231 -0
- pyogrio-0.12.0.dist-info/WHEEL +6 -0
- pyogrio-0.12.0.dist-info/licenses/LICENSE +21 -0
- pyogrio-0.12.0.dist-info/top_level.txt +1 -0
File without changes

pyogrio/tests/conftest.py
@@ -0,0 +1,588 @@
"""Module with helper functions, fixtures, and common test data for pyogrio tests."""

from io import BytesIO
from pathlib import Path
from zipfile import ZIP_DEFLATED, ZipFile

import numpy as np

from pyogrio import __gdal_version_string__, __version__, list_drivers
from pyogrio._compat import (
    GDAL_GE_37,
    HAS_ARROW_WRITE_API,
    HAS_GDAL_GEOS,
    HAS_PYARROW,
    HAS_PYPROJ,
    HAS_SHAPELY,
)
from pyogrio.core import vsi_rmtree
from pyogrio.raw import read, write

import pytest

_data_dir = Path(__file__).parent.resolve() / "fixtures"

# mapping of driver extension to driver name for well-supported drivers
DRIVERS = {
    ".fgb": "FlatGeobuf",
    ".geojson": "GeoJSON",
    ".geojsonl": "GeoJSONSeq",
    ".geojsons": "GeoJSONSeq",
    ".gpkg": "GPKG",
    ".gpkg.zip": "GPKG",
    ".shp": "ESRI Shapefile",
    ".shp.zip": "ESRI Shapefile",
    ".shz": "ESRI Shapefile",
}

# mapping of driver name to extension
DRIVER_EXT = {driver: ext for ext, driver in DRIVERS.items()}

ALL_EXTS = [".fgb", ".geojson", ".geojsonl", ".gpkg", ".shp"]

START_FID = {
    ".fgb": 0,
    ".geojson": 0,
    ".geojsonl": 0,
    ".geojsons": 0,
    ".gpkg": 1,
    ".shp": 0,
}

GDAL_HAS_PARQUET_DRIVER = "Parquet" in list_drivers()


def pytest_report_header(config):
    drivers = ", ".join(
        f"{driver}({capability})"
        for driver, capability in sorted(list_drivers().items())
    )
    return (
        f"pyogrio {__version__}\n"
        f"GDAL {__gdal_version_string__}\n"
        f"Supported drivers: {drivers}"
    )


# marks to skip tests if optional dependencies are not present
requires_pyarrow_api = pytest.mark.skipif(not HAS_PYARROW, reason="pyarrow required")

requires_pyproj = pytest.mark.skipif(not HAS_PYPROJ, reason="pyproj required")

requires_arrow_write_api = pytest.mark.skipif(
    not HAS_ARROW_WRITE_API or not HAS_PYARROW,
    reason="GDAL>=3.8 required for Arrow write API",
)

requires_gdal_geos = pytest.mark.skipif(
    not HAS_GDAL_GEOS, reason="GDAL compiled with GEOS required"
)

requires_shapely = pytest.mark.skipif(not HAS_SHAPELY, reason="Shapely >= 2.0 required")


def prepare_testfile(testfile_path, dst_dir, ext):
    if ext == ".gpkg.zip" and not GDAL_GE_37:
        pytest.skip(".gpkg.zip support requires GDAL >= 3.7")

    if ext == testfile_path.suffix:
        return testfile_path

    dst_path = dst_dir / f"{testfile_path.stem}{ext}"
    if dst_path.exists():
        return dst_path

    meta, _, geometry, field_data = read(testfile_path)

    if ext == ".fgb":
        # For .fgb, spatial_index=False to avoid the rows being reordered
        meta["spatial_index"] = False
        # allow mixed Polygons/MultiPolygons type
        meta["geometry_type"] = "Unknown"

    elif ext in (".gpkg", ".gpkg.zip"):
        # For .gpkg, spatial_index=False to avoid the rows being reordered
        meta["spatial_index"] = False
        meta["geometry_type"] = "MultiPolygon"

    write(dst_path, geometry, field_data, **meta)
    return dst_path


@pytest.fixture(scope="session")
def data_dir():
    return _data_dir


@pytest.fixture(scope="function")
def naturalearth_lowres(tmp_path, request):
    ext = getattr(request, "param", ".shp")
    testfile_path = _data_dir / Path("naturalearth_lowres/naturalearth_lowres.shp")

    return prepare_testfile(testfile_path, tmp_path, ext)


@pytest.fixture(scope="function", params=ALL_EXTS)
def naturalearth_lowres_all_ext(tmp_path, naturalearth_lowres, request):
    return prepare_testfile(naturalearth_lowres, tmp_path, request.param)


@pytest.fixture(scope="function", params=[".geojson"])
def naturalearth_lowres_geojson(tmp_path, naturalearth_lowres, request):
    return prepare_testfile(naturalearth_lowres, tmp_path, request.param)


@pytest.fixture(scope="function")
def naturalearth_lowres_vsi(tmp_path, naturalearth_lowres):
    """Wrap naturalearth_lowres as a zip file for VSI tests"""

    path = tmp_path / f"{naturalearth_lowres.name}.zip"
    with ZipFile(path, mode="w", compression=ZIP_DEFLATED, compresslevel=5) as out:
        for ext in ["dbf", "prj", "shp", "shx", "cpg"]:
            filename = f"{naturalearth_lowres.stem}.{ext}"
            out.write(naturalearth_lowres.parent / filename, filename)

    return path, f"/vsizip/{path}/{naturalearth_lowres.name}"


@pytest.fixture(scope="function")
def naturalearth_lowres_vsimem(naturalearth_lowres):
    """Write naturalearth_lowres to a vsimem file for VSI tests"""

    meta, _, geometry, field_data = read(naturalearth_lowres)
    name = f"pyogrio_fixture_{naturalearth_lowres.stem}"
    dst_path = Path(f"/vsimem/{name}/{name}.gpkg")
    meta["spatial_index"] = False
    meta["geometry_type"] = "MultiPolygon"

    write(dst_path, geometry, field_data, layer="naturalearth_lowres", **meta)
    yield dst_path

    vsi_rmtree(dst_path.parent)


@pytest.fixture(scope="session")
def line_zm_file():
    return _data_dir / "line_zm.gpkg"


@pytest.fixture(scope="session")
def curve_file():
    return _data_dir / "curve.gpkg"


@pytest.fixture(scope="session")
def curve_polygon_file():
    return _data_dir / "curvepolygon.gpkg"


@pytest.fixture(scope="session")
def multisurface_file():
    return _data_dir / "multisurface.gpkg"


@pytest.fixture(scope="session")
def test_gpkg_nulls():
    return _data_dir / "test_gpkg_nulls.gpkg"


@pytest.fixture(scope="function")
def no_geometry_file(tmp_path):
    # create a GPKG layer that does not include geometry
    filename = tmp_path / "test_no_geometry.gpkg"
    write(
        filename,
        layer="no_geometry",
        geometry=None,
        field_data=[np.array(["a", "b", "c"])],
        fields=["col"],
    )

    return filename


def list_field_values_geojson_file(tmp_path):
    # Create a GeoJSON file with list values in a property
    list_geojson = """{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "properties": {
        "int": 1,
        "list_int": [0, 1],
        "list_double": [0.0, 1.0],
        "list_string": ["string1", "string2"],
        "list_int_with_null": [0, null],
        "list_string_with_null": ["string1", null]
      },
      "geometry": { "type": "Point", "coordinates": [0, 2] }
    },
    {
      "type": "Feature",
      "properties": {
        "int": 2,
        "list_int": [2, 3],
        "list_double": [2.0, 3.0],
        "list_string": ["string3", "string4", ""],
        "list_int_with_null": [2, 3],
        "list_string_with_null": ["string3", "string4", ""]
      },
      "geometry": { "type": "Point", "coordinates": [1, 2] }
    },
    {
      "type": "Feature",
      "properties": {
        "int": 3,
        "list_int": [],
        "list_double": [],
        "list_string": [],
        "list_int_with_null": [],
        "list_string_with_null": []
      },
      "geometry": { "type": "Point", "coordinates": [2, 2] }
    },
    {
      "type": "Feature",
      "properties": {
        "int": 4,
        "list_int": null,
        "list_double": null,
        "list_string": null,
        "list_int_with_null": null,
        "list_string_with_null": null
      },
      "geometry": { "type": "Point", "coordinates": [2, 2] }
    },
    {
      "type": "Feature",
      "properties": {
        "int": 5,
        "list_int": null,
        "list_double": null,
        "list_string": [""],
        "list_int_with_null": null,
        "list_string_with_null": [""]
      },
      "geometry": { "type": "Point", "coordinates": [2, 2] }
    }
  ]
}"""

    filename = tmp_path / "test_ogr_types_list.geojson"
    with open(filename, "w") as f:
        _ = f.write(list_geojson)

    return filename


def list_field_values_parquet_file():
    """Return the path to a Parquet file with list values in a property.

    Because in the CI environments pyarrow.parquet is typically not available, we save
    the file in the test data directory instead of always creating it from scratch.

    The code to create it is here though, in case it needs to be recreated later.
    """
    # Check if the file already exists in the test data dir
    fixture_path = _data_dir / "list_field_values_file.parquet"
    if fixture_path.exists():
        return fixture_path

    # The file doesn't exist, so create it
    try:
        import pyarrow as pa
        from pyarrow import parquet as pq

        import shapely
    except ImportError as ex:
        raise RuntimeError(
            f"test file {fixture_path} does not exist, but error importing: {ex}."
        )

    table = pa.table(
        {
            "geometry": shapely.to_wkb(shapely.points(np.ones((5, 2)))),
            "int": [1, 2, 3, 4, 5],
            "list_int": [[0, 1], [2, 3], [], None, None],
            "list_double": [[0.0, 1.0], [2.0, 3.0], [], None, None],
            "list_string": [
                ["string1", "string2"],
                ["string3", "string4", ""],
                [],
                None,
                [""],
            ],
            "list_int_with_null": [[0, None], [2, 3], [], None, None],
            "list_string_with_null": [
                ["string1", None],
                ["string3", "string4", ""],
                [],
                None,
                [""],
            ],
        }
    )
    pq.write_table(table, fixture_path)

    return fixture_path


@pytest.fixture(scope="function", params=[".geojson", ".parquet"])
def list_field_values_files(tmp_path, request):
    if request.param == ".geojson":
        return list_field_values_geojson_file(tmp_path)
    elif request.param == ".parquet":
        return list_field_values_parquet_file()


@pytest.fixture(scope="function")
def nested_geojson_file(tmp_path):
    # create GeoJSON file with nested properties
    nested_geojson = """{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "geometry": {
        "type": "Point",
        "coordinates": [0, 0]
      },
      "properties": {
        "top_level": "A",
        "intermediate_level": {
          "bottom_level": "B"
        }
      }
    }
  ]
}"""

    filename = tmp_path / "test_nested.geojson"
    with open(filename, "w") as f:
        _ = f.write(nested_geojson)

    return filename


@pytest.fixture(scope="function")
def list_nested_struct_parquet_file(tmp_path):
    """Return the path to a Parquet file with nested values in a property.

    Because in the CI environments pyarrow.parquet is typically not available, we save
    the file in the test data directory instead of always creating it from scratch.

    The code to create it is here though, in case it needs to be recreated later.
    """
    # Check if the file already exists in the test data dir
    fixture_path = _data_dir / "list_nested_struct_file.parquet"
    if fixture_path.exists():
        return fixture_path

    # The file doesn't exist, so create it
    try:
        import pyarrow as pa
        from pyarrow import parquet as pq

        import shapely
    except ImportError as ex:
        raise RuntimeError(
            f"test file {fixture_path} does not exist, but error importing: {ex}."
        )

    table = pa.table(
        {
            "geometry": shapely.to_wkb(shapely.points(np.ones((3, 2)))),
            "col_flat": [0, 1, 2],
            "col_struct": [{"a": 1, "b": 2}] * 3,
            "col_nested": [[{"a": 1, "b": 2}] * 2] * 3,
            "col_list": [[1, 2, 3]] * 3,
        }
    )
    pq.write_table(table, fixture_path)

    return fixture_path


@pytest.fixture(scope="function")
def datetime_file(tmp_path):
    # create GeoJSON file with millisecond precision
    datetime_geojson = """{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "properties": { "col": "2020-01-01T09:00:00.123" },
      "geometry": { "type": "Point", "coordinates": [1, 1] }
    },
    {
      "type": "Feature",
      "properties": { "col": "2020-01-01T10:00:00" },
      "geometry": { "type": "Point", "coordinates": [2, 2] }
    }
  ]
}"""

    filename = tmp_path / "test_datetime.geojson"
    with open(filename, "w") as f:
        _ = f.write(datetime_geojson)

    return filename


@pytest.fixture(scope="function")
def datetime_tz_file(tmp_path):
    # create GeoJSON file with datetimes with time zone
    datetime_tz_geojson = """{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "properties": { "datetime_col": "2020-01-01T09:00:00.123-05:00" },
      "geometry": { "type": "Point", "coordinates": [1, 1] }
    },
    {
      "type": "Feature",
      "properties": { "datetime_col": "2020-01-01T10:00:00-05:00" },
      "geometry": { "type": "Point", "coordinates": [2, 2] }
    }
  ]
}"""

    filename = tmp_path / "test_datetime_tz.geojson"
    with open(filename, "w") as f:
        f.write(datetime_tz_geojson)

    return filename


@pytest.fixture(scope="function")
def geojson_bytes(tmp_path):
    """Extracts first 3 records from naturalearth_lowres and writes to GeoJSON,
    returning bytes"""
    meta, _, geometry, field_data = read(
        _data_dir / Path("naturalearth_lowres/naturalearth_lowres.shp"), max_features=3
    )

    filename = tmp_path / "test.geojson"
    write(filename, geometry, field_data, **meta)

    with open(filename, "rb") as f:
        bytes_buffer = f.read()

    return bytes_buffer


@pytest.fixture(scope="function")
def geojson_datetime_long_ago(tmp_path):
    # create GeoJSON file with datetimes from long ago
    datetime_tz_geojson = """{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "properties": { "datetime_col": "1670-01-01T09:00:00" },
      "geometry": { "type": "Point", "coordinates": [1, 1] }
    }
  ]
}"""

    filename = tmp_path / "test_datetime_long_ago.geojson"
    with open(filename, "w") as f:
        f.write(datetime_tz_geojson)

    return filename


@pytest.fixture(scope="function")
def geojson_filelike(tmp_path):
    """Extracts first 3 records from naturalearth_lowres and writes to GeoJSON,
    returning open file handle"""
    meta, _, geometry, field_data = read(
        _data_dir / Path("naturalearth_lowres/naturalearth_lowres.shp"), max_features=3
    )

    filename = tmp_path / "test.geojson"
    write(filename, geometry, field_data, layer="test", **meta)

    with open(filename, "rb") as f:
        yield f


@pytest.fixture(scope="function")
def kml_file(tmp_path):
    # create KML file
    kml_data = """<?xml version="1.0" encoding="utf-8" ?>
<kml xmlns="http://www.opengis.net/kml/2.2">
<Document id="root_doc">
<Schema name="interfaces1" id="interfaces1">
<SimpleField name="id" type="float"></SimpleField>
<SimpleField name="formation" type="string"></SimpleField>
</Schema>
<Folder><name>interfaces1</name>
<Placemark>
<ExtendedData><SchemaData schemaUrl="#interfaces1">
<SimpleData name="formation">Ton</SimpleData>
</SchemaData></ExtendedData>
<Point><coordinates>19.1501280458077,293.313485355882</coordinates></Point>
</Placemark>
</Folder>
</Document>
</kml>
"""
    filename = tmp_path / "test.kml"
    with open(filename, "w") as f:
        _ = f.write(kml_data)

    return filename


@pytest.fixture(scope="function")
def nonseekable_bytes(tmp_path):
    # mock a non-seekable byte stream, such as a zstandard handle
    class NonSeekableBytesIO(BytesIO):
        def seekable(self):
            return False

        def seek(self, *args, **kwargs):
            raise OSError("cannot seek")

    # wrap GeoJSON into a non-seekable BytesIO
    geojson = """{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "properties": { },
      "geometry": { "type": "Point", "coordinates": [1, 1] }
    }
  ]
}"""

    return NonSeekableBytesIO(geojson.encode("UTF-8"))


@pytest.fixture(
    scope="session",
    params=[
        # Japanese
        ("CP932", "ホ"),
        # Chinese
        ("CP936", "中文"),
        # Central European
        ("CP1250", "Đ"),
        # Latin 1 / Western European
        ("CP1252", "ÿ"),
        # Greek
        ("CP1253", "Φ"),
        # Arabic
        ("CP1256", "ش"),
    ],
)
def encoded_text(request):
    """Return tuple with encoding name and very short sample text in that encoding
    NOTE: it was determined through testing that code pages for MS-DOS do not
    consistently work across all Python installations (in particular, fail with conda),
    but ANSI code pages appear to work properly.
    """
    return request.param

pyogrio/tests/fixtures/README.md
@@ -0,0 +1,108 @@
# Test datasets

## Obtaining / creating test datasets

If a test dataset can be created in code, do that instead of adding a data file
to the repository. If it is used in a single test, create the test dataset as
part of that test. If it is used in more than a single test, add it to
`pyogrio/tests/conftest.py` instead, as a function-scoped test fixture (see the
sketch below).
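
As a rough sketch (the fixture name, columns, and output format here are
illustrative only, not an actual fixture in this repository), such a
function-scoped fixture could look like:

```
import geopandas as gp
import pytest

from pyogrio import write_dataframe


@pytest.fixture(scope="function")
def points_file(tmp_path):
    # build a tiny GeoDataFrame in code and write it to a temporary GPKG
    df = gp.GeoDataFrame(
        {"col": [1, 2, 3]},
        geometry=gp.points_from_xy([0, 1, 2], [0, 1, 2]),
        crs="EPSG:4326",
    )
    filename = tmp_path / "points.gpkg"
    write_dataframe(df, filename)
    return filename
```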

If you need to obtain 3rd party test files:

- add a section below that describes the source location and processing steps
  to derive that dataset
- make sure the license is compatible with including in Pyogrio (public domain
  or open-source) and record that license below

Please keep the test files no larger than necessary to use in tests.

## Included test datasets

### Natural Earth lowres

`naturalearth_lowres.shp` was copied from GeoPandas.

License: public domain

### GPKG test dataset with null values

`test_gpkg_nulls.gpkg` was created using the Fiona backend to GeoPandas:

```
from collections import OrderedDict

import fiona
import geopandas as gp
import numpy as np
from pyogrio import write_dataframe

filename = "test_gpkg_nulls.gpkg"

df = gp.GeoDataFrame(
    {
        "col_bool": np.array([True, False, True], dtype="bool"),
        "col_int8": np.array([1, 2, 3], dtype="int8"),
        "col_int16": np.array([1, 2, 3], dtype="int16"),
        "col_int32": np.array([1, 2, 3], dtype="int32"),
        "col_int64": np.array([1, 2, 3], dtype="int64"),
        "col_uint8": np.array([1, 2, 3], dtype="uint8"),
        "col_uint16": np.array([1, 2, 3], dtype="uint16"),
        "col_uint32": np.array([1, 2, 3], dtype="uint32"),
        "col_uint64": np.array([1, 2, 3], dtype="uint64"),
        "col_float32": np.array([1.5, 2.5, 3.5], dtype="float32"),
        "col_float64": np.array([1.5, 2.5, 3.5], dtype="float64"),
    },
    geometry=gp.points_from_xy([0, 1, 2], [0, 1, 2]),
    crs="EPSG:4326",
)

write_dataframe(df, filename)

# construct row with null values
# Note: np.nan can only be used for float values
null_row = {
    "type": "Feature",
    "id": 4,
    "properties": OrderedDict(
        [
            ("col_bool", None),
            ("col_int8", None),
            ("col_int16", None),
            ("col_int32", None),
            ("col_int64", None),
            ("col_uint8", None),
            ("col_uint16", None),
            ("col_uint32", None),
            ("col_uint64", None),
            ("col_float32", np.nan),
            ("col_float64", np.nan),
        ]
    ),
    "geometry": {"type": "Point", "coordinates": (4.0, 4.0)},
}

# append row with nulls to GPKG
with fiona.open(filename, "a") as c:
    c.write(null_row)
```

NOTE: Reading boolean values into GeoPandas using the Fiona backend treats those
values as `None` and the column dtype as `object`; Pyogrio treats those values as
`np.nan` and the column dtype as `float64`.
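
A quick way to see the Pyogrio behavior (illustrative sketch; assumes the fixture
file is in the current directory and GeoPandas is installed):

```
from pyogrio import read_dataframe

df = read_dataframe("test_gpkg_nulls.gpkg")

# with Pyogrio, the appended null row makes "col_bool" a float64 column
# and the missing value is read back as np.nan rather than None
print(df["col_bool"].dtype)
print(df["col_bool"].tolist())
```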

License: same as Pyogrio

### OSM PBF test

This was downloaded from https://github.com/openstreetmap/OSM-binary/blob/master/resources/sample.pbf

License: [Open Data Commons Open Database License (ODbL)](https://opendatacommons.org/licenses/odbl/)

### Test files for geometry types that are downgraded on read

`line_zm.gpkg` was created using QGIS to digitize a LineString GPKG layer with Z and M enabled. Downgraded to LineString Z on read.
`curve.gpkg` was created using QGIS to digitize a Curve GPKG layer. Downgraded to LineString on read.
`curvepolygon.gpkg` was created using QGIS to digitize a CurvePolygon GPKG layer. Downgraded to Polygon on read.
`multisurface.gpkg` was created using QGIS to digitize a MultiSurface GPKG layer. Downgraded to MultiPolygon on read.

License: same as Pyogrio

Binary file

Binary file

Binary file