pyogrio 0.7.2-cp310-cp310-manylinux_2_28_aarch64.whl → 0.9.0-cp310-cp310-manylinux_2_28_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (48)
  1. pyogrio/__init__.py +4 -0
  2. pyogrio/_compat.py +6 -1
  3. pyogrio/_err.cpython-310-aarch64-linux-gnu.so +0 -0
  4. pyogrio/_err.pyx +7 -3
  5. pyogrio/_geometry.cpython-310-aarch64-linux-gnu.so +0 -0
  6. pyogrio/_io.cpython-310-aarch64-linux-gnu.so +0 -0
  7. pyogrio/_io.pyx +904 -242
  8. pyogrio/_ogr.cpython-310-aarch64-linux-gnu.so +0 -0
  9. pyogrio/_ogr.pxd +69 -13
  10. pyogrio/_ogr.pyx +8 -24
  11. pyogrio/_version.py +3 -3
  12. pyogrio/_vsi.cpython-310-aarch64-linux-gnu.so +0 -0
  13. pyogrio/_vsi.pxd +4 -0
  14. pyogrio/_vsi.pyx +140 -0
  15. pyogrio/core.py +43 -44
  16. pyogrio/gdal_data/GDAL-targets-release.cmake +3 -3
  17. pyogrio/gdal_data/GDAL-targets.cmake +10 -6
  18. pyogrio/gdal_data/GDALConfigVersion.cmake +3 -3
  19. pyogrio/gdal_data/gdalinfo_output.schema.json +2 -0
  20. pyogrio/gdal_data/gdalvrt.xsd +163 -0
  21. pyogrio/gdal_data/ogrinfo_output.schema.json +12 -1
  22. pyogrio/gdal_data/vcpkg.spdx.json +26 -26
  23. pyogrio/gdal_data/vcpkg_abi_info.txt +27 -26
  24. pyogrio/geopandas.py +140 -34
  25. pyogrio/proj_data/ITRF2008 +2 -2
  26. pyogrio/proj_data/proj-config-version.cmake +2 -2
  27. pyogrio/proj_data/proj-config.cmake +2 -1
  28. pyogrio/proj_data/proj-targets.cmake +13 -13
  29. pyogrio/proj_data/proj.db +0 -0
  30. pyogrio/proj_data/proj4-targets.cmake +13 -13
  31. pyogrio/proj_data/vcpkg.spdx.json +20 -42
  32. pyogrio/proj_data/vcpkg_abi_info.txt +14 -15
  33. pyogrio/raw.py +438 -116
  34. pyogrio/tests/conftest.py +75 -6
  35. pyogrio/tests/fixtures/poly_not_enough_points.shp.zip +0 -0
  36. pyogrio/tests/test_arrow.py +841 -7
  37. pyogrio/tests/test_core.py +99 -7
  38. pyogrio/tests/test_geopandas_io.py +827 -121
  39. pyogrio/tests/test_path.py +23 -3
  40. pyogrio/tests/test_raw_io.py +276 -50
  41. pyogrio/util.py +39 -19
  42. {pyogrio-0.7.2.dist-info → pyogrio-0.9.0.dist-info}/METADATA +2 -2
  43. {pyogrio-0.7.2.dist-info → pyogrio-0.9.0.dist-info}/RECORD +210 -207
  44. {pyogrio-0.7.2.dist-info → pyogrio-0.9.0.dist-info}/WHEEL +1 -1
  45. pyogrio.libs/{libgdal-cb554135.so.33.3.7.2 → libgdal-6ff0914e.so.34.3.8.5} +0 -0
  46. pyogrio/tests/win32.py +0 -86
  47. {pyogrio-0.7.2.dist-info → pyogrio-0.9.0.dist-info}/LICENSE +0 -0
  48. {pyogrio-0.7.2.dist-info → pyogrio-0.9.0.dist-info}/top_level.txt +0 -0
pyogrio/geopandas.py CHANGED
@@ -2,14 +2,14 @@ import os
 
 import numpy as np
 
-from pyogrio._compat import HAS_GEOPANDAS, PANDAS_GE_15, PANDAS_GE_20
+from pyogrio._compat import HAS_GEOPANDAS, PANDAS_GE_15, PANDAS_GE_20, PANDAS_GE_22
 from pyogrio.raw import (
     DRIVERS_NO_MIXED_SINGLE_MULTI,
     DRIVERS_NO_MIXED_DIMENSIONS,
-    detect_write_driver,
     read,
     read_arrow,
     write,
+    _get_write_path_driver,
 )
 from pyogrio.errors import DataSourceError
 import warnings
@@ -33,7 +33,9 @@ def _stringify_path(path):
 def _try_parse_datetime(ser):
     import pandas as pd  # only called when pandas is known to be installed
 
-    if PANDAS_GE_20:
+    if PANDAS_GE_22:
+        datetime_kwargs = dict(format="ISO8601")
+    elif PANDAS_GE_20:
         datetime_kwargs = dict(format="ISO8601", errors="ignore")
     else:
         datetime_kwargs = dict(yearfirst=True)
@@ -48,10 +50,13 @@ def _try_parse_datetime(ser):
     try:
         res = pd.to_datetime(ser, **datetime_kwargs)
     except Exception:
-        pass
+        res = ser
     # if object dtype, try parse as utc instead
     if res.dtype == "object":
-        res = pd.to_datetime(ser, utc=True, **datetime_kwargs)
+        try:
+            res = pd.to_datetime(ser, utc=True, **datetime_kwargs)
+        except Exception:
+            pass
 
     if res.dtype != "object":
         # GDAL only supports ms precision, convert outputs to match.
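
This change tracks pandas 2.2, which deprecated ``errors="ignore"`` in ``to_datetime``; parse failures are now caught explicitly and the original values kept. A standalone sketch of the same fallback pattern (the ``packaging`` version check and the helper name are illustrative, not pyogrio code):

    import pandas as pd
    from packaging.version import Version

    def parse_datetime_column(ser: pd.Series) -> pd.Series:
        # pandas >= 2.0 can parse mixed ISO 8601 strings via format="ISO8601";
        # older versions fall back to dateutil-style parsing with yearfirst=True.
        if Version(pd.__version__) >= Version("2.0"):
            kwargs = {"format": "ISO8601"}
        else:
            kwargs = {"yearfirst": True}
        try:
            res = pd.to_datetime(ser, **kwargs)
        except Exception:
            res = ser  # keep the original values if parsing fails outright
        if res.dtype == "object":
            # mixed timezone offsets: retry with utc=True to get a single tz-aware dtype
            try:
                res = pd.to_datetime(ser, utc=True, **kwargs)
            except Exception:
                pass
        return res
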
@@ -82,6 +87,7 @@ def read_dataframe(
82
87
  sql_dialect=None,
83
88
  fid_as_index=False,
84
89
  use_arrow=None,
90
+ on_invalid="raise",
85
91
  arrow_to_pandas_kwargs=None,
86
92
  **kwargs,
87
93
  ):
@@ -101,13 +107,16 @@ def read_dataframe(
101
107
  of the layer in the data source. Defaults to first layer in data source.
102
108
  encoding : str, optional (default: None)
103
109
  If present, will be used as the encoding for reading string values from
104
- the data source, unless encoding can be inferred directly from the data
105
- source.
110
+ the data source. By default will automatically try to detect the native
111
+ encoding and decode to ``UTF-8``.
106
112
  columns : list-like, optional (default: all columns)
107
113
  List of column names to import from the data source. Column names must
108
114
  exactly match the names in the data source, and will be returned in
109
115
  the order they occur in the data source. To avoid reading any columns,
110
- pass an empty list-like.
116
+ pass an empty list-like. If combined with ``where`` parameter, must
117
+ include columns referenced in the ``where`` expression or the data may
118
+ not be correctly read; the data source may return empty results or
119
+ raise an exception (behavior varies by driver).
111
120
  read_geometry : bool, optional (default: True)
112
121
  If True, will read geometry into a GeoSeries. If False, a Pandas DataFrame
113
122
  will be returned instead.
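
A short usage sketch of the ``columns``/``where`` interplay documented above (file and column names are hypothetical): any column referenced in ``where`` should also be requested in ``columns``.

    from pyogrio import read_dataframe

    # "population" is used in the filter, so it is also listed in columns.
    gdf = read_dataframe(
        "places.gpkg",
        columns=["name", "population"],
        where="population > 100000",
    )
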
@@ -152,7 +161,12 @@ read_dataframe(
         the starting index is driver and file specific (e.g. typically 0 for
         Shapefile and 1 for GeoPackage, but can still depend on the specific
         file). The performance of reading a large number of features using FIDs
-        is also driver specific.
+        is also driver specific and depends on the value of ``use_arrow``. The order
+        of the rows returned is undefined. If you would like to sort based on FID, use
+        ``fid_as_index=True`` to have the index of the GeoDataFrame returned set to the
+        FIDs of the features read. If ``use_arrow=True``, the number of FIDs is limited
+        to 4997 for drivers with 'OGRSQL' as default SQL dialect. To read a larger
+        number of FIDs, set ``use_arrow=False``.
     sql : str, optional (default: None)
         The SQL statement to execute. Look at the sql_dialect parameter for more
         information on the syntax to use for the query. When combined with other
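
For example, reading selected features by FID and recovering a deterministic order via the index (file name and FID values are hypothetical):

    from pyogrio import read_dataframe

    # Row order for fids is undefined, so keep the FIDs as the index and sort.
    subset = read_dataframe("places.gpkg", fids=[10, 2, 7], fid_as_index=True)
    subset = subset.sort_index()
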
@@ -184,6 +198,15 @@ read_dataframe(
         installed). When enabled, this provides a further speed-up.
         Defaults to False, but this default can also be globally overridden
         by setting the ``PYOGRIO_USE_ARROW=1`` environment variable.
+    on_invalid : str, optional (default: "raise")
+
+        - **raise**: an exception will be raised if a WKB input geometry is
+          invalid.
+        - **warn**: a warning will be raised and invalid WKB geometries will be
+          returned as ``None``.
+        - **ignore**: invalid WKB geometries will be returned as ``None``
+          without a warning.
+
     arrow_to_pandas_kwargs : dict, optional (default: None)
         When `use_arrow` is True, these kwargs will be passed to the `to_pandas`_
         call for the arrow to pandas conversion.
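
A minimal example of the new ``on_invalid`` option (file name hypothetical): with ``"warn"`` a malformed WKB geometry becomes ``None`` and a warning is emitted instead of the read failing.

    from pyogrio import read_dataframe

    gdf = read_dataframe("possibly_corrupt.gpkg", on_invalid="warn")
    print(gdf.geometry.isna().sum())  # count of geometries that could not be decoded
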
@@ -221,7 +244,6 @@ read_dataframe(
 
     import pandas as pd
     import geopandas as gp
-    from geopandas.array import from_wkb
     import shapely  # if geopandas is present, shapely is expected to be present
 
     path_or_buffer = _stringify_path(path_or_buffer)
@@ -279,10 +301,10 @@ read_dataframe(
         if PANDAS_GE_15 and wkb_values.dtype != object:
             # for example ArrowDtype will otherwise create numpy array with pd.NA
             wkb_values = wkb_values.to_numpy(na_value=None)
-        df["geometry"] = from_wkb(wkb_values, crs=meta["crs"])
+        df["geometry"] = shapely.from_wkb(wkb_values, on_invalid=on_invalid)
         if force_2d:
            df["geometry"] = shapely.force_2d(df["geometry"])
-        return gp.GeoDataFrame(df, geometry="geometry")
+        return gp.GeoDataFrame(df, geometry="geometry", crs=meta["crs"])
     else:
         return df
 
@@ -302,9 +324,9 @@ read_dataframe(
     if geometry is None or not read_geometry:
         return df
 
-    geometry = from_wkb(geometry, crs=meta["crs"])
+    geometry = shapely.from_wkb(geometry, on_invalid=on_invalid)
 
-    return gp.GeoDataFrame(df, geometry=geometry)
+    return gp.GeoDataFrame(df, geometry=geometry, crs=meta["crs"])
 
 
 # TODO: handle index properly
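
Replacing ``geopandas.array.from_wkb`` with ``shapely.from_wkb`` means the CRS is no longer attached during WKB decoding, so it is passed to the ``GeoDataFrame`` constructor instead. A small sketch of that pattern (data values are illustrative):

    import numpy as np
    import shapely
    import geopandas as gp

    wkb = np.array([shapely.Point(0, 0).wkb, b"not wkb"], dtype=object)
    geoms = shapely.from_wkb(wkb, on_invalid="ignore")  # invalid entry -> None, silently
    gdf = gp.GeoDataFrame({"id": [1, 2]}, geometry=geoms, crs="EPSG:4326")
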
@@ -318,6 +340,7 @@ def write_dataframe(
     promote_to_multi=None,
     nan_as_null=True,
     append=False,
+    use_arrow=None,
     dataset_metadata=None,
     layer_metadata=None,
     metadata=None,
@@ -335,16 +358,20 @@
         all values will be converted to strings to be written to the
         output file, except None and np.nan, which will be set to NULL
         in the output file.
-    path : str
-        path to file
-    layer :str, optional (default: None)
-        layer name
+    path : str or io.BytesIO
+        path to output file on writeable file system or an io.BytesIO object to
+        allow writing to memory
+        NOTE: support for writing to memory is limited to specific drivers.
+    layer : str, optional (default: None)
+        layer name to create. If writing to memory and a layer name is not
+        provided, the layer name will be set to a UUID4 value.
     driver : string, optional (default: None)
-        The OGR format driver used to write the vector file. By default write_dataframe
-        attempts to infer driver from path.
+        The OGR format driver used to write the vector file. By default attempts
+        to infer driver from path. Must be provided to write to memory.
     encoding : str, optional (default: None)
         If present, will be used as the encoding for writing string values to
-        the file.
+        the file. Use with caution, only certain drivers support encodings
+        other than UTF-8.
     geometry_type : string, optional (default: None)
         By default, the geometry type of the layer will be inferred from the
         data, after applying the promote_to_multi logic. If the data only contains a
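
The in-memory write path documented above can be exercised like this (driver choice and data are illustrative; the driver must be given explicitly because there is no file extension to infer it from):

    import io
    import geopandas as gp
    import shapely
    from pyogrio import write_dataframe

    gdf = gp.GeoDataFrame({"name": ["a"]}, geometry=[shapely.Point(0, 0)], crs="EPSG:4326")
    buf = io.BytesIO()
    write_dataframe(gdf, buf, driver="GPKG")
    gpkg_bytes = buf.getvalue()  # raw GeoPackage contents, e.g. to upload or store
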
@@ -376,8 +403,17 @@
     append : bool, optional (default: False)
         If True, the data source specified by path already exists, and the
         driver supports appending to an existing data source, will cause the
-        data to be appended to the existing records in the data source.
+        data to be appended to the existing records in the data source. Not
+        supported for writing to in-memory files.
         NOTE: append support is limited to specific drivers and GDAL versions.
+    use_arrow : bool, optional (default: False)
+        Whether to use Arrow as the transfer mechanism of the data to write
+        from Python to GDAL (requires GDAL >= 3.8 and `pyarrow` to be
+        installed). When enabled, this provides a further speed-up.
+        Defaults to False, but this default can also be globally overridden
+        by setting the ``PYOGRIO_USE_ARROW=1`` environment variable.
+        Using Arrow does not support writing an object-dtype column with
+        mixed types.
     dataset_metadata : dict, optional (default: None)
         Metadata to be stored at the dataset level in the output file; limited
         to drivers that support writing metadata, such as GPKG, and silently
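
Enabling the Arrow write path is a one-argument change, provided GDAL >= 3.8 and pyarrow are available (output path and data are illustrative):

    import geopandas as gp
    import shapely
    from pyogrio import write_dataframe

    gdf = gp.GeoDataFrame(
        {"name": ["a", "b"]},
        geometry=[shapely.Point(0, 0), shapely.Point(1, 1)],
        crs="EPSG:4326",
    )
    write_dataframe(gdf, "points.gpkg", use_arrow=True)
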
@@ -389,10 +425,10 @@
     metadata : dict, optional (default: None)
         alias of layer_metadata
     dataset_options : dict, optional
-        Dataset creation option (format specific) passed to OGR. Specify as
+        Dataset creation options (format specific) passed to OGR. Specify as
         a key-value dictionary.
     layer_options : dict, optional
-        Layer creation option (format specific) passed to OGR. Specify as
+        Layer creation options (format specific) passed to OGR. Specify as
         a key-value dictionary.
     **kwargs
         Additional driver-specific dataset or layer creation options passed
@@ -412,13 +448,12 @@
     import pandas as pd
     from pyproj.enums import WktVersion  # if geopandas is available so is pyproj
 
-    path = str(path)
-
     if not isinstance(df, pd.DataFrame):
         raise ValueError("'df' must be a DataFrame or GeoDataFrame")
 
-    if driver is None:
-        driver = detect_write_driver(path)
+    if use_arrow is None:
+        use_arrow = bool(int(os.environ.get("PYOGRIO_USE_ARROW", "0")))
+    path, driver = _get_write_path_driver(path, driver, append=append)
 
     geometry_columns = df.columns[df.dtypes == "geometry"]
     if len(geometry_columns) > 1:
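
The new default resolution for ``use_arrow`` simply reads the environment variable; in isolation the expression behaves like this:

    import os

    # Unset or "0" -> False; "1" -> True. A non-numeric value would raise ValueError.
    use_arrow = bool(int(os.environ.get("PYOGRIO_USE_ARROW", "0")))
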
@@ -456,7 +491,7 @@
             # https://gdal.org/development/rfc/rfc56_millisecond_precision.html#core-changes
             # Convert each row offset to a signed multiple of 15m and add to GMT value
             gdal_offset_representation = tz_offset // pd.Timedelta("15m") + 100
-            gdal_tz_offsets[name] = gdal_offset_representation
+            gdal_tz_offsets[name] = gdal_offset_representation.values
         else:
             values = col.values
             if isinstance(values, pd.api.extensions.ExtensionArray):
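
The offset encoding follows GDAL RFC 56: the timezone flag is a count of 15-minute blocks relative to 100, which stands for GMT. A couple of worked values:

    import pandas as pd

    for tz_offset in (pd.Timedelta(hours=5, minutes=30), pd.Timedelta(hours=-4), pd.Timedelta(0)):
        gdal_tz = tz_offset // pd.Timedelta(minutes=15) + 100
        print(tz_offset, "->", gdal_tz)  # +05:30 -> 122, -04:00 -> 84, UTC -> 100
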
@@ -473,6 +508,9 @@
         field_mask.append(None)
 
     # Determine geometry_type and/or promote_to_multi
+    if geometry_column is not None:
+        geometry_types_all = geometry.geom_type
+
     if geometry_column is not None and (
         geometry_type is None or promote_to_multi is None
     ):
@@ -482,9 +520,7 @@
         # If there is data, infer layer geometry type + promote_to_multi
         if not df.empty:
             # None/Empty geometries sometimes report as Z incorrectly, so ignore them
-            has_z_arr = geometry[
-                (geometry != np.array(None)) & (~geometry.is_empty)
-            ].has_z
+            has_z_arr = geometry[geometry.notna() & (~geometry.is_empty)].has_z
             has_z = has_z_arr.any()
             all_z = has_z_arr.all()
 
@@ -493,7 +529,7 @@
                     f"Mixed 2D and 3D coordinates are not supported by {driver}"
                 )
 
-            geometry_types = pd.Series(geometry.type.unique()).dropna().values
+            geometry_types = pd.Series(geometry_types_all.unique()).dropna().values
             if len(geometry_types) == 1:
                 tmp_geometry_type = geometry_types[0]
                 if promote_to_multi and tmp_geometry_type in (
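
The inference step above can be illustrated with a tiny GeoSeries (values chosen to show the filtering of missing/empty geometries and the 2D/3D check):

    import geopandas as gp
    import shapely

    geometry = gp.GeoSeries(
        [shapely.Point(0, 0), shapely.Point(1, 1, 1), None, shapely.GeometryCollection()]
    )
    # Ignore missing and empty geometries before checking dimensionality.
    valid = geometry[geometry.notna() & (~geometry.is_empty)]
    print(valid.has_z.any(), valid.has_z.all())  # True False -> mixed 2D/3D
    print(geometry.geom_type.dropna().unique())  # ['Point' 'GeometryCollection']
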
@@ -537,10 +573,80 @@
         # if possible use EPSG codes instead
         epsg = geometry.crs.to_epsg()
         if epsg:
-            crs = f"EPSG:{epsg}"
+            crs = f"EPSG:{epsg}"  # noqa: E231
         else:
             crs = geometry.crs.to_wkt(WktVersion.WKT1_GDAL)
 
+    if use_arrow:
+        import pyarrow as pa
+        from pyogrio.raw import write_arrow
+
+        if geometry_column is not None:
+            # Convert to multi type
+            if promote_to_multi:
+                import shapely
+
+                mask_points = geometry_types_all == "Point"
+                mask_linestrings = geometry_types_all == "LineString"
+                mask_polygons = geometry_types_all == "Polygon"
+
+                if mask_points.any():
+                    geometry[mask_points] = shapely.multipoints(
+                        np.atleast_2d(geometry[mask_points]), axis=0
+                    )
+
+                if mask_linestrings.any():
+                    geometry[mask_linestrings] = shapely.multilinestrings(
+                        np.atleast_2d(geometry[mask_linestrings]), axis=0
+                    )
+
+                if mask_polygons.any():
+                    geometry[mask_polygons] = shapely.multipolygons(
+                        np.atleast_2d(geometry[mask_polygons]), axis=0
+                    )
+
+            geometry = to_wkb(geometry.values)
+            df = df.copy(deep=False)
+            # convert to plain DataFrame to avoid warning from geopandas about
+            # writing non-geometries to the geometry column
+            df = pd.DataFrame(df, copy=False)
+            df[geometry_column] = geometry
+
+        table = pa.Table.from_pandas(df, preserve_index=False)
+
+        if geometry_column is not None:
+            # ensure that the geometry column is binary (for all-null geometries,
+            # this could be a wrong type)
+            geom_field = table.schema.field(geometry_column)
+            if not (
+                pa.types.is_binary(geom_field.type)
+                or pa.types.is_large_binary(geom_field.type)
+            ):
+                table = table.set_column(
+                    table.schema.get_field_index(geometry_column),
+                    geom_field.with_type(pa.binary()),
+                    table[geometry_column].cast(pa.binary()),
+                )
+
+        write_arrow(
+            table,
+            path,
+            layer=layer,
+            driver=driver,
+            geometry_name=geometry_column,
+            geometry_type=geometry_type,
+            crs=crs,
+            encoding=encoding,
+            append=append,
+            dataset_metadata=dataset_metadata,
+            layer_metadata=layer_metadata,
+            metadata=metadata,
+            dataset_options=dataset_options,
+            layer_options=layer_options,
+            **kwargs,
+        )
+        return
+
     # If there is geometry data, prepare it to be written
     if geometry_column is not None:
         geometry = to_wkb(geometry.values)
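
One subtle step in the Arrow write path is the geometry-column cast: if every geometry is missing, ``pa.Table.from_pandas`` infers a null-typed column, while GDAL expects WKB bytes. The cast can be reproduced in isolation like this (table contents are illustrative):

    import pyarrow as pa

    table = pa.table({"name": ["a", "b"], "geometry": pa.array([None, None], type=pa.null())})
    field = table.schema.field("geometry")
    if not (pa.types.is_binary(field.type) or pa.types.is_large_binary(field.type)):
        idx = table.schema.get_field_index("geometry")
        table = table.set_column(idx, field.with_type(pa.binary()), table["geometry"].cast(pa.binary()))
    print(table.schema)  # geometry is now binary instead of null
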
pyogrio/proj_data/ITRF2008 CHANGED
@@ -42,7 +42,7 @@
 
 <CARB> +proj=helmert +drx=0.000049 +dry=-0.001088 +drz=0.000664 +convention=position_vector
 
-<EURA> +proj=helmert +drx=-0.000083 +dry=0.000534 +drz=0.000750 +convention=position_vector
+<EURA> +proj=helmert +drx=-0.000083 +dry=-0.000534 +drz=0.000750 +convention=position_vector
 
 <INDI> +proj=helmert +drx=0.001232 +dry=0.000303 +drz=0.001540 +convention=position_vector
 
@@ -75,7 +75,7 @@
 
 <CARB_T> +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000049 +dry=-0.001088 +drz=0.000664 +convention=position_vector
 
-<EURA_T> +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000083 +dry=0.000534 +drz=0.000750 +convention=position_vector
+<EURA_T> +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000083 +dry=-0.000534 +drz=0.000750 +convention=position_vector
 
 <INDI_T> +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.001232 +dry=0.000303 +drz=0.001540 +convention=position_vector
 
pyogrio/proj_data/proj-config-version.cmake CHANGED
@@ -1,8 +1,8 @@
 # Version checking for PROJ
 
-set (PACKAGE_VERSION "9.3.0")
+set (PACKAGE_VERSION "9.4.0")
 set (PACKAGE_VERSION_MAJOR "9")
-set (PACKAGE_VERSION_MINOR "3")
+set (PACKAGE_VERSION_MINOR "4")
 set (PACKAGE_VERSION_PATCH "0")
 
 # These variable definitions parallel those in PROJ's
@@ -27,7 +27,8 @@ if("TRUE")
27
27
  endif()
28
28
  cmake_policy(POP)
29
29
 
30
- find_dependency(unofficial-sqlite3 CONFIG)
30
+ find_dependency(SQLite3)
31
+
31
32
  if(DEFINED PROJ_CONFIG_FIND_TIFF_DEP)
32
33
  find_dependency(TIFF)
33
34
  endif()
pyogrio/proj_data/proj-targets.cmake CHANGED
@@ -3,11 +3,11 @@
 if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8)
    message(FATAL_ERROR "CMake >= 2.8.0 required")
 endif()
-if(CMAKE_VERSION VERSION_LESS "2.8.3")
-   message(FATAL_ERROR "CMake >= 2.8.3 required")
+if(CMAKE_VERSION VERSION_LESS "2.8.12")
+   message(FATAL_ERROR "CMake >= 2.8.12 required")
 endif()
 cmake_policy(PUSH)
-cmake_policy(VERSION 2.8.3...3.25)
+cmake_policy(VERSION 2.8.12...3.27)
 #----------------------------------------------------------------
 # Generated CMake target import file.
 #----------------------------------------------------------------
@@ -60,13 +60,9 @@ add_library(PROJ::proj STATIC IMPORTED)
 set_target_properties(PROJ::proj PROPERTIES
   INTERFACE_COMPILE_DEFINITIONS "PROJ_DLL="
   INTERFACE_INCLUDE_DIRECTORIES "${_IMPORT_PREFIX}/include"
-  INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:unofficial::sqlite3::sqlite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
+  INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:SQLite::SQLite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
 )
 
-if(CMAKE_VERSION VERSION_LESS 2.8.12)
-   message(FATAL_ERROR "This file relies on consumers using CMake 2.8.12 or greater.")
-endif()
-
 # Load information for each installed configuration.
 file(GLOB _cmake_config_files "${CMAKE_CURRENT_LIST_DIR}/proj-targets-*.cmake")
 foreach(_cmake_config_file IN LISTS _cmake_config_files)
@@ -80,9 +76,12 @@ set(_IMPORT_PREFIX)
 
 # Loop over all imported files and verify that they actually exist
 foreach(_cmake_target IN LISTS _cmake_import_check_targets)
-  foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
-    if(NOT EXISTS "${_cmake_file}")
-      message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
+  if(CMAKE_VERSION VERSION_LESS "3.28"
+      OR NOT DEFINED _cmake_import_check_xcframework_for_${_cmake_target}
+      OR NOT IS_DIRECTORY "${_cmake_import_check_xcframework_for_${_cmake_target}}")
+    foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
+      if(NOT EXISTS "${_cmake_file}")
+        message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
    \"${_cmake_file}\"
 but this file does not exist. Possible reasons include:
 * The file was deleted, renamed, or moved to another location.
@@ -91,8 +90,9 @@ but this file does not exist. Possible reasons include:
    \"${CMAKE_CURRENT_LIST_FILE}\"
 but not all the files it references.
 ")
-    endif()
-  endforeach()
+      endif()
+    endforeach()
+  endif()
   unset(_cmake_file)
   unset("_cmake_import_check_files_for_${_cmake_target}")
 endforeach()
pyogrio/proj_data/proj.db CHANGED
Binary file
pyogrio/proj_data/proj4-targets.cmake CHANGED
@@ -3,11 +3,11 @@
 if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8)
    message(FATAL_ERROR "CMake >= 2.8.0 required")
 endif()
-if(CMAKE_VERSION VERSION_LESS "2.8.3")
-   message(FATAL_ERROR "CMake >= 2.8.3 required")
+if(CMAKE_VERSION VERSION_LESS "2.8.12")
+   message(FATAL_ERROR "CMake >= 2.8.12 required")
 endif()
 cmake_policy(PUSH)
-cmake_policy(VERSION 2.8.3...3.25)
+cmake_policy(VERSION 2.8.12...3.27)
 #----------------------------------------------------------------
 # Generated CMake target import file.
 #----------------------------------------------------------------
@@ -60,13 +60,9 @@ add_library(PROJ4::proj STATIC IMPORTED)
 set_target_properties(PROJ4::proj PROPERTIES
   INTERFACE_COMPILE_DEFINITIONS "PROJ_DLL="
   INTERFACE_INCLUDE_DIRECTORIES "${_IMPORT_PREFIX}/include"
-  INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:unofficial::sqlite3::sqlite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
+  INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:SQLite::SQLite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
 )
 
-if(CMAKE_VERSION VERSION_LESS 2.8.12)
-   message(FATAL_ERROR "This file relies on consumers using CMake 2.8.12 or greater.")
-endif()
-
 # Load information for each installed configuration.
 file(GLOB _cmake_config_files "${CMAKE_CURRENT_LIST_DIR}/proj4-targets-*.cmake")
 foreach(_cmake_config_file IN LISTS _cmake_config_files)
@@ -80,9 +76,12 @@ set(_IMPORT_PREFIX)
 
 # Loop over all imported files and verify that they actually exist
 foreach(_cmake_target IN LISTS _cmake_import_check_targets)
-  foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
-    if(NOT EXISTS "${_cmake_file}")
-      message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
+  if(CMAKE_VERSION VERSION_LESS "3.28"
+      OR NOT DEFINED _cmake_import_check_xcframework_for_${_cmake_target}
+      OR NOT IS_DIRECTORY "${_cmake_import_check_xcframework_for_${_cmake_target}}")
+    foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
+      if(NOT EXISTS "${_cmake_file}")
+        message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
    \"${_cmake_file}\"
 but this file does not exist. Possible reasons include:
 * The file was deleted, renamed, or moved to another location.
@@ -91,8 +90,9 @@ but this file does not exist. Possible reasons include:
    \"${CMAKE_CURRENT_LIST_FILE}\"
 but not all the files it references.
 ")
-    endif()
-  endforeach()
+      endif()
+    endforeach()
+  endif()
   unset(_cmake_file)
   unset("_cmake_import_check_files_for_${_cmake_target}")
 endforeach()