pyogrio-0.9.0-cp39-cp39-manylinux_2_28_aarch64.whl → pyogrio-0.11.0-cp39-cp39-manylinux_2_28_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pyogrio might be problematic.

Files changed (86)
  1. pyogrio/__init__.py +28 -21
  2. pyogrio/_compat.py +15 -1
  3. pyogrio/_env.py +4 -6
  4. pyogrio/_err.cpython-39-aarch64-linux-gnu.so +0 -0
  5. pyogrio/_geometry.cpython-39-aarch64-linux-gnu.so +0 -0
  6. pyogrio/_io.cpython-39-aarch64-linux-gnu.so +0 -0
  7. pyogrio/_ogr.cpython-39-aarch64-linux-gnu.so +0 -0
  8. pyogrio/_version.py +3 -3
  9. pyogrio/_vsi.cpython-39-aarch64-linux-gnu.so +0 -0
  10. pyogrio/core.py +86 -20
  11. pyogrio/errors.py +9 -16
  12. pyogrio/gdal_data/GDAL-targets-release.cmake +3 -3
  13. pyogrio/gdal_data/GDAL-targets.cmake +2 -2
  14. pyogrio/gdal_data/GDALConfig.cmake +0 -1
  15. pyogrio/gdal_data/GDALConfigVersion.cmake +3 -3
  16. pyogrio/gdal_data/MM_m_idofic.csv +321 -0
  17. pyogrio/gdal_data/gdalinfo_output.schema.json +3 -3
  18. pyogrio/gdal_data/gdaltileindex.xsd +253 -0
  19. pyogrio/gdal_data/gdalvrt.xsd +178 -63
  20. pyogrio/gdal_data/nitf_spec.xml +1 -17
  21. pyogrio/gdal_data/nitf_spec.xsd +1 -17
  22. pyogrio/gdal_data/ogrinfo_output.schema.json +23 -0
  23. pyogrio/gdal_data/ogrvrt.xsd +4 -17
  24. pyogrio/gdal_data/osmconf.ini +3 -1
  25. pyogrio/gdal_data/pci_datum.txt +222 -155
  26. pyogrio/gdal_data/pci_ellips.txt +90 -38
  27. pyogrio/gdal_data/pdfcomposition.xsd +1 -17
  28. pyogrio/gdal_data/vcpkg.spdx.json +32 -27
  29. pyogrio/gdal_data/vcpkg_abi_info.txt +30 -29
  30. pyogrio/gdal_data/vdv452.xml +1 -17
  31. pyogrio/gdal_data/vdv452.xsd +1 -17
  32. pyogrio/geopandas.py +122 -66
  33. pyogrio/proj_data/ITRF2014 +1 -1
  34. pyogrio/proj_data/ITRF2020 +91 -0
  35. pyogrio/proj_data/proj-config-version.cmake +2 -2
  36. pyogrio/proj_data/proj-config.cmake +1 -1
  37. pyogrio/proj_data/proj-targets.cmake +3 -3
  38. pyogrio/proj_data/proj.db +0 -0
  39. pyogrio/proj_data/proj.ini +11 -3
  40. pyogrio/proj_data/proj4-targets.cmake +3 -3
  41. pyogrio/proj_data/projjson.schema.json +1 -1
  42. pyogrio/proj_data/usage +7 -2
  43. pyogrio/proj_data/vcpkg.spdx.json +27 -22
  44. pyogrio/proj_data/vcpkg_abi_info.txt +17 -16
  45. pyogrio/raw.py +46 -30
  46. pyogrio/tests/conftest.py +214 -12
  47. pyogrio/tests/fixtures/README.md +32 -13
  48. pyogrio/tests/fixtures/curve.gpkg +0 -0
  49. pyogrio/tests/fixtures/{test_multisurface.gpkg → curvepolygon.gpkg} +0 -0
  50. pyogrio/tests/fixtures/line_zm.gpkg +0 -0
  51. pyogrio/tests/fixtures/multisurface.gpkg +0 -0
  52. pyogrio/tests/test_arrow.py +181 -24
  53. pyogrio/tests/test_core.py +170 -76
  54. pyogrio/tests/test_geopandas_io.py +483 -135
  55. pyogrio/tests/test_path.py +39 -17
  56. pyogrio/tests/test_raw_io.py +170 -55
  57. pyogrio/tests/test_util.py +56 -0
  58. pyogrio/util.py +69 -32
  59. pyogrio-0.11.0.dist-info/METADATA +124 -0
  60. {pyogrio-0.9.0.dist-info → pyogrio-0.11.0.dist-info}/RECORD +200 -214
  61. {pyogrio-0.9.0.dist-info → pyogrio-0.11.0.dist-info}/WHEEL +1 -1
  62. {pyogrio-0.9.0.dist-info → pyogrio-0.11.0.dist-info/licenses}/LICENSE +1 -1
  63. pyogrio.libs/{libgdal-6ff0914e.so.34.3.8.5 → libgdal-4bc0d15f.so.36.3.10.3} +0 -0
  64. pyogrio/_err.pxd +0 -4
  65. pyogrio/_err.pyx +0 -250
  66. pyogrio/_geometry.pxd +0 -4
  67. pyogrio/_geometry.pyx +0 -129
  68. pyogrio/_io.pxd +0 -0
  69. pyogrio/_io.pyx +0 -2742
  70. pyogrio/_ogr.pxd +0 -444
  71. pyogrio/_ogr.pyx +0 -346
  72. pyogrio/_vsi.pxd +0 -4
  73. pyogrio/_vsi.pyx +0 -140
  74. pyogrio/arrow_bridge.h +0 -115
  75. pyogrio/gdal_data/bag_template.xml +0 -201
  76. pyogrio/gdal_data/gmlasconf.xml +0 -169
  77. pyogrio/gdal_data/gmlasconf.xsd +0 -1066
  78. pyogrio/gdal_data/netcdf_config.xsd +0 -143
  79. pyogrio/tests/fixtures/poly_not_enough_points.shp.zip +0 -0
  80. pyogrio/tests/fixtures/test_datetime.geojson +0 -7
  81. pyogrio/tests/fixtures/test_datetime_tz.geojson +0 -8
  82. pyogrio/tests/fixtures/test_fgdb.gdb.zip +0 -0
  83. pyogrio/tests/fixtures/test_nested.geojson +0 -18
  84. pyogrio/tests/fixtures/test_ogr_types_list.geojson +0 -12
  85. pyogrio-0.9.0.dist-info/METADATA +0 -100
  86. {pyogrio-0.9.0.dist-info → pyogrio-0.11.0.dist-info}/top_level.txt +0 -0
pyogrio/geopandas.py CHANGED
@@ -1,24 +1,31 @@
+"""Functions for reading and writing GeoPandas dataframes."""
+
 import os
+import warnings

 import numpy as np

-from pyogrio._compat import HAS_GEOPANDAS, PANDAS_GE_15, PANDAS_GE_20, PANDAS_GE_22
+from pyogrio._compat import (
+    HAS_GEOPANDAS,
+    PANDAS_GE_15,
+    PANDAS_GE_20,
+    PANDAS_GE_22,
+    PANDAS_GE_30,
+    PYARROW_GE_19,
+)
+from pyogrio.errors import DataSourceError
 from pyogrio.raw import (
-    DRIVERS_NO_MIXED_SINGLE_MULTI,
     DRIVERS_NO_MIXED_DIMENSIONS,
+    DRIVERS_NO_MIXED_SINGLE_MULTI,
+    _get_write_path_driver,
     read,
     read_arrow,
     write,
-    _get_write_path_driver,
 )
-from pyogrio.errors import DataSourceError
-import warnings


 def _stringify_path(path):
-    """
-    Convert path-like to a string if possible, pass-through other objects
-    """
+    """Convert path-like to a string if possible, pass-through other objects."""
     if isinstance(path, str):
         return path

@@ -34,11 +41,11 @@ def _try_parse_datetime(ser):
     import pandas as pd  # only called when pandas is known to be installed

     if PANDAS_GE_22:
-        datetime_kwargs = dict(format="ISO8601")
+        datetime_kwargs = {"format": "ISO8601"}
     elif PANDAS_GE_20:
-        datetime_kwargs = dict(format="ISO8601", errors="ignore")
+        datetime_kwargs = {"format": "ISO8601", "errors": "ignore"}
     else:
-        datetime_kwargs = dict(yearfirst=True)
+        datetime_kwargs = {"yearfirst": True}
     with warnings.catch_warnings():
         warnings.filterwarnings(
             "ignore",
@@ -52,13 +59,13 @@ def _try_parse_datetime(ser):
     except Exception:
         res = ser
     # if object dtype, try parse as utc instead
-    if res.dtype == "object":
+    if res.dtype in ("object", "string"):
         try:
             res = pd.to_datetime(ser, utc=True, **datetime_kwargs)
         except Exception:
             pass

-    if res.dtype != "object":
+    if res.dtype.kind == "M":  # any datetime64
         # GDAL only supports ms precision, convert outputs to match.
         # Pandas 2.0 supports datetime[ms] directly, prior versions only support [ns],
         # Instead, round the values to [ms] precision.
@@ -92,6 +99,7 @@ def read_dataframe(
     **kwargs,
 ):
     """Read from an OGR data source to a GeoPandas GeoDataFrame or Pandas DataFrame.
+
     If the data source does not have a geometry column or ``read_geometry`` is False,
     a DataFrame will be returned.

@@ -100,7 +108,7 @@ def read_dataframe(
     Parameters
     ----------
     path_or_buffer : pathlib.Path or str, or bytes buffer
-        A dataset path or URI, or raw buffer.
+        A dataset path or URI, raw buffer, or file-like object with a read method.
     layer : int or str, optional (default: first layer)
         If an integer is provided, it corresponds to the index of the layer
         with the data source. If a string is provided, it must match the name
@@ -199,13 +207,18 @@ def read_dataframe(
         Defaults to False, but this default can also be globally overridden
         by setting the ``PYOGRIO_USE_ARROW=1`` environment variable.
     on_invalid : str, optional (default: "raise")
+        The action to take when an invalid geometry is encountered. Possible
+        values:

         - **raise**: an exception will be raised if a WKB input geometry is
           invalid.
-        - **warn**: a warning will be raised and invalid WKB geometries will be
-          returned as ``None``.
+        - **warn**: invalid WKB geometries will be returned as ``None`` and a
+          warning will be raised.
         - **ignore**: invalid WKB geometries will be returned as ``None``
           without a warning.
+        - **fix**: an effort is made to fix invalid input geometries (currently
+          just unclosed rings). If this is not possible, they are returned as
+          ``None`` without a warning. Requires GEOS >= 3.11 and shapely >= 2.1.

     arrow_to_pandas_kwargs : dict, optional (default: None)
         When `use_arrow` is True, these kwargs will be passed to the `to_pandas`_
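A minimal usage sketch of the option documented above (editorial illustration, not part of the diff; the file name is a placeholder, and the "fix" mode requires GEOS >= 3.11 and shapely >= 2.1):

```python
import pyogrio

# Invalid rings are repaired where possible instead of raising; geometries that
# cannot be fixed come back as None.
gdf = pyogrio.read_dataframe("countries.gpkg", on_invalid="fix")
```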
@@ -238,12 +251,13 @@ def read_dataframe(

     https://arrow.apache.org/docs/python/generated/pyarrow.Table.html#pyarrow.Table.to_pandas

-    """  # noqa: E501
+    """
     if not HAS_GEOPANDAS:
         raise ImportError("geopandas is required to use pyogrio.read_dataframe()")

-    import pandas as pd
     import geopandas as gp
+    import pandas as pd
+
     import shapely  # if geopandas is present, shapely is expected to be present

     path_or_buffer = _stringify_path(path_or_buffer)
@@ -278,14 +292,42 @@ def read_dataframe(
     )

     if use_arrow:
+        import pyarrow as pa
+
         meta, table = result

         # split_blocks and self_destruct decrease memory usage, but have as side effect
         # that accessing table afterwards causes crash, so del table to avoid.
         kwargs = {"self_destruct": True}
+        if PANDAS_GE_30:
+            # starting with pyarrow 19.0, pyarrow will correctly handle this themselves,
+            # so only use types_mapper as workaround for older versions
+            if not PYARROW_GE_19:
+                kwargs["types_mapper"] = {
+                    pa.string(): pd.StringDtype(na_value=np.nan),
+                    pa.large_string(): pd.StringDtype(na_value=np.nan),
+                    pa.json_(): pd.StringDtype(na_value=np.nan),
+                }.get
+            # TODO enable the below block when upstream issue to accept extension types
+            # is fixed
+            # else:
+            #     # for newer pyarrow, still include mapping for json
+            #     # GDAL 3.11 started to emit this extension type, but pyarrow does not
+            #     # yet support it properly in the conversion to pandas
+            #     kwargs["types_mapper"] = {
+            #         pa.json_(): pd.StringDtype(na_value=np.nan),
+            #     }.get
         if arrow_to_pandas_kwargs is not None:
             kwargs.update(arrow_to_pandas_kwargs)
-        df = table.to_pandas(**kwargs)
+
+        try:
+            df = table.to_pandas(**kwargs)
+        except UnicodeDecodeError as ex:
+            # Arrow does not support reading data in a non-UTF-8 encoding
+            raise DataSourceError(
+                "The file being read is not encoded in UTF-8; please use_arrow=False"
+            ) from ex
+
         del table

     if fid_as_index:
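For reference, a hedged sketch of how the kwargs assembled above reach pyarrow: `arrow_to_pandas_kwargs` is merged into the same dict that is passed to `pyarrow.Table.to_pandas` (the file name below is a placeholder):

```python
import pyogrio

df = pyogrio.read_dataframe(
    "countries.gpkg",
    use_arrow=True,
    # forwarded into Table.to_pandas() alongside self_destruct / types_mapper
    arrow_to_pandas_kwargs={"strings_to_categorical": True},
)
```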
@@ -329,7 +371,6 @@ def read_dataframe(
     return gp.GeoDataFrame(df, geometry=geometry, crs=meta["crs"])


-# TODO: handle index properly
 def write_dataframe(
     df,
     path,
@@ -348,8 +389,7 @@ def write_dataframe(
     layer_options=None,
     **kwargs,
 ):
-    """
-    Write GeoPandas GeoDataFrame to an OGR file format.
+    """Write GeoPandas GeoDataFrame to an OGR file format.

     Parameters
     ----------
@@ -360,7 +400,8 @@ def write_dataframe(
         in the output file.
     path : str or io.BytesIO
         path to output file on writeable file system or an io.BytesIO object to
-        allow writing to memory
+        allow writing to memory. Will raise NotImplementedError if an open file
+        handle is passed; use BytesIO instead.
         NOTE: support for writing to memory is limited to specific drivers.
     layer : str, optional (default: None)
         layer name to create. If writing to memory and layer name is not
@@ -438,15 +479,15 @@ def write_dataframe(
         explicit `dataset_options` or `layer_options` keywords to manually
         do this (for example if an option exists as both dataset and layer
         option).
+
     """
     # TODO: add examples to the docstring (e.g. OGR kwargs)

     if not HAS_GEOPANDAS:
         raise ImportError("geopandas is required to use pyogrio.write_dataframe()")

-    from geopandas.array import to_wkb
     import pandas as pd
-    from pyproj.enums import WktVersion  # if geopandas is available so is pyproj
+    from geopandas.array import to_wkb

     if not isinstance(df, pd.DataFrame):
         raise ValueError("'df' must be a DataFrame or GeoDataFrame")
@@ -465,47 +506,9 @@ def write_dataframe(
     if len(geometry_columns) > 0:
         geometry_column = geometry_columns[0]
         geometry = df[geometry_column]
-        fields = [c for c in df.columns if not c == geometry_column]
     else:
         geometry_column = None
         geometry = None
-        fields = list(df.columns)
-
-    # TODO: may need to fill in pd.NA, etc
-    field_data = []
-    field_mask = []
-    # dict[str, np.array(int)] special case for dt-tz fields
-    gdal_tz_offsets = {}
-    for name in fields:
-        col = df[name]
-        if isinstance(col.dtype, pd.DatetimeTZDtype):
-            # Deal with datetimes with timezones by passing down timezone separately
-            # pass down naive datetime
-            naive = col.dt.tz_localize(None)
-            values = naive.values
-            # compute offset relative to UTC explicitly
-            tz_offset = naive - col.dt.tz_convert("UTC").dt.tz_localize(None)
-            # Convert to GDAL timezone offset representation.
-            # GMT is represented as 100 and offsets are represented by adding /
-            # subtracting 1 for every 15 minutes different from GMT.
-            # https://gdal.org/development/rfc/rfc56_millisecond_precision.html#core-changes
-            # Convert each row offset to a signed multiple of 15m and add to GMT value
-            gdal_offset_representation = tz_offset // pd.Timedelta("15m") + 100
-            gdal_tz_offsets[name] = gdal_offset_representation.values
-        else:
-            values = col.values
-            if isinstance(values, pd.api.extensions.ExtensionArray):
-                from pandas.arrays import IntegerArray, FloatingArray, BooleanArray
-
-                if isinstance(values, (IntegerArray, FloatingArray, BooleanArray)):
-                    field_data.append(values._data)
-                    field_mask.append(values._mask)
-                else:
-                    field_data.append(np.asarray(values))
-                    field_mask.append(np.asarray(values.isna()))
-            else:
-                field_data.append(values)
-                field_mask.append(None)

     # Determine geometry_type and/or promote_to_multi
     if geometry_column is not None:
@@ -520,7 +523,10 @@ def write_dataframe(
         # If there is data, infer layer geometry type + promote_to_multi
         if not df.empty:
             # None/Empty geometries sometimes report as Z incorrectly, so ignore them
-            has_z_arr = geometry[geometry.notna() & (~geometry.is_empty)].has_z
+            with warnings.catch_warnings():
+                warnings.filterwarnings("ignore", r"GeoSeries\.notna", UserWarning)
+                geometry_notna = geometry.notna()
+                has_z_arr = geometry[geometry_notna & (~geometry.is_empty)].has_z
             has_z = has_z_arr.any()
             all_z = has_z_arr.all()

@@ -573,12 +579,13 @@ def write_dataframe(
         # if possible use EPSG codes instead
         epsg = geometry.crs.to_epsg()
         if epsg:
-            crs = f"EPSG:{epsg}"  # noqa: E231
+            crs = f"EPSG:{epsg}"
         else:
-            crs = geometry.crs.to_wkt(WktVersion.WKT1_GDAL)
+            crs = geometry.crs.to_wkt("WKT1_GDAL")

     if use_arrow:
         import pyarrow as pa
+
         from pyogrio.raw import write_arrow

         if geometry_column is not None:
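The WKT fallback above no longer needs `pyproj.enums.WktVersion` because `pyproj.CRS.to_wkt` also accepts the WKT version as a string. A small hedged illustration (the example CRS is illustrative only):

```python
import pyproj

crs_obj = pyproj.CRS.from_user_input("ESRI:102013")  # e.g. an ESRI-registered CRS, typically no EPSG code
epsg = crs_obj.to_epsg()
crs = f"EPSG:{epsg}" if epsg else crs_obj.to_wkt("WKT1_GDAL")
```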
@@ -614,6 +621,15 @@ def write_dataframe(

         table = pa.Table.from_pandas(df, preserve_index=False)

+        # Null arrow columns are not supported by GDAL, so convert to string
+        for field_index, field in enumerate(table.schema):
+            if field.type == pa.null():
+                table = table.set_column(
+                    field_index,
+                    field.with_type(pa.string()),
+                    table[field_index].cast(pa.string()),
+                )
+
         if geometry_column is not None:
             # ensure that the geometry column is binary (for all-null geometries,
             # this could be a wrong type)
@@ -650,6 +666,46 @@ def write_dataframe(
     # If there is geometry data, prepare it to be written
     if geometry_column is not None:
         geometry = to_wkb(geometry.values)
+        fields = [c for c in df.columns if not c == geometry_column]
+    else:
+        fields = list(df.columns)
+
+    # Convert data to numpy arrays for writing
+    # TODO: may need to fill in pd.NA, etc
+    field_data = []
+    field_mask = []
+    # dict[str, np.array(int)] special case for dt-tz fields
+    gdal_tz_offsets = {}
+    for name in fields:
+        col = df[name]
+        if isinstance(col.dtype, pd.DatetimeTZDtype):
+            # Deal with datetimes with timezones by passing down timezone separately
+            # pass down naive datetime
+            naive = col.dt.tz_localize(None)
+            values = naive.values
+            # compute offset relative to UTC explicitly
+            tz_offset = naive - col.dt.tz_convert("UTC").dt.tz_localize(None)
+            # Convert to GDAL timezone offset representation.
+            # GMT is represented as 100 and offsets are represented by adding /
+            # subtracting 1 for every 15 minutes different from GMT.
+            # https://gdal.org/development/rfc/rfc56_millisecond_precision.html#core-changes
+            # Convert each row offset to a signed multiple of 15m and add to GMT value
+            gdal_offset_representation = tz_offset // pd.Timedelta("15m") + 100
+            gdal_tz_offsets[name] = gdal_offset_representation.values
+        else:
+            values = col.values
+            if isinstance(values, pd.api.extensions.ExtensionArray):
+                from pandas.arrays import BooleanArray, FloatingArray, IntegerArray
+
+                if isinstance(values, (IntegerArray, FloatingArray, BooleanArray)):
+                    field_data.append(values._data)
+                    field_mask.append(values._mask)
+                else:
+                    field_data.append(np.asarray(values))
+                    field_mask.append(np.asarray(values.isna()))
+            else:
+                field_data.append(values)
+                field_mask.append(None)

     write(
         path,
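The timezone handling re-added above encodes offsets using GDAL's RFC 56 convention (100 means GMT, plus or minus 1 per 15 minutes). A worked illustration of that arithmetic (editorial, not from the package):

```python
import pandas as pd

# One timestamp at UTC+02:00 ("Etc/GMT-2" is UTC+2 in the POSIX sign convention).
col = pd.Series(pd.to_datetime(["2024-01-01 12:00"])).dt.tz_localize("Etc/GMT-2")
naive = col.dt.tz_localize(None)                                    # wall-clock time
tz_offset = naive - col.dt.tz_convert("UTC").dt.tz_localize(None)   # +2 hours
gdal_flag = tz_offset // pd.Timedelta("15m") + 100
print(int(gdal_flag.iloc[0]))  # 108 = 100 + 8 quarter-hours east of GMT
```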
pyogrio/proj_data/ITRF2014 CHANGED
@@ -7,7 +7,7 @@

 <ITRF2000> +proj=helmert +x=0.0007 +y=0.0012 +z=-0.0261 +s=0.00212 +dx=0.0001 +dy=0.0001 +dz=-0.0019 +ds=0.00011 +t_epoch=2010.0 +convention=position_vector

-<ITRF97> +proj=helmert +x=0.0074 +y=-0.0005 +z=-0.0628 +d=0.0038 +rz=0.00026 +dx0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector
+<ITRF97> +proj=helmert +x=0.0074 +y=-0.0005 +z=-0.0628 +s=0.0038 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector

 <ITRF96> +proj=helmert +x=0.0074 +y=-0.0005 +z=-0.0628 +s=0.0038 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector

pyogrio/proj_data/ITRF2020 ADDED
@@ -0,0 +1,91 @@
+# ITRF2020 params are in mm/year, PJ_helmert uses m/year
+
+# Generated with generate_itrf2020.py from EPSG database
+
+<ITRF2014> +proj=helmert +x=-0.0014 +y=-0.0009 +z=0.0014 +s=-0.00042 +dy=-0.0001 +dz=0.0002 +t_epoch=2015 +convention=position_vector
+
+<ITRF2008> +proj=helmert +x=0.0002 +y=0.001 +z=0.0033 +s=-0.00029 +dy=-0.0001 +dz=0.0001 +ds=3e-05 +t_epoch=2015 +convention=position_vector
+
+<ITRF2005> +proj=helmert +x=0.0027 +y=0.0001 +z=-0.0014 +s=0.00065 +dx=0.0003 +dy=-0.0001 +dz=0.0001 +ds=3e-05 +t_epoch=2015 +convention=position_vector
+
+<ITRF2000> +proj=helmert +x=-0.0002 +y=0.0008 +z=-0.0342 +s=0.00225 +dx=0.0001 +dz=-0.0017 +ds=0.00011 +t_epoch=2015 +convention=position_vector
+
+<ITRF97> +proj=helmert +x=0.0065 +y=-0.0039 +z=-0.0779 +rz=0.00036 +s=0.00398 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector
+
+<ITRF96> +proj=helmert +x=0.0065 +y=-0.0039 +z=-0.0779 +rz=0.00036 +s=0.00398 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector
+
+<ITRF94> +proj=helmert +x=0.0065 +y=-0.0039 +z=-0.0779 +rz=0.00036 +s=0.00398 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector
+
+<ITRF93> +proj=helmert +x=-0.0658 +y=0.0019 +z=-0.0713 +rx=-0.00336 +ry=-0.00433 +rz=0.00075 +s=0.00447 +dx=-0.0028 +dy=-0.0002 +dz=-0.0023 +drx=-0.00011 +dry=-0.00019 +drz=7e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector
+
+<ITRF92> +proj=helmert +x=0.0145 +y=-0.0019 +z=-0.0859 +rz=0.00036 +s=0.00327 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector
+
+<ITRF91> +proj=helmert +x=0.0265 +y=0.0121 +z=-0.0919 +rz=0.00036 +s=0.00467 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector
+
+<ITRF90> +proj=helmert +x=0.0245 +y=0.0081 +z=-0.1079 +rz=0.00036 +s=0.00497 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector
+
+<ITRF89> +proj=helmert +x=0.0295 +y=0.0321 +z=-0.1459 +rz=0.00036 +s=0.00837 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector
+
+<ITRF88> +proj=helmert +x=0.0245 +y=-0.0039 +z=-0.1699 +rx=0.0001 +rz=0.00036 +s=0.01147 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector
+
+# ITRF2020 Plate Motion Model parameters
+#
+# As described in https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2023GL106373
+# (and also in https://itrf.ign.fr/docs/solutions/itrf2020/ITRF2020-PMM.dat, but with
+# different units)
+
+<AMUR> +proj=helmert +drx=-0.000131 +dry=-0.000551 +drz=0.000837 +convention=position_vector
+
+<ANTA> +proj=helmert +drx=-0.000269 +dry=-0.000312 +drz=0.000678 +convention=position_vector
+
+<ARAB> +proj=helmert +drx=0.001129 +dry=-0.000146 +drz=0.001438 +convention=position_vector
+
+<AUST> +proj=helmert +drx=0.001487 +dry=0.001175 +drz=0.001223 +convention=position_vector
+
+<CARB> +proj=helmert +drx=0.000207 +dry=-0.001422 +drz=0.000726 +convention=position_vector
+
+<EURA> +proj=helmert +drx=-0.000085 +dry=-0.000519 +drz=0.000753 +convention=position_vector
+
+<INDI> +proj=helmert +drx=0.001137 +dry=0.000013 +drz=0.001444 +convention=position_vector
+
+<NAZC> +proj=helmert +drx=-0.000327 +dry=-0.001561 +drz=0.001605 +convention=position_vector
+
+<NOAM> +proj=helmert +drx=0.000045 +dry=-0.000666 +drz=-0.000098 +convention=position_vector
+
+<NUBI> +proj=helmert +drx=0.000090 +dry=-0.000585 +drz=0.000717 +convention=position_vector
+
+<PCFC> +proj=helmert +drx=-0.000404 +dry=0.001021 +drz=-0.002154 +convention=position_vector
+
+<SOAM> +proj=helmert +drx=-0.000261 +dry=-0.000282 +drz=-0.000157 +convention=position_vector
+
+<SOMA> +proj=helmert +drx=-0.000081 +dry=-0.000719 +drz=0.000864 +convention=position_vector
+
+# Plate names suffixed by _T (for Translation) that includes the translation
+# rates +dx=0.00037 +dy=0.00035 +dz=0.00074 given by Table 2 of the ITRF2020 plate motion model
+# paper
+
+<AMUR_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000131 +dry=-0.000551 +drz=0.000837 +convention=position_vector
+
+<ANTA_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000269 +dry=-0.000312 +drz=0.000678 +convention=position_vector
+
+<ARAB_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.001129 +dry=-0.000146 +drz=0.001438 +convention=position_vector
+
+<AUST_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.001487 +dry=0.001175 +drz=0.001223 +convention=position_vector
+
+<CARB_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.000207 +dry=-0.001422 +drz=0.000726 +convention=position_vector
+
+<EURA_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000085 +dry=-0.000519 +drz=0.000753 +convention=position_vector
+
+<INDI_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.001137 +dry=0.000013 +drz=0.001444 +convention=position_vector
+
+<NAZC_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000327 +dry=-0.001561 +drz=0.001605 +convention=position_vector
+
+<NOAM_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.000045 +dry=-0.000666 +drz=-0.000098 +convention=position_vector
+
+<NUBI_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.000090 +dry=-0.000585 +drz=0.000717 +convention=position_vector
+
+<PCFC_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000404 +dry=0.001021 +drz=-0.002154 +convention=position_vector
+
+<SOAM_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000261 +dry=-0.000282 +drz=-0.000157 +convention=position_vector
+
+<SOMA_T> +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000081 +dry=-0.000719 +drz=0.000864 +convention=position_vector
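The entries above are PROJ init-file definitions, so they can be referenced by name as `ITRF2020:<key>`. A hedged sketch of how such an entry might be used (editorial; assumes pyproj resolves the bundled proj_data directory and accepts an init reference inside a pipeline):

```python
from pyproj.transformer import Transformer

# Time-dependent ITRF2020 -> ITRF2014 Helmert shift on geocentric coordinates.
t = Transformer.from_pipeline("+proj=pipeline +step +init=ITRF2020:ITRF2014")
x, y, z, epoch = 4027894.0, 307045.0, 4919565.0, 2025.0  # metres, decimal year
print(t.transform(x, y, z, epoch))
```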
pyogrio/proj_data/proj-config-version.cmake CHANGED
@@ -1,8 +1,8 @@
 # Version checking for PROJ

-set (PACKAGE_VERSION "9.4.0")
+set (PACKAGE_VERSION "9.6.0")
 set (PACKAGE_VERSION_MAJOR "9")
-set (PACKAGE_VERSION_MINOR "4")
+set (PACKAGE_VERSION_MINOR "6")
 set (PACKAGE_VERSION_PATCH "0")

 # These variable definitions parallel those in PROJ's
pyogrio/proj_data/proj-config.cmake CHANGED
@@ -27,7 +27,7 @@ if("TRUE")
 endif()
 cmake_policy(POP)

-find_dependency(SQLite3)
+find_dependency(unofficial-sqlite3)

 if(DEFINED PROJ_CONFIG_FIND_TIFF_DEP)
   find_dependency(TIFF)
pyogrio/proj_data/proj-targets.cmake CHANGED
@@ -1,13 +1,13 @@
 # Generated by CMake

 if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8)
-   message(FATAL_ERROR "CMake >= 2.8.0 required")
+   message(FATAL_ERROR "CMake >= 2.8.12 required")
 endif()
 if(CMAKE_VERSION VERSION_LESS "2.8.12")
    message(FATAL_ERROR "CMake >= 2.8.12 required")
 endif()
 cmake_policy(PUSH)
-cmake_policy(VERSION 2.8.12...3.27)
+cmake_policy(VERSION 2.8.12...3.29)
 #----------------------------------------------------------------
 # Generated CMake target import file.
 #----------------------------------------------------------------
@@ -60,7 +60,7 @@ add_library(PROJ::proj STATIC IMPORTED)
 set_target_properties(PROJ::proj PROPERTIES
   INTERFACE_COMPILE_DEFINITIONS "PROJ_DLL="
   INTERFACE_INCLUDE_DIRECTORIES "${_IMPORT_PREFIX}/include"
-  INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:SQLite::SQLite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
+  INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:unofficial::sqlite3::sqlite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
 )

 # Load information for each installed configuration.
pyogrio/proj_data/proj.db CHANGED
Binary file
pyogrio/proj_data/proj.ini CHANGED
@@ -23,13 +23,13 @@ cache_enabled = on
 cache_size_MB = 300

 ; Time-to-live delay in seconds before already accessed remote resources are
-; acessed again to check if they have been updated.
+; accessed again to check if they have been updated.
 cache_ttl_sec = 86400

 ; Can be set to on so that by default the lack of a known resource files needed
 ; for the best transformation PROJ would normally use causes an error, or off
 ; to accept missing resource files without errors or warnings.
-; This default value itself is overriden by the PROJ_ONLY_BEST_DEFAULT environment
+; This default value itself is overridden by the PROJ_ONLY_BEST_DEFAULT environment
 ; variable if set, and then by the ONLY_BEST setting that can be
 ; passed to the proj_create_crs_to_crs() method, or with the --only-best
 ; option of the cs2cs program.
@@ -38,10 +38,18 @@ cache_ttl_sec = 86400
 only_best_default = off

 ; Filename of the Certificate Authority (CA) bundle.
-; Can be overriden with the PROJ_CURL_CA_BUNDLE / CURL_CA_BUNDLE environment variable.
+; Can be overridden with the PROJ_CURL_CA_BUNDLE / CURL_CA_BUNDLE environment variable.
 ; (added in PROJ 9.0)
 ; ca_bundle_path = /path/to/cabundle.pem

+; When this is set to on, the operating systems native CA store will be used for certificate verification
+; If you set this option to on and also set ca_bundle_path then during verification those certificates are
+; searched in addition to the native CA store.
+; (added in PROJ 9.6)
+; Valid values = on, off
+;native_ca = on
+
+
 ; Transverse Mercator (and UTM) default algorithm: auto, evenden_snyder or poder_engsager
 ; * evenden_snyder is the fastest, but less accurate far from central meridian
 ; * poder_engsager is slower, but more accurate far from central meridian
pyogrio/proj_data/proj4-targets.cmake CHANGED
@@ -1,13 +1,13 @@
 # Generated by CMake

 if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8)
-   message(FATAL_ERROR "CMake >= 2.8.0 required")
+   message(FATAL_ERROR "CMake >= 2.8.12 required")
 endif()
 if(CMAKE_VERSION VERSION_LESS "2.8.12")
    message(FATAL_ERROR "CMake >= 2.8.12 required")
 endif()
 cmake_policy(PUSH)
-cmake_policy(VERSION 2.8.12...3.27)
+cmake_policy(VERSION 2.8.12...3.29)
 #----------------------------------------------------------------
 # Generated CMake target import file.
 #----------------------------------------------------------------
@@ -60,7 +60,7 @@ add_library(PROJ4::proj STATIC IMPORTED)
 set_target_properties(PROJ4::proj PROPERTIES
   INTERFACE_COMPILE_DEFINITIONS "PROJ_DLL="
   INTERFACE_INCLUDE_DIRECTORIES "${_IMPORT_PREFIX}/include"
-  INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:SQLite::SQLite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
+  INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:unofficial::sqlite3::sqlite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
 )

 # Load information for each installed configuration.
pyogrio/proj_data/projjson.schema.json CHANGED
@@ -2,7 +2,7 @@
   "$id": "https://proj.org/schemas/v0.7/projjson.schema.json",
   "$schema": "http://json-schema.org/draft-07/schema#",
   "description": "Schema for PROJJSON (v0.7)",
-  "$comment": "This file exists both in data/ and in schemas/vXXX/. Keep both in sync. And if changing the value of $id, change PROJJSON_DEFAULT_VERSION accordingly in io.cpp",
+  "$comment": "This document is copyright Even Rouault and PROJ contributors, 2019-2023, and subject to the MIT license. This file exists both in data/ and in schemas/vXXX/. Keep both in sync. And if changing the value of $id, change PROJJSON_DEFAULT_VERSION accordingly in io.cpp",

   "oneOf": [
     { "$ref": "#/definitions/crs" },
pyogrio/proj_data/usage CHANGED
@@ -1,4 +1,9 @@
 proj provides CMake targets:

-    find_package(PROJ CONFIG REQUIRED)
-    target_link_libraries(main PRIVATE PROJ::proj)
+    find_package(PROJ CONFIG REQUIRED)
+    target_link_libraries(main PRIVATE PROJ::proj)
+
+proj provides pkg-config modules:
+
+    # Coordinate transformation software library
+    proj