pyogrio 0.9.0__cp38-cp38-macosx_12_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyogrio might be problematic. Click here for more details.

Files changed (239) hide show
  1. pyogrio/.dylibs/libgdal.34.3.8.5.dylib +0 -0
  2. pyogrio/__init__.py +48 -0
  3. pyogrio/_compat.py +41 -0
  4. pyogrio/_env.py +61 -0
  5. pyogrio/_err.cpython-38-darwin.so +0 -0
  6. pyogrio/_err.pxd +4 -0
  7. pyogrio/_err.pyx +250 -0
  8. pyogrio/_geometry.cpython-38-darwin.so +0 -0
  9. pyogrio/_geometry.pxd +4 -0
  10. pyogrio/_geometry.pyx +129 -0
  11. pyogrio/_io.cpython-38-darwin.so +0 -0
  12. pyogrio/_io.pxd +0 -0
  13. pyogrio/_io.pyx +2742 -0
  14. pyogrio/_ogr.cpython-38-darwin.so +0 -0
  15. pyogrio/_ogr.pxd +444 -0
  16. pyogrio/_ogr.pyx +346 -0
  17. pyogrio/_version.py +21 -0
  18. pyogrio/_vsi.cpython-38-darwin.so +0 -0
  19. pyogrio/_vsi.pxd +4 -0
  20. pyogrio/_vsi.pyx +140 -0
  21. pyogrio/arrow_bridge.h +115 -0
  22. pyogrio/core.py +320 -0
  23. pyogrio/errors.py +32 -0
  24. pyogrio/gdal_data/GDAL-targets-release.cmake +19 -0
  25. pyogrio/gdal_data/GDAL-targets.cmake +105 -0
  26. pyogrio/gdal_data/GDALConfig.cmake +25 -0
  27. pyogrio/gdal_data/GDALConfigVersion.cmake +85 -0
  28. pyogrio/gdal_data/GDALLogoBW.svg +138 -0
  29. pyogrio/gdal_data/GDALLogoColor.svg +126 -0
  30. pyogrio/gdal_data/GDALLogoGS.svg +126 -0
  31. pyogrio/gdal_data/LICENSE.TXT +467 -0
  32. pyogrio/gdal_data/bag_template.xml +201 -0
  33. pyogrio/gdal_data/copyright +467 -0
  34. pyogrio/gdal_data/cubewerx_extra.wkt +48 -0
  35. pyogrio/gdal_data/default.rsc +0 -0
  36. pyogrio/gdal_data/ecw_cs.wkt +1453 -0
  37. pyogrio/gdal_data/eedaconf.json +23 -0
  38. pyogrio/gdal_data/epsg.wkt +1 -0
  39. pyogrio/gdal_data/esri_StatePlane_extra.wkt +631 -0
  40. pyogrio/gdal_data/gdalicon.png +0 -0
  41. pyogrio/gdal_data/gdalinfo_output.schema.json +346 -0
  42. pyogrio/gdal_data/gdalmdiminfo_output.schema.json +321 -0
  43. pyogrio/gdal_data/gdalvrt.xsd +772 -0
  44. pyogrio/gdal_data/gfs.xsd +246 -0
  45. pyogrio/gdal_data/gml_registry.xml +117 -0
  46. pyogrio/gdal_data/gml_registry.xsd +66 -0
  47. pyogrio/gdal_data/gmlasconf.xml +169 -0
  48. pyogrio/gdal_data/gmlasconf.xsd +1066 -0
  49. pyogrio/gdal_data/grib2_center.csv +251 -0
  50. pyogrio/gdal_data/grib2_process.csv +102 -0
  51. pyogrio/gdal_data/grib2_subcenter.csv +63 -0
  52. pyogrio/gdal_data/grib2_table_4_2_0_0.csv +261 -0
  53. pyogrio/gdal_data/grib2_table_4_2_0_1.csv +261 -0
  54. pyogrio/gdal_data/grib2_table_4_2_0_13.csv +261 -0
  55. pyogrio/gdal_data/grib2_table_4_2_0_14.csv +261 -0
  56. pyogrio/gdal_data/grib2_table_4_2_0_15.csv +261 -0
  57. pyogrio/gdal_data/grib2_table_4_2_0_16.csv +261 -0
  58. pyogrio/gdal_data/grib2_table_4_2_0_17.csv +11 -0
  59. pyogrio/gdal_data/grib2_table_4_2_0_18.csv +261 -0
  60. pyogrio/gdal_data/grib2_table_4_2_0_19.csv +261 -0
  61. pyogrio/gdal_data/grib2_table_4_2_0_190.csv +261 -0
  62. pyogrio/gdal_data/grib2_table_4_2_0_191.csv +261 -0
  63. pyogrio/gdal_data/grib2_table_4_2_0_2.csv +261 -0
  64. pyogrio/gdal_data/grib2_table_4_2_0_20.csv +261 -0
  65. pyogrio/gdal_data/grib2_table_4_2_0_21.csv +261 -0
  66. pyogrio/gdal_data/grib2_table_4_2_0_3.csv +261 -0
  67. pyogrio/gdal_data/grib2_table_4_2_0_4.csv +261 -0
  68. pyogrio/gdal_data/grib2_table_4_2_0_5.csv +261 -0
  69. pyogrio/gdal_data/grib2_table_4_2_0_6.csv +261 -0
  70. pyogrio/gdal_data/grib2_table_4_2_0_7.csv +261 -0
  71. pyogrio/gdal_data/grib2_table_4_2_10_0.csv +261 -0
  72. pyogrio/gdal_data/grib2_table_4_2_10_1.csv +261 -0
  73. pyogrio/gdal_data/grib2_table_4_2_10_191.csv +261 -0
  74. pyogrio/gdal_data/grib2_table_4_2_10_2.csv +261 -0
  75. pyogrio/gdal_data/grib2_table_4_2_10_3.csv +261 -0
  76. pyogrio/gdal_data/grib2_table_4_2_10_4.csv +261 -0
  77. pyogrio/gdal_data/grib2_table_4_2_1_0.csv +261 -0
  78. pyogrio/gdal_data/grib2_table_4_2_1_1.csv +261 -0
  79. pyogrio/gdal_data/grib2_table_4_2_1_2.csv +261 -0
  80. pyogrio/gdal_data/grib2_table_4_2_20_0.csv +261 -0
  81. pyogrio/gdal_data/grib2_table_4_2_20_1.csv +261 -0
  82. pyogrio/gdal_data/grib2_table_4_2_20_2.csv +261 -0
  83. pyogrio/gdal_data/grib2_table_4_2_2_0.csv +261 -0
  84. pyogrio/gdal_data/grib2_table_4_2_2_3.csv +261 -0
  85. pyogrio/gdal_data/grib2_table_4_2_2_4.csv +261 -0
  86. pyogrio/gdal_data/grib2_table_4_2_2_5.csv +261 -0
  87. pyogrio/gdal_data/grib2_table_4_2_2_6.csv +261 -0
  88. pyogrio/gdal_data/grib2_table_4_2_3_0.csv +261 -0
  89. pyogrio/gdal_data/grib2_table_4_2_3_1.csv +261 -0
  90. pyogrio/gdal_data/grib2_table_4_2_3_2.csv +28 -0
  91. pyogrio/gdal_data/grib2_table_4_2_3_3.csv +8 -0
  92. pyogrio/gdal_data/grib2_table_4_2_3_4.csv +14 -0
  93. pyogrio/gdal_data/grib2_table_4_2_3_5.csv +11 -0
  94. pyogrio/gdal_data/grib2_table_4_2_3_6.csv +11 -0
  95. pyogrio/gdal_data/grib2_table_4_2_4_0.csv +261 -0
  96. pyogrio/gdal_data/grib2_table_4_2_4_1.csv +261 -0
  97. pyogrio/gdal_data/grib2_table_4_2_4_10.csv +261 -0
  98. pyogrio/gdal_data/grib2_table_4_2_4_2.csv +261 -0
  99. pyogrio/gdal_data/grib2_table_4_2_4_3.csv +261 -0
  100. pyogrio/gdal_data/grib2_table_4_2_4_4.csv +261 -0
  101. pyogrio/gdal_data/grib2_table_4_2_4_5.csv +261 -0
  102. pyogrio/gdal_data/grib2_table_4_2_4_6.csv +261 -0
  103. pyogrio/gdal_data/grib2_table_4_2_4_7.csv +261 -0
  104. pyogrio/gdal_data/grib2_table_4_2_4_8.csv +261 -0
  105. pyogrio/gdal_data/grib2_table_4_2_4_9.csv +261 -0
  106. pyogrio/gdal_data/grib2_table_4_2_local_Canada.csv +5 -0
  107. pyogrio/gdal_data/grib2_table_4_2_local_HPC.csv +2 -0
  108. pyogrio/gdal_data/grib2_table_4_2_local_MRMS.csv +175 -0
  109. pyogrio/gdal_data/grib2_table_4_2_local_NCEP.csv +401 -0
  110. pyogrio/gdal_data/grib2_table_4_2_local_NDFD.csv +38 -0
  111. pyogrio/gdal_data/grib2_table_4_2_local_index.csv +7 -0
  112. pyogrio/gdal_data/grib2_table_4_5.csv +261 -0
  113. pyogrio/gdal_data/grib2_table_versions.csv +3 -0
  114. pyogrio/gdal_data/gt_datum.csv +229 -0
  115. pyogrio/gdal_data/gt_ellips.csv +24 -0
  116. pyogrio/gdal_data/header.dxf +1124 -0
  117. pyogrio/gdal_data/inspire_cp_BasicPropertyUnit.gfs +57 -0
  118. pyogrio/gdal_data/inspire_cp_CadastralBoundary.gfs +60 -0
  119. pyogrio/gdal_data/inspire_cp_CadastralParcel.gfs +81 -0
  120. pyogrio/gdal_data/inspire_cp_CadastralZoning.gfs +161 -0
  121. pyogrio/gdal_data/jpfgdgml_AdmArea.gfs +59 -0
  122. pyogrio/gdal_data/jpfgdgml_AdmBdry.gfs +49 -0
  123. pyogrio/gdal_data/jpfgdgml_AdmPt.gfs +59 -0
  124. pyogrio/gdal_data/jpfgdgml_BldA.gfs +54 -0
  125. pyogrio/gdal_data/jpfgdgml_BldL.gfs +54 -0
  126. pyogrio/gdal_data/jpfgdgml_Cntr.gfs +54 -0
  127. pyogrio/gdal_data/jpfgdgml_CommBdry.gfs +49 -0
  128. pyogrio/gdal_data/jpfgdgml_CommPt.gfs +59 -0
  129. pyogrio/gdal_data/jpfgdgml_Cstline.gfs +54 -0
  130. pyogrio/gdal_data/jpfgdgml_ElevPt.gfs +54 -0
  131. pyogrio/gdal_data/jpfgdgml_GCP.gfs +94 -0
  132. pyogrio/gdal_data/jpfgdgml_LeveeEdge.gfs +49 -0
  133. pyogrio/gdal_data/jpfgdgml_RailCL.gfs +54 -0
  134. pyogrio/gdal_data/jpfgdgml_RdASL.gfs +44 -0
  135. pyogrio/gdal_data/jpfgdgml_RdArea.gfs +54 -0
  136. pyogrio/gdal_data/jpfgdgml_RdCompt.gfs +59 -0
  137. pyogrio/gdal_data/jpfgdgml_RdEdg.gfs +59 -0
  138. pyogrio/gdal_data/jpfgdgml_RdMgtBdry.gfs +49 -0
  139. pyogrio/gdal_data/jpfgdgml_RdSgmtA.gfs +59 -0
  140. pyogrio/gdal_data/jpfgdgml_RvrMgtBdry.gfs +49 -0
  141. pyogrio/gdal_data/jpfgdgml_SBAPt.gfs +49 -0
  142. pyogrio/gdal_data/jpfgdgml_SBArea.gfs +54 -0
  143. pyogrio/gdal_data/jpfgdgml_SBBdry.gfs +44 -0
  144. pyogrio/gdal_data/jpfgdgml_WA.gfs +54 -0
  145. pyogrio/gdal_data/jpfgdgml_WL.gfs +54 -0
  146. pyogrio/gdal_data/jpfgdgml_WStrA.gfs +54 -0
  147. pyogrio/gdal_data/jpfgdgml_WStrL.gfs +54 -0
  148. pyogrio/gdal_data/netcdf_config.xsd +143 -0
  149. pyogrio/gdal_data/nitf_spec.xml +3306 -0
  150. pyogrio/gdal_data/nitf_spec.xsd +189 -0
  151. pyogrio/gdal_data/ogrinfo_output.schema.json +505 -0
  152. pyogrio/gdal_data/ogrvrt.xsd +543 -0
  153. pyogrio/gdal_data/osmconf.ini +132 -0
  154. pyogrio/gdal_data/ozi_datum.csv +131 -0
  155. pyogrio/gdal_data/ozi_ellips.csv +35 -0
  156. pyogrio/gdal_data/pci_datum.txt +463 -0
  157. pyogrio/gdal_data/pci_ellips.txt +77 -0
  158. pyogrio/gdal_data/pdfcomposition.xsd +721 -0
  159. pyogrio/gdal_data/pds4_template.xml +65 -0
  160. pyogrio/gdal_data/plscenesconf.json +1985 -0
  161. pyogrio/gdal_data/ruian_vf_ob_v1.gfs +1455 -0
  162. pyogrio/gdal_data/ruian_vf_st_uvoh_v1.gfs +86 -0
  163. pyogrio/gdal_data/ruian_vf_st_v1.gfs +1489 -0
  164. pyogrio/gdal_data/ruian_vf_v1.gfs +2126 -0
  165. pyogrio/gdal_data/s57agencies.csv +249 -0
  166. pyogrio/gdal_data/s57attributes.csv +484 -0
  167. pyogrio/gdal_data/s57expectedinput.csv +1008 -0
  168. pyogrio/gdal_data/s57objectclasses.csv +287 -0
  169. pyogrio/gdal_data/seed_2d.dgn +0 -0
  170. pyogrio/gdal_data/seed_3d.dgn +0 -0
  171. pyogrio/gdal_data/stateplane.csv +259 -0
  172. pyogrio/gdal_data/template_tiles.mapml +28 -0
  173. pyogrio/gdal_data/tms_LINZAntarticaMapTileGrid.json +190 -0
  174. pyogrio/gdal_data/tms_MapML_APSTILE.json +268 -0
  175. pyogrio/gdal_data/tms_MapML_CBMTILE.json +346 -0
  176. pyogrio/gdal_data/tms_NZTM2000.json +243 -0
  177. pyogrio/gdal_data/trailer.dxf +434 -0
  178. pyogrio/gdal_data/usage +4 -0
  179. pyogrio/gdal_data/vcpkg-cmake-wrapper.cmake +23 -0
  180. pyogrio/gdal_data/vcpkg.spdx.json +264 -0
  181. pyogrio/gdal_data/vcpkg_abi_info.txt +41 -0
  182. pyogrio/gdal_data/vdv452.xml +367 -0
  183. pyogrio/gdal_data/vdv452.xsd +63 -0
  184. pyogrio/gdal_data/vicar.json +164 -0
  185. pyogrio/geopandas.py +675 -0
  186. pyogrio/proj_data/CH +22 -0
  187. pyogrio/proj_data/GL27 +23 -0
  188. pyogrio/proj_data/ITRF2000 +24 -0
  189. pyogrio/proj_data/ITRF2008 +94 -0
  190. pyogrio/proj_data/ITRF2014 +55 -0
  191. pyogrio/proj_data/copyright +34 -0
  192. pyogrio/proj_data/deformation_model.schema.json +582 -0
  193. pyogrio/proj_data/nad.lst +142 -0
  194. pyogrio/proj_data/nad27 +810 -0
  195. pyogrio/proj_data/nad83 +745 -0
  196. pyogrio/proj_data/other.extra +53 -0
  197. pyogrio/proj_data/proj-config-version.cmake +44 -0
  198. pyogrio/proj_data/proj-config.cmake +79 -0
  199. pyogrio/proj_data/proj-targets-release.cmake +19 -0
  200. pyogrio/proj_data/proj-targets.cmake +107 -0
  201. pyogrio/proj_data/proj.db +0 -0
  202. pyogrio/proj_data/proj.ini +51 -0
  203. pyogrio/proj_data/proj4-targets-release.cmake +19 -0
  204. pyogrio/proj_data/proj4-targets.cmake +107 -0
  205. pyogrio/proj_data/projjson.schema.json +1174 -0
  206. pyogrio/proj_data/triangulation.schema.json +214 -0
  207. pyogrio/proj_data/usage +4 -0
  208. pyogrio/proj_data/vcpkg.spdx.json +198 -0
  209. pyogrio/proj_data/vcpkg_abi_info.txt +27 -0
  210. pyogrio/proj_data/world +214 -0
  211. pyogrio/raw.py +871 -0
  212. pyogrio/tests/__init__.py +0 -0
  213. pyogrio/tests/conftest.py +204 -0
  214. pyogrio/tests/fixtures/README.md +89 -0
  215. pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.cpg +1 -0
  216. pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.dbf +0 -0
  217. pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.prj +1 -0
  218. pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.shp +0 -0
  219. pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.shx +0 -0
  220. pyogrio/tests/fixtures/poly_not_enough_points.shp.zip +0 -0
  221. pyogrio/tests/fixtures/sample.osm.pbf +0 -0
  222. pyogrio/tests/fixtures/test_datetime.geojson +7 -0
  223. pyogrio/tests/fixtures/test_datetime_tz.geojson +8 -0
  224. pyogrio/tests/fixtures/test_fgdb.gdb.zip +0 -0
  225. pyogrio/tests/fixtures/test_gpkg_nulls.gpkg +0 -0
  226. pyogrio/tests/fixtures/test_multisurface.gpkg +0 -0
  227. pyogrio/tests/fixtures/test_nested.geojson +18 -0
  228. pyogrio/tests/fixtures/test_ogr_types_list.geojson +12 -0
  229. pyogrio/tests/test_arrow.py +1041 -0
  230. pyogrio/tests/test_core.py +588 -0
  231. pyogrio/tests/test_geopandas_io.py +2174 -0
  232. pyogrio/tests/test_path.py +352 -0
  233. pyogrio/tests/test_raw_io.py +1404 -0
  234. pyogrio/util.py +223 -0
  235. pyogrio-0.9.0.dist-info/LICENSE +21 -0
  236. pyogrio-0.9.0.dist-info/METADATA +100 -0
  237. pyogrio-0.9.0.dist-info/RECORD +239 -0
  238. pyogrio-0.9.0.dist-info/WHEEL +5 -0
  239. pyogrio-0.9.0.dist-info/top_level.txt +1 -0
pyogrio/geopandas.py ADDED
@@ -0,0 +1,675 @@
1
+ import os
2
+
3
+ import numpy as np
4
+
5
+ from pyogrio._compat import HAS_GEOPANDAS, PANDAS_GE_15, PANDAS_GE_20, PANDAS_GE_22
6
+ from pyogrio.raw import (
7
+ DRIVERS_NO_MIXED_SINGLE_MULTI,
8
+ DRIVERS_NO_MIXED_DIMENSIONS,
9
+ read,
10
+ read_arrow,
11
+ write,
12
+ _get_write_path_driver,
13
+ )
14
+ from pyogrio.errors import DataSourceError
15
+ import warnings
16
+
17
+
18
+ def _stringify_path(path):
19
+ """
20
+ Convert path-like to a string if possible, pass-through other objects
21
+ """
22
+ if isinstance(path, str):
23
+ return path
24
+
25
+ # checking whether path implements the filesystem protocol
26
+ if hasattr(path, "__fspath__"):
27
+ return path.__fspath__()
28
+
29
+ # pass-though other objects
30
+ return path
31
+
32
+
33
def _try_parse_datetime(ser):
    """Best-effort conversion of a Series of datetime strings to datetime dtype.

    Parsing keyword arguments depend on the installed pandas version. If
    parsing fails, the original Series is returned unchanged; if the first
    attempt yields object dtype (e.g. mixed time zones), a second attempt is
    made with conversion to UTC. Successfully parsed values are reduced to
    millisecond precision, since GDAL only supports ms resolution.
    """
    import pandas as pd  # only called when pandas is known to be installed

    if PANDAS_GE_22:
        parse_kwargs = {"format": "ISO8601"}
    elif PANDAS_GE_20:
        parse_kwargs = {"format": "ISO8601", "errors": "ignore"}
    else:
        parse_kwargs = {"yearfirst": True}

    with warnings.catch_warnings():
        warnings.filterwarnings(
            "ignore",
            ".*parsing datetimes with mixed time zones will raise.*",
            FutureWarning,
        )
        # pre-emptive try catch for when pandas will raise
        # (can tighten the exception type in future when it does)
        try:
            parsed = pd.to_datetime(ser, **parse_kwargs)
        except Exception:
            parsed = ser
        if parsed.dtype == "object":
            # object dtype, e.g. mixed time zones: retry parsing as UTC
            try:
                parsed = pd.to_datetime(ser, utc=True, **parse_kwargs)
            except Exception:
                pass

    if parsed.dtype != "object":
        # GDAL only supports ms precision, convert outputs to match.
        # Pandas 2.0 supports datetime[ms] directly; prior versions only
        # support [ns], so round the values to [ms] precision instead.
        parsed = (
            parsed.dt.as_unit("ms") if PANDAS_GE_20 else parsed.dt.round(freq="ms")
        )
    return parsed
70
+
71
+
72
def read_dataframe(
    path_or_buffer,
    /,
    layer=None,
    encoding=None,
    columns=None,
    read_geometry=True,
    force_2d=False,
    skip_features=0,
    max_features=None,
    where=None,
    bbox=None,
    mask=None,
    fids=None,
    sql=None,
    sql_dialect=None,
    fid_as_index=False,
    use_arrow=None,
    on_invalid="raise",
    arrow_to_pandas_kwargs=None,
    **kwargs,
):
    """Read from an OGR data source to a GeoPandas GeoDataFrame or Pandas DataFrame.

    If the data source does not have a geometry column or ``read_geometry`` is False,
    a DataFrame will be returned.

    Requires ``geopandas`` >= 0.8.

    Parameters
    ----------
    path_or_buffer : pathlib.Path or str, or bytes buffer
        A dataset path or URI, or raw buffer.
    layer : int or str, optional (default: first layer)
        If an integer is provided, it corresponds to the index of the layer
        with the data source. If a string is provided, it must match the name
        of the layer in the data source. Defaults to first layer in data source.
    encoding : str, optional (default: None)
        If present, will be used as the encoding for reading string values from
        the data source. By default will automatically try to detect the native
        encoding and decode to ``UTF-8``.
    columns : list-like, optional (default: all columns)
        List of column names to import from the data source. Column names must
        exactly match the names in the data source, and will be returned in
        the order they occur in the data source. To avoid reading any columns,
        pass an empty list-like. If combined with ``where`` parameter, must
        include columns referenced in the ``where`` expression or the data may
        not be correctly read; the data source may return empty results or
        raise an exception (behavior varies by driver).
    read_geometry : bool, optional (default: True)
        If True, will read geometry into a GeoSeries. If False, a Pandas DataFrame
        will be returned instead.
    force_2d : bool, optional (default: False)
        If the geometry has Z values, setting this to True will cause those to
        be ignored and 2D geometries to be returned
    skip_features : int, optional (default: 0)
        Number of features to skip from the beginning of the file before
        returning features. If greater than available number of features, an
        empty DataFrame will be returned. Using this parameter may incur
        significant overhead if the driver does not support the capability to
        randomly seek to a specific feature, because it will need to iterate
        over all prior features.
    max_features : int, optional (default: None)
        Number of features to read from the file.
    where : str, optional (default: None)
        Where clause to filter features in layer by attribute values. If the data source
        natively supports SQL, its specific SQL dialect should be used (eg. SQLite and
        GeoPackage: `SQLITE`_, PostgreSQL). If it doesn't, the `OGRSQL WHERE`_ syntax
        should be used. Note that it is not possible to overrule the SQL dialect, this
        is only possible when you use the ``sql`` parameter.
        Examples: ``"ISO_A3 = 'CAN'"``, ``"POP_EST > 10000000 AND POP_EST < 100000000"``
    bbox : tuple of (xmin, ymin, xmax, ymax) (default: None)
        If present, will be used to filter records whose geometry intersects this
        box. This must be in the same CRS as the dataset. If GEOS is present
        and used by GDAL, only geometries that intersect this bbox will be
        returned; if GEOS is not available or not used by GDAL, all geometries
        with bounding boxes that intersect this bbox will be returned.
        Cannot be combined with ``mask`` keyword.
    mask : Shapely geometry, optional (default: None)
        If present, will be used to filter records whose geometry intersects
        this geometry. This must be in the same CRS as the dataset. If GEOS is
        present and used by GDAL, only geometries that intersect this geometry
        will be returned; if GEOS is not available or not used by GDAL, all
        geometries with bounding boxes that intersect the bounding box of this
        geometry will be returned. Requires Shapely >= 2.0.
        Cannot be combined with ``bbox`` keyword.
    fids : array-like, optional (default: None)
        Array of integer feature id (FID) values to select. Cannot be combined
        with other keywords to select a subset (``skip_features``,
        ``max_features``, ``where``, ``bbox``, ``mask``, or ``sql``). Note that
        the starting index is driver and file specific (e.g. typically 0 for
        Shapefile and 1 for GeoPackage, but can still depend on the specific
        file). The performance of reading a large number of features using FIDs
        is also driver specific and depends on the value of ``use_arrow``. The order
        of the rows returned is undefined. If you would like to sort based on FID, use
        ``fid_as_index=True`` to have the index of the GeoDataFrame returned set to the
        FIDs of the features read. If ``use_arrow=True``, the number of FIDs is limited
        to 4997 for drivers with 'OGRSQL' as default SQL dialect. To read a larger
        number of FIDs, set ``use_arrow=False``.
    sql : str, optional (default: None)
        The SQL statement to execute. Look at the sql_dialect parameter for more
        information on the syntax to use for the query. When combined with other
        keywords like ``columns``, ``skip_features``, ``max_features``,
        ``where``, ``bbox``, or ``mask``, those are applied after the SQL query.
        Be aware that this can have an impact on performance, (e.g. filtering
        with the ``bbox`` or ``mask`` keywords may not use spatial indexes).
        Cannot be combined with the ``layer`` or ``fids`` keywords.
    sql_dialect : str, optional (default: None)
        The SQL dialect the SQL statement is written in. Possible values:

        - **None**: if the data source natively supports SQL, its specific SQL dialect
          will be used by default (eg. SQLite and Geopackage: `SQLITE`_, PostgreSQL).
          If the data source doesn't natively support SQL, the `OGRSQL`_ dialect is
          the default.
        - '`OGRSQL`_': can be used on any data source. Performance can suffer
          when used on data sources with native support for SQL.
        - '`SQLITE`_': can be used on any data source. All spatialite_
          functions can be used. Performance can suffer on data sources with
          native support for SQL, except for Geopackage and SQLite as this is
          their native SQL dialect.

    fid_as_index : bool, optional (default: False)
        If True, will use the FIDs of the features that were read as the
        index of the GeoDataFrame. May start at 0 or 1 depending on the driver.
    use_arrow : bool, optional (default: False)
        Whether to use Arrow as the transfer mechanism of the read data
        from GDAL to Python (requires GDAL >= 3.6 and `pyarrow` to be
        installed). When enabled, this provides a further speed-up.
        Defaults to False, but this default can also be globally overridden
        by setting the ``PYOGRIO_USE_ARROW=1`` environment variable.
    on_invalid : str, optional (default: "raise")

        - **raise**: an exception will be raised if a WKB input geometry is
          invalid.
        - **warn**: a warning will be raised and invalid WKB geometries will be
          returned as ``None``.
        - **ignore**: invalid WKB geometries will be returned as ``None``
          without a warning.

    arrow_to_pandas_kwargs : dict, optional (default: None)
        When `use_arrow` is True, these kwargs will be passed to the `to_pandas`_
        call for the arrow to pandas conversion.
    **kwargs
        Additional driver-specific dataset open options passed to OGR. Invalid
        options will trigger a warning.

    Returns
    -------
    GeoDataFrame or DataFrame (if no geometry is present)

    .. _OGRSQL:

        https://gdal.org/user/ogr_sql_dialect.html#ogr-sql-dialect

    .. _OGRSQL WHERE:

        https://gdal.org/user/ogr_sql_dialect.html#where

    .. _SQLITE:

        https://gdal.org/user/sql_sqlite_dialect.html#sql-sqlite-dialect

    .. _spatialite:

        https://www.gaia-gis.it/gaia-sins/spatialite-sql-latest.html

    .. _to_pandas:

        https://arrow.apache.org/docs/python/generated/pyarrow.Table.html#pyarrow.Table.to_pandas

    """  # noqa: E501
    if not HAS_GEOPANDAS:
        raise ImportError("geopandas is required to use pyogrio.read_dataframe()")

    # imported lazily so that pyogrio can be used without geopandas installed
    import pandas as pd
    import geopandas as gp
    import shapely  # if geopandas is present, shapely is expected to be present

    path_or_buffer = _stringify_path(path_or_buffer)

    # environment variable only consulted when the keyword was not given
    if use_arrow is None:
        use_arrow = bool(int(os.environ.get("PYOGRIO_USE_ARROW", "0")))

    read_func = read_arrow if use_arrow else read
    # with arrow, 2D-forcing is done in Python below (shapely.force_2d)
    gdal_force_2d = False if use_arrow else force_2d
    if not use_arrow:
        # For arrow, datetimes are read as is.
        # For numpy IO, datetimes are read as string values to preserve timezone info
        # as numpy does not directly support timezones.
        kwargs["datetime_as_string"] = True
    result = read_func(
        path_or_buffer,
        layer=layer,
        encoding=encoding,
        columns=columns,
        read_geometry=read_geometry,
        force_2d=gdal_force_2d,
        skip_features=skip_features,
        max_features=max_features,
        where=where,
        bbox=bbox,
        mask=mask,
        fids=fids,
        sql=sql,
        sql_dialect=sql_dialect,
        return_fids=fid_as_index,
        **kwargs,
    )

    if use_arrow:
        meta, table = result

        # split_blocks and self_destruct decrease memory usage, but have as side effect
        # that accessing table afterwards causes crash, so del table to avoid.
        kwargs = {"self_destruct": True}
        if arrow_to_pandas_kwargs is not None:
            kwargs.update(arrow_to_pandas_kwargs)
        df = table.to_pandas(**kwargs)
        del table

        if fid_as_index:
            df = df.set_index(meta["fid_column"])
            df.index.names = ["fid"]

        geometry_name = meta["geometry_name"] or "wkb_geometry"
        if not fid_as_index and len(df.columns) == 0:
            # Index not asked, no geometry column and no attribute columns: return empty
            return pd.DataFrame()
        elif geometry_name in df.columns:
            wkb_values = df.pop(geometry_name)
            if PANDAS_GE_15 and wkb_values.dtype != object:
                # for example ArrowDtype will otherwise create numpy array with pd.NA
                wkb_values = wkb_values.to_numpy(na_value=None)
            df["geometry"] = shapely.from_wkb(wkb_values, on_invalid=on_invalid)
            if force_2d:
                df["geometry"] = shapely.force_2d(df["geometry"])
            return gp.GeoDataFrame(df, geometry="geometry", crs=meta["crs"])
        else:
            return df

    # numpy-based IO path: result is (meta, fids, wkb geometries, field arrays)
    meta, index, geometry, field_data = result

    columns = meta["fields"].tolist()
    data = {columns[i]: field_data[i] for i in range(len(columns))}
    if fid_as_index:
        index = pd.Index(index, name="fid")
    else:
        index = None
    df = pd.DataFrame(data, columns=columns, index=index)
    # datetime fields arrive as strings (datetime_as_string=True above);
    # parse them back to datetime dtype here
    for dtype, c in zip(meta["dtypes"], df.columns):
        if dtype.startswith("datetime"):
            df[c] = _try_parse_datetime(df[c])

    if geometry is None or not read_geometry:
        return df

    geometry = shapely.from_wkb(geometry, on_invalid=on_invalid)

    return gp.GeoDataFrame(df, geometry=geometry, crs=meta["crs"])
330
+
331
+
332
+ # TODO: handle index properly
333
+ def write_dataframe(
334
+ df,
335
+ path,
336
+ layer=None,
337
+ driver=None,
338
+ encoding=None,
339
+ geometry_type=None,
340
+ promote_to_multi=None,
341
+ nan_as_null=True,
342
+ append=False,
343
+ use_arrow=None,
344
+ dataset_metadata=None,
345
+ layer_metadata=None,
346
+ metadata=None,
347
+ dataset_options=None,
348
+ layer_options=None,
349
+ **kwargs,
350
+ ):
351
+ """
352
+ Write GeoPandas GeoDataFrame to an OGR file format.
353
+
354
+ Parameters
355
+ ----------
356
+ df : GeoDataFrame or DataFrame
357
+ The data to write. For attribute columns of the "object" dtype,
358
+ all values will be converted to strings to be written to the
359
+ output file, except None and np.nan, which will be set to NULL
360
+ in the output file.
361
+ path : str or io.BytesIO
362
+ path to output file on writeable file system or an io.BytesIO object to
363
+ allow writing to memory
364
+ NOTE: support for writing to memory is limited to specific drivers.
365
+ layer : str, optional (default: None)
366
+ layer name to create. If writing to memory and layer name is not
367
+ provided, its layer name will be set to a UUID4 value.
368
+ driver : string, optional (default: None)
369
+ The OGR format driver used to write the vector file. By default attempts
370
+ to infer driver from path. Must be provided to write to memory.
371
+ encoding : str, optional (default: None)
372
+ If present, will be used as the encoding for writing string values to
373
+ the file. Use with caution, only certain drivers support encodings
374
+ other than UTF-8.
375
+ geometry_type : string, optional (default: None)
376
+ By default, the geometry type of the layer will be inferred from the
377
+ data, after applying the promote_to_multi logic. If the data only contains a
378
+ single geometry type (after applying the logic of promote_to_multi), this type
379
+ is used for the layer. If the data (still) contains mixed geometry types, the
380
+ output layer geometry type will be set to "Unknown".
381
+
382
+ This parameter does not modify the geometry, but it will try to force the layer
383
+ type of the output file to this value. Use this parameter with caution because
384
+ using a non-default layer geometry type may result in errors when writing the
385
+ file, may be ignored by the driver, or may result in invalid files. Possible
386
+ values are: "Unknown", "Point", "LineString", "Polygon", "MultiPoint",
387
+ "MultiLineString", "MultiPolygon" or "GeometryCollection".
388
+ promote_to_multi : bool, optional (default: None)
389
+ If True, will convert singular geometry types in the data to their
390
+ corresponding multi geometry type for writing. By default, will convert
391
+ mixed singular and multi geometry types to multi geometry types for drivers
392
+ that do not support mixed singular and multi geometry types. If False, geometry
393
+ types will not be promoted, which may result in errors or invalid files when
394
+ attempting to write mixed singular and multi geometry types to drivers that do
395
+ not support such combinations.
396
+ nan_as_null : bool, default True
397
+ For floating point columns (float32 / float64), whether NaN values are
398
+ written as "null" (missing value). Defaults to True because in pandas
399
+ NaNs are typically used as missing value. Note that when set to False,
400
+ behaviour is format specific: some formats don't support NaNs by
401
+ default (e.g. GeoJSON will skip this property) or might treat them as
402
+ null anyway (e.g. GeoPackage).
403
+ append : bool, optional (default: False)
404
+ If True, the data source specified by path already exists, and the
405
+ driver supports appending to an existing data source, will cause the
406
+ data to be appended to the existing records in the data source. Not
407
+ supported for writing to in-memory files.
408
+ NOTE: append support is limited to specific drivers and GDAL versions.
409
+ use_arrow : bool, optional (default: False)
410
+ Whether to use Arrow as the transfer mechanism of the data to write
411
+ from Python to GDAL (requires GDAL >= 3.8 and `pyarrow` to be
412
+ installed). When enabled, this provides a further speed-up.
413
+ Defaults to False, but this default can also be globally overridden
414
+ by setting the ``PYOGRIO_USE_ARROW=1`` environment variable.
415
+ Using Arrow does not support writing an object-dtype column with
416
+ mixed types.
417
+ dataset_metadata : dict, optional (default: None)
418
+ Metadata to be stored at the dataset level in the output file; limited
419
+ to drivers that support writing metadata, such as GPKG, and silently
420
+ ignored otherwise. Keys and values must be strings.
421
+ layer_metadata : dict, optional (default: None)
422
+ Metadata to be stored at the layer level in the output file; limited to
423
+ drivers that support writing metadata, such as GPKG, and silently
424
+ ignored otherwise. Keys and values must be strings.
425
+ metadata : dict, optional (default: None)
426
+ alias of layer_metadata
427
+ dataset_options : dict, optional
428
+ Dataset creation options (format specific) passed to OGR. Specify as
429
+ a key-value dictionary.
430
+ layer_options : dict, optional
431
+ Layer creation options (format specific) passed to OGR. Specify as
432
+ a key-value dictionary.
433
+ **kwargs
434
+ Additional driver-specific dataset or layer creation options passed
435
+ to OGR. pyogrio will attempt to automatically pass those keywords
436
+ either as dataset or as layer creation option based on the known
437
+ options for the specific driver. Alternatively, you can use the
438
+ explicit `dataset_options` or `layer_options` keywords to manually
439
+ do this (for example if an option exists as both dataset and layer
440
+ option).
441
+ """
442
+ # TODO: add examples to the docstring (e.g. OGR kwargs)
443
+
444
+ if not HAS_GEOPANDAS:
445
+ raise ImportError("geopandas is required to use pyogrio.write_dataframe()")
446
+
447
+ from geopandas.array import to_wkb
448
+ import pandas as pd
449
+ from pyproj.enums import WktVersion # if geopandas is available so is pyproj
450
+
451
+ if not isinstance(df, pd.DataFrame):
452
+ raise ValueError("'df' must be a DataFrame or GeoDataFrame")
453
+
454
+ if use_arrow is None:
455
+ use_arrow = bool(int(os.environ.get("PYOGRIO_USE_ARROW", "0")))
456
+ path, driver = _get_write_path_driver(path, driver, append=append)
457
+
458
+ geometry_columns = df.columns[df.dtypes == "geometry"]
459
+ if len(geometry_columns) > 1:
460
+ raise ValueError(
461
+ "'df' must have only one geometry column. "
462
+ "Multiple geometry columns are not supported for output using OGR."
463
+ )
464
+
465
+ if len(geometry_columns) > 0:
466
+ geometry_column = geometry_columns[0]
467
+ geometry = df[geometry_column]
468
+ fields = [c for c in df.columns if not c == geometry_column]
469
+ else:
470
+ geometry_column = None
471
+ geometry = None
472
+ fields = list(df.columns)
473
+
474
+ # TODO: may need to fill in pd.NA, etc
475
+ field_data = []
476
+ field_mask = []
477
+ # dict[str, np.array(int)] special case for dt-tz fields
478
+ gdal_tz_offsets = {}
479
+ for name in fields:
480
+ col = df[name]
481
+ if isinstance(col.dtype, pd.DatetimeTZDtype):
482
+ # Deal with datetimes with timezones by passing down timezone separately
483
+ # pass down naive datetime
484
+ naive = col.dt.tz_localize(None)
485
+ values = naive.values
486
+ # compute offset relative to UTC explicitly
487
+ tz_offset = naive - col.dt.tz_convert("UTC").dt.tz_localize(None)
488
+ # Convert to GDAL timezone offset representation.
489
+ # GMT is represented as 100 and offsets are represented by adding /
490
+ # subtracting 1 for every 15 minutes different from GMT.
491
+ # https://gdal.org/development/rfc/rfc56_millisecond_precision.html#core-changes
492
+ # Convert each row offset to a signed multiple of 15m and add to GMT value
493
+ gdal_offset_representation = tz_offset // pd.Timedelta("15m") + 100
494
+ gdal_tz_offsets[name] = gdal_offset_representation.values
495
+ else:
496
+ values = col.values
497
+ if isinstance(values, pd.api.extensions.ExtensionArray):
498
+ from pandas.arrays import IntegerArray, FloatingArray, BooleanArray
499
+
500
+ if isinstance(values, (IntegerArray, FloatingArray, BooleanArray)):
501
+ field_data.append(values._data)
502
+ field_mask.append(values._mask)
503
+ else:
504
+ field_data.append(np.asarray(values))
505
+ field_mask.append(np.asarray(values.isna()))
506
+ else:
507
+ field_data.append(values)
508
+ field_mask.append(None)
509
+
510
+ # Determine geometry_type and/or promote_to_multi
511
+ if geometry_column is not None:
512
+ geometry_types_all = geometry.geom_type
513
+
514
+ if geometry_column is not None and (
515
+ geometry_type is None or promote_to_multi is None
516
+ ):
517
+ tmp_geometry_type = "Unknown"
518
+ has_z = False
519
+
520
+ # If there is data, infer layer geometry type + promote_to_multi
521
+ if not df.empty:
522
+ # None/Empty geometries sometimes report as Z incorrectly, so ignore them
523
+ has_z_arr = geometry[geometry.notna() & (~geometry.is_empty)].has_z
524
+ has_z = has_z_arr.any()
525
+ all_z = has_z_arr.all()
526
+
527
+ if driver in DRIVERS_NO_MIXED_DIMENSIONS and has_z and not all_z:
528
+ raise DataSourceError(
529
+ f"Mixed 2D and 3D coordinates are not supported by {driver}"
530
+ )
531
+
532
+ geometry_types = pd.Series(geometry_types_all.unique()).dropna().values
533
+ if len(geometry_types) == 1:
534
+ tmp_geometry_type = geometry_types[0]
535
+ if promote_to_multi and tmp_geometry_type in (
536
+ "Point",
537
+ "LineString",
538
+ "Polygon",
539
+ ):
540
+ tmp_geometry_type = f"Multi{tmp_geometry_type}"
541
+ elif len(geometry_types) == 2:
542
+ # Check if the types are corresponding multi + single types
543
+ if "Polygon" in geometry_types and "MultiPolygon" in geometry_types:
544
+ multi_type = "MultiPolygon"
545
+ elif (
546
+ "LineString" in geometry_types
547
+ and "MultiLineString" in geometry_types
548
+ ):
549
+ multi_type = "MultiLineString"
550
+ elif "Point" in geometry_types and "MultiPoint" in geometry_types:
551
+ multi_type = "MultiPoint"
552
+ else:
553
+ multi_type = None
554
+
555
+ # If they are corresponding multi + single types
556
+ if multi_type is not None:
557
+ if (
558
+ promote_to_multi is None
559
+ and driver in DRIVERS_NO_MIXED_SINGLE_MULTI
560
+ ):
561
+ promote_to_multi = True
562
+ if promote_to_multi:
563
+ tmp_geometry_type = multi_type
564
+
565
+ if geometry_type is None:
566
+ geometry_type = tmp_geometry_type
567
+ if has_z and geometry_type != "Unknown":
568
+ geometry_type = f"{geometry_type} Z"
569
+
570
+ crs = None
571
+ if geometry_column is not None and geometry.crs:
572
+ # TODO: this may need to be WKT1, due to issues
573
+ # if possible use EPSG codes instead
574
+ epsg = geometry.crs.to_epsg()
575
+ if epsg:
576
+ crs = f"EPSG:{epsg}" # noqa: E231
577
+ else:
578
+ crs = geometry.crs.to_wkt(WktVersion.WKT1_GDAL)
579
+
580
+ if use_arrow:
581
+ import pyarrow as pa
582
+ from pyogrio.raw import write_arrow
583
+
584
+ if geometry_column is not None:
585
+ # Convert to multi type
586
+ if promote_to_multi:
587
+ import shapely
588
+
589
+ mask_points = geometry_types_all == "Point"
590
+ mask_linestrings = geometry_types_all == "LineString"
591
+ mask_polygons = geometry_types_all == "Polygon"
592
+
593
+ if mask_points.any():
594
+ geometry[mask_points] = shapely.multipoints(
595
+ np.atleast_2d(geometry[mask_points]), axis=0
596
+ )
597
+
598
+ if mask_linestrings.any():
599
+ geometry[mask_linestrings] = shapely.multilinestrings(
600
+ np.atleast_2d(geometry[mask_linestrings]), axis=0
601
+ )
602
+
603
+ if mask_polygons.any():
604
+ geometry[mask_polygons] = shapely.multipolygons(
605
+ np.atleast_2d(geometry[mask_polygons]), axis=0
606
+ )
607
+
608
+ geometry = to_wkb(geometry.values)
609
+ df = df.copy(deep=False)
610
+ # convert to plain DataFrame to avoid warning from geopandas about
611
+ # writing non-geometries to the geometry column
612
+ df = pd.DataFrame(df, copy=False)
613
+ df[geometry_column] = geometry
614
+
615
+ table = pa.Table.from_pandas(df, preserve_index=False)
616
+
617
+ if geometry_column is not None:
618
+ # ensure that the geometry column is binary (for all-null geometries,
619
+ # this could be a wrong type)
620
+ geom_field = table.schema.field(geometry_column)
621
+ if not (
622
+ pa.types.is_binary(geom_field.type)
623
+ or pa.types.is_large_binary(geom_field.type)
624
+ ):
625
+ table = table.set_column(
626
+ table.schema.get_field_index(geometry_column),
627
+ geom_field.with_type(pa.binary()),
628
+ table[geometry_column].cast(pa.binary()),
629
+ )
630
+
631
+ write_arrow(
632
+ table,
633
+ path,
634
+ layer=layer,
635
+ driver=driver,
636
+ geometry_name=geometry_column,
637
+ geometry_type=geometry_type,
638
+ crs=crs,
639
+ encoding=encoding,
640
+ append=append,
641
+ dataset_metadata=dataset_metadata,
642
+ layer_metadata=layer_metadata,
643
+ metadata=metadata,
644
+ dataset_options=dataset_options,
645
+ layer_options=layer_options,
646
+ **kwargs,
647
+ )
648
+ return
649
+
650
+ # If there is geometry data, prepare it to be written
651
+ if geometry_column is not None:
652
+ geometry = to_wkb(geometry.values)
653
+
654
+ write(
655
+ path,
656
+ layer=layer,
657
+ driver=driver,
658
+ geometry=geometry,
659
+ field_data=field_data,
660
+ field_mask=field_mask,
661
+ fields=fields,
662
+ crs=crs,
663
+ geometry_type=geometry_type,
664
+ encoding=encoding,
665
+ promote_to_multi=promote_to_multi,
666
+ nan_as_null=nan_as_null,
667
+ append=append,
668
+ dataset_metadata=dataset_metadata,
669
+ layer_metadata=layer_metadata,
670
+ metadata=metadata,
671
+ dataset_options=dataset_options,
672
+ layer_options=layer_options,
673
+ gdal_tz_offsets=gdal_tz_offsets,
674
+ **kwargs,
675
+ )
pyogrio/proj_data/CH ADDED
@@ -0,0 +1,22 @@
1
+ # This init file provides definitions for CH1903 and CH1903/LV03
2
+ # projections using the distortion grids developed by Swisstopo.
3
+ # See: https://shop.swisstopo.admin.ch/en/products/geo_software/GIS_info
4
+ #
5
+ # You'll need to download the grids separately and put them in a directory
6
+ # scanned by libproj.
7
+ #
8
+ # Note that an independent effort was made to derive a usable grid
9
+ # from the CH1903->CH1903+ grid initially available from the Swisstopo
10
+ # website. You can read about this other effort here:
11
+ # http://lists.maptools.org/pipermail/proj/2012-February/006093.html
12
+ # It may be of interest because the latter was reported by some as being
13
+ # more accurate than the former:
14
+ # http://lists.maptools.org/pipermail/proj/2012-February/006119.html
15
+ #
16
+ # This init file uses the official one
17
+ #
18
+ <metadata> +origin=Swisstopo +lastupdate=2012-02-27
19
+ # CH1903/LV03
20
+ <1903_LV03> +proj=somerc +lat_0=46.95240555555556 +lon_0=7.439583333333333 +k_0=1 +x_0=600000 +y_0=200000 +ellps=bessel +units=m +nadgrids=CHENyx06_ETRS.gsb +no_defs
21
+ # CH1903
22
+ <1903> +proj=longlat +ellps=bessel +nadgrids=CHENyx06_ETRS.gsb +no_defs <>