diff -Nru fiona-1.7.10/appveyor.yml fiona-1.8.6/appveyor.yml
--- fiona-1.7.10/appveyor.yml 2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/appveyor.yml 2019-03-19 04:25:07.000000000 +0000
@@ -1,5 +1,4 @@
 # Based on appveyor.yml from https://github.com/PDAL/PDAL and https://github.com/ogrisel/python-appveyor-demo
-# platform: x64

@@ -14,13 +13,40 @@
   GDAL_HOME: "C:\\gdal"

   matrix:
-    # - PYTHON: "C:\\Python27.10-x64"
-    #   PYTHON_VERSION: "2.7.10"
-    #   PYTHON_ARCH: "64"
+    - PYTHON: "C:\\Python27-x64"
+      PYTHON_VERSION: "2.7.14"
+      PYTHON_ARCH: "64"
+      GDAL_VERSION: "1.11.4"
+      GIS_INTERNALS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3.zip"
+      GIS_INTERNALS_LIBS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3-libs.zip"
+
+    - PYTHON: "C:\\Python36-x64"
+      PYTHON_VERSION: "3.6.4"
+      PYTHON_ARCH: "64"
+      GDAL_VERSION: "1.11.4"
+      GIS_INTERNALS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3.zip"
+      GIS_INTERNALS_LIBS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3-libs.zip"
+
+    - PYTHON: "C:\\Python36-x64"
+      PYTHON_VERSION: "3.6.4"
+      PYTHON_ARCH: "64"
+      GDAL_VERSION: "2.2.3"
+      GIS_INTERNALS: "release-1911-x64-gdal-2-2-3-mapserver-7-0-7.zip"
+      GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-2-3-mapserver-7-0-7-libs.zip"
+
+    - PYTHON: "C:\\Python36-x64"
+      PYTHON_VERSION: "3.6.4"
+      PYTHON_ARCH: "64"
+      GDAL_VERSION: "2.3.0"
+      GIS_INTERNALS: "release-1911-x64-gdal-2-3-0-mapserver-7-0-7.zip"
+      GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-3-0-mapserver-7-0-7-libs.zip"

-    - PYTHON: "C:\\Python34-x64"
-      PYTHON_VERSION: "3.4.3"
+    - PYTHON: "C:\\Python36-x64"
+      PYTHON_VERSION: "3.6.4"
       PYTHON_ARCH: "64"
+      GDAL_VERSION: "2.4.0"
+      GIS_INTERNALS: "release-1911-x64-gdal-2-4-0-mapserver-7-2-2.zip"
+      GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-4-0-mapserver-7-2-2-libs.zip"

 install:
@@ -44,7 +70,7 @@
   - "python --version"
   - "python -c \"import struct; print(struct.calcsize('P') * 8)\""
-
+
   # https://code.google.com/p/pymat2/wiki/WindowsTips
   #- ps: (Get-Content "%VS90COMNTOOLS%\..\..\VC\vcvarsall.bat) | ForEach-Object { $_ -replace "vcvarsamd64.bat", "vcvars64.bat" } | Set-Content "%VS90COMNTOOLS%\..\..\VC\vcvarsall.bat
   # - '%CMD_IN_ENV% echo "conv env"'
@@ -57,31 +83,32 @@
   - ps: mkdir C:\build | out-null
   - ps: mkdir C:\gdal | out-null
-
-  - curl http://download.gisinternals.com/sdk/downloads/release-1600-x64-gdal-1-11-4-mapserver-6-4-3.zip --output gdalbin.zip
+
+  - curl http://download.gisinternals.com/sdk/downloads/%GIS_INTERNALS% --output gdalbin.zip
   - 7z x gdalbin.zip -oC:\gdal
-  - curl http://download.gisinternals.com/sdk/downloads/release-1600-x64-gdal-1-11-4-mapserver-6-4-3-libs.zip --output gdallibs.zip
+  - curl http://download.gisinternals.com/sdk/downloads/%GIS_INTERNALS_LIBS% --output gdallibs.zip
   - 7z x gdallibs.zip -oC:\gdal
   - "SET PATH=C:\\gdal;C:\\gdal\\bin;C:\\gdal\\data;C:\\gdal\\bin\\gdal\\apps;%PATH%"
   - "SET GDAL_DATA=C:\\gdal\\bin\\gdal-data"
+  - "SET PACKAGE_DATA=1"

   - ECHO "Filesystem C:/GDAL:"
   - ps: "ls \"C:/GDAL\""
-
-
+
+
   - cd C:\projects\fiona

   # Upgrade to the latest version of pip to avoid it displaying warnings
   # about it being out of date.
   # - "python -m pip install --disable-pip-version-check --user --upgrade pip"
-
+
   - pip --version
-
+
   # Install the build dependencies of the project. If some dependencies contain
   # compiled extensions and are not provided as pre-built wheel packages,
   # pip will build them from source using the MSVC compiler matching the
   # target Python version and architecture
   - "%CMD_IN_ENV% pip install -r requirements-dev.txt"
-
+
 build_script:
   # Build the compiled extension
@@ -89,11 +116,32 @@
   - cmd: echo %PYTHONPATH%

-  - "%CMD_IN_ENV% python setup.py build_ext -IC:\\gdal\\include -lgdal_i -LC:\\gdal\\lib install --gdalversion 1.11.4"
+  # copy gisinternal gdal libraries into .libs
+  - cmd: xcopy C:\gdal\bin\*.dll fiona\.libs\
+  - cmd: xcopy C:\gdal\*.rtf fiona\.libs\licenses\
+
+  # build fiona and create a wheel
+  - "%CMD_IN_ENV% python setup.py build_ext -IC:\\gdal\\include -lgdal_i -LC:\\gdal\\lib bdist_wheel --gdalversion %GDAL_VERSION%"
+
+  # install the wheel
+  - ps: python -m pip install --upgrade pip
+  - ps: python -m pip install --force-reinstall --ignore-installed (gci dist\*.whl | % { "$_" })
+  - ps: move fiona fiona.build

 test_script:
   # Run the project tests
   - cmd: SET
-  - "%CMD_IN_ENV% nosetests --exclude test_filter_vsi --exclude test_geopackage"
+  - ps: python -c "import fiona"
+
+  # Our Windows GDAL doesn't have iconv and can't support certain tests.
+  - "%CMD_IN_ENV% python -m pytest -m \"not iconv\" --cov fiona --cov-report term-missing"
+
+matrix:
+  allow_failures:
+    - PYTHON_VERSION: "2.7.14"
+
+artifacts:
+  - path: dist\*.whl
+    name: wheel
diff -Nru fiona-1.7.10/CHANGES.txt fiona-1.8.6/CHANGES.txt
--- fiona-1.7.10/CHANGES.txt 2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/CHANGES.txt 2019-03-19 04:25:07.000000000 +0000
@@ -3,6 +3,244 @@

 All issue numbers are relative to https://github.com/Toblerity/Fiona/issues.

+1.8.6 (2019-03-18)
+------------------
+
+- The advertisement for JSON driver enablement in 1.8.5 was false (#176), but
+  in this release they are ready for use.
+
+1.8.5 (2019-03-15)
+------------------
+
+- GDAL seems to work best if GDAL_DATA is set as early as possible. Ideally it
+  is set when building the library or in the environment before importing
+  Fiona, but for wheels we patch GDAL_DATA into os.environ when fiona.env
+  is imported. This resolves #731.
+- A combination of bugs which allowed .cpg files to be overlooked has been
+  fixed (#726).
+- On entering a collection context (Collection.__enter__) a new anonymous GDAL
+  environment is created if needed and entered. This makes `with
+  fiona.open(...) as collection:` roughly equivalent to `with fiona.open(...)
+  as collection, Env():`. This helps prevent bugs when Collections are created
+  and then used later or in different scopes.
+- Missing GDAL support for TopoJSON, GeoJSONSeq, and ESRIJSON has been enabled
+  (#721).
+- A regression in handling of polygons with M values (#724) has been fixed.
+- Per-feature debug logging calls in OGRFeatureBuilder methods have been
+  eliminated to improve feature writing performance (#718).
+- Native support for datasets in Google Cloud Storage identified by "gs"
+  resource names has been added (#709).
+- Support has been added for triangle, polyhedral surface, and TIN geometry
+  types (#679).
+- Notes about using the MemoryFile and ZipMemoryFile classes have been added
+  to the manual (#674).
+
+1.8.4 (2018-12-10)
+------------------
+
+- 3D geometries can now be transformed with a specified precision (#523).
+- A bug producing a spurious DriverSupportError for Shapefiles with a "time"
+  field (#692) has been fixed.
+- Patching of the GDAL_DATA environment variable was accidentally left in place
+  in 1.8.3 and now has been removed.
+
+1.8.3 (2018-11-30)
+------------------
+
+- The RASTERIO_ENV config environment marker this project picked up from
+  Rasterio has been renamed to FIONA_ENV (#665).
+- Options --gdal-data and --proj-data have been added to the fio-env command so
+  that users of Rasterio wheels can get paths to set GDAL_DATA and PROJ_LIB
+  environment variables.
+- The unsuccessful attempt to make GDAL and PROJ support file discovery and
+  configuration automatic within collection's crs and crs_wkt properties has
+  been reverted. Users must execute such code inside a `with Env()` block or
+  set the GDAL_DATA and PROJ_LIB environment variables needed by GDAL.
+
+1.8.2 (2018-11-19)
+------------------
+
+Bug fixes:
+
+- Raise FionaValueError when an iterator's __next__ is called and the session
+  is found to be missing or inactive instead of passing a null pointer to
+  OGR_L_GetNextFeature (#687).
+
+1.8.1 (2018-11-15)
+------------------
+
+Bug fixes:
+
+- Add checks around OSRGetAuthorityName and OSRGetAuthorityCode calls that will
+  log problems with looking up these items.
+- Opened data sources are now released before we raise exceptions in
+  WritingSession.start (#676). This fixes an issue with locked files on
+  Windows.
+- We now ensure that an Env instance exists when getting the crs or crs_wkt
+  properties of a Collection (#673, #690). Otherwise, required GDAL and PROJ
+  data files included in Fiona wheels can not be found.
+- GDAL and PROJ data search has been refactored to improve testability (#678).
+- In the project's Cython code, void* pointers have been replaced with proper
+  GDAL types (#672).
+- Pervasive warning level log messages about ENCODING creation options (#668)
+  have been eliminated.
+
+1.8.0 (2018-10-31)
+------------------
+
+This is the final 1.8.0 release. Thanks, everyone!
+
+Bug fixes:
+
+- We cpdef Session.stop so that it has a C version that can be called safely
+  from __dealloc__, fixing a PyPy issue (#659, #553).
+
+1.8rc1 (2018-10-26)
+-------------------
+
+There are no changes in 1.8rc1 other than more test standardization and the
+introduction of a temporary test_collection_legacy.py module to support the
+build of fully tested Python 2.7 macosx wheels on Travis-CI.
+
+1.8b2 (2018-10-23)
+------------------
+
+Bug fixes:
+
+- The ensure_env_with_credentials decorator will no longer clobber credentials
+  of the outer environment. This fixes a bug reported to the Rasterio project
+  and which also existed in Fiona.
+- An unused import of the packaging module and the dependency have been
+  removed (#653).
+- The Env class logged to the 'rasterio' hierarchy instead of 'fiona'. This
+  mistake has been corrected (#646).
+- The Mapping abstract base class is imported from collections.abc when
+  possible (#647).
+
+Refactoring:
+
+- Standardization of the tests on pytest functions and fixtures continues and
+  is nearing completion (#648, #649, #650, #651, #652).
+
+1.8b1 (2018-10-15)
+------------------
+
+Deprecations:
+
+- Collection slicing has been deprecated and will be prohibited in a future
+  version.
+
+Bug fixes:
+
+- Rasterio CRS objects passed to transform module methods will be converted
+  to dicts as needed (#590).
+- Implicitly convert curve geometries to their linear approximations rather
+  than failing (#617).
+- Migrated unittest test cases in test_collection.py and test_layer.py to the
+  use of the standard data_dir and path_coutwildrnp_shp fixtures (#616).
+- Root logger configuration has been removed from all test scripts (#615).
+- An AWS session is created for the CLI context Env only if explicitly
+  requested, matching the behavior of Rasterio's CLI (#635).
+- Dependency on attrs is made explicit.
+- Other dependencies are pinned to known good versions in requirements files.
+- Unused arguments have been removed from the Env constructor (#637).
+
+Refactoring:
+
+- A with_context_env decorator has been added and used to set up the GDAL
+  environment for CLI commands. The command functions themselves are now
+  simplified.
+
+1.8a3 (2018-10-01)
+------------------
+
+Deprecations:
+
+- The ``fiona.drivers()`` context manager is officially deprecated. All
+  users should switch to ``fiona.Env()``, which registers format drivers and
+  manages GDAL configuration in a reversible manner.
+
+Bug fixes:
+
+- The Collection class now filters log messages about skipped fields to
+  a maximum of one warning message per field (#627).
+- The boto3 module is only imported when needed (#507, #629).
+- Compatibility with Click 7.0 is achieved (#633).
+- Use of %r instead of %s in a debug() call prevents UnicodeDecodeErrors
+  (#620).
+
+1.8a2 (2018-07-24)
+------------------
+
+New features:
+
+- 64-bit integers are now the default for int type fields (#562, #564).
+- 'http', 's3', 'zip+http', and 'zip+s3' URI schemes for datasets are now
+  supported (#425, #426).
+- We've added a ``MemoryFile`` class which supports formatted in-memory
+  feature collections (#501).
+- Added support for GDAL 2.x boolean field sub-type (#531).
+- A new ``fio rm`` command makes it possible to cleanly remove multi-file
+  datasets (#538).
+- The geometry type in a feature collection is more flexible. We can now
+  specify not only a single geometry type, but a sequence of permissible types,
+  or "Any" to permit any geometry type (#539).
+- Support for GDAL 2.2+ null fields has been added (#554).
+- The new ``gdal_open_vector()`` function of our internal API provides much
+  improved error handling (#557).
+
+Bug fixes:
+
+- The bug involving OrderedDict import on Python 2.7 has been fixed (#533).
+- An ``AttributeError`` raised when the ``--bbox`` option of fio-cat is used
+  with more than one input file has been fixed (#543, #544).
+- Obsolete and derelict fiona.tool module has been removed.
+- Revert the change in 0a2bc7c that discards Z in geometry types when a
+  collection's schema is reported (#541).
+- Require six version 1.7 or higher (#550).
+- A regression related to "zip+s3" URIs has been fixed.
+- Debian's GDAL data locations are now searched by default (#583).
+
+1.8a1 (2017-11-06)
+------------------
+
+New features:
+
+- Each call of ``writerecords()`` involves one or more transactions of up to
+  20,000 features each. This improves performance when writing GeoPackage files
+  as the previous transaction size was only 200 features (#476, #491). A usage
+  sketch follows below.
+
+Packaging:
+
+- Fiona's Cython source files have been refactored so that there are no longer
+  separate extension modules for GDAL 1.x and GDAL 2.x. Instead there is a base
+  extension module based on GDAL 2.x and shim modules for installations that
+  use GDAL 1.x.
+
+1.7.11.post1 (2018-01-08)
+-------------------------
+
+- This post-release adds missing expat (and thereby GPX format) support to
+  the included GDAL library (still version 2.2.2).
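For context on the ``writerecords()`` change noted in 1.8a1 above, here is a
minimal usage sketch. The output path, schema, and generated records are
hypothetical; only the transaction batching behavior is taken from the
changelog entry.

.. code-block:: python

    import fiona
    from fiona.crs import from_epsg

    # Hypothetical schema and records for illustration.
    schema = {"geometry": "Point", "properties": {"name": "str"}}
    records = [
        {"geometry": {"type": "Point", "coordinates": (i * 0.001, 0.0)},
         "properties": {"name": "pt-{}".format(i)}}
        for i in range(25000)
    ]

    # writerecords() stages all records at once; per the 1.8a1 entry, they
    # are flushed in transactions of up to 20,000 features each (so the
    # 25,000 records here would span two transactions), which is much
    # faster for GeoPackage than the old 200-feature transactions.
    with fiona.open("/tmp/points.gpkg", "w", driver="GPKG",
                    crs=from_epsg(4326), schema=schema) as sink:
        sink.writerecords(records)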
+
+1.7.11 (2017-12-14)
+-------------------
+
+- The ``encoding`` keyword argument for ``fiona.open()``, which is intended
+  to allow a caller to override a data source's own and possibly erroneous
+  encoding, has not been working (#510, #512). The problem is that we weren't
+  always setting GDAL open or config options before opening the data sources.
+  This bug is resolved by a number of commits in the maint-1.7 branch and
+  the fix is demonstrated in tests/test_encoding.py.
+- An ``--encoding`` option has been added to fio-load to enable creation of
+  encoded shapefiles with an accompanying .cpg file (#499, #517).
+
+1.7.10.post1 (2017-10-30)
+-------------------------
+
+- A post-release has been made to fix a problem with macosx wheels uploaded
+  to PyPI.
+
 1.7.10 (2017-10-26)
 -------------------
diff -Nru fiona-1.7.10/CITATION.txt fiona-1.8.6/CITATION.txt
--- fiona-1.7.10/CITATION.txt 1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/CITATION.txt 2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,10 @@
+If you use Fiona for any published work, please cite it using the reference
+below:
+
+@Misc{,
+  author = {Sean Gillies and others},
+  organization = {Toblerity},
+  title = {Fiona is OGR's neat, nimble, no-nonsense API},
+  year = {2011--},
+  url = "https://github.com/Toblerity/Fiona"
+}
diff -Nru fiona-1.7.10/CREDITS.txt fiona-1.8.6/CREDITS.txt
--- fiona-1.7.10/CREDITS.txt 2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/CREDITS.txt 2019-03-19 04:25:07.000000000 +0000
@@ -4,35 +4,54 @@
 Fiona is written by:

 - Sean Gillies
-- Rene Buffat
+- René Buffat
+- Joshua Arnott
 - Kevin Wurster
-- Micah Cochran
+- Micah Cochran
 - Matthew Perry
-- Joshua Arnott
+- Elliott Sales de Andrade
 - Kelsey Jordahl
 - Patrick Young
 - Simon Norris
 - Hannes Gräuler
 - Johan Van de Wauw
 - Jacob Wasserman
-- Ryan Grout
 - Michael Weisman
-- fredj
+- Ryan Grout
 - Bas Couwenberg
 - Brendan Ward
+- Hannes
 - Michele Citterio
 - Miro Hrončok
+- Sid Kapur
+- Tim Tröndle
+- fredj
 - qinfeng
-- Michael Weisman
+- Ariel Nunez
+- Ariki
 - Brandon Liu
+- Chris Mutel
+- Denis Rykov
+- Efrén
+- Egor Fedorov
+- Even Rouault
+- Filipe Fernandes
+- Géraud
+- Hannes Gräuler
+- Jesse Crocker
+- Juan Luis Cano Rodríguez
 - Ludovic Delauné
 - Martijn Visser
-- Ariel Nunez
+- Matthew Perry
+- Michael Weisman
 - Oliver Tonnhofer
 - Stefano Costa
+- Stephane Poss
 - dimlev
 - wilsaj
-- Jesse Crocker
+
+The GeoPandas project (Joris Van den Bossche et al.) has been a major driver
+for new features in 1.8.0.

 Fiona would not be possible without the great work of Frank Warmerdam and
 other GDAL/OGR developers.
diff -Nru fiona-1.7.10/debian/changelog fiona-1.8.6/debian/changelog
--- fiona-1.7.10/debian/changelog 2017-12-17 21:09:05.000000000 +0000
+++ fiona-1.8.6/debian/changelog 2019-07-14 14:00:00.000000000 +0000
@@ -1,8 +1,127 @@
-fiona (1.7.10-1build1) bionic; urgency=medium
+fiona (1.8.6-1~bionic1) bionic; urgency=medium
+
+  * No change rebuild for GDAL 2.4.2 transition.

-  * Rebuild against new gdal-abi-2-2-3.
+ -- Angelos Tzotsos  Sun, 14 Jul 2019 16:00:00 +0200

- -- Gianfranco Costamagna  Sun, 17 Dec 2017 22:09:05 +0100
+fiona (1.8.6-1~bionic0) bionic; urgency=medium
+
+  * No change rebuild for Bionic.
+
+ -- Angelos Tzotsos  Thu, 02 May 2019 12:00:00 +0200
+
+fiona (1.8.6-1~exp1) experimental; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+  * Ignore test_data_paths.
+
+ -- Bas Couwenberg  Tue, 19 Mar 2019 06:50:58 +0100
+
+fiona (1.8.5-1~exp1) experimental; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+  * Bump Standards-Version to 4.3.0, no changes.
+  * Refresh patches.
+  * Add python{,3}-mock to build dependencies.
+
+ -- Bas Couwenberg  Sat, 16 Mar 2019 08:34:02 +0100
+
+fiona (1.8.4-1) unstable; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+
+ -- Bas Couwenberg  Tue, 11 Dec 2018 09:48:55 +0100
+
+fiona (1.8.3-1) unstable; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+
+ -- Bas Couwenberg  Sat, 01 Dec 2018 08:51:27 +0100
+
+fiona (1.8.2-1) unstable; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+
+ -- Bas Couwenberg  Tue, 20 Nov 2018 14:57:39 +0100
+
+fiona (1.8.1-1) unstable; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+  * Refresh patches.
+
+ -- Bas Couwenberg  Fri, 16 Nov 2018 06:56:52 +0100
+
+fiona (1.8.0-1) unstable; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+  * Bump Standards-Version to 4.2.1, no changes.
+  * Use pytest instead of nose.
+  * Refresh patches.
+  * Add python{,3}-{attr,boto3} to build dependencies.
+  * Don't remove fiona/ogrext.pyx in clean target.
+  * Update skipped tests for switch from nose to pytest.
+
+ -- Bas Couwenberg  Wed, 31 Oct 2018 20:19:57 +0100
+
+fiona (1.7.13-1) unstable; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+  * Use filter instead of findstring to prevent partial matches.
+  * Bump Standards-Version to 4.2.0, no changes.
+  * Drop autopkgtests to test installability & module import.
+  * Add lintian override for testsuite-autopkgtest-missing.
+  * Update watch file to use releases instead of tags.
+  * Refresh patches.
+  * Exclude test_feature from tests.
+  * Update watch file to limit matches to archive path.
+  * Fix incomplete-creative-commons-license issue.
+
+ -- Bas Couwenberg  Wed, 15 Aug 2018 18:20:14 +0200
+
+fiona (1.7.12-1) unstable; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+  * Bump Standards-Version to 4.1.5, no changes.
+  * Use Python 2 sphinx-build for python2 build explicitly.
+
+ -- Bas Couwenberg  Wed, 11 Jul 2018 15:19:46 +0200
+
+fiona (1.7.11-3) unstable; urgency=medium
+
+  * Team upload.
+  * Skip tests that fail with GDAL 2.3.0.
+
+ -- Bas Couwenberg  Thu, 24 May 2018 20:54:30 +0200
+
+fiona (1.7.11-2) unstable; urgency=medium
+
+  * Team upload.
+  * Skip test_listing tests, causes FTBFS on various architectures.
+  * Drop X-Python-Version field.
+
+ -- Bas Couwenberg  Sun, 06 May 2018 08:36:54 +0200
+
+fiona (1.7.11-1) unstable; urgency=medium
+
+  * Team upload.
+  * New upstream release.
+  * Strip trailing whitespace from changelog, control & rules files.
+  * Update copyright-format URL to use HTTPS.
+  * Update Vcs-* URLs for Salsa.
+  * Bump Standards-Version to 4.1.4, no changes.
+  * Add module import tests to autopkgtest configuration.
+  * Add upstream metadata.
+
+ -- Bas Couwenberg  Sat, 05 May 2018 20:58:47 +0200

 fiona (1.7.10-1) unstable; urgency=medium
@@ -216,7 +335,7 @@

 fiona (1.5.0-2) unstable; urgency=medium

-  * Initial upload, after FTP-master comment about missing license 
+  * Initial upload, after FTP-master comment about missing license
   * Add license for docs/manual.rst (CC-BY-3.0-US)
   * Fix compatibility with Cython 0.22
diff -Nru fiona-1.7.10/debian/control fiona-1.8.6/debian/control
--- fiona-1.7.10/debian/control 2017-11-01 20:58:16.000000000 +0000
+++ fiona-1.8.6/debian/control 2019-03-16 07:46:46.000000000 +0000
@@ -13,15 +13,19 @@
                python3-all,
                python-all-dev,
                python3-all-dev,
+               python-attr,
+               python3-attr,
+               python-boto3,
+               python3-boto3,
                python-click-plugins,
                python3-click-plugins,
                python-cligj,
                python3-cligj,
                python-enum34,
+               python-mock,
+               python3-mock,
                python-munch,
                python3-munch,
-               python-nose,
-               python3-nose,
                python-pytest,
                python3-pytest,
                python-setuptools,
@@ -30,11 +34,10 @@
                python3-six,
                python-sphinx,
                python3-sphinx
-Standards-Version: 4.1.1
-Vcs-Browser: https://anonscm.debian.org/cgit/pkg-grass/fiona.git
-Vcs-Git: https://anonscm.debian.org/git/pkg-grass/fiona.git
+Standards-Version: 4.3.0
+Vcs-Browser: https://salsa.debian.org/debian-gis-team/fiona
+Vcs-Git: https://salsa.debian.org/debian-gis-team/fiona.git
 Homepage: https://github.com/Toblerity/Fiona
-X-Python-Version: >= 2.5

 Package: python-fiona
 Architecture: any
@@ -97,7 +100,7 @@
 Section: doc
 Depends: ${sphinxdoc:Depends},
          ${misc:Depends}
-Description: Python API for reading/writing vector geospatial data (docs) 
+Description: Python API for reading/writing vector geospatial data (docs)
 Fiona is a Python wrapper around the OGR vector data abstraction library.
 Fiona is designed to be simple and dependable. It focuses on reading and
 writing data in standard Python IO style and relies upon familiar
diff -Nru fiona-1.7.10/debian/copyright fiona-1.8.6/debian/copyright
--- fiona-1.7.10/debian/copyright 2017-11-01 20:58:16.000000000 +0000
+++ fiona-1.8.6/debian/copyright 2018-08-15 17:29:18.000000000 +0000
@@ -1,4 +1,4 @@
-Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
 Upstream-Name: Fiona
 Source: https://github.com/Toblerity/Fiona
@@ -40,26 +40,26 @@
 License: BSD-3-Clause

 Files: tests/data/*
-Copyright: disclaimed 
+Copyright: disclaimed
 License: public-domain
  The coutwildrnp shapefile and all .txt files are extracts from the US
  National Map's 1:2M scale Wilderness Area boundaries [1] and are in the
  public domain.
  [1] http://nationalmap.gov/small_scale/atlasftp.html
  .
  In http://dds.cr.usgs.gov/pub/data/nationalatlas/wildrnp020_nt00174.tar.gz
-   wildrnp020.txt states: 
+   wildrnp020.txt states:
  "
  Use_Constraints:
-     None. Acknowledgment of the National Atlas of the United States of 
+     None. Acknowledgment of the National Atlas of the United States of
      America would be appreciated in products derived from these data."

 Files: docs/manual.rst
-Copyright: 2014-2015 Sean C. Gillies 
+Copyright: 2014-2015 Sean C. Gillies
 License: CC-BY-3.0-US

 License: BSD-3-Clause
  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions are met:
- . 
+ .
  * Redistributions of source code must retain the above copyright notice,
    this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright
@@ -68,7 +68,7 @@
  * Neither the name of the University of North Carolina nor the names of
    its contributors may be used to endorse or promote products derived
    from this software without specific prior written permission.
- . 
+ .
  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
  IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
  TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@@ -84,6 +84,14 @@
 License: CC-BY-3.0-US
  http://creativecommons.org/licenses/by/3.0/us/legalcode
  .
+ CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL
+ SERVICES. DISTRIBUTION OF THIS LICENSE DOES NOT CREATE AN ATTORNEY-CLIENT
+ RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN "AS-IS" BASIS.
+ CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE INFORMATION PROVIDED, AND
+ DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM ITS USE.
+ .
+ License
+ .
 THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE
 COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY
 COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS
diff -Nru fiona-1.7.10/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch fiona-1.8.6/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch
--- fiona-1.7.10/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch 2017-11-01 20:58:16.000000000 +0000
+++ fiona-1.8.6/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch 2019-03-16 07:33:36.000000000 +0000
@@ -9,7 +9,7 @@

 --- a/setup.py
 +++ b/setup.py
-@@ -251,7 +251,7 @@ setup_args = dict(
+@@ -309,7 +309,7 @@ setup_args = dict(
      packages=['fiona', 'fiona.fio'],
      entry_points='''
          [console_scripts]
diff -Nru fiona-1.7.10/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch fiona-1.8.6/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch
--- fiona-1.7.10/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch 2017-11-01 20:58:16.000000000 +0000
+++ fiona-1.8.6/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch 2018-10-31 17:40:50.000000000 +0000
@@ -8,13 +8,16 @@

 --- a/README.rst
 +++ b/README.rst
-@@ -4,12 +4,6 @@ Fiona
+@@ -4,15 +4,6 @@ Fiona

- Fiona is OGR's neat, nimble, no-nonsense API for Python programmers.
+ Fiona is OGR's neat and nimble API for Python programmers.

 -.. image:: https://travis-ci.org/Toblerity/Fiona.png?branch=master
 -   :target: https://travis-ci.org/Toblerity/Fiona
 -
+-.. image:: https://ci.appveyor.com/api/projects/status/github/Toblerity/Fiona?svg=true
+-   :target: https://ci.appveyor.com/project/sgillies/fiona/branch/master
+-
 -.. image:: https://coveralls.io/repos/Toblerity/Fiona/badge.png
 -   :target: https://coveralls.io/r/Toblerity/Fiona
 -
diff -Nru fiona-1.7.10/debian/patches/0006-Remove-unknown-distribution-options.patch fiona-1.8.6/debian/patches/0006-Remove-unknown-distribution-options.patch
--- fiona-1.7.10/debian/patches/0006-Remove-unknown-distribution-options.patch 2017-11-01 20:58:16.000000000 +0000
+++ fiona-1.8.6/debian/patches/0006-Remove-unknown-distribution-options.patch 2019-03-16 07:33:40.000000000 +0000
@@ -6,7 +6,7 @@

 --- a/setup.py
 +++ b/setup.py
-@@ -233,11 +233,8 @@ if sys.version_info < (3, 4):
+@@ -291,11 +291,8 @@ extras_require['all'] = list(set(it.chai
  setup_args = dict(
      cmdclass={'sdist': sdist_multi_gdal},
diff -Nru fiona-1.7.10/debian/rules fiona-1.8.6/debian/rules
--- fiona-1.7.10/debian/rules 2017-11-01 20:58:16.000000000 +0000
+++ fiona-1.8.6/debian/rules 2019-03-19 05:50:58.000000000 +0000
@@ -11,23 +11,43 @@
 DEB_BUILD_ARCH ?= $(shell dpkg-architecture -qDEB_BUILD_ARCH)

 export PYBUILD_NAME=fiona
-export PYBUILD_AFTER_BUILD_python2 = PYTHONPATH={build_dir} http_proxy='127.0.0.1:9' sphinx-build -N -bhtml -D today="$(BUILD_DATE)" docs/ build/html
-export PYBUILD_TEST_NOSE=1
+export PYBUILD_AFTER_BUILD_python2 = PYTHONPATH={build_dir} http_proxy='127.0.0.1:9' /usr/share/sphinx/scripts/python2/sphinx-build -N -bhtml -D today="$(BUILD_DATE)" docs/ build/html
+export PYBUILD_TEST_PYTEST=1
 export PYBUILD_BEFORE_TEST=cp -r {dir}/tests {build_dir}
 export PYBUILD_AFTER_TEST=rm -rf {build_dir}/tests
-#fio_load and cli are excluded as these only work after installation
-export PYBUILD_TEST_ARGS=--exclude test_geopackage --exclude fiona --exclude test_filter_vsi --exclude cli --exclude fio_load --exclude test_fio_info --exclude test_fio_ls --exclude test_unicode
+export PYBUILD_TEST_ARGS=--ignore tests/test_bytescollection.py \
+                         --ignore tests/test_collection.py \
+                         --ignore tests/test_data_paths.py \
+                         --ignore tests/test_feature.py \
+                         --ignore tests/test_filter_vsi.py \
+                         --ignore tests/test_fio_bounds.py \
+                         --ignore tests/test_fio_calc.py \
+                         --ignore tests/test_fio_cat.py \
+                         --ignore tests/test_fio_collect.py \
+                         --ignore tests/test_fio_distrib.py \
+                         --ignore tests/test_fio_dump.py \
+                         --ignore tests/test_fio_filter.py \
+                         --ignore tests/test_fio_info.py \
+                         --ignore tests/test_fio_load.py \
+                         --ignore tests/test_fio_ls.py \
+                         --ignore tests/test_fio_rm.py \
+                         --ignore tests/test_geopackage.py \
+                         --ignore tests/test_layer.py \
+                         --ignore tests/test_listing.py \
+                         --ignore tests/test_profile.py \
+                         --ignore tests/test_unicode.py \
+                         --ignore tests/test_vfs.py

 %:
	dh $@ --with python2,python3,sphinxdoc --buildsystem pybuild

 override_dh_clean:
	dh_clean
-	rm -rf fiona/*.so gdal-config.txt fiona/*.c VERSION.txt fiona/*.cpp Fiona.egg-info/ fiona/ogrext.pyx
+	rm -rf fiona/*.so gdal-config.txt fiona/*.c VERSION.txt fiona/*.cpp Fiona.egg-info/

 override_dh_auto_test:
 # Ignore test failures on problematic architectures only
-ifneq (,$(findstring $(DEB_BUILD_ARCH),"hurd-i386 kfreebsd-amd64 kfreebsd-i386 ppc64"))
+ifneq (,$(filter $(DEB_BUILD_ARCH),hurd-i386 kfreebsd-amd64 kfreebsd-i386 ppc64))
	dh_auto_test || echo "Ignoring test failures"
 else
	dh_auto_test
diff -Nru fiona-1.7.10/debian/source/lintian-overrides fiona-1.8.6/debian/source/lintian-overrides
--- fiona-1.7.10/debian/source/lintian-overrides 1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/debian/source/lintian-overrides 2018-08-15 17:25:55.000000000 +0000
@@ -0,0 +1,3 @@
+# Not worth the effort
+testsuite-autopkgtest-missing
+
diff -Nru fiona-1.7.10/debian/tests/control fiona-1.8.6/debian/tests/control
--- fiona-1.7.10/debian/tests/control 2017-11-01 20:58:16.000000000 +0000
+++ fiona-1.8.6/debian/tests/control 1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-# Test installability
-Depends: @
-Test-Command: /bin/true
diff -Nru fiona-1.7.10/debian/upstream/metadata fiona-1.8.6/debian/upstream/metadata
--- fiona-1.7.10/debian/upstream/metadata 1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/debian/upstream/metadata 2018-07-11 13:17:44.000000000 +0000
@@ -0,0 +1,6 @@
+---
+Bug-Database: https://github.com/Toblerity/Fiona/issues
+Bug-Submit: https://github.com/Toblerity/Fiona/issues/new
+Name: Fiona
+Repository: https://github.com/Toblerity/Fiona.git
+Repository-Browse: https://github.com/Toblerity/Fiona
diff -Nru fiona-1.7.10/debian/watch fiona-1.8.6/debian/watch
--- fiona-1.7.10/debian/watch 2017-11-01 20:58:16.000000000 +0000
+++ fiona-1.8.6/debian/watch 2018-08-15 16:42:13.000000000 +0000
@@ -3,5 +3,5 @@
 dversionmangle=s/\+(debian|dfsg|ds|deb)\d*$//,\
 uversionmangle=s/(\d)[_\.\-\+]?((RC|rc|pre|dev|b|beta|a|alpha)\d*)$/$1~$2/,\
 filenamemangle=s/(?:.*?\/)?(?:rel|r|v|fiona)?[\-\_]?(\d\S+)\.(tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz)))/fiona-$1.$2/ \
-https://github.com/Toblerity/Fiona/tags \
-(?:.*/)?(?:rel|r|v|fiona)?[\-\_]?(\d\S+)\.(?:tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz)))
+https://github.com/Toblerity/Fiona/releases \
+(?:.*/archive/)?(?:rel|r|v|fiona)?[\-\_]?(\d\S+)\.(?:tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz)))
diff -Nru fiona-1.7.10/docs/cli.rst fiona-1.8.6/docs/cli.rst
--- fiona-1.7.10/docs/cli.rst 2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/docs/cli.rst 2019-03-19 04:25:07.000000000 +0000
@@ -10,23 +10,27 @@
   Fiona command line interface.

   Options:
-    -v, --verbose  Increase verbosity.
-    -q, --quiet    Decrease verbosity.
-    --version      Show the version and exit.
-    --help         Show this message and exit.
+    -v, --verbose     Increase verbosity.
+    -q, --quiet       Decrease verbosity.
+    --version         Show the version and exit.
+    --gdal-version    Show the GDAL version and exit.
+    --python-version  Show the Python version and exit.
+    --help            Show this message and exit.

   Commands:
     bounds   Print the extent of GeoJSON objects
-    buffer   Buffer geometries on all sides by a fixed distance.
+    calc     Calculate GeoJSON property by Python expression
     cat      Concatenate and print the features of datasets
     collect  Collect a sequence of features.
-    distrib  Distribute features from a collection
+    distrib  Distribute features from a collection.
     dump     Dump a dataset to GeoJSON.
     env      Print information about the fio environment.
-    filter   Filter GeoJSON features by python expression
+    filter   Filter GeoJSON features by python expression.
     info     Print information about a dataset.
     insp     Open a dataset and start an interpreter.
     load     Load GeoJSON to a dataset in another format.
+    ls       List layers in a datasource.
+    rm       Remove a datasource or an individual layer.

 It is developed using the ``click`` package and is new in 1.1.6.
@@ -55,6 +59,28 @@
     > | fio bounds --with-id
     {"id": "0", "bbox": [0.735, 51.357216, 0.947778, 51.444717]}

+calc
+----
+
+New in 1.7b1
+
+The calc command creates a new property on GeoJSON features using the
+specified expression.
+
+The expression is evaluated in a restricted namespace containing 4 functions
+(`sum`, `pow`, `min`, `max`), the `math` module, the shapely `shape` function,
+type conversions (`bool`, `int`, `str`, `len`, `float`), and an object `f`
+representing the feature to be evaluated. This `f` object allows access in
+javascript-style dot notation for convenience.
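As a rough illustration of that dot-style access, the sketch below uses the
``munch`` package (which appears among the build dependencies earlier in this
diff) to wrap a plain feature mapping; this is an approximation for
illustration, not necessarily the exact implementation behind ``fio calc``.

.. code-block:: python

    from munch import munchify

    # A plain GeoJSON-like feature, wrapped so that nested keys become
    # attributes reachable with dot notation, like `f` in calc expressions.
    f = munchify({
        "type": "Feature",
        "properties": {"A": 1.5, "B": 2.5},
    })

    print(f.properties.A + f.properties.B)  # 4.0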
+
+The expression will be evaluated for each feature and its return value will be
+added to the properties as the specified property_name. Existing properties
+will not be overwritten by default (an `Exception` is raised).
+
+.. code-block:: console
+
+    $ fio cat data.shp | fio calc sumAB "f.properties.A + f.properties.B"
+
 cat
 ---
@@ -112,7 +138,7 @@
 ----

 The dump command reads a vector dataset and writes a GeoJSON feature collection
-to stdout. Its output can be piped to ``rio load`` (see below).
+to stdout. Its output can be piped to ``fio load`` (see below).

 .. code-block:: console
@@ -251,15 +277,28 @@
 If the expression evaluates to a "truthy" value, the feature is printed
 verbatim. Otherwise, the feature is excluded from the output.

-For example
+.. code-block:: console

-  fio cat data.shp \
-  | fio filter "f.properties.area > 1000.0" \
-  | fio collect > large_polygons.geojson
+    $ fio cat data.shp \
+    > | fio filter "f.properties.area > 1000.0" \
+    > | fio collect > large_polygons.geojson

 Would create a geojson file with only those features from `data.shp` where the
 area was over a given threshold.

+rm
+--
+The ``fio rm`` command deletes an entire datasource or a single layer in a
+multi-layer datasource. If the datasource is composed of multiple files
+(e.g. an ESRI Shapefile) all of the files will be removed.
+
+.. code-block:: console
+
+    $ fio rm countries.shp
+    $ fio rm --layer forests land_cover.gpkg
+
+New in 1.8.0.
+
 Coordinate Reference System Transformations
 -------------------------------------------
diff -Nru fiona-1.7.10/docs/fiona.fio.rst fiona-1.8.6/docs/fiona.fio.rst
--- fiona-1.7.10/docs/fiona.fio.rst 2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/docs/fiona.fio.rst 2019-03-19 04:25:07.000000000 +0000
@@ -124,6 +124,14 @@
     :undoc-members:
     :show-inheritance:

+fiona.fio.rm module
+-------------------
+
+.. automodule:: fiona.fio.rm
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
 Module contents
 ---------------
diff -Nru fiona-1.7.10/docs/manual.rst fiona-1.8.6/docs/manual.rst
--- fiona-1.7.10/docs/manual.rst 2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/docs/manual.rst 2019-03-19 04:25:07.000000000 +0000
@@ -119,7 +119,7 @@
     with fiona.open('docs/data/test_uk.shp', 'r') as source:

         # Copy the source schema and add two new properties.
-        sink_schema = source.schema.copy()
+        sink_schema = source.schema
         sink_schema['properties']['s_area'] = 'float'
         sink_schema['properties']['timestamp'] = 'datetime'
@@ -301,6 +301,8 @@
  'properties': OrderedDict([(u'CAT', 232.0), (u'FIPS_CNTRY', u'UK'), (u'CNTRY_NAME', u'United Kingdom'), (u'AREA', 244820.0), (u'POP_CNTRY', 60270708.0)]),
  'type': 'Feature'}

+Note that these indices are controlled by GDAL, and do not always follow Python conventions. They can start from 0, 1 (e.g. geopackages), or even other values, and have no guarantee of contiguity. Negative indices will only function correctly if indices start from 0 and are contiguous.
+
 Closing Files
 -------------
@@ -711,9 +713,10 @@
    Some files may contain vectors that are :dfn:`invalid` from a simple
    features standpoint due to accident (inadequate quality control on the
-   producer's end) or intention ("dirty" vectors saved to a file for special
-   treatment). Fiona doesn't sniff for or attempt to clean dirty data, so make
-   sure you're getting yours from a clean source.
+   producer's end), intention ("dirty" vectors saved to a file for special
+   treatment) or discrepancies of the numeric precision models (Fiona can't
+   handle fixed precision models yet). Fiona doesn't sniff for or attempt to
+   clean dirty data, so make sure you're getting yours from a clean source.

 Writing Vector Data
 ===================
@@ -813,16 +816,16 @@
 You may also call :py:meth:`flush` periodically to write the buffer contents
 to disk.

-Writing New Files
------------------
+Creating files of the same structure
+------------------------------------

 Writing a new file is more complex than appending to an existing file because
 the file CRS, format, and schema have not yet been defined and must be done so
 by the programmer. Still, it's not very complicated. A schema is just a
 mapping, as described above. A CRS is also just a mapping, and the possible
-formats are enumerated in the :py:attr:`fiona.drivers` list.
+formats are enumerated in the :py:attr:`fiona.supported_drivers` list.

-Copy the parameters of our demo file.
+Review the parameters of our demo file.

 .. sourcecode:: pycon

@@ -843,7 +846,7 @@
       'AREA': 'float:15.2',
       'POP_CNTRY': 'float:15.2'}}

-And now create a new file using them.
+We can create a new file using them.

 .. sourcecode:: pycon

@@ -898,6 +901,86 @@
     >>> source = fiona.open('docs/data/test_uk.shp')
     >>> sink = fiona.open('/tmp/foo.shp', 'w', **source.meta)

+Writing new files from scratch
+-------------------------------
+
+To write a new file from scratch we have to define our own specific driver,
+crs and schema.
+
+To ensure the order of the attribute fields is predictable, in both the schema
+and the actual manifestation as feature attributes, we will use ordered
+dictionaries.
+
+.. sourcecode:: pycon
+
+    >>> from collections import OrderedDict
+
+Consider the following record, structured in accordance with the `Python geo
+protocol `__, representing the Eiffel Tower using a point geometry with UTM
+coordinates in zone 31N.
+
+.. sourcecode:: pycon
+
+    >>> eiffel_tower = {
+    ...     'geometry': {
+    ...         'type': 'Point',
+    ...         'coordinates': (448252, 5411935)
+    ...     },
+    ...     'properties': OrderedDict([
+    ...         ('name', 'Eiffel Tower'),
+    ...         ('height', 300.01),
+    ...         ('view', 'scenic'),
+    ...         ('year', 1889)
+    ...     ])
+    ... }
+
+A corresponding schema could be:
+
+.. sourcecode:: pycon
+
+    >>> landmarks_schema = {
+    ...     'geometry': 'Point',
+    ...     'properties': OrderedDict([
+    ...         ('name', 'str'),
+    ...         ('height', 'float'),
+    ...         ('view', 'str'),
+    ...         ('year', 'int')
+    ...     ])
+    ... }
+
+The coordinate reference system of these landmark coordinates is ETRS89 / UTM
+zone 31N, which is referenced in the EPSG database as EPSG:25831.
+
+.. sourcecode:: pycon
+
+    >>> from fiona.crs import from_epsg
+    >>> landmarks_crs = from_epsg(25831)
+
+An appropriate driver could be:
+
+.. sourcecode:: pycon
+
+    >>> output_driver = "GeoJSON"
+
+Having specified schema, crs and driver, we are ready to open a file for
+writing our record:
+
+.. sourcecode:: pycon
+
+    >>> with fiona.open(
+    ...     '/tmp/foo.geojson',
+    ...     'w',
+    ...     driver=output_driver,
+    ...     crs=landmarks_crs,
+    ...     schema=landmarks_schema) as c:
+    ...     c.write(eiffel_tower)
+    ...
+
+    >>> import pprint
+    >>> with fiona.open('/tmp/foo.geojson') as source:
+    ...     for record in source:
+    ...         pprint.pprint(record)
+    {'geometry': {'coordinates': (448252.0, 5411935.0), 'type': 'Point'},
+     'id': '0',
+     'properties': OrderedDict([('name', 'Eiffel Tower'),
+                                ('height', 300.01),
+                                ('view', 'scenic'),
+                                ('year', 1889)]),
+     'type': 'Feature'}
+
 Ordering Record Fields
 ......................
@@ -954,6 +1037,81 @@
 Advanced Topics
 ===============

+OGR configuration options
+-------------------------
+
+GDAL/OGR has a large number of features that are controlled by global or
+thread-local configuration options. Fiona allows you to configure these options
+using a context manager, ``fiona.Env``. This class's constructor takes GDAL/OGR
+configuration options as keyword arguments. To see debugging information from
+GDAL/OGR, for example, you may do the following.
+
+.. sourcecode:: python
+
+    import logging
+
+    import fiona
+
+
+    logging.basicConfig(level=logging.DEBUG)
+
+    with fiona.Env(CPL_DEBUG=True):
+        fiona.open('tests/data/coutwildrnp.shp')
+
+The following extra messages will appear in the Python logger's output::
+
+    DEBUG:fiona._env:CPLE_None in GNM: GNMRegisterAllInternal
+    DEBUG:fiona._env:CPLE_None in GNM: RegisterGNMFile
+    DEBUG:fiona._env:CPLE_None in GNM: RegisterGNMdatabase
+    DEBUG:fiona._env:CPLE_None in GNM: GNMRegisterAllInternal
+    DEBUG:fiona._env:CPLE_None in GNM: RegisterGNMFile
+    DEBUG:fiona._env:CPLE_None in GNM: RegisterGNMdatabase
+    DEBUG:fiona._env:CPLE_None in GDAL: GDALOpen(tests/data/coutwildrnp.shp, this=0x1683930) succeeds as ESRI Shapefile.
+
+If you call ``fiona.open()`` with no surrounding ``Env`` environment, one will
+be created for you.
+
+When your program exits the environment's with block the configuration reverts
+to its previous state.
+
+Cloud storage credentials
+-------------------------
+
+One of the most important uses of ``fiona.Env`` is to set credentials for
+accessing data stored in AWS S3 or another cloud storage system.
+
+.. sourcecode:: python
+
+    from fiona.session import AWSSession
+    import fiona
+
+    with fiona.Env(
+        session=AWSSession(
+            aws_access_key_id="key",
+            aws_secret_access_key="secret",
+        )
+    ):
+        fiona.open("zip+s3://example-bucket/example.zip")
+
+The AWSSession class is currently the only credential session manager in Fiona.
+The source code has an example of how classes for other cloud storage providers
+may be implemented. AWSSession relies upon boto3 and botocore, which will be
+installed as extra dependencies of Fiona if you run ``pip install fiona[s3]``.
+
+If you call ``fiona.open()`` with no surrounding ``Env`` and pass a path to an
+S3 object, a session will be created for you using code equivalent to the
+following code.
+
+.. sourcecode:: python
+
+    import boto3
+
+    from fiona.session import AWSSession
+    import fiona
+
+    with fiona.Env(session=AWSSession(boto3.Session())):
+        fiona.open('zip+s3://fiona-testing/coutwildrnp.zip')
+
 Slicing and masking iterators
 -----------------------------
diff -Nru fiona-1.7.10/environment.yml fiona-1.8.6/environment.yml
--- fiona-1.7.10/environment.yml 1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/environment.yml 2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,8 @@
+name: _fiona
+channels:
+- defaults
+- conda-forge
+dependencies:
+- python>=3.5
+- cython
+- libgdal
diff -Nru fiona-1.7.10/fiona/collection.py fiona-1.8.6/fiona/collection.py
--- fiona-1.7.10/fiona/collection.py 2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/fiona/collection.py 2019-03-19 04:25:07.000000000 +0000
@@ -5,15 +5,17 @@
 import os
 import warnings

-from fiona import compat
+from fiona import compat, vfs
 from fiona.ogrext import Iterator, ItemsIterator, KeysIterator
 from fiona.ogrext import Session, WritingSession
-from fiona.ogrext import (
-    calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name)
-from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file
-from fiona.errors import DriverError, SchemaError, CRSError
-from fiona._drivers import driver_count, GDALEnv
+from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file, GEOMETRY_TYPES
+from fiona.errors import (DriverError, SchemaError, CRSError, UnsupportedGeometryTypeError, DriverSupportError)
+from fiona.logutils import FieldSkipLogFilter
+from fiona._env import get_gdal_release_name, get_gdal_version_tuple
+from fiona.env import env_ctx_if_needed
+from fiona.errors import FionaDeprecationWarning
 from fiona.drvsupport import supported_drivers
+from fiona.path import Path, vsi_path, parse_path
 from six import string_types, binary_type

@@ -31,7 +33,9 @@

     def __init__(self, path, mode='r', driver=None, schema=None, crs=None,
                  encoding=None, layer=None, vsi=None, archive=None,
-                 enabled_drivers=None, crs_wkt=None, **kwargs):
+                 enabled_drivers=None, crs_wkt=None, ignore_fields=None,
+                 ignore_geometry=False,
+                 **kwargs):

         """The required ``path`` is the absolute or relative path to
         a file, such as '/data/test_uk.shp'. In ``mode`` 'r', data can
@@ -48,7 +52,7 @@
         options.
""" - if not isinstance(path, string_types): + if not isinstance(path, (string_types, Path)): raise TypeError("invalid path: %r" % path) if not isinstance(mode, string_types) or mode not in ('r', 'w', 'a'): raise TypeError("invalid mode: %r" % mode) @@ -65,14 +69,13 @@ if layer and not isinstance(layer, tuple(list(string_types) + [int])): raise TypeError("invalid name: %r" % layer) if vsi: - if not isinstance(vsi, string_types) or vsi not in ('zip', 'tar', 'gzip'): + if not isinstance(vsi, string_types) or not vfs.valid_vsi(vsi): raise TypeError("invalid vsi: %r" % vsi) if archive and not isinstance(archive, string_types): raise TypeError("invalid archive: %r" % archive) # Check GDAL version against drivers - if (driver == "GPKG" and - get_gdal_version_num() < calc_gdal_version_num(1, 11, 0)): + if (driver == "GPKG" and get_gdal_version_tuple() < (1, 11, 0)): raise DriverError( "GPKG driver requires GDAL 1.11.0, fiona was compiled " "against: {}".format(get_gdal_release_name())) @@ -87,19 +90,26 @@ self._crs_wkt = None self.env = None self.enabled_drivers = enabled_drivers + self.ignore_fields = ignore_fields + self.ignore_geometry = bool(ignore_geometry) - self.path = vsi_path(path, vsi, archive) + if vsi: + self.path = vfs.vsi_path(path, vsi, archive) + path = parse_path(self.path) + else: + path = parse_path(path) + self.path = vsi_path(path) if mode == 'w': if layer and not isinstance(layer, string_types): - raise ValueError("in 'r' mode, layer names must be strings") + raise ValueError("in 'w' mode, layer names must be strings") if driver == 'GeoJSON': if layer is not None: raise ValueError("the GeoJSON format does not have layers") self.name = 'OgrGeoJSON' # TODO: raise ValueError as above for other single-layer formats. else: - self.name = layer or os.path.basename(os.path.splitext(path)[0]) + self.name = layer or os.path.basename(os.path.splitext(path.path)[0]) else: if layer in (0, None): self.name = 0 @@ -129,6 +139,8 @@ raise SchemaError("schema lacks: geometry") self._schema = schema + self._check_schema_driver_support() + if crs_wkt: self._crs_wkt = crs_wkt elif crs: @@ -137,20 +149,14 @@ else: raise CRSError("crs lacks init or proj parameter") - if driver_count == 0: - # create a local manager and enter - self.env = GDALEnv() - else: - self.env = GDALEnv() - self.env.__enter__() - self._driver = driver + kwargs.update(encoding=encoding) self.encoding = encoding try: if self.mode == 'r': self.session = Session() - self.session.start(self) + self.session.start(self, **kwargs) elif self.mode in ('a', 'w'): self.session = WritingSession() self.session.start(self, **kwargs) @@ -160,8 +166,11 @@ if self.session is not None: self.guard_driver_mode() - if not self.encoding: - self.encoding = self.session.get_fileencoding().lower() + + if self.mode in ("a", "w"): + self._valid_geom_types = _get_valid_geom_types(self.schema, self.driver) + + self.field_skip_log_filter = FieldSkipLogFilter() def __repr__(self): return "<%s Collection '%s', mode '%s' at %s>" % ( @@ -316,7 +325,7 @@ """Returns next record from iterator.""" warnings.warn("Collection.__next__() is buggy and will be removed in " "Fiona 2.0. 
Switch to `next(iter(collection))`.", - DeprecationWarning, stacklevel=2) + FionaDeprecationWarning, stacklevel=2) if not self.iterator: iter(self) return next(self.iterator) @@ -326,6 +335,9 @@ def __getitem__(self, item): return self.session.__getitem__(item) + def get(self, item): + return self.session.get(item) + def writerecords(self, records): """Stages multiple records for writing to disk.""" if self.closed: @@ -387,6 +399,35 @@ self._bounds = self.session.get_extent() return self._bounds + def _check_schema_driver_support(self): + """Check support for the schema against the driver + + See GH#572 for discussion. + """ + gdal_version_major = get_gdal_version_tuple().major + + for field in self._schema["properties"].values(): + field_type = field.split(":")[0] + if self._driver == "ESRI Shapefile": + if field_type == "datetime": + raise DriverSupportError("ESRI Shapefile does not support datetime fields") + elif field_type == "time": + raise DriverSupportError("ESRI Shapefile does not support time fields") + elif self._driver == "GPKG": + if field_type == "time": + raise DriverSupportError("GPKG does not support time fields") + elif gdal_version_major == 1: + if field_type == "datetime": + raise DriverSupportError("GDAL 1.x GPKG driver does not support datetime fields") + elif self._driver == "GeoJSON": + if gdal_version_major == 1: + if field_type == "date": + warnings.warn("GeoJSON driver in GDAL 1.x silently converts date to string in non-standard format") + elif field_type == "datetime": + warnings.warn("GeoJSON driver in GDAL 1.x silently converts datetime to string in non-standard format") + elif field_type == "time": + warnings.warn("GeoJSON driver in GDAL 1.x silently converts time to string") + def flush(self): """Flush the buffer.""" if self.session is not None: @@ -398,7 +439,7 @@ def close(self): """In append or write mode, flushes data to disk, then ends access.""" - if self.session is not None: + if self.session is not None and self.session.isactive(): if self.mode in ('a', 'w'): self.flush() log.debug("Flushed buffer") @@ -415,9 +456,14 @@ return self.session is None def __enter__(self): + logging.getLogger('fiona.ogrext').addFilter(self.field_skip_log_filter) + self._env = env_ctx_if_needed() + self._env.__enter__() return self def __exit__(self, type, value, traceback): + logging.getLogger('fiona.ogrext').removeFilter(self.field_skip_log_filter) + self._env.__exit__() self.close() def __del__(self): @@ -426,6 +472,36 @@ self.close() +ALL_GEOMETRY_TYPES = set([ + geom_type for geom_type in GEOMETRY_TYPES.values() + if "3D " not in geom_type and geom_type != "None"]) +ALL_GEOMETRY_TYPES.add("None") + + +def _get_valid_geom_types(schema, driver): + """Returns a set of geometry types the schema will accept""" + schema_geom_type = schema["geometry"] + if isinstance(schema_geom_type, string_types) or schema_geom_type is None: + schema_geom_type = (schema_geom_type,) + valid_types = set() + for geom_type in schema_geom_type: + geom_type = str(geom_type).lstrip("3D ") + if geom_type == "Unknown" or geom_type == "Any": + valid_types.update(ALL_GEOMETRY_TYPES) + else: + if geom_type not in ALL_GEOMETRY_TYPES: + raise UnsupportedGeometryTypeError(geom_type) + valid_types.add(geom_type) + + # shapefiles don't differentiate between single/multi geometries, except points + if driver == "ESRI Shapefile" and "Point" not in valid_types: + for geom_type in list(valid_types): + if not geom_type.startswith("Multi"): + valid_types.add("Multi" + geom_type) + + return valid_types + + def 
get_filetype(bytesbuf): """Detect compression type of bytesbuf. @@ -481,17 +557,3 @@ self.path + ":" + str(self.name), self.mode, hex(id(self))) - - -def vsi_path(path, vsi=None, archive=None): - # If a VSF and archive file are specified, we convert the path to - # an OGR VSI path (see cpl_vsi.h). - if vsi: - if archive: - result = '/vsi{0}/{1}{2}'.format(vsi, archive, path) - else: - result = '/vsi{0}/{1}'.format(vsi, path) - else: - result = path - - return result diff -Nru fiona-1.7.10/fiona/compat.py fiona-1.8.6/fiona/compat.py --- fiona-1.7.10/fiona/compat.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/compat.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,12 +1,26 @@ +import sys import collections -from six.moves import UserDict + try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict +if sys.version_info[0] >= 3: + from urllib.parse import urlparse + from collections import UserDict + from inspect import getfullargspec as getargspec +else: + from urlparse import urlparse + from UserDict import UserDict + from inspect import getargspec + +if sys.version_info >= (3, 3): + from collections.abc import Mapping +else: + from collections import Mapping # Users can pass in objects that subclass a few different objects # More specifically, rasterio has a CRS() class that subclasses UserDict() # In Python 2 UserDict() is in its own module and does not subclass Mapping() -DICT_TYPES = (dict, collections.Mapping, UserDict) +DICT_TYPES = (dict, Mapping, UserDict) diff -Nru fiona-1.7.10/fiona/_csl.pxd fiona-1.8.6/fiona/_csl.pxd --- fiona-1.7.10/fiona/_csl.pxd 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/_csl.pxd 2019-03-19 04:25:07.000000000 +0000 @@ -1,5 +1,6 @@ # String API functions. cdef extern from "cpl_string.h": + char ** CSLAddNameValue (char **list, char *name, char *value) char ** CSLSetNameValue (char **list, char *name, char *value) void CSLDestroy (char **list) diff -Nru fiona-1.7.10/fiona/_drivers.pyx fiona-1.8.6/fiona/_drivers.pyx --- fiona-1.7.10/fiona/_drivers.pyx 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/_drivers.pyx 2019-03-19 04:25:07.000000000 +0000 @@ -18,7 +18,8 @@ cdef extern from "cpl_error.h": - void CPLSetErrorHandler (void *handler) + ctypedef void (*CPLErrorHandler)(int, int, const char*); + void CPLSetErrorHandler (CPLErrorHandler handler) cdef extern from "gdal.h": @@ -28,6 +29,7 @@ void * GDALGetDriver(int i) const char * GDALGetDriverShortName(void *driver) const char * GDALGetDriverLongName(void *driver) + const char * GDALVersionInfo(const char *key) cdef extern from "ogr_api.h": @@ -41,7 +43,7 @@ const char * OGR_Dr_GetName(void *driver) -log = logging.getLogger('Fiona') +log = logging.getLogger(__name__) class NullHandler(logging.Handler): def emit(self, record): pass @@ -104,22 +106,40 @@ GDALAllRegister() if OGRGetDriverCount() == 0: OGRRegisterAll() - CPLSetErrorHandler(errorHandler) + CPLSetErrorHandler(errorHandler) if OGRGetDriverCount() == 0: raise ValueError("Drivers not registered") if 'GDAL_DATA' in os.environ: log.debug("GDAL_DATA: %s", os.environ['GDAL_DATA']) else: + # We will try a few well-known paths, starting with the + # official wheel path. whl_datadir = os.path.abspath( os.path.join(os.path.dirname(__file__), "gdal_data")) - share_datadir = os.path.join(sys.prefix, 'share/gdal') + fhs_share_datadir = os.path.join(sys.prefix, 'share/gdal') + + # Debian supports multiple GDAL installs. 
+ gdal_release_name = GDALVersionInfo("RELEASE_NAME") + gdal_release_name = gdal_release_name.decode('utf-8') + deb_share_datadir = os.path.join( + fhs_share_datadir, + "{}.{}".format(*gdal_release_name.split('.')[:2])) + + # If we find GDAL data at the well-known paths, we will + # add a GDAL_DATA key to the config options dict. if os.path.exists(os.path.join(whl_datadir, 'pcs.csv')): - os.environ['GDAL_DATA'] = whl_datadir log.debug("Set GDAL_DATA = %r", whl_datadir) - elif os.path.exists(os.path.join(share_datadir, 'pcs.csv')): - os.environ['GDAL_DATA'] = share_datadir - log.debug("Set GDAL_DATA = %r", share_datadir) + self.options['GDAL_DATA'] = whl_datadir + + elif os.path.exists(os.path.join(deb_share_datadir, 'pcs.csv')): + log.debug("Set GDAL_DATA = %r", deb_share_datadir) + self.options['GDAL_DATA'] = deb_share_datadir + + elif os.path.exists(os.path.join(fhs_share_datadir, 'pcs.csv')): + log.debug("Set GDAL_DATA = %r", fhs_share_datadir) + self.options['GDAL_DATA'] = fhs_share_datadir + else: log.warning("GDAL data files not located, GDAL_DATA not set") @@ -129,12 +149,15 @@ whl_datadir = os.path.abspath( os.path.join(os.path.dirname(__file__), "proj_data")) share_datadir = os.path.join(sys.prefix, 'share/proj') + if os.path.exists(whl_datadir): - os.environ['PROJ_LIB'] = whl_datadir log.debug("Set PROJ_LIB = %r", whl_datadir) + os.environ['PROJ_LIB'] = whl_datadir + elif os.path.exists(share_datadir): - os.environ['PROJ_LIB'] = share_datadir log.debug("Set PROJ_LIB = %r", share_datadir) + os.environ['PROJ_LIB'] = share_datadir + else: log.warning("PROJ data files not located, PROJ_LIB not set") @@ -147,7 +170,11 @@ val_b = ('ON' if val else 'OFF').encode('utf-8') val_c = val_b CPLSetThreadLocalConfigOption(key_c, val_c) - log.debug("Option %s=%s", key, CPLGetConfigOption(key_c, NULL)) + # Logging of config options has been disabled to prevent + # credential leakage and will be completely + # removed as soon as the Fiona driver environment and + # AWS authentication interactions are stable. + # log.debug("Option %s=%s", key, CPLGetConfigOption(key_c, NULL)) return self def stop(self): diff -Nru fiona-1.7.10/fiona/drvsupport.py fiona-1.8.6/fiona/drvsupport.py --- fiona-1.7.10/fiona/drvsupport.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/drvsupport.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from fiona._drivers import GDALEnv +from fiona.env import Env # Here is the list of available drivers as (name, modes) tuples. Currently, @@ -11,143 +11,148 @@ # out the multi-layer formats. 
supported_drivers = dict([ -#OGR Vector Formats -#Format Name Code Creation Georeferencing Compiled by default -#Aeronav FAA files AeronavFAA No Yes Yes + # OGR Vector Formats + # Format Name Code Creation Georeferencing Compiled by default + # Aeronav FAA files AeronavFAA No Yes Yes ("AeronavFAA", "r"), -#ESRI ArcObjects ArcObjects No Yes No, needs ESRI ArcObjects -#Arc/Info Binary Coverage AVCBin No Yes Yes -# multi-layer -# ("AVCBin", "r"), -#Arc/Info .E00 (ASCII) Coverage AVCE00 No Yes Yes -# multi-layer -# ("AVCE00", "r"), -#Arc/Info Generate ARCGEN No No Yes + # ESRI ArcObjects ArcObjects No Yes No, needs ESRI ArcObjects + # Arc/Info Binary Coverage AVCBin No Yes Yes + # multi-layer + # ("AVCBin", "r"), + # Arc/Info .E00 (ASCII) Coverage AVCE00 No Yes Yes + # multi-layer + # ("AVCE00", "r"), + # Arc/Info Generate ARCGEN No No Yes ("ARCGEN", "r"), -#Atlas BNA BNA Yes No Yes + # Atlas BNA BNA Yes No Yes ("BNA", "raw"), -#AutoCAD DWG DWG No No No -#AutoCAD DXF DXF Yes No Yes + # AutoCAD DWG DWG No No No + # AutoCAD DXF DXF Yes No Yes ("DXF", "raw"), -#Comma Separated Value (.csv) CSV Yes No Yes -#CouchDB / GeoCouch CouchDB Yes Yes No, needs libcurl -#DODS/OPeNDAP DODS No Yes No, needs libdap -#EDIGEO EDIGEO No Yes Yes -# multi-layer? Hard to tell from the OGR docs -# ("EDIGEO", "r"), -#ElasticSearch ElasticSearch Yes (write-only) - No, needs libcurl -#ESRI FileGDB FileGDB Yes Yes No, needs FileGDB API library -# multi-layer + # Comma Separated Value (.csv) CSV Yes No Yes + ("CSV", "raw"), + # CouchDB / GeoCouch CouchDB Yes Yes No, needs libcurl + # DODS/OPeNDAP DODS No Yes No, needs libdap + # EDIGEO EDIGEO No Yes Yes + # multi-layer? Hard to tell from the OGR docs + # ("EDIGEO", "r"), + # ElasticSearch ElasticSearch Yes (write-only) - No, needs libcurl + # ESRI FileGDB FileGDB Yes Yes No, needs FileGDB API library + # multi-layer ("FileGDB", "raw"), ("OpenFileGDB", "r"), -#ESRI Personal GeoDatabase PGeo No Yes No, needs ODBC library -#ESRI ArcSDE SDE No Yes No, needs ESRI SDE -#ESRI Shapefile ESRI Shapefile Yes Yes Yes + # ESRI Personal GeoDatabase PGeo No Yes No, needs ODBC library + # ESRI ArcSDE SDE No Yes No, needs ESRI SDE + # ESRIJSON ESRIJSON No Yes Yes + ("ESRIJSON", "r"), + # ESRI Shapefile ESRI Shapefile Yes Yes Yes ("ESRI Shapefile", "raw"), -#FMEObjects Gateway FMEObjects Gateway No Yes No, needs FME -#GeoJSON GeoJSON Yes Yes Yes + # FMEObjects Gateway FMEObjects Gateway No Yes No, needs FME + # GeoJSON GeoJSON Yes Yes Yes ("GeoJSON", "rw"), -#Géoconcept Export Geoconcept Yes Yes Yes -# multi-layers -# ("Geoconcept", "raw"), -#Geomedia .mdb Geomedia No No No, needs ODBC library -#GeoPackage GPKG Yes Yes No, needs libsqlite3 + # GeoJSONSeq GeoJSON sequences Yes Yes Yes + ("GeoJSONSeq", "rw"), + # Géoconcept Export Geoconcept Yes Yes Yes + # multi-layers + # ("Geoconcept", "raw"), + # Geomedia .mdb Geomedia No No No, needs ODBC library + # GeoPackage GPKG Yes Yes No, needs libsqlite3 ("GPKG", "rw"), -#GeoRSS GeoRSS Yes Yes Yes (read support needs libexpat) -#Google Fusion Tables GFT Yes Yes No, needs libcurl -#GML GML Yes Yes Yes (read support needs Xerces or libexpat) -#GMT GMT Yes Yes Yes + # GeoRSS GeoRSS Yes Yes Yes (read support needs libexpat) + # Google Fusion Tables GFT Yes Yes No, needs libcurl + # GML GML Yes Yes Yes (read support needs Xerces or libexpat) + ("GML", "raw"), + # GMT GMT Yes Yes Yes ("GMT", "raw"), -#GPSBabel GPSBabel Yes Yes Yes (needs GPSBabel and GPX driver) -#GPX GPX Yes Yes Yes (read support needs libexpat) + # GPSBabel GPSBabel Yes Yes Yes 
(needs GPSBabel and GPX driver) + # GPX GPX Yes Yes Yes (read support needs libexpat) ("GPX", "raw"), -#GRASS GRASS No Yes No, needs libgrass -#GPSTrackMaker (.gtm, .gtz) GPSTrackMaker Yes Yes Yes + # GRASS GRASS No Yes No, needs libgrass + # GPSTrackMaker (.gtm, .gtz) GPSTrackMaker Yes Yes Yes ("GPSTrackMaker", "raw"), -#Hydrographic Transfer Format HTF No Yes Yes -# TODO: Fiona is not ready for multi-layer formats: ("HTF", "r"), -#Idrisi Vector (.VCT) Idrisi No Yes Yes + # Hydrographic Transfer Format HTF No Yes Yes + # TODO: Fiona is not ready for multi-layer formats: ("HTF", "r"), + # Idrisi Vector (.VCT) Idrisi No Yes Yes ("Idrisi", "r"), -#Informix DataBlade IDB Yes Yes No, needs Informix DataBlade -#INTERLIS "Interlis 1" and "Interlis 2" Yes Yes No, needs Xerces (INTERLIS model reading needs ili2c.jar) -#INGRES INGRES Yes No No, needs INGRESS -#KML KML Yes Yes Yes (read support needs libexpat) -#LIBKML LIBKML Yes Yes No, needs libkml -#Mapinfo File MapInfo File Yes Yes Yes + # Informix DataBlade IDB Yes Yes No, needs Informix DataBlade + # INTERLIS "Interlis 1" and "Interlis 2" Yes Yes No, needs Xerces (INTERLIS model reading needs ili2c.jar) + # INGRES INGRES Yes No No, needs INGRESS + # KML KML Yes Yes Yes (read support needs libexpat) + # LIBKML LIBKML Yes Yes No, needs libkml + # Mapinfo File MapInfo File Yes Yes Yes ("MapInfo File", "raw"), -#Microstation DGN DGN Yes No Yes + # Microstation DGN DGN Yes No Yes ("DGN", "raw"), -#Access MDB (PGeo and Geomedia capable) MDB No Yes No, needs JDK/JRE -#Memory Memory Yes Yes Yes -#MySQL MySQL No Yes No, needs MySQL library -#NAS - ALKIS NAS No Yes No, needs Xerces -#Oracle Spatial OCI Yes Yes No, needs OCI library -#ODBC ODBC No Yes No, needs ODBC library -#MS SQL Spatial MSSQLSpatial Yes Yes No, needs ODBC library -#Open Document Spreadsheet ODS Yes No No, needs libexpat -#OGDI Vectors (VPF, VMAP, DCW) OGDI No Yes No, needs OGDI library -#OpenAir OpenAir No Yes Yes -# multi-layer -# ("OpenAir", "r"), -#PCI Geomatics Database File PCIDSK No No Yes, using internal PCIDSK SDK (from GDAL 1.7.0) + # Access MDB (PGeo and Geomedia capable) MDB No Yes No, needs JDK/JRE + # Memory Memory Yes Yes Yes + # MySQL MySQL No Yes No, needs MySQL library + # NAS - ALKIS NAS No Yes No, needs Xerces + # Oracle Spatial OCI Yes Yes No, needs OCI library + # ODBC ODBC No Yes No, needs ODBC library + # MS SQL Spatial MSSQLSpatial Yes Yes No, needs ODBC library + # Open Document Spreadsheet ODS Yes No No, needs libexpat + # OGDI Vectors (VPF, VMAP, DCW) OGDI No Yes No, needs OGDI library + # OpenAir OpenAir No Yes Yes + # multi-layer + # ("OpenAir", "r"), + # PCI Geomatics Database File PCIDSK No No Yes, using internal PCIDSK SDK (from GDAL 1.7.0) ("PCIDSK", "r"), -#PDS PDS No Yes Yes + # PDS PDS No Yes Yes ("PDS", "r"), -#PGDump PostgreSQL SQL dump Yes Yes Yes -#PostgreSQL/PostGIS PostgreSQL/PostGIS Yes Yes No, needs PostgreSQL client library (libpq) -#EPIInfo .REC REC No No Yes -#S-57 (ENC) S57 No Yes Yes -# multi-layer -# ("S57", "r"), -#SDTS SDTS No Yes Yes -# multi-layer -# ("SDTS", "r"), -#SEG-P1 / UKOOA P1/90 SEGUKOOA No Yes Yes -# multi-layers -# ("SEGUKOOA", "r"), -#SEG-Y SEGY No No Yes + # PGDump PostgreSQL SQL dump Yes Yes Yes + # PostgreSQL/PostGIS PostgreSQL/PostGIS Yes Yes No, needs PostgreSQL client library (libpq) + # EPIInfo .REC REC No No Yes + # S-57 (ENC) S57 No Yes Yes + # multi-layer + ("S57", "r"), + # SDTS SDTS No Yes Yes + # multi-layer + # ("SDTS", "r"), + # SEG-P1 / UKOOA P1/90 SEGUKOOA No Yes Yes + # multi-layers + # ("SEGUKOOA", 
"r"), + # SEG-Y SEGY No No Yes ("SEGY", "r"), -#Norwegian SOSI Standard SOSI No Yes No, needs FYBA library -#SQLite/SpatiaLite SQLite Yes Yes No, needs libsqlite3 or libspatialite -#SUA SUA No Yes Yes + # Norwegian SOSI Standard SOSI No Yes No, needs FYBA library + # SQLite/SpatiaLite SQLite Yes Yes No, needs libsqlite3 or libspatialite + # SUA SUA No Yes Yes ("SUA", "r"), -#SVG SVG No Yes No, needs libexpat -#UK .NTF UK. NTF No Yes Yes -# multi-layer -# ("UK. NTF", "r"), -#U.S. Census TIGER/Line TIGER No Yes Yes -# multi-layer -# ("TIGER", "r"), -#VFK data VFK No Yes Yes -# multi-layer -# ("VFK", "r"), -#VRT - Virtual Datasource VRT No Yes Yes -# multi-layer -# ("VRT", "r"), -#OGC WFS (Web Feature Service) WFS Yes Yes No, needs libcurl -#MS Excel format XLS No No No, needs libfreexl -#Office Open XML spreadsheet XLSX Yes No No, needs libexpat -#X-Plane/Flighgear aeronautical data XPLANE No Yes Yes -# multi-layer -# ("XPLANE", "r") + # SVG SVG No Yes No, needs libexpat + # TopoJSON TopoJSON No Yes Yes + ("TopoJSON", "r"), + # UK .NTF UK. NTF No Yes Yes + # multi-layer + # ("UK. NTF", "r"), + # U.S. Census TIGER/Line TIGER No Yes Yes + # multi-layer + # ("TIGER", "r"), + # VFK data VFK No Yes Yes + # multi-layer + # ("VFK", "r"), + # VRT - Virtual Datasource VRT No Yes Yes + # multi-layer + # ("VRT", "r"), + # OGC WFS (Web Feature Service) WFS Yes Yes No, needs libcurl + # MS Excel format XLS No No No, needs libfreexl + # Office Open XML spreadsheet XLSX Yes No No, needs libexpat + # X-Plane/Flighgear aeronautical data XPLANE No Yes Yes + # multi-layer + # ("XPLANE", "r") ]) -# Removes drivers in the supported_drivers dictionary that the +# Removes drivers in the supported_drivers dictionary that the # machine's installation of OGR due to how it is compiled. # OGR may not have optional libararies compiled or installed. 
def _filter_supported_drivers(): global supported_drivers - gdalenv = GDALEnv() - ogrdrv_names = gdalenv.start().drivers().keys() - supported_drivers_copy = supported_drivers.copy() - - for drv in supported_drivers.keys(): - if drv not in ogrdrv_names: - del supported_drivers_copy[drv] - - gdalenv.stop() + with Env() as gdalenv: + ogrdrv_names = gdalenv.drivers().keys() + supported_drivers_copy = supported_drivers.copy() + for drv in supported_drivers.keys(): + if drv not in ogrdrv_names: + del supported_drivers_copy[drv] supported_drivers = supported_drivers_copy diff -Nru fiona-1.7.10/fiona/_env.pxd fiona-1.8.6/fiona/_env.pxd --- fiona-1.7.10/fiona/_env.pxd 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/_env.pxd 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,6 @@ +cdef class ConfigEnv(object): + cdef public object options + + +cdef class GDALEnv(ConfigEnv): + cdef public object _have_registered_drivers diff -Nru fiona-1.7.10/fiona/env.py fiona-1.8.6/fiona/env.py --- fiona-1.7.10/fiona/env.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/env.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,613 @@ +"""Fiona's GDAL/AWS environment""" + +from contextlib import contextmanager +from functools import wraps, total_ordering +import logging +import os +import re +import threading + +import attr +from six import string_types + +from fiona._env import ( + GDALEnv, calc_gdal_version_num, get_gdal_version_num, get_gdal_config, + set_gdal_config, get_gdal_release_name, GDALDataFinder, PROJDataFinder) +from fiona.compat import getargspec +from fiona.errors import EnvError, GDALVersionError +from fiona.session import Session, DummySession + + +class ThreadEnv(threading.local): + def __init__(self): + self._env = None # Initialises in each thread + + # When the outermost 'fiona.Env()' executes '__enter__' it + # probes the GDAL environment to see if any of the supplied + # config options already exist, the assumption being that they + # were set with 'osgeo.gdal.SetConfigOption()' or possibly + # 'fiona.env.set_gdal_config()'. The discovered options are + # reinstated when the outermost Fiona environment exits. + # Without this check any environment options that are present in + # the GDAL environment and are also passed to 'fiona.Env()' + # will be unset when 'fiona.Env()' tears down, regardless of + # their value. For example: + # + # from osgeo import gdal import fiona + # + # gdal.SetConfigOption('key', 'value') + # with fiona.Env(key='something'): + # pass + # + # The config option 'key' would be unset when 'Env()' exits. + # A more comprehensive solution would also leverage + # https://trac.osgeo.org/gdal/changeset/37273 but this gets + # Fiona + older versions of GDAL halfway there. One major + # assumption is that environment variables are not set directly + # with 'osgeo.gdal.SetConfigOption()' OR + # 'fiona.env.set_gdal_config()' inside of a 'fiona.Env()'. + self._discovered_options = None + + +local = ThreadEnv() + +log = logging.getLogger(__name__) + + +class Env(object): + """Abstraction for GDAL and AWS configuration + + The GDAL library is stateful: it has a registry of format drivers, + an error stack, and dozens of configuration options. + + Fiona's approach to working with GDAL is to wrap all the state + up using a Python context manager (see PEP 343, + https://www.python.org/dev/peps/pep-0343/). When the context is + entered GDAL drivers are registered, error handlers are + configured, and configuration options are set. 
When the context + is exited, drivers are removed from the registry and other + configurations are removed. + + Example: + + with fiona.Env(GDAL_CACHEMAX=512) as env: + # All drivers are registered, GDAL's raster block cache + # size is set to 512MB. + # Commence processing... + ... + # End of processing. + + # At this point, configuration options are set to their + # previous (possible unset) values. + + A boto3 session or boto3 session constructor arguments + `aws_access_key_id`, `aws_secret_access_key`, `aws_session_token` + may be passed to Env's constructor. In the latter case, a session + will be created as soon as needed. AWS credentials are configured + for GDAL as needed. + """ + + @classmethod + def default_options(cls): + """Default configuration options + + Parameters + ---------- + None + + Returns + ------- + dict + """ + return { + 'CHECK_WITH_INVERT_PROJ': True, + 'GTIFF_IMPLICIT_JPEG_OVR': False, + "FIONA_ENV": True + } + + def __init__( + self, session=None, **options): + """Create a new GDAL/AWS environment. + + Note: this class is a context manager. GDAL isn't configured + until the context is entered via `with fiona.Env():` + + Parameters + ---------- + session : optional + A Session object. + **options : optional + A mapping of GDAL configuration options, e.g., + `CPL_DEBUG=True, CHECK_WITH_INVERT_PROJ=False`. + + Returns + ------- + Env + + Notes + ----- + We raise EnvError if the GDAL config options + AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY are given. AWS + credentials are handled exclusively by boto3. + + Examples + -------- + + >>> with Env(CPL_DEBUG=True, CPL_CURL_VERBOSE=True): + ... with fiona.open("zip+https://example.com/a.zip") as col: + ... print(col.meta) + + For access to secured cloud resources, a Fiona Session may be + passed to the constructor. + + >>> import boto3 + >>> from fiona.session import AWSSession + >>> boto3_session = boto3.Session(...) + >>> with Env(AWSSession(boto3_session)): + ... with fiona.open("zip+s3://example/a.zip") as col: + ... print(col.meta) + + """ + if ('AWS_ACCESS_KEY_ID' in options or + 'AWS_SECRET_ACCESS_KEY' in options): + raise EnvError( + "GDAL's AWS config options can not be directly set. " + "AWS credentials are handled exclusively by boto3.") + + if session: + self.session = session + else: + self.session = DummySession() + + self.options = options.copy() + self.context_options = {} + + @classmethod + def from_defaults(cls, session=None, **options): + """Create an environment with default config options + + Parameters + ---------- + session : optional + A Session object. + **options : optional + A mapping of GDAL configuration options, e.g., + `CPL_DEBUG=True, CHECK_WITH_INVERT_PROJ=False`. + + Returns + ------- + Env + + Notes + ----- + The items in kwargs will be overlaid on the default values. + + """ + opts = Env.default_options() + opts.update(**options) + return Env(session=session, **opts) + + @property + def is_credentialized(self): + """Test for existence of cloud credentials + + Returns + ------- + bool + """ + return hascreds() + + def credentialize(self): + """Get credentials and configure GDAL + + Note well: this method is a no-op if the GDAL environment + already has credentials, unless session is not None. 
+ + Returns + ------- + None + + """ + if hascreds(): + pass + else: + cred_opts = self.session.get_credential_options() + self.options.update(**cred_opts) + setenv(**cred_opts) + + def drivers(self): + """Return a mapping of registered drivers.""" + return local._env.drivers() + + def __enter__(self): + log.debug("Entering env context: %r", self) + if local._env is None: + log.debug("Starting outermost env") + self._has_parent_env = False + + # See note directly above where _discovered_options is globally + # defined. This MUST happen before calling 'defenv()'. + local._discovered_options = {} + # Don't want to reinstate the "FIONA_ENV" option. + probe_env = {k for k in self.options.keys() if k != "FIONA_ENV"} + for key in probe_env: + val = get_gdal_config(key, normalize=False) + if val is not None: + local._discovered_options[key] = val + log.debug("Discovered option: %s=%s", key, val) + + defenv(**self.options) + self.context_options = {} + else: + self._has_parent_env = True + self.context_options = getenv() + setenv(**self.options) + + self.credentialize() + + log.debug("Entered env context: %r", self) + return self + + def __exit__(self, exc_type=None, exc_val=None, exc_tb=None): + log.debug("Exiting env context: %r", self) + delenv() + if self._has_parent_env: + defenv() + setenv(**self.context_options) + else: + log.debug("Exiting outermost env") + # See note directly above where _discovered_options is globally + # defined. + while local._discovered_options: + key, val = local._discovered_options.popitem() + set_gdal_config(key, val, normalize=False) + log.debug( + "Set discovered option back to: '%s=%s", key, val) + local._discovered_options = None + log.debug("Exited env context: %r", self) + + +def defenv(**options): + """Create a default environment if necessary.""" + if local._env: + log.debug("GDAL environment exists: %r", local._env) + else: + log.debug("No GDAL environment exists") + local._env = GDALEnv() + local._env.update_config_options(**options) + log.debug( + "New GDAL environment %r created", local._env) + local._env.start() + + +def getenv(): + """Get a mapping of current options.""" + if not local._env: + raise EnvError("No GDAL environment exists") + else: + log.debug("Got a copy of environment %r options", local._env) + return local._env.options.copy() + + +def hasenv(): + return bool(local._env) + + +def setenv(**options): + """Set options in the existing environment.""" + if not local._env: + raise EnvError("No GDAL environment exists") + else: + local._env.update_config_options(**options) + log.debug("Updated existing %r with options %r", local._env, options) + + +def hascreds(): + return local._env is not None and all(key in local._env.get_config_options() for key in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY']) + + +def delenv(): + """Delete options in the existing environment.""" + if not local._env: + raise EnvError("No GDAL environment exists") + else: + local._env.clear_config_options() + log.debug("Cleared existing %r options", local._env) + local._env.stop() + local._env = None + + +class NullContextManager(object): + def __init__(self): + pass + def __enter__(self): + return self + def __exit__(self, *args): + pass + + +def env_ctx_if_needed(): + """Return an Env if one does not exist + + Returns + ------- + Env or a do-nothing context manager + + """ + if local._env: + return NullContextManager() + else: + return Env.from_defaults() + + +def ensure_env(f): + """A decorator that ensures an env exists before a function + calls any GDAL C functions. 
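+
+    For example (an illustrative sketch only):
+
+        @ensure_env
+        def count_layers(path):
+            return len(fiona.listlayers(path))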
+
+    Parameters
+    ----------
+    f : function
+        A function.
+
+    Returns
+    -------
+    A function wrapper.
+
+    Notes
+    -----
+    If there is already an existing environment, the wrapper does
+    nothing and immediately calls f with the given arguments.
+
+    """
+    @wraps(f)
+    def wrapper(*args, **kwargs):
+        if local._env:
+            return f(*args, **kwargs)
+        else:
+            with Env.from_defaults():
+                return f(*args, **kwargs)
+    return wrapper
+
+
+def ensure_env_with_credentials(f):
+    """Ensures a config environment exists and has credentials.
+
+    Parameters
+    ----------
+    f : function
+        A function.
+
+    Returns
+    -------
+    A function wrapper.
+
+    Notes
+    -----
+    The function wrapper checks the first argument of f and
+    credentializes the environment if the first argument is a URI with
+    scheme "s3".
+
+    If there is already an existing environment, the wrapper does
+    nothing and immediately calls f with the given arguments.
+
+    """
+    @wraps(f)
+    def wrapper(*args, **kwargs):
+        if local._env:
+            return f(*args, **kwargs)
+        else:
+            if isinstance(args[0], str):
+                session = Session.from_path(args[0])
+            else:
+                session = Session.from_path(None)
+
+            with Env.from_defaults(session=session):
+                log.debug("Credentialized: {!r}".format(getenv()))
+                return f(*args, **kwargs)
+    return wrapper
+
+
+@attr.s(slots=True)
+@total_ordering
+class GDALVersion(object):
+    """Convenience class for obtaining GDAL major and minor version
+    components and comparing between versions. This is highly
+    simplistic and assumes a very normal numbering scheme for versions
+    and ignores everything except the major and minor components.
+    """
+
+    major = attr.ib(default=0, validator=attr.validators.instance_of(int))
+    minor = attr.ib(default=0, validator=attr.validators.instance_of(int))
+
+    def __eq__(self, other):
+        return (self.major, self.minor) == (other.major, other.minor)
+
+    def __lt__(self, other):
+        return (self.major, self.minor) < (other.major, other.minor)
+
+    def __repr__(self):
+        return "GDALVersion(major={0}, minor={1})".format(self.major, self.minor)
+
+    def __str__(self):
+        return "{0}.{1}".format(self.major, self.minor)
+
+    @classmethod
+    def parse(cls, input):
+        """
+        Parses input tuple or string to GDALVersion. If input is a GDALVersion
+        instance, it is returned.
+
+        Parameters
+        ----------
+        input: tuple of (major, minor), string, or instance of GDALVersion
+
+        Returns
+        -------
+        GDALVersion instance
+        """
+
+        if isinstance(input, cls):
+            return input
+        if isinstance(input, tuple):
+            return cls(*input)
+        elif isinstance(input, string_types):
+            # Extract major and minor version components.
+            # alpha, beta, rc suffixes ignored
+            match = re.search(r'^\d+\.\d+', input)
+            if not match:
+                raise ValueError(
+                    "value does not appear to be a valid GDAL version "
+                    "number: {}".format(input))
+            major, minor = (int(c) for c in match.group().split('.'))
+            return cls(major=major, minor=minor)
+
+        raise TypeError("GDALVersion can only be parsed from a string or tuple")
+
+    @classmethod
+    def runtime(cls):
+        """Return GDALVersion of current GDAL runtime"""
+        return cls.parse(get_gdal_release_name())
+
+    def at_least(self, other):
+        other = self.__class__.parse(other)
+        return self >= other
+
+
+def require_gdal_version(version, param=None, values=None, is_max_version=False,
+                         reason=''):
+    """A decorator that ensures the called function or parameters are supported
+    by the runtime version of GDAL. Raises GDALVersionError if conditions
+    are not met.
+
+    Examples:
+    \b
+    @require_gdal_version('2.2')
+    def some_func():
+
+    calling `some_func` with a runtime version of GDAL that is < 2.2 raises a
+    GDALVersionError.
+
+    \b
+    @require_gdal_version('2.2', param='foo')
+    def some_func(foo='bar'):
+
+    calling `some_func` with parameter `foo` of any value on GDAL < 2.2 raises
+    a GDALVersionError.
+
+    \b
+    @require_gdal_version('2.2', param='foo', values=('bar',))
+    def some_func(foo=None):
+
+    calling `some_func` with parameter `foo` and value `bar` on GDAL < 2.2
+    raises a GDALVersionError.
+
+
+    Parameters
+    ----------
+    version: tuple, string, or GDALVersion
+    param: string (optional, default: None)
+        If `values` is absent, then any use of this parameter with a value
+        other than its default requires at least GDAL `version`.
+    values: tuple, list, or set (optional, default: None)
+        contains values that require at least GDAL `version`. `param`
+        is required for `values`.
+    is_max_version: bool (optional, default: False)
+        if `True` indicates that the version provided is the maximum version
+        allowed, instead of requiring at least that version.
+    reason: string (optional, default: '')
+        custom error message presented to user in addition to message about
+        GDAL version. Use this to provide an explanation of what changed
+        and, if necessary, context for the user.
+
+    Returns
+    -------
+    wrapped function
+    """
+
+    if values is not None:
+        if param is None:
+            raise ValueError(
+                'require_gdal_version: param must be provided with values')
+
+        if not isinstance(values, (tuple, list, set)):
+            raise ValueError(
+                'require_gdal_version: values must be a tuple, list, or set')
+
+    version = GDALVersion.parse(version)
+    runtime = GDALVersion.runtime()
+    inequality = '>=' if runtime < version else '<='
+    reason = '\n{0}'.format(reason) if reason else reason
+
+    def decorator(f):
+        @wraps(f)
+        def wrapper(*args, **kwds):
+            if ((runtime < version and not is_max_version) or
+                    (is_max_version and runtime > version)):
+
+                if param is None:
+                    raise GDALVersionError(
+                        "GDAL version must be {0} {1}{2}".format(
+                            inequality, str(version), reason))
+
+                # normalize args and kwds to dict
+                argspec = getargspec(f)
+                full_kwds = kwds.copy()
+
+                if argspec.args:
+                    full_kwds.update(dict(zip(argspec.args[:len(args)], args)))
+
+                if argspec.defaults:
+                    defaults = dict(zip(
+                        reversed(argspec.args), reversed(argspec.defaults)))
+                else:
+                    defaults = {}
+
+                if param in full_kwds:
+                    if values is None:
+                        if param not in defaults or (
+                                full_kwds[param] != defaults[param]):
+                            raise GDALVersionError(
+                                'usage of parameter "{0}" requires '
+                                'GDAL {1} {2}{3}'.format(param, inequality,
+                                                         version, reason))
+
+                    elif full_kwds[param] in values:
+                        raise GDALVersionError(
+                            'parameter "{0}={1}" requires '
+                            'GDAL {2} {3}{4}'.format(
+                                param, full_kwds[param], inequality, version, reason))
+
+            return f(*args, **kwds)
+
+        return wrapper
+
+    return decorator
+
+
+# Patch the environment if needed, such as in the installed wheel case.
+
+if 'GDAL_DATA' not in os.environ:
+
+    # See https://github.com/mapbox/rasterio/issues/1631.
+    if GDALDataFinder().find_file("header.dxf"):
+        log.debug("GDAL data files are available at built-in paths")
+
+    else:
+        path = GDALDataFinder().search()
+
+        if path:
+            os.environ['GDAL_DATA'] = path
+            log.debug("GDAL_DATA not found in environment, set to %r.", path)
+
+if 'PROJ_LIB' not in os.environ:
+
+    # See https://github.com/mapbox/rasterio/issues/1631.
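+    # If PROJ can already initialize a CRS from its built-in paths,
+    # PROJ_LIB is left alone; otherwise the wheel and sys.prefix
+    # locations are searched.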
+ if PROJDataFinder().has_data(): + log.debug("PROJ data files are available at built-in paths") + + else: + path = PROJDataFinder().search() + + if path: + os.environ['PROJ_LIB'] = path + log.debug("PROJ data not found in environment, set to %r.", path) diff -Nru fiona-1.7.10/fiona/_env.pyx fiona-1.8.6/fiona/_env.pyx --- fiona-1.7.10/fiona/_env.pyx 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/_env.pyx 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,449 @@ +# cython: c_string_type=unicode, c_string_encoding=utf8 +"""GDAL and OGR driver and configuration management + +The main thread always utilizes CPLSetConfigOption. Child threads +utilize CPLSetThreadLocalConfigOption instead. All threads use +CPLGetConfigOption and not CPLGetThreadLocalConfigOption, thus child +threads will inherit config options from the main thread unless the +option is set to a new value inside the thread. +""" + +include "gdal.pxi" + +from collections import namedtuple +import logging +import os +import os.path +import sys +import threading + +from fiona._err cimport exc_wrap_int, exc_wrap_ogrerr +from fiona._err import CPLE_BaseError +from fiona.errors import EnvError + + +level_map = { + 0: 0, + 1: logging.DEBUG, + 2: logging.WARNING, + 3: logging.ERROR, + 4: logging.CRITICAL } + +code_map = { + 0: 'CPLE_None', + 1: 'CPLE_AppDefined', + 2: 'CPLE_OutOfMemory', + 3: 'CPLE_FileIO', + 4: 'CPLE_OpenFailed', + 5: 'CPLE_IllegalArg', + 6: 'CPLE_NotSupported', + 7: 'CPLE_AssertionFailed', + 8: 'CPLE_NoWriteAccess', + 9: 'CPLE_UserInterrupt', + 10: 'ObjectNull', + + # error numbers 11-16 are introduced in GDAL 2.1. See + # https://github.com/OSGeo/gdal/pull/98. + 11: 'CPLE_HttpResponse', + 12: 'CPLE_AWSBucketNotFound', + 13: 'CPLE_AWSObjectNotFound', + 14: 'CPLE_AWSAccessDenied', + 15: 'CPLE_AWSInvalidCredentials', + 16: 'CPLE_AWSSignatureDoesNotMatch'} + + +log = logging.getLogger(__name__) + + +cdef bint is_64bit = sys.maxsize > 2 ** 32 + + +cdef _safe_osr_release(OGRSpatialReferenceH srs): + """Wrapper to handle OSR release when NULL.""" + if srs != NULL: + OSRRelease(srs) + srs = NULL + + +def calc_gdal_version_num(maj, min, rev): + """Calculates the internal gdal version number based on major, minor and revision + + GDAL Version Information macro changed with GDAL version 1.10.0 (April 2013) + + """ + if (maj, min, rev) >= (1, 10, 0): + return int(maj * 1000000 + min * 10000 + rev * 100) + else: + return int(maj * 1000 + min * 100 + rev * 10) + + +def get_gdal_version_num(): + """Return current internal version number of gdal""" + return int(GDALVersionInfo("VERSION_NUM")) + + +def get_gdal_release_name(): + """Return release name of gdal""" + cdef const char *name_c = NULL + name_c = GDALVersionInfo("RELEASE_NAME") + name = name_c + return name + + +GDALVersion = namedtuple("GDALVersion", ["major", "minor", "revision"]) + + +def get_gdal_version_tuple(): + """ + Calculates gdal version tuple from gdal's internal version number. 
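+
+    For example, GDAL 2.4.0 reports VERSION_NUM 2040000, which this
+    function unpacks to GDALVersion(major=2, minor=4, revision=0).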
+ + GDAL Version Information macro changed with GDAL version 1.10.0 (April 2013) + """ + gdal_version_num = get_gdal_version_num() + + if gdal_version_num >= calc_gdal_version_num(1, 10, 0): + major = gdal_version_num // 1000000 + minor = (gdal_version_num - (major * 1000000)) // 10000 + revision = (gdal_version_num - (major * 1000000) - (minor * 10000)) // 100 + return GDALVersion(major, minor, revision) + else: + major = gdal_version_num // 1000 + minor = (gdal_version_num - (major * 1000)) // 100 + revision = (gdal_version_num - (major * 1000) - (minor * 100)) // 10 + return GDALVersion(major, minor, revision) + + +cdef void log_error(CPLErr err_class, int err_no, const char* msg) with gil: + """Send CPL debug messages and warnings to Python's logger.""" + log = logging.getLogger(__name__) + if err_no in code_map: + log.log(level_map[err_class], "%s", msg) + else: + log.info("Unknown error number %r.", err_no) + + +# Definition of GDAL callback functions, one for Windows and one for +# other platforms. Each calls log_error(). +IF UNAME_SYSNAME == "Windows": + cdef void __stdcall logging_error_handler(CPLErr err_class, int err_no, + const char* msg) with gil: + log_error(err_class, err_no, msg) +ELSE: + cdef void logging_error_handler(CPLErr err_class, int err_no, + const char* msg) with gil: + log_error(err_class, err_no, msg) + + +def driver_count(): + """Return the count of all drivers""" + return GDALGetDriverCount() + OGRGetDriverCount() + + +cpdef get_gdal_config(key, normalize=True): + """Get the value of a GDAL configuration option. When requesting + ``GDAL_CACHEMAX`` the value is returned unaltered. + + Parameters + ---------- + key : str + Name of config option. + normalize : bool, optional + Convert values of ``"ON"'`` and ``"OFF"`` to ``True`` and ``False``. + """ + key = key.encode('utf-8') + + # GDAL_CACHEMAX is a special case + if key.lower() == b'gdal_cachemax': + if is_64bit: + return GDALGetCacheMax64() + else: + return GDALGetCacheMax() + else: + val = CPLGetConfigOption(key, NULL) + + if not val: + return None + elif not normalize: + return val + elif val.isdigit(): + return int(val) + else: + if val == u'ON': + return True + elif val == u'OFF': + return False + else: + return val + + +cpdef set_gdal_config(key, val, normalize=True): + """Set a GDAL configuration option's value. + + Parameters + ---------- + key : str + Name of config option. + normalize : bool, optional + Convert ``True`` to `"ON"` and ``False`` to `"OFF"``. + """ + key = key.encode('utf-8') + + # GDAL_CACHEMAX is a special case + if key.lower() == b'gdal_cachemax': + if is_64bit: + GDALSetCacheMax64(val) + else: + GDALSetCacheMax(val) + return + elif normalize and isinstance(val, bool): + val = ('ON' if val and val else 'OFF').encode('utf-8') + else: + # Value could be an int + val = str(val).encode('utf-8') + + if isinstance(threading.current_thread(), threading._MainThread): + CPLSetConfigOption(key, val) + else: + CPLSetThreadLocalConfigOption(key, val) + + +cpdef del_gdal_config(key): + """Delete a GDAL configuration option. + + Parameters + ---------- + key : str + Name of config option. 
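+
+    For example, del_gdal_config('CPL_DEBUG') unsets a CPL_DEBUG option
+    set earlier with set_gdal_config('CPL_DEBUG', True).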
+ """ + key = key.encode('utf-8') + if isinstance(threading.current_thread(), threading._MainThread): + CPLSetConfigOption(key, NULL) + else: + CPLSetThreadLocalConfigOption(key, NULL) + + +cdef class ConfigEnv(object): + """Configuration option management""" + + def __init__(self, **options): + self.options = options.copy() + self.update_config_options(**self.options) + + def update_config_options(self, **kwargs): + """Update GDAL config options.""" + for key, val in kwargs.items(): + set_gdal_config(key, val) + self.options[key] = val + + def clear_config_options(self): + """Clear GDAL config options.""" + while self.options: + key, val = self.options.popitem() + del_gdal_config(key) + + def get_config_options(self): + return {k: get_gdal_config(k) for k in self.options} + + +class GDALDataFinder(object): + """Finds GDAL data files + + Note: this is not part of the 1.8.x public API. + + """ + def find_file(self, basename): + """Returns path of a GDAL data file or None + + Parameters + ---------- + basename : str + Basename of a data file such as "header.dxf" + + Returns + ------- + str (on success) or None (on failure) + + """ + cdef const char *path_c = NULL + basename_b = basename.encode('utf-8') + path_c = CPLFindFile("gdal", basename_b) + if path_c == NULL: + return None + else: + path = path_c + return path + + def search(self, prefix=None): + """Returns GDAL data directory + + Note well that os.environ is not consulted. + + Returns + ------- + str or None + + """ + path = self.search_wheel(prefix or __file__) + if not path: + path = self.search_prefix(prefix or sys.prefix) + if not path: + path = self.search_debian(prefix or sys.prefix) + return path + + def search_wheel(self, prefix=None): + """Check wheel location""" + if prefix is None: + prefix = __file__ + datadir = os.path.abspath(os.path.join(os.path.dirname(prefix), "gdal_data")) + return datadir if os.path.exists(os.path.join(datadir, 'pcs.csv')) else None + + def search_prefix(self, prefix=sys.prefix): + """Check sys.prefix location""" + datadir = os.path.join(prefix, 'share', 'gdal') + return datadir if os.path.exists(os.path.join(datadir, 'pcs.csv')) else None + + def search_debian(self, prefix=sys.prefix): + """Check Debian locations""" + gdal_release_name = GDALVersionInfo("RELEASE_NAME") + datadir = os.path.join(prefix, 'share', 'gdal', '{}.{}'.format(*gdal_release_name.split('.')[:2])) + return datadir if os.path.exists(os.path.join(datadir, 'pcs.csv')) else None + + +class PROJDataFinder(object): + """Finds PROJ data files + + Note: this is not part of the public 1.8.x API. + + """ + def has_data(self): + """Returns True if PROJ's data files can be found + + Returns + ------- + bool + + """ + cdef OGRSpatialReferenceH osr = OSRNewSpatialReference(NULL) + + try: + exc_wrap_ogrerr(exc_wrap_int(OSRImportFromProj4(osr, "+init=epsg:4326"))) + except CPLE_BaseError: + return False + else: + return True + finally: + _safe_osr_release(osr) + + + def search(self, prefix=None): + """Returns PROJ data directory + + Note well that os.environ is not consulted. 
+ + Returns + ------- + str or None + + """ + path = self.search_wheel(prefix or __file__) + if not path: + path = self.search_prefix(prefix or sys.prefix) + return path + + def search_wheel(self, prefix=None): + """Check wheel location""" + if prefix is None: + prefix = __file__ + datadir = os.path.abspath(os.path.join(os.path.dirname(prefix), "proj_data")) + return datadir if os.path.exists(datadir) else None + + def search_prefix(self, prefix=sys.prefix): + """Check sys.prefix location""" + datadir = os.path.join(prefix, 'share', 'proj') + return datadir if os.path.exists(datadir) else None + + +cdef class GDALEnv(ConfigEnv): + """Configuration and driver management""" + + def __init__(self, **options): + super(GDALEnv, self).__init__(**options) + self._have_registered_drivers = False + + def start(self): + CPLPushErrorHandler(logging_error_handler) + log.debug("Logging error handler pushed.") + + # The outer if statement prevents each thread from acquiring a + # lock when the environment starts, and the inner avoids a + # potential race condition. + if not self._have_registered_drivers: + with threading.Lock(): + if not self._have_registered_drivers: + + GDALAllRegister() + OGRRegisterAll() + log.debug("All drivers registered.") + + if 'GDAL_DATA' in os.environ: + self.update_config_options(GDAL_DATA=os.environ['GDAL_DATA']) + log.debug("GDAL_DATA found in environment: %r.", os.environ['GDAL_DATA']) + + # See https://github.com/mapbox/rasterio/issues/1631. + elif GDALDataFinder().find_file("header.dxf"): + log.debug("GDAL data files are available at built-in paths") + + else: + path = GDALDataFinder().search() + + if path: + self.update_config_options(GDAL_DATA=path) + log.debug("GDAL_DATA not found in environment, set to %r.", path) + + if 'PROJ_LIB' in os.environ: + log.debug("PROJ_LIB found in environment: %r.", os.environ['PROJ_LIB']) + + elif PROJDataFinder().has_data(): + log.debug("PROJ data files are available at built-in paths") + + else: + path = PROJDataFinder().search() + + if path: + os.environ['PROJ_LIB'] = path + log.debug("PROJ data not found in environment, set to %r.", path) + + if driver_count() == 0: + CPLPopErrorHandler() + log.debug("Error handler popped") + raise ValueError("Drivers not registered.") + + # Flag the drivers as registered, otherwise every thread + # will acquire a threadlock every time a new environment + # is started rather than just whenever the first thread + # actually makes it this far. + self._have_registered_drivers = True + + log.debug("Started GDALEnv %r.", self) + + def stop(self): + # NB: do not restore the CPL error handler to its default + # state here. If you do, log messages will be written to stderr + # by GDAL instead of being sent to Python's logging module. + log.debug("Stopping GDALEnv %r.", self) + CPLPopErrorHandler() + log.debug("Error handler popped.") + log.debug("Stopped GDALEnv %r.", self) + + def drivers(self): + cdef OGRSFDriverH driver = NULL + cdef int i + + result = {} + for i in range(OGRGetDriverCount()): + drv = OGRGetDriver(i) + key = OGR_Dr_GetName(drv) + val = OGR_Dr_GetName(drv) + result[key] = val + + return result diff -Nru fiona-1.7.10/fiona/errors.py fiona-1.8.6/fiona/errors.py --- fiona-1.7.10/fiona/errors.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/errors.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,6 +1,10 @@ # Errors. 
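+# The classes added below subclass either built-in exception types or
+# the new FionaError base, so except clauses written against Fiona
+# 1.7.x (ValueError, IOError, etc.) keep working.
+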
+class FionaError(Exception):
+    """Base Fiona error"""
+
+
 class FionaValueError(ValueError):
     """Fiona-specific value errors"""

@@ -25,9 +29,39 @@
     """A format specific driver error."""


+class DriverSupportError(DriverIOError):
+    """Driver does not support schema"""
+
+
+class DatasetDeleteError(IOError):
+    """Failure to delete a dataset"""
+
+
 class FieldNameEncodeError(UnicodeEncodeError):
     """Failure to encode a field name."""


 class UnsupportedGeometryTypeError(KeyError):
     """When an OGR geometry type isn't supported by Fiona."""
+
+
+class GeometryTypeValidationError(FionaValueError):
+    """Tried to write a geometry type not specified in the schema"""
+
+
+class TransactionError(RuntimeError):
+    """Failure relating to GDAL transactions"""
+
+
+class EnvError(FionaError):
+    """Environment Errors"""
+
+
+class GDALVersionError(FionaError):
+    """Raised if the runtime version of GDAL does not meet the required
+    version.
+    """
+
+
+class FionaDeprecationWarning(UserWarning):
+    """A warning about deprecation of Fiona features"""
diff -Nru fiona-1.7.10/fiona/_err.pxd fiona-1.8.6/fiona/_err.pxd
--- fiona-1.7.10/fiona/_err.pxd	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/fiona/_err.pxd	2019-03-19 04:25:07.000000000 +0000
@@ -1 +1,15 @@
+from libc.stdio cimport *
+
+cdef extern from "cpl_vsi.h":
+
+    ctypedef FILE VSILFILE
+
+cdef extern from "ogr_core.h":
+
+    ctypedef int OGRErr
+
+
+cdef int exc_wrap_int(int retval) except -1
+cdef OGRErr exc_wrap_ogrerr(OGRErr retval) except -1
 cdef void *exc_wrap_pointer(void *ptr) except NULL
+cdef VSILFILE *exc_wrap_vsilfile(VSILFILE *f) except NULL
diff -Nru fiona-1.7.10/fiona/_err.pyx fiona-1.8.6/fiona/_err.pyx
--- fiona-1.7.10/fiona/_err.pyx	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/fiona/_err.pyx	2019-03-19 04:25:07.000000000 +0000
@@ -66,7 +66,7 @@
         return self.__unicode__()

     def __unicode__(self):
-        return "{}".format(self.errmsg)
+        return u"{}".format(self.errmsg)

     @property
     def args(self):
@@ -137,6 +137,22 @@
     pass


+class FionaNullPointerError(CPLE_BaseError):
+    """
+    Returned from exc_wrap_pointer when a NULL pointer is passed, but no GDAL
+    error was raised.
+    """
+    pass
+
+
+class FionaCPLError(CPLE_BaseError):
+    """
+    Returned from exc_wrap_int when an error code is returned, but no GDAL
+    error was set.
+    """
+    pass
+
+
 # Map of GDAL error numbers to the Python exceptions.
 exception_map = {
     1: CPLE_AppDefinedError,
@@ -187,7 +203,7 @@

 cdef inline object exc_check():
     """Checks GDAL error stack for fatal or non-fatal errors
-
+
     Returns
     -------
     An Exception, SystemExit, or None
@@ -219,6 +235,31 @@
     return


+cdef int exc_wrap_int(int err) except -1:
+    """Wrap a GDAL/OGR function that returns CPLErr or OGRErr (int)
+
+    Raises a Fiona exception if a non-fatal error has been set.
+    """
+    if err:
+        exc = exc_check()
+        if exc:
+            raise exc
+        else:
+            raise FionaCPLError(-1, -1, "The wrapped function returned an error code, but no error message was set.")
+    return err
+
+
+cdef OGRErr exc_wrap_ogrerr(OGRErr err) except -1:
+    """Wrap a function that returns OGRErr but does not use the
+    CPL error stack.
+
+    """
+    if err == 0:
+        return err
+    else:
+        raise CPLE_BaseError(3, err, "OGR Error code {}".format(err))
+
+
 cdef void *exc_wrap_pointer(void *ptr) except NULL:
     """Wrap a GDAL/OGR function that returns GDALDatasetH etc (void *)
     Raises a Fiona exception if a non-fatal error has been set.
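The calling pattern these wrappers enable is sketched below; this is an editorial illustration rather than part of the patch, mirroring the exc_wrap_ogrerr/exc_wrap_int pairing that PROJDataFinder.has_data() uses in fiona/_env.pyx:

    cdef OGRSpatialReferenceH osr = OSRNewSpatialReference(NULL)
    try:
        exc_wrap_ogrerr(exc_wrap_int(OSRImportFromProj4(osr, "+init=epsg:4326")))
    except CPLE_BaseError:
        pass  # the OGR failure surfaces as a catchable Python exception
    finally:
        _safe_osr_release(osr)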
@@ -227,7 +268,21 @@
         exc = exc_check()
         if exc:
             raise exc
-    return NULL
+        else:
+            # null pointer was passed, but no error message from GDAL
+            raise FionaNullPointerError(-1, -1, "NULL pointer error")
     return ptr
-
+
+
+cdef VSILFILE *exc_wrap_vsilfile(VSILFILE *f) except NULL:
+    """Wrap a GDAL/OGR function that returns a VSILFILE * (file handle)
+
+    Raises a Fiona exception if a non-fatal error has been set.
+    """
+    if f == NULL:
+        exc = exc_check()
+        if exc:
+            raise exc
+    return f
+
 cpl_errs = GDALErrCtxManager()
diff -Nru fiona-1.7.10/fiona/fio/bounds.py fiona-1.8.6/fiona/fio/bounds.py
--- fiona-1.7.10/fiona/fio/bounds.py	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/fiona/fio/bounds.py	2019-03-19 04:25:07.000000000 +0000
@@ -9,6 +9,7 @@

 import fiona
 from fiona.fio.helpers import obj_gen
+from fiona.fio import with_context_env


 @click.command(short_help="Print the extent of GeoJSON objects")
@@ -23,6 +24,7 @@
               "(default: without).")
 @use_rs_opt
 @click.pass_context
+@with_context_env
 def bounds(ctx, precision, explode, with_id, with_obj, use_rs):
     """Print the bounding boxes of GeoJSON objects read from stdin.

@@ -36,10 +38,8 @@
     as GeoJSON object, use --with-obj. This has the effect of updating
     input objects with {id: identifier, bbox: bounds}.
     """
-    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
-    logger = logging.getLogger('fio')
+    logger = logging.getLogger(__name__)
     stdin = click.get_text_stream('stdin')
-    stdout = click.get_text_stream('stdout')
     try:
         source = obj_gen(stdin)
         for i, obj in enumerate(source):
diff -Nru fiona-1.7.10/fiona/fio/calc.py fiona-1.8.6/fiona/fio/calc.py
--- fiona-1.7.10/fiona/fio/calc.py	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/fiona/fio/calc.py	2019-03-19 04:25:07.000000000 +0000
@@ -6,6 +6,7 @@
 from cligj import use_rs_opt

 from .helpers import obj_gen, eval_feature_expression
+from fiona.fio import with_context_env


 @click.command(short_help="Calculate GeoJSON property by Python expression")
@@ -15,6 +16,7 @@
               help="Overwrite properties, default: False")
 @use_rs_opt
 @click.pass_context
+@with_context_env
 def calc(ctx, property_name, expression, overwrite, use_rs):
     """
     Create a new property on GeoJSON features using the specified expression.
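The with_context_env decorator now applied to each of these commands is defined in fiona/fio/__init__.py later in this patch; its effect is roughly the following sketch, assuming the CLI entry point has stored an Env under ctx.obj['env']:

    @click.pass_context
    def command(ctx):
        with ctx.obj.pop('env'):
            pass  # command body executes inside the GDAL/AWS environment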
@@ -37,9 +39,8 @@ \b $ fio cat data.shp | fio calc sumAB "f.properties.A + f.properties.B" """ - logger = logging.getLogger('fio') + logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') - try: source = obj_gen(stdin) for i, obj in enumerate(source): diff -Nru fiona-1.7.10/fiona/fio/cat.py fiona-1.8.6/fiona/fio/cat.py --- fiona-1.7.10/fiona/fio/cat.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/cat.py 2019-03-19 04:25:07.000000000 +0000 @@ -10,7 +10,7 @@ import fiona from fiona.transform import transform_geom -from fiona.fio import options +from fiona.fio import options, with_context_env warnings.simplefilter('default') @@ -18,8 +18,7 @@ # Cat command @click.command(short_help="Concatenate and print the features of datasets") -@click.argument('files', nargs=-1, type=click.Path(resolve_path=False), - required=True, metavar="INPUTS...") +@click.argument('files', nargs=-1, required=True, metavar="INPUTS...") @click.option('--layer', default=None, multiple=True, callback=options.cb_multilayer, help="Input layer(s), specified as 'fileindex:layer` " @@ -35,9 +34,9 @@ @click.option('--bbox', default=None, metavar="w,s,e,n", help="filter for features intersecting a bounding box") @click.pass_context +@with_context_env def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs, use_rs, bbox, layer): - """ Concatenate and print the features of input datasets as a sequence of GeoJSON features. @@ -46,8 +45,7 @@ Use the '--layer' option to select a different layer. """ - verbosity = (ctx.obj and ctx.obj['verbosity']) or 2 - logger = logging.getLogger('fio') + logger = logging.getLogger(__name__) dump_kwds = {'sort_keys': True} if indent: @@ -64,27 +62,27 @@ for i in range(1, len(files) + 1): if str(i) not in layer.keys(): layer[str(i)] = [0] + try: - with fiona.drivers(CPL_DEBUG=verbosity > 2): - for i, path in enumerate(files, 1): - for lyr in layer[str(i)]: - with fiona.open(path, layer=lyr) as src: - if bbox: - try: - bbox = tuple(map(float, bbox.split(','))) - except ValueError: - bbox = json.loads(bbox) - for i, feat in src.items(bbox=bbox): - if dst_crs or precision >= 0: - g = transform_geom( - src.crs, dst_crs, feat['geometry'], - antimeridian_cutting=True, - precision=precision) - feat['geometry'] = g - feat['bbox'] = fiona.bounds(g) - if use_rs: - click.echo(u'\u001e', nl=False) - click.echo(json.dumps(feat, **dump_kwds)) + if bbox: + try: + bbox = tuple(map(float, bbox.split(','))) + except ValueError: + bbox = json.loads(bbox) + for i, path in enumerate(files, 1): + for lyr in layer[str(i)]: + with fiona.open(path, layer=lyr) as src: + for i, feat in src.items(bbox=bbox): + if dst_crs or precision >= 0: + g = transform_geom( + src.crs, dst_crs, feat['geometry'], + antimeridian_cutting=True, + precision=precision) + feat['geometry'] = g + feat['bbox'] = fiona.bounds(g) + if use_rs: + click.echo(u'\u001e', nl=False) + click.echo(json.dumps(feat, **dump_kwds)) except Exception: logger.exception("Exception caught during processing") diff -Nru fiona-1.7.10/fiona/fio/collect.py fiona-1.8.6/fiona/fio/collect.py --- fiona-1.7.10/fiona/fio/collect.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/collect.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,15 +1,14 @@ """$ fio collect""" -from functools import partial +from functools import partial, wraps import json import logging import click import cligj -from fiona.fio import helpers -from fiona.fio import options +from fiona.fio import helpers, options, with_context_env from 
fiona.transform import transform_geom @@ -31,12 +30,12 @@ @click.option('--parse/--no-parse', default=True, help="load and dump the geojson feature (default is True)") @click.pass_context +@with_context_env def collect(ctx, precision, indent, compact, record_buffered, ignore_errors, src_crs, with_ld_context, add_ld_context_item, parse): """Make a GeoJSON feature collection from a sequence of GeoJSON features and print it.""" - verbosity = (ctx.obj and ctx.obj['verbosity']) or 2 - logger = logging.getLogger('fio') + logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') sink = click.get_text_stream('stdout') @@ -53,7 +52,8 @@ transformer = partial(transform_geom, src_crs, 'EPSG:4326', antimeridian_cutting=True, precision=precision) else: - transformer = lambda x: x + def transformer(x): + return x first_line = next(stdin) diff -Nru fiona-1.7.10/fiona/fio/distrib.py fiona-1.8.6/fiona/fio/distrib.py --- fiona-1.7.10/fiona/fio/distrib.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/distrib.py 2019-03-19 04:25:07.000000000 +0000 @@ -7,21 +7,19 @@ import click import cligj -from fiona.fio import helpers +from fiona.fio import helpers, with_context_env @click.command() @cligj.use_rs_opt @click.pass_context +@with_context_env def distrib(ctx, use_rs): - """Distribute features from a collection. Print the features of GeoJSON objects read from stdin. """ - - verbosity = (ctx.obj and ctx.obj['verbosity']) or 2 - logger = logging.getLogger('fio') + logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') try: source = helpers.obj_gen(stdin) diff -Nru fiona-1.7.10/fiona/fio/dump.py fiona-1.8.6/fiona/fio/dump.py --- fiona-1.7.10/fiona/fio/dump.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/dump.py 2019-03-19 04:25:07.000000000 +0000 @@ -9,13 +9,12 @@ import cligj import fiona -from fiona.fio import helpers -from fiona.fio import options +from fiona.fio import helpers, options, with_context_env from fiona.transform import transform_geom @click.command(short_help="Dump a dataset to GeoJSON.") -@click.argument('input', type=click.Path(), required=True) +@click.argument('input', required=True) @click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer, help="Print information about a specific layer. The first " "layer is used by default. Layers use zero-based " @@ -35,14 +34,14 @@ help="map a term to a URI and add it to the output's JSON LD " "context.") @click.pass_context +@with_context_env def dump(ctx, input, encoding, precision, indent, compact, record_buffered, ignore_errors, with_ld_context, add_ld_context_item, layer): """Dump a dataset either as a GeoJSON feature collection (the default) or a sequence of GeoJSON features.""" - verbosity = (ctx.obj and ctx.obj['verbosity']) or 2 - logger = logging.getLogger('fio') + logger = logging.getLogger(__name__) sink = click.get_text_stream('stdout') dump_kwds = {'sort_keys': True} @@ -65,56 +64,85 @@ return feat try: - with fiona.drivers(CPL_DEBUG=verbosity > 2): - with fiona.open(input, **open_kwds) as source: - meta = source.meta - meta['fields'] = dict(source.schema['properties'].items()) - - if record_buffered: - # Buffer GeoJSON data at the feature level for smaller - # memory footprint. 
- indented = bool(indent) - rec_indent = "\n" + " " * (2 * (indent or 0)) - - collection = { - 'type': 'FeatureCollection', - 'fiona:schema': meta['schema'], - 'fiona:crs': meta['crs'], - 'features': []} + with fiona.open(input, **open_kwds) as source: + meta = source.meta + meta['fields'] = dict(source.schema['properties'].items()) + + if record_buffered: + # Buffer GeoJSON data at the feature level for smaller + # memory footprint. + indented = bool(indent) + rec_indent = "\n" + " " * (2 * (indent or 0)) + + collection = { + 'type': 'FeatureCollection', + 'fiona:schema': meta['schema'], + 'fiona:crs': meta['crs'], + 'features': []} + if with_ld_context: + collection['@context'] = helpers.make_ld_context( + add_ld_context_item) + + head, tail = json.dumps( + collection, **dump_kwds).split('[]') + + sink.write(head) + sink.write("[") + + itr = iter(source) + + # Try the first record. + try: + i, first = 0, next(itr) + first = transformer(first) if with_ld_context: - collection['@context'] = helpers.make_ld_context( - add_ld_context_item) - - head, tail = json.dumps( - collection, **dump_kwds).split('[]') - - sink.write(head) - sink.write("[") - - itr = iter(source) + first = helpers.id_record(first) + if indented: + sink.write(rec_indent) + sink.write(json.dumps( + first, **dump_kwds).replace("\n", rec_indent)) + except StopIteration: + pass + except Exception as exc: + # Ignoring errors is *not* the default. + if ignore_errors: + logger.error( + "failed to serialize file record %d (%s), " + "continuing", + i, exc) + else: + # Log error and close up the GeoJSON, leaving it + # more or less valid no matter what happens above. + logger.critical( + "failed to serialize file record %d (%s), " + "quiting", + i, exc) + sink.write("]") + sink.write(tail) + if indented: + sink.write("\n") + raise - # Try the first record. + # Because trailing commas aren't valid in JSON arrays + # we'll write the item separator before each of the + # remaining features. + for i, rec in enumerate(itr, 1): + rec = transformer(rec) try: - i, first = 0, next(itr) - first = transformer(first) if with_ld_context: - first = helpers.id_record(first) + rec = helpers.id_record(rec) if indented: sink.write(rec_indent) + sink.write(item_sep) sink.write(json.dumps( - first, **dump_kwds).replace("\n", rec_indent)) - except StopIteration: - pass + rec, **dump_kwds).replace("\n", rec_indent)) except Exception as exc: - # Ignoring errors is *not* the default. if ignore_errors: logger.error( "failed to serialize file record %d (%s), " "continuing", i, exc) else: - # Log error and close up the GeoJSON, leaving it - # more or less valid no matter what happens above. logger.critical( "failed to serialize file record %d (%s), " "quiting", @@ -125,58 +153,28 @@ sink.write("\n") raise - # Because trailing commas aren't valid in JSON arrays - # we'll write the item separator before each of the - # remaining features. - for i, rec in enumerate(itr, 1): - rec = transformer(rec) - try: - if with_ld_context: - rec = helpers.id_record(rec) - if indented: - sink.write(rec_indent) - sink.write(item_sep) - sink.write(json.dumps( - rec, **dump_kwds).replace("\n", rec_indent)) - except Exception as exc: - if ignore_errors: - logger.error( - "failed to serialize file record %d (%s), " - "continuing", - i, exc) - else: - logger.critical( - "failed to serialize file record %d (%s), " - "quiting", - i, exc) - sink.write("]") - sink.write(tail) - if indented: - sink.write("\n") - raise - - # Close up the GeoJSON after writing all features. 
- sink.write("]") - sink.write(tail) - if indented: - sink.write("\n") - + # Close up the GeoJSON after writing all features. + sink.write("]") + sink.write(tail) + if indented: + sink.write("\n") + + else: + # Buffer GeoJSON data at the collection level. The default. + collection = { + 'type': 'FeatureCollection', + 'fiona:schema': meta['schema'], + 'fiona:crs': meta['crs']} + if with_ld_context: + collection['@context'] = helpers.make_ld_context( + add_ld_context_item) + collection['features'] = [ + helpers.id_record(transformer(rec)) + for rec in source] else: - # Buffer GeoJSON data at the collection level. The default. - collection = { - 'type': 'FeatureCollection', - 'fiona:schema': meta['schema'], - 'fiona:crs': meta['crs']} - if with_ld_context: - collection['@context'] = helpers.make_ld_context( - add_ld_context_item) - collection['features'] = [ - helpers.id_record(transformer(rec)) - for rec in source] - else: - collection['features'] = [ - transformer(source.crs, rec) for rec in source] - json.dump(collection, sink, **dump_kwds) + collection['features'] = [ + transformer(source.crs, rec) for rec in source] + json.dump(collection, sink, **dump_kwds) except Exception: logger.exception("Exception caught during processing") diff -Nru fiona-1.7.10/fiona/fio/env.py fiona-1.8.6/fiona/fio/env.py --- fiona-1.7.10/fiona/fio/env.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/env.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,29 +1,38 @@ """$ fio env""" - -import logging +import json +import os import click import fiona +from fiona._env import GDALDataFinder, PROJDataFinder @click.command(short_help="Print information about the fio environment.") @click.option('--formats', 'key', flag_value='formats', default=True, help="Enumerate the available formats.") +@click.option('--credentials', 'key', flag_value='credentials', default=False, + help="Print credentials.") +@click.option('--gdal-data', 'key', flag_value='gdal_data', default=False, + help="Print GDAL data path.") +@click.option('--proj-data', 'key', flag_value='proj_data', default=False, + help="Print PROJ data path.") @click.pass_context def env(ctx, key): - """Print information about the Fiona environment: available formats, etc. """ - - verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 1 - logger = logging.getLogger('fio') stdout = click.get_text_stream('stdout') - with fiona.drivers(CPL_DEBUG=(verbosity > 2)) as env: + with ctx.obj['env'] as env: if key == 'formats': for k, v in sorted(fiona.supported_drivers.items()): modes = ', '.join("'" + m + "'" for m in v) stdout.write("%s (modes %s)\n" % (k, modes)) stdout.write('\n') + elif key == 'credentials': + click.echo(json.dumps(env.session.credentials)) + elif key == 'gdal_data': + click.echo(os.environ.get('GDAL_DATA') or GDALDataFinder().search()) + elif key == 'proj_data': + click.echo(os.environ.get('PROJ_LIB') or PROJDataFinder().search()) diff -Nru fiona-1.7.10/fiona/fio/filter.py fiona-1.8.6/fiona/fio/filter.py --- fiona-1.7.10/fiona/fio/filter.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/filter.py 2019-03-19 04:25:07.000000000 +0000 @@ -8,12 +8,14 @@ from cligj import use_rs_opt from fiona.fio.helpers import obj_gen, eval_feature_expression +from fiona.fio import with_context_env @click.command() @click.argument('filter_expression') @use_rs_opt @click.pass_context +@with_context_env def filter(ctx, filter_expression, use_rs): """ Filter GeoJSON features by python expression. 
@@ -28,14 +30,15 @@ allows item access via javascript-style dot notation using munch) The expression will be evaluated for each feature and, if true, - the feature will be included in the output. + the feature will be included in the output. For example: - e.g. fio cat data.shp \ - | fio filter "f.properties.area > 1000.0" \ - | fio collect > large_polygons.geojson + \b + $ fio cat data.shp \\ + | fio filter "f.properties.area > 1000.0" \\ + | fio collect > large_polygons.geojson """ - logger = logging.getLogger('fio') + logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') try: diff -Nru fiona-1.7.10/fiona/fio/info.py fiona-1.8.6/fiona/fio/info.py --- fiona-1.7.10/fiona/fio/info.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/info.py 2019-03-19 04:25:07.000000000 +0000 @@ -9,12 +9,12 @@ import fiona import fiona.crs -from fiona.fio import options +from fiona.fio import options, with_context_env @click.command() # One or more files. -@click.argument('input', type=click.Path(exists=False)) +@click.argument('input', required=True) @click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer, help="Print information about a specific layer. The first " "layer is used by default. Layers use zero-based " @@ -34,8 +34,8 @@ @click.option('--name', 'meta_member', flag_value='name', help="Print the datasource's name.") @click.pass_context +@with_context_env def info(ctx, input, indent, meta_member, layer): - """ Print information about a dataset. @@ -43,30 +43,28 @@ Use the '--layer' option to select a different layer. """ - verbosity = (ctx.obj and ctx.obj['verbosity']) or 2 - logger = logging.getLogger('fio') + logger = logging.getLogger(__name__) try: - with fiona.drivers(CPL_DEBUG=verbosity > 2): - with fiona.open(input, layer=layer) as src: - info = src.meta - info.update(bounds=src.bounds, name=src.name) - try: - info.update(count=len(src)) - except TypeError: - info.update(count=None) - logger.debug("Setting 'count' to None/null - layer does " - "not support counting") - proj4 = fiona.crs.to_string(src.crs) - if proj4.startswith('+init=epsg'): - proj4 = proj4.split('=')[1].upper() - info['crs'] = proj4 - if meta_member: - if isinstance(info[meta_member], (list, tuple)): - click.echo(" ".join(map(str, info[meta_member]))) - else: - click.echo(info[meta_member]) + with fiona.open(input, layer=layer) as src: + info = src.meta + info.update(bounds=src.bounds, name=src.name) + try: + info.update(count=len(src)) + except TypeError: + info.update(count=None) + logger.debug("Setting 'count' to None/null - layer does " + "not support counting") + proj4 = fiona.crs.to_string(src.crs) + if proj4.startswith('+init=epsg'): + proj4 = proj4.split('=')[1].upper() + info['crs'] = proj4 + if meta_member: + if isinstance(info[meta_member], (list, tuple)): + click.echo(" ".join(map(str, info[meta_member]))) else: - click.echo(json.dumps(info, indent=indent)) + click.echo(info[meta_member]) + else: + click.echo(json.dumps(info, indent=indent)) except Exception: logger.exception("Exception caught during processing") diff -Nru fiona-1.7.10/fiona/fio/__init__.py fiona-1.8.6/fiona/fio/__init__.py --- fiona-1.7.10/fiona/fio/__init__.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/__init__.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,19 @@ +"""Fiona's command line interface""" + +from functools import wraps + + +def with_context_env(f): + """Pops the Fiona Env from the passed context and executes the + wrapped func in the context of that obj. 
+ + Click's pass_context decorator must precede this decorator, or else + there will be no context in the wrapper args. + """ + @wraps(f) + def wrapper(*args, **kwds): + ctx = args[0] + env = ctx.obj.pop('env') + with env: + return f(*args, **kwds) + return wrapper diff -Nru fiona-1.7.10/fiona/fio/insp.py fiona-1.8.6/fiona/fio/insp.py --- fiona-1.7.10/fiona/fio/insp.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/insp.py 2019-03-19 04:25:07.000000000 +0000 @@ -8,39 +8,38 @@ import click import fiona +from fiona.fio import with_context_env @click.command(short_help="Open a dataset and start an interpreter.") -@click.argument('src_path', type=click.Path(exists=True)) +@click.argument('src_path', required=True) @click.option('--ipython', 'interpreter', flag_value='ipython', help="Use IPython as interpreter.") @click.pass_context +@with_context_env def insp(ctx, src_path, interpreter): - - verbosity = (ctx.obj and ctx.obj['verbosity']) or 2 - logger = logging.getLogger('fio') - + """Open a collection within an interactive interpreter. + """ + logger = logging.getLogger(__name__) banner = 'Fiona %s Interactive Inspector (Python %s)\n' \ 'Type "src.schema", "next(src)", or "help(src)" ' \ 'for more information.' \ % (fiona.__version__, '.'.join(map(str, sys.version_info[:3]))) try: - with fiona.drivers(CPL_DEBUG=verbosity > 2): - with fiona.open(src_path) as src: - - scope = locals() + with fiona.open(src_path) as src: + scope = locals() + if not interpreter: + code.interact(banner, local=scope) + elif interpreter == 'ipython': + import IPython + IPython.InteractiveShell.banner1 = banner + IPython.start_ipython(argv=[], user_ns=scope) + else: + raise click.ClickException( + 'Interpreter {} is unsupported or missing ' + 'dependencies'.format(interpreter)) - if not interpreter: - code.interact(banner, local=scope) - elif interpreter == 'ipython': - import IPython - IPython.InteractiveShell.banner1 = banner - IPython.start_ipython(argv=[], user_ns=scope) - else: - raise click.ClickException( - 'Interpreter {} is unsupported or missing ' - 'dependencies'.format(interpreter)) except Exception: logger.exception("Exception caught during processing") raise click.Abort() diff -Nru fiona-1.7.10/fiona/fio/load.py fiona-1.8.6/fiona/fio/load.py --- fiona-1.7.10/fiona/fio/load.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/load.py 2019-03-19 04:25:07.000000000 +0000 @@ -2,43 +2,37 @@ from functools import partial -import itertools -import json import logging import click +import cligj import fiona -from fiona.fio import options +from fiona.fio import options, with_context_env +from fiona.schema import FIELD_TYPES_MAP_REV from fiona.transform import transform_geom -FIELD_TYPES_MAP_REV = dict([(v, k) for k, v in fiona.FIELD_TYPES_MAP.items()]) - - @click.command(short_help="Load GeoJSON to a dataset in another format.") -@click.argument('output', type=click.Path(), required=True) -@click.option('-f', '--format', '--driver', required=True, +@click.argument('output', required=True) +@click.option('-f', '--format', '--driver', 'driver', required=True, help="Output format driver name.") @options.src_crs_opt @click.option('--dst-crs', '--dst_crs', help="Destination CRS. 
Defaults to --src-crs when not given.") -@click.option('--sequence / --no-sequence', default=False, - help="Specify whether the input stream is a LF-delimited " - "sequence of GeoJSON features (the default) or a single " - "GeoJSON feature collection.") +@cligj.features_in_arg @click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer, help="Load features into specified layer. Layers use " "zero-based numbering when accessed by index.") @click.pass_context -def load(ctx, output, driver, src_crs, dst_crs, sequence, layer): +@with_context_env +def load(ctx, output, driver, src_crs, dst_crs, features, layer): """Load features from JSON to a file in another format. The input is a GeoJSON feature collection or optionally a sequence of - GeoJSON feature objects.""" - verbosity = (ctx.obj and ctx.obj['verbosity']) or 2 - logger = logging.getLogger('fio') - stdin = click.get_text_stream('stdin') + GeoJSON feature objects. + """ + logger = logging.getLogger(__name__) dst_crs = dst_crs or src_crs @@ -46,40 +40,13 @@ transformer = partial(transform_geom, src_crs, dst_crs, antimeridian_cutting=True, precision=-1) else: - transformer = lambda x: x + def transformer(x): + return x - first_line = next(stdin) - - # If input is RS-delimited JSON sequence. - if first_line.startswith(u'\x1e'): - def feature_gen(): - buffer = first_line.strip(u'\x1e') - for line in stdin: - if line.startswith(u'\x1e'): - if buffer: - feat = json.loads(buffer) - feat['geometry'] = transformer(feat['geometry']) - yield feat - buffer = line.strip(u'\x1e') - else: - buffer += line - else: - feat = json.loads(buffer) - feat['geometry'] = transformer(feat['geometry']) - yield feat - elif sequence: - def feature_gen(): - yield json.loads(first_line) - for line in stdin: - feat = json.loads(line) - feat['geometry'] = transformer(feat['geometry']) - yield feat - else: - def feature_gen(): - text = "".join(itertools.chain([first_line], stdin)) - for feat in json.loads(text)['features']: - feat['geometry'] = transformer(feat['geometry']) - yield feat + def feature_gen(): + for feat in features: + feat['geometry'] = transformer(feat['geometry']) + yield feat try: source = feature_gen() @@ -92,15 +59,14 @@ (k, FIELD_TYPES_MAP_REV.get(type(v)) or 'str') for k, v in first['properties'].items()]) - with fiona.drivers(CPL_DEBUG=verbosity > 2): - with fiona.open( - output, 'w', - driver=driver, - crs=dst_crs, - schema=schema, - layer=layer) as dst: - dst.write(first) - dst.writerecords(source) + with fiona.open( + output, 'w', + driver=driver, + crs=dst_crs, + schema=schema, + layer=layer) as dst: + dst.write(first) + dst.writerecords(source) except Exception: logger.exception("Exception caught during processing") diff -Nru fiona-1.7.10/fiona/fio/ls.py fiona-1.8.6/fiona/fio/ls.py --- fiona-1.7.10/fiona/fio/ls.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/ls.py 2019-03-19 04:25:07.000000000 +0000 @@ -7,20 +7,17 @@ from cligj import indent_opt import fiona +from fiona.fio import with_context_env @click.command() -@click.argument('input', type=click.Path(exists=True)) +@click.argument('input', required=True) @indent_opt @click.pass_context +@with_context_env def ls(ctx, input, indent): - """ List layers in a datasource. 
""" - - verbosity = (ctx.obj and ctx.obj['verbosity']) or 2 - - with fiona.drivers(CPL_DEBUG=verbosity > 2): - result = fiona.listlayers(input) - click.echo(json.dumps(result, indent=indent)) + result = fiona.listlayers(input) + click.echo(json.dumps(result, indent=indent)) diff -Nru fiona-1.7.10/fiona/fio/main.py fiona-1.8.6/fiona/fio/main.py --- fiona-1.7.10/fiona/fio/main.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/fio/main.py 2019-03-19 04:25:07.000000000 +0000 @@ -5,7 +5,6 @@ import logging from pkg_resources import iter_entry_points -import warnings import sys import click @@ -14,10 +13,11 @@ import fiona from fiona import __version__ as fio_version +from fiona.session import AWSSession, DummySession def configure_logging(verbosity): - log_level = max(10, 30 - 10*verbosity) + log_level = max(10, 30 - 10 * verbosity) logging.basicConfig(stream=sys.stderr, level=log_level) @@ -26,15 +26,39 @@ @click.group() @verbose_opt @quiet_opt +@click.option( + "--aws-profile", + help="Select a profile from the AWS credentials file") +@click.option( + "--aws-no-sign-requests", + is_flag=True, + help="Make requests anonymously") +@click.option( + "--aws-requester-pays", + is_flag=True, + help="Requester pays data transfer costs") @click.version_option(fio_version) @click.version_option(fiona.__gdal_version__, '--gdal-version', - prog_name='GDAL') + prog_name='GDAL') @click.version_option(sys.version, '--python-version', prog_name='Python') @click.pass_context -def main_group(ctx, verbose, quiet): - - """Fiona command line interface.""" - +def main_group( + ctx, verbose, quiet, aws_profile, aws_no_sign_requests, + aws_requester_pays): + """Fiona command line interface. + """ verbosity = verbose - quiet configure_logging(verbosity) - ctx.obj = {'verbosity': verbosity} + ctx.obj = {} + ctx.obj["verbosity"] = verbosity + ctx.obj["aws_profile"] = aws_profile + envopts = {"CPL_DEBUG": (verbosity > 2)} + if aws_profile or aws_no_sign_requests: + session = AWSSession( + profile_name=aws_profile, + aws_unsigned=aws_no_sign_requests, + requester_pays=aws_requester_pays, + ) + else: + session = DummySession() + ctx.obj["env"] = fiona.Env(session=session, **envopts) diff -Nru fiona-1.7.10/fiona/fio/rm.py fiona-1.8.6/fiona/fio/rm.py --- fiona-1.7.10/fiona/fio/rm.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/fio/rm.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,29 @@ +import click +import logging + +import fiona +from fiona.fio import with_context_env + + +logger = logging.getLogger(__name__) + +@click.command(help="Remove a datasource or an individual layer.") +@click.argument("input", required=True) +@click.option("--layer", type=str, default=None, required=False, help="Name of layer to remove.") +@click.option("--yes", is_flag=True) +@click.pass_context +@with_context_env +def rm(ctx, input, layer, yes): + if layer is None: + kind = "datasource" + else: + kind = "layer" + + if not yes: + click.confirm("The {} will be removed. Are you sure?".format(kind), abort=True) + + try: + fiona.remove(input, layer=layer) + except Exception: + logger.exception("Failed to remove {}.".format(kind)) + raise click.Abort() diff -Nru fiona-1.7.10/fiona/gdal.pxi fiona-1.8.6/fiona/gdal.pxi --- fiona-1.7.10/fiona/gdal.pxi 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/gdal.pxi 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,526 @@ +# GDAL API definitions. 
+ +from libc.stdio cimport FILE + + +cdef extern from "cpl_conv.h" nogil: + + void *CPLMalloc(size_t) + void CPLFree(void* ptr) + void CPLSetThreadLocalConfigOption(const char* key, const char* val) + void CPLSetConfigOption(const char* key, const char* val) + const char* CPLGetConfigOption(const char* key, const char* default) + const char *CPLFindFile(const char *pszClass, const char *pszBasename) + + +cdef extern from "cpl_error.h" nogil: + + ctypedef enum CPLErr: + CE_None + CE_Debug + CE_Warning + CE_Failure + CE_Fatal + + # CPLErrorNum eludes me at the moment, I'm calling it 'int' + # for now. + ctypedef void (*CPLErrorHandler)(CPLErr, int, const char*) + + void CPLErrorReset() + int CPLGetLastErrorNo() + const char* CPLGetLastErrorMsg() + CPLErr CPLGetLastErrorType() + void CPLPushErrorHandler(CPLErrorHandler handler) + void CPLPopErrorHandler() + + +cdef extern from "cpl_string.h" nogil: + + int CSLCount(char **papszStrList) + char **CSLAddString(char **strlist, const char *string) + char **CSLAddNameValue(char **papszStrList, const char *pszName, + const char *pszValue) + char **CSLDuplicate(char **papszStrList) + int CSLFindName(char **papszStrList, const char *pszName) + int CSLFetchBoolean(char **papszStrList, const char *pszName, int default) + const char *CSLFetchNameValue(char **papszStrList, const char *pszName) + char **CSLSetNameValue(char **list, char *name, char *val) + void CSLDestroy(char **list) + char **CSLMerge(char **first, char **second) + + +cdef extern from "cpl_vsi.h" nogil: + + ctypedef int vsi_l_offset + ctypedef FILE VSILFILE + + unsigned char *VSIGetMemFileBuffer(const char *path, + vsi_l_offset *data_len, + int take_ownership) + VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, + vsi_l_offset data_len, int take_ownership) + VSILFILE* VSIFOpenL(const char *path, const char *mode) + int VSIFCloseL(VSILFILE *fp) + int VSIUnlink(const char *path) + + int VSIFFlushL(VSILFILE *fp) + size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) + vsi_l_offset VSIFTellL(VSILFILE *fp) + int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) + size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + + +cdef extern from "ogr_srs_api.h" nogil: + + ctypedef void * OGRCoordinateTransformationH + ctypedef void * OGRSpatialReferenceH + + OGRCoordinateTransformationH OCTNewCoordinateTransformation( + OGRSpatialReferenceH source, + OGRSpatialReferenceH dest) + void OCTDestroyCoordinateTransformation( + OGRCoordinateTransformationH source) + int OCTTransform(OGRCoordinateTransformationH ct, int nCount, double *x, + double *y, double *z) + int OSRAutoIdentifyEPSG(OGRSpatialReferenceH srs) + void OSRCleanup() + OGRSpatialReferenceH OSRClone(OGRSpatialReferenceH srs) + int OSRExportToProj4(OGRSpatialReferenceH srs, char **params) + int OSRExportToWkt(OGRSpatialReferenceH srs, char **params) + int OSRFixup(OGRSpatialReferenceH srs) + const char *OSRGetAuthorityName(OGRSpatialReferenceH srs, const char *key) + const char *OSRGetAuthorityCode(OGRSpatialReferenceH srs, const char *key) + int OSRImportFromEPSG(OGRSpatialReferenceH srs, int code) + int OSRImportFromProj4(OGRSpatialReferenceH srs, const char *proj) + int OSRIsGeographic(OGRSpatialReferenceH srs) + int OSRIsProjected(OGRSpatialReferenceH srs) + int OSRIsSame(OGRSpatialReferenceH srs1, OGRSpatialReferenceH srs2) + OGRSpatialReferenceH OSRNewSpatialReference(const char *wkt) + void OSRRelease(OGRSpatialReferenceH 
srs) + int OSRSetFromUserInput(OGRSpatialReferenceH srs, const char *input) + + +cdef extern from "gdal.h" nogil: + + ctypedef void * GDALMajorObjectH + ctypedef void * GDALDatasetH + ctypedef void * GDALRasterBandH + ctypedef void * GDALDriverH + ctypedef void * GDALColorTableH + ctypedef void * GDALRasterAttributeTableH + ctypedef void * GDALAsyncReaderH + + ctypedef long long GSpacing + ctypedef unsigned long long GIntBig + + ctypedef enum GDALDataType: + GDT_Unknown + GDT_Byte + GDT_UInt16 + GDT_Int16 + GDT_UInt32 + GDT_Int32 + GDT_Float32 + GDT_Float64 + GDT_CInt16 + GDT_CInt32 + GDT_CFloat32 + GDT_CFloat64 + GDT_TypeCount + + ctypedef enum GDALAccess: + GA_ReadOnly + GA_Update + + ctypedef enum GDALRWFlag: + GF_Read + GF_Write + + ctypedef enum GDALRIOResampleAlg: + GRIORA_NearestNeighbour + GRIORA_Bilinear + GRIORA_Cubic, + GRIORA_CubicSpline + GRIORA_Lanczos + GRIORA_Average + GRIORA_Mode + GRIORA_Gauss + + ctypedef enum GDALColorInterp: + GCI_Undefined + GCI_GrayIndex + GCI_PaletteIndex + GCI_RedBand + GCI_GreenBand + GCI_BlueBand + GCI_AlphaBand + GCI_HueBand + GCI_SaturationBand + GCI_LightnessBand + GCI_CyanBand + GCI_YCbCr_YBand + GCI_YCbCr_CbBand + GCI_YCbCr_CrBand + GCI_Max + + ctypedef struct GDALColorEntry: + short c1 + short c2 + short c3 + short c4 + + ctypedef struct GDAL_GCP: + char *pszId + char *pszInfo + double dfGCPPixel + double dfGCPLine + double dfGCPX + double dfGCPY + double dfGCPZ + + void GDALAllRegister() + void GDALDestroyDriverManager() + int GDALGetDriverCount() + GDALDriverH GDALGetDriver(int i) + const char *GDALGetDriverShortName(GDALDriverH driver) + const char *GDALGetDriverLongName(GDALDriverH driver) + const char* GDALGetDescription(GDALMajorObjectH obj) + void GDALSetDescription(GDALMajorObjectH obj, const char *text) + GDALDriverH GDALGetDriverByName(const char *name) + GDALDatasetH GDALOpen(const char *filename, GDALAccess access) # except -1 + GDALDatasetH GDALOpenShared(const char *filename, GDALAccess access) # except -1 + void GDALFlushCache(GDALDatasetH hds) + void GDALClose(GDALDatasetH hds) + GDALDriverH GDALGetDatasetDriver(GDALDatasetH hds) + int GDALGetGeoTransform(GDALDatasetH hds, double *transform) + const char *GDALGetProjectionRef(GDALDatasetH hds) + int GDALGetRasterXSize(GDALDatasetH hds) + int GDALGetRasterYSize(GDALDatasetH hds) + int GDALGetRasterCount(GDALDatasetH hds) + GDALRasterBandH GDALGetRasterBand(GDALDatasetH hds, int num) + GDALRasterBandH GDALGetOverview(GDALRasterBandH hband, int num) + int GDALGetRasterBandXSize(GDALRasterBandH hband) + int GDALGetRasterBandYSize(GDALRasterBandH hband) + const char *GDALGetRasterUnitType(GDALRasterBandH hband) + CPLErr GDALSetRasterUnitType(GDALRasterBandH hband, const char *val) + int GDALSetGeoTransform(GDALDatasetH hds, double *transform) + int GDALSetProjection(GDALDatasetH hds, const char *wkt) + void GDALGetBlockSize(GDALRasterBandH , int *xsize, int *ysize) + int GDALGetRasterDataType(GDALRasterBandH band) + double GDALGetRasterNoDataValue(GDALRasterBandH band, int *success) + int GDALSetRasterNoDataValue(GDALRasterBandH band, double value) + int GDALDatasetRasterIO(GDALRasterBandH band, int, int xoff, int yoff, + int xsize, int ysize, void *buffer, int width, + int height, int, int count, int *bmap, int poff, + int loff, int boff) + int GDALRasterIO(GDALRasterBandH band, int, int xoff, int yoff, int xsize, + int ysize, void *buffer, int width, int height, int, + int poff, int loff) + int GDALFillRaster(GDALRasterBandH band, double rvalue, double ivalue) + GDALDatasetH 
GDALCreate(GDALDriverH driver, const char *path, int width, + int height, int nbands, GDALDataType dtype, + const char **options) + GDALDatasetH GDALCreateCopy(GDALDriverH driver, const char *path, + GDALDatasetH hds, int strict, char **options, + void *progress_func, void *progress_data) + char** GDALGetMetadata(GDALMajorObjectH obj, const char *pszDomain) + int GDALSetMetadata(GDALMajorObjectH obj, char **papszMD, + const char *pszDomain) + const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, + const char *pszDomain) + int GDALSetMetadataItem(GDALMajorObjectH obj, const char *pszName, + const char *pszValue, const char *pszDomain) + const GDALColorEntry *GDALGetColorEntry(GDALColorTableH table, int) + void GDALSetColorEntry(GDALColorTableH table, int i, + const GDALColorEntry *poEntry) + int GDALSetRasterColorTable(GDALRasterBandH band, GDALColorTableH table) + GDALColorTableH GDALGetRasterColorTable(GDALRasterBandH band) + GDALColorTableH GDALCreateColorTable(int) + void GDALDestroyColorTable(GDALColorTableH table) + int GDALGetColorEntryCount(GDALColorTableH table) + int GDALGetRasterColorInterpretation(GDALRasterBandH band) + int GDALSetRasterColorInterpretation(GDALRasterBandH band, GDALColorInterp) + int GDALGetMaskFlags(GDALRasterBandH band) + int GDALCreateDatasetMaskBand(GDALDatasetH hds, int flags) + void *GDALGetMaskBand(GDALRasterBandH band) + int GDALCreateMaskBand(GDALDatasetH hds, int flags) + int GDALGetOverviewCount(GDALRasterBandH band) + int GDALBuildOverviews(GDALDatasetH hds, const char *resampling, + int nOverviews, int *overviews, int nBands, + int *bands, void *progress_func, + void *progress_data) + int GDALCheckVersion(int nVersionMajor, int nVersionMinor, + const char *pszCallingComponentName) + const char* GDALVersionInfo(const char *pszRequest) + CPLErr GDALSetGCPs(GDALDatasetH hDS, int nGCPCount, const GDAL_GCP *pasGCPList, + const char *pszGCPProjection) + const GDAL_GCP *GDALGetGCPs(GDALDatasetH hDS) + int GDALGetGCPCount(GDALDatasetH hDS) + const char *GDALGetGCPProjection(GDALDatasetH hDS) + int GDALGetCacheMax() + void GDALSetCacheMax(int nBytes) + GIntBig GDALGetCacheMax64() + void GDALSetCacheMax64(GIntBig nBytes) + CPLErr GDALDeleteDataset(GDALDriverH, const char *) + char** GDALGetFileList(GDALDatasetH hDS) + CPLErr GDALCopyDatasetFiles (GDALDriverH hDriver, const char * pszNewName, const char * pszOldName) + + +cdef extern from "ogr_api.h" nogil: + + ctypedef void * OGRLayerH + ctypedef void * OGRDataSourceH + ctypedef void * OGRSFDriverH + ctypedef void * OGRFieldDefnH + ctypedef void * OGRFeatureDefnH + ctypedef void * OGRFeatureH + ctypedef void * OGRGeometryH + + ctypedef int OGRErr + + ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY + + void OGRRegisterAll() + void OGRCleanupAll() + int OGRGetDriverCount() + + char *OGR_Dr_GetName(OGRSFDriverH driver) + OGRDataSourceH OGR_Dr_CreateDataSource(OGRSFDriverH driver, + const char *path, char **options) + int OGR_Dr_DeleteDataSource(OGRSFDriverH driver, const char *path) + int OGR_DS_DeleteLayer(OGRDataSourceH datasource, int n) + OGRLayerH OGR_DS_CreateLayer(OGRDataSourceH datasource, const char *name, + OGRSpatialReferenceH crs, int geomType, + char **options) + OGRLayerH OGR_DS_ExecuteSQL(OGRDataSourceH, const char *name, + OGRGeometryH filter, const char *dialext) + void OGR_DS_Destroy(OGRDataSourceH datasource) + OGRSFDriverH OGR_DS_GetDriver(OGRLayerH layer_defn) + OGRLayerH OGR_DS_GetLayerByName(OGRDataSourceH datasource, + const char 
*name) + int OGR_DS_GetLayerCount(OGRDataSourceH datasource) + OGRLayerH OGR_DS_GetLayer(OGRDataSourceH datasource, int n) + void OGR_DS_ReleaseResultSet(OGRDataSourceH datasource, OGRLayerH results) + int OGR_DS_SyncToDisk(OGRDataSourceH datasource) + OGRFeatureH OGR_F_Create(OGRFeatureDefnH featuredefn) + void OGR_F_Destroy(OGRFeatureH feature) + long OGR_F_GetFID(OGRFeatureH feature) + int OGR_F_IsFieldSet(OGRFeatureH feature, int n) + int OGR_F_GetFieldAsDateTime(OGRFeatureH feature, int n, int *y, int *m, + int *d, int *h, int *m, int *s, int *z) + double OGR_F_GetFieldAsDouble(OGRFeatureH feature, int n) + int OGR_F_GetFieldAsInteger(OGRFeatureH feature, int n) + const char *OGR_F_GetFieldAsString(OGRFeatureH feature, int n) + int OGR_F_GetFieldCount(OGRFeatureH feature) + OGRFieldDefnH OGR_F_GetFieldDefnRef(OGRFeatureH feature, int n) + int OGR_F_GetFieldIndex(OGRFeatureH feature, const char *name) + OGRGeometryH OGR_F_GetGeometryRef(OGRFeatureH feature) + void OGR_F_SetFieldDateTime(OGRFeatureH feature, int n, int y, int m, + int d, int hh, int mm, int ss, int tz) + void OGR_F_SetFieldDouble(OGRFeatureH feature, int n, double value) + void OGR_F_SetFieldInteger(OGRFeatureH feature, int n, int value) + void OGR_F_SetFieldString(OGRFeatureH feature, int n, const char *value) + int OGR_F_SetGeometryDirectly(OGRFeatureH feature, OGRGeometryH geometry) + OGRFeatureDefnH OGR_FD_Create(const char *name) + int OGR_FD_GetFieldCount(OGRFeatureDefnH featuredefn) + OGRFieldDefnH OGR_FD_GetFieldDefn(OGRFeatureDefnH featuredefn, int n) + int OGR_FD_GetGeomType(OGRFeatureDefnH featuredefn) + const char *OGR_FD_GetName(OGRFeatureDefnH featuredefn) + OGRFieldDefnH OGR_Fld_Create(const char *name, int fieldtype) + void OGR_Fld_Destroy(OGRFieldDefnH) + char *OGR_Fld_GetNameRef(OGRFieldDefnH) + int OGR_Fld_GetPrecision(OGRFieldDefnH) + int OGR_Fld_GetType(OGRFieldDefnH) + int OGR_Fld_GetWidth(OGRFieldDefnH) + void OGR_Fld_Set(OGRFieldDefnH, const char *name, int fieldtype, int width, + int precision, int justification) + void OGR_Fld_SetPrecision(OGRFieldDefnH, int n) + void OGR_Fld_SetWidth(OGRFieldDefnH, int n) + OGRErr OGR_G_AddGeometryDirectly(OGRGeometryH geometry, OGRGeometryH part) + void OGR_G_AddPoint(OGRGeometryH geometry, double x, double y, double z) + void OGR_G_AddPoint_2D(OGRGeometryH geometry, double x, double y) + void OGR_G_CloseRings(OGRGeometryH geometry) + OGRGeometryH OGR_G_CreateGeometry(int wkbtypecode) + OGRGeometryH OGR_G_CreateGeometryFromJson(const char *json) + void OGR_G_DestroyGeometry(OGRGeometryH geometry) + char *OGR_G_ExportToJson(OGRGeometryH geometry) + void OGR_G_ExportToWkb(OGRGeometryH geometry, int endianness, char *buffer) + int OGR_G_GetCoordinateDimension(OGRGeometryH geometry) + int OGR_G_GetGeometryCount(OGRGeometryH geometry) + const char *OGR_G_GetGeometryName(OGRGeometryH geometry) + int OGR_G_GetGeometryType(OGRGeometryH geometry) + OGRGeometryH OGR_G_GetGeometryRef(OGRGeometryH geometry, int n) + int OGR_G_GetPointCount(OGRGeometryH geometry) + double OGR_G_GetX(OGRGeometryH geometry, int n) + double OGR_G_GetY(OGRGeometryH geometry, int n) + double OGR_G_GetZ(OGRGeometryH geometry, int n) + void OGR_G_ImportFromWkb(OGRGeometryH geometry, unsigned char *bytes, + int nbytes) + int OGR_G_WkbSize(OGRGeometryH geometry) + OGRErr OGR_L_CreateFeature(OGRLayerH layer, OGRFeatureH feature) + int OGR_L_CreateField(OGRLayerH layer, OGRFieldDefnH, int flexible) + OGRErr OGR_L_GetExtent(OGRLayerH layer, void *extent, int force) + OGRFeatureH 
OGR_L_GetFeature(OGRLayerH layer, int n) + int OGR_L_GetFeatureCount(OGRLayerH layer, int m) + OGRFeatureDefnH OGR_L_GetLayerDefn(OGRLayerH layer) + const char *OGR_L_GetName(OGRLayerH layer) + OGRFeatureH OGR_L_GetNextFeature(OGRLayerH layer) + OGRGeometryH OGR_L_GetSpatialFilter(OGRLayerH layer) + OGRSpatialReferenceH OGR_L_GetSpatialRef(OGRLayerH layer) + void OGR_L_ResetReading(OGRLayerH layer) + void OGR_L_SetSpatialFilter(OGRLayerH layer, OGRGeometryH geometry) + void OGR_L_SetSpatialFilterRect(OGRLayerH layer, double minx, double miny, + double maxx, double maxy) + int OGR_L_TestCapability(OGRLayerH layer, const char *name) + OGRSFDriverH OGRGetDriverByName(const char *) + OGRSFDriverH OGRGetDriver(int i) + OGRDataSourceH OGROpen(const char *path, int mode, void *x) + OGRDataSourceH OGROpenShared(const char *path, int mode, void *x) + int OGRReleaseDataSource(OGRDataSourceH datasource) + + +cdef extern from "gdalwarper.h" nogil: + + ctypedef enum GDALResampleAlg: + GRA_NearestNeighbour + GRA_Bilinear + GRA_Cubic + GRA_CubicSpline + GRA_Lanczos + GRA_Average + GRA_Mode + + ctypedef int (*GDALMaskFunc)( + void *pMaskFuncArg, int nBandCount, int eType, int nXOff, int nYOff, + int nXSize, int nYSize, unsigned char **papabyImageData, + int bMaskIsFloat, void *pMask) + + ctypedef int (*GDALTransformerFunc)( + void *pTransformerArg, int bDstToSrc, int nPointCount, double *x, + double *y, double *z, int *panSuccess) + + ctypedef struct GDALWarpOptions: + char **papszWarpOptions + double dfWarpMemoryLimit + GDALResampleAlg eResampleAlg + GDALDataType eWorkingDataType + GDALDatasetH hSrcDS + GDALDatasetH hDstDS + # 0 for all bands + int nBandCount + # List of source band indexes + int *panSrcBands + # List of destination band indexes + int *panDstBands + # The source band to use as an alpha (transparency) value, 0=disabled + int nSrcAlphaBand + # The destination band to use as an alpha (transparency) value, 0=disabled + int nDstAlphaBand + # The "nodata" value real component for each input band, if NULL there isn't one + double *padfSrcNoDataReal + # The "nodata" value imaginary component - may be NULL even if real component is provided. + double *padfSrcNoDataImag + # The "nodata" value real component for each output band, if NULL there isn't one + double *padfDstNoDataReal + # The "nodata" value imaginary component - may be NULL even if real component is provided. + double *padfDstNoDataImag + # GDALProgressFunc() compatible progress reporting function, or NULL if there isn't one. + void *pfnProgress + # Callback argument to be passed to pfnProgress. + void *pProgressArg + # Type of spatial point transformer function + GDALTransformerFunc pfnTransformer + # Handle to image transformer setup structure + void *pTransformerArg + GDALMaskFunc *papfnSrcPerBandValidityMaskFunc + void **papSrcPerBandValidityMaskFuncArg + GDALMaskFunc pfnSrcValidityMaskFunc + void *pSrcValidityMaskFuncArg + GDALMaskFunc pfnSrcDensityMaskFunc + void *pSrcDensityMaskFuncArg + GDALMaskFunc pfnDstDensityMaskFunc + void *pDstDensityMaskFuncArg + GDALMaskFunc pfnDstValidityMaskFunc + void *pDstValidityMaskFuncArg + int (*pfnPreWarpChunkProcessor)(void *pKern, void *pArg) + void *pPreWarpProcessorArg + int (*pfnPostWarpChunkProcessor)(void *pKern, void *pArg) + void *pPostWarpProcessorArg + # Optional OGRPolygonH for a masking cutline.
+ OGRGeometryH hCutline + # Optional blending distance to apply across cutline in pixels, default is 0 + double dfCutlineBlendDist + + GDALWarpOptions *GDALCreateWarpOptions() + void GDALDestroyWarpOptions(GDALWarpOptions *options) + + GDALDatasetH GDALAutoCreateWarpedVRT( + GDALDatasetH hSrcDS, const char *pszSrcWKT, const char *pszDstWKT, + GDALResampleAlg eResampleAlg, double dfMaxError, + const GDALWarpOptions *psOptionsIn) + + GDALDatasetH GDALCreateWarpedVRT( + GDALDatasetH hSrcDS, int nPixels, int nLines, + double *padfGeoTransform, const GDALWarpOptions *psOptionsIn) + + +cdef extern from "gdal_alg.h" nogil: + + int GDALPolygonize(GDALRasterBandH band, GDALRasterBandH mask_band, + OGRLayerH layer, int fidx, char **options, + void *progress_func, void *progress_data) + int GDALFPolygonize(GDALRasterBandH band, GDALRasterBandH mask_band, + OGRLayerH layer, int fidx, char **options, + void *progress_func, void *progress_data) + int GDALSieveFilter(GDALRasterBandH src_band, GDALRasterBandH mask_band, + GDALRasterBandH dst_band, int size, int connectivity, + char **options, void *progress_func, + void *progress_data) + int GDALRasterizeGeometries(GDALDatasetH hds, int band_count, + int *dst_bands, int geom_count, + OGRGeometryH *geometries, + GDALTransformerFunc transform_func, + void *transform, double *pixel_values, + char **options, void *progress_func, + void *progress_data) + void *GDALCreateGenImgProjTransformer(GDALDatasetH src_hds, + const char *pszSrcWKT, GDALDatasetH dst_hds, + const char *pszDstWKT, + int bGCPUseOK, double dfGCPErrorThreshold, + int nOrder) + void *GDALCreateGenImgProjTransformer2(GDALDatasetH src_hds, GDALDatasetH dst_hds, char **options) + void *GDALCreateGenImgProjTransformer3( + const char *pszSrcWKT, const double *padfSrcGeoTransform, + const char *pszDstWKT, const double *padfDstGeoTransform) + void GDALSetGenImgProjTransformerDstGeoTransform(void *hTransformArg, double *padfGeoTransform) + int GDALGenImgProjTransform(void *pTransformArg, int bDstToSrc, + int nPointCount, double *x, double *y, + double *z, int *panSuccess) + void GDALDestroyGenImgProjTransformer(void *) + void *GDALCreateApproxTransformer(GDALTransformerFunc pfnRawTransformer, + void *pRawTransformerArg, + double dfMaxError) + int GDALApproxTransform(void *pTransformArg, int bDstToSrc, int npoints, + double *x, double *y, double *z, int *panSuccess) + void GDALDestroyApproxTransformer(void *) + void GDALApproxTransformerOwnsSubtransformer(void *, int) + int GDALFillNodata(GDALRasterBandH dst_band, GDALRasterBandH mask_band, + double max_search_distance, int deprecated, + int smoothing_iterations, char **options, + void *progress_func, void *progress_data) + int GDALChecksumImage(GDALRasterBandH band, int xoff, int yoff, int width, + int height) + int GDALSuggestedWarpOutput2( + GDALDatasetH hSrcDS, GDALTransformerFunc pfnRawTransformer, + void * pTransformArg, double * padfGeoTransformOut, int * pnPixels, + int * pnLines, double * padfExtent, int nOptions) + + +cdef extern from "ogr_core.h" nogil: + + char *OGRGeometryTypeToName(int type) diff -Nru fiona-1.7.10/fiona/_geometry.pxd fiona-1.8.6/fiona/_geometry.pxd --- fiona-1.7.10/fiona/_geometry.pxd 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/_geometry.pxd 2019-03-19 04:25:07.000000000 +0000 @@ -3,6 +3,81 @@ ctypedef int OGRErr +cdef extern from "ogr_core.h": + ctypedef enum OGRwkbGeometryType: + wkbUnknown + wkbPoint + wkbLineString + wkbPolygon + wkbMultiPoint + wkbMultiLineString + wkbMultiPolygon +
wkbGeometryCollection + wkbCircularString + wkbCompoundCurve + wkbCurvePolygon + wkbMultiCurve + wkbMultiSurface + wkbCurve + wkbSurface + wkbPolyhedralSurface + wkbTIN + wkbTriangle + wkbNone + wkbLinearRing + wkbCircularStringZ + wkbCompoundCurveZ + wkbCurvePolygonZ + wkbMultiCurveZ + wkbMultiSurfaceZ + wkbCurveZ + wkbSurfaceZ + wkbPolyhedralSurfaceZ + wkbTINZ + wkbTriangleZ + wkbPointM + wkbLineStringM + wkbPolygonM + wkbMultiPointM + wkbMultiLineStringM + wkbMultiPolygonM + wkbGeometryCollectionM + wkbCircularStringM + wkbCompoundCurveM + wkbCurvePolygonM + wkbMultiCurveM + wkbMultiSurfaceM + wkbCurveM + wkbSurfaceM + wkbPolyhedralSurfaceM + wkbTINM + wkbTriangleM + wkbPointZM + wkbLineStringZM + wkbPolygonZM + wkbMultiPointZM + wkbMultiLineStringZM + wkbMultiPolygonZM + wkbGeometryCollectionZM + wkbCircularStringZM + wkbCompoundCurveZM + wkbCurvePolygonZM + wkbMultiCurveZM + wkbMultiSurfaceZM + wkbCurveZM + wkbSurfaceZM + wkbPolyhedralSurfaceZM + wkbTINZM + wkbTriangleZM + wkbPoint25D + wkbLineString25D + wkbPolygon25D + wkbMultiPoint25D + wkbMultiLineString25D + wkbMultiPolygon25D + wkbGeometryCollection25D + + ctypedef struct OGREnvelope: double MinX double MaxX @@ -15,7 +90,7 @@ void OGR_G_AddPoint (void *geometry, double x, double y, double z) void OGR_G_AddPoint_2D (void *geometry, double x, double y) void OGR_G_CloseRings (void *geometry) - void * OGR_G_CreateGeometry (int wkbtypecode) + void * OGR_G_CreateGeometry (OGRwkbGeometryType wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) @@ -65,5 +140,7 @@ cdef void * build(self, object geom) except NULL -cdef unsigned int geometry_type_code(object name) +cdef unsigned int geometry_type_code(object name) except? 9999 cdef object normalize_geometry_type_code(unsigned int code) +cdef unsigned int base_geometry_type_code(unsigned int code) + diff -Nru fiona-1.7.10/fiona/_geometry.pyx fiona-1.8.6/fiona/_geometry.pyx --- fiona-1.7.10/fiona/_geometry.pyx 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/_geometry.pyx 2019-03-19 04:25:07.000000000 +0000 @@ -11,7 +11,7 @@ def emit(self, record): pass -log = logging.getLogger("Fiona") +log = logging.getLogger(__name__) log.addHandler(NullHandler()) # Mapping of OGR integer geometry types to GeoJSON type names. @@ -49,7 +49,7 @@ GEOJSON2OGR_GEOMETRY_TYPES = dict((v, k) for k, v in GEOMETRY_TYPES.iteritems()) -cdef unsigned int geometry_type_code(name): +cdef unsigned int geometry_type_code(name) except? 9999: """Map OGC geometry type names to integer codes.""" offset = 0 if name.endswith('ZM'): @@ -67,19 +67,28 @@ cdef object normalize_geometry_type_code(unsigned int code): - """Normalize geometry type codes.""" + """Normalize M geometry type codes.""" + # Normalize 'M' types to 2D types. + if 2000 < code < 3000: + code = code % 1000 + # Normalize 'ZM' types to 3D types. + elif 3000 < code < 4000: + code = (code % 1000) | 0x80000000 + if code not in GEOMETRY_TYPES: + raise UnsupportedGeometryTypeError(code) + + return code + + +cdef inline unsigned int base_geometry_type_code(unsigned int code): + """ Returns base geometry code without Z, M and ZM types """ # Remove 2.5D flag. - norm_code = code & (~0x80000000) + code = code & (~0x80000000) # Normalize Z, M, and ZM types. Fiona 1.x does not support M # and doesn't treat OGC 'Z' variants as special types of their # own. 
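The normalization above leans on OGR's numeric layout for geometry type codes: plain 2D types sit below 1000, ISO 'M' variants occupy 2000-2999, 'ZM' variants 3000-3999, and the legacy 2.5D flag is the 0x80000000 bit. Worked examples, assuming the standard OGR values wkbPointM == 2001 and wkbPointZM == 3001:

    2001 % 1000 == 1               # PointM normalizes to Point
    (3001 % 1000) | 0x80000000     # PointZM becomes Point plus the 2.5D flag
    (code & ~0x80000000) % 1000    # base type with Z, M, and ZM all stripped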
- norm_code = norm_code % 1000 - - if norm_code not in GEOMETRY_TYPES: - raise UnsupportedGeometryTypeError(norm_code) - - return norm_code + return code % 1000 # Geometry related functions and classes follow. @@ -87,7 +96,7 @@ """Make an OGR geometry from a WKB string""" wkbtype = bytearray(wkb)[1] cdef unsigned char *buffer = wkb - cdef void *cogr_geometry = OGR_G_CreateGeometry(wkbtype) + cdef void *cogr_geometry = OGR_G_CreateGeometry(wkbtype) if cogr_geometry is not NULL: OGR_G_ImportFromWkb(cogr_geometry, buffer, len(wkb)) return cogr_geometry @@ -164,13 +173,7 @@ cdef unsigned int etype = OGR_G_GetGeometryType(geom) - # Remove 2.5D flag. - self.code = etype & (~0x80000000) - - # Normalize Z, M, and ZM types. Fiona 1.x does not support M - # and doesn't treat OGC 'Z' variants as special types of their - # own. - self.code = self.code % 1000 + self.code = base_geometry_type_code(etype) if self.code not in GEOMETRY_TYPES: raise UnsupportedGeometryTypeError(self.code) @@ -193,7 +196,7 @@ """Builds OGR geometries from Fiona geometries. """ cdef void * _createOgrGeometry(self, int geom_type) except NULL: - cdef void *cogr_geometry = OGR_G_CreateGeometry(geom_type) + cdef void *cogr_geometry = OGR_G_CreateGeometry(geom_type) if cogr_geometry == NULL: raise Exception("Could not create OGR Geometry of type: %i" % geom_type) return cogr_geometry @@ -214,16 +217,13 @@ cdef void * _buildLineString(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['LineString']) for coordinate in coordinates: - log.debug("Adding point %s", coordinate) self._addPointToGeometry(cogr_geometry, coordinate) return cogr_geometry cdef void * _buildLinearRing(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['LinearRing']) for coordinate in coordinates: - log.debug("Adding point %s", coordinate) self._addPointToGeometry(cogr_geometry, coordinate) - log.debug("Closing ring") OGR_G_CloseRings(cogr_geometry) return cogr_geometry @@ -231,54 +231,40 @@ cdef void *cogr_ring cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['Polygon']) for ring in coordinates: - log.debug("Adding ring %s", ring) cogr_ring = self._buildLinearRing(ring) - log.debug("Built ring") OGR_G_AddGeometryDirectly(cogr_geometry, cogr_ring) - log.debug("Added ring %s", ring) return cogr_geometry cdef void * _buildMultiPoint(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiPoint']) for coordinate in coordinates: - log.debug("Adding point %s", coordinate) cogr_part = self._buildPoint(coordinate) OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) - log.debug("Added point %s", coordinate) return cogr_geometry cdef void * _buildMultiLineString(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiLineString']) for line in coordinates: - log.debug("Adding line %s", line) cogr_part = self._buildLineString(line) - log.debug("Built line") OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) - log.debug("Added line %s", line) return cogr_geometry cdef void * _buildMultiPolygon(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiPolygon']) for part in coordinates: - log.debug("Adding polygon %s", part) cogr_part = 
self._buildPolygon(part) - log.debug("Built polygon") OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) - log.debug("Added polygon %s", part) return cogr_geometry cdef void * _buildGeometryCollection(self, object coordinates) except NULL: cdef void *cogr_part cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['GeometryCollection']) for part in coordinates: - log.debug("Adding part %s", part) cogr_part = OGRGeomBuilder().build(part) - log.debug("Built part") OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) - log.debug("Added part %s", part) return cogr_geometry cdef void * build(self, object geometry) except NULL: @@ -305,11 +291,6 @@ raise ValueError("Unsupported geometry type %s" % typename) -cdef geometry(void *geom): - """Factory for Fiona geometries""" - return GeomBuilder().build(geom) - - def geometryRT(geometry): # For testing purposes only, leaks the JSON data cdef void *cogr_geometry = OGRGeomBuilder().build(geometry) diff -Nru fiona-1.7.10/fiona/__init__.py fiona-1.8.6/fiona/__init__.py --- fiona-1.7.10/fiona/__init__.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/__init__.py 2019-03-19 04:25:07.000000000 +0000 @@ -62,17 +62,37 @@ writing modes) flush contents to disk when their ``with`` blocks end. """ +from contextlib import contextmanager import logging import os +import sys +import warnings + from six import string_types -from fiona.collection import Collection, BytesCollection, vsi_path -from fiona._drivers import driver_count, GDALEnv +try: + from pathlib import Path +except ImportError: # pragma: no cover + class Path: + pass + +if sys.platform == "win32": + libdir = os.path.join(os.path.dirname(__file__), ".libs") + os.environ["PATH"] = os.environ["PATH"] + ";" + libdir + +from fiona.collection import BytesCollection, Collection from fiona.drvsupport import supported_drivers +from fiona.env import ensure_env_with_credentials, Env +from fiona.errors import FionaDeprecationWarning +from fiona._env import driver_count +from fiona._env import ( + calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name, + get_gdal_version_tuple) from fiona.compat import OrderedDict -from fiona.ogrext import _bounds, _listlayers, FIELD_TYPES_MAP, _remove -from fiona.ogrext import ( - calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name) +from fiona.io import MemoryFile +from fiona.ogrext import _bounds, _listlayers, FIELD_TYPES_MAP, _remove, _remove_layer +from fiona.path import ParsedPath, parse_path, vsi_path +from fiona.vfs import parse_paths as vfs_parse_paths # These modules are imported by fiona.ogrext, but are also import here to # help tools like cx_Freeze find them automatically @@ -81,25 +101,20 @@ __all__ = ['bounds', 'listlayers', 'open', 'prop_type', 'prop_width'] -__version__ = "1.7.10" -__gdal_version__ = get_gdal_release_name().decode('utf-8') +__version__ = "1.8.6" +__gdal_version__ = get_gdal_release_name() + +gdal_version = get_gdal_version_tuple() log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) -def open( - path, - mode='r', - driver=None, - schema=None, - crs=None, - encoding=None, - layer=None, - vfs=None, - enabled_drivers=None, - crs_wkt=None): - """Open file at ``path`` in ``mode`` "r" (read), "a" (append), or - "w" (write) and return a ``Collection`` object. 
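The rewritten open() that follows accepts more than a string path: a pathlib.Path is converted to a string, and binary file-like objects are routed through MemoryFile (read in 'r' mode, buffered and flushed back to the file in 'w' mode). Illustrative usage under those assumptions, with a hypothetical data.gpkg:

    import fiona
    from pathlib import Path

    with fiona.open(Path("data.gpkg")) as src:   # pathlib.Path is accepted
        print(src.schema)

    with open("data.gpkg", "rb") as f:           # so is a binary file object
        with fiona.open(f) as src:
            first = next(iter(src))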
+@ensure_env_with_credentials +def open(fp, mode='r', driver=None, schema=None, crs=None, encoding=None, + layer=None, vfs=None, enabled_drivers=None, crs_wkt=None, + **kwargs): + """Open a collection for read, append, or write In write mode, a driver name such as "ESRI Shapefile" or "GPX" (see OGR docs or ``ogr2ogr --help`` on the command line) and a schema @@ -133,11 +148,6 @@ When the provided path is to a file containing multiple named layers of data, a layer can be singled out by ``layer``. - A virtual filesystem can be specified. The ``vfs`` parameter may be - an Apache Commons VFS style string beginning with "zip://" or - "tar://"". In this case, the ``path`` must be an absolute path - within that container. - The drivers enabled for opening datasets may be restricted to those listed in the ``enabled_drivers`` parameter. This and the ``driver`` parameter afford much control over opening of files. @@ -151,37 +161,116 @@ fiona.open( 'example.shp', enabled_drivers=['GeoJSON', 'ESRI Shapefile']) + Parameters + ---------- + fp : URI (str or pathlib.Path), or file-like object + A dataset resource identifier or file object. + mode : str + One of 'r', to read (the default); 'a', to append; or 'w', to + write. + driver : str + In 'w' mode a format driver name is required. In 'r' or 'a' + mode this parameter has no effect. + schema : dict + Required in 'w' mode, has no effect in 'r' or 'a' mode. + crs : str or dict + Required in 'w' mode, has no effect in 'r' or 'a' mode. + encoding : str + Name of the encoding used to encode or decode the dataset. + layer : int or str + The integer index or name of a layer in a multi-layer dataset. + vfs : str + This is a deprecated parameter. A URI scheme such as "zip://" + should be used instead. + enabled_drivers : list + An optional list of driver names to used when opening a + collection. + crs_wkt : str + An optional WKT representation of a coordinate reference + system. + kwargs : mapping + Other driver-specific parameters that will be interpreted by + the OGR library as layer creation or opening options. + + Returns + ------- + Collection """ - # Parse the vfs into a vsi and an archive path. - path, vsi, archive = parse_paths(path, vfs) - if mode in ('a', 'r'): - if archive: - if not os.path.exists(archive): - raise IOError("no such archive file: %r" % archive) - elif path != '-' and not os.path.exists(path): - raise IOError("no such file or directory: %r" % path) - c = Collection(path, mode, driver=driver, encoding=encoding, - layer=layer, vsi=vsi, archive=archive, - enabled_drivers=enabled_drivers) - elif mode == 'w': + + if mode == 'r' and hasattr(fp, 'read'): + + @contextmanager + def fp_reader(fp): + memfile = MemoryFile(fp.read()) + dataset = memfile.open() + try: + yield dataset + finally: + dataset.close() + memfile.close() + + return fp_reader(fp) + + elif mode == 'w' and hasattr(fp, 'write'): if schema: # Make an ordered dict of schema properties. 
this_schema = schema.copy() this_schema['properties'] = OrderedDict(schema['properties']) else: this_schema = None - c = Collection(path, mode, crs=crs, driver=driver, schema=this_schema, - encoding=encoding, layer=layer, vsi=vsi, archive=archive, - enabled_drivers=enabled_drivers, crs_wkt=crs_wkt) + + @contextmanager + def fp_writer(fp): + memfile = MemoryFile() + dataset = memfile.open( + driver=driver, crs=crs, schema=schema, layer=layer, + encoding=encoding, enabled_drivers=enabled_drivers, + **kwargs) + try: + yield dataset + finally: + dataset.close() + memfile.seek(0) + fp.write(memfile.read()) + memfile.close() + + return fp_writer(fp) + else: - raise ValueError( - "mode string must be one of 'r', 'w', or 'a', not %s" % mode) - return c + # If a pathlib.Path instance is given, convert it to a string path. + if isinstance(fp, Path): + fp = str(fp) + + if vfs: + # Parse the vfs into a vsi and an archive path. + path, scheme, archive = vfs_parse_paths(fp, vfs=vfs) + path = ParsedPath(path, archive, scheme) + else: + path = parse_path(fp) + + if mode in ('a', 'r'): + c = Collection(path, mode, driver=driver, encoding=encoding, + layer=layer, enabled_drivers=enabled_drivers, **kwargs) + elif mode == 'w': + if schema: + # Make an ordered dict of schema properties. + this_schema = schema.copy() + this_schema['properties'] = OrderedDict(schema['properties']) + else: + this_schema = None + c = Collection(path, mode, crs=crs, driver=driver, schema=this_schema, + encoding=encoding, layer=layer, enabled_drivers=enabled_drivers, crs_wkt=crs_wkt, + **kwargs) + else: + raise ValueError( + "mode string must be one of 'r', 'w', or 'a', not %s" % mode) + + return c collection = open -def remove(path_or_collection, driver=None): +def remove(path_or_collection, driver=None, layer=None): """Deletes an OGR data source The required ``path`` argument may be an absolute or relative file path. @@ -203,11 +292,13 @@ collection.close() else: path = path_or_collection - if driver is None: - raise ValueError("The driver argument is required when removing a path") - _remove(path, driver) + if layer is None: + _remove(path, driver) + else: + _remove_layer(path, layer, driver) +@ensure_env_with_credentials def listlayers(path, vfs=None): """Returns a list of layer names in their index order. @@ -224,29 +315,14 @@ if vfs and not isinstance(vfs, string_types): raise TypeError("invalid vfs: %r" % vfs) - path, vsi, archive = parse_paths(path, vfs) - - if archive: - if not os.path.exists(archive): - raise IOError("no such archive file: %r" % archive) - elif not os.path.exists(path): - raise IOError("no such file or directory: %r" % path) - - with drivers(): - return _listlayers(vsi_path(path, vsi, archive)) - - -def parse_paths(path, vfs=None): - archive = vsi = None if vfs: - parts = vfs.split("://") - vsi = parts.pop(0) if parts else None - archive = parts.pop(0) if parts else None + pobj_vfs = parse_path(vfs) + pobj_path = parse_path(path) + pobj = ParsedPath(pobj_path.path, pobj_vfs.path, pobj_vfs.scheme) else: - parts = path.split("://") - path = parts.pop() if parts else None - vsi = parts.pop() if parts else None - return path, vsi, archive + pobj = parse_path(path) + + return _listlayers(vsi_path(pobj)) def prop_width(val): @@ -279,13 +355,18 @@ def drivers(*args, **kwargs): - """Returns a context manager with registered drivers.""" + """Returns a context manager with registered drivers. 
+ + DEPRECATED + """ + warnings.warn("Use fiona.Env() instead.", FionaDeprecationWarning, stacklevel=2) + if driver_count == 0: log.debug("Creating a chief GDALEnv in drivers()") - return GDALEnv(**kwargs) + return Env(**kwargs) else: log.debug("Creating a not-responsible GDALEnv in drivers()") - return GDALEnv(**kwargs) + return Env(**kwargs) def bounds(ob): diff -Nru fiona-1.7.10/fiona/io.py fiona-1.8.6/fiona/io.py --- fiona-1.7.10/fiona/io.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/io.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,105 @@ +"""Classes capable of reading and writing collections +""" + +from collections import OrderedDict +import logging + +from fiona.ogrext import MemoryFileBase +from fiona.collection import Collection + + +log = logging.getLogger(__name__) + + +class MemoryFile(MemoryFileBase): + """A BytesIO-like object, backed by an in-memory file. + + This allows formatted files to be read and written without I/O. + + A MemoryFile created with initial bytes becomes immutable. A + MemoryFile created without initial bytes may be written to using + either file-like or dataset interfaces. + + Examples + -------- + + """ + def __init__(self, file_or_bytes=None, filename=None, ext=''): + super(MemoryFile, self).__init__( + file_or_bytes=file_or_bytes, filename=filename, ext=ext) + + def open(self, driver=None, schema=None, crs=None, encoding=None, + layer=None, vfs=None, enabled_drivers=None, crs_wkt=None, + **kwargs): + """Open the file and return a Fiona collection object. + + If data has already been written, the file is opened in 'r' + mode. Otherwise, the file is opened in 'w' mode. + + Parameters + ---------- + Note well that there is no `path` parameter: a `MemoryFile` + contains a single dataset and there is no need to specify a + path. + + Other parameters are optional and have the same semantics as the + parameters of `fiona.open()`. + """ + vsi_path = self.name + + if self.closed: + raise IOError("I/O operation on closed file.") + if self.exists(): + return Collection(vsi_path, 'r', driver=driver, encoding=encoding, + layer=layer, enabled_drivers=enabled_drivers, + **kwargs) + else: + if schema: + # Make an ordered dict of schema properties. + this_schema = schema.copy() + this_schema['properties'] = OrderedDict(schema['properties']) + else: + this_schema = None + return Collection(vsi_path, 'w', crs=crs, driver=driver, + schema=this_schema, encoding=encoding, + layer=layer, enabled_drivers=enabled_drivers, + crs_wkt=crs_wkt, **kwargs) + + def __enter__(self): + return self + + def __exit__(self, *args, **kwargs): + self.close() + + +class ZipMemoryFile(MemoryFile): + """A read-only BytesIO-like object backed by an in-memory zip file. + + This allows a zip file containing formatted files to be read + without I/O. + """ + + def __init__(self, file_or_bytes=None): + super(ZipMemoryFile, self).__init__(file_or_bytes, ext='zip') + + def open(self, path, driver=None, encoding=None, layer=None, + enabled_drivers=None, **kwargs): + """Open a dataset within the zipped stream. + + Parameters + ---------- + path : str + Path to a dataset in the zip file, relative to the root of the + archive. 
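The MemoryFile docstring above leaves its Examples section empty; a minimal sketch of the intended use, assuming geojson_bytes holds a complete serialized dataset and zip_bytes a zip archive containing a hypothetical layer.shp:

    from fiona.io import MemoryFile, ZipMemoryFile

    with MemoryFile(geojson_bytes) as memfile:
        with memfile.open() as src:              # data exists, so 'r' mode
            features = list(src)

    with ZipMemoryFile(zip_bytes) as memfile:
        with memfile.open("layer.shp") as src:   # path relative to archive root
            schema = src.schema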
+ + Returns + ------- + A Fiona collection object + """ + vsi_path = '/vsizip{0}/{1}'.format(self.name, path.lstrip('/')) + + if self.closed: + raise IOError("I/O operation on closed file.") + return Collection(vsi_path, 'r', driver=driver, encoding=encoding, + layer=layer, enabled_drivers=enabled_drivers, + **kwargs) diff -Nru fiona-1.7.10/fiona/logutils.py fiona-1.8.6/fiona/logutils.py --- fiona-1.7.10/fiona/logutils.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/logutils.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,37 @@ +"""Logging helper classes.""" + +import logging + + +class FieldSkipLogFilter(logging.Filter): + """Filter field skip log messages. + + At most one message per field skipped per loop will be passed. + """ + + def __init__(self, name=''): + super(FieldSkipLogFilter, self).__init__(name) + self.seen_msgs = set() + + def filter(self, record): + """Pass record if not seen.""" + if getattr(record, 'msg', "").startswith("Skipping field"): + msg = record.getMessage() + retval = msg not in self.seen_msgs + self.seen_msgs.add(msg) + return retval + else: + return 1 + + +class LogFiltering(object): + + def __init__(self, logger, filter): + self.logger = logger + self.filter = filter + + def __enter__(self): + self.logger.addFilter(self.filter) + + def __exit__(self, *args, **kwargs): + self.logger.removeFilter(self.filter) diff -Nru fiona-1.7.10/fiona/ogrext1.pxd fiona-1.8.6/fiona/ogrext1.pxd --- fiona-1.7.10/fiona/ogrext1.pxd 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/ogrext1.pxd 2019-03-19 04:25:07.000000000 +0000 @@ -2,6 +2,8 @@ # All rights reserved. # See ../LICENSE.txt +from libc.stdio cimport FILE + cdef extern from "gdal.h": char * GDALVersionInfo (char *pszRequest) @@ -16,17 +18,30 @@ const char *CPLGetConfigOption (char *, char *) cdef extern from "cpl_string.h": + char ** CSLAddNameValue (char **list, char *name, char *value) char ** CSLSetNameValue (char **list, char *name, char *value) void CSLDestroy (char **list) + char ** CSLAddString(char **list, const char *string) -cdef extern from "cpl_vsi.h": - ctypedef struct VSILFILE: - pass - int VSIFCloseL (VSILFILE *) - VSILFILE * VSIFileFromMemBuffer (const char * filename, - unsigned char * data, - int data_len, - int take_ownership) +cdef extern from "cpl_vsi.h" nogil: + ctypedef int vsi_l_offset + ctypedef FILE VSILFILE + + unsigned char *VSIGetMemFileBuffer(const char *path, + vsi_l_offset *data_len, + int take_ownership) + VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, + vsi_l_offset data_len, int take_ownership) + VSILFILE* VSIFOpenL(const char *path, const char *mode) + int VSIFCloseL(VSILFILE *fp) + int VSIUnlink(const char *path) + + int VSIFFlushL(VSILFILE *fp) + size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) + vsi_l_offset VSIFTellL(VSILFILE *fp) + int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) + size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIUnlink (const char * pathname) ctypedef int OGRErr @@ -37,8 +52,99 @@ double MaxY cdef extern from "ogr_core.h": + ctypedef enum OGRwkbGeometryType: + wkbUnknown + wkbPoint + wkbLineString + wkbPolygon + wkbMultiPoint + wkbMultiLineString + wkbMultiPolygon + wkbGeometryCollection + wkbCircularString + wkbCompoundCurve + wkbCurvePolygon + wkbMultiCurve + wkbMultiSurface + wkbCurve + wkbSurface + wkbPolyhedralSurface + wkbTIN + wkbTriangle + wkbNone + wkbLinearRing + wkbCircularStringZ +
wkbCompoundCurveZ + wkbCurvePolygonZ + wkbMultiCurveZ + wkbMultiSurfaceZ + wkbCurveZ + wkbSurfaceZ + wkbPolyhedralSurfaceZ + wkbTINZ + wkbTriangleZ + wkbPointM + wkbLineStringM + wkbPolygonM + wkbMultiPointM + wkbMultiLineStringM + wkbMultiPolygonM + wkbGeometryCollectionM + wkbCircularStringM + wkbCompoundCurveM + wkbCurvePolygonM + wkbMultiCurveM + wkbMultiSurfaceM + wkbCurveM + wkbSurfaceM + wkbPolyhedralSurfaceM + wkbTINM + wkbTriangleM + wkbPointZM + wkbLineStringZM + wkbPolygonZM + wkbMultiPointZM + wkbMultiLineStringZM + wkbMultiPolygonZM + wkbGeometryCollectionZM + wkbCircularStringZM + wkbCompoundCurveZM + wkbCurvePolygonZM + wkbMultiCurveZM + wkbMultiSurfaceZM + wkbCurveZM + wkbSurfaceZM + wkbPolyhedralSurfaceZM + wkbTINZM + wkbTriangleZM + wkbPoint25D + wkbLineString25D + wkbPolygon25D + wkbMultiPoint25D + wkbMultiLineString25D + wkbMultiPolygon25D + wkbGeometryCollection25D + + ctypedef enum OGRFieldType: + OFTInteger + OFTIntegerList + OFTReal + OFTRealList + OFTString + OFTStringList + OFTWideString + OFTWideStringList + OFTBinary + OFTDate + OFTTime + OFTDateTime + OFTMaxType + char * OGRGeometryTypeToName(int) + char * ODsCCreateLayer = "CreateLayer" + char * ODsCDeleteLayer = "DeleteLayer" + cdef extern from "ogr_srs_api.h": ctypedef void * OGRSpatialReferenceH @@ -61,7 +167,7 @@ int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) cdef extern from "ogr_api.h": - char * OGR_Dr_GetName (void *driver) + const char * OGR_Dr_GetName (void *driver) void * OGR_Dr_CreateDataSource (void *driver, const char *path, char **options) int OGR_Dr_DeleteDataSource (void *driver, char *) void * OGR_Dr_Open (void *driver, const char *path, int bupdate) @@ -76,6 +182,7 @@ void * OGR_DS_GetLayer (void *datasource, int n) void OGR_DS_ReleaseResultSet (void *datasource, void *results) int OGR_DS_SyncToDisk (void *datasource) + int OGR_DS_TestCapability(void *datasource, char *capability) void * OGR_F_Create (void *featuredefn) void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) @@ -84,21 +191,24 @@ double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) + unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) int OGR_F_GetFieldCount (void *feature) void * OGR_F_GetFieldDefnRef (void *feature, int n) int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) + void * OGR_F_StealGeometry (void *feature) void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) void OGR_F_SetFieldString (void *feature, int n, char *value) + void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) int OGR_F_SetGeometryDirectly (void *feature, void *geometry) void * OGR_FD_Create (char *name) int OGR_FD_GetFieldCount (void *featuredefn) void * OGR_FD_GetFieldDefn (void *featuredefn, int n) int OGR_FD_GetGeomType (void *featuredefn) char * OGR_FD_GetName (void *featuredefn) - void * OGR_Fld_Create (char *name, int fieldtype) + void * OGR_Fld_Create (char *name, OGRFieldType fieldtype) void OGR_Fld_Destroy (void *fielddefn) char * OGR_Fld_GetNameRef (void *fielddefn) int OGR_Fld_GetPrecision (void *fielddefn) @@ -126,8 +236,11 @@ double OGR_G_GetZ (void *geometry, int n) void OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int 
nbytes) int OGR_G_WkbSize (void *geometry) + void * OGR_G_ForceToMultiPolygon (void *geometry) + void * OGR_G_ForceToPolygon (void *geometry) + void * OGR_G_Clone(void *geometry) OGRErr OGR_L_CreateFeature (void *layer, void *feature) - int OGR_L_CreateField (void *layer, void *fielddefn, int flexible) + OGRErr OGR_L_CreateField (void *layer, void *fielddefn, int flexible) OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) void * OGR_L_GetFeature (void *layer, int n) int OGR_L_GetFeatureCount (void *layer, int m) @@ -146,4 +259,5 @@ void * OGROpen (char *path, int mode, void *x) void * OGROpenShared (char *path, int mode, void *x) int OGRReleaseDataSource (void *datasource) + OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) diff -Nru fiona-1.7.10/fiona/ogrext1.pyx fiona-1.8.6/fiona/ogrext1.pyx --- fiona-1.7.10/fiona/ogrext1.pyx 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/ogrext1.pyx 1970-01-01 00:00:00.000000000 +0000 @@ -1,1286 +0,0 @@ -# These are extension functions and classes using the OGR C API. - -from __future__ import absolute_import - -import datetime -import json -import locale -import logging -import os -import warnings -import math -import uuid - -from six import integer_types, string_types, text_type - -from fiona cimport ogrext1 -from fiona._geometry cimport ( - GeomBuilder, OGRGeomBuilder, geometry_type_code, - normalize_geometry_type_code) -from fiona._err cimport exc_wrap_pointer - -from fiona._err import cpl_errs -from fiona._geometry import GEOMETRY_TYPES -from fiona import compat -from fiona.errors import ( - DriverError, DriverIOError, SchemaError, CRSError, FionaValueError) -from fiona.compat import OrderedDict -from fiona.rfc3339 import parse_date, parse_datetime, parse_time -from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType - - -log = logging.getLogger("Fiona") -class NullHandler(logging.Handler): - def emit(self, record): - pass -log.addHandler(NullHandler()) - - -# Mapping of OGR integer field types to Fiona field type names. -# -# Lists are currently unsupported in this version, but might be done as -# arrays in a future version. - -FIELD_TYPES = [ - 'int', # OFTInteger, Simple 32bit integer - None, # OFTIntegerList, List of 32bit integers - 'float', # OFTReal, Double Precision floating point - None, # OFTRealList, List of doubles - 'str', # OFTString, String of ASCII chars - None, # OFTStringList, Array of strings - None, # OFTWideString, deprecated - None, # OFTWideStringList, deprecated - None, # OFTBinary, Raw Binary data - 'date', # OFTDate, Date - 'time', # OFTTime, Time - 'datetime', # OFTDateTime, Date and Time - 'int', # OFTInteger64, Single 64bit integer #Not supported - None, # OFTInteger64List, List of 64bit integers #Not supported - ] - -# Mapping of Fiona field type names to Python types. 
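The FIELD_TYPES list just above is indexed directly by OGR's integer field-type code, so type lookup in this (deleted) module is a plain list index; the FIELD_TYPES_MAP that follows then takes the resulting name to a Python type. A small plain-Python illustration, with a truncated copy of the list and OGR's actual code for OFTReal:

    FIELD_TYPES = ['int', None, 'float', None, 'str']  # first entries of the list above
    OFTReal = 2                                        # OGR's code for double precision
    fieldtypename = FIELD_TYPES[OFTReal]
    assert fieldtypename == 'float'
    # A None entry (e.g. index 1, OFTIntegerList) marks a type this
    # version skips rather than reads.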
-FIELD_TYPES_MAP = { - 'int': int, - 'float': float, - 'str': text_type, - 'date': FionaDateType, - 'time': FionaTimeType, - 'datetime': FionaDateTimeType - } - -# OGR Driver capability -cdef const char * ODrCCreateDataSource = "CreateDataSource" -cdef const char * ODrCDeleteDataSource = "DeleteDataSource" - -# OGR Layer capability -cdef const char * OLC_RANDOMREAD = "RandomRead" -cdef const char * OLC_SEQUENTIALWRITE = "SequentialWrite" -cdef const char * OLC_RANDOMWRITE = "RandomWrite" -cdef const char * OLC_FASTSPATIALFILTER = "FastSpatialFilter" -cdef const char * OLC_FASTFEATURECOUNT = "FastFeatureCount" -cdef const char * OLC_FASTGETEXTENT = "FastGetExtent" -cdef const char * OLC_FASTSETNEXTBYINDEX = "FastSetNextByIndex" -cdef const char * OLC_CREATEFIELD = "CreateField" -cdef const char * OLC_CREATEGEOMFIELD = "CreateGeomField" -cdef const char * OLC_DELETEFIELD = "DeleteField" -cdef const char * OLC_REORDERFIELDS = "ReorderFields" -cdef const char * OLC_ALTERFIELDDEFN = "AlterFieldDefn" -cdef const char * OLC_DELETEFEATURE = "DeleteFeature" -cdef const char * OLC_STRINGSASUTF8 = "StringsAsUTF8" -cdef const char * OLC_TRANSACTIONS = "Transactions" - -# OGR integer error types. - -OGRERR_NONE = 0 -OGRERR_NOT_ENOUGH_DATA = 1 # not enough data to deserialize */ -OGRERR_NOT_ENOUGH_MEMORY = 2 -OGRERR_UNSUPPORTED_GEOMETRY_TYPE = 3 -OGRERR_UNSUPPORTED_OPERATION = 4 -OGRERR_CORRUPT_DATA = 5 -OGRERR_FAILURE = 6 -OGRERR_UNSUPPORTED_SRS = 7 -OGRERR_INVALID_HANDLE = 8 - - -def _explode(coords): - """Explode a GeoJSON geometry's coordinates object and yield - coordinate tuples. As long as the input is conforming, the type of - the geometry doesn't matter.""" - for e in coords: - if isinstance(e, (float, int)): - yield coords - break - else: - for f in _explode(e): - yield f - - -def _bounds(geometry): - """Bounding box of a GeoJSON geometry""" - try: - xyz = tuple(zip(*list(_explode(geometry['coordinates'])))) - return min(xyz[0]), min(xyz[1]), max(xyz[0]), max(xyz[1]) - except (KeyError, TypeError): - return None - -def calc_gdal_version_num(maj, min, rev): - """Calculates the internal gdal version number based on major, minor and revision""" - return int(maj * 1000000 + min * 10000 + rev*100) - -def get_gdal_version_num(): - """Return current internal version number of gdal""" - return int(ogrext1.GDALVersionInfo("VERSION_NUM")) - -def get_gdal_release_name(): - """Return release name of gdal""" - return ogrext1.GDALVersionInfo("RELEASE_NAME") - - -# Feature extension classes and functions follow. - -cdef class FeatureBuilder: - """Build Fiona features from OGR feature pointers. - - No OGR objects are allocated by this function and the feature - argument is not destroyed. 
- """ - - cdef build(self, void *feature, encoding='utf-8', bbox=False, driver=None): - # The only method anyone ever needs to call - cdef void *fdefn - cdef int i - cdef int y = 0 - cdef int m = 0 - cdef int d = 0 - cdef int hh = 0 - cdef int mm = 0 - cdef int ss = 0 - cdef int tz = 0 - cdef int retval - cdef char *key_c - props = OrderedDict() - for i in range(ogrext1.OGR_F_GetFieldCount(feature)): - fdefn = ogrext1.OGR_F_GetFieldDefnRef(feature, i) - if fdefn == NULL: - raise ValueError("Null feature definition") - key_c = ogrext1.OGR_Fld_GetNameRef(fdefn) - if key_c == NULL: - raise ValueError("Null field name reference") - key_b = key_c - key = key_b.decode(encoding) - fieldtypename = FIELD_TYPES[ogrext1.OGR_Fld_GetType(fdefn)] - if not fieldtypename: - log.warning( - "Skipping field %s: invalid type %s", - key, - ogrext1.OGR_Fld_GetType(fdefn)) - continue - - # TODO: other types - fieldtype = FIELD_TYPES_MAP[fieldtypename] - if not ogrext1.OGR_F_IsFieldSet(feature, i): - props[key] = None - elif fieldtype is int: - props[key] = ogrext1.OGR_F_GetFieldAsInteger(feature, i) - elif fieldtype is float: - props[key] = ogrext1.OGR_F_GetFieldAsDouble(feature, i) - - elif fieldtype is text_type: - try: - val = ogrext1.OGR_F_GetFieldAsString(feature, i) - val = val.decode(encoding) - except UnicodeError: - log.error("Failed to decode property '%s' value '%s'", - key, val) - raise - - # Does the text contain a JSON object? Let's check. - # Let's check as cheaply as we can. - if driver == 'GeoJSON' and val.startswith('{'): - try: - val = json.loads(val) - except ValueError as err: - log.warning(str(err)) - - # Now add to the properties object. - props[key] = val - - elif fieldtype in (FionaDateType, FionaTimeType, FionaDateTimeType): - retval = ogrext1.OGR_F_GetFieldAsDateTime( - feature, i, &y, &m, &d, &hh, &mm, &ss, &tz) - if fieldtype is FionaDateType: - props[key] = datetime.date(y, m, d).isoformat() - elif fieldtype is FionaTimeType: - props[key] = datetime.time(hh, mm, ss).isoformat() - else: - props[key] = datetime.datetime( - y, m, d, hh, mm, ss).isoformat() - else: - log.debug("%s: None, fieldtype: %r, %r" % (key, fieldtype, fieldtype in string_types)) - props[key] = None - - cdef void *cogr_geometry = ogrext1.OGR_F_GetGeometryRef(feature) - if cogr_geometry != NULL: - geom = GeomBuilder().build(cogr_geometry) - else: - geom = None - return { - 'type': 'Feature', - 'id': str(ogrext1.OGR_F_GetFID(feature)), - 'geometry': geom, - 'properties': props } - - -cdef class OGRFeatureBuilder: - - """Builds an OGR Feature from a Fiona feature mapping. - - Allocates one OGR Feature which should be destroyed by the caller. - Borrows a layer definition from the collection. - """ - - cdef void * build(self, feature, collection) except NULL: - cdef void *cogr_geometry = NULL - cdef char *string_c - cdef WritingSession session - session = collection.session - cdef void *cogr_layer = session.cogr_layer - if cogr_layer == NULL: - raise ValueError("Null layer") - cdef void *cogr_featuredefn = ogrext1.OGR_L_GetLayerDefn(cogr_layer) - if cogr_featuredefn == NULL: - raise ValueError("Null feature definition") - cdef void *cogr_feature = ogrext1.OGR_F_Create(cogr_featuredefn) - if cogr_feature == NULL: - raise ValueError("Null feature") - - if feature['geometry'] is not None: - cogr_geometry = OGRGeomBuilder().build( - feature['geometry']) - ogrext1.OGR_F_SetGeometryDirectly(cogr_feature, cogr_geometry) - - # OGR_F_SetFieldString takes UTF-8 encoded strings ('bytes' in - # Python 3). 
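As the comment above notes, the C-level setter consumes already-encoded bytes, so the builder encodes each string with the session's internal encoding before the OGR call. The same step isolated in plain Python (the value is illustrative):

    encoding = 'utf-8'                    # session.get_internalencoding() in the real code
    value = 'Zürich'
    value_bytes = value.encode(encoding)  # what OGR_F_SetFieldString receives
    assert isinstance(value_bytes, bytes)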
- encoding = session.get_internalencoding() - - for key, value in feature['properties'].items(): - log.debug( - "Looking up %s in %s", key, repr(session._schema_mapping)) - ogr_key = session._schema_mapping[key] - schema_type = collection.schema['properties'][key] - - # Catch and re-raise unicode encoding errors. - try: - key_bytes = ogr_key.encode(encoding) - except UnicodeError: - log.error("Failed to encode property '%s' value '%s'", - key, value) - raise - - key_c = key_bytes - i = ogrext1.OGR_F_GetFieldIndex(cogr_feature, key_c) - if i < 0: - continue - - # Special case: serialize dicts to assist OGR. - if isinstance(value, dict): - value = json.dumps(value) - - # Continue over the standard OGR types. - if isinstance(value, integer_types): - ogrext1.OGR_F_SetFieldInteger(cogr_feature, i, value) - elif isinstance(value, float): - ogrext1.OGR_F_SetFieldDouble(cogr_feature, i, value) - elif (isinstance(value, string_types) - and schema_type in ['date', 'time', 'datetime']): - if schema_type == 'date': - y, m, d, hh, mm, ss, ff = parse_date(value) - elif schema_type == 'time': - y, m, d, hh, mm, ss, ff = parse_time(value) - else: - y, m, d, hh, mm, ss, ff = parse_datetime(value) - ogrext1.OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, hh, mm, ss, 0) - elif (isinstance(value, datetime.date) - and schema_type == 'date'): - y, m, d = value.year, value.month, value.day - ogrext1.OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, 0, 0, 0, 0) - elif (isinstance(value, datetime.datetime) - and schema_type == 'datetime'): - y, m, d = value.year, value.month, value.day - hh, mm, ss = value.hour, value.minute, value.second - ogrext1.OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, hh, mm, ss, 0) - elif (isinstance(value, datetime.time) - and schema_type == 'time'): - hh, mm, ss = value.hour, value.minute, value.second - ogrext1.OGR_F_SetFieldDateTime( - cogr_feature, i, 0, 0, 0, hh, mm, ss, 0) - elif isinstance(value, string_types): - # Catch, log, and re-raise string field value encoding errors. 
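The try/except that follows implements a small catch, log, and re-raise pattern: record the offending key and value for diagnosis, then let the UnicodeError propagate to the caller. Reduced to plain Python (the helper name is invented for illustration):

    import logging
    log = logging.getLogger('Fiona')

    def encode_or_raise(key, value, encoding):
        # Log enough context to identify the bad property, then re-raise.
        try:
            return value.encode(encoding)
        except UnicodeError:
            log.error("Failed to encode property '%s' value '%s'", key, value)
            raise

    encode_or_raise('name', 'Zürich', 'utf-8')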
- try: - value_bytes = value.encode(encoding) - except UnicodeError: - log.error("Failed to encode property '%s' value '%s'", - key, value) - raise - string_c = value_bytes - ogrext1.OGR_F_SetFieldString(cogr_feature, i, string_c) - elif value is None: - pass # keep field unset/null - else: - raise ValueError("Invalid field type %s" % type(value)) - log.debug("Set field %s: %s" % (key, value)) - return cogr_feature - - -cdef _deleteOgrFeature(void *cogr_feature): - """Delete an OGR feature""" - if cogr_feature != NULL: - ogrext1.OGR_F_Destroy(cogr_feature) - cogr_feature = NULL - - -def featureRT(feature, collection): - # For testing purposes only, leaks the JSON data - cdef void *cogr_feature = OGRFeatureBuilder().build(feature, collection) - cdef void *cogr_geometry = ogrext1.OGR_F_GetGeometryRef(cogr_feature) - if cogr_geometry == NULL: - raise ValueError("Null geometry") - log.debug("Geometry: %s" % ogrext1.OGR_G_ExportToJson(cogr_geometry)) - encoding = collection.encoding or 'utf-8' - result = FeatureBuilder().build( - cogr_feature, - bbox=False, - encoding=encoding, - driver=collection.driver - ) - _deleteOgrFeature(cogr_feature) - return result - - -# Collection-related extension classes and functions - -cdef class Session: - - cdef void *cogr_ds - cdef void *cogr_layer - cdef object _fileencoding - cdef object _encoding - cdef object collection - - def __init__(self): - self.cogr_ds = NULL - self.cogr_layer = NULL - self._fileencoding = None - self._encoding = None - - def __dealloc__(self): - self.stop() - - def start(self, collection): - cdef const char *path_c = NULL - cdef const char *name_c = NULL - cdef void *drv = NULL - cdef void *ds = NULL - - if collection.path == '-': - path = '/vsistdin/' - else: - path = collection.path - try: - path_b = path.encode('utf-8') - except UnicodeError: - # Presume already a UTF-8 encoded string - path_b = path - path_c = path_b - - with cpl_errs: - drivers = [] - if collection._driver: - drivers = [collection._driver] - elif collection.enabled_drivers: - drivers = collection.enabled_drivers - if drivers: - for name in drivers: - name_b = name.encode() - name_c = name_b - log.debug("Trying driver: %s", name) - drv = ogrext1.OGRGetDriverByName(name_c) - if drv != NULL: - ds = ogrext1.OGR_Dr_Open(drv, path_c, 0) - if ds != NULL: - self.cogr_ds = ds - collection._driver = name - break - else: - self.cogr_ds = ogrext1.OGROpen(path_c, 0, NULL) - - if self.cogr_ds == NULL: - raise FionaValueError( - "No dataset found at path '%s' using drivers: %s" % ( - collection.path, - drivers or '*')) - - if isinstance(collection.name, string_types): - name_b = collection.name.encode('utf-8') - name_c = name_b - self.cogr_layer = ogrext1.OGR_DS_GetLayerByName( - self.cogr_ds, name_c) - elif isinstance(collection.name, int): - self.cogr_layer = ogrext1.OGR_DS_GetLayer( - self.cogr_ds, collection.name) - name_c = ogrext1.OGR_L_GetName(self.cogr_layer) - name_b = name_c - collection.name = name_b.decode('utf-8') - - if self.cogr_layer == NULL: - raise ValueError("Null layer: " + repr(collection.name)) - - self.collection = collection - - userencoding = self.collection.encoding - if userencoding: - ogrext1.CPLSetThreadLocalConfigOption('SHAPE_ENCODING', '') - self._fileencoding = userencoding.upper() - else: - self._fileencoding = ( - ogrext1.OGR_L_TestCapability( - self.cogr_layer, OLC_STRINGSASUTF8) and - 'utf-8') or ( - self.get_driver() == "ESRI Shapefile" and - 'ISO-8859-1') or locale.getpreferredencoding().upper() - - def stop(self): - self.cogr_layer = 
NULL - if self.cogr_ds != NULL: - ogrext1.OGR_DS_Destroy(self.cogr_ds) - self.cogr_ds = NULL - - def get_fileencoding(self): - return self._fileencoding - - def get_internalencoding(self): - if not self._encoding: - fileencoding = self.get_fileencoding() - self._encoding = ( - ogrext1.OGR_L_TestCapability( - self.cogr_layer, OLC_STRINGSASUTF8) and - 'utf-8') or fileencoding - return self._encoding - - def get_length(self): - if self.cogr_layer == NULL: - raise ValueError("Null layer") - return ogrext1.OGR_L_GetFeatureCount(self.cogr_layer, 0) - - def get_driver(self): - cdef void *cogr_driver = ogrext1.OGR_DS_GetDriver(self.cogr_ds) - if cogr_driver == NULL: - raise ValueError("Null driver") - cdef char *name = ogrext1.OGR_Dr_GetName(cogr_driver) - driver_name = name - return driver_name.decode() - - def get_schema(self): - cdef int i - cdef int n - cdef void *cogr_featuredefn - cdef void *cogr_fielddefn - cdef char *key_c - props = [] - - if self.cogr_layer == NULL: - raise ValueError("Null layer") - - cogr_featuredefn = ogrext1.OGR_L_GetLayerDefn(self.cogr_layer) - if cogr_featuredefn == NULL: - raise ValueError("Null feature definition") - n = ogrext1.OGR_FD_GetFieldCount(cogr_featuredefn) - for i from 0 <= i < n: - cogr_fielddefn = ogrext1.OGR_FD_GetFieldDefn(cogr_featuredefn, i) - if cogr_fielddefn == NULL: - raise ValueError("Null field definition") - key_c = ogrext1.OGR_Fld_GetNameRef(cogr_fielddefn) - key_b = key_c - if not bool(key_b): - raise ValueError("Invalid field name ref: %s" % key) - key = key_b.decode(self.get_internalencoding()) - fieldtypename = FIELD_TYPES[ogrext1.OGR_Fld_GetType(cogr_fielddefn)] - if not fieldtypename: - log.warning( - "Skipping field %s: invalid type %s", - key, - ogrext1.OGR_Fld_GetType(cogr_fielddefn)) - continue - val = fieldtypename - if fieldtypename == 'float': - fmt = "" - width = ogrext1.OGR_Fld_GetWidth(cogr_fielddefn) - if width: # and width != 24: - fmt = ":%d" % width - precision = ogrext1.OGR_Fld_GetPrecision(cogr_fielddefn) - if precision: # and precision != 15: - fmt += ".%d" % precision - val = "float" + fmt - elif fieldtypename == 'int': - fmt = "" - width = ogrext1.OGR_Fld_GetWidth(cogr_fielddefn) - if width: # and width != 11: - fmt = ":%d" % width - val = fieldtypename + fmt - elif fieldtypename == 'str': - fmt = "" - width = ogrext1.OGR_Fld_GetWidth(cogr_fielddefn) - if width: # and width != 80: - fmt = ":%d" % width - val = fieldtypename + fmt - - props.append((key, val)) - - code = normalize_geometry_type_code( - ogrext1.OGR_FD_GetGeomType(cogr_featuredefn)) - - return { - 'properties': OrderedDict(props), - 'geometry': GEOMETRY_TYPES[code]} - - def get_crs(self): - cdef char *proj_c = NULL - cdef char *auth_key = NULL - cdef char *auth_val = NULL - cdef void *cogr_crs = NULL - if self.cogr_layer == NULL: - raise ValueError("Null layer") - cogr_crs = ogrext1.OGR_L_GetSpatialRef(self.cogr_layer) - crs = {} - if cogr_crs != NULL: - log.debug("Got coordinate system") - - retval = ogrext1.OSRAutoIdentifyEPSG(cogr_crs) - if retval > 0: - log.info("Failed to auto identify EPSG: %d", retval) - - auth_key = ogrext1.OSRGetAuthorityName(cogr_crs, NULL) - auth_val = ogrext1.OSRGetAuthorityCode(cogr_crs, NULL) - - if auth_key != NULL and auth_val != NULL: - key_b = auth_key - key = key_b.decode('utf-8') - if key == 'EPSG': - val_b = auth_val - val = val_b.decode('utf-8') - crs['init'] = "epsg:" + val - else: - ogrext1.OSRExportToProj4(cogr_crs, &proj_c) - if proj_c == NULL: - raise ValueError("Null projection") - proj_b = proj_c - 
log.debug("Params: %s", proj_b) - value = proj_b.decode() - value = value.strip() - for param in value.split(): - kv = param.split("=") - if len(kv) == 2: - k, v = kv - try: - v = float(v) - if v % 1 == 0: - v = int(v) - except ValueError: - # Leave v as a string - pass - elif len(kv) == 1: - k, v = kv[0], True - else: - raise ValueError("Unexpected proj parameter %s" % param) - k = k.lstrip("+") - crs[k] = v - - ogrext1.CPLFree(proj_c) - else: - log.debug("Projection not found (cogr_crs was NULL)") - return crs - - def get_crs_wkt(self): - cdef char *proj_c = NULL - if self.cogr_layer == NULL: - raise ValueError("Null layer") - cogr_crs = ogrext1.OGR_L_GetSpatialRef(self.cogr_layer) - crs_wkt = "" - if cogr_crs != NULL: - log.debug("Got coordinate system") - ogrext1.OSRExportToWkt(cogr_crs, &proj_c) - if proj_c == NULL: - raise ValueError("Null projection") - proj_b = proj_c - crs_wkt = proj_b.decode('utf-8') - ogrext1.CPLFree(proj_c) - else: - log.debug("Projection not found (cogr_crs was NULL)") - return crs_wkt - - def get_extent(self): - if self.cogr_layer == NULL: - raise ValueError("Null layer") - cdef ogrext1.OGREnvelope extent - result = ogrext1.OGR_L_GetExtent(self.cogr_layer, &extent, 1) - return (extent.MinX, extent.MinY, extent.MaxX, extent.MaxY) - - def has_feature(self, fid): - """Provides access to feature data by FID. - - Supports Collection.__contains__(). - """ - cdef void * cogr_feature - fid = int(fid) - cogr_feature = ogrext1.OGR_L_GetFeature(self.cogr_layer, fid) - if cogr_feature != NULL: - _deleteOgrFeature(cogr_feature) - return True - else: - return False - - def get_feature(self, fid): - """Provides access to feature data by FID. - - Supports Collection.__contains__(). - """ - cdef void * cogr_feature - fid = int(fid) - cogr_feature = ogrext1.OGR_L_GetFeature(self.cogr_layer, fid) - if cogr_feature != NULL: - _deleteOgrFeature(cogr_feature) - return True - else: - return False - - - def __getitem__(self, item): - cdef void * cogr_feature - if isinstance(item, slice): - itr = Iterator(self.collection, item.start, item.stop, item.step) - log.debug("Slice: %r", item) - return list(itr) - elif isinstance(item, int): - index = item - # from the back - if index < 0: - ftcount = ogrext1.OGR_L_GetFeatureCount(self.cogr_layer, 0) - if ftcount == -1: - raise IndexError( - "collection's dataset does not support negative indexes") - index += ftcount - cogr_feature = ogrext1.OGR_L_GetFeature(self.cogr_layer, index) - if cogr_feature == NULL: - return None - feature = FeatureBuilder().build( - cogr_feature, - bbox=False, - encoding=self.get_internalencoding(), - driver=self.collection.driver - ) - _deleteOgrFeature(cogr_feature) - return feature - - - def isactive(self): - if self.cogr_layer != NULL and self.cogr_ds != NULL: - return 1 - else: - return 0 - - -cdef class WritingSession(Session): - - cdef object _schema_mapping - - def start(self, collection): - cdef void *cogr_fielddefn - cdef void *cogr_driver - cdef void *cogr_ds = NULL - cdef void *cogr_layer = NULL - cdef void *cogr_srs = NULL - cdef char **options = NULL - self.collection = collection - cdef char *path_c - cdef char *driver_c - cdef char *name_c - cdef char *proj_c - cdef char *fileencoding_c - path = collection.path - - if collection.mode == 'a': - if os.path.exists(path): - try: - path_b = path.encode('utf-8') - except UnicodeError: - path_b = path - path_c = path_b - with cpl_errs: - self.cogr_ds = ogrext1.OGROpen(path_c, 1, NULL) - if self.cogr_ds == NULL: - raise RuntimeError("Failed to open %s" % 
path) - cogr_driver = ogrext1.OGR_DS_GetDriver(self.cogr_ds) - if cogr_driver == NULL: - raise ValueError("Null driver") - - if isinstance(collection.name, string_types): - name_b = collection.name.encode() - name_c = name_b - self.cogr_layer = ogrext1.OGR_DS_GetLayerByName( - self.cogr_ds, name_c) - elif isinstance(collection.name, int): - self.cogr_layer = ogrext1.OGR_DS_GetLayer( - self.cogr_ds, collection.name) - - if self.cogr_layer == NULL: - raise RuntimeError( - "Failed to get layer %s" % collection.name) - else: - raise OSError("No such file or directory %s" % path) - - userencoding = self.collection.encoding - self._fileencoding = (userencoding or ( - ogrext1.OGR_L_TestCapability(self.cogr_layer, OLC_STRINGSASUTF8) and - 'utf-8') or ( - self.get_driver() == "ESRI Shapefile" and - 'ISO-8859-1') or locale.getpreferredencoding()).upper() - - elif collection.mode == 'w': - try: - path_b = path.encode('utf-8') - except UnicodeError: - path_b = path - path_c = path_b - - driver_b = collection.driver.encode() - driver_c = driver_b - - # TODO: use exc_wrap_pointer() - cogr_driver = ogrext1.OGRGetDriverByName(driver_c) - if cogr_driver == NULL: - raise ValueError("Null driver") - - # Our most common use case is the creation of a new data - # file and historically we've assumed that it's a file on - # the local filesystem and queryable via os.path. - # - # TODO: remove the assumption. - # TODO: use exc_wrap_pointer(). - if not os.path.exists(path): - cogr_ds = ogrext1.OGR_Dr_CreateDataSource( - cogr_driver, path_c, NULL) - - else: - cogr_ds = ogrext1.OGROpen(path_c, 1, NULL) - if cogr_ds == NULL: - try: - cogr_ds = exc_wrap_pointer( - ogrext1.OGR_Dr_CreateDataSource( - cogr_driver, path_c, NULL)) - except Exception as exc: - raise DriverIOError(str(exc)) - - elif collection.name is None: - ogrext1.OGR_DS_Destroy(cogr_ds) - cogr_ds = NULL - log.debug("Deleted pre-existing data at %s", path) - - cogr_ds = ogrext1.OGR_Dr_CreateDataSource( - cogr_driver, path_c, NULL) - - else: - pass - - if cogr_ds == NULL: - raise RuntimeError("Failed to open %s" % path) - else: - self.cogr_ds = cogr_ds - - # Set the spatial reference system from the crs given to the - # collection constructor. We by-pass the crs_wkt and crs - # properties because they aren't accessible until the layer - # is constructed (later). - col_crs = collection._crs_wkt or collection._crs - if col_crs: - cogr_srs = ogrext1.OSRNewSpatialReference(NULL) - if cogr_srs == NULL: - raise ValueError("NULL spatial reference") - # First, check for CRS strings like "EPSG:3857". - if isinstance(col_crs, string_types): - proj_b = col_crs.encode('utf-8') - proj_c = proj_b - ogrext1.OSRSetFromUserInput(cogr_srs, proj_c) - elif isinstance(col_crs, compat.DICT_TYPES): - # EPSG is a special case. - init = col_crs.get('init') - if init: - log.debug("Init: %s", init) - auth, val = init.split(':') - if auth.upper() == 'EPSG': - log.debug("Setting EPSG: %s", val) - ogrext1.OSRImportFromEPSG(cogr_srs, int(val)) - else: - params = [] - col_crs['wktext'] = True - for k, v in col_crs.items(): - if v is True or (k in ('no_defs', 'wktext') and v): - params.append("+%s" % k) - else: - params.append("+%s=%s" % (k, v)) - proj = " ".join(params) - log.debug("PROJ.4 to be imported: %r", proj) - proj_b = proj.encode('utf-8') - proj_c = proj_b - ogrext1.OSRImportFromProj4(cogr_srs, proj_c) - else: - raise ValueError("Invalid CRS") - - # Fixup, export to WKT, and set the GDAL dataset's projection. - ogrext1.OSRFixup(cogr_srs) - - # Figure out what encoding to use. 
The encoding parameter given - # to the collection constructor takes highest precedence, then - # 'iso-8859-1', then the system's default encoding as last resort. - sysencoding = locale.getpreferredencoding() - userencoding = collection.encoding - self._fileencoding = (userencoding or ( - collection.driver == "ESRI Shapefile" and - 'ISO-8859-1') or sysencoding).upper() - - fileencoding = self.get_fileencoding() - if fileencoding: - fileencoding_b = fileencoding.encode('utf-8') - fileencoding_c = fileencoding_b - options = ogrext1.CSLSetNameValue(options, "ENCODING", fileencoding_c) - log.debug("Output file encoding: %s", fileencoding) - - # Does the layer exist already? If so, we delete it. - layer_count = ogrext1.OGR_DS_GetLayerCount(self.cogr_ds) - layer_names = [] - for i in range(layer_count): - cogr_layer = ogrext1.OGR_DS_GetLayer(cogr_ds, i) - name_c = ogrext1.OGR_L_GetName(cogr_layer) - name_b = name_c - layer_names.append(name_b.decode('utf-8')) - - idx = -1 - if isinstance(collection.name, string_types): - if collection.name in layer_names: - idx = layer_names.index(collection.name) - elif isinstance(collection.name, int): - if collection.name >= 0 and collection.name < layer_count: - idx = collection.name - if idx >= 0: - log.debug("Deleted pre-existing layer at %s", collection.name) - ogrext1.OGR_DS_DeleteLayer(self.cogr_ds, idx) - - # Create the named layer in the datasource. - name_b = collection.name.encode('utf-8') - name_c = name_b - - try: - self.cogr_layer = exc_wrap_pointer( - ogrext1.OGR_DS_CreateLayer( - self.cogr_ds, name_c, cogr_srs, - geometry_type_code( - collection.schema.get('geometry', 'Unknown')), - options)) - except Exception as exc: - raise DriverError(str(exc)) - finally: - # Shapefile layers make a copy of the passed srs. GPKG - # layers, on the other hand, increment its reference - # count. OSRRelease() is the safe way to release - # OGRSpatialReferenceH. - if cogr_srs != NULL: - ogrext1.OSRRelease(cogr_srs) - if options != NULL: - ogrext1.CSLDestroy(options) - - log.debug("Created layer") - - # Next, make a layer definition from the given schema properties, - # which are an ordered dict since Fiona 1.0.1. - for key, value in collection.schema['properties'].items(): - log.debug("Creating field: %s %s", key, value) - - # Convert 'long' to 'int'. See - # https://github.com/Toblerity/Fiona/issues/101. - if value == 'long': - value = 'int' - - # Is there a field width/precision? - width = precision = None - if ':' in value: - value, fmt = value.split(':') - if '.' in fmt: - width, precision = map(int, fmt.split('.')) - else: - width = int(fmt) - - encoding = self.get_internalencoding() - key_bytes = key.encode(encoding) - cogr_fielddefn = ogrext1.OGR_Fld_Create( - key_bytes, - FIELD_TYPES.index(value) ) - if cogr_fielddefn == NULL: - raise ValueError("Null field definition") - if width: - ogrext1.OGR_Fld_SetWidth(cogr_fielddefn, width) - if precision: - ogrext1.OGR_Fld_SetPrecision(cogr_fielddefn, precision) - ogrext1.OGR_L_CreateField(self.cogr_layer, cogr_fielddefn, 1) - ogrext1.OGR_Fld_Destroy(cogr_fielddefn) - log.debug("Created fields") - - # Mapping of the Python collection schema to the munged - # OGR schema. 
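The zip of key sequences built just below works because both schemas are ordered mappings: position i of the user's schema corresponds to position i of the schema OGR reports back, even when the driver has altered ("munged") the names. An illustration with hypothetical fields and the ESRI Shapefile driver's 10-character DBF name limit:

    user_keys = ['population_density', 'name']   # collection.schema['properties']
    ogr_keys = ['population', 'name']            # what get_schema() reads back
    schema_mapping = dict(zip(user_keys, ogr_keys))
    assert schema_mapping['population_density'] == 'population'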
- ogr_schema = self.get_schema() - self._schema_mapping = dict(zip( - collection.schema['properties'].keys(), - ogr_schema['properties'].keys() )) - - log.debug("Writing started") - - def writerecs(self, records, collection): - """Writes buffered records to OGR.""" - cdef void *cogr_driver - cdef void *cogr_feature - - cdef void *cogr_layer = self.cogr_layer - if cogr_layer == NULL: - raise ValueError("Null layer") - - schema_geom_type = collection.schema['geometry'] - cogr_driver = ogrext1.OGR_DS_GetDriver(self.cogr_ds) - if ogrext1.OGR_Dr_GetName(cogr_driver) == b"GeoJSON": - def validate_geometry_type(rec): - return True - elif ogrext1.OGR_Dr_GetName(cogr_driver) == b"ESRI Shapefile" \ - and "Point" not in collection.schema['geometry']: - schema_geom_type = collection.schema['geometry'].lstrip( - "3D ").lstrip("Multi") - def validate_geometry_type(rec): - return rec['geometry'] is None or \ - rec['geometry']['type'].lstrip( - "3D ").lstrip("Multi") == schema_geom_type - else: - schema_geom_type = collection.schema['geometry'].lstrip("3D ") - def validate_geometry_type(rec): - return rec['geometry'] is None or \ - rec['geometry']['type'].lstrip("3D ") == schema_geom_type - - schema_props_keys = set(collection.schema['properties'].keys()) - for record in records: - log.debug("Creating feature in layer: %s" % record) - # Validate against collection's schema. - if set(record['properties'].keys()) != schema_props_keys: - raise ValueError( - "Record does not match collection schema: %r != %r" % ( - record['properties'].keys(), - list(schema_props_keys) )) - if not validate_geometry_type(record): - raise ValueError( - "Record's geometry type does not match " - "collection schema's geometry type: %r != %r" % ( - record['geometry']['type'], - collection.schema['geometry'] )) - - cogr_feature = OGRFeatureBuilder().build(record, collection) - result = ogrext1.OGR_L_CreateFeature(cogr_layer, cogr_feature) - if result != OGRERR_NONE: - raise RuntimeError("Failed to write record: %s" % record) - _deleteOgrFeature(cogr_feature) - - def sync(self, collection): - """Syncs OGR to disk.""" - cdef void *cogr_ds = self.cogr_ds - cdef void *cogr_layer = self.cogr_layer - if cogr_ds == NULL: - raise ValueError("Null data source") - log.debug("Syncing OGR to disk") - retval = ogrext1.OGR_DS_SyncToDisk(cogr_ds) - if retval != OGRERR_NONE: - raise RuntimeError("Failed to sync to disk") - - -cdef class Iterator: - - """Provides iterated access to feature data. 
- """ - - # Reference to its Collection - cdef collection - cdef encoding - cdef int next_index - cdef stop - cdef start - cdef step - cdef fastindex - cdef stepsign - - def __cinit__(self, collection, start=None, stop=None, step=None, - bbox=None, mask=None): - if collection.session is None: - raise ValueError("I/O operation on closed collection") - self.collection = collection - cdef Session session - cdef void *cogr_geometry - session = self.collection.session - cdef void *cogr_layer = session.cogr_layer - if cogr_layer == NULL: - raise ValueError("Null layer") - ogrext1.OGR_L_ResetReading(cogr_layer) - - if bbox and mask: - raise ValueError("mask and bbox can not be set together") - - if bbox: - ogrext1.OGR_L_SetSpatialFilterRect( - cogr_layer, bbox[0], bbox[1], bbox[2], bbox[3]) - elif mask: - cogr_geometry = OGRGeomBuilder().build(mask) - ogrext1.OGR_L_SetSpatialFilter(cogr_layer, cogr_geometry) - ogrext1.OGR_G_DestroyGeometry(cogr_geometry) - - else: - ogrext1.OGR_L_SetSpatialFilter( - cogr_layer, NULL) - self.encoding = session.get_internalencoding() - - self.fastindex = ogrext1.OGR_L_TestCapability( - session.cogr_layer, OLC_FASTSETNEXTBYINDEX) - - ftcount = ogrext1.OGR_L_GetFeatureCount(session.cogr_layer, 0) - if ftcount == -1 and ((start is not None and start < 0) or - (stop is not None and stop < 0)): - raise IndexError( - "collection's dataset does not support negative slice indexes") - - if stop is not None and stop < 0: - stop += ftcount - - if start is None: - start = 0 - if start is not None and start < 0: - start += ftcount - - # step size - if step is None: - step = 1 - if step == 0: - raise ValueError("slice step cannot be zero") - if step < 0 and not self.fastindex: - warnings.warn("Layer does not support" \ - "OLCFastSetNextByIndex, negative step size may" \ - " be slow", RuntimeWarning) - self.stepsign = int(math.copysign(1, step)) - self.stop = stop - self.start = start - self.step = step - - self.next_index = start - log.debug("Index: %d", self.next_index) - ogrext1.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - - def __iter__(self): - return self - - - def _next(self): - """Internal method to set read cursor to next item""" - - cdef Session session - session = self.collection.session - - # Check if next_index is valid - if self.next_index < 0: - raise StopIteration - - if self.stepsign == 1: - if self.next_index < self.start or (self.stop is not None and self.next_index >= self.stop): - raise StopIteration - else: - if self.next_index > self.start or (self.stop is not None and self.next_index <= self.stop): - raise StopIteration - - - # Set read cursor to next_item position - if self.step > 1 and self.fastindex: - ogrext1.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - elif self.step > 1 and not self.fastindex and not self.next_index == self.start: - for _ in range(self.step - 1): - # TODO rbuffat add test -> OGR_L_GetNextFeature increments cursor by 1, therefore self.step - 1 as one increment was performed when feature is read - cogr_feature = ogrext1.OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration - elif self.step > 1 and not self.fastindex and self.next_index == self.start: - ogrext1.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - elif self.step == 0: - # ogrext1.OGR_L_GetNextFeature increments read cursor by one - pass - elif self.step < 0: - ogrext1.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - # set the next index - self.next_index += self.step - - - def 
__next__(self): - cdef void * cogr_feature - cdef Session session - session = self.collection.session - - #Update read cursor - self._next() - - # Get the next feature. - cogr_feature = ogrext1.OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration - - feature = FeatureBuilder().build( - cogr_feature, - bbox=False, - encoding=self.encoding, - driver=self.collection.driver - ) - _deleteOgrFeature(cogr_feature) - return feature - - -cdef class ItemsIterator(Iterator): - - def __next__(self): - - cdef long fid - cdef void * cogr_feature - cdef Session session - session = self.collection.session - - #Update read cursor - self._next() - - # Get the next feature. - cogr_feature = ogrext1.OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration - - - fid = ogrext1.OGR_F_GetFID(cogr_feature) - feature = FeatureBuilder().build( - cogr_feature, - bbox=False, - encoding=self.encoding, - driver=self.collection.driver - ) - _deleteOgrFeature(cogr_feature) - - return fid, feature - - -cdef class KeysIterator(Iterator): - - def __next__(self): - cdef long fid - cdef void * cogr_feature - cdef Session session - session = self.collection.session - - #Update read cursor - self._next() - - # Get the next feature. - cogr_feature = ogrext1.OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration - - fid = ogrext1.OGR_F_GetFID(cogr_feature) - _deleteOgrFeature(cogr_feature) - - return fid - - -def _remove(path, driver=None): - """Deletes an OGR data source - """ - cdef void *cogr_driver - cdef int result - - if driver is None: - driver = 'ESRI Shapefile' - - cogr_driver = ogrext1.OGRGetDriverByName(driver.encode('utf-8')) - if cogr_driver == NULL: - raise ValueError("Null driver") - - if not ogrext1.OGR_Dr_TestCapability(cogr_driver, ODrCDeleteDataSource): - raise RuntimeError("Driver does not support dataset removal operation") - - result = ogrext1.OGR_Dr_DeleteDataSource(cogr_driver, path.encode('utf-8')) - if result != OGRERR_NONE: - raise RuntimeError("Failed to remove data source {}".format(path)) - - -def _listlayers(path): - - """Provides a list of the layers in an OGR data source. - """ - - cdef void *cogr_ds - cdef void *cogr_layer - cdef char *path_c - cdef char *name_c - - # Open OGR data source. - try: - path_b = path.encode('utf-8') - except UnicodeError: - path_b = path - path_c = path_b - with cpl_errs: - cogr_ds = ogrext1.OGROpen(path_c, 0, NULL) - if cogr_ds == NULL: - raise ValueError("No data available at path '%s'" % path) - - # Loop over the layers to get their names. - layer_count = ogrext1.OGR_DS_GetLayerCount(cogr_ds) - layer_names = [] - for i in range(layer_count): - cogr_layer = ogrext1.OGR_DS_GetLayer(cogr_ds, i) - name_c = ogrext1.OGR_L_GetName(cogr_layer) - name_b = name_c - layer_names.append(name_b.decode('utf-8')) - - # Close up data source. - if cogr_ds != NULL: - ogrext1.OGR_DS_Destroy(cogr_ds) - cogr_ds = NULL - - return layer_names - -def buffer_to_virtual_file(bytesbuf, ext=''): - """Maps a bytes buffer to a virtual file. - - `ext` is empty or begins with a period and contains at most one period. 
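The body that closes this deleted helper maps the buffer to a randomly named path under GDAL's in-memory filesystem. The naming scheme in plain Python (the extension is an example consistent with the rule stated above):

    import os
    import uuid

    ext = '.json'
    vsi_filename = os.path.join('/vsimem', uuid.uuid4().hex + ext)
    # e.g. '/vsimem/0f1f6f0e2e5c4b2a9cf35f1b7a1d2e3c.json'; the companion
    # remove_virtual_file() below unlinks it when the caller is done.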
- """ - vsi_filename = os.path.join('/vsimem', uuid.uuid4().hex + ext) - vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') - - vsi_handle = ogrext1.VSIFileFromMemBuffer(vsi_cfilename, bytesbuf, len(bytesbuf), 0) - if vsi_handle == NULL: - raise OSError('failed to map buffer to file') - if ogrext1.VSIFCloseL(vsi_handle) != 0: - raise OSError('failed to close mapped file handle') - - return vsi_filename - -def remove_virtual_file(vsi_filename): - vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') - return ogrext1.VSIUnlink(vsi_cfilename) diff -Nru fiona-1.7.10/fiona/ogrext2.pxd fiona-1.8.6/fiona/ogrext2.pxd --- fiona-1.7.10/fiona/ogrext2.pxd 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/ogrext2.pxd 2019-03-19 04:25:07.000000000 +0000 @@ -2,14 +2,131 @@ # All rights reserved. # See ../LICENSE.txt +from libc.stdio cimport FILE + + +cdef extern from "ogr_core.h": + + ctypedef int OGRErr + + ctypedef enum OGRwkbGeometryType: + wkbUnknown + wkbPoint + wkbLineString + wkbPolygon + wkbMultiPoint + wkbMultiLineString + wkbMultiPolygon + wkbGeometryCollection + wkbCircularString + wkbCompoundCurve + wkbCurvePolygon + wkbMultiCurve + wkbMultiSurface + wkbCurve + wkbSurface + wkbPolyhedralSurface + wkbTIN + wkbTriangle + wkbNone + wkbLinearRing + wkbCircularStringZ + wkbCompoundCurveZ + wkbCurvePolygonZ + wkbMultiCurveZ + wkbMultiSurfaceZ + wkbCurveZ + wkbSurfaceZ + wkbPolyhedralSurfaceZ + wkbTINZ + wkbTriangleZ + wkbPointM + wkbLineStringM + wkbPolygonM + wkbMultiPointM + wkbMultiLineStringM + wkbMultiPolygonM + wkbGeometryCollectionM + wkbCircularStringM + wkbCompoundCurveM + wkbCurvePolygonM + wkbMultiCurveM + wkbMultiSurfaceM + wkbCurveM + wkbSurfaceM + wkbPolyhedralSurfaceM + wkbTINM + wkbTriangleM + wkbPointZM + wkbLineStringZM + wkbPolygonZM + wkbMultiPointZM + wkbMultiLineStringZM + wkbMultiPolygonZM + wkbGeometryCollectionZM + wkbCircularStringZM + wkbCompoundCurveZM + wkbCurvePolygonZM + wkbMultiCurveZM + wkbMultiSurfaceZM + wkbCurveZM + wkbSurfaceZM + wkbPolyhedralSurfaceZM + wkbTINZM + wkbTriangleZM + wkbPoint25D + wkbLineString25D + wkbPolygon25D + wkbMultiPoint25D + wkbMultiLineString25D + wkbMultiPolygon25D + wkbGeometryCollection25D + + ctypedef enum OGRFieldType: + OFTInteger + OFTIntegerList + OFTReal + OFTRealList + OFTString + OFTStringList + OFTWideString + OFTWideStringList + OFTBinary + OFTDate + OFTTime + OFTDateTime + OFTInteger64 + OFTInteger64List + OFTMaxType + + ctypedef int OGRFieldSubType + cdef int OFSTNone = 0 + cdef int OFSTBoolean = 1 + cdef int OFSTInt16 = 2 + cdef int OFSTFloat32 = 3 + cdef int OFSTMaxSubType = 3 + + ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY + + char * OGRGeometryTypeToName(int) + + + char * ODsCCreateLayer = "CreateLayer" + char * ODsCDeleteLayer = "DeleteLayer" + + cdef extern from "gdal.h": char * GDALVersionInfo (char *pszRequest) void * GDALGetDriverByName(const char * pszName) void * GDALOpenEx(const char * pszFilename, unsigned int nOpenFlags, - const char ** papszAllowedDrivers, - const char ** papszOpenOptions, - const char *const *papszSibling1Files + const char *const *papszAllowedDrivers, + const char *const *papszOpenOptions, + const char *const *papszSiblingFiles ) int GDAL_OF_UPDATE int GDAL_OF_READONLY @@ -36,6 +153,10 @@ char * GDALGetDriverShortName(void * hDriver) char * GDALGetDatasetDriver (void * hDataset) int GDALDeleteDataset(void * hDriver, const char * 
pszFilename) + OGRErr GDALDatasetStartTransaction (void * hDataset, int bForce) + OGRErr GDALDatasetCommitTransaction (void * hDataset) + OGRErr GDALDatasetRollbackTransaction (void * hDataset) + int GDALDatasetTestCapability (void * hDataset, char *) ctypedef enum GDALDataType: @@ -64,35 +185,34 @@ cdef extern from "cpl_string.h": + char ** CSLAddNameValue (char **list, const char *name, const char *value) char ** CSLSetNameValue (char **list, const char *name, const char *value) void CSLDestroy (char **list) char ** CSLAddString(char **list, const char *string) -cdef extern from "cpl_vsi.h": - ctypedef struct VSILFILE: - pass - int VSIFCloseL (VSILFILE *) - VSILFILE * VSIFileFromMemBuffer (const char * filename, - unsigned char * data, - int data_len, - int take_ownership) +cdef extern from "cpl_vsi.h" nogil: + ctypedef int vsi_l_offset + ctypedef FILE VSILFILE + + unsigned char *VSIGetMemFileBuffer(const char *path, + vsi_l_offset *data_len, + int take_ownership) + VSILFILE *VSIFileFromMemBuffer(const char *path, void *data, + vsi_l_offset data_len, int take_ownership) + VSILFILE* VSIFOpenL(const char *path, const char *mode) + int VSIFCloseL(VSILFILE *fp) + int VSIUnlink(const char *path) + + int VSIFFlushL(VSILFILE *fp) + size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) + vsi_l_offset VSIFTellL(VSILFILE *fp) + int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) + size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIUnlink (const char * pathname) -cdef extern from "ogr_core.h": - - ctypedef int OGRErr - - ctypedef struct OGREnvelope: - double MinX - double MaxX - double MinY - double MaxY - - char * OGRGeometryTypeToName(int) - - cdef extern from "ogr_srs_api.h": ctypedef void * OGRSpatialReferenceH @@ -116,11 +236,12 @@ cdef extern from "ogr_api.h": - char * OGR_Dr_GetName (void *driver) + const char * OGR_Dr_GetName (void *driver) void * OGR_Dr_CreateDataSource (void *driver, const char *path, char **options) int OGR_Dr_DeleteDataSource (void *driver, char *) void * OGR_Dr_Open (void *driver, const char *path, int bupdate) int OGR_Dr_TestCapability (void *driver, const char *) + int OGR_DS_DeleteLayer (void *datasource, int n) void * OGR_F_Create (void *featuredefn) void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) @@ -129,21 +250,25 @@ double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) + unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) int OGR_F_GetFieldCount (void *feature) void * OGR_F_GetFieldDefnRef (void *feature, int n) int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) + void * OGR_F_StealGeometry (void *feature) void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) void OGR_F_SetFieldString (void *feature, int n, char *value) + void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) + void OGR_F_SetFieldNull (void *feature, int n) # new in GDAL 2.2 int OGR_F_SetGeometryDirectly (void *feature, void *geometry) void * OGR_FD_Create (char *name) int OGR_FD_GetFieldCount (void *featuredefn) void * OGR_FD_GetFieldDefn (void *featuredefn, int n) int OGR_FD_GetGeomType (void 
*featuredefn) char * OGR_FD_GetName (void *featuredefn) - void * OGR_Fld_Create (char *name, int fieldtype) + void * OGR_Fld_Create (char *name, OGRFieldType fieldtype) void OGR_Fld_Destroy (void *fielddefn) char * OGR_Fld_GetNameRef (void *fielddefn) int OGR_Fld_GetPrecision (void *fielddefn) @@ -152,6 +277,8 @@ void OGR_Fld_Set (void *fielddefn, char *name, int fieldtype, int width, int precision, int justification) void OGR_Fld_SetPrecision (void *fielddefn, int n) void OGR_Fld_SetWidth (void *fielddefn, int n) + OGRFieldSubType OGR_Fld_GetSubType(void *fielddefn) + void OGR_Fld_SetSubType(void *fielddefn, OGRFieldSubType subtype) OGRErr OGR_G_AddGeometryDirectly (void *geometry, void *part) void OGR_G_AddPoint (void *geometry, double x, double y, double z) void OGR_G_AddPoint_2D (void *geometry, double x, double y) @@ -171,11 +298,15 @@ double OGR_G_GetZ (void *geometry, int n) void OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) + void * OGR_G_ForceToMultiPolygon (void *geometry) + void * OGR_G_ForceToPolygon (void *geometry) + void * OGR_G_Clone(void *geometry) OGRErr OGR_L_CreateFeature (void *layer, void *feature) - int OGR_L_CreateField (void *layer, void *fielddefn, int flexible) + OGRErr OGR_L_CreateField (void *layer, void *fielddefn, int flexible) OGRErr OGR_L_GetExtent (void *layer, void *extent, int force) void * OGR_L_GetFeature (void *layer, int n) int OGR_L_GetFeatureCount (void *layer, int m) + void * OGR_G_GetLinearGeometry (void *hGeom, double dfMaxAngleStepSizeDegrees, char **papszOptions) void * OGR_L_GetLayerDefn (void *layer) char * OGR_L_GetName (void *layer) void * OGR_L_GetNextFeature (void *layer) @@ -191,6 +322,7 @@ void * OGROpen (char *path, int mode, void *x) void * OGROpenShared (char *path, int mode, void *x) int OGRReleaseDataSource (void *datasource) + OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) long long OGR_F_GetFieldAsInteger64 (void *feature, int n) void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) diff -Nru fiona-1.7.10/fiona/ogrext2.pyx fiona-1.8.6/fiona/ogrext2.pyx --- fiona-1.7.10/fiona/ogrext2.pyx 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/ogrext2.pyx 1970-01-01 00:00:00.000000000 +0000 @@ -1,1330 +0,0 @@ -# These are extension functions and classes using the OGR C API. - -from __future__ import absolute_import - -import datetime -import json -import locale -import logging -import os -import warnings -import math -import uuid - -from six import integer_types, string_types, text_type - -from fiona cimport ogrext2 -from fiona.ogrext2 cimport OGREnvelope -from fiona._geometry cimport ( - GeomBuilder, OGRGeomBuilder, geometry_type_code, - normalize_geometry_type_code) -from fiona._err cimport exc_wrap_pointer - -from fiona._err import cpl_errs -from fiona._geometry import GEOMETRY_TYPES -from fiona import compat -from fiona.errors import ( - DriverError, DriverIOError, SchemaError, CRSError, FionaValueError) -from fiona.compat import OrderedDict -from fiona.rfc3339 import parse_date, parse_datetime, parse_time -from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType - -from libc.stdlib cimport malloc, free -from libc.string cimport strcmp - - -log = logging.getLogger("Fiona") - -# Mapping of OGR integer field types to Fiona field type names. -# -# Lists are currently unsupported in this version, but might be done as -# arrays in a future version. 
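Because list types map to None in the FIELD_TYPES table that follows, readers emit a "Skipping field ..." warning for every feature carrying such a field; that is exactly the message class that FieldSkipLogFilter, added in fiona/logutils.py earlier in this diff, collapses to one record per field. A sketch of using that filter (the logger name and the warning's arguments are illustrative):

    import logging
    from fiona.logutils import FieldSkipLogFilter, LogFiltering

    log = logging.getLogger('Fiona')
    with LogFiltering(log, FieldSkipLogFilter()):
        # Identical "Skipping field ..." records pass through only once
        # inside this block; the second call below is suppressed.
        log.warning("Skipping field %s: invalid type %s", 'tags', 1)
        log.warning("Skipping field %s: invalid type %s", 'tags', 1)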
- -FIELD_TYPES = [ - 'int', # OFTInteger, Simple 32bit integer - None, # OFTIntegerList, List of 32bit integers - 'float', # OFTReal, Double Precision floating point - None, # OFTRealList, List of doubles - 'str', # OFTString, String of ASCII chars - None, # OFTStringList, Array of strings - None, # OFTWideString, deprecated - None, # OFTWideStringList, deprecated - None, # OFTBinary, Raw Binary data - 'date', # OFTDate, Date - 'time', # OFTTime, Time - 'datetime', # OFTDateTime, Date and Time - 'int', # OFTInteger64, Single 64bit integer - None, # OFTInteger64List, List of 64bit integers - ] - -# Mapping of Fiona field type names to Python types. -FIELD_TYPES_MAP = { - 'int': int, - 'float': float, - 'str': text_type, - 'date': FionaDateType, - 'time': FionaTimeType, - 'datetime': FionaDateTimeType - } - -# OGR Driver capability -cdef const char * ODrCCreateDataSource = "CreateDataSource" -cdef const char * ODrCDeleteDataSource = "DeleteDataSource" - -# OGR Layer capability -cdef const char * OLC_RANDOMREAD = "RandomRead" -cdef const char * OLC_SEQUENTIALWRITE = "SequentialWrite" -cdef const char * OLC_RANDOMWRITE = "RandomWrite" -cdef const char * OLC_FASTSPATIALFILTER = "FastSpatialFilter" -cdef const char * OLC_FASTFEATURECOUNT = "FastFeatureCount" -cdef const char * OLC_FASTGETEXTENT = "FastGetExtent" -cdef const char * OLC_FASTSETNEXTBYINDEX = "FastSetNextByIndex" -cdef const char * OLC_CREATEFIELD = "CreateField" -cdef const char * OLC_CREATEGEOMFIELD = "CreateGeomField" -cdef const char * OLC_DELETEFIELD = "DeleteField" -cdef const char * OLC_REORDERFIELDS = "ReorderFields" -cdef const char * OLC_ALTERFIELDDEFN = "AlterFieldDefn" -cdef const char * OLC_DELETEFEATURE = "DeleteFeature" -cdef const char * OLC_STRINGSASUTF8 = "StringsAsUTF8" -cdef const char * OLC_TRANSACTIONS = "Transactions" - -# OGR integer error types. - -OGRERR_NONE = 0 -OGRERR_NOT_ENOUGH_DATA = 1 # not enough data to deserialize */ -OGRERR_NOT_ENOUGH_MEMORY = 2 -OGRERR_UNSUPPORTED_GEOMETRY_TYPE = 3 -OGRERR_UNSUPPORTED_OPERATION = 4 -OGRERR_CORRUPT_DATA = 5 -OGRERR_FAILURE = 6 -OGRERR_UNSUPPORTED_SRS = 7 -OGRERR_INVALID_HANDLE = 8 - - -def _explode(coords): - """Explode a GeoJSON geometry's coordinates object and yield - coordinate tuples. As long as the input is conforming, the type of - the geometry doesn't matter.""" - for e in coords: - if isinstance(e, (float, int)): - yield coords - break - else: - for f in _explode(e): - yield f - - -def _bounds(geometry): - """Bounding box of a GeoJSON geometry""" - try: - xyz = tuple(zip(*list(_explode(geometry['coordinates'])))) - return min(xyz[0]), min(xyz[1]), max(xyz[0]), max(xyz[1]) - except (KeyError, TypeError): - return None - -def calc_gdal_version_num(maj, min, rev): - """Calculates the internal gdal version number based on major, minor and revision""" - return int(maj * 1000000 + min * 10000 + rev*100) - -def get_gdal_version_num(): - """Return current internal version number of gdal""" - return int(ogrext2.GDALVersionInfo("VERSION_NUM")) - -def get_gdal_release_name(): - """Return release name of gdal""" - return ogrext2.GDALVersionInfo("RELEASE_NAME") - - -# Feature extension classes and functions follow. - -cdef class FeatureBuilder: - """Build Fiona features from OGR feature pointers. - - No OGR objects are allocated by this function and the feature - argument is not destroyed. 
- """ - - cdef build(self, void *feature, encoding='utf-8', bbox=False, driver=None): - # The only method anyone ever needs to call - cdef void *fdefn - cdef int i - cdef int y = 0 - cdef int m = 0 - cdef int d = 0 - cdef int hh = 0 - cdef int mm = 0 - cdef int ss = 0 - cdef int tz = 0 - cdef int retval - cdef const char *key_c = NULL - props = OrderedDict() - for i in range(ogrext2.OGR_F_GetFieldCount(feature)): - fdefn = ogrext2.OGR_F_GetFieldDefnRef(feature, i) - if fdefn == NULL: - raise ValueError("Null feature definition") - key_c = ogrext2.OGR_Fld_GetNameRef(fdefn) - if key_c == NULL: - raise ValueError("Null field name reference") - key_b = key_c - key = key_b.decode(encoding) - fieldtypename = FIELD_TYPES[ogrext2.OGR_Fld_GetType(fdefn)] - if not fieldtypename: - log.warning( - "Skipping field %s: invalid type %s", - key, - ogrext2.OGR_Fld_GetType(fdefn)) - continue - - # TODO: other types - fieldtype = FIELD_TYPES_MAP[fieldtypename] - if not ogrext2.OGR_F_IsFieldSet(feature, i): - props[key] = None - elif fieldtype is int: - props[key] = ogrext2.OGR_F_GetFieldAsInteger64(feature, i) - elif fieldtype is float: - props[key] = ogrext2.OGR_F_GetFieldAsDouble(feature, i) - - elif fieldtype is text_type: - try: - val = ogrext2.OGR_F_GetFieldAsString(feature, i) - val = val.decode(encoding) - except UnicodeDecodeError: - log.warning( - "Failed to decode %s using %s codec", val, encoding) - - # Does the text contain a JSON object? Let's check. - # Let's check as cheaply as we can. - if driver == 'GeoJSON' and val.startswith('{'): - try: - val = json.loads(val) - except ValueError as err: - log.warning(str(err)) - - # Now add to the properties object. - props[key] = val - - elif fieldtype in (FionaDateType, FionaTimeType, FionaDateTimeType): - retval = ogrext2.OGR_F_GetFieldAsDateTime( - feature, i, &y, &m, &d, &hh, &mm, &ss, &tz) - if fieldtype is FionaDateType: - props[key] = datetime.date(y, m, d).isoformat() - elif fieldtype is FionaTimeType: - props[key] = datetime.time(hh, mm, ss).isoformat() - else: - props[key] = datetime.datetime( - y, m, d, hh, mm, ss).isoformat() - else: - log.debug("%s: None, fieldtype: %r, %r" % (key, fieldtype, fieldtype in string_types)) - props[key] = None - - cdef void *cogr_geometry = ogrext2.OGR_F_GetGeometryRef(feature) - if cogr_geometry is not NULL: - geom = GeomBuilder().build(cogr_geometry) - else: - geom = None - return { - 'type': 'Feature', - 'id': str(ogrext2.OGR_F_GetFID(feature)), - 'geometry': geom, - 'properties': props } - - -cdef class OGRFeatureBuilder: - - """Builds an OGR Feature from a Fiona feature mapping. - - Allocates one OGR Feature which should be destroyed by the caller. - Borrows a layer definition from the collection. 
- """ - - cdef void * build(self, feature, collection) except NULL: - cdef void *cogr_geometry = NULL - cdef const char *string_c = NULL - cdef WritingSession session - session = collection.session - cdef void *cogr_layer = session.cogr_layer - if cogr_layer == NULL: - raise ValueError("Null layer") - cdef void *cogr_featuredefn = ogrext2.OGR_L_GetLayerDefn(cogr_layer) - if cogr_featuredefn == NULL: - raise ValueError("Null feature definition") - cdef void *cogr_feature = ogrext2.OGR_F_Create(cogr_featuredefn) - if cogr_feature == NULL: - raise ValueError("Null feature") - - if feature['geometry'] is not None: - cogr_geometry = OGRGeomBuilder().build( - feature['geometry']) - ogrext2.OGR_F_SetGeometryDirectly(cogr_feature, cogr_geometry) - - # OGR_F_SetFieldString takes UTF-8 encoded strings ('bytes' in - # Python 3). - encoding = session.get_internalencoding() - - for key, value in feature['properties'].items(): - log.debug( - "Looking up %s in %s", key, repr(session._schema_mapping)) - ogr_key = session._schema_mapping[key] - schema_type = collection.schema['properties'][key] - try: - key_bytes = ogr_key.encode(encoding) - except UnicodeDecodeError: - log.warning("Failed to encode %s using %s codec", key, encoding) - key_bytes = ogr_key - key_c = key_bytes - i = ogrext2.OGR_F_GetFieldIndex(cogr_feature, key_c) - if i < 0: - continue - - # Special case: serialize dicts to assist OGR. - if isinstance(value, dict): - value = json.dumps(value) - - # Continue over the standard OGR types. - if isinstance(value, integer_types): - ogrext2.OGR_F_SetFieldInteger64(cogr_feature, i, value) - elif isinstance(value, float): - ogrext2.OGR_F_SetFieldDouble(cogr_feature, i, value) - elif (isinstance(value, string_types) - and schema_type in ['date', 'time', 'datetime']): - if schema_type == 'date': - y, m, d, hh, mm, ss, ff = parse_date(value) - elif schema_type == 'time': - y, m, d, hh, mm, ss, ff = parse_time(value) - else: - y, m, d, hh, mm, ss, ff = parse_datetime(value) - ogrext2.OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, hh, mm, ss, 0) - elif (isinstance(value, datetime.date) - and schema_type == 'date'): - y, m, d = value.year, value.month, value.day - ogrext2.OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, 0, 0, 0, 0) - elif (isinstance(value, datetime.datetime) - and schema_type == 'datetime'): - y, m, d = value.year, value.month, value.day - hh, mm, ss = value.hour, value.minute, value.second - ogrext2.OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, hh, mm, ss, 0) - elif (isinstance(value, datetime.time) - and schema_type == 'time'): - hh, mm, ss = value.hour, value.minute, value.second - ogrext2.OGR_F_SetFieldDateTime( - cogr_feature, i, 0, 0, 0, hh, mm, ss, 0) - elif isinstance(value, string_types): - try: - value_bytes = value.encode(encoding) - except UnicodeDecodeError: - log.warning( - "Failed to encode %s using %s codec", value, encoding) - value_bytes = value - string_c = value_bytes - ogrext2.OGR_F_SetFieldString(cogr_feature, i, string_c) - elif value is None: - pass # keep field unset/null - else: - raise ValueError("Invalid field type %s" % type(value)) - log.debug("Set field %s: %s" % (key, value)) - return cogr_feature - - -cdef _deleteOgrFeature(void *cogr_feature): - """Delete an OGR feature""" - if cogr_feature is not NULL: - ogrext2.OGR_F_Destroy(cogr_feature) - cogr_feature = NULL - - -def featureRT(feature, collection): - # For testing purposes only, leaks the JSON data - cdef void *cogr_feature = OGRFeatureBuilder().build(feature, collection) - cdef void 
*cogr_geometry = ogrext2.OGR_F_GetGeometryRef(cogr_feature) - if cogr_geometry == NULL: - raise ValueError("Null geometry") - log.debug("Geometry: %s" % ogrext2.OGR_G_ExportToJson(cogr_geometry)) - encoding = collection.encoding or 'utf-8' - result = FeatureBuilder().build( - cogr_feature, - bbox=False, - encoding=encoding, - driver=collection.driver - ) - _deleteOgrFeature(cogr_feature) - return result - - -# Collection-related extension classes and functions - -cdef class Session: - - cdef void *cogr_ds - cdef void *cogr_layer - cdef object _fileencoding - cdef object _encoding - cdef object collection - - def __init__(self): - self.cogr_ds = NULL - self.cogr_layer = NULL - self._fileencoding = None - self._encoding = None - - def __dealloc__(self): - self.stop() - - def start(self, collection): - cdef const char *path_c = NULL - cdef const char *name_c = NULL - cdef void *drv = NULL - cdef void *ds = NULL - cdef char **drvs = NULL - - if collection.path == '-': - path = '/vsistdin/' - else: - path = collection.path - try: - path_b = path.encode('utf-8') - except UnicodeDecodeError: - # Presume already a UTF-8 encoded string - path_b = path - path_c = path_b - - # TODO: eliminate this context manager in 2.0 as we have done - # in Rasterio 1.0. - with cpl_errs: - - # We have two ways of specifying drivers to try. Resolve the - # values into a single set of driver short names. - if collection._driver: - drivers = set([collection._driver]) - elif collection.enabled_drivers: - drivers = set(collection.enabled_drivers) - else: - drivers = None - - # If there are specified drivers, make a GDAL string list - # of their names. - if drivers: - for name in drivers: - name_b = name.encode() - name_c = name_b - log.debug("Trying driver: %s", name) - drv = ogrext2.GDALGetDriverByName(name_c) - if drv != NULL: - drvs = ogrext2.CSLAddString(drvs, name_c) - - flags = ogrext2.GDAL_OF_VECTOR | ogrext2.GDAL_OF_READONLY - try: - self.cogr_ds = ogrext2.GDALOpenEx( - path_c, flags, drvs, NULL, NULL) - finally: - ogrext2.CSLDestroy(drvs) - - if self.cogr_ds == NULL: - raise FionaValueError( - "No dataset found at path '%s' using drivers: %s" % ( - collection.path, - drivers or '*')) - - if isinstance(collection.name, string_types): - name_b = collection.name.encode('utf-8') - name_c = name_b - self.cogr_layer = ogrext2.GDALDatasetGetLayerByName( - self.cogr_ds, name_c) - elif isinstance(collection.name, int): - self.cogr_layer = ogrext2.GDALDatasetGetLayer( - self.cogr_ds, collection.name) - name_c = ogrext2.OGR_L_GetName(self.cogr_layer) - name_b = name_c - collection.name = name_b.decode('utf-8') - - if self.cogr_layer == NULL: - raise ValueError("Null layer: " + repr(collection.name)) - - self.collection = collection - - userencoding = self.collection.encoding - if userencoding: - ogrext2.CPLSetThreadLocalConfigOption('SHAPE_ENCODING', '') - self._fileencoding = userencoding.upper() - else: - self._fileencoding = ( - ogrext2.OGR_L_TestCapability( - self.cogr_layer, OLC_STRINGSASUTF8) and - 'utf-8') or ( - self.get_driver() == "ESRI Shapefile" and - 'ISO-8859-1') or locale.getpreferredencoding().upper() - - def stop(self): - self.cogr_layer = NULL - if self.cogr_ds != NULL: - ogrext2.GDALClose(self.cogr_ds) - self.cogr_ds = NULL - - def get_fileencoding(self): - return self._fileencoding - - def get_internalencoding(self): - if not self._encoding: - fileencoding = self.get_fileencoding() - self._encoding = ( - ogrext2.OGR_L_TestCapability( - self.cogr_layer, OLC_STRINGSASUTF8) and - 'utf-8') or fileencoding 
- return self._encoding - - def get_length(self): - if self.cogr_layer == NULL: - raise ValueError("Null layer") - return ogrext2.OGR_L_GetFeatureCount(self.cogr_layer, 0) - - def get_driver(self): - cdef void *cogr_driver = ogrext2.GDALGetDatasetDriver(self.cogr_ds) - if cogr_driver == NULL: - raise ValueError("Null driver") - cdef const char *name = ogrext2.OGR_Dr_GetName(cogr_driver) - driver_name = name - return driver_name.decode() - - def get_schema(self): - cdef int i - cdef int n - cdef void *cogr_featuredefn - cdef void *cogr_fielddefn - cdef const char *key_c - props = [] - - if self.cogr_layer == NULL: - raise ValueError("Null layer") - - cogr_featuredefn = ogrext2.OGR_L_GetLayerDefn(self.cogr_layer) - if cogr_featuredefn == NULL: - raise ValueError("Null feature definition") - n = ogrext2.OGR_FD_GetFieldCount(cogr_featuredefn) - for i from 0 <= i < n: - cogr_fielddefn = ogrext2.OGR_FD_GetFieldDefn(cogr_featuredefn, i) - if cogr_fielddefn == NULL: - raise ValueError("Null field definition") - key_c = ogrext2.OGR_Fld_GetNameRef(cogr_fielddefn) - key_b = key_c - if not bool(key_b): - raise ValueError("Invalid field name ref: %s" % key) - key = key_b.decode(self.get_internalencoding()) - fieldtypename = FIELD_TYPES[ogrext2.OGR_Fld_GetType(cogr_fielddefn)] - if not fieldtypename: - log.warning( - "Skipping field %s: invalid type %s", - key, - ogrext2.OGR_Fld_GetType(cogr_fielddefn)) - continue - val = fieldtypename - if fieldtypename == 'float': - fmt = "" - width = ogrext2.OGR_Fld_GetWidth(cogr_fielddefn) - if width: # and width != 24: - fmt = ":%d" % width - precision = ogrext2.OGR_Fld_GetPrecision(cogr_fielddefn) - if precision: # and precision != 15: - fmt += ".%d" % precision - val = "float" + fmt - elif fieldtypename == 'int': - fmt = "" - width = ogrext2.OGR_Fld_GetWidth(cogr_fielddefn) - if width: # and width != 11: - fmt = ":%d" % width - val = fieldtypename + fmt - elif fieldtypename == 'str': - fmt = "" - width = ogrext2.OGR_Fld_GetWidth(cogr_fielddefn) - if width: # and width != 80: - fmt = ":%d" % width - val = fieldtypename + fmt - - props.append((key, val)) - - code = normalize_geometry_type_code( - ogrext2.OGR_FD_GetGeomType(cogr_featuredefn)) - - return { - 'properties': OrderedDict(props), - 'geometry': GEOMETRY_TYPES[code]} - - def get_crs(self): - cdef char *proj_c = NULL - cdef const char *auth_key = NULL - cdef const char *auth_val = NULL - cdef void *cogr_crs = NULL - if self.cogr_layer == NULL: - raise ValueError("Null layer") - cogr_crs = ogrext2.OGR_L_GetSpatialRef(self.cogr_layer) - crs = {} - if cogr_crs is not NULL: - log.debug("Got coordinate system") - - retval = ogrext2.OSRAutoIdentifyEPSG(cogr_crs) - if retval > 0: - log.info("Failed to auto identify EPSG: %d", retval) - - auth_key = ogrext2.OSRGetAuthorityName(cogr_crs, NULL) - auth_val = ogrext2.OSRGetAuthorityCode(cogr_crs, NULL) - - if auth_key != NULL and auth_val != NULL: - key_b = auth_key - key = key_b.decode('utf-8') - if key == 'EPSG': - val_b = auth_val - val = val_b.decode('utf-8') - crs['init'] = "epsg:" + val - else: - ogrext2.OSRExportToProj4(cogr_crs, &proj_c) - if proj_c == NULL: - raise ValueError("Null projection") - proj_b = proj_c - log.debug("Params: %s", proj_b) - value = proj_b.decode() - value = value.strip() - for param in value.split(): - kv = param.split("=") - if len(kv) == 2: - k, v = kv - try: - v = float(v) - if v % 1 == 0: - v = int(v) - except ValueError: - # Leave v as a string - pass - elif len(kv) == 1: - k, v = kv[0], True - else: - raise ValueError("Unexpected 
proj parameter %s" % param) - k = k.lstrip("+") - crs[k] = v - - ogrext2.CPLFree(proj_c) - else: - log.debug("Projection not found (cogr_crs was NULL)") - return crs - - def get_crs_wkt(self): - cdef char *proj_c = NULL - if self.cogr_layer == NULL: - raise ValueError("Null layer") - cogr_crs = ogrext2.OGR_L_GetSpatialRef(self.cogr_layer) - crs_wkt = "" - if cogr_crs is not NULL: - log.debug("Got coordinate system") - ogrext2.OSRExportToWkt(cogr_crs, &proj_c) - if proj_c == NULL: - raise ValueError("Null projection") - proj_b = proj_c - crs_wkt = proj_b.decode('utf-8') - ogrext2.CPLFree(proj_c) - else: - log.debug("Projection not found (cogr_crs was NULL)") - return crs_wkt - - def get_extent(self): - cdef OGREnvelope extent - - if self.cogr_layer == NULL: - raise ValueError("Null layer") - - result = ogrext2.OGR_L_GetExtent(self.cogr_layer, &extent, 1) - return (extent.MinX, extent.MinY, extent.MaxX, extent.MaxY) - - def has_feature(self, fid): - """Provides access to feature data by FID. - - Supports Collection.__contains__(). - """ - cdef void * cogr_feature - fid = int(fid) - cogr_feature = ogrext2.OGR_L_GetFeature(self.cogr_layer, fid) - if cogr_feature != NULL: - _deleteOgrFeature(cogr_feature) - return True - else: - return False - - def get_feature(self, fid): - """Provides access to feature data by FID. - - Supports Collection.__contains__(). - """ - cdef void * cogr_feature - fid = int(fid) - cogr_feature = ogrext2.OGR_L_GetFeature(self.cogr_layer, fid) - if cogr_feature != NULL: - _deleteOgrFeature(cogr_feature) - return True - else: - return False - - - def __getitem__(self, item): - cdef void * cogr_feature - if isinstance(item, slice): - itr = Iterator(self.collection, item.start, item.stop, item.step) - log.debug("Slice: %r", item) - return list(itr) - elif isinstance(item, int): - index = item - # from the back - if index < 0: - ftcount = ogrext2.OGR_L_GetFeatureCount(self.cogr_layer, 0) - if ftcount == -1: - raise IndexError( - "collection's dataset does not support negative indexes") - index += ftcount - cogr_feature = ogrext2.OGR_L_GetFeature(self.cogr_layer, index) - if cogr_feature == NULL: - return None - feature = FeatureBuilder().build( - cogr_feature, - bbox=False, - encoding=self.get_internalencoding(), - driver=self.collection.driver - ) - _deleteOgrFeature(cogr_feature) - return feature - - - def isactive(self): - if self.cogr_layer != NULL and self.cogr_ds != NULL: - return 1 - else: - return 0 - - -cdef class WritingSession(Session): - - cdef object _schema_mapping - - def start(self, collection): - cdef void *cogr_fielddefn = NULL - cdef void *cogr_driver = NULL - cdef void *cogr_ds = NULL - cdef void *cogr_layer = NULL - cdef void *cogr_srs = NULL - cdef char **options = NULL - self.collection = collection - cdef const char *path_c = NULL - cdef const char *driver_c = NULL - cdef const char *name_c = NULL - cdef const char *proj_c = NULL - cdef const char *fileencoding_c = NULL - path = collection.path - - if collection.mode == 'a': - if os.path.exists(path): - try: - path_b = path.encode('utf-8') - except UnicodeDecodeError: - path_b = path - path_c = path_b - self.cogr_ds = ogrext2.GDALOpenEx(path_c, - ogrext2.GDAL_OF_VECTOR | ogrext2.GDAL_OF_UPDATE, - NULL, NULL, NULL) - - cogr_driver = ogrext2.GDALGetDatasetDriver(self.cogr_ds) - if cogr_driver == NULL: - raise ValueError("Null driver") - - if isinstance(collection.name, string_types): - name_b = collection.name.encode() - name_c = name_b - self.cogr_layer = ogrext2.GDALDatasetGetLayerByName( - 
self.cogr_ds, name_c) - elif isinstance(collection.name, int): - self.cogr_layer = ogrext2.GDALDatasetGetLayer( - self.cogr_ds, collection.name) - - if self.cogr_layer == NULL: - raise RuntimeError( - "Failed to get layer %s" % collection.name) - else: - raise OSError("No such file or directory %s" % path) - - userencoding = self.collection.encoding - self._fileencoding = (userencoding or ( - ogrext2.OGR_L_TestCapability(self.cogr_layer, OLC_STRINGSASUTF8) and - 'utf-8') or ( - self.get_driver() == "ESRI Shapefile" and - 'ISO-8859-1') or locale.getpreferredencoding()).upper() - - elif collection.mode == 'w': - try: - path_b = path.encode('utf-8') - except UnicodeDecodeError: - path_b = path - path_c = path_b - - driver_b = collection.driver.encode() - driver_c = driver_b - - cogr_driver = ogrext2.GDALGetDriverByName(driver_c) - if cogr_driver == NULL: - raise ValueError("Null driver") - - # Our most common use case is the creation of a new data - # file and historically we've assumed that it's a file on - # the local filesystem and queryable via os.path. - # - # TODO: remove the assumption. - if not os.path.exists(path): - cogr_ds = exc_wrap_pointer(ogrext2.GDALCreate( - cogr_driver, - path_c, - 0, - 0, - 0, - ogrext2.GDT_Unknown, - NULL)) - - # TODO: revisit the logic in the following blocks when we - # change the assumption above. - # TODO: use exc_wrap_pointer() - else: - cogr_ds = ogrext2.GDALOpenEx(path_c, - ogrext2.GDAL_OF_VECTOR | ogrext2.GDAL_OF_UPDATE, - NULL, - NULL, - NULL) - - # TODO: use exc_wrap_pointer() - if cogr_ds == NULL: - cogr_ds = ogrext2.GDALCreate( - cogr_driver, - path_c, - 0, - 0, - 0, - ogrext2.GDT_Unknown, - NULL) - - elif collection.name is None: - ogrext2.GDALClose(cogr_ds) - cogr_ds = NULL - log.debug("Deleted pre-existing data at %s", path) - cogr_ds = ogrext2.GDALCreate( - cogr_driver, - path_c, - 0, - 0, - 0, - ogrext2.GDT_Unknown, - NULL) - - else: - pass - - if cogr_ds == NULL: - raise RuntimeError("Failed to open %s" % path) - else: - self.cogr_ds = cogr_ds - - # Set the spatial reference system from the crs given to the - # collection constructor. We by-pass the crs_wkt and crs - # properties because they aren't accessible until the layer - # is constructed (later). - col_crs = collection._crs_wkt or collection._crs - if col_crs: - cogr_srs = ogrext2.OSRNewSpatialReference(NULL) - if cogr_srs == NULL: - raise ValueError("NULL spatial reference") - # First, check for CRS strings like "EPSG:3857". - if isinstance(col_crs, string_types): - proj_b = col_crs.encode('utf-8') - proj_c = proj_b - ogrext2.OSRSetFromUserInput(cogr_srs, proj_c) - elif isinstance(col_crs, compat.DICT_TYPES): - # EPSG is a special case. - init = col_crs.get('init') - if init: - log.debug("Init: %s", init) - auth, val = init.split(':') - if auth.upper() == 'EPSG': - log.debug("Setting EPSG: %s", val) - ogrext2.OSRImportFromEPSG(cogr_srs, int(val)) - else: - params = [] - col_crs['wktext'] = True - for k, v in col_crs.items(): - if v is True or (k in ('no_defs', 'wktext') and v): - params.append("+%s" % k) - else: - params.append("+%s=%s" % (k, v)) - proj = " ".join(params) - log.debug("PROJ.4 to be imported: %r", proj) - proj_b = proj.encode('utf-8') - proj_c = proj_b - ogrext2.OSRImportFromProj4(cogr_srs, proj_c) - else: - raise ValueError("Invalid CRS") - - # Fixup, export to WKT, and set the GDAL dataset's projection. - ogrext2.OSRFixup(cogr_srs) - - # Figure out what encoding to use. 
The encoding parameter given - # to the collection constructor takes highest precedence, then - # 'iso-8859-1', then the system's default encoding as last resort. - sysencoding = locale.getpreferredencoding() - userencoding = collection.encoding - self._fileencoding = (userencoding or ( - collection.driver == "ESRI Shapefile" and - 'ISO-8859-1') or sysencoding).upper() - - # The ENCODING option makes no sense for some drivers and - # will result in a warning. Fixing is a TODO. - fileencoding = self.get_fileencoding() - if fileencoding: - fileencoding_b = fileencoding.encode() - fileencoding_c = fileencoding_b - with cpl_errs: - options = ogrext2.CSLSetNameValue(options, "ENCODING", fileencoding_c) - - # Does the layer exist already? If so, we delete it. - layer_count = ogrext2.GDALDatasetGetLayerCount(self.cogr_ds) - layer_names = [] - for i in range(layer_count): - cogr_layer = ogrext2.GDALDatasetGetLayer(cogr_ds, i) - name_c = ogrext2.OGR_L_GetName(cogr_layer) - name_b = name_c - layer_names.append(name_b.decode('utf-8')) - - idx = -1 - if isinstance(collection.name, string_types): - if collection.name in layer_names: - idx = layer_names.index(collection.name) - elif isinstance(collection.name, int): - if collection.name >= 0 and collection.name < layer_count: - idx = collection.name - if idx >= 0: - log.debug("Deleted pre-existing layer at %s", collection.name) - ogrext2.GDALDatasetDeleteLayer(self.cogr_ds, idx) - - # Create the named layer in the datasource. - name_b = collection.name.encode('utf-8') - name_c = name_b - - try: - self.cogr_layer = exc_wrap_pointer( - ogrext2.GDALDatasetCreateLayer( - self.cogr_ds, name_c, cogr_srs, - geometry_type_code( - collection.schema.get('geometry', 'Unknown')), - options)) - except Exception as exc: - raise DriverIOError(str(exc)) - finally: - if options != NULL: - ogrext2.CSLDestroy(options) - - # Shapefile layers make a copy of the passed srs. GPKG - # layers, on the other hand, increment its reference - # count. OSRRelease() is the safe way to release - # OGRSpatialReferenceH. - if cogr_srs != NULL: - ogrext2.OSRRelease(cogr_srs) - - if self.cogr_layer == NULL: - raise ValueError("Null layer") - - log.debug("Created layer %s", collection.name) - - # Next, make a layer definition from the given schema properties, - # which are an ordered dict since Fiona 1.0.1. - for key, value in collection.schema['properties'].items(): - log.debug("Creating field: %s %s", key, value) - - # Convert 'long' to 'int'. See - # https://github.com/Toblerity/Fiona/issues/101. - if value == 'long': - value = 'int' - - # Is there a field width/precision? - width = precision = None - if ':' in value: - value, fmt = value.split(':') - if '.' in fmt: - width, precision = map(int, fmt.split('.')) - else: - width = int(fmt) - - field_type = FIELD_TYPES.index(value) - # See https://trac.osgeo.org/gdal/wiki/rfc31_ogr_64 - if value == 'int' and (width is not None and width >= 10): - field_type = 12 - - encoding = self.get_internalencoding() - key_bytes = key.encode(encoding) - - cogr_fielddefn = ogrext2.OGR_Fld_Create( - key_bytes, - field_type) - if cogr_fielddefn == NULL: - raise ValueError("Null field definition") - if width: - ogrext2.OGR_Fld_SetWidth(cogr_fielddefn, width) - if precision: - ogrext2.OGR_Fld_SetPrecision(cogr_fielddefn, precision) - ogrext2.OGR_L_CreateField(self.cogr_layer, cogr_fielddefn, 1) - ogrext2.OGR_Fld_Destroy(cogr_fielddefn) - log.debug("Created fields") - - # Mapping of the Python collection schema to the munged - # OGR schema. 
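The field-creation loop above splits Fiona's compact schema type strings into a type name plus optional width and precision. The same parsing as a standalone sketch (the helper name is hypothetical; the logic mirrors the hunk):

    def parse_schema_type(value):
        """E.g. 'float:24.15' -> ('float', 24, 15); 'int:10' -> ('int', 10, None)."""
        width = precision = None
        if ':' in value:
            value, fmt = value.split(':')
            if '.' in fmt:
                width, precision = map(int, fmt.split('.'))
            else:
                width = int(fmt)
        return value, width, precision

    assert parse_schema_type('str:80') == ('str', 80, None)
    assert parse_schema_type('datetime') == ('datetime', None, None)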
- ogr_schema = self.get_schema() - self._schema_mapping = dict(zip( - collection.schema['properties'].keys(), - ogr_schema['properties'].keys() )) - - log.debug("Writing started") - - def writerecs(self, records, collection): - """Writes buffered records to OGR.""" - cdef void *cogr_driver - cdef void *cogr_feature - - cdef void *cogr_layer = self.cogr_layer - if cogr_layer == NULL: - raise ValueError("Null layer") - - schema_geom_type = collection.schema['geometry'] - cogr_driver = ogrext2.GDALGetDatasetDriver(self.cogr_ds) - if ogrext2.OGR_Dr_GetName(cogr_driver) == b"GeoJSON": - def validate_geometry_type(rec): - return True - elif ogrext2.OGR_Dr_GetName(cogr_driver) == b"ESRI Shapefile" \ - and "Point" not in collection.schema['geometry']: - schema_geom_type = collection.schema['geometry'].lstrip( - "3D ").lstrip("Multi") - def validate_geometry_type(rec): - return rec['geometry'] is None or \ - rec['geometry']['type'].lstrip( - "3D ").lstrip("Multi") == schema_geom_type - else: - schema_geom_type = collection.schema['geometry'].lstrip("3D ") - def validate_geometry_type(rec): - return rec['geometry'] is None or \ - rec['geometry']['type'].lstrip("3D ") == schema_geom_type - - schema_props_keys = set(collection.schema['properties'].keys()) - for record in records: - log.debug("Creating feature in layer: %s" % record) - # Validate against collection's schema. - if set(record['properties'].keys()) != schema_props_keys: - raise ValueError( - "Record does not match collection schema: %r != %r" % ( - record['properties'].keys(), - list(schema_props_keys) )) - if not validate_geometry_type(record): - raise ValueError( - "Record's geometry type does not match " - "collection schema's geometry type: %r != %r" % ( - record['geometry']['type'], - collection.schema['geometry'] )) - - cogr_feature = OGRFeatureBuilder().build(record, collection) - result = ogrext2.OGR_L_CreateFeature(cogr_layer, cogr_feature) - if result != OGRERR_NONE: - raise RuntimeError("Failed to write record: %s" % record) - _deleteOgrFeature(cogr_feature) - - def sync(self, collection): - """Syncs OGR to disk.""" - cdef void *cogr_ds = self.cogr_ds - cdef void *cogr_layer = self.cogr_layer - if cogr_ds == NULL: - raise ValueError("Null data source") - - - with cpl_errs: - ogrext2.GDALFlushCache(cogr_ds) - - log.debug("Flushed data source cache") - -cdef class Iterator: - - """Provides iterated access to feature data. 
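`writerecs` above validates each record against the collection schema before building an OGR feature. The essence of that check in plain Python, with an invented schema and record:

    schema = {"geometry": "Polygon", "properties": {"name": "str", "area": "float"}}
    record = {
        "geometry": {"type": "Polygon",
                     "coordinates": [[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0)]]},
        "properties": {"name": "a", "area": 1.0},
    }

    if set(record["properties"]) != set(schema["properties"]):
        raise ValueError("Record does not match collection schema")
    # lstrip("3D ") drops a leading "3D " marker so 2D and 3D variants compare equal.
    if record["geometry"] is not None and \
            record["geometry"]["type"].lstrip("3D ") != schema["geometry"].lstrip("3D "):
        raise ValueError("Record's geometry type does not match collection schema")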
- """ - - # Reference to its Collection - cdef collection - cdef encoding - cdef int next_index - cdef stop - cdef start - cdef step - cdef fastindex - cdef stepsign - - def __cinit__(self, collection, start=None, stop=None, step=None, - bbox=None, mask=None): - if collection.session is None: - raise ValueError("I/O operation on closed collection") - self.collection = collection - cdef Session session - cdef void *cogr_geometry - session = self.collection.session - cdef void *cogr_layer = session.cogr_layer - if cogr_layer == NULL: - raise ValueError("Null layer") - ogrext2.OGR_L_ResetReading(cogr_layer) - - if bbox and mask: - raise ValueError("mask and bbox can not be set together") - - if bbox: - ogrext2.OGR_L_SetSpatialFilterRect( - cogr_layer, bbox[0], bbox[1], bbox[2], bbox[3]) - elif mask: - cogr_geometry = OGRGeomBuilder().build(mask) - ogrext2.OGR_L_SetSpatialFilter(cogr_layer, cogr_geometry) - ogrext2.OGR_G_DestroyGeometry(cogr_geometry) - - else: - ogrext2.OGR_L_SetSpatialFilter( - cogr_layer, NULL) - self.encoding = session.get_internalencoding() - - self.fastindex = ogrext2.OGR_L_TestCapability( - session.cogr_layer, OLC_FASTSETNEXTBYINDEX) - - ftcount = ogrext2.OGR_L_GetFeatureCount(session.cogr_layer, 0) - if ftcount == -1 and ((start is not None and start < 0) or - (stop is not None and stop < 0)): - raise IndexError( - "collection's dataset does not support negative slice indexes") - - if stop is not None and stop < 0: - stop += ftcount - - if start is None: - start = 0 - if start is not None and start < 0: - start += ftcount - - # step size - if step is None: - step = 1 - if step == 0: - raise ValueError("slice step cannot be zero") - if step < 0 and not self.fastindex: - warnings.warn("Layer does not support" \ - "OLCFastSetNextByIndex, negative step size may" \ - " be slow", RuntimeWarning) - self.stepsign = int(math.copysign(1, step)) - self.stop = stop - self.start = start - self.step = step - - self.next_index = start - log.debug("Index: %d", self.next_index) - ogrext2.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - - def __iter__(self): - return self - - - def _next(self): - """Internal method to set read cursor to next item""" - - cdef Session session - session = self.collection.session - - # Check if next_index is valid - if self.next_index < 0: - raise StopIteration - - if self.stepsign == 1: - if self.next_index < self.start or (self.stop is not None and self.next_index >= self.stop): - raise StopIteration - else: - if self.next_index > self.start or (self.stop is not None and self.next_index <= self.stop): - raise StopIteration - - - # Set read cursor to next_item position - if self.step > 1 and self.fastindex: - ogrext2.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - elif self.step > 1 and not self.fastindex and not self.next_index == self.start: - for _ in range(self.step - 1): - # TODO rbuffat add test -> OGR_L_GetNextFeature increments cursor by 1, therefore self.step - 1 as one increment was performed when feature is read - cogr_feature = ogrext2.OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration - elif self.step > 1 and not self.fastindex and self.next_index == self.start: - ogrext2.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - elif self.step == 0: - # ogrext2.OGR_L_GetNextFeature increments read cursor by one - pass - elif self.step < 0: - ogrext2.OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - # set the next index - self.next_index += self.step - - - def 
__next__(self): - cdef void * cogr_feature - cdef Session session - session = self.collection.session - - #Update read cursor - self._next() - - # Get the next feature. - cogr_feature = ogrext2.OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration - - feature = FeatureBuilder().build( - cogr_feature, - bbox=False, - encoding=self.encoding, - driver=self.collection.driver - ) - _deleteOgrFeature(cogr_feature) - return feature - - -cdef class ItemsIterator(Iterator): - - def __next__(self): - - cdef long fid - cdef void * cogr_feature - cdef Session session - session = self.collection.session - - #Update read cursor - self._next() - - # Get the next feature. - cogr_feature = ogrext2.OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration - - - fid = ogrext2.OGR_F_GetFID(cogr_feature) - feature = FeatureBuilder().build( - cogr_feature, - bbox=False, - encoding=self.encoding, - driver=self.collection.driver - ) - _deleteOgrFeature(cogr_feature) - - return fid, feature - - -cdef class KeysIterator(Iterator): - - def __next__(self): - cdef long fid - cdef void * cogr_feature - cdef Session session - session = self.collection.session - - #Update read cursor - self._next() - - # Get the next feature. - cogr_feature = ogrext2.OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration - - fid = ogrext2.OGR_F_GetFID(cogr_feature) - _deleteOgrFeature(cogr_feature) - - return fid - - -def _remove(path, driver=None): - """Deletes an OGR data source - """ - cdef void *cogr_driver - cdef int result - - if driver is None: - driver = 'ESRI Shapefile' - - cogr_driver = ogrext2.OGRGetDriverByName(driver.encode('utf-8')) - if cogr_driver == NULL: - raise ValueError("Null driver") - - if not ogrext2.OGR_Dr_TestCapability(cogr_driver, ODrCDeleteDataSource): - raise RuntimeError("Driver does not support dataset removal operation") - - result = ogrext2.GDALDeleteDataset(cogr_driver, path.encode('utf-8')) - if result != OGRERR_NONE: - raise RuntimeError("Failed to remove data source {}".format(path)) - - -def _listlayers(path): - - """Provides a list of the layers in an OGR data source. - """ - - cdef void *cogr_ds - cdef void *cogr_layer - cdef const char *path_c - cdef const char *name_c - - # Open OGR data source. - try: - path_b = path.encode('utf-8') - except UnicodeDecodeError: - path_b = path - path_c = path_b - with cpl_errs: - cogr_ds = ogrext2.GDALOpenEx(path_c, - ogrext2.GDAL_OF_VECTOR | ogrext2.GDAL_OF_READONLY, - NULL, - NULL, - NULL) -# cogr_ds = ogrext2.OGROpen(path_c, 0, NULL) - if cogr_ds == NULL: - raise ValueError("No data available at path '%s'" % path) - - # Loop over the layers to get their names. - layer_count = ogrext2.GDALDatasetGetLayerCount(cogr_ds) - layer_names = [] - for i in range(layer_count): - cogr_layer = ogrext2.GDALDatasetGetLayer(cogr_ds, i) - name_c = ogrext2.OGR_L_GetName(cogr_layer) - name_b = name_c - layer_names.append(name_b.decode('utf-8')) - - # Close up data source. - if cogr_ds != NULL: - ogrext2.GDALClose(cogr_ds) - cogr_ds = NULL - - return layer_names - -def buffer_to_virtual_file(bytesbuf, ext=''): - """Maps a bytes buffer to a virtual file. - - `ext` is empty or begins with a period and contains at most one period. 
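A typical round trip with this helper and `remove_virtual_file` (defined just below) might look like the following sketch; the input path is invented, and opening the /vsimem path directly with fiona.open is assumed to work for drivers GDAL can read from virtual files, which is the pattern Fiona's BytesCollection relies on internally:

    import fiona

    with open("example.geojson", "rb") as f:    # invented input file
        data = f.read()

    vsi_path = buffer_to_virtual_file(data, ext=".geojson")
    try:
        with fiona.open(vsi_path) as src:       # GDAL reads from /vsimem
            features = list(src)
    finally:
        remove_virtual_file(vsi_path)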
- """ - vsi_filename = os.path.join('/vsimem', uuid.uuid4().hex + ext) - vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') - - vsi_handle = ogrext2.VSIFileFromMemBuffer(vsi_cfilename, bytesbuf, len(bytesbuf), 0) - if vsi_handle == NULL: - raise OSError('failed to map buffer to file') - if ogrext2.VSIFCloseL(vsi_handle) != 0: - raise OSError('failed to close mapped file handle') - - return vsi_filename - -def remove_virtual_file(vsi_filename): - vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') - return ogrext2.VSIUnlink(vsi_cfilename) diff -Nru fiona-1.7.10/fiona/ogrext.pyx fiona-1.8.6/fiona/ogrext.pyx --- fiona-1.7.10/fiona/ogrext.pyx 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/ogrext.pyx 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,1767 @@ +# These are extension functions and classes using the OGR C API. + +from __future__ import absolute_import + +import datetime +import json +import locale +import logging +import os +import warnings +import math +import uuid +from collections import namedtuple + +from six import integer_types, string_types, text_type + +from fiona._shim cimport * + +from fiona._geometry cimport ( + GeomBuilder, OGRGeomBuilder, geometry_type_code, + normalize_geometry_type_code, base_geometry_type_code) +from fiona._err cimport exc_wrap_int, exc_wrap_pointer, exc_wrap_vsilfile + +import fiona +from fiona._env import GDALVersion, get_gdal_version_num +from fiona._err import cpl_errs, FionaNullPointerError, CPLE_BaseError, CPLE_OpenFailedError +from fiona._geometry import GEOMETRY_TYPES +from fiona import compat +from fiona.errors import ( + DriverError, DriverIOError, SchemaError, CRSError, FionaValueError, + TransactionError, GeometryTypeValidationError, DatasetDeleteError, + FionaDeprecationWarning) +from fiona.compat import OrderedDict +from fiona.rfc3339 import parse_date, parse_datetime, parse_time +from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType +from fiona.schema import FIELD_TYPES, FIELD_TYPES_MAP, normalize_field_type +from fiona.path import vsi_path + +from fiona._shim cimport is_field_null + +from libc.stdlib cimport malloc, free +from libc.string cimport strcmp +from cpython cimport PyBytes_FromStringAndSize, PyBytes_AsString + + +cdef extern from "ogr_api.h" nogil: + + ctypedef void * OGRLayerH + ctypedef void * OGRDataSourceH + ctypedef void * OGRSFDriverH + ctypedef void * OGRFieldDefnH + ctypedef void * OGRFeatureDefnH + ctypedef void * OGRFeatureH + ctypedef void * OGRGeometryH + + +log = logging.getLogger(__name__) + +DEFAULT_TRANSACTION_SIZE = 20000 + +# OGR Driver capability +cdef const char * ODrCCreateDataSource = "CreateDataSource" +cdef const char * ODrCDeleteDataSource = "DeleteDataSource" + +# OGR Layer capability +cdef const char * OLC_RANDOMREAD = "RandomRead" +cdef const char * OLC_SEQUENTIALWRITE = "SequentialWrite" +cdef const char * OLC_RANDOMWRITE = "RandomWrite" +cdef const char * OLC_FASTSPATIALFILTER = "FastSpatialFilter" +cdef const char * OLC_FASTFEATURECOUNT = "FastFeatureCount" +cdef const char * OLC_FASTGETEXTENT = "FastGetExtent" +cdef const char * OLC_FASTSETNEXTBYINDEX = "FastSetNextByIndex" +cdef const char * OLC_CREATEFIELD = "CreateField" +cdef const char * OLC_CREATEGEOMFIELD = "CreateGeomField" +cdef const char * OLC_DELETEFIELD = "DeleteField" +cdef const char * OLC_REORDERFIELDS = "ReorderFields" +cdef const char * OLC_ALTERFIELDDEFN = "AlterFieldDefn" +cdef const 
char * OLC_DELETEFEATURE = "DeleteFeature" +cdef const char * OLC_STRINGSASUTF8 = "StringsAsUTF8" +cdef const char * OLC_TRANSACTIONS = "Transactions" + +# OGR integer error types. + +OGRERR_NONE = 0 +OGRERR_NOT_ENOUGH_DATA = 1 # not enough data to deserialize */ +OGRERR_NOT_ENOUGH_MEMORY = 2 +OGRERR_UNSUPPORTED_GEOMETRY_TYPE = 3 +OGRERR_UNSUPPORTED_OPERATION = 4 +OGRERR_CORRUPT_DATA = 5 +OGRERR_FAILURE = 6 +OGRERR_UNSUPPORTED_SRS = 7 +OGRERR_INVALID_HANDLE = 8 + + +def _explode(coords): + """Explode a GeoJSON geometry's coordinates object and yield + coordinate tuples. As long as the input is conforming, the type of + the geometry doesn't matter.""" + for e in coords: + if isinstance(e, (float, int)): + yield coords + break + else: + for f in _explode(e): + yield f + + +def _bounds(geometry): + """Bounding box of a GeoJSON geometry""" + try: + xyz = tuple(zip(*list(_explode(geometry['coordinates'])))) + return min(xyz[0]), min(xyz[1]), max(xyz[0]), max(xyz[1]) + except (KeyError, TypeError): + return None + + +cdef int GDAL_VERSION_NUM = get_gdal_version_num() + + +# Feature extension classes and functions follow. + +cdef class FeatureBuilder: + """Build Fiona features from OGR feature pointers. + + No OGR objects are allocated by this function and the feature + argument is not destroyed. + """ + + cdef build(self, void *feature, encoding='utf-8', bbox=False, driver=None, ignore_fields=None, ignore_geometry=False): + """Build a Fiona feature object from an OGR feature + + Parameters + ---------- + feature : void * + The OGR feature # TODO: use a real typedef + encoding : str + The encoding of OGR feature attributes + bbox : bool + Not used + driver : str + OGR format driver name like 'GeoJSON' + ignore_fields : sequence + A sequence of field names that will be ignored and omitted + in the Fiona feature properties + ignore_geometry : bool + Flag for whether the OGR geometry field is to be ignored + + Returns + ------- + dict + """ + cdef void *fdefn = NULL + cdef int i + cdef int y = 0 + cdef int m = 0 + cdef int d = 0 + cdef int hh = 0 + cdef int mm = 0 + cdef int ss = 0 + cdef int tz = 0 + cdef unsigned char *data = NULL + cdef int l + cdef int retval + cdef int fieldsubtype + cdef const char *key_c = NULL + + # Skeleton of the feature to be returned. + fid = OGR_F_GetFID(feature) + props = OrderedDict() + fiona_feature = { + "type": "Feature", + "id": str(fid), + "properties": props, + } + + ignore_fields = set(ignore_fields or []) + + # Iterate over the fields of the OGR feature. 
+ for i in range(OGR_F_GetFieldCount(feature)): + fdefn = OGR_F_GetFieldDefnRef(feature, i) + if fdefn == NULL: + raise ValueError("Null feature definition") + key_c = OGR_Fld_GetNameRef(fdefn) + if key_c == NULL: + raise ValueError("Null field name reference") + key_b = key_c + key = key_b.decode(encoding) + + if key in ignore_fields: + continue + + fieldtypename = FIELD_TYPES[OGR_Fld_GetType(fdefn)] + fieldsubtype = get_field_subtype(fdefn) + if not fieldtypename: + log.warning( + "Skipping field %s: invalid type %s", + key, + OGR_Fld_GetType(fdefn)) + continue + + # TODO: other types + fieldtype = FIELD_TYPES_MAP[fieldtypename] + + if is_field_null(feature, i): + props[key] = None + + elif fieldtypename is 'int32': + if fieldsubtype == OFSTBoolean: + props[key] = bool(OGR_F_GetFieldAsInteger(feature, i)) + else: + props[key] = OGR_F_GetFieldAsInteger(feature, i) + + elif fieldtype is int: + if fieldsubtype == OFSTBoolean: + props[key] = bool(OGR_F_GetFieldAsInteger64(feature, i)) + else: + props[key] = OGR_F_GetFieldAsInteger64(feature, i) + + elif fieldtype is float: + props[key] = OGR_F_GetFieldAsDouble(feature, i) + + elif fieldtype is text_type: + try: + val = OGR_F_GetFieldAsString(feature, i) + val = val.decode(encoding) + except UnicodeDecodeError: + log.warning( + "Failed to decode %s using %s codec", val, encoding) + + # Does the text contain a JSON object? Let's check. + # Let's check as cheaply as we can. + if driver == 'GeoJSON' and val.startswith('{'): + try: + val = json.loads(val) + except ValueError as err: + log.warning(str(err)) + + # Now add to the properties object. + props[key] = val + + elif fieldtype in (FionaDateType, FionaTimeType, FionaDateTimeType): + retval = OGR_F_GetFieldAsDateTime( + feature, i, &y, &m, &d, &hh, &mm, &ss, &tz) + try: + if fieldtype is FionaDateType: + props[key] = datetime.date(y, m, d).isoformat() + elif fieldtype is FionaTimeType: + props[key] = datetime.time(hh, mm, ss).isoformat() + else: + props[key] = datetime.datetime( + y, m, d, hh, mm, ss).isoformat() + except ValueError as err: + log.exception(err) + props[key] = None + + elif fieldtype is bytes: + data = OGR_F_GetFieldAsBinary(feature, i, &l) + props[key] = data[:l] + + else: + log.debug("%s: None, fieldtype: %r, %r" % (key, fieldtype, fieldtype in string_types)) + props[key] = None + + cdef void *cogr_geometry = NULL + cdef void *org_geometry = NULL + + if not ignore_geometry: + cogr_geometry = OGR_F_GetGeometryRef(feature) + + if cogr_geometry is not NULL: + + code = base_geometry_type_code(OGR_G_GetGeometryType(cogr_geometry)) + + if 8 <= code <= 14: # Curves. + cogr_geometry = get_linear_geometry(cogr_geometry) + geom = GeomBuilder().build(cogr_geometry) + OGR_G_DestroyGeometry(cogr_geometry) + + elif 15 <= code <= 17: + # We steal the geometry: the geometry of the in-memory feature is now null + # and we are responsible for cogr_geometry. + org_geometry = OGR_F_StealGeometry(feature) + + if code in (15, 16): + cogr_geometry = OGR_G_ForceToMultiPolygon(org_geometry) + elif code == 17: + cogr_geometry = OGR_G_ForceToPolygon(org_geometry) + + geom = GeomBuilder().build(cogr_geometry) + OGR_G_DestroyGeometry(cogr_geometry) + + else: + geom = GeomBuilder().build(cogr_geometry) + + fiona_feature["geometry"] = geom + + else: + + fiona_feature["geometry"] = None + + return fiona_feature + + +cdef class OGRFeatureBuilder: + + """Builds an OGR Feature from a Fiona feature mapping. + + Allocates one OGR Feature which should be destroyed by the caller. 
+ Borrows a layer definition from the collection. + """ + + cdef void * build(self, feature, collection) except NULL: + cdef void *cogr_geometry = NULL + cdef const char *string_c = NULL + cdef WritingSession session + session = collection.session + cdef void *cogr_layer = session.cogr_layer + if cogr_layer == NULL: + raise ValueError("Null layer") + cdef void *cogr_featuredefn = OGR_L_GetLayerDefn(cogr_layer) + if cogr_featuredefn == NULL: + raise ValueError("Null feature definition") + cdef void *cogr_feature = OGR_F_Create(cogr_featuredefn) + if cogr_feature == NULL: + raise ValueError("Null feature") + + if feature['geometry'] is not None: + cogr_geometry = OGRGeomBuilder().build( + feature['geometry']) + OGR_F_SetGeometryDirectly(cogr_feature, cogr_geometry) + + # OGR_F_SetFieldString takes encoded strings ('bytes' in Python 3). + encoding = session._get_internal_encoding() + + for key, value in feature['properties'].items(): + log.debug( + "Looking up %s in %s", key, repr(session._schema_mapping)) + ogr_key = session._schema_mapping[key] + + schema_type = normalize_field_type(collection.schema['properties'][key]) + + log.debug("Normalizing schema type for key %r in schema %r to %r", key, collection.schema['properties'], schema_type) + + try: + key_bytes = ogr_key.encode(encoding) + except UnicodeDecodeError: + log.warning("Failed to encode %s using %s codec", key, encoding) + key_bytes = ogr_key + key_c = key_bytes + i = OGR_F_GetFieldIndex(cogr_feature, key_c) + if i < 0: + continue + + # Special case: serialize dicts to assist OGR. + if isinstance(value, dict): + value = json.dumps(value) + + # Continue over the standard OGR types. + if isinstance(value, integer_types): + + log.debug("Setting field %r, type %r, to value %r", i, schema_type, value) + + if schema_type == 'int32': + OGR_F_SetFieldInteger(cogr_feature, i, value) + else: + OGR_F_SetFieldInteger64(cogr_feature, i, value) + + elif isinstance(value, float): + OGR_F_SetFieldDouble(cogr_feature, i, value) + elif (isinstance(value, string_types) + and schema_type in ['date', 'time', 'datetime']): + if schema_type == 'date': + y, m, d, hh, mm, ss, ff = parse_date(value) + elif schema_type == 'time': + y, m, d, hh, mm, ss, ff = parse_time(value) + else: + y, m, d, hh, mm, ss, ff = parse_datetime(value) + OGR_F_SetFieldDateTime( + cogr_feature, i, y, m, d, hh, mm, ss, 0) + elif (isinstance(value, datetime.date) + and schema_type == 'date'): + y, m, d = value.year, value.month, value.day + OGR_F_SetFieldDateTime( + cogr_feature, i, y, m, d, 0, 0, 0, 0) + elif (isinstance(value, datetime.datetime) + and schema_type == 'datetime'): + y, m, d = value.year, value.month, value.day + hh, mm, ss = value.hour, value.minute, value.second + OGR_F_SetFieldDateTime( + cogr_feature, i, y, m, d, hh, mm, ss, 0) + elif (isinstance(value, datetime.time) + and schema_type == 'time'): + hh, mm, ss = value.hour, value.minute, value.second + OGR_F_SetFieldDateTime( + cogr_feature, i, 0, 0, 0, hh, mm, ss, 0) + elif isinstance(value, bytes) and schema_type == "bytes": + string_c = value + OGR_F_SetFieldBinary(cogr_feature, i, len(value), + string_c) + elif isinstance(value, string_types): + try: + value_bytes = value.encode(encoding) + except UnicodeDecodeError: + log.warning( + "Failed to encode %s using %s codec", value, encoding) + value_bytes = value + string_c = value_bytes + OGR_F_SetFieldString(cogr_feature, i, string_c) + elif value is None: + set_field_null(cogr_feature, i) + else: + raise ValueError("Invalid field type %s" % type(value)) + 
log.debug("Set field %s: %r" % (key, value)) + return cogr_feature + + +cdef _deleteOgrFeature(void *cogr_feature): + """Delete an OGR feature""" + if cogr_feature is not NULL: + OGR_F_Destroy(cogr_feature) + cogr_feature = NULL + + +def featureRT(feature, collection): + # For testing purposes only, leaks the JSON data + cdef void *cogr_feature = OGRFeatureBuilder().build(feature, collection) + cdef void *cogr_geometry = OGR_F_GetGeometryRef(cogr_feature) + if cogr_geometry == NULL: + raise ValueError("Null geometry") + result = FeatureBuilder().build( + cogr_feature, + encoding='utf-8', + bbox=False, + driver=collection.driver + ) + _deleteOgrFeature(cogr_feature) + return result + + +# Collection-related extension classes and functions + +cdef class Session: + + cdef void *cogr_ds + cdef void *cogr_layer + cdef object _fileencoding + cdef object _encoding + cdef object collection + + def __init__(self): + self.cogr_ds = NULL + self.cogr_layer = NULL + self._fileencoding = None + self._encoding = None + + def __dealloc__(self): + self.stop() + + def start(self, collection, **kwargs): + cdef const char *path_c = NULL + cdef const char *name_c = NULL + cdef void *drv = NULL + cdef void *ds = NULL + cdef char **ignore_fields = NULL + + path_b = collection.path.encode('utf-8') + path_c = path_b + + self._fileencoding = kwargs.get('encoding') or collection.encoding + + # We have two ways of specifying drivers to try. Resolve the + # values into a single set of driver short names. + if collection._driver: + drivers = set([collection._driver]) + elif collection.enabled_drivers: + drivers = set(collection.enabled_drivers) + else: + drivers = None + + encoding = kwargs.pop('encoding', None) + if encoding: + kwargs['encoding'] = encoding.upper() + + self.cogr_ds = gdal_open_vector(path_c, 0, drivers, kwargs) + + if isinstance(collection.name, string_types): + name_b = collection.name.encode('utf-8') + name_c = name_b + self.cogr_layer = GDALDatasetGetLayerByName(self.cogr_ds, name_c) + elif isinstance(collection.name, int): + self.cogr_layer = GDALDatasetGetLayer(self.cogr_ds, collection.name) + name_c = OGR_L_GetName(self.cogr_layer) + name_b = name_c + collection.name = name_b.decode('utf-8') + + if self.cogr_layer == NULL: + raise ValueError("Null layer: " + repr(collection.name)) + + encoding = self._get_internal_encoding() + + if collection.ignore_fields: + try: + for name in collection.ignore_fields: + try: + name_b = name.encode(encoding) + except AttributeError: + raise TypeError("Ignored field \"{}\" has type \"{}\", expected string".format(name, name.__class__.__name__)) + ignore_fields = CSLAddString(ignore_fields, name_b) + OGR_L_SetIgnoredFields(self.cogr_layer, ignore_fields) + finally: + CSLDestroy(ignore_fields) + + self.collection = collection + + cpdef stop(self): + self.cogr_layer = NULL + if self.cogr_ds != NULL: + GDALClose(self.cogr_ds) + self.cogr_ds = NULL + + def get_fileencoding(self): + """DEPRECATED""" + warnings.warn("get_fileencoding is deprecated and will be removed in a future version.", FionaDeprecationWarning) + return self._fileencoding + + def _get_fallback_encoding(self): + """Determine a format-specific fallback encoding to use when using OGR_F functions + + Parameters + ---------- + None + + Returns + ------- + str + + """ + if "Shapefile" in self.get_driver(): + return 'iso-8859-1' + else: + return locale.getpreferredencoding() + + + def _get_internal_encoding(self): + """Determine the encoding to use when use OGR_F functions + + Parameters + ---------- + 
None + + Returns + ------- + str + + Notes + ----- + If the layer implements RFC 23 support for UTF-8, the return + value will be 'utf-8' and callers can be certain that this is + correct. If the layer does not have the OLC_STRINGSASUTF8 + capability marker, it is not possible to know exactly what the + internal encoding is and this method returns best guesses. That + means ISO-8859-1 for shapefiles and the locale's preferred + encoding for other formats such as CSV files. + + """ + if OGR_L_TestCapability(self.cogr_layer, OLC_STRINGSASUTF8): + return 'utf-8' + else: + return self._fileencoding or self._get_fallback_encoding() + + def get_length(self): + if self.cogr_layer == NULL: + raise ValueError("Null layer") + return OGR_L_GetFeatureCount(self.cogr_layer, 0) + + def get_driver(self): + cdef void *cogr_driver = GDALGetDatasetDriver(self.cogr_ds) + if cogr_driver == NULL: + raise ValueError("Null driver") + cdef const char *name = OGR_Dr_GetName(cogr_driver) + driver_name = name + return driver_name.decode() + + def get_schema(self): + cdef int i + cdef int n + cdef void *cogr_featuredefn = NULL + cdef void *cogr_fielddefn = NULL + cdef const char *key_c + props = [] + + if self.cogr_layer == NULL: + raise ValueError("Null layer") + + if self.collection.ignore_fields: + ignore_fields = self.collection.ignore_fields + else: + ignore_fields = set() + + cogr_featuredefn = OGR_L_GetLayerDefn(self.cogr_layer) + if cogr_featuredefn == NULL: + raise ValueError("Null feature definition") + + encoding = self._get_internal_encoding() + + n = OGR_FD_GetFieldCount(cogr_featuredefn) + + for i from 0 <= i < n: + cogr_fielddefn = OGR_FD_GetFieldDefn(cogr_featuredefn, i) + if cogr_fielddefn == NULL: + raise ValueError("Null field definition") + + key_c = OGR_Fld_GetNameRef(cogr_fielddefn) + key_b = key_c + + if not bool(key_b): + raise ValueError("Invalid field name ref: %s" % key) + + key = key_b.decode(encoding) + + if key in ignore_fields: + log.debug("By request, ignoring field %r", key) + continue + + fieldtypename = FIELD_TYPES[OGR_Fld_GetType(cogr_fielddefn)] + if not fieldtypename: + log.warning( + "Skipping field %s: invalid type %s", + key, + OGR_Fld_GetType(cogr_fielddefn)) + continue + + val = fieldtypename + if fieldtypename == 'float': + fmt = "" + width = OGR_Fld_GetWidth(cogr_fielddefn) + if width: # and width != 24: + fmt = ":%d" % width + precision = OGR_Fld_GetPrecision(cogr_fielddefn) + if precision: # and precision != 15: + fmt += ".%d" % precision + val = "float" + fmt + elif fieldtypename in ('int32', 'int64'): + fmt = "" + width = OGR_Fld_GetWidth(cogr_fielddefn) + if width: + fmt = ":%d" % width + val = 'int' + fmt + elif fieldtypename == 'str': + fmt = "" + width = OGR_Fld_GetWidth(cogr_fielddefn) + if width: + fmt = ":%d" % width + val = fieldtypename + fmt + + props.append((key, val)) + + ret = {"properties": OrderedDict(props)} + + if not self.collection.ignore_geometry: + code = normalize_geometry_type_code( + OGR_FD_GetGeomType(cogr_featuredefn)) + ret["geometry"] = GEOMETRY_TYPES[code] + + return ret + + def get_crs(self): + """Get the layer's CRS + + Returns + ------- + CRS + + """ + cdef char *proj_c = NULL + cdef const char *auth_key = NULL + cdef const char *auth_val = NULL + cdef void *cogr_crs = NULL + + if self.cogr_layer == NULL: + raise ValueError("Null layer") + + try: + cogr_crs = exc_wrap_pointer(OGR_L_GetSpatialRef(self.cogr_layer)) + # TODO: we don't intend to use try/except for flow control + # this is a work around for a GDAL issue. 
+ except FionaNullPointerError: + log.debug("Layer has no coordinate system") + + if cogr_crs is not NULL: + + log.debug("Got coordinate system") + crs = {} + + try: + + retval = OSRAutoIdentifyEPSG(cogr_crs) + if retval > 0: + log.info("Failed to auto identify EPSG: %d", retval) + + try: + auth_key = exc_wrap_pointer(OSRGetAuthorityName(cogr_crs, NULL)) + auth_val = exc_wrap_pointer(OSRGetAuthorityCode(cogr_crs, NULL)) + + except CPLE_BaseError as exc: + log.debug("{}".format(exc)) + + if auth_key != NULL and auth_val != NULL: + key_b = auth_key + key = key_b.decode('utf-8') + if key == 'EPSG': + val_b = auth_val + val = val_b.decode('utf-8') + crs['init'] = "epsg:" + val + + else: + OSRExportToProj4(cogr_crs, &proj_c) + if proj_c == NULL: + raise ValueError("Null projection") + proj_b = proj_c + log.debug("Params: %s", proj_b) + value = proj_b.decode() + value = value.strip() + for param in value.split(): + kv = param.split("=") + if len(kv) == 2: + k, v = kv + try: + v = float(v) + if v % 1 == 0: + v = int(v) + except ValueError: + # Leave v as a string + pass + elif len(kv) == 1: + k, v = kv[0], True + else: + raise ValueError("Unexpected proj parameter %s" % param) + k = k.lstrip("+") + crs[k] = v + + finally: + CPLFree(proj_c) + return crs + + else: + log.debug("Projection not found (cogr_crs was NULL)") + + return {} + + def get_crs_wkt(self): + cdef char *proj_c = NULL + cdef void *cogr_crs = NULL + + if self.cogr_layer == NULL: + raise ValueError("Null layer") + + try: + cogr_crs = exc_wrap_pointer(OGR_L_GetSpatialRef(self.cogr_layer)) + + # TODO: we don't intend to use try/except for flow control + # this is a work around for a GDAL issue. + except FionaNullPointerError: + log.debug("Layer has no coordinate system") + except fiona._err.CPLE_OpenFailedError as exc: + log.debug("A support file wasn't opened. See the preceding ERROR level message.") + cogr_crs = OGR_L_GetSpatialRef(self.cogr_layer) + log.debug("Called OGR_L_GetSpatialRef() again without error checking.") + if cogr_crs == NULL: + raise exc + + if cogr_crs is not NULL: + log.debug("Got coordinate system") + + try: + OSRExportToWkt(cogr_crs, &proj_c) + if proj_c == NULL: + raise ValueError("Null projection") + proj_b = proj_c + crs_wkt = proj_b.decode('utf-8') + + finally: + CPLFree(proj_c) + return crs_wkt + + else: + log.debug("Projection not found (cogr_crs was NULL)") + return "" + + def get_extent(self): + cdef OGREnvelope extent + + if self.cogr_layer == NULL: + raise ValueError("Null layer") + + result = OGR_L_GetExtent(self.cogr_layer, &extent, 1) + return (extent.MinX, extent.MinY, extent.MaxX, extent.MaxY) + + def has_feature(self, fid): + """Provides access to feature data by FID. + + Supports Collection.__contains__(). + """ + cdef void * cogr_feature + fid = int(fid) + cogr_feature = OGR_L_GetFeature(self.cogr_layer, fid) + if cogr_feature != NULL: + _deleteOgrFeature(cogr_feature) + return True + else: + return False + + def get_feature(self, fid): + """Provides access to feature data by FID. + + Supports Collection.__contains__(). 
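When `get_crs` above cannot identify an EPSG authority, it falls back to exporting PROJ.4 and parsing the string into a dict, coercing numeric values. That parsing loop as a standalone sketch (the function name is hypothetical):

    def proj4_to_dict(value):
        """E.g. '+proj=longlat +datum=WGS84 +no_defs' ->
        {'proj': 'longlat', 'datum': 'WGS84', 'no_defs': True}."""
        crs = {}
        for param in value.strip().split():
            kv = param.split("=")
            if len(kv) == 2:
                k, v = kv
                try:
                    v = float(v)
                    if v % 1 == 0:
                        v = int(v)  # 1.0 -> 1
                except ValueError:
                    pass  # leave v as a string
            elif len(kv) == 1:
                k, v = kv[0], True  # bare flags like +no_defs
            else:
                raise ValueError("Unexpected proj parameter %s" % param)
            crs[k.lstrip("+")] = v
        return crs

    assert proj4_to_dict("+proj=longlat +datum=WGS84 +no_defs") == {
        "proj": "longlat", "datum": "WGS84", "no_defs": True}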
+ """ + cdef void * cogr_feature + fid = int(fid) + cogr_feature = OGR_L_GetFeature(self.cogr_layer, fid) + if cogr_feature != NULL: + feature = FeatureBuilder().build( + cogr_feature, + encoding=self._get_internal_encoding(), + bbox=False, + driver=self.collection.driver, + ignore_fields=self.collection.ignore_fields, + ignore_geometry=self.collection.ignore_geometry, + ) + _deleteOgrFeature(cogr_feature) + return feature + else: + raise KeyError("There is no feature with fid {!r}".format(fid)) + + get = get_feature + + # TODO: Make this an alias for get_feature in a future version. + def __getitem__(self, item): + cdef void * cogr_feature + if isinstance(item, slice): + warnings.warn("Collection slicing is deprecated and will be disabled in a future version.", FionaDeprecationWarning) + itr = Iterator(self.collection, item.start, item.stop, item.step) + log.debug("Slice: %r", item) + return list(itr) + elif isinstance(item, int): + index = item + # from the back + if index < 0: + ftcount = OGR_L_GetFeatureCount(self.cogr_layer, 0) + if ftcount == -1: + raise IndexError( + "collection's dataset does not support negative indexes") + index += ftcount + cogr_feature = OGR_L_GetFeature(self.cogr_layer, index) + if cogr_feature == NULL: + return None + feature = FeatureBuilder().build( + cogr_feature, + encoding=self._get_internal_encoding(), + bbox=False, + driver=self.collection.driver, + ignore_fields=self.collection.ignore_fields, + ignore_geometry=self.collection.ignore_geometry, + ) + _deleteOgrFeature(cogr_feature) + return feature + + def isactive(self): + if self.cogr_layer != NULL and self.cogr_ds != NULL: + return 1 + else: + return 0 + + +cdef class WritingSession(Session): + + cdef object _schema_mapping + + def start(self, collection, **kwargs): + cdef void *cogr_srs = NULL + cdef char **options = NULL + cdef const char *path_c = NULL + cdef const char *driver_c = NULL + cdef const char *name_c = NULL + cdef const char *proj_c = NULL + cdef const char *fileencoding_c = NULL + cdef OGRFieldSubType field_subtype + cdef int ret + path = collection.path + self.collection = collection + + userencoding = kwargs.get('encoding') + + if collection.mode == 'a': + + if not os.path.exists(path): + raise OSError("No such file or directory %s" % path) + + try: + path_b = path.encode('utf-8') + except UnicodeDecodeError: + path_b = path + path_c = path_b + + try: + self.cogr_ds = gdal_open_vector(path_c, 1, None, kwargs) + + if isinstance(collection.name, string_types): + name_b = collection.name.encode('utf-8') + name_c = name_b + self.cogr_layer = exc_wrap_pointer(GDALDatasetGetLayerByName(self.cogr_ds, name_c)) + + elif isinstance(collection.name, int): + self.cogr_layer = exc_wrap_pointer(GDALDatasetGetLayer(self.cogr_ds, collection.name)) + + except CPLE_BaseError as exc: + OGRReleaseDataSource(self.cogr_ds) + self.cogr_ds = NULL + self.cogr_layer = NULL + raise DriverError(u"{}".format(exc)) + + else: + self._fileencoding = userencoding or self._get_fallback_encoding() + + elif collection.mode == 'w': + + try: + path_b = path.encode('utf-8') + except UnicodeDecodeError: + path_b = path + path_c = path_b + + driver_b = collection.driver.encode() + driver_c = driver_b + cogr_driver = exc_wrap_pointer(GDALGetDriverByName(driver_c)) + + # Our most common use case is the creation of a new data + # file and historically we've assumed that it's a file on + # the local filesystem and queryable via os.path. + # + # TODO: remove the assumption. 
+            if not os.path.exists(path):
+                log.debug("File doesn't exist. Creating a new one...")
+                cogr_ds = gdal_create(cogr_driver, path_c, {})
+
+            # TODO: revisit the logic in the following blocks when we
+            # change the assumption above.
+            else:
+                if collection.driver == "GeoJSON" and os.path.exists(path):
+                    # manually remove geojson file as GDAL doesn't do this for us
+                    os.unlink(path)
+                try:
+                    # attempt to open existing dataset in write mode
+                    cogr_ds = gdal_open_vector(path_c, 1, None, kwargs)
+                except DriverError:
+                    # failed, attempt to create it
+                    cogr_ds = gdal_create(cogr_driver, path_c, kwargs)
+                else:
+                    # check capability of creating a new layer in the existing dataset
+                    capability = check_capability_create_layer(cogr_ds)
+                    if GDAL_VERSION_NUM < 2000000 and collection.driver == "GeoJSON":
+                        # the GeoJSON driver misreports its capability
+                        capability = False
+                    if not capability or collection.name is None:
+                        # unable to use existing dataset, recreate it
+                        GDALClose(cogr_ds)
+                        cogr_ds = NULL
+                        cogr_ds = gdal_create(cogr_driver, path_c, kwargs)
+
+            self.cogr_ds = cogr_ds
+
+            # Set the spatial reference system from the crs given to the
+            # collection constructor. We bypass the crs_wkt and crs
+            # properties because they aren't accessible until the layer
+            # is constructed (later).
+            try:
+
+                col_crs = collection._crs_wkt or collection._crs
+
+                if col_crs:
+                    cogr_srs = exc_wrap_pointer(OSRNewSpatialReference(NULL))
+
+                    # First, check for CRS strings like "EPSG:3857".
+                    if isinstance(col_crs, string_types):
+                        proj_b = col_crs.encode('utf-8')
+                        proj_c = proj_b
+                        OSRSetFromUserInput(cogr_srs, proj_c)
+
+                    elif isinstance(col_crs, compat.DICT_TYPES):
+                        # EPSG is a special case.
+                        init = col_crs.get('init')
+                        if init:
+                            log.debug("Init: %s", init)
+                            auth, val = init.split(':')
+                            if auth.upper() == 'EPSG':
+                                log.debug("Setting EPSG: %s", val)
+                                OSRImportFromEPSG(cogr_srs, int(val))
+                        else:
+                            params = []
+                            col_crs['wktext'] = True
+                            for k, v in col_crs.items():
+                                if v is True or (k in ('no_defs', 'wktext') and v):
+                                    params.append("+%s" % k)
+                                else:
+                                    params.append("+%s=%s" % (k, v))
+                            proj = " ".join(params)
+                            log.debug("PROJ.4 to be imported: %r", proj)
+                            proj_b = proj.encode('utf-8')
+                            proj_c = proj_b
+                            OSRImportFromProj4(cogr_srs, proj_c)
+
+                    else:
+                        raise ValueError("Invalid CRS")
+
+                    # Fixup, export to WKT, and set the GDAL dataset's projection.
+                    OSRFixup(cogr_srs)
+
+            except (ValueError, CPLE_BaseError) as exc:
+                OGRReleaseDataSource(self.cogr_ds)
+                self.cogr_ds = NULL
+                self.cogr_layer = NULL
+                raise CRSError(u"{}".format(exc))
+
+            # Determine which encoding to use. The encoding parameter given to
+            # the collection constructor takes highest precedence, then
+            # 'iso-8859-1' (for shapefiles), then the system's default encoding
+            # as last resort.
+            sysencoding = locale.getpreferredencoding()
+            self._fileencoding = userencoding or ("Shapefile" in collection.driver and 'iso-8859-1') or sysencoding
+
+            if "Shapefile" in collection.driver:
+                if self._fileencoding:
+                    fileencoding_b = self._fileencoding.upper().encode('utf-8')
+                    fileencoding_c = fileencoding_b
+                    options = CSLSetNameValue(options, "ENCODING", fileencoding_c)
+
+            # Does the layer exist already? If so, we delete it.
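+            # GDAL deletes layers by index, so the existing layer names
+            # are collected first; a layer requested by name is then
+            # translated to its index before deletion.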
+ layer_count = GDALDatasetGetLayerCount(self.cogr_ds) + layer_names = [] + for i in range(layer_count): + cogr_layer = GDALDatasetGetLayer(cogr_ds, i) + name_c = OGR_L_GetName(cogr_layer) + name_b = name_c + layer_names.append(name_b.decode('utf-8')) + + idx = -1 + if isinstance(collection.name, string_types): + if collection.name in layer_names: + idx = layer_names.index(collection.name) + elif isinstance(collection.name, int): + if collection.name >= 0 and collection.name < layer_count: + idx = collection.name + if idx >= 0: + log.debug("Deleted pre-existing layer at %s", collection.name) + GDALDatasetDeleteLayer(self.cogr_ds, idx) + + # Create the named layer in the datasource. + name_b = collection.name.encode('utf-8') + name_c = name_b + + for k, v in kwargs.items(): + + if v is None: + continue + + # We need to remove encoding from the layer creation + # options if we're not creating a shapefile. + if k == 'encoding' and "Shapefile" not in collection.driver: + continue + + k = k.upper().encode('utf-8') + + if isinstance(v, bool): + v = ('ON' if v else 'OFF').encode('utf-8') + else: + v = str(v).encode('utf-8') + log.debug("Set option %r: %r", k, v) + options = CSLAddNameValue(options, k, v) + + geometry_type = collection.schema.get("geometry", "Unknown") + if not isinstance(geometry_type, string_types) and geometry_type is not None: + geometry_types = set(geometry_type) + if len(geometry_types) > 1: + geometry_type = "Unknown" + else: + geometry_type = geometry_types.pop() + if geometry_type == "Any" or geometry_type is None: + geometry_type = "Unknown" + geometry_code = geometry_type_code(geometry_type) + + try: + self.cogr_layer = exc_wrap_pointer( + GDALDatasetCreateLayer( + self.cogr_ds, name_c, cogr_srs, + geometry_code, options)) + + except Exception as exc: + OGRReleaseDataSource(self.cogr_ds) + self.cogr_ds = NULL + raise DriverIOError(u"{}".format(exc)) + + finally: + if options != NULL: + CSLDestroy(options) + + # Shapefile layers make a copy of the passed srs. GPKG + # layers, on the other hand, increment its reference + # count. OSRRelease() is the safe way to release + # OGRSpatialReferenceH. + if cogr_srs != NULL: + OSRRelease(cogr_srs) + + log.debug("Created layer %s", collection.name) + + # Next, make a layer definition from the given schema properties, + # which are an ordered dict since Fiona 1.0.1. + + encoding = self._get_internal_encoding() + + for key, value in collection.schema['properties'].items(): + + log.debug("Begin creating field: %r value: %r", key, value) + + field_subtype = OFSTNone + + # Convert 'long' to 'int'. See + # https://github.com/Toblerity/Fiona/issues/101. + if fiona.gdal_version.major >= 2 and value in ('int', 'long'): + value = 'int64' + elif value == 'int': + value = 'int32' + + if value == 'bool': + value = 'int32' + field_subtype = OFSTBoolean + + # Is there a field width/precision? + width = precision = None + if ':' in value: + value, fmt = value.split(':') + + log.debug("Field format parsing, value: %r, fmt: %r", value, fmt) + + if '.' 
in fmt: + width, precision = map(int, fmt.split('.')) + else: + width = int(fmt) + + if value == 'int': + if GDAL_VERSION_NUM >= 2000000 and (width == 0 or width >= 10): + value = 'int64' + else: + value = 'int32' + + field_type = FIELD_TYPES.index(value) + + try: + key_bytes = key.encode(encoding) + cogr_fielddefn = exc_wrap_pointer(OGR_Fld_Create(key_bytes, field_type)) + if width: + OGR_Fld_SetWidth(cogr_fielddefn, width) + if precision: + OGR_Fld_SetPrecision(cogr_fielddefn, precision) + if field_subtype != OFSTNone: + # subtypes are new in GDAL 2.x, ignored in 1.x + set_field_subtype(cogr_fielddefn, field_subtype) + exc_wrap_int(OGR_L_CreateField(self.cogr_layer, cogr_fielddefn, 1)) + + except (UnicodeEncodeError, CPLE_BaseError) as exc: + OGRReleaseDataSource(self.cogr_ds) + self.cogr_ds = NULL + self.cogr_layer = NULL + raise SchemaError(u"{}".format(exc)) + + else: + OGR_Fld_Destroy(cogr_fielddefn) + log.debug("End creating field %r", key) + + # Mapping of the Python collection schema to the munged + # OGR schema. + ogr_schema = self.get_schema() + self._schema_mapping = dict(zip( + collection.schema['properties'].keys(), + ogr_schema['properties'].keys() )) + + log.debug("Writing started") + + def writerecs(self, records, collection): + """Writes buffered records to OGR.""" + cdef void *cogr_driver + cdef void *cogr_feature + cdef int features_in_transaction = 0 + + cdef void *cogr_layer = self.cogr_layer + if cogr_layer == NULL: + raise ValueError("Null layer") + + schema_geom_type = collection.schema['geometry'] + cogr_driver = GDALGetDatasetDriver(self.cogr_ds) + driver_name = OGR_Dr_GetName(cogr_driver).decode("utf-8") + + valid_geom_types = collection._valid_geom_types + def validate_geometry_type(record): + if record["geometry"] is None: + return True + return record["geometry"]["type"].lstrip("3D ") in valid_geom_types + + log.debug("Starting transaction (initial)") + result = gdal_start_transaction(self.cogr_ds, 0) + if result == OGRERR_FAILURE: + raise TransactionError("Failed to start transaction") + + schema_props_keys = set(collection.schema['properties'].keys()) + for record in records: + log.debug("Creating feature in layer: %s" % record) + # Validate against collection's schema. 
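+            # Both the set of property names and the geometry type must
+            # agree with the schema declared when the collection was
+            # opened, since the OGR layer definition was built from it.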
+            if set(record['properties'].keys()) != schema_props_keys:
+                raise ValueError(
+                    "Record does not match collection schema: %r != %r" % (
+                        record['properties'].keys(),
+                        list(schema_props_keys)))
+            if not validate_geometry_type(record):
+                raise GeometryTypeValidationError(
+                    "Record's geometry type does not match "
+                    "collection schema's geometry type: %r != %r" % (
+                        record['geometry']['type'],
+                        collection.schema['geometry']))
+
+            cogr_feature = OGRFeatureBuilder().build(record, collection)
+            result = OGR_L_CreateFeature(cogr_layer, cogr_feature)
+            if result != OGRERR_NONE:
+                raise RuntimeError("Failed to write record: %s" % record)
+            _deleteOgrFeature(cogr_feature)
+
+            features_in_transaction += 1
+            if features_in_transaction == DEFAULT_TRANSACTION_SIZE:
+                log.debug("Committing transaction (intermediate)")
+                result = gdal_commit_transaction(self.cogr_ds)
+                if result == OGRERR_FAILURE:
+                    raise TransactionError("Failed to commit transaction")
+                log.debug("Starting transaction (intermediate)")
+                result = gdal_start_transaction(self.cogr_ds, 0)
+                if result == OGRERR_FAILURE:
+                    raise TransactionError("Failed to start transaction")
+                features_in_transaction = 0
+
+        log.debug("Committing transaction (final)")
+        result = gdal_commit_transaction(self.cogr_ds)
+        if result == OGRERR_FAILURE:
+            raise TransactionError("Failed to commit transaction")
+
+    def sync(self, collection):
+        """Syncs OGR to disk."""
+        cdef void *cogr_ds = self.cogr_ds
+        cdef void *cogr_layer = self.cogr_layer
+        if cogr_ds == NULL:
+            raise ValueError("Null data source")
+
+        gdal_flush_cache(cogr_ds)
+        log.debug("Flushed data source cache")
+
+
+cdef class Iterator:
+
+    """Provides iterated access to feature data.
+    """
+
+    # Reference to its Collection
+    cdef collection
+    cdef encoding
+    cdef int next_index
+    cdef stop
+    cdef start
+    cdef step
+    cdef fastindex
+    cdef stepsign
+
+    def __cinit__(self, collection, start=None, stop=None, step=None,
+                  bbox=None, mask=None):
+        if collection.session is None:
+            raise ValueError("I/O operation on closed collection")
+        self.collection = collection
+        cdef Session session
+        cdef void *cogr_geometry
+        session = self.collection.session
+        cdef void *cogr_layer = session.cogr_layer
+        if cogr_layer == NULL:
+            raise ValueError("Null layer")
+        OGR_L_ResetReading(cogr_layer)
+
+        if bbox and mask:
+            raise ValueError("mask and bbox cannot be set together")
+
+        if bbox:
+            OGR_L_SetSpatialFilterRect(
+                cogr_layer, bbox[0], bbox[1], bbox[2], bbox[3])
+        elif mask:
+            cogr_geometry = OGRGeomBuilder().build(mask)
+            OGR_L_SetSpatialFilter(cogr_layer, cogr_geometry)
+            OGR_G_DestroyGeometry(cogr_geometry)
+        else:
+            OGR_L_SetSpatialFilter(cogr_layer, NULL)
+
+        self.encoding = session._get_internal_encoding()
+
+        self.fastindex = OGR_L_TestCapability(
+            session.cogr_layer, OLC_FASTSETNEXTBYINDEX)
+
+        ftcount = OGR_L_GetFeatureCount(session.cogr_layer, 0)
+        if ftcount == -1 and ((start is not None and start < 0) or
+                              (stop is not None and stop < 0)):
+            raise IndexError(
+                "collection's dataset does not support negative slice indexes")
+
+        if stop is not None and stop < 0:
+            stop += ftcount
+
+        if start is None:
+            start = 0
+        if start is not None and start < 0:
+            start += ftcount
+
+        # step size
+        if step is None:
+            step = 1
+        if step == 0:
+            raise ValueError("slice step cannot be zero")
+        if step < 0 and not self.fastindex:
+            warnings.warn(
+                "Layer does not support OLCFastSetNextByIndex, "
+                "negative step size may be slow", RuntimeWarning)
+        self.stepsign = int(math.copysign(1, step))
+        self.stop = stop
+        self.start = start
+        self.step = step
+
+        self.next_index = start
+        log.debug("Index: %d", self.next_index)
+        OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)
+
+    def __iter__(self):
+        return self
+
+    def _next(self):
+        """Internal method to set read cursor to next item"""
+
+        cdef Session session
+        session = self.collection.session
+
+        # Check if next_index is valid
+        if self.next_index < 0:
+            raise StopIteration
+
+        if self.stepsign == 1:
+            if self.next_index < self.start or (self.stop is not None and self.next_index >= self.stop):
+                raise StopIteration
+        else:
+            if self.next_index > self.start or (self.stop is not None and self.next_index <= self.stop):
+                raise StopIteration
+
+        # Set read cursor to next_item position
+        if self.step > 1 and self.fastindex:
+            OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)
+
+        elif self.step > 1 and not self.fastindex and not self.next_index == self.start:
+            # OGR_L_GetNextFeature increments the cursor by 1, so only
+            # self.step - 1 extra reads are needed here.
+            # TODO (rbuffat): add a test for this path.
+            for _ in range(self.step - 1):
+                cogr_feature = OGR_L_GetNextFeature(session.cogr_layer)
+                if cogr_feature == NULL:
+                    raise StopIteration
+        elif self.step > 1 and not self.fastindex and self.next_index == self.start:
+            OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)
+
+        elif self.step == 0:
+            # OGR_L_GetNextFeature increments read cursor by one
+            pass
+        elif self.step < 0:
+            OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)
+
+        # set the next index
+        self.next_index += self.step
+
+    def __next__(self):
+        cdef OGRFeatureH cogr_feature = NULL
+        cdef OGRLayerH cogr_layer = NULL
+        cdef Session session
+
+        session = self.collection.session
+
+        if not session or not session.isactive():
+            raise FionaValueError("Session is inactive, dataset is closed or layer is unavailable.")
+
+        # Update read cursor
+        self._next()
+
+        # Get the next feature.
+        cogr_feature = OGR_L_GetNextFeature(session.cogr_layer)
+        if cogr_feature == NULL:
+            raise StopIteration
+
+        try:
+            return FeatureBuilder().build(
+                cogr_feature,
+                encoding=self.collection.session._get_internal_encoding(),
+                bbox=False,
+                driver=self.collection.driver,
+                ignore_fields=self.collection.ignore_fields,
+                ignore_geometry=self.collection.ignore_geometry,
+            )
+        finally:
+            _deleteOgrFeature(cogr_feature)
+
+
+cdef class ItemsIterator(Iterator):
+
+    def __next__(self):
+
+        cdef long fid
+        cdef void * cogr_feature
+        cdef Session session
+        session = self.collection.session
+
+        # Update read cursor
+        self._next()
+
+        # Get the next feature.
+        cogr_feature = OGR_L_GetNextFeature(session.cogr_layer)
+        if cogr_feature == NULL:
+            raise StopIteration
+
+        fid = OGR_F_GetFID(cogr_feature)
+        feature = FeatureBuilder().build(
+            cogr_feature,
+            encoding=self.collection.session._get_internal_encoding(),
+            bbox=False,
+            driver=self.collection.driver,
+            ignore_fields=self.collection.ignore_fields,
+            ignore_geometry=self.collection.ignore_geometry,
+        )
+        _deleteOgrFeature(cogr_feature)
+
+        return fid, feature
+
+
+cdef class KeysIterator(Iterator):
+
+    def __next__(self):
+        cdef long fid
+        cdef void * cogr_feature
+        cdef Session session
+        session = self.collection.session
+
+        # Update read cursor
+        self._next()
+
+        # Get the next feature.
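+        # Only the FID is kept; the feature itself is destroyed as soon
+        # as its id has been read, which keeps key iteration cheap.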
+ cogr_feature = OGR_L_GetNextFeature(session.cogr_layer) + if cogr_feature == NULL: + raise StopIteration + + fid = OGR_F_GetFID(cogr_feature) + _deleteOgrFeature(cogr_feature) + + return fid + + +def _remove(path, driver=None): + """Deletes an OGR data source + """ + cdef void *cogr_driver + cdef void *cogr_ds + cdef int result + cdef char *driver_c + + if driver is None: + # attempt to identify the driver by opening the dataset + try: + cogr_ds = gdal_open_vector(path.encode("utf-8"), 0, None, {}) + except (DriverError, FionaNullPointerError): + raise DatasetDeleteError("Failed to remove data source {}".format(path)) + cogr_driver = GDALGetDatasetDriver(cogr_ds) + GDALClose(cogr_ds) + else: + cogr_driver = OGRGetDriverByName(driver.encode("utf-8")) + + if cogr_driver == NULL: + raise DatasetDeleteError("Null driver when attempting to delete {}".format(path)) + + if not OGR_Dr_TestCapability(cogr_driver, ODrCDeleteDataSource): + raise DatasetDeleteError("Driver does not support dataset removal operation") + + result = GDALDeleteDataset(cogr_driver, path.encode('utf-8')) + if result != OGRERR_NONE: + raise DatasetDeleteError("Failed to remove data source {}".format(path)) + + +def _remove_layer(path, layer, driver=None): + cdef void *cogr_ds + cdef int layer_index + + if isinstance(layer, integer_types): + layer_index = layer + layer_str = str(layer_index) + else: + layer_names = _listlayers(path) + try: + layer_index = layer_names.index(layer) + except ValueError: + raise ValueError("Layer \"{}\" does not exist in datasource: {}".format(layer, path)) + layer_str = '"{}"'.format(layer) + + if layer_index < 0: + layer_names = _listlayers(path) + layer_index = len(layer_names) + layer_index + + try: + cogr_ds = gdal_open_vector(path.encode("utf-8"), 1, None, {}) + except (DriverError, FionaNullPointerError): + raise DatasetDeleteError("Failed to remove data source {}".format(path)) + + result = OGR_DS_DeleteLayer(cogr_ds, layer_index) + GDALClose(cogr_ds) + if result == OGRERR_UNSUPPORTED_OPERATION: + raise DatasetDeleteError("Removal of layer {} not supported by driver".format(layer_str)) + elif result != OGRERR_NONE: + raise DatasetDeleteError("Failed to remove layer {} from datasource: {}".format(layer_str, path)) + + +def _listlayers(path, **kwargs): + + """Provides a list of the layers in an OGR data source. + """ + + cdef void *cogr_ds = NULL + cdef void *cogr_layer = NULL + cdef const char *path_c + cdef const char *name_c + + # Open OGR data source. + try: + path_b = path.encode('utf-8') + except UnicodeDecodeError: + path_b = path + path_c = path_b + cogr_ds = gdal_open_vector(path_c, 0, None, kwargs) + + # Loop over the layers to get their names. + layer_count = GDALDatasetGetLayerCount(cogr_ds) + layer_names = [] + for i in range(layer_count): + cogr_layer = GDALDatasetGetLayer(cogr_ds, i) + name_c = OGR_L_GetName(cogr_layer) + name_b = name_c + layer_names.append(name_b.decode('utf-8')) + + # Close up data source. + if cogr_ds != NULL: + GDALClose(cogr_ds) + cogr_ds = NULL + + return layer_names + + +def buffer_to_virtual_file(bytesbuf, ext=''): + """Maps a bytes buffer to a virtual file. + + `ext` is empty or begins with a period and contains at most one period. 
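+
+    Example (a sketch; any bytes buffer and extension will do):
+
+        >>> path = buffer_to_virtual_file(b'{"type": "FeatureCollection", "features": []}', ext='.json')
+        >>> path.startswith('/vsimem/')
+        True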
+ """ + + vsi_filename = '/vsimem/{}'.format(uuid.uuid4().hex + ext) + vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') + + vsi_handle = VSIFileFromMemBuffer(vsi_cfilename, bytesbuf, len(bytesbuf), 0) + + if vsi_handle == NULL: + raise OSError('failed to map buffer to file') + if VSIFCloseL(vsi_handle) != 0: + raise OSError('failed to close mapped file handle') + + return vsi_filename + + +def remove_virtual_file(vsi_filename): + vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') + return VSIUnlink(vsi_cfilename) + + +cdef class MemoryFileBase(object): + """Base for a BytesIO-like class backed by an in-memory file.""" + + def __init__(self, file_or_bytes=None, filename=None, ext=''): + """A file in an in-memory filesystem. + + Parameters + ---------- + file_or_bytes : file or bytes + A file opened in binary mode or bytes or a bytearray + filename : str + A filename for the in-memory file under /vsimem + ext : str + A file extension for the in-memory file under /vsimem. Ignored if + filename was provided. + """ + cdef VSILFILE *vsi_handle = NULL + + if file_or_bytes: + if hasattr(file_or_bytes, 'read'): + initial_bytes = file_or_bytes.read() + else: + initial_bytes = file_or_bytes + if not isinstance(initial_bytes, (bytearray, bytes)): + raise TypeError( + "Constructor argument must be a file opened in binary " + "mode or bytes/bytearray.") + else: + initial_bytes = b'' + + if filename: + # GDAL's SRTMHGT driver requires the filename to be "correct" (match + # the bounds being written) + self.name = '/vsimem/{0}'.format(filename) + else: + # GDAL 2.1 requires a .zip extension for zipped files. + self.name = '/vsimem/{0}.{1}'.format(uuid.uuid4(), ext.lstrip('.')) + + self.path = self.name.encode('utf-8') + self._len = 0 + self._pos = 0 + self.closed = False + + self._initial_bytes = initial_bytes + cdef unsigned char *buffer = self._initial_bytes + + if self._initial_bytes: + + vsi_handle = VSIFileFromMemBuffer( + self.path, buffer, len(self._initial_bytes), 0) + self._len = len(self._initial_bytes) + + if vsi_handle == NULL: + raise IOError( + "Failed to create in-memory file using initial bytes.") + + if VSIFCloseL(vsi_handle) != 0: + raise IOError( + "Failed to properly close in-memory file.") + + def exists(self): + """Test if the in-memory file exists. + + Returns + ------- + bool + True if the in-memory file exists. + """ + cdef VSILFILE *fp = NULL + cdef const char *cypath = self.path + + with nogil: + fp = VSIFOpenL(cypath, 'r') + + if fp != NULL: + VSIFCloseL(fp) + return True + else: + return False + + def __len__(self): + """Length of the file's buffer in number of bytes. + + Returns + ------- + int + """ + cdef unsigned char *buff = NULL + cdef const char *cfilename = self.path + cdef vsi_l_offset buff_len = 0 + buff = VSIGetMemFileBuffer(self.path, &buff_len, 0) + return int(buff_len) + + def close(self): + """Close MemoryFile and release allocated memory.""" + VSIUnlink(self.path) + self._pos = 0 + self._initial_bytes = None + self.closed = True + + def read(self, size=-1): + """Read size bytes from MemoryFile.""" + cdef VSILFILE *fp = NULL + # Return no bytes immediately if the position is at or past the + # end of the file. 
+        length = len(self)
+
+        if self._pos >= length:
+            self._pos = length
+            return b''
+
+        if size == -1:
+            size = length - self._pos
+        else:
+            size = min(size, length - self._pos)
+
+        cdef unsigned char *buffer = CPLMalloc(size)
+        cdef bytes result
+
+        fp = VSIFOpenL(self.path, 'r')
+
+        try:
+            fp = exc_wrap_vsilfile(fp)
+            if VSIFSeekL(fp, self._pos, 0) < 0:
+                raise IOError(
+                    "Failed to seek to offset {} in {}.".format(
+                        self._pos, self.name))
+
+            objects_read = VSIFReadL(buffer, 1, size, fp)
+            result = buffer[:objects_read]
+
+        finally:
+            VSIFCloseL(fp)
+            CPLFree(buffer)
+
+        self._pos += len(result)
+        return result
+
+    def seek(self, offset, whence=0):
+        """Seek to position in MemoryFile."""
+        if whence == 0:
+            pos = offset
+        elif whence == 1:
+            pos = self._pos + offset
+        elif whence == 2:
+            pos = len(self) - offset
+        else:
+            raise ValueError("invalid whence value: {}".format(whence))
+        if pos < 0:
+            raise ValueError("negative seek position: {}".format(pos))
+        if pos > len(self):
+            raise ValueError("seek position past end of file: {}".format(pos))
+        self._pos = pos
+        return self._pos
+
+    def tell(self):
+        """Tell current position in MemoryFile."""
+        return self._pos
+
+    def write(self, data):
+        """Write data bytes to MemoryFile"""
+        cdef VSILFILE *fp = NULL
+        cdef const unsigned char *view = data
+        n = len(data)
+
+        if not self.exists():
+            fp = exc_wrap_vsilfile(VSIFOpenL(self.path, 'w'))
+        else:
+            fp = exc_wrap_vsilfile(VSIFOpenL(self.path, 'r+'))
+            if VSIFSeekL(fp, self._pos, 0) < 0:
+                raise IOError(
+                    "Failed to seek to offset {} in {}.".format(
+                        self._pos, self.name))
+
+        result = VSIFWriteL(view, 1, n, fp)
+        VSIFFlushL(fp)
+        VSIFCloseL(fp)
+
+        self._pos += result
+        self._len = max(self._len, self._pos)
+
+        return result
diff -Nru fiona-1.7.10/fiona/path.py fiona-1.8.6/fiona/path.py
--- fiona-1.7.10/fiona/path.py 1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/fiona/path.py 2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,185 @@
+"""Dataset paths, identifiers, and filenames"""
+
+import re
+import sys
+
+import attr
+
+from fiona.compat import urlparse
+
+# Supported URI schemes and their mapping to GDAL's VSI suffix.
+# TODO: extend for other cloud platforms.
+SCHEMES = {
+    'ftp': 'curl',
+    'gzip': 'gzip',
+    'http': 'curl',
+    'https': 'curl',
+    's3': 's3',
+    'tar': 'tar',
+    'zip': 'zip',
+    'file': 'file',
+    'gs': 'gs',
+}
+
+CURLSCHEMES = set([k for k, v in SCHEMES.items() if v == 'curl'])
+
+# TODO: extend for other cloud platforms.
+REMOTESCHEMES = set([k for k, v in SCHEMES.items() if v in ('curl', 's3', 'gs')])
+
+
+class Path(object):
+    """Base class for dataset paths"""
+
+
+@attr.s(slots=True)
+class ParsedPath(Path):
+    """Result of parsing a dataset URI/Path
+
+    Attributes
+    ----------
+    path : str
+        Parsed path. Includes the hostname and query string in the case
+        of a URI.
+    archive : str
+        Parsed archive path.
+    scheme : str
+        URI scheme such as "https" or "zip+s3".
+    """
+    path = attr.ib()
+    archive = attr.ib()
+    scheme = attr.ib()
+
+    @classmethod
+    def from_uri(cls, uri):
+        parts = urlparse(uri)
+        path = parts.path
+        scheme = parts.scheme or None
+
+        if parts.query:
+            path += "?" + parts.query
+
+        if parts.scheme and parts.netloc:
+            path = parts.netloc + path
+
+        parts = path.split('!')
+        path = parts.pop() if parts else None
+        archive = parts.pop() if parts else None
+        return ParsedPath(path, archive, scheme)
+
+    @property
+    def name(self):
+        """The parsed path's original URI"""
+        if not self.scheme:
+            return self.path
+        elif self.archive:
+            return "{}://{}!{}".format(self.scheme, self.archive, self.path)
+        else:
+            return "{}://{}".format(self.scheme, self.path)
+
+    @property
+    def is_remote(self):
+        """Test if the path is a remote, network URI"""
+        return self.scheme and self.scheme.split('+')[-1] in REMOTESCHEMES
+
+    @property
+    def is_local(self):
+        """Test if the path is a local URI"""
+        return not self.scheme or (self.scheme and self.scheme.split('+')[-1] not in REMOTESCHEMES)
+
+
+@attr.s(slots=True)
+class UnparsedPath(Path):
+    """Encapsulates legacy GDAL filenames
+
+    Attributes
+    ----------
+    path : str
+        The legacy GDAL filename.
+    """
+    path = attr.ib()
+
+    @property
+    def name(self):
+        """The unparsed path's original path"""
+        return self.path
+
+
+def parse_path(path):
+    """Parse a dataset's identifier or path into its parts
+
+    Parameters
+    ----------
+    path : str or path-like object
+        The path to be parsed.
+
+    Returns
+    -------
+    ParsedPath or UnparsedPath
+
+    Notes
+    -----
+    When legacy GDAL filenames are encountered, they will be returned
+    in an UnparsedPath.
+    """
+    if isinstance(path, Path):
+        return path
+
+    # Windows drive letters (e.g. "C:\") confuse `urlparse` as they look like
+    # URL schemes
+    elif sys.platform == "win32" and re.match("^[a-zA-Z]\\:", path):
+        return UnparsedPath(path)
+
+    elif path.startswith('/vsi'):
+        return UnparsedPath(path)
+
+    else:
+        parts = urlparse(path)
+
+        # if the scheme is not one of Fiona's supported schemes, we
+        # return an UnparsedPath.
+        if parts.scheme and not all(p in SCHEMES for p in parts.scheme.split('+')):
+            return UnparsedPath(path)
+
+        else:
+            return ParsedPath.from_uri(path)
+
+
+def vsi_path(path):
+    """Convert a parsed path to a GDAL VSI path
+
+    Parameters
+    ----------
+    path : Path
+        A ParsedPath or UnparsedPath object.
+ + Returns + ------- + str + """ + if isinstance(path, UnparsedPath): + return path.path + + elif isinstance(path, ParsedPath): + + if not path.scheme: + return path.path + + else: + if path.scheme.split('+')[-1] in CURLSCHEMES: + suffix = '{}://'.format(path.scheme.split('+')[-1]) + else: + suffix = '' + + prefix = '/'.join('vsi{0}'.format(SCHEMES[p]) for p in path.scheme.split('+') if p != 'file') + + if prefix: + if path.archive: + result = '/{}/{}{}/{}'.format(prefix, suffix, path.archive, path.path.lstrip('/')) + else: + result = '/{}/{}{}'.format(prefix, suffix, path.path) + else: + result = path.path + return result + + else: + raise ValueError("path must be a ParsedPath or UnparsedPath object") diff -Nru fiona-1.7.10/fiona/schema.pyx fiona-1.8.6/fiona/schema.pyx --- fiona-1.7.10/fiona/schema.pyx 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/schema.pyx 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,83 @@ +from six import text_type + +from fiona.errors import SchemaError +from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType + + +cdef extern from "gdal.h": + char * GDALVersionInfo (char *pszRequest) + + +def _get_gdal_version_num(): + """Return current internal version number of gdal""" + return int(GDALVersionInfo("VERSION_NUM")) + + +GDAL_VERSION_NUM = _get_gdal_version_num() + +# Mapping of OGR integer field types to Fiona field type names. +# Lists are currently unsupported in this version, but might be done as +# arrays in a future version. +FIELD_TYPES = [ + 'int32', # OFTInteger, Simple 32bit integer + None, # OFTIntegerList, List of 32bit integers + 'float', # OFTReal, Double Precision floating point + None, # OFTRealList, List of doubles + 'str', # OFTString, String of UTF-8 chars + None, # OFTStringList, Array of strings + None, # OFTWideString, deprecated + None, # OFTWideStringList, deprecated + 'bytes', # OFTBinary, Raw Binary data + 'date', # OFTDate, Date + 'time', # OFTTime, Time + 'datetime', # OFTDateTime, Date and Time + 'int64', # OFTInteger64, Single 64bit integer + None # OFTInteger64List, List of 64bit integers +] + +# Mapping of Fiona field type names to Python types. 
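+# For example, an OGR OFTInteger64 field is reported with the Fiona
+# type name 'int64' (see FIELD_TYPES above), which maps to Python's
+# int here.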
+FIELD_TYPES_MAP = { + 'int32': int, + 'float': float, + 'str': text_type, + 'date': FionaDateType, + 'time': FionaTimeType, + 'datetime': FionaDateTimeType, + 'bytes': bytes, + 'int64': int, + 'int': int +} + +FIELD_TYPES_MAP_REV = dict([(v, k) for k, v in FIELD_TYPES_MAP.items()]) +FIELD_TYPES_MAP_REV[int] = 'int' + + +def normalize_field_type(ftype): + """Normalize free form field types to an element of FIELD_TYPES + + Parameters + ---------- + ftype : str + A type:width format like 'int:9' or 'str:255' + + Returns + ------- + str + An element from FIELD_TYPES + """ + if ftype in FIELD_TYPES: + return ftype + elif ftype == 'bool': + return 'bool' + elif ftype.startswith('int'): + width = int((ftype.split(':')[1:] or ['0'])[0]) + if GDAL_VERSION_NUM >= 2000000 and (width == 0 or width >= 10): + return 'int64' + else: + return 'int32' + elif ftype.startswith('str'): + return 'str' + elif ftype.startswith('float'): + return 'float' + else: + raise SchemaError("Unknown field type: {}".format(ftype)) diff -Nru fiona-1.7.10/fiona/session.py fiona-1.8.6/fiona/session.py --- fiona-1.7.10/fiona/session.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/session.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,237 @@ +"""Abstraction for sessions in various clouds.""" + + +from fiona.path import parse_path, UnparsedPath + + +class Session(object): + """Base for classes that configure access to secured resources. + + Attributes + ---------- + credentials : dict + Keys and values for session credentials. + + Notes + ----- + This class is not intended to be instantiated. + + """ + + def get_credential_options(self): + """Get credentials as GDAL configuration options + + Returns + ------- + dict + + """ + return NotImplementedError + + @staticmethod + def from_foreign_session(session, cls=None): + """Create a session object matching the foreign `session`. + + Parameters + ---------- + session : obj + A foreign session object. + cls : Session class, optional + The class to return. + + Returns + ------- + Session + + """ + if not cls: + return DummySession() + else: + return cls(session) + + @staticmethod + def from_path(path, *args, **kwargs): + """Create a session object suited to the data at `path`. + + Parameters + ---------- + path : str + A dataset path or identifier. + args : sequence + Positional arguments for the foreign session constructor. + kwargs : dict + Keyword arguments for the foreign session constructor. + + Returns + ------- + Session + + """ + if not path: + return DummySession() + + path = parse_path(path) + + if isinstance(path, UnparsedPath) or path.is_local: + return DummySession() + + elif path.scheme == "s3" or path.scheme.endswith("+s3") or "amazonaws.com" in path.path: + return AWSSession(*args, **kwargs) + + # This factory can be extended to other cloud providers here. + # elif path.scheme == "cumulonimbus": # for example. + # return CumulonimbusSession(*args, **kwargs) + + else: + return DummySession() + + +class DummySession(Session): + """A dummy session. + + Attributes + ---------- + credentials : dict + The session credentials. + + """ + + def __init__(self, *args, **kwargs): + self._session = None + self.credentials = {} + + def get_credential_options(self): + """Get credentials as GDAL configuration options + + Returns + ------- + dict + + """ + return {} + + +class AWSSession(Session): + """Configures access to secured resources stored in AWS S3. 
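+
+    Example (a sketch; assumes boto3 is installed and credentials are
+    discoverable through its usual lookup, e.g. environment variables):
+
+        >>> session = AWSSession(region_name='us-east-1')
+        >>> opts = session.get_credential_options()  # GDAL config options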
+ """ + + def __init__( + self, session=None, aws_unsigned=False, aws_access_key_id=None, + aws_secret_access_key=None, aws_session_token=None, + region_name=None, profile_name=None, requester_pays=False): + """Create a new boto3 session + + Parameters + ---------- + session : optional + A boto3 session object. + aws_unsigned : bool, optional (default: False) + If True, requests will be unsigned. + aws_access_key_id : str, optional + An access key id, as per boto3. + aws_secret_access_key : str, optional + A secret access key, as per boto3. + aws_session_token : str, optional + A session token, as per boto3. + region_name : str, optional + A region name, as per boto3. + profile_name : str, optional + A shared credentials profile name, as per boto3. + requester_pays : bool, optional + True if the requester agrees to pay transfer costs (default: + False) + """ + import boto3 + + if session: + self._session = session + else: + self._session = boto3.Session( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + region_name=region_name, + profile_name=profile_name) + + self.requester_pays = requester_pays + self.unsigned = aws_unsigned + self._creds = self._session._session.get_credentials() + + @property + def credentials(self): + """The session credentials as a dict""" + creds = {} + if self._creds: + if self._creds.access_key: # pragma: no branch + creds['aws_access_key_id'] = self._creds.access_key + if self._creds.secret_key: # pragma: no branch + creds['aws_secret_access_key'] = self._creds.secret_key + if self._creds.token: + creds['aws_session_token'] = self._creds.token + if self._session.region_name: + creds['aws_region'] = self._session.region_name + if self.requester_pays: + creds['aws_request_payer'] = 'requester' + return creds + + def get_credential_options(self): + """Get credentials as GDAL configuration options + + Returns + ------- + dict + + """ + if self.unsigned: + return {'AWS_NO_SIGN_REQUEST': 'YES'} + else: + return {k.upper(): v for k, v in self.credentials.items()} + +class GSSession(Session): + """Configures access to secured resources stored in Google Cloud Storage + """ + def __init__(self, google_application_credentials=None): + """Create new Google Cloude Storage session + + Parameters + ---------- + google_application_credentials: string + Path to the google application credentials JSON file. + """ + + self._creds = {} + if google_application_credentials is not None: + self._creds['google_application_credentials'] = google_application_credentials + + @classmethod + def hascreds(cls, config): + """Determine if the given configuration has proper credentials + + Parameters + ---------- + cls : class + A Session class. + config : dict + GDAL configuration as a dict. 
+ + Returns + ------- + bool + + """ + return 'GOOGLE_APPLICATION_CREDENTIALS' in config + + @property + def credentials(self): + """The session credentials as a dict""" + return self._creds + + def get_credential_options(self): + """Get credentials as GDAL configuration options + + Returns + ------- + dict + + """ + return {k.upper(): v for k, v in self.credentials.items()} diff -Nru fiona-1.7.10/fiona/_shim1.pxd fiona-1.8.6/fiona/_shim1.pxd --- fiona-1.7.10/fiona/_shim1.pxd 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/_shim1.pxd 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,33 @@ +include "ogrext1.pxd" + +ctypedef enum OGRFieldSubType: + OFSTNone = 0 + OFSTBoolean = 1 + OFSTInt16 = 2 + OFSTFloat32 = 3 + OFSTMaxSubType = 3 + +cdef bint is_field_null(void *feature, int n) +cdef void set_field_null(void *feature, int n) +cdef void gdal_flush_cache(void *cogr_ds) +cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL +cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL +cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) +cdef OGRErr gdal_commit_transaction(void *cogr_ds) +cdef OGRErr gdal_rollback_transaction(void *cogr_ds) +cdef OGRFieldSubType get_field_subtype(void *fielddefn) +cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) +cdef bint check_capability_create_layer(void *cogr_ds) +cdef void *get_linear_geometry(void *geom) + +from fiona._shim cimport OGR_F_GetFieldAsInteger as OGR_F_GetFieldAsInteger64 +from fiona._shim cimport OGR_F_SetFieldInteger as OGR_F_SetFieldInteger64 +from fiona._shim cimport OGR_DS_GetLayerByName as GDALDatasetGetLayerByName +from fiona._shim cimport OGR_DS_GetLayer as GDALDatasetGetLayer +from fiona._shim cimport OGR_DS_Destroy as GDALClose +from fiona._shim cimport OGR_DS_GetDriver as GDALGetDatasetDriver +from fiona._shim cimport OGRGetDriverByName as GDALGetDriverByName +from fiona._shim cimport OGR_DS_GetLayerCount as GDALDatasetGetLayerCount +from fiona._shim cimport OGR_DS_DeleteLayer as GDALDatasetDeleteLayer +from fiona._shim cimport OGR_DS_CreateLayer as GDALDatasetCreateLayer +from fiona._shim cimport OGR_Dr_DeleteDataSource as GDALDeleteDataset diff -Nru fiona-1.7.10/fiona/_shim1.pyx fiona-1.8.6/fiona/_shim1.pyx --- fiona-1.7.10/fiona/_shim1.pyx 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/_shim1.pyx 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,125 @@ +"""Shims on top of ogrext for GDAL versions < 2""" + +import logging + +from fiona.ogrext1 cimport * +from fiona._err cimport exc_wrap_pointer +from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError +from fiona.errors import DriverError + + +log = logging.getLogger(__name__) + +cdef int OGRERR_NONE = 0 + + +cdef bint is_field_null(void *feature, int n): + if not OGR_F_IsFieldSet(feature, n): + return True + else: + return False + + +cdef void set_field_null(void *feature, int n): + pass + + +cdef void gdal_flush_cache(void *cogr_ds): + retval = OGR_DS_SyncToDisk(cogr_ds) + if retval != OGRERR_NONE: + raise RuntimeError("Failed to sync to disk") + + +cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL: + cdef void* cogr_ds = NULL + cdef void* drv = NULL + cdef void* ds = NULL + + encoding = options.get('encoding', None) + if encoding: + val = encoding.encode('utf-8') + CPLSetThreadLocalConfigOption('SHAPE_ENCODING', val) + else: + CPLSetThreadLocalConfigOption('SHAPE_ENCODING', "") + + if drivers: + for name in drivers: + name_b 
= name.encode() + name_c = name_b + drv = OGRGetDriverByName(name_c) + if drv != NULL: + ds = OGR_Dr_Open(drv, path_c, mode) + if ds != NULL: + cogr_ds = ds + break + else: + cogr_ds = OGROpen(path_c, mode, NULL) + + try: + return exc_wrap_pointer(cogr_ds) + except FionaNullPointerError: + raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) + except CPLE_BaseError as exc: + raise DriverError(str(exc)) + + +cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: + cdef void* cogr_ds = NULL + cdef char **opts = NULL + + encoding = options.get('encoding', None) + if encoding: + val = encoding.encode('utf-8') + CPLSetThreadLocalConfigOption('SHAPE_ENCODING', val) + else: + CPLSetThreadLocalConfigOption('SHAPE_ENCODING', "") + + for k, v in options.items(): + k = k.upper().encode('utf-8') + if isinstance(v, bool): + v = ('ON' if v else 'OFF').encode('utf-8') + else: + v = str(v).encode('utf-8') + log.debug("Set option %r: %r", k, v) + opts = CSLAddNameValue(opts, k, v) + + try: + return exc_wrap_pointer( + OGR_Dr_CreateDataSource(cogr_driver, path_c, opts) + ) + except FionaNullPointerError: + raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) + except CPLE_BaseError as exc: + raise DriverError(str(exc)) + finally: + CSLDestroy(opts) + + +# transactions are not supported in GDAL 1.x +cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): + return OGRERR_NONE + + +cdef OGRErr gdal_commit_transaction(void* cogr_ds): + return OGRERR_NONE + + +cdef OGRErr gdal_rollback_transaction(void* cogr_ds): + return OGRERR_NONE + + +# field subtypes are not supported in GDAL 1.x +cdef OGRFieldSubType get_field_subtype(void *fielddefn): + return OFSTNone + + +cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): + pass + + +cdef bint check_capability_create_layer(void *cogr_ds): + return OGR_DS_TestCapability(cogr_ds, ODsCCreateLayer) + + +cdef void *get_linear_geometry(void *geom): + return geom diff -Nru fiona-1.7.10/fiona/_shim22.pxd fiona-1.8.6/fiona/_shim22.pxd --- fiona-1.7.10/fiona/_shim22.pxd 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/_shim22.pxd 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,14 @@ +include "ogrext2.pxd" + +cdef bint is_field_null(void *feature, int n) +cdef void set_field_null(void *feature, int n) +cdef void gdal_flush_cache(void *cogr_ds) +cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL +cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL +cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) +cdef OGRErr gdal_commit_transaction(void *cogr_ds) +cdef OGRErr gdal_rollback_transaction(void *cogr_ds) +cdef OGRFieldSubType get_field_subtype(void *fielddefn) +cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) +cdef bint check_capability_create_layer(void *cogr_ds) +cdef void *get_linear_geometry(void *geom) diff -Nru fiona-1.7.10/fiona/_shim22.pyx fiona-1.8.6/fiona/_shim22.pyx --- fiona-1.7.10/fiona/_shim22.pyx 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/_shim22.pyx 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,134 @@ +"""Shims on top of ogrext for GDAL versions >= 2.2""" + +cdef extern from "ogr_api.h": + + int OGR_F_IsFieldNull(void *feature, int n) + + +from fiona.ogrext2 cimport * +from fiona._err cimport exc_wrap_pointer +from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError +from fiona.errors import DriverError + 
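+# Note: GDAL 2.2 introduced OGR_F_IsFieldNull(), distinguishing a field
+# that is set to NULL from one that is not set at all; is_field_null()
+# below treats both cases as null for Fiona's purposes.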
+import logging + + +log = logging.getLogger(__name__) + + +cdef bint is_field_null(void *feature, int n): + if OGR_F_IsFieldNull(feature, n): + return True + elif not OGR_F_IsFieldSet(feature, n): + return True + else: + return False + + +cdef void set_field_null(void *feature, int n): + OGR_F_SetFieldNull(feature, n) + + +cdef void gdal_flush_cache(void *cogr_ds): + with cpl_errs: + GDALFlushCache(cogr_ds) + + +cdef void* gdal_open_vector(char* path_c, int mode, drivers, options) except NULL: + cdef void* cogr_ds = NULL + cdef char **drvs = NULL + cdef void* drv = NULL + cdef char **open_opts = NULL + + flags = GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR + if mode == 1: + flags |= GDAL_OF_UPDATE + else: + flags |= GDAL_OF_READONLY + + if drivers: + for name in drivers: + name_b = name.encode() + name_c = name_b + drv = GDALGetDriverByName(name_c) + if drv != NULL: + drvs = CSLAddString(drvs, name_c) + + for k, v in options.items(): + + if v is None: + continue + + k = k.upper().encode('utf-8') + if isinstance(v, bool): + v = ('ON' if v else 'OFF').encode('utf-8') + else: + v = str(v).encode('utf-8') + log.debug("Set option %r: %r", k, v) + open_opts = CSLAddNameValue(open_opts, k, v) + + open_opts = CSLAddNameValue(open_opts, "VALIDATE_OPEN_OPTIONS", "NO") + + try: + cogr_ds = exc_wrap_pointer( + GDALOpenEx(path_c, flags, drvs, open_opts, NULL) + ) + return cogr_ds + except FionaNullPointerError: + raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) + except CPLE_BaseError as exc: + raise DriverError(str(exc)) + finally: + CSLDestroy(drvs) + CSLDestroy(open_opts) + + +cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: + cdef char **creation_opts = NULL + cdef void *cogr_ds = NULL + + for k, v in options.items(): + k = k.upper().encode('utf-8') + if isinstance(v, bool): + v = ('ON' if v else 'OFF').encode('utf-8') + else: + v = str(v).encode('utf-8') + log.debug("Set option %r: %r", k, v) + creation_opts = CSLAddNameValue(creation_opts, k, v) + + try: + return exc_wrap_pointer(GDALCreate(cogr_driver, path_c, 0, 0, 0, GDT_Unknown, creation_opts)) + except FionaNullPointerError: + raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) + except CPLE_BaseError as exc: + raise DriverError(str(exc)) + finally: + CSLDestroy(creation_opts) + + +cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): + return GDALDatasetStartTransaction(cogr_ds, force) + + +cdef OGRErr gdal_commit_transaction(void* cogr_ds): + return GDALDatasetCommitTransaction(cogr_ds) + + +cdef OGRErr gdal_rollback_transaction(void* cogr_ds): + return GDALDatasetRollbackTransaction(cogr_ds) + + +cdef OGRFieldSubType get_field_subtype(void *fielddefn): + return OGR_Fld_GetSubType(fielddefn) + + +cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): + OGR_Fld_SetSubType(fielddefn, subtype) + + +cdef bint check_capability_create_layer(void *cogr_ds): + return GDALDatasetTestCapability(cogr_ds, ODsCCreateLayer) + + +cdef void *get_linear_geometry(void *geom): + return OGR_G_GetLinearGeometry(geom, 0.0, NULL) diff -Nru fiona-1.7.10/fiona/_shim2.pxd fiona-1.8.6/fiona/_shim2.pxd --- fiona-1.7.10/fiona/_shim2.pxd 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/_shim2.pxd 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,14 @@ +include "ogrext2.pxd" + +cdef bint is_field_null(void *feature, int n) +cdef void set_field_null(void *feature, int n) +cdef void gdal_flush_cache(void *cogr_ds) +cdef void* 
gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL +cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL +cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) +cdef OGRErr gdal_commit_transaction(void *cogr_ds) +cdef OGRErr gdal_rollback_transaction(void *cogr_ds) +cdef OGRFieldSubType get_field_subtype(void *fielddefn) +cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) +cdef bint check_capability_create_layer(void *cogr_ds) +cdef void *get_linear_geometry(void *geom) diff -Nru fiona-1.7.10/fiona/_shim2.pyx fiona-1.8.6/fiona/_shim2.pyx --- fiona-1.7.10/fiona/_shim2.pyx 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/fiona/_shim2.pyx 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,127 @@ +"""Shims on top of ogrext for GDAL versions > 2""" + +from fiona.ogrext2 cimport * +from fiona._err cimport exc_wrap_pointer +from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError +from fiona.errors import DriverError + +import logging + + +log = logging.getLogger(__name__) + + +cdef bint is_field_null(void *feature, int n): + if not OGR_F_IsFieldSet(feature, n): + return True + else: + return False + + +cdef void set_field_null(void *feature, int n): + pass + + +cdef void gdal_flush_cache(void *cogr_ds): + with cpl_errs: + GDALFlushCache(cogr_ds) + + +cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL: + cdef void* cogr_ds = NULL + cdef char **drvs = NULL + cdef char **open_opts = NULL + + flags = GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR + if mode == 1: + flags |= GDAL_OF_UPDATE + else: + flags |= GDAL_OF_READONLY + + if drivers: + for name in drivers: + name_b = name.encode() + name_c = name_b + #log.debug("Trying driver: %s", name) + drv = GDALGetDriverByName(name_c) + if drv != NULL: + drvs = CSLAddString(drvs, name_c) + + for k, v in options.items(): + + if v is None: + continue + + k = k.upper().encode('utf-8') + if isinstance(v, bool): + v = ('ON' if v else 'OFF').encode('utf-8') + else: + v = str(v).encode('utf-8') + log.debug("Set option %r: %r", k, v) + open_opts = CSLAddNameValue(open_opts, k, v) + + open_opts = CSLAddNameValue(open_opts, "VALIDATE_OPEN_OPTIONS", "NO") + + try: + cogr_ds = exc_wrap_pointer(GDALOpenEx( + path_c, flags, drvs, open_opts, NULL) + ) + return cogr_ds + except FionaNullPointerError: + raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) + except CPLE_BaseError as exc: + raise DriverError(str(exc)) + finally: + CSLDestroy(drvs) + CSLDestroy(open_opts) + + +cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: + cdef char **creation_opts = NULL + + for k, v in options.items(): + k = k.upper().encode('utf-8') + if isinstance(v, bool): + v = ('ON' if v else 'OFF').encode('utf-8') + else: + v = str(v).encode('utf-8') + log.debug("Set option %r: %r", k, v) + creation_opts = CSLAddNameValue(creation_opts, k, v) + + try: + return exc_wrap_pointer(GDALCreate(cogr_driver, path_c, 0, 0, 0, GDT_Unknown, creation_opts)) + except FionaNullPointerError: + raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) + except CPLE_BaseError as exc: + raise DriverError(str(exc)) + finally: + CSLDestroy(creation_opts) + + +cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): + return GDALDatasetStartTransaction(cogr_ds, force) + + +cdef OGRErr gdal_commit_transaction(void* cogr_ds): + return GDALDatasetCommitTransaction(cogr_ds) + + +cdef OGRErr 
gdal_rollback_transaction(void* cogr_ds): + return GDALDatasetRollbackTransaction(cogr_ds) + + +cdef OGRFieldSubType get_field_subtype(void *fielddefn): + return OGR_Fld_GetSubType(fielddefn) + + +cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): + OGR_Fld_SetSubType(fielddefn, subtype) + + +cdef bint check_capability_create_layer(void *cogr_ds): + return GDALDatasetTestCapability(cogr_ds, ODsCCreateLayer) + + +cdef void *get_linear_geometry(void *geom): + return OGR_G_GetLinearGeometry(geom, 0.0, NULL) + diff -Nru fiona-1.7.10/fiona/tool.py fiona-1.8.6/fiona/tool.py --- fiona-1.7.10/fiona/tool.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/tool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,257 +0,0 @@ -""" fiona.tool - -Converts Shapefiles (etc) to GeoJSON. -""" - -import json -import logging -import pprint -import sys - -from six.moves import map - -import fiona - - -def open_output(arg): - """Returns an opened output stream.""" - if arg == sys.stdout: - return arg - else: - return open(arg, 'w') - -def make_ld_context(context_items): - """Returns a JSON-LD Context object. - - See http://json-ld.org/spec/latest/json-ld.""" - ctx = { - 'type': '@type', - 'id': '@id', - 'FeatureCollection': '_:n1', - '_crs': {'@id': '_:n2', '@type': '@id'}, - 'bbox': 'http://geovocab.org/geometry#bbox', - 'features': '_:n3', - 'Feature': 'http://geovocab.org/spatial#Feature', - 'properties': '_:n4', - 'geometry': 'http://geovocab.org/geometry#geometry', - 'Point': 'http://geovocab.org/geometry#Point', - 'LineString': 'http://geovocab.org/geometry#LineString', - 'Polygon': 'http://geovocab.org/geometry#Polygon', - 'MultiPoint': 'http://geovocab.org/geometry#MultiPoint', - 'MultiLineString': 'http://geovocab.org/geometry#MultiLineString', - 'MultiPolygon': 'http://geovocab.org/geometry#MultiPolygon', - 'GeometryCollection': - 'http://geovocab.org/geometry#GeometryCollection', - 'coordinates': '_:n5'} - for item in context_items or []: - t, uri = item.split("=") - ctx[t.strip()] = uri.strip() - return ctx - -def crs_uri(crs): - """Returns a CRS URN computed from a crs dict.""" - # References version 6.3 of the EPSG database. - # TODO: get proper version from GDAL/OGR API? - if crs['proj'] == 'longlat' and ( - crs['datum'] == 'WGS84' or crs['ellps'] == 'WGS84'): - return 'urn:ogc:def:crs:OGC:1.3:CRS84' - elif 'epsg:' in crs.get('init', ''): - epsg, code = crs['init'].split(':') - return 'urn:ogc:def:crs:EPSG::%s' % code - else: - return None - -def id_record(rec): - """Converts a record's id to a blank node id and returns the record.""" - rec['id'] = '_:f%s' % rec['id'] - return rec - -def main(args, dump_kw, item_sep, ignore_errors): - """Returns 0 on success, 1 on error, for sys.exit.""" - with fiona.drivers(): - - with open_output(args.outfile) as sink: - - with fiona.open(args.infile) as source: - - meta = source.meta.copy() - meta['fields'] = dict(source.schema['properties'].items()) - - if args.description: - meta['name'] = args.infile - meta['schema']['properties'] = list( - source.schema['properties'].items()) - json.dump(meta, sink, **dump_kw) - - elif args.record_buffered: - # Buffer GeoJSON data at the feature level for smaller - # memory footprint. 
- - indented = bool(args.indent) - rec_indent = "\n" + " " * (2 * (args.indent or 0)) - - collection = { - 'type': 'FeatureCollection', - 'fiona:schema': meta['schema'], - 'fiona:crs': meta['crs'], - '_crs': crs_uri(meta['crs']), - 'features': [] } - if args.use_ld_context: - collection['@context'] = make_ld_context( - args.ld_context_items) - - head, tail = json.dumps(collection, **dump_kw).split('[]') - - sink.write(head) - sink.write("[") - - itr = iter(source) - - # Try the first record. - try: - i, first = 0, next(itr) - if args.use_ld_context: - first = id_record(first) - if indented: - sink.write(rec_indent) - sink.write( - json.dumps(first, **dump_kw - ).replace("\n", rec_indent)) - except StopIteration: - pass - except Exception as exc: - # Ignoring errors is *not* the default. - if ignore_errors: - logger.error( - "failed to serialize file record %d (%s), " - "continuing", - i, exc) - else: - # Log error and close up the GeoJSON, leaving it - # more or less valid no matter what happens above. - logger.critical( - "failed to serialize file record %d (%s), " - "quiting", - i, exc) - sink.write("]") - sink.write(tail) - if indented: - sink.write("\n") - return 1 - - # Because trailing commas aren't valid in JSON arrays - # we'll write the item separator before each of the - # remaining features. - for i, rec in enumerate(itr, 1): - try: - if args.use_ld_context: - rec = id_record(rec) - if indented: - sink.write(rec_indent) - sink.write(item_sep) - sink.write( - json.dumps(rec, **dump_kw - ).replace("\n", rec_indent)) - except Exception as exc: - if ignore_errors: - logger.error( - "failed to serialize file record %d (%s), " - "continuing", - i, exc) - else: - logger.critical( - "failed to serialize file record %d (%s), " - "quiting", - i, exc) - sink.write("]") - sink.write(tail) - if indented: - sink.write("\n") - return 1 - - # Close up the GeoJSON after writing all features. - sink.write("]") - sink.write(tail) - if indented: - sink.write("\n") - - else: - # Buffer GeoJSON data at the collection level. The default. 
- collection = { - 'type': 'FeatureCollection', - 'fiona:schema': meta['schema'], - 'fiona:crs': meta['crs'], - '_crs': crs_uri(meta['crs']) } - if args.use_ld_context: - collection['@context'] = make_ld_context( - args.ld_context_items) - collection['features'] = list(map(id_record, source)) - else: - collection['features'] = list(source) - json.dump(collection, sink, **dump_kw) - - return 0 - -if __name__ == '__main__': - - import argparse - - logging.basicConfig(stream=sys.stderr, level=logging.INFO) - logger = logging.getLogger('fiona.tool') - - parser = argparse.ArgumentParser( - description="Serialize a file's records or description to GeoJSON") - - parser.add_argument('infile', - help="input file name") - parser.add_argument('outfile', - nargs='?', - help="output file name, defaults to stdout if omitted", - default=sys.stdout) - parser.add_argument('-d', '--description', - action='store_true', - help="serialize file's data description (schema) only") - parser.add_argument('-n', '--indent', - type=int, - default=None, - metavar='N', - help="indentation level in N number of chars") - parser.add_argument('--compact', - action='store_true', - help="use compact separators (',', ':')") - parser.add_argument('--encoding', - default=None, - metavar='ENC', - help="Specify encoding of the input file") - parser.add_argument('--record-buffered', - dest='record_buffered', - action='store_true', - help="Economical buffering of writes at record, not collection (default), level") - parser.add_argument('--ignore-errors', - dest='ignore_errors', - action='store_true', - help="log errors but do not stop serialization") - parser.add_argument('--use-ld-context', - dest='use_ld_context', - action='store_true', - help="add a JSON-LD context to JSON output") - parser.add_argument('--add-ld-context-item', - dest='ld_context_items', - action='append', - metavar='TERM=URI', - help="map a term to a URI and add it to the output's JSON LD context") - - args = parser.parse_args() - - # Keyword args to be used in all following json.dump* calls. - dump_kw = {'sort_keys': True} - if args.indent: - dump_kw['indent'] = args.indent - if args.compact: - dump_kw['separators'] = (',', ':') - - item_sep = args.compact and ',' or ', ' - ignore_errors = args.ignore_errors - - sys.exit(main(args, dump_kw, item_sep, ignore_errors)) - diff -Nru fiona-1.7.10/fiona/_transform.pyx fiona-1.8.6/fiona/_transform.pyx --- fiona-1.7.10/fiona/_transform.pyx 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/fiona/_transform.pyx 2019-03-19 04:25:07.000000000 +0000 @@ -9,6 +9,8 @@ from fiona cimport _cpl, _crs, _csl, _geometry from fiona._crs cimport OGRSpatialReferenceH +from fiona.compat import UserDict + cdef extern from "ogr_geometry.h" nogil: @@ -25,7 +27,7 @@ pass -log = logging.getLogger("Fiona") +log = logging.getLogger(__name__) class NullHandler(logging.Handler): def emit(self, record): pass @@ -40,6 +42,9 @@ raise ValueError("NULL spatial reference") params = [] # Normally, we expect a CRS dict. + if isinstance(crs, UserDict): + crs = dict(crs) + if isinstance(crs, dict): # EPSG is a special case. 
         init = crs.get('init')
@@ -149,37 +154,66 @@
         _csl.CSLDestroy(options)
         _crs.OSRRelease(src)
         _crs.OSRRelease(dst)
+
     else:
         g = geom
+
     if precision >= 0:
+
         if g['type'] == 'Point':
-            x, y = g['coordinates']
+            coords = list(g['coordinates'])
+            x, y = coords[:2]
             x = round(x, precision)
             y = round(y, precision)
             new_coords = [x, y]
+            if len(coords) == 3:
+                z = coords[2]
+                new_coords.append(round(z, precision))
+
         elif g['type'] in ['LineString', 'MultiPoint']:
-            xp, yp = zip(*g['coordinates'])
+            coords = list(zip(*g['coordinates']))
+            xp, yp = coords[:2]
             xp = [round(v, precision) for v in xp]
             yp = [round(v, precision) for v in yp]
-            new_coords = list(zip(xp, yp))
+            if len(coords) == 3:
+                zp = coords[2]
+                zp = [round(v, precision) for v in zp]
+                new_coords = list(zip(xp, yp, zp))
+            else:
+                new_coords = list(zip(xp, yp))
+
         elif g['type'] in ['Polygon', 'MultiLineString']:
             new_coords = []
             for piece in g['coordinates']:
-                xp, yp = zip(*piece)
+                coords = list(zip(*piece))
+                xp, yp = coords[:2]
                 xp = [round(v, precision) for v in xp]
                 yp = [round(v, precision) for v in yp]
-                new_coords.append(list(zip(xp, yp)))
+                if len(coords) == 3:
+                    zp = coords[2]
+                    zp = [round(v, precision) for v in zp]
+                    new_coords.append(list(zip(xp, yp, zp)))
+                else:
+                    new_coords.append(list(zip(xp, yp)))
+
         elif g['type'] == 'MultiPolygon':
             parts = g['coordinates']
             new_coords = []
             for part in parts:
                 inner_coords = []
                 for ring in part:
-                    xp, yp = zip(*ring)
+                    coords = list(zip(*ring))
+                    xp, yp = coords[:2]
                     xp = [round(v, precision) for v in xp]
                     yp = [round(v, precision) for v in yp]
-                    inner_coords.append(list(zip(xp, yp)))
+                    if len(coords) == 3:
+                        zp = coords[2]
+                        zp = [round(v, precision) for v in zp]
+                        inner_coords.append(list(zip(xp, yp, zp)))
+                    else:
+                        inner_coords.append(list(zip(xp, yp)))
                 new_coords.append(inner_coords)
+
         g['coordinates'] = new_coords
     return g
diff -Nru fiona-1.7.10/fiona/vfs.py fiona-1.8.6/fiona/vfs.py
--- fiona-1.7.10/fiona/vfs.py 1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/fiona/vfs.py 2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,87 @@
+"""Implementation of Apache VFS schemes and URLs."""
+
+import os
+import sys
+import re
+from fiona.compat import urlparse
+
+
+# Supported URI schemes and their mapping to GDAL's VSI suffix.
+# TODO: extend for other cloud platforms.
+SCHEMES = {
+    'ftp': 'curl',
+    'gzip': 'gzip',
+    'http': 'curl',
+    'https': 'curl',
+    's3': 's3',
+    'tar': 'tar',
+    'zip': 'zip',
+    'gs': 'gs',
+}
+
+CURLSCHEMES = set([k for k, v in SCHEMES.items() if v == 'curl'])
+
+# TODO: extend for other cloud platforms.
+REMOTESCHEMES = set([k for k, v in SCHEMES.items() if v in ('curl', 's3', 'gs')])
+
+
+def valid_vsi(vsi):
+    """Ensures all parts of our vsi path are valid schemes."""
+    return all(p in SCHEMES for p in vsi.split('+'))
+
+def is_remote(scheme):
+    if scheme is None:
+        return False
+    return any(p in REMOTESCHEMES for p in scheme.split('+'))
+
+
+def vsi_path(path, vsi=None, archive=None):
+    # If a VSI and archive file are specified, we convert the path to
+    # an OGR VSI path (see cpl_vsi.h).
+    if vsi:
+        prefix = '/'.join('vsi{0}'.format(SCHEMES[p]) for p in vsi.split('+'))
+        if archive:
+            result = '/{0}/{1}{2}'.format(prefix, archive, path)
+        else:
+            result = '/{0}/{1}'.format(prefix, path)
+    else:
+        result = path
+
+    return result
+
+
+def parse_paths(uri, vfs=None):
+    """Parse a URI or Apache VFS URL into its parts
+
+    Returns: tuple
+        (path, scheme, archive)
+    """
+    archive = scheme = None
+    path = uri
+    # Windows drive letters (e.g.
"C:\") confuse `urlparse` as they look like + # URL schemes + if sys.platform == "win32" and re.match("^[a-zA-Z]\\:", path): + return path, None, None + if vfs: + parts = urlparse(vfs) + scheme = parts.scheme + archive = parts.path + if parts.netloc and parts.netloc != 'localhost': + archive = parts.netloc + archive + else: + parts = urlparse(path) + scheme = parts.scheme + path = parts.path + if parts.netloc and parts.netloc != 'localhost': + if scheme.split("+")[-1] in CURLSCHEMES: + # We need to deal with cases such as zip+https://server.com/data.zip + path = "{}://{}{}".format(scheme.split("+")[-1], parts.netloc, path) + else: + path = parts.netloc + path + if scheme in SCHEMES: + parts = path.split('!') + path = parts.pop() if parts else None + archive = parts.pop() if parts else None + + scheme = None if not scheme else scheme + return path, scheme, archive diff -Nru fiona-1.7.10/.gitignore fiona-1.8.6/.gitignore --- fiona-1.7.10/.gitignore 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/.gitignore 2019-03-19 04:25:07.000000000 +0000 @@ -22,6 +22,7 @@ *.egg-info/ .installed.cfg *.egg +.libs # PyInstaller # Usually these files are written by a python script from a template @@ -63,6 +64,7 @@ # fiona VERSION.txt +fiona/_shim.c fiona/ogrext.c fiona/_crs.c fiona/_drivers.c @@ -70,7 +72,20 @@ fiona/_geometry.c fiona/_transform.cpp fiona/ograpi.pxd -fiona/ogrext.pyx +fiona/_shim1.c +fiona/_shim2.c +fiona/_shim22.c +fiona/_shim.pxd +fiona/_shim.pyx tests/data/coutwildrnp.json tests/data/coutwildrnp.tar tests/data/coutwildrnp.zip +tests/data/coutwildrnp.gpkg +.DS_Store +.ipynb_checkpoints +.pytest_cache +MANIFEST +fiona/_env.c +fiona/ogrext1.c +fiona/ogrext2.c +fiona/schema.c diff -Nru fiona-1.7.10/ISSUE_TEMPLATE.md fiona-1.8.6/ISSUE_TEMPLATE.md --- fiona-1.7.10/ISSUE_TEMPLATE.md 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/ISSUE_TEMPLATE.md 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,41 @@ + + +## Expected behavior and actual behavior. + +For example: I expected to read 10 features from a file and an exception occurred +on the 3rd. + +## Steps to reproduce the problem. + +For example: a script with required data. + +## Operating system + +For example: Mac OS X 10.12.3. + +## Fiona and GDAL version and provenance + +For example: the 1.7.10.post1 manylinux1 wheel installed from PyPI using pip version 9.0.1. + +For example: GDAL 2.1.0 installed via Homebrew diff -Nru fiona-1.7.10/MANIFEST.in fiona-1.8.6/MANIFEST.in --- fiona-1.7.10/MANIFEST.in 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/MANIFEST.in 2019-03-19 04:25:07.000000000 +0000 @@ -8,6 +8,5 @@ recursive-include tests *.py recursive-include tests/data * include fiona/*.c fiona/*.cpp -exclude fiona/ogrext.c include CHANGES.txt CREDITS.txt LICENSE.txt VERSION.txt README.rst include benchmark.py setup.py requirements.txt diff -Nru fiona-1.7.10/README.rst fiona-1.8.6/README.rst --- fiona-1.7.10/README.rst 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/README.rst 2019-03-19 04:25:07.000000000 +0000 @@ -2,11 +2,14 @@ Fiona ===== -Fiona is OGR's neat, nimble, no-nonsense API for Python programmers. +Fiona is OGR's neat and nimble API for Python programmers. -.. image:: https://travis-ci.org/Toblerity/Fiona.png?branch=master +.. image:: https://travis-ci.org/Toblerity/Fiona.png?branch=master :target: https://travis-ci.org/Toblerity/Fiona +.. image:: https://ci.appveyor.com/api/projects/status/github/Toblerity/Fiona?svg=true + :target: https://ci.appveyor.com/project/sgillies/fiona/branch/master + .. 
image:: https://coveralls.io/repos/Toblerity/Fiona/badge.png :target: https://coveralls.io/r/Toblerity/Fiona @@ -16,11 +19,12 @@ classes specific to OGR. Fiona can read and write real-world data using multi-layered GIS formats and zipped virtual file systems and integrates readily with other Python GIS packages such as pyproj_, Rtree_, and Shapely_. +Fiona is supported only on CPython versions 2.7 and 3.4+. For more details, see: * Fiona `home page `__ -* Fiona `docs and manual `__ +* Fiona `docs and manual `__ * Fiona `examples `__ Usage @@ -39,56 +43,43 @@ .. code-block:: python import fiona - - # Register format drivers with a context manager - - with fiona.drivers(): - - # Open a file for reading. We'll call this the "source." - - with fiona.open('tests/data/coutwildrnp.shp') as source: - - # The file we'll write to, the "sink", must be initialized - # with a coordinate system, a format driver name, and - # a record schema. We can get initial values from the open - # collection's ``meta`` property and then modify them as - # desired. - - meta = source.meta - meta['schema']['geometry'] = 'Point' - - # Open an output file, using the same format driver and - # coordinate reference system as the source. The ``meta`` - # mapping fills in the keyword parameters of fiona.open(). - - with fiona.open('test_write.shp', 'w', **meta) as sink: - - # Process only the records intersecting a box. - for f in source.filter(bbox=(-107.0, 37.0, -105.0, 39.0)): - - # Get a point on the boundary of the record's - # geometry. - - f['geometry'] = { - 'type': 'Point', - 'coordinates': f['geometry']['coordinates'][0][0]} - - # Write the record out. - - sink.write(f) - - # The sink's contents are flushed to disk and the file is - # closed when its ``with`` block ends. This effectively - # executes ``sink.flush(); sink.close()``. - - # At the end of the ``with fiona.drivers()`` block, context - # manager exits and all drivers are de-registered. - -The fiona.drivers() function and context manager are new in 1.1. The -example above shows the way to use it to register and de-register -drivers in a deterministic and efficient way. Code written for Fiona 1.0 -will continue to work: opened collections may manage the global driver -registry if no other manager is present. + + # Open a file for reading. We'll call this the "source." + + with fiona.open('tests/data/coutwildrnp.shp') as src: + + # The file we'll write to, the "destination", must be initialized + # with a coordinate system, a format driver name, and + # a record schema. We can get initial values from the open + # collection's ``meta`` property and then modify them as + # desired. + + meta = src.meta + meta['schema']['geometry'] = 'Point' + + # Open an output file, using the same format driver and + # coordinate reference system as the source. The ``meta`` + # mapping fills in the keyword parameters of fiona.open(). + + with fiona.open('test_write.shp', 'w', **meta) as dst: + + # Process only the records intersecting a box. + for f in src.filter(bbox=(-107.0, 37.0, -105.0, 39.0)): + + # Get a point on the boundary of the record's + # geometry. + + f['geometry'] = { + 'type': 'Point', + 'coordinates': f['geometry']['coordinates'][0][0]} + + # Write the record out. + + dst.write(f) + + # The destination's contents are flushed to disk and the file is + # closed when its ``with`` block ends. This effectively + # executes ``dst.flush(); dst.close()``. Reading Multilayer data ----------------------- @@ -100,12 +91,10 @@ .. 
code-block:: python

-    with fiona.drivers():
+    for layername in fiona.listlayers('tests/data'):
+        with fiona.open('tests/data', layer=layername) as src:
+            print(layername, len(src))

-        for layername in fiona.listlayers('tests/data'):
-            with fiona.open('tests/data', layer=layername) as src:
-                print(layername, len(src))
-
     # Output:
     # (u'coutwildrnp', 67)
@@ -114,12 +103,10 @@

 .. code-block:: python

-    with fiona.drivers():
+    for i, layername in enumerate(fiona.listlayers('tests/data')):
+        with fiona.open('tests/data', layer=i) as src:
+            print(i, layername, len(src))

-        for i, layername in enumerate(fiona.listlayers('tests/data')):
-            with fiona.open('tests/data', layer=i) as src:
-                print(i, layername, len(src))
-
     # Output:
     # (0, u'coutwildrnp', 67)
@@ -130,24 +117,22 @@
 writing.

 .. code-block:: python
-
-    with fiona.drivers():
-
-        with open('tests/data/cowildrnp.shp') as src:
-            meta = src.meta
-            f = next(src)
-
-        with fiona.open('/tmp/foo', 'w', layer='bar', **meta) as dst:
-            dst.write(f)
-
-        print(fiona.listlayers('/tmp/foo'))
-
-        with fiona.open('/tmp/foo', layer='bar') as src:
-            print(len(src))
-            f = next(src)
-            print(f['geometry']['type'])
-            print(f['properties'])
-
+
+    with fiona.open('tests/data/coutwildrnp.shp') as src:
+        meta = src.meta
+        f = next(src)
+
+    with fiona.open('/tmp/foo', 'w', layer='bar', **meta) as dst:
+        dst.write(f)
+
+    print(fiona.listlayers('/tmp/foo'))
+
+    with fiona.open('/tmp/foo', layer='bar') as src:
+        print(len(src))
+        f = next(src)
+        print(f['geometry']['type'])
+        print(f['properties'])
+
     # Output:
     # [u'bar']
     # 1
@@ -170,21 +155,25 @@

 .. code-block:: python

-    with fiona.drivers():
+    for i, layername in enumerate(
+            fiona.listlayers('zip://tests/data/coutwildrnp.zip')):
+        with fiona.open('zip://tests/data/coutwildrnp.zip', layer=i) as src:
+            print(i, layername, len(src))

-        for i, layername in enumerate(
-                fiona.listlayers(
-                    '/',
-                    vfs='zip://tests/data/coutwildrnp.zip')):
-            with fiona.open(
-                    '/',
-                    vfs='zip://tests/data/coutwildrnp.zip',
-                    layer=i) as src:
-                print(i, layername, len(src))
-
     # Output:
     # (0, u'coutwildrnp', 67)

+Fiona can also read from more exotic file systems. For instance, a
+zipped shapefile on S3 can be accessed like so:
+
+.. code-block:: python
+
+    with fiona.open('zip+s3://mapbox/rasterio/coutwildrnp.zip') as src:
+        print(len(src))
+
+    # Output:
+    # 67
+
 Fiona CLI
 =========
@@ -225,7 +214,7 @@
 Installation
 ============

-Fiona requires Python 2.6, 2.7, 3.3, or 3.4 and GDAL/OGR 1.8+. To build from
+Fiona requires Python 2.7 or 3.4+ and GDAL/OGR 1.8+. To build from
 a source distribution you will need a C compiler and GDAL and Python
 development headers and libraries (libgdal1-dev for Debian/Ubuntu, gdal-dev for
 CentOS/Fedora).
@@ -242,7 +231,7 @@
 Python Requirements
 -------------------

-Fiona depends on the modules ``six``, ``cligj``, ``munch``, ``argparse``, and
+Fiona depends on the modules ``enum34``, ``six``, ``cligj``, ``munch``, ``argparse``, and
 ``ordereddict`` (the two latter modules are standard in Python 2.7+). Pip will
 fetch these requirements for you, but users installing Fiona from a Windows
 installer must get them separately.
@@ -265,14 +254,15 @@
 If gdal-config is not available or if GDAL/OGR headers and libs aren't
 installed to a well known location, you must set include dirs, library dirs,
 and libraries options via the setup.cfg file or setup command line as shown
-below (using ``git``). You must also specify the major version of the GDAL API
-(1 or 2) on the setup command line.
+below (using ``git``). You must also specify the version of the GDAL API on the
+command line using the ``--gdalversion`` argument (see example below) or with
+the ``GDAL_VERSION`` environment variable (e.g. ``export GDAL_VERSION=2.1``).

 .. code-block:: console

     (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git
     (fiona_env)$ cd Fiona
-    (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal install --gdalversion 1
+    (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal install --gdalversion 2.1

 Or specify that build options and GDAL API version should be provided by a
 particular gdal-config program.
@@ -292,24 +282,32 @@
 the compiled libraries and headers (include files).

 When building from source on Windows, it is important to know that setup.py
-cannot rely on gdal-config, which is only present on UNIX systems, to discover
-the locations of header files and libraries that Fiona needs to compile its
-C extensions. On Windows, these paths need to be provided by the user.
-You will need to find the include files and the library files for gdal and
-use setup.py as follows. You must also specify the major version of the GDAL
-API (1 or 2) on the setup command line.
+cannot rely on gdal-config, which is only present on UNIX systems, to discover
+the locations of header files and libraries that Fiona needs to compile its
+C extensions. On Windows, these paths need to be provided by the user.
+You will need to find the include files and the library files for gdal and
+use setup.py as follows. You must also specify the version of the GDAL API on the
+command line using the ``--gdalversion`` argument (see example below) or with
+the ``GDAL_VERSION`` environment variable (e.g. ``set GDAL_VERSION=2.1``).

 .. code-block:: console

-    $ python setup.py build_ext -I<gdal include path> -lgdal_i -L<gdal library path> install --gdalversion 1
+    $ python setup.py build_ext -I<gdal include path> -lgdal_i -L<gdal library path> install --gdalversion 2.1
+
+Note: The GDAL DLL (``gdal111.dll`` or similar) and gdal-data directory need to
+be in your Windows PATH otherwise Fiona will fail to work.

-Note: The GDAL dll (gdal111.dll) and gdal-data directory need to be in your
-Windows PATH otherwise Fiona will fail to work.
+The `Appveyor CI build <https://ci.appveyor.com/project/sgillies/fiona/history>`__
+uses the GISInternals GDAL binaries to build Fiona. This produces a binary wheel
+for successful builds, which includes GDAL and other dependencies, for users
+wanting to try an unstable development version.
+The `Appveyor configuration file <appveyor.yml>`__ may be a useful example for
+users building from source on Windows.

 Development and testing
 =======================

-Building from the source requires Cython. Tests require Nose. If the GDAL/OGR
+Building from the source requires Cython. Tests require `pytest <https://docs.pytest.org>`_. If the GDAL/OGR
 libraries, headers, and `gdal-config`_ program are installed to well known
 locations on your system (via your system's package manager), you can do this::

     (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git
@@ -317,7 +315,7 @@
     (fiona_env)$ cd Fiona
     (fiona_env)$ pip install cython
     (fiona_env)$ pip install -e .[test]
-    (fiona_env)$ nosetests
+    (fiona_env)$ py.test

 Or you can use the ``pep-518-install`` script::

     (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git
     (fiona_env)$ cd Fiona
     (fiona_env)$ pep-518-install
@@ -329,7 +327,7 @@
 lib dirs and GDAL library on the command line::

     (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal --gdalversion 2 develop
-    (fiona_env)$ nosetests
+    (fiona_env)$ py.test

 .. _OGR: http://www.gdal.org/ogr
 .. 
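
The test suite gates version-specific cases on the GDAL library found at runtime (see the ``GDALVersion`` helpers in ``tests/conftest.py`` further down). A small usage sketch of that pattern with pytest; ``test_needs_gdal22`` is a hypothetical test name:

.. code-block:: python

    import pytest
    from fiona.env import GDALVersion

    gdal_version = GDALVersion.runtime()

    @pytest.mark.skipif(not gdal_version.at_least('2.2'),
                        reason="Requires GDAL 2.2+")
    def test_needs_gdal22():
        # Runs only against GDAL 2.2 or newer.
        assert gdal_version.major >= 2
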
_pyproj: http://pypi.python.org/pypi/pyproj/ diff -Nru fiona-1.7.10/readthedocs.yml fiona-1.8.6/readthedocs.yml --- fiona-1.7.10/readthedocs.yml 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/readthedocs.yml 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,5 @@ +python: + version: 3 + pip_install: true +conda: + file: environment.yml diff -Nru fiona-1.7.10/requirements-dev.txt fiona-1.8.6/requirements-dev.txt --- fiona-1.7.10/requirements-dev.txt 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/requirements-dev.txt 2019-03-19 04:25:07.000000000 +0000 @@ -1,7 +1,8 @@ -r requirements.txt -coverage -cython>=0.21.2 -nose -pytest -pytest-cov -setuptools +coverage==4.5.1 +cython==0.28.3 +pytest==3.4.2 +pytest-cov==2.5.1 +setuptools==39.0.1 +boto3==1.9.19 +wheel==0.31.1 diff -Nru fiona-1.7.10/requirements.txt fiona-1.8.6/requirements.txt --- fiona-1.7.10/requirements.txt 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/requirements.txt 2019-03-19 04:25:07.000000000 +0000 @@ -1,5 +1,8 @@ -argparse -cligj -six -ordereddict -munch +argparse==1.4.0 +attrs==18.2.0 +click-plugins==1.0.4 +cligj==0.5.0 +enum34==1.1.6 ; python_version < '3.4' +munch==2.3.2 +ordereddict==1.1 ; python_version <= '2.7' +six==1.11.0 diff -Nru fiona-1.7.10/scripts/travis_gdal_install.sh fiona-1.8.6/scripts/travis_gdal_install.sh --- fiona-1.7.10/scripts/travis_gdal_install.sh 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/scripts/travis_gdal_install.sh 2019-03-19 04:25:07.000000000 +0000 @@ -5,7 +5,6 @@ --with-geos \ --with-expat \ --without-libtool \ - --with-libz=internal \ --with-libtiff=internal \ --with-geotiff=internal \ --without-gif \ @@ -33,8 +32,8 @@ --without-ingres \ --without-xerces \ --without-odbc \ - --without-curl \ - --without-sqlite3 \ + --with-curl \ + --with-sqlite3 \ --without-dwgdirect \ --without-idb \ --without-sde \ @@ -54,25 +53,21 @@ ls -l $GDALINST -if [ "$GDALVERSION" = "1.9.2" -a ! -d "$GDALINST/gdal-$GDALVERSION" ]; then - cd $GDALBUILD - wget http://download.osgeo.org/gdal/gdal-$GDALVERSION.tar.gz - tar -xzf gdal-$GDALVERSION.tar.gz - cd gdal-$GDALVERSION +if [ "$GDALVERSION" = "trunk" ]; then + # always rebuild trunk + git clone -b master --single-branch --depth=1 https://github.com/OSGeo/gdal.git $GDALBUILD/trunk + cd $GDALBUILD/trunk/gdal ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS - make -s -j 2 + make -j 2 make install -fi - - -# download and compile gdal version -if [ "$GDALVERSION" != "1.9.2" -a ! -d "$GDALINST/gdal-$GDALVERSION" ]; then +elif [ ! 
-d "$GDALINST/gdal-$GDALVERSION" ]; then + # only build if not already installed cd $GDALBUILD wget http://download.osgeo.org/gdal/$GDALVERSION/gdal-$GDALVERSION.tar.gz tar -xzf gdal-$GDALVERSION.tar.gz cd gdal-$GDALVERSION ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS - make -s -j 2 + make -j 2 make install fi diff -Nru fiona-1.7.10/setup.cfg fiona-1.8.6/setup.cfg --- fiona-1.7.10/setup.cfg 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/setup.cfg 2019-03-19 04:25:07.000000000 +0000 @@ -1,4 +1,2 @@ -[nosetests] -tests=tests -nocapture=1 -verbosity=3 +[tool:pytest] +testpaths = tests diff -Nru fiona-1.7.10/setup.py fiona-1.8.6/setup.py --- fiona-1.7.10/setup.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/setup.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,6 +1,6 @@ from distutils.command.sdist import sdist from distutils import log -import logging +import itertools as it import os import shutil import subprocess @@ -38,6 +38,7 @@ pass shutil.copytree(datadir, destdir) + # Parse the version from the fiona module. with open('fiona/__init__.py', 'r') as f: for line in f: @@ -70,17 +71,19 @@ # Extend distutil's sdist command to generate C extension sources from -# both `ogrext`.pyx` and `ogrext2.pyx` for GDAL 1.x and 2.x. +# the _shim extension modules for GDAL 1.x and 2.x. class sdist_multi_gdal(sdist): def run(self): - shutil.copy('fiona/ogrext1.pyx', 'fiona/ogrext.pyx') - _ = check_output(['cython', '-v', '-f', 'fiona/ogrext.pyx', - '-o', 'fiona/ogrext1.c']) - print(_) - shutil.copy('fiona/ogrext2.pyx', 'fiona/ogrext.pyx') - _ = check_output(['cython', '-v', '-f', 'fiona/ogrext.pyx', - '-o', 'fiona/ogrext2.c']) - print(_) + sources = { + "_shim1": "_shim", + "_shim2": "_shim", + "_shim22": "_shim" + } + for src_a, src_b in sources.items(): + shutil.copy('fiona/{}.pyx'.format(src_a), 'fiona/{}.pyx'.format(src_b)) + _ = check_output(['cython', '-v', '-f', 'fiona/{}.pyx'.format(src_b), + '-o', 'fiona/{}.c'.format(src_a)]) + print(_) sdist.run(self) # Building Fiona requires options that can be obtained from GDAL's gdal-config @@ -94,12 +97,14 @@ # 1. By the gdal-config program, optionally pointed to by GDAL_CONFIG # 2. By a GDAL_VERSION environment variable. This overrides number 1. + include_dirs = [] library_dirs = [] libraries = [] extra_link_args = [] gdal_output = [None for i in range(4)] -gdalversion = '2' +gdalversion = None +language = None if 'clean' not in sys.argv: try: @@ -168,12 +173,33 @@ log.info("Copying proj data from %s" % projdatadir) copy_data_tree(projdatadir, 'fiona/proj_data') + if "--cython-language" in sys.argv: + index = sys.argv.index("--cython-language") + sys.argv.pop(index) + language = sys.argv.pop(index).lower() + + gdal_version_parts = gdalversion.split('.') + gdal_major_version = int(gdal_version_parts[0]) + gdal_minor_version = int(gdal_version_parts[1]) + ext_options = dict( include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_link_args=extra_link_args) +# GDAL 2.3+ requires C++11 + +if language == "c++": + ext_options["language"] = "c++" + if sys.platform != "win32": + ext_options["extra_compile_args"] = ["-std=c++11"] + +ext_options_cpp = ext_options.copy() +if sys.platform != "win32": + ext_options_cpp["extra_compile_args"] = ["-std=c++11"] + + # Define the extension modules. 
ext_modules = [] @@ -186,42 +212,65 @@ sys.exit(1) if gdalversion.startswith("1"): - log.info("Building Fiona for gdal 1.x: {0}".format(gdalversion)) - shutil.copy('fiona/ogrext1.pyx', 'fiona/ogrext.pyx') - else: - log.info("Building Fiona for gdal 2.x: {0}".format(gdalversion)) - shutil.copy('fiona/ogrext2.pyx', 'fiona/ogrext.pyx') + shutil.copy('fiona/_shim1.pyx', 'fiona/_shim.pyx') + shutil.copy('fiona/_shim1.pxd', 'fiona/_shim.pxd') + elif gdal_major_version == 2: + if gdal_minor_version >= 2: + log.info("Building Fiona for gdal 2.2+: {0}".format(gdalversion)) + shutil.copy('fiona/_shim22.pyx', 'fiona/_shim.pyx') + shutil.copy('fiona/_shim22.pxd', 'fiona/_shim.pxd') + else: + log.info("Building Fiona for gdal 2.0.x-2.1.x: {0}".format(gdalversion)) + shutil.copy('fiona/_shim2.pyx', 'fiona/_shim.pyx') + shutil.copy('fiona/_shim2.pxd', 'fiona/_shim.pxd') ext_modules = cythonize([ Extension('fiona._geometry', ['fiona/_geometry.pyx'], **ext_options), - Extension('fiona._transform', ['fiona/_transform.pyx'], **ext_options), + Extension('fiona.schema', ['fiona/schema.pyx'], **ext_options), + Extension('fiona._transform', ['fiona/_transform.pyx'], **ext_options_cpp), Extension('fiona._crs', ['fiona/_crs.pyx'], **ext_options), + Extension('fiona._env', ['fiona/_env.pyx'], **ext_options), Extension('fiona._drivers', ['fiona/_drivers.pyx'], **ext_options), Extension('fiona._err', ['fiona/_err.pyx'], **ext_options), - Extension('fiona.ogrext', ['fiona/ogrext.pyx'], **ext_options)]) + Extension('fiona._shim', ['fiona/_shim.pyx'], **ext_options), + Extension('fiona.ogrext', ['fiona/ogrext.pyx'], **ext_options) + ], + compiler_directives={"language_level": "3"} + ) # If there's no manifest template, as in an sdist, we just specify .c files. elif "clean" not in sys.argv: ext_modules = [ - Extension('fiona._transform', ['fiona/_transform.cpp'], **ext_options), + Extension('fiona.schema', ['fiona/schema.c'], **ext_options), + Extension('fiona._transform', ['fiona/_transform.cpp'], **ext_options_cpp), Extension('fiona._geometry', ['fiona/_geometry.c'], **ext_options), Extension('fiona._crs', ['fiona/_crs.c'], **ext_options), + Extension('fiona._env', ['fiona/_env.c'], **ext_options), Extension('fiona._drivers', ['fiona/_drivers.c'], **ext_options), - Extension('fiona._err', ['fiona/_err.c'], **ext_options)] + Extension('fiona._err', ['fiona/_err.c'], **ext_options), + Extension('fiona.ogrext', ['fiona/ogrext.c'], **ext_options), + ] - if gdalversion.startswith("1"): + if gdal_major_version == 1: log.info("Building Fiona for gdal 1.x: {0}".format(gdalversion)) ext_modules.append( - Extension('fiona.ogrext', ['fiona/ogrext1.c'], **ext_options)) + Extension('fiona._shim', ['fiona/_shim1.c'], **ext_options)) else: - log.info("Building Fiona for gdal 2.x: {0}".format(gdalversion)) - ext_modules.append( - Extension('fiona.ogrext', ['fiona/ogrext2.c'], **ext_options)) + if gdal_minor_version >= 2: + log.info("Building Fiona for gdal 2.2+: {0}".format(gdalversion)) + ext_modules.append( + Extension('fiona._shim', ['fiona/_shim22.c'], **ext_options)) + else: + log.info("Building Fiona for gdal 2.0.x-2.1.x: {0}".format(gdalversion)) + ext_modules.append( + Extension('fiona._shim', ['fiona/_shim2.c'], **ext_options)) requirements = [ - 'cligj', - 'click-plugins', - 'six', + 'attrs>=17', + 'click>=4.0,<8', + 'cligj>=0.5', + 'click-plugins>=1.0', + 'six>=1.7', 'munch'] if sys.version_info < (2, 7): @@ -231,6 +280,15 @@ if sys.version_info < (3, 4): requirements.append('enum34') +extras_require = { + 'calc': 
['shapely'], + 's3': ['boto3>=1.2.4'], + 'test': ['pytest>=3', 'pytest-cov', 'boto3>=1.2.4', 'mock; python_version<"3.4"'] +} + +extras_require['all'] = list(set(it.chain(*extras_require.values()))) + + setup_args = dict( cmdclass={'sdist': sdist_multi_gdal}, metadata_version='1.2', @@ -266,13 +324,10 @@ insp=fiona.fio.insp:insp load=fiona.fio.load:load ls=fiona.fio.ls:ls + rm=fiona.fio.rm:rm ''', install_requires=requirements, - extras_require={ - 'calc': ['shapely'], - 'test': ['nose']}, - tests_require=['nose'], - test_suite='nose.collector', + extras_require=extras_require, ext_modules=ext_modules, classifiers=[ 'Development Status :: 5 - Production/Stable', @@ -285,6 +340,6 @@ 'Topic :: Scientific/Engineering :: GIS']) if os.environ.get('PACKAGE_DATA'): - setup_args['package_data'] = {'fiona': ['gdal_data/*', 'proj_data/*']} + setup_args['package_data'] = {'fiona': ['gdal_data/*', 'proj_data/*', '.libs/*', '.libs/licenses/*']} setup(**setup_args) diff -Nru fiona-1.7.10/tests/conftest.py fiona-1.8.6/tests/conftest.py --- fiona-1.7.10/tests/conftest.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/conftest.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,277 @@ +"""pytest fixtures and automatic test data generation.""" + +import copy +import json +import os.path +import shutil +import tarfile +import zipfile + +from click.testing import CliRunner +import pytest + +import fiona +from fiona.env import GDALVersion + + +def pytest_report_header(config): + headers = [] + # gdal version number + gdal_release_name = fiona.get_gdal_release_name() + headers.append('GDAL: {} ({})'.format(gdal_release_name, fiona.get_gdal_version_num())) + supported_drivers = ", ".join(sorted(list(fiona.drvsupport.supported_drivers.keys()))) + # supported drivers + headers.append("Supported drivers: {}".format(supported_drivers)) + return '\n'.join(headers) + + +_COUTWILDRNP_FILES = [ + 'coutwildrnp.shp', 'coutwildrnp.shx', 'coutwildrnp.dbf', 'coutwildrnp.prj'] + + +def _read_file(name): + with open(os.path.join(os.path.dirname(__file__), name)) as f: + return f.read() + + +has_gpkg = "GPKG" in fiona.drvsupport.supported_drivers.keys() +has_gpkg_reason = "Requires geopackage driver" +requires_gpkg = pytest.mark.skipif(not has_gpkg, reason=has_gpkg_reason) + + +@pytest.fixture(scope='function') +def gdalenv(request): + import fiona.env + + def fin(): + if fiona.env.local._env: + fiona.env.delenv() + fiona.env.local._env = None + request.addfinalizer(fin) + + +@pytest.fixture(scope='session') +def data_dir(): + """Absolute file path to the directory containing test datasets.""" + return os.path.abspath(os.path.join(os.path.dirname(__file__), 'data')) + + +@pytest.fixture(scope='function') +def data(tmpdir, data_dir): + """A temporary directory containing a copy of the files in data.""" + for filename in _COUTWILDRNP_FILES: + shutil.copy(os.path.join(data_dir, filename), str(tmpdir)) + return tmpdir + + +@pytest.fixture(scope='session') +def path_curves_line_csv(data_dir): + """Path to ```curves_line.csv``""" + return os.path.join(data_dir, 'curves_line.csv') + + +@pytest.fixture(scope='session') +def path_test_tin_shp(data_dir): + """Path to ```test_tin.shp``""" + return os.path.join(data_dir, 'test_tin.shp') + +@pytest.fixture(scope='session') +def path_test_tin_csv(data_dir): + """Path to ```test_tin.csv``""" + return os.path.join(data_dir, 'test_tin.csv') + +@pytest.fixture(scope='session') +def path_coutwildrnp_shp(data_dir): + """Path to ```coutwildrnp.shp``""" + return os.path.join(data_dir, 
'coutwildrnp.shp') + + +@pytest.fixture(scope='session') +def path_coutwildrnp_zip(data_dir): + """Creates ``coutwildrnp.zip`` if it does not exist and returns the absolute + file path.""" + path = os.path.join(data_dir, 'coutwildrnp.zip') + if not os.path.exists(path): + with zipfile.ZipFile(path, 'w') as zip: + for filename in _COUTWILDRNP_FILES: + zip.write(os.path.join(data_dir, filename), filename) + return path + + +@pytest.fixture(scope='session') +def path_grenada_geojson(data_dir): + """Path to ```grenada.geojson```""" + return os.path.join(data_dir, 'grenada.geojson') + + +@pytest.fixture(scope='session') +def bytes_coutwildrnp_zip(path_coutwildrnp_zip): + """The zip file's bytes""" + with open(path_coutwildrnp_zip, 'rb') as src: + return src.read() + + +@pytest.fixture(scope='session') +def path_coutwildrnp_tar(data_dir): + """Creates ``coutwildrnp.tar`` if it does not exist and returns the absolute + file path.""" + path = os.path.join(data_dir, 'coutwildrnp.tar') + if not os.path.exists(path): + with tarfile.open(path, 'w') as tar: + for filename in _COUTWILDRNP_FILES: + tar.add( + os.path.join(data_dir, filename), + arcname=os.path.join('testing', filename)) + return path + + +@pytest.fixture(scope='session') +def path_coutwildrnp_json(data_dir): + """Creates ``coutwildrnp.json`` if it does not exist and returns the absolute + file path.""" + path = os.path.join(data_dir, 'coutwildrnp.json') + if not os.path.exists(path): + name = _COUTWILDRNP_FILES[0] + with fiona.open(os.path.join(data_dir, name), 'r') as source: + features = [feat for feat in source] + my_layer = { + 'type': 'FeatureCollection', + 'features': features} + with open(path, 'w') as f: + f.write(json.dumps(my_layer)) + return path + + +@pytest.fixture(scope='session') +def bytes_grenada_geojson(path_grenada_geojson): + """The geojson as bytes.""" + with open(path_grenada_geojson, 'rb') as src: + return src.read() + + +@pytest.fixture(scope='session') +def path_coutwildrnp_gpkg(data_dir): + """Creates ``coutwildrnp.gpkg`` if it does not exist and returns the absolute + file path.""" + if not has_gpkg: + raise RuntimeError("GDAL has not been compiled with GPKG support") + path = os.path.join(data_dir, 'coutwildrnp.gpkg') + if not os.path.exists(path): + filename_shp = _COUTWILDRNP_FILES[0] + path_shp = os.path.join(data_dir, filename_shp) + with fiona.open(path_shp, "r") as src: + meta = copy.deepcopy(src.meta) + meta["driver"] = "GPKG" + with fiona.open(path, "w", **meta) as dst: + dst.writerecords(src) + return path + + +@pytest.fixture(scope='session') +def path_gpx(data_dir): + return os.path.join(data_dir, 'test_gpx.gpx') + + +@pytest.fixture(scope='session') +def feature_collection(): + """GeoJSON feature collection on a single line.""" + return _read_file(os.path.join('data', 'collection.txt')) + + +@pytest.fixture(scope='session') +def feature_collection_pp(): + """Same as above but with pretty-print styling applied.""" + return _read_file(os.path.join('data', 'collection-pp.txt')) + + +@pytest.fixture(scope='session') +def feature_seq(): + """One feature per line.""" + return _read_file(os.path.join('data', 'sequence.txt')) + + +@pytest.fixture(scope='session') +def feature_seq_pp_rs(): + """Same as above but each feature has pretty-print styling""" + return _read_file(os.path.join('data', 'sequence-pp.txt')) + + +@pytest.fixture(scope='session') +def runner(): + """Returns a ```click.testing.CliRunner()`` instance.""" + return CliRunner() + + +@pytest.fixture(scope='class') +def 
uttc_path_coutwildrnp_zip(path_coutwildrnp_zip, request): + """Make the ``path_coutwildrnp_zip`` fixture work with a + ``unittest.TestCase()``. ``uttc`` stands for unittest test case.""" + request.cls.path_coutwildrnp_zip = path_coutwildrnp_zip + + +@pytest.fixture(scope='class') +def uttc_path_coutwildrnp_tar(path_coutwildrnp_tar, request): + """Make the ``path_coutwildrnp_tar`` fixture work with a + ``unittest.TestCase()``. ``uttc`` stands for unittest test case.""" + request.cls.path_coutwildrnp_tar = path_coutwildrnp_tar + + +@pytest.fixture(scope='class') +def uttc_path_coutwildrnp_json(path_coutwildrnp_json, request): + """Make the ``path_coutwildrnp_json`` fixture work with a + ``unittest.TestCase()``. ``uttc`` stands for unittest test case.""" + request.cls.path_coutwildrnp_json = path_coutwildrnp_json + + +@pytest.fixture(scope='class') +def uttc_data_dir(data_dir, request): + """Make the ``data_dir`` fixture work with a ``unittest.TestCase()``. + ``uttc`` stands for unittest test case.""" + request.cls.data_dir = data_dir + + +@pytest.fixture(scope='class') +def uttc_path_gpx(path_gpx, request): + """Make the ``path_gpx`` fixture work with a ``unittest.TestCase()``. + ``uttc`` stands for unittest test case.""" + request.cls.path_gpx = path_gpx + + +# GDAL 2.3.x silently converts ESRI WKT to OGC WKT +# The regular expression below will match against either +WGS84PATTERN = 'GEOGCS\["(?:GCS_WGS_1984|WGS 84)",DATUM\["WGS_1984",SPHEROID\["WGS[_ ]84"' + +# Define helpers to skip tests based on GDAL version +gdal_version = GDALVersion.runtime() + +requires_only_gdal1 = pytest.mark.skipif( + gdal_version.major != 1, + reason="Only relevant for GDAL 1.x") + +requires_gdal2 = pytest.mark.skipif( + not gdal_version.major >= 2, + reason="Requires GDAL 2.x") + +requires_gdal21 = pytest.mark.skipif( + not gdal_version.at_least('2.1'), + reason="Requires GDAL 2.1.x") + +requires_gdal22 = pytest.mark.skipif( + not gdal_version.at_least('2.2'), + reason="Requires GDAL 2.2.x") + +requires_gdal24 = pytest.mark.skipif( + not gdal_version.at_least('2.4'), + reason="Requires GDAL 2.4.x") + + +@pytest.fixture(scope="class") +def unittest_data_dir(data_dir, request): + """Makes data_dir available to unittest tests""" + request.cls.data_dir = data_dir + + +@pytest.fixture(scope="class") +def unittest_path_coutwildrnp_shp(path_coutwildrnp_shp, request): + """Makes shapefile path available to unittest tests""" + request.cls.path_coutwildrnp_shp = path_coutwildrnp_shp diff -Nru fiona-1.7.10/tests/data/coutwildrnp.cpg fiona-1.8.6/tests/data/coutwildrnp.cpg --- fiona-1.7.10/tests/data/coutwildrnp.cpg 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/data/coutwildrnp.cpg 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1 @@ +ISO-8859-1 diff -Nru fiona-1.7.10/tests/data/curves_line.csv fiona-1.8.6/tests/data/curves_line.csv --- fiona-1.7.10/tests/data/curves_line.csv 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/data/curves_line.csv 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,10 @@ +WKT,SHAPE_Length +"MULTILINESTRING ((-1.02439 48.4878,2.471545 48.45528))",3.49608621305261 +"MULTICURVE (COMPOUNDCURVE ((-0.9105691 47.21951,1.414634 47.17073),CIRCULARSTRING (1.414634 47.17073,2.423818 47.48377,1.407531 46.72668),(1.407531 46.72668,-0.9243407 46.72668)))",8.39459167219456 +"MULTICURVE (COMPOUNDCURVE (CIRCULARSTRING (-0.3902439 46.42109,0.2422325 45.78862,-0.3902439 45.15614,-1.02272 45.78862,-0.3902439 46.42109)))",3.97396663612273 +"MULTILINESTRING ((2.404137 38.88428,2.475991 38.93491,2.54878 
38.98351,2.622149 39.02986,2.69574 39.07372,2.769195 39.11488,2.842157 39.15314,2.914269 39.18832,2.98518 39.22023,3.054546 39.24874,3.122027 39.2737,3.187295 39.29498,3.250033 39.31248,3.309934 39.32613,3.366707 39.33584,3.420075 39.34158,3.469778 39.34331,3.515575 39.34104,3.557241 39.33476,3.594574 39.32451,3.627391 39.31034,3.655534 39.29232,3.678865 39.27053,3.697271 39.24509,3.710661 39.21612,3.71897 39.18375,3.722158 39.14814,3.72021 39.10948,3.713135 39.06794,3.700967 39.02373,3.683765 38.97707,3.661614 38.92818,3.634622 38.87729,3.602919 38.82467,3.566661 38.77056,3.526023 38.71523,3.481205 38.65895,3.432424 38.60199,3.379918 38.54462,3.323942 38.48714,3.26477 38.42982,3.20269 38.37293,3.138004 38.31676,3.071028 38.26158,3.002087 38.20766,2.931517 38.15525,2.859663 38.10463,2.786874 38.05602,2.713506 38.00968,2.639914 37.96582,2.566459 37.92466,2.493498 37.88639,2.421386 37.85122,2.350474 37.8193,2.281109 37.79079,2.213628 37.76584,2.148359 37.74456,2.085621 37.72705,2.02572 37.71341,1.968947 37.70369,1.915579 37.69795,1.865876 37.69622,1.82008 37.6985,1.778414 37.70477,1.741081 37.71502,1.708263 37.72919,1.68012 37.74721,1.656789 37.769,1.638384 37.79444,1.624994 37.82342,1.616684 37.85579,1.613496 37.89139,1.615444 37.93006,1.62252 37.97159,1.634688 38.0158,1.651889 38.06247,1.67404 38.11136,1.701033 38.16224,1.732735 38.21486,1.768994 38.26897,1.809631 38.3243,1.854449 38.38059,1.90323 38.43755,1.955737 38.49491,2.011712 38.55239,2.070884 38.60972,2.132964 38.6666,2.19765 38.72277,2.264627 38.77795,2.333568 38.83188,2.404137 38.88428))",5.67762431364471 +"MULTILINESTRING ((-0.6666667 44.03252,-0.6056813 44.10943,-0.5428571 44.18257,-0.4782797 44.25197,-0.4120346 44.31763,-0.3442073 44.37959,-0.2748833 44.43785,-0.2041482 44.49243,-0.1320875 44.54336,-0.0587867 44.59064,0.0156686 44.63431,0.091193 44.67437,0.167701 44.71084,0.2451069 44.74375,0.3233253 44.77311,0.4022706 44.79894,0.4818574 44.82125,0.5620001 44.84007,0.6426132 44.85542,0.7236111 44.8673,0.8049083 44.87574,0.8864194 44.88076,0.9680587 44.88238,1.049741 44.88061,1.13138 44.87547,1.212891 44.86698,1.294188 44.85516,1.375186 44.84003,1.455799 44.8216,1.535942 44.79989,1.615529 44.77492,1.694474 44.74671,1.772693 44.71528,1.850099 44.68064,1.926607 44.64281,2.002131 44.60182,2.076586 44.55767,2.149887 44.51039,2.221948 44.46,2.292683 44.4065))",3.34511332340398 +"MULTICURVE (COMPOUNDCURVE ((-1.300813 42.89431,0.3902439 43.31707),CIRCULARSTRING (0.3902439 43.31707,1.4163 43.74383,2.455285 43.34959),(2.455285 43.34959,2.455121 43.34941,2.454636 43.34885,2.453842 43.34794,2.452751 43.34666,2.451373 43.34503,2.44972 43.34305,2.447803 43.34073,2.445634 43.33807,2.443223 43.33507,2.440583 43.33175,2.437724 43.3281,2.434658 43.32413,2.431396 43.31985,2.42795 43.31526,2.42433 43.31037,2.420549 43.30517,2.416617 43.29968,2.412546 43.2939,2.408347 43.28784,2.404032 43.28149,2.399611 43.27487,2.395096 43.26799,2.390499 43.26083,2.385831 43.25342,2.381103 43.24575,2.376327 43.23783,2.371513 43.22966,2.366673 43.22126,2.361819 43.21262,2.356962 43.20374,2.352112 43.19465,2.347283 43.18533,2.342484 43.17579,2.337727 43.16605,2.333023 43.1561,2.328385 43.14594,2.323822 43.13559,2.319347 43.12505,2.314971 43.11433,2.310704 43.10342,2.30656 43.09233,2.302547 43.08108,2.29868 43.06965,2.294967 43.05807,2.291421 43.04633,2.288054 43.03443,2.284875 43.02239,2.281898 43.01021,2.279132 42.99789,2.27659 42.98543,2.274283 42.97285,2.272222 42.96015,2.270418 42.94733,2.268883 42.9344,2.267628 42.92136,2.266664 42.90821,2.266003 
42.89497,2.265657 42.88164,2.265635 42.86821,2.265951 42.85471,2.266614 42.84112,2.267637 42.82747,2.26903 42.81374,2.270806 42.79995,2.272975 42.7861,2.275548 42.7722,2.278538 42.75825,2.281955 42.74426,2.28581 42.73022,2.290116 42.71616,2.294883 42.70206,2.300123 42.68794,2.305847 42.6738,2.312066 42.65965,2.318792 42.64548,2.326036 42.63132,2.333809 42.61715,2.342123 42.60299,2.350989 42.58883,2.360418 42.5747,2.370422 42.56058,2.381012 42.54649,2.392199 42.53243,2.403995 42.5184,2.416411 42.50442,2.429458 42.49047,2.443148 42.47658,2.457492 42.46274,2.472501 42.44896,2.488187 42.43525,2.504561 42.4216,2.521634 42.40803,2.539418 42.39454,2.557924 42.38113,2.577163 42.36781,2.597146 42.35459,2.617886 42.34146),(2.617886 42.34146,2.636783 42.32997,2.656209 42.31853,2.676146 42.30716,2.696577 42.29584,2.717483 42.28458,2.738844 42.27338,2.760644 42.26223,2.782863 42.25113,2.805484 42.24007,2.828486 42.22907,2.851854 42.21811,2.875567 42.2072,2.899607 42.19633,2.923957 42.18549,2.948598 42.1747,2.97351 42.16394,2.998677 42.15321,3.024079 42.14252,3.049698 42.13186,3.075516 42.12122,3.101515 42.11062,3.127675 42.10003,3.153979 42.08947,3.180407 42.07893,3.206943 42.06841,3.233567 42.0579,3.260261 42.04741,3.287006 42.03694,3.313785 42.02647,3.340578 42.01602,3.367367 42.00557,3.394135 41.99512,3.420862 41.98468,3.44753 41.97424,3.474121 41.96381,3.500616 41.95336,3.526997 41.94292,3.553246 41.93247,3.579344 41.92201,3.605273 41.91154,3.631014 41.90106,3.656549 41.89057,3.681859 41.88006,3.706927 41.86953,3.731733 41.85898,3.75626 41.84841,3.780489 41.83782,3.804401 41.82721,3.827979 41.81656,3.851203 41.80589,3.874056 41.79519,3.896518 41.78445,3.918573 41.77368,3.9402 41.76288,3.961382 41.75203,3.982101 41.74115,4.002337 41.73022,4.022073 41.71925,4.041291 41.70823,4.059971 41.69717,4.078095 41.68605,4.095646 41.67488,4.112604 41.66366,4.128951 41.65239,4.144669 41.64105,4.15974 41.62966,4.174144 41.6182,4.187864 41.60669,4.200882 41.5951,4.213178 41.58345,4.224734 41.57174,4.235533 41.55995,4.245555 41.54808,4.254783 41.53615,4.263197 41.52413,4.27078 41.51204,4.277512 41.49987,4.283376 41.48762,4.288354 41.47528,4.292426 41.46285,4.295575 41.45034,4.297782 41.43774,4.299028 41.42504,4.299296 41.41226,4.298567 41.39937,4.296822 41.38639,4.294043 41.37331,4.290211 41.36013,4.285309 41.34685,4.279318 41.33346,4.272219 41.31996,4.263995 41.30635,4.254626 41.29264,4.244094 41.27881,4.232381 41.26486,4.219468 41.2508,4.205338 41.23662,4.189971 41.22232,4.17335 41.2079,4.155455 41.19336,4.136269 41.17868,4.115773 41.16388,4.093948 41.14896,4.070777 41.13389,4.046241 41.1187,4.020321 41.10337,3.993 41.0879,3.964258 41.07229,3.934077 41.05654,3.902439 41.04065),CIRCULARSTRING (3.902439 41.04065,1.775383 40.65987,0.3414634 42.27642)))",12.2623236074563 +"MULTILINESTRING ((-0.2762998 38.32375,-0.2637102 38.43947,-0.2447018 38.55117,-0.2193601 38.65833,-0.1877989 38.76047,-0.1501601 38.85714,-0.1066131 38.94789,-0.0573536 39.03233,-0.002603 39.11007,0.0573925 39.18076,0.1223631 39.24409,0.1920168 39.29976,0.2660403 39.34754,0.3441008 39.3872))",1.29261161044762 +"MULTICURVE (COMPOUNDCURVE (CIRCULARSTRING (-1.389372 40.02584,-1.109435 40.65503,-0.4250745 40.73184),CIRCULARSTRING (-0.4250745 40.73184,-0.2233581 40.09231,0.4014657 40.33579)),COMPOUNDCURVE (CIRCULARSTRING (0.9008338 40.26691,1.138662 40.45594,1.434641 40.38745)))",3.57349361227513 +"MULTILINESTRING ((1.383736 39.35035,1.012627 38.5647,0.5434618 37.97689,-0.0220862 37.58902))",2.3133339931156 diff -Nru fiona-1.7.10/tests/data/gre.cpg 
fiona-1.8.6/tests/data/gre.cpg --- fiona-1.7.10/tests/data/gre.cpg 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/data/gre.cpg 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1 @@ +UTF-8 Binary files /tmp/tmpPX9ih5/IN_8n7xqVQ/fiona-1.7.10/tests/data/gre.dbf and /tmp/tmpPX9ih5/z8sn6C7itX/fiona-1.8.6/tests/data/gre.dbf differ diff -Nru fiona-1.7.10/tests/data/gre.prj fiona-1.8.6/tests/data/gre.prj --- fiona-1.7.10/tests/data/gre.prj 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/data/gre.prj 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] \ No newline at end of file Binary files /tmp/tmpPX9ih5/IN_8n7xqVQ/fiona-1.7.10/tests/data/gre.shp and /tmp/tmpPX9ih5/z8sn6C7itX/fiona-1.8.6/tests/data/gre.shp differ Binary files /tmp/tmpPX9ih5/IN_8n7xqVQ/fiona-1.7.10/tests/data/gre.shx and /tmp/tmpPX9ih5/z8sn6C7itX/fiona-1.8.6/tests/data/gre.shx differ diff -Nru fiona-1.7.10/tests/data/issue627.geojson fiona-1.8.6/tests/data/issue627.geojson --- fiona-1.7.10/tests/data/issue627.geojson 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/data/issue627.geojson 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,1609 @@ +{ + "features": [{ + "geometry": { + "coordinates": [ + [ + [ + -111.73527526855469, + 41.995094299316406 + ], + [ + -111.65931701660156, + 41.99627685546875 + ], + [ + -111.6587142944336, + 41.9921875 + ], + [ + -111.65888977050781, + 41.95676803588867 + ], + [ + -111.67082977294922, + 41.91230010986328 + ], + [ + -111.67332458496094, + 41.905494689941406 + ], + [ + -111.67088317871094, + 41.90049362182617 + ], + [ + -111.66474914550781, + 41.893211364746094 + ], + [ + -111.6506576538086, + 41.875465393066406 + ], + [ + -111.64759826660156, + 41.87091827392578 + ], + [ + -111.64640808105469, + 41.86273956298828 + ], + [ + -111.64334869384766, + 41.858192443847656 + ], + [ + -111.63720703125, + 41.85499572753906 + ], + [ + -111.633544921875, + 41.847267150878906 + ], + [ + -111.63053894042969, + 41.83409118652344 + ], + [ + -111.6330337524414, + 41.82728576660156 + ], + [ + -111.63983154296875, + 41.8227653503418 + ], + [ + -111.6484603881836, + 41.82188034057617 + ], + [ + -111.66077423095703, + 41.82327651977539 + ], + [ + -111.6712417602539, + 41.82330322265625 + ], + [ + -111.67618560791016, + 41.82013702392578 + ], + [ + -111.68803405761719, + 41.78792953491211 + ], + [ + -111.69361114501953, + 41.77931594848633 + ], + [ + -111.70162200927734, + 41.77797317504883 + ], + [ + -111.70901489257812, + 41.77663040161133 + ], + [ + -111.71395111083984, + 41.772098541259766 + ], + [ + -111.71891784667969, + 41.763031005859375 + ], + [ + -111.72816467285156, + 41.75851058959961 + ], + [ + -111.74726104736328, + 41.75537109375 + ], + [ + -111.75650024414062, + 41.752662658691406 + ], + [ + -111.77067565917969, + 41.7445182800293 + ], + [ + -111.77064514160156, + 41.75495910644531 + ], + [ + -111.75585174560547, + 41.76219940185547 + ], + [ + -111.7330551147461, + 41.766693115234375 + ], + [ + -111.72749328613281, + 41.77212905883789 + ], + [ + -111.71883392333984, + 41.7834587097168 + ], + [ + -111.71080780029297, + 41.78889083862305 + ], + [ + -111.70340728759766, + 41.79250717163086 + ], + [ + -111.70030212402344, + 41.798404693603516 + ], + [ + -111.70210266113281, + 41.8088493347168 + ], + [ + -111.70760345458984, + 41.819759368896484 + ], + [ + -111.71312713623047, + 41.82340621948242 + ], + [ + -111.71929168701172, + 
41.82341766357422 + ], + [ + -111.72545623779297, + 41.8225212097168 + ], + [ + -111.7341537475586, + 41.803016662597656 + ], + [ + -111.740966796875, + 41.79213333129883 + ], + [ + -111.74531555175781, + 41.78215408325195 + ], + [ + -111.77122497558594, + 41.7658576965332 + ], + [ + -111.77056884765625, + 41.77811813354492 + ], + [ + -111.7662582397461, + 41.778106689453125 + ], + [ + -111.76746368408203, + 41.78628158569336 + ], + [ + -111.76253509521484, + 41.78627395629883 + ], + [ + -111.76241302490234, + 41.82259750366211 + ], + [ + -111.77104187011719, + 41.8221549987793 + ], + [ + -111.77161407470703, + 41.83351135253906 + ], + [ + -111.7333755493164, + 41.84524154663086 + ], + [ + -111.73274993896484, + 41.847511291503906 + ], + [ + -111.7376708984375, + 41.84979248046875 + ], + [ + -111.77157592773438, + 41.845767974853516 + ], + [ + -111.77215576171875, + 41.85802459716797 + ], + [ + -111.75243377685547, + 41.85844802856445 + ], + [ + -111.72467803955078, + 41.86384201049805 + ], + [ + -111.71109771728516, + 41.868804931640625 + ], + [ + -111.70182037353516, + 41.87604904174805 + ], + [ + -111.69624328613281, + 41.88193893432617 + ], + [ + -111.69497680664062, + 41.88874816894531 + ], + [ + -111.70053100585938, + 41.89057540893555 + ], + [ + -111.70793151855469, + 41.88923263549805 + ], + [ + -111.72091674804688, + 41.87972640991211 + ], + [ + -111.73388671875, + 41.87384796142578 + ], + [ + -111.75301361083984, + 41.86888885498047 + ], + [ + -111.75350952148438, + 41.90249252319336 + ], + [ + -111.74364471435547, + 41.90247344970703 + ], + [ + -111.74463653564453, + 41.967864990234375 + ], + [ + -111.7119369506836, + 41.96416473388672 + ], + [ + -111.69283294677734, + 41.95912551879883 + ], + [ + -111.68911743164062, + 41.96047592163086 + ], + [ + -111.6891098022461, + 41.96320343017578 + ], + [ + -111.69341278076172, + 41.96684646606445 + ], + [ + -111.70449829101562, + 41.972320556640625 + ], + [ + -111.7341079711914, + 41.97828674316406 + ], + [ + -111.73527526855469, + 41.995094299316406 + ] + ] + ], + "type": "Polygon" + }, + "id": "0", + "properties": { + "skip_me": [1, 2, 3, 4], + "AGBUR": "FS", + "AREA": 0.0179264, + "FEATURE1": "Wilderness", + "FEATURE2": null, + "NAME": "Mount Naomi Wilderness", + "PERIMETER": 1.22107, + "STATE": "UT", + "STATE_FIPS": "49", + "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi", + "WILDRNP020": 332 + }, + "type": "Feature" +},{ + "geometry": { + "coordinates": [ + [ + [ + -112.00384521484375, + 41.552703857421875 + ], + [ + -112.00446319580078, + 41.56586456298828 + ], + [ + -112.0112075805664, + 41.56586456298828 + ], + [ + -112.01121520996094, + 41.57902526855469 + ], + [ + -112.01734924316406, + 41.57902526855469 + ], + [ + -112.0173568725586, + 41.594459533691406 + ], + [ + -112.02779388427734, + 41.5940055847168 + ], + [ + -112.02779388427734, + 41.60171890258789 + ], + [ + -112.03945922851562, + 41.60126495361328 + ], + [ + -112.04007720947266, + 41.608524322509766 + ], + [ + -112.04744720458984, + 41.608524322509766 + ], + [ + -112.0474624633789, + 41.62804412841797 + ], + [ + -112.05974578857422, + 41.62758255004883 + ], + [ + -112.05975341796875, + 41.640296936035156 + ], + [ + -112.050537109375, + 41.64030075073242 + ], + [ + -112.05054473876953, + 41.64983367919922 + ], + [ + -112.04132843017578, + 41.64983367919922 + ], + [ + -112.04195404052734, + 41.66299819946289 + ], + [ + -112.05793762207031, + 41.662540435791016 + ], + [ + -112.0579605102539, + 41.692047119140625 + ], + [ + 
-112.07394409179688, + 41.692039489746094 + ], + [ + -112.07459259033203, + 41.72381591796875 + ], + [ + -112.06167602539062, + 41.72382354736328 + ], + [ + -112.0616683959961, + 41.71383285522461 + ], + [ + -112.05490112304688, + 41.713836669921875 + ], + [ + -112.04137420654297, + 41.71384048461914 + ], + [ + -112.04138946533203, + 41.7379035949707 + ], + [ + -112.0376968383789, + 41.74108123779297 + ], + [ + -112.03339385986328, + 41.741085052490234 + ], + [ + -112.02908325195312, + 41.729736328125 + ], + [ + -112.02599334716797, + 41.71657180786133 + ], + [ + -112.0241470336914, + 41.71157455444336 + ], + [ + -112.0272216796875, + 41.704769134521484 + ], + [ + -112.02413940429688, + 41.70068359375 + ], + [ + -112.01676177978516, + 41.69977951049805 + ], + [ + -112.01615142822266, + 41.7070426940918 + ], + [ + -112.00508117675781, + 41.707496643066406 + ], + [ + -112.00508117675781, + 41.66618347167969 + ], + [ + -111.9792709350586, + 41.6666374206543 + ], + [ + -111.9786605834961, + 41.653926849365234 + ], + [ + -111.96821594238281, + 41.65346908569336 + ], + [ + -111.96760559082031, + 41.6407585144043 + ], + [ + -111.96146392822266, + 41.6407585144043 + ], + [ + -111.96025085449219, + 41.61125183105469 + ], + [ + -111.95042419433594, + 41.61124801635742 + ], + [ + -111.94796752929688, + 41.60988235473633 + ], + [ + -111.94735717773438, + 41.60761260986328 + ], + [ + -111.9522705078125, + 41.60443878173828 + ], + [ + -111.96455383300781, + 41.60262680053711 + ], + [ + -111.9682388305664, + 41.60398864746094 + ], + [ + -111.9725341796875, + 41.60807418823242 + ], + [ + -111.97560119628906, + 41.60943603515625 + ], + [ + -111.97928619384766, + 41.61034393310547 + ], + [ + -111.98542785644531, + 41.609439849853516 + ], + [ + -111.98481750488281, + 41.58356475830078 + ], + [ + -111.97868347167969, + 41.58356857299805 + ], + [ + -111.97745513916016, + 41.570404052734375 + ], + [ + -111.97132110595703, + 41.57085418701172 + ], + [ + -111.97132110595703, + 41.56450271606445 + ], + [ + -111.98297882080078, + 41.564048767089844 + ], + [ + -111.98175811767578, + 41.54090118408203 + ], + [ + -111.98176574707031, + 41.53545379638672 + ], + [ + -112.00323486328125, + 41.53545379638672 + ], + [ + -112.00384521484375, + 41.552703857421875 + ] + ] + ], + "type": "Polygon" + }, + "id": "1", + "properties": { + "skip_me": [1, 2, 3, 4], + "AGBUR": "FS", + "AREA": 0.0104441, + "FEATURE1": "Wilderness", + "FEATURE2": null, + "NAME": "Wellsville Mountain Wilderness", + "PERIMETER": 0.755827, + "STATE": "UT", + "STATE_FIPS": "49", + "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Wellsville%20Mountain", + "WILDRNP020": 336 + }, + "type": "Feature" +},{ + "geometry": { + "coordinates": [ + [ + [ + -106.79289245605469, + 40.98352813720703 + ], + [ + -106.78697204589844, + 40.979736328125 + ], + [ + -106.77783966064453, + 40.98030471801758 + ], + [ + -106.76583862304688, + 40.976890563964844 + ], + [ + -106.74164581298828, + 40.98578643798828 + ], + [ + -106.73485565185547, + 40.98528289794922 + ], + [ + -106.72955322265625, + 40.981449127197266 + ], + [ + -106.70484924316406, + 40.97372817993164 + ], + [ + -106.69345092773438, + 40.964256286621094 + ], + [ + -106.68531799316406, + 40.95088577270508 + ], + [ + -106.6842269897461, + 40.9403190612793 + ], + [ + -106.67102813720703, + 40.9429817199707 + ], + [ + -106.65811157226562, + 40.9423828125 + ], + [ + -106.62374114990234, + 40.93569564819336 + ], + [ + -106.62068939208984, + 40.92386245727539 + ], + [ + -106.62251281738281, + 
40.91773986816406 + ], + [ + -106.62690734863281, + 40.91284942626953 + ], + [ + -106.63843536376953, + 40.909149169921875 + ], + [ + -106.63716888427734, + 40.90835189819336 + ], + [ + -106.63470458984375, + 40.90473937988281 + ], + [ + -106.63285064697266, + 40.90202713012695 + ], + [ + -106.62979125976562, + 40.89977264404297 + ], + [ + -106.62614440917969, + 40.8997917175293 + ], + [ + -106.62005615234375, + 40.8989143371582 + ], + [ + -106.61578369140625, + 40.897117614746094 + ], + [ + -106.60540771484375, + 40.891727447509766 + ], + [ + -106.59563446044922, + 40.886329650878906 + ], + [ + -106.59073638916016, + 40.882266998291016 + ], + [ + -106.5852279663086, + 40.87685012817383 + ], + [ + -106.5809097290039, + 40.86960983276367 + ], + [ + -106.57662963867188, + 40.86600112915039 + ], + [ + -106.57352447509766, + 40.85739517211914 + ], + [ + -106.5728759765625, + 40.85240936279297 + ], + [ + -106.57404327392578, + 40.84695816040039 + ], + [ + -106.57461547851562, + 40.842872619628906 + ], + [ + -106.5751953125, + 40.83924102783203 + ], + [ + -106.57881927490234, + 40.83650588989258 + ], + [ + -106.58184051513672, + 40.83467483520508 + ], + [ + -106.58241271972656, + 40.83013916015625 + ], + [ + -106.58171844482422, + 40.820159912109375 + ], + [ + -106.5792236328125, + 40.812007904052734 + ], + [ + -106.57978820800781, + 40.80610656738281 + ], + [ + -106.58213806152344, + 40.79656982421875 + ], + [ + -106.58269500732422, + 40.79066848754883 + ], + [ + -106.57960510253906, + 40.783878326416016 + ], + [ + -106.57835388183594, + 40.77935028076172 + ], + [ + -106.58013916015625, + 40.7752571105957 + ], + [ + -106.58430480957031, + 40.7652587890625 + ], + [ + -106.58670043945312, + 40.76071548461914 + ], + [ + -106.58545684814453, + 40.7570915222168 + ], + [ + -106.5848159790039, + 40.75346374511719 + ], + [ + -106.58174896240234, + 40.74894332885742 + ], + [ + -106.58234405517578, + 40.74803161621094 + ], + [ + -106.58658599853516, + 40.74755859375 + ], + [ + -106.59872436523438, + 40.747955322265625 + ], + [ + -106.60479736328125, + 40.748382568359375 + ], + [ + -106.60721588134766, + 40.74746322631836 + ], + [ + -106.60900115966797, + 40.74382400512695 + ], + [ + -106.60894775390625, + 40.73747634887695 + ], + [ + -106.6070785522461, + 40.73158645629883 + ], + [ + -106.60218811035156, + 40.727073669433594 + ], + [ + -106.5967025756836, + 40.72392654418945 + ], + [ + -106.59486389160156, + 40.72166442871094 + ], + [ + -106.5942153930664, + 40.716678619384766 + ], + [ + -106.5947494506836, + 40.70760726928711 + ], + [ + -106.5953140258789, + 40.70307159423828 + ], + [ + -106.59712219238281, + 40.70124816894531 + ], + [ + -106.6073989868164, + 40.69802474975586 + ], + [ + -106.6146469116211, + 40.6948127746582 + ], + [ + -106.61764526367188, + 40.69116973876953 + ], + [ + -106.61882019042969, + 40.68708038330078 + ], + [ + -106.6175765991211, + 40.6834602355957 + ], + [ + -106.61328887939453, + 40.678035736083984 + ], + [ + -106.60779571533203, + 40.673980712890625 + ], + [ + -106.59989929199219, + 40.6717529296875 + ], + [ + -106.59384155273438, + 40.67177963256836 + ], + [ + -106.59019470214844, + 40.67043685913086 + ], + [ + -106.5895767211914, + 40.66862487792969 + ], + [ + -106.59136962890625, + 40.66634750366211 + ], + [ + -106.59799194335938, + 40.66041946411133 + ], + [ + -106.59857177734375, + 40.65769577026367 + ], + [ + -106.59490203857422, + 40.65363311767578 + ], + [ + -106.59366607666016, + 40.65046310424805 + ], + [ + -106.59303283691406, + 40.647743225097656 + ], + [ + 
-106.59422302246094, + 40.645015716552734 + ], + [ + -106.59963989257812, + 40.640907287597656 + ], + [ + -106.60326385498047, + 40.639530181884766 + ], + [ + -106.61173248291016, + 40.63813018798828 + ], + [ + -106.61901092529297, + 40.638999938964844 + ], + [ + -106.62630462646484, + 40.64213943481445 + ], + [ + -106.63054656982422, + 40.642574310302734 + ], + [ + -106.63539123535156, + 40.64209747314453 + ], + [ + -106.63960266113281, + 40.63935470581055 + ], + [ + -106.64500427246094, + 40.63343048095703 + ], + [ + -106.6498031616211, + 40.628414154052734 + ], + [ + -106.6497802734375, + 40.625694274902344 + ], + [ + -106.63697052001953, + 40.615325927734375 + ], + [ + -106.63512420654297, + 40.612613677978516 + ], + [ + -106.63632202148438, + 40.61124801635742 + ], + [ + -106.64175415039062, + 40.608951568603516 + ], + [ + -106.64354705810547, + 40.606224060058594 + ], + [ + -106.64291381835938, + 40.6035041809082 + ], + [ + -106.6392593383789, + 40.60080337524414 + ], + [ + -106.63076782226562, + 40.599029541015625 + ], + [ + -106.61563873291016, + 40.59955978393555 + ], + [ + -106.60778045654297, + 40.600502014160156 + ], + [ + -106.6053466796875, + 40.599605560302734 + ], + [ + -106.60411071777344, + 40.596893310546875 + ], + [ + -106.60340881347656, + 40.58555603027344 + ], + [ + -106.60521697998047, + 40.58373260498047 + ], + [ + -106.61064147949219, + 40.58143997192383 + ], + [ + -106.613037109375, + 40.57870864868164 + ], + [ + -106.61483001708984, + 40.57643127441406 + ], + [ + -106.61785125732422, + 40.57505416870117 + ], + [ + -106.6250991821289, + 40.57365798950195 + ], + [ + -106.62934112548828, + 40.574546813964844 + ], + [ + -106.63177490234375, + 40.57634735107422 + ], + [ + -106.63542175292969, + 40.57769012451172 + ], + [ + -106.63965606689453, + 40.57721710205078 + ], + [ + -106.64266204833984, + 40.57538604736328 + ], + [ + -106.64685821533203, + 40.57128143310547 + ], + [ + -106.64983367919922, + 40.56536865234375 + ], + [ + -106.65398406982422, + 40.556278228759766 + ], + [ + -106.65937805175781, + 40.55035400390625 + ], + [ + -106.66238403320312, + 40.54852294921875 + ], + [ + -106.67203521728516, + 40.545753479003906 + ], + [ + -106.68049621582031, + 40.54389190673828 + ], + [ + -106.68592834472656, + 40.5429573059082 + ], + [ + -106.69017028808594, + 40.54338836669922 + ], + [ + -106.69866180419922, + 40.54606246948242 + ], + [ + -106.7083740234375, + 40.5491828918457 + ], + [ + -106.71505737304688, + 40.55232238769531 + ], + [ + -106.71932983398438, + 40.55592346191406 + ], + [ + -106.72181701660156, + 40.562713623046875 + ], + [ + -106.72555541992188, + 40.573123931884766 + ], + [ + -106.7274169921875, + 40.5776481628418 + ], + [ + -106.72985076904297, + 40.57944869995117 + ], + [ + -106.73593139648438, + 40.58168029785156 + ], + [ + -106.74137878417969, + 40.58164978027344 + ], + [ + -106.75103759765625, + 40.57978057861328 + ], + [ + -106.7564926147461, + 40.5802001953125 + ], + [ + -106.76197052001953, + 40.583343505859375 + ], + [ + -106.77957153320312, + 40.587772369384766 + ], + [ + -106.78685760498047, + 40.589542388916016 + ], + [ + -106.79415130615234, + 40.59267044067383 + ], + [ + -106.79841613769531, + 40.59490966796875 + ], + [ + -106.80142974853516, + 40.59353256225586 + ], + [ + -106.80381774902344, + 40.59079360961914 + ], + [ + -106.80681610107422, + 40.58805465698242 + ], + [ + -106.81224822998047, + 40.58711242675781 + ], + [ + -106.81649780273438, + 40.58799362182617 + ], + [ + -106.82198333740234, + 40.59158706665039 + ], + [ + 
-106.83171844482422, + 40.59560012817383 + ], + [ + -106.83780670166016, + 40.59873580932617 + ], + [ + -106.84148406982422, + 40.60234069824219 + ], + [ + -106.8421401977539, + 40.60687255859375 + ], + [ + -106.84039306640625, + 40.612327575683594 + ], + [ + -106.83265686035156, + 40.624168395996094 + ], + [ + -106.82845306396484, + 40.62737274169922 + ], + [ + -106.82423400878906, + 40.628761291503906 + ], + [ + -106.81940460205078, + 40.63060760498047 + ], + [ + -106.8182144165039, + 40.63242721557617 + ], + [ + -106.81886291503906, + 40.636051177978516 + ], + [ + -106.8231430053711, + 40.639652252197266 + ], + [ + -106.82379913330078, + 40.64373016357422 + ], + [ + -106.822021484375, + 40.647369384765625 + ], + [ + -106.8154296875, + 40.65376281738281 + ], + [ + -106.8112564086914, + 40.65923309326172 + ], + [ + -106.80648803710938, + 40.66606903076172 + ], + [ + -106.80532836914062, + 40.67106628417969 + ], + [ + -106.80181121826172, + 40.6815185546875 + ], + [ + -106.79650115966797, + 40.694252014160156 + ], + [ + -106.79470825195312, + 40.69698715209961 + ], + [ + -106.79168701171875, + 40.69791030883789 + ], + [ + -106.7862319946289, + 40.69749450683594 + ], + [ + -106.78018188476562, + 40.69843673706055 + ], + [ + -106.77717590332031, + 40.701175689697266 + ], + [ + -106.77783203125, + 40.70570755004883 + ], + [ + -106.78404235839844, + 40.71882629394531 + ], + [ + -106.78651428222656, + 40.72289276123047 + ], + [ + -106.78714752197266, + 40.725608825683594 + ], + [ + -106.78474426269531, + 40.7278938293457 + ], + [ + -106.77869415283203, + 40.72929000854492 + ], + [ + -106.7666244506836, + 40.73435592651367 + ], + [ + -106.76301574707031, + 40.737552642822266 + ], + [ + -106.76185607910156, + 40.742095947265625 + ], + [ + -106.75950622558594, + 40.749366760253906 + ], + [ + -106.75529479980469, + 40.753021240234375 + ], + [ + -106.7498779296875, + 40.75713348388672 + ], + [ + -106.74443054199219, + 40.75807189941406 + ], + [ + -106.73834228515625, + 40.75674819946289 + ], + [ + -106.72919464111328, + 40.75226593017578 + ], + [ + -106.72676849365234, + 40.75227737426758 + ], + [ + -106.71226501464844, + 40.75780487060547 + ], + [ + -106.70623016357422, + 40.76146697998047 + ], + [ + -106.7044448852539, + 40.764652252197266 + ], + [ + -106.70579528808594, + 40.77870559692383 + ], + [ + -106.70585632324219, + 40.784603118896484 + ], + [ + -106.70348358154297, + 40.7905158996582 + ], + [ + -106.7041244506836, + 40.79368591308594 + ], + [ + -106.70658111572266, + 40.79684829711914 + ], + [ + -106.71088409423828, + 40.801815032958984 + ], + [ + -106.71340942382812, + 40.81132507324219 + ], + [ + -106.71715545654297, + 40.821739196777344 + ], + [ + -106.71839904785156, + 40.8244514465332 + ], + [ + -106.72084045410156, + 40.825801849365234 + ], + [ + -106.73668670654297, + 40.83160400390625 + ], + [ + -106.7427978515625, + 40.834747314453125 + ], + [ + -106.75139617919922, + 40.84422302246094 + ], + [ + -106.7538833618164, + 40.84965133666992 + ], + [ + -106.75150299072266, + 40.8537483215332 + ], + [ + -106.74608612060547, + 40.858768463134766 + ], + [ + -106.74429321289062, + 40.86150360107422 + ], + [ + -106.74491882324219, + 40.863311767578125 + ], + [ + -106.74796295166016, + 40.86420440673828 + ], + [ + -106.75586700439453, + 40.86460494995117 + ], + [ + -106.76317596435547, + 40.86637878417969 + ], + [ + -106.76990509033203, + 40.870418548583984 + ], + [ + -106.77301788330078, + 40.87629699707031 + ], + [ + -106.77313232421875, + 40.887638092041016 + ], + [ + -106.77294158935547, 
+ 40.90522384643555 + ], + [ + -106.78055572509766, + 40.90708541870117 + ], + [ + -106.7989501953125, + 40.89576721191406 + ], + [ + -106.8059310913086, + 40.898101806640625 + ], + [ + -106.80464172363281, + 40.90327072143555 + ], + [ + -106.80973815917969, + 40.91682052612305 + ], + [ + -106.8172378540039, + 40.9182014465332 + ], + [ + -106.81767272949219, + 40.92233657836914 + ], + [ + -106.823486328125, + 40.925209045410156 + ], + [ + -106.83625793457031, + 40.924407958984375 + ], + [ + -106.84684753417969, + 40.92605209350586 + ], + [ + -106.85509490966797, + 40.940330505371094 + ], + [ + -106.8475570678711, + 40.96160888671875 + ], + [ + -106.84174346923828, + 40.970298767089844 + ], + [ + -106.83457946777344, + 40.97213363647461 + ], + [ + -106.82674407958984, + 40.9675407409668 + ], + [ + -106.81885528564453, + 40.97404479980469 + ], + [ + -106.79289245605469, + 40.98352813720703 + ] + ] + ], + "type": "Polygon" + }, + "id": "2", + "properties": { + "skip_me": [1, 2, 3, 4], + "AGBUR": "FS", + "AREA": 0.0714955, + "FEATURE1": "Wilderness", + "FEATURE2": null, + "NAME": "Mount Zirkel Wilderness", + "PERIMETER": 1.70851, + "STATE": "CO", + "STATE_FIPS": "08", + "URL": "http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Zirkel", + "WILDRNP020": 357 + }, + "type": "Feature" +}], + "type": "FeatureCollection" +} diff -Nru fiona-1.7.10/tests/data/test_tin.csv fiona-1.8.6/tests/data/test_tin.csv --- fiona-1.7.10/tests/data/test_tin.csv 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/data/test_tin.csv 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,3 @@ +WKT,id +"TIN (((0 0 0, 0 0 1, 0 1 0, 0 0 0)), ((0 0 0, 0 1 0, 1 1 0, 0 0 0)))",1 +"TRIANGLE((0 0 0,0 1 0,1 1 0,0 0 0))",2 Binary files /tmp/tmpPX9ih5/IN_8n7xqVQ/fiona-1.7.10/tests/data/test_tin.dbf and /tmp/tmpPX9ih5/z8sn6C7itX/fiona-1.8.6/tests/data/test_tin.dbf differ Binary files /tmp/tmpPX9ih5/IN_8n7xqVQ/fiona-1.7.10/tests/data/test_tin.shp and /tmp/tmpPX9ih5/z8sn6C7itX/fiona-1.8.6/tests/data/test_tin.shp differ Binary files /tmp/tmpPX9ih5/IN_8n7xqVQ/fiona-1.7.10/tests/data/test_tin.shx and /tmp/tmpPX9ih5/z8sn6C7itX/fiona-1.8.6/tests/data/test_tin.shx differ diff -Nru fiona-1.7.10/tests/fixtures.py fiona-1.8.6/tests/fixtures.py --- fiona-1.7.10/tests/fixtures.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/fixtures.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -import os.path - - -def read_file(name): - return open(os.path.join(os.path.dirname(__file__), name)).read() - -# GeoJSON feature collection on a single line -feature_collection = read_file('data/collection.txt') - -# Same as above but with pretty-print styling applied -feature_collection_pp = read_file('data/collection-pp.txt') - -# One feature per line -feature_seq = read_file('data/sequence.txt') - -# Same as above but each feature has pretty-print styling -feature_seq_pp_rs = read_file('data/sequence-pp.txt') diff -Nru fiona-1.7.10/tests/__init__.py fiona-1.8.6/tests/__init__.py --- fiona-1.7.10/tests/__init__.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/__init__.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,39 +1,10 @@ -import os +"""Do not delete! At least one of the ``unittest.TestCase()`` based tests do +a relative import inside the ``tests`` directory to use another test as a +base class. This file can probably be deleted if that condition is removed. 
-DATADIR = os.path.abspath('tests/data') -FILES = ['coutwildrnp.shp', 'coutwildrnp.shx', 'coutwildrnp.dbf', 'coutwildrnp.prj'] +For example: -def create_zipfile(zipfilename): - import zipfile - with zipfile.ZipFile(zipfilename, 'w') as zip: - for filename in FILES: - zip.write(os.path.join(DATADIR, filename), filename) - -def create_tarfile(tarfilename): - import tarfile - with tarfile.open(tarfilename, 'w') as tar: - for filename in FILES: - tar.add(os.path.join(DATADIR, filename), arcname='testing/%s' % filename) - -def create_jsonfile(jsonfilename): - import json - import fiona - with fiona.open(os.path.join(DATADIR, FILES[0]), 'r') as source: - features = [feat for feat in source] - my_layer = {'type': 'FeatureCollection', - 'features': features} - with open(jsonfilename, 'w') as f: - f.write(json.dumps(my_layer)) - -def setup(): - """Setup function for nosetests to create test files if they do not exist - """ - zipfile = os.path.join(DATADIR, 'coutwildrnp.zip') - tarfile = os.path.join(DATADIR, 'coutwildrnp.tar') - jsonfile = os.path.join(DATADIR, 'coutwildrnp.json') - if not os.path.exists(zipfile): - create_zipfile(zipfile) - if not os.path.exists(tarfile): - create_tarfile(tarfile) - if not os.path.exists(jsonfile): - create_jsonfile(jsonfile) + $ git grep 'from \.' | grep test + tests/test_layer.py:from .test_collection import TestReading + tests/test_vfs.py:from .test_collection import TestReading +""" diff -Nru fiona-1.7.10/tests/test_bigint.py fiona-1.8.6/tests/test_bigint.py --- fiona-1.7.10/tests/test_bigint.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_bigint.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,13 +1,4 @@ -import fiona -import os -import shutil -import tempfile -import unittest -from fiona.ogrext import calc_gdal_version_num, get_gdal_version_num - -""" - -OGR 54bit handling: https://trac.osgeo.org/gdal/wiki/rfc31_ogr_64 +"""OGR 64bit handling: https://trac.osgeo.org/gdal/wiki/rfc31_ogr_64 Shapefile: OFTInteger fields are created by default with a width of 9 characters, so to be unambiguously read as OFTInteger (and if specifying @@ -23,47 +14,56 @@ accordingly (and same for integer fields of size 19 or 20, in case of overflow of 64 bit integer, OFTReal is chosen) """ -class TestBigInt(unittest.TestCase): - def setUp(self): - self.tempdir = tempfile.mkdtemp() +import pytest + +import fiona +from fiona.env import calc_gdal_version_num, get_gdal_version_num - def tearDown(self): - shutil.rmtree(self.tempdir) - def testCreateBigIntSchema(self): - name = os.path.join(self.tempdir, 'output1.shp') - - a_bigint = 10 ** 18 - 1 - fieldname = 'abigint' - - kwargs = { - 'driver': 'ESRI Shapefile', - 'crs': 'EPSG:4326', - 'schema': { - 'geometry': 'Point', - 'properties': [(fieldname, 'int:10')]}} - if get_gdal_version_num() < calc_gdal_version_num(2, 0, 0): - with self.assertRaises(OverflowError): - with fiona.open(name, 'w', **kwargs) as dst: - rec = {} - rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)} - rec['properties'] = {fieldname: a_bigint} - dst.write(rec) - else: - - with fiona.open(name, 'w', **kwargs) as dst: - rec = {} - rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)} - rec['properties'] = {fieldname: a_bigint} - dst.write(rec) - - with fiona.open(name) as src: - if get_gdal_version_num() >= calc_gdal_version_num(2, 0, 0): - first = next(iter(src)) - self.assertEqual(first['properties'][fieldname], a_bigint) - - -if __name__ == "__main__": - # import sys;sys.argv = ['', 'Test.testName'] - unittest.main() 
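The width rules described in the docstring above are exactly what the rewritten test that follows exercises. As a minimal standalone sketch of the behavior (not part of the patch; the temporary path and field name are illustrative), assuming fiona 1.8 on GDAL 2+:

    import os
    import tempfile

    import fiona

    # 'int:10' exceeds the 9-character default width of shapefile integer
    # fields, so OGR reports the field as 64-bit (OFTInteger64) on GDAL 2+.
    schema = {"geometry": "Point", "properties": {"abigint": "int:10"}}
    path = os.path.join(tempfile.mkdtemp(), "bigint_sketch.shp")

    with fiona.open(path, "w", driver="ESRI Shapefile",
                    crs="EPSG:4326", schema=schema) as dst:
        dst.write({"geometry": {"type": "Point", "coordinates": (0.0, 0.0)},
                   "properties": {"abigint": 10 ** 18 - 1}})

    with fiona.open(path) as src:
        # The 18-digit value survives the round trip unmangled.
        assert next(iter(src))["properties"]["abigint"] == 10 ** 18 - 1

On GDAL 1.x the same write raises OverflowError, which is why the deleted unittest version above branched on get_gdal_version_num() and the pytest rewrite below is marked xfail for GDAL < 2.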
+@pytest.mark.xfail(fiona.gdal_version.major < 2, + reason="64-bit integer fields require GDAL 2+") +def testCreateBigIntSchema(tmpdir): + name = str(tmpdir.join('output1.shp')) + + a_bigint = 10 ** 18 - 1 + fieldname = 'abigint' + + kwargs = { + 'driver': 'ESRI Shapefile', + 'crs': 'EPSG:4326', + 'schema': { + 'geometry': 'Point', + 'properties': [(fieldname, 'int:10')]}} + + with fiona.open(name, 'w', **kwargs) as dst: + rec = {} + rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)} + rec['properties'] = {fieldname: a_bigint} + dst.write(rec) + + with fiona.open(name) as src: + if fiona.gdal_version >= (2, 0, 0): + first = next(iter(src)) + assert first['properties'][fieldname] == a_bigint + + +@pytest.mark.skipif(get_gdal_version_num() < calc_gdal_version_num(2, 0, 0), + reason="Test requires GDAL 2+") +@pytest.mark.parametrize('dtype', ['int', 'int64']) +def test_issue691(tmpdir, dtype): + """Type 'int' maps to 'int64'""" + schema = {'geometry': 'Any', 'properties': {'foo': dtype}} + with fiona.open( + str(tmpdir.join('test.shp')), 'w', driver='Shapefile', + schema=schema, crs='epsg:4326') as dst: + dst.write({ + 'type': 'Feature', + 'geometry': {'type': 'Point', + 'coordinates': (-122.278015, 37.868995)}, + 'properties': {'foo': 3694063472}}) + + with fiona.open(str(tmpdir.join('test.shp'))) as src: + assert src.schema['properties']['foo'] == 'int:18' + first = next(iter(src)) + assert first['properties']['foo'] == 3694063472 diff -Nru fiona-1.7.10/tests/test_binary_field.py fiona-1.8.6/tests/test_binary_field.py --- fiona-1.7.10/tests/test_binary_field.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_binary_field.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,42 @@ +import fiona + +import pytest +import struct +from collections import OrderedDict +from .conftest import requires_gpkg + + +@requires_gpkg +def test_binary_field(tmpdir): + meta = { + "driver": "GPKG", + "schema": { + "geometry": "Point", + "properties": OrderedDict([ + ("name", "str"), + ("data", "bytes"), + ]) + } + } + + # create some binary data + input_data = struct.pack("256B", *range(256)) + + # write the binary data to a BLOB field + filename = str(tmpdir.join("binary_test.gpkg")) + with fiona.open(filename, "w", **meta) as dst: + feature = { + "geometry": {"type": "Point", "coordinates": ((0, 0))}, + "properties": { + "name": "test", + u"data": input_data, + } + } + dst.write(feature) + + # read the data back and check consistency + with fiona.open(filename, "r") as src: + feature = next(iter(src)) + assert feature["properties"]["name"] == "test" + output_data = feature["properties"]["data"] + assert output_data == input_data diff -Nru fiona-1.7.10/tests/test_bytescollection.py fiona-1.8.6/tests/test_bytescollection.py --- fiona-1.7.10/tests/test_bytescollection.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_bytescollection.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,211 +1,199 @@ -# Testing BytesCollection -import sys -import unittest +"""Tests for ``fiona.BytesCollection()``.""" + import pytest import six import fiona -FIXME_WINDOWS = sys.platform.startswith('win') - -class ReadingTest(unittest.TestCase): - - def setUp(self): - with open('tests/data/coutwildrnp.json') as src: +class TestReading(object): + @pytest.fixture(autouse=True) + def bytes_collection_object(self, path_coutwildrnp_json): + with open(path_coutwildrnp_json) as src: bytesbuf = src.read().encode('utf-8') self.c = fiona.BytesCollection(bytesbuf) - - def tearDown(self): + yield self.c.close() - 
@unittest.skipIf(six.PY2, 'strings are bytes in Python 2') - def test_construct_with_str(self): - with open('tests/data/coutwildrnp.json') as src: + @pytest.mark.skipif(six.PY2, reason='strings are bytes in Python 2') + def test_construct_with_str(self, path_coutwildrnp_json): + with open(path_coutwildrnp_json) as src: strbuf = src.read() - self.assertRaises(ValueError, fiona.BytesCollection, strbuf) + with pytest.raises(ValueError): + fiona.BytesCollection(strbuf) - @unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") def test_open_repr(self): # I'm skipping checking the name of the virtual file as it is produced by uuid. - self.assertTrue(repr(self.c).startswith("<open BytesCollection '/vsimem/")) + assert repr(self.c).startswith("<open BytesCollection '/vsimem/") def test_name(self): - self.assertTrue(len(self.c.name) > 0) + assert len(self.c.name) > 0 def test_mode(self): - self.assertEqual(self.c.mode, 'r') + assert self.c.mode == 'r' - @unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") def test_collection(self): - self.assertEqual(self.c.encoding, 'utf-8') + assert self.c.encoding == 'utf-8' def test_iter(self): - self.assertTrue(iter(self.c)) + assert iter(self.c) def test_closed_no_iter(self): self.c.close() - self.assertRaises(ValueError, iter, self.c) + with pytest.raises(ValueError): + iter(self.c) def test_len(self): - self.assertEqual(len(self.c), 67) + assert len(self.c) == 67 def test_closed_len(self): # Len is lazy, it's never computed in this case. TODO? self.c.close() - self.assertEqual(len(self.c), 0) + assert len(self.c) == 0 def test_len_closed_len(self): # Lazy len is computed in this case and sticks. len(self.c) self.c.close() - self.assertEqual(len(self.c), 67) + assert len(self.c) == 67 def test_driver(self): - self.assertEqual(self.c.driver, "GeoJSON") + assert self.c.driver == "GeoJSON" def test_closed_driver(self): self.c.close() - self.assertEqual(self.c.driver, None) + assert self.c.driver is None def test_driver_closed_driver(self): self.c.driver self.c.close() - self.assertEqual(self.c.driver, "GeoJSON") + assert self.c.driver == "GeoJSON" def test_schema(self): s = self.c.schema['properties'] - self.assertEqual(s['PERIMETER'], "float") - self.assertEqual(s['NAME'], "str") - self.assertEqual(s['URL'], "str") - self.assertEqual(s['STATE_FIPS'], "str") - self.assertEqual(s['WILDRNP020'], "int") + assert s['PERIMETER'] == "float" + assert s['NAME'] == "str" + assert s['URL'] == "str" + assert s['STATE_FIPS'] == "str" + assert s['WILDRNP020'] == "int" def test_closed_schema(self): # Schema is lazy too, never computed in this case. TODO? self.c.close() - self.assertEqual(self.c.schema, None) + assert self.c.schema is None def test_schema_closed_schema(self): self.c.schema self.c.close() - self.assertEqual( - sorted(self.c.schema.keys()), - ['geometry', 'properties']) + assert sorted(self.c.schema.keys()) == ['geometry', 'properties'] def test_crs(self): - crs = self.c.crs - self.assertEqual(crs['init'], 'epsg:4326') + assert self.c.crs['init'] == 'epsg:4326' def test_crs_wkt(self): - crs = self.c.crs_wkt - self.assertTrue(crs.startswith('GEOGCS["WGS 84"')) + assert self.c.crs_wkt.startswith('GEOGCS["WGS 84"') def test_closed_crs(self): # Crs is lazy too, never computed in this case. TODO?
self.c.close() - self.assertEqual(self.c.crs, None) + assert self.c.crs is None def test_crs_closed_crs(self): self.c.crs self.c.close() - self.assertEqual( - sorted(self.c.crs.keys()), - ['init']) + assert sorted(self.c.crs.keys()) == ['init'] def test_meta(self): - self.assertEqual( - sorted(self.c.meta.keys()), - ['crs', 'crs_wkt', 'driver', 'schema']) + assert (sorted(self.c.meta.keys()) == + ['crs', 'crs_wkt', 'driver', 'schema']) def test_bounds(self): - self.assertAlmostEqual(self.c.bounds[0], -113.564247, 6) - self.assertAlmostEqual(self.c.bounds[1], 37.068981, 6) - self.assertAlmostEqual(self.c.bounds[2], -104.970871, 6) - self.assertAlmostEqual(self.c.bounds[3], 41.996277, 6) + assert self.c.bounds[0] == pytest.approx(-113.564247) + assert self.c.bounds[1] == pytest.approx(37.068981) + assert self.c.bounds[2] == pytest.approx(-104.970871) + assert self.c.bounds[3] == pytest.approx(41.996277) def test_iter_one(self): itr = iter(self.c) f = next(itr) - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_iter_list(self): f = list(self.c)[0] - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_re_iter_list(self): f = list(self.c)[0] # Run through iterator f = list(self.c)[0] # Run through a new, reset iterator - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_getitem_one(self): f = self.c[0] - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_no_write(self): - self.assertRaises(IOError, self.c.write, {}) + with pytest.raises(IOError): + self.c.write({}) def test_iter_items_list(self): i, f = list(self.c.items())[0] - self.assertEqual(i, 0) - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert i == 0 + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_iter_keys_list(self): i = list(self.c.keys())[0] - self.assertEqual(i, 0) + assert i == 0 def test_in_keys(self): - self.assertTrue(0 in self.c.keys()) - self.assertTrue(0 in self.c) + assert 0 in self.c.keys() + assert 0 in self.c -class FilterReadingTest(unittest.TestCase): - def setUp(self): - with open('tests/data/coutwildrnp.json') as src: +class TestFilterReading(object): + @pytest.fixture(autouse=True) + def bytes_collection_object(self, path_coutwildrnp_json): + with open(path_coutwildrnp_json) as src: bytesbuf = src.read().encode('utf-8') self.c = fiona.BytesCollection(bytesbuf) - - def tearDown(self): + yield self.c.close() def test_filter_1(self): results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0))) - self.assertEqual(len(results), 67) + assert len(results) == 67 f = results[0] - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_filter_reset(self): results = list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0))) - self.assertEqual(len(results), 26) + assert len(results) == 26 results = list(self.c.filter()) - self.assertEqual(len(results), 67) + assert len(results) == 67 def test_filter_mask(self): mask = { @@ -213,33 +201,30 @@ 'coordinates': ( ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)} results = 
list(self.c.filter(mask=mask)) - self.assertEqual(len(results), 26) - + assert len(results) == 26 -def test_zipped_bytes_collection(): - with open('tests/data/coutwildrnp.zip', 'rb') as src: - zip_file_bytes = src.read() - with fiona.BytesCollection(zip_file_bytes) as col: +def test_zipped_bytes_collection(bytes_coutwildrnp_zip): + """Open a zipped stream of bytes as a collection""" + with fiona.BytesCollection(bytes_coutwildrnp_zip) as col: assert col.name == 'coutwildrnp' + assert len(col) == 67 - -def test_grenada_bytes_geojson(): +@pytest.mark.skipif(fiona.gdal_version >= (2, 3, 0), + reason="Changed behavior with gdal 2.3, possibly related to RFC 70:" + "Guessing output format from output file name extension for utilities") +def test_grenada_bytes_geojson(bytes_grenada_geojson): """Read grenada.geojson as BytesCollection. grenada.geojson is an example of geojson that GDAL's GeoJSON driver will fail to read successfully unless the file's extension reflects its json'ness. """ - with open('tests/data/grenada.geojson', 'rb') as src: - bytes_grenada_geojson = src.read() - # We expect an exception if the GeoJSON driver isn't specified. with pytest.raises(fiona.errors.FionaValueError): with fiona.BytesCollection(bytes_grenada_geojson) as col: pass # If told what driver to use, we should be good. - with fiona.BytesCollection(bytes_grenada_geojson, driver='GeoJSON') as col: + with fiona.BytesCollection(bytes_grenada_geojson, driver='GeoJSON') as col: assert len(col) == 1 - diff -Nru fiona-1.7.10/tests/test_collection_crs.py fiona-1.8.6/tests/test_collection_crs.py --- fiona-1.7.10/tests/test_collection_crs.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_collection_crs.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,24 +1,36 @@ import os -import tempfile +import re import fiona import fiona.crs +from .conftest import WGS84PATTERN, requires_gdal2 -def test_collection_crs_wkt(): - with fiona.open('tests/data/coutwildrnp.shp') as src: - assert src.crs_wkt.startswith( - 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84"') +def test_collection_crs_wkt(path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp) as src: + assert re.match(WGS84PATTERN, src.crs_wkt) -def test_collection_no_crs_wkt(): + +def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp): """crs members of a dataset with no crs can be accessed safely.""" - tmpdir = tempfile.gettempdir() - filename = os.path.join(tmpdir, 'test.shp') - with fiona.open('tests/data/coutwildrnp.shp') as src: + filename = str(tmpdir.join("test.shp")) + with fiona.open(path_coutwildrnp_shp) as src: profile = src.meta del profile['crs'] del profile['crs_wkt'] with fiona.open(filename, 'w', **profile) as dst: assert dst.crs_wkt == "" assert dst.crs == {} + + +@requires_gdal2 +def test_collection_create_crs_wkt(tmpdir): + """A collection can be created using crs_wkt""" + filename = str(tmpdir.join("test.shp")) + wkt = 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]' + with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs_wkt=wkt, driver='GeoJSON') as dst: + assert dst.crs_wkt == wkt + + with fiona.open(filename) as col: + assert col.crs_wkt.startswith('GEOGCS["WGS 84') diff -Nru fiona-1.7.10/tests/test_collection_legacy.py fiona-1.8.6/tests/test_collection_legacy.py --- fiona-1.7.10/tests/test_collection_legacy.py 1970-01-01 00:00:00.000000000 +0000 +++ 
fiona-1.8.6/tests/test_collection_legacy.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,185 @@ +# Testing collections and workspaces + +import unittest +import re + +import pytest + +import fiona + +from .conftest import WGS84PATTERN + + +@pytest.mark.usefixtures("unittest_path_coutwildrnp_shp") +class ReadingTest(unittest.TestCase): + + def setUp(self): + self.c = fiona.open(self.path_coutwildrnp_shp, "r") + + def tearDown(self): + self.c.close() + + def test_open_repr(self): + assert ( + repr(self.c) == + ("<open Collection '{path}:coutwildrnp', mode 'r' at {hexid}>".format(hexid=hex(id(self.c)), path=self.path_coutwildrnp_shp))) + + def test_closed_repr(self): + self.c.close() + assert ( + repr(self.c) == + ("<closed Collection '{path}:coutwildrnp', mode 'r' at {hexid}>".format(hexid=hex(id(self.c)), path=self.path_coutwildrnp_shp))) + + def test_path(self): + assert self.c.path == self.path_coutwildrnp_shp + + def test_name(self): + assert self.c.name == 'coutwildrnp' + + def test_mode(self): + assert self.c.mode == 'r' + + def test_encoding(self): + assert self.c.encoding is None + + def test_iter(self): + assert iter(self.c) + + def test_closed_no_iter(self): + self.c.close() + with pytest.raises(ValueError): + iter(self.c) + + def test_len(self): + assert len(self.c) == 67 + + def test_closed_len(self): + # Len is lazy, it's never computed in this case. TODO? + self.c.close() + assert len(self.c) == 0 + + def test_len_closed_len(self): + # Lazy len is computed in this case and sticks. + len(self.c) + self.c.close() + assert len(self.c) == 67 + + def test_driver(self): + assert self.c.driver == "ESRI Shapefile" + + def test_closed_driver(self): + self.c.close() + assert self.c.driver is None + + def test_driver_closed_driver(self): + self.c.driver + self.c.close() + assert self.c.driver == "ESRI Shapefile" + + def test_schema(self): + s = self.c.schema['properties'] + assert s['PERIMETER'] == "float:24.15" + assert s['NAME'] == "str:80" + assert s['URL'] == "str:101" + assert s['STATE_FIPS'] == "str:80" + assert s['WILDRNP020'] == "int:10" + + def test_closed_schema(self): + # Schema is lazy too, never computed in this case. TODO? + self.c.close() + assert self.c.schema is None + + def test_schema_closed_schema(self): + self.c.schema + self.c.close() + assert sorted(self.c.schema.keys()) == ['geometry', 'properties'] + + def test_crs(self): + crs = self.c.crs + assert crs['init'] == 'epsg:4326' + + def test_crs_wkt(self): + crs = self.c.crs_wkt + assert re.match(WGS84PATTERN, crs) + + def test_closed_crs(self): + # Crs is lazy too, never computed in this case. TODO?
+ self.c.close() + assert self.c.crs is None + + def test_crs_closed_crs(self): + self.c.crs + self.c.close() + assert sorted(self.c.crs.keys()) == ['init'] + + def test_meta(self): + assert (sorted(self.c.meta.keys()) == + ['crs', 'crs_wkt', 'driver', 'schema']) + + def test_profile(self): + assert (sorted(self.c.profile.keys()) == + ['crs', 'crs_wkt', 'driver', 'schema']) + + def test_bounds(self): + assert self.c.bounds[0] == pytest.approx(-113.564247) + assert self.c.bounds[1] == pytest.approx(37.068981) + assert self.c.bounds[2] == pytest.approx(-104.970871) + assert self.c.bounds[3] == pytest.approx(41.996277) + + def test_context(self): + with fiona.open(self.path_coutwildrnp_shp, "r") as c: + assert c.name == 'coutwildrnp' + assert len(c) == 67 + assert c.closed + + def test_iter_one(self): + itr = iter(self.c) + f = next(itr) + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' + + def test_iter_list(self): + f = list(self.c)[0] + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' + + def test_re_iter_list(self): + f = list(self.c)[0] # Run through iterator + f = list(self.c)[0] # Run through a new, reset iterator + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' + + def test_getitem_one(self): + f = self.c[0] + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' + + def test_getitem_iter_combo(self): + i = iter(self.c) + f = next(i) + f = next(i) + assert f['id'] == "1" + f = self.c[0] + assert f['id'] == "0" + f = next(i) + assert f['id'] == "2" + + def test_no_write(self): + with pytest.raises(IOError): + self.c.write({}) + + def test_iter_items_list(self): + i, f = list(self.c.items())[0] + assert i == 0 + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' + + def test_iter_keys_list(self): + i = list(self.c.keys())[0] + assert i == 0 + + def test_in_keys(self): + assert 0 in self.c.keys() + assert 0 in self.c diff -Nru fiona-1.7.10/tests/test_collection.py fiona-1.8.6/tests/test_collection.py --- fiona-1.7.10/tests/test_collection.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_collection.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,296 +1,343 @@ # Testing collections and workspaces import datetime -import logging -import os -import shutil import sys -import subprocess -import tempfile -import unittest +import re + +import pytest import fiona from fiona.collection import Collection, supported_drivers +from fiona.env import getenv from fiona.errors import FionaValueError, DriverError +from .conftest import WGS84PATTERN -FIXME_WINDOWS = sys.platform.startswith('win') - -WILDSHP = 'tests/data/coutwildrnp.shp' - -logging.basicConfig(stream=sys.stderr, level=logging.INFO) -TEMPDIR = tempfile.gettempdir() - - -class SupportedDriversTest(unittest.TestCase): +class TestSupportedDrivers(object): def test_shapefile(self): - self.assertTrue("ESRI Shapefile" in supported_drivers) - self.assertEqual( - set(supported_drivers["ESRI Shapefile"]), set("raw")) + assert "ESRI Shapefile" in supported_drivers + assert set(supported_drivers["ESRI Shapefile"]) == set("raw") def test_map(self): - self.assertTrue("MapInfo File" in supported_drivers) - self.assertEqual( - set(supported_drivers["MapInfo File"]), set("raw")) + assert "MapInfo File" in supported_drivers + assert set(supported_drivers["MapInfo File"]) == set("raw") -class CollectionArgsTest(unittest.TestCase): +class TestCollectionArgs(object): def test_path(self): - self.assertRaises(TypeError, Collection, (0)) + with pytest.raises(TypeError): + 
Collection(0) def test_mode(self): - self.assertRaises(TypeError, Collection, ("foo"), mode=0) + with pytest.raises(TypeError): + Collection("foo", mode=0) def test_driver(self): - self.assertRaises(TypeError, Collection, ("foo"), mode='w', driver=1) + with pytest.raises(TypeError): + Collection("foo", mode='w', driver=1) def test_schema(self): - self.assertRaises( - TypeError, Collection, ("foo"), mode='w', - driver="ESRI Shapefile", schema=1) + with pytest.raises(TypeError): + Collection("foo", mode='w', driver="ESRI Shapefile", schema=1) def test_crs(self): - self.assertRaises( - TypeError, Collection, ("foo"), mode='w', - driver="ESRI Shapefile", schema=0, crs=1) + with pytest.raises(TypeError): + Collection("foo", mode='w', driver="ESRI Shapefile", schema=0, + crs=1) def test_encoding(self): - self.assertRaises( - TypeError, Collection, ("foo"), mode='r', - encoding=1) + with pytest.raises(TypeError): + Collection("foo", mode='r', encoding=1) def test_layer(self): - self.assertRaises( - TypeError, Collection, ("foo"), mode='r', - layer=0.5) + with pytest.raises(TypeError): + Collection("foo", mode='r', layer=0.5) def test_vsi(self): - self.assertRaises( - TypeError, Collection, ("foo"), mode='r', - vsi='git') + with pytest.raises(TypeError): + Collection("foo", mode='r', vsi='git') def test_archive(self): - self.assertRaises( - TypeError, Collection, ("foo"), mode='r', - archive=1) + with pytest.raises(TypeError): + Collection("foo", mode='r', archive=1) def test_write_numeric_layer(self): - self.assertRaises(ValueError, Collection, ("foo"), mode='w', layer=1) + with pytest.raises(ValueError): + Collection("foo", mode='w', layer=1) def test_write_geojson_layer(self): - self.assertRaises(ValueError, Collection, ("foo"), mode='w', driver='GeoJSON', layer='foo') + with pytest.raises(ValueError): + Collection("foo", mode='w', driver='GeoJSON', layer='foo') def test_append_geojson(self): - self.assertRaises(ValueError, Collection, ("foo"), mode='w', driver='ARCGEN') + with pytest.raises(ValueError): + Collection("foo", mode='w', driver='ARCGEN') -class OpenExceptionTest(unittest.TestCase): +class TestOpenException(object): def test_no_archive(self): - self.assertRaises(IOError, fiona.open, ("/"), mode='r', vfs="zip:///foo.zip") - + with pytest.raises(DriverError): + fiona.open("/", mode='r', vfs="zip:///foo.zip") -class ReadingTest(unittest.TestCase): - def setUp(self): - self.c = fiona.open(WILDSHP, "r") +class TestReading(object): + @pytest.fixture(autouse=True) + def shapefile(self, path_coutwildrnp_shp): + self.c = fiona.open(path_coutwildrnp_shp, "r") + yield + self.c.close() + + def test_open_repr(self, path_coutwildrnp_shp): + assert ( + repr(self.c) == + ("<open Collection '{path}:coutwildrnp', mode 'r' " + "at {hexid}>".format(hexid=hex(id(self.c)), + path=path_coutwildrnp_shp))) + + def test_closed_repr(self, path_coutwildrnp_shp): + self.c.close() + assert ( + repr(self.c) == + ("<closed Collection '{path}:coutwildrnp', mode 'r' " + "at {hexid}>".format(hexid=hex(id(self.c)), + path=path_coutwildrnp_shp))) - def tearDown(self): - self.c.close() - - def test_open_repr(self): - self.assertEqual( - repr(self.c), - ("<open Collection 'tests/data/coutwildrnp.shp:coutwildrnp', mode 'r' " - "at %s>" % hex(id(self.c)))) - - def test_closed_repr(self): - self.c.close() - self.assertEqual( - repr(self.c), - ("<closed Collection 'tests/data/coutwildrnp.shp:coutwildrnp', mode 'r' " - "at %s>" % hex(id(self.c)))) - - def test_path(self): - self.assertEqual(self.c.path, WILDSHP) + def test_path(self, path_coutwildrnp_shp): + assert self.c.path == path_coutwildrnp_shp def test_name(self): - self.assertEqual(self.c.name, 'coutwildrnp') + assert self.c.name == 'coutwildrnp' def test_mode(self): - self.assertEqual(self.c.mode, 'r') + assert self.c.mode == 'r' - def
test_collection(self): - self.assertEqual(self.c.encoding, 'iso-8859-1') + def test_encoding(self): + assert self.c.encoding is None def test_iter(self): - self.assertTrue(iter(self.c)) + assert iter(self.c) def test_closed_no_iter(self): self.c.close() - self.assertRaises(ValueError, iter, self.c) + with pytest.raises(ValueError): + iter(self.c) def test_len(self): - self.assertEqual(len(self.c), 67) + assert len(self.c) == 67 def test_closed_len(self): # Len is lazy, it's never computed in this case. TODO? self.c.close() - self.assertEqual(len(self.c), 0) + assert len(self.c) == 0 def test_len_closed_len(self): # Lazy len is computed in this case and sticks. len(self.c) self.c.close() - self.assertEqual(len(self.c), 67) + assert len(self.c) == 67 def test_driver(self): - self.assertEqual(self.c.driver, "ESRI Shapefile") + assert self.c.driver == "ESRI Shapefile" def test_closed_driver(self): self.c.close() - self.assertEqual(self.c.driver, None) + assert self.c.driver is None def test_driver_closed_driver(self): self.c.driver self.c.close() - self.assertEqual(self.c.driver, "ESRI Shapefile") + assert self.c.driver == "ESRI Shapefile" def test_schema(self): s = self.c.schema['properties'] - self.assertEqual(s['PERIMETER'], "float:24.15") - self.assertEqual(s['NAME'], "str:80") - self.assertEqual(s['URL'], "str:101") - self.assertEqual(s['STATE_FIPS'], "str:80") - self.assertEqual(s['WILDRNP020'], "int:10") + assert s['PERIMETER'] == "float:24.15" + assert s['NAME'] == "str:80" + assert s['URL'] == "str:101" + assert s['STATE_FIPS'] == "str:80" + assert s['WILDRNP020'] == "int:10" def test_closed_schema(self): # Schema is lazy too, never computed in this case. TODO? self.c.close() - self.assertEqual(self.c.schema, None) + assert self.c.schema is None def test_schema_closed_schema(self): self.c.schema self.c.close() - self.assertEqual( - sorted(self.c.schema.keys()), - ['geometry', 'properties']) + assert sorted(self.c.schema.keys()) == ['geometry', 'properties'] def test_crs(self): crs = self.c.crs - self.assertEqual(crs['init'], 'epsg:4326') + assert crs['init'] == 'epsg:4326' def test_crs_wkt(self): crs = self.c.crs_wkt - self.assertTrue(crs.startswith('GEOGCS["GCS_WGS_1984"')) + assert re.match(WGS84PATTERN, crs) def test_closed_crs(self): # Crs is lazy too, never computed in this case. TODO? 
self.c.close() - self.assertEqual(self.c.crs, None) + assert self.c.crs is None def test_crs_closed_crs(self): self.c.crs self.c.close() - self.assertEqual( - sorted(self.c.crs.keys()), - ['init']) + assert sorted(self.c.crs.keys()) == ['init'] def test_meta(self): - self.assertEqual( - sorted(self.c.meta.keys()), - ['crs', 'crs_wkt', 'driver', 'schema']) + assert (sorted(self.c.meta.keys()) == + ['crs', 'crs_wkt', 'driver', 'schema']) def test_profile(self): - self.assertEqual( - sorted(self.c.profile.keys()), - ['crs', 'crs_wkt', 'driver', 'schema']) + assert (sorted(self.c.profile.keys()) == + ['crs', 'crs_wkt', 'driver', 'schema']) def test_bounds(self): - self.assertAlmostEqual(self.c.bounds[0], -113.564247, 6) - self.assertAlmostEqual(self.c.bounds[1], 37.068981, 6) - self.assertAlmostEqual(self.c.bounds[2], -104.970871, 6) - self.assertAlmostEqual(self.c.bounds[3], 41.996277, 6) - - def test_context(self): - with fiona.open(WILDSHP, "r") as c: - self.assertEqual(c.name, 'coutwildrnp') - self.assertEqual(len(c), 67) - self.assertEqual(c.closed, True) + assert self.c.bounds[0] == pytest.approx(-113.564247) + assert self.c.bounds[1] == pytest.approx(37.068981) + assert self.c.bounds[2] == pytest.approx(-104.970871) + assert self.c.bounds[3] == pytest.approx(41.996277) + + def test_context(self, path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp, "r") as c: + assert c.name == 'coutwildrnp' + assert len(c) == 67 + assert c.crs + assert c.closed def test_iter_one(self): itr = iter(self.c) f = next(itr) - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_iter_list(self): f = list(self.c)[0] - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_re_iter_list(self): f = list(self.c)[0] # Run through iterator f = list(self.c)[0] # Run through a new, reset iterator - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_getitem_one(self): f = self.c[0] - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_getitem_iter_combo(self): i = iter(self.c) f = next(i) f = next(i) - self.assertEqual(f['id'], "1") + assert f['id'] == "1" f = self.c[0] - self.assertEqual(f['id'], "0") + assert f['id'] == "0" f = next(i) - self.assertEqual(f['id'], "2") + assert f['id'] == "2" def test_no_write(self): - self.assertRaises(IOError, self.c.write, {}) + with pytest.raises(IOError): + self.c.write({}) def test_iter_items_list(self): i, f = list(self.c.items())[0] - self.assertEqual(i, 0) - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert i == 0 + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_iter_keys_list(self): i = list(self.c.keys())[0] - self.assertEqual(i, 0) + assert i == 0 def test_in_keys(self): - self.assertTrue(0 in self.c.keys()) - self.assertTrue(0 in self.c) - - -class FilterReadingTest(unittest.TestCase): + assert 0 in self.c.keys() + assert 0 in self.c - def setUp(self): - self.c = fiona.open(WILDSHP, "r") - def tearDown(self): +class TestReadingPathTest(object): + def test_open_path(self, path_coutwildrnp_shp): + pathlib = pytest.importorskip("pathlib") + with fiona.open(pathlib.Path(path_coutwildrnp_shp)) 
as collection: + assert collection.name == 'coutwildrnp' + + +@pytest.mark.usefixtures("unittest_path_coutwildrnp_shp") +class TestIgnoreFieldsAndGeometry(object): + + def test_without_ignore(self): + with fiona.open(self.path_coutwildrnp_shp, "r") as collection: + assert("AREA" in collection.schema["properties"].keys()) + assert("STATE" in collection.schema["properties"].keys()) + assert("NAME" in collection.schema["properties"].keys()) + assert("geometry" in collection.schema.keys()) + + feature = next(iter(collection)) + assert(feature["properties"]["AREA"] is not None) + assert(feature["properties"]["STATE"] is not None) + assert(feature["properties"]["NAME"] is not None) + assert(feature["geometry"] is not None) + + def test_ignore_fields(self): + with fiona.open(self.path_coutwildrnp_shp, "r", ignore_fields=["AREA", "STATE"]) as collection: + assert("AREA" not in collection.schema["properties"].keys()) + assert("STATE" not in collection.schema["properties"].keys()) + assert("NAME" in collection.schema["properties"].keys()) + assert("geometry" in collection.schema.keys()) + + feature = next(iter(collection)) + assert("AREA" not in feature["properties"].keys()) + assert("STATE" not in feature["properties"].keys()) + assert(feature["properties"]["NAME"] is not None) + assert(feature["geometry"] is not None) + + def test_ignore_invalid_field_missing(self): + with fiona.open(self.path_coutwildrnp_shp, "r", ignore_fields=["DOES_NOT_EXIST"]): + pass + + def test_ignore_invalid_field_not_string(self): + with pytest.raises(TypeError): + with fiona.open(self.path_coutwildrnp_shp, "r", ignore_fields=[42]): + pass + + def test_ignore_geometry(self): + with fiona.open(self.path_coutwildrnp_shp, "r", ignore_geometry=True) as collection: + assert("AREA" in collection.schema["properties"].keys()) + assert("STATE" in collection.schema["properties"].keys()) + assert("NAME" in collection.schema["properties"].keys()) + assert("geometry" not in collection.schema.keys()) + + feature = next(iter(collection)) + assert(feature["properties"]["AREA"] is not None) + assert(feature["properties"]["STATE"] is not None) + assert(feature["properties"]["NAME"] is not None) + assert("geometry" not in feature.keys()) + + +class TestFilterReading(object): + @pytest.fixture(autouse=True) + def shapefile(self, path_coutwildrnp_shp): + self.c = fiona.open(path_coutwildrnp_shp, "r") + yield self.c.close() def test_filter_1(self): results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0))) - self.assertEqual(len(results), 67) + assert len(results) == 67 f = results[0] - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' def test_filter_reset(self): results = list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0))) - self.assertEqual(len(results), 26) + assert len(results) == 26 results = list(self.c.filter()) - self.assertEqual(len(results), 67) + assert len(results) == 67 def test_filter_mask(self): mask = { @@ -298,53 +345,258 @@ 'coordinates': ( ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)} results = list(self.c.filter(mask=mask)) - self.assertEqual(len(results), 26) + assert len(results) == 26 -class UnsupportedDriverTest(unittest.TestCase): +class TestUnsupportedDriver(object): - def test_immediate_fail_driver(self): + def test_immediate_fail_driver(self, tmpdir): schema = { 'geometry': 'Point', 'properties': {'label': 'str', u'verit\xe9': 'int'}} - self.assertRaises( - DriverError, - fiona.open, 
os.path.join(TEMPDIR, "foo"), "w", "Bogus", schema=schema) - -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test isn't working. There is a codepage issue regarding Windows-1252 and UTF-8. ") -class GenericWritingTest(unittest.TestCase): - - @classmethod - def setUpClass(self): - self.tempdir = tempfile.mkdtemp() + with pytest.raises(DriverError): + fiona.open(str(tmpdir.join("foo")), "w", "Bogus", schema=schema) + + +@pytest.mark.iconv +class TestGenericWritingTest(object): + @pytest.fixture(autouse=True) + def no_iter_shp(self, tmpdir): schema = { 'geometry': 'Point', 'properties': [('label', 'str'), (u'verit\xe9', 'int')]} - self.c = fiona.open(os.path.join(self.tempdir, "test-no-iter.shp"), + self.c = fiona.open(str(tmpdir.join("test-no-iter.shp")), 'w', driver="ESRI Shapefile", schema=schema, encoding='Windows-1252') - - @classmethod - def tearDownClass(self): + yield self.c.close() - shutil.rmtree(self.tempdir) def test_encoding(self): - self.assertEqual(self.c.encoding, 'Windows-1252') + assert self.c.encoding == 'Windows-1252' def test_no_iter(self): - self.assertRaises(IOError, iter, self.c) + with pytest.raises(IOError): + iter(self.c) def test_no_filter(self): - self.assertRaises(IOError, self.c.filter) + with pytest.raises(IOError): + self.c.filter() + + +class TestPropertiesNumberFormatting(object): + @pytest.fixture(autouse=True) + def shapefile(self, tmpdir): + self.filename = str(tmpdir.join("properties_number_formatting_test")) + + _records_with_float_property1 = [ + { + 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, + 'properties': {'property1': 12.22} + }, + { + 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.2)}, + 'properties': {'property1': 12.88} + } + ] + + _records_with_float_property1_as_string = [ + { + 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, + 'properties': {'property1': '12.22'} + }, + { + 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.2)}, + 'properties': {'property1': '12.88'} + } + ] + + _records_with_invalid_number_property1 = [ + { + 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.3)}, + 'properties': {'property1': 'invalid number'} + } + ] + def _write_collection(self, records, schema, driver): + with fiona.open( + self.filename, + "w", + driver=driver, + schema=schema, + crs='epsg:4326', + encoding='utf-8' + ) as c: + c.writerecords(records) + + def test_shape_driver_truncates_float_property_to_requested_int_format(self): + driver = "ESRI Shapefile" + self._write_collection( + self._records_with_float_property1, + {'geometry': 'Point', 'properties': [('property1', 'int')]}, + driver + ) + + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 2 == len(c) + + rf1, rf2 = list(c) + + assert 12 == rf1['properties']['property1'] + assert 12 == rf2['properties']['property1'] + + def test_shape_driver_rounds_float_property_to_requested_digits_number(self): + driver = "ESRI Shapefile" + self._write_collection( + self._records_with_float_property1, + {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, + driver + ) + + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 2 == len(c) + + rf1, rf2 = list(c) + + assert 12.2 == rf1['properties']['property1'] + assert 12.9 == rf2['properties']['property1'] + + def test_string_is_converted_to_number_and_truncated_to_requested_int_by_shape_driver(self): + driver = "ESRI Shapefile" + self._write_collection( + self._records_with_float_property1_as_string, + {'geometry': 
'Point', 'properties': [('property1', 'int')]}, + driver + ) -class PointWritingTest(unittest.TestCase): + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 2 == len(c) + + rf1, rf2 = list(c) + + assert 12 == rf1['properties']['property1'] + assert 12 == rf2['properties']['property1'] + + def test_string_is_converted_to_number_and_rounded_to_requested_digits_number_by_shape_driver(self): + driver = "ESRI Shapefile" + self._write_collection( + self._records_with_float_property1_as_string, + {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, + driver + ) + + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 2 == len(c) + + rf1, rf2 = list(c) + + assert 12.2 == rf1['properties']['property1'] + assert 12.9 == rf2['properties']['property1'] + + def test_invalid_number_is_converted_to_0_and_written_by_shape_driver(self): + driver = "ESRI Shapefile" + self._write_collection( + self._records_with_invalid_number_property1, + # {'geometry': 'Point', 'properties': [('property1', 'int')]}, + {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, + driver + ) + + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 1 == len(c) + + rf1 = c[0] + + assert 0 == rf1['properties']['property1'] + + def test_geojson_driver_truncates_float_property_to_requested_int_format(self): + driver = "GeoJSON" + self._write_collection( + self._records_with_float_property1, + {'geometry': 'Point', 'properties': [('property1', 'int')]}, + driver + ) + + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 2 == len(c) + + rf1, rf2 = list(c) - def setUp(self): - self.tempdir = tempfile.mkdtemp() - self.filename = os.path.join(self.tempdir, "point_writing_test.shp") + assert 12 == rf1['properties']['property1'] + assert 12 == rf2['properties']['property1'] + + def test_geojson_driver_does_not_round_float_property_to_requested_digits_number(self): + driver = "GeoJSON" + self._write_collection( + self._records_with_float_property1, + {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, + driver + ) + + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 2 == len(c) + + rf1, rf2 = list(c) + + # **************************************** + # FLOAT FORMATTING IS NOT RESPECTED... + assert 12.22 == rf1['properties']['property1'] + assert 12.88 == rf2['properties']['property1'] + + def test_string_is_converted_to_number_and_truncated_to_requested_int_by_geojson_driver(self): + driver = "GeoJSON" + self._write_collection( + self._records_with_float_property1_as_string, + {'geometry': 'Point', 'properties': [('property1', 'int')]}, + driver + ) + + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 2 == len(c) + + rf1, rf2 = list(c) + + assert 12 == rf1['properties']['property1'] + assert 12 == rf2['properties']['property1'] + + def test_string_is_converted_to_number_but_not_rounded_to_requested_digits_number_by_geojson_driver(self): + driver = "GeoJSON" + self._write_collection( + self._records_with_float_property1_as_string, + {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, + driver + ) + + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 2 == len(c) + + rf1, rf2 = list(c) + + # **************************************** + # FLOAT FORMATTING IS NOT RESPECTED... 
+ assert 12.22 == rf1['properties']['property1'] + assert 12.88 == rf2['properties']['property1'] + + def test_invalid_number_is_converted_to_0_and_written_by_geojson_driver(self): + driver = "GeoJSON" + self._write_collection( + self._records_with_invalid_number_property1, + # {'geometry': 'Point', 'properties': [('property1', 'int')]}, + {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, + driver + ) + + with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + assert 1 == len(c) + + rf1 = c[0] + + assert 0 == rf1['properties']['property1'] + + +class TestPointWriting(object): + @pytest.fixture(autouse=True) + def shapefile(self, tmpdir): + self.filename = str(tmpdir.join("point_writing_test.shp")) self.sink = fiona.open( self.filename, "w", @@ -354,36 +606,29 @@ 'properties': [('title', 'str'), ('date', 'date')]}, crs='epsg:4326', encoding='utf-8') - - def tearDown(self): + yield self.sink.close() - shutil.rmtree(self.tempdir) - def test_cpg(self): + def test_cpg(self, tmpdir): """Requires GDAL 1.9""" self.sink.close() - self.assertTrue(open(os.path.join( - self.tempdir, "point_writing_test.cpg")).readline() == 'UTF-8') + encoding = tmpdir.join("point_writing_test.cpg").read() + assert encoding == "UTF-8" def test_write_one(self): - self.assertEqual(len(self.sink), 0) - self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) + assert len(self.sink) == 0 + assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) f = { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, 'properties': {'title': 'point one', 'date': "2012-01-29"}} self.sink.writerecords([f]) - self.assertEqual(len(self.sink), 1) - self.assertEqual(self.sink.bounds, (0.0, 0.1, 0.0, 0.1)) + assert len(self.sink) == 1 + assert self.sink.bounds == (0.0, 0.1, 0.0, 0.1) self.sink.close() - info = subprocess.check_output( - ["ogrinfo", self.filename, "point_writing_test"]) - self.assertTrue( - 'date (Date) = 2012/01/29' in info.decode('utf-8'), - info) def test_write_two(self): - self.assertEqual(len(self.sink), 0) - self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) + assert len(self.sink) == 0 + assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) f1 = { 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, 'properties': {'title': 'point one', 'date': "2012-01-29"}} @@ -391,18 +636,18 @@ 'geometry': {'type': 'Point', 'coordinates': (0.0, -0.1)}, 'properties': {'title': 'point two', 'date': "2012-01-29"}} self.sink.writerecords([f1, f2]) - self.assertEqual(len(self.sink), 2) - self.assertEqual(self.sink.bounds, (0.0, -0.1, 0.0, 0.1)) + assert len(self.sink) == 2 + assert self.sink.bounds == (0.0, -0.1, 0.0, 0.1) def test_write_one_null_geom(self): - self.assertEqual(len(self.sink), 0) - self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) + assert len(self.sink) == 0 + assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) f = { 'geometry': None, 'properties': {'title': 'point one', 'date': "2012-01-29"}} self.sink.writerecords([f]) - self.assertEqual(len(self.sink), 1) - self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) + assert len(self.sink) == 1 + assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) def test_validate_record(self): fvalid = { @@ -411,41 +656,38 @@ finvalid = { 'geometry': {'type': 'Point', 'coordinates': (0.0, -0.1)}, 'properties': {'not-a-title': 'point two', 'date': "2012-01-29"}} - self.assertTrue(self.sink.validate_record(fvalid)) - self.assertFalse(self.sink.validate_record(finvalid)) + assert self.sink.validate_record(fvalid) + assert not 
self.sink.validate_record(finvalid) -class LineWritingTest(unittest.TestCase): - - def setUp(self): - self.tempdir = tempfile.mkdtemp() +class TestLineWriting(object): + @pytest.fixture(autouse=True) + def shapefile(self, tmpdir): self.sink = fiona.open( - os.path.join(self.tempdir, "line_writing_test.shp"), + str(tmpdir.join("line_writing_test.shp")), "w", driver="ESRI Shapefile", schema={ 'geometry': 'LineString', 'properties': [('title', 'str'), ('date', 'date')]}, crs={'init': "epsg:4326", 'no_defs': True}) - - def tearDown(self): + yield self.sink.close() - shutil.rmtree(self.tempdir) def test_write_one(self): - self.assertEqual(len(self.sink), 0) - self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) + assert len(self.sink) == 0 + assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) f = { 'geometry': {'type': 'LineString', 'coordinates': [(0.0, 0.1), (0.0, 0.2)]}, 'properties': {'title': 'line one', 'date': "2012-01-29"}} self.sink.writerecords([f]) - self.assertEqual(len(self.sink), 1) - self.assertEqual(self.sink.bounds, (0.0, 0.1, 0.0, 0.2)) + assert len(self.sink) == 1 + assert self.sink.bounds == (0.0, 0.1, 0.0, 0.2) def test_write_two(self): - self.assertEqual(len(self.sink), 0) - self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0)) + assert len(self.sink) == 0 + assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) f1 = { 'geometry': {'type': 'LineString', 'coordinates': [(0.0, 0.1), (0.0, 0.2)]}, @@ -456,18 +698,18 @@ [(0.0, -0.1), (0.0, -0.2)]]}, 'properties': {'title': 'line two', 'date': "2012-01-29"}} self.sink.writerecords([f1, f2]) - self.assertEqual(len(self.sink), 2) - self.assertEqual(self.sink.bounds, (0.0, -0.2, 0.0, 0.2)) + assert len(self.sink) == 2 + assert self.sink.bounds == (0.0, -0.2, 0.0, 0.2) -class PointAppendTest(unittest.TestCase): - def setUp(self): - self.tempdir = tempfile.mkdtemp() - with fiona.open(WILDSHP, "r") as input: - output_schema = input.schema.copy() +class TestPointAppend(object): + @pytest.fixture(autouse=True) + def shapefile(self, tmpdir, path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp, "r") as input: + output_schema = input.schema output_schema['geometry'] = '3D Point' with fiona.open( - os.path.join(self.tempdir, "test_append_point.shp"), + str(tmpdir.join("test_append_point.shp")), 'w', crs=None, driver="ESRI Shapefile", schema=output_schema) as output: for f in input: @@ -476,12 +718,9 @@ 'coordinates': f['geometry']['coordinates'][0][0]} output.write(f) - def tearDown(self): - shutil.rmtree(self.tempdir) - - def test_append_point(self): - with fiona.open(os.path.join(self.tempdir, "test_append_point.shp"), "a") as c: - self.assertEqual(c.schema['geometry'], 'Point') + def test_append_point(self, tmpdir): + with fiona.open(str(tmpdir.join("test_append_point.shp")), "a") as c: + assert c.schema['geometry'] == '3D Point' c.write({'geometry': {'type': 'Point', 'coordinates': (0.0, 45.0)}, 'properties': {'PERIMETER': 1.0, 'FEATURE2': None, @@ -493,15 +732,14 @@ 'STATE_FIPS': 1, 'WILDRNP020': 1, 'STATE': 'XL'}}) - self.assertEqual(len(c), 68) + assert len(c) == 68 -class LineAppendTest(unittest.TestCase): - - def setUp(self): - self.tempdir = tempfile.mkdtemp() +class TestLineAppend(object): + @pytest.fixture(autouse=True) + def shapefile(self, tmpdir): with fiona.open( - os.path.join(self.tempdir, "test_append_line.shp"), + str(tmpdir.join("test_append_line.shp")), "w", driver="ESRI Shapefile", schema={ @@ -513,12 +751,9 @@ 'properties': {'title': 'line one', 'date': "2012-01-29"}} output.writerecords([f]) - def 
tearDown(self): - shutil.rmtree(self.tempdir) - - def test_append_line(self): - with fiona.open(os.path.join(self.tempdir, "test_append_line.shp"), "a") as c: - self.assertEqual(c.schema['geometry'], 'LineString') + def test_append_line(self, tmpdir): + with fiona.open(str(tmpdir.join("test_append_line.shp")), "a") as c: + assert c.schema['geometry'] == 'LineString' f1 = { 'geometry': {'type': 'LineString', 'coordinates': [(0.0, 0.1), (0.0, 0.2)]}, @@ -529,110 +764,138 @@ [(0.0, -0.1), (0.0, -0.2)]]}, 'properties': {'title': 'line two', 'date': "2012-01-29"}} c.writerecords([f1, f2]) - self.assertEqual(len(c), 3) - self.assertEqual(c.bounds, (0.0, -0.2, 0.0, 0.2)) - + assert len(c) == 3 + assert c.bounds == (0.0, -0.2, 0.0, 0.2) -class ShapefileFieldWidthTest(unittest.TestCase): - def test_text(self): - self.tempdir = tempfile.mkdtemp() - with fiona.open( - os.path.join(self.tempdir, "textfield.shp"), 'w', - schema={'geometry': 'Point', 'properties': {'text': 'str:254'}}, - driver="ESRI Shapefile") as c: - c.write( - {'geometry': {'type': 'Point', 'coordinates': (0.0, 45.0)}, - 'properties': {'text': 'a' * 254}}) - c = fiona.open(os.path.join(self.tempdir, "textfield.shp"), "r") - self.assertEqual(c.schema['properties']['text'], 'str:254') - f = next(iter(c)) - self.assertEqual(f['properties']['text'], 'a' * 254) - c.close() - - def tearDown(self): - shutil.rmtree(self.tempdir) - - -class CollectionTest(unittest.TestCase): - - def test_invalid_mode(self): - self.assertRaises(ValueError, fiona.open, os.path.join(TEMPDIR, "bogus.shp"), "r+") - - def test_w_args(self): - self.assertRaises(FionaValueError, fiona.open, os.path.join(TEMPDIR, "test-no-iter.shp"), "w") - self.assertRaises( - FionaValueError, fiona.open, os.path.join(TEMPDIR, "test-no-iter.shp"), "w", "Driver") +def test_shapefile_field_width(tmpdir): + name = str(tmpdir.join('textfield.shp')) + with fiona.open( + name, 'w', + schema={'geometry': 'Point', 'properties': {'text': 'str:254'}}, + driver="ESRI Shapefile") as c: + c.write( + {'geometry': {'type': 'Point', 'coordinates': (0.0, 45.0)}, + 'properties': {'text': 'a' * 254}}) + c = fiona.open(name, "r") + assert c.schema['properties']['text'] == 'str:254' + f = next(iter(c)) + assert f['properties']['text'] == 'a' * 254 + c.close() + + +class TestCollection(object): + + def test_invalid_mode(self, tmpdir): + with pytest.raises(ValueError): + fiona.open(str(tmpdir.join("bogus.shp")), "r+") + + def test_w_args(self, tmpdir): + with pytest.raises(FionaValueError): + fiona.open(str(tmpdir.join("test-no-iter.shp")), "w") + with pytest.raises(FionaValueError): + fiona.open(str(tmpdir.join("test-no-iter.shp")), "w", "Driver") def test_no_path(self): - self.assertRaises(IOError, fiona.open, "no-path.shp", "a") + with pytest.raises(Exception): + fiona.open("no-path.shp", "a") def test_no_read_conn_str(self): - self.assertRaises(IOError, fiona.open, "PG:dbname=databasename", "r") + with pytest.raises(DriverError): + fiona.open("PG:dbname=databasename", "r") - @unittest.skipIf(sys.platform.startswith("win"), + @pytest.mark.skipif(sys.platform.startswith("win"), reason="test only for *nix based system") def test_no_read_directory(self): - self.assertRaises(ValueError, fiona.open, "/dev/null", "r") - + with pytest.raises(DriverError): + fiona.open("/dev/null", "r") -class GeoJSONCRSWritingTest(unittest.TestCase): - def setUp(self): - self.tempdir = tempfile.mkdtemp() - self.filename = os.path.join(self.tempdir, "crs_writing_test.json") - self.sink = fiona.open( - self.filename, - "w", - 
driver="GeoJSON", - schema={ - 'geometry': 'Point', - 'properties': [('title', 'str'), ('date', 'date')]}, - crs={'a': 6370997, 'lon_0': -100, 'y_0': 0, 'no_defs': True, 'proj': 'laea', 'x_0': 0, 'units': 'm', 'b': 6370997, 'lat_0': 45}) - - - def tearDown(self): - self.sink.close() - shutil.rmtree(self.tempdir) - - -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Test raises PermissionError. Please look into why this test isn't working.") -class DateTimeTest(unittest.TestCase): - - def setUp(self): - self.tempdir = tempfile.mkdtemp() - - def test_date(self): - self.sink = fiona.open( - os.path.join(self.tempdir, "date_test.shp"), - "w", - driver="ESRI Shapefile", - schema={ - 'geometry': 'Point', - 'properties': [('id', 'int'), ('date', 'date')]}, - crs={'init': "epsg:4326", 'no_defs': True}) - - recs = [{ - 'geometry': {'type': 'Point', - 'coordinates': (7.0, 50.0)}, - 'properties': {'id': 1, 'date': '2013-02-25'} - }, { - 'geometry': {'type': 'Point', - 'coordinates': (7.0, 50.2)}, - 'properties': {'id': 1, 'date': datetime.date(2014, 2, 3)} - }] - self.sink.writerecords(recs) - self.sink.close() - self.assertEqual(len(self.sink), 2) - - with fiona.open(os.path.join(self.tempdir, "date_test.shp"), "r") as c: - self.assertEqual(len(c), 2) - - rf1, rf2 = list(c) - self.assertEqual(rf1['properties']['date'], '2013-02-25') - self.assertEqual(rf2['properties']['date'], '2014-02-03') - +def test_date(tmpdir): + name = str(tmpdir.join("date_test.shp")) + sink = fiona.open( + name, "w", + driver="ESRI Shapefile", + schema={ + 'geometry': 'Point', + 'properties': [('id', 'int'), ('date', 'date')]}, + crs={'init': "epsg:4326", 'no_defs': True}) - def tearDown(self): - shutil.rmtree(self.tempdir) + recs = [{ + 'geometry': {'type': 'Point', + 'coordinates': (7.0, 50.0)}, + 'properties': {'id': 1, 'date': '2013-02-25'} + }, { + 'geometry': {'type': 'Point', + 'coordinates': (7.0, 50.2)}, + 'properties': {'id': 1, 'date': datetime.date(2014, 2, 3)} + }] + sink.writerecords(recs) + sink.close() + assert len(sink) == 2 + + with fiona.open(name, "r") as c: + assert len(c) == 2 + + rf1, rf2 = list(c) + assert rf1['properties']['date'] == '2013-02-25' + assert rf2['properties']['date'] == '2014-02-03' + + +def test_open_kwargs(tmpdir, path_coutwildrnp_shp): + dstfile = str(tmpdir.join('test.json')) + with fiona.open(path_coutwildrnp_shp) as src: + kwds = src.profile + kwds['driver'] = 'GeoJSON' + kwds['coordinate_precision'] = 2 + with fiona.open(dstfile, 'w', **kwds) as dst: + dst.writerecords(ftr for ftr in src) + + with open(dstfile) as f: + assert '"coordinates": [ [ [ -111.74, 42.0 ], [ -111.66, 42.0 ]' in \ + f.read(2000) + + +@pytest.mark.network +def test_collection_http(): + ds = fiona.Collection('http://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.shp', vsi='http') + assert ds.path == '/vsicurl/http://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.shp' + assert len(ds) == 10 + + +@pytest.mark.network +def test_collection_zip_http(): + ds = fiona.Collection('http://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.zip', vsi='zip+http') + assert ds.path == '/vsizip/vsicurl/http://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.zip' + assert len(ds) == 10 + + +def test_encoding_option_warning(tmpdir, caplog): + """There is no ENCODING creation option log warning for GeoJSON""" + fiona.Collection(str(tmpdir.join("test.geojson")), "w", driver="GeoJSON", crs="epsg:4326", + schema={"geometry": "Point", 
"properties": {"foo": "int"}}) + assert not caplog.text + + +def test_closed_session_next(path_coutwildrnp_shp): + """Confirm fix for issue #687""" + src = fiona.open(path_coutwildrnp_shp) + itr = iter(src) + list(itr) + src.close() + with pytest.raises(FionaValueError): + next(itr) + + +def test_collection_no_env(path_coutwildrnp_shp): + """We have no GDAL env left over from open""" + collection = fiona.open(path_coutwildrnp_shp) + assert collection + with pytest.raises(Exception): + getenv() + + +def test_collection_env(path_coutwildrnp_shp): + """We have a GDAL env within collection context""" + with fiona.open(path_coutwildrnp_shp): + assert 'FIONA_ENV' in getenv() diff -Nru fiona-1.7.10/tests/test_compound_crs.py fiona-1.8.6/tests/test_compound_crs.py --- fiona-1.7.10/tests/test_compound_crs.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_compound_crs.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,11 @@ +"""Test of compound CRS crash avoidance""" + +import fiona + + +def test_compound_crs(data): + """Don't crash""" + prj = data.join("coutwildrnp.prj") + prj.write("""COMPD_CS["unknown",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],VERT_CS["unknown",VERT_DATUM["unknown",2005],UNIT["metre",1.0,AUTHORITY["EPSG","9001"]],AXIS["Up",UP]]]""") + with fiona.open(str(data.join("coutwildrnp.shp"))) as collection: + assert collection.crs == {} diff -Nru fiona-1.7.10/tests/test_curve_geometries.py fiona-1.8.6/tests/test_curve_geometries.py --- fiona-1.7.10/tests/test_curve_geometries.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_curve_geometries.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,17 @@ +"""Tests of features related to GDAL RFC 49 + +See https://trac.osgeo.org/gdal/wiki/rfc49_curve_geometries. 
+""" + +import fiona + +from .conftest import requires_gdal2 + + +@requires_gdal2 +def test_line_curve_conversion(path_curves_line_csv): + """Convert curved geometries to linear approximations""" + with fiona.open(path_curves_line_csv) as col: + assert col.schema['geometry'] == 'Unknown' + features = list(col) + assert len(features) == 9 diff -Nru fiona-1.7.10/tests/test_data_paths.py fiona-1.8.6/tests/test_data_paths.py --- fiona-1.7.10/tests/test_data_paths.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_data_paths.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,54 @@ +"""Tests of GDAL and PROJ data finding""" + +import os.path + +from click.testing import CliRunner +import pytest + +import fiona +from fiona._env import GDALDataFinder, PROJDataFinder +from fiona.fio.main import main_group + + +@pytest.mark.wheel +def test_gdal_data_wheel(): + """Get GDAL data path from a wheel""" + assert GDALDataFinder().search() == os.path.join(os.path.dirname(fiona.__file__), 'gdal_data') + + +@pytest.mark.wheel +def test_proj_data_wheel(): + """Get GDAL data path from a wheel""" + assert PROJDataFinder().search() == os.path.join(os.path.dirname(fiona.__file__), 'proj_data') + + +@pytest.mark.wheel +def test_env_gdal_data_wheel(): + runner = CliRunner() + result = runner.invoke(main_group, ['env', '--gdal-data']) + assert result.exit_code == 0 + assert result.output.strip() == os.path.join(os.path.dirname(fiona.__file__), 'gdal_data') + + +@pytest.mark.wheel +def test_env_proj_data_wheel(): + runner = CliRunner() + result = runner.invoke(main_group, ['env', '--proj-data']) + assert result.exit_code == 0 + assert result.output.strip() == os.path.join(os.path.dirname(fiona.__file__), 'proj_data') + + +def test_env_gdal_data_environ(monkeypatch): + monkeypatch.setenv('GDAL_DATA', '/foo/bar') + runner = CliRunner() + result = runner.invoke(main_group, ['env', '--gdal-data']) + assert result.exit_code == 0 + assert result.output.strip() == '/foo/bar' + + +def test_env_proj_data_environ(monkeypatch): + monkeypatch.setenv('PROJ_LIB', '/foo/bar') + runner = CliRunner() + result = runner.invoke(main_group, ['env', '--proj-data']) + assert result.exit_code == 0 + assert result.output.strip() == '/foo/bar' diff -Nru fiona-1.7.10/tests/test_datetime.py fiona-1.8.6/tests/test_datetime.py --- fiona-1.7.10/tests/test_datetime.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_datetime.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,268 @@ +""" +See also test_rfc3339.py for datetime parser tests. 
+""" + +import fiona +import pytest +import tempfile, shutil +import os +from fiona.errors import DriverSupportError +from .conftest import requires_gpkg + +GDAL_MAJOR_VER = fiona.get_gdal_version_num() // 1000000 + +GEOMETRY_TYPE = "Point" +GEOMETRY_EXAMPLE = {"type": "Point", "coordinates": [1, 2]} + +DRIVER_FILENAME = { + "ESRI Shapefile": "test.shp", + "GPKG": "test.gpkg", + "GeoJSON": "test.geojson", + "MapInfo File": "test.tab", +} + +DATE_EXAMPLE = "2018-03-25" +DATETIME_EXAMPLE = "2018-03-25T22:49:05" +TIME_EXAMPLE = "22:49:05" + +class TestDateFieldSupport: + def write_data(self, driver): + filename = DRIVER_FILENAME[driver] + temp_dir = tempfile.mkdtemp() + path = os.path.join(temp_dir, filename) + schema = { + "geometry": GEOMETRY_TYPE, + "properties": { + "date": "date", + } + } + records = [ + { + "geometry": GEOMETRY_EXAMPLE, + "properties": { + "date": DATE_EXAMPLE, + } + }, + { + "geometry": GEOMETRY_EXAMPLE, + "properties": { + "date": None, + } + }, + ] + with fiona.Env(), fiona.open(path, "w", driver=driver, schema=schema) as collection: + collection.writerecords(records) + + with fiona.Env(), fiona.open(path, "r") as collection: + schema = collection.schema + features = list(collection) + + shutil.rmtree(temp_dir) + + return schema, features + + def test_shapefile(self): + driver = "ESRI Shapefile" + schema, features = self.write_data(driver) + + assert schema["properties"]["date"] == "date" + assert features[0]["properties"]["date"] == DATE_EXAMPLE + assert features[1]["properties"]["date"] is None + + @requires_gpkg + def test_gpkg(self): + driver = "GPKG" + schema, features = self.write_data(driver) + + assert schema["properties"]["date"] == "date" + assert features[0]["properties"]["date"] == DATE_EXAMPLE + assert features[1]["properties"]["date"] is None + + def test_geojson(self): + # GDAL 1: date field silently converted to string + # GDAL 1: date string format uses / instead of - + driver = "GeoJSON" + schema, features = self.write_data(driver) + + if GDAL_MAJOR_VER >= 2: + assert schema["properties"]["date"] == "date" + assert features[0]["properties"]["date"] == DATE_EXAMPLE + else: + assert schema["properties"]["date"] == "str" + assert features[0]["properties"]["date"] == "2018/03/25" + assert features[1]["properties"]["date"] is None + + def test_mapinfo(self): + driver = "MapInfo File" + schema, features = self.write_data(driver) + + assert schema["properties"]["date"] == "date" + assert features[0]["properties"]["date"] == DATE_EXAMPLE + assert features[1]["properties"]["date"] is None + + +class TestDatetimeFieldSupport: + def write_data(self, driver): + filename = DRIVER_FILENAME[driver] + temp_dir = tempfile.mkdtemp() + path = os.path.join(temp_dir, filename) + schema = { + "geometry": GEOMETRY_TYPE, + "properties": { + "datetime": "datetime", + } + } + records = [ + { + "geometry": GEOMETRY_EXAMPLE, + "properties": { + "datetime": DATETIME_EXAMPLE, + } + }, + { + "geometry": GEOMETRY_EXAMPLE, + "properties": { + "datetime": None, + } + }, + ] + with fiona.Env(), fiona.open(path, "w", driver=driver, schema=schema) as collection: + collection.writerecords(records) + + with fiona.Env(), fiona.open(path, "r") as collection: + schema = collection.schema + features = list(collection) + + shutil.rmtree(temp_dir) + + return schema, features + + def test_shapefile(self): + # datetime is silently converted to date + driver = "ESRI Shapefile" + + with pytest.raises(DriverSupportError): + schema, features = self.write_data(driver) + + # assert 
schema["properties"]["datetime"] == "date" + # assert features[0]["properties"]["datetime"] == "2018-03-25" + # assert features[1]["properties"]["datetime"] is None + + @requires_gpkg + def test_gpkg(self): + # GDAL 1: datetime silently downgraded to date + driver = "GPKG" + + if GDAL_MAJOR_VER >= 2: + schema, features = self.write_data(driver) + assert schema["properties"]["datetime"] == "datetime" + assert features[0]["properties"]["datetime"] == DATETIME_EXAMPLE + assert features[1]["properties"]["datetime"] is None + else: + with pytest.raises(DriverSupportError): + schema, features = self.write_data(driver) + + def test_geojson(self): + # GDAL 1: datetime silently converted to string + # GDAL 1: date string format uses / instead of - + driver = "GeoJSON" + schema, features = self.write_data(driver) + + if GDAL_MAJOR_VER >= 2: + assert schema["properties"]["datetime"] == "datetime" + assert features[0]["properties"]["datetime"] == DATETIME_EXAMPLE + else: + assert schema["properties"]["datetime"] == "str" + assert features[0]["properties"]["datetime"] == "2018/03/25 22:49:05" + assert features[1]["properties"]["datetime"] is None + + def test_mapinfo(self): + driver = "MapInfo File" + schema, features = self.write_data(driver) + + assert schema["properties"]["datetime"] == "datetime" + assert features[0]["properties"]["datetime"] == DATETIME_EXAMPLE + assert features[1]["properties"]["datetime"] is None + + +class TestTimeFieldSupport: + def write_data(self, driver): + filename = DRIVER_FILENAME[driver] + temp_dir = tempfile.mkdtemp() + path = os.path.join(temp_dir, filename) + schema = { + "geometry": GEOMETRY_TYPE, + "properties": { + "time": "time", + } + } + records = [ + { + "geometry": GEOMETRY_EXAMPLE, + "properties": { + "time": TIME_EXAMPLE, + } + }, + { + "geometry": GEOMETRY_EXAMPLE, + "properties": { + "time": None, + } + }, + ] + with fiona.Env(), fiona.open(path, "w", driver=driver, schema=schema) as collection: + collection.writerecords(records) + + with fiona.Env(), fiona.open(path, "r") as collection: + schema = collection.schema + features = list(collection) + + shutil.rmtree(temp_dir) + + return schema, features + + def test_shapefile(self): + # no support for time fields + driver = "ESRI Shapefile" + with pytest.raises(DriverSupportError): + self.write_data(driver) + + @requires_gpkg + def test_gpkg(self): + # GDAL 2: time field is silently converted to string + # GDAL 1: time field dropped completely + driver = "GPKG" + + with pytest.raises(DriverSupportError): + schema, features = self.write_data(driver) + + # if GDAL_MAJOR_VER >= 2: + # assert schema["properties"]["time"] == "str" + # assert features[0]["properties"]["time"] == TIME_EXAMPLE + # assert features[1]["properties"]["time"] is None + # else: + # assert "time" not in schema["properties"] + + def test_geojson(self): + # GDAL 1: time field silently converted to string + driver = "GeoJSON" + schema, features = self.write_data(driver) + + if GDAL_MAJOR_VER >= 2: + assert schema["properties"]["time"] == "time" + else: + assert schema["properties"]["time"] == "str" + assert features[0]["properties"]["time"] == TIME_EXAMPLE + assert features[1]["properties"]["time"] is None + + def test_mapinfo(self): + # GDAL 2: null time is converted to 00:00:00 (regression?) 
+ driver = "MapInfo File" + schema, features = self.write_data(driver) + + assert schema["properties"]["time"] == "time" + assert features[0]["properties"]["time"] == TIME_EXAMPLE + if GDAL_MAJOR_VER >= 2: + assert features[1]["properties"]["time"] == "00:00:00" + else: + assert features[1]["properties"]["time"] is None diff -Nru fiona-1.7.10/tests/test_drivers.py fiona-1.8.6/tests/test_drivers.py --- fiona-1.7.10/tests/test_drivers.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_drivers.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,37 +1,31 @@ -import logging -import os.path -import shutil -import sys -import tempfile -import unittest +"""Tests for Fiona's OGR driver interface.""" -import fiona +import logging + +import pytest -logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) +import fiona +from fiona.errors import FionaDeprecationWarning -FIXME_WINDOWS = sys.platform.startswith('win') -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Raises PermissionError Please look into why this test isn't working.") -def test_options(tmpdir=None): - """Test that setting CPL_DEBUG=ON works""" - if tmpdir is None: - tempdir = tempfile.mkdtemp() - logfile = os.path.join(tempdir, 'example.log') - else: - logfile = str(tmpdir.join('example.log')) - logger = logging.getLogger('Fiona') +def test_options(tmpdir, path_coutwildrnp_shp): + """Test that setting CPL_DEBUG=ON works and that a warning is raised.""" + logfile = str(tmpdir.mkdir('tests').join('test_options.log')) + logger = logging.getLogger() logger.setLevel(logging.DEBUG) fh = logging.FileHandler(logfile) fh.setLevel(logging.DEBUG) logger.addHandler(fh) - with fiona.drivers(CPL_DEBUG=True): - c = fiona.open("tests/data/coutwildrnp.shp") - c.close() - log = open(logfile).read() - assert "Option CPL_DEBUG" in log - - if tempdir and tmpdir is None: - shutil.rmtree(tempdir) + # fiona.drivers() will be deprecated. + with pytest.warns(FionaDeprecationWarning): + with fiona.drivers(CPL_DEBUG=True): + c = fiona.open(path_coutwildrnp_shp) + c.close() + with open(logfile, "r") as f: + log = f.read() + if fiona.gdal_version.major >= 2: + assert "GDALOpen" in log + else: + assert "OGROpen" in log diff -Nru fiona-1.7.10/tests/test_drvsupport.py fiona-1.8.6/tests/test_drvsupport.py --- fiona-1.7.10/tests/test_drvsupport.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_drvsupport.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,14 @@ +"""Tests of driver support""" + +import pytest + +from .conftest import requires_gdal24 + +import fiona.drvsupport + + +@requires_gdal24 +@pytest.mark.parametrize('format', ['GeoJSON', 'ESRIJSON', 'TopoJSON', 'GeoJSONSeq']) +def test_geojsonseq(format): + """Format is available""" + assert format in fiona.drvsupport.supported_drivers.keys() diff -Nru fiona-1.7.10/tests/test_encoding.py fiona-1.8.6/tests/test_encoding.py --- fiona-1.7.10/tests/test_encoding.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_encoding.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,53 @@ +# coding=utf-8 +"""Encoding tests""" + +from glob import glob +import os +import shutil + +import pytest + +import fiona + +from .conftest import requires_gdal2 + + +@pytest.fixture(scope='function') +def gre_shp_cp1252(tmpdir): + """A tempdir containing copies of gre.* files, .cpg set to cp1252 + + The shapefile attributes are in fact utf-8 encoded. 
+ """ + test_files = glob(os.path.join(os.path.dirname(__file__), 'data/gre.*')) + tmpdir = tmpdir.mkdir('data') + for filename in test_files: + shutil.copy(filename, str(tmpdir)) + tmpdir.join('gre.cpg').write('CP1252') + yield tmpdir.join('gre.shp') + + +@requires_gdal2 +def test_broken_encoding(gre_shp_cp1252): + """Reading as cp1252 mis-encodes a Russian name""" + with fiona.open(str(gre_shp_cp1252)) as src: + assert src.session._get_internal_encoding() == 'utf-8' + feat = next(iter(src)) + assert feat['properties']['name_ru'] != u'Гренада' + + +@requires_gdal2 +def test_cpg_encoding(gre_shp_cp1252): + """Reads a Russian name""" + gre_shp_cp1252.join('../gre.cpg').write('UTF-8') + with fiona.open(str(gre_shp_cp1252)) as src: + assert src.session._get_internal_encoding() == 'utf-8' + feat = next(iter(src)) + assert feat['properties']['name_ru'] == u'Гренада' + + +@requires_gdal2 +def test_override_encoding(gre_shp_cp1252): + """utf-8 override succeeds""" + with fiona.open(str(gre_shp_cp1252), encoding='utf-8') as src: + assert src.session._get_internal_encoding() == 'utf-8' + assert next(iter(src))['properties']['name_ru'] == u'Гренада' diff -Nru fiona-1.7.10/tests/test__env.py fiona-1.8.6/tests/test__env.py --- fiona-1.7.10/tests/test__env.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test__env.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,129 @@ +"""Tests of _env util module""" + +import pytest +try: + from unittest import mock +except ImportError: + import mock + +from fiona._env import GDALDataFinder, PROJDataFinder + +from .conftest import gdal_version + + +@pytest.fixture +def mock_wheel(tmpdir): + """A fake rasterio wheel""" + moduledir = tmpdir.mkdir("rasterio") + moduledir.ensure("__init__.py") + moduledir.ensure("_env.py") + moduledir.ensure("gdal_data/pcs.csv") + moduledir.ensure("proj_data/epsg") + return moduledir + + +@pytest.fixture +def mock_fhs(tmpdir): + """A fake FHS system""" + tmpdir.ensure("share/gdal/pcs.csv") + tmpdir.ensure("share/proj/epsg") + return tmpdir + + +@pytest.fixture +def mock_debian(tmpdir): + """A fake Debian multi-install system""" + tmpdir.ensure("share/gdal/1.11/pcs.csv") + tmpdir.ensure("share/gdal/2.0/pcs.csv") + tmpdir.ensure("share/gdal/2.1/pcs.csv") + tmpdir.ensure("share/gdal/2.2/pcs.csv") + tmpdir.ensure("share/gdal/2.3/pcs.csv") + tmpdir.ensure("share/gdal/2.4/pcs.csv") + tmpdir.ensure("share/proj/epsg") + return tmpdir + + +def test_search_wheel_gdal_data_failure(tmpdir): + """Fail to find GDAL data in a non-wheel""" + finder = GDALDataFinder() + assert not finder.search_wheel(str(tmpdir)) + + +def test_search_wheel_gdal_data(mock_wheel): + """Find GDAL data in a wheel""" + finder = GDALDataFinder() + assert finder.search_wheel(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("gdal_data")) + + +def test_search_prefix_gdal_data_failure(tmpdir): + """Fail to find GDAL data in a bogus prefix""" + finder = GDALDataFinder() + assert not finder.search_prefix(str(tmpdir)) + + +def test_search_prefix_gdal_data(mock_fhs): + """Find GDAL data under prefix""" + finder = GDALDataFinder() + assert finder.search_prefix(str(mock_fhs)) == str(mock_fhs.join("share").join("gdal")) + + +def test_search_debian_gdal_data_failure(tmpdir): + """Fail to find GDAL data in a bogus Debian location""" + finder = GDALDataFinder() + assert not finder.search_debian(str(tmpdir)) + + +def test_search_debian_gdal_data(mock_debian): + """Find GDAL data under Debian locations""" + finder = GDALDataFinder() + assert 
finder.search_debian(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join("{}.{}".format(gdal_version.major, gdal_version.minor))) + + +def test_search_gdal_data_wheel(mock_wheel): + finder = GDALDataFinder() + assert finder.search(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("gdal_data")) + + +def test_search_gdal_data_fhs(mock_fhs): + finder = GDALDataFinder() + assert finder.search(str(mock_fhs)) == str(mock_fhs.join("share").join("gdal")) + + +def test_search_gdal_data_debian(mock_debian): + """Find GDAL data under Debian locations""" + finder = GDALDataFinder() + assert finder.search(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join("{}.{}".format(gdal_version.major, gdal_version.minor))) + + +def test_search_wheel_proj_data_failure(tmpdir): + """Fail to find GDAL data in a non-wheel""" + finder = PROJDataFinder() + assert not finder.search_wheel(str(tmpdir)) + + +def test_search_wheel_proj_data(mock_wheel): + """Find GDAL data in a wheel""" + finder = PROJDataFinder() + assert finder.search_wheel(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("proj_data")) + + +def test_search_prefix_proj_data_failure(tmpdir): + """Fail to find GDAL data in a bogus prefix""" + finder = PROJDataFinder() + assert not finder.search_prefix(str(tmpdir)) + + +def test_search_prefix_proj_data(mock_fhs): + """Find GDAL data under prefix""" + finder = PROJDataFinder() + assert finder.search_prefix(str(mock_fhs)) == str(mock_fhs.join("share").join("proj")) + + +def test_search_proj_data_wheel(mock_wheel): + finder = PROJDataFinder() + assert finder.search(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("proj_data")) + + +def test_search_proj_data_fhs(mock_fhs): + finder = PROJDataFinder() + assert finder.search(str(mock_fhs)) == str(mock_fhs.join("share").join("proj")) diff -Nru fiona-1.7.10/tests/test_env.py fiona-1.8.6/tests/test_env.py --- fiona-1.7.10/tests/test_env.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_env.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,115 @@ +"""Tests of fiona.env""" + +import os +import sys +try: + from unittest import mock +except ImportError: + import mock + +import pytest + +import fiona +from fiona import _env +from fiona.env import getenv, ensure_env, ensure_env_with_credentials +from fiona.session import AWSSession, GSSession + + +def test_nested_credentials(monkeypatch): + """Check that rasterio.open() doesn't wipe out surrounding credentials""" + + @ensure_env_with_credentials + def fake_opener(path): + return fiona.env.getenv() + + with fiona.env.Env(session=AWSSession(aws_access_key_id='foo', aws_secret_access_key='bar')): + assert fiona.env.getenv()['AWS_ACCESS_KEY_ID'] == 'foo' + assert fiona.env.getenv()['AWS_SECRET_ACCESS_KEY'] == 'bar' + + monkeypatch.setenv('AWS_ACCESS_KEY_ID', 'lol') + monkeypatch.setenv('AWS_SECRET_ACCESS_KEY', 'wut') + gdalenv = fake_opener('s3://foo/bar') + assert gdalenv['AWS_ACCESS_KEY_ID'] == 'foo' + assert gdalenv['AWS_SECRET_ACCESS_KEY'] == 'bar' + + +def test_ensure_env_decorator(gdalenv): + @ensure_env + def f(): + return getenv()['FIONA_ENV'] + assert f() is True + + +def test_ensure_env_decorator_sets_gdal_data(gdalenv, monkeypatch): + """fiona.env.ensure_env finds GDAL from environment""" + @ensure_env + def f(): + return getenv()['GDAL_DATA'] + + monkeypatch.setenv('GDAL_DATA', '/lol/wut') + assert f() == '/lol/wut' + + +@mock.patch("fiona._env.GDALDataFinder.find_file") +def test_ensure_env_decorator_sets_gdal_data_prefix(find_file, gdalenv, 
monkeypatch, tmpdir): + """fiona.env.ensure_env finds GDAL data under a prefix""" + @ensure_env + def f(): + return getenv()['GDAL_DATA'] + + find_file.return_value = None + tmpdir.ensure("share/gdal/pcs.csv") + monkeypatch.delenv('GDAL_DATA', raising=False) + monkeypatch.setattr(_env, '__file__', str(tmpdir.join("fake.py"))) + monkeypatch.setattr(sys, 'prefix', str(tmpdir)) + + assert f() == str(tmpdir.join("share").join("gdal")) + + +@mock.patch("fiona._env.GDALDataFinder.find_file") +def test_ensure_env_decorator_sets_gdal_data_wheel(find_file, gdalenv, monkeypatch, tmpdir): + """fiona.env.ensure_env finds GDAL data in a wheel""" + @ensure_env + def f(): + return getenv()['GDAL_DATA'] + + find_file.return_value = None + tmpdir.ensure("gdal_data/pcs.csv") + monkeypatch.delenv('GDAL_DATA', raising=False) + monkeypatch.setattr(_env, '__file__', str(tmpdir.join(os.path.basename(_env.__file__)))) + + assert f() == str(tmpdir.join("gdal_data")) + + +@mock.patch("fiona._env.GDALDataFinder.find_file") +def test_ensure_env_with_decorator_sets_gdal_data_wheel(find_file, gdalenv, monkeypatch, tmpdir): + """fiona.env.ensure_env finds GDAL data in a wheel""" + @ensure_env_with_credentials + def f(*args): + return getenv()['GDAL_DATA'] + + find_file.return_value = None + tmpdir.ensure("gdal_data/pcs.csv") + monkeypatch.delenv('GDAL_DATA', raising=False) + monkeypatch.setattr(_env, '__file__', str(tmpdir.join(os.path.basename(_env.__file__)))) + + assert f("foo") == str(tmpdir.join("gdal_data")) + + +def test_ensure_env_crs(path_coutwildrnp_shp): + """Decoration of .crs works""" + assert fiona.open(path_coutwildrnp_shp).crs + + +def test_nested_gs_credentials(monkeypatch): + """Check that rasterio.open() doesn't wipe out surrounding credentials""" + + @ensure_env_with_credentials + def fake_opener(path): + return fiona.env.getenv() + + with fiona.env.Env(session=GSSession(google_application_credentials='foo')): + assert fiona.env.getenv()['GOOGLE_APPLICATION_CREDENTIALS'] == 'foo' + + gdalenv = fake_opener('gs://foo/bar') + assert gdalenv['GOOGLE_APPLICATION_CREDENTIALS'] == 'foo' diff -Nru fiona-1.7.10/tests/test_feature.py fiona-1.8.6/tests/test_feature.py --- fiona-1.7.10/tests/test_feature.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_feature.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,112 +1,145 @@ -# testing features, to be called by nosetests +"""Tests for feature objects.""" import logging import os import shutil import sys import tempfile -import unittest +import pytest +import fiona from fiona import collection from fiona.collection import Collection from fiona.ogrext import featureRT -#logging.basicConfig(stream=sys.stderr, level=logging.INFO) -class PointRoundTripTest(unittest.TestCase): - def setUp(self): +class TestPointRoundTrip(object): + + def setup(self): self.tempdir = tempfile.mkdtemp() schema = {'geometry': 'Point', 'properties': {'title': 'str'}} self.c = Collection(os.path.join(self.tempdir, "foo.shp"), "w", driver="ESRI Shapefile", schema=schema) - def tearDown(self): + + def teardown(self): self.c.close() shutil.rmtree(self.tempdir) + def test_geometry(self): - f = { 'id': '1', + f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) - self.assertEqual( - sorted(g['geometry'].items()), + assert ( + sorted(g['geometry'].items()) == [('coordinates', (0.0, 0.0)), ('type', 'Point')]) + def test_properties(self): - f = { 'id': '1', + f = { 'id': '1', 'geometry': {'type': 'Point', 
'coordinates': (0.0, 0.0)}, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) - self.assertEqual(g['properties']['title'], 'foo') + assert g['properties']['title'] == 'foo' + def test_none_property(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': None} } g = featureRT(f, self.c) - self.assertEqual(g['properties']['title'], None) + assert g['properties']['title'] is None + + +class TestLineStringRoundTrip(object): -class LineStringRoundTripTest(unittest.TestCase): - def setUp(self): + def setup(self): self.tempdir = tempfile.mkdtemp() schema = {'geometry': 'LineString', 'properties': {'title': 'str'}} self.c = Collection(os.path.join(self.tempdir, "foo.shp"), "w", "ESRI Shapefile", schema=schema) - def tearDown(self): + + def teardown(self): self.c.close() shutil.rmtree(self.tempdir) + def test_geometry(self): - f = { 'id': '1', - 'geometry': { 'type': 'LineString', + f = { 'id': '1', + 'geometry': { 'type': 'LineString', 'coordinates': [(0.0, 0.0), (1.0, 1.0)] }, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) - self.assertEqual( - sorted(g['geometry'].items()), - [('coordinates', [(0.0, 0.0), (1.0, 1.0)]), + assert ( + sorted(g['geometry'].items()) == + [('coordinates', [(0.0, 0.0), (1.0, 1.0)]), ('type', 'LineString')]) + def test_properties(self): f = { 'id': '1', 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) - self.assertEqual(g['properties']['title'], 'foo') + assert g['properties']['title'] == 'foo' + -class PolygonRoundTripTest(unittest.TestCase): - def setUp(self): +class TestPolygonRoundTrip(object): + + def setup(self): self.tempdir = tempfile.mkdtemp() schema = {'geometry': 'Polygon', 'properties': {'title': 'str'}} self.c = Collection(os.path.join(self.tempdir, "foo.shp"), "w", "ESRI Shapefile", schema=schema) - def tearDown(self): + + def teardown(self): self.c.close() shutil.rmtree(self.tempdir) + def test_geometry(self): - f = { 'id': '1', - 'geometry': { 'type': 'Polygon', - 'coordinates': - [[(0.0, 0.0), - (0.0, 1.0), - (1.0, 1.0), - (1.0, 0.0), + f = { 'id': '1', + 'geometry': { 'type': 'Polygon', + 'coordinates': + [[(0.0, 0.0), + (0.0, 1.0), + (1.0, 1.0), + (1.0, 0.0), (0.0, 0.0)]] }, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) - self.assertEqual( - sorted(g['geometry'].items()), - [('coordinates', [[(0.0, 0.0), - (0.0, 1.0), - (1.0, 1.0), - (1.0, 0.0), - (0.0, 0.0)]]), + assert ( + sorted(g['geometry'].items()) == + [('coordinates', [[(0.0, 0.0), + (0.0, 1.0), + (1.0, 1.0), + (1.0, 0.0), + (0.0, 0.0)]]), ('type', 'Polygon')]) + def test_properties(self): - f = { 'id': '1', - 'geometry': { 'type': 'Polygon', - 'coordinates': - [[(0.0, 0.0), - (0.0, 1.0), - (1.0, 1.0), - (1.0, 0.0), + f = { 'id': '1', + 'geometry': { 'type': 'Polygon', + 'coordinates': + [[(0.0, 0.0), + (0.0, 1.0), + (1.0, 1.0), + (1.0, 0.0), (0.0, 0.0)]] }, 'properties': {'title': u'foo'} } g = featureRT(f, self.c) - self.assertEqual(g['properties']['title'], 'foo') + assert g['properties']['title'] == 'foo' + +@pytest.mark.parametrize("driver, extension", [("ESRI Shapefile", "shp"), ("GeoJSON", "geojson")]) +def test_feature_null_field(tmpdir, driver, extension): + """ + In GDAL 2.2 the behaviour of OGR_F_IsFieldSet slightly changed. Some drivers + (e.g. GeoJSON) also require fields to be explicitly set to null. + See GH #460. 
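+
+ The round trip below simply checks that a property written as None
+ reads back as None under both drivers, roughly:
+
+ {"properties": {"RETURN_P": None}} -> feature["properties"]["RETURN_P"] is None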
+ """ + meta = {"driver": driver, "schema": {"geometry": "Point", "properties": {"RETURN_P": "str"}}} + filename = os.path.join(str(tmpdir), "test_null."+extension) + with fiona.open(filename, "w", **meta) as dst: + g = {"coordinates": [1.0, 2.0], "type": "Point"} + feature = {"geometry": g, "properties": {"RETURN_P": None}} + dst.write(feature) + + with fiona.open(filename, "r") as src: + feature = next(iter(src)) + assert(feature["properties"]["RETURN_P"] is None) diff -Nru fiona-1.7.10/tests/test_fio_bounds.py fiona-1.8.6/tests/test_fio_bounds.py --- fiona-1.7.10/tests/test_fio_bounds.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_bounds.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,94 +1,80 @@ -import json -import re +"""Tests for `$ fio bounds`.""" -import click -from click.testing import CliRunner -from fiona.fio import bounds +import re -from .fixtures import ( - feature_collection, feature_collection_pp, feature_seq, feature_seq_pp_rs) +from fiona.fio import bounds +from fiona.fio.main import main_group -def test_fail(): - runner = CliRunner() - result = runner.invoke(bounds.bounds, [], '5') +def test_fail(runner): + result = runner.invoke(main_group, ['bounds', ], '5') assert result.exit_code == 1 -def test_seq(): - runner = CliRunner() - result = runner.invoke(bounds.bounds, [], feature_seq) +def test_seq(feature_seq, runner): + result = runner.invoke(main_group, ['bounds', ], feature_seq) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 -def test_seq_rs(): - runner = CliRunner() - result = runner.invoke(bounds.bounds, [], feature_seq_pp_rs) +def test_seq_rs(feature_seq_pp_rs, runner): + result = runner.invoke(main_group, ['bounds', ], feature_seq_pp_rs) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 -def test_precision(): - runner = CliRunner() - result = runner.invoke(bounds.bounds, ['--precision', 1], feature_seq) +def test_precision(feature_seq, runner): + result = runner.invoke(main_group, ['bounds', '--precision', 1], feature_seq) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d{1}\D', result.output)) == 8 -def test_explode(): - runner = CliRunner() - result = runner.invoke(bounds.bounds, ['--explode'], feature_collection) +def test_explode(feature_collection, runner): + result = runner.invoke(main_group, ['bounds', '--explode'], feature_collection) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 -def test_explode_pp(): - runner = CliRunner() - result = runner.invoke(bounds.bounds, ['--explode'], feature_collection_pp) +def test_explode_pp(feature_collection_pp, runner): + result = runner.invoke(main_group, ['bounds', '--explode'], feature_collection_pp) assert result.exit_code == 0 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 -def test_with_id(): - runner = CliRunner() - result = runner.invoke(bounds.bounds, ['--with-id'], feature_seq) +def test_with_id(feature_seq, runner): + result = runner.invoke(main_group, ['bounds', '--with-id'], feature_seq) assert result.exit_code == 0 assert result.output.count('id') == result.output.count('bbox') == 2 -def test_explode_with_id(): - runner = CliRunner() +def 
test_explode_with_id(feature_collection, runner): result = runner.invoke( - bounds.bounds, ['--explode', '--with-id'], feature_collection) + main_group, ['bounds', '--explode', '--with-id'], feature_collection) assert result.exit_code == 0 assert result.output.count('id') == result.output.count('bbox') == 2 -def test_with_obj(): - runner = CliRunner() - result = runner.invoke(bounds.bounds, ['--with-obj'], feature_seq) +def test_with_obj(feature_seq, runner): + result = runner.invoke(main_group, ['bounds', '--with-obj'], feature_seq) assert result.exit_code == 0 assert result.output.count('geometry') == result.output.count('bbox') == 2 -def test_bounds_explode_with_obj(): - runner = CliRunner() +def test_bounds_explode_with_obj(feature_collection, runner): result = runner.invoke( - bounds.bounds, ['--explode', '--with-obj'], feature_collection) + main_group, ['bounds', '--explode', '--with-obj'], feature_collection) assert result.exit_code == 0 assert result.output.count('geometry') == result.output.count('bbox') == 2 -def test_explode_output_rs(): - runner = CliRunner() - result = runner.invoke(bounds.bounds, ['--explode', '--rs'], feature_collection) +def test_explode_output_rs(feature_collection, runner): + result = runner.invoke(main_group, ['bounds', '--explode', '--rs'], feature_collection) assert result.exit_code == 0 assert result.output.count(u'\u001e') == 2 assert result.output.count('[') == result.output.count(']') == 2 diff -Nru fiona-1.7.10/tests/test_fio_calc.py fiona-1.8.6/tests/test_fio_calc.py --- fiona-1.7.10/tests/test_fio_calc.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_calc.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,16 +1,17 @@ +"""Tests for `$ fio calc`.""" + + from __future__ import division import json from click.testing import CliRunner -from fiona.fio.calc import calc -from .fixtures import feature_seq +from fiona.fio.main import main_group def test_fail(): runner = CliRunner() - result = runner.invoke(calc, - ["TEST", "f.properties.test > 5"], + result = runner.invoke(main_group, ['calc', "TEST", "f.properties.test > 5"], '{"type": "no_properties"}') assert result.exit_code == 1 @@ -21,16 +22,18 @@ try: features.append(json.loads(x)) except: - pass # nosetests puts some debugging garbage to stdout + # Click combines stdout and stderr and shapely dumps logs to + # stderr that are not JSON + # https://github.com/pallets/click/issues/371 + pass return features -def test_calc_seq(): - runner = CliRunner() - - result = runner.invoke(calc, - ["TEST", "f.properties.AREA / f.properties.PERIMETER"], - feature_seq) +def test_calc_seq(feature_seq, runner): + result = runner.invoke(main_group, ['calc', + "TEST", + "f.properties.AREA / f.properties.PERIMETER"], + feature_seq) assert result.exit_code == 0 feats = _load(result.output) @@ -40,29 +43,22 @@ feat['properties']['AREA'] / feat['properties']['PERIMETER'] -def test_bool_seq(): - runner = CliRunner() - - result = runner.invoke(calc, - ["TEST", "f.properties.AREA > 0.015"], +def test_bool_seq(feature_seq, runner): + result = runner.invoke(main_group, ['calc', "TEST", "f.properties.AREA > 0.015"], feature_seq) assert result.exit_code == 0 feats = _load(result.output) assert len(feats) == 2 - assert feats[0]['properties']['TEST'] == True - assert feats[1]['properties']['TEST'] == False + assert feats[0]['properties']['TEST'] + assert not feats[1]['properties']['TEST'] -def test_existing_property(): - runner = CliRunner() - - result = runner.invoke(calc, - ["AREA", "f.properties.AREA * 2"], +def 
test_existing_property(feature_seq, runner): + result = runner.invoke(main_group, ['calc', "AREA", "f.properties.AREA * 2"], feature_seq) assert result.exit_code == 1 - result = runner.invoke(calc, - ["--overwrite", "AREA", "f.properties.AREA * 2"], + result = runner.invoke(main_group, ['calc', "--overwrite", "AREA", "f.properties.AREA * 2"], feature_seq) assert result.exit_code == 0 feats = _load(result.output) diff -Nru fiona-1.7.10/tests/test_fio_cat.py fiona-1.8.6/tests/test_fio_cat.py --- fiona-1.7.10/tests/test_fio_cat.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_cat.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,80 +1,86 @@ -import json -import sys -import unittest +"""Tests for `$ fio cat`.""" + +import os +import pytest from click.testing import CliRunner +from fiona.fio.main import main_group from fiona.fio import cat -from .fixtures import feature_seq -from .fixtures import feature_seq_pp_rs - - -WILDSHP = 'tests/data/coutwildrnp.shp' -FIXME_WINDOWS = sys.platform.startswith('win') - -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_one(): +def test_one(path_coutwildrnp_shp): runner = CliRunner() - result = runner.invoke(cat.cat, [WILDSHP]) + result = runner.invoke(main_group, ['cat', path_coutwildrnp_shp]) assert result.exit_code == 0 assert result.output.count('"Feature"') == 67 -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_two(): + +def test_two(path_coutwildrnp_shp): runner = CliRunner() - result = runner.invoke(cat.cat, [WILDSHP, WILDSHP]) + result = runner.invoke(main_group, ['cat', path_coutwildrnp_shp, path_coutwildrnp_shp]) assert result.exit_code == 0 assert result.output.count('"Feature"') == 134 -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_bbox_no(): + +def test_bbox_no(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke( - cat.cat, - [WILDSHP, '--bbox', '0,10,80,20'], + main_group, + ['cat', path_coutwildrnp_shp, '--bbox', '0,10,80,20'], catch_exceptions=False) assert result.exit_code == 0 assert result.output == "" -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_bbox_yes(): + +def test_bbox_yes(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke( - cat.cat, - [WILDSHP, '--bbox', '-109,37,-107,39'], + main_group, + ['cat', path_coutwildrnp_shp, '--bbox', '-109,37,-107,39'], catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 19 -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. 
Please look into why this test is not working.") -def test_bbox_json_yes(): + +def test_bbox_yes_two_files(path_coutwildrnp_shp): runner = CliRunner() result = runner.invoke( - cat.cat, - [WILDSHP, '--bbox', '[-109,37,-107,39]'], + main_group, + ['cat', path_coutwildrnp_shp, path_coutwildrnp_shp, '--bbox', '-109,37,-107,39'], + catch_exceptions=False) + assert result.exit_code == 0 + assert result.output.count('"Feature"') == 38 + + +def test_bbox_json_yes(path_coutwildrnp_shp): + runner = CliRunner() + result = runner.invoke( + main_group, + ['cat', path_coutwildrnp_shp, '--bbox', '[-109,37,-107,39]'], catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 19 -def test_multi_layer(): +def test_multi_layer(data_dir): layerdef = "1:coutwildrnp,1:coutwildrnp" runner = CliRunner() result = runner.invoke( - cat.cat, ['--layer', layerdef, 'tests/data/']) + main_group, ['cat', '--layer', layerdef, data_dir]) assert result.output.count('"Feature"') == 134 -def test_multi_layer_fail(): +def test_multi_layer_fail(data_dir): runner = CliRunner() - result = runner.invoke(cat.cat, ['--layer', - '200000:coutlildrnp', - 'tests/data']) + result = runner.invoke(main_group, ['cat', '--layer', '200000:coutlildrnp', + data_dir]) assert result.exit_code != 0 + + +def test_vfs(path_coutwildrnp_zip): + runner = CliRunner() + result = runner.invoke(main_group, [ + 'cat', 'zip://{}'.format(path_coutwildrnp_zip)]) + assert result.exit_code == 0 + assert result.output.count('"Feature"') == 67 diff -Nru fiona-1.7.10/tests/test_fio_collect.py fiona-1.8.6/tests/test_fio_collect.py --- fiona-1.7.10/tests/test_fio_collect.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_collect.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,50 +1,40 @@ -"""Unittests for $ fio collect""" +"""Tests for `$ fio collect`.""" import json import sys -import unittest from click.testing import CliRunner +import pytest -from fiona.fio import collect +# from fiona.fio import collect +from fiona.fio.main import main_group -from .fixtures import feature_seq -from .fixtures import feature_seq_pp_rs -FIXME_WINDOWS = sys.platform.startswith('win') - -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_collect_rs(): +def test_collect_rs(feature_seq_pp_rs): runner = CliRunner() result = runner.invoke( - collect.collect, - ['--src-crs', 'EPSG:3857'], + main_group, ['collect', '--src-crs', 'EPSG:3857'], feature_seq_pp_rs, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. 
Please look into why this test is not working.") -def test_collect_no_rs(): +def test_collect_no_rs(feature_seq): runner = CliRunner() result = runner.invoke( - collect.collect, - ['--src-crs', 'EPSG:3857'], + main_group, ['collect', '--src-crs', 'EPSG:3857'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 -def test_collect_ld(): +def test_collect_ld(feature_seq): runner = CliRunner() result = runner.invoke( - collect.collect, - ['--with-ld-context', '--add-ld-context-item', 'foo=bar'], + main_group, ['collect', '--with-ld-context', '--add-ld-context-item', 'foo=bar'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 @@ -52,18 +42,17 @@ assert '"foo": "bar"' in result.output -def test_collect_rec_buffered(): +def test_collect_rec_buffered(feature_seq): runner = CliRunner() - result = runner.invoke(collect.collect, ['--record-buffered'], feature_seq) + result = runner.invoke(main_group, ['collect', '--record-buffered'], feature_seq) assert result.exit_code == 0 assert '"FeatureCollection"' in result.output -def test_collect_noparse(): +def test_collect_noparse(feature_seq): runner = CliRunner() result = runner.invoke( - collect.collect, - ['--no-parse'], + main_group, ['collect', '--no-parse'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 @@ -71,11 +60,10 @@ assert len(json.loads(result.output)['features']) == 2 -def test_collect_noparse_records(): +def test_collect_noparse_records(feature_seq): runner = CliRunner() result = runner.invoke( - collect.collect, - ['--no-parse', '--record-buffered'], + main_group, ['collect', '--no-parse', '--record-buffered'], feature_seq, catch_exceptions=False) assert result.exit_code == 0 @@ -83,21 +71,19 @@ assert len(json.loads(result.output)['features']) == 2 -def test_collect_src_crs(): +def test_collect_src_crs(feature_seq): runner = CliRunner() result = runner.invoke( - collect.collect, - ['--no-parse', '--src-crs', 'epsg:4326'], + main_group, ['collect', '--no-parse', '--src-crs', 'epsg:4326'], feature_seq, catch_exceptions=False) assert result.exit_code == 2 -def test_collect_noparse_rs(): +def test_collect_noparse_rs(feature_seq_pp_rs): runner = CliRunner() result = runner.invoke( - collect.collect, - ['--no-parse'], + main_group, ['collect', '--no-parse'], feature_seq_pp_rs, catch_exceptions=False) assert result.exit_code == 0 diff -Nru fiona-1.7.10/tests/test_fio_distrib.py fiona-1.8.6/tests/test_fio_distrib.py --- fiona-1.7.10/tests/test_fio_distrib.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_distrib.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,23 +1,20 @@ -"""Unittests for $ fio distrib""" +"""Tests for `$ fio distrib`.""" from click.testing import CliRunner -from fiona.fio import distrib +from fiona.fio.main import main_group -from .fixtures import feature_collection -from .fixtures import feature_collection_pp - -def test_distrib(): +def test_distrib(feature_collection_pp): runner = CliRunner() - result = runner.invoke(distrib.distrib, [], feature_collection_pp) + result = runner.invoke(main_group, ['distrib', ], feature_collection_pp) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 -def test_distrib_no_rs(): +def test_distrib_no_rs(feature_collection): runner = CliRunner() - result = runner.invoke(distrib.distrib, [], feature_collection) + result = runner.invoke(main_group, ['distrib', ], feature_collection) assert result.exit_code == 0 assert result.output.count('"Feature"') == 2 diff -Nru 
fiona-1.7.10/tests/test_fio_dump.py fiona-1.8.6/tests/test_fio_dump.py --- fiona-1.7.10/tests/test_fio_dump.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_dump.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,30 +1,34 @@ """Unittests for $ fio dump""" -import sys -import unittest -from click.testing import CliRunner - -from fiona.fio import dump +import json +from click.testing import CliRunner -WILDSHP = 'tests/data/coutwildrnp.shp' -TESTGPX = 'tests/data/test_gpx.gpx' +import fiona +from fiona.fio.main import main_group -FIXME_WINDOWS = sys.platform.startswith('win') -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_dump(): +def test_dump(path_coutwildrnp_shp): runner = CliRunner() - result = runner.invoke(dump.dump, [WILDSHP]) + result = runner.invoke(main_group, ['dump', path_coutwildrnp_shp]) assert result.exit_code == 0 assert '"FeatureCollection"' in result.output -def test_dump_layer(): +def test_dump_layer(path_gpx): for layer in ('routes', '1'): runner = CliRunner() - result = runner.invoke(dump.dump, [TESTGPX, '--layer', layer]) + result = runner.invoke(main_group, ['dump', path_gpx, '--layer', layer]) assert result.exit_code == 0 assert '"FeatureCollection"' in result.output + + +def test_dump_layer_vfs(path_coutwildrnp_zip): + path = 'zip://{}'.format(path_coutwildrnp_zip) + result = CliRunner().invoke(main_group, ['dump', path]) + assert result.exit_code == 0 + loaded = json.loads(result.output) + with fiona.open(path) as src: + assert len(loaded['features']) == len(src) + assert len(loaded['features']) > 0 diff -Nru fiona-1.7.10/tests/test_fio_filter.py fiona-1.8.6/tests/test_fio_filter.py --- fiona-1.7.10/tests/test_fio_filter.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_filter.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,29 +1,28 @@ -from click.testing import CliRunner +"""Tests for `$ fio filter`.""" -from fiona.fio import filter +from fiona.fio.main import main_group -from .fixtures import feature_seq - -def test_fail(): - runner = CliRunner() - result = runner.invoke(filter.filter, - ["f.properties.test > 5"], - "{'type': 'no_properties'}") +def test_fail(runner): + result = runner.invoke(main_group, ['filter', + "f.properties.test > 5" + ], "{'type': 'no_properties'}") assert result.exit_code == 1 -def test_seq(): - runner = CliRunner() +def test_seq(feature_seq, runner): - result = runner.invoke(filter.filter, ["f.properties.AREA > 0.01"], feature_seq) + result = runner.invoke(main_group, ['filter', + "f.properties.AREA > 0.01"], feature_seq) assert result.exit_code == 0 assert result.output.count('Feature') == 2 - result = runner.invoke(filter.filter, ["f.properties.AREA > 0.015"], feature_seq) + result = runner.invoke(main_group, ['filter', + "f.properties.AREA > 0.015"], feature_seq) assert result.exit_code == 0 assert result.output.count('Feature') == 1 - result = runner.invoke(filter.filter, ["f.properties.AREA > 0.02"], feature_seq) + result = runner.invoke(main_group, ['filter', + "f.properties.AREA > 0.02"], feature_seq) assert result.exit_code == 0 assert result.output.count('Feature') == 0 diff -Nru fiona-1.7.10/tests/test_fio_info.py fiona-1.8.6/tests/test_fio_info.py --- fiona-1.7.10/tests/test_fio_info.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_info.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,48 +1,46 @@ +"""Tests for ``$ fio info``.""" + + import json from pkg_resources import iter_entry_points import re import sys 
-import unittest from click.testing import CliRunner +import pytest from fiona.fio.main import main_group -WILDSHP = 'tests/data/coutwildrnp.shp' - -FIXME_WINDOWS = sys.platform.startswith('win') - -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_info_json(): +def test_info_json(path_coutwildrnp_shp): runner = CliRunner() - result = runner.invoke(main_group, ['info', WILDSHP]) + result = runner.invoke(main_group, ['info', path_coutwildrnp_shp]) assert result.exit_code == 0 assert '"count": 67' in result.output assert '"crs": "EPSG:4326"' in result.output assert '"driver": "ESRI Shapefile"' in result.output assert '"name": "coutwildrnp"' in result.output -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_info_count(): + +def test_info_count(path_coutwildrnp_shp): runner = CliRunner() - result = runner.invoke(main_group, ['info', '--count', WILDSHP]) + result = runner.invoke( + main_group, ['info', '--count', path_coutwildrnp_shp]) assert result.exit_code == 0 assert result.output == "67\n" -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_info_bounds(): + +def test_info_bounds(path_coutwildrnp_shp): runner = CliRunner() - result = runner.invoke(main_group, ['info', '--bounds', WILDSHP]) + result = runner.invoke( + main_group, ['info', '--bounds', path_coutwildrnp_shp]) assert result.exit_code == 0 assert len(re.findall(r'\d*\.\d*', result.output)) == 4 def test_all_registered(): - # Make sure all the subcommands are actually registered to the main CLI group + """Make sure all the subcommands are actually registered to the main CLI + group.""" for ep in iter_entry_points('fiona.fio_commands'): assert ep.name in main_group.commands @@ -54,31 +52,36 @@ return lines -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_info_no_count(): +def test_info_no_count(path_gpx): """Make sure we can still get a `$ fio info` report on datasources that do not support feature counting, AKA `len(collection)`. """ runner = CliRunner() - result = runner.invoke(main_group, ['info', 'tests/data/test_gpx.gpx']) + result = runner.invoke(main_group, ['info', path_gpx]) assert result.exit_code == 0 lines = _filter_info_warning(result.output.splitlines()) assert len(lines) == 1, "First line is warning & second is JSON. No more." assert json.loads(lines[0])['count'] is None -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_info_layer(): +def test_info_layer(path_gpx): for layer in ('routes', '1'): runner = CliRunner() result = runner.invoke(main_group, [ 'info', - 'tests/data/test_gpx.gpx', + path_gpx, '--layer', layer]) - print(result.output) assert result.exit_code == 0 lines = _filter_info_warning(result.output.splitlines()) assert len(lines) == 1, "1st line is warning & 2nd is JSON - no more." 
assert json.loads(lines[0])['name'] == 'routes' + + +def test_info_vfs(path_coutwildrnp_zip, path_coutwildrnp_shp): + runner = CliRunner() + zip_result = runner.invoke(main_group, [ + 'info', 'zip://{}'.format(path_coutwildrnp_zip)]) + shp_result = runner.invoke(main_group, [ + 'info', path_coutwildrnp_shp]) + assert zip_result.exit_code == shp_result.exit_code == 0 + assert zip_result.output == shp_result.output diff -Nru fiona-1.7.10/tests/test_fio_load.py fiona-1.8.6/tests/test_fio_load.py --- fiona-1.7.10/tests/test_fio_load.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_load.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,116 +1,78 @@ +"""Tests for `$ fio load`.""" + + import json import os import shutil import sys -import tempfile -import unittest -from click.testing import CliRunner +import pytest import fiona from fiona.fio.main import main_group -from .fixtures import ( - feature_collection, feature_seq, feature_seq_pp_rs) -FIXME_WINDOWS = sys.platform.startswith('win') - -def test_err(): - runner = CliRunner() +def test_err(runner): result = runner.invoke( main_group, ['load'], '', catch_exceptions=False) assert result.exit_code == 2 -def test_exception(tmpdir=None): - if tmpdir is None: - tmpdir = tempfile.mkdtemp() - tmpfile = os.path.join(tmpdir, 'test.shp') - else: - tmpfile = str(tmpdir.join('test.shp')) - runner = CliRunner() - result = runner.invoke( - main_group, ['load', '-f', 'Shapefile', tmpfile], '42', catch_exceptions=False) +def test_exception(tmpdir, runner): + tmpfile = str(tmpdir.mkdir('tests').join('test_exception.shp')) + result = runner.invoke(main_group, [ + 'load', '-f', 'Shapefile', tmpfile + ], '42', catch_exceptions=False) assert result.exit_code == 1 -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_collection(tmpdir=None): - if tmpdir is None: - tmpdir = tempfile.mkdtemp() - tmpfile = os.path.join(tmpdir, 'test.shp') - else: - tmpfile = str(tmpdir.join('test.shp')) - runner = CliRunner() + +def test_collection(tmpdir, feature_collection, runner): + tmpfile = str(tmpdir.mkdir('tests').join('test_collection.shp')) result = runner.invoke( main_group, ['load', '-f', 'Shapefile', tmpfile], feature_collection) assert result.exit_code == 0 assert len(fiona.open(tmpfile)) == 2 -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_seq_rs(tmpdir=None): - if tmpdir is None: - tmpdir = tempfile.mkdtemp() - tmpfile = os.path.join(tmpdir, 'test.shp') - else: - tmpfile = str(tmpdir.join('test.shp')) - runner = CliRunner() + +def test_seq_rs(feature_seq_pp_rs, tmpdir, runner): + tmpfile = str(tmpdir.mkdir('tests').join('test_seq_rs.shp')) result = runner.invoke( main_group, ['load', '-f', 'Shapefile', tmpfile], feature_seq_pp_rs) assert result.exit_code == 0 assert len(fiona.open(tmpfile)) == 2 -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. 
Please look into why this test is not working.") -def test_seq_no_rs(tmpdir=None): - if tmpdir is None: - tmpdir = tempfile.mkdtemp() - tmpfile = os.path.join(tmpdir, 'test.shp') - else: - tmpfile = str(tmpdir.join('test.shp')) - runner = CliRunner() - result = runner.invoke( - main_group, ['load', '-f', 'Shapefile', '--sequence', tmpfile], feature_seq) +def test_seq_no_rs(tmpdir, runner, feature_seq): + tmpfile = str(tmpdir.mkdir('tests').join('test_seq_no_rs.shp')) + result = runner.invoke(main_group, [ + 'load', '-f', 'Shapefile', tmpfile], feature_seq) assert result.exit_code == 0 assert len(fiona.open(tmpfile)) == 2 -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_dst_crs_default_to_src_crs(tmpdir=None): - # When --dst-crs is not given default to --src-crs. - if tmpdir is None: - tmpdir = tempfile.mkdtemp() - tmpfile = os.path.join(tmpdir, 'test.shp') - else: - tmpfile = str(tmpdir.join('test.shp')) - runner = CliRunner() - result = runner.invoke( - main_group, [ - 'load', '--src-crs', 'EPSG:32617', '-f', 'Shapefile', '--sequence', tmpfile - ], feature_seq) +def test_dst_crs_default_to_src_crs(tmpdir, runner, feature_seq): + """When --dst-crs is not given default to --src-crs.""" + tmpfile = str(tmpdir.mkdir('tests').join('test_src_vs_dst_crs.shp')) + result = runner.invoke(main_group, [ + 'load', + '--src-crs', + 'EPSG:32617', + '-f', 'Shapefile', + tmpfile + ], feature_seq) assert result.exit_code == 0 with fiona.open(tmpfile) as src: assert src.crs == {'init': 'epsg:32617'} assert len(src) == len(feature_seq.splitlines()) -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_different_crs(tmpdir=None): - if tmpdir is None: - tmpdir = tempfile.mkdtemp() - tmpfile = os.path.join(tmpdir, 'test.shp') - else: - tmpfile = str(tmpdir.join('test.shp')) - runner = CliRunner() +def test_different_crs(tmpdir, runner, feature_seq): + tmpfile = str(tmpdir.mkdir('tests').join('test_different_crs.shp')) result = runner.invoke( main_group, [ 'load', '--src-crs', 'EPSG:32617', '--dst-crs', 'EPSG:32610', - '-f', 'Shapefile', '--sequence', tmpfile + '-f', 'Shapefile', tmpfile ], feature_seq) assert result.exit_code == 0 with fiona.open(tmpfile) as src: @@ -118,30 +80,23 @@ assert len(src) == len(feature_seq.splitlines()) -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_dst_crs_no_src(tmpdir=None): - if tmpdir is None: - tmpdir = tempfile.mkdtemp() - tmpfile = os.path.join(tmpdir, 'test.shp') - else: - tmpfile = str(tmpdir.join('test.shp')) - runner = CliRunner() - result = runner.invoke( - main_group, [ - 'load', '--dst-crs', 'EPSG:32610', '-f', 'Shapefile', '--sequence', tmpfile - ], feature_seq) +def test_dst_crs_no_src(tmpdir, runner, feature_seq): + tmpfile = str(tmpdir.mkdir('tests').join('test_dst_crs_no_src.shp')) + result = runner.invoke(main_group, [ + 'load', + '--dst-crs', + 'EPSG:32610', + '-f', 'Shapefile', + tmpfile + ], feature_seq) assert result.exit_code == 0 with fiona.open(tmpfile) as src: assert src.crs == {'init': 'epsg:32610'} assert len(src) == len(feature_seq.splitlines()) -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. 
Please look into why this test is not working.") -def test_fio_load_layer(): - - tmpdir = tempfile.mkdtemp() +def test_fio_load_layer(tmpdir, runner): + outdir = str(tmpdir.mkdir('tests').mkdir('test_fio_load_layer')) try: feature = { 'type': 'Feature', @@ -151,24 +106,20 @@ 'coordinates': (5.0, 39.0) } } - sequence = os.linesep.join(map(json.dumps, [feature, feature])) - - runner = CliRunner() result = runner.invoke(main_group, [ 'load', - tmpdir, + outdir, '--driver', 'ESRI Shapefile', '--src-crs', 'EPSG:4236', - '--layer', 'test_layer', - '--sequence'], + '--layer', 'test_layer'], input=sequence) assert result.exit_code == 0 - with fiona.open(tmpdir) as src: + with fiona.open(outdir) as src: assert len(src) == 2 assert src.name == 'test_layer' assert src.schema['geometry'] == 'Point' finally: - shutil.rmtree(tmpdir) + shutil.rmtree(outdir) diff -Nru fiona-1.7.10/tests/test_fio_ls.py fiona-1.8.6/tests/test_fio_ls.py --- fiona-1.7.10/tests/test_fio_ls.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_ls.py 2019-03-19 04:25:07.000000000 +0000 @@ -2,64 +2,59 @@ import json -import shutil import sys -import tempfile -import unittest - +import os from click.testing import CliRunner - +import pytest import fiona from fiona.fio.main import main_group -FIXME_WINDOWS = sys.platform.startswith('win') -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. Please look into why this test is not working.") -def test_fio_ls_single_layer(): +def test_fio_ls_single_layer(data_dir): - result = CliRunner().invoke(main_group, [ - 'ls', - 'tests/data/']) + result = CliRunner().invoke(main_group, ['ls', data_dir]) assert result.exit_code == 0 assert len(result.output.splitlines()) == 1 - assert json.loads(result.output) == ['coutwildrnp'] + assert sorted(json.loads(result.output)) == ['coutwildrnp', 'gre', 'test_tin'] -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. 
Please look into why this test is not working.") -def test_fio_ls_indent(): +def test_fio_ls_indent(path_coutwildrnp_shp): result = CliRunner().invoke(main_group, [ 'ls', '--indent', '4', - 'tests/data/coutwildrnp.shp']) + path_coutwildrnp_shp]) assert result.exit_code == 0 assert len(result.output.strip().splitlines()) == 3 assert json.loads(result.output) == ['coutwildrnp'] -def test_fio_ls_multi_layer(): +def test_fio_ls_multi_layer(path_coutwildrnp_shp, tmpdir): + outdir = str(tmpdir.mkdir('test_fio_ls_multi_layer')) - infile = 'tests/data/coutwildrnp.shp' - outdir = tempfile.mkdtemp() - try: - - # Copy test shapefile into new directory - # Shapefile driver treats a directory of shapefiles as a single - # multi-layer datasource - layer_names = ['l1', 'l2'] - for layer in layer_names: - with fiona.open(infile) as src, \ - fiona.open(outdir, 'w', layer=layer, **src.meta) as dst: - for feat in src: - dst.write(feat) - - # Run CLI test - result = CliRunner().invoke(main_group, [ - 'ls', outdir]) - assert result.exit_code == 0 - assert json.loads(result.output) == layer_names + # Copy test shapefile into new directory + # Shapefile driver treats a directory of shapefiles as a single + # multi-layer datasource + layer_names = ['l1', 'l2'] + for layer in layer_names: + with fiona.open(path_coutwildrnp_shp) as src, \ + fiona.open(outdir, 'w', layer=layer, **src.meta) as dst: + for feat in src: + dst.write(feat) - finally: - shutil.rmtree(outdir) + # Run CLI test + result = CliRunner().invoke(main_group, [ + 'ls', outdir]) + assert result.exit_code == 0 + json_result = json.loads(result.output) + assert sorted(json_result) == sorted(layer_names) + + +def test_fio_ls_vfs(path_coutwildrnp_zip): + runner = CliRunner() + result = runner.invoke(main_group, [ + 'ls', 'zip://{}'.format(path_coutwildrnp_zip)]) + assert result.exit_code == 0 + loaded = json.loads(result.output) + assert len(loaded) == 1 + assert loaded[0] == 'coutwildrnp' diff -Nru fiona-1.7.10/tests/test_fio_rm.py fiona-1.8.6/tests/test_fio_rm.py --- fiona-1.7.10/tests/test_fio_rm.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_fio_rm.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,61 @@ +import os +import pytest +import fiona +from click.testing import CliRunner +from fiona.fio.main import main_group + +def create_sample_data(filename, driver, **extra_meta): + meta = { + 'driver': driver, + 'schema': { + 'geometry': 'Point', + 'properties': {} + } + } + meta.update(extra_meta) + with fiona.open(filename, 'w', **meta) as dst: + dst.write({ + 'geometry': { + 'type': 'Point', + 'coordinates': (0, 0), + }, + 'properties': {}, + }) + assert(os.path.exists(filename)) + +drivers = ["ESRI Shapefile", "GeoJSON"] +@pytest.mark.parametrize("driver", drivers) +def test_remove(tmpdir, driver): + extension = {"ESRI Shapefile": "shp", "GeoJSON": "json"}[driver] + filename = "delete_me.{extension}".format(extension=extension) + filename = str(tmpdir.join(filename)) + create_sample_data(filename, driver) + + result = CliRunner().invoke(main_group, [ + "rm", + filename, + "--yes" + ]) + print(result.output) + assert result.exit_code == 0 + assert not os.path.exists(filename) + + +has_gpkg = "GPKG" in fiona.supported_drivers.keys() +@pytest.mark.skipif(not has_gpkg, reason="Requires GPKG driver") +def test_remove_layer(tmpdir): + filename = str(tmpdir.join("a_filename.gpkg")) + create_sample_data(filename, "GPKG", layer="layer1") + create_sample_data(filename, "GPKG", layer="layer2") + assert fiona.listlayers(filename) == ["layer1", 
"layer2"] + + result = CliRunner().invoke(main_group, [ + "rm", + filename, + "--layer", "layer2", + "--yes" + ]) + print(result.output) + assert result.exit_code == 0 + assert os.path.exists(filename) + assert fiona.listlayers(filename) == ["layer1"] diff -Nru fiona-1.7.10/tests/test_geojson.py fiona-1.8.6/tests/test_geojson.py --- fiona-1.7.10/tests/test_geojson.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_geojson.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,72 +1,111 @@ - -import logging -import os -import shutil -import sys -import tempfile -import unittest +import pytest import fiona from fiona.collection import supported_drivers from fiona.errors import FionaValueError, DriverError, SchemaError, CRSError -# logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) - -log = logging.getLogger(__name__) - - -class ReadingTest(unittest.TestCase): - - def setUp(self): - self.c = fiona.open('tests/data/coutwildrnp.json', 'r') - - def tearDown(self): - self.c.close() - - def test_json(self): - self.assertEqual(len(self.c), 67) - -class WritingTest(unittest.TestCase): - - def setUp(self): - self.tempdir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.tempdir) - - def test_json(self): - path = os.path.join(self.tempdir, 'foo.json') - with fiona.open(path, 'w', - driver='GeoJSON', - schema={'geometry': 'Unknown', 'properties': [('title', 'str')]}) as c: - c.writerecords([{ - 'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}, - 'properties': {'title': 'One'}}]) - c.writerecords([{ - 'geometry': {'type': 'MultiPoint', 'coordinates': [[0.0, 0.0]]}, - 'properties': {'title': 'Two'}}]) - with fiona.open(path) as c: - self.assertEqual(c.schema['geometry'], 'Unknown') - self.assertEqual(len(c), 2) - - def test_json_overwrite(self): - path = os.path.join(self.tempdir, 'foo.json') - - with fiona.drivers(), fiona.open(path, 'w', - driver='GeoJSON', - schema={'geometry': 'Unknown', 'properties': [('title', 'str')]}) as c: - c.writerecords([{ - 'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}, - 'properties': {'title': 'One'}}]) - c.writerecords([{ - 'geometry': {'type': 'MultiPoint', 'coordinates': [[0.0, 0.0]]}, - 'properties': {'title': 'Two'}}]) - - # Overwrite should raise DriverIOError. 
- try: - with fiona.drivers(), fiona.open(path, 'w', driver='GeoJSON', - schema={'geometry': 'Unknown', 'properties': [('title', 'str')]}) as c: - pass - except IOError: - pass +def test_json_read(path_coutwildrnp_json): + with fiona.open(path_coutwildrnp_json, 'r') as c: + assert len(c) == 67 + + +def test_json(tmpdir): + """Write a simple GeoJSON file""" + path = str(tmpdir.join('foo.json')) + with fiona.open(path, 'w', + driver='GeoJSON', + schema={'geometry': 'Unknown', + 'properties': [('title', 'str')]}) as c: + c.writerecords([{ + 'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}, + 'properties': {'title': 'One'}}]) + c.writerecords([{ + 'geometry': {'type': 'MultiPoint', 'coordinates': [[0.0, 0.0]]}, + 'properties': {'title': 'Two'}}]) + with fiona.open(path) as c: + assert c.schema['geometry'] == 'Unknown' + assert len(c) == 2 + + +def test_json_overwrite(tmpdir): + """Overwrite an existing GeoJSON file""" + path = str(tmpdir.join('foo.json')) + + driver = "GeoJSON" + schema1 = {"geometry": "Unknown", "properties": [("title", "str")]} + schema2 = {"geometry": "Unknown", "properties": [("other", "str")]} + + features1 = [ + { + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"title": "One"}, + }, + { + "geometry": {"type": "MultiPoint", "coordinates": [[0.0, 0.0]]}, + "properties": {"title": "Two"}, + } + ] + features2 = [ + { + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"other": "Three"}, + }, + ] + + # write some data to a file + with fiona.open(path, "w", driver=driver, schema=schema1) as c: + c.writerecords(features1) + + # test the data was written correctly + with fiona.open(path, "r") as c: + assert len(c) == 2 + feature = next(iter(c)) + assert feature["properties"]["title"] == "One" + + # attempt to overwrite the existing file with some new data + with fiona.open(path, "w", driver=driver, schema=schema2) as c: + c.writerecords(features2) + + # test the second file was written correctly + with fiona.open(path, "r") as c: + assert len(c) == 1 + feature = next(iter(c)) + assert feature["properties"]["other"] == "Three" + + +def test_json_overwrite_invalid(tmpdir): + """Overwrite an existing file that isn't a valid GeoJSON""" + + # write some invalid data to a file + path = str(tmpdir.join('foo.json')) + with open(path, "w") as f: + f.write("This isn't a valid GeoJSON file!!!") + + schema1 = {"geometry": "Unknown", "properties": [("title", "str")]} + features1 = [ + { + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"title": "One"}, + }, + { + "geometry": {"type": "MultiPoint", "coordinates": [[0.0, 0.0]]}, + "properties": {"title": "Two"}, + } + ] + + # attempt to overwrite it with a valid file + with fiona.open(path, "w", driver="GeoJSON", schema=schema1) as dst: + dst.writerecords(features1) + + # test the data was written correctly + with fiona.open(path, "r") as src: + assert len(src) == 2 + + +def test_write_json_invalid_directory(tmpdir): + """Attempt to create a file in a directory that doesn't exist""" + path = str(tmpdir.join('does-not-exist', 'foo.json')) + schema = {"geometry": "Unknown", "properties": [("title", "str")]} + with pytest.raises(DriverError): + fiona.open(path, "w", driver="GeoJSON", schema=schema) diff -Nru fiona-1.7.10/tests/test_geometry.py fiona-1.8.6/tests/test_geometry.py --- fiona-1.7.10/tests/test_geometry.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_geometry.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,193 +1,133 @@ -# testing 
geometry extension, to be called by nosetests +"""Tests for geometry objects.""" -import logging -import sys -import unittest +import pytest from fiona._geometry import (GeomBuilder, geometryRT) from fiona.errors import UnsupportedGeometryTypeError -logging.basicConfig(stream=sys.stderr, level=logging.INFO) - - def geometry_wkb(wkb): + try: + wkb = bytes.fromhex(wkb) + except AttributeError: + wkb = wkb.decode('hex') return GeomBuilder().build_wkb(wkb) -class OGRBuilderExceptionsTest(unittest.TestCase): - def test(self): - geom = {'type': "Bogus", 'coordinates': None} - self.assertRaises(ValueError, geometryRT, geom) - -# The round tripping tests are defined in this not to be run base class. -# -class RoundTripping(object): - """Derive type specific classes from this.""" - def test_type(self): - self.assertEqual( - geometryRT(self.geom)['type'], self.geom['type']) - def test_coordinates(self): - self.assertEqual( - geometryRT(self.geom)['coordinates'], self.geom['coordinates']) - -# All these get their tests from the RoundTripping class. -# -class PointRoundTripTest(unittest.TestCase, RoundTripping): - def setUp(self): - self.geom = {'type': "Point", 'coordinates': (0.0, 0.0)} - -class LineStringRoundTripTest(unittest.TestCase, RoundTripping): - def setUp(self): - self.geom = { - 'type': "LineString", - 'coordinates': [(0.0, 0.0), (1.0, 1.0)]} - -class PolygonRoundTripTest1(unittest.TestCase, RoundTripping): - """An explicitly closed polygon.""" - def setUp(self): - self.geom = { - 'type': "Polygon", - 'coordinates': [ - [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]} - -class PolygonRoundTripTest2(unittest.TestCase, RoundTripping): - """An implicitly closed polygon.""" - def setUp(self): - self.geom = { - 'type': "Polygon", - 'coordinates': [ - [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]} - def test_coordinates(self): - self.assertEqual( - [geometryRT(self.geom)['coordinates'][0][:-1]], - self.geom['coordinates']) - -class MultiPointRoundTripTest(unittest.TestCase, RoundTripping): - def setUp(self): - self.geom = { - 'type': "MultiPoint", 'coordinates': [(0.0, 0.0), (1.0, 1.0)]} - -class MultiLineStringRoundTripTest(unittest.TestCase, RoundTripping): - def setUp(self): - self.geom = { - 'type': "MultiLineString", - 'coordinates': [[(0.0, 0.0), (1.0, 1.0)]]} - -class MultiPolygonRoundTripTest1(unittest.TestCase, RoundTripping): - def setUp(self): - # This is an explicitly closed polygon. - self.geom = { - 'type': "MultiPolygon", - 'coordinates': [[ - [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)] - ]]} - -class MultiPolygonRoundTripTest2(unittest.TestCase, RoundTripping): - def setUp(self): - # This is an implicitly closed polygon. 
- self.geom = { - 'type': "MultiPolygon", - 'coordinates': - [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]]} - def test_coordinates(self): - self.assertEqual( - [[geometryRT(self.geom)['coordinates'][0][0][:-1]]], - self.geom['coordinates']) - -class GeometryCollectionRoundTripTest(unittest.TestCase): - def setUp(self): - self.geom = { - 'type': "GeometryCollection", - 'geometries': [ - {'type': "Point", 'coordinates': (0.0, 0.0)}, { - 'type': "LineString", - 'coordinates': [(0.0, 0.0), (1.0, 1.0)]}]} - def test_len(self): - result = geometryRT(self.geom) - self.assertEqual(len(result['geometries']), 2) - def test_type(self): - result = geometryRT(self.geom) - self.assertEqual( - [g['type'] for g in result['geometries']], - ['Point', 'LineString']) - -class PointTest(unittest.TestCase): - def test_point(self): - # Hex-encoded Point (0 0) - try: - wkb = bytes.fromhex("010100000000000000000000000000000000000000") - except: - wkb = "010100000000000000000000000000000000000000".decode('hex') - geom = geometry_wkb(wkb) - self.assertEqual(geom['type'], "Point") - self.assertEqual(geom['coordinates'], (0.0, 0.0)) - -class LineStringTest(unittest.TestCase): - def test_line(self): - # Hex-encoded LineString (0 0, 1 1) - try: - wkb = bytes.fromhex("01020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f") - except: - wkb = "01020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f".decode('hex') - geom = geometry_wkb(wkb) - self.assertEqual(geom['type'], "LineString") - self.assertEqual(geom['coordinates'], [(0.0, 0.0), (1.0, 1.0)]) - -class PolygonTest(unittest.TestCase): - def test_polygon(self): - # 1 x 1 box (0, 0, 1, 1) - try: - wkb = bytes.fromhex("01030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000") - except: - wkb = "01030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000".decode('hex') - geom = geometry_wkb(wkb) - self.assertEqual(geom['type'], "Polygon") - self.assertEqual(len(geom['coordinates']), 1) - self.assertEqual(len(geom['coordinates'][0]), 5) - x, y = zip(*geom['coordinates'][0]) - self.assertEqual(min(x), 0.0) - self.assertEqual(min(y), 0.0) - self.assertEqual(max(x), 1.0) - self.assertEqual(max(y), 1.0) - -class MultiPointTest(unittest.TestCase): - def test_multipoint(self): - try: - wkb = bytes.fromhex("0104000000020000000101000000000000000000000000000000000000000101000000000000000000f03f000000000000f03f") - except: - wkb = "0104000000020000000101000000000000000000000000000000000000000101000000000000000000f03f000000000000f03f".decode('hex') - geom = geometry_wkb(wkb) - self.assertEqual(geom['type'], "MultiPoint") - self.assertEqual(geom['coordinates'], [(0.0, 0.0), (1.0, 1.0)]) - -class MultiLineStringTest(unittest.TestCase): - def test_multilinestring(self): - # Hex-encoded LineString (0 0, 1 1) - try: - wkb = bytes.fromhex("01050000000100000001020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f") - except: - wkb = "01050000000100000001020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f".decode('hex') - geom = geometry_wkb(wkb) - self.assertEqual(geom['type'], "MultiLineString") - self.assertEqual(len(geom['coordinates']), 1) - self.assertEqual(len(geom['coordinates'][0]), 2) - 
self.assertEqual(geom['coordinates'][0], [(0.0, 0.0), (1.0, 1.0)]) - -class MultiPolygonTest(unittest.TestCase): - def test_multipolygon(self): - # [1 x 1 box (0, 0, 1, 1)] - try: - wkb = bytes.fromhex("01060000000100000001030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000") - except: - wkb = "01060000000100000001030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000".decode('hex') - geom = geometry_wkb(wkb) - self.assertEqual(geom['type'], "MultiPolygon") - self.assertEqual(len(geom['coordinates']), 1) - self.assertEqual(len(geom['coordinates'][0]), 1) - self.assertEqual(len(geom['coordinates'][0][0]), 5) - x, y = zip(*geom['coordinates'][0][0]) - self.assertEqual(min(x), 0.0) - self.assertEqual(min(y), 0.0) - self.assertEqual(max(x), 1.0) - self.assertEqual(max(y), 1.0) +def test_ogr_builder_exceptions(): + geom = {'type': "Bogus", 'coordinates': None} + with pytest.raises(ValueError): + geometryRT(geom) + + +@pytest.mark.parametrize('geom_type, coordinates', [ + ('Point', (0.0, 0.0)), + ('LineString', [(0.0, 0.0), (1.0, 1.0)]), + ('Polygon', + [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]), + ('MultiPoint', [(0.0, 0.0), (1.0, 1.0)]), + ('MultiLineString', [[(0.0, 0.0), (1.0, 1.0)]]), + ('MultiPolygon', + [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]]), +]) +def test_round_tripping(geom_type, coordinates): + result = geometryRT({'type': geom_type, 'coordinates': coordinates}) + assert result['type'] == geom_type + assert result['coordinates'] == coordinates + + +@pytest.mark.parametrize('geom_type, coordinates', [ + ('Polygon', [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]), + ('MultiPolygon', [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]]), +]) +def test_implicitly_closed_round_tripping(geom_type, coordinates): + result = geometryRT({'type': geom_type, 'coordinates': coordinates}) + assert result['type'] == geom_type + result_coordinates = result['coordinates'] + while not isinstance(coordinates[0], tuple): + result_coordinates = result_coordinates[0] + coordinates = coordinates[0] + assert result_coordinates[:-1] == coordinates + + +def test_geometry_collection_round_trip(): + geom = { + 'type': "GeometryCollection", + 'geometries': [ + {'type': "Point", 'coordinates': (0.0, 0.0)}, { + 'type': "LineString", + 'coordinates': [(0.0, 0.0), (1.0, 1.0)]}]} + + result = geometryRT(geom) + assert len(result['geometries']) == 2 + assert [g['type'] for g in result['geometries']] == ['Point', 'LineString'] + + +def test_point_wkb(): + # Hex-encoded Point (0 0) + wkb = "010100000000000000000000000000000000000000" + geom = geometry_wkb(wkb) + assert geom['type'] == "Point" + assert geom['coordinates'] == (0.0, 0.0) + + +def test_line_wkb(): + # Hex-encoded LineString (0 0, 1 1) + wkb = ("01020000000200000000000000000000000000000000000000000000000000f03f" + "000000000000f03f") + geom = geometry_wkb(wkb) + assert geom['type'] == "LineString" + assert geom['coordinates'] == [(0.0, 0.0), (1.0, 1.0)] + + +def test_polygon_wkb(): + # 1 x 1 box (0, 0, 1, 1) + wkb = ("01030000000100000005000000000000000000f03f000000000000000000000000" + "0000f03f000000000000f03f0000000000000000000000000000f03f0000000000" + "0000000000000000000000000000000000f03f0000000000000000") + geom = geometry_wkb(wkb) + assert 
geom['type'], "Polygon" + assert len(geom['coordinates']) == 1 + assert len(geom['coordinates'][0]) == 5 + x, y = zip(*geom['coordinates'][0]) + assert min(x) == 0.0 + assert min(y) == 0.0 + assert max(x) == 1.0 + assert max(y) == 1.0 + + +def test_multipoint_wkb(): + wkb = ("010400000002000000010100000000000000000000000000000000000000010100" + "0000000000000000f03f000000000000f03f") + geom = geometry_wkb(wkb) + assert geom['type'] == "MultiPoint" + assert geom['coordinates'] == [(0.0, 0.0), (1.0, 1.0)] + + +def test_multilinestring_wkb(): + # Hex-encoded LineString (0 0, 1 1) + wkb = ("010500000001000000010200000002000000000000000000000000000000000000" + "00000000000000f03f000000000000f03f") + geom = geometry_wkb(wkb) + assert geom['type'] == "MultiLineString" + assert len(geom['coordinates']) == 1 + assert len(geom['coordinates'][0]) == 2 + assert geom['coordinates'][0] == [(0.0, 0.0), (1.0, 1.0)] + + +def test_multipolygon_wkb(): + # [1 x 1 box (0, 0, 1, 1)] + wkb = ("01060000000100000001030000000100000005000000000000000000f03f000000" + "0000000000000000000000f03f000000000000f03f000000000000000000000000" + "0000f03f00000000000000000000000000000000000000000000f03f0000000000" + "000000") + geom = geometry_wkb(wkb) + assert geom['type'] == "MultiPolygon" + assert len(geom['coordinates']) == 1 + assert len(geom['coordinates'][0]) == 1 + assert len(geom['coordinates'][0][0]) == 5 + x, y = zip(*geom['coordinates'][0][0]) + assert min(x) == 0.0 + assert min(y) == 0.0 + assert max(x) == 1.0 + assert max(y) == 1.0 diff -Nru fiona-1.7.10/tests/test_geopackage.py fiona-1.8.6/tests/test_geopackage.py --- fiona-1.7.10/tests/test_geopackage.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_geopackage.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,86 +1,95 @@ - -import logging import os -import os.path -import shutil -import sys -import tempfile -import unittest - import pytest - import fiona -from fiona.collection import supported_drivers -from fiona.errors import FionaValueError, DriverError, SchemaError, CRSError -from fiona.ogrext import calc_gdal_version_num, get_gdal_version_num - - -logging.basicConfig(stream=sys.stderr, level=logging.INFO) - - -class ReadingTest(unittest.TestCase): - - def setUp(self): - pass - - def tearDown(self): - pass - - @pytest.mark.skipif(not os.path.exists('tests/data/coutwildrnp.gpkg'), - reason="Requires geopackage fixture") - def test_gpkg(self): - if get_gdal_version_num() < calc_gdal_version_num(1, 11, 0): - self.assertRaises(DriverError, fiona.open, 'tests/data/coutwildrnp.gpkg', 'r', driver="GPKG") - else: - with fiona.open('tests/data/coutwildrnp.gpkg', 'r', driver="GPKG") as c: - self.assertEquals(len(c), 48) - - -class WritingTest(unittest.TestCase): - - def setUp(self): - self.tempdir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.tempdir) - - @pytest.mark.skipif(not os.path.exists('tests/data/coutwildrnp.gpkg'), - reason="Requires geopackage fixture") - def test_gpkg(self): - schema = {'geometry': 'Point', - 'properties': [('title', 'str')]} - crs = { - 'a': 6370997, - 'lon_0': -100, - 'y_0': 0, - 'no_defs': True, - 'proj': 'laea', - 'x_0': 0, - 'units': 'm', - 'b': 6370997, - 'lat_0': 45} - - path = os.path.join(self.tempdir, 'foo.gpkg') - - if get_gdal_version_num() < calc_gdal_version_num(1, 11, 0): - self.assertRaises(DriverError, - fiona.open, - path, - 'w', - driver='GPKG', - schema=schema, - crs=crs) - else: - with fiona.open(path, 'w', - driver='GPKG', - schema=schema, - crs=crs) as c: - c.writerecords([{ - 
'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}, - 'properties': {'title': 'One'}}]) - c.writerecords([{ - 'geometry': {'type': 'Point', 'coordinates': [2.0, 3.0]}, - 'properties': {'title': 'Two'}}]) - with fiona.open(path) as c: - self.assertEquals(c.schema['geometry'], 'Point') - self.assertEquals(len(c), 2) +from .conftest import requires_gpkg + +example_schema = { + 'geometry': 'Point', + 'properties': [('title', 'str')], +} + +example_crs = { + 'a': 6370997, + 'lon_0': -100, + 'y_0': 0, + 'no_defs': True, + 'proj': 'laea', + 'x_0': 0, + 'units': 'm', + 'b': 6370997, + 'lat_0': 45, +} + +example_features = [ + { + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"title": "One"}, + }, + { + "geometry": {"type": "Point", "coordinates": [1.0, 2.0]}, + "properties": {"title": "Two"}, + }, + { + "geometry": {"type": "Point", "coordinates": [3.0, 4.0]}, + "properties": {"title": "Three"}, + }, +] + +@requires_gpkg +def test_read_gpkg(path_coutwildrnp_gpkg): + """ + Implicitly tests writing GPKG, as the fixture creates the data source on + first request. + """ + with fiona.open(path_coutwildrnp_gpkg, "r") as src: + assert len(src) == 67 + feature = next(iter(src)) + assert feature["geometry"]["type"] == "Polygon" + assert feature["properties"]["NAME"] == "Mount Naomi Wilderness" + +@requires_gpkg +def test_write_gpkg(tmpdir): + path = str(tmpdir.join('foo.gpkg')) + + with fiona.open(path, 'w', + driver='GPKG', + schema=example_schema, + crs=example_crs) as dst: + dst.writerecords(example_features) + + with fiona.open(path) as src: + assert src.schema['geometry'] == 'Point' + assert src.crs == example_crs + assert len(src) == 3 + +@requires_gpkg +def test_write_multilayer_gpkg(tmpdir): + """ + Test that writing a second layer to an existing geopackage doesn't remove + an existing layer from the dataset. + """ + path = str(tmpdir.join('foo.gpkg')) + + with fiona.open(path, 'w', + driver='GPKG', + schema=example_schema, + layer="layer1", + crs=example_crs) as dst: + dst.writerecords(example_features[0:2]) + + with fiona.open(path, 'w', + driver='GPKG', + schema=example_schema, + layer="layer2", + crs=example_crs) as dst: + dst.writerecords(example_features[2:]) + + with fiona.open(path, layer="layer1") as src: + assert src.schema['geometry'] == 'Point' + assert src.crs == example_crs + assert len(src) == 2 + + with fiona.open(path, layer="layer2") as src: + assert src.schema['geometry'] == 'Point' + assert src.crs == example_crs + assert len(src) == 1 diff -Nru fiona-1.7.10/tests/test_integration.py fiona-1.8.6/tests/test_integration.py --- fiona-1.7.10/tests/test_integration.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_integration.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,59 +1,43 @@ """Unittests to verify Fiona is functioning properly with other software.""" -import collections -import os -import shutil -import tempfile -import unittest - import six import fiona -class TestCRSNonDict(unittest.TestCase): +def test_dict_subclass(tmpdir): +    """Rasterio now has a `CRS()` class that subclasses + `collections.UserDict()`. Make sure we can receive it. + + `UserDict()` is a good class to test against because in Python 2 it is + not a subclass of `collections.Mapping()`, so it provides an edge case.
+ """ + + class CRS(six.moves.UserDict): + pass + + outfile = str(tmpdir.join('test_UserDict.geojson')) + + profile = { + 'crs': CRS(init='EPSG:4326'), + 'driver': 'GeoJSON', + 'schema': { + 'geometry': 'Point', + 'properties': {} + } + } - @classmethod - def setUpClass(self): - self.tempdir = tempfile.mkdtemp() - - @classmethod - def tearDownClass(self): - shutil.rmtree(self.tempdir) - - def test_dict_subclass(self): - """Rasterio now has a `CRS()` class that subclasses - `collections.UserDict()`. Make sure we can receive it. - - `UserDict()` is a good class to test against because in Python 2 it is - not a subclass of `collections.Mapping()`, so it provides an edge case. - """ - - class CRS(six.moves.UserDict): - pass - - outfile = os.path.join(self.tempdir, 'test_UserDict.geojson') - - profile = { - 'crs': CRS(init='EPSG:4326'), - 'driver': 'GeoJSON', - 'schema': { - 'geometry': 'Point', - 'properties': {} + with fiona.open(outfile, 'w', **profile) as dst: + dst.write({ + 'type': 'Feature', + 'properties': {}, + 'geometry': { + 'type': 'Point', + 'coordinates': (10, -10) } - } + }) - with fiona.open(outfile, 'w', **profile) as dst: - dst.write({ - 'type': 'Feature', - 'properties': {}, - 'geometry': { - 'type': 'Point', - 'coordinates': (10, -10) - } - }) - - with fiona.open(outfile) as src: - assert len(src) == 1 - assert src.crs == {'init': 'epsg:4326'} + with fiona.open(outfile) as src: + assert len(src) == 1 + assert src.crs == {'init': 'epsg:4326'} diff -Nru fiona-1.7.10/tests/test_layer.py fiona-1.8.6/tests/test_layer.py --- fiona-1.7.10/tests/test_layer.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_layer.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,77 +1,70 @@ -import logging -import os -import shutil -import sys -import tempfile -import unittest +import pytest import fiona +from .test_collection import TestReading -logging.basicConfig(stream=sys.stderr, level=logging.INFO) -from .test_collection import ReadingTest - -def test_index_selection(): - with fiona.open('tests/data/coutwildrnp.shp', 'r', layer=0) as c: +def test_index_selection(path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp, 'r', layer=0) as c: assert len(c) == 67 -class FileReadingTest(ReadingTest): - - def setUp(self): - self.c = fiona.open('tests/data/coutwildrnp.shp', 'r', layer='coutwildrnp') - - def tearDown(self): + +class TestFileReading(TestReading): + @pytest.fixture(autouse=True) + def shapefile(self, path_coutwildrnp_shp): + self.c = fiona.open(path_coutwildrnp_shp, 'r', layer='coutwildrnp') + yield self.c.close() - def test_open_repr(self): - self.assertEqual( - repr(self.c), - ("" % hex(id(self.c)))) + def test_open_repr(self, path_coutwildrnp_shp): + assert ( + repr(self.c) == + ("".format(path=path_coutwildrnp_shp, id=hex(id(self.c))))) - def test_closed_repr(self): + def test_closed_repr(self, path_coutwildrnp_shp): self.c.close() - self.assertEqual( - repr(self.c), - ("" % hex(id(self.c)))) + assert ( + repr(self.c) == + ("".format(path=path_coutwildrnp_shp, id=hex(id(self.c))))) def test_name(self): - self.assertEqual(self.c.name, 'coutwildrnp') + assert self.c.name == 'coutwildrnp' -class DirReadingTest(ReadingTest): - - def setUp(self): - self.c = fiona.open("tests/data", "r", layer="coutwildrnp") - - def tearDown(self): + +class TestDirReading(TestReading): + @pytest.fixture(autouse=True) + def shapefile(self, data_dir): + self.c = fiona.open(data_dir, "r", layer="coutwildrnp") + yield self.c.close() - def test_open_repr(self): - self.assertEqual( - repr(self.c), 
- ("" % hex(id(self.c)))) + def test_open_repr(self, data_dir): + assert ( + repr(self.c) == + ("".format(path=data_dir, id=hex(id(self.c))))) - def test_closed_repr(self): + def test_closed_repr(self, data_dir): self.c.close() - self.assertEqual( - repr(self.c), - ("" % hex(id(self.c)))) + assert ( + repr(self.c) == + ("".format(path=data_dir, id=hex(id(self.c))))) def test_name(self): - self.assertEqual(self.c.name, 'coutwildrnp') + assert self.c.name == 'coutwildrnp' + + def test_path(self, data_dir): + assert self.c.path == data_dir - def test_path(self): - self.assertEqual(self.c.path, "tests/data") -class InvalidLayerTest(unittest.TestCase): +def test_invalid_layer(path_coutwildrnp_shp): + with pytest.raises(ValueError): + fiona.open(path_coutwildrnp_shp, layer="foo") - def test_invalid(self): - self.assertRaises(ValueError, fiona.open, ("tests/data/coutwildrnp.shp"), layer="foo") - def test_write_numeric_layer(self): - self.assertRaises(ValueError, fiona.open, - (os.path.join(tempfile.gettempdir(), "test-no-iter.shp")), - mode='w', layer=0) +def test_write_invalid_numeric_layer(path_coutwildrnp_shp, tmpdir): + with pytest.raises(ValueError): + fiona.open(str(tmpdir.join("test-no-iter.shp")), mode='w', layer=0) diff -Nru fiona-1.7.10/tests/test_listing.py fiona-1.8.6/tests/test_listing.py --- fiona-1.7.10/tests/test_listing.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_listing.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,51 +1,61 @@ +"""Test listing a datasource's layers.""" + import logging -import os -import shutil import sys -import unittest +import os + +import pytest import fiona import fiona.ogrext +from fiona.errors import DriverError + + +def test_single_file_private(path_coutwildrnp_shp): + with fiona.Env(): + assert fiona.ogrext._listlayers( + path_coutwildrnp_shp) == ['coutwildrnp'] + + +def test_single_file(path_coutwildrnp_shp): + assert fiona.listlayers(path_coutwildrnp_shp) == ['coutwildrnp'] + + +def test_directory(data_dir): + assert sorted(fiona.listlayers(data_dir)) == ['coutwildrnp', 'gre', 'test_tin'] + -FIXME_WINDOWS = sys.platform.startswith("win") +def test_directory_trailing_slash(data_dir): + assert sorted(fiona.listlayers(data_dir)) == ['coutwildrnp', 'gre', 'test_tin'] -logging.basicConfig(stream=sys.stderr, level=logging.INFO) -def test_single_file_private(): - with fiona.drivers(): - assert fiona.ogrext._listlayers('tests/data/coutwildrnp.shp') == ['coutwildrnp'] +def test_zip_path(path_coutwildrnp_zip): + assert fiona.listlayers( + 'zip://{}'.format(path_coutwildrnp_zip)) == ['coutwildrnp'] -def test_single_file(): - assert fiona.listlayers('tests/data/coutwildrnp.shp') == ['coutwildrnp'] -def test_directory(): - assert fiona.listlayers('tests/data') == ['coutwildrnp'] +def test_zip_path_arch(path_coutwildrnp_zip): + vfs = 'zip://{}'.format(path_coutwildrnp_zip) + assert fiona.listlayers('/coutwildrnp.shp', vfs=vfs) == ['coutwildrnp'] -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. ValueError raised. 
Please look into why this test isn't working.") -def test_directory_trailing_slash(): - assert fiona.listlayers('tests/data/') == ['coutwildrnp'] -def test_zip_path(): - assert fiona.listlayers('zip://tests/data/coutwildrnp.zip') == ['coutwildrnp'] +def test_list_not_existing(data_dir): + """Test underlying Cython function correctly raises""" + path = os.path.join(data_dir, "does_not_exist.geojson") + with pytest.raises(DriverError): + fiona.ogrext._listlayers(path) -def test_zip_path_arch(): - assert fiona.listlayers('/coutwildrnp.shp', vfs='zip://tests/data/coutwildrnp.zip') == ['coutwildrnp'] -class ListLayersArgsTest(unittest.TestCase): - def test_path(self): - self.assertRaises(TypeError, fiona.listlayers, (1)) - def test_vfs(self): - self.assertRaises(TypeError, fiona.listlayers, ("/"), vfs=1) - def test_path_ioerror(self): - self.assertRaises(IOError, fiona.listlayers, ("foobar")) +def test_invalid_path(): + with pytest.raises(TypeError): + fiona.listlayers(1) -def test_parse_path(): - assert fiona.parse_paths("zip://foo.zip") == ("foo.zip", "zip", None) -def test_parse_path2(): - assert fiona.parse_paths("foo") == ("foo", None, None) +def test_invalid_vfs(): + with pytest.raises(TypeError): + fiona.listlayers("/", vfs=1) -def test_parse_vfs(): - assert fiona.parse_paths("/", "zip://foo.zip") == ("/", "zip", "foo.zip") +def test_invalid_path_ioerror(): + with pytest.raises(DriverError): + fiona.listlayers("foobar") diff -Nru fiona-1.7.10/tests/test_logutils.py fiona-1.8.6/tests/test_logutils.py --- fiona-1.7.10/tests/test_logutils.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_logutils.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,41 @@ +"""Tests of skipped field log message filtering""" + +import logging +import os + +import fiona +from fiona.logutils import LogFiltering, FieldSkipLogFilter + + +def test_filtering(caplog): + """Test that ordinary log messages pass""" + logger = logging.getLogger() + with LogFiltering(logger, FieldSkipLogFilter()): + logger.warning("Attention!") + logger.warning("Skipping field 1") + logger.warning("Skipping field 2") + logger.warning("Danger!") + logger.warning("Skipping field 1") + assert len(caplog.records) == 4 + assert caplog.records[0].getMessage() == "Attention!" + assert caplog.records[1].getMessage() == "Skipping field 1" + assert caplog.records[2].getMessage() == "Skipping field 2" + assert caplog.records[3].getMessage() == "Danger!" 
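A minimal usage sketch of the helpers exercised by test_filtering above, assuming only the `LogFiltering` and `FieldSkipLogFilter` names this patch introduces (the field names in the messages are illustrative, not taken from the patch):

    import logging

    from fiona.logutils import LogFiltering, FieldSkipLogFilter

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger()

    # While the context manager is active, the first occurrence of each
    # distinct "Skipping field ..." message is emitted and repeats are
    # dropped; messages that don't match the pattern pass through untouched.
    with LogFiltering(logger, FieldSkipLogFilter()):
        logger.warning("Skipping field elevation")  # emitted
        logger.warning("Skipping field elevation")  # dropped as a repeat
        logger.warning("Skipping field name")       # emitted
        logger.warning("All done")                  # emitted

    # On exit the filter is removed and logging behaves normally again.

This matches the assertions in test_filtering: four records reach the handler, with the duplicate "Skipping field 1" suppressed.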
+ + +def test_skipping_slice(caplog, data_dir): + """Collection filters out all but one warning message""" + with fiona.open(os.path.join(data_dir, "issue627.geojson")) as src: + results = list(src) + assert len(results) == 3 + assert not any(['skip_me' in f['properties'] for f in results]) + assert len([rec for rec in caplog.records if rec.getMessage().startswith('Skipping')]) == 1 + + +def test_skipping_list(caplog, data_dir): + """Collection filters out all but one warning message""" + with fiona.open(os.path.join(data_dir, "issue627.geojson")) as src: + results = list(src) + assert len(results) == 3 + assert not any(['skip_me' in f['properties'] for f in results]) + assert len([rec for rec in caplog.records if rec.getMessage().startswith('Skipping')]) == 1 diff -Nru fiona-1.7.10/tests/test_memoryfile.py fiona-1.8.6/tests/test_memoryfile.py --- fiona-1.7.10/tests/test_memoryfile.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_memoryfile.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,77 @@ +"""Tests of MemoryFile and ZippedMemoryFile""" + +from io import BytesIO +import pytest +import uuid + +import fiona +from fiona.io import MemoryFile, ZipMemoryFile + + +@pytest.fixture(scope='session') +def profile_first_coutwildrnp_shp(path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp) as col: + return col.profile, next(iter(col)) + + +def test_memoryfile(path_coutwildrnp_json): + """In-memory GeoJSON file can be read""" + with open(path_coutwildrnp_json, 'rb') as f: + data = f.read() + with MemoryFile(data) as memfile: + with memfile.open() as collection: + assert len(collection) == 67 + + +def test_zip_memoryfile(bytes_coutwildrnp_zip): + """In-memory zipped Shapefile can be read""" + with ZipMemoryFile(bytes_coutwildrnp_zip) as memfile: + with memfile.open('coutwildrnp.shp') as collection: + assert len(collection) == 67 + + +def test_write_memoryfile(profile_first_coutwildrnp_shp): + """In-memory Shapefile can be written""" + profile, first = profile_first_coutwildrnp_shp + profile['driver'] = 'GeoJSON' + with MemoryFile() as memfile: + with memfile.open(**profile) as col: + col.write(first) + memfile.seek(0) + data = memfile.read() + + with MemoryFile(data) as memfile: + with memfile.open() as col: + assert len(col) == 1 + + +def test_memoryfile_bytesio(path_coutwildrnp_json): + """In-memory GeoJSON file can be read""" + with open(path_coutwildrnp_json, 'rb') as f: + data = f.read() + + with fiona.open(BytesIO(data)) as collection: + assert len(collection) == 67 + + +def test_memoryfile_fileobj(path_coutwildrnp_json): + """In-memory GeoJSON file can be read""" + with open(path_coutwildrnp_json, 'rb') as f: + + with fiona.open(f) as collection: + assert len(collection) == 67 + + +def test_write_memoryfile_(profile_first_coutwildrnp_shp): + """In-memory Shapefile can be written""" + profile, first = profile_first_coutwildrnp_shp + profile['driver'] = 'GeoJSON' + with BytesIO() as fout: + with fiona.open(fout, 'w', **profile) as col: + col.write(first) + fout.seek(0) + data = fout.read() + + with MemoryFile(data) as memfile: + with memfile.open() as col: + assert len(col) == 1 diff -Nru fiona-1.7.10/tests/test_multiconxn.py fiona-1.8.6/tests/test_multiconxn.py --- fiona-1.7.10/tests/test_multiconxn.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_multiconxn.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,52 +1,41 @@ -import logging -import os -import shutil -import sys -import tempfile -import unittest +import pytest import fiona from fiona.compat 
import OrderedDict -logging.basicConfig(stream=sys.stderr, level=logging.INFO) -FIXME_WINDOWS = sys.platform.startswith("win") - -class ReadAccess(unittest.TestCase): +class TestReadAccess(object): # To check that we'll be able to get multiple 'r' connections to layers # in a single file. - - def setUp(self): - self.c = fiona.open("tests/data/coutwildrnp.shp", "r", layer="coutwildrnp") - - def tearDown(self): - self.c.close() - def test_meta(self): - with fiona.open("tests/data/coutwildrnp.shp", "r", layer="coutwildrnp") as c2: - self.assertEqual(len(self.c), len(c2)) - self.assertEqual(sorted(self.c.schema.items()), sorted(c2.schema.items())) + def test_meta(self, path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c: + with fiona.open(path_coutwildrnp_shp, "r", + layer="coutwildrnp") as c2: + assert len(c) == len(c2) + assert sorted(c.schema.items()) == sorted(c2.schema.items()) + + def test_feat(self, path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c: + f1 = next(iter(c)) + with fiona.open(path_coutwildrnp_shp, "r", + layer="coutwildrnp") as c2: + f2 = next(iter(c2)) + assert f1 == f2 - def test_meta(self): - f1 = next(iter(self.c)) - with fiona.open("tests/data/coutwildrnp.shp", "r", layer="coutwildrnp") as c2: - f2 = next(iter(c2)) - self.assertEqual(f1, f2) - -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. These tests raise PermissionErrors on Windows in Python 3.x (which doesn't exist in Python 2.7). Please look into why this test isn't working.") -class ReadWriteAccess(unittest.TestCase): + +class TestReadWriteAccess(object): # To check that we'll be able to read from a file that we're # writing to. - - def setUp(self): - self.tempdir = tempfile.mkdtemp() + + @pytest.fixture(autouse=True) + def multi_write_test_shp(self, tmpdir): + self.shapefile_path = str(tmpdir.join("multi_write_test.shp")) self.c = fiona.open( - os.path.join(self.tempdir, "multi_write_test.shp"), - "w", + self.shapefile_path, "w", driver="ESRI Shapefile", schema={ - 'geometry': 'Point', + 'geometry': 'Point', 'properties': [('title', 'str:80'), ('date', 'date')]}, crs={'init': "epsg:4326", 'no_defs': True}, encoding='utf-8') @@ -56,46 +45,41 @@ 'properties': OrderedDict([('title', 'point one'), ('date', '2012-01-29')])} self.c.writerecords([self.f]) self.c.flush() - - def tearDown(self): + yield self.c.close() - shutil.rmtree(self.tempdir) def test_meta(self): - c2 = fiona.open(os.path.join(self.tempdir, "multi_write_test.shp"), "r") - self.assertEqual(len(self.c), len(c2)) - self.assertEqual(sorted(self.c.schema.items()), sorted(c2.schema.items())) + c2 = fiona.open(self.shapefile_path, "r") + assert len(self.c) == len(c2) + assert sorted(self.c.schema.items()) == sorted(c2.schema.items()) + c2.close() def test_read(self): - c2 = fiona.open(os.path.join(self.tempdir, "multi_write_test.shp"), "r") + c2 = fiona.open(self.shapefile_path, "r") f2 = next(iter(c2)) del f2['id'] - self.assertEqual(self.f, f2) + assert self.f == f2 + c2.close() def test_read_after_close(self): - c2 = fiona.open(os.path.join(self.tempdir, "multi_write_test.shp"), "r") + c2 = fiona.open(self.shapefile_path, "r") self.c.close() f2 = next(iter(c2)) del f2['id'] - self.assertEqual(self.f, f2) + assert self.f == f2 + c2.close() -@unittest.skipIf(FIXME_WINDOWS, - reason="FIXME on Windows. These tests raise PermissionErrors on Windows in Python 3.x (which doesn't exist in Python 2.7). 
Please look into why this test isn't working.") -class LayerCreation(unittest.TestCase): - - def setUp(self): - self.tempdir = tempfile.mkdtemp() - self.dir = os.path.join(self.tempdir, 'layer_creation') - if os.path.exists(self.dir): - shutil.rmtree(self.dir) - os.mkdir(self.dir) + +class TestLayerCreation(object): + @pytest.fixture(autouse=True) + def layer_creation_shp(self, tmpdir): + self.dir = tmpdir.mkdir('layer_creation') self.c = fiona.open( - self.dir, - 'w', + str(self.dir), 'w', layer='write_test', driver='ESRI Shapefile', schema={ - 'geometry': 'Point', + 'geometry': 'Point', 'properties': [('title', 'str:80'), ('date', 'date')]}, crs={'init': "epsg:4326", 'no_defs': True}, encoding='utf-8') @@ -105,25 +89,26 @@ 'properties': OrderedDict([('title', 'point one'), ('date', '2012-01-29')])} self.c.writerecords([self.f]) self.c.flush() - - def tearDown(self): + yield self.c.close() - shutil.rmtree(self.tempdir) def test_meta(self): - c2 = fiona.open(os.path.join(self.dir, "write_test.shp"), "r") - self.assertEqual(len(self.c), len(c2)) - self.assertEqual(sorted(self.c.schema.items()), sorted(c2.schema.items())) + c2 = fiona.open(str(self.dir.join("write_test.shp")), "r") + assert len(self.c) == len(c2) + assert sorted(self.c.schema.items()) == sorted(c2.schema.items()) + c2.close() def test_read(self): - c2 = fiona.open(os.path.join(self.dir, "write_test.shp"), "r") + c2 = fiona.open(str(self.dir.join("write_test.shp")), "r") f2 = next(iter(c2)) del f2['id'] - self.assertEqual(self.f, f2) + assert self.f == f2 + c2.close() def test_read_after_close(self): - c2 = fiona.open(os.path.join(self.dir, "write_test.shp"), "r") + c2 = fiona.open(str(self.dir.join("write_test.shp")), "r") self.c.close() f2 = next(iter(c2)) del f2['id'] - self.assertEqual(self.f, f2) + assert self.f == f2 + c2.close() diff -Nru fiona-1.7.10/tests/test_non_counting_layer.py fiona-1.8.6/tests/test_non_counting_layer.py --- fiona-1.7.10/tests/test_non_counting_layer.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_non_counting_layer.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,35 +1,38 @@ -import unittest +import pytest import fiona +from fiona.errors import FionaDeprecationWarning -GPX_FILE = 'tests/data/test_gpx.gpx' -class NonCountingLayerTest(unittest.TestCase): - def setUp(self): - self.c = fiona.open(GPX_FILE, "r", layer="track_points") - - def tearDown(self): +@pytest.mark.usefixtures('uttc_path_gpx') +class TestNonCountingLayer(object): + def setup(self): + self.c = fiona.open(self.path_gpx, "r", layer="track_points") + + def teardown(self): self.c.close() def test_len_fail(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): len(self.c) def test_list(self): features = list(self.c) - self.assertEqual(len(features), 19) + assert len(features) == 19 def test_getitem(self): - feature = self.c[2] + self.c[2] def test_fail_getitem_negative_index(self): - with self.assertRaises(IndexError): + with pytest.raises(IndexError): self.c[-1] def test_slice(self): - features = self.c[2:5] - self.assertEqual(len(features), 3) + with pytest.warns(FionaDeprecationWarning): + features = self.c[2:5] + assert len(features) == 3 def test_fail_slice_negative_index(self): - with self.assertRaises(IndexError): - self.c[2:-4] + with pytest.warns(FionaDeprecationWarning): + with pytest.raises(IndexError): + self.c[2:-4] diff -Nru fiona-1.7.10/tests/test_open.py fiona-1.8.6/tests/test_open.py --- fiona-1.7.10/tests/test_open.py 1970-01-01 00:00:00.000000000 +0000 +++ 
diff -Nru fiona-1.7.10/tests/test_open.py fiona-1.8.6/tests/test_open.py
--- fiona-1.7.10/tests/test_open.py	1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/tests/test_open.py	2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,8 @@
+"""Tests of file opening"""
+
+import fiona
+
+
+def test_open_shp(path_coutwildrnp_shp):
+    """Open a shapefile"""
+    assert fiona.open(path_coutwildrnp_shp)
diff -Nru fiona-1.7.10/tests/test_profile.py fiona-1.8.6/tests/test_profile.py
--- fiona-1.7.10/tests/test_profile.py	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/tests/test_profile.py	2019-03-19 04:25:07.000000000 +0000
@@ -1,20 +1,20 @@
 import os
-import tempfile
+import re

 import fiona

+from .conftest import WGS84PATTERN

-def test_profile():
-    with fiona.open('tests/data/coutwildrnp.shp') as src:
-        assert src.meta['crs_wkt'] == 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
+def test_profile(path_coutwildrnp_shp):
+    with fiona.open(path_coutwildrnp_shp) as src:
+        assert re.match(WGS84PATTERN, src.crs_wkt)

-def test_profile_creation_wkt():
-    tmpdir = tempfile.mkdtemp()
-    outfilename = os.path.join(tmpdir, 'test.shp')
-    with fiona.open('tests/data/coutwildrnp.shp') as src:
+def test_profile_creation_wkt(tmpdir, path_coutwildrnp_shp):
+    outfilename = str(tmpdir.join("test.shp"))
+    with fiona.open(path_coutwildrnp_shp) as src:
         profile = src.meta
         profile['crs'] = 'bogus'
         with fiona.open(outfilename, 'w', **profile) as dst:
             assert dst.crs == {'init': 'epsg:4326'}
-            assert dst.crs_wkt == 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
+            assert re.match(WGS84PATTERN, dst.crs_wkt)
diff -Nru fiona-1.7.10/tests/test_read_drivers.py fiona-1.8.6/tests/test_read_drivers.py
--- fiona-1.7.10/tests/test_read_drivers.py	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/tests/test_read_drivers.py	2019-03-19 04:25:07.000000000 +0000
@@ -1,26 +1,20 @@
-import logging
-import sys
-
 import pytest

 import fiona
 from fiona.errors import FionaValueError

-logging.basicConfig(stream=sys.stderr, level=logging.INFO)

-
-def test_read_fail():
+def test_read_fail(path_coutwildrnp_shp):
     with pytest.raises(FionaValueError):
-        fiona.open('tests/data/coutwildrnp.shp', driver='GeoJSON')
+        fiona.open(path_coutwildrnp_shp, driver='GeoJSON')
     with pytest.raises(FionaValueError):
-        fiona.open('tests/data/coutwildrnp.shp', enabled_drivers=['GeoJSON'])
+        fiona.open(path_coutwildrnp_shp, enabled_drivers=['GeoJSON'])


-def test_read():
-    with fiona.open(
-            'tests/data/coutwildrnp.shp', driver='ESRI Shapefile') as src:
+def test_read(path_coutwildrnp_shp):
+    with fiona.open(path_coutwildrnp_shp, driver='ESRI Shapefile') as src:
         assert src.driver == 'ESRI Shapefile'
     with fiona.open(
-            'tests/data/coutwildrnp.shp',
+            path_coutwildrnp_shp,
             enabled_drivers=['GeoJSON', 'ESRI Shapefile']) as src:
         assert src.driver == 'ESRI Shapefile'
diff -Nru fiona-1.7.10/tests/test_remove.py fiona-1.8.6/tests/test_remove.py
--- fiona-1.7.10/tests/test_remove.py	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/tests/test_remove.py	2019-03-19 04:25:07.000000000 +0000
@@ -1,17 +1,16 @@
 import logging
 import sys
 import os
+import itertools
+from .conftest import requires_gpkg

-import tempfile
 import pytest

 import fiona
+from fiona.errors import DatasetDeleteError

-logging.basicConfig(stream=sys.stderr, level=logging.INFO)
-
-
-def create_sample_data(filename, driver):
+def create_sample_data(filename, driver, **extra_meta):
     meta = {
         'driver': driver,
         'schema': {
@@ -19,6 +18,7 @@
             'properties': {}
         }
     }
+    meta.update(extra_meta)
     with fiona.open(filename, 'w', **meta) as dst:
         dst.write({
             'geometry': {
@@ -30,48 +30,88 @@
     assert(os.path.exists(filename))


-def test_remove(tmpdir=None):
-    if tmpdir is None:
-        tmpdir = tempfile.mkdtemp()
-    filename_shp = os.path.join(tmpdir, 'test.shp')
-
-    create_sample_data(filename_shp, driver='ESRI Shapefile')
-    fiona.remove(filename_shp, driver='ESRI Shapefile')
-    assert(not os.path.exists(filename_shp))
-
-    with pytest.raises(RuntimeError):
-        fiona.remove(filename_shp, driver='ESRI Shapefile')
-
-
-def test_remove_driver(tmpdir=None):
-    if tmpdir is None:
-        tmpdir = tempfile.mkdtemp()
-    filename_shp = os.path.join(tmpdir, 'test.shp')
-    filename_json = os.path.join(tmpdir, 'test.json')
-
-    create_sample_data(filename_shp, driver='ESRI Shapefile')
-    create_sample_data(filename_json, driver='GeoJSON')
-    fiona.remove(filename_json, driver='GeoJSON')
-    assert(not os.path.exists(filename_json))
-    assert(os.path.exists(filename_shp))
-
-
-def test_remove_collection(tmpdir=None):
-    if tmpdir is None:
-        tmpdir = tempfile.mkdtemp()
-    filename_shp = os.path.join(tmpdir, 'test.shp')
-
-    create_sample_data(filename_shp, driver='ESRI Shapefile')
-    collection = fiona.open(filename_shp, 'r')
-    fiona.remove(collection)
-    assert(not os.path.exists(filename_shp))
-
-
-def test_remove_path_without_driver(tmpdir=None):
-    if tmpdir is None:
-        tmpdir = tempfile.mkdtemp()
-    filename_shp = os.path.join(tmpdir, 'test.shp')
-
-    create_sample_data(filename_shp, driver='ESRI Shapefile')
-
-    with pytest.raises(Exception):
-        fiona.remove(filename_shp)
-
-    assert(os.path.exists(filename_shp))
+drivers = ["ESRI Shapefile", "GeoJSON"]
+kinds = ["path", "collection"]
+specify_drivers = [True, False]
+test_data = itertools.product(drivers, kinds, specify_drivers)
+
+
+@pytest.mark.parametrize("driver, kind, specify_driver", test_data)
+def test_remove(tmpdir, kind, driver, specify_driver):
+    """Test various dataset removal operations"""
+    extension = {"ESRI Shapefile": "shp", "GeoJSON": "json"}[driver]
+    filename = "delete_me.{extension}".format(extension=extension)
+    output_filename = str(tmpdir.join(filename))
+
+    create_sample_data(output_filename, driver=driver)
+    if kind == "collection":
+        to_delete = fiona.open(output_filename, "r")
+    else:
+        to_delete = output_filename
+
+    assert os.path.exists(output_filename)
+    if specify_driver:
+        fiona.remove(to_delete, driver=driver)
+    else:
+        fiona.remove(to_delete)
+    assert not os.path.exists(output_filename)
+
+
+def test_remove_nonexistent(tmpdir):
+    """Attempting to remove a file that does not exist results in an IOError"""
+    filename = str(tmpdir.join("does_not_exist.shp"))
+    assert not os.path.exists(filename)
+    with pytest.raises(IOError):
+        fiona.remove(filename)
+
+
+@requires_gpkg
+def test_remove_layer(tmpdir):
+    filename = str(tmpdir.join("a_filename.gpkg"))
+    create_sample_data(filename, "GPKG", layer="layer1")
+    create_sample_data(filename, "GPKG", layer="layer2")
+    create_sample_data(filename, "GPKG", layer="layer3")
+    create_sample_data(filename, "GPKG", layer="layer4")
+    assert fiona.listlayers(filename) == ["layer1", "layer2", "layer3", "layer4"]
+
+    # remove by index
+    fiona.remove(filename, layer=2)
+    assert fiona.listlayers(filename) == ["layer1", "layer2", "layer4"]
+
+    # remove by name
+    fiona.remove(filename, layer="layer2")
+    assert fiona.listlayers(filename) == ["layer1", "layer4"]
+
+    # remove by negative index
+    fiona.remove(filename, layer=-1)
+    assert fiona.listlayers(filename) == ["layer1"]
+
+    # invalid layer name
+    with pytest.raises(ValueError):
+        fiona.remove(filename, layer="invalid_layer_name")
+
+    # invalid layer index
+    with pytest.raises(DatasetDeleteError):
+        fiona.remove(filename, layer=999)
+
+
+def test_remove_layer_shapefile(tmpdir):
+    """Removal of a layer in a shapefile actually deletes the datasource"""
+    filename = str(tmpdir.join("a_filename.shp"))
+    create_sample_data(filename, "ESRI Shapefile")
+    fiona.remove(filename, layer=0)
+    assert not os.path.exists(filename)
+
+
+def test_remove_layer_geojson(tmpdir):
+    """Removal of layers is not supported by the GeoJSON driver.
+
+    The reason for failure is slightly different between GDAL 2.2+ and < 2.2.
+    With < 2.2 the datasource will fail to open in write mode (IOError), while
+    with 2.2+ the datasource will open but the removal operation will fail (not
+    supported).
+    """
+    filename = str(tmpdir.join("a_filename.geojson"))
+    create_sample_data(filename, "GeoJSON")
+    with pytest.raises((RuntimeError, IOError)):
+        fiona.remove(filename, layer=0)
+    assert os.path.exists(filename)
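The parametrized test above reduces fiona.remove() to a small usage matrix: the target may be a path or an open Collection, the driver may be given or inferred, and on multi-layer formats a single layer can be dropped by name or index. A minimal sketch, with hypothetical filenames:

    import os

    import fiona

    # remove a whole dataset by path; the driver argument is optional
    fiona.remove("delete_me.shp", driver="ESRI Shapefile")
    assert not os.path.exists("delete_me.shp")

    # remove one layer from a multi-layer GeoPackage
    fiona.remove("stack.gpkg", layer="layer2")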
diff -Nru fiona-1.7.10/tests/test_revolvingdoor.py fiona-1.8.6/tests/test_revolvingdoor.py
--- fiona-1.7.10/tests/test_revolvingdoor.py	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/tests/test_revolvingdoor.py	2019-03-19 04:25:07.000000000 +0000
@@ -1,36 +1,17 @@
 # Test of opening and closing and opening

-import logging
-import os.path
-import shutil
-import subprocess
-import sys
-import tempfile
-import unittest
-
 import fiona

-logging.basicConfig(stream=sys.stderr, level=logging.INFO)
-log = logging.getLogger('fiona.tests')

-class RevolvingDoorTest(unittest.TestCase):
+def test_write_revolving_door(tmpdir, path_coutwildrnp_shp):
+    with fiona.open(path_coutwildrnp_shp) as src:
+        meta = src.meta
+        features = list(src)
+
+    shpname = str(tmpdir.join('foo.shp'))

-    def setUp(self):
-        self.tempdir = tempfile.mkdtemp()
-
-    def tearDown(self):
-        shutil.rmtree(self.tempdir)
-
-    def test_write_revolving_door(self):
-
-        with fiona.open('tests/data/coutwildrnp.shp') as src:
-            meta = src.meta
-            features = list(src)
-
-        shpname = os.path.join(self.tempdir, 'foo.shp')
-
-        with fiona.open(shpname, 'w', **meta) as dst:
-            dst.writerecords(features)
+    with fiona.open(shpname, 'w', **meta) as dst:
+        dst.writerecords(features)

-        with fiona.open(shpname) as src:
-            pass
+    with fiona.open(shpname) as src:
+        pass
diff -Nru fiona-1.7.10/tests/test_rfc3339.py fiona-1.8.6/tests/test_rfc3339.py
--- fiona-1.7.10/tests/test_rfc3339.py	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/tests/test_rfc3339.py	2019-03-19 04:25:07.000000000 +0000
@@ -1,63 +1,60 @@
-# testing Fiona's RFC 3339 support, to be called by nosetests
+"""Tests for Fiona's RFC 3339 support."""
+

-import logging
 import re
-import sys
-import unittest
+
+import pytest

 from fiona.rfc3339 import parse_date, parse_datetime, parse_time
 from fiona.rfc3339 import group_accessor, pattern_date

-logging.basicConfig(stream=sys.stderr, level=logging.INFO)

-class DateParseTest(unittest.TestCase):
+class TestDateParse(object):

     def test_yyyymmdd(self):
-        self.assertEqual(
-            parse_date("2012-01-29"), (2012, 1, 29, 0, 0, 0, 0.0))
+        assert parse_date("2012-01-29") == (2012, 1, 29, 0, 0, 0, 0.0)

     def test_error(self):
-        self.assertRaises(ValueError, parse_date, ("xxx"))
+        with pytest.raises(ValueError):
+            parse_date("xxx")
+
+
+class TestTimeParse(object):

-class TimeParseTest(unittest.TestCase):
-
     def test_hhmmss(self):
-        self.assertEqual(
-            parse_time("10:11:12"), (0, 0, 0, 10, 11, 12, 0.0))
+        assert parse_time("10:11:12") == (0, 0, 0, 10, 11, 12, 0.0)

     def test_hhmm(self):
-        self.assertEqual(
-            parse_time("10:11"), (0, 0, 0, 10, 11, 0, 0.0))
+        assert parse_time("10:11") == (0, 0, 0, 10, 11, 0, 0.0)

     def test_hhmmssff(self):
-        self.assertEqual(
-            parse_time("10:11:12.42"),
-            (0, 0, 0, 10, 11, 12, 0.42*1000000.0))
+        assert parse_time("10:11:12.42") == (0, 0, 0, 10, 11, 12, 0.42*1000000)

     def test_hhmmssz(self):
-        self.assertEqual(
-            parse_time("10:11:12Z"), (0, 0, 0, 10, 11, 12, 0.0))
+        assert parse_time("10:11:12Z") == (0, 0, 0, 10, 11, 12, 0.0)

     def test_hhmmssoff(self):
-        self.assertEqual(
-            parse_time("10:11:12-01:00"), (0, 0, 0, 10, 11, 12, 0.0))
+        assert parse_time("10:11:12-01:00") == (0, 0, 0, 10, 11, 12, 0.0)

     def test_error(self):
-        self.assertRaises(ValueError, parse_time, ("xxx"))
+        with pytest.raises(ValueError):
+            parse_time("xxx")
+
+
+class TestDatetimeParse(object):

-class DatetimeParseTest(unittest.TestCase):
-
     def test_yyyymmdd(self):
-        self.assertEqual(
-            parse_datetime("2012-01-29T10:11:12"),
+        assert (
+            parse_datetime("2012-01-29T10:11:12") ==
             (2012, 1, 29, 10, 11, 12, 0.0))

     def test_error(self):
-        self.assertRaises(ValueError, parse_datetime, ("xxx"))
+        with pytest.raises(ValueError):
+            parse_datetime("xxx")
+

 def test_group_accessor_indexerror():
     match = re.search(pattern_date, '2012-01-29')
     g = group_accessor(match)
     assert g.group(-1) == 0
     assert g.group(6) == 0
-
diff -Nru fiona-1.7.10/tests/test_rfc64_tin.py fiona-1.8.6/tests/test_rfc64_tin.py
--- fiona-1.7.10/tests/test_rfc64_tin.py	1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/tests/test_rfc64_tin.py	2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,49 @@
+"""Tests of features related to GDAL RFC 64
+
+See https://trac.osgeo.org/gdal/wiki/rfc64_triangle_polyhedralsurface_tin.
+"""
+
+import fiona
+
+from .conftest import requires_gdal22
+
+
+def test_tin_shp(path_test_tin_shp):
+    """Convert TIN to MultiPolygon"""
+    with fiona.open(path_test_tin_shp) as col:
+        assert col.schema['geometry'] == 'Unknown'
+        features = list(col)
+        assert len(features) == 1
+        assert features[0]['geometry']['type'] == 'MultiPolygon'
+        assert features[0]['geometry']['coordinates'] == [[[(0.0, 0.0, 0.0),
+                                                            (0.0, 0.0, 1.0),
+                                                            (0.0, 1.0, 0.0),
+                                                            (0.0, 0.0, 0.0)]],
+                                                          [[(0.0, 0.0, 0.0),
+                                                            (0.0, 1.0, 0.0),
+                                                            (1.0, 1.0, 0.0),
+                                                            (0.0, 0.0, 0.0)]]]
+
+
+@requires_gdal22
+def test_tin_csv(path_test_tin_csv):
+    """Convert TIN to MultiPolygon and Triangle to Polygon"""
+    with fiona.open(path_test_tin_csv) as col:
+        assert col.schema['geometry'] == 'Unknown'
+        features = list(col)
+        assert len(features) == 2
+        assert features[0]['geometry']['type'] == 'MultiPolygon'
+        assert features[0]['geometry']['coordinates'] == [[[(0.0, 0.0, 0.0),
+                                                            (0.0, 0.0, 1.0),
+                                                            (0.0, 1.0, 0.0),
+                                                            (0.0, 0.0, 0.0)]],
+                                                          [[(0.0, 0.0, 0.0),
+                                                            (0.0, 1.0, 0.0),
+                                                            (1.0, 1.0, 0.0),
+                                                            (0.0, 0.0, 0.0)]]]
+
+        assert features[1]['geometry']['type'] == 'Polygon'
+        assert features[1]['geometry']['coordinates'] == [[(0.0, 0.0, 0.0),
+                                                           (0.0, 1.0, 0.0),
+                                                           (1.0, 1.0, 0.0),
+                                                           (0.0, 0.0, 0.0)]]
diff -Nru fiona-1.7.10/tests/test_schema_geom.py fiona-1.8.6/tests/test_schema_geom.py
--- fiona-1.7.10/tests/test_schema_geom.py	1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/tests/test_schema_geom.py	2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,169 @@
+"""
+Tests related to the validation of feature geometry types against the schema.
+"""
+
+import fiona
+import pytest
+
+from fiona.errors import GeometryTypeValidationError, UnsupportedGeometryTypeError
+
+@pytest.fixture
+def filename_shp(tmpdir):
+    return str(tmpdir.join("example.shp"))
+
+@pytest.fixture
+def filename_json(tmpdir):
+    return str(tmpdir.join("example.json"))
+
+properties = {"name": "str"}
+PROPERTIES = {"name": "example"}
+POINT = {"type": "Point", "coordinates": (1.0, 2.0)}
+LINESTRING = {"type": "LineString", "coordinates": [(1.0, 2.0), (3.0, 4.0)]}
+POLYGON = {"type": "Polygon", "coordinates": [[(0.0, 0.0), (1.0, 1.0), (0.0, 0.1)]]}
+MULTILINESTRING = {"type": "MultiLineString", "coordinates": [[(0.0, 0.0), (1.0, 1.0)], [(1.0, 2.0), (3.0, 4.0)]]}
+GEOMETRYCOLLECTION = {"type": "GeometryCollection", "geometries": [POINT, LINESTRING, POLYGON]}
+INVALID = {"type": "InvalidType", "coordinates": (42.0, 43.0)}
+POINT_3D = {"type": "Point", "coordinates": (1.0, 2.0, 3.0)}
+
+def write_point(collection):
+    feature = {"geometry": POINT, "properties": PROPERTIES}
+    collection.write(feature)
+
+def write_linestring(collection):
+    feature = {"geometry": LINESTRING, "properties": PROPERTIES}
+    collection.write(feature)
+
+def write_polygon(collection):
+    feature = {"geometry": POLYGON, "properties": PROPERTIES}
+    collection.write(feature)
+
+def write_invalid(collection):
+    feature = {"geometry": INVALID, "properties": PROPERTIES}
+    collection.write(feature)
+
+def write_multilinestring(collection):
+    feature = {"geometry": MULTILINESTRING, "properties": PROPERTIES}
+    collection.write(feature)
+
+def write_point_3d(collection):
+    feature = {"geometry": POINT_3D, "properties": PROPERTIES}
+    collection.write(feature)
+
+def write_geometrycollection(collection):
+    feature = {"geometry": GEOMETRYCOLLECTION, "properties": PROPERTIES}
+    collection.write(feature)
+
+def write_null(collection):
+    feature = {"geometry": None, "properties": PROPERTIES}
+    collection.write(feature)
+
+def test_point(filename_shp):
+    schema = {"geometry": "Point", "properties": properties}
+    with fiona.open(filename_shp, "w", driver="ESRI Shapefile", schema=schema) as collection:
+        write_point(collection)
+        write_point_3d(collection)
+        write_null(collection)
+
+        with pytest.raises(GeometryTypeValidationError):
+            write_linestring(collection)
+
+        with pytest.raises(GeometryTypeValidationError):
+            write_invalid(collection)
+
+def test_multi_type(filename_json):
+    schema = {"geometry": ("Point", "LineString"), "properties": properties}
+    with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection:
+        write_point(collection)
+        write_linestring(collection)
+        write_null(collection)
+
+        with pytest.raises(GeometryTypeValidationError):
+            write_polygon(collection)
+
+        with pytest.raises(GeometryTypeValidationError):
+            write_invalid(collection)
+
+def test_unknown(filename_json):
+    """Reading and writing layers with "Unknown" (i.e. any) geometry type"""
+    # write a layer with a mixture of geometry types
+    schema = {"geometry": "Unknown", "properties": properties}
+    with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection:
+        write_point(collection)
+        write_linestring(collection)
+        write_polygon(collection)
+        write_geometrycollection(collection)
+        write_null(collection)
+
+        with pytest.raises(GeometryTypeValidationError):
+            write_invalid(collection)
+
+    # copy the features to a new layer, reusing the layer's metadata
+    with fiona.open(filename_json, "r", driver="GeoJSON") as src:
+        filename_dst = filename_json.replace(".json", "_v2.json")
+        assert src.schema["geometry"] == "Unknown"
+        with fiona.open(filename_dst, "w", **src.meta) as dst:
+            dst.writerecords(src)
+
+def test_any(filename_json):
+    schema = {"geometry": "Any", "properties": properties}
+    with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection:
+        write_point(collection)
+        write_linestring(collection)
+        write_polygon(collection)
+        write_geometrycollection(collection)
+        write_null(collection)
+
+        with pytest.raises(GeometryTypeValidationError):
+            write_invalid(collection)
+
+def test_broken(filename_json):
+    schema = {"geometry": "NOT_VALID", "properties": properties}
+    with pytest.raises(UnsupportedGeometryTypeError):
+        with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema):
+            pass
+
+def test_broken_list(filename_json):
+    schema = {"geometry": ("Point", "LineString", "NOT_VALID"), "properties": properties}
+    with pytest.raises(UnsupportedGeometryTypeError):
+        collection = fiona.open(filename_json, "w", driver="GeoJSON", schema=schema)
+
+def test_invalid_schema(filename_shp):
+    """Features match schema but geometries not supported by driver"""
+    schema = {"geometry": ("Point", "LineString"), "properties": properties}
+    with fiona.open(filename_shp, "w", driver="ESRI Shapefile", schema=schema) as collection:
+        write_linestring(collection)
+
+        with pytest.raises(RuntimeError):
+            # ESRI Shapefile can only store a single geometry type
+            write_point(collection)
+
+def test_esri_multi_geom(filename_shp):
+    """ESRI Shapefile doesn't differentiate between LineString/MultiLineString"""
+    schema = {"geometry": "LineString", "properties": properties}
+    with fiona.open(filename_shp, "w", driver="ESRI Shapefile", schema=schema) as collection:
+        write_linestring(collection)
+        write_multilinestring(collection)
+
+        with pytest.raises(GeometryTypeValidationError):
+            write_point(collection)
+
+def test_3d_schema_ignored(filename_json):
+    schema = {"geometry": "3D Point", "properties": properties}
+    with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection:
+        write_point(collection)
+        write_point_3d(collection)
+
+def test_geometrycollection_schema(filename_json):
+    schema = {"geometry": "GeometryCollection", "properties": properties}
+    with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection:
+        write_geometrycollection(collection)
+
+def test_none_schema(filename_json):
+    schema = {"geometry": None, "properties": properties}
+    with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection:
+        write_null(collection)
+
+        with pytest.raises(GeometryTypeValidationError):
+            write_point(collection)
+        with pytest.raises(GeometryTypeValidationError):
+            write_linestring(collection)
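As the new module shows, a schema's "geometry" may be a single type, a tuple of types, "Unknown"/"Any", or None, and every write is validated against it. A condensed sketch of the tuple case, assuming a writable temporary path:

    import fiona
    from fiona.errors import GeometryTypeValidationError

    schema = {"geometry": ("Point", "LineString"), "properties": {"name": "str"}}
    with fiona.open("/tmp/example.json", "w", driver="GeoJSON",
                    schema=schema) as dst:
        dst.write({"geometry": {"type": "Point", "coordinates": (1.0, 2.0)},
                   "properties": {"name": "ok"}})
        try:
            dst.write({"geometry": {"type": "Polygon",
                                    "coordinates": [[(0.0, 0.0), (1.0, 1.0),
                                                     (0.0, 1.0), (0.0, 0.0)]]},
                       "properties": {"name": "rejected"}})
        except GeometryTypeValidationError:
            pass  # Polygon is not in the declared tuple of types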
diff -Nru fiona-1.7.10/tests/test_schema.py fiona-1.8.6/tests/test_schema.py
--- fiona-1.7.10/tests/test_schema.py	2017-10-26 22:27:31.000000000 +0000
+++ fiona-1.8.6/tests/test_schema.py	2019-03-19 04:25:07.000000000 +0000
@@ -1,161 +1,141 @@
+import fiona
+from fiona.errors import SchemaError, UnsupportedGeometryTypeError, \
+    DriverSupportError
+from fiona.schema import FIELD_TYPES, normalize_field_type
 import os
-import shutil
 import tempfile
-import unittest

-import fiona
-from fiona.errors import UnsupportedGeometryTypeError
+import pytest
+
+from .conftest import requires_only_gdal1, requires_gdal2
+
+
+def test_schema_ordering_items(tmpdir):
+    name = str(tmpdir.join('test_scheme.shp'))
+    items = [('title', 'str:80'), ('date', 'date')]
+    with fiona.open(name, 'w',
+                    driver="ESRI Shapefile",
+                    schema={
+                        'geometry': 'LineString',
+                        'properties': items}) as c:
+        assert list(c.schema['properties'].items()) == items
+    with fiona.open(name) as c:
+        assert list(c.schema['properties'].items()) == items
+
+
+def test_shapefile_schema(tmpdir):
+    name = str(tmpdir.join('test_schema.shp'))
+    items = sorted({
+        'AWATER10': 'float',
+        'CLASSFP10': 'str',
+        'ZipCodeType': 'str',
+        'EstimatedPopulation': 'float',
+        'LocationType': 'str',
+        'ALAND10': 'float',
+        'TotalWages': 'float',
+        'FUNCSTAT10': 'str',
+        'Long': 'float',
+        'City': 'str',
+        'TaxReturnsFiled': 'float',
+        'State': 'str',
+        'Location': 'str',
+        'GSrchCnt': 'float',
+        'INTPTLAT10': 'str',
+        'Lat': 'float',
+        'MTFCC10': 'str',
+        'Decommisioned': 'str',
+        'GEOID10': 'str',
+        'INTPTLON10': 'str'}.items())
+    with fiona.open(name, 'w',
+                    driver="ESRI Shapefile",
+                    schema={'geometry': 'Polygon', 'properties': items}) as c:
+        assert list(c.schema['properties'].items()) == items
+        c.write(
+            {'geometry': {'coordinates': [[(-117.882442, 33.783633),
+                                           (-117.882284, 33.783817),
+                                           (-117.863348, 33.760016),
+                                           (-117.863478, 33.760016),
+                                           (-117.863869, 33.760017),
+                                           (-117.864, 33.760017999999995),
+                                           (-117.864239, 33.760019),
+                                           (-117.876608, 33.755769),
+                                           (-117.882886, 33.783114),
+                                           (-117.882688, 33.783345),
+                                           (-117.882639, 33.783401999999995),
+                                           (-117.88259, 33.78346),
+                                           (-117.882442, 33.783633)]],
+                          'type': 'Polygon'},
+             'id': '1',
+             'properties': {
+                 'ALAND10': 8819240.0,
+                 'AWATER10': 309767.0,
+                 'CLASSFP10': 'B5',
+                 'City': 'SANTA ANA',
+                 'Decommisioned': False,
+                 'EstimatedPopulation': 27773.0,
+                 'FUNCSTAT10': 'S',
+                 'GEOID10': '92706',
+                 'GSrchCnt': 0.0,
+                 'INTPTLAT10': '+33.7653010',
+                 'INTPTLON10': '-117.8819759',
+                 'Lat': 33.759999999999998,
+                 'Location': 'NA-US-CA-SANTA ANA',
+                 'LocationType': 'PRIMARY',
+                 'Long': -117.88,
+                 'MTFCC10': 'G6350',
+                 'State': 'CA',
+                 'TaxReturnsFiled': 14635.0,
+                 'TotalWages': 521280485.0,
+                 'ZipCodeType': 'STANDARD'},
+             'type': 'Feature'})
+        assert len(c) == 1
+    with fiona.open(name) as c:
+        assert (
+            list(c.schema['properties'].items()) ==
+            sorted([('AWATER10', 'float:24.15'),
+                    ('CLASSFP10', 'str:80'),
+                    ('ZipCodeTyp', 'str:80'),
+                    ('EstimatedP', 'float:24.15'),
+                    ('LocationTy', 'str:80'),
+                    ('ALAND10', 'float:24.15'),
+                    ('INTPTLAT10', 'str:80'),
+                    ('FUNCSTAT10', 'str:80'),
+                    ('Long', 'float:24.15'),
+                    ('City', 'str:80'),
+                    ('TaxReturns', 'float:24.15'),
+                    ('State', 'str:80'),
+                    ('Location', 'str:80'),
+                    ('GSrchCnt', 'float:24.15'),
+                    ('TotalWages', 'float:24.15'),
+                    ('Lat', 'float:24.15'),
+                    ('MTFCC10', 'str:80'),
+                    ('INTPTLON10', 'str:80'),
+                    ('GEOID10', 'str:80'),
+                    ('Decommisio', 'str:80')]))
+        f = next(iter(c))
+        assert f['properties']['EstimatedP'] == 27773.0

-class SchemaOrder(unittest.TestCase):
-
-    def setUp(self):
-        self.tempdir = tempfile.mkdtemp()

+def test_field_truncation_issue177(tmpdir):
+    name = str(tmpdir.join('output.shp'))

-    def tearDown(self):
-        shutil.rmtree(self.tempdir)
-
-    def test_schema_ordering_items(self):
-        items = [('title', 'str:80'), ('date', 'date')]
-        with fiona.open(os.path.join(self.tempdir, 'test_schema.shp'), 'w',
-                        driver="ESRI Shapefile",
-                        schema={
-                            'geometry': 'LineString',
-                            'properties': items }) as c:
-            self.assertEqual(list(c.schema['properties'].items()), items)
-        with fiona.open(os.path.join(self.tempdir, 'test_schema.shp')) as c:
-            self.assertEqual(list(c.schema['properties'].items()), items)
-
-class ShapefileSchema(unittest.TestCase):
-
-    def setUp(self):
-        self.tempdir = tempfile.mkdtemp()
-
-    def tearDown(self):
-        shutil.rmtree(self.tempdir)
-
-    def test_schema(self):
-        items = sorted({
-            'AWATER10': 'float',
-            'CLASSFP10': 'str',
-            'ZipCodeType': 'str',
-            'EstimatedPopulation': 'float',
-            'LocationType': 'str',
-            'ALAND10': 'float',
-            'TotalWages': 'float',
-            'FUNCSTAT10': 'str',
-            'Long': 'float',
-            'City': 'str',
-            'TaxReturnsFiled': 'float',
-            'State': 'str',
-            'Location': 'str',
-            'GSrchCnt': 'float',
-            'INTPTLAT10': 'str',
-            'Lat': 'float',
-            'MTFCC10': 'str',
-            'Decommisioned': 'str',
-            'GEOID10': 'str',
-            'INTPTLON10': 'str'}.items())
-        with fiona.open(os.path.join(self.tempdir, 'test_schema.shp'), 'w',
-                        driver="ESRI Shapefile",
-                        schema={
-                            'geometry': 'Polygon',
-                            'properties': items }) as c:
-            self.assertEqual(list(c.schema['properties'].items()), items)
-            c.write(
-                {'geometry': {'coordinates': [[(-117.882442, 33.783633),
-                                               (-117.882284, 33.783817),
-                                               (-117.863348, 33.760016),
-                                               (-117.863478, 33.760016),
-                                               (-117.863869, 33.760017),
-                                               (-117.864, 33.760017999999995),
-                                               (-117.864239, 33.760019),
-                                               (-117.876608, 33.755769),
-                                               (-117.882886, 33.783114),
-                                               (-117.882688, 33.783345),
-                                               (-117.882639, 33.783401999999995),
-                                               (-117.88259, 33.78346),
-                                               (-117.882442, 33.783633)]],
-                              'type': 'Polygon'},
-                 'id': '1',
-                 'properties':{
-                     'ALAND10': 8819240.0,
-                     'AWATER10': 309767.0,
-                     'CLASSFP10': 'B5',
-                     'City': 'SANTA ANA',
-                     'Decommisioned': False,
-                     'EstimatedPopulation': 27773.0,
-                     'FUNCSTAT10': 'S',
-                     'GEOID10': '92706',
-                     'GSrchCnt': 0.0,
-                     'INTPTLAT10': '+33.7653010',
-                     'INTPTLON10': '-117.8819759',
-                     'Lat': 33.759999999999998,
-                     'Location': 'NA-US-CA-SANTA ANA',
-                     'LocationType': 'PRIMARY',
-                     'Long': -117.88,
-                     'MTFCC10': 'G6350',
-                     'State': 'CA',
-                     'TaxReturnsFiled': 14635.0,
-                     'TotalWages': 521280485.0,
-                     'ZipCodeType': 'STANDARD'},
-                 'type': 'Feature'} )
-            self.assertEqual(len(c), 1)
-        with fiona.open(os.path.join(self.tempdir, 'test_schema.shp')) as c:
-            self.assertEqual(
-                list(c.schema['properties'].items()),
-                sorted([('AWATER10', 'float:24.15'),
-                        ('CLASSFP10', 'str:80'),
-                        ('ZipCodeTyp', 'str:80'),
-                        ('EstimatedP', 'float:24.15'),
-                        ('LocationTy', 'str:80'),
-                        ('ALAND10', 'float:24.15'),
-                        ('INTPTLAT10', 'str:80'),
-                        ('FUNCSTAT10', 'str:80'),
-                        ('Long', 'float:24.15'),
-                        ('City', 'str:80'),
-                        ('TaxReturns', 'float:24.15'),
-                        ('State', 'str:80'),
-                        ('Location', 'str:80'),
-                        ('GSrchCnt', 'float:24.15'),
-                        ('TotalWages', 'float:24.15'),
-                        ('Lat', 'float:24.15'),
-                        ('MTFCC10', 'str:80'),
-                        ('INTPTLON10', 'str:80'),
-                        ('GEOID10', 'str:80'),
-                        ('Decommisio', 'str:80')]) )
-            f = next(iter(c))
-            self.assertEqual(f['properties']['EstimatedP'], 27773.0)
-
-
-class FieldTruncationTestCase(unittest.TestCase):
-
-    def setUp(self):
-        self.tempdir = tempfile.mkdtemp()
-
-    def tearDown(self):
-        shutil.rmtree(self.tempdir)
-
-    def test_issue177(self):
-        name = os.path.join(self.tempdir, 'output.shp')
-
-        kwargs = {
-            'driver': 'ESRI Shapefile',
-            'crs': 'EPSG:4326',
-            'schema': {
-                'geometry': 'Point',
-                'properties': [('a_fieldname', 'float')]}}
-
-        with fiona.open(name, 'w', **kwargs) as dst:
-            rec = {}
-            rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)}
-            rec['properties'] = {'a_fieldname': 3.0}
-            dst.write(rec)
-
-        with fiona.open(name) as src:
-            first = next(iter(src))
-            assert first['geometry'] == {'type': 'Point', 'coordinates': (0, 0)}
-            assert first['properties']['a_fieldnam'] == 3.0
+    kwargs = {
+        'driver': 'ESRI Shapefile',
+        'crs': 'EPSG:4326',
+        'schema': {
+            'geometry': 'Point',
+            'properties': [('a_fieldname', 'float')]}}
+
+    with fiona.open(name, 'w', **kwargs) as dst:
+        rec = {}
+        rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)}
+        rec['properties'] = {'a_fieldname': 3.0}
+        dst.write(rec)
+
+    with fiona.open(name) as src:
+        first = next(iter(src))
+        assert first['geometry'] == {'type': 'Point', 'coordinates': (0, 0)}
+        assert first['properties']['a_fieldnam'] == 3.0


 def test_unsupported_geometry_type():
@@ -168,7 +148,78 @@
             'geometry': 'BOGUS',
             'properties': {}}}

-    try:
+    with pytest.raises(UnsupportedGeometryTypeError):
         fiona.open(tmpfile, 'w', **profile)
-    except UnsupportedGeometryTypeError:
-        assert True
+
+
+@pytest.mark.parametrize('x', list(range(1, 10)))
+def test_normalize_int32(x):
+    assert normalize_field_type('int:{}'.format(x)) == 'int32'
+
+
+@requires_gdal2
+@pytest.mark.parametrize('x', list(range(10, 20)))
+def test_normalize_int64(x):
+    assert normalize_field_type('int:{}'.format(x)) == 'int64'
+
+
+@pytest.mark.parametrize('x', list(range(0, 20)))
+def test_normalize_str(x):
+    assert normalize_field_type('str:{}'.format(x)) == 'str'
+
+
+def test_normalize_bool():
+    assert normalize_field_type('bool') == 'bool'
+
+
+def test_normalize_float():
+    assert normalize_field_type('float:25.8') == 'float'
+
+
+def generate_field_types():
+    """
+    Produce a unique set of field types in a consistent order.
+
+    This ensures that tests are able to run in parallel.
+    """
+    types = set(FIELD_TYPES)
+    types.remove(None)
+    return list(sorted(types)) + [None]
+
+
+@pytest.mark.parametrize('x', generate_field_types())
+def test_normalize_std(x):
+    assert normalize_field_type(x) == x
+
+
+def test_normalize_error():
+    with pytest.raises(SchemaError):
+        assert normalize_field_type('thingy')
+
+
+@requires_only_gdal1
+@pytest.mark.parametrize('field_type', ['time', 'datetime'])
+def test_check_schema_driver_support_shp(tmpdir, field_type):
+
+    with pytest.raises(DriverSupportError):
+        name = str(tmpdir.join('test_scheme.shp'))
+        items = [('field1', field_type)]
+        with fiona.open(name, 'w',
+                        driver="ESRI Shapefile",
+                        schema={
+                            'geometry': 'LineString',
+                            'properties': items}) as c:
+            pass
+
+
+@requires_only_gdal1
+def test_check_schema_driver_support_gpkg(tmpdir):
+    with pytest.raises(DriverSupportError):
+        name = str(tmpdir.join('test_scheme.gpkg'))
+        items = [('field1', 'time')]
+        with fiona.open(name, 'w',
+                        driver="GPKG",
+                        schema={
+                            'geometry': 'LineString',
+                            'properties': items}) as c:
+            pass
diff -Nru fiona-1.7.10/tests/test_session.py fiona-1.8.6/tests/test_session.py
--- fiona-1.7.10/tests/test_session.py	1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/tests/test_session.py	2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,9 @@
+"""Tests of the ogrext.Session class"""
+
+import fiona
+
+
+def test_get(path_coutwildrnp_shp):
+    with fiona.open(path_coutwildrnp_shp) as col:
+        feat3 = col.get(2)
+        assert feat3['properties']['NAME'] == 'Mount Zirkel Wilderness'
+""" + import logging import sys -import fiona +import pytest -logging.basicConfig(stream=sys.stderr, level=logging.INFO) +import fiona +from fiona.errors import FionaDeprecationWarning -def test_collection_get(): - with fiona.open('tests/data/coutwildrnp.shp') as src: +def test_collection_get(path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp) as src: result = src[5] assert result['id'] == '5' -def test_collection_slice(): - with fiona.open('tests/data/coutwildrnp.shp') as src: + +def test_collection_slice(path_coutwildrnp_shp): + with pytest.warns(FionaDeprecationWarning), fiona.open(path_coutwildrnp_shp) as src: results = src[:5] assert isinstance(results, list) assert len(results) == 5 assert results[4]['id'] == '4' -def test_collection_iterator_slice(): - with fiona.open('tests/data/coutwildrnp.shp') as src: + +def test_collection_iterator_slice(path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp) as src: results = list(src.items(5)) assert len(results) == 5 k, v = results[4] assert k == 4 assert v['id'] == '4' -def test_collection_iterator_next(): - with fiona.open('tests/data/coutwildrnp.shp') as src: + +def test_collection_iterator_next(path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp) as src: k, v = next(src.items(5, None)) assert k == 5 assert v['id'] == '5' -def test_collection_iterator_items_slice(): - with fiona.open('tests/data/coutwildrnp.shp') as src: - l = len(src) +def test_collection_iterator_items_slice(path_coutwildrnp_shp): + + with fiona.open(path_coutwildrnp_shp) as src: + count = len(src) items = list(src.items(0, 5)) assert len(items) == 5 @@ -49,10 +57,10 @@ assert len(items) == 4 items = list(src.items(0, None)) - assert len(items) == l + assert len(items) == count items = list(src.items(5, None)) - assert len(items) == (l - 5) + assert len(items) == (count - 5) items = list(src.items(5, None, -1)) assert len(items) == 6 @@ -67,9 +75,10 @@ assert len(items) == 4 items = list(src.items(-5, None, -1)) - assert len(items) == (l - 5 + 1) + assert len(items) == (count - 5 + 1) + -def test_collection_iterator_keys_next(): - with fiona.open('tests/data/coutwildrnp.shp') as src: +def test_collection_iterator_keys_next(path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp) as src: k = next(src.keys(5, None)) assert k == 5 diff -Nru fiona-1.7.10/tests/test_subtypes.py fiona-1.8.6/tests/test_subtypes.py --- fiona-1.7.10/tests/test_subtypes.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_subtypes.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,51 @@ +import fiona +import six + +def test_read_bool_subtype(tmpdir): + test_data = """{"type": "FeatureCollection", "features": [{"type": "Feature", "properties": {"bool": true, "not_bool": 1, "float": 42.5}, "geometry": null}]}""" + path = tmpdir.join("test_read_bool_subtype.geojson") + with open(str(path), "w") as f: + f.write(test_data) + + with fiona.open(str(path), "r") as src: + feature = next(iter(src)) + + if fiona.gdal_version.major >= 2: + assert type(feature["properties"]["bool"]) is bool + else: + assert type(feature["properties"]["bool"]) is int + assert isinstance(feature["properties"]["not_bool"], six.integer_types) + assert type(feature["properties"]["float"]) is float + +def test_write_bool_subtype(tmpdir): + path = tmpdir.join("test_write_bool_subtype.geojson") + + schema = { + "geometry": "Point", + "properties": { + "bool": "bool", + "not_bool": "int", + "float": "float", + } + } + + feature = { + "geometry": None, + "properties": { + "bool": True, + 
"not_bool": 1, + "float": 42.5, + } + } + + with fiona.open(str(path), "w", driver="GeoJSON", schema=schema) as dst: + dst.write(feature) + + with open(str(path), "r") as f: + data = f.read() + + if fiona.gdal_version.major >= 2: + assert """"bool": true""" in data + else: + assert """"bool": 1""" in data + assert """"not_bool": 1""" in data diff -Nru fiona-1.7.10/tests/test_transactions.py fiona-1.8.6/tests/test_transactions.py --- fiona-1.7.10/tests/test_transactions.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_transactions.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,74 @@ +import os +import fiona +import fiona.ogrext +import logging +from random import uniform, randint +from collections import defaultdict +import pytest + +has_gpkg = "GPKG" in fiona.supported_drivers.keys() + +def create_records(count): + for n in range(count): + record = { + "geometry": {"type": "Point", "coordinates": [uniform(-180, 180), uniform(-90, 90)]}, + "properties": {"value": randint(0, 1000)} + } + yield record + +class DebugHandler(logging.Handler): + def __init__(self, pattern): + logging.Handler.__init__(self) + self.pattern = pattern + self.history = defaultdict(lambda: 0) + + def emit(self, record): + if self.pattern in record.msg: + self.history[record.msg] += 1 + + +log = logging.getLogger() + + +@pytest.mark.skipif(not has_gpkg, reason="Requires geopackage driver") +class TestTransaction: + def setup_method(self): + self.handler = DebugHandler(pattern="transaction") + self.handler.setLevel(logging.DEBUG) + log.setLevel(logging.DEBUG) + log.addHandler(self.handler) + + def teardown_method(self): + log.removeHandler(self.handler) + + def test_transaction(self, tmpdir): + """ + Test transaction start/commit is called the expected number of times, + and that the default transaction size can be overloaded. The test uses + a custom logging handler to listen for the debug messages produced + when the transaction is started/comitted. 
diff -Nru fiona-1.7.10/tests/test_transactions.py fiona-1.8.6/tests/test_transactions.py
--- fiona-1.7.10/tests/test_transactions.py	1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/tests/test_transactions.py	2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,74 @@
+import os
+import fiona
+import fiona.ogrext
+import logging
+from random import uniform, randint
+from collections import defaultdict
+import pytest
+
+has_gpkg = "GPKG" in fiona.supported_drivers.keys()
+
+def create_records(count):
+    for n in range(count):
+        record = {
+            "geometry": {"type": "Point", "coordinates": [uniform(-180, 180), uniform(-90, 90)]},
+            "properties": {"value": randint(0, 1000)}
+        }
+        yield record
+
+class DebugHandler(logging.Handler):
+    def __init__(self, pattern):
+        logging.Handler.__init__(self)
+        self.pattern = pattern
+        self.history = defaultdict(lambda: 0)
+
+    def emit(self, record):
+        if self.pattern in record.msg:
+            self.history[record.msg] += 1
+
+
+log = logging.getLogger()
+
+
+@pytest.mark.skipif(not has_gpkg, reason="Requires geopackage driver")
+class TestTransaction:
+    def setup_method(self):
+        self.handler = DebugHandler(pattern="transaction")
+        self.handler.setLevel(logging.DEBUG)
+        log.setLevel(logging.DEBUG)
+        log.addHandler(self.handler)
+
+    def teardown_method(self):
+        log.removeHandler(self.handler)
+
+    def test_transaction(self, tmpdir):
+        """
+        Test that transaction start/commit is called the expected number of
+        times, and that the default transaction size can be overridden. The
+        test uses a custom logging handler to listen for the debug messages
+        produced when the transaction is started/committed.
+        """
+        num_records = 250
+        transaction_size = 100
+
+        assert fiona.ogrext.DEFAULT_TRANSACTION_SIZE == 20000
+        fiona.ogrext.DEFAULT_TRANSACTION_SIZE = transaction_size
+        assert fiona.ogrext.DEFAULT_TRANSACTION_SIZE == transaction_size
+
+        path = str(tmpdir.join("output.gpkg"))
+
+        schema = {
+            "geometry": "Point",
+            "properties": {"value": "int"}
+        }
+
+        with fiona.open(path, "w", driver="GPKG", schema=schema) as dst:
+            dst.writerecords(create_records(num_records))
+
+        assert self.handler.history["Starting transaction (initial)"] == 1
+        assert self.handler.history["Starting transaction (intermediate)"] == num_records // transaction_size
+        assert self.handler.history["Comitting transaction (intermediate)"] == num_records // transaction_size
+        assert self.handler.history["Comitting transaction (final)"] == 1
+
+        with fiona.open(path, "r") as src:
+            assert len(src) == num_records
diff -Nru fiona-1.7.10/tests/test_transform.py fiona-1.8.6/tests/test_transform.py
--- fiona-1.7.10/tests/test_transform.py	1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.6/tests/test_transform.py	2019-03-19 04:25:07.000000000 +0000
@@ -0,0 +1,50 @@
+"""Tests of the transform submodule"""
+
+import math
+
+import pytest
+
+from fiona import transform
+
+
+@pytest.mark.parametrize(
+    "geom",
+    [
+        {"type": "Point", "coordinates": [0.0, 0.0, 1000.0]},
+        {
+            "type": "LineString",
+            "coordinates": [[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]],
+        },
+        {
+            "type": "MultiPoint",
+            "coordinates": [[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]],
+        },
+        {
+            "type": "Polygon",
+            "coordinates": [
+                [
+                    [0.0, 0.0, 1000.0],
+                    [0.1, 0.1, -1000.0],
+                    [0.1, -0.1, math.pi],
+                    [0.0, 0.0, 1000.0],
+                ]
+            ],
+        },
+        {
+            "type": "MultiPolygon",
+            "coordinates": [
+                [
+                    [
+                        [0.0, 0.0, 1000.0],
+                        [0.1, 0.1, -1000.0],
+                        [0.1, -0.1, math.pi],
+                        [0.0, 0.0, 1000.0],
+                    ]
+                ]
+            ],
+        },
+    ],
+)
+def test_transform_geom_with_z(geom):
+    """Transforming a geom with Z succeeds"""
+    g2 = transform.transform_geom("epsg:4326", "epsg:3857", geom, precision=3)
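The parametrized geometries above all carry Z values; the call under test is simply transform_geom with a precision. A minimal sketch:

    from fiona import transform

    geom = {"type": "Point", "coordinates": [0.0, 0.0, 1000.0]}
    # reproject WGS 84 to Web Mercator, rounding coordinates to 3 decimals;
    # the Z value is carried through (see the 1.8.4 note on #523)
    out = transform.transform_geom("epsg:4326", "epsg:3857", geom, precision=3)
    assert out["type"] == "Point"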
+ """ + field_name = u"区县名称" + meta = { + "schema": { + "properties": OrderedDict([(field_name, "int")]), + "geometry": "Point", + }, + "driver": "ESRI Shapefile", + } + feature = { + "properties": {field_name: 123}, + "geometry": {"type": "Point", "coordinates": [1, 2]} + } + # when encoding is specified, write is successful + with fiona.open(os.path.join(self.tempdir, "test1.shp"), "w", encoding="GB2312", **meta) as collection: + collection.write(feature) + # no encoding + with pytest.raises(SchemaError): + fiona.open(os.path.join(self.tempdir, "test2.shp"), "w", **meta) diff -Nru fiona-1.7.10/tests/test_version.py fiona-1.8.6/tests/test_version.py --- fiona-1.7.10/tests/test_version.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.6/tests/test_version.py 2019-03-19 04:25:07.000000000 +0000 @@ -0,0 +1,21 @@ +import fiona +from fiona.ogrext import GDALVersion + +def test_version_tuple(): + version = fiona.gdal_version + assert version.major >= 1 and isinstance(version.major, int) + assert version.minor >= 0 and isinstance(version.minor, int) + assert version.revision >= 0 and isinstance(version.revision, int) + +def test_version_comparison(): + # version against version + assert GDALVersion(4, 0, 0) > GDALVersion(3, 2, 1) + assert GDALVersion(2, 0, 0) < GDALVersion(3, 2, 1) + assert GDALVersion(3, 2, 2) > GDALVersion(3, 2, 1) + assert GDALVersion(3, 2, 0) < GDALVersion(3, 2, 1) + + # tuple against version + assert (4, 0, 0) > GDALVersion(3, 2, 1) + assert (2, 0, 0) < GDALVersion(3, 2, 1) + assert (3, 2, 2) > GDALVersion(3, 2, 1) + assert (3, 2, 0) < GDALVersion(3, 2, 1) diff -Nru fiona-1.7.10/tests/test_vfs.py fiona-1.8.6/tests/test_vfs.py --- fiona-1.7.10/tests/test_vfs.py 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/tests/test_vfs.py 2019-03-19 04:25:07.000000000 +0000 @@ -1,120 +1,211 @@ import logging -import os -import shutil import sys -import unittest +import os + +import pytest +import boto3 import fiona +from fiona.vfs import vsi_path, parse_paths + +from .test_collection import TestReading +from .test_collection_legacy import ReadingTest + -logging.basicConfig(stream=sys.stderr, level=logging.INFO) +# Custom markers (from rasterio) +mingdalversion = pytest.mark.skipif( + fiona.gdal_version < (2, 1, 0), + reason="S3 raster access requires GDAL 2.1") -from .test_collection import ReadingTest +credentials = pytest.mark.skipif( + not(boto3.Session()._session.get_credentials()), + reason="S3 raster access requires credentials") +# TODO: remove this once we've successfully moved the tar tests over +# to TestVsiReading. + class VsiReadingTest(ReadingTest): - # There's a bug in GDAL 1.9.2 http://trac.osgeo.org/gdal/ticket/5093 # in which the VSI driver reports the wrong number of features. # I'm overriding ReadingTest's test_filter_1 with a function that # passes and creating a new method in this class that we can exclude # from the test runner at run time. 
+ @pytest.mark.xfail(reason="The number of features present in the archive " + "differs based on the GDAL version.") def test_filter_vsi(self): results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0))) - self.assertEqual(len(results), 67) + assert len(results) == 67 f = results[0] - self.assertEqual(f['id'], "0") - self.assertEqual(f['properties']['STATE'], 'UT') + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' -class ZipReadingTest(VsiReadingTest): - - def setUp(self): - self.c = fiona.open("zip://tests/data/coutwildrnp.zip", "r") +class TestVsiReading(TestReading): + # There's a bug in GDAL 1.9.2 http://trac.osgeo.org/gdal/ticket/5093 + # in which the VSI driver reports the wrong number of features. + # I'm overriding TestReading's test_filter_1 with a function that + # passes and creating a new method in this class that we can exclude + # from the test runner at run time. - def tearDown(self): + @pytest.mark.xfail(reason="The number of features present in the archive " + "differs based on the GDAL version.") + def test_filter_vsi(self): + results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0))) + assert len(results) == 67 + f = results[0] + assert f['id'] == "0" + assert f['properties']['STATE'] == 'UT' + + +class TestZipReading(TestVsiReading): + @pytest.fixture(autouse=True) + def zipfile(self, data_dir, path_coutwildrnp_zip): + self.c = fiona.open("zip://{}".format(path_coutwildrnp_zip, "r")) + self.path = os.path.join(data_dir, 'coutwildrnp.zip') + yield self.c.close() def test_open_repr(self): - self.assertEqual( - repr(self.c), - ("" % hex(id(self.c)))) + assert ( + repr(self.c) == + ("".format( + id=hex(id(self.c)), + path=self.path))) def test_closed_repr(self): self.c.close() - self.assertEqual( - repr(self.c), - ("" % hex(id(self.c)))) + assert ( + repr(self.c) == + ("".format( + id=hex(id(self.c)), + path=self.path))) def test_path(self): - self.assertEqual(self.c.path, '/vsizip/tests/data/coutwildrnp.zip') + assert self.c.path == '/vsizip/{path}'.format(path=self.path) -class ZipArchiveReadingTest(VsiReadingTest): - - def setUp(self): - self.c = fiona.open("/coutwildrnp.shp", "r", vfs="zip://tests/data/coutwildrnp.zip") - - def tearDown(self): +class TestZipArchiveReading(TestVsiReading): + @pytest.fixture(autouse=True) + def zipfile(self, data_dir, path_coutwildrnp_zip): + vfs = 'zip://{}'.format(path_coutwildrnp_zip) + self.c = fiona.open("/coutwildrnp.shp", "r", vfs=vfs) + self.path = os.path.join(data_dir, 'coutwildrnp.zip') + yield self.c.close() def test_open_repr(self): - self.assertEqual( - repr(self.c), - ("" % hex(id(self.c)))) + assert ( + repr(self.c) == + ("".format( + id=hex(id(self.c)), + path=self.path))) def test_closed_repr(self): self.c.close() - self.assertEqual( - repr(self.c), - ("" % hex(id(self.c)))) + assert ( + repr(self.c) == + ("".format( + id=hex(id(self.c)), + path=self.path))) def test_path(self): - self.assertEqual(self.c.path, '/vsizip/tests/data/coutwildrnp.zip/coutwildrnp.shp') - + assert (self.c.path == + '/vsizip/{path}/coutwildrnp.shp'.format(path=self.path)) -class ZipArchiveReadingTestAbsPath(ZipArchiveReadingTest): - def setUp(self): - self.c = fiona.open( - "/coutwildrnp.shp", "r", - vfs="zip://" + os.path.abspath("tests/data/coutwildrnp.zip")) +class TestZipArchiveReadingAbsPath(TestZipArchiveReading): + @pytest.fixture(autouse=True) + def zipfile(self, path_coutwildrnp_zip): + vfs = 'zip://{}'.format(os.path.abspath(path_coutwildrnp_zip)) + self.c = fiona.open("/coutwildrnp.shp", "r", vfs=vfs) + yield + 
self.c.close() def test_open_repr(self): - self.assert_(repr(self.c).startswith("" % hex(id(self.c)))) + assert ( + repr(self.c) == + ("".format( + id=hex(id(self.c)), + path=self.path))) def test_closed_repr(self): self.c.close() - self.assertEqual( - repr(self.c), - ("" % hex(id(self.c)))) + assert ( + repr(self.c) == + ("".format( + id=hex(id(self.c)), + path=self.path))) def test_path(self): - self.assertEqual(self.c.path, '/vsitar/tests/data/coutwildrnp.tar/testing/coutwildrnp.shp') + assert ( + self.c.path == + '/vsitar/{path}/testing/coutwildrnp.shp'.format(path=self.path)) + + +@pytest.mark.network +def test_open_http(): + ds = fiona.open('https://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.shp') + assert len(ds) == 10 + + +@credentials +@mingdalversion +@pytest.mark.network +def test_open_s3(): + ds = fiona.open('zip+s3://fiona-testing/coutwildrnp.zip') + assert len(ds) == 67 + + +@pytest.mark.network +def test_open_zip_https(): + ds = fiona.open('zip+https://s3.amazonaws.com/fiona-testing/coutwildrnp.zip') + assert len(ds) == 67 + + +def test_parse_path(): + assert parse_paths("zip://foo.zip") == ("foo.zip", "zip", None) + + +def test_parse_path2(): + assert parse_paths("foo") == ("foo", None, None) + +def test_parse_vfs(): + assert parse_paths("/", "zip://foo.zip") == ("/", "zip", "foo.zip") diff -Nru fiona-1.7.10/.travis.yml fiona-1.8.6/.travis.yml --- fiona-1.7.10/.travis.yml 2017-10-26 22:27:31.000000000 +0000 +++ fiona-1.8.6/.travis.yml 2019-03-19 04:25:07.000000000 +0000 @@ -4,6 +4,7 @@ directories: - $GDALINST - ~/.cache/pip + env: global: - PIP_WHEEL_DIR=$HOME/.cache/pip/wheels @@ -11,10 +12,17 @@ - GDALINST=$HOME/gdalinstall - GDALBUILD=$HOME/gdalbuild matrix: - - GDALVERSION="1.9.2" - GDALVERSION="1.11.5" - GDALVERSION="2.0.3" - - GDALVERSION="2.1.1" + - GDALVERSION="2.1.4" + - GDALVERSION="2.2.4" + - GDALVERSION="2.3.2" + - GDALVERSION="trunk" + +matrix: + allow_failures: + - env: GDALVERSION="trunk" + addons: apt: packages: @@ -25,11 +33,14 @@ - libatlas-dev - libatlas-base-dev - gfortran + python: - "2.7" - - "3.5" + - "3.6" + before_install: - pip install -U pip + - pip install wheel coveralls>=1.1 --upgrade - pip install setuptools==36.0.1 - pip install wheel - . 
./scripts/travis_gdal_install.sh @@ -38,15 +49,21 @@ - export GDAL_DATA=$GDALINST/gdal-$GDALVERSION/share/gdal - export PROJ_LIB=/usr/share/proj - gdal-config --version + install: - - "if [ $(gdal-config --version) == \"$GDALVERSION\" ]; then echo \"Using gdal $GDALVERSION\"; else echo \"NOT using gdal $GDALVERSION as expected; aborting\"; exit 1; fi" - - "pip wheel -r requirements-dev.txt" - - "pip install -r requirements-dev.txt" - - "python setup.py sdist" - - "pip install --upgrade --force-reinstall --global-option=build_ext --global-option='-I$GDALINST/gdal-$GDALVERSION/include' --global-option='-L$GDALINST/gdal-$GDALVERSION/lib' --global-option='-R$GDALINST/gdal-$GDALVERSION/lib' dist/Fiona*" - - "fio --version" - - "cp -r tests /tmp" -script: - - "cd /tmp && coverage run --source=fiona --omit='*.pxd,*.pyx,*/tests/*,*/docs/*,*/examples/*,*/benchmarks/*' -m nose --exclude test_filter_vsi --exclude test_geopackage --exclude test_write_mismatch tests" + - pip install -r requirements-dev.txt + - if [ "$GDALVERSION" = "trunk" ]; then echo "Using gdal trunk"; elif [ $(gdal-config --version) == "$GDALVERSION" ]; then echo "Using gdal $GDALVERSION"; else echo "NOT using gdal $GDALVERSION as expected; aborting"; exit 1; fi + - pip install --upgrade --force-reinstall --global-option=build_ext --global-option='-I$GDALINST/gdal-$GDALVERSION/include' --global-option='-L$GDALINST/gdal-$GDALVERSION/lib' --global-option='-R$GDALINST/gdal-$GDALVERSION/lib' -e . + - pip install -e .[test] + - fio --version + - gdal-config --version + - fio --gdal-version + +script: + - python -m pytest -m "not wheel" --cov fiona --cov-report term-missing + after_success: - coveralls || echo "!! intermittent coveralls failure" + +before_cache: + - if [ "$GDALVERSION" = "trunk" ]; then rm -rf $GDALINST/gdal-$GDALVERSION; fi
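The mingdalversion marker in test_vfs.py above relies on fiona.gdal_version comparing directly against plain tuples, which is also what test_version.py asserts. A short sketch of that gating pattern:

    import fiona
    from fiona.ogrext import GDALVersion

    # runtime gate, as used by the S3 tests
    if fiona.gdal_version < (2, 1, 0):
        print("skipping: S3 access requires GDAL 2.1+")

    # tuples and GDALVersion instances compare componentwise
    assert (3, 2, 2) > GDALVersion(3, 2, 1)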