diff -Nru fiona-1.8.13/appveyor.yml fiona-1.8.20/appveyor.yml --- fiona-1.8.13/appveyor.yml 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/appveyor.yml 2021-05-31 21:29:33.000000000 +0000 @@ -12,8 +12,14 @@ CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd" GDAL_HOME: "C:\\gdal" PYTHONWARNINGS: "ignore:DEPRECATION::pip._internal.cli.base_command" + ENABLE_DEPRECATED_DRIVER_GTM: "YES" matrix: + # PYTHON_VERSION and PYTHON_ARCH are required by run_with_env.cmd. + # The 4-digit number in the GISInternals archives is the MSVC version used to build + # the libraries. It does not need to match the version of MSVC used to build Python. + # https://en.wikipedia.org/wiki/Microsoft_Visual_C%2B%2B#Internal_version_numbering + - PYTHON: "C:\\Python27-x64" PYTHON_VERSION: "2.7" PYTHON_ARCH: "64" @@ -49,6 +55,57 @@ GIS_INTERNALS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0-libs.zip" + - PYTHON: "C:\\Python36-x64" + PYTHON_VERSION: "3.6" + PYTHON_ARCH: "64" + GDAL_VERSION: "3.0.4" + GIS_INTERNALS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3.zip" + GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3-libs.zip" + PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" + + - PYTHON: "C:\\Python37-x64" + PYTHON_VERSION: "3.7" + PYTHON_ARCH: "64" + GDAL_VERSION: "2.4.2" + GIS_INTERNALS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0.zip" + GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0-libs.zip" + + - PYTHON: "C:\\Python37-x64" + PYTHON_VERSION: "3.7" + PYTHON_ARCH: "64" + GDAL_VERSION: "3.0.4" + GIS_INTERNALS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3.zip" + GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3-libs.zip" + PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" + + - PYTHON: "C:\\Python37-x64" + PYTHON_VERSION: "3.7" + PYTHON_ARCH: "64" + GDAL_VERSION: "3.1.2" + GIS_INTERNALS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1.zip" + GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1-libs.zip" + PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" + + - PYTHON: "C:\\Python38-x64" + PYTHON_VERSION: "3.8" + PYTHON_ARCH: "64" + GDAL_VERSION: "3.0.4" + GIS_INTERNALS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3.zip" + GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3-libs.zip" + PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" + + - PYTHON: "C:\\Python38-x64" + PYTHON_VERSION: "3.8" + PYTHON_ARCH: "64" + GDAL_VERSION: "3.1.2" + GIS_INTERNALS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1.zip" + GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1-libs.zip" + PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" + +matrix: + allow_failures: + - GDAL_VERSION: "1.11.4" + install: - ECHO "Filesystem root:" @@ -102,6 +159,7 @@ build_script: # Build the compiled extension - cmd: echo %PATH% + - cmd: echo %PYTHONPATH% # copy gisinternal gdal librarys into .libs @@ -121,12 +179,13 @@ test_script: # Run the project tests - cmd: SET - - ps: python -m pip list + - ps: python -c "import fiona" # Our Windows GDAL doesn't have iconv and can't support certain tests. - "%CMD_IN_ENV% python -m pytest -m \"not iconv and not wheel\" --cov fiona --cov-report term-missing" + artifacts: - path: dist\*.whl name: wheel diff -Nru fiona-1.8.13/CHANGES.txt fiona-1.8.20/CHANGES.txt --- fiona-1.8.13/CHANGES.txt 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/CHANGES.txt 2021-05-31 21:29:33.000000000 +0000 @@ -3,6 +3,97 @@ All issue numbers are relative to https://github.com/Toblerity/Fiona/issues. 
+1.8.20 (2021-05-31)
+-------------------
+
+Packaging:
+
+- Wheels include GDAL 3.3.0 and GEOS 3.9.1.
+
+Bug fixes:
+
+- Allow use with click 8 and higher (#1015).
+
+1.8.19 (2021-04-07)
+-------------------
+
+Packaging:
+
+- Wheels include GDAL 3.2.1 and PROJ 7.2.1.
+
+Bug fixes:
+
+- In fiona/env.py the GDAL data path is now configured using set_gdal_config
+  instead of by setting the GDAL_DATA environment variable (#1007).
+- Spurious iterator reset warnings have been eliminated (#987).
+
+1.8.18 (2020-11-17)
+-------------------
+
+- The precision option of transform has been fixed for the case of
+  GeometryCollections (#971, #972).
+- Added missing --co (creation) option to fio-load (#390).
+- If the certifi package can be imported, its certificate store location will
+  be passed to GDAL during import of fiona._env unless CURL_CA_BUNDLE is
+  already set.
+- Warn when feature fields named "" are found (#955).
+
+1.8.17 (2020-09-09)
+-------------------
+
+- To fix issue #952 the fio-cat command no longer cuts feature geometries at
+  the anti-meridian by default. A --cut-at-antimeridian option has been added
+  to allow cutting of geometries in a geographic destination coordinate
+  reference system.
+
+1.8.16 (2020-09-04)
+-------------------
+
+- More OGR errors and warnings arising in calls to GDAL C API functions are
+  surfaced (#946).
+- A circular import introduced in some cases in 1.8.15 has been fixed (#945).
+
+1.8.15 (2020-09-03)
+-------------------
+
+- Change shim functions to not return tuples (#942) as a solution for the
+  packaging problem reported in #941.
+- Raise a Python exception when VSIFOpenL fails (#937).
+
+1.8.14 (2020-08-31)
+-------------------
+
+- When creating a new Collection in a MemoryFile with a default (random) name,
+  Fiona will attempt to use a format driver-supported file extension (#934).
+  When initializing a MemoryFile with bytes of data formatted for a vector
+  driver that requires a certain file name or extension, the user should
+  continue to pass an appropriate filename and/or extension.
+- Read support for FlatGeobuf has been enabled in the drvsupport module.
+- The MemoryFile implementation has been improved so that it can support
+  multi-part S3 downloads (#906). This is largely a port of code from rasterio.
+- Axis ordering for results of fiona.transform was wrong when CRS were passed
+  in the "EPSG:dddd" form (#919). This has been fixed (#926).
+- Allow implicit access to the only dataset in a ZipMemoryFile. The path
+  argument of ZipMemoryFile.open() is now optional (#928).
+- Improve support for datetime types: support milliseconds (#744) and
+  timezones (#914), and improve warnings if a type is not supported by the
+  driver (#572).
+- Fix "Failed to commit transaction" TransactionError for the FileGDB driver.
+- Load GDAL DLL dependencies on Python 3.8+ / Windows with add_dll_directory() (#851).
+- Do not require optional properties (#848).
+- Ensure that slice does not overflow available data (#884).
+- Resolve issue when "ERROR 4: Unable to open EPSG support file gcs.csv." is
+  raised on importing fiona (#897).
+- Resolve issue resulting in possibly mixed-up field names (affecting only the
+  DXF, GPX, GPSTrackMaker, and DGN drivers) (#916).
+- Ensure crs_wkt is passed when writing to MemoryFile (#907).
+
+
+1.8.13.post1 (2020-02-21)
+-------------------------
+
+- This release is being made to improve binary wheel compatibility with
+  shapely 1.7.0. There have been no changes to the fiona package code since
+  1.8.13.
+
 1.8.13 (2019-12-05)
 -------------------
diff -Nru fiona-1.8.13/.coveragerc fiona-1.8.20/.coveragerc
--- fiona-1.8.13/.coveragerc 1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.20/.coveragerc 2021-05-31 21:29:33.000000000 +0000
@@ -0,0 +1,3 @@
+[run]
+plugins = Cython.Coverage
+omit = *pxd
diff -Nru fiona-1.8.13/debian/changelog fiona-1.8.20/debian/changelog
--- fiona-1.8.13/debian/changelog 2020-02-18 09:43:33.000000000 +0000
+++ fiona-1.8.20/debian/changelog 2021-12-22 09:00:00.000000000 +0000
@@ -1,20 +1,126 @@
-fiona (1.8.13-1build3) focal; urgency=medium
+fiona (1.8.20-2~focal1) focal; urgency=medium
 
- * No-change rebuild to drop python3.7.
+ * No change rebuild for GDAL 3.4.0 transition.
 
- -- Matthias Klose Tue, 18 Feb 2020 10:43:33 +0100
+ -- Angelos Tzotsos Wed, 22 Dec 2021 11:00:00 +0200
 
-fiona (1.8.13-1build2) focal; urgency=medium
+fiona (1.8.20-2~focal0) focal; urgency=medium
 
- * No-change rebuild with fixed binutils on arm64.
+ * No change rebuild for Focal.
 
- -- Matthias Klose Mon, 10 Feb 2020 08:12:04 +0100
+ -- Angelos Tzotsos Sat, 02 Oct 2021 13:00:00 +0300
 
-fiona (1.8.13-1build1) focal; urgency=medium
+fiona (1.8.20-2) unstable; urgency=medium
 
- * No-change rebuild for gdal soname change.
+ * Team upload.
+ * Set GDAL_ENABLE_DEPRECATED_DRIVER_GTM=YES to fix test failures.
+ * Skip tests/test_drvsupport.py, fails with GDAL 3.3.1.
+   (closes: #992526)
 
- -- Matthias Klose Tue, 14 Jan 2020 14:30:31 +0000
+ -- Bas Couwenberg Fri, 20 Aug 2021 11:08:06 +0200
+
+fiona (1.8.20-1) unstable; urgency=medium
+
+ * Team upload.
+ * Move from experimental to unstable.
+
+ -- Bas Couwenberg Sun, 15 Aug 2021 14:39:44 +0200
+
+fiona (1.8.20-1~exp1) experimental; urgency=medium
+
+ * Team upload.
+ * New upstream release.
+ * Refresh patches.
+
+ -- Bas Couwenberg Tue, 01 Jun 2021 05:42:39 +0200
+
+fiona (1.8.19-1~exp1) experimental; urgency=medium
+
+ * Team upload.
+ * New upstream release.
+ * Bump Standards-Version to 4.5.1, no changes.
+ * Update watch file for GitHub URL changes.
+ * Refresh patches.
+
+ -- Bas Couwenberg Thu, 08 Apr 2021 05:57:31 +0200
+
+fiona (1.8.18-1) unstable; urgency=medium
+
+ * Team upload.
+ * New upstream release.
+ * Bump watch file version to 4.
+ * Add python3-certifi to build dependencies.
+ * Refresh patches.
+ * Update lintian overrides.
+
+ -- Bas Couwenberg Wed, 18 Nov 2020 04:24:09 +0100
+
+fiona (1.8.17-1) unstable; urgency=medium
+
+ * Team upload.
+ * New upstream release.
+
+ -- Bas Couwenberg Thu, 10 Sep 2020 05:51:36 +0200
+
+fiona (1.8.16-1) unstable; urgency=medium
+
+ * Team upload.
+ * New upstream release.
+
+ -- Bas Couwenberg Sat, 05 Sep 2020 06:45:00 +0200
+
+fiona (1.8.15-1) unstable; urgency=medium
+
+ * Team upload.
+ * New upstream release.
+ * Drop unused lintian overrides.
+
+ -- Bas Couwenberg Fri, 04 Sep 2020 06:19:41 +0200
+
+fiona (1.8.14-3) unstable; urgency=medium
+
+ * Team upload.
+ * Also mark test_append_or_driver_error as flaky.
+
+ -- Bas Couwenberg Tue, 01 Sep 2020 08:39:46 +0200
+
+fiona (1.8.14-2) unstable; urgency=medium
+
+ * Team upload.
+ * Add patch to mark test_write_or_driver_error as flaky.
+
+ -- Bas Couwenberg Tue, 01 Sep 2020 07:32:18 +0200
+
+fiona (1.8.14-1) unstable; urgency=medium
+
+ * Team upload.
+ * New upstream release.
+ * Refresh patches.
+ * Add overrides for package-contains-documentation-outside-usr-share-doc.
+ * Mark patches as Forwarded: not-needed.
+
+ -- Bas Couwenberg Tue, 01 Sep 2020 06:08:59 +0200
+
+fiona (1.8.13-3) unstable; urgency=medium
+
+ * Team upload.
+ * Add patch to mark test_mapinfo as flaky. + (closes: #964461) + * Fix manpage. + + -- Bas Couwenberg Tue, 07 Jul 2020 18:43:25 +0200 + +fiona (1.8.13-2) unstable; urgency=medium + + * Team upload. + * Drop Name field from upstream metadata. + * Bump Standards-Version to 4.5.0, no changes. + * Update watch file to not match post releases. + * Bump debhelper compat to 10. + * Add upstream patch to fix FTBFS with GDAL 3.1. + (closes: #960369) + + -- Bas Couwenberg Fri, 15 May 2020 15:12:39 +0200 fiona (1.8.13-1) unstable; urgency=medium diff -Nru fiona-1.8.13/debian/compat fiona-1.8.20/debian/compat --- fiona-1.8.13/debian/compat 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.8.20/debian/compat 2020-03-19 16:53:06.000000000 +0000 @@ -1 +1 @@ -9 +10 diff -Nru fiona-1.8.13/debian/control fiona-1.8.20/debian/control --- fiona-1.8.13/debian/control 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.8.20/debian/control 2021-10-02 10:00:00.000000000 +0000 @@ -3,7 +3,7 @@ Uploaders: Johan Van de Wauw Section: python Priority: optional -Build-Depends: debhelper (>= 9), +Build-Depends: debhelper (>= 10~), dh-python, gdal-bin, libgdal-dev, @@ -12,6 +12,7 @@ python3-all-dev, python3-attr, python3-boto3, + python3-certifi, python3-click-plugins, python3-cligj, python3-mock, @@ -20,7 +21,7 @@ python3-setuptools, python3-six, python3-sphinx -Standards-Version: 4.4.1 +Standards-Version: 4.5.1 Vcs-Browser: https://salsa.debian.org/debian-gis-team/fiona Vcs-Git: https://salsa.debian.org/debian-gis-team/fiona.git Homepage: https://github.com/Toblerity/Fiona diff -Nru fiona-1.8.13/debian/fiona.1 fiona-1.8.20/debian/fiona.1 --- fiona-1.8.13/debian/fiona.1 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.8.20/debian/fiona.1 2020-07-07 16:53:26.000000000 +0000 @@ -1,7 +1,7 @@ .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.47.2. -.TH FIONA, "1" "September 2015" "fiona, version 1.6.2" "User Commands" +.TH FIONA "1" "September 2015" "fiona, version 1.6.2" "User Commands" .SH NAME -fiona, \- command line tools for reading/writing geospatial vector data +fiona \- command line tools for reading/writing geospatial vector data .SH SYNOPSIS .B fiona [\fI\,OPTIONS\/\fR] \fI\,COMMAND \/\fR[\fI\,ARGS\/\fR]... 
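The GDAL_ENABLE_DEPRECATED_DRIVER_GTM switch set in the changelog above (and mirrored by ENABLE_DEPRECATED_DRIVER_GTM in appveyor.yml and by debian/rules below) re-enables the GPSTrackMaker driver that GDAL 3.x deprecates. A minimal sketch of opting back in from Python; the .gtm path is hypothetical, and the variable has to be set before GDAL's driver registry is populated, i.e. before fiona is first imported:

    import os

    # Set before the first fiona import in the process; GDAL reads the
    # option when it registers its drivers.
    os.environ["GDAL_ENABLE_DEPRECATED_DRIVER_GTM"] = "YES"

    import fiona

    # Hypothetical GPSTrackMaker file, for illustration only.
    with fiona.open("tracks.gtm") as src:
        print(src.driver, len(src))
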
diff -Nru fiona-1.8.13/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch fiona-1.8.20/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch --- fiona-1.8.13/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch 2019-12-05 13:11:47.000000000 +0000 +++ fiona-1.8.20/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch 2021-08-15 12:39:37.000000000 +0000 @@ -9,7 +9,7 @@ --- a/setup.py +++ b/setup.py -@@ -314,7 +314,7 @@ setup_args = dict( +@@ -325,7 +325,7 @@ setup_args = dict( packages=['fiona', 'fiona.fio'], entry_points=''' [console_scripts] diff -Nru fiona-1.8.13/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch fiona-1.8.20/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch --- fiona-1.8.13/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.8.20/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch 2021-08-15 12:39:37.000000000 +0000 @@ -1,6 +1,7 @@ From: Johan Van de Wauw Date: Wed, 4 Feb 2015 20:26:56 +0100 Subject: Remove outside reference possible-privacy-breach +Forwarded: not-needed --- README.rst | 6 ------ @@ -10,15 +11,15 @@ +++ b/README.rst @@ -4,15 +4,6 @@ Fiona - Fiona is OGR's neat and nimble API for Python programmers. + Fiona is GDAL_'s neat and nimble vector API for Python programmers. --.. image:: https://travis-ci.org/Toblerity/Fiona.png?branch=master -- :target: https://travis-ci.org/Toblerity/Fiona +-.. image:: https://github.com/Toblerity/Fiona/workflows/Linux%20CI/badge.svg?branch=maint-1.8 +- :target: https://github.com/Toblerity/Fiona/actions?query=branch%3Amaint-1.8 - -.. image:: https://ci.appveyor.com/api/projects/status/github/Toblerity/Fiona?svg=true - :target: https://ci.appveyor.com/project/sgillies/fiona/branch/master - --.. image:: https://coveralls.io/repos/Toblerity/Fiona/badge.png +-.. image:: https://coveralls.io/repos/Toblerity/Fiona/badge.svg - :target: https://coveralls.io/r/Toblerity/Fiona - Fiona is designed to be simple and dependable. It focuses on reading and diff -Nru fiona-1.8.13/debian/patches/0006-Remove-unknown-distribution-options.patch fiona-1.8.20/debian/patches/0006-Remove-unknown-distribution-options.patch --- fiona-1.8.13/debian/patches/0006-Remove-unknown-distribution-options.patch 2019-12-05 13:11:50.000000000 +0000 +++ fiona-1.8.20/debian/patches/0006-Remove-unknown-distribution-options.patch 2021-08-15 12:39:37.000000000 +0000 @@ -3,10 +3,11 @@ UserWarning: Unknown distribution option: 'requires_python' UserWarning: Unknown distribution option: 'requires_external' Author: Bas Couwenberg +Forwarded: not-needed --- a/setup.py +++ b/setup.py -@@ -296,11 +296,8 @@ extras_require['all'] = list(set(it.chai +@@ -307,11 +307,8 @@ extras_require['all'] = list(set(it.chai setup_args = dict( cmdclass={'sdist': sdist_multi_gdal}, diff -Nru fiona-1.8.13/debian/patches/clean-target.patch fiona-1.8.20/debian/patches/clean-target.patch --- fiona-1.8.13/debian/patches/clean-target.patch 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.8.20/debian/patches/clean-target.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -Description: Fix clean target. - gdal_*_version undefined for `setup.py clean`. 
-Author: Bas Couwenberg - ---- a/setup.py -+++ b/setup.py -@@ -183,7 +183,7 @@ if 'clean' not in sys.argv: - gdal_major_version = int(gdal_version_parts[0]) - gdal_minor_version = int(gdal_version_parts[1]) - --log.info("GDAL version major=%r minor=%r", gdal_major_version, gdal_minor_version) -+ log.info("GDAL version major=%r minor=%r", gdal_major_version, gdal_minor_version) - - ext_options = dict( - include_dirs=include_dirs, diff -Nru fiona-1.8.13/debian/patches/series fiona-1.8.20/debian/patches/series --- fiona-1.8.13/debian/patches/series 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.8.20/debian/patches/series 2020-09-01 06:17:47.000000000 +0000 @@ -1,4 +1,4 @@ 0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch 0002-Remove-outside-reference-possible-privacy-breach.patch 0006-Remove-unknown-distribution-options.patch -clean-target.patch +test_drvsupport.patch diff -Nru fiona-1.8.13/debian/patches/test_drvsupport.patch fiona-1.8.20/debian/patches/test_drvsupport.patch --- fiona-1.8.13/debian/patches/test_drvsupport.patch 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/debian/patches/test_drvsupport.patch 2020-11-18 03:23:33.000000000 +0000 @@ -0,0 +1,23 @@ +Description: Mark test_write_or_driver_error as flaky. +Author: Bas Couwenberg +Bug: https://github.com/Toblerity/Fiona/issues/935 +Forwarded: not-needed + +--- a/tests/test_drvsupport.py ++++ b/tests/test_drvsupport.py +@@ -16,6 +16,7 @@ def test_geojsonseq(format): + assert format in fiona.drvsupport.supported_drivers.keys() + + ++@pytest.mark.xfail(strict=False) + @pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if 'w' in raw]) + def test_write_or_driver_error(tmpdir, driver, testdata_generator): + """ +@@ -95,6 +96,7 @@ def test_write_does_not_work_when_gdal_s + c.writerecords(records1) + + ++@pytest.mark.xfail(strict=False) + @pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if 'a' in raw]) + def test_append_or_driver_error(tmpdir, testdata_generator, driver): + """ Test if driver supports append mode. 
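The patch above depends on pytest's non-strict xfail semantics: a test marked xfail(strict=False) is reported as XFAIL when it fails and as XPASS when it passes, and neither outcome fails the session, which is what makes the marker suitable for flaky driver tests. A self-contained illustration of the pattern, with an invented test body:

    import random

    import pytest

    @pytest.mark.xfail(strict=False)
    def test_sometimes_flaky():
        # Fails about half the time; recorded as XFAIL or XPASS,
        # never as a hard failure of the test run.
        assert random.random() < 0.5
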
diff -Nru fiona-1.8.13/debian/rules fiona-1.8.20/debian/rules --- fiona-1.8.13/debian/rules 2019-10-25 17:52:52.000000000 +0000 +++ fiona-1.8.20/debian/rules 2021-08-20 09:05:22.000000000 +0000 @@ -10,6 +10,8 @@ DEB_BUILD_ARCH ?= $(shell dpkg-architecture -qDEB_BUILD_ARCH) +export GDAL_ENABLE_DEPRECATED_DRIVER_GTM=YES + export PYBUILD_NAME=fiona export PYBUILD_AFTER_BUILD_python3 = mkdir -p doc-build && cd doc-build && PYTHONPATH={build_dir} http_proxy='127.0.0.1:9' python{version} -m sphinx -N -bhtml -D today="$(BUILD_DATE)" ../docs/ ../build/html export PYBUILD_TEST_PYTEST=1 @@ -18,6 +20,7 @@ export PYBUILD_TEST_ARGS=--ignore tests/test_bytescollection.py \ --ignore tests/test_collection.py \ --ignore tests/test_data_paths.py \ + --ignore tests/test_drvsupport.py \ --ignore tests/test_feature.py \ --ignore tests/test_filter_vsi.py \ --ignore tests/test_fio_bounds.py \ diff -Nru fiona-1.8.13/debian/source/lintian-overrides fiona-1.8.20/debian/source/lintian-overrides --- fiona-1.8.13/debian/source/lintian-overrides 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.8.20/debian/source/lintian-overrides 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -# Not worth the effort -testsuite-autopkgtest-missing - diff -Nru fiona-1.8.13/debian/upstream/metadata fiona-1.8.20/debian/upstream/metadata --- fiona-1.8.13/debian/upstream/metadata 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.8.20/debian/upstream/metadata 2021-06-01 03:42:17.000000000 +0000 @@ -1,6 +1,5 @@ --- Bug-Database: https://github.com/Toblerity/Fiona/issues Bug-Submit: https://github.com/Toblerity/Fiona/issues/new -Name: Fiona Repository: https://github.com/Toblerity/Fiona.git Repository-Browse: https://github.com/Toblerity/Fiona diff -Nru fiona-1.8.13/debian/watch fiona-1.8.20/debian/watch --- fiona-1.8.13/debian/watch 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.8.20/debian/watch 2021-03-20 07:31:49.000000000 +0000 @@ -1,7 +1,7 @@ -version=3 +version=4 opts=\ dversionmangle=s/\+(debian|dfsg|ds|deb)\d*$//,\ uversionmangle=s/(\d)[_\.\-\+]?((RC|rc|pre|dev|b|beta|a|alpha)\d*)$/$1~$2/,\ filenamemangle=s/(?:.*?\/)?(?:rel|r|v|fiona)?[\-\_]?(\d\S+)\.(tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz)))/fiona-$1.$2/ \ https://github.com/Toblerity/Fiona/releases \ -(?:.*/archive/)?(?:rel|r|v|fiona)?[\-\_]?(\d\S+)\.(?:tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) +(?:.*?/archive/(?:.*?/)?)?(?:rel|r|v|fiona)?[\-\_]?(\d\S+(?`_ library. A very simple wrapper for minimalists. +wrapper for vector data access functions from the `GDAL/OGR +`_ library. A very simple wrapper for minimalists. It reads data records from files as GeoJSON-like mappings and writes the same kind of mappings as records back to files. That's it. There are no layers, no cursors, no geometric operations, no transformations between coordinate @@ -156,7 +155,7 @@ sink.write(f) - except Exception, e: + except Exception as e: logging.exception("Error processing feature %s:", f['id']) # The sink file is written to disk and closed when its block ends. @@ -171,7 +170,7 @@ same fields, and a record's fields concern a single geographic feature. Different systems model records in different ways, but the various models have enough in common that programmers have been able to create useful abstract data -models. The `OGR model `__ is one. Its +models. The `OGR model `__ is one. Its primary entities are :dfn:`Data Sources`, :dfn:`Layers`, and :dfn:`Features`. Features have not fields, but attributes and a :dfn:`Geometry`. An OGR Layer contains Features of a single type ("roads" or "wells", for example). 
The
@@ -271,10 +270,6 @@
 Collection indexing
 -------------------
 
-.. admonition::
-
-   New in version 1.1.6
-
 Features of a collection may also be accessed by index.
 
 .. code-block:: pycon
@@ -303,13 +298,15 @@
 Note that these indices are controlled by GDAL, and do not always follow
 Python conventions. They can start from 0, 1 (e.g. geopackages), or even other
 values, and have no guarantee of contiguity. Negative indices will only
 function correctly if indices start from 0 and are contiguous.
 
+New in version 1.1.6
+
 Closing Files
 -------------
 
 A :py:class:`~fiona.collection.Collection` involves external resources. There's
 no guarantee that these will be released unless you explicitly
 :py:meth:`~fiona.collection.Collection.close` the object or use
-a :py:keyword:`with` statement. When a :py:class:`~fiona.collection.Collection`
+a :keyword:`with` statement. When a :py:class:`~fiona.collection.Collection`
 is a context guard, it is closed no matter what happens within the block.
 
 .. sourcecode:: pycon
@@ -823,7 +820,7 @@
 the file CRS, format, and schema have not yet been defined and must be done so
 by the programmer. Still, it's not very complicated. A schema is just
 a mapping, as described above. A CRS is also just a mapping, and the possible
-formats are enumerated in the :py:attr:`fiona.supported_drivers` list.
+formats are enumerated in the :py:attr:`fiona.supported_drivers` dictionary.
 
 Review the parameters of our demo file.
@@ -1001,7 +998,7 @@
 a shapefile where the first field is 'foo' and the second field is 'bar'. If
 you want 'bar' to be the first field, you must use a list of property items
 
-.. sourcecode:: python
+.. sourcecode:: pycon
 
   c = fiona.open(
       '/tmp/file.shp',
@@ -1011,7 +1008,7 @@
 
 or an ordered dict.
 
-.. sourcecode:: python
+.. sourcecode:: pycon
 
   from collections import OrderedDict
@@ -1119,7 +1116,8 @@
 allowing efficient bounding box searches. A collection's
 :py:meth:`~fiona.collection.Collection.items` method returns an iterator over
 pairs of FIDs and records that intersect a given ``(minx, miny, maxx, maxy)``
-bounding box or geometry object. The
+bounding box or geometry object. Spatial filtering may be inaccurate, returning
+all features overlapping the envelope of the geometry. The
 collection's own coordinate reference system (see below) is used to interpret
 the box's values. If you want a list of the iterator's items, pass it to
 Python's builtin :py:func:`list` as shown below.
@@ -1151,7 +1149,7 @@
 3
 
 To filter features by property values, use Python's builtin :py:func:`filter` and
-:py:keyword:`lambda` or your own filter function that takes a single feature
+:keyword:`lambda` or your own filter function that takes a single feature
 record and returns ``True`` or ``False``.
 
 .. sourcecode:: pycon
@@ -1171,7 +1169,7 @@
 per file have been shown and the venerable Esri Shapefile has been the primary
 example. Other GIS data formats can encode multiple layers or feature types
 within a single file or directory. Esri's `File Geodatabase
-`__ is one example of such a format.
+`__ is one example of such a format.
 A more useful example, for the purpose of this manual, is a directory
 comprising multiple shapefiles. The following three shell commands will create
 just such a two layered data source from the test data distributed with Fiona.
@@ -1318,6 +1316,34 @@
 ...
 48
 
+
+Unsupported drivers
+-------------------
+
+In :py:attr:`fiona.supported_drivers`, a selection of GDAL/OGR
+drivers that are tested to work with Fiona is maintained. By default, Fiona
+allows only these drivers, with their listed access modes: r for read,
+a for append, and w for write.
+
+These restrictions can be circumvented by modifying :py:attr:`fiona.supported_drivers`:
+
+.. sourcecode:: python
+
+    import fiona
+    fiona.drvsupport.supported_drivers["LIBKML"] = "raw"
+    with fiona.open("file.kmz") as collection:
+        pass
+
+It should, however, first be verified that the local installation of GDAL/OGR
+includes the required driver:
+
+.. sourcecode:: python
+
+    from fiona.env import Env
+
+    with Env() as gdalenv:
+        print(gdalenv.drivers().keys())
+
 Dumpgj
 ======
@@ -1377,4 +1403,3 @@
 .. [GeoJSON] http://geojson.org
 .. [JSON] http://www.ietf.org/rfc/rfc4627
 .. [SFA] http://en.wikipedia.org/wiki/Simple_feature_access
-
diff -Nru fiona-1.8.13/fiona/collection.py fiona-1.8.20/fiona/collection.py
--- fiona-1.8.13/fiona/collection.py 2019-12-05 14:21:39.000000000 +0000
+++ fiona-1.8.20/fiona/collection.py 2021-05-31 21:29:33.000000000 +0000
@@ -5,19 +5,22 @@
 import os
 import warnings
 
-from fiona import compat, vfs
-from fiona.ogrext import Iterator, ItemsIterator, KeysIterator
-from fiona.ogrext import Session, WritingSession
-from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file, GEOMETRY_TYPES
-from fiona.errors import (DriverError, SchemaError, CRSError, UnsupportedGeometryTypeError, DriverSupportError)
-from fiona.logutils import FieldSkipLogFilter
-from fiona._crs import crs_to_wkt
-from fiona._env import get_gdal_release_name, get_gdal_version_tuple
-from fiona.env import env_ctx_if_needed
-from fiona.errors import FionaDeprecationWarning
-from fiona.drvsupport import supported_drivers
-from fiona.path import Path, vsi_path, parse_path
-from six import string_types, binary_type
+import fiona._loading
+with fiona._loading.add_gdal_dll_directories():
+    from fiona import compat, vfs
+    from fiona.ogrext import Iterator, ItemsIterator, KeysIterator
+    from fiona.ogrext import Session, WritingSession
+    from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file, GEOMETRY_TYPES
+    from fiona.errors import (DriverError, SchemaError, CRSError, UnsupportedGeometryTypeError, DriverSupportError)
+    from fiona.logutils import FieldSkipLogFilter
+    from fiona._crs import crs_to_wkt
+    from fiona._env import get_gdal_release_name, get_gdal_version_tuple
+    from fiona.env import env_ctx_if_needed
+    from fiona.errors import FionaDeprecationWarning
+    from fiona.drvsupport import (supported_drivers, driver_mode_mingdal, _driver_converts_field_type_silently_to_str,
+                                  _driver_supports_field)
+    from fiona.path import Path, vsi_path, parse_path
+    from six import string_types, binary_type
 
 log = logging.getLogger(__name__)
@@ -75,12 +78,6 @@
         if archive and not isinstance(archive, string_types):
             raise TypeError("invalid archive: %r" % archive)
 
-        # Check GDAL version against drivers
-        if (driver == "GPKG" and get_gdal_version_tuple() < (1, 11, 0)):
-            raise DriverError(
-                "GPKG driver requires GDAL 1.11.0, fiona was compiled "
-                "against: {}".format(get_gdal_release_name()))
-
         self.session = None
         self.iterator = None
         self._len = 0
@@ -94,6 +91,17 @@
         self.ignore_fields = ignore_fields
         self.ignore_geometry = bool(ignore_geometry)
 
+        # Check GDAL version against drivers
+        if driver in driver_mode_mingdal[mode] and get_gdal_version_tuple() < driver_mode_mingdal[mode][driver]:
+            min_gdal_version = ".".join(list(map(str, driver_mode_mingdal[mode][driver])))
+
+            raise DriverError(
+                "{driver} driver requires at least GDAL {min_gdal_version} for mode '{mode}', "
+                "Fiona was compiled against: {gdal}".format(driver=driver,
+                                                            mode=mode,
+                                                            min_gdal_version=min_gdal_version,
+                                                            gdal=get_gdal_release_name()))
+
         if vsi:
             self.path = vfs.vsi_path(path, vsi, archive)
             path = parse_path(self.path)
@@ -231,6 +239,10 @@
 
         Positional arguments ``stop`` or ``start, stop[, step]`` allows
         iteration to skip over items or stop at a specific item.
+
+        Note: spatial filtering using ``mask`` may be inaccurate, returning
+        all features overlapping the envelope of ``mask``.
+
         """
         if self.closed:
             raise ValueError("I/O operation on closed collection")
@@ -259,6 +271,10 @@
 
         Positional arguments ``stop`` or ``start, stop[, step]`` allows
         iteration to skip over items or stop at a specific item.
+
+        Note: spatial filtering using ``mask`` may be inaccurate, returning
+        all features overlapping the envelope of ``mask``.
+
         """
         if self.closed:
             raise ValueError("I/O operation on closed collection")
@@ -287,6 +303,9 @@
 
         Positional arguments ``stop`` or ``start, stop[, step]`` allows
         iteration to skip over items or stop at a specific item.
+
+        Note: spatial filtering using ``mask`` may be inaccurate, returning
+        all features overlapping the envelope of ``mask``.
         """
         if self.closed:
             raise ValueError("I/O operation on closed collection")
@@ -341,7 +360,7 @@
             raise IOError("collection not open for writing")
         self.session.writerecs(records, self)
         self._len = self.session.get_length()
-        self._bounds = self.session.get_extent()
+        self._bounds = None
 
     def write(self, record):
         """Stages a record for writing to disk."""
@@ -403,25 +422,23 @@
         for field in self._schema["properties"].values():
             field_type = field.split(":")[0]
-            if self._driver == "ESRI Shapefile":
-                if field_type == "datetime":
-                    raise DriverSupportError("ESRI Shapefile does not support datetime fields")
-                elif field_type == "time":
-                    raise DriverSupportError("ESRI Shapefile does not support time fields")
-            elif self._driver == "GPKG":
-                if field_type == "time":
-                    raise DriverSupportError("GPKG does not support time fields")
-                elif gdal_version_major == 1:
-                    if field_type == "datetime":
-                        raise DriverSupportError("GDAL 1.x GPKG driver does not support datetime fields")
-            elif self._driver == "GeoJSON":
-                if gdal_version_major == 1:
-                    if field_type == "date":
-                        warnings.warn("GeoJSON driver in GDAL 1.x silently converts date to string in non-standard format")
-                    elif field_type == "datetime":
-                        warnings.warn("GeoJSON driver in GDAL 1.x silently converts datetime to string in non-standard format")
-                    elif field_type == "time":
-                        warnings.warn("GeoJSON driver in GDAL 1.x silently converts time to string")
+
+            if not _driver_supports_field(self.driver, field_type):
+                if self.driver == 'GPKG' and gdal_version_major < 2 and field_type == "datetime":
+                    raise DriverSupportError("GDAL 1.x GPKG driver does not support datetime fields")
+                else:
+                    raise DriverSupportError("{driver} does not support {field_type} "
+                                             "fields".format(driver=self.driver,
+                                                             field_type=field_type))
+            elif field_type in {'time', 'datetime', 'date'} and _driver_converts_field_type_silently_to_str(self.driver,
+                                                                                                            field_type):
+                if self._driver == "GeoJSON" and gdal_version_major < 2 and field_type in {'datetime', 'date'}:
+                    warnings.warn("GeoJSON driver in GDAL 1.x silently converts {} to string"
+                                  " in non-standard format".format(field_type))
+                else:
+                    warnings.warn("{driver} driver silently converts {field_type} "
+                                  "to string".format(driver=self.driver,
+                                                     field_type=field_type))
 
     def flush(self):
         """Flush the buffer."""
@@ -429,7 +446,7 @@
             self.session.sync(self)
             new_len = self.session.get_length()
             self._len = new_len > self._len and new_len or self._len
-            self._bounds = self.session.get_extent()
+            self._bounds = None
 
     def close(self):
         """In append or write mode, flushes data to disk, then ends
diff -Nru fiona-1.8.13/fiona/drvsupport.py fiona-1.8.20/fiona/drvsupport.py
--- fiona-1.8.13/fiona/drvsupport.py 2019-12-05 14:21:39.000000000 +0000
+++ fiona-1.8.20/fiona/drvsupport.py 2021-05-31 21:29:33.000000000 +0000
@@ -1,13 +1,13 @@
 # -*- coding: utf-8 -*-
-
 from fiona.env import Env
+from fiona._env import get_gdal_version_num, calc_gdal_version_num
 
 # Here is the list of available drivers as (name, modes) tuples. Currently,
 # we only expose the defaults (excepting FileGDB). We also don't expose
 # the CSV or GeoJSON drivers. Use Python's csv and json modules instead.
 # Might still exclude a few more of these after making a pass through the
-# entries for each at http://www.gdal.org/ogr/ogr_formats.html to screen
+# entries for each at https://gdal.org/drivers/vector/index.html to screen
 # out the multi-layer formats.
 
 supported_drivers = dict([
@@ -25,10 +25,10 @@
     # Arc/Info Generate  ARCGEN  No  No  Yes
     ("ARCGEN", "r"),
     # Atlas BNA  BNA  Yes  No  Yes
-    ("BNA", "raw"),
+    ("BNA", "rw"),
     # AutoCAD DWG  DWG  No  No  No
     # AutoCAD DXF  DXF  Yes  No  Yes
-    ("DXF", "raw"),
+    ("DXF", "rw"),
     # Comma Separated Value (.csv)  CSV  Yes  No  Yes
     ("CSV", "raw"),
     # CouchDB / GeoCouch  CouchDB  Yes  Yes  No, needs libcurl
@@ -48,8 +48,9 @@
     # ESRI Shapefile  ESRI Shapefile  Yes  Yes  Yes
     ("ESRI Shapefile", "raw"),
     # FMEObjects Gateway  FMEObjects Gateway  No  Yes  No, needs FME
+    ("FlatGeobuf", "rw"),
     # GeoJSON  GeoJSON  Yes  Yes  Yes
-    ("GeoJSON", "rw"),
+    ("GeoJSON", "raw"),
     # GeoJSONSeq  GeoJSON sequences  Yes  Yes  Yes
     ("GeoJSONSeq", "rw"),
     # Géoconcept Export  Geoconcept  Yes  Yes  Yes
@@ -57,19 +58,21 @@
     # ("Geoconcept", "raw"),
     # Geomedia .mdb  Geomedia  No  No  No, needs ODBC library
     # GeoPackage  GPKG  Yes  Yes  No, needs libsqlite3
-    ("GPKG", "rw"),
+    ("GPKG", "raw"),
     # GeoRSS  GeoRSS  Yes  Yes  Yes (read support needs libexpat)
     # Google Fusion Tables  GFT  Yes  Yes  No, needs libcurl
     # GML  GML  Yes  Yes  Yes (read support needs Xerces or libexpat)
-    ("GML", "raw"),
+    ("GML", "rw"),
     # GMT  GMT  Yes  Yes  Yes
-    ("GMT", "raw"),
+    ("GMT", "rw"),
+    # GMT renamed to OGR_GMT for GDAL 2.x
+    ("OGR_GMT", "rw"),
     # GPSBabel  GPSBabel  Yes  Yes  Yes (needs GPSBabel and GPX driver)
     # GPX  GPX  Yes  Yes  Yes (read support needs libexpat)
-    ("GPX", "raw"),
+    ("GPX", "rw"),
     # GRASS  GRASS  No  Yes  No, needs libgrass
     # GPSTrackMaker (.gtm, .gtz)  GPSTrackMaker  Yes  Yes  Yes
-    ("GPSTrackMaker", "raw"),
+    ("GPSTrackMaker", "rw"),
     # Hydrographic Transfer Format  HTF  No  Yes  Yes
     # TODO: Fiona is not ready for multi-layer formats: ("HTF", "r"),
     # Idrisi Vector (.VCT)  Idrisi  No  Yes  Yes
@@ -96,9 +99,11 @@
     # multi-layer
     #   ("OpenAir", "r"),
     # PCI Geomatics Database File  PCIDSK  No  No  Yes, using internal PCIDSK SDK (from GDAL 1.7.0)
-    ("PCIDSK", "r"),
+    ("PCIDSK", "rw"),
     # PDS  PDS  No  Yes  Yes
     ("PDS", "r"),
+    # PDS renamed to OGR_PDS for GDAL 2.x
+    ("OGR_PDS", "r"),
     # PGDump  PostgreSQL SQL dump  Yes  Yes  Yes
     # PostgreSQL/PostGIS  PostgreSQL/PostGIS  Yes  Yes  No, needs PostgreSQL client library (libpq)
     # EPIInfo .REC  REC  No  No  Yes
@@ -115,6 +120,7 @@
     ("SEGY", "r"),
     # Norwegian SOSI Standard  SOSI  No  Yes  No, needs FYBA library
    # SQLite/SpatiaLite  SQLite  Yes  Yes  No, needs libsqlite3 or libspatialite
+    ("SQLite", "raw"),
     # SUA  SUA  No  Yes  Yes
     ("SUA", "r"),
     # SVG  SVG  No  Yes  No, needs libexpat
@@ -141,9 +147,42 @@
 ])
 
+# Minimal gdal version for different modes
+driver_mode_mingdal = { + + 'r': {'GPKG': (1, 11, 0), + 'GeoJSONSeq': (2, 4, 0), + 'FlatGeobuf': (3, 1, 0)}, + + 'w': {'GPKG': (1, 11, 0), + 'PCIDSK': (2, 0, 0), + 'GeoJSONSeq': (2, 4, 0), + 'FlatGeobuf': (3, 1, 3)}, + + 'a': {'GPKG': (1, 11, 0), + 'GeoJSON': (2, 1, 0), + 'MapInfo File': (2, 0, 0)} +} + + +def _driver_supports_mode(driver, mode): + """ Returns True if driver supports mode, False otherwise + + Note: this function is not part of Fiona's public API. + """ + if driver not in supported_drivers: + return False + if mode not in supported_drivers[driver]: + return False + if driver in driver_mode_mingdal[mode]: + if get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal[mode][driver]): + return False + return True + + # Removes drivers in the supported_drivers dictionary that the # machine's installation of OGR due to how it is compiled. -# OGR may not have optional libararies compiled or installed. +# OGR may not have optional libraries compiled or installed. def _filter_supported_drivers(): global supported_drivers @@ -156,4 +195,159 @@ supported_drivers = supported_drivers_copy + _filter_supported_drivers() + +# driver_converts_to_str contains field type, driver combinations that are silently converted to string +# None: field type is always converted to str +# (2, 0, 0): starting from gdal 2.0 field type is not converted to string +_driver_converts_to_str = { + 'time': { + 'CSV': None, + 'PCIDSK': None, + 'GeoJSON': (2, 0, 0), + 'GPKG': None, + 'GMT': None, + 'OGR_GMT': None + }, + 'datetime': { + 'CSV': None, + 'PCIDSK': None, + 'GeoJSON': (2, 0, 0), + 'GML': (3, 1, 0), + }, + 'date': { + 'CSV': None, + 'PCIDSK': None, + 'GeoJSON': (2, 0, 0), + 'GMT': None, + 'OGR_GMT': None, + 'GML': (3, 1, 0), + } +} + + +def _driver_converts_field_type_silently_to_str(driver, field_type): + """ Returns True if the driver converts the field_type silently to str, False otherwise + + Note: this function is not part of Fiona's public API. + """ + if field_type in _driver_converts_to_str and driver in _driver_converts_to_str[field_type]: + if _driver_converts_to_str[field_type][driver] is None: + return True + elif get_gdal_version_num() < calc_gdal_version_num(*_driver_converts_to_str[field_type][driver]): + return True + return False + + +# None: field type is never supported, (2, 0, 0) field type is supported starting with gdal 2.0 +_driver_field_type_unsupported = { + 'time': { + 'ESRI Shapefile': None, + 'GPKG': (2, 0, 0), + 'GPX': None, + 'GPSTrackMaker': None, + 'GML': (3, 1, 0), + 'DGN': None, + 'BNA': None, + 'DXF': None, + 'PCIDSK': (2, 1, 0), + 'FileGDB': None, + 'FlatGeobuf': None + }, + 'datetime': { + 'ESRI Shapefile': None, + 'GPKG': (2, 0, 0), + 'DGN': None, + 'BNA': None, + 'DXF': None, + 'PCIDSK': (2, 1, 0) + }, + 'date': { + 'GPX': None, + 'GPSTrackMaker': None, + 'DGN': None, + 'BNA': None, + 'DXF': None, + 'PCIDSK': (2, 1, 0), + 'FileGDB': None, + 'FlatGeobuf': None + } +} + + +def _driver_supports_field(driver, field_type): + """ Returns True if the driver supports the field_type, False otherwise + + Note: this function is not part of Fiona's public API. 
+ """ + if field_type in _driver_field_type_unsupported and driver in _driver_field_type_unsupported[field_type]: + if _driver_field_type_unsupported[field_type][driver] is None: + return False + elif get_gdal_version_num() < calc_gdal_version_num(*_driver_field_type_unsupported[field_type][driver]): + return False + + return True + + +# None: field type never supports timezones, (2, 0, 0): field type supports timezones with GDAL 2.0.0 +_drivers_not_supporting_timezones = { + 'datetime': { + 'MapInfo File': None, + 'GPKG': (3, 1, 0), + 'GPSTrackMaker': (3, 1, 1), + 'FileGDB': None, + 'SQLite': (2, 4, 0) + }, + 'time': { + 'MapInfo File': None, + 'GPKG': None, + 'GPSTrackMaker': None, + 'GeoJSON': None, + 'GeoJSONSeq': None, + 'GML': None, + 'CSV': None, + 'GMT': None, + 'OGR_GMT': None, + 'SQLite': None + } +} + + +def _driver_supports_timezones(driver, field_type): + """ Returns True if the driver supports timezones for field_type, False otherwise + + Note: this function is not part of Fiona's public API. + """ + if field_type in _drivers_not_supporting_timezones and driver in _drivers_not_supporting_timezones[field_type]: + if _drivers_not_supporting_timezones[field_type][driver] is None: + return False + elif get_gdal_version_num() < calc_gdal_version_num(*_drivers_not_supporting_timezones[field_type][driver]): + return False + return True + + +# None: driver never supports timezones, (2, 0, 0): driver supports timezones with GDAL 2.0.0 +_drivers_not_supporting_milliseconds = { + 'GPSTrackMaker': None, + 'FileGDB': None +} + + +def _driver_supports_milliseconds(driver): + """ Returns True if the driver supports milliseconds, False otherwise + + Note: this function is not part of Fiona's public API. + """ + # GDAL 2.0 introduced support for milliseconds + if get_gdal_version_num() < calc_gdal_version_num(2, 0, 0): + return False + + if driver in _drivers_not_supporting_milliseconds: + if _drivers_not_supporting_milliseconds[driver] is None: + return False + elif calc_gdal_version_num(*_drivers_not_supporting_milliseconds[driver]) < get_gdal_version_num(): + return False + + return True + diff -Nru fiona-1.8.13/fiona/env.py fiona-1.8.20/fiona/env.py --- fiona-1.8.13/fiona/env.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/env.py 2021-05-31 21:29:33.000000000 +0000 @@ -1,6 +1,5 @@ """Fiona's GDAL/AWS environment""" -from contextlib import contextmanager from functools import wraps, total_ordering import logging import os @@ -10,13 +9,22 @@ import attr from six import string_types -from fiona._env import ( - GDALEnv, calc_gdal_version_num, get_gdal_version_num, get_gdal_config, - set_gdal_config, get_gdal_release_name, GDALDataFinder, PROJDataFinder, - set_proj_data_search_path) -from fiona.compat import getargspec -from fiona.errors import EnvError, GDALVersionError -from fiona.session import Session, DummySession +import fiona._loading +with fiona._loading.add_gdal_dll_directories(): + from fiona._env import ( + GDALDataFinder, + GDALEnv, + PROJDataFinder, + calc_gdal_version_num, + get_gdal_config, + get_gdal_release_name, + get_gdal_version_num, + set_gdal_config, + set_proj_data_search_path, + ) + from fiona.compat import getargspec + from fiona.errors import EnvError, GDALVersionError + from fiona.session import Session, DummySession class ThreadEnv(threading.local): @@ -321,8 +329,10 @@ class NullContextManager(object): def __init__(self): pass + def __enter__(self): return self + def __exit__(self, *args): pass @@ -587,30 +597,41 @@ # Patch the environment if needed, 
such as in the installed wheel case. -if "GDAL_DATA" not in os.environ: +if 'GDAL_DATA' not in os.environ: + + path = GDALDataFinder().search_wheel() + + if path: + log.debug("GDAL data found in package: path=%r.", path) + set_gdal_config("GDAL_DATA", path) # See https://github.com/mapbox/rasterio/issues/1631. - if GDALDataFinder().find_file("header.dxf"): - log.debug("GDAL data files are available at built-in paths") + elif GDALDataFinder().find_file("header.dxf"): + log.debug("GDAL data files are available at built-in paths.") else: path = GDALDataFinder().search() if path: - os.environ['GDAL_DATA'] = path - log.debug("GDAL_DATA not found in environment, set to %r.", path) + set_gdal_config("GDAL_DATA", path) + log.debug("GDAL data found in other locations: path=%r.", path) if "PROJ_LIB" in os.environ: path = os.environ["PROJ_LIB"] set_proj_data_search_path(path) +elif PROJDataFinder().search_wheel(): + path = PROJDataFinder().search_wheel() + log.debug("PROJ data found in package: path=%r.", path) + set_proj_data_search_path(path) + # See https://github.com/mapbox/rasterio/issues/1631. elif PROJDataFinder().has_data(): - log.debug("PROJ data files are available at built-in paths") + log.debug("PROJ data files are available at built-in paths.") else: path = PROJDataFinder().search() if path: - log.debug("PROJ data not found in environment, setting to %r.", path) + log.debug("PROJ data found in other locations: path=%r.", path) set_proj_data_search_path(path) diff -Nru fiona-1.8.13/fiona/_env.pyx fiona-1.8.20/fiona/_env.pyx --- fiona-1.8.13/fiona/_env.pyx 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_env.pyx 2021-05-31 21:29:33.000000000 +0000 @@ -18,7 +18,7 @@ import threading from fiona._err cimport exc_wrap_int, exc_wrap_ogrerr -from fiona._shim cimport set_proj_search_path +from fiona._shim cimport set_proj_search_path, get_proj_version from fiona._err import CPLE_BaseError from fiona.errors import EnvError @@ -55,6 +55,11 @@ log = logging.getLogger(__name__) +try: + import certifi + os.environ.setdefault("CURL_CA_BUNDLE", certifi.where()) +except ImportError: + pass cdef bint is_64bit = sys.maxsize > 2 ** 32 @@ -114,6 +119,21 @@ return GDALVersion(major, minor, revision) +def get_proj_version_tuple(): + """ + Returns proj version tuple for gdal >= 3.0.1, otherwise None + """ + cdef int major + cdef int minor + cdef int patch + gdal_version_num = get_gdal_version_num() + if gdal_version_num < calc_gdal_version_num(3, 0, 1): + proj_version = None + else: + get_proj_version(&major, &minor, &patch) + return (major, minor, patch) + + cdef void log_error(CPLErr err_class, int err_no, const char* msg) with gil: """Send CPL debug messages and warnings to Python's logger.""" log = logging.getLogger(__name__) @@ -373,7 +393,6 @@ def start(self): CPLPushErrorHandler(logging_error_handler) - log.debug("Logging error handler pushed.") # The outer if statement prevents each thread from acquiring a # lock when the environment starts, and the inner avoids a @@ -384,41 +403,53 @@ GDALAllRegister() OGRRegisterAll() - log.debug("All drivers registered.") if 'GDAL_DATA' in os.environ: + log.debug("GDAL_DATA found in environment.") self.update_config_options(GDAL_DATA=os.environ['GDAL_DATA']) - log.debug("GDAL_DATA found in environment: %r.", os.environ['GDAL_DATA']) - - # See https://github.com/mapbox/rasterio/issues/1631. 
- elif GDALDataFinder().find_file("header.dxf"): - log.debug("GDAL data files are available at built-in paths") else: - path = GDALDataFinder().search() + path = GDALDataFinder().search_wheel() if path: + log.debug("GDAL data found in package: path=%r.", path) self.update_config_options(GDAL_DATA=path) - log.debug("GDAL_DATA not found in environment, set to %r.", path) + + # See https://github.com/mapbox/rasterio/issues/1631. + elif GDALDataFinder().find_file("header.dxf"): + log.debug("GDAL data files are available at built-in paths.") + + else: + path = GDALDataFinder().search() + + if path: + log.debug("GDAL data found in other locations: path=%r.", path) + self.update_config_options(GDAL_DATA=path) if 'PROJ_LIB' in os.environ: - log.debug("PROJ_LIB found in environment: %r.", os.environ['PROJ_LIB']) + log.debug("PROJ_LIB found in environment.") path = os.environ["PROJ_LIB"] set_proj_data_search_path(path) - elif PROJDataFinder().has_data(): - log.debug("PROJ data files are available at built-in paths") - else: - path = PROJDataFinder().search() + path = PROJDataFinder().search_wheel() if path: - log.debug("PROJ data not found in environment, setting to %r.", path) + log.debug("PROJ data found in package: path=%r.", path) set_proj_data_search_path(path) + elif PROJDataFinder().has_data(): + log.debug("PROJ data files are available at built-in paths.") + + else: + path = PROJDataFinder().search() + + if path: + log.debug("PROJ data found in other locations: path=%r.", path) + set_proj_data_search_path(path) + if driver_count() == 0: CPLPopErrorHandler() - log.debug("Error handler popped") raise ValueError("Drivers not registered.") # Flag the drivers as registered, otherwise every thread @@ -427,7 +458,7 @@ # actually makes it this far. self._have_registered_drivers = True - log.debug("Started GDALEnv %r.", self) + log.debug("Started GDALEnv: self=%r.", self) def stop(self): # NB: do not restore the CPL error handler to its default diff -Nru fiona-1.8.13/fiona/errors.py fiona-1.8.20/fiona/errors.py --- fiona-1.8.13/fiona/errors.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/errors.py 2021-05-31 21:29:33.000000000 +0000 @@ -65,3 +65,7 @@ class FionaDeprecationWarning(UserWarning): """A warning about deprecation of Fiona features""" + + +class FeatureWarning(UserWarning): + """A warning about serialization of a feature""" diff -Nru fiona-1.8.13/fiona/_err.pxd fiona-1.8.20/fiona/_err.pxd --- fiona-1.8.13/fiona/_err.pxd 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_err.pxd 2021-05-31 21:29:33.000000000 +0000 @@ -9,6 +9,7 @@ ctypedef int OGRErr +cdef get_last_error_msg() cdef int exc_wrap_int(int retval) except -1 cdef OGRErr exc_wrap_ogrerr(OGRErr retval) except -1 cdef void *exc_wrap_pointer(void *ptr) except NULL diff -Nru fiona-1.8.13/fiona/_err.pyx fiona-1.8.20/fiona/_err.pyx --- fiona-1.8.13/fiona/_err.pyx 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_err.pyx 2021-05-31 21:29:33.000000000 +0000 @@ -235,6 +235,27 @@ return +cdef get_last_error_msg(): + """Checks GDAL error stack for the latest error message + Returns + ------- + An error message or empty string + """ + + err_msg = CPLGetLastErrorMsg() + + if err_msg != NULL: + # Reformat messages. 
+ msg_b = err_msg + msg = msg_b.decode('utf-8') + msg = msg.replace("`", "'") + msg = msg.replace("\n", " ") + else: + msg = "" + + return msg + + cdef int exc_wrap_int(int err) except -1: """Wrap a GDAL/OGR function that returns CPLErr or OGRErr (int) diff -Nru fiona-1.8.13/fiona/fio/cat.py fiona-1.8.20/fiona/fio/cat.py --- fiona-1.8.13/fiona/fio/cat.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/fio/cat.py 2021-05-31 21:29:33.000000000 +0000 @@ -31,12 +31,33 @@ help="log errors but do not stop serialization.") @options.dst_crs_opt @cligj.use_rs_opt -@click.option('--bbox', default=None, metavar="w,s,e,n", - help="filter for features intersecting a bounding box") +@click.option( + "--bbox", + default=None, + metavar="w,s,e,n", + help="filter for features intersecting a bounding box", +) +@click.option( + "--cut-at-antimeridian", + is_flag=True, + default=False, + help="Optionally cut geometries at the anti-meridian. To be used only for a geographic destination CRS.", +) @click.pass_context @with_context_env -def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs, - use_rs, bbox, layer): +def cat( + ctx, + files, + precision, + indent, + compact, + ignore_errors, + dst_crs, + use_rs, + bbox, + cut_at_antimeridian, + layer, +): """ Concatenate and print the features of input datasets as a sequence of GeoJSON features. @@ -76,7 +97,7 @@ if dst_crs or precision >= 0: g = transform_geom( src.crs, dst_crs, feat['geometry'], - antimeridian_cutting=True, + antimeridian_cutting=cut_at_antimeridian, precision=precision) feat['geometry'] = g feat['bbox'] = fiona.bounds(g) diff -Nru fiona-1.8.13/fiona/fio/env.py fiona-1.8.20/fiona/fio/env.py --- fiona-1.8.13/fiona/fio/env.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/fio/env.py 2021-05-31 21:29:33.000000000 +0000 @@ -6,7 +6,9 @@ import click import fiona -from fiona._env import GDALDataFinder, PROJDataFinder +import fiona._loading +with fiona._loading.add_gdal_dll_directories(): + from fiona._env import GDALDataFinder, PROJDataFinder @click.command(short_help="Print information about the fio environment.") diff -Nru fiona-1.8.13/fiona/fio/info.py fiona-1.8.20/fiona/fio/info.py --- fiona-1.8.13/fiona/fio/info.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/fio/info.py 2021-05-31 21:29:33.000000000 +0000 @@ -9,6 +9,7 @@ import fiona import fiona.crs +from fiona.errors import DriverError from fiona.fio import options, with_context_env @@ -47,7 +48,13 @@ try: with fiona.open(input, layer=layer) as src: info = src.meta - info.update(bounds=src.bounds, name=src.name) + info.update(name=src.name) + try: + info.update(bounds=src.bounds) + except DriverError: + info.update(bounds=None) + logger.debug("Setting 'bounds' to None - driver " + "was not able to calculate bounds") try: info.update(count=len(src)) except TypeError: diff -Nru fiona-1.8.13/fiona/fio/load.py fiona-1.8.20/fiona/fio/load.py --- fiona-1.8.13/fiona/fio/load.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/fio/load.py 2021-05-31 21:29:33.000000000 +0000 @@ -13,6 +13,37 @@ from fiona.transform import transform_geom +def _cb_key_val(ctx, param, value): + """ + click callback to validate `--opt KEY1=VAL1 --opt KEY2=VAL2` and collect + in a dictionary like the one below, which is what the CLI function receives. + If no value or `None` is received then an empty dictionary is returned. + + { + 'KEY1': 'VAL1', + 'KEY2': 'VAL2' + } + + Note: `==VAL` breaks this as `str.split('=', 1)` is used. 
+ + """ + if not value: + return {} + else: + out = {} + for pair in value: + if "=" not in pair: + raise click.BadParameter( + "Invalid syntax for KEY=VAL arg: {}".format(pair) + ) + else: + k, v = pair.split("=", 1) + k = k.lower() + v = v.lower() + out[k] = None if v.lower() in ["none", "null", "nil", "nada"] else v + return out + + @click.command(short_help="Load GeoJSON to a dataset in another format.") @click.argument('output', required=True) @click.option('-f', '--format', '--driver', 'driver', required=True, @@ -21,16 +52,30 @@ @click.option('--dst-crs', '--dst_crs', help="Destination CRS. Defaults to --src-crs when not given.") @cligj.features_in_arg -@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer, - help="Load features into specified layer. Layers use " - "zero-based numbering when accessed by index.") +@click.option( + "--layer", + metavar="INDEX|NAME", + callback=options.cb_layer, + help="Load features into specified layer. Layers use " + "zero-based numbering when accessed by index.", +) +@click.option( + "--co", + "--profile", + "creation_options", + metavar="NAME=VALUE", + multiple=True, + callback=_cb_key_val, + help="Driver specific creation options. See the documentation for the selected output driver for more information.", +) @click.pass_context @with_context_env -def load(ctx, output, driver, src_crs, dst_crs, features, layer): +def load(ctx, output, driver, src_crs, dst_crs, features, layer, creation_options): """Load features from JSON to a file in another format. The input is a GeoJSON feature collection or optionally a sequence of GeoJSON feature objects. + """ logger = logging.getLogger(__name__) @@ -60,11 +105,14 @@ for k, v in first['properties'].items()]) with fiona.open( - output, 'w', - driver=driver, - crs=dst_crs, - schema=schema, - layer=layer) as dst: + output, + "w", + driver=driver, + crs=dst_crs, + schema=schema, + layer=layer, + **creation_options + ) as dst: dst.write(first) dst.writerecords(source) diff -Nru fiona-1.8.13/fiona/gdal.pxi fiona-1.8.20/fiona/gdal.pxi --- fiona-1.8.13/fiona/gdal.pxi 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/gdal.pxi 2021-05-31 21:29:33.000000000 +0000 @@ -226,8 +226,7 @@ char** GDALGetMetadata(GDALMajorObjectH obj, const char *pszDomain) int GDALSetMetadata(GDALMajorObjectH obj, char **papszMD, const char *pszDomain) - const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, - const char *pszDomain) + const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) int GDALSetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszValue, const char *pszDomain) const GDALColorEntry *GDALGetColorEntry(GDALColorTableH table, int) @@ -348,7 +347,7 @@ OGRGeometryH OGR_G_CreateGeometryFromJson(const char *json) void OGR_G_DestroyGeometry(OGRGeometryH geometry) char *OGR_G_ExportToJson(OGRGeometryH geometry) - void OGR_G_ExportToWkb(OGRGeometryH geometry, int endianness, char *buffer) + OGRErr OGR_G_ExportToWkb(OGRGeometryH geometry, int endianness, char *buffer) int OGR_G_GetCoordinateDimension(OGRGeometryH geometry) int OGR_G_GetGeometryCount(OGRGeometryH geometry) const char *OGR_G_GetGeometryName(OGRGeometryH geometry) diff -Nru fiona-1.8.13/fiona/_geometry.pxd fiona-1.8.20/fiona/_geometry.pxd --- fiona-1.8.13/fiona/_geometry.pxd 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_geometry.pxd 2021-05-31 21:29:33.000000000 +0000 @@ -103,7 +103,7 @@ double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY 
(void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) - void OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) + OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) diff -Nru fiona-1.8.13/fiona/_geometry.pyx fiona-1.8.20/fiona/_geometry.pyx --- fiona-1.8.13/fiona/_geometry.pyx 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_geometry.pyx 2021-05-31 21:29:33.000000000 +0000 @@ -5,6 +5,7 @@ import logging from fiona.errors import UnsupportedGeometryTypeError +from fiona._err cimport exc_wrap_int class NullHandler(logging.Handler): @@ -100,7 +101,7 @@ cdef unsigned char *buffer = wkb cdef void *cogr_geometry = OGR_G_CreateGeometry(wkbtype) if cogr_geometry is not NULL: - OGR_G_ImportFromWkb(cogr_geometry, buffer, len(wkb)) + exc_wrap_int(OGR_G_ImportFromWkb(cogr_geometry, buffer, len(wkb))) return cogr_geometry @@ -234,7 +235,7 @@ cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['Polygon']) for ring in coordinates: cogr_ring = self._buildLinearRing(ring) - OGR_G_AddGeometryDirectly(cogr_geometry, cogr_ring) + exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_ring)) return cogr_geometry cdef void * _buildMultiPoint(self, object coordinates) except NULL: @@ -242,7 +243,7 @@ cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiPoint']) for coordinate in coordinates: cogr_part = self._buildPoint(coordinate) - OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) + exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry cdef void * _buildMultiLineString(self, object coordinates) except NULL: @@ -250,7 +251,7 @@ cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiLineString']) for line in coordinates: cogr_part = self._buildLineString(line) - OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) + exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry cdef void * _buildMultiPolygon(self, object coordinates) except NULL: @@ -258,7 +259,7 @@ cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['MultiPolygon']) for part in coordinates: cogr_part = self._buildPolygon(part) - OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) + exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry cdef void * _buildGeometryCollection(self, object coordinates) except NULL: @@ -266,7 +267,7 @@ cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['GeometryCollection']) for part in coordinates: cogr_part = OGRGeomBuilder().build(part) - OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part) + exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry cdef void * build(self, object geometry) except NULL: diff -Nru fiona-1.8.13/fiona/__init__.py fiona-1.8.20/fiona/__init__.py --- fiona-1.8.13/fiona/__init__.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/__init__.py 2021-05-31 21:29:33.000000000 +0000 @@ -67,7 +67,7 @@ import os import sys import warnings - +import platform from six import string_types try: @@ -81,28 +81,31 @@ libdir = os.path.join(os.path.dirname(__file__), ".libs") os.environ["PATH"] = os.environ["PATH"] + ";" + libdir -from fiona.collection import BytesCollection, Collection -from fiona.drvsupport import supported_drivers -from fiona.env import ensure_env_with_credentials, Env -from fiona.errors import FionaDeprecationWarning 
-from fiona._env import driver_count -from fiona._env import ( - calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name, - get_gdal_version_tuple) -from fiona.compat import OrderedDict -from fiona.io import MemoryFile -from fiona.ogrext import _bounds, _listlayers, FIELD_TYPES_MAP, _remove, _remove_layer -from fiona.path import ParsedPath, parse_path, vsi_path -from fiona.vfs import parse_paths as vfs_parse_paths - -# These modules are imported by fiona.ogrext, but are also import here to -# help tools like cx_Freeze find them automatically -from fiona import _geometry, _err, rfc3339 -import uuid +import fiona._loading +with fiona._loading.add_gdal_dll_directories(): + from fiona.collection import BytesCollection, Collection + from fiona.drvsupport import supported_drivers + from fiona.env import ensure_env_with_credentials, Env + from fiona.errors import FionaDeprecationWarning + from fiona._env import driver_count + from fiona._env import ( + calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name, + get_gdal_version_tuple) + from fiona.compat import OrderedDict + from fiona.io import MemoryFile + from fiona.ogrext import _bounds, _listlayers, FIELD_TYPES_MAP, _remove, _remove_layer + from fiona.path import ParsedPath, parse_path, vsi_path + from fiona.vfs import parse_paths as vfs_parse_paths + from fiona._show_versions import show_versions + + # These modules are imported by fiona.ogrext, but are also import here to + # help tools like cx_Freeze find them automatically + from fiona import _geometry, _err, rfc3339 + import uuid __all__ = ['bounds', 'listlayers', 'open', 'prop_type', 'prop_width'] -__version__ = "1.8.13" +__version__ = "1.8.20" __gdal_version__ = get_gdal_release_name() gdal_version = get_gdal_version_tuple() @@ -224,9 +227,9 @@ def fp_writer(fp): memfile = MemoryFile() dataset = memfile.open( - driver=driver, crs=crs, schema=schema, layer=layer, + driver=driver, crs=crs, schema=this_schema, layer=layer, encoding=encoding, enabled_drivers=enabled_drivers, - **kwargs) + crs_wkt=crs_wkt, **kwargs) try: yield dataset finally: @@ -256,7 +259,14 @@ if schema: # Make an ordered dict of schema properties. this_schema = schema.copy() - this_schema['properties'] = OrderedDict(schema['properties']) + if 'properties' in schema: + this_schema['properties'] = OrderedDict(schema['properties']) + else: + this_schema['properties'] = OrderedDict() + + if 'geometry' not in this_schema: + this_schema['geometry'] = None + else: this_schema = None c = Collection(path, mode, crs=crs, driver=driver, schema=this_schema, diff -Nru fiona-1.8.13/fiona/io.py fiona-1.8.20/fiona/io.py --- fiona-1.8.13/fiona/io.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/io.py 2021-05-31 21:29:33.000000000 +0000 @@ -4,8 +4,10 @@ from collections import OrderedDict import logging -from fiona.ogrext import MemoryFileBase -from fiona.collection import Collection +import fiona._loading +with fiona._loading.add_gdal_dll_directories(): + from fiona.ogrext import MemoryFileBase + from fiona.collection import Collection log = logging.getLogger(__name__) @@ -24,7 +26,9 @@ -------- """ - def __init__(self, file_or_bytes=None, filename=None, ext=''): + def __init__(self, file_or_bytes=None, filename=None, ext=""): + if ext and not ext.startswith("."): + ext = "." + ext super(MemoryFile, self).__init__( file_or_bytes=file_or_bytes, filename=filename, ext=ext) @@ -45,25 +49,36 @@ Other parameters are optional and have the same semantics as the parameters of `fiona.open()`. 
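The reworked MemoryFile.open() now branches on whether the in-memory file already holds data: a file initialized from bytes is opened read-only, while an empty one is opened for writing after _ensure_extension() has given its name a driver-appropriate suffix. A minimal round trip, assuming GDAL's GeoJSON driver can sniff the payload; this sketch is illustrative and not taken from the fiona test suite:

    from fiona.io import MemoryFile

    # A one-feature GeoJSON collection held entirely in memory.
    data = b'''{"type": "FeatureCollection", "features": [
        {"type": "Feature", "properties": {"name": "a"},
         "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}}]}'''

    # Initialized with bytes, the MemoryFile is in "r" mode, so open()
    # takes the read branch. Note that ext="geojson" is normalized to
    # ".geojson" by the new constructor.
    with MemoryFile(data, ext="geojson") as memfile:
        with memfile.open() as colxn:
            print(len(colxn))  # -> 1

ZipMemoryFile.open() gains a similar convenience below: its path argument is now optional, and when omitted the bare /vsizip prefix is used so the only dataset in the archive is opened implicitly.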
""" - vsi_path = self.name - if self.closed: raise IOError("I/O operation on closed file.") - if self.exists(): - return Collection(vsi_path, 'r', driver=driver, encoding=encoding, - layer=layer, enabled_drivers=enabled_drivers, - **kwargs) - else: - if schema: - # Make an ordered dict of schema properties. - this_schema = schema.copy() - this_schema['properties'] = OrderedDict(schema['properties']) - else: - this_schema = None - return Collection(vsi_path, 'w', crs=crs, driver=driver, - schema=this_schema, encoding=encoding, - layer=layer, enabled_drivers=enabled_drivers, - crs_wkt=crs_wkt, **kwargs) + + if not self.exists(): + self._ensure_extension(driver) + this_schema = schema.copy() + this_schema["properties"] = OrderedDict(schema["properties"]) + return Collection( + self.name, + "w", + crs=crs, + driver=driver, + schema=this_schema, + encoding=encoding, + layer=layer, + enabled_drivers=enabled_drivers, + crs_wkt=crs_wkt, + **kwargs + ) + + elif self.mode in ("r", "r+"): + return Collection( + self.name, + "r", + driver=driver, + encoding=encoding, + layer=layer, + enabled_drivers=enabled_drivers, + **kwargs + ) def __enter__(self): return self @@ -80,9 +95,9 @@ """ def __init__(self, file_or_bytes=None): - super(ZipMemoryFile, self).__init__(file_or_bytes, ext='zip') + super(ZipMemoryFile, self).__init__(file_or_bytes, ext=".zip") - def open(self, path, driver=None, encoding=None, layer=None, + def open(self, path=None, driver=None, encoding=None, layer=None, enabled_drivers=None, **kwargs): """Open a dataset within the zipped stream. @@ -95,11 +110,15 @@ Returns ------- A Fiona collection object - """ - vsi_path = '/vsizip{0}/{1}'.format(self.name, path.lstrip('/')) + """ if self.closed: raise IOError("I/O operation on closed file.") + if path: + vsi_path = '/vsizip{0}/{1}'.format(self.name, path.lstrip('/')) + else: + vsi_path = '/vsizip{0}'.format(self.name) + return Collection(vsi_path, 'r', driver=driver, encoding=encoding, layer=layer, enabled_drivers=enabled_drivers, **kwargs) diff -Nru fiona-1.8.13/fiona/_loading.py fiona-1.8.20/fiona/_loading.py --- fiona-1.8.13/fiona/_loading.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/fiona/_loading.py 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,28 @@ +import glob +import os +import logging +import contextlib +import platform +import sys + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + +# With Python >= 3.8 on Windows directories in PATH are not automatically +# searched for DLL dependencies and must be added manually with +# os.add_dll_directory. 
+# see https://github.com/Toblerity/Fiona/issues/851 + + +@contextlib.contextmanager +def add_gdal_dll_directories(): + dll_dirs = [] + if platform.system() == 'Windows' and sys.version_info >= (3, 8): + dll_directory = os.path.join(os.path.dirname(__file__), '.libs') + if os.path.exists(dll_directory): + dll_dirs.append(os.add_dll_directory(dll_directory)) + try: + yield None + finally: + for dll_dir in dll_dirs: + dll_dir.close() diff -Nru fiona-1.8.13/fiona/ogrext1.pxd fiona-1.8.20/fiona/ogrext1.pxd --- fiona-1.8.13/fiona/ogrext1.pxd 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/ogrext1.pxd 2021-05-31 21:29:33.000000000 +0000 @@ -4,18 +4,27 @@ from libc.stdio cimport FILE + cdef extern from "gdal.h": + ctypedef void * GDALDriverH + ctypedef void * GDALMajorObjectH + + const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) char * GDALVersionInfo (char *pszRequest) + cdef extern from "gdal_version.h": int GDAL_COMPUTE_VERSION(int maj, int min, int rev) + cdef extern from "cpl_conv.h": void * CPLMalloc (size_t) void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) void CPLSetConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) + int CPLCheckForFile(char *, char **) + cdef extern from "cpl_string.h": char ** CSLAddNameValue (char **list, char *name, char *value) @@ -23,9 +32,17 @@ void CSLDestroy (char **list) char ** CSLAddString(char **list, const char *string) + +cdef extern from "sys/stat.h" nogil: + struct stat: + pass + + cdef extern from "cpl_vsi.h" nogil: + ctypedef int vsi_l_offset ctypedef FILE VSILFILE + ctypedef stat VSIStatBufL unsigned char *VSIGetMemFileBuffer(const char *path, vsi_l_offset *data_len, @@ -35,14 +52,16 @@ VSILFILE* VSIFOpenL(const char *path, const char *mode) int VSIFCloseL(VSILFILE *fp) int VSIUnlink(const char *path) - + int VSIMkdir(const char *path, long mode) + int VSIRmdir(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) - int VSIUnlink (const char * pathname) + int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + ctypedef int OGRErr ctypedef struct OGREnvelope: @@ -51,6 +70,7 @@ double MinY double MaxY + cdef extern from "ogr_core.h": ctypedef enum OGRwkbGeometryType: wkbUnknown @@ -234,7 +254,7 @@ double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) - void OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) + OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) void * OGR_G_ForceToMultiPolygon (void *geometry) void * OGR_G_ForceToPolygon (void *geometry) diff -Nru fiona-1.8.13/fiona/ogrext2.pxd fiona-1.8.20/fiona/ogrext2.pxd --- fiona-1.8.13/fiona/ogrext2.pxd 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/ogrext2.pxd 2021-05-31 21:29:33.000000000 +0000 @@ -117,9 +117,13 @@ char * ODsCCreateLayer = "CreateLayer" char * ODsCDeleteLayer = "DeleteLayer" + char * ODsCTransactions = "Transactions" cdef extern from "gdal.h": + ctypedef void * GDALDriverH + ctypedef void * GDALMajorObjectH + char * GDALVersionInfo (char *pszRequest) void * GDALGetDriverByName(const char * 
pszName) void * GDALOpenEx(const char * pszFilename, @@ -157,7 +161,7 @@ OGRErr GDALDatasetCommitTransaction (void * hDataset) OGRErr GDALDatasetRollbackTransaction (void * hDataset) int GDALDatasetTestCapability (void * hDataset, char *) - + const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) ctypedef enum GDALDataType: GDT_Unknown @@ -182,6 +186,7 @@ void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) + int CPLCheckForFile(char *, char **) cdef extern from "cpl_string.h": @@ -191,9 +196,16 @@ char ** CSLAddString(char **list, const char *string) +cdef extern from "sys/stat.h" nogil: + struct stat: + pass + + cdef extern from "cpl_vsi.h" nogil: + ctypedef int vsi_l_offset ctypedef FILE VSILFILE + ctypedef stat VSIStatBufL unsigned char *VSIGetMemFileBuffer(const char *path, vsi_l_offset *data_len, @@ -203,14 +215,15 @@ VSILFILE* VSIFOpenL(const char *path, const char *mode) int VSIFCloseL(VSILFILE *fp) int VSIUnlink(const char *path) - + int VSIMkdir(const char *path, long mode) + int VSIRmdir(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) - int VSIUnlink (const char * pathname) + int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) cdef extern from "ogr_srs_api.h": @@ -237,16 +250,12 @@ cdef extern from "ogr_api.h": const char * OGR_Dr_GetName (void *driver) - void * OGR_Dr_CreateDataSource (void *driver, const char *path, char **options) - int OGR_Dr_DeleteDataSource (void *driver, char *) - void * OGR_Dr_Open (void *driver, const char *path, int bupdate) int OGR_Dr_TestCapability (void *driver, const char *) - int OGR_DS_DeleteLayer (void *datasource, int n) void * OGR_F_Create (void *featuredefn) void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) int OGR_F_IsFieldSet (void *feature, int n) - int OGR_F_GetFieldAsDateTime (void *feature, int n, int *y, int *m, int *d, int *h, int *m, int *s, int *z) + int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) @@ -256,7 +265,7 @@ int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) void * OGR_F_StealGeometry (void *feature) - void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) + void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) void OGR_F_SetFieldString (void *feature, int n, char *value) @@ -287,7 +296,6 @@ void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) - int OGR_G_GetCoordinateDimension (void *geometry) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) @@ -296,7 +304,7 @@ double OGR_G_GetX (void 
*geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) - void OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) + OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) void * OGR_G_ForceToMultiPolygon (void *geometry) void * OGR_G_ForceToPolygon (void *geometry) @@ -318,10 +326,6 @@ void *layer, double minx, double miny, double maxx, double maxy ) int OGR_L_TestCapability (void *layer, char *name) - void * OGRGetDriverByName (char *) - void * OGROpen (char *path, int mode, void *x) - void * OGROpenShared (char *path, int mode, void *x) - int OGRReleaseDataSource (void *datasource) OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) long long OGR_F_GetFieldAsInteger64 (void *feature, int n) diff -Nru fiona-1.8.13/fiona/ogrext3.pxd fiona-1.8.20/fiona/ogrext3.pxd --- fiona-1.8.13/fiona/ogrext3.pxd 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/ogrext3.pxd 2021-05-31 21:29:33.000000000 +0000 @@ -117,9 +117,13 @@ char * ODsCCreateLayer = "CreateLayer" char * ODsCDeleteLayer = "DeleteLayer" + char * ODsCTransactions = "Transactions" cdef extern from "gdal.h": + ctypedef void * GDALDriverH + ctypedef void * GDALMajorObjectH + char * GDALVersionInfo (char *pszRequest) void * GDALGetDriverByName(const char * pszName) void * GDALOpenEx(const char * pszFilename, @@ -157,7 +161,7 @@ OGRErr GDALDatasetCommitTransaction (void * hDataset) OGRErr GDALDatasetRollbackTransaction (void * hDataset) int GDALDatasetTestCapability (void * hDataset, char *) - + const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain) ctypedef enum GDALDataType: GDT_Unknown @@ -174,14 +178,17 @@ GDT_CFloat64 GDT_TypeCount + cdef extern from "gdal_version.h": int GDAL_COMPUTE_VERSION(int maj, int min, int rev) + cdef extern from "cpl_conv.h": void * CPLMalloc (size_t) void CPLFree (void *ptr) void CPLSetThreadLocalConfigOption (char *key, char *val) const char *CPLGetConfigOption (char *, char *) + int CPLCheckForFile(char *, char **) cdef extern from "cpl_string.h": @@ -191,9 +198,16 @@ char ** CSLAddString(char **list, const char *string) +cdef extern from "sys/stat.h" nogil: + struct stat: + pass + + cdef extern from "cpl_vsi.h" nogil: + ctypedef int vsi_l_offset ctypedef FILE VSILFILE + ctypedef stat VSIStatBufL unsigned char *VSIGetMemFileBuffer(const char *path, vsi_l_offset *data_len, @@ -203,14 +217,15 @@ VSILFILE* VSIFOpenL(const char *path, const char *mode) int VSIFCloseL(VSILFILE *fp) int VSIUnlink(const char *path) - + int VSIMkdir(const char *path, long mode) + int VSIRmdir(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) - int VSIUnlink (const char * pathname) + int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) cdef extern from "ogr_srs_api.h": @@ -232,20 +247,18 @@ void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) void OCTDestroyCoordinateTransformation (void *source) int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) + void OSRGetPROJVersion (int *pnMajor, int *pnMinor, int *pnPatch) + cdef 
extern from "ogr_api.h": const char * OGR_Dr_GetName (void *driver) - void * OGR_Dr_CreateDataSource (void *driver, const char *path, char **options) - int OGR_Dr_DeleteDataSource (void *driver, char *) - void * OGR_Dr_Open (void *driver, const char *path, int bupdate) int OGR_Dr_TestCapability (void *driver, const char *) - int OGR_DS_DeleteLayer (void *datasource, int n) void * OGR_F_Create (void *featuredefn) void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) int OGR_F_IsFieldSet (void *feature, int n) - int OGR_F_GetFieldAsDateTime (void *feature, int n, int *y, int *m, int *d, int *h, int *m, int *s, int *z) + int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) @@ -255,7 +268,7 @@ int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) void * OGR_F_StealGeometry (void *feature) - void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) + void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) void OGR_F_SetFieldString (void *feature, int n, char *value) @@ -285,8 +298,7 @@ void * OGR_G_CreateGeometry (int wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) - void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) - int OGR_G_GetCoordinateDimension (void *geometry) + OGRErr OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) @@ -317,10 +329,6 @@ void *layer, double minx, double miny, double maxx, double maxy ) int OGR_L_TestCapability (void *layer, char *name) - void * OGRGetDriverByName (char *) - void * OGROpen (char *path, int mode, void *x) - void * OGROpenShared (char *path, int mode, void *x) - int OGRReleaseDataSource (void *datasource) OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) long long OGR_F_GetFieldAsInteger64 (void *feature, int n) diff -Nru fiona-1.8.13/fiona/ogrext.pyx fiona-1.8.20/fiona/ogrext.pyx --- fiona-1.8.13/fiona/ogrext.pyx 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/ogrext.pyx 2021-05-31 21:29:33.000000000 +0000 @@ -9,7 +9,7 @@ import os import warnings import math -import uuid +from uuid import uuid4 from collections import namedtuple from six import integer_types, string_types, text_type @@ -19,17 +19,18 @@ from fiona._geometry cimport ( GeomBuilder, OGRGeomBuilder, geometry_type_code, normalize_geometry_type_code, base_geometry_type_code) -from fiona._err cimport exc_wrap_int, exc_wrap_pointer, exc_wrap_vsilfile +from fiona._err cimport exc_wrap_int, exc_wrap_pointer, exc_wrap_vsilfile, get_last_error_msg import fiona -from fiona._env import GDALVersion, get_gdal_version_num +from fiona._env import get_gdal_version_num, calc_gdal_version_num, get_gdal_version_tuple from fiona._err import cpl_errs, FionaNullPointerError, CPLE_BaseError, CPLE_OpenFailedError from fiona._geometry import GEOMETRY_TYPES from fiona import compat +from fiona.env import Env from 
fiona.errors import ( DriverError, DriverIOError, SchemaError, CRSError, FionaValueError, TransactionError, GeometryTypeValidationError, DatasetDeleteError, - FionaDeprecationWarning) + FeatureWarning, FionaDeprecationWarning) from fiona.compat import OrderedDict from fiona.rfc3339 import parse_date, parse_datetime, parse_time from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType @@ -41,6 +42,7 @@ from libc.stdlib cimport malloc, free from libc.string cimport strcmp from cpython cimport PyBytes_FromStringAndSize, PyBytes_AsString +from fiona.drvsupport import _driver_supports_timezones cdef extern from "ogr_api.h" nogil: @@ -78,6 +80,7 @@ cdef const char * OLC_DELETEFEATURE = "DeleteFeature" cdef const char * OLC_STRINGSASUTF8 = "StringsAsUTF8" cdef const char * OLC_TRANSACTIONS = "Transactions" +cdef const char * OLC_IGNOREFIELDS = "IgnoreFields" # OGR integer error types. @@ -117,8 +120,17 @@ cdef int GDAL_VERSION_NUM = get_gdal_version_num() +class TZ(datetime.tzinfo): + + def __init__(self, minutes): + self.minutes = minutes + + def utcoffset(self, dt): + return datetime.timedelta(minutes=self.minutes) + # Feature extension classes and functions follow. + cdef class FeatureBuilder: """Build Fiona features from OGR feature pointers. @@ -151,18 +163,19 @@ """ cdef void *fdefn = NULL cdef int i + cdef unsigned char *data = NULL + cdef int l + cdef int retval + cdef int fieldsubtype + cdef const char *key_c = NULL + # Parameters for get_field_as_datetime cdef int y = 0 cdef int m = 0 cdef int d = 0 cdef int hh = 0 cdef int mm = 0 - cdef int ss = 0 + cdef float fss = 0.0 cdef int tz = 0 - cdef unsigned char *data = NULL - cdef int l - cdef int retval - cdef int fieldsubtype - cdef const char *key_c = NULL # Skeleton of the feature to be returned. 
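FeatureBuilder gains a minimal datetime.tzinfo subclass holding a fixed offset in minutes; together with OGR's timezone flag convention (100 = GMT, one step = 15 minutes, values of 0 and 1 meaning unknown or local time) it lets field values round-trip with their offsets. A sketch of the decoding, where tzinfo_from_ogr_flag() is an illustrative helper rather than fiona API:

    import datetime

    class TZ(datetime.tzinfo):
        # Fixed offset in minutes east of UTC, as in fiona.ogrext.
        def __init__(self, minutes):
            self.minutes = minutes

        def utcoffset(self, dt):
            return datetime.timedelta(minutes=self.minutes)

    def tzinfo_from_ogr_flag(tz_flag):
        # 100 = GMT, 101 = GMT+00:15, 99 = GMT-00:15; <= 1 means unknown.
        if tz_flag > 1:
            return TZ((tz_flag - 100) * 15)
        return None

    dt = datetime.datetime(2021, 5, 31, 12, 0, tzinfo=tzinfo_from_ogr_flag(104))
    print(dt.isoformat())  # -> 2021-05-31T12:00:00+01:00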
fid = OGR_F_GetFID(feature) @@ -179,12 +192,14 @@ for i in range(OGR_F_GetFieldCount(feature)): fdefn = OGR_F_GetFieldDefnRef(feature, i) if fdefn == NULL: - raise ValueError("Null feature definition") + raise ValueError("NULL field definition at index {}".format(i)) key_c = OGR_Fld_GetNameRef(fdefn) if key_c == NULL: - raise ValueError("Null field name reference") + raise ValueError("NULL field name reference at index {}".format(i)) key_b = key_c key = key_b.decode(encoding) + if not key: + warnings.warn("Empty field name at index {}".format(i)) if key in ignore_fields: continue @@ -239,16 +254,25 @@ props[key] = val elif fieldtype in (FionaDateType, FionaTimeType, FionaDateTimeType): - retval = OGR_F_GetFieldAsDateTime( - feature, i, &y, &m, &d, &hh, &mm, &ss, &tz) + retval = get_field_as_datetime(feature, i, &y, &m, &d, &hh, &mm, &fss, &tz) + ms, ss = math.modf(fss) + ss = int(ss) + ms = int(round(ms * 10**6)) + + # OGR_F_GetFieldAsDateTimeEx: (0=unknown, 1=localtime, 100=GMT, see data model for details) + # CPLParseRFC822DateTime: (0=unknown, 100=GMT, 101=GMT+15minute, 99=GMT-15minute), or NULL + tzinfo = None + if tz > 1: + tz_minutes = (tz - 100) * 15 + tzinfo = TZ(tz_minutes) + try: if fieldtype is FionaDateType: props[key] = datetime.date(y, m, d).isoformat() elif fieldtype is FionaTimeType: - props[key] = datetime.time(hh, mm, ss).isoformat() + props[key] = datetime.time(hh, mm, ss, ms, tzinfo).isoformat() else: - props[key] = datetime.datetime( - y, m, d, hh, mm, ss).isoformat() + props[key] = datetime.datetime(y, m, d, hh, mm, ss, ms, tzinfo).isoformat() except ValueError as err: log.exception(err) props[key] = None @@ -327,7 +351,7 @@ if feature['geometry'] is not None: cogr_geometry = OGRGeomBuilder().build( feature['geometry']) - OGR_F_SetGeometryDirectly(cogr_feature, cogr_geometry) + exc_wrap_int(OGR_F_SetGeometryDirectly(cogr_feature, cogr_geometry)) # OGR_F_SetFieldString takes encoded strings ('bytes' in Python 3). 
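Because OGR_F_GetFieldAsDateTimeEx reports seconds as a float, the read path above splits them with math.modf() into whole seconds and microseconds before constructing datetime objects. The same step standalone, with split_seconds() as an illustrative name:

    import datetime
    import math

    def split_seconds(fss):
        # math.modf returns (fractional part, whole part), both as floats.
        frac, whole = math.modf(fss)
        return int(whole), int(round(frac * 10**6))

    ss, us = split_seconds(30.5)  # -> (30, 500000)
    print(datetime.time(14, 25, ss, us).isoformat())  # -> 14:25:30.500000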
encoding = session._get_internal_encoding() @@ -367,32 +391,62 @@ elif isinstance(value, float): OGR_F_SetFieldDouble(cogr_feature, i, value) - elif (isinstance(value, string_types) - and schema_type in ['date', 'time', 'datetime']): - if schema_type == 'date': - y, m, d, hh, mm, ss, ff = parse_date(value) - elif schema_type == 'time': - y, m, d, hh, mm, ss, ff = parse_time(value) + elif schema_type in ['date', 'time', 'datetime'] and value is not None: + if isinstance(value, string_types): + if schema_type == 'date': + y, m, d, hh, mm, ss, ms, tz = parse_date(value) + elif schema_type == 'time': + y, m, d, hh, mm, ss, ms, tz = parse_time(value) + else: + y, m, d, hh, mm, ss, ms, tz = parse_datetime(value) + elif (isinstance(value, datetime.date) and schema_type == 'date'): + y, m, d = value.year, value.month, value.day + hh = mm = ss = ms = 0 + tz = None + elif (isinstance(value, datetime.datetime) and schema_type == 'datetime'): + y, m, d = value.year, value.month, value.day + hh, mm, ss, ms = value.hour, value.minute, value.second, value.microsecond + if value.utcoffset() is None: + tz = None + else: + tz = value.utcoffset().total_seconds() / 60 + elif (isinstance(value, datetime.time) and schema_type == 'time'): + y = m = d = 0 + hh, mm, ss, ms = value.hour, value.minute, value.second, value.microsecond + if value.utcoffset() is None: + tz = None + else: + tz = value.utcoffset().total_seconds() / 60 + + # Convert to UTC if driver does not support timezones + if tz is not None and not _driver_supports_timezones(collection.driver, schema_type): + + if schema_type == 'datetime': + d_tz = datetime.datetime(y, m, d, hh, mm, ss, int(ms), TZ(tz)) + d_utc = d_tz - d_tz.utcoffset() + y, m, d = d_utc.year, d_utc.month, d_utc.day + hh, mm, ss, ms = d_utc.hour, d_utc.minute, d_utc.second, d_utc.microsecond + tz = 0 + del d_utc, d_tz + elif schema_type == 'time': + d_tz = datetime.datetime(1900, 1, 1, hh, mm, ss, int(ms), TZ(tz)) + d_utc = d_tz - d_tz.utcoffset() + y = m = d = 0 + hh, mm, ss, ms = d_utc.hour, d_utc.minute, d_utc.second, d_utc.microsecond + tz = 0 + del d_utc, d_tz + + # tzinfo: (0=unknown, 100=GMT, 101=GMT+15minute, 99=GMT-15minute), or NULL + if tz is not None: + tzinfo = int(tz / 15.0 + 100) else: - y, m, d, hh, mm, ss, ff = parse_datetime(value) - OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, hh, mm, ss, 0) - elif (isinstance(value, datetime.date) - and schema_type == 'date'): - y, m, d = value.year, value.month, value.day - OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, 0, 0, 0, 0) - elif (isinstance(value, datetime.datetime) - and schema_type == 'datetime'): - y, m, d = value.year, value.month, value.day - hh, mm, ss = value.hour, value.minute, value.second - OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, hh, mm, ss, 0) - elif (isinstance(value, datetime.time) - and schema_type == 'time'): - hh, mm, ss = value.hour, value.minute, value.second - OGR_F_SetFieldDateTime( - cogr_feature, i, 0, 0, 0, hh, mm, ss, 0) + tzinfo = 0 + + # Add microseconds to seconds + ss += ms / 10**6 + + set_field_datetime(cogr_feature, i, y, m, d, hh, mm, ss, tzinfo) + elif isinstance(value, bytes) and schema_type == "bytes": string_c = value OGR_F_SetFieldBinary(cogr_feature, i, len(value), @@ -446,12 +500,14 @@ cdef object _fileencoding cdef object _encoding cdef object collection + cdef bint cursor_interrupted def __init__(self): self.cogr_ds = NULL self.cogr_layer = NULL self._fileencoding = None self._encoding = None + self.cursor_interrupted = False def __dealloc__(self): 
self.stop() @@ -499,6 +555,8 @@ encoding = self._get_internal_encoding() if collection.ignore_fields: + if not OGR_L_TestCapability(self.cogr_layer, OLC_IGNOREFIELDS): + raise DriverError("Driver does not support ignore_fields") try: for name in collection.ignore_fields: try: @@ -571,7 +629,7 @@ def get_length(self): if self.cogr_layer == NULL: raise ValueError("Null layer") - return OGR_L_GetFeatureCount(self.cogr_layer, 0) + return self._get_feature_count(0) def get_driver(self): cdef void *cogr_driver = GDALGetDatasetDriver(self.cogr_ds) @@ -608,15 +666,15 @@ for i from 0 <= i < n: cogr_fielddefn = OGR_FD_GetFieldDefn(cogr_featuredefn, i) if cogr_fielddefn == NULL: - raise ValueError("Null field definition") + raise ValueError("NULL field definition at index {}".format(i)) key_c = OGR_Fld_GetNameRef(cogr_fielddefn) + if key_c == NULL: + raise ValueError("NULL field name reference at index {}".format(i)) key_b = key_c - - if not bool(key_b): - raise ValueError("Invalid field name ref: %s" % key) - key = key_b.decode(encoding) + if not key: + warnings.warn("Empty field name at index {}".format(i), FeatureWarning) if key in ignore_fields: log.debug("By request, ignoring field %r", key) @@ -794,7 +852,18 @@ raise ValueError("Null layer") result = OGR_L_GetExtent(self.cogr_layer, &extent, 1) + self.cursor_interrupted = True + if result != OGRERR_NONE: + raise DriverError("Driver was not able to calculate bounds") return (extent.MinX, extent.MinY, extent.MaxX, extent.MaxY) + + + cdef int _get_feature_count(self, force=0): + if self.cogr_layer == NULL: + raise ValueError("Null layer") + self.cursor_interrupted = True + return OGR_L_GetFeatureCount(self.cogr_layer, force) + def has_feature(self, fid): """Provides access to feature data by FID. @@ -846,7 +915,7 @@ index = item # from the back if index < 0: - ftcount = OGR_L_GetFeatureCount(self.cogr_layer, 0) + ftcount = self._get_feature_count(0) if ftcount == -1: raise IndexError( "collection's dataset does not support negative indexes") @@ -914,7 +983,7 @@ self.cogr_layer = exc_wrap_pointer(GDALDatasetGetLayer(self.cogr_ds, collection.name)) except CPLE_BaseError as exc: - OGRReleaseDataSource(self.cogr_ds) + GDALClose(self.cogr_ds) self.cogr_ds = NULL self.cogr_layer = NULL raise DriverError(u"{}".format(exc)) @@ -922,6 +991,8 @@ else: self._fileencoding = userencoding or self._get_fallback_encoding() + before_fields = self.get_schema()['properties'] + elif collection.mode == 'w': try: @@ -934,39 +1005,43 @@ driver_c = driver_b cogr_driver = exc_wrap_pointer(GDALGetDriverByName(driver_c)) - # Our most common use case is the creation of a new data - # file and historically we've assumed that it's a file on - # the local filesystem and queryable via os.path. - # - # TODO: remove the assumption. - if not os.path.exists(path): + if not CPLCheckForFile(path_c, NULL): log.debug("File doesn't exist. Creating a new one...") - cogr_ds = gdal_create(cogr_driver, path_c, {}) + with Env(GDAL_VALIDATE_CREATION_OPTIONS="NO"): + cogr_ds = gdal_create(cogr_driver, path_c, kwargs) - # TODO: revisit the logic in the following blocks when we - # change the assumption above. 
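On the writing side (see the OGRFeatureBuilder changes above), offsets travel the other way: a utcoffset() expressed in minutes becomes the OGR flag via int(tz / 15.0 + 100), and naive values are written with flag 0. A sketch, with ogr_tz_flag() as an illustrative helper:

    import datetime

    def ogr_tz_flag(value):
        # Map a (possibly naive) datetime or time to OGR's timezone flag.
        offset = value.utcoffset()
        if offset is None:
            return 0  # unknown timezone
        return int(offset.total_seconds() / 60 / 15.0 + 100)

    tz = datetime.timezone(datetime.timedelta(hours=2))
    print(ogr_tz_flag(datetime.datetime(2021, 5, 31, 8, 0, tzinfo=tz)))  # -> 108

When the target driver cannot persist timezones, fiona first shifts the value to UTC and writes flag 100 instead, as decided per driver and field type by _driver_supports_timezones().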
else: - if collection.driver == "GeoJSON" and os.path.exists(path): - # manually remove geojson file as GDAL doesn't do this for us - os.unlink(path) - try: - # attempt to open existing dataset in write mode - cogr_ds = gdal_open_vector(path_c, 1, None, kwargs) - except DriverError: - # failed, attempt to create it - cogr_ds = gdal_create(cogr_driver, path_c, kwargs) - else: - # check capability of creating a new layer in the existing dataset - capability = check_capability_create_layer(cogr_ds) - if GDAL_VERSION_NUM < 2000000 and collection.driver == "GeoJSON": - # GeoJSON driver tells lies about it's capability - capability = False - if not capability or collection.name is None: - # unable to use existing dataset, recreate it - GDALClose(cogr_ds) - cogr_ds = NULL + if collection.driver == "GeoJSON": + # We must manually remove geojson files as GDAL doesn't do this for us. + log.debug("Removing GeoJSON file") + if path.startswith("/vsi"): + VSIUnlink(path_c) + else: + os.unlink(path) + with Env(GDAL_VALIDATE_CREATION_OPTIONS="NO"): cogr_ds = gdal_create(cogr_driver, path_c, kwargs) + else: + try: + # Attempt to open existing dataset in write mode, + # letting GDAL/OGR handle the overwriting. + cogr_ds = gdal_open_vector(path_c, 1, None, kwargs) + except DriverError: + # log.exception("Caught DriverError") + # failed, attempt to create it + with Env(GDAL_VALIDATE_CREATION_OPTIONS="NO"): + cogr_ds = gdal_create(cogr_driver, path_c, kwargs) + else: + # check capability of creating a new layer in the existing dataset + capability = check_capability_create_layer(cogr_ds) + if not capability or collection.name is None: + # unable to use existing dataset, recreate it + log.debug("Unable to use existing dataset: capability=%r, name=%r", capability, collection.name) + GDALClose(cogr_ds) + cogr_ds = NULL + with Env(GDAL_VALIDATE_CREATION_OPTIONS="NO"): + cogr_ds = gdal_create(cogr_driver, path_c, kwargs) + self.cogr_ds = cogr_ds # Set the spatial reference system from the crs given to the @@ -982,7 +1057,7 @@ OSRSetFromUserInput(cogr_srs, proj_c) osr_set_traditional_axis_mapping_strategy(cogr_srs) except CPLE_BaseError as exc: - OGRReleaseDataSource(self.cogr_ds) + GDALClose(self.cogr_ds) self.cogr_ds = NULL self.cogr_layer = NULL raise CRSError(u"{}".format(exc)) @@ -1061,7 +1136,7 @@ geometry_code, options)) except Exception as exc: - OGRReleaseDataSource(self.cogr_ds) + GDALClose(self.cogr_ds) self.cogr_ds = NULL raise DriverIOError(u"{}".format(exc)) @@ -1083,7 +1158,20 @@ encoding = self._get_internal_encoding() - for key, value in collection.schema['properties'].items(): + # Test if default fields are included in provided schema + schema_fields = collection.schema['properties'] + default_fields = self.get_schema()['properties'] + for key, value in default_fields.items(): + if key in schema_fields and not schema_fields[key] == value: + raise SchemaError("Property '{}' must have type '{}' " + "for driver '{}'".format(key, value, self.collection.driver)) + + new_fields = OrderedDict([(key, value) for key, value in schema_fields.items() + if key not in default_fields]) + before_fields = default_fields.copy() + before_fields.update(new_fields) + + for key, value in new_fields.items(): log.debug("Begin creating field: %r value: %r", key, value) @@ -1133,7 +1221,7 @@ exc_wrap_int(OGR_L_CreateField(self.cogr_layer, cogr_fielddefn, 1)) except (UnicodeEncodeError, CPLE_BaseError) as exc: - OGRReleaseDataSource(self.cogr_ds) + GDALClose(self.cogr_ds) self.cogr_ds = NULL self.cogr_layer = NULL raise 
SchemaError(u"{}".format(exc)) @@ -1144,10 +1232,9 @@ # Mapping of the Python collection schema to the munged # OGR schema. - ogr_schema = self.get_schema() - self._schema_mapping = dict(zip( - collection.schema['properties'].keys(), - ogr_schema['properties'].keys() )) + after_fields = self.get_schema()['properties'] + self._schema_mapping = dict(zip(before_fields.keys(), + after_fields.keys())) log.debug("Writing started") @@ -1170,15 +1257,24 @@ if record["geometry"] is None: return True return record["geometry"]["type"].lstrip("3D ") in valid_geom_types - - log.debug("Starting transaction (initial)") - result = gdal_start_transaction(self.cogr_ds, 0) - if result == OGRERR_FAILURE: - raise TransactionError("Failed to start transaction") + transactions_supported = check_capability_transaction(self.cogr_ds) + log.debug("Transaction supported: {}".format(transactions_supported)) + if transactions_supported: + log.debug("Starting transaction (initial)") + result = gdal_start_transaction(self.cogr_ds, 0) + if result == OGRERR_FAILURE: + raise TransactionError("Failed to start transaction") schema_props_keys = set(collection.schema['properties'].keys()) for record in records: log.debug("Creating feature in layer: %s" % record) + + # Check for optional elements + if 'properties' not in record: + record['properties'] = {} + if 'geometry' not in record: + record['geometry'] = None + # Validate against collection's schema. if set(record['properties'].keys()) != schema_props_keys: raise ValueError( @@ -1195,25 +1291,29 @@ cogr_feature = OGRFeatureBuilder().build(record, collection) result = OGR_L_CreateFeature(cogr_layer, cogr_feature) if result != OGRERR_NONE: - raise RuntimeError("Failed to write record: %s" % record) + msg = get_last_error_msg() + raise RuntimeError("GDAL Error: {msg} \n \n Failed to write record: " + "{record}".format(msg=msg, record=record)) _deleteOgrFeature(cogr_feature) - features_in_transaction += 1 - if features_in_transaction == DEFAULT_TRANSACTION_SIZE: - log.debug("Comitting transaction (intermediate)") - result = gdal_commit_transaction(self.cogr_ds) - if result == OGRERR_FAILURE: - raise TransactionError("Failed to commit transaction") - log.debug("Starting transaction (intermediate)") - result = gdal_start_transaction(self.cogr_ds, 0) - if result == OGRERR_FAILURE: - raise TransactionError("Failed to start transaction") - features_in_transaction = 0 - - log.debug("Comitting transaction (final)") - result = gdal_commit_transaction(self.cogr_ds) - if result == OGRERR_FAILURE: - raise TransactionError("Failed to commit transaction") + if transactions_supported: + features_in_transaction += 1 + if features_in_transaction == DEFAULT_TRANSACTION_SIZE: + log.debug("Comitting transaction (intermediate)") + result = gdal_commit_transaction(self.cogr_ds) + if result == OGRERR_FAILURE: + raise TransactionError("Failed to commit transaction") + log.debug("Starting transaction (intermediate)") + result = gdal_start_transaction(self.cogr_ds, 0) + if result == OGRERR_FAILURE: + raise TransactionError("Failed to start transaction") + features_in_transaction = 0 + + if transactions_supported: + log.debug("Comitting transaction (final)") + result = gdal_commit_transaction(self.cogr_ds) + if result == OGRERR_FAILURE: + raise TransactionError("Failed to commit transaction") def sync(self, collection): """Syncs OGR to disk.""" @@ -1239,6 +1339,8 @@ cdef start cdef step cdef fastindex + cdef fastcount + cdef ftcount cdef stepsign def __cinit__(self, collection, start=None, stop=None, 
step=None, @@ -1272,20 +1374,38 @@ self.fastindex = OGR_L_TestCapability( session.cogr_layer, OLC_FASTSETNEXTBYINDEX) + log.debug("OLC_FASTSETNEXTBYINDEX: {}".format(self.fastindex)) - ftcount = OGR_L_GetFeatureCount(session.cogr_layer, 0) - if ftcount == -1 and ((start is not None and start < 0) or + self.fastcount = OGR_L_TestCapability( + session.cogr_layer, OLC_FASTFEATURECOUNT) + log.debug("OLC_FASTFEATURECOUNT: {}".format(self.fastcount)) + + # In some cases we need to force count of all features + # We need to check if start is not greater ftcount: (start is not None and start > 0) + # If start is a negative index: (start is not None and start < 0) + # If stop is a negative index: (stop is not None and stop < 0) + if ((start is not None and not start == 0) or + (stop is not None and stop < 0)): + if not self.fastcount: + warnings.warn("Layer does not support" \ + " OLC_FASTFEATURECOUNT, negative slices or start values other than zero" \ + " may be slow.", RuntimeWarning) + self.ftcount = session._get_feature_count(1) + else: + self.ftcount = session._get_feature_count(0) + + if self.ftcount == -1 and ((start is not None and start < 0) or (stop is not None and stop < 0)): raise IndexError( "collection's dataset does not support negative slice indexes") if stop is not None and stop < 0: - stop += ftcount + stop += self.ftcount if start is None: start = 0 if start is not None and start < 0: - start += ftcount + start += self.ftcount # step size if step is None: @@ -1294,16 +1414,31 @@ raise ValueError("slice step cannot be zero") if step < 0 and not self.fastindex: warnings.warn("Layer does not support" \ - "OLCFastSetNextByIndex, negative step size may" \ - " be slow", RuntimeWarning) + " OLCFastSetNextByIndex, negative step size may" \ + " be slow.", RuntimeWarning) + + # Check if we are outside of the range: + if not self.ftcount == -1: + if start > self.ftcount and step > 0: + start = -1 + if start > self.ftcount and step < 0: + start = self.ftcount - 1 + elif self.ftcount == -1 and not start == 0: + warnings.warn("Layer is unable to check if slice is within range of data.", + RuntimeWarning) + self.stepsign = int(math.copysign(1, step)) self.stop = stop self.start = start self.step = step self.next_index = start - log.debug("Index: %d", self.next_index) - OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) + log.debug("Next index: %d", self.next_index) + + # Set OGR_L_SetNextByIndex only if within range + if start >= 0 and (self.ftcount == -1 or self.start < self.ftcount): + exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) + session.cursor_interrupted = False def __iter__(self): return self @@ -1314,10 +1449,16 @@ cdef Session session session = self.collection.session + # Check if next_index is valid if self.next_index < 0: raise StopIteration + # GeoJSON driver with gdal 2.1 - 2.2 returns last feature + # if index greater than number of features + if self.ftcount >= 0 and self.next_index >= self.ftcount: + raise StopIteration + if self.stepsign == 1: if self.next_index < self.start or (self.stop is not None and self.next_index >= self.stop): raise StopIteration @@ -1326,26 +1467,31 @@ raise StopIteration # Set read cursor to next_item position - if self.step > 1 and self.fastindex: - OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - elif self.step > 1 and not self.fastindex and not self.next_index == self.start: - for _ in range(self.step - 1): - # TODO rbuffat add test -> OGR_L_GetNextFeature increments cursor by 1, therefore self.step - 
1 as one increment was performed when feature is read - cogr_feature = OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration - elif self.step > 1 and not self.fastindex and self.next_index == self.start: - OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) - - elif self.step == 0: - # OGR_L_GetNextFeature increments read cursor by one - pass - elif self.step < 0: - OGR_L_SetNextByIndex(session.cogr_layer, self.next_index) + if session.cursor_interrupted: + if not self.fastindex and not self.next_index == 0: + warnings.warn("Sequential read of iterator was interrupted. Resetting iterator. " + "This can negatively impact the performance.", RuntimeWarning) + exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) + session.cursor_interrupted = False + else: + if self.step > 1 and self.fastindex: + exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) + elif self.step > 1 and not self.fastindex and not self.next_index == self.start: + # GDALs default implementation of SetNextByIndex is calling ResetReading() and then + # calling GetNextFeature n times. We can shortcut that if we know the previous index. + # OGR_L_GetNextFeature increments cursor by 1, therefore self.step - 1 as one increment was performed when feature is read + for _ in range(self.step - 1): + cogr_feature = OGR_L_GetNextFeature(session.cogr_layer) + if cogr_feature == NULL: + raise StopIteration + elif self.step > 1 and not self.fastindex and self.next_index == self.start: + exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) + elif self.step < 0: + exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) # set the next index self.next_index += self.step + log.debug("Next index: %d", self.next_index) def __next__(self): cdef OGRFeatureH cogr_feature = NULL @@ -1404,6 +1550,7 @@ ignore_fields=self.collection.ignore_fields, ignore_geometry=self.collection.ignore_geometry, ) + _deleteOgrFeature(cogr_feature) return fid, feature @@ -1448,7 +1595,7 @@ cogr_driver = GDALGetDatasetDriver(cogr_ds) GDALClose(cogr_ds) else: - cogr_driver = OGRGetDriverByName(driver.encode("utf-8")) + cogr_driver = GDALGetDriverByName(driver.encode("utf-8")) if cogr_driver == NULL: raise DatasetDeleteError("Null driver when attempting to delete {}".format(path)) @@ -1485,7 +1632,7 @@ except (DriverError, FionaNullPointerError): raise DatasetDeleteError("Failed to remove data source {}".format(path)) - result = OGR_DS_DeleteLayer(cogr_ds, layer_index) + result = GDALDatasetDeleteLayer(cogr_ds, layer_index) GDALClose(cogr_ds) if result == OGRERR_UNSUPPORTED_OPERATION: raise DatasetDeleteError("Removal of layer {} not supported by driver".format(layer_str)) @@ -1534,7 +1681,7 @@ `ext` is empty or begins with a period and contains at most one period. """ - vsi_filename = '/vsimem/{}'.format(uuid.uuid4().hex + ext) + vsi_filename = '/vsimem/{}'.format(uuid4().hex + ext) vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') vsi_handle = VSIFileFromMemBuffer(vsi_cfilename, bytesbuf, len(bytesbuf), 0) @@ -1552,65 +1699,106 @@ return VSIUnlink(vsi_cfilename) -cdef class MemoryFileBase(object): +cdef class MemoryFileBase: """Base for a BytesIO-like class backed by an in-memory file.""" - def __init__(self, file_or_bytes=None, filename=None, ext=''): + cdef VSILFILE * _vsif + + def __init__(self, file_or_bytes=None, dirname=None, filename=None, ext=''): """A file in an in-memory filesystem. 
Parameters ---------- file_or_bytes : file or bytes - A file opened in binary mode or bytes or a bytearray + A file opened in binary mode or bytes filename : str A filename for the in-memory file under /vsimem ext : str A file extension for the in-memory file under /vsimem. Ignored if filename was provided. - """ - cdef VSILFILE *vsi_handle = NULL + """ if file_or_bytes: if hasattr(file_or_bytes, 'read'): initial_bytes = file_or_bytes.read() - else: + elif isinstance(file_or_bytes, bytes): initial_bytes = file_or_bytes - if not isinstance(initial_bytes, (bytearray, bytes)): + else: raise TypeError( "Constructor argument must be a file opened in binary " - "mode or bytes/bytearray.") + "mode or bytes.") else: initial_bytes = b'' + # Make an in-memory directory specific to this dataset to help organize + # auxiliary files. + self._dirname = dirname or str(uuid4().hex) + VSIMkdir("/vsimem/{0}".format(self._dirname).encode("utf-8"), 0666) + if filename: # GDAL's SRTMHGT driver requires the filename to be "correct" (match # the bounds being written) - self.name = '/vsimem/{0}'.format(filename) + self.name = "/vsimem/{0}/{1}".format(self._dirname, filename) else: # GDAL 2.1 requires a .zip extension for zipped files. - self.name = '/vsimem/{0}.{1}'.format(uuid.uuid4(), ext.lstrip('.')) - - self.path = self.name.encode('utf-8') - self._len = 0 - self._pos = 0 - self.closed = False + self.name = "/vsimem/{0}/{0}{1}".format(self._dirname, ext) + name_b = self.name.encode('utf-8') self._initial_bytes = initial_bytes cdef unsigned char *buffer = self._initial_bytes if self._initial_bytes: + self._vsif = VSIFileFromMemBuffer( + name_b, buffer, len(self._initial_bytes), 0) + self.mode = "r" + + else: + self._vsif = NULL + self.mode = "r+" + + self.closed = False - vsi_handle = VSIFileFromMemBuffer( - self.path, buffer, len(self._initial_bytes), 0) - self._len = len(self._initial_bytes) - - if vsi_handle == NULL: - raise IOError( - "Failed to create in-memory file using initial bytes.") - - if VSIFCloseL(vsi_handle) != 0: - raise IOError( - "Failed to properly close in-memory file.") + def _open(self): + """Ensure that the instance has a valid VSI file handle.""" + cdef VSILFILE *fp = NULL + name_b = self.name.encode('utf-8') + + if not self.exists(): + fp = VSIFOpenL(name_b, "w") + if fp == NULL: + raise OSError("VSIFOpenL failed") + else: + VSIFCloseL(fp) + self._vsif = NULL + + if self._vsif == NULL: + fp = VSIFOpenL(name_b, self.mode.encode("utf-8")) + if fp == NULL: + log.error("VSIFOpenL failed: name=%r, mode=%r", self.name, self.mode) + raise OSError("VSIFOpenL failed") + else: + self._vsif = fp + + def _ensure_extension(self, drivername=None): + """Ensure that the instance's name uses a file extension supported by the driver.""" + # Avoid a crashing bug with GDAL versions < 2. + if get_gdal_version_tuple() < (2, ): + return + + name_b = drivername.encode("utf-8") + cdef const char *name_c = name_b + cdef GDALDriverH driver = GDALGetDriverByName(name_c) + cdef const char *extension_c = GDALGetMetadataItem(driver, "DMD_EXTENSION", NULL) + + if extension_c != NULL: + extension_b = extension_c + recommended_extension = extension_b.decode("utf-8") + if not recommended_extension.startswith("."): + recommended_extension = "." + recommended_extension + root, ext = os.path.splitext(self.name) + if not ext: + log.info("Setting extension: root=%r, extension=%r", root, recommended_extension) + self.name = root + recommended_extension def exists(self): """Test if the in-memory file exists. 
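_ensure_extension() consults the driver's DMD_EXTENSION metadata item so that randomly named in-memory datasets still get a suffix the format driver will accept (and it returns early on GDAL versions before 2, where that metadata query could crash). Roughly the same lookup can be sketched with the GDAL Python bindings, assuming the osgeo package is available; ensure_extension() is an illustrative name, not fiona API:

    import os
    from osgeo import gdal

    def ensure_extension(name, drivername):
        # Append the driver's preferred extension if name has none.
        driver = gdal.GetDriverByName(drivername)
        ext = driver.GetMetadataItem("DMD_EXTENSION") if driver else None
        root, current = os.path.splitext(name)
        if ext and not current:
            return root + "." + ext.lstrip(".")
        return name

    print(ensure_extension("/vsimem/abc123/abc123", "GPKG"))
    # -> /vsimem/abc123/abc123.gpkg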
@@ -1619,18 +1807,11 @@ ------- bool True if the in-memory file exists. - """ - cdef VSILFILE *fp = NULL - cdef const char *cypath = self.path - - with nogil: - fp = VSIFOpenL(cypath, 'r') - if fp != NULL: - VSIFCloseL(fp) - return True - else: - return False + """ + cdef VSIStatBufL st_buf + name_b = self.name.encode('utf-8') + return VSIStatL(name_b, &st_buf) == 0 def __len__(self): """Length of the file's buffer in number of bytes. @@ -1638,96 +1819,76 @@ Returns ------- int + """ - cdef unsigned char *buff = NULL - cdef const char *cfilename = self.path - cdef vsi_l_offset buff_len = 0 - buff = VSIGetMemFileBuffer(self.path, &buff_len, 0) - return int(buff_len) + if not self.getbuffer(): + return 0 + return self.getbuffer().size + + def getbuffer(self): + """Return a view on bytes of the file, or None.""" + cdef unsigned char *buffer = NULL + cdef vsi_l_offset buffer_len = 0 + cdef unsigned char [:] buff_view + + name_b = self.name.encode('utf-8') + buffer = VSIGetMemFileBuffer(name_b, &buffer_len, 0) + + if buffer == NULL or buffer_len == 0: + return None + else: + buff_view = buffer + return buff_view def close(self): - """Close MemoryFile and release allocated memory.""" - VSIUnlink(self.path) - self._pos = 0 - self._initial_bytes = None + """Close and tear down VSI file and directory.""" + if self._vsif != NULL: + VSIFCloseL(self._vsif) + self._vsif = NULL + VSIRmdir(self._dirname.encode("utf-8")) self.closed = True - def read(self, size=-1): - """Read size bytes from MemoryFile.""" - cdef VSILFILE *fp = NULL - # Return no bytes immediately if the position is at or past the - # end of the file. - length = len(self) - - if self._pos >= length: - self._pos = length - return b'' + def seek(self, offset, whence=0): + self._open() + return VSIFSeekL(self._vsif, offset, whence) - if size == -1: - size = length - self._pos + def tell(self): + self._open() + if self._vsif != NULL: + return VSIFTellL(self._vsif) else: - size = min(size, length - self._pos) + return 0 - cdef unsigned char *buffer = CPLMalloc(size) + def read(self, size=-1): + """Read size bytes from MemoryFile.""" cdef bytes result + cdef unsigned char *buffer = NULL + cdef vsi_l_offset buffer_len = 0 - fp = VSIFOpenL(self.path, 'r') + if size < 0: + name_b = self.name.encode('utf-8') + buffer = VSIGetMemFileBuffer(name_b, &buffer_len, 0) + size = buffer_len - try: - fp = exc_wrap_vsilfile(fp) - if VSIFSeekL(fp, self._pos, 0) < 0: - raise IOError( - "Failed to seek to offset %s in %s.", - self._pos, self.name) + buffer = CPLMalloc(size) + + self._open() - objects_read = VSIFReadL(buffer, 1, size, fp) + try: + objects_read = VSIFReadL(buffer, 1, size, self._vsif) result = buffer[:objects_read] + return result finally: - VSIFCloseL(fp) CPLFree(buffer) - self._pos += len(result) return result - def seek(self, offset, whence=0): - """Seek to position in MemoryFile.""" - if whence == 0: - pos = offset - elif whence == 1: - pos = self._pos + offset - elif whence == 2: - pos = len(self) - offset - if pos < 0: - raise ValueError("negative seek position: {}".format(pos)) - if pos > len(self): - raise ValueError("seek position past end of file: {}".format(pos)) - self._pos = pos - return self._pos - - def tell(self): - """Tell current position in MemoryFile.""" - return self._pos - def write(self, data): """Write data bytes to MemoryFile""" - cdef VSILFILE *fp = NULL cdef const unsigned char *view = data n = len(data) - - if not self.exists(): - fp = exc_wrap_vsilfile(VSIFOpenL(self.path, 'w')) - else: - fp = 
exc_wrap_vsilfile(VSIFOpenL(self.path, 'r+')) - if VSIFSeekL(fp, self._pos, 0) < 0: - raise IOError( - "Failed to seek to offset %s in %s.", self._pos, self.name) - - result = VSIFWriteL(view, 1, n, fp) - VSIFFlushL(fp) - VSIFCloseL(fp) - - self._pos += result - self._len = max(self._len, self._pos) - + self._open() + result = VSIFWriteL(view, 1, n, self._vsif) + VSIFFlushL(self._vsif) return result diff -Nru fiona-1.8.13/fiona/rfc3339.py fiona-1.8.20/fiona/rfc3339.py --- fiona-1.8.13/fiona/rfc3339.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/rfc3339.py 2021-05-31 21:29:33.000000000 +0000 @@ -4,77 +4,135 @@ # writing will convert times with a timezone offset to GMT (Z) and otherwise # will write times with the unknown zone. -import datetime import logging import re log = logging.getLogger("Fiona") + # Fiona's 'date', 'time', and 'datetime' types are sub types of 'str'. + class FionaDateType(str): """Dates without time.""" + class FionaTimeType(str): """Times without dates.""" + class FionaDateTimeType(str): """Dates and times.""" + pattern_date = re.compile(r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)") pattern_time = re.compile( - r"(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?" ) + r"(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?") pattern_datetime = re.compile( - r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?" ) + r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?") + class group_accessor(object): def __init__(self, m): self.match = m + def group(self, i): try: return self.match.group(i) or 0 except IndexError: return 0 + def parse_time(text): - """Given a RFC 3339 time, returns a tz-naive datetime tuple""" + """ Given a time, returns a datetime tuple + + Parameters + ---------- + text: string to be parsed + + Returns + ------- + (int, int , int, int, int, int, int, int): + datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) + + """ match = re.search(pattern_time, text) if match is None: raise ValueError("Time data '%s' does not match pattern" % text) g = group_accessor(match) log.debug("Match groups: %s", match.groups()) + + if g.group(8) == '-': + tz = -1.0 * (int(g.group(9)) * 60 + int(g.group(11))) + elif g.group(8) == '+': + tz = int(g.group(9)) * 60 + int(g.group(11)) + else: + tz = None + return (0, 0, 0, - int(g.group(1)), - int(g.group(3)), - int(g.group(5)), - 1000000.0*float(g.group(6)) ) + int(g.group(1)), + int(g.group(3)), + int(g.group(5)), + int(1000000.0 * float(g.group(6))), + tz + ) + def parse_date(text): - """Given a RFC 3339 date, returns a tz-naive datetime tuple""" + """Given a date, returns a datetime tuple + + Parameters + ---------- + text: string to be parsed + + Returns + ------- + (int, int , int, int, int, int, int, int): + datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) + """ match = re.search(pattern_date, text) if match is None: raise ValueError("Time data '%s' does not match pattern" % text) g = group_accessor(match) log.debug("Match groups: %s", match.groups()) return ( - int(g.group(1)), - int(g.group(3)), + int(g.group(1)), + int(g.group(3)), int(g.group(5)), - 0, 0, 0, 0.0 ) + 0, 0, 0, 0, None) + def parse_datetime(text): - """Given a RFC 3339 datetime, returns a tz-naive datetime tuple""" + """Given a datetime, returns a datetime tuple + + Parameters + ---------- + text: string to be parsed + + Returns + 
------- + (int, int , int, int, int, int, int, int): + datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) + """ match = re.search(pattern_datetime, text) if match is None: raise ValueError("Time data '%s' does not match pattern" % text) g = group_accessor(match) log.debug("Match groups: %s", match.groups()) + + if g.group(14) == '-': + tz = -1.0 * (int(g.group(15)) * 60 + int(g.group(17))) + elif g.group(14) == '+': + tz = int(g.group(15)) * 60 + int(g.group(17)) + else: + tz = None + return ( - int(g.group(1)), - int(g.group(3)), + int(g.group(1)), + int(g.group(3)), int(g.group(5)), - int(g.group(7)), - int(g.group(9)), - int(g.group(11)), - 1000000.0*float(g.group(12)) ) - + int(g.group(7)), + int(g.group(9)), + int(g.group(11)), + int(1000000.0 * float(g.group(12))), + tz) diff -Nru fiona-1.8.13/fiona/_shim1.pxd fiona-1.8.20/fiona/_shim1.pxd --- fiona-1.8.13/fiona/_shim1.pxd 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_shim1.pxd 2021-05-31 21:29:33.000000000 +0000 @@ -12,6 +12,7 @@ cdef void gdal_flush_cache(void *cogr_ds) cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL +cdef bint check_capability_transaction(void *cogr_ds) cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) cdef OGRErr gdal_commit_transaction(void *cogr_ds) cdef OGRErr gdal_rollback_transaction(void *cogr_ds) @@ -22,6 +23,9 @@ cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) +cdef void get_proj_version(int *, int *, int *) +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) +cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) from fiona._shim cimport OGR_F_GetFieldAsInteger as OGR_F_GetFieldAsInteger64 from fiona._shim cimport OGR_F_SetFieldInteger as OGR_F_SetFieldInteger64 diff -Nru fiona-1.8.13/fiona/_shim1.pyx fiona-1.8.20/fiona/_shim1.pyx --- fiona-1.8.13/fiona/_shim1.pyx 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_shim1.pyx 2021-05-31 21:29:33.000000000 +0000 @@ -3,7 +3,7 @@ import os from fiona.ogrext1 cimport * -from fiona._err cimport exc_wrap_pointer +from fiona._err cimport exc_wrap_pointer, exc_wrap_int from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError from fiona.errors import DriverError @@ -23,7 +23,7 @@ cdef void gdal_flush_cache(void *cogr_ds): - retval = OGR_DS_SyncToDisk(cogr_ds) + retval = exc_wrap_int(OGR_DS_SyncToDisk(cogr_ds)) if retval != OGRERR_NONE: raise RuntimeError("Failed to sync to disk") @@ -93,6 +93,12 @@ # transactions are not supported in GDAL 1.x + + +cdef bint check_capability_transaction(void *cogr_ds): + return False + + cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): return OGRERR_NONE @@ -132,3 +138,24 @@ cdef void set_proj_search_path(object path): os.environ["PROJ_LIB"] = path + + +cdef void get_proj_version(int* major, int* minor, int* patch): + cdef int val = -1 + major[0] = val + minor[0] = val + patch[0] = val + + +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): + cdef int nSecond + nSecond = int(fSecond) + OGR_F_SetFieldDateTime(cogr_feature, iField, 
nYear, nMonth, nDay, nHour, nMinute, nSecond, nTZFlag) + + +cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): + cdef int retval + cdef int nSecond + retval = OGR_F_GetFieldAsDateTime(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, &nSecond, nTZFlag) + fSecond[0] = float(nSecond) + return retval diff -Nru fiona-1.8.13/fiona/_shim22.pxd fiona-1.8.20/fiona/_shim22.pxd --- fiona-1.8.13/fiona/_shim22.pxd 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_shim22.pxd 2021-05-31 21:29:33.000000000 +0000 @@ -5,6 +5,7 @@ cdef void gdal_flush_cache(void *cogr_ds) cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL +cdef bint check_capability_transaction(void *cogr_ds) cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) cdef OGRErr gdal_commit_transaction(void *cogr_ds) cdef OGRErr gdal_rollback_transaction(void *cogr_ds) @@ -15,3 +16,6 @@ cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) +cdef void get_proj_version(int *, int *, int *) +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) +cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) diff -Nru fiona-1.8.13/fiona/_shim22.pyx fiona-1.8.20/fiona/_shim22.pyx --- fiona-1.8.13/fiona/_shim22.pyx 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_shim22.pyx 2021-05-31 21:29:33.000000000 +0000 @@ -107,6 +107,10 @@ CSLDestroy(creation_opts) +cdef bint check_capability_transaction(void *cogr_ds): + return GDALDatasetTestCapability(cogr_ds, ODsCTransactions) + + cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): return GDALDatasetStartTransaction(cogr_ds, force) @@ -144,3 +148,18 @@ cdef void set_proj_search_path(object path): os.environ["PROJ_LIB"] = path + + +cdef void get_proj_version(int* major, int* minor, int* patch): + cdef int val = -1 + major[0] = val + minor[0] = val + patch[0] = val + + +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): + OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) + + +cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): + return OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) diff -Nru fiona-1.8.13/fiona/_shim2.pxd fiona-1.8.20/fiona/_shim2.pxd --- fiona-1.8.13/fiona/_shim2.pxd 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_shim2.pxd 2021-05-31 21:29:33.000000000 +0000 @@ -5,6 +5,7 @@ cdef void gdal_flush_cache(void *cogr_ds) cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL +cdef bint check_capability_transaction(void *cogr_ds) cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) cdef OGRErr gdal_commit_transaction(void *cogr_ds) cdef OGRErr gdal_rollback_transaction(void *cogr_ds) @@ -15,3 +16,6 @@ cdef const char* 
osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) +cdef void get_proj_version(int *, int *, int *) +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) +cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) diff -Nru fiona-1.8.13/fiona/_shim2.pyx fiona-1.8.20/fiona/_shim2.pyx --- fiona-1.8.13/fiona/_shim2.pyx 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_shim2.pyx 2021-05-31 21:29:33.000000000 +0000 @@ -98,6 +98,10 @@ CSLDestroy(creation_opts) +cdef bint check_capability_transaction(void *cogr_ds): + return GDALDatasetTestCapability(cogr_ds, ODsCTransactions) + + cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): return GDALDatasetStartTransaction(cogr_ds, force) @@ -134,3 +138,18 @@ cdef void set_proj_search_path(object path): os.environ["PROJ_LIB"] = path + + +cdef void get_proj_version(int* major, int* minor, int* patch): + cdef int val = -1 + major[0] = val + minor[0] = val + patch[0] = val + + +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): + OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) + + +cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): + return OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) diff -Nru fiona-1.8.13/fiona/_shim3.pxd fiona-1.8.20/fiona/_shim3.pxd --- fiona-1.8.13/fiona/_shim3.pxd 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_shim3.pxd 2021-05-31 21:29:33.000000000 +0000 @@ -5,6 +5,7 @@ cdef void gdal_flush_cache(void *cogr_ds) cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL +cdef bint check_capability_transaction(void *cogr_ds) cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) cdef OGRErr gdal_commit_transaction(void *cogr_ds) cdef OGRErr gdal_rollback_transaction(void *cogr_ds) @@ -15,3 +16,6 @@ cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) +cdef void get_proj_version(int *, int *, int *) +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) +cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) diff -Nru fiona-1.8.13/fiona/_shim3.pyx fiona-1.8.20/fiona/_shim3.pyx --- fiona-1.8.13/fiona/_shim3.pyx 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_shim3.pyx 2021-05-31 21:29:33.000000000 +0000 @@ -15,7 +15,7 @@ void OSRSetPROJSearchPaths(const char *const *papszPaths) -from fiona.ogrext2 cimport * +from fiona.ogrext3 cimport * from fiona._err cimport exc_wrap_pointer from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError from fiona.errors import DriverError @@ -116,6 +116,10 @@ CSLDestroy(creation_opts) +cdef bint check_capability_transaction(void *cogr_ds): + return GDALDatasetTestCapability(cogr_ds, 
ODsCTransactions) + + cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): return GDALDatasetStartTransaction(cogr_ds, force) @@ -159,3 +163,15 @@ path_c = path_b paths = CSLAddString(paths, path_c) OSRSetPROJSearchPaths(paths) + + +cdef void get_proj_version(int* major, int* minor, int* patch): + OSRGetPROJVersion(major, minor, patch) + + +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): + OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) + + +cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): + return OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) diff -Nru fiona-1.8.13/fiona/_show_versions.py fiona-1.8.20/fiona/_show_versions.py --- fiona-1.8.13/fiona/_show_versions.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/fiona/_show_versions.py 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,40 @@ +import platform +import sys +import os +import fiona +from fiona._env import get_gdal_release_name, get_proj_version_tuple + + +def show_versions(): + """ + Prints information useful for bug reports + """ + + fiona_version = fiona.__version__ + gdal_release_name = get_gdal_release_name() + proj_version_tuple = get_proj_version_tuple() + if proj_version_tuple is not None: + proj_version = ".".join(map(str, proj_version_tuple)) + else: + proj_version = "Proj version not available" + os_info = "{system} {release}".format(system=platform.system(), + release=platform.release()) + python_version = platform.python_version() + python_exec = sys.executable + + msg = ("Fiona version: {fiona_version}" + "\nGDAL version: {gdal_release_name}" + "\nPROJ version: {proj_version}" + "\n" + "\nOS: {os_info}" + "\nPython: {python_version}" + "\nPython executable: '{python_exec}'" + "\n" + ) + + print(msg.format(fiona_version=fiona_version, + gdal_release_name=gdal_release_name, + proj_version=proj_version, + os_info=os_info, + python_version=python_version, + python_exec=python_exec)) diff -Nru fiona-1.8.13/fiona/transform.py fiona-1.8.20/fiona/transform.py --- fiona-1.8.13/fiona/transform.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/transform.py 2021-05-31 21:29:33.000000000 +0000 @@ -1,6 +1,8 @@ """Coordinate and geometry warping and reprojection""" -from fiona._transform import _transform, _transform_geom +import fiona._loading +with fiona._loading.add_gdal_dll_directories(): + from fiona._transform import _transform, _transform_geom def transform(src_crs, dst_crs, xs, ys): diff -Nru fiona-1.8.13/fiona/_transform.pyx fiona-1.8.20/fiona/_transform.pyx --- fiona-1.8.13/fiona/_transform.pyx 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/fiona/_transform.pyx 2021-05-31 21:29:33.000000000 +0000 @@ -53,7 +53,6 @@ auth, val = init.split(':') if auth.upper() == 'EPSG': _crs.OSRImportFromEPSG(osr, int(val)) - osr_set_traditional_axis_mapping_strategy(osr) else: crs['wktext'] = True for k, v in crs.items(): @@ -66,12 +65,13 @@ proj_b = proj.encode('utf-8') proj_c = proj_b _crs.OSRImportFromProj4(osr, proj_c) - osr_set_traditional_axis_mapping_strategy(osr) # Fall back for CRS strings like "EPSG:3857." 
else: proj_b = crs.encode('utf-8') proj_c = proj_b _crs.OSRSetFromUserInput(osr, proj_c) + + osr_set_traditional_axis_mapping_strategy(osr) return osr @@ -162,8 +162,8 @@ g = geom if precision >= 0: - - if g['type'] == 'Point': + + def round_point(g): coords = list(g['coordinates']) x, y = coords[:2] x = round(x, precision) @@ -172,8 +172,10 @@ if len(coords) == 3: z = coords[2] new_coords.append(round(z, precision)) - - elif g['type'] in ['LineString', 'MultiPoint']: + return new_coords + + + def round_linestring(g): coords = list(zip(*g['coordinates'])) xp, yp = coords[:2] xp = [round(v, precision) for v in xp] @@ -184,8 +186,10 @@ new_coords = list(zip(xp, yp, zp)) else: new_coords = list(zip(xp, yp)) + return new_coords - elif g['type'] in ['Polygon', 'MultiLineString']: + + def round_polygon(g): new_coords = [] for piece in g['coordinates']: coords = list(zip(*piece)) @@ -198,8 +202,9 @@ new_coords.append(list(zip(xp, yp, zp))) else: new_coords.append(list(zip(xp, yp))) + return new_coords - elif g['type'] == 'MultiPolygon': + def round_multipolygon(g): parts = g['coordinates'] new_coords = [] for part in parts: @@ -216,7 +221,24 @@ else: inner_coords.append(list(zip(xp, yp))) new_coords.append(inner_coords) + return new_coords - g['coordinates'] = new_coords + def round_geometry(g): + if g['type'] == 'Point': + g['coordinates'] = round_point(g) + elif g['type'] in ['LineString', 'MultiPoint']: + g['coordinates'] = round_linestring(g) + elif g['type'] in ['Polygon', 'MultiLineString']: + g['coordinates'] = round_polygon(g) + elif g['type'] == 'MultiPolygon': + g['coordinates'] = round_multipolygon(g) + else: + raise RuntimeError("Unsupported geometry type: {}".format(g['type'])) + + if g['type'] == 'GeometryCollection': + for _g in g['geometries']: + round_geometry(_g) + else: + round_geometry(g) return g diff -Nru fiona-1.8.13/.github/workflows/ci_linux.yml fiona-1.8.20/.github/workflows/ci_linux.yml --- fiona-1.8.13/.github/workflows/ci_linux.yml 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/.github/workflows/ci_linux.yml 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,216 @@ +name: Linux CI + +on: [push, pull_request] + +jobs: + build: + name: Python ${{ matrix.python }} / GDAL ${{ matrix.GDALVERSION }} / PROJ ${{ matrix.PROJVERSION }} + runs-on: [ubuntu-18.04] + if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" + strategy: + fail-fast: false + matrix: + include: + # Test all supported gdal minor versions (except latest stable) with one python version + - { + python: 3.6, + GDALVERSION: "2.0.3", + PROJVERSION: "4.9.3", + allow_failure: "false", + } + - { + python: 3.6, + GDALVERSION: "2.1.4", + PROJVERSION: "4.9.3", + allow_failure: "false", + } + - { + python: 3.6, + GDALVERSION: "2.2.4", + PROJVERSION: "4.9.3", + allow_failure: "false", + } + - { + python: 3.6, + GDALVERSION: "2.3.3", + PROJVERSION: "4.9.3", + allow_failure: "false", + } + - { + python: 3.6, + GDALVERSION: "2.4.4", + PROJVERSION: "4.9.3", + allow_failure: "false", + } + - { + python: 3.6, + GDALVERSION: "3.0.4", + PROJVERSION: "6.2.1", + allow_failure: "false", + } + - { + python: 3.6, + GDALVERSION: "3.1.0", + PROJVERSION: "6.3.2", + allow_failure: "false", + } + + # Test all supported python versions with latest stable gdal release + - { + python: 3.6, + GDALVERSION: "3.2.1", + PROJVERSION: "7.2.1", + allow_failure: "false", + } + - { + python: 3.7, + GDALVERSION: "3.2.1", + PROJVERSION: "7.2.1", + allow_failure: "false", + } + - { + 
python: 3.8, + GDALVERSION: "3.2.1", + PROJVERSION: "7.2.1", + allow_failure: "false", + } + - { + python: 3.9, + GDALVERSION: "3.2.1", + PROJVERSION: "7.2.1", + allow_failure: "false", + } + + # Test GDAL master + - { + python: 3.6, + GDALVERSION: "master", + PROJVERSION: "7.2.1", + allow_failure: "true", + } + + env: + CYTHON_COVERAGE: "true" + MAKEFLAGS: "-j 4 -s" + CXXFLAGS: "-O0" + CFLAGS: "-O0" + PROJVERSION: ${{ matrix.PROJVERSION }} + GDALVERSION: ${{ matrix.GDALVERSION }} + GDALINST: ${{ github.workspace }}/gdalinstall + GDALBUILD: ${{ github.workspace }}/gdalbuild + PROJINST: ${{ github.workspace }}/gdalinstall + PROJBUILD: ${{ github.workspace }}/projbuild + FILEGDB: ${{ github.workspace }}/gdalinstall/filegdb + + # Emulate travis + TRAVIS_BUILD_DIR: ${{ github.workspace }} + TRAVIS_OS_NAME: "linux" + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + + - name: Set env variables + run: | + + # Additional env variables + echo "GDAL_DATA=$GDALINST/gdal-$GDALVERSION/share/gdal" >> $GITHUB_ENV + echo "PROJ_LIB=$GDALINST/gdal-$GDALVERSION/share/proj" >> $GITHUB_ENV + echo "LD_LIBRARY_PATH=$GDALINST/gdal-$GDALVERSION/lib:$GDALINST/proj-$PROJVERSION/lib:$FILEGDB/lib:\$LD_LIBRARY_PATH" >> $GITHUB_ENV + + # Add PATH + echo "$GDALINST/gdal-$GDALVERSION/bin" >> $GITHUB_PATH + echo "$GDALINST/proj-$PROJVERSION/bin" >> $GITHUB_PATH + + echo "cat \$GITHUB_ENV" + cat $GITHUB_ENV + echo "" + echo "cat \$GITHUB_PATH" + cat $GITHUB_PATH + + - name: Install apt packages + run: | + sudo apt-get install libatlas-base-dev libcurl4-openssl-dev libgeos-dev libfreexl-dev libzstd-dev libspatialite-dev + + # Unlike travis, packages from non-default repositories are installed. + # While default repositories (e.g. bionic/universe or bionic/main) tend to keep packages at the same API / ABI level, + # this is not guaranteed with other repositories.
+ # The following command creates a list of these packages, which is used as the key for the GDAL cache + # The repositories of packages can be identified in the output of `sudo apt-get install` + apt list --installed | grep 'libgeos-dev\|libxml2-dev' > $GITHUB_WORKSPACE/apt_list + cat $GITHUB_WORKSPACE/apt_list + + - name: Cache GDAL binaries + uses: actions/cache@v2 + with: + path: gdalinstall + key: ${{ runner.os }}-gdal-${{ matrix.GDALVERSION }}-proj-${{ matrix.PROJVERSION }}-${{ hashFiles('**/apt_list') }} + + - name: Cache pip + uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Install Python dependencies + run: | + python -m pip install -U pip + python -m pip install -U wheel + python -m pip install -r requirements-ci.txt + python -m pip wheel -r requirements-dev.txt + python -m pip install -r requirements-dev.txt + + - name: Build PROJ + run: | + chmod +x scripts/travis_proj_install.sh && ./scripts/travis_proj_install.sh + + - name: Install FileGDB + run: | + chmod +x scripts/travis_filegdb_install.sh && ./scripts/travis_filegdb_install.sh + + - name: Build GDAL + continue-on-error: ${{ matrix.allow_failure == 'true' }} + run: | + chmod +x scripts/travis_gdal_install.sh && ./scripts/travis_gdal_install.sh + gdal-config --version + + - name: Build Fiona + continue-on-error: ${{ matrix.allow_failure == 'true' }} + run: | + if [ "$GDALVERSION" = "master" ]; then echo "Using gdal master"; elif [ $($GDALINST/gdal-$GDALVERSION/bin/gdal-config --version) == $(sed 's/[a-zA-Z].*//g' <<< $GDALVERSION) ]; then echo "Using gdal $GDALVERSION"; else echo "NOT using gdal $GDALVERSION as expected; aborting"; exit 1; fi + GDAL_CONFIG=$GDALINST/gdal-$GDALVERSION/bin/gdal-config python -m pip install --no-deps --force-reinstall --no-use-pep517 -e . + + - name: Print Environment + continue-on-error: ${{ matrix.allow_failure == 'true' }} + run: | + echo "python -m pip freeze" + python -m pip freeze + + echo "" + echo "fio --version" + fio --version + + echo "" + echo "fio --gdal-version" + fio --gdal-version + + echo "" + echo "python -c \"import fiona; fiona.show_versions()\"" + python -c "import fiona; fiona.show_versions()" + + - name: pytest + continue-on-error: ${{ matrix.allow_failure == 'true' }} + run: | + GDAL_ENABLE_DEPRECATED_DRIVER_GTM=YES python -m pytest -m "not wheel" --cov fiona --cov-report term-missing + + - name: Coveralls + continue-on-error: ${{ matrix.allow_failure == 'true' }} + run: coveralls || echo "!! intermittent coveralls failure" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff -Nru fiona-1.8.13/.github/workflows/rstcheck.yml fiona-1.8.20/.github/workflows/rstcheck.yml --- fiona-1.8.13/.github/workflows/rstcheck.yml 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/.github/workflows/rstcheck.yml 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,26 @@ +name: rstcheck + +# Run this workflow every time a new commit is pushed to your repository +on: [push, pull_request] + +jobs: + rstcheck: + name: rstcheck + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + + - name: Install Python dependencies + run: | + python -m pip install sphinx==3.2.1 rstcheck==3.3.1 + + - name: Run rstcheck + run: | + rstcheck -r --ignore-directives automodule --ignore-substitutions version,release,today .
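For reference, the show_versions() helper added in fiona/_show_versions.py earlier in this diff is re-exported at the package level, which is what the "Print Environment" step above relies on. A minimal usage sketch, assuming the 1.8.20 package from this diff is installed:

    import fiona

    # Prints Fiona, GDAL, PROJ, OS and Python details for bug reports.
    fiona.show_versions()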
diff -Nru fiona-1.8.13/pyproject.toml fiona-1.8.20/pyproject.toml --- fiona-1.8.13/pyproject.toml 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/pyproject.toml 2021-05-31 21:29:33.000000000 +0000 @@ -1,3 +1,2 @@ [build-system] -# Minimum requirements for the build system to execute. -requires = ["setuptools", "wheel", "cython"] +requires = ["setuptools", "wheel", "cython==0.29.21", "oldest-supported-numpy"] diff -Nru fiona-1.8.13/pytest.ini fiona-1.8.20/pytest.ini --- fiona-1.8.13/pytest.ini 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/pytest.ini 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,13 @@ +[pytest] +filterwarnings = + ignore:.*Sequential read of iterator was interrupted*:RuntimeWarning + ignore:.*negative slices or start values other than zero may be slow*:RuntimeWarning + ignore:.*negative step size may be slow*:RuntimeWarning + ignore:.*is buggy and will be removed in Fiona 2.0.* + +markers = + iconv: marks tests that require gdal to be compiled with iconv + network: marks tests that require a network connection + wheel: marks tests that only work when installed from a wheel + +testpaths = tests diff -Nru fiona-1.8.13/README.rst fiona-1.8.20/README.rst --- fiona-1.8.13/README.rst 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/README.rst 2021-05-31 21:29:33.000000000 +0000 @@ -2,15 +2,15 @@ Fiona ===== -Fiona is OGR's neat and nimble API for Python programmers. +Fiona is GDAL_'s neat and nimble vector API for Python programmers. -.. image:: https://travis-ci.org/Toblerity/Fiona.png?branch=master - :target: https://travis-ci.org/Toblerity/Fiona +.. image:: https://github.com/Toblerity/Fiona/workflows/Linux%20CI/badge.svg?branch=maint-1.8 + :target: https://github.com/Toblerity/Fiona/actions?query=branch%3Amaint-1.8 .. image:: https://ci.appveyor.com/api/projects/status/github/Toblerity/Fiona?svg=true :target: https://ci.appveyor.com/project/sgillies/fiona/branch/master -.. image:: https://coveralls.io/repos/Toblerity/Fiona/badge.png +.. image:: https://coveralls.io/repos/Toblerity/Fiona/badge.svg :target: https://coveralls.io/r/Toblerity/Fiona Fiona is designed to be simple and dependable. It focuses on reading and @@ -155,8 +155,7 @@ .. code-block:: python - for i, layername in enumerate( - fiona.listlayers('zip://tests/data/coutwildrnp.zip'): + for i, layername in enumerate(fiona.listlayers('zip://tests/data/coutwildrnp.zip')): with fiona.open('zip://tests/data/coutwildrnp.zip', layer=i) as src: print(i, layername, len(src)) @@ -174,6 +173,7 @@ # Output: # 67 + Fiona CLI ========= @@ -275,7 +275,7 @@ ------- Binary installers are available at -http://www.lfd.uci.edu/~gohlke/pythonlibs/#fiona and coming eventually to PyPI. +https://www.lfd.uci.edu/~gohlke/pythonlibs/#fiona and coming eventually to PyPI. You can download a binary distribution of GDAL from `here `_. You will also need to download @@ -294,14 +294,20 @@ $ python setup.py build_ext -I -lgdal_i -L install --gdalversion 2.1 -Note: The GDAL DLL (``gdal111.dll`` or similar) and gdal-data directory need to -be in your Windows PATH otherwise Fiona will fail to work. +Note: The following environment variables need to be set so that Fiona works correctly: + +* The directory containing the GDAL DLL (``gdal304.dll`` or similar) needs to be in your + Windows ``PATH`` (e.g. ``C:\gdal\bin``). +* The gdal-data directory needs to be in your Windows ``PATH`` or the environment variable + ``GDAL_DATA`` must be set (e.g. ``C:\gdal\bin\gdal-data``).
+* The environment variable ``PROJ_LIB`` must be set to the proj library directory (e.g. + ``C:\gdal\bin\proj6\share``) -The [Appveyor CI build](https://ci.appveyor.com/project/sgillies/fiona/history) +The `Appveyor CI build `_ uses the GISInternals GDAL binaries to build Fiona. This produces a binary wheel for successful builds, which includes GDAL and other dependencies, for users wanting to try an unstable development version. -The [Appveyor configuration file](appveyor.yml) may be a useful example for +The `Appveyor configuration file `_ may be a useful example for users building from source on Windows. Development and testing @@ -329,7 +335,7 @@ (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal --gdalversion 2 develop (fiona_env)$ py.test -.. _OGR: http://www.gdal.org/ogr +.. _GDAL: http://www.gdal.org .. _pyproj: http://pypi.python.org/pypi/pyproj/ .. _Rtree: http://pypi.python.org/pypi/Rtree/ .. _Shapely: http://pypi.python.org/pypi/Shapely/ diff -Nru fiona-1.8.13/requirements-ci.txt fiona-1.8.20/requirements-ci.txt --- fiona-1.8.13/requirements-ci.txt 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/requirements-ci.txt 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1 @@ +coveralls diff -Nru fiona-1.8.13/requirements-dev.txt fiona-1.8.20/requirements-dev.txt --- fiona-1.8.13/requirements-dev.txt 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/requirements-dev.txt 2021-05-31 21:29:33.000000000 +0000 @@ -1,9 +1,10 @@ -r requirements.txt coverage==4.5.4 -cython==0.29.7 +cython==0.29.21 mock ; python_version < '3.3' pytest==4.6.6 pytest-cov==2.8.1 -setuptools==39.0.1 +setuptools==41.6.0 boto3==1.9.19 -wheel==0.31.1 +wheel==0.33.6 +pytz==2020.1 diff -Nru fiona-1.8.13/requirements.txt fiona-1.8.20/requirements.txt --- fiona-1.8.13/requirements.txt 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/requirements.txt 2021-05-31 21:29:33.000000000 +0000 @@ -1,8 +1,7 @@ -argparse==1.4.0 attrs==18.2.0 click-plugins==1.0.4 cligj==0.5.0 -enum34==1.1.6 ; python_version < '3.4' munch==2.3.2 -ordereddict==1.1 ; python_version <= '2.7' six==1.11.0 +enum34==1.1.6 ; python_version < '3.4' +certifi diff -Nru fiona-1.8.13/scripts/check_deprecated.py fiona-1.8.20/scripts/check_deprecated.py --- fiona-1.8.13/scripts/check_deprecated.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/scripts/check_deprecated.py 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,57 @@ +import glob +import os +from collections import defaultdict +import re + +ignored_files = {'_shim.pyx', '_shim1.pyx', '_shim1.pxd', 'ogrext1.pxd'} + +# List of deprecated methods from https://gdal.org/doxygen/deprecated.html#_deprecated000028 +deprecated = { + 'CPL_LSBINT16PTR', + 'CPL_LSBINT32PTR(x)', + 'OGR_Dr_CopyDataSource', + 'OGR_Dr_CreateDataSource', + 'OGR_Dr_DeleteDataSource', + 'OGR_Dr_Open', + 'OGR_Dr_TestCapability', + 'OGR_DS_CopyLayer', + 'OGR_DS_CreateLayer', + 'OGR_DS_DeleteLayer', + 'OGR_DS_Destroy', + 'OGR_DS_ExecuteSQL', + 'OGR_DS_GetDriver', + 'OGR_DS_GetLayer', + 'OGR_DS_GetLayerByName', + 'OGR_DS_GetLayerCount', + 'OGR_DS_GetName', + 'OGR_DS_ReleaseResultSet', + 'OGR_DS_TestCapability', + 'OGR_G_GetCoordinateDimension', + 'OGR_G_SetCoordinateDimension', + 'OGRGetDriver', + 'OGRGetDriverByName', + 'OGRGetDriverCount', + 'OGROpen', + 'OGROpenShared', + 'OGRRegisterAll', + 'OGRReleaseDataSource', +} + +found_lines = defaultdict(list) +files = glob.glob('fiona/*.pyx') + glob.glob('fiona/*.pxd') +for path in files: + if os.path.basename(path) in ignored_files: + continue + + 
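# Scan each remaining file, line by line, for calls to deprecated GDAL functions. +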
with open(path, 'r') as f: + for i, line in enumerate(f): + for deprecated_method in deprecated: + match = re.search('{}\s*\('.format(deprecated_method), line) + if match: + found_lines[path].append((i+1, line.strip(), deprecated_method)) + +for path in sorted(found_lines): + print(path) + for line_nr, line, method in found_lines[path]: + print("\t{}\t{}".format(line_nr, line)) + print("") diff -Nru fiona-1.8.13/scripts/check_urls.py fiona-1.8.20/scripts/check_urls.py --- fiona-1.8.13/scripts/check_urls.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/scripts/check_urls.py 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,38 @@ +import requests +import glob +import re + + +def test_urls(files): + headers = {'User-Agent': 'Mozilla/5.0 (compatible; MSIE 6.0; Fiona CI check)'} + + for fpath in files: + print("Processing: {}".format(fpath)) + with open(fpath) as f: + + text = f.read() + urls = re.findall('(https?:\/\/[^\s`>\'"()]+)', text) + + for url in urls: + http_code = None + try: + r = requests.get(url, headers=headers) + http_code = r.status_code + warn = '' + if not http_code == 200: + warn = ' <--- !!!' + except Exception as e: + warn = str(e) + + if len(warn) > 0: + print("\t {url} HTTP code: {http} {warn}".format(url=url, + http=http_code, + warn=warn) + ) + + +print("Test URLs in documentation") +test_urls(glob.glob('**/*.rst', recursive=True)) +print('') +print('Test URLs in code') +test_urls(glob.glob('fiona/**/*.py', recursive=True)) diff -Nru fiona-1.8.13/scripts/travis_filegdb_install.sh fiona-1.8.20/scripts/travis_filegdb_install.sh --- fiona-1.8.13/scripts/travis_filegdb_install.sh 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/scripts/travis_filegdb_install.sh 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,18 @@ +#!/bin/bash + +# Install filegdb if not already installed +if [ ! -d "$FILEGDB" ]; then + mkdir -p $FILEGDB + cd $FILEGDB + wget -q https://github.com/Esri/file-geodatabase-api/raw/master/FileGDB_API_1.5.1/FileGDB_API_1_5_1-64gcc51.tar.gz + tar -xzf FileGDB_API_1_5_1-64gcc51.tar.gz --strip=1 FileGDB_API-64gcc51 + rm FileGDB_API_1_5_1-64gcc51.tar.gz + rm -rf samples + rm -rf doc +fi + +export LD_LIBRARY_PATH=$FILEGDB/lib:$LD_LIBRARY_PATH + +# change back to travis build dir +cd $TRAVIS_BUILD_DIR + diff -Nru fiona-1.8.13/scripts/travis_gdal_install.sh fiona-1.8.20/scripts/travis_gdal_install.sh --- fiona-1.8.13/scripts/travis_gdal_install.sh 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/scripts/travis_gdal_install.sh 2021-05-31 21:29:33.000000000 +0000 @@ -42,7 +42,14 @@ --without-ruby \ --without-perl \ --without-php \ - --without-python" + --without-python \ + --with-oci=no \ + --without-mrf \ + --with-webp=no" + +if [ -d "$FILEGDB" ]; then + GDALOPTS="$GDALOPTS --with-fgdb=$FILEGDB" +fi # Create build dir if not exists if [ ! -d "$GDALBUILD" ]; then @@ -60,23 +67,26 @@ cd $GDALBUILD git clone --depth 1 https://github.com/OSGeo/gdal gdal-$GDALVERSION cd gdal-$GDALVERSION/gdal + echo $PROJVERSION > newproj.txt git rev-parse HEAD > newrev.txt BUILD=no # Only build if nothing cached or if the GDAL revision changed if test ! -f $GDALINST/gdal-$GDALVERSION/rev.txt; then BUILD=yes - elif ! diff newrev.txt $GDALINST/gdal-$GDALVERSION/rev.txt >/dev/null; then + elif ( ! diff newrev.txt $GDALINST/gdal-$GDALVERSION/rev.txt >/dev/null ) || ( ! 
diff newproj.txt $GDALINST/gdal-$GDALVERSION/newproj.txt >/dev/null ); then BUILD=yes fi if test "$BUILD" = "yes"; then mkdir -p $GDALINST/gdal-$GDALVERSION cp newrev.txt $GDALINST/gdal-$GDALVERSION/rev.txt + cp newproj.txt $GDALINST/gdal-$GDALVERSION/newproj.txt ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS $PROJOPT - make -j 4 + make make install fi else + case "$GDALVERSION" in 3*) PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" @@ -99,6 +109,9 @@ 1*) PROJOPT="--with-static-proj4=$GDALINST/gdal-$GDALVERSION" ;; + *) + PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" + ;; esac if [ ! -d "$GDALINST/gdal-$GDALVERSION/share/gdal" ]; then @@ -108,7 +121,7 @@ tar -xzf gdal-$GDALVERSION.tar.gz cd gdal-$gdalver ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS $PROJOPT - make -j 4 + make make install fi fi diff -Nru fiona-1.8.13/scripts/travis_proj_install.sh fiona-1.8.20/scripts/travis_proj_install.sh --- fiona-1.8.13/scripts/travis_proj_install.sh 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/scripts/travis_proj_install.sh 2021-05-31 21:29:33.000000000 +0000 @@ -18,9 +18,10 @@ cd $PROJBUILD wget -q https://download.osgeo.org/proj/proj-$PROJVERSION.tar.gz tar -xzf proj-$PROJVERSION.tar.gz - cd proj-$PROJVERSION + projver=$(expr "$PROJVERSION" : '\([0-9]*.[0-9]*.[0-9]*\)') + cd proj-$projver ./configure --prefix=$PROJINST/gdal-$GDALVERSION - make -s -j 2 + make -s make install fi diff -Nru fiona-1.8.13/setup.cfg fiona-1.8.20/setup.cfg --- fiona-1.8.13/setup.cfg 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/setup.cfg 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -[tool:pytest] -testpaths = tests diff -Nru fiona-1.8.13/setup.py fiona-1.8.20/setup.py --- fiona-1.8.13/setup.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/setup.py 2021-05-31 21:29:33.000000000 +0000 @@ -5,7 +5,6 @@ import shutil import subprocess import sys - from setuptools import setup from setuptools.extension import Extension @@ -183,7 +182,7 @@ gdal_major_version = int(gdal_version_parts[0]) gdal_minor_version = int(gdal_version_parts[1]) -log.info("GDAL version major=%r minor=%r", gdal_major_version, gdal_minor_version) + log.info("GDAL version major=%r minor=%r", gdal_major_version, gdal_minor_version) ext_options = dict( include_dirs=include_dirs, @@ -191,6 +190,16 @@ libraries=libraries, extra_link_args=extra_link_args) +# Enable coverage for cython pyx files. 
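+# CYTHON_COVERAGE is set in the Linux CI workflow above; Cython's linetrace mode adds run-time overhead, so it is left off for normal builds.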
+if os.environ.get('CYTHON_COVERAGE'): + from Cython.Compiler.Options import get_directive_defaults + directive_defaults = get_directive_defaults() + directive_defaults['linetrace'] = True + directive_defaults['binding'] = True + + ext_options.update(dict( + define_macros=[("CYTHON_TRACE_NOGIL", "1")])) + # GDAL 2.3+ requires C++11 if language == "c++": @@ -275,11 +284,13 @@ requirements = [ 'attrs>=17', - 'click>=4.0,<8', + 'certifi', + 'click>=4.0', 'cligj>=0.5', 'click-plugins>=1.0', 'six>=1.7', 'munch', + "setuptools", 'argparse; python_version < "2.7"', 'ordereddict; python_version < "2.7"', 'enum34; python_version < "3.4"' diff -Nru fiona-1.8.13/tests/conftest.py fiona-1.8.20/tests/conftest.py --- fiona-1.8.13/tests/conftest.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/conftest.py 2021-05-31 21:29:33.000000000 +0000 @@ -6,13 +6,31 @@ import shutil import tarfile import zipfile - +from collections import OrderedDict from click.testing import CliRunner import pytest import fiona +from fiona.crs import from_epsg from fiona.env import GDALVersion +driver_extensions = {'DXF': 'dxf', + 'CSV': 'csv', + 'ESRI Shapefile': 'shp', + 'FileGDB': 'gdb', + 'GML': 'gml', + 'GPX': 'gpx', + 'GPSTrackMaker': 'gtm', + 'MapInfo File': 'tab', + 'DGN': 'dgn', + 'GPKG': 'gpkg', + 'GeoJSON': 'json', + 'GeoJSONSeq': 'geojsons', + 'GMT': 'gmt', + 'OGR_GMT': 'gmt', + 'BNA': 'bna', + 'FlatGeobuf': 'fgb'} + def pytest_report_header(config): headers = [] @@ -25,6 +43,19 @@ return '\n'.join(headers) +def get_temp_filename(driver): + + basename = "foo" + extension = driver_extensions.get(driver, "bar") + prefix = "" + if driver == 'GeoJSONSeq': + prefix = "GeoJSONSeq:" + + return "{prefix}{basename}.{extension}".format(prefix=prefix, + basename=basename, + extension=extension) + + _COUTWILDRNP_FILES = [ 'coutwildrnp.shp', 'coutwildrnp.shx', 'coutwildrnp.dbf', 'coutwildrnp.prj'] @@ -272,6 +303,11 @@ not gdal_version.major >= 3, reason="Requires GDAL 3.x") +travis_only = pytest.mark.skipif( + not os.getenv("TRAVIS", "false") == "true", + reason="Requires travis CI environment" +) + @pytest.fixture(scope="class") def unittest_data_dir(data_dir, request): @@ -283,3 +319,137 @@ def unittest_path_coutwildrnp_shp(path_coutwildrnp_shp, request): """Makes shapefile path available to unittest tests""" request.cls.path_coutwildrnp_shp = path_coutwildrnp_shp + + +@pytest.fixture() +def testdata_generator(): + """ Helper function to create test data sets for ideally all supported drivers + """ + + def get_schema(driver): + special_schemas = {'CSV': {'geometry': None, 'properties': OrderedDict([('position', 'int')])}, + 'BNA': {'geometry': 'Point', 'properties': {}}, + 'DXF': {'properties': OrderedDict( + [('Layer', 'str'), + ('SubClasses', 'str'), + ('Linetype', 'str'), + ('EntityHandle', 'str'), + ('Text', 'str')]), + 'geometry': 'Point'}, + 'GPX': {'geometry': 'Point', + 'properties': OrderedDict([('ele', 'float'), ('time', 'datetime')])}, + 'GPSTrackMaker': {'properties': OrderedDict([]), 'geometry': 'Point'}, + 'DGN': {'properties': OrderedDict([]), 'geometry': 'LineString'}, + 'MapInfo File': {'geometry': 'Point', 'properties': OrderedDict([('position', 'str')])} + } + + return special_schemas.get(driver, {'geometry': 'Point', 'properties': OrderedDict([('position', 'int')])}) + + def get_crs(driver): + special_crs = {'MapInfo File': from_epsg(4326)} + return special_crs.get(driver, None) + + def get_records(driver, range): + special_records1 = {'CSV': [{'geometry': None, 'properties': {'position': i}} for 
i in range], 'BNA': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {}} + for i + in range], + 'DXF': [ + {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, + 'properties': OrderedDict( + [('Layer', '0'), + ('SubClasses', 'AcDbEntity:AcDbPoint'), + ('Linetype', None), + ('EntityHandle', str(i + 20000)), + ('Text', None)])} for i in range], + 'GPX': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, + 'properties': {'ele': 0.0, 'time': '2020-03-24T16:08:40+00:00'}} for i + in range], + 'GPSTrackMaker': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, + 'properties': {}} for i in range], + 'DGN': [ + {'geometry': {'type': 'LineString', 'coordinates': [(float(i), 0.0), (0.0, 0.0)]}, + 'properties': {}} for i in range], + 'MapInfo File': [ + {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, + 'properties': {'position': str(i)}} for i in range], + 'PCIDSK': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i), 0.0)}, + 'properties': {'position': i}} for i in range] + } + return special_records1.get(driver, [ + {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {'position': i}} for i in + range]) + + def get_records2(driver, range): + special_records2 = {'DGN': [ + {'geometry': {'type': 'LineString', 'coordinates': [(float(i), 0.0), (0.0, 0.0)]}, + 'properties': OrderedDict( + [('Type', 4), + ('Level', 0), + ('GraphicGroup', 0), + ('ColorIndex', 0), + ('Weight', 0), + ('Style', 0), + ('EntityNum', None), + ('MSLink', None), + ('Text', None)])} for i in range], + } + return special_records2.get(driver, get_records(driver, range)) + + def get_create_kwargs(driver): + kwargs = { + 'FlatGeobuf': {'SPATIAL_INDEX': False} + } + return kwargs.get(driver, {}) + + def test_equal(driver, val_in, val_out): + is_good = True + is_good = is_good and val_in['geometry'] == val_out['geometry'] + for key in val_in['properties']: + if key in val_out['properties']: + if driver == 'FileGDB' and isinstance(val_in['properties'][key], int): + is_good = is_good and str(val_in['properties'][key]) == str(int(val_out['properties'][key])) + else: + is_good = is_good and str(val_in['properties'][key]) == str(val_out['properties'][key]) + else: + is_good = False + return is_good + + def _testdata_generator(driver, range1, range2): + """ Generate test data and helper methods for a specific driver. Each generated set of records + contains the positions specified by the range arguments. These positions are encoded either as a field or in the geometry + of the record, depending on the driver characteristics.
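+ For example, ``testdata_generator('GeoJSON', range(0, 10), range(10, 20))`` returns a schema, a crs, ten records with positions 0-9, ten more with positions 10-19, an equality helper and driver specific creation options.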
+ + Parameters + ---------- + driver: str + Name of the driver to generate tests for + range1: list of integer + Range of positions for first set of records + range2: list of integer + Range of positions for second set of records + + Returns + ------- + schema + A schema for the records + crs + A crs for the records + records1 + A set of records containing the positions of range1 + records2 + A set of records containing the positions of range2 + test_equal + A function that returns True if a record's geometry equals that of the generated record and if + the properties of the generated record can be found in the record + create_kwargs + A dict of driver specific dataset creation options (empty for most drivers) + """ + return get_schema(driver), get_crs(driver), get_records(driver, range1), get_records2(driver, range2),\ + test_equal, get_create_kwargs(driver) + + return _testdata_generator + + +@pytest.fixture(scope='session') +def path_test_tz_geojson(data_dir): + """Path to ``test_tz.geojson``""" + return os.path.join(data_dir, 'test_tz.geojson') diff -Nru fiona-1.8.13/tests/data/test_tz.geojson fiona-1.8.20/tests/data/test_tz.geojson --- fiona-1.8.13/tests/data/test_tz.geojson 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/tests/data/test_tz.geojson 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,18 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": { + "test": "2015-04-22T00:00:00+07:00" + }, + "geometry": { + "type": "Point", + "coordinates": [ + -79.4, + 43.6 + ] + } + } + ] +} diff -Nru fiona-1.8.13/tests/test_bounds.py fiona-1.8.20/tests/test_bounds.py --- fiona-1.8.13/tests/test_bounds.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_bounds.py 2021-05-31 21:29:33.000000000 +0000 @@ -1,4 +1,9 @@ +import pytest import fiona +from fiona.drvsupport import supported_drivers, _driver_supports_mode +from fiona.errors import DriverError +from .conftest import driver_extensions +from fiona.env import GDALVersion def test_bounds_point(): @@ -17,5 +22,48 @@ def test_bounds_z(): - g = {'type': 'Point', 'coordinates': [10,10,10]} + g = {'type': 'Point', 'coordinates': [10, 10, 10]} assert fiona.bounds(g) == (10, 10, 10, 10) + + +ignore_write_drivers = set(['CSV', 'GPX', 'GPSTrackMaker', 'DXF', 'DGN', 'MapInfo File']) +write_drivers = [driver for driver, raw in supported_drivers.items() if + _driver_supports_mode(driver, 'w') and driver not in ignore_write_drivers] + + +@pytest.mark.parametrize('driver', write_drivers) +def test_bounds(tmpdir, driver): + """Test if bounds are correctly calculated after writing + + """ + + if driver == 'BNA' and GDALVersion.runtime() < GDALVersion(2, 0): + # BNA driver segfaults with gdal 1.11 + return + + extension = driver_extensions.get(driver, "bar") + path = str(tmpdir.join('foo.{}'.format(extension))) + + with fiona.open(path, 'w', + driver=driver, + schema={'geometry': 'Point', + 'properties': [('title', 'str')]}, + fiona_force_driver=True) as c: + + c.writerecords([{'geometry': {'type': 'Point', 'coordinates': (1.0, 10.0)}, + 'properties': {'title': 'One'}}]) + + try: + bounds = c.bounds + assert bounds == (1.0, 10.0, 1.0, 10.0) + except Exception as e: + assert isinstance(e, DriverError) + + c.writerecords([{'geometry': {'type': 'Point', 'coordinates': (2.0, 20.0)}, + 'properties': {'title': 'Two'}}]) + + try: + bounds = c.bounds + assert bounds == (1.0, 10.0, 2.0, 20.0) + except Exception as e: + assert isinstance(e, DriverError) diff -Nru fiona-1.8.13/tests/test_collection.py fiona-1.8.20/tests/test_collection.py --- fiona-1.8.13/tests/test_collection.py 2019-12-05
14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_collection.py 2021-05-31 21:29:33.000000000 +0000 @@ -1,17 +1,20 @@ # Testing collections and workspaces import datetime +import os +import random import sys import re +from collections import OrderedDict import pytest import fiona -from fiona.collection import Collection, supported_drivers -from fiona.env import getenv +from fiona.collection import Collection +from fiona.env import getenv, GDALVersion from fiona.errors import FionaValueError, DriverError, FionaDeprecationWarning - -from .conftest import WGS84PATTERN +from .conftest import WGS84PATTERN, get_temp_filename +from fiona.drvsupport import supported_drivers, driver_mode_mingdal class TestSupportedDrivers(object): @@ -858,16 +861,28 @@ @pytest.mark.network def test_collection_http(): - ds = fiona.Collection('http://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.shp', vsi='http') - assert ds.path == '/vsicurl/http://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.shp' - assert len(ds) == 10 + ds = fiona.Collection( + "https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.shp", + vsi="https", + ) + assert ( + ds.path + == "/vsicurl/https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.shp" + ) + assert len(ds) == 67 @pytest.mark.network def test_collection_zip_http(): - ds = fiona.Collection('http://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.zip', vsi='zip+http') - assert ds.path == '/vsizip/vsicurl/http://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.zip' - assert len(ds) == 10 + ds = fiona.Collection( + "https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.zip", + vsi="zip+https", + ) + assert ( + ds.path + == "/vsizip/vsicurl/https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.zip" + ) + assert len(ds) == 67 def test_encoding_option_warning(tmpdir, caplog): @@ -899,3 +914,51 @@ """We have a GDAL env within collection context""" with fiona.open(path_coutwildrnp_shp): assert 'FIONA_ENV' in getenv() + + +@pytest.mark.parametrize('driver,filename', [('ESRI Shapefile', 'test.shp'), + ('GeoJSON', 'test.json'), + ('GPKG', 'test.gpkg')]) +def test_mask_polygon_triangle(tmpdir, driver, filename): + """ Test if mask works for non trivial geometries""" + schema = {'geometry': 'Polygon', 'properties': OrderedDict([('position_i', 'int'), ('position_j', 'int')])} + records = [{'geometry': {'type': 'Polygon', 'coordinates': (((float(i), float(j)), (float(i + 1), float(j)), + (float(i + 1), float(j + 1)), (float(i), float(j + 1)), + (float(i), float(j))),)}, + 'properties': {'position_i': i, 'position_j': j}} for i in range(10) for j in range(10)] + random.shuffle(records) + + path = str(tmpdir.join(filename)) + + with fiona.open(path, 'w', + driver=driver, + schema=schema,) as c: + c.writerecords(records) + + with fiona.open(path) as c: + items = list( + c.items(mask={'type': 'Polygon', 'coordinates': (((2.0, 2.0), (4.0, 4.0), (4.0, 6.0), (2.0, 2.0)),)})) + assert len(items) == 15 + + +def test_collection__empty_column_name(tmpdir): + """Based on pull #955""" + tmpfile = str(tmpdir.join("test_empty.geojson")) + with pytest.warns(UserWarning, match="Empty field name at index 0"): + with fiona.open(tmpfile, "w", driver="GeoJSON", schema={ + "geometry": "Point", + "properties": {"": "str", "name": "str"} + }) as tmp: + tmp.writerecords([{ + "geometry": {"type": "Point", "coordinates": [ 8, 49 ] }, + 
"properties": { "": "", "name": "test" } + }]) + + with fiona.open(tmpfile) as tmp: + with pytest.warns(UserWarning, match="Empty field name at index 0"): + assert tmp.schema == { + "geometry": "Point", + "properties": {"": "str", "name": "str"} + } + with pytest.warns(UserWarning, match="Empty field name at index 0"): + next(tmp) diff -Nru fiona-1.8.13/tests/test_compound_crs.py fiona-1.8.20/tests/test_compound_crs.py --- fiona-1.8.13/tests/test_compound_crs.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_compound_crs.py 2021-05-31 21:29:33.000000000 +0000 @@ -8,4 +8,4 @@ prj = data.join("coutwildrnp.prj") prj.write("""COMPD_CS["unknown",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],VERT_CS["unknown",VERT_DATUM["unknown",2005],UNIT["metre",1.0,AUTHORITY["EPSG","9001"]],AXIS["Up",UP]]]""") with fiona.open(str(data.join("coutwildrnp.shp"))) as collection: - assert collection.crs == {} + assert isinstance(collection.crs, dict) diff -Nru fiona-1.8.13/tests/test_cursor_interruptions.py fiona-1.8.20/tests/test_cursor_interruptions.py --- fiona-1.8.13/tests/test_cursor_interruptions.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/tests/test_cursor_interruptions.py 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,158 @@ +import fiona +import pytest +from fiona.drvsupport import driver_mode_mingdal, _driver_supports_mode +from fiona.errors import DriverError +from tests.conftest import get_temp_filename + + +@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys() + if _driver_supports_mode(driver, 'w')]) +def test_write_getextent(tmpdir, driver, testdata_generator): + """ + Test if a call to OGR_L_GetExtent has side effects for writing + + """ + + schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), range(10, 20)) + path = str(tmpdir.join(get_temp_filename(driver))) + positions = set([int(r['properties']['position']) for r in records1 + records2]) + + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + c.writerecords(records1) + + # Call to OGR_L_GetExtent + try: + c.bounds + except DriverError: + pass + + c.writerecords(records2) + + with fiona.open(path) as c: + data = set([int(f['properties']['position']) for f in c]) + assert len(positions) == len(data) + for p in positions: + assert p in data + + +@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys() + if _driver_supports_mode(driver, 'w')]) +def test_read_getextent(tmpdir, driver, testdata_generator): + """ + Test if a call to OGR_L_GetExtent has side effects for reading + + """ + + schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), range(10, 20)) + path = str(tmpdir.join(get_temp_filename(driver))) + positions = set([int(r['properties']['position']) for r in records1 + records2]) + + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + c.writerecords(records1) + c.writerecords(records2) + + with fiona.open(path) as c: + data = set() + for _ in range(len(records1)): + f = next(c) + data.add(int(f['properties']['position'])) + + # Call to OGR_L_GetExtent + try: + c.bounds + except DriverError: + pass + + for _ in range(len(records1)): + f = next(c) + 
data.add(int(f['properties']['position'])) + assert len(positions) == len(data) + for p in positions: + assert p in data + + +@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys() + if _driver_supports_mode(driver, 'w')]) +def test_write_getfeaturecount(tmpdir, driver, testdata_generator): + """ + Test if a call to OGR_L_GetFeatureCount has side effects for writing + + """ + + schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), range(10, 20)) + path = str(tmpdir.join(get_temp_filename(driver))) + positions = set([int(r['properties']['position']) for r in records1 + records2]) + + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + c.writerecords(records1) + + # Call to OGR_L_GetFeatureCount + try: + assert len(c) == len(records1) + except TypeError: + pass + c.writerecords(records2) + + with fiona.open(path) as c: + data = set([int(f['properties']['position']) for f in c]) + assert len(positions) == len(data) + for p in positions: + assert p in data + + +@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys() + if _driver_supports_mode(driver, 'w')]) +def test_read_getfeaturecount(tmpdir, driver, testdata_generator): + """ + Test if a call to OGR_L_GetFeatureCount has side effects for reading + + """ + + schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), range(10, 20)) + path = str(tmpdir.join(get_temp_filename(driver))) + positions = set([int(r['properties']['position']) for r in records1 + records2]) + + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + c.writerecords(records1) + c.writerecords(records2) + + with fiona.open(path) as c: + data = set() + for _ in range(len(records1)): + f = next(c) + data.add(int(f['properties']['position'])) + + # Call to OGR_L_GetFeatureCount + try: + assert len(data) == len(records1) + except TypeError: + pass + + for _ in range(len(records1)): + f = next(c) + data.add(int(f['properties']['position'])) + + try: + assert len(data) == len(records1 + records2) + except TypeError: + pass + + assert len(positions) == len(data) + for p in positions: + assert p in data \ No newline at end of file diff -Nru fiona-1.8.13/tests/test_datetime.py fiona-1.8.20/tests/test_datetime.py --- fiona-1.8.13/tests/test_datetime.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_datetime.py 2021-05-31 21:29:33.000000000 +0000 @@ -2,267 +2,658 @@ See also test_rfc3339.py for datetime parser tests. 
""" +from collections import OrderedDict import fiona +from fiona._env import get_gdal_version_num, calc_gdal_version_num import pytest -import tempfile, shutil -import os from fiona.errors import DriverSupportError -from .conftest import requires_gpkg - -GDAL_MAJOR_VER = fiona.get_gdal_version_num() // 1000000 - -GEOMETRY_TYPE = "Point" -GEOMETRY_EXAMPLE = {"type": "Point", "coordinates": [1, 2]} - -DRIVER_FILENAME = { - "ESRI Shapefile": "test.shp", - "GPKG": "test.gpkg", - "GeoJSON": "test.geojson", - "MapInfo File": "test.tab", -} - -DATE_EXAMPLE = "2018-03-25" -DATETIME_EXAMPLE = "2018-03-25T22:49:05" -TIME_EXAMPLE = "22:49:05" - -class TestDateFieldSupport: - def write_data(self, driver): - filename = DRIVER_FILENAME[driver] - temp_dir = tempfile.mkdtemp() - path = os.path.join(temp_dir, filename) - schema = { - "geometry": GEOMETRY_TYPE, - "properties": { - "date": "date", - } - } - records = [ - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "date": DATE_EXAMPLE, - } - }, - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "date": None, - } - }, - ] - with fiona.Env(), fiona.open(path, "w", driver=driver, schema=schema) as collection: - collection.writerecords(records) - - with fiona.Env(), fiona.open(path, "r") as collection: - schema = collection.schema - features = list(collection) - - shutil.rmtree(temp_dir) - - return schema, features - - def test_shapefile(self): - driver = "ESRI Shapefile" - schema, features = self.write_data(driver) - - assert schema["properties"]["date"] == "date" - assert features[0]["properties"]["date"] == DATE_EXAMPLE - assert features[1]["properties"]["date"] is None - - @requires_gpkg - def test_gpkg(self): - driver = "GPKG" - schema, features = self.write_data(driver) - - assert schema["properties"]["date"] == "date" - assert features[0]["properties"]["date"] == DATE_EXAMPLE - assert features[1]["properties"]["date"] is None - - def test_geojson(self): - # GDAL 1: date field silently converted to string - # GDAL 1: date string format uses / instead of - - driver = "GeoJSON" - schema, features = self.write_data(driver) - - if GDAL_MAJOR_VER >= 2: - assert schema["properties"]["date"] == "date" - assert features[0]["properties"]["date"] == DATE_EXAMPLE - else: - assert schema["properties"]["date"] == "str" - assert features[0]["properties"]["date"] == "2018/03/25" - assert features[1]["properties"]["date"] is None - - def test_mapinfo(self): - driver = "MapInfo File" - schema, features = self.write_data(driver) - - assert schema["properties"]["date"] == "date" - assert features[0]["properties"]["date"] == DATE_EXAMPLE - assert features[1]["properties"]["date"] is None - - -class TestDatetimeFieldSupport: - def write_data(self, driver): - filename = DRIVER_FILENAME[driver] - temp_dir = tempfile.mkdtemp() - path = os.path.join(temp_dir, filename) - schema = { - "geometry": GEOMETRY_TYPE, - "properties": { - "datetime": "datetime", - } - } - records = [ - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "datetime": DATETIME_EXAMPLE, - } - }, - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "datetime": None, - } - }, - ] - with fiona.Env(), fiona.open(path, "w", driver=driver, schema=schema) as collection: - collection.writerecords(records) - - with fiona.Env(), fiona.open(path, "r") as collection: - schema = collection.schema - features = list(collection) - - shutil.rmtree(temp_dir) - - return schema, features - - def test_shapefile(self): - # datetime is silently converted to date - driver = "ESRI Shapefile" - - with 
pytest.raises(DriverSupportError): - schema, features = self.write_data(driver) - - # assert schema["properties"]["datetime"] == "date" - # assert features[0]["properties"]["datetime"] == "2018-03-25" - # assert features[1]["properties"]["datetime"] is None - - @requires_gpkg - def test_gpkg(self): - # GDAL 1: datetime silently downgraded to date - driver = "GPKG" - - if GDAL_MAJOR_VER >= 2: - schema, features = self.write_data(driver) - assert schema["properties"]["datetime"] == "datetime" - assert features[0]["properties"]["datetime"] == DATETIME_EXAMPLE - assert features[1]["properties"]["datetime"] is None - else: - with pytest.raises(DriverSupportError): - schema, features = self.write_data(driver) - - def test_geojson(self): - # GDAL 1: datetime silently converted to string - # GDAL 1: date string format uses / instead of - - driver = "GeoJSON" - schema, features = self.write_data(driver) - - if GDAL_MAJOR_VER >= 2: - assert schema["properties"]["datetime"] == "datetime" - assert features[0]["properties"]["datetime"] == DATETIME_EXAMPLE - else: - assert schema["properties"]["datetime"] == "str" - assert features[0]["properties"]["datetime"] == "2018/03/25 22:49:05" - assert features[1]["properties"]["datetime"] is None - - def test_mapinfo(self): - driver = "MapInfo File" - schema, features = self.write_data(driver) - - assert schema["properties"]["datetime"] == "datetime" - assert features[0]["properties"]["datetime"] == DATETIME_EXAMPLE - assert features[1]["properties"]["datetime"] is None - - -class TestTimeFieldSupport: - def write_data(self, driver): - filename = DRIVER_FILENAME[driver] - temp_dir = tempfile.mkdtemp() - path = os.path.join(temp_dir, filename) - schema = { - "geometry": GEOMETRY_TYPE, - "properties": { - "time": "time", - } - } - records = [ - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "time": TIME_EXAMPLE, - } - }, - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "time": None, - } - }, - ] - with fiona.Env(), fiona.open(path, "w", driver=driver, schema=schema) as collection: - collection.writerecords(records) - - with fiona.Env(), fiona.open(path, "r") as collection: - schema = collection.schema - features = list(collection) - - shutil.rmtree(temp_dir) - - return schema, features - - def test_shapefile(self): - # no support for time fields - driver = "ESRI Shapefile" - with pytest.raises(DriverSupportError): - self.write_data(driver) - - @requires_gpkg - def test_gpkg(self): - # GDAL 2: time field is silently converted to string - # GDAL 1: time field dropped completely - driver = "GPKG" - - with pytest.raises(DriverSupportError): - schema, features = self.write_data(driver) - - # if GDAL_MAJOR_VER >= 2: - # assert schema["properties"]["time"] == "str" - # assert features[0]["properties"]["time"] == TIME_EXAMPLE - # assert features[1]["properties"]["time"] is None - # else: - # assert "time" not in schema["properties"] - - def test_geojson(self): - # GDAL 1: time field silently converted to string - driver = "GeoJSON" - schema, features = self.write_data(driver) - - if GDAL_MAJOR_VER >= 2: - assert schema["properties"]["time"] == "time" - else: - assert schema["properties"]["time"] == "str" - assert features[0]["properties"]["time"] == TIME_EXAMPLE - assert features[1]["properties"]["time"] is None - - def test_mapinfo(self): - # GDAL 2: null time is converted to 00:00:00 (regression?) 
- driver = "MapInfo File" - schema, features = self.write_data(driver) - - assert schema["properties"]["time"] == "time" - assert features[0]["properties"]["time"] == TIME_EXAMPLE - if GDAL_MAJOR_VER >= 2: - assert features[1]["properties"]["time"] == "00:00:00" - else: - assert features[1]["properties"]["time"] is None +from fiona.rfc3339 import parse_time, parse_datetime +from .conftest import get_temp_filename +from fiona.env import GDALVersion +import datetime +from fiona.drvsupport import (supported_drivers, driver_mode_mingdal, _driver_converts_field_type_silently_to_str, + _driver_supports_field, _driver_converts_to_str, _driver_supports_timezones, + _driver_supports_milliseconds, _driver_supports_mode) +import pytz +from pytz import timezone + +gdal_version = GDALVersion.runtime() + + +def get_schema(driver, field_type): + if driver == 'GPX': + return {'properties': OrderedDict([('ele', 'float'), + ('time', field_type)]), + 'geometry': 'Point'} + if driver == 'GPSTrackMaker': + return { + 'properties': OrderedDict([('name', 'str'), ('comment', 'str'), ('icon', 'int'), ('time', field_type)]), + 'geometry': 'Point'} + if driver == 'CSV': + return {"properties": {"datefield": field_type}} + return {"geometry": "Point", + "properties": {"datefield": field_type}} + + +def get_records(driver, values): + if driver == 'GPX': + return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": {'ele': 0, "time": val}} for val in values] + if driver == 'GPSTrackMaker': + return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": OrderedDict([('name', ''), ('comment', ''), ('icon', 48), ('time', val)])} for + val in values] + if driver == 'CSV': + return [{"properties": {"datefield": val}} for val in values] + + return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": {"datefield": val}} for val in values] + + +def get_schema_field(driver, schema): + if driver in {'GPX', 'GPSTrackMaker'}: + return schema["properties"]["time"] + return schema["properties"]["datefield"] + + +def get_field(driver, f): + if driver in {'GPX', 'GPSTrackMaker'}: + return f["properties"]["time"] + return f['properties']['datefield'] + + +class TZ(datetime.tzinfo): + + def __init__(self, minutes): + self.minutes = minutes + + def utcoffset(self, dt): + return datetime.timedelta(minutes=self.minutes) + + +def generate_testdata(field_type, driver): + """ Generate test cases for test_datefield + + Each test case has the format [(in_value1, true_value as datetime.*object), + (in_value2, true_value as datetime.*object), ...] 
+ """ + + # Test data for 'date' data type + if field_type == 'date': + return [("2018-03-25", datetime.date(2018, 3, 25)), + (datetime.date(2018, 3, 25), datetime.date(2018, 3, 25))] + + # Test data for 'datetime' data type + if field_type == 'datetime': + return [("2018-03-25T22:49:05", datetime.datetime(2018, 3, 25, 22, 49, 5)), + (datetime.datetime(2018, 3, 25, 22, 49, 5), datetime.datetime(2018, 3, 25, 22, 49, 5)), + ("2018-03-25T22:49:05.23", datetime.datetime(2018, 3, 25, 22, 49, 5, 230000)), + (datetime.datetime(2018, 3, 25, 22, 49, 5, 230000), datetime.datetime(2018, 3, 25, 22, 49, 5, 230000)), + ("2018-03-25T22:49:05.123456", datetime.datetime(2018, 3, 25, 22, 49, 5, 123000)), + (datetime.datetime(2018, 3, 25, 22, 49, 5, 123456), datetime.datetime(2018, 3, 25, 22, 49, 5, 123000)), + ("2018-03-25T22:49:05+01:30", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90))), + ("2018-03-25T22:49:05-01:30", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90))), + (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90))), + (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90))), + (datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')), + datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))), + (datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain')), + datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain'))), + (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15))), + (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), + ("2018-03-25T22:49:05-23:45", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), + ("2018-03-25T22:49:05+23:45", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)))] + + # Test data for 'time' data type + elif field_type == 'time': + return [("22:49:05", datetime.time(22, 49, 5)), + (datetime.time(22, 49, 5), datetime.time(22, 49, 5)), + ("22:49:05.23", datetime.time(22, 49, 5, 230000)), + (datetime.time(22, 49, 5, 230000), datetime.time(22, 49, 5, 230000)), + ("22:49:05.123456", datetime.time(22, 49, 5, 123000)), + (datetime.time(22, 49, 5, 123456), datetime.time(22, 49, 5, 123000)), + ("22:49:05+01:30", datetime.time(22, 49, 5, tzinfo=TZ(90))), + ("22:49:05-01:30", datetime.time(22, 49, 5, tzinfo=TZ(-90))), + (datetime.time(22, 49, 5, tzinfo=TZ(90)), datetime.time(22, 49, 5, tzinfo=TZ(90))), + (datetime.time(22, 49, 5, tzinfo=TZ(-90)), datetime.time(22, 49, 5, tzinfo=TZ(-90))), + (datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)), + datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15))), + (datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), + datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), + ("22:49:05-23:45", datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), + ("22:49:05+23:45", datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)))] + + +def compare_datetimes_utc(d1, d2): + """ Test if two time objects are the same. 
Naive times are assumed to be UTC""" + + if d1.tzinfo is None: + d1 = d1.replace(tzinfo=TZ(0)) + + if d2.tzinfo is None: + d2 = d2.replace(tzinfo=TZ(0)) + + return d1 == d2 + + +def test_compare_datetimes_utc(): + """ Test compare_datetimes_utc """ + d1 = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(60)) + d2 = datetime.datetime(2020, 1, 21, 11, 30, 0, tzinfo=TZ(0)) + assert d1 == d2 + assert compare_datetimes_utc(d1, d2) + + d1 = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-60)) + d2 = datetime.datetime(2020, 1, 21, 11, 30, 0, tzinfo=TZ(0)) + assert not d1 == d2 + assert not compare_datetimes_utc(d1, d2) + + d1 = datetime.datetime(2020, 1, 21, 13, 0, 0, tzinfo=TZ(60)) + d2 = datetime.datetime(2020, 1, 21, 5, 0, 0, tzinfo=TZ(-60 * 7)) + assert d1 == d2 + assert compare_datetimes_utc(d1, d2) + + d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')) + d2 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc) + assert d1 == d2 + assert compare_datetimes_utc(d1, d2) + + d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')) + d2 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain')) + assert d1 == d2 + assert compare_datetimes_utc(d1, d2) + + d1 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')) + d2 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain')) + assert d1 == d2 + assert compare_datetimes_utc(d1, d2) + + +def convert_time_to_utc(d): + """ Convert datetime.time object to UTC""" + d = datetime.datetime(1900, 1, 1, d.hour, d.minute, d.second, d.microsecond, d.tzinfo) + d -= d.utcoffset() + return d.time() + + +def compare_times_utc(d1, d2): + """ Test if two datetime.time objects with fixed timezones have the same UTC time""" + if d1.tzinfo is not None: + d1 = convert_time_to_utc(d1) + + if d2.tzinfo is not None: + d2 = convert_time_to_utc(d2) + + return d1.replace(tzinfo=None) == d2.replace(tzinfo=None) + + +def test_compare_times_utc(): + """ + Test compare_times_utc + """ + d1 = datetime.time(12, 30, 0, tzinfo=TZ(60)) + d2 = datetime.time(11, 30, 0, tzinfo=TZ(0)) + assert compare_times_utc(d1, d2) + + d1 = datetime.time(12, 30, 0, tzinfo=TZ(-60)) + d2 = datetime.time(11, 30, 0, tzinfo=TZ(0)) + assert not compare_times_utc(d1, d2) + + d1 = datetime.time(13, 0, 0, tzinfo=TZ(60)) + d2 = datetime.time(5, 0, 0, tzinfo=TZ(-60 * 7)) + assert compare_times_utc(d1, d2) + + d1 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('MET')).timetz() + d2 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('EST')).timetz() + assert compare_times_utc(d1, d2) + + +def get_tz_offset(d): + """ Return a timezone (sign, hours, minutes) tuple + + E.g.: for '2020-01-21T12:30:00+01:30' ('+', 1, 30) is returned + + """ + offset_minutes = d.utcoffset().total_seconds() / 60 + if offset_minutes < 0: + sign = "-" + else: + sign = "+" + hours = int(abs(offset_minutes) / 60) + minutes = int(abs(offset_minutes) % 60) + return sign, hours, minutes + + +def test_get_tz_offset(): + """ Test get_tz_offset""" + d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(90)) + assert get_tz_offset(d) == ('+', 1, 30) + + d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-90)) + assert get_tz_offset(d) == ('-', 1, 30) + + d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(60 * 24 - 15)) + assert get_tz_offset(d) == ('+',
23, 45) + + d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-60 * 24 + 15)) + assert get_tz_offset(d) == ('-', 23, 45) + + +def generate_testcases(): + """ Generate test cases for drivers that support datefields, convert datefields to string, or do not support + datefields""" + _test_cases_datefield = [] + _test_cases_datefield_to_str = [] + _test_cases_datefield_not_supported = [] + + for field_type in ['time', 'datetime', 'date']: + # Select only drivers that are capable of writing fields + for driver, raw in supported_drivers.items(): + if _driver_supports_mode(driver, 'w'): + if _driver_supports_field(driver, field_type): + if _driver_converts_field_type_silently_to_str(driver, field_type): + _test_cases_datefield_to_str.append((driver, field_type)) + else: + _test_cases_datefield.append((driver, field_type)) + else: + _test_cases_datefield_not_supported.append((driver, field_type)) + + return _test_cases_datefield, _test_cases_datefield_to_str, _test_cases_datefield_not_supported + + +test_cases_datefield, test_cases_datefield_to_str, test_cases_datefield_not_supported = generate_testcases() + + +@pytest.mark.parametrize("driver, field_type", test_cases_datefield) +def test_datefield(tmpdir, driver, field_type): + """ + Test date, time, datetime field types. + """ + + def _validate(val, val_exp, field_type, driver): + + if field_type == 'date': + return val == val_exp.isoformat() + + elif field_type == 'datetime': + + # some drivers do not support timezones. In this case, Fiona converts datetime fields with a timezone other + # than UTC to UTC. Thus, both the datetime read by Fiona, as well as the expected value, are first converted to + # UTC before being compared. + + # Milliseconds + if _driver_supports_milliseconds(driver): + y, m, d, hh, mm, ss, ms, tz = parse_datetime(val) + if tz is not None: + tz = TZ(tz) + val_d = datetime.datetime(y, m, d, hh, mm, ss, ms, tz) + return compare_datetimes_utc(val_d, val_exp) + else: + # No Milliseconds + y, m, d, hh, mm, ss, ms, tz = parse_datetime(val) + if tz is not None: + tz = TZ(tz) + val_d = datetime.datetime(y, m, d, hh, mm, ss, ms, tz) + return compare_datetimes_utc(val_d, val_exp.replace(microsecond=0)) + + elif field_type == 'time': + + # some drivers do not support timezones. In this case, Fiona converts time fields with a timezone other + # than UTC to UTC. Thus, both the time read by Fiona, as well as the expected value, are first converted to UTC + # before being compared.
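+ # E.g. parse_time("10:11:12+01:30") returns (0, 0, 0, 10, 11, 12, 0.0, 90): ms is in microseconds and tz is + # the UTC offset in minutes, so TZ(tz) below rebuilds the fixed offset before the UTC comparison.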
+ + # Milliseconds + if _driver_supports_milliseconds(driver): + y, m, d, hh, mm, ss, ms, tz = parse_time(val) + if tz is not None: + tz = TZ(tz) + val_d = datetime.time(hh, mm, ss, ms, tz) + return compare_times_utc(val_d, val_exp) + else: + # No Milliseconds + y, m, d, hh, mm, ss, ms, tz = parse_time(val) + if tz is not None: + tz = TZ(tz) + val_d = datetime.time(hh, mm, ss, ms, tz) + return compare_times_utc(val_d, val_exp.replace(microsecond=0)) + return False + + schema = get_schema(driver, field_type) + path = str(tmpdir.join(get_temp_filename(driver))) + values_in, values_exp = zip(*generate_testdata(field_type, driver)) + records = get_records(driver, values_in) + + with fiona.open(path, 'w', + driver=driver, + schema=schema) as c: + c.writerecords(records) + + with fiona.open(path, 'r') as c: + assert get_schema_field(driver, c.schema) == field_type + items = [get_field(driver, f) for f in c] + assert len(items) == len(values_in) + for val, val_exp in zip(items, values_exp): + assert _validate(val, val_exp, field_type, driver), \ + "{} does not match {}".format(val, val_exp.isoformat()) + + +@pytest.mark.parametrize("driver, field_type", test_cases_datefield_to_str) +def test_datefield_driver_converts_to_string(tmpdir, driver, field_type): + """ + Test handling of date, time, datetime for drivers that convert these types to string. + + As the formatting can be arbitrary, we only test whether the elements of a date / datetime / time object + are included in the string, e.g. for the PCIDSK driver, whether hour 22 from datetime.time(22, 49, 5) appears in + '0000/00/00 22:49:05'. + + """ + + def _validate(val, val_exp, field_type, driver): + + if field_type == 'date': + if (str(val_exp.year) in val and + str(val_exp.month) in val and + str(val_exp.day) in val): + return True + elif field_type == 'datetime': + + if not _driver_supports_timezones(driver, field_type) and val_exp.utcoffset() is not None: + # Convert the expected datetime to naive UTC here; convert_time_to_utc() handles only + # datetime.time objects and would drop the date part. + val_exp = (val_exp - val_exp.utcoffset()).replace(tzinfo=None) + + # datetime fields can, depending on the driver, support: + # - Timezones + # - Milliseconds, respectively Microseconds + + # No timezone + if val_exp.utcoffset() is None: + # No Milliseconds + if not _driver_supports_milliseconds(driver): + if (str(val_exp.year) in val and + str(val_exp.month) in val and + str(val_exp.day) in val and + str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val): + return True + else: + # Microseconds + if (str(val_exp.year) in val and + str(val_exp.month) in val and + str(val_exp.day) in val and + str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(val_exp.microsecond) in val): + return True + # Milliseconds + elif (str(val_exp.year) in val and + str(val_exp.month) in val and + str(val_exp.day) in val and + str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(int(val_exp.microsecond / 1000)) in val): + return True + # With timezone + else: + sign, hours, minutes = get_tz_offset(val_exp) + if minutes > 0: + tz = "{sign}{hours:02d}{minutes:02d}".format(sign=sign, + hours=int(hours), + minutes=int(minutes)) + else: + tz = "{sign}{hours:02d}".format(sign=sign, hours=int(hours)) + # No Milliseconds + if not _driver_supports_milliseconds(driver): + if (str(val_exp.year) in val and + str(val_exp.month) in val and + str(val_exp.day) in val and + str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + tz in val): + return True + else: + # Microseconds + if
(str(val_exp.year) in val and + str(val_exp.month) in val and + str(val_exp.day) in val and + str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(val_exp.microsecond) in val and + tz in val): + return True + # Milliseconds + elif (str(val_exp.year) in val and + str(val_exp.month) in val and + str(val_exp.day) in val and + str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(int(val_exp.microsecond / 1000)) in val and + tz in val): + return True + + elif field_type == 'time': + + # time fields can, depending on the driver, support: + # - Timezones + # - Milliseconds, respectively Microseconds + + if not _driver_supports_timezones(driver, field_type) and val_exp.utcoffset() is not None: + val_exp = convert_time_to_utc(val_exp) + + # No timezone + if val_exp.utcoffset() is None: + # No Milliseconds + if not _driver_supports_milliseconds(driver): + if (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val): + return True + else: + # Microseconds + if (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(val_exp.microsecond) in val): + return True + # Milliseconds + elif (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(int(val_exp.microsecond / 1000)) in val): + return True + # With timezone + else: + + sign, hours, minutes = get_tz_offset(val_exp) + if minutes > 0: + tz = "{sign}{hours:02d}{minutes:02d}".format(sign=sign, + hours=int(hours), + minutes=int(minutes)) + else: + tz = "{sign}{hours:02d}".format(sign=sign, hours=int(hours)) + # No Milliseconds + if not _driver_supports_milliseconds(driver): + if (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + tz in val): + return True + else: + # Microseconds + if (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(val_exp.microsecond) in val and + tz in val): + return True + # Milliseconds + elif (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(int(val_exp.microsecond / 1000)) in val + and tz in val): + return True + return False + + schema = get_schema(driver, field_type) + path = str(tmpdir.join(get_temp_filename(driver))) + values_in, values_exp = zip(*generate_testdata(field_type, driver)) + records = get_records(driver, values_exp) + + with pytest.warns(UserWarning) as record: + with fiona.open(path, 'w', + driver=driver, + schema=schema) as c: + c.writerecords(records) + assert len(record) == 1 + assert "silently converts" in record[0].message.args[0] + + with fiona.open(path, 'r') as c: + assert get_schema_field(driver, c.schema) == 'str' + items = [get_field(driver, f) for f in c] + assert len(items) == len(values_in) + for val, val_exp in zip(items, values_exp): + assert _validate(val, val_exp, field_type, driver), \ + "{} does not match {}".format(val, val_exp.isoformat()) + + +@pytest.mark.filterwarnings('ignore:.*driver silently converts *:UserWarning') +@pytest.mark.parametrize("driver,field_type", test_cases_datefield + test_cases_datefield_to_str) +def test_datefield_null(tmpdir, driver, field_type): + """ + Test handling of null values for date, time, datetime types for write capable drivers + """ + + def _validate(val, val_exp, field_type, driver): + if (driver == 'MapInfo File' and field_type == 'time' and + calc_gdal_version_num(2, 0, 0) <= 
get_gdal_version_num() < calc_gdal_version_num(3, 1, 1)): + return val == '00:00:00' + if val is None or val == '': + return True + return False + + schema = get_schema(driver, field_type) + path = str(tmpdir.join(get_temp_filename(driver))) + values_in = [None] + records = get_records(driver, values_in) + + with fiona.open(path, 'w', + driver=driver, + schema=schema) as c: + c.writerecords(records) + + with fiona.open(path, 'r') as c: + items = [get_field(driver, f) for f in c] + assert len(items) == 1 + + assert _validate(items[0], None, field_type, driver), \ + "{} does not match {}".format(items[0], None) + + +@pytest.mark.parametrize("driver, field_type", test_cases_datefield_not_supported) +def test_datetime_field_unsupported(tmpdir, driver, field_type): + """ Test if DriverSupportError is raised for unsupported field_types""" + schema = get_schema(driver, field_type) + path = str(tmpdir.join(get_temp_filename(driver))) + values_in, values_out = zip(*generate_testdata(field_type, driver)) + records = get_records(driver, values_in) + + with pytest.raises(DriverSupportError): + with fiona.open(path, 'w', + driver=driver, + schema=schema) as c: + c.writerecords(records) + + +@pytest.mark.parametrize("driver, field_type", test_cases_datefield_not_supported) +def test_datetime_field_type_marked_not_supported_is_not_supported(tmpdir, driver, field_type, monkeypatch): + """ Test if a date/datetime/time field type marked as not supported is really not supported + + Warning: Success of this test does not necessarily mean that a field is not supported. E.g. errors can occur due to + special schema requirements of drivers. This test only covers the standard case. + + """ + + if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): + pytest.skip("BNA driver segfaults with gdal 1.11") + + monkeypatch.delitem(fiona.drvsupport._driver_field_type_unsupported[field_type], driver) + + schema = get_schema(driver, field_type) + path = str(tmpdir.join(get_temp_filename(driver))) + values_in, values_out = zip(*generate_testdata(field_type, driver)) + records = get_records(driver, values_in) + + is_good = True + try: + with fiona.open(path, 'w', + driver=driver, + schema=schema) as c: + c.writerecords(records) + + with fiona.open(path, 'r') as c: + if not get_schema_field(driver, c.schema) == field_type: + is_good = False + items = [get_field(driver, f) for f in c] + for val_in, val_out in zip(items, values_out): + if not val_in == val_out: + is_good = False + except: + is_good = False + assert not is_good + + +def generate_tostr_testcases(): + """ Flatten driver_converts_to_str to a list of (field_type, driver) tuples""" + cases = [] + for field_type in _driver_converts_to_str: + for driver in _driver_converts_to_str[field_type]: + driver_supported = driver in supported_drivers + driver_can_write = _driver_supports_mode(driver, 'w') + field_supported = _driver_supports_field(driver, field_type) + converts_to_str = _driver_converts_field_type_silently_to_str(driver, field_type) + if driver_supported and driver_can_write and converts_to_str and field_supported: + cases.append((field_type, driver)) + return cases + + +@pytest.mark.filterwarnings('ignore:.*driver silently converts *:UserWarning') +@pytest.mark.parametrize("driver,field_type", test_cases_datefield_to_str) +def test_driver_marked_as_silently_converts_to_str_converts_silently_to_str(tmpdir, driver, field_type, monkeypatch): + """ Test if a driver and field_type marked in fiona.drvsupport.driver_converts_to_str as converting to str
really + silently converts to str + + If this test fails, it should be considered to replace the respective None value in + fiona.drvsupport.driver_converts_to_str with a GDALVersion(major, minor) value. + """ + + monkeypatch.delitem(fiona.drvsupport._driver_converts_to_str[field_type], driver) + + schema = get_schema(driver, field_type) + path = str(tmpdir.join(get_temp_filename(driver))) + values_in, values_out = zip(*generate_testdata(field_type, driver)) + records = get_records(driver, values_in) + + with fiona.open(path, 'w', + driver=driver, + schema=schema) as c: + c.writerecords(records) + + with fiona.open(path, 'r') as c: + assert get_schema_field(driver, c.schema) == 'str' + + +def test_read_timezone_geojson(path_test_tz_geojson): + """Test if timezones are read correctly""" + with fiona.open(path_test_tz_geojson) as c: + items = list(c) + assert items[0]['properties']['test'] == '2015-04-22T00:00:00+07:00' diff -Nru fiona-1.8.13/tests/test_driver_options.py fiona-1.8.20/tests/test_driver_options.py --- fiona-1.8.13/tests/test_driver_options.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.8.20/tests/test_driver_options.py 2021-05-31 21:29:33.000000000 +0000 @@ -0,0 +1,30 @@ +import os +import tempfile +from collections import OrderedDict +import glob +import fiona +from tests.conftest import get_temp_filename, requires_gdal2 + + +@requires_gdal2 +def test_gml_format_option(): + """ Test GML dataset creation option FORMAT (see https://github.com/Toblerity/Fiona/issues/968)""" + + schema = {'geometry': 'Point', 'properties': OrderedDict([('position', 'int')])} + records = [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {'position': i}} for i in + range(10)] + + tmpdir = tempfile.mkdtemp() + fpath = os.path.join(tmpdir, get_temp_filename('GML')) + + with fiona.open(fpath, + 'w', + driver="GML", + schema=schema, + FORMAT="GML3") as out: + out.writerecords(records) + + xsd_path = glob.glob(os.path.join(tmpdir, "*.xsd"))[0] + with open(xsd_path) as f: + xsd = f.read() + assert "http://schemas.opengis.net/gml/3.1.1" in xsd diff -Nru fiona-1.8.13/tests/test_drvsupport.py fiona-1.8.20/tests/test_drvsupport.py --- fiona-1.8.13/tests/test_drvsupport.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_drvsupport.py 2021-05-31 21:29:33.000000000 +0000 @@ -1,10 +1,12 @@ """Tests of driver support""" import pytest - -from .conftest import requires_gdal24 - +from .conftest import requires_gdal24, get_temp_filename +from fiona.drvsupport import supported_drivers, driver_mode_mingdal import fiona.drvsupport +from fiona.env import GDALVersion +from fiona._env import calc_gdal_version_num, get_gdal_version_num +from fiona.errors import DriverError @requires_gdal24 @@ -12,3 +14,286 @@ def test_geojsonseq(format): """Format is available""" assert format in fiona.drvsupport.supported_drivers.keys() + + +@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if 'w' in raw]) +def test_write_or_driver_error(tmpdir, driver, testdata_generator): + """ + Test if write mode works. 
+ + """ + + if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): + pytest.skip("BNA driver segfaults with gdal 1.11") + + schema, crs, records1, _, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), []) + path = str(tmpdir.join(get_temp_filename(driver))) + + if (driver in driver_mode_mingdal['w'] and + get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal['w'][driver])): + + # Test if DriverError is raised for gdal < driver_mode_mingdal + with pytest.raises(DriverError): + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + c.writerecords(records1) + + else: + # Test if we can write + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + + c.writerecords(records1) + + if driver in {'FileGDB', 'OpenFileGDB'}: + open_driver = driver + else: + open_driver = None + with fiona.open(path, driver=open_driver) as c: + assert c.driver == driver + items = list(c) + assert len(items) == len(records1) + for val_in, val_out in zip(records1, items): + assert test_equal(driver, val_in, val_out), "in: {val_in}, out: {val_out}".format(val_in=val_in, + val_out=val_out) + + +@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys()]) +def test_write_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, testdata_generator, monkeypatch): + """ + Test if driver really can't write for gdal < driver_mode_mingdal + + If this test fails, it should be considered to update driver_mode_mingdal in drvsupport.py. + + """ + + if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): + pytest.skip("BNA driver segfaults with gdal 1.11") + if (driver == 'FlatGeobuf' and + calc_gdal_version_num(3, 1, 0) <= get_gdal_version_num() < calc_gdal_version_num(3, 1, 3)): + pytest.skip("See https://github.com/Toblerity/Fiona/pull/924") + + schema, crs, records1, _, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), []) + path = str(tmpdir.join(get_temp_filename(driver))) + + if (driver in driver_mode_mingdal['w'] and + get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal['w'][driver])): + monkeypatch.delitem(fiona.drvsupport.driver_mode_mingdal['w'], driver) + + with pytest.raises(Exception): + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + c.writerecords(records1) + + +@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if 'a' in raw]) +def test_append_or_driver_error(tmpdir, testdata_generator, driver): + """ Test if driver supports append mode. + + Some drivers only allow a specific schema. These drivers can be excluded by adding them to blacklist_append_drivers.
+ + """ + if driver == "DGN": + pytest.xfail("DGN schema has changed") + + if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): + pytest.skip("BNA driver segfaults with gdal 1.11") + + path = str(tmpdir.join(get_temp_filename(driver))) + schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 5), range(5, 10)) + + # If driver is not able to write, we cannot test append + if (driver in driver_mode_mingdal['w'] + and get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal['w'][driver])): + return + + # Create test file to append to + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + + c.writerecords(records1) + + if (driver in driver_mode_mingdal['a'] + and get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal['a'][driver])): + + # Test if DriverError is raised for gdal < driver_mode_mingdal + with pytest.raises(DriverError): + with fiona.open(path, 'a', + driver=driver) as c: + c.writerecords(records2) + + else: + # Test if we can append + with fiona.open(path, 'a', + driver=driver) as c: + c.writerecords(records2) + + if driver in {'FileGDB', 'OpenFileGDB'}: + open_driver = driver + else: + open_driver = None + with fiona.open(path, driver=open_driver) as c: + assert c.driver == driver + items = list(c) + assert len(items) == len(records1) + len(records2) + for val_in, val_out in zip(records1 + records2, items): + assert test_equal(driver, val_in, val_out), "in: {val_in}, out: {val_out}".format(val_in=val_in, + val_out=val_out) + + +@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['a'].keys() + if driver in supported_drivers]) +def test_append_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, testdata_generator, monkeypatch): + """ Test if driver really cannot append for gdal < driver_mode_mingdal. + + If this test fails, it should be considered to update driver_mode_mingdal in drvsupport.py.
+ + """ + + if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): + pytest.skip("BNA driver segfaults with gdal 1.11") + + path = str(tmpdir.join(get_temp_filename(driver))) + schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 5), range(5, 10)) + + # If driver is not able to write, we cannot test append + if (driver in driver_mode_mingdal['w'] + and get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal['w'][driver])): + return + + # Create test file to append to + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + + c.writerecords(records1) + + if (driver in driver_mode_mingdal['a'] + and get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal['a'][driver])): + # Test if driver really can't append for gdal < driver_mode_mingdal + + monkeypatch.delitem(fiona.drvsupport.driver_mode_mingdal['a'], driver) + + with pytest.raises(Exception): + with fiona.open(path, 'a', + driver=driver) as c: + c.writerecords(records2) + + if driver in {'FileGDB', 'OpenFileGDB'}: + open_driver = driver + else: + open_driver = None + with fiona.open(path, driver=open_driver) as c: + assert c.driver == driver + items = list(c) + assert len(items) == len(records1) + len(records2) + for val_in, val_out in zip(records1 + records2, items): + assert test_equal(driver, val_in, val_out) + + +@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if raw == 'r']) +def test_no_write_driver_cannot_write(tmpdir, driver, testdata_generator, monkeypatch): + """Test if read only driver cannot write + + If this test fails, it should be considered to enable write support for the respective driver in drvsupport.py. + + """ + + monkeypatch.setitem(fiona.drvsupport.supported_drivers, driver, 'rw') + schema, crs, records1, _, test_equal, create_kwargs = testdata_generator(driver, range(0, 5), []) + + if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): + pytest.skip("BNA driver segfaults with gdal 1.11") + + if driver == "FlatGeobuf": + pytest.xfail("FlatGeobuf doesn't raise an error but doesn't have write support") + + path = str(tmpdir.join(get_temp_filename(driver))) + + with pytest.raises(Exception): + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + c.writerecords(records1) + + +@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if + 'w' in raw and 'a' not in raw]) +def test_no_append_driver_cannot_append(tmpdir, driver, testdata_generator, monkeypatch): + """ + Test if a driver that supports write and not append cannot also append + + If this test fails, it should be considered to enable append support for the respective driver in drvsupport.py. 
+ + """ + + monkeypatch.setitem(fiona.drvsupport.supported_drivers, driver, 'raw') + + if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): + pytest.skip("BNA driver segfaults with gdal 1.11") + + path = str(tmpdir.join(get_temp_filename(driver))) + schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 5), range(5, 10)) + + # If driver is not able to write, we cannot test append + if (driver in driver_mode_mingdal['w'] and + get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal['w'][driver])): + return + + # Create test file to append to + with fiona.open(path, 'w', + driver=driver, + crs=crs, + schema=schema, + **create_kwargs) as c: + + c.writerecords(records1) + + is_good = True + try: + with fiona.open(path, 'a', + driver=driver) as c: + c.writerecords(records2) + + if driver in {'FileGDB', 'OpenFileGDB'}: + open_driver = driver + else: + open_driver = None + with fiona.open(path, driver=open_driver) as c: + assert c.driver == driver + items = list(c) + is_good = is_good and len(items) == len(records1) + len(records2) + for val_in, val_out in zip(records1 + records2, items): + is_good = is_good and test_equal(driver, val_in, val_out) + except: + is_good = False + + assert not is_good + + +def test_mingdal_drivers_are_supported(): + """ + Test if mode and driver is enabled in supported_drivers + """ + + for mode in driver_mode_mingdal: + for driver in driver_mode_mingdal[mode]: + # we cannot test drivers that are not present in the gdal installation + if driver in supported_drivers: + assert mode in supported_drivers[driver] diff -Nru fiona-1.8.13/tests/test__env.py fiona-1.8.20/tests/test__env.py --- fiona-1.8.13/tests/test__env.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test__env.py 2021-05-31 21:29:33.000000000 +0000 @@ -33,13 +33,8 @@ @pytest.fixture def mock_debian(tmpdir): """A fake Debian multi-install system""" - tmpdir.ensure("share/gdal/1.11/header.dxf") - tmpdir.ensure("share/gdal/2.0/header.dxf") - tmpdir.ensure("share/gdal/2.1/header.dxf") - tmpdir.ensure("share/gdal/2.2/header.dxf") - tmpdir.ensure("share/gdal/2.3/header.dxf") - tmpdir.ensure("share/gdal/2.4/header.dxf") - tmpdir.ensure("share/gdal/3.0/header.dxf") + tmpdir.ensure("share/gdal/{}.{}/header.dxf".format(gdal_version.major, + gdal_version.minor)) tmpdir.ensure("share/proj/epsg") return tmpdir diff -Nru fiona-1.8.13/tests/test_fio_cat.py fiona-1.8.20/tests/test_fio_cat.py --- fiona-1.8.13/tests/test_fio_cat.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_fio_cat.py 2021-05-31 21:29:33.000000000 +0000 @@ -84,3 +84,13 @@ 'cat', 'zip://{}'.format(path_coutwildrnp_zip)]) assert result.exit_code == 0 assert result.output.count('"Feature"') == 67 + + +def test_dst_crs_epsg3857(path_coutwildrnp_shp): + """Confirm fix of issue #952""" + runner = CliRunner() + result = runner.invoke( + main_group, ["cat", "--dst-crs", "EPSG:3857", path_coutwildrnp_shp] + ) + assert result.exit_code == 0 + assert result.output.count('"Feature"') == 67 diff -Nru fiona-1.8.13/tests/test_fio_load.py fiona-1.8.20/tests/test_fio_load.py --- fiona-1.8.13/tests/test_fio_load.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_fio_load.py 2021-05-31 21:29:33.000000000 +0000 @@ -4,7 +4,6 @@ import json import os import shutil -import sys import pytest @@ -34,7 +33,6 @@ assert len(fiona.open(tmpfile)) == 2 - def test_seq_rs(feature_seq_pp_rs, tmpdir, runner): tmpfile = str(tmpdir.mkdir('tests').join('test_seq_rs.shp')) result 
= runner.invoke( @@ -123,3 +121,15 @@ finally: shutil.rmtree(outdir) + + +@pytest.mark.iconv +def test_creation_options(tmpdir, runner, feature_seq): + tmpfile = str(tmpdir.mkdir("tests").join("test.shp")) + result = runner.invoke( + main_group, + ["load", "-f", "Shapefile", "--co", "ENCODING=LATIN1", tmpfile], + feature_seq, + ) + assert result.exit_code == 0 + assert tmpdir.join("tests/test.cpg").read() == "LATIN1" diff -Nru fiona-1.8.13/tests/test_memoryfile.py fiona-1.8.20/tests/test_memoryfile.py --- fiona-1.8.13/tests/test_memoryfile.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_memoryfile.py 2021-05-31 21:29:33.000000000 +0000 @@ -1,12 +1,15 @@ """Tests of MemoryFile and ZippedMemoryFile""" +from collections import OrderedDict from io import BytesIO + import pytest -import uuid import fiona from fiona.io import MemoryFile, ZipMemoryFile +from .conftest import requires_gdal2 + @pytest.fixture(scope='session') def profile_first_coutwildrnp_shp(path_coutwildrnp_shp): @@ -14,11 +17,34 @@ return col.profile, next(iter(col)) -def test_memoryfile(path_coutwildrnp_json): - """In-memory GeoJSON file can be read""" +@pytest.fixture(scope='session') +def data_coutwildrnp_json(path_coutwildrnp_json): with open(path_coutwildrnp_json, 'rb') as f: - data = f.read() - with MemoryFile(data) as memfile: + return f.read() + + +def test_memoryfile_ext(): + """File extensions are handled""" + assert MemoryFile(ext=".geojson").name.endswith(".geojson") + + +def test_memoryfile_bare_ext(): + """File extensions without a leading . are handled""" + assert MemoryFile(ext="geojson").name.endswith(".geojson") + + +def test_memoryfile_init(data_coutwildrnp_json): + """In-memory GeoJSON file can be read""" + with MemoryFile(data_coutwildrnp_json) as memfile: + with memfile.open() as collection: + assert len(collection) == 67 + + +def test_memoryfile_incr_init(data_coutwildrnp_json): + """In-memory GeoJSON file written in 2 parts can be read""" + with MemoryFile() as memfile: + memfile.write(data_coutwildrnp_json[:1000]) + memfile.write(data_coutwildrnp_json[1000:]) with memfile.open() as collection: assert len(collection) == 67 @@ -30,8 +56,33 @@ assert len(collection) == 67 +def test_zip_memoryfile_infer_layer_name(bytes_coutwildrnp_zip): + """In-memory zipped Shapefile can be read with the default layer""" + with ZipMemoryFile(bytes_coutwildrnp_zip) as memfile: + with memfile.open() as collection: + assert len(collection) == 67 + + +def test_open_closed(): + """Get an exception when opening a dataset on a closed MemoryFile""" + memfile = MemoryFile() + memfile.close() + assert memfile.closed + with pytest.raises(IOError): + memfile.open() + + +def test_open_closed_zip(): + """Get an exception when opening a dataset on a closed ZipMemoryFile""" + memfile = ZipMemoryFile() + memfile.close() + assert memfile.closed + with pytest.raises(IOError): + memfile.open() + + def test_write_memoryfile(profile_first_coutwildrnp_shp): - """In-memory Shapefile can be written""" + """In-memory GeoJSON can be written""" profile, first = profile_first_coutwildrnp_shp profile['driver'] = 'GeoJSON' with MemoryFile() as memfile: @@ -45,25 +96,56 @@ assert len(col) == 1 -def test_memoryfile_bytesio(path_coutwildrnp_json): - """In-memory GeoJSON file can be read""" +@requires_gdal2 +def test_memoryfile_write_extension(profile_first_coutwildrnp_shp): + """In-memory shapefile gets an .shp extension by default""" + profile, first = profile_first_coutwildrnp_shp + profile['driver'] = 'ESRI Shapefile' + with 
MemoryFile() as memfile: + with memfile.open(**profile) as col: + col.write(first) + assert memfile.name.endswith(".shp") + + +def test_memoryfile_open_file_or_bytes_read(path_coutwildrnp_json): + """Test MemoryFile.open when file_or_bytes has a read attribute """ with open(path_coutwildrnp_json, 'rb') as f: - data = f.read() + with MemoryFile(f) as memfile: + with memfile.open() as collection: + assert len(collection) == 67 - with fiona.open(BytesIO(data)) as collection: + +def test_memoryfile_bytesio(data_coutwildrnp_json): + """GeoJSON file stored in BytesIO can be read""" + with fiona.open(BytesIO(data_coutwildrnp_json)) as collection: assert len(collection) == 67 def test_memoryfile_fileobj(path_coutwildrnp_json): - """In-memory GeoJSON file can be read""" + """GeoJSON file in an open file object can be read""" with open(path_coutwildrnp_json, 'rb') as f: - with fiona.open(f) as collection: assert len(collection) == 67 -def test_write_memoryfile_(profile_first_coutwildrnp_shp): - """In-memory Shapefile can be written""" +def test_memoryfile_len(data_coutwildrnp_json): + """Test MemoryFile.__len__ """ + with MemoryFile() as memfile: + assert len(memfile) == 0 + memfile.write(data_coutwildrnp_json) + assert len(memfile) == len(data_coutwildrnp_json) + + +def test_memoryfile_tell(data_coutwildrnp_json): + """Test MemoryFile.tell() """ + with MemoryFile() as memfile: + assert memfile.tell() == 0 + memfile.write(data_coutwildrnp_json) + assert memfile.tell() == len(data_coutwildrnp_json) + + +def test_write_bytesio(profile_first_coutwildrnp_shp): + """GeoJSON can be written to BytesIO""" profile, first = profile_first_coutwildrnp_shp profile['driver'] = 'GeoJSON' with BytesIO() as fout: @@ -75,3 +157,14 @@ with MemoryFile(data) as memfile: with memfile.open() as col: assert len(col) == 1 + + +def test_mapinfo_raises(): + """Reported to be a crasher in #937""" + driver = 'MapInfo File' + schema = {'geometry': 'Point', 'properties': OrderedDict([('position', 'str')])} + + with BytesIO() as fout: + with pytest.raises(OSError): + with fiona.open(fout, "w", driver=driver, schema=schema) as collection: + collection.write({"type": "Feature", "geometry": {"type": "Point", "coordinates": (0, 0)}, "properties": {"position": "x"}}) diff -Nru fiona-1.8.13/tests/test_non_counting_layer.py fiona-1.8.20/tests/test_non_counting_layer.py --- fiona-1.8.13/tests/test_non_counting_layer.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_non_counting_layer.py 2021-05-31 21:29:33.000000000 +0000 @@ -32,7 +32,6 @@ features = self.c[2:5] assert len(features) == 3 - def test_fail_slice_negative_index(self): - with pytest.warns(FionaDeprecationWarning): - with pytest.raises(IndexError): - self.c[2:-4] + def test_warn_slice_negative_index(self): + with pytest.warns((FionaDeprecationWarning, RuntimeWarning)): + self.c[2:-4] diff -Nru fiona-1.8.13/tests/test_open.py fiona-1.8.20/tests/test_open.py --- fiona-1.8.13/tests/test_open.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_open.py 2021-05-31 21:29:33.000000000 +0000 @@ -1,7 +1,13 @@ """Tests of file opening""" +import io import os + +import pytest + import fiona +from fiona._crs import crs_to_wkt +from fiona.errors import DriverError def test_open_shp(path_coutwildrnp_shp): @@ -13,3 +19,41 @@ path = os.path.relpath(os.path.join(data_dir, "!test.geojson")) assert os.path.exists(path), "Missing test data" assert fiona.open(path), "Failed to open !test.geojson" + + +@pytest.mark.xfail(raises=DriverError) +def 
test_write_memfile_crs_wkt(): + example_schema = { + "geometry": "Point", + "properties": [("title", "str")], + } + + example_features = [ + { + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"title": "One"}, + }, + { + "geometry": {"type": "Point", "coordinates": [1.0, 2.0]}, + "properties": {"title": "Two"}, + }, + { + "geometry": {"type": "Point", "coordinates": [3.0, 4.0]}, + "properties": {"title": "Three"}, + }, + ] + + with io.BytesIO() as fd: + with fiona.open( + fd, + "w", + driver="GPKG", + schema=example_schema, + crs_wkt=crs_to_wkt("EPSG:32611"), + ) as dst: + dst.writerecords(example_features) + + fd.seek(0) + with fiona.open(fd) as src: + assert src.driver == "GPKG" + assert src.crs == {"init": "epsg:32611"} diff -Nru fiona-1.8.13/tests/test_rfc3339.py fiona-1.8.20/tests/test_rfc3339.py --- fiona-1.8.13/tests/test_rfc3339.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_rfc3339.py 2021-05-31 21:29:33.000000000 +0000 @@ -12,7 +12,7 @@ class TestDateParse(object): def test_yyyymmdd(self): - assert parse_date("2012-01-29") == (2012, 1, 29, 0, 0, 0, 0.0) + assert parse_date("2012-01-29") == (2012, 1, 29, 0, 0, 0, 0.0, None) def test_error(self): with pytest.raises(ValueError): @@ -22,19 +22,22 @@ class TestTimeParse(object): def test_hhmmss(self): - assert parse_time("10:11:12") == (0, 0, 0, 10, 11, 12, 0.0) + assert parse_time("10:11:12") == (0, 0, 0, 10, 11, 12, 0.0, None) def test_hhmm(self): - assert parse_time("10:11") == (0, 0, 0, 10, 11, 0, 0.0) + assert parse_time("10:11") == (0, 0, 0, 10, 11, 0, 0.0, None) def test_hhmmssff(self): - assert parse_time("10:11:12.42") == (0, 0, 0, 10, 11, 12, 0.42*1000000) + assert parse_time("10:11:12.42") == (0, 0, 0, 10, 11, 12, 0.42*1000000, None) def test_hhmmssz(self): - assert parse_time("10:11:12Z") == (0, 0, 0, 10, 11, 12, 0.0) + assert parse_time("10:11:12Z") == (0, 0, 0, 10, 11, 12, 0.0, None) def test_hhmmssoff(self): - assert parse_time("10:11:12-01:00") == (0, 0, 0, 10, 11, 12, 0.0) + assert parse_time("10:11:12-01:30") == (0, 0, 0, 10, 11, 12, 0.0, -90) + + def test_hhmmssoff2(self): + assert parse_time("10:11:12+01:30") == (0, 0, 0, 10, 11, 12, 0.0, 90) def test_error(self): with pytest.raises(ValueError): @@ -46,7 +49,17 @@ def test_yyyymmdd(self): assert ( parse_datetime("2012-01-29T10:11:12") == - (2012, 1, 29, 10, 11, 12, 0.0)) + (2012, 1, 29, 10, 11, 12, 0.0, None)) + + def test_yyyymmddTZ(self): + assert ( + parse_datetime("2012-01-29T10:11:12+01:30") == + (2012, 1, 29, 10, 11, 12, 0.0, 90)) + + def test_yyyymmddTZ2(self): + assert ( + parse_datetime("2012-01-29T10:11:12-01:30") == + (2012, 1, 29, 10, 11, 12, 0.0, -90)) def test_error(self): with pytest.raises(ValueError): diff -Nru fiona-1.8.13/tests/test_schema.py fiona-1.8.20/tests/test_schema.py --- fiona-1.8.13/tests/test_schema.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_schema.py 2021-05-31 21:29:33.000000000 +0000 @@ -1,10 +1,14 @@ +from collections import OrderedDict + import fiona from fiona.errors import SchemaError, UnsupportedGeometryTypeError, \ DriverSupportError from fiona.schema import FIELD_TYPES, normalize_field_type import os import tempfile - +from .conftest import get_temp_filename +from fiona.drvsupport import driver_mode_mingdal +from fiona.env import GDALVersion import pytest from .conftest import requires_only_gdal1, requires_gdal2 @@ -223,3 +227,160 @@ 'geometry': 'LineString', 'properties': items}) as c: pass + + +@pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) +def 
test_geometry_only_schema_write(tmpdir, driver): + schema = { + "geometry": "Polygon", + # No properties defined here. + } + + record = {'geometry': {'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]}} + + path = str(tmpdir.join(get_temp_filename(driver))) + + with fiona.open(path, + mode='w', + driver=driver, + schema=schema) as c: + c.write(record) + + with fiona.open(path, + mode='r', + driver=driver) as c: + data = [f for f in c] + assert len(data) == 1 + assert len(data[0].get('properties', {})) == 0 + assert data[0]['geometry'] == record['geometry'] + + +@pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) +def test_geometry_only_schema_update(tmpdir, driver): + + # Guard unsupported drivers + if driver in driver_mode_mingdal['a'] and GDALVersion.runtime() < GDALVersion( + *driver_mode_mingdal['a'][driver][:2]): + return + + schema = { + "geometry": "Polygon", + # No properties defined here. + } + + record1 = { + 'geometry': {'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]}} + record2 = { + 'geometry': {'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (2.0, 0.0), (2.0, 2.0), (2.0, 0.0), (0.0, 0.0)]]}} + + path = str(tmpdir.join(get_temp_filename(driver))) + + # Create file + with fiona.open(path, + mode='w', + driver=driver, + schema=schema) as c: + c.write(record1) + + # Append record + with fiona.open(path, + mode='a', + driver=driver) as c: + c.write(record2) + + with fiona.open(path, + mode='r', + driver=driver) as c: + data = [f for f in c] + assert len(data) == 2 + + for f in data: + assert len(f.get('properties', {})) == 0 + assert data[0]['geometry'] == record1['geometry'] + assert data[1]['geometry'] == record2['geometry'] + + +@pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) +def test_property_only_schema_write(tmpdir, driver): + + schema = { + # No geometry defined here. + "properties": {'prop1': 'str'} + } + + record1 = {'properties': {'prop1': 'one'}} + + path = str(tmpdir.join(get_temp_filename(driver))) + + with fiona.open(path, + mode='w', + driver=driver, + schema=schema) as c: + c.write(record1) + + with fiona.open(path, + mode='r', + driver=driver) as c: + data = [f for f in c] + assert len(data) == 1 + assert len(data[0].get('properties', {})) == 1 + assert 'prop1' in data[0]['properties'] and data[0]['properties']['prop1'] == 'one' + for f in data: + assert 'geometry' not in f or f['geometry'] is None + + +@pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) +def test_property_only_schema_update(tmpdir, driver): + + # Guard unsupported drivers + if driver in driver_mode_mingdal['a'] and GDALVersion.runtime() < GDALVersion( + *driver_mode_mingdal['a'][driver][:2]): + return + + schema = { + # No geometry defined here. 
+ "properties": {'prop1': 'str'} + } + + record1 = {'properties': {'prop1': 'one'}} + record2 = {'properties': {'prop1': 'two'}} + + path = str(tmpdir.join(get_temp_filename(driver))) + + # Create file + with fiona.open(path, + mode='w', + driver=driver, + schema=schema) as c: + c.write(record1) + + # Append record + with fiona.open(path, + mode='a', + driver=driver) as c: + c.write(record2) + + with fiona.open(path, + mode='r', + driver=driver) as c: + data = [f for f in c] + assert len(data) == 2 + for f in data: + assert len(f.get('properties', {})) == 1 + assert 'geometry' not in f or f['geometry'] is None + assert 'prop1' in data[0]['properties'] and data[0]['properties']['prop1'] == 'one' + assert 'prop1' in data[1]['properties'] and data[1]['properties']['prop1'] == 'two' + + +def test_schema_default_fields_wrong_type(tmpdir): + """ Test for SchemaError if a default field is specified with a different type""" + + name = str(tmpdir.join('test.gpx')) + schema = {'properties': OrderedDict([('ele', 'str'), ('time', 'datetime')]), + 'geometry': 'Point'} + + with pytest.raises(SchemaError): + with fiona.open(name, 'w', + driver="GPX", + schema=schema) as c: + pass diff -Nru fiona-1.8.13/tests/test_slice.py fiona-1.8.20/tests/test_slice.py --- fiona-1.8.13/tests/test_slice.py 2019-12-05 14:21:39.000000000 +0000 +++ fiona-1.8.20/tests/test_slice.py 2021-05-31 21:29:33.000000000 +0000 @@ -1,13 +1,18 @@ """Note well: collection slicing is deprecated! """ - -import logging -import sys - +import tempfile +import shutil +import os +from collections import OrderedDict import pytest - +from fiona.env import GDALVersion import fiona from fiona.errors import FionaDeprecationWarning +from .conftest import get_temp_filename +from fiona.drvsupport import supported_drivers, _driver_supports_mode + +gdal_version = GDALVersion.runtime() + def test_collection_get(path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp) as src: @@ -39,43 +44,108 @@ assert v['id'] == '5' -def test_collection_iterator_items_slice(path_coutwildrnp_shp): - - with fiona.open(path_coutwildrnp_shp) as src: - count = len(src) - - items = list(src.items(0, 5)) - assert len(items) == 5 - - items = list(src.items(1, 5)) - assert len(items) == 4 - - items = list(src.items(-5, None)) - assert len(items) == 5 - - items = list(src.items(-5, -1)) - assert len(items) == 4 - - items = list(src.items(0, None)) - assert len(items) == count - - items = list(src.items(5, None)) - assert len(items) == (count - 5) - - items = list(src.items(5, None, -1)) - assert len(items) == 6 - - items = list(src.items(5, None, -2)) - assert len(items) == 3 - - items = list(src.items(4, None, -2)) - assert len(items) == 3 - - items = list(src.items(-1, -5, -1)) - assert len(items) == 4 - - items = list(src.items(-5, None, -1)) - assert len(items) == (count - 5 + 1) +@pytest.fixture(scope="module", params=[driver for driver in supported_drivers if + _driver_supports_mode(driver, 'w') + and driver not in {'DGN', 'MapInfo File', 'GPSTrackMaker', 'GPX', 'BNA', 'DXF'}]) +def slice_dataset_path(request): + """ Create temporary datasets for test_collection_iterator_items_slice()""" + + driver = request.param + min_id = 0 + max_id = 9 + + def get_schema(driver): + special_schemas = {'CSV': {'geometry': None, 'properties': OrderedDict([('position', 'int')])}} + return special_schemas.get(driver, {'geometry': 'Point', 'properties': OrderedDict([('position', 'int')])}) + + def get_records(driver, range): + special_records1 = {'CSV': [{'geometry': None, 'properties': 
{'position': i}} for i in range],
+                            'PCIDSK': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i), 0.0)},
+                                        'properties': {'position': i}} for i in range]
+                            }
+        return special_records1.get(driver, [
+            {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {'position': i}} for i in
+            range])
+
+    schema = get_schema(driver)
+    records = get_records(driver, range(min_id, max_id + 1))
+
+    create_kwargs = {}
+    if driver == 'FlatGeobuf':
+        create_kwargs['SPATIAL_INDEX'] = False
+
+    tmpdir = tempfile.mkdtemp()
+    path = os.path.join(tmpdir, get_temp_filename(driver))
+
+    with fiona.open(path, 'w',
+                    driver=driver,
+                    schema=schema,
+                    **create_kwargs) as c:
+        c.writerecords(records)
+    yield path
+    shutil.rmtree(tmpdir)
+
+
+@pytest.mark.parametrize("args", [(0, 5, None),
+                                  (1, 5, None),
+                                  (-5, None, None),
+                                  (-5, -1, None),
+                                  (0, None, None),
+                                  (5, None, None),
+                                  (8, None, None),
+                                  (9, None, None),
+                                  (10, None, None),
+                                  (0, 5, 2),
+                                  (1, 5, 2),
+                                  (-5, None, 2),
+                                  (-5, -1, 2),
+                                  (0, None, 2),
+                                  (0, 8, 2),
+                                  (0, 9, 2),
+                                  (0, 10, 2),
+                                  (1, 8, 2),
+                                  (1, 9, 2),
+                                  (1, 10, 2),
+                                  (1, None, 2),
+                                  (5, None, 2),
+                                  (5, None, -1),
+                                  (5, None, -2),
+                                  (5, None, None),
+                                  (4, None, -2),
+                                  (-1, -5, -1),
+                                  (-5, None, -1),
+                                  (0, 5, 1),
+                                  (5, 15, 1),
+                                  (15, 30, 1),
+                                  (5, 0, -1),
+                                  (15, 5, -1),
+                                  (30, 15, -1),
+                                  (0, 5, 2),
+                                  (5, 15, 2),
+                                  (15, 30, 2),
+                                  (5, 0, -2),
+                                  (15, 5, -2),
+                                  (30, 15, -2)
+                                  ])
+@pytest.mark.filterwarnings('ignore:.*OLC_FASTFEATURECOUNT*')
+@pytest.mark.filterwarnings('ignore:.*OLCFastSetNextByIndex*')
+def test_collection_iterator_items_slice(slice_dataset_path, args):
+    """Test that c.items(start, stop, step) returns the correct features.
+    """
+
+    start, stop, step = args
+    min_id = 0
+    max_id = 9
+
+    positions = list(range(min_id, max_id + 1))[start:stop:step]
+
+    with fiona.open(slice_dataset_path, 'r') as c:
+        items = list(c.items(start, stop, step))
+        assert len(items) == len(positions)
+        record_positions = [int(item[1]['properties']['position']) for item in items]
+        for expected_position, record_position in zip(positions, record_positions):
+            assert expected_position == record_position
 
 
 def test_collection_iterator_keys_next(path_coutwildrnp_shp):
diff -Nru fiona-1.8.13/tests/test_transactions.py fiona-1.8.20/tests/test_transactions.py
--- fiona-1.8.13/tests/test_transactions.py 2019-12-05 14:21:39.000000000 +0000
+++ fiona-1.8.20/tests/test_transactions.py 2021-05-31 21:29:33.000000000 +0000
@@ -5,9 +5,11 @@
 from random import uniform, randint
 from collections import defaultdict
 import pytest
+from tests.conftest import requires_gdal2
 
 has_gpkg = "GPKG" in fiona.supported_drivers.keys()
 
+
 def create_records(count):
     for n in range(count):
         record = {
@@ -16,6 +18,7 @@
         }
         yield record
 
+
 class DebugHandler(logging.Handler):
     def __init__(self, pattern):
         logging.Handler.__init__(self)
@@ -30,6 +33,7 @@
 
 log = logging.getLogger()
 
+@requires_gdal2
 @pytest.mark.skipif(not has_gpkg, reason="Requires geopackage driver")
 class TestTransaction:
     def setup_method(self):
diff -Nru fiona-1.8.13/tests/test_transform.py fiona-1.8.20/tests/test_transform.py
--- fiona-1.8.13/tests/test_transform.py 2019-12-05 14:21:39.000000000 +0000
+++ fiona-1.8.20/tests/test_transform.py 2021-05-31 21:29:33.000000000 +0000
@@ -1,9 +1,7 @@
 """Tests of the transform submodule"""
 
 import math
-
 import pytest
-
 from fiona import transform
 
 
@@ -48,3 +46,39 @@
 def test_transform_geom_with_z(geom):
     """Transforming a geom with Z succeeds"""
     g2 = transform.transform_geom("epsg:4326", "epsg:3857", geom, precision=3)
+
+
+@pytest.mark.parametrize("crs", ["epsg:4326",
+                                 "EPSG:4326",
+                                 "WGS84",
+                                 {'init': 'epsg:4326'},
+                                 {'proj': 'longlat', 'datum': 'WGS84', 'no_defs': True},
+                                 "OGC:CRS84"])
+def test_axis_ordering(crs):
+    """Test that transform uses the traditional axis mapping (lon, lat order)."""
+
+    expected = (-8427998.647958742, 4587905.27136252)
+    t1 = transform.transform(crs, "epsg:3857", [-75.71], [38.06])
+    assert (t1[0][0], t1[1][0]) == pytest.approx(expected)
+    geom = {"type": "Point", "coordinates": [-75.71, 38.06]}
+    g1 = transform.transform_geom(crs, "epsg:3857", geom, precision=3)
+    assert g1["coordinates"] == pytest.approx(expected)
+
+    rev_expected = (-75.71, 38.06)
+    t2 = transform.transform("epsg:3857", crs, [-8427998.647958742], [4587905.27136252])
+    assert (t2[0][0], t2[1][0]) == pytest.approx(rev_expected)
+    geom = {"type": "Point", "coordinates": [-8427998.647958742, 4587905.27136252]}
+    g2 = transform.transform_geom("epsg:3857", crs, geom, precision=3)
+    assert g2["coordinates"] == pytest.approx(rev_expected)
+
+
+def test_transform_issue971():
+    """Regression test for https://github.com/Toblerity/Fiona/issues/971."""
+    source_crs = "epsg:25832"
+    dest_crs = "epsg:4326"
+    geom = {'type': 'GeometryCollection', 'geometries': [{'type': 'LineString',
+                                                          'coordinates': [(512381.8870945257, 5866313.311218272),
+                                                                          (512371.23869999964, 5866322.282500001),
+                                                                          (512364.6014999999, 5866328.260199999)]}]}
+    geom_transformed = transform.transform_geom(source_crs, dest_crs, geom, precision=3)
+    assert geom_transformed['geometries'][0]['coordinates'][0] == pytest.approx((9.184, 52.946))
diff -Nru fiona-1.8.13/tests/test_version.py fiona-1.8.20/tests/test_version.py
--- fiona-1.8.13/tests/test_version.py 2019-12-05 14:21:39.000000000 +0000
+++ fiona-1.8.20/tests/test_version.py 2021-05-31 21:29:33.000000000 +0000
@@ -1,5 +1,11 @@
 import fiona
-from fiona.ogrext import GDALVersion
+import platform
+import re
+import os
+import sys
+from tests.conftest import travis_only
+from fiona._env import GDALVersion, get_gdal_release_name
+
 
 def test_version_tuple():
     version = fiona.gdal_version
@@ -7,15 +13,60 @@
     assert version.minor >= 0 and isinstance(version.minor, int)
     assert version.revision >= 0 and isinstance(version.revision, int)
 
+
 def test_version_comparison():
     # version against version
     assert GDALVersion(4, 0, 0) > GDALVersion(3, 2, 1)
     assert GDALVersion(2, 0, 0) < GDALVersion(3, 2, 1)
     assert GDALVersion(3, 2, 2) > GDALVersion(3, 2, 1)
     assert GDALVersion(3, 2, 0) < GDALVersion(3, 2, 1)
-    
+
     # tuple against version
     assert (4, 0, 0) > GDALVersion(3, 2, 1)
     assert (2, 0, 0) < GDALVersion(3, 2, 1)
     assert (3, 2, 2) > GDALVersion(3, 2, 1)
     assert (3, 2, 0) < GDALVersion(3, 2, 1)
+
+
+@travis_only
+def test_show_versions(capsys):
+    version_pattern = re.compile(r"(\d+)\.(\d+)\.(\d+)")
+
+    os_info = "{system} {release}".format(system=platform.system(),
+                                          release=platform.release())
+    python_version = platform.python_version()
+    python_exec = sys.executable
+
+    msg = ("Fiona version: {fiona_version}"
+           "\nGDAL version: {gdal_release_name}"
+           "\nPROJ version: {proj_version}"
+           "\n"
+           "\nOS: {os_info}"
+           "\nPython: {python_version}"
+           "\nPython executable: '{python_exec}'"
+           "\n"
+           )
+
+    if fiona.gdal_version < GDALVersion(3, 0, 1):
+        proj_version = "Proj version not available"
+    else:
+        proj_version = os.getenv("PROJVERSION")
+        proj_version = re.match(version_pattern, proj_version).group(0)
+
+    gdal_version = os.getenv("GDALVERSION")
+    if gdal_version != "master":
+        gdal_version = re.match(version_pattern, gdal_version).group(0)
+    else:
+        gdal_version = get_gdal_release_name()
+
+    msg_formatted = msg.format(fiona_version=fiona.__version__,
+                               gdal_release_name=gdal_version,
+                               proj_version=proj_version,
+                               os_info=os_info,
+                               python_version=python_version,
+                               python_exec=python_exec)
+
+    fiona.show_versions()
+    captured = capsys.readouterr()
+
+    assert captured.out.strip() == msg_formatted.strip()
diff -Nru fiona-1.8.13/tests/test_write.py fiona-1.8.20/tests/test_write.py
--- fiona-1.8.13/tests/test_write.py 1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.8.20/tests/test_write.py 2021-05-31 21:29:33.000000000 +0000
@@ -0,0 +1,26 @@
+"""New tests of writing feature collections."""
+
+import fiona
+from fiona.crs import from_epsg
+
+
+def test_issue771(tmpdir, caplog):
+    """Overwrite a GeoJSON file without logging errors."""
+    schema = {"geometry": "Point", "properties": {"zero": "int"}}
+
+    feature = {
+        "geometry": {"type": "Point", "coordinates": (0, 0)},
+        "properties": {"zero": "0"},
+    }
+
+    outputfile = tmpdir.join("test.geojson")
+
+    for i in range(2):
+        with fiona.open(
+            str(outputfile), "w", driver="GeoJSON", schema=schema, crs=from_epsg(4326)
+        ) as collection:
+            collection.write(feature)
+        assert outputfile.exists()
+
+    for record in caplog.records:
+        assert record.levelname != "ERROR"
diff -Nru fiona-1.8.13/.travis.yml fiona-1.8.20/.travis.yml
--- fiona-1.8.13/.travis.yml 2019-12-05 14:21:39.000000000 +0000
+++ fiona-1.8.20/.travis.yml 1970-01-01 00:00:00.000000000 +0000
@@ -1,73 +0,0 @@
-dist: trusty
-
-language: python
-
-python:
-  - "2.7"
-  - "3.6"
-
-cache:
-  directories:
-    - $GDALINST
-    - ~/.cache/pip
-
-env:
-  global:
-    - PIP_WHEEL_DIR=$HOME/.cache/pip/wheels
-    - PIP_FIND_LINKS=file://$HOME/.cache/pip/wheels
-    - GDALINST=$HOME/gdalinstall
-    - GDALBUILD=$HOME/gdalbuild
-    - PROJINST=$HOME/gdalinstall
-    - PROJBUILD=$HOME/projbuild
-  matrix:
-    - GDALVERSION="1.11.5" PROJVERSION="4.8.0"
-    - GDALVERSION="2.0.3" PROJVERSION="4.9.3"
-    - GDALVERSION="2.1.4" PROJVERSION="4.9.3"
-    - GDALVERSION="2.2.4" PROJVERSION="4.9.3"
-    - GDALVERSION="2.3.3" PROJVERSION="4.9.3"
-    - GDALVERSION="2.4.2" PROJVERSION="4.9.3"
-    - GDALVERSION="3.0.1" PROJVERSION="6.1.1"
-    - GDALVERSION="master" PROJVERSION="6.1.1"
-
-matrix:
-  allow_failures:
-    - env: GDALVERSION="master" PROJVERSION="6.1.1"
-
-addons:
-  apt:
-    packages:
-      - libgdal-dev
-      - libatlas-dev
-      - libatlas-base-dev
-      - gfortran
-
-before_install:
-  - pip install -U pip
-  - pip install wheel coveralls>=1.1 --upgrade
-  - pip install setuptools==36.0.1
-  - pip install wheel
-  - . ./scripts/travis_proj_install.sh
-  - . ./scripts/travis_gdal_install.sh
-  - export PATH=$GDALINST/gdal-$GDALVERSION/bin:$PATH
-  - export LD_LIBRARY_PATH=$GDALINST/gdal-$GDALVERSION/lib:$LD_LIBRARY_PATH
-  - export GDAL_DATA=$GDALINST/gdal-$GDALVERSION/share/gdal
-  - export PROJ_LIB=$GDALINST/gdal-$GDALVERSION/share/proj
-  - gdal-config --version
-
-install:
-  - if [ "$GDALVERSION" = "master" ]; then echo "Using gdal master"; elif [ $(gdal-config --version) == "$GDALVERSION" ]; then echo "Using gdal $GDALVERSION"; else echo "NOT using gdal $GDALVERSION as expected; aborting"; exit 1; fi
-  - "python -m pip wheel -r requirements-dev.txt"
-  - "python -m pip install -r requirements-dev.txt"
-  - "GDAL_CONFIG=$GDALINST/gdal-$GDALVERSION/bin/gdal-config python -m pip install --upgrade --force-reinstall --no-use-pep517 -e .[test]"
-  - fio --version
-  - gdal-config --version
-  - fio --gdal-version
-
-script:
-  - python -m pytest -m "not wheel" --cov fiona --cov-report term-missing
-
-after_success:
-  - coveralls || echo "!! intermittent coveralls failure"
-
-before_cache:
-  - if [ "$GDALVERSION" = "trunk" ]; then rm -rf $GDALINST/gdal-$GDALVERSION; fi
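
Note on the slice_dataset_path fixture above: keyword arguments that fiona.open()
does not consume itself are passed through to OGR as driver-specific creation
options; that is how the fixture turns off the FlatGeobuf spatial index, since a
spatial index may reorder features and would break the position-based slice
assertions. A minimal sketch of the same pattern using the GeoPackage driver,
which also accepts a SPATIAL_INDEX layer-creation option (the output path is
hypothetical):

    import fiona

    schema = {"geometry": "Point", "properties": {"position": "int"}}
    records = [
        {"geometry": {"type": "Point", "coordinates": (0.0, float(i))},
         "properties": {"position": i}}
        for i in range(10)
    ]

    # SPATIAL_INDEX=False is forwarded to OGR as a layer-creation option.
    with fiona.open("/tmp/slice-demo.gpkg", "w", driver="GPKG",
                    schema=schema, SPATIAL_INDEX=False) as dst:
        dst.writerecords(records)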
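
Note on test_collection_iterator_items_slice: Collection.items(start, stop, step)
is meant to follow Python list-slice semantics, which is why the test computes
its expected values as list(range(min_id, max_id + 1))[start:stop:step]. A sketch
of the same check outside pytest, assuming a 10-feature layer like the one
written above (the path is hypothetical):

    import fiona

    # items() yields (id, feature) pairs; slice arguments behave like list slicing.
    with fiona.open("/tmp/slice-demo.gpkg") as src:
        pairs = list(src.items(0, 5, 2))  # features at positions 0, 2 and 4
        positions = [feat["properties"]["position"] for _, feat in pairs]
        assert positions == list(range(10))[0:5:2]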
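
Note on test_axis_ordering: with GDAL 3 and PROJ 6, EPSG:4326 is formally
lat,lon ordered, but Fiona's transform helpers keep the traditional GIS x,y
(lon,lat) order regardless of how the CRS is spelled. A sketch of the behaviour
the test pins down, reusing the coordinates from the test itself:

    from fiona import transform

    # Every spelling of WGS84 should give the same lon,lat-ordered result.
    for crs in ("epsg:4326", "EPSG:4326", "WGS84", {"init": "epsg:4326"}):
        xs, ys = transform.transform(crs, "epsg:3857", [-75.71], [38.06])
        assert round(xs[0], 3) == -8427998.648
        assert round(ys[0], 3) == 4587905.271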
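
Note on test_show_versions: the test rebuilds the message printed by
fiona.show_versions() from the CI environment variables GDALVERSION and
PROJVERSION and compares it against captured stdout. Outside of CI the helper
is simply:

    import fiona

    fiona.show_versions()  # prints Fiona/GDAL/PROJ versions plus OS and Python details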