diff -Nru fiona-1.8.22/.github/workflows/ci_linux.yml fiona-1.9.5/.github/workflows/ci_linux.yml --- fiona-1.8.22/.github/workflows/ci_linux.yml 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/.github/workflows/ci_linux.yml 1970-01-01 00:00:00.000000000 +0000 @@ -1,222 +0,0 @@ -name: Linux CI - -on: [push, pull_request] - -jobs: - build: - name: Python ${{ matrix.python }} / GDAL ${{ matrix.GDALVERSION }} / PROJ ${{ matrix.PROJVERSION }} - runs-on: [ubuntu-18.04] - if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" - strategy: - fail-fast: false - matrix: - include: - # Test all supported gdal minor versions (except latest stable) with one python version - - { - python: 3.6, - GDALVERSION: "2.0.3", - PROJVERSION: "4.9.3", - allow_failure: "false", - } - - { - python: 3.6, - GDALVERSION: "2.1.4", - PROJVERSION: "4.9.3", - allow_failure: "false", - } - - { - python: 3.6, - GDALVERSION: "2.2.4", - PROJVERSION: "4.9.3", - allow_failure: "false", - } - - { - python: 3.6, - GDALVERSION: "2.3.3", - PROJVERSION: "4.9.3", - allow_failure: "false", - } - - { - python: 3.6, - GDALVERSION: "2.4.4", - PROJVERSION: "4.9.3", - allow_failure: "false", - } - - { - python: 3.6, - GDALVERSION: "3.0.4", - PROJVERSION: "6.2.1", - allow_failure: "false", - } - - { - python: 3.6, - GDALVERSION: "3.1.0", - PROJVERSION: "6.3.2", - allow_failure: "false", - } - - # Test all supported python versions with latest stable gdal release - - { - python: 3.6, - GDALVERSION: "3.2.1", - PROJVERSION: "7.2.1", - allow_failure: "false", - } - - { - python: 3.7, - GDALVERSION: "3.2.1", - PROJVERSION: "7.2.1", - allow_failure: "false", - } - - { - python: 3.8, - GDALVERSION: "3.2.1", - PROJVERSION: "7.2.1", - allow_failure: "false", - } - - { - python: 3.9, - GDALVERSION: "3.2.1", - PROJVERSION: "7.2.1", - allow_failure: "false", - } - - { - python: "3.10", - GDALVERSION: "3.3.3", - PROJVERSION: "8.2.0", - allow_failure: "false", - } - - # Test GDAL master - - { - python: 3.6, - GDALVERSION: "master", - PROJVERSION: "7.2.1", - allow_failure: "true", - } - - env: - CYTHON_COVERAGE: "true" - MAKEFLAGS: "-j 4 -s" - CXXFLAGS: "-O0" - CFLAGS: "-O0" - PROJVERSION: ${{ matrix.PROJVERSION }} - GDALVERSION: ${{ matrix.GDALVERSION }} - GDALINST: ${{ github.workspace }}/gdalinstall - GDALBUILD: ${{ github.workspace }}/gdalbuild - PROJINST: ${{ github.workspace }}/gdalinstall - PROJBUILD: ${{ github.workspace }}/projbuild - FILEGDB: ${{ github.workspace }}/gdalinstall/filegdb - - # Emulate travis - TRAVIS_BUILD_DIR: ${{ github.workspace }} - TRAVIS_OS_NAME: "linux" - - steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python }} - - - name: Set env variables - run: | - - # Additional env variables - echo "GDAL_DATA=$GDALINST/gdal-$GDALVERSION/share/gdal" >> $GITHUB_ENV - echo "PROJ_LIB=$GDALINST/gdal-$GDALVERSION/share/proj" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=$GDALINST/gdal-$GDALVERSION/lib:$GDALINST/proj-$PROJVERSION/lib:$FILEGDB/lib:\$LD_LIBRARY_PATH" >> $GITHUB_ENV - - # Add PATH - echo "$GDALINST/gdal-$GDALVERSION/bin" >> $GITHUB_PATH - echo "$GDALINST/proj-$PROJVERSION/bin" >> $GITHUB_PATH - - echo "cat \$GITHUB_ENV" - cat $GITHUB_ENV - echo "" - echo "cat \$GITHUB_PATH" - cat $GITHUB_PATH - - - name: Install apt packages - run: | - sudo apt-get install libatlas-base-dev libcurl4-openssl-dev libgeos-dev libfreexl-dev libzstd-dev libspatialite-dev - - # 
Unlike travis, packages from non default repositories are installed. - # While default repositories e.g. bionic/universe or bionic/main) tend to keep packages at the same API / ABI level, - # this is not guaranteed with other repositories. - # The following command creates a list of these packages, which is used as key for the GDAL cache - # The repositories of packages can be identified in the the output of `sudo apt-get install` - apt list --installed | grep 'libgeos-dev\|libxml2-dev' > $GITHUB_WORKSPACE/apt_list - cat $GITHUB_WORKSPACE/apt_list - - - name: Cache GDAL binaries - uses: actions/cache@v2 - with: - path: gdalinstall - key: ${{ runner.os }}-gdal-${{ matrix.GDALVERSION }}-proj-${{ matrix.PROJVERSION }}-${{ hashFiles('**/apt_list') }} - - - name: Cache pip - uses: actions/cache@v2 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Install Python dependencies - run: | - python -m pip install -U pip - python -m pip install -U wheel - python -m pip install -r requirements-ci.txt - python -m pip wheel -r requirements-dev.txt - python -m pip install -r requirements-dev.txt - - - name: Build PROJ - run: | - chmod +x scripts/travis_proj_install.sh && ./scripts/travis_proj_install.sh - - - name: Install FileGDB - run: | - chmod +x scripts/travis_filegdb_install.sh && ./scripts/travis_filegdb_install.sh - - - name: Build GDAL - continue-on-error: ${{ matrix.allow_failure == 'true' }} - run: | - chmod +x scripts/travis_gdal_install.sh && ./scripts/travis_gdal_install.sh - gdal-config --version - - - name: Build Fiona - continue-on-error: ${{ matrix.allow_failure == 'true' }} - run: | - if [ "$GDALVERSION" = "master" ]; then echo "Using gdal master"; elif [ $($GDALINST/gdal-$GDALVERSION/bin/gdal-config --version) == $(sed 's/[a-zA-Z].*//g' <<< $GDALVERSION) ]; then echo "Using gdal $GDALVERSION"; else echo "NOT using gdal $GDALVERSION as expected; aborting"; exit 1; fi - GDAL_CONFIG=$GDALINST/gdal-$GDALVERSION/bin/gdal-config python -m pip install --no-deps --force-reinstall --no-use-pep517 -e . - - - name: Print Environment - continue-on-error: ${{ matrix.allow_failure == 'true' }} - run: | - echo "python -m pip freeze" - python -m pip freeze - - echo "" - echo "fio --version" - fio --version - - echo "" - echo "fio --gdal-version" - fio --gdal-version - - echo "" - echo "python -c \"import fiona; fiona.show_versions()\"" - python -c "import fiona; fiona.show_versions()" - - - name: pytest - continue-on-error: ${{ matrix.allow_failure == 'true' }} - run: | - GDAL_ENABLE_DEPRECATED_DRIVER_GTM=YES python -m pytest -m "not wheel" --cov fiona --cov-report term-missing - - - name: Coveralls - continue-on-error: ${{ matrix.allow_failure == 'true' }} - run: coveralls || echo "!! 
intermittent coveralls failure" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff -Nru fiona-1.8.22/.github/workflows/rstcheck.yml fiona-1.9.5/.github/workflows/rstcheck.yml --- fiona-1.8.22/.github/workflows/rstcheck.yml 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/.github/workflows/rstcheck.yml 2023-10-11 23:19:44.000000000 +0000 @@ -1,7 +1,17 @@ name: rstcheck -# Run this workflow every time a new commit pushed to your repository -on: [push, pull_request] +# Run this workflow for commits to doc files +on: + push: + paths: + - ".github/workflows/rstcheck.yml" + - "README.rst" + - "docs/**" + pull_request: + paths: + - ".github/workflows/rstcheck.yml" + - "README.rst" + - "docs/**" jobs: rstcheck: @@ -10,7 +20,7 @@ steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v2 @@ -19,7 +29,7 @@ - name: Install Python dependencies run: | - python -m pip install sphinx==3.2.1 rstcheck==3.3.1 + python -m pip install rstcheck[sphinx]==4.1.0 - name: Run rstcheck run: | diff -Nru fiona-1.8.22/.github/workflows/test_gdal_latest.yml fiona-1.9.5/.github/workflows/test_gdal_latest.yml --- fiona-1.8.22/.github/workflows/test_gdal_latest.yml 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/.github/workflows/test_gdal_latest.yml 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,84 @@ +name: Test GDAL Latest + +on: + push: + branches: [ main, 'maint-*' ] + schedule: + - cron: '0 0 * * 0' + pull_request: # also build on PRs touching this file + paths: + - ".github/workflows/test_gdal_latest.yaml" + - "ci/gdal-compile.sh" + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} + cancel-in-progress: true + +jobs: + test_gdal_latest: + name: GDAL Latest + runs-on: ubuntu-latest + container: osgeo/proj:9.1.0 + env: + GDAL_DIR: ${{ github.workspace }}/gdal_install + GDAL_DATA: ${{ github.workspace }}/gdal_install/share/gdal + LD_LIBRARY_PATH: "${{ github.workspace }}/gdal_install/lib/:${LD_LIBRARY_PATH}" + steps: + - uses: actions/checkout@v4 + - name: Update + run: | + apt-get update + apt-get -y install software-properties-common + add-apt-repository -y ppa:deadsnakes/ppa + apt-get update + + - name: Set up Python + run: | + apt-get install -y --no-install-recommends \ + python3.10 \ + python3.10-dev \ + python3.10-venv \ + python3-pip \ + g++ + + - name: Install GDAL + shell: bash + run: | + apt-get update + apt-get install -qq \ + libcurl4-gnutls-dev \ + libgeos-dev \ + libjpeg-dev \ + libnetcdf-dev \ + libhdf4-alt-dev \ + libhdf5-serial-dev \ + libssl-dev \ + libsqlite3-dev \ + libexpat-dev \ + libxerces-c-dev \ + libpng-dev \ + libopenjp2-7-dev \ + libzstd-dev \ + libwebp-dev \ + cmake \ + curl \ + git + bash ci/gdal-compile.sh git + + - name: Install dependencies + run: | + export PATH="${GDAL_DIR}/bin/:${PATH}" + python3.10 -m venv testenv + . testenv/bin/activate + python -m pip install --upgrade pip + python -m pip wheel -r requirements-dev.txt + python -m pip install -r requirements-dev.txt + python setup.py clean + python -m pip install --no-deps --force-reinstall -e .[test] + + - name: Test + shell: bash + run: | + export PATH="${GDAL_DIR}/bin/:${PATH}" + . 
testenv/bin/activate + python -m pytest -v -m "not wheel or gdal" -rxXs --cov fiona --cov-report term-missing diff -Nru fiona-1.8.22/.github/workflows/tests.yml fiona-1.9.5/.github/workflows/tests.yml --- fiona-1.8.22/.github/workflows/tests.yml 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/.github/workflows/tests.yml 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,139 @@ +name: Tests + +on: + push: + branches: [ master, 'maint-*' ] + paths: + - '.github/workflows/tests.yaml' + - 'requirements*.txt' + - 'setup.py' + - 'setup.cfg' + - 'MANIFEST.in' + - 'pyproject.toml' + - 'scripts/**' + - 'fiona/**' + - 'tests/**' + pull_request: + branches: [ master, 'maint-*' ] + paths: + - '.github/workflows/tests.yaml' + - 'requirements*.txt' + - 'setup.py' + - 'setup.cfg' + - 'MANIFEST.in' + - 'pyproject.toml' + - 'scripts/**' + - 'fiona/**' + - 'tests/**' + schedule: + - cron: '0 0 * * 0' + +jobs: + docker_tests: + runs-on: ubuntu-latest + name: Docker | GDAL=${{ matrix.gdal-version }} | python=${{ matrix.python-version }} + container: ghcr.io/osgeo/gdal:ubuntu-small-${{ matrix.gdal-version }} + env: + DEBIAN_FRONTEND: noninteractive + strategy: + fail-fast: false + matrix: + include: + - python-version: '3.7' + gdal-version: '3.4.3' + - python-version: '3.8' + gdal-version: '3.4.3' + - python-version: '3.9' + gdal-version: '3.5.3' + - python-version: '3.10' + gdal-version: '3.6.4' + - python-version: '3.11' + gdal-version: '3.7.1' + - python-version: '3.12' + gdal-version: '3.7.1' + + steps: + - uses: actions/checkout@v4 + + - name: Update + run: | + apt-get update + apt-get -y install software-properties-common + add-apt-repository -y ppa:deadsnakes/ppa + apt-get update + + - name: Set up Python ${{ matrix.python-version }} + run: | + apt-get install -y --no-install-recommends \ + python${{ matrix.python-version }} \ + python${{ matrix.python-version }}-dev \ + python${{ matrix.python-version }}-venv \ + python3-pip \ + g++ + + - name: Install dependencies + run: | + python${{ matrix.python-version }} -m venv testenv + . testenv/bin/activate + python -m pip install --upgrade pip + python -m pip wheel -r requirements-dev.txt + python -m pip install -r requirements-dev.txt + python setup.py clean + python -m pip install --no-deps --force-reinstall -e .[test] + + - name: run tests + run: | + . testenv/bin/activate + python -m pytest -v -m "not wheel or gdal" -rxXs --cov fiona --cov-report term-missing + + conda_test: + name: Conda | ${{ matrix.os }} | python=${{ matrix.python-version }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: true + matrix: + os: [macos-latest] + python-version: ['3.9', '3.10'] + include: + - os: ubuntu-latest + python-version: '3.10' + steps: + - uses: actions/checkout@v4 + + - name: Conda Setup + uses: s-weigand/setup-conda@v1 + with: + conda-channels: conda-forge + + - name: Install Env + shell: bash + run: | + conda config --prepend channels conda-forge + conda config --set channel_priority strict + conda create -n test python=${{ matrix.python-version }} libgdal geos=3.10.3 cython=0.29 numpy + source activate test + python -m pip install -e . || python -m pip install -e . 
+ python -m pip install -r requirements-dev.txt + + - name: Check and Log Environment + shell: bash + run: | + source activate test + python -V + conda info + + - name: Test with Coverage (Ubuntu) + if: matrix.os == 'ubuntu-latest' + shell: bash + run: | + source activate test + python -m pytest -v -m "not wheel" -rxXs --cov fiona --cov-report term-missing + + - name: Test with Coverage (OSX) + if: matrix.os == 'macos-latest' + shell: bash + run: | + source activate test + python -m pytest -v -m "not wheel" -rxXs --cov fiona --cov-report term-missing + + - uses: codecov/codecov-action@v3 diff -Nru fiona-1.8.22/.gitignore fiona-1.9.5/.gitignore --- fiona-1.8.22/.gitignore 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/.gitignore 2023-10-11 23:19:44.000000000 +0000 @@ -64,28 +64,13 @@ # fiona VERSION.txt -fiona/_shim.c -fiona/ogrext.c -fiona/_crs.c -fiona/_drivers.c -fiona/_err.c -fiona/_geometry.c -fiona/_transform.cpp +fiona/*.c +fiona/*.cpp fiona/ograpi.pxd -fiona/_shim1.c -fiona/_shim2.c -fiona/_shim22.c -fiona/_shim.pxd -fiona/_shim.pyx tests/data/coutwildrnp.json tests/data/coutwildrnp.tar -tests/data/coutwildrnp.zip tests/data/coutwildrnp.gpkg .DS_Store .ipynb_checkpoints .pytest_cache MANIFEST -fiona/_env.c -fiona/ogrext1.c -fiona/ogrext2.c -fiona/schema.c diff -Nru fiona-1.8.22/.mailmap fiona-1.9.5/.mailmap --- fiona-1.8.22/.mailmap 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/.mailmap 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,18 @@ +Alan D. Snow +Hannes Gräuler +Hannes Gräuler +Hannes Gräuler +Kevin Wurster +Kevin Wurster +Kevin Wurster +Matthew Perry +Micah Cochran +Michael Weisman +Patrick Young +Patrick Young +René Buffat +René Buffat +Sean Gillies +Sean Gillies +Sean Gillies +Sean Gillies diff -Nru fiona-1.8.22/.readthedocs.yaml fiona-1.9.5/.readthedocs.yaml --- fiona-1.8.22/.readthedocs.yaml 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/.readthedocs.yaml 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,9 @@ +version: 2 + +build: + os: "ubuntu-22.04" + tools: + python: "mambaforge-22.9" + +conda: + environment: environment.yml diff -Nru fiona-1.8.22/CHANGES.txt fiona-1.9.5/CHANGES.txt --- fiona-1.8.22/CHANGES.txt 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/CHANGES.txt 2023-10-11 23:19:44.000000000 +0000 @@ -3,12 +3,201 @@ All issue numbers are relative to https://github.com/Toblerity/Fiona/issues. -1.8.22 (2022-10-14) -------------------- +1.9.5 (2023-10-11) +------------------ + +Bug fixes: + +- Expand keys in schema mismatch exception, resolving #1278. +- Preserve the null properties and geometry of a Feature when serializing + (#1276). + +Packaging: + +* Builds now require Cython >= 3.0.2 (#1276). +* PyPI wheels include GDAL 3.6.4, PROJ 9.0.1, and GEOS 3.11.2. +* PyPI wheels include curl 8.4.0, addressing CVE-2023-38545 and CVE-38546. +* PyPI wheels are now available for Python 3.12. + +1.9.4.post1 (2023-05-23) +------------------------ + +Extraneous files were unintentionally packaged in the 1.9.4 wheels. This post1 +release excludes them so that wheel contents are as in version 1.9.3. + +1.9.4 (2023-05-16) +------------------ + +- The performance of Feature.from_dict() has been improved (#1267). +- Several sources of meaningless log messages from fiona._geometry about NULL + geometries are avoided (#1264). +- The Parquet driver has been added to the list of supported drivers and will + be available if your system's GDAL library links libarrow. Note that fiona + wheels on PyPI do not include libarrow as it is rather large. 
+- Ensure that fiona._vendor modules are found and included. +- Bytes type feature properties are now hex encoded when serializing to GeoJSON + (#1263). +- Docstrings for listdir and listlayers have been clarified and harmonized. +- Nose style test cases have been converted to unittest.TestCase (#1256). +- The munch package used by fio-filter and fio-calc is now vendored and patched + to remove usage of the deprecated pkg_resources module (#1255). + +1.9.3 (2023-04-10) +------------------ + +- Rasterio CRS objects are compatible with the Collection constructor and are + now accepted (#1248). +- Enable append mode for fio-load (#1237). +- Reading a GeoJSON with an empty array property can result in a segmentation + fault since version 1.9.0. This has been fixed (#1228). + +1.9.2 (2023-03-20) +------------------ + +- Get command entry points using importlib.metadata (#1220). +- Instead of warning, transform_geom() raises an exception when some points + can't be reprojected unless the caller opts in to partial reprojection. This + restores the behavior of version 1.8.22. +- Add support for open options to all CLI commands that call fiona.open + (#1215). +- Fix a memory leak that can occur when iterating over a dataset using strides + (#1205). +- ZipMemoryFile now supports zipped GDB data (#1203). + +1.9.1 (2023-02-09) +------------------ + +- Log a warning message when identically named fields are encountered (#1201). +- Avoid dependence on listdir order in tests (#1193). +- Prevent empty geometries arrays from appearing in __geo_interface__ (#1197). +- setuptools added to pyproject.toml. Its pkg_resources module is used by the + CLI (#1191). + +1.9.0 (2023-01-30) +------------------ + +- CITATION.txt has been replaced by a new CITATION.cff file and the credits + have been updated. +- In setup.py the distutils (deprecated) logger is no longer used. + +1.9b2 (2023-01-22) +------------------ + +- Add Feature.__geo_interface__ property (#1181). +- Invalid creation options are filtered and ignored (#1180). +- The readme doc has been shortened and freshened up, with a modern example for + version 1.9.0 (#1174). +- The Geometry class now provides and looks for __geo_interface__ (#1174). +- The top level fiona module now exports Feature, Geometry, and Properties + (#1174). +- Functions that take Feature or Geometry objects will continue to take dicts + or objects that provide __geo_interface__ (#1177). This reverses the + deprecation introduced in 1.9a2. +- Python ignores SIGPIPE by default. By never catching BrokenPipeError via + `except Exception` when, for example, piping the output of rio-shapes to + the Unix head program, we avoid getting an unhandled BrokenPipeError message + when the interpreter shuts down (#2689). + +1.9b1 (2022-12-13) +------------------ + +New features: + +* Add listdir and listlayers method to io.MemoryFile (resolving #754). +* Add support for TIN and triangle geometries (#1163). +* Add an allow_unsupported_drivers option to fiona.open() (#1126). +* Added support for the OGR StringList field type (#1141). + +Changes and bug fixes: + +* Missing and unused imports have been added or removed. +* Make sure that errors aren't lost when a collection can't be saved properly + (#1169). +* Ensure that ZipMemoryFile have the proper GDAL name after creation so that we + can use listdir() (#1092). +* The fiona._loading module, which supports DLL loading on Windows, + has been moved into __init__.py and is no longer used anywhere else (#1168). 
+* Move project metadata to pyproject.toml (#1165). +* Update drvsupport.py to reflect new format capabilities in GDAL 3.6.0 + (#1122). +* Remove debug logging from env and _env modules. + +1.9a3 (2022-10-17) +------------------ + +Packaging: + +* Builds now require Cython >= 0.29.29 because of +* https://github.com/cython/cython/issues/4609 (see #1143). +* PyPI wheels now include GDAL 3.5.2, PROJ 9.0.1, and GEOS 3.11.0. +* PyPI wheels are now available for Python 3.11. + +1.9a2 (2022-06-10) +------------------ + +Deprecations: + +- Fiona's API methods will accept feature and geometry dicts in 1.9.0, but this + usage is deprecated. Instances of Feature and Geometry will be required in + 2.0. +- The precision keyword argument of fiona.transform.transform_geom is + deprecated and will be removed in version 2.0. +- Deprecated usage has been eliminated in the project. Fiona's tests pass when + run with a -Werror::DeprecationWarning filter. + +Changes: + +- Fiona's FionaDeprecationWarning now sub-classes DeprecationWarning. +- Some test modules have beeen re-formatted using black. -Builds now require Cython >= 0.29.29 because of -https://github.com/cython/cython/issues/4609 (#1143). +New features: + +- Fiona Collections now carry a context exit stack into which we can push fiona + Envs and MemoryFiles (#1059). +- Fiona has a new CRS class, like rasterio's, which is compatible with the CRS + dicts of previous versions (#714). + +1.9a1 (2022-05-19) +------------------ + +Deprecations: + +- The fiona.drivers() function has been deprecated and will be removed in + version 2.0. It should be replaced by fiona.Env(). +- The new fiona.meta module will be renamed to fiona.drivers in version 2.0. + +Packaging: + +- Source distributions contain no C source files and require Cython to create + them from .pyx files (#1096). + +Changes: + +- Shims for various versions of GDAL have been removed and are replaced by + Cython compilation conditions (#1093). +- Use of CURL_CA_BUNDLE environment variable is replaced by a more specific + GDAL/PROJ_CURL_CA_BUNDLE (#1095). +- Fiona's feature accessors now return instances of fiona.model.Feature instead + of Python dicts (#787). The Feature class is compatible with code that + expects GeoJSON-like dicts but also provides id, geometry, and properties + attributes. The last two of these are instances of fiona.model.Geometry and + fiona.model.Properties. +- GDAL 3.1.0 is the minimum GDAL version. +- Drop Python 2, and establish Python 3.7 as the minimum version (#1079). +- Remove six and reduce footprint of fiona.compat (#985). + +New features: + +- The appropriate format driver can be detected from filename in write mode (#948). +- Driver metadata including dataset open and dataset and layer creations + options are now exposed through methods of the fiona.meta module (#950). +- CRS WKT format support (#979). +- Add 'where' SQL clause to set attribute filter (#961, #1097). + +Bug fixes: +- Env and Session classes have been updated for parity with rasterio and to + resolve a credential refresh bug (#1055). 1.8.21 (2022-02-07) ------------------- @@ -671,7 +860,7 @@ a typo (#254). Special thanks to WFMU DJ Liz Berg for the awesome playlist that's fueling my -release sprint. Check it out at http://wfmu.org/playlists/shows/62083. You +release sprint. Check it out at https://wfmu.org/playlists/shows/62083. You can't unhear Love Coffin. 
1.6.0 (2015-07-21) diff -Nru fiona-1.8.22/CITATION.cff fiona-1.9.5/CITATION.cff --- fiona-1.8.22/CITATION.cff 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/CITATION.cff 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,48 @@ +cff-version: 1.2.0 +message: "Please cite this software using these metadata." +type: software +title: Fiona +version: "1.9.5" +date-released: "2023-10-11" +abstract: "Fiona streams simple feature data to and from GIS formats like GeoPackage and Shapefile." +keywords: + - cartography + - GIS + - OGR +repository-artifact: https://pypi.org/project/Fiona +repository-code: https://github.com/Toblerity/Fiona +license: "BSD-3-Clause" +authors: + - given-names: Sean + family-names: Gillies + alias: sgillies + orcid: https://orcid.org/0000-0002-8401-9184 + - given-names: René + family-names: Buffat + alias: rbuffat + orcid: https://orcid.org/0000-0002-9836-3314 + - given-names: Joshua + family-names: Arnott + alias: snorfalorpagus + - given-names: Mike W. + family-names: Taves + alias: mwtoews + orcid: https://orcid.org/0000-0003-3657-7963 + - given-names: Kevin + family-names: Wurster + alias: geowurster + orcid: https://orcid.org/0000-0001-9044-0832 + - given-names: Alan D. + family-names: Snow + alias: snowman2 + orcid: https://orcid.org/0000-0002-7333-3100 + - given-names: Micah + family-names: Cochran + alias: micahcochran + - given-names: Elliott + family-names: Sales de Andrade + alias: QuLogic + orcid: https://orcid.org/0000-0001-7310-8942 + - given-names: Matthew + family-names: Perry + alias: perrygeo diff -Nru fiona-1.8.22/CITATION.txt fiona-1.9.5/CITATION.txt --- fiona-1.8.22/CITATION.txt 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/CITATION.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -If you use Fiona for any published work, please cite it using the reference -below: - -@Misc{, - author = {Sean Gillies and others}, - organization = {Toblerity}, - title = {Fiona is OGR's neat, nimble, no-nonsense API}, - year = {2011--}, - url = "https://github.com/Toblerity/Fiona" -} diff -Nru fiona-1.8.22/CODE_OF_CONDUCT.md fiona-1.9.5/CODE_OF_CONDUCT.md --- fiona-1.8.22/CODE_OF_CONDUCT.md 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/CODE_OF_CONDUCT.md 2023-10-11 23:19:44.000000000 +0000 @@ -1,3 +1,4 @@ + # Contributor Code of Conduct As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. @@ -11,7 +12,7 @@ * Trolling or insulting/derogatory comments * Public or private harassment * Publishing other's private information, such as physical or electronic addresses, without explicit permission -* Other unethical or unprofessional conduct. +* Other unethical or unprofessional conduct Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team. 
@@ -19,4 +20,5 @@ Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers. -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org), version 1.2.0, available at https://www.contributor-covenant.org/version/1/2/0/code-of-conduct.html + diff -Nru fiona-1.8.22/CREDITS.txt fiona-1.9.5/CREDITS.txt --- fiona-1.8.22/CREDITS.txt 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/CREDITS.txt 2023-10-11 23:19:44.000000000 +0000 @@ -3,51 +3,51 @@ Fiona is written by: -- Sean Gillies -- René Buffat -- Joshua Arnott -- Kevin Wurster -- Micah Cochran -- Matthew Perry -- Elliott Sales de Andrade -- Kelsey Jordahl -- Patrick Young -- Simon Norris -- Hannes Gräuler -- Johan Van de Wauw -- Jacob Wasserman -- Michael Weisman -- Ryan Grout -- Bas Couwenberg -- Brendan Ward -- Hannes -- Michele Citterio -- Miro Hrončok -- Sid Kapur -- Tim Tröndle -- fredj -- qinfeng +- Alan D. Snow - Ariel Nunez - Ariki +- Bas Couwenberg - Brandon Liu +- Brendan Ward - Chris Mutel - Denis Rykov +- dimlev - Efrén - Egor Fedorov +- Elliott Sales de Andrade - Even Rouault +- Ewout ter Hoeven - Filipe Fernandes +- fredj - Géraud - Hannes Gräuler +- Jacob Wasserman - Jesse Crocker +- Johan Van de Wauw +- Joris Van den Bossche +- Joshua Arnott - Juan Luis Cano Rodríguez +- Kelsey Jordahl +- Kevin Wurster - Ludovic Delauné - Martijn Visser -- Matthew Perry -- Michael Weisman +- Matthew Perry +- Micah Cochran +- Michael Weisman +- Michele Citterio +- Mike Taves +- Miro Hrončok - Oliver Tonnhofer +- Patrick Young +- qinfeng +- René Buffat +- Ryan Grout +- Sean Gillies +- Sid Kapur +- Simon Norris - Stefano Costa - Stephane Poss -- dimlev +- Tim Tröndle - wilsaj The GeoPandas project (Joris Van den Bossche et al.) has been a major driver @@ -57,6 +57,6 @@ GDAL/OGR developers. Some portions of this work were supported by a grant (for Pleiades_) from the -U.S. National Endowment for the Humanities (http://www.neh.gov). +U.S. National Endowment for the Humanities (https://www.neh.gov). -.. _Pleiades: http://pleiades.stoa.org +.. _Pleiades: https://pleiades.stoa.org diff -Nru fiona-1.8.22/FAQ.rst fiona-1.9.5/FAQ.rst --- fiona-1.8.22/FAQ.rst 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/FAQ.rst 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,7 @@ +Frequently asked questions and answers +====================================== + +What does "ValueError: Invalid field type " mean? +------------------------------------------------------------------------ + +Fiona maps the built-in Python types to `field types of the OGR API `__ (``float`` to ``OFTReal``, etc.). Users may need to convert instances of other classes (like ``cx_Oracle.LOB``) to strings or bytes when writing data to new GIS datasets using fiona. diff -Nru fiona-1.8.22/ISSUE_TEMPLATE.md fiona-1.9.5/ISSUE_TEMPLATE.md --- fiona-1.8.22/ISSUE_TEMPLATE.md 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/ISSUE_TEMPLATE.md 2023-10-11 23:19:44.000000000 +0000 @@ -10,6 +10,18 @@ You think you've found something? We believe you. +The primary forum for questions about installation and usage of Fiona is +https://fiona.groups.io/g/main. 
The authors and other users will answer +questions when they have expertise to share and time to explain. Please take the +time to craft a clear question and be patient about responses. Please do not +bring these questions to Fiona's issue tracker, which we want to reserve for +bug reports and other actionable issues. + +Questions about development of Fiona, brainstorming, requests for comment, +and not-yet-actionable proposals are welcome in the project's developers +discussion group https://fiona.groups.io/g/dev. Issues opened in Fiona's +GitHub repo which haven't been socialized there may be perfunctorily closed. + Please note: Fiona contains extension modules and is thus susceptible to C library compatibility issues. If you are reporting an installation or module import issue, please note that this project only accepts reports about problems diff -Nru fiona-1.8.22/MANIFEST.in fiona-1.9.5/MANIFEST.in --- fiona-1.8.22/MANIFEST.in 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/MANIFEST.in 2023-10-11 23:19:44.000000000 +0000 @@ -2,11 +2,13 @@ global-exclude *.pyc recursive-exclude docs/data * recursive-exclude docs/_build * -exclude MANIFEST.in +recursive-exclude _build * +recursive-exclude venv * exclude *.txt *.py recursive-include docs *.rst *.txt recursive-include tests *.py recursive-include tests/data * -include fiona/*.c fiona/*.cpp -include CHANGES.txt CREDITS.txt LICENSE.txt VERSION.txt README.rst -include benchmark.py setup.py requirements.txt +recursive-include fiona *.pyx *.pxd *.pxi +recursive-exclude fiona *.c *.cpp +include CHANGES.txt CITATION.cff CREDITS.txt LICENSE.txt README.rst +include benchmark.py pyproject.toml setup.py requirements.txt diff -Nru fiona-1.8.22/README.rst fiona-1.9.5/README.rst --- fiona-1.8.22/README.rst 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/README.rst 2023-10-11 23:19:44.000000000 +0000 @@ -2,341 +2,124 @@ Fiona ===== -Fiona is GDAL_'s neat and nimble vector API for Python programmers. +.. image:: https://github.com/Toblerity/Fiona/workflows/Tests/badge.svg?branch=maint-1.9 + :target: https://github.com/Toblerity/Fiona/actions?query=branch%3Amaint-1.9 -.. image:: https://github.com/Toblerity/Fiona/workflows/Linux%20CI/badge.svg?branch=maint-1.8 - :target: https://github.com/Toblerity/Fiona/actions?query=branch%3Amaint-1.8 +Fiona streams simple feature data to and from GIS formats like GeoPackage and +Shapefile. -.. image:: https://ci.appveyor.com/api/projects/status/github/Toblerity/Fiona?svg=true - :target: https://ci.appveyor.com/project/sgillies/fiona/branch/master +Fiona can read and write real-world data using multi-layered GIS formats, +zipped and in-memory virtual file systems, from files on your hard drive or in +cloud storage. This project includes Python modules and a command line +interface (CLI). + +Fiona depends on `GDAL `__ but is different from GDAL's own +`bindings `__. Fiona is designed to +be highly productive and to make it easy to write code which is easy to read. -.. image:: https://coveralls.io/repos/Toblerity/Fiona/badge.svg - :target: https://coveralls.io/r/Toblerity/Fiona - -Fiona is designed to be simple and dependable. It focuses on reading and -writing data in standard Python IO style and relies upon familiar Python types -and protocols such as files, dictionaries, mappings, and iterators instead of -classes specific to OGR. 
Fiona can read and write real-world data using -multi-layered GIS formats and zipped virtual file systems and integrates -readily with other Python GIS packages such as pyproj_, Rtree_, and Shapely_. -Fiona is supported only on CPython versions 2.7 and 3.4+. - -For more details, see: - -* Fiona `home page `__ -* Fiona `docs and manual `__ -* Fiona `examples `__ - -Usage -===== - -Collections ------------ - -Records are read from and written to ``file``-like `Collection` objects -returned from the ``fiona.open()`` function. Records are mappings modeled on -the GeoJSON format. They don't have any spatial methods of their own, so if you -want to do anything fancy with them you will probably need Shapely or something -like it. Here is an example of using Fiona to read some records from one data -file, change their geometry attributes, and write them to a new data file. - -.. code-block:: python - - import fiona - - # Open a file for reading. We'll call this the "source." - - with fiona.open('tests/data/coutwildrnp.shp') as src: - - # The file we'll write to, the "destination", must be initialized - # with a coordinate system, a format driver name, and - # a record schema. We can get initial values from the open - # collection's ``meta`` property and then modify them as - # desired. - - meta = src.meta - meta['schema']['geometry'] = 'Point' - - # Open an output file, using the same format driver and - # coordinate reference system as the source. The ``meta`` - # mapping fills in the keyword parameters of fiona.open(). - - with fiona.open('test_write.shp', 'w', **meta) as dst: - - # Process only the records intersecting a box. - for f in src.filter(bbox=(-107.0, 37.0, -105.0, 39.0)): - - # Get a point on the boundary of the record's - # geometry. - - f['geometry'] = { - 'type': 'Point', - 'coordinates': f['geometry']['coordinates'][0][0]} - - # Write the record out. - - dst.write(f) - - # The destination's contents are flushed to disk and the file is - # closed when its ``with`` block ends. This effectively - # executes ``dst.flush(); dst.close()``. - -Reading Multilayer data ------------------------ - -Collections can also be made from single layers within multilayer files or -directories of data. The target layer is specified by name or by its integer -index within the file or directory. The ``fiona.listlayers()`` function -provides an index ordered list of layer names. - -.. code-block:: python - - for layername in fiona.listlayers('tests/data'): - with fiona.open('tests/data', layer=layername) as src: - print(layername, len(src)) - - # Output: - # (u'coutwildrnp', 67) - -Layer can also be specified by index. In this case, ``layer=0`` and -``layer='test_uk'`` specify the same layer in the data file or directory. - -.. code-block:: python - - for i, layername in enumerate(fiona.listlayers('tests/data')): - with fiona.open('tests/data', layer=i) as src: - print(i, layername, len(src)) - - # Output: - # (0, u'coutwildrnp', 67) - -Writing Multilayer data ------------------------ +Installation +============ -Multilayer data can be written as well. Layers must be specified by name when -writing. +Fiona has several `extension modules +`__ which link against +libgdal. This complicates installation. Binary distributions (wheels) +containing libgdal and its own dependencies are available from the Python +Package Index and can be installed using pip. -.. code-block:: python +.. 
code-block:: console - with open('tests/data/cowildrnp.shp') as src: - meta = src.meta - f = next(src) - - with fiona.open('/tmp/foo', 'w', layer='bar', **meta) as dst: - dst.write(f) - - print(fiona.listlayers('/tmp/foo')) - - with fiona.open('/tmp/foo', layer='bar') as src: - print(len(src)) - f = next(src) - print(f['geometry']['type']) - print(f['properties']) - - # Output: - # [u'bar'] - # 1 - # Polygon - # OrderedDict([(u'PERIMETER', 1.22107), (u'FEATURE2', None), (u'NAME', u'Mount Naomi Wilderness'), (u'FEATURE1', u'Wilderness'), (u'URL', u'http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi'), (u'AGBUR', u'FS'), (u'AREA', 0.0179264), (u'STATE_FIPS', u'49'), (u'WILDRNP020', 332), (u'STATE', u'UT')]) + pip install fiona -A view of the /tmp/foo directory will confirm the creation of the new files. +These wheels are mainly intended to make installation easy for simple +applications, not so much for production. They are not tested for compatibility +with all other binary wheels, conda packages, or QGIS, and omit many of GDAL's +optional format drivers. If you need, for example, GML support you will need to +build and install Fiona from a source distribution. It is possible to install +Fiona from source using pip (version >= 22.3) and the `--no-binary` option. A +specific GDAL installation can be selected by setting the GDAL_CONFIG +environment variable. .. code-block:: console - $ ls /tmp/foo - bar.cpg bar.dbf bar.prj bar.shp bar.shx - -Collections from archives and virtual file systems --------------------------------------------------- - -Zip and Tar archives can be treated as virtual filesystems and Collections can -be made from paths and layers within them. In other words, Fiona lets you read -and write zipped Shapefiles. + pip install -U pip + pip install --no-binary fiona fiona -.. code-block:: python +Many users find Anaconda and conda-forge a good way to install Fiona and get +access to more optional format drivers (like GML). - for i, layername in enumerate(fiona.listlayers('zip://tests/data/coutwildrnp.zip')): - with fiona.open('zip://tests/data/coutwildrnp.zip', layer=i) as src: - print(i, layername, len(src)) +Fiona 1.9 requires Python 3.7 or higher and GDAL 3.2 or higher. - # Output: - # (0, u'coutwildrnp', 67) +Python Usage +============ -Fiona can also read from more exotic file systems. For instance, a -zipped shape file in S3 can be accessed like so: +Features are read from and written to file-like ``Collection`` objects returned +from the ``fiona.open()`` function. Features are data classes modeled on the +GeoJSON format. They don't have any spatial methods of their own, so if you +want to transform them you will need Shapely or something like it. Here is an +example of using Fiona to read some features from one data file, change their +geometry attributes using Shapely, and write them to a new data file. .. code-block:: python - with fiona.open('zip+s3://mapbox/rasterio/coutwildrnp.zip') as src: - print(len(src)) - - # Output: - # 67 + import fiona + from fiona import Feature, Geometry + from shapely.geometry import mapping, shape + # Open a file for reading. We'll call this the source. + with fiona.open( + "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip" + ) as src: + + # The file we'll write to must be initialized with a coordinate + # system, a format driver name, and a record schema. We can get + # initial values from the open source's profile property and then + # modify them as we need. 
+ profile = src.profile + profile["schema"]["geometry"] = "Point" + profile["driver"] = "GPKG" + + # Open an output file, using the same format driver and coordinate + # reference system as the source. The profile mapping fills in the + # keyword parameters of fiona.open. + with fiona.open("centroids.gpkg", "w", **profile) as dst: + + # Process only the feature records intersecting a box. + for feat in src.filter(bbox=(-107.0, 37.0, -105.0, 39.0)): + + # Get the feature's centroid. + centroid_shp = shape(feat.geometry).centroid + new_geom = Geometry.from_dict(centroid_shp) + + # Write the feature out. + dst.write( + Feature(geometry=new_geom, properties=f.properties) + ) + + # The destination's contents are flushed to disk and the file is + # closed when its with block ends. This effectively + # executes ``dst.flush(); dst.close()``. -Fiona CLI +CLI Usage ========= Fiona's command line interface, named "fio", is documented at `docs/cli.rst -`__. Its ``fio -info`` pretty prints information about a data file. - -.. code-block:: console - - $ fio info --indent 2 tests/data/coutwildrnp.shp - { - "count": 67, - "crs": "EPSG:4326", - "driver": "ESRI Shapefile", - "bounds": [ - -113.56424713134766, - 37.0689811706543, - -104.97087097167969, - 41.99627685546875 - ], - "schema": { - "geometry": "Polygon", - "properties": { - "PERIMETER": "float:24.15", - "FEATURE2": "str:80", - "NAME": "str:80", - "FEATURE1": "str:80", - "URL": "str:101", - "AGBUR": "str:80", - "AREA": "float:24.15", - "STATE_FIPS": "str:80", - "WILDRNP020": "int:10", - "STATE": "str:80" - } - } - } - -Installation -============ - -Fiona requires Python 2.7 or 3.4+ and GDAL/OGR 1.8+. To build from -a source distribution you will need a C compiler and GDAL and Python -development headers and libraries (libgdal1-dev for Debian/Ubuntu, gdal-dev for -CentOS/Fedora). - -To build from a repository copy, you will also need Cython to build C sources -from the project's .pyx files. See the project's requirements-dev.txt file for -guidance. - -The `Kyngchaos GDAL frameworks -`__ will satisfy -the GDAL/OGR dependency for OS X, as will Homebrew's GDAL Formula (``brew install -gdal``). - -Python Requirements -------------------- - -Fiona depends on the modules ``enum34``, ``six``, ``cligj``, ``munch``, ``argparse``, and -``ordereddict`` (the two latter modules are standard in Python 2.7+). Pip will -fetch these requirements for you, but users installing Fiona from a Windows -installer must get them separately. - -Unix-like systems ------------------ - -Assuming you're using a virtualenv (if not, skip to the 4th command) and -GDAL/OGR libraries, headers, and `gdal-config`_ program are installed to well -known locations on your system via your system's package manager (``brew -install gdal`` using Homebrew on OS X), installation is this simple. - -.. code-block:: console - - $ mkdir fiona_env - $ virtualenv fiona_env - $ source fiona_env/bin/activate - (fiona_env)$ pip install fiona - -If gdal-config is not available or if GDAL/OGR headers and libs aren't -installed to a well known location, you must set include dirs, library dirs, -and libraries options via the setup.cfg file or setup command line as shown -below (using ``git``). You must also specify the version of the GDAL API on the -command line using the ``--gdalversion`` argument (see example below) or with -the ``GDAL_VERSION`` environment variable (e.g. ``export GDAL_VERSION=2.1``). - -.. 
code-block:: console - - (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git - (fiona_env)$ cd Fiona - (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal install --gdalversion 2.1 - -Or specify that build options and GDAL API version should be provided by a -particular gdal-config program. +`__. The CLI has a +number of different commands. Its ``fio cat`` command streams GeoJSON features +from any dataset. .. code-block:: console - (fiona_env)$ GDAL_CONFIG=/path/to/gdal-config pip install fiona - -Windows -------- - -Binary installers are available at -https://www.lfd.uci.edu/~gohlke/pythonlibs/#fiona and coming eventually to PyPI. + $ fio cat --compact tests/data/coutwildrnp.shp | jq -c '.' + {"geometry":{"coordinates":[[[-111.73527526855469,41.995094299316406],...]]}} + ... -You can download a binary distribution of GDAL from `here -`_. You will also need to download -the compiled libraries and headers (include files). - -When building from source on Windows, it is important to know that setup.py -cannot rely on gdal-config, which is only present on UNIX systems, to discover -the locations of header files and libraries that Fiona needs to compile its -C extensions. On Windows, these paths need to be provided by the user. -You will need to find the include files and the library files for gdal and -use setup.py as follows. You must also specify the version of the GDAL API on the -command line using the ``--gdalversion`` argument (see example below) or with -the ``GDAL_VERSION`` environment variable (e.g. ``set GDAL_VERSION=2.1``). +Documentation +============= -.. code-block:: console - - $ python setup.py build_ext -I -lgdal_i -L install --gdalversion 2.1 +For more details about this project, please see: -Note: The following environment variables needs to be set so that Fiona works correctly: - -* The directory containing the GDAL DLL (``gdal304.dll`` or similar) needs to be in your - Windows ``PATH`` (e.g. ``C:\gdal\bin``). -* The gdal-data directory needs to be in your Windows ``PATH`` or the environment variable - ``GDAL_DATA`` must be set (e.g. ``C:\gdal\bin\gdal-data``). -* The environment variable ``PROJ_LIB`` must be set to the proj library directory (e.g. - ``C:\gdal\bin\proj6\share``) - -The `Appveyor CI build `_ -uses the GISInternals GDAL binaries to build Fiona. This produces a binary wheel -for successful builds, which includes GDAL and other dependencies, for users -wanting to try an unstable development version. -The `Appveyor configuration file `_ may be a useful example for -users building from source on Windows. - -Development and testing -======================= - -Building from the source requires Cython. Tests require `pytest `_. If the GDAL/OGR -libraries, headers, and `gdal-config`_ program are installed to well known -locations on your system (via your system's package manager), you can do this:: - - (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git - (fiona_env)$ cd Fiona - (fiona_env)$ pip install cython - (fiona_env)$ pip install -e .[test] - (fiona_env)$ py.test - -Or you can use the ``pep-518-install`` script:: - - (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git - (fiona_env)$ cd Fiona - (fiona_env)$ ./pep-518-install - -If you have a non-standard environment, you'll need to specify the include and -lib dirs and GDAL library on the command line:: - - (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal --gdalversion 2 develop - (fiona_env)$ py.test - -.. 
_GDAL: http://www.gdal.org -.. _pyproj: http://pypi.python.org/pypi/pyproj/ -.. _Rtree: http://pypi.python.org/pypi/Rtree/ -.. _Shapely: http://pypi.python.org/pypi/Shapely/ -.. _gdal-config: http://www.gdal.org/gdal-config.html +* Fiona `home page `__ +* `Docs and manual `__ +* `Examples `__ +* Main `user discussion group `__ +* `Developers discussion group `__ diff -Nru fiona-1.8.22/appveyor.yml fiona-1.9.5/appveyor.yml --- fiona-1.8.22/appveyor.yml 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/appveyor.yml 2023-10-11 23:19:44.000000000 +0000 @@ -19,63 +19,29 @@ # The 4-digit number in the GISInternals archives is the MSVC version used to build # the libraries. It does not need to match the version of MSVC used to build Python. # https://en.wikipedia.org/wiki/Microsoft_Visual_C%2B%2B#Internal_version_numbering - - - PYTHON: "C:\\Python27-x64" - PYTHON_VERSION: "2.7" - PYTHON_ARCH: "64" - GDAL_VERSION: "1.11.4" - GIS_INTERNALS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3.zip" - GIS_INTERNALS_LIBS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3-libs.zip" - - - PYTHON: "C:\\Python36-x64" - PYTHON_VERSION: "3.6" - PYTHON_ARCH: "64" - GDAL_VERSION: "1.11.4" - GIS_INTERNALS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3.zip" - GIS_INTERNALS_LIBS: "release-1800-x64-gdal-1-11-4-mapserver-6-4-3-libs.zip" - - - PYTHON: "C:\\Python36-x64" - PYTHON_VERSION: "3.6" - PYTHON_ARCH: "64" - GDAL_VERSION: "2.2.3" - GIS_INTERNALS: "release-1911-x64-gdal-2-2-3-mapserver-7-0-7.zip" - GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-2-3-mapserver-7-0-7-libs.zip" - - - PYTHON: "C:\\Python36-x64" - PYTHON_VERSION: "3.6" - PYTHON_ARCH: "64" - GDAL_VERSION: "2.3.3" - GIS_INTERNALS: "release-1911-x64-gdal-2-3-3-mapserver-7-2-1.zip" - GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-3-3-mapserver-7-2-1-libs.zip" - - - PYTHON: "C:\\Python36-x64" - PYTHON_VERSION: "3.6" + # Test all supported gdal minor versions (except latest stable) with one python version + - PYTHON: "C:\\Python38-x64" + PYTHON_VERSION: "3.8" PYTHON_ARCH: "64" - GDAL_VERSION: "2.4.2" - GIS_INTERNALS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0.zip" - GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0-libs.zip" + GDAL_VERSION: "2.4.3" + GIS_INTERNALS: "release-1911-x64-gdal-2-4-3-mapserver-7-4-2.zip" + GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-4-3-mapserver-7-4-2-libs.zip" - - PYTHON: "C:\\Python36-x64" - PYTHON_VERSION: "3.6" + - PYTHON: "C:\\Python38-x64" + PYTHON_VERSION: "3.8" PYTHON_ARCH: "64" GDAL_VERSION: "3.0.4" GIS_INTERNALS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3-libs.zip" PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" - - PYTHON: "C:\\Python37-x64" - PYTHON_VERSION: "3.7" - PYTHON_ARCH: "64" - GDAL_VERSION: "2.4.2" - GIS_INTERNALS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0.zip" - GIS_INTERNALS_LIBS: "release-1911-x64-gdal-2-4-2-mapserver-7-4-0-libs.zip" - - - PYTHON: "C:\\Python37-x64" - PYTHON_VERSION: "3.7" + # Test all supported python versions with latest stable gdal release + - PYTHON: "C:\\Python36-x64" + PYTHON_VERSION: "3.6" PYTHON_ARCH: "64" - GDAL_VERSION: "3.0.4" - GIS_INTERNALS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3.zip" - GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3-libs.zip" + GDAL_VERSION: "3.1.2" + GIS_INTERNALS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1.zip" + GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1-libs.zip" PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" - PYTHON: "C:\\Python37-x64" @@ 
-89,23 +55,11 @@ - PYTHON: "C:\\Python38-x64" PYTHON_VERSION: "3.8" PYTHON_ARCH: "64" - GDAL_VERSION: "3.0.4" - GIS_INTERNALS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3.zip" - GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-0-4-mapserver-7-4-3-libs.zip" - PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" - - - PYTHON: "C:\\Python38-x64" - PYTHON_VERSION: "3.8" - PYTHON_ARCH: "64" GDAL_VERSION: "3.1.2" GIS_INTERNALS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1.zip" GIS_INTERNALS_LIBS: "release-1911-x64-gdal-3-1-2-mapserver-7-6-1-libs.zip" PROJ_LIB: "C:\\gdal\\bin\\proj6\\share" -matrix: - allow_failures: - - GDAL_VERSION: "1.11.4" - install: - ECHO "Filesystem root:" @@ -155,11 +109,12 @@ # target Python version and architecture - "%CMD_IN_ENV% pip install -r requirements-dev.txt" + # Install coverage testing dependencies +# - ps: python -m pip install coveralls>=1.1 --upgrade build_script: # Build the compiled extension - cmd: echo %PATH% - - cmd: echo %PYTHONPATH% # copy gisinternal gdal librarys into .libs @@ -185,7 +140,6 @@ # Our Windows GDAL doesn't have iconv and can't support certain tests. - "%CMD_IN_ENV% python -m pytest -m \"not iconv and not wheel\" --cov fiona --cov-report term-missing" - artifacts: - path: dist\*.whl name: wheel diff -Nru fiona-1.8.22/benchmark-max.py fiona-1.9.5/benchmark-max.py --- fiona-1.8.22/benchmark-max.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/benchmark-max.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ - import timeit from fiona import collection from osgeo import ogr diff -Nru fiona-1.8.22/benchmark-min.py fiona-1.9.5/benchmark-min.py --- fiona-1.8.22/benchmark-min.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/benchmark-min.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ - import timeit from fiona import collection from osgeo import ogr diff -Nru fiona-1.8.22/benchmark.py fiona-1.9.5/benchmark.py --- fiona-1.8.22/benchmark.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/benchmark.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ - import timeit from fiona import collection from osgeo import ogr diff -Nru fiona-1.8.22/ci/gdal-compile.sh fiona-1.9.5/ci/gdal-compile.sh --- fiona-1.8.22/ci/gdal-compile.sh 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/ci/gdal-compile.sh 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,51 @@ +#!/bin/bash +# Example usage: +# GDAL_DIR=$PWD/gdal bash gdal_compile.sh 3.6.0rc2 +set -e +pushd . +echo "Building GDAL ($1) from source..." +BUILD_GDAL_DIR=gdal-${1:0:5} +# Download PROJ +if [[ $1 == "git" ]]; then + git clone https://github.com/OSGeo/GDAL.git ${BUILD_GDAL_DIR} +else + curl https://download.osgeo.org/gdal/${1:0:5}/gdal-$1.tar.gz > ${BUILD_GDAL_DIR}.tar.gz + tar zxf ${BUILD_GDAL_DIR}.tar.gz + rm ${BUILD_GDAL_DIR}.tar.gz +fi +cd ${BUILD_GDAL_DIR} +mkdir build +cd build +# build using cmake +cmake .. 
\ + -DCMAKE_INSTALL_PREFIX=$GDAL_DIR \ + -DBUILD_SHARED_LIBS=ON \ + -DCMAKE_BUILD_TYPE=Release \ + -DGDAL_BUILD_OPTIONAL_DRIVERS=OFF \ + -DGDAL_ENABLE_DRIVER_MBTILES=OFF \ + -DOGR_BUILD_OPTIONAL_DRIVERS=OFF \ + -DOGR_ENABLE_DRIVER_CSV=ON \ + -DOGR_ENABLE_DRIVER_DGN=ON \ + -DOGR_ENABLE_DRIVER_DXF=ON \ + -DOGR_ENABLE_DRIVER_FLATGEOBUF=ON \ + -DOGR_ENABLE_DRIVER_GEOJSON=ON \ + -DOGR_ENABLE_DRIVER_GML=ON \ + -DOGR_ENABLE_DRIVER_GMT=ON \ + -DOGR_ENABLE_DRIVER_GPKG=ON \ + -DOGR_ENABLE_DRIVER_GPX=ON \ + -DOGR_ENABLE_DRIVER_OPENFILEGDB=ON \ + -DGDAL_ENABLE_DRIVER_PCIDSK=ON \ + -DOGR_ENABLE_DRIVER_S57=ON \ + -DOGR_ENABLE_DRIVER_SHAPE=ON \ + -DOGR_ENABLE_DRIVER_SQLITE=ON \ + -DOGR_ENABLE_DRIVER_TAB=ON \ + -DOGR_ENABLE_DRIVER_VRT=ON \ + -DBUILD_CSHARP_BINDINGS=OFF \ + -DBUILD_PYTHON_BINDINGS=OFF \ + -DBUILD_JAVA_BINDINGS=OFF +cmake --build . -j$(nproc) +cmake --install . +# cleanup +cd ../.. +rm -rf ${BUILD_GDAL_DIR} +popd diff -Nru fiona-1.8.22/debian/.gitlab-ci.yml fiona-1.9.5/debian/.gitlab-ci.yml --- fiona-1.8.22/debian/.gitlab-ci.yml 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/debian/.gitlab-ci.yml 2023-08-25 15:07:15.000000000 +0000 @@ -0,0 +1,6 @@ +--- +include: + - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/recipes/debian.yml + +variables: + SALSA_CI_ENABLE_BUILD_PACKAGE_TWICE: 1 diff -Nru fiona-1.8.22/debian/changelog fiona-1.9.5/debian/changelog --- fiona-1.8.22/debian/changelog 2022-12-05 08:00:00.000000000 +0000 +++ fiona-1.9.5/debian/changelog 2024-01-26 19:00:00.000000000 +0000 @@ -1,8 +1,118 @@ -fiona (1.8.22-1~jammy0) jammy; urgency=medium +fiona (1.9.5-1build2~jammy3) jammy; urgency=medium - * Back to require at least cython3 0.21. + * Disable tests for Jammy. - -- Angelos Tzotsos Mon, 05 Dec 2022 10:00:00 +0200 + -- Angelos Tzotsos Fri, 26 Jan 2024 21:00:00 +0200 + +fiona (1.9.5-1build2) noble; urgency=medium + + * Rebuild against new libgdal34. + + -- Gianfranco Costamagna Fri, 17 Nov 2023 19:14:37 +0100 + +fiona (1.9.5-1build1) noble; urgency=medium + + * No-change rebuild to build with python3.12 as supported. + + -- Matthias Klose Thu, 02 Nov 2023 10:20:24 +0100 + +fiona (1.9.5-1) unstable; urgency=medium + + * Team upload. + * New upstream release. + * Switch to dh-sequence-*. + * Refresh patches. + + -- Bas Couwenberg Thu, 12 Oct 2023 05:21:02 +0200 + +fiona (1.9.4-3) unstable; urgency=medium + + * Team upload. + * Use autopkgtest-pkg-pybuild testsuite. + * Enable Salsa CI. + * Remove generated files in clean target. + * Add patch to fix pytz.exceptions.UnknownTimeZoneError: 'US/Mountain'. + * Update lintian overrides. + + -- Bas Couwenberg Tue, 22 Aug 2023 17:05:16 +0200 + +fiona (1.9.4-2) unstable; urgency=medium + + * Team upload. + * Bump debhelper compat to 13. + * Add upstream patch to not install all .py files in wheel. + (closes: #1040361) + + -- Bas Couwenberg Wed, 05 Jul 2023 10:56:53 +0200 + +fiona (1.9.4-1) unstable; urgency=medium + + * Team upload. + * Move from experimental to unstable. + + -- Bas Couwenberg Sun, 11 Jun 2023 12:37:37 +0200 + +fiona (1.9.4-1~exp1) experimental; urgency=medium + + * Team upload. + * New upstream release. + * Update copyright file. + + -- Bas Couwenberg Wed, 17 May 2023 05:18:21 +0200 + +fiona (1.9.3-1~exp1) experimental; urgency=medium + + * Team upload. + * New upstream release. + + -- Bas Couwenberg Tue, 11 Apr 2023 05:27:08 +0200 + +fiona (1.9.2-1~exp1) experimental; urgency=medium + + * Team upload. + * New upstream release. 
+ + -- Bas Couwenberg Tue, 21 Mar 2023 05:28:33 +0100 + +fiona (1.9.1-1~exp1) experimental; urgency=medium + + * Team upload. + * New upstream release. + * Refresh patches. + + -- Bas Couwenberg Fri, 10 Feb 2023 07:19:16 +0100 + +fiona (1.9.0-1) unstable; urgency=medium + + * Team upload. + * New upstream release. + * Drop no-distutils.patch, applied upstream. + * Move from experimental to unstable. + + -- Bas Couwenberg Tue, 31 Jan 2023 09:09:48 +0100 + +fiona (1.9~b2-1~exp1) experimental; urgency=medium + + * Team upload. + * New upstream beta release. + * Bump Standards-Version to 4.6.2, no changes. + * Add patch to not use deprecated distutils module. + + -- Bas Couwenberg Sun, 22 Jan 2023 19:55:03 +0100 + +fiona (1.9~b1-1~exp1) experimental; urgency=medium + + * Team upload. + * New upstream beta release. + * Add Rules-Requires-Root to control file. + * Drop obsolete mock & six build dependencies. + * Refresh patches. + * Drop fiona-doc package, fails to build. + * Copy pytest.ini to pybuild build directory. + * Ignore additional tests that fail without network. + * Add python3-tz to build dependencies. + + -- Bas Couwenberg Thu, 15 Dec 2022 19:20:56 +0100 fiona (1.8.22-1) unstable; urgency=medium diff -Nru fiona-1.8.22/debian/clean fiona-1.9.5/debian/clean --- fiona-1.8.22/debian/clean 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/debian/clean 2023-08-12 08:26:54.000000000 +0000 @@ -0,0 +1,6 @@ +*-info/ +fiona/*.c +fiona/*.cpp +fiona/*.so +gdal-config.txt +VERSION.txt diff -Nru fiona-1.8.22/debian/control fiona-1.9.5/debian/control --- fiona-1.8.22/debian/control 2022-12-05 08:00:00.000000000 +0000 +++ fiona-1.9.5/debian/control 2024-01-26 19:00:00.000000000 +0000 @@ -3,11 +3,12 @@ Uploaders: Johan Van de Wauw Section: python Priority: optional -Build-Depends: debhelper-compat (= 12), +Build-Depends: debhelper-compat (= 13), dh-python, + dh-sequence-python3, gdal-bin, libgdal-dev, - cython3 (>= 0.21), + cython3, pybuild-plugin-pyproject, python3-all, python3-all-dev, @@ -16,22 +17,22 @@ python3-certifi, python3-click-plugins, python3-cligj, - python3-mock, python3-munch, python3-pytest, - python3-six, - python3-sphinx -Standards-Version: 4.6.1 + python3-setuptools, + python3-tz +Standards-Version: 4.6.2 Vcs-Browser: https://salsa.debian.org/debian-gis-team/fiona Vcs-Git: https://salsa.debian.org/debian-gis-team/fiona.git Homepage: https://github.com/Toblerity/Fiona +Rules-Requires-Root: no +Testsuite: autopkgtest-pkg-pybuild Package: python3-fiona Architecture: any Depends: ${python3:Depends}, ${misc:Depends}, ${shlibs:Depends} -Suggests: fiona-doc Description: Python 3 API for reading/writing vector geospatial data Fiona is a Python wrapper around the OGR vector data abstraction library. Fiona is designed to be simple and dependable. It focuses on reading @@ -51,7 +52,6 @@ python3-click-plugins, ${python3:Depends}, ${misc:Depends} -Suggests: fiona-doc Description: Command line tool for reading/writing vector geospatial data Fiona is a Python wrapper around the OGR vector data abstraction library. Fiona is designed to be simple and dependable. It focuses on reading @@ -63,20 +63,3 @@ such as pyproj, Rtree, and Shapely. . This package provides the fiona command line tools - -Package: fiona-doc -Architecture: all -Section: doc -Depends: ${sphinxdoc:Depends}, - ${misc:Depends} -Description: Python API for reading/writing vector geospatial data (docs) - Fiona is a Python wrapper around the OGR vector data abstraction library. - Fiona is designed to be simple and dependable. 
It focuses on reading - and writing data in standard Python IO style and relies upon familiar - Python types and protocols such as files, dictionaries, mappings, and - iterators instead of classes specific to OGR. Fiona can read and write - real-world data using multi-layered GIS formats and zipped virtual - file systems and integrates readily with other Python GIS packages - such as pyproj, Rtree, and Shapely. - . - This package contains the html documentation for Fiona. diff -Nru fiona-1.8.22/debian/copyright fiona-1.9.5/debian/copyright --- fiona-1.8.22/debian/copyright 2019-10-22 04:24:10.000000000 +0000 +++ fiona-1.9.5/debian/copyright 2023-08-12 08:26:54.000000000 +0000 @@ -35,9 +35,13 @@ * wilsaj License: BSD-3-Clause -Files: debian/* -Copyright: 2014-2017 Johan Van de Wauw -License: BSD-3-Clause +Files: docs/manual.rst +Copyright: 2014-2015 Sean C. Gillies +License: CC-BY-3.0-US + +Files: fiona/_vendor/munch/* +Copyright: 2010, David Schoonover +License: Expat Files: tests/data/* Copyright: disclaimed @@ -52,9 +56,9 @@ None. Acknowledgment of the National Atlas of the United States of America would be appreciated in products derived from these data." -Files: docs/manual.rst -Copyright: 2014-2015 Sean C. Gillies -License: CC-BY-3.0-US +Files: debian/* +Copyright: 2014-2017 Johan Van de Wauw +License: BSD-3-Clause License: BSD-3-Clause Redistribution and use in source and binary forms, with or without @@ -81,6 +85,25 @@ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +License: Expat + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + . + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + . + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + License: CC-BY-3.0-US http://creativecommons.org/licenses/by/3.0/us/legalcode . diff -Nru fiona-1.8.22/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch fiona-1.9.5/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch --- fiona-1.8.22/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch 2022-02-07 16:25:12.000000000 +0000 +++ fiona-1.9.5/debian/patches/0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch 2023-10-12 03:20:21.000000000 +0000 @@ -1,20 +1,16 @@ -From: Johan Van de Wauw -Date: Fri, 24 Oct 2014 21:09:21 +0200 -Subject: Rename fio command to fiona to avoid name clash +Description: Rename fio command to fiona to avoid name clash + There is already another package providing a binary "fio" (fio). +Author: Johan Van de Wauw +Forwarded: not-needed -There is already another package providing a binary "fio" (fio). 
---- - setup.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - ---- a/setup.py -+++ b/setup.py -@@ -328,7 +328,7 @@ setup_args = dict( - packages=['fiona', 'fiona.fio'], - entry_points=''' - [console_scripts] -- fio=fiona.fio.main:main_group -+ fiona=fiona.fio.main:main_group +--- a/pyproject.toml ++++ b/pyproject.toml +@@ -56,7 +56,7 @@ test = [ + ] + + [project.scripts] +-fio = "fiona.fio.main:main_group" ++fiona = "fiona.fio.main:main_group" - [fiona.fio_commands] - bounds=fiona.fio.bounds:bounds + [project.entry-points."fiona.fio_commands"] + bounds = "fiona.fio.bounds:bounds" diff -Nru fiona-1.8.22/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch fiona-1.9.5/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch --- fiona-1.8.22/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch 2021-08-15 12:39:37.000000000 +0000 +++ fiona-1.9.5/debian/patches/0002-Remove-outside-reference-possible-privacy-breach.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -From: Johan Van de Wauw -Date: Wed, 4 Feb 2015 20:26:56 +0100 -Subject: Remove outside reference possible-privacy-breach -Forwarded: not-needed - ---- - README.rst | 6 ------ - 1 file changed, 6 deletions(-) - ---- a/README.rst -+++ b/README.rst -@@ -4,15 +4,6 @@ Fiona - - Fiona is GDAL_'s neat and nimble vector API for Python programmers. - --.. image:: https://github.com/Toblerity/Fiona/workflows/Linux%20CI/badge.svg?branch=maint-1.8 -- :target: https://github.com/Toblerity/Fiona/actions?query=branch%3Amaint-1.8 -- --.. image:: https://ci.appveyor.com/api/projects/status/github/Toblerity/Fiona?svg=true -- :target: https://ci.appveyor.com/project/sgillies/fiona/branch/master -- --.. image:: https://coveralls.io/repos/Toblerity/Fiona/badge.svg -- :target: https://coveralls.io/r/Toblerity/Fiona -- - Fiona is designed to be simple and dependable. It focuses on reading and - writing data in standard Python IO style and relies upon familiar Python types - and protocols such as files, dictionaries, mappings, and iterators instead of diff -Nru fiona-1.8.22/debian/patches/0006-Remove-unknown-distribution-options.patch fiona-1.9.5/debian/patches/0006-Remove-unknown-distribution-options.patch --- fiona-1.8.22/debian/patches/0006-Remove-unknown-distribution-options.patch 2022-02-07 16:25:14.000000000 +0000 +++ fiona-1.9.5/debian/patches/0006-Remove-unknown-distribution-options.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -Description: Remove unknown distribution options. 
- UserWarning: Unknown distribution option: 'metadata_version' - UserWarning: Unknown distribution option: 'requires_python' - UserWarning: Unknown distribution option: 'requires_external' -Author: Bas Couwenberg -Forwarded: not-needed - ---- a/setup.py -+++ b/setup.py -@@ -310,11 +310,8 @@ extras_require['all'] = list(set(it.chai - - setup_args = dict( - cmdclass={'sdist': sdist_multi_gdal}, -- metadata_version='1.2', - name='Fiona', - version=version, -- requires_python='>=2.6', -- requires_external='GDAL (>=1.8)', - description="Fiona reads and writes spatial data files", - license='BSD', - keywords='gis vector feature data', diff -Nru fiona-1.8.22/debian/patches/series fiona-1.9.5/debian/patches/series --- fiona-1.8.22/debian/patches/series 2020-09-01 06:17:47.000000000 +0000 +++ fiona-1.9.5/debian/patches/series 2023-10-12 03:20:45.000000000 +0000 @@ -1,4 +1,2 @@ 0001-Rename-fio-command-to-fiona-to-avoid-name-clash.patch -0002-Remove-outside-reference-possible-privacy-breach.patch -0006-Remove-unknown-distribution-options.patch test_drvsupport.patch diff -Nru fiona-1.8.22/debian/patches/test_drvsupport.patch fiona-1.9.5/debian/patches/test_drvsupport.patch --- fiona-1.8.22/debian/patches/test_drvsupport.patch 2022-02-07 16:26:44.000000000 +0000 +++ fiona-1.9.5/debian/patches/test_drvsupport.patch 2023-08-12 08:26:54.000000000 +0000 @@ -5,7 +5,7 @@ --- a/tests/test_drvsupport.py +++ b/tests/test_drvsupport.py -@@ -22,6 +22,7 @@ def test_geojsonseq(format): +@@ -23,6 +23,7 @@ def test_geojsonseq(format): assert format in fiona.drvsupport.supported_drivers.keys() @@ -13,10 +13,10 @@ @pytest.mark.parametrize( "driver", [driver for driver, raw in supported_drivers.items() if "w" in raw] ) -@@ -109,6 +110,7 @@ def test_write_does_not_work_when_gdal_s - c.writerecords(records1) - +@@ -96,6 +97,7 @@ def test_write_does_not_work_when_gdal_s + # Some driver only allow a specific schema. These drivers can be + # excluded by adding them to blacklist_append_drivers. 
+@pytest.mark.xfail(strict=False) @pytest.mark.parametrize( "driver", [driver for driver, raw in supported_drivers.items() if "a" in raw] diff -Nru fiona-1.8.22/debian/python3-fiona.lintian-overrides fiona-1.9.5/debian/python3-fiona.lintian-overrides --- fiona-1.8.22/debian/python3-fiona.lintian-overrides 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/debian/python3-fiona.lintian-overrides 2023-08-22 15:10:59.000000000 +0000 @@ -0,0 +1,3 @@ +# False positive +package-contains-documentation-outside-usr-share-doc [usr/lib/python3/dist-packages/*.dist-info/top_level.txt] + diff -Nru fiona-1.8.22/debian/rules fiona-1.9.5/debian/rules --- fiona-1.8.22/debian/rules 2021-09-11 16:14:06.000000000 +0000 +++ fiona-1.9.5/debian/rules 2024-01-26 19:00:00.000000000 +0000 @@ -6,17 +6,14 @@ export LC_ALL=C.UTF-8 export LANG=C.UTF-8 -BUILD_DATE=$(shell LC_ALL=C date -u "+%B %d, %Y" -d "@$(SOURCE_DATE_EPOCH)") - DEB_BUILD_ARCH ?= $(shell dpkg-architecture -qDEB_BUILD_ARCH) export GDAL_ENABLE_DEPRECATED_DRIVER_GTM=YES export PYBUILD_NAME=fiona -export PYBUILD_AFTER_BUILD_python3 = mkdir -p doc-build && cd doc-build && PYTHONPATH={build_dir} http_proxy='127.0.0.1:9' python{version} -m sphinx -N -bhtml -D today="$(BUILD_DATE)" ../docs/ ../build/html export PYBUILD_TEST_PYTEST=1 -export PYBUILD_BEFORE_TEST=cp -r {dir}/tests {build_dir} -export PYBUILD_AFTER_TEST=rm -rf {build_dir}/tests +export PYBUILD_BEFORE_TEST=cp -r pytest.ini {dir}/tests {build_dir} +export PYBUILD_AFTER_TEST=rm -rf {build_dir}/pytest.ini {build_dir}/tests export PYBUILD_TEST_ARGS=--ignore tests/test_bytescollection.py \ --ignore tests/test_collection.py \ --ignore tests/test_data_paths.py \ @@ -35,30 +32,26 @@ --ignore tests/test_fio_ls.py \ --ignore tests/test_fio_rm.py \ --ignore tests/test_geopackage.py \ + --ignore tests/test_http_session.py \ --ignore tests/test_layer.py \ --ignore tests/test_listing.py \ --ignore tests/test_profile.py \ --ignore tests/test_unicode.py \ - --ignore tests/test_vfs.py + --ignore tests/test_vfs.py \ + --ignore tests/test_write.py %: - dh $@ --with python3,sphinxdoc --buildsystem pybuild - -override_dh_clean: - dh_clean - rm -rf fiona/*.so gdal-config.txt fiona/*.c VERSION.txt fiona/*.cpp Fiona.egg-info/ - rm -rf doc-build + dh $@ --buildsystem pybuild override_dh_auto_test: # Ignore test failures on problematic architectures only -ifneq (,$(filter $(DEB_BUILD_ARCH),hurd-i386 kfreebsd-amd64 kfreebsd-i386 ppc64)) +ifneq (,$(filter $(DEB_BUILD_ARCH),amd64 hurd-i386 kfreebsd-amd64 kfreebsd-i386 ppc64)) dh_auto_test || echo "Ignoring test failures" else dh_auto_test endif -override_dh_install: - dh_install +execute_after_dh_install: rm -rf debian/python3-fiona/usr/bin debian/fiona.1: diff -Nru fiona-1.8.22/docs/cli.rst fiona-1.9.5/docs/cli.rst --- fiona-1.8.22/docs/cli.rst 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/docs/cli.rst 2023-10-11 23:19:44.000000000 +0000 @@ -86,7 +86,7 @@ The cat command concatenates the features of one or more datasets and prints them as a `JSON text sequence -`__ of features. +`__ of features. In other words: GeoJSON feature objects, possibly pretty printed, optionally separated by ASCII RS (\x1e) chars using `--rs`. @@ -104,6 +104,14 @@ New in 1.4.0. +The cat command provides optional methods to filter data, which are +different from those of the ``fio filter`` tool. +A bounding box ``--bbox w,s,e,n`` tests for a spatial intersection with +the geometries. An attribute filter ``--where TEXT`` can use +an `SQL WHERE clause `__.
+If more than one dataset is passed to ``fio cat``, the attributes used +in the WHERE clause must be valid for each dataset. + +collect ------- @@ -286,6 +294,9 @@ Would create a geojson file with only those features from `data.shp` where the area was over a given threshold. +Note this tool is different from ``fio cat --where TEXT ...``, which provides +SQL WHERE clause filtering of feature attributes. + rm -- The ``fio rm`` command deletes an entire datasource or a single layer in a diff -Nru fiona-1.8.22/docs/conf.py fiona-1.9.5/docs/conf.py --- fiona-1.8.22/docs/conf.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/docs/conf.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Fiona documentation build configuration file, created by # sphinx-quickstart on Mon Dec 26 12:16:26 2011. @@ -11,13 +10,12 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import fiona import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) +# sys.path.insert(0, os.path.abspath('..')) # -- General configuration ----------------------------------------------------- @@ -26,7 +24,14 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc'] +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.napoleon', + 'sphinx.ext.todo', + 'sphinx_click', +] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -41,17 +46,23 @@ master_doc = 'index' # General information about the project. -project = u'Fiona' -copyright = u'2011, Sean Gillies' +project = 'Fiona' +copyright = '2011, Sean Gillies' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. -# -# The short X.Y version. -version = fiona.__version__ -# The full version, including alpha/beta/rc tags. -release = fiona.__version__ + +try: + import fiona + release = fiona.__version__ +except ImportError: + with open('../fiona/__init__.py') as f: + for line in f: + if line.find("__version__") >= 0: + release = line.split("=")[1].strip() + release = release.strip('"').strip("'") + break # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -93,7 +104,8 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. #html_theme = 'default' -html_theme = 'sphinxdoc' +#html_theme = 'sphinxdoc' +html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -185,8 +197,8 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [ - ('index', 'Fiona.tex', u'Fiona Documentation', - u'Sean Gillies', 'manual'), + ('index', 'Fiona.tex', 'Fiona Documentation', + 'Sean Gillies', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -215,8 +227,8 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'fiona', u'Fiona Documentation', - [u'Sean Gillies'], 1) + ('index', 'fiona', 'Fiona Documentation', + ['Sean Gillies'], 1) ] # If true, show URL addresses after external links. @@ -229,8 +241,8 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'Fiona', u'Fiona Documentation', - u'Sean Gillies', 'Fiona', 'One line description of project.', + ('index', 'Fiona', 'Fiona Documentation', + 'Sean Gillies', 'Fiona', 'One line description of project.', 'Miscellaneous'), ] @@ -247,10 +259,10 @@ # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. -epub_title = u'Fiona' -epub_author = u'Sean Gillies' -epub_publisher = u'Sean Gillies' -epub_copyright = u'2011, Sean Gillies' +epub_title = 'Fiona' +epub_author = 'Sean Gillies' +epub_publisher = 'Sean Gillies' +epub_copyright = '2011, Sean Gillies' # The language of the text. It defaults to the language option # or en if the language is not set. diff -Nru fiona-1.8.22/docs/fiona.rst fiona-1.9.5/docs/fiona.rst --- fiona-1.8.22/docs/fiona.rst 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/docs/fiona.rst 2023-10-11 23:19:44.000000000 +0000 @@ -43,6 +43,14 @@ :undoc-members: :show-inheritance: +fiona.env module +---------------- + +.. automodule:: fiona.env + :members: + :undoc-members: + :show-inheritance: + fiona.errors module ------------------- @@ -59,6 +67,22 @@ :undoc-members: :show-inheritance: +fiona.io module +--------------- + +.. automodule:: fiona.io + :members: + :undoc-members: + :show-inheritance: + +fiona.logutils module +--------------------- + +.. automodule:: fiona.logutils + :members: + :undoc-members: + :show-inheritance: + fiona.ogrext module ------------------- @@ -67,34 +91,34 @@ :undoc-members: :show-inheritance: -fiona.ogrext1 module --------------------- +fiona.path module +----------------- -.. automodule:: fiona.ogrext1 +.. automodule:: fiona.path :members: :undoc-members: :show-inheritance: -fiona.ogrext2 module +fiona.rfc3339 module -------------------- -.. automodule:: fiona.ogrext2 +.. automodule:: fiona.rfc3339 :members: :undoc-members: :show-inheritance: -fiona.rfc3339 module --------------------- +fiona.schema module +------------------- -.. automodule:: fiona.rfc3339 +.. automodule:: fiona.schema :members: :undoc-members: :show-inheritance: -fiona.tool module ------------------ +fiona.session module +-------------------- -.. automodule:: fiona.tool +.. automodule:: fiona.session :members: :undoc-members: :show-inheritance: @@ -107,9 +131,17 @@ :undoc-members: :show-inheritance: +fiona.vfs module +---------------- -Module contents ---------------- +.. automodule:: fiona.vfs + :members: + :undoc-members: + :show-inheritance: + + +fiona module +------------ .. 
automodule:: fiona :members: diff -Nru fiona-1.8.22/docs/index.rst fiona-1.9.5/docs/index.rst --- fiona-1.8.22/docs/index.rst 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/docs/index.rst 2023-10-11 23:19:44.000000000 +0000 @@ -1,10 +1,43 @@ -Fiona Documentation Contents -============================ +=============================================== +Fiona: access to simple geospatial feature data +=============================================== + +Fiona streams simple feature data to and from GIS formats like GeoPackage and +Shapefile. Simple features are record-like, or row-like, and have a single geometry +attribute. Fiona can read and write real-world simple feature data using +multi-layered GIS formats, zipped and in-memory virtual file systems, from +files on your hard drive or in cloud storage. This project includes Python +modules and a command line interface (CLI). + +Here's an example of streaming and filtering features from a zipped dataset on +the web and saving them to a new layer in a new GeoPackage file. + +.. code-block:: python + + import fiona + + with fiona.open( + "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip" + ) as src: + profile = src.profile + profile["driver"] = "GPKG" + + with fiona.open("example.gpkg", "w", layer="selection", **profile) as dst: + dst.writerecords(src.filter(bbox=(-107.0, 37.0, -105.0, 39.0))) + +The same result can be achieved on the command line using a combination of +fio-cat and fio-load. + +.. code-block:: console + + fio cat zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip --bbox "-107.0,37.0,-105.0,39.0" \ | fio load -f GPKG --layer selection example.gpkg .. toctree:: :maxdepth: 2 - README + Project Information + Installation User Manual API Documentation CLI Documentation diff -Nru fiona-1.8.22/docs/install.rst fiona-1.9.5/docs/install.rst --- fiona-1.8.22/docs/install.rst 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/docs/install.rst 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,77 @@ +============ +Installation +============ + +Installation of the Fiona package is complicated by its dependency on libgdal +and other C libraries. There are easy installation paths and an advanced +installation path. + +Easy installation +================= + +Fiona has several `extension modules +`__ which link against +libgdal. This complicates installation. Binary distributions (wheels) +containing libgdal and its own dependencies are available from the Python +Package Index and can be installed using pip. + +.. code-block:: console + + pip install fiona + +These wheels are mainly intended to make installation easy for simple +applications, not so much for production. They are not tested for compatibility +with all other binary wheels, conda packages, or QGIS, and omit many of GDAL's +optional format drivers. If you need, for example, GML support you will need to +build and install Fiona from a source distribution. + +Many users find Anaconda and conda-forge a good way to install Fiona and get +access to more optional format drivers (like GML). + +Fiona 1.9 requires Python 3.7 or higher and GDAL 3.2 or higher. + +Advanced installation +===================== + +Once GDAL and its dependencies are installed on your computer (how to do this +is documented at https://gdal.org) Fiona can be built and installed using +setuptools or pip. If your GDAL installation provides the ``gdal-config`` +program, the process is simpler. + +Without pip: + +..
code-block:: console + + GDAL_CONFIG=/path/to/gdal-config python setup.py install + +With pip (version >= 22.3 is required): + +.. code-block:: console + + python -m pip install --user -U pip + GDAL_CONFIG=/path/to/gdal-config python -m pip install --user . + +These are pretty much equivalent. Pip will use setuptools as the build backend. +If the gdal-config program is on your executable path, then you don't need to +set the environment variable. + +Without gdal-config you will need to configure header and library locations for +the build in another way. One way to do this is to create a setup.cfg file in +the source directory with content like this: + +.. code-block:: ini + + [build_ext] + include_dirs = C:/vcpkg/installed/x64-windows/include + libraries = gdal + library_dirs = C:/vcpkg/installed/x64-windows/lib + +This is the approach taken by Fiona's `wheel-building workflow +`__. +With this file in place you can run either ``python setup.py install`` or ``python +-m pip install --user .``. + +You can also pass those three values on the command line following the +`setuptools documentation +`__. +However, the setup.cfg approach is easier. diff -Nru fiona-1.8.22/docs/manual.rst fiona-1.9.5/docs/manual.rst --- fiona-1.8.22/docs/manual.rst 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/docs/manual.rst 2023-10-11 23:19:44.000000000 +0000 @@ -5,11 +5,13 @@ :Author: Sean Gillies, :Version: |release| :Date: |today| -:Copyright: - This work is licensed under a `Creative Commons Attribution 3.0 - United States License`__. +:Copyright: + This work, with the exception of code examples, is licensed under a `Creative + Commons Attribution 3.0 United States License`__. The code examples are + licensed under the BSD 3-clause license (see LICENSE.txt in the repository + root). -.. __: http://creativecommons.org/licenses/by/3.0/us/ +.. __: https://creativecommons.org/licenses/by/3.0/us/ :Abstract: Fiona is OGR's neat, nimble, no-nonsense API. This document explains how to @@ -35,13 +37,13 @@ and :dfn:`vectors` representing discrete entities like roads and administrative boundaries. Fiona is concerned exclusively with the latter. It is a Python wrapper for vector data access functions from the `GDAL/OGR -`_ library. A very simple wrapper for minimalists. +`_ library. A very simple wrapper for minimalists. It reads data records from files as GeoJSON-like mappings and writes the same kind of mappings as records back to files. That's it. There are no layers, no cursors, no geometric operations, no transformations between coordinate systems, no remote method calls; all these concerns are left to other Python -packages such as :py:mod:`Shapely ` and -:py:mod:`pyproj ` and Python language +packages such as :py:mod:`Shapely ` and +:py:mod:`pyproj ` and Python language protocols. Why? To eliminate unnecessary complication. Fiona aims to be simple to understand and use, with no gotchas. @@ -88,77 +90,74 @@ Example ------- -The first example of using Fiona is this: copying records from one file to -another, adding two attributes and making sure that all polygons are facing -"up". Orientation of polygons is significant in some applications, extruded -polygons in Google Earth for one. No other library (like :py:mod:`Shapely`) is -needed here, which keeps it uncomplicated. There's a :file:`test_uk` file in -the Fiona repository for use in this and other examples. - -.. 
sourcecode:: python - - import datetime - import logging - import sys - - import fiona - - logging.basicConfig(stream=sys.stderr, level=logging.INFO) - - def signed_area(coords): - """Return the signed area enclosed by a ring using the linear time - algorithm at http://www.cgafaq.info/wiki/Polygon_Area. A value >= 0 - indicates a counter-clockwise oriented ring. - """ - xs, ys = map(list, zip(*coords)) - xs.append(xs[1]) - ys.append(ys[1]) - return sum(xs[i]*(ys[i+1]-ys[i-1]) for i in range(1, len(coords)))/2.0 - - with fiona.open('docs/data/test_uk.shp', 'r') as source: - - # Copy the source schema and add two new properties. - sink_schema = source.schema - sink_schema['properties']['s_area'] = 'float' - sink_schema['properties']['timestamp'] = 'datetime' - - # Create a sink for processed features with the same format and - # coordinate reference system as the source. - with fiona.open( - 'oriented-ccw.shp', 'w', - crs=source.crs, - driver=source.driver, - schema=sink_schema, - ) as sink: - - for f in source: - - try: - - # If any feature's polygon is facing "down" (has rings - # wound clockwise), its rings will be reordered to flip - # it "up". - g = f['geometry'] - assert g['type'] == "Polygon" - rings = g['coordinates'] - sa = sum(signed_area(r) for r in rings) - if sa < 0.0: - rings = [r[::-1] for r in rings] - g['coordinates'] = rings - f['geometry'] = g - - # Add the signed area of the polygon and a timestamp - # to the feature properties map. - f['properties'].update( - s_area=sa, - timestamp=datetime.datetime.now().isoformat() ) - - sink.write(f) - - except Exception as e: - logging.exception("Error processing feature %s:", f['id']) +The first example of using Fiona is this: copying features (another word for +records) from one file to another, adding two attributes and making sure that +all polygons are facing "up". Orientation of polygons is significant in some +applications, extruded polygons in Google Earth for one. No other library (like +:py:mod:`Shapely`) is needed here, which keeps it uncomplicated. There's a +:file:`coutwildrnp.zip` file in the Fiona repository for use in this and other +examples. + +.. code-block:: python + + import datetime + + import fiona + from fiona import Geometry, Feature, Properties - # The sink file is written to disk and closed when its block ends. + + def signed_area(coords): + """Return the signed area enclosed by a ring using the linear time + algorithm at http://www.cgafaq.info/wiki/Polygon_Area. A value >= 0 + indicates a counter-clockwise oriented ring. + """ + xs, ys = map(list, zip(*coords)) + xs.append(xs[1]) + ys.append(ys[1]) + return sum(xs[i] * (ys[i + 1] - ys[i - 1]) for i in range(1, len(coords))) / 2.0 + + + with fiona.open( + "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip" + ) as src: + + # Copy the source schema and add two new properties. + dst_schema = src.schema + dst_schema["properties"]["signed_area"] = "float" + dst_schema["properties"]["timestamp"] = "datetime" + + # Create a sink for processed features with the same format and + # coordinate reference system as the source. + with fiona.open( + "example.gpkg", + mode="w", + layer="oriented-ccw", + crs=src.crs, + driver="GPKG", + schema=dst_schema, + ) as dst: + for feat in src: + # If any feature's polygon is facing "down" (has rings + # wound clockwise), its rings will be reordered to flip + # it "up".
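+ # (Note: signed_area() returns >= 0 for counter-clockwise rings, so a + # negative sum over all rings indicates a clockwise, "down"-facing polygon.)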
+ geom = feat.geometry + assert geom.type == "Polygon" + rings = geom.coordinates + sa = sum(signed_area(ring) for ring in rings) + + if sa < 0.0: + rings = [r[::-1] for r in rings] + geom = Geometry(type=geom.type, coordinates=rings) + + # Add the signed area of the polygon and a timestamp + # to the feature properties map. + props = Properties.from_dict( + **feat.properties, + signed_area=sa, + timestamp=datetime.datetime.now().isoformat() + ) + + dst.write(Feature(geometry=geom, properties=props)) Data Model ========== @@ -199,7 +198,7 @@ Fiona's major design principles. .. admonition:: TL;DR - + Fiona subscribes to the conventional record model of data, but provides GeoJSON-like access to the data via Python file-like and mapping protocols. @@ -210,61 +209,55 @@ :py:func:`~fiona.open` function. It returns an opened :py:class:`~fiona.collection.Collection` object. -.. sourcecode:: pycon - - >>> import fiona - >>> c = fiona.open('docs/data/test_uk.shp', 'r') - >>> c - - >>> c.closed - False - -.. admonition:: API Change +.. code-block:: pycon - :py:func:`fiona.collection` is deprecated, but aliased to - :py:func:`fiona.open` in version 0.9. + >>> import fiona + >>> colxn = fiona.open("zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip", "r") + >>> colxn + + >>> colxn.closed + False Mode ``'r'`` is the default and will be omitted in following examples. Fiona's :py:class:`~fiona.collection.Collection` is like a Python :py:class:`file`, but is iterable for records rather than lines. -.. sourcecode:: pycon +.. code-block:: pycon - >>> next(c) - {'geometry': {'type': 'Polygon', 'coordinates': ... - >>> len(list(c)) - 48 + >>> next(iter(colxn)) + {'geometry': {'type': 'Polygon', 'coordinates': ... + >>> len(list(colxn)) + 67 Note that :py:func:`list` iterates over the entire collection, effectively emptying it as with a Python :py:class:`file`. -.. sourcecode:: pycon +.. code-block:: pycon - >>> next(c) - Traceback (most recent call last): - ... - StopIteration - >>> len(list(c)) - 0 + >>> next(iter(colxn)) + Traceback (most recent call last): + ... + StopIteration + >>> len(list(colxn)) + 0 Seeking the beginning of the file is not supported. You must reopen the collection to get back to the beginning. -.. sourcecode:: pycon +.. code-block:: pycon - >>> c = fiona.open('docs/data/test_uk.shp') - >>> len(list(c)) - 48 + >>> colxn = fiona.open("zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip") + >>> len(list(colxn)) + 67 .. admonition:: File Encoding The format drivers will attempt to detect the encoding of your data, but may - fail. In my experience GDAL 1.7.2 (for example) doesn't detect that the - encoding of the Natural Earth dataset is Windows-1252. In this case, the - proper encoding can be specified explicitly by using the ``encoding`` - keyword parameter of :py:func:`fiona.open`: ``encoding='Windows-1252'``. - + fail. In this case, the proper encoding can be specified explicitly by using + the ``encoding`` keyword parameter of :py:func:`fiona.open`, for example: + ``encoding='Windows-1252'``. + New in version 0.9.1. Collection indexing ------------------- @@ -274,29 +267,15 @@ .. code-block:: pycon - >>> import pprint - >>> with fiona.open('docs/data/test_uk.shp') as src: - ... pprint.pprint(src[1]) - ...
- {'geometry': {'coordinates': [[(-4.663611, 51.158333), - (-4.669168, 51.159439), - (-4.673334, 51.161385), - (-4.674445, 51.165276), - (-4.67139, 51.185272), - (-4.669445, 51.193054), - (-4.665556, 51.195), - (-4.65889, 51.195), - (-4.656389, 51.192215), - (-4.646389, 51.164444), - (-4.646945, 51.160828), - (-4.651668, 51.159439), - (-4.663611, 51.158333)]], - 'type': 'Polygon'}, - 'id': '1', - 'properties': OrderedDict([(u'CAT', 232.0), (u'FIPS_CNTRY', u'UK'), (u'CNTRY_NAME', u'United Kingdom'), (u'AREA', 244820.0), (u'POP_CNTRY', 60270708.0)]), - 'type': 'Feature'} + >>> with fiona.open("zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip") as colxn: + ... print(colxn[1]) + ... + -Note that these indices are controlled by GDAL, and do not always follow Python conventions. They can start from 0, 1 (e.g. geopackages), or even other values, and have no guarantee of contiguity. Negative indices will only function correctly if indices start from 0 and are contiguous. +Note that these indices are controlled by GDAL, and do not always follow Python +conventions. They can start from 0, 1 (e.g. geopackages), or even other values, +and have no guarantee of contiguity. Negative indices will only function +correctly if indices start from 0 and are contiguous. New in version 1.1.6 @@ -309,27 +288,27 @@ a :keyword:`with` statement. When a :py:class:`~fiona.collection.Collection` is a context guard, it is closed no matter what happens within the block. -.. sourcecode:: pycon +.. code-block:: pycon - >>> try: - ... with fiona.open('docs/data/test_uk.shp') as c: - ... print(len(list(c))) - ... assert True is False - ... except: - ... print(c.closed) - ... raise - ... - 48 - True - Traceback (most recent call last): + >>> try: + ... with fiona.open("zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip") as colxn: + ... print(len(list(colxn))) + ... assert True is False + ... except Exception: + ... print(colxn.closed) + ... raise ... - AssertionError + 67 + True + Traceback (most recent call last): + ... + AssertionError An exception is raised in the :keyword:`with` block above, but as you can see -from the print statement in the :keyword:`except` clause :py:meth:`c.__exit__` -(and thereby :py:meth:`c.close`) has been called. +from the print statement in the :keyword:`except` clause :py:meth:`colxn.__exit__` +(and thereby :py:meth:`colxn.close`) has been called. -.. important:: Always call :py:meth:`~fiona.collection.Collection.close` or +.. important:: Always call :py:meth:`~fiona.collection.Collection.close` or use :keyword:`with` and you'll never stumble over tied-up external resources, locked files, etc. @@ -342,63 +321,67 @@ :py:attr:`~fiona.collection.Collection.driver` attribute which names the :program:`OGR` :dfn:`format driver` used to open the vector file. -.. sourcecode:: pycon +.. code-block:: pycon - >>> c = fiona.open('docs/data/test_uk.shp') - >>> c.driver - 'ESRI Shapefile' + >>> colxn = fiona.open("zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip") + >>> colxn.driver + 'ESRI Shapefile' The :dfn:`coordinate reference system` (CRS) of the collection's vector data is accessed via a read-only :py:attr:`~fiona.collection.Collection.crs` attribute. -.. sourcecode:: pycon - - >>> c.crs - {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'} +.. code-block:: pycon -The CRS is represented by a mapping of :program:`PROJ.4` parameters. 
+ >>> colxn.crs + CRS.from_epsg(4326) The :py:mod:`fiona.crs` module provides 3 functions to assist with these mappings. :py:func:`~fiona.crs.to_string` converts mappings to PROJ.4 strings: -.. sourcecode:: pycon +.. code-block:: pycon - >>> from fiona.crs import to_string - >>> print(to_string(c.crs)) - +datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat + >>> from fiona.crs import to_string + >>> to_string(colxn.crs) + 'EPSG:4326' :py:func:`~fiona.crs.from_string` does the inverse. -.. sourcecode:: pycon +.. code-block:: pycon - >>> from fiona.crs import from_string - >>> from_string("+datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat") - {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'} + >>> from fiona.crs import from_string + >>> from_string("+datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat") + CRS.from_epsg(4326) :py:func:`~fiona.crs.from_epsg` is a shortcut to CRS mappings from EPSG codes. -.. sourcecode:: pycon +.. code-block:: pycon - >>> from fiona.crs import from_epsg - >>> from_epsg(3857) - {'init': 'epsg:3857', 'no_defs': True} + >>> from fiona.crs import from_epsg + >>> from_epsg(3857) + CRS.from_epsg(3857) + +.. admonition:: No Validation + + Both :py:func:`~fiona.crs.from_epsg` and :py:func:`~fiona.crs.from_string` + simply restructure data, they do not ensure that the resulting mapping is + a pre-defined or otherwise valid CRS in any way. The number of records in the collection's file can be obtained via Python's built in :py:func:`len` function. -.. sourcecode:: pycon +.. code-block:: pycon - >>> len(c) - 48 + >>> len(colxn) + 67 The :dfn:`minimum bounding rectangle` (MBR) or :dfn:`bounds` of the collection's records is obtained via a read-only :py:attr:`~fiona.collection.Collection.bounds` attribute. -.. sourcecode:: pycon +.. code-block:: pycon - >>> c.bounds - (-8.621389, 49.911659, 1.749444, 60.844444) + >>> colxn.bounds + (-113.56424713134766, 37.0689811706543, -104.97087097167969, 41.99627685546875) Finally, the schema of its record type (a vector file has a single type of record, remember) is accessed via a read-only @@ -406,17 +389,22 @@ and 'properties' items. The former is a string and the latter is an ordered dict with items having the same order as the fields in the data file. -.. sourcecode:: pycon +.. code-block:: pycon + + >>> import pprint + >>> pprint.pprint(colxn.schema) + {'geometry': 'Polygon', + 'properties': {'AGBUR': 'str:80', + 'AREA': 'float:24.15', + 'FEATURE1': 'str:80', + 'FEATURE2': 'str:80', + 'NAME': 'str:80', + 'PERIMETER': 'float:24.15', + 'STATE': 'str:80', + 'STATE_FIPS': 'str:80', + 'URL': 'str:101', + 'WILDRNP020': 'int:10'}} - >>> import pprint - >>> pprint.pprint(c.schema) - {'geometry': 'Polygon', - 'properties': {'CAT': 'float:16', - 'FIPS_CNTRY': 'str', - 'CNTRY_NAME': 'str', - 'AREA': 'float:15.2', - 'POP_CNTRY': 'float:15.2'}} - Keeping Schemas Simple ---------------------- @@ -425,43 +413,46 @@ Modulo a record's 'id' key, the keys of a schema mapping are the same as the keys of the collection's record mappings. -.. sourcecode:: pycon +.. code-block:: pycon - >>> rec = next(c) - >>> set(rec.keys()) - set(c.schema.keys()) - {'id'} - >>> set(rec['properties'].keys()) == set(c.schema['properties'].keys()) - True + >>> feat = next(iter(colxn)) + >>> set(feat.keys()) - set(colxn.schema.keys()) + {'id'} + >>> set(feat['properties'].keys()) == set(colxn.schema['properties'].keys()) + True The values of the schema mapping are either additional mappings or field type names like 'Polygon', 'float', and 'str'. 
The corresponding Python types can be found in a dictionary named :py:attr:`fiona.FIELD_TYPES_MAP`. -.. sourcecode:: pycon +.. code-block:: pycon - >>> pprint.pprint(fiona.FIELD_TYPES_MAP) - {'date': , - 'datetime': , - 'float': , - 'int': , - 'str': , - 'time': } + >>> pprint.pprint(fiona.FIELD_TYPES_MAP) + {'List[str]': typing.List[str], + 'bytes': , + 'date': , + 'datetime': , + 'float': , + 'int': , + 'int32': , + 'int64': , + 'str': , + 'time': } Field Types ----------- In a nutshell, the types and their names are as near to what you'd expect in -Python (or Javascript) as possible. The 'str' vs 'unicode' muddle is a fact of -life in Python < 3.0. Fiona records have Unicode strings, but their field type -name is 'str' (looking forward to Python 3). +Python (or Javascript) as possible. Since Python 3, the 'str' field type +may contain Unicode characters. -.. sourcecode:: pycon +.. code-block:: pycon - >>> type(rec['properties']['CNTRY_NAME']) + >>> type(feat.properties['NAME']) - >>> c.schema['properties']['CNTRY_NAME'] + >>> colxn.schema['properties']['NAME'] 'str' - >>> fiona.FIELD_TYPES_MAP[c.schema['properties']['CNTRY_NAME']] + >>> fiona.FIELD_TYPES_MAP[colxn.schema['properties']['NAME']] String type fields may also indicate their maximum width. A value of 'str:25' @@ -472,7 +463,7 @@ Fiona provides a function to get the width of a property. -.. sourcecode:: pycon +.. code-block:: pycon >>> from fiona import prop_width >>> prop_width('str:25') @@ -482,7 +473,7 @@ Another function gets the proper Python type of a property. -.. sourcecode:: pycon +.. code-block:: pycon >>> from fiona import prop_type >>> prop_type('int') @@ -492,14 +483,6 @@ >>> prop_type('str:25') -The example above is for Python 3. With Python 2, the type of 'str' properties -is 'unicode'. - -.. sourcecode:: pycon - - >>> prop_type('str:25') - - Geometry Types -------------- @@ -531,120 +514,83 @@ that indicates 'Polygon' in its schema may yield either 'Polygon' or 'MultiPolygon' features. -Records -======= +Features +======== -A record you get from a collection is a Python :py:class:`dict` structured -exactly like a GeoJSON Feature. Fiona records are self-describing; the names of -its fields are contained within the data structure and the values in the fields -are typed properly for the type of record. Numeric field values are instances -of type :py:class:`int` and :py:class:`float`, for example, not strings. - -.. sourcecode:: pycon - - >>> pprint.pprint(rec) - {'geometry': {'coordinates': [[(-4.663611, 51.158333), - (-4.669168, 51.159439), - (-4.673334, 51.161385), - (-4.674445, 51.165276), - (-4.67139, 51.185272), - (-4.669445, 51.193054), - (-4.665556, 51.195), - (-4.65889, 51.195), - (-4.656389, 51.192215), - (-4.646389, 51.164444), - (-4.646945, 51.160828), - (-4.651668, 51.159439), - (-4.663611, 51.158333)]], - 'type': 'Polygon'}, - 'id': '1', - 'properties': {'CAT': 232.0, - 'FIPS_CNTRY': 'UK', - 'CNTRY_NAME': 'United Kingdom', - 'AREA': 244820.0, - 'POP_CNTRY': 60270708.0}} +A record you get from a collection is structured like a GeoJSON Feature. Fiona +records are self-describing; the names of its fields are contained within the +data structure and the values in the fields are typed properly for the type of +record. Numeric field values are instances of type :py:class:`int` and +:py:class:`float`, for example, not strings. The record data has no references to the :py:class:`~fiona.collection.Collection` from which it originates or to any other external resource. 
It's entirely independent and safe to use in any way. Closing the collection does not affect the record at all. -.. sourcecode:: pycon +.. admonition:: Features are mappings, not dicts + + In Fiona versions before 1.9.0 features were Python dicts, mutable and JSON + serializable. Since 1.9.0 features are mappings and not immediately JSON + serializable. - >>> c.close() - >>> rec['id'] - '1' + Instances of Feature can be converted to dicts with + :py:func:`fiona.model.to_dict` or serialized using the json module and + :py:class:`fiona.model.ObjectEncoder`. -Record Id ---------- +Feature Id +---------- -A record has an ``id`` key. As in the GeoJSON specification, its corresponding -value is a string unique within the data file. +A feature has an ``id`` attribute. As in the GeoJSON specification, its +corresponding value is a string unique within the data file. -.. sourcecode:: pycon +.. code-block:: pycon - >>> c = fiona.open('docs/data/test_uk.shp') - >>> rec = next(c) - >>> rec['id'] - '0' + >>> colxn = fiona.open("zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip") + >>> feat = next(iter(colxn)) + >>> feat.id + '0' .. admonition:: OGR Details In the :program:`OGR` model, feature ids are long integers. Fiona record ids are therefore usually string representations of integer record indexes. -Record Properties ------------------ +Feature Properties +------------------ + +A feature has a ``properties`` attribute. Its value is a mapping. The keys of +the properties mapping are the same as the keys of the properties mapping in +the schema of the collection the record comes from (see above). + +.. code-block:: pycon + + >>> for k, v in feat.properties.items(): + ... print(k, v) + ... + PERIMETER 1.22107 + FEATURE2 None + NAME Mount Naomi Wilderness + FEATURE1 Wilderness + URL http://www.wilderness.net/index.cfm?fuse=NWPS&sec=wildView&wname=Mount%20Naomi + AGBUR FS + AREA 0.0179264 + STATE_FIPS 49 + WILDRNP020 332 + STATE UT + +Feature Geometry +---------------- + +A feature has a ``geometry`` attribute. Its value is a mapping with ``type`` +and ``coordinates`` keys. -A record has a ``properties`` key. Its corresponding value is a mapping: an -ordered dict to be precise. The keys of the properties mapping are the same as -the keys of the properties mapping in the schema of the collection the record -comes from (see above). - -.. sourcecode:: pycon - - >>> pprint.pprint(rec['properties']) - {'CAT': 232.0, - 'FIPS_CNTRY': 'UK', - 'CNTRY_NAME': 'United Kingdom', - 'AREA': 244820.0, - 'POP_CNTRY': 60270708.0} - -Record Geometry ---------------- - -A record has a ``geometry`` key. Its corresponding value is a mapping with -``type`` and ``coordinates`` keys. - -.. sourcecode:: pycon - - >>> pprint.pprint(rec['geometry']) - {'coordinates': [[(0.899167, 51.357216), - (0.885278, 51.35833), - (0.7875, 51.369438), - (0.781111, 51.370552), - (0.766111, 51.375832), - (0.759444, 51.380829), - (0.745278, 51.39444), - (0.740833, 51.400276), - (0.735, 51.408333), - (0.740556, 51.429718), - (0.748889, 51.443604), - (0.760278, 51.444717), - (0.791111, 51.439995), - (0.892222, 51.421387), - (0.904167, 51.418884), - (0.908889, 51.416939), - (0.930555, 51.398888), - (0.936667, 51.393608), - (0.943889, 51.384995), - (0.9475, 51.378609), - (0.947778, 51.374718), - (0.946944, 51.371109), - (0.9425, 51.369164), - (0.904722, 51.358055), - (0.899167, 51.357216)]], - 'type': 'Polygon'} +.. 
code-block:: pycon + + >>> feat.geometry["type"] 'Polygon' + >>> feat.geometry["coordinates"] + [[(-111.73527526855469, 41.995094299316406), ..., (-111.73527526855469, 41.995094299316406)]] Since the coordinates are just tuples, or lists of tuples, or lists of lists of tuples, the ``type`` tells you how to interpret them. @@ -683,9 +629,9 @@ above are representations of geometric objects made up of :dfn:`point sets`. The following -.. sourcecode:: python +.. code-block:: python - {'type': 'LineString', 'coordinates': [(0.0, 0.0), (0.0, 1.0)]} + {"type": "LineString", "coordinates": [(0.0, 0.0), (0.0, 1.0)]} represents not just two points, but the set of infinitely many points along the line of length 1.0 from ``(0.0, 0.0)`` to ``(0.0, 1.0)``. In the application of @@ -694,7 +640,7 @@ equal in the Python sense or not. If you have Shapely (which implements Simple Features Access) installed, you can see this by verifying the following. -.. sourcecode:: pycon +.. code-block:: pycon >>> from shapely.geometry import shape >>> l1 = shape( @@ -722,7 +668,7 @@ ``'w'`` (write). .. admonition:: Note - + The in situ "update" mode of :program:`OGR` is quite format dependent and is therefore not supported by Fiona. @@ -730,75 +676,74 @@ -------------------------------- Let's start with the simplest if not most common use case, adding new records -to an existing file. The file is copied before modification and a suitable -record extracted in the example below. +to an existing file. -.. sourcecode:: pycon +.. code-block:: console - >>> with fiona.open('docs/data/test_uk.shp') as c: - ... rec = next(c) - >>> rec['id'] = '-1' - >>> rec['properties']['CNTRY_NAME'] = 'Gondor' - >>> import os - >>> os.system("cp docs/data/test_uk.* /tmp") - 0 + $ wget https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip + $ unzip coutwildrnp.zip The coordinate reference system, format, and schema of the file are already defined, so it's opened with just two arguments as for reading, but in ``'a'`` mode. The new record is written to the end of the file using the :py:meth:`~fiona.collection.Collection.write` method. Accordingly, the length -of the file grows from 48 to 49. +of the file grows from 67 to 68. -.. sourcecode:: pycon +.. code-block:: pycon - >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: - ... print(len(c)) - ... c.write(rec) - ... print(len(c)) - ... - 48 - 49 + >>> with fiona.open("coutwildrnp.shp", "a") as dst: + ... print(len(dst)) + ... with fiona.open("zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip") as src: + ... feat = src[0] + ... print(feat.id, feat.properties["NAME"]) + ... dst.write(feat) + ... print(len(dst)) + ... + 67 + ('0', 'Mount Naomi Wilderness') + 68 -The record you write must match the file's schema (because a file contains one +The feature you write must match the file's schema (because a file contains one type of record, remember). You'll get a :py:class:`ValueError` if it doesn't. -.. sourcecode:: pycon +.. code-block:: pycon - >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: - ... c.write({'properties': {'foo': 'bar'}}) - ... - Traceback (most recent call last): + >>> with fiona.open("coutwildrnp.shp", "a") as dst: + ... dst.write({'properties': {'foo': 'bar'}}) ... - ValueError: Record data not match collection schema + Traceback (most recent call last): + ... + ValueError: Record data not match collection schema Now, what about record ids? The id of a record written to a file is ignored and replaced by the next value appropriate for the file.
If you read the file just appended to above, -.. sourcecode:: pycon +.. code-block:: pycon - >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: - ... records = list(c) - >>> records[-1]['id'] - '48' - >>> records[-1]['properties']['CNTRY_NAME'] - 'Gondor' + >>> with fiona.open("coutwildrnp.shp") as colxn: + ... feat = colxn[-1] + ... + >>> feat.id + '67' + >>> feat.properties["NAME"] + 'Mount Naomi Wilderness' -You'll see that the id of ``'-1'`` which the record had when written is -replaced by ``'48'``. +You'll see that the id of ``'0'`` which the record had when written is replaced +by ``'67'``. The :py:meth:`~fiona.collection.Collection.write` method writes a single record to the collection's file. Its sibling :py:meth:`~fiona.collection.Collection.writerecords` writes a sequence (or iterator) of records. -.. sourcecode:: pycon +.. code-block:: pycon - >>> with fiona.open('/tmp/test_uk.shp', 'a') as c: - ... c.writerecords([rec, rec, rec]) - ... print(len(c)) - ... - 52 + >>> with fiona.open("coutwildrnp.shp", "a") as colxn: + ... colxn.writerecords([feat, feat, feat]) + ... print(len(colxn)) + ... + 71 .. admonition:: Duplication @@ -806,6 +751,20 @@ write 3 duplicate records to the file, and they will be given unique sequential ids. +.. admonition:: Transactions + + Fiona uses transactions during write operations to ensure data integrity. + :py:meth:`writerecords` will start and commit one transaction. If there + are lots of records, intermediate commits will be performed at reasonable + intervals. + + Depending on the driver, a transaction can be a very costly operation. + Since :py:meth:`write` is just a thin convenience wrapper that calls + :py:meth:`writerecords` with a single record, you may experience significant + performance issues if you write lots of features one by one using this method. + Consider preparing your data first and then writing it in a single call to + :py:meth:`writerecords`. + .. admonition:: Buffering Fiona's output is buffered. The records passed to :py:meth:`write` and @@ -813,170 +772,150 @@ You may also call :py:meth:`flush` periodically to write the buffer contents to disk. +.. admonition:: Format requirements + + Format drivers may have specific requirements about what they store. For + example, the Shapefile driver may "fix" topologically invalid features. + Creating files of the same structure ------------------------------------ Writing a new file is more complex than appending to an existing file because the file CRS, format, and schema have not yet been defined and must be done so by the programmer. Still, it's not very complicated. A schema is just -a mapping, as described above. A CRS is also just a mapping, and the possible +a mapping, as described above. The possible formats are enumerated in the :py:attr:`fiona.supported_drivers` dictionary. Review the parameters of our demo file. -.. sourcecode:: pycon +.. code-block:: pycon - >>> with fiona.open('docs/data/test_uk.shp') as source: - ... source_driver = source.driver - ... source_crs = source.crs - ... source_schema = source.schema - ... - >>> source_driver - 'ESRI Shapefile' - >>> source_crs - {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'} - >>> pprint.pprint(source_schema) - {'geometry': 'Polygon', - 'properties': {'CAT': 'float:16', - 'FIPS_CNTRY': 'str', - 'CNTRY_NAME': 'str', - 'AREA': 'float:15.2', - 'POP_CNTRY': 'float:15.2'}} + >>> with fiona.open( + ... "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip" + ... ) as src: + ...
driver = src.driver + ... crs = src.crs + ... schema = src.schema + ... feat = src[1] + ... + >>> driver + 'ESRI Shapefile' + >>> crs + CRS.from_epsg(4326) + >>> pprint.pprint(schema) + {'geometry': 'Polygon', + 'properties': {'AGBUR': 'str:80', + 'AREA': 'float:24.15', + 'FEATURE1': 'str:80', + 'FEATURE2': 'str:80', + 'NAME': 'str:80', + 'PERIMETER': 'float:24.15', + 'STATE': 'str:80', + 'STATE_FIPS': 'str:80', + 'URL': 'str:101', + 'WILDRNP020': 'int:10'}} We can create a new file using them. -.. sourcecode:: pycon +.. code-block:: pycon - >>> with fiona.open( - ... '/tmp/foo.shp', - ... 'w', - ... driver=source_driver, - ... crs=source_crs, - ... schema=source_schema) as c: - ... print(len(c)) - ... c.write(rec) - ... print(len(c)) - ... - 0 - 1 - >>> c.closed - True - >>> len(c) - 1 + >>> with fiona.open("example.shp", "w", driver=driver, crs=crs, schema=schema) as dst: + ... print(len(dst)) + ... dst.write(feat) + ... print(len(dst)) + ... + 0 + 1 + >>> dst.closed + True + >>> len(dst) + 1 Because the properties of the source schema are ordered and are passed in the same order to the write-mode collection, the written file's fields have the same order as those of the source file. -.. sourcecode:: console - - $ ogrinfo /tmp/foo.shp foo -so - INFO: Open of `/tmp/foo.shp' - using driver `ESRI Shapefile' successful. - - Layer name: foo - Geometry: 3D Polygon - Feature Count: 1 - Extent: (0.735000, 51.357216) - (0.947778, 51.444717) - Layer SRS WKT: - GEOGCS["GCS_WGS_1984", - DATUM["WGS_1984", - SPHEROID["WGS_84",6378137,298.257223563]], - PRIMEM["Greenwich",0], - UNIT["Degree",0.017453292519943295]] - CAT: Real (16.0) - FIPS_CNTRY: String (80.0) - CNTRY_NAME: String (80.0) - AREA: Real (15.2) - POP_CNTRY: Real (15.2) - -The :py:attr:`~fiona.collection.Collection.meta` attribute makes duplication of +The :py:attr:`~fiona.collection.Collection.profile` attribute makes duplication of a file's meta properties even easier. -.. sourcecode:: pycon +.. code-block:: pycon - >>> source = fiona.open('docs/data/test_uk.shp') - >>> sink = fiona.open('/tmp/foo.shp', 'w', **source.meta) + >>> src = fiona.open("zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip") + >>> dst = fiona.open("example.shp", "w", **src.profile) Writing new files from scratch ------------------------------- -To write a new file from scratch we have to define our own specific driver, crs and schema. +To write a new file from scratch we have to define our own specific driver, crs +and schema. -To ensure the order of the attribute fields is predictable, in both the schema and the actual manifestation as feature attributes, we will use ordered dictionaries. +To ensure the order of the attribute fields is predictable, in both the schema +and the actual manifestation as feature attributes, we will use ordered +dictionaries. -.. sourcecode:: pycon - - >>> from collections import OrderedDict +Consider the following record, structured in accordance with the `Python geo +protocol `__, representing the Eiffel +Tower using a point geometry with UTM coordinates in zone 31N. -Consider the following record, structured in accordance to the `Python geo protocol `__, representing the Eiffel Tower using a point geometry with UTM coordinates in zone 31N. - -.. sourcecode:: pycon +.. code-block:: pycon - >>> eiffel_tower = { - ... 'geometry': { - ... 'type': 'Point', - ... 'coordinates': (448252, 5411935) - ... }, - ... 'properties': OrderedDict([ - ... ('name', 'Eiffel Tower'), - ... ('height', 300.01),
 Writing new files from scratch
 -------------------------------

-To write a new file from scratch we have to define our own specific driver, crs and schema.
+To write a new file from scratch we have to define our own specific driver, crs
+and schema.

-To ensure the order of the attribute fields is predictable, in both the schema and the actual manifestation as feature attributes, we will use ordered dictionaries.
+To ensure the order of the attribute fields is predictable, in both the schema
+and the actual manifestation as feature attributes, we will use ordered
+dictionaries.

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> from collections import OrderedDict
+Consider the following record, structured in accordance to the `Python geo
+protocol `__, representing the Eiffel
+Tower using a point geometry with UTM coordinates in zone 31N.

-Consider the following record, structured in accordance to the `Python geo protocol `__, representing the Eiffel Tower using a point geometry with UTM coordinates in zone 31N.
-
-.. sourcecode:: pycon
-
-    >>> eiffel_tower = {
-    ...    'geometry': {
-    ...        'type': 'Point',
-    ...        'coordinates': (448252, 5411935)
-    ...    },
-    ...    'properties': OrderedDict([
-    ...        ('name', 'Eiffel Tower'),
-    ...        ('height', 300.01),
-    ...        ('view', 'scenic'),
-    ...        ('year', 1889)
-    ...    ])
-    ... }
+.. code-block:: pycon
+
+    >>> eiffel_tower = {
+    ...    'geometry': {
+    ...        'type': 'Point',
+    ...        'coordinates': (448252, 5411935)
+    ...    },
+    ...    'properties': dict([
+    ...        ('name', 'Eiffel Tower'),
+    ...        ('height', 300.01),
+    ...        ('view', 'scenic'),
+    ...        ('year', 1889)
+    ...    ])
+    ... }

 A corresponding scheme could be:

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> landmarks_schema = {
-    ...     'geometry': 'Point',
-    ...     'properties': OrderedDict([
-    ...         ('name', 'str'),
-    ...         ('height', 'float'),
-    ...         ('view', 'str'),
-    ...         ('year', 'int')
-    ...     ])
-    ... }
+    >>> landmarks_schema = {
+    ...     'geometry': 'Point',
+    ...     'properties': dict([
+    ...         ('name', 'str'),
+    ...         ('height', 'float'),
+    ...         ('view', 'str'),
+    ...         ('year', 'int')
+    ...     ])
+    ... }

-The coordinate reference system of these landmark coordinates is ETRS89 / UTM zone 31N which is referenced in the EPSG database as EPSG:25831.
+The coordinate reference system of these landmark coordinates is ETRS89 / UTM
+zone 31N which is referenced in the EPSG database as EPSG:25831.

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> from fiona.crs import from_epsg
-    >>> landmarks_crs = from_epsg(25831)
+    >>> from fiona.crs import CRS
+    >>> landmarks_crs = CRS.from_epsg(25831)

 An appropriate driver could be:

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> output_driver = "GeoJSON"
+    >>> driver = "GeoJSON"

-Having specified schema, crs and driver, we are ready to open a file for writing our record:
+Having specified schema, crs and driver, we are ready to open a file for
+writing our record:

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> with fiona.open(
-    ...     '/tmp/foo.geojson',
-    ...     'w',
-    ...     driver=output_driver,
-    ...     crs=landmarks_crs,
-    ...     schema=landmarks_schema) as c:
-    ...     c.write(eiffel_tower)
-    ...
-
-    >>> import pprint
-    >>> with fiona.open('/tmp/foo.geojson') as source:
-    ...     for record in source:
-    ...         pprint.pprint(record)
-    {'geometry': {'coordinates': (448252.0, 5411935.0), 'type': 'Point'},
-     'id': '0',
-     'properties': OrderedDict([('name', 'Eiffel Tower'),
-                                ('height', 300.01),
-                                ('view', 'scenic'),
-                                ('year', 1889)]),
-     'type': 'Feature'}
+    >>> with fiona.open(
+    ...     "landmarks.geojson",
+    ...     "w",
+    ...     driver="GeoJSON",
+    ...     crs=CRS.from_epsg(25831),
+    ...     schema=landmarks_schema
+    ... ) as colxn:
+    ...     colxn.write(eiffel_tower)
+    ...

 Ordering Record Fields
 ......................

@@ -987,49 +926,46 @@
 dict is given, the ordering is determined by the output of that dict's
 :py:func:`~items` method.

-For example, since
-
-.. sourcecode:: pycon
-
-    >>> {'bar': 'int', 'foo': 'str'}.keys()
-    ['foo', 'bar']
-
-a schema of ``{'properties': {'bar': 'int', 'foo': 'str'}}`` will produce
-a shapefile where the first field is 'foo' and the second field is 'bar'. If
-you want 'bar' to be the first field, you must use a list of property items
-
-.. sourcecode:: pycon
-
-    c = fiona.open(
-        '/tmp/file.shp',
-        'w',
-        schema={'properties': [('bar', 'int'), ('foo', 'str')], ...},
-        ... )
-
-or an ordered dict.
-
-.. sourcecode:: pycon
-
-    from collections import OrderedDict
-
-    schema_props = OrderedDict([('bar', 'int'), ('foo', 'str')])
-
-    c = fiona.open(
-        '/tmp/file.shp',
-        'w',
-        schema={'properties': schema_props, ...},
-        ... )
-
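+A sketch of the modern equivalent (the path ``ordered.shp`` is illustrative):
+pass the properties as a list of ``(name, type)`` tuples, or rely on the
+insertion order of a plain dict.
+
+.. code-block:: python
+
+    import fiona
+
+    # 'bar' becomes the first field, 'foo' the second.
+    schema = {
+        "geometry": "Point",
+        "properties": [("bar", "int"), ("foo", "str")],
+    }
+
+    with fiona.open(
+        "ordered.shp", "w", driver="ESRI Shapefile", schema=schema
+    ) as dst:
+        pass
+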
-
-Coordinates and Geometry Types
-------------------------------
+3D Coordinates and Geometry Types
+---------------------------------

 If you write 3D coordinates, ones having (x, y, z) tuples, to a 2D file
 ('Point' schema geometry, for example) the z values will be lost.

+.. code-block:: python
+
+    >>> feat = {"geometry": {"type": "Point", "coordinates": (-1, 1, 5)}}
+    >>> with fiona.open(
+    ...     "example.shp",
+    ...     "w",
+    ...     driver="Shapefile",
+    ...     schema={"geometry": "Point", "properties": {}}
+    ... ) as dst:
+    ...     dst.write(feat)
+    ...
+    >>> with fiona.open("example.shp") as src:
+    ...     print(src[0].geometry.coordinates)
+    ...
+    (-1.0, 1.0)
+
 If you write 2D coordinates, ones having only (x, y) tuples, to a 3D file
 ('3D Point' schema geometry, for example) a default z value of 0 will be
 provided.

+.. code-block:: python
+
+    >>> feat = {"geometry": {"type": "Point", "coordinates": (-1, 1)}}
+    >>> with fiona.open(
+    ...     "example.shp",
+    ...     "w",
+    ...     driver="Shapefile",
+    ...     schema={"geometry": "3D Point", "properties": {}}
+    ... ) as dst:
+    ...     dst.write(feat)
+    ...
+    >>> with fiona.open("example.shp") as src:
+    ...     print(src[0].geometry.coordinates)
+    ...
+    (-1.0, 1.0, 0.0)

 Advanced Topics
 ===============

@@ -1038,24 +974,26 @@
 -------------------------

 GDAL/OGR has a large number of features that are controlled by global or
-thread-local configuration options. Fiona allows you to configure these options
-using a context manager, ``fiona.Env``. This class's constructor takes GDAL/OGR
-configuration options as keyword arguments. To see debugging information from
-GDAL/OGR, for example, you may do the following.
+thread-local `configuration options. `_
+Fiona allows you to configure these options using a context manager, ``fiona.Env``.
+This class's constructor takes GDAL/OGR configuration options as keyword arguments.
+To see debugging information from GDAL/OGR, for example, you may do the following.

-.. sourcecode:: python
+.. code-block:: python

     import logging
-
     import fiona
-
     logging.basicConfig(level=logging.DEBUG)

     with fiona.Env(CPL_DEBUG=True):
-        fiona.open('tests/data/coutwildrnp.shp')
+        fiona.open(
+            "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip"
+        )
+
+The following extra messages will appear in the Python logger's output.

-The following extra messages will appear in the Python logger's output.::
+.. code-block::

     DEBUG:fiona._env:CPLE_None in GNM: GNMRegisterAllInternal
     DEBUG:fiona._env:CPLE_None in GNM: RegisterGNMFile
@@ -1068,8 +1006,28 @@

 If you call ``fiona.open()`` with no surrounding ``Env`` environment, one will
 be created for you.

-When your program exits the environent's with block the configuration reverts
-to its previous state.
+When your program exits the environment's ``with`` block the configuration reverts
+to its previous state.
+
+Driver configuration options
+----------------------------
+
+Drivers can have dataset open, dataset creation, and layer creation
+options. These options can be found on the drivers page on `GDAL's homepage.
+`_ or using the ``fiona.meta``
+module:
+
+.. code-block:: pycon
+
+    >>> import fiona.meta
+    >>> fiona.meta.print_driver_options("GeoJSON")
+
+These options can be passed to ``fiona.open``:
+
+.. code-block:: python
+
+    import fiona
+    fiona.open('tests/data/coutwildrnp.json', ARRAY_AS_STRING="YES")
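+
+For example, a driver-specific creation option can be passed when writing (a
+sketch; ``COORDINATE_PRECISION``, a GeoJSON option limiting the number of
+decimal places, and the path ``precise.geojson`` are illustrative):
+
+.. code-block:: python
+
+    import fiona
+
+    schema = {"geometry": "Point", "properties": {"name": "str"}}
+
+    # The extra keyword is handed to OGR as a creation option.
+    with fiona.open(
+        "precise.geojson",
+        "w",
+        driver="GeoJSON",
+        schema=schema,
+        COORDINATE_PRECISION=3,
+    ) as dst:
+        dst.write(
+            {
+                "geometry": {"type": "Point", "coordinates": (1.23456, 7.89012)},
+                "properties": {"name": "example"},
+            }
+        )
+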
 Cloud storage credentials
 -------------------------

@@ -1077,18 +1035,18 @@
 One of the most important uses of ``fiona.Env`` is to set credentials for
 accessing data stored in AWS S3 or another cloud storage system.

-.. sourcecode:: python
+.. code-block:: python

-    from fiona.session import AWSSession
-    import fiona
+    import fiona
+    from fiona.session import AWSSession

-    with fiona.Env(
-        session=AWSession(
-            aws_access_key_id="key",
-            aws_secret_access_key="secret",
-        )
-    ):
-        fiona.open("zip+s3://example-bucket/example.zip")
+    with fiona.Env(
+        session=AWSSession(
+            aws_access_key_id="key",
+            aws_secret_access_key="secret"
+        )
+    ):
+        fiona.open("zip+s3://example-bucket/example.zip")

 The AWSSession class is currently the only credential session manager in
 Fiona. The source code has an example of how classes for other cloud storage
 providers
@@ -1099,7 +1057,7 @@
 S3 object, a session will be created for you using code equivalent to the
 following code.

-.. sourcecode:: python
+.. code-block:: python

     import boto3

@@ -1107,7 +1065,7 @@
     import fiona

     with fiona.Env(session=AWSSession(boto3.Session())):
-        fiona.open('zip+s3://fiona-testing/coutwildrnp.zip')
+        fiona.open("zip+s3://fiona-testing/coutwildrnp.zip")

 Slicing and masking iterators
 -----------------------------

@@ -1116,35 +1074,37 @@
 allowing efficient bounding box searches. A collection's
 :py:meth:`~fiona.collection.Collection.items` method returns an iterator over
 pairs of FIDs and records that intersect a given ``(minx, miny, maxx, maxy)``
-bounding box or geometry object. Spatial filtering may be inaccurate and returning
-all features overlapping the envelope of the geometry. The
+bounding box or geometry object. Spatial filtering may be inaccurate,
+returning all features overlapping the envelope of the geometry. The
 collection's own coordinate reference system (see below) is used to interpret
-the box's values. If you want a list of the iterator's items, pass it to Python's
-builtin :py:func:`list` as shown below.
+the box's values. If you want a list of the iterator's items, pass it to
+Python's builtin :py:func:`list` as shown below.

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> c = fiona.open('docs/data/test_uk.shp')
-    >>> hits = list(c.items(bbox=(-5.0, 55.0, 0.0, 60.0)))
-    >>> len(hits)
-    7
+    >>> colxn = fiona.open(
+    ...     "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip"
+    ... )
+    >>> hits = list(colxn.items(bbox=(-110.0, 36.0, -108.0, 38.0)))
+    >>> len(hits)
+    5

 The iterator method takes the same ``stop`` or ``start, stop[, step]``
-slicing arguments as :py:func:`itertools.islice`.
+slicing arguments as :py:func:`itertools.islice`.

 To get just the first two items from that iterator, pass a stop index.

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> hits = c.items(2, bbox=(-5.0, 55.0, 0.0, 60.0))
+    >>> hits = colxn.items(2, bbox=(-110.0, 36.0, -108.0, 38.0))
     >>> len(list(hits))
     2

 To get the third through fifth items from that iterator, pass start and stop
 indexes.

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> hits = c.items(2, 5, bbox=(-5.0, 55.0, 0.0, 60.0))
+    >>> hits = colxn.items(2, 5, bbox=(-110.0, 36.0, -108.0, 38.0))
     >>> len(list(hits))
     3
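+
+A GeoJSON-like geometry can be used instead of a bounding box via the ``mask``
+keyword argument (a sketch; the polygon below is illustrative and expressed in
+the collection's own coordinates):
+
+.. code-block:: python
+
+    import fiona
+
+    mask = {
+        "type": "Polygon",
+        "coordinates": [[
+            (-110.0, 36.0), (-108.0, 36.0), (-108.0, 38.0),
+            (-110.0, 38.0), (-110.0, 36.0),
+        ]],
+    }
+
+    with fiona.open(
+        "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip"
+    ) as colxn:
+        # Like bbox, mask filtering tests envelope intersection.
+        hits = list(colxn.items(mask=mask))
+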
@@ -1152,15 +1112,17 @@
 :keyword:`lambda` or your own filter function that takes a single feature
 record and returns ``True`` or ``False``.

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> def pass_positive_area(rec):
-    ...     return rec['properties'].get('AREA', 0.0) > 0.0
-    ...
-    >>> c = fiona.open('docs/data/test_uk.shp')
-    >>> hits = filter(pass_positive_area, c)
-    >>> len(list(hits))
-    48
+    >>> def pass_positive_area(rec):
+    ...     return rec['properties'].get('AREA', 0.0) > 0.0
+    ...
+    >>> colxn = fiona.open(
+    ...     "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip"
+    ... )
+    >>> hits = filter(pass_positive_area, colxn)
+    >>> len(list(hits))
+    67
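+
+Fiona 1.9 can also push attribute filtering down to OGR via an SQL ``where``
+clause (a sketch, assuming the ``where`` keyword is available in your
+installed version):
+
+.. code-block:: python
+
+    import fiona
+
+    with fiona.open(
+        "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip"
+    ) as colxn:
+        # Only features whose STATE attribute equals 'UT' are yielded.
+        for feat in colxn.filter(where="STATE = 'UT'"):
+            print(feat.properties["NAME"])
+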
 Reading Multilayer data
 -----------------------

@@ -1168,86 +1130,86 @@
 Up to this point, only simple datasets with one thematic layer or feature type
 per file have been shown and the venerable Esri Shapefile has been the primary
 example. Other GIS data formats can encode multiple layers or feature types
-within a single file or directory. Esri's `File Geodatabase
-`__ is one example of such a format.
-A more useful example, for the purpose of this manual, is a directory
-comprising multiple shapefiles. The following three shell commands will create
-just such a two layered data source from the test data distributed with Fiona.
-
-.. sourcecode:: console
-
-    $ mkdir /tmp/data
-    $ ogr2ogr /tmp/data/ docs/data/test_uk.shp test_uk -nln foo
-    $ ogr2ogr /tmp/data/ docs/data/test_uk.shp test_uk -nln bar
+within a single file or directory. GeoPackage is one example of such a format.
+A more useful example, for the purpose of this manual, is a directory or
+zipfile comprising multiple shapefiles. The GitHub-hosted zipfile we've been
+using in these examples is, in fact, such a multilayer dataset.

-The layers of a data source can be listed using :py:func:`fiona.listlayers`. In
+The layers of a dataset can be listed using :py:func:`fiona.listlayers`. In
 the shapefile format case, layer names match base names of the files.

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> fiona.listlayers('/tmp/data')
-    ['bar', 'foo']
+    >>> fiona.listlayers(
+    ...     "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip"
+    ... )
+    ['coutwildrnp']

 Unlike OGR, Fiona has no classes representing layers or data sources. To
 access the features of a layer, open a collection using the path to the data
 source and specify the layer by name using the `layer` keyword.

-.. sourcecode:: pycon
+.. code-block:: pycon
+
+    >>> dataset_path = "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip"
+    >>> for name in fiona.listlayers(dataset_path):
+    ...     with fiona.open(dataset_path, layer=name) as colxn:
+    ...         pprint.pprint(colxn.schema)
+    ...
+    {'geometry': 'Polygon',
+     'properties': {'AGBUR': 'str:80',
+                    'AREA': 'float:24.15',
+                    'FEATURE1': 'str:80',
+                    'FEATURE2': 'str:80',
+                    'NAME': 'str:80',
+                    'PERIMETER': 'float:24.15',
+                    'STATE': 'str:80',
+                    'STATE_FIPS': 'str:80',
+                    'URL': 'str:101',
+                    'WILDRNP020': 'int:10'}}
+
+Layers may also be specified by their numerical index.

-    >>> import pprint
-    >>> datasrc_path = '/tmp/data'
-    >>> for name in fiona.listlayers(datasrc_path):
-    ...     with fiona.open(datasrc_path, layer=name) as c:
-    ...         pprint.pprint(c.schema)
-    ...
-    {'geometry': 'Polygon',
-     'properties': {'CAT': 'float:16',
-                    'FIPS_CNTRY': 'str',
-                    'CNTRY_NAME': 'str',
-                    'AREA': 'float:15.2',
-                    'POP_CNTRY': 'float:15.2'}}
-    {'geometry': 'Polygon',
-     'properties': {'CAT': 'float:16',
-                    'FIPS_CNTRY': 'str',
-                    'CNTRY_NAME': 'str',
-                    'AREA': 'float:15.2',
-                    'POP_CNTRY': 'float:15.2'}}
-
-Layers may also be specified by their index.
-
-.. sourcecode:: pycon
-
-    >>> for i, name in enumerate(fiona.listlayers(datasrc_path)):
-    ...     with fiona.open(datasrc_path, layer=i) as c:
-    ...         print(len(c))
-    ...
-    48
-    48
+.. code-block:: pycon
+
+    >>> for index, name in enumerate(fiona.listlayers(dataset_path)):
+    ...     with fiona.open(dataset_path, layer=index) as colxn:
+    ...         print(len(colxn))
+    ...
+    67

 If no layer is specified, :py:func:`fiona.open` returns an open collection
 using the first layer.

-.. sourcecode:: pycon
+.. code-block:: pycon
+
+    >>> with fiona.open(dataset_path) as colxn:
+    ...     colxn.name == fiona.listlayers(dataset_path)[0]
+    ...
+    True

-    >>> with fiona.open(datasrc_path) as c:
-    ...     c.name == fiona.listlayers(datasrc_path)[0]
-    ...
-    True
+We've been relying on this implicit behavior throughout the manual.

 The most general way to open a shapefile for reading, using all of the
 parameters of :py:func:`fiona.open`, is to treat it as a data source with a
 named layer.

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> fiona.open('docs/data/test_uk.shp', 'r', layer='test_uk')
+    >>> fiona.open(
+    ...     "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip",
+    ...     mode="r",
+    ...     layer="coutwildrnp"
+    ... )

 In practice, it is fine to rely on the implicit first layer and default
 ``'r'`` mode and open a shapefile like this:

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> fiona.open('docs/data/test_uk.shp')
+    >>> fiona.open(
+    ...     "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip",
+    ...     )

 Writing Multilayer data
 -----------------------

@@ -1255,129 +1217,87 @@
 To write an entirely new layer to a multilayer data source, simply provide
 a unique name to the `layer` keyword argument.

-.. sourcecode:: pycon
+.. code-block:: pycon

-    >>> 'wah' not in fiona.listlayers(datasrc_path)
-    True
-    >>> with fiona.open(datasrc_path, layer='bar') as c:
-    ...     with fiona.open(datasrc_path, 'w', layer='wah', **c.meta) as d:
-    ...         d.write(next(c))
-    ...
-    >>> fiona.listlayers(datasrc_path)
-    ['bar', 'foo', 'wah']
+    >>> with fiona.open(
+    ...     "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip",
+    ... ) as src:
+    ...     with fiona.open("example.gpkg", "w", layer="example one", **src.profile) as dst:
+    ...         dst.writerecords(src)
+    ...
+    >>> fiona.listlayers("example.gpkg")
+    ['example one']

 In ``'w'`` mode, existing layers will be overwritten if specified, just as
 normal files are overwritten by Python's :py:func:`open` function.
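+
+A second layer can be added to the same GeoPackage by opening it again in
+``'w'`` mode with a different layer name (a sketch continuing the example
+above; the name "example two" is illustrative):
+
+.. code-block:: python
+
+    import fiona
+
+    with fiona.open(
+        "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip"
+    ) as src:
+        # "example one" is left intact; a new layer is created beside it.
+        with fiona.open(
+            "example.gpkg", "w", layer="example two", **src.profile
+        ) as dst:
+            dst.writerecords(src)
+
+    # fiona.listlayers("example.gpkg") -> ['example one', 'example two']
+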
-.. sourcecode:: pycon
+Unsupported drivers
+-------------------

-    >>> 'wah' in fiona.listlayers(datasrc_path)
-    True
-    >>> with fiona.open(datasrc_path, layer='bar') as c:
-    ...     with fiona.open(datasrc_path, 'w', layer='wah', **c.meta) as d:
-    ...         # Overwrites the existing layer named 'wah'!
+Fiona maintains a list of OGR drivers in :py:attr:`fiona.supported_drivers`
+that are tested and known to work together with Fiona. Opening a dataset using
+an unsupported driver or access mode results in a :py:exc:`DriverError`
+exception. By passing `allow_unsupported_drivers=True` to :py:func:`fiona.open`
+no compatibility checks are performed and unsupported OGR drivers can be used.
+However, there are no guarantees that Fiona will be able to access or write
+data correctly using an unsupported driver.

-Virtual filesystems
--------------------
+.. code-block:: python

-Zip and Tar archives can be treated as virtual filesystems and collections can
-be made from paths and layers within them. In other words, Fiona lets you read
-zipped shapefiles. For example, make a Zip archive from the shapefile
-distributed with Fiona.
-
-.. sourcecode:: console
-
-    $ zip /tmp/zed.zip docs/data/test_uk.*
-      adding: docs/data/test_uk.shp (deflated 48%)
-      adding: docs/data/test_uk.shx (deflated 37%)
-      adding: docs/data/test_uk.dbf (deflated 98%)
-      adding: docs/data/test_uk.prj (deflated 15%)
-
-The `vfs` keyword parameter for :py:func:`fiona.listlayers` and
-:py:func:`fiona.open` may be an Apache Commons VFS style string beginning with
-"zip://" or "tar://" and followed by an absolute or relative path to the
-archive file. When this parameter is used, the first argument to must be an
-absolute path within that archive. The layers in that Zip archive are:
-
-.. sourcecode:: pycon
-
-    >>> import fiona
-    >>> fiona.listlayers('/docs/data', vfs='zip:///tmp/zed.zip')
-    ['test_uk']
-
-The single shapefile may also be accessed like so:
-
-.. sourcecode:: pycon
-
-    >>> with fiona.open(
-    ...     '/docs/data/test_uk.shp',
-    ...     vfs='zip:///tmp/zed.zip') as c:
-    ...     print(len(c))
-    ...
-    48
+    import fiona
+    with fiona.open("file.kmz", allow_unsupported_drivers=True) as collection:
+        ...

-Unsupported drivers
--------------------
+Not all OGR drivers are necessarily enabled in every GDAL distribution. The
+following code snippet lists the drivers included in the GDAL installation
+used by Fiona:

-In :py:attr:`fiona.supported_drivers` a selection of GDAL/OGR's
-drivers that is tested to work with Fiona is maintained. By default, Fiona
-allows only these drivers with their listed access modes: r for read support,
-respectively a for append and w for write.
+.. code-block:: python

-These restrictions can be circumvented by modifying :py:attr:`fiona.supported_drivers`:
+    from fiona.env import Env

-.. sourcecode:: python
+    with Env() as gdalenv:
+        print(gdalenv.drivers().keys())

-    import fiona
-    fiona.drvsupport.supported_drivers["LIBKML"] = "raw"
-    with fiona.open("file.kmz") as collection:
-        pass
-
-It should, however, first be verified, if the local installation of GDAL/OGR
-includes the required driver:
-
-.. sourcecode:: python
-
-    from fiona.env import Env
-
-    with Env() as gdalenv:
-        print(gdalenv.drivers().keys())
-
-Dumpgj
-======
-
-Fiona installs a script named ``dumpgj``. It converts files to GeoJSON with
-JSON-LD context as an option and is intended to be an upgrade to "ogr2ogr -f
-GeoJSON".
-
-.. sourcecode:: console
-
-    $ dumpgj --help
-    usage: dumpgj [-h] [-d] [-n N] [--compact] [--encoding ENC]
-                  [--record-buffered] [--ignore-errors] [--use-ld-context]
-                  [--add-ld-context-item TERM=URI]
-                  infile [outfile]
-
-    Serialize a file's records or description to GeoJSON
-
-    positional arguments:
-      infile                input file name
-      outfile               output file name, defaults to stdout if omitted
-
-    optional arguments:
-      -h, --help            show this help message and exit
-      -d, --description     serialize file's data description (schema) only
-      -n N, --indent N      indentation level in N number of chars
-      --compact             use compact separators (',', ':')
-      --encoding ENC        Specify encoding of the input file
-      --record-buffered     Economical buffering of writes at record, not
-                            collection (default), level
-      --ignore-errors       log errors but do not stop serialization
-      --use-ld-context      add a JSON-LD context to JSON output
-      --add-ld-context-item TERM=URI
-                            map a term to a URI and add it to the output's JSON LD
-                            context
+MemoryFile and ZipMemoryFile
+----------------------------
+
+:py:class:`fiona.io.MemoryFile` and :py:class:`fiona.io.ZipMemoryFile` allow
+formatted feature collections, even zipped feature collections, to be read or
+written in memory, with no filesystem access required. For example, you may
+have a zipped shapefile in a stream of bytes coming from a web upload or
+download.
+
+.. code-block:: pycon
+
+    >>> data = open('tests/data/coutwildrnp.zip', 'rb').read()
+    >>> len(data)
+    154006
+    >>> data[:20]
+    b'PK\x03\x04\x14\x00\x00\x00\x00\x00\xaa~VM\xech\xae\x1e\xec\xab'
+
+The feature collection in this stream of bytes can be accessed by wrapping it
+in an instance of ZipMemoryFile.
+
+.. code-block:: pycon
+
+    >>> from fiona.io import ZipMemoryFile
+    >>> with ZipMemoryFile(data) as zip:
+    ...     with zip.open('coutwildrnp.shp') as collection:
+    ...         print(len(collection))
+    ...         print(collection.schema)
+    ...
+    67
+    {'properties': OrderedDict([('PERIMETER', 'float:24.15'), ('FEATURE2', 'str:80'), ('NAME', 'str:80'), ('FEATURE1', 'str:80'), ('URL', 'str:101'), ('AGBUR', 'str:80'), ('AREA', 'float:24.15'), ('STATE_FIPS', 'str:80'), ('WILDRNP020', 'int:10'), ('STATE', 'str:80')]), 'geometry': 'Polygon'}
+
+*New in 1.8.0*
+
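+Writing works the other way around (a sketch; the schema and feature are
+illustrative). The ``seek``/``read`` calls mirror how Fiona itself copies a
+MemoryFile's bytes to a file object:
+
+.. code-block:: python
+
+    import fiona
+    from fiona.io import MemoryFile
+
+    schema = {"geometry": "Point", "properties": {"name": "str"}}
+
+    with MemoryFile() as memfile:
+        with memfile.open(driver="GeoJSON", schema=schema) as colxn:
+            colxn.write(
+                {
+                    "geometry": {"type": "Point", "coordinates": (0.0, 0.0)},
+                    "properties": {"name": "Null Island"},
+                }
+            )
+        memfile.seek(0)
+        data = memfile.read()  # the encoded GeoJSON as bytes
+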
+Fiona command line interface
+============================
+
+Fiona comes with a command line interface called "fio". See the
+`CLI Documentation `__ for detailed usage instructions.

 Final Notes
 ===========

@@ -1399,7 +1319,7 @@
 ==========

 .. [Kent1978] William Kent, Data and Reality, North Holland, 1978.
-.. [ESRI1998] ESRI Shapefile Technical Description. July 1998. http://www.esri.com/library/whitepapers/pdfs/shapefile.pdf
-.. [GeoJSON] http://geojson.org
-.. [JSON] http://www.ietf.org/rfc/rfc4627
-.. [SFA] http://en.wikipedia.org/wiki/Simple_feature_access
+.. [ESRI1998] ESRI Shapefile Technical Description. July 1998. https://www.esri.com/library/whitepapers/pdfs/shapefile.pdf
+.. [GeoJSON] https://geojson.org
+.. [JSON] https://www.ietf.org/rfc/rfc4627
+.. [SFA] https://en.wikipedia.org/wiki/Simple_feature_access
diff -Nru fiona-1.8.22/docs/modules.rst fiona-1.9.5/docs/modules.rst
--- fiona-1.8.22/docs/modules.rst	2022-10-14 23:26:41.000000000 +0000
+++ fiona-1.9.5/docs/modules.rst	2023-10-11 23:19:44.000000000 +0000
@@ -2,6 +2,5 @@
 =====

 ..
toctree:: - :maxdepth: 4 fiona diff -Nru fiona-1.8.22/environment.yml fiona-1.9.5/environment.yml --- fiona-1.8.22/environment.yml 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/environment.yml 2023-10-11 23:19:44.000000000 +0000 @@ -1,8 +1,12 @@ name: _fiona channels: -- defaults - conda-forge +- defaults dependencies: -- python>=3.5 -- cython -- libgdal +- pip +- python=3.9.* +- libgdal=3.4.* +- sphinx-click +- sphinx-rtd-theme +- pip: + - jinja2==3.0.3 diff -Nru fiona-1.8.22/examples/open.py fiona-1.9.5/examples/open.py --- fiona-1.8.22/examples/open.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/examples/open.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ - import fiona # This module contains examples of opening files to get feature collections in diff -Nru fiona-1.8.22/examples/orient-ccw.py fiona-1.9.5/examples/orient-ccw.py --- fiona-1.8.22/examples/orient-ccw.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/examples/orient-ccw.py 2023-10-11 23:19:44.000000000 +0000 @@ -60,6 +60,6 @@ sink.write(f) - except Exception, e: + except Exception as e: logging.exception("Error processing feature %s:", f['id']) diff -Nru fiona-1.8.22/examples/with-descartes.py fiona-1.9.5/examples/with-descartes.py --- fiona-1.8.22/examples/with-descartes.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/examples/with-descartes.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ - import subprocess from matplotlib import pyplot diff -Nru fiona-1.8.22/examples/with-pyproj.py fiona-1.9.5/examples/with-pyproj.py --- fiona-1.8.22/examples/with-pyproj.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/examples/with-pyproj.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ - import logging import sys @@ -34,7 +33,7 @@ f['geometry']['coordinates'] = new_coords sink.write(f) - except Exception, e: + except Exception as e: # Writing uncleanable features to a different shapefile # is another option. logging.exception("Error transforming feature %s:", f['id']) diff -Nru fiona-1.8.22/examples/with-shapely.py fiona-1.9.5/examples/with-shapely.py --- fiona-1.8.22/examples/with-shapely.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/examples/with-shapely.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ - import logging import sys @@ -28,7 +27,7 @@ f['geometry'] = mapping(geom) sink.write(f) - except Exception, e: + except Exception as e: # Writing uncleanable features to a different shapefile # is another option. logging.exception("Error cleaning feature %s:", f['id']) diff -Nru fiona-1.8.22/fiona/__init__.py fiona-1.9.5/fiona/__init__.py --- fiona-1.8.22/fiona/__init__.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/__init__.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ Fiona is OGR's neat, nimble API. @@ -8,104 +6,83 @@ integrates readily with other Python GIS packages such as pyproj, Rtree and Shapely. -How minimal? Fiona can read features as mappings from shapefiles or -other GIS vector formats and write mappings as features to files using -the same formats. That's all. There aren't any feature or geometry -classes. Features and their geometries are just data. - A Fiona feature is a Python mapping inspired by the GeoJSON format. It -has `id`, 'geometry`, and `properties` keys. The value of `id` is -a string identifier unique within the feature's parent collection. The -`geometry` is another mapping with `type` and `coordinates` keys. 
The -`properties` of a feature is another mapping corresponding to its -attribute table. For example: - - {'id': '1', - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, - 'properties': {'label': u'Null Island'} } - -is a Fiona feature with a point geometry and one property. - -Features are read and written using objects returned by the -``collection`` function. These ``Collection`` objects are a lot like -Python ``file`` objects. A ``Collection`` opened in reading mode serves -as an iterator over features. One opened in a writing mode provides -a ``write`` method. - -Usage ------ - -Here's an example of reading a select few polygon features from -a shapefile and for each, picking off the first vertex of the exterior -ring of the polygon and using that as the point geometry for a new -feature writing to a "points.shp" file. - - >>> import fiona - >>> with fiona.open('docs/data/test_uk.shp', 'r') as inp: - ... output_schema = inp.schema.copy() - ... output_schema['geometry'] = 'Point' - ... with collection( - ... "points.shp", "w", - ... crs=inp.crs, - ... driver="ESRI Shapefile", - ... schema=output_schema - ... ) as out: - ... for f in inp.filter( - ... bbox=(-5.0, 55.0, 0.0, 60.0) - ... ): - ... value = f['geometry']['coordinates'][0][0] - ... f['geometry'] = { - ... 'type': 'Point', 'coordinates': value} - ... out.write(f) +has ``id``, ``geometry``, and ``properties`` attributes. The value of +``id`` is a string identifier unique within the feature's parent +collection. The ``geometry`` is another mapping with ``type`` and +``coordinates`` keys. The ``properties`` of a feature is another mapping +corresponding to its attribute table. + +Features are read and written using the ``Collection`` class. These +``Collection`` objects are a lot like Python ``file`` objects. A +``Collection`` opened in reading mode serves as an iterator over +features. One opened in a writing mode provides a ``write`` method. -Because Fiona collections are context managers, they are closed and (in -writing modes) flush contents to disk when their ``with`` blocks end. """ -from contextlib import contextmanager +import glob import logging import os -import sys -import warnings +from pathlib import Path import platform -from six import string_types - -try: - from pathlib import Path -except ImportError: # pragma: no cover - class Path: - pass - -# TODO: remove this? Or at least move it, flake8 complains. 
-if sys.platform == "win32": - libdir = os.path.join(os.path.dirname(__file__), ".libs") - os.environ["PATH"] = os.environ["PATH"] + ";" + libdir - -import fiona._loading -with fiona._loading.add_gdal_dll_directories(): - from fiona.collection import BytesCollection, Collection - from fiona.drvsupport import supported_drivers - from fiona.env import ensure_env_with_credentials, Env - from fiona.errors import FionaDeprecationWarning - from fiona._env import driver_count - from fiona._env import ( - calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name, - get_gdal_version_tuple) - from fiona.compat import OrderedDict - from fiona.io import MemoryFile - from fiona.ogrext import _bounds, _listlayers, FIELD_TYPES_MAP, _remove, _remove_layer - from fiona.path import ParsedPath, parse_path, vsi_path - from fiona.vfs import parse_paths as vfs_parse_paths - from fiona._show_versions import show_versions - - # These modules are imported by fiona.ogrext, but are also import here to - # help tools like cx_Freeze find them automatically - from fiona import _geometry, _err, rfc3339 - import uuid +import warnings +if platform.system() == "Windows": + _whl_dir = os.path.join(os.path.dirname(__file__), ".libs") + if os.path.exists(_whl_dir): + os.add_dll_directory(_whl_dir) + else: + if "PATH" in os.environ: + for p in os.environ["PATH"].split(os.pathsep): + if glob.glob(os.path.join(p, "gdal*.dll")): + os.add_dll_directory(p) + + +from fiona._env import ( + calc_gdal_version_num, + get_gdal_release_name, + get_gdal_version_num, + get_gdal_version_tuple, +) +from fiona._env import driver_count +from fiona._show_versions import show_versions +from fiona.collection import BytesCollection, Collection +from fiona.drvsupport import supported_drivers +from fiona.env import ensure_env_with_credentials, Env +from fiona.errors import FionaDeprecationWarning +from fiona.io import MemoryFile +from fiona.model import Feature, Geometry, Properties +from fiona.ogrext import ( + FIELD_TYPES_MAP, + _bounds, + _listdir, + _listlayers, + _remove, + _remove_layer, +) +from fiona.path import ParsedPath, parse_path, vsi_path +from fiona.vfs import parse_paths as vfs_parse_paths + +# These modules are imported by fiona.ogrext, but are also import here to +# help tools like cx_Freeze find them automatically +from fiona import _geometry, _err, rfc3339 +import uuid + + +__all__ = [ + "Feature", + "Geometry", + "Properties", + "bounds", + "listlayers", + "listdir", + "open", + "prop_type", + "prop_width", + "remove", +] -__all__ = ['bounds', 'listlayers', 'open', 'prop_type', 'prop_width'] -__version__ = "1.8.22" +__version__ = "1.9.5" __gdal_version__ = get_gdal_release_name() gdal_version = get_gdal_version_tuple() @@ -115,9 +92,20 @@ @ensure_env_with_credentials -def open(fp, mode='r', driver=None, schema=None, crs=None, encoding=None, - layer=None, vfs=None, enabled_drivers=None, crs_wkt=None, - **kwargs): +def open( + fp, + mode="r", + driver=None, + schema=None, + crs=None, + encoding=None, + layer=None, + vfs=None, + enabled_drivers=None, + crs_wkt=None, + allow_unsupported_drivers=False, + **kwargs +): """Open a collection for read, append, or write In write mode, a driver name such as "ESRI Shapefile" or "GPX" (see @@ -147,7 +135,7 @@ The drivers used by Fiona will try to detect the encoding of data files. If they fail, you may provide the proper ``encoding``, such - as 'Windows-1252' for the Natural Earth datasets. + as 'Windows-1252' for the original Natural Earth datasets. 
     When the provided path is to a file containing multiple named layers
     of data, a layer can be singled out by ``layer``.

@@ -192,6 +180,17 @@
     crs_wkt : str
         An optional WKT representation of a coordinate reference
         system.
+    ignore_fields : list
+        List of field names to ignore on load.
+    ignore_geometry : bool
+        Ignore the geometry on load.
+    include_fields : list
+        List of a subset of field names to include on load.
+    wkt_version : fiona.enums.WktVersion or str, optional
+        Version to use for the CRS WKT.
+        Defaults to GDAL's default (WKT1_GDAL for GDAL 3).
+    allow_unsupported_drivers : bool
+        If set to True, do not limit GDAL drivers to the set of known
+        working drivers.
     kwargs : mapping
         Other driver-specific parameters that will be interpreted by
         the OGR library as layer creation or opening options.
@@ -201,108 +200,153 @@
     Collection

     """
-    if mode == 'r' and hasattr(fp, 'read'):
+    if mode == "r" and hasattr(fp, "read"):
+        memfile = MemoryFile(fp.read())
+        colxn = memfile.open(
+            driver=driver,
+            crs=crs,
+            schema=schema,
+            layer=layer,
+            encoding=encoding,
+            enabled_drivers=enabled_drivers,
+            allow_unsupported_drivers=allow_unsupported_drivers,
+            **kwargs
+        )
+        colxn._env.enter_context(memfile)
+        return colxn

-        @contextmanager
-        def fp_reader(fp):
-            memfile = MemoryFile(fp.read())
-            dataset = memfile.open(
-                driver=driver, crs=crs, schema=schema, layer=layer,
-                encoding=encoding, enabled_drivers=enabled_drivers,
-                **kwargs)
-            try:
-                yield dataset
-            finally:
-                dataset.close()
-                memfile.close()
-
-        return fp_reader(fp)
-
-    elif mode == 'w' and hasattr(fp, 'write'):
-        if schema:
-            # Make an ordered dict of schema properties.
-            this_schema = schema.copy()
-            this_schema['properties'] = OrderedDict(schema['properties'])
-        else:
-            this_schema = None
+    elif mode == "w" and hasattr(fp, "write"):
+        memfile = MemoryFile()
+        colxn = memfile.open(
+            driver=driver,
+            crs=crs,
+            schema=schema,
+            layer=layer,
+            encoding=encoding,
+            enabled_drivers=enabled_drivers,
+            allow_unsupported_drivers=allow_unsupported_drivers,
+            crs_wkt=crs_wkt,
+            **kwargs
+        )
+        colxn._env.enter_context(memfile)

-        @contextmanager
-        def fp_writer(fp):
-            memfile = MemoryFile()
-            dataset = memfile.open(
-                driver=driver, crs=crs, schema=this_schema, layer=layer,
-                encoding=encoding, enabled_drivers=enabled_drivers,
-                crs_wkt=crs_wkt, **kwargs)
-            try:
-                yield dataset
-            finally:
-                dataset.close()
-                memfile.seek(0)
-                fp.write(memfile.read())
-                memfile.close()
+        # For the writing case we push an extra callback onto the
+        # ExitStack. It ensures that the MemoryFile's contents are
+        # copied to the open file object.
+        def func(*args, **kwds):
+            memfile.seek(0)
+            fp.write(memfile.read())

-        return fp_writer(fp)
+        colxn._env.callback(func)
+        return colxn

     elif mode == "a" and hasattr(fp, "write"):
         raise OSError(
             "Append mode is not supported for datasets in a Python file object."
         )

+    # TODO: test for a shared base class or abstract type.
+    elif isinstance(fp, MemoryFile):
+        if mode.startswith("r"):
+            colxn = fp.open(
+                driver=driver,
+                allow_unsupported_drivers=allow_unsupported_drivers,
+                **kwargs
+            )
+
+        # Note: FilePath does not support writing and an exception will
+        # result from this.
+ elif mode.startswith("w"): + colxn = fp.open( + driver=driver, + crs=crs, + schema=schema, + layer=layer, + encoding=encoding, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + crs_wkt=crs_wkt, + **kwargs + ) + return colxn + + # At this point, the fp argument is a string or path-like object + # which can be converted to a string. else: # If a pathlib.Path instance is given, convert it to a string path. if isinstance(fp, Path): fp = str(fp) if vfs: - warnings.warn("The vfs keyword argument is deprecated. Instead, pass a URL that uses a zip or tar (for example) scheme.", FionaDeprecationWarning, stacklevel=2) + warnings.warn( + "The vfs keyword argument is deprecated and will be removed in version 2.0.0. Instead, pass a URL that uses a zip or tar (for example) scheme.", + FionaDeprecationWarning, + stacklevel=2, + ) path, scheme, archive = vfs_parse_paths(fp, vfs=vfs) path = ParsedPath(path, archive, scheme) else: path = parse_path(fp) - if mode in ('a', 'r'): - c = Collection(path, mode, driver=driver, encoding=encoding, - layer=layer, enabled_drivers=enabled_drivers, **kwargs) - elif mode == 'w': - if schema: - # Make an ordered dict of schema properties. - this_schema = schema.copy() - if 'properties' in schema: - this_schema['properties'] = OrderedDict(schema['properties']) - else: - this_schema['properties'] = OrderedDict() - - if 'geometry' not in this_schema: - this_schema['geometry'] = None - - else: - this_schema = None - c = Collection(path, mode, crs=crs, driver=driver, schema=this_schema, - encoding=encoding, layer=layer, enabled_drivers=enabled_drivers, crs_wkt=crs_wkt, - **kwargs) + if mode in ("a", "r"): + colxn = Collection( + path, + mode, + driver=driver, + encoding=encoding, + layer=layer, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + **kwargs + ) + elif mode == "w": + colxn = Collection( + path, + mode, + crs=crs, + driver=driver, + schema=schema, + encoding=encoding, + layer=layer, + enabled_drivers=enabled_drivers, + crs_wkt=crs_wkt, + allow_unsupported_drivers=allow_unsupported_drivers, + **kwargs + ) else: - raise ValueError( - "mode string must be one of 'r', 'w', or 'a', not %s" % mode) + raise ValueError("mode string must be one of {'r', 'w', 'a'}") - return c + return colxn collection = open +@ensure_env_with_credentials def remove(path_or_collection, driver=None, layer=None): - """Deletes an OGR data source + """Delete an OGR data source or one of its layers. - The required ``path`` argument may be an absolute or relative file path. - Alternatively, a Collection can be passed instead in which case the path - and driver are automatically determined. Otherwise the ``driver`` argument - must be specified. + If no layer is specified, the entire dataset and all of its layers + and associated sidecar files will be deleted. - Raises a ``RuntimeError`` if the data source cannot be deleted. + Parameters + ---------- + path_or_collection : str, pathlib.Path, or Collection + The target Collection or its path. + driver : str, optional + The name of a driver to be used for deletion, optional. Can + usually be detected. + layer : str or int, optional + The name or index of a specific layer. - Example usage: + Returns + ------- + None - fiona.remove('test.shp', 'ESRI Shapefile') + Raises + ------ + DatasetDeleteError + If the data source cannot be deleted. 
""" if isinstance(path_or_collection, Collection): @@ -310,6 +354,8 @@ path = collection.path driver = collection.driver collection.close() + elif isinstance(path_or_collection, Path): + path = str(path_or_collection) else: path = path_or_collection if layer is None: @@ -319,58 +365,113 @@ @ensure_env_with_credentials -def listlayers(fp, vfs=None): - """List layer names in their index order +def listdir(fp): + """Lists the datasets in a directory or archive file. + + Archive files must be prefixed like "zip://" or "tar://". Parameters ---------- - fp : URI (str or pathlib.Path), or file-like object - A dataset resource identifier or file object. + fp : str or pathlib.Path + Directory or archive path. + + Returns + ------- + list of str + A list of datasets. + + Raises + ------ + TypeError + If the input is not a str or Path. + + """ + if isinstance(fp, Path): + fp = str(fp) + + if not isinstance(fp, str): + raise TypeError("invalid path: %r" % fp) + + pobj = parse_path(fp) + return _listdir(vsi_path(pobj)) + + +@ensure_env_with_credentials +def listlayers(fp, vfs=None, **kwargs): + """Lists the layers (collections) in a dataset. + + Archive files must be prefixed like "zip://" or "tar://". + + Parameters + ---------- + fp : str, pathlib.Path, or file-like object + A dataset identifier or file object containing a dataset. vfs : str This is a deprecated parameter. A URI scheme such as "zip://" should be used instead. + kwargs : dict + Dataset opening options and other keyword args. Returns ------- - list + list of str A list of layer name strings. + Raises + ------ + TypeError + If the input is not a str, Path, or file object. + """ if hasattr(fp, 'read'): - with MemoryFile(fp.read()) as memfile: - return _listlayers(memfile.name) - + return _listlayers(memfile.name, **kwargs) else: - if isinstance(fp, Path): fp = str(fp) - if not isinstance(fp, string_types): + if not isinstance(fp, str): raise TypeError("invalid path: %r" % fp) - if vfs and not isinstance(vfs, string_types): + if vfs and not isinstance(vfs, str): raise TypeError("invalid vfs: %r" % vfs) if vfs: - warnings.warn("The vfs keyword argument is deprecated. Instead, pass a URL that uses a zip or tar (for example) scheme.", FionaDeprecationWarning, stacklevel=2) + warnings.warn( + "The vfs keyword argument is deprecated and will be removed in 2.0. " + "Instead, pass a URL that uses a zip or tar (for example) scheme.", + FionaDeprecationWarning, + stacklevel=2, + ) pobj_vfs = parse_path(vfs) pobj_path = parse_path(fp) pobj = ParsedPath(pobj_path.path, pobj_vfs.path, pobj_vfs.scheme) else: pobj = parse_path(fp) - return _listlayers(vsi_path(pobj)) + return _listlayers(vsi_path(pobj), **kwargs) def prop_width(val): """Returns the width of a str type property. - Undefined for non-str properties. Example: + Undefined for non-str properties. + + Parameters + ---------- + val : str + A type:width string from a collection schema. + + Returns + ------- + int or None + + Examples + -------- + >>> prop_width('str:25') + 25 + >>> prop_width('str') + 80 - >>> prop_width('str:25') - 25 - >>> prop_width('str') - 80 """ if val.startswith('str'): return int((val.split(":")[1:] or ["80"])[0]) @@ -380,12 +481,23 @@ def prop_type(text): """Returns a schema property's proper Python type. - Example: + Parameters + ---------- + text : str + A type name, with or without width. + + Returns + ------- + obj + A Python class. 
+ + Examples + -------- + >>> prop_type('int') + + >>> prop_type('str:25') + - >>> prop_type('int') - - >>> prop_type('str:25') - """ key = text.split(':')[0] return FIELD_TYPES_MAP[key] diff -Nru fiona-1.8.22/fiona/_crs.pxd fiona-1.9.5/fiona/_crs.pxd --- fiona-1.8.22/fiona/_crs.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_crs.pxd 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -# Coordinate system and transform API functions. - -cdef extern from "ogr_srs_api.h": - - ctypedef void * OGRSpatialReferenceH - - void OSRCleanup () - OGRSpatialReferenceH OSRClone (OGRSpatialReferenceH srs) - int OSRExportToProj4 (OGRSpatialReferenceH srs, char **params) - int OSRExportToWkt (OGRSpatialReferenceH srs, char **params) - int OSRImportFromEPSG (OGRSpatialReferenceH srs, int code) - int OSRImportFromProj4 (OGRSpatialReferenceH srs, char *proj) - int OSRSetFromUserInput (OGRSpatialReferenceH srs, char *input) - int OSRAutoIdentifyEPSG (OGRSpatialReferenceH srs) - const char * OSRGetAuthorityName (OGRSpatialReferenceH srs, const char *key) - const char * OSRGetAuthorityCode (OGRSpatialReferenceH srs, const char *key) - OGRSpatialReferenceH OSRNewSpatialReference (char *wkt) - void OSRRelease (OGRSpatialReferenceH srs) - void * OCTNewCoordinateTransformation (OGRSpatialReferenceH source, OGRSpatialReferenceH dest) - void OCTDestroyCoordinateTransformation (void *source) - int OCTTransform (void *ct, int nCount, double *x, double *y, double *z) diff -Nru fiona-1.8.22/fiona/_crs.pyx fiona-1.9.5/fiona/_crs.pyx --- fiona-1.8.22/fiona/_crs.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_crs.pyx 2023-10-11 23:19:44.000000000 +0000 @@ -31,7 +31,11 @@ try: cogr_srs = exc_wrap_pointer(OSRNewSpatialReference(NULL)) except CPLE_BaseError as exc: - raise CRSError(u"{}".format(exc)) + raise CRSError(str(exc)) + + # check for other CRS classes + if hasattr(crs, "to_wkt") and callable(crs.to_wkt): + crs = crs.to_wkt() # First, check for CRS strings like "EPSG:3857". if isinstance(crs, string_types): @@ -61,7 +65,6 @@ proj_b = proj.encode('utf-8') proj_c = proj_b OSRImportFromProj4(cogr_srs, proj_c) - else: raise CRSError("Invalid input to create CRS: {}".format(crs)) diff -Nru fiona-1.8.22/fiona/_env.pxd fiona-1.9.5/fiona/_env.pxd --- fiona-1.8.22/fiona/_env.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_env.pxd 2023-10-11 23:19:44.000000000 +0000 @@ -1,6 +1,17 @@ +include "gdal.pxi" + + +cdef extern from "ogr_srs_api.h": + void OSRSetPROJSearchPaths(const char *const *papszPaths) + void OSRGetPROJVersion (int *pnMajor, int *pnMinor, int *pnPatch) + + cdef class ConfigEnv(object): cdef public object options cdef class GDALEnv(ConfigEnv): cdef public object _have_registered_drivers + + +cdef _safe_osr_release(OGRSpatialReferenceH srs) diff -Nru fiona-1.8.22/fiona/_env.pyx fiona-1.9.5/fiona/_env.pyx --- fiona-1.8.22/fiona/_env.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_env.pyx 2023-10-11 23:19:44.000000000 +0000 @@ -8,8 +8,6 @@ option is set to a new value inside the thread. 
""" -include "gdal.pxi" - from collections import namedtuple import logging import os @@ -18,11 +16,9 @@ import threading from fiona._err cimport exc_wrap_int, exc_wrap_ogrerr -from fiona._shim cimport set_proj_search_path, get_proj_version from fiona._err import CPLE_BaseError from fiona.errors import EnvError - level_map = { 0: 0, 1: logging.DEBUG, @@ -57,12 +53,24 @@ try: import certifi - os.environ.setdefault("CURL_CA_BUNDLE", certifi.where()) + ca_bundle = certifi.where() + os.environ.setdefault("GDAL_CURL_CA_BUNDLE", ca_bundle) + os.environ.setdefault("PROJ_CURL_CA_BUNDLE", ca_bundle) except ImportError: pass + + cdef bint is_64bit = sys.maxsize > 2 ** 32 +cdef void set_proj_search_path(object path): + cdef const char **paths = NULL + cdef const char *path_c = NULL + path_b = path.encode("utf-8") + path_c = path_b + paths = CSLAddString(paths, path_c) + OSRSetPROJSearchPaths(paths) + cdef _safe_osr_release(OGRSpatialReferenceH srs): """Wrapper to handle OSR release when NULL.""" @@ -121,17 +129,13 @@ def get_proj_version_tuple(): """ - Returns proj version tuple for gdal >= 3.0.1, otherwise None + Returns proj version tuple """ - cdef int major - cdef int minor - cdef int patch - gdal_version_num = get_gdal_version_num() - if gdal_version_num < calc_gdal_version_num(3, 0, 1): - proj_version = None - else: - get_proj_version(&major, &minor, &patch) - return (major, minor, patch) + cdef int major = 0 + cdef int minor = 0 + cdef int patch = 0 + OSRGetPROJVersion(&major, &minor, &patch) + return (major, minor, patch) cdef void log_error(CPLErr err_class, int err_no, const char* msg) with gil: @@ -189,9 +193,9 @@ elif val.isdigit(): return int(val) else: - if val == u'ON': + if val == 'ON': return True - elif val == u'OFF': + elif val == 'OFF': return False else: return val @@ -388,7 +392,7 @@ """Configuration and driver management""" def __init__(self, **options): - super(GDALEnv, self).__init__(**options) + super().__init__(**options) self._have_registered_drivers = False def start(self): @@ -426,7 +430,13 @@ log.debug("GDAL data found in other locations: path=%r.", path) self.update_config_options(GDAL_DATA=path) - if 'PROJ_LIB' in os.environ: + if 'PROJ_DATA' in os.environ: + # PROJ 9.1+ + log.debug("PROJ_DATA found in environment.") + path = os.environ["PROJ_DATA"] + set_proj_data_search_path(path) + elif 'PROJ_LIB' in os.environ: + # PROJ < 9.1 log.debug("PROJ_LIB found in environment.") path = os.environ["PROJ_LIB"] set_proj_data_search_path(path) @@ -458,16 +468,11 @@ # actually makes it this far. self._have_registered_drivers = True - log.debug("Started GDALEnv: self=%r.", self) - def stop(self): # NB: do not restore the CPL error handler to its default # state here. If you do, log messages will be written to stderr # by GDAL instead of being sent to Python's logging module. 
- log.debug("Stopping GDALEnv %r.", self) CPLPopErrorHandler() - log.debug("Error handler popped.") - log.debug("Stopped GDALEnv %r.", self) def drivers(self): cdef OGRSFDriverH driver = NULL diff -Nru fiona-1.8.22/fiona/_err.pxd fiona-1.9.5/fiona/_err.pxd --- fiona-1.8.22/fiona/_err.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_err.pxd 2023-10-11 23:19:44.000000000 +0000 @@ -8,7 +8,6 @@ ctypedef int OGRErr - cdef get_last_error_msg() cdef int exc_wrap_int(int retval) except -1 cdef OGRErr exc_wrap_ogrerr(OGRErr retval) except -1 diff -Nru fiona-1.8.22/fiona/_err.pyx fiona-1.9.5/fiona/_err.pyx --- fiona-1.8.22/fiona/_err.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_err.pyx 2023-10-11 23:19:44.000000000 +0000 @@ -63,10 +63,7 @@ self.errmsg = errmsg def __str__(self): - return self.__unicode__() - - def __unicode__(self): - return u"{}".format(self.errmsg) + return str(self.errmsg) @property def args(self): @@ -237,11 +234,12 @@ cdef get_last_error_msg(): """Checks GDAL error stack for the latest error message + Returns ------- An error message or empty string - """ + """ err_msg = CPLGetLastErrorMsg() if err_msg != NULL: @@ -277,8 +275,10 @@ """ if err == 0: return err - else: - raise CPLE_BaseError(3, err, "OGR Error code {}".format(err)) + exc = exc_check() + if exc: + raise exc + raise CPLE_BaseError(3, err, "OGR Error code {}".format(err)) cdef void *exc_wrap_pointer(void *ptr) except NULL: diff -Nru fiona-1.8.22/fiona/_geometry.pxd fiona-1.9.5/fiona/_geometry.pxd --- fiona-1.8.22/fiona/_geometry.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_geometry.pxd 2023-10-11 23:19:44.000000000 +0000 @@ -108,21 +108,19 @@ cdef class GeomBuilder: - cdef void *geom - cdef object code - cdef object geomtypename cdef object ndims - cdef _buildCoords(self, void *geom) - cpdef _buildPoint(self) - cpdef _buildLineString(self) - cpdef _buildLinearRing(self) - cdef _buildParts(self, void *geom) - cpdef _buildPolygon(self) - cpdef _buildMultiPoint(self) - cpdef _buildMultiLineString(self) - cpdef _buildMultiPolygon(self) - cpdef _buildGeometryCollection(self) - cdef build(self, void *geom) + cdef list _buildCoords(self, void *geom) + cdef dict _buildPoint(self, void *geom) + cdef dict _buildLineString(self, void *geom) + cdef dict _buildLinearRing(self, void *geom) + cdef list _buildParts(self, void *geom) + cdef dict _buildPolygon(self, void *geom) + cdef dict _buildMultiPoint(self, void *geom) + cdef dict _buildMultiLineString(self, void *geom) + cdef dict _buildMultiPolygon(self, void *geom) + cdef dict _buildGeometryCollection(self, void *geom) + cdef object build_from_feature(self, void *feature) + cdef object build(self, void *geom) cpdef build_wkb(self, object wkb) diff -Nru fiona-1.8.22/fiona/_geometry.pyx fiona-1.9.5/fiona/_geometry.pyx --- fiona-1.8.22/fiona/_geometry.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_geometry.pyx 2023-10-11 23:19:44.000000000 +0000 @@ -2,9 +2,12 @@ from __future__ import absolute_import +include "gdal.pxi" + import logging from fiona.errors import UnsupportedGeometryTypeError +from fiona.model import decode_object, GEOMETRY_TYPES, Geometry, OGRGeometryType from fiona._err cimport exc_wrap_int @@ -15,40 +18,16 @@ log = logging.getLogger(__name__) log.addHandler(NullHandler()) -# Mapping of OGR integer geometry types to GeoJSON type names. 
-GEOMETRY_TYPES = { - 0: 'Unknown', - 1: 'Point', - 2: 'LineString', - 3: 'Polygon', - 4: 'MultiPoint', - 5: 'MultiLineString', - 6: 'MultiPolygon', - 7: 'GeometryCollection', - # Unsupported types. - #8: 'CircularString', - #9: 'CompoundCurve', - #10: 'CurvePolygon', - #11: 'MultiCurve', - #12: 'MultiSurface', - #13: 'Curve', - #14: 'Surface', - #15: 'PolyhedralSurface', - #16: 'TIN', - #17: 'Triangle', - 100: 'None', - 101: 'LinearRing', - 0x80000001: '3D Point', - 0x80000002: '3D LineString', - 0x80000003: '3D Polygon', - 0x80000004: '3D MultiPoint', - 0x80000005: '3D MultiLineString', - 0x80000006: '3D MultiPolygon', - 0x80000007: '3D GeometryCollection' } # mapping of GeoJSON type names to OGR integer geometry types GEOJSON2OGR_GEOMETRY_TYPES = dict((v, k) for k, v in GEOMETRY_TYPES.iteritems()) +cdef int ogr_get_geometry_type(void *geometry): + # OGR_G_GetGeometryType with NULL geometry support + if geometry == NULL: + return 0 # unknown + return OGR_G_GetGeometryType(geometry) + cdef unsigned int geometry_type_code(name) except? 9999: """Map OGC geometry type names to integer codes.""" @@ -115,7 +94,15 @@ cdef class GeomBuilder: """Builds Fiona (GeoJSON) geometries from an OGR geometry handle. """ - cdef _buildCoords(self, void *geom): + + # Note: The geometry passed to OGR_G_ForceToPolygon and + # OGR_G_ForceToMultiPolygon must be removed from the container / + # feature beforehand and the returned geometry needs to be cleaned up + # afterwards. + # OGR_G_GetLinearGeometry returns a copy of the geometry that needs + # to be cleaned up afterwards. + + cdef list _buildCoords(self, void *geom): # Build a coordinate sequence cdef int i if geom == NULL: @@ -128,66 +115,151 @@ values.append(OGR_G_GetZ(geom, i)) coords.append(tuple(values)) return coords - - cpdef _buildPoint(self): - return {'type': 'Point', 'coordinates': self._buildCoords(self.geom)[0]} - - cpdef _buildLineString(self): - return {'type': 'LineString', 'coordinates': self._buildCoords(self.geom)} - - cpdef _buildLinearRing(self): - return {'type': 'LinearRing', 'coordinates': self._buildCoords(self.geom)} - - cdef _buildParts(self, void *geom): + + cdef dict _buildPoint(self, void *geom): + return {'type': 'Point', 'coordinates': self._buildCoords(geom)[0]} + + cdef dict _buildLineString(self, void *geom): + return {'type': 'LineString', 'coordinates': self._buildCoords(geom)} + + cdef dict _buildLinearRing(self, void *geom): + return {'type': 'LinearRing', 'coordinates': self._buildCoords(geom)} + + cdef list _buildParts(self, void *geom): cdef int j + cdef int code + cdef int count cdef void *part if geom == NULL: raise ValueError("Null geom") parts = [] - for j in range(OGR_G_GetGeometryCount(geom)): + j = 0 + count = OGR_G_GetGeometryCount(geom) + while j < count: part = OGR_G_GetGeometryRef(geom, j) - parts.append(GeomBuilder().build(part)) + code = base_geometry_type_code(ogr_get_geometry_type(part)) + if code in ( + OGRGeometryType.PolyhedralSurface.value, + OGRGeometryType.TIN.value, + OGRGeometryType.Triangle.value, + ): + OGR_G_RemoveGeometry(geom, j, False) + # Removing a geometry will cause the geometry count to drop by one, + # and all “higher” geometries will shuffle down one in index. 
+ count -= 1 + parts.append(GeomBuilder().build(part)) + else: + parts.append(GeomBuilder().build(part)) + j += 1 return parts - - cpdef _buildPolygon(self): - coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] + + cdef dict _buildPolygon(self, void *geom): + coordinates = [p['coordinates'] for p in self._buildParts(geom)] return {'type': 'Polygon', 'coordinates': coordinates} - - cpdef _buildMultiPoint(self): - coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] + + cdef dict _buildMultiPoint(self, void *geom): + coordinates = [p['coordinates'] for p in self._buildParts(geom)] return {'type': 'MultiPoint', 'coordinates': coordinates} - - cpdef _buildMultiLineString(self): - coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] + + cdef dict _buildMultiLineString(self, void *geom): + coordinates = [p['coordinates'] for p in self._buildParts(geom)] return {'type': 'MultiLineString', 'coordinates': coordinates} - - cpdef _buildMultiPolygon(self): - coordinates = [p['coordinates'] for p in self._buildParts(self.geom)] + + cdef dict _buildMultiPolygon(self, void *geom): + coordinates = [p['coordinates'] for p in self._buildParts(geom)] return {'type': 'MultiPolygon', 'coordinates': coordinates} - cpdef _buildGeometryCollection(self): - parts = self._buildParts(self.geom) + cdef dict _buildGeometryCollection(self, void *geom): + parts = self._buildParts(geom) return {'type': 'GeometryCollection', 'geometries': parts} - - cdef build(self, void *geom): - # The only method anyone needs to call - if geom == NULL: - raise ValueError("Null geom") - cdef unsigned int etype = OGR_G_GetGeometryType(geom) - self.code = base_geometry_type_code(etype) + cdef object build_from_feature(self, void *feature): + # Build Geometry from *OGRFeatureH + cdef void *cogr_geometry = NULL + cdef int code + + cogr_geometry = OGR_F_GetGeometryRef(feature) + code = base_geometry_type_code(ogr_get_geometry_type(cogr_geometry)) + + # We need to take ownership of the geometry before we can call + # OGR_G_ForceToPolygon or OGR_G_ForceToMultiPolygon + if code in ( + OGRGeometryType.PolyhedralSurface.value, + OGRGeometryType.TIN.value, + OGRGeometryType.Triangle.value, + ): + cogr_geometry = OGR_F_StealGeometry(feature) + return self.build(cogr_geometry) - if self.code not in GEOMETRY_TYPES: - raise UnsupportedGeometryTypeError(self.code) + cdef object build(self, void *geom): + # Build Geometry from *OGRGeometryH - self.geomtypename = GEOMETRY_TYPES[self.code] + cdef void *geometry_to_dealloc = NULL + + if geom == NULL: + return None + + code = base_geometry_type_code(ogr_get_geometry_type(geom)) + + # We convert special geometries (Curves, TIN, Triangle, ...) + # to GeoJSON compatible geometries (LineStrings, Polygons, MultiPolygon, ...) 
+ if code in ( + OGRGeometryType.CircularString.value, + OGRGeometryType.CompoundCurve.value, + OGRGeometryType.CurvePolygon.value, + OGRGeometryType.MultiCurve.value, + OGRGeometryType.MultiSurface.value, + # OGRGeometryType.Curve.value, # Abstract type + # OGRGeometryType.Surface.value, # Abstract type + ): + geometry_to_dealloc = OGR_G_GetLinearGeometry(geom, 0.0, NULL) + code = base_geometry_type_code(ogr_get_geometry_type(geometry_to_dealloc)) + geom = geometry_to_dealloc + elif code in ( + OGRGeometryType.PolyhedralSurface.value, + OGRGeometryType.TIN.value, + OGRGeometryType.Triangle.value, + ): + if code in (OGRGeometryType.PolyhedralSurface.value, OGRGeometryType.TIN.value): + geometry_to_dealloc = OGR_G_ForceToMultiPolygon(geom) + elif code == OGRGeometryType.Triangle.value: + geometry_to_dealloc = OGR_G_ForceToPolygon(geom) + code = base_geometry_type_code(ogr_get_geometry_type(geometry_to_dealloc)) + geom = geometry_to_dealloc self.ndims = OGR_G_GetCoordinateDimension(geom) - self.geom = geom - return getattr(self, '_build' + self.geomtypename)() + + if code not in GEOMETRY_TYPES: + raise UnsupportedGeometryTypeError(code) + + geomtypename = GEOMETRY_TYPES[code] + if geomtypename == "Point": + built = self._buildPoint(geom) + elif geomtypename == "LineString": + built = self._buildLineString(geom) + elif geomtypename == "LinearRing": + built = self._buildLinearRing(geom) + elif geomtypename == "Polygon": + built = self._buildPolygon(geom) + elif geomtypename == "MultiPoint": + built = self._buildMultiPoint(geom) + elif geomtypename == "MultiLineString": + built = self._buildMultiLineString(geom) + elif geomtypename == "MultiPolygon": + built = self._buildMultiPolygon(geom) + elif geomtypename == "GeometryCollection": + built = self._buildGeometryCollection(geom) + else: + raise UnsupportedGeometryTypeError(code) + + # Cleanup geometries we have ownership over + if geometry_to_dealloc is not NULL: + OGR_G_DestroyGeometry(geometry_to_dealloc) + + return Geometry.from_dict(built) cpdef build_wkb(self, object wkb): - # The only other method anyone needs to call + # Build geometry from wkb cdef object data = wkb cdef void *cogr_geometry = _createOgrGeomFromWKB(data) result = self.build(cogr_geometry) @@ -216,20 +288,20 @@ cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['Point']) self._addPointToGeometry(cogr_geometry, coordinates) return cogr_geometry - + cdef void * _buildLineString(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['LineString']) for coordinate in coordinates: self._addPointToGeometry(cogr_geometry, coordinate) return cogr_geometry - + cdef void * _buildLinearRing(self, object coordinates) except NULL: cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['LinearRing']) for coordinate in coordinates: self._addPointToGeometry(cogr_geometry, coordinate) OGR_G_CloseRings(cogr_geometry) return cogr_geometry - + cdef void * _buildPolygon(self, object coordinates) except NULL: cdef void *cogr_ring cdef void *cogr_geometry = self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['Polygon']) @@ -262,17 +334,19 @@ exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry - cdef void * _buildGeometryCollection(self, object coordinates) except NULL: + cdef void * _buildGeometryCollection(self, object geometries) except NULL: cdef void *cogr_part cdef void *cogr_geometry = 
self._createOgrGeometry(GEOJSON2OGR_GEOMETRY_TYPES['GeometryCollection']) - for part in coordinates: + for part in geometries: cogr_part = OGRGeomBuilder().build(part) exc_wrap_int(OGR_G_AddGeometryDirectly(cogr_geometry, cogr_part)) return cogr_geometry cdef void * build(self, object geometry) except NULL: - cdef object typename = geometry['type'] - cdef object coordinates = geometry.get('coordinates') + cdef object typename = geometry.type + cdef object coordinates = geometry.coordinates + cdef object geometries = geometry.geometries + if typename == 'Point': return self._buildPoint(coordinates) elif typename == 'LineString': @@ -288,14 +362,14 @@ elif typename == 'MultiPolygon': return self._buildMultiPolygon(coordinates) elif typename == 'GeometryCollection': - coordinates = geometry.get('geometries') - return self._buildGeometryCollection(coordinates) + return self._buildGeometryCollection(geometries) else: - raise ValueError("Unsupported geometry type %s" % typename) + raise UnsupportedGeometryTypeError("Unsupported geometry type %s" % typename) -def geometryRT(geometry): +def geometryRT(geom): # For testing purposes only, leaks the JSON data + geometry = decode_object(geom) cdef void *cogr_geometry = OGRGeomBuilder().build(geometry) result = GeomBuilder().build(cogr_geometry) _deleteOgrGeom(cogr_geometry) diff -Nru fiona-1.8.22/fiona/_loading.py fiona-1.9.5/fiona/_loading.py --- fiona-1.8.22/fiona/_loading.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_loading.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,28 +0,0 @@ -import glob -import os -import logging -import contextlib -import platform -import sys - -log = logging.getLogger(__name__) -log.addHandler(logging.NullHandler()) - -# With Python >= 3.8 on Windows directories in PATH are not automatically -# searched for DLL dependencies and must be added manually with -# os.add_dll_directory. 
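# [Editor's note: illustrative sketch, not part of the diff; the deleted
# comment continues with an issue link just below.] os.add_dll_directory
# returns a handle whose close() undoes the registration, which is why the
# removed helper collects the handles and closes them in a finally block.
# The directory path here is an assumed example.
import os
import platform
import sys

if platform.system() == "Windows" and sys.version_info >= (3, 8):
    handle = os.add_dll_directory(r"C:\gdal\bin")  # hypothetical DLL directory
    try:
        pass  # import extension modules that need the GDAL DLLs here
    finally:
        handle.close()  # de-register the directory again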
-# see https://github.com/Toblerity/Fiona/issues/851 - - -@contextlib.contextmanager -def add_gdal_dll_directories(): - dll_dirs = [] - if platform.system() == 'Windows' and sys.version_info >= (3, 8): - dll_directory = os.path.join(os.path.dirname(__file__), '.libs') - if os.path.exists(dll_directory): - dll_dirs.append(os.add_dll_directory(dll_directory)) - try: - yield None - finally: - for dll_dir in dll_dirs: - dll_dir.close() diff -Nru fiona-1.8.22/fiona/_shim1.pxd fiona-1.9.5/fiona/_shim1.pxd --- fiona-1.8.22/fiona/_shim1.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_shim1.pxd 1970-01-01 00:00:00.000000000 +0000 @@ -1,40 +0,0 @@ -include "ogrext1.pxd" - -ctypedef enum OGRFieldSubType: - OFSTNone = 0 - OFSTBoolean = 1 - OFSTInt16 = 2 - OFSTFloat32 = 3 - OFSTMaxSubType = 3 - -cdef bint is_field_null(void *feature, int n) -cdef void set_field_null(void *feature, int n) -cdef void gdal_flush_cache(void *cogr_ds) -cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL -cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL -cdef bint check_capability_transaction(void *cogr_ds) -cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) -cdef OGRErr gdal_commit_transaction(void *cogr_ds) -cdef OGRErr gdal_rollback_transaction(void *cogr_ds) -cdef OGRFieldSubType get_field_subtype(void *fielddefn) -cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) -cdef bint check_capability_create_layer(void *cogr_ds) -cdef void *get_linear_geometry(void *geom) -cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) -cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) -cdef void set_proj_search_path(object path) -cdef void get_proj_version(int *, int *, int *) -cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) -cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) - -from fiona._shim cimport OGR_F_GetFieldAsInteger as OGR_F_GetFieldAsInteger64 -from fiona._shim cimport OGR_F_SetFieldInteger as OGR_F_SetFieldInteger64 -from fiona._shim cimport OGR_DS_GetLayerByName as GDALDatasetGetLayerByName -from fiona._shim cimport OGR_DS_GetLayer as GDALDatasetGetLayer -from fiona._shim cimport OGR_DS_Destroy as GDALClose -from fiona._shim cimport OGR_DS_GetDriver as GDALGetDatasetDriver -from fiona._shim cimport OGRGetDriverByName as GDALGetDriverByName -from fiona._shim cimport OGR_DS_GetLayerCount as GDALDatasetGetLayerCount -from fiona._shim cimport OGR_DS_DeleteLayer as GDALDatasetDeleteLayer -from fiona._shim cimport OGR_DS_CreateLayer as GDALDatasetCreateLayer -from fiona._shim cimport OGR_Dr_DeleteDataSource as GDALDeleteDataset diff -Nru fiona-1.8.22/fiona/_shim1.pyx fiona-1.9.5/fiona/_shim1.pyx --- fiona-1.8.22/fiona/_shim1.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_shim1.pyx 1970-01-01 00:00:00.000000000 +0000 @@ -1,161 +0,0 @@ -"""Shims on top of ogrext for GDAL versions < 2""" - -import os - -from fiona.ogrext1 cimport * -from fiona._err cimport exc_wrap_pointer, exc_wrap_int -from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError -from fiona.errors import DriverError - - -cdef int OGRERR_NONE = 0 - - -cdef bint is_field_null(void *feature, int n): - if not OGR_F_IsFieldSet(feature, n): - return True - else: - return False - - -cdef void set_field_null(void *feature, int n): - pass - - 
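# [Editor's note: illustrative sketch, not part of the diff.] GDAL 1.x only
# distinguished set/unset fields, which is why the deleted shim above reduces
# "null" to "not OGR_F_IsFieldSet". GDAL >= 2.2 tracks NULL as its own state,
# shown here with the osgeo.ogr bindings:
from osgeo import ogr

defn = ogr.FeatureDefn()
defn.AddFieldDefn(ogr.FieldDefn("name", ogr.OFTString))
feature = ogr.Feature(defn)
print(feature.IsFieldSet(0), feature.IsFieldNull(0))  # False False: unset field
feature.SetFieldNull(0)  # explicit NULL, distinct from never having been set
print(feature.IsFieldNull(0))  # True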
-cdef void gdal_flush_cache(void *cogr_ds): - retval = exc_wrap_int(OGR_DS_SyncToDisk(cogr_ds)) - if retval != OGRERR_NONE: - raise RuntimeError("Failed to sync to disk") - - -cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL: - cdef void* cogr_ds = NULL - cdef void* drv = NULL - cdef void* ds = NULL - - encoding = options.get('encoding', None) - if encoding: - val = encoding.encode('utf-8') - CPLSetThreadLocalConfigOption('SHAPE_ENCODING', val) - else: - CPLSetThreadLocalConfigOption('SHAPE_ENCODING', "") - - if drivers: - for name in drivers: - name_b = name.encode() - name_c = name_b - drv = OGRGetDriverByName(name_c) - if drv != NULL: - ds = OGR_Dr_Open(drv, path_c, mode) - if ds != NULL: - cogr_ds = ds - break - else: - cogr_ds = OGROpen(path_c, mode, NULL) - - try: - return exc_wrap_pointer(cogr_ds) - except FionaNullPointerError: - raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) - except CPLE_BaseError as exc: - raise DriverError(str(exc)) - - -cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: - cdef void* cogr_ds = NULL - cdef char **opts = NULL - - encoding = options.get('encoding', None) - if encoding: - val = encoding.encode('utf-8') - CPLSetThreadLocalConfigOption('SHAPE_ENCODING', val) - else: - CPLSetThreadLocalConfigOption('SHAPE_ENCODING', "") - - for k, v in options.items(): - k = k.upper().encode('utf-8') - if isinstance(v, bool): - v = ('ON' if v else 'OFF').encode('utf-8') - else: - v = str(v).encode('utf-8') - opts = CSLAddNameValue(opts, k, v) - - try: - return exc_wrap_pointer( - OGR_Dr_CreateDataSource(cogr_driver, path_c, opts) - ) - except FionaNullPointerError: - raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) - except CPLE_BaseError as exc: - raise DriverError(str(exc)) - finally: - CSLDestroy(opts) - - -# transactions are not supported in GDAL 1.x - - -cdef bint check_capability_transaction(void *cogr_ds): - return False - - -cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): - return OGRERR_NONE - - -cdef OGRErr gdal_commit_transaction(void* cogr_ds): - return OGRERR_NONE - - -cdef OGRErr gdal_rollback_transaction(void* cogr_ds): - return OGRERR_NONE - - -# field subtypes are not supported in GDAL 1.x -cdef OGRFieldSubType get_field_subtype(void *fielddefn): - return OFSTNone - - -cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): - pass - - -cdef bint check_capability_create_layer(void *cogr_ds): - return OGR_DS_TestCapability(cogr_ds, ODsCCreateLayer) - - -cdef void *get_linear_geometry(void *geom): - return geom - - -cdef const char* osr_get_name(OGRSpatialReferenceH hSrs): - return '' - - -cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs): - OSRFixup(hSrs) - - -cdef void set_proj_search_path(object path): - os.environ["PROJ_LIB"] = path - - -cdef void get_proj_version(int* major, int* minor, int* patch): - cdef int val = -1 - major[0] = val - minor[0] = val - patch[0] = val - - -cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): - cdef int nSecond - nSecond = int(fSecond) - OGR_F_SetFieldDateTime(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, nSecond, nTZFlag) - - -cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): - cdef int retval - cdef 
int nSecond - retval = OGR_F_GetFieldAsDateTime(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, &nSecond, nTZFlag) - fSecond[0] = float(nSecond) - return retval diff -Nru fiona-1.8.22/fiona/_shim2.pxd fiona-1.9.5/fiona/_shim2.pxd --- fiona-1.8.22/fiona/_shim2.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_shim2.pxd 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -include "ogrext2.pxd" - -cdef bint is_field_null(void *feature, int n) -cdef void set_field_null(void *feature, int n) -cdef void gdal_flush_cache(void *cogr_ds) -cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL -cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL -cdef bint check_capability_transaction(void *cogr_ds) -cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) -cdef OGRErr gdal_commit_transaction(void *cogr_ds) -cdef OGRErr gdal_rollback_transaction(void *cogr_ds) -cdef OGRFieldSubType get_field_subtype(void *fielddefn) -cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) -cdef bint check_capability_create_layer(void *cogr_ds) -cdef void *get_linear_geometry(void *geom) -cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) -cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) -cdef void set_proj_search_path(object path) -cdef void get_proj_version(int *, int *, int *) -cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) -cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) diff -Nru fiona-1.8.22/fiona/_shim2.pyx fiona-1.9.5/fiona/_shim2.pyx --- fiona-1.8.22/fiona/_shim2.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_shim2.pyx 1970-01-01 00:00:00.000000000 +0000 @@ -1,155 +0,0 @@ -"""Shims on top of ogrext for GDAL versions > 2""" - -import logging -import os - -from fiona.ogrext2 cimport * -from fiona._err cimport exc_wrap_pointer -from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError -from fiona.errors import DriverError - - -log = logging.getLogger(__name__) - - -cdef bint is_field_null(void *feature, int n): - if not OGR_F_IsFieldSet(feature, n): - return True - else: - return False - - -cdef void set_field_null(void *feature, int n): - pass - - -cdef void gdal_flush_cache(void *cogr_ds): - with cpl_errs: - GDALFlushCache(cogr_ds) - - -cdef void* gdal_open_vector(const char* path_c, int mode, drivers, options) except NULL: - cdef void* cogr_ds = NULL - cdef char **drvs = NULL - cdef char **open_opts = NULL - - flags = GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR - if mode == 1: - flags |= GDAL_OF_UPDATE - else: - flags |= GDAL_OF_READONLY - - if drivers: - for name in drivers: - name_b = name.encode() - name_c = name_b - drv = GDALGetDriverByName(name_c) - if drv != NULL: - drvs = CSLAddString(drvs, name_c) - - for k, v in options.items(): - - if v is None: - continue - - k = k.upper().encode('utf-8') - if isinstance(v, bool): - v = ('ON' if v else 'OFF').encode('utf-8') - else: - v = str(v).encode('utf-8') - log.debug("Set option %r: %r", k, v) - open_opts = CSLAddNameValue(open_opts, k, v) - - open_opts = CSLAddNameValue(open_opts, "VALIDATE_OPEN_OPTIONS", "NO") - - try: - cogr_ds = exc_wrap_pointer(GDALOpenEx( - path_c, flags, drvs, open_opts, NULL) - ) - return cogr_ds - except FionaNullPointerError: - raise DriverError("Failed to open dataset (mode={}): {}".format(mode, 
path_c.decode("utf-8"))) - except CPLE_BaseError as exc: - raise DriverError(str(exc)) - finally: - CSLDestroy(drvs) - CSLDestroy(open_opts) - - -cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: - cdef char **creation_opts = NULL - - for k, v in options.items(): - k = k.upper().encode('utf-8') - if isinstance(v, bool): - v = ('ON' if v else 'OFF').encode('utf-8') - else: - v = str(v).encode('utf-8') - log.debug("Set option %r: %r", k, v) - creation_opts = CSLAddNameValue(creation_opts, k, v) - - try: - return exc_wrap_pointer(GDALCreate(cogr_driver, path_c, 0, 0, 0, GDT_Unknown, creation_opts)) - except FionaNullPointerError: - raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) - except CPLE_BaseError as exc: - raise DriverError(str(exc)) - finally: - CSLDestroy(creation_opts) - - -cdef bint check_capability_transaction(void *cogr_ds): - return GDALDatasetTestCapability(cogr_ds, ODsCTransactions) - - -cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): - return GDALDatasetStartTransaction(cogr_ds, force) - - -cdef OGRErr gdal_commit_transaction(void* cogr_ds): - return GDALDatasetCommitTransaction(cogr_ds) - - -cdef OGRErr gdal_rollback_transaction(void* cogr_ds): - return GDALDatasetRollbackTransaction(cogr_ds) - - -cdef OGRFieldSubType get_field_subtype(void *fielddefn): - return OGR_Fld_GetSubType(fielddefn) - - -cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): - OGR_Fld_SetSubType(fielddefn, subtype) - - -cdef bint check_capability_create_layer(void *cogr_ds): - return GDALDatasetTestCapability(cogr_ds, ODsCCreateLayer) - - -cdef void *get_linear_geometry(void *geom): - return OGR_G_GetLinearGeometry(geom, 0.0, NULL) - -cdef const char* osr_get_name(OGRSpatialReferenceH hSrs): - return '' - -cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs): - OSRFixup(hSrs) - - -cdef void set_proj_search_path(object path): - os.environ["PROJ_LIB"] = path - - -cdef void get_proj_version(int* major, int* minor, int* patch): - cdef int val = -1 - major[0] = val - minor[0] = val - patch[0] = val - - -cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): - OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) - - -cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): - return OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) diff -Nru fiona-1.8.22/fiona/_shim22.pxd fiona-1.9.5/fiona/_shim22.pxd --- fiona-1.8.22/fiona/_shim22.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_shim22.pxd 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -include "ogrext2.pxd" - -cdef bint is_field_null(void *feature, int n) -cdef void set_field_null(void *feature, int n) -cdef void gdal_flush_cache(void *cogr_ds) -cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL -cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL -cdef bint check_capability_transaction(void *cogr_ds) -cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) -cdef OGRErr gdal_commit_transaction(void *cogr_ds) -cdef OGRErr gdal_rollback_transaction(void *cogr_ds) -cdef OGRFieldSubType get_field_subtype(void *fielddefn) -cdef void set_field_subtype(void 
*fielddefn, OGRFieldSubType subtype) -cdef bint check_capability_create_layer(void *cogr_ds) -cdef void *get_linear_geometry(void *geom) -cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) -cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) -cdef void set_proj_search_path(object path) -cdef void get_proj_version(int *, int *, int *) -cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) -cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) diff -Nru fiona-1.8.22/fiona/_shim22.pyx fiona-1.9.5/fiona/_shim22.pyx --- fiona-1.8.22/fiona/_shim22.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_shim22.pyx 1970-01-01 00:00:00.000000000 +0000 @@ -1,165 +0,0 @@ -"""Shims on top of ogrext for GDAL versions >= 2.2""" - -cdef extern from "ogr_api.h": - - int OGR_F_IsFieldNull(void *feature, int n) - -import logging -import os - -from fiona.ogrext2 cimport * -from fiona._err cimport exc_wrap_pointer - -from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError -from fiona.errors import DriverError - - -log = logging.getLogger(__name__) - - -cdef bint is_field_null(void *feature, int n): - if OGR_F_IsFieldNull(feature, n): - return True - elif not OGR_F_IsFieldSet(feature, n): - return True - else: - return False - - -cdef void set_field_null(void *feature, int n): - OGR_F_SetFieldNull(feature, n) - - -cdef void gdal_flush_cache(void *cogr_ds): - with cpl_errs: - GDALFlushCache(cogr_ds) - - -cdef void* gdal_open_vector(char* path_c, int mode, drivers, options) except NULL: - cdef void* cogr_ds = NULL - cdef char **drvs = NULL - cdef void* drv = NULL - cdef char **open_opts = NULL - - flags = GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR - if mode == 1: - flags |= GDAL_OF_UPDATE - else: - flags |= GDAL_OF_READONLY - - if drivers: - for name in drivers: - name_b = name.encode() - name_c = name_b - drv = GDALGetDriverByName(name_c) - if drv != NULL: - drvs = CSLAddString(drvs, name_c) - - for k, v in options.items(): - - if v is None: - continue - - k = k.upper().encode('utf-8') - if isinstance(v, bool): - v = ('ON' if v else 'OFF').encode('utf-8') - else: - v = str(v).encode('utf-8') - log.debug("Set option %r: %r", k, v) - open_opts = CSLAddNameValue(open_opts, k, v) - - open_opts = CSLAddNameValue(open_opts, "VALIDATE_OPEN_OPTIONS", "NO") - - try: - cogr_ds = exc_wrap_pointer( - GDALOpenEx(path_c, flags, drvs, open_opts, NULL) - ) - return cogr_ds - except FionaNullPointerError: - raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) - except CPLE_BaseError as exc: - raise DriverError(str(exc)) - finally: - CSLDestroy(drvs) - CSLDestroy(open_opts) - - -cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: - cdef char **creation_opts = NULL - cdef void *cogr_ds = NULL - - for k, v in options.items(): - k = k.upper().encode('utf-8') - if isinstance(v, bool): - v = ('ON' if v else 'OFF').encode('utf-8') - else: - v = str(v).encode('utf-8') - log.debug("Set option %r: %r", k, v) - creation_opts = CSLAddNameValue(creation_opts, k, v) - - try: - return exc_wrap_pointer(GDALCreate(cogr_driver, path_c, 0, 0, 0, GDT_Unknown, creation_opts)) - except FionaNullPointerError: - raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) - except CPLE_BaseError as exc: - raise DriverError(str(exc)) - finally: - 
CSLDestroy(creation_opts) - - -cdef bint check_capability_transaction(void *cogr_ds): - return GDALDatasetTestCapability(cogr_ds, ODsCTransactions) - - -cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): - return GDALDatasetStartTransaction(cogr_ds, force) - - -cdef OGRErr gdal_commit_transaction(void* cogr_ds): - return GDALDatasetCommitTransaction(cogr_ds) - - -cdef OGRErr gdal_rollback_transaction(void* cogr_ds): - return GDALDatasetRollbackTransaction(cogr_ds) - - -cdef OGRFieldSubType get_field_subtype(void *fielddefn): - return OGR_Fld_GetSubType(fielddefn) - - -cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): - OGR_Fld_SetSubType(fielddefn, subtype) - - -cdef bint check_capability_create_layer(void *cogr_ds): - return GDALDatasetTestCapability(cogr_ds, ODsCCreateLayer) - - -cdef void *get_linear_geometry(void *geom): - return OGR_G_GetLinearGeometry(geom, 0.0, NULL) - - -cdef const char* osr_get_name(OGRSpatialReferenceH hSrs): - return '' - - -cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs): - OSRFixup(hSrs) - -cdef void set_proj_search_path(object path): - os.environ["PROJ_LIB"] = path - - -cdef void get_proj_version(int* major, int* minor, int* patch): - cdef int val = -1 - major[0] = val - minor[0] = val - patch[0] = val - - -cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): - OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) - - -cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): - return OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) diff -Nru fiona-1.8.22/fiona/_shim3.pxd fiona-1.9.5/fiona/_shim3.pxd --- fiona-1.8.22/fiona/_shim3.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_shim3.pxd 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -include "ogrext3.pxd" - -cdef bint is_field_null(void *feature, int n) -cdef void set_field_null(void *feature, int n) -cdef void gdal_flush_cache(void *cogr_ds) -cdef void* gdal_open_vector(const char *path_c, int mode, drivers, options) except NULL -cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL -cdef bint check_capability_transaction(void *cogr_ds) -cdef OGRErr gdal_start_transaction(void *cogr_ds, int force) -cdef OGRErr gdal_commit_transaction(void *cogr_ds) -cdef OGRErr gdal_rollback_transaction(void *cogr_ds) -cdef OGRFieldSubType get_field_subtype(void *fielddefn) -cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype) -cdef bint check_capability_create_layer(void *cogr_ds) -cdef void *get_linear_geometry(void *geom) -cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) -cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) -cdef void set_proj_search_path(object path) -cdef void get_proj_version(int *, int *, int *) -cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) -cdef int get_field_as_datetime(void *cogr_feature, int iField, int *, int *, int *, int *, int *, float *, int *) diff -Nru fiona-1.8.22/fiona/_shim3.pyx fiona-1.9.5/fiona/_shim3.pyx --- fiona-1.8.22/fiona/_shim3.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_shim3.pyx 1970-01-01 00:00:00.000000000 
+0000 @@ -1,177 +0,0 @@ -"""Shims on top of ogrext for GDAL versions >= 3.0""" - -cdef extern from "ogr_api.h": - - int OGR_F_IsFieldNull(void *feature, int n) - - -cdef extern from "ogr_srs_api.h" nogil: - - ctypedef enum OSRAxisMappingStrategy: - OAMS_TRADITIONAL_GIS_ORDER - - const char* OSRGetName(OGRSpatialReferenceH hSRS) - void OSRSetAxisMappingStrategy(OGRSpatialReferenceH hSRS, OSRAxisMappingStrategy) - void OSRSetPROJSearchPaths(const char *const *papszPaths) - - -from fiona.ogrext3 cimport * -from fiona._err cimport exc_wrap_pointer -from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError -from fiona.errors import DriverError - -import logging - - -log = logging.getLogger(__name__) - - -cdef bint is_field_null(void *feature, int n): - if OGR_F_IsFieldNull(feature, n): - return True - elif not OGR_F_IsFieldSet(feature, n): - return True - else: - return False - - -cdef void set_field_null(void *feature, int n): - OGR_F_SetFieldNull(feature, n) - - -cdef void gdal_flush_cache(void *cogr_ds): - with cpl_errs: - GDALFlushCache(cogr_ds) - - -cdef void* gdal_open_vector(char* path_c, int mode, drivers, options) except NULL: - cdef void* cogr_ds = NULL - cdef char **drvs = NULL - cdef void* drv = NULL - cdef char **open_opts = NULL - - flags = GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR - if mode == 1: - flags |= GDAL_OF_UPDATE - else: - flags |= GDAL_OF_READONLY - - if drivers: - for name in drivers: - name_b = name.encode() - name_c = name_b - drv = GDALGetDriverByName(name_c) - if drv != NULL: - drvs = CSLAddString(drvs, name_c) - - for k, v in options.items(): - - if v is None: - continue - - k = k.upper().encode('utf-8') - if isinstance(v, bool): - v = ('ON' if v else 'OFF').encode('utf-8') - else: - v = str(v).encode('utf-8') - log.debug("Set option %r: %r", k, v) - open_opts = CSLAddNameValue(open_opts, k, v) - - open_opts = CSLAddNameValue(open_opts, "VALIDATE_OPEN_OPTIONS", "NO") - - try: - cogr_ds = exc_wrap_pointer( - GDALOpenEx(path_c, flags, drvs, open_opts, NULL) - ) - return cogr_ds - except FionaNullPointerError: - raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) - except CPLE_BaseError as exc: - raise DriverError(str(exc)) - finally: - CSLDestroy(drvs) - CSLDestroy(open_opts) - - -cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: - cdef char **creation_opts = NULL - cdef void *cogr_ds = NULL - - for k, v in options.items(): - k = k.upper().encode('utf-8') - if isinstance(v, bool): - v = ('ON' if v else 'OFF').encode('utf-8') - else: - v = str(v).encode('utf-8') - log.debug("Set option %r: %r", k, v) - creation_opts = CSLAddNameValue(creation_opts, k, v) - - try: - return exc_wrap_pointer(GDALCreate(cogr_driver, path_c, 0, 0, 0, GDT_Unknown, creation_opts)) - except FionaNullPointerError: - raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) - except CPLE_BaseError as exc: - raise DriverError(str(exc)) - finally: - CSLDestroy(creation_opts) - - -cdef bint check_capability_transaction(void *cogr_ds): - return GDALDatasetTestCapability(cogr_ds, ODsCTransactions) - - -cdef OGRErr gdal_start_transaction(void* cogr_ds, int force): - return GDALDatasetStartTransaction(cogr_ds, force) - - -cdef OGRErr gdal_commit_transaction(void* cogr_ds): - return GDALDatasetCommitTransaction(cogr_ds) - - -cdef OGRErr gdal_rollback_transaction(void* cogr_ds): - return GDALDatasetRollbackTransaction(cogr_ds) - - -cdef OGRFieldSubType get_field_subtype(void *fielddefn): - 
return OGR_Fld_GetSubType(fielddefn) - - -cdef void set_field_subtype(void *fielddefn, OGRFieldSubType subtype): - OGR_Fld_SetSubType(fielddefn, subtype) - - -cdef bint check_capability_create_layer(void *cogr_ds): - return GDALDatasetTestCapability(cogr_ds, ODsCCreateLayer) - - -cdef void *get_linear_geometry(void *geom): - return OGR_G_GetLinearGeometry(geom, 0.0, NULL) - - -cdef const char* osr_get_name(OGRSpatialReferenceH hSrs): - return OSRGetName(hSrs) - - -cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs): - OSRSetAxisMappingStrategy(hSrs, OAMS_TRADITIONAL_GIS_ORDER) - - -cdef void set_proj_search_path(object path): - cdef char **paths = NULL - cdef const char *path_c = NULL - path_b = path.encode("utf-8") - path_c = path_b - paths = CSLAddString(paths, path_c) - OSRSetPROJSearchPaths(paths) - - -cdef void get_proj_version(int* major, int* minor, int* patch): - OSRGetPROJVersion(major, minor, patch) - - -cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): - OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) - - -cdef int get_field_as_datetime(void *cogr_feature, int iField, int* nYear, int* nMonth, int* nDay, int* nHour, int* nMinute, float* fSecond, int* nTZFlag): - return OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) diff -Nru fiona-1.8.22/fiona/_show_versions.py fiona-1.9.5/fiona/_show_versions.py --- fiona-1.8.22/fiona/_show_versions.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_show_versions.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,6 +1,6 @@ import platform import sys -import os + import fiona from fiona._env import get_gdal_release_name, get_proj_version_tuple @@ -13,10 +13,7 @@ fiona_version = fiona.__version__ gdal_release_name = get_gdal_release_name() proj_version_tuple = get_proj_version_tuple() - if proj_version_tuple is not None: - proj_version = ".".join(map(str, proj_version_tuple)) - else: - proj_version = "Proj version not available" + proj_version = ".".join(map(str, proj_version_tuple)) os_info = "{system} {release}".format(system=platform.system(), release=platform.release()) python_version = platform.python_version() diff -Nru fiona-1.8.22/fiona/_transform.pyx fiona-1.9.5/fiona/_transform.pyx --- fiona-1.8.22/fiona/_transform.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/_transform.pyx 2023-10-11 23:19:44.000000000 +0000 @@ -1,16 +1,21 @@ # distutils: language = c++ # # Coordinate and geometry transformations. 
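# [Editor's note: illustrative sketch, not part of the diff.] The simplified
# _show_versions code above assumes get_proj_version_tuple() always returns a
# tuple and renders it as a dotted string; with an assumed example value:
proj_version_tuple = (9, 0, 1)  # e.g. a value get_proj_version_tuple() might return
proj_version = ".".join(map(str, proj_version_tuple))
print(proj_version)  # 9.0.1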
- from __future__ import absolute_import +include "gdal.pxi" + import logging +import warnings +from collections import UserDict -from fiona cimport _cpl, _crs, _csl, _geometry -from fiona._crs cimport OGRSpatialReferenceH -from fiona._shim cimport osr_set_traditional_axis_mapping_strategy +from fiona cimport _cpl, _csl, _geometry +from fiona.crs cimport OGRSpatialReferenceH, osr_set_traditional_axis_mapping_strategy -from fiona.compat import UserDict +from fiona.compat import DICT_TYPES +from fiona.crs import CRS +from fiona.errors import TransformError +from fiona.model import Geometry cdef extern from "ogr_geometry.h" nogil: @@ -36,40 +41,20 @@ cdef void *_crs_from_crs(object crs): - cdef char *proj_c = NULL + cdef char *wkt_c = NULL cdef OGRSpatialReferenceH osr = NULL - osr = _crs.OSRNewSpatialReference(NULL) + osr = OSRNewSpatialReference(NULL) + if osr == NULL: raise ValueError("NULL spatial reference") + params = [] - # Normally, we expect a CRS dict. - if isinstance(crs, UserDict): - crs = dict(crs) - - if isinstance(crs, dict): - # EPSG is a special case. - init = crs.get('init') - if init: - auth, val = init.split(':') - if auth.upper() == 'EPSG': - _crs.OSRImportFromEPSG(osr, int(val)) - else: - crs['wktext'] = True - for k, v in crs.items(): - if v is True or (k in ('no_defs', 'wktext') and v): - params.append("+%s" % k) - else: - params.append("+%s=%s" % (k, v)) - proj = " ".join(params) - log.debug("PROJ.4 to be imported: %r", proj) - proj_b = proj.encode('utf-8') - proj_c = proj_b - _crs.OSRImportFromProj4(osr, proj_c) - # Fall back for CRS strings like "EPSG:3857." - else: - proj_b = crs.encode('utf-8') - proj_c = proj_b - _crs.OSRSetFromUserInput(osr, proj_c) + + wkt = CRS.from_user_input(crs).to_wkt() + + wkt_b = wkt.encode('utf-8') + wkt_c = wkt_b + OSRSetFromUserInput(osr, wkt_c) osr_set_traditional_axis_mapping_strategy(osr) return osr @@ -96,8 +81,8 @@ x[i] = xs[i] y[i] = ys[i] - transform = _crs.OCTNewCoordinateTransformation(src, dst) - res = _crs.OCTTransform(transform, n, x, y, NULL) + transform = OCTNewCoordinateTransformation(src, dst) + res = OCTTransform(transform, n, x, y, NULL) res_xs = [0]*n res_ys = [0]*n @@ -108,16 +93,46 @@ _cpl.CPLFree(x) _cpl.CPLFree(y) - _crs.OCTDestroyCoordinateTransformation(transform) - _crs.OSRRelease(src) - _crs.OSRRelease(dst) + OCTDestroyCoordinateTransformation(transform) + OSRRelease(src) + OSRRelease(dst) return res_xs, res_ys -def _transform_geom( - src_crs, dst_crs, geom, antimeridian_cutting, antimeridian_offset, - precision): - """Return a transformed geometry.""" +cdef object _transform_single_geom( + object single_geom, + OGRGeometryFactory *factory, + void *transform, + char **options, +): + """Transform a single geometry.""" + cdef void *src_ogr_geom = NULL + cdef void *dst_ogr_geom = NULL + + src_ogr_geom = _geometry.OGRGeomBuilder().build(single_geom) + dst_ogr_geom = factory.transformWithOptions( + src_ogr_geom, + transform, + options) + + if dst_ogr_geom == NULL and CPLGetConfigOption("OGR_ENABLE_PARTIAL_REPROJECTION", "OFF") != b"ON": + raise TransformError( + "Full reprojection failed. To enable partial reprojection set OGR_ENABLE_PARTIAL_REPROJECTION=True" + ) + else: + out_geom = _geometry.GeomBuilder().build(dst_ogr_geom) + _geometry.OGR_G_DestroyGeometry(dst_ogr_geom) + + if src_ogr_geom != NULL: + _geometry.OGR_G_DestroyGeometry(src_ogr_geom) + + return out_geom + + +def _transform_geom(src_crs, dst_crs, geom, antimeridian_cutting, antimeridian_offset, precision): + """Return transformed geometries. 
+ + """ cdef char *proj_c = NULL cdef char *key_c = NULL cdef char *val_c = NULL @@ -126,119 +141,58 @@ cdef OGRSpatialReferenceH dst = NULL cdef void *transform = NULL cdef OGRGeometryFactory *factory = NULL - cdef void *src_ogr_geom = NULL - cdef void *dst_ogr_geom = NULL - cdef int i - if src_crs and dst_crs: - src = _crs_from_crs(src_crs) - dst = _crs_from_crs(dst_crs) - transform = _crs.OCTNewCoordinateTransformation(src, dst) - - # Transform options. - options = _csl.CSLSetNameValue( - options, "DATELINEOFFSET", - str(antimeridian_offset).encode('utf-8')) - if antimeridian_cutting: - options = _csl.CSLSetNameValue(options, "WRAPDATELINE", "YES") - - factory = new OGRGeometryFactory() - src_ogr_geom = _geometry.OGRGeomBuilder().build(geom) - dst_ogr_geom = factory.transformWithOptions( - src_ogr_geom, - transform, - options) - g = _geometry.GeomBuilder().build(dst_ogr_geom) + if not all([src_crs, dst_crs]): + raise RuntimeError("Must provide a source and destination CRS.") - _geometry.OGR_G_DestroyGeometry(dst_ogr_geom) - _geometry.OGR_G_DestroyGeometry(src_ogr_geom) - _crs.OCTDestroyCoordinateTransformation(transform) - if options != NULL: - _csl.CSLDestroy(options) - _crs.OSRRelease(src) - _crs.OSRRelease(dst) + src = _crs_from_crs(src_crs) + dst = _crs_from_crs(dst_crs) + transform = OCTNewCoordinateTransformation(src, dst) + # Transform options. + options = _csl.CSLSetNameValue( + options, + "DATELINEOFFSET", + str(antimeridian_offset).encode('utf-8') + ) + + if antimeridian_cutting: + options = _csl.CSLSetNameValue(options, "WRAPDATELINE", "YES") + + factory = new OGRGeometryFactory() + + if isinstance(geom, Geometry): + out_geom = recursive_round( + _transform_single_geom(geom, factory, transform, options), precision) else: - g = geom - - if precision >= 0: - - def round_point(g): - coords = list(g['coordinates']) - x, y = coords[:2] - x = round(x, precision) - y = round(y, precision) - new_coords = [x, y] - if len(coords) == 3: - z = coords[2] - new_coords.append(round(z, precision)) - return new_coords - - - def round_linestring(g): - coords = list(zip(*g['coordinates'])) - xp, yp = coords[:2] - xp = [round(v, precision) for v in xp] - yp = [round(v, precision) for v in yp] - if len(coords) == 3: - zp = coords[2] - zp = [round(v, precision) for v in zp] - new_coords = list(zip(xp, yp, zp)) - else: - new_coords = list(zip(xp, yp)) - return new_coords - - - def round_polygon(g): - new_coords = [] - for piece in g['coordinates']: - coords = list(zip(*piece)) - xp, yp = coords[:2] - xp = [round(v, precision) for v in xp] - yp = [round(v, precision) for v in yp] - if len(coords) == 3: - zp = coords[2] - zp = [round(v, precision) for v in zp] - new_coords.append(list(zip(xp, yp, zp))) - else: - new_coords.append(list(zip(xp, yp))) - return new_coords - - def round_multipolygon(g): - parts = g['coordinates'] - new_coords = [] - for part in parts: - inner_coords = [] - for ring in part: - coords = list(zip(*ring)) - xp, yp = coords[:2] - xp = [round(v, precision) for v in xp] - yp = [round(v, precision) for v in yp] - if len(coords) == 3: - zp = coords[2] - zp = [round(v, precision) for v in zp] - inner_coords.append(list(zip(xp, yp, zp))) - else: - inner_coords.append(list(zip(xp, yp))) - new_coords.append(inner_coords) - return new_coords - - def round_geometry(g): - if g['type'] == 'Point': - g['coordinates'] = round_point(g) - elif g['type'] in ['LineString', 'MultiPoint']: - g['coordinates'] = round_linestring(g) - elif g['type'] in ['Polygon', 'MultiLineString']: - 
g['coordinates'] = round_polygon(g) - elif g['type'] == 'MultiPolygon': - g['coordinates'] = round_multipolygon(g) - else: - raise RuntimeError("Unsupported geometry type: {}".format(g['type'])) - - if g['type'] == 'GeometryCollection': - for _g in g['geometries']: - round_geometry(_g) - else: - round_geometry(g) - - return g + out_geom = [ + recursive_round( + _transform_single_geom(single_geom, factory, transform, options), + precision, + ) + for single_geom in geom + ] + + OCTDestroyCoordinateTransformation(transform) + + if options != NULL: + _csl.CSLDestroy(options) + + OSRRelease(src) + OSRRelease(dst) + + return out_geom + + +def recursive_round(obj, precision): + """Recursively round coordinates.""" + if precision < 0: + return obj + if getattr(obj, 'geometries', None): + return Geometry(geometries=[recursive_round(part, precision) for part in obj.geometries]) + elif getattr(obj, 'coordinates', None): + return Geometry(coordinates=[recursive_round(part, precision) for part in obj.coordinates]) + if isinstance(obj, (int, float)): + return round(obj, precision) + else: + return [recursive_round(part, precision) for part in obj] diff -Nru fiona-1.8.22/fiona/_vendor/munch/LICENSE.txt fiona-1.9.5/fiona/_vendor/munch/LICENSE.txt --- fiona-1.8.22/fiona/_vendor/munch/LICENSE.txt 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/fiona/_vendor/munch/LICENSE.txt 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,19 @@ +Copyright (c) 2010 David Schoonover + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff -Nru fiona-1.8.22/fiona/_vendor/munch/__init__.py fiona-1.9.5/fiona/_vendor/munch/__init__.py --- fiona-1.8.22/fiona/_vendor/munch/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/fiona/_vendor/munch/__init__.py 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,534 @@ +""" Munch is a subclass of dict with attribute-style access. + + >>> b = Munch() + >>> b.hello = 'world' + >>> b.hello + 'world' + >>> b['hello'] += "!" + >>> b.hello + 'world!' + >>> b.foo = Munch(lol=True) + >>> b.foo.lol + True + >>> b.foo is b['foo'] + True + + It is safe to import * from this module: + + __all__ = ('Munch', 'munchify','unmunchify') + + un/munchify provide dictionary conversion; Munches can also be + converted via Munch.to/fromDict(). 
+""" + +from .python3_compat import iterkeys, iteritems, Mapping #, u + +__version__ = "2.5.0" +VERSION = tuple(map(int, __version__.split('.')[:3])) + +__all__ = ('Munch', 'munchify', 'DefaultMunch', 'DefaultFactoryMunch', 'unmunchify') + + + +class Munch(dict): + """ A dictionary that provides attribute-style access. + + >>> b = Munch() + >>> b.hello = 'world' + >>> b.hello + 'world' + >>> b['hello'] += "!" + >>> b.hello + 'world!' + >>> b.foo = Munch(lol=True) + >>> b.foo.lol + True + >>> b.foo is b['foo'] + True + + A Munch is a subclass of dict; it supports all the methods a dict does... + + >>> sorted(b.keys()) + ['foo', 'hello'] + + Including update()... + + >>> b.update({ 'ponies': 'are pretty!' }, hello=42) + >>> print (repr(b)) + Munch({'ponies': 'are pretty!', 'foo': Munch({'lol': True}), 'hello': 42}) + + As well as iteration... + + >>> sorted([ (k,b[k]) for k in b ]) + [('foo', Munch({'lol': True})), ('hello', 42), ('ponies', 'are pretty!')] + + And "splats". + + >>> "The {knights} who say {ni}!".format(**Munch(knights='lolcats', ni='can haz')) + 'The lolcats who say can haz!' + + See unmunchify/Munch.toDict, munchify/Munch.fromDict for notes about conversion. + """ + def __init__(self, *args, **kwargs): # pylint: disable=super-init-not-called + self.update(*args, **kwargs) + + # only called if k not found in normal places + def __getattr__(self, k): + """ Gets key if it exists, otherwise throws AttributeError. + + nb. __getattr__ is only called if key is not found in normal places. + + >>> b = Munch(bar='baz', lol={}) + >>> b.foo + Traceback (most recent call last): + ... + AttributeError: foo + + >>> b.bar + 'baz' + >>> getattr(b, 'bar') + 'baz' + >>> b['bar'] + 'baz' + + >>> b.lol is b['lol'] + True + >>> b.lol is getattr(b, 'lol') + True + """ + try: + # Throws exception if not in prototype chain + return object.__getattribute__(self, k) + except AttributeError: + try: + return self[k] + except KeyError: + raise AttributeError(k) + + def __setattr__(self, k, v): + """ Sets attribute k if it exists, otherwise sets key k. A KeyError + raised by set-item (only likely if you subclass Munch) will + propagate as an AttributeError instead. + + >>> b = Munch(foo='bar', this_is='useful when subclassing') + >>> hasattr(b.values, '__call__') + True + >>> b.values = 'uh oh' + >>> b.values + 'uh oh' + >>> b['values'] + Traceback (most recent call last): + ... + KeyError: 'values' + """ + try: + # Throws exception if not in prototype chain + object.__getattribute__(self, k) + except AttributeError: + try: + self[k] = v + except: + raise AttributeError(k) + else: + object.__setattr__(self, k, v) + + def __delattr__(self, k): + """ Deletes attribute k if it exists, otherwise deletes key k. A KeyError + raised by deleting the key--such as when the key is missing--will + propagate as an AttributeError instead. + + >>> b = Munch(lol=42) + >>> del b.lol + >>> b.lol + Traceback (most recent call last): + ... + AttributeError: lol + """ + try: + # Throws exception if not in prototype chain + object.__getattribute__(self, k) + except AttributeError: + try: + del self[k] + except KeyError: + raise AttributeError(k) + else: + object.__delattr__(self, k) + + def toDict(self): + """ Recursively converts a munch back into a dictionary. + + >>> b = Munch(foo=Munch(lol=True), hello=42, ponies='are pretty!') + >>> sorted(b.toDict().items()) + [('foo', {'lol': True}), ('hello', 42), ('ponies', 'are pretty!')] + + See unmunchify for more info. 
+ """ + return unmunchify(self) + + @property + def __dict__(self): + return self.toDict() + + def __repr__(self): + """ Invertible* string-form of a Munch. + + >>> b = Munch(foo=Munch(lol=True), hello=42, ponies='are pretty!') + >>> print (repr(b)) + Munch({'ponies': 'are pretty!', 'foo': Munch({'lol': True}), 'hello': 42}) + >>> eval(repr(b)) + Munch({'ponies': 'are pretty!', 'foo': Munch({'lol': True}), 'hello': 42}) + + >>> with_spaces = Munch({1: 2, 'a b': 9, 'c': Munch({'simple': 5})}) + >>> print (repr(with_spaces)) + Munch({'a b': 9, 1: 2, 'c': Munch({'simple': 5})}) + >>> eval(repr(with_spaces)) + Munch({'a b': 9, 1: 2, 'c': Munch({'simple': 5})}) + + (*) Invertible so long as collection contents are each repr-invertible. + """ + return '{0}({1})'.format(self.__class__.__name__, dict.__repr__(self)) + + def __dir__(self): + return list(iterkeys(self)) + + def __getstate__(self): + """ Implement a serializable interface used for pickling. + + See https://docs.python.org/3.6/library/pickle.html. + """ + return {k: v for k, v in self.items()} + + def __setstate__(self, state): + """ Implement a serializable interface used for pickling. + + See https://docs.python.org/3.6/library/pickle.html. + """ + self.clear() + self.update(state) + + __members__ = __dir__ # for python2.x compatibility + + @classmethod + def fromDict(cls, d): + """ Recursively transforms a dictionary into a Munch via copy. + + >>> b = Munch.fromDict({'urmom': {'sez': {'what': 'what'}}}) + >>> b.urmom.sez.what + 'what' + + See munchify for more info. + """ + return munchify(d, cls) + + def copy(self): + return type(self).fromDict(self) + + def update(self, *args, **kwargs): + """ + Override built-in method to call custom __setitem__ method that may + be defined in subclasses. + """ + for k, v in iteritems(dict(*args, **kwargs)): + self[k] = v + + def get(self, k, d=None): + """ + D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None. + """ + if k not in self: + return d + return self[k] + + def setdefault(self, k, d=None): + """ + D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D + """ + if k not in self: + self[k] = d + return self[k] + + +class AutoMunch(Munch): + def __setattr__(self, k, v): + """ Works the same as Munch.__setattr__ but if you supply + a dictionary as value it will convert it to another Munch. + """ + if isinstance(v, Mapping) and not isinstance(v, (AutoMunch, Munch)): + v = munchify(v, AutoMunch) + super(AutoMunch, self).__setattr__(k, v) + + +class DefaultMunch(Munch): + """ + A Munch that returns a user-specified value for missing keys. + """ + + def __init__(self, *args, **kwargs): + """ Construct a new DefaultMunch. Like collections.defaultdict, the + first argument is the default value; subsequent arguments are the + same as those for dict. 
+ """ + # Mimic collections.defaultdict constructor + if args: + default = args[0] + args = args[1:] + else: + default = None + super(DefaultMunch, self).__init__(*args, **kwargs) + self.__default__ = default + + def __getattr__(self, k): + """ Gets key if it exists, otherwise returns the default value.""" + try: + return super(DefaultMunch, self).__getattr__(k) + except AttributeError: + return self.__default__ + + def __setattr__(self, k, v): + if k == '__default__': + object.__setattr__(self, k, v) + else: + super(DefaultMunch, self).__setattr__(k, v) + + def __getitem__(self, k): + """ Gets key if it exists, otherwise returns the default value.""" + try: + return super(DefaultMunch, self).__getitem__(k) + except KeyError: + return self.__default__ + + def __getstate__(self): + """ Implement a serializable interface used for pickling. + + See https://docs.python.org/3.6/library/pickle.html. + """ + return (self.__default__, {k: v for k, v in self.items()}) + + def __setstate__(self, state): + """ Implement a serializable interface used for pickling. + + See https://docs.python.org/3.6/library/pickle.html. + """ + self.clear() + default, state_dict = state + self.update(state_dict) + self.__default__ = default + + @classmethod + def fromDict(cls, d, default=None): + # pylint: disable=arguments-differ + return munchify(d, factory=lambda d_: cls(default, d_)) + + def copy(self): + return type(self).fromDict(self, default=self.__default__) + + def __repr__(self): + return '{0}({1!r}, {2})'.format( + type(self).__name__, self.__undefined__, dict.__repr__(self)) + + +class DefaultFactoryMunch(Munch): + """ A Munch that calls a user-specified function to generate values for + missing keys like collections.defaultdict. + + >>> b = DefaultFactoryMunch(list, {'hello': 'world!'}) + >>> b.hello + 'world!' + >>> b.foo + [] + >>> b.bar.append('hello') + >>> b.bar + ['hello'] + """ + + def __init__(self, default_factory, *args, **kwargs): + super(DefaultFactoryMunch, self).__init__(*args, **kwargs) + self.default_factory = default_factory + + @classmethod + def fromDict(cls, d, default_factory): + # pylint: disable=arguments-differ + return munchify(d, factory=lambda d_: cls(default_factory, d_)) + + def copy(self): + return type(self).fromDict(self, default_factory=self.default_factory) + + def __repr__(self): + factory = self.default_factory.__name__ + return '{0}({1}, {2})'.format( + type(self).__name__, factory, dict.__repr__(self)) + + def __setattr__(self, k, v): + if k == 'default_factory': + object.__setattr__(self, k, v) + else: + super(DefaultFactoryMunch, self).__setattr__(k, v) + + def __missing__(self, k): + self[k] = self.default_factory() + return self[k] + + +# While we could convert abstract types like Mapping or Iterable, I think +# munchify is more likely to "do what you mean" if it is conservative about +# casting (ex: isinstance(str,Iterable) == True ). +# +# Should you disagree, it is not difficult to duplicate this function with +# more aggressive coercion to suit your own purposes. + +def munchify(x, factory=Munch): + """ Recursively transforms a dictionary into a Munch via copy. + + >>> b = munchify({'urmom': {'sez': {'what': 'what'}}}) + >>> b.urmom.sez.what + 'what' + + munchify can handle intermediary dicts, lists and tuples (as well as + their subclasses), but ymmv on custom datatypes. + + >>> b = munchify({ 'lol': ('cats', {'hah':'i win again'}), + ... 
'hello': [{'french':'salut', 'german':'hallo'}] }) + >>> b.hello[0].french + 'salut' + >>> b.lol[1].hah + 'i win again' + + nb. As dicts are not hashable, they cannot be nested in sets/frozensets. + """ + # Munchify x, using `seen` to track object cycles + seen = dict() + + def munchify_cycles(obj): + # If we've already begun munchifying obj, just return the already-created munchified obj + try: + return seen[id(obj)] + except KeyError: + pass + + # Otherwise, first partly munchify obj (but without descending into any lists or dicts) and save that + seen[id(obj)] = partial = pre_munchify(obj) + # Then finish munchifying lists and dicts inside obj (reusing munchified obj if cycles are encountered) + return post_munchify(partial, obj) + + def pre_munchify(obj): + # Here we return a skeleton of munchified obj, which is enough to save for later (in case + # we need to break cycles) but it needs to filled out in post_munchify + if isinstance(obj, Mapping): + return factory({}) + elif isinstance(obj, list): + return type(obj)() + elif isinstance(obj, tuple): + type_factory = getattr(obj, "_make", type(obj)) + return type_factory(munchify_cycles(item) for item in obj) + else: + return obj + + def post_munchify(partial, obj): + # Here we finish munchifying the parts of obj that were deferred by pre_munchify because they + # might be involved in a cycle + if isinstance(obj, Mapping): + partial.update((k, munchify_cycles(obj[k])) for k in iterkeys(obj)) + elif isinstance(obj, list): + partial.extend(munchify_cycles(item) for item in obj) + elif isinstance(obj, tuple): + for (item_partial, item) in zip(partial, obj): + post_munchify(item_partial, item) + + return partial + + return munchify_cycles(x) + + +def unmunchify(x): + """ Recursively converts a Munch into a dictionary. + + >>> b = Munch(foo=Munch(lol=True), hello=42, ponies='are pretty!') + >>> sorted(unmunchify(b).items()) + [('foo', {'lol': True}), ('hello', 42), ('ponies', 'are pretty!')] + + unmunchify will handle intermediary dicts, lists and tuples (as well as + their subclasses), but ymmv on custom datatypes. + + >>> b = Munch(foo=['bar', Munch(lol=True)], hello=42, + ... ponies=('are pretty!', Munch(lies='are trouble!'))) + >>> sorted(unmunchify(b).items()) #doctest: +NORMALIZE_WHITESPACE + [('foo', ['bar', {'lol': True}]), ('hello', 42), ('ponies', ('are pretty!', {'lies': 'are trouble!'}))] + + nb. As dicts are not hashable, they cannot be nested in sets/frozensets. 
+ """ + + # Munchify x, using `seen` to track object cycles + seen = dict() + + def unmunchify_cycles(obj): + # If we've already begun unmunchifying obj, just return the already-created unmunchified obj + try: + return seen[id(obj)] + except KeyError: + pass + + # Otherwise, first partly unmunchify obj (but without descending into any lists or dicts) and save that + seen[id(obj)] = partial = pre_unmunchify(obj) + # Then finish unmunchifying lists and dicts inside obj (reusing unmunchified obj if cycles are encountered) + return post_unmunchify(partial, obj) + + def pre_unmunchify(obj): + # Here we return a skeleton of unmunchified obj, which is enough to save for later (in case + # we need to break cycles) but it needs to filled out in post_unmunchify + if isinstance(obj, Mapping): + return dict() + elif isinstance(obj, list): + return type(obj)() + elif isinstance(obj, tuple): + type_factory = getattr(obj, "_make", type(obj)) + return type_factory(unmunchify_cycles(item) for item in obj) + else: + return obj + + def post_unmunchify(partial, obj): + # Here we finish unmunchifying the parts of obj that were deferred by pre_unmunchify because they + # might be involved in a cycle + if isinstance(obj, Mapping): + partial.update((k, unmunchify_cycles(obj[k])) for k in iterkeys(obj)) + elif isinstance(obj, list): + partial.extend(unmunchify_cycles(v) for v in obj) + elif isinstance(obj, tuple): + for (value_partial, value) in zip(partial, obj): + post_unmunchify(value_partial, value) + + return partial + + return unmunchify_cycles(x) + + +# Serialization + +try: + try: + import json + except ImportError: + import simplejson as json + + def toJSON(self, **options): + """ Serializes this Munch to JSON. Accepts the same keyword options as `json.dumps()`. + + >>> b = Munch(foo=Munch(lol=True), hello=42, ponies='are pretty!') + >>> json.dumps(b) == b.toJSON() + True + """ + return json.dumps(self, **options) + + def fromJSON(cls, stream, *args, **kwargs): + """ Deserializes JSON to Munch or any of its subclasses. 
+ """ + factory = lambda d: cls(*(args + (d,)), **kwargs) + return munchify(json.loads(stream), factory=factory) + + Munch.toJSON = toJSON + Munch.fromJSON = classmethod(fromJSON) + +except ImportError: + pass + + diff -Nru fiona-1.8.22/fiona/_vendor/munch/python3_compat.py fiona-1.9.5/fiona/_vendor/munch/python3_compat.py --- fiona-1.8.22/fiona/_vendor/munch/python3_compat.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/fiona/_vendor/munch/python3_compat.py 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,6 @@ +from six import u, iteritems, iterkeys # pylint: disable=unused-import +try: + from collections.abc import Mapping # pylint: disable=unused-import +except ImportError: + # Legacy Python + from collections import Mapping # pylint: disable=unused-import diff -Nru fiona-1.8.22/fiona/collection.py fiona-1.9.5/fiona/collection.py --- fiona-1.8.22/fiona/collection.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/collection.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,32 +1,45 @@ -# -*- coding: utf-8 -*- -# Collections provide file-like access to feature data +"""Collections provide file-like access to feature data.""" +from contextlib import ExitStack import logging import os import warnings +from collections import OrderedDict -import fiona._loading -with fiona._loading.add_gdal_dll_directories(): - from fiona import compat, vfs - from fiona.ogrext import Iterator, ItemsIterator, KeysIterator - from fiona.ogrext import Session, WritingSession - from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file, GEOMETRY_TYPES - from fiona.errors import (DriverError, SchemaError, CRSError, UnsupportedGeometryTypeError, DriverSupportError) - from fiona.logutils import FieldSkipLogFilter - from fiona._crs import crs_to_wkt - from fiona._env import get_gdal_release_name, get_gdal_version_tuple - from fiona.env import env_ctx_if_needed - from fiona.errors import FionaDeprecationWarning - from fiona.drvsupport import (supported_drivers, driver_mode_mingdal, _driver_converts_field_type_silently_to_str, - _driver_supports_field) - from fiona.path import Path, vsi_path, parse_path - from six import string_types, binary_type +from fiona import compat, vfs +from fiona.ogrext import Iterator, ItemsIterator, KeysIterator +from fiona.ogrext import Session, WritingSession +from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file, GEOMETRY_TYPES +from fiona.errors import ( + DriverError, + DriverSupportError, + GDALVersionError, + SchemaError, + UnsupportedGeometryTypeError, + UnsupportedOperation, +) +from fiona.logutils import FieldSkipLogFilter +from fiona.crs import CRS +from fiona._env import get_gdal_release_name, get_gdal_version_tuple +from fiona.env import env_ctx_if_needed +from fiona.errors import FionaDeprecationWarning +from fiona.drvsupport import ( + driver_from_extension, + supported_drivers, + driver_mode_mingdal, + _driver_converts_field_type_silently_to_str, + _driver_supports_field, +) +from fiona.path import Path, vsi_path, parse_path +_GDAL_VERSION_TUPLE = get_gdal_version_tuple() +_GDAL_RELEASE_NAME = get_gdal_release_name() + log = logging.getLogger(__name__) -class Collection(object): +class Collection: """A file-like interface to features of a vector dataset @@ -35,11 +48,26 @@ represented as GeoJSON-like mappings. 
""" - def __init__(self, path, mode='r', driver=None, schema=None, crs=None, - encoding=None, layer=None, vsi=None, archive=None, - enabled_drivers=None, crs_wkt=None, ignore_fields=None, - ignore_geometry=False, - **kwargs): + def __init__( + self, + path, + mode="r", + driver=None, + schema=None, + crs=None, + encoding=None, + layer=None, + vsi=None, + archive=None, + enabled_drivers=None, + crs_wkt=None, + ignore_fields=None, + ignore_geometry=False, + include_fields=None, + wkt_version=None, + allow_unsupported_drivers=False, + **kwargs + ): """The required ``path`` is the absolute or relative path to a file, such as '/data/test_uk.shp'. In ``mode`` 'r', data can @@ -54,29 +82,64 @@ In 'w' mode, kwargs will be mapped to OGR layer creation options. + """ + self._closed = True - if not isinstance(path, (string_types, Path)): + if not isinstance(path, (str, Path)): raise TypeError("invalid path: %r" % path) - if not isinstance(mode, string_types) or mode not in ('r', 'w', 'a'): + if not isinstance(mode, str) or mode not in ("r", "w", "a"): raise TypeError("invalid mode: %r" % mode) - if driver and not isinstance(driver, string_types): + if driver and not isinstance(driver, str): raise TypeError("invalid driver: %r" % driver) - if schema and not hasattr(schema, 'get'): + if schema and not hasattr(schema, "get"): raise TypeError("invalid schema: %r" % schema) - if crs and not isinstance(crs, compat.DICT_TYPES + string_types): + + # Rasterio's CRS is compatible with Fiona. This class + # constructor only requires that the crs value have a to_wkt() + # method. + if ( + crs + and not isinstance(crs, compat.DICT_TYPES + (str, CRS)) + and not (hasattr(crs, "to_wkt") and callable(crs.to_wkt)) + ): raise TypeError("invalid crs: %r" % crs) - if crs_wkt and not isinstance(crs_wkt, string_types): + + if crs_wkt and not isinstance(crs_wkt, str): raise TypeError("invalid crs_wkt: %r" % crs_wkt) - if encoding and not isinstance(encoding, string_types): + if encoding and not isinstance(encoding, str): raise TypeError("invalid encoding: %r" % encoding) - if layer and not isinstance(layer, tuple(list(string_types) + [int])): + if layer and not isinstance(layer, (str, int)): raise TypeError("invalid name: %r" % layer) if vsi: - if not isinstance(vsi, string_types) or not vfs.valid_vsi(vsi): + if not isinstance(vsi, str) or not vfs.valid_vsi(vsi): raise TypeError("invalid vsi: %r" % vsi) - if archive and not isinstance(archive, string_types): + if archive and not isinstance(archive, str): raise TypeError("invalid archive: %r" % archive) + if ignore_fields is not None and include_fields is not None: + raise ValueError("Cannot specify both 'ignore_fields' and 'include_fields'") + + if mode == "w" and driver is None: + driver = driver_from_extension(path) + + # Check GDAL version against drivers + if ( + driver in driver_mode_mingdal[mode] + and get_gdal_version_tuple() < driver_mode_mingdal[mode][driver] + ): + min_gdal_version = ".".join( + list(map(str, driver_mode_mingdal[mode][driver])) + ) + + raise DriverError( + "{driver} driver requires at least GDAL {min_gdal_version} for mode '{mode}', " + "Fiona was compiled against: {gdal}".format( + driver=driver, + mode=mode, + min_gdal_version=min_gdal_version, + gdal=get_gdal_release_name(), + ) + ) self.session = None self.iterator = None @@ -86,21 +149,32 @@ self._schema = None self._crs = None self._crs_wkt = None - self.env = None self.enabled_drivers = enabled_drivers + self.include_fields = include_fields self.ignore_fields = ignore_fields 
self.ignore_geometry = bool(ignore_geometry) + self._allow_unsupported_drivers = allow_unsupported_drivers + self._env = None + self._closed = True # Check GDAL version against drivers - if driver in driver_mode_mingdal[mode] and get_gdal_version_tuple() < driver_mode_mingdal[mode][driver]: - min_gdal_version = ".".join(list(map(str, driver_mode_mingdal[mode][driver]))) + if ( + driver in driver_mode_mingdal[mode] + and get_gdal_version_tuple() < driver_mode_mingdal[mode][driver] + ): + min_gdal_version = ".".join( + list(map(str, driver_mode_mingdal[mode][driver])) + ) raise DriverError( "{driver} driver requires at least GDAL {min_gdal_version} for mode '{mode}', " - "Fiona was compiled against: {gdal}".format(driver=driver, - mode=mode, - min_gdal_version=min_gdal_version, - gdal=get_gdal_release_name())) + "Fiona was compiled against: {gdal}".format( + driver=driver, + mode=mode, + min_gdal_version=min_gdal_version, + gdal=get_gdal_release_name(), + ) + ) if vsi: self.path = vfs.vsi_path(path, vsi, archive) @@ -109,13 +183,13 @@ path = parse_path(path) self.path = vsi_path(path) - if mode == 'w': - if layer and not isinstance(layer, string_types): + if mode == "w": + if layer and not isinstance(layer, str): raise ValueError("in 'w' mode, layer names must be strings") - if driver == 'GeoJSON': + if driver == "GeoJSON": if layer is not None: raise ValueError("the GeoJSON format does not have layers") - self.name = 'OgrGeoJSON' + self.name = "OgrGeoJSON" # TODO: raise ValueError as above for other single-layer formats. else: self.name = layer or os.path.basename(os.path.splitext(path.path)[0]) @@ -127,43 +201,50 @@ self.mode = mode - if self.mode == 'w': - if driver == 'Shapefile': - driver = 'ESRI Shapefile' + if self.mode == "w": + if driver == "Shapefile": + driver = "ESRI Shapefile" if not driver: raise DriverError("no driver") - elif driver not in supported_drivers: - raise DriverError( - "unsupported driver: %r" % driver) - elif self.mode not in supported_drivers[driver]: - raise DriverError( - "unsupported mode: %r" % self.mode) + if not allow_unsupported_drivers: + if driver not in supported_drivers: + raise DriverError("unsupported driver: %r" % driver) + if self.mode not in supported_drivers[driver]: + raise DriverError("unsupported mode: %r" % self.mode) self._driver = driver if not schema: raise SchemaError("no schema") - elif 'properties' not in schema: - raise SchemaError("schema lacks: properties") - elif 'geometry' not in schema: - raise SchemaError("schema lacks: geometry") + if "properties" in schema: + # Make an ordered dict of schema properties. 
+ this_schema = schema.copy() + this_schema["properties"] = OrderedDict(schema["properties"]) + schema = this_schema + else: + schema["properties"] = OrderedDict() + if "geometry" not in schema: + schema["geometry"] = None self._schema = schema self._check_schema_driver_support() + if crs_wkt or crs: - self._crs_wkt = crs_to_wkt(crs_wkt or crs) + self._crs_wkt = CRS.from_user_input(crs_wkt or crs).to_wkt( + version=wkt_version + ) self._driver = driver kwargs.update(encoding=encoding) self.encoding = encoding try: - if self.mode == 'r': + if self.mode == "r": self.session = Session() self.session.start(self, **kwargs) - elif self.mode in ('a', 'w'): + elif self.mode in ("a", "w"): self.session = WritingSession() self.session.start(self, **kwargs) - except IOError: + except OSError: self.session = None raise @@ -174,20 +255,24 @@ self._valid_geom_types = _get_valid_geom_types(self.schema, self.driver) self.field_skip_log_filter = FieldSkipLogFilter() + self._env = ExitStack() + self._closed = False def __repr__(self): - return "<%s Collection '%s', mode '%s' at %s>" % ( + return "<{} Collection '{}', mode '{}' at {}>".format( self.closed and "closed" or "open", self.path + ":" + str(self.name), self.mode, - hex(id(self))) + hex(id(self)), + ) def guard_driver_mode(self): - driver = self.session.get_driver() - if driver not in supported_drivers: - raise DriverError("unsupported driver: %r" % driver) - if self.mode not in supported_drivers[driver]: - raise DriverError("unsupported mode: %r" % self.mode) + if not self._allow_unsupported_drivers: + driver = self.session.get_driver() + if driver not in supported_drivers: + raise DriverError("unsupported driver: %r" % driver) + if self.mode not in supported_drivers[driver]: + raise DriverError("unsupported mode: %r" % self.mode) @property def driver(self): @@ -210,7 +295,7 @@ @property def crs(self): - """Returns a Proj4 string.""" + """The coordinate reference system (CRS) of the Collection.""" if self._crs is None and self.session: self._crs = self.session.get_crs() return self._crs @@ -222,20 +307,125 @@ self._crs_wkt = self.session.get_crs_wkt() return self._crs_wkt + def tags(self, ns=None): + """Returns a dict containing copies of the dataset or layer's + tags. Tags are pairs of key and value strings. Tags belong to + namespaces. The standard namespaces are: default (None) and + 'IMAGE_STRUCTURE'. Applications can create their own additional + namespaces. + + Parameters + ---------- + ns: str, optional + Can be used to select a namespace other than the default. + + Returns + ------- + dict + """ + if _GDAL_VERSION_TUPLE.major < 2: + raise GDALVersionError( + "tags requires GDAL 2+, fiona was compiled " + "against: {}".format(_GDAL_RELEASE_NAME) + ) + if self.session: + return self.session.tags(ns=ns) + return None + + def get_tag_item(self, key, ns=None): + """Returns tag item value + + Parameters + ---------- + key: str + The key for the metadata item to fetch. + ns: str, optional + Used to select a namespace other than the default. + + Returns + ------- + str + """ + if _GDAL_VERSION_TUPLE.major < 2: + raise GDALVersionError( + "get_tag_item requires GDAL 2+, fiona was compiled " + "against: {}".format(_GDAL_RELEASE_NAME) + ) + if self.session: + return self.session.get_tag_item(key=key, ns=ns) + return None + + def update_tags(self, tags, ns=None): + """Writes a dict containing the dataset or layer's tags. + Tags are pairs of key and value strings. Tags belong to + namespaces.
The standard namespaces are: default (None) and + 'IMAGE_STRUCTURE'. Applications can create their own additional + namespaces. + + Parameters + ---------- + tags: dict + The dict of metadata items to set. + ns: str, optional + Used to select a namespace other than the default. + + Returns + ------- + int + """ + if _GDAL_VERSION_TUPLE.major < 2: + raise GDALVersionError( + "update_tags requires GDAL 2+, fiona was compiled " + "against: {}".format(_GDAL_RELEASE_NAME) + ) + if not isinstance(self.session, WritingSession): + raise UnsupportedOperation("Unable to update tags as not in writing mode.") + return self.session.update_tags(tags, ns=ns) + + def update_tag_item(self, key, tag, ns=None): + """Updates the tag item value + + Parameters + ---------- + key: str + The key for the metadata item to set. + tag: str + The value of the metadata item to set. + ns: str, optional + Used to select a namespace other than the default. + + Returns + ------- + int + """ + if _GDAL_VERSION_TUPLE.major < 2: + raise GDALVersionError( + "update_tag_item requires GDAL 2+, fiona was compiled " + "against: {}".format(_GDAL_RELEASE_NAME) + ) + if not isinstance(self.session, WritingSession): + raise UnsupportedOperation("Unable to update tag as not in writing mode.") + return self.session.update_tag_item(key=key, tag=tag, ns=ns) + @property def meta(self): """Returns a mapping with the driver, schema, crs, and additional properties.""" return { - 'driver': self.driver, 'schema': self.schema, 'crs': self.crs, - 'crs_wkt': self.crs_wkt} + "driver": self.driver, + "schema": self.schema, + "crs": self.crs, + "crs_wkt": self.crs_wkt, + } profile = meta def filter(self, *args, **kwds): """Returns an iterator over records, but filtered by a test for spatial intersection with the provided ``bbox``, a (minx, miny, - maxx, maxy) tuple or a geometry ``mask``. + maxx, maxy) tuple or a geometry ``mask``. An attribute filter can + be set using an SQL ``where`` clause, which uses the `OGR SQL dialect + <https://gdal.org/user/ogr_sql_dialect.html>`__. Positional arguments ``stop`` or ``start, stop[, step]`` allows iteration to skip over items or stop at a specific item. @@ -246,8 +436,8 @@ """ if self.closed: raise ValueError("I/O operation on closed collection") - elif self.mode != 'r': - raise IOError("collection not open for reading") + elif self.mode != "r": + raise OSError("collection not open for reading") if args: s = slice(*args) start = s.start @@ -255,19 +445,21 @@ step = s.step else: start = stop = step = None - bbox = kwds.get('bbox') - mask = kwds.get('mask') + bbox = kwds.get("bbox") + mask = kwds.get("mask") if bbox and mask: raise ValueError("mask and bbox can not be set together") - self.iterator = Iterator( - self, start, stop, step, bbox, mask) + where = kwds.get("where") + self.iterator = Iterator(self, start, stop, step, bbox, mask, where) return self.iterator def items(self, *args, **kwds): """Returns an iterator over FID, record pairs, optionally filtered by a test for spatial intersection with the provided ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry - ``mask``. + ``mask``. An attribute filter can be set using an SQL ``where`` + clause, which uses the `OGR SQL dialect + <https://gdal.org/user/ogr_sql_dialect.html>`__. Positional arguments ``stop`` or ``start, stop[, step]`` allows iteration to skip over items or stop at a specific item.
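``filter()``, ``items()`` and ``keys()`` all pass the new ``where`` keyword through to their iterators, and it can be combined with ``bbox`` (only ``bbox`` and ``mask`` are mutually exclusive). A hedged usage sketch, with a hypothetical dataset path and field name:

    import fiona

    with fiona.open("roads.shp") as src:  # hypothetical dataset
        results = src.filter(
            bbox=(-107.0, 37.0, -105.0, 39.0),
            where="type IN ('primary', 'secondary')",  # OGR SQL attribute filter
        )
        for feat in results:
            print(feat["properties"]["type"])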
@@ -278,8 +470,8 @@ """ if self.closed: raise ValueError("I/O operation on closed collection") - elif self.mode != 'r': - raise IOError("collection not open for reading") + elif self.mode != "r": + raise OSError("collection not open for reading") if args: s = slice(*args) start = s.start @@ -287,19 +479,21 @@ step = s.step else: start = stop = step = None - bbox = kwds.get('bbox') - mask = kwds.get('mask') + bbox = kwds.get("bbox") + mask = kwds.get("mask") if bbox and mask: raise ValueError("mask and bbox can not be set together") - self.iterator = ItemsIterator( - self, start, stop, step, bbox, mask) + where = kwds.get("where") + self.iterator = ItemsIterator(self, start, stop, step, bbox, mask, where) return self.iterator def keys(self, *args, **kwds): """Returns an iterator over FIDs, optionally filtered by a test for spatial intersection with the provided ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry - ``mask``. + ``mask``. An attribute filter can be set using an SQL ``where`` + clause, which uses the `OGR SQL dialect + <https://gdal.org/user/ogr_sql_dialect.html>`__. Positional arguments ``stop`` or ``start, stop[, step]`` allows iteration to skip over items or stop at a specific item. @@ -309,8 +503,8 @@ """ if self.closed: raise ValueError("I/O operation on closed collection") - elif self.mode != 'r': - raise IOError("collection not open for reading") + elif self.mode != "r": + raise OSError("collection not open for reading") if args: s = slice(*args) start = s.start @@ -318,12 +512,12 @@ step = s.step else: start = stop = step = None - bbox = kwds.get('bbox') - mask = kwds.get('mask') + bbox = kwds.get("bbox") + mask = kwds.get("mask") if bbox and mask: raise ValueError("mask and bbox can not be set together") - self.iterator = KeysIterator( - self, start, stop, step, bbox, mask) + where = kwds.get("where") + self.iterator = KeysIterator(self, start, stop, step, bbox, mask, where) return self.iterator def __contains__(self, fid): @@ -337,9 +531,12 @@ def __next__(self): """Returns next record from iterator.""" - warnings.warn("Collection.__next__() is buggy and will be removed in " - "Fiona 2.0. Switch to `next(iter(collection))`.", - FionaDeprecationWarning, stacklevel=2) + warnings.warn( + "Collection.__next__() is buggy and will be removed in " + "Fiona 2.0. Switch to `next(iter(collection))`.", + FionaDeprecationWarning, + stacklevel=2, + ) if not self.iterator: iter(self) return next(self.iterator) @@ -356,14 +553,18 @@ """Stages multiple records for writing to disk.""" if self.closed: raise ValueError("I/O operation on closed collection") - if self.mode not in ('a', 'w'): - raise IOError("collection not open for writing") + if self.mode not in ("a", "w"): + raise OSError("collection not open for writing") self.session.writerecs(records, self) self._len = self.session.get_length() self._bounds = None def write(self, record): - """Stages a record for writing to disk.""" + """Stages a record for writing to disk. + + Note: Each call of this method will start and commit a + unique transaction with the data source. + """ self.writerecords([record]) def validate_record(self, record): """Compares the record to the collection's schema. Returns ``True`` if the record matches, else ``False``. """ # Currently we only compare keys of properties, not the types of # values.
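As that comment says, and as the return statement that follows implements, ``validate_record`` compares only the set of property keys (plus the geometry type, via ``validate_record_geometry``), never value types. A small sketch of the key-only comparison:

    record = {
        "geometry": {"type": "LineString", "coordinates": [(0.0, 0.0), (1.0, 1.0)]},
        "properties": {"name": "Main St", "lanes": "2"},  # "2" is a str, not an int
    }
    schema_properties = {"name": "str", "lanes": "int"}
    # mirrors validate_record's test; the str-vs-int mismatch goes unnoticed
    assert set(record["properties"].keys()) == set(schema_properties.keys())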
- return ( - set(record['properties'].keys()) == - set(self.schema['properties'].keys()) and - self.validate_record_geometry(record)) + return set(record["properties"].keys()) == set( + self.schema["properties"].keys() + ) and self.validate_record_geometry(record) def validate_record_geometry(self, record): """Compares the record's geometry to the collection's schema. @@ -387,15 +587,15 @@ # OGR reports these mixed files as type "Polygon" or "LineString" # but will return either these or their multi counterparts when # reading features. - if (self.driver == "ESRI Shapefile" and - "Point" not in record['geometry']['type']): - return record['geometry']['type'].lstrip( - "Multi") == self.schema['geometry'].lstrip("3D ").lstrip( - "Multi") + if ( + self.driver == "ESRI Shapefile" + and "Point" not in record["geometry"]["type"] + ): + return record["geometry"]["type"].lstrip("Multi") == self.schema[ + "geometry" + ].lstrip("3D ").lstrip("Multi") else: - return ( - record['geometry']['type'] == - self.schema['geometry'].lstrip("3D ")) + return record["geometry"]["type"] == self.schema["geometry"].lstrip("3D ") def __len__(self): if self._len <= 0 and self.session is not None: @@ -418,27 +618,48 @@ See GH#572 for discussion. """ - gdal_version_major = get_gdal_version_tuple().major + gdal_version_major = _GDAL_VERSION_TUPLE.major for field in self._schema["properties"].values(): field_type = field.split(":")[0] if not _driver_supports_field(self.driver, field_type): - if self.driver == 'GPKG' and gdal_version_major < 2 and field_type == "datetime": - raise DriverSupportError("GDAL 1.x GPKG driver does not support datetime fields") + if ( + self.driver == "GPKG" + and gdal_version_major < 2 + and field_type == "datetime" + ): + raise DriverSupportError( + "GDAL 1.x GPKG driver does not support datetime fields" + ) else: - raise DriverSupportError("{driver} does not support {field_type} " - "fields".format(driver=self.driver, - field_type=field_type)) - elif field_type in {'time', 'datetime', 'date'} and _driver_converts_field_type_silently_to_str(self.driver, - field_type): - if self._driver == "GeoJSON" and gdal_version_major < 2 and field_type in {'datetime', 'date'}: - warnings.warn("GeoJSON driver in GDAL 1.x silently converts {} to string" - " in non-standard format".format(field_type)) + raise DriverSupportError( + "{driver} does not support {field_type} " + "fields".format(driver=self.driver, field_type=field_type) + ) + elif ( + field_type + in { + "time", + "datetime", + "date", + } + and _driver_converts_field_type_silently_to_str(self.driver, field_type) + ): + if ( + self._driver == "GeoJSON" + and gdal_version_major < 2 + and field_type in {"datetime", "date"} + ): + warnings.warn( + "GeoJSON driver in GDAL 1.x silently converts {} to string" + " in non-standard format".format(field_type) + ) else: - warnings.warn("{driver} driver silently converts {field_type} " - "to string".format(driver=self.driver, - field_type=field_type)) + warnings.warn( + "{driver} driver silently converts {field_type} " + "to string".format(driver=self.driver, field_type=field_type) + ) def flush(self): """Flush the buffer.""" @@ -449,51 +670,54 @@ self._bounds = None def close(self): - """In append or write mode, flushes data to disk, then ends - access.""" - if self.session is not None and self.session.isactive(): - if self.mode in ('a', 'w'): - self.flush() - log.debug("Flushed buffer") - self.session.stop() - log.debug("Stopped session") - self.session = None - self.iterator = None - if self.env: - 
self.env.__exit__() + """In append or write mode, flushes data to disk, then ends access.""" + if not self._closed: + if self.session is not None and self.session.isactive(): + if self.mode in ("a", "w"): + self.flush() + log.debug("Flushed buffer") + self.session.stop() + log.debug("Stopped session") + self.session = None + self.iterator = None + if self._env: + self._env.close() + self._env = None + self._closed = True @property def closed(self): """``False`` if data can be accessed, otherwise ``True``.""" - return self.session is None + return self._closed def __enter__(self): - logging.getLogger('fiona.ogrext').addFilter(self.field_skip_log_filter) - self._env = env_ctx_if_needed() - self._env.__enter__() + self._env.enter_context(env_ctx_if_needed()) + logging.getLogger("fiona.ogrext").addFilter(self.field_skip_log_filter) return self def __exit__(self, type, value, traceback): - logging.getLogger('fiona.ogrext').removeFilter(self.field_skip_log_filter) - self._env.__exit__() + logging.getLogger("fiona.ogrext").removeFilter(self.field_skip_log_filter) self.close() def __del__(self): # Note: you can't count on this being called. Call close() explicitly # or use the context manager protocol ("with"). - self.close() + if not self._closed: + self.close() -ALL_GEOMETRY_TYPES = set([ - geom_type for geom_type in GEOMETRY_TYPES.values() - if "3D " not in geom_type and geom_type != "None"]) +ALL_GEOMETRY_TYPES = { + geom_type + for geom_type in GEOMETRY_TYPES.values() + if "3D " not in geom_type and geom_type != "None" +} ALL_GEOMETRY_TYPES.add("None") def _get_valid_geom_types(schema, driver): """Returns a set of geometry types the schema will accept""" schema_geom_type = schema["geometry"] - if isinstance(schema_geom_type, string_types) or schema_geom_type is None: + if isinstance(schema_geom_type, str) or schema_geom_type is None: schema_geom_type = (schema_geom_type,) valid_types = set() for geom_type in schema_geom_type: @@ -518,21 +742,25 @@ """Detect compression type of bytesbuf. ZIP only. TODO: add others relevant to GDAL/OGR.""" - if bytesbuf[:4].startswith(b'PK\x03\x04'): - return 'zip' + if bytesbuf[:4].startswith(b"PK\x03\x04"): + return "zip" else: - return '' + return "" class BytesCollection(Collection): """BytesCollection takes a buffer of bytes and maps that to a virtual file that can then be opened by fiona. """ + def __init__(self, bytesbuf, **kwds): """Takes buffer of bytes whose contents is something we'd like to open with Fiona and maps it to a virtual file. + """ - if not isinstance(bytesbuf, binary_type): + self._closed = True + + if not isinstance(bytesbuf, bytes): raise ValueError("input buffer must be bytes") # Hold a reference to the buffer, as bad things will happen if @@ -544,27 +772,29 @@ # it. If the requested driver is for GeoJSON, we append an # appropriate extension to ensure the driver reads it. filetype = get_filetype(self.bytesbuf) - ext = '' - if filetype == 'zip': - ext = '.zip' - elif kwds.get('driver') == "GeoJSON": - ext = '.json' + ext = "" + if filetype == "zip": + ext = ".zip" + elif kwds.get("driver") == "GeoJSON": + ext = ".json" self.virtual_file = buffer_to_virtual_file(self.bytesbuf, ext=ext) # Instantiate the parent class.
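``get_filetype`` above sniffs nothing but the four-byte ZIP local-file-header signature, and ``BytesCollection`` uses its result to pick a virtual-file extension before delegating to Collection. A quick sketch of both (buffer contents abbreviated; the GeoJSON bytes are hypothetical):

    from fiona.collection import BytesCollection, get_filetype

    assert get_filetype(b"PK\x03\x04rest-of-zip") == "zip"     # ZIP signature
    assert get_filetype(b'{"type": "FeatureCollection"}') == ""  # anything else

    # with real GeoJSON bytes this would open a readable collection:
    # with BytesCollection(geojson_bytes, driver="GeoJSON") as src:
    #     features = list(src)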
- super(BytesCollection, self).__init__(self.virtual_file, vsi=filetype, **kwds) + super().__init__(self.virtual_file, vsi=filetype, **kwds) + self._closed = False def close(self): """Removes the virtual file associated with the class.""" - super(BytesCollection, self).close() + super().close() if self.virtual_file: remove_virtual_file(self.virtual_file) self.virtual_file = None self.bytesbuf = None def __repr__(self): - return "<%s BytesCollection '%s', mode '%s' at %s>" % ( + return "<{} BytesCollection '{}', mode '{}' at {}>".format( self.closed and "closed" or "open", self.path + ":" + str(self.name), self.mode, - hex(id(self))) + hex(id(self)), + ) diff -Nru fiona-1.8.22/fiona/compat.py fiona-1.9.5/fiona/compat.py --- fiona-1.8.22/fiona/compat.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/compat.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,26 +1,12 @@ -import sys -import collections +from collections import UserDict +from collections.abc import Mapping -try: - from collections import OrderedDict -except ImportError: - from ordereddict import OrderedDict - -if sys.version_info[0] >= 3: - from urllib.parse import urlparse - from collections import UserDict - from inspect import getfullargspec as getargspec -else: - from urlparse import urlparse - from UserDict import UserDict - from inspect import getargspec +DICT_TYPES = (dict, Mapping, UserDict) -if sys.version_info >= (3, 3): - from collections.abc import Mapping -else: - from collections import Mapping -# Users can pass in objects that subclass a few different objects -# More specifically, rasterio has a CRS() class that subclasses UserDict() -# In Python 2 UserDict() is in its own module and does not subclass Mapping() -DICT_TYPES = (dict, Mapping, UserDict) +def strencode(instr, encoding="utf-8"): + try: + instr = instr.encode(encoding) + except (UnicodeDecodeError, AttributeError): + pass + return instr diff -Nru fiona-1.8.22/fiona/crs.pxd fiona-1.9.5/fiona/crs.pxd --- fiona-1.8.22/fiona/crs.pxd 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/fiona/crs.pxd 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,11 @@ +include "gdal.pxi" + + +cdef class CRS: + cdef OGRSpatialReferenceH _osr + cdef object _data + cdef object _epsg + cdef object _wkt + + +cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) diff -Nru fiona-1.8.22/fiona/crs.py fiona-1.9.5/fiona/crs.py --- fiona-1.8.22/fiona/crs.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/crs.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,6 +1,6 @@ """Coordinate reference systems and functions -PROJ.4 is the law of this land: http://proj.osgeo.org/. But whereas PROJ.4 +PROJ is the law of this land: https://proj.org/. But whereas PROJ coordinate reference systems are described by strings of parameters such as +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs @@ -69,7 +69,7 @@ # Below is the big list of PROJ4 parameters from # http://trac.osgeo.org/proj/wiki/GenParms. -# It is parsed into a list of paramter keys ``all_proj_keys``. +# It is parsed into a list of parameter keys ``all_proj_keys``. _param_data = """ +a Semimajor radius of the ellipsoid axis diff -Nru fiona-1.8.22/fiona/crs.pyx fiona-1.9.5/fiona/crs.pyx --- fiona-1.8.22/fiona/crs.pyx 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/fiona/crs.pyx 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,1297 @@ +# cython: boundscheck=False, embedsignature=True + +"""Coordinate reference systems, the CRS class and supporting functions. 
+ +A coordinate reference system (CRS) defines how a dataset's pixels map +to locations on, for example, a globe or the Earth. A CRS may be local +or global. The GIS field shares a number of authority files that define +CRS. "EPSG:32618" is the name of a regional CRS from the European +Petroleum Survey Group authority file. "OGC:CRS84" is the name of a +global CRS from the Open Geospatial Consortium authority. Custom CRS can +be described in text using several formats. Fiona's CRS class is our +abstraction for coordinate reference systems. + +A fiona.Collection's crs property is an instance of CRS. CRS are also +used to define transformations between coordinate reference systems. +These transformations are performed by the PROJ library. Fiona does +not call PROJ functions directly, but invokes them via calls to GDAL's +"OSR*" functions. + +""" + +from collections import defaultdict +import json +import logging +import pickle +import typing +import warnings +import re + +import fiona._env +from fiona._err import CPLE_BaseError, CPLE_NotSupportedError +from fiona.compat import DICT_TYPES +from fiona.errors import CRSError, FionaDeprecationWarning +from fiona.enums import WktVersion + +from fiona._env cimport _safe_osr_release +from fiona._err cimport exc_wrap_ogrerr, exc_wrap_int, exc_wrap_pointer + + +log = logging.getLogger(__name__) + + +_RE_PROJ_PARAM = re.compile(r""" + \+ # parameter starts with '+' character + (?P<param>\w+) # capture parameter name + \=? # match both key only and key-value parameters + (?P<value>\S+)? # capture all characters up to next space (None if no value) + \s*? # consume remaining whitespace, if any +""", re.X) + + +cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs): + OSRSetAxisMappingStrategy(hSrs, OAMS_TRADITIONAL_GIS_ORDER) + + +cdef class CRS: + """A geographic or projected coordinate reference system. + + .. versionadded:: 1.9.0 + + CRS objects may be created by passing PROJ parameters as keyword + arguments to the standard constructor or by passing EPSG codes, PROJ + mappings, PROJ strings, or WKT strings to the from_epsg, from_dict, + from_string, or from_wkt static methods. + + Examples + -------- + + The from_dict method takes PROJ parameters as keyword arguments. + + >>> crs = CRS.from_dict(proj="aea") + + EPSG codes may be used with the from_epsg method. + + >>> crs = CRS.from_epsg(3005) + + The from_string method takes a variety of input. + + >>> crs = CRS.from_string("EPSG:3005") + + """ + def __init__(self, initialdata=None, **kwargs): + """Make a CRS from a PROJ dict or mapping. + + Parameters + ---------- + initialdata : mapping, optional + A dictionary or other mapping + kwargs : mapping, optional + Another mapping. Will be overlaid on the initialdata. + + Returns + ------- + CRS + + """ + cdef CRS tmp + self._data = {} + self._epsg = None + self._wkt = None + + if initialdata or kwargs: + tmp = CRS.from_dict(initialdata=initialdata, **kwargs) + self._osr = OSRClone(tmp._osr) + self._wkt = tmp._wkt + self._data = tmp.data + self._epsg = tmp._epsg + + @property + def data(self): + """A PROJ4 dict representation of the CRS. + """ + if not self._data: + self._data = self.to_dict() + return self._data + + @property + def is_valid(self): + """Test that the CRS is a geographic or projected CRS. + + Returns + ------- + bool + + """ + return self.is_geographic or self.is_projected + + @property + def is_epsg_code(self): + """Test if the CRS is defined by an EPSG code.
+ + Returns + ------- + bool + + """ + try: + return bool(self.to_epsg()) + except CRSError: + return False + + @property + def wkt(self): + """An OGC WKT representation of the CRS + + Returns + ------- + str + + """ + if not self._wkt: + self._wkt = self.to_wkt() + return self._wkt + + @property + def is_geographic(self): + """Test if the CRS is a geographic coordinate reference system. + + Returns + ------- + bool + + Raises + ------ + CRSError + + """ + try: + return bool(OSRIsGeographic(self._osr) == 1) + except CPLE_BaseError as exc: + raise CRSError("{}".format(exc)) + + @property + def is_projected(self): + """Test if the CRS is a projected coordinate reference system. + + Returns + ------- + bool + + Raises + ------ + CRSError + + """ + try: + return bool(OSRIsProjected(self._osr) == 1) + except CPLE_BaseError as exc: + raise CRSError("{}".format(exc)) + + @property + def linear_units(self): + """Get a short name for the linear units of the CRS. + + Returns + ------- + units : str + "m", "ft", etc. + + Raises + ------ + CRSError + + """ + try: + return self.linear_units_factor[0] + except CRSError: + return "unknown" + + @property + def linear_units_factor(self): + """Get linear units and the conversion factor to meters of the CRS. + + Returns + ------- + units : str + "m", "ft", etc. + factor : float + Ratio of one unit to one meter. + + Raises + ------ + CRSError + + """ + cdef char *units_c = NULL + cdef double to_meters + + try: + if self.is_projected: + to_meters = OSRGetLinearUnits(self._osr, &units_c) + else: + raise CRSError("Linear units factor is not defined for non projected CRS") + except CPLE_BaseError as exc: + raise CRSError("{}".format(exc)) + else: + units_b = units_c + return (units_b.decode('utf-8'), to_meters) + + @property + def units_factor(self): + """Get units and the conversion factor of the CRS. + + Returns + ------- + units : str + "m", "ft", etc. + factor : float + Ratio of one unit to one radian if the CRS is geographic + otherwise, it is to one meter. + + Raises + ------ + CRSError + + """ + cdef char *units_c = NULL + cdef double factor + + try: + if self.is_geographic: + factor = OSRGetAngularUnits(self._osr, &units_c) + else: + factor = OSRGetLinearUnits(self._osr, &units_c) + except CPLE_BaseError as exc: + raise CRSError(exc) + else: + units_b = units_c + return (units_b.decode('utf-8'), factor) + + def to_dict(self, projjson=False): + """Convert CRS to a PROJ dict. + + .. note:: If there is a corresponding EPSG code, it will be used + when returning PROJ parameter dict. + + .. versionadded:: 1.9.0 + + Parameters + ---------- + projjson: bool, default=False + If True, will convert to PROJ JSON dict (Requires GDAL 3.1+ + and PROJ 6.2+). If False, will convert to PROJ parameter + dict. + + Returns + ------- + dict + + """ + cdef OGRSpatialReferenceH osr = NULL + cdef char *proj_c = NULL + + if projjson: + text = self._projjson() + return json.loads(text) if text else {} + + epsg_code = self.to_epsg() + + if epsg_code: + return {'init': 'epsg:{}'.format(epsg_code)} + else: + try: + osr = exc_wrap_pointer(OSRClone(self._osr)) + exc_wrap_ogrerr(OSRExportToProj4(osr, &proj_c)) + + except CPLE_BaseError as exc: + return {} + # raise CRSError("The WKT could not be parsed. 
{}".format(exc)) + + else: + proj_b = proj_c + proj = proj_b.decode('utf-8') + + finally: + CPLFree(proj_c) + _safe_osr_release(osr) + + def parse(v): + try: + return int(v) + except ValueError: + pass + try: + return float(v) + except ValueError: + return v + + rv = {} + for param in _RE_PROJ_PARAM.finditer(proj): + key, value = param.groups() + if key not in all_proj_keys: + continue + + if value is None or value.lower() == "true": + rv[key] = True + elif value.lower() == "false": + continue + else: + rv[key] = parse(value) + return rv + + def to_proj4(self): + """Convert to a PROJ4 representation. + + Returns + ------- + str + + """ + return ' '.join(['+{}={}'.format(key, val) for key, val in self.data.items()]) + + def to_wkt(self, morph_to_esri_dialect=False, version=None): + """Convert to a OGC WKT representation. + + .. versionadded:: 1.9.0 + + Parameters + ---------- + morph_to_esri_dialect : bool, optional + Whether or not to morph to the Esri dialect of WKT Only + applies to GDAL versions < 3. This parameter will be removed + in a future version of fiona (2.0.0). + version : WktVersion or str, optional + The version of the WKT output. + Defaults to GDAL's default (WKT1_GDAL for GDAL 3). + + Returns + ------- + str + + Raises + ------ + CRSError + + """ + cdef char *conv_wkt = NULL + cdef const char* options_wkt[2] + options_wkt[0] = NULL + options_wkt[1] = NULL + + try: + if OSRGetName(self._osr) != NULL: + if morph_to_esri_dialect: + warnings.warn( + "'morph_to_esri_dialect' ignored with GDAL 3+. " + "Use 'version=WktVersion.WKT1_ESRI' instead." + ) + if version: + version = WktVersion(version).value + wkt_format = "FORMAT={}".format(version).encode("utf-8") + options_wkt[0] = wkt_format + exc_wrap_ogrerr(OSRExportToWktEx(self._osr, &conv_wkt, options_wkt)) + except (CPLE_BaseError, ValueError) as exc: + raise CRSError("Cannot convert to WKT. {}".format(exc)) from exc + + else: + if conv_wkt != NULL: + return conv_wkt.decode('utf-8') + else: + return '' + finally: + CPLFree(conv_wkt) + + + def to_epsg(self, confidence_threshold=70): + """Convert to the best match EPSG code. + + For a CRS created using an EPSG code, that same value is + returned. For other CRS, including custom CRS, an attempt is + made to match it to definitions in the EPSG authority file. + Matches with a confidence below the threshold are discarded. + + Parameters + ---------- + confidence_threshold : int + Percent match confidence threshold (0-100). + + Returns + ------- + int or None + + Raises + ------ + CRSError + + """ + if self._epsg is not None: + return self._epsg + else: + matches = self._matches(confidence_threshold=confidence_threshold) + if "EPSG" in matches: + self._epsg = int(matches["EPSG"][0]) + return self._epsg + else: + return None + + def to_authority(self, confidence_threshold=70): + """Convert to the best match authority name and code. + + For a CRS created using an EPSG code, that same value is + returned. For other CRS, including custom CRS, an attempt is + made to match it to definitions in authority files. Matches + with a confidence below the threshold are discarded. + + Parameters + ---------- + confidence_threshold : int + Percent match confidence threshold (0-100). + + Returns + ------- + name : str + Authority name. + code : str + Code from the authority file. 
+ + or None + + """ + matches = self._matches(confidence_threshold=confidence_threshold) + # Note: before version 1.2.7 this function only paid attention + # to EPSG as an authority, which is why it takes priority over + # others even if they were a better match. + if "EPSG" in matches: + return "EPSG", matches["EPSG"][0] + elif "OGC" in matches: + return "OGC", matches["OGC"][0] + elif "ESRI" in matches: + return "ESRI", matches["ESRI"][0] + else: + return None + + def _matches(self, confidence_threshold=70): + """Find matches in authority files. + + Returns + ------- + dict : {name: [codes]} + A dictionary in which capitalized authority names are the + keys and lists of codes ordered by match confidence, + descending, are the values. + + """ + cdef OGRSpatialReferenceH osr = NULL + cdef OGRSpatialReferenceH *matches = NULL + cdef int *confidences = NULL + cdef int num_matches = 0 + cdef int i = 0 + + results = defaultdict(list) + + try: + osr = exc_wrap_pointer(OSRClone(self._osr)) + + matches = OSRFindMatches(osr, NULL, &num_matches, &confidences) + + for i in range(num_matches): + confidence = confidences[i] + c_code = OSRGetAuthorityCode(matches[i], NULL) + c_name = OSRGetAuthorityName(matches[i], NULL) + + if c_code == NULL: + log.debug("returned authority code was null") + if c_name == NULL: + log.debug("returned authority name was null") + + if c_code != NULL and c_name != NULL and confidence >= confidence_threshold: + log.debug( + "Matched. confidence=%r, c_code=%r, c_name=%r", + confidence, c_code, c_name) + code = c_code.decode('utf-8') + name = c_name.decode('utf-8') + results[name].append(code) + return results + + finally: + _safe_osr_release(osr) + OSRFreeSRSArray(matches) + CPLFree(confidences) + + def to_string(self): + """Convert to a PROJ4 or WKT string. + + The output will be reduced as much as possible by attempting a + match to CRS defined in authority files. + + Notes + ----- + Mapping keys are tested against the ``all_proj_keys`` list. + Values of ``True`` are omitted, leaving the key bare: + {'no_defs': True} -> "+no_defs" and items where the value is + otherwise not a str, int, or float are omitted. + + Returns + ------- + str + + Raises + ------ + CRSError + + """ + auth = self.to_authority() + if auth: + return ":".join(auth) + else: + return self.to_wkt() or self.to_proj4() + + @staticmethod + def from_epsg(code): + """Make a CRS from an EPSG code. + + Parameters + ---------- + code : int or str + An EPSG code. Strings will be converted to integers. + + Notes + ----- + The input code is not validated against an EPSG database. + + Returns + ------- + CRS + + Raises + ------ + CRSError + + """ + cdef CRS obj = CRS.__new__(CRS) + + try: + code = int(code) + except OverflowError as err: + raise CRSError(f"Not in the range of valid EPSG codes: {code}") from err + except TypeError as err: + raise CRSError(f"Not a valid EPSG code: {code}") from err + + if code <= 0: + raise CRSError("EPSG codes are positive integers") + + try: + exc_wrap_ogrerr(exc_wrap_int(OSRImportFromEPSG(obj._osr, code))) + except OverflowError as err: + raise CRSError(f"Not in the range of valid EPSG codes: {code}") from err + except CPLE_BaseError as exc: + raise CRSError("The EPSG code is unknown. {}".format(exc)) + else: + osr_set_traditional_axis_mapping_strategy(obj._osr) + obj._epsg = code + return obj + + @staticmethod + def from_proj4(proj): + """Make a CRS from a PROJ4 string. + + Parameters + ---------- + proj : str + A PROJ4 string like "+proj=longlat ..."
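``from_epsg`` records the construction code on the instance, so ``to_epsg()`` can return it without a search; CRS built any other way fall back to ``_matches`` against the authority files, with EPSG taking priority in ``to_authority``. A short sketch:

    from fiona.crs import CRS

    crs = CRS.from_epsg(3005)
    assert crs.to_epsg() == 3005       # cached construction code, no matching
    print(crs.to_authority())          # typically ("EPSG", "3005")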
+ + Returns + ------- + CRS + + Raises + ------ + CRSError + + """ + cdef CRS obj = CRS.__new__(CRS) + + # Filter out nonsensical items that might have crept in. + items_filtered = [] + for param in _RE_PROJ_PARAM.finditer(proj): + value = param.group('value') + if value is None: + items_filtered.append(param.group()) + elif value.lower() == "false": + continue + else: + items_filtered.append(param.group()) + + proj = ' '.join(items_filtered) + proj_b = proj.encode('utf-8') + + try: + exc_wrap_ogrerr(exc_wrap_int(OSRImportFromProj4(obj._osr, proj_b))) + except CPLE_BaseError as exc: + raise CRSError("The PROJ4 dict could not be understood. {}".format(exc)) + else: + osr_set_traditional_axis_mapping_strategy(obj._osr) + return obj + + @staticmethod + def from_dict(initialdata=None, **kwargs): + """Make a CRS from a dict of PROJ parameters or PROJ JSON. + + Parameters + ---------- + initialdata : mapping, optional + A dictionary or other mapping + kwargs : mapping, optional + Another mapping. Will be overlaid on the initialdata. + + Returns + ------- + CRS + + Raises + ------ + CRSError + + """ + if initialdata is not None: + data = dict(initialdata.items()) + else: + data = {} + data.update(**kwargs) + + if not ("init" in data or "proj" in data): + # We've been given a PROJ JSON-encoded text. + return CRS.from_user_input(json.dumps(data)) + + # "+init=epsg:xxxx" is deprecated in GDAL. If we find this, we will + # extract the epsg code and dispatch to from_epsg. + if 'init' in data and data['init'].lower().startswith('epsg:'): + epsg_code = int(data['init'].split(':')[1]) + return CRS.from_epsg(epsg_code) + + # Continue with the general case. + pjargs = [] + for key in data.keys() & all_proj_keys: + val = data[key] + if val is None or val is True: + pjargs.append('+{}'.format(key)) + elif val is False: + pass + else: + pjargs.append('+{}={}'.format(key, val)) + + proj = ' '.join(pjargs) + b_proj = proj.encode('utf-8') + + cdef CRS obj = CRS.__new__(CRS) + + try: + exc_wrap_ogrerr(OSRImportFromProj4(obj._osr, b_proj)) + except CPLE_BaseError as exc: + raise CRSError("The PROJ4 dict could not be understood. {}".format(exc)) + else: + osr_set_traditional_axis_mapping_strategy(obj._osr) + return obj + + @staticmethod + def from_wkt(wkt, morph_from_esri_dialect=False): + """Make a CRS from a WKT string. + + Parameters + ---------- + wkt : str + A WKT string. + morph_from_esri_dialect : bool, optional + If True, items in the input using Esri's dialect of WKT + will be replaced by OGC standard equivalents. + + Returns + ------- + CRS + + Raises + ------ + CRSError + + """ + cdef char *wkt_c = NULL + + if not isinstance(wkt, str): + raise ValueError("A string is expected") + + wkt_b= wkt.encode('utf-8') + wkt_c = wkt_b + + cdef CRS obj = CRS.__new__(CRS) + + try: + errcode = exc_wrap_ogrerr(OSRImportFromWkt(obj._osr, &wkt_c)) + except CPLE_BaseError as exc: + raise CRSError("The WKT could not be parsed. {}".format(exc)) + else: + osr_set_traditional_axis_mapping_strategy(obj._osr) + return obj + + @staticmethod + def from_user_input(value, morph_from_esri_dialect=False): + """Make a CRS from a variety of inputs. + + Parameters + ---------- + value : object + User input of many different kinds. + morph_from_esri_dialect : bool, optional + If True, items in the input using Esri's dialect of WKT + will be replaced by OGC standard equivalents. 
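``from_dict`` above special-cases the deprecated ``{"init": "epsg:xxxx"}`` form and dispatches it to ``from_epsg``; anything else is assembled into a PROJ string from keys on the ``all_proj_keys`` whitelist. Sketch:

    from fiona.crs import CRS

    # the deprecated init form is detected and routed to from_epsg:
    assert CRS.from_dict({"init": "epsg:4326"}) == CRS.from_epsg(4326)
    # ordinary PROJ parameters become a "+proj=aea +lat_1=..." string:
    aea = CRS.from_dict(proj="aea", lat_1=29.5, lat_2=45.5)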
+ + Returns + ------- + CRS + + Raises + ------ + CRSError + + """ + cdef const char *text_c = NULL + cdef CRS obj + + if isinstance(value, CRS): + return value + elif hasattr(value, "to_wkt") and callable(value.to_wkt): + return CRS.from_wkt(value.to_wkt(), morph_from_esri_dialect=morph_from_esri_dialect) + elif isinstance(value, int): + return CRS.from_epsg(value) + elif isinstance(value, DICT_TYPES): + return CRS(**value) + + elif isinstance(value, str): + text_b = value.encode('utf-8') + text_c = text_b + obj = CRS.__new__(CRS) + try: + errcode = exc_wrap_ogrerr(OSRSetFromUserInput(obj._osr, text_c)) + except CPLE_BaseError as exc: + raise CRSError("The WKT could not be parsed. {}".format(exc)) + else: + osr_set_traditional_axis_mapping_strategy(obj._osr) + return obj + + else: + raise CRSError("CRS is invalid: {!r}".format(value)) + + @staticmethod + def from_authority(auth_name, code): + """Make a CRS from an authority name and code. + + .. versionadded:: 1.9.0 + + Parameters + ---------- + auth_name: str + The name of the authority. + code : int or str + The code used by the authority. + + Returns + ------- + CRS + + Raises + ------ + CRSError + + """ + return CRS.from_string("{auth_name}:{code}".format(auth_name=auth_name, code=code)) + + @staticmethod + def from_string(value, morph_from_esri_dialect=False): + """Make a CRS from an EPSG, PROJ, or WKT string + + Parameters + ---------- + value : str + An EPSG, PROJ, or WKT string. + morph_from_esri_dialect : bool, optional + If True, items in the input using Esri's dialect of WKT + will be replaced by OGC standard equivalents. + + Returns + ------- + CRS + + Raises + ------ + CRSError + + """ + try: + value = value.strip() + except AttributeError: + pass + + if not value: + raise CRSError("CRS is empty or invalid: {!r}".format(value)) + + elif value.upper().startswith('EPSG:') and "+" not in value: + auth, val = value.split(':') + if not val: + raise CRSError("Invalid CRS: {!r}".format(value)) + return CRS.from_epsg(val) + + elif value.startswith('{') or value.startswith('['): + # may be json, try to decode it + try: + val = json.loads(value, strict=False) + except ValueError: + raise CRSError('CRS appears to be JSON but is not valid') + + if not val: + raise CRSError("CRS is empty JSON") + else: + return CRS.from_dict(**val) + + elif value.endswith("]"): + return CRS.from_wkt(value, morph_from_esri_dialect=morph_from_esri_dialect) + elif "=" in value: + return CRS.from_proj4(value) + else: + return CRS.from_user_input(value, morph_from_esri_dialect=morph_from_esri_dialect) + + def __cinit__(self): + self._osr = OSRNewSpatialReference(NULL) + + def __dealloc__(self): + _safe_osr_release(self._osr) + + def __hash__(self): + return hash(self.wkt) + + def __getitem__(self, item): + return self.data[item] + + def __iter__(self): + return iter(self.data) + + def __len__(self): + return len(self.data) + + def get(self, item): + return self.data.get(item) + + def items(self): + return self.data.items() + + def keys(self): + return self.data.keys() + + def values(self): + return self.data.values() + + def __bool__(self): + return bool(self.wkt) + + __nonzero__ = __bool__ + + def __getstate__(self): + return self.to_wkt() + + def __setstate__(self, state): + cdef CRS tmp + tmp = CRS.from_wkt(state) + self._osr = OSRClone(tmp._osr) + self._wkt = tmp._wkt + self._data = tmp.data + self._epsg = tmp._epsg + + def __copy__(self): + return pickle.loads(pickle.dumps(self)) + + def __hash__(self): + return hash(self.to_wkt()) + + def __str__(self): 
+ return self.to_string() + + def __repr__(self): + epsg_code = self.to_epsg() + if epsg_code: + return "CRS.from_epsg({})".format(epsg_code) + else: + return "CRS.from_wkt('{}')".format(self.wkt) + + def __eq__(self, other): + cdef OGRSpatialReferenceH osr_s = NULL + cdef OGRSpatialReferenceH osr_o = NULL + cdef CRS crs_o + + try: + crs_o = CRS.from_user_input(other) + except CRSError: + return False + + epsg_s = self.to_epsg() + epsg_o = crs_o.to_epsg() + + if epsg_s is not None and epsg_o is not None and epsg_s == epsg_o: + return True + + else: + try: + osr_s = exc_wrap_pointer(OSRClone(self._osr)) + osr_o = exc_wrap_pointer(OSRClone(crs_o._osr)) + return bool(OSRIsSame(osr_s, osr_o) == 1) + + finally: + _safe_osr_release(osr_s) + _safe_osr_release(osr_o) + + + def _matches(self, confidence_threshold=70): + """Find matches in authority files. + + Parameters + ---------- + confidence_threshold : int + Percent match confidence threshold (0-100). + + Returns + ------- + dict : {name: [codes]} + A dictionary in which capitalized authority names are the + keys and lists of codes ordered by match confidence, + descending, are the values. + + """ + cdef OGRSpatialReferenceH osr = NULL + cdef OGRSpatialReferenceH *matches = NULL + cdef int *confidences = NULL + cdef int num_matches = 0 + cdef int i = 0 + + results = defaultdict(list) + + try: + osr = exc_wrap_pointer(OSRClone(self._osr)) + + matches = OSRFindMatches(osr, NULL, &num_matches, &confidences) + + for i in range(num_matches): + confidence = confidences[i] + c_code = OSRGetAuthorityCode(matches[i], NULL) + c_name = OSRGetAuthorityName(matches[i], NULL) + + log.debug( + "Matched. confidence=%r, c_code=%r, c_name=%r", + confidence, c_code, c_name) + + if c_code != NULL and c_name != NULL and confidence >= confidence_threshold: + code = c_code.decode('utf-8') + name = c_name.decode('utf-8') + results[name].append(code) + + return results + + finally: + _safe_osr_release(osr) + OSRFreeSRSArray(matches) + CPLFree(confidences) + + def _projjson(self): + """Get a PROJ JSON representation. + + For internal use only. + + .. versionadded:: 1.9.0 + + .. note:: Requires GDAL 3.1+ and PROJ 6.2+ + + Returns + ------- + projjson : str + PROJ JSON-encoded text. + + Raises + ------ + CRSError + + """ + cdef char *conv_json = NULL + cdef const char* options[2] + + try: + if OSRGetName(self._osr) != NULL: + options[0] = b"MULTILINE=NO" + options[1] = NULL + exc_wrap_ogrerr(OSRExportToPROJJSON(self._osr, &conv_json, options)) + except CPLE_BaseError as exc: + raise CRSError("Cannot convert to PROJ JSON. {}".format(exc)) + + else: + if conv_json != NULL: + return conv_json.decode('utf-8') + else: + return '' + finally: + CPLFree(conv_json) + + +def epsg_treats_as_latlong(input_crs): + """Test if the CRS is in latlon order + + .. versionadded:: 1.9.0 + + From GDAL docs: + + > This method returns TRUE if EPSG feels this geographic coordinate + system should be treated as having lat/long coordinate ordering. + + > Currently this returns TRUE for all geographic coordinate systems with + an EPSG code set, and axes set defining it as lat, long. + + > FALSE will be returned for all coordinate systems that are not + geographic, or that do not have an EPSG code set. + + > **Note** + + > Important change of behavior since GDAL 3.0. + In previous versions, geographic CRS imported with importFromEPSG() + would cause this method to return FALSE on them, whereas now it returns + TRUE, since importFromEPSG() is now equivalent to importFromEPSGA(). 
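Under the GDAL 3 behavior quoted above, a geographic CRS imported from an EPSG code with lat/long axis order reports True, while projected CRS report False. A sketch (results assume a GDAL 3 build, per the note above):

    from fiona.crs import CRS, epsg_treats_as_latlong

    assert epsg_treats_as_latlong(CRS.from_epsg(4326))       # geographic, lat/long axes
    assert not epsg_treats_as_latlong(CRS.from_epsg(32618))  # projected (UTM 18N)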
+ + Parameters + ---------- + input_crs : CRS + Coordinate reference system, as a fiona CRS object + Example: CRS({'init': 'EPSG:4326'}) + + Returns + ------- + bool + + """ + cdef CRS crs + + if not isinstance(input_crs, CRS): + crs = CRS.from_user_input(input_crs) + else: + crs = input_crs + + try: + return bool(OSREPSGTreatsAsLatLong(crs._osr) == 1) + except CPLE_BaseError as exc: + raise CRSError("{}".format(exc)) + + +def epsg_treats_as_northingeasting(input_crs): + """Test if the CRS should be treated as having northing/easting coordinate ordering + + .. versionadded:: 1.9.0 + + From GDAL docs: + + > This method returns TRUE if EPSG feels this projected coordinate + system should be treated as having northing/easting coordinate ordering. + + > Currently this returns TRUE for all projected coordinate systems with + an EPSG code set, and axes set defining it as northing, easting. + + > FALSE will be returned for all coordinate systems that are not + projected, or that do not have an EPSG code set. + + > **Note** + + > Important change of behavior since GDAL 3.0. + In previous versions, projected CRS with northing, easting axis order + imported with importFromEPSG() would cause this method to return FALSE + on them, whereas now it returns TRUE, since importFromEPSG() is now + equivalent to importFromEPSGA(). + + Parameters + ---------- + input_crs : CRS + Coordinate reference system, as a fiona CRS object + Example: CRS({'init': 'EPSG:4326'}) + + Returns + ------- + bool + + """ + cdef CRS crs + + if not isinstance(input_crs, CRS): + crs = CRS.from_user_input(input_crs) + else: + crs = input_crs + + try: + return bool(OSREPSGTreatsAsNorthingEasting(crs._osr) == 1) + except CPLE_BaseError as exc: + raise CRSError("{}".format(exc)) + + +# Below is the big list of PROJ4 parameters from +# http://trac.osgeo.org/proj/wiki/GenParms. +# It is parsed into a list of parameter keys ``all_proj_keys``. + +_param_data = """ ++a Semimajor radius of the ellipsoid axis ++alpha ? Used with Oblique Mercator and possibly a few others ++axis Axis orientation (new in 4.8.0) ++b Semiminor radius of the ellipsoid axis ++datum Datum name (see `proj -ld`) ++ellps Ellipsoid name (see `proj -le`) ++init Initialize from a named CRS ++k Scaling factor (old name) ++k_0 Scaling factor (new name) ++lat_0 Latitude of origin ++lat_1 Latitude of first standard parallel ++lat_2 Latitude of second standard parallel ++lat_ts Latitude of true scale ++lon_0 Central meridian ++lonc ? Longitude used with Oblique Mercator and possibly a few others ++lon_wrap Center longitude to use for wrapping (see below) ++nadgrids Filename of NTv2 grid file to use for datum transforms (see below) ++no_defs Don't use the /usr/share/proj/proj_def.dat defaults file ++over Allow longitude output outside -180 to 180 range, disables wrapping (see below) ++pm Alternate prime meridian (typically a city name, see below) ++proj Projection name (see `proj -l`) ++south Denotes southern hemisphere UTM zone ++to_meter Multiplier to convert map units to 1.0m ++towgs84 3 or 7 term datum transform parameters (see below) ++units meters, US survey feet, etc. ++vto_meter vertical conversion to meters. ++vunits vertical units. ++x_0 False easting ++y_0 False northing ++zone UTM zone ++a Semimajor radius of the ellipsoid axis ++alpha ? 
Used with Oblique Mercator and possibly a few others ++azi ++b Semiminor radius of the ellipsoid axis ++belgium ++beta ++czech ++e Eccentricity of the ellipsoid = sqrt(1 - b^2/a^2) = sqrt( f*(2-f) ) ++ellps Ellipsoid name (see `proj -le`) ++es Eccentricity of the ellipsoid squared ++f Flattening of the ellipsoid (often presented as an inverse, e.g. 1/298) ++gamma ++geoc ++guam ++h ++k Scaling factor (old name) ++K ++k_0 Scaling factor (new name) ++lat_0 Latitude of origin ++lat_1 Latitude of first standard parallel ++lat_2 Latitude of second standard parallel ++lat_b ++lat_t ++lat_ts Latitude of true scale ++lon_0 Central meridian ++lon_1 ++lon_2 ++lonc ? Longitude used with Oblique Mercator and possibly a few others ++lsat ++m ++M ++n ++no_cut ++no_off ++no_rot ++ns ++o_alpha ++o_lat_1 ++o_lat_2 ++o_lat_c ++o_lat_p ++o_lon_1 ++o_lon_2 ++o_lon_c ++o_lon_p ++o_proj ++over ++p ++path ++proj Projection name (see `proj -l`) ++q ++R ++R_a ++R_A Compute radius such that the area of the sphere is the same as the area of the ellipsoid ++rf Reciprocal of the ellipsoid flattening term (e.g. 298) ++R_g ++R_h ++R_lat_a ++R_lat_g ++rot ++R_V ++s ++south Denotes southern hemisphere UTM zone ++sym ++t ++theta ++tilt ++to_meter Multiplier to convert map units to 1.0m ++units meters, US survey feet, etc. ++vopt ++W ++westo ++wktext ++x_0 False easting ++y_0 False northing ++zone UTM zone +""" + +all_proj_keys = set(line.split(' ', 1)[0][1:] for line in filter(None, _param_data.splitlines())) +all_proj_keys.add('no_mayo') + + +def from_epsg(val): + """Given an integer code, returns an EPSG-like mapping. + + .. deprecated:: 1.9.0 + This function will be removed in version 2.0. Please use + CRS.from_epsg() instead. + + """ + warnings.warn( + "This function will be removed in version 2.0. Please use CRS.from_epsg() instead.", + FionaDeprecationWarning, + stacklevel=2, + ) + return CRS.from_epsg(val) + + +def from_string(val): + """Turn a PROJ.4 string into a mapping of parameters. + + .. deprecated:: 1.9.0 + This function will be removed in version 2.0. Please use + CRS.from_string() instead. + + """ + warnings.warn( + "This function will be removed in version 2.0. Please use CRS.from_string() instead.", + FionaDeprecationWarning, + stacklevel=2, + ) + return CRS.from_string(val) + + +def to_string(val): + """Turn a parameter mapping into a more conventional PROJ.4 string. + + .. deprecated:: 1.9.0 + This function will be removed in version 2.0. Please use + CRS.to_string() instead. + + """ + warnings.warn( + "This function will be removed in version 2.0. Please use CRS.to_string() instead.", + FionaDeprecationWarning, + stacklevel=2, + ) + return CRS.from_user_input(val).to_string() diff -Nru fiona-1.8.22/fiona/drvsupport.py fiona-1.9.5/fiona/drvsupport.py --- fiona-1.8.22/fiona/drvsupport.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/drvsupport.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,7 +1,10 @@ -# -*- coding: utf-8 -*- +import os + from fiona.env import Env -from fiona._env import get_gdal_version_num, calc_gdal_version_num +from fiona._env import get_gdal_version_tuple + +_GDAL_VERSION = get_gdal_version_tuple() # Here is the list of available drivers as (name, modes) tuples. Currently, # we only expose the defaults (excepting FileGDB). We also don't expose @@ -10,159 +13,165 @@ # entries for each at https://gdal.org/drivers/vector/index.html to screen # out the multi-layer formats. 
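
A note on how these (name, modes) pairs read at runtime; an illustrative sketch against the public fiona.supported_drivers mapping (not from the patch; availability still depends on the local GDAL build, since unavailable drivers are filtered out at import time):

    import fiona

    # Mode letters: "r" = read, "a" = append, "w" = write,
    # so "raw" means read, append, and write are all supported.
    print(fiona.supported_drivers["ESRI Shapefile"])  # 'raw'
    print(fiona.supported_drivers["S57"])             # 'r'

    # Drivers Fiona can write with in this environment:
    print(sorted(n for n, m in fiona.supported_drivers.items() if "w" in m))
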
-supported_drivers = dict([ - # OGR Vector Formats - # Format Name Code Creation Georeferencing Compiled by default - # Aeronav FAA files AeronavFAA No Yes Yes - ("AeronavFAA", "r"), - # ESRI ArcObjects ArcObjects No Yes No, needs ESRI ArcObjects - # Arc/Info Binary Coverage AVCBin No Yes Yes - # multi-layer - # ("AVCBin", "r"), - # Arc/Info .E00 (ASCII) Coverage AVCE00 No Yes Yes - # multi-layer - # ("AVCE00", "r"), - # Arc/Info Generate ARCGEN No No Yes - ("ARCGEN", "r"), - # Atlas BNA BNA Yes No Yes - ("BNA", "rw"), - # AutoCAD DWG DWG No No No - # AutoCAD DXF DXF Yes No Yes - ("DXF", "rw"), - # Comma Separated Value (.csv) CSV Yes No Yes - ("CSV", "raw"), - # CouchDB / GeoCouch CouchDB Yes Yes No, needs libcurl - # DODS/OPeNDAP DODS No Yes No, needs libdap - # EDIGEO EDIGEO No Yes Yes - # multi-layer? Hard to tell from the OGR docs - # ("EDIGEO", "r"), - # ElasticSearch ElasticSearch Yes (write-only) - No, needs libcurl - # ESRI FileGDB FileGDB Yes Yes No, needs FileGDB API library - # multi-layer - ("FileGDB", "raw"), - ("OpenFileGDB", "r"), - # ESRI Personal GeoDatabase PGeo No Yes No, needs ODBC library - # ESRI ArcSDE SDE No Yes No, needs ESRI SDE - # ESRIJSON ESRIJSON No Yes Yes - ("ESRIJSON", "r"), - # ESRI Shapefile ESRI Shapefile Yes Yes Yes - ("ESRI Shapefile", "raw"), - # FMEObjects Gateway FMEObjects Gateway No Yes No, needs FME - ("FlatGeobuf", "rw"), - # GeoJSON GeoJSON Yes Yes Yes - ("GeoJSON", "raw"), - # GeoJSONSeq GeoJSON sequences Yes Yes Yes - ("GeoJSONSeq", "rw"), - # Géoconcept Export Geoconcept Yes Yes Yes - # multi-layers - # ("Geoconcept", "raw"), - # Geomedia .mdb Geomedia No No No, needs ODBC library - # GeoPackage GPKG Yes Yes No, needs libsqlite3 - ("GPKG", "raw"), - # GeoRSS GeoRSS Yes Yes Yes (read support needs libexpat) - # Google Fusion Tables GFT Yes Yes No, needs libcurl - # GML GML Yes Yes Yes (read support needs Xerces or libexpat) - ("GML", "rw"), - # GMT GMT Yes Yes Yes - ("GMT", "rw"), - # GMT renamed to OGR_GMT for GDAL 2.x - ("OGR_GMT", "rw"), - # GPSBabel GPSBabel Yes Yes Yes (needs GPSBabel and GPX driver) - # GPX GPX Yes Yes Yes (read support needs libexpat) - ("GPX", "rw"), - # GRASS GRASS No Yes No, needs libgrass - # GPSTrackMaker (.gtm, .gtz) GPSTrackMaker Yes Yes Yes - ("GPSTrackMaker", "rw"), - # Hydrographic Transfer Format HTF No Yes Yes - # TODO: Fiona is not ready for multi-layer formats: ("HTF", "r"), - # Idrisi Vector (.VCT) Idrisi No Yes Yes - ("Idrisi", "r"), - # Informix DataBlade IDB Yes Yes No, needs Informix DataBlade - # INTERLIS "Interlis 1" and "Interlis 2" Yes Yes No, needs Xerces (INTERLIS model reading needs ili2c.jar) - # INGRES INGRES Yes No No, needs INGRESS - # KML KML Yes Yes Yes (read support needs libexpat) - # LIBKML LIBKML Yes Yes No, needs libkml - # Mapinfo File MapInfo File Yes Yes Yes - ("MapInfo File", "raw"), - # Microstation DGN DGN Yes No Yes - ("DGN", "raw"), - # Access MDB (PGeo and Geomedia capable) MDB No Yes No, needs JDK/JRE - # Memory Memory Yes Yes Yes - # MySQL MySQL No Yes No, needs MySQL library - # NAS - ALKIS NAS No Yes No, needs Xerces - # Oracle Spatial OCI Yes Yes No, needs OCI library - # ODBC ODBC No Yes No, needs ODBC library - # MS SQL Spatial MSSQLSpatial Yes Yes No, needs ODBC library - # Open Document Spreadsheet ODS Yes No No, needs libexpat - # OGDI Vectors (VPF, VMAP, DCW) OGDI No Yes No, needs OGDI library - # OpenAir OpenAir No Yes Yes - # multi-layer - # ("OpenAir", "r"), - # PCI Geomatics Database File PCIDSK No No Yes, using internal PCIDSK SDK (from GDAL 1.7.0) - 
("PCIDSK", "raw"), - # PDS PDS No Yes Yes - ("PDS", "r"), - # PDS renamed to OGR_PDS for GDAL 2.x - ("OGR_PDS", "r"), - # PGDump PostgreSQL SQL dump Yes Yes Yes - # PostgreSQL/PostGIS PostgreSQL/PostGIS Yes Yes No, needs PostgreSQL client library (libpq) - # EPIInfo .REC REC No No Yes - # S-57 (ENC) S57 No Yes Yes - # multi-layer - ("S57", "r"), - # SDTS SDTS No Yes Yes - # multi-layer - # ("SDTS", "r"), - # SEG-P1 / UKOOA P1/90 SEGUKOOA No Yes Yes - # multi-layers - # ("SEGUKOOA", "r"), - # SEG-Y SEGY No No Yes - ("SEGY", "r"), - # Norwegian SOSI Standard SOSI No Yes No, needs FYBA library - # SQLite/SpatiaLite SQLite Yes Yes No, needs libsqlite3 or libspatialite - ("SQLite", "raw"), - # SUA SUA No Yes Yes - ("SUA", "r"), - # SVG SVG No Yes No, needs libexpat - # TopoJSON TopoJSON No Yes Yes - ("TopoJSON", "r"), - # UK .NTF UK. NTF No Yes Yes - # multi-layer - # ("UK. NTF", "r"), - # U.S. Census TIGER/Line TIGER No Yes Yes - # multi-layer - # ("TIGER", "r"), - # VFK data VFK No Yes Yes - # multi-layer - # ("VFK", "r"), - # VRT - Virtual Datasource VRT No Yes Yes - # multi-layer - # ("VRT", "r"), - # OGC WFS (Web Feature Service) WFS Yes Yes No, needs libcurl - # MS Excel format XLS No No No, needs libfreexl - # Office Open XML spreadsheet XLSX Yes No No, needs libexpat - # X-Plane/Flighgear aeronautical data XPLANE No Yes Yes - # multi-layer - # ("XPLANE", "r") -]) - +supported_drivers = dict( + [ + # OGR Vector Formats + # Format Name Code Creation Georeferencing Compiled by default + # Aeronav FAA files AeronavFAA No Yes Yes + ("AeronavFAA", "r"), + # ESRI ArcObjects ArcObjects No Yes No, needs ESRI ArcObjects + # Arc/Info Binary Coverage AVCBin No Yes Yes + # multi-layer + # ("AVCBin", "r"), + # Arc/Info .E00 (ASCII) Coverage AVCE00 No Yes Yes + # multi-layer + # ("AVCE00", "r"), + # Arc/Info Generate ARCGEN No No Yes + ("ARCGEN", "r"), + # Atlas BNA BNA Yes No Yes + ("BNA", "rw"), + # AutoCAD DWG DWG No No No + # AutoCAD DXF DXF Yes No Yes + ("DXF", "rw"), + # Comma Separated Value (.csv) CSV Yes No Yes + ("CSV", "raw"), + # CouchDB / GeoCouch CouchDB Yes Yes No, needs libcurl + # DODS/OPeNDAP DODS No Yes No, needs libdap + # EDIGEO EDIGEO No Yes Yes + # multi-layer? 
Hard to tell from the OGR docs + # ("EDIGEO", "r"), + # ElasticSearch ElasticSearch Yes (write-only) - No, needs libcurl + # ESRI FileGDB FileGDB Yes Yes No, needs FileGDB API library + # multi-layer + ("FileGDB", "raw"), + ("OpenFileGDB", "raw"), + # ESRI Personal GeoDatabase PGeo No Yes No, needs ODBC library + # ESRI ArcSDE SDE No Yes No, needs ESRI SDE + # ESRIJSON ESRIJSON No Yes Yes + ("ESRIJSON", "r"), + # ESRI Shapefile ESRI Shapefile Yes Yes Yes + ("ESRI Shapefile", "raw"), + # FMEObjects Gateway FMEObjects Gateway No Yes No, needs FME + ("FlatGeobuf", "raw"), + # GeoJSON GeoJSON Yes Yes Yes + ("GeoJSON", "raw"), + # GeoJSONSeq GeoJSON sequences Yes Yes Yes + ("GeoJSONSeq", "raw"), + # Géoconcept Export Geoconcept Yes Yes Yes + # multi-layers + # ("Geoconcept", "raw"), + # Geomedia .mdb Geomedia No No No, needs ODBC library + # GeoPackage GPKG Yes Yes No, needs libsqlite3 + ("GPKG", "raw"), + # GeoRSS GeoRSS Yes Yes Yes (read support needs libexpat) + # Google Fusion Tables GFT Yes Yes No, needs libcurl + # GML GML Yes Yes Yes (read support needs Xerces or libexpat) + ("GML", "rw"), + # GMT GMT Yes Yes Yes + ("GMT", "rw"), + # GMT renamed to OGR_GMT for GDAL 2.x + ("OGR_GMT", "rw"), + # GPSBabel GPSBabel Yes Yes Yes (needs GPSBabel and GPX driver) + # GPX GPX Yes Yes Yes (read support needs libexpat) + ("GPX", "rw"), + # GRASS GRASS No Yes No, needs libgrass + # GPSTrackMaker (.gtm, .gtz) GPSTrackMaker Yes Yes Yes + # ("GPSTrackMaker", "rw"), + # Hydrographic Transfer Format HTF No Yes Yes + # TODO: Fiona is not ready for multi-layer formats: ("HTF", "r"), + # Idrisi Vector (.VCT) Idrisi No Yes Yes + ("Idrisi", "r"), + # Informix DataBlade IDB Yes Yes No, needs Informix DataBlade + # INTERLIS "Interlis 1" and "Interlis 2" Yes Yes No, needs Xerces (INTERLIS model reading needs ili2c.jar) + # INGRES INGRES Yes No No, needs INGRESS + # KML KML Yes Yes Yes (read support needs libexpat) + # LIBKML LIBKML Yes Yes No, needs libkml + # Mapinfo File MapInfo File Yes Yes Yes + ("MapInfo File", "raw"), + # Microstation DGN DGN Yes No Yes + ("DGN", "raw"), + # Access MDB (PGeo and Geomedia capable) MDB No Yes No, needs JDK/JRE + # Memory Memory Yes Yes Yes + # MySQL MySQL No Yes No, needs MySQL library + # NAS - ALKIS NAS No Yes No, needs Xerces + # Oracle Spatial OCI Yes Yes No, needs OCI library + # ODBC ODBC No Yes No, needs ODBC library + # MS SQL Spatial MSSQLSpatial Yes Yes No, needs ODBC library + # Open Document Spreadsheet ODS Yes No No, needs libexpat + # OGDI Vectors (VPF, VMAP, DCW) OGDI No Yes No, needs OGDI library + # OpenAir OpenAir No Yes Yes + # multi-layer + # ("OpenAir", "r"), + # (Geo)Parquet + ("Parquet", "raw"), + # PCI Geomatics Database File PCIDSK No No Yes, using internal PCIDSK SDK (from GDAL 1.7.0) + ("PCIDSK", "raw"), + # PDS PDS No Yes Yes + ("PDS", "r"), + # PDS renamed to OGR_PDS for GDAL 2.x + ("OGR_PDS", "r"), + # PGDump PostgreSQL SQL dump Yes Yes Yes + # PostgreSQL/PostGIS PostgreSQL/PostGIS Yes Yes No, needs PostgreSQL client library (libpq) + # EPIInfo .REC REC No No Yes + # S-57 (ENC) S57 No Yes Yes + # multi-layer + ("S57", "r"), + # SDTS SDTS No Yes Yes + # multi-layer + # ("SDTS", "r"), + # SEG-P1 / UKOOA P1/90 SEGUKOOA No Yes Yes + # multi-layers + # ("SEGUKOOA", "r"), + # SEG-Y SEGY No No Yes + ("SEGY", "r"), + # Norwegian SOSI Standard SOSI No Yes No, needs FYBA library + # SQLite/SpatiaLite SQLite Yes Yes No, needs libsqlite3 or libspatialite + ("SQLite", "raw"), + # SUA SUA No Yes Yes + ("SUA", "r"), + # SVG SVG No Yes No, needs libexpat + # 
TopoJSON TopoJSON No Yes Yes
+        ("TopoJSON", "r"),
+        # UK .NTF UK. NTF No Yes Yes
+        # multi-layer
+        # ("UK. NTF", "r"),
+        # U.S. Census TIGER/Line TIGER No Yes Yes
+        # multi-layer
+        # ("TIGER", "r"),
+        # VFK data VFK No Yes Yes
+        # multi-layer
+        # ("VFK", "r"),
+        # VRT - Virtual Datasource VRT No Yes Yes
+        # multi-layer
+        # ("VRT", "r"),
+        # OGC WFS (Web Feature Service) WFS Yes Yes No, needs libcurl
+        # MS Excel format XLS No No No, needs libfreexl
+        # Office Open XML spreadsheet XLSX Yes No No, needs libexpat
+        # X-Plane/FlightGear aeronautical data XPLANE No Yes Yes
+        # multi-layer
+        # ("XPLANE", "r")
+    ]
+)

 # Minimal gdal version for different modes
 driver_mode_mingdal = {
-
-    'r': {'GPKG': (1, 11, 0),
-          'GeoJSONSeq': (2, 4, 0),
-          'FlatGeobuf': (3, 1, 0)},
-
-    'w': {'GPKG': (1, 11, 0),
-          'PCIDSK': (2, 0, 0),
-          'GeoJSONSeq': (2, 4, 0),
-          'FlatGeobuf': (3, 1, 3)},
-
-    'a': {'GPKG': (1, 11, 0),
-          'PCIDSK': (2, 0, 0),
-          'GeoJSON': (2, 1, 0),
-          'MapInfo File': (2, 0, 0)}
+    "r": {"GPKG": (1, 11, 0), "GeoJSONSeq": (2, 4, 0), "FlatGeobuf": (3, 1, 0)},
+    "w": {
+        "GPKG": (1, 11, 0),
+        "PCIDSK": (2, 0, 0),
+        "GeoJSONSeq": (2, 4, 0),
+        "FlatGeobuf": (3, 1, 3),
+        "OpenFileGDB": (3, 6, 0),
+    },
+    "a": {
+        "GPKG": (1, 11, 0),
+        "PCIDSK": (2, 0, 0),
+        "GeoJSON": (2, 1, 0),
+        "GeoJSONSeq": (3, 6, 0),
+        "MapInfo File": (2, 0, 0),
+        "FlatGeobuf": (3, 5, 1),
+        "OpenFileGDB": (3, 6, 0),
+    },
 }
@@ -176,7 +185,7 @@
     if mode not in supported_drivers[driver]:
         return False
     if driver in driver_mode_mingdal[mode]:
-        if get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal[mode][driver]):
+        if _GDAL_VERSION < driver_mode_mingdal[mode][driver]:
             return False
     return True
@@ -199,9 +208,58 @@
 _filter_supported_drivers()

-# driver_converts_to_str contains field type, driver combinations that are silently converted to string
-# None: field type is always converted to str
-# (2, 0, 0): starting from gdal 2.0 field type is not converted to string
+
+
+def vector_driver_extensions():
+    """
+    Returns
+    -------
+    dict:
+        Map of extensions to the driver.
+    """
+    from fiona.meta import extensions  # prevent circular import
+
+    extension_to_driver = {}
+    for drv, modes in supported_drivers.items():
+        # update extensions based on driver support
+        for extension in extensions(drv) or ():
+            if "w" in modes:
+                extension_to_driver[extension] = extension_to_driver.get(extension, drv)
+    return extension_to_driver
+
+
+def driver_from_extension(path):
+    """
+    Attempt to auto-detect driver based on the extension.
+
+    Parameters
+    ----------
+    path: str or pathlike object
+        The path to the dataset to write with.
+
+    Returns
+    -------
+    str:
+        The name of the driver for the extension.
+    """
+    try:
+        # in case the path is a file handle
+        # or a parsed path
+        path = path.name
+    except AttributeError:
+        pass
+
+    driver_extensions = vector_driver_extensions()
+
+    try:
+        return driver_extensions[os.path.splitext(path)[-1].lstrip(".").lower()]
+    except KeyError:
+        raise ValueError("Unable to detect driver. 
Please specify driver.") + + +# driver_converts_to_str contains field type, driver combinations that +# are silently converted to string None: field type is always converted +# to str (2, 0, 0): starting from gdal 2.0 field type is not converted +# to string _driver_converts_to_str = { 'time': { 'CSV': None, @@ -236,25 +294,26 @@ if field_type in _driver_converts_to_str and driver in _driver_converts_to_str[field_type]: if _driver_converts_to_str[field_type][driver] is None: return True - elif get_gdal_version_num() < calc_gdal_version_num(*_driver_converts_to_str[field_type][driver]): + elif _GDAL_VERSION < _driver_converts_to_str[field_type][driver]: return True return False # None: field type is never supported, (2, 0, 0) field type is supported starting with gdal 2.0 _driver_field_type_unsupported = { - 'time': { - 'ESRI Shapefile': None, - 'GPKG': (2, 0, 0), - 'GPX': None, - 'GPSTrackMaker': None, - 'GML': (3, 1, 0), - 'DGN': None, - 'BNA': None, - 'DXF': None, - 'PCIDSK': (2, 1, 0), - 'FileGDB': None, - 'FlatGeobuf': None + "time": { + "ESRI Shapefile": None, + "GPKG": (2, 0, 0), + "GPX": None, + "GPSTrackMaker": None, + "GML": (3, 1, 0), + "DGN": None, + "BNA": None, + "DXF": None, + "PCIDSK": (2, 1, 0), + "FileGDB": (3, 5, 0), + "FlatGeobuf": None, + "OpenFileGDB": None, }, 'datetime': { 'ESRI Shapefile': None, @@ -264,16 +323,17 @@ 'DXF': None, 'PCIDSK': (2, 1, 0) }, - 'date': { - 'GPX': None, - 'GPSTrackMaker': None, - 'DGN': None, - 'BNA': None, - 'DXF': None, - 'PCIDSK': (2, 1, 0), - 'FileGDB': None, - 'FlatGeobuf': None - } + "date": { + "GPX": None, + "GPSTrackMaker": None, + "DGN": None, + "BNA": None, + "DXF": None, + "PCIDSK": (2, 1, 0), + "FileGDB": (3, 5, 0), + "FlatGeobuf": None, + "OpenFileGDB": None, + }, } @@ -285,7 +345,7 @@ if field_type in _driver_field_type_unsupported and driver in _driver_field_type_unsupported[field_type]: if _driver_field_type_unsupported[field_type][driver] is None: return False - elif get_gdal_version_num() < calc_gdal_version_num(*_driver_field_type_unsupported[field_type][driver]): + elif _GDAL_VERSION < _driver_field_type_unsupported[field_type][driver]: return False return True @@ -300,18 +360,18 @@ 'FileGDB': None, 'SQLite': (2, 4, 0) }, - 'time': { - 'MapInfo File': None, - 'GPKG': None, - 'GPSTrackMaker': None, - 'GeoJSON': None, - 'GeoJSONSeq': None, - 'GML': None, - 'CSV': None, - 'GMT': None, - 'OGR_GMT': None, - 'SQLite': None - } + "time": { + "MapInfo File": None, + "GPKG": None, + "GPSTrackMaker": None, + "GeoJSON": None, + "GeoJSONSeq": None, + "GML": None, + "CSV": None, + "GMT": None, + "OGR_GMT": None, + "SQLite": None, + }, } @@ -323,15 +383,16 @@ if field_type in _drivers_not_supporting_timezones and driver in _drivers_not_supporting_timezones[field_type]: if _drivers_not_supporting_timezones[field_type][driver] is None: return False - elif get_gdal_version_num() < calc_gdal_version_num(*_drivers_not_supporting_timezones[field_type][driver]): + elif _GDAL_VERSION < _drivers_not_supporting_timezones[field_type][driver]: return False return True # None: driver never supports timezones, (2, 0, 0): driver supports timezones with GDAL 2.0.0 _drivers_not_supporting_milliseconds = { - 'GPSTrackMaker': None, - 'FileGDB': None + "GPSTrackMaker": None, + "FileGDB": None, + "OpenFileGDB": None, } @@ -341,14 +402,13 @@ Note: this function is not part of Fiona's public API. 
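
The recurring change in this file replaces get_gdal_version_num()/calc_gdal_version_num() arithmetic with a direct comparison against _GDAL_VERSION. Python compares tuples elementwise, so each minimum-version gate becomes a single expression; a sketch, with a plain tuple standing in for the (major, minor, revision) value returned by get_gdal_version_tuple():

    _GDAL_VERSION = (3, 4, 1)  # stand-in for get_gdal_version_tuple()

    print(_GDAL_VERSION < (3, 5, 0))  # True: below the FileGDB 'time' minimum above
    print(_GDAL_VERSION < (2, 1, 0))  # False: this requirement is met
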
""" # GDAL 2.0 introduced support for milliseconds - if get_gdal_version_num() < calc_gdal_version_num(2, 0, 0): + if _GDAL_VERSION.major < 2: return False if driver in _drivers_not_supporting_milliseconds: if _drivers_not_supporting_milliseconds[driver] is None: return False - elif calc_gdal_version_num(*_drivers_not_supporting_milliseconds[driver]) < get_gdal_version_num(): + elif _drivers_not_supporting_milliseconds[driver] < _GDAL_VERSION: return False return True - diff -Nru fiona-1.8.22/fiona/enums.py fiona-1.9.5/fiona/enums.py --- fiona-1.8.22/fiona/enums.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/fiona/enums.py 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,31 @@ +"""Enumerations.""" + +from enum import Enum + + +class WktVersion(Enum): + """ + .. versionadded:: 1.9.0 + + Supported CRS WKT string versions. + """ + + #: WKT Version 2 from 2015 + WKT2_2015 = "WKT2_2015" + #: Alias for latest WKT Version 2 + WKT2 = "WKT2" + #: WKT Version 2 from 2019 + WKT2_2019 = "WKT2_2018" + #: WKT Version 1 GDAL Style + WKT1_GDAL = "WKT1_GDAL" + #: Alias for WKT Version 1 GDAL Style + WKT1 = "WKT1" + #: WKT Version 1 ESRI Style + WKT1_ESRI = "WKT1_ESRI" + + @classmethod + def _missing_(cls, value): + if value == "WKT2_2019": + # WKT2_2019 alias added in GDAL 3.2, use WKT2_2018 for compatibility + return WktVersion.WKT2_2019 + raise ValueError(f"Invalid value for WktVersion: {value}") diff -Nru fiona-1.8.22/fiona/env.py fiona-1.9.5/fiona/env.py --- fiona-1.8.22/fiona/env.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/env.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,30 +1,28 @@ """Fiona's GDAL/AWS environment""" from functools import wraps, total_ordering +from inspect import getfullargspec import logging import os import re import threading +import warnings import attr -from six import string_types -import fiona._loading -with fiona._loading.add_gdal_dll_directories(): - from fiona._env import ( - GDALDataFinder, - GDALEnv, - PROJDataFinder, - calc_gdal_version_num, - get_gdal_config, - get_gdal_release_name, - get_gdal_version_num, - set_gdal_config, - set_proj_data_search_path, - ) - from fiona.compat import getargspec - from fiona.errors import EnvError, GDALVersionError - from fiona.session import Session, DummySession +from fiona._env import ( + GDALDataFinder, + GDALEnv, + PROJDataFinder, + calc_gdal_version_num, + get_gdal_config, + get_gdal_release_name, + get_gdal_version_num, + set_gdal_config, + set_proj_data_search_path, +) +from fiona.errors import EnvError, FionaDeprecationWarning, GDALVersionError +from fiona.session import Session, DummySession class ThreadEnv(threading.local): @@ -63,7 +61,7 @@ log = logging.getLogger(__name__) -class Env(object): +class Env: """Abstraction for GDAL and AWS configuration The GDAL library is stateful: it has a registry of format drivers, @@ -107,17 +105,23 @@ Returns ------- dict + """ return { - 'CHECK_WITH_INVERT_PROJ': True, - 'GTIFF_IMPLICIT_JPEG_OVR': False, - "FIONA_ENV": True + "CHECK_WITH_INVERT_PROJ": True, + "GTIFF_IMPLICIT_JPEG_OVR": False, + "FIONA_ENV": True, } def __init__( - self, session=None, **options): + self, + session=None, + aws_unsigned=False, + profile_name=None, + session_class=Session.aws_or_dummy, + **options + ): """Create a new GDAL/AWS environment. - Note: this class is a context manager. GDAL isn't configured until the context is entered via `with fiona.Env():` @@ -125,6 +129,12 @@ ---------- session : optional A Session object. + aws_unsigned : bool, optional + Do not sign cloud requests. 
+ profile_name : str, optional + A shared credentials profile name, as per boto3. + session_class : Session, optional + A sub-class of Session. **options : optional A mapping of GDAL configuration options, e.g., `CPL_DEBUG=True, CHECK_WITH_INVERT_PROJ=False`. @@ -141,30 +151,66 @@ Examples -------- - >>> with Env(CPL_DEBUG=True, CPL_CURL_VERBOSE=True): ... with fiona.open("zip+https://example.com/a.zip") as col: - ... print(col.meta) + ... print(col.profile) - For access to secured cloud resources, a Fiona Session may be - passed to the constructor. + For access to secured cloud resources, a Fiona Session or a + foreign session object may be passed to the constructor. >>> import boto3 >>> from fiona.session import AWSSession >>> boto3_session = boto3.Session(...) >>> with Env(AWSSession(boto3_session)): ... with fiona.open("zip+s3://example/a.zip") as col: - ... print(col.meta) + ... print(col.profile """ - if ('AWS_ACCESS_KEY_ID' in options or - 'AWS_SECRET_ACCESS_KEY' in options): + aws_access_key_id = options.pop("aws_access_key_id", None) + # Warn deprecation in 1.9, remove in 2.0. + if aws_access_key_id: + warnings.warn( + "Passing abstract session keyword arguments is deprecated. " + "Pass a Fiona AWSSession object instead.", + FionaDeprecationWarning, + ) + + aws_secret_access_key = options.pop("aws_secret_access_key", None) + aws_session_token = options.pop("aws_session_token", None) + region_name = options.pop("region_name", None) + + if not {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"}.isdisjoint(options): raise EnvError( "GDAL's AWS config options can not be directly set. " - "AWS credentials are handled exclusively by boto3.") + "AWS credentials are handled exclusively by boto3." + ) if session: + # Passing a session via keyword argument is the canonical + # way to configure access to secured cloud resources. + # Warn deprecation in 1.9, remove in 2.0. + if not isinstance(session, Session): + warnings.warn( + "Passing a boto3 session is deprecated. Pass a Fiona AWSSession object instead.", + FionaDeprecationWarning, + ) + session = Session.aws_or_dummy(session=session) + self.session = session + + elif aws_access_key_id or profile_name or aws_unsigned: + self.session = Session.aws_or_dummy( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + region_name=region_name, + profile_name=profile_name, + aws_unsigned=aws_unsigned, + ) + + elif {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"}.issubset(os.environ.keys()): + self.session = Session.from_environ() + else: self.session = DummySession() @@ -172,16 +218,15 @@ self.context_options = {} @classmethod - def from_defaults(cls, session=None, **options): + def from_defaults(cls, *args, **kwargs): """Create an environment with default config options Parameters ---------- - session : optional - A Session object. - **options : optional - A mapping of GDAL configuration options, e.g., - `CPL_DEBUG=True, CHECK_WITH_INVERT_PROJ=False`. + args : optional + Positional arguments for Env() + kwargs : optional + Keyword arguments for Env() Returns ------- @@ -192,19 +237,9 @@ The items in kwargs will be overlaid on the default values. 
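
Taken together, the new keywords make the common credential setups one-liners; an illustrative sketch (bucket and profile names are placeholders):

    import fiona
    from fiona.session import AWSSession

    # Anonymous access to a public bucket, new in 1.9:
    with fiona.Env(aws_unsigned=True):
        with fiona.open("zip+s3://example/a.zip") as colxn:
            print(colxn.profile)

    # Passing an explicit session object remains the canonical route:
    with fiona.Env(session=AWSSession(profile_name="default")):
        pass
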
""" - opts = Env.default_options() - opts.update(**options) - return Env(session=session, **opts) - - @property - def is_credentialized(self): - """Test for existence of cloud credentials - - Returns - ------- - bool - """ - return hascreds() + options = Env.default_options() + options.update(**kwargs) + return Env(*args, **options) def credentialize(self): """Get credentials and configure GDAL @@ -217,33 +252,34 @@ None """ - if hascreds(): - pass - else: - cred_opts = self.session.get_credential_options() - self.options.update(**cred_opts) - setenv(**cred_opts) + cred_opts = self.session.get_credential_options() + self.options.update(**cred_opts) + setenv(**cred_opts) def drivers(self): """Return a mapping of registered drivers.""" return local._env.drivers() + def _dump_open_datasets(self): + """Writes descriptions of open datasets to stderr + + For debugging and testing purposes. + """ + return local._env._dump_open_datasets() + def __enter__(self): - log.debug("Entering env context: %r", self) if local._env is None: - log.debug("Starting outermost env") self._has_parent_env = False # See note directly above where _discovered_options is globally # defined. This MUST happen before calling 'defenv()'. local._discovered_options = {} - # Don't want to reinstate the "FIONA_ENV" option. - probe_env = {k for k in self.options.keys() if k != "FIONA_ENV"} + # Don't want to reinstate the "RASTERIO_ENV" option. + probe_env = {k for k in self.options.keys() if k != "RASTERIO_ENV"} for key in probe_env: val = get_gdal_config(key, normalize=False) if val is not None: local._discovered_options[key] = val - log.debug("Discovered option: %s=%s", key, val) defenv(**self.options) self.context_options = {} @@ -253,39 +289,29 @@ setenv(**self.options) self.credentialize() - - log.debug("Entered env context: %r", self) return self def __exit__(self, exc_type=None, exc_val=None, exc_tb=None): - log.debug("Exiting env context: %r", self) delenv() if self._has_parent_env: defenv() setenv(**self.context_options) else: - log.debug("Exiting outermost env") # See note directly above where _discovered_options is globally # defined. 
while local._discovered_options: key, val = local._discovered_options.popitem() set_gdal_config(key, val, normalize=False) - log.debug( - "Set discovered option back to: '%s=%s", key, val) + local._discovered_options = None - log.debug("Exited env context: %r", self) def defenv(**options): """Create a default environment if necessary.""" - if local._env: - log.debug("GDAL environment exists: %r", local._env) - else: - log.debug("No GDAL environment exists") + if not local._env: local._env = GDALEnv() local._env.update_config_options(**options) - log.debug( - "New GDAL environment %r created", local._env) + local._env.start() @@ -294,7 +320,6 @@ if not local._env: raise EnvError("No GDAL environment exists") else: - log.debug("Got a copy of environment %r options", local._env) return local._env.options.copy() @@ -308,11 +333,14 @@ raise EnvError("No GDAL environment exists") else: local._env.update_config_options(**options) - log.debug("Updated existing %r with options %r", local._env, options) def hascreds(): - return local._env is not None and all(key in local._env.get_config_options() for key in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY']) + warnings.warn("Please use Env.session.hascreds() instead", FionaDeprecationWarning) + return local._env is not None and all( + key in local._env.get_config_options() + for key in ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"] + ) def delenv(): @@ -321,12 +349,12 @@ raise EnvError("No GDAL environment exists") else: local._env.clear_config_options() - log.debug("Cleared existing %r options", local._env) + local._env.stop() local._env = None -class NullContextManager(object): +class NullContextManager: def __init__(self): pass @@ -370,6 +398,7 @@ nothing and immediately calls f with the given arguments. """ + @wraps(f) def wrapper(*args, **kwargs): if local._env: @@ -377,6 +406,7 @@ else: with Env.from_defaults(): return f(*args, **kwargs) + return wrapper @@ -402,29 +432,41 @@ nothing and immediately calls f with the given arguments. """ + @wraps(f) - def wrapper(*args, **kwargs): + def wrapper(*args, **kwds): if local._env: - return f(*args, **kwargs) + env_ctor = Env else: - if isinstance(args[0], str): - session = Session.from_path(args[0]) - else: - session = Session.from_path(None) + env_ctor = Env.from_defaults + + fp_arg = kwds.get("fp", None) or args[0] + + if isinstance(fp_arg, str): + session_cls = Session.cls_from_path(fp_arg) + + if local._env and session_cls.hascreds(getenv()): + session_cls = DummySession + + session = session_cls() + + else: + session = DummySession() + + with env_ctor(session=session): + return f(*args, **kwds) - with Env.from_defaults(session=session): - log.debug("Credentialized: {!r}".format(getenv())) - return f(*args, **kwargs) return wrapper @attr.s(slots=True) @total_ordering -class GDALVersion(object): +class GDALVersion: """Convenience class for obtaining GDAL major and minor version components and comparing between versions. This is highly simplistic and assumes a very normal numbering scheme for versions and ignores everything except the major and minor components. 
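
For example (illustration only; patch levels and pre-release suffixes are dropped by design):

    from fiona.env import GDALVersion

    v = GDALVersion.parse("3.6.4")  # also accepts (3, 6) or a GDALVersion
    print(str(v))                   # '3.6'
    print(v.at_least("3.1"))        # True
    print(GDALVersion.runtime())    # version of the GDAL actually linked
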
+ """ major = attr.ib(default=0, validator=attr.validators.instance_of(int)) @@ -437,10 +479,10 @@ return (self.major, self.minor) < tuple(other.major, other.minor) def __repr__(self): - return "GDALVersion(major={0}, minor={1})".format(self.major, self.minor) + return f"GDALVersion(major={self.major}, minor={self.minor})" def __str__(self): - return "{0}.{1}".format(self.major, self.minor) + return f"{self.major}.{self.minor}" @classmethod def parse(cls, input): @@ -455,21 +497,22 @@ Returns ------- GDALVersion instance - """ + """ if isinstance(input, cls): return input if isinstance(input, tuple): return cls(*input) - elif isinstance(input, string_types): + elif isinstance(input, str): # Extract major and minor version components. # alpha, beta, rc suffixes ignored - match = re.search(r'^\d+\.\d+', input) + match = re.search(r"^\d+\.\d+", input) if not match: raise ValueError( "value does not appear to be a valid GDAL version " - "number: {}".format(input)) - major, minor = (int(c) for c in match.group().split('.')) + "number: {}".format(input) + ) + major, minor = (int(c) for c in match.group().split(".")) return cls(major=major, minor=minor) raise TypeError("GDALVersion can only be parsed from a string or tuple") @@ -484,8 +527,9 @@ return self >= other -def require_gdal_version(version, param=None, values=None, is_max_version=False, - reason=''): +def require_gdal_version( + version, param=None, values=None, is_max_version=False, reason="" +): """A decorator that ensures the called function or parameters are supported by the runtime version of GDAL. Raises GDALVersionError if conditions are not met. @@ -533,60 +577,69 @@ Returns --------- wrapped function - """ + """ if values is not None: if param is None: - raise ValueError( - 'require_gdal_version: param must be provided with values') + raise ValueError("require_gdal_version: param must be provided with values") if not isinstance(values, (tuple, list, set)): raise ValueError( - 'require_gdal_version: values must be a tuple, list, or set') + "require_gdal_version: values must be a tuple, list, or set" + ) version = GDALVersion.parse(version) runtime = GDALVersion.runtime() - inequality = '>=' if runtime < version else '<=' - reason = '\n{0}'.format(reason) if reason else reason + inequality = ">=" if runtime < version else "<=" + reason = f"\n{reason}" if reason else reason def decorator(f): @wraps(f) def wrapper(*args, **kwds): - if ((runtime < version and not is_max_version) or - (is_max_version and runtime > version)): + if (runtime < version and not is_max_version) or ( + is_max_version and runtime > version + ): if param is None: raise GDALVersionError( - "GDAL version must be {0} {1}{2}".format( - inequality, str(version), reason)) + "GDAL version must be {} {}{}".format( + inequality, str(version), reason + ) + ) # normalize args and kwds to dict - argspec = getargspec(f) + argspec = getfullargspec(f) full_kwds = kwds.copy() if argspec.args: - full_kwds.update(dict(zip(argspec.args[:len(args)], args))) + full_kwds.update(dict(zip(argspec.args[: len(args)], args))) if argspec.defaults: - defaults = dict(zip( - reversed(argspec.args), reversed(argspec.defaults))) + defaults = dict( + zip(reversed(argspec.args), reversed(argspec.defaults)) + ) else: defaults = {} if param in full_kwds: if values is None: if param not in defaults or ( - full_kwds[param] != defaults[param]): + full_kwds[param] != defaults[param] + ): raise GDALVersionError( - 'usage of parameter "{0}" requires ' - 'GDAL {1} {2}{3}'.format(param, inequality, - 
version, reason)) + 'usage of parameter "{}" requires ' + "GDAL {} {}{}".format( + param, inequality, version, reason + ) + ) elif full_kwds[param] in values: raise GDALVersionError( - 'parameter "{0}={1}" requires ' - 'GDAL {2} {3}{4}'.format( - param, full_kwds[param], inequality, version, reason)) + 'parameter "{}={}" requires ' + "GDAL {} {}{}".format( + param, full_kwds[param], inequality, version, reason + ) + ) return f(*args, **kwds) @@ -597,7 +650,7 @@ # Patch the environment if needed, such as in the installed wheel case. -if 'GDAL_DATA' not in os.environ: +if "GDAL_DATA" not in os.environ: path = GDALDataFinder().search_wheel() @@ -616,7 +669,13 @@ set_gdal_config("GDAL_DATA", path) log.debug("GDAL data found in other locations: path=%r.", path) -if "PROJ_LIB" in os.environ: +if 'PROJ_DATA' in os.environ: + # PROJ 9.1+ + path = os.environ["PROJ_DATA"] + set_proj_data_search_path(path) + +elif "PROJ_LIB" in os.environ: + # PROJ < 9.1 path = os.environ["PROJ_LIB"] set_proj_data_search_path(path) diff -Nru fiona-1.8.22/fiona/errors.py fiona-1.9.5/fiona/errors.py --- fiona-1.8.22/fiona/errors.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/errors.py 2023-10-11 23:19:44.000000000 +0000 @@ -5,10 +5,14 @@ """Base Fiona error""" -class FionaValueError(ValueError): +class FionaValueError(FionaError, ValueError): """Fiona-specific value errors""" +class AttributeFilterError(FionaValueError): + """Error processing SQL WHERE clause with the dataset.""" + + class DriverError(FionaValueError): """Encapsulates unsupported driver and driver mode errors.""" @@ -21,11 +25,15 @@ """When a crs mapping has neither init or proj items.""" -class DataIOError(IOError): +class UnsupportedOperation(FionaError): + """Raised when reading from a file opened in 'w' mode""" + + +class DataIOError(OSError): """IO errors involving driver registration or availability.""" -class DriverIOError(IOError): +class DriverIOError(OSError): """A format specific driver error.""" @@ -33,7 +41,7 @@ """Driver does not support schema""" -class DatasetDeleteError(IOError): +class DatasetDeleteError(OSError): """Failure to delete a dataset""" @@ -63,7 +71,11 @@ """ -class FionaDeprecationWarning(UserWarning): +class TransformError(FionaError): + """Raised if a coordinate transformation fails.""" + + +class FionaDeprecationWarning(DeprecationWarning): """A warning about deprecation of Fiona features""" diff -Nru fiona-1.8.22/fiona/fio/bounds.py fiona-1.9.5/fiona/fio/bounds.py --- fiona-1.8.22/fiona/fio/bounds.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/bounds.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,8 +1,6 @@ """$ fio bounds""" - import json -import logging import click from cligj import precision_opt, use_rs_opt @@ -10,6 +8,7 @@ import fiona from fiona.fio.helpers import obj_gen from fiona.fio import with_context_env +from fiona.model import ObjectEncoder @click.command(short_help="Print the extent of GeoJSON objects") @@ -37,52 +36,54 @@ To print the input objects themselves along with their bounds as GeoJSON object, use --with-obj. This has the effect of updating input objects with {id: identifier, bbox: bounds}. 
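
Each record is measured with fiona.bounds(), which can also be called directly; a tiny sketch with a made-up feature:

    import fiona

    feat = {
        "type": "Feature",
        "properties": {},
        "geometry": {"type": "LineString", "coordinates": [(0.0, 0.0), (2.0, 1.0)]},
    }
    print(fiona.bounds(feat))  # (0.0, 0.0, 2.0, 1.0) as (west, south, east, north)
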
+ """ - logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') - try: - source = obj_gen(stdin) - for i, obj in enumerate(source): - obj_id = obj.get('id', 'collection:' + str(i)) - xs = [] - ys = [] - features = obj.get('features') or [obj] - for j, feat in enumerate(features): - feat_id = feat.get('id', 'feature:' + str(i)) - w, s, e, n = fiona.bounds(feat) - if precision > 0: - w, s, e, n = (round(v, precision) - for v in (w, s, e, n)) - if explode: - if with_id: - rec = { - 'parent': obj_id, - 'id': feat_id, - 'bbox': (w, s, e, n)} - elif with_obj: - feat.update(parent=obj_id, bbox=(w, s, e, n)) - rec = feat - else: - rec = (w, s, e, n) - if use_rs: - click.echo(u'\u001e', nl=False) - click.echo(json.dumps(rec)) - else: - xs.extend([w, e]) - ys.extend([s, n]) - if not explode: - w, s, e, n = (min(xs), min(ys), max(xs), max(ys)) + source = obj_gen(stdin) + + for i, obj in enumerate(source): + obj_id = obj.get("id", "collection:" + str(i)) + xs = [] + ys = [] + features = obj.get("features") or [obj] + + for j, feat in enumerate(features): + feat_id = feat.get("id", "feature:" + str(i)) + w, s, e, n = fiona.bounds(feat) + + if precision > 0: + w, s, e, n = (round(v, precision) for v in (w, s, e, n)) + if explode: + if with_id: - rec = {'id': obj_id, 'bbox': (w, s, e, n)} + rec = {"parent": obj_id, "id": feat_id, "bbox": (w, s, e, n)} elif with_obj: - obj.update(id=obj_id, bbox=(w, s, e, n)) - rec = obj + feat.update(parent=obj_id, bbox=(w, s, e, n)) + rec = feat else: rec = (w, s, e, n) + if use_rs: - click.echo(u'\u001e', nl=False) - click.echo(json.dumps(rec)) + click.echo('\x1e', nl=False) + + click.echo(json.dumps(rec, cls=ObjectEncoder)) + + else: + xs.extend([w, e]) + ys.extend([s, n]) + + if not explode: + w, s, e, n = (min(xs), min(ys), max(xs), max(ys)) + + if with_id: + rec = {"id": obj_id, "bbox": (w, s, e, n)} + elif with_obj: + obj.update(id=obj_id, bbox=(w, s, e, n)) + rec = obj + else: + rec = (w, s, e, n) + + if use_rs: + click.echo("\x1e", nl=False) - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() + click.echo(json.dumps(rec, cls=ObjectEncoder)) diff -Nru fiona-1.8.22/fiona/fio/calc.py fiona-1.9.5/fiona/fio/calc.py --- fiona-1.8.22/fiona/fio/calc.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/calc.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,12 +1,11 @@ -from __future__ import division import json -import logging import click from cligj import use_rs_opt from .helpers import obj_gen, eval_feature_expression from fiona.fio import with_context_env +from fiona.model import ObjectEncoder @click.command(short_help="Calculate GeoJSON property by Python expression") @@ -38,27 +37,27 @@ \b $ fio cat data.shp | fio calc sumAB "f.properties.A + f.properties.B" + """ - logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') - try: - source = obj_gen(stdin) - for i, obj in enumerate(source): - features = obj.get('features') or [obj] - for j, feat in enumerate(features): - - if not overwrite and property_name in feat['properties']: - raise click.UsageError( - '{0} already exists in properties; ' - 'rename or use --overwrite'.format(property_name)) - - feat['properties'][property_name] = eval_feature_expression( - feat, expression) - - if use_rs: - click.echo(u'\u001e', nl=False) - click.echo(json.dumps(feat)) - - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() + source = obj_gen(stdin) + + for i, obj in enumerate(source): + 
features = obj.get("features") or [obj] + + for j, feat in enumerate(features): + + if not overwrite and property_name in feat["properties"]: + raise click.UsageError( + "{} already exists in properties; " + "rename or use --overwrite".format(property_name) + ) + + feat["properties"][property_name] = eval_feature_expression( + feat, expression + ) + + if use_rs: + click.echo("\x1e", nl=False) + + click.echo(json.dumps(feat, cls=ObjectEncoder)) diff -Nru fiona-1.8.22/fiona/fio/cat.py fiona-1.9.5/fiona/fio/cat.py --- fiona-1.8.22/fiona/fio/cat.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/cat.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,8 +1,6 @@ -"""$ fio cat""" - +"""fio-cat""" import json -import logging import warnings import click @@ -10,25 +8,34 @@ import fiona from fiona.transform import transform_geom +from fiona.model import Feature, ObjectEncoder from fiona.fio import options, with_context_env +from fiona.fio.helpers import recursive_round +from fiona.errors import AttributeFilterError - -warnings.simplefilter('default') +warnings.simplefilter("default") # Cat command @click.command(short_help="Concatenate and print the features of datasets") -@click.argument('files', nargs=-1, required=True, metavar="INPUTS...") -@click.option('--layer', default=None, multiple=True, - callback=options.cb_multilayer, - help="Input layer(s), specified as 'fileindex:layer` " - "For example, '1:foo,2:bar' will concatenate layer foo " - "from file 1 and layer bar from file 2") +@click.argument("files", nargs=-1, required=True, metavar="INPUTS...") +@click.option( + "--layer", + default=None, + multiple=True, + callback=options.cb_multilayer, + help="Input layer(s), specified as 'fileindex:layer` " + "For example, '1:foo,2:bar' will concatenate layer foo " + "from file 1 and layer bar from file 2", +) @cligj.precision_opt @cligj.indent_opt @cligj.compact_opt -@click.option('--ignore-errors/--no-ignore-errors', default=False, - help="log errors but do not stop serialization.") +@click.option( + "--ignore-errors/--no-ignore-errors", + default=False, + help="log errors but do not stop serialization.", +) @options.dst_crs_opt @cligj.use_rs_opt @click.option( @@ -38,11 +45,19 @@ help="filter for features intersecting a bounding box", ) @click.option( + "--where", + default=None, + help="attribute filter using SQL where clause", +) +@click.option( "--cut-at-antimeridian", is_flag=True, default=False, help="Optionally cut geometries at the anti-meridian. To be used only for a geographic destination CRS.", ) +@click.option('--where', default=None, + help="attribute filter using SQL where clause") +@options.open_opt @click.pass_context @with_context_env def cat( @@ -55,8 +70,10 @@ dst_crs, use_rs, bbox, + where, cut_at_antimeridian, layer, + open_options, ): """ Concatenate and print the features of input datasets as a sequence of @@ -64,15 +81,13 @@ When working with a multi-layer dataset the first layer is used by default. Use the '--layer' option to select a different layer. 
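
The new --where option, like --bbox, maps straight onto the keyword arguments of Collection.items() used in the loop below; the Python equivalent is roughly the following (a sketch; the dataset and field names are placeholders):

    import fiona

    with fiona.open("data.shp") as src:
        for fid, feat in src.items(
            bbox=(-105.0, 39.0, -104.0, 40.0), where="POP2020 > 100000"
        ):
            print(fid, feat.properties["NAME"])
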
- """ - - logger = logging.getLogger(__name__) - dump_kwds = {'sort_keys': True} + """ + dump_kwds = {"sort_keys": True} if indent: - dump_kwds['indent'] = indent + dump_kwds["indent"] = indent if compact: - dump_kwds['separators'] = (',', ':') + dump_kwds["separators"] = (",", ":") # Validate file idexes provided in --layer option # (can't pass the files to option callback) @@ -87,24 +102,38 @@ try: if bbox: try: - bbox = tuple(map(float, bbox.split(','))) + bbox = tuple(map(float, bbox.split(","))) except ValueError: bbox = json.loads(bbox) + for i, path in enumerate(files, 1): for lyr in layer[str(i)]: - with fiona.open(path, layer=lyr) as src: - for i, feat in src.items(bbox=bbox): - if dst_crs or precision >= 0: - g = transform_geom( - src.crs, dst_crs, feat['geometry'], + with fiona.open(path, layer=lyr, **open_options) as src: + for i, feat in src.items(bbox=bbox, where=where): + geom = feat.geometry + + if dst_crs: + geom = transform_geom( + src.crs, + dst_crs, + geom, antimeridian_cutting=cut_at_antimeridian, - precision=precision) - feat['geometry'] = g - feat['bbox'] = fiona.bounds(g) + ) + + if precision >= 0: + geom = recursive_round(geom, precision) + + feat = Feature( + id=feat.id, + properties=feat.properties, + geometry=geom, + bbox=fiona.bounds(geom), + ) + if use_rs: - click.echo(u'\u001e', nl=False) - click.echo(json.dumps(feat, **dump_kwds)) + click.echo("\x1e", nl=False) + + click.echo(json.dumps(feat, cls=ObjectEncoder, **dump_kwds)) - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() + except AttributeFilterError as e: + raise click.BadParameter("'where' clause is invalid: " + str(e)) diff -Nru fiona-1.8.22/fiona/fio/collect.py fiona-1.9.5/fiona/fio/collect.py --- fiona-1.8.22/fiona/fio/collect.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/collect.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,7 +1,6 @@ -"""$ fio collect""" +"""fio-collect""" - -from functools import partial, wraps +from functools import partial import json import logging @@ -9,6 +8,7 @@ import cligj from fiona.fio import helpers, options, with_context_env +from fiona.model import Geometry, ObjectEncoder from fiona.transform import transform_geom @@ -16,42 +16,72 @@ @cligj.precision_opt @cligj.indent_opt @cligj.compact_opt -@click.option('--record-buffered/--no-record-buffered', default=False, - help="Economical buffering of writes at record, not collection " - "(default), level.") -@click.option('--ignore-errors/--no-ignore-errors', default=False, - help="log errors but do not stop serialization.") +@click.option( + "--record-buffered/--no-record-buffered", + default=False, + help="Economical buffering of writes at record, not collection " + "(default), level.", +) +@click.option( + "--ignore-errors/--no-ignore-errors", + default=False, + help="log errors but do not stop serialization.", +) @options.src_crs_opt -@click.option('--with-ld-context/--without-ld-context', default=False, - help="add a JSON-LD context to JSON output.") -@click.option('--add-ld-context-item', multiple=True, - help="map a term to a URI and add it to the output's JSON LD " - "context.") -@click.option('--parse/--no-parse', default=True, - help="load and dump the geojson feature (default is True)") +@click.option( + "--with-ld-context/--without-ld-context", + default=False, + help="add a JSON-LD context to JSON output.", +) +@click.option( + "--add-ld-context-item", + multiple=True, + help="map a term to a URI and add it to the output's JSON LD " "context.", +) 
+@click.option( + "--parse/--no-parse", + default=True, + help="load and dump the geojson feature (default is True)", +) @click.pass_context @with_context_env -def collect(ctx, precision, indent, compact, record_buffered, ignore_errors, - src_crs, with_ld_context, add_ld_context_item, parse): +def collect( + ctx, + precision, + indent, + compact, + record_buffered, + ignore_errors, + src_crs, + with_ld_context, + add_ld_context_item, + parse, +): """Make a GeoJSON feature collection from a sequence of GeoJSON features and print it.""" logger = logging.getLogger(__name__) - stdin = click.get_text_stream('stdin') - sink = click.get_text_stream('stdout') + stdin = click.get_text_stream("stdin") + sink = click.get_text_stream("stdout") - dump_kwds = {'sort_keys': True} + dump_kwds = {"sort_keys": True} if indent: - dump_kwds['indent'] = indent + dump_kwds["indent"] = indent if compact: - dump_kwds['separators'] = (',', ':') - item_sep = compact and ',' or ', ' + dump_kwds["separators"] = (",", ":") + item_sep = compact and "," or ", " if src_crs: if not parse: raise click.UsageError("Can't specify --src-crs with --no-parse") - transformer = partial(transform_geom, src_crs, 'EPSG:4326', - antimeridian_cutting=True, precision=precision) + transformer = partial( + transform_geom, + src_crs, + "EPSG:4326", + antimeridian_cutting=True, + precision=precision, + ) else: + def transformer(x): return x @@ -60,156 +90,156 @@ # If parsing geojson if parse: # If input is RS-delimited JSON sequence. - if first_line.startswith(u'\x1e'): + if first_line.startswith("\x1e"): + def feature_text_gen(): - buffer = first_line.strip(u'\x1e') + buffer = first_line.strip("\x1e") for line in stdin: - if line.startswith(u'\x1e'): + if line.startswith("\x1e"): if buffer: feat = json.loads(buffer) - feat['geometry'] = transformer(feat['geometry']) - yield json.dumps(feat, **dump_kwds) - buffer = line.strip(u'\x1e') + feat["geometry"] = transformer( + Geometry.from_dict(**feat["geometry"]) + ) + yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds) + buffer = line.strip("\x1e") else: buffer += line else: feat = json.loads(buffer) - feat['geometry'] = transformer(feat['geometry']) - yield json.dumps(feat, **dump_kwds) + feat["geometry"] = transformer( + Geometry.from_dict(**feat["geometry"]) + ) + yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds) + else: + def feature_text_gen(): feat = json.loads(first_line) - feat['geometry'] = transformer(feat['geometry']) - yield json.dumps(feat, **dump_kwds) + feat["geometry"] = transformer(Geometry.from_dict(**feat["geometry"])) + yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds) for line in stdin: feat = json.loads(line) - feat['geometry'] = transformer(feat['geometry']) - yield json.dumps(feat, **dump_kwds) + feat["geometry"] = transformer( + Geometry.from_dict(**feat["geometry"]) + ) + yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds) # If *not* parsing geojson else: # If input is RS-delimited JSON sequence. 
- if first_line.startswith(u'\x1e'): + if first_line.startswith("\x1e"): + def feature_text_gen(): - buffer = first_line.strip(u'\x1e') + buffer = first_line.strip("\x1e") for line in stdin: - if line.startswith(u'\x1e'): + if line.startswith("\x1e"): if buffer: yield buffer - buffer = line.strip(u'\x1e') + buffer = line.strip("\x1e") else: buffer += line else: yield buffer + else: + def feature_text_gen(): yield first_line - for line in stdin: - yield line + yield from stdin - try: - source = feature_text_gen() + source = feature_text_gen() - if record_buffered: - # Buffer GeoJSON data at the feature level for smaller - # memory footprint. - indented = bool(indent) - rec_indent = "\n" + " " * (2 * (indent or 0)) - - collection = { - 'type': 'FeatureCollection', - 'features': []} - if with_ld_context: - collection['@context'] = helpers.make_ld_context( - add_ld_context_item) + if record_buffered: + # Buffer GeoJSON data at the feature level for smaller + # memory footprint. + indented = bool(indent) + rec_indent = "\n" + " " * (2 * (indent or 0)) + + collection = {"type": "FeatureCollection", "features": []} + if with_ld_context: + collection["@context"] = helpers.make_ld_context(add_ld_context_item) - head, tail = json.dumps(collection, **dump_kwds).split('[]') + head, tail = json.dumps(collection, cls=ObjectEncoder, **dump_kwds).split("[]") - sink.write(head) - sink.write("[") + sink.write(head) + sink.write("[") - # Try the first record. + # Try the first record. + try: + i, first = 0, next(source) + if with_ld_context: + first = helpers.id_record(first) + if indented: + sink.write(rec_indent) + sink.write(first.replace("\n", rec_indent)) + except StopIteration: + pass + except Exception as exc: + # Ignoring errors is *not* the default. + if ignore_errors: + logger.error( + "failed to serialize file record %d (%s), " "continuing", i, exc + ) + else: + # Log error and close up the GeoJSON, leaving it + # more or less valid no matter what happens above. + logger.critical( + "failed to serialize file record %d (%s), " "quiting", i, exc + ) + sink.write("]") + sink.write(tail) + if indented: + sink.write("\n") + raise + + # Because trailing commas aren't valid in JSON arrays + # we'll write the item separator before each of the + # remaining features. + for i, rec in enumerate(source, 1): try: - i, first = 0, next(source) if with_ld_context: - first = helpers.id_record(first) + rec = helpers.id_record(rec) if indented: sink.write(rec_indent) - sink.write(first.replace("\n", rec_indent)) - except StopIteration: - pass + sink.write(item_sep) + sink.write(rec.replace("\n", rec_indent)) except Exception as exc: - # Ignoring errors is *not* the default. if ignore_errors: logger.error( - "failed to serialize file record %d (%s), " - "continuing", - i, exc) + "failed to serialize file record %d (%s), " "continuing", + i, + exc, + ) else: - # Log error and close up the GeoJSON, leaving it - # more or less valid no matter what happens above. logger.critical( - "failed to serialize file record %d (%s), " - "quiting", - i, exc) + "failed to serialize file record %d (%s), " "quiting", + i, + exc, + ) sink.write("]") sink.write(tail) if indented: sink.write("\n") raise - # Because trailing commas aren't valid in JSON arrays - # we'll write the item separator before each of the - # remaining features. 
- for i, rec in enumerate(source, 1): - try: - if with_ld_context: - rec = helpers.id_record(rec) - if indented: - sink.write(rec_indent) - sink.write(item_sep) - sink.write(rec.replace("\n", rec_indent)) - except Exception as exc: - if ignore_errors: - logger.error( - "failed to serialize file record %d (%s), " - "continuing", - i, exc) - else: - logger.critical( - "failed to serialize file record %d (%s), " - "quiting", - i, exc) - sink.write("]") - sink.write(tail) - if indented: - sink.write("\n") - raise - - # Close up the GeoJSON after writing all features. - sink.write("]") - sink.write(tail) - if indented: - sink.write("\n") - - else: - # Buffer GeoJSON data at the collection level. The default. - collection = { - 'type': 'FeatureCollection', - 'features': []} - if with_ld_context: - collection['@context'] = helpers.make_ld_context( - add_ld_context_item) - - head, tail = json.dumps(collection, **dump_kwds).split('[]') - sink.write(head) - sink.write("[") - sink.write(",".join(source)) - sink.write("]") - sink.write(tail) + # Close up the GeoJSON after writing all features. + sink.write("]") + sink.write(tail) + if indented: sink.write("\n") - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() + else: + # Buffer GeoJSON data at the collection level. The default. + collection = {"type": "FeatureCollection", "features": []} + if with_ld_context: + collection["@context"] = helpers.make_ld_context(add_ld_context_item) + + head, tail = json.dumps(collection, cls=ObjectEncoder, **dump_kwds).split("[]") + sink.write(head) + sink.write("[") + sink.write(",".join(source)) + sink.write("]") + sink.write(tail) + sink.write("\n") diff -Nru fiona-1.8.22/fiona/fio/distrib.py fiona-1.9.5/fiona/fio/distrib.py --- fiona-1.8.22/fiona/fio/distrib.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/distrib.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,13 +1,12 @@ """$ fio distrib""" - import json -import logging import click import cligj from fiona.fio import helpers, with_context_env +from fiona.model import ObjectEncoder @click.command() @@ -18,22 +17,19 @@ """Distribute features from a collection. Print the features of GeoJSON objects read from stdin. 
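
The command reduces to a small stdin-to-stdout loop; the same idea in miniature (a sketch assuming one JSON text per line, whereas the real command uses helpers.obj_gen to also handle RS-delimited and pretty-printed input):

    import json
    import sys

    for line in sys.stdin:
        obj = json.loads(line)
        for feat in obj.get("features") or [obj]:
            sys.stdout.write(json.dumps(feat) + "\n")
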
+ """ - logger = logging.getLogger(__name__) stdin = click.get_text_stream('stdin') - try: - source = helpers.obj_gen(stdin) - for i, obj in enumerate(source): - obj_id = obj.get('id', 'collection:' + str(i)) - features = obj.get('features') or [obj] - for j, feat in enumerate(features): - if obj.get('type') == 'FeatureCollection': - feat['parent'] = obj_id - feat_id = feat.get('id', 'feature:' + str(i)) - feat['id'] = feat_id - if use_rs: - click.echo(u'\u001e', nl=False) - click.echo(json.dumps(feat)) - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() + source = helpers.obj_gen(stdin) + + for i, obj in enumerate(source): + obj_id = obj.get("id", "collection:" + str(i)) + features = obj.get("features") or [obj] + for j, feat in enumerate(features): + if obj.get("type") == "FeatureCollection": + feat["parent"] = obj_id + feat_id = feat.get("id", "feature:" + str(i)) + feat["id"] = feat_id + if use_rs: + click.echo("\x1e", nl=False) + click.echo(json.dumps(feat, cls=ObjectEncoder)) diff -Nru fiona-1.8.22/fiona/fio/dump.py fiona-1.9.5/fiona/fio/dump.py --- fiona-1.8.22/fiona/fio/dump.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/dump.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,5 +1,4 @@ -"""$ fio dump""" - +"""fio-dump""" from functools import partial import json @@ -10,6 +9,7 @@ import fiona from fiona.fio import helpers, options, with_context_env +from fiona.model import Feature, ObjectEncoder from fiona.transform import transform_geom @@ -33,10 +33,23 @@ @click.option('--add-ld-context-item', multiple=True, help="map a term to a URI and add it to the output's JSON LD " "context.") +@options.open_opt @click.pass_context @with_context_env -def dump(ctx, input, encoding, precision, indent, compact, record_buffered, - ignore_errors, with_ld_context, add_ld_context_item, layer): +def dump( + ctx, + input, + encoding, + precision, + indent, + compact, + record_buffered, + ignore_errors, + with_ld_context, + add_ld_context_item, + layer, + open_options, +): """Dump a dataset either as a GeoJSON feature collection (the default) or a sequence of GeoJSON features.""" @@ -51,68 +64,105 @@ dump_kwds['separators'] = (',', ':') item_sep = compact and ',' or ', ' - open_kwds = {} if encoding: - open_kwds['encoding'] = encoding + open_options["encoding"] = encoding if layer: - open_kwds['layer'] = layer + open_options["layer"] = layer def transformer(crs, feat): - tg = partial(transform_geom, crs, 'EPSG:4326', - antimeridian_cutting=True, precision=precision) - feat['geometry'] = tg(feat['geometry']) - return feat - - try: - with fiona.open(input, **open_kwds) as source: - meta = source.meta - meta['fields'] = dict(source.schema['properties'].items()) - - if record_buffered: - # Buffer GeoJSON data at the feature level for smaller - # memory footprint. - indented = bool(indent) - rec_indent = "\n" + " " * (2 * (indent or 0)) - - collection = { - 'type': 'FeatureCollection', - 'fiona:schema': meta['schema'], - 'fiona:crs': meta['crs'], - 'features': []} + tg = partial( + transform_geom, + crs, + "EPSG:4326", + antimeridian_cutting=True, + precision=precision, + ) + return Feature( + id=feat.id, properties=feat.properties, geometry=tg(feat.geometry) + ) + + with fiona.open(input, **open_options) as source: + meta = source.meta + meta["fields"] = dict(source.schema["properties"].items()) + + if record_buffered: + # Buffer GeoJSON data at the feature level for smaller + # memory footprint. 
+ indented = bool(indent) + rec_indent = "\n" + " " * (2 * (indent or 0)) + + collection = { + "type": "FeatureCollection", + "fiona:schema": meta["schema"], + "fiona:crs": meta["crs"], + "features": [], + } + if with_ld_context: + collection["@context"] = helpers.make_ld_context(add_ld_context_item) + + head, tail = json.dumps(collection, **dump_kwds).split("[]") + + sink.write(head) + sink.write("[") + + itr = iter(source) + + # Try the first record. + try: + i, first = 0, next(itr) + first = transformer(first) if with_ld_context: - collection['@context'] = helpers.make_ld_context( - add_ld_context_item) - - head, tail = json.dumps( - collection, **dump_kwds).split('[]') - - sink.write(head) - sink.write("[") - - itr = iter(source) + first = helpers.id_record(first) + if indented: + sink.write(rec_indent) + sink.write( + json.dumps(first, cls=ObjectEncoder, **dump_kwds).replace( + "\n", rec_indent + ) + ) + except StopIteration: + pass + except Exception as exc: + # Ignoring errors is *not* the default. + if ignore_errors: + logger.error( + "failed to serialize file record %d (%s), " "continuing", i, exc + ) + else: + # Log error and close up the GeoJSON, leaving it + # more or less valid no matter what happens above. + logger.critical( + "failed to serialize file record %d (%s), " "quiting", i, exc + ) + sink.write("]") + sink.write(tail) + if indented: + sink.write("\n") + raise - # Try the first record. + # Because trailing commas aren't valid in JSON arrays + # we'll write the item separator before each of the + # remaining features. + for i, rec in enumerate(itr, 1): + rec = transformer(rec) try: - i, first = 0, next(itr) - first = transformer(first) if with_ld_context: - first = helpers.id_record(first) + rec = helpers.id_record(rec) if indented: sink.write(rec_indent) - sink.write(json.dumps( - first, **dump_kwds).replace("\n", rec_indent)) - except StopIteration: - pass + sink.write(item_sep) + sink.write( + json.dumps(rec, cls=ObjectEncoder, **dump_kwds).replace( + "\n", rec_indent + ) + ) except Exception as exc: - # Ignoring errors is *not* the default. if ignore_errors: logger.error( "failed to serialize file record %d (%s), " "continuing", i, exc) else: - # Log error and close up the GeoJSON, leaving it - # more or less valid no matter what happens above. logger.critical( "failed to serialize file record %d (%s), " "quiting", @@ -123,59 +173,26 @@ sink.write("\n") raise - # Because trailing commas aren't valid in JSON arrays - # we'll write the item separator before each of the - # remaining features. - for i, rec in enumerate(itr, 1): - rec = transformer(rec) - try: - if with_ld_context: - rec = helpers.id_record(rec) - if indented: - sink.write(rec_indent) - sink.write(item_sep) - sink.write(json.dumps( - rec, **dump_kwds).replace("\n", rec_indent)) - except Exception as exc: - if ignore_errors: - logger.error( - "failed to serialize file record %d (%s), " - "continuing", - i, exc) - else: - logger.critical( - "failed to serialize file record %d (%s), " - "quiting", - i, exc) - sink.write("]") - sink.write(tail) - if indented: - sink.write("\n") - raise - - # Close up the GeoJSON after writing all features. - sink.write("]") - sink.write(tail) - if indented: - sink.write("\n") - + # Close up the GeoJSON after writing all features. + sink.write("]") + sink.write(tail) + if indented: + sink.write("\n") + + else: + # Buffer GeoJSON data at the collection level. The default. 
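The record-buffered writer keeps pretty-printed output aligned by re-indenting each serialized feature: every newline in the feature's JSON is replaced so the record nests one level inside the collection's features array. Illustrative sketch:

    import json

    indent = 2
    rec_indent = "\n" + " " * (2 * indent)
    rec = {"type": "Feature", "geometry": None, "properties": {"name": "a"}}
    # Each embedded newline gains 2 * indent spaces, nesting the record
    # inside the surrounding FeatureCollection's "features" array.
    print(json.dumps(rec, indent=indent).replace("\n", rec_indent))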
+ collection = { + "type": "FeatureCollection", + "fiona:schema": meta["schema"], + "fiona:crs": meta["crs"].to_string(), + } + if with_ld_context: + collection["@context"] = helpers.make_ld_context(add_ld_context_item) + collection["features"] = [ + helpers.id_record(transformer(rec)) for rec in source + ] else: - # Buffer GeoJSON data at the collection level. The default. - collection = { - 'type': 'FeatureCollection', - 'fiona:schema': meta['schema'], - 'fiona:crs': meta['crs']} - if with_ld_context: - collection['@context'] = helpers.make_ld_context( - add_ld_context_item) - collection['features'] = [ - helpers.id_record(transformer(rec)) - for rec in source] - else: - collection['features'] = [ - transformer(source.crs, rec) for rec in source] - json.dump(collection, sink, **dump_kwds) - - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() + collection["features"] = [ + transformer(source.crs, rec) for rec in source + ] + json.dump(collection, sink, cls=ObjectEncoder, **dump_kwds) diff -Nru fiona-1.8.22/fiona/fio/env.py fiona-1.9.5/fiona/fio/env.py --- fiona-1.8.22/fiona/fio/env.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/env.py 2023-10-11 23:19:44.000000000 +0000 @@ -6,9 +6,7 @@ import click import fiona -import fiona._loading -with fiona._loading.add_gdal_dll_directories(): - from fiona._env import GDALDataFinder, PROJDataFinder +from fiona._env import GDALDataFinder, PROJDataFinder @click.command(short_help="Print information about the fio environment.") @@ -30,11 +28,11 @@ if key == 'formats': for k, v in sorted(fiona.supported_drivers.items()): modes = ', '.join("'" + m + "'" for m in v) - stdout.write("%s (modes %s)\n" % (k, modes)) + stdout.write(f"{k} (modes {modes})\n") stdout.write('\n') elif key == 'credentials': click.echo(json.dumps(env.session.credentials)) elif key == 'gdal_data': click.echo(os.environ.get('GDAL_DATA') or GDALDataFinder().search()) elif key == 'proj_data': - click.echo(os.environ.get('PROJ_LIB') or PROJDataFinder().search()) + click.echo(os.environ.get('PROJ_DATA', os.environ.get('PROJ_LIB')) or PROJDataFinder().search()) diff -Nru fiona-1.8.22/fiona/fio/filter.py fiona-1.9.5/fiona/fio/filter.py --- fiona-1.8.22/fiona/fio/filter.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/filter.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,6 +1,5 @@ """$ fio filter""" - import json import logging @@ -11,6 +10,9 @@ from fiona.fio import with_context_env +logger = logging.getLogger(__name__) + + @click.command() @click.argument('filter_expression') @use_rs_opt @@ -36,23 +38,17 @@ $ fio cat data.shp \\ | fio filter "f.properties.area > 1000.0" \\ | fio collect > large_polygons.geojson - """ - logger = logging.getLogger(__name__) + """ stdin = click.get_text_stream('stdin') + source = obj_gen(stdin) - try: - source = obj_gen(stdin) - for i, obj in enumerate(source): - features = obj.get('features') or [obj] - for j, feat in enumerate(features): - if not eval_feature_expression(feat, filter_expression): - continue - - if use_rs: - click.echo(u'\u001e', nl=False) - click.echo(json.dumps(feat)) - - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() + for i, obj in enumerate(source): + features = obj.get("features") or [obj] + for j, feat in enumerate(features): + if not eval_feature_expression(feat, filter_expression): + continue + + if use_rs: + click.echo("\x1e", nl=False) + click.echo(json.dumps(feat)) diff -Nru 
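The fio env change above tracks PROJ 9.1's renaming of the PROJ_LIB environment variable to PROJ_DATA; the new name is consulted first, with a fallback to the old one:

    import os

    # Prefer PROJ_DATA (PROJ >= 9.1) and fall back to PROJ_LIB.
    proj_data = os.environ.get("PROJ_DATA", os.environ.get("PROJ_LIB"))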
fiona-1.8.22/fiona/fio/helpers.py fiona-1.9.5/fiona/fio/helpers.py --- fiona-1.8.22/fiona/fio/helpers.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/helpers.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,37 +1,43 @@ +"""Helper objects needed by multiple CLI commands. + """ -Helper objects needed by multiple CLI commands. -""" + from functools import partial import json import math import warnings -from munch import munchify +from fiona.model import Geometry, to_dict +from fiona._vendor.munch import munchify -warnings.simplefilter('default') +warnings.simplefilter("default") -def obj_gen(lines): +def obj_gen(lines, object_hook=None): """Return a generator of JSON objects loaded from ``lines``.""" first_line = next(lines) - if first_line.startswith(u'\x1e'): + if first_line.startswith("\x1e"): + def gen(): - buffer = first_line.strip(u'\x1e') + buffer = first_line.strip("\x1e") for line in lines: - if line.startswith(u'\x1e'): + if line.startswith("\x1e"): if buffer: - yield json.loads(buffer) - buffer = line.strip(u'\x1e') + yield json.loads(buffer, object_hook=object_hook) + buffer = line.strip("\x1e") else: buffer += line else: - yield json.loads(buffer) + yield json.loads(buffer, object_hook=object_hook) + else: + def gen(): - yield json.loads(first_line) + yield json.loads(first_line, object_hook=object_hook) for line in lines: - yield json.loads(line) + yield json.loads(line, object_hook=object_hook) + return gen() @@ -43,22 +49,25 @@ def eval_feature_expression(feature, expression): - safe_dict = {'f': munchify(feature)} - safe_dict.update({ - 'sum': sum, - 'pow': pow, - 'min': min, - 'max': max, - 'math': math, - 'bool': bool, - 'int': partial(nullable, int), - 'str': partial(nullable, str), - 'float': partial(nullable, float), - 'len': partial(nullable, len), - }) + safe_dict = {"f": munchify(to_dict(feature))} + safe_dict.update( + { + "sum": sum, + "pow": pow, + "min": min, + "max": max, + "math": math, + "bool": bool, + "int": partial(nullable, int), + "str": partial(nullable, str), + "float": partial(nullable, float), + "len": partial(nullable, len), + } + ) try: from shapely.geometry import shape - safe_dict['shape'] = shape + + safe_dict["shape"] = shape except ImportError: pass return eval(expression, {"__builtins__": None}, safe_dict) @@ -67,39 +76,33 @@ def make_ld_context(context_items): """Returns a JSON-LD Context object. 
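eval_feature_expression, shown above, evaluates user expressions with builtins disabled and only a small whitelist of callables in scope; munchify wraps the feature so attribute access like f.properties.area works. A simplified sketch using a plain dict (so subscripting stands in for attribute access); the nullable helper here is a stand-in for the one defined above:

    import math
    from functools import partial

    def nullable(converter, value):
        # Stand-in for helpers.nullable: pass None through untouched.
        return None if value is None else converter(value)

    def eval_expression(feature, expression):
        safe = {
            "f": feature, "sum": sum, "min": min, "max": max,
            "math": math, "float": partial(nullable, float),
        }
        return eval(expression, {"__builtins__": None}, safe)

    feat = {"properties": {"area": 1500.0}}
    print(eval_expression(feat, "f['properties']['area'] > 1000.0"))  # True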
- See http://json-ld.org/spec/latest/json-ld.""" + See https://json-ld.org/spec/latest/json-ld/.""" ctx = { - "@context": { - "geojson": "http://ld.geojson.org/vocab#", - "Feature": "geojson:Feature", - "FeatureCollection": "geojson:FeatureCollection", - "GeometryCollection": "geojson:GeometryCollection", - "LineString": "geojson:LineString", - "MultiLineString": "geojson:MultiLineString", - "MultiPoint": "geojson:MultiPoint", - "MultiPolygon": "geojson:MultiPolygon", - "Point": "geojson:Point", - "Polygon": "geojson:Polygon", - "bbox": { - "@container": "@list", - "@id": "geojson:bbox" - }, - "coordinates": "geojson:coordinates", - "datetime": "http://www.w3.org/2006/time#inXSDDateTime", - "description": "http://purl.org/dc/terms/description", - "features": { - "@container": "@set", - "@id": "geojson:features" - }, - "geometry": "geojson:geometry", - "id": "@id", - "properties": "geojson:properties", - "start": "http://www.w3.org/2006/time#hasBeginning", - "stop": "http://www.w3.org/2006/time#hasEnding", - "title": "http://purl.org/dc/terms/title", - "type": "@type", - "when": "geojson:when" - } + "@context": { + "geojson": "http://ld.geojson.org/vocab#", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "bbox": {"@container": "@list", "@id": "geojson:bbox"}, + "coordinates": "geojson:coordinates", + "datetime": "http://www.w3.org/2006/time#inXSDDateTime", + "description": "http://purl.org/dc/terms/description", + "features": {"@container": "@set", "@id": "geojson:features"}, + "geometry": "geojson:geometry", + "id": "@id", + "properties": "geojson:properties", + "start": "http://www.w3.org/2006/time#hasBeginning", + "stop": "http://www.w3.org/2006/time#hasEnding", + "title": "http://purl.org/dc/terms/title", + "type": "@type", + "when": "geojson:when", + } } for item in context_items or []: t, uri = item.split("=") @@ -109,5 +112,23 @@ def id_record(rec): """Converts a record's id to a blank node id and returns the record.""" - rec['id'] = '_:f%s' % rec['id'] + rec["id"] = "_:f%s" % rec["id"] return rec + + +def recursive_round(obj, precision): + """Recursively round coordinates.""" + if precision < 0: + return obj + if getattr(obj, "geometries", None): + return Geometry( + geometries=[recursive_round(part, precision) for part in obj.geometries] + ) + elif getattr(obj, "coordinates", None): + return Geometry( + coordinates=[recursive_round(part, precision) for part in obj.coordinates] + ) + if isinstance(obj, (int, float)): + return round(obj, precision) + else: + return [recursive_round(part, precision) for part in obj] diff -Nru fiona-1.8.22/fiona/fio/info.py fiona-1.9.5/fiona/fio/info.py --- fiona-1.8.22/fiona/fio/info.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/info.py 2023-10-11 23:19:44.000000000 +0000 @@ -12,6 +12,8 @@ from fiona.errors import DriverError from fiona.fio import options, with_context_env +logger = logging.getLogger(__name__) + @click.command() # One or more files. 
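recursive_round above walks Geometry objects; the same idea on bare nested coordinate lists, with the negative-precision check omitted, looks like this (illustrative only):

    def round_coords(obj, precision):
        # Round scalars; recurse into nested sequences.
        if isinstance(obj, (int, float)):
            return round(obj, precision)
        return [round_coords(part, precision) for part in obj]

    print(round_coords([[1.23456, 2.34567], [3.45678, 4.56789]], 2))
    # [[1.23, 2.35], [3.46, 4.57]]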
@@ -34,45 +36,43 @@ "(left, bottom, right, top).") @click.option('--name', 'meta_member', flag_value='name', help="Print the datasource's name.") +@options.open_opt @click.pass_context @with_context_env -def info(ctx, input, indent, meta_member, layer): +def info(ctx, input, indent, meta_member, layer, open_options): """ Print information about a dataset. When working with a multi-layer dataset the first layer is used by default. Use the '--layer' option to select a different layer. + """ + with fiona.open(input, layer=layer, **open_options) as src: + info = src.meta + info.update(name=src.name) - logger = logging.getLogger(__name__) - try: - with fiona.open(input, layer=layer) as src: - info = src.meta - info.update(name=src.name) - try: - info.update(bounds=src.bounds) - except DriverError: - info.update(bounds=None) - logger.debug("Setting 'bounds' to None - driver " - "was not able to calculate bounds") - try: - info.update(count=len(src)) - except TypeError: - info.update(count=None) - logger.debug("Setting 'count' to None/null - layer does " - "not support counting") - proj4 = fiona.crs.to_string(src.crs) - if proj4.startswith('+init=epsg'): - proj4 = proj4.split('=')[1].upper() - info['crs'] = proj4 - if meta_member: - if isinstance(info[meta_member], (list, tuple)): - click.echo(" ".join(map(str, info[meta_member]))) - else: - click.echo(info[meta_member]) - else: - click.echo(json.dumps(info, indent=indent)) + try: + info.update(bounds=src.bounds) + except DriverError: + info.update(bounds=None) + logger.debug( + "Setting 'bounds' to None - driver was not able to calculate bounds" + ) - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() + try: + info.update(count=len(src)) + except TypeError: + info.update(count=None) + logger.debug( + "Setting 'count' to None/null - layer does not support counting" + ) + + info["crs"] = src.crs.to_string() + + if meta_member: + if isinstance(info[meta_member], (list, tuple)): + click.echo(" ".join(map(str, info[meta_member]))) + else: + click.echo(info[meta_member]) + else: + click.echo(json.dumps(info, indent=indent)) diff -Nru fiona-1.8.22/fiona/fio/insp.py fiona-1.9.5/fiona/fio/insp.py --- fiona-1.8.22/fiona/fio/insp.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/insp.py 2023-10-11 23:19:44.000000000 +0000 @@ -2,44 +2,42 @@ import code -import logging import sys import click import fiona -from fiona.fio import with_context_env +from fiona.fio import options, with_context_env @click.command(short_help="Open a dataset and start an interpreter.") -@click.argument('src_path', required=True) -@click.option('--ipython', 'interpreter', flag_value='ipython', - help="Use IPython as interpreter.") +@click.argument("src_path", required=True) +@click.option( + "--ipython", "interpreter", flag_value="ipython", help="Use IPython as interpreter." +) +@options.open_opt @click.pass_context @with_context_env -def insp(ctx, src_path, interpreter): - """Open a collection within an interactive interpreter. - """ - logger = logging.getLogger(__name__) - banner = 'Fiona %s Interactive Inspector (Python %s)\n' \ - 'Type "src.schema", "next(src)", or "help(src)" ' \ - 'for more information.' 
\ - % (fiona.__version__, '.'.join(map(str, sys.version_info[:3]))) - - try: - with fiona.open(src_path) as src: - scope = locals() - if not interpreter: - code.interact(banner, local=scope) - elif interpreter == 'ipython': - import IPython - IPython.InteractiveShell.banner1 = banner - IPython.start_ipython(argv=[], user_ns=scope) - else: - raise click.ClickException( - 'Interpreter {} is unsupported or missing ' - 'dependencies'.format(interpreter)) - - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() +def insp(ctx, src_path, interpreter, open_options): + """Open a collection within an interactive interpreter.""" + banner = ( + "Fiona %s Interactive Inspector (Python %s)\n" + 'Type "src.schema", "next(src)", or "help(src)" ' + "for more information." + % (fiona.__version__, ".".join(map(str, sys.version_info[:3]))) + ) + + with fiona.open(src_path, **open_options) as src: + scope = locals() + if not interpreter: + code.interact(banner, local=scope) + elif interpreter == "ipython": + import IPython + + IPython.InteractiveShell.banner1 = banner + IPython.start_ipython(argv=[], user_ns=scope) + else: + raise click.ClickException( + "Interpreter {} is unsupported or missing " + "dependencies".format(interpreter) + ) diff -Nru fiona-1.8.22/fiona/fio/load.py fiona-1.9.5/fiona/fio/load.py --- fiona-1.8.22/fiona/fio/load.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/load.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,56 +1,26 @@ """$ fio load""" - from functools import partial -import logging import click import cligj import fiona from fiona.fio import options, with_context_env +from fiona.model import Feature, Geometry from fiona.schema import FIELD_TYPES_MAP_REV from fiona.transform import transform_geom -def _cb_key_val(ctx, param, value): - """ - click callback to validate `--opt KEY1=VAL1 --opt KEY2=VAL2` and collect - in a dictionary like the one below, which is what the CLI function receives. - If no value or `None` is received then an empty dictionary is returned. - - { - 'KEY1': 'VAL1', - 'KEY2': 'VAL2' - } - - Note: `==VAL` breaks this as `str.split('=', 1)` is used. - - """ - if not value: - return {} - else: - out = {} - for pair in value: - if "=" not in pair: - raise click.BadParameter( - "Invalid syntax for KEY=VAL arg: {}".format(pair) - ) - else: - k, v = pair.split("=", 1) - k = k.lower() - v = v.lower() - out[k] = None if v.lower() in ["none", "null", "nil", "nada"] else v - return out - - @click.command(short_help="Load GeoJSON to a dataset in another format.") -@click.argument('output', required=True) -@click.option('-f', '--format', '--driver', 'driver', required=True, - help="Output format driver name.") +@click.argument("output", required=True) +@click.option("-f", "--format", "--driver", "driver", help="Output format driver name.") @options.src_crs_opt -@click.option('--dst-crs', '--dst_crs', - help="Destination CRS. Defaults to --src-crs when not given.") +@click.option( + "--dst-crs", + "--dst_crs", + help="Destination CRS. Defaults to --src-crs when not given.", +) @cligj.features_in_arg @click.option( "--layer", @@ -59,52 +29,75 @@ help="Load features into specified layer. Layers use " "zero-based numbering when accessed by index.", ) -@click.option( - "--co", - "--profile", - "creation_options", - metavar="NAME=VALUE", - multiple=True, - callback=_cb_key_val, - help="Driver specific creation options. 
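fio load builds its reprojection callable once with functools.partial, fixing the source and destination CRS, then applies it to every geometry. The same pattern outside the CLI, with example CRS strings assumed for illustration:

    from functools import partial

    from fiona.model import Geometry
    from fiona.transform import transform_geom

    transformer = partial(
        transform_geom, "EPSG:4326", "EPSG:3857", antimeridian_cutting=True
    )
    geom = transformer(
        Geometry.from_dict({"type": "Point", "coordinates": [1.0, 45.0]})
    )
    print(geom.type)  # Point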
See the documentation for the selected output driver for more information.", -) +@options.creation_opt +@options.open_opt +@click.option("--append", is_flag=True, help="Open destination layer in append mode.") @click.pass_context @with_context_env -def load(ctx, output, driver, src_crs, dst_crs, features, layer, creation_options): +def load( + ctx, + output, + driver, + src_crs, + dst_crs, + features, + layer, + creation_options, + open_options, + append, +): """Load features from JSON to a file in another format. The input is a GeoJSON feature collection or optionally a sequence of GeoJSON feature objects. """ - logger = logging.getLogger(__name__) - dst_crs = dst_crs or src_crs if src_crs and dst_crs and src_crs != dst_crs: - transformer = partial(transform_geom, src_crs, dst_crs, - antimeridian_cutting=True, precision=-1) + transformer = partial( + transform_geom, src_crs, dst_crs, antimeridian_cutting=True + ) else: + def transformer(x): - return x + return Geometry.from_dict(**x) def feature_gen(): - for feat in features: - feat['geometry'] = transformer(feat['geometry']) - yield feat + """Convert stream of JSON to features. - try: - source = feature_gen() + Yields + ------ + Feature + + """ + try: + for feat in features: + feat["geometry"] = transformer(Geometry.from_dict(**feat["geometry"])) + yield Feature.from_dict(**feat) + except TypeError: + raise click.ClickException("Invalid input.") - # Use schema of first feature as a template. - # TODO: schema specified on command line? + source = feature_gen() + + # Use schema of first feature as a template. + # TODO: schema specified on command line? + try: first = next(source) - schema = {'geometry': first['geometry']['type']} - schema['properties'] = dict([ - (k, FIELD_TYPES_MAP_REV.get(type(v)) or 'str') - for k, v in first['properties'].items()]) + except TypeError: + raise click.ClickException("Invalid input.") + + # print(first, first.geometry) + schema = {"geometry": first.geometry.type} + schema["properties"] = { + k: FIELD_TYPES_MAP_REV.get(type(v)) or "str" + for k, v in first.properties.items() + } - with fiona.open( + if append: + opener = fiona.open(output, "a", layer=layer, **open_options) + else: + opener = fiona.open( output, "w", driver=driver, @@ -112,10 +105,8 @@ schema=schema, layer=layer, **creation_options - ) as dst: - dst.write(first) - dst.writerecords(source) - - except Exception: - logger.exception("Exception caught during processing") - raise click.Abort() + ) + + with opener as dst: + dst.write(first) + dst.writerecords(source) diff -Nru fiona-1.8.22/fiona/fio/ls.py fiona-1.9.5/fiona/fio/ls.py --- fiona-1.8.22/fiona/fio/ls.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/ls.py 2023-10-11 23:19:44.000000000 +0000 @@ -7,17 +7,18 @@ from cligj import indent_opt import fiona -from fiona.fio import with_context_env +from fiona.fio import options, with_context_env @click.command() @click.argument('input', required=True) @indent_opt +@options.open_opt @click.pass_context @with_context_env -def ls(ctx, input, indent): +def ls(ctx, input, indent, open_options): """ List layers in a datasource. 
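Deriving a schema from the first feature, as fio load does above with FIELD_TYPES_MAP_REV, can be sketched with a hypothetical type map:

    # TYPE_MAP is illustrative; fiona.schema.FIELD_TYPES_MAP_REV is the
    # real mapping used by fio load.
    TYPE_MAP = {int: "int", float: "float", str: "str"}

    def schema_from_feature(feat):
        schema = {"geometry": feat["geometry"]["type"]}
        schema["properties"] = {
            k: TYPE_MAP.get(type(v), "str")
            for k, v in feat["properties"].items()
        }
        return schema

    feat = {
        "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
        "properties": {"name": "a", "value": 1},
    }
    print(schema_from_feature(feat))
    # {'geometry': 'Point', 'properties': {'name': 'str', 'value': 'int'}}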
""" - result = fiona.listlayers(input) + result = fiona.listlayers(input, **open_options) click.echo(json.dumps(result, indent=indent)) diff -Nru fiona-1.8.22/fiona/fio/main.py fiona-1.9.5/fiona/fio/main.py --- fiona-1.8.22/fiona/fio/main.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/main.py 2023-10-11 23:19:44.000000000 +0000 @@ -3,14 +3,19 @@ """ +import itertools import logging -from pkg_resources import iter_entry_points import sys import click from click_plugins import with_plugins from cligj import verbose_opt, quiet_opt +if sys.version_info < (3, 10): + from importlib_metadata import entry_points +else: + from importlib.metadata import entry_points + import fiona from fiona import __version__ as fio_version from fiona.session import AWSSession, DummySession @@ -21,8 +26,12 @@ logging.basicConfig(stream=sys.stderr, level=log_level) -@with_plugins(ep for ep in list(iter_entry_points('fiona.fio_commands')) + - list(iter_entry_points('fiona.fio_plugins'))) +@with_plugins( + itertools.chain( + entry_points(group="fiona.fio_commands"), + entry_points(group="fiona.fio_plugins"), + ) +) @click.group() @verbose_opt @quiet_opt diff -Nru fiona-1.8.22/fiona/fio/options.py fiona-1.9.5/fiona/fio/options.py --- fiona-1.8.22/fiona/fio/options.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/options.py 2023-10-11 23:19:44.000000000 +0000 @@ -34,6 +34,37 @@ return out +def cb_key_val(ctx, param, value): + """ + click callback to validate `--opt KEY1=VAL1 --opt KEY2=VAL2` and collect + in a dictionary like the one below, which is what the CLI function receives. + If no value or `None` is received then an empty dictionary is returned. + + { + 'KEY1': 'VAL1', + 'KEY2': 'VAL2' + } + + Note: `==VAL` breaks this as `str.split('=', 1)` is used. + + """ + if not value: + return {} + else: + out = {} + for pair in value: + if "=" not in pair: + raise click.BadParameter( + f"Invalid syntax for KEY=VAL arg: {pair}" + ) + else: + k, v = pair.split("=", 1) + k = k.lower() + v = v.lower() + out[k] = None if v.lower() in ["none", "null", "nil", "nada"] else v + return out + + def validate_multilayer_file_index(files, layerdict): """ Ensure file indexes provided in the --layer option are valid @@ -41,4 +72,25 @@ for key in layerdict.keys(): if key not in [str(k) for k in range(1, len(files) + 1)]: layer = key + ":" + layerdict[key][0] - raise click.BadParameter("Layer {} does not exist".format(layer)) + raise click.BadParameter(f"Layer {layer} does not exist") + + +creation_opt = click.option( + "--co", + "--profile", + "creation_options", + metavar="NAME=VALUE", + multiple=True, + callback=cb_key_val, + help="Driver specific creation options. See the documentation for the selected output driver for more information.", +) + + +open_opt = click.option( + "--oo", + "open_options", + metavar="NAME=VALUE", + multiple=True, + callback=cb_key_val, + help="Driver specific open options. 
See the documentation for the selected output driver for more information.", +) diff -Nru fiona-1.8.22/fiona/fio/rm.py fiona-1.9.5/fiona/fio/rm.py --- fiona-1.8.22/fiona/fio/rm.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/fio/rm.py 2023-10-11 23:19:44.000000000 +0000 @@ -7,6 +7,7 @@ logger = logging.getLogger(__name__) + @click.command(help="Remove a datasource or an individual layer.") @click.argument("input", required=True) @click.option("--layer", type=str, default=None, required=False, help="Name of layer to remove.") @@ -20,10 +21,10 @@ kind = "layer" if not yes: - click.confirm("The {} will be removed. Are you sure?".format(kind), abort=True) + click.confirm(f"The {kind} will be removed. Are you sure?", abort=True) try: fiona.remove(input, layer=layer) except Exception: - logger.exception("Failed to remove {}.".format(kind)) + logger.exception(f"Failed to remove {kind}.") raise click.Abort() diff -Nru fiona-1.8.22/fiona/gdal.pxi fiona-1.9.5/fiona/gdal.pxi --- fiona-1.8.22/fiona/gdal.pxi 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/gdal.pxi 2023-10-11 23:19:44.000000000 +0000 @@ -2,17 +2,37 @@ from libc.stdio cimport FILE +cdef extern from "gdal_version.h": + int GDAL_COMPUTE_VERSION(int maj, int min, int rev) -cdef extern from "cpl_conv.h" nogil: - void *CPLMalloc(size_t) - void CPLFree(void* ptr) - void CPLSetThreadLocalConfigOption(const char* key, const char* val) +cdef extern from "cpl_conv.h": + void * CPLMalloc (size_t) + void CPLFree (void *ptr) + void CPLSetThreadLocalConfigOption (char *key, char *val) + const char *CPLGetConfigOption (char *, char *) void CPLSetConfigOption(const char* key, const char* val) - const char* CPLGetConfigOption(const char* key, const char* default) + int CPLCheckForFile(char *, char **) const char *CPLFindFile(const char *pszClass, const char *pszBasename) +cdef extern from "cpl_string.h": + char ** CSLAddNameValue (char **list, const char *name, const char *value) + char ** CSLSetNameValue (char **list, const char *name, const char *value) + void CSLDestroy (char **list) + char ** CSLAddString(char **list, const char *string) + int CSLCount(char **papszStrList) + char **CSLDuplicate(char **papszStrList) + int CSLFindName(char **papszStrList, const char *pszName) + int CSLFetchBoolean(char **papszStrList, const char *pszName, int default) + const char *CSLFetchNameValue(char **papszStrList, const char *pszName) + char **CSLMerge(char **first, char **second) + +cdef extern from "sys/stat.h" nogil: + struct stat: + int st_mode + + cdef extern from "cpl_error.h" nogil: ctypedef enum CPLErr: @@ -34,25 +54,11 @@ void CPLPopErrorHandler() -cdef extern from "cpl_string.h" nogil: - - int CSLCount(char **papszStrList) - char **CSLAddString(char **strlist, const char *string) - char **CSLAddNameValue(char **papszStrList, const char *pszName, - const char *pszValue) - char **CSLDuplicate(char **papszStrList) - int CSLFindName(char **papszStrList, const char *pszName) - int CSLFetchBoolean(char **papszStrList, const char *pszName, int default) - const char *CSLFetchNameValue(char **papszStrList, const char *pszName) - char **CSLSetNameValue(char **list, char *name, char *val) - void CSLDestroy(char **list) - char **CSLMerge(char **first, char **second) - - cdef extern from "cpl_vsi.h" nogil: ctypedef int vsi_l_offset ctypedef FILE VSILFILE + ctypedef stat VSIStatBufL unsigned char *VSIGetMemFileBuffer(const char *path, vsi_l_offset *data_len, @@ -65,14 +71,21 @@ int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t 
nSize, size_t nCount, VSILFILE *fp) + char** VSIReadDir(const char* pszPath) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + int VSIMkdir(const char *path, long mode) + int VSIRmdir(const char *path) + int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + int VSI_ISDIR(int mode) + cdef extern from "ogr_srs_api.h" nogil: + ctypedef int OGRErr ctypedef void * OGRCoordinateTransformationH ctypedef void * OGRSpatialReferenceH @@ -92,12 +105,147 @@ const char *OSRGetAuthorityCode(OGRSpatialReferenceH srs, const char *key) int OSRImportFromEPSG(OGRSpatialReferenceH srs, int code) int OSRImportFromProj4(OGRSpatialReferenceH srs, const char *proj) + int OSRImportFromWkt(OGRSpatialReferenceH srs, char **wkt) int OSRIsGeographic(OGRSpatialReferenceH srs) int OSRIsProjected(OGRSpatialReferenceH srs) int OSRIsSame(OGRSpatialReferenceH srs1, OGRSpatialReferenceH srs2) OGRSpatialReferenceH OSRNewSpatialReference(const char *wkt) void OSRRelease(OGRSpatialReferenceH srs) int OSRSetFromUserInput(OGRSpatialReferenceH srs, const char *input) + double OSRGetLinearUnits(OGRSpatialReferenceH srs, char **ppszName) + double OSRGetAngularUnits(OGRSpatialReferenceH srs, char **ppszName) + int OSREPSGTreatsAsLatLong(OGRSpatialReferenceH srs) + int OSREPSGTreatsAsNorthingEasting(OGRSpatialReferenceH srs) + OGRSpatialReferenceH *OSRFindMatches(OGRSpatialReferenceH srs, char **options, int *entries, int **matchConfidence) + void OSRFreeSRSArray(OGRSpatialReferenceH *srs) + ctypedef enum OSRAxisMappingStrategy: + OAMS_TRADITIONAL_GIS_ORDER + + const char* OSRGetName(OGRSpatialReferenceH hSRS) + void OSRSetAxisMappingStrategy(OGRSpatialReferenceH hSRS, OSRAxisMappingStrategy) + void OSRSetPROJSearchPaths(const char *const *papszPaths) + char ** OSRGetPROJSearchPaths() + OGRErr OSRExportToWktEx(OGRSpatialReferenceH, char ** ppszResult, + const char* const* papszOptions) + OGRErr OSRExportToPROJJSON(OGRSpatialReferenceH hSRS, + char ** ppszReturn, + const char* const* papszOptions) + + +cdef extern from "ogr_core.h" nogil: + + ctypedef int OGRErr + char *OGRGeometryTypeToName(int type) + + ctypedef enum OGRwkbGeometryType: + wkbUnknown + wkbPoint + wkbLineString + wkbPolygon + wkbMultiPoint + wkbMultiLineString + wkbMultiPolygon + wkbGeometryCollection + wkbCircularString + wkbCompoundCurve + wkbCurvePolygon + wkbMultiCurve + wkbMultiSurface + wkbCurve + wkbSurface + wkbPolyhedralSurface + wkbTIN + wkbTriangle + wkbNone + wkbLinearRing + wkbCircularStringZ + wkbCompoundCurveZ + wkbCurvePolygonZ + wkbMultiCurveZ + wkbMultiSurfaceZ + wkbCurveZ + wkbSurfaceZ + wkbPolyhedralSurfaceZ + wkbTINZ + wkbTriangleZ + wkbPointM + wkbLineStringM + wkbPolygonM + wkbMultiPointM + wkbMultiLineStringM + wkbMultiPolygonM + wkbGeometryCollectionM + wkbCircularStringM + wkbCompoundCurveM + wkbCurvePolygonM + wkbMultiCurveM + wkbMultiSurfaceM + wkbCurveM + wkbSurfaceM + wkbPolyhedralSurfaceM + wkbTINM + wkbTriangleM + wkbPointZM + wkbLineStringZM + wkbPolygonZM + wkbMultiPointZM + wkbMultiLineStringZM + wkbMultiPolygonZM + wkbGeometryCollectionZM + wkbCircularStringZM + wkbCompoundCurveZM + wkbCurvePolygonZM + wkbMultiCurveZM + wkbMultiSurfaceZM + wkbCurveZM + wkbSurfaceZM + wkbPolyhedralSurfaceZM + wkbTINZM + wkbTriangleZM + wkbPoint25D + wkbLineString25D + wkbPolygon25D + wkbMultiPoint25D + wkbMultiLineString25D + wkbMultiPolygon25D + 
wkbGeometryCollection25D + + ctypedef enum OGRFieldType: + OFTInteger + OFTIntegerList + OFTReal + OFTRealList + OFTString + OFTStringList + OFTWideString + OFTWideStringList + OFTBinary + OFTDate + OFTTime + OFTDateTime + OFTInteger64 + OFTInteger64List + OFTMaxType + + ctypedef int OGRFieldSubType + cdef int OFSTNone = 0 + cdef int OFSTBoolean = 1 + cdef int OFSTInt16 = 2 + cdef int OFSTFloat32 = 3 + cdef int OFSTMaxSubType = 3 + + ctypedef struct OGREnvelope: + double MinX + double MaxX + double MinY + double MaxY + + char * OGRGeometryTypeToName(int) + + + char * ODsCCreateLayer = "CreateLayer" + char * ODsCDeleteLayer = "DeleteLayer" + char * ODsCTransactions = "Transactions" cdef extern from "gdal.h" nogil: @@ -264,6 +412,40 @@ char** GDALGetFileList(GDALDatasetH hDS) CPLErr GDALCopyDatasetFiles (GDALDriverH hDriver, const char * pszNewName, const char * pszOldName) + void * GDALOpenEx(const char * pszFilename, + unsigned int nOpenFlags, + const char *const *papszAllowedDrivers, + const char *const *papszOpenOptions, + const char *const *papszSiblingFiles + ) + int GDAL_OF_UPDATE + int GDAL_OF_READONLY + int GDAL_OF_VECTOR + int GDAL_OF_VERBOSE_ERROR + int GDALDatasetGetLayerCount(void * hds) + void * GDALDatasetGetLayer(void * hDS, int iLayer) + void * GDALDatasetGetLayerByName(void * hDS, char * pszName) + void GDALClose(void * hDS) + void * GDALCreate(void * hDriver, + const char * pszFilename, + int nXSize, + int nYSize, + int nBands, + GDALDataType eBandType, + char ** papszOptions) + void * GDALDatasetCreateLayer(void * hDS, + const char * pszName, + void * hSpatialRef, + int eType, + char ** papszOptions) + int GDALDatasetDeleteLayer(void * hDS, int iLayer) + void GDALFlushCache(void * hDS) + char * GDALGetDriverShortName(void * hDriver) + OGRErr GDALDatasetStartTransaction (void * hDataset, int bForce) + OGRErr GDALDatasetCommitTransaction (void * hDataset) + OGRErr GDALDatasetRollbackTransaction (void * hDataset) + int GDALDatasetTestCapability (void * hDataset, char *) + cdef extern from "ogr_api.h" nogil: @@ -275,8 +457,6 @@ ctypedef void * OGRFeatureH ctypedef void * OGRGeometryH - ctypedef int OGRErr - ctypedef struct OGREnvelope: double MinX double MaxX @@ -314,6 +494,7 @@ double OGR_F_GetFieldAsDouble(OGRFeatureH feature, int n) int OGR_F_GetFieldAsInteger(OGRFeatureH feature, int n) const char *OGR_F_GetFieldAsString(OGRFeatureH feature, int n) + char **OGR_F_GetFieldAsStringList( OGRFeatureH feature, int n) int OGR_F_GetFieldCount(OGRFeatureH feature) OGRFieldDefnH OGR_F_GetFieldDefnRef(OGRFeatureH feature, int n) int OGR_F_GetFieldIndex(OGRFeatureH feature, const char *name) @@ -340,6 +521,7 @@ void OGR_Fld_SetPrecision(OGRFieldDefnH, int n) void OGR_Fld_SetWidth(OGRFieldDefnH, int n) OGRErr OGR_G_AddGeometryDirectly(OGRGeometryH geometry, OGRGeometryH part) + OGRErr OGR_G_RemoveGeometry(OGRGeometryH geometry, int i, int delete) void OGR_G_AddPoint(OGRGeometryH geometry, double x, double y, double z) void OGR_G_AddPoint_2D(OGRGeometryH geometry, double x, double y) void OGR_G_CloseRings(OGRGeometryH geometry) @@ -357,7 +539,7 @@ double OGR_G_GetX(OGRGeometryH geometry, int n) double OGR_G_GetY(OGRGeometryH geometry, int n) double OGR_G_GetZ(OGRGeometryH geometry, int n) - void OGR_G_ImportFromWkb(OGRGeometryH geometry, unsigned char *bytes, + OGRErr OGR_G_ImportFromWkb(OGRGeometryH geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize(OGRGeometryH geometry) OGRErr OGR_L_CreateFeature(OGRLayerH layer, OGRFeatureH feature) @@ -380,6 +562,46 @@ OGRDataSourceH 
OGROpen(const char *path, int mode, void *x) OGRDataSourceH OGROpenShared(const char *path, int mode, void *x) int OGRReleaseDataSource(OGRDataSourceH datasource) + const char * OGR_Dr_GetName (void *driver) + int OGR_Dr_TestCapability (void *driver, const char *) + void * OGR_F_Create (void *featuredefn) + void OGR_F_Destroy (void *feature) + long OGR_F_GetFID (void *feature) + int OGR_F_IsFieldSet (void *feature, int n) + int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) + double OGR_F_GetFieldAsDouble (void *feature, int n) + int OGR_F_GetFieldAsInteger (void *feature, int n) + char * OGR_F_GetFieldAsString (void *feature, int n) + unsigned char * OGR_F_GetFieldAsBinary(void *feature, int n, int *s) + int OGR_F_GetFieldCount (void *feature) + void * OGR_F_GetFieldDefnRef (void *feature, int n) + int OGR_F_GetFieldIndex (void *feature, char *name) + void * OGR_F_GetGeometryRef (void *feature) + void * OGR_F_StealGeometry (void *feature) + void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) + void OGR_F_SetFieldDouble (void *feature, int n, double value) + void OGR_F_SetFieldInteger (void *feature, int n, int value) + void OGR_F_SetFieldString (void *feature, int n, char *value) + void OGR_F_SetFieldBinary (void *feature, int n, int l, unsigned char *value) + void OGR_F_SetFieldNull (void *feature, int n) # new in GDAL 2.2 + int OGR_F_SetGeometryDirectly (void *feature, void *geometry) + void * OGR_FD_Create (char *name) + int OGR_FD_GetFieldCount (void *featuredefn) + void * OGR_FD_GetFieldDefn (void *featuredefn, int n) + int OGR_FD_GetGeomType (void *featuredefn) + char * OGR_FD_GetName (void *featuredefn) + OGRFieldSubType OGR_Fld_GetSubType(void *fielddefn) + void OGR_Fld_SetSubType(void *fielddefn, OGRFieldSubType subtype) + void * OGR_G_ForceToMultiPolygon (void *geometry) + void * OGR_G_ForceToPolygon (void *geometry) + void * OGR_G_Clone(void *geometry) + void * OGR_G_GetLinearGeometry (void *hGeom, double dfMaxAngleStepSizeDegrees, char **papszOptions) + OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) + OGRErr OGR_L_SetAttributeFilter(void *layer, const char*) + OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) + long long OGR_F_GetFieldAsInteger64 (void *feature, int n) + void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) + int OGR_F_IsFieldNull(void *feature, int n) cdef extern from "gdalwarper.h" nogil: @@ -517,8 +739,3 @@ GDALDatasetH hSrcDS, GDALTransformerFunc pfnRawTransformer, void * pTransformArg, double * padfGeoTransformOut, int * pnPixels, int * pnLines, double * padfExtent, int nOptions) - - -cdef extern from "ogr_core.h" nogil: - - char *OGRGeometryTypeToName(int type) diff -Nru fiona-1.8.22/fiona/inspector.py fiona-1.9.5/fiona/inspector.py --- fiona-1.8.22/fiona/inspector.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/inspector.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ - import code import logging import sys @@ -11,31 +10,27 @@ def main(srcfile): - + """Open a dataset in an iteractive session.""" with fiona.drivers(): with fiona.open(srcfile) as src: - code.interact( 'Fiona %s Interactive Inspector (Python %s)\n' 'Type "src.schema", "next(src)", or "help(src)" ' - 'for more information.' % ( - fiona.__version__, '.'.join(map(str, sys.version_info[:3]))), - local=locals()) + "for more information." 
+ % (fiona.__version__, ".".join(map(str, sys.version_info[:3]))), + local=locals(), + ) return 1 + if __name__ == '__main__': - import argparse parser = argparse.ArgumentParser( prog="python -m fiona.inspector", - description="Open a data file and drop into an interactive interpreter") - parser.add_argument( - 'src', - metavar='FILE', - help="Input dataset file name") + description="Open a data file and drop into an interactive interpreter", + ) + parser.add_argument("src", metavar="FILE", help="Input dataset file name") args = parser.parse_args() - main(args.src) - diff -Nru fiona-1.8.22/fiona/io.py fiona-1.9.5/fiona/io.py --- fiona-1.8.22/fiona/io.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/io.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,14 +1,12 @@ """Classes capable of reading and writing collections """ -from collections import OrderedDict import logging -import fiona._loading -with fiona._loading.add_gdal_dll_directories(): - from fiona.ogrext import MemoryFileBase - from fiona.collection import Collection - +from fiona.ogrext import MemoryFileBase, _listdir, _listlayers +from fiona.collection import Collection +from fiona.meta import supports_vsi +from fiona.errors import DriverError log = logging.getLogger(__name__) @@ -22,19 +20,38 @@ MemoryFile created without initial bytes may be written to using either file-like or dataset interfaces. - Examples - -------- + Parameters + ---------- + file_or_bytes : an open Python file, bytes, or None + If not None, the MemoryFile becomes immutable and read-only. + If None, it is write-only. + filename : str + An optional filename. The default is a UUID-based name. + ext : str + An optional file extension. Some format drivers require a + specific value. """ def __init__(self, file_or_bytes=None, filename=None, ext=""): if ext and not ext.startswith("."): ext = "." + ext - super(MemoryFile, self).__init__( + super().__init__( file_or_bytes=file_or_bytes, filename=filename, ext=ext) - def open(self, driver=None, schema=None, crs=None, encoding=None, - layer=None, vfs=None, enabled_drivers=None, crs_wkt=None, - **kwargs): + def open( + self, + mode=None, + driver=None, + schema=None, + crs=None, + encoding=None, + layer=None, + vfs=None, + enabled_drivers=None, + crs_wkt=None, + allow_unsupported_drivers=False, + **kwargs + ): """Open the file and return a Fiona collection object. If data has already been written, the file is opened in 'r' @@ -50,35 +67,84 @@ parameters of `fiona.open()`. """ if self.closed: - raise IOError("I/O operation on closed file.") + raise OSError("I/O operation on closed file.") + + if ( + not allow_unsupported_drivers + and driver is not None + and not supports_vsi(driver) + ): + raise DriverError("Driver {} does not support virtual files.") + + if mode in ('r', 'a') and not self.exists(): + raise OSError("MemoryFile does not exist.") + if layer is None and mode == 'w' and self.exists(): + raise OSError("MemoryFile already exists.") + + if not self.exists() or mode == 'w': + if driver is not None: + self._ensure_extension(driver) + mode = 'w' + elif mode is None: + mode = 'r' + + return Collection( + self.name, + mode, + crs=crs, + driver=driver, + schema=schema, + encoding=encoding, + layer=layer, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + crs_wkt=crs_wkt, + **kwargs + ) + + def listdir(self, path=None): + """List files in a directory. + + Parameters + ---------- + path : URI (str or pathlib.Path) + A dataset resource identifier. 
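MemoryFile's mode resolution above means a freshly created instance opens for writing, while an already-written one defaults to reading. A write-then-read sketch, assuming the GeoJSON driver:

    from fiona.io import MemoryFile
    from fiona.model import Feature

    schema = {"geometry": "Point", "properties": {"name": "str"}}
    with MemoryFile(ext=".geojson") as memfile:
        # First open(): the file does not exist yet, so mode becomes 'w'.
        with memfile.open(driver="GeoJSON", schema=schema) as dst:
            dst.write(Feature.from_dict({
                "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
                "properties": {"name": "origin"},
            }))
        # Second open(): data exists and mode is None, so it reads.
        with memfile.open() as src:
            print(len(src))  # 1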
+ + Returns + ------- + list + A list of filename strings. + + """ + if self.closed: + raise OSError("I/O operation on closed file.") + if path: + vsi_path = "{}/{}".format(self.name, path.lstrip("/")) + else: + vsi_path = f"{self.name}" + return _listdir(vsi_path) + + def listlayers(self, path=None): + """List layer names in their index order + + Parameters + ---------- + path : URI (str or pathlib.Path) + A dataset resource identifier. + + Returns + ------- + list + A list of layer name strings. - if not self.exists(): - self._ensure_extension(driver) - this_schema = schema.copy() - this_schema["properties"] = OrderedDict(schema["properties"]) - return Collection( - self.name, - "w", - crs=crs, - driver=driver, - schema=this_schema, - encoding=encoding, - layer=layer, - enabled_drivers=enabled_drivers, - crs_wkt=crs_wkt, - **kwargs - ) - - elif self.mode in ("r", "r+"): - return Collection( - self.name, - "r", - driver=driver, - encoding=encoding, - layer=layer, - enabled_drivers=enabled_drivers, - **kwargs - ) + """ + if self.closed: + raise OSError("I/O operation on closed file.") + if path: + vsi_path = "{}/{}".format(self.name, path.lstrip("/")) + else: + vsi_path = f"{self.name}" + return _listlayers(vsi_path) def __enter__(self): return self @@ -92,13 +158,33 @@ This allows a zip file containing formatted files to be read without I/O. - """ - def __init__(self, file_or_bytes=None): - super(ZipMemoryFile, self).__init__(file_or_bytes, ext=".zip") + Parameters + ---------- + file_or_bytes : an open Python file, bytes, or None + If not None, the MemoryFile becomes immutable and read-only. If + None, it is write-only. + filename : str + An optional filename. The default is a UUID-based name. + ext : str + An optional file extension. Some format drivers require a + specific value. The default is ".zip". + """ - def open(self, path=None, driver=None, encoding=None, layer=None, - enabled_drivers=None, **kwargs): + def __init__(self, file_or_bytes=None, filename=None, ext=".zip"): + super().__init__(file_or_bytes, filename=filename, ext=ext) + self.name = f"/vsizip{self.name}" + + def open( + self, + path=None, + driver=None, + encoding=None, + layer=None, + enabled_drivers=None, + allow_unsupported_drivers=False, + **kwargs + ): """Open a dataset within the zipped stream. Parameters @@ -112,13 +198,25 @@ A Fiona collection object """ - if self.closed: - raise IOError("I/O operation on closed file.") if path: vsi_path = '/vsizip{0}/{1}'.format(self.name, path.lstrip('/')) else: vsi_path = '/vsizip{0}'.format(self.name) - return Collection(vsi_path, 'r', driver=driver, encoding=encoding, - layer=layer, enabled_drivers=enabled_drivers, - **kwargs) + if self.closed: + raise OSError("I/O operation on closed file.") + if path: + vsi_path = "{}/{}".format(self.name, path.lstrip("/")) + else: + vsi_path = f"{self.name}" + + return Collection( + vsi_path, + "r", + driver=driver, + encoding=encoding, + layer=layer, + enabled_drivers=enabled_drivers, + allow_unsupported_drivers=allow_unsupported_drivers, + **kwargs + ) diff -Nru fiona-1.8.22/fiona/logutils.py fiona-1.9.5/fiona/logutils.py --- fiona-1.8.22/fiona/logutils.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/logutils.py 2023-10-11 23:19:44.000000000 +0000 @@ -9,8 +9,8 @@ At most, one message per field skipped per loop will be passed. 
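listdir and listlayers operate on the in-memory VSI name, and ZipMemoryFile merely prefixes that name with /vsizip. Reading layers from zipped bytes, with "example.zip" as a placeholder archive assumed to hold a single dataset at its root:

    from fiona.io import ZipMemoryFile

    with open("example.zip", "rb") as f:
        with ZipMemoryFile(f.read()) as memfile:
            print(memfile.listlayers())
            with memfile.open() as src:
                print(src.schema)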
""" - def __init__(self, name=""): - super(FieldSkipLogFilter, self).__init__(name) + def __init__(self, name=''): + super().__init__(name) self.seen_msgs = set() def filter(self, record): @@ -24,7 +24,7 @@ return 1 -class LogFiltering(object): +class LogFiltering: def __init__(self, logger, filter): self.logger = logger self.filter = filter diff -Nru fiona-1.8.22/fiona/meta.py fiona-1.9.5/fiona/meta.py --- fiona-1.8.22/fiona/meta.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/fiona/meta.py 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,272 @@ +import logging +import xml.etree.ElementTree as ET + +from fiona.env import require_gdal_version +from fiona.ogrext import _get_metadata_item + +log = logging.getLogger(__name__) + + +class MetadataItem: + # since GDAL 2.0 + CREATION_FIELD_DATA_TYPES = "DMD_CREATIONFIELDDATATYPES" + # since GDAL 2.3 + CREATION_FIELD_DATA_SUB_TYPES = "DMD_CREATIONFIELDDATASUBTYPES" + CREATION_OPTION_LIST = "DMD_CREATIONOPTIONLIST" + LAYER_CREATION_OPTION_LIST = "DS_LAYER_CREATIONOPTIONLIST" + # since GDAL 2.0 + DATASET_OPEN_OPTIONS = "DMD_OPENOPTIONLIST" + # since GDAL 2.0 + EXTENSIONS = "DMD_EXTENSIONS" + EXTENSION = "DMD_EXTENSION" + VIRTUAL_IO = "DCAP_VIRTUALIO" + # since GDAL 2.0 + NOT_NULL_FIELDS = "DCAP_NOTNULL_FIELDS" + # since gdal 2.3 + NOT_NULL_GEOMETRY_FIELDS = "DCAP_NOTNULL_GEOMFIELDS" + # since GDAL 3.2 + UNIQUE_FIELDS = "DCAP_UNIQUE_FIELDS" + # since GDAL 2.0 + DEFAULT_FIELDS = "DCAP_DEFAULT_FIELDS" + OPEN = "DCAP_OPEN" + CREATE = "DCAP_CREATE" + + +def _parse_options(xml): + """Convert metadata xml to dict""" + options = {} + if len(xml) > 0: + + root = ET.fromstring(xml) + for option in root.iter('Option'): + + option_name = option.attrib['name'] + opt = {} + opt.update((k, v) for k, v in option.attrib.items() if not k == 'name') + + values = [] + for value in option.iter('Value'): + values.append(value.text) + if len(values) > 0: + opt['values'] = values + + options[option_name] = opt + + return options + + +@require_gdal_version('2.0') +def dataset_creation_options(driver): + """ Returns dataset creation options for driver + + Parameters + ---------- + driver : str + + Returns + ------- + dict + Dataset creation options + + """ + + xml = _get_metadata_item(driver, MetadataItem.CREATION_OPTION_LIST) + + if xml is None: + return {} + + if len(xml) == 0: + return {} + + return _parse_options(xml) + + +@require_gdal_version('2.0') +def layer_creation_options(driver): + """ Returns layer creation options for driver + + Parameters + ---------- + driver : str + + Returns + ------- + dict + Layer creation options + + """ + xml = _get_metadata_item(driver, MetadataItem.LAYER_CREATION_OPTION_LIST) + + if xml is None: + return {} + + if len(xml) == 0: + return {} + + return _parse_options(xml) + + +@require_gdal_version('2.0') +def dataset_open_options(driver): + """ Returns dataset open options for driver + + Parameters + ---------- + driver : str + + Returns + ------- + dict + Dataset open options + + """ + xml = _get_metadata_item(driver, MetadataItem.DATASET_OPEN_OPTIONS) + + if xml is None: + return {} + + if len(xml) == 0: + return {} + + return _parse_options(xml) + + +@require_gdal_version('2.0') +def print_driver_options(driver): + """ Print driver options for dataset open, dataset creation, and layer creation. 
+ + Parameters + ---------- + driver : str + + """ + + for option_type, options in [("Dataset Open Options", dataset_open_options(driver)), + ("Dataset Creation Options", dataset_creation_options(driver)), + ("Layer Creation Options", layer_creation_options(driver))]: + + print(f"{option_type}:") + if len(options) == 0: + print("\tNo options available.") + else: + for option_name in options: + print(f"\t{option_name}:") + if 'description' in options[option_name]: + print("\t\tDescription: {description}".format(description=options[option_name]['description'])) + if 'type' in options[option_name]: + print("\t\tType: {type}".format(type=options[option_name]['type'])) + if 'values' in options[option_name] and len(options[option_name]['values']) > 0: + print("\t\tAccepted values: {values}".format(values=",".join(options[option_name]['values']))) + for attr_text, attribute in [('Default value', 'default'), + ('Required', 'required'), + ('Alias', 'aliasOf'), + ('Min', 'min'), + ('Max', 'max'), + ('Max size', 'maxsize'), + ('Scope', 'scope'), + ('Alternative configuration option', 'alt_config_option')]: + if attribute in options[option_name]: + print("\t\t{attr_text}: {attribute}".format(attr_text=attr_text, + attribute=options[option_name][attribute])) + print("") + + +@require_gdal_version('2.0') +def extensions(driver): + """ Returns file extensions supported by driver + + Parameters + ---------- + driver : str + + Returns + ------- + list + List with file extensions or None if not specified by driver + + """ + + exts = _get_metadata_item(driver, MetadataItem.EXTENSIONS) + + if exts is None: + return None + + return [ext for ext in exts.split(" ") if len(ext) > 0] + + +def extension(driver): + """ Returns file extension of driver + + Parameters + ---------- + driver : str + + Returns + ------- + str + File extensions or None if not specified by driver + + """ + + return _get_metadata_item(driver, MetadataItem.EXTENSION) + + +@require_gdal_version('2.0') +def supports_vsi(driver): + """ Returns True if driver supports GDAL's VSI*L API + + Parameters + ---------- + driver : str + + Returns + ------- + bool + + """ + virutal_io = _get_metadata_item(driver, MetadataItem.VIRTUAL_IO) + return virutal_io is not None and virutal_io.upper() == "YES" + + +@require_gdal_version('2.0') +def supported_field_types(driver): + """ Returns supported field types + + Parameters + ---------- + driver : str + + Returns + ------- + list + List with supported field types or None if not specified by driver + + """ + field_types_str = _get_metadata_item(driver, MetadataItem.CREATION_FIELD_DATA_TYPES) + + if field_types_str is None: + return None + + return [field_type for field_type in field_types_str.split(" ") if len(field_type) > 0] + + +@require_gdal_version('2.3') +def supported_sub_field_types(driver): + """ Returns supported sub field types + + Parameters + ---------- + driver : str + + Returns + ------- + list + List with supported field types or None if not specified by driver + + """ + field_types_str = _get_metadata_item(driver, MetadataItem.CREATION_FIELD_DATA_SUB_TYPES) + + if field_types_str is None: + return None + + return [field_type for field_type in field_types_str.split(" ") if len(field_type) > 0] diff -Nru fiona-1.8.22/fiona/model.py fiona-1.9.5/fiona/model.py --- fiona-1.8.22/fiona/model.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/fiona/model.py 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,437 @@ +"""Fiona data model""" + +from binascii import hexlify +from collections.abc import 
MutableMapping +from collections import OrderedDict +from enum import Enum +import itertools +from json import JSONEncoder +from warnings import warn + +from fiona.errors import FionaDeprecationWarning + + +class OGRGeometryType(Enum): + Unknown = 0 + Point = 1 + LineString = 2 + Polygon = 3 + MultiPoint = 4 + MultiLineString = 5 + MultiPolygon = 6 + GeometryCollection = 7 + CircularString = 8 + CompoundCurve = 9 + CurvePolygon = 10 + MultiCurve = 11 + MultiSurface = 12 + Curve = 13 + Surface = 14 + PolyhedralSurface = 15 + TIN = 16 + Triangle = 17 + NONE = 100 + LinearRing = 101 + CircularStringZ = 1008 + CompoundCurveZ = 1009 + CurvePolygonZ = 1010 + MultiCurveZ = 1011 + MultiSurfaceZ = 1012 + CurveZ = 1013 + SurfaceZ = 1014 + PolyhedralSurfaceZ = 1015 + TINZ = 1016 + TriangleZ = 1017 + PointM = 2001 + LineStringM = 2002 + PolygonM = 2003 + MultiPointM = 2004 + MultiLineStringM = 2005 + MultiPolygonM = 2006 + GeometryCollectionM = 2007 + CircularStringM = 2008 + CompoundCurveM = 2009 + CurvePolygonM = 2010 + MultiCurveM = 2011 + MultiSurfaceM = 2012 + CurveM = 2013 + SurfaceM = 2014 + PolyhedralSurfaceM = 2015 + TINM = 2016 + TriangleM = 2017 + PointZM = 3001 + LineStringZM = 3002 + PolygonZM = 3003 + MultiPointZM = 3004 + MultiLineStringZM = 3005 + MultiPolygonZM = 3006 + GeometryCollectionZM = 3007 + CircularStringZM = 3008 + CompoundCurveZM = 3009 + CurvePolygonZM = 3010 + MultiCurveZM = 3011 + MultiSurfaceZM = 3012 + CurveZM = 3013 + SurfaceZM = 3014 + PolyhedralSurfaceZM = 3015 + TINZM = 3016 + TriangleZM = 3017 + Point25D = 0x80000001 + LineString25D = 0x80000002 + Polygon25D = 0x80000003 + MultiPoint25D = 0x80000004 + MultiLineString25D = 0x80000005 + MultiPolygon25D = 0x80000006 + GeometryCollection25D = 0x80000007 + + +# Mapping of OGR integer geometry types to GeoJSON type names. +_GEO_TYPES = { + OGRGeometryType.Unknown.value: "Unknown", + OGRGeometryType.Point.value: "Point", + OGRGeometryType.LineString.value: "LineString", + OGRGeometryType.Polygon.value: "Polygon", + OGRGeometryType.MultiPoint.value: "MultiPoint", + OGRGeometryType.MultiLineString.value: "MultiLineString", + OGRGeometryType.MultiPolygon.value: "MultiPolygon", + OGRGeometryType.GeometryCollection.value: "GeometryCollection" +} + +GEOMETRY_TYPES = { + **_GEO_TYPES, + OGRGeometryType.NONE.value: "None", + OGRGeometryType.LinearRing.value: "LinearRing", + OGRGeometryType.Point25D.value: "3D Point", + OGRGeometryType.LineString25D.value: "3D LineString", + OGRGeometryType.Polygon25D.value: "3D Polygon", + OGRGeometryType.MultiPoint25D.value: "3D MultiPoint", + OGRGeometryType.MultiLineString25D.value: "3D MultiLineString", + OGRGeometryType.MultiPolygon25D.value: "3D MultiPolygon", + OGRGeometryType.GeometryCollection25D.value: "3D GeometryCollection", +} + + +class Object(MutableMapping): + """Base class for CRS, geometry, and feature objects + + In Fiona 2.0, the implementation of those objects will change. They + will no longer be dicts or derive from dict, and will lose some + features like mutability and default JSON serialization. + + Object will be used for these objects in Fiona 1.9. This class warns + about future deprecation of features. 
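The large 25D values in the enum above are the 2D codes with the high bit set; OGR calls this flag wkb25DBit. Masking it off recovers the base type:

    # 0x80000000 is OGR's legacy 2.5D flag (wkb25DBit).
    WKB25D_BIT = 0x80000000
    print(0x80000001 & ~WKB25D_BIT)  # 1, i.e. Point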
+ """ + + _delegated_properties = [] + + def __init__(self, **kwds): + self._data = OrderedDict(**kwds) + + def _props(self): + return { + k: getattr(self._delegate, k) + for k in self._delegated_properties + if k is not None # getattr(self._delegate, k) is not None + } + + def __getitem__(self, item): + props = self._props() + props.update(**self._data) + return props[item] + + def __iter__(self): + props = self._props() + return itertools.chain(iter(props), iter(self._data)) + + def __len__(self): + props = self._props() + return len(props) + len(self._data) + + def __setitem__(self, key, value): + warn( + "instances of this class -- CRS, geometry, and feature objects -- will become immutable in fiona version 2.0", + FionaDeprecationWarning, + stacklevel=2, + ) + if key in self._delegated_properties: + setattr(self._delegate, key, value) + else: + self._data[key] = value + + def __delitem__(self, key): + warn( + "instances of this class -- CRS, geometry, and feature objects -- will become immutable in fiona version 2.0", + FionaDeprecationWarning, + stacklevel=2, + ) + if key in self._delegated_properties: + setattr(self._delegate, key, None) + else: + del self._data[key] + + def __eq__(self, other): + return dict(**self) == dict(**other) + + +class _Geometry: + def __init__(self, coordinates=None, type=None, geometries=None): + self.coordinates = coordinates + self.type = type + self.geometries = geometries + + +class Geometry(Object): + """A GeoJSON-like geometry + + Notes + ----- + Delegates coordinates and type properties to an instance of + _Geometry, which will become an extension class in Fiona 2.0. + + """ + + _delegated_properties = ["coordinates", "type", "geometries"] + + def __init__(self, coordinates=None, type=None, geometries=None, **data): + self._delegate = _Geometry( + coordinates=coordinates, type=type, geometries=geometries + ) + super().__init__(**data) + + @classmethod + def from_dict(cls, ob=None, **kwargs): + if ob is not None: + data = dict(getattr(ob, "__geo_interface__", ob)) + data.update(kwargs) + else: + data = kwargs + + if "geometries" in data and data["type"] == "GeometryCollection": + _ = data.pop("coordinates", None) + _ = data.pop("type", None) + return Geometry( + type="GeometryCollection", + geometries=[ + Geometry.from_dict(part) for part in data.pop("geometries") + ], + **data + ) + else: + _ = data.pop("geometries", None) + return Geometry( + type=data.pop("type", None), + coordinates=data.pop("coordinates", []), + **data + ) + + @property + def coordinates(self): + """The geometry's coordinates + + Returns + ------- + Sequence + + """ + return self._delegate.coordinates + + @property + def type(self): + """The geometry's type + + Returns + ------- + str + + """ + return self._delegate.type + + @property + def geometries(self): + """A collection's geometries. + + Returns + ------- + list + + """ + return self._delegate.geometries + + @property + def __geo_interface__(self): + return ObjectEncoder().default(self) + + +class _Feature: + def __init__(self, geometry=None, id=None, properties=None): + self.geometry = geometry + self.id = id + self.properties = properties + + +class Feature(Object): + """A GeoJSON-like feature + + Notes + ----- + Delegates geometry and properties to an instance of _Feature, which + will become an extension class in Fiona 2.0. 
+ + """ + + _delegated_properties = ["geometry", "id", "properties"] + + def __init__(self, geometry=None, id=None, properties=None, **data): + if properties is None: + properties = Properties() + self._delegate = _Feature(geometry=geometry, id=id, properties=properties) + super().__init__(**data) + + @classmethod + def from_dict(cls, ob=None, **kwargs): + if ob is not None: + data = dict(getattr(ob, "__geo_interface__", ob)) + data.update(kwargs) + else: + data = kwargs + geom_data = data.pop("geometry", None) + + if isinstance(geom_data, Geometry): + geom = geom_data + else: + geom = Geometry.from_dict(geom_data) if geom_data is not None else None + + props_data = data.pop("properties", None) + + if isinstance(props_data, Properties): + props = props_data + else: + props = Properties(**props_data) if props_data is not None else None + + fid = data.pop("id", None) + return Feature(geometry=geom, id=fid, properties=props, **data) + + def __eq__(self, other): + return ( + self.geometry == other.geometry + and self.id == other.id + and self.properties == other.properties + ) + + @property + def geometry(self): + """The feature's geometry object + + Returns + ------- + Geometry + + """ + return self._delegate.geometry + + @property + def id(self): + """The feature's id + + Returns + ------ + object + + """ + return self._delegate.id + + @property + def properties(self): + """The feature's properties + + Returns + ------- + object + + """ + return self._delegate.properties + + @property + def type(self): + """The Feature's type + + Returns + ------- + str + + """ + return "Feature" + + @property + def __geo_interface__(self): + return ObjectEncoder().default(self) + + +class Properties(Object): + """A GeoJSON-like feature's properties""" + + def __init__(self, **kwds): + super().__init__(**kwds) + + @classmethod + def from_dict(cls, mapping=None, **kwargs): + if mapping: + return Properties(**mapping, **kwargs) + return Properties(**kwargs) + + +class ObjectEncoder(JSONEncoder): + """Encodes Geometry, Feature, and Properties.""" + + def default(self, o): + if isinstance(o, Object): + o_dict = {k: self.default(v) for k, v in o.items()} + if isinstance(o, Geometry): + if o.type == "GeometryCollection": + _ = o_dict.pop("coordinates", None) + else: + _ = o_dict.pop("geometries", None) + elif isinstance(o, Feature): + o_dict["type"] = "Feature" + return o_dict + elif isinstance(o, bytes): + return hexlify(o) + else: + return o + + +def decode_object(obj): + """A json.loads object_hook + + Parameters + ---------- + obj : dict + A decoded dict. + + Returns + ------- + Feature, Geometry, or dict + + """ + if isinstance(obj, Object): + return obj + else: + obj = obj.get("__geo_interface__", obj) + + _type = obj.get("type", None) + if (_type == "Feature") or "geometry" in obj: + return Feature.from_dict(obj) + elif _type in _GEO_TYPES.values(): + return Geometry.from_dict(obj) + else: + return obj + + +def to_dict(val): + """Converts an object to a dict""" + try: + obj = ObjectEncoder().default(val) + except TypeError: + return val + else: + return obj diff -Nru fiona-1.8.22/fiona/ogrext.pyx fiona-1.9.5/fiona/ogrext.pyx --- fiona-1.8.22/fiona/ogrext.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/ogrext.pyx 2023-10-11 23:19:44.000000000 +0000 @@ -1,7 +1,8 @@ # These are extension functions and classes using the OGR C API. 
- from __future__ import absolute_import +include "gdal.pxi" + import datetime import json import locale @@ -9,13 +10,11 @@ import os import warnings import math +from collections import namedtuple, OrderedDict +from typing import List from uuid import uuid4 -from collections import namedtuple - -from six import integer_types, string_types, text_type - -from fiona._shim cimport * +from fiona.crs cimport CRS, osr_set_traditional_axis_mapping_strategy from fiona._geometry cimport ( GeomBuilder, OGRGeomBuilder, geometry_type_code, normalize_geometry_type_code, base_geometry_type_code) @@ -23,21 +22,22 @@ import fiona from fiona._env import get_gdal_version_num, calc_gdal_version_num, get_gdal_version_tuple -from fiona._err import cpl_errs, FionaNullPointerError, CPLE_BaseError, CPLE_OpenFailedError +from fiona._err import ( + cpl_errs, FionaNullPointerError, CPLE_BaseError, CPLE_AppDefinedError, + CPLE_OpenFailedError) from fiona._geometry import GEOMETRY_TYPES from fiona import compat +from fiona.compat import strencode from fiona.env import Env from fiona.errors import ( DriverError, DriverIOError, SchemaError, CRSError, FionaValueError, TransactionError, GeometryTypeValidationError, DatasetDeleteError, - FeatureWarning, FionaDeprecationWarning) -from fiona.compat import OrderedDict + AttributeFilterError, FeatureWarning, FionaDeprecationWarning, UnsupportedGeometryTypeError) +from fiona.model import decode_object, Feature, Geometry, Properties +from fiona.path import vsi_path from fiona.rfc3339 import parse_date, parse_datetime, parse_time from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType from fiona.schema import FIELD_TYPES, FIELD_TYPES_MAP, normalize_field_type -from fiona.path import vsi_path - -from fiona._shim cimport is_field_null, osr_get_name, osr_set_traditional_axis_mapping_strategy from libc.stdlib cimport malloc, free from libc.string cimport strcmp @@ -45,17 +45,6 @@ from fiona.drvsupport import _driver_supports_timezones -cdef extern from "ogr_api.h" nogil: - - ctypedef void * OGRLayerH - ctypedef void * OGRDataSourceH - ctypedef void * OGRSFDriverH - ctypedef void * OGRFieldDefnH - ctypedef void * OGRFeatureDefnH - ctypedef void * OGRFeatureH - ctypedef void * OGRGeometryH - - log = logging.getLogger(__name__) DEFAULT_TRANSACTION_SIZE = 20000 @@ -95,6 +84,104 @@ OGRERR_INVALID_HANDLE = 8 +cdef bint is_field_null(void *feature, int n): + if OGR_F_IsFieldNull(feature, n): + return True + elif not OGR_F_IsFieldSet(feature, n): + return True + else: + return False + + +cdef void gdal_flush_cache(void *cogr_ds): + with cpl_errs: + GDALFlushCache(cogr_ds) + + +cdef void* gdal_open_vector(char* path_c, int mode, drivers, options) except NULL: + cdef void* cogr_ds = NULL + cdef char **drvs = NULL + cdef void* drv = NULL + cdef char **open_opts = NULL + + flags = GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR + if mode == 1: + flags |= GDAL_OF_UPDATE + else: + flags |= GDAL_OF_READONLY + + if drivers: + for name in drivers: + name_b = name.encode() + name_c = name_b + drv = GDALGetDriverByName(name_c) + if drv != NULL: + drvs = CSLAddString(drvs, name_c) + + for k, v in options.items(): + + if v is not None: + kb = k.upper().encode('utf-8') + + if isinstance(v, bool): + vb = ('ON' if v else 'OFF').encode('utf-8') + else: + vb = str(v).encode('utf-8') + + open_opts = CSLAddNameValue(open_opts, kb, vb) + + open_opts = CSLAddNameValue(open_opts, "VALIDATE_OPEN_OPTIONS", "NO") + + try: + cogr_ds = exc_wrap_pointer( + GDALOpenEx(path_c, flags, drvs, open_opts, NULL) + ) + 
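# Editor's note on the options loop above: Python keyword arguments
# become GDAL open options by upper-casing the key and stringifying the
# value, with booleans mapped to "ON"/"OFF". A pure-Python equivalent
# (the helper and the option names are hypothetical, for illustration):
def _encode_options(options):
    encoded = {}
    for k, v in options.items():
        if v is not None:
            encoded[k.upper()] = ("ON" if v else "OFF") if isinstance(v, bool) else str(v)
    return encoded

assert _encode_options({"list_all_tables": True, "batch_size": 10}) == {
    "LIST_ALL_TABLES": "ON",
    "BATCH_SIZE": "10",
}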
return cogr_ds + except FionaNullPointerError: + raise DriverError("Failed to open dataset (mode={}): {}".format(mode, path_c.decode("utf-8"))) + except CPLE_BaseError as exc: + raise DriverError(str(exc)) + finally: + CSLDestroy(drvs) + CSLDestroy(open_opts) + + +cdef void* gdal_create(void* cogr_driver, const char *path_c, options) except NULL: + cdef char **creation_opts = NULL + cdef void *cogr_ds = NULL + + db = GDALGetDriverShortName(cogr_driver) + + # To avoid a circular import. + from fiona import meta + + option_keys = set(key.upper() for key in options.keys()) + creation_option_keys = option_keys & set(meta.dataset_creation_options(db.decode("utf-8"))) + + for k, v in options.items(): + + if k.upper() in creation_option_keys: + + kb = k.upper().encode('utf-8') + + if isinstance(v, bool): + vb = ('ON' if v else 'OFF').encode('utf-8') + else: + vb = str(v).encode('utf-8') + + creation_opts = CSLAddNameValue(creation_opts, kb, vb) + + try: + return exc_wrap_pointer(GDALCreate(cogr_driver, path_c, 0, 0, 0, GDT_Unknown, creation_opts)) + except FionaNullPointerError: + raise DriverError("Failed to create dataset: {}".format(path_c.decode("utf-8"))) + except CPLE_BaseError as exc: + raise DriverError(str(exc)) + finally: + CSLDestroy(creation_opts) + + + def _explode(coords): """Explode a GeoJSON geometry's coordinates object and yield coordinate tuples. As long as the input is conforming, the type of @@ -164,11 +251,13 @@ cdef void *fdefn = NULL cdef int i cdef unsigned char *data = NULL + cdef char **string_list = NULL + cdef int string_list_index = 0 cdef int l cdef int retval cdef int fieldsubtype cdef const char *key_c = NULL - # Parameters for get_field_as_datetime + # Parameters for OGR_F_GetFieldAsDateTimeEx cdef int y = 0 cdef int m = 0 cdef int d = 0 @@ -180,11 +269,6 @@ # Skeleton of the feature to be returned. 
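# Editor's sketch of the generator documented above. _explode() flattens
# arbitrarily nested GeoJSON coordinate arrays into position tuples; the
# expected output below is inferred from its docstring.
ring = [[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0)]]
assert list(_explode(ring)) == [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0)]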
fid = OGR_F_GetFID(feature) props = OrderedDict() - fiona_feature = { - "type": "Feature", - "id": str(fid), - "properties": props, - } ignore_fields = set(ignore_fields or []) @@ -205,7 +289,7 @@ continue fieldtypename = FIELD_TYPES[OGR_Fld_GetType(fdefn)] - fieldsubtype = get_field_subtype(fdefn) + fieldsubtype = OGR_Fld_GetSubType(fdefn) if not fieldtypename: log.warning( "Skipping field %s: invalid type %s", @@ -234,9 +318,9 @@ elif fieldtype is float: props[key] = OGR_F_GetFieldAsDouble(feature, i) - elif fieldtype is text_type: + elif fieldtype is str: + val = OGR_F_GetFieldAsString(feature, i) try: - val = OGR_F_GetFieldAsString(feature, i) val = val.decode(encoding) except UnicodeDecodeError: log.warning( @@ -254,7 +338,7 @@ props[key] = val elif fieldtype in (FionaDateType, FionaTimeType, FionaDateTimeType): - retval = get_field_as_datetime(feature, i, &y, &m, &d, &hh, &mm, &fss, &tz) + retval = OGR_F_GetFieldAsDateTimeEx(feature, i, &y, &m, &d, &hh, &mm, &fss, &tz) ms, ss = math.modf(fss) ss = int(ss) ms = int(round(ms * 10**6)) @@ -280,49 +364,31 @@ elif fieldtype is bytes: data = OGR_F_GetFieldAsBinary(feature, i, &l) props[key] = data[:l] - + elif fieldtype is List[str]: + string_list = OGR_F_GetFieldAsStringList(feature, i) + string_list_index = 0 + props[key] = [] + if string_list != NULL: + while string_list[string_list_index] != NULL: + val = string_list[string_list_index] + try: + val = val.decode(encoding) + except UnicodeDecodeError: + log.warning( + "Failed to decode %s using %s codec", val, encoding + ) + props[key].append(val) + string_list_index += 1 else: - log.debug("%s: None, fieldtype: %r, %r" % (key, fieldtype, fieldtype in string_types)) props[key] = None cdef void *cogr_geometry = NULL - cdef void *org_geometry = NULL - + geom = None if not ignore_geometry: cogr_geometry = OGR_F_GetGeometryRef(feature) + geom = GeomBuilder().build_from_feature(feature) - if cogr_geometry is not NULL: - - code = base_geometry_type_code(OGR_G_GetGeometryType(cogr_geometry)) - - if 8 <= code <= 14: # Curves. - cogr_geometry = get_linear_geometry(cogr_geometry) - geom = GeomBuilder().build(cogr_geometry) - OGR_G_DestroyGeometry(cogr_geometry) - - elif 15 <= code <= 17: - # We steal the geometry: the geometry of the in-memory feature is now null - # and we are responsible for cogr_geometry. - org_geometry = OGR_F_StealGeometry(feature) - - if code in (15, 16): - cogr_geometry = OGR_G_ForceToMultiPolygon(org_geometry) - elif code == 17: - cogr_geometry = OGR_G_ForceToPolygon(org_geometry) - - geom = GeomBuilder().build(cogr_geometry) - OGR_G_DestroyGeometry(cogr_geometry) - - else: - geom = GeomBuilder().build(cogr_geometry) - - fiona_feature["geometry"] = geom - - else: - - fiona_feature["geometry"] = None - - return fiona_feature + return Feature(id=str(fid), properties=Properties(**props), geometry=geom) cdef class OGRFeatureBuilder: @@ -331,59 +397,46 @@ Allocates one OGR Feature which should be destroyed by the caller. Borrows a layer definition from the collection. 
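# Editor's note: string-list fields (OGR's OFTStringList) are read back
# as Python lists and reported in schemas as "List[str]", new in 1.9.
# Sketch, assuming a file "lists.geojson" exists and its "tags" property
# holds JSON arrays of strings:
import fiona

with fiona.open("lists.geojson") as src:
    assert src.schema["properties"]["tags"] == "List[str]"
    first = next(iter(src))
    assert isinstance(first["properties"]["tags"], list)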
- """ + """ cdef void * build(self, feature, collection) except NULL: cdef void *cogr_geometry = NULL cdef const char *string_c = NULL - cdef WritingSession session - session = collection.session + cdef WritingSession session = collection.session cdef void *cogr_layer = session.cogr_layer + cdef void *cogr_featuredefn = OGR_L_GetLayerDefn(cogr_layer) + cdef void *cogr_feature = OGR_F_Create(cogr_featuredefn) + if cogr_layer == NULL: raise ValueError("Null layer") - cdef void *cogr_featuredefn = OGR_L_GetLayerDefn(cogr_layer) + if cogr_featuredefn == NULL: raise ValueError("Null feature definition") - cdef void *cogr_feature = OGR_F_Create(cogr_featuredefn) + if cogr_feature == NULL: raise ValueError("Null feature") - if feature['geometry'] is not None: - cogr_geometry = OGRGeomBuilder().build( - feature['geometry']) + if feature.geometry is not None: + cogr_geometry = OGRGeomBuilder().build(feature.geometry) exc_wrap_int(OGR_F_SetGeometryDirectly(cogr_feature, cogr_geometry)) # OGR_F_SetFieldString takes encoded strings ('bytes' in Python 3). encoding = session._get_internal_encoding() - for key, value in feature['properties'].items(): - log.debug( - "Looking up %s in %s", key, repr(session._schema_mapping)) - ogr_key = session._schema_mapping[key] - - schema_type = normalize_field_type(collection.schema['properties'][key]) + for key, value in feature.properties.items(): + i = session._schema_mapping_index[key] - log.debug("Normalizing schema type for key %r in schema %r to %r", key, collection.schema['properties'], schema_type) - - try: - key_bytes = ogr_key.encode(encoding) - except UnicodeDecodeError: - log.warning("Failed to encode %s using %s codec", key, encoding) - key_bytes = ogr_key - key_c = key_bytes - i = OGR_F_GetFieldIndex(cogr_feature, key_c) if i < 0: continue + schema_type = session._schema_normalized_field_types[key] + # Special case: serialize dicts to assist OGR. if isinstance(value, dict): value = json.dumps(value) # Continue over the standard OGR types. 
- if isinstance(value, integer_types): - - log.debug("Setting field %r, type %r, to value %r", i, schema_type, value) - + if isinstance(value, int): if schema_type == 'int32': OGR_F_SetFieldInteger(cogr_feature, i, value) else: @@ -392,7 +445,7 @@ elif isinstance(value, float): OGR_F_SetFieldDouble(cogr_feature, i, value) elif schema_type in ['date', 'time', 'datetime'] and value is not None: - if isinstance(value, string_types): + if isinstance(value, str): if schema_type == 'date': y, m, d, hh, mm, ss, ms, tz = parse_date(value) elif schema_type == 'time': @@ -437,7 +490,7 @@ del d_utc, d_tz # tzinfo: (0=unknown, 100=GMT, 101=GMT+15minute, 99=GMT-15minute), or NULL - if tz is not None: + if tz is not None: tzinfo = int(tz / 15.0 + 100) else: tzinfo = 0 @@ -445,26 +498,20 @@ # Add microseconds to seconds ss += ms / 10**6 - set_field_datetime(cogr_feature, i, y, m, d, hh, mm, ss, tzinfo) + OGR_F_SetFieldDateTimeEx(cogr_feature, i, y, m, d, hh, mm, ss, tzinfo) elif isinstance(value, bytes) and schema_type == "bytes": string_c = value OGR_F_SetFieldBinary(cogr_feature, i, len(value), string_c) - elif isinstance(value, string_types): - try: - value_bytes = value.encode(encoding) - except UnicodeDecodeError: - log.warning( - "Failed to encode %s using %s codec", value, encoding) - value_bytes = value + elif isinstance(value, str): + value_bytes = strencode(value, encoding) string_c = value_bytes OGR_F_SetFieldString(cogr_feature, i, string_c) elif value is None: - set_field_null(cogr_feature, i) + OGR_F_SetFieldNull(cogr_feature, i) else: raise ValueError("Invalid field type %s" % type(value)) - log.debug("Set field %s: %r" % (key, value)) return cogr_feature @@ -475,8 +522,9 @@ cogr_feature = NULL -def featureRT(feature, collection): +def featureRT(feat, collection): # For testing purposes only, leaks the JSON data + feature = decode_object(feat) cdef void *cogr_feature = OGRFeatureBuilder().build(feature, collection) cdef void *cogr_geometry = OGR_F_GetGeometryRef(cogr_feature) if cogr_geometry == NULL: @@ -539,7 +587,7 @@ self.cogr_ds = gdal_open_vector(path_c, 0, drivers, kwargs) - if isinstance(collection.name, string_types): + if isinstance(collection.name, str): name_b = collection.name.encode('utf-8') name_c = name_b self.cogr_layer = GDALDatasetGetLayerByName(self.cogr_ds, name_c) @@ -554,27 +602,46 @@ encoding = self._get_internal_encoding() - if collection.ignore_fields: + if collection.ignore_fields or collection.include_fields is not None: if not OGR_L_TestCapability(self.cogr_layer, OLC_IGNOREFIELDS): raise DriverError("Driver does not support ignore_fields") + + self.collection = collection + + if self.collection.include_fields is not None: + self.collection.ignore_fields = list( + set(self.get_schema()["properties"]) - set(collection.include_fields) + ) + + if self.collection.ignore_fields: try: - for name in collection.ignore_fields: + for name in self.collection.ignore_fields: try: name_b = name.encode(encoding) except AttributeError: - raise TypeError("Ignored field \"{}\" has type \"{}\", expected string".format(name, name.__class__.__name__)) - ignore_fields = CSLAddString(ignore_fields, name_b) + raise TypeError( + "Ignored field \"{}\" has type \"{}\", expected string".format( + name, name.__class__.__name__ + ) + ) + else: + ignore_fields = CSLAddString(ignore_fields, name_b) + OGR_L_SetIgnoredFields(self.cogr_layer, ignore_fields) + finally: CSLDestroy(ignore_fields) - self.collection = collection - cpdef stop(self): self.cogr_layer = NULL if self.cogr_ds != NULL: - 
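# Editor's worked example for the tzinfo encoding earlier in this hunk:
# OGR stores timezones in 15-minute units offset from 100 (100 = GMT,
# 0 = unknown), while fiona's rfc3339 parsers return offsets in minutes.
# encode_tzinfo is a hypothetical helper mirroring the expression above.
def encode_tzinfo(offset_minutes):
    return int(offset_minutes / 15.0 + 100) if offset_minutes is not None else 0

assert encode_tzinfo(0) == 100     # GMT
assert encode_tzinfo(15) == 101    # GMT+15minute
assert encode_tzinfo(120) == 108   # UTC+02:00
assert encode_tzinfo(-330) == 78   # UTC-05:30
assert encode_tzinfo(None) == 0    # unknown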
GDALClose(self.cogr_ds) - self.cogr_ds = NULL + try: + with cpl_errs: + GDALClose(self.cogr_ds) + except CPLE_BaseError as exc: + raise DriverError(str(exc)) + finally: + self.cogr_ds = NULL def get_fileencoding(self): """DEPRECATED""" @@ -640,12 +707,27 @@ return driver_name.decode() def get_schema(self): + """Get a dictionary representation of a collection's schema. + + The schema dict contains "geometry" and "properties" items. + + Returns + ------- + dict + + Warnings + -------- + Fiona 1.9 does not support multiple fields with the same + name. When encountered, a warning message is logged and the + field is skipped. + + """ cdef int i - cdef int n + cdef int num_fields cdef void *cogr_featuredefn = NULL cdef void *cogr_fielddefn = NULL cdef const char *key_c - props = [] + props = {} if self.cogr_layer == NULL: raise ValueError("Null layer") @@ -656,31 +738,46 @@ ignore_fields = set() cogr_featuredefn = OGR_L_GetLayerDefn(self.cogr_layer) + if cogr_featuredefn == NULL: raise ValueError("Null feature definition") encoding = self._get_internal_encoding() - n = OGR_FD_GetFieldCount(cogr_featuredefn) + num_fields = OGR_FD_GetFieldCount(cogr_featuredefn) - for i from 0 <= i < n: + for i from 0 <= i < num_fields: cogr_fielddefn = OGR_FD_GetFieldDefn(cogr_featuredefn, i) + if cogr_fielddefn == NULL: raise ValueError("NULL field definition at index {}".format(i)) key_c = OGR_Fld_GetNameRef(cogr_fielddefn) + if key_c == NULL: raise ValueError("NULL field name reference at index {}".format(i)) + key_b = key_c key = key_b.decode(encoding) + if not key: warnings.warn("Empty field name at index {}".format(i), FeatureWarning) if key in ignore_fields: - log.debug("By request, ignoring field %r", key) + continue + + # See gh-1178 for an example of a pathological collection + # with multiple identically named fields. + if key in props: + log.warning( + "Field name collision detected, field is skipped: i=%r, key=%r", + i, + key + ) continue fieldtypename = FIELD_TYPES[OGR_Fld_GetType(cogr_fielddefn)] + if not fieldtypename: log.warning( "Skipping field %s: invalid type %s", @@ -689,35 +786,46 @@ continue val = fieldtypename + if fieldtypename == 'float': fmt = "" width = OGR_Fld_GetWidth(cogr_fielddefn) + if width: # and width != 24: - fmt = ":%d" % width + fmt = ":{:d}".format(width) + precision = OGR_Fld_GetPrecision(cogr_fielddefn) + if precision: # and precision != 15: - fmt += ".%d" % precision + fmt += ".{:d}".format(precision) + val = "float" + fmt + elif fieldtypename in ('int32', 'int64'): fmt = "" width = OGR_Fld_GetWidth(cogr_fielddefn) + if width: - fmt = ":%d" % width + fmt = ":{:d}".format(width) + val = 'int' + fmt + elif fieldtypename == 'str': fmt = "" width = OGR_Fld_GetWidth(cogr_fielddefn) + if width: - fmt = ":%d" % width + fmt = ":{:d}".format(width) + val = fieldtypename + fmt - props.append((key, val)) + # Store the field name and description value.
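# Editor's sketch of the schema strings built above: a type name, then
# an optional width (and, for floats, precision) after a colon, e.g.
# "str:80", "int:10", or "float:24.15". The writer side later in this
# diff parses them back the same way:
value, fmt = "float:24.15".split(":")
width, precision = map(int, fmt.split("."))
assert (value, width, precision) == ("float", 24, 15)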
+ props[key] = val - ret = {"properties": OrderedDict(props)} + ret = {"properties": props} if not self.collection.ignore_geometry: - code = normalize_geometry_type_code( - OGR_FD_GetGeomType(cogr_featuredefn)) + code = normalize_geometry_type_code(OGR_FD_GetGeomType(cogr_featuredefn)) ret["geometry"] = GEOMETRY_TYPES[code] return ret @@ -730,81 +838,11 @@ CRS """ - cdef char *proj_c = NULL - cdef const char *auth_key = NULL - cdef const char *auth_val = NULL - cdef void *cogr_crs = NULL - - if self.cogr_layer == NULL: - raise ValueError("Null layer") - - try: - cogr_crs = exc_wrap_pointer(OGR_L_GetSpatialRef(self.cogr_layer)) - # TODO: we don't intend to use try/except for flow control - # this is a work around for a GDAL issue. - except FionaNullPointerError: - log.debug("Layer has no coordinate system") - - if cogr_crs is not NULL: - - log.debug("Got coordinate system") - crs = {} - - try: - - retval = OSRAutoIdentifyEPSG(cogr_crs) - if retval > 0: - log.info("Failed to auto identify EPSG: %d", retval) - - try: - auth_key = exc_wrap_pointer(OSRGetAuthorityName(cogr_crs, NULL)) - auth_val = exc_wrap_pointer(OSRGetAuthorityCode(cogr_crs, NULL)) - - except CPLE_BaseError as exc: - log.debug("{}".format(exc)) - - if auth_key != NULL and auth_val != NULL: - key_b = auth_key - key = key_b.decode('utf-8') - if key == 'EPSG': - val_b = auth_val - val = val_b.decode('utf-8') - crs['init'] = "epsg:" + val - - else: - OSRExportToProj4(cogr_crs, &proj_c) - if proj_c == NULL: - raise ValueError("Null projection") - proj_b = proj_c - log.debug("Params: %s", proj_b) - value = proj_b.decode() - value = value.strip() - for param in value.split(): - kv = param.split("=") - if len(kv) == 2: - k, v = kv - try: - v = float(v) - if v % 1 == 0: - v = int(v) - except ValueError: - # Leave v as a string - pass - elif len(kv) == 1: - k, v = kv[0], True - else: - raise ValueError("Unexpected proj parameter %s" % param) - k = k.lstrip("+") - crs[k] = v - - finally: - CPLFree(proj_c) - return crs - + wkt = self.get_crs_wkt() + if not wkt: + return CRS() else: - log.debug("Projection not found (cogr_crs was NULL)") - - return {} + return CRS.from_user_input(wkt) def get_crs_wkt(self): cdef char *proj_c = NULL @@ -856,14 +894,13 @@ if result != OGRERR_NONE: raise DriverError("Driver was not able to calculate bounds") return (extent.MinX, extent.MinY, extent.MaxX, extent.MaxY) - - + cdef int _get_feature_count(self, force=0): if self.cogr_layer == NULL: raise ValueError("Null layer") + self.cursor_interrupted = True return OGR_L_GetFeatureCount(self.cogr_layer, force) - def has_feature(self, fid): """Provides access to feature data by FID. @@ -909,7 +946,6 @@ if isinstance(item, slice): warnings.warn("Collection slicing is deprecated and will be disabled in a future version.", FionaDeprecationWarning) itr = Iterator(self.collection, item.start, item.stop, item.step) - log.debug("Slice: %r", item) return list(itr) elif isinstance(item, int): index = item @@ -941,9 +977,81 @@ return 0 + def tags(self, ns=None): + """Returns a dict containing copies of the dataset or layers's + tags. Tags are pairs of key and value strings. Tags belong to + namespaces. The standard namespaces are: default (None) and + 'IMAGE_STRUCTURE'. Applications can create their own additional + namespaces. + + Parameters + ---------- + ns: str, optional + Can be used to select a namespace other than the default. 
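# Editor's sketch of the tag API as I understand it is surfaced on 1.9
# collections; the file name, namespace, and values are illustrative,
# and the dataset must exist and use a driver with metadata support
# (e.g. GPKG).
import fiona

with fiona.open("example.gpkg", "a") as colxn:
    colxn.update_tags({"generator": "fiona-1.9"}, ns="my_namespace")
    assert colxn.tags(ns="my_namespace") == {"generator": "fiona-1.9"}
    assert colxn.get_tag_item("generator", ns="my_namespace") == "fiona-1.9"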
+ + Returns + ------- + dict + """ + cdef GDALMajorObjectH obj = NULL + if self.cogr_layer != NULL: + obj = self.cogr_layer + else: + obj = self.cogr_ds + + cdef const char *domain = NULL + if ns: + ns = ns.encode('utf-8') + domain = ns + + cdef char **metadata = NULL + metadata = GDALGetMetadata(obj, domain) + num_items = CSLCount(metadata) + + return dict(metadata[i].decode('utf-8').split('=', 1) for i in range(num_items)) + + + def get_tag_item(self, key, ns=None): + """Returns tag item value + + Parameters + ---------- + key: str + The key for the metadata item to fetch. + ns: str, optional + Used to select a namespace other than the default. + + Returns + ------- + str + """ + + key = key.encode('utf-8') + cdef const char *name = key + + cdef const char *domain = NULL + if ns: + ns = ns.encode('utf-8') + domain = ns + + cdef GDALMajorObjectH obj = NULL + if self.cogr_layer != NULL: + obj = self.cogr_layer + else: + obj = self.cogr_ds + + cdef char *value = NULL + value = GDALGetMetadataItem(obj, name, domain) + if value == NULL: + return None + return value.decode("utf-8") + + cdef class WritingSession(Session): cdef object _schema_mapping + cdef object _schema_mapping_index + cdef object _schema_normalized_field_types def start(self, collection, **kwargs): cdef OGRSpatialReferenceH cogr_srs = NULL @@ -962,19 +1070,15 @@ if collection.mode == 'a': - if not os.path.exists(path): - raise OSError("No such file or directory %s" % path) - - try: - path_b = path.encode('utf-8') - except UnicodeDecodeError: - path_b = path + path_b = strencode(path) path_c = path_b + if not CPLCheckForFile(path_c, NULL): + raise OSError("No such file or directory %s" % path) try: self.cogr_ds = gdal_open_vector(path_c, 1, None, kwargs) - if isinstance(collection.name, string_types): + if isinstance(collection.name, str): name_b = collection.name.encode('utf-8') name_c = name_b self.cogr_layer = exc_wrap_pointer(GDALDatasetGetLayerByName(self.cogr_ds, name_c)) @@ -986,7 +1090,7 @@ GDALClose(self.cogr_ds) self.cogr_ds = NULL self.cogr_layer = NULL - raise DriverError(u"{}".format(exc)) + raise DriverError(str(exc)) else: self._fileencoding = userencoding or self._get_fallback_encoding() @@ -994,11 +1098,7 @@ before_fields = self.get_schema()['properties'] elif collection.mode == 'w': - - try: - path_b = path.encode('utf-8') - except UnicodeDecodeError: - path_b = path + path_b = strencode(path) path_c = path_b driver_b = collection.driver.encode() @@ -1033,7 +1133,7 @@ cogr_ds = gdal_create(cogr_driver, path_c, kwargs) else: # check capability of creating a new layer in the existing dataset - capability = check_capability_create_layer(cogr_ds) + capability = GDALDatasetTestCapability(cogr_ds, ODsCCreateLayer) if not capability or collection.name is None: # unable to use existing dataset, recreate it log.debug("Unable to use existing dataset: capability=%r, name=%r", capability, collection.name) @@ -1060,7 +1160,7 @@ GDALClose(self.cogr_ds) self.cogr_ds = NULL self.cogr_layer = NULL - raise CRSError(u"{}".format(exc)) + raise CRSError(str(exc)) # Determine which encoding to use. 
The encoding parameter given to # the collection constructor takes highest precedence, then @@ -1079,13 +1179,13 @@ layer_count = GDALDatasetGetLayerCount(self.cogr_ds) layer_names = [] for i in range(layer_count): - cogr_layer = GDALDatasetGetLayer(cogr_ds, i) + cogr_layer = GDALDatasetGetLayer(self.cogr_ds, i) name_c = OGR_L_GetName(cogr_layer) name_b = name_c layer_names.append(name_b.decode('utf-8')) idx = -1 - if isinstance(collection.name, string_types): + if isinstance(collection.name, str): if collection.name in layer_names: idx = layer_names.index(collection.name) elif isinstance(collection.name, int): @@ -1099,46 +1199,53 @@ name_b = collection.name.encode('utf-8') name_c = name_b - for k, v in kwargs.items(): + # To avoid circular import. + from fiona import meta - if v is None: - continue + kwarg_keys = set(key.upper() for key in kwargs.keys()) + lyr_creation_option_keys = kwarg_keys & set(meta.layer_creation_options(collection.driver)) - # We need to remove encoding from the layer creation - # options if we're not creating a shapefile. - if k == 'encoding' and "Shapefile" not in collection.driver: - continue + for k, v in kwargs.items(): - k = k.upper().encode('utf-8') + if v is not None and k.upper() in lyr_creation_option_keys: + kb = k.upper().encode('utf-8') - if isinstance(v, bool): - v = ('ON' if v else 'OFF').encode('utf-8') - else: - v = str(v).encode('utf-8') - log.debug("Set option %r: %r", k, v) - options = CSLAddNameValue(options, k, v) + if isinstance(v, bool): + vb = ('ON' if v else 'OFF').encode('utf-8') + else: + vb = str(v).encode('utf-8') + + options = CSLAddNameValue(options, kb, vb) geometry_type = collection.schema.get("geometry", "Unknown") - if not isinstance(geometry_type, string_types) and geometry_type is not None: + + if not isinstance(geometry_type, str) and geometry_type is not None: geometry_types = set(geometry_type) + if len(geometry_types) > 1: geometry_type = "Unknown" else: geometry_type = geometry_types.pop() + if geometry_type == "Any" or geometry_type is None: geometry_type = "Unknown" + geometry_code = geometry_type_code(geometry_type) try: - self.cogr_layer = exc_wrap_pointer( - GDALDatasetCreateLayer( - self.cogr_ds, name_c, cogr_srs, - geometry_code, options)) + # In GDAL versions > 3.6.0 the following directive may + # suffice and we might be able to eliminate the import + # of fiona.meta in a future version of Fiona. + with Env(GDAL_VALIDATE_CREATION_OPTIONS="NO"): + self.cogr_layer = exc_wrap_pointer( + GDALDatasetCreateLayer( + self.cogr_ds, name_c, cogr_srs, + geometry_code, options)) except Exception as exc: GDALClose(self.cogr_ds) self.cogr_ds = NULL - raise DriverIOError(u"{}".format(exc)) + raise DriverIOError(str(exc)) finally: if options != NULL: @@ -1172,9 +1279,6 @@ before_fields.update(new_fields) for key, value in new_fields.items(): - - log.debug("Begin creating field: %r value: %r", key, value) - field_subtype = OFSTNone # Convert 'long' to 'int'. See @@ -1193,8 +1297,6 @@ if ':' in value: value, fmt = value.split(':') - log.debug("Field format parsing, value: %r, fmt: %r", value, fmt) - if '.' 
in fmt: width, precision = map(int, fmt.split('.')) else: @@ -1217,18 +1319,17 @@ OGR_Fld_SetPrecision(cogr_fielddefn, precision) if field_subtype != OFSTNone: # subtypes are new in GDAL 2.x, ignored in 1.x - set_field_subtype(cogr_fielddefn, field_subtype) + OGR_Fld_SetSubType(cogr_fielddefn, field_subtype) exc_wrap_int(OGR_L_CreateField(self.cogr_layer, cogr_fielddefn, 1)) except (UnicodeEncodeError, CPLE_BaseError) as exc: GDALClose(self.cogr_ds) self.cogr_ds = NULL self.cogr_layer = NULL - raise SchemaError(u"{}".format(exc)) + raise SchemaError(str(exc)) else: OGR_Fld_Destroy(cogr_fielddefn) - log.debug("End creating field %r", key) # Mapping of the Python collection schema to the munged # OGR schema. @@ -1236,82 +1337,109 @@ self._schema_mapping = dict(zip(before_fields.keys(), after_fields.keys())) + # Mapping of the Python collection schema to OGR field indices. + # We assume that get_schema()['properties'].keys() is in the exact OGR field order + assert len(before_fields) == len(after_fields) + self._schema_mapping_index = dict(zip(before_fields.keys(), range(len(after_fields.keys())))) + + # Mapping of the Python collection schema to normalized field types + self._schema_normalized_field_types = {k: normalize_field_type(v) for (k, v) in self.collection.schema['properties'].items()} + + log.debug("Writing started") def writerecs(self, records, collection): - """Writes buffered records to OGR.""" + """Writes records to collection storage. + + Parameters + ---------- + records : Iterable + A stream of feature records. + collection : Collection + The collection in which feature records are stored. + + Returns + ------- + None + + """ cdef void *cogr_driver cdef void *cogr_feature cdef int features_in_transaction = 0 - cdef void *cogr_layer = self.cogr_layer + if cogr_layer == NULL: raise ValueError("Null layer") - schema_geom_type = collection.schema['geometry'] - cogr_driver = GDALGetDatasetDriver(self.cogr_ds) - driver_name = OGR_Dr_GetName(cogr_driver).decode("utf-8") - valid_geom_types = collection._valid_geom_types + def validate_geometry_type(record): if record["geometry"] is None: return True return record["geometry"]["type"].lstrip("3D ") in valid_geom_types - transactions_supported = check_capability_transaction(self.cogr_ds) + + transactions_supported = GDALDatasetTestCapability(self.cogr_ds, ODsCTransactions) log.debug("Transaction supported: {}".format(transactions_supported)) + if transactions_supported: log.debug("Starting transaction (initial)") - result = gdal_start_transaction(self.cogr_ds, 0) + result = GDALDatasetStartTransaction(self.cogr_ds, 0) if result == OGRERR_FAILURE: raise TransactionError("Failed to start transaction") schema_props_keys = set(collection.schema['properties'].keys()) - for record in records: - log.debug("Creating feature in layer: %s" % record) - # Check for optional elements - if 'properties' not in record: - record['properties'] = {} - if 'geometry' not in record: - record['geometry'] = None + for _rec in records: + record = decode_object(_rec) # Validate against collection's schema. 
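# Editor's illustration of the checks below: a record's property keys
# must equal the schema's keys exactly, and its geometry type must be
# allowed by the schema, else ValueError or GeometryTypeValidationError
# is raised. Note how legacy 3D names are normalized before comparison:
schema_props_keys = {"name", "height"}
record_props = {"name": "x"}                   # "height" missing
assert set(record_props) != schema_props_keys  # would raise ValueError
assert "3D Point".lstrip("3D ") == "Point"     # matches a "Point" schema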
- if set(record['properties'].keys()) != schema_props_keys: + if set(record.properties.keys()) != schema_props_keys: raise ValueError( "Record does not match collection schema: %r != %r" % ( - record['properties'].keys(), + list(record.properties.keys()), list(schema_props_keys) )) + if not validate_geometry_type(record): raise GeometryTypeValidationError( "Record's geometry type does not match " "collection schema's geometry type: %r != %r" % ( - record['geometry']['type'], - collection.schema['geometry'] )) + record.geometry.type, + collection.schema['geometry'] )) cogr_feature = OGRFeatureBuilder().build(record, collection) result = OGR_L_CreateFeature(cogr_layer, cogr_feature) + if result != OGRERR_NONE: msg = get_last_error_msg() - raise RuntimeError("GDAL Error: {msg} \n \n Failed to write record: " - "{record}".format(msg=msg, record=record)) + raise RuntimeError( + "GDAL Error: {msg}. Failed to write record: {record}".format( + msg=msg, record=record + ) + ) + _deleteOgrFeature(cogr_feature) if transactions_supported: features_in_transaction += 1 + if features_in_transaction == DEFAULT_TRANSACTION_SIZE: - log.debug("Comitting transaction (intermediate)") - result = gdal_commit_transaction(self.cogr_ds) + log.debug("Committing transaction (intermediate)") + result = GDALDatasetCommitTransaction(self.cogr_ds) + if result == OGRERR_FAILURE: raise TransactionError("Failed to commit transaction") + log.debug("Starting transaction (intermediate)") - result = gdal_start_transaction(self.cogr_ds, 0) + result = GDALDatasetStartTransaction(self.cogr_ds, 0) + if result == OGRERR_FAILURE: raise TransactionError("Failed to start transaction") + features_in_transaction = 0 if transactions_supported: - log.debug("Comitting transaction (final)") - result = gdal_commit_transaction(self.cogr_ds) + log.debug("Committing transaction (final)") + result = GDALDatasetCommitTransaction(self.cogr_ds) if result == OGRERR_FAILURE: raise TransactionError("Failed to commit transaction") @@ -1322,10 +1450,83 @@ if cogr_ds == NULL: raise ValueError("Null data source") - gdal_flush_cache(cogr_ds) log.debug("Flushed data source cache") + def update_tags(self, tags, ns=None): + """Writes a dict containing the dataset or layers's tags. + Tags are pairs of key and value strings. Tags belong to + namespaces. The standard namespaces are: default (None) and + 'IMAGE_STRUCTURE'. Applications can create their own additional + namespaces. + + Parameters + ---------- + tags: dict + The dict of metadata items to set. + ns: str, optional + Used to select a namespace other than the default. + + Returns + ------- + int + """ + cdef GDALMajorObjectH obj = NULL + if self.cogr_layer != NULL: + obj = self.cogr_layer + else: + obj = self.cogr_ds + + cdef const char *domain = NULL + if ns: + ns = ns.encode('utf-8') + domain = ns + + cdef char **metadata = NULL + try: + for key, value in tags.items(): + key = key.encode("utf-8") + value = value.encode("utf-8") + metadata = CSLAddNameValue(metadata, key, value) + return GDALSetMetadata(obj, metadata, domain) + finally: + CSLDestroy(metadata) + + def update_tag_item(self, key, tag, ns=None): + """Updates the tag item value + + Parameters + ---------- + key: str + The key for the metadata item to set. + tag: str + The value of the metadata item to set. + ns: str + Used to select a namespace other than the default. 
+ + Returns + ------- + int + """ + key = key.encode('utf-8') + cdef const char *name = key + tag = tag.encode("utf-8") + cdef char *value = tag + + cdef const char *domain = NULL + if ns: + ns = ns.encode('utf-8') + domain = ns + + cdef GDALMajorObjectH obj = NULL + if self.cogr_layer != NULL: + obj = self.cogr_layer + else: + obj = self.cogr_ds + + return GDALSetMetadataItem(obj, name, value, domain) + + cdef class Iterator: """Provides iterated access to feature data. @@ -1344,7 +1545,7 @@ cdef stepsign def __cinit__(self, collection, start=None, stop=None, step=None, - bbox=None, mask=None): + bbox=None, mask=None, where=None): if collection.session is None: raise ValueError("I/O operation on closed collection") self.collection = collection @@ -1363,13 +1564,26 @@ OGR_L_SetSpatialFilterRect( cogr_layer, bbox[0], bbox[1], bbox[2], bbox[3]) elif mask: - cogr_geometry = OGRGeomBuilder().build(mask) + mask_geom = decode_object(mask) + cogr_geometry = OGRGeomBuilder().build(mask_geom) OGR_L_SetSpatialFilter(cogr_layer, cogr_geometry) OGR_G_DestroyGeometry(cogr_geometry) else: OGR_L_SetSpatialFilter(cogr_layer, NULL) + if where: + where_b = where.encode('utf-8') + where_c = where_b + try: + exc_wrap_int( + OGR_L_SetAttributeFilter(cogr_layer, where_c)) + except CPLE_AppDefinedError as e: + raise AttributeFilterError(e) from None + + else: + OGR_L_SetAttributeFilter(cogr_layer, NULL) + self.encoding = session._get_internal_encoding() self.fastindex = OGR_L_TestCapability( @@ -1434,7 +1648,7 @@ self.next_index = start log.debug("Next index: %d", self.next_index) - + # Set OGR_L_SetNextByIndex only if within range if start >= 0 and (self.ftcount == -1 or self.start < self.ftcount): exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) @@ -1445,10 +1659,7 @@ def _next(self): """Internal method to set read cursor to next item""" - - cdef Session session - session = self.collection.session - + cdef Session session = self.collection.session # Check if next_index is valid if self.next_index < 0: @@ -1469,23 +1680,39 @@ # Set read cursor to next_item position if session.cursor_interrupted: if not self.fastindex and not self.next_index == 0: - warnings.warn("Sequential read of iterator was interrupted. Resetting iterator. " - "This can negatively impact the performance.", RuntimeWarning) + warnings.warn( + "Sequential read of iterator was interrupted. Resetting iterator. " + "This can negatively impact the performance.", RuntimeWarning + ) exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) session.cursor_interrupted = False + else: if self.step > 1 and self.fastindex: exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) + elif self.step > 1 and not self.fastindex and not self.next_index == self.start: - # GDALs default implementation of SetNextByIndex is calling ResetReading() and then - # calling GetNextFeature n times. We can shortcut that if we know the previous index. - # OGR_L_GetNextFeature increments cursor by 1, therefore self.step - 1 as one increment was performed when feature is read + # OGR's default implementation of SetNextByIndex is + # calling ResetReading() and then calling GetNextFeature + # n times. We can shortcut that if we know the previous + # index. OGR_L_GetNextFeature increments cursor by 1, + # therefore self.step - 1 as one increment was performed + # when feature is read. 
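# Editor's note on the shortcut above: for a forward slice with step n
# the iterator reads one feature and discards n - 1 more, advancing the
# cursor in O(n) reads, whereas OGR's generic SetNextByIndex rescans
# from feature 0 on drivers without fast indexing, costing O(index).
def visited(start, stop, step):
    # Cursor positions a forward slice touches; mirrors the math above.
    return list(range(start, stop, step))

assert visited(0, 10, 3) == [0, 3, 6, 9]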
for _ in range(self.step - 1): - cogr_feature = OGR_L_GetNextFeature(session.cogr_layer) - if cogr_feature == NULL: - raise StopIteration + try: + cogr_feature = OGR_L_GetNextFeature(session.cogr_layer) + if cogr_feature == NULL: + raise StopIteration + finally: + _deleteOgrFeature(cogr_feature) + elif self.step > 1 and not self.fastindex and self.next_index == self.start: exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) + + elif self.step == 0: + # OGR_L_GetNextFeature increments read cursor by one + pass + elif self.step < 0: exc_wrap_int(OGR_L_SetNextByIndex(session.cogr_layer, self.next_index)) @@ -1495,10 +1722,7 @@ def __next__(self): cdef OGRFeatureH cogr_feature = NULL - cdef OGRLayerH cogr_layer = NULL - cdef Session session - - session = self.collection.session + cdef Session session = self.collection.session if not session or not session.isactive: raise FionaValueError("Session is inactive, dataset is closed or layer is unavailable.") @@ -1527,13 +1751,14 @@ cdef class ItemsIterator(Iterator): def __next__(self): - cdef long fid - cdef void * cogr_feature - cdef Session session - session = self.collection.session + cdef OGRFeatureH cogr_feature = NULL + cdef Session session = self.collection.session + + if not session or not session.isactive: + raise FionaValueError("Session is inactive, dataset is closed or layer is unavailable.") - #Update read cursor + # Update read cursor self._next() # Get the next feature. @@ -1541,30 +1766,33 @@ if cogr_feature == NULL: raise StopIteration - fid = OGR_F_GetFID(cogr_feature) - feature = FeatureBuilder().build( - cogr_feature, - encoding=self.collection.session._get_internal_encoding(), - bbox=False, - driver=self.collection.driver, - ignore_fields=self.collection.ignore_fields, - ignore_geometry=self.collection.ignore_geometry, - ) - - _deleteOgrFeature(cogr_feature) - - return fid, feature + try: + fid = OGR_F_GetFID(cogr_feature) + feature = FeatureBuilder().build( + cogr_feature, + encoding=self.collection.session._get_internal_encoding(), + bbox=False, + driver=self.collection.driver, + ignore_fields=self.collection.ignore_fields, + ignore_geometry=self.collection.ignore_geometry, + ) + else: + return fid, feature + finally: + _deleteOgrFeature(cogr_feature) cdef class KeysIterator(Iterator): def __next__(self): cdef long fid - cdef void * cogr_feature - cdef Session session - session = self.collection.session + cdef OGRFeatureH cogr_feature = NULL + cdef Session session = self.collection.session - #Update read cursor + if not session or not session.isactive: + raise FionaValueError("Session is inactive, dataset is closed or layer is unavailable.") + + # Update read cursor self._next() # Get the next feature. @@ -1612,7 +1840,7 @@ cdef void *cogr_ds cdef int layer_index - if isinstance(layer, integer_types): + if isinstance(layer, int): layer_index = layer layer_str = str(layer_index) else: @@ -1634,6 +1862,7 @@ result = GDALDatasetDeleteLayer(cogr_ds, layer_index) GDALClose(cogr_ds) + if result == OGRERR_UNSUPPORTED_OPERATION: raise DatasetDeleteError("Removal of layer {} not supported by driver".format(layer_str)) elif result != OGRERR_NONE: @@ -1651,10 +1880,7 @@ cdef const char *name_c # Open OGR data source. 
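# Editor's sketch: the helper defined here backs fiona.listlayers(),
# which returns the layer names of any OGR-readable dataset. The file
# name is illustrative.
import fiona

layers = fiona.listlayers("example.gpkg")
assert isinstance(layers, list)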
- try: - path_b = path.encode('utf-8') - except UnicodeDecodeError: - path_b = path + path_b = strencode(path) path_c = path_b cogr_ds = gdal_open_vector(path_c, 0, None, kwargs) @@ -1675,6 +1901,33 @@ return layer_names +def _listdir(path): + """List all files in path, if path points to a directory""" + cdef const char *path_c + cdef int n + cdef char** papszFiles + cdef VSIStatBufL st_buf + + try: + path_b = path.encode('utf-8') + except UnicodeDecodeError: + path_b = path + path_c = path_b + if not VSIStatL(path_c, &st_buf) == 0: + raise FionaValueError("Path '{}' does not exist.".format(path)) + if not VSI_ISDIR(st_buf.st_mode): + raise FionaValueError("Path '{}' is not a directory.".format(path)) + + papszFiles = VSIReadDir(path_c) + n = CSLCount(papszFiles) + files = [] + for i in range(n): + files.append(papszFiles[i].decode("utf-8")) + CSLDestroy(papszFiles) + + return files + + def buffer_to_virtual_file(bytesbuf, ext=''): """Maps a bytes buffer to a virtual file. @@ -1682,7 +1935,7 @@ """ vsi_filename = '/vsimem/{}'.format(uuid4().hex + ext) - vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') + vsi_cfilename = vsi_filename if not isinstance(vsi_filename, str) else vsi_filename.encode('utf-8') vsi_handle = VSIFileFromMemBuffer(vsi_cfilename, bytesbuf, len(bytesbuf), 0) @@ -1695,7 +1948,7 @@ def remove_virtual_file(vsi_filename): - vsi_cfilename = vsi_filename if not isinstance(vsi_filename, string_types) else vsi_filename.encode('utf-8') + vsi_cfilename = vsi_filename if not isinstance(vsi_filename, str) else vsi_filename.encode('utf-8') return VSIUnlink(vsi_cfilename) @@ -1751,7 +2004,6 @@ self._vsif = VSIFileFromMemBuffer( name_b, buffer, len(self._initial_bytes), 0) self.mode = "r" - else: self._vsif = NULL self.mode = "r+" @@ -1785,14 +2037,8 @@ if get_gdal_version_tuple() < (2, ): return - name_b = drivername.encode("utf-8") - cdef const char *name_c = name_b - cdef GDALDriverH driver = GDALGetDriverByName(name_c) - cdef const char *extension_c = GDALGetMetadataItem(driver, "DMD_EXTENSION", NULL) - - if extension_c != NULL: - extension_b = extension_c - recommended_extension = extension_b.decode("utf-8") + recommended_extension = _get_metadata_item(drivername, "DMD_EXTENSION") + if recommended_extension is not None: if not recommended_extension.startswith("."): recommended_extension = "." 
+ recommended_extension root, ext = os.path.splitext(self.name) @@ -1885,8 +2131,6 @@ finally: CPLFree(buffer) - return result - def write(self, data): """Write data bytes to MemoryFile""" cdef const unsigned char *view = data @@ -1895,3 +2139,42 @@ result = VSIFWriteL(view, 1, n, self._vsif) VSIFFlushL(self._vsif) return result + + +def _get_metadata_item(driver, metadata_item): + """Query metadata items + Parameters + ---------- + driver : str + Driver to query + metadata_item : str or None + Metadata item to query + Returns + ------- + str or None + Metadata item + """ + cdef char* metadata_c = NULL + cdef void *cogr_driver + + if get_gdal_version_tuple() < (2, ): + return None + + if driver is None: + return None + + driver_b = strencode(driver) + cogr_driver = GDALGetDriverByName(driver_b) + if cogr_driver == NULL: + raise FionaValueError("Could not find driver '{}'".format(driver)) + + metadata_c = GDALGetMetadataItem(cogr_driver, metadata_item.encode('utf-8'), NULL) + + metadata = None + if metadata_c != NULL: + metadata = metadata_c + metadata = metadata.decode('utf-8') + if len(metadata) == 0: + metadata = None + + return metadata diff -Nru fiona-1.8.22/fiona/ogrext1.pxd fiona-1.9.5/fiona/ogrext1.pxd --- fiona-1.8.22/fiona/ogrext1.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/ogrext1.pxd 2023-10-11 23:19:44.000000000 +0000 @@ -31,11 +31,12 @@ char ** CSLSetNameValue (char **list, char *name, char *value) void CSLDestroy (char **list) char ** CSLAddString(char **list, const char *string) + int CSLCount(char **papszStrList) cdef extern from "sys/stat.h" nogil: struct stat: - pass + int st_mode cdef extern from "cpl_vsi.h" nogil: @@ -56,11 +57,13 @@ int VSIRmdir(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + char** VSIReadDir(const char* pszPath) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + int VSI_ISDIR(int mode) ctypedef int OGRErr @@ -280,4 +283,5 @@ void * OGROpenShared (char *path, int mode, void *x) int OGRReleaseDataSource (void *datasource) OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) + OGRErr OGR_L_SetAttributeFilter(void *layer, const char*) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) diff -Nru fiona-1.8.22/fiona/ogrext2.pxd fiona-1.9.5/fiona/ogrext2.pxd --- fiona-1.8.22/fiona/ogrext2.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/ogrext2.pxd 2023-10-11 23:19:44.000000000 +0000 @@ -194,11 +194,12 @@ char ** CSLSetNameValue (char **list, const char *name, const char *value) void CSLDestroy (char **list) char ** CSLAddString(char **list, const char *string) + int CSLCount(char **papszStrList) cdef extern from "sys/stat.h" nogil: struct stat: - pass + int st_mode cdef extern from "cpl_vsi.h" nogil: @@ -219,11 +220,13 @@ int VSIRmdir(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + char** VSIReadDir(const char* pszPath) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + int 
VSI_ISDIR(int mode) cdef extern from "ogr_srs_api.h": @@ -295,7 +298,8 @@ void * OGR_G_CreateGeometry (int wkbtypecode) void OGR_G_DestroyGeometry (void *geometry) unsigned char * OGR_G_ExportToJson (void *geometry) - void OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) + OGRErr OGR_G_ExportToWkb (void *geometry, int endianness, char *buffer) + int OGR_G_GetCoordinateDimension (void *geometry) int OGR_G_GetGeometryCount (void *geometry) unsigned char * OGR_G_GetGeometryName (void *geometry) int OGR_G_GetGeometryType (void *geometry) @@ -327,6 +331,7 @@ ) int OGR_L_TestCapability (void *layer, char *name) OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) + OGRErr OGR_L_SetAttributeFilter(void *layer, const char*) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) long long OGR_F_GetFieldAsInteger64 (void *feature, int n) void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) diff -Nru fiona-1.8.22/fiona/ogrext3.pxd fiona-1.9.5/fiona/ogrext3.pxd --- fiona-1.8.22/fiona/ogrext3.pxd 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/ogrext3.pxd 2023-10-11 23:19:44.000000000 +0000 @@ -196,11 +196,12 @@ char ** CSLSetNameValue (char **list, const char *name, const char *value) void CSLDestroy (char **list) char ** CSLAddString(char **list, const char *string) + int CSLCount(char **papszStrList) cdef extern from "sys/stat.h" nogil: struct stat: - pass + int st_mode cdef extern from "cpl_vsi.h" nogil: @@ -221,11 +222,13 @@ int VSIRmdir(const char *path) int VSIFFlushL(VSILFILE *fp) size_t VSIFReadL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) + char** VSIReadDir(const char* pszPath) int VSIFSeekL(VSILFILE *fp, vsi_l_offset nOffset, int nWhence) vsi_l_offset VSIFTellL(VSILFILE *fp) int VSIFTruncateL(VSILFILE *fp, vsi_l_offset nNewSize) size_t VSIFWriteL(void *buffer, size_t nSize, size_t nCount, VSILFILE *fp) int VSIStatL(const char *pszFilename, VSIStatBufL *psStatBuf) + int VSI_ISDIR(int mode) cdef extern from "ogr_srs_api.h": @@ -307,7 +310,7 @@ double OGR_G_GetX (void *geometry, int n) double OGR_G_GetY (void *geometry, int n) double OGR_G_GetZ (void *geometry, int n) - void OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) + OGRErr OGR_G_ImportFromWkb (void *geometry, unsigned char *bytes, int nbytes) int OGR_G_WkbSize (void *geometry) void * OGR_G_ForceToMultiPolygon (void *geometry) void * OGR_G_ForceToPolygon (void *geometry) @@ -330,6 +333,7 @@ ) int OGR_L_TestCapability (void *layer, char *name) OGRErr OGR_L_SetIgnoredFields (void *layer, const char **papszFields) + OGRErr OGR_L_SetAttributeFilter(void *layer, const char*) OGRErr OGR_L_SetNextByIndex (void *layer, long nIndex) long long OGR_F_GetFieldAsInteger64 (void *feature, int n) void OGR_F_SetFieldInteger64 (void *feature, int n, long long value) diff -Nru fiona-1.8.22/fiona/path.py fiona-1.9.5/fiona/path.py --- fiona-1.8.22/fiona/path.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/path.py 2023-10-11 23:19:44.000000000 +0000 @@ -5,29 +5,33 @@ import attr -from fiona.compat import urlparse +from urllib.parse import urlparse # Supported URI schemes and their mapping to GDAL's VSI suffix. # TODO: extend for other cloud plaforms. 
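# Editor's sketch of the mapping below as I read this module: chained
# schemes become stacked GDAL /vsi*/ prefixes and an archive member is
# split on "!". The URL is illustrative.
from fiona.path import parse_path, vsi_path

path = parse_path("zip+https://example.com/data.zip!layer.shp")
assert path.scheme == "zip+https"
assert path.archive == "example.com/data.zip"
assert vsi_path(path) == "/vsizip/vsicurl/https://example.com/data.zip/layer.shp"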
SCHEMES = { - 'ftp': 'curl', - 'gzip': 'gzip', - 'http': 'curl', - 'https': 'curl', - 's3': 's3', - 'tar': 'tar', - 'zip': 'zip', - 'file': 'file', - 'gs': 'gs', + "ftp": "curl", + "gzip": "gzip", + "http": "curl", + "https": "curl", + "s3": "s3", + "tar": "tar", + "zip": "zip", + "file": "file", + "gs": "gs", + "oss": "oss", + "az": "az", } -CURLSCHEMES = set([k for k, v in SCHEMES.items() if v == 'curl']) +CURLSCHEMES = {k for k, v in SCHEMES.items() if v == 'curl'} # TODO: extend for other cloud plaforms. -REMOTESCHEMES = set([k for k, v in SCHEMES.items() if v in ('curl', 's3', 'gs')]) +REMOTESCHEMES = { + k for k, v in SCHEMES.items() if v in ("curl", "s3", "gs", "oss", "az") +} -class Path(object): +class Path: """Base class for dataset paths""" @@ -72,9 +76,9 @@ if not self.scheme: return self.path elif self.archive: - return "{}://{}!{}".format(self.scheme, self.archive, self.path) + return f"{self.scheme}://{self.archive}!{self.path}" else: - return "{}://{}".format(self.scheme, self.path) + return f"{self.scheme}://{self.path}" @property def is_remote(self): @@ -173,13 +177,13 @@ else: suffix = '' - prefix = '/'.join('vsi{0}'.format(SCHEMES[p]) for p in path.scheme.split('+') if p != 'file') + prefix = '/'.join(f'vsi{SCHEMES[p]}' for p in path.scheme.split('+') if p != 'file') if prefix: if path.archive: result = '/{}/{}{}/{}'.format(prefix, suffix, path.archive, path.path.lstrip('/')) else: - result = '/{}/{}{}'.format(prefix, suffix, path.path) + result = f'/{prefix}/{suffix}{path.path}' else: result = path.path return result diff -Nru fiona-1.8.22/fiona/rfc3339.py fiona-1.9.5/fiona/rfc3339.py --- fiona-1.8.22/fiona/rfc3339.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/rfc3339.py 2023-10-11 23:19:44.000000000 +0000 @@ -32,7 +32,7 @@ r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?") -class group_accessor(object): +class group_accessor: def __init__(self, m): self.match = m diff -Nru fiona-1.8.22/fiona/schema.pyx fiona-1.9.5/fiona/schema.pyx --- fiona-1.8.22/fiona/schema.pyx 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/schema.pyx 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,8 @@ -from six import text_type +"""Fiona schema module.""" + +from typing import List + +from typing import List from fiona.errors import SchemaError from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType @@ -24,7 +28,7 @@ 'float', # OFTReal, Double Precision floating point None, # OFTRealList, List of doubles 'str', # OFTString, String of UTF-8 chars - None, # OFTStringList, Array of strings + 'List[str]', # OFTStringList, Array of strings None, # OFTWideString, deprecated None, # OFTWideStringList, deprecated 'bytes', # OFTBinary, Raw Binary data @@ -39,13 +43,14 @@ FIELD_TYPES_MAP = { 'int32': int, 'float': float, - 'str': text_type, + 'str': str, 'date': FionaDateType, 'time': FionaTimeType, 'datetime': FionaDateTimeType, 'bytes': bytes, 'int64': int, - 'int': int + 'int': int, + 'List[str]': List[str], } FIELD_TYPES_MAP_REV = dict([(v, k) for k, v in FIELD_TYPES_MAP.items()]) diff -Nru fiona-1.8.22/fiona/session.py fiona-1.9.5/fiona/session.py --- fiona-1.8.22/fiona/session.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/session.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,10 +1,23 @@ """Abstraction for sessions in various clouds.""" +import logging +import os +import warnings from fiona.path import parse_path, UnparsedPath +log = logging.getLogger(__name__) -class Session(object): +try: + 
with warnings.catch_warnings(): + warnings.simplefilter("ignore") + import boto3 +except ImportError: + log.debug("Could not import boto3, continuing with reduced functionality.") + boto3 = None + + +class Session: """Base for classes that configure access to secured resources. Attributes @@ -18,6 +31,24 @@ """ + @classmethod + def hascreds(cls, config): + """Determine if the given configuration has proper credentials + + Parameters + ---------- + cls : class + A Session class. + config : dict + GDAL configuration as a dict. + + Returns + ------- + bool + + """ + return NotImplemented + def get_credential_options(self): """Get credentials as GDAL configuration options @@ -26,7 +57,7 @@ dict """ - return NotImplementedError + return NotImplemented @staticmethod def from_foreign_session(session, cls=None): @@ -50,40 +81,116 @@ return cls(session) @staticmethod - def from_path(path, *args, **kwargs): - """Create a session object suited to the data at `path`. + def cls_from_path(path): + """Find the session class suited to the data at `path`. Parameters ---------- path : str A dataset path or identifier. - args : sequence - Positional arguments for the foreign session constructor. - kwargs : dict - Keyword arguments for the foreign session constructor. Returns ------- - Session + class """ if not path: - return DummySession() + return DummySession path = parse_path(path) if isinstance(path, UnparsedPath) or path.is_local: - return DummySession() + return DummySession - elif path.scheme == "s3" or path.scheme.endswith("+s3") or "amazonaws.com" in path.path: - return AWSSession(*args, **kwargs) + elif ( + path.scheme == "s3" or "amazonaws.com" in path.path + ) and "X-Amz-Signature" not in path.path: + if boto3 is not None: + return AWSSession + else: + log.info("boto3 not available, falling back to a DummySession.") + return DummySession + + elif path.scheme == "oss" or "aliyuncs.com" in path.path: + return OSSSession + + elif path.path.startswith("/vsiswift/"): + return SwiftSession + + elif path.scheme == "az": + return AzureSession # This factory can be extended to other cloud providers here. # elif path.scheme == "cumulonimbus": # for example. # return CumulonimbusSession(*args, **kwargs) else: - return DummySession() + return DummySession + + @staticmethod + def from_path(path, *args, **kwargs): + """Create a session object suited to the data at `path`. + + Parameters + ---------- + path : str + A dataset path or identifier. + args : sequence + Positional arguments for the foreign session constructor. + kwargs : dict + Keyword arguments for the foreign session constructor. + + Returns + ------- + Session + + """ + return Session.cls_from_path(path)(*args, **kwargs) + + @staticmethod + def aws_or_dummy(*args, **kwargs): + """Create an AWSSession if boto3 is available, else DummySession + Parameters + ---------- + path : str + A dataset path or identifier. + args : sequence + Positional arguments for the foreign session constructor. + kwargs : dict + Keyword arguments for the foreign session constructor. + Returns + ------- + Session + """ + if boto3 is not None: + return AWSSession(*args, **kwargs) + else: + return DummySession(*args, **kwargs) + + @staticmethod + def from_environ(*args, **kwargs): + """Create a session object suited to the environment. + Parameters + ---------- + path : str + A dataset path or identifier. + args : sequence + Positional arguments for the foreign session constructor. + kwargs : dict + Keyword arguments for the foreign session constructor. 
+        Returns
+        -------
+        Session
+        """
+        try:
+            session = Session.aws_or_dummy(*args, **kwargs)
+            session.credentials
+        except RuntimeError:
+            log.warning(
+                "Credentials in environment have expired. Creating a DummySession."
+            )
+            session = DummySession(*args, **kwargs)
+        return session
 
 
 class DummySession(Session):
@@ -100,6 +207,24 @@
         self._session = None
         self.credentials = {}
 
+    @classmethod
+    def hascreds(cls, config):
+        """Determine if the given configuration has proper credentials
+
+        Parameters
+        ----------
+        cls : class
+            A Session class.
+        config : dict
+            GDAL configuration as a dict.
+
+        Returns
+        -------
+        bool
+
+        """
+        return True
+
     def get_credential_options(self):
         """Get credentials as GDAL configuration options
 
@@ -116,10 +241,18 @@
     """
 
     def __init__(
-            self, session=None, aws_unsigned=False, aws_access_key_id=None,
-            aws_secret_access_key=None, aws_session_token=None,
-            region_name=None, profile_name=None, requester_pays=False):
-        """Create a new boto3 session
+        self,
+        session=None,
+        aws_unsigned=False,
+        aws_access_key_id=None,
+        aws_secret_access_key=None,
+        aws_session_token=None,
+        region_name=None,
+        profile_name=None,
+        endpoint_url=None,
+        requester_pays=False,
+    ):
+        """Create a new AWS session
 
         Parameters
         ----------
@@ -137,12 +270,13 @@
             A region name, as per boto3.
         profile_name : str, optional
             A shared credentials profile name, as per boto3.
+        endpoint_url: str, optional
+            An endpoint_url, as per GDAL's AWS_S3_ENDPOINT
         requester_pays : bool, optional
            True if the requester agrees to pay transfer costs (default:
            False)
-        """
-        import boto3
+
+        """
         if session:
             self._session = session
         else:
@@ -154,25 +288,51 @@
                 profile_name=profile_name)
 
         self.requester_pays = requester_pays
-        self.unsigned = aws_unsigned
-        self._creds = self._session._session.get_credentials()
+        self.unsigned = bool(os.getenv("AWS_NO_SIGN_REQUEST", aws_unsigned))
+        self.endpoint_url = endpoint_url
+        self._creds = (
+            self._session.get_credentials()
+            if not self.unsigned and self._session
+            else None
+        )
+
+    @classmethod
+    def hascreds(cls, config):
+        """Determine if the given configuration has proper credentials
+
+        Parameters
+        ----------
+        cls : class
+            A Session class.
+        config : dict
+            GDAL configuration as a dict.
+
+        Returns
+        -------
+        bool
+
+        """
+        return {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"}.issubset(config.keys())
 
     @property
     def credentials(self):
         """The session credentials as a dict"""
-        creds = {}
-        if self._creds:
-            if self._creds.access_key:  # pragma: no branch
-                creds['aws_access_key_id'] = self._creds.access_key
-            if self._creds.secret_key:  # pragma: no branch
-                creds['aws_secret_access_key'] = self._creds.secret_key
-            if self._creds.token:
-                creds['aws_session_token'] = self._creds.token
+        res = {}
+        if self._creds:  # pragma: no branch
+            frozen_creds = self._creds.get_frozen_credentials()
+            if frozen_creds.access_key:  # pragma: no branch
+                res["aws_access_key_id"] = frozen_creds.access_key
+            if frozen_creds.secret_key:  # pragma: no branch
+                res["aws_secret_access_key"] = frozen_creds.secret_key
+            if frozen_creds.token:
+                res["aws_session_token"] = frozen_creds.token
         if self._session.region_name:
-            creds['aws_region'] = self._session.region_name
+            res["aws_region"] = self._session.region_name
         if self.requester_pays:
-            creds['aws_request_payer'] = 'requester'
-        return creds
+            res["aws_request_payer"] = "requester"
+        if self.endpoint_url:
+            res["aws_s3_endpoint"] = self.endpoint_url
+        return res
 
     def get_credential_options(self):
         """Get credentials as GDAL configuration options
 
@@ -183,10 +343,14 @@
 
         """
         if self.unsigned:
-            return {'AWS_NO_SIGN_REQUEST': 'YES'}
+            opts = {"AWS_NO_SIGN_REQUEST": "YES"}
+            if "aws_region" in self.credentials:
+                opts["AWS_REGION"] = self.credentials["aws_region"]
+            return opts
         else:
             return {k.upper(): v for k, v in self.credentials.items()}
 
+
 class GSSession(Session):
     """Configures access to secured resources stored in Google Cloud Storage
     """
@@ -197,8 +361,8 @@
         ----------
         google_application_credentials: string
             Path to the google application credentials JSON file.
-        """
+
+        """
         self._creds = {}
         if google_application_credentials is not None:
             self._creds['google_application_credentials'] = google_application_credentials
@@ -235,3 +399,253 @@
 
         """
         return {k.upper(): v for k, v in self.credentials.items()}
+
+
+class OSSSession(Session):
+    """Configures access to secured resources stored in Alibaba Cloud OSS."""
+
+    def __init__(
+        self, oss_access_key_id=None, oss_secret_access_key=None, oss_endpoint=None
+    ):
+        """Create a new Alibaba Cloud OSS session
+
+        Parameters
+        ----------
+        oss_access_key_id: string, optional (default: None)
+            An access key id
+        oss_secret_access_key: string, optional (default: None)
+            A secret access key
+        oss_endpoint: string, optional (default: None)
+            The region attached to the bucket
+
+        """
+        self._creds = {
+            "oss_access_key_id": oss_access_key_id,
+            "oss_secret_access_key": oss_secret_access_key,
+            "oss_endpoint": oss_endpoint,
+        }
+
+    @classmethod
+    def hascreds(cls, config):
+        """Determine if the given configuration has proper credentials
+
+        Parameters
+        ----------
+        cls : class
+            A Session class.
+        config : dict
+            GDAL configuration as a dict.
+ + Returns + ------- + bool + + """ + return {"OSS_ACCESS_KEY_ID", "OSS_SECRET_ACCESS_KEY"}.issubset(config.keys()) + + @property + def credentials(self): + """The session credentials as a dict""" + return self._creds + + def get_credential_options(self): + """Get credentials as GDAL configuration options + + Returns + ------- + dict + + """ + return {k.upper(): v for k, v in self.credentials.items()} + + +class SwiftSession(Session): + """Configures access to secured resources stored in OpenStack Swift Object Storage.""" + + def __init__( + self, + session=None, + swift_storage_url=None, + swift_auth_token=None, + swift_auth_v1_url=None, + swift_user=None, + swift_key=None, + ): + """Create new OpenStack Swift Object Storage Session. + + Three methods are possible: + 1. Create session by the swiftclient library. + 2. The SWIFT_STORAGE_URL and SWIFT_AUTH_TOKEN (this method + is recommended by GDAL docs). + 3. The SWIFT_AUTH_V1_URL, SWIFT_USER and SWIFT_KEY (This + depends on the swiftclient library). + + Parameters + ---------- + session: optional + A swiftclient connection object + swift_storage_url: + the storage URL + swift_auth_token: + the value of the x-auth-token authorization token + swift_storage_url: string, optional + authentication URL + swift_user: string, optional + user name to authenticate as + swift_key: string, optional + key/password to authenticate with + + Examples + -------- + >>> import rasterio + >>> from rasterio.session import SwiftSession + >>> fp = '/vsiswift/bucket/key.tif' + >>> conn = Connection( + ... authurl='http://127.0.0.1:7777/auth/v1.0', + ... user='test:tester', + ... key='testing' + ... ) + >>> session = SwiftSession(conn) + >>> with rasterio.Env(session): + >>> with rasterio.open(fp) as src: + >>> print(src.profile) + + """ + if swift_storage_url and swift_auth_token: + self._creds = { + "swift_storage_url": swift_storage_url, + "swift_auth_token": swift_auth_token, + } + else: + from swiftclient.client import Connection + + if session: + self._session = session + else: + self._session = Connection( + authurl=swift_auth_v1_url, user=swift_user, key=swift_key + ) + self._creds = { + "swift_storage_url": self._session.get_auth()[0], + "swift_auth_token": self._session.get_auth()[1], + } + + @classmethod + def hascreds(cls, config): + """Determine if the given configuration has proper credentials + + Parameters + ---------- + cls : class + A Session class. + config : dict + GDAL configuration as a dict. + + Returns + ------- + bool + + """ + return {"SWIFT_STORAGE_URL", "SWIFT_AUTH_TOKEN"}.issubset(config.keys()) + + @property + def credentials(self): + """The session credentials as a dict""" + return self._creds + + def get_credential_options(self): + """Get credentials as GDAL configuration options + + Returns + ------- + dict + + """ + return {k.upper(): v for k, v in self.credentials.items()} + + +class AzureSession(Session): + """Configures access to secured resources stored in Microsoft Azure Blob Storage.""" + + def __init__( + self, + azure_storage_connection_string=None, + azure_storage_account=None, + azure_storage_access_key=None, + azure_unsigned=False, + ): + """Create new Microsoft Azure Blob Storage session + + Parameters + ---------- + azure_storage_connection_string: string + A connection string contains both an account name and a secret key. + azure_storage_account: string + An account name + azure_storage_access_key: string + A secret key + azure_unsigned : bool, optional (default: False) + If True, requests will be unsigned. 
+ + """ + self.unsigned = bool(os.getenv("AZURE_NO_SIGN_REQUEST", azure_unsigned)) + self.storage_account = os.getenv("AZURE_STORAGE_ACCOUNT", azure_storage_account) + + if azure_storage_connection_string: + self._creds = { + "azure_storage_connection_string": azure_storage_connection_string + } + elif not self.unsigned: + self._creds = { + "azure_storage_account": self.storage_account, + "azure_storage_access_key": azure_storage_access_key, + } + else: + self._creds = {"azure_storage_account": self.storage_account} + + @classmethod + def hascreds(cls, config): + """Determine if the given configuration has proper credentials + + Parameters + ---------- + cls : class + A Session class. + config : dict + GDAL configuration as a dict. + + Returns + ------- + bool + + """ + return ( + "AZURE_STORAGE_CONNECTION_STRING" in config + or {"AZURE_STORAGE_ACCOUNT", "AZURE_STORAGE_ACCESS_KEY"}.issubset( + config.keys() + ) + or {"AZURE_STORAGE_ACCOUNT", "AZURE_NO_SIGN_REQUEST"}.issubset( + config.keys() + ) + ) + + @property + def credentials(self): + """The session credentials as a dict""" + return self._creds + + def get_credential_options(self): + """Get credentials as GDAL configuration options + + Returns + ------- + dict + + """ + if self.unsigned: + return { + "AZURE_NO_SIGN_REQUEST": "YES", + "AZURE_STORAGE_ACCOUNT": self.storage_account, + } + else: + return {k.upper(): v for k, v in self.credentials.items()} diff -Nru fiona-1.8.22/fiona/transform.py fiona-1.9.5/fiona/transform.py --- fiona-1.8.22/fiona/transform.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/transform.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,8 +1,11 @@ """Coordinate and geometry warping and reprojection""" -import fiona._loading -with fiona._loading.add_gdal_dll_directories(): - from fiona._transform import _transform, _transform_geom +from warnings import warn + +from fiona._transform import _transform, _transform_geom +from fiona.compat import DICT_TYPES +from fiona.errors import FionaDeprecationWarning +from fiona.model import decode_object, Geometry def transform(src_crs, dst_crs, xs, ys): @@ -44,8 +47,13 @@ def transform_geom( - src_crs, dst_crs, geom, - antimeridian_cutting=False, antimeridian_offset=10.0, precision=-1): + src_crs, + dst_crs, + geom, + antimeridian_cutting=False, + antimeridian_offset=10.0, + precision=-1, +): """Transform a geometry obj from one reference system to another. Parameters @@ -60,7 +68,7 @@ on the "destination" or "to" side of the transformation. geom: obj A GeoJSON-like geometry object with 'type' and 'coordinates' - members. + members or an iterable of GeoJSON-like geometry objects. antimeridian_cutting: bool, optional ``True`` to cut output geometries in two at the antimeridian, the default is ``False`. @@ -68,14 +76,15 @@ A distance in decimal degrees from the antimeridian, outside of which geometries will not be cut. precision: int, optional - Optional rounding precision of output coordinates, in number - of decimal places. + Round geometry coordinates to this number of decimal places. + This parameter is deprecated and will be removed in 2.0. Returns ------- obj - A new GeoJSON-like geometry with transformed coordinates. Note - that if the output is at the antimeridian, it may be cut and + A new GeoJSON-like geometry (or a list of GeoJSON-like geometries + if an iterable was given as input) with transformed coordinates. 
Note + that if the output is at the antimeridian, it may be cut and of a different geometry ``type`` than the input, e.g., a polygon input may result in multi-polygon output. @@ -88,7 +97,28 @@ {'type': 'Point', 'coordinates': (957097.0952383667, 378940.8419189212)} """ + if precision >= 0: + warn( + "The precision keyword argument is deprecated and will be removed in 2.0", + FionaDeprecationWarning, + ) + # Function is implemented in the _transform C extension module. - return _transform_geom( - src_crs, dst_crs, geom, - antimeridian_cutting, antimeridian_offset, precision) + if isinstance(geom, (Geometry,) + DICT_TYPES): + return _transform_geom( + src_crs, + dst_crs, + decode_object(geom), + antimeridian_cutting, + antimeridian_offset, + precision, + ) + else: + return _transform_geom( + src_crs, + dst_crs, + (decode_object(g) for g in geom), + antimeridian_cutting, + antimeridian_offset, + precision, + ) diff -Nru fiona-1.8.22/fiona/vfs.py fiona-1.9.5/fiona/vfs.py --- fiona-1.8.22/fiona/vfs.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/fiona/vfs.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,9 +1,8 @@ """Implementation of Apache VFS schemes and URLs.""" -import os import sys import re -from fiona.compat import urlparse +from urllib.parse import urlparse # Supported URI schemes and their mapping to GDAL's VSI suffix. @@ -19,16 +18,17 @@ 'gs': 'gs', } -CURLSCHEMES = set([k for k, v in SCHEMES.items() if v == 'curl']) +CURLSCHEMES = {k for k, v in SCHEMES.items() if v == 'curl'} # TODO: extend for other cloud plaforms. -REMOTESCHEMES = set([k for k, v in SCHEMES.items() if v in ('curl', 's3', 'gs')]) +REMOTESCHEMES = {k for k, v in SCHEMES.items() if v in ('curl', 's3', 'gs')} def valid_vsi(vsi): """Ensures all parts of our vsi path are valid schemes.""" return all(p in SCHEMES for p in vsi.split('+')) + def is_remote(scheme): if scheme is None: return False @@ -39,11 +39,11 @@ # If a VSI and archive file are specified, we convert the path to # an OGR VSI path (see cpl_vsi.h). if vsi: - prefix = '/'.join('vsi{0}'.format(SCHEMES[p]) for p in vsi.split('+')) + prefix = '/'.join(f'vsi{SCHEMES[p]}' for p in vsi.split('+')) if archive: - result = '/{0}/{1}{2}'.format(prefix, archive, path) + result = f'/{prefix}/{archive}{path}' else: - result = '/{0}/{1}'.format(prefix, path) + result = f'/{prefix}/{path}' else: result = path diff -Nru fiona-1.8.22/pep-508-install fiona-1.9.5/pep-508-install --- fiona-1.8.22/pep-508-install 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/pep-508-install 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -"""Prototype support for PEP 518: - -"Specifying Minimum Build System Requirements for Python Projects". - -A future version of pip will do this for us and we'll remove this script. - -This script installs Fiona in develop mode (``pip install -e .[test]``). -""" - -import subprocess - - -def main(): - - # Parse config file for build system requirements. - build_system_requirements = None - with open('pyproject.toml') as config: - for line in config: - if line.startswith('requires'): - build_system_requirements = line.split('=')[-1] - - # Install them if found. - if build_system_requirements: - reqs = eval(build_system_requirements) - subprocess.call(['pip', 'install'] + reqs) - - # Now install our package in editable mode. 
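[Illustrative sketch, not part of the upstream changes: the transform_geom() behavior described in the docstring above, using hypothetical coordinates. A single GeoJSON-like mapping returns one geometry, an iterable returns a list, and passing precision >= 0 now emits a FionaDeprecationWarning.]

    # Sketch only; assumes fiona 1.9.x is installed.
    from fiona.transform import transform_geom

    pt = {"type": "Point", "coordinates": (-105.0, 40.0)}

    # One mapping in, one geometry out.
    print(transform_geom("EPSG:4326", "EPSG:3857", pt))

    # An iterable in, a list of geometries out.
    print(transform_geom("EPSG:4326", "EPSG:3857", [pt, pt]))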
- subprocess.call(['pip', 'install', '-e', '.[test]'] + reqs) - - -if __name__ == '__main__': - main() diff -Nru fiona-1.8.22/pyproject.toml fiona-1.9.5/pyproject.toml --- fiona-1.8.22/pyproject.toml 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/pyproject.toml 2023-10-11 23:19:44.000000000 +0000 @@ -1,7 +1,88 @@ [build-system] requires = [ - "cython>=0.29.29", + "cython~=3.0.2", "oldest-supported-numpy", - "setuptools", + "setuptools>=67.8", "wheel", ] +build-backend = "setuptools.build_meta" + +[project] +name = "fiona" +dynamic = ["readme", "version"] +authors = [ + {name = "Sean Gillies"}, +] +maintainers = [ + {name = "Fiona contributors"}, +] +description = "Fiona reads and writes spatial data files" +keywords = ["gis", "vector", "feature", "data"] +license = {text = "BSD 3-Clause"} +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Topic :: Scientific/Engineering :: GIS", +] + +requires-python = ">=3.7" +dependencies = [ + "attrs>=19.2.0", + "certifi", + "click~=8.0", + "click-plugins>=1.0", + "cligj>=0.5", + 'importlib-metadata;python_version<"3.10"', + "six", +] + +[project.optional-dependencies] +all = ["Fiona[calc,s3,test]"] +calc = ["shapely"] +s3 = ["boto3>=1.3.1"] +test = [ + "Fiona[s3]", + "pytest>=7", + "pytest-cov", + "pytz", +] + +[project.scripts] +fio = "fiona.fio.main:main_group" + +[project.entry-points."fiona.fio_commands"] +bounds = "fiona.fio.bounds:bounds" +calc = "fiona.fio.calc:calc" +cat = "fiona.fio.cat:cat" +collect = "fiona.fio.collect:collect" +distrib = "fiona.fio.distrib:distrib" +dump = "fiona.fio.dump:dump" +env = "fiona.fio.env:env" +filter = "fiona.fio.filter:filter" +info = "fiona.fio.info:info" +insp = "fiona.fio.insp:insp" +load = "fiona.fio.load:load" +ls = "fiona.fio.ls:ls" +rm = "fiona.fio.rm:rm" + +[project.urls] +Documentation = "https://fiona.readthedocs.io/" +Repository = "https://github.com/Toblerity/Fiona" + +[tool.setuptools] +include-package-data = false + +[tool.setuptools.dynamic] +version = {attr = "fiona.__version__"} +readme = {file = ["README.rst", "CHANGES.txt", "CREDITS.txt"]} + +[tool.setuptools.packages.find] +include = ["fiona", "fiona.*"] diff -Nru fiona-1.8.22/pytest.ini fiona-1.9.5/pytest.ini --- fiona-1.8.22/pytest.ini 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/pytest.ini 2023-10-11 23:19:44.000000000 +0000 @@ -1,13 +1,17 @@ [pytest] filterwarnings = + error ignore:.*Sequential read of iterator was interrupted*:RuntimeWarning ignore:.*negative slices or start values other than zero may be slow*:RuntimeWarning ignore:.*negative step size may be slow*:RuntimeWarning ignore:.*is buggy and will be removed in Fiona 2.0.* - -markers = + ignore:.*unclosed =1.3.1 +coverage~=6.5 +cython~=0.29.29 +pytest~=7.2 +pytest-cov~=4.0 +pytz==2022.6 +setuptools +wheel diff -Nru fiona-1.8.22/requirements.txt fiona-1.9.5/requirements.txt --- fiona-1.8.22/requirements.txt 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/requirements.txt 2023-10-11 23:19:44.000000000 +0000 @@ -1,7 +1,7 @@ -attrs==21.2.0 -click-plugins==1.0.4 -cligj==0.5.0 -munch==2.3.2 -six==1.16.0 -enum34==1.1.6 ; python_version < '3.4' +attrs>=19.2.0 +click~=8.0 
+click-plugins +cligj>=0.5.0 +importlib-metadata;python_version<"3.10" +munch>=2.3.2 certifi diff -Nru fiona-1.8.22/scripts/check_deprecated.py fiona-1.9.5/scripts/check_deprecated.py --- fiona-1.8.22/scripts/check_deprecated.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/scripts/check_deprecated.py 2023-10-11 23:19:44.000000000 +0000 @@ -43,15 +43,15 @@ if os.path.basename(path) in ignored_files: continue - with open(path, 'r') as f: + with open(path) as f: for i, line in enumerate(f): for deprecated_method in deprecated: - match = re.search('{}\s*\('.format(deprecated_method), line) + match = re.search(fr'{deprecated_method}\s*\(', line) if match: found_lines[path].append((i+1, line.strip(), deprecated_method)) for path in sorted(found_lines): print(path) for line_nr, line, method in found_lines[path]: - print("\t{}\t{}".format(line_nr, line)) + print(f"\t{line_nr}\t{line}") print("") diff -Nru fiona-1.8.22/scripts/check_urls.py fiona-1.9.5/scripts/check_urls.py --- fiona-1.8.22/scripts/check_urls.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/scripts/check_urls.py 2023-10-11 23:19:44.000000000 +0000 @@ -7,11 +7,11 @@ headers = {'User-Agent': 'Mozilla/5.0 (compatible; MSIE 6.0; Fiona CI check)'} for fpath in files: - print("Processing: {}".format(fpath)) + print(f"Processing: {fpath}") with open(fpath) as f: text = f.read() - urls = re.findall('(https?:\/\/[^\s`>\'"()]+)', text) + urls = re.findall('(https?:\\/\\/[^\\s`>\'"()]+)', text) for url in urls: http_code = None diff -Nru fiona-1.8.22/scripts/travis_filegdb_install.sh fiona-1.9.5/scripts/travis_filegdb_install.sh --- fiona-1.8.22/scripts/travis_filegdb_install.sh 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/scripts/travis_filegdb_install.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -#!/bin/bash - -# Install filegdb if not already installed -if [ ! 
-d "$FILEGDB" ]; then - mkdir -p $FILEGDB - cd $FILEGDB - wget -q https://github.com/Esri/file-geodatabase-api/raw/master/FileGDB_API_1.5.1/FileGDB_API_1_5_1-64gcc51.tar.gz - tar -xzf FileGDB_API_1_5_1-64gcc51.tar.gz --strip=1 FileGDB_API-64gcc51 - rm FileGDB_API_1_5_1-64gcc51.tar.gz - rm -rf samples - rm -rf doc -fi - -export LD_LIBRARY_PATH=$FILEGDB/lib:$LD_LIBRARY_PATH - -# change back to travis build dir -cd $TRAVIS_BUILD_DIR - diff -Nru fiona-1.8.22/scripts/travis_gdal_install.sh fiona-1.9.5/scripts/travis_gdal_install.sh --- fiona-1.8.22/scripts/travis_gdal_install.sh 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/scripts/travis_gdal_install.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,130 +0,0 @@ -#!/bin/bash -# -# originally contributed by @rbuffat to Toblerity/Fiona -set -e - -GDALOPTS=" --with-ogr \ - --with-geos \ - --with-expat \ - --without-libtool \ - --with-libz=internal \ - --with-libtiff=internal \ - --with-geotiff=internal \ - --without-gif \ - --without-pg \ - --without-grass \ - --without-libgrass \ - --without-cfitsio \ - --without-pcraster \ - --with-netcdf \ - --with-png=internal \ - --with-jpeg=internal \ - --without-gif \ - --without-ogdi \ - --without-fme \ - --without-hdf4 \ - --without-hdf5 \ - --without-jasper \ - --without-ecw \ - --without-kakadu \ - --without-mrsid \ - --without-jp2mrsid \ - --without-bsb \ - --without-grib \ - --without-mysql \ - --without-ingres \ - --without-xerces \ - --without-odbc \ - --with-curl \ - --with-sqlite3 \ - --without-idb \ - --without-sde \ - --without-ruby \ - --without-perl \ - --without-php \ - --without-python \ - --with-oci=no \ - --without-mrf \ - --with-webp=no" - -if [ -d "$FILEGDB" ]; then - GDALOPTS="$GDALOPTS --with-fgdb=$FILEGDB" -fi - -# Create build dir if not exists -if [ ! -d "$GDALBUILD" ]; then - mkdir $GDALBUILD; -fi - -if [ ! -d "$GDALINST" ]; then - mkdir $GDALINST; -fi - -ls -l $GDALINST - -if [ "$GDALVERSION" = "master" ]; then - PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" - cd $GDALBUILD - git clone --depth 1 https://github.com/OSGeo/gdal gdal-$GDALVERSION - cd gdal-$GDALVERSION/gdal - echo $PROJVERSION > newproj.txt - git rev-parse HEAD > newrev.txt - BUILD=no - # Only build if nothing cached or if the GDAL revision changed - if test ! -f $GDALINST/gdal-$GDALVERSION/rev.txt; then - BUILD=yes - elif ( ! diff newrev.txt $GDALINST/gdal-$GDALVERSION/rev.txt >/dev/null ) || ( ! diff newproj.txt $GDALINST/gdal-$GDALVERSION/newproj.txt >/dev/null ); then - BUILD=yes - fi - if test "$BUILD" = "yes"; then - mkdir -p $GDALINST/gdal-$GDALVERSION - cp newrev.txt $GDALINST/gdal-$GDALVERSION/rev.txt - cp newproj.txt $GDALINST/gdal-$GDALVERSION/newproj.txt - ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS $PROJOPT - make - make install - fi - -else - - case "$GDALVERSION" in - 3*) - PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" - ;; - 2.4*) - PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" - ;; - 2.3*) - PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" - ;; - 2.2*) - PROJOPT="--with-static-proj4=$GDALINST/gdal-$GDALVERSION" - ;; - 2.1*) - PROJOPT="--with-static-proj4=$GDALINST/gdal-$GDALVERSION" - ;; - 2.0*) - PROJOPT="--with-static-proj4=$GDALINST/gdal-$GDALVERSION" - ;; - 1*) - PROJOPT="--with-static-proj4=$GDALINST/gdal-$GDALVERSION" - ;; - *) - PROJOPT="--with-proj=$GDALINST/gdal-$GDALVERSION" - ;; - esac - - if [ ! 
-d "$GDALINST/gdal-$GDALVERSION/share/gdal" ]; then - cd $GDALBUILD - gdalver=$(expr "$GDALVERSION" : '\([0-9]*.[0-9]*.[0-9]*\)') - wget -q http://download.osgeo.org/gdal/$gdalver/gdal-$GDALVERSION.tar.gz - tar -xzf gdal-$GDALVERSION.tar.gz - cd gdal-$gdalver - ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS $PROJOPT - make - make install - fi -fi - -# change back to travis build dir -cd $TRAVIS_BUILD_DIR diff -Nru fiona-1.8.22/scripts/travis_proj_install.sh fiona-1.9.5/scripts/travis_proj_install.sh --- fiona-1.8.22/scripts/travis_proj_install.sh 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/scripts/travis_proj_install.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -#!/bin/sh -set -e - -# Create build dir if not exists -if [ ! -d "$PROJBUILD" ]; then - mkdir $PROJBUILD; -fi - -if [ ! -d "$PROJINST" ]; then - mkdir $PROJINST; -fi - -ls -l $PROJINST - -echo "PROJ VERSION: $PROJVERSION" - -if [ ! -d "$PROJINST/gdal-$GDALVERSION/share/proj" ]; then - cd $PROJBUILD - wget -q https://download.osgeo.org/proj/proj-$PROJVERSION.tar.gz - tar -xzf proj-$PROJVERSION.tar.gz - projver=$(expr "$PROJVERSION" : '\([0-9]*.[0-9]*.[0-9]*\)') - cd proj-$projver - ./configure --prefix=$PROJINST/gdal-$GDALVERSION - make -s - make install -fi - -# change back to travis build dir -cd $TRAVIS_BUILD_DIR diff -Nru fiona-1.8.22/setup.py fiona-1.9.5/setup.py --- fiona-1.8.22/setup.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/setup.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,14 +1,19 @@ -from distutils.command.sdist import sdist -from distutils import log -import itertools as it +# Fiona build script. + +import logging import os import shutil import subprocess import sys + from setuptools import setup from setuptools.extension import Extension +# Ensure minimum version of Python is running +if sys.version_info[0:2] < (3, 6): + raise RuntimeError('Fiona requires Python>=3.6') + # Use Cython if available. try: from Cython.Build import cythonize @@ -17,17 +22,7 @@ def check_output(cmd): - # since subprocess.check_output doesn't exist in 2.6 - # we wrap it here. - try: - out = subprocess.check_output(cmd) - return out.decode('utf') - except AttributeError: - # For some reasone check_output doesn't exist - # So fall back on Popen - p = subprocess.Popen(cmd, stdout=subprocess.PIPE) - out, err = p.communicate() - return out + return subprocess.check_output(cmd).decode('utf') def copy_data_tree(datadir, destdir): @@ -38,54 +33,6 @@ shutil.copytree(datadir, destdir) -# Parse the version from the fiona module. -with open('fiona/__init__.py', 'r') as f: - for line in f: - if line.find("__version__") >= 0: - version = line.split("=")[1].strip() - version = version.strip('"') - version = version.strip("'") - break - -# Fiona's auxiliary files are UTF-8 encoded and we'll specify this when -# reading with Python 3+ -open_kwds = {} -if sys.version_info > (3,): - open_kwds['encoding'] = 'utf-8' - -with open('VERSION.txt', 'w', **open_kwds) as f: - f.write(version) - -with open('README.rst', **open_kwds) as f: - readme = f.read() - -with open('CREDITS.txt', **open_kwds) as f: - credits = f.read() - -with open('CHANGES.txt', **open_kwds) as f: - changes = f.read() - -# Set a flag for builds where the source directory is a repo checkout. -source_is_repo = os.path.exists("MANIFEST.in") - - -# Extend distutil's sdist command to generate C extension sources from -# the _shim extension modules for GDAL 1.x and 2.x. 
-class sdist_multi_gdal(sdist): - def run(self): - sources = { - "_shim1": "_shim", - "_shim2": "_shim", - "_shim22": "_shim", - "_shim3": "_shim" - } - for src_a, src_b in sources.items(): - shutil.copy('fiona/{}.pyx'.format(src_a), 'fiona/{}.pyx'.format(src_b)) - _ = check_output(['cython', '-v', '-f', 'fiona/{}.pyx'.format(src_b), - '-o', 'fiona/{}.c'.format(src_a)]) - print(_) - sdist.run(self) - # Building Fiona requires options that can be obtained from GDAL's gdal-config # program or can be specified using setup arguments. The latter override the # former. @@ -105,6 +52,8 @@ gdal_output = [None for i in range(4)] gdalversion = None language = None +gdal_major_version = 0 +gdal_minor_version = 0 if 'clean' not in sys.argv: try: @@ -125,52 +74,53 @@ extra_link_args.append(item) gdalversion = gdal_output[3] if gdalversion: - log.info("GDAL API version obtained from gdal-config: %s", - gdalversion) + logging.info("GDAL API version obtained from gdal-config: %s", + gdalversion) except Exception as e: if os.name == "nt": - log.info("Building on Windows requires extra options to setup.py " - "to locate needed GDAL files.\nMore information is " - "available in the README.") + logging.info("Building on Windows requires extra options to" + " setup.py to locate needed GDAL files.\nMore" + " information is available in the README.") else: - log.warn("Failed to get options via gdal-config: %s", str(e)) + logging.warn("Failed to get options via gdal-config: %s", str(e)) # Get GDAL API version from environment variable. if 'GDAL_VERSION' in os.environ: gdalversion = os.environ['GDAL_VERSION'] - log.info("GDAL API version obtained from environment: %s", gdalversion) + logging.info("GDAL API version obtained from environment: %s", + gdalversion) # Get GDAL API version from the command line if specified there. if '--gdalversion' in sys.argv: index = sys.argv.index('--gdalversion') sys.argv.pop(index) gdalversion = sys.argv.pop(index) - log.info("GDAL API version obtained from command line option: %s", - gdalversion) + logging.info("GDAL API version obtained from command line option: %s", + gdalversion) if not gdalversion: - log.fatal("A GDAL API version must be specified. Provide a path " - "to gdal-config using a GDAL_CONFIG environment variable " - "or use a GDAL_VERSION environment variable.") + logging.fatal("A GDAL API version must be specified. Provide a path " + "to gdal-config using a GDAL_CONFIG environment " + "variable or use a GDAL_VERSION environment variable.") sys.exit(1) if os.environ.get('PACKAGE_DATA'): destdir = 'fiona/gdal_data' if gdal_output[2]: - log.info("Copying gdal data from %s" % gdal_output[2]) + logging.info("Copying gdal data from %s" % gdal_output[2]) copy_data_tree(gdal_output[2], destdir) else: # check to see if GDAL_DATA is defined gdal_data = os.environ.get('GDAL_DATA', None) if gdal_data: - log.info("Copying gdal data from %s" % gdal_data) + logging.info("Copying gdal data from %s" % gdal_data) copy_data_tree(gdal_data, destdir) - # Conditionally copy PROJ.4 data. - projdatadir = os.environ.get('PROJ_LIB', '/usr/local/share/proj') + # Conditionally copy PROJ DATA. 
+ projdatadir = os.environ.get('PROJ_DATA', os.environ.get('PROJ_LIB', '/usr/local/share/proj')) if os.path.exists(projdatadir): - log.info("Copying proj data from %s" % projdatadir) + logging.info("Copying proj data from %s" % projdatadir) copy_data_tree(projdatadir, 'fiona/proj_data') if "--cython-language" in sys.argv: @@ -182,13 +132,27 @@ gdal_major_version = int(gdal_version_parts[0]) gdal_minor_version = int(gdal_version_parts[1]) - log.info("GDAL version major=%r minor=%r", gdal_major_version, gdal_minor_version) + if (gdal_major_version, gdal_minor_version) < (3, 1): + raise SystemExit( + "ERROR: GDAL >= 3.1 is required for fiona. " + "Please upgrade GDAL." + ) + + logging.info("GDAL version major=%r minor=%r", gdal_major_version, + gdal_minor_version) + +compile_time_env = { + "CTE_GDAL_MAJOR_VERSION": gdal_major_version, + "CTE_GDAL_MINOR_VERSION": gdal_minor_version, +} ext_options = dict( include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, - extra_link_args=extra_link_args) + extra_link_args=extra_link_args, + cython_compile_time_env=compile_time_env, +) # Enable coverage for cython pyx files. if os.environ.get('CYTHON_COVERAGE'): @@ -215,154 +179,43 @@ # Define the extension modules. ext_modules = [] -if source_is_repo and "clean" not in sys.argv: +if "clean" not in sys.argv: # When building from a repo, Cython is required. - log.info("MANIFEST.in found, presume a repo, cythonizing...") + logging.info("MANIFEST.in found, presume a repo, cythonizing...") if not cythonize: - log.fatal("Cython.Build.cythonize not found. " - "Cython is required to build from a repo.") - sys.exit(1) - - if gdalversion.startswith("1"): - shutil.copy('fiona/_shim1.pyx', 'fiona/_shim.pyx') - shutil.copy('fiona/_shim1.pxd', 'fiona/_shim.pxd') - elif gdal_major_version == 2: - if gdal_minor_version >= 2: - log.info("Building Fiona for gdal 2.2+: {0}".format(gdalversion)) - shutil.copy('fiona/_shim22.pyx', 'fiona/_shim.pyx') - shutil.copy('fiona/_shim22.pxd', 'fiona/_shim.pxd') - else: - log.info("Building Fiona for gdal 2.0.x-2.1.x: {0}".format(gdalversion)) - shutil.copy('fiona/_shim2.pyx', 'fiona/_shim.pyx') - shutil.copy('fiona/_shim2.pxd', 'fiona/_shim.pxd') - elif gdal_major_version == 3: - shutil.copy('fiona/_shim3.pyx', 'fiona/_shim.pyx') - shutil.copy('fiona/_shim3.pxd', 'fiona/_shim.pxd') - - ext_modules = cythonize([ - Extension('fiona._geometry', ['fiona/_geometry.pyx'], **ext_options), - Extension('fiona.schema', ['fiona/schema.pyx'], **ext_options), - Extension('fiona._transform', ['fiona/_transform.pyx'], **ext_options_cpp), - Extension('fiona._crs', ['fiona/_crs.pyx'], **ext_options), - Extension('fiona._env', ['fiona/_env.pyx'], **ext_options), - Extension('fiona._err', ['fiona/_err.pyx'], **ext_options), - Extension('fiona._shim', ['fiona/_shim.pyx'], **ext_options), - Extension('fiona.ogrext', ['fiona/ogrext.pyx'], **ext_options) + raise SystemExit( + "Cython.Build.cythonize not found. " + "Cython is required to build fiona." 
+ ) + + ext_modules = cythonize( + [ + Extension("fiona._geometry", ["fiona/_geometry.pyx"], **ext_options), + Extension("fiona.schema", ["fiona/schema.pyx"], **ext_options), + Extension("fiona._transform", ["fiona/_transform.pyx"], **ext_options_cpp), + Extension("fiona.crs", ["fiona/crs.pyx"], **ext_options), + Extension("fiona._env", ["fiona/_env.pyx"], **ext_options), + Extension("fiona._err", ["fiona/_err.pyx"], **ext_options), + Extension("fiona.ogrext", ["fiona/ogrext.pyx"], **ext_options), ], - compiler_directives={"language_level": "3"} + compiler_directives={"language_level": "3"}, + compile_time_env=compile_time_env, ) -# If there's no manifest template, as in an sdist, we just specify .c files. -elif "clean" not in sys.argv: - ext_modules = [ - Extension('fiona.schema', ['fiona/schema.c'], **ext_options), - Extension('fiona._transform', ['fiona/_transform.cpp'], **ext_options_cpp), - Extension('fiona._geometry', ['fiona/_geometry.c'], **ext_options), - Extension('fiona._crs', ['fiona/_crs.c'], **ext_options), - Extension('fiona._env', ['fiona/_env.c'], **ext_options), - Extension('fiona._err', ['fiona/_err.c'], **ext_options), - Extension('fiona.ogrext', ['fiona/ogrext.c'], **ext_options), - ] - - if gdal_major_version == 1: - log.info("Building Fiona for gdal 1.x: {0}".format(gdalversion)) - ext_modules.append( - Extension('fiona._shim', ['fiona/_shim1.c'], **ext_options)) - elif gdal_major_version == 2: - if gdal_minor_version >= 2: - log.info("Building Fiona for gdal 2.2+: {0}".format(gdalversion)) - ext_modules.append( - Extension('fiona._shim', ['fiona/_shim22.c'], **ext_options)) - else: - log.info("Building Fiona for gdal 2.0.x-2.1.x: {0}".format(gdalversion)) - ext_modules.append( - Extension('fiona._shim', ['fiona/_shim2.c'], **ext_options)) - elif gdal_major_version == 3: - log.info("Building Fiona for gdal >= 3.0.x: {0}".format(gdalversion)) - ext_modules.append( - Extension('fiona._shim', ['fiona/_shim3.c'], **ext_options)) - -requirements = [ - 'attrs>=17', - 'certifi', - 'click>=4.0', - 'cligj>=0.5', - 'click-plugins>=1.0', - 'six>=1.7', - 'munch', - "setuptools", - 'argparse; python_version < "2.7"', - 'ordereddict; python_version < "2.7"', - 'enum34; python_version < "3.4"' -] -# Python 3.10 workaround as enum34 not available -if sys.version_info >= (3, 10): - requirements.remove('enum34; python_version < "3.4"') - -extras_require = { - 'calc': ['shapely'], - 's3': ['boto3>=1.2.4'], - 'test': ['pytest>=3', 'pytest-cov', 'boto3>=1.2.4', 'mock; python_version < "3.4"'] -} - -extras_require['all'] = list(set(it.chain(*extras_require.values()))) - - -setup_args = dict( - cmdclass={'sdist': sdist_multi_gdal}, - metadata_version='1.2', - name='Fiona', - version=version, - requires_python='>=2.6', - requires_external='GDAL (>=1.8)', - description="Fiona reads and writes spatial data files", - license='BSD', - keywords='gis vector feature data', - author='Sean Gillies', - author_email='sean.gillies@gmail.com', - maintainer='Sean Gillies', - maintainer_email='sean.gillies@gmail.com', - url='http://github.com/Toblerity/Fiona', - long_description=readme + "\n" + changes + "\n" + credits, - package_dir={'': '.'}, - packages=['fiona', 'fiona.fio'], - entry_points=''' - [console_scripts] - fio=fiona.fio.main:main_group - - [fiona.fio_commands] - bounds=fiona.fio.bounds:bounds - calc=fiona.fio.calc:calc - cat=fiona.fio.cat:cat - collect=fiona.fio.collect:collect - distrib=fiona.fio.distrib:distrib - dump=fiona.fio.dump:dump - env=fiona.fio.env:env - 
filter=fiona.fio.filter:filter - info=fiona.fio.info:info - insp=fiona.fio.insp:insp - load=fiona.fio.load:load - ls=fiona.fio.ls:ls - rm=fiona.fio.rm:rm - ''', - install_requires=requirements, - extras_require=extras_require, - ext_modules=ext_modules, - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Topic :: Scientific/Engineering :: GIS']) +# Include these files for binary wheels +fiona_package_data = ['gdal.pxi', '*.pxd'] if os.environ.get('PACKAGE_DATA'): - setup_args['package_data'] = {'fiona': ['gdal_data/*', 'proj_data/*', '.libs/*', '.libs/licenses/*']} - -setup(**setup_args) + fiona_package_data.extend([ + 'gdal_data/*', + 'proj_data/*', + '.libs/*', + '.libs/licenses/*', + ]) + +# See pyproject.toml for project metadata +setup( + name="Fiona", # need by GitHub dependency graph + package_data={"fiona": fiona_package_data}, + ext_modules=ext_modules, +) diff -Nru fiona-1.8.22/tests/__init__.py fiona-1.9.5/tests/__init__.py --- fiona-1.8.22/tests/__init__.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/__init__.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,10 +1,2 @@ -"""Do not delete! At least one of the ``unittest.TestCase()`` based tests do -a relative import inside the ``tests`` directory to use another test as a -base class. This file can probably be deleted if that condition is removed. +"""Fiona's test package. Do not delete!""" -For example: - - $ git grep 'from \.' | grep test - tests/test_layer.py:from .test_collection import TestReading - tests/test_vfs.py:from .test_collection import TestReading -""" diff -Nru fiona-1.8.22/tests/conftest.py fiona-1.9.5/tests/conftest.py --- fiona-1.8.22/tests/conftest.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/conftest.py 2023-10-11 23:19:44.000000000 +0000 @@ -7,57 +7,68 @@ import tarfile import zipfile from collections import OrderedDict +import warnings + from click.testing import CliRunner import pytest import fiona -from fiona.crs import from_epsg +from fiona.crs import CRS from fiona.env import GDALVersion +from fiona.meta import extensions +from fiona.model import Feature, ObjectEncoder, to_dict + + +def pytest_collection_modifyitems(config, items): -driver_extensions = {'DXF': 'dxf', - 'CSV': 'csv', - 'ESRI Shapefile': 'shp', - 'FileGDB': 'gdb', - 'GML': 'gml', - 'GPX': 'gpx', - 'GPSTrackMaker': 'gtm', - 'MapInfo File': 'tab', - 'DGN': 'dgn', - 'GPKG': 'gpkg', - 'GeoJSON': 'json', - 'GeoJSONSeq': 'geojsons', - 'GMT': 'gmt', - 'OGR_GMT': 'gmt', - 'BNA': 'bna', - 'FlatGeobuf': 'fgb'} + # Fiona contains some tests that depend only on GDALs behavior. + # E.g. some test the driver specific access modes maintained in + # fiona/drvsupport.py for different GDAL versions. + # These tests can fail on exotic architectures (e.g. not returning + # the exact same value) + # We explicitly enable these tests on Fiona CI using pytest -m gdal + # and hide these tests otherwise. 
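[Hypothetical illustration, not from the diff: a test opted in to the GDAL-behavior suite described in the comments above. Unless pytest is invoked with `-m gdal`, the collection hook attaches a skip marker to it.]

    import pytest

    @pytest.mark.gdal
    def test_gdal_only_behavior():
        # exercises GDAL-version-specific behavior; collected but skipped
        # unless pytest is invoked with -m gdal
        ...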
+ markers_options = config.getoption("-m", "") + if "gdal" not in markers_options: + skip_gdal = pytest.mark.skip(reason="use '-m gdal' to run GDAL related tests.") + for item in items: + gdal_marker = item.get_closest_marker("gdal") + if gdal_marker is not None and gdal_marker.name == "gdal": + item.add_marker(skip_gdal) def pytest_report_header(config): headers = [] # gdal version number gdal_release_name = fiona.get_gdal_release_name() - headers.append('GDAL: {} ({})'.format(gdal_release_name, fiona.get_gdal_version_num())) - supported_drivers = ", ".join(sorted(list(fiona.drvsupport.supported_drivers.keys()))) + headers.append( + f"GDAL: {gdal_release_name} ({fiona.get_gdal_version_num()})" + ) + supported_drivers = ", ".join( + sorted(list(fiona.drvsupport.supported_drivers.keys())) + ) # supported drivers - headers.append("Supported drivers: {}".format(supported_drivers)) - return '\n'.join(headers) + headers.append(f"Supported drivers: {supported_drivers}") + return "\n".join(headers) def get_temp_filename(driver): - + """Create a temporary file name with driver extension if required.""" basename = "foo" - extension = driver_extensions.get(driver, "bar") - prefix = "" - if driver == 'GeoJSONSeq': - prefix = "GeoJSONSeq:" - - return "{prefix}{basename}.{extension}".format(prefix=prefix, - basename=basename, - extension=extension) + exts = extensions(driver) + if exts is None or len(exts) == 0: + ext = "" + else: + ext = f".{exts[0]}" + return f"{basename}{ext}" _COUTWILDRNP_FILES = [ - 'coutwildrnp.shp', 'coutwildrnp.shx', 'coutwildrnp.dbf', 'coutwildrnp.prj'] + "coutwildrnp.shp", + "coutwildrnp.shx", + "coutwildrnp.dbf", + "coutwildrnp.prj", +] def _read_file(name): @@ -70,7 +81,7 @@ requires_gpkg = pytest.mark.skipif(not has_gpkg, reason=has_gpkg_reason) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def gdalenv(request): import fiona.env @@ -78,16 +89,17 @@ if fiona.env.local._env: fiona.env.delenv() fiona.env.local._env = None + request.addfinalizer(fin) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def data_dir(): """Absolute file path to the directory containing test datasets.""" - return os.path.abspath(os.path.join(os.path.dirname(__file__), 'data')) + return os.path.abspath(os.path.join(os.path.dirname(__file__), "data")) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def data(tmpdir, data_dir): """A temporary directory containing a copy of the files in data.""" for filename in _COUTWILDRNP_FILES: @@ -95,98 +107,113 @@ return tmpdir -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def path_curves_line_csv(data_dir): """Path to ```curves_line.csv``""" - return os.path.join(data_dir, 'curves_line.csv') + return os.path.join(data_dir, "curves_line.csv") -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def path_test_tin_shp(data_dir): """Path to ```test_tin.shp``""" - return os.path.join(data_dir, 'test_tin.shp') + return os.path.join(data_dir, "test_tin.shp") -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def path_test_tin_csv(data_dir): """Path to ```test_tin.csv``""" - return os.path.join(data_dir, 'test_tin.csv') + return os.path.join(data_dir, "test_tin.csv") + -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def path_coutwildrnp_shp(data_dir): """Path to ```coutwildrnp.shp``""" - return os.path.join(data_dir, 'coutwildrnp.shp') + return os.path.join(data_dir, "coutwildrnp.shp") -@pytest.fixture(scope='session') 
+@pytest.fixture(scope="session") def path_coutwildrnp_zip(data_dir): """Creates ``coutwildrnp.zip`` if it does not exist and returns the absolute file path.""" - path = os.path.join(data_dir, 'coutwildrnp.zip') + path = os.path.join(data_dir, "coutwildrnp.zip") if not os.path.exists(path): - with zipfile.ZipFile(path, 'w') as zip: + with zipfile.ZipFile(path, "w") as zip: for filename in _COUTWILDRNP_FILES: zip.write(os.path.join(data_dir, filename), filename) return path -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def path_grenada_geojson(data_dir): """Path to ```grenada.geojson```""" - return os.path.join(data_dir, 'grenada.geojson') + return os.path.join(data_dir, "grenada.geojson") -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def bytes_coutwildrnp_zip(path_coutwildrnp_zip): """The zip file's bytes""" - with open(path_coutwildrnp_zip, 'rb') as src: + with open(path_coutwildrnp_zip, "rb") as src: return src.read() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def path_coutwildrnp_tar(data_dir): """Creates ``coutwildrnp.tar`` if it does not exist and returns the absolute file path.""" - path = os.path.join(data_dir, 'coutwildrnp.tar') + path = os.path.join(data_dir, "coutwildrnp.tar") if not os.path.exists(path): - with tarfile.open(path, 'w') as tar: + with tarfile.open(path, "w") as tar: for filename in _COUTWILDRNP_FILES: tar.add( os.path.join(data_dir, filename), - arcname=os.path.join('testing', filename)) + arcname=os.path.join("testing", filename), + ) return path -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def path_coutwildrnp_json(data_dir): """Creates ``coutwildrnp.json`` if it does not exist and returns the absolute file path.""" - path = os.path.join(data_dir, 'coutwildrnp.json') + path = os.path.join(data_dir, "coutwildrnp.json") if not os.path.exists(path): name = _COUTWILDRNP_FILES[0] - with fiona.open(os.path.join(data_dir, name), 'r') as source: + with fiona.open(os.path.join(data_dir, name), "r") as source: features = [feat for feat in source] - my_layer = { - 'type': 'FeatureCollection', - 'features': features} - with open(path, 'w') as f: - f.write(json.dumps(my_layer)) + my_layer = {"type": "FeatureCollection", "features": features} + with open(path, "w") as f: + f.write(json.dumps(my_layer, cls=ObjectEncoder)) return path -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def bytes_grenada_geojson(path_grenada_geojson): """The geojson as bytes.""" - with open(path_grenada_geojson, 'rb') as src: + with open(path_grenada_geojson, "rb") as src: return src.read() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") +def path_testopenfilegdb_zip(data_dir): + """Creates .gdb.zip file.""" + return os.path.join(data_dir, "testopenfilegdb.gdb.zip") + + + +@pytest.fixture(scope="session") +def bytes_testopenfilegdb_zip(path_testopenfilegdb_zip): + """.gdb.zip bytes.""" + with open(path_testopenfilegdb_zip, "rb") as f: + return f.read() + + +@pytest.fixture(scope="session") def path_coutwildrnp_gpkg(data_dir): """Creates ``coutwildrnp.gpkg`` if it does not exist and returns the absolute file path.""" if not has_gpkg: raise RuntimeError("GDAL has not been compiled with GPKG support") - path = os.path.join(data_dir, 'coutwildrnp.gpkg') + path = os.path.join(data_dir, "coutwildrnp.gpkg") if not os.path.exists(path): filename_shp = _COUTWILDRNP_FILES[0] path_shp = os.path.join(data_dir, filename_shp) @@ -198,70 +225,70 @@ return path 
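[Illustrative only, not from the diff: how a test might consume the archive fixture above, assuming fiona's zip:// scheme support and that coutwildrnp.shp sits at the archive root.]

    import fiona

    def test_read_from_zip(path_coutwildrnp_zip):
        # the fixture returns the absolute path of the generated archive
        with fiona.open(f"zip://{path_coutwildrnp_zip}") as src:
            assert len(src) > 0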
-@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def path_gpx(data_dir): - return os.path.join(data_dir, 'test_gpx.gpx') + return os.path.join(data_dir, "test_gpx.gpx") -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def feature_collection(): """GeoJSON feature collection on a single line.""" - return _read_file(os.path.join('data', 'collection.txt')) + return _read_file(os.path.join("data", "collection.txt")) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def feature_collection_pp(): """Same as above but with pretty-print styling applied.""" - return _read_file(os.path.join('data', 'collection-pp.txt')) + return _read_file(os.path.join("data", "collection-pp.txt")) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def feature_seq(): """One feature per line.""" - return _read_file(os.path.join('data', 'sequence.txt')) + return _read_file(os.path.join("data", "sequence.txt")) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def feature_seq_pp_rs(): """Same as above but each feature has pretty-print styling""" - return _read_file(os.path.join('data', 'sequence-pp.txt')) + return _read_file(os.path.join("data", "sequence-pp.txt")) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def runner(): """Returns a ```click.testing.CliRunner()`` instance.""" return CliRunner() -@pytest.fixture(scope='class') +@pytest.fixture(scope="class") def uttc_path_coutwildrnp_zip(path_coutwildrnp_zip, request): """Make the ``path_coutwildrnp_zip`` fixture work with a ``unittest.TestCase()``. ``uttc`` stands for unittest test case.""" request.cls.path_coutwildrnp_zip = path_coutwildrnp_zip -@pytest.fixture(scope='class') +@pytest.fixture(scope="class") def uttc_path_coutwildrnp_tar(path_coutwildrnp_tar, request): """Make the ``path_coutwildrnp_tar`` fixture work with a ``unittest.TestCase()``. ``uttc`` stands for unittest test case.""" request.cls.path_coutwildrnp_tar = path_coutwildrnp_tar -@pytest.fixture(scope='class') +@pytest.fixture(scope="class") def uttc_path_coutwildrnp_json(path_coutwildrnp_json, request): """Make the ``path_coutwildrnp_json`` fixture work with a ``unittest.TestCase()``. ``uttc`` stands for unittest test case.""" request.cls.path_coutwildrnp_json = path_coutwildrnp_json -@pytest.fixture(scope='class') +@pytest.fixture(scope="class") def uttc_data_dir(data_dir, request): """Make the ``data_dir`` fixture work with a ``unittest.TestCase()``. ``uttc`` stands for unittest test case.""" request.cls.data_dir = data_dir -@pytest.fixture(scope='class') +@pytest.fixture(scope="class") def uttc_path_gpx(path_gpx, request): """Make the ``path_gpx`` fixture work with a ``unittest.TestCase()``. 
    ``uttc`` stands for unittest test case."""
@@ -270,42 +297,55 @@
 
 # GDAL 2.3.x silently converts ESRI WKT to OGC WKT
 # The regular expression below will match against either
-WGS84PATTERN = 'GEOGCS\["(?:GCS_WGS_1984|WGS 84)",DATUM\["WGS_1984",SPHEROID\["WGS[_ ]84"'
+WGS84PATTERN = (
+    r'GEOGCS\["(?:GCS_WGS_1984|WGS 84)",DATUM\["WGS_1984",SPHEROID\["WGS[_ ]84"'
+)
 
 # Define helpers to skip tests based on GDAL version
 gdal_version = GDALVersion.runtime()
 
 requires_only_gdal1 = pytest.mark.skipif(
-    gdal_version.major != 1,
-    reason="Only relevant for GDAL 1.x")
+    gdal_version.major != 1, reason="Only relevant for GDAL 1.x"
+)
 
 requires_gdal2 = pytest.mark.skipif(
-    not gdal_version.major >= 2,
-    reason="Requires GDAL 2.x")
+    not gdal_version.major >= 2, reason="Requires at least GDAL 2.x"
+)
 
 requires_gdal21 = pytest.mark.skipif(
-    not gdal_version.at_least('2.1'),
-    reason="Requires GDAL 2.1.x")
+    not gdal_version.at_least("2.1"), reason="Requires at least GDAL 2.1.x"
+)
 
 requires_gdal22 = pytest.mark.skipif(
-    not gdal_version.at_least('2.2'),
-    reason="Requires GDAL 2.2.x")
+    not gdal_version.at_least("2.2"), reason="Requires at least GDAL 2.2.x"
+)
+
+requires_gdal23 = pytest.mark.skipif(
+    not gdal_version.at_least("2.3"), reason="Requires at least GDAL 2.3.x"
+)
 
 requires_gdal24 = pytest.mark.skipif(
-    not gdal_version.at_least('2.4'),
-    reason="Requires GDAL 2.4.x")
+    not gdal_version.at_least("2.4"), reason="Requires at least GDAL 2.4.x"
+)
 
 requires_gdal_lt_3 = pytest.mark.skipif(
-    not gdal_version.major < 3,
-    reason="Requires GDAL < 3")
+    not gdal_version.major < 3, reason="Requires GDAL < 3"
+)
 
 requires_gdal3 = pytest.mark.skipif(
-    not gdal_version.major >= 3,
-    reason="Requires GDAL 3.x")
+    not gdal_version.major >= 3, reason="Requires at least GDAL 3.x"
+)
+
+requires_gdal31 = pytest.mark.skipif(
+    not gdal_version.at_least("3.1"), reason="Requires at least GDAL 3.1.x"
+)
+
+requires_gdal33 = pytest.mark.skipif(
+    not gdal_version.at_least("3.3"), reason="Requires at least GDAL 3.3.0"
+)
 
 travis_only = pytest.mark.skipif(
-    not os.getenv("TRAVIS", "false") == "true",
-    reason="Requires travis CI environment"
+    not os.getenv("TRAVIS", "false") == "true", reason="Requires travis CI environment"
 )
@@ -323,111 +363,209 @@
 
 @pytest.fixture()
 def testdata_generator():
-    """ Helper function to create test data sets for ideally all supported drivers
-    """
+    """Helper function to create test data sets for ideally all supported drivers"""
 
     def get_schema(driver):
-        special_schemas = {'CSV': {'geometry': None, 'properties': OrderedDict([('position', 'int')])},
-                           'BNA': {'geometry': 'Point', 'properties': {}},
-                           'DXF': {'properties': OrderedDict(
-                               [('Layer', 'str'),
-                                ('SubClasses', 'str'),
-                                ('Linetype', 'str'),
-                                ('EntityHandle', 'str'),
-                                ('Text', 'str')]),
-                               'geometry': 'Point'},
-                           'GPX': {'geometry': 'Point',
-                                   'properties': OrderedDict([('ele', 'float'), ('time', 'datetime')])},
-                           'GPSTrackMaker': {'properties': OrderedDict([]), 'geometry': 'Point'},
-                           'DGN': {'properties': OrderedDict([]), 'geometry': 'LineString'},
-                           'MapInfo File': {'geometry': 'Point', 'properties': OrderedDict([('position', 'str')])}
-                           }
+        special_schemas = {
+            "CSV": {"geometry": None, "properties": OrderedDict([("position", "int")])},
+            "BNA": {"geometry": "Point", "properties": {}},
+            "DXF": {
+                "properties": OrderedDict(
+                    [
+                        ("Layer", "str"),
+                        ("SubClasses", "str"),
+                        ("Linetype",
"str"), + ("EntityHandle", "str"), + ("Text", "str"), + ] + ), + "geometry": "Point", + }, + "GPX": { + "geometry": "Point", + "properties": OrderedDict([("ele", "float"), ("time", "datetime")]), + }, + "GPSTrackMaker": {"properties": OrderedDict([]), "geometry": "Point"}, + "DGN": {"properties": OrderedDict([]), "geometry": "LineString"}, + "MapInfo File": { + "geometry": "Point", + "properties": OrderedDict([("position", "str")]), + }, + } - return special_schemas.get(driver, {'geometry': 'Point', 'properties': OrderedDict([('position', 'int')])}) + return special_schemas.get( + driver, + {"geometry": "Point", "properties": OrderedDict([("position", "int")])}, + ) def get_crs(driver): - special_crs = {'MapInfo File': from_epsg(4326)} + special_crs = {"MapInfo File": CRS.from_epsg(4326)} return special_crs.get(driver, None) def get_records(driver, range): - special_records1 = {'CSV': [{'geometry': None, 'properties': {'position': i}} for i in range], - 'BNA': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {}} - for i - in range], - 'DXF': [ - {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, - 'properties': OrderedDict( - [('Layer', '0'), - ('SubClasses', 'AcDbEntity:AcDbPoint'), - ('Linetype', None), - ('EntityHandle', str(i + 20000)), - ('Text', None)])} for i in range], - 'GPX': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, - 'properties': {'ele': 0.0, 'time': '2020-03-24T16:08:40+00:00'}} for i - in range], - 'GPSTrackMaker': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, - 'properties': {}} for i in range], - 'DGN': [ - {'geometry': {'type': 'LineString', 'coordinates': [(float(i), 0.0), (0.0, 0.0)]}, - 'properties': {}} for i in range], - 'MapInfo File': [ - {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, - 'properties': {'position': str(i)}} for i in range], - 'PCIDSK': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i), 0.0)}, - 'properties': {'position': i}} for i in range] - } - return special_records1.get(driver, [ - {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {'position': i}} for i in - range]) + special_records1 = { + "CSV": [ + Feature.from_dict(**{"geometry": None, "properties": {"position": i}}) + for i in range + ], + "BNA": [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, float(i))}, + "properties": {}, + } + ) + for i in range + ], + "DXF": [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, float(i))}, + "properties": OrderedDict( + [ + ("Layer", "0"), + ("SubClasses", "AcDbEntity:AcDbPoint"), + ("Linetype", None), + ("EntityHandle", str(i + 20000)), + ("Text", None), + ] + ), + } + ) + for i in range + ], + "GPX": [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, float(i))}, + "properties": {"ele": 0.0, "time": "2020-03-24T16:08:40+00:00"}, + } + ) + for i in range + ], + "GPSTrackMaker": [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, float(i))}, + "properties": {}, + } + ) + for i in range + ], + "DGN": [ + Feature.from_dict( + **{ + "geometry": { + "type": "LineString", + "coordinates": [(float(i), 0.0), (0.0, 0.0)], + }, + "properties": {}, + } + ) + for i in range + ], + "MapInfo File": [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, float(i))}, + "properties": {"position": str(i)}, + } + ) + for i in range + ], + "PCIDSK": [ + Feature.from_dict( + **{ + 
"geometry": { + "type": "Point", + "coordinates": (0.0, float(i), 0.0), + }, + "properties": {"position": i}, + } + ) + for i in range + ], + } + return special_records1.get( + driver, + [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, float(i))}, + "properties": {"position": i}, + } + ) + for i in range + ], + ) def get_records2(driver, range): - special_records2 = {'DGN': [ - {'geometry': {'type': 'LineString', 'coordinates': [(float(i), 0.0), (0.0, 0.0)]}, - 'properties': OrderedDict( - [('Type', 4), - ('Level', 0), - ('GraphicGroup', 0), - ('ColorIndex', 0), - ('Weight', 0), - ('Style', 0), - ('EntityNum', None), - ('MSLink', None), - ('Text', None)])} for i in range], + special_records2 = { + "DGN": [ + Feature.from_dict( + **{ + "geometry": { + "type": "LineString", + "coordinates": [(float(i), 0.0), (0.0, 0.0)], + }, + "properties": OrderedDict( + [ + ("Type", 4), + ("Level", 0), + ("GraphicGroup", 0), + ("ColorIndex", 0), + ("Weight", 0), + ("Style", 0), + ("EntityNum", None), + ("MSLink", None), + ("Text", None), + ] + + ( + [("ULink", None)] + if gdal_version.at_least("3.3") + else [] + ) + ), + } + ) + for i in range + ], } return special_records2.get(driver, get_records(driver, range)) def get_create_kwargs(driver): - kwargs = { - 'FlatGeobuf': {'SPATIAL_INDEX': False} - } + kwargs = {"FlatGeobuf": {"SPATIAL_INDEX": False}} return kwargs.get(driver, {}) def test_equal(driver, val_in, val_out): - is_good = True - is_good = is_good and val_in['geometry'] == val_out['geometry'] - for key in val_in['properties']: - if key in val_out['properties']: - if driver == 'FileGDB' and isinstance(val_in['properties'][key], int): - is_good = is_good and str(val_in['properties'][key]) == str(int(val_out['properties'][key])) + assert val_in["geometry"] == to_dict(val_out["geometry"]) + for key in val_in["properties"]: + if key in val_out["properties"]: + if driver == "FileGDB" and isinstance(val_in["properties"][key], int): + assert str(val_in["properties"][key]) == str( + int(val_out["properties"][key]) + ) else: - is_good = is_good and str(val_in['properties'][key]) == str(val_out['properties'][key]) - else: - is_good = False - return is_good + assert str(val_in["properties"][key]) == str( + val_out["properties"][key] + ) def _testdata_generator(driver, range1, range2): - """ Generate test data and helper methods for a specific driver. Each set of generated set of records - contains the position specified with range. These positions are either encoded as field or in the geometry - of the record, depending of the driver characteristics. + """Generate test data and helper methods for a specific driver. + + Each set of generated set of records contains the position + specified with range. These positions are either encoded as + field or in the geometry of the record, depending of the driver + characteristics. 

         Parameters
         ----------
-        driver: str
-            Name of drive to generate tests for
-        range1: list of integer
-            Range of positions for first set of records
-        range2: list of integer
-            Range of positions for second set of records
+        driver : str
+            Name of driver to generate tests for
+        range1 : list of integer
+            Range of positions for first set of records
+        range2 : list of integer
+            Range of positions for second set of records

         Returns
         -------
@@ -440,16 +582,24 @@
         records2
             A set of records containing the positions of range2
         test_equal
-            A function that returns True if the geometry is equal between the generated records and a record and if
-            the properties of the generated records can be found in a record
+            A function that asserts that the geometry of a generated
+            record matches the geometry of an output record and that
+            the properties of the generated record can be found in
+            the output record.
+
         """
-        return get_schema(driver), get_crs(driver), get_records(driver, range1), get_records2(driver, range2),\
-            test_equal, get_create_kwargs(driver)
+        return (
+            get_schema(driver),
+            get_crs(driver),
+            get_records(driver, range1),
+            get_records2(driver, range2),
+            test_equal,
+        )

     return _testdata_generator


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def path_test_tz_geojson(data_dir):
     """Path to ``test_tz.geojson``"""
-    return os.path.join(data_dir, 'test_tz.geojson')
+    return os.path.join(data_dir, "test_tz.geojson")
Binary files /tmp/tmpygn6ugxl/tPadoVo7UG/fiona-1.8.22/tests/data/coutwildrnp.zip and /tmp/tmpygn6ugxl/uVtg9gF4CK/fiona-1.9.5/tests/data/coutwildrnp.zip differ
diff -Nru fiona-1.8.22/tests/data/example.topojson fiona-1.9.5/tests/data/example.topojson
--- fiona-1.8.22/tests/data/example.topojson	1970-01-01 00:00:00.000000000 +0000
+++ fiona-1.9.5/tests/data/example.topojson	2023-10-11 23:19:44.000000000 +0000
@@ -0,0 +1,39 @@
+{
+  "type": "Topology",
+  "objects": {
+    "example": {
+      "type": "GeometryCollection",
+      "geometries": [
+        {
+          "type": "Point",
+          "properties": {
+            "prop0": "value0"
+          },
+          "coordinates": [102, 0.5]
+        },
+        {
+          "type": "LineString",
+          "properties": {
+            "prop0": "value0",
+            "prop1": 0
+          },
+          "arcs": [0]
+        },
+        {
+          "type": "Polygon",
+          "properties": {
+            "prop0": "value0",
+            "prop1": {
+              "this": "that"
+            }
+          },
+          "arcs": [[-2]]
+        }
+      ]
+    }
+  },
+  "arcs": [
+    [[102, 0], [103, 1], [104, 0], [105, 1]],
+    [[100, 0], [101, 0], [101, 1], [100, 1], [100, 0]]
+  ]
+}
diff -Nru fiona-1.8.22/tests/data/test_tin.csv fiona-1.9.5/tests/data/test_tin.csv
--- fiona-1.8.22/tests/data/test_tin.csv	2022-10-14 23:26:41.000000000 +0000
+++ fiona-1.9.5/tests/data/test_tin.csv	2023-10-11 23:19:44.000000000 +0000
@@ -1,3 +1,4 @@
 WKT,id
 "TIN (((0 0 0, 0 0 1, 0 1 0, 0 0 0)), ((0 0 0, 0 1 0, 1 1 0, 0 0 0)))",1
 "TRIANGLE((0 0 0,0 1 0,1 1 0,0 0 0))",2
+"GEOMETRYCOLLECTION (TIN (((0 0 0, 0 0 1, 0 1 0, 0 0 0)), ((0 0 0, 0 1 0, 1 1 0, 0 0 0))), TRIANGLE((0 0 0,0 1 0,1 1 0,0 0 0)))",3
Binary files /tmp/tmpygn6ugxl/tPadoVo7UG/fiona-1.8.22/tests/data/testopenfilegdb.gdb.zip and /tmp/tmpygn6ugxl/uVtg9gF4CK/fiona-1.9.5/tests/data/testopenfilegdb.gdb.zip differ
diff -Nru fiona-1.8.22/tests/test__env.py fiona-1.9.5/tests/test__env.py
--- fiona-1.8.22/tests/test__env.py	2022-10-14 23:26:41.000000000 +0000
+++ fiona-1.9.5/tests/test__env.py	2023-10-11 23:19:44.000000000 +0000
@@ -1,10 +1,8 @@
 """Tests of _env util module"""
+from unittest import mock
+
 import pytest
-try:
-    from unittest import mock
-except ImportError:
-    import mock

 from fiona._env import GDALDataFinder, PROJDataFinder

@@ -72,7 +70,7 @@
 def
test_search_debian_gdal_data(mock_debian): """Find GDAL data under Debian locations""" finder = GDALDataFinder() - assert finder.search_debian(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join("{}.{}".format(gdal_version.major, gdal_version.minor))) + assert finder.search_debian(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join(f"{gdal_version.major}.{gdal_version.minor}")) def test_search_gdal_data_wheel(mock_wheel): @@ -88,7 +86,7 @@ def test_search_gdal_data_debian(mock_debian): """Find GDAL data under Debian locations""" finder = GDALDataFinder() - assert finder.search(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join("{}.{}".format(gdal_version.major, gdal_version.minor))) + assert finder.search(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join(f"{gdal_version.major}.{gdal_version.minor}")) def test_search_wheel_proj_data_failure(tmpdir): diff -Nru fiona-1.8.22/tests/test_bigint.py fiona-1.9.5/tests/test_bigint.py --- fiona-1.8.22/tests/test_bigint.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_bigint.py 2023-10-11 23:19:44.000000000 +0000 @@ -19,51 +19,58 @@ import fiona from fiona.env import calc_gdal_version_num, get_gdal_version_num +from fiona.model import Feature -@pytest.mark.xfail(fiona.gdal_version.major < 2, - reason="64-bit integer fields require GDAL 2+") def testCreateBigIntSchema(tmpdir): - name = str(tmpdir.join('output1.shp')) + name = str(tmpdir.join("output1.shp")) a_bigint = 10 ** 18 - 1 - fieldname = 'abigint' + fieldname = "abigint" kwargs = { - 'driver': 'ESRI Shapefile', - 'crs': 'EPSG:4326', - 'schema': { - 'geometry': 'Point', - 'properties': [(fieldname, 'int:10')]}} + "driver": "ESRI Shapefile", + "crs": "EPSG:4326", + "schema": {"geometry": "Point", "properties": [(fieldname, "int:10")]}, + } - with fiona.open(name, 'w', **kwargs) as dst: + with fiona.open(name, "w", **kwargs) as dst: rec = {} - rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)} - rec['properties'] = {fieldname: a_bigint} - dst.write(rec) + rec["geometry"] = {"type": "Point", "coordinates": (0, 0)} + rec["properties"] = {fieldname: a_bigint} + dst.write(Feature.from_dict(**rec)) with fiona.open(name) as src: if fiona.gdal_version >= (2, 0, 0): first = next(iter(src)) - assert first['properties'][fieldname] == a_bigint + assert first["properties"][fieldname] == a_bigint -@pytest.mark.skipif(get_gdal_version_num() < calc_gdal_version_num(2, 0, 0), - reason="Test requires GDAL 2+") -@pytest.mark.parametrize('dtype', ['int', 'int64']) +@pytest.mark.parametrize("dtype", ["int", "int64"]) def test_issue691(tmpdir, dtype): """Type 'int' maps to 'int64'""" - schema = {'geometry': 'Any', 'properties': {'foo': dtype}} + schema = {"geometry": "Any", "properties": {"foo": dtype}} with fiona.open( - str(tmpdir.join('test.shp')), 'w', driver='Shapefile', - schema=schema, crs='epsg:4326') as dst: - dst.write({ - 'type': 'Feature', - 'geometry': {'type': 'Point', - 'coordinates': (-122.278015, 37.868995)}, - 'properties': {'foo': 3694063472}}) + str(tmpdir.join("test.shp")), + "w", + driver="Shapefile", + schema=schema, + crs="epsg:4326", + ) as dst: + dst.write( + Feature.from_dict( + **{ + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": (-122.278015, 37.868995), + }, + "properties": {"foo": 3694063472}, + } + ) + ) - with fiona.open(str(tmpdir.join('test.shp'))) as src: - assert src.schema['properties']['foo'] == 'int:18' + with fiona.open(str(tmpdir.join("test.shp"))) as src: + 
assert src.schema["properties"]["foo"] == "int:18" first = next(iter(src)) - assert first['properties']['foo'] == 3694063472 + assert first["properties"]["foo"] == 3694063472 diff -Nru fiona-1.8.22/tests/test_binary_field.py fiona-1.9.5/tests/test_binary_field.py --- fiona-1.8.22/tests/test_binary_field.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_binary_field.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,8 +1,12 @@ -import fiona +"""Binary BLOB field testing.""" import pytest import struct from collections import OrderedDict + +import fiona +from fiona.model import Feature + from .conftest import requires_gpkg @@ -12,11 +16,13 @@ "driver": "GPKG", "schema": { "geometry": "Point", - "properties": OrderedDict([ - ("name", "str"), - ("data", "bytes"), - ]) - } + "properties": OrderedDict( + [ + ("name", "str"), + ("data", "bytes"), + ] + ), + }, } # create some binary data @@ -25,18 +31,20 @@ # write the binary data to a BLOB field filename = str(tmpdir.join("binary_test.gpkg")) with fiona.open(filename, "w", **meta) as dst: - feature = { - "geometry": {"type": "Point", "coordinates": ((0, 0))}, - "properties": { - "name": "test", - u"data": input_data, + feature = Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": ((0, 0))}, + "properties": { + "name": "test", + "data": input_data, + }, } - } + ) dst.write(feature) # read the data back and check consistency with fiona.open(filename, "r") as src: feature = next(iter(src)) - assert feature["properties"]["name"] == "test" - output_data = feature["properties"]["data"] + assert feature.properties["name"] == "test" + output_data = feature.properties["data"] assert output_data == input_data diff -Nru fiona-1.8.22/tests/test_bounds.py fiona-1.9.5/tests/test_bounds.py --- fiona-1.8.22/tests/test_bounds.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_bounds.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,69 +1,87 @@ import pytest +from fiona._env import get_gdal_version_tuple + import fiona from fiona.drvsupport import supported_drivers, _driver_supports_mode from fiona.errors import DriverError -from .conftest import driver_extensions from fiona.env import GDALVersion +from tests.conftest import get_temp_filename def test_bounds_point(): - g = {'type': 'Point', 'coordinates': [10, 10]} + g = {"type": "Point", "coordinates": [10, 10]} assert fiona.bounds(g) == (10, 10, 10, 10) def test_bounds_line(): - g = {'type': 'LineString', 'coordinates': [[0, 0], [10, 10]]} + g = {"type": "LineString", "coordinates": [[0, 0], [10, 10]]} assert fiona.bounds(g) == (0, 0, 10, 10) def test_bounds_polygon(): - g = {'type': 'Polygon', 'coordinates': [[[0, 0], [10, 10], [10, 0]]]} + g = {"type": "Polygon", "coordinates": [[[0, 0], [10, 10], [10, 0]]]} assert fiona.bounds(g) == (0, 0, 10, 10) def test_bounds_z(): - g = {'type': 'Point', 'coordinates': [10, 10, 10]} + g = {"type": "Point", "coordinates": [10, 10, 10]} assert fiona.bounds(g) == (10, 10, 10, 10) -ignore_write_drivers = set(['CSV', 'GPX', 'GPSTrackMaker', 'DXF', 'DGN', 'MapInfo File']) -write_drivers = [driver for driver, raw in supported_drivers.items() if - _driver_supports_mode(driver, 'w') and driver not in ignore_write_drivers] - - -@pytest.mark.parametrize('driver', write_drivers) -def test_bounds(tmpdir, driver): - """Test if bounds are correctly calculated after writing - - """ - - if driver == 'BNA' and GDALVersion.runtime() < GDALVersion(2, 0): - # BNA driver segfaults with gdal 1.11 - return +# MapInfo File driver requires that the bounds 
(geographical extents) of a new file
+# be set before writing the first feature (https://gdal.org/drivers/vector/mitab.html)

-    extension = driver_extensions.get(driver, "bar")
-    path = str(tmpdir.join('foo.{}'.format(extension)))
-    with fiona.open(path, 'w',
-                    driver=driver,
-                    schema={'geometry': 'Point',
-                            'properties': [('title', 'str')]},
-                    fiona_force_driver=True) as c:
+@pytest.mark.parametrize(
+    "driver",
+    [
+        driver
+        for driver in supported_drivers
+        if _driver_supports_mode(driver, "w") and not driver == "MapInfo File"
+    ],
+)
+def test_bounds(tmpdir, driver, testdata_generator):
+    """Test if bounds are correctly calculated after writing."""
+    if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0):
+        pytest.skip("BNA driver segfaults with gdal 1.11")
+    if driver == "ESRI Shapefile" and get_gdal_version_tuple() < (3, 1):
+        pytest.skip(
+            "Bug in GDALs Shapefile driver: https://github.com/OSGeo/gdal/issues/2269"
+        )
+
+    range1 = list(range(0, 5))
+    range2 = list(range(5, 10))
+    schema, crs, records1, records2, test_equal = testdata_generator(
+        driver, range1, range2
+    )
+
+    if not schema["geometry"] == "Point":
+        pytest.skip("Driver does not support point geometries")
+
+    filename = get_temp_filename(driver)
+    path = str(tmpdir.join(filename))
+
+    def calc_bounds(records):
+        xs = []
+        ys = []
+        for r in records:
+            xs.append(r.geometry["coordinates"][0])
+            ys.append(r.geometry["coordinates"][1])
+        return min(xs), min(ys), max(xs), max(ys)

-    c.writerecords([{'geometry': {'type': 'Point', 'coordinates': (1.0, 10.0)},
-                     'properties': {'title': 'One'}}])
+    with fiona.open(path, "w", driver=driver, schema=schema) as c:
+        c.writerecords(records1)

         try:
             bounds = c.bounds
-            assert bounds == (1.0, 10.0, 1.0, 10.0)
+            assert bounds == calc_bounds(records1)
         except Exception as e:
             assert isinstance(e, DriverError)

-    c.writerecords([{'geometry': {'type': 'Point', 'coordinates': (2.0, 20.0)},
-                     'properties': {'title': 'Two'}}])
+        c.writerecords(records2)

         try:
             bounds = c.bounds
-            assert bounds == (1.0, 10.0, 2.0, 20.0)
+            assert bounds == calc_bounds(records1 + records2)
         except Exception as e:
             assert isinstance(e, DriverError)
diff -Nru fiona-1.8.22/tests/test_bytescollection.py fiona-1.9.5/tests/test_bytescollection.py
--- fiona-1.8.22/tests/test_bytescollection.py	2022-10-14 23:26:41.000000000 +0000
+++ fiona-1.9.5/tests/test_bytescollection.py	2023-10-11 23:19:44.000000000 +0000
@@ -2,21 +2,20 @@

 import pytest

-import six
 import fiona
+from fiona.model import Geometry


-class TestReading(object):
+class TestReading:
     @pytest.fixture(autouse=True)
     def bytes_collection_object(self, path_coutwildrnp_json):
         with open(path_coutwildrnp_json) as src:
-            bytesbuf = src.read().encode('utf-8')
+            bytesbuf = src.read().encode("utf-8")
         self.c = fiona.BytesCollection(bytesbuf, encoding="utf-8")
         yield
         self.c.close()

-    @pytest.mark.skipif(six.PY2, reason='string are bytes in Python 2')
     def test_construct_with_str(self, path_coutwildrnp_json):
         with open(path_coutwildrnp_json) as src:
             strbuf = src.read()
@@ -49,10 +48,10 @@
         assert len(self.c.name) > 0

     def test_mode(self):
-        assert self.c.mode == 'r'
+        assert self.c.mode == "r"

     def test_collection(self):
-        assert self.c.encoding == 'utf-8'
+        assert self.c.encoding == "utf-8"

     def test_iter(self):
         assert iter(self.c)
@@ -89,12 +88,12 @@
         assert self.c.driver == "GeoJSON"

     def test_schema(self):
-        s = self.c.schema['properties']
-        assert s['PERIMETER'] == "float"
-        assert s['NAME'] == "str"
-        assert s['URL'] == "str"
-        assert s['STATE_FIPS'] == "str"
-        assert
s['WILDRNP020'] == "int" + s = self.c.schema["properties"] + assert s["PERIMETER"] == "float" + assert s["NAME"] == "str" + assert s["URL"] == "str" + assert s["STATE_FIPS"] == "str" + assert s["WILDRNP020"] == "int" def test_closed_schema(self): # Schema is lazy too, never computed in this case. TODO? @@ -104,10 +103,10 @@ def test_schema_closed_schema(self): self.c.schema self.c.close() - assert sorted(self.c.schema.keys()) == ['geometry', 'properties'] + assert sorted(self.c.schema.keys()) == ["geometry", "properties"] def test_crs(self): - assert self.c.crs['init'] == 'epsg:4326' + assert self.c.crs["init"] == "epsg:4326" def test_crs_wkt(self): assert self.c.crs_wkt.startswith('GEOGCS["WGS 84"') @@ -120,11 +119,10 @@ def test_crs_closed_crs(self): self.c.crs self.c.close() - assert sorted(self.c.crs.keys()) == ['init'] + assert sorted(self.c.crs.keys()) == ["init"] def test_meta(self): - assert (sorted(self.c.meta.keys()) == - ['crs', 'crs_wkt', 'driver', 'schema']) + assert sorted(self.c.meta.keys()) == ["crs", "crs_wkt", "driver", "schema"] def test_bounds(self): assert self.c.bounds[0] == pytest.approx(-113.564247) @@ -135,34 +133,34 @@ def test_iter_one(self): itr = iter(self.c) f = next(itr) - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f["id"] == "0" + assert f["properties"]["STATE"] == "UT" def test_iter_list(self): f = list(self.c)[0] - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f["id"] == "0" + assert f["properties"]["STATE"] == "UT" def test_re_iter_list(self): f = list(self.c)[0] # Run through iterator f = list(self.c)[0] # Run through a new, reset iterator - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f["id"] == "0" + assert f["properties"]["STATE"] == "UT" def test_getitem_one(self): f = self.c[0] - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f["id"] == "0" + assert f["properties"]["STATE"] == "UT" def test_no_write(self): - with pytest.raises(IOError): + with pytest.raises(OSError): self.c.write({}) def test_iter_items_list(self): i, f = list(self.c.items())[0] assert i == 0 - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f["id"] == "0" + assert f["properties"]["STATE"] == "UT" def test_iter_keys_list(self): i = list(self.c.keys())[0] @@ -173,11 +171,11 @@ assert 0 in self.c -class TestFilterReading(object): +class TestFilterReading: @pytest.fixture(autouse=True) def bytes_collection_object(self, path_coutwildrnp_json): with open(path_coutwildrnp_json) as src: - bytesbuf = src.read().encode('utf-8') + bytesbuf = src.read().encode("utf-8") self.c = fiona.BytesCollection(bytesbuf) yield self.c.close() @@ -186,8 +184,8 @@ results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0))) assert len(results) == 67 f = results[0] - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f["id"] == "0" + assert f["properties"]["STATE"] == "UT" def test_filter_reset(self): results = list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0))) @@ -196,10 +194,14 @@ assert len(results) == 67 def test_filter_mask(self): - mask = { - 'type': 'Polygon', - 'coordinates': ( - ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)} + mask = Geometry.from_dict( + **{ + "type": "Polygon", + "coordinates": ( + ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)), + ), + } + ) results = list(self.c.filter(mask=mask)) assert len(results) == 26 @@ -207,12 +209,15 @@ def test_zipped_bytes_collection(bytes_coutwildrnp_zip): 
"""Open a zipped stream of bytes as a collection""" with fiona.BytesCollection(bytes_coutwildrnp_zip) as col: - assert col.name == 'coutwildrnp' + assert col.name == "coutwildrnp" assert len(col) == 67 -@pytest.mark.skipif(fiona.gdal_version >= (2, 3, 0), + +@pytest.mark.skipif( + fiona.gdal_version >= (2, 3, 0), reason="Changed behavior with gdal 2.3, possibly related to RFC 70:" - "Guessing output format from output file name extension for utilities") + "Guessing output format from output file name extension for utilities", +) def test_grenada_bytes_geojson(bytes_grenada_geojson): """Read grenada.geojson as BytesCollection. @@ -226,5 +231,5 @@ pass # If told what driver to use, we should be good. - with fiona.BytesCollection(bytes_grenada_geojson, driver='GeoJSON') as col: + with fiona.BytesCollection(bytes_grenada_geojson, driver="GeoJSON") as col: assert len(col) == 1 diff -Nru fiona-1.8.22/tests/test_collection.py fiona-1.9.5/tests/test_collection.py --- fiona-1.8.22/tests/test_collection.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_collection.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,24 +1,30 @@ # Testing collections and workspaces +from collections import OrderedDict import datetime -import os +import logging import random -import sys import re -from collections import OrderedDict +import sys import pytest import fiona from fiona.collection import Collection -from fiona.env import getenv, GDALVersion -from fiona.errors import FionaValueError, DriverError, FionaDeprecationWarning -from .conftest import WGS84PATTERN, get_temp_filename from fiona.drvsupport import supported_drivers, driver_mode_mingdal +from fiona.env import getenv, GDALVersion +from fiona.errors import ( + AttributeFilterError, + FionaValueError, + DriverError, + FionaDeprecationWarning, +) +from fiona.model import Feature, Geometry +from .conftest import WGS84PATTERN, get_temp_filename -class TestSupportedDrivers(object): +class TestSupportedDrivers: def test_shapefile(self): assert "ESRI Shapefile" in supported_drivers assert set(supported_drivers["ESRI Shapefile"]) == set("raw") @@ -28,8 +34,7 @@ assert set(supported_drivers["MapInfo File"]) == set("raw") -class TestCollectionArgs(object): - +class TestCollectionArgs: def test_path(self): with pytest.raises(TypeError): Collection(0) @@ -40,54 +45,52 @@ def test_driver(self): with pytest.raises(TypeError): - Collection("foo", mode='w', driver=1) + Collection("foo", mode="w", driver=1) def test_schema(self): with pytest.raises(TypeError): - Collection("foo", mode='w', driver="ESRI Shapefile", schema=1) + Collection("foo", mode="w", driver="ESRI Shapefile", schema=1) def test_crs(self): with pytest.raises(TypeError): - Collection("foo", mode='w', driver="ESRI Shapefile", schema=0, - crs=1) + Collection("foo", mode="w", driver="ESRI Shapefile", schema=0, crs=1) def test_encoding(self): with pytest.raises(TypeError): - Collection("foo", mode='r', encoding=1) + Collection("foo", mode="r", encoding=1) def test_layer(self): with pytest.raises(TypeError): - Collection("foo", mode='r', layer=0.5) + Collection("foo", mode="r", layer=0.5) def test_vsi(self): with pytest.raises(TypeError): - Collection("foo", mode='r', vsi='git') + Collection("foo", mode="r", vsi="git") def test_archive(self): with pytest.raises(TypeError): - Collection("foo", mode='r', archive=1) + Collection("foo", mode="r", archive=1) def test_write_numeric_layer(self): with pytest.raises(ValueError): - Collection("foo", mode='w', layer=1) + Collection("foo", mode="w", layer=1) def 
test_write_geojson_layer(self):
         with pytest.raises(ValueError):
-            Collection("foo", mode='w', driver='GeoJSON', layer='foo')
+            Collection("foo", mode="w", driver="GeoJSON", layer="foo")

     def test_append_geojson(self):
         with pytest.raises(ValueError):
-            Collection("foo", mode='w', driver='ARCGEN')
+            Collection("foo", mode="w", driver="ARCGEN")


-class TestOpenException(object):
-
+class TestOpenException:
     def test_no_archive(self):
         with pytest.warns(FionaDeprecationWarning), pytest.raises(DriverError):
-            fiona.open("/", mode='r', vfs="zip:///foo.zip")
+            fiona.open("/", mode="r", vfs="zip:///foo.zip")


-class TestReading(object):
+class TestReading:
     @pytest.fixture(autouse=True)
     def shapefile(self, path_coutwildrnp_shp):
         self.c = fiona.open(path_coutwildrnp_shp, "r")
@@ -95,28 +98,26 @@
         self.c.close()

     def test_open_repr(self, path_coutwildrnp_shp):
-        assert (
-            repr(self.c) ==
-            ("<open Collection '{path}:coutwildrnp', mode 'r' "
-             "at {hexid}>".format(hexid=hex(id(self.c)),
-                                  path=path_coutwildrnp_shp)))
+        assert repr(self.c) == (
+            "<open Collection '{path}:coutwildrnp', mode 'r' "
+            "at {hexid}>".format(hexid=hex(id(self.c)), path=path_coutwildrnp_shp)
+        )

     def test_closed_repr(self, path_coutwildrnp_shp):
         self.c.close()
-        assert (
-            repr(self.c) ==
-            ("<closed Collection '{path}:coutwildrnp', mode 'r' "
-             "at {hexid}>".format(hexid=hex(id(self.c)),
-                                  path=path_coutwildrnp_shp)))
+        assert repr(self.c) == (
+            "<closed Collection '{path}:coutwildrnp', mode 'r' "
+            "at {hexid}>".format(hexid=hex(id(self.c)), path=path_coutwildrnp_shp)
+        )

     def test_path(self, path_coutwildrnp_shp):
         assert self.c.path == path_coutwildrnp_shp

     def test_name(self):
-        assert self.c.name == 'coutwildrnp'
+        assert self.c.name == "coutwildrnp"

     def test_mode(self):
-        assert self.c.mode == 'r'
+        assert self.c.mode == "r"

     def test_encoding(self):
         assert self.c.encoding is None
@@ -156,12 +157,12 @@
         assert self.c.driver == "ESRI Shapefile"

     def test_schema(self):
-        s = self.c.schema['properties']
-        assert s['PERIMETER'] == "float:24.15"
-        assert s['NAME'] == "str:80"
-        assert s['URL'] == "str:101"
-        assert s['STATE_FIPS'] == "str:80"
-        assert s['WILDRNP020'] == "int:10"
+        s = self.c.schema["properties"]
+        assert s["PERIMETER"] == "float:24.15"
+        assert s["NAME"] == "str:80"
+        assert s["URL"] == "str:101"
+        assert s["STATE_FIPS"] == "str:80"
+        assert s["WILDRNP020"] == "int:10"

     def test_closed_schema(self):
         # Schema is lazy too, never computed in this case. TODO?
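The pattern running through these test_collection.py hunks — dict lookups such as f['id'] and f['properties']['STATE'] giving way to f.id and f.properties["STATE"] — comes from the fiona.model.Feature and Geometry classes introduced in 1.9. A minimal sketch of that model, using only calls that appear elsewhere in this patch (the sample id, coordinates, and property value are illustrative):

    from fiona.model import Feature

    feat = Feature.from_dict(
        **{
            "id": "0",
            "geometry": {"type": "Point", "coordinates": (0.0, 0.0)},
            "properties": {"STATE": "UT"},
        }
    )
    # Feature is mapping-like, so attribute and key access are interchangeable,
    # which is why both styles coexist in the updated tests.
    assert feat.id == feat["id"] == "0"
    assert feat.properties["STATE"] == feat["properties"]["STATE"] == "UT"

The updated tests rely on exactly this equivalence: reading tests switch to attribute access, while writing tests keep passing GeoJSON-style dicts through Feature.from_dict.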
@@ -171,11 +172,11 @@ def test_schema_closed_schema(self): self.c.schema self.c.close() - assert sorted(self.c.schema.keys()) == ['geometry', 'properties'] + assert sorted(self.c.schema.keys()) == ["geometry", "properties"] def test_crs(self): crs = self.c.crs - assert crs['init'] == 'epsg:4326' + assert crs["init"] == "epsg:4326" def test_crs_wkt(self): crs = self.c.crs_wkt @@ -189,15 +190,13 @@ def test_crs_closed_crs(self): self.c.crs self.c.close() - assert sorted(self.c.crs.keys()) == ['init'] + assert sorted(self.c.crs.keys()) == ["init"] def test_meta(self): - assert (sorted(self.c.meta.keys()) == - ['crs', 'crs_wkt', 'driver', 'schema']) + assert sorted(self.c.meta.keys()) == ["crs", "crs_wkt", "driver", "schema"] def test_profile(self): - assert (sorted(self.c.profile.keys()) == - ['crs', 'crs_wkt', 'driver', 'schema']) + assert sorted(self.c.profile.keys()) == ["crs", "crs_wkt", "driver", "schema"] def test_bounds(self): assert self.c.bounds[0] == pytest.approx(-113.564247) @@ -207,7 +206,7 @@ def test_context(self, path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp, "r") as c: - assert c.name == 'coutwildrnp' + assert c.name == "coutwildrnp" assert len(c) == 67 assert c.crs assert c.closed @@ -215,44 +214,44 @@ def test_iter_one(self): itr = iter(self.c) f = next(itr) - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f.id == "0" + assert f.properties["STATE"] == "UT" def test_iter_list(self): f = list(self.c)[0] - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f.id == "0" + assert f.properties["STATE"] == "UT" def test_re_iter_list(self): f = list(self.c)[0] # Run through iterator f = list(self.c)[0] # Run through a new, reset iterator - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f.id == "0" + assert f.properties["STATE"] == "UT" def test_getitem_one(self): f = self.c[0] - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f.id == "0" + assert f.properties["STATE"] == "UT" def test_getitem_iter_combo(self): i = iter(self.c) f = next(i) f = next(i) - assert f['id'] == "1" + assert f.id == "1" f = self.c[0] - assert f['id'] == "0" + assert f.id == "0" f = next(i) - assert f['id'] == "2" + assert f.id == "2" def test_no_write(self): - with pytest.raises(IOError): + with pytest.raises(OSError): self.c.write({}) def test_iter_items_list(self): i, f = list(self.c.items())[0] assert i == 0 - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f.id == "0" + assert f.properties["STATE"] == "UT" def test_iter_keys_list(self): i = list(self.c.keys())[0] @@ -263,44 +262,47 @@ assert 0 in self.c -class TestReadingPathTest(object): +class TestReadingPathTest: def test_open_path(self, path_coutwildrnp_shp): pathlib = pytest.importorskip("pathlib") with fiona.open(pathlib.Path(path_coutwildrnp_shp)) as collection: - assert collection.name == 'coutwildrnp' + assert collection.name == "coutwildrnp" @pytest.mark.usefixtures("unittest_path_coutwildrnp_shp") -class TestIgnoreFieldsAndGeometry(object): - +class TestIgnoreFieldsAndGeometry: def test_without_ignore(self): with fiona.open(self.path_coutwildrnp_shp, "r") as collection: - assert("AREA" in collection.schema["properties"].keys()) - assert("STATE" in collection.schema["properties"].keys()) - assert("NAME" in collection.schema["properties"].keys()) - assert("geometry" in collection.schema.keys()) + assert "AREA" in collection.schema["properties"].keys() + assert "STATE" in collection.schema["properties"].keys() 
+ assert "NAME" in collection.schema["properties"].keys() + assert "geometry" in collection.schema.keys() feature = next(iter(collection)) - assert(feature["properties"]["AREA"] is not None) - assert(feature["properties"]["STATE"] is not None) - assert(feature["properties"]["NAME"] is not None) - assert(feature["geometry"] is not None) + assert feature["properties"]["AREA"] is not None + assert feature["properties"]["STATE"] is not None + assert feature["properties"]["NAME"] is not None + assert feature["geometry"] is not None def test_ignore_fields(self): - with fiona.open(self.path_coutwildrnp_shp, "r", ignore_fields=["AREA", "STATE"]) as collection: - assert("AREA" not in collection.schema["properties"].keys()) - assert("STATE" not in collection.schema["properties"].keys()) - assert("NAME" in collection.schema["properties"].keys()) - assert("geometry" in collection.schema.keys()) + with fiona.open( + self.path_coutwildrnp_shp, "r", ignore_fields=["AREA", "STATE"] + ) as collection: + assert "AREA" not in collection.schema["properties"].keys() + assert "STATE" not in collection.schema["properties"].keys() + assert "NAME" in collection.schema["properties"].keys() + assert "geometry" in collection.schema.keys() feature = next(iter(collection)) - assert("AREA" not in feature["properties"].keys()) - assert("STATE" not in feature["properties"].keys()) - assert(feature["properties"]["NAME"] is not None) - assert(feature["geometry"] is not None) + assert "AREA" not in feature["properties"].keys() + assert "STATE" not in feature["properties"].keys() + assert feature["properties"]["NAME"] is not None + assert feature["geometry"] is not None def test_ignore_invalid_field_missing(self): - with fiona.open(self.path_coutwildrnp_shp, "r", ignore_fields=["DOES_NOT_EXIST"]): + with fiona.open( + self.path_coutwildrnp_shp, "r", ignore_fields=["DOES_NOT_EXIST"] + ): pass def test_ignore_invalid_field_not_string(self): @@ -308,21 +310,57 @@ with fiona.open(self.path_coutwildrnp_shp, "r", ignore_fields=[42]): pass + def test_include_fields(self): + with fiona.open( + self.path_coutwildrnp_shp, "r", include_fields=["AREA", "STATE"] + ) as collection: + assert sorted(collection.schema["properties"]) == ["AREA", "STATE"] + assert "geometry" in collection.schema.keys() + + feature = next(iter(collection)) + assert sorted(feature["properties"]) == ["AREA", "STATE"] + assert feature["properties"]["AREA"] is not None + assert feature["properties"]["STATE"] is not None + assert feature["geometry"] is not None + + def test_include_fields__geom_only(self): + with fiona.open( + self.path_coutwildrnp_shp, "r", include_fields=() + ) as collection: + assert sorted(collection.schema["properties"]) == [] + assert "geometry" in collection.schema.keys() + + feature = next(iter(collection)) + assert sorted(feature["properties"]) == [] + assert feature["geometry"] is not None + + def test_include_fields__ignore_fields_error(self): + with pytest.raises(ValueError): + with fiona.open( + self.path_coutwildrnp_shp, + "r", + include_fields=["AREA"], + ignore_fields=["STATE"], + ) as collection: + pass + def test_ignore_geometry(self): - with fiona.open(self.path_coutwildrnp_shp, "r", ignore_geometry=True) as collection: - assert("AREA" in collection.schema["properties"].keys()) - assert("STATE" in collection.schema["properties"].keys()) - assert("NAME" in collection.schema["properties"].keys()) - assert("geometry" not in collection.schema.keys()) + with fiona.open( + self.path_coutwildrnp_shp, "r", ignore_geometry=True + ) as 
collection:
+            assert "AREA" in collection.schema["properties"].keys()
+            assert "STATE" in collection.schema["properties"].keys()
+            assert "NAME" in collection.schema["properties"].keys()
+            assert "geometry" not in collection.schema.keys()

             feature = next(iter(collection))
-            assert(feature["properties"]["AREA"] is not None)
-            assert(feature["properties"]["STATE"] is not None)
-            assert(feature["properties"]["NAME"] is not None)
-            assert("geometry" not in feature.keys())
+            assert feature.properties["AREA"] is not None
+            assert feature.properties["STATE"] is not None
+            assert feature.properties["NAME"] is not None
+            assert feature.geometry is None


-class TestFilterReading(object):
+class TestFilterReading:
     @pytest.fixture(autouse=True)
     def shapefile(self, path_coutwildrnp_shp):
         self.c = fiona.open(path_coutwildrnp_shp, "r")
@@ -333,8 +371,8 @@
         results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0)))
         assert len(results) == 67
         f = results[0]
-        assert f['id'] == "0"
-        assert f['properties']['STATE'] == 'UT'
+        assert f.id == "0"
+        assert f.properties["STATE"] == "UT"

     def test_filter_reset(self):
         results = list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0)))
@@ -343,260 +381,314 @@
         assert len(results) == 67

     def test_filter_mask(self):
-        mask = {
-            'type': 'Polygon',
-            'coordinates': (
-                ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)}
+        mask = Geometry.from_dict(
+            **{
+                "type": "Polygon",
+                "coordinates": (
+                    ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),
+                ),
+            }
+        )
         results = list(self.c.filter(mask=mask))
         assert len(results) == 26

+    def test_filter_where(self):
+        results = list(self.c.filter(where="NAME LIKE 'Mount%'"))
+        assert len(results) == 9
+        assert all([x.properties["NAME"].startswith("Mount") for x in results])
+        results = list(self.c.filter(where="NAME LIKE '%foo%'"))
+        assert len(results) == 0
+        results = list(self.c.filter())
+        assert len(results) == 67

-class TestUnsupportedDriver(object):
+    def test_filter_bbox_where(self):
+        # combined filter criteria
+        results = set(
+            self.c.keys(bbox=(-120.0, 40.0, -100.0, 50.0), where="NAME LIKE 'Mount%'")
+        )
+        assert results == {0, 2, 5, 13}
+        results = set(self.c.keys())
+        assert len(results) == 67

+    def test_filter_where_error(self):
+        for w in ["bad stuff", "NAME=3", "NNAME LIKE 'Mount%'"]:
+            with pytest.raises(AttributeFilterError):
+                self.c.filter(where=w)
+
+
+class TestUnsupportedDriver:
     def test_immediate_fail_driver(self, tmpdir):
         schema = {
-            'geometry': 'Point',
-            'properties': {'label': 'str', u'verit\xe9': 'int'}}
+            "geometry": "Point",
+            "properties": {"label": "str", "verit\xe9": "int"},
+        }
         with pytest.raises(DriverError):
             fiona.open(str(tmpdir.join("foo")), "w", "Bogus", schema=schema)


 @pytest.mark.iconv
-class TestGenericWritingTest(object):
+class TestGenericWritingTest:
     @pytest.fixture(autouse=True)
     def no_iter_shp(self, tmpdir):
         schema = {
-            'geometry': 'Point',
-            'properties': [('label', 'str'), (u'verit\xe9', 'int')]}
-        self.c = fiona.open(str(tmpdir.join("test-no-iter.shp")),
-                            'w', driver="ESRI Shapefile", schema=schema,
-                            encoding='Windows-1252')
+
"geometry": "Point", + "properties": [("label", "str"), ("verit\xe9", "int")], + } + self.c = fiona.open( + str(tmpdir.join("test-no-iter.shp")), + "w", + driver="ESRI Shapefile", + schema=schema, + encoding="Windows-1252", + ) yield self.c.close() def test_encoding(self): - assert self.c.encoding == 'Windows-1252' + assert self.c.encoding == "Windows-1252" def test_no_iter(self): - with pytest.raises(IOError): + with pytest.raises(OSError): iter(self.c) def test_no_filter(self): - with pytest.raises(IOError): + with pytest.raises(OSError): self.c.filter() -class TestPropertiesNumberFormatting(object): +class TestPropertiesNumberFormatting: @pytest.fixture(autouse=True) def shapefile(self, tmpdir): self.filename = str(tmpdir.join("properties_number_formatting_test")) _records_with_float_property1 = [ { - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, - 'properties': {'property1': 12.22} + "geometry": {"type": "Point", "coordinates": (0.0, 0.1)}, + "properties": {"property1": 12.22}, }, { - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.2)}, - 'properties': {'property1': 12.88} - } + "geometry": {"type": "Point", "coordinates": (0.0, 0.2)}, + "properties": {"property1": 12.88}, + }, ] _records_with_float_property1_as_string = [ { - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, - 'properties': {'property1': '12.22'} + "geometry": {"type": "Point", "coordinates": (0.0, 0.1)}, + "properties": {"property1": "12.22"}, }, { - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.2)}, - 'properties': {'property1': '12.88'} - } + "geometry": {"type": "Point", "coordinates": (0.0, 0.2)}, + "properties": {"property1": "12.88"}, + }, ] _records_with_invalid_number_property1 = [ { - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.3)}, - 'properties': {'property1': 'invalid number'} + "geometry": {"type": "Point", "coordinates": (0.0, 0.3)}, + "properties": {"property1": "invalid number"}, } ] def _write_collection(self, records, schema, driver): with fiona.open( - self.filename, - "w", - driver=driver, - schema=schema, - crs='epsg:4326', - encoding='utf-8' + self.filename, + "w", + driver=driver, + schema=schema, + crs="epsg:4326", + encoding="utf-8", ) as c: - c.writerecords(records) + c.writerecords([Feature.from_dict(**rec) for rec in records]) def test_shape_driver_truncates_float_property_to_requested_int_format(self): driver = "ESRI Shapefile" self._write_collection( self._records_with_float_property1, - {'geometry': 'Point', 'properties': [('property1', 'int')]}, - driver + {"geometry": "Point", "properties": [("property1", "int")]}, + driver, ) - with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 2 == len(c) rf1, rf2 = list(c) - assert 12 == rf1['properties']['property1'] - assert 12 == rf2['properties']['property1'] + assert 12 == rf1.properties["property1"] + assert 12 == rf2.properties["property1"] def test_shape_driver_rounds_float_property_to_requested_digits_number(self): driver = "ESRI Shapefile" self._write_collection( self._records_with_float_property1, - {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, - driver + {"geometry": "Point", "properties": [("property1", "float:15.1")]}, + driver, ) - with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 2 == len(c) rf1, rf2 = list(c) - assert 12.2 == rf1['properties']['property1'] - assert 12.9 == 
rf2['properties']['property1'] + assert 12.2 == rf1.properties["property1"] + assert 12.9 == rf2.properties["property1"] - def test_string_is_converted_to_number_and_truncated_to_requested_int_by_shape_driver(self): + def test_string_is_converted_to_number_and_truncated_to_requested_int_by_shape_driver( + self, + ): driver = "ESRI Shapefile" self._write_collection( self._records_with_float_property1_as_string, - {'geometry': 'Point', 'properties': [('property1', 'int')]}, - driver + {"geometry": "Point", "properties": [("property1", "int")]}, + driver, ) - with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 2 == len(c) rf1, rf2 = list(c) - assert 12 == rf1['properties']['property1'] - assert 12 == rf2['properties']['property1'] + assert 12 == rf1.properties["property1"] + assert 12 == rf2.properties["property1"] - def test_string_is_converted_to_number_and_rounded_to_requested_digits_number_by_shape_driver(self): + def test_string_is_converted_to_number_and_rounded_to_requested_digits_number_by_shape_driver( + self, + ): driver = "ESRI Shapefile" self._write_collection( self._records_with_float_property1_as_string, - {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, - driver + {"geometry": "Point", "properties": [("property1", "float:15.1")]}, + driver, ) - with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 2 == len(c) rf1, rf2 = list(c) - assert 12.2 == rf1['properties']['property1'] - assert 12.9 == rf2['properties']['property1'] + assert 12.2 == rf1.properties["property1"] + assert 12.9 == rf2.properties["property1"] def test_invalid_number_is_converted_to_0_and_written_by_shape_driver(self): driver = "ESRI Shapefile" self._write_collection( self._records_with_invalid_number_property1, # {'geometry': 'Point', 'properties': [('property1', 'int')]}, - {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, - driver + {"geometry": "Point", "properties": [("property1", "float:15.1")]}, + driver, ) - with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 1 == len(c) rf1 = c[0] - assert 0 == rf1['properties']['property1'] + assert 0 == rf1.properties["property1"] def test_geojson_driver_truncates_float_property_to_requested_int_format(self): driver = "GeoJSON" self._write_collection( self._records_with_float_property1, - {'geometry': 'Point', 'properties': [('property1', 'int')]}, - driver + {"geometry": "Point", "properties": [("property1", "int")]}, + driver, ) - with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 2 == len(c) rf1, rf2 = list(c) - assert 12 == rf1['properties']['property1'] - assert 12 == rf2['properties']['property1'] + assert 12 == rf1.properties["property1"] + assert 12 == rf2.properties["property1"] - def test_geojson_driver_does_not_round_float_property_to_requested_digits_number(self): + def test_geojson_driver_does_not_round_float_property_to_requested_digits_number( + self, + ): driver = "GeoJSON" self._write_collection( self._records_with_float_property1, - {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, - driver + {"geometry": "Point", "properties": [("property1", "float:15.1")]}, + driver, ) - with fiona.open(self.filename, 
driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 2 == len(c) rf1, rf2 = list(c) # **************************************** # FLOAT FORMATTING IS NOT RESPECTED... - assert 12.22 == rf1['properties']['property1'] - assert 12.88 == rf2['properties']['property1'] + assert 12.22 == rf1.properties["property1"] + assert 12.88 == rf2.properties["property1"] - def test_string_is_converted_to_number_and_truncated_to_requested_int_by_geojson_driver(self): + def test_string_is_converted_to_number_and_truncated_to_requested_int_by_geojson_driver( + self, + ): driver = "GeoJSON" self._write_collection( self._records_with_float_property1_as_string, - {'geometry': 'Point', 'properties': [('property1', 'int')]}, - driver + {"geometry": "Point", "properties": [("property1", "int")]}, + driver, ) - with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 2 == len(c) rf1, rf2 = list(c) - assert 12 == rf1['properties']['property1'] - assert 12 == rf2['properties']['property1'] + assert 12 == rf1.properties["property1"] + assert 12 == rf2.properties["property1"] - def test_string_is_converted_to_number_but_not_rounded_to_requested_digits_number_by_geojson_driver(self): + def test_string_is_converted_to_number_but_not_rounded_to_requested_digits_number_by_geojson_driver( + self, + ): driver = "GeoJSON" self._write_collection( self._records_with_float_property1_as_string, - {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, - driver + {"geometry": "Point", "properties": [("property1", "float:15.1")]}, + driver, ) - with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 2 == len(c) rf1, rf2 = list(c) # **************************************** # FLOAT FORMATTING IS NOT RESPECTED... 
- assert 12.22 == rf1['properties']['property1'] - assert 12.88 == rf2['properties']['property1'] + assert 12.22 == rf1.properties["property1"] + assert 12.88 == rf2.properties["property1"] def test_invalid_number_is_converted_to_0_and_written_by_geojson_driver(self): driver = "GeoJSON" self._write_collection( self._records_with_invalid_number_property1, - # {'geometry': 'Point', 'properties': [('property1', 'int')]}, - {'geometry': 'Point', 'properties': [('property1', 'float:15.1')]}, - driver + {"geometry": "Point", "properties": [("property1", "float:15.1")]}, + driver, ) - with fiona.open(self.filename, driver=driver, encoding='utf-8') as c: + with fiona.open(self.filename, driver=driver, encoding="utf-8") as c: assert 1 == len(c) rf1 = c[0] - assert 0 == rf1['properties']['property1'] + assert 0 == rf1.properties["property1"] -class TestPointWriting(object): +class TestPointWriting: @pytest.fixture(autouse=True) def shapefile(self, tmpdir): self.filename = str(tmpdir.join("point_writing_test.shp")) @@ -605,10 +697,12 @@ "w", driver="ESRI Shapefile", schema={ - 'geometry': 'Point', - 'properties': [('title', 'str'), ('date', 'date')]}, - crs='epsg:4326', - encoding='utf-8') + "geometry": "Point", + "properties": [("title", "str"), ("date", "date")], + }, + crs="epsg:4326", + encoding="utf-8", + ) yield self.sink.close() @@ -621,9 +715,12 @@ def test_write_one(self): assert len(self.sink) == 0 assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) - f = { - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, - 'properties': {'title': 'point one', 'date': "2012-01-29"}} + f = Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, 0.1)}, + "properties": {"title": "point one", "date": "2012-01-29"}, + } + ) self.sink.writerecords([f]) assert len(self.sink) == 1 assert self.sink.bounds == (0.0, 0.1, 0.0, 0.1) @@ -632,12 +729,18 @@ def test_write_two(self): assert len(self.sink) == 0 assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) - f1 = { - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, - 'properties': {'title': 'point one', 'date': "2012-01-29"}} - f2 = { - 'geometry': {'type': 'Point', 'coordinates': (0.0, -0.1)}, - 'properties': {'title': 'point two', 'date': "2012-01-29"}} + f1 = Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, 0.1)}, + "properties": {"title": "point one", "date": "2012-01-29"}, + } + ) + f2 = Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, -0.1)}, + "properties": {"title": "point two", "date": "2012-01-29"}, + } + ) self.sink.writerecords([f1, f2]) assert len(self.sink) == 2 assert self.sink.bounds == (0.0, -0.1, 0.0, 0.1) @@ -645,25 +748,30 @@ def test_write_one_null_geom(self): assert len(self.sink) == 0 assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) - f = { - 'geometry': None, - 'properties': {'title': 'point one', 'date': "2012-01-29"}} + f = Feature.from_dict( + **{ + "geometry": None, + "properties": {"title": "point one", "date": "2012-01-29"}, + } + ) self.sink.writerecords([f]) assert len(self.sink) == 1 assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) def test_validate_record(self): fvalid = { - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, - 'properties': {'title': 'point one', 'date': "2012-01-29"}} + "geometry": {"type": "Point", "coordinates": (0.0, 0.1)}, + "properties": {"title": "point one", "date": "2012-01-29"}, + } finvalid = { - 'geometry': {'type': 'Point', 'coordinates': (0.0, -0.1)}, - 'properties': {'not-a-title': 'point two', 'date': 
"2012-01-29"}} + "geometry": {"type": "Point", "coordinates": (0.0, -0.1)}, + "properties": {"not-a-title": "point two", "date": "2012-01-29"}, + } assert self.sink.validate_record(fvalid) assert not self.sink.validate_record(finvalid) -class TestLineWriting(object): +class TestLineWriting: @pytest.fixture(autouse=True) def shapefile(self, tmpdir): self.sink = fiona.open( @@ -671,19 +779,26 @@ "w", driver="ESRI Shapefile", schema={ - 'geometry': 'LineString', - 'properties': [('title', 'str'), ('date', 'date')]}, - crs={'init': "epsg:4326", 'no_defs': True}) + "geometry": "LineString", + "properties": [("title", "str"), ("date", "date")], + }, + crs={"init": "epsg:4326", "no_defs": True}, + ) yield self.sink.close() def test_write_one(self): assert len(self.sink) == 0 assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) - f = { - 'geometry': {'type': 'LineString', - 'coordinates': [(0.0, 0.1), (0.0, 0.2)]}, - 'properties': {'title': 'line one', 'date': "2012-01-29"}} + f = Feature.from_dict( + **{ + "geometry": { + "type": "LineString", + "coordinates": [(0.0, 0.1), (0.0, 0.2)], + }, + "properties": {"title": "line one", "date": "2012-01-29"}, + } + ) self.sink.writerecords([f]) assert len(self.sink) == 1 assert self.sink.bounds == (0.0, 0.1, 0.0, 0.2) @@ -691,104 +806,157 @@ def test_write_two(self): assert len(self.sink) == 0 assert self.sink.bounds == (0.0, 0.0, 0.0, 0.0) - f1 = { - 'geometry': {'type': 'LineString', - 'coordinates': [(0.0, 0.1), (0.0, 0.2)]}, - 'properties': {'title': 'line one', 'date': "2012-01-29"}} - f2 = { - 'geometry': {'type': 'MultiLineString', - 'coordinates': [[(0.0, 0.0), (0.0, -0.1)], - [(0.0, -0.1), (0.0, -0.2)]]}, - 'properties': {'title': 'line two', 'date': "2012-01-29"}} + f1 = Feature.from_dict( + **{ + "geometry": { + "type": "LineString", + "coordinates": [(0.0, 0.1), (0.0, 0.2)], + }, + "properties": {"title": "line one", "date": "2012-01-29"}, + } + ) + f2 = Feature.from_dict( + **{ + "geometry": { + "type": "MultiLineString", + "coordinates": [ + [(0.0, 0.0), (0.0, -0.1)], + [(0.0, -0.1), (0.0, -0.2)], + ], + }, + "properties": {"title": "line two", "date": "2012-01-29"}, + } + ) self.sink.writerecords([f1, f2]) assert len(self.sink) == 2 assert self.sink.bounds == (0.0, -0.2, 0.0, 0.2) -class TestPointAppend(object): +class TestPointAppend: @pytest.fixture(autouse=True) def shapefile(self, tmpdir, path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp, "r") as input: output_schema = input.schema - output_schema['geometry'] = '3D Point' + output_schema["geometry"] = "3D Point" with fiona.open( - str(tmpdir.join("test_append_point.shp")), - 'w', crs=None, driver="ESRI Shapefile", - schema=output_schema) as output: + str(tmpdir.join("test_append_point.shp")), + "w", + crs=None, + driver="ESRI Shapefile", + schema=output_schema, + ) as output: for f in input: - f['geometry'] = { - 'type': 'Point', - 'coordinates': f['geometry']['coordinates'][0][0]} - output.write(f) + fnew = Feature( + id=f.id, + properties=f.properties, + geometry=Geometry( + type="Point", coordinates=f.geometry.coordinates[0][0] + ), + ) + output.write(fnew) def test_append_point(self, tmpdir): with fiona.open(str(tmpdir.join("test_append_point.shp")), "a") as c: - assert c.schema['geometry'] == '3D Point' - c.write({'geometry': {'type': 'Point', 'coordinates': (0.0, 45.0)}, - 'properties': {'PERIMETER': 1.0, - 'FEATURE2': None, - 'NAME': 'Foo', - 'FEATURE1': None, - 'URL': 'http://example.com', - 'AGBUR': 'BAR', - 'AREA': 0.0, - 'STATE_FIPS': 1, - 'WILDRNP020': 1, - 'STATE': 
'XL'}}) + assert c.schema["geometry"] == "3D Point" + c.write( + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, 45.0)}, + "properties": { + "PERIMETER": 1.0, + "FEATURE2": None, + "NAME": "Foo", + "FEATURE1": None, + "URL": "http://example.com", + "AGBUR": "BAR", + "AREA": 0.0, + "STATE_FIPS": 1, + "WILDRNP020": 1, + "STATE": "XL", + }, + } + ) + ) assert len(c) == 68 -class TestLineAppend(object): +class TestLineAppend: @pytest.fixture(autouse=True) def shapefile(self, tmpdir): with fiona.open( - str(tmpdir.join("test_append_line.shp")), - "w", - driver="ESRI Shapefile", - schema={ - 'geometry': 'MultiLineString', - 'properties': {'title': 'str', 'date': 'date'}}, - crs={'init': "epsg:4326", 'no_defs': True}) as output: - f = {'geometry': {'type': 'MultiLineString', - 'coordinates': [[(0.0, 0.1), (0.0, 0.2)]]}, - 'properties': {'title': 'line one', 'date': "2012-01-29"}} + str(tmpdir.join("test_append_line.shp")), + "w", + driver="ESRI Shapefile", + schema={ + "geometry": "MultiLineString", + "properties": {"title": "str", "date": "date"}, + }, + crs={"init": "epsg:4326", "no_defs": True}, + ) as output: + f = Feature.from_dict( + **{ + "geometry": { + "type": "MultiLineString", + "coordinates": [[(0.0, 0.1), (0.0, 0.2)]], + }, + "properties": {"title": "line one", "date": "2012-01-29"}, + } + ) output.writerecords([f]) def test_append_line(self, tmpdir): with fiona.open(str(tmpdir.join("test_append_line.shp")), "a") as c: - assert c.schema['geometry'] == 'LineString' - f1 = { - 'geometry': {'type': 'LineString', - 'coordinates': [(0.0, 0.1), (0.0, 0.2)]}, - 'properties': {'title': 'line one', 'date': "2012-01-29"}} - f2 = { - 'geometry': {'type': 'MultiLineString', - 'coordinates': [[(0.0, 0.0), (0.0, -0.1)], - [(0.0, -0.1), (0.0, -0.2)]]}, - 'properties': {'title': 'line two', 'date': "2012-01-29"}} + assert c.schema["geometry"] == "LineString" + f1 = Feature.from_dict( + **{ + "geometry": { + "type": "LineString", + "coordinates": [(0.0, 0.1), (0.0, 0.2)], + }, + "properties": {"title": "line one", "date": "2012-01-29"}, + } + ) + f2 = Feature.from_dict( + **{ + "geometry": { + "type": "MultiLineString", + "coordinates": [ + [(0.0, 0.0), (0.0, -0.1)], + [(0.0, -0.1), (0.0, -0.2)], + ], + }, + "properties": {"title": "line two", "date": "2012-01-29"}, + } + ) c.writerecords([f1, f2]) assert len(c) == 3 assert c.bounds == (0.0, -0.2, 0.0, 0.2) def test_shapefile_field_width(tmpdir): - name = str(tmpdir.join('textfield.shp')) + name = str(tmpdir.join("textfield.shp")) with fiona.open( - name, 'w', - schema={'geometry': 'Point', 'properties': {'text': 'str:254'}}, - driver="ESRI Shapefile") as c: + name, + "w", + schema={"geometry": "Point", "properties": {"text": "str:254"}}, + driver="ESRI Shapefile", + ) as c: c.write( - {'geometry': {'type': 'Point', 'coordinates': (0.0, 45.0)}, - 'properties': {'text': 'a' * 254}}) + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, 45.0)}, + "properties": {"text": "a" * 254}, + } + ) + ) c = fiona.open(name, "r") - assert c.schema['properties']['text'] == 'str:254' + assert c.schema["properties"]["text"] == "str:254" f = next(iter(c)) - assert f['properties']['text'] == 'a' * 254 + assert f.properties["text"] == "a" * 254 c.close() -class TestCollection(object): - +class TestCollection: def test_invalid_mode(self, tmpdir): with pytest.raises(ValueError): fiona.open(str(tmpdir.join("bogus.shp")), "r+") @@ -807,8 +975,9 @@ with pytest.raises(DriverError): fiona.open("PG:dbname=databasename", 
"r") - @pytest.mark.skipif(sys.platform.startswith("win"), - reason="test only for *nix based system") + @pytest.mark.skipif( + sys.platform.startswith("win"), reason="test only for *nix based system" + ) def test_no_read_directory(self): with pytest.raises(DriverError): fiona.open("/dev/null", "r") @@ -817,22 +986,27 @@ def test_date(tmpdir): name = str(tmpdir.join("date_test.shp")) sink = fiona.open( - name, "w", + name, + "w", driver="ESRI Shapefile", - schema={ - 'geometry': 'Point', - 'properties': [('id', 'int'), ('date', 'date')]}, - crs={'init': "epsg:4326", 'no_defs': True}) - - recs = [{ - 'geometry': {'type': 'Point', - 'coordinates': (7.0, 50.0)}, - 'properties': {'id': 1, 'date': '2013-02-25'} - }, { - 'geometry': {'type': 'Point', - 'coordinates': (7.0, 50.2)}, - 'properties': {'id': 1, 'date': datetime.date(2014, 2, 3)} - }] + schema={"geometry": "Point", "properties": [("id", "int"), ("date", "date")]}, + crs={"init": "epsg:4326", "no_defs": True}, + ) + + recs = [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (7.0, 50.0)}, + "properties": {"id": 1, "date": "2013-02-25"}, + } + ), + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (7.0, 50.2)}, + "properties": {"id": 1, "date": datetime.date(2014, 2, 3)}, + } + ), + ] sink.writerecords(recs) sink.close() assert len(sink) == 2 @@ -841,22 +1015,21 @@ assert len(c) == 2 rf1, rf2 = list(c) - assert rf1['properties']['date'] == '2013-02-25' - assert rf2['properties']['date'] == '2014-02-03' + assert rf1.properties["date"] == "2013-02-25" + assert rf2.properties["date"] == "2014-02-03" def test_open_kwargs(tmpdir, path_coutwildrnp_shp): - dstfile = str(tmpdir.join('test.json')) + dstfile = str(tmpdir.join("test.json")) with fiona.open(path_coutwildrnp_shp) as src: kwds = src.profile - kwds['driver'] = 'GeoJSON' - kwds['coordinate_precision'] = 2 - with fiona.open(dstfile, 'w', **kwds) as dst: + kwds["driver"] = "GeoJSON" + kwds["coordinate_precision"] = 2 + with fiona.open(dstfile, "w", **kwds) as dst: dst.writerecords(ftr for ftr in src) with open(dstfile) as f: - assert '"coordinates": [ [ [ -111.74, 42.0 ], [ -111.66, 42.0 ]' in \ - f.read(2000) + assert '"coordinates": [ [ [ -111.74, 42.0 ], [ -111.66, 42.0 ]' in f.read(2000) @pytest.mark.network @@ -878,21 +1051,25 @@ "https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.zip", vsi="zip+https", ) - assert ( - ds.path - == "/vsizip/vsicurl/https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.zip" - ) + assert ds.path == "/vsizip/vsicurl/https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.zip" assert len(ds) == 67 def test_encoding_option_warning(tmpdir, caplog): """There is no ENCODING creation option log warning for GeoJSON""" - fiona.Collection(str(tmpdir.join("test.geojson")), "w", driver="GeoJSON", crs="epsg:4326", - schema={"geometry": "Point", "properties": {"foo": "int"}}) - assert not caplog.text + with caplog.at_level(logging.WARNING): + fiona.Collection( + str(tmpdir.join("test.geojson")), + "w", + driver="GeoJSON", + crs="EPSG:4326", + schema={"geometry": "Point", "properties": {"foo": "int"}}, + encoding="bogus", + ) + assert not caplog.text -def test_closed_session_next(path_coutwildrnp_shp): +def test_closed_session_next(gdalenv, path_coutwildrnp_shp): """Confirm fix for issue #687""" src = fiona.open(path_coutwildrnp_shp) itr = iter(src) @@ -913,31 +1090,65 @@ def test_collection_env(path_coutwildrnp_shp): """We have a GDAL env 
within collection context""" with fiona.open(path_coutwildrnp_shp): - assert 'FIONA_ENV' in getenv() + assert "FIONA_ENV" in getenv() -@pytest.mark.parametrize('driver,filename', [('ESRI Shapefile', 'test.shp'), - ('GeoJSON', 'test.json'), - ('GPKG', 'test.gpkg')]) +@pytest.mark.parametrize( + "driver,filename", + [("ESRI Shapefile", "test.shp"), ("GeoJSON", "test.json"), ("GPKG", "test.gpkg")], +) def test_mask_polygon_triangle(tmpdir, driver, filename): - """ Test if mask works for non trivial geometries""" - schema = {'geometry': 'Polygon', 'properties': OrderedDict([('position_i', 'int'), ('position_j', 'int')])} - records = [{'geometry': {'type': 'Polygon', 'coordinates': (((float(i), float(j)), (float(i + 1), float(j)), - (float(i + 1), float(j + 1)), (float(i), float(j + 1)), - (float(i), float(j))),)}, - 'properties': {'position_i': i, 'position_j': j}} for i in range(10) for j in range(10)] + """Test if mask works for non trivial geometries""" + schema = { + "geometry": "Polygon", + "properties": OrderedDict([("position_i", "int"), ("position_j", "int")]), + } + records = [ + Feature.from_dict( + **{ + "geometry": { + "type": "Polygon", + "coordinates": ( + ( + (float(i), float(j)), + (float(i + 1), float(j)), + (float(i + 1), float(j + 1)), + (float(i), float(j + 1)), + (float(i), float(j)), + ), + ), + }, + "properties": {"position_i": i, "position_j": j}, + } + ) + for i in range(10) + for j in range(10) + ] random.shuffle(records) path = str(tmpdir.join(filename)) - with fiona.open(path, 'w', - driver=driver, - schema=schema,) as c: + with fiona.open( + path, + "w", + driver=driver, + schema=schema, + ) as c: c.writerecords(records) with fiona.open(path) as c: items = list( - c.items(mask={'type': 'Polygon', 'coordinates': (((2.0, 2.0), (4.0, 4.0), (4.0, 6.0), (2.0, 2.0)),)})) + c.items( + mask=Geometry.from_dict( + **{ + "type": "Polygon", + "coordinates": ( + ((2.0, 2.0), (4.0, 4.0), (4.0, 6.0), (2.0, 2.0)), + ), + } + ) + ) + ) assert len(items) == 15 @@ -945,20 +1156,49 @@ """Based on pull #955""" tmpfile = str(tmpdir.join("test_empty.geojson")) with pytest.warns(UserWarning, match="Empty field name at index 0"): - with fiona.open(tmpfile, "w", driver="GeoJSON", schema={ - "geometry": "Point", - "properties": {"": "str", "name": "str"} - }) as tmp: - tmp.writerecords([{ - "geometry": {"type": "Point", "coordinates": [ 8, 49 ] }, - "properties": { "": "", "name": "test" } - }]) + with fiona.open( + tmpfile, + "w", + driver="GeoJSON", + schema={"geometry": "Point", "properties": {"": "str", "name": "str"}}, + ) as tmp: + tmp.writerecords( + [ + { + "geometry": {"type": "Point", "coordinates": [8, 49]}, + "properties": {"": "", "name": "test"}, + } + ] + ) with fiona.open(tmpfile) as tmp: with pytest.warns(UserWarning, match="Empty field name at index 0"): assert tmp.schema == { "geometry": "Point", - "properties": {"": "str", "name": "str"} + "properties": {"": "str", "name": "str"}, } with pytest.warns(UserWarning, match="Empty field name at index 0"): next(tmp) + + +@pytest.mark.parametrize( + "extension, driver", + [ + ("shp", "ESRI Shapefile"), + ("geojson", "GeoJSON"), + ("json", "GeoJSON"), + ("gpkg", "GPKG"), + ("SHP", "ESRI Shapefile"), + ], +) +def test_driver_detection(tmpdir, extension, driver): + with fiona.open( + str(tmpdir.join(f"test.{extension}")), + "w", + schema={ + "geometry": "MultiLineString", + "properties": {"title": "str", "date": "date"}, + }, + crs="EPSG:4326", + ) as output: + assert output.driver == driver diff -Nru 
fiona-1.8.22/tests/test_collection_crs.py fiona-1.9.5/tests/test_collection_crs.py --- fiona-1.8.22/tests/test_collection_crs.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_collection_crs.py 2023-10-11 23:19:44.000000000 +0000 @@ -6,7 +6,7 @@ import fiona import fiona.crs from fiona.errors import CRSError -from .conftest import WGS84PATTERN, requires_gdal2, requires_gdal3 +from .conftest import WGS84PATTERN def test_collection_crs_wkt(path_coutwildrnp_shp): @@ -23,10 +23,9 @@ del profile['crs_wkt'] with fiona.open(filename, 'w', **profile) as dst: assert dst.crs_wkt == "" - assert dst.crs == {} + assert dst.crs == fiona.crs.CRS() -@requires_gdal2 def test_collection_create_crs_wkt(tmpdir): """A collection can be created using crs_wkt""" filename = str(tmpdir.join("test.geojson")) @@ -38,7 +37,6 @@ assert col.crs_wkt.startswith('GEOGCS["WGS 84') or col.crs_wkt.startswith('GEOGCS["GCS_WGS_1984') -@requires_gdal3 def test_collection_urn_crs(tmpdir): filename = str(tmpdir.join("test.geojson")) crs = "urn:ogc:def:crs:OGC:1.3:CRS84" diff -Nru fiona-1.8.22/tests/test_collection_legacy.py fiona-1.9.5/tests/test_collection_legacy.py --- fiona-1.8.22/tests/test_collection_legacy.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_collection_legacy.py 2023-10-11 23:19:44.000000000 +0000 @@ -167,7 +167,7 @@ assert f['id'] == "2" def test_no_write(self): - with pytest.raises(IOError): + with pytest.raises(OSError): self.c.write({}) def test_iter_items_list(self): diff -Nru fiona-1.8.22/tests/test_compound_crs.py fiona-1.9.5/tests/test_compound_crs.py --- fiona-1.8.22/tests/test_compound_crs.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_compound_crs.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,6 +1,7 @@ """Test of compound CRS crash avoidance""" import fiona +from fiona.crs import CRS def test_compound_crs(data): @@ -8,4 +9,4 @@ prj = data.join("coutwildrnp.prj") prj.write("""COMPD_CS["unknown",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],VERT_CS["unknown",VERT_DATUM["unknown",2005],UNIT["metre",1.0,AUTHORITY["EPSG","9001"]],AXIS["Up",UP]]]""") with fiona.open(str(data.join("coutwildrnp.shp"))) as collection: - assert isinstance(collection.crs, dict) + assert isinstance(collection.crs, CRS) diff -Nru fiona-1.8.22/tests/test_crs.py fiona-1.9.5/tests/test_crs.py --- fiona-1.8.22/tests/test_crs.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_crs.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,9 +1,12 @@ +"""Tests of fiona.crs.""" + import pytest -from fiona import crs, _crs -from fiona.errors import CRSError +from .conftest import requires_gdal33 -from .conftest import requires_gdal_lt_3 +from fiona import crs +from fiona.env import Env +from fiona.errors import CRSError, FionaDeprecationWarning def test_proj_keys(): @@ -13,36 +16,12 @@ assert 'no_mayo' in crs.all_proj_keys -def test_from_string(): - # A PROJ.4 string with extra whitespace. - val = crs.from_string( - " +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs +foo ") - assert len(val.items()) == 4 - assert val['proj'] == 'longlat' - assert val['ellps'] == 'WGS84' - assert val['datum'] == 'WGS84' - assert val['no_defs'] - assert 'foo' not in val - - -def test_from_string_utm(): - # A PROJ.4 string with extra whitespace and integer UTM zone. 
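# (Editor's illustrative sketch, not part of the upstream diff: the removed
# from_string tests above reflect fiona 1.9's move from PROJ4 dicts to the
# fiona.crs.CRS class, and the rewritten assertions further down suggest the
# equivalent modern spelling. Assumes fiona>=1.9 is installed.)
from fiona.crs import CRS
val = CRS.from_string(" +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ")
assert val.to_string() == "EPSG:4326"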
- val = crs.from_string( - " +proj=utm +zone=13 +ellps=WGS84 +foo ") - assert len(val.items()) == 3 - assert val['proj'] == 'utm' - assert val['ellps'] == 'WGS84' - assert val['zone'] == 13 - assert 'foo' not in val - - def test_to_string(): # Make a string from a mapping with a few bogus items val = { 'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1, 2]} - assert crs.to_string( - val) == "+datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat" + assert crs.CRS.from_user_input(val).to_string() == "EPSG:4326" def test_to_string_utm(): @@ -50,78 +29,22 @@ val = { 'proj': 'utm', 'ellps': 'WGS84', 'zone': 13, 'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1, 2]} - assert crs.to_string( - val) == "+ellps=WGS84 +no_defs +proj=utm +zone=13" + assert crs.CRS.from_user_input(val).to_string() == "EPSG:32613" def test_to_string_epsg(): val = {'init': 'epsg:4326', 'no_defs': True} - assert crs.to_string(val) == "+init=epsg:4326 +no_defs" - - -def test_to_string_zeroval(): - # Make a string with some 0 values (e.g. esri:102017) - val = {'proj': 'laea', 'lat_0': 90, 'lon_0': 0, 'x_0': 0, 'y_0': 0, - 'ellps': 'WGS84', 'datum': 'WGS84', 'units': 'm', 'no_defs': True} - assert crs.to_string(val) == ( - "+datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs +proj=laea " - "+units=m +x_0=0 +y_0=0") + assert crs.CRS.from_user_input(val).to_string() == "EPSG:4326" def test_from_epsg(): - val = crs.from_epsg(4326) + val = crs.CRS.from_epsg(4326) assert val['init'] == "epsg:4326" - assert val['no_defs'] def test_from_epsg_neg(): - try: - crs.from_epsg(-1) - except ValueError: - pass - except: - raise - - -def test_to_string_unicode(): - # See issue #83. - val = crs.to_string({ - u'units': u'm', - u'no_defs': True, - u'datum': u'NAD83', - u'proj': u'utm', - u'zone': 16}) - assert 'NAD83' in val - - -@requires_gdal_lt_3 -def test_wktext(): - """Test +wktext parameter is preserved.""" - proj4 = ('+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 ' - '+x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext ' - '+no_defs') - assert 'wktext' in crs.from_string(proj4) - - -def test_towgs84(): - """+towgs84 is preserved""" - proj4 = ('+proj=lcc +lat_1=49 +lat_2=46 +lat_0=47.5 ' - '+lon_0=13.33333333333333 +x_0=400000 +y_0=400000 +ellps=bessel ' - '+towgs84=577.326,90.129,463.919,5.137,1.474,5.297,2.4232 ' - '+units=m +wktext +no_defs') - assert 'towgs84' in crs.from_string(proj4) - - -@requires_gdal_lt_3 -def test_towgs84_wkt(): - """+towgs84 +wktext are preserved in WKT""" - proj4 = ('+proj=lcc +lat_1=49 +lat_2=46 +lat_0=47.5 ' - '+lon_0=13.33333333333333 +x_0=400000 +y_0=400000 +ellps=bessel ' - '+towgs84=577.326,90.129,463.919,5.137,1.474,5.297,2.4232 ' - '+units=m +wktext +no_defs') - wkt = _crs.crs_to_wkt(proj4) - assert 'towgs84' in wkt - assert 'wktext' in _crs.crs_to_wkt(proj4) + with pytest.raises(CRSError): + crs.CRS.from_epsg(-1) @pytest.mark.parametrize("invalid_input", [ @@ -131,4 +54,96 @@ ]) def test_invalid_crs(invalid_input): with pytest.raises(CRSError): - _crs.crs_to_wkt(invalid_input) + crs.CRS.from_user_input(invalid_input) + + +def test_custom_crs(): + class CustomCRS: + def to_wkt(self): + return ( + 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",' + '6378137,298.257223563,AUTHORITY["EPSG","7030"]],' + 'AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,' + 'AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,' + 'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + ) + + assert 
crs.CRS.from_user_input(CustomCRS()).to_wkt().startswith('GEOGCS["WGS 84"') + + +def test_crs__version(): + target_crs = ( + 'PROJCS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet",' + 'GEOGCS["GCS_NAD_1983_2011",DATUM["D_NAD_1983_2011",' + 'SPHEROID["GRS_1980",6378137.0,298.257222101]],' + 'PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],' + 'PROJECTION["Lambert_Conformal_Conic"],' + 'PARAMETER["False_Easting",14500000.0],' + 'PARAMETER["False_Northing",8600000.0],' + 'PARAMETER["Central_Meridian",-94.83333333333333],' + 'PARAMETER["Standard_Parallel_1",42.53333333333333],' + 'PARAMETER["Standard_Parallel_2",42.53333333333333],' + 'PARAMETER["Scale_Factor",1.000045],' + 'PARAMETER["Latitude_Of_Origin",42.53333333333333],' + 'UNIT["Foot_US",0.3048006096012192]]' + ) + assert ( + crs.CRS.from_user_input(target_crs) + .to_wkt(version="WKT2_2018") + .startswith( + 'PROJCRS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet"' + ) + ) + + +@requires_gdal33 +def test_crs__esri_only_wkt(): + """https://github.com/Toblerity/Fiona/issues/977""" + target_crs = ( + 'PROJCS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet",' + 'GEOGCS["GCS_NAD_1983_2011",DATUM["D_NAD_1983_2011",' + 'SPHEROID["GRS_1980",6378137.0,298.257222101]],' + 'PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],' + 'PROJECTION["Lambert_Conformal_Conic"],' + 'PARAMETER["False_Easting",14500000.0],' + 'PARAMETER["False_Northing",8600000.0],' + 'PARAMETER["Central_Meridian",-94.83333333333333],' + 'PARAMETER["Standard_Parallel_1",42.53333333333333],' + 'PARAMETER["Standard_Parallel_2",42.53333333333333],' + 'PARAMETER["Scale_Factor",1.000045],' + 'PARAMETER["Latitude_Of_Origin",42.53333333333333],' + 'UNIT["Foot_US",0.3048006096012192]]' + ) + assert ( + crs.CRS.from_user_input(target_crs) + .to_wkt() + .startswith( + ( + 'PROJCS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet"', + 'PROJCRS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet"', # GDAL 3.3+ + ) + ) + ) + + +def test_to_wkt__env_version(): + with Env(OSR_WKT_FORMAT="WKT2_2018"): + assert crs.CRS.from_string("EPSG:4326").to_wkt().startswith('GEOGCRS["WGS 84",') + + +def test_to_wkt__invalid_version(): + with pytest.raises(CRSError): + crs.CRS.from_string("EPSG:4326").to_wkt(version="invalid") + + +@pytest.mark.parametrize( + "func, arg", + [ + (crs.from_epsg, 4326), + (crs.from_string, "EPSG:4326"), + (crs.to_string, "EPSG:4326"), + ], +) +def test_from_func_deprecations(func, arg): + with pytest.warns(FionaDeprecationWarning): + _ = func(arg) diff -Nru fiona-1.8.22/tests/test_cursor_interruptions.py fiona-1.9.5/tests/test_cursor_interruptions.py --- fiona-1.8.22/tests/test_cursor_interruptions.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_cursor_interruptions.py 2023-10-11 23:19:44.000000000 +0000 @@ -5,23 +5,29 @@ from tests.conftest import get_temp_filename -@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys() - if _driver_supports_mode(driver, 'w')]) +@pytest.mark.parametrize( + "driver", + [ + driver + for driver in driver_mode_mingdal["w"].keys() + if _driver_supports_mode(driver, "w") + ], +) def test_write_getextent(tmpdir, driver, testdata_generator): - """ - Test if a call to OGR_L_GetExtent has side effects for writing - - """ - - schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), range(10, 20)) + """Test if a call to OGR_L_GetExtent has side effects for writing.""" + schema, crs, records1, 
records2, _ = testdata_generator( + driver, range(0, 10), range(10, 20) + ) path = str(tmpdir.join(get_temp_filename(driver))) - positions = set([int(r['properties']['position']) for r in records1 + records2]) + positions = {int(r['properties']['position']) for r in records1 + records2} - with fiona.open(path, 'w', - driver=driver, - crs=crs, - schema=schema, - **create_kwargs) as c: + with fiona.open( + path, + "w", + driver=driver, + crs=crs, + schema=schema, + ) as c: c.writerecords(records1) # Call to OGR_L_GetExtent @@ -33,29 +39,35 @@ c.writerecords(records2) with fiona.open(path) as c: - data = set([int(f['properties']['position']) for f in c]) + data = {int(f['properties']['position']) for f in c} assert len(positions) == len(data) for p in positions: assert p in data -@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys() - if _driver_supports_mode(driver, 'w')]) +@pytest.mark.parametrize( + "driver", + [ + driver + for driver in driver_mode_mingdal["w"].keys() + if _driver_supports_mode(driver, "w") + ], +) def test_read_getextent(tmpdir, driver, testdata_generator): - """ - Test if a call to OGR_L_GetExtent has side effects for reading - - """ - - schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), range(10, 20)) + """Test if a call to OGR_L_GetExtent has side effects for reading.""" + schema, crs, records1, records2, _ = testdata_generator( + driver, range(0, 10), range(10, 20) + ) path = str(tmpdir.join(get_temp_filename(driver))) - positions = set([int(r['properties']['position']) for r in records1 + records2]) + positions = {int(r['properties']['position']) for r in records1 + records2} - with fiona.open(path, 'w', - driver=driver, - crs=crs, - schema=schema, - **create_kwargs) as c: + with fiona.open( + path, + "w", + driver=driver, + crs=crs, + schema=schema, + ) as c: c.writerecords(records1) c.writerecords(records2) @@ -79,23 +91,29 @@ assert p in data -@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys() - if _driver_supports_mode(driver, 'w')]) +@pytest.mark.parametrize( + "driver", + [ + driver + for driver in driver_mode_mingdal["w"].keys() + if _driver_supports_mode(driver, "w") + ], +) def test_write_getfeaturecount(tmpdir, driver, testdata_generator): - """ - Test if a call to OGR_L_GetFeatureCount has side effects for writing - - """ - - schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), range(10, 20)) + """Test if a call to OGR_L_GetFeatureCount has side effects for writing.""" + schema, crs, records1, records2, _ = testdata_generator( + driver, range(0, 10), range(10, 20) + ) path = str(tmpdir.join(get_temp_filename(driver))) - positions = set([int(r['properties']['position']) for r in records1 + records2]) + positions = {int(r['properties']['position']) for r in records1 + records2} - with fiona.open(path, 'w', - driver=driver, - crs=crs, - schema=schema, - **create_kwargs) as c: + with fiona.open( + path, + "w", + driver=driver, + crs=crs, + schema=schema, + ) as c: c.writerecords(records1) # Call to OGR_L_GetFeatureCount @@ -106,29 +124,35 @@ c.writerecords(records2) with fiona.open(path) as c: - data = set([int(f['properties']['position']) for f in c]) + data = {int(f['properties']['position']) for f in c} assert len(positions) == len(data) for p in positions: assert p in data -@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys() - if 
_driver_supports_mode(driver, 'w')]) +@pytest.mark.parametrize( + "driver", + [ + driver + for driver in driver_mode_mingdal["w"].keys() + if _driver_supports_mode(driver, "w") + ], +) def test_read_getfeaturecount(tmpdir, driver, testdata_generator): - """ - Test if a call to OGR_L_GetFeatureCount has side effects for reading - - """ - - schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator(driver, range(0, 10), range(10, 20)) + """Test if a call to OGR_L_GetFeatureCount has side effects for reading.""" + schema, crs, records1, records2, _ = testdata_generator( + driver, range(0, 10), range(10, 20) + ) path = str(tmpdir.join(get_temp_filename(driver))) - positions = set([int(r['properties']['position']) for r in records1 + records2]) + positions = {int(r['properties']['position']) for r in records1 + records2} - with fiona.open(path, 'w', - driver=driver, - crs=crs, - schema=schema, - **create_kwargs) as c: + with fiona.open( + path, + "w", + driver=driver, + crs=crs, + schema=schema, + ) as c: c.writerecords(records1) c.writerecords(records2) @@ -155,4 +179,4 @@ assert len(positions) == len(data) for p in positions: - assert p in data \ No newline at end of file + assert p in data diff -Nru fiona-1.8.22/tests/test_data_paths.py fiona-1.9.5/tests/test_data_paths.py --- fiona-1.8.22/tests/test_data_paths.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_data_paths.py 2023-10-11 23:19:44.000000000 +0000 @@ -46,8 +46,11 @@ assert result.output.strip() == '/foo/bar' -def test_env_proj_data_environ(monkeypatch): - monkeypatch.setenv('PROJ_LIB', '/foo/bar') +@pytest.mark.parametrize("data_directory_env", ["PROJ_LIB", "PROJ_DATA"]) +def test_env_proj_data_environ(data_directory_env, monkeypatch): + monkeypatch.delenv('PROJ_DATA', raising=False) + monkeypatch.delenv('PROJ_LIB', raising=False) + monkeypatch.setenv(data_directory_env, '/foo/bar') runner = CliRunner() result = runner.invoke(main_group, ['env', '--proj-data']) assert result.exit_code == 0 diff -Nru fiona-1.8.22/tests/test_datetime.py fiona-1.9.5/tests/test_datetime.py --- fiona-1.8.22/tests/test_datetime.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_datetime.py 2023-10-11 23:19:44.000000000 +0000 @@ -10,10 +10,18 @@ from fiona.rfc3339 import parse_time, parse_datetime from .conftest import get_temp_filename from fiona.env import GDALVersion +from fiona.model import Feature import datetime -from fiona.drvsupport import (supported_drivers, driver_mode_mingdal, _driver_converts_field_type_silently_to_str, - _driver_supports_field, _driver_converts_to_str, _driver_supports_timezones, - _driver_supports_milliseconds, _driver_supports_mode) +from fiona.drvsupport import ( + supported_drivers, + driver_mode_mingdal, + _driver_converts_field_type_silently_to_str, + _driver_supports_field, + _driver_converts_to_str, + _driver_supports_timezones, + _driver_supports_milliseconds, + _driver_supports_mode, +) import pytz from pytz import timezone @@ -21,49 +29,80 @@ def get_schema(driver, field_type): - if driver == 'GPX': - return {'properties': OrderedDict([('ele', 'float'), - ('time', field_type)]), - 'geometry': 'Point'} - if driver == 'GPSTrackMaker': + if driver == "GPX": return { - 'properties': OrderedDict([('name', 'str'), ('comment', 'str'), ('icon', 'int'), ('time', field_type)]), - 'geometry': 'Point'} - if driver == 'CSV': + "properties": OrderedDict([("ele", "float"), ("time", field_type)]), + "geometry": "Point", + } + if driver == "GPSTrackMaker": + return { + 
"properties": OrderedDict( + [ + ("name", "str"), + ("comment", "str"), + ("icon", "int"), + ("time", field_type), + ] + ), + "geometry": "Point", + } + if driver == "CSV": return {"properties": {"datefield": field_type}} - return {"geometry": "Point", - "properties": {"datefield": field_type}} + return {"geometry": "Point", "properties": {"datefield": field_type}} def get_records(driver, values): - if driver == 'GPX': - return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, - "properties": {'ele': 0, "time": val}} for val in values] - if driver == 'GPSTrackMaker': - return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, - "properties": OrderedDict([('name', ''), ('comment', ''), ('icon', 48), ('time', val)])} for - val in values] - if driver == 'CSV': - return [{"properties": {"datefield": val}} for val in values] - - return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, - "properties": {"datefield": val}} for val in values] + if driver == "GPX": + return [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": {"ele": 0, "time": val}, + } + ) + for val in values + ] + if driver == "GPSTrackMaker": + return [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": OrderedDict( + [("name", ""), ("comment", ""), ("icon", 48), ("time", val)] + ), + } + ) + for val in values + ] + if driver == "CSV": + return [ + Feature.from_dict(**{"properties": {"datefield": val}}) for val in values + ] + + return [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": {"datefield": val}, + } + ) + for val in values + ] def get_schema_field(driver, schema): - if driver in {'GPX', 'GPSTrackMaker'}: + if driver in {"GPX", "GPSTrackMaker"}: return schema["properties"]["time"] return schema["properties"]["datefield"] def get_field(driver, f): - if driver in {'GPX', 'GPSTrackMaker'}: + if driver in {"GPX", "GPSTrackMaker"}: return f["properties"]["time"] - return f['properties']['datefield'] + return f.properties["datefield"] class TZ(datetime.tzinfo): - def __init__(self, minutes): self.minutes = minutes @@ -72,64 +111,127 @@ def generate_testdata(field_type, driver): - """ Generate test cases for test_datefield + """Generate test cases for test_datefield Each test case has the format [(in_value1, true_value as datetime.*object), (in_value2, true_value as datetime.*object), ...] 
""" # Test data for 'date' data type - if field_type == 'date': - return [("2018-03-25", datetime.date(2018, 3, 25)), - (datetime.date(2018, 3, 25), datetime.date(2018, 3, 25))] + if field_type == "date": + return [ + ("2018-03-25", datetime.date(2018, 3, 25)), + (datetime.date(2018, 3, 25), datetime.date(2018, 3, 25)), + ] # Test data for 'datetime' data type - if field_type == 'datetime': - return [("2018-03-25T22:49:05", datetime.datetime(2018, 3, 25, 22, 49, 5)), - (datetime.datetime(2018, 3, 25, 22, 49, 5), datetime.datetime(2018, 3, 25, 22, 49, 5)), - ("2018-03-25T22:49:05.23", datetime.datetime(2018, 3, 25, 22, 49, 5, 230000)), - (datetime.datetime(2018, 3, 25, 22, 49, 5, 230000), datetime.datetime(2018, 3, 25, 22, 49, 5, 230000)), - ("2018-03-25T22:49:05.123456", datetime.datetime(2018, 3, 25, 22, 49, 5, 123000)), - (datetime.datetime(2018, 3, 25, 22, 49, 5, 123456), datetime.datetime(2018, 3, 25, 22, 49, 5, 123000)), - ("2018-03-25T22:49:05+01:30", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90))), - ("2018-03-25T22:49:05-01:30", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90))), - (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90)), - datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90))), - (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90)), - datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90))), - (datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')), - datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))), - (datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain')), - datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain'))), - (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)), - datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15))), - (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), - datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), - ("2018-03-25T22:49:05-23:45", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), - ("2018-03-25T22:49:05+23:45", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)))] + if field_type == "datetime": + return [ + ("2018-03-25T22:49:05", datetime.datetime(2018, 3, 25, 22, 49, 5)), + ( + datetime.datetime(2018, 3, 25, 22, 49, 5), + datetime.datetime(2018, 3, 25, 22, 49, 5), + ), + ( + "2018-03-25T22:49:05.23", + datetime.datetime(2018, 3, 25, 22, 49, 5, 230000), + ), + ( + datetime.datetime(2018, 3, 25, 22, 49, 5, 230000), + datetime.datetime(2018, 3, 25, 22, 49, 5, 230000), + ), + ( + "2018-03-25T22:49:05.123456", + datetime.datetime(2018, 3, 25, 22, 49, 5, 123000), + ), + ( + datetime.datetime(2018, 3, 25, 22, 49, 5, 123456), + datetime.datetime(2018, 3, 25, 22, 49, 5, 123000), + ), + ( + "2018-03-25T22:49:05+01:30", + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90)), + ), + ( + "2018-03-25T22:49:05-01:30", + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90)), + ), + ( + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90)), + ), + ( + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90)), + ), + ( + datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone( + timezone("Europe/Zurich") + ), + datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone( + timezone("Europe/Zurich") + ), 
+ ), + ( + datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone( + timezone("America/Denver") + ), + datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone( + timezone("America/Denver") + ), + ), + ( + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)), + ), + ( + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), + ), + ( + "2018-03-25T22:49:05-23:45", + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), + ), + ( + "2018-03-25T22:49:05+23:45", + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)), + ), + ] # Test data for 'time' data type - elif field_type == 'time': - return [("22:49:05", datetime.time(22, 49, 5)), - (datetime.time(22, 49, 5), datetime.time(22, 49, 5)), - ("22:49:05.23", datetime.time(22, 49, 5, 230000)), - (datetime.time(22, 49, 5, 230000), datetime.time(22, 49, 5, 230000)), - ("22:49:05.123456", datetime.time(22, 49, 5, 123000)), - (datetime.time(22, 49, 5, 123456), datetime.time(22, 49, 5, 123000)), - ("22:49:05+01:30", datetime.time(22, 49, 5, tzinfo=TZ(90))), - ("22:49:05-01:30", datetime.time(22, 49, 5, tzinfo=TZ(-90))), - (datetime.time(22, 49, 5, tzinfo=TZ(90)), datetime.time(22, 49, 5, tzinfo=TZ(90))), - (datetime.time(22, 49, 5, tzinfo=TZ(-90)), datetime.time(22, 49, 5, tzinfo=TZ(-90))), - (datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)), - datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15))), - (datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), - datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), - ("22:49:05-23:45", datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), - ("22:49:05+23:45", datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)))] + elif field_type == "time": + return [ + ("22:49:05", datetime.time(22, 49, 5)), + (datetime.time(22, 49, 5), datetime.time(22, 49, 5)), + ("22:49:05.23", datetime.time(22, 49, 5, 230000)), + (datetime.time(22, 49, 5, 230000), datetime.time(22, 49, 5, 230000)), + ("22:49:05.123456", datetime.time(22, 49, 5, 123000)), + (datetime.time(22, 49, 5, 123456), datetime.time(22, 49, 5, 123000)), + ("22:49:05+01:30", datetime.time(22, 49, 5, tzinfo=TZ(90))), + ("22:49:05-01:30", datetime.time(22, 49, 5, tzinfo=TZ(-90))), + ( + datetime.time(22, 49, 5, tzinfo=TZ(90)), + datetime.time(22, 49, 5, tzinfo=TZ(90)), + ), + ( + datetime.time(22, 49, 5, tzinfo=TZ(-90)), + datetime.time(22, 49, 5, tzinfo=TZ(-90)), + ), + ( + datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)), + datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)), + ), + ( + datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), + datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), + ), + ("22:49:05-23:45", datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), + ("22:49:05+23:45", datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15))), + ] def compare_datetimes_utc(d1, d2): - """ Test if two time objects are the same. Native times are assumed to be UTC""" + """Test if two time objects are the same. 
Naive times are assumed to be UTC""" if d1.tzinfo is None: d1 = d1.replace(tzinfo=TZ(0)) @@ -141,7 +243,7 @@ def test_compare_datetimes_utc(): - """ Test compare_datetimes_utc """ + """Test compare_datetimes_utc""" d1 = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(60)) d2 = datetime.datetime(2020, 1, 21, 11, 30, 0, tzinfo=TZ(0)) assert d1 == d2 @@ -157,31 +259,43 @@ assert d1 == d2 assert compare_datetimes_utc(d1, d2) - d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')) + d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone( + timezone("Europe/Zurich") + ) d2 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc) assert d1 == d2 assert compare_datetimes_utc(d1, d2) - d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')) - d2 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain')) + d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone( + timezone("Europe/Zurich") + ) + d2 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone( + timezone("America/Denver") + ) assert d1 == d2 assert compare_datetimes_utc(d1, d2) - d1 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')) - d2 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain')) + d1 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone( + timezone("Europe/Zurich") + ) + d2 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone( + timezone("America/Denver") + ) assert d1 == d2 assert compare_datetimes_utc(d1, d2) def convert_time_to_utc(d): - """ Convert datetime.time object to UTC""" - d = datetime.datetime(1900, 1, 1, d.hour, d.minute, d.second, d.microsecond, d.tzinfo) + """Convert datetime.time object to UTC""" + d = datetime.datetime( + 1900, 1, 1, d.hour, d.minute, d.second, d.microsecond, d.tzinfo + ) d -= d.utcoffset() return d.time() def compare_times_utc(d1, d2): - """ Test if two datetime.time objects with fixed timezones have the same UTC time""" + """Test if two datetime.time objects with fixed timezones have the same UTC time""" if d1.tzinfo is not None: d1 = convert_time_to_utc(d1) @@ -207,15 +321,23 @@ d2 = datetime.time(5, 0, 0, tzinfo=TZ(-60 * 7)) assert compare_times_utc(d1, d2) - d1 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('MET')).timetz() - d2 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('EST')).timetz() + d1 = ( + datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc) + .astimezone(timezone("MET")) + .timetz() + ) + d2 = ( + datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc) + .astimezone(timezone("EST")) + .timetz() + ) assert compare_times_utc(d1, d2) def get_tz_offset(d): - """ Returns a Timezone (sign, hours, minutes) tuples + """Return a timezone (sign, hours, minutes) tuple - E.g.: for '2020-01-21T12:30:00+01:30' ('+', 1, 30) is returned + E.g.: for '2020-01-21T12:30:00+01:30' ('+', 1, 30) is returned """ offset_minutes = d.utcoffset().total_seconds() / 60 @@ -229,31 +351,31 @@ def test_get_tz_offset(): - """ Test get_tz_offset""" + """Test get_tz_offset""" d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(90)) - assert get_tz_offset(d) == ('+', 1, 30) + assert get_tz_offset(d) == ("+", 1, 30) d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-90)) - assert get_tz_offset(d) == ('-', 1, 30) + assert 
get_tz_offset(d) == ("-", 1, 30) d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(60 * 24 - 15)) - assert get_tz_offset(d) == ('+', 23, 45) + assert get_tz_offset(d) == ("+", 23, 45) d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-60 * 24 + 15)) - assert get_tz_offset(d) == ('-', 23, 45) + assert get_tz_offset(d) == ("-", 23, 45) def generate_testcases(): - """ Generate test cases for drivers that support datefields, convert datefields to string or do not support - datefiels""" + """Generate test cases for drivers that support date fields, convert date fields to string or do not support + date fields""" _test_cases_datefield = [] _test_cases_datefield_to_str = [] _test_cases_datefield_not_supported = [] - for field_type in ['time', 'datetime', 'date']: + for field_type in ["time", "datetime", "date"]: # Select only driver that are capable of writing fields for driver, raw in supported_drivers.items(): - if _driver_supports_mode(driver, 'w'): + if _driver_supports_mode(driver, "w"): if _driver_supports_field(driver, field_type): if _driver_converts_field_type_silently_to_str(driver, field_type): _test_cases_datefield_to_str.append((driver, field_type)) @@ -262,13 +384,22 @@ else: _test_cases_datefield_not_supported.append((driver, field_type)) - return _test_cases_datefield, _test_cases_datefield_to_str, _test_cases_datefield_not_supported + return ( + _test_cases_datefield, + _test_cases_datefield_to_str, + _test_cases_datefield_not_supported, + ) -test_cases_datefield, test_cases_datefield_to_str, test_cases_datefield_not_supported = generate_testcases() +( + test_cases_datefield, + test_cases_datefield_to_str, + test_cases_datefield_not_supported, +) = generate_testcases() @pytest.mark.parametrize("driver, field_type", test_cases_datefield) +@pytest.mark.gdal def test_datefield(tmpdir, driver, field_type): """ Test date, time, datetime field types. @@ -276,10 +407,10 @@ def _validate(val, val_exp, field_type, driver): - if field_type == 'date': + if field_type == "date": return val == val_exp.isoformat() - elif field_type == 'datetime': + elif field_type == "datetime": # some drivers do not support timezones. In this case, Fiona converts datetime fields with a timezone other # than UTC to UTC. Thus, both the datetime read by Fiona, as well as expected value are first converted to @@ -300,7 +431,7 @@ val_d = datetime.datetime(y, m, d, hh, mm, ss, ms, tz) return compare_datetimes_utc(val_d, val_exp.replace(microsecond=0)) - elif field_type == 'time': + elif field_type == "time": # some drivers do not support timezones. In this case, Fiona converts datetime fields with a timezone other # than UTC to UTC. 
Thus, both the time read by Fiona, as well as expected value are first converted to UTC @@ -327,21 +458,21 @@ values_in, values_exp = zip(*generate_testdata(field_type, driver)) records = get_records(driver, values_in) - with fiona.open(path, 'w', - driver=driver, - schema=schema) as c: + with fiona.open(path, "w", driver=driver, schema=schema) as c: c.writerecords(records) - with fiona.open(path, 'r') as c: - assert get_schema_field(driver, c.schema) == field_type + with fiona.open(path, "r") as c: + assert get_schema_field(driver, c.schema) == field_type, f"Returned field type is {get_schema_field(driver, c.schema)}, expected {field_type}" items = [get_field(driver, f) for f in c] assert len(items) == len(values_in) for val, val_exp in zip(items, values_exp): - assert _validate(val, val_exp, field_type, driver), \ - "{} does not match {}".format(val, val_exp.isoformat()) + assert _validate( + val, val_exp, field_type, driver + ), f"{val} does not match {val_exp.isoformat()}" @pytest.mark.parametrize("driver, field_type", test_cases_datefield_to_str) +@pytest.mark.gdal def test_datefield_driver_converts_to_string(tmpdir, driver, field_type): """ Test handling of date, time, datetime for drivers that convert these types to string. @@ -354,14 +485,19 @@ def _validate(val, val_exp, field_type, driver): - if field_type == 'date': - if (str(val_exp.year) in val and - str(val_exp.month) in val and - str(val_exp.day) in val): + if field_type == "date": + if ( + str(val_exp.year) in val + and str(val_exp.month) in val + and str(val_exp.day) in val + ): return True - elif field_type == 'datetime': + elif field_type == "datetime": - if not _driver_supports_timezones(driver, field_type) and val_exp.utcoffset() is not None: + if ( + not _driver_supports_timezones(driver, field_type) + and val_exp.utcoffset() is not None + ): val_exp = convert_time_to_utc(val_exp) # datetime fields can, depending on the driver, support: @@ -372,135 +508,162 @@ if val_exp.utcoffset() is None: # No Milliseconds if not _driver_supports_milliseconds(driver): - if (str(val_exp.year) in val and - str(val_exp.month) in val and - str(val_exp.day) in val and - str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val): + if ( + str(val_exp.year) in val + and str(val_exp.month) in val + and str(val_exp.day) in val + and str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + ): return True else: # Microseconds - if (str(val_exp.year) in val and - str(val_exp.month) in val and - str(val_exp.day) in val and - str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - str(val_exp.microsecond) in val): + if ( + str(val_exp.year) in val + and str(val_exp.month) in val + and str(val_exp.day) in val + and str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and str(val_exp.microsecond) in val + ): return True # Milliseconds - elif (str(val_exp.year) in val and - str(val_exp.month) in val and - str(val_exp.day) in val and - str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - str(int(val_exp.microsecond / 1000)) in val): + elif ( + str(val_exp.year) in val + and str(val_exp.month) in val + and str(val_exp.day) in val + and str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and str(int(val_exp.microsecond / 1000)) in val + ): return True # With timezone else: sign, hours, minutes = get_tz_offset(val_exp) if 
minutes > 0: - tz = "{sign}{hours:02d}{minutes:02d}".format(sign=sign, - hours=int(hours), - minutes=int(minutes)) + tz = "{sign}{hours:02d}{minutes:02d}".format( + sign=sign, hours=int(hours), minutes=int(minutes) + ) else: - tz = "{sign}{hours:02d}".format(sign=sign, hours=int(hours)) + tz = f"{sign}{int(hours):02d}" print("tz", tz) # No Milliseconds if not _driver_supports_milliseconds(driver): - if (str(val_exp.year) in val and - str(val_exp.month) in val and - str(val_exp.day) in val and - str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - tz in val): + if ( + str(val_exp.year) in val + and str(val_exp.month) in val + and str(val_exp.day) in val + and str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and tz in val + ): return True else: # Microseconds - if (str(val_exp.year) in val and - str(val_exp.month) in val and - str(val_exp.day) in val and - str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - str(val_exp.microsecond) in val and - tz in val): + if ( + str(val_exp.year) in val + and str(val_exp.month) in val + and str(val_exp.day) in val + and str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and str(val_exp.microsecond) in val + and tz in val + ): return True # Milliseconds - elif (str(val_exp.year) in val and - str(val_exp.month) in val and - str(val_exp.day) in val and - str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - str(int(val_exp.microsecond / 1000)) in val and - tz in val): + elif ( + str(val_exp.year) in val + and str(val_exp.month) in val + and str(val_exp.day) in val + and str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and str(int(val_exp.microsecond / 1000)) in val + and tz in val + ): return True - elif field_type == 'time': + elif field_type == "time": # time fields can, depending on the driver, support: # - Timezones # - Milliseconds, respectively Microseconds - if not _driver_supports_timezones(driver, field_type) and val_exp.utcoffset() is not None: + if ( + not _driver_supports_timezones(driver, field_type) + and val_exp.utcoffset() is not None + ): val_exp = convert_time_to_utc(val_exp) # No timezone if val_exp.utcoffset() is None: # No Milliseconds if not _driver_supports_milliseconds(driver): - if (str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val): + if ( + str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + ): return True else: # Microseconds - if (str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - str(val_exp.microsecond) in val): + if ( + str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and str(val_exp.microsecond) in val + ): return True # Milliseconds - elif (str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - str(int(val_exp.microsecond / 1000)) in val): + elif ( + str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and str(int(val_exp.microsecond / 1000)) in val + ): return True # With timezone else: sign, hours, minutes = get_tz_offset(val_exp) if minutes > 0: - tz = "{sign}{hours:02d}{minutes:02d}".format(sign=sign, - hours=int(hours), - minutes=int(minutes)) + tz = "{sign}{hours:02d}{minutes:02d}".format( + sign=sign, 
hours=int(hours), minutes=int(minutes) + ) else: - tz = "{sign}{hours:02d}".format(sign=sign, hours=int(hours)) + tz = f"{sign}{int(hours):02d}" # No Milliseconds if not _driver_supports_milliseconds(driver): - if (str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - tz in val): + if ( + str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and tz in val + ): return True else: # Microseconds - if (str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - str(val_exp.microsecond) in val and - tz in val): + if ( + str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and str(val_exp.microsecond) in val + and tz in val + ): return True # Milliseconds - elif (str(val_exp.hour) in val and - str(val_exp.minute) in val and - str(val_exp.second) in val and - str(int(val_exp.microsecond / 1000)) in val - and tz in val): + elif ( + str(val_exp.hour) in val + and str(val_exp.minute) in val + and str(val_exp.second) in val + and str(int(val_exp.microsecond / 1000)) in val + and tz in val + ): return True return False @@ -510,34 +673,41 @@ records = get_records(driver, values_exp) with pytest.warns(UserWarning) as record: - with fiona.open(path, 'w', - driver=driver, - schema=schema) as c: + with fiona.open(path, "w", driver=driver, schema=schema) as c: c.writerecords(records) assert len(record) == 1 assert "silently converts" in record[0].message.args[0] - with fiona.open(path, 'r') as c: - assert get_schema_field(driver, c.schema) == 'str' + with fiona.open(path, "r") as c: + assert get_schema_field(driver, c.schema) == "str" items = [get_field(driver, f) for f in c] assert len(items) == len(values_in) for val, val_exp in zip(items, values_exp): - assert _validate(val, val_exp, field_type, driver), \ - "{} does not match {}".format(val, val_exp.isoformat()) + assert _validate( + val, val_exp, field_type, driver + ), f"{val} does not match {val_exp.isoformat()}" -@pytest.mark.filterwarnings('ignore:.*driver silently converts *:UserWarning') -@pytest.mark.parametrize("driver,field_type", test_cases_datefield + test_cases_datefield_to_str) +@pytest.mark.filterwarnings("ignore:.*driver silently converts *:UserWarning") +@pytest.mark.parametrize( + "driver,field_type", test_cases_datefield + test_cases_datefield_to_str +) +@pytest.mark.gdal def test_datefield_null(tmpdir, driver, field_type): """ Test handling of null values for date, time, datetime types for write capable drivers """ def _validate(val, val_exp, field_type, driver): - if (driver == 'MapInfo File' and field_type == 'time' and - calc_gdal_version_num(2, 0, 0) <= get_gdal_version_num() < calc_gdal_version_num(3, 1, 1)): - return val == '00:00:00' - if val is None or val == '': + if ( + driver == "MapInfo File" + and field_type == "time" + and calc_gdal_version_num(2, 0, 0) + <= get_gdal_version_num() + < calc_gdal_version_num(3, 1, 1) + ): + return val == "00:00:00" + if val is None or val == "": return True return False @@ -546,37 +716,38 @@ values_in = [None] records = get_records(driver, values_in) - with fiona.open(path, 'w', - driver=driver, - schema=schema) as c: + with fiona.open(path, "w", driver=driver, schema=schema) as c: c.writerecords(records) - with fiona.open(path, 'r') as c: + with fiona.open(path, "r") as c: items = [get_field(driver, f) for f in c] assert len(items) == 1 - assert _validate(items[0], None, field_type, driver), \ - "{} does not match 
{}".format(items[0], None) + assert _validate( + items[0], None, field_type, driver + ), f"{items[0]} does not match {None}" @pytest.mark.parametrize("driver, field_type", test_cases_datefield_not_supported) +@pytest.mark.gdal def test_datetime_field_unsupported(tmpdir, driver, field_type): - """ Test if DriverSupportError is raised for unsupported field_types""" + """Test if DriverSupportError is raised for unsupported field_types""" schema = get_schema(driver, field_type) path = str(tmpdir.join(get_temp_filename(driver))) values_in, values_out = zip(*generate_testdata(field_type, driver)) records = get_records(driver, values_in) with pytest.raises(DriverSupportError): - with fiona.open(path, 'w', - driver=driver, - schema=schema) as c: + with fiona.open(path, "w", driver=driver, schema=schema) as c: c.writerecords(records) @pytest.mark.parametrize("driver, field_type", test_cases_datefield_not_supported) -def test_datetime_field_type_marked_not_supported_is_not_supported(tmpdir, driver, field_type, monkeypatch): - """ Test if a date/datetime/time field type marked as not not supported is really not supported +@pytest.mark.gdal +def test_datetime_field_type_marked_not_supported_is_not_supported( + tmpdir, driver, field_type, monkeypatch +): + """Test if a date/datetime/time field type marked as not not supported is really not supported Warning: Success of this test does not necessary mean that a field is not supported. E.g. errors can occour due to special schema requirements of drivers. This test only covers the standard case. @@ -586,7 +757,9 @@ if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") - monkeypatch.delitem(fiona.drvsupport._driver_field_type_unsupported[field_type], driver) + monkeypatch.delitem( + fiona.drvsupport._driver_field_type_unsupported[field_type], driver + ) schema = get_schema(driver, field_type) path = str(tmpdir.join(get_temp_filename(driver))) @@ -595,12 +768,10 @@ is_good = True try: - with fiona.open(path, 'w', - driver=driver, - schema=schema) as c: + with fiona.open(path, "w", driver=driver, schema=schema) as c: c.writerecords(records) - with fiona.open(path, 'r') as c: + with fiona.open(path, "r") as c: if not get_schema_field(driver, c.schema) == field_type: is_good = False items = [get_field(driver, f) for f in c] @@ -613,28 +784,38 @@ def generate_tostr_testcases(): - """ Flatten driver_converts_to_str to a list of (field_type, driver) tuples""" + """Flatten driver_converts_to_str to a list of (field_type, driver) tuples""" cases = [] for field_type in _driver_converts_to_str: for driver in _driver_converts_to_str[field_type]: driver_supported = driver in supported_drivers - driver_can_write = _driver_supports_mode(driver, 'w') + driver_can_write = _driver_supports_mode(driver, "w") field_supported = _driver_supports_field(driver, field_type) - converts_to_str = _driver_converts_field_type_silently_to_str(driver, field_type) - if driver_supported and driver_can_write and converts_to_str and field_supported: + converts_to_str = _driver_converts_field_type_silently_to_str( + driver, field_type + ) + if ( + driver_supported + and driver_can_write + and converts_to_str + and field_supported + ): cases.append((field_type, driver)) return cases -@pytest.mark.filterwarnings('ignore:.*driver silently converts *:UserWarning') +@pytest.mark.filterwarnings("ignore:.*driver silently converts *:UserWarning") @pytest.mark.parametrize("driver,field_type", test_cases_datefield_to_str) -def 
test_driver_marked_as_silently_converts_to_str_converts_silently_to_str(tmpdir, driver, field_type, monkeypatch): - """ Test if a driver and field_type is marked in fiona.drvsupport.driver_converts_to_str to convert to str really - silently converts to str - - If this test fails, it should be considered to replace the respective None value in - fiona.drvsupport.driver_converts_to_str with a GDALVersion(major, minor) value. - """ +@pytest.mark.gdal +def test_driver_marked_as_silently_converts_to_str_converts_silently_to_str( + tmpdir, driver, field_type, monkeypatch +): + """Test if a driver and field_type marked in fiona.drvsupport.driver_converts_to_str as converting to str really + does silently convert to str + + If this test fails, it should be considered to replace the respective None value in + fiona.drvsupport.driver_converts_to_str with a GDALVersion(major, minor) value. + """ monkeypatch.delitem(fiona.drvsupport._driver_converts_to_str[field_type], driver) @@ -643,17 +824,15 @@ schema = get_schema(driver, field_type) path = str(tmpdir.join(get_temp_filename(driver))) values_in, values_out = zip(*generate_testdata(field_type, driver)) records = get_records(driver, values_in) - with fiona.open(path, 'w', - driver=driver, - schema=schema) as c: + with fiona.open(path, "w", driver=driver, schema=schema) as c: c.writerecords(records) - with fiona.open(path, 'r') as c: - assert get_schema_field(driver, c.schema) == 'str' + with fiona.open(path, "r") as c: + assert get_schema_field(driver, c.schema) == "str" def test_read_timezone_geojson(path_test_tz_geojson): """Test if timezones are read correctly""" with fiona.open(path_test_tz_geojson) as c: items = list(c) - assert items[0]['properties']['test'] == '2015-04-22T00:00:00+07:00' + assert items[0]["properties"]["test"] == "2015-04-22T00:00:00+07:00" diff -Nru fiona-1.8.22/tests/test_driver_options.py fiona-1.9.5/tests/test_driver_options.py --- fiona-1.8.22/tests/test_driver_options.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_driver_options.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,30 +1,35 @@ -import os -import tempfile from collections import OrderedDict import glob +import os +import tempfile + import fiona -from tests.conftest import get_temp_filename, requires_gdal2 +from fiona.model import Feature + +from .conftest import get_temp_filename, requires_gdal2 -@requires_gdal2 -def test_gml_format_option(): - """ Test GML dataset creation option FORMAT (see https://github.com/Toblerity/Fiona/issues/968)""" +def test_gml_format_option(tmp_path): + """Test GML dataset creation option FORMAT (see gh-968)""" - schema = {'geometry': 'Point', 'properties': OrderedDict([('position', 'int')])} - records = [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {'position': i}} for i in - range(10)] + schema = {"geometry": "Point", "properties": OrderedDict([("position", "int")])} + records = [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, float(i))}, + "properties": {"position": i}, + } + ) + for i in range(10) + ] - tmpdir = tempfile.mkdtemp() - fpath = os.path.join(tmpdir, get_temp_filename('GML')) + fpath = tmp_path.joinpath(get_temp_filename("GML")) - with fiona.open(fpath, - 'w', - driver="GML", - schema=schema, - FORMAT="GML3") as out: + with fiona.open(fpath, "w", driver="GML", schema=schema, FORMAT="GML3") as out: out.writerecords(records) - xsd_path = glob.glob(os.path.join(tmpdir, "*.xsd"))[0] + xsd_path = list(tmp_path.glob("*.xsd"))[0] + with open(xsd_path) as f: xsd = f.read() assert "http://schemas.opengis.net/gml/3.1.1" in xsd diff -Nru 
fiona-1.8.22/tests/test_drivers.py fiona-1.9.5/tests/test_drivers.py --- fiona-1.8.22/tests/test_drivers.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_drivers.py 2023-10-11 23:19:44.000000000 +0000 @@ -23,7 +23,7 @@ with fiona.drivers(CPL_DEBUG=True): c = fiona.open(path_coutwildrnp_shp) c.close() - with open(logfile, "r") as f: + with open(logfile) as f: log = f.read() if fiona.gdal_version.major >= 2: assert "GDALOpen" in log diff -Nru fiona-1.8.22/tests/test_drvsupport.py fiona-1.9.5/tests/test_drvsupport.py --- fiona-1.8.22/tests/test_drvsupport.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_drvsupport.py 2023-10-11 23:19:44.000000000 +0000 @@ -16,6 +16,7 @@ @requires_gdal24 +@pytest.mark.gdal @pytest.mark.parametrize("format", ["GeoJSON", "ESRIJSON", "TopoJSON", "GeoJSONSeq"]) def test_geojsonseq(format): """Format is available""" @@ -25,18 +26,13 @@ @pytest.mark.parametrize( "driver", [driver for driver, raw in supported_drivers.items() if "w" in raw] ) +@pytest.mark.gdal def test_write_or_driver_error(tmpdir, driver, testdata_generator): - """ - Test if write mode works. - - """ - + """Test if write mode works.""" if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") - schema, crs, records1, _, test_equal, create_kwargs = testdata_generator( - driver, range(0, 10), [] - ) + schema, crs, records1, _, _ = testdata_generator(driver, range(0, 10), []) path = str(tmpdir.join(get_temp_filename(driver))) if driver in driver_mode_mingdal[ @@ -47,16 +43,12 @@ # Test if DriverError is raised for gdal < driver_mode_mingdal with pytest.raises(DriverError): - with fiona.open( - path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs - ) as c: + with fiona.open(path, "w", driver=driver, crs=crs, schema=schema) as c: c.writerecords(records1) else: # Test if we can write - with fiona.open( - path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs - ) as c: + with fiona.open(path, "w", driver=driver, crs=crs, schema=schema) as c: c.writerecords(records1) @@ -70,19 +62,16 @@ assert len(list(collection)) == len(records1) +# If this test fails, it should be considered to update +# driver_mode_mingdal in drvsupport.py. @pytest.mark.parametrize( "driver", [driver for driver in driver_mode_mingdal["w"].keys()] ) +@pytest.mark.gdal def test_write_does_not_work_when_gdal_smaller_mingdal( tmpdir, driver, testdata_generator, monkeypatch ): - """ - Test if driver really can't write for gdal < driver_mode_mingdal - - If this test fails, it should be considered to update driver_mode_mingdal in drvsupport.py. 
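# (Editor's sketch, not part of the diff: driver_mode_mingdal maps a mode to a
# mapping of driver -> minimum GDAL version tuple. The "FlatGeobuf" key is an
# assumption used only for illustration; the gating pattern mirrors the
# surrounding tests. Assumes fiona>=1.9 is installed.)
from fiona.env import GDALVersion
from fiona.drvsupport import driver_mode_mingdal

min_ver = driver_mode_mingdal["w"].get("FlatGeobuf")
if min_ver is not None and GDALVersion.runtime() < GDALVersion(*min_ver[:2]):
    print("runtime GDAL is too old to write FlatGeobuf")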
- - """ - + """Test if driver really can't write for gdal < driver_mode_mingdal.""" if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") if driver == "FlatGeobuf" and calc_gdal_version_num( @@ -90,9 +79,7 @@ ) <= get_gdal_version_num() < calc_gdal_version_num(3, 1, 3): pytest.skip("See https://github.com/Toblerity/Fiona/pull/924") - schema, crs, records1, _, test_equal, create_kwargs = testdata_generator( - driver, range(0, 10), [] - ) + schema, crs, records1, _, _ = testdata_generator(driver, range(0, 10), []) path = str(tmpdir.join(get_temp_filename(driver))) if driver in driver_mode_mingdal[ @@ -103,21 +90,18 @@ monkeypatch.delitem(fiona.drvsupport.driver_mode_mingdal["w"], driver) with pytest.raises(Exception): - with fiona.open( - path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs - ) as c: + with fiona.open(path, "w", driver=driver, crs=crs, schema=schema) as c: c.writerecords(records1) +# Some driver only allow a specific schema. These drivers can be +# excluded by adding them to blacklist_append_drivers. @pytest.mark.parametrize( "driver", [driver for driver, raw in supported_drivers.items() if "a" in raw] ) +@pytest.mark.gdal def test_append_or_driver_error(tmpdir, testdata_generator, driver): - """Test if driver supports append mode. - - Some driver only allow a specific schema. These drivers can be excluded by adding them to blacklist_append_drivers. - - """ + """Test if driver supports append mode.""" if driver == "DGN": pytest.xfail("DGN schema has changed") @@ -125,7 +109,7 @@ pytest.skip("BNA driver segfaults with gdal 1.11") path = str(tmpdir.join(get_temp_filename(driver))) - schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator( + schema, crs, records1, records2, _ = testdata_generator( driver, range(0, 5), range(5, 10) ) @@ -138,9 +122,7 @@ return # Create test file to append to - with fiona.open( - path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs - ) as c: + with fiona.open(path, "w", driver=driver, crs=crs, schema=schema) as c: c.writerecords(records1) @@ -170,6 +152,8 @@ assert len(list(collection)) == len(records1) + len(records2) +# If this test fails, it should be considered to update +# driver_mode_mingdal in drvsupport.py. @pytest.mark.parametrize( "driver", [ @@ -178,20 +162,20 @@ if driver in supported_drivers ], ) +@pytest.mark.gdal def test_append_does_not_work_when_gdal_smaller_mingdal( tmpdir, driver, testdata_generator, monkeypatch ): - """Test if driver supports append mode. - - If this test fails, it should be considered to update driver_mode_mingdal in drvsupport.py. 
- - """ - + """Test if driver supports append mode.""" if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") + if driver == "FlatGeobuf" and GDALVersion.runtime() < GDALVersion(3, 5): + pytest.skip("FlatGeobuf segfaults with GDAL < 3.5.1") + + path = str(tmpdir.join(get_temp_filename(driver))) - schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator( + schema, crs, records1, records2, _ = testdata_generator( driver, range(0, 5), range(5, 10) ) @@ -204,9 +188,7 @@ return # Create test file to append to - with fiona.open( - path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs - ) as c: + with fiona.open(path, "w", driver=driver, crs=crs, schema=schema) as c: c.writerecords(records1) @@ -216,7 +198,6 @@ *driver_mode_mingdal["a"][driver] ): # Test if driver really can't append for gdal < driver_mode_mingdal - monkeypatch.delitem(fiona.drvsupport.driver_mode_mingdal["a"], driver) with pytest.raises(Exception): @@ -233,20 +214,16 @@ assert len(list(collection)) == len(records1) + len(records2) +# If this test fails, it should be considered to enable write support +# for the respective driver in drvsupport.py. @pytest.mark.parametrize( "driver", [driver for driver, raw in supported_drivers.items() if raw == "r"] ) +@pytest.mark.gdal def test_no_write_driver_cannot_write(tmpdir, driver, testdata_generator, monkeypatch): - """Test if read only driver cannot write - - If this test fails, it should be considered to enable write support for the respective driver in drvsupport.py. - - """ - + """Test if read only driver cannot write.""" monkeypatch.setitem(fiona.drvsupport.supported_drivers, driver, "rw") - schema, crs, records1, _, test_equal, create_kwargs = testdata_generator( - driver, range(0, 5), [] - ) + schema, crs, records1, _, _ = testdata_generator(driver, range(0, 5), []) if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): pytest.skip("BNA driver segfaults with gdal 1.11") @@ -257,12 +234,12 @@ path = str(tmpdir.join(get_temp_filename(driver))) with pytest.raises(Exception): - with fiona.open( - path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs - ) as c: + with fiona.open(path, "w", driver=driver, crs=crs, schema=schema) as c: c.writerecords(records1) +# If this test fails, it should be considered to enable append support +# for the respective driver in drvsupport.py. @pytest.mark.parametrize( "driver", [ @@ -271,23 +248,18 @@ if "w" in raw and "a" not in raw ], ) +@pytest.mark.gdal def test_no_append_driver_cannot_append( tmpdir, driver, testdata_generator, monkeypatch ): - """ - Test if a driver that supports write and not append cannot also append - - If this test fails, it should be considered to enable append support for the respective driver in drvsupport.py. 
- - """ - + """Test if a driver that supports write and not append cannot also append.""" monkeypatch.setitem(fiona.drvsupport.supported_drivers, driver, "raw") - if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): - pytest.skip("BNA driver segfaults with gdal 1.11") + if driver == "FlatGeobuf" and get_gdal_version_num() == calc_gdal_version_num(3, 5, 0): + pytest.skip("FlatGeobuf driver segfaults with gdal 3.5.0") path = str(tmpdir.join(get_temp_filename(driver))) - schema, crs, records1, records2, test_equal, create_kwargs = testdata_generator( + schema, crs, records1, records2, _ = testdata_generator( driver, range(0, 5), range(5, 10) ) @@ -300,16 +272,13 @@ return # Create test file to append to - with fiona.open( - path, "w", driver=driver, crs=crs, schema=schema, **create_kwargs - ) as c: - + with fiona.open(path, "w", driver=driver, crs=crs, schema=schema) as c: c.writerecords(records1) try: with fiona.open(path, "a", driver=driver) as c: c.writerecords(records2) - except Exception as exc: + except Exception: log.exception("Caught exception in trying to append.") return @@ -330,3 +299,34 @@ # we cannot test drivers that are not present in the gdal installation if driver in supported_drivers: assert mode in supported_drivers[driver] + + +def test_allow_unsupported_drivers(monkeypatch, tmpdir): + """Test if allow unsupported drivers works as expected""" + + # We delete a known working driver from fiona.drvsupport so that we can use it + monkeypatch.delitem(fiona.drvsupport.supported_drivers, "GPKG") + + schema = {"geometry": "Polygon", "properties": {}} + + # Test that indeed we can't create a file without allow_unsupported_drivers + path1 = str(tmpdir.join("test1.gpkg")) + with pytest.raises(DriverError): + with fiona.open(path1, mode="w", driver="GPKG", schema=schema): + pass + + # Test that we can create file with allow_unsupported_drivers=True + path2 = str(tmpdir.join("test2.gpkg")) + try: + with fiona.open( + path2, + mode="w", + driver="GPKG", + schema=schema, + allow_unsupported_drivers=True, + ): + pass + except Exception as e: + assert ( + False + ), f"Using allow_unsupported_drivers=True should not raise an exception: {e}" diff -Nru fiona-1.8.22/tests/test_encoding.py fiona-1.9.5/tests/test_encoding.py --- fiona-1.8.22/tests/test_encoding.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_encoding.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,4 +1,3 @@ -# coding=utf-8 """Encoding tests""" from glob import glob @@ -32,7 +31,7 @@ with fiona.open(str(gre_shp_cp1252)) as src: assert src.session._get_internal_encoding() == 'utf-8' feat = next(iter(src)) - assert feat['properties']['name_ru'] != u'Гренада' + assert feat['properties']['name_ru'] != 'Гренада' @requires_gdal2 @@ -42,7 +41,7 @@ with fiona.open(str(gre_shp_cp1252)) as src: assert src.session._get_internal_encoding() == 'utf-8' feat = next(iter(src)) - assert feat['properties']['name_ru'] == u'Гренада' + assert feat['properties']['name_ru'] == 'Гренада' @requires_gdal2 @@ -50,4 +49,4 @@ """utf-8 override succeeds""" with fiona.open(str(gre_shp_cp1252), encoding='utf-8') as src: assert src.session._get_internal_encoding() == 'utf-8' - assert next(iter(src))['properties']['name_ru'] == u'Гренада' + assert next(iter(src))['properties']['name_ru'] == 'Гренада' diff -Nru fiona-1.8.22/tests/test_env.py fiona-1.9.5/tests/test_env.py --- fiona-1.8.22/tests/test_env.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_env.py 2023-10-11 23:19:44.000000000 +0000 @@ -2,16 +2,15 @@ import os 
import sys -try: - from unittest import mock -except ImportError: - import mock +from unittest import mock +import boto3 import pytest import fiona from fiona import _env -from fiona.env import getenv, ensure_env, ensure_env_with_credentials +from fiona.env import getenv, hasenv, ensure_env, ensure_env_with_credentials +from fiona.errors import FionaDeprecationWarning from fiona.session import AWSSession, GSSession @@ -22,76 +21,93 @@ def fake_opener(path): return fiona.env.getenv() - with fiona.env.Env(session=AWSSession(aws_access_key_id='foo', aws_secret_access_key='bar')): - assert fiona.env.getenv()['AWS_ACCESS_KEY_ID'] == 'foo' - assert fiona.env.getenv()['AWS_SECRET_ACCESS_KEY'] == 'bar' - - monkeypatch.setenv('AWS_ACCESS_KEY_ID', 'lol') - monkeypatch.setenv('AWS_SECRET_ACCESS_KEY', 'wut') - gdalenv = fake_opener('s3://foo/bar') - assert gdalenv['AWS_ACCESS_KEY_ID'] == 'foo' - assert gdalenv['AWS_SECRET_ACCESS_KEY'] == 'bar' + with fiona.env.Env( + session=AWSSession(aws_access_key_id="foo", aws_secret_access_key="bar") + ): + assert fiona.env.getenv()["AWS_ACCESS_KEY_ID"] == "foo" + assert fiona.env.getenv()["AWS_SECRET_ACCESS_KEY"] == "bar" + + monkeypatch.setenv("AWS_ACCESS_KEY_ID", "lol") + monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "wut") + gdalenv = fake_opener("s3://foo/bar") + assert gdalenv["AWS_ACCESS_KEY_ID"] == "foo" + assert gdalenv["AWS_SECRET_ACCESS_KEY"] == "bar" def test_ensure_env_decorator(gdalenv): @ensure_env def f(): - return getenv()['FIONA_ENV'] + return getenv()["FIONA_ENV"] + assert f() is True def test_ensure_env_decorator_sets_gdal_data(gdalenv, monkeypatch): """fiona.env.ensure_env finds GDAL from environment""" + @ensure_env def f(): - return getenv()['GDAL_DATA'] + return getenv()["GDAL_DATA"] - monkeypatch.setenv('GDAL_DATA', '/lol/wut') - assert f() == '/lol/wut' + monkeypatch.setenv("GDAL_DATA", "/lol/wut") + assert f() == "/lol/wut" @mock.patch("fiona._env.GDALDataFinder.find_file") -def test_ensure_env_decorator_sets_gdal_data_prefix(find_file, gdalenv, monkeypatch, tmpdir): +def test_ensure_env_decorator_sets_gdal_data_prefix( + find_file, gdalenv, monkeypatch, tmpdir +): """fiona.env.ensure_env finds GDAL data under a prefix""" + @ensure_env def f(): - return getenv()['GDAL_DATA'] + return getenv()["GDAL_DATA"] find_file.return_value = None tmpdir.ensure("share/gdal/header.dxf") - monkeypatch.delenv('GDAL_DATA', raising=False) - monkeypatch.setattr(_env, '__file__', str(tmpdir.join("fake.py"))) - monkeypatch.setattr(sys, 'prefix', str(tmpdir)) + monkeypatch.delenv("GDAL_DATA", raising=False) + monkeypatch.setattr(_env, "__file__", str(tmpdir.join("fake.py"))) + monkeypatch.setattr(sys, "prefix", str(tmpdir)) assert f() == str(tmpdir.join("share").join("gdal")) @mock.patch("fiona._env.GDALDataFinder.find_file") -def test_ensure_env_decorator_sets_gdal_data_wheel(find_file, gdalenv, monkeypatch, tmpdir): +def test_ensure_env_decorator_sets_gdal_data_wheel( + find_file, gdalenv, monkeypatch, tmpdir +): """fiona.env.ensure_env finds GDAL data in a wheel""" + @ensure_env def f(): - return getenv()['GDAL_DATA'] + return getenv()["GDAL_DATA"] find_file.return_value = None tmpdir.ensure("gdal_data/header.dxf") - monkeypatch.delenv('GDAL_DATA', raising=False) - monkeypatch.setattr(_env, '__file__', str(tmpdir.join(os.path.basename(_env.__file__)))) + monkeypatch.delenv("GDAL_DATA", raising=False) + monkeypatch.setattr( + _env, "__file__", str(tmpdir.join(os.path.basename(_env.__file__))) + ) assert f() == str(tmpdir.join("gdal_data")) 
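Aside: the GDAL_DATA discovery tests above and below all follow one pattern, which this minimal sketch (not part of the diff) isolates. Assumes Fiona 1.9+; the printed value depends on the local GDAL installation and may be None if GDAL_DATA was never set:

from fiona.env import ensure_env, getenv

@ensure_env
def gdal_data_in_effect():
    # ensure_env activates a Fiona Env for the duration of the call, so
    # discovered config options such as GDAL_DATA are visible via getenv().
    return getenv().get("GDAL_DATA")

print(gdal_data_in_effect())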
@mock.patch("fiona._env.GDALDataFinder.find_file") -def test_ensure_env_with_decorator_sets_gdal_data_wheel(find_file, gdalenv, monkeypatch, tmpdir): +def test_ensure_env_with_decorator_sets_gdal_data_wheel( + find_file, gdalenv, monkeypatch, tmpdir +): """fiona.env.ensure_env finds GDAL data in a wheel""" + @ensure_env_with_credentials def f(*args): - return getenv()['GDAL_DATA'] + return getenv()["GDAL_DATA"] find_file.return_value = None tmpdir.ensure("gdal_data/header.dxf") - monkeypatch.delenv('GDAL_DATA', raising=False) - monkeypatch.setattr(_env, '__file__', str(tmpdir.join(os.path.basename(_env.__file__)))) + monkeypatch.delenv("GDAL_DATA", raising=False) + monkeypatch.setattr( + _env, "__file__", str(tmpdir.join(os.path.basename(_env.__file__))) + ) assert f("foo") == str(tmpdir.join("gdal_data")) @@ -101,6 +117,11 @@ assert fiona.open(path_coutwildrnp_shp).crs +def test_env_default_env(path_coutwildrnp_shp): + with fiona.open(path_coutwildrnp_shp): + assert hasenv() + + def test_nested_gs_credentials(monkeypatch): """Check that rasterio.open() doesn't wipe out surrounding credentials""" @@ -108,8 +129,33 @@ def fake_opener(path): return fiona.env.getenv() - with fiona.env.Env(session=GSSession(google_application_credentials='foo')): - assert fiona.env.getenv()['GOOGLE_APPLICATION_CREDENTIALS'] == 'foo' + with fiona.env.Env(session=GSSession(google_application_credentials="foo")): + assert fiona.env.getenv()["GOOGLE_APPLICATION_CREDENTIALS"] == "foo" + + gdalenv = fake_opener("gs://foo/bar") + assert gdalenv["GOOGLE_APPLICATION_CREDENTIALS"] == "foo" + - gdalenv = fake_opener('gs://foo/bar') - assert gdalenv['GOOGLE_APPLICATION_CREDENTIALS'] == 'foo' +def test_aws_session(gdalenv): + """Create an Env with a boto3 session.""" + aws_session = boto3.Session( + aws_access_key_id="id", + aws_secret_access_key="key", + aws_session_token="token", + region_name="null-island-1", + ) + with pytest.warns(FionaDeprecationWarning): + with fiona.env.Env(session=aws_session) as s: + assert ( + s.session._session.get_credentials().get_frozen_credentials().access_key + == "id" + ) + assert ( + s.session._session.get_credentials().get_frozen_credentials().secret_key + == "key" + ) + assert ( + s.session._session.get_credentials().get_frozen_credentials().token + == "token" + ) + assert s.session._session.region_name == "null-island-1" diff -Nru fiona-1.8.22/tests/test_feature.py fiona-1.9.5/tests/test_feature.py --- fiona-1.8.22/tests/test_feature.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_feature.py 2023-10-11 23:19:44.000000000 +0000 @@ -5,141 +5,171 @@ import shutil import sys import tempfile +import unittest + import pytest import fiona from fiona import collection from fiona.collection import Collection +from fiona.model import Feature from fiona.ogrext import featureRT -class TestPointRoundTrip(object): - - def setup(self): +class TestPointRoundTrip(unittest.TestCase): + def setUp(self): self.tempdir = tempfile.mkdtemp() - schema = {'geometry': 'Point', 'properties': {'title': 'str'}} - self.c = Collection(os.path.join(self.tempdir, "foo.shp"), - "w", driver="ESRI Shapefile", schema=schema) + schema = {"geometry": "Point", "properties": {"title": "str"}} + self.c = Collection( + os.path.join(self.tempdir, "foo.shp"), + "w", + driver="ESRI Shapefile", + schema=schema, + ) - def teardown(self): + def tearDdown(self): self.c.close() shutil.rmtree(self.tempdir) def test_geometry(self): - f = { 'id': '1', - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, - 
'properties': {'title': u'foo'} } - g = featureRT(f, self.c) - assert ( - sorted(g['geometry'].items()) == - [('coordinates', (0.0, 0.0)), ('type', 'Point')]) + f = { + "id": "1", + "geometry": {"type": "Point", "coordinates": (0.0, 0.0)}, + "properties": {"title": "foo"}, + } + g = featureRT(f, self.c) + assert g.geometry.type == "Point" + assert g.geometry.coordinates == (0.0, 0.0) def test_properties(self): - f = { 'id': '1', - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, - 'properties': {'title': u'foo'} } + f = Feature.from_dict( + **{ + "id": "1", + "geometry": {"type": "Point", "coordinates": (0.0, 0.0)}, + "properties": {"title": "foo"}, + } + ) g = featureRT(f, self.c) - assert g['properties']['title'] == 'foo' + assert g.properties["title"] == "foo" def test_none_property(self): - f = { 'id': '1', - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, - 'properties': {'title': None} } + f = Feature.from_dict( + **{ + "id": "1", + "geometry": {"type": "Point", "coordinates": (0.0, 0.0)}, + "properties": {"title": None}, + } + ) g = featureRT(f, self.c) - assert g['properties']['title'] is None + assert g.properties["title"] is None -class TestLineStringRoundTrip(object): - - def setup(self): +class TestLineStringRoundTrip(unittest.TestCase): + def setUp(self): self.tempdir = tempfile.mkdtemp() - schema = {'geometry': 'LineString', 'properties': {'title': 'str'}} - self.c = Collection(os.path.join(self.tempdir, "foo.shp"), - "w", "ESRI Shapefile", schema=schema) + schema = {"geometry": "LineString", "properties": {"title": "str"}} + self.c = Collection( + os.path.join(self.tempdir, "foo.shp"), "w", "ESRI Shapefile", schema=schema + ) - def teardown(self): + def tearDown(self): self.c.close() shutil.rmtree(self.tempdir) def test_geometry(self): - f = { 'id': '1', - 'geometry': { 'type': 'LineString', - 'coordinates': [(0.0, 0.0), (1.0, 1.0)] }, - 'properties': {'title': u'foo'} } - g = featureRT(f, self.c) - assert ( - sorted(g['geometry'].items()) == - [('coordinates', [(0.0, 0.0), (1.0, 1.0)]), - ('type', 'LineString')]) + f = Feature.from_dict( + **{ + "id": "1", + "geometry": { + "type": "LineString", + "coordinates": [(0.0, 0.0), (1.0, 1.0)], + }, + "properties": {"title": "foo"}, + } + ) + g = featureRT(f, self.c) + assert g.geometry.type == "LineString" + assert g.geometry.coordinates == [(0.0, 0.0), (1.0, 1.0)] def test_properties(self): - f = { 'id': '1', - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)}, - 'properties': {'title': u'foo'} } + f = Feature.from_dict( + **{ + "id": "1", + "geometry": {"type": "Point", "coordinates": (0.0, 0.0)}, + "properties": {"title": "foo"}, + } + ) g = featureRT(f, self.c) - assert g['properties']['title'] == 'foo' + assert g.properties["title"] == "foo" -class TestPolygonRoundTrip(object): - - def setup(self): +class TestPolygonRoundTrip(unittest.TestCase): + def setUp(self): self.tempdir = tempfile.mkdtemp() - schema = {'geometry': 'Polygon', 'properties': {'title': 'str'}} - self.c = Collection(os.path.join(self.tempdir, "foo.shp"), - "w", "ESRI Shapefile", schema=schema) + schema = {"geometry": "Polygon", "properties": {"title": "str"}} + self.c = Collection( + os.path.join(self.tempdir, "foo.shp"), "w", "ESRI Shapefile", schema=schema + ) - def teardown(self): + def tearDown(self): self.c.close() shutil.rmtree(self.tempdir) def test_geometry(self): - f = { 'id': '1', - 'geometry': { 'type': 'Polygon', - 'coordinates': - [[(0.0, 0.0), - (0.0, 1.0), - (1.0, 1.0), - (1.0, 0.0), - (0.0, 0.0)]] }, - 
'properties': {'title': u'foo'} } - g = featureRT(f, self.c) - assert ( - sorted(g['geometry'].items()) == - [('coordinates', [[(0.0, 0.0), - (0.0, 1.0), - (1.0, 1.0), - (1.0, 0.0), - (0.0, 0.0)]]), - ('type', 'Polygon')]) + f = Feature.from_dict( + **{ + "id": "1", + "geometry": { + "type": "Polygon", + "coordinates": [ + [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)] + ], + }, + "properties": {"title": "foo"}, + } + ) + g = featureRT(f, self.c) + assert g.geometry.type == "Polygon" + assert g.geometry.coordinates == [ + [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)] + ] def test_properties(self): - f = { 'id': '1', - 'geometry': { 'type': 'Polygon', - 'coordinates': - [[(0.0, 0.0), - (0.0, 1.0), - (1.0, 1.0), - (1.0, 0.0), - (0.0, 0.0)]] }, - 'properties': {'title': u'foo'} } - g = featureRT(f, self.c) - assert g['properties']['title'] == 'foo' - - -@pytest.mark.parametrize("driver, extension", [("ESRI Shapefile", "shp"), ("GeoJSON", "geojson")]) + f = Feature.from_dict( + **{ + "id": "1", + "geometry": { + "type": "Polygon", + "coordinates": [ + [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)] + ], + }, + "properties": {"title": "foo"}, + } + ) + g = featureRT(f, self.c) + assert g.properties["title"] == "foo" + + +@pytest.mark.parametrize( + "driver, extension", [("ESRI Shapefile", "shp"), ("GeoJSON", "geojson")] +) def test_feature_null_field(tmpdir, driver, extension): """ In GDAL 2.2 the behaviour of OGR_F_IsFieldSet slightly changed. Some drivers (e.g. GeoJSON) also require fields to be explicitly set to null. See GH #460. """ - meta = {"driver": driver, "schema": {"geometry": "Point", "properties": {"RETURN_P": "str"}}} - filename = os.path.join(str(tmpdir), "test_null."+extension) + meta = { + "driver": driver, + "schema": {"geometry": "Point", "properties": {"RETURN_P": "str"}}, + } + filename = os.path.join(str(tmpdir), "test_null." 
+ extension) with fiona.open(filename, "w", **meta) as dst: g = {"coordinates": [1.0, 2.0], "type": "Point"} - feature = {"geometry": g, "properties": {"RETURN_P": None}} + feature = Feature.from_dict(**{"geometry": g, "properties": {"RETURN_P": None}}) dst.write(feature) with fiona.open(filename, "r") as src: feature = next(iter(src)) - assert(feature["properties"]["RETURN_P"] is None) + assert feature.properties["RETURN_P"] is None diff -Nru fiona-1.8.22/tests/test_fio_bounds.py fiona-1.9.5/tests/test_fio_bounds.py --- fiona-1.8.22/tests/test_fio_bounds.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_fio_bounds.py 2023-10-11 23:19:44.000000000 +0000 @@ -76,6 +76,6 @@ def test_explode_output_rs(feature_collection, runner): result = runner.invoke(main_group, ['bounds', '--explode', '--rs'], feature_collection) assert result.exit_code == 0 - assert result.output.count(u'\u001e') == 2 + assert result.output.count('\x1e') == 2 assert result.output.count('[') == result.output.count(']') == 2 assert len(re.findall(r'\d*\.\d*', result.output)) == 8 diff -Nru fiona-1.8.22/tests/test_fio_calc.py fiona-1.9.5/tests/test_fio_calc.py --- fiona-1.8.22/tests/test_fio_calc.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_fio_calc.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,7 +1,6 @@ """Tests for `$ fio calc`.""" -from __future__ import division import json from click.testing import CliRunner @@ -54,9 +53,10 @@ def test_existing_property(feature_seq, runner): - result = runner.invoke(main_group, ['calc', "AREA", "f.properties.AREA * 2"], - feature_seq) - assert result.exit_code == 1 + result = runner.invoke( + main_group, ["calc", "AREA", "f.properties.AREA * 2"], feature_seq + ) + assert result.exit_code == 2 result = runner.invoke(main_group, ['calc', "--overwrite", "AREA", "f.properties.AREA * 2"], feature_seq) diff -Nru fiona-1.8.22/tests/test_fio_cat.py fiona-1.9.5/tests/test_fio_cat.py --- fiona-1.8.22/tests/test_fio_cat.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_fio_cat.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,12 +1,9 @@ """Tests for `$ fio cat`.""" -import os -import pytest from click.testing import CliRunner from fiona.fio.main import main_group -from fiona.fio import cat def test_one(path_coutwildrnp_shp): @@ -63,6 +60,55 @@ assert result.output.count('"Feature"') == 19 +def test_bbox_where(path_coutwildrnp_shp): + runner = CliRunner() + result = runner.invoke( + main_group, + ['cat', path_coutwildrnp_shp, '--bbox', '-120,40,-100,50', + '--where', "NAME LIKE 'Mount%'"], + catch_exceptions=False) + assert result.exit_code == 0 + assert result.output.count('"Feature"') == 4 + + +def test_where_no(path_coutwildrnp_shp): + runner = CliRunner() + result = runner.invoke( + main_group, + ['cat', path_coutwildrnp_shp, '--where', "STATE LIKE '%foo%'"], + catch_exceptions=False) + assert result.exit_code == 0 + assert result.output == "" + + +def test_where_yes(path_coutwildrnp_shp): + runner = CliRunner() + result = runner.invoke( + main_group, + ['cat', path_coutwildrnp_shp, '--where', "NAME LIKE 'Mount%'"], + catch_exceptions=False) + assert result.exit_code == 0 + assert result.output.count('"Feature"') == 9 + + +def test_where_yes_two_files(path_coutwildrnp_shp): + runner = CliRunner() + result = runner.invoke( + main_group, + ['cat', path_coutwildrnp_shp, path_coutwildrnp_shp, + '--where', "NAME LIKE 'Mount%'"], + catch_exceptions=False) + assert result.exit_code == 0 + assert result.output.count('"Feature"') == 18 + + +def 
test_where_fail(data_dir): + runner = CliRunner() + result = runner.invoke(main_group, ['cat', '--where', "NAME=3", + data_dir]) + assert result.exit_code != 0 + + def test_multi_layer(data_dir): layerdef = "1:coutwildrnp,1:coutwildrnp" runner = CliRunner() @@ -81,7 +127,7 @@ def test_vfs(path_coutwildrnp_zip): runner = CliRunner() result = runner.invoke(main_group, [ - 'cat', 'zip://{}'.format(path_coutwildrnp_zip)]) + 'cat', f'zip://{path_coutwildrnp_zip}']) assert result.exit_code == 0 assert result.output.count('"Feature"') == 67 diff -Nru fiona-1.8.22/tests/test_fio_dump.py fiona-1.9.5/tests/test_fio_dump.py --- fiona-1.8.22/tests/test_fio_dump.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_fio_dump.py 2023-10-11 23:19:44.000000000 +0000 @@ -4,6 +4,7 @@ import json from click.testing import CliRunner +import pytest import fiona from fiona.fio.main import main_group @@ -16,17 +17,17 @@ assert '"FeatureCollection"' in result.output -def test_dump_layer(path_gpx): - for layer in ('routes', '1'): - runner = CliRunner() - result = runner.invoke(main_group, ['dump', path_gpx, '--layer', layer]) - assert result.exit_code == 0 - assert '"FeatureCollection"' in result.output +@pytest.mark.parametrize("layer", ["routes", "1", "tracks", "track_points"]) +def test_dump_layer(path_gpx, layer): + runner = CliRunner() + result = runner.invoke(main_group, ["dump", path_gpx, "--layer", layer]) + assert result.exit_code == 0 + assert '"FeatureCollection"' in result.output def test_dump_layer_vfs(path_coutwildrnp_zip): - path = 'zip://{}'.format(path_coutwildrnp_zip) - result = CliRunner().invoke(main_group, ['dump', path]) + path = f"zip://{path_coutwildrnp_zip}" + result = CliRunner().invoke(main_group, ["dump", path]) assert result.exit_code == 0 loaded = json.loads(result.output) with fiona.open(path) as src: diff -Nru fiona-1.8.22/tests/test_fio_filter.py fiona-1.9.5/tests/test_fio_filter.py --- fiona-1.8.22/tests/test_fio_filter.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_fio_filter.py 2023-10-11 23:19:44.000000000 +0000 @@ -13,7 +13,7 @@ def test_seq(feature_seq, runner): result = runner.invoke(main_group, ['filter', - "f.properties.AREA > 0.01"], feature_seq) + "f.properties.AREA > 0.01"], feature_seq, catch_exceptions=False) assert result.exit_code == 0 assert result.output.count('Feature') == 2 diff -Nru fiona-1.8.22/tests/test_fio_info.py fiona-1.9.5/tests/test_fio_info.py --- fiona-1.8.22/tests/test_fio_info.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_fio_info.py 2023-10-11 23:19:44.000000000 +0000 @@ -80,7 +80,7 @@ def test_info_vfs(path_coutwildrnp_zip, path_coutwildrnp_shp): runner = CliRunner() zip_result = runner.invoke(main_group, [ - 'info', 'zip://{}'.format(path_coutwildrnp_zip)]) + 'info', f'zip://{path_coutwildrnp_zip}']) shp_result = runner.invoke(main_group, [ 'info', path_coutwildrnp_shp]) assert zip_result.exit_code == shp_result.exit_code == 0 diff -Nru fiona-1.8.22/tests/test_fio_load.py fiona-1.9.5/tests/test_fio_load.py --- fiona-1.8.22/tests/test_fio_load.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_fio_load.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,6 +1,6 @@ """Tests for `$ fio load`.""" - +from functools import partial import json import os import shutil @@ -9,6 +9,7 @@ import fiona from fiona.fio.main import main_group +from fiona.model import ObjectEncoder def test_err(runner): @@ -104,7 +105,7 @@ 'coordinates': (5.0, 39.0) } } - sequence = os.linesep.join(map(json.dumps, [feature, 
feature])) + sequence = os.linesep.join(map(partial(json.dumps, cls=ObjectEncoder), [feature, feature])) result = runner.invoke(main_group, [ 'load', outdir, @@ -122,7 +123,6 @@ finally: shutil.rmtree(outdir) - @pytest.mark.iconv def test_creation_options(tmpdir, runner, feature_seq): tmpfile = str(tmpdir.mkdir("tests").join("test.shp")) @@ -133,3 +133,79 @@ ) assert result.exit_code == 0 assert tmpdir.join("tests/test.cpg").read() == "LATIN1" + + +@pytest.mark.parametrize("extension, driver", [ + ("shp", "ESRI Shapefile"), + ("geojson", "GeoJSON"), + ("json", "GeoJSON"), + ("gpkg", "GPKG"), + ("SHP", "ESRI Shapefile"), +]) +def test_load__auto_detect_format(tmpdir, runner, feature_seq, extension, driver): + tmpfile = str(tmpdir.mkdir('tests').join(f'test_src_vs_dst_crs.{extension}')) + result = runner.invoke(main_group, [ + 'load', + '--src-crs', + 'EPSG:32617', + tmpfile + ], feature_seq) + assert result.exit_code == 0 + with fiona.open(tmpfile.lower()) as src: + assert src.crs == {'init': 'epsg:32617'} + assert len(src) == len(feature_seq.splitlines()) + assert src.driver == driver + + +def test_fio_load_layer_append(tmpdir, runner): + """Checking append mode.""" + outdir = str(tmpdir.mkdir("tests").mkdir("test_fio_load_layer")) + try: + feature = { + "type": "Feature", + "properties": {"key": "value"}, + "geometry": {"type": "Point", "coordinates": (5.0, 39.0)}, + } + sequence = os.linesep.join( + map(partial(json.dumps, cls=ObjectEncoder), [feature, feature]) + ) + + # Write mode to create layer. + result = runner.invoke( + main_group, + [ + "load", + outdir, + "--driver", + "ESRI Shapefile", + "--src-crs", + "EPSG:4236", + "--layer", + "test_layer", + ], + input=sequence, + ) + assert result.exit_code == 0 + + # Here's the append. + result = runner.invoke( + main_group, + [ + "load", + outdir, + "--driver=ESRI Shapefile", + "--src-crs=EPSG:4236", + "--layer=test_layer", + "--append", + ], + input=sequence, + ) + assert result.exit_code == 0 + + with fiona.open(outdir) as src: + assert len(src) == 4 + assert src.name == "test_layer" + assert src.schema["geometry"] == "Point" + + finally: + shutil.rmtree(outdir) diff -Nru fiona-1.8.22/tests/test_fio_ls.py fiona-1.9.5/tests/test_fio_ls.py --- fiona-1.8.22/tests/test_fio_ls.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_fio_ls.py 2023-10-11 23:19:44.000000000 +0000 @@ -53,7 +53,7 @@ def test_fio_ls_vfs(path_coutwildrnp_zip): runner = CliRunner() result = runner.invoke(main_group, [ - 'ls', 'zip://{}'.format(path_coutwildrnp_zip)]) + 'ls', f'zip://{path_coutwildrnp_zip}']) assert result.exit_code == 0 loaded = json.loads(result.output) assert len(loaded) == 1 diff -Nru fiona-1.8.22/tests/test_fio_rm.py fiona-1.9.5/tests/test_fio_rm.py --- fiona-1.8.22/tests/test_fio_rm.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_fio_rm.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,47 +1,49 @@ import os + import pytest -import fiona from click.testing import CliRunner + +import fiona +from fiona.model import Feature from fiona.fio.main import main_group + def create_sample_data(filename, driver, **extra_meta): - meta = { - 'driver': driver, - 'schema': { - 'geometry': 'Point', - 'properties': {} - } - } + meta = {"driver": driver, "schema": {"geometry": "Point", "properties": {}}} meta.update(extra_meta) - with fiona.open(filename, 'w', **meta) as dst: - dst.write({ - 'geometry': { - 'type': 'Point', - 'coordinates': (0, 0), - }, - 'properties': {}, - }) - assert(os.path.exists(filename)) + with fiona.open(filename, 
"w", **meta) as dst: + dst.write( + Feature.from_dict( + **{ + "geometry": { + "type": "Point", + "coordinates": (0, 0), + }, + "properties": {}, + } + ) + ) + assert os.path.exists(filename) + drivers = ["ESRI Shapefile", "GeoJSON"] + + @pytest.mark.parametrize("driver", drivers) def test_remove(tmpdir, driver): extension = {"ESRI Shapefile": "shp", "GeoJSON": "json"}[driver] - filename = "delete_me.{extension}".format(extension=extension) + filename = f"delete_me.{extension}" filename = str(tmpdir.join(filename)) create_sample_data(filename, driver) - - result = CliRunner().invoke(main_group, [ - "rm", - filename, - "--yes" - ]) - print(result.output) + + result = CliRunner().invoke(main_group, ["rm", filename, "--yes"]) assert result.exit_code == 0 assert not os.path.exists(filename) has_gpkg = "GPKG" in fiona.supported_drivers.keys() + + @pytest.mark.skipif(not has_gpkg, reason="Requires GPKG driver") def test_remove_layer(tmpdir): filename = str(tmpdir.join("a_filename.gpkg")) @@ -49,13 +51,9 @@ create_sample_data(filename, "GPKG", layer="layer2") assert fiona.listlayers(filename) == ["layer1", "layer2"] - result = CliRunner().invoke(main_group, [ - "rm", - filename, - "--layer", "layer2", - "--yes" - ]) - print(result.output) + result = CliRunner().invoke( + main_group, ["rm", filename, "--layer", "layer2", "--yes"] + ) assert result.exit_code == 0 assert os.path.exists(filename) assert fiona.listlayers(filename) == ["layer1"] diff -Nru fiona-1.8.22/tests/test_geojson.py fiona-1.9.5/tests/test_geojson.py --- fiona-1.8.22/tests/test_geojson.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_geojson.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,56 +1,83 @@ +"""Tests of behavior specific to GeoJSON""" + +import json + import pytest import fiona from fiona.collection import supported_drivers from fiona.errors import FionaValueError, DriverError, SchemaError, CRSError +from fiona.model import Feature def test_json_read(path_coutwildrnp_json): - with fiona.open(path_coutwildrnp_json, 'r') as c: + with fiona.open(path_coutwildrnp_json, "r") as c: assert len(c) == 67 def test_json(tmpdir): """Write a simple GeoJSON file""" - path = str(tmpdir.join('foo.json')) - with fiona.open(path, 'w', - driver='GeoJSON', - schema={'geometry': 'Unknown', - 'properties': [('title', 'str')]}) as c: - c.writerecords([{ - 'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}, - 'properties': {'title': 'One'}}]) - c.writerecords([{ - 'geometry': {'type': 'MultiPoint', 'coordinates': [[0.0, 0.0]]}, - 'properties': {'title': 'Two'}}]) + path = str(tmpdir.join("foo.json")) + with fiona.open( + path, + "w", + driver="GeoJSON", + schema={"geometry": "Unknown", "properties": [("title", "str")]}, + ) as c: + c.writerecords( + [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"title": "One"}, + } + ) + ] + ) + c.writerecords( + [ + Feature.from_dict( + **{ + "geometry": {"type": "MultiPoint", "coordinates": [[0.0, 0.0]]}, + "properties": {"title": "Two"}, + } + ) + ] + ) with fiona.open(path) as c: - assert c.schema['geometry'] == 'Unknown' + assert c.schema["geometry"] == "Unknown" assert len(c) == 2 def test_json_overwrite(tmpdir): """Overwrite an existing GeoJSON file""" - path = str(tmpdir.join('foo.json')) + path = str(tmpdir.join("foo.json")) driver = "GeoJSON" schema1 = {"geometry": "Unknown", "properties": [("title", "str")]} schema2 = {"geometry": "Unknown", "properties": [("other", "str")]} features1 = [ - { - "geometry": {"type": 
"Point", "coordinates": [0.0, 0.0]}, - "properties": {"title": "One"}, - }, - { - "geometry": {"type": "MultiPoint", "coordinates": [[0.0, 0.0]]}, - "properties": {"title": "Two"}, - } + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"title": "One"}, + } + ), + Feature.from_dict( + **{ + "geometry": {"type": "MultiPoint", "coordinates": [[0.0, 0.0]]}, + "properties": {"title": "Two"}, + } + ), ] features2 = [ - { - "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, - "properties": {"other": "Three"}, - }, + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"other": "Three"}, + } + ), ] # write some data to a file @@ -61,7 +88,7 @@ with fiona.open(path, "r") as c: assert len(c) == 2 feature = next(iter(c)) - assert feature["properties"]["title"] == "One" + assert feature.properties["title"] == "One" # attempt to overwrite the existing file with some new data with fiona.open(path, "w", driver=driver, schema=schema2) as c: @@ -71,27 +98,31 @@ with fiona.open(path, "r") as c: assert len(c) == 1 feature = next(iter(c)) - assert feature["properties"]["other"] == "Three" + assert feature.properties["other"] == "Three" def test_json_overwrite_invalid(tmpdir): """Overwrite an existing file that isn't a valid GeoJSON""" # write some invalid data to a file - path = str(tmpdir.join('foo.json')) + path = str(tmpdir.join("foo.json")) with open(path, "w") as f: f.write("This isn't a valid GeoJSON file!!!") schema1 = {"geometry": "Unknown", "properties": [("title", "str")]} features1 = [ - { - "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, - "properties": {"title": "One"}, - }, - { - "geometry": {"type": "MultiPoint", "coordinates": [[0.0, 0.0]]}, - "properties": {"title": "Two"}, - } + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"title": "One"}, + } + ), + Feature.from_dict( + **{ + "geometry": {"type": "MultiPoint", "coordinates": [[0.0, 0.0]]}, + "properties": {"title": "Two"}, + } + ), ] # attempt to overwrite it with a valid file @@ -105,7 +136,31 @@ def test_write_json_invalid_directory(tmpdir): """Attempt to create a file in a directory that doesn't exist""" - path = str(tmpdir.join('does-not-exist', 'foo.json')) + path = str(tmpdir.join("does-not-exist", "foo.json")) schema = {"geometry": "Unknown", "properties": [("title", "str")]} with pytest.raises(DriverError): fiona.open(path, "w", driver="GeoJSON", schema=schema) + + +def test_empty_array_property(tmp_path): + """Confirm fix for bug reported in gh-1227.""" + tmp_path.joinpath("test.geojson").write_text( + json.dumps( + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": {"type": "Point", "coordinates": [12, 24]}, + "properties": {"array_prop": ["some_value"]}, + }, + { + "type": "Feature", + "geometry": {"type": "Point", "coordinates": [12, 24]}, + "properties": {"array_prop": []}, + }, + ], + } + ) + ) + list(fiona.open(tmp_path.joinpath("test.geojson"))) diff -Nru fiona-1.8.22/tests/test_geometry.py fiona-1.9.5/tests/test_geometry.py --- fiona-1.8.22/tests/test_geometry.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_geometry.py 2023-10-11 23:19:44.000000000 +0000 @@ -2,48 +2,60 @@ import pytest -from fiona._geometry import (GeomBuilder, geometryRT) +from fiona._geometry import GeomBuilder, geometryRT from fiona.errors import UnsupportedGeometryTypeError +from fiona.model import Geometry def 
geometry_wkb(wkb): try: wkb = bytes.fromhex(wkb) except AttributeError: - wkb = wkb.decode('hex') + wkb = wkb.decode("hex") return GeomBuilder().build_wkb(wkb) def test_ogr_builder_exceptions(): - geom = {'type': "Bogus", 'coordinates': None} - with pytest.raises(ValueError): + geom = Geometry.from_dict(**{"type": "Bogus", "coordinates": None}) + with pytest.raises(UnsupportedGeometryTypeError): geometryRT(geom) -@pytest.mark.parametrize('geom_type, coordinates', [ - ('Point', (0.0, 0.0)), - ('LineString', [(0.0, 0.0), (1.0, 1.0)]), - ('Polygon', - [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]), - ('MultiPoint', [(0.0, 0.0), (1.0, 1.0)]), - ('MultiLineString', [[(0.0, 0.0), (1.0, 1.0)]]), - ('MultiPolygon', - [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]]), -]) +@pytest.mark.parametrize( + "geom_type, coordinates", + [ + ("Point", (0.0, 0.0)), + ("LineString", [(0.0, 0.0), (1.0, 1.0)]), + ("Polygon", [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]), + ("MultiPoint", [(0.0, 0.0), (1.0, 1.0)]), + ("MultiLineString", [[(0.0, 0.0), (1.0, 1.0)]]), + ( + "MultiPolygon", + [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]], + ), + ], +) def test_round_tripping(geom_type, coordinates): - result = geometryRT({'type': geom_type, 'coordinates': coordinates}) - assert result['type'] == geom_type - assert result['coordinates'] == coordinates + result = geometryRT( + Geometry.from_dict(**{"type": geom_type, "coordinates": coordinates}) + ) + assert result.type == geom_type + assert result.coordinates == coordinates -@pytest.mark.parametrize('geom_type, coordinates', [ - ('Polygon', [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]), - ('MultiPolygon', [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]]), -]) +@pytest.mark.parametrize( + "geom_type, coordinates", + [ + ("Polygon", [[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]), + ("MultiPolygon", [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]]), + ], +) def test_implicitly_closed_round_tripping(geom_type, coordinates): - result = geometryRT({'type': geom_type, 'coordinates': coordinates}) - assert result['type'] == geom_type - result_coordinates = result['coordinates'] + result = geometryRT( + Geometry.from_dict(**{"type": geom_type, "coordinates": coordinates}) + ) + assert result.type == geom_type + result_coordinates = result.coordinates while not isinstance(coordinates[0], tuple): result_coordinates = result_coordinates[0] coordinates = coordinates[0] @@ -52,44 +64,49 @@ def test_geometry_collection_round_trip(): geom = { - 'type': "GeometryCollection", - 'geometries': [ - {'type': "Point", 'coordinates': (0.0, 0.0)}, { - 'type': "LineString", - 'coordinates': [(0.0, 0.0), (1.0, 1.0)]}]} + "type": "GeometryCollection", + "geometries": [ + {"type": "Point", "coordinates": (0.0, 0.0)}, + {"type": "LineString", "coordinates": [(0.0, 0.0), (1.0, 1.0)]}, + ], + } result = geometryRT(geom) - assert len(result['geometries']) == 2 - assert [g['type'] for g in result['geometries']] == ['Point', 'LineString'] + assert len(result["geometries"]) == 2 + assert [g["type"] for g in result["geometries"]] == ["Point", "LineString"] def test_point_wkb(): # Hex-encoded Point (0 0) wkb = "010100000000000000000000000000000000000000" geom = geometry_wkb(wkb) - assert geom['type'] == "Point" - assert geom['coordinates'] == (0.0, 0.0) + assert geom["type"] == "Point" + assert geom["coordinates"] == (0.0, 0.0) def test_line_wkb(): # Hex-encoded LineString (0 0, 1 1) - wkb = 
("01020000000200000000000000000000000000000000000000000000000000f03f" - "000000000000f03f") + wkb = ( + "01020000000200000000000000000000000000000000000000000000000000f03f" + "000000000000f03f" + ) geom = geometry_wkb(wkb) - assert geom['type'] == "LineString" - assert geom['coordinates'] == [(0.0, 0.0), (1.0, 1.0)] + assert geom["type"] == "LineString" + assert geom["coordinates"] == [(0.0, 0.0), (1.0, 1.0)] def test_polygon_wkb(): # 1 x 1 box (0, 0, 1, 1) - wkb = ("01030000000100000005000000000000000000f03f000000000000000000000000" - "0000f03f000000000000f03f0000000000000000000000000000f03f0000000000" - "0000000000000000000000000000000000f03f0000000000000000") - geom = geometry_wkb(wkb) - assert geom['type'], "Polygon" - assert len(geom['coordinates']) == 1 - assert len(geom['coordinates'][0]) == 5 - x, y = zip(*geom['coordinates'][0]) + wkb = ( + "01030000000100000005000000000000000000f03f000000000000000000000000" + "0000f03f000000000000f03f0000000000000000000000000000f03f0000000000" + "0000000000000000000000000000000000f03f0000000000000000" + ) + geom = geometry_wkb(wkb) + assert geom["type"], "Polygon" + assert len(geom["coordinates"]) == 1 + assert len(geom["coordinates"][0]) == 5 + x, y = zip(*geom["coordinates"][0]) assert min(x) == 0.0 assert min(y) == 0.0 assert max(x) == 1.0 @@ -97,36 +114,42 @@ def test_multipoint_wkb(): - wkb = ("010400000002000000010100000000000000000000000000000000000000010100" - "0000000000000000f03f000000000000f03f") + wkb = ( + "010400000002000000010100000000000000000000000000000000000000010100" + "0000000000000000f03f000000000000f03f" + ) geom = geometry_wkb(wkb) - assert geom['type'] == "MultiPoint" - assert geom['coordinates'] == [(0.0, 0.0), (1.0, 1.0)] + assert geom["type"] == "MultiPoint" + assert geom["coordinates"] == [(0.0, 0.0), (1.0, 1.0)] def test_multilinestring_wkb(): # Hex-encoded LineString (0 0, 1 1) - wkb = ("010500000001000000010200000002000000000000000000000000000000000000" - "00000000000000f03f000000000000f03f") - geom = geometry_wkb(wkb) - assert geom['type'] == "MultiLineString" - assert len(geom['coordinates']) == 1 - assert len(geom['coordinates'][0]) == 2 - assert geom['coordinates'][0] == [(0.0, 0.0), (1.0, 1.0)] + wkb = ( + "010500000001000000010200000002000000000000000000000000000000000000" + "00000000000000f03f000000000000f03f" + ) + geom = geometry_wkb(wkb) + assert geom["type"] == "MultiLineString" + assert len(geom["coordinates"]) == 1 + assert len(geom["coordinates"][0]) == 2 + assert geom["coordinates"][0] == [(0.0, 0.0), (1.0, 1.0)] def test_multipolygon_wkb(): # [1 x 1 box (0, 0, 1, 1)] - wkb = ("01060000000100000001030000000100000005000000000000000000f03f000000" - "0000000000000000000000f03f000000000000f03f000000000000000000000000" - "0000f03f00000000000000000000000000000000000000000000f03f0000000000" - "000000") - geom = geometry_wkb(wkb) - assert geom['type'] == "MultiPolygon" - assert len(geom['coordinates']) == 1 - assert len(geom['coordinates'][0]) == 1 - assert len(geom['coordinates'][0][0]) == 5 - x, y = zip(*geom['coordinates'][0][0]) + wkb = ( + "01060000000100000001030000000100000005000000000000000000f03f000000" + "0000000000000000000000f03f000000000000f03f000000000000000000000000" + "0000f03f00000000000000000000000000000000000000000000f03f0000000000" + "000000" + ) + geom = geometry_wkb(wkb) + assert geom["type"] == "MultiPolygon" + assert len(geom["coordinates"]) == 1 + assert len(geom["coordinates"][0]) == 1 + assert len(geom["coordinates"][0][0]) == 5 + x, y = zip(*geom["coordinates"][0][0]) assert 
min(x) == 0.0 assert min(y) == 0.0 assert max(x) == 1.0 diff -Nru fiona-1.8.22/tests/test_geopackage.py fiona-1.9.5/tests/test_geopackage.py --- fiona-1.8.22/tests/test_geopackage.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_geopackage.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,40 +1,49 @@ import os import pytest import fiona +from fiona.model import Feature + from .conftest import requires_gpkg example_schema = { - 'geometry': 'Point', - 'properties': [('title', 'str')], + "geometry": "Point", + "properties": [("title", "str")], } example_crs = { - 'a': 6370997, - 'lon_0': -100, - 'y_0': 0, - 'no_defs': True, - 'proj': 'laea', - 'x_0': 0, - 'units': 'm', - 'b': 6370997, - 'lat_0': 45, + "a": 6370997, + "lon_0": -100, + "y_0": 0, + "no_defs": True, + "proj": "laea", + "x_0": 0, + "units": "m", + "b": 6370997, + "lat_0": 45, } example_features = [ - { - "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, - "properties": {"title": "One"}, - }, - { - "geometry": {"type": "Point", "coordinates": [1.0, 2.0]}, - "properties": {"title": "Two"}, - }, - { - "geometry": {"type": "Point", "coordinates": [3.0, 4.0]}, - "properties": {"title": "Three"}, - }, + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"title": "One"}, + } + ), + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [1.0, 2.0]}, + "properties": {"title": "Two"}, + } + ), + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [3.0, 4.0]}, + "properties": {"title": "Three"}, + } + ), ] + @requires_gpkg def test_read_gpkg(path_coutwildrnp_gpkg): """ @@ -44,49 +53,46 @@ with fiona.open(path_coutwildrnp_gpkg, "r") as src: assert len(src) == 67 feature = next(iter(src)) - assert feature["geometry"]["type"] == "Polygon" - assert feature["properties"]["NAME"] == "Mount Naomi Wilderness" + assert feature.geometry["type"] == "Polygon" + assert feature.properties["NAME"] == "Mount Naomi Wilderness" + @requires_gpkg def test_write_gpkg(tmpdir): - path = str(tmpdir.join('foo.gpkg')) + path = str(tmpdir.join("foo.gpkg")) - with fiona.open(path, 'w', - driver='GPKG', - schema=example_schema, - crs=example_crs) as dst: + with fiona.open( + path, "w", driver="GPKG", schema=example_schema, crs=example_crs + ) as dst: dst.writerecords(example_features) with fiona.open(path) as src: - assert src.schema['geometry'] == 'Point' + assert src.schema["geometry"] == "Point" assert len(src) == 3 + @requires_gpkg def test_write_multilayer_gpkg(tmpdir): """ Test that writing a second layer to an existing geopackage doesn't remove and existing layer for the dataset. 
""" - path = str(tmpdir.join('foo.gpkg')) + path = str(tmpdir.join("foo.gpkg")) - with fiona.open(path, 'w', - driver='GPKG', - schema=example_schema, - layer="layer1", - crs=example_crs) as dst: + with fiona.open( + path, "w", driver="GPKG", schema=example_schema, layer="layer1", crs=example_crs + ) as dst: dst.writerecords(example_features[0:2]) - with fiona.open(path, 'w', - driver='GPKG', - schema=example_schema, - layer="layer2", - crs=example_crs) as dst: + with fiona.open( + path, "w", driver="GPKG", schema=example_schema, layer="layer2", crs=example_crs + ) as dst: dst.writerecords(example_features[2:]) with fiona.open(path, layer="layer1") as src: - assert src.schema['geometry'] == 'Point' + assert src.schema["geometry"] == "Point" assert len(src) == 2 with fiona.open(path, layer="layer2") as src: - assert src.schema['geometry'] == 'Point' + assert src.schema["geometry"] == "Point" assert len(src) == 1 diff -Nru fiona-1.8.22/tests/test_http_session.py fiona-1.9.5/tests/test_http_session.py --- fiona-1.8.22/tests/test_http_session.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/tests/test_http_session.py 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,329 @@ +"""Tests of session module""" + +from unittest import mock + +import pytest + +from fiona.session import ( + DummySession, + AWSSession, + Session, + OSSSession, + GSSession, + SwiftSession, + AzureSession, +) + + +def test_base_session_hascreds_notimpl(): + """Session.hascreds must be overridden""" + assert Session.hascreds({}) is NotImplemented + + +def test_base_session_get_credential_options_notimpl(): + """Session.get_credential_options must be overridden""" + assert Session().get_credential_options() is NotImplemented + + +def test_dummy_session(): + """DummySession works""" + sesh = DummySession() + assert sesh._session is None + assert sesh.get_credential_options() == {} + + +def test_aws_session_class(): + """AWSSession works""" + sesh = AWSSession(aws_access_key_id="foo", aws_secret_access_key="bar") + assert sesh._session + assert sesh.get_credential_options()["AWS_ACCESS_KEY_ID"] == "foo" + assert sesh.get_credential_options()["AWS_SECRET_ACCESS_KEY"] == "bar" + + +def test_aws_session_class_session(): + """AWSSession works""" + boto3 = pytest.importorskip("boto3") + sesh = AWSSession( + session=boto3.session.Session( + aws_access_key_id="foo", aws_secret_access_key="bar" + ) + ) + assert sesh._session + assert sesh.get_credential_options()["AWS_ACCESS_KEY_ID"] == "foo" + assert sesh.get_credential_options()["AWS_SECRET_ACCESS_KEY"] == "bar" + + +def test_aws_session_class_unsigned(): + """AWSSession works""" + pytest.importorskip("boto3") + sesh = AWSSession(aws_unsigned=True, region_name="us-mountain-1") + assert sesh.get_credential_options()["AWS_NO_SIGN_REQUEST"] == "YES" + assert sesh.get_credential_options()["AWS_REGION"] == "us-mountain-1" + + +def test_aws_session_class_profile(tmpdir, monkeypatch): + """Confirm that profile_name kwarg works.""" + pytest.importorskip("boto3") + credentials_file = tmpdir.join("credentials") + credentials_file.write( + "[testing]\n" + "aws_access_key_id = foo\n" + "aws_secret_access_key = bar\n" + "aws_session_token = baz" + ) + monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE", str(credentials_file)) + monkeypatch.setenv("AWS_SESSION_TOKEN", "ignore_me") + sesh = AWSSession(profile_name="testing") + assert sesh._session + assert sesh.get_credential_options()["AWS_ACCESS_KEY_ID"] == "foo" + assert sesh.get_credential_options()["AWS_SECRET_ACCESS_KEY"] == "bar" + assert 
sesh.get_credential_options()["AWS_SESSION_TOKEN"] == "baz" + monkeypatch.undo() + + +def test_aws_session_class_endpoint(): + """Confirm that endpoint_url kwarg works.""" + pytest.importorskip("boto3") + sesh = AWSSession(endpoint_url="example.com") + assert sesh.get_credential_options()["AWS_S3_ENDPOINT"] == "example.com" + + +def test_session_factory_unparsed(): + """Get a DummySession for unparsed paths""" + sesh = Session.from_path("/vsicurl/lolwut") + assert isinstance(sesh, DummySession) + + +def test_session_factory_empty(): + """Get a DummySession for no path""" + sesh = Session.from_path("") + assert isinstance(sesh, DummySession) + + +def test_session_factory_local(): + """Get a DummySession for local paths""" + sesh = Session.from_path("file:///lolwut") + assert isinstance(sesh, DummySession) + + +def test_session_factory_unknown(): + """Get a DummySession for unknown paths""" + sesh = Session.from_path("https://fancy-cloud.com/lolwut") + assert isinstance(sesh, DummySession) + + +def test_session_factory_s3(): + """Get an AWSSession for s3:// paths""" + pytest.importorskip("boto3") + sesh = Session.from_path("s3://lol/wut") + assert isinstance(sesh, AWSSession) + + +def test_session_factory_s3_presigned_url(): + """Get a DummySession for presigned URLs""" + sesh = Session.from_path("https://fancy-cloud.com/lolwut?X-Amz-Signature=foo") + assert isinstance(sesh, DummySession) + + +def test_session_factory_s3_no_boto3(monkeypatch): + """Get an AWSSession for s3:// paths""" + pytest.importorskip("boto3") + with monkeypatch.context() as mpctx: + mpctx.setattr("fiona.session.boto3", None) + sesh = Session.from_path("s3://lol/wut") + assert isinstance(sesh, DummySession) + + +def test_session_factory_s3_kwargs(): + """Get an AWSSession for s3:// paths with keywords""" + pytest.importorskip("boto3") + sesh = Session.from_path( + "s3://lol/wut", aws_access_key_id="foo", aws_secret_access_key="bar" + ) + assert isinstance(sesh, AWSSession) + assert sesh._session.get_credentials().access_key == "foo" + assert sesh._session.get_credentials().secret_key == "bar" + + +def test_foreign_session_factory_dummy(): + sesh = Session.from_foreign_session(None) + assert isinstance(sesh, DummySession) + + +def test_foreign_session_factory_s3(): + boto3 = pytest.importorskip("boto3") + aws_session = boto3.Session(aws_access_key_id="foo", aws_secret_access_key="bar") + sesh = Session.from_foreign_session(aws_session, cls=AWSSession) + assert isinstance(sesh, AWSSession) + assert sesh._session.get_credentials().access_key == "foo" + assert sesh._session.get_credentials().secret_key == "bar" + + +def test_requester_pays(): + """GDAL is configured with requester pays""" + sesh = AWSSession( + aws_access_key_id="foo", aws_secret_access_key="bar", requester_pays=True + ) + assert sesh._session + assert sesh.get_credential_options()["AWS_REQUEST_PAYER"] == "requester" + + +def test_oss_session_class(): + """OSSSession works""" + oss_session = OSSSession( + oss_access_key_id="foo", + oss_secret_access_key="bar", + oss_endpoint="null-island-1", + ) + assert oss_session._creds + assert oss_session.get_credential_options()["OSS_ACCESS_KEY_ID"] == "foo" + assert oss_session.get_credential_options()["OSS_SECRET_ACCESS_KEY"] == "bar" + + +def test_session_factory_oss_kwargs(): + """Get an OSSSession for oss:// paths with keywords""" + sesh = Session.from_path( + "oss://lol/wut", oss_access_key_id="foo", oss_secret_access_key="bar" + ) + assert isinstance(sesh, OSSSession) + assert 
sesh.get_credential_options()["OSS_ACCESS_KEY_ID"] == "foo" + assert sesh.get_credential_options()["OSS_SECRET_ACCESS_KEY"] == "bar" + + +def test_google_session_ctor_no_arg(): + session = GSSession() + assert not session._creds + + +def test_gs_session_class(): + """GSSession works""" + gs_session = GSSession(google_application_credentials="foo") + assert gs_session._creds + assert ( + gs_session.get_credential_options()["GOOGLE_APPLICATION_CREDENTIALS"] == "foo" + ) + assert gs_session.hascreds({"GOOGLE_APPLICATION_CREDENTIALS": "foo"}) + + +def test_swift_session_class(): + """SwiftSession works""" + swift_session = SwiftSession( + swift_storage_url="foo", + swift_auth_token="bar", + ) + assert swift_session._creds + assert swift_session.get_credential_options()["SWIFT_STORAGE_URL"] == "foo" + assert swift_session.get_credential_options()["SWIFT_AUTH_TOKEN"] == "bar" + + +def test_swift_session_by_user_key(): + def mock_init( + self, + session=None, + swift_storage_url=None, + swift_auth_token=None, + swift_auth_v1_url=None, + swift_user=None, + swift_key=None, + ): + self._creds = {"SWIFT_STORAGE_URL": "foo", "SWIFT_AUTH_TOKEN": "bar"} + + with mock.patch("fiona.session.SwiftSession.__init__", new=mock_init): + swift_session = SwiftSession( + swift_auth_v1_url="foo", swift_user="bar", swift_key="key" + ) + assert swift_session._creds + assert swift_session.get_credential_options()["SWIFT_STORAGE_URL"] == "foo" + assert swift_session.get_credential_options()["SWIFT_AUTH_TOKEN"] == "bar" + + +def test_session_factory_swift_kwargs(): + """Get an SwiftSession for /vsiswift/bucket/key with keywords""" + sesh = Session.from_path( + "/vsiswift/lol/wut", swift_storage_url="foo", swift_auth_token="bar" + ) + assert isinstance(sesh, DummySession) + + +def test_session_aws_or_dummy_aws(): + """Get an AWSSession when boto3 is available""" + boto3 = pytest.importorskip("boto3") + assert isinstance(Session.aws_or_dummy(), AWSSession) + + +def test_session_aws_or_dummy_dummy(monkeypatch): + """Get a DummySession when boto3 is not available""" + boto3 = pytest.importorskip("boto3") + with monkeypatch.context() as mpctx: + mpctx.setattr("fiona.session.boto3", None) + assert isinstance(Session.aws_or_dummy(), DummySession) + + +def test_no_sign_request(monkeypatch): + """If AWS_NO_SIGN_REQUEST is set do not default to aws_unsigned=False""" + monkeypatch.setenv("AWS_NO_SIGN_REQUEST", "YES") + assert AWSSession().unsigned + + +def test_no_credentialization_if_unsigned(monkeypatch): + """Don't get credentials if we're not signing, see #1984""" + sesh = AWSSession(aws_unsigned=True) + assert sesh._creds is None + + +def test_azure_session_class(): + """AzureSession works""" + azure_session = AzureSession( + azure_storage_account="foo", azure_storage_access_key="bar" + ) + assert azure_session._creds + assert azure_session.get_credential_options()["AZURE_STORAGE_ACCOUNT"] == "foo" + assert azure_session.get_credential_options()["AZURE_STORAGE_ACCESS_KEY"] == "bar" + + +def test_azure_session_class_connection_string(): + """AzureSession works""" + azure_session = AzureSession( + azure_storage_connection_string="AccountName=myaccount;AccountKey=MY_ACCOUNT_KEY" + ) + assert azure_session._creds + assert ( + azure_session.get_credential_options()["AZURE_STORAGE_CONNECTION_STRING"] + == "AccountName=myaccount;AccountKey=MY_ACCOUNT_KEY" + ) + + +def test_session_factory_az_kwargs(): + """Get an AzureSession for az:// paths with keywords""" + sesh = Session.from_path( + "az://lol/wut", azure_storage_account="foo", 
azure_storage_access_key="bar" + ) + assert isinstance(sesh, AzureSession) + assert sesh.get_credential_options()["AZURE_STORAGE_ACCOUNT"] == "foo" + assert sesh.get_credential_options()["AZURE_STORAGE_ACCESS_KEY"] == "bar" + + +def test_session_factory_az_kwargs_connection_string(): + """Get an AzureSession for az:// paths with keywords""" + sesh = Session.from_path( + "az://lol/wut", + azure_storage_connection_string="AccountName=myaccount;AccountKey=MY_ACCOUNT_KEY", + ) + assert isinstance(sesh, AzureSession) + assert ( + sesh.get_credential_options()["AZURE_STORAGE_CONNECTION_STRING"] + == "AccountName=myaccount;AccountKey=MY_ACCOUNT_KEY" + ) + + +def test_azure_no_sign_request(monkeypatch): + """If AZURE_NO_SIGN_REQUEST is set do not default to azure_unsigned=False""" + monkeypatch.setenv("AZURE_NO_SIGN_REQUEST", "YES") + assert AzureSession().unsigned + + +def test_azure_session_class_unsigned(): + """AzureSession works""" + sesh = AzureSession(azure_unsigned=True, azure_storage_account="naipblobs") + assert sesh.get_credential_options()["AZURE_NO_SIGN_REQUEST"] == "YES" + assert sesh.get_credential_options()["AZURE_STORAGE_ACCOUNT"] == "naipblobs" diff -Nru fiona-1.8.22/tests/test_integration.py fiona-1.9.5/tests/test_integration.py --- fiona-1.8.22/tests/test_integration.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_integration.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,9 +1,10 @@ """Unittests to verify Fiona is functioning properly with other software.""" -import six +from collections import UserDict import fiona +from fiona.model import Feature def test_dict_subclass(tmpdir): @@ -14,30 +15,28 @@ not a subclass of `collections.Mapping()`, so it provides an edge case. """ - class CRS(six.moves.UserDict): + class CRS(UserDict): pass - outfile = str(tmpdir.join('test_UserDict.geojson')) + outfile = str(tmpdir.join("test_UserDict.geojson")) profile = { - 'crs': CRS(init='EPSG:4326'), - 'driver': 'GeoJSON', - 'schema': { - 'geometry': 'Point', - 'properties': {} - } + "crs": CRS(init="EPSG:4326"), + "driver": "GeoJSON", + "schema": {"geometry": "Point", "properties": {}}, } - with fiona.open(outfile, 'w', **profile) as dst: - dst.write({ - 'type': 'Feature', - 'properties': {}, - 'geometry': { - 'type': 'Point', - 'coordinates': (10, -10) - } - }) + with fiona.open(outfile, "w", **profile) as dst: + dst.write( + Feature.from_dict( + **{ + "type": "Feature", + "properties": {}, + "geometry": {"type": "Point", "coordinates": (10, -10)}, + } + ) + ) with fiona.open(outfile) as src: assert len(src) == 1 - assert src.crs == {'init': 'epsg:4326'} + assert src.crs == {"init": "epsg:4326"} diff -Nru fiona-1.8.22/tests/test_listing.py fiona-1.9.5/tests/test_listing.py --- fiona-1.8.22/tests/test_listing.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_listing.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,14 +1,14 @@ """Test listing a datasource's layers.""" -import logging -import sys +from pathlib import Path import os import pytest import fiona import fiona.ogrext -from fiona.errors import DriverError, FionaDeprecationWarning +from fiona.errors import DriverError, FionaDeprecationWarning, FionaValueError +from fiona.io import ZipMemoryFile def test_single_file_private(path_coutwildrnp_shp): @@ -31,11 +31,11 @@ def test_zip_path(path_coutwildrnp_zip): assert fiona.listlayers( - 'zip://{}'.format(path_coutwildrnp_zip)) == ['coutwildrnp'] + f'zip://{path_coutwildrnp_zip}') == ['coutwildrnp'] def test_zip_path_arch(path_coutwildrnp_zip): - vfs = 
'zip://{}'.format(path_coutwildrnp_zip) + vfs = f'zip://{path_coutwildrnp_zip}' with pytest.warns(FionaDeprecationWarning): assert fiona.listlayers('/coutwildrnp.shp', vfs=vfs) == ['coutwildrnp'] @@ -62,6 +62,11 @@ fiona.listlayers("foobar") +def test_path_object(path_coutwildrnp_shp): + path_obj = Path(path_coutwildrnp_shp) + assert fiona.listlayers(path_obj) == ['coutwildrnp'] + + def test_listing_file(path_coutwildrnp_json): """list layers from an open file object""" with open(path_coutwildrnp_json, "rb") as f: @@ -72,3 +77,44 @@ """list layers from a Path object""" pathlib = pytest.importorskip("pathlib") assert len(fiona.listlayers(pathlib.Path(path_coutwildrnp_json))) == 1 + + +def test_listdir_path(path_coutwildrnp_zip): + """List directories in a path""" + assert sorted(fiona.listdir("zip://{}".format(path_coutwildrnp_zip))) == [ + "coutwildrnp.dbf", + "coutwildrnp.prj", + "coutwildrnp.shp", + "coutwildrnp.shx", + ] + + +def test_listdir_path_not_existing(data_dir): + """Test listing of a non existent directory""" + path = os.path.join(data_dir, "does_not_exist.zip") + with pytest.raises(FionaValueError): + fiona.listdir(path) + + +def test_listdir_invalid_path(): + """List directories with invalid path""" + with pytest.raises(TypeError): + assert fiona.listdir(1) + + +def test_listdir_file(path_coutwildrnp_zip): + """Test list directories of a file""" + with pytest.raises(FionaValueError): + fiona.listdir(f"zip://{path_coutwildrnp_zip}/coutwildrnp.shp") + + +def test_listdir_zipmemoryfile(bytes_coutwildrnp_zip): + """Test list directories of a zipped memory file.""" + with ZipMemoryFile(bytes_coutwildrnp_zip) as memfile: + print(memfile.name) + assert sorted(fiona.listdir(memfile.name)) == [ + "coutwildrnp.dbf", + "coutwildrnp.prj", + "coutwildrnp.shp", + "coutwildrnp.shx", + ] diff -Nru fiona-1.8.22/tests/test_memoryfile.py fiona-1.9.5/tests/test_memoryfile.py --- fiona-1.8.22/tests/test_memoryfile.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_memoryfile.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,13 +1,17 @@ """Tests of MemoryFile and ZippedMemoryFile""" +import os from collections import OrderedDict from io import BytesIO import os import pytest - import fiona +from fiona import supported_drivers +from fiona.drvsupport import _driver_supports_mode +from fiona.errors import DriverError from fiona.io import MemoryFile, ZipMemoryFile +from fiona.meta import supports_vsi from .conftest import requires_gdal2, requires_gpkg @@ -69,7 +73,7 @@ memfile = MemoryFile() memfile.close() assert memfile.closed - with pytest.raises(IOError): + with pytest.raises(OSError): memfile.open() @@ -78,7 +82,7 @@ memfile = ZipMemoryFile() memfile.close() assert memfile.closed - with pytest.raises(IOError): + with pytest.raises(OSError): memfile.open() @@ -160,6 +164,30 @@ assert len(col) == 1 +@requires_gpkg +def test_read_multilayer_memoryfile(path_coutwildrnp_json, tmpdir): + """Test read access to multilayer dataset in from file-like object""" + with fiona.open(path_coutwildrnp_json, "r") as src: + schema = src.schema + features = list(src) + + path = os.path.join(tmpdir, "test.gpkg") + with fiona.open(path, "w", driver="GPKG", schema=schema, layer="layer1") as dst: + dst.writerecords(features[0:5]) + with fiona.open(path, "w", driver="GPKG", schema=schema, layer="layer2") as dst: + dst.writerecords(features[5:]) + + with open(path, "rb") as f: + with fiona.open(f, layer="layer1") as src: + assert src.name == "layer1" + assert len(src) == 5 + # Bug reported in #781 where this 
next section would fail
+    with open(path, "rb") as f:
+        with fiona.open(f, layer="layer2") as src:
+            assert src.name == "layer2"
+            assert len(src) == 62
+
+
 def test_append_bytesio_exception(data_coutwildrnp_json):
     """Append is not supported, see #1027."""
     with pytest.raises(OSError):
@@ -168,13 +196,130 @@
 
 def test_mapinfo_raises():
     """Reported to be a crasher in #937"""
-    driver = 'MapInfo File'
-    schema = {'geometry': 'Point', 'properties': OrderedDict([('position', 'str')])}
+    driver = "MapInfo File"
+    schema = {"geometry": "Point", "properties": OrderedDict([("position", "str")])}
 
     with BytesIO() as fout:
         with pytest.raises(OSError):
             with fiona.open(fout, "w", driver=driver, schema=schema) as collection:
-                collection.write({"type": "Feature", "geometry": {"type": "Point", "coordinates": (0, 0)}, "properties": {"position": "x"}})
+                collection.write(
+                    {
+                        "type": "Feature",
+                        "geometry": {"type": "Point", "coordinates": (0, 0)},
+                        "properties": {"position": "x"},
+                    }
+                )
+
+
+# TODO remove exclusion of MapInfo File once testdata_generator is fixed
+@pytest.mark.parametrize(
+    "driver",
+    [
+        driver
+        for driver in supported_drivers
+        if _driver_supports_mode(driver, "w")
+        and supports_vsi(driver)
+        and driver not in {"MapInfo File"}
+    ],
+)
+def test_write_memoryfile_drivers(driver, testdata_generator):
+    """Test if driver is able to write to memoryfile."""
+    range1 = list(range(0, 5))
+    schema, crs, records1, _, _ = testdata_generator(driver, range1, [])
+
+    with MemoryFile() as memfile:
+        with memfile.open(driver=driver, schema=schema) as c:
+            c.writerecords(records1)
+
+        with memfile.open(driver=driver) as c:
+            assert driver == c.driver
+            items = list(c)
+            assert len(items) == len(range1)
+
+
+def test_multiple_layer_memoryfile(testdata_generator):
+    """Test ability to create multiple layers in memoryfile."""
+    driver = "GPKG"
+    range1 = list(range(0, 5))
+    range2 = list(range(5, 10))
+    schema, crs, records1, records2, _ = testdata_generator(driver, range1, range2)
+
+    with MemoryFile() as memfile:
+        with memfile.open(mode='w', driver=driver, schema=schema, layer="layer1") as c:
+            c.writerecords(records1)
+        with memfile.open(mode='w', driver=driver, schema=schema, layer="layer2") as c:
+            c.writerecords(records2)
+
+        with memfile.open(driver=driver, layer="layer1") as c:
+            assert driver == c.driver
+            items = list(c)
+            assert len(items) == len(range1)
+
+        with memfile.open(driver=driver, layer="layer2") as c:
+            assert driver == c.driver
+            items = list(c)
+            # layer2 was written from records2, so its length must match range2
+            assert len(items) == len(range2)
+
+
+# TODO remove exclusion of MapInfo File once testdata_generator is fixed
+@pytest.mark.parametrize(
+    "driver",
+    [
+        driver
+        for driver in supported_drivers
+        if _driver_supports_mode(driver, "a")
+        and supports_vsi(driver)
+        and driver not in {"MapInfo File"}
+    ],
)
+def test_append_memoryfile_drivers(driver, testdata_generator):
+    """Test if driver is able to append to memoryfile"""
+    range1 = list(range(0, 5))
+    range2 = list(range(5, 10))
+    schema, crs, records1, records2, _ = testdata_generator(driver, range1, range2)
+
+    with MemoryFile() as memfile:
+        with memfile.open(driver=driver, schema=schema) as c:
+            c.writerecords(records1)
+
+        with memfile.open(mode='a', driver=driver, schema=schema) as c:
+            c.writerecords(records2)
+
+        with memfile.open(driver=driver) as c:
+            assert driver == c.driver
+            items = list(c)
+            assert len(items) == len(range1 + range2)
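# --- Illustrative sketch, not part of the upstream diff: the MemoryFile
# write-then-read pattern the tests above exercise, reduced to its core.
# The schema and record below are made up for the example.
from fiona.io import MemoryFile

schema = {"geometry": "Point", "properties": {"name": "str"}}
record = {
    "type": "Feature",
    "geometry": {"type": "Point", "coordinates": (0.0, 0.0)},
    "properties": {"name": "example"},
}

with MemoryFile() as memfile:
    # The first open() on an empty MemoryFile creates the dataset ...
    with memfile.open(driver="GeoJSON", schema=schema) as dst:
        dst.writerecords([record])
    # ... subsequent opens read it back.
    with memfile.open() as src:
        assert len(list(src)) == 1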
"FileGDB" not in supported_drivers: + pytest.skip("FileGDB driver not available") + with pytest.raises(DriverError): + with MemoryFile() as memfile: + with memfile.open(driver="FileGDB"): + pass + + +@pytest.mark.parametrize('mode', ['r', 'a']) +def test_modes_on_non_existing_memoryfile(mode): + """Non existing memoryfile cannot opened in r or a mode""" + with MemoryFile() as memfile: + with pytest.raises(IOError): + with memfile.open(mode=mode): + pass + + +def test_write_mode_on_non_existing_memoryfile(profile_first_coutwildrnp_shp): + """Exception is raised if a memoryfile is opened in write mode on a non empty memoryfile""" + profile, first = profile_first_coutwildrnp_shp + profile['driver'] = 'GeoJSON' + with MemoryFile() as memfile: + with memfile.open(**profile) as col: + col.write(first) + with pytest.raises(IOError): + with memfile.open(mode="w"): + pass @requires_gpkg @@ -199,3 +344,63 @@ with fiona.open(f, layer="layer2") as src: assert src.name == "layer2" assert len(src) == 62 + + +def test_allow_unsupported_drivers(monkeypatch): + """Test if allow unsupported drivers works as expected""" + + # We delete a known working driver from fiona.drvsupport so that we can use it + monkeypatch.delitem(fiona.drvsupport.supported_drivers, "GPKG") + + schema = {"geometry": "Polygon", "properties": {}} + + # Test that indeed we can't create a file without allow_unsupported_drivers + with pytest.raises(DriverError): + with MemoryFile() as memfile: + with memfile.open(mode="w", driver="GPKG", schema=schema): + pass + + # Test that we can create file with allow_unsupported_drivers=True + try: + with MemoryFile() as memfile: + with memfile.open( + mode="w", driver="GPKG", schema=schema, allow_unsupported_drivers=True + ): + pass + except Exception as e: + assert ( + False + ), f"Using allow_unsupported_drivers=True should not raise an exception: {e}" + + +def test_listdir_zipmemoryfile(bytes_coutwildrnp_zip): + """Test list directories of a zipped memory file.""" + with ZipMemoryFile(bytes_coutwildrnp_zip) as memfile: + assert memfile.listdir() == [ + "coutwildrnp.shp", + "coutwildrnp.shx", + "coutwildrnp.dbf", + "coutwildrnp.prj", + ] + + +def test_listlayers_zipmemoryfile(bytes_coutwildrnp_zip): + """Test layers of a zipped memory file.""" + with ZipMemoryFile(bytes_coutwildrnp_zip) as memfile: + assert memfile.listlayers() == ["coutwildrnp"] + + +def test_listdir_gdbzipmemoryfile(bytes_testopenfilegdb_zip): + """Test list directories of a zipped GDB memory file.""" + with ZipMemoryFile(bytes_testopenfilegdb_zip, ext=".gdb.zip") as memfile: + assert memfile.listdir() == [ + "testopenfilegdb.gdb", + ] + + +def test_listdir_gdbzipmemoryfile_bis(bytes_testopenfilegdb_zip): + """Test list directories of a zipped GDB memory file.""" + with ZipMemoryFile(bytes_testopenfilegdb_zip, filename="temp.gdb.zip") as memfile: + assert memfile.listdir() == [ + "testopenfilegdb.gdb", + ] diff -Nru fiona-1.8.22/tests/test_meta.py fiona-1.9.5/tests/test_meta.py --- fiona-1.8.22/tests/test_meta.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/tests/test_meta.py 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,66 @@ +import pytest +import fiona +import fiona.drvsupport +import fiona.meta +from fiona.drvsupport import supported_drivers +from fiona.errors import FionaValueError +from .conftest import requires_gdal2, requires_gdal23, requires_gdal31 + + +@requires_gdal31 +@pytest.mark.parametrize("driver", supported_drivers) +def test_print_driver_options(driver): + """ Test 
fiona.meta.print_driver_options(driver) """ + # do not fail + fiona.meta.print_driver_options(driver) + + +@requires_gdal2 +def test_metadata_wrong_driver(): + """ Test that FionaValueError is raised for non existing driver""" + with pytest.raises(FionaValueError): + fiona.meta.print_driver_options("Not existing driver") + + +@requires_gdal2 +@pytest.mark.parametrize("driver", supported_drivers) +def test_extension(driver): + """ Test fiona.meta.extension(driver) """ + # do not fail + extension = fiona.meta.extension(driver) + assert extension is None or isinstance(extension, str) + + +@requires_gdal2 +@pytest.mark.parametrize("driver", supported_drivers) +def test_extensions(driver): + """ Test fiona.meta.extensions(driver) """ + # do not fail + extensions = fiona.meta.extensions(driver) + assert extensions is None or isinstance(extensions, list) + + +@requires_gdal2 +@pytest.mark.parametrize("driver", supported_drivers) +def test_supports_vsi(driver): + """ Test fiona.meta.supports_vsi(driver) """ + # do not fail + assert fiona.meta.supports_vsi(driver) in (True, False) + + +@requires_gdal2 +@pytest.mark.parametrize("driver", supported_drivers) +def test_supported_field_types(driver): + """ Test fiona.meta.supported_field_types(driver) """ + # do not fail + field_types = fiona.meta.supported_field_types(driver) + assert field_types is None or isinstance(field_types, list) + + +@requires_gdal23 +@pytest.mark.parametrize("driver", supported_drivers) +def test_supported_sub_field_types(driver): + """ Test fiona.meta.supported_sub_field_types(driver) """ + # do not fail + sub_field_types = fiona.meta.supported_sub_field_types(driver) + assert sub_field_types is None or isinstance(sub_field_types, list) diff -Nru fiona-1.8.22/tests/test_model.py fiona-1.9.5/tests/test_model.py --- fiona-1.8.22/tests/test_model.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/tests/test_model.py 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,335 @@ +"""Test of deprecations following RFC 1""" + +import pytest + +from fiona.errors import FionaDeprecationWarning +from fiona.model import ( + _Geometry, + Feature, + Geometry, + Object, + ObjectEncoder, + Properties, + decode_object, +) + + +def test_object_len(): + """object len is correct""" + obj = Object(g=1) + assert len(obj) == 1 + + +def test_object_iter(): + """object iter is correct""" + obj = Object(g=1) + assert [obj[k] for k in obj] == [1] + + +def test_object_setitem_warning(): + """Warn about __setitem__""" + obj = Object() + with pytest.warns(FionaDeprecationWarning, match="immutable"): + obj["g"] = 1 + assert "g" in obj + assert obj["g"] == 1 + + +def test_object_update_warning(): + """Warn about update""" + obj = Object() + with pytest.warns(FionaDeprecationWarning, match="immutable"): + obj.update(g=1) + assert "g" in obj + assert obj["g"] == 1 + + +def test_object_popitem_warning(): + """Warn about pop""" + obj = Object(g=1) + with pytest.warns(FionaDeprecationWarning, match="immutable"): + assert obj.pop("g") == 1 + assert "g" not in obj + + +def test_object_delitem_warning(): + """Warn about __delitem__""" + obj = Object(g=1) + with pytest.warns(FionaDeprecationWarning, match="immutable"): + del obj["g"] + assert "g" not in obj + + +def test_object_setitem_delegated(): + """Delegation in __setitem__ works""" + + class ThingDelegate: + def __init__(self, value): + self.value = value + + class Thing(Object): + _delegated_properties = ["value"] + + def __init__(self, value=None, **data): + self._delegate = ThingDelegate(value) + 
super().__init__(**data) + + thing = Thing() + assert thing["value"] is None + with pytest.warns(FionaDeprecationWarning, match="immutable"): + thing["value"] = 1 + assert thing["value"] == 1 + + +def test_object_delitem_delegated(): + """Delegation in __delitem__ works""" + + class ThingDelegate: + def __init__(self, value): + self.value = value + + class Thing(Object): + _delegated_properties = ["value"] + + def __init__(self, value=None, **data): + self._delegate = ThingDelegate(value) + super().__init__(**data) + + thing = Thing(1) + assert thing["value"] == 1 + with pytest.warns(FionaDeprecationWarning, match="immutable"): + del thing["value"] + assert thing["value"] is None + + +def test__geometry_ctor(): + """Construction of a _Geometry works""" + geom = _Geometry(type="Point", coordinates=(0, 0)) + assert geom.type == "Point" + assert geom.coordinates == (0, 0) + + +def test_geometry_type(): + """Geometry has a type""" + geom = Geometry(type="Point") + assert geom.type == "Point" + + +def test_geometry_coordinates(): + """Geometry has coordinates""" + geom = Geometry(coordinates=[(0, 0), (1, 1)]) + assert geom.coordinates == [(0, 0), (1, 1)] + + +def test_geometry__props(): + """Geometry properties as a dict""" + assert Geometry(coordinates=(0, 0), type="Point")._props() == { + "coordinates": (0, 0), + "type": "Point", + "geometries": None, + } + + +def test_geometry_gi(): + """Geometry __geo_interface__""" + gi = Geometry(coordinates=(0, 0), type="Point", geometries=[]).__geo_interface__ + assert gi["type"] == "Point" + assert gi["coordinates"] == (0, 0) + + +def test_feature_no_geometry(): + """Feature has no attribute""" + feat = Feature() + assert feat.geometry is None + + +def test_feature_geometry(): + """Feature has a geometry attribute""" + geom = Geometry(type="Point") + feat = Feature(geometry=geom) + assert feat.geometry is geom + + +def test_feature_no_id(): + """Feature has no id""" + feat = Feature() + assert feat.id is None + + +def test_feature_id(): + """Feature has an id""" + feat = Feature(id="123") + assert feat.id == "123" + + +def test_feature_no_properties(): + """Feature has no properties""" + feat = Feature() + assert len(feat.properties) == 0 + + +def test_feature_properties(): + """Feature has properties""" + feat = Feature(properties=Properties(foo=1)) + assert len(feat.properties) == 1 + assert feat.properties["foo"] == 1 + + +def test_feature_from_dict_kwargs(): + """Feature can be created from GeoJSON kwargs""" + data = { + "id": "foo", + "type": "Feature", + "geometry": {"type": "Point", "coordinates": (0, 0)}, + "properties": {"a": 0, "b": "bar"}, + "extras": {"this": 1}, + } + feat = Feature.from_dict(**data) + assert feat.id == "foo" + assert feat.type == "Feature" + assert feat.geometry.type == "Point" + assert feat.geometry.coordinates == (0, 0) + assert len(feat.properties) == 2 + assert feat.properties["a"] == 0 + assert feat.properties["b"] == "bar" + assert feat["extras"]["this"] == 1 + + +def test_feature_from_dict_obj(): + """Feature can be created from GeoJSON obj""" + data = { + "id": "foo", + "type": "Feature", + "geometry": {"type": "Point", "coordinates": (0, 0)}, + "properties": {"a": 0, "b": "bar"}, + "extras": {"this": 1}, + } + feat = Feature.from_dict(data) + assert feat.id == "foo" + assert feat.type == "Feature" + assert feat.geometry.type == "Point" + assert feat.geometry.coordinates == (0, 0) + assert len(feat.properties) == 2 + assert feat.properties["a"] == 0 + assert feat.properties["b"] == "bar" + assert 
feat["extras"]["this"] == 1 + + +def test_feature_from_dict_kwargs_2(): + """From GeoJSON kwargs using Geometry and Properties""" + data = { + "id": "foo", + "type": "Feature", + "geometry": Geometry(type="Point", coordinates=(0, 0)), + "properties": Properties(a=0, b="bar"), + "extras": {"this": 1}, + } + feat = Feature.from_dict(**data) + assert feat.id == "foo" + assert feat.type == "Feature" + assert feat.geometry.type == "Point" + assert feat.geometry.coordinates == (0, 0) + assert len(feat.properties) == 2 + assert feat.properties["a"] == 0 + assert feat.properties["b"] == "bar" + assert feat["extras"]["this"] == 1 + + +def test_geometry_encode(): + """Can encode a geometry""" + assert ObjectEncoder().default(Geometry(type="Point", coordinates=(0, 0))) == { + "type": "Point", + "coordinates": (0, 0), + } + + +def test_feature_encode(): + """Can encode a feature""" + o_dict = ObjectEncoder().default( + Feature( + id="foo", + geometry=Geometry(type="Point", coordinates=(0, 0)), + properties=Properties(a=1, foo="bar", bytes=b"01234"), + ) + ) + assert o_dict["id"] == "foo" + assert o_dict["geometry"]["type"] == "Point" + assert o_dict["geometry"]["coordinates"] == (0, 0) + assert o_dict["properties"]["bytes"] == b'3031323334' + + +def test_decode_object_hook(): + """Can decode a feature""" + data = { + "id": "foo", + "type": "Feature", + "geometry": {"type": "Point", "coordinates": (0, 0)}, + "properties": {"a": 0, "b": "bar"}, + "extras": {"this": 1}, + } + feat = decode_object(data) + assert feat.id == "foo" + assert feat.type == "Feature" + assert feat.geometry.type == "Point" + assert feat.geometry.coordinates == (0, 0) + assert len(feat.properties) == 2 + assert feat.properties["a"] == 0 + assert feat.properties["b"] == "bar" + assert feat["extras"]["this"] == 1 + + +def test_decode_object_hook_geometry(): + """Can decode a geometry""" + data = {"type": "Point", "coordinates": (0, 0)} + geometry = decode_object(data) + assert geometry.type == "Point" + assert geometry.coordinates == (0, 0) + + +@pytest.mark.parametrize("o", [{}, {"a": 1}, {"type": "FeatureCollection"}]) +def test_decode_object_hook_fallback(o): + """Pass through an ordinary dict""" + assert decode_object(o) == o + + +def test_properties(): + """Property factory works""" + assert Properties.from_dict(a=1, foo="bar")["a"] == 1 + + +def test_feature_gi(): + """Feature __geo_interface__.""" + gi = Feature( + id="foo", + geometry=Geometry(type="Point", coordinates=(0, 0)), + properties=Properties(a=1, foo="bar"), + ) + + assert gi["id"] == "foo" + assert gi["geometry"]["type"] == "Point" + assert gi["geometry"]["coordinates"] == (0, 0) + + +def test_encode_bytes(): + """Bytes are encoded using base64.""" + assert ObjectEncoder().default(b"01234") == b'3031323334' + + +def test_null_property_encoding(): + """A null feature property is retained.""" + # Verifies fix for gh-1270. + assert ObjectEncoder().default(Properties(a=1, b=None)) == {"a": 1, "b": None} + + +def test_null_geometry_encoding(): + """A null feature geometry is retained.""" + # Verifies fix for gh-1270. 
+def test_null_geometry_encoding():
+    """A null feature geometry is retained."""
+    # Verifies fix for gh-1270.
+    o_dict = ObjectEncoder().default(Feature())
+    assert o_dict["geometry"] is None
+
+
+def test_geometry_collection_encoding():
+    """No coordinates in a GeometryCollection."""
+    assert "coordinates" not in ObjectEncoder().default(
+        Geometry(type="GeometryCollection", geometries=[])
+    )
diff -Nru fiona-1.8.22/tests/test_multiconxn.py fiona-1.9.5/tests/test_multiconxn.py
--- fiona-1.8.22/tests/test_multiconxn.py	2022-10-14 23:26:41.000000000 +0000
+++ fiona-1.9.5/tests/test_multiconxn.py	2023-10-11 23:19:44.000000000 +0000
@@ -1,30 +1,32 @@
+from collections import OrderedDict
+
 import pytest
 
 import fiona
-from fiona.compat import OrderedDict
+from fiona.model import Feature, Geometry, Properties
 
 
-class TestReadAccess(object):
+class TestReadAccess:
     # To check that we'll be able to get multiple 'r' connections to layers
     # in a single file.
 
     def test_meta(self, path_coutwildrnp_shp):
         with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c:
-            with fiona.open(path_coutwildrnp_shp, "r",
-                            layer="coutwildrnp") as c2:
+            with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c2:
                 assert len(c) == len(c2)
                 assert sorted(c.schema.items()) == sorted(c2.schema.items())
 
     def test_feat(self, path_coutwildrnp_shp):
         with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c:
             f1 = next(iter(c))
-            with fiona.open(path_coutwildrnp_shp, "r",
-                            layer="coutwildrnp") as c2:
+            with fiona.open(path_coutwildrnp_shp, "r", layer="coutwildrnp") as c2:
                 f2 = next(iter(c2))
-                assert f1 == f2
+                assert f1.id == f2.id
+                assert f1.properties == f2.properties
+                assert f1.geometry.type == f2.geometry.type
 
 
-class TestReadWriteAccess(object):
+class TestReadWriteAccess:
     # To check that we'll be able to read from a file that we're
     # writing to.
 
@@ -32,17 +34,21 @@
     def multi_write_test_shp(self, tmpdir):
         self.shapefile_path = str(tmpdir.join("multi_write_test.shp"))
         self.c = fiona.open(
-            self.shapefile_path, "w",
+            self.shapefile_path,
+            "w",
             driver="ESRI Shapefile",
             schema={ -
'geometry': 'Point', - 'properties': [('title', 'str:80'), ('date', 'date')]}, - crs={'init': "epsg:4326", 'no_defs': True}, - encoding='utf-8') - self.f = { - 'type': 'Feature', - 'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)}, - 'properties': OrderedDict([('title', 'point one'), ('date', '2012-01-29')])} + "geometry": "Point", + "properties": [("title", "str:80"), ("date", "date")], + }, + crs={"init": "epsg:4326", "no_defs": True}, + encoding="utf-8", + ) + self.f = Feature( + geometry=Geometry(type="Point", coordinates=(0.0, 0.1)), + properties=OrderedDict([("title", "point one"), ("date", "2012-01-29")]), + ) self.c.writerecords([self.f]) self.c.flush() yield @@ -101,14 +110,12 @@ def test_read(self): c2 = fiona.open(str(self.dir.join("write_test.shp")), "r") f2 = next(iter(c2)) - del f2['id'] - assert self.f == f2 + assert self.f.properties == f2.properties c2.close() def test_read_after_close(self): c2 = fiona.open(str(self.dir.join("write_test.shp")), "r") self.c.close() f2 = next(iter(c2)) - del f2['id'] - assert self.f == f2 + assert self.f.properties == f2.properties c2.close() diff -Nru fiona-1.8.22/tests/test_non_counting_layer.py fiona-1.9.5/tests/test_non_counting_layer.py --- fiona-1.8.22/tests/test_non_counting_layer.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_non_counting_layer.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,3 +1,5 @@ +import unittest + import pytest import fiona @@ -5,11 +7,11 @@ @pytest.mark.usefixtures('uttc_path_gpx') -class TestNonCountingLayer(object): - def setup(self): +class TestNonCountingLayer(unittest.TestCase): + def setUp(self): self.c = fiona.open(self.path_gpx, "r", layer="track_points") - def teardown(self): + def tearDown(self): self.c.close() def test_len_fail(self): diff -Nru fiona-1.8.22/tests/test_open.py fiona-1.9.5/tests/test_open.py --- fiona-1.8.22/tests/test_open.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_open.py 2023-10-11 23:19:44.000000000 +0000 @@ -6,10 +6,9 @@ import pytest import fiona -from fiona._crs import crs_to_wkt +from fiona.crs import CRS from fiona.errors import DriverError - -from .conftest import requires_gdal21 +from fiona.model import Feature def test_open_shp(path_coutwildrnp_shp): @@ -23,8 +22,6 @@ assert fiona.open(path), "Failed to open !test.geojson" -@requires_gdal21 -@pytest.mark.xfail(raises=DriverError) def test_write_memfile_crs_wkt(): example_schema = { "geometry": "Point", @@ -32,18 +29,24 @@ } example_features = [ - { - "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, - "properties": {"title": "One"}, - }, - { - "geometry": {"type": "Point", "coordinates": [1.0, 2.0]}, - "properties": {"title": "Two"}, - }, - { - "geometry": {"type": "Point", "coordinates": [3.0, 4.0]}, - "properties": {"title": "Three"}, - }, + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": {"title": "One"}, + } + ), + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [1.0, 2.0]}, + "properties": {"title": "Two"}, + } + ), + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": [3.0, 4.0]}, + "properties": {"title": "Three"}, + } + ), ] with io.BytesIO() as fd: @@ -52,11 +55,11 @@ "w", driver="GPKG", schema=example_schema, - crs_wkt=crs_to_wkt("EPSG:32611"), + crs_wkt=CRS.from_epsg(32611).to_wkt(), ) as dst: dst.writerecords(example_features) fd.seek(0) with fiona.open(fd) as src: assert src.driver == "GPKG" - assert src.crs == {"init": "epsg:32611"} + assert src.crs == 
"EPSG:32611" diff -Nru fiona-1.8.22/tests/test_props.py fiona-1.9.5/tests/test_props.py --- fiona-1.8.22/tests/test_props.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_props.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,30 +1,30 @@ import json import os.path -from six import text_type import tempfile import fiona from fiona import prop_type, prop_width +from fiona.model import Feature from fiona.rfc3339 import FionaDateType def test_width_str(): - assert prop_width('str:254') == 254 - assert prop_width('str') == 80 + assert prop_width("str:254") == 254 + assert prop_width("str") == 80 def test_width_other(): - assert prop_width('int') == None - assert prop_width('float') == None - assert prop_width('date') == None + assert prop_width("int") == None + assert prop_width("float") == None + assert prop_width("date") == None def test_types(): - assert prop_type('str:254') == text_type - assert prop_type('str') == text_type - assert isinstance(0, prop_type('int')) - assert isinstance(0.0, prop_type('float')) - assert prop_type('date') == FionaDateType + assert prop_type("str:254") == str + assert prop_type("str") == str + assert isinstance(0, prop_type("int")) + assert isinstance(0.0, prop_type("float")) + assert prop_type("date") == FionaDateType def test_read_json_object_properties(): @@ -74,17 +74,17 @@ } """ tmpdir = tempfile.mkdtemp() - filename = os.path.join(tmpdir, 'test.json') + filename = os.path.join(tmpdir, "test.json") - with open(filename, 'w') as f: + with open(filename, "w") as f: f.write(data) with fiona.open(filename) as src: ftr = next(iter(src)) - props = ftr['properties'] - assert props['upperLeftCoordinate']['latitude'] == 45.66894 - assert props['upperLeftCoordinate']['longitude'] == 87.91166 - assert props['tricky'] == "{gotcha" + props = ftr["properties"] + assert props["upperLeftCoordinate"]["latitude"] == 45.66894 + assert props["upperLeftCoordinate"]["longitude"] == 87.91166 + assert props["tricky"] == "{gotcha" def test_write_json_object_properties(): @@ -133,64 +133,59 @@ ] } """ - data = json.loads(data)['features'][0] + data = Feature.from_dict(**json.loads(data)["features"][0]) tmpdir = tempfile.mkdtemp() - filename = os.path.join(tmpdir, 'test.json') + filename = os.path.join(tmpdir, "test.json") with fiona.open( - filename, 'w', - driver='GeoJSON', - schema={ - 'geometry': 'Polygon', - 'properties': {'upperLeftCoordinate': 'str', 'tricky': 'str'}} - ) as dst: + filename, + "w", + driver="GeoJSON", + schema={ + "geometry": "Polygon", + "properties": {"upperLeftCoordinate": "str", "tricky": "str"}, + }, + ) as dst: dst.write(data) with fiona.open(filename) as src: ftr = next(iter(src)) - props = ftr['properties'] - assert props['upperLeftCoordinate']['latitude'] == 45.66894 - assert props['upperLeftCoordinate']['longitude'] == 87.91166 - assert props['tricky'] == "{gotcha" + props = ftr["properties"] + assert props["upperLeftCoordinate"]["latitude"] == 45.66894 + assert props["upperLeftCoordinate"]["longitude"] == 87.91166 + assert props["tricky"] == "{gotcha" def test_json_prop_decode_non_geojson_driver(): - feature = { - "type": "Feature", - "properties": { - "ulc": { - "latitude": 45.66894, - "longitude": 87.91166 + feature = Feature.from_dict( + **{ + "type": "Feature", + "properties": { + "ulc": {"latitude": 45.66894, "longitude": 87.91166}, + "tricky": "{gotcha", }, - "tricky": "{gotcha" - }, - "geometry": { - "type": "Point", - "coordinates": [10, 15] + "geometry": {"type": "Point", "coordinates": [10, 15]}, } - } + ) meta = { - 'crs': 
'EPSG:4326',
-        'driver': 'ESRI Shapefile',
-        'schema': {
-            'geometry': 'Point',
-            'properties': {
-                'ulc': 'str:255',
-                'tricky': 'str:255'
-            }
-        }
+        "crs": "EPSG:4326",
+        "driver": "ESRI Shapefile",
+        "schema": {
+            "geometry": "Point",
+            "properties": {"ulc": "str:255", "tricky": "str:255"},
+        },
     }
 
     tmpdir = tempfile.mkdtemp()
-    filename = os.path.join(tmpdir, 'test.json')
-    with fiona.open(filename, 'w', **meta) as dst:
+    filename = os.path.join(tmpdir, "test.json")
+    with fiona.open(filename, "w", **meta) as dst:
         dst.write(feature)
 
     with fiona.open(filename) as src:
         actual = next(iter(src))
 
-    assert isinstance(actual['properties']['ulc'], text_type)
-    a = json.loads(actual['properties']['ulc'])
-    e = json.loads(actual['properties']['ulc'])
+    assert isinstance(actual["properties"]["ulc"], str)
+    a = json.loads(actual["properties"]["ulc"])
+    # expected value: the ulc mapping that was written above
+    e = {"latitude": 45.66894, "longitude": 87.91166}
     assert e == a
-    assert actual['properties']['tricky'].startswith('{')
+    assert actual["properties"]["tricky"].startswith("{")
diff -Nru fiona-1.8.22/tests/test_remove.py fiona-1.9.5/tests/test_remove.py
--- fiona-1.8.22/tests/test_remove.py	2022-10-14 23:26:41.000000000 +0000
+++ fiona-1.9.5/tests/test_remove.py	2023-10-11 23:19:44.000000000 +0000
@@ -8,26 +8,25 @@
 
 import fiona
 from fiona.errors import DatasetDeleteError
+from fiona.model import Feature
 
 
 def create_sample_data(filename, driver, **extra_meta):
-    meta = {
-        'driver': driver,
-        'schema': {
-            'geometry': 'Point',
-            'properties': {}
-        }
-    }
+    meta = {"driver": driver, "schema": {"geometry": "Point", "properties": {}}}
     meta.update(extra_meta)
-    with fiona.open(filename, 'w', **meta) as dst:
-        dst.write({
-            'geometry': {
-                'type': 'Point',
-                'coordinates': (0, 0),
-            },
-            'properties': {},
-        })
-    assert(os.path.exists(filename))
+    with fiona.open(filename, "w", **meta) as dst:
+        dst.write(
+            Feature.from_dict(
+                **{
+                    "geometry": {
+                        "type": "Point",
+                        "coordinates": (0, 0),
+                    },
+                    "properties": {},
+                }
+            )
+        )
+    assert os.path.exists(filename)
 
 
 drivers = ["ESRI Shapefile", "GeoJSON"]
@@ -40,7 +39,7 @@
 def test_remove(tmpdir, kind, driver, specify_driver):
     """Test various dataset removal operations"""
     extension = {"ESRI Shapefile": "shp", "GeoJSON": "json"}[driver]
-    filename = "delete_me.{extension}".format(extension=extension)
+    filename = f"delete_me.{extension}"
     output_filename = str(tmpdir.join(filename))
     create_sample_data(output_filename, driver=driver)
@@ -58,12 +57,13 @@
 
 def test_remove_nonexistent(tmpdir):
-    """Attempting to remove a file that does not exist results in an IOError"""
+    """Attempting to remove a file that does not exist results in an OSError"""
     filename = str(tmpdir.join("does_not_exist.shp"))
     assert not os.path.exists(filename)
-    with pytest.raises(IOError):
+    with pytest.raises(OSError):
         fiona.remove(filename)
+
 @requires_gpkg
 def test_remove_layer(tmpdir):
     filename = str(tmpdir.join("a_filename.gpkg"))
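# --- Illustrative sketch, not part of the upstream diff: the removal API
# exercised above. fiona.remove() deletes a whole dataset (the driver can be
# inferred or given explicitly); with layer= it removes a single layer where
# the driver supports that (e.g. GPKG, but not GeoJSON). The paths here are
# hypothetical and assumed to exist.
import fiona

fiona.remove("delete_me.shp")                     # whole dataset, driver inferred
fiona.remove("delete_me.json", driver="GeoJSON")  # driver given explicitly
fiona.remove("a_filename.gpkg", layer="layer_a")  # drop one layer from a GPKG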
""" filename = str(tmpdir.join("a_filename.geojson")) create_sample_data(filename, "GeoJSON") - with pytest.raises((RuntimeError, IOError)): + with pytest.raises((RuntimeError, OSError)): fiona.remove(filename, layer=0) assert os.path.exists(filename) diff -Nru fiona-1.8.22/tests/test_rfc3339.py fiona-1.9.5/tests/test_rfc3339.py --- fiona-1.8.22/tests/test_rfc3339.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_rfc3339.py 2023-10-11 23:19:44.000000000 +0000 @@ -9,7 +9,7 @@ from fiona.rfc3339 import group_accessor, pattern_date -class TestDateParse(object): +class TestDateParse: def test_yyyymmdd(self): assert parse_date("2012-01-29") == (2012, 1, 29, 0, 0, 0, 0.0, None) @@ -19,7 +19,7 @@ parse_date("xxx") -class TestTimeParse(object): +class TestTimeParse: def test_hhmmss(self): assert parse_time("10:11:12") == (0, 0, 0, 10, 11, 12, 0.0, None) @@ -44,7 +44,7 @@ parse_time("xxx") -class TestDatetimeParse(object): +class TestDatetimeParse: def test_yyyymmdd(self): assert ( diff -Nru fiona-1.8.22/tests/test_rfc64_tin.py fiona-1.9.5/tests/test_rfc64_tin.py --- fiona-1.8.22/tests/test_rfc64_tin.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_rfc64_tin.py 2023-10-11 23:19:44.000000000 +0000 @@ -4,46 +4,52 @@ """ import fiona +from fiona.model import Geometry -from .conftest import requires_gdal22 + +def _test_tin(geometry: Geometry) -> None: + """Test if TIN (((0 0 0, 0 0 1, 0 1 0, 0 0 0)), ((0 0 0, 0 1 0, 1 1 0, 0 0 0))) + is correctly converted to MultiPolygon. + """ + assert geometry["type"] == "MultiPolygon" + assert geometry["coordinates"] == [ + [[(0.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 1.0, 0.0), (0.0, 0.0, 0.0)]], + [[(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)]], + ] + + +def _test_triangle(geometry: Geometry) -> None: + """Test if TRIANGLE((0 0 0,0 1 0,1 1 0,0 0 0)) + is correctly converted to MultiPolygon.""" + assert geometry["type"] == "Polygon" + assert geometry["coordinates"] == [ + [(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)] + ] def test_tin_shp(path_test_tin_shp): """Convert TIN to MultiPolygon""" with fiona.open(path_test_tin_shp) as col: - assert col.schema['geometry'] == 'Unknown' + assert col.schema["geometry"] == "Unknown" features = list(col) assert len(features) == 1 - assert features[0]['geometry']['type'] == 'MultiPolygon' - assert features[0]['geometry']['coordinates'] == [[[(0.0, 0.0, 0.0), - (0.0, 0.0, 1.0), - (0.0, 1.0, 0.0), - (0.0, 0.0, 0.0)]], - [[(0.0, 0.0, 0.0), - (0.0, 1.0, 0.0), - (1.0, 1.0, 0.0), - (0.0, 0.0, 0.0)]]] + _test_tin(features[0]["geometry"]) -@requires_gdal22 def test_tin_csv(path_test_tin_csv): """Convert TIN to MultiPolygon and Triangle to Polygon""" with fiona.open(path_test_tin_csv) as col: - assert col.schema['geometry'] == 'Unknown' - features = list(col) - assert len(features) == 2 - assert features[0]['geometry']['type'] == 'MultiPolygon' - assert features[0]['geometry']['coordinates'] == [[[(0.0, 0.0, 0.0), - (0.0, 0.0, 1.0), - (0.0, 1.0, 0.0), - (0.0, 0.0, 0.0)]], - [[(0.0, 0.0, 0.0), - (0.0, 1.0, 0.0), - (1.0, 1.0, 0.0), - (0.0, 0.0, 0.0)]]] - - assert features[1]['geometry']['type'] == 'Polygon' - assert features[1]['geometry']['coordinates'] == [[(0.0, 0.0, 0.0), - (0.0, 1.0, 0.0), - (1.0, 1.0, 0.0), - (0.0, 0.0, 0.0)]] + assert col.schema["geometry"] == "Unknown" + + feature1 = next(col) + _test_tin(feature1["geometry"]) + + feature2 = next(col) + _test_triangle(feature2["geometry"]) + + feature3 = next(col) + assert feature3["geometry"]["type"] == 
"GeometryCollection" + assert len(feature3["geometry"]["geometries"]) == 2 + + _test_tin(feature3["geometry"]["geometries"][0]) + _test_triangle(feature3["geometry"]["geometries"][1]) diff -Nru fiona-1.8.22/tests/test_schema.py fiona-1.9.5/tests/test_schema.py --- fiona-1.8.22/tests/test_schema.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_schema.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,183 +1,204 @@ from collections import OrderedDict - -import fiona -from fiona.errors import SchemaError, UnsupportedGeometryTypeError, \ - DriverSupportError -from fiona.schema import FIELD_TYPES, normalize_field_type import os import tempfile -from .conftest import get_temp_filename + +import pytest + +import fiona from fiona.drvsupport import driver_mode_mingdal from fiona.env import GDALVersion -import pytest +from fiona.errors import SchemaError, UnsupportedGeometryTypeError, DriverSupportError +from fiona.model import Feature +from fiona.schema import FIELD_TYPES, normalize_field_type +from .conftest import get_temp_filename from .conftest import requires_only_gdal1, requires_gdal2 def test_schema_ordering_items(tmpdir): - name = str(tmpdir.join('test_scheme.shp')) - items = [('title', 'str:80'), ('date', 'date')] - with fiona.open(name, 'w', - driver="ESRI Shapefile", - schema={ - 'geometry': 'LineString', - 'properties': items}) as c: - assert list(c.schema['properties'].items()) == items + name = str(tmpdir.join("test_scheme.shp")) + items = [("title", "str:80"), ("date", "date")] + with fiona.open( + name, + "w", + driver="ESRI Shapefile", + schema={"geometry": "LineString", "properties": items}, + ) as c: + assert list(c.schema["properties"].items()) == items with fiona.open(name) as c: - assert list(c.schema['properties'].items()) == items + assert list(c.schema["properties"].items()) == items def test_shapefile_schema(tmpdir): - name = str(tmpdir.join('test_schema.shp')) - items = sorted({ - 'AWATER10': 'float', - 'CLASSFP10': 'str', - 'ZipCodeType': 'str', - 'EstimatedPopulation': 'float', - 'LocationType': 'str', - 'ALAND10': 'float', - 'TotalWages': 'float', - 'FUNCSTAT10': 'str', - 'Long': 'float', - 'City': 'str', - 'TaxReturnsFiled': 'float', - 'State': 'str', - 'Location': 'str', - 'GSrchCnt': 'float', - 'INTPTLAT10': 'str', - 'Lat': 'float', - 'MTFCC10': 'str', - 'Decommisioned': 'str', - 'GEOID10': 'str', - 'INTPTLON10': 'str'}.items()) - with fiona.open(name, 'w', - driver="ESRI Shapefile", - schema={'geometry': 'Polygon', 'properties': items}) as c: - assert list(c.schema['properties'].items()) == items + name = str(tmpdir.join("test_schema.shp")) + items = sorted( + { + "AWATER10": "float", + "CLASSFP10": "str", + "ZipCodeType": "str", + "EstimatedPopulation": "float", + "LocationType": "str", + "ALAND10": "float", + "TotalWages": "float", + "FUNCSTAT10": "str", + "Long": "float", + "City": "str", + "TaxReturnsFiled": "float", + "State": "str", + "Location": "str", + "GSrchCnt": "float", + "INTPTLAT10": "str", + "Lat": "float", + "MTFCC10": "str", + "Decommisioned": "str", + "GEOID10": "str", + "INTPTLON10": "str", + }.items() + ) + with fiona.open( + name, + "w", + driver="ESRI Shapefile", + schema={"geometry": "Polygon", "properties": items}, + ) as c: + assert list(c.schema["properties"].items()) == items c.write( - {'geometry': {'coordinates': [[(-117.882442, 33.783633), - (-117.882284, 33.783817), - (-117.863348, 33.760016), - (-117.863478, 33.760016), - (-117.863869, 33.760017), - (-117.864, 33.760017999999995), - (-117.864239, 33.760019), - 
(-117.876608, 33.755769), - (-117.882886, 33.783114), - (-117.882688, 33.783345), - (-117.882639, 33.783401999999995), - (-117.88259, 33.78346), - (-117.882442, 33.783633)]], - 'type': 'Polygon'}, - 'id': '1', - 'properties': { - 'ALAND10': 8819240.0, - 'AWATER10': 309767.0, - 'CLASSFP10': 'B5', - 'City': 'SANTA ANA', - 'Decommisioned': False, - 'EstimatedPopulation': 27773.0, - 'FUNCSTAT10': 'S', - 'GEOID10': '92706', - 'GSrchCnt': 0.0, - 'INTPTLAT10': '+33.7653010', - 'INTPTLON10': '-117.8819759', - 'Lat': 33.759999999999998, - 'Location': 'NA-US-CA-SANTA ANA', - 'LocationType': 'PRIMARY', - 'Long': -117.88, - 'MTFCC10': 'G6350', - 'State': 'CA', - 'TaxReturnsFiled': 14635.0, - 'TotalWages': 521280485.0, - 'ZipCodeType': 'STANDARD'}, - 'type': 'Feature'}) + Feature.from_dict( + **{ + "geometry": { + "coordinates": [ + [ + (-117.882442, 33.783633), + (-117.882284, 33.783817), + (-117.863348, 33.760016), + (-117.863478, 33.760016), + (-117.863869, 33.760017), + (-117.864, 33.760017999999995), + (-117.864239, 33.760019), + (-117.876608, 33.755769), + (-117.882886, 33.783114), + (-117.882688, 33.783345), + (-117.882639, 33.783401999999995), + (-117.88259, 33.78346), + (-117.882442, 33.783633), + ] + ], + "type": "Polygon", + }, + "id": "1", + "properties": { + "ALAND10": 8819240.0, + "AWATER10": 309767.0, + "CLASSFP10": "B5", + "City": "SANTA ANA", + "Decommisioned": False, + "EstimatedPopulation": 27773.0, + "FUNCSTAT10": "S", + "GEOID10": "92706", + "GSrchCnt": 0.0, + "INTPTLAT10": "+33.7653010", + "INTPTLON10": "-117.8819759", + "Lat": 33.759999999999998, + "Location": "NA-US-CA-SANTA ANA", + "LocationType": "PRIMARY", + "Long": -117.88, + "MTFCC10": "G6350", + "State": "CA", + "TaxReturnsFiled": 14635.0, + "TotalWages": 521280485.0, + "ZipCodeType": "STANDARD", + }, + "type": "Feature", + } + ) + ) assert len(c) == 1 with fiona.open(name) as c: - assert ( - list(c.schema['properties'].items()) == - sorted([('AWATER10', 'float:24.15'), - ('CLASSFP10', 'str:80'), - ('ZipCodeTyp', 'str:80'), - ('EstimatedP', 'float:24.15'), - ('LocationTy', 'str:80'), - ('ALAND10', 'float:24.15'), - ('INTPTLAT10', 'str:80'), - ('FUNCSTAT10', 'str:80'), - ('Long', 'float:24.15'), - ('City', 'str:80'), - ('TaxReturns', 'float:24.15'), - ('State', 'str:80'), - ('Location', 'str:80'), - ('GSrchCnt', 'float:24.15'), - ('TotalWages', 'float:24.15'), - ('Lat', 'float:24.15'), - ('MTFCC10', 'str:80'), - ('INTPTLON10', 'str:80'), - ('GEOID10', 'str:80'), - ('Decommisio', 'str:80')])) + assert list(c.schema["properties"].items()) == sorted( + [ + ("AWATER10", "float:24.15"), + ("CLASSFP10", "str:80"), + ("ZipCodeTyp", "str:80"), + ("EstimatedP", "float:24.15"), + ("LocationTy", "str:80"), + ("ALAND10", "float:24.15"), + ("INTPTLAT10", "str:80"), + ("FUNCSTAT10", "str:80"), + ("Long", "float:24.15"), + ("City", "str:80"), + ("TaxReturns", "float:24.15"), + ("State", "str:80"), + ("Location", "str:80"), + ("GSrchCnt", "float:24.15"), + ("TotalWages", "float:24.15"), + ("Lat", "float:24.15"), + ("MTFCC10", "str:80"), + ("INTPTLON10", "str:80"), + ("GEOID10", "str:80"), + ("Decommisio", "str:80"), + ] + ) f = next(iter(c)) - assert f['properties']['EstimatedP'] == 27773.0 + assert f.properties["EstimatedP"] == 27773.0 def test_field_truncation_issue177(tmpdir): - name = str(tmpdir.join('output.shp')) + name = str(tmpdir.join("output.shp")) kwargs = { - 'driver': 'ESRI Shapefile', - 'crs': 'EPSG:4326', - 'schema': { - 'geometry': 'Point', - 'properties': [('a_fieldname', 'float')]}} + "driver": "ESRI Shapefile", + "crs": 
"EPSG:4326", + "schema": {"geometry": "Point", "properties": [("a_fieldname", "float")]}, + } - with fiona.open(name, 'w', **kwargs) as dst: + with fiona.open(name, "w", **kwargs) as dst: rec = {} - rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)} - rec['properties'] = {'a_fieldname': 3.0} - dst.write(rec) + rec["geometry"] = {"type": "Point", "coordinates": (0, 0)} + rec["properties"] = {"a_fieldname": 3.0} + dst.write(Feature.from_dict(**rec)) with fiona.open(name) as src: first = next(iter(src)) - assert first['geometry'] == {'type': 'Point', 'coordinates': (0, 0)} - assert first['properties']['a_fieldnam'] == 3.0 + assert first.geometry.type == "Point" + assert first.geometry.coordinates == (0, 0) + assert first.properties["a_fieldnam"] == 3.0 def test_unsupported_geometry_type(): tmpdir = tempfile.mkdtemp() - tmpfile = os.path.join(tmpdir, 'test-test-geom.shp') + tmpfile = os.path.join(tmpdir, "test-test-geom.shp") profile = { - 'driver': 'ESRI Shapefile', - 'schema': { - 'geometry': 'BOGUS', - 'properties': {}}} + "driver": "ESRI Shapefile", + "schema": {"geometry": "BOGUS", "properties": {}}, + } with pytest.raises(UnsupportedGeometryTypeError): - fiona.open(tmpfile, 'w', **profile) + fiona.open(tmpfile, "w", **profile) -@pytest.mark.parametrize('x', list(range(1, 10))) +@pytest.mark.parametrize("x", list(range(1, 10))) def test_normalize_int32(x): - assert normalize_field_type('int:{}'.format(x)) == 'int32' + assert normalize_field_type(f"int:{x}") == "int32" @requires_gdal2 -@pytest.mark.parametrize('x', list(range(10, 20))) +@pytest.mark.parametrize("x", list(range(10, 20))) def test_normalize_int64(x): - assert normalize_field_type('int:{}'.format(x)) == 'int64' + assert normalize_field_type(f"int:{x}") == "int64" -@pytest.mark.parametrize('x', list(range(0, 20))) +@pytest.mark.parametrize("x", list(range(0, 20))) def test_normalize_str(x): - assert normalize_field_type('str:{}'.format(x)) == 'str' + assert normalize_field_type(f"str:{x}") == "str" def test_normalize_bool(): - assert normalize_field_type('bool') == 'bool' + assert normalize_field_type("bool") == "bool" def test_normalize_float(): - assert normalize_field_type('float:25.8') == 'float' + assert normalize_field_type("float:25.8") == "float" def generate_field_types(): @@ -191,76 +212,83 @@ return list(sorted(types)) + [None] -@pytest.mark.parametrize('x', generate_field_types()) +@pytest.mark.parametrize("x", generate_field_types()) def test_normalize_std(x): assert normalize_field_type(x) == x def test_normalize_error(): with pytest.raises(SchemaError): - assert normalize_field_type('thingy') + assert normalize_field_type("thingy") @requires_only_gdal1 -@pytest.mark.parametrize('field_type', ['time', 'datetime']) +@pytest.mark.parametrize("field_type", ["time", "datetime"]) def test_check_schema_driver_support_shp(tmpdir, field_type): with pytest.raises(DriverSupportError): - name = str(tmpdir.join('test_scheme.shp')) - items = [('field1', field_type)] - with fiona.open(name, 'w', - driver="ESRI Shapefile", - schema={ - 'geometry': 'LineString', - 'properties': items}) as c: - pass + name = str(tmpdir.join("test_scheme.shp")) + items = [("field1", field_type)] + with fiona.open( + name, + "w", + driver="ESRI Shapefile", + schema={"geometry": "LineString", "properties": items}, + ) as c: + pass @requires_only_gdal1 def test_check_schema_driver_support_gpkg(tmpdir): with pytest.raises(DriverSupportError): - name = str(tmpdir.join('test_scheme.gpkg')) - items = [('field1', 'time')] - with fiona.open(name, 
'w', - driver="GPKG", - schema={ - 'geometry': 'LineString', - 'properties': items}) as c: + name = str(tmpdir.join("test_scheme.gpkg")) + items = [("field1", "time")] + with fiona.open( + name, + "w", + driver="GPKG", + schema={"geometry": "LineString", "properties": items}, + ) as c: pass -@pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) +@pytest.mark.parametrize("driver", ["GPKG", "GeoJSON"]) def test_geometry_only_schema_write(tmpdir, driver): schema = { "geometry": "Polygon", # No properties defined here. } - record = {'geometry': {'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]}} + record = Feature.from_dict( + **{ + "geometry": { + "type": "Polygon", + "coordinates": [ + [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)] + ], + } + } + ) path = str(tmpdir.join(get_temp_filename(driver))) - with fiona.open(path, - mode='w', - driver=driver, - schema=schema) as c: + with fiona.open(path, mode="w", driver=driver, schema=schema) as c: c.write(record) - with fiona.open(path, - mode='r', - driver=driver) as c: + with fiona.open(path, mode="r", driver=driver) as c: data = [f for f in c] assert len(data) == 1 - assert len(data[0].get('properties', {})) == 0 - assert data[0]['geometry'] == record['geometry'] + assert len(data[0].properties) == 0 + assert data[0].geometry.type == record.geometry["type"] -@pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) +@pytest.mark.parametrize("driver", ["GPKG", "GeoJSON"]) def test_geometry_only_schema_update(tmpdir, driver): # Guard unsupported drivers - if driver in driver_mode_mingdal['a'] and GDALVersion.runtime() < GDALVersion( - *driver_mode_mingdal['a'][driver][:2]): + if driver in driver_mode_mingdal["a"] and GDALVersion.runtime() < GDALVersion( + *driver_mode_mingdal["a"][driver][:2] + ): return schema = { @@ -268,119 +296,149 @@ # No properties defined here. 
} - record1 = { - 'geometry': {'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]]}} - record2 = { - 'geometry': {'type': 'Polygon', 'coordinates': [[(0.0, 0.0), (2.0, 0.0), (2.0, 2.0), (2.0, 0.0), (0.0, 0.0)]]}} + record1 = Feature.from_dict( + **{ + "geometry": { + "type": "Polygon", + "coordinates": [ + [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)] + ], + } + } + ) + record2 = Feature.from_dict( + **{ + "geometry": { + "type": "Polygon", + "coordinates": [ + [(0.0, 0.0), (2.0, 0.0), (2.0, 2.0), (2.0, 0.0), (0.0, 0.0)] + ], + } + } + ) path = str(tmpdir.join(get_temp_filename(driver))) # Create file - with fiona.open(path, - mode='w', - driver=driver, - schema=schema) as c: + with fiona.open(path, mode="w", driver=driver, schema=schema) as c: c.write(record1) # Append record - with fiona.open(path, - mode='a', - driver=driver) as c: + with fiona.open(path, mode="a", driver=driver) as c: c.write(record2) - with fiona.open(path, - mode='r', - driver=driver) as c: + with fiona.open(path, mode="r", driver=driver) as c: data = [f for f in c] assert len(data) == 2 for f in data: - assert len(f.get('properties', {})) == 0 - assert data[0]['geometry'] == record1['geometry'] - assert data[1]['geometry'] == record2['geometry'] + assert len(f.properties) == 0 + + assert data[0].geometry.type == record1.geometry["type"] + assert data[1].geometry.type == record2.geometry["type"] -@pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) +@pytest.mark.parametrize("driver", ["GPKG", "GeoJSON"]) def test_property_only_schema_write(tmpdir, driver): schema = { # No geometry defined here. - "properties": {'prop1': 'str'} + "properties": {"prop1": "str"} } - record1 = {'properties': {'prop1': 'one'}} + record1 = Feature.from_dict(**{"properties": {"prop1": "one"}}) path = str(tmpdir.join(get_temp_filename(driver))) - with fiona.open(path, - mode='w', - driver=driver, - schema=schema) as c: + with fiona.open(path, mode="w", driver=driver, schema=schema) as c: c.write(record1) - with fiona.open(path, - mode='r', - driver=driver) as c: + with fiona.open(path, mode="r", driver=driver) as c: data = [f for f in c] assert len(data) == 1 - assert len(data[0].get('properties', {})) == 1 - assert 'prop1' in data[0]['properties'] and data[0]['properties']['prop1'] == 'one' + assert len(data[0].properties) == 1 + assert "prop1" in data[0].properties and data[0].properties["prop1"] == "one" for f in data: - assert 'geometry' not in f or f['geometry'] is None + assert f.geometry is None -@pytest.mark.parametrize('driver', ['GPKG', 'GeoJSON']) +@pytest.mark.parametrize("driver", ["GPKG", "GeoJSON"]) def test_property_only_schema_update(tmpdir, driver): # Guard unsupported drivers - if driver in driver_mode_mingdal['a'] and GDALVersion.runtime() < GDALVersion( - *driver_mode_mingdal['a'][driver][:2]): + if driver in driver_mode_mingdal["a"] and GDALVersion.runtime() < GDALVersion( + *driver_mode_mingdal["a"][driver][:2] + ): return schema = { # No geometry defined here. 
- "properties": {'prop1': 'str'} + "properties": {"prop1": "str"} } - record1 = {'properties': {'prop1': 'one'}} - record2 = {'properties': {'prop1': 'two'}} + record1 = Feature.from_dict(**{"properties": {"prop1": "one"}}) + record2 = Feature.from_dict(**{"properties": {"prop1": "two"}}) path = str(tmpdir.join(get_temp_filename(driver))) # Create file - with fiona.open(path, - mode='w', - driver=driver, - schema=schema) as c: + with fiona.open(path, mode="w", driver=driver, schema=schema) as c: c.write(record1) # Append record - with fiona.open(path, - mode='a', - driver=driver) as c: + with fiona.open(path, mode="a", driver=driver) as c: c.write(record2) - with fiona.open(path, - mode='r', - driver=driver) as c: + with fiona.open(path, mode="r", driver=driver) as c: data = [f for f in c] assert len(data) == 2 for f in data: - assert len(f.get('properties', {})) == 1 - assert 'geometry' not in f or f['geometry'] is None - assert 'prop1' in data[0]['properties'] and data[0]['properties']['prop1'] == 'one' - assert 'prop1' in data[1]['properties'] and data[1]['properties']['prop1'] == 'two' + assert len(f.properties) == 1 + assert f.geometry is None + assert "prop1" in data[0].properties and data[0].properties["prop1"] == "one" + assert "prop1" in data[1].properties and data[1].properties["prop1"] == "two" def test_schema_default_fields_wrong_type(tmpdir): - """ Test for SchemaError if a default field is specified with a different type""" + """Test for SchemaError if a default field is specified with a different type""" - name = str(tmpdir.join('test.gpx')) - schema = {'properties': OrderedDict([('ele', 'str'), ('time', 'datetime')]), - 'geometry': 'Point'} + name = str(tmpdir.join("test.gpx")) + schema = { + "properties": OrderedDict([("ele", "str"), ("time", "datetime")]), + "geometry": "Point", + } with pytest.raises(SchemaError): - with fiona.open(name, 'w', - driver="GPX", - schema=schema) as c: + with fiona.open(name, "w", driver="GPX", schema=schema) as c: pass + + +def test_schema_string_list(tmp_path): + output_file = tmp_path / "fio_test.geojson" + schema = { + "properties": { + "time_range": "str", + }, + "geometry": "Point", +} + with fiona.open( + output_file, "w", driver="GeoJSON", schema=schema, crs="EPSG:4326" + ) as fds: + fds.writerecords( + [ + { + "id": 1, + "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, + "properties": { + "time_range": '["2020-01-01", "2020-01-02"]', + }, + }, + ] + ) + + with fiona.open(output_file) as fds: + assert fds.schema["properties"] == {"time_range": "List[str]"} + layers = list(fds) + assert layers[0]["properties"] == { + "time_range": ["2020-01-01", "2020-01-02"] + } diff -Nru fiona-1.8.22/tests/test_schema_geom.py fiona-1.9.5/tests/test_schema_geom.py --- fiona-1.8.22/tests/test_schema_geom.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_schema_geom.py 2023-10-11 23:19:44.000000000 +0000 @@ -6,60 +6,85 @@ import pytest from fiona.errors import GeometryTypeValidationError, UnsupportedGeometryTypeError +from fiona.model import Feature + @pytest.fixture def filename_shp(tmpdir): return str(tmpdir.join("example.shp")) + @pytest.fixture def filename_json(tmpdir): return str(tmpdir.join("example.json")) + properties = {"name": "str"} PROPERTIES = {"name": "example"} POINT = {"type": "Point", "coordinates": (1.0, 2.0)} LINESTRING = {"type": "LineString", "coordinates": [(1.0, 2.0), (3.0, 4.0)]} POLYGON = {"type": "Polygon", "coordinates": [[(0.0, 0.0), (1.0, 1.0), (0.0, 0.1)]]} -MULTILINESTRING = {"type": 
"MultiLineString", "coordinates": [[(0.0, 0.0), (1.0, 1.0)], [(1.0, 2.0), (3.0, 4.0)]]} -GEOMETRYCOLLECTION = {"type": "GeometryCollection", "geometries": [POINT, LINESTRING, POLYGON]} +MULTILINESTRING = { + "type": "MultiLineString", + "coordinates": [[(0.0, 0.0), (1.0, 1.0)], [(1.0, 2.0), (3.0, 4.0)]], +} +GEOMETRYCOLLECTION = { + "type": "GeometryCollection", + "geometries": [POINT, LINESTRING, POLYGON], +} INVALID = {"type": "InvalidType", "coordinates": (42.0, 43.0)} POINT_3D = {"type": "Point", "coordinates": (1.0, 2.0, 3.0)} + def write_point(collection): - feature = {"geometry": POINT, "properties": PROPERTIES} + feature = Feature.from_dict(**{"geometry": POINT, "properties": PROPERTIES}) collection.write(feature) + def write_linestring(collection): - feature = {"geometry": LINESTRING, "properties": PROPERTIES} + feature = Feature.from_dict(**{"geometry": LINESTRING, "properties": PROPERTIES}) collection.write(feature) + def write_polygon(collection): - feature = {"geometry": POLYGON, "properties": PROPERTIES} + feature = Feature.from_dict(**{"geometry": POLYGON, "properties": PROPERTIES}) collection.write(feature) + def write_invalid(collection): - feature = {"geometry": INVALID, "properties": PROPERTIES} + feature = Feature.from_dict(**{"geometry": INVALID, "properties": PROPERTIES}) collection.write(feature) + def write_multilinestring(collection): - feature = {"geometry": MULTILINESTRING, "properties": PROPERTIES} + feature = Feature.from_dict( + **{"geometry": MULTILINESTRING, "properties": PROPERTIES} + ) collection.write(feature) + def write_point_3d(collection): - feature = {"geometry": POINT_3D, "properties": PROPERTIES} + feature = Feature.from_dict(**{"geometry": POINT_3D, "properties": PROPERTIES}) collection.write(feature) + def write_geometrycollection(collection): - feature = {"geometry": GEOMETRYCOLLECTION, "properties": PROPERTIES} + feature = Feature.from_dict( + **{"geometry": GEOMETRYCOLLECTION, "properties": PROPERTIES} + ) collection.write(feature) + def write_null(collection): - feature = {"geometry": None, "properties": PROPERTIES} + feature = Feature.from_dict(**{"geometry": None, "properties": PROPERTIES}) collection.write(feature) + def test_point(filename_shp): schema = {"geometry": "Point", "properties": properties} - with fiona.open(filename_shp, "w", driver="ESRI Shapefile", schema=schema) as collection: + with fiona.open( + filename_shp, "w", driver="ESRI Shapefile", schema=schema + ) as collection: write_point(collection) write_point_3d(collection) write_null(collection) @@ -70,6 +95,7 @@ with pytest.raises(GeometryTypeValidationError): write_invalid(collection) + def test_multi_type(filename_json): schema = {"geometry": ("Point", "LineString"), "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: @@ -83,6 +109,7 @@ with pytest.raises(GeometryTypeValidationError): write_invalid(collection) + def test_unknown(filename_json): """Reading and writing layers with "Unknown" (i.e. 
any) geometry type""" # write a layer with a mixture of geometry types @@ -104,6 +131,7 @@ with fiona.open(filename_dst, "w", **src.meta) as dst: dst.writerecords(src) + def test_any(filename_json): schema = {"geometry": "Any", "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: @@ -116,48 +144,62 @@ with pytest.raises(GeometryTypeValidationError): write_invalid(collection) + def test_broken(filename_json): schema = {"geometry": "NOT_VALID", "properties": properties} with pytest.raises(UnsupportedGeometryTypeError): with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema): pass + def test_broken_list(filename_json): - schema = {"geometry": ("Point", "LineString", "NOT_VALID"), "properties": properties} + schema = { + "geometry": ("Point", "LineString", "NOT_VALID"), + "properties": properties, + } with pytest.raises(UnsupportedGeometryTypeError): collection = fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) + def test_invalid_schema(filename_shp): """Features match schema but geometries not supported by driver""" schema = {"geometry": ("Point", "LineString"), "properties": properties} - with fiona.open(filename_shp, "w", driver="ESRI Shapefile", schema=schema) as collection: + with fiona.open( + filename_shp, "w", driver="ESRI Shapefile", schema=schema + ) as collection: write_linestring(collection) with pytest.raises(RuntimeError): # ESRI Shapefile can only store a single geometry type write_point(collection) + def test_esri_multi_geom(filename_shp): """ESRI Shapefile doesn't differentiate between LineString/MultiLineString""" schema = {"geometry": "LineString", "properties": properties} - with fiona.open(filename_shp, "w", driver="ESRI Shapefile", schema=schema) as collection: + with fiona.open( + filename_shp, "w", driver="ESRI Shapefile", schema=schema + ) as collection: write_linestring(collection) write_multilinestring(collection) with pytest.raises(GeometryTypeValidationError): write_point(collection) + def test_3d_schema_ignored(filename_json): schema = {"geometry": "3D Point", "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: write_point(collection) write_point_3d(collection) + def test_geometrycollection_schema(filename_json): schema = {"geometry": "GeometryCollection", "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: write_geometrycollection(collection) + def test_none_schema(filename_json): schema = {"geometry": None, "properties": properties} with fiona.open(filename_json, "w", driver="GeoJSON", schema=schema) as collection: diff -Nru fiona-1.8.22/tests/test_session.py fiona-1.9.5/tests/test_session.py --- fiona-1.8.22/tests/test_session.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_session.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,9 +1,78 @@ """Tests of the ogrext.Session class""" +import pytest import fiona +from fiona.errors import GDALVersionError, UnsupportedOperation +from .conftest import gdal_version def test_get(path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp) as col: feat3 = col.get(2) assert feat3['properties']['NAME'] == 'Mount Zirkel Wilderness' + + +@pytest.mark.parametrize("layer, namespace, tags", [ + (None, None, {"test_tag1": "test_value1", "test_tag2": "test_value2"}), + (None, "test", {"test_tag1": "test_value1", "test_tag2": "test_value2"}), + (None, None, {}), + (None, "test", {}), + ("layer", None, {"test_tag1": 
"test_value1", "test_tag2": "test_value2"}), + ("layer", "test", {"test_tag1": "test_value1", "test_tag2": "test_value2"}), + ("layer", None, {}), + ("layer", "test", {}), +]) +@pytest.mark.skipif(gdal_version.major < 2, reason="Broken on GDAL 1.x") +def test_update_tags(layer, namespace, tags, tmpdir): + test_geopackage = str(tmpdir.join("test.gpkg")) + schema = {'properties': {'CDATA1': 'str:254'}, 'geometry': 'Polygon'} + with fiona.Env(), fiona.open( + test_geopackage, "w", driver="GPKG", schema=schema, layer=layer) as gpkg: + assert gpkg.tags() == {} + gpkg.update_tags(tags, ns=namespace) + + with fiona.Env(), fiona.open(test_geopackage, layer=layer) as gpkg: + assert gpkg.tags(ns=namespace) == tags + if namespace is not None: + assert gpkg.tags() == {} + with pytest.raises(UnsupportedOperation): + gpkg.update_tags({}, ns=namespace) + + +@pytest.mark.parametrize("layer, namespace", [ + (None, None), + (None, "test"), + ("test", None), + ("test", "test"), +]) +@pytest.mark.skipif(gdal_version.major < 2, reason="Broken on GDAL 1.x") +def test_update_tag_item(layer, namespace, tmpdir): + test_geopackage = str(tmpdir.join("test.gpkg")) + schema = {'properties': {'CDATA1': 'str:254'}, 'geometry': 'Polygon'} + with fiona.Env(), fiona.open( + test_geopackage, "w", driver="GPKG", schema=schema, layer=layer) as gpkg: + assert gpkg.get_tag_item("test_tag1", ns=namespace) is None + gpkg.update_tag_item("test_tag1", "test_value1", ns=namespace) + + with fiona.Env(), fiona.open(test_geopackage, layer=layer) as gpkg: + if namespace is not None: + assert gpkg.get_tag_item("test_tag1") is None + assert gpkg.get_tag_item("test_tag1", ns=namespace) == "test_value1" + with pytest.raises(UnsupportedOperation): + gpkg.update_tag_item("test_tag1", "test_value1", ns=namespace) + + +@pytest.mark.skipif(gdal_version.major >= 2, reason="Only raises on GDAL 1.x") +def test_gdal_version_error(tmpdir): + test_geopackage = str(tmpdir.join("test.gpkg")) + schema = {'properties': {'CDATA1': 'str:254'}, 'geometry': 'Polygon'} + with fiona.Env(), fiona.open( + test_geopackage, "w", driver="GPKG", schema=schema, layer="layer") as gpkg: + with pytest.raises(GDALVersionError): + gpkg.update_tags({"test_tag1": "test_value1"}, ns="test") + with pytest.raises(GDALVersionError): + gpkg.update_tag_item("test_tag1", "test_value1", ns="test") + with pytest.raises(GDALVersionError): + gpkg.tags() + with pytest.raises(GDALVersionError): + gpkg.get_tag_item("test_tag1") diff -Nru fiona-1.8.22/tests/test_slice.py fiona-1.9.5/tests/test_slice.py --- fiona-1.8.22/tests/test_slice.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_slice.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,15 +1,18 @@ -"""Note well: collection slicing is deprecated! 
-""" +"""Note well: collection slicing is deprecated!""" + import tempfile import shutil import os from collections import OrderedDict import pytest + from fiona.env import GDALVersion import fiona +from fiona.drvsupport import supported_drivers, _driver_supports_mode from fiona.errors import FionaDeprecationWarning +from fiona.model import Feature + from .conftest import get_temp_filename -from fiona.drvsupport import supported_drivers, _driver_supports_mode gdal_version = GDALVersion.runtime() @@ -17,7 +20,7 @@ def test_collection_get(path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp) as src: result = src[5] - assert result['id'] == '5' + assert result.id == "5" def test_collection_slice(path_coutwildrnp_shp): @@ -25,7 +28,7 @@ results = src[:5] assert isinstance(results, list) assert len(results) == 5 - assert results[4]['id'] == '4' + assert results[4].id == "4" def test_collection_iterator_slice(path_coutwildrnp_shp): @@ -34,105 +37,139 @@ assert len(results) == 5 k, v = results[4] assert k == 4 - assert v['id'] == '4' + assert v.id == "4" def test_collection_iterator_next(path_coutwildrnp_shp): with fiona.open(path_coutwildrnp_shp) as src: k, v = next(src.items(5, None)) assert k == 5 - assert v['id'] == '5' + assert v.id == "5" -@pytest.fixture(scope="module", params=[driver for driver in supported_drivers if - _driver_supports_mode(driver, 'w') - and driver not in {'DGN', 'MapInfo File', 'GPSTrackMaker', 'GPX', 'BNA', 'DXF'}]) +@pytest.fixture( + scope="module", + params=[ + driver + for driver in supported_drivers + if _driver_supports_mode(driver, "w") + and driver not in {"DGN", "MapInfo File", "GPSTrackMaker", "GPX", "BNA", "DXF"} + ], +) def slice_dataset_path(request): - """ Create temporary datasets for test_collection_iterator_items_slice()""" + """Create temporary datasets for test_collection_iterator_items_slice()""" driver = request.param min_id = 0 max_id = 9 def get_schema(driver): - special_schemas = {'CSV': {'geometry': None, 'properties': OrderedDict([('position', 'int')])}} - return special_schemas.get(driver, {'geometry': 'Point', 'properties': OrderedDict([('position', 'int')])}) + special_schemas = { + "CSV": {"geometry": None, "properties": OrderedDict([("position", "int")])} + } + return special_schemas.get( + driver, + {"geometry": "Point", "properties": OrderedDict([("position", "int")])}, + ) def get_records(driver, range): - special_records1 = {'CSV': [{'geometry': None, 'properties': {'position': i}} for i in range], - 'PCIDSK': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i), 0.0)}, - 'properties': {'position': i}} for i in range] - } - return special_records1.get(driver, [ - {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {'position': i}} for i in - range]) + special_records1 = { + "CSV": [ + Feature.from_dict(**{"geometry": None, "properties": {"position": i}}) + for i in range + ], + "PCIDSK": [ + Feature.from_dict( + **{ + "geometry": { + "type": "Point", + "coordinates": (0.0, float(i), 0.0), + }, + "properties": {"position": i}, + } + ) + for i in range + ], + } + return special_records1.get( + driver, + [ + Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0.0, float(i))}, + "properties": {"position": i}, + } + ) + for i in range + ], + ) schema = get_schema(driver) records = get_records(driver, range(min_id, max_id + 1)) create_kwargs = {} - if driver == 'FlatGeobuf': - create_kwargs['SPATIAL_INDEX'] = False + if driver == "FlatGeobuf": + create_kwargs["SPATIAL_INDEX"] = 
False tmpdir = tempfile.mkdtemp() path = os.path.join(tmpdir, get_temp_filename(driver)) - with fiona.open(path, 'w', - driver=driver, - schema=schema, - **create_kwargs) as c: + with fiona.open(path, "w", driver=driver, schema=schema, **create_kwargs) as c: c.writerecords(records) yield path shutil.rmtree(tmpdir) -@pytest.mark.parametrize("args", [(0, 5, None), - (1, 5, None), - (-5, None, None), - (-5, -1, None), - (0, None, None), - (5, None, None), - (8, None, None), - (9, None, None), - (10, None, None), - (0, 5, 2), - (0, 5, 2), - (1, 5, 2), - (-5, None, 2), - (-5, -1, 2), - (0, None, 2), - (0, 8, 2), - (0, 9, 2), - (0, 10, 2), - (1, 8, 2), - (1, 9, 2), - (1, 10, 2), - (1, None, 2), - (5, None, 2), - (5, None, -1), - (5, None, -2), - (5, None, None), - (4, None, -2), - (-1, -5, -1), - (-5, None, -1), - (0, 5, 1), - (5, 15, 1), - (15, 30, 1), - (5, 0, -1), - (15, 5, -1), - (30, 15, -1), - (0, 5, 2), - (5, 15, 2), - (15, 30, 2), - (5, 0, -2), - (15, 5, -2), - (30, 15, -2) - ]) -@pytest.mark.filterwarnings('ignore:.*OLC_FASTFEATURECOUNT*') -@pytest.mark.filterwarnings('ignore:.*OLCFastSetNextByIndex*') +@pytest.mark.parametrize( + "args", + [ + (0, 5, None), + (1, 5, None), + (-5, None, None), + (-5, -1, None), + (0, None, None), + (5, None, None), + (8, None, None), + (9, None, None), + (10, None, None), + (0, 5, 2), + (0, 5, 2), + (1, 5, 2), + (-5, None, 2), + (-5, -1, 2), + (0, None, 2), + (0, 8, 2), + (0, 9, 2), + (0, 10, 2), + (1, 8, 2), + (1, 9, 2), + (1, 10, 2), + (1, None, 2), + (5, None, 2), + (5, None, -1), + (5, None, -2), + (5, None, None), + (4, None, -2), + (-1, -5, -1), + (-5, None, -1), + (0, 5, 1), + (5, 15, 1), + (15, 30, 1), + (5, 0, -1), + (15, 5, -1), + (30, 15, -1), + (0, 5, 2), + (5, 15, 2), + (15, 30, 2), + (5, 0, -2), + (15, 5, -2), + (30, 15, -2), + ], +) +@pytest.mark.filterwarnings("ignore:.*OLC_FASTFEATURECOUNT*") +@pytest.mark.filterwarnings("ignore:.*OLCFastSetNextByIndex*") def test_collection_iterator_items_slice(slice_dataset_path, args): - """ Test if c.items(start, stop, step) returns the correct features. 
- """ + """Test if c.items(start, stop, step) returns the correct features.""" start, stop, step = args min_id = 0 @@ -140,10 +177,10 @@ positions = list(range(min_id, max_id + 1))[start:stop:step] - with fiona.open(slice_dataset_path, 'r') as c: + with fiona.open(slice_dataset_path, "r") as c: items = list(c.items(start, stop, step)) assert len(items) == len(positions) - record_positions = [int(item[1]['properties']['position']) for item in items] + record_positions = [int(item[1]["properties"]["position"]) for item in items] for expected_position, record_position in zip(positions, record_positions): assert expected_position == record_position diff -Nru fiona-1.8.22/tests/test_subtypes.py fiona-1.9.5/tests/test_subtypes.py --- fiona-1.8.22/tests/test_subtypes.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_subtypes.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,49 +1,53 @@ import fiona -import six +from fiona.model import Feature + def test_read_bool_subtype(tmpdir): test_data = """{"type": "FeatureCollection", "features": [{"type": "Feature", "properties": {"bool": true, "not_bool": 1, "float": 42.5}, "geometry": null}]}""" path = tmpdir.join("test_read_bool_subtype.geojson") with open(str(path), "w") as f: f.write(test_data) - + with fiona.open(str(path), "r") as src: feature = next(iter(src)) - + if fiona.gdal_version.major >= 2: assert type(feature["properties"]["bool"]) is bool else: assert type(feature["properties"]["bool"]) is int - assert isinstance(feature["properties"]["not_bool"], six.integer_types) + assert isinstance(feature["properties"]["not_bool"], int) assert type(feature["properties"]["float"]) is float + def test_write_bool_subtype(tmpdir): path = tmpdir.join("test_write_bool_subtype.geojson") - + schema = { "geometry": "Point", "properties": { "bool": "bool", "not_bool": "int", "float": "float", - } + }, } - - feature = { - "geometry": None, - "properties": { - "bool": True, - "not_bool": 1, - "float": 42.5, + + feature = Feature.from_dict( + **{ + "geometry": None, + "properties": { + "bool": True, + "not_bool": 1, + "float": 42.5, + }, } - } + ) with fiona.open(str(path), "w", driver="GeoJSON", schema=schema) as dst: dst.write(feature) - - with open(str(path), "r") as f: + + with open(str(path)) as f: data = f.read() - + if fiona.gdal_version.major >= 2: assert """"bool": true""" in data else: diff -Nru fiona-1.8.22/tests/test_topojson.py fiona-1.9.5/tests/test_topojson.py --- fiona-1.8.22/tests/test_topojson.py 1970-01-01 00:00:00.000000000 +0000 +++ fiona-1.9.5/tests/test_topojson.py 2023-10-11 23:19:44.000000000 +0000 @@ -0,0 +1,37 @@ +""" +Support for TopoJSON was added in OGR 1.11 to the `GeoJSON` driver. +Starting at GDAL 2.3 support was moved to the `TopoJSON` driver. +""" + +import os +import pytest + +import fiona +from fiona.env import GDALVersion +from fiona.model import Properties + + +gdal_version = GDALVersion.runtime() + +driver = "TopoJSON" if gdal_version.at_least((2, 3)) else "GeoJSON" +has_driver = driver in fiona.drvsupport.supported_drivers.keys() + + +@pytest.mark.skipif(not gdal_version.at_least((1, 11)), reason="Requires GDAL >= 1.11") +@pytest.mark.skipif(not has_driver, reason=f"Requires {driver} driver") +def test_read_topojson(data_dir): + """Test reading a TopoJSON file + + The TopoJSON support in GDAL is a little unpredictable. In some versions + the geometries or properties aren't parsed correctly. 
Here we just check + that we can open the file, get the right number of features out, and + that they have a geometry and some properties. See GH#722. + """ + with fiona.open(os.path.join(data_dir, "example.topojson"), "r") as collection: + features = list(collection) + + assert len(features) == 3, "unexpected number of features" + for feature in features: + assert isinstance(feature["properties"], Properties) + assert len(feature["properties"]) > 0 + assert feature["geometry"]["type"] in {"Point", "LineString", "Polygon"} diff -Nru fiona-1.8.22/tests/test_transactions.py fiona-1.9.5/tests/test_transactions.py --- fiona-1.8.22/tests/test_transactions.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_transactions.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,10 +1,13 @@ +from collections import defaultdict +import logging import os +import pytest +from random import uniform, randint + import fiona +from fiona.model import Feature import fiona.ogrext -import logging -from random import uniform, randint -from collections import defaultdict -import pytest + from tests.conftest import requires_gdal2 has_gpkg = "GPKG" in fiona.supported_drivers.keys() @@ -13,10 +16,13 @@ def create_records(count): for n in range(count): record = { - "geometry": {"type": "Point", "coordinates": [uniform(-180, 180), uniform(-90, 90)]}, - "properties": {"value": randint(0, 1000)} + "geometry": { + "type": "Point", + "coordinates": [uniform(-180, 180), uniform(-90, 90)], + }, + "properties": {"value": randint(0, 1000)}, } - yield record + yield Feature.from_dict(**record) class DebugHandler(logging.Handler): @@ -50,7 +56,7 @@ Test transaction start/commit is called the expected number of times, and that the default transaction size can be overloaded. The test uses a custom logging handler to listen for the debug messages produced - when the transaction is started/comitted. + when the transaction is started/committed. 
""" num_records = 250 transaction_size = 100 @@ -61,18 +67,21 @@ path = str(tmpdir.join("output.gpkg")) - schema = { - "geometry": "Point", - "properties": {"value": "int"} - } + schema = {"geometry": "Point", "properties": {"value": "int"}} with fiona.open(path, "w", driver="GPKG", schema=schema) as dst: dst.writerecords(create_records(num_records)) assert self.handler.history["Starting transaction (initial)"] == 1 - assert self.handler.history["Starting transaction (intermediate)"] == num_records // transaction_size - assert self.handler.history["Comitting transaction (intermediate)"] == num_records // transaction_size - assert self.handler.history["Comitting transaction (final)"] == 1 + assert ( + self.handler.history["Starting transaction (intermediate)"] + == num_records // transaction_size + ) + assert ( + self.handler.history["Committing transaction (intermediate)"] + == num_records // transaction_size + ) + assert self.handler.history["Committing transaction (final)"] == 1 with fiona.open(path, "r") as src: assert len(src) == num_records diff -Nru fiona-1.8.22/tests/test_transform.py fiona-1.9.5/tests/test_transform.py --- fiona-1.8.22/tests/test_transform.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_transform.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,84 +1,179 @@ """Tests of the transform submodule""" import math + import pytest + +import fiona from fiona import transform +from fiona.errors import FionaDeprecationWarning, TransformError +from fiona.model import Geometry +from .conftest import requires_gdal_lt_3 -@pytest.mark.parametrize( - "geom", - [ - {"type": "Point", "coordinates": [0.0, 0.0, 1000.0]}, - { - "type": "LineString", - "coordinates": [[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]], - }, - { - "type": "MultiPoint", - "coordinates": [[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]], - }, - { - "type": "Polygon", - "coordinates": [ + +TEST_GEOMS = [ + Geometry(type="Point", coordinates=[0.0, 0.0, 1000.0]), + Geometry(type="LineString", coordinates=[[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]]), + Geometry(type="MultiPoint", coordinates=[[0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0]]), + Geometry( + type="Polygon", + coordinates=[ + [ + [0.0, 0.0, 1000.0], + [0.1, 0.1, -1000.0], + [0.1, -0.1, math.pi], + [0.0, 0.0, 1000.0], + ] + ], + ), + Geometry( + type="MultiPolygon", + coordinates=[ + [ [ [0.0, 0.0, 1000.0], [0.1, 0.1, -1000.0], [0.1, -0.1, math.pi], [0.0, 0.0, 1000.0], ] - ], - }, - { - "type": "MultiPolygon", - "coordinates": [ - [ - [ - [0.0, 0.0, 1000.0], - [0.1, 0.1, -1000.0], - [0.1, -0.1, math.pi], - [0.0, 0.0, 1000.0], - ] - ] - ], - }, - ], -) + ] + ], + ), +] + + +@pytest.mark.parametrize("geom", TEST_GEOMS) def test_transform_geom_with_z(geom): """Transforming a geom with Z succeeds""" - g2 = transform.transform_geom("epsg:4326", "epsg:3857", geom, precision=3) + transform.transform_geom("epsg:4326", "epsg:3857", geom) -@pytest.mark.parametrize("crs", ["epsg:4326", - "EPSG:4326", - "WGS84", - {'init': 'epsg:4326'}, - {'proj': 'longlat', 'datum': 'WGS84', 'no_defs': True}, - "OGC:CRS84"]) -def test_axis_ordering(crs): - """ Test if transform uses traditional_axis_mapping """ +@pytest.mark.parametrize("geom", TEST_GEOMS) +def test_transform_geom_array_z(geom): + """Transforming a geom array with Z succeeds""" + g2 = transform.transform_geom( + "epsg:4326", + "epsg:3857", + [geom for _ in range(5)], + ) + assert isinstance(g2, list) + assert len(g2) == 5 + +@pytest.mark.parametrize( + "crs", + [ + "epsg:4326", + "EPSG:4326", + "WGS84", + {"init": 
"epsg:4326"}, + {"proj": "longlat", "datum": "WGS84", "no_defs": True}, + "OGC:CRS84", + ], +) +def test_axis_ordering_rev(crs): + """Test if transform uses traditional_axis_mapping""" expected = (-8427998.647958742, 4587905.27136252) t1 = transform.transform(crs, "epsg:3857", [-75.71], [38.06]) assert (t1[0][0], t1[1][0]) == pytest.approx(expected) - geom = {"type": "Point", "coordinates": [-75.71, 38.06]} - g1 = transform.transform_geom(crs, "epsg:3857", geom, precision=3) + geom = Geometry.from_dict(**{"type": "Point", "coordinates": [-75.71, 38.06]}) + g1 = transform.transform_geom(crs, "epsg:3857", geom) assert g1["coordinates"] == pytest.approx(expected) + +@pytest.mark.parametrize( + "crs", + [ + "epsg:4326", + "EPSG:4326", + "WGS84", + {"init": "epsg:4326"}, + {"proj": "longlat", "datum": "WGS84", "no_defs": True}, + "OGC:CRS84", + ], +) +def test_axis_ordering_fwd(crs): + """Test if transform uses traditional_axis_mapping""" rev_expected = (-75.71, 38.06) t2 = transform.transform("epsg:3857", crs, [-8427998.647958742], [4587905.27136252]) assert (t2[0][0], t2[1][0]) == pytest.approx(rev_expected) - geom = {"type": "Point", "coordinates": [-8427998.647958742, 4587905.27136252]} - g2 = transform.transform_geom("epsg:3857", crs, geom, precision=3) - assert g2["coordinates"] == pytest.approx(rev_expected) + geom = Geometry.from_dict( + **{"type": "Point", "coordinates": [-8427998.647958742, 4587905.27136252]} + ) + g2 = transform.transform_geom("epsg:3857", crs, geom) + assert g2.coordinates == pytest.approx(rev_expected) def test_transform_issue971(): - """ See https://github.com/Toblerity/Fiona/issues/971 """ - source_crs = "epsg:25832" - dest_src = "epsg:4326" - geom = {'type': 'GeometryCollection', 'geometries': [{'type': 'LineString', - 'coordinates': [(512381.8870945257, 5866313.311218272), - (512371.23869999964, 5866322.282500001), - (512364.6014999999, 5866328.260199999)]}]} - geom_transformed = transform.transform_geom(source_crs, dest_src, geom, precision=3) - assert geom_transformed['geometries'][0]['coordinates'][0] == pytest.approx((9.184, 52.946)) + """See https://github.com/Toblerity/Fiona/issues/971""" + source_crs = "EPSG:25832" + dest_src = "EPSG:4326" + geom = { + "type": "GeometryCollection", + "geometries": [ + { + "type": "LineString", + "coordinates": [ + (512381.8870945257, 5866313.311218272), + (512371.23869999964, 5866322.282500001), + (512364.6014999999, 5866328.260199999), + ], + } + ], + } + geom_transformed = transform.transform_geom(source_crs, dest_src, geom) + assert geom_transformed.geometries[0].coordinates[0] == pytest.approx( + (9.18427, 52.94630) + ) + + +def test_transform_geom_precision_deprecation(): + """Get a precision deprecation warning in 1.9.""" + with pytest.warns(FionaDeprecationWarning): + transform.transform_geom( + "epsg:4326", + "epsg:3857", + Geometry(type="Point", coordinates=(0, 0)), + precision=2, + ) + + +def test_partial_reprojection_error(): + """Raise an error about full reprojection failure unless we opt in.""" + geom = { + "type": "Polygon", + "coordinates": ( + ( + (6453888.0, -6453888.0), + (6453888.0, 6453888.0), + (-6453888.0, 6453888.0), + (-6453888.0, -6453888.0), + (6453888.0, -6453888.0), + ), + ), + } + src_crs = 'PROJCS["unknown",GEOGCS["unknown",DATUM["Unknown based on WGS84 ellipsoid",SPHEROID["WGS 
84",6378137,298.257223563,AUTHORITY["EPSG","7030"]]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]]],PROJECTION["Orthographic"],PARAMETER["latitude_of_origin",-90],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]' + dst_crs = "EPSG:4326" + with pytest.raises(TransformError): + _ = transform.transform_geom(src_crs, dst_crs, geom) + + +def test_partial_reprojection_opt_in(): + """Get no exception if we opt in to partial reprojection.""" + geom = { + "type": "Polygon", + "coordinates": ( + ( + (6453888.0, -6453888.0), + (6453888.0, 6453888.0), + (-6453888.0, 6453888.0), + (-6453888.0, -6453888.0), + (6453888.0, -6453888.0), + ), + ), + } + src_crs = 'PROJCS["unknown",GEOGCS["unknown",DATUM["Unknown based on WGS84 ellipsoid",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]]],PROJECTION["Orthographic"],PARAMETER["latitude_of_origin",-90],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]' + dst_crs = "EPSG:4326" + with fiona.Env(OGR_ENABLE_PARTIAL_REPROJECTION=True): + _ = transform.transform_geom(src_crs, dst_crs, geom) diff -Nru fiona-1.8.22/tests/test_unicode.py fiona-1.9.5/tests/test_unicode.py --- fiona-1.8.22/tests/test_unicode.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_unicode.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,53 +1,50 @@ -# coding: utf-8 - import logging import os import shutil import sys import tempfile from collections import OrderedDict +import unittest import pytest import fiona from fiona.errors import SchemaError +from fiona.model import Feature -class TestUnicodePath(object): - - def setup(self): +class TestUnicodePath(unittest.TestCase): + def setUp(self): tempdir = tempfile.mkdtemp() - self.dir = os.path.join(tempdir, u'français') - shutil.copytree(os.path.join(os.path.dirname(__file__), 'data'), - self.dir) + self.dir = os.path.join(tempdir, "français") + shutil.copytree(os.path.join(os.path.dirname(__file__), "data"), self.dir) - def teardown(self): + def tearDown(self): shutil.rmtree(os.path.dirname(self.dir)) def test_unicode_path(self): - path = self.dir + '/coutwildrnp.shp' + path = self.dir + "/coutwildrnp.shp" with fiona.open(path) as c: assert len(c) == 67 def test_unicode_path_layer(self): path = self.dir - layer = 'coutwildrnp' + layer = "coutwildrnp" with fiona.open(path, layer=layer) as c: assert len(c) == 67 def test_utf8_path(self): - path = self.dir + '/coutwildrnp.shp' + path = self.dir + "/coutwildrnp.shp" if sys.version_info < (3,): with fiona.open(path) as c: assert len(c) == 67 -class TestUnicodeStringField(object): - - def setup(self): +class TestUnicodeStringField(unittest.TestCase): + def setUp(self): self.tempdir = tempfile.mkdtemp() - def teardown(self): + def tearDown(self): shutil.rmtree(self.tempdir) @pytest.mark.xfail(reason="OGR silently fails to convert strings") @@ -64,61 +61,86 @@ # # Consequences: no error on write, but there will be an error # on reading the data and expecting latin-1. 
- schema = { - 'geometry': 'Point', - 'properties': {'label': 'str', 'num': 'int'}} + schema = {"geometry": "Point", "properties": {"label": "str", "num": "int"}} - with fiona.open(os.path.join(self.tempdir, "test-write-fail.shp"), - 'w', driver="ESRI Shapefile", schema=schema, - encoding='latin1') as c: - c.writerecords([{ - 'type': 'Feature', - 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, - 'properties': { - 'label': u'徐汇区', - 'num': 0}}]) + with fiona.open( + os.path.join(self.tempdir, "test-write-fail.shp"), + "w", + driver="ESRI Shapefile", + schema=schema, + encoding="latin1", + ) as c: + c.writerecords( + [ + { + "type": "Feature", + "geometry": {"type": "Point", "coordinates": [0, 0]}, + "properties": {"label": "徐汇区", "num": 0}, + } + ] + ) - with fiona.open(os.path.join(self.tempdir), encoding='latin1') as c: + with fiona.open(os.path.join(self.tempdir), encoding="latin1") as c: f = next(iter(c)) # Next assert fails. - assert f['properties']['label'] == u'徐汇区' + assert f.properties["label"] == "徐汇区" def test_write_utf8(self): schema = { - 'geometry': 'Point', - 'properties': {'label': 'str', u'verit\xe9': 'int'}} - with fiona.open(os.path.join(self.tempdir, "test-write.shp"), - "w", "ESRI Shapefile", schema=schema, - encoding='utf-8') as c: - c.writerecords([{ - 'type': 'Feature', - 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, - 'properties': { - 'label': u'Ba\u2019kelalan', u'verit\xe9': 0}}]) + "geometry": "Point", + "properties": {"label": "str", "verit\xe9": "int"}, + } + with fiona.open( + os.path.join(self.tempdir, "test-write.shp"), + "w", + "ESRI Shapefile", + schema=schema, + encoding="utf-8", + ) as c: + c.writerecords( + [ + Feature.from_dict( + **{ + "type": "Feature", + "geometry": {"type": "Point", "coordinates": [0, 0]}, + "properties": {"label": "Ba\u2019kelalan", "verit\xe9": 0}, + } + ) + ] + ) - with fiona.open(os.path.join(self.tempdir), encoding='utf-8') as c: + with fiona.open(os.path.join(self.tempdir), encoding="utf-8") as c: f = next(iter(c)) - assert f['properties']['label'] == u'Ba\u2019kelalan' - assert f['properties'][u'verit\xe9'] == 0 + assert f.properties["label"] == "Ba\u2019kelalan" + assert f.properties["verit\xe9"] == 0 @pytest.mark.iconv def test_write_gb18030(self): """Can write a simplified Chinese shapefile""" - schema = { - 'geometry': 'Point', - 'properties': {'label': 'str', 'num': 'int'}} - with fiona.open(os.path.join(self.tempdir, "test-write-gb18030.shp"), - 'w', driver="ESRI Shapefile", schema=schema, - encoding='gb18030') as c: - c.writerecords([{ - 'type': 'Feature', - 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, - 'properties': {'label': u'徐汇区', 'num': 0}}]) + schema = {"geometry": "Point", "properties": {"label": "str", "num": "int"}} + with fiona.open( + os.path.join(self.tempdir, "test-write-gb18030.shp"), + "w", + driver="ESRI Shapefile", + schema=schema, + encoding="gb18030", + ) as c: + c.writerecords( + [ + Feature.from_dict( + **{ + "type": "Feature", + "geometry": {"type": "Point", "coordinates": [0, 0]}, + "properties": {"label": "徐汇区", "num": 0}, + } + ) + ] + ) - with fiona.open(os.path.join(self.tempdir), encoding='gb18030') as c: + with fiona.open(os.path.join(self.tempdir), encoding="gb18030") as c: f = next(iter(c)) - assert f['properties']['label'] == u'徐汇区' - assert f['properties']['num'] == 0 + assert f.properties["label"] == "徐汇区" + assert f.properties["num"] == 0 @pytest.mark.iconv def test_gb2312_field_wrong_encoding(self): @@ -131,7 +153,7 @@ See GH#595. 
""" - field_name = u"区县名称" + field_name = "区县名称" meta = { "schema": { "properties": OrderedDict([(field_name, "int")]), @@ -139,12 +161,16 @@ }, "driver": "ESRI Shapefile", } - feature = { - "properties": {field_name: 123}, - "geometry": {"type": "Point", "coordinates": [1, 2]} - } + feature = Feature.from_dict( + **{ + "properties": {field_name: 123}, + "geometry": {"type": "Point", "coordinates": [1, 2]}, + } + ) # when encoding is specified, write is successful - with fiona.open(os.path.join(self.tempdir, "test1.shp"), "w", encoding="GB2312", **meta) as collection: + with fiona.open( + os.path.join(self.tempdir, "test1.shp"), "w", encoding="GB2312", **meta + ) as collection: collection.write(feature) # no encoding with pytest.raises(SchemaError): diff -Nru fiona-1.8.22/tests/test_vfs.py fiona-1.9.5/tests/test_vfs.py --- fiona-1.8.22/tests/test_vfs.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_vfs.py 2023-10-11 23:19:44.000000000 +0000 @@ -15,17 +15,19 @@ # Custom markers (from rasterio) mingdalversion = pytest.mark.skipif( - fiona.gdal_version < (2, 1, 0), - reason="S3 raster access requires GDAL 2.1") + fiona.gdal_version < (2, 1, 0), reason="S3 raster access requires GDAL 2.1" +) credentials = pytest.mark.skipif( - not(boto3.Session()._session.get_credentials()), - reason="S3 raster access requires credentials") + not (boto3.Session()._session.get_credentials()), + reason="S3 raster access requires credentials", +) # TODO: remove this once we've successfully moved the tar tests over # to TestVsiReading. + class VsiReadingTest(ReadingTest): # There's a bug in GDAL 1.9.2 http://trac.osgeo.org/gdal/ticket/5093 # in which the VSI driver reports the wrong number of features. @@ -33,14 +35,16 @@ # passes and creating a new method in this class that we can exclude # from the test runner at run time. - @pytest.mark.xfail(reason="The number of features present in the archive " - "differs based on the GDAL version.") + @pytest.mark.xfail( + reason="The number of features present in the archive " + "differs based on the GDAL version." + ) def test_filter_vsi(self): results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0))) assert len(results) == 67 f = results[0] - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f["id"] == "0" + assert f["properties"]["STATE"] == "UT" class TestVsiReading(TestReading): @@ -50,80 +54,73 @@ # passes and creating a new method in this class that we can exclude # from the test runner at run time. - @pytest.mark.xfail(reason="The number of features present in the archive " - "differs based on the GDAL version.") + @pytest.mark.xfail( + reason="The number of features present in the archive " + "differs based on the GDAL version." 
+ ) def test_filter_vsi(self): results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0))) assert len(results) == 67 f = results[0] - assert f['id'] == "0" - assert f['properties']['STATE'] == 'UT' + assert f["id"] == "0" + assert f["properties"]["STATE"] == "UT" class TestZipReading(TestVsiReading): @pytest.fixture(autouse=True) def zipfile(self, data_dir, path_coutwildrnp_zip): self.c = fiona.open("zip://{}".format(path_coutwildrnp_zip, "r")) - self.path = os.path.join(data_dir, 'coutwildrnp.zip') + self.path = os.path.join(data_dir, "coutwildrnp.zip") yield self.c.close() def test_open_repr(self): - assert ( - repr(self.c) == - ("<open Collection '/vsizip/{path}:coutwildrnp', mode 'r' at {id}>".format( - id=hex(id(self.c)), - path=self.path))) + assert repr(self.c) == ( + "<open Collection '/vsizip/{path}:coutwildrnp', mode 'r' at {id}>".format(id=hex(id(self.c)), path=self.path) + ) def test_closed_repr(self): self.c.close() - assert ( - repr(self.c) == - ("<closed Collection '/vsizip/{path}:coutwildrnp', mode 'r' at {id}>".format( - id=hex(id(self.c)), - path=self.path))) + assert repr(self.c) == ( + "<closed Collection '/vsizip/{path}:coutwildrnp', mode 'r' at {id}>".format(id=hex(id(self.c)), path=self.path) + ) def test_path(self): - assert self.c.path == '/vsizip/{path}'.format(path=self.path) + assert self.c.path == f"/vsizip/{self.path}" class TestZipArchiveReading(TestVsiReading): @pytest.fixture(autouse=True) def zipfile(self, data_dir, path_coutwildrnp_zip): - vfs = 'zip://{}'.format(path_coutwildrnp_zip) + vfs = f"zip://{path_coutwildrnp_zip}" self.c = fiona.open(vfs + "!coutwildrnp.shp", "r") - self.path = os.path.join(data_dir, 'coutwildrnp.zip') + self.path = os.path.join(data_dir, "coutwildrnp.zip") yield self.c.close() def test_open_repr(self): - assert ( - repr(self.c) == - ("<open Collection '/vsizip/{path}/coutwildrnp.shp:coutwildrnp', mode 'r' at {id}>".format( - id=hex(id(self.c)), - path=self.path))) + assert repr(self.c) == ( + "<open Collection '/vsizip/{path}/coutwildrnp.shp:coutwildrnp', mode 'r' at {id}>".format(id=hex(id(self.c)), path=self.path) + ) def test_closed_repr(self): self.c.close() - assert ( - repr(self.c) == - ("<closed Collection '/vsizip/{path}/coutwildrnp.shp:coutwildrnp', mode 'r' at {id}>".format( - id=hex(id(self.c)), - path=self.path))) + assert repr(self.c) == ( + "<closed Collection '/vsizip/{path}/coutwildrnp.shp:coutwildrnp', mode 'r' at {id}>".format(id=hex(id(self.c)), path=self.path) + ) def test_path(self): - assert (self.c.path == - '/vsizip/{path}/coutwildrnp.shp'.format(path=self.path)) + assert self.c.path == f"/vsizip/{self.path}/coutwildrnp.shp" class TestZipArchiveReadingAbsPath(TestZipArchiveReading): @pytest.fixture(autouse=True) def zipfile(self, path_coutwildrnp_zip): - vfs = 'zip://{}'.format(os.path.abspath(path_coutwildrnp_zip)) + vfs = f"zip://{os.path.abspath(path_coutwildrnp_zip)}" self.c = fiona.open(vfs + "!coutwildrnp.shp", "r") yield self.c.close() @@ -136,46 +133,43 @@ assert repr(self.c).startswith("<open Collection '/vsizip/") def test_closed_repr(self): self.c.close() assert repr(self.c).startswith("<closed Collection '/vsizip/") class TestTarReading(TestVsiReading): @pytest.fixture(autouse=True) def tarfile(self, data_dir, path_coutwildrnp_tar): self.c = fiona.open("tar://{}!testing/coutwildrnp.shp".format(path_coutwildrnp_tar), "r") self.path = os.path.join(data_dir, "coutwildrnp.tar") yield self.c.close() def test_open_repr(self): - assert ( - repr(self.c) == - ("<open Collection '/vsitar/{path}/testing/coutwildrnp.shp:coutwildrnp', mode 'r' at {id}>".format( - id=hex(id(self.c)), - path=self.path))) + assert repr(self.c) == ( + "<open Collection '/vsitar/{path}/testing/coutwildrnp.shp:coutwildrnp', mode 'r' at {id}>".format(id=hex(id(self.c)), path=self.path) + ) def test_closed_repr(self): self.c.close() - assert ( - repr(self.c) == - ("<closed Collection '/vsitar/{path}/testing/coutwildrnp.shp:coutwildrnp', mode 'r' at {id}>".format( - id=hex(id(self.c)), - path=self.path))) + assert repr(self.c) == ( + "<closed Collection '/vsitar/{path}/testing/coutwildrnp.shp:coutwildrnp', mode 'r' at {id}>".format(id=hex(id(self.c)), path=self.path) + ) def test_path(self): - assert ( - self.c.path == - '/vsitar/{path}/testing/coutwildrnp.shp'.format(path=self.path)) + assert self.c.path == "/vsitar/{path}/testing/coutwildrnp.shp".format( + path=self.path + ) @pytest.mark.network def test_open_http(): - ds = fiona.open('https://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.shp') + ds = fiona.open( + "https://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.shp" + ) assert len(ds) == 10 @@ -183,13 +177,14 @@ @mingdalversion @pytest.mark.network def test_open_s3(): - ds = fiona.open('zip+s3://fiona-testing/coutwildrnp.zip') + ds = fiona.open("zip+s3://fiona-testing/coutwildrnp.zip") assert len(ds) == 67 +@credentials @pytest.mark.network def test_open_zip_https():
- ds = fiona.open('zip+https://s3.amazonaws.com/fiona-testing/coutwildrnp.zip') + ds = fiona.open("zip+https://s3.amazonaws.com/fiona-testing/coutwildrnp.zip") assert len(ds) == 67 diff -Nru fiona-1.8.22/tests/test_write.py fiona-1.9.5/tests/test_write.py --- fiona-1.8.22/tests/test_write.py 2022-10-14 23:26:41.000000000 +0000 +++ fiona-1.9.5/tests/test_write.py 2023-10-11 23:19:44.000000000 +0000 @@ -1,26 +1,141 @@ """New tests of writing feature collections.""" +import pytest + +from .conftest import requires_gdal33 + import fiona -from fiona.crs import from_epsg +from fiona.crs import CRS +from fiona.errors import DriverError +from fiona.model import Feature def test_issue771(tmpdir, caplog): """Overwrite a GeoJSON file without logging errors.""" schema = {"geometry": "Point", "properties": {"zero": "int"}} - feature = { - "geometry": {"type": "Point", "coordinates": (0, 0)}, - "properties": {"zero": "0"}, - } + feature = Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0, 0)}, + "properties": {"zero": "0"}, + } + ) outputfile = tmpdir.join("test.geojson") for i in range(2): with fiona.open( - str(outputfile), "w", driver="GeoJSON", schema=schema, crs=from_epsg(4326) + str(outputfile), + "w", + driver="GeoJSON", + schema=schema, + crs=CRS.from_epsg(4326), ) as collection: collection.write(feature) assert outputfile.exists() for record in caplog.records: assert record.levelname != "ERROR" + + +@requires_gdal33 +def test_write__esri_only_wkt(tmpdir): + """https://github.com/Toblerity/Fiona/issues/977""" + schema = {"geometry": "Point", "properties": {"zero": "int"}} + feature = Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0, 0)}, + "properties": {"zero": "0"}, + } + ) + target_crs = ( + 'PROJCS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet",' + 'GEOGCS["GCS_NAD_1983_2011",DATUM["D_NAD_1983_2011",' + 'SPHEROID["GRS_1980",6378137.0,298.257222101]],' + 'PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],' + 'PROJECTION["Lambert_Conformal_Conic"],' + 'PARAMETER["False_Easting",14500000.0],' + 'PARAMETER["False_Northing",8600000.0],' + 'PARAMETER["Central_Meridian",-94.83333333333333],' + 'PARAMETER["Standard_Parallel_1",42.53333333333333],' + 'PARAMETER["Standard_Parallel_2",42.53333333333333],' + 'PARAMETER["Scale_Factor",1.000045],' + 'PARAMETER["Latitude_Of_Origin",42.53333333333333],' + 'UNIT["Foot_US",0.3048006096012192]]' + ) + outputfile = tmpdir.join("test.shp") + with fiona.open( + str(outputfile), + "w", + driver="ESRI Shapefile", + schema=schema, + crs=target_crs, + ) as collection: + collection.write(feature) + assert collection.crs_wkt.startswith( + ( + 'PROJCS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet"', + 'PROJCRS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet"', # GDAL 3.3+ + ) + ) + + +def test_write__wkt_version(tmpdir): + """https://github.com/Toblerity/Fiona/issues/977""" + schema = {"geometry": "Point", "properties": {"zero": "int"}} + feature = Feature.from_dict( + **{ + "geometry": {"type": "Point", "coordinates": (0, 0)}, + "properties": {"zero": "0"}, + } + ) + target_crs = ( + 'PROJCS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet",' + 'GEOGCS["GCS_NAD_1983_2011",DATUM["D_NAD_1983_2011",' + 'SPHEROID["GRS_1980",6378137.0,298.257222101]],' + 'PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],' + 'PROJECTION["Lambert_Conformal_Conic"],' + 'PARAMETER["False_Easting",14500000.0],' + 'PARAMETER["False_Northing",8600000.0],' + 
'PARAMETER["Central_Meridian",-94.83333333333333],' + 'PARAMETER["Standard_Parallel_1",42.53333333333333],' + 'PARAMETER["Standard_Parallel_2",42.53333333333333],' + 'PARAMETER["Scale_Factor",1.000045],' + 'PARAMETER["Latitude_Of_Origin",42.53333333333333],' + 'UNIT["Foot_US",0.3048006096012192]]' + ) + outputfile = tmpdir.join("test.shp") + with fiona.open( + str(outputfile), + "w", + driver="ESRI Shapefile", + schema=schema, + crs=target_crs, + wkt_version="WKT2_2018", + ) as collection: + collection.write(feature) + assert collection.crs_wkt.startswith( + 'PROJCRS["IaRCS_04_Sioux_City-Iowa_Falls_NAD_1983_2011_LCC_US_Feet"' + ) + + +def test_issue1169(): + """Don't swallow errors when a collection can't be written.""" + with pytest.raises(DriverError): + with fiona.open( + "s3://non-existing-bucket/test.geojson", + mode="w", + driver="GeoJSON", + schema={"geometry": "Point"}, + ) as collection: + collection.writerecords( + [ + Feature.from_dict( + **{ + "id": "0", + "type": "Feature", + "geometry": {"type": "Point", "coordinates": (1.0, 2.0)}, + } + ) + ] + )