diff -Nru davix-0.7.1/cmake/modules/FindCGSI_GSOAP.cmake davix-0.7.2/cmake/modules/FindCGSI_GSOAP.cmake
--- davix-0.7.1/cmake/modules/FindCGSI_GSOAP.cmake 2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/cmake/modules/FindCGSI_GSOAP.cmake 1970-01-01 00:00:00.000000000 +0000
@@ -1,44 +0,0 @@
-#
-# This module detects if CGSI_GSOAP is installed and determines where the
-# include files and libraries are.
-#
-# This code sets the following variables:
-#
-# CGSI_GSOAP_LIBRARIES = full path to the CGSI_GSOAP libraries
-# CGSI_GSOAP_INCLUDE_DIR = include dir to be used when using the CGSI_GSOAP library
-# CGSI_GSOAP_FOUND = set to true if CGSI_GSOAP was found successfully
-#
-# CGSI_GSOAP_LOCATION
-# setting this enables search for CGSI_GSOAP libraries / headers in this location
-
-
-# -----------------------------------------------------
-# CGSI_GSOAP Libraries
-# -----------------------------------------------------
-find_library(CGSI_GSOAP_LIBRARIES
-    NAMES cgsi_plugin cgsi_plugin_gsoap_2.7
-    HINTS ${CGSI_GSOAP_LOCATION}/lib ${CGSI_GSOAP_LOCATION}/lib64 ${CGSI_GSOAP_LOCATION}/lib32 ${STAGE_DIR}/lib ${STAGE_DIR}/lib64
-    DOC "The main CGSI_GSOAP library"
-)
-
-# -----------------------------------------------------
-# CGSI_GSOAP Include Directories
-# -----------------------------------------------------
-find_path(CGSI_GSOAP_INCLUDE_DIR
-    NAMES cgsi_plugin.h
-    HINTS ${CGSI_GSOAP_LOCATION} ${CGSI_GSOAP_LOCATION}/include ${CGSI_GSOAP_LOCATION}/include/* ${STAGE_DIR}/include ${STAGE_DIR}/include
-    DOC "The CGSI_GSOAP include directory"
-)
-if(CGSI_GSOAP_INCLUDE_DIR)
-    message(STATUS "CGSI_GSOAP includes found in ${CGSI_GSOAP_INCLUDE_DIR}")
-endif()
-
-
-
-# -----------------------------------------------------
-# handle the QUIETLY and REQUIRED arguments and set CGSI_GSOAP_FOUND to TRUE if
-# all listed variables are TRUE
-# -----------------------------------------------------
-include(FindPackageHandleStandardArgs)
-find_package_handle_standard_args(CGSI_GSOAP DEFAULT_MSG CGSI_GSOAP_LIBRARIES CGSI_GSOAP_INCLUDE_DIR)
-mark_as_advanced(CGSI_GSOAP_INCLUDE_DIR CGSI_GSOAP_LIBRARIES)
diff -Nru davix-0.7.1/cmake/modules/FindGRIDSITE.cmake davix-0.7.2/cmake/modules/FindGRIDSITE.cmake
--- davix-0.7.1/cmake/modules/FindGRIDSITE.cmake 2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/cmake/modules/FindGRIDSITE.cmake 1970-01-01 00:00:00.000000000 +0000
@@ -1,66 +0,0 @@
-#
-# This module detects if GRIDSITE is installed and determines where the
-# include files and libraries are.
-#
-# This code sets the following variables:
-#
-# GRIDSITE_LIBRARIES = full path to the GRIDSITE libraries
-# GRIDSITE_SSL_LIBRARIES = full path to the GRIDSITE ssl libraries
-# GRIDSITE_INCLUDE_DIR = include dir to be used when using the GRIDSITE library
-# GRIDSITE_WSDL2H = wsdl2h binary
-# GRIDSITE_SOAPCPP2 = soapcpp2 binary
-# GRIDSITE_FOUND = set to true if GRIDSITE was found successfully
-#
-# GRIDSITE_LOCATION
-# setting this enables search for GRIDSITE libraries / headers in this location
-
-
-# ------------------------------------------------------
-# try pkg config search
-#
-# -----------------------------------------------------
-
-
-find_package(PkgConfig)
-pkg_check_modules(PC_GRIDSITE QUIET gridsite-openssl>=1.7.25)
-
-IF(PC_GRIDSITE_FOUND)
-
-SET(GRIDSITE_LIBRARIES ${PC_GRIDSITE_LIBRARIES})
-SET(GRIDSITE_INCLUDE_DIR ${PC_GRIDSITE_INCLUDE_DIRS})
-SET(GRIDSITE_DEFINITIONS "${PC_GRIDSITE_CFLAGS} ${PC_GRIDSITE_CFLAGS_OTHER}")
-
-ELSE(PC_GRIDSITE_FOUND)
-
-# -----------------------------------------------------
-# GRIDSITE Libraries
-# -----------------------------------------------------
-find_library(GRIDSITE_LIBRARIES
-    NAMES gridsite
-    HINTS ${GRIDSITE_LOCATION}/lib ${GRIDSITE_LOCATION}/lib64
-          ${GRIDSITE_LOCATION}/lib32
-    DOC "The main GRIDSITE library"
-)
-
-# -----------------------------------------------------
-# GRIDSITE Include Directories
-# -----------------------------------------------------
-find_path(GRIDSITE_INCLUDE_DIR
-    NAMES gridsite.h
-    HINTS ${GRIDSITE_LOCATION} ${GRIDSITE_LOCATION}/include ${GRIDSITE_LOCATION}/include/*
-    DOC "The GRIDSITE include directory"
-)
-
-SET(GRIDSITE_DEFINITIONS "")
-
-ENDIF(PC_GRIDSITE_FOUND)
-
-
-# -----------------------------------------------------
-# handle the QUIETLY and REQUIRED arguments and set GRIDSITE_FOUND to TRUE if
-# all listed variables are TRUE
-# -----------------------------------------------------
-include(FindPackageHandleStandardArgs)
-find_package_handle_standard_args(GRIDSITE DEFAULT_MSG GRIDSITE_LIBRARIES
-                                  GRIDSITE_INCLUDE_DIR)
-mark_as_advanced(GRIDSITE_INCLUDE_DIR GRIDSITE_LIBRARIES )
diff -Nru davix-0.7.1/cmake/modules/FindSphinx.cmake davix-0.7.2/cmake/modules/FindSphinx.cmake
--- davix-0.7.1/cmake/modules/FindSphinx.cmake 2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/cmake/modules/FindSphinx.cmake 2019-02-15 13:02:34.000000000 +0000
@@ -43,7 +43,7 @@
 endif()
 
 # Try to find sphinx-build
-find_program(SPHINX_EXECUTABLE sphinx-build
+find_program(SPHINX_EXECUTABLE sphinx-build sphinx-build-3
     HINTS ${SPHINX_DIR} $ENV{SPHINX_DIR}
     PATH_SUFFIXES bin
     DOC "Sphinx documentation generator tool"
@@ -52,7 +52,7 @@
 if (SPHINX_EXECUTABLE)
   # Try to check Sphinx version by importing Sphinx
   execute_process(
-    COMMAND ${PYTHON_EXECUTABLE} -c "import sphinx; print sphinx.__version__"
+    COMMAND ${PYTHON_EXECUTABLE} -c "import sphinx; print(sphinx.__version__)"
     OUTPUT_VARIABLE SPHINX_VERSION
     OUTPUT_STRIP_TRAILING_WHITESPACE)
diff -Nru davix-0.7.1/CMakeLists.txt davix-0.7.2/CMakeLists.txt
--- davix-0.7.1/CMakeLists.txt 2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/CMakeLists.txt 2019-02-15 13:02:34.000000000 +0000
@@ -3,14 +3,17 @@
 project (davix)
 cmake_minimum_required (VERSION 2.6)
 
+# Find the python executable to use during the build.
+find_package(PythonInterp REQUIRED) + #------------------------------------------------------------------------------- # Regenerate include/davix/features.hpp and version.cmake at _build_ time #------------------------------------------------------------------------------- add_custom_target(GenerateVersionInfo ALL DEPENDS Version) add_custom_command( OUTPUT Version - COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/genversion.py --template include/davix/features.hpp.in --out include/davix/features.hpp - COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/genversion.py --template version.cmake.in --out version.cmake + COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/genversion.py --template include/davix/features.hpp.in --out include/davix/features.hpp + COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/genversion.py --template version.cmake.in --out version.cmake WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} ) @@ -19,7 +22,7 @@ # only regenerates it at compile time. #------------------------------------------------------------------------------- execute_process( - COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/genversion.py --template version.cmake.in --out version.cmake + COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/genversion.py --template version.cmake.in --out version.cmake WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} ) @@ -69,7 +72,6 @@ option(ENABLE_THIRD_PARTY_COPY "enable or disable third party copy support" FALSE) # tests -option(FUNCTIONAL_TESTS "enable or disable the functional tests" FALSE) option(BENCH_TESTS "enable or disable the bench tests" FALSE) # libs checks diff -Nru davix-0.7.1/debian/changelog davix-0.7.2/debian/changelog --- davix-0.7.1/debian/changelog 2019-01-22 14:00:49.000000000 +0000 +++ davix-0.7.2/debian/changelog 2019-02-19 10:46:50.000000000 +0000 @@ -1,8 +1,8 @@ -davix (0.7.1-3build1) disco; urgency=medium +davix (0.7.2-1) unstable; urgency=medium - * Rebuild against new libgsoap-2.8.75. 
+ * Update to version 0.7.2 - -- Gianfranco Costamagna Tue, 22 Jan 2019 15:00:49 +0100 + -- Mattias Ellert Tue, 19 Feb 2019 11:46:50 +0100 davix (0.7.1-3) unstable; urgency=medium diff -Nru davix-0.7.1/debian/copyright davix-0.7.2/debian/copyright --- davix-0.7.1/debian/copyright 2018-10-26 12:31:09.000000000 +0000 +++ davix-0.7.2/debian/copyright 2019-02-19 10:46:50.000000000 +0000 @@ -1,9 +1,9 @@ Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: davix -Source: https://github.com/cern-fts/davix/archive/R_0_7_1.tar.gz +Source: https://github.com/cern-fts/davix/archive/R_0_7_2.tar.gz Files: * -Copyright: 2013-2018 CERN +Copyright: 2013-2019 CERN License: LGPL-2.1+ Files: deps/libneon @@ -197,7 +197,7 @@ Comment: Not used - removed in configure Files: debian/* -Copyright: 2013-2018 Mattias Ellert +Copyright: 2013-2019 Mattias Ellert License: LGPL-2.1+ Files: debian/missing-sources/modernizr.js diff -Nru davix-0.7.1/debian/davix-doc.maintscript davix-0.7.2/debian/davix-doc.maintscript --- davix-0.7.1/debian/davix-doc.maintscript 1970-01-01 00:00:00.000000000 +0000 +++ davix-0.7.2/debian/davix-doc.maintscript 2019-02-19 10:46:50.000000000 +0000 @@ -0,0 +1,5 @@ +dir_to_symlink /usr/share/doc/davix/html/_static/css ../../../../sphinx_rtd_theme/static/css 0.6.5 davix-doc + +dir_to_symlink /usr/share/doc/davix/html/_static/fonts ../../../../sphinx_rtd_theme/static/fonts 0.6.5 davix-doc + +dir_to_symlink /usr/share/doc/davix/html/_static/js ../../../../sphinx_rtd_theme/static/js 0.6.5 davix-doc diff -Nru davix-0.7.1/debian/davix-doc.postinst davix-0.7.2/debian/davix-doc.postinst --- davix-0.7.1/debian/davix-doc.postinst 2017-03-22 20:50:31.000000000 +0000 +++ davix-0.7.2/debian/davix-doc.postinst 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -#!/bin/sh - -set -e - -#DEBHELPER# - -dpkg-maintscript-helper dir_to_symlink \ - /usr/share/doc/davix/html/_static/css \ - ../../../../sphinx_rtd_theme/static/css \ - 0.6.5 davix-doc -- "$@" - -dpkg-maintscript-helper dir_to_symlink \ - /usr/share/doc/davix/html/_static/fonts \ - ../../../../sphinx_rtd_theme/static/fonts \ - 0.6.5 davix-doc -- "$@" - -dpkg-maintscript-helper dir_to_symlink \ - /usr/share/doc/davix/html/_static/js \ - ../../../../sphinx_rtd_theme/static/js \ - 0.6.5 davix-doc -- "$@" diff -Nru davix-0.7.1/debian/davix-doc.postrm davix-0.7.2/debian/davix-doc.postrm --- davix-0.7.1/debian/davix-doc.postrm 2017-03-22 20:50:31.000000000 +0000 +++ davix-0.7.2/debian/davix-doc.postrm 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -#!/bin/sh - -set -e - -#DEBHELPER# - -dpkg-maintscript-helper dir_to_symlink \ - /usr/share/doc/davix/html/_static/css \ - ../../../../sphinx_rtd_theme/static/css \ - 0.6.5 davix-doc -- "$@" - -dpkg-maintscript-helper dir_to_symlink \ - /usr/share/doc/davix/html/_static/fonts \ - ../../../../sphinx_rtd_theme/static/fonts \ - 0.6.5 davix-doc -- "$@" - -dpkg-maintscript-helper dir_to_symlink \ - /usr/share/doc/davix/html/_static/js \ - ../../../../sphinx_rtd_theme/static/js \ - 0.6.5 davix-doc -- "$@" diff -Nru davix-0.7.1/debian/davix-doc.preinst davix-0.7.2/debian/davix-doc.preinst --- davix-0.7.1/debian/davix-doc.preinst 2017-03-22 20:50:31.000000000 +0000 +++ davix-0.7.2/debian/davix-doc.preinst 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -#!/bin/sh - -set -e - -#DEBHELPER# - -dpkg-maintscript-helper dir_to_symlink \ - /usr/share/doc/davix/html/_static/css \ - ../../../../sphinx_rtd_theme/static/css \ - 0.6.5 davix-doc -- "$@" - -dpkg-maintscript-helper 
dir_to_symlink \ - /usr/share/doc/davix/html/_static/fonts \ - ../../../../sphinx_rtd_theme/static/fonts \ - 0.6.5 davix-doc -- "$@" - -dpkg-maintscript-helper dir_to_symlink \ - /usr/share/doc/davix/html/_static/js \ - ../../../../sphinx_rtd_theme/static/js \ - 0.6.5 davix-doc -- "$@" diff -Nru davix-0.7.1/debian/davix-tests.install davix-0.7.2/debian/davix-tests.install --- davix-0.7.1/debian/davix-tests.install 2018-11-02 08:11:23.000000000 +0000 +++ davix-0.7.2/debian/davix-tests.install 2019-02-19 10:46:50.000000000 +0000 @@ -1 +1,2 @@ +usr/bin/davix-tester usr/bin/davix-unit-tests diff -Nru davix-0.7.1/debian/patches/davix-python-3-compat.patch davix-0.7.2/debian/patches/davix-python-3-compat.patch --- davix-0.7.1/debian/patches/davix-python-3-compat.patch 2018-11-08 14:19:44.000000000 +0000 +++ davix-0.7.2/debian/patches/davix-python-3-compat.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -From 96e253d2ca3551fef0cc22c6689a0d23b6f03ef8 Mon Sep 17 00:00:00 2001 -From: Mattias Ellert -Date: Thu, 8 Nov 2018 15:18:13 +0100 -Subject: [PATCH] Python 3 compat - ---- - cmake/modules/FindSphinx.cmake | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/cmake/modules/FindSphinx.cmake b/cmake/modules/FindSphinx.cmake -index 23dcb94..c47e303 100644 ---- a/cmake/modules/FindSphinx.cmake -+++ b/cmake/modules/FindSphinx.cmake -@@ -52,7 +52,7 @@ find_program(SPHINX_EXECUTABLE sphinx-build - if (SPHINX_EXECUTABLE) - # Try to check Sphinx version by importing Sphinx - execute_process( -- COMMAND ${PYTHON_EXECUTABLE} -c "import sphinx; print sphinx.__version__" -+ COMMAND ${PYTHON_EXECUTABLE} -c "import sphinx; print(sphinx.__version__)" - OUTPUT_VARIABLE SPHINX_VERSION - OUTPUT_STRIP_TRAILING_WHITESPACE) - --- -2.19.1 - diff -Nru davix-0.7.1/debian/patches/series davix-0.7.2/debian/patches/series --- davix-0.7.1/debian/patches/series 2018-11-08 14:23:43.000000000 +0000 +++ davix-0.7.2/debian/patches/series 2019-02-19 10:46:50.000000000 +0000 @@ -1,6 +1,2 @@ # Avoid some overlinking davix-linking.patch - -# Fix cmake config warning due to Python 3 incompatibility -# https://github.com/cern-fts/davix/pull/35 -davix-python-3-compat.patch diff -Nru davix-0.7.1/debian/rules davix-0.7.2/debian/rules --- davix-0.7.1/debian/rules 2018-11-02 08:19:42.000000000 +0000 +++ davix-0.7.2/debian/rules 2019-02-19 10:46:50.000000000 +0000 @@ -39,7 +39,7 @@ # The script doesn't do anything outside a git checkout anyway and # replacing it avoids a build dependency on python2 rm -f genversion.py - ln -s /bin/true genversion.py + touch genversion.py dh_auto_configure -- -DLIB_SUFFIX="/$(DEB_HOST_MULTIARCH)" \ -DENABLE_HTML_DOCS=TRUE \ -DENABLE_THIRD_PARTY_COPY=TRUE diff -Nru davix-0.7.1/dist/produce-artifacts.py davix-0.7.2/dist/produce-artifacts.py --- davix-0.7.1/dist/produce-artifacts.py 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/dist/produce-artifacts.py 2019-02-15 13:02:34.000000000 +0000 @@ -76,18 +76,9 @@ os.chdir(foldername) sh("git checkout {}".format(args.tag)) sh("git submodule update --init") - shutil.rmtree(".git") - shutil.rmtree("dist") - sh("""sed -i "s/set(VERSION_TAG \"\")/set(VERSION_TAG \"std\")/g" CMakeLists.txt""") + sh("./packaging/make-dist.sh") os.chdir("..") - - sh("tar -cvzf {name}-embedded-{release}.tar.gz --transform 's/{foldername}/{name}-embedded-{release}/' {foldername}".format( - name=args.name, release=args.release, foldername=foldername)) - sh("tar -cvzf {name}-{release}.tar.gz --transform 's/{foldername}/{name}-{release}/' {foldername}".format( - 
name=args.name, release=args.release, foldername=foldername)) - - shutil.copy("{0}-embedded-{1}.tar.gz".format(args.name, args.release), args.release) - shutil.copy("{0}-{1}.tar.gz".format(args.name, args.release), args.release) + shutil.copy("davix/build/{0}-{1}.tar.gz".format(args.name, args.release), args.release) def main(): global args diff -Nru davix-0.7.1/doc/sphinx/conf.py davix-0.7.2/doc/sphinx/conf.py --- davix-0.7.1/doc/sphinx/conf.py 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/doc/sphinx/conf.py 2019-02-15 13:02:34.000000000 +0000 @@ -58,9 +58,9 @@ # built documents. # # The short X.Y version. -version = '0.6' +version = '0.7' # The full version, including alpha/beta/rc tags. -release = '0.6.0' +release = '0.7.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff -Nru davix-0.7.1/.gitlab-ci.yml davix-0.7.2/.gitlab-ci.yml --- davix-0.7.1/.gitlab-ci.yml 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/.gitlab-ci.yml 2019-02-15 13:02:34.000000000 +0000 @@ -53,18 +53,6 @@ paths: - "$CI_JOB_NAME" -fedora-27: - stage: build - image: fedora:27 - script: - - ci/fedora/packages.sh - - ci/common-rpm-build.sh - - mkdir ${CI_JOB_NAME} - - cp -r /root/rpmbuild/RPMS build/SRPMS ${CI_JOB_NAME} - artifacts: - paths: - - "$CI_JOB_NAME" - ubuntu-artful: stage: build image: ubuntu:artful @@ -89,6 +77,17 @@ paths: - "$CI_JOB_NAME" +docs-generate: + stage: build + image: fedora:29 + script: + - dnf install -y git python3-sphinx python3-pip gcc-c++ rpm-build dnf-plugins-core python2 python3-breathe + - packaging/make-docs.sh + - mv build/doc/build/html docs + artifacts: + paths: + - docs + cc7-test: stage: test image: gitlab-registry.cern.ch/linuxsupport/cc7-base @@ -107,34 +106,31 @@ - yum install -y slc6/RPMS/*.rpm - davix-unit-tests -fedora-27-test: +fedora-28-test: stage: test - image: fedora:27 + image: fedora:28 dependencies: - - fedora-27 + - fedora-28 script: - - dnf install -y fedora-27/RPMS/*.rpm + - dnf install -y fedora-28/RPMS/*.rpm - davix-unit-tests -#docs: -# stage: publish -# image: gitlab-registry.cern.ch/linuxsupport/cc7-base -# script: -# - yum install -y git rpm-build python-argparse tar cmake python2-sphinx make sssd-client sudo python-pip -# - rm -rf /usr/lib/python2.7/site-packages/chardet -# - pip uninstall -y chardet -# - pip install --upgrade sphinx -# - packaging/make-docs.sh -# - mv build/doc/build/html docs -# - chown -R stci docs -# - SNAPSHOT=$(date +%s) -# - TARGET="/eos/project/d/davix/www/docs/${CI_COMMIT_REF_NAME}" -# - STAGING_AREA="$TARGET-${SNAPSHOT}" -# - sudo -u stci -H cp -r docs "$STAGING_AREA" -# - sudo -u stci -H packaging/replace-directory.sh "$STAGING_AREA" "$TARGET" -# tags: -# - docker-cc7 -# retry: 2 +docs: + stage: publish + image: gitlab-registry.cern.ch/linuxsupport/cc7-base + dependencies: + - docs-generate + script: + - yum install -y sssd-client sudo + - chown -R stci docs + - SNAPSHOT=$(date +%s) + - TARGET="/eos/project/d/davix/www/docs/${CI_COMMIT_REF_NAME}" + - STAGING_AREA="$TARGET-${SNAPSHOT}" + - sudo -u stci -H cp -r docs "$STAGING_AREA" + - sudo -u stci -H packaging/replace-directory.sh "$STAGING_AREA" "$TARGET" + tags: + - docker-cc7 + retry: 2 rpms: stage: publish diff -Nru davix-0.7.1/packaging/davix.spec.in davix-0.7.2/packaging/davix.spec.in --- davix-0.7.1/packaging/davix.spec.in 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/packaging/davix.spec.in 2019-02-15 13:02:34.000000000 +0000 @@ -5,7 +5,6 @@ Version: @VERSION_FULL@ Release: 1%{?dist} 
Summary: Toolkit for Http-based file management -Group: Applications/Internet License: LGPLv2+ URL: http://dmc.web.cern.ch/projects/davix/home # git clone http://git.cern.ch/pub/davix @@ -41,7 +40,6 @@ %package libs Summary: Development files for %{name} -Group: Applications/Internet %description libs Libraries for %{name}. Davix is a toolkit designed for file operations @@ -50,7 +48,6 @@ %package devel Summary: Development files for %{name} -Group: Applications/Internet Requires: %{name}-libs%{?_isa} = %{version}-%{release} Requires: pkgconfig @@ -60,7 +57,6 @@ %package tests Summary: Test suite for %{name} -Group: Applications/Internet Requires: %{name}-libs%{?_isa} = %{version}-%{release} %description tests @@ -69,7 +65,6 @@ %package doc Summary: Documentation for %{name} -Group: Documentation %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif @@ -140,8 +135,12 @@ %files tests %{_bindir}/davix-unit-tests +%{_bindir}/davix-tester %changelog +* Fri Feb 15 2019 Georgios Bitzes - 0.7.2-1 + - davix 0.7.2 release, see RELEASE-NOTES.md for changes + * Wed Oct 24 2018 Georgios Bitzes - 0.7.1-1 - davix 0.7.1 release, see RELEASE-NOTES.md for changes diff -Nru davix-0.7.1/packaging/debian/changelog davix-0.7.2/packaging/debian/changelog --- davix-0.7.1/packaging/debian/changelog 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/packaging/debian/changelog 2019-02-15 13:02:34.000000000 +0000 @@ -1,3 +1,9 @@ +davix (0.7.2-1) unstable; urgency=low + + * Update to version 0.7.2 + + -- Georgios Bitzes Fri, 15 Feb 2019 14:02:34 +0100 + davix (0.7.1-1) unstable; urgency=low * Update to version 0.7.1 diff -Nru davix-0.7.1/packaging/debian/davix-tests.install davix-0.7.2/packaging/debian/davix-tests.install --- davix-0.7.1/packaging/debian/davix-tests.install 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/packaging/debian/davix-tests.install 2019-02-15 13:02:34.000000000 +0000 @@ -1 +1,2 @@ usr/bin/davix-unit-tests +usr/bin/davix-tester diff -Nru davix-0.7.1/packaging/make-docs.sh davix-0.7.2/packaging/make-docs.sh --- davix-0.7.1/packaging/make-docs.sh 1970-01-01 00:00:00.000000000 +0000 +++ davix-0.7.2/packaging/make-docs.sh 2019-02-15 13:02:34.000000000 +0000 @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +set -e + +#------------------------------------------------------------------------------- +# Make SRPM to get a list of build dependencies +#------------------------------------------------------------------------------- +git submodule update --init --recursive +./packaging/make-srpm.sh +dnf builddep -y build/SRPMS/* + +#------------------------------------------------------------------------------- +# Generate a docs folder - run this from the root of the git repository. +#------------------------------------------------------------------------------- +rm -rf build +mkdir build && cd build +cmake -DENABLE_HTML_DOCS=TRUE .. + +make sphinx +make doc diff -Nru davix-0.7.1/packaging/replace-directory.sh davix-0.7.2/packaging/replace-directory.sh --- davix-0.7.1/packaging/replace-directory.sh 1970-01-01 00:00:00.000000000 +0000 +++ davix-0.7.2/packaging/replace-directory.sh 2019-02-15 13:02:34.000000000 +0000 @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +set -e + +STAGING_AREA="$1" +TARGET="$2" +OLD_CONTENTS="${TARGET}-old" + +test ! -d "$TARGET" || mv "$TARGET" "$OLD_CONTENTS" + +sleep 30 +mv "$STAGING_AREA" "$TARGET" +test ! 
-d "$OLD_CONTENTS" || rm -rf "$OLD_CONTENTS" + diff -Nru davix-0.7.1/README.md davix-0.7.2/README.md --- davix-0.7.1/README.md 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/README.md 2019-02-15 13:02:34.000000000 +0000 @@ -6,7 +6,7 @@ ## Documentation -Click [here](https://dmc-docs.web.cern.ch/dmc-docs/versions/docs/davix-epel/html/) to view the documentation of the latest released version, or [here](https://dmc-docs.web.cern.ch/dmc-docs/versions/docs/davix-development/html/) for the version under development. (devel branch) +Visit [https://davix.web.cern.ch](https://davix.web.cern.ch) to view the latest documentation. ## HTTP File Management @@ -65,7 +65,7 @@ ## Development The official repository is the one on [GitHub](https://github.com/cern-fts/davix). It's automatically mirrored on [CERN Gitlab](https://gitlab.cern.ch/dmc/davix) for CI purposes. This means: -* Use GitHub for new commits, issues, or pull requests. +* Use GitHub for new commits, issues, or pull requests. * Please don't commit directly on GitLab. * After a commit, GitLab will mirror the changes automatically, and run CI. Treat Gitlab as if it were ie a Jenkins CI instance. diff -Nru davix-0.7.1/RELEASE-NOTES.md davix-0.7.2/RELEASE-NOTES.md --- davix-0.7.1/RELEASE-NOTES.md 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/RELEASE-NOTES.md 2019-02-15 13:02:34.000000000 +0000 @@ -1,5 +1,18 @@ # davix release history + +## 0.7.2 (2019-02-15) +### Bug fixes +* produce-artifacts script was producing wonky release artifacts +* [DMC-1127] - davix should not segfault when calling DavPosix::close twice +* [DMC-1135] - davix misuses data provider function in S3 multi-part upload +* [DMC-1138] - Error from performance markers sometimes is not correctly reported by Davix +* [DMC-1140] - Fix parsing of the Digest to be complaint to RFC 3230 + +### Improvements +* Some refactoring, and splitting of redirection caching logic into its own separate class. + + ## 0.7.1 (2018-10-24) ### Bug fixes * [DMC-1114] - DAVIX adds cert chain multiple times diff -Nru davix-0.7.1/src/CMakeLists.txt davix-0.7.2/src/CMakeLists.txt --- davix-0.7.1/src/CMakeLists.txt 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/CMakeLists.txt 2019-02-15 13:02:34.000000000 +0000 @@ -1,7 +1,7 @@ # main file for src FILE(GLOB src_davix - "*.cpp" "neon/*.cpp" "status/*.cpp" "fileops/*.cpp" + "*.cpp" "neon/*.cpp" "core/*.cpp" "status/*.cpp" "fileops/*.cpp" "params/*.cpp" "auth/*.cpp" "file/*.cpp" "deprecated/*.cpp" "request/*.cpp" "hooks/*.cpp" "modules/*.cpp" "utils/*.cpp" "xml/*.cpp" ) diff -Nru davix-0.7.1/src/core/RedirectionResolver.cpp davix-0.7.2/src/core/RedirectionResolver.cpp --- davix-0.7.1/src/core/RedirectionResolver.cpp 1970-01-01 00:00:00.000000000 +0000 +++ davix-0.7.2/src/core/RedirectionResolver.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -0,0 +1,97 @@ +/* + * This File is part of Davix, The IO library for HTTP based protocols + * Copyright (C) CERN 2019 + * Author: Georgios Bitzes + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * +*/ + +#include "RedirectionResolver.hpp" +#include + + +using namespace Davix; + +static const std::pair makeKey(const std::string & method, const Uri & origin){ + std::string mymethod = method; + // cache HEAD and GET on same key + if(mymethod == "HEAD") + mymethod = "GET"; + + return std::make_pair(origin.getString(), mymethod); +} + +RedirectionResolver::RedirectionResolver(bool act) : active(act), redirCache(256) { + DAVIX_SLOG(DAVIX_LOG_TRACE, DAVIX_LOG_CORE, "Redirection Session caching {}", (active?"ENABLED":"DISABLED")); +} + +// add cached redirection +void RedirectionResolver::addRedirection(const std::string & method, const Uri & origin, std::shared_ptr dest) { + if(!active) { + return; + } + + DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_HTTP, "Add cached redirection <{} {} {}>", method.c_str(), origin.getString().c_str(), dest->getString().c_str()); + redirCache.insert(makeKey(method, origin), dest); +} + +// try to find cached redirection, resolve a full chain +std::shared_ptr RedirectionResolver::redirectionResolve(const std::string & method, const Uri & origin) { + std::shared_ptr res = resolveSingle(method, origin); + if(res.get() != NULL) { + std::shared_ptr res_rec = redirectionResolve(method, *res); + if(res_rec.get() != NULL) { + return res_rec; + } + } + return res; +} + +// resolve a single redirection chunk +std::shared_ptr RedirectionResolver::resolveSingle(const std::string & method, const Uri & origin) { + std::shared_ptr res = redirCache.find(makeKey(method, origin)); + + if(res.get() != NULL){ + DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_HTTP, "Found redirection <{} {} {}>", method.c_str(), origin.getString().c_str(), res->getString().c_str()); + } + return res; +} + +// check if redirections are active +bool RedirectionResolver::isActive() const { + return active; +} + +void RedirectionResolver::redirectionClean(const std::string & method, const Uri & origin) { + std::shared_ptr res = redirCache.find(makeKey(method, origin)); + if(res.get() != NULL){ + DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_HTTP, "Delete Cached redirection for <{} {} {}>", method.c_str(), origin.getString().c_str(), res->getString().c_str()); + redirCache.erase(makeKey(method, origin)); + redirectionClean(method, *res); + } +} + +void RedirectionResolver::redirectionClean(const Uri & origin) { + std::pair query = std::make_pair(origin.getString(), ""); + while(true) { + const std::pair nextkey = redirCache.upper_bound(query); + if(nextkey.first != origin.getString()) { + break; + } + + redirectionClean(nextkey.second, nextkey.first); + } +} diff -Nru davix-0.7.1/src/core/RedirectionResolver.hpp davix-0.7.2/src/core/RedirectionResolver.hpp --- davix-0.7.1/src/core/RedirectionResolver.hpp 1970-01-01 00:00:00.000000000 +0000 +++ davix-0.7.2/src/core/RedirectionResolver.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -0,0 +1,63 @@ +/* + * This File is part of Davix, The IO library for HTTP based protocols + * Copyright (C) CERN 2019 + * Author: Georgios Bitzes + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * +*/ + +#ifndef DAVIX_CORE_REDIRECTION_RESOLVER_HPP +#define DAVIX_CORE_REDIRECTION_RESOLVER_HPP + +#include +#include +#include +#include +#include + +namespace Davix { + +class RedirectionResolver { +public: + RedirectionResolver(bool active); + + // add cached redirection + void addRedirection(const std::string & method, const Uri & origin, std::shared_ptr dest); + + // try to find cached redirection, resolve a full redirection chain + std::shared_ptr redirectionResolve(const std::string & method, const Uri & origin); + + // check if redirections are active + bool isActive() const; + + // clean redirections + void redirectionClean(const std::string & method, const Uri & origin); + void redirectionClean(const Uri & origin); + +private: + bool active; + + // redirection pool + Davix::Cache, Uri> redirCache; + + // resolve a single redirection chunk + std::shared_ptr resolveSingle(const std::string & method, const Uri & origin); +}; + + +} + +#endif \ No newline at end of file diff -Nru davix-0.7.1/src/core/SessionPool.hpp davix-0.7.2/src/core/SessionPool.hpp --- davix-0.7.1/src/core/SessionPool.hpp 1970-01-01 00:00:00.000000000 +0000 +++ davix-0.7.2/src/core/SessionPool.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -0,0 +1,30 @@ +/* + * This File is part of Davix, The IO library for HTTP based protocols + * Copyright (C) CERN 2019 + * Author: Georgios Bitzes + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * +*/ + +#ifndef DAVIX_CORE_SESSION_POOL_HPP +#define DAVIX_CORE_SESSION_POOL_HPP + +class SessionPool { + +}; + +#endif + diff -Nru davix-0.7.1/src/davixcontext.cpp davix-0.7.2/src/davixcontext.cpp --- davix-0.7.1/src/davixcontext.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/davixcontext.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -27,6 +27,7 @@ #include #include #include +#include #include #include @@ -38,24 +39,24 @@ static LibPath lib_path; +static bool redirCachingDisabled(){ + return ( getenv("DAVIX_DISABLE_REDIRECT_CACHING") != NULL); +} + /// Implementation f the core logic in davix struct ContextInternal { - ContextInternal(NEONSessionFactory * fsess): - _fsess(fsess), - _s_buff(65536), - _timeout(300), - _context_flags(0), + ContextInternal(): + _fsess(new NEONSessionFactory()), + _redirectionResolver(new RedirectionResolver(!redirCachingDisabled())), _hook_list() { - DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_CORE, "libdavix path {}", getLibPath()); + DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_CORE, "libdavix path {}, version: {}", getLibPath(), version()); } - ContextInternal(const ContextInternal & orig): + ContextInternal(const ContextInternal & orig) : _fsess(new NEONSessionFactory()), - _s_buff(orig._s_buff), - _timeout(orig._timeout), - _context_flags(orig._context_flags), + _redirectionResolver(new RedirectionResolver(!redirCachingDisabled())), _hook_list(orig._hook_list) { } @@ -67,14 +68,12 @@ return _fsess.get(); } - void setBufferSize(const dav_size_t value){ - _s_buff = value; + inline RedirectionResolver* getRedirectionResolver() { + return _redirectionResolver.get(); } - Ptr::Scoped _fsess; - dav_size_t _s_buff; - unsigned long _timeout; - bool _context_flags; + std::unique_ptr _fsess; + std::unique_ptr _redirectionResolver; HookList _hook_list; }; @@ -84,7 +83,7 @@ Context::Context() : - _intern(new ContextInternal(new NEONSessionFactory())) + _intern(new ContextInternal()) { } @@ -148,6 +147,9 @@ return *static_cast(c._intern->getSessionFactory()); } +RedirectionResolver & ContextExplorer::RedirectionResolverFromContext(Context &c) { + return *c._intern->getRedirectionResolver(); +} LibPath::LibPath(){ Dl_info shared_lib_infos; @@ -161,7 +163,7 @@ } const std::string & version(){ - static const std::string _version = DAVIX_VERSION_STRING "-" DAVIX_VERSION_TAG; + static const std::string _version = DAVIX_VERSION_STRING; return _version; } diff -Nru davix-0.7.1/src/davix_context_internal.hpp davix-0.7.2/src/davix_context_internal.hpp --- davix-0.7.1/src/davix_context_internal.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/davix_context_internal.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -33,10 +33,13 @@ /// @cond HIDDEN_SYMBOLS +class RedirectionResolver; + struct ContextExplorer{ static NEONSessionFactory & SessionFactoryFromContext(Context & c); +static RedirectionResolver & RedirectionResolverFromContext(Context &c); }; diff -Nru davix-0.7.1/src/davix_internal_config.in davix-0.7.2/src/davix_internal_config.in --- davix-0.7.1/src/davix_internal_config.in 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/davix_internal_config.in 2019-02-15 13:02:34.000000000 +0000 @@ -107,7 +107,4 @@ #include #endif /* end of win32*/ -// Davix versionnin -#define DAVIX_VERSION_TAG "@VERSION_TAG@" - #endif // CONFIG_H diff -Nru 
davix-0.7.1/src/file/davposix.cpp davix-0.7.2/src/file/davposix.cpp --- davix-0.7.1/src/file/davposix.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/file/davposix.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -150,7 +150,7 @@ } DAVIX_DIR* internal_opendir(Context & context, const RequestParams* params, const std::string & url){ - Ptr::Scoped dir(new DAVIX_DIR(context, url, params)); + std::unique_ptr dir(new DAVIX_DIR(context, url, params)); dir->end = ! dir->io_chain.nextSubItem(dir->io_context,dir->start_entry_name, dir->start_entry_st); return dir.release(); } @@ -585,6 +585,7 @@ if(fd){ fd->io_handler.resetIO(fd->io_context); delete fd; + fd = NULL; } }CATCH_DAVIX(err) return 0; diff -Nru davix-0.7.1/src/fileops/azure_meta_ops.hpp davix-0.7.2/src/fileops/azure_meta_ops.hpp --- davix-0.7.1/src/fileops/azure_meta_ops.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/fileops/azure_meta_ops.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -59,7 +59,7 @@ private: - Ptr::Scoped directoryItem; + std::unique_ptr directoryItem; }; @@ -82,7 +82,7 @@ virtual bool nextSubItem(IOChainContext &iocontext, std::string &entry_name, StatInfo &info); private: - Ptr::Scoped directoryItem; + std::unique_ptr directoryItem; }; diff -Nru davix-0.7.1/src/fileops/davmeta.cpp davix-0.7.2/src/fileops/davmeta.cpp --- davix-0.7.1/src/fileops/davmeta.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/fileops/davmeta.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -62,8 +62,8 @@ DirHandle(HttpRequest* req, XMLPropParser * p): request(req), parser(p){} - Ptr::Scoped request; - Ptr::Scoped parser; + std::unique_ptr request; + std::unique_ptr parser; }; @@ -146,7 +146,7 @@ } -// Implement stat with a GET of Range 1 +// Implement stat with a GET of Range 1 int dav_stat_mapper_http_get(Context& context, const RequestParams* params, const Uri & uri, struct StatInfo& st_info){ int ret = -1; DavixError * tmp_err=NULL; @@ -168,7 +168,7 @@ } if (rnge.substr(pos+1,1) == "*") { throw DavixException(davix_scope_meta(), StatusCode::ParsingError, "Server does not provide content length"); - } + } long lsize = toType()(rnge.substr(pos+1)); st_info.size = std::max(0,lsize); st_info.mode = 0755 | S_IFREG; @@ -421,7 +421,7 @@ } std::ostringstream ss; - ss << "checksum calculation for " << chk_algo << "not supported for " << url; + ss << "checksum calculation for " << chk_algo << " not supported for " << url; throw DavixException(davix_scope_meta(), StatusCode::OperationNonSupported, ss.str()); DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_CHAIN, " checksum <-"); @@ -434,7 +434,7 @@ -bool wedav_get_next_property(Ptr::Scoped & handle, std::string & name_entry, StatInfo & info){ +bool wedav_get_next_property(std::unique_ptr & handle, std::string & name_entry, StatInfo & info){ DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_CHAIN, " -> wedav_get_next_property"); const size_t read_size = 2048; @@ -466,7 +466,7 @@ } -void webdav_start_listing_query(Ptr::Scoped & handle, Context & context, const RequestParams* params, const Uri & url, const std::string & body){ +void webdav_start_listing_query(std::unique_ptr & handle, Context & context, const RequestParams* params, const Uri & url, const std::string & body){ dav_ssize_t s_resu; DavixError* tmp_err=NULL; @@ -508,7 +508,7 @@ } -bool webdav_directory_listing(Ptr::Scoped & handle, Context & context, const RequestParams* params, const Uri & uri, const std::string & body, std::string & name_entry, StatInfo & info){ +bool webdav_directory_listing(std::unique_ptr & handle, Context & context, const 
RequestParams* params, const Uri & uri, const std::string & body, std::string & name_entry, StatInfo & info){ if(handle.get() == NULL){ webdav_start_listing_query(handle, context, params, uri, body); } @@ -769,7 +769,7 @@ } -bool s3_get_next_property(Ptr::Scoped & handle, std::string & name_entry, StatInfo & info){ +bool s3_get_next_property(std::unique_ptr & handle, std::string & name_entry, StatInfo & info){ DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_CHAIN, " -> s3_get_next_property"); const size_t read_size = 2048; @@ -801,7 +801,7 @@ } -void s3_start_listing_query(Ptr::Scoped & handle, Context & context, const RequestParams* params, const Uri & url, const std::string & body){ +void s3_start_listing_query(std::unique_ptr & handle, Context & context, const RequestParams* params, const Uri & url, const std::string & body){ (void) body; dav_ssize_t s_resu; DavixError* tmp_err=NULL; @@ -874,7 +874,7 @@ -bool s3_directory_listing(Ptr::Scoped & handle, Context & context, const RequestParams* params, const Uri & uri, const std::string & body, std::string & name_entry, StatInfo & info){ +bool s3_directory_listing(std::unique_ptr & handle, Context & context, const RequestParams* params, const Uri & uri, const std::string & body, std::string & name_entry, StatInfo & info){ if(handle.get() == NULL){ s3_start_listing_query(handle, context, params, uri, body); } @@ -980,7 +980,7 @@ } } -static void azure_start_listing_query(Ptr::Scoped & handle, Context & context, const RequestParams* params, const Uri & url, const std::string & body) { +static void azure_start_listing_query(std::unique_ptr & handle, Context & context, const RequestParams* params, const Uri & url, const std::string & body) { DavixError* tmp_err=NULL; dav_ssize_t s_resu; @@ -1014,7 +1014,7 @@ } -bool azure_get_next_property(Ptr::Scoped & handle, std::string & name_entry, StatInfo & info) { +bool azure_get_next_property(std::unique_ptr & handle, std::string & name_entry, StatInfo & info) { DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_CHAIN, " -> azure_get_next_property"); const size_t read_size = 2048; @@ -1043,7 +1043,7 @@ return true; } -static bool azure_directory_listing(Ptr::Scoped & handle, Context & context, const RequestParams* params, const Uri & uri, const std::string & body, std::string & name_entry, StatInfo & info){ +static bool azure_directory_listing(std::unique_ptr & handle, Context & context, const RequestParams* params, const Uri & uri, const std::string & body, std::string & name_entry, StatInfo & info){ if(handle.get() == NULL){ azure_start_listing_query(handle, context, params, uri, body); } diff -Nru davix-0.7.1/src/fileops/davmeta.hpp davix-0.7.2/src/fileops/davmeta.hpp --- davix-0.7.1/src/fileops/davmeta.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/fileops/davmeta.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -62,7 +62,7 @@ private: - Ptr::Scoped directoryItem; + std::unique_ptr directoryItem; }; @@ -88,7 +88,7 @@ virtual bool nextSubItem(IOChainContext &iocontext, std::string &entry_name, StatInfo &info); private: - Ptr::Scoped directoryItem; + std::unique_ptr directoryItem; }; @@ -111,7 +111,7 @@ virtual bool nextSubItem(IOChainContext &iocontext, std::string &entry_name, StatInfo &info); private: - Ptr::Scoped directoryItem; + std::unique_ptr directoryItem; }; diff -Nru davix-0.7.1/src/fileops/httpiochain.cpp davix-0.7.2/src/fileops/httpiochain.cpp --- davix-0.7.1/src/fileops/httpiochain.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/fileops/httpiochain.cpp 2019-02-15 13:02:34.000000000 +0000 @@ 
-25,7 +25,7 @@ namespace Davix{ -HttpIOChain::HttpIOChain() : _next(NULL), _start(this) +HttpIOChain::HttpIOChain() : _start(this) { } diff -Nru davix-0.7.1/src/fileops/httpiochain.hpp davix-0.7.2/src/fileops/httpiochain.hpp --- davix-0.7.1/src/fileops/httpiochain.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/fileops/httpiochain.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -42,7 +42,7 @@ // data again to an fd after a retry / metalink recovery. struct FdHandler { FdHandler() : fd(-1), bytes_written_to_fd(0) { } - + int fd; dav_ssize_t bytes_written_to_fd; }; @@ -167,7 +167,7 @@ protected: - Ptr::Scoped _next; + std::unique_ptr _next; HttpIOChain* _start; diff -Nru davix-0.7.1/src/fileops/iobuffmap.hpp davix-0.7.2/src/fileops/iobuffmap.hpp --- davix-0.7.1/src/fileops/iobuffmap.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/fileops/iobuffmap.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -115,7 +115,7 @@ // locker std::recursive_mutex _rwlock; // write cache - Ptr::Scoped _local; + std::unique_ptr _local; dav_off_t _read_pos; //curent read file offset bool _read_endfile; diff -Nru davix-0.7.1/src/fileops/S3IO.cpp davix-0.7.2/src/fileops/S3IO.cpp --- davix-0.7.1/src/fileops/S3IO.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/fileops/S3IO.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -191,7 +191,15 @@ remaining -= bytesRead; written += bytesRead; - if(bytesRead == 0) break; // EOF + if(bytesRead == 0) { + DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_CHAIN, "Reached data provider EOF, received 0 bytes, even though asked for {}", remaining); + break; // EOF + } + + if(remaining == 0) { + DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_CHAIN, "Data provider buffer has been filled"); + break; // buffer is full + } } DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_CHAIN, "Retrieved {} bytes from data provider", written); diff -Nru davix-0.7.1/src/libs/alibxx/alibxx.hpp davix-0.7.2/src/libs/alibxx/alibxx.hpp --- davix-0.7.1/src/libs/alibxx/alibxx.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/libs/alibxx/alibxx.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -13,9 +13,6 @@ #endif -// pointer helpers -#include "ptr/unique.hpp" - // algorithm helpers #include "algorithm/algorithm.hpp" diff -Nru davix-0.7.1/src/libs/alibxx/containers/cache.hpp davix-0.7.2/src/libs/alibxx/containers/cache.hpp --- davix-0.7.1/src/libs/alibxx/containers/cache.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/libs/alibxx/containers/cache.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -6,9 +6,10 @@ #include #include #include +#include -namespace A_LIB_NAMESPACE{ +namespace Davix { /// /// Thread Safe Cache container diff -Nru davix-0.7.1/src/libs/alibxx/ptr/unique.hpp davix-0.7.2/src/libs/alibxx/ptr/unique.hpp --- davix-0.7.1/src/libs/alibxx/ptr/unique.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/libs/alibxx/ptr/unique.hpp 1970-01-01 00:00:00.000000000 +0000 @@ -1,68 +0,0 @@ -#ifndef PTR_UNIQUE_HPP -#define PTR_UNIQUE_HPP - -#include -#include "../alibxx.hpp" - -namespace A_LIB_NAMESPACE{ - -namespace Ptr{ - - -template -class Scoped{ -public: - typedef T element_type; - - explicit Scoped(): _ptr(NULL){} - explicit Scoped(T* p) : _ptr(p){} - ~Scoped(){ delete _ptr;} - - inline T* release(){ - T* r= _ptr; - _ptr = NULL; - return r; - } - - inline void reset(T* p = NULL){ - Scoped tmp(p); - swap(tmp); - } - - T& operator*(){ - return *_ptr; - } - - T* operator->(){ - return _ptr; - } - - bool operator==(const Scoped & other){ - return (_ptr == other._ptr); - } - - inline T* get(){ - return 
_ptr; - } - - inline void swap(Scoped & other){ - std::swap(other._ptr, _ptr); - } - -private: - T* _ptr; - Scoped(const Scoped & other); - Scoped & operator=(const Scoped & other); -}; - - - - -} - -} - - - - -#endif // UNIQUE_HPP diff -Nru davix-0.7.1/src/modules/copy/copy.cpp davix-0.7.2/src/modules/copy/copy.cpp --- davix-0.7.1/src/modules/copy/copy.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/modules/copy/copy.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -294,7 +294,11 @@ // Just wait for it to finish monitorPerformanceMarkers(request, error); - request->endRequest(error); + request->endRequest(&internalError); + if(internalError && !(*error) ) { + DavixError::propagatePrefixedError(error, internalError, __func__); + } + delete request; } diff -Nru davix-0.7.1/src/neon/neonrequest.cpp davix-0.7.2/src/neon/neonrequest.cpp --- davix-0.7.1/src/neon/neonrequest.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/neon/neonrequest.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -32,6 +32,7 @@ #include #include #include +#include @@ -208,12 +209,12 @@ std::shared_ptr redir_url; if(this->params.getTransparentRedirectionSupport()) { - redir_url = ContextExplorer::SessionFactoryFromContext(_c).redirectionResolve(_request_type, *_current); + redir_url = ContextExplorer::RedirectionResolverFromContext(_c).redirectionResolve(_request_type, *_current); } // performing an operation which could change the PFN? Clear all cache entries for selected URL if(_request_type == "DELETE" || _request_type == "MOVE") { - ContextExplorer::SessionFactoryFromContext(_c).redirectionClean(*_current.get()); + ContextExplorer::RedirectionResolverFromContext(_c).redirectionClean(*_current.get()); } if(redir_url.get() != NULL){ @@ -463,7 +464,7 @@ createError(status, err); cancelSessionReuse(); - ContextExplorer::SessionFactoryFromContext(_c).redirectionClean(_request_type, *_orig); + ContextExplorer::RedirectionResolverFromContext(_c).redirectionClean(_request_type, *_orig); return -1; } @@ -604,7 +605,7 @@ // bool NEONRequest::requestCleanup(){ // cleanup redirection - ContextExplorer::SessionFactoryFromContext(_c).redirectionClean(_request_type, *_orig); + ContextExplorer::RedirectionResolverFromContext(_c).redirectionClean(_request_type, *_orig); // disable recycling // server supporting broken pipelining will trigger if reused @@ -637,7 +638,7 @@ old_uri = _current; _current= std::shared_ptr(new Uri(dst_uri)); ne_free(dst_uri); - ContextExplorer::SessionFactoryFromContext(_c).addRedirection(_request_type, *old_uri, _current); + ContextExplorer::RedirectionResolverFromContext(_c).addRedirection(_request_type, *old_uri, _current); // recycle old request and session diff -Nru davix-0.7.1/src/neon/neonrequest.hpp davix-0.7.2/src/neon/neonrequest.hpp --- davix-0.7.1/src/neon/neonrequest.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/neon/neonrequest.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -161,7 +161,7 @@ // request parameters RequestParams params; // neon internal field - Ptr::Scoped _neon_sess; + std::unique_ptr _neon_sess; // request options flag diff -Nru davix-0.7.1/src/neon/neonsessionfactory.cpp davix-0.7.2/src/neon/neonsessionfactory.cpp --- davix-0.7.1/src/neon/neonsessionfactory.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/neon/neonsessionfactory.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -39,21 +39,13 @@ return ( getenv("DAVIX_DISABLE_SESSION_CACHING") != NULL); } -static bool redirCachingDisabled(){ - return ( getenv("DAVIX_DISABLE_REDIRECT_CACHING") != NULL); 
-} - - NEONSessionFactory::NEONSessionFactory() : _sess_map(), _sess_mut(), - _session_caching(!sessionCachingDisabled()), - _redir_caching(!redirCachingDisabled()), - _redirCache(256) + _session_caching(!sessionCachingDisabled()) { std::call_once(neon_once, &init_neon); DAVIX_SLOG(DAVIX_LOG_TRACE, DAVIX_LOG_CORE, "HTTP/SSL Session caching {}", (_session_caching?"ENABLED":"DISABLED")); - DAVIX_SLOG(DAVIX_LOG_TRACE, DAVIX_LOG_CORE, "Redirection Session caching {}", (_redir_caching?"ENABLED":"DISABLED")); } NEONSessionFactory::~NEONSessionFactory(){ @@ -162,65 +154,6 @@ _sess_map.insert(std::pair(sess_key, sess)); } -static const std::pair redirectionCreateKey(const std::string & method, const Uri & origin){ - std::string mymethod = method; - // cache HEAD and GET on same key - if(mymethod == "HEAD") - mymethod = "GET"; - - return std::make_pair(origin.getString(), mymethod); -} - -void NEONSessionFactory::addRedirection( const std::string & method, const Uri & origin, std::shared_ptr dest){ - if(_redir_caching){ - DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_HTTP, "Add cached redirection <{} {} {}>", method.c_str(), origin.getString().c_str(), dest->getString().c_str()); - _redirCache.insert(redirectionCreateKey(method, origin), dest); - } -} - -std::shared_ptr NEONSessionFactory::redirectionResolve(const std::string & method, const Uri & origin){ - std::shared_ptr res = redirectionResolveSingle(method, origin); - if(res.get() != NULL){ - std::shared_ptr res_rec = redirectionResolve(method, *res); - if(res_rec.get() != NULL) - return res_rec; - } - return res; -} - -std::shared_ptr NEONSessionFactory::redirectionResolveSingleIntern(const std::string & method, const Uri & origin){ - return _redirCache.find(redirectionCreateKey(method, origin)); -} - - -std::shared_ptr NEONSessionFactory::redirectionResolveSingle(const std::string & method, const Uri & origin){ - std::shared_ptr res = redirectionResolveSingleIntern(method, origin); - if(res.get() != NULL){ - DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_HTTP, "Found redirection <{} {} {}>", method.c_str(), origin.getString().c_str(), res->getString().c_str()); - } - return res; -} - -void NEONSessionFactory::redirectionClean(const std::string & method, const Uri & origin){ - std::shared_ptr res = redirectionResolveSingleIntern(method, origin); - if(res.get() != NULL){ - DAVIX_SLOG(DAVIX_LOG_DEBUG, DAVIX_LOG_HTTP, "Delete Cached redirection for <{} {} {}>", method.c_str(), origin.getString().c_str(), res->getString().c_str()); - _redirCache.erase(redirectionCreateKey(method, origin)); - redirectionClean(method, *res); - } -} - -void NEONSessionFactory::redirectionClean(const Uri & origin){ - std::pair query = std::make_pair(origin.getString(), ""); - while(1) { - const std::pair nextkey = _redirCache.upper_bound(query); - if(nextkey.first != origin.getString()) - break; - - redirectionClean(nextkey.second, nextkey.first); - } -} - std::string create_map_keys_from_URL(const std::string & protocol, const std::string &host, unsigned int port){ std::string host_port; if( (strcmp(protocol.c_str(), "http") ==0 && port == 80) diff -Nru davix-0.7.1/src/neon/neonsessionfactory.hpp davix-0.7.2/src/neon/neonsessionfactory.hpp --- davix-0.7.1/src/neon/neonsessionfactory.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/neon/neonsessionfactory.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -25,9 +25,7 @@ #include #include #include - #include -#include namespace Davix { @@ -51,20 +49,6 @@ int storeNeonSession(ne_session *sess); // - // Redirecton caching - // - - 
void addRedirection( const std::string & method, const Uri & origin, std::shared_ptr dest); - - // try to find cached redirection, resolve a full chain - std::shared_ptr redirectionResolve(const std::string & method, const Uri & origin); - // try to find a cached redirection, resolve only one element - std::shared_ptr redirectionResolveSingle(const std::string & method, const Uri & origin); - - void redirectionClean(const std::string & method, const Uri & origin); - void redirectionClean(const Uri & origin); - - // // opts // @@ -78,17 +62,11 @@ // session pool std::multimap _sess_map; std::mutex _sess_mut; - bool _session_caching, _redir_caching; - - // redirection pool - Cache, Uri> _redirCache; + bool _session_caching; void internal_release_session_handle(ne_session* sess); ne_session* create_session(const RequestParams & params, const std::string & protocol, const std::string &host, unsigned int port); ne_session* create_recycled_session(const RequestParams & params, const std::string & protocol, const std::string &host, unsigned int port); - - std::shared_ptr redirectionResolveSingleIntern(const std::string & method, const Uri & origin); - }; void parse_http_neon_url(const std::string & url, std::string & protocol, diff -Nru davix-0.7.1/src/neon/neonsession.hpp davix-0.7.2/src/neon/neonsession.hpp --- davix-0.7.1/src/neon/neonsession.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/neon/neonsession.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -46,15 +46,6 @@ return _sess; } - inline bool getLastError(DavixError** err){ - if(_last_error){ - DavixError::propagateError(err, _last_error); - _last_error = NULL; - return true; - } - return false; - } - inline bool isRecycledSession(){ return reused; } diff -Nru davix-0.7.1/src/request/httprequest.cpp davix-0.7.2/src/request/httprequest.cpp --- davix-0.7.1/src/request/httprequest.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/request/httprequest.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -33,8 +33,8 @@ ///////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////// -HttpRequest::HttpRequest(NEONRequest* req) : d_ptr(req) -{ +HttpRequest::HttpRequest(NEONRequest* req) { + std::cerr << "Usage of HttpRequest::HttpRequest(NEONRequest* req) is deprecated!" 
<< std::endl; } HttpRequest::HttpRequest(Context & context, const Uri & uri, DavixError** err) : diff -Nru davix-0.7.1/src/utils/checksum_extractor.cpp davix-0.7.2/src/utils/checksum_extractor.cpp --- davix-0.7.1/src/utils/checksum_extractor.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/utils/checksum_extractor.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -58,37 +58,60 @@ return ss.str(); } -bool ChecksumExtractor::extractChecksum(const HeaderVec &headers, +static std::vector split(std::string data, std::string token) { + std::vector output; + size_t pos = std::string::npos; + do { + pos = data.find(token); + output.push_back(data.substr(0, pos)); + if(std::string::npos != pos) data = data.substr(pos + token.size()); + } while (std::string::npos != pos); + return output; +} + +bool ChecksumExtractor::extractChecksum(const std::string &headerLine, const std::string &desiredChecksum, std::string &checksum) { std::string expectedPrefix = SSTR(desiredChecksum << "="); + std::vector chunks = split(headerLine, ","); - for(HeaderVec::const_iterator it = headers.begin(); it != headers.end(); it++) { - if(equalsNoCase(it->first, "Digest")) { - if(startsWithNoCase(it->second, expectedPrefix)) { - // We have a match. Are we supposed to base64 decode this? - checksum = it->second.substr(expectedPrefix.size()); - - if(StrUtil::compare_ncase(desiredChecksum, "UNIXcksum") == 0 || - StrUtil::compare_ncase(desiredChecksum, "CRC32c") == 0 || - StrUtil::compare_ncase(desiredChecksum, "ADLER32") == 0 || - StrUtil::compare_ncase(desiredChecksum, "UNIXsum") == 0 - ) { - + for(size_t i = 0; i < chunks.size(); i++) { + if(startsWithNoCase(chunks[i], expectedPrefix)) { + // We have a match. Are we supposed to base64 decode this? + checksum = chunks[i].substr(expectedPrefix.size()); + + if(StrUtil::compare_ncase(desiredChecksum, "UNIXcksum") == 0 || + StrUtil::compare_ncase(desiredChecksum, "CRC32c") == 0 || + StrUtil::compare_ncase(desiredChecksum, "ADLER32") == 0 || + StrUtil::compare_ncase(desiredChecksum, "UNIXsum") == 0 + ) { // Nope, just extract the value. return true; - } + } - if(StrUtil::compare_ncase(desiredChecksum, "md5") == 0) { - // Maybe.. older versions of DPM don't base64 encode their output. - if(checksum.size() != 32) { - checksum = hexEncode(Base64::base64_decode(checksum)); - } - return true; + if(StrUtil::compare_ncase(desiredChecksum, "md5") == 0) { + // Maybe.. older versions of DPM don't base64 encode their output. + if(checksum.size() != 32) { + checksum = hexEncode(Base64::base64_decode(checksum)); } + return true; + } + + // All other checksums should be base64 decoded. + checksum = hexEncode(Base64::base64_decode(checksum)); + return true; + } + } + + return false; +} - // All other checksums should be base64 decoded. 
- checksum = hexEncode(Base64::base64_decode(checksum)); +bool ChecksumExtractor::extractChecksum(const HeaderVec &headers, + const std::string &desiredChecksum, std::string &checksum) { + + for(HeaderVec::const_iterator it = headers.begin(); it != headers.end(); it++) { + if(equalsNoCase(it->first, "Digest")) { + if(extractChecksum(it->second, desiredChecksum, checksum)) { return true; } } diff -Nru davix-0.7.1/src/utils/checksum_extractor.hpp davix-0.7.2/src/utils/checksum_extractor.hpp --- davix-0.7.1/src/utils/checksum_extractor.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/utils/checksum_extractor.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -26,6 +26,9 @@ class ChecksumExtractor { public: + static bool extractChecksum(const std::string &headerLine, + const std::string &desiredChecksum, std::string &checksum); + static bool extractChecksum(const HeaderVec &headers, const std::string &desiredChecksum, std::string &checksum); diff -Nru davix-0.7.1/src/utils/davixuri.cpp davix-0.7.2/src/utils/davixuri.cpp --- davix-0.7.1/src/utils/davixuri.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/utils/davixuri.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -46,8 +46,7 @@ query(), fragment(), port(0), - _uri_string(), - query_and_path(NULL){} + _uri_string() {} UriPrivate(const UriPrivate & orig): code(orig.code), @@ -58,8 +57,7 @@ query(orig.query), fragment(orig.fragment), port(orig.port), - _uri_string(orig._uri_string), - query_and_path(NULL){ } + _uri_string(orig._uri_string) {} ~UriPrivate(){ @@ -146,7 +144,7 @@ std::string proto, userinfo, path, host, query, fragment; unsigned int port; std::string _uri_string; - Ptr::Scoped query_and_path; + std::unique_ptr query_and_path; }; diff -Nru davix-0.7.1/src/utils/davix_utils_internal.hpp davix-0.7.2/src/utils/davix_utils_internal.hpp --- davix-0.7.1/src/utils/davix_utils_internal.hpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/src/utils/davix_utils_internal.hpp 2019-02-15 13:02:34.000000000 +0000 @@ -60,7 +60,7 @@ targetParams.insert("website"); targetParams.insert("delete"); // ? 
-    
+
     ParamVec canonicalizedParams;
     const ParamVec &params = url.getQueryVec();
@@ -84,7 +84,7 @@
         if(!it->second.empty()) {
             ss << "=" << it->second;
         }
-        
+
         if(it+1 != canonicalizedParams.end()) {
             ss << "&";
         }
diff -Nru davix-0.7.1/src/xml/azurepropparser.hpp davix-0.7.2/src/xml/azurepropparser.hpp
--- davix-0.7.1/src/xml/azurepropparser.hpp	2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/src/xml/azurepropparser.hpp	2019-02-15 13:02:34.000000000 +0000
@@ -48,7 +48,7 @@

 private:

-    Ptr::Scoped d_ptr;
+    std::unique_ptr d_ptr;
 };

 }
diff -Nru davix-0.7.1/src/xml/davdeletexmlparser.cpp davix-0.7.2/src/xml/davdeletexmlparser.cpp
--- davix-0.7.1/src/xml/davdeletexmlparser.cpp	2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/src/xml/davdeletexmlparser.cpp	2019-02-15 13:02:34.000000000 +0000
@@ -34,7 +34,7 @@

 const Xml::XmlPTree prop_response(Xml::ElementStart, "response");

-static Ptr::Scoped webDavTree;
+static std::unique_ptr webDavTree;

 static std::once_flag _l_init;
diff -Nru davix-0.7.1/src/xml/davpropxmlparser.cpp davix-0.7.2/src/xml/davpropxmlparser.cpp
--- davix-0.7.1/src/xml/davpropxmlparser.cpp	2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/src/xml/davpropxmlparser.cpp	2019-02-15 13:02:34.000000000 +0000
@@ -33,7 +33,7 @@
 const Xml::XmlPTree prop_node(Xml::ElementStart, "propstat");
 const Xml::XmlPTree prop_collection(Xml::ElementStart, "collection");

-static Ptr::Scoped webDavTree;
+static std::unique_ptr webDavTree;

 static std::once_flag _l_init;
diff -Nru davix-0.7.1/src/xml/s3deleteparser.hpp davix-0.7.2/src/xml/s3deleteparser.hpp
--- davix-0.7.1/src/xml/s3deleteparser.hpp	2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/src/xml/s3deleteparser.hpp	2019-02-15 13:02:34.000000000 +0000
@@ -50,7 +50,7 @@

 private:

-    Ptr::Scoped d_ptr;
+    std::unique_ptr d_ptr;
 };

 }
diff -Nru davix-0.7.1/src/xml/s3propparser.hpp davix-0.7.2/src/xml/s3propparser.hpp
--- davix-0.7.1/src/xml/s3propparser.hpp	2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/src/xml/s3propparser.hpp	2019-02-15 13:02:34.000000000 +0000
@@ -50,7 +50,7 @@

 private:

-    Ptr::Scoped d_ptr;
+    std::unique_ptr d_ptr;
 };

 }
diff -Nru davix-0.7.1/test/functional/CMakeLists.txt davix-0.7.2/test/functional/CMakeLists.txt
--- davix-0.7.1/test/functional/CMakeLists.txt	2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/test/functional/CMakeLists.txt	2019-02-15 13:02:34.000000000 +0000
@@ -1,8 +1,6 @@
 # main file for src

-if(FUNCTIONAL_TESTS)
-
 set(src_davix_test_lib "davix_test_lib.cpp")

 #set( src_test_req "test_request.cpp")
@@ -23,7 +21,7 @@
 #set( src_tool_get_replicas "tool_get_replica_simple.cpp")
 #set( src_test_mv "test_mv.cpp")

-set( src_tester "tester.cpp")
+set( src_tester "davix-tester.cpp")

 add_library(test_davix_functional_lib STATIC ${src_davix_test_lib})

@@ -82,8 +80,11 @@
 #add_executable(test_mv ${src_test_mv})
 #target_link_libraries(test_mv libdavix test_davix_functional_lib)

-add_executable(tester ${src_tester})
-target_link_libraries(tester libdavix test_davix_functional_lib)
+add_executable(davix-tester ${src_tester})
+target_link_libraries(davix-tester libdavix test_davix_functional_lib)
+
+install(TARGETS davix-tester
+        DESTINATION ${BIN_INSTALL_DIR}/)

 function(assert_not_empty var)
   if("${var}" STREQUAL "")
@@ -138,7 +139,7 @@
   set(arguments ${params})
   separate_arguments(arguments)

-  add_test(${testname} tester ${arguments} "--command" "${cmd}")
+  add_test(${testname} davix-tester ${arguments} "--command" "${cmd}")
 endfunction(runtest)

 function(preadvectest_single params ranges)
@@ -206,60 +207,56 @@
endfunction(test_azure) -include(${CMAKE_SOURCE_DIR}/credentials/creds.cmake) +include(${CMAKE_SOURCE_DIR}/credentials/creds.cmake OPTIONAL) ### DEPRECATED, to remove soon -function(test_dav_endpoint_ronly name cred) - - add_test(test_stat_session_${name} test_stat_session ${cred} ${name} ${name} ${name} ${name} ${name} ${name} ${name}) - add_test(test_opendir_${name} test_opendir ${name} ${cred}) - # add_test(test_opendir_c_${name} test_opendir_c ${name} ${cred}) - add_test(test_opendirpp_${name} test_opendirpp ${name} ${cred}) - # add_test(test_stat_c_${name} test_stat_c ${name} ${cred}) -endfunction(test_dav_endpoint_ronly name cred) - -function(test_dav_endpoint_rw name cred) - - add_test(test_stat_session_${name} test_stat_session ${cred} ${name} ${name} ${name} ${name} ${name} ${name} ${name}) - add_test(test_opendir_${name} test_opendir ${name} ${cred}) - add_test(test_opendirpp_${name} test_opendirpp ${name} ${cred}) - add_test(test_stat_auto_check_${name} test_stat_auto_check ${name} ${cred}) -endfunction(test_dav_endpoint_rw name cred) - -function(listdir_partial name size cred) - add_test(test_opendir_partial_${name} test_opendir_partial ${name} ${size} ${cred}) -endfunction(listdir_partial name size cred) - - -function(test_collection name cred) - add_test(test_directory_${name} test_directory ${name} ${cred}) -endfunction(test_collection name cred) - -function(test_valid_read_generic url cred) - add_test(test_valid_read_generic_${url} test_valid_read_generic_simple ${url} ${cred}) -endfunction(test_valid_read_generic url cred) - -function(test_valid_delete_all url cred) - add_test(test_auto_rmdir_unlink_delete_${url} test_auto_rmdir_unlink_delete ${url} ${cred}) -endfunction(test_valid_delete_all url cred) - - -function(test_valid_write_read_generic url cred) - add_test(test_valid_write_read_generic_simple_${url} test_valid_write_read_generic_simple ${url} ${cred}) - add_test(test_rw_fd_${url} test_rw_fd ${url} ${cred}) - add_test(test_valid_write_read_vector_simple_${url} test_valid_write_read_vector_simple ${url} ${cred}) -endfunction(test_valid_write_read_generic url cred) - -function(test_replica_listing_existing url cred) - add_test(tool_get_replicas_${url} tool_get_replicas ${url} ${cred}) -endfunction(test_replica_listing_existing url cred) - -function(test_rename url cred) - add_test(test_mv_${url} test_mv ${url} ${cred}) -endfunction(test_rename url cred) - -# include(ctest_functional.cmake) +# function(test_dav_endpoint_ronly name cred) -endif(FUNCTIONAL_TESTS) +# add_test(test_stat_session_${name} test_stat_session ${cred} ${name} ${name} ${name} ${name} ${name} ${name} ${name}) +# add_test(test_opendir_${name} test_opendir ${name} ${cred}) +# # add_test(test_opendir_c_${name} test_opendir_c ${name} ${cred}) +# add_test(test_opendirpp_${name} test_opendirpp ${name} ${cred}) +# # add_test(test_stat_c_${name} test_stat_c ${name} ${cred}) +# endfunction(test_dav_endpoint_ronly name cred) + +# function(test_dav_endpoint_rw name cred) + +# add_test(test_stat_session_${name} test_stat_session ${cred} ${name} ${name} ${name} ${name} ${name} ${name} ${name}) +# add_test(test_opendir_${name} test_opendir ${name} ${cred}) +# add_test(test_opendirpp_${name} test_opendirpp ${name} ${cred}) +# add_test(test_stat_auto_check_${name} test_stat_auto_check ${name} ${cred}) +# endfunction(test_dav_endpoint_rw name cred) + +# function(listdir_partial name size cred) +# add_test(test_opendir_partial_${name} test_opendir_partial ${name} ${size} ${cred}) +# endfunction(listdir_partial 
name size cred) + + +# function(test_collection name cred) +# add_test(test_directory_${name} test_directory ${name} ${cred}) +# endfunction(test_collection name cred) + +# function(test_valid_read_generic url cred) +# add_test(test_valid_read_generic_${url} test_valid_read_generic_simple ${url} ${cred}) +# endfunction(test_valid_read_generic url cred) + +# function(test_valid_delete_all url cred) +# add_test(test_auto_rmdir_unlink_delete_${url} test_auto_rmdir_unlink_delete ${url} ${cred}) +# endfunction(test_valid_delete_all url cred) + + +# function(test_valid_write_read_generic url cred) +# add_test(test_valid_write_read_generic_simple_${url} test_valid_write_read_generic_simple ${url} ${cred}) +# add_test(test_rw_fd_${url} test_rw_fd ${url} ${cred}) +# add_test(test_valid_write_read_vector_simple_${url} test_valid_write_read_vector_simple ${url} ${cred}) +# endfunction(test_valid_write_read_generic url cred) + +# function(test_replica_listing_existing url cred) +# add_test(tool_get_replicas_${url} tool_get_replicas ${url} ${cred}) +# endfunction(test_replica_listing_existing url cred) + +# function(test_rename url cred) +# add_test(test_mv_${url} test_mv ${url} ${cred}) +# endfunction(test_rename url cred) diff -Nru davix-0.7.1/test/functional/davix-tester.cpp davix-0.7.2/test/functional/davix-tester.cpp --- davix-0.7.1/test/functional/davix-tester.cpp 1970-01-01 00:00:00.000000000 +0000 +++ davix-0.7.2/test/functional/davix-tester.cpp 2019-02-15 13:02:34.000000000 +0000 @@ -0,0 +1,573 @@ +#include +#include +#include +#include "optionparser.h" +#include +#include + +#include +#include "davix_test_lib.h" +#include "utils/davix_s3_utils.hpp" + +using namespace Davix; + +#define SSTR(message) static_cast(std::ostringstream().flush() << message).str() +#define DECLARE_TEST() std::cout << " ----- Performing test: " << __FUNCTION__ << " on " << uri << std::endl + +#include "lorem-ipsum.h" // define std::string teststring +const std::string testfile("davix-testfile-"); + +#define ASSERT(assertion, msg) \ + if((assertion) == false) throw std::runtime_error( SSTR(__FILE__ << ":" << __LINE__ << " (" << __func__ << "): Assertion " << #assertion << " failed.\n" << msg)) + +void initialization(int argc, char** argv) { + std::cout << "Command: "; + for(int i = 0; i < argc; i++) { + std::cout << std::string(argv[i]) << " "; + } + std::cout << std::endl; +} + +std::vector split(const std::string str, const std::string delim) { + size_t prev = 0, cur; + std::vector results; + while((cur = str.find(delim, prev)) != std::string::npos) { + results.push_back(str.substr(prev, cur-prev)); + prev = cur + delim.size(); + } + std::string last = str.substr(prev, str.size()-prev); + if(last.size() != 0) + results.push_back(last); + + return results; +} + +namespace Auth { +enum Type {AWS, PROXY, AZURE, NONE, ILLEGAL}; +Type fromString(const std::string &str) { + if(str == "aws") + return Auth::AWS; + if(str == "proxy") + return Auth::PROXY; + if(str == "azure") + return Auth::AZURE; + if(str == "none") + return Auth::NONE; + + return Auth::ILLEGAL; +}; +}; + +static option::ArgStatus option_nonempty(const option::Option& option, bool msg) { + if (option.arg != 0 && option.arg[0] != 0) + return option::ARG_OK; + if (msg) std::cout << "Option '" << option << "' requires a non-empty argument" << std::endl; + return option::ARG_ILLEGAL; +} + + +namespace Opt { +enum Type { UNKNOWN, HELP, AUTH, S3ACCESSKEY, S3SECRETKEY, S3REGION, + AZUREKEY, S3ALTERNATE, CERT, URI, TRACE, COMMAND }; +} + +bool 
verify_options_sane(option::Parser &parse, std::vector &options) { + if(parse.error()) { + std::cout << "Parsing error" << std::endl; + return false; + } + + if(options[Opt::HELP]) { + return false; + } + + for(option::Option* opt = options[Opt::UNKNOWN]; opt; opt = opt->next()) { + std::cout << "Unknown option: " << std::string(opt->name,opt->namelen) << "\n"; + return false; + } + + for(int i = 0; i < parse.nonOptionsCount(); ++i) { + std::cout << "Non-option #" << i << ": " << parse.nonOption(i) << "\n"; + return false; + } + + return true; +} + +std::vector parse_args(int argc, char** argv) { + const option::Descriptor usage[] = { + {Opt::UNKNOWN, 0, "", "", option::Arg::None, "davix functional tests runner\n" + "USAGE: tester [options]\n\n" "Options:" }, + {Opt::HELP, 0, "", "help", option::Arg::None, " --help \tPrint usage and exit." }, + {Opt::AUTH, 0, "", "auth", option_nonempty, " --auth \t Authentication method" }, + {Opt::S3ACCESSKEY, 0, "", "s3accesskey", option_nonempty, " --s3accesskey S3 access key"}, + {Opt::S3SECRETKEY, 0, "", "s3secretkey", option_nonempty, " --s3secretkey S3 secret key"}, + {Opt::S3REGION, 0, "", "s3region", option_nonempty, "--s3region S3 region"}, + {Opt::AZUREKEY, 0, "", "azurekey", option_nonempty, "--azurekey Azure key"}, + {Opt::S3ALTERNATE, 0, "", "s3alternate", option::Arg::None, "--s3alternate"}, + {Opt::CERT, 0, "", "cert", option_nonempty, "--cert path to the proxy certificate to use"}, + {Opt::URI, 0, "", "uri", option_nonempty, "--uri uri to test against"}, + {Opt::TRACE, 0, "", "trace", option_nonempty, "--trace debug scope"}, + {Opt::COMMAND, 0, "", "command", option_nonempty, "--command test to run"}, + {Opt::UNKNOWN, 0, "", "",option::Arg::None, "\nExamples:\n" + " tester --auth proxy --uri https://storage/davix-tests --command makeCollection" }, + + {0,0,0,0,0,0} + }; + + option::Stats stats(usage, argc-1, argv+1); // TODO fix argc-1 + std::vector options(stats.options_max); + std::vector buffer(stats.buffer_max); + option::Parser parse(usage, argc-1, argv+1, &options[0], &buffer[0]); + + if(!verify_options_sane(parse, options)) { + option::printUsage(std::cout, usage); + exit(1); + } + + return options; +} + +std::string retrieve(const std::vector &options, const Opt::Type key) { + if(!options[key]) return ""; + return options[key].arg; +} + +void authentication(const std::vector &opts, const Auth::Type &auth, RequestParams ¶ms) { + if(auth == Auth::AWS) { + params.setProtocol(RequestProtocol::AwsS3); + + ASSERT(opts[Opt::S3ACCESSKEY] != NULL, "--s3accesskey is required when using s3"); + ASSERT(opts[Opt::S3SECRETKEY] != NULL, "--s3secretkey is required when using s3"); + + params.setAwsAuthorizationKeys(retrieve(opts, Opt::S3SECRETKEY), retrieve(opts, Opt::S3ACCESSKEY)); + if(opts[Opt::S3REGION]) params.setAwsRegion(retrieve(opts, Opt::S3REGION)); + if(opts[Opt::S3ALTERNATE]) params.setAwsAlternate(true); + } + else if(auth == Auth::PROXY) { + configure_grid_env("proxy", params); + } + else if(auth == Auth::AZURE) { + ASSERT(opts[Opt::AZUREKEY] != NULL, "--azurekey is required when using Azure"); + + params.setProtocol(RequestProtocol::Azure); + params.setAzureKey(retrieve(opts, Opt::AZUREKEY)); + } + else { + ASSERT(false, "unknown authentication method"); + } +} + +void depopulate(const RequestParams ¶ms, Uri uri, int nfiles) { + DECLARE_TEST(); + + Context context; + for(int i = 1; i <= nfiles; i++) { + Uri u(uri); + u.addPathSegment(SSTR(testfile << i)); + DavFile file(context, params, u); + file.deletion(¶ms); + std::cout << 
"File " << i << " deleted successfully." << std::endl; + } + std::cout << "All OK" << std::endl; +} + +std::string string_from_mode(mode_t mode){ + const char* rmask ="xwr"; + std::string str(10,'-'); + + str[0] = (S_ISDIR(mode))?'d':'-'; + for(size_t i=0; i < 9; ++i){ + str[9-i] = (( mode & (0x01 << i))?(rmask[i%3]):'-'); + } + return str; +} + +void statdir(const RequestParams ¶ms, Uri uri) { + DECLARE_TEST(); + Context context; + DavFile file(context, params, uri); + StatInfo info; + file.statInfo(¶ms, info); + std::cout << string_from_mode(info.mode) << std::endl; + + ASSERT(S_ISDIR(info.mode), "not a directory"); +} + +void makeCollection(const RequestParams ¶ms, Uri uri) { + DECLARE_TEST(); + + Context context; + DavFile file(context, params, uri); + file.makeCollection(¶ms); + + // make sure it is empty + DavFile::Iterator it = file.listCollection(¶ms); + ASSERT(it.name() == "" && !it.next(), "Newly created directory not empty!"); + + // do a stat, make sure it's a dir + statdir(params, uri); + + Uri u2 = uri; + u2.ensureTrailingSlash(); + statdir(params, u2); + + std::cout << "Done!" << std::endl; +} + +#define NEON_S3_SIGN_DURATION 3600 + +void statfileFromSignedURI(const RequestParams ¶ms, const Uri uri) { + DECLARE_TEST(); + + Uri signedURI(S3::signURI(params, "GET", uri, params.getHeaders(), NEON_S3_SIGN_DURATION)); + RequestParams params2(params); + + signedURI.httpizeProtocol(); + + params2.setProtocol(RequestProtocol::Http); + params2.setAwsAuthorizationKeys("", ""); + params2.setAwsRegion(""); + params2.setAwsToken(""); + + Context context; + DavFile file(context, params2, signedURI); + StatInfo info; + file.statInfo(¶ms2, info); + std::cout << string_from_mode(info.mode) << std::endl; + + ASSERT(! S_ISDIR(info.mode), "not a file"); +} + +/* stat a file, make sure it's a file */ +void statfile(const RequestParams ¶ms, const Uri uri) { + DECLARE_TEST(); + Context context; + DavFile file(context, params, uri); + StatInfo info; + file.statInfo(¶ms, info); + std::cout << string_from_mode(info.mode) << std::endl; + + ASSERT(! S_ISDIR(info.mode), "not a file"); + + if(!params.getAwsAutorizationKeys().first.empty()) { + // Now try statting through the signed URL + statfileFromSignedURI(params, uri); + } +} + +void movefile(const RequestParams ¶ms, const Uri uri) { + DECLARE_TEST(); + Context context; + Uri u1(uri); + Uri u2(uri); + + u1.addPathSegment(SSTR(testfile << 1)); + u2.addPathSegment(SSTR(testfile << 1 << "-moved")); + + DavFile source(context, params, u1); + DavFile dest(context, params, u2); + + source.move(¶ms, dest); + statfile(params, u2); + dest.move(¶ms, source); +} + +void populate(const RequestParams ¶ms, const Uri uri, const int nfiles) { + DECLARE_TEST(); + + Context context; + for(int i = 1; i <= nfiles; i++) { + Uri u(uri); + u.addPathSegment(SSTR(testfile << i)); + DavFile file(context, params, u); + file.put(NULL, testString.c_str(), testString.size()); + std::cout << "File " << i << " uploaded successfully." 
<< std::endl; + std::cout << u << std::endl; + + statfile(params, u); + } +} + +// count the number of files in folder +void countfiles(const RequestParams ¶ms, const Uri uri, const int nfiles) { + DECLARE_TEST(); + Context context; + DavFile file(context, params, uri); + DavFile::Iterator it = file.listCollection(¶ms); + int i = 0; + + do { + i++; + } while(it.next()); + + ASSERT(i == nfiles, "wrong number of files; expected " << nfiles << ", found " << i); + std::cout << "All OK" << std::endl; +} + +// confirm that the files listed are the exact same ones uploaded during a populate test +void listing(const RequestParams ¶ms, const Uri uri, const int nfiles) { + DECLARE_TEST(); + int hits[nfiles+1]; + for(int i = 0; i <= nfiles; i++) hits[i] = 0; + + Context context; + DavFile file(context, params, uri); + DavFile::Iterator it = file.listCollection(¶ms); + + int i = 0; + do { + i++; + std::string name = it.name(); + std::cout << "Found " << name << std::endl; + + // make sure the filenames are the same as the ones we uploaded + ASSERT(name.size() > testfile.size(), "Unexpected filename: " << name); + std::string part1 = name.substr(0, testfile.size()); + std::string part2 = name.substr(testfile.size(), name.size()-1); + + ASSERT(part1 == testfile, "Unexpected filename: " << part1); + int num = atoi(part2.c_str()); + ASSERT(num > 0, "Unexpected file number: " << num); + ASSERT(num <= nfiles, "Unexpected file number: " << num); + hits[num]++; + } while(it.next()); + + // count all hits to make sure all have exactly one + ASSERT(i == nfiles, "wrong number of files; expected " << nfiles << ", found " << i); + for(int i = 1; i <= nfiles; i++) + ASSERT(hits[i] == 1, "hits check for file" << i << " failed. Expected 1, found " << hits[i]); + + std::cout << "All OK" << std::endl; +} + +/* upload a file and move it around */ +void putMoveDelete(const RequestParams ¶ms, const Uri uri) { + DECLARE_TEST(); + Uri u = uri; + Uri u2 = uri; + u.addPathSegment(SSTR(testfile << "put-move-delete")); + u2.addPathSegment(SSTR(testfile << "put-move-delete-MOVED")); + + Context context; + DavFile file(context, params, u); + file.put(¶ms, testString.c_str(), testString.size()); + + DavFile movedFile(context, params, u2); + file.move(¶ms, movedFile); + + movedFile.deletion(¶ms); + std::cout << "All OK" << std::endl; +} + +void remove(const RequestParams ¶ms, const Uri uri) { + DECLARE_TEST(); + + // a very dangerous test.. Make sure that uri at least + // contains "davix-test" in its path. + bool safePath = uri.getPath().find("davix-test") != std::string::npos; + ASSERT(safePath, "Uri given does not contain the string 'davix-test'. 
Refusing to perform delete operation for safety."); + + Context context; + DavFile file(context, params, uri); + file.deletion(¶ms); +} + +void preadvec(const RequestParams ¶ms, const Uri uri, const std::string str_ranges, std::vector options) { + DECLARE_TEST(); + Uri u = uri; + + std::string filename = SSTR(testfile << 1); + + bool noappend = false; + for(std::vector::iterator it = options.begin(); it != options.end(); it++) { + if(*it == "nomulti") { + u.addFragmentParam("multirange", "false"); + } + else if(*it == "noappend") { + noappend = true; + } + else if(it->find("nconnections=", 0) == 0) { + int nconnections = atoi(it->c_str() + 13); + ASSERT(nconnections > 0, "Unable to parse nconnections"); + u.addFragmentParam("nconnections", SSTR(nconnections)); + } + else if(it->find("mergewindow=", 0) == 0) { + int mergewindow = atoi(it->c_str() + 12); + ASSERT(mergewindow > 0, "Unable to parse mergewindow"); + u.addFragmentParam("mergewindow", SSTR(mergewindow)); + } + else { + ASSERT(false, "Unknown option to preadvec: " << *it); + } + } + + if(!noappend) { + u.addPathSegment(filename); + } + + std::vector ranges = split(str_ranges, ","); + DavIOVecInput inVec[ranges.size()]; + DavIOVecOuput outVec[ranges.size()]; + + for(size_t i = 0; i < ranges.size(); i++) { + std::vector parts = split(ranges[i], "-"); + ASSERT(parts.size() == 2, "Cannot parse range"); + dav_off_t start = atoi(parts[0].c_str()); + dav_off_t end = atoi(parts[1].c_str()); + + dav_ssize_t size = end - start + 1; + + inVec[i].diov_buffer = new char[size]; + inVec[i].diov_size = size; + inVec[i].diov_offset = start; + + std::cout << "Adding range: " << start << "-" << end << std::endl; + } + + Context context; + DavFile file(context, params, u); + DavixError *err = NULL; + file.readPartialBufferVec(¶ms, inVec, outVec, ranges.size(), &err); + + for(size_t i = 0; i < ranges.size(); i++) { + std::string chunk( (char*) outVec[i].diov_buffer, outVec[i].diov_size); + std::cout << "Chunk: " << chunk << std::endl; + + ASSERT(chunk.size() == inVec[i].diov_size, "unexpected chunk size"); + if(filename == SSTR(testfile << 1)) { + ASSERT(chunk == testString.substr(inVec[i].diov_offset, inVec[i].diov_size), "wrong chunk contents"); + } + } + std::cout << "All OK" << std::endl; +} + +void detectwebdav(const RequestParams ¶ms, const Uri uri, bool result) { + DECLARE_TEST(); + + Context context; + DavixError *err = NULL; + WebdavSupport::Type res = detect_webdav_support(context, params, uri, &err); + if(result) { + ASSERT(res == WebdavSupport::YES, ""); + } + else if(!result) { + ASSERT(res == WebdavSupport::NO || res == WebdavSupport::UNKNOWN, ""); + } + else { + ASSERT(false, "Unknown result"); + } +} + +void assert_args(const std::vector &cmd, int nargs) { + ASSERT(cmd.size() != 0, "assert_args called with empty command!"); + ASSERT(cmd.size() == nargs+1, "Wrong number of arguments to " << cmd[0] << ": " << cmd.size()-1 << ", expected: " << nargs); +} + +void run(int argc, char** argv) { + RequestParams params; + params.setOperationRetry(0); + + std::vector opts = parse_args(argc, argv); + Auth::Type auth = Auth::fromString(retrieve(opts, Opt::AUTH)); + + ASSERT(opts[Opt::COMMAND] != NULL, "--command is necessary"); + ASSERT(opts[Opt::URI] != NULL, "--uri is necessary"); + ASSERT(auth != Auth::ILLEGAL, "--auth is necessary, and can only be one of aws, proxy, azure, none"); + + if(opts[Opt::TRACE]) { + std::string scope = retrieve(opts, Opt::TRACE); + setLogScope(0); + setLogScope(scope); + setLogLevel(DAVIX_LOG_TRACE); + } + + 
std::vector cmd = split(retrieve(opts, Opt::COMMAND), " "); + Uri uri = Uri(retrieve(opts, Opt::URI)); + authentication(opts, auth, params); + + if(cmd[0] == "makeCollection") { + assert_args(cmd, 0); + makeCollection(params, uri); + } + else if(cmd[0] == "populate") { + assert_args(cmd, 1); + populate(params, uri, atoi(cmd[1].c_str())); + } + else if(cmd[0] == "remove") { + assert_args(cmd, 0); + ASSERT(cmd.size() == 1, "Wrong number of arguments to remove"); + remove(params, uri); + } + else if(cmd[0] == "listing") { + assert_args(cmd, 1); + listing(params, uri, atoi(cmd[1].c_str())); + } + else if(cmd[0] == "putMoveDelete") { + assert_args(cmd, 0); + ASSERT(cmd.size() == 1, "Wrong number of arguments to putMoveDelete"); + putMoveDelete(params, uri); + } + else if(cmd[0] == "depopulate") { + assert_args(cmd, 1); + depopulate(params, uri, atoi(cmd[1].c_str())); + } + else if(cmd[0] == "countfiles") { + assert_args(cmd, 1); + countfiles(params, uri, atoi(cmd[1].c_str())); + } + else if(cmd[0] == "statdir") { + assert_args(cmd, 0); + statdir(params, uri); + } + else if(cmd[0] == "statfile") { + assert_args(cmd, 0); + statfile(params, uri); + } + else if(cmd[0] == "movefile") { + assert_args(cmd, 0); + movefile(params, uri); + } + else if(cmd[0] == "preadvec") { + if(cmd.size() == 2) { + preadvec(params, uri, cmd[1], std::vector()); + } + else if(cmd.size() == 3) { + preadvec(params, uri, cmd[1], split(cmd[2], ",")); + } + else { + ASSERT(false, "Wrong number of arguments to preadvec"); + } + } + else if(cmd[0] == "detectwebdav") { + assert_args(cmd, 1); + bool expected = false; + if(cmd[1] == "1") { + expected = true; + } + else if(cmd[1] == "0") { + expected = false; + } + else { + ASSERT(false, "Unexpected input for expected result"); + } + + detectwebdav(params, uri, expected); + } + else { + ASSERT(false, "Unknown command: " << cmd[0]); + } +} + +int main(int argc, char** argv) { + try { + initialization(argc, argv); + run(argc, argv); + } + catch(std::exception &e) { + std::cout << e.what() << std::endl; + return 1; + } + + return 0; +} diff -Nru davix-0.7.1/test/functional/tester.cpp davix-0.7.2/test/functional/tester.cpp --- davix-0.7.1/test/functional/tester.cpp 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/test/functional/tester.cpp 1970-01-01 00:00:00.000000000 +0000 @@ -1,538 +0,0 @@ -#include -#include -#include -#include "optionparser.h" -#include -#include - -#include -#include "davix_test_lib.h" - -using namespace Davix; - -#define SSTR(message) static_cast(std::ostringstream().flush() << message).str() -#define DECLARE_TEST() std::cout << " ----- Performing test: " << __FUNCTION__ << " on " << uri << std::endl - -#include "lorem-ipsum.h" // define std::string teststring -const std::string testfile("davix-testfile-"); - -#define ASSERT(assertion, msg) \ - if((assertion) == false) throw std::runtime_error( SSTR(__FILE__ << ":" << __LINE__ << " (" << __func__ << "): Assertion " << #assertion << " failed.\n" << msg)) - -void initialization() { -} - -std::vector split(const std::string str, const std::string delim) { - size_t prev = 0, cur; - std::vector results; - while((cur = str.find(delim, prev)) != std::string::npos) { - results.push_back(str.substr(prev, cur-prev)); - prev = cur + delim.size(); - } - std::string last = str.substr(prev, str.size()-prev); - if(last.size() != 0) - results.push_back(last); - - return results; -} - -namespace Auth { -enum Type {AWS, PROXY, AZURE, NONE, ILLEGAL}; -Type fromString(const std::string &str) { - if(str == "aws") - return 
Auth::AWS; - if(str == "proxy") - return Auth::PROXY; - if(str == "azure") - return Auth::AZURE; - if(str == "none") - return Auth::NONE; - - return Auth::ILLEGAL; -}; -}; - -static option::ArgStatus option_nonempty(const option::Option& option, bool msg) { - if (option.arg != 0 && option.arg[0] != 0) - return option::ARG_OK; - if (msg) std::cout << "Option '" << option << "' requires a non-empty argument" << std::endl; - return option::ARG_ILLEGAL; -} - - -namespace Opt { -enum Type { UNKNOWN, HELP, AUTH, S3ACCESSKEY, S3SECRETKEY, S3REGION, - AZUREKEY, S3ALTERNATE, CERT, URI, TRACE, COMMAND }; -} - -bool verify_options_sane(option::Parser &parse, std::vector &options) { - if(parse.error()) { - std::cout << "Parsing error" << std::endl; - return false; - } - - if(options[Opt::HELP]) { - return false; - } - - for(option::Option* opt = options[Opt::UNKNOWN]; opt; opt = opt->next()) { - std::cout << "Unknown option: " << std::string(opt->name,opt->namelen) << "\n"; - return false; - } - - for(int i = 0; i < parse.nonOptionsCount(); ++i) { - std::cout << "Non-option #" << i << ": " << parse.nonOption(i) << "\n"; - return false; - } - - return true; -} - -std::vector parse_args(int argc, char** argv) { - const option::Descriptor usage[] = { - {Opt::UNKNOWN, 0, "", "", option::Arg::None, "davix functional tests runner\n" - "USAGE: tester [options]\n\n" "Options:" }, - {Opt::HELP, 0, "", "help", option::Arg::None, " --help \tPrint usage and exit." }, - {Opt::AUTH, 0, "", "auth", option_nonempty, " --auth \t Authentication method" }, - {Opt::S3ACCESSKEY, 0, "", "s3accesskey", option_nonempty, " --s3accesskey S3 access key"}, - {Opt::S3SECRETKEY, 0, "", "s3secretkey", option_nonempty, " --s3secretkey S3 secret key"}, - {Opt::S3REGION, 0, "", "s3region", option_nonempty, "--s3region S3 region"}, - {Opt::AZUREKEY, 0, "", "azurekey", option_nonempty, "--azurekey Azure key"}, - {Opt::S3ALTERNATE, 0, "", "s3alternate", option::Arg::None, "--s3alternate"}, - {Opt::CERT, 0, "", "cert", option_nonempty, "--cert path to the proxy certificate to use"}, - {Opt::URI, 0, "", "uri", option_nonempty, "--uri uri to test against"}, - {Opt::TRACE, 0, "", "trace", option_nonempty, "--trace debug scope"}, - {Opt::COMMAND, 0, "", "command", option_nonempty, "--command test to run"}, - {Opt::UNKNOWN, 0, "", "",option::Arg::None, "\nExamples:\n" - " tester --auth proxy --uri https://storage/davix-tests --command makeCollection" }, - - {0,0,0,0,0,0} - }; - - option::Stats stats(usage, argc-1, argv+1); // TODO fix argc-1 - std::vector options(stats.options_max); - std::vector buffer(stats.buffer_max); - option::Parser parse(usage, argc-1, argv+1, &options[0], &buffer[0]); - - if(!verify_options_sane(parse, options)) { - option::printUsage(std::cout, usage); - exit(1); - } - - return options; -} - -std::string retrieve(const std::vector &options, const Opt::Type key) { - if(!options[key]) return ""; - return options[key].arg; -} - -void authentication(const std::vector &opts, const Auth::Type &auth, RequestParams ¶ms) { - if(auth == Auth::AWS) { - params.setProtocol(RequestProtocol::AwsS3); - - ASSERT(opts[Opt::S3ACCESSKEY] != NULL, "--s3accesskey is required when using s3"); - ASSERT(opts[Opt::S3SECRETKEY] != NULL, "--s3secretkey is required when using s3"); - - params.setAwsAuthorizationKeys(retrieve(opts, Opt::S3SECRETKEY), retrieve(opts, Opt::S3ACCESSKEY)); - if(opts[Opt::S3REGION]) params.setAwsRegion(retrieve(opts, Opt::S3REGION)); - if(opts[Opt::S3ALTERNATE]) params.setAwsAlternate(true); - } - else if(auth == 
Auth::PROXY) { - configure_grid_env("proxy", params); - } - else if(auth == Auth::AZURE) { - ASSERT(opts[Opt::AZUREKEY] != NULL, "--azurekey is required when using Azure"); - - params.setProtocol(RequestProtocol::Azure); - params.setAzureKey(retrieve(opts, Opt::AZUREKEY)); - } - else { - ASSERT(false, "unknown authentication method"); - } -} - -void depopulate(const RequestParams ¶ms, Uri uri, int nfiles) { - DECLARE_TEST(); - - Context context; - for(int i = 1; i <= nfiles; i++) { - Uri u(uri); - u.addPathSegment(SSTR(testfile << i)); - DavFile file(context, params, u); - file.deletion(¶ms); - std::cout << "File " << i << " deleted successfully." << std::endl; - } - std::cout << "All OK" << std::endl; -} - -std::string string_from_mode(mode_t mode){ - const char* rmask ="xwr"; - std::string str(10,'-'); - - str[0] = (S_ISDIR(mode))?'d':'-'; - for(size_t i=0; i < 9; ++i){ - str[9-i] = (( mode & (0x01 << i))?(rmask[i%3]):'-'); - } - return str; -} - -void statdir(const RequestParams ¶ms, Uri uri) { - DECLARE_TEST(); - Context context; - DavFile file(context, params, uri); - StatInfo info; - file.statInfo(¶ms, info); - std::cout << string_from_mode(info.mode) << std::endl; - - ASSERT(S_ISDIR(info.mode), "not a directory"); -} - -void makeCollection(const RequestParams ¶ms, Uri uri) { - DECLARE_TEST(); - - Context context; - DavFile file(context, params, uri); - file.makeCollection(¶ms); - - // make sure it is empty - DavFile::Iterator it = file.listCollection(¶ms); - ASSERT(it.name() == "" && !it.next(), "Newly created directory not empty!"); - - // do a stat, make sure it's a dir - statdir(params, uri); - - Uri u2 = uri; - u2.ensureTrailingSlash(); - statdir(params, u2); - - std::cout << "Done!" << std::endl; -} - -/* stat a file, make sure it's a file */ -void statfile(const RequestParams ¶ms, const Uri uri) { - DECLARE_TEST(); - Context context; - DavFile file(context, params, uri); - StatInfo info; - file.statInfo(¶ms, info); - std::cout << string_from_mode(info.mode) << std::endl; - - ASSERT(! S_ISDIR(info.mode), "not a file"); -} - -void movefile(const RequestParams ¶ms, const Uri uri) { - DECLARE_TEST(); - Context context; - Uri u1(uri); - Uri u2(uri); - - u1.addPathSegment(SSTR(testfile << 1)); - u2.addPathSegment(SSTR(testfile << 1 << "-moved")); - - DavFile source(context, params, u1); - DavFile dest(context, params, u2); - - source.move(¶ms, dest); - statfile(params, u2); - dest.move(¶ms, source); -} - -void populate(const RequestParams ¶ms, const Uri uri, const int nfiles) { - DECLARE_TEST(); - - Context context; - for(int i = 1; i <= nfiles; i++) { - Uri u(uri); - u.addPathSegment(SSTR(testfile << i)); - DavFile file(context, params, u); - file.put(NULL, testString.c_str(), testString.size()); - std::cout << "File " << i << " uploaded successfully." 
<< std::endl; - std::cout << u << std::endl; - - statfile(params, u); - } -} - -// count the number of files in folder -void countfiles(const RequestParams ¶ms, const Uri uri, const int nfiles) { - DECLARE_TEST(); - Context context; - DavFile file(context, params, uri); - DavFile::Iterator it = file.listCollection(¶ms); - int i = 0; - - do { - i++; - } while(it.next()); - - ASSERT(i == nfiles, "wrong number of files; expected " << nfiles << ", found " << i); - std::cout << "All OK" << std::endl; -} - -// confirm that the files listed are the exact same ones uploaded during a populate test -void listing(const RequestParams ¶ms, const Uri uri, const int nfiles) { - DECLARE_TEST(); - int hits[nfiles+1]; - for(int i = 0; i <= nfiles; i++) hits[i] = 0; - - Context context; - DavFile file(context, params, uri); - DavFile::Iterator it = file.listCollection(¶ms); - - int i = 0; - do { - i++; - std::string name = it.name(); - std::cout << "Found " << name << std::endl; - - // make sure the filenames are the same as the ones we uploaded - ASSERT(name.size() > testfile.size(), "Unexpected filename: " << name); - std::string part1 = name.substr(0, testfile.size()); - std::string part2 = name.substr(testfile.size(), name.size()-1); - - ASSERT(part1 == testfile, "Unexpected filename: " << part1); - int num = atoi(part2.c_str()); - ASSERT(num > 0, "Unexpected file number: " << num); - ASSERT(num <= nfiles, "Unexpected file number: " << num); - hits[num]++; - } while(it.next()); - - // count all hits to make sure all have exactly one - ASSERT(i == nfiles, "wrong number of files; expected " << nfiles << ", found " << i); - for(int i = 1; i <= nfiles; i++) - ASSERT(hits[i] == 1, "hits check for file" << i << " failed. Expected 1, found " << hits[i]); - - std::cout << "All OK" << std::endl; -} - -/* upload a file and move it around */ -void putMoveDelete(const RequestParams ¶ms, const Uri uri) { - DECLARE_TEST(); - Uri u = uri; - Uri u2 = uri; - u.addPathSegment(SSTR(testfile << "put-move-delete")); - u2.addPathSegment(SSTR(testfile << "put-move-delete-MOVED")); - - Context context; - DavFile file(context, params, u); - file.put(¶ms, testString.c_str(), testString.size()); - - DavFile movedFile(context, params, u2); - file.move(¶ms, movedFile); - - movedFile.deletion(¶ms); - std::cout << "All OK" << std::endl; -} - -void remove(const RequestParams ¶ms, const Uri uri) { - DECLARE_TEST(); - - // a very dangerous test.. Make sure that uri at least - // contains "davix-test" in its path. - bool safePath = uri.getPath().find("davix-test") != std::string::npos; - ASSERT(safePath, "Uri given does not contain the string 'davix-test'. 
Refusing to perform delete operation for safety."); - - Context context; - DavFile file(context, params, uri); - file.deletion(¶ms); -} - -void preadvec(const RequestParams ¶ms, const Uri uri, const std::string str_ranges, std::vector options) { - DECLARE_TEST(); - Uri u = uri; - - std::string filename = SSTR(testfile << 1); - - bool noappend = false; - for(std::vector::iterator it = options.begin(); it != options.end(); it++) { - if(*it == "nomulti") { - u.addFragmentParam("multirange", "false"); - } - else if(*it == "noappend") { - noappend = true; - } - else if(it->find("nconnections=", 0) == 0) { - int nconnections = atoi(it->c_str() + 13); - ASSERT(nconnections > 0, "Unable to parse nconnections"); - u.addFragmentParam("nconnections", SSTR(nconnections)); - } - else if(it->find("mergewindow=", 0) == 0) { - int mergewindow = atoi(it->c_str() + 12); - ASSERT(mergewindow > 0, "Unable to parse mergewindow"); - u.addFragmentParam("mergewindow", SSTR(mergewindow)); - } - else { - ASSERT(false, "Unknown option to preadvec: " << *it); - } - } - - if(!noappend) { - u.addPathSegment(filename); - } - - std::vector ranges = split(str_ranges, ","); - DavIOVecInput inVec[ranges.size()]; - DavIOVecOuput outVec[ranges.size()]; - - for(size_t i = 0; i < ranges.size(); i++) { - std::vector parts = split(ranges[i], "-"); - ASSERT(parts.size() == 2, "Cannot parse range"); - dav_off_t start = atoi(parts[0].c_str()); - dav_off_t end = atoi(parts[1].c_str()); - - dav_ssize_t size = end - start + 1; - - inVec[i].diov_buffer = new char[size]; - inVec[i].diov_size = size; - inVec[i].diov_offset = start; - - std::cout << "Adding range: " << start << "-" << end << std::endl; - } - - Context context; - DavFile file(context, params, u); - DavixError *err = NULL; - file.readPartialBufferVec(¶ms, inVec, outVec, ranges.size(), &err); - - for(size_t i = 0; i < ranges.size(); i++) { - std::string chunk( (char*) outVec[i].diov_buffer, outVec[i].diov_size); - std::cout << "Chunk: " << chunk << std::endl; - - ASSERT(chunk.size() == inVec[i].diov_size, "unexpected chunk size"); - if(filename == SSTR(testfile << 1)) { - ASSERT(chunk == testString.substr(inVec[i].diov_offset, inVec[i].diov_size), "wrong chunk contents"); - } - } - std::cout << "All OK" << std::endl; -} - -void detectwebdav(const RequestParams ¶ms, const Uri uri, bool result) { - DECLARE_TEST(); - - Context context; - DavixError *err = NULL; - WebdavSupport::Type res = detect_webdav_support(context, params, uri, &err); - if(result) { - ASSERT(res == WebdavSupport::YES, ""); - } - else if(!result) { - ASSERT(res == WebdavSupport::NO || res == WebdavSupport::UNKNOWN, ""); - } - else { - ASSERT(false, "Unknown result"); - } -} - -void assert_args(const std::vector &cmd, int nargs) { - ASSERT(cmd.size() != 0, "assert_args called with empty command!"); - ASSERT(cmd.size() == nargs+1, "Wrong number of arguments to " << cmd[0] << ": " << cmd.size()-1 << ", expected: " << nargs); -} - -void run(int argc, char** argv) { - RequestParams params; - params.setOperationRetry(0); - - std::vector opts = parse_args(argc, argv); - Auth::Type auth = Auth::fromString(retrieve(opts, Opt::AUTH)); - - ASSERT(opts[Opt::COMMAND] != NULL, "--command is necessary"); - ASSERT(opts[Opt::URI] != NULL, "--uri is necessary"); - ASSERT(auth != Auth::ILLEGAL, "--auth is necessary, and can only be one of aws, proxy, azure, none"); - - if(opts[Opt::TRACE]) { - std::string scope = retrieve(opts, Opt::TRACE); - setLogScope(0); - setLogScope(scope); - setLogLevel(DAVIX_LOG_TRACE); - } - - 
std::vector cmd = split(retrieve(opts, Opt::COMMAND), " "); - Uri uri = Uri(retrieve(opts, Opt::URI)); - authentication(opts, auth, params); - - if(cmd[0] == "makeCollection") { - assert_args(cmd, 0); - makeCollection(params, uri); - } - else if(cmd[0] == "populate") { - assert_args(cmd, 1); - populate(params, uri, atoi(cmd[1].c_str())); - } - else if(cmd[0] == "remove") { - assert_args(cmd, 0); - ASSERT(cmd.size() == 1, "Wrong number of arguments to remove"); - remove(params, uri); - } - else if(cmd[0] == "listing") { - assert_args(cmd, 1); - listing(params, uri, atoi(cmd[1].c_str())); - } - else if(cmd[0] == "putMoveDelete") { - assert_args(cmd, 0); - ASSERT(cmd.size() == 1, "Wrong number of arguments to putMoveDelete"); - putMoveDelete(params, uri); - } - else if(cmd[0] == "depopulate") { - assert_args(cmd, 1); - depopulate(params, uri, atoi(cmd[1].c_str())); - } - else if(cmd[0] == "countfiles") { - assert_args(cmd, 1); - countfiles(params, uri, atoi(cmd[1].c_str())); - } - else if(cmd[0] == "statdir") { - assert_args(cmd, 0); - statdir(params, uri); - } - else if(cmd[0] == "statfile") { - assert_args(cmd, 0); - statfile(params, uri); - } - else if(cmd[0] == "movefile") { - assert_args(cmd, 0); - movefile(params, uri); - } - else if(cmd[0] == "preadvec") { - if(cmd.size() == 2) { - preadvec(params, uri, cmd[1], std::vector()); - } - else if(cmd.size() == 3) { - preadvec(params, uri, cmd[1], split(cmd[2], ",")); - } - else { - ASSERT(false, "Wrong number of arguments to preadvec"); - } - } - else if(cmd[0] == "detectwebdav") { - assert_args(cmd, 1); - bool expected = false; - if(cmd[1] == "1") { - expected = true; - } - else if(cmd[1] == "0") { - expected = false; - } - else { - ASSERT(false, "Unexpected input for expected result"); - } - - detectwebdav(params, uri, expected); - } - else { - ASSERT(false, "Unknown command: " << cmd[0]); - } -} - -int main(int argc, char** argv) { - try { - initialization(); - run(argc, argv); - } - catch(std::exception &e) { - std::cout << e.what() << std::endl; - return 1; - } - - return 0; -} diff -Nru davix-0.7.1/test/run-tests.sh davix-0.7.2/test/run-tests.sh --- davix-0.7.1/test/run-tests.sh 2018-10-24 08:11:47.000000000 +0000 +++ davix-0.7.2/test/run-tests.sh 2019-02-15 13:02:34.000000000 +0000 @@ -20,7 +20,7 @@ rm -rf build mkdir build cd build -cmake -DFUNCTIONAL_TESTS=TRUE -DENABLE_THIRD_PARTY_COPY=TRUE .. +cmake -DENABLE_THIRD_PARTY_COPY=TRUE .. 
 make -j $CORES
 # make abi-check
 (ctest --no-compress-output -T Test || true)
diff -Nru davix-0.7.1/test/unit/digest-extractor.cpp davix-0.7.2/test/unit/digest-extractor.cpp
--- davix-0.7.1/test/unit/digest-extractor.cpp	2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/test/unit/digest-extractor.cpp	2019-02-15 13:02:34.000000000 +0000
@@ -35,3 +35,15 @@
     ASSERT_TRUE(ChecksumExtractor::extractChecksum(v1, "frob", output));
     ASSERT_EQ(output, "cbda22bcb41ab0151b438589aa4637e2");
 }
+
+TEST(ChecksumExtractor, MultipleReturnedChecksums) {
+    std::string output;
+
+    HeaderVec v1;
+    v1.emplace_back("Digest", "adler32=10cf712f,md5=+Tvja0I4Jp7AhrZfWO7C3A==");
+    ASSERT_TRUE(ChecksumExtractor::extractChecksum(v1, "adler32", output));
+    ASSERT_EQ(output, "10cf712f");
+
+    ASSERT_TRUE(ChecksumExtractor::extractChecksum(v1, "md5", output));
+    ASSERT_EQ(output, "f93be36b4238269ec086b65f58eec2dc");
+}
diff -Nru davix-0.7.1/test/unit/session-factory.cpp davix-0.7.2/test/unit/session-factory.cpp
--- davix-0.7.1/test/unit/session-factory.cpp	2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/test/unit/session-factory.cpp	2019-02-15 13:02:34.000000000 +0000
@@ -1,6 +1,7 @@
 #include
 #include
 #include
+#include

 using namespace Davix;

@@ -15,7 +16,7 @@
     Uri u_sec("https://higgs.boson/is/watchingus");
     Uri u_port("http://higgs.boson:8668/is/watchingus");

-    NEONSessionFactory f;
+    RedirectionResolver f(true);
     f.addRedirection("GET", u, dest);
     ASSERT_TRUE(f.redirectionResolve("GET", u) == dest);
     ASSERT_TRUE(f.redirectionResolve("GET", u_sec).get() == NULL);
@@ -44,7 +45,7 @@
     std::shared_ptr url3(new Uri("http://server2.com:8080/dsffds/sfdfdsfsdfdsfdsfds"));
     std::shared_ptr url4(new Uri("http://server3.com/dsffds/fsdaaaaa"));

-    NEONSessionFactory f;
+    RedirectionResolver f(true);
     f.addRedirection("GET", u, url1);
     f.addRedirection("GET",*url1, url2);
     f.addRedirection("GET", *url2, url3);
@@ -68,7 +69,7 @@
     std::shared_ptr url1(new Uri("http://sffsdfsd.com/dsffds/fsdfsdsdf"));
     std::shared_ptr url2(new Uri("http://server2.com/dsffds/sfdfdsfsdfdsfdsfds"));

-    NEONSessionFactory f;
+    RedirectionResolver f(true);
     f.addRedirection("GET", u, url1);
     ASSERT_TRUE(f.redirectionResolve("GET", u) == url1);
diff -Nru davix-0.7.1/version.cmake.in davix-0.7.2/version.cmake.in
--- davix-0.7.1/version.cmake.in	2018-10-24 08:11:47.000000000 +0000
+++ davix-0.7.2/version.cmake.in	2019-02-15 13:02:34.000000000 +0000
@@ -3,6 +3,5 @@
 set(VERSION_PATCH @VERSION_PATCH@)
 set(VERSION_MINIPATCH @VERSION_MINIPATCH@)
 set(VERSION_FULL @VERSION_FULL@)
-set(VERSION_TAG "")

 message("Configuring cmake for davix version: ${VERSION_FULL}")
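
A minimal usage sketch of the new single-header-line ChecksumExtractor::extractChecksum() overload added by this patch, mirroring the MultipleReturnedChecksums unit test above. The overload's signature and behavior are taken from the patch; the include path and the explicit Davix:: qualification are assumptions for illustration only.

    #include <iostream>
    #include <string>
    #include <davix/utils/checksum_extractor.hpp>   // assumed header location

    int main() {
        // One Digest header value carrying several checksums, as in the unit test above.
        const std::string digest = "adler32=10cf712f,md5=+Tvja0I4Jp7AhrZfWO7C3A==";
        std::string checksum;

        // adler32 values are returned verbatim by the extractor.
        if (Davix::ChecksumExtractor::extractChecksum(digest, "adler32", checksum)) {
            std::cout << "adler32: " << checksum << std::endl;   // expected: 10cf712f
        }

        // md5 values are base64-decoded and re-encoded as hex.
        if (Davix::ChecksumExtractor::extractChecksum(digest, "md5", checksum)) {
            std::cout << "md5: " << checksum << std::endl;       // expected: f93be36b4238269ec086b65f58eec2dc
        }
        return 0;
    }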