diff -Nru kombu-4.0.2+really4.0.2+dfsg/AUTHORS kombu-4.1.0/AUTHORS --- kombu-4.0.2+really4.0.2+dfsg/AUTHORS 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/AUTHORS 2017-07-20 16:16:23.000000000 +0000 @@ -14,6 +14,7 @@ Andrey Antukh Andrii Kostenko Andy McCurdy +Anthony Lukach Antoine Legrand Anton Gyllenberg Ask Solem @@ -37,6 +38,7 @@ David Strauss David Ziegler Dhananjay Nene +Dima Kurguzov Dmitry Malinovsky Dustin J. Mitchell Emmanuel Cazenave @@ -108,6 +110,7 @@ Randy Barlow Raphael Michel Rob Ottaway +Robert Kopaczewski Roger Hu Rumyana Neykova Rune Halvorsen diff -Nru kombu-4.0.2+really4.0.2+dfsg/Changelog kombu-4.1.0/Changelog --- kombu-4.0.2+really4.0.2+dfsg/Changelog 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/Changelog 2017-07-20 16:16:23.000000000 +0000 @@ -4,6 +4,119 @@ Change history ================ +.. _version-4.1.0: + +4.1.0 +===== +:release-date: 2017-07-17 04:45 P.M MST +:release-by: Anthony Lukach + +- SQS: Added support for long-polling on all supported queries. Fixed bug + causing error on parsing responses with no retrieved messages from SQS. + + Contributed by **Anthony Lukach**. + +- Async hub: Fixed potential infinite loop while performing todo tasks + (Issue celery/celery#3712). + +- Qpid: Fixed bug where messages could have duplicate ``delivery_tag`` + (Issue #563). + + Contributed by **bmbouter**. + +- MongoDB: Fixed problem with using ``readPreference`` option at pymongo 3.x. + + Contributed by **Mikhail Elovskikh**. + +- Re-added support for :pypi:``SQLAlchemy`` + + Contributed by **Amin Ghadersohi**. + +- SQS: Fixed bug where hostname would default to ``localhost`` if not specified + in settings. + + Contributed by **Anthony Lukach**. + +- Redis: Added support for reading password from transport URL (Issue #677). + + Contributed by **George Psarakis**. + +- RabbitMQ: Ensured safer encoding of queue arguments. + + Contributed by **Robert Kopaczewski**. + +- Added fallback to :func:``uuid.uuid5`` in :func:``generate_oid`` if + :func:``uuid.uuid3`` fails. + + Contributed by **Bill Nottingham**. + +- Fixed race condition and innacurrate timeouts for + :class:``kombu.simple.SimpleBase`` (Issue #720). + + Contributed by **c-nichols**. + +- Zookeeper: Fixed last chroot character trimming + + Contributed by **Dima Kurguzov**. + +- RabbitMQ: Fixed bug causing an exception when attempting to close an + already-closed connection (Issue #690). + + Contributed by **eavictor**. + +- Removed deprecated use of StopIteration in generators and invalid regex + escape sequence. + + Contributed by **Jon Dufresne**. + +- Added Python 3.6 to CI testing. + + Contributed by **Jon Dufresne**. + +- SQS: Allowed endpoint URL to be specified in the boto3 connection. + + Contributed by **georgepsarakis**. + +- SQS: Added support for Python 3.4. + + Contributed by **Anthony Lukach**. + +- SQS: ``kombu[sqs]`` now depends on :pypi:`boto3` (no longer using + :pypi:`boto)`. + + - Adds support for Python 3.4+ + - Adds support for FIFO queues (Issue #678) and (Issue celery/celery#3690) + - Avoids issues around a broken endpoints file (Issue celery/celery#3672) + + Contributed by **Mischa Spiegelmock** and **Jerry Seutter**. + +- Zookeeper: Added support for delaying task with Python 3. + + Contributed by **Dima Kurguzov**. + +- SQS: Fixed bug where :meth:`kombu.transport.SQS.drain_events` did not support + callback argument (Issue #694). + + Contributed by **Michael Montgomery**. + +- Fixed bug around modifying dictionary size while iterating over it + (Issue #675). 
+ + Contributed by **Felix Yan**. + +- etcd: Added handling for :exc:`EtcdException` exception rather than + :exc:`EtcdError`. + + Contributed by **Stephen Milner**. + +- Documentation improvements by: + + - **Mads Jensen** + - **Matias Insaurralde** + - **Omer Katz** + - **Dmitry Dygalo** + - **Christopher Hoskin** + .. _version-4.0.2: 4.0.2 diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/changelog kombu-4.1.0/debian/changelog --- kombu-4.0.2+really4.0.2+dfsg/debian/changelog 2017-05-15 00:47:57.000000000 +0000 +++ kombu-4.1.0/debian/changelog 2017-12-18 11:34:06.000000000 +0000 @@ -1,15 +1,34 @@ -kombu (4.0.2+really4.0.2+dfsg-2ubuntu1~cloud0) xenial-pike; urgency=medium +kombu (4.1.0-1~cloud0) xenial-queens; urgency=medium - * New update for the Ubuntu Cloud Archive. + * New upstream release for the Ubuntu Cloud Archive. - -- Openstack Ubuntu Testing Bot Mon, 15 May 2017 00:47:57 +0000 + -- Openstack Ubuntu Testing Bot Mon, 18 Dec 2017 11:34:06 +0000 -kombu (4.0.2+really4.0.2+dfsg-2ubuntu1) artful; urgency=medium +kombu (4.1.0-1) unstable; urgency=medium - * Add d/patches/0002-dont-modfiy-dict-size-while-iterating-over-it.patch from - upstream to fix build with Python 3.6. + * Uploading to unstable. + * Ran wrap-and-sort -bast to minimize diffs. + * Added myself as uploader. + * Correctly builds sphinx doc in override_dh_sphinxdoc, and understand + build-profiles nodoc and nocheck.. + * Removed useless X-Python-Version: >= 2.7 and X-Python3-Version: >= 3.3 + fields. + * Standards-Version is now 4.1.1. - -- Michael Hudson-Doyle Mon, 15 May 2017 12:16:30 +1200 + -- Thomas Goirand Sat, 04 Nov 2017 21:45:14 +0000 + +kombu (4.1.0-1~exp1) experimental; urgency=low + + * New upstream release. + * Remove repack script as upstream has merged the ICC profile removal. + * Add new upstream signing key. + * Add patch disabling intersphinx to prevent network requests during + build. + * Bump Standards-Version to 4.0.0. + * Remove Artistic and GPL-1+ licenses from d/copyright as + debian/repack.stub is no longer present. + + -- Michael Fladischer Sun, 30 Jul 2017 23:07:32 +0200 kombu (4.0.2+really4.0.2+dfsg-2) experimental; urgency=medium @@ -38,6 +57,12 @@ -- Christopher Hoskin Tue, 21 Mar 2017 21:51:21 +0000 +kombu (4.0.2+really3.0.35+dfsg-2) unstable; urgency=medium + + * Revert mistaken upload of 4.0.2 to unstable instead of experimental + + -- Christopher Hoskin Fri, 24 Mar 2017 20:27:37 +0000 + kombu (3.0.35+dfsg-2) unstable; urgency=medium * Team upload. 
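The fix referenced by the 4.1.0 changelog entry for Issue #675 above, and by the now-dropped Debian patch 0002-dont-modfiy-dict-size-while-iterating-over-it.patch, addresses the same Python 3 pitfall: resizing a dict while iterating over it. A minimal, self-contained sketch of the failure and of the list() snapshot the upstream revive() fix uses (the queue names and values here are made up):

    # Unsafe: shrinking the dict during live iteration raises RuntimeError.
    queues = {'q1': 'Queue(q1)', 'q2': 'Queue(q2)'}
    try:
        for name in queues:
            queues.pop(name)
    except RuntimeError as exc:
        print(exc)  # dictionary changed size during iteration

    # Safe: snapshot the items first, as the patched Consumer.revive() does
    # with list(items(self._queues)).
    queues = {'q1': 'Queue(q1)', 'q2': 'Queue(q2)'}
    for name, queue in list(queues.items()):
        queues.pop(name, None)
        queues[name] = queue  # re-inserting (possibly under a new key) is now safe

The same one-line change shows up twice further down in this debdiff: once as the distro patch being dropped from debian/patches, and once already merged into kombu/messaging.py.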
diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/control kombu-4.1.0/debian/control --- kombu-4.0.2+really4.0.2+dfsg/debian/control 2017-05-15 00:17:33.000000000 +0000 +++ kombu-4.1.0/debian/control 2017-11-04 21:45:14.000000000 +0000 @@ -1,80 +1,86 @@ Source: kombu Section: python Priority: optional -Maintainer: Ubuntu Developers -XSBC-Original-Maintainer: Debian Python Modules Team -Uploaders: Michael Fladischer , - Brian May , - Christopher Hoskin -Build-Depends: debhelper (>= 9), - dh-python, - python-all, - python-amqp (>= 2.1.4), - python-anyjson (>= 0.3.3), - python-beanstalkc, - python-boto, - python-couchdb, - python-django, - python-funcsigs, - python-mock, - python-msgpack, - python-pika, - python-pymongo (>= 2.6.2), - python-redis, - python-setuptools, - python-simplejson, - python-sqlalchemy, - python-unittest2, - python-yaml, - python-pytest, - python-case, - python-tz, - python-vine, - python3-all, - python3-amqp (>= 2.1.4), - python3-anyjson (>= 0.3.3), - python3-django, - python3-funcsigs, - python3-mock, - python3-msgpack, - python3-pymongo, - python3-redis, - python3-setuptools, - python3-simplejson, - python3-sqlalchemy, - python3-yaml, - python3-sphinx, - python3-sphinx-celery, - python3-pytest, - python3-case, - python3-tz, - python3-vine -Build-Conflicts: python-cjson, - python-sphinx -X-Python-Version: >= 2.7 -X-Python3-Version: >= 3.3 -Standards-Version: 3.9.8 +Maintainer: Debian Python Modules Team +Uploaders: + Michael Fladischer , + Brian May , + Christopher Hoskin , + Thomas Goirand , +Build-Depends: + debhelper (>= 9), + dh-python, + python-all, + python-setuptools, + python3-all, + python3-setuptools, + python3-sphinx, +Build-Depends-Indep: + python-amqp (>= 2.1.4) , + python-anyjson (>= 0.3.3) , + python-beanstalkc , + python-boto , + python-case , + python-couchdb , + python-django , + python-funcsigs , + python-mock , + python-msgpack , + python-pika , + python-pymongo (>= 2.6.2) , + python-pytest , + python-redis , + python-simplejson , + python-sqlalchemy , + python-tz , + python-unittest2 , + python-vine , + python-yaml , + python3-amqp (>= 2.1.4) , + python3-anyjson (>= 0.3.3) , + python3-case , + python3-django , + python3-funcsigs , + python3-mock , + python3-msgpack , + python3-pymongo , + python3-pytest , + python3-redis , + python3-simplejson , + python3-sphinx-celery , + python3-sqlalchemy , + python3-tz , + python3-vine , + python3-yaml , +Build-Conflicts: + python-cjson, + python-sphinx, +Standards-Version: 4.1.1 Homepage: https://github.com/celery/kombu/ Vcs-Git: https://anonscm.debian.org/git/python-modules/packages/kombu.git Vcs-Browser: https://anonscm.debian.org/cgit/python-modules/packages/kombu.git Package: python-kombu Architecture: all -Depends: python-amqp (>= 1.4.6), - python-anyjson (>= 0.3.3), - ${misc:Depends}, - ${python:Depends} -Recommends: python-yaml -Breaks: python-cjson (<= 1.0.5-4+b1) -Suggests: python-beanstalkc, - python-boto, - python-couchdb, - python-django, - python-kombu-doc, - python-pika, - python-pymongo (>= 2.6.2), - python-redis, - python-sqlalchemy +Depends: + python-amqp (>= 1.4.6), + python-anyjson (>= 0.3.3), + ${misc:Depends}, + ${python:Depends}, +Recommends: + python-yaml, +Breaks: + python-cjson (<= 1.0.5-4+b1), +Suggests: + python-beanstalkc, + python-boto, + python-couchdb, + python-django, + python-kombu-doc, + python-pika, + python-pymongo (>= 2.6.2), + python-redis, + python-sqlalchemy, Description: AMQP Messaging Framework for Python The aim of Kombu is to make messaging in Python as easy as possible 
by providing an idiomatic high-level interface for the AMQP protocol. It is meant @@ -88,20 +94,14 @@ * The ability to ensure that an operation is performed by gracefully handling connection and channel errors. -Package: python3-kombu +Package: python-kombu-doc +Section: doc Architecture: all -Depends: python3-amqp (>= 1.4.6), - python3-anyjson (>= 0.3.3), - ${misc:Depends}, - ${python3:Depends} -Recommends: python3-yaml -Suggests: python-kombu-doc, - python3-boto, - python3-django, - python3-pymongo, - python3-redis, - python3-sqlalchemy -Description: AMQP Messaging Framework for Python (Python3 version) +Build-Profiles: +Depends: + ${misc:Depends}, + ${sphinxdoc:Depends}, +Description: AMQP Messaging Framework for Python (Documentation) The aim of Kombu is to make messaging in Python as easy as possible by providing an idiomatic high-level interface for the AMQP protocol. It is meant to replace the carrot library by providing a compatibility layer. @@ -114,13 +114,25 @@ * The ability to ensure that an operation is performed by gracefully handling connection and channel errors. . - This package contains the Python 3 version of the library. + This package contains the documentation. -Package: python-kombu-doc -Section: doc +Package: python3-kombu Architecture: all -Depends: ${misc:Depends}, ${sphinxdoc:Depends} -Description: AMQP Messaging Framework for Python (Documentation) +Depends: + python3-amqp (>= 1.4.6), + python3-anyjson (>= 0.3.3), + ${misc:Depends}, + ${python3:Depends}, +Recommends: + python3-yaml, +Suggests: + python-kombu-doc, + python3-boto, + python3-django, + python3-pymongo, + python3-redis, + python3-sqlalchemy, +Description: AMQP Messaging Framework for Python (Python3 version) The aim of Kombu is to make messaging in Python as easy as possible by providing an idiomatic high-level interface for the AMQP protocol. It is meant to replace the carrot library by providing a compatibility layer. @@ -133,4 +145,4 @@ * The ability to ensure that an operation is performed by gracefully handling connection and channel errors. . - This package contains the documentation. + This package contains the Python 3 version of the library. diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/copyright kombu-4.1.0/debian/copyright --- kombu-4.0.2+really4.0.2+dfsg/debian/copyright 2017-03-31 08:12:12.000000000 +0000 +++ kombu-4.1.0/debian/copyright 2017-11-04 21:45:14.000000000 +0000 @@ -5,8 +5,8 @@ Files: * Copyright: 2009-2012, Ask Solem and contributors - 2015-2016 Ask Solem & contributors - 2012-2014 GoPivotal Inc & contributors + 2015-2016 Ask Solem & contributors + 2012-2014 GoPivotal Inc & contributors License: BSD-3-clause Files: kombu/utils/functional.py @@ -27,10 +27,6 @@ 2017 Christopher Hoskin License: BSD-3-clause -Files: debian/repack.stub -Copyright: 2009 Ryan Niebur -License: Artistic or GPL-1+ - License: BSD-3-clause Copyright (c) 2015-2016 Ask Solem & contributors. All rights reserved. Copyright (c) 2012-2014 GoPivotal Inc & contributors. All rights reserved. @@ -105,20 +101,3 @@ 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. - -License: Artistic - This program is free software; you can redistribute it and/or modify - it under the terms of the Artistic License, which comes with Perl. - . - On Debian systems, the complete text of the Artistic License can be - found in `/usr/share/common-licenses/Artistic'. 
- -License: GPL-1+ - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 1, or (at your option) - any later version. - . - On Debian systems, the complete text of version 1 of the GNU General - Public License can be found in `/usr/share/common-licenses/GPL-1'. - diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/patches/0002-Disable-intershpinx-mapping-for-now.patch kombu-4.1.0/debian/patches/0002-Disable-intershpinx-mapping-for-now.patch --- kombu-4.0.2+really4.0.2+dfsg/debian/patches/0002-Disable-intershpinx-mapping-for-now.patch 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/debian/patches/0002-Disable-intershpinx-mapping-for-now.patch 2017-11-04 21:45:14.000000000 +0000 @@ -0,0 +1,20 @@ +From: Michael Fladischer +Date: Sun, 30 Jul 2017 22:22:36 +0200 +Subject: Disable intershpinx mapping for now. + +The best way to fix network requests when building the documenation would be to +patch INTERSPHINX_MAPPING in sphinx_celery.conf in src:sphinx-celery. +--- + docs/conf.py | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/docs/conf.py b/docs/conf.py +index 6fa6917..e97470f 100644 +--- a/docs/conf.py ++++ b/docs/conf.py +@@ -28,4 +28,5 @@ globals().update(conf.build_config( + 'kombu.utils', + 'kombu.transport.virtual.base', + ], ++ intersphinx_mapping={}, + )) diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/patches/0002-dont-modfiy-dict-size-while-iterating-over-it.patch kombu-4.1.0/debian/patches/0002-dont-modfiy-dict-size-while-iterating-over-it.patch --- kombu-4.0.2+really4.0.2+dfsg/debian/patches/0002-dont-modfiy-dict-size-while-iterating-over-it.patch 2017-05-15 00:05:28.000000000 +0000 +++ kombu-4.1.0/debian/patches/0002-dont-modfiy-dict-size-while-iterating-over-it.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -Description: Fix tests with Python 3.6. 
-Author: Felix Yan -Origin: upstream -Bug: https://github.com/celery/kombu/issues/675 -Applied-Upstream: //github.com/celery/kombu/commit/2940a4b1830bc2fc6d3f65de4feeec6ea5a49fe5 -Reviewed-by: Michael Hudson-Doyle -Last-Update: 2017-05-15 ---- -This patch header follows DEP-3: http://dep.debian.net/deps/dep3/ -diff --git a/kombu/messaging.py b/kombu/messaging.py -index 03e7e15..e1800b5 100644 ---- a/kombu/messaging.py -+++ b/kombu/messaging.py -@@ -397,7 +397,8 @@ def revive(self, channel): - """Revive consumer after connection loss.""" - self._active_tags.clear() - channel = self.channel = maybe_channel(channel) -- for qname, queue in items(self._queues): -+ # modify dict size while iterating over it is not allowed -+ for qname, queue in list(items(self._queues)): - # name may have changed after declare - self._queues.pop(qname, None) - queue = self._queues[queue.name] = queue(self.channel) diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/patches/series kombu-4.1.0/debian/patches/series --- kombu-4.0.2+really4.0.2+dfsg/debian/patches/series 2017-05-15 00:02:58.000000000 +0000 +++ kombu-4.1.0/debian/patches/series 2017-11-04 21:45:14.000000000 +0000 @@ -1,2 +1,2 @@ 0001-Remove-image-from-remote-donation-site-privacy-issue.patch -0002-dont-modfiy-dict-size-while-iterating-over-it.patch +0002-Disable-intershpinx-mapping-for-now.patch diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/python3-kombu.docs kombu-4.1.0/debian/python3-kombu.docs --- kombu-4.0.2+really4.0.2+dfsg/debian/python3-kombu.docs 2017-03-31 08:12:12.000000000 +0000 +++ kombu-4.1.0/debian/python3-kombu.docs 2017-11-04 21:45:14.000000000 +0000 @@ -1,2 +1,2 @@ -README.rst FAQ +README.rst diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/python-kombu-doc.docs kombu-4.1.0/debian/python-kombu-doc.docs --- kombu-4.0.2+really4.0.2+dfsg/debian/python-kombu-doc.docs 2017-03-31 08:12:12.000000000 +0000 +++ kombu-4.1.0/debian/python-kombu-doc.docs 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -docs/.build/html diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/python-kombu-doc.links kombu-4.1.0/debian/python-kombu-doc.links --- kombu-4.0.2+really4.0.2+dfsg/debian/python-kombu-doc.links 2017-03-31 08:12:12.000000000 +0000 +++ kombu-4.1.0/debian/python-kombu-doc.links 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -usr/share/doc/python-kombu-doc/html/_sources usr/share/doc/python-kombu-doc/rst diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/python-kombu.docs kombu-4.1.0/debian/python-kombu.docs --- kombu-4.0.2+really4.0.2+dfsg/debian/python-kombu.docs 2017-03-31 08:12:12.000000000 +0000 +++ kombu-4.1.0/debian/python-kombu.docs 2017-11-04 21:45:14.000000000 +0000 @@ -1,2 +1,2 @@ -README.rst FAQ +README.rst diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/repack.local kombu-4.1.0/debian/repack.local --- kombu-4.0.2+really4.0.2+dfsg/debian/repack.local 2017-03-31 08:12:12.000000000 +0000 +++ kombu-4.1.0/debian/repack.local 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -#!/bin/sh - -if [ ! -f /usr/bin/convert ] -then - echo "This repack script requires the convert program." - echo "Please install the graphicsmagick-imagemagick-compat package and try again." 
- exit 1 -fi - -rm -rf kombu.egg-info/ -convert -verbose ./docs/images/kombu.jpg +profile "icc" ./docs/images/kombu.jpg -convert -verbose ./docs/images/kombusmall.jpg +profile "icc" ./docs/images/kombusmall.jpg diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/repack.stub kombu-4.1.0/debian/repack.stub --- kombu-4.0.2+really4.0.2+dfsg/debian/repack.stub 2017-03-31 08:12:12.000000000 +0000 +++ kombu-4.1.0/debian/repack.stub 1970-01-01 00:00:00.000000000 +0000 @@ -1,91 +0,0 @@ -#!/bin/sh - -: <<=cut -=pod - -=head1 NAME - -repack.stub - script to repack upstream tarballs from uscan - -=head1 INSTRUCTIONS - -put this in debian/repack.stub and add "debian sh debian/repack.stub" to -the end of the line in debian/watch. you will also need to add a version -mangle to debian/watch. - -then create a debian/repack.local. this is a shell script that is -sourced under "set -e", so be careful to check returns codes. - -=head1 FUNCTIONS - -=over 4 - -=item rm - -rm is replaced by a function that does some magic ("rm -rv" by default), but also changes MANIFEST if $MANIFEST is 1 - -=item mv - -mv is replaced by a function that just does mv (by default), but also changes MANIFEST if $MANIFEST is 1 - -=item requires_version - -requires_version is there for future usage for requiring certain versions of the script - -=back - -=head1 VARIABLES - -=over 4 - -=item SUFFIX - -defaults to +dfsg - -what to append to the upstream version - -=item RM_OPTS - -defaults to -vrf - -options to pass to rm - -=item MANIFEST - -defaults to 0, set to 1 to turn on. - -this will manipulate MANIFEST files in CPAN tarballs. - -=item UP_BASE - -this is the directory where the upstream source is. - -=back - -=head1 COPYRIGHT AND LICENSE - -Copyright 2009, Ryan Niebur - -This program is free software; you can redistribute it and/or modify it -under the same terms as Perl itself. - -=cut - -if [ -z "$REPACK_SH" ]; then - if [ -x /usr/share/pkg-perl-tools/repack.sh ]; then - REPACK_SH='/usr/share/pkg-perl-tools/repack.sh' - elif [ -f ../../scripts/repack.sh ]; then - REPACK_SH=../../scripts/repack.sh - fi - if [ -z "$REPACK_SH" ] && which repack.sh > /dev/null; then - REPACK_SH=$(which repack.sh) - fi -fi - -if [ ! -f "$REPACK_SH" ]; then - echo "Couldn't find a repack.sh. please put it in your PATH, put it at ../../scripts/repack.sh, or put it somewhere else and set the REPACK_SH variable" - echo "You can get it from https://anonscm.debian.org/cgit/pkg-perl/packages/pkg-perl-tools.git/plain/scripts/repack.sh" - exit 1 -fi - -exec "$REPACK_SH" "$@" diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/rules kombu-4.1.0/debian/rules --- kombu-4.0.2+really4.0.2+dfsg/debian/rules 2017-03-31 08:12:12.000000000 +0000 +++ kombu-4.1.0/debian/rules 2017-11-04 21:45:14.000000000 +0000 @@ -5,14 +5,16 @@ %: dh $@ --with python2,python3,sphinxdoc --buildsystem=pybuild -override_dh_auto_build: export http_proxy=127.0.0.1:9 -override_dh_auto_build: export https_proxy=127.0.0.1:9 -override_dh_auto_build: - PYTHONPATH=. sphinx-build -b html -d docs/.build/.doctrees -N docs docs/.build/html - dh_auto_build +override_dh_sphinxdoc: +ifeq (,$(findstring nodoc, $(DEB_BUILD_OPTIONS))) + http_proxy=127.0.0.1:9 https_proxy=127.0.0.1:9 PYTHONPATH=. 
sphinx-build -b html -d docs/.build/.doctrees -N docs $(CURDIR)/debian/python-kombu-doc/usr/share/doc/python-kombu-doc/html + dh_sphinxdoc +endif override_dh_auto_test: +ifeq (,$(findstring nocheck, $(DEB_BUILD_OPTIONS))) dh_auto_test -- --system=custom --test-args="{interpreter} setup.py test" +endif override_dh_python3: dh_python3 -v Binary files /tmp/tmphfsAZY/mPiteOnBJ2/kombu-4.0.2+really4.0.2+dfsg/debian/upstream-signing-key.pgp and /tmp/tmphfsAZY/0qV3Ys7xn_/kombu-4.1.0/debian/upstream-signing-key.pgp differ diff -Nru kombu-4.0.2+really4.0.2+dfsg/debian/watch kombu-4.1.0/debian/watch --- kombu-4.0.2+really4.0.2+dfsg/debian/watch 2017-03-31 08:12:12.000000000 +0000 +++ kombu-4.1.0/debian/watch 2017-11-04 21:45:14.000000000 +0000 @@ -1,4 +1,3 @@ version=3 opts=uversionmangle=s/(rc|a|b|c)/~$1/,dversionmangle=s/\+dfsg$//,repacksuffix=+dfsg,pgpsigurlmangle=s/$/.asc/ \ -http://pypi.debian.net/kombu/kombu-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) \ -debian sh debian/repack.stub +http://pypi.debian.net/kombu/kombu-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) diff -Nru kombu-4.0.2+really4.0.2+dfsg/docs/conf.py kombu-4.1.0/docs/conf.py --- kombu-4.0.2+really4.0.2+dfsg/docs/conf.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/docs/conf.py 2017-07-20 16:21:19.000000000 +0000 @@ -6,8 +6,8 @@ globals().update(conf.build_config( 'kombu', __file__, project='Kombu', - version_dev='4.0', - version_stable='3.0', + version_dev='4.2', + version_stable='4.1', canonical_url='http://docs.kombu.me', webdomain='kombu.me', github_project='celery/kombu', diff -Nru kombu-4.0.2+really4.0.2+dfsg/docs/includes/introduction.txt kombu-4.1.0/docs/includes/introduction.txt --- kombu-4.0.2+really4.0.2+dfsg/docs/includes/introduction.txt 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/docs/includes/introduction.txt 2017-07-20 16:19:30.000000000 +0000 @@ -1,4 +1,4 @@ -:Version: 4.0.2 +:Version: 4.1.0 :Web: http://kombu.me/ :Download: http://pypi.python.org/pypi/kombu/ :Source: https://github.com/celery/kombu/ @@ -56,12 +56,12 @@ For an introduction to AMQP you should read the article `Rabbits and warrens`_, and the `Wikipedia article about AMQP`_. -.. _`RabbitMQ`: http://www.rabbitmq.com/ -.. _`AMQP`: http://amqp.org -.. _`py-amqp`: http://pypi.python.org/pypi/amqp/ -.. _`qpid-python`: http://pypi.python.org/pypi/qpid-python/ -.. _`Redis`: http://code.google.com/p/redis/ -.. _`Amazon SQS`: http://aws.amazon.com/sqs/ +.. _`RabbitMQ`: https://www.rabbitmq.com/ +.. _`AMQP`: https://amqp.org +.. _`py-amqp`: https://pypi.python.org/pypi/amqp/ +.. _`qpid-python`: https://pypi.python.org/pypi/qpid-python/ +.. _`Redis`: https://redis.io/ +.. _`Amazon SQS`: https://aws.amazon.com/sqs/ .. _`Zookeeper`: https://zookeeper.apache.org/ .. _`Rabbits and warrens`: http://blogs.digitar.com/jjww/2009/01/rabbits-and-warrens/ .. 
_`amqplib`: http://barryp.org/software/py-amqplib/ diff -Nru kombu-4.0.2+really4.0.2+dfsg/docs/reference/index.rst kombu-4.1.0/docs/reference/index.rst --- kombu-4.0.2+really4.0.2+dfsg/docs/reference/index.rst 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/docs/reference/index.rst 2017-07-10 04:45:59.000000000 +0000 @@ -48,6 +48,8 @@ kombu.transport.etcd kombu.transport.zookeeper kombu.transport.filesystem + kombu.transport.sqlalchemy + kombu.transport.sqlalchemy.models kombu.transport.SQS kombu.transport.SLMQ kombu.transport.pyro diff -Nru kombu-4.0.2+really4.0.2+dfsg/docs/reference/kombu.transport.SLMQ.rst kombu-4.1.0/docs/reference/kombu.transport.SLMQ.rst --- kombu-4.0.2+really4.0.2+dfsg/docs/reference/kombu.transport.SLMQ.rst 2016-11-30 22:54:46.000000000 +0000 +++ kombu-4.1.0/docs/reference/kombu.transport.SLMQ.rst 2017-07-10 04:45:59.000000000 +0000 @@ -2,6 +2,7 @@ SLMQ Transport - ``kombu.transport.SLMQ`` ============================================= + .. currentmodule:: kombu.transport.SLMQ .. automodule:: kombu.transport.SLMQ diff -Nru kombu-4.0.2+really4.0.2+dfsg/docs/reference/kombu.transport.sqlalchemy.models.rst kombu-4.1.0/docs/reference/kombu.transport.sqlalchemy.models.rst --- kombu-4.0.2+really4.0.2+dfsg/docs/reference/kombu.transport.sqlalchemy.models.rst 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/docs/reference/kombu.transport.sqlalchemy.models.rst 2017-07-10 04:45:59.000000000 +0000 @@ -0,0 +1,32 @@ +===================================================================== + SQLAlchemy Transport Model - ``kombu.transport.sqlalchemy.models`` +===================================================================== + + +.. currentmodule:: kombu.transport.sqlalchemy.models + +.. automodule:: kombu.transport.sqlalchemy.models + + .. contents:: + :local: + + Models + ------ + + .. autoclass:: Queue + + .. autoattribute:: Queue.id + + .. autoattribute:: Queue.name + + .. autoclass:: Message + + .. autoattribute:: Message.id + + .. autoattribute:: Message.visible + + .. autoattribute:: Message.sent_at + + .. autoattribute:: Message.payload + + .. autoattribute:: Message.version diff -Nru kombu-4.0.2+really4.0.2+dfsg/docs/reference/kombu.transport.sqlalchemy.rst kombu-4.1.0/docs/reference/kombu.transport.sqlalchemy.rst --- kombu-4.0.2+really4.0.2+dfsg/docs/reference/kombu.transport.sqlalchemy.rst 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/docs/reference/kombu.transport.sqlalchemy.rst 2017-07-10 04:45:59.000000000 +0000 @@ -0,0 +1,25 @@ +=========================================================== + SQLAlchemy Transport Model - kombu.transport.sqlalchemy +=========================================================== + + +.. currentmodule:: kombu.transport.sqlalchemy + +.. automodule:: kombu.transport.sqlalchemy + + .. contents:: + :local: + + Transport + --------- + + .. autoclass:: Transport + :members: + :undoc-members: + + Channel + ------- + + .. 
autoclass:: Channel + :members: + :undoc-members: diff -Nru kombu-4.0.2+really4.0.2+dfsg/examples/hello_consumer.py kombu-4.1.0/examples/hello_consumer.py --- kombu-4.0.2+really4.0.2+dfsg/examples/hello_consumer.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/examples/hello_consumer.py 2017-07-10 04:45:59.000000000 +0000 @@ -1,6 +1,6 @@ from __future__ import absolute_import, unicode_literals, print_function -from kombu import Connection +from kombu import Connection # noqa with Connection('amqp://guest:guest@localhost:5672//') as conn: diff -Nru kombu-4.0.2+really4.0.2+dfsg/extra/requirements/extras/sqlalchemy.txt kombu-4.1.0/extra/requirements/extras/sqlalchemy.txt --- kombu-4.0.2+really4.0.2+dfsg/extra/requirements/extras/sqlalchemy.txt 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/extra/requirements/extras/sqlalchemy.txt 2017-07-10 04:45:59.000000000 +0000 @@ -0,0 +1 @@ +sqlalchemy diff -Nru kombu-4.0.2+really4.0.2+dfsg/extra/requirements/extras/sqs.txt kombu-4.1.0/extra/requirements/extras/sqs.txt --- kombu-4.0.2+really4.0.2+dfsg/extra/requirements/extras/sqs.txt 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/extra/requirements/extras/sqs.txt 2017-07-10 04:45:59.000000000 +0000 @@ -1,2 +1,2 @@ -boto>=2.8 +boto3>=1.4.4 pycurl diff -Nru kombu-4.0.2+really4.0.2+dfsg/extra/requirements/funtest.txt kombu-4.1.0/extra/requirements/funtest.txt --- kombu-4.0.2+really4.0.2+dfsg/extra/requirements/funtest.txt 2016-11-30 22:54:46.000000000 +0000 +++ kombu-4.1.0/extra/requirements/funtest.txt 2017-07-10 04:45:59.000000000 +0000 @@ -8,7 +8,7 @@ kazoo # SQS transport -boto +boto3 # Qpid transport qpid-python>=0.26 diff -Nru kombu-4.0.2+really4.0.2+dfsg/extra/requirements/pkgutils.txt kombu-4.1.0/extra/requirements/pkgutils.txt --- kombu-4.0.2+really4.0.2+dfsg/extra/requirements/pkgutils.txt 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/extra/requirements/pkgutils.txt 2017-07-10 04:45:59.000000000 +0000 @@ -5,4 +5,4 @@ tox>=2.3.1 sphinx2rst>=1.0 bumpversion -pydocstyle +pydocstyle==1.1.1 diff -Nru kombu-4.0.2+really4.0.2+dfsg/extra/requirements/test-ci.txt kombu-4.1.0/extra/requirements/test-ci.txt --- kombu-4.0.2+really4.0.2+dfsg/extra/requirements/test-ci.txt 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/extra/requirements/test-ci.txt 2017-07-10 04:45:59.000000000 +0000 @@ -3,3 +3,5 @@ redis PyYAML msgpack-python>0.2.0 +-r extras/sqs.txt +sqlalchemy diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/connection.py kombu-4.1.0/kombu/async/aws/connection.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/connection.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/async/aws/connection.py 2017-07-10 04:45:59.000000000 +0000 @@ -1,37 +1,36 @@ -# -*- coding: utf-8 -*- +# * coding: utf8 * """Amazon AWS Connection.""" from __future__ import absolute_import, unicode_literals -from io import BytesIO - from vine import promise, transform +from kombu.async.aws.ext import AWSRequest, get_response + from kombu.async.http import Headers, Request, get_client from kombu.five import items, python_2_unicode_compatible -from .ext import ( - boto, AWSAuthConnection, AWSQueryConnection, XmlHandler, ResultSet, -) - -try: - from urllib.parse import urlunsplit -except ImportError: - from urlparse import urlunsplit # noqa -from xml.sax import parseString as sax_parse # noqa +import io try: # pragma: no cover - from email import message_from_file + from email import message_from_bytes from email.mime.message import MIMEMessage + + # py3 + def message_from_headers(hdr): # noqa + 
bs = "\r\n".join("{}: {}".format(*h) for h in hdr) + return message_from_bytes(bs.encode()) + except ImportError: # pragma: no cover from mimetools import Message as MIMEMessage # noqa - def message_from_file(m): # noqa - return m + # py2 + def message_from_headers(hdr): # noqa + return io.BytesIO(b'\r\n'.join( + b'{0}: {1}'.format(*h) for h in hdr + )) __all__ = [ - 'AsyncHTTPConnection', 'AsyncHTTPSConnection', - 'AsyncHTTPResponse', 'AsyncConnection', - 'AsyncAWSAuthConnection', 'AsyncAWSQueryConnection', + 'AsyncHTTPSConnection', 'AsyncConnection', ] @@ -56,11 +55,7 @@ @property def msg(self): if self._msg is None: - self._msg = MIMEMessage(message_from_file( - BytesIO(b'\r\n'.join( - b'{0}: {1}'.format(*h) for h in self.getheaders()) - ) - )) + self._msg = MIMEMessage(message_from_headers(self.getheaders())) return self._msg @property @@ -78,7 +73,7 @@ @python_2_unicode_compatible -class AsyncHTTPConnection(object): +class AsyncHTTPSConnection(object): """Async HTTP Connection.""" Request = Request @@ -87,13 +82,9 @@ method = 'GET' path = '/' body = None - scheme = 'http' default_ports = {'http': 80, 'https': 443} - def __init__(self, host, port=None, - strict=None, timeout=20.0, http_client=None, **kwargs): - self.host = host - self.port = port + def __init__(self, strict=None, timeout=20.0, http_client=None): self.headers = [] self.timeout = timeout self.strict = strict @@ -112,14 +103,9 @@ if headers is not None: self.headers.extend(list(items(headers))) - def getrequest(self, scheme=None): - scheme = scheme if scheme else self.scheme - host = self.host - if self.port and self.port != self.default_ports[scheme]: - host = '{0}:{1}'.format(host, self.port) - url = urlunsplit((scheme, host, self.path, '', '')) + def getrequest(self): headers = Headers(self.headers) - return self.Request(url, method=self.method, headers=headers, + return self.Request(self.path, method=self.method, headers=headers, body=self.body, connect_timeout=self.timeout, request_timeout=self.timeout, validate_cert=False) @@ -137,7 +123,7 @@ def close(self): pass - def putrequest(self, method, path, **kwargs): + def putrequest(self, method, path): self.method = method self.path = path @@ -157,139 +143,117 @@ return ''.format(self.getrequest()) -class AsyncHTTPSConnection(AsyncHTTPConnection): - """Async HTTPS Connection.""" - - scheme = 'https' - - class AsyncConnection(object): """Async AWS Connection.""" - def __init__(self, http_client=None, **kwargs): - if boto is None: - raise ImportError('boto is not installed') + def __init__(self, sqs_connection, http_client=None, **kwargs): # noqa + self.sqs_connection = sqs_connection self._httpclient = http_client or get_client() - def get_http_connection(self, host, port, is_secure): - return (AsyncHTTPSConnection if is_secure else AsyncHTTPConnection)( - host, port, http_client=self._httpclient, - ) + def get_http_connection(self): + return AsyncHTTPSConnection(http_client=self._httpclient) def _mexe(self, request, sender=None, callback=None): callback = callback or promise() - boto.log.debug( - 'HTTP %s/%s headers=%s body=%s', - request.host, request.path, - request.headers, request.body, - ) - - conn = self.get_http_connection( - request.host, request.port, self.is_secure, - ) - request.authorize(connection=self) + conn = self.get_http_connection() if callable(sender): sender(conn, request.method, request.path, request.body, request.headers, callback) else: - conn.request(request.method, request.path, + conn.request(request.method, request.url, request.body, 
request.headers) conn.getresponse(callback=callback) return callback -class AsyncAWSAuthConnection(AsyncConnection, AWSAuthConnection): - """Async AWS Authn Connection.""" - - def __init__(self, host, - http_client=None, http_client_params={}, **kwargs): - AsyncConnection.__init__(self, http_client, **http_client_params) - AWSAuthConnection.__init__(self, host, **kwargs) - - def make_request(self, method, path, headers=None, data='', host=None, - auth_path=None, sender=None, callback=None, **kwargs): - req = self.build_base_http_request( - method, path, auth_path, {}, headers, data, host, - ) - return self._mexe(req, sender=sender, callback=callback) - - -class AsyncAWSQueryConnection(AsyncConnection, AWSQueryConnection): +class AsyncAWSQueryConnection(AsyncConnection): """Async AWS Query Connection.""" - def __init__(self, host, - http_client=None, http_client_params={}, **kwargs): - AsyncConnection.__init__(self, http_client, **http_client_params) - AWSAuthConnection.__init__(self, host, **kwargs) - - def make_request(self, action, params, path, verb, callback=None): - request = self.build_base_http_request( - verb, path, None, params, {}, '', self.server_name()) - if action: - request.params['Action'] = action - request.params['Version'] = self.APIVersion - return self._mexe(request, callback=callback) + def __init__(self, sqs_connection, http_client=None, + http_client_params=None, **kwargs): + if not http_client_params: + http_client_params = {} + AsyncConnection.__init__(self, sqs_connection, http_client, + **http_client_params) + + def make_request(self, operation, params_, path, verb, callback=None): # noqa + params = params_.copy() + if operation: + params['Action'] = operation + signer = self.sqs_connection._request_signer # noqa + + # defaults for non-get + signing_type = 'standard' + param_payload = {'data': params} + if verb.lower() == 'get': + # query-based opts + signing_type = 'presignurl' + param_payload = {'params': params} + + request = AWSRequest(method=verb, url=path, **param_payload) + signer.sign(operation, request, signing_type=signing_type) + prepared_request = request.prepare() + + return self._mexe(prepared_request, callback=callback) - def get_list(self, action, params, markers, - path='/', parent=None, verb='GET', callback=None): + def get_list(self, operation, params, markers, path='/', parent=None, verb='POST', callback=None): # noqa return self.make_request( - action, params, path, verb, + operation, params, path, verb, callback=transform( self._on_list_ready, callback, parent or self, markers, + operation ), ) - def get_object(self, action, params, cls, - path='/', parent=None, verb='GET', callback=None): + def get_object(self, operation, params, path='/', parent=None, verb='GET', callback=None): # noqa return self.make_request( - action, params, path, verb, + operation, params, path, verb, callback=transform( - self._on_obj_ready, callback, parent or self, cls, + self._on_obj_ready, callback, parent or self, operation ), ) - def get_status(self, action, params, - path='/', parent=None, verb='GET', callback=None): + def get_status(self, operation, params, path='/', parent=None, verb='GET', callback=None): # noqa return self.make_request( - action, params, path, verb, + operation, params, path, verb, callback=transform( - self._on_status_ready, callback, parent or self, + self._on_status_ready, callback, parent or self, operation ), ) - def _on_list_ready(self, parent, markers, response): - body = response.read() - if response.status == 200 and body: - rs = 
ResultSet(markers) - h = XmlHandler(rs, parent) - sax_parse(body, h) - return rs + def _on_list_ready(self, parent, markers, operation, response): # noqa + service_model = self.sqs_connection.meta.service_model + if response.status == 200: + _, parsed = get_response( + service_model.operation_model(operation), response.response + ) + return parsed else: - raise self._for_status(response, body) + raise self._for_status(response, response.read()) - def _on_obj_ready(self, parent, cls, response): - body = response.read() - if response.status == 200 and body: - obj = cls(parent) - h = XmlHandler(obj, parent) - sax_parse(body, h) - return obj + def _on_obj_ready(self, parent, operation, response): # noqa + service_model = self.sqs_connection.meta.service_model + if response.status == 200: + _, parsed = get_response( + service_model.operation_model(operation), response.response + ) + return parsed else: - raise self._for_status(response, body) + raise self._for_status(response, response.read()) - def _on_status_ready(self, parent, response): - body = response.read() - if response.status == 200 and body: - rs = ResultSet() - h = XmlHandler(rs, parent) - sax_parse(body, h) - return rs.status + def _on_status_ready(self, parent, operation, response): # noqa + service_model = self.sqs_connection.meta.service_model + if response.status == 200: + httpres, _ = get_response( + service_model.operation_model(operation), response.response + ) + return httpres.code else: - raise self._for_status(response, body) + raise self._for_status(response, response.read()) def _for_status(self, response, body): context = 'Empty body' if not body else 'HTTP Error' - exc = self.ResponseError(response.status, response.reason, body) - boto.log.error('{0}: %r'.format(context), exc) - return exc + return Exception("Request {} HTTP {} {} ({})".format( + context, response.status, response.reason, body + )) diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/ext.py kombu-4.1.0/kombu/async/aws/ext.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/ext.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/async/aws/ext.py 2017-07-10 04:45:59.000000000 +0000 @@ -1,29 +1,26 @@ # -*- coding: utf-8 -*- -"""Amazon boto interface.""" +"""Amazon boto3 interface.""" from __future__ import absolute_import, unicode_literals try: - import boto -except ImportError: # pragma: no cover - boto = get_regions = ResultSet = RegionInfo = XmlHandler = None + import boto3 + from botocore import exceptions + from botocore.awsrequest import AWSRequest + from botocore.response import get_response +except ImportError: + boto3 = None class _void(object): pass - AWSAuthConnection = AWSQueryConnection = _void # noqa - class BotoError(Exception): + class BotoCoreError(Exception): pass - exception = _void() - exception.SQSError = BotoError - exception.SQSDecodeError = BotoError -else: - from boto import exception - from boto.connection import AWSAuthConnection, AWSQueryConnection - from boto.handler import XmlHandler - from boto.resultset import ResultSet - from boto.regioninfo import RegionInfo, get_regions + exceptions = _void() + exceptions.BotoCoreError = BotoCoreError + AWSRequest = _void() + get_response = _void() + __all__ = [ - 'exception', 'AWSAuthConnection', 'AWSQueryConnection', - 'XmlHandler', 'ResultSet', 'RegionInfo', 'get_regions', + 'exceptions', 'AWSRequest', 'get_response' ] diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/connection.py kombu-4.1.0/kombu/async/aws/sqs/connection.py --- 
kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/connection.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/async/aws/sqs/connection.py 2017-07-10 04:45:59.000000000 +0000 @@ -5,9 +5,8 @@ from vine import transform from kombu.async.aws.connection import AsyncAWSQueryConnection -from kombu.async.aws.ext import RegionInfo -from .ext import boto, Attributes, BatchResults, SQSConnection +from .ext import boto3 from .message import AsyncMessage from .queue import AsyncQueue @@ -15,28 +14,17 @@ __all__ = ['AsyncSQSConnection'] -class AsyncSQSConnection(AsyncAWSQueryConnection, SQSConnection): +class AsyncSQSConnection(AsyncAWSQueryConnection): """Async SQS Connection.""" - def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, - is_secure=True, port=None, proxy=None, proxy_port=None, - proxy_user=None, proxy_pass=None, debug=0, - https_connection_factory=None, region=None, *args, **kwargs): - if boto is None: - raise ImportError('boto is not installed') - self.region = region or RegionInfo( - self, self.DefaultRegionName, self.DefaultRegionEndpoint, - connection_cls=type(self), - ) + def __init__(self, sqs_connection, debug=0, region=None, **kwargs): + if boto3 is None: + raise ImportError('boto3 is not installed') AsyncAWSQueryConnection.__init__( self, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - is_secure=is_secure, port=port, - proxy=proxy, proxy_port=proxy_port, - proxy_user=proxy_user, proxy_pass=proxy_pass, - host=self.region.endpoint, debug=debug, - https_connection_factory=https_connection_factory, **kwargs + sqs_connection, + region_name=region, debug=debug, + **kwargs ) def create_queue(self, queue_name, @@ -46,17 +34,21 @@ params['DefaultVisibilityTimeout'] = format( visibility_timeout, 'd', ) - return self.get_object('CreateQueue', params, AsyncQueue, + return self.get_object('CreateQueue', params, callback=callback) def delete_queue(self, queue, force_deletion=False, callback=None): return self.get_status('DeleteQueue', None, queue.id, callback=callback) + def get_queue_url(self, queue): + res = self.sqs_connection.get_queue_url(QueueName=queue) + return res['QueueUrl'] + def get_queue_attributes(self, queue, attribute='All', callback=None): return self.get_object( 'GetQueueAttributes', {'AttributeName': attribute}, - Attributes, queue.id, callback=callback, + queue.id, callback=callback, ) def set_queue_attribute(self, queue, attribute, value, callback=None): @@ -74,17 +66,21 @@ if visibility_timeout: params['VisibilityTimeout'] = visibility_timeout if attributes: - self.build_list_params(params, attributes, 'AttributeName') + attrs = {} + for idx, attr in enumerate(attributes): + attrs['AttributeName.' 
+ str(idx + 1)] = attr + params.update(attrs) if wait_time_seconds is not None: params['WaitTimeSeconds'] = wait_time_seconds + queue_url = self.get_queue_url(queue) return self.get_list( - 'ReceiveMessage', params, [('Message', queue.message_class)], - queue.id, callback=callback, + 'ReceiveMessage', params, [('Message', AsyncMessage)], + queue_url, callback=callback, parent=queue, ) - def delete_message(self, queue, message, callback=None): + def delete_message(self, queue, receipt_handle, callback=None): return self.delete_message_from_handle( - queue, message.receipt_handle, callback, + queue, receipt_handle, callback, ) def delete_message_batch(self, queue, messages, callback=None): @@ -96,7 +92,7 @@ '{0}.ReceiptHandle'.format(prefix): m.receipt_handle, }) return self.get_object( - 'DeleteMessageBatch', params, BatchResults, queue.id, + 'DeleteMessageBatch', params, queue.id, verb='POST', callback=callback, ) @@ -104,7 +100,7 @@ callback=None): return self.get_status( 'DeleteMessage', {'ReceiptHandle': receipt_handle}, - queue.id, callback=callback, + queue, callback=callback, ) def send_message(self, queue, message_content, @@ -113,7 +109,7 @@ if delay_seconds: params['DelaySeconds'] = int(delay_seconds) return self.get_object( - 'SendMessage', params, AsyncMessage, queue.id, + 'SendMessage', params, queue.id, verb='POST', callback=callback, ) @@ -127,7 +123,7 @@ '{0}.DelaySeconds'.format(prefix): msg[2], }) return self.get_object( - 'SendMessageBatch', params, BatchResults, queue.id, + 'SendMessageBatch', params, queue.id, verb='POST', callback=callback, ) @@ -150,7 +146,7 @@ '{0}.VisibilityTimeout'.format(pre): t[1], }) return self.get_object( - 'ChangeMessageVisibilityBatch', params, BatchResults, queue.id, + 'ChangeMessageVisibilityBatch', params, queue.id, verb='POST', callback=callback, ) diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/ext.py kombu-4.1.0/kombu/async/aws/sqs/ext.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/ext.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/async/aws/sqs/ext.py 2017-07-10 04:45:59.000000000 +0000 @@ -1,32 +1,9 @@ # -*- coding: utf-8 -*- -"""Amazon SQS boto interface.""" +"""Amazon SQS boto3 interface.""" from __future__ import absolute_import, unicode_literals try: - import boto -except ImportError: # pragma: no cover - boto = Attributes = BatchResults = None # noqa - - class _void(object): - pass - regions = SQSConnection = Queue = _void - - RawMessage = Message = MHMessage = \ - EncodedMHMessage = JSONMessage = _void -else: - from boto.sqs.attributes import Attributes - from boto.sqs.batchresults import BatchResults - from boto.sqs.message import ( - EncodedMHMessage, Message, MHMessage, RawMessage, - ) - from boto.sqs import regions - from boto.sqs.jsonmessage import JSONMessage - from boto.sqs.connection import SQSConnection - from boto.sqs.queue import Queue - -__all__ = [ - 'Attributes', 'BatchResults', 'EncodedMHMessage', 'MHMessage', - 'Message', 'RawMessage', 'JSONMessage', 'SQSConnection', - 'Queue', 'regions', -] + import boto3 +except ImportError: + boto3 = None diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/__init__.py kombu-4.1.0/kombu/async/aws/sqs/__init__.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/__init__.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/async/aws/sqs/__init__.py 2017-07-10 04:45:59.000000000 +0000 @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals - -from kombu.async.aws.ext import boto, 
get_regions - -from .connection import AsyncSQSConnection - -__all__ = ['regions', 'connect_to_region'] - - -def regions(): - """Return list of known AWS regions.""" - if boto is None: - raise ImportError('boto is not installed') - return get_regions('sqs', connection_cls=AsyncSQSConnection) - - -def connect_to_region(region_name, **kwargs): - """Connect to specific AWS region.""" - for region in regions(): - if region.name == region_name: - return region.connect(**kwargs) diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/message.py kombu-4.1.0/kombu/async/aws/sqs/message.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/message.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/async/aws/sqs/message.py 2017-07-10 04:45:59.000000000 +0000 @@ -2,45 +2,34 @@ """Amazon SQS message implementation.""" from __future__ import absolute_import, unicode_literals -from .ext import ( - RawMessage, Message, MHMessage, EncodedMHMessage, JSONMessage, -) +import base64 -__all__ = [ - 'BaseAsyncMessage', 'AsyncRawMessage', 'AsyncMessage', - 'AsyncMHMessage', 'AsyncEncodedMHMessage', 'AsyncJSONMessage', -] +from kombu.message import Message +from kombu.utils.encoding import str_to_bytes -class BaseAsyncMessage(object): +class BaseAsyncMessage(Message): """Base class for messages received on async client.""" - def delete(self, callback=None): - if self.queue: - return self.queue.delete_message(self, callback) - - def change_visibility(self, visibility_timeout, callback=None): - if self.queue: - return self.queue.connection.change_message_visibility( - self.queue, self.receipt_handle, visibility_timeout, callback, - ) - -class AsyncRawMessage(BaseAsyncMessage, RawMessage): +class AsyncRawMessage(BaseAsyncMessage): """Raw Message.""" -class AsyncMessage(BaseAsyncMessage, Message): +class AsyncMessage(BaseAsyncMessage): """Serialized message.""" - -class AsyncMHMessage(BaseAsyncMessage, MHMessage): - """MHM Message (uhm, look that up later).""" - - -class AsyncEncodedMHMessage(BaseAsyncMessage, EncodedMHMessage): - """Encoded MH Message.""" - - -class AsyncJSONMessage(BaseAsyncMessage, JSONMessage): - """Json serialized message.""" + def encode(self, value): + """Encode/decode the value using Base64 encoding.""" + return base64.b64encode(str_to_bytes(value)).decode() + + def __getitem__(self, item): + """Support Boto3-style access on a message.""" + if item == 'ReceiptHandle': + return self.receipt_handle + elif item == 'Body': + return self.get_body() + elif item == 'queue': + return self.queue + else: + raise KeyError(item) diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/queue.py kombu-4.1.0/kombu/async/aws/sqs/queue.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/async/aws/sqs/queue.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/async/aws/sqs/queue.py 2017-07-10 04:45:59.000000000 +0000 @@ -4,7 +4,6 @@ from vine import transform -from .ext import Queue as _Queue from .message import AsyncMessage _all__ = ['AsyncQueue'] @@ -15,7 +14,7 @@ return rs[0] if len(rs) == 1 else None -class AsyncQueue(_Queue): +class AsyncQueue(): """Async SQS Queue.""" def __init__(self, connection=None, url=None, message_class=AsyncMessage): diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/http/base.py kombu-4.1.0/kombu/async/http/base.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/async/http/base.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/async/http/base.py 2017-07-10 04:45:59.000000000 +0000 @@ -200,6 +200,15 @@ self._body = self.buffer.getvalue() return self._body + # 
these are for compatibility with Requests + @property + def status_code(self): + return self.code + + @property + def content(self): + return self.body + @coro def header_parser(keyt=normalize_header): diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/http/curl.py kombu-4.1.0/kombu/async/http/curl.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/async/http/curl.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/async/http/curl.py 2017-07-10 04:45:59.000000000 +0000 @@ -171,15 +171,12 @@ code = curl.getinfo(_pycurl.HTTP_CODE) effective_url = curl.getinfo(_pycurl.EFFECTIVE_URL) buffer.seek(0) - try: - request = info['request'] - request.on_ready(self.Response( - request=request, code=code, headers=info['headers'], - buffer=buffer, effective_url=effective_url, error=error, - )) - except Exception as exc: - self.hub.on_callback_error(request.on_ready, exc) - raise + # try: + request = info['request'] + request.on_ready(self.Response( + request=request, code=code, headers=info['headers'], + buffer=buffer, effective_url=effective_url, error=error, + )) def _setup_request(self, curl, request, buffer, headers, _pycurl=pycurl): setopt = curl.setopt @@ -243,7 +240,7 @@ setopt(meth, True) if request.method in ('POST', 'PUT'): - body = request.body or '' + body = request.body.encode('utf-8') if request.body else bytes() reqbuffer = BytesIO(body) setopt(_pycurl.READFUNCTION, reqbuffer.read) if request.method == 'POST': diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/async/hub.py kombu-4.1.0/kombu/async/hub.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/async/hub.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/async/hub.py 2017-07-20 16:16:23.000000000 +0000 @@ -3,18 +3,17 @@ from __future__ import absolute_import, unicode_literals import errno - +import itertools from contextlib import contextmanager from time import sleep from types import GeneratorType as generator # noqa -from vine import Thenable, promise - from kombu.five import Empty, python_2_unicode_compatible, range from kombu.log import get_logger from kombu.utils.compat import fileno -from kombu.utils.eventio import READ, WRITE, ERR, poll +from kombu.utils.eventio import ERR, READ, WRITE, poll from kombu.utils.objects import cached_property +from vine import Thenable, promise from .timer import Timer @@ -269,14 +268,24 @@ consolidate = self.consolidate consolidate_callback = self.consolidate_callback on_tick = self.on_tick - todo = self._ready propagate = self.propagate_errors + todo = self._ready while 1: for tick_callback in on_tick: tick_callback() - while todo: + # To avoid infinite loop where one of the callables adds items + # to self._ready (via call_soon or otherwise), we take pop only + # N items from the ready set. + # N represents the current number of items on the set. + # That way if a todo adds another one to the ready set, + # we will break early and allow execution of readers and writers. + current_todos = len(todo) + for _ in itertools.repeat(None, current_todos): + if not todo: + break + item = todo.pop() if item: item() diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/clocks.py kombu-4.1.0/kombu/clocks.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/clocks.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/clocks.py 2017-07-10 04:45:59.000000000 +0000 @@ -90,8 +90,8 @@ * `Lamports distributed mutex`_ - .. _`Lamport Timestamps`: http://en.wikipedia.org/wiki/Lamport_timestamps - .. _`Lamports distributed mutex`: http://bit.ly/p99ybE + .. _`Lamport Timestamps`: https://en.wikipedia.org/wiki/Lamport_timestamps + .. 
_`Lamports distributed mutex`: https://bit.ly/p99ybE *Usage* diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/common.py kombu-4.1.0/kombu/common.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/common.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/common.py 2017-07-10 04:45:59.000000000 +0000 @@ -9,7 +9,7 @@ from contextlib import contextmanager from functools import partial from itertools import count -from uuid import uuid4, uuid3, NAMESPACE_OID +from uuid import uuid5, uuid4, uuid3, NAMESPACE_OID from amqp import RecoverableConnectionError @@ -50,7 +50,11 @@ def generate_oid(node_id, process_id, thread_id, instance): ent = bytes_if_py2('%x-%x-%x-%x' % ( node_id, process_id, thread_id, id(instance))) - return str(uuid3(NAMESPACE_OID, ent)) + try: + ret = str(uuid3(NAMESPACE_OID, ent)) + except ValueError: + ret = str(uuid5(NAMESPACE_OID, ent)) + return ret def oid_from(instance, threads=True): diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/compat.py kombu-4.1.0/kombu/compat.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/compat.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/compat.py 2017-07-10 04:45:59.000000000 +0000 @@ -1,6 +1,6 @@ """Carrot compatibility interface. -See http://packages.python.org/pypi/carrot for documentation. +See https://pypi.python.org/pypi/carrot for documentation. """ from __future__ import absolute_import, unicode_literals @@ -20,7 +20,7 @@ consumer.consume(no_ack=no_ack) for iteration in count(0): # for infinity if limit and iteration >= limit: - raise StopIteration + break yield connection.drain_events() @@ -166,7 +166,7 @@ item = self.fetch() if (not infinite and item is None) or \ (limit and items_since_start >= limit): - raise StopIteration + break yield item diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/connection.py kombu-4.1.0/kombu/connection.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/connection.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/connection.py 2017-07-10 04:45:59.000000000 +0000 @@ -816,7 +816,7 @@ require a channel. """ # make sure we're still connected, and if not refresh. - self.connection + self.ensure_connection() if self._default_channel is None: self._default_channel = self.channel() return self._default_channel diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/entity.py kombu-4.1.0/kombu/entity.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/entity.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/entity.py 2017-07-10 04:45:59.000000000 +0000 @@ -98,7 +98,7 @@ .. _`AMQP in 10 minutes: Part 4`: - http://bit.ly/amqp-exchange-types + https://bit.ly/2rcICv5 channel (ChannelT): The channel the exchange is bound to (if bound). 
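The kombu/common.py hunk above implements the changelog item "Added fallback to uuid.uuid5 in generate_oid if uuid.uuid3 fails". A rough sketch of the pattern, assuming uuid.uuid3 raises ValueError where the MD5 digest is unavailable (for example on FIPS-restricted systems); oid_for and its example argument are invented for illustration, while the real generate_oid derives the name from node, process, thread and instance ids:

    from uuid import NAMESPACE_OID, uuid3, uuid5

    def oid_for(name):
        # Prefer the MD5-based uuid3 so existing oids stay stable, but fall
        # back to the SHA-1-based uuid5 when MD5 cannot be used.
        try:
            return str(uuid3(NAMESPACE_OID, name))
        except ValueError:
            return str(uuid5(NAMESPACE_OID, name))

    print(oid_for('af10c-1f3a-9b2c-0'))

Both functions are deterministic for a given namespace and name, so the fallback only changes which hash family the resulting oid is derived from.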
diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/__init__.py kombu-4.1.0/kombu/__init__.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/__init__.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/__init__.py 2017-07-20 16:19:30.000000000 +0000 @@ -10,7 +10,7 @@ from collections import namedtuple # noqa -__version__ = '4.0.2' +__version__ = '4.1.0' __author__ = 'Ask Solem' __contact__ = 'ask@celeryproject.org' __homepage__ = 'https://kombu.readthedocs.io' diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/messaging.py kombu-4.1.0/kombu/messaging.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/messaging.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/messaging.py 2017-07-10 04:45:59.000000000 +0000 @@ -397,7 +397,8 @@ """Revive consumer after connection loss.""" self._active_tags.clear() channel = self.channel = maybe_channel(channel) - for qname, queue in items(self._queues): + # modify dict size while iterating over it is not allowed + for qname, queue in list(items(self._queues)): # name may have changed after declare self._queues.pop(qname, None) queue = self._queues[queue.name] = queue(self.channel) @@ -434,7 +435,7 @@ return self def __exit__(self, exc_type, exc_val, exc_tb): - if self.channel: + if self.channel and self.channel.connection: conn_errors = self.channel.connection.client.connection_errors if not isinstance(exc_val, conn_errors): try: diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/mixins.py kombu-4.1.0/kombu/mixins.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/mixins.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/mixins.py 2017-07-10 04:45:59.000000000 +0000 @@ -49,7 +49,7 @@ .. code-block:: python class Worker(ConsumerMixin): - task_queue = Queue('tasks', Exchange('tasks'), 'tasks')) + task_queue = Queue('tasks', Exchange('tasks'), 'tasks') def __init__(self, connection): self.connection = None diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/serialization.py kombu-4.1.0/kombu/serialization.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/serialization.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/serialization.py 2017-07-10 04:45:59.000000000 +0000 @@ -358,7 +358,7 @@ """Register msgpack serializer. See Also: - http://msgpack.sourceforge.net/. + https://msgpack.org/. """ pack = unpack = None try: diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/simple.py kombu-4.1.0/kombu/simple.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/simple.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/simple.py 2017-07-10 04:45:59.000000000 +0000 @@ -35,20 +35,36 @@ def get(self, block=True, timeout=None): if not block: return self.get_nowait() + self._consume() - elapsed = 0.0 + + time_start = monotonic() remaining = timeout while True: - time_start = monotonic() if self.buffer: return self.buffer.popleft() + + if remaining is not None and remaining <= 0.0: + raise self.Empty() + try: - self.channel.connection.client.drain_events( - timeout=timeout and remaining) + # The `drain_events` method will + # block on the socket connection to rabbitmq. if any + # application-level messages are received, it will put them + # into `self.buffer`. + # * The method will block for UP TO `timeout` milliseconds. + # * The method may raise a socket.timeout exception; or... + # * The method may return without having put anything on + # `self.buffer`. This is because internal heartbeat + # messages are sent over the same socket; also POSIX makes + # no guarantees against socket calls returning early. 
+ self.channel.connection.client.drain_events(timeout=remaining) except socket.timeout: raise self.Empty() - elapsed += monotonic() - time_start - remaining = timeout and timeout - elapsed or None + + if remaining is not None: + elapsed = monotonic() - time_start + remaining = timeout - elapsed def get_nowait(self): m = self.queue.get(no_ack=self.no_ack) diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/etcd.py kombu-4.1.0/kombu/transport/etcd.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/etcd.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/transport/etcd.py 2017-07-10 04:45:59.000000000 +0000 @@ -168,7 +168,7 @@ msg_content = loads(item['value']) self.client.delete(key=item['key']) return msg_content - except (TypeError, IndexError, etcd.EtcdError) as error: + except (TypeError, IndexError, etcd.EtcdException) as error: logger.debug('_get failed: {0}:{1}'.format(type(error), error)) raise Empty() @@ -233,11 +233,11 @@ super(Transport, self).__init__(*args, **kwargs) self.connection_errors = ( - virtual.Transport.connection_errors + (etcd.EtcdError, ) + virtual.Transport.connection_errors + (etcd.EtcdException, ) ) self.channel_errors = ( - virtual.Transport.channel_errors + (etcd.EtcdError, ) + virtual.Transport.channel_errors + (etcd.EtcdException, ) ) def verify_connection(self, connection): @@ -259,7 +259,6 @@ """Return the version of the etcd library. .. note:: - python-etcd has no __version__. This is a workaround. """ try: diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/__init__.py kombu-4.1.0/kombu/transport/__init__.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/__init__.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/transport/__init__.py 2017-07-10 04:45:59.000000000 +0000 @@ -28,6 +28,8 @@ 'sqs': 'kombu.transport.SQS:Transport', 'mongodb': 'kombu.transport.mongodb:Transport', 'zookeeper': 'kombu.transport.zookeeper:Transport', + 'sqlalchemy': 'kombu.transport.sqlalchemy:Transport', + 'sqla': 'kombu.transport.sqlalchemy:Transport', 'SLMQ': 'kombu.transport.SLMQ.Transport', 'slmq': 'kombu.transport.SLMQ.Transport', 'filesystem': 'kombu.transport.filesystem:Transport', diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/librabbitmq.py kombu-4.1.0/kombu/transport/librabbitmq.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/librabbitmq.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/transport/librabbitmq.py 2017-07-10 04:45:59.000000000 +0000 @@ -1,6 +1,6 @@ """`librabbitmq`_ transport. -.. _`librabbitmq`: http://pypi.python.org/librabbitmq/ +.. 
_`librabbitmq`: https://pypi.python.org/librabbitmq/ """ from __future__ import absolute_import, unicode_literals @@ -62,7 +62,8 @@ return body, properties def prepare_queue_arguments(self, arguments, **kwargs): - return to_rabbitmq_queue_arguments(arguments, **kwargs) + arguments = to_rabbitmq_queue_arguments(arguments, **kwargs) + return {k.encode('utf8'): v for k, v in items(arguments)} class Connection(amqp.Connection): diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/mongodb.py kombu-4.1.0/kombu/transport/mongodb.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/mongodb.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/transport/mongodb.py 2017-07-10 04:45:59.000000000 +0000 @@ -239,7 +239,7 @@ def _parse_uri(self, scheme='mongodb://'): # See mongodb uri documentation: - # http://docs.mongodb.org/manual/reference/connection-string/ + # https://docs.mongodb.org/manual/reference/connection-string/ client = self.connection.client hostname = client.hostname @@ -274,18 +274,21 @@ if self.connect_timeout else None), } options.update(parsed['options']) + options = self._prepare_client_options(options) return hostname, dbname, options def _prepare_client_options(self, options): if pymongo.version_tuple >= (3,): options.pop('auto_start_request', None) + if isinstance(options.get('readpreference'), int): + modes = pymongo.read_preferences._MONGOS_MODES + options['readpreference'] = modes[options['readpreference']] return options def _open(self, scheme='mongodb://'): - hostname, dbname, options = self._parse_uri(scheme=scheme) + hostname, dbname, conf = self._parse_uri(scheme=scheme) - conf = self._prepare_client_options(options) conf['host'] = hostname env = _detect_environment() diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/qpid.py kombu-4.1.0/kombu/transport/qpid.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/qpid.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/transport/qpid.py 2017-07-10 04:45:59.000000000 +0000 @@ -23,9 +23,9 @@ to underlying dependencies not being compatible. This version is tested and works with with Python 2.7. -.. _`Qpid`: http://qpid.apache.org/ -.. _`qpid-python`: http://pypi.python.org/pypi/qpid-python/ -.. _`qpid-tools`: http://pypi.python.org/pypi/qpid-tools/ +.. _`Qpid`: https://qpid.apache.org/ +.. _`qpid-python`: https://pypi.python.org/pypi/qpid-python/ +.. _`qpid-tools`: https://pypi.python.org/pypi/qpid-tools/ Authentication ============== @@ -850,7 +850,7 @@ except Empty: pass - def basic_ack(self, delivery_tag): + def basic_ack(self, delivery_tag, multiple=False): """Acknowledge a message by delivery_tag. Acknowledges a message referenced by delivery_tag. Messages can @@ -864,8 +864,12 @@ :param delivery_tag: The delivery tag associated with the message to be acknowledged. :type delivery_tag: uuid.UUID + :param multiple: not implemented. If set to True an AssertionError + is raised. 
+ :type multiple: bool """ + assert multiple is False self.qos.ack(delivery_tag) def basic_reject(self, delivery_tag, requeue=False): diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/redis.py kombu-4.1.0/kombu/transport/redis.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/redis.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/transport/redis.py 2017-07-20 16:16:23.000000000 +0000 @@ -894,7 +894,7 @@ pass host = connparams['host'] if '://' in host: - scheme, _, _, _, _, path, query = _parse_url(host) + scheme, _, _, _, password, path, query = _parse_url(host) if scheme == 'socket': connparams = self._filter_tcp_connparams(**connparams) connparams.update({ @@ -904,6 +904,7 @@ connparams.pop('socket_connect_timeout', None) connparams.pop('socket_keepalive', None) connparams.pop('socket_keepalive_options', None) + connparams['password'] = password connparams.pop('host', None) connparams.pop('port', None) @@ -1059,7 +1060,7 @@ `Connection` object). You must provide at least one option in Transport options: - * `service_name` - name of the redis group to poll + * `master_name` - name of the redis group to poll """ from_transport_options = Channel.from_transport_options + ( diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/sqlalchemy/__init__.py kombu-4.1.0/kombu/transport/sqlalchemy/__init__.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/sqlalchemy/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/kombu/transport/sqlalchemy/__init__.py 2017-07-10 04:45:59.000000000 +0000 @@ -0,0 +1,164 @@ +"""Kombu transport using SQLAlchemy as the message store.""" +# SQLAlchemy overrides != False to have special meaning and pep8 complains +# flake8: noqa + +from __future__ import absolute_import, unicode_literals + +from json import loads, dumps + +from sqlalchemy import create_engine +from sqlalchemy.exc import OperationalError +from sqlalchemy.orm import sessionmaker + +from kombu.five import Empty +from kombu.transport import virtual +from kombu.utils import cached_property +from kombu.utils.encoding import bytes_to_str +from .models import (ModelBase, Queue as QueueBase, Message as MessageBase, + class_registry, metadata) + + +VERSION = (1, 1, 0) +__version__ = '.'.join(map(str, VERSION)) + + +class Channel(virtual.Channel): + """The channel class.""" + + _session = None + _engines = {} # engine cache + + def __init__(self, connection, **kwargs): + self._configure_entity_tablenames(connection.client.transport_options) + super(Channel, self).__init__(connection, **kwargs) + + def _configure_entity_tablenames(self, opts): + self.queue_tablename = opts.get('queue_tablename', 'kombu_queue') + self.message_tablename = opts.get('message_tablename', 'kombu_message') + + # + # Define the model definitions. This registers the declarative + # classes with the active SQLAlchemy metadata object. This *must* be + # done prior to the ``create_engine`` call. 
+ # + self.queue_cls and self.message_cls + + def _engine_from_config(self): + conninfo = self.connection.client + transport_options = conninfo.transport_options.copy() + transport_options.pop('queue_tablename', None) + transport_options.pop('message_tablename', None) + return create_engine(conninfo.hostname, **transport_options) + + def _open(self): + conninfo = self.connection.client + if conninfo.hostname not in self._engines: + engine = self._engine_from_config() + Session = sessionmaker(bind=engine) + metadata.create_all(engine) + self._engines[conninfo.hostname] = engine, Session + return self._engines[conninfo.hostname] + + @property + def session(self): + if self._session is None: + _, Session = self._open() + self._session = Session() + return self._session + + def _get_or_create(self, queue): + obj = self.session.query(self.queue_cls) \ + .filter(self.queue_cls.name == queue).first() + if not obj: + obj = self.queue_cls(queue) + self.session.add(obj) + try: + self.session.commit() + except OperationalError: + self.session.rollback() + return obj + + def _new_queue(self, queue, **kwargs): + self._get_or_create(queue) + + def _put(self, queue, payload, **kwargs): + obj = self._get_or_create(queue) + message = self.message_cls(dumps(payload), obj) + self.session.add(message) + try: + self.session.commit() + except OperationalError: + self.session.rollback() + + def _get(self, queue): + obj = self._get_or_create(queue) + if self.session.bind.name == 'sqlite': + self.session.execute('BEGIN IMMEDIATE TRANSACTION') + try: + msg = self.session.query(self.message_cls) \ + .with_lockmode('update') \ + .filter(self.message_cls.queue_id == obj.id) \ + .filter(self.message_cls.visible != False) \ + .order_by(self.message_cls.sent_at) \ + .order_by(self.message_cls.id) \ + .limit(1) \ + .first() + if msg: + msg.visible = False + return loads(bytes_to_str(msg.payload)) + raise Empty() + finally: + self.session.commit() + + def _query_all(self, queue): + obj = self._get_or_create(queue) + return self.session.query(self.message_cls) \ + .filter(self.message_cls.queue_id == obj.id) + + def _purge(self, queue): + count = self._query_all(queue).delete(synchronize_session=False) + try: + self.session.commit() + except OperationalError: + self.session.rollback() + return count + + def _size(self, queue): + return self._query_all(queue).count() + + def _declarative_cls(self, name, base, ns): + if name in class_registry: + return class_registry[name] + return type(str(name), (base, ModelBase), ns) + + @cached_property + def queue_cls(self): + return self._declarative_cls( + 'Queue', + QueueBase, + {'__tablename__': self.queue_tablename} + ) + + @cached_property + def message_cls(self): + return self._declarative_cls( + 'Message', + MessageBase, + {'__tablename__': self.message_tablename} + ) + + +class Transport(virtual.Transport): + """The transport class.""" + + Channel = Channel + + can_parse_url = True + default_port = 0 + driver_type = 'sql' + driver_name = 'sqlalchemy' + connection_errors = (OperationalError, ) + + def driver_version(self): + import sqlalchemy + return sqlalchemy.__version__ diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/sqlalchemy/models.py kombu-4.1.0/kombu/transport/sqlalchemy/models.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/sqlalchemy/models.py 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/kombu/transport/sqlalchemy/models.py 2017-07-10 04:45:59.000000000 +0000 @@ -0,0 +1,67 @@ +"""Kombu transport using SQLAlchemy as the message store.""" +from 
__future__ import absolute_import, unicode_literals + +import datetime + +from sqlalchemy import (Column, Integer, String, Text, DateTime, + Sequence, Boolean, ForeignKey, SmallInteger) +from sqlalchemy.ext.declarative import declarative_base, declared_attr +from sqlalchemy.orm import relation +from sqlalchemy.schema import MetaData + +class_registry = {} +metadata = MetaData() +ModelBase = declarative_base(metadata=metadata, class_registry=class_registry) + + +class Queue(object): + """The queue class.""" + + __table_args__ = {'sqlite_autoincrement': True, 'mysql_engine': 'InnoDB'} + + id = Column(Integer, Sequence('queue_id_sequence'), primary_key=True, + autoincrement=True) + name = Column(String(200), unique=True) + + def __init__(self, name): + self.name = name + + def __str__(self): + return ''.format(self=self) + + @declared_attr + def messages(cls): + return relation('Message', backref='queue', lazy='noload') + + +class Message(object): + """The message class.""" + + __table_args__ = {'sqlite_autoincrement': True, 'mysql_engine': 'InnoDB'} + + id = Column(Integer, Sequence('message_id_sequence'), + primary_key=True, autoincrement=True) + visible = Column(Boolean, default=True, index=True) + sent_at = Column('timestamp', DateTime, nullable=True, index=True, + onupdate=datetime.datetime.now) + payload = Column(Text, nullable=False) + version = Column(SmallInteger, nullable=False, default=1) + + __mapper_args__ = {'version_id_col': version} + + def __init__(self, payload, queue): + self.payload = payload + self.queue = queue + + def __str__(self): + return ''.format(self) + + @declared_attr + def queue_id(self): + return Column( + Integer, + ForeignKey( + '%s.id' % class_registry['Queue'].__tablename__, + name='FK_kombu_message_queue' + ) + ) diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/SQS.py kombu-4.1.0/kombu/transport/SQS.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/SQS.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/kombu/transport/SQS.py 2017-07-20 16:16:23.000000000 +0000 @@ -9,7 +9,7 @@ SQS Features supported by this transport: Long Polling: - http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ + https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ sqs-long-polling.html Long polling is enabled by setting the `wait_time_seconds` transport @@ -17,7 +17,7 @@ enabled with 10 seconds by default. 
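A minimal sketch of enabling SQS long polling through ``transport_options`` as described above; the broker URL and credentials are placeholders, and only options visible in this patch (``wait_time_seconds``, ``queue_name_prefix``) are used:

.. code:: python

    from kombu import Connection

    # Placeholder credentials; long polling is controlled by the
    # `wait_time_seconds` transport option (10 seconds when omitted).
    connection = Connection(
        'sqs://AWS_ACCESS_KEY_ID:AWS_SECRET_ACCESS_KEY@',
        transport_options={
            'wait_time_seconds': 20,        # long-poll ReceiveMessage for up to 20s
            'queue_name_prefix': 'myapp-',  # optional prefix, also honoured by this transport
        },
    )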
Batch API Actions: - http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ + https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ sqs-batch-api.html The default behavior of the SQS Channel.drain_events() method is to @@ -37,17 +37,17 @@ from __future__ import absolute_import, unicode_literals +import base64 import socket import string +import uuid from vine import transform, ensure_promise, promise from kombu.async import get_event_loop -from kombu.async.aws import sqs as _asynsqs -from kombu.async.aws.ext import boto, exception -from kombu.async.aws.sqs.connection import AsyncSQSConnection, SQSConnection -from kombu.async.aws.sqs.ext import regions -from kombu.async.aws.sqs.message import Message +from kombu.async.aws.ext import boto3, exceptions +from kombu.async.aws.sqs.connection import AsyncSQSConnection +from kombu.async.aws.sqs.message import AsyncMessage from kombu.five import Empty, range, string_t, text_t from kombu.log import get_logger from kombu.utils import scheduling @@ -91,8 +91,8 @@ _noack_queues = set() def __init__(self, *args, **kwargs): - if boto is None: - raise ImportError('boto is not installed') + if boto3 is None: + raise ImportError('boto3 is not installed') super(Channel, self).__init__(*args, **kwargs) # SQS blows up if you try to create a new queue when one already @@ -104,18 +104,10 @@ self.hub = kwargs.get('hub') or get_event_loop() def _update_queue_cache(self, queue_name_prefix): - try: - queues = self.sqs.get_all_queues(prefix=queue_name_prefix) - except exception.SQSError as exc: - if exc.status == 403: - raise RuntimeError( - 'SQS authorization error, access_key={0}'.format( - self.sqs.access_key)) - raise - else: - self._queue_cache.update({ - queue.name: queue for queue in queues - }) + resp = self.sqs.list_queues(QueueNamePrefix=queue_name_prefix) + for url in resp.get('QueueUrls', []): + queue_name = url.split('/')[-1] + self._queue_cache[queue_name] = url def basic_consume(self, queue, no_ack, *args, **kwargs): if no_ack: @@ -132,7 +124,7 @@ self._noack_queues.discard(queue) return super(Channel, self).basic_cancel(consumer_tag) - def drain_events(self, timeout=None): + def drain_events(self, timeout=None, callback=None, **kwargs): """Return a single payload message from one of our queues. Raises: @@ -143,7 +135,7 @@ raise Empty() # At this point, go and get more messages from SQS - self._poll(self.cycle, self.connection._deliver, timeout=timeout) + self._poll(self.cycle, callback, timeout=timeout) def _reset_cycle(self): """Reset the consume cycle. @@ -160,7 +152,15 @@ def entity_name(self, name, table=CHARS_REPLACE_TABLE): """Format AMQP queue name into a legal SQS queue name.""" - return text_t(safe_str(name)).translate(table) + if name.endswith('.fifo'): + partial = name.rstrip('.fifo') + partial = text_t(safe_str(partial)).translate(table) + return partial + '.fifo' + else: + return text_t(safe_str(name)).translate(table) + + def canonical_queue_name(self, queue_name): + return self.entity_name(self.queue_name_prefix + queue_name) def _new_queue(self, queue, **kwargs): """Ensure a queue with given name exists in SQS.""" @@ -168,7 +168,7 @@ return queue # Translate to SQS name for consistency with initial # _queue_cache population. - queue = self.entity_name(self.queue_name_prefix + queue) + queue = self.canonical_queue_name(queue) # The SQS ListQueues method only returns 1000 queues. 
When you have # so many queues, it's possible that the queue you are looking for is @@ -179,10 +179,14 @@ try: return self._queue_cache[queue] except KeyError: - q = self._queue_cache[queue] = self.sqs.create_queue( - queue, self.visibility_timeout, - ) - return q + attributes = {'VisibilityTimeout': str(self.visibility_timeout)} + if queue.endswith('.fifo'): + attributes['FifoQueue'] = 'true' + + resp = self._queue_cache[queue] = self.sqs.create_queue( + QueueName=queue, Attributes=attributes) + self._queue_cache[queue] = resp['QueueUrl'] + return resp['QueueUrl'] def _delete(self, queue, *args, **kwargs): """Delete queue by name.""" @@ -191,15 +195,28 @@ def _put(self, queue, message, **kwargs): """Put message onto queue.""" - q = self._new_queue(queue) - m = Message() - m.set_body(dumps(message)) - q.write(m) + q_url = self._new_queue(queue) + kwargs = {'QueueUrl': q_url, + 'MessageBody': AsyncMessage().encode(dumps(message))} + if queue.endswith('.fifo'): + if 'MessageGroupId' in message['properties']: + kwargs['MessageGroupId'] = \ + message['properties']['MessageGroupId'] + else: + kwargs['MessageGroupId'] = 'default' + if 'MessageDeduplicationId' in message['properties']: + kwargs['MessageDeduplicationId'] = \ + message['properties']['MessageDeduplicationId'] + else: + kwargs['MessageDeduplicationId'] = str(uuid.uuid4()) + self.sqs.send_message(**kwargs) def _message_to_python(self, message, queue_name, queue): - payload = loads(bytes_to_str(message.get_body())) + body = base64.b64decode(message['Body'].encode()) + payload = loads(bytes_to_str(body)) if queue_name in self._noack_queues: - queue.delete_message(message) + queue = self._new_queue(queue_name) + self.asynsqs.delete_message(queue, message['ReceiptHandle']) else: try: properties = payload['properties'] @@ -209,14 +226,14 @@ delivery_info = {} properties = {'delivery_info': delivery_info} payload.update({ - 'body': bytes_to_str(message.get_body()), + 'body': bytes_to_str(body), 'properties': properties, }) # set delivery tag to SQS receipt handle delivery_info.update({ 'sqs_message': message, 'sqs_queue': queue, }) - properties['delivery_tag'] = message.receipt_handle + properties['delivery_tag'] = message['ReceiptHandle'] return payload def _messages_to_python(self, messages, queue): @@ -261,23 +278,32 @@ # drain_events calls `can_consume` first, consuming # a token, so we know that we are allowed to consume at least # one message. 
- maxcount = self._get_message_estimate() - if maxcount: - q = self._new_queue(queue) - messages = q.get_messages(num_messages=maxcount) - if messages: - for msg in self._messages_to_python(messages, queue): + # Note: ignoring max_messages for SQS with boto3 + max_count = self._get_message_estimate() + if max_count: + q_url = self._new_queue(queue) + resp = self.sqs.receive_message( + QueueUrl=q_url, MaxNumberOfMessages=max_count, + WaitTimeSeconds=self.wait_time_seconds) + if resp.get('Messages'): + for m in resp['Messages']: + m['Body'] = AsyncMessage(body=m['Body']).decode() + for msg in self._messages_to_python(resp['Messages'], queue): self.connection._deliver(msg, queue) return raise Empty() def _get(self, queue): """Try to retrieve a single message off ``queue``.""" - q = self._new_queue(queue) - messages = q.get_messages(num_messages=1) - if messages: - return self._messages_to_python(messages, queue)[0] + q_url = self._new_queue(queue) + resp = self.sqs.receive_message( + QueueUrl=q_url, MaxNumberOfMessages=1, + WaitTimeSeconds=self.wait_time_seconds) + if resp.get('Messages'): + body = AsyncMessage(body=resp['Messages'][0]['Body']).decode() + resp['Messages'][0]['Body'] = body + return self._messages_to_python(resp['Messages'], queue)[0] raise Empty() def _loop1(self, queue, _=None): @@ -311,17 +337,18 @@ def _get_async(self, queue, count=1, callback=None): q = self._new_queue(queue) + qname = self.canonical_queue_name(queue) return self._get_from_sqs( - q, count=count, connection=self.asynsqs, + qname, count=count, connection=self.asynsqs, callback=transform(self._on_messages_ready, callback, q, queue), ) def _on_messages_ready(self, queue, qname, messages): - if messages: + if 'Messages' in messages and messages['Messages']: callbacks = self.connection._callbacks - for raw_message in messages: - message = self._message_to_python(raw_message, qname, queue) - callbacks[qname](message) + for msg in messages['Messages']: + msg_parsed = self._message_to_python(msg, qname, queue) + callbacks[qname](msg_parsed) def _get_from_sqs(self, queue, count=1, connection=None, callback=None): @@ -344,72 +371,69 @@ return super(Channel, self)._restore(message) def basic_ack(self, delivery_tag, multiple=False): - delivery_info = self.qos.get(delivery_tag).delivery_info try: - queue = delivery_info['sqs_queue'] + message = self.qos.get(delivery_tag).delivery_info + sqs_message = message['sqs_message'] except KeyError: pass else: - queue.delete_message(delivery_info['sqs_message']) + self.asynsqs.delete_message(message['sqs_queue'], + sqs_message['ReceiptHandle']) super(Channel, self).basic_ack(delivery_tag) def _size(self, queue): """Return the number of messages in a queue.""" - return self._new_queue(queue).count() + url = self._new_queue(queue) + resp = self.sqs.get_queue_attributes( + QueueUrl=url, + AttributeNames=['ApproximateNumberOfMessages']) + return int(resp['Attributes']['ApproximateNumberOfMessages']) def _purge(self, queue): """Delete all current messages in a queue.""" q = self._new_queue(queue) # SQS is slow at registering messages, so run for a few - # iterations to ensure messages are deleted. + # iterations to ensure messages are detected and deleted. size = 0 for i in range(10): - size += q.count() + size += int(self._size(queue)) if not size: break - q.clear() + self.sqs.purge_queue(QueueUrl=q) return size def close(self): super(Channel, self).close() - for conn in (self._sqs, self._asynsqs): - if conn: - try: - conn.close() - except AttributeError as exc: # FIXME ??? 
- if "can't set attribute" not in str(exc): - raise - - def _get_regioninfo(self, regions): - if self.regioninfo: - return self.regioninfo - if self.region: - for _r in regions: - if _r.name == self.region: - return _r - - def _aws_connect_to(self, fun, regions): - conninfo = self.conninfo - region = self._get_regioninfo(regions) - is_secure = self.is_secure if self.is_secure is not None else True - port = self.port if self.port is not None else conninfo.port - return fun(region=region, - aws_access_key_id=conninfo.userid, - aws_secret_access_key=conninfo.password, - is_secure=is_secure, - port=port) + # if self._asynsqs: + # try: + # self.asynsqs.close() + # except AttributeError as exc: # FIXME ??? + # if "can't set attribute" not in str(exc): + # raise @property def sqs(self): if self._sqs is None: - self._sqs = self._aws_connect_to(SQSConnection, regions()) + session = boto3.session.Session( + region_name=self.region, + aws_access_key_id=self.conninfo.userid, + aws_secret_access_key=self.conninfo.password, + ) + is_secure = self.is_secure if self.is_secure is not None else True + client_kwargs = dict( + use_ssl=is_secure + ) + if self.endpoint_url is not None: + client_kwargs['endpoint_url'] = self.endpoint_url + self._sqs = session.client('sqs', **client_kwargs) return self._sqs @property def asynsqs(self): if self._asynsqs is None: - self._asynsqs = self._aws_connect_to( - AsyncSQSConnection, _asynsqs.regions(), + self._asynsqs = AsyncSQSConnection( + sqs_connection=self.sqs, + region=self.region ) return self._asynsqs @@ -451,6 +475,20 @@ return self.transport_options.get('port') @cached_property + def endpoint_url(self): + if self.conninfo.hostname is not None: + scheme = 'https' if self.is_secure else 'http' + if self.conninfo.port is not None: + port = ':{}'.format(self.conninfo.port) + else: + port = '' + return '{}://{}{}'.format( + scheme, + self.conninfo.hostname, + port + ) + + @cached_property def wait_time_seconds(self): return self.transport_options.get('wait_time_seconds', self.default_wait_time_seconds) @@ -466,10 +504,10 @@ default_port = None connection_errors = ( virtual.Transport.connection_errors + - (exception.SQSError, socket.error) + (exceptions.BotoCoreError, socket.error) ) channel_errors = ( - virtual.Transport.channel_errors + (exception.SQSDecodeError,) + virtual.Transport.channel_errors + (exceptions.BotoCoreError,) ) driver_type = 'sqs' driver_name = 'sqs' @@ -478,3 +516,7 @@ async=True, exchange_type=frozenset(['direct']), ) + + @property + def default_connection_params(self): + return {'port': self.default_port} diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/virtual/exchange.py kombu-4.1.0/kombu/transport/virtual/exchange.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/virtual/exchange.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/transport/virtual/exchange.py 2017-07-10 04:45:59.000000000 +0000 @@ -107,7 +107,7 @@ def key_to_pattern(self, rkey): """Get the corresponding regex for any routing key.""" - return '^%s$' % ('\.'.join( + return '^%s$' % (r'\.'.join( self.wildcards.get(word, word) for word in escape_regex(rkey, '.#*').split('.') )) diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/transport/zookeeper.py kombu-4.1.0/kombu/transport/zookeeper.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/transport/zookeeper.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/transport/zookeeper.py 2017-07-10 04:45:59.000000000 +0000 @@ -27,8 +27,8 @@ import socket from kombu.five import Empty -from kombu.utils.encoding import 
bytes_to_str -from kombu.utils.json import loads, dumps +from kombu.utils.encoding import bytes_to_str, ensure_bytes +from kombu.utils.json import dumps, loads from . import virtual @@ -67,8 +67,8 @@ socket.error, ) except ImportError: - kazoo = None # noqa - KZ_CONNECTION_ERRORS = KZ_CHANNEL_ERRORS = () # noqa + kazoo = None # noqa + KZ_CONNECTION_ERRORS = KZ_CHANNEL_ERRORS = () # noqa DEFAULT_PORT = 2181 @@ -81,8 +81,13 @@ _client = None _queues = {} + def __init__(self, connection, **kwargs): + super(Channel, self).__init__(connection, **kwargs) + vhost = self.connection.client.virtual_host + self._vhost = '/{}'.format(vhost.strip('/')) + def _get_path(self, queue_name): - return os.path.join(self.vhost, queue_name) + return os.path.join(self._vhost, queue_name) def _get_queue(self, queue_name): queue = self._queues.get(queue_name, None) @@ -98,7 +103,7 @@ def _put(self, queue, message, **kwargs): return self._get_queue(queue).put( - dumps(message), + ensure_bytes(dumps(message)), priority=self._get_message_priority(message, reverse=True), ) @@ -141,7 +146,6 @@ def _open(self): conninfo = self.connection.client - self.vhost = os.path.join('/', conninfo.virtual_host[0:-1]) hosts = [] if conninfo.alt: for host_port in conninfo.alt: diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/utils/compat.py kombu-4.1.0/kombu/utils/compat.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/utils/compat.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/utils/compat.py 2017-07-10 04:45:59.000000000 +0000 @@ -36,6 +36,7 @@ _environment = None + def coro(gen): """Decorator to mark generator as co-routine.""" @wraps(gen) @@ -79,6 +80,7 @@ _environment = _detect_environment() return _environment + def entrypoints(namespace): """Return setuptools entrypoints for namespace.""" try: diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu/utils/limits.py kombu-4.1.0/kombu/utils/limits.py --- kombu-4.0.2+really4.0.2+dfsg/kombu/utils/limits.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/kombu/utils/limits.py 2017-07-10 04:45:59.000000000 +0000 @@ -12,10 +12,10 @@ """Token Bucket Algorithm. See Also: - http://en.wikipedia.org/wiki/Token_Bucket + https://en.wikipedia.org/wiki/Token_Bucket Most of this code was stolen from an entry in the ASPN Python Cookbook: - http://code.activestate.com/recipes/511490/ + https://code.activestate.com/recipes/511490/ Warning: Thread Safety: This implementation is not thread safe. diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/dependency_links.txt kombu-4.1.0/kombu.egg-info/dependency_links.txt --- kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/dependency_links.txt 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/kombu.egg-info/dependency_links.txt 2017-07-20 16:22:10.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/not-zip-safe kombu-4.1.0/kombu.egg-info/not-zip-safe --- kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/not-zip-safe 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/kombu.egg-info/not-zip-safe 2017-07-20 16:22:10.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/PKG-INFO kombu-4.1.0/kombu.egg-info/PKG-INFO --- kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/PKG-INFO 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/kombu.egg-info/PKG-INFO 2017-07-20 16:22:10.000000000 +0000 @@ -0,0 +1,388 @@ +Metadata-Version: 1.1 +Name: kombu +Version: 4.1.0 +Summary: Messaging library for Python. 
+Home-page: https://kombu.readthedocs.io +Author: Ask Solem +Author-email: ask@celeryproject.org +License: BSD +Description: ======================================== + kombu - Messaging library for Python + ======================================== + + |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| + + :Version: 4.1.0 + :Web: http://kombu.me/ + :Download: http://pypi.python.org/pypi/kombu/ + :Source: https://github.com/celery/kombu/ + :Keywords: messaging, amqp, rabbitmq, redis, mongodb, python, queue + + About + ===== + + `Kombu` is a messaging library for Python. + + The aim of `Kombu` is to make messaging in Python as easy as possible by + providing an idiomatic high-level interface for the AMQ protocol, and also + provide proven and tested solutions to common messaging problems. + + `AMQP`_ is the Advanced Message Queuing Protocol, an open standard protocol + for message orientation, queuing, routing, reliability and security, + for which the `RabbitMQ`_ messaging server is the most popular implementation. + + Features + ======== + + * Allows application authors to support several message server + solutions by using pluggable transports. + + * AMQP transport using the `py-amqp`_, `librabbitmq`_, or `qpid-python`_ libraries. + + * High performance AMQP transport written in C - when using `librabbitmq`_ + + This is automatically enabled if librabbitmq is installed: + + :: + + $ pip install librabbitmq + + * Virtual transports makes it really easy to add support for non-AMQP + transports. There is already built-in support for `Redis`_, + `Amazon SQS`_, `ZooKeeper`_, `SoftLayer MQ`_ and `Pyro`_. + + * In-memory transport for unit testing. + + * Supports automatic encoding, serialization and compression of message + payloads. + + * Consistent exception handling across transports. + + * The ability to ensure that an operation is performed by gracefully + handling connection and channel errors. + + * Several annoyances with `amqplib`_ has been fixed, like supporting + timeouts and the ability to wait for events on more than one channel. + + * Projects already using `carrot`_ can easily be ported by using + a compatibility layer. + + For an introduction to AMQP you should read the article `Rabbits and warrens`_, + and the `Wikipedia article about AMQP`_. + + .. _`RabbitMQ`: https://www.rabbitmq.com/ + .. _`AMQP`: https://amqp.org + .. _`py-amqp`: https://pypi.python.org/pypi/amqp/ + .. _`qpid-python`: https://pypi.python.org/pypi/qpid-python/ + .. _`Redis`: https://redis.io + .. _`Amazon SQS`: https://aws.amazon.com/sqs/ + .. _`Zookeeper`: https://zookeeper.apache.org/ + .. _`Rabbits and warrens`: http://blogs.digitar.com/jjww/2009/01/rabbits-and-warrens/ + .. _`amqplib`: https://barryp.org/software/py-amqplib/ + .. _`Wikipedia article about AMQP`: https://en.wikipedia.org/wiki/AMQP + .. _`carrot`: https://pypi.python.org/pypi/carrot/ + .. _`librabbitmq`: https://pypi.python.org/pypi/librabbitmq + .. _`Pyro`: https://pythonhosting.org/Pyro4 + .. _`SoftLayer MQ`: https://sldn.softlayer.com/reference/messagequeueapi + + .. 
_transport-comparison: + + Transport Comparison + ==================== + + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + | **Client** | **Type** | **Direct** | **Topic** | **Fanout** | **Priority** | **TTL** | + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + | *amqp* | Native | Yes | Yes | Yes | Yes [#f3]_ | Yes [#f4]_ | + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + | *qpid* | Native | Yes | Yes | Yes | No | No | + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + | *redis* | Virtual | Yes | Yes | Yes (PUB/SUB) | Yes | No | + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + | *mongodb* | Virtual | Yes | Yes | Yes | Yes | Yes | + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + | *SQS* | Virtual | Yes | Yes [#f1]_ | Yes [#f2]_ | No | No | + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + | *zookeeper* | Virtual | Yes | Yes [#f1]_ | No | Yes | No | + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + | *in-memory* | Virtual | Yes | Yes [#f1]_ | No | No | No | + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + | *SLMQ* | Virtual | Yes | Yes [#f1]_ | No | No | No | + +---------------+----------+------------+------------+---------------+--------------+-----------------------+ + + + .. [#f1] Declarations only kept in memory, so exchanges/queues + must be declared by all clients that needs them. + + .. [#f2] Fanout supported via storing routing tables in SimpleDB. + Disabled by default, but can be enabled by using the + ``supports_fanout`` transport option. + + .. [#f3] AMQP Message priority support depends on broker implementation. + + .. [#f4] AMQP Message/Queue TTL support depends on broker implementation. + + Documentation + ------------- + + Kombu is using Sphinx, and the latest documentation can be found here: + + https://kombu.readthedocs.io/ + + Quick overview + -------------- + + .. code:: python + + from kombu import Connection, Exchange, Queue + + media_exchange = Exchange('media', 'direct', durable=True) + video_queue = Queue('video', exchange=media_exchange, routing_key='video') + + def process_media(body, message): + print body + message.ack() + + # connections + with Connection('amqp://guest:guest@localhost//') as conn: + + # produce + producer = conn.Producer(serializer='json') + producer.publish({'name': '/tmp/lolcat1.avi', 'size': 1301013}, + exchange=media_exchange, routing_key='video', + declare=[video_queue]) + + # the declare above, makes sure the video queue is declared + # so that the messages can be delivered. + # It's a best practice in Kombu to have both publishers and + # consumers declare the queue. 
You can also declare the + # queue manually using: + # video_queue(conn).declare() + + # consume + with conn.Consumer(video_queue, callbacks=[process_media]) as consumer: + # Process messages and handle events on all channels + while True: + conn.drain_events() + + # Consume from several queues on the same channel: + video_queue = Queue('video', exchange=media_exchange, key='video') + image_queue = Queue('image', exchange=media_exchange, key='image') + + with connection.Consumer([video_queue, image_queue], + callbacks=[process_media]) as consumer: + while True: + connection.drain_events() + + + Or handle channels manually: + + .. code:: python + + with connection.channel() as channel: + producer = Producer(channel, ...) + consumer = Producer(channel) + + + All objects can be used outside of with statements too, + just remember to close the objects after use: + + .. code:: python + + from kombu import Connection, Consumer, Producer + + connection = Connection() + # ... + connection.release() + + consumer = Consumer(channel_or_connection, ...) + consumer.register_callback(my_callback) + consumer.consume() + # .... + consumer.cancel() + + + `Exchange` and `Queue` are simply declarations that can be pickled + and used in configuration files etc. + + They also support operations, but to do so they need to be bound + to a channel. + + Binding exchanges and queues to a connection will make it use + that connections default channel. + + :: + + >>> exchange = Exchange('tasks', 'direct') + + >>> connection = Connection() + >>> bound_exchange = exchange(connection) + >>> bound_exchange.delete() + + # the original exchange is not affected, and stays unbound. + >>> exchange.delete() + raise NotBoundError: Can't call delete on Exchange not bound to + a channel. + + Terminology + =========== + + There are some concepts you should be familiar with before starting: + + * Producers + + Producers sends messages to an exchange. + + * Exchanges + + Messages are sent to exchanges. Exchanges are named and can be + configured to use one of several routing algorithms. The exchange + routes the messages to consumers by matching the routing key in the + message with the routing key the consumer provides when binding to + the exchange. + + * Consumers + + Consumers declares a queue, binds it to a exchange and receives + messages from it. + + * Queues + + Queues receive messages sent to exchanges. The queues are declared + by consumers. + + * Routing keys + + Every message has a routing key. The interpretation of the routing + key depends on the exchange type. There are four default exchange + types defined by the AMQP standard, and vendors can define custom + types (so see your vendors manual for details). + + These are the default exchange types defined by AMQP/0.8: + + * Direct exchange + + Matches if the routing key property of the message and + the `routing_key` attribute of the consumer are identical. + + * Fan-out exchange + + Always matches, even if the binding does not have a routing + key. + + * Topic exchange + + Matches the routing key property of the message by a primitive + pattern matching scheme. The message routing key then consists + of words separated by dots (`"."`, like domain names), and + two special characters are available; star (`"*"`) and hash + (`"#"`). The star matches any word, and the hash matches + zero or more words. For example `"*.stock.#"` matches the + routing keys `"usd.stock"` and `"eur.stock.db"` but not + `"stock.nasdaq"`. 
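A standalone illustration of the topic-matching semantics described above, using a hand-translated regular expression for the routing pattern ``"*.stock.#"``; this is only a sketch of the semantics, not kombu's implementation (which lives in ``kombu/transport/virtual/exchange.py``):

.. code:: python

    import re

    # '*' matches exactly one dot-separated word; '#' matches zero or more
    # trailing words, so the dot in front of it is made optional.
    pattern = re.compile(r'^[^.]+\.stock(?:\..+)?$')

    for key in ('usd.stock', 'eur.stock.db', 'stock.nasdaq'):
        print(key, bool(pattern.match(key)))
    # -> usd.stock True, eur.stock.db True, stock.nasdaq False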
+ + + Installation + ============ + + You can install `Kombu` either via the Python Package Index (PyPI) + or from source. + + To install using `pip`,: + + :: + + $ pip install kombu + + To install using `easy_install`,: + + :: + + $ easy_install kombu + + If you have downloaded a source tarball you can install it + by doing the following,: + + :: + + $ python setup.py build + # python setup.py install # as root + + + + Getting Help + ============ + + Mailing list + ------------ + + Join the `carrot-users`_ mailing list. + + .. _`carrot-users`: https://groups.google.com/group/carrot-users/ + + Bug tracker + =========== + + If you have any suggestions, bug reports or annoyances please report them + to our issue tracker at https://github.com/celery/kombu/issues/ + + Contributing + ============ + + Development of `Kombu` happens at Github: https://github.com/celery/kombu + + You are highly encouraged to participate in the development. If you don't + like Github (for some reason) you're welcome to send regular patches. + + License + ======= + + This software is licensed under the `New BSD License`. See the `LICENSE` + file in the top distribution directory for the full license text. + + + .. |build-status| image:: https://secure.travis-ci.org/celery/kombu.png?branch=master + :alt: Build status + :target: https://travis-ci.org/celery/kombu + + .. |coverage| image:: https://codecov.io/github/celery/kombu/coverage.svg?branch=master + :target: https://codecov.io/github/celery/kombu?branch=master + + .. |license| image:: https://img.shields.io/pypi/l/kombu.svg + :alt: BSD License + :target: https://opensource.org/licenses/BSD-3-Clause + + .. |wheel| image:: https://img.shields.io/pypi/wheel/kombu.svg + :alt: Kombu can be installed via wheel + :target: https://pypi.python.org/pypi/kombu/ + + .. |pyversion| image:: https://img.shields.io/pypi/pyversions/kombu.svg + :alt: Supported Python versions. + :target: https://pypi.python.org/pypi/kombu/ + + .. |pyimp| image:: https://img.shields.io/pypi/implementation/kombu.svg + :alt: Support Python implementations. 
+ :target: https://pypi.python.org/pypi/kombu/ + -- + + +Keywords: messaging message amqp rabbitmq redis actor producer consumer +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: Implementation :: Jython +Classifier: Intended Audience :: Developers +Classifier: Topic :: Communications +Classifier: Topic :: System :: Distributed Computing +Classifier: Topic :: System :: Networking +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/requires.txt kombu-4.1.0/kombu.egg-info/requires.txt --- kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/requires.txt 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/kombu.egg-info/requires.txt 2017-07-20 16:22:10.000000000 +0000 @@ -0,0 +1,39 @@ +amqp<3.0,>=2.1.4 + +[consul] +python-consul>=0.6.0 + +[librabbitmq] +librabbitmq>=1.5.2 + +[mongodb] +pymongo<3.0,>=2.6.2 + +[msgpack] +msgpack-python>=0.4.7 + +[pyro] +pyro4 + +[qpid] +qpid-python>=0.26 +qpid-tools>=0.26 + +[redis] +redis>=2.8.0 + +[slmq] +softlayer_messaging>=1.0.3 + +[sqlalchemy] +sqlalchemy + +[sqs] +boto3>=1.4.4 +pycurl + +[yaml] +PyYAML>=3.10 + +[zookeeper] +kazoo>=1.3.1 diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/SOURCES.txt kombu-4.1.0/kombu.egg-info/SOURCES.txt --- kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/SOURCES.txt 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/kombu.egg-info/SOURCES.txt 2017-07-20 16:22:10.000000000 +0000 @@ -0,0 +1,326 @@ +AUTHORS +Changelog +FAQ +INSTALL +LICENSE +MANIFEST.in +README.rst +THANKS +TODO +setup.cfg +setup.py +docs/Makefile +docs/changelog.rst +docs/conf.py +docs/faq.rst +docs/index.rst +docs/introduction.rst +docs/make.bat +docs/_ext/.keep +docs/_static/.keep +docs/_templates/sidebardonations.html +docs/images/favicon.ico +docs/images/kombu.jpg +docs/images/kombusmall.jpg +docs/includes/installation.txt +docs/includes/introduction.txt +docs/includes/resources.txt +docs/reference/index.rst +docs/reference/kombu.abstract.rst +docs/reference/kombu.async.aws.connection.rst +docs/reference/kombu.async.aws.rst +docs/reference/kombu.async.aws.sqs.connection.rst +docs/reference/kombu.async.aws.sqs.message.rst +docs/reference/kombu.async.aws.sqs.queue.rst +docs/reference/kombu.async.aws.sqs.rst +docs/reference/kombu.async.debug.rst +docs/reference/kombu.async.http.base.rst +docs/reference/kombu.async.http.curl.rst +docs/reference/kombu.async.http.rst +docs/reference/kombu.async.hub.rst +docs/reference/kombu.async.rst +docs/reference/kombu.async.semaphore.rst +docs/reference/kombu.async.timer.rst +docs/reference/kombu.clocks.rst +docs/reference/kombu.common.rst +docs/reference/kombu.compat.rst +docs/reference/kombu.compression.rst +docs/reference/kombu.connection.rst +docs/reference/kombu.exceptions.rst +docs/reference/kombu.five.rst +docs/reference/kombu.log.rst +docs/reference/kombu.message.rst +docs/reference/kombu.mixins.rst 
+docs/reference/kombu.pidbox.rst +docs/reference/kombu.pools.rst +docs/reference/kombu.resource.rst +docs/reference/kombu.rst +docs/reference/kombu.serialization.rst +docs/reference/kombu.simple.rst +docs/reference/kombu.transport.SLMQ.rst +docs/reference/kombu.transport.SQS.rst +docs/reference/kombu.transport.base.rst +docs/reference/kombu.transport.consul.rst +docs/reference/kombu.transport.etcd.rst +docs/reference/kombu.transport.filesystem.rst +docs/reference/kombu.transport.librabbitmq.rst +docs/reference/kombu.transport.memory.rst +docs/reference/kombu.transport.mongodb.rst +docs/reference/kombu.transport.pyamqp.rst +docs/reference/kombu.transport.pyro.rst +docs/reference/kombu.transport.qpid.rst +docs/reference/kombu.transport.redis.rst +docs/reference/kombu.transport.rst +docs/reference/kombu.transport.sqlalchemy.models.rst +docs/reference/kombu.transport.sqlalchemy.rst +docs/reference/kombu.transport.virtual.exchange.rst +docs/reference/kombu.transport.virtual.rst +docs/reference/kombu.transport.zookeeper.rst +docs/reference/kombu.utils.amq_manager.rst +docs/reference/kombu.utils.collections.rst +docs/reference/kombu.utils.compat.rst +docs/reference/kombu.utils.debug.rst +docs/reference/kombu.utils.div.rst +docs/reference/kombu.utils.encoding.rst +docs/reference/kombu.utils.eventio.rst +docs/reference/kombu.utils.functional.rst +docs/reference/kombu.utils.imports.rst +docs/reference/kombu.utils.json.rst +docs/reference/kombu.utils.limits.rst +docs/reference/kombu.utils.objects.rst +docs/reference/kombu.utils.scheduling.rst +docs/reference/kombu.utils.text.rst +docs/reference/kombu.utils.time.rst +docs/reference/kombu.utils.url.rst +docs/reference/kombu.utils.uuid.rst +docs/templates/readme.txt +docs/userguide/connections.rst +docs/userguide/consumers.rst +docs/userguide/examples.rst +docs/userguide/index.rst +docs/userguide/introduction.rst +docs/userguide/pools.rst +docs/userguide/producers.rst +docs/userguide/serialization.rst +docs/userguide/simple.rst +examples/complete_receive.py +examples/complete_send.py +examples/hello_consumer.py +examples/hello_publisher.py +examples/memory_transport.py +examples/simple_eventlet_receive.py +examples/simple_eventlet_send.py +examples/simple_receive.py +examples/simple_send.py +examples/experimental/async_consume.py +examples/rpc-tut6/rpc_client.py +examples/rpc-tut6/rpc_server.py +examples/simple_task_queue/__init__.py +examples/simple_task_queue/client.py +examples/simple_task_queue/queues.py +examples/simple_task_queue/tasks.py +examples/simple_task_queue/worker.py +extra/appveyor/install.ps1 +extra/appveyor/run_with_compiler.cmd +extra/requirements/default.txt +extra/requirements/dev.txt +extra/requirements/docs.txt +extra/requirements/funtest.txt +extra/requirements/pkgutils.txt +extra/requirements/test-ci-py2.txt +extra/requirements/test-ci.txt +extra/requirements/test.txt +extra/requirements/extras/consul.txt +extra/requirements/extras/couchdb.txt +extra/requirements/extras/etcd.txt +extra/requirements/extras/librabbitmq.txt +extra/requirements/extras/mongodb.txt +extra/requirements/extras/msgpack.txt +extra/requirements/extras/pyro.txt +extra/requirements/extras/qpid.txt +extra/requirements/extras/redis.txt +extra/requirements/extras/slmq.txt +extra/requirements/extras/sqlalchemy.txt +extra/requirements/extras/sqs.txt +extra/requirements/extras/yaml.txt +extra/requirements/extras/zookeeper.txt +kombu/__init__.py +kombu/abstract.py +kombu/clocks.py +kombu/common.py +kombu/compat.py +kombu/compression.py +kombu/connection.py 
+kombu/entity.py +kombu/exceptions.py +kombu/five.py +kombu/log.py +kombu/message.py +kombu/messaging.py +kombu/mixins.py +kombu/pidbox.py +kombu/pools.py +kombu/resource.py +kombu/serialization.py +kombu/simple.py +kombu.egg-info/PKG-INFO +kombu.egg-info/SOURCES.txt +kombu.egg-info/dependency_links.txt +kombu.egg-info/not-zip-safe +kombu.egg-info/requires.txt +kombu.egg-info/top_level.txt +kombu/async/__init__.py +kombu/async/debug.py +kombu/async/hub.py +kombu/async/semaphore.py +kombu/async/timer.py +kombu/async/aws/__init__.py +kombu/async/aws/connection.py +kombu/async/aws/ext.py +kombu/async/aws/sqs/__init__.py +kombu/async/aws/sqs/connection.py +kombu/async/aws/sqs/ext.py +kombu/async/aws/sqs/message.py +kombu/async/aws/sqs/queue.py +kombu/async/http/__init__.py +kombu/async/http/base.py +kombu/async/http/curl.py +kombu/transport/SLMQ.py +kombu/transport/SQS.py +kombu/transport/__init__.py +kombu/transport/base.py +kombu/transport/consul.py +kombu/transport/etcd.py +kombu/transport/filesystem.py +kombu/transport/librabbitmq.py +kombu/transport/memory.py +kombu/transport/mongodb.py +kombu/transport/pyamqp.py +kombu/transport/pyro.py +kombu/transport/qpid.py +kombu/transport/redis.py +kombu/transport/zookeeper.py +kombu/transport/sqlalchemy/__init__.py +kombu/transport/sqlalchemy/models.py +kombu/transport/virtual/__init__.py +kombu/transport/virtual/base.py +kombu/transport/virtual/exchange.py +kombu/utils/__init__.py +kombu/utils/amq_manager.py +kombu/utils/collections.py +kombu/utils/compat.py +kombu/utils/debug.py +kombu/utils/div.py +kombu/utils/encoding.py +kombu/utils/eventio.py +kombu/utils/functional.py +kombu/utils/imports.py +kombu/utils/json.py +kombu/utils/limits.py +kombu/utils/objects.py +kombu/utils/scheduling.py +kombu/utils/text.py +kombu/utils/time.py +kombu/utils/url.py +kombu/utils/uuid.py +requirements/default.txt +requirements/dev.txt +requirements/docs.txt +requirements/funtest.txt +requirements/pkgutils.txt +requirements/test-ci-py2.txt +requirements/test-ci.txt +requirements/test.txt +requirements/extras/consul.txt +requirements/extras/couchdb.txt +requirements/extras/etcd.txt +requirements/extras/librabbitmq.txt +requirements/extras/mongodb.txt +requirements/extras/msgpack.txt +requirements/extras/pyro.txt +requirements/extras/qpid.txt +requirements/extras/redis.txt +requirements/extras/slmq.txt +requirements/extras/sqlalchemy.txt +requirements/extras/sqs.txt +requirements/extras/yaml.txt +requirements/extras/zookeeper.txt +t/__init__.py +t/mocks.py +t/integration/__init__.py +t/integration/transport.py +t/integration/tests/__init__.py +t/integration/tests/test_SLMQ.py +t/integration/tests/test_SQS.py +t/integration/tests/test_amqp.py +t/integration/tests/test_librabbitmq.py +t/integration/tests/test_mongodb.py +t/integration/tests/test_pyamqp.py +t/integration/tests/test_qpid.py +t/integration/tests/test_redis.py +t/integration/tests/test_sqla.py +t/integration/tests/test_zookeeper.py +t/unit/__init__.py +t/unit/conftest.py +t/unit/test_clocks.py +t/unit/test_common.py +t/unit/test_compat.py +t/unit/test_compression.py +t/unit/test_connection.py +t/unit/test_entity.py +t/unit/test_exceptions.py +t/unit/test_log.py +t/unit/test_message.py +t/unit/test_messaging.py +t/unit/test_mixins.py +t/unit/test_pidbox.py +t/unit/test_pools.py +t/unit/test_serialization.py +t/unit/test_simple.py +t/unit/async/__init__.py +t/unit/async/test_hub.py +t/unit/async/test_semaphore.py +t/unit/async/test_timer.py +t/unit/async/aws/__init__.py +t/unit/async/aws/case.py 
+t/unit/async/aws/test_aws.py +t/unit/async/aws/test_connection.py +t/unit/async/aws/sqs/__init__.py +t/unit/async/aws/sqs/test_connection.py +t/unit/async/aws/sqs/test_queue.py +t/unit/async/http/__init__.py +t/unit/async/http/test_curl.py +t/unit/async/http/test_http.py +t/unit/transport/__init__.py +t/unit/transport/test_SQS.py +t/unit/transport/test_base.py +t/unit/transport/test_consul.py +t/unit/transport/test_etcd.py +t/unit/transport/test_filesystem.py +t/unit/transport/test_librabbitmq.py +t/unit/transport/test_memory.py +t/unit/transport/test_mongodb.py +t/unit/transport/test_pyamqp.py +t/unit/transport/test_qpid.py +t/unit/transport/test_redis.py +t/unit/transport/test_sqlalchemy.py +t/unit/transport/test_transport.py +t/unit/transport/test_zookeeper.py +t/unit/transport/virtual/__init__.py +t/unit/transport/virtual/test_base.py +t/unit/transport/virtual/test_exchange.py +t/unit/utils/__init__.py +t/unit/utils/test_amq_manager.py +t/unit/utils/test_compat.py +t/unit/utils/test_debug.py +t/unit/utils/test_div.py +t/unit/utils/test_encoding.py +t/unit/utils/test_functional.py +t/unit/utils/test_imports.py +t/unit/utils/test_json.py +t/unit/utils/test_objects.py +t/unit/utils/test_scheduling.py +t/unit/utils/test_time.py +t/unit/utils/test_url.py +t/unit/utils/test_utils.py +t/unit/utils/test_uuid.py \ No newline at end of file diff -Nru kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/top_level.txt kombu-4.1.0/kombu.egg-info/top_level.txt --- kombu-4.0.2+really4.0.2+dfsg/kombu.egg-info/top_level.txt 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/kombu.egg-info/top_level.txt 2017-07-20 16:22:10.000000000 +0000 @@ -0,0 +1 @@ +kombu diff -Nru kombu-4.0.2+really4.0.2+dfsg/PKG-INFO kombu-4.1.0/PKG-INFO --- kombu-4.0.2+really4.0.2+dfsg/PKG-INFO 2016-12-15 23:49:32.000000000 +0000 +++ kombu-4.1.0/PKG-INFO 2017-07-20 16:22:11.000000000 +0000 @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: kombu -Version: 4.0.2 +Version: 4.1.0 Summary: Messaging library for Python. Home-page: https://kombu.readthedocs.io Author: Ask Solem @@ -12,7 +12,7 @@ |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| - :Version: 4.0.2 + :Version: 4.1.0 :Web: http://kombu.me/ :Download: http://pypi.python.org/pypi/kombu/ :Source: https://github.com/celery/kombu/ @@ -70,20 +70,20 @@ For an introduction to AMQP you should read the article `Rabbits and warrens`_, and the `Wikipedia article about AMQP`_. - .. _`RabbitMQ`: http://www.rabbitmq.com/ - .. _`AMQP`: http://amqp.org - .. _`py-amqp`: http://pypi.python.org/pypi/amqp/ - .. _`qpid-python`: http://pypi.python.org/pypi/qpid-python/ - .. _`Redis`: http://code.google.com/p/redis/ - .. _`Amazon SQS`: http://aws.amazon.com/sqs/ + .. _`RabbitMQ`: https://www.rabbitmq.com/ + .. _`AMQP`: https://amqp.org + .. _`py-amqp`: https://pypi.python.org/pypi/amqp/ + .. _`qpid-python`: https://pypi.python.org/pypi/qpid-python/ + .. _`Redis`: https://redis.io + .. _`Amazon SQS`: https://aws.amazon.com/sqs/ .. _`Zookeeper`: https://zookeeper.apache.org/ .. _`Rabbits and warrens`: http://blogs.digitar.com/jjww/2009/01/rabbits-and-warrens/ - .. _`amqplib`: http://barryp.org/software/py-amqplib/ - .. _`Wikipedia article about AMQP`: http://en.wikipedia.org/wiki/AMQP - .. _`carrot`: http://pypi.python.org/pypi/carrot/ - .. _`librabbitmq`: http://pypi.python.org/pypi/librabbitmq - .. _`Pyro`: http://pythonhosting.org/Pyro - .. _`SoftLayer MQ`: http://www.softlayer.com/services/additional/message-queue + .. _`amqplib`: https://barryp.org/software/py-amqplib/ + .. 
_`Wikipedia article about AMQP`: https://en.wikipedia.org/wiki/AMQP + .. _`carrot`: https://pypi.python.org/pypi/carrot/ + .. _`librabbitmq`: https://pypi.python.org/pypi/librabbitmq + .. _`Pyro`: https://pythonhosting.org/Pyro4 + .. _`SoftLayer MQ`: https://sldn.softlayer.com/reference/messagequeueapi .. _transport-comparison: @@ -132,7 +132,7 @@ Quick overview -------------- - :: + .. code:: python from kombu import Connection, Exchange, Queue @@ -177,7 +177,7 @@ Or handle channels manually: - :: + .. code:: python with connection.channel() as channel: producer = Producer(channel, ...) @@ -187,7 +187,7 @@ All objects can be used outside of with statements too, just remember to close the objects after use: - :: + .. code:: python from kombu import Connection, Consumer, Producer @@ -318,18 +318,18 @@ Join the `carrot-users`_ mailing list. - .. _`carrot-users`: http://groups.google.com/group/carrot-users/ + .. _`carrot-users`: https://groups.google.com/group/carrot-users/ Bug tracker =========== If you have any suggestions, bug reports or annoyances please report them - to our issue tracker at http://github.com/celery/kombu/issues/ + to our issue tracker at https://github.com/celery/kombu/issues/ Contributing ============ - Development of `Kombu` happens at Github: http://github.com/celery/kombu + Development of `Kombu` happens at Github: https://github.com/celery/kombu You are highly encouraged to participate in the development. If you don't like Github (for some reason) you're welcome to send regular patches. @@ -354,15 +354,15 @@ .. |wheel| image:: https://img.shields.io/pypi/wheel/kombu.svg :alt: Kombu can be installed via wheel - :target: http://pypi.python.org/pypi/kombu/ + :target: https://pypi.python.org/pypi/kombu/ .. |pyversion| image:: https://img.shields.io/pypi/pyversions/kombu.svg :alt: Supported Python versions. - :target: http://pypi.python.org/pypi/kombu/ + :target: https://pypi.python.org/pypi/kombu/ .. |pyimp| image:: https://img.shields.io/pypi/implementation/kombu.svg :alt: Support Python implementations. - :target: http://pypi.python.org/pypi/kombu/ + :target: https://pypi.python.org/pypi/kombu/ -- @@ -375,6 +375,7 @@ Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: Implementation :: CPython diff -Nru kombu-4.0.2+really4.0.2+dfsg/README.rst kombu-4.1.0/README.rst --- kombu-4.0.2+really4.0.2+dfsg/README.rst 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/README.rst 2017-07-20 16:19:30.000000000 +0000 @@ -4,7 +4,7 @@ |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| -:Version: 4.0.2 +:Version: 4.1.0 :Web: http://kombu.me/ :Download: http://pypi.python.org/pypi/kombu/ :Source: https://github.com/celery/kombu/ @@ -62,20 +62,20 @@ For an introduction to AMQP you should read the article `Rabbits and warrens`_, and the `Wikipedia article about AMQP`_. -.. _`RabbitMQ`: http://www.rabbitmq.com/ -.. _`AMQP`: http://amqp.org -.. _`py-amqp`: http://pypi.python.org/pypi/amqp/ -.. _`qpid-python`: http://pypi.python.org/pypi/qpid-python/ -.. _`Redis`: http://code.google.com/p/redis/ -.. _`Amazon SQS`: http://aws.amazon.com/sqs/ +.. _`RabbitMQ`: https://www.rabbitmq.com/ +.. _`AMQP`: https://amqp.org +.. _`py-amqp`: https://pypi.python.org/pypi/amqp/ +.. 
_`qpid-python`: https://pypi.python.org/pypi/qpid-python/ +.. _`Redis`: https://redis.io +.. _`Amazon SQS`: https://aws.amazon.com/sqs/ .. _`Zookeeper`: https://zookeeper.apache.org/ .. _`Rabbits and warrens`: http://blogs.digitar.com/jjww/2009/01/rabbits-and-warrens/ -.. _`amqplib`: http://barryp.org/software/py-amqplib/ -.. _`Wikipedia article about AMQP`: http://en.wikipedia.org/wiki/AMQP -.. _`carrot`: http://pypi.python.org/pypi/carrot/ -.. _`librabbitmq`: http://pypi.python.org/pypi/librabbitmq -.. _`Pyro`: http://pythonhosting.org/Pyro -.. _`SoftLayer MQ`: http://www.softlayer.com/services/additional/message-queue +.. _`amqplib`: https://barryp.org/software/py-amqplib/ +.. _`Wikipedia article about AMQP`: https://en.wikipedia.org/wiki/AMQP +.. _`carrot`: https://pypi.python.org/pypi/carrot/ +.. _`librabbitmq`: https://pypi.python.org/pypi/librabbitmq +.. _`Pyro`: https://pythonhosting.org/Pyro4 +.. _`SoftLayer MQ`: https://sldn.softlayer.com/reference/messagequeueapi .. _transport-comparison: @@ -124,7 +124,7 @@ Quick overview -------------- -:: +.. code:: python from kombu import Connection, Exchange, Queue @@ -169,7 +169,7 @@ Or handle channels manually: -:: +.. code:: python with connection.channel() as channel: producer = Producer(channel, ...) @@ -179,7 +179,7 @@ All objects can be used outside of with statements too, just remember to close the objects after use: -:: +.. code:: python from kombu import Connection, Consumer, Producer @@ -310,18 +310,18 @@ Join the `carrot-users`_ mailing list. -.. _`carrot-users`: http://groups.google.com/group/carrot-users/ +.. _`carrot-users`: https://groups.google.com/group/carrot-users/ Bug tracker =========== If you have any suggestions, bug reports or annoyances please report them -to our issue tracker at http://github.com/celery/kombu/issues/ +to our issue tracker at https://github.com/celery/kombu/issues/ Contributing ============ -Development of `Kombu` happens at Github: http://github.com/celery/kombu +Development of `Kombu` happens at Github: https://github.com/celery/kombu You are highly encouraged to participate in the development. If you don't like Github (for some reason) you're welcome to send regular patches. @@ -346,14 +346,14 @@ .. |wheel| image:: https://img.shields.io/pypi/wheel/kombu.svg :alt: Kombu can be installed via wheel - :target: http://pypi.python.org/pypi/kombu/ + :target: https://pypi.python.org/pypi/kombu/ .. |pyversion| image:: https://img.shields.io/pypi/pyversions/kombu.svg :alt: Supported Python versions. - :target: http://pypi.python.org/pypi/kombu/ + :target: https://pypi.python.org/pypi/kombu/ .. |pyimp| image:: https://img.shields.io/pypi/implementation/kombu.svg :alt: Support Python implementations. 
- :target: http://pypi.python.org/pypi/kombu/ + :target: https://pypi.python.org/pypi/kombu/ -- diff -Nru kombu-4.0.2+really4.0.2+dfsg/requirements/extras/sqlalchemy.txt kombu-4.1.0/requirements/extras/sqlalchemy.txt --- kombu-4.0.2+really4.0.2+dfsg/requirements/extras/sqlalchemy.txt 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/requirements/extras/sqlalchemy.txt 2017-07-10 04:45:59.000000000 +0000 @@ -0,0 +1 @@ +sqlalchemy diff -Nru kombu-4.0.2+really4.0.2+dfsg/requirements/extras/sqs.txt kombu-4.1.0/requirements/extras/sqs.txt --- kombu-4.0.2+really4.0.2+dfsg/requirements/extras/sqs.txt 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/requirements/extras/sqs.txt 2017-07-10 04:45:59.000000000 +0000 @@ -1,2 +1,2 @@ -boto>=2.8 +boto3>=1.4.4 pycurl diff -Nru kombu-4.0.2+really4.0.2+dfsg/requirements/funtest.txt kombu-4.1.0/requirements/funtest.txt --- kombu-4.0.2+really4.0.2+dfsg/requirements/funtest.txt 2016-11-30 22:54:46.000000000 +0000 +++ kombu-4.1.0/requirements/funtest.txt 2017-07-10 04:45:59.000000000 +0000 @@ -8,7 +8,7 @@ kazoo # SQS transport -boto +boto3 # Qpid transport qpid-python>=0.26 diff -Nru kombu-4.0.2+really4.0.2+dfsg/requirements/pkgutils.txt kombu-4.1.0/requirements/pkgutils.txt --- kombu-4.0.2+really4.0.2+dfsg/requirements/pkgutils.txt 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/requirements/pkgutils.txt 2017-07-10 04:45:59.000000000 +0000 @@ -5,4 +5,4 @@ tox>=2.3.1 sphinx2rst>=1.0 bumpversion -pydocstyle +pydocstyle==1.1.1 diff -Nru kombu-4.0.2+really4.0.2+dfsg/requirements/test-ci.txt kombu-4.1.0/requirements/test-ci.txt --- kombu-4.0.2+really4.0.2+dfsg/requirements/test-ci.txt 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/requirements/test-ci.txt 2017-07-10 04:45:59.000000000 +0000 @@ -3,3 +3,5 @@ redis PyYAML msgpack-python>0.2.0 +-r extras/sqs.txt +sqlalchemy diff -Nru kombu-4.0.2+really4.0.2+dfsg/setup.cfg kombu-4.1.0/setup.cfg --- kombu-4.0.2+really4.0.2+dfsg/setup.cfg 2016-12-15 23:49:32.000000000 +0000 +++ kombu-4.1.0/setup.cfg 2017-07-20 16:22:11.000000000 +0000 @@ -16,11 +16,10 @@ [bdist_rpm] requires = amqp >= 2. 
-[wheel] +[bdist_wheel] universal = 1 [egg_info] tag_build = tag_date = 0 -tag_svn_revision = 0 diff -Nru kombu-4.0.2+really4.0.2+dfsg/setup.py kombu-4.1.0/setup.py --- kombu-4.0.2+really4.0.2+dfsg/setup.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/setup.py 2017-07-20 16:16:14.000000000 +0000 @@ -82,7 +82,7 @@ if os.path.exists('README.rst'): long_description = codecs.open('README.rst', 'r', 'utf-8').read() else: - long_description = 'See http://pypi.python.org/pypi/kombu' + long_description = 'See https://pypi.python.org/pypi/kombu' # -*- Installation Requires -*- py_version = sys.version_info @@ -141,6 +141,7 @@ 'mongodb': extras('mongodb.txt'), 'sqs': extras('sqs.txt'), 'zookeeper': extras('zookeeper.txt'), + 'sqlalchemy': extras('sqlalchemy.txt'), 'librabbitmq': extras('librabbitmq.txt'), 'pyro': extras('pyro.txt'), 'slmq': extras('slmq.txt'), @@ -155,6 +156,7 @@ 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: Implementation :: CPython', diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/integration/tests/test_sqla.py kombu-4.1.0/t/integration/tests/test_sqla.py --- kombu-4.0.2+really4.0.2+dfsg/t/integration/tests/test_sqla.py 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/t/integration/tests/test_sqla.py 2017-07-10 04:45:59.000000000 +0000 @@ -0,0 +1,13 @@ +from __future__ import absolute_import, unicode_literals + +from funtests import transport + +from kombu.tests.case import skip + + +@skip.unless_module('sqlalchemy') +class test_sqla(transport.TransportCase): + transport = 'sqlalchemy' + prefix = 'sqlalchemy' + event_loop_max = 10 + connection_options = {'hostname': 'sqla+sqlite://'} diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/integration/tests/test_SQS.py kombu-4.1.0/t/integration/tests/test_SQS.py --- kombu-4.0.2+really4.0.2+dfsg/t/integration/tests/test_SQS.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/integration/tests/test_SQS.py 2017-07-10 04:45:59.000000000 +0000 @@ -7,7 +7,7 @@ @skip.unless_environ('AWS_ACCESS_KEY_ID') @skip.unless_environ('AWS_SECRET_ACCESS_KEY') -@skip.unless_module('boto') +@skip.unless_module('boto3') class test_SQS(transport.TransportCase): transport = 'SQS' prefix = 'sqs' diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/case.py kombu-4.1.0/t/unit/async/aws/case.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/case.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/async/aws/case.py 2017-07-10 04:45:59.000000000 +0000 @@ -7,7 +7,7 @@ @skip.if_pypy() -@skip.unless_module('boto') +@skip.unless_module('boto3') @skip.unless_module('pycurl') @pytest.mark.usefixtures('hub') class AWSCase(object): diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/sqs/test_connection.py kombu-4.1.0/t/unit/async/aws/sqs/test_connection.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/sqs/test_connection.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/async/aws/sqs/test_connection.py 2017-07-10 04:45:59.000000000 +0000 @@ -1,13 +1,12 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals -import pytest - -from case import Mock +from case import Mock, MagicMock from kombu.async.aws.sqs.connection import ( - AsyncSQSConnection, Attributes, BatchResults, + AsyncSQSConnection ) +from kombu.async.aws.ext import boto3 from kombu.async.aws.sqs.message import AsyncMessage 
from kombu.async.aws.sqs.queue import AsyncQueue from kombu.utils.uuid import uuid @@ -20,29 +19,26 @@ class test_AsyncSQSConnection(AWSCase): def setup(self): - self.x = AsyncSQSConnection('ak', 'sk', http_client=Mock()) + session = boto3.session.Session( + aws_access_key_id='AAA', + aws_secret_access_key='AAAA', + region_name='us-west-2', + ) + sqs_client = session.client('sqs') + self.x = AsyncSQSConnection(sqs_client, 'ak', 'sk', http_client=Mock()) self.x.get_object = Mock(name='X.get_object') self.x.get_status = Mock(name='X.get_status') - self.x.get_list = Mock(nanme='X.get_list') + self.x.get_list = Mock(name='X.get_list') self.callback = PromiseMock(name='callback') - def test_without_boto(self): - from kombu.async.aws.sqs import connection - prev, connection.boto = connection.boto, None - try: - with pytest.raises(ImportError): - AsyncSQSConnection('ak', 'sk', http_client=Mock()) - finally: - connection.boto = prev - - def test_default_region(self): - assert self.x.region - assert issubclass(self.x.region.connection_cls, AsyncSQSConnection) + sqs_client.get_queue_url = MagicMock(return_value={ + 'QueueUrl': 'http://aws.com' + }) def test_create_queue(self): self.x.create_queue('foo', callback=self.callback) self.x.get_object.assert_called_with( - 'CreateQueue', {'QueueName': 'foo'}, AsyncQueue, + 'CreateQueue', {'QueueName': 'foo'}, callback=self.callback, ) @@ -55,7 +51,7 @@ 'QueueName': 'foo', 'DefaultVisibilityTimeout': '33' }, - AsyncQueue, callback=self.callback + callback=self.callback ) def test_delete_queue(self): @@ -72,7 +68,7 @@ ) self.x.get_object.assert_called_with( 'GetQueueAttributes', {'AttributeName': 'QueueSize'}, - Attributes, queue.id, callback=self.callback, + queue.id, callback=self.callback, ) def test_set_queue_attribute(self): @@ -93,8 +89,9 @@ self.x.receive_message(queue, 4, callback=self.callback) self.x.get_list.assert_called_with( 'ReceiveMessage', {'MaxNumberOfMessages': 4}, - [('Message', queue.message_class)], - queue.id, callback=self.callback, + [('Message', AsyncMessage)], + 'http://aws.com', callback=self.callback, + parent=queue, ) def test_receive_message__with_visibility_timeout(self): @@ -105,8 +102,9 @@ 'MaxNumberOfMessages': 4, 'VisibilityTimeout': 3666, }, - [('Message', queue.message_class)], - queue.id, callback=self.callback, + [('Message', AsyncMessage)], + 'http://aws.com', callback=self.callback, + parent=queue, ) def test_receive_message__with_wait_time_seconds(self): @@ -119,8 +117,9 @@ 'MaxNumberOfMessages': 4, 'WaitTimeSeconds': 303, }, - [('Message', queue.message_class)], - queue.id, callback=self.callback, + [('Message', AsyncMessage)], + 'http://aws.com', callback=self.callback, + parent=queue, ) def test_receive_message__with_attributes(self): @@ -134,8 +133,9 @@ 'AttributeName.2': 'bar', 'MaxNumberOfMessages': 4, }, - [('Message', queue.message_class)], - queue.id, callback=self.callback, + [('Message', AsyncMessage)], + 'http://aws.com', callback=self.callback, + parent=queue, ) def MockMessage(self, id=None, receipt_handle=None, body=None): @@ -157,10 +157,11 @@ def test_delete_message(self): queue = Mock(name='queue') message = self.MockMessage() - self.x.delete_message(queue, message, callback=self.callback) + self.x.delete_message(queue, message.receipt_handle, + callback=self.callback) self.x.get_status.assert_called_with( 'DeleteMessage', {'ReceiptHandle': message.receipt_handle}, - queue.id, callback=self.callback, + queue, callback=self.callback, ) def test_delete_message_batch(self): @@ -175,7 +176,7 @@ 
'DeleteMessageBatchRequestEntry.2.Id': '2', 'DeleteMessageBatchRequestEntry.2.ReceiptHandle': 'r2', }, - BatchResults, queue.id, verb='POST', callback=self.callback, + queue.id, verb='POST', callback=self.callback, ) def test_send_message(self): @@ -183,7 +184,7 @@ self.x.send_message(queue, 'hello', callback=self.callback) self.x.get_object.assert_called_with( 'SendMessage', {'MessageBody': 'hello'}, - AsyncMessage, queue.id, verb='POST', callback=self.callback, + queue.id, verb='POST', callback=self.callback, ) def test_send_message__with_delay_seconds(self): @@ -193,7 +194,7 @@ ) self.x.get_object.assert_called_with( 'SendMessage', {'MessageBody': 'hello', 'DelaySeconds': 303}, - AsyncMessage, queue.id, verb='POST', callback=self.callback, + queue.id, verb='POST', callback=self.callback, ) def test_send_message_batch(self): @@ -213,7 +214,7 @@ 'SendMessageBatchRequestEntry.2.MessageBody': 'B', 'SendMessageBatchRequestEntry.2.DelaySeconds': 303, }, - BatchResults, queue.id, verb='POST', callback=self.callback, + queue.id, verb='POST', callback=self.callback, ) def test_change_message_visibility(self): @@ -251,7 +252,7 @@ preamble('2.ReceiptHandle'): 'r2', preamble('2.VisibilityTimeout'): 909, }, - BatchResults, queue.id, verb='POST', callback=self.callback, + queue.id, verb='POST', callback=self.callback, ) def test_get_all_queues(self): diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/sqs/test_message.py kombu-4.1.0/t/unit/async/aws/sqs/test_message.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/sqs/test_message.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/async/aws/sqs/test_message.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals - -from case import Mock - -from kombu.async.aws.sqs.message import AsyncMessage -from kombu.utils.uuid import uuid - -from t.mocks import PromiseMock - -from ..case import AWSCase - - -class test_AsyncMessage(AWSCase): - - def setup(self): - self.queue = Mock(name='queue') - self.callback = PromiseMock(name='callback') - self.x = AsyncMessage(self.queue, 'body') - self.x.receipt_handle = uuid() - - def test_delete(self): - assert self.x.delete(callback=self.callback) - self.x.queue.delete_message.assert_called_with( - self.x, self.callback, - ) - - self.x.queue = None - assert self.x.delete(callback=self.callback) is None - - def test_change_visibility(self): - assert self.x.change_visibility(303, callback=self.callback) - self.x.queue.connection.change_message_visibility.assert_called_with( - self.x.queue, self.x.receipt_handle, 303, self.callback, - ) - self.x.queue = None - assert self.x.change_visibility(303, callback=self.callback) is None diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/sqs/test_sqs.py kombu-4.1.0/t/unit/async/aws/sqs/test_sqs.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/sqs/test_sqs.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/async/aws/sqs/test_sqs.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals - -import pytest - -from case import Mock, patch - -from kombu.async.aws.sqs import regions, connect_to_region -from kombu.async.aws.sqs.connection import AsyncSQSConnection - -from ..case import AWSCase - - -class test_connect_to_region(AWSCase): - - def test_when_no_boto_installed(self, patching): - patching('kombu.async.aws.sqs.boto', None) - with pytest.raises(ImportError): - regions() - - def 
test_using_async_connection(self): - for region in regions(): - assert region.connection_cls is AsyncSQSConnection - - def test_connect_to_region(self): - with patch('kombu.async.aws.sqs.regions') as regions: - region = Mock(name='region') - region.name = 'us-west-1' - regions.return_value = [region] - conn = connect_to_region('us-west-1', kw=3.33) - assert conn is region.connect.return_value - region.connect.assert_called_with(kw=3.33) - - assert connect_to_region('foo') is None diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/test_aws.py kombu-4.1.0/t/unit/async/aws/test_aws.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/test_aws.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/async/aws/test_aws.py 2017-07-10 04:45:59.000000000 +0000 @@ -13,4 +13,4 @@ def test_connection(self): x = connect_sqs('AAKI', 'ASAK', http_client=Mock()) assert x - assert x.connection + assert x.sqs_connection diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/test_connection.py kombu-4.1.0/t/unit/async/aws/test_connection.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/async/aws/test_connection.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/async/aws/test_connection.py 2017-07-10 04:45:59.000000000 +0000 @@ -1,11 +1,9 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals -import pytest - from contextlib import contextmanager -from case import Mock, patch +from case import Mock from vine.abstract import Thenable from kombu.exceptions import HttpError @@ -13,18 +11,22 @@ from kombu.async import http from kombu.async.aws.connection import ( - AsyncHTTPConnection, AsyncHTTPSConnection, AsyncHTTPResponse, AsyncConnection, - AsyncAWSAuthConnection, AsyncAWSQueryConnection, ) +from kombu.async.aws.ext import boto3 from .case import AWSCase from t.mocks import PromiseMock +try: + from urllib.parse import urlparse, parse_qs +except ImportError: + from urlparse import urlparse, parse_qs # noqa + # Not currently working VALIDATES_CERT = False @@ -38,37 +40,30 @@ return m -class test_AsyncHTTPConnection(AWSCase): - - def test_AsyncHTTPSConnection(self): - x = AsyncHTTPSConnection('aws.vandelay.com') - assert x.scheme == 'https' +class test_AsyncHTTPSConnection(AWSCase): def test_http_client(self): - x = AsyncHTTPConnection('aws.vandelay.com') + x = AsyncHTTPSConnection() assert x.http_client is http.get_client() client = Mock(name='http_client') - y = AsyncHTTPConnection('aws.vandelay.com', http_client=client) + y = AsyncHTTPSConnection(http_client=client) assert y.http_client is client def test_args(self): - x = AsyncHTTPConnection( - 'aws.vandelay.com', 8083, strict=True, timeout=33.3, + x = AsyncHTTPSConnection( + strict=True, timeout=33.3, ) - assert x.host == 'aws.vandelay.com' - assert x.port == 8083 assert x.strict assert x.timeout == 33.3 - assert x.scheme == 'http' def test_request(self): - x = AsyncHTTPConnection('aws.vandelay.com') + x = AsyncHTTPSConnection('aws.vandelay.com') x.request('PUT', '/importer-exporter') assert x.path == '/importer-exporter' assert x.method == 'PUT' def test_request_with_body_buffer(self): - x = AsyncHTTPConnection('aws.vandelay.com') + x = AsyncHTTPSConnection('aws.vandelay.com') body = Mock(name='body') body.read.return_value = 'Vandelay Industries' x.request('PUT', '/importer-exporter', body) @@ -78,14 +73,14 @@ body.read.assert_called_with() def test_request_with_body_text(self): - x = AsyncHTTPConnection('aws.vandelay.com') + x = AsyncHTTPSConnection('aws.vandelay.com') x.request('PUT', '/importer-exporter', 
'Vandelay Industries') assert x.method == 'PUT' assert x.path == '/importer-exporter' assert x.body == 'Vandelay Industries' def test_request_with_headers(self): - x = AsyncHTTPConnection('aws.vandelay.com') + x = AsyncHTTPSConnection() headers = {'Proxy': 'proxy.vandelay.com'} x.request('PUT', '/importer-exporter', None, headers) assert 'Proxy' in dict(x.headers) @@ -99,27 +94,10 @@ validate_cert=VALIDATES_CERT, ) - def test_getrequest_AsyncHTTPSConnection(self): - x = AsyncHTTPSConnection('aws.vandelay.com') - x.Request = Mock(name='Request') - x.getrequest() - self.assert_request_created_with('https://aws.vandelay.com/', x) - - def test_getrequest_nondefault_port(self): - x = AsyncHTTPConnection('aws.vandelay.com', port=8080) - x.Request = Mock(name='Request') - x.getrequest() - self.assert_request_created_with('http://aws.vandelay.com:8080/', x) - - y = AsyncHTTPSConnection('aws.vandelay.com', port=8443) - y.Request = Mock(name='Request') - y.getrequest() - self.assert_request_created_with('https://aws.vandelay.com:8443/', y) - def test_getresponse(self): client = Mock(name='client') client.add_request = passthrough(name='client.add_request') - x = AsyncHTTPConnection('aws.vandelay.com', http_client=client) + x = AsyncHTTPSConnection(http_client=client) x.Response = Mock(name='x.Response') request = x.getresponse() x.http_client.add_request.assert_called_with(request) @@ -134,7 +112,7 @@ client = Mock(name='client') client.add_request = passthrough(name='client.add_request') callback = PromiseMock(name='callback') - x = AsyncHTTPConnection('aws.vandelay.com', http_client=client) + x = AsyncHTTPSConnection(http_client=client) request = x.getresponse(callback) x.http_client.add_request.assert_called_with(request) @@ -151,22 +129,22 @@ assert wresponse.read() == 'The quick brown fox jumps' assert wresponse.status == 200 assert wresponse.getheader('X-Foo') == 'Hello' - assert dict(wresponse.getheaders()) == headers - assert wresponse.msg + headers_dict = wresponse.getheaders() + assert dict(headers_dict) == headers assert wresponse.msg assert repr(wresponse) def test_repr(self): - assert repr(AsyncHTTPConnection('aws.vandelay.com')) + assert repr(AsyncHTTPSConnection()) def test_putrequest(self): - x = AsyncHTTPConnection('aws.vandelay.com') + x = AsyncHTTPSConnection() x.putrequest('UPLOAD', '/new') assert x.method == 'UPLOAD' assert x.path == '/new' def test_putheader(self): - x = AsyncHTTPConnection('aws.vandelay.com') + x = AsyncHTTPSConnection() x.putheader('X-Foo', 'bar') assert x.headers == [('X-Foo', 'bar')] x.putheader('X-Bar', 'baz') @@ -176,14 +154,14 @@ ] def test_send(self): - x = AsyncHTTPConnection('aws.vandelay.com') + x = AsyncHTTPSConnection() x.send('foo') assert x.body == 'foo' x.send('bar') assert x.body == 'foobar' def test_interface(self): - x = AsyncHTTPConnection('aws.vandelay.com') + x = AsyncHTTPSConnection() assert x.set_debuglevel(3) is None assert x.connect() is None assert x.close() is None @@ -204,102 +182,49 @@ class test_AsyncConnection(AWSCase): - def test_when_boto_missing(self, patching): - patching('kombu.async.aws.connection.boto', None) - with pytest.raises(ImportError): - AsyncConnection(Mock(name='client')) - def test_client(self): - x = AsyncConnection() + sqs = Mock(name='sqs') + x = AsyncConnection(sqs) assert x._httpclient is http.get_client() client = Mock(name='client') - y = AsyncConnection(http_client=client) + y = AsyncConnection(sqs, http_client=client) assert y._httpclient is client def test_get_http_connection(self): - x = 
AsyncConnection(client=Mock(name='client')) - assert isinstance( - x.get_http_connection('aws.vandelay.com', 80, False), - AsyncHTTPConnection, - ) + sqs = Mock(name='sqs') + x = AsyncConnection(sqs) assert isinstance( - x.get_http_connection('aws.vandelay.com', 443, True), + x.get_http_connection(), AsyncHTTPSConnection, ) - - conn = x.get_http_connection('aws.vandelay.com', 80, False) + conn = x.get_http_connection() assert conn.http_client is x._httpclient - assert conn.host == 'aws.vandelay.com' - assert conn.port == 80 - - -class test_AsyncAWSAuthConnection(AWSCase): - - @patch('boto.log', create=True) - def test_make_request(self, _): - x = AsyncAWSAuthConnection('aws.vandelay.com', - http_client=Mock(name='client')) - Conn = x.get_http_connection = Mock(name='get_http_connection') - callback = PromiseMock(name='callback') - ret = x.make_request('GET', '/foo', callback=callback) - assert ret is callback - Conn.return_value.request.assert_called() - Conn.return_value.getresponse.assert_called_with( - callback=callback, - ) - - @patch('boto.log', create=True) - def test_mexe(self, _): - x = AsyncAWSAuthConnection('aws.vandelay.com', - http_client=Mock(name='client')) - Conn = x.get_http_connection = Mock(name='get_http_connection') - request = x.build_base_http_request('GET', 'foo', '/auth') - callback = PromiseMock(name='callback') - x._mexe(request, callback=callback) - Conn.return_value.request.assert_called_with( - request.method, request.path, request.body, request.headers, - ) - Conn.return_value.getresponse.assert_called_with( - callback=callback, - ) - - no_callback_ret = x._mexe(request) - # _mexe always returns promise - assert isinstance(no_callback_ret, Thenable) - - @patch('boto.log', create=True) - def test_mexe__with_sender(self, _): - x = AsyncAWSAuthConnection('aws.vandelay.com', - http_client=Mock(name='client')) - Conn = x.get_http_connection = Mock(name='get_http_connection') - request = x.build_base_http_request('GET', 'foo', '/auth') - sender = Mock(name='sender') - callback = PromiseMock(name='callback') - x._mexe(request, sender=sender, callback=callback) - sender.assert_called_with( - Conn.return_value, request.method, request.path, - request.body, request.headers, callback, - ) class test_AsyncAWSQueryConnection(AWSCase): def setup(self): - self.x = AsyncAWSQueryConnection('aws.vandelay.com', + session = boto3.session.Session( + aws_access_key_id='AAA', + aws_secret_access_key='AAAA', + region_name='us-west-2', + ) + sqs_client = session.client('sqs') + self.x = AsyncAWSQueryConnection(sqs_client, http_client=Mock(name='client')) - @patch('boto.log', create=True) - def test_make_request(self, _): + def test_make_request(self): _mexe, self.x._mexe = self.x._mexe, Mock(name='_mexe') Conn = self.x.get_http_connection = Mock(name='get_http_connection') callback = PromiseMock(name='callback') self.x.make_request( - 'action', {'foo': 1}, '/', 'GET', callback=callback, + 'action', {'foo': 1}, 'https://foo.com/', 'GET', callback=callback, ) self.x._mexe.assert_called() request = self.x._mexe.call_args[0][0] - assert request.params['Action'] == 'action' - assert request.params['Version'] == self.x.APIVersion + parsed = urlparse(request.url) + params = parse_qs(parsed.query) + assert params['Action'][0] == 'action' ret = _mexe(request, callback=callback) assert ret is callback @@ -308,29 +233,18 @@ callback=callback, ) - @patch('boto.log', create=True) - def test_make_request__no_action(self, _): + def test_make_request__no_action(self): self.x._mexe = 
Mock(name='_mexe') self.x.get_http_connection = Mock(name='get_http_connection') callback = PromiseMock(name='callback') self.x.make_request( - None, {'foo': 1}, '/', 'GET', callback=callback, + None, {'foo': 1}, 'http://foo.com/', 'GET', callback=callback, ) self.x._mexe.assert_called() request = self.x._mexe.call_args[0][0] - assert 'Action' not in request.params - assert request.params['Version'] == self.x.APIVersion - - @contextmanager - def mock_sax_parse(self, parser): - with patch('kombu.async.aws.connection.sax_parse') as sax_parse: - with patch('kombu.async.aws.connection.XmlHandler') as xh: - - def effect(body, h): - return parser(xh.call_args[0][0], body, h) - sax_parse.side_effect = effect - yield (sax_parse, xh) - sax_parse.assert_called() + parsed = urlparse(request.url) + params = parse_qs(parsed.query) + assert 'Action' not in params def Response(self, status, body): r = Mock(name='response') @@ -347,90 +261,3 @@ def assert_make_request_called(self): self.x.make_request.assert_called() return self.x.make_request.call_args[1]['callback'] - - def test_get_list(self): - with self.mock_make_request() as callback: - self.x.get_list('action', {'p': 3.3}, ['m'], callback=callback) - on_ready = self.assert_make_request_called() - - def parser(dest, body, h): - dest.append('hi') - dest.append('there') - - with self.mock_sax_parse(parser): - on_ready(self.Response(200, 'hello')) - callback.assert_called_with(['hi', 'there']) - - def test_get_list_error(self): - with self.mock_make_request() as callback: - self.x.get_list('action', {'p': 3.3}, ['m'], callback=callback) - on_ready = self.assert_make_request_called() - - with pytest.raises(self.x.ResponseError): - on_ready(self.Response(404, 'Not found')) - - def test_get_object(self): - with self.mock_make_request() as callback: - - class Result(object): - parent = None - value = None - - def __init__(self, parent): - self.parent = parent - - self.x.get_object('action', {'p': 3.3}, Result, callback=callback) - on_ready = self.assert_make_request_called() - - def parser(dest, body, h): - dest.value = 42 - - with self.mock_sax_parse(parser): - on_ready(self.Response(200, 'hello')) - - callback.assert_called() - result = callback.call_args[0][0] - assert result.value == 42 - assert result.parent - - def test_get_object_error(self): - with self.mock_make_request() as callback: - self.x.get_object('action', {'p': 3.3}, object, callback=callback) - on_ready = self.assert_make_request_called() - - with pytest.raises(self.x.ResponseError): - on_ready(self.Response(404, 'Not found')) - - def test_get_status(self): - with self.mock_make_request() as callback: - self.x.get_status('action', {'p': 3.3}, callback=callback) - on_ready = self.assert_make_request_called() - set_status_to = [True] - - def parser(dest, body, b): - dest.status = set_status_to[0] - - with self.mock_sax_parse(parser): - on_ready(self.Response(200, 'hello')) - callback.assert_called_with(True) - - set_status_to[0] = False - with self.mock_sax_parse(parser): - on_ready(self.Response(200, 'hello')) - callback.assert_called_with(False) - - def test_get_status_error(self): - with self.mock_make_request() as callback: - self.x.get_status('action', {'p': 3.3}, callback=callback) - on_ready = self.assert_make_request_called() - - with pytest.raises(self.x.ResponseError): - on_ready(self.Response(404, 'Not found')) - - def test_get_status_error_empty_body(self): - with self.mock_make_request() as callback: - self.x.get_status('action', {'p': 3.3}, callback=callback) - on_ready = 
self.assert_make_request_called() - - with pytest.raises(self.x.ResponseError): - on_ready(self.Response(200, '')) diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/async/test_hub.py kombu-4.1.0/t/unit/async/test_hub.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/async/test_hub.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/async/test_hub.py 2017-07-20 16:16:23.000000000 +0000 @@ -501,7 +501,8 @@ def test_loop__tick_callbacks(self): self.hub._ready = Mock(name='_ready') - self.hub._ready.pop.side_effect = RuntimeError() + self.hub._ready.__len__ = Mock(name="_ready.__len__") + self.hub._ready.__len__.side_effect = RuntimeError() ticks = [Mock(name='cb1'), Mock(name='cb2')] self.hub.on_tick = list(ticks) diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/test_common.py kombu-4.1.0/t/unit/test_common.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/test_common.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/test_common.py 2017-07-10 04:45:59.000000000 +0000 @@ -11,12 +11,31 @@ Broadcast, maybe_declare, send_reply, collect_replies, declaration_cached, ignore_errors, - QoS, PREFETCH_COUNT_MAX, + QoS, PREFETCH_COUNT_MAX, generate_oid ) from t.mocks import MockPool +def test_generate_oid(): + from uuid import NAMESPACE_OID + from kombu.five import bytes_if_py2 + + instance = Mock() + + args = (1, 1001, 2001, id(instance)) + ent = bytes_if_py2('%x-%x-%x-%x' % args) + + with patch('kombu.common.uuid3') as mock_uuid3, \ + patch('kombu.common.uuid5') as mock_uuid5: + mock_uuid3.side_effect = ValueError + mock_uuid3.return_value = 'uuid3-6ba7b812-9dad-11d1-80b4' + mock_uuid5.return_value = 'uuid5-6ba7b812-9dad-11d1-80b4' + oid = generate_oid(1, 1001, 2001, instance) + mock_uuid5.assert_called_once_with(NAMESPACE_OID, ent) + assert oid == 'uuid5-6ba7b812-9dad-11d1-80b4' + + def test_ignore_errors(): connection = Mock() connection.channel_errors = (KeyError,) diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_base.py kombu-4.1.0/t/unit/transport/test_base.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_base.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/transport/test_base.py 2017-07-10 04:45:59.000000000 +0000 @@ -35,7 +35,6 @@ def test_Consumer(self): q = Queue('foo', Exchange('foo')) - print(self.channel.queues) cons = self.channel.Consumer(q) assert isinstance(cons, Consumer) assert cons.channel is self.channel diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_mongodb.py kombu-4.1.0/t/unit/transport/test_mongodb.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_mongodb.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/t/unit/transport/test_mongodb.py 2017-07-10 04:45:59.000000000 +0000 @@ -81,6 +81,12 @@ assert hostname == 'mongodb://foo:bar@localhost/dbname' assert dbname == 'dbname' + def test_correct_readpreference(self): + url = 'mongodb://localhost/dbname?readpreference=nearest' + channel = _create_mock_connection(url).default_channel + hostname, dbname, options = channel._parse_uri() + assert options['readpreference'] == 'nearest' + class BaseMongoDBChannelCase: diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_redis.py kombu-4.1.0/t/unit/transport/test_redis.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_redis.py 2016-12-15 23:33:36.000000000 +0000 +++ kombu-4.1.0/t/unit/transport/test_redis.py 2017-07-10 04:45:59.000000000 +0000 @@ -663,6 +663,20 @@ self.channel.connection.client.hostname = 'george.vandelay.com' assert self.channel._connparams()['host'] == 'george.vandelay.com' + def 
test_connparams_password_for_unix_socket(self): + self.channel.connection.client.hostname = \ + 'socket://:foo@/var/run/redis.sock' + connection_parameters = self.channel._connparams() + password = connection_parameters['password'] + path = connection_parameters['path'] + assert (password, path) == ('foo', '/var/run/redis.sock') + self.channel.connection.client.hostname = \ + 'socket://@/var/run/redis.sock' + connection_parameters = self.channel._connparams() + password = connection_parameters['password'] + path = connection_parameters['path'] + assert (password, path) == (None, '/var/run/redis.sock') + def test_rotate_cycle_ValueError(self): cycle = self.channel._queue_cycle cycle.update(['kramer', 'jerry']) diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_sqlalchemy.py kombu-4.1.0/t/unit/transport/test_sqlalchemy.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_sqlalchemy.py 1970-01-01 00:00:00.000000000 +0000 +++ kombu-4.1.0/t/unit/transport/test_sqlalchemy.py 2017-07-10 04:45:59.000000000 +0000 @@ -0,0 +1,50 @@ +from __future__ import absolute_import, unicode_literals + +import pytest +from case import patch, skip + +from kombu import Connection + + +@skip.unless_module('sqlalchemy') +class test_SqlAlchemy: + + def test_url_parser(self): + with patch('kombu.transport.sqlalchemy.Channel._open'): + url = 'sqlalchemy+sqlite:///celerydb.sqlite' + Connection(url).connect() + + url = 'sqla+sqlite:///celerydb.sqlite' + Connection(url).connect() + + url = 'sqlb+sqlite:///celerydb.sqlite' + with pytest.raises(KeyError): + Connection(url).connect() + + def test_simple_queueing(self): + conn = Connection('sqlalchemy+sqlite:///:memory:') + conn.connect() + try: + channel = conn.channel() + assert channel.queue_cls.__table__.name == 'kombu_queue' + assert channel.message_cls.__table__.name == 'kombu_message' + + channel._put('celery', 'DATA_SIMPLE_QUEUEING') + assert channel._get('celery') == 'DATA_SIMPLE_QUEUEING' + finally: + conn.release() + + def test_clone(self): + hostname = 'sqlite:///celerydb.sqlite' + x = Connection('+'.join(['sqla', hostname])) + try: + assert x.uri_prefix == 'sqla' + assert x.hostname == hostname + clone = x.clone() + try: + assert clone.hostname == hostname + assert clone.uri_prefix == 'sqla' + finally: + clone.release() + finally: + x.release() diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_SQS.py kombu-4.1.0/t/unit/transport/test_SQS.py --- kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_SQS.py 2016-12-08 19:42:26.000000000 +0000 +++ kombu-4.1.0/t/unit/transport/test_SQS.py 2017-07-20 16:16:23.000000000 +0000 @@ -8,97 +8,109 @@ from __future__ import absolute_import, unicode_literals import pytest +import random +import string from case import Mock, skip from kombu import messaging from kombu import Connection, Exchange, Queue -from kombu.async.aws.ext import exception from kombu.five import Empty from kombu.transport import SQS +SQS_Channel_sqs = SQS.Channel.sqs -class SQSQueueMock(object): - def __init__(self, name): - self.name = name - self.messages = [] - self._get_message_calls = 0 - - def clear(self, page_size=10, vtimeout=10): - empty, self.messages[:] = not self.messages, [] - return not empty +class SQSMessageMock(object): + def __init__(self): + """ + Imitate the SQS Message from boto3. 
+ """ + self.body = "" + self.receipt_handle = "receipt_handle_xyz" - def count(self, page_size=10, vtimeout=10): - return len(self.messages) - count_slow = count - def delete(self): - self.messages[:] = [] - return True +class QueueMock(object): + """ Hold information about a queue. """ + + def __init__(self, url): + self.url = url + self.attributes = {'ApproximateNumberOfMessages': '0'} - def delete_message(self, message): - try: - self.messages.remove(message) - except ValueError: - return False - return True - - def get_messages(self, num_messages=1, visibility_timeout=None, - attributes=None, *args, **kwargs): - self._get_message_calls += 1 - messages, self.messages[:num_messages] = ( - self.messages[:num_messages], []) - return messages - - def read(self, visibility_timeout=None): - return self.messages.pop(0) + self.messages = [] - def write(self, message): - self.messages.append(message) - return True + def __repr__(self): + return 'QueueMock: {} {} messages'.format(self.url, len(self.messages)) -class SQSConnectionMock(object): +class SQSClientMock(object): def __init__(self): - self.queues = { - 'q_%s' % n: SQSQueueMock('q_%s' % n) for n in range(1500) - } - q = SQSQueueMock('unittest_queue') - q.write('hello') - self.queues['unittest_queue'] = q - - def get_queue(self, queue): - return self.queues.get(queue) - - def get_all_queues(self, prefix=""): - if not prefix: - keys = sorted(self.queues.keys())[:1000] - else: - keys = filter( - lambda k: k.startswith(prefix), sorted(self.queues.keys()) - )[:1000] - return [self.queues[key] for key in keys] - - def delete_queue(self, queue, force_deletion=False): - q = self.get_queue(queue) - if q: - if q.count(): - return False - q.clear() - self.queues.pop(queue, None) - - def delete_message(self, queue, message): - return queue.delete_message(message) - - def create_queue(self, name, *args, **kwargs): - q = self.queues[name] = SQSQueueMock(name) - return q + """ + Imitate the SQS Client from boto3. + """ + self._receive_messages_calls = 0 + # _queues doesn't exist on the real client, here for testing. + self._queues = {} + for n in range(1): + name = 'q_{}'.format(n) + url = 'sqs://q_{}'.format(n) + self.create_queue(QueueName=name) + + url = self.create_queue(QueueName='unittest_queue')['QueueUrl'] + self.send_message(QueueUrl=url, MessageBody='hello') + + def _get_q(self, url): + """ Helper method to quickly get a queue. 
""" + for q in self._queues.values(): + if q.url == url: + return q + raise Exception("Queue url {} not found".format(url)) + + def create_queue(self, QueueName=None, Attributes=None): + q = self._queues[QueueName] = QueueMock('sqs://' + QueueName) + return {'QueueUrl': q.url} + + def list_queues(self, QueueNamePrefix=None): + """ Return a list of queue urls """ + urls = (val.url for key, val in self._queues.items() + if key.startswith(QueueNamePrefix)) + return {'QueueUrls': urls} + + def get_queue_url(self, QueueName=None): + return self._queues[QueueName] + + def send_message(self, QueueUrl=None, MessageBody=None): + for q in self._queues.values(): + if q.url == QueueUrl: + handle = ''.join(random.choice(string.ascii_lowercase) for + x in range(10)) + q.messages.append({'Body': MessageBody, + 'ReceiptHandle': handle}) + break + + def receive_message(self, QueueUrl=None, MaxNumberOfMessages=1, + WaitTimeSeconds=10): + self._receive_messages_calls += 1 + for q in self._queues.values(): + if q.url == QueueUrl: + msgs = q.messages[:MaxNumberOfMessages] + q.messages = q.messages[MaxNumberOfMessages:] + return {'Messages': msgs} if msgs else {} + + def get_queue_attributes(self, QueueUrl=None, AttributeNames=None): + if 'ApproximateNumberOfMessages' in AttributeNames: + count = len(self._get_q(QueueUrl).messages) + return {'Attributes': {'ApproximateNumberOfMessages': count}} + + def purge_queue(self, QueueUrl=None): + for q in self._queues.values(): + if q.url == QueueUrl: + q.messages = [] -@skip.unless_module('boto') +@skip.unless_module('boto3') class test_Channel: def handleMessageCallback(self, message): @@ -115,19 +127,20 @@ # Mock the sqs() method that returns an SQSConnection object and # instead return an SQSConnectionMock() object. - self.sqs_conn_mock = SQSConnectionMock() + self.sqs_conn_mock = SQSClientMock() def mock_sqs(): return self.sqs_conn_mock + SQS.Channel.sqs = mock_sqs() # Set up a task exchange for passing tasks through the queue self.exchange = Exchange('test_SQS', type='direct') self.queue = Queue(self.queue_name, self.exchange, self.queue_name) - # Mock up a test SQS Queue with the SQSQueueMock class (and always + # Mock up a test SQS Queue with the QueueMock class (and always # make sure its a clean empty queue) - self.sqs_queue_mock = SQSQueueMock(self.queue_name) + self.sqs_queue_mock = QueueMock('sqs://' + self.queue_name) # Now, create our Connection object with the SQS Transport and store # the connection/channel objects as references for use in these tests. 
@@ -160,33 +173,24 @@ """kombu.SQS.Channel instantiates correctly with mocked queues""" assert self.queue_name in self.channel._queue_cache - def test_auth_fail(self): - normal_func = SQS.Channel.sqs.get_all_queues - - def get_all_queues_fail_403(prefix=''): - # mock auth error - raise exception.SQSError(403, None, None) - - def get_all_queues_fail_not_403(prefix=''): - # mock non-auth error - raise exception.SQSError(500, None, None) - - try: - SQS.Channel.sqs.access_key = '1234' - SQS.Channel.sqs.get_all_queues = get_all_queues_fail_403 - with pytest.raises(RuntimeError) as excinfo: - self.channel = self.connection.channel() - assert 'access_key=1234' in str(excinfo.value) - SQS.Channel.sqs.get_all_queues = get_all_queues_fail_not_403 - with pytest.raises(exception.SQSError): - self.channel = self.connection.channel() - finally: - SQS.Channel.sqs.get_all_queues = normal_func + def test_endpoint_url(self): + url = 'sqs://@localhost:5493' + self.connection = Connection(hostname=url, transport=SQS.Transport) + self.channel = self.connection.channel() + self.channel._sqs = None + expected_endpoint_url = 'http://localhost:5493' + assert self.channel.endpoint_url == expected_endpoint_url + boto3_sqs = SQS_Channel_sqs.__get__(self.channel, SQS.Channel) + assert boto3_sqs._endpoint.host == expected_endpoint_url + + def test_none_hostname_persists(self): + conn = Connection(hostname=None, transport=SQS.Transport) + assert conn.hostname == conn.clone().hostname def test_new_queue(self): queue_name = 'new_unittest_queue' self.channel._new_queue(queue_name) - assert queue_name in self.sqs_conn_mock.queues + assert queue_name in self.sqs_conn_mock._queues.keys() # For cleanup purposes, delete the queue and the queue file self.channel._delete(queue_name) @@ -195,11 +199,13 @@ # which is definitely out of cache when get_all_queues returns the # first 1000 queues sorted by name. queue_name = 'unittest_queue' + # This should not create a new queue. self.channel._new_queue(queue_name) - assert queue_name in self.sqs_conn_mock.queues - q = self.sqs_conn_mock.get_queue(queue_name) - assert 1 == q.count() - assert 'hello' == q.read() + assert queue_name in self.sqs_conn_mock._queues.keys() + queue = self.sqs_conn_mock._queues[queue_name] + # The queue originally had 1 message in it. 
+ assert 1 == len(queue.messages) + assert 'hello' == queue.messages[0]['Body'] def test_delete(self): queue_name = 'new_unittest_queue' @@ -211,17 +217,16 @@ # Test getting a single message message = 'my test message' self.producer.publish(message) - q = self.channel._new_queue(self.queue_name) - results = q.get_messages() - assert len(results) == 1 + result = self.channel._get(self.queue_name) + assert 'body' in result.keys() # Now test getting many messages for i in range(3): message = 'message: {0}'.format(i) self.producer.publish(message) - results = q.get_messages(num_messages=3) - assert len(results) == 3 + self.channel._get_bulk(self.queue_name, max_if_unlimited=3) + assert len(self.sqs_conn_mock._queues[self.queue_name].messages) == 0 def test_get_with_empty_list(self): with pytest.raises(Empty): @@ -232,6 +237,8 @@ self.channel._get_bulk(self.queue_name) def test_messages_to_python(self): + from kombu.async.aws.sqs.message import Message + kombu_message_count = 3 json_message_count = 3 # Create several test messages and publish them @@ -244,10 +251,20 @@ message = {'foo': 'bar'} self.channel._put(self.producer.routing_key, message) - q = self.channel._new_queue(self.queue_name) + q_url = self.channel._new_queue(self.queue_name) # Get the messages now - kombu_messages = q.get_messages(num_messages=kombu_message_count) - json_messages = q.get_messages(num_messages=json_message_count) + kombu_messages = [] + for m in self.sqs_conn_mock.receive_message( + QueueUrl=q_url, + MaxNumberOfMessages=kombu_message_count)['Messages']: + m['Body'] = Message(body=m['Body']).decode() + kombu_messages.append(m) + json_messages = [] + for m in self.sqs_conn_mock.receive_message( + QueueUrl=q_url, + MaxNumberOfMessages=json_message_count)['Messages']: + m['Body'] = Message(body=m['Body']).decode() + json_messages.append(m) # Now convert them to payloads kombu_payloads = self.channel._messages_to_python( @@ -350,7 +367,7 @@ def test_drain_events_with_prefetch_none(self): # Generate 20 messages message_count = 20 - expected_get_message_count = 3 + expected_receive_messages_count = 3 current_delivery_tag = [1] @@ -378,6 +395,7 @@ assert self.channel.connection._deliver.call_count == message_count - # How many times was the SQSConnectionMock get_message method called? - assert (expected_get_message_count == - self.channel._queue_cache[self.queue_name]._get_message_calls) + # How many times was the SQSConnectionMock receive_message method + # called? 
+        assert (expected_receive_messages_count ==
+                self.sqs_conn_mock._receive_messages_calls)
diff -Nru kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_zookeeper.py kombu-4.1.0/t/unit/transport/test_zookeeper.py
--- kombu-4.0.2+really4.0.2+dfsg/t/unit/transport/test_zookeeper.py 1970-01-01 00:00:00.000000000 +0000
+++ kombu-4.1.0/t/unit/transport/test_zookeeper.py 2017-07-10 04:45:59.000000000 +0000
@@ -0,0 +1,36 @@
+from __future__ import absolute_import, unicode_literals
+
+import pytest
+from case import skip
+from kombu import Connection
+from kombu.transport import zookeeper
+
+
+@skip.unless_module('kazoo')
+class test_Channel:
+    def setup(self):
+        self.connection = self.create_connection()
+        self.channel = self.connection.default_channel
+
+    def create_connection(self, **kwargs):
+        return Connection(transport=zookeeper.Transport, **kwargs)
+
+    def teardown(self):
+        self.connection.close()
+
+    def test_put_puts_bytes_to_queue(self):
+        class AssertQueue:
+            def put(self, value, priority):
+                assert isinstance(value, bytes)
+
+        self.channel._queues['foo'] = AssertQueue()
+        self.channel._put(queue='foo', message='bar')
+
+    @pytest.mark.parametrize('input,expected', (
+        ('', '/'),
+        ('/root', '/root'),
+        ('/root/', '/root'),
+    ))
+    def test_virtual_host_normalization(self, input, expected):
+        with self.create_connection(virtual_host=input) as conn:
+            assert conn.default_channel._vhost == expected