diff -Nru python-jaraco.functools-2.0/appveyor.yml python-jaraco.functools-3.0.0/appveyor.yml --- python-jaraco.functools-2.0/appveyor.yml 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/appveyor.yml 2019-12-20 04:45:46.000000000 +0000 @@ -4,7 +4,7 @@ matrix: - PYTHON: "C:\\Python36-x64" - - PYTHON: "C:\\Python27-x64" + - PYTHON: "C:\\Python38-x64" install: # symlink python from a directory with a space @@ -18,7 +18,7 @@ - '%LOCALAPPDATA%\pip\Cache' test_script: - - "python -m pip install tox tox-venv" + - "python -m pip install -U tox tox-venv virtualenv" - "tox" version: '{build}' diff -Nru python-jaraco.functools-2.0/CHANGES.rst python-jaraco.functools-3.0.0/CHANGES.rst --- python-jaraco.functools-2.0/CHANGES.rst 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/CHANGES.rst 2019-12-20 04:45:46.000000000 +0000 @@ -1,3 +1,8 @@ +v3.0.0 +====== + +Require Python 3.6 or later. + 2.0 === diff -Nru python-jaraco.functools-2.0/conftest.py python-jaraco.functools-3.0.0/conftest.py --- python-jaraco.functools-2.0/conftest.py 1970-01-01 00:00:00.000000000 +0000 +++ python-jaraco.functools-3.0.0/conftest.py 2019-12-20 04:45:46.000000000 +0000 @@ -0,0 +1,24 @@ +import sys +import re + +import jaraco.functools + + +def pytest_configure(): + patch_for_issue_12() + + +def patch_for_issue_12(): # pragma: nocover + """ + Issue #12 revealed that Python 3.7.3 had a subtle + change in the C implementation of functools that + broke the assumptions around the method_cache (or + any caller using possibly empty keyword arguments). + This patch adjusts the docstring for that test so it + can pass on that Python version. + """ + affected_ver = 3, 7, 3 + if sys.version_info[:3] != affected_ver: + return + mc = jaraco.functools.method_cache + mc.__doc__ = re.sub(r'^(\s+)75', r'\g<1>76', mc.__doc__, flags=re.M) diff -Nru python-jaraco.functools-2.0/.coveragerc python-jaraco.functools-3.0.0/.coveragerc --- python-jaraco.functools-2.0/.coveragerc 1970-01-01 00:00:00.000000000 +0000 +++ python-jaraco.functools-3.0.0/.coveragerc 2019-12-20 04:45:46.000000000 +0000 @@ -0,0 +1,5 @@ +[run] +omit = .tox/* + +[report] +show_missing = True diff -Nru python-jaraco.functools-2.0/debian/changelog python-jaraco.functools-3.0.0/debian/changelog --- python-jaraco.functools-2.0/debian/changelog 2019-11-03 09:41:19.000000000 +0000 +++ python-jaraco.functools-3.0.0/debian/changelog 2020-04-29 11:23:01.000000000 +0000 @@ -1,3 +1,21 @@ +python-jaraco.functools (3.0.0-0ubuntu1~jcfp1~18.04) bionic; urgency=medium + + * Build for bionic/18.04. + + -- JCF Ploemen (jcfp) Wed, 29 Apr 2020 13:22:00 +0000 + +python-jaraco.functools (3.0.0-1) unstable; urgency=medium + + * New upstream release. + * Clean: add entry to ensure building twice works. + * Patches: refresh 02, also strip cov and black options. + * Bump Standards-Version to 4.5.0 (from 4.4.1; no further changes). + * Rules: + + simplify test handling. + + switch test files handling to d/pybuild.testfiles. 
+ + -- JCF Ploemen (jcfp) Sat, 29 Feb 2020 13:37:00 +0000 + python-jaraco.functools (2.0-2) unstable; urgency=medium * Control: diff -Nru python-jaraco.functools-2.0/debian/clean python-jaraco.functools-3.0.0/debian/clean --- python-jaraco.functools-2.0/debian/clean 1970-01-01 00:00:00.000000000 +0000 +++ python-jaraco.functools-3.0.0/debian/clean 2020-02-29 13:37:00.000000000 +0000 @@ -0,0 +1 @@ +jaraco.functools.egg-info/* diff -Nru python-jaraco.functools-2.0/debian/compat python-jaraco.functools-3.0.0/debian/compat --- python-jaraco.functools-2.0/debian/compat 1970-01-01 00:00:00.000000000 +0000 +++ python-jaraco.functools-3.0.0/debian/compat 2020-04-29 11:23:50.000000000 +0000 @@ -0,0 +1 @@ +11 diff -Nru python-jaraco.functools-2.0/debian/control python-jaraco.functools-3.0.0/debian/control --- python-jaraco.functools-2.0/debian/control 2019-11-03 09:41:19.000000000 +0000 +++ python-jaraco.functools-3.0.0/debian/control 2020-04-29 11:23:42.000000000 +0000 @@ -5,13 +5,13 @@ Maintainer: JCF Ploemen (jcfp) Uploaders: Debian Python Modules Team Build-Depends: - debhelper-compat (= 12), + debhelper (>= 11~), dh-python, python3-all, python3-pytest, python3-setuptools, python3-setuptools-scm -Standards-Version: 4.4.1 +Standards-Version: 4.5.0 Vcs-Git: https://salsa.debian.org/python-team/modules/python-jaraco.functools.git Vcs-Browser: https://salsa.debian.org/python-team/modules/python-jaraco.functools Testsuite: autopkgtest-pkg-python diff -Nru python-jaraco.functools-2.0/debian/patches/02_pytest_no_flake8.diff python-jaraco.functools-3.0.0/debian/patches/02_pytest_no_flake8.diff --- python-jaraco.functools-2.0/debian/patches/02_pytest_no_flake8.diff 2019-11-03 09:41:19.000000000 +0000 +++ python-jaraco.functools-3.0.0/debian/patches/02_pytest_no_flake8.diff 2020-02-29 13:37:00.000000000 +0000 @@ -1,13 +1,10 @@ -# remove flake8 option from pytest.ini -Index: python-jaraco.functools-2.0/pytest.ini -=================================================================== ---- python-jaraco.functools-2.0.orig/pytest.ini -+++ python-jaraco.functools-2.0/pytest.ini -@@ -1,6 +1,6 @@ +# remove flake8, black, and cov options from pytest.ini +--- a/pytest.ini ++++ b/pytest.ini +@@ -1,5 +1,5 @@ [pytest] norecursedirs=dist build .tox .eggs --addopts=--doctest-modules --flake8 +-addopts=--doctest-modules --flake8 --black --cov +addopts=--doctest-modules doctest_optionflags=ALLOW_UNICODE ELLIPSIS filterwarnings= - ignore:Possible nested set::pycodestyle:113 diff -Nru python-jaraco.functools-2.0/debian/pybuild.testfiles python-jaraco.functools-3.0.0/debian/pybuild.testfiles --- python-jaraco.functools-2.0/debian/pybuild.testfiles 1970-01-01 00:00:00.000000000 +0000 +++ python-jaraco.functools-3.0.0/debian/pybuild.testfiles 2020-02-29 13:37:00.000000000 +0000 @@ -0,0 +1 @@ +pytest.ini diff -Nru python-jaraco.functools-2.0/debian/rules python-jaraco.functools-3.0.0/debian/rules --- python-jaraco.functools-2.0/debian/rules 2019-11-03 09:41:19.000000000 +0000 +++ python-jaraco.functools-3.0.0/debian/rules 2020-04-29 11:26:12.000000000 +0000 @@ -5,9 +5,7 @@ export PYBUILD_NAME=jaraco.functools -export PYBUILD_BEFORE_TEST=cp -va {dir}/pytest.ini {dir}/test_functools.py {build_dir} -export PYBUILD_TEST_ARGS=jaraco/ -export PYBUILD_AFTER_TEST=rm -f {build_dir}/pytest.ini {build_dir}/test_functools.py - %: - dh $@ --with python3 --buildsys=pybuild --test-pytest + dh $@ --with python3 --buildsys=pybuild + +override_dh_auto_test: diff -Nru python-jaraco.functools-2.0/docs/conf.py 
python-jaraco.functools-3.0.0/docs/conf.py --- python-jaraco.functools-2.0/docs/conf.py 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/docs/conf.py 2019-12-20 04:45:46.000000000 +0000 @@ -1,25 +1,25 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -extensions = ["sphinx.ext.autodoc", "jaraco.packaging.sphinx", "rst.linker"] +extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker'] master_doc = "index" link_files = { - "../CHANGES.rst": dict( - using=dict(GH="https://github.com"), + '../CHANGES.rst': dict( + using=dict(GH='https://github.com'), replace=[ dict( - pattern=r"(Issue #|\B#)(?P\d+)", - url="{package_url}/issues/{issue}", + pattern=r'(Issue #|\B#)(?P\d+)', + url='{package_url}/issues/{issue}', ), dict( - pattern=r"^(?m)((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n", - with_scm="{text}\n{rev[timestamp]:%d %b %Y}\n", + pattern=r'^(?m)((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n', + with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n', ), dict( - pattern=r"PEP[- ](?P\d+)", - url="https://www.python.org/dev/peps/pep-{pep_number:0>4}/", + pattern=r'PEP[- ](?P\d+)', + url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/', ), ], ) diff -Nru python-jaraco.functools-2.0/.flake8 python-jaraco.functools-3.0.0/.flake8 --- python-jaraco.functools-2.0/.flake8 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/.flake8 2019-12-20 04:45:46.000000000 +0000 @@ -1,8 +1,9 @@ [flake8] +max-line-length = 88 ignore = - # Allow tabs for indentation - W191 # W503 violates spec https://github.com/PyCQA/pycodestyle/issues/513 W503 # W504 has issues https://github.com/OCA/maintainer-quality-tools/issues/545 W504 + # Black creates whitespace before colon + E203 diff -Nru python-jaraco.functools-2.0/jaraco/functools.py python-jaraco.functools-3.0.0/jaraco/functools.py --- python-jaraco.functools-2.0/jaraco/functools.py 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/jaraco/functools.py 2019-12-20 04:45:46.000000000 +0000 @@ -1,467 +1,459 @@ -from __future__ import ( - absolute_import, unicode_literals, print_function, division, -) - import functools import time -import warnings import inspect import collections -from itertools import count - -__metaclass__ = type - - -try: - from functools import lru_cache -except ImportError: - try: - from backports.functools_lru_cache import lru_cache - except ImportError: - try: - from functools32 import lru_cache - except ImportError: - warnings.warn("No lru_cache available") - +import types +import itertools import more_itertools.recipes def compose(*funcs): - """ - Compose any number of unary functions into a single unary function. + """ + Compose any number of unary functions into a single unary function. - >>> import textwrap - >>> from six import text_type - >>> stripped = text_type.strip(textwrap.dedent(compose.__doc__)) - >>> compose(text_type.strip, textwrap.dedent)(compose.__doc__) == stripped - True - - Compose also allows the innermost function to take arbitrary arguments. 
- - >>> round_three = lambda x: round(x, ndigits=3) - >>> f = compose(round_three, int.__truediv__) - >>> [f(3*x, x+1) for x in range(1,10)] - [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7] - """ - - def compose_two(f1, f2): - return lambda *args, **kwargs: f1(f2(*args, **kwargs)) - return functools.reduce(compose_two, funcs) + >>> import textwrap + >>> from six import text_type + >>> stripped = text_type.strip(textwrap.dedent(compose.__doc__)) + >>> compose(text_type.strip, textwrap.dedent)(compose.__doc__) == stripped + True + + Compose also allows the innermost function to take arbitrary arguments. + + >>> round_three = lambda x: round(x, ndigits=3) + >>> f = compose(round_three, int.__truediv__) + >>> [f(3*x, x+1) for x in range(1,10)] + [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7] + """ + + def compose_two(f1, f2): + return lambda *args, **kwargs: f1(f2(*args, **kwargs)) + + return functools.reduce(compose_two, funcs) def method_caller(method_name, *args, **kwargs): - """ - Return a function that will call a named method on the - target object with optional positional and keyword - arguments. - - >>> lower = method_caller('lower') - >>> lower('MyString') - 'mystring' - """ - def call_method(target): - func = getattr(target, method_name) - return func(*args, **kwargs) - return call_method + """ + Return a function that will call a named method on the + target object with optional positional and keyword + arguments. + + >>> lower = method_caller('lower') + >>> lower('MyString') + 'mystring' + """ + + def call_method(target): + func = getattr(target, method_name) + return func(*args, **kwargs) + + return call_method def once(func): - """ - Decorate func so it's only ever called the first time. + """ + Decorate func so it's only ever called the first time. + + This decorator can ensure that an expensive or non-idempotent function + will not be expensive on subsequent calls and is idempotent. - This decorator can ensure that an expensive or non-idempotent function - will not be expensive on subsequent calls and is idempotent. + >>> add_three = once(lambda a: a+3) + >>> add_three(3) + 6 + >>> add_three(9) + 6 + >>> add_three('12') + 6 + + To reset the stored value, simply clear the property ``saved_result``. + + >>> del add_three.saved_result + >>> add_three(9) + 12 + >>> add_three(8) + 12 + + Or invoke 'reset()' on it. + + >>> add_three.reset() + >>> add_three(-3) + 0 + >>> add_three(0) + 0 + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + if not hasattr(wrapper, 'saved_result'): + wrapper.saved_result = func(*args, **kwargs) + return wrapper.saved_result - >>> add_three = once(lambda a: a+3) - >>> add_three(3) - 6 - >>> add_three(9) - 6 - >>> add_three('12') - 6 - - To reset the stored value, simply clear the property ``saved_result``. - - >>> del add_three.saved_result - >>> add_three(9) - 12 - >>> add_three(8) - 12 - - Or invoke 'reset()' on it. - - >>> add_three.reset() - >>> add_three(-3) - 0 - >>> add_three(0) - 0 - """ - @functools.wraps(func) - def wrapper(*args, **kwargs): - if not hasattr(wrapper, 'saved_result'): - wrapper.saved_result = func(*args, **kwargs) - return wrapper.saved_result - wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result') - return wrapper + wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result') + return wrapper def method_cache(method, cache_wrapper=None): - """ - Wrap lru_cache to support storing the cache data in the object instances. 
+ """ + Wrap lru_cache to support storing the cache data in the object instances. - Abstracts the common paradigm where the method explicitly saves an - underscore-prefixed protected property on first call and returns that - subsequently. - - >>> class MyClass: - ... calls = 0 - ... - ... @method_cache - ... def method(self, value): - ... self.calls += 1 - ... return value - - >>> a = MyClass() - >>> a.method(3) - 3 - >>> for x in range(75): - ... res = a.method(x) - >>> a.calls - 75 - - Note that the apparent behavior will be exactly like that of lru_cache - except that the cache is stored on each instance, so values in one - instance will not flush values from another, and when an instance is - deleted, so are the cached values for that instance. - - >>> b = MyClass() - >>> for x in range(35): - ... res = b.method(x) - >>> b.calls - 35 - >>> a.method(0) - 0 - >>> a.calls - 75 - - Note that if method had been decorated with ``functools.lru_cache()``, - a.calls would have been 76 (due to the cached value of 0 having been - flushed by the 'b' instance). - - Clear the cache with ``.cache_clear()`` - - >>> a.method.cache_clear() - - Another cache wrapper may be supplied: - - >>> cache = lru_cache(maxsize=2) - >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) - >>> a = MyClass() - >>> a.method2() - 3 - - Caution - do not subsequently wrap the method with another decorator, such - as ``@property``, which changes the semantics of the function. - - See also - http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ - for another implementation and additional justification. - """ - cache_wrapper = cache_wrapper or lru_cache() - - def wrapper(self, *args, **kwargs): - # it's the first call, replace the method with a cached, bound method - bound_method = functools.partial(method, self) - cached_method = cache_wrapper(bound_method) - setattr(self, method.__name__, cached_method) - return cached_method(*args, **kwargs) + Abstracts the common paradigm where the method explicitly saves an + underscore-prefixed protected property on first call and returns that + subsequently. + + >>> class MyClass: + ... calls = 0 + ... + ... @method_cache + ... def method(self, value): + ... self.calls += 1 + ... return value + + >>> a = MyClass() + >>> a.method(3) + 3 + >>> for x in range(75): + ... res = a.method(x) + >>> a.calls + 75 + + Note that the apparent behavior will be exactly like that of lru_cache + except that the cache is stored on each instance, so values in one + instance will not flush values from another, and when an instance is + deleted, so are the cached values for that instance. + + >>> b = MyClass() + >>> for x in range(35): + ... res = b.method(x) + >>> b.calls + 35 + >>> a.method(0) + 0 + >>> a.calls + 75 + + Note that if method had been decorated with ``functools.lru_cache()``, + a.calls would have been 76 (due to the cached value of 0 having been + flushed by the 'b' instance). + + Clear the cache with ``.cache_clear()`` + + >>> a.method.cache_clear() + + Another cache wrapper may be supplied: + + >>> cache = functools.lru_cache(maxsize=2) + >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) + >>> a = MyClass() + >>> a.method2() + 3 + + Caution - do not subsequently wrap the method with another decorator, such + as ``@property``, which changes the semantics of the function. 
+ + See also + http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ + for another implementation and additional justification. + """ + cache_wrapper = cache_wrapper or functools.lru_cache() + + def wrapper(self, *args, **kwargs): + # it's the first call, replace the method with a cached, bound method + bound_method = types.MethodType(method, self) + cached_method = cache_wrapper(bound_method) + setattr(self, method.__name__, cached_method) + return cached_method(*args, **kwargs) - return _special_method_cache(method, cache_wrapper) or wrapper + return _special_method_cache(method, cache_wrapper) or wrapper def _special_method_cache(method, cache_wrapper): - """ - Because Python treats special methods differently, it's not - possible to use instance attributes to implement the cached - methods. - - Instead, install the wrapper method under a different name - and return a simple proxy to that wrapper. - - https://github.com/jaraco/jaraco.functools/issues/5 - """ - name = method.__name__ - special_names = '__getattr__', '__getitem__' - if name not in special_names: - return - - wrapper_name = '__cached' + name - - def proxy(self, *args, **kwargs): - if wrapper_name not in vars(self): - bound = functools.partial(method, self) - cache = cache_wrapper(bound) - setattr(self, wrapper_name, cache) - else: - cache = getattr(self, wrapper_name) - return cache(*args, **kwargs) + """ + Because Python treats special methods differently, it's not + possible to use instance attributes to implement the cached + methods. + + Instead, install the wrapper method under a different name + and return a simple proxy to that wrapper. + + https://github.com/jaraco/jaraco.functools/issues/5 + """ + name = method.__name__ + special_names = '__getattr__', '__getitem__' + if name not in special_names: + return + + wrapper_name = '__cached' + name + + def proxy(self, *args, **kwargs): + if wrapper_name not in vars(self): + bound = types.MethodType(method, self) + cache = cache_wrapper(bound) + setattr(self, wrapper_name, cache) + else: + cache = getattr(self, wrapper_name) + return cache(*args, **kwargs) - return proxy + return proxy def apply(transform): - """ - Decorate a function with a transform function that is - invoked on results returned from the decorated function. - - >>> @apply(reversed) - ... def get_numbers(start): - ... return range(start, start+3) - >>> list(get_numbers(4)) - [6, 5, 4] - """ - def wrap(func): - return compose(transform, func) - return wrap + """ + Decorate a function with a transform function that is + invoked on results returned from the decorated function. + + >>> @apply(reversed) + ... def get_numbers(start): + ... return range(start, start+3) + >>> list(get_numbers(4)) + [6, 5, 4] + """ + + def wrap(func): + return compose(transform, func) + + return wrap def result_invoke(action): - r""" - Decorate a function with an action function that is - invoked on the results returned from the decorated - function (for its side-effect), then return the original - result. - - >>> @result_invoke(print) - ... def add_two(a, b): - ... return a + b - >>> x = add_two(2, 3) - 5 - """ - def wrap(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - result = func(*args, **kwargs) - action(result) - return result - return wrapper - return wrap + r""" + Decorate a function with an action function that is + invoked on the results returned from the decorated + function (for its side-effect), then return the original + result. + + >>> @result_invoke(print) + ... 
def add_two(a, b): + ... return a + b + >>> x = add_two(2, 3) + 5 + """ + + def wrap(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + result = func(*args, **kwargs) + action(result) + return result + + return wrapper + + return wrap def call_aside(f, *args, **kwargs): - """ - Call a function for its side effect after initialization. + """ + Call a function for its side effect after initialization. - >>> @call_aside - ... def func(): print("called") - called - >>> func() - called - - Use functools.partial to pass parameters to the initial call - - >>> @functools.partial(call_aside, name='bingo') - ... def func(name): print("called with", name) - called with bingo - """ - f(*args, **kwargs) - return f + >>> @call_aside + ... def func(): print("called") + called + >>> func() + called + + Use functools.partial to pass parameters to the initial call + + >>> @functools.partial(call_aside, name='bingo') + ... def func(name): print("called with", name) + called with bingo + """ + f(*args, **kwargs) + return f class Throttler: - """ - Rate-limit a function (or other callable) - """ - def __init__(self, func, max_rate=float('Inf')): - if isinstance(func, Throttler): - func = func.func - self.func = func - self.max_rate = max_rate - self.reset() - - def reset(self): - self.last_called = 0 - - def __call__(self, *args, **kwargs): - self._wait() - return self.func(*args, **kwargs) - - def _wait(self): - "ensure at least 1/max_rate seconds from last call" - elapsed = time.time() - self.last_called - must_wait = 1 / self.max_rate - elapsed - time.sleep(max(0, must_wait)) - self.last_called = time.time() + """ + Rate-limit a function (or other callable) + """ + + def __init__(self, func, max_rate=float('Inf')): + if isinstance(func, Throttler): + func = func.func + self.func = func + self.max_rate = max_rate + self.reset() + + def reset(self): + self.last_called = 0 + + def __call__(self, *args, **kwargs): + self._wait() + return self.func(*args, **kwargs) + + def _wait(self): + "ensure at least 1/max_rate seconds from last call" + elapsed = time.time() - self.last_called + must_wait = 1 / self.max_rate - elapsed + time.sleep(max(0, must_wait)) + self.last_called = time.time() - def __get__(self, obj, type=None): - return first_invoke(self._wait, functools.partial(self.func, obj)) + def __get__(self, obj, type=None): + return first_invoke(self._wait, functools.partial(self.func, obj)) def first_invoke(func1, func2): - """ - Return a function that when invoked will invoke func1 without - any parameters (for its side-effect) and then invoke func2 - with whatever parameters were passed, returning its result. - """ - def wrapper(*args, **kwargs): - func1() - return func2(*args, **kwargs) - return wrapper + """ + Return a function that when invoked will invoke func1 without + any parameters (for its side-effect) and then invoke func2 + with whatever parameters were passed, returning its result. + """ + + def wrapper(*args, **kwargs): + func1() + return func2(*args, **kwargs) + + return wrapper def retry_call(func, cleanup=lambda: None, retries=0, trap=()): - """ - Given a callable func, trap the indicated exceptions - for up to 'retries' times, invoking cleanup on the - exception. On the final attempt, allow any exceptions - to propagate. 
- """ - attempts = count() if retries == float('inf') else range(retries) - for attempt in attempts: - try: - return func() - except trap: - cleanup() + """ + Given a callable func, trap the indicated exceptions + for up to 'retries' times, invoking cleanup on the + exception. On the final attempt, allow any exceptions + to propagate. + """ + attempts = itertools.count() if retries == float('inf') else range(retries) + for attempt in attempts: + try: + return func() + except trap: + cleanup() - return func() + return func() def retry(*r_args, **r_kwargs): - """ - Decorator wrapper for retry_call. Accepts arguments to retry_call - except func and then returns a decorator for the decorated function. - - Ex: - - >>> @retry(retries=3) - ... def my_func(a, b): - ... "this is my funk" - ... print(a, b) - >>> my_func.__doc__ - 'this is my funk' - """ - def decorate(func): - @functools.wraps(func) - def wrapper(*f_args, **f_kwargs): - bound = functools.partial(func, *f_args, **f_kwargs) - return retry_call(bound, *r_args, **r_kwargs) - return wrapper - return decorate + """ + Decorator wrapper for retry_call. Accepts arguments to retry_call + except func and then returns a decorator for the decorated function. + + Ex: + + >>> @retry(retries=3) + ... def my_func(a, b): + ... "this is my funk" + ... print(a, b) + >>> my_func.__doc__ + 'this is my funk' + """ + + def decorate(func): + @functools.wraps(func) + def wrapper(*f_args, **f_kwargs): + bound = functools.partial(func, *f_args, **f_kwargs) + return retry_call(bound, *r_args, **r_kwargs) + + return wrapper + + return decorate def print_yielded(func): - """ - Convert a generator into a function that prints all yielded elements + """ + Convert a generator into a function that prints all yielded elements - >>> @print_yielded - ... def x(): - ... yield 3; yield None - >>> x() - 3 - None - """ - print_all = functools.partial(map, print) - print_results = compose(more_itertools.recipes.consume, print_all, func) - return functools.wraps(func)(print_results) + >>> @print_yielded + ... def x(): + ... yield 3; yield None + >>> x() + 3 + None + """ + print_all = functools.partial(map, print) + print_results = compose(more_itertools.recipes.consume, print_all, func) + return functools.wraps(func)(print_results) def pass_none(func): - """ - Wrap func so it's not called if its first param is None + """ + Wrap func so it's not called if its first param is None - >>> print_text = pass_none(print) - >>> print_text('text') - text - >>> print_text(None) - """ - @functools.wraps(func) - def wrapper(param, *args, **kwargs): - if param is not None: - return func(param, *args, **kwargs) - return wrapper + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper def assign_params(func, namespace): - """ - Assign parameters from namespace where func solicits. + """ + Assign parameters from namespace where func solicits. - >>> def func(x, y=3): - ... print(x, y) - >>> assigned = assign_params(func, dict(x=2, z=4)) - >>> assigned() - 2 3 - - The usual errors are raised if a function doesn't receive - its required parameters: - - >>> assigned = assign_params(func, dict(y=3, z=4)) - >>> assigned() - Traceback (most recent call last): - TypeError: func() ...argument... - - It even works on methods: - - >>> class Handler: - ... def meth(self, arg): - ... 
print(arg) - >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))() - crystal - """ - try: - sig = inspect.signature(func) - params = sig.parameters.keys() - except AttributeError: - spec = inspect.getargspec(func) - params = spec.args - call_ns = { - k: namespace[k] - for k in params - if k in namespace - } - return functools.partial(func, **call_ns) + >>> def func(x, y=3): + ... print(x, y) + >>> assigned = assign_params(func, dict(x=2, z=4)) + >>> assigned() + 2 3 + + The usual errors are raised if a function doesn't receive + its required parameters: + + >>> assigned = assign_params(func, dict(y=3, z=4)) + >>> assigned() + Traceback (most recent call last): + TypeError: func() ...argument... + + It even works on methods: + + >>> class Handler: + ... def meth(self, arg): + ... print(arg) + >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))() + crystal + """ + sig = inspect.signature(func) + params = sig.parameters.keys() + call_ns = {k: namespace[k] for k in params if k in namespace} + return functools.partial(func, **call_ns) def save_method_args(method): - """ - Wrap a method such that when it is called, the args and kwargs are - saved on the method. - - >>> class MyClass: - ... @save_method_args - ... def method(self, a, b): - ... print(a, b) - >>> my_ob = MyClass() - >>> my_ob.method(1, 2) - 1 2 - >>> my_ob._saved_method.args - (1, 2) - >>> my_ob._saved_method.kwargs - {} - >>> my_ob.method(a=3, b='foo') - 3 foo - >>> my_ob._saved_method.args - () - >>> my_ob._saved_method.kwargs == dict(a=3, b='foo') - True - - The arguments are stored on the instance, allowing for - different instance to save different args. - - >>> your_ob = MyClass() - >>> your_ob.method({str('x'): 3}, b=[4]) - {'x': 3} [4] - >>> your_ob._saved_method.args - ({'x': 3},) - >>> my_ob._saved_method.args - () - """ - args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs') - - @functools.wraps(method) - def wrapper(self, *args, **kwargs): - attr_name = '_saved_' + method.__name__ - attr = args_and_kwargs(args, kwargs) - setattr(self, attr_name, attr) - return method(self, *args, **kwargs) - return wrapper + """ + Wrap a method such that when it is called, the args and kwargs are + saved on the method. + + >>> class MyClass: + ... @save_method_args + ... def method(self, a, b): + ... print(a, b) + >>> my_ob = MyClass() + >>> my_ob.method(1, 2) + 1 2 + >>> my_ob._saved_method.args + (1, 2) + >>> my_ob._saved_method.kwargs + {} + >>> my_ob.method(a=3, b='foo') + 3 foo + >>> my_ob._saved_method.args + () + >>> my_ob._saved_method.kwargs == dict(a=3, b='foo') + True + + The arguments are stored on the instance, allowing for + different instance to save different args. 
+ + >>> your_ob = MyClass() + >>> your_ob.method({str('x'): 3}, b=[4]) + {'x': 3} [4] + >>> your_ob._saved_method.args + ({'x': 3},) + >>> my_ob._saved_method.args + () + """ + args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs') + + @functools.wraps(method) + def wrapper(self, *args, **kwargs): + attr_name = '_saved_' + method.__name__ + attr = args_and_kwargs(args, kwargs) + setattr(self, attr_name, attr) + return method(self, *args, **kwargs) + + return wrapper diff -Nru python-jaraco.functools-2.0/.pre-commit-config.yaml python-jaraco.functools-3.0.0/.pre-commit-config.yaml --- python-jaraco.functools-2.0/.pre-commit-config.yaml 1970-01-01 00:00:00.000000000 +0000 +++ python-jaraco.functools-3.0.0/.pre-commit-config.yaml 2019-12-20 04:45:46.000000000 +0000 @@ -0,0 +1,5 @@ +repos: +- repo: https://github.com/psf/black + rev: 19.3b0 + hooks: + - id: black diff -Nru python-jaraco.functools-2.0/pyproject.toml python-jaraco.functools-3.0.0/pyproject.toml --- python-jaraco.functools-2.0/pyproject.toml 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/pyproject.toml 2019-12-20 04:45:46.000000000 +0000 @@ -1,3 +1,6 @@ [build-system] requires = ["setuptools>=34.4", "wheel", "setuptools_scm>=1.15"] build-backend = "setuptools.build_meta" + +[tool.black] +skip-string-normalization = true diff -Nru python-jaraco.functools-2.0/pytest.ini python-jaraco.functools-3.0.0/pytest.ini --- python-jaraco.functools-2.0/pytest.ini 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/pytest.ini 2019-12-20 04:45:46.000000000 +0000 @@ -1,11 +1,5 @@ [pytest] norecursedirs=dist build .tox .eggs -addopts=--doctest-modules --flake8 +addopts=--doctest-modules --flake8 --black --cov doctest_optionflags=ALLOW_UNICODE ELLIPSIS filterwarnings= - ignore:Possible nested set::pycodestyle:113 - ignore:Using or importing the ABCs::flake8:410 - # workaround for https://sourceforge.net/p/docutils/bugs/348/ - ignore:'U' mode is deprecated::docutils.io - # workaround for https://gitlab.com/pycqa/flake8/issues/275 - ignore:You passed a bytestring as `filenames`.::flake8 diff -Nru python-jaraco.functools-2.0/README.rst python-jaraco.functools-3.0.0/README.rst --- python-jaraco.functools-2.0/README.rst 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/README.rst 2019-12-20 04:45:46.000000000 +0000 @@ -6,6 +6,11 @@ .. image:: https://img.shields.io/travis/jaraco/jaraco.functools/master.svg :target: https://travis-ci.org/jaraco/jaraco.functools + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + .. .. image:: https://img.shields.io/appveyor/ci/jaraco/jaraco-functools/master.svg .. 
:target: https://ci.appveyor.com/project/jaraco-functools/skeleton/branch/master diff -Nru python-jaraco.functools-2.0/setup.cfg python-jaraco.functools-3.0.0/setup.cfg --- python-jaraco.functools-2.0/setup.cfg 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/setup.cfg 2019-12-20 04:45:46.000000000 +0000 @@ -13,24 +13,24 @@ Development Status :: 5 - Production/Stable Intended Audience :: Developers License :: OSI Approved :: MIT License - Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 [options] packages = find: include_package_data = true -python_requires = >=2.7 +python_requires = >=3.6 install_requires = more_itertools - backports.functools_lru_cache >= 1.0.3; python_version == "2.7" setup_requires = setuptools_scm >= 1.15.0 [options.extras_require] testing = # upstream pytest >= 3.5, !=3.7.3 - pytest-checkdocs + pytest-checkdocs >= 1.2.3 pytest-flake8 + pytest-black-multipy + pytest-cov # local six diff -Nru python-jaraco.functools-2.0/skeleton.md python-jaraco.functools-3.0.0/skeleton.md --- python-jaraco.functools-2.0/skeleton.md 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/skeleton.md 2019-12-20 04:45:46.000000000 +0000 @@ -10,6 +10,8 @@ The primary advantage to using an SCM for maintaining these techniques is that those tools help facilitate the merge between the template and its adopting projects. +Another advantage to using an SCM-managed approach is that tools like GitHub recognize that a change in the skeleton is the _same change_ across all projects that merge with that skeleton. Without the ancestry, with a traditional copy/paste approach, a [commit like this](https://github.com/jaraco/skeleton/commit/12eed1326e1bc26ce256e7b3f8cd8d3a5beab2d5) would produce notifications in the upstream project issue for each and every application, but because it's centralized, GitHub provides just the one notification when the change is added to the skeleton. + # Usage ## new projects @@ -48,7 +50,8 @@ - setuptools declarative configuration using setup.cfg - tox for running tests - A README.rst as reStructuredText with some popular badges, but with readthedocs and appveyor badges commented out -- A CHANGES.rst file intended for publishing release notes about the project. +- A CHANGES.rst file intended for publishing release notes about the project +- Use of [black](https://black.readthedocs.io/en/stable/) for code formatting (disabled on unsupported Python 3.5 and earlier) ## Packaging Conventions @@ -82,7 +85,7 @@ Other environments (invoked with `tox -e {name}`) supplied include: - - a `build-docs` environment to build the documentation + - a `docs` environment to build the documentation - a `release` environment to publish the package to PyPI A pytest.ini is included to define common options around running tests. In particular: @@ -95,18 +98,12 @@ Relies a .flake8 file to correct some default behaviors: -- allow tabs for indentation (legacy for jaraco projects) -- disable mutually incompatible rules W503 and W504. +- disable mutually incompatible rules W503 and W504 +- support for black format ## Continuous Integration -The project is pre-configured to run tests in [Travis-CI](https://travis-ci.org) (.travis.yml). Any new project must be enabled either through their web site or with the `travis enable` command. In addition to running tests, an additional deploy stage is configured to automatically release tagged commits. 
The username and password for PyPI must be configured for each project using the `travis` command and only after the travis project is created. As releases are cut with [twine](https://pypi.org/project/twine), the two values are supplied through the `TWINE_USERNAME` and `TWINE_PASSWORD`. To configure the latter as a secret, run the following command: - -``` -echo "TWINE_PASSWORD={password}" | travis encrypt -``` - -Or disable it in the CI definition and configure it through the web UI. +The project is pre-configured to run tests in [Travis-CI](https://travis-ci.org) (.travis.yml). Any new project must be enabled either through their web site or with the `travis enable` command. Features include: - test against Python 2 and 3 @@ -115,12 +112,26 @@ Also provided is a minimal template for running under Appveyor (Windows). +### Continuous Deployments + +In addition to running tests, an additional deploy stage is configured to automatically release tagged commits to PyPI using [API tokens](https://pypi.org/help/#apitoken). The release process expects an authorized token to be configured with Travis as the TWINE_PASSWORD environment variable. After the Travis project is created, configure the token through the web UI or with a command like the following (bash syntax): + +``` +TWINE_PASSWORD={token} travis env copy TWINE_PASSWORD +``` + ## Building Documentation -Documentation is automatically built by [Read the Docs](https://readthedocs.org) when the project is registered with it, by way of the .readthedocs.yml file. To test the docs build manually, a tox env may be invoked as `tox -e build-docs`. Both techniques rely on the dependencies declared in `setup.cfg/options.extras_require.docs`. +Documentation is automatically built by [Read the Docs](https://readthedocs.org) when the project is registered with it, by way of the .readthedocs.yml file. To test the docs build manually, a tox env may be invoked as `tox -e docs`. Both techniques rely on the dependencies declared in `setup.cfg/options.extras_require.docs`. In addition to building the sphinx docs scaffolded in `docs/`, the docs build a `history.html` file that first injects release dates and hyperlinks into the CHANGES.rst before incorporating it as history in the docs. ## Cutting releases By default, tagged commits are released through the continuous integration deploy stage. + +Releases may also be cut manually by invoking the tox environment `release` with the PyPI token set as the TWINE_PASSWORD: + +``` +TWINE_PASSWORD={token} tox -e release +``` diff -Nru python-jaraco.functools-2.0/test_functools.py python-jaraco.functools-3.0.0/test_functools.py --- python-jaraco.functools-2.0/test_functools.py 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/test_functools.py 2019-12-20 04:45:46.000000000 +0000 @@ -10,229 +10,229 @@ from jaraco.functools import Throttler, method_cache, retry_call, retry -__metaclass__ = type - class TestThrottler: - def test_function_throttled(self): - """ - Ensure the throttler actually throttles calls. 
- """ - # set up a function to be called - counter = itertools.count() - # set up a version of `next` that is only called 30 times per second - limited_next = Throttler(next, 30) - # for one second, call next as fast as possible - deadline = time.time() + 1 - while time.time() < deadline: - limited_next(counter) - # ensure the counter was advanced about 30 times - assert 28 <= next(counter) <= 32 - - # ensure that another burst of calls after some idle period will also - # get throttled - time.sleep(1) - deadline = time.time() + 1 - counter = itertools.count() - while time.time() < deadline: - limited_next(counter) - assert 28 <= next(counter) <= 32 - - def test_reconstruct_unwraps(self): - """ - The throttler should be re-usable - if one wants to throttle a - function that's aready throttled, the original function should be - used. - """ - wrapped = Throttler(next, 30) - wrapped_again = Throttler(wrapped, 60) - assert wrapped_again.func is next - assert wrapped_again.max_rate == 60 - - def test_throttled_method(self): - class ThrottledMethodClass: - @Throttler - def echo(self, arg): - return arg + def test_function_throttled(self): + """ + Ensure the throttler actually throttles calls. + """ + # set up a function to be called + counter = itertools.count() + # set up a version of `next` that is only called 30 times per second + limited_next = Throttler(next, 30) + # for one second, call next as fast as possible + deadline = time.time() + 1 + while time.time() < deadline: + limited_next(counter) + # ensure the counter was advanced about 30 times + assert 28 <= next(counter) <= 32 + + # ensure that another burst of calls after some idle period will also + # get throttled + time.sleep(1) + deadline = time.time() + 1 + counter = itertools.count() + while time.time() < deadline: + limited_next(counter) + assert 28 <= next(counter) <= 32 + + def test_reconstruct_unwraps(self): + """ + The throttler should be re-usable - if one wants to throttle a + function that's aready throttled, the original function should be + used. + """ + wrapped = Throttler(next, 30) + wrapped_again = Throttler(wrapped, 60) + assert wrapped_again.func is next + assert wrapped_again.max_rate == 60 + + def test_throttled_method(self): + class ThrottledMethodClass: + @Throttler + def echo(self, arg): + return arg - tmc = ThrottledMethodClass() - assert tmc.echo('foo') == 'foo' + tmc = ThrottledMethodClass() + assert tmc.echo('foo') == 'foo' class TestMethodCache: - bad_vers = '(3, 5, 0) <= sys.version_info < (3, 5, 2)' + bad_vers = '(3, 5, 0) <= sys.version_info < (3, 5, 2)' - @pytest.mark.skipif(bad_vers, reason="https://bugs.python.org/issue25447") - def test_deepcopy(self): - """ - A deepcopy of an object with a method cache should still - succeed. - """ - class ClassUnderTest: - calls = 0 - - @method_cache - def method(self, value): - self.calls += 1 - return value - - ob = ClassUnderTest() - copy.deepcopy(ob) - ob.method(1) - copy.deepcopy(ob) - - def test_special_methods(self): - """ - Test method_cache with __getitem__ and __getattr__. 
- """ - class ClassUnderTest: - getitem_calls = 0 - getattr_calls = 0 - - @method_cache - def __getitem__(self, item): - self.getitem_calls += 1 - return item - - @method_cache - def __getattr__(self, name): - self.getattr_calls += 1 - return name - - ob = ClassUnderTest() - - # __getitem__ - ob[1] + ob[1] - assert ob.getitem_calls == 1 - - # __getattr__ - ob.one + ob.one - assert ob.getattr_calls == 1 - - @pytest.mark.xfail(reason="can't replace property with cache; #6") - def test_property(self): - """ - Can a method_cache decorated method also be a property? - """ - class ClassUnderTest: - @property - @method_cache - def mything(self): - return random.random() - - ob = ClassUnderTest() - - assert ob.mything == ob.mything - - @pytest.mark.xfail(reason="can't replace property with cache; #6") - def test_non_data_property(self): - """ - A non-data property also does not work because the property - gets replaced with a method. - """ - class ClassUnderTest: - @properties.NonDataProperty - @method_cache - def mything(self): - return random.random() + @pytest.mark.skipif(bad_vers, reason="https://bugs.python.org/issue25447") + def test_deepcopy(self): + """ + A deepcopy of an object with a method cache should still + succeed. + """ + + class ClassUnderTest: + calls = 0 + + @method_cache + def method(self, value): + self.calls += 1 + return value + + ob = ClassUnderTest() + copy.deepcopy(ob) + ob.method(1) + copy.deepcopy(ob) + + def test_special_methods(self): + """ + Test method_cache with __getitem__ and __getattr__. + """ + + class ClassUnderTest: + getitem_calls = 0 + getattr_calls = 0 + + @method_cache + def __getitem__(self, item): + self.getitem_calls += 1 + return item + + @method_cache + def __getattr__(self, name): + self.getattr_calls += 1 + return name + + ob = ClassUnderTest() + + # __getitem__ + ob[1] + ob[1] + assert ob.getitem_calls == 1 + + # __getattr__ + ob.one + ob.one + assert ob.getattr_calls == 1 + + @pytest.mark.xfail(reason="can't replace property with cache; #6") + def test_property(self): + """ + Can a method_cache decorated method also be a property? + """ + + class ClassUnderTest: + @property + @method_cache + def mything(self): # pragma: nocover + return random.random() + + ob = ClassUnderTest() + + assert ob.mything == ob.mything + + @pytest.mark.xfail(reason="can't replace property with cache; #6") + def test_non_data_property(self): + """ + A non-data property also does not work because the property + gets replaced with a method. + """ + + class ClassUnderTest: + @properties.NonDataProperty + @method_cache + def mything(self): + return random.random() - ob = ClassUnderTest() + ob = ClassUnderTest() - assert ob.mything == ob.mything + assert ob.mything == ob.mything class TestRetry: - def attempt(self, arg=None): - if next(self.fails_left): - raise ValueError("Failed!") - if arg: - arg.touch() - return "Success" - - def set_to_fail(self, times): - self.fails_left = itertools.count(times, -1) - - def test_set_to_fail(self): - """ - Test this test's internal failure mechanism. - """ - self.set_to_fail(times=2) - with pytest.raises(ValueError): - self.attempt() - with pytest.raises(ValueError): - self.attempt() - assert self.attempt() == 'Success' - - def test_retry_call_succeeds(self): - self.set_to_fail(times=2) - res = retry_call(self.attempt, retries=2, trap=ValueError) - assert res == "Success" - - def test_retry_call_fails(self): - """ - Failing more than the number of retries should - raise the underlying error. 
- """ - self.set_to_fail(times=3) - with pytest.raises(ValueError) as res: - retry_call(self.attempt, retries=2, trap=ValueError) - assert str(res.value) == 'Failed!' - - def test_retry_multiple_exceptions(self): - self.set_to_fail(times=2) - errors = ValueError, NameError - res = retry_call(self.attempt, retries=2, trap=errors) - assert res == "Success" - - def test_retry_exception_superclass(self): - self.set_to_fail(times=2) - res = retry_call(self.attempt, retries=2, trap=Exception) - assert res == "Success" - - def test_default_traps_nothing(self): - self.set_to_fail(times=1) - with pytest.raises(ValueError): - retry_call(self.attempt, retries=1) - - def test_default_does_not_retry(self): - self.set_to_fail(times=1) - with pytest.raises(ValueError): - retry_call(self.attempt, trap=Exception) - - def test_cleanup_called_on_exception(self): - calls = random.randint(1, 10) - cleanup = mock.Mock() - self.set_to_fail(times=calls) - retry_call(self.attempt, retries=calls, cleanup=cleanup, trap=Exception) - assert cleanup.call_count == calls - assert cleanup.called_with() - - def test_infinite_retries(self): - self.set_to_fail(times=999) - cleanup = mock.Mock() - retry_call( - self.attempt, retries=float('inf'), cleanup=cleanup, - trap=Exception) - assert cleanup.call_count == 999 - - def test_with_arg(self): - self.set_to_fail(times=0) - arg = mock.Mock() - bound = functools.partial(self.attempt, arg) - res = retry_call(bound) - assert res == 'Success' - assert arg.touch.called - - def test_decorator(self): - self.set_to_fail(times=1) - attempt = retry(retries=1, trap=Exception)(self.attempt) - res = attempt() - assert res == "Success" - - def test_decorator_with_arg(self): - self.set_to_fail(times=0) - attempt = retry()(self.attempt) - arg = mock.Mock() - res = attempt(arg) - assert res == 'Success' - assert arg.touch.called + def attempt(self, arg=None): + if next(self.fails_left): + raise ValueError("Failed!") + if arg: + arg.touch() + return "Success" + + def set_to_fail(self, times): + self.fails_left = itertools.count(times, -1) + + def test_set_to_fail(self): + """ + Test this test's internal failure mechanism. + """ + self.set_to_fail(times=2) + with pytest.raises(ValueError): + self.attempt() + with pytest.raises(ValueError): + self.attempt() + assert self.attempt() == 'Success' + + def test_retry_call_succeeds(self): + self.set_to_fail(times=2) + res = retry_call(self.attempt, retries=2, trap=ValueError) + assert res == "Success" + + def test_retry_call_fails(self): + """ + Failing more than the number of retries should + raise the underlying error. + """ + self.set_to_fail(times=3) + with pytest.raises(ValueError) as res: + retry_call(self.attempt, retries=2, trap=ValueError) + assert str(res.value) == 'Failed!' 
+ + def test_retry_multiple_exceptions(self): + self.set_to_fail(times=2) + errors = ValueError, NameError + res = retry_call(self.attempt, retries=2, trap=errors) + assert res == "Success" + + def test_retry_exception_superclass(self): + self.set_to_fail(times=2) + res = retry_call(self.attempt, retries=2, trap=Exception) + assert res == "Success" + + def test_default_traps_nothing(self): + self.set_to_fail(times=1) + with pytest.raises(ValueError): + retry_call(self.attempt, retries=1) + + def test_default_does_not_retry(self): + self.set_to_fail(times=1) + with pytest.raises(ValueError): + retry_call(self.attempt, trap=Exception) + + def test_cleanup_called_on_exception(self): + calls = random.randint(1, 10) + cleanup = mock.Mock() + self.set_to_fail(times=calls) + retry_call(self.attempt, retries=calls, cleanup=cleanup, trap=Exception) + assert cleanup.call_count == calls + assert cleanup.called_with() + + def test_infinite_retries(self): + self.set_to_fail(times=999) + cleanup = mock.Mock() + retry_call(self.attempt, retries=float('inf'), cleanup=cleanup, trap=Exception) + assert cleanup.call_count == 999 + + def test_with_arg(self): + self.set_to_fail(times=0) + arg = mock.Mock() + bound = functools.partial(self.attempt, arg) + res = retry_call(bound) + assert res == 'Success' + assert arg.touch.called + + def test_decorator(self): + self.set_to_fail(times=1) + attempt = retry(retries=1, trap=Exception)(self.attempt) + res = attempt() + assert res == "Success" + + def test_decorator_with_arg(self): + self.set_to_fail(times=0) + attempt = retry()(self.attempt) + arg = mock.Mock() + res = attempt(arg) + assert res == 'Success' + assert arg.touch.called diff -Nru python-jaraco.functools-2.0/tox.ini python-jaraco.functools-3.0.0/tox.ini --- python-jaraco.functools-2.0/tox.ini 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/tox.ini 2019-12-20 04:45:46.000000000 +0000 @@ -1,30 +1,41 @@ [tox] envlist = python -minversion = 2.4 +minversion = 3.2 +# https://github.com/jaraco/skeleton/issues/6 +tox_pip_extensions_ext_venv_update = true +# Ensure that a late version of pip is used even on tox-venv. +requires = + tox-pip-version>=0.0.6 + tox-venv + [testenv] deps = setuptools>=31.0.1 +pip_version = pip commands = pytest {posargs} usedevelop = True extras = testing -[testenv:build-docs] +[testenv:docs] extras = - docs - testing + docs + testing changedir = docs commands = - python -m sphinx . {toxinidir}/build/html + python -m sphinx . {toxinidir}/build/html [testenv:release] skip_install = True deps = pep517>=0.5 - # workaround for https://github.com/pypa/twine/issues/423 - git+https://github.com/pypa/twine - path.py + twine[keyring]>=1.13 + path +passenv = + TWINE_PASSWORD +setenv = + TWINE_USERNAME = {env:TWINE_USERNAME:__token__} commands = python -c "import path; path.Path('dist').rmtree_p()" python -m pep517.build . 
diff -Nru python-jaraco.functools-2.0/.travis.yml python-jaraco.functools-3.0.0/.travis.yml --- python-jaraco.functools-2.0/.travis.yml 2019-01-01 14:17:38.000000000 +0000 +++ python-jaraco.functools-3.0.0/.travis.yml 2019-12-20 04:45:46.000000000 +0000 @@ -1,11 +1,9 @@ dist: xenial -sudo: false language: python python: -- 2.7 - 3.6 -- &latest_py3 3.7 +- &latest_py3 3.8 jobs: fast_finish: true @@ -14,11 +12,6 @@ if: tag IS present python: *latest_py3 before_script: skip - env: - - TWINE_USERNAME=jaraco - # TWINE_PASSWORD - - secure: MploUiVMnwq1vqCrSGRmdSlx21PnlXCg9w4bjhD6tYyN4NvHowbpT9E4or2aBW622xmIi7puiu3fuxgTrp7nh+7CVl0hR1Aa2+Y5ggA50uAHUoO5VSYN21w9V4nNe1u26IjnktZ9jK5Fz+aubrhA9kQX6M1Msowdo/IDCEi+gMQQdGR5S7IEBb0ZWpuW28AKT7j/iivs1gffrmmuiIJ+7o+khWJ/LZ9I0qAJCJDksqql1UP3ScZbOPihpjU0Lmo0aWxUSIiF6Qn7S4+3PAOXLxw29wf4yHECu+n1TzOWy3l/Hl5RpTLGPXeHfgWwDNOsHmWJhhc8rj2c+6fDgCWUMqxDWgJtOTKeRIRoxy6Dz8xosssYu/LhDPCklkNoswRRHvWLCCbwnJplNdmgWU+jZ9DQZ8Mvfy1dy6M92uxwYKfCYBTyGqR6tkAZD5zWZkm3gVXbJw2vMc+kKngPgNKY89w1epK3o53LybTaFvT+3FlUtgoMJjvUFbhGIqtpKcPKWToqUSrnaMmi3aJB9CcIeSE71oMn/4S5TTaI9JAwidVYZQw+JF3UeZ3aDJ2fAvj7Xpklk41sm5URJu9QFMbE10AxX4+deoRXC/mxlVfoTgRyWgxgjtjM6i1P++57ETx75K1FyDgI1qTXMvVCHbrLggjlvulTU5zB/yGs/pwBmQY= - - TOX_TESTENV_PASSENV="TWINE_USERNAME TWINE_PASSWORD" script: tox -e release cache: pip
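
The change most visible in the library diff above is the removal of the Python 2 compatibility layer (the lru_cache backports, six, and __metaclass__) in favour of a Python 3.6+ only code base. For orientation, here is a minimal usage sketch of the decorators whose docstrings appear in the diff (method_cache, once, retry). It is not part of the package diff, and the names Resource, connect, and flaky are hypothetical examples chosen to mirror the MyClass/add_three examples shown in the docstrings.

import itertools

from jaraco.functools import method_cache, once, retry


class Resource:
    # hypothetical class, mirroring the MyClass example in the docstrings above
    calls = 0

    @method_cache
    def lookup(self, key):
        self.calls += 1  # incremented only on cache misses (cache is per instance)
        return key * 2


@once
def connect():
    # evaluated a single time; later calls return the saved result
    print('connecting')
    return object()


_attempts = itertools.count()


@retry(retries=2, trap=ValueError)
def flaky():
    # fails twice, then succeeds; retry() traps the intermediate ValueErrors
    if next(_attempts) < 2:
        raise ValueError('transient failure')
    return 'ok'


r = Resource()
assert r.lookup(3) == 6 and r.lookup(3) == 6 and r.calls == 1
connect()
connect()  # prints 'connecting' only once
assert flaky() == 'ok'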