diff -Nru fabric-2.5.0/debian/changelog fabric-2.6.0/debian/changelog --- fabric-2.5.0/debian/changelog 2021-12-28 13:37:52.000000000 +0000 +++ fabric-2.6.0/debian/changelog 2022-02-19 13:51:01.000000000 +0000 @@ -1,3 +1,23 @@ +fabric (2.6.0-1) unstable; urgency=medium + + * Team upload. + * Set Rules-Requires-Root: no + * Use https for Homepage URL + * Switch to debhelper-compat 13 + * Switch to dh-sequence-python3 + * Use d/clean instead of override + * Update d/watch version + * Override very-long-line-length-in-source-file warning + * Set Section: python for python3 module package + * Add d/upstream/metadata file + * De-duplicate short description in d/control + * Bump Standards-Version to 4.6.0 + * Update d/copyright + * Move maintenance to Python Team (Closes: #984559) + * New upstream version 2.6.0 + + -- Luca Boccassi Sat, 19 Feb 2022 13:51:01 +0000 + fabric (2.5.0-0.4) unstable; urgency=medium * Non-maintainer upload. diff -Nru fabric-2.5.0/debian/clean fabric-2.6.0/debian/clean --- fabric-2.5.0/debian/clean 1970-01-01 00:00:00.000000000 +0000 +++ fabric-2.6.0/debian/clean 2022-02-19 13:51:01.000000000 +0000 @@ -0,0 +1,2 @@ +sites/docs/build/ +fabric.egg-info/ diff -Nru fabric-2.5.0/debian/compat fabric-2.6.0/debian/compat --- fabric-2.5.0/debian/compat 2019-11-28 18:29:11.000000000 +0000 +++ fabric-2.6.0/debian/compat 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -10 diff -Nru fabric-2.5.0/debian/control fabric-2.6.0/debian/control --- fabric-2.5.0/debian/control 2021-12-28 13:37:23.000000000 +0000 +++ fabric-2.6.0/debian/control 2022-02-19 13:51:01.000000000 +0000 @@ -1,9 +1,11 @@ Source: fabric Section: net Priority: optional -Maintainer: Andrew Starr-Bochicchio -Build-Depends: debhelper (>= 10), +Maintainer: Debian Python Team +Uploaders: Andrew Starr-Bochicchio +Build-Depends: debhelper-compat (= 13), dh-python, + dh-sequence-python3, python3, python3-alabaster, python3-decorator , @@ -12,10 +14,11 @@ python3-paramiko, python3-setuptools, python3-sphinx, -Standards-Version: 4.1.2 -Vcs-Git: https://salsa.debian.org/debian/fabric.git -Vcs-Browser: https://salsa.debian.org/debian/fabric -Homepage: http://fabfile.org/ +Standards-Version: 4.6.0 +Rules-Requires-Root: no +Vcs-Git: https://salsa.debian.org/python-team/packages/fabric.git +Vcs-Browser: https://salsa.debian.org/python-team/packages/fabric +Homepage: https://fabfile.org/ Package: fabric Architecture: all @@ -37,10 +40,11 @@ Package: python3-fabric Architecture: all +Section: python Depends: ${misc:Depends}, ${python3:Depends}, python3-decorator, -Description: Simple Pythonic remote deployment tool +Description: Simple Pythonic remote deployment tool - standalone module Fabric is designed to upload files and run shell commands on a number of servers in parallel or serially. These commands are grouped in tasks (which are regular Python functions) and specified in a 'fabfile.' 
diff -Nru fabric-2.5.0/debian/copyright fabric-2.6.0/debian/copyright --- fabric-2.5.0/debian/copyright 2019-11-28 18:29:11.000000000 +0000 +++ fabric-2.6.0/debian/copyright 2022-02-19 13:51:01.000000000 +0000 @@ -12,6 +12,7 @@ Copyright: 2008 Chris Lamb 2014 Ana Beatriz Guerrero Lopez 2015 Stein Magnus Jodal + 2019-2022 Luca Boccassi License: GPL-2 This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by diff -Nru fabric-2.5.0/debian/rules fabric-2.6.0/debian/rules --- fabric-2.5.0/debian/rules 2021-12-28 13:35:52.000000000 +0000 +++ fabric-2.6.0/debian/rules 2022-02-19 13:51:01.000000000 +0000 @@ -8,13 +8,8 @@ export PYBUILD_INSTALL_ARGS=--install-scripts=../fabric/usr/bin %: - dh $@ --with python3,sphinxdoc --buildsystem=pybuild + dh $@ --with sphinxdoc --buildsystem=pybuild override_dh_auto_install: dh_auto_install PYTHONPATH=. http_proxy='127.0.0.1:9' sphinx-build -N sites/docs/ sites/docs/build/html/ - -override_dh_auto_clean: - dh_auto_clean - rm -rf sites/docs/build/ - rm -rf fabric.egg-info/ diff -Nru fabric-2.5.0/debian/salsa-ci.yml fabric-2.6.0/debian/salsa-ci.yml --- fabric-2.5.0/debian/salsa-ci.yml 1970-01-01 00:00:00.000000000 +0000 +++ fabric-2.6.0/debian/salsa-ci.yml 2022-02-19 13:51:01.000000000 +0000 @@ -0,0 +1,4 @@ +--- +include: + - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/salsa-ci.yml + - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/pipeline-jobs.yml diff -Nru fabric-2.5.0/debian/source/lintian-overrides fabric-2.6.0/debian/source/lintian-overrides --- fabric-2.5.0/debian/source/lintian-overrides 1970-01-01 00:00:00.000000000 +0000 +++ fabric-2.6.0/debian/source/lintian-overrides 2022-02-19 13:51:01.000000000 +0000 @@ -0,0 +1 @@ +fabric source: very-long-line-length-in-source-file diff -Nru fabric-2.5.0/debian/upstream/metadata fabric-2.6.0/debian/upstream/metadata --- fabric-2.5.0/debian/upstream/metadata 1970-01-01 00:00:00.000000000 +0000 +++ fabric-2.6.0/debian/upstream/metadata 2022-02-19 13:51:01.000000000 +0000 @@ -0,0 +1,5 @@ +--- +Bug-Database: https://github.com/fabric/fabric/issues +Bug-Submit: https://github.com/fabric/fabric/issues/new +Repository: https://github.com/fabric/fabric.git +Repository-Browse: https://github.com/fabric/fabric diff -Nru fabric-2.5.0/debian/watch fabric-2.6.0/debian/watch --- fabric-2.5.0/debian/watch 2019-11-28 18:29:11.000000000 +0000 +++ fabric-2.6.0/debian/watch 2022-02-19 13:51:01.000000000 +0000 @@ -1,3 +1,3 @@ -version=3 +version=4 opts=filenamemangle=s/.+\/v?(\d\S*)\.tar\.gz/fabric-$1\.tar\.gz/ \ https://github.com/fabric/fabric/tags .*/v?(\d\S*)\.tar\.gz diff -Nru fabric-2.5.0/dev-requirements.txt fabric-2.6.0/dev-requirements.txt --- fabric-2.5.0/dev-requirements.txt 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/dev-requirements.txt 2021-01-19 01:09:46.000000000 +0000 @@ -14,8 +14,8 @@ # Linting! flake8==3.6.0 # Coverage! -coverage==3.7.1 -codecov==1.6.3 +coverage==5.3.1 +codecov==2.1.11 # Documentation tools sphinx>=1.4,<1.7 alabaster==0.7.12 diff -Nru fabric-2.5.0/fabric/connection.py fabric-2.6.0/fabric/connection.py --- fabric-2.5.0/fabric/connection.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric/connection.py 2021-01-19 01:09:46.000000000 +0000 @@ -300,9 +300,11 @@ Default: ``config.timeouts.connect``. - .. _connect_kwargs-arg: :param dict connect_kwargs: + + .. _connect_kwargs-arg: + Keyword arguments handed verbatim to `SSHClient.connect ` (when `.open` is called). 
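For readers skimming the `connect_kwargs` hunk above (which only moves the Sphinx cross-reference anchor inside the parameter description): `connect_kwargs` is handed verbatim to Paramiko's `SSHClient.connect` when the connection is opened. A minimal usage sketch; the host name, user and key path are illustrative:

    from fabric import Connection

    # connect_kwargs is passed straight to paramiko.SSHClient.connect() on .open()
    cxn = Connection(
        "web1.example.com",
        user="deploy",
        connect_kwargs={"key_filename": ["/home/deploy/.ssh/id_ed25519"]},
    )
    result = cxn.run("uname -s", hide=True)  # the connection is opened lazily here
    print(result.stdout.strip())
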
diff -Nru fabric-2.5.0/fabric/group.py fabric-2.6.0/fabric/group.py --- fabric-2.5.0/fabric/group.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric/group.py 2021-01-19 01:09:46.000000000 +0000 @@ -18,9 +18,9 @@ concrete subclasses (such as `.SerialGroup` or `.ThreadingGroup`) or you'll get ``NotImplementedError`` on most of the methods. - Most methods in this class mirror those of `.Connection`, taking the same - arguments; however their return values and exception-raising behavior - differs: + Most methods in this class wrap those of `.Connection` and will accept the + same arguments; however their return values and exception-raising behavior + differ: - Return values are dict-like objects (`.GroupResult`) mapping `.Connection` objects to the return value for the respective connections: @@ -99,6 +99,11 @@ group.extend(connections) return group + def _do(self, method, *args, **kwargs): + # TODO: rename this something public & commit to an API for user + # subclasses + raise NotImplementedError + def run(self, *args, **kwargs): """ Executes `.Connection.run` on all member `Connections <.Connection>`. @@ -107,25 +112,22 @@ .. versionadded:: 2.0 """ - # TODO: probably best to suck it up & match actual run() sig? # TODO: how to change method of execution across contents? subclass, # kwargs, additional methods, inject an executor? Doing subclass for # now, but not 100% sure it's the best route. # TODO: also need way to deal with duplicate connections (see THOUGHTS) - # TODO: and errors - probably FailureSet? How to handle other, - # regular, non Failure, exceptions though? Still need an aggregate - # exception type either way, whether it is FailureSet or what... - # TODO: OTOH, users may well want to be able to operate on the hosts - # that did not fail (esp if failure % is low) so we really _do_ want - # something like a result object mixing success and failure, or maybe a - # golang style two-tuple of successes and failures? - # TODO: or keep going w/ a "return or except", but the object is - # largely similar (if not identical) in both situations, with the - # exception just being the signal that Shit Broke? - raise NotImplementedError + return self._do("run", *args, **kwargs) - # TODO: how to handle sudo? Probably just an inner worker method that takes - # the method name to actually call (run, sudo, etc)? + def sudo(self, *args, **kwargs): + """ + Executes `.Connection.sudo` on all member `Connections <.Connection>`. + + :returns: a `.GroupResult`. + + .. versionadded:: 2.6 + """ + # TODO: see run() TODOs + return self._do("sudo", *args, **kwargs) # TODO: this all needs to mesh well with similar strategies applied to # entire tasks - so that may still end up factored out into Executors or @@ -133,19 +135,59 @@ # TODO: local? Invoke wants ability to do that on its own though, which # would be distinct from Group. (May want to switch Group to use that, - # though, whatever it ends up being?) + # though, whatever it ends up being? Eg many cases where you do want to do + # some local thing either N times identically, or parameterized by remote + # cxn values) - def get(self, *args, **kwargs): + def put(self, *args, **kwargs): """ - Executes `.Connection.get` on all member `Connections <.Connection>`. + Executes `.Connection.put` on all member `Connections <.Connection>`. - :returns: a `.GroupResult`. 
+ This is a straightforward application: aside from whatever the concrete + group subclass does for concurrency or lack thereof, the effective + result is like running a loop over the connections and calling their + ``put`` method. - .. versionadded:: 2.0 + :returns: + a `.GroupResult` whose values are `.transfer.Result` instances. + + .. versionadded:: 2.6 """ - # TODO: probably best to suck it up & match actual get() sig? - # TODO: actually implement on subclasses - raise NotImplementedError + return self._do("put", *args, **kwargs) + + def get(self, *args, **kwargs): + """ + Executes `.Connection.get` on all member `Connections <.Connection>`. + + .. note:: + This method changes some behaviors over e.g. directly calling + `.Connection.get` on a ``for`` loop of connections; the biggest is + that the implied default value for the ``local`` parameter is + ``"{host}/"``, which triggers use of local path parameterization + based on each connection's target hostname. + + Thus, unless you override ``local`` yourself, a copy of the + downloaded file will be stored in (relative) directories named + after each host in the group. + + .. warning:: + Using file-like objects as the ``local`` argument is not currently + supported, as it would be equivalent to supplying that same object + to a series of individual ``get()`` calls. + + :returns: + a `.GroupResult` whose values are `.transfer.Result` instances. + + .. versionadded:: 2.6 + """ + # TODO: consider a backwards incompat change after we drop Py2 that + # just makes a lot of these kwarg-only methods? then below could become + # kwargs.setdefault() if desired. + # TODO: do we care enough to handle explicitly given, yet falsey, + # values? it's a lot more complexity for a corner case. + if len(args) < 2 and "local" not in kwargs: + kwargs["local"] = "{host}/" + return self._do("get", *args, **kwargs) def close(self): """ @@ -170,12 +212,12 @@ .. versionadded:: 2.0 """ - def run(self, *args, **kwargs): + def _do(self, method, *args, **kwargs): results = GroupResult() excepted = False for cxn in self: try: - results[cxn] = cxn.run(*args, **kwargs) + results[cxn] = getattr(cxn, method)(*args, **kwargs) except Exception as e: results[cxn] = e excepted = True @@ -184,8 +226,8 @@ return results -def thread_worker(cxn, queue, args, kwargs): - result = cxn.run(*args, **kwargs) +def thread_worker(cxn, queue, method, args, kwargs): + result = getattr(cxn, method)(*args, **kwargs) # TODO: namedtuple or attrs object? queue.put((cxn, result)) @@ -197,22 +239,26 @@ .. 
versionadded:: 2.0 """ - def run(self, *args, **kwargs): + def _do(self, method, *args, **kwargs): results = GroupResult() queue = Queue() threads = [] for cxn in self: - my_kwargs = dict(cxn=cxn, queue=queue, args=args, kwargs=kwargs) thread = ExceptionHandlingThread( - target=thread_worker, kwargs=my_kwargs + target=thread_worker, + kwargs=dict( + cxn=cxn, + queue=queue, + method=method, + args=args, + kwargs=kwargs, + ), ) threads.append(thread) for thread in threads: thread.start() for thread in threads: # TODO: configurable join timeout - # TODO: (in sudo's version) configurability around interactive - # prompting resulting in an exception instead, as in v1 thread.join() # Get non-exception results from queue while not queue.empty(): diff -Nru fabric-2.5.0/fabric/testing/base.py fabric-2.6.0/fabric/testing/base.py --- fabric-2.5.0/fabric/testing/base.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric/testing/base.py 2021-01-19 01:09:46.000000000 +0000 @@ -376,13 +376,17 @@ # Set up mocks self.os_patcher = patch("fabric.transfer.os") self.client_patcher = patch("fabric.connection.SSHClient") + self.path_patcher = patch("fabric.transfer.Path") mock_os = self.os_patcher.start() Client = self.client_patcher.start() + self.path_patcher.start() sftp = Client.return_value.open_sftp.return_value # Handle common filepath massage actions; tests will assume these. def fake_abspath(path): - return "/local/{}".format(path) + # Run normpath to avoid tests not seeing abspath wrinkles (like + # trailing slash chomping) + return "/local/{}".format(os.path.normpath(path)) mock_os.path.abspath.side_effect = fake_abspath sftp.getcwd.return_value = "/remote" @@ -392,11 +396,14 @@ sftp.stat.return_value.st_mode = fake_mode mock_os.stat.return_value.st_mode = fake_mode # Not super clear to me why the 'wraps' functionality in mock isn't - # working for this :( - mock_os.path.basename.side_effect = os.path.basename + # working for this :( reinstate a bunch of os(.path) so it still works + mock_os.sep = os.sep + for name in ("basename", "split", "join", "normpath"): + getattr(mock_os.path, name).side_effect = getattr(os.path, name) # Return the sftp and OS mocks for use by decorator use case. return sftp, mock_os def stop(self): self.os_patcher.stop() self.client_patcher.stop() + self.path_patcher.stop() diff -Nru fabric-2.5.0/fabric/testing/fixtures.py fabric-2.6.0/fabric/testing/fixtures.py --- fabric-2.5.0/fabric/testing/fixtures.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric/testing/fixtures.py 2021-01-19 01:09:46.000000000 +0000 @@ -104,6 +104,7 @@ """ mock = MockSFTP(autostart=False) client, mock_os = mock.start() + # Regular ol transfer to save some time transfer = Transfer(Connection("host")) yield transfer, client, mock_os # TODO: old mock_sftp() lacked any 'stop'...why? feels bad man diff -Nru fabric-2.5.0/fabric/transfer.py fabric-2.6.0/fabric/transfer.py --- fabric-2.5.0/fabric/transfer.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric/transfer.py 2021-01-19 01:09:46.000000000 +0000 @@ -6,6 +6,11 @@ import posixpath import stat +try: + from pathlib import Path +except ImportError: + from pathlib2 import Path + from .util import debug # TODO: actual logging! LOL # TODO: figure out best way to direct folks seeking rsync, to patchwork's rsync @@ -40,7 +45,7 @@ def get(self, remote, local=None, preserve_mode=True): """ - Download a file from the current connection to the local filesystem. + Copy a file from wrapped connection's host to the local filesystem. 
:param str remote: Remote file to download. @@ -60,7 +65,9 @@ **If None or another 'falsey'/empty value is given** (the default), the remote file is downloaded to the current working directory (as - seen by `os.getcwd`) using its remote filename. + seen by `os.getcwd`) using its remote filename. (This is equivalent + to giving ``"{basename}"``; see the below subsection on + interpolation.) **If a string is given**, it should be a path to a local directory or file and is subject to similar behavior as that seen by common @@ -71,11 +78,23 @@ '/tmp/')`` would result in creation or overwriting of ``/tmp/file.txt``). + This path will be **interpolated** with some useful parameters, + using `str.format`: + + - The `.Connection` object's ``host``, ``user`` and ``port`` + attributes. + - The ``basename`` and ``dirname`` of the ``remote`` path, as + derived by `os.path` (specifically, its ``posixpath`` flavor, so + that the resulting values are useful on remote POSIX-compatible + SFTP servers even if the local client is Windows). + - Thus, for example, ``"/some/path/{user}@{host}/{basename}"`` will + yield different local paths depending on the properties of both + the connection and the remote path. + .. note:: - When dealing with nonexistent file paths, normal Python file - handling concerns come into play - for example, a ``local`` - path containing non-leaf directories which do not exist, will - typically result in an `OSError`. + If nonexistent directories are present in this path (including + the final path component, if it ends in `os.sep`) they will be + created automatically using `os.makedirs`. **If a file-like object is given**, the contents of the remote file are simply written into it. @@ -87,12 +106,14 @@ :returns: A `.Result` object. .. versionadded:: 2.0 + .. versionchanged:: 2.6 + Added ``local`` path interpolation of connection & remote file + attributes. + .. versionchanged:: 2.6 + Create missing ``local`` directories automatically. """ # TODO: how does this API change if we want to implement # remote-to-remote file transfer? (Is that even realistic?) - # TODO: handle v1's string interpolation bits, especially the default - # one, or at least think about how that would work re: split between - # single and multiple server targets. # TODO: callback support # TODO: how best to allow changing the behavior/semantics of # remote/local (e.g. users might want 'safer' behavior that complains @@ -107,15 +128,37 @@ self.sftp.getcwd() or self.sftp.normalize("."), remote ) - # Massage local path: - # - handle file-ness - # - if path, fill with remote name if empty, & make absolute + # Massage local path orig_local = local is_file_like = hasattr(local, "write") and callable(local.write) + remote_filename = posixpath.basename(remote) if not local: - local = posixpath.basename(remote) + local = remote_filename + # Path-driven local downloads need interpolation, abspath'ing & + # directory creation if not is_file_like: + local = local.format( + host=self.connection.host, + user=self.connection.user, + port=self.connection.port, + dirname=posixpath.dirname(remote), + basename=remote_filename, + ) + # Must treat dir vs file paths differently, lest we erroneously + # mkdir what was intended as a filename, and so that non-empty + # dir-like paths still get remote filename tacked on. 
+ if local.endswith(os.sep): + dir_path = local + local = os.path.join(local, remote_filename) + else: + dir_path, _ = os.path.split(local) local = os.path.abspath(local) + Path(dir_path).mkdir(parents=True, exist_ok=True) + # TODO: reimplement mkdir (or otherwise write a testing function) + # allowing us to track what was created so we can revert if + # transfer fails. + # TODO: Alternately, transfer to temp location and then move, but + # that's basically inverse of v1's sudo-put which gets messy # Run Paramiko-level .get() (side-effects only. womp.) # TODO: push some of the path handling into Paramiko; it should be diff -Nru fabric-2.5.0/fabric/_version.py fabric-2.6.0/fabric/_version.py --- fabric-2.5.0/fabric/_version.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric/_version.py 2021-01-19 01:09:46.000000000 +0000 @@ -1,2 +1,2 @@ -__version_info__ = (2, 5, 0) +__version_info__ = (2, 6, 0) __version__ = ".".join(map(str, __version_info__)) diff -Nru fabric-2.5.0/fabric2/connection.py fabric-2.6.0/fabric2/connection.py --- fabric-2.5.0/fabric2/connection.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric2/connection.py 2021-01-19 01:09:46.000000000 +0000 @@ -300,9 +300,11 @@ Default: ``config.timeouts.connect``. - .. _connect_kwargs-arg: :param dict connect_kwargs: + + .. _connect_kwargs-arg: + Keyword arguments handed verbatim to `SSHClient.connect ` (when `.open` is called). diff -Nru fabric-2.5.0/fabric2/group.py fabric-2.6.0/fabric2/group.py --- fabric-2.5.0/fabric2/group.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric2/group.py 2021-01-19 01:09:46.000000000 +0000 @@ -18,9 +18,9 @@ concrete subclasses (such as `.SerialGroup` or `.ThreadingGroup`) or you'll get ``NotImplementedError`` on most of the methods. - Most methods in this class mirror those of `.Connection`, taking the same - arguments; however their return values and exception-raising behavior - differs: + Most methods in this class wrap those of `.Connection` and will accept the + same arguments; however their return values and exception-raising behavior + differ: - Return values are dict-like objects (`.GroupResult`) mapping `.Connection` objects to the return value for the respective connections: @@ -99,6 +99,11 @@ group.extend(connections) return group + def _do(self, method, *args, **kwargs): + # TODO: rename this something public & commit to an API for user + # subclasses + raise NotImplementedError + def run(self, *args, **kwargs): """ Executes `.Connection.run` on all member `Connections <.Connection>`. @@ -107,25 +112,22 @@ .. versionadded:: 2.0 """ - # TODO: probably best to suck it up & match actual run() sig? # TODO: how to change method of execution across contents? subclass, # kwargs, additional methods, inject an executor? Doing subclass for # now, but not 100% sure it's the best route. # TODO: also need way to deal with duplicate connections (see THOUGHTS) - # TODO: and errors - probably FailureSet? How to handle other, - # regular, non Failure, exceptions though? Still need an aggregate - # exception type either way, whether it is FailureSet or what... - # TODO: OTOH, users may well want to be able to operate on the hosts - # that did not fail (esp if failure % is low) so we really _do_ want - # something like a result object mixing success and failure, or maybe a - # golang style two-tuple of successes and failures? 
- # TODO: or keep going w/ a "return or except", but the object is - # largely similar (if not identical) in both situations, with the - # exception just being the signal that Shit Broke? - raise NotImplementedError + return self._do("run", *args, **kwargs) - # TODO: how to handle sudo? Probably just an inner worker method that takes - # the method name to actually call (run, sudo, etc)? + def sudo(self, *args, **kwargs): + """ + Executes `.Connection.sudo` on all member `Connections <.Connection>`. + + :returns: a `.GroupResult`. + + .. versionadded:: 2.6 + """ + # TODO: see run() TODOs + return self._do("sudo", *args, **kwargs) # TODO: this all needs to mesh well with similar strategies applied to # entire tasks - so that may still end up factored out into Executors or @@ -133,19 +135,59 @@ # TODO: local? Invoke wants ability to do that on its own though, which # would be distinct from Group. (May want to switch Group to use that, - # though, whatever it ends up being?) + # though, whatever it ends up being? Eg many cases where you do want to do + # some local thing either N times identically, or parameterized by remote + # cxn values) - def get(self, *args, **kwargs): + def put(self, *args, **kwargs): """ - Executes `.Connection.get` on all member `Connections <.Connection>`. + Executes `.Connection.put` on all member `Connections <.Connection>`. - :returns: a `.GroupResult`. + This is a straightforward application: aside from whatever the concrete + group subclass does for concurrency or lack thereof, the effective + result is like running a loop over the connections and calling their + ``put`` method. - .. versionadded:: 2.0 + :returns: + a `.GroupResult` whose values are `.transfer.Result` instances. + + .. versionadded:: 2.6 """ - # TODO: probably best to suck it up & match actual get() sig? - # TODO: actually implement on subclasses - raise NotImplementedError + return self._do("put", *args, **kwargs) + + def get(self, *args, **kwargs): + """ + Executes `.Connection.get` on all member `Connections <.Connection>`. + + .. note:: + This method changes some behaviors over e.g. directly calling + `.Connection.get` on a ``for`` loop of connections; the biggest is + that the implied default value for the ``local`` parameter is + ``"{host}/"``, which triggers use of local path parameterization + based on each connection's target hostname. + + Thus, unless you override ``local`` yourself, a copy of the + downloaded file will be stored in (relative) directories named + after each host in the group. + + .. warning:: + Using file-like objects as the ``local`` argument is not currently + supported, as it would be equivalent to supplying that same object + to a series of individual ``get()`` calls. + + :returns: + a `.GroupResult` whose values are `.transfer.Result` instances. + + .. versionadded:: 2.6 + """ + # TODO: consider a backwards incompat change after we drop Py2 that + # just makes a lot of these kwarg-only methods? then below could become + # kwargs.setdefault() if desired. + # TODO: do we care enough to handle explicitly given, yet falsey, + # values? it's a lot more complexity for a corner case. + if len(args) < 2 and "local" not in kwargs: + kwargs["local"] = "{host}/" + return self._do("get", *args, **kwargs) def close(self): """ @@ -170,12 +212,12 @@ .. 
versionadded:: 2.0 """ - def run(self, *args, **kwargs): + def _do(self, method, *args, **kwargs): results = GroupResult() excepted = False for cxn in self: try: - results[cxn] = cxn.run(*args, **kwargs) + results[cxn] = getattr(cxn, method)(*args, **kwargs) except Exception as e: results[cxn] = e excepted = True @@ -184,8 +226,8 @@ return results -def thread_worker(cxn, queue, args, kwargs): - result = cxn.run(*args, **kwargs) +def thread_worker(cxn, queue, method, args, kwargs): + result = getattr(cxn, method)(*args, **kwargs) # TODO: namedtuple or attrs object? queue.put((cxn, result)) @@ -197,22 +239,26 @@ .. versionadded:: 2.0 """ - def run(self, *args, **kwargs): + def _do(self, method, *args, **kwargs): results = GroupResult() queue = Queue() threads = [] for cxn in self: - my_kwargs = dict(cxn=cxn, queue=queue, args=args, kwargs=kwargs) thread = ExceptionHandlingThread( - target=thread_worker, kwargs=my_kwargs + target=thread_worker, + kwargs=dict( + cxn=cxn, + queue=queue, + method=method, + args=args, + kwargs=kwargs, + ), ) threads.append(thread) for thread in threads: thread.start() for thread in threads: # TODO: configurable join timeout - # TODO: (in sudo's version) configurability around interactive - # prompting resulting in an exception instead, as in v1 thread.join() # Get non-exception results from queue while not queue.empty(): diff -Nru fabric-2.5.0/fabric2/testing/base.py fabric-2.6.0/fabric2/testing/base.py --- fabric-2.5.0/fabric2/testing/base.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric2/testing/base.py 2021-01-19 01:09:46.000000000 +0000 @@ -376,13 +376,17 @@ # Set up mocks self.os_patcher = patch("fabric.transfer.os") self.client_patcher = patch("fabric.connection.SSHClient") + self.path_patcher = patch("fabric.transfer.Path") mock_os = self.os_patcher.start() Client = self.client_patcher.start() + self.path_patcher.start() sftp = Client.return_value.open_sftp.return_value # Handle common filepath massage actions; tests will assume these. def fake_abspath(path): - return "/local/{}".format(path) + # Run normpath to avoid tests not seeing abspath wrinkles (like + # trailing slash chomping) + return "/local/{}".format(os.path.normpath(path)) mock_os.path.abspath.side_effect = fake_abspath sftp.getcwd.return_value = "/remote" @@ -392,11 +396,14 @@ sftp.stat.return_value.st_mode = fake_mode mock_os.stat.return_value.st_mode = fake_mode # Not super clear to me why the 'wraps' functionality in mock isn't - # working for this :( - mock_os.path.basename.side_effect = os.path.basename + # working for this :( reinstate a bunch of os(.path) so it still works + mock_os.sep = os.sep + for name in ("basename", "split", "join", "normpath"): + getattr(mock_os.path, name).side_effect = getattr(os.path, name) # Return the sftp and OS mocks for use by decorator use case. return sftp, mock_os def stop(self): self.os_patcher.stop() self.client_patcher.stop() + self.path_patcher.stop() diff -Nru fabric-2.5.0/fabric2/testing/fixtures.py fabric-2.6.0/fabric2/testing/fixtures.py --- fabric-2.5.0/fabric2/testing/fixtures.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric2/testing/fixtures.py 2021-01-19 01:09:46.000000000 +0000 @@ -104,6 +104,7 @@ """ mock = MockSFTP(autostart=False) client, mock_os = mock.start() + # Regular ol transfer to save some time transfer = Transfer(Connection("host")) yield transfer, client, mock_os # TODO: old mock_sftp() lacked any 'stop'...why? 
feels bad man diff -Nru fabric-2.5.0/fabric2/transfer.py fabric-2.6.0/fabric2/transfer.py --- fabric-2.5.0/fabric2/transfer.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric2/transfer.py 2021-01-19 01:09:46.000000000 +0000 @@ -6,6 +6,11 @@ import posixpath import stat +try: + from pathlib import Path +except ImportError: + from pathlib2 import Path + from .util import debug # TODO: actual logging! LOL # TODO: figure out best way to direct folks seeking rsync, to patchwork's rsync @@ -40,7 +45,7 @@ def get(self, remote, local=None, preserve_mode=True): """ - Download a file from the current connection to the local filesystem. + Copy a file from wrapped connection's host to the local filesystem. :param str remote: Remote file to download. @@ -60,7 +65,9 @@ **If None or another 'falsey'/empty value is given** (the default), the remote file is downloaded to the current working directory (as - seen by `os.getcwd`) using its remote filename. + seen by `os.getcwd`) using its remote filename. (This is equivalent + to giving ``"{basename}"``; see the below subsection on + interpolation.) **If a string is given**, it should be a path to a local directory or file and is subject to similar behavior as that seen by common @@ -71,11 +78,23 @@ '/tmp/')`` would result in creation or overwriting of ``/tmp/file.txt``). + This path will be **interpolated** with some useful parameters, + using `str.format`: + + - The `.Connection` object's ``host``, ``user`` and ``port`` + attributes. + - The ``basename`` and ``dirname`` of the ``remote`` path, as + derived by `os.path` (specifically, its ``posixpath`` flavor, so + that the resulting values are useful on remote POSIX-compatible + SFTP servers even if the local client is Windows). + - Thus, for example, ``"/some/path/{user}@{host}/{basename}"`` will + yield different local paths depending on the properties of both + the connection and the remote path. + .. note:: - When dealing with nonexistent file paths, normal Python file - handling concerns come into play - for example, a ``local`` - path containing non-leaf directories which do not exist, will - typically result in an `OSError`. + If nonexistent directories are present in this path (including + the final path component, if it ends in `os.sep`) they will be + created automatically using `os.makedirs`. **If a file-like object is given**, the contents of the remote file are simply written into it. @@ -87,12 +106,14 @@ :returns: A `.Result` object. .. versionadded:: 2.0 + .. versionchanged:: 2.6 + Added ``local`` path interpolation of connection & remote file + attributes. + .. versionchanged:: 2.6 + Create missing ``local`` directories automatically. """ # TODO: how does this API change if we want to implement # remote-to-remote file transfer? (Is that even realistic?) - # TODO: handle v1's string interpolation bits, especially the default - # one, or at least think about how that would work re: split between - # single and multiple server targets. # TODO: callback support # TODO: how best to allow changing the behavior/semantics of # remote/local (e.g. 
users might want 'safer' behavior that complains @@ -107,15 +128,37 @@ self.sftp.getcwd() or self.sftp.normalize("."), remote ) - # Massage local path: - # - handle file-ness - # - if path, fill with remote name if empty, & make absolute + # Massage local path orig_local = local is_file_like = hasattr(local, "write") and callable(local.write) + remote_filename = posixpath.basename(remote) if not local: - local = posixpath.basename(remote) + local = remote_filename + # Path-driven local downloads need interpolation, abspath'ing & + # directory creation if not is_file_like: + local = local.format( + host=self.connection.host, + user=self.connection.user, + port=self.connection.port, + dirname=posixpath.dirname(remote), + basename=remote_filename, + ) + # Must treat dir vs file paths differently, lest we erroneously + # mkdir what was intended as a filename, and so that non-empty + # dir-like paths still get remote filename tacked on. + if local.endswith(os.sep): + dir_path = local + local = os.path.join(local, remote_filename) + else: + dir_path, _ = os.path.split(local) local = os.path.abspath(local) + Path(dir_path).mkdir(parents=True, exist_ok=True) + # TODO: reimplement mkdir (or otherwise write a testing function) + # allowing us to track what was created so we can revert if + # transfer fails. + # TODO: Alternately, transfer to temp location and then move, but + # that's basically inverse of v1's sudo-put which gets messy # Run Paramiko-level .get() (side-effects only. womp.) # TODO: push some of the path handling into Paramiko; it should be diff -Nru fabric-2.5.0/fabric2/_version.py fabric-2.6.0/fabric2/_version.py --- fabric-2.5.0/fabric2/_version.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/fabric2/_version.py 2021-01-19 01:09:46.000000000 +0000 @@ -1,2 +1,2 @@ -__version_info__ = (2, 5, 0) +__version_info__ = (2, 6, 0) __version__ = ".".join(map(str, __version_info__)) diff -Nru fabric-2.5.0/LICENSE fabric-2.6.0/LICENSE --- fabric-2.5.0/LICENSE 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/LICENSE 2021-01-19 01:09:46.000000000 +0000 @@ -1,4 +1,4 @@ -Copyright (c) 2019 Jeff Forcier. +Copyright (c) 2020 Jeff Forcier. All rights reserved. Redistribution and use in source and binary forms, with or without diff -Nru fabric-2.5.0/setup.py fabric-2.6.0/setup.py --- fabric-2.5.0/setup.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/setup.py 2021-01-19 01:09:46.000000000 +0000 @@ -67,7 +67,7 @@ author="Jeff Forcier", author_email="jeff@bitprophet.org", url="http://fabfile.org", - install_requires=["invoke>=1.3,<2.0", "paramiko>=2.4"], + install_requires=["invoke>=1.3,<2.0", "paramiko>=2.4", "pathlib2"], extras_require={ "testing": testing_deps, "pytest": testing_deps + pytest_deps, diff -Nru fabric-2.5.0/sites/docs/getting-started.rst fabric-2.6.0/sites/docs/getting-started.rst --- fabric-2.5.0/sites/docs/getting-started.rst 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/sites/docs/getting-started.rst 2021-01-19 01:09:46.000000000 +0000 @@ -14,7 +14,7 @@ on top; user code will most often import from the ``fabric`` package, but you'll sometimes import directly from ``invoke`` or ``paramiko`` too: -- `Invoke `_ implements CLI parsing, task organization, +- `Invoke `_ implements CLI parsing, task organization, and shell command execution (a generic framework plus specific implementation for local commands.) 
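Pulling the `Transfer.get` changes above together: as of 2.6 the `local` argument is interpolated via `str.format` with the connection's `host`, `user` and `port` plus the remote path's `dirname` and `basename`, and missing local directories are created automatically (via `pathlib`, or `pathlib2` on Python 2, hence the new setup.py dependency). A sketch, with an illustrative host and paths:

    from fabric import Connection

    c = Connection("db1.example.com", user="deploy")

    # "logs/db1.example.com/" is created if missing; {host} and {basename}
    # are filled in from the connection and the remote path respectively.
    result = c.get(
        remote="/var/log/postgresql/postgresql.log",
        local="logs/{host}/{basename}",
    )
    print(result.local)  # absolute path ending in logs/db1.example.com/postgresql.log
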
@@ -24,7 +24,7 @@ - Fabric users will frequently import Invoke objects, in cases where Fabric itself has no need to subclass or otherwise modify what Invoke provides. -- `Paramiko `_ implements low/mid level SSH +- `Paramiko `_ implements low/mid level SSH functionality - SSH and SFTP sessions, key management, etc. - Fabric mostly uses this under the hood; users will only rarely import diff -Nru fabric-2.5.0/sites/www/changelog.rst fabric-2.6.0/sites/www/changelog.rst --- fabric-2.5.0/sites/www/changelog.rst 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/sites/www/changelog.rst 2021-01-19 01:09:46.000000000 +0000 @@ -5,6 +5,31 @@ .. note:: Looking for the Fabric 1.x changelog? See :doc:`/changelog-v1`. +- :release:`2.6.0 <2021-01-18>` +- :bug:`- major` Fix a handful of issues in the handling and + mocking of SFTP local paths and ``os.path`` members within + :ref:`fabric.testing `; this should remove some + occasional "useless Mocks" as well as hewing closer to the real behavior of + things like ``os.path.abspath`` re: path normalization. +- :feature:`-` When the ``local`` path argument to + `Transfer.get ` contains nonexistent + directories, they are now created instead of raising an error. + + .. warning:: + This change introduces a new runtime dependency: ``pathlib2``. + +- :feature:`1868` Ported a feature from v1: interpolating the local path + argument in `Transfer.get ` with connection + and remote filepath attributes. + + For example, ``cxn.get(remote="/var/log/foo.log", local="{host}/")`` is now + feasible for storing a file in per-host-named directories or files, and in + fact `Group.get ` does this by default. +- :feature:`1810` Add `put `/`get + ` support to `~fabric.group.Group`. +- :feature:`1999` Add `sudo ` support to + `~fabric.group.Group`. Thanks to Bonnie Hardin for the report and to Winston + Nolan for an early patchset. - :release:`2.5.0 <2019-08-06>` - :support:`-` Update minimum Invoke version requirement to ``>=1.3``. - :feature:`1985` Add support for explicitly closing remote subprocess' stdin diff -Nru fabric-2.5.0/sites/www/upgrading.rst fabric-2.6.0/sites/www/upgrading.rst --- fabric-2.5.0/sites/www/upgrading.rst 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/sites/www/upgrading.rst 2021-01-19 01:09:46.000000000 +0000 @@ -842,7 +842,7 @@ * - ``shell`` / ``env.use_shell`` designating whether or not to wrap commands within an explicit call to e.g. ``/bin/sh -c "real command"`` - - `Pending `__/Removed + - `Pending `__/Removed - See the note above under ``run`` for details on shell wrapping as a general strategy; unfortunately for ``sudo``, some sort of manual wrapping is still necessary for nontrivial commands (i.e. anything @@ -1127,9 +1127,8 @@ own, so it's gone. * - Naming downloaded files after some aspect of the remote destination, to avoid overwriting during multi-server actions - - `Pending `__ - - This falls under the `~fabric.group.Group` family, which still needs - some work in this regard. + - Ported + - Added back (to `fabric.transfer.Transfer.get`) in Fabric 2.6. .. 
_upgrading-configuration: diff -Nru fabric-2.5.0/tests/conftest.py fabric-2.6.0/tests/conftest.py --- fabric-2.5.0/tests/conftest.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/tests/conftest.py 2021-01-19 01:09:46.000000000 +0000 @@ -1,2 +1,31 @@ # flake8: noqa from fabric.testing.fixtures import client, remote, sftp, sftp_objs, transfer + +from os.path import isfile, expanduser + +from pytest import fixture + +from mock import patch + + +# TODO: does this want to end up in the public fixtures module too? +@fixture(autouse=True) +def no_user_ssh_config(): + """ + Cowardly refuse to ever load what looks like user SSH config paths. + + Prevents the invoking user's real config from gumming up test results or + inflating test runtime (eg if it sets canonicalization on, which will incur + DNS lookups for nearly all of this suite's bogus names). + """ + # An ugly, but effective, hack. I am not proud. I also don't see anything + # that's >= as bulletproof and less ugly? + # TODO: ideally this should expand to cover system config paths too, but + # that's even less likely to be an issue. + def no_config_for_you(path): + if path == expanduser("~/.ssh/config"): + return False + return isfile(path) + + with patch("fabric.config.os.path.isfile", no_config_for_you): + yield diff -Nru fabric-2.5.0/tests/connection.py fabric-2.6.0/tests/connection.py --- fabric-2.5.0/tests/connection.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/tests/connection.py 2021-01-19 01:09:46.000000000 +0000 @@ -20,11 +20,11 @@ from invoke.config import Config as InvokeConfig from invoke.exceptions import ThreadException -from fabric import Config as Config_ +from fabric import Config, Connection from fabric.exceptions import InvalidV1Env from fabric.util import get_local_user -from _util import support, Connection, Config, faux_v1_env +from _util import support, faux_v1_env # Remote is woven in as a config default, so must be patched there @@ -265,7 +265,7 @@ runtime_path = join(support, "ssh_config", confname) if overrides is None: overrides = {} - return Config_( + return Config( runtime_ssh_path=runtime_path, overrides=overrides ) @@ -274,7 +274,7 @@ return Connection("runtime", config=config) def effectively_blank_when_no_loaded_config(self): - c = Config_(ssh_config=SSHConfig()) + c = Config(ssh_config=SSHConfig()) cxn = Connection("host", config=c) # NOTE: paramiko always injects this even if you look up a host # that has no rules, even wildcard ones. 
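The `www/changelog.rst` entries above cover the headline 2.6 additions to `Group`; in practice they compose roughly like this (a sketch: host names and file paths are illustrative, and the `sudo` call assumes passwordless sudo or a configured `sudo.password`):

    from fabric import ThreadingGroup

    pool = ThreadingGroup("web1", "web2", "web3", user="deploy")

    # New in 2.6: put/sudo (and get) run on every member connection and
    # return a GroupResult mapping each Connection to its individual result.
    pool.put("app.conf", remote="/tmp/app.conf")
    pool.sudo("mv /tmp/app.conf /etc/app.conf", hide=True)

    # Group.get() defaults local to "{host}/", so each download lands in a
    # directory named after its host: web1/app.log, web2/app.log, web3/app.log
    pool.get(remote="/var/log/app.log")
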
@@ -306,7 +306,7 @@ path = join( support, "ssh_config", "overridden_hostname.conf" ) - config = Config_(runtime_ssh_path=path) + config = Config(runtime_ssh_path=path) cxn = Connection("aliasname", config=config) assert cxn.host == "realname" assert cxn.original_host == "aliasname" @@ -859,7 +859,7 @@ config_kwargs["overrides"] = { "connect_kwargs": {"key_filename": ["configured.key"]} } - conf = Config_(**config_kwargs) + conf = Config(**config_kwargs) connect_kwargs = {} if kwarg: # Stitch in connect_kwargs value diff -Nru fabric-2.5.0/tests/group.py fabric-2.6.0/tests/group.py --- fabric-2.5.0/tests/group.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/tests/group.py 2021-01-19 01:09:46.000000000 +0000 @@ -1,11 +1,29 @@ from mock import Mock, patch, call -from pytest_relaxed import raises +from pytest import mark, raises from fabric import Connection, Group, SerialGroup, ThreadingGroup, GroupResult from fabric.group import thread_worker from fabric.exceptions import GroupException +RUNNER_METHODS = ("run", "sudo") +TRANSFER_METHODS = ("put", "get") +ALL_METHODS = RUNNER_METHODS + TRANSFER_METHODS +runner_args = ("command",) +runner_kwargs = dict(hide=True, warn=True) +transfer_args = tuple() +transfer_kwargs = dict(local="yokel", remote="goat") +ARGS_BY_METHOD = dict( + run=runner_args, sudo=runner_args, put=transfer_args, get=transfer_args +) +KWARGS_BY_METHOD = dict( + run=runner_kwargs, + sudo=runner_kwargs, + put=transfer_kwargs, + get=transfer_kwargs, +) + + class Group_: class init: "__init__" @@ -41,10 +59,11 @@ for c in g: assert isinstance(c, Connection) - class run: - @raises(NotImplementedError) - def not_implemented_in_base_class(self): - Group().run() + @mark.parametrize("method", ALL_METHODS) + def abstract_methods_not_implemented(self, method): + group = Group() + with raises(NotImplementedError): + getattr(group, method)() class close_and_contextmanager_behavior: def close_closes_all_member_connections(self): @@ -62,8 +81,32 @@ for c in cxns: c.close.assert_called_once_with() + class get: + class local_defaults_to_host_interpolated_path: + def when_no_arg_or_kwarg_given(self): + g = Group("host1", "host2") + g._do = Mock() + g.get(remote="whatever") + g._do.assert_called_with( + "get", remote="whatever", local="{host}/" + ) + + def not_when_arg_given(self): + g = Group("host1", "host2") + g._do = Mock() + g.get("whatever", "lol") + # No local kwarg passed. 
+ g._do.assert_called_with("get", "whatever", "lol") + + def not_when_kwarg_given(self): + g = Group("host1", "host2") + g._do = Mock() + g.get(remote="whatever", local="lol") + # Doesn't stomp given local arg + g._do.assert_called_with("get", remote="whatever", local="lol") + -def _make_serial_tester(cxns, index, args, kwargs): +def _make_serial_tester(method, cxns, index, args, kwargs): args = args[:] kwargs = kwargs.copy() @@ -72,176 +115,187 @@ predecessors = cxns[:car] successors = cxns[cdr:] for predecessor in predecessors: - predecessor.run.assert_called_with(*args, **kwargs) + getattr(predecessor, method).assert_called_with(*args, **kwargs) for successor in successors: - assert not successor.run.called + assert not getattr(successor, method).called return tester class SerialGroup_: - class run: - def executes_arguments_on_contents_run_serially(self): - "executes arguments on contents' run() serially" - cxns = [Connection(x) for x in ("host1", "host2", "host3")] - args = ("command",) - kwargs = {"hide": True, "warn": True} - for index, cxn in enumerate(cxns): - side_effect = _make_serial_tester(cxns, index, args, kwargs) - cxn.run = Mock(side_effect=side_effect) - g = SerialGroup.from_connections(cxns) - g.run(*args, **kwargs) - # Sanity check, e.g. in case none of them were actually run - for cxn in cxns: - cxn.run.assert_called_with(*args, **kwargs) - - def errors_in_execution_capture_and_continue_til_end(self): - cxns = [Mock(name=x) for x in ("host1", "host2", "host3")] - - class OhNoz(Exception): - pass - - onoz = OhNoz() - cxns[1].run.side_effect = onoz - g = SerialGroup.from_connections(cxns) - try: - g.run("whatever", hide=True) - except GroupException as e: - result = e.result - else: - assert False, "Did not raise GroupException!" - succeeded = { - cxns[0]: cxns[0].run.return_value, - cxns[2]: cxns[2].run.return_value, - } - failed = {cxns[1]: onoz} - expected = succeeded.copy() - expected.update(failed) - assert result == expected - assert result.succeeded == succeeded - assert result.failed == failed - - def returns_results_mapping(self): - cxns = [Mock(name=x) for x in ("host1", "host2", "host3")] - g = SerialGroup.from_connections(cxns) - result = g.run("whatever", hide=True) - assert isinstance(result, GroupResult) - expected = {x: x.run.return_value for x in cxns} - assert result == expected - assert result.succeeded == expected - assert result.failed == {} + @mark.parametrize("method", ALL_METHODS) + def executes_arguments_on_contents_run_serially(self, method): + "executes arguments on contents' run() serially" + cxns = [Connection(x) for x in ("host1", "host2", "host3")] + args = ARGS_BY_METHOD[method] + kwargs = KWARGS_BY_METHOD[method] + for index, cxn in enumerate(cxns): + side_effect = _make_serial_tester( + method, cxns, index, args, kwargs + ) + setattr(cxn, method, Mock(side_effect=side_effect)) + g = SerialGroup.from_connections(cxns) + getattr(g, method)(*args, **kwargs) + # Sanity check, e.g. 
in case none of them were actually run + for cxn in cxns: + getattr(cxn, method).assert_called_with(*args, **kwargs) + + @mark.parametrize("method", ALL_METHODS) + def errors_in_execution_capture_and_continue_til_end(self, method): + cxns = [Mock(name=x) for x in ("host1", "host2", "host3")] + + class OhNoz(Exception): + pass + + onoz = OhNoz() + getattr(cxns[1], method).side_effect = onoz + g = SerialGroup.from_connections(cxns) + try: + getattr(g, method)("whatever", hide=True) + except GroupException as e: + result = e.result + else: + assert False, "Did not raise GroupException!" + succeeded = { + cxns[0]: getattr(cxns[0], method).return_value, + cxns[2]: getattr(cxns[2], method).return_value, + } + failed = {cxns[1]: onoz} + expected = succeeded.copy() + expected.update(failed) + assert result == expected + assert result.succeeded == succeeded + assert result.failed == failed + + @mark.parametrize("method", ALL_METHODS) + def returns_results_mapping(self, method): + cxns = [Mock(name=x) for x in ("host1", "host2", "host3")] + g = SerialGroup.from_connections(cxns) + result = getattr(g, method)("whatever", hide=True) + assert isinstance(result, GroupResult) + expected = {x: getattr(x, method).return_value for x in cxns} + assert result == expected + assert result.succeeded == expected + assert result.failed == {} class ThreadingGroup_: def setup(self): self.cxns = [Connection(x) for x in ("host1", "host2", "host3")] - self.args = ("command",) - self.kwargs = {"hide": True, "warn": True} - class run: - @patch("fabric.group.Queue") - @patch("fabric.group.ExceptionHandlingThread") - def executes_arguments_on_contents_run_via_threading( - self, Thread, Queue + @mark.parametrize("method", ALL_METHODS) + @patch("fabric.group.Queue") + @patch("fabric.group.ExceptionHandlingThread") + def executes_arguments_on_contents_run_via_threading( + self, Thread, Queue, method + ): + queue = Queue.return_value + g = ThreadingGroup.from_connections(self.cxns) + # Make sure .exception() doesn't yield truthy Mocks. Otherwise we + # end up with 'exceptions' that cause errors due to all being the + # same. + Thread.return_value.exception.return_value = None + args = ARGS_BY_METHOD[method] + kwargs = KWARGS_BY_METHOD[method] + getattr(g, method)(*args, **kwargs) + # Testing that threads were used the way we expect is mediocre but + # I honestly can't think of another good way to assert "threading + # was used & concurrency occurred"... + instantiations = [ + call( + target=thread_worker, + kwargs=dict( + cxn=cxn, + queue=queue, + method=method, + args=args, + kwargs=kwargs, + ), + ) + for cxn in self.cxns + ] + Thread.assert_has_calls(instantiations, any_order=True) + # These ought to work as by default a Mock.return_value is a + # singleton mock object + expected = len(self.cxns) + for name, got in ( + ("start", Thread.return_value.start.call_count), + ("join", Thread.return_value.join.call_count), ): - queue = Queue.return_value - g = ThreadingGroup.from_connections(self.cxns) - # Make sure .exception() doesn't yield truthy Mocks. Otherwise we - # end up with 'exceptions' that cause errors due to all being the - # same. - Thread.return_value.exception.return_value = None - g.run(*self.args, **self.kwargs) - # Testing that threads were used the way we expect is mediocre but - # I honestly can't think of another good way to assert "threading - # was used & concurrency occurred"... 
- instantiations = [ - call( - target=thread_worker, - kwargs=dict( - cxn=cxn, - queue=queue, - args=self.args, - kwargs=self.kwargs, - ), - ) - for cxn in self.cxns - ] - Thread.assert_has_calls(instantiations, any_order=True) - # These ought to work as by default a Mock.return_value is a - # singleton mock object - expected = len(self.cxns) - for name, got in ( - ("start", Thread.return_value.start.call_count), - ("join", Thread.return_value.join.call_count), - ): - err = ( - "Expected {} calls to ExceptionHandlingThread.{}, got {}" - ) # noqa - err = err.format(expected, name, got) - assert expected, got == err - - @patch("fabric.group.Queue") - def queue_used_to_return_results(self, Queue): - # Regular, explicit, mocks for Connections - cxns = [Mock(host=x) for x in ("host1", "host2", "host3")] - # Set up Queue with enough behavior to work / assert - queue = Queue.return_value - # Ending w/ a True will terminate a while-not-empty loop - queue.empty.side_effect = (False, False, False, True) - fakes = [(x, x.run.return_value) for x in cxns] - queue.get.side_effect = fakes[:] - # Execute & inspect results - g = ThreadingGroup.from_connections(cxns) - results = g.run(*self.args, **self.kwargs) - expected = {x: x.run.return_value for x in cxns} - assert results == expected - # Make sure queue was used as expected within worker & - # ThreadingGroup.run() - puts = [call(x) for x in fakes] - queue.put.assert_has_calls(puts, any_order=True) - assert queue.empty.called - gets = [call(block=False) for _ in cxns] - queue.get.assert_has_calls(gets) - - def bubbles_up_errors_within_threads(self): - # TODO: I feel like this is the first spot where a raw - # ThreadException might need tweaks, at least presentation-wise, - # since we're no longer dealing with truly background threads (IO - # workers and tunnels), but "middle-ground" threads the user is - # kind of expecting (and which they might expect to encounter - # failures). - cxns = [Mock(host=x) for x in ("host1", "host2", "host3")] - - class OhNoz(Exception): - pass - - onoz = OhNoz() - cxns[1].run.side_effect = onoz - g = ThreadingGroup.from_connections(cxns) - try: - g.run(*self.args, **self.kwargs) - except GroupException as e: - result = e.result - else: - assert False, "Did not raise GroupException!" 
- succeeded = { - cxns[0]: cxns[0].run.return_value, - cxns[2]: cxns[2].run.return_value, - } - failed = {cxns[1]: onoz} - expected = succeeded.copy() - expected.update(failed) - assert result == expected - assert result.succeeded == succeeded - assert result.failed == failed - - def returns_results_mapping(self): - # TODO: update if/when we implement ResultSet - cxns = [Mock(name=x) for x in ("host1", "host2", "host3")] - g = ThreadingGroup.from_connections(cxns) - result = g.run("whatever", hide=True) - assert isinstance(result, GroupResult) - expected = {x: x.run.return_value for x in cxns} - assert result == expected - assert result.succeeded == expected - assert result.failed == {} + err = ( + "Expected {} calls to ExceptionHandlingThread.{}, got {}" + ) # noqa + err = err.format(expected, name, got) + assert expected, got == err + + @mark.parametrize("method", ALL_METHODS) + @patch("fabric.group.Queue") + def queue_used_to_return_results(self, Queue, method): + # Regular, explicit, mocks for Connections + cxns = [Mock(host=x) for x in ("host1", "host2", "host3")] + # Set up Queue with enough behavior to work / assert + queue = Queue.return_value + # Ending w/ a True will terminate a while-not-empty loop + queue.empty.side_effect = (False, False, False, True) + fakes = [(x, getattr(x, method).return_value) for x in cxns] + queue.get.side_effect = fakes[:] + # Execute & inspect results + g = ThreadingGroup.from_connections(cxns) + results = getattr(g, method)( + *ARGS_BY_METHOD[method], **KWARGS_BY_METHOD[method] + ) + expected = {x: getattr(x, method).return_value for x in cxns} + assert results == expected + # Make sure queue was used as expected within worker & + # ThreadingGroup.run() + puts = [call(x) for x in fakes] + queue.put.assert_has_calls(puts, any_order=True) + assert queue.empty.called + gets = [call(block=False) for _ in cxns] + queue.get.assert_has_calls(gets) + + @mark.parametrize("method", ALL_METHODS) + def bubbles_up_errors_within_threads(self, method): + # TODO: I feel like this is the first spot where a raw + # ThreadException might need tweaks, at least presentation-wise, + # since we're no longer dealing with truly background threads (IO + # workers and tunnels), but "middle-ground" threads the user is + # kind of expecting (and which they might expect to encounter + # failures). + cxns = [Mock(host=x) for x in ("host1", "host2", "host3")] + + class OhNoz(Exception): + pass + + onoz = OhNoz() + getattr(cxns[1], method).side_effect = onoz + g = ThreadingGroup.from_connections(cxns) + try: + getattr(g, method)( + *ARGS_BY_METHOD[method], **KWARGS_BY_METHOD[method] + ) + except GroupException as e: + result = e.result + else: + assert False, "Did not raise GroupException!" 
+ succeeded = { + cxns[0]: getattr(cxns[0], method).return_value, + cxns[2]: getattr(cxns[2], method).return_value, + } + failed = {cxns[1]: onoz} + expected = succeeded.copy() + expected.update(failed) + assert result == expected + assert result.succeeded == succeeded + assert result.failed == failed + + @mark.parametrize("method", ALL_METHODS) + def returns_results_mapping(self, method): + cxns = [Mock(name=x) for x in ("host1", "host2", "host3")] + g = ThreadingGroup.from_connections(cxns) + result = getattr(g, method)("whatever", hide=True) + assert isinstance(result, GroupResult) + expected = {x: getattr(x, method).return_value for x in cxns} + assert result == expected + assert result.succeeded == expected + assert result.failed == {} diff -Nru fabric-2.5.0/tests/transfer.py fabric-2.6.0/tests/transfer.py --- fabric-2.5.0/tests/transfer.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/tests/transfer.py 2021-01-19 01:09:46.000000000 +0000 @@ -3,7 +3,7 @@ except ImportError: from six import StringIO -from mock import Mock, call +from mock import Mock, call, patch from pytest_relaxed import raises from pytest import skip # noqa from paramiko import SFTPAttributes @@ -92,6 +92,24 @@ def remote_arg_cannot_be_empty_string(self, transfer): transfer.get("") + class local_arg_interpolation: + def connection_params(self, transfer): + result = transfer.get("somefile", "{user}@{host}-{port}") + expected = "/local/{}@host-22".format(transfer.connection.user) + assert result.local == expected + + def connection_params_as_dir(self, transfer): + result = transfer.get("somefile", "{host}/") + assert result.local == "/local/host/somefile" + + def remote_path_posixpath_bits(self, transfer): + result = transfer.get( + "parent/mid/leaf", "foo/{dirname}/bar/{basename}" + ) + # Recall that test harness sets remote apparent cwd as + # /remote/, thus dirname is /remote/parent/mid + assert result.local == "/local/foo/remote/parent/mid/bar/leaf" + class file_like_local_paths: "file-like local paths" @@ -133,6 +151,35 @@ transfer.get("file", local="meh", preserve_mode=False) assert not mock_os.chmod.called + class local_directory_creation: + @patch("fabric.transfer.Path") + def without_trailing_slash_means_leaf_file(self, Path, sftp_objs): + transfer, client = sftp_objs + transfer.get(remote="file", local="top/middle/leaf") + client.get.assert_called_with( + localpath="/local/top/middle/leaf", + remotepath="/remote/file", + ) + Path.assert_called_with("top/middle") + Path.return_value.mkdir.assert_called_with( + parents=True, exist_ok=True + ) + + @patch("fabric.transfer.Path") + def with_trailing_slash_means_mkdir_entire_arg( + self, Path, sftp_objs + ): + transfer, client = sftp_objs + transfer.get(remote="file", local="top/middle/leaf/") + client.get.assert_called_with( + localpath="/local/top/middle/leaf/file", + remotepath="/remote/file", + ) + Path.assert_called_with("top/middle/leaf/") + Path.return_value.mkdir.assert_called_with( + parents=True, exist_ok=True + ) + class put: class basics: def accepts_single_local_path_posarg(self, sftp_objs): diff -Nru fabric-2.5.0/tests/_util.py fabric-2.6.0/tests/_util.py --- fabric-2.5.0/tests/_util.py 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/tests/_util.py 2021-01-19 01:09:46.000000000 +0000 @@ -6,9 +6,7 @@ from invoke.vendor.lexicon import Lexicon from pytest_relaxed import trap -from fabric import Connection as Connection_, Config as Config_ from fabric.main import make_program -from paramiko import SSHConfig support = 
os.path.join(os.path.abspath(os.path.dirname(__file__)), "_support") @@ -51,27 +49,6 @@ assert False, err.format(test) -# Locally override Connection, Config with versions that supply a dummy -# SSHConfig and thus don't load any test-running user's own ssh_config files. -# TODO: find a cleaner way to do this, though I don't really see any that isn't -# adding a ton of fixtures everywhere (and thus, opening up to forgetting it -# for new tests...) -class Config(Config_): - def __init__(self, *args, **kwargs): - wat = "You're giving ssh_config explicitly, please use Config_!" - assert "ssh_config" not in kwargs, wat - # Give ssh_config explicitly -> shorter way of turning off loading - kwargs["ssh_config"] = SSHConfig() - super(Config, self).__init__(*args, **kwargs) - - -class Connection(Connection_): - def __init__(self, *args, **kwargs): - # Make sure we're using our tweaked Config if none was given. - kwargs.setdefault("config", Config()) - super(Connection, self).__init__(*args, **kwargs) - - def faux_v1_env(): # Close enough to v1 _AttributeDict... # Contains a copy of enough of v1's defaults to prevent us having to do a diff -Nru fabric-2.5.0/.travis.yml fabric-2.6.0/.travis.yml --- fabric-2.5.0/.travis.yml 2019-08-06 22:57:28.000000000 +0000 +++ fabric-2.6.0/.travis.yml 2021-01-19 01:09:46.000000000 +0000 @@ -74,9 +74,9 @@ - pip uninstall -y fabric - "PACKAGE_AS_FABRIC2=yes inv travis.test-packaging --package=fabric2 --sanity=\"fab2 --version\"" - inv sanity-test-from-v1 -after_success: +#after_success: # Upload coverage data to codecov - - codecov + #- codecov notifications: irc: channels: "irc.freenode.org#fabric"
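
As a closing note on the parametrized tests above: success and failure handling is uniform across run/sudo/put/get at the group level. When any member connection raises, the aggregate `GroupResult` is attached to the `GroupException`, split into `succeeded` and `failed` mappings. A usage sketch with illustrative hosts:

    from fabric import SerialGroup
    from fabric.exceptions import GroupException

    group = SerialGroup("web1", "web2", "web3")

    try:
        results = group.run("systemctl is-active nginx", hide=True, warn=True)
    except GroupException as exc:
        # e.g. one host was unreachable; the partial results survive here
        results = exc.result

    for cxn, outcome in results.succeeded.items():
        print(cxn.host, outcome.stdout.strip())
    for cxn, error in results.failed.items():
        print("{}: {!r}".format(cxn.host, error))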