diff -Nru python-bioblend-0.18.0/ABOUT.rst python-bioblend-1.0.0/ABOUT.rst --- python-bioblend-0.18.0/ABOUT.rst 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/ABOUT.rst 2022-10-13 18:17:37.000000000 +0000 @@ -1,5 +1,5 @@ `BioBlend `_ is a Python library for -interacting with `Galaxy`_ and `CloudMan`_ APIs. +interacting with the `Galaxy`_ API. BioBlend is supported and tested on: @@ -7,8 +7,7 @@ - Galaxy release 17.09 and later. BioBlend's goal is to make it easier to script and automate the running of -Galaxy analyses, administering of a Galaxy server, and cloud infrastructure -provisioning and scaling via CloudMan. +Galaxy analyses and administering of a Galaxy server. In practice, it makes it possible to do things like this: - Interact with Galaxy via a straightforward API:: @@ -30,38 +29,14 @@ params = {"Paste1": {"delimiter": "U"}} wf_invocation = wf.invoke(input_map, params=params) -- Create a CloudMan compute cluster, via an API and directly from your - local machine:: - - from bioblend.cloudman import CloudManConfig - from bioblend.cloudman import CloudManInstance - cfg = CloudManConfig('', '', 'My CloudMan', 'ami-', 'm1.small', '') - cmi = CloudManInstance.launch_instance(cfg) - cmi.get_status() - -- Reconnect to an existing CloudMan instance and manipulate it:: - - from bioblend.cloudman import CloudManInstance - cmi = CloudManInstance("", "") - cmi.add_nodes(3) - cluster_status = cmi.get_status() - cmi.remove_nodes(2) - -.. note:: - Although this library allows you to blend these two services into - a cohesive unit, the library itself can be used with either - service irrespective of the other. For example, you can use it to - just manipulate CloudMan clusters or to script the interactions - with an instance of Galaxy running on your laptop. - About the library name ~~~~~~~~~~~~~~~~~~~~~~ -The library was originally called just ``Blend`` but we -`renamed it `_ -to reflect more of its domain and a make it bit more unique so it can be easier to find. -The name was intended to be short and easily pronounceable. In its original -implementation, the goal was to provide a lot more support for CloudMan +The library was originally called just ``Blend`` but we +`renamed it `_ +to reflect more of its domain and a make it bit more unique so it can be easier to find. +The name was intended to be short and easily pronounceable. In its original +implementation, the goal was to provide a lot more support for `CloudMan`_ and other integration capabilities, allowing them to be *blended* together via code. ``BioBlend`` fitted the bill. diff -Nru python-bioblend-0.18.0/bioblend/cloudman/__init__.py python-bioblend-1.0.0/bioblend/cloudman/__init__.py --- python-bioblend-0.18.0/bioblend/cloudman/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/cloudman/__init__.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,771 +0,0 @@ -""" -API for interacting with a CloudMan instance. -""" -import functools -import json -import time -from urllib.parse import urlparse - -import requests - -import bioblend -from bioblend.cloudman.launch import CloudManLauncher -from bioblend.util import Bunch - - -def block_until_vm_ready(func): - """ - This decorator exists to make sure that a launched VM is - ready and has received a public IP before allowing the wrapped - function call to continue. If the VM is not ready, the function will - block until the VM is ready. 
If the VM does not become ready - until the vm_ready_timeout elapses or the VM status returns an error, - a VMLaunchException will be thrown. - - This decorator relies on the wait_until_instance_ready method defined in - class GenericVMInstance. All methods to which this decorator is applied - must be members of a class which inherit from GenericVMInstance. - - The following two optional keyword arguments are recognized by this decorator: - - :type vm_ready_timeout: int - :param vm_ready_timeout: Maximum length of time to block before timing out. - Once the timeout is reached, a VMLaunchException - will be thrown. - - :type vm_ready_check_interval: int - :param vm_ready_check_interval: The number of seconds to pause between consecutive - calls when polling the VM's ready status. - """ - - @functools.wraps(func) - def wrapper(*args, **kwargs): - obj = args[0] - timeout = kwargs.pop("vm_ready_timeout", 300) - interval = kwargs.pop("vm_ready_check_interval", 10) - try: - obj.wait_until_instance_ready(timeout, interval) - except AttributeError: - raise VMLaunchException( - "Decorated object does not define a wait_until_instance_ready method." - "Make sure that the object is of type GenericVMInstance." - ) - return func(*args, **kwargs) - - return wrapper - - -class VMLaunchException(Exception): - def __init__(self, value): - self.value = value - - def __str__(self): - return repr(self.value) - - -class CloudManConfig: - def __init__( - self, - access_key=None, - secret_key=None, - cluster_name=None, - image_id=None, - instance_type="m1.medium", - password=None, - cloud_metadata=None, - cluster_type=None, - galaxy_data_option="", - initial_storage_size=10, - key_name="cloudman_key_pair", - security_groups=None, - placement="", - kernel_id=None, - ramdisk_id=None, - block_until_ready=False, - **kwargs, - ): - """ - Initializes a CloudMan launch configuration object. - - :type access_key: str - :param access_key: Access credentials. - - :type secret_key: str - :param secret_key: Access credentials. - - :type cluster_name: str - :param cluster_name: Name used to identify this CloudMan cluster. - - :type image_id: str - :param image_id: Machine image ID to use when launching this - CloudMan instance. - - :type instance_type: str - :param instance_type: The type of the machine instance, as understood by - the chosen cloud provider. (e.g., ``m1.medium``) - - :type password: str - :param password: The administrative password for this CloudMan instance. - - :type cloud_metadata: Bunch - :param cloud_metadata: This object must define the properties required - to establish a `boto `_ - connection to that cloud. See this method's implementation - for an example of the required fields. Note that as - long the as provided object defines the required fields, - it can really by implemented as anything (e.g., - a Bunch, a database object, a custom class). If no - value for the ``cloud`` argument is provided, the - default is to use the Amazon cloud. 
- - :type kernel_id: str - :param kernel_id: The ID of the kernel with which to launch the - instances - - :type ramdisk_id: str - :param ramdisk_id: The ID of the RAM disk with which to launch the - instances - - :type key_name: str - :param key_name: The name of the key pair with which to launch instances - - :type security_groups: list of str - :param security_groups: The IDs of the security groups with which to - associate instances - - :type placement: str - :param placement: The availability zone in which to launch the instances - - :type cluster_type: str - :param cluster_type: The ``type``, either 'Galaxy', 'Data', or - 'Test', defines the type of cluster platform to initialize. - - :type galaxy_data_option: str - :param galaxy_data_option: The storage type to use for this instance. - May be 'transient', 'custom_size' or ''. The default is '', - which will result in ignoring the bioblend specified - initial_storage_size. 'custom_size' must be used for - initial_storage_size to come into effect. - - :type initial_storage_size: int - :param initial_storage_size: The initial storage to allocate for the instance. - This only applies if ``cluster_type`` is set - to either ``Galaxy`` or ``Data`` and ``galaxy_data_option`` - is set to ``custom_size`` - - :type block_until_ready: bool - :param block_until_ready: Specifies whether the launch method will block - until the instance is ready and only return once - all initialization is complete. The default is False. - If False, the launch method will return immediately - without blocking. However, any subsequent calls - made will automatically block if the instance is - not ready and initialized. The blocking timeout - and polling interval can be configured by providing - extra parameters to the ``CloudManInstance.launch_instance`` - method. 
- """ - if security_groups is None: - security_groups = ["CloudMan"] - self.set_connection_parameters(access_key, secret_key, cloud_metadata) - self.set_pre_launch_parameters( - cluster_name, - image_id, - instance_type, - password, - kernel_id, - ramdisk_id, - key_name, - security_groups, - placement, - block_until_ready, - ) - self.set_post_launch_parameters(cluster_type, galaxy_data_option, initial_storage_size) - self.set_extra_parameters(**kwargs) - - def set_connection_parameters(self, access_key, secret_key, cloud_metadata=None): - self.access_key = access_key - self.secret_key = secret_key - self.cloud_metadata = cloud_metadata - - def set_pre_launch_parameters( - self, - cluster_name, - image_id, - instance_type, - password, - kernel_id=None, - ramdisk_id=None, - key_name="cloudman_key_pair", - security_groups=None, - placement="", - block_until_ready=False, - ): - if security_groups is None: - security_groups = ["CloudMan"] - self.cluster_name = cluster_name - self.image_id = image_id - self.instance_type = instance_type - self.password = password - self.kernel_id = kernel_id - self.ramdisk_id = ramdisk_id - self.key_name = key_name - self.security_groups = security_groups - self.placement = placement - self.block_until_ready = block_until_ready - - def set_post_launch_parameters(self, cluster_type=None, galaxy_data_option="", initial_storage_size=10): - self.cluster_type = cluster_type - self.galaxy_data_option = galaxy_data_option - self.initial_storage_size = initial_storage_size - - def set_extra_parameters(self, **kwargs): - self.kwargs = kwargs - - class CustomTypeEncoder(json.JSONEncoder): - def default(self, obj): - if isinstance(obj, (CloudManConfig, Bunch)): - key = f"__{obj.__class__.__name__}__" - return {key: obj.__dict__} - return json.JSONEncoder.default(self, obj) - - @staticmethod - def CustomTypeDecoder(dct): - if "__CloudManConfig__" in dct: - return CloudManConfig(**dct["__CloudManConfig__"]) - elif "__Bunch__" in dct: - return Bunch(**dct["__Bunch__"]) - else: - return dct - - @staticmethod - def load_config(fp): - return json.load(fp, object_hook=CloudManConfig.CustomTypeDecoder) - - def save_config(self, fp): - json.dump(self, fp, cls=self.CustomTypeEncoder) - - def validate(self): - if self.access_key is None: - return "Access key must not be null" - elif self.secret_key is None: - return "Secret key must not be null" - elif self.cluster_name is None: - return "Cluster name must not be null" - elif self.image_id is None: - return "Image ID must not be null" - elif self.instance_type is None: - return "Instance type must not be null" - elif self.password is None: - return "Password must not be null" - elif self.cluster_type not in [None, "Test", "Data", "Galaxy", "Shared_cluster"]: - return f"Unrecognized cluster type ({self.cluster_type})" - elif self.galaxy_data_option not in [None, "", "custom-size", "transient"]: - return f"Unrecognized galaxy data option ({self.galaxy_data_option})" - elif self.key_name is None: - return "Key-pair name must not be null" - else: - return None - - -class GenericVMInstance: - def __init__(self, launcher, launch_result): - """ - Create an instance of the CloudMan API class, which is to be used when - manipulating that given CloudMan instance. - - The ``url`` is a string defining the address of CloudMan, for - example "http://115.146.92.174". The ``password`` is CloudMan's password, - as defined in the user data sent to CloudMan on instance creation. 
- """ - # Make sure the url scheme is defined (otherwise requests will not work) - self.vm_error = None - self.vm_status = None - self.host_name = None - self.launcher = launcher - self.launch_result = launch_result - - def _update_host_name(self, host_name): - if self.host_name != host_name: - self.host_name = host_name - - @property - def instance_id(self): - """ - Returns the ID of this instance (e.g., ``i-87ey32dd``) if launch was - successful or ``None`` otherwise. - """ - return None if self.launch_result is None else self.launch_result["instance_id"] - - @property - def key_pair_name(self): - """ - Returns the name of the key pair used by this instance. If instance was - not launched properly, returns ``None``. - """ - return None if self.launch_result is None else self.launch_result["kp_name"] - - @property - def key_pair_material(self): - """ - Returns the private portion of the generated key pair. It does so only - if the instance was properly launched and key pair generated; ``None`` - otherwise. - """ - return None if self.launch_result is None else self.launch_result["kp_material"] - - def get_machine_status(self): - """ - Check on the underlying VM status of an instance. This can be used to - determine whether the VM has finished booting up and if CloudMan - is up and running. - - Return a ``state`` dict with the current ``instance_state``, ``public_ip``, - ``placement``, and ``error`` keys, which capture the current state (the - values for those keys default to empty string if no data is available from - the cloud). - """ - if self.launcher: - return self.launcher.get_status(self.instance_id) - # elif self.host_name: - - else: - state = { - "instance_state": "", - "public_ip": "", - "placement": "", - "error": "No reference to the instance object", - } - return state - - def _init_instance(self, host_name): - self._update_host_name(host_name) - - def wait_until_instance_ready(self, vm_ready_timeout=300, vm_ready_check_interval=10): - """ - Wait until the VM state changes to ready/error or timeout elapses. - Updates the host name once ready. - """ - assert vm_ready_timeout > 0 - assert vm_ready_timeout > vm_ready_check_interval - assert vm_ready_check_interval > 0 - - if self.host_name: # Host name available. Therefore, instance is ready - return - - for time_left in range(vm_ready_timeout, 0, -vm_ready_check_interval): - status = self.get_machine_status() - if status["public_ip"] != "" and status["error"] == "": - self._init_instance(status["public_ip"]) - return - elif status["error"] != "": - msg = f"Error launching an instance: {status['error']}" - bioblend.log.error(msg) - raise VMLaunchException(msg) - else: - bioblend.log.warning( - f"Instance not ready yet (it's in state '{status['instance_state']}'); waiting another {time_left} seconds..." - ) - time.sleep(vm_ready_check_interval) - - raise VMLaunchException(f"Waited too long for instance to become ready. Instance Id: {self.instance_id}") - - -class CloudManInstance(GenericVMInstance): - def __init__(self, url, password, **kwargs): - """ - Create an instance of the CloudMan API class, which is to be used when - manipulating that given CloudMan instance. - - The ``url`` is a string defining the address of CloudMan, for - example "http://115.146.92.174". The ``password`` is CloudMan's password, - as defined in the user data sent to CloudMan on instance creation. 
- """ - self.initialized = False - if kwargs.get("launch_result", None) is not None: # Used internally by the launch_instance method - super().__init__(kwargs["launcher"], kwargs["launch_result"]) - else: - super().__init__(None, None) - self.config = kwargs.pop("cloudman_config", CloudManConfig()) - self.password = password or self.config.password - self.use_ssl = kwargs.get("use_ssl", self.config.kwargs.get("use_ssl", False)) - self.verify = kwargs.get("verify", self.config.kwargs.get("verify", False)) - self.authuser = kwargs.get("authuser", "") - self._set_url(url) - - def __repr__(self): - if self.cloudman_url: - return f"CloudMan instance at {self.cloudman_url}" - else: - return "Waiting for this CloudMan instance to start..." - - def _update_host_name(self, host_name): - """ - Overrides the super-class method and makes sure that the ``cloudman_url`` - is kept in sync with the host name. - """ - self._set_url(host_name) - - def _init_instance(self, hostname): - super()._init_instance(hostname) - if self.config.cluster_type: - self.initialize( - self.config.cluster_type, - galaxy_data_option=self.config.galaxy_data_option, - initial_storage_size=self.config.initial_storage_size, - ) - - def _set_url(self, url): - """ - Keeps the CloudMan URL as well and the hostname in sync. - """ - if url: - # Make sure the URL scheme is defined (otherwise requests will not work) - if not url.lower().startswith("http"): - # Check to see whether https scheme is required - if self.use_ssl: - url = "https://" + url - else: - url = "http://" + url - # Parse the corrected URL again to extract the hostname - parse_result = urlparse(url) - super()._update_host_name(parse_result.hostname) - self.url = url - - @property - def galaxy_url(self): - """ - Returns the base URL for this instance, which by default happens to be - the URL for Galaxy application. - """ - return self.url - - @property - def cloudman_url(self): - """ - Returns the URL for accessing this instance of CloudMan. - """ - if self.url: - return self.url + "/cloud" - return None - - @staticmethod - def launch_instance(cfg, **kwargs): - """ - Launches a new instance of CloudMan on the specified cloud infrastructure. - - :type cfg: CloudManConfig - :param cfg: A CloudManConfig object containing the initial parameters - for this launch. - """ - validation_result = cfg.validate() - if validation_result is not None: - raise VMLaunchException(f"Invalid CloudMan configuration provided: {validation_result}") - launcher = CloudManLauncher(cfg.access_key, cfg.secret_key, cfg.cloud_metadata) - result = launcher.launch( - cfg.cluster_name, - cfg.image_id, - cfg.instance_type, - cfg.password, - cfg.kernel_id, - cfg.ramdisk_id, - cfg.key_name, - cfg.security_groups, - cfg.placement, - **cfg.kwargs, - ) - if result["error"] is not None: - raise VMLaunchException(f"Error launching cloudman instance: {result['error']}") - instance = CloudManInstance(None, None, launcher=launcher, launch_result=result, cloudman_config=cfg) - if cfg.block_until_ready and cfg.cluster_type: - instance.get_status() # this will indirect result in initialize being invoked - return instance - - def update(self): - """ - Update the local object's fields to be in sync with the actual state - of the CloudMan instance the object points to. This method should be - called periodically to ensure you are looking at the current data. - - .. 
versionadded:: 0.2.2 - """ - ms = self.get_machine_status() - # Check if the machine is running and update IP and state - self.vm_status = ms.get("instance_state", None) - self.vm_error = ms.get("error", None) - public_ip = ms.get("public_ip", None) - # Update url if we don't have it or is different than what we have - if not self.url and (public_ip and self.url != public_ip): - self._set_url(public_ip) - # See if the cluster has been initialized - if self.vm_status == "running" or self.url: - ct = self.get_cluster_type() - if ct.get("cluster_type", None): - self.initialized = True - if self.vm_error: - bioblend.log.error(self.vm_error) - - @block_until_vm_ready - def get_cloudman_version(self): - """ - Returns the cloudman version from the server. Versions prior to Cloudman 2 does not - support this call, and therefore, the default is to return 1 - """ - try: - r = self._make_get_request("cloudman_version") - return r["version"] - except Exception: - return 1 - - @block_until_vm_ready - def initialize(self, cluster_type, galaxy_data_option="", initial_storage_size=None, shared_bucket=None): - """ - Initialize CloudMan platform. This needs to be done before the cluster - can be used. - - The ``cluster_type``, either 'Galaxy', 'Data', or 'Test', defines the type - of cluster platform to initialize. - """ - if not self.initialized: - if self.get_cloudman_version() < 2: - r = self._make_get_request( - "initialize_cluster", - parameters={ - "startup_opt": cluster_type, - "g_pss": initial_storage_size, - "shared_bucket": shared_bucket, - }, - ) - else: - r = self._make_get_request( - "initialize_cluster", - parameters={ - "startup_opt": cluster_type, - "galaxy_data_option": galaxy_data_option, - "pss": initial_storage_size, - "shared_bucket": shared_bucket, - }, - ) - self.initialized = True - return r - - @block_until_vm_ready - def get_cluster_type(self): - """ - Get the ``cluster type`` for this CloudMan instance. See the - CloudMan docs about the available types. Returns a dictionary, - for example: ``{'cluster_type': 'Test'}``. - """ - cluster_type = self._make_get_request("cluster_type") - if cluster_type["cluster_type"]: - self.initialized = True - return cluster_type - - @block_until_vm_ready - def get_status(self): - """ - Get status information on this CloudMan instance. - """ - return self._make_get_request("instance_state_json") - - @block_until_vm_ready - def get_nodes(self): - """ - Get a list of nodes currently running in this CloudMan cluster. - """ - instance_feed_json = self._make_get_request("instance_feed_json") - return instance_feed_json["instances"] - - @block_until_vm_ready - def get_cluster_size(self): - """ - Get the size of the cluster in terms of the number of nodes; this count - includes the master node. - """ - return len(self.get_nodes()) - - @block_until_vm_ready - def get_static_state(self): - """ - Get static information on this CloudMan instance. - i.e. 
state that doesn't change over the lifetime of the cluster - """ - return self._make_get_request("static_instance_state_json") - - @block_until_vm_ready - def get_master_ip(self): - """ - Returns the public IP of the master node in this CloudMan cluster - """ - status_json = self.get_static_state() - return status_json["master_ip"] - - @block_until_vm_ready - def get_master_id(self): - """ - Returns the instance ID of the master node in this CloudMan cluster - """ - status_json = self.get_static_state() - return status_json["master_id"] - - @block_until_vm_ready - def add_nodes(self, num_nodes, instance_type="", spot_price=""): - """ - Add a number of worker nodes to the cluster, optionally specifying - the type for new instances. If ``instance_type`` is not specified, - instance(s) of the same type as the master instance will be started. - Note that the ``instance_type`` must match the type of instance - available on the given cloud. - - ``spot_price`` applies only to AWS and, if set, defines the maximum - price for Spot instances, thus turning this request for more instances - into a Spot request. - """ - payload = {"number_nodes": num_nodes, "instance_type": instance_type, "spot_price": spot_price} - return self._make_get_request("add_instances", parameters=payload) - - @block_until_vm_ready - def remove_nodes(self, num_nodes, force=False): - """ - Remove worker nodes from the cluster. - - The ``num_nodes`` parameter defines the number of worker nodes to remove. - The ``force`` parameter (defaulting to False), is a boolean indicating - whether the nodes should be forcibly removed rather than gracefully removed. - """ - payload = {"number_nodes": num_nodes, "force_termination": force} - result = self._make_get_request("remove_instances", parameters=payload) - return result - - @block_until_vm_ready - def remove_node(self, instance_id, force=False): - """ - Remove a specific worker node from the cluster. - - The ``instance_id`` parameter defines the ID, as a string, of a worker node - to remove from the cluster. The ``force`` parameter (defaulting to False), - is a boolean indicating whether the node should be forcibly removed rather - than gracefully removed. - - """ - payload = {"instance_id": instance_id} - return self._make_get_request("remove_instance", parameters=payload) - - @block_until_vm_ready - def reboot_node(self, instance_id): - """ - Reboot a specific worker node. - - The ``instance_id`` parameter defines the ID, as a string, of a worker node - to reboot. - """ - payload = {"instance_id": instance_id} - return self._make_get_request("reboot_instance", parameters=payload) - - @block_until_vm_ready - def autoscaling_enabled(self): - """ - Returns a boolean indicating whether autoscaling is enabled. - """ - return bool(self.get_status()["autoscaling"]["use_autoscaling"]) - - @block_until_vm_ready - def enable_autoscaling(self, minimum_nodes=0, maximum_nodes=19): - """ - Enable cluster autoscaling, allowing the cluster to automatically add, - or remove, worker nodes, as needed. - - The number of worker nodes in the cluster is bounded by the ``minimum_nodes`` - (default is 0) and ``maximum_nodes`` (default is 19) parameters. - """ - if not self.autoscaling_enabled(): - payload = {"as_min": minimum_nodes, "as_max": maximum_nodes} - self._make_get_request("toggle_autoscaling", parameters=payload) - - @block_until_vm_ready - def disable_autoscaling(self): - """ - Disable autoscaling, meaning that worker nodes will need to be manually - added and removed. 
- """ - if self.autoscaling_enabled(): - self._make_get_request("toggle_autoscaling") - - @block_until_vm_ready - def adjust_autoscaling(self, minimum_nodes=None, maximum_nodes=None): - """ - Adjust the autoscaling configuration parameters. - - The number of worker nodes in the cluster is bounded by the optional - ``minimum_nodes`` and ``maximum_nodes`` parameters. If a parameter is - not provided then its configuration value does not change. - """ - if self.autoscaling_enabled(): - payload = {"as_min_adj": minimum_nodes, "as_max_adj": maximum_nodes} - self._make_get_request("adjust_autoscaling", parameters=payload) - - @block_until_vm_ready - def is_master_execution_host(self): - """ - Checks whether the master node has job execution enabled. - - """ - status = self._make_get_request("get_all_services_status") - return bool(status["master_is_exec_host"]) - - @block_until_vm_ready - def set_master_as_execution_host(self, enable): - """ - Enables/disables master as execution host. - - """ - if not self.is_master_execution_host(): - self._make_get_request("toggle_master_as_exec_host") - - @block_until_vm_ready - def get_galaxy_state(self): - """ - Get the current status of Galaxy running on the cluster. - """ - payload = {"srvc": "Galaxy"} - status = self._make_get_request("get_srvc_status", parameters=payload) - return {"status": status["status"]} - - @block_until_vm_ready - def terminate(self, terminate_master_instance=True, delete_cluster=False): - """ - Terminate this CloudMan cluster. There is an option to also terminate the - master instance (all worker instances will be terminated in the process - of cluster termination), and delete the whole cluster. - - .. warning:: - Deleting a cluster is irreversible - all of the data will be - permanently deleted. - """ - payload = {"terminate_master_instance": terminate_master_instance, "delete_cluster": delete_cluster} - result = self._make_get_request("kill_all", parameters=payload, timeout=15) - return result - - def _make_get_request(self, url, parameters=None, timeout=None): - """ - Private function that makes a GET request to the nominated ``url``, - with the provided GET ``parameters``. Optionally, set the ``timeout`` - to stop waiting for a response after a given number of seconds. This is - particularly useful when terminating a cluster as it may terminate - before sending a response. - """ - if parameters is None: - parameters = {} - req_url = "/".join((self.cloudman_url, "root", url)) - r = requests.get( - req_url, - params=parameters, - auth=(self.authuser, self.password), - timeout=timeout, - verify=self.verify, - ) - try: - json = r.json() - return json - except Exception: - return r.text diff -Nru python-bioblend-0.18.0/bioblend/cloudman/launch.py python-bioblend-1.0.0/bioblend/cloudman/launch.py --- python-bioblend-0.18.0/bioblend/cloudman/launch.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/cloudman/launch.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,838 +0,0 @@ -""" -Setup and launch a CloudMan instance. 
-""" -import datetime -import socket -from http.client import ( - BadStatusLine, - HTTPConnection, - HTTPException, -) -from urllib.parse import urlparse - -import boto -import yaml -from boto.ec2.regioninfo import RegionInfo -from boto.exception import ( - EC2ResponseError, - S3ResponseError, -) -from boto.s3.connection import ( - OrdinaryCallingFormat, - S3Connection, - SubdomainCallingFormat, -) - -import bioblend -from bioblend.util import Bunch - - -# Uncomment the following line if no logging from boto is desired -# bioblend.logging.getLogger('boto').setLevel(bioblend.logging.CRITICAL) -# Uncomment the following line if logging at the prompt is desired -# bioblend.set_stream_logger(__name__) -def instance_types(cloud_name="generic"): - """ - Return a list of dictionaries containing details about the available - instance types for the given `cloud_name`. - - :type cloud_name: str - :param cloud_name: A name of the cloud for which the list of instance - types will be returned. Valid values are: `aws`, - `nectar`, `generic`. - - :rtype: list - :return: A list of dictionaries describing instance types. Each dict will - contain the following keys: `name`, `model`, and `description`. - """ - instance_list = [] - if cloud_name.lower() == "aws": - instance_list.append({"model": "c3.large", "name": "Compute optimized Large", "description": "2 vCPU/4GB RAM"}) - instance_list.append( - {"model": "c3.2xlarge", "name": "Compute optimized 2xLarge", "description": "8 vCPU/15GB RAM"} - ) - instance_list.append( - {"model": "c3.8xlarge", "name": "Compute optimized 8xLarge", "description": "32 vCPU/60GB RAM"} - ) - elif cloud_name.lower() in ["nectar", "generic"]: - instance_list.append({"model": "m1.small", "name": "Small", "description": "1 vCPU / 4GB RAM"}) - instance_list.append({"model": "m1.medium", "name": "Medium", "description": "2 vCPU / 8GB RAM"}) - instance_list.append({"model": "m1.large", "name": "Large", "description": "4 vCPU / 16GB RAM"}) - instance_list.append({"model": "m1.xlarge", "name": "Extra Large", "description": "8 vCPU / 32GB RAM"}) - instance_list.append({"model": "m1.xxlarge", "name": "Extra-extra Large", "description": "16 vCPU / 64GB RAM"}) - return instance_list - - -class CloudManLauncher: - def __init__(self, access_key, secret_key, cloud=None): - """ - Define the environment in which this instance of CloudMan will be launched. - - Besides providing the credentials, optionally provide the ``cloud`` - object. This object must define the properties required to establish a - `boto `_ connection to that cloud. See - this method's implementation for an example of the required fields. - Note that as long the as provided object defines the required fields, - it can really by implemented as anything (e.g., a Bunch, a database - object, a custom class). If no value for the ``cloud`` argument is - provided, the default is to use the Amazon cloud. 
- """ - self.access_key = access_key - self.secret_key = secret_key - if cloud is None: - # Default to an EC2-compatible object - self.cloud = Bunch( - id="1", # for compatibility w/ DB representation - name="Amazon", - cloud_type="ec2", - bucket_default="cloudman", - region_name="us-east-1", - region_endpoint="ec2.amazonaws.com", - ec2_port="", - ec2_conn_path="/", - cidr_range="", - is_secure=True, - s3_host="s3.amazonaws.com", - s3_port="", - s3_conn_path="/", - ) - else: - self.cloud = cloud - self.ec2_conn = self.connect_ec2(self.access_key, self.secret_key, self.cloud) - self.vpc_conn = self.connect_vpc(self.access_key, self.secret_key, self.cloud) - # Define exceptions that we want to catch and retry - self.http_exceptions = (HTTPException, socket.error, socket.gaierror, BadStatusLine) - - def __repr__(self): - return f"Cloud: {self.cloud.name}; acct ID: {self.access_key}" - - def launch( - self, - cluster_name, - image_id, - instance_type, - password, - kernel_id=None, - ramdisk_id=None, - key_name="cloudman_key_pair", - security_groups=None, - placement="", - subnet_id=None, - ebs_optimized=False, - **kwargs, - ): - """ - Check all the prerequisites (key pair and security groups) for - launching a CloudMan instance, compose the user data based on the - parameters specified in the arguments and the cloud properties as - defined in the object's ``cloud`` field. - - For the current list of user data fields that can be provided via - ``kwargs``, see ``_ - - Return a dict containing the properties and info with which an instance - was launched, namely: ``sg_names`` containing the names of the security - groups, ``kp_name`` containing the name of the key pair, ``kp_material`` - containing the private portion of the key pair (*note* that this portion - of the key is available and can be retrieved *only* at the time the key - is created, which will happen only if no key with the name provided in - the ``key_name`` argument exists), ``rs`` containing the - `boto `_ ``ResultSet`` object, - ``instance_id`` containing the ID of a started instance, and - ``error`` containing an error message if there was one. - """ - if security_groups is None: - security_groups = ["CloudMan"] - ret = { - "sg_names": [], - "sg_ids": [], - "kp_name": "", - "kp_material": "", - "rs": None, - "instance_id": "", - "error": None, - } - # First satisfy the prerequisites - for sg in security_groups: - # Get VPC ID in case we're launching into a VPC - vpc_id = None - if subnet_id: - try: - sn = self.vpc_conn.get_all_subnets(subnet_id)[0] - vpc_id = sn.vpc_id - except (EC2ResponseError, IndexError): - bioblend.log.exception("Trouble fetching subnet %s", subnet_id) - cmsg = self.create_cm_security_group(sg, vpc_id=vpc_id) - ret["error"] = cmsg["error"] - if ret["error"]: - return ret - if cmsg["name"]: - ret["sg_names"].append(cmsg["name"]) - ret["sg_ids"].append(cmsg["sg_id"]) - if subnet_id: - # Must setup a network interface if launching into VPC - security_groups = None - interface = boto.ec2.networkinterface.NetworkInterfaceSpecification( - subnet_id=subnet_id, groups=[cmsg["sg_id"]], associate_public_ip_address=True - ) - network_interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(interface) - else: - network_interfaces = None - kp_info = self.create_key_pair(key_name) - ret["kp_name"] = kp_info["name"] - ret["kp_material"] = kp_info["material"] - ret["error"] = kp_info["error"] - if ret["error"]: - return ret - # If not provided, try to find a placement - # TODO: Should placement always be checked? 
To make sure it's correct - # for existing clusters. - if not placement: - placement = self._find_placement(cluster_name).get("placement", None) - # Compose user data for launching an instance, ensuring we have the - # required fields - kwargs["access_key"] = self.access_key - kwargs["secret_key"] = self.secret_key - kwargs["cluster_name"] = cluster_name - kwargs["password"] = password - kwargs["cloud_name"] = self.cloud.name - ud = self._compose_user_data(kwargs) - # Now launch an instance - try: - - rs = None - rs = self.ec2_conn.run_instances( - image_id=image_id, - instance_type=instance_type, - key_name=key_name, - security_groups=security_groups, - # The following two arguments are - # provided in the network_interface - # instead of arguments: - # security_group_ids=security_group_ids, - # subnet_id=subnet_id, - network_interfaces=network_interfaces, - user_data=ud, - kernel_id=kernel_id, - ramdisk_id=ramdisk_id, - placement=placement, - ebs_optimized=ebs_optimized, - ) - ret["rs"] = rs - except EC2ResponseError as e: - err_msg = f"Problem launching an instance: {e} (code {e.error_code}; status {e.status})" - bioblend.log.exception(err_msg) - ret["error"] = err_msg - return ret - else: - if rs: - try: - bioblend.log.info("Launched an instance with ID %s", rs.instances[0].id) - ret["instance_id"] = rs.instances[0].id - ret["instance_ip"] = rs.instances[0].ip_address - except EC2ResponseError as e: - err_msg = f"Problem with the launched instance object: {e} (code {e.error_code}; status {e.status})" - bioblend.log.exception(err_msg) - ret["error"] = err_msg - else: - ret["error"] = ( - "No response after launching an instance. Check " "your account permissions and try again." - ) - return ret - - def create_cm_security_group(self, sg_name="CloudMan", vpc_id=None): - """ - Create a security group with all authorizations required to run CloudMan. - - If the group already exists, check its rules and add the missing ones. - - :type sg_name: str - :param sg_name: A name for the security group to be created. - - :type vpc_id: str - :param vpc_id: VPC ID under which to create the security group. - - :rtype: dict - :return: A dictionary containing keys ``name`` (with the value being the - name of the security group that was created), ``error`` - (with the value being the error message if there was an error - or ``None`` if no error was encountered), and ``ports`` - (containing the list of tuples with port ranges that were - opened or attempted to be opened). - - .. versionchanged:: 0.6.1 - The return value changed from a string to a dict - """ - ports = ( - ("20", "21"), # FTP - ("22", "22"), # SSH - ("80", "80"), # Web UI - ("443", "443"), # SSL Web UI - ("8800", "8800"), # NodeJS Proxy for Galaxy IPython IE - ("9600", "9700"), # HTCondor - ("30000", "30100"), - ) # FTP transfer - progress = {"name": None, "sg_id": None, "error": None, "ports": ports} - cmsg = None - filters = None - if vpc_id: - filters = {"vpc-id": vpc_id} - # Check if this security group already exists - try: - sgs = self.ec2_conn.get_all_security_groups(filters=filters) - except EC2ResponseError as e: - err_msg = f"Problem getting security groups. 
This could indicate a problem with your account credentials or permissions: {e} (code {e.error_code}; status {e.status})" - bioblend.log.exception(err_msg) - progress["error"] = err_msg - return progress - for sg in sgs: - if sg.name == sg_name: - cmsg = sg - bioblend.log.debug("Security group '%s' already exists; will add authorizations next.", sg_name) - break - # If it does not exist, create security group - if cmsg is None: - bioblend.log.debug("Creating Security Group %s", sg_name) - try: - cmsg = self.ec2_conn.create_security_group(sg_name, "A security " "group for CloudMan", vpc_id=vpc_id) - except EC2ResponseError as e: - err_msg = f"Problem creating security group '{sg_name}': {e} (code {e.error_code}; status {e.status})" - bioblend.log.exception(err_msg) - progress["error"] = err_msg - if cmsg: - progress["name"] = cmsg.name - progress["sg_id"] = cmsg.id - # Add appropriate authorization rules - # If these rules already exist, nothing will be changed in the SG - for port in ports: - try: - if not self.rule_exists(cmsg.rules, from_port=port[0], to_port=port[1]): - cmsg.authorize(ip_protocol="tcp", from_port=port[0], to_port=port[1], cidr_ip="0.0.0.0/0") - else: - bioblend.log.debug("Rule (%s:%s) already exists in the SG", port[0], port[1]) - except EC2ResponseError as e: - err_msg = ( - f"A problem adding security group authorizations: {e} (code {e.error_code}; status {e.status})" - ) - bioblend.log.exception(err_msg) - progress["error"] = err_msg - # Add ICMP (i.e., ping) rule required by HTCondor - try: - if not self.rule_exists(cmsg.rules, from_port="-1", to_port="-1", ip_protocol="icmp"): - cmsg.authorize(ip_protocol="icmp", from_port=-1, to_port=-1, cidr_ip="0.0.0.0/0") - else: - bioblend.log.debug(f"ICMP rule already exists in {sg_name} SG.") - except EC2ResponseError as e: - err_msg = ( - f"A problem with security ICMP rule authorization: {e} (code {e.error_code}; status {e.status})" - ) - bioblend.log.exception(err_msg) - progress["err_msg"] = err_msg - # Add rule that allows communication between instances in the same - # SG - # A flag to indicate if group rule already exists - g_rule_exists = False - for rule in cmsg.rules: - for grant in rule.grants: - if grant.name == cmsg.name: - g_rule_exists = True - bioblend.log.debug("Group rule already exists in the SG.") - if g_rule_exists: - break - if not g_rule_exists: - try: - cmsg.authorize(src_group=cmsg, ip_protocol="tcp", from_port=0, to_port=65535) - except EC2ResponseError as e: - err_msg = f"A problem with security group group authorization: {e} (code {e.error_code}; status {e.status})" - bioblend.log.exception(err_msg) - progress["err_msg"] = err_msg - bioblend.log.info("Done configuring '%s' security group", cmsg.name) - else: - bioblend.log.warning(f"Did not create security group '{sg_name}'") - return progress - - def rule_exists(self, rules, from_port, to_port, ip_protocol="tcp", cidr_ip="0.0.0.0/0"): - """ - A convenience method to check if an authorization rule in a security group - already exists. - """ - for rule in rules: - if ( - rule.ip_protocol == ip_protocol - and rule.from_port == from_port - and rule.to_port == to_port - and cidr_ip in [ip.cidr_ip for ip in rule.grants] - ): - return True - return False - - def create_key_pair(self, key_name="cloudman_key_pair"): - """ - If a key pair with the provided ``key_name`` does not exist, create it. - - :type sg_name: str - :param sg_name: A name for the key pair to be created. 
- - :rtype: dict - :return: A dictionary containing keys ``name`` (with the value being the - name of the key pair that was created), ``error`` - (with the value being the error message if there was an error - or ``None`` if no error was encountered), and ``material`` - (containing the unencrypted PEM encoded RSA private key if the - key was created or ``None`` if the key already eixsted). - - .. versionchanged:: 0.6.1 - The return value changed from a tuple to a dict - """ - progress = {"name": None, "material": None, "error": None} - kp = None - # Check if a key pair under the given name already exists. If it does not, - # create it, else return. - try: - kps = self.ec2_conn.get_all_key_pairs() - except EC2ResponseError as e: - err_msg = f"Problem getting key pairs: {e} (code {e.error_code}; status {e.status})" - bioblend.log.exception(err_msg) - progress["error"] = err_msg - return progress - for akp in kps: - if akp.name == key_name: - bioblend.log.info("Key pair '%s' already exists; reusing it.", key_name) - progress["name"] = akp.name - return progress - try: - kp = self.ec2_conn.create_key_pair(key_name) - except EC2ResponseError as e: - err_msg = f"Problem creating key pair '{key_name}': {e} (code {e.error_code}; status {e.status})" - bioblend.log.exception(err_msg) - progress["error"] = err_msg - return progress - bioblend.log.info("Created key pair '%s'", kp.name) - progress["name"] = kp.name - progress["material"] = kp.material - return progress - - def assign_floating_ip(self, ec2_conn, instance): - try: - bioblend.log.debug("Allocating a new floating IP address.") - address = ec2_conn.allocate_address() - except EC2ResponseError: - bioblend.log.exception("Exception allocating a new floating IP address") - bioblend.log.info("Associating floating IP %s to instance %s", address.public_ip, instance.id) - ec2_conn.associate_address(instance_id=instance.id, public_ip=address.public_ip) - - def get_status(self, instance_id): - """ - Check on the status of an instance. ``instance_id`` needs to be a - ``boto``-library copatible instance ID (e.g., ``i-8fehrdss``).If - ``instance_id`` is not provided, the ID obtained when launching - *the most recent* instance is used. Note that this assumes the instance - being checked on was launched using this class. Also note that the same - class may be used to launch multiple instances but only the most recent - ``instance_id`` is kept while any others will to be explicitly specified. - - This method also allows the required ``ec2_conn`` connection object to be - provided at invocation time. If the object is not provided, credentials - defined for the class are used (ability to specify a custom ``ec2_conn`` - helps in case of stateless method invocations). - - Return a ``state`` dict containing the following keys: ``instance_state``, - ``public_ip``, ``placement``, and ``error``, which capture CloudMan's - current state. For ``instance_state``, expected values are: ``pending``, - ``booting``, ``running``, or ``error`` and represent the state of the - underlying instance. Other keys will return an empty value until the - ``instance_state`` enters ``running`` state. - """ - ec2_conn = self.ec2_conn - rs = None - state = {"instance_state": "", "public_ip": "", "placement": "", "error": ""} - - # Make sure we have an instance ID - if instance_id is None: - err = "Missing instance ID, cannot check the state." 
- bioblend.log.error(err) - state["error"] = err - return state - try: - rs = ec2_conn.get_all_instances([instance_id]) - if rs is not None: - inst_state = rs[0].instances[0].update() - public_ip = rs[0].instances[0].ip_address - state["public_ip"] = public_ip - if inst_state == "running": - # if there's a private ip, but no public ip - # attempt auto allocation of floating IP - if rs[0].instances[0].private_ip_address and not public_ip: - self.assign_floating_ip(ec2_conn, rs[0].instances[0]) - # Wait until the CloudMan URL is accessible to return - # the data - this may only be correct initially at - # bootup for instances that configure themselves for - # https - they may ultimately block port 80. However, - # we don't have a good way to determine whether to - # check using http or https. - cm_url = f"http://{public_ip}/cloud" - if self._checkURL(cm_url) is True: - state["instance_state"] = inst_state - state["placement"] = rs[0].instances[0].placement - else: - state["instance_state"] = "booting" - else: - state["instance_state"] = inst_state - except Exception as e: - err = f"Problem updating instance '{instance_id}' state: {e}" - bioblend.log.error(err) - state["error"] = err - return state - - def get_clusters_pd(self, include_placement=True): - """ - Return *persistent data* of all existing clusters for this account. - - :type include_placement: bool - :param include_placement: Whether or not to include region placement for - the clusters. Setting this option will lead - to a longer function runtime. - - :rtype: dict - :return: A dictionary containing keys ``clusters`` and ``error``. The - value of ``clusters`` will be a dictionary with the following keys - ``cluster_name``, ``persistent_data``, ``bucket_name`` and optionally - ``placement`` or an empty list if no clusters were found or an - error was encountered. ``persistent_data`` key value is yet - another dictionary containing given cluster's persistent data. - The value for the ``error`` key will contain a string with the - error message. - - .. versionadded:: 0.3 - .. versionchanged:: 0.7.0 - The return value changed from a list to a dictionary. 
- """ - clusters = [] - response = {"clusters": clusters, "error": None} - s3_conn = self.connect_s3(self.access_key, self.secret_key, self.cloud) - try: - buckets = s3_conn.get_all_buckets() - except S3ResponseError as e: - response["error"] = f"S3ResponseError getting buckets: {e}" - except self.http_exceptions as ex: - response["error"] = f"Exception getting buckets: {ex}" - if response["error"]: - bioblend.log.exception(response["error"]) - return response - for bucket in [b for b in buckets if b.name.startswith("cm-")]: - try: - # TODO: first lookup if persistent_data.yaml key exists - pd = bucket.get_key("persistent_data.yaml") - except S3ResponseError: - # This can fail for a number of reasons for non-us and/or - # CNAME'd buckets but it is not a terminal error - bioblend.log.warning("Problem fetching persistent_data.yaml from bucket %s", bucket) - continue - if pd: - # We are dealing with a CloudMan bucket - pd_contents = pd.get_contents_as_string() - pd = yaml.safe_load(pd_contents) - if "cluster_name" in pd: - cluster_name = pd["cluster_name"] - else: - for key in bucket.list(): - if key.name.endswith(".clusterName"): - cluster_name = key.name.split(".clusterName")[0] - cluster = {"cluster_name": cluster_name, "persistent_data": pd, "bucket_name": bucket.name} - # Look for cluster's placement too - if include_placement: - placement = self._find_placement(cluster_name, cluster) - cluster["placement"] = placement - clusters.append(cluster) - response["clusters"] = clusters - return response - - def get_cluster_pd(self, cluster_name): - """ - Return *persistent data* (as a dict) associated with a cluster with the - given ``cluster_name``. If a cluster with the given name is not found, - return an empty dict. - - .. versionadded:: 0.3 - """ - cluster = {} - clusters = self.get_clusters_pd().get("clusters", []) - for c in clusters: - if c["cluster_name"] == cluster_name: - cluster = c - break - return cluster - - def connect_ec2(self, a_key, s_key, cloud=None): - """ - Create and return an EC2-compatible connection object for the given cloud. - - See ``_get_cloud_info`` method for more details on the requirements for - the ``cloud`` parameter. If no value is provided, the class field is used. - """ - if cloud is None: - cloud = self.cloud - ci = self._get_cloud_info(cloud) - r = RegionInfo(name=ci["region_name"], endpoint=ci["region_endpoint"]) - ec2_conn = boto.connect_ec2( - aws_access_key_id=a_key, - aws_secret_access_key=s_key, - is_secure=ci["is_secure"], - region=r, - port=ci["ec2_port"], - path=ci["ec2_conn_path"], - validate_certs=False, - ) - return ec2_conn - - def connect_s3(self, a_key, s_key, cloud=None): - """ - Create and return an S3-compatible connection object for the given cloud. - - See ``_get_cloud_info`` method for more details on the requirements for - the ``cloud`` parameter. If no value is provided, the class field is used. - """ - if cloud is None: - cloud = self.cloud - ci = self._get_cloud_info(cloud) - if ci["cloud_type"] == "amazon": - calling_format = SubdomainCallingFormat() - else: - calling_format = OrdinaryCallingFormat() - s3_conn = S3Connection( - aws_access_key_id=a_key, - aws_secret_access_key=s_key, - is_secure=ci["is_secure"], - port=ci["s3_port"], - host=ci["s3_host"], - path=ci["s3_conn_path"], - calling_format=calling_format, - ) - return s3_conn - - def connect_vpc(self, a_key, s_key, cloud=None): - """ - Establish a connection to the VPC service. - - TODO: Make this work with non-default clouds as well. 
- """ - if cloud is None: - cloud = self.cloud - ci = self._get_cloud_info(cloud) - r = RegionInfo(name=ci["region_name"], endpoint=ci["region_endpoint"]) - vpc_conn = boto.connect_vpc( - aws_access_key_id=a_key, - aws_secret_access_key=s_key, - is_secure=ci["is_secure"], - region=r, - port=ci["ec2_port"], - path=ci["ec2_conn_path"], - validate_certs=False, - ) - return vpc_conn - - def _compose_user_data(self, user_provided_data): - """ - A convenience method used to compose and properly format the user data - required when requesting an instance. - - ``user_provided_data`` is the data provided by a user required to identify - a cluster and user other user requirements. - """ - form_data = {} - # Do not include the following fields in the user data but do include - # any 'advanced startup fields' that might be added in the future - excluded_fields = [ - "sg_name", - "image_id", - "instance_id", - "kp_name", - "cloud", - "cloud_type", - "public_dns", - "cidr_range", - "kp_material", - "placement", - "flavor_id", - ] - for key, value in user_provided_data.items(): - if key not in excluded_fields: - form_data[key] = value - # If the following user data keys are empty, do not include them in the - # request user data - udkeys = ["post_start_script_url", "worker_post_start_script_url", "bucket_default", "share_string"] - for udkey in udkeys: - if udkey in form_data and form_data[udkey] == "": - del form_data[udkey] - # If bucket_default was not provided, add a default value to the user data - # (missing value does not play nicely with CloudMan's ec2autorun.py) - if not form_data.get("bucket_default", None) and self.cloud.bucket_default: - form_data["bucket_default"] = self.cloud.bucket_default - # Reuse the ``password`` for the ``freenxpass`` user data option - if "freenxpass" not in form_data and "password" in form_data: - form_data["freenxpass"] = form_data["password"] - # Convert form_data into the YAML format - ud = yaml.dump(form_data, default_flow_style=False, allow_unicode=False) - # Also include connection info about the selected cloud - ci = self._get_cloud_info(self.cloud, as_str=True) - return ud + "\n" + ci - - def _get_cloud_info(self, cloud, as_str=False): - """ - Get connection information about a given cloud - """ - ci = {} - ci["cloud_type"] = cloud.cloud_type - ci["region_name"] = cloud.region_name - ci["region_endpoint"] = cloud.region_endpoint - ci["is_secure"] = cloud.is_secure - ci["ec2_port"] = cloud.ec2_port if cloud.ec2_port != "" else None - ci["ec2_conn_path"] = cloud.ec2_conn_path - # Include cidr_range only if not empty - if cloud.cidr_range != "": - ci["cidr_range"] = cloud.cidr_range - ci["s3_host"] = cloud.s3_host - ci["s3_port"] = cloud.s3_port if cloud.s3_port != "" else None - ci["s3_conn_path"] = cloud.s3_conn_path - if as_str: - ci = yaml.dump(ci, default_flow_style=False, allow_unicode=False) - return ci - - def _get_volume_placement(self, vol_id): - """ - Returns the placement of a volume (or None, if it cannot be determined) - """ - try: - vol = self.ec2_conn.get_all_volumes(volume_ids=[vol_id]) - except EC2ResponseError as ec2e: - bioblend.log.error(f"EC2ResponseError querying for volume {vol_id}: {ec2e}") - vol = None - if vol: - return vol[0].zone - else: - bioblend.log.error("Requested placement of a volume '%s' that does not exist.", vol_id) - return None - - def _find_placement(self, cluster_name, cluster=None): - """ - Find a placement zone for a cluster with the name ``cluster_name``. 
- - By default, this method will search for and fetch given cluster's - *persistent data*; alternatively, *persistent data* can be provided via - the ``cluster`` parameter. This dict needs to have ``persistent_data`` - key with the contents of cluster's *persistent data*. - If the cluster or the volume associated with the cluster cannot be found, - cluster placement is set to ``None``. - - :rtype: dict - :return: A dictionary with ``placement`` and ``error`` keywords. - - .. versionchanged:: 0.7.0 - The return value changed from a list to a dictionary. - """ - placement = None - response = {"placement": placement, "error": None} - cluster = cluster or self.get_cluster_pd(cluster_name) - if cluster and "persistent_data" in cluster: - pd = cluster["persistent_data"] - try: - if "placement" in pd: - response["placement"] = pd["placement"] - elif "data_filesystems" in pd: - # We have v1 format persistent data so get the volume first and - # then the placement zone - vol_id = pd["data_filesystems"]["galaxyData"][0]["vol_id"] - response["placement"] = self._get_volume_placement(vol_id) - elif "filesystems" in pd: - # V2 format. - for fs in [fs for fs in pd["filesystems"] if fs.get("kind", None) == "volume" and "ids" in fs]: - # All volumes must be in the same zone - vol_id = fs["ids"][0] - response["placement"] = self._get_volume_placement(vol_id) - # No need to continue to iterate through - # filesystems, if we found one with a volume. - break - except Exception as exc: - response[ - "error" - ] = f"Exception while finding placement for cluster '{cluster_name}'. This can indicate malformed instance data. Or that this method is broken: {exc}" - bioblend.log.error(response["error"]) - response["placement"] = None - else: - bioblend.log.debug(f"Insufficient info about cluster {cluster_name} to get placement.") - return response - - def find_placements(self, ec2_conn, instance_type, cloud_type, cluster_name=None): - """ - Find a list of placement zones that support the specified instance type. - - If ``cluster_name`` is given and a cluster with the given name exist, - return a list with only one entry where the given cluster lives. - - Searching for available zones for a given instance type is done by - checking the spot prices in the potential availability zones for - support before deciding on a region: - http://blog.piefox.com/2011/07/ec2-availability-zones-and-instance.html - - Note that, currently, instance-type based zone selection applies only to - AWS. For other clouds, all the available zones are returned (unless a - cluster is being recreated, in which case the cluster's placement zone is - returned sa stored in its persistent data. - - :rtype: dict - :return: A dictionary with ``zones`` and ``error`` keywords. - - .. versionchanged:: 0.3 - Changed method name from ``_find_placements`` to ``find_placements``. - Also added ``cluster_name`` parameter. - - .. versionchanged:: 0.7.0 - The return value changed from a list to a dictionary. 
- """ - # First look for a specific zone a given cluster is bound to - zones = [] - response = {"zones": zones, "error": None} - if cluster_name: - placement = self._find_placement(cluster_name) - if placement.get("error"): - response["error"] = placement["error"] - return response - response["zones"] = placement.get("placement", []) - # If placement is not found, look for a list of available zones - if not response["zones"]: - in_the_past = datetime.datetime.now() - datetime.timedelta(hours=1) - back_compatible_zone = "us-east-1e" - for zone in [z for z in ec2_conn.get_all_zones() if z.state == "available"]: - # Non EC2 clouds may not support get_spot_price_history - if instance_type is None or cloud_type != "ec2": - zones.append(zone.name) - elif ec2_conn.get_spot_price_history( - instance_type=instance_type, end_time=in_the_past.isoformat(), availability_zone=zone.name - ): - zones.append(zone.name) - # Higher-lettered zones seem to have more availability currently - zones.sort(reverse=True) - if back_compatible_zone in zones: - zones = [back_compatible_zone] + [z for z in zones if z != back_compatible_zone] - if len(zones) == 0: - response["error"] = f"Did not find availabilty zone for {instance_type}" - bioblend.log.error(response["error"]) - zones.append(back_compatible_zone) - return response - - def _checkURL(self, url): - """ - Check if the ``url`` is *alive* (i.e., remote server returns code 200(OK) - or 401 (unauthorized)). - """ - try: - p = urlparse(url) - h = HTTPConnection(p[1]) - h.putrequest("HEAD", p[2]) - h.endheaders() - r = h.getresponse() - # CloudMan UI is pwd protected so include 401 - return r.status in (200, 401) - except Exception: - # No response or no good response - return False diff -Nru python-bioblend-0.18.0/bioblend/config.py python-bioblend-1.0.0/bioblend/config.py --- python-bioblend-0.18.0/bioblend/config.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/config.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,5 +1,9 @@ import configparser import os +from typing import ( + IO, + Optional, +) BioBlendConfigPath = "/etc/bioblend.cfg" BioBlendConfigLocations = [BioBlendConfigPath] @@ -17,48 +21,12 @@ * Individual user: ``~/.bioblend`` (which works on both Windows and Unix) """ - def __init__(self, path=None, fp=None, do_load=True): + def __init__(self, path: Optional[str] = None, fp: Optional[IO[str]] = None, do_load: bool = True) -> None: super().__init__({"working_dir": "/mnt/pyami", "debug": "0"}) if do_load: if path: - self.load_from_path(path) + self.read([path]) elif fp: - self.readfp(fp) + self.read_file(fp) else: self.read(BioBlendConfigLocations) - - def get_value(self, section, name, default=None): - return self.get(section, name, default) - - def get(self, section, name, default=None): - """ """ - try: - val = super().get(section, name) - except Exception: - val = default - return val - - def getint(self, section, name, default=0): - try: - val = super().getint(section, name) - except Exception: - val = int(default) - return val - - def getfloat(self, section, name, default=0.0): - try: - val = super().getfloat(section, name) - except Exception: - val = float(default) - return val - - def getbool(self, section, name, default=False): - if self.has_option(section, name): - val = self.get(section, name) - if val.lower() == "true": - val = True - else: - val = False - else: - val = default - return val diff -Nru python-bioblend-0.18.0/bioblend/galaxy/client.py python-bioblend-1.0.0/bioblend/galaxy/client.py --- 
python-bioblend-0.18.0/bioblend/galaxy/client.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/client.py 2022-10-13 18:17:37.000000000 +0000 @@ -6,8 +6,11 @@ """ import time -import typing -from typing import Optional +from typing import ( + Any, + Optional, + TYPE_CHECKING, +) import requests @@ -17,64 +20,32 @@ # ConnectionError class was originally defined here from bioblend import ConnectionError # noqa: I202 -if typing.TYPE_CHECKING: - from bioblend.galaxy import GalaxyInstance +if TYPE_CHECKING: + from bioblend.galaxyclient import GalaxyClient class Client: # The `module` attribute needs to be defined in subclasses module: str - gi: "GalaxyInstance" - - # Class variables that configure GET request retries. Note that since these - # are class variables their values are shared by all Client instances -- - # i.e., HistoryClient, WorkflowClient, etc. - # - # Number of attempts before giving up on a GET request. - _max_get_retries = 1 - # Delay in seconds between subsequent retries. - _get_retry_delay = 10 + gi: "GalaxyClient" @classmethod - def max_get_retries(cls): - """ - The maximum number of attempts for a GET request. - """ - return cls._max_get_retries + def max_get_retries(cls) -> None: + raise AttributeError("Deprecated method, please use gi's `max_get_attempts` property") @classmethod - def set_max_get_retries(cls, value): - """ - Set the maximum number of attempts for GET requests. A value greater - than one causes failed GET requests to be retried `value` - 1 times. - - Default: 1 - """ - if value < 1: - raise ValueError(f"Number of retries must be >= 1 (got: {value})") - cls._max_get_retries = value - return cls + def set_max_get_retries(cls, value: int) -> None: + raise AttributeError("Deprecated method, please use gi's `max_get_attempts` property") @classmethod - def get_retry_delay(cls): - """ - The delay (in seconds) to wait before retrying a failed GET - request. - """ - return cls._get_retry_delay + def get_retry_delay(cls) -> None: + raise AttributeError("Deprecated method, please use gi's `get_retry_delay` property") @classmethod - def set_get_retry_delay(cls, value): - """ - Set the delay (in seconds) to wait before retrying a failed GET - request. Default: 10 - """ - if value < 0: - raise ValueError(f"Retry delay must be >= 0 (got: {value})") - cls._get_retry_delay = value - return cls + def set_get_retry_delay(cls, value: float) -> None: + raise AttributeError("Deprecated method, please use gi's `get_retry_delay` property") - def __init__(self, galaxy_instance: "GalaxyInstance"): + def __init__(self, galaxy_instance: "GalaxyClient") -> None: """ A generic Client interface defining the common fields. @@ -85,7 +56,7 @@ """ self.gi = galaxy_instance - def _make_url(self, module_id: Optional[str] = None, deleted: bool = False, contents: bool = False): + def _make_url(self, module_id: Optional[str] = None, deleted: bool = False, contents: bool = False) -> str: """ Compose a URL based on the provided arguments. @@ -115,26 +86,26 @@ deleted: bool = False, contents: bool = False, url: Optional[str] = None, - params=None, + params: Optional[dict] = None, json: bool = True, - ): + ) -> Any: """ Do a GET request, composing the URL from ``id``, ``deleted`` and ``contents``. Alternatively, an explicit ``url`` can be provided. If ``json`` is set to ``True``, return a decoded JSON object (and treat an empty or undecodable response as an error). 
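The class-level retry knobs deprecated above are replaced by per-instance settings read from ``gi``. A minimal sketch of configuring them on a connection (the server URL and API key are placeholders, and it assumes ``max_get_attempts`` and ``get_retry_delay`` remain settable properties on the instance, as the deprecation messages suggest)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("https://galaxy.example.org", key="<api-key>")
    # Retry failed GET requests up to 3 times, waiting 5 seconds between attempts
    gi.max_get_attempts = 3
    gi.get_retry_delay = 5.0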
- The request will optionally be retried as configured by - ``max_get_retries`` and ``get_retry_delay``: this offers some + The request will optionally be retried as configured by gi's + ``max_get_attempts`` and ``get_retry_delay``: this offers some resilience in the presence of temporary failures. :return: The decoded response if ``json`` is set to ``True``, otherwise the response object """ - if not url: + if url is None: url = self._make_url(module_id=id, deleted=deleted, contents=contents) - attempts_left = self.max_get_retries() - retry_delay = self.get_retry_delay() + attempts_left = self.gi.max_get_attempts + retry_delay = self.gi.get_retry_delay bioblend.log.debug("GET - attempts left: %s; retry delay: %s", attempts_left, retry_delay) msg = "" while attempts_left > 0: @@ -169,7 +140,15 @@ bioblend.log.warning(msg) time.sleep(retry_delay) - def _post(self, payload=None, id=None, deleted=False, contents=None, url=None, files_attached=False): + def _post( + self, + payload: Optional[dict] = None, + id: Optional[str] = None, + deleted: bool = False, + contents: bool = False, + url: Optional[str] = None, + files_attached: bool = False, + ) -> Any: """ Do a generic POST request, composing the url from the contents of the arguments. Alternatively, an explicit ``url`` can be provided to use @@ -189,7 +168,13 @@ url = self._make_url(module_id=id, deleted=deleted, contents=contents) return self.gi.make_post_request(url, payload=payload, files_attached=files_attached) - def _put(self, payload=None, id=None, url=None, params=None): + def _put( + self, + payload: Optional[dict] = None, + id: Optional[str] = None, + url: Optional[str] = None, + params: Optional[dict] = None, + ) -> Any: """ Do a generic PUT request, composing the url from the contents of the arguments. Alternatively, an explicit ``url`` can be provided to use @@ -204,7 +189,13 @@ url = self._make_url(module_id=id) return self.gi.make_put_request(url, payload=payload, params=params) - def _patch(self, payload=None, id=None, url=None, params=None): + def _patch( + self, + payload: Optional[dict] = None, + id: Optional[str] = None, + url: Optional[str] = None, + params: Optional[dict] = None, + ) -> Any: """ Do a generic PATCH request, composing the url from the contents of the arguments. Alternatively, an explicit ``url`` can be provided to use @@ -219,7 +210,15 @@ url = self._make_url(module_id=id) return self.gi.make_patch_request(url, payload=payload, params=params) - def _delete(self, payload=None, id=None, deleted=False, contents=None, url=None, params=None): + def _delete( + self, + payload: Optional[dict] = None, + id: Optional[str] = None, + deleted: bool = False, + contents: bool = False, + url: Optional[str] = None, + params: Optional[dict] = None, + ) -> Any: """ Do a generic DELETE request, composing the url from the contents of the arguments. 
Alternatively, an explicit ``url`` can be provided to use @@ -233,7 +232,7 @@ if not url: url = self._make_url(module_id=id, deleted=deleted, contents=contents) r = self.gi.make_delete_request(url, payload=payload, params=params) - if r.status_code == 200: + if 200 <= r.status_code < 300: return r.json() # @see self.body for HTTP response body raise ConnectionError( diff -Nru python-bioblend-0.18.0/bioblend/galaxy/config/__init__.py python-bioblend-1.0.0/bioblend/galaxy/config/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/config/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/config/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -2,16 +2,21 @@ Contains possible interaction dealing with Galaxy configuration. """ +from typing import TYPE_CHECKING + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class ConfigClient(Client): module = "configuration" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_config(self): + def get_config(self) -> dict: """ Get a list of attributes about the Galaxy instance. More attributes will be present if the user is an admin. @@ -36,7 +41,7 @@ """ return self._get() - def get_version(self): + def get_version(self) -> dict: """ Get the current version of the Galaxy instance. @@ -49,7 +54,7 @@ url = self.gi.url + "/version" return self._get(url=url) - def whoami(self): + def whoami(self) -> dict: """ Return information about the current authenticated user. diff -Nru python-bioblend-0.18.0/bioblend/galaxy/dataset_collections/__init__.py python-bioblend-1.0.0/bioblend/galaxy/dataset_collections/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/dataset_collections/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/dataset_collections/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,5 +1,13 @@ import logging import time +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, + Union, +) from bioblend import ( CHUNK_SIZE, @@ -8,30 +16,40 @@ from bioblend.galaxy.client import Client from bioblend.galaxy.datasets import TERMINAL_STATES +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + log = logging.getLogger(__name__) class HasElements: - def __init__(self, name, type="list", elements=None): + def __init__( + self, + name: str, + type: str = "list", + elements: Optional[Union[List[Union["CollectionElement", "SimpleElement"]], Dict[str, Any]]] = None, + ) -> None: self.name = name self.type = type if isinstance(elements, dict): - self.elements = [dict(name=key, id=value, src="hda") for key, value in elements.values()] + self.elements: List[Union["CollectionElement", "SimpleElement"]] = [ + HistoryDatasetElement(name=key, id=value) for key, value in elements.values() + ] elif elements: self.elements = elements - def add(self, element): + def add(self, element: Union["CollectionElement", "SimpleElement"]) -> "HasElements": self.elements.append(element) return self class CollectionDescription(HasElements): - def to_dict(self): + def to_dict(self) -> Dict[str, Union[str, List]]: return dict(name=self.name, collection_type=self.type, element_identifiers=[e.to_dict() for e in self.elements]) class CollectionElement(HasElements): - def to_dict(self): + def to_dict(self) -> Dict[str, Union[str, List]]: return dict( src="new_collection", name=self.name, @@ -41,15 +59,15 @@ class SimpleElement: 
- def __init__(self, value): + def __init__(self, value: Dict[str, str]) -> None: self.value = value - def to_dict(self): + def to_dict(self) -> Dict[str, str]: return self.value class HistoryDatasetElement(SimpleElement): - def __init__(self, name, id): + def __init__(self, name: str, id: str) -> None: super().__init__( dict( name=name, @@ -60,7 +78,7 @@ class HistoryDatasetCollectionElement(SimpleElement): - def __init__(self, name, id): + def __init__(self, name: str, id: str) -> None: super().__init__( dict( name=name, @@ -71,7 +89,7 @@ class LibraryDatasetElement(SimpleElement): - def __init__(self, name, id): + def __init__(self, name: str, id: str) -> None: super().__init__( dict( name=name, @@ -82,12 +100,13 @@ class DatasetCollectionClient(Client): + gi: "GalaxyInstance" module = "dataset_collections" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def show_dataset_collection(self, dataset_collection_id: str, instance_type: str = "history") -> dict: + def show_dataset_collection(self, dataset_collection_id: str, instance_type: str = "history") -> Dict[str, Any]: """ Get details of a given dataset collection of the current user @@ -106,7 +125,7 @@ url = self._make_url(module_id=dataset_collection_id) return self._get(id=dataset_collection_id, url=url, params=params) - def download_dataset_collection(self, dataset_collection_id: str, file_path: str) -> dict: + def download_dataset_collection(self, dataset_collection_id: str, file_path: str) -> Dict[str, Any]: """ Download a history dataset collection as an archive. @@ -144,7 +163,7 @@ interval: float = 3, proportion_complete: float = 1.0, check: bool = True, - ) -> dict: + ) -> Dict[str, Any]: """ Wait until all or a specified proportion of elements of a dataset collection are in a terminal state. 
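The typed element classes above replace the plain dicts previously built by ``HasElements``. A hedged sketch of describing and creating a list collection from existing history datasets (``gi`` is a ``GalaxyInstance`` as in the retry sketch above; all IDs are placeholders, and the wait call assumes the helper documented just above is ``wait_for_dataset_collection``)::

    from bioblend.galaxy.dataset_collections import (
        CollectionDescription,
        HistoryDatasetElement,
    )

    # Describe a flat "list" collection made of two existing history datasets
    description = CollectionDescription(
        name="my samples",
        elements=[
            HistoryDatasetElement(name="sample1", id="<dataset-id-1>"),
            HistoryDatasetElement(name="sample2", id="<dataset-id-2>"),
        ],
    )
    hdca = gi.histories.create_dataset_collection("<history-id>", description)
    # Block until every element of the new collection reaches a terminal state
    gi.dataset_collections.wait_for_dataset_collection(hdca["id"])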
@@ -184,7 +203,7 @@ time_left = maxwait while True: - dataset_collection = self.gi.dataset_collections.show_dataset_collection(dataset_collection_id) + dataset_collection = self.show_dataset_collection(dataset_collection_id) states = [elem["object"]["state"] for elem in dataset_collection["elements"]] terminal_states = [state for state in states if state in TERMINAL_STATES] if set(terminal_states) not in [{"ok"}, set()]: diff -Nru python-bioblend-0.18.0/bioblend/galaxy/datasets/__init__.py python-bioblend-1.0.0/bioblend/galaxy/datasets/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/datasets/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/datasets/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -11,27 +11,38 @@ Dict, List, Optional, + overload, + Tuple, + TYPE_CHECKING, Union, ) from urllib.parse import urljoin +from requests import Response +from typing_extensions import Literal + import bioblend from bioblend import TimeoutException from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + log = logging.getLogger(__name__) +HdaLdda = Literal["hda", "ldda"] TERMINAL_STATES = {"ok", "empty", "error", "discarded", "failed_metadata"} # Non-terminal states are: 'new', 'upload', 'queued', 'running', 'paused', 'setting_metadata' class DatasetClient(Client): + gi: "GalaxyInstance" module = "datasets" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def show_dataset(self, dataset_id, deleted=False, hda_ldda="hda"): + def show_dataset(self, dataset_id: str, deleted: bool = False, hda_ldda: HdaLdda = "hda") -> Dict[str, Any]: """ Get details about a given dataset. This can be a history or a library dataset. @@ -55,7 +66,7 @@ def _initiate_download( self, dataset_id: str, stream_content: bool, require_ok_state: bool = True, maxwait: float = 12000 - ): + ) -> Tuple[Dict[str, Any], str, Response]: dataset = self.wait_for_dataset(dataset_id, maxwait=maxwait, check=False) if not dataset["state"] == "ok": message = f"Dataset state is not 'ok'. Dataset id: {dataset_id}, current state: {dataset['state']}" @@ -81,9 +92,36 @@ r.raise_for_status() return dataset, file_ext, r + @overload def download_dataset( - self, dataset_id, file_path=None, use_default_filename=True, require_ok_state=True, maxwait=12000 - ): + self, + dataset_id: str, + file_path: None = None, + use_default_filename: bool = True, + require_ok_state: bool = True, + maxwait: float = 12000, + ) -> bytes: + ... + + @overload + def download_dataset( + self, + dataset_id: str, + file_path: str, + use_default_filename: bool = True, + require_ok_state: bool = True, + maxwait: float = 12000, + ) -> str: + ... + + def download_dataset( + self, + dataset_id: str, + file_path: Optional[str] = None, + use_default_filename: bool = True, + require_ok_state: bool = True, + maxwait: float = 12000, + ) -> Union[bytes, str]: """ Download a dataset to file or in memory. If the dataset state is not 'ok', a ``DatasetStateException`` will be thrown, unless ``require_ok_state=False``. 
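The new overloads above make the return type of ``download_dataset()`` depend on ``file_path``. A small illustrative sketch (``gi`` is a ``GalaxyInstance``; the dataset ID and target directory are placeholders)::

    # With no file_path, the dataset content is returned in memory as bytes
    data = gi.datasets.download_dataset("<dataset-id>")

    # With a file_path, the dataset is saved to disk and the written path is returned
    path = gi.datasets.download_dataset(
        "<dataset-id>", file_path="/tmp", use_default_filename=True
    )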
@@ -167,12 +205,12 @@ tool_id: Optional[str] = None, tag: Optional[str] = None, history_id: Optional[str] = None, - create_time_min: str = None, - create_time_max: str = None, - update_time_min: str = None, - update_time_max: str = None, + create_time_min: Optional[str] = None, + create_time_max: Optional[str] = None, + update_time_min: Optional[str] = None, + update_time_max: Optional[str] = None, order: str = "create_time-dsc", - ) -> List[dict]: + ) -> List[Dict[str, Any]]: """ Get the latest datasets, or select another subset by specifying optional arguments for filtering (e.g. a history ID). @@ -253,7 +291,7 @@ params["history_id"] = history_id q: List[str] = [] - qv: List[Any] = [] + qv = [] if name: q.append("name") @@ -299,16 +337,16 @@ return self._get(params=params) - def _param_to_filter(self, param): - if type(param) is str: + def _param_to_filter(self, param: Union[str, List[str]]) -> Tuple[str, str]: + if isinstance(param, str): return "eq", param - if type(param) is list: + if isinstance(param, list): if len(param) == 1: return "eq", param.pop() return "in", ",".join(param) raise Exception("Filter param is not of type ``str`` or ``list``") - def publish_dataset(self, dataset_id: str, published: bool = False): + def publish_dataset(self, dataset_id: str, published: bool = False) -> Dict[str, Any]: """ Make a dataset publicly available or private. For more fine-grained control (assigning different permissions to specific roles), use the ``update_permissions()`` method. @@ -320,14 +358,14 @@ :param published: Whether to make the dataset published (``True``) or private (``False``). :rtype: dict - :return: Current roles for all available permission types. + :return: Details of the updated dataset .. note:: This method works only on Galaxy 19.05 or later. """ payload: Dict[str, Any] = {"action": "remove_restrictions" if published else "make_private"} url = self._make_url(dataset_id) + "/permissions" - self.gi.datasets._put(url=url, payload=payload) + return self.gi.datasets._put(url=url, payload=payload) def update_permissions( self, @@ -335,7 +373,7 @@ access_ids: Optional[list] = None, manage_ids: Optional[list] = None, modify_ids: Optional[list] = None, - ): + ) -> dict: """ Set access, manage or modify permissions for a dataset to a list of roles. @@ -365,9 +403,11 @@ if modify_ids: payload["modify"] = modify_ids url = self._make_url(dataset_id) + "/permissions" - self.gi.datasets._put(url=url, payload=payload) + return self.gi.datasets._put(url=url, payload=payload) - def wait_for_dataset(self, dataset_id, maxwait=12000, interval=3, check=True): + def wait_for_dataset( + self, dataset_id: str, maxwait: float = 12000, interval: float = 3, check: bool = True + ) -> Dict[str, Any]: """ Wait until a dataset is in a terminal state. 
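``publish_dataset()`` and ``update_permissions()`` now return the server response instead of discarding it. A hedged usage sketch (``gi`` is a ``GalaxyInstance``; dataset and role IDs are placeholders)::

    # Make a dataset public (Galaxy 19.05 or later); returns the updated dataset details
    updated = gi.datasets.publish_dataset("<dataset-id>", published=True)

    # Assign access and manage permissions to specific roles
    perms = gi.datasets.update_permissions(
        "<dataset-id>",
        access_ids=["<role-id>"],
        manage_ids=["<role-id>"],
    )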
diff -Nru python-bioblend-0.18.0/bioblend/galaxy/datatypes/__init__.py python-bioblend-1.0.0/bioblend/galaxy/datatypes/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/datatypes/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/datatypes/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,25 @@ """ Contains possible interactions with the Galaxy Datatype """ +from typing import ( + Dict, + List, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class DatatypesClient(Client): module = "datatypes" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_datatypes(self, extension_only=False, upload_only=False): + def get_datatypes(self, extension_only: bool = False, upload_only: bool = False) -> List[str]: """ Get the list of all installed datatypes. @@ -36,7 +45,7 @@ 'xml'] """ - params = {} + params: Dict[str, bool] = {} if extension_only: params["extension_only"] = True @@ -45,7 +54,7 @@ return self._get(params=params) - def get_sniffers(self): + def get_sniffers(self) -> List[str]: """ Get the list of all installed sniffers. diff -Nru python-bioblend-0.18.0/bioblend/galaxy/folders/__init__.py python-bioblend-1.0.0/bioblend/galaxy/folders/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/folders/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/folders/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,30 @@ """ Contains possible interactions with the Galaxy library folders """ +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, + Union, +) + +from typing_extensions import Literal + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class FoldersClient(Client): module = "folders" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def create_folder(self, parent_folder_id, name, description=None): + def create_folder(self, parent_folder_id: str, name: str, description: Optional[str] = None) -> Dict[str, Any]: """ Create a folder. @@ -26,12 +40,12 @@ :rtype: dict :return: details of the updated folder """ - payload = {"name": name} + payload: Dict[str, str] = {"name": name} if description: payload["description"] = description return self._post(payload=payload, id=parent_folder_id) - def show_folder(self, folder_id, contents=False): + def show_folder(self, folder_id: str, contents: bool = False) -> Dict[str, Any]: """ Display information about a folder. @@ -48,7 +62,7 @@ return self._get(id=folder_id, contents=contents) - def delete_folder(self, folder_id, undelete=False): + def delete_folder(self, folder_id: str, undelete: bool = False) -> Dict[str, Any]: """ Marks the folder with the given ``id`` as `deleted` (or removes the `deleted` mark if the `undelete` param is True). @@ -66,7 +80,7 @@ payload = {"undelete": undelete} return self._delete(payload=payload, id=folder_id) - def update_folder(self, folder_id, name, description=None): + def update_folder(self, folder_id: str, name: str, description: Optional[str] = None) -> Dict[str, Any]: """ Update folder information. 
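A brief sketch of the folder calls typed above (``gi`` is a ``GalaxyInstance``; the parent folder ID is a placeholder, and it assumes the dict returned by ``create_folder()`` carries the new folder's ``id``)::

    folder = gi.folders.create_folder("<parent-folder-id>", "raw data", description="FASTQ files")
    gi.folders.update_folder(folder["id"], name="raw data (archived)")
    # contents=True also lists the folder's contents
    details = gi.folders.show_folder(folder["id"], contents=True)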
@@ -87,7 +101,7 @@ payload["description"] = description return self._put(payload=payload, id=folder_id) - def get_permissions(self, folder_id, scope): + def get_permissions(self, folder_id: str, scope: Literal["current", "available"] = "current") -> Dict[str, Any]: """ Get the permissions of a folder. @@ -98,12 +112,19 @@ :param scope: scope of permissions, either 'current' or 'available' :rtype: dict - :return: dictionary including details of the folder + :return: dictionary including details of the folder permissions """ url = self._make_url(folder_id) + "/permissions" return self._get(url=url) - def set_permissions(self, folder_id, action="set_permissions", add_ids=None, manage_ids=None, modify_ids=None): + def set_permissions( + self, + folder_id: str, + action: Literal["set_permissions"] = "set_permissions", + add_ids: Optional[List[str]] = None, + manage_ids: Optional[List[str]] = None, + modify_ids: Optional[List[str]] = None, + ) -> Dict[str, Any]: """ Set the permissions of a folder. @@ -126,7 +147,7 @@ :return: dictionary including details of the folder """ url = self._make_url(folder_id) + "/permissions" - payload = {"action": action} + payload: Dict[str, Union[str, List[str]]] = {"action": action} if add_ids: payload["add_ids[]"] = add_ids if manage_ids: diff -Nru python-bioblend-0.18.0/bioblend/galaxy/forms/__init__.py python-bioblend-1.0.0/bioblend/galaxy/forms/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/forms/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/forms/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,26 @@ """ Contains possible interactions with the Galaxy Forms """ +from typing import ( + Any, + Dict, + List, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class FormsClient(Client): module = "forms" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_forms(self): + def get_forms(self) -> List[Dict[str, Any]]: """ Get the list of all forms. @@ -29,7 +39,7 @@ """ return self._get() - def show_form(self, form_id): + def show_form(self, form_id: str) -> Dict[str, Any]: """ Get details of a given form. @@ -51,15 +61,17 @@ """ return self._get(id=form_id) - def create_form(self, form_xml_text): + def create_form(self, form_xml_text: str) -> List[Dict[str, Any]]: """ Create a new form. 
- :type form_xml_text: str - :param form_xml_text: Form xml to create a form on galaxy instance + :type form_xml_text: str + :param form_xml_text: Form xml to create a form on galaxy instance - :rtype: str - :return: Unique URL of newly created form with encoded id + :rtype: list of dicts + :return: List with a single dictionary describing the created form """ - payload = form_xml_text + payload = { + "xml_text": form_xml_text, + } return self._post(payload=payload) diff -Nru python-bioblend-0.18.0/bioblend/galaxy/ftpfiles/__init__.py python-bioblend-1.0.0/bioblend/galaxy/ftpfiles/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/ftpfiles/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/ftpfiles/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,24 @@ """ Contains possible interactions with the Galaxy FTP Files """ +from typing import ( + List, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class FTPFilesClient(Client): module = "ftp_files" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_ftp_files(self, deleted=False): + def get_ftp_files(self, deleted: bool = False) -> List[dict]: """ Get a list of local files. diff -Nru python-bioblend-0.18.0/bioblend/galaxy/genomes/__init__.py python-bioblend-1.0.0/bioblend/galaxy/genomes/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/genomes/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/genomes/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,28 @@ """ Contains possible interactions with the Galaxy Histories """ +from typing import ( + Any, + Dict, + Optional, + TYPE_CHECKING, +) + +from typing_extensions import Literal + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class GenomeClient(Client): module = "genomes" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_genomes(self): + def get_genomes(self) -> list: """ Returns a list of installed genomes @@ -20,7 +32,14 @@ genomes = self._get() return genomes - def show_genome(self, id, num=None, chrom=None, low=None, high=None): + def show_genome( + self, + id: str, + num: Optional[str] = None, + chrom: Optional[str] = None, + low: Optional[str] = None, + high: Optional[str] = None, + ) -> Dict[str, Any]: """ Returns information about build @@ -42,7 +61,7 @@ :rtype: dict :return: Information about the genome build """ - params = {} + params: Dict[str, str] = {} if num: params["num"] = num if chrom: @@ -51,21 +70,26 @@ params["low"] = low if high: params["high"] = high - return self._get(id, params) + return self._get(id=id, params=params) def install_genome( self, - func="download", - source=None, - dbkey=None, - ncbi_name=None, - ensembl_dbkey=None, - url_dbkey=None, - indexers=None, - ): + func: Literal["download", "index"] = "download", + source: Optional[str] = None, + dbkey: Optional[str] = None, + ncbi_name: Optional[str] = None, + ensembl_dbkey: Optional[str] = None, + url_dbkey: Optional[str] = None, + indexers: Optional[list] = None, + ) -> Dict[str, Any]: """ Download and/or index a genome. 
+ :type func: str + :param func: Allowed values: 'download', Download and index; 'index', Index only + + :type source: str + :param source: Data source for this build. Can be: UCSC, Ensembl, NCBI, URL :type dbkey: str :param dbkey: DB key of the build to download, ignored unless 'UCSC' is specified as the source @@ -79,21 +103,15 @@ :type url_dbkey: str :param url_dbkey: DB key to use for this build, ignored unless URL is specified as the source - :type source: str - :param source: Data source for this build. Can be: UCSC, Ensembl, NCBI, URL - :type indexers: list :param indexers: POST array of indexers to run after downloading (indexers[] = first, indexers[] = second, ...) - :type func: str - :param func: Allowed values: 'download', Download and index; 'index', Index only - :rtype: dict :return: dict( status: 'ok', job: ) If error: dict( status: 'error', error: ) """ - payload = {} + payload: Dict[str, Any] = {} if source: payload["source"] = source if func: diff -Nru python-bioblend-0.18.0/bioblend/galaxy/groups/__init__.py python-bioblend-1.0.0/bioblend/galaxy/groups/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/groups/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/groups/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,27 @@ """ Contains possible interactions with the Galaxy Groups """ +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class GroupsClient(Client): module = "groups" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_groups(self): + def get_groups(self) -> List[Dict[str, Any]]: """ Get all (not deleted) groups. @@ -29,7 +40,7 @@ """ return self._get() - def show_group(self, group_id): + def show_group(self, group_id: str) -> Dict[str, Any]: """ Get details of a given group. @@ -49,7 +60,9 @@ """ return self._get(id=group_id) - def create_group(self, group_name, user_ids=None, role_ids=None): + def create_group( + self, group_name: str, user_ids: Optional[List[str]] = None, role_ids: Optional[List[str]] = None + ) -> List[Dict[str, Any]]: """ Create a new group. @@ -78,7 +91,13 @@ payload = {"name": group_name, "user_ids": user_ids, "role_ids": role_ids} return self._post(payload) - def update_group(self, group_id, group_name=None, user_ids=None, role_ids=None): + def update_group( + self, + group_id: str, + group_name: Optional[str] = None, + user_ids: Optional[List[str]] = None, + role_ids: Optional[List[str]] = None, + ) -> None: """ Update a group. @@ -107,7 +126,7 @@ payload = {"name": group_name, "user_ids": user_ids, "role_ids": role_ids} return self._put(payload=payload, id=group_id) - def get_group_users(self, group_id): + def get_group_users(self, group_id: str) -> List[Dict[str, Any]]: """ Get the list of users associated to the given group. @@ -120,7 +139,7 @@ url = self._make_url(group_id) + "/users" return self._get(url=url) - def get_group_roles(self, group_id): + def get_group_roles(self, group_id: str) -> List[Dict[str, Any]]: """ Get the list of roles associated to the given group. @@ -133,7 +152,7 @@ url = self._make_url(group_id) + "/roles" return self._get(url=url) - def add_group_user(self, group_id, user_id): + def add_group_user(self, group_id: str, user_id: str) -> Dict[str, Any]: """ Add a user to the given group. 
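A short sketch of the group management calls above (``gi`` is a ``GalaxyInstance``; user IDs are placeholders, and it assumes the dicts returned by ``create_group()`` carry the new group's ``id``)::

    created = gi.groups.create_group("lab members", user_ids=["<user-id>"])
    group_id = created[0]["id"]
    gi.groups.add_group_user(group_id, "<another-user-id>")
    members = gi.groups.get_group_users(group_id)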
@@ -149,7 +168,7 @@ url = "/".join((self._make_url(group_id), "users", user_id)) return self._put(url=url) - def add_group_role(self, group_id, role_id): + def add_group_role(self, group_id: str, role_id: str) -> Dict[str, Any]: """ Add a role to the given group. @@ -165,7 +184,7 @@ url = "/".join((self._make_url(group_id), "roles", role_id)) return self._put(url=url) - def delete_group_user(self, group_id, user_id): + def delete_group_user(self, group_id: str, user_id: str) -> Dict[str, Any]: """ Remove a user from the given group. @@ -181,7 +200,7 @@ url = "/".join((self._make_url(group_id), "users", user_id)) return self._delete(url=url) - def delete_group_role(self, group_id, role_id): + def delete_group_role(self, group_id: str, role_id: str) -> Dict[str, Any]: """ Remove a role from the given group. diff -Nru python-bioblend-0.18.0/bioblend/galaxy/histories/__init__.py python-bioblend-1.0.0/bioblend/galaxy/histories/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/histories/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/histories/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -5,25 +5,42 @@ import re import sys import time +import typing import webbrowser -from typing import List +from typing import ( + Any, + Dict, + IO, + List, + Optional, + overload, + Pattern, + Union, +) from urllib.parse import urljoin +from typing_extensions import Literal + import bioblend from bioblend import ConnectionError from bioblend.galaxy.client import Client +from bioblend.galaxy.dataset_collections import CollectionDescription from bioblend.util import attach_file +if typing.TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + log = logging.getLogger(__name__) class HistoryClient(Client): + gi: "GalaxyInstance" module = "histories" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def create_history(self, name=None): + def create_history(self, name: Optional[str] = None) -> Dict[str, Any]: """ Create a new history, optionally setting the ``name``. @@ -38,30 +55,49 @@ payload["name"] = name return self._post(payload) - def import_history(self, file_path=None, url=None): + def import_history(self, file_path: Optional[str] = None, url: Optional[str] = None) -> Dict[str, Any]: """ Import a history from an archive on disk or a URL. :type file_path: str :param file_path: Path to exported history archive on disk. 
+ :type url: str :param url: URL for an exported history archive + + :rtype: dict + :return: Dictionary containing information about the imported history """ if file_path: archive_file = attach_file(file_path) - payload = dict(archive_source="", archive_file=archive_file, archive_type="file") + payload: Dict[str, Any] = { + "archive_source": "", + "archive_file": archive_file, + "archive_type": "file", + } else: - payload = dict(archive_source=url, archive_type="url") + payload = { + "archive_source": url, + "archive_type": "url", + } return self._post(payload=payload, files_attached=file_path is not None) - def _get_histories(self, name=None, deleted=False, filter_user_published=None, get_all_published=False, slug=None): + def _get_histories( + self, + name: Optional[str] = None, + deleted: bool = False, + filter_user_published: Optional[bool] = None, + get_all_published: bool = False, + slug: Optional[str] = None, + all: Optional[bool] = False, + ) -> List[Dict[str, Any]]: """ Hidden method to be used by both get_histories() and get_published_histories() """ assert not (filter_user_published is not None and get_all_published) - params = {} + params: Dict[str, Any] = {} if deleted: params.setdefault("q", []).append("deleted") params.setdefault("qv", []).append(deleted) @@ -71,6 +107,8 @@ if slug is not None: params.setdefault("q", []).append("slug") params.setdefault("qv", []).append(slug) + if all: + params["all"] = True url = "/".join((self._make_url(), "published")) if get_all_published else None histories = self._get(url=url, params=params) @@ -79,7 +117,15 @@ histories = [_ for _ in histories if _["name"] == name] return histories - def get_histories(self, history_id=None, name=None, deleted=False, published=None, slug=None): + def get_histories( + self, + history_id: Optional[str] = None, + name: Optional[str] = None, + deleted: bool = False, + published: Optional[bool] = None, + slug: Optional[str] = None, + all: Optional[bool] = False, + ) -> List[Dict[str, Any]]: """ Get all histories, or select a subset by specifying optional arguments for filtering (e.g. a history name). @@ -101,6 +147,11 @@ :type slug: str :param slug: History slug to filter on + :type all: bool + :param all: Whether to include histories from other users. This + parameter works only on Galaxy 20.01 or later and can be specified + only if the user is a Galaxy admin. + :rtype: list :return: List of history dicts. @@ -113,10 +164,12 @@ "The history_id parameter has been removed, use the show_history() method to view details of a history for which you know the ID.", ) return self._get_histories( - name=name, deleted=deleted, filter_user_published=published, get_all_published=False, slug=slug + name=name, deleted=deleted, filter_user_published=published, get_all_published=False, slug=slug, all=all ) - def get_published_histories(self, name=None, deleted=False, slug=None): + def get_published_histories( + self, name: Optional[str] = None, deleted: bool = False, slug: Optional[str] = None + ) -> List[Dict[str, Any]]: """ Get all published histories (by any user), or select a subset by specifying optional arguments for filtering (e.g. a history name). @@ -138,7 +191,48 @@ name=name, deleted=deleted, filter_user_published=None, get_all_published=True, slug=slug ) - def show_history(self, history_id, contents=False, deleted=None, visible=None, details=None, types=None): + @overload + def show_history( + self, + history_id: str, + contents: Literal[False] = False, + ) -> Dict[str, Any]: + ... 
+ + @overload + def show_history( + self, + history_id: str, + contents: Literal[True], + deleted: Optional[bool] = None, + visible: Optional[bool] = None, + details: Optional[str] = None, + types: Optional[List[str]] = None, + ) -> List[Dict[str, Any]]: + ... + + # Fallback in case the caller provides a regular bool as contents + @overload + def show_history( + self, + history_id: str, + contents: bool = False, + deleted: Optional[bool] = None, + visible: Optional[bool] = None, + details: Optional[str] = None, + types: Optional[List[str]] = None, + ) -> Union[Dict[str, Any], List[Dict[str, Any]]]: + pass + + def show_history( + self, + history_id: str, + contents: bool = False, + deleted: Optional[bool] = None, + visible: Optional[bool] = None, + details: Optional[str] = None, + types: Optional[List[str]] = None, + ) -> Union[Dict[str, Any], List[Dict[str, Any]]]: """ Get details of a given history. By default, just get the history meta information. @@ -182,7 +276,7 @@ more extensive functionality for filtering and ordering the results. """ - params = {} + params: Dict[str, Union[bool, list, str]] = {} if contents: if details: params["details"] = details @@ -194,7 +288,7 @@ params["types"] = types return self._get(id=history_id, contents=contents, params=params) - def delete_dataset(self, history_id, dataset_id, purge=False): + def delete_dataset(self, history_id: str, dataset_id: str, purge: bool = False) -> None: """ Mark corresponding dataset as deleted. @@ -221,7 +315,7 @@ payload["purge"] = purge self._delete(payload=payload, url=url) - def delete_dataset_collection(self, history_id, dataset_collection_id): + def delete_dataset_collection(self, history_id: str, dataset_collection_id: str) -> None: """ Mark corresponding dataset collection as deleted. @@ -237,7 +331,7 @@ url = "/".join((self._make_url(history_id, contents=True), "dataset_collections", dataset_collection_id)) self._delete(url=url) - def show_dataset(self, history_id, dataset_id): + def show_dataset(self, history_id: str, dataset_id: str) -> Dict[str, Any]: """ Get details about a given history dataset. @@ -253,7 +347,7 @@ url = "/".join((self._make_url(history_id, contents=True), dataset_id)) return self._get(url=url) - def show_dataset_collection(self, history_id, dataset_collection_id): + def show_dataset_collection(self, history_id: str, dataset_collection_id: str) -> Dict[str, Any]: """ Get details about a given history dataset collection. @@ -269,7 +363,9 @@ url = "/".join((self._make_url(history_id, contents=True), "dataset_collections", dataset_collection_id)) return self._get(url=url) - def show_matching_datasets(self, history_id, name_filter=None): + def show_matching_datasets( + self, history_id: str, name_filter: Optional[Union[str, Pattern[str]]] = None + ) -> List[Dict[str, Any]]: """ Get dataset details for matching datasets within a history. 
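A hedged sketch of the listing options and ``show_history()`` overloads above (``gi`` is a ``GalaxyInstance``; the history ID and name filter are placeholders)::

    # Histories of the current user, optionally filtered by name
    mine = gi.histories.get_histories(name="RNA-seq")

    # Galaxy 20.01 or later, admin only: include histories of other users
    everything = gi.histories.get_histories(all=True)

    # contents=False returns the history dict, contents=True the list of its contents
    meta = gi.histories.show_history("<history-id>")
    items = gi.histories.show_history("<history-id>", contents=True, deleted=False)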
@@ -293,7 +389,7 @@ if name_filter is None or name_filter.match(h["name"]) ] - def show_dataset_provenance(self, history_id, dataset_id, follow=False): + def show_dataset_provenance(self, history_id: str, dataset_id: str, follow: bool = False) -> Dict[str, Any]: """ Get details related to how dataset was created (``id``, ``job_id``, ``tool_id``, ``stdout``, ``stderr``, ``parameters``, ``inputs``, @@ -329,9 +425,10 @@ 'uuid': '5c0c43f5-8d93-44bd-939d-305e82f213c6'} """ url = "/".join((self._make_url(history_id, contents=True), dataset_id, "provenance")) - return self._get(url=url) + params = {"follow": follow} + return self._get(url=url, params=params) - def update_history(self, history_id, **kwds): + def update_history(self, history_id: str, **kwargs: Any) -> Dict[str, Any]: """ Update history metadata information. Some of the attributes that can be modified are documented below. @@ -366,9 +463,9 @@ .. versionchanged:: 0.8.0 Changed the return value from the status code (type int) to a dict. """ - return self._put(payload=kwds, id=history_id) + return self._put(payload=kwargs, id=history_id) - def update_dataset(self, history_id, dataset_id, **kwds): + def update_dataset(self, history_id: str, dataset_id: str, **kwargs: Any) -> Dict[str, Any]: """ Update history dataset metadata. Some of the attributes that can be modified are documented below. @@ -408,9 +505,9 @@ Changed the return value from the status code (type int) to a dict. """ url = "/".join((self._make_url(history_id, contents=True), dataset_id)) - return self._put(payload=kwds, url=url) + return self._put(payload=kwargs, url=url) - def update_dataset_collection(self, history_id, dataset_collection_id, **kwds): + def update_dataset_collection(self, history_id: str, dataset_collection_id: str, **kwargs: Any) -> Dict[str, Any]: """ Update history dataset collection metadata. Some of the attributes that can be modified are documented below. @@ -438,9 +535,9 @@ Changed the return value from the status code (type int) to a dict. """ url = "/".join((self._make_url(history_id, contents=True), "dataset_collections", dataset_collection_id)) - return self._put(payload=kwds, url=url) + return self._put(payload=kwargs, url=url) - def create_history_tag(self, history_id, tag): + def create_history_tag(self, history_id: str, tag: str) -> Dict[str, Any]: """ Create history tag @@ -460,11 +557,11 @@ 'user_value': None} """ # empty payload since we are adding the new tag using the url - payload = {} + payload: Dict[str, Any] = {} url = "/".join((self._make_url(history_id), "tags", tag)) return self._post(payload, url=url) - def upload_dataset_from_library(self, history_id, lib_dataset_id): + def upload_dataset_from_library(self, history_id: str, lib_dataset_id: str) -> Dict[str, Any]: """ Upload a dataset into the history from a library. 
Requires the library dataset ID, which can be obtained from the library @@ -486,7 +583,9 @@ } return self._post(payload, id=history_id, contents=True) - def create_dataset_collection(self, history_id, collection_description): + def create_dataset_collection( + self, history_id: str, collection_description: Union["CollectionDescription", Dict[str, Any]] + ) -> Dict[str, Any]: """ Create a new dataset collection @@ -509,19 +608,20 @@ :rtype: dict :return: Information about the new HDCA """ - try: - collection_description = collection_description.to_dict() - except AttributeError: - pass + if isinstance(collection_description, CollectionDescription): + collection_description_dict = collection_description.to_dict() + else: + collection_description_dict = collection_description + payload = dict( - name=collection_description["name"], + name=collection_description_dict["name"], type="dataset_collection", - collection_type=collection_description["collection_type"], - element_identifiers=collection_description["element_identifiers"], + collection_type=collection_description_dict["collection_type"], + element_identifiers=collection_description_dict["element_identifiers"], ) return self._post(payload, id=history_id, contents=True) - def delete_history(self, history_id, purge=False): + def delete_history(self, history_id: str, purge: bool = False) -> Dict[str, Any]: """ Delete a history. @@ -547,7 +647,7 @@ payload["purge"] = purge return self._delete(payload=payload, id=history_id) - def undelete_history(self, history_id): + def undelete_history(self, history_id: str) -> str: """ Undelete a history @@ -560,7 +660,7 @@ url = self._make_url(history_id, deleted=True) + "/undelete" return self._post(url=url) - def get_status(self, history_id): + def get_status(self, history_id: str) -> Dict[str, Any]: """ Returns the state of this history @@ -573,9 +673,12 @@ 'state_details' = Contains individual statistics for various dataset states. 'percent_complete' = The overall number of datasets processed to completion. """ - state = {} history = self.show_history(history_id) - state["state"] = history["state"] + + state: Dict[str, Any] = { + "state": history["state"], + } + if history.get("state_details") is not None: state["state_details"] = history["state_details"] total_complete = sum(history["state_details"].values()) @@ -585,7 +688,7 @@ state["percent_complete"] = 0 return state - def get_most_recently_used_history(self): + def get_most_recently_used_history(self) -> Dict[str, Any]: """ Returns the current user's most recently used history (not deleted). @@ -596,8 +699,14 @@ return self._get(url=url) def export_history( - self, history_id, gzip=True, include_hidden=False, include_deleted=False, wait=False, maxwait=None - ): + self, + history_id: str, + gzip: bool = True, + include_hidden: bool = False, + include_deleted: bool = False, + wait: bool = False, + maxwait: Optional[float] = None, + ) -> str: """ Start a job to create an export archive for the given history. @@ -663,7 +772,9 @@ jeha_id = r["download_url"].rsplit("/", 1)[-1] return jeha_id - def download_history(self, history_id, jeha_id, outf, chunk_size=bioblend.CHUNK_SIZE): + def download_history( + self, history_id: str, jeha_id: str, outf: IO[bytes], chunk_size: int = bioblend.CHUNK_SIZE + ) -> None: """ Download a history export archive. Use :meth:`export_history` to create an export. 
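A minimal sketch of the export and download round trip typed above (``gi`` is a ``GalaxyInstance``; the history ID is a placeholder)::

    # Returns the export archive ID once the export has been prepared
    jeha_id = gi.histories.export_history("<history-id>", wait=True)
    with open("history_export.tar.gz", "wb") as outf:
        gi.histories.download_history("<history-id>", jeha_id, outf)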
@@ -690,7 +801,9 @@ for chunk in r.iter_content(chunk_size): outf.write(chunk) - def copy_dataset(self, history_id, dataset_id, source="hda"): + def copy_dataset( + self, history_id: str, dataset_id: str, source: Literal["hda", "library", "library_folder"] = "hda" + ) -> Dict[str, Any]: """ Copy a dataset to a history. @@ -708,7 +821,9 @@ """ return self.copy_content(history_id, dataset_id, source) - def copy_content(self, history_id, content_id, source="hda"): + def copy_content( + self, history_id: str, content_id: str, source: Literal["hda", "hdca", "library", "library_folder"] = "hda" + ) -> Dict[str, Any]: """ Copy existing content (e.g. a dataset) to a history. @@ -737,7 +852,7 @@ url = self._make_url(history_id, contents=True) return self._post(payload=payload, url=url) - def open_history(self, history_id): + def open_history(self, history_id: str) -> None: """ Open Galaxy in a new tab of the default web browser and switch to the specified history. @@ -758,7 +873,7 @@ url = urljoin(self.gi.base_url, f"history/switch_to_history?hist_id={history_id}") webbrowser.open_new_tab(url) - def get_extra_files(self, history_id: str, dataset_id: str) -> List[dict]: + def get_extra_files(self, history_id: str, dataset_id: str) -> List[str]: """ Get extra files associated with a composite dataset, or an empty list if there are none. diff -Nru python-bioblend-0.18.0/bioblend/galaxy/__init__.py python-bioblend-1.0.0/bioblend/galaxy/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,6 +1,8 @@ """ A base representation of an instance of Galaxy """ +from typing import Optional + from bioblend.galaxy import ( config, dataset_collections, @@ -25,12 +27,18 @@ visual, workflows, ) -from bioblend.galaxy.client import Client from bioblend.galaxyclient import GalaxyClient class GalaxyInstance(GalaxyClient): - def __init__(self, url, key=None, email=None, password=None, verify=True): + def __init__( + self, + url: str, + key: Optional[str] = None, + email: Optional[str] = None, + password: Optional[str] = None, + verify: bool = True, + ) -> None: """ A base representation of a connection to a Galaxy instance, identified by the server URL and user credentials. 
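A short sketch of the typed ``GalaxyInstance`` constructor above (the URL and credentials are placeholders; either an API key or an email and password pair can be supplied)::

    from bioblend.galaxy import GalaxyInstance

    # Authenticate with an API key ...
    gi = GalaxyInstance(url="https://galaxy.example.org", key="<api-key>")
    # ... or with the account's email and password
    gi = GalaxyInstance(url="https://galaxy.example.org", email="<email>", password="<password>")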
@@ -97,23 +105,7 @@ self.folders = folders.FoldersClient(self) self.tool_dependencies = tool_dependencies.ToolDependenciesClient(self) - @property - def max_get_attempts(self): - return Client.max_get_retries() - - @max_get_attempts.setter - def max_get_attempts(self, v): - Client.set_max_get_retries(v) - - @property - def get_retry_delay(self): - return Client.get_retry_delay() - - @get_retry_delay.setter - def get_retry_delay(self, v): - Client.set_get_retry_delay(v) - - def __repr__(self): + def __repr__(self) -> str: """ A nicer representation of this GalaxyInstance object """ diff -Nru python-bioblend-0.18.0/bioblend/galaxy/invocations/__init__.py python-bioblend-1.0.0/bioblend/galaxy/invocations/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/invocations/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/invocations/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -3,13 +3,23 @@ """ import logging import time -from typing import Optional +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, +) from bioblend import ( CHUNK_SIZE, TimeoutException, ) from bioblend.galaxy.client import Client +from bioblend.galaxy.workflows import InputsBy + +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance log = logging.getLogger(__name__) @@ -18,21 +28,22 @@ class InvocationClient(Client): + gi: "GalaxyInstance" module = "invocations" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) def get_invocations( self, - workflow_id=None, - history_id=None, - user_id=None, - include_terminal=True, - limit=None, - view="collection", - step_details=False, - ): + workflow_id: Optional[str] = None, + history_id: Optional[str] = None, + user_id: Optional[str] = None, + include_terminal: bool = True, + limit: Optional[int] = None, + view: str = "collection", + step_details: bool = False, + ) -> List[Dict[str, Any]]: """ Get all workflow invocations, or select a subset by specifying optional arguments for filtering (e.g. a workflow ID). @@ -85,7 +96,7 @@ params["limit"] = limit return self._get(params=params) - def show_invocation(self, invocation_id): + def show_invocation(self, invocation_id: str) -> Dict[str, Any]: """ Get a workflow invocation dictionary representing the scheduling of a workflow. This dictionary may be sparse at first (missing inputs and @@ -143,9 +154,9 @@ import_inputs_to_history: bool = False, replacement_params: Optional[dict] = None, allow_tool_state_corrections: bool = False, - inputs_by: Optional[str] = None, + inputs_by: Optional[InputsBy] = None, parameters_normalized: bool = False, - ): + ) -> Dict[str, Any]: """ Rerun a workflow invocation. For more extensive documentation of all parameters, see the ``gi.workflows.invoke_workflow()`` method. @@ -235,7 +246,7 @@ url = "/".join((self.gi.url, "workflows", workflow_id, "invocations")) return self.gi.make_post_request(url=url, payload=payload, params=api_params) - def cancel_invocation(self, invocation_id): + def cancel_invocation(self, invocation_id: str) -> Dict[str, Any]: """ Cancel the scheduling of a workflow. @@ -248,7 +259,7 @@ url = self._make_url(invocation_id) return self._delete(url=url) - def show_invocation_step(self, invocation_id, step_id): + def show_invocation_step(self, invocation_id: str, step_id: str) -> Dict[str, Any]: """ See the details of a particular workflow invocation step. 
@@ -276,7 +287,7 @@ url = self._invocation_step_url(invocation_id, step_id) return self._get(url=url) - def run_invocation_step_action(self, invocation_id, step_id, action): + def run_invocation_step_action(self, invocation_id: str, step_id: str, action: Any) -> Dict[str, Any]: """Execute an action for an active workflow invocation step. The nature of this action and what is expected will vary based on the the type of workflow step (the only currently valid action is True/False @@ -299,7 +310,7 @@ payload = {"action": action} return self._put(payload=payload, url=url) - def get_invocation_summary(self, invocation_id): + def get_invocation_summary(self, invocation_id: str) -> Dict[str, Any]: """ Get a summary of an invocation, stating the number of jobs which succeed, which are paused and which have errored. @@ -319,7 +330,7 @@ url = self._make_url(invocation_id) + "/jobs_summary" return self._get(url=url) - def get_invocation_step_jobs_summary(self, invocation_id): + def get_invocation_step_jobs_summary(self, invocation_id: str) -> List[Dict[str, Any]]: """ Get a detailed summary of an invocation, listing all jobs with their job IDs and current states. @@ -347,7 +358,7 @@ url = self._make_url(invocation_id) + "/step_jobs_summary" return self._get(url=url) - def get_invocation_report(self, invocation_id): + def get_invocation_report(self, invocation_id: str) -> Dict[str, Any]: """ Get a Markdown report for an invocation. @@ -368,7 +379,7 @@ url = self._make_url(invocation_id) + "/report" return self._get(url=url) - def get_invocation_report_pdf(self, invocation_id, file_path, chunk_size=CHUNK_SIZE): + def get_invocation_report_pdf(self, invocation_id: str, file_path: str, chunk_size: int = CHUNK_SIZE) -> None: """ Get a PDF report for an invocation. @@ -388,7 +399,7 @@ for chunk in r.iter_content(chunk_size): outf.write(chunk) - def get_invocation_biocompute_object(self, invocation_id): + def get_invocation_biocompute_object(self, invocation_id: str) -> Dict[str, Any]: """ Get a BioCompute object for an invocation. @@ -401,7 +412,9 @@ url = self._make_url(invocation_id) + "/biocompute" return self._get(url=url) - def wait_for_invocation(self, invocation_id, maxwait=12000, interval=3, check=True): + def wait_for_invocation( + self, invocation_id: str, maxwait: float = 12000, interval: float = 3, check: bool = True + ) -> Dict[str, Any]: """ Wait until an invocation is in a terminal state. 
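A hedged sketch of the invocation monitoring helpers above (``gi`` is a ``GalaxyInstance``; the invocation ID is a placeholder, and it assumes the dict returned by ``wait_for_invocation()`` carries an ``id`` key)::

    invocation = gi.invocations.wait_for_invocation("<invocation-id>", maxwait=3600)
    summary = gi.invocations.get_invocation_summary(invocation["id"])
    # Save the invocation report as a PDF file
    gi.invocations.get_invocation_report_pdf(invocation["id"], "invocation_report.pdf")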
@@ -443,7 +456,7 @@ f"Invocation {invocation_id} is still in non-terminal state {state} after {maxwait} s" ) - def _invocation_step_url(self, invocation_id, step_id): + def _invocation_step_url(self, invocation_id: str, step_id: str) -> str: return "/".join((self._make_url(invocation_id), "steps", step_id)) diff -Nru python-bioblend-0.18.0/bioblend/galaxy/jobs/__init__.py python-bioblend-1.0.0/bioblend/galaxy/jobs/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/jobs/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/jobs/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -4,16 +4,24 @@ import logging import time from typing import ( + Any, + Dict, List, Optional, + TYPE_CHECKING, ) +from typing_extensions import Literal + from bioblend import TimeoutException from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + log = logging.getLogger(__name__) -JOB_TERMINAL_STATES = {"deleted", "error", "ok"} +JOB_TERMINAL_STATES = {"deleted", "deleting", "error", "ok"} # Job non-terminal states are: 'deleted_new', 'failed', 'new', 'paused', # 'queued', 'resubmitted', 'running', 'upload', 'waiting' @@ -21,24 +29,24 @@ class JobsClient(Client): module = "jobs" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) def get_jobs( self, - state=None, - history_id=None, - invocation_id=None, - tool_id=None, - workflow_id=None, - user_id=None, - date_range_min=None, - date_range_max=None, - limit=500, - offset=0, - user_details=False, - order_by=None, - ): + state: Optional[str] = None, + history_id: Optional[str] = None, + invocation_id: Optional[str] = None, + tool_id: Optional[str] = None, + workflow_id: Optional[str] = None, + user_id: Optional[str] = None, + date_range_min: Optional[str] = None, + date_range_max: Optional[str] = None, + limit: int = 500, + offset: int = 0, + user_details: bool = False, + order_by: Literal["create_time", "update_time"] = "update_time", + ) -> List[Dict[str, Any]]: """ Get all jobs, or select a subset by specifying optional arguments for filtering (e.g. a state). @@ -109,10 +117,10 @@ 'update_time': '2014-03-01T16:05:39.558458'}] .. note:: - The following options work only on Galaxy 21.05 or later: ``user_id``, + The following parameters work only on Galaxy 21.05 or later: ``user_id``, ``limit``, ``offset``, ``workflow_id``, ``invocation_id``. """ - params = {"limit": limit, "offset": offset} + params: Dict[str, Any] = {"limit": limit, "offset": offset} if state: params["state"] = state if history_id: @@ -135,7 +143,7 @@ params["order_by"] = order_by return self._get(params=params) - def show_job(self, job_id, full_details=False): + def show_job(self, job_id: str, full_details: bool = False) -> Dict[str, Any]: """ Get details of a given job of the current user. @@ -170,7 +178,7 @@ return self._get(id=job_id, params=params) - def _build_for_rerun(self, job_id): + def _build_for_rerun(self, job_id: str) -> Dict[str, Any]: """ Get details of a given job that can be used to rerun the corresponding tool. @@ -184,7 +192,13 @@ url = "/".join((self._make_url(job_id), "build_for_rerun")) return self._get(url=url) - def rerun_job(self, job_id, remap=False, tool_inputs_update=None, history_id=None): + def rerun_job( + self, + job_id: str, + remap: bool = False, + tool_inputs_update: Optional[Dict[str, Any]] = None, + history_id: Optional[str] = None, + ) -> Dict[str, Any]: """ Rerun a job. 
@@ -223,14 +237,13 @@ raise ValueError("remap was set to True, but this job is not remappable.") job_inputs["rerun_remap_job_id"] = job_id - def update_inputs(inputs, tool_inputs_update): - # recursively update inputs with tool_inputs_update + def update_inputs(inputs: Dict[str, Any], tool_inputs_update: Dict[str, Any]) -> None: + """Recursively update inputs with tool_inputs_update""" for input_param, input_value in tool_inputs_update.items(): - if type(input_value) is dict: + if isinstance(input_value, dict): update_inputs(inputs[input_param], input_value) else: inputs[input_param] = input_value - return inputs if tool_inputs_update: update_inputs(job_inputs, tool_inputs_update) @@ -260,7 +273,7 @@ """ return self.show_job(job_id).get("state", "") - def search_jobs(self, tool_id: str, inputs: dict, state: Optional[str] = None) -> List[dict]: + def search_jobs(self, tool_id: str, inputs: Dict[str, Any], state: Optional[str] = None) -> List[Dict[str, Any]]: """ Return jobs matching input parameters. @@ -297,7 +310,7 @@ url = self._make_url() + "/search" return self._post(url=url, payload=job_info) - def get_metrics(self, job_id: str) -> List[dict]: + def get_metrics(self, job_id: str) -> List[Dict[str, Any]]: """ Return job metrics for a given job. @@ -317,7 +330,7 @@ url = self._make_url(module_id=job_id) + "/metrics" return self._get(url=url) - def cancel_job(self, job_id: str): + def cancel_job(self, job_id: str) -> bool: """ Cancel a job, deleting output datasets. @@ -330,7 +343,7 @@ """ return self._delete(id=job_id) - def report_error(self, job_id: str, dataset_id: str, message: str, email: str = None) -> dict: + def report_error(self, job_id: str, dataset_id: str, message: str, email: Optional[str] = None) -> Dict[str, Any]: """ Report an error for a given job and dataset to the server administrators. @@ -363,7 +376,7 @@ url = self._make_url(module_id=job_id) + "/error" return self._post(url=url, payload=payload) - def get_common_problems(self, job_id: str) -> dict: + def get_common_problems(self, job_id: str) -> Dict[str, Any]: """ Query inputs and jobs for common potential problems that might have resulted in job failure. @@ -380,7 +393,7 @@ url = self._make_url(module_id=job_id) + "/common_problems" return self._get(url=url) - def get_inputs(self, job_id: str) -> List[dict]: + def get_inputs(self, job_id: str) -> List[Dict[str, Any]]: """ Get dataset inputs used by a job. @@ -393,7 +406,7 @@ url = self._make_url(module_id=job_id) + "/inputs" return self._get(url=url) - def get_outputs(self, job_id: str) -> List[dict]: + def get_outputs(self, job_id: str) -> List[Dict[str, Any]]: """ Get dataset outputs produced by a job. @@ -406,15 +419,15 @@ url = self._make_url(module_id=job_id) + "/outputs" return self._get(url=url) - def resume_job(self, job_id: str) -> dict: + def resume_job(self, job_id: str) -> List[Dict[str, Any]]: """ Resume a job if it is paused. :type job_id: str :param job_id: job ID - :rtype: dict - :return: dict containing output dataset associations + :rtype: list of dicts + :return: list of dictionaries containing output dataset associations .. note:: This method works only on Galaxy 18.09 or later. @@ -422,7 +435,7 @@ url = self._make_url(module_id=job_id) + "/resume" return self._put(url=url) - def get_destination_params(self, job_id: str) -> dict: + def get_destination_params(self, job_id: str) -> Dict[str, Any]: """ Get destination parameters for a job, describing the environment and location where the job is run. 
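# Sketch of rerun_job() with a nested tool_inputs_update, which the recursive
# update_inputs() helper above merges into the original job's inputs. The job ID
# and the "adv"/"min_length" parameter names are purely illustrative.
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://galaxy.example.org", key="<api-key>")
rerun_info = gi.jobs.rerun_job(
    "<job-id>",
    remap=False,
    # Only the leaf values listed here are overwritten; every other parameter
    # keeps the value recorded for the original job.
    tool_inputs_update={"adv": {"min_length": "50"}},
)
print(rerun_info)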
@@ -456,7 +469,7 @@ response = self._get(url=url) return response["active"] - def update_job_lock(self, active=False) -> bool: + def update_job_lock(self, active: bool = False) -> bool: """ Update the job lock status by setting ``active`` to either ``True`` or ``False``. If ``True``, all job dispatching will @@ -476,7 +489,9 @@ response = self._put(url=url, payload=payload) return response["active"] - def wait_for_job(self, job_id, maxwait=12000, interval=3, check=True): + def wait_for_job( + self, job_id: str, maxwait: float = 12000, interval: float = 3, check: bool = True + ) -> Dict[str, Any]: """ Wait until a job is in a terminal state. diff -Nru python-bioblend-0.18.0/bioblend/galaxy/libraries/__init__.py python-bioblend-1.0.0/bioblend/galaxy/libraries/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/libraries/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/libraries/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -4,6 +4,15 @@ import logging import time import warnings +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, +) + +from typing_extensions import Literal from bioblend.galaxy.client import Client from bioblend.galaxy.datasets import ( @@ -12,16 +21,23 @@ ) from bioblend.util import attach_file +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + +LinkDataOnly = Literal["copy_files", "link_to_files"] + log = logging.getLogger(__name__) class LibraryClient(Client): module = "libraries" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def create_library(self, name, description=None, synopsis=None): + def create_library( + self, name: str, description: Optional[str] = None, synopsis: Optional[str] = None + ) -> Dict[str, Any]: """ Create a data library with the properties defined in the arguments. @@ -49,7 +65,7 @@ payload["synopsis"] = synopsis return self._post(payload) - def delete_library(self, library_id): + def delete_library(self, library_id: str) -> Dict[str, Any]: """ Delete a data library. @@ -66,14 +82,14 @@ """ return self._delete(id=library_id) - def _show_item(self, library_id, item_id): + def _show_item(self, library_id: str, item_id: str) -> Dict[str, Any]: """ Get details about a given library item. """ url = "/".join((self._make_url(library_id, contents=True), item_id)) return self._get(url=url) - def delete_library_dataset(self, library_id, dataset_id, purged=False): + def delete_library_dataset(self, library_id: str, dataset_id: str, purged: bool = False) -> Dict[str, Any]: """ Delete a library dataset in a data library. @@ -98,7 +114,7 @@ url = "/".join((self._make_url(library_id, contents=True), dataset_id)) return self._delete(payload={"purged": purged}, url=url) - def update_library_dataset(self, dataset_id, **kwds): + def update_library_dataset(self, dataset_id: str, **kwargs: Any) -> Dict[str, Any]: """ Update library dataset metadata. Some of the attributes that can be modified are documented below. @@ -125,9 +141,9 @@ :return: details of the updated dataset """ url = "/".join((self._make_url(), "datasets", dataset_id)) - return self._patch(payload=kwds, url=url) + return self._patch(payload=kwargs, url=url) - def show_dataset(self, library_id, dataset_id): + def show_dataset(self, library_id: str, dataset_id: str) -> Dict[str, Any]: """ Get details about a given library dataset. The required ``library_id`` can be obtained from the datasets's library content details. 
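# Hedged example for the library client calls above: create a data library and
# patch one dataset's metadata. The names, dataset ID and credentials are placeholders.
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://galaxy.example.org", key="<api-key>")
library = gi.libraries.create_library(
    name="RNA-seq inputs",
    description="Raw FASTQ files",
    synopsis="Example library",
)
# update_library_dataset() forwards its keyword arguments as the PATCH payload.
gi.libraries.update_library_dataset("<library-dataset-id>", name="sample_1.fastq", genome_build="hg38")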
@@ -144,7 +160,9 @@ """ return self._show_item(library_id, dataset_id) - def wait_for_dataset(self, library_id, dataset_id, maxwait=12000, interval=3): + def wait_for_dataset( + self, library_id: str, dataset_id: str, maxwait: float = 12000, interval: float = 3 + ) -> Dict[str, Any]: """ Wait until the library dataset state is terminal ('ok', 'empty', 'error', 'discarded' or 'failed_metadata'). @@ -191,7 +209,7 @@ f"Waited too long for dataset {dataset_id} in library {library_id} to complete" ) - def show_folder(self, library_id, folder_id): + def show_folder(self, library_id: str, folder_id: str) -> Dict[str, Any]: """ Get details about a given folder. The required ``folder_id`` can be obtained from the folder's library content details. @@ -207,7 +225,7 @@ """ return self._show_item(library_id, folder_id) - def _get_root_folder_id(self, library_id): + def _get_root_folder_id(self, library_id: str) -> str: """ Find the root folder (i.e. '/') of a library. @@ -217,7 +235,9 @@ l = self.show_library(library_id=library_id) return l["root_folder_id"] - def create_folder(self, library_id, folder_name, description=None, base_folder_id=None): + def create_folder( + self, library_id: str, folder_name: str, description: Optional[str] = None, base_folder_id: Optional[str] = None + ) -> List[Dict[str, Any]]: """ Create a folder in a library. @@ -241,15 +261,18 @@ if base_folder_id is None: base_folder_id = self._get_root_folder_id(library_id) # Compose the payload - payload = {} - payload["name"] = folder_name - payload["folder_id"] = base_folder_id - payload["create_type"] = "folder" + payload = { + "name": folder_name, + "folder_id": base_folder_id, + "create_type": "folder", + } if description is not None: payload["description"] = description return self._post(payload, id=library_id, contents=True) - def get_folders(self, library_id, folder_id=None, name=None): + def get_folders( + self, library_id: str, folder_id: Optional[str] = None, name: Optional[str] = None + ) -> List[Dict[str, Any]]: """ Get all the folders in a library, or select a subset by specifying a folder name for filtering. @@ -289,7 +312,9 @@ folders = [_ for _ in library_contents if _["type"] == "folder"] return folders - def get_libraries(self, library_id=None, name=None, deleted=False): + def get_libraries( + self, library_id: Optional[str] = None, name: Optional[str] = None, deleted: Optional[bool] = False + ) -> List[Dict[str, Any]]: """ Get all libraries, or select a subset by specifying optional arguments for filtering (e.g. a library name). @@ -327,7 +352,7 @@ libraries = [_ for _ in libraries if _["name"] == name] return libraries - def show_library(self, library_id, contents=False): + def show_library(self, library_id: str, contents: bool = False) -> Dict[str, Any]: """ Get information about a library. @@ -343,55 +368,64 @@ """ return self._get(id=library_id, contents=contents) - def _do_upload(self, library_id, **keywords): + def _do_upload(self, library_id: str, **kwargs: Any) -> List[Dict[str, Any]]: """ Set up the POST request and do the actual data upload to a data library. This method should not be called directly but instead refer to the methods specific for the desired type of data upload. 
""" - folder_id = keywords.get("folder_id", None) + folder_id = kwargs.get("folder_id") if folder_id is None: folder_id = self._get_root_folder_id(library_id) files_attached = False # Compose the payload dict - payload = {} - payload["folder_id"] = folder_id - payload["file_type"] = keywords.get("file_type", "auto") - payload["dbkey"] = keywords.get("dbkey", "?") - payload["create_type"] = "file" - if keywords.get("roles", None): - payload["roles"] = keywords["roles"] - if keywords.get("link_data_only", None) and keywords["link_data_only"] != "copy_files": + payload = { + "folder_id": folder_id, + "file_type": kwargs.get("file_type", "auto"), + "dbkey": kwargs.get("dbkey", "?"), + "create_type": "file", + "tag_using_filenames": kwargs.get("tag_using_filenames", False), + "preserve_dirs": kwargs.get("preserve_dirs", False), + } + if kwargs.get("roles"): + payload["roles"] = kwargs["roles"] + if kwargs.get("link_data_only") and kwargs["link_data_only"] != "copy_files": payload["link_data_only"] = "link_to_files" - payload["tag_using_filenames"] = keywords.get("tag_using_filenames", False) - if keywords.get("tags"): - payload["tags"] = keywords["tags"] - payload["preserve_dirs"] = keywords.get("preserve_dirs", False) + if kwargs.get("tags"): + payload["tags"] = kwargs["tags"] # upload options - if keywords.get("file_url", None) is not None: + if kwargs.get("file_url") is not None: payload["upload_option"] = "upload_file" - payload["files_0|url_paste"] = keywords["file_url"] - elif keywords.get("pasted_content", None) is not None: + payload["files_0|url_paste"] = kwargs["file_url"] + elif kwargs.get("pasted_content") is not None: payload["upload_option"] = "upload_file" - payload["files_0|url_paste"] = keywords["pasted_content"] - elif keywords.get("server_dir", None) is not None: + payload["files_0|url_paste"] = kwargs["pasted_content"] + elif kwargs.get("server_dir") is not None: payload["upload_option"] = "upload_directory" - payload["server_dir"] = keywords["server_dir"] - elif keywords.get("file_local_path", None) is not None: + payload["server_dir"] = kwargs["server_dir"] + elif kwargs.get("file_local_path") is not None: payload["upload_option"] = "upload_file" - payload["files_0|file_data"] = attach_file(keywords["file_local_path"]) + payload["files_0|file_data"] = attach_file(kwargs["file_local_path"]) files_attached = True - elif keywords.get("filesystem_paths", None) is not None: + elif kwargs.get("filesystem_paths") is not None: payload["upload_option"] = "upload_paths" - payload["filesystem_paths"] = keywords["filesystem_paths"] + payload["filesystem_paths"] = kwargs["filesystem_paths"] try: return self._post(payload, id=library_id, contents=True, files_attached=files_attached) finally: - if payload.get("files_0|file_data", None) is not None: + if payload.get("files_0|file_data") is not None: payload["files_0|file_data"].close() - def upload_file_from_url(self, library_id, file_url, folder_id=None, file_type="auto", dbkey="?", tags=None): + def upload_file_from_url( + self, + library_id: str, + file_url: str, + folder_id: Optional[str] = None, + file_type: str = "auto", + dbkey: str = "?", + tags: Optional[List[str]] = None, + ) -> List[Dict[str, Any]]: """ Upload a file to a library from a URL. 
@@ -421,7 +455,15 @@ library_id, file_url=file_url, folder_id=folder_id, file_type=file_type, dbkey=dbkey, tags=tags ) - def upload_file_contents(self, library_id, pasted_content, folder_id=None, file_type="auto", dbkey="?", tags=None): + def upload_file_contents( + self, + library_id: str, + pasted_content: str, + folder_id: Optional[str] = None, + file_type: str = "auto", + dbkey: str = "?", + tags: Optional[List[str]] = None, + ) -> List[Dict[str, Any]]: """ Upload pasted_content to a data library as a new file. @@ -452,8 +494,14 @@ ) def upload_file_from_local_path( - self, library_id, file_local_path, folder_id=None, file_type="auto", dbkey="?", tags=None - ): + self, + library_id: str, + file_local_path: str, + folder_id: Optional[str] = None, + file_type: str = "auto", + dbkey: str = "?", + tags: Optional[List[str]] = None, + ) -> List[Dict[str, Any]]: """ Read local file contents from file_local_path and upload data to a library. @@ -491,17 +539,17 @@ def upload_file_from_server( self, - library_id, - server_dir, - folder_id=None, - file_type="auto", - dbkey="?", - link_data_only=None, - roles="", - preserve_dirs=False, - tag_using_filenames=False, - tags=None, - ): + library_id: str, + server_dir: str, + folder_id: Optional[str] = None, + file_type: str = "auto", + dbkey: str = "?", + link_data_only: Optional[LinkDataOnly] = None, + roles: str = "", + preserve_dirs: bool = False, + tag_using_filenames: bool = False, + tags: Optional[List[str]] = None, + ) -> List[Dict[str, Any]]: """ Upload all files in the specified subdirectory of the Galaxy library import directory to a library. @@ -569,17 +617,17 @@ def upload_from_galaxy_filesystem( self, - library_id, - filesystem_paths, - folder_id=None, - file_type="auto", - dbkey="?", - link_data_only=None, - roles="", - preserve_dirs=False, - tag_using_filenames=False, - tags=None, - ): + library_id: str, + filesystem_paths: str, + folder_id: Optional[str] = None, + file_type: str = "auto", + dbkey: str = "?", + link_data_only: Optional[LinkDataOnly] = None, + roles: str = "", + preserve_dirs: bool = False, + tag_using_filenames: bool = False, + tags: Optional[List[str]] = None, + ) -> List[Dict[str, Any]]: """ Upload a set of files already present on the filesystem of the Galaxy server to a library. @@ -643,7 +691,9 @@ tags=tags, ) - def copy_from_dataset(self, library_id, dataset_id, folder_id=None, message=""): + def copy_from_dataset( + self, library_id: str, dataset_id: str, folder_id: Optional[str] = None, message: str = "" + ) -> Dict[str, Any]: """ Copy a Galaxy dataset into a library. @@ -665,14 +715,15 @@ """ if folder_id is None: folder_id = self._get_root_folder_id(library_id) - payload = {} - payload["folder_id"] = folder_id - payload["create_type"] = "file" - payload["from_hda_id"] = dataset_id - payload["ldda_message"] = message + payload = { + "folder_id": folder_id, + "create_type": "file", + "from_hda_id": dataset_id, + "ldda_message": message, + } return self._post(payload, id=library_id, contents=True) - def get_library_permissions(self, library_id): + def get_library_permissions(self, library_id: str) -> Dict[str, Any]: """ Get the permissions for a library. @@ -685,7 +736,7 @@ url = self._make_url(library_id) + "/permissions" return self._get(url=url) - def get_dataset_permissions(self, dataset_id): + def get_dataset_permissions(self, dataset_id: str) -> Dict[str, Any]: """ Get the permissions for a dataset. 
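# Sketch combining get_library_permissions() above with the permission setters
# shown in the hunks that follow (set_library_permissions / set_dataset_permissions).
# The library ID and role IDs are placeholders obtained elsewhere (e.g. from the
# roles client of a real Galaxy instance).
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://galaxy.example.org", key="<api-key>")
print(gi.libraries.get_library_permissions("<library-id>"))
# Note: this overrides *all* permission types for the library, even omitted ones.
gi.libraries.set_library_permissions(
    "<library-id>",
    access_in=["<role-id-1>"],
    modify_in=["<role-id-1>"],
    add_in=["<role-id-1>", "<role-id-2>"],
    manage_in=["<role-id-1>"],
)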
@@ -698,7 +749,14 @@ url = "/".join((self._make_url(), "datasets", dataset_id, "permissions")) return self._get(url=url) - def set_library_permissions(self, library_id, access_in=None, modify_in=None, add_in=None, manage_in=None): + def set_library_permissions( + self, + library_id: str, + access_in: Optional[List[str]] = None, + modify_in: Optional[List[str]] = None, + add_in: Optional[List[str]] = None, + manage_in: Optional[List[str]] = None, + ) -> Dict[str, Any]: """ Set the permissions for a library. Note: it will override all security for this library even if you leave out a permission type. @@ -721,7 +779,7 @@ :rtype: dict :return: General information about the library """ - payload = {} + payload: Dict[str, List[str]] = {} if access_in: payload["LIBRARY_ACCESS_in"] = access_in if modify_in: @@ -733,7 +791,13 @@ url = self._make_url(library_id) + "/permissions" return self._post(payload, url=url) - def set_dataset_permissions(self, dataset_id, access_in=None, modify_in=None, manage_in=None): + def set_dataset_permissions( + self, + dataset_id: str, + access_in: Optional[List[str]] = None, + modify_in: Optional[List[str]] = None, + manage_in: Optional[List[str]] = None, + ) -> Dict[str, Any]: """ Set the permissions for a dataset. Note: it will override all security for this dataset even if you leave out a permission type. @@ -753,14 +817,15 @@ :rtype: dict :return: dictionary with all applicable permissions' values """ - payload = {} + # we need here to define an action + payload: Dict[str, Any] = { + "action": "set_permissions", + } if access_in: payload["access_ids[]"] = access_in if modify_in: payload["modify_ids[]"] = modify_in if manage_in: payload["manage_ids[]"] = manage_in - # we need here to define an action - payload["action"] = "set_permissions" url = "/".join((self._make_url(), "datasets", dataset_id, "permissions")) return self._post(payload, url=url) diff -Nru python-bioblend-0.18.0/bioblend/galaxy/objects/client.py python-bioblend-1.0.0/bioblend/galaxy/objects/client.py --- python-bioblend-0.18.0/bioblend/galaxy/objects/client.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/objects/client.py 2022-10-13 18:17:37.000000000 +0000 @@ -6,22 +6,32 @@ """ import abc import json -import typing -from collections.abc import ( - Mapping, - Sequence, +from collections.abc import Sequence +from typing import ( + Any, + cast, + Dict, + Generic, + List, + Optional, + overload, + Type, + TYPE_CHECKING, + Union, ) -from typing import List + +from typing_extensions import Literal import bioblend +from bioblend.galaxy.datasets import HdaLdda from . import wrappers -if typing.TYPE_CHECKING: +if TYPE_CHECKING: from .galaxy_instance import GalaxyInstance class ObjClient(abc.ABC): - def __init__(self, obj_gi: "GalaxyInstance"): + def __init__(self, obj_gi: "GalaxyInstance") -> None: self.obj_gi = obj_gi self.gi = self.obj_gi.gi self.log = bioblend.log @@ -31,10 +41,9 @@ """ Retrieve the object corresponding to the given id. """ - pass @abc.abstractmethod - def get_previews(self) -> list: + def get_previews(self, **kwargs: Any) -> list: """ Get a list of object previews. @@ -46,7 +55,6 @@ :rtype: list :return: a list of object previews """ - pass @abc.abstractmethod def list(self) -> list: @@ -59,9 +67,8 @@ :rtype: list :return: a list of objects """ - pass - def _select_id(self, id_=None, name=None): + def _select_id(self, id_: Optional[str] = None, name: Optional[str] = None) -> str: """ Return the id that corresponds to the given id or name info. 
""" @@ -72,17 +79,17 @@ if id_ is None: id_list = [_.id for _ in self.get_previews(name=name)] if len(id_list) > 1: - raise ValueError("Ambiguous name") + raise ValueError(f"Ambiguous name '{name}'") if not id_list: - raise ValueError("name not found") + raise ValueError(f"Name '{name}' not found") return id_list[0] else: return id_ - def _get_dict(self, meth_name, reply): + def _get_dict(self, meth_name: str, reply: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]]) -> Dict[str, Any]: if reply is None: raise RuntimeError(f"{meth_name}: no reply") - elif isinstance(reply, Mapping): + elif isinstance(reply, dict): return reply try: return reply[0] @@ -90,37 +97,49 @@ raise RuntimeError(f"{meth_name}: unexpected reply: {reply!r}") -class ObjDatasetContainerClient(ObjClient): - def _get_container(self, id_, ctype): - show_fname = f"show_{ctype.__name__.lower()}" - gi_client = getattr(self.gi, ctype.API_MODULE) - show_f = getattr(gi_client, show_fname) - res = show_f(id_) - cdict = self._get_dict(show_fname, res) - cdict["id"] = id_ # overwrite unencoded id - c_infos = show_f(id_, contents=True) - if not isinstance(c_infos, Sequence): - raise RuntimeError(f"{show_fname}: unexpected reply: {c_infos!r}") - c_infos = [ctype.CONTENT_INFO_TYPE(_) for _ in c_infos] - return ctype(cdict, content_infos=c_infos, gi=self.obj_gi) +class ObjDatasetContainerClient( + ObjClient, Generic[wrappers.DatasetContainerSubtype, wrappers.DatasetContainerPreviewSubtype] +): + CONTAINER_TYPE: Type[wrappers.DatasetContainer] + CONTAINER_PREVIEW_TYPE: Type[wrappers.DatasetContainerPreview] + + def __init__(self, obj_gi: "GalaxyInstance") -> None: + super().__init__(obj_gi=obj_gi) + gi_client = getattr(self.gi, self.CONTAINER_TYPE.API_MODULE) + get_fname = f"get_{self.CONTAINER_TYPE.API_MODULE}" + self._get_f = getattr(gi_client, get_fname) + show_fname = f"show_{self.CONTAINER_TYPE.__name__.lower()}" + self._show_f = getattr(gi_client, show_fname) + + def get_previews( + self, name: Optional[str] = None, deleted: bool = False, **kwargs: Any + ) -> List[wrappers.DatasetContainerPreviewSubtype]: + dicts = self._get_f(name=name, deleted=deleted, **kwargs) + return [ + cast(wrappers.DatasetContainerPreviewSubtype, self.CONTAINER_PREVIEW_TYPE(_, gi=self.obj_gi)) for _ in dicts + ] - @abc.abstractmethod - def get(self, id_: str) -> wrappers.DatasetContainer: + def get(self, id_: str) -> wrappers.DatasetContainerSubtype: """ - Retrieve the dataset corresponding to the given id. + Retrieve the dataset container corresponding to the given id. """ - pass + cdict = self._show_f(id_) + c_infos = self._show_f(id_, contents=True) + if not isinstance(c_infos, Sequence): + raise RuntimeError(f"{self._show_f.__name__}: unexpected reply: {c_infos!r}") + c_infos = [self.CONTAINER_TYPE.CONTENT_INFO_TYPE(_) for _ in c_infos] + return cast(wrappers.DatasetContainerSubtype, self.CONTAINER_TYPE(cdict, content_infos=c_infos, gi=self.obj_gi)) -class ObjLibraryClient(ObjDatasetContainerClient): +class ObjLibraryClient(ObjDatasetContainerClient[wrappers.Library, wrappers.LibraryPreview]): """ Interacts with Galaxy libraries. """ - def __init__(self, obj_gi): - super().__init__(obj_gi) + CONTAINER_TYPE = wrappers.Library + CONTAINER_PREVIEW_TYPE = wrappers.LibraryPreview - def create(self, name, description=None, synopsis=None): + def create(self, name: str, description: Optional[str] = None, synopsis: Optional[str] = None) -> wrappers.Library: """ Create a data library with the properties defined in the arguments. 
@@ -131,20 +150,7 @@ lib_info = self._get_dict("create_library", res) return self.get(lib_info["id"]) - def get(self, id_) -> wrappers.Library: - """ - Retrieve the data library corresponding to the given id. - - :rtype: :class:`~.wrappers.Library` - :return: the library corresponding to ``id_`` - """ - return self._get_container(id_, wrappers.Library) - - def get_previews(self, name=None, deleted=False): - dicts = self.gi.libraries.get_libraries(name=name, deleted=deleted) - return [wrappers.LibraryPreview(_, gi=self.obj_gi) for _ in dicts] - - def list(self, name=None, deleted=False): + def list(self, name: Optional[str] = None, deleted: bool = False) -> List[wrappers.Library]: """ Get libraries owned by the user of this Galaxy instance. @@ -164,7 +170,7 @@ else: return [self.get(_["id"]) for _ in dicts] - def delete(self, id_=None, name=None): + def delete(self, id_: Optional[str] = None, name: Optional[str] = None) -> None: """ Delete the library with the given id or name. @@ -176,19 +182,19 @@ """ id_ = self._select_id(id_=id_, name=name) res = self.gi.libraries.delete_library(id_) - if not isinstance(res, Mapping): + if not isinstance(res, dict): raise RuntimeError(f"delete_library: unexpected reply: {res!r}") -class ObjHistoryClient(ObjDatasetContainerClient): +class ObjHistoryClient(ObjDatasetContainerClient[wrappers.History, wrappers.HistoryPreview]): """ Interacts with Galaxy histories. """ - def __init__(self, obj_gi): - super().__init__(obj_gi) + CONTAINER_TYPE = wrappers.History + CONTAINER_PREVIEW_TYPE = wrappers.HistoryPreview - def create(self, name=None): + def create(self, name: Optional[str] = None) -> wrappers.History: """ Create a new Galaxy history, optionally setting its name. @@ -199,20 +205,7 @@ hist_info = self._get_dict("create_history", res) return self.get(hist_info["id"]) - def get(self, id_) -> wrappers.History: - """ - Retrieve the history corresponding to the given id. - - :rtype: :class:`~.wrappers.History` - :return: the history corresponding to ``id_`` - """ - return self._get_container(id_, wrappers.History) - - def get_previews(self, name=None, deleted=False): - dicts = self.gi.histories.get_histories(name=name, deleted=deleted) - return [wrappers.HistoryPreview(_, gi=self.obj_gi) for _ in dicts] - - def list(self, name=None, deleted=False): + def list(self, name: Optional[str] = None, deleted: bool = False) -> List[wrappers.History]: """ Get histories owned by the user of this Galaxy instance. @@ -226,7 +219,7 @@ dicts = self.gi.histories.get_histories(name=name, deleted=deleted) return [self.get(_["id"]) for _ in dicts] - def delete(self, id_=None, name=None, purge=False): + def delete(self, id_: Optional[str] = None, name: Optional[str] = None, purge: bool = False) -> None: """ Delete the history with the given id or name. @@ -242,7 +235,7 @@ """ id_ = self._select_id(id_=id_, name=name) res = self.gi.histories.delete_history(id_, purge=purge) - if not isinstance(res, Mapping): + if not isinstance(res, dict): raise RuntimeError(f"delete_history: unexpected reply: {res!r}") @@ -251,7 +244,7 @@ Interacts with Galaxy workflows. """ - def import_new(self, src, publish=False): + def import_new(self, src: Union[str, Dict[str, Any]], publish: bool = False) -> wrappers.Workflow: """ Imports a new workflow into Galaxy. 
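# Illustrative import of a workflow through ObjWorkflowClient.import_new() just
# above; the .ga file path is a placeholder. import_new() accepts either a dict
# or a JSON string and parses the latter itself.
import json

from bioblend.galaxy.objects import GalaxyInstance

obj_gi = GalaxyInstance("https://galaxy.example.org", "<api-key>")
with open("my_workflow.ga") as f:
    wf_dict = json.load(f)
workflow = obj_gi.workflows.import_new(wf_dict, publish=False)
print(workflow.id, workflow.name)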
@@ -267,7 +260,7 @@ :rtype: :class:`~.wrappers.Workflow` :return: the workflow just imported """ - if isinstance(src, Mapping): + if isinstance(src, dict): wf_dict = src else: try: @@ -277,7 +270,7 @@ wf_info = self.gi.workflows.import_workflow_dict(wf_dict, publish) return self.get(wf_info["id"]) - def import_shared(self, id_): + def import_shared(self, id_: str) -> wrappers.Workflow: """ Imports a shared workflow to the user's space. @@ -290,7 +283,7 @@ wf_info = self.gi.workflows.import_shared_workflow(id_) return self.get(wf_info["id"]) - def get(self, id_) -> wrappers.Workflow: + def get(self, id_: str) -> wrappers.Workflow: """ Retrieve the workflow corresponding to the given id. @@ -302,12 +295,14 @@ return wrappers.Workflow(wf_dict, gi=self.obj_gi) # the 'deleted' option is not available for workflows - def get_previews(self, name=None, published=False): - dicts = self.gi.workflows.get_workflows(name=name, published=published) + def get_previews( + self, name: Optional[str] = None, published: bool = False, **kwargs: Any + ) -> List[wrappers.WorkflowPreview]: + dicts = self.gi.workflows.get_workflows(name=name, published=published, **kwargs) return [wrappers.WorkflowPreview(_, gi=self.obj_gi) for _ in dicts] # the 'deleted' option is not available for workflows - def list(self, name=None, published=False): + def list(self, name: Optional[str] = None, published: bool = False) -> List[wrappers.Workflow]: """ Get workflows owned by the user of this Galaxy instance. @@ -321,7 +316,7 @@ dicts = self.gi.workflows.get_workflows(name=name, published=published) return [self.get(_["id"]) for _ in dicts] - def delete(self, id_=None, name=None): + def delete(self, id_: Optional[str] = None, name: Optional[str] = None) -> None: """ Delete the workflow with the given id or name. @@ -342,7 +337,7 @@ Interacts with Galaxy Invocations. """ - def get(self, id_) -> wrappers.Invocation: + def get(self, id_: str) -> wrappers.Invocation: """ Get an invocation by ID. @@ -352,17 +347,23 @@ inv_dict = self.gi.invocations.show_invocation(id_) return wrappers.Invocation(inv_dict, self.obj_gi) - def get_previews(self) -> List[wrappers.InvocationPreview]: + def get_previews(self, **kwargs: Any) -> List[wrappers.InvocationPreview]: """ Get previews of all invocations. :rtype: list of InvocationPreview :param: previews of invocations """ - inv_list = self.gi.invocations.get_invocations() + inv_list = self.gi.invocations.get_invocations(**kwargs) return [wrappers.InvocationPreview(inv_dict, gi=self.obj_gi) for inv_dict in inv_list] - def list(self, workflow=None, history=None, include_terminal=True, limit=None) -> List[wrappers.Invocation]: + def list( + self, + workflow: Optional[wrappers.Workflow] = None, + history: Optional[wrappers.History] = None, + include_terminal: bool = True, + limit: Optional[int] = None, + ) -> List[wrappers.Invocation]: """ Get full listing of workflow invocations, or select a subset by specifying optional arguments for filtering (e.g. a workflow). @@ -371,7 +372,7 @@ :param workflow: Include only invocations associated with this workflow - :type history: str + :type history: wrappers.History :param history: Include only invocations associated with this history @@ -401,7 +402,7 @@ Interacts with Galaxy tools. """ - def get(self, id_, io_details=False, link_details=False) -> wrappers.Tool: + def get(self, id_: str, io_details: bool = False, link_details: bool = False) -> wrappers.Tool: """ Retrieve the tool corresponding to the given id. 
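# Hedged listing example for ObjInvocationClient.list() above; the workflow name
# is a placeholder.
from bioblend.galaxy.objects import GalaxyInstance

obj_gi = GalaxyInstance("https://galaxy.example.org", "<api-key>")
workflows = obj_gi.workflows.list(name="Example mapping workflow")
if workflows:
    # Only non-terminal invocations of this workflow, capped at 10 results.
    running = obj_gi.invocations.list(workflow=workflows[0], include_terminal=False, limit=10)
    for inv in running:
        print(inv.id, inv.state)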
@@ -418,7 +419,7 @@ tool_dict = self._get_dict("show_tool", res) return wrappers.Tool(tool_dict, gi=self.obj_gi) - def get_previews(self, name=None, trackster=None): + def get_previews(self, name: Optional[str] = None, trackster: bool = False, **kwargs: Any) -> List[wrappers.Tool]: """ Get the list of tools installed on the Galaxy instance. @@ -431,11 +432,11 @@ :rtype: list of :class:`~.wrappers.Tool` """ - dicts = self.gi.tools.get_tools(name=name, trackster=trackster) + dicts = self.gi.tools.get_tools(name=name, trackster=trackster, **kwargs) return [wrappers.Tool(_, gi=self.obj_gi) for _ in dicts] # the 'deleted' option is not available for tools - def list(self, name=None, trackster=None): + def list(self, name: Optional[str] = None, trackster: bool = False) -> List[wrappers.Tool]: """ Get the list of tools installed on the Galaxy instance. @@ -465,7 +466,7 @@ Interacts with Galaxy jobs. """ - def get(self, id_, full_details=False) -> wrappers.Job: + def get(self, id_: str, full_details: bool = False) -> wrappers.Job: """ Retrieve the job corresponding to the given id. @@ -480,11 +481,11 @@ job_dict = self._get_dict("show_job", res) return wrappers.Job(job_dict, gi=self.obj_gi) - def get_previews(self): - dicts = self.gi.jobs.get_jobs() + def get_previews(self, **kwargs: Any) -> List[wrappers.JobPreview]: + dicts = self.gi.jobs.get_jobs(**kwargs) return [wrappers.JobPreview(_, gi=self.obj_gi) for _ in dicts] - def list(self): + def list(self) -> List[wrappers.Job]: """ Get the list of jobs of the current user. @@ -499,7 +500,15 @@ Interacts with Galaxy datasets. """ - def get(self, id_: str, hda_ldda: str = "hda") -> wrappers.Dataset: + @overload + def get(self, id_: str, hda_ldda: Literal["hda"] = "hda") -> wrappers.HistoryDatasetAssociation: + ... + + @overload + def get(self, id_: str, hda_ldda: Literal["ldda"]) -> wrappers.LibraryDatasetDatasetAssociation: + ... + + def get(self, id_: str, hda_ldda: HdaLdda = "hda") -> wrappers.Dataset: """ Retrieve the dataset corresponding to the given id. @@ -521,7 +530,7 @@ else: raise ValueError(f"Unsupported value for hda_ldda: {hda_ldda}") - def get_previews(self) -> list: + def get_previews(self, **kwargs: Any) -> list: raise NotImplementedError() def list(self) -> list: @@ -545,7 +554,7 @@ hist = self.obj_gi.histories.get(ds_dict["history_id"]) return wrappers.HistoryDatasetCollectionAssociation(ds_dict, hist, gi=self.obj_gi) - def get_previews(self) -> list: + def get_previews(self, **kwargs: Any) -> list: raise NotImplementedError() def list(self) -> list: diff -Nru python-bioblend-0.18.0/bioblend/galaxy/objects/galaxy_instance.py python-bioblend-1.0.0/bioblend/galaxy/objects/galaxy_instance.py --- python-bioblend-0.18.0/bioblend/galaxy/objects/galaxy_instance.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/objects/galaxy_instance.py 2022-10-13 18:17:37.000000000 +0000 @@ -3,18 +3,25 @@ """ import time +from typing import ( + Iterable, + List, + Optional, +) import bioblend import bioblend.galaxy from bioblend.galaxy.datasets import TERMINAL_STATES -from . import client +from . 
import ( + client, + wrappers, +) -def _get_error_info(hda): - msg = hda.id +def _get_error_info(dataset: wrappers.Dataset) -> str: + msg = dataset.id try: - msg += f" ({hda.name}): " - msg += hda.wrapped["misc_info"] + msg += f" ({dataset.name}): {dataset.misc_info}" except Exception: # avoid 'error while generating an error report' msg += ": error" return msg @@ -43,7 +50,14 @@ histories = gi.histories.list() """ - def __init__(self, url, api_key=None, email=None, password=None, verify=True): + def __init__( + self, + url: str, + api_key: Optional[str] = None, + email: Optional[str] = None, + password: Optional[str] = None, + verify: bool = True, + ) -> None: self.gi = bioblend.galaxy.GalaxyInstance(url, api_key, email, password, verify) self.log = bioblend.log self.datasets = client.ObjDatasetClient(self) @@ -55,7 +69,9 @@ self.tools = client.ObjToolClient(self) self.jobs = client.ObjJobClient(self) - def _wait_datasets(self, datasets, polling_interval, break_on_error=True): + def _wait_datasets( + self, datasets: Iterable[wrappers.Dataset], polling_interval: float, break_on_error: bool = True + ) -> None: """ Wait for datasets to come out of the pending states. @@ -78,7 +94,7 @@ times) during the execution. """ - def poll(ds_list): + def poll(ds_list: Iterable[wrappers.Dataset]) -> List[wrappers.Dataset]: pending = [] for ds in ds_list: ds.refresh() diff -Nru python-bioblend-0.18.0/bioblend/galaxy/objects/wrappers.py python-bioblend-1.0.0/bioblend/galaxy/objects/wrappers.py --- python-bioblend-0.18.0/bioblend/galaxy/objects/wrappers.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/objects/wrappers.py 2022-10-13 18:17:37.000000000 +0000 @@ -6,24 +6,37 @@ import abc import json -import typing from collections.abc import ( Mapping, Sequence, ) from typing import ( + Any, + Callable, + cast, + ClassVar, Dict, + Generic, + IO, + Iterable, + Iterator, List, Optional, Set, Tuple, + Type, + TYPE_CHECKING, + TypeVar, Union, ) +from typing_extensions import Literal + import bioblend +from bioblend.galaxy.workflows import InputsBy from bioblend.util import abstractclass -if typing.TYPE_CHECKING: +if TYPE_CHECKING: from . import client from .galaxy_instance import GalaxyInstance @@ -50,6 +63,8 @@ "WorkflowPreview", ) +WrapperSubtype = TypeVar("WrapperSubtype", bound="Wrapper") + @abstractclass class Wrapper: @@ -69,10 +84,13 @@ BASE_ATTRS: Tuple[str, ...] = ("id",) gi: Optional["GalaxyInstance"] id: str + is_modified: bool wrapped: dict _cached_parent: Optional["Wrapper"] - def __init__(self, wrapped: dict, parent: Optional["Wrapper"] = None, gi: Optional["GalaxyInstance"] = None): + def __init__( + self, wrapped: Dict[str, Any], parent: Optional["Wrapper"] = None, gi: Optional["GalaxyInstance"] = None + ) -> None: """ :type wrapped: dict :param wrapped: JSON-serializable dictionary @@ -105,25 +123,25 @@ return self._cached_parent @property - def is_mapped(self): + def is_mapped(self) -> bool: """ ``True`` if this wrapper is mapped to an actual Galaxy entity. """ return self.id is not None - def unmap(self): + def unmap(self) -> None: """ Disconnect this wrapper from Galaxy. """ object.__setattr__(self, "id", None) - def clone(self): + def clone(self: WrapperSubtype) -> WrapperSubtype: """ Return an independent copy of this wrapper. """ return self.__class__(self.wrapped) - def touch(self): + def touch(self) -> None: """ Mark this wrapper as having been modified since its creation. 
""" @@ -131,29 +149,28 @@ if self.parent: self.parent.touch() - def to_json(self): + def to_json(self) -> str: """ Return a JSON dump of this wrapper. """ return json.dumps(self.wrapped) @classmethod - def from_json(cls, jdef): + def from_json(cls: Type[WrapperSubtype], jdef: str) -> WrapperSubtype: """ Build a new wrapper from a JSON dump. """ return cls(json.loads(jdef)) # FIXME: things like self.x[0] = 'y' do NOT call self.__setattr__ - def __setattr__(self, name, value): + def __setattr__(self, name: str, value: str) -> None: if name not in self.wrapped: raise AttributeError("can't set attribute") - else: - self.wrapped[name] = value - object.__setattr__(self, name, value) - self.touch() + self.wrapped[name] = value + object.__setattr__(self, name, value) + self.touch() - def __repr__(self): + def __repr__(self) -> str: return f"{self.__class__.__name__}({self.wrapped!r})" @@ -175,11 +192,13 @@ "tool_version", "type", ) + input_steps: Dict[str, Dict] type: str - tool_id: str + tool_id: Optional[str] tool_inputs: Dict + tool_version: Optional[str] - def __init__(self, step_dict, parent): + def __init__(self, step_dict: Dict[str, Any], parent: Wrapper) -> None: super().__init__(step_dict, parent=parent, gi=parent.gi) try: stype = step_dict["type"] @@ -204,7 +223,15 @@ "workflow_step_label", "workflow_step_uuid", ) + action: Optional[object] gi: "GalaxyInstance" + job_id: str + order_index: int + state: str + update_time: str + workflow_step_id: str + workflow_step_label: str + workflow_step_uuid: str @property def parent(self) -> Wrapper: @@ -212,7 +239,7 @@ assert ret is not None return ret - def __init__(self, wrapped: dict, parent: Wrapper, gi: "GalaxyInstance"): + def __init__(self, wrapped: Dict[str, Any], parent: Wrapper, gi: "GalaxyInstance") -> None: super().__init__(wrapped, parent, gi) def refresh(self) -> "InvocationStep": @@ -225,7 +252,7 @@ self.__init__(step_dict, parent=self.parent, gi=self.gi) # type: ignore[misc] return self - def get_outputs(self): + def get_outputs(self) -> Dict[str, "HistoryDatasetAssociation"]: """ Get the output datasets of the invocation step @@ -234,12 +261,9 @@ """ if not hasattr(self, "outputs"): self.refresh() - outputs = {} - for name, out_dict in self.wrapped["outputs"].items(): - outputs[name] = self.gi.datasets.get(out_dict["id"]) - return outputs + return {name: self.gi.datasets.get(out_dict["id"]) for name, out_dict in self.wrapped["outputs"].items()} - def get_output_collections(self): + def get_output_collections(self) -> Dict[str, "HistoryDatasetCollectionAssociation"]: """ Get the output dataset collections of the invocation step @@ -248,10 +272,10 @@ """ if not hasattr(self, "output_collections"): self.refresh() - output_collections = {} - for name, out_coll_dict in self.wrapped["output_collections"].items(): - output_collections[name] = self.gi.dataset_collections.get(out_coll_dict["id"]) - return output_collections + return { + name: self.gi.dataset_collections.get(out_coll_dict["id"]) + for name, out_coll_dict in self.wrapped["output_collections"].items() + } class Workflow(Wrapper): @@ -272,29 +296,42 @@ "steps", "tags", ) - inputs: Dict + dag: Dict[str, Set[str]] + deleted: bool + input_labels_to_ids: Dict[str, Set[str]] + inputs: Dict[str, Dict] + inv_dag: Dict[str, Set[str]] missing_ids: List - steps: Dict + name: str + owner: str POLLING_INTERVAL = 10 # for output state monitoring + published: bool + sink_ids: Set[str] + source_ids: Set[str] + steps: Dict[str, Step] + tags: List[str] + tool_labels_to_ids: Dict[str, Set[str]] 
- def __init__(self, wf_dict: dict, gi: Optional["GalaxyInstance"] = None): + def __init__(self, wf_dict: Dict[str, Any], gi: Optional["GalaxyInstance"] = None) -> None: super().__init__(wf_dict, gi=gi) - missing_ids = [] if gi: tools_list_by_id = [t.id for t in gi.tools.get_previews()] else: tools_list_by_id = [] + missing_ids = [] tool_labels_to_ids: Dict[str, Set[str]] = {} for k, v in self.steps.items(): + step_dict = cast(Dict[str, Any], v) # convert step ids to str for consistency with outer keys - v["id"] = str(v["id"]) - for i in v["input_steps"].values(): + step_dict["id"] = str(step_dict["id"]) + for i in step_dict["input_steps"].values(): i["source_step"] = str(i["source_step"]) - step = Step(v, self) + step = Step(step_dict, self) self.steps[k] = step if step.type == "tool": if not step.tool_inputs or step.tool_id not in tools_list_by_id: missing_ids.append(k) + assert step.tool_id tool_labels_to_ids.setdefault(step.tool_id, set()).add(step.id) input_labels_to_ids: Dict[str, Set[str]] = {} for id_, d in self.inputs.items(): @@ -312,7 +349,7 @@ object.__setattr__(self, "sink_ids", tails - heads) object.__setattr__(self, "missing_ids", missing_ids) - def _get_dag(self): + def _get_dag(self) -> Tuple[Dict[str, Set[str]], Dict[str, Set[str]]]: """ Return the workflow's DAG. @@ -329,25 +366,27 @@ {'c': {'a', 'b'}, 'd': {'c'}, 'e': {'c'}, 'f': {'c'}} """ - dag, inv_dag = {}, {} + dag: Dict[str, Set[str]] = {} + inv_dag: Dict[str, Set[str]] = {} for s in self.steps.values(): + assert isinstance(s, Step) for i in s.input_steps.values(): head, tail = i["source_step"], s.id dag.setdefault(head, set()).add(tail) inv_dag.setdefault(tail, set()).add(head) return dag, inv_dag - def sorted_step_ids(self): + def sorted_step_ids(self) -> List[str]: """ Return a topological sort of the workflow's DAG. """ - ids = [] + ids: List[str] = [] source_ids = self.source_ids.copy() inv_dag = {k: v.copy() for k, v in self.inv_dag.items()} while source_ids: head = source_ids.pop() ids.append(head) - for tail in self.dag.get(head, []): + for tail in self.dag.get(head, set()): incoming = inv_dag[tail] incoming.remove(head) if not incoming: @@ -355,42 +394,42 @@ return ids @property - def data_input_ids(self): + def data_input_ids(self) -> Set[str]: """ Return the ids of data input steps for this workflow. """ return {id_ for id_, s in self.steps.items() if s.type == "data_input"} @property - def data_collection_input_ids(self): + def data_collection_input_ids(self) -> Set[str]: """ Return the ids of data collection input steps for this workflow. """ return {id_ for id_, s in self.steps.items() if s.type == "data_collection_input"} @property - def parameter_input_ids(self): + def parameter_input_ids(self) -> Set[str]: """ Return the ids of parameter input steps for this workflow. """ return {id_ for id_, s in self.steps.items() if s.type == "parameter_input"} @property - def tool_ids(self): + def tool_ids(self) -> Set[str]: """ Return the ids of tool steps for this workflow. """ return {id_ for id_, s in self.steps.items() if s.type == "tool"} @property - def input_labels(self): + def input_labels(self) -> Set[str]: """ Return the labels of this workflow's input steps. """ return set(self.input_labels_to_ids) @property - def is_runnable(self): + def is_runnable(self) -> bool: """ Return True if the workflow can be run on Galaxy. 
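# Small sketch of the Workflow DAG helpers defined above; the workflow name is a
# placeholder. sorted_step_ids() returns a topological sort of the step graph
# built in _get_dag().
from bioblend.galaxy.objects import GalaxyInstance

obj_gi = GalaxyInstance("https://galaxy.example.org", "<api-key>")
wf = obj_gi.workflows.list(name="Example mapping workflow")[0]
print(wf.data_input_ids)     # steps that expect a dataset input
print(wf.tool_ids)           # steps that run a tool
print(wf.sorted_step_ids())  # step IDs in an order compatible with the DAG
print(wf.is_runnable)        # False if some referenced tools are missing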
@@ -400,7 +439,7 @@ return not self.missing_ids @staticmethod - def _convert_input_map(input_map: dict) -> dict: + def _convert_input_map(input_map: Dict[str, Any]) -> Dict[str, Any]: """ Convert ``input_map`` to the format required by the Galaxy web API. @@ -421,20 +460,20 @@ LibraryDataset, ), ): - ret[key] = {"id": value.id, "src": value.SRC} # type: ignore + ret[key] = {"id": value.id, "src": value.SRC} else: ret[key] = value return ret - def preview(self): - getf = self.gi.workflows.get_previews + # I think we should deprecate this method - NS + def preview(self) -> "WorkflowPreview": + assert self.gi is not None try: - p = [_ for _ in getf(published=True) if _.id == self.id][0] + return [_ for _ in self.gi.workflows.get_previews(published=True) if _.id == self.id][0] except IndexError: raise ValueError(f"no object for id {self.id}") - return p - def export(self) -> dict: + def export(self) -> Dict[str, Any]: """ Export a re-importable representation of the workflow. @@ -444,7 +483,7 @@ assert self.gi is not None return self.gi.gi.workflows.export_workflow_dict(self.id) - def delete(self): + def delete(self) -> None: """ Delete this workflow. @@ -458,14 +497,14 @@ def invoke( self, - inputs=None, - params=None, - history=None, - import_inputs_to_history=None, - replacement_params=None, - allow_tool_state_corrections=True, - inputs_by=None, - parameters_normalized=False, + inputs: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None, + history: Optional[Union[str, "History"]] = None, + import_inputs_to_history: bool = False, + replacement_params: Optional[Dict[str, Any]] = None, + allow_tool_state_corrections: bool = True, + inputs_by: Optional[InputsBy] = None, + parameters_normalized: bool = False, ) -> "Invocation": """ Invoke the workflow. This will cause a workflow to be scheduled @@ -633,14 +672,19 @@ "workflow_id", ) gi: "GalaxyInstance" + history_id: str + state: str steps: List[InvocationStep] + update_time: str + uuid: str + workflow_id: str - def __init__(self, inv_dict: dict, gi: "GalaxyInstance"): + def __init__(self, inv_dict: Dict[str, Any], gi: "GalaxyInstance") -> None: super().__init__(inv_dict, gi=gi) self.steps = [InvocationStep(step, parent=self, gi=gi) for step in inv_dict["steps"]] self.inputs = [{**v, "label": k} for k, v in inv_dict["inputs"].items()] - def sorted_step_ids(self): + def sorted_step_ids(self) -> List[str]: """ Get the step IDs sorted based on this order index. @@ -649,7 +693,7 @@ """ return [step.id for step in sorted(self.steps, key=lambda step: step.order_index)] - def step_states(self): + def step_states(self) -> Set[str]: """ Get the set of step states for this invocation. @@ -658,7 +702,7 @@ """ return {step.state for step in self.steps} - def number_of_steps(self): + def number_of_steps(self) -> int: """ Get the number of steps for this invocation. @@ -667,7 +711,12 @@ """ return len(self.steps) - def sorted_steps_by(self, indices=None, states=None, step_ids=None): + def sorted_steps_by( + self, + indices: Optional[Iterable[int]] = None, + states: Optional[Iterable[Union[str, None]]] = None, + step_ids: Optional[Iterable[str]] = None, + ) -> List[InvocationStep]: """ Get steps for this invocation, or get a subset by specifying optional parameters for filtering. 
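# Invocation inspection sketch using the wrappers above; the invocation ID is a
# placeholder.
from bioblend.galaxy.objects import GalaxyInstance

obj_gi = GalaxyInstance("https://galaxy.example.org", "<api-key>")
inv = obj_gi.invocations.get("<invocation-id>")
print(inv.number_of_steps(), inv.step_states())
# sorted_steps_by() filters steps and returns them ordered by order_index.
for step in inv.sorted_steps_by(states=["scheduled"]):
    print(step.order_index, step.workflow_step_label, step.state)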
@@ -684,7 +733,7 @@ :rtype: list of InvocationStep :param: invocation steps """ - steps = self.steps + steps: Union[List[InvocationStep], filter] = self.steps if indices is not None: steps = filter(lambda step: step.order_index in indices, steps) if states is not None: @@ -693,7 +742,7 @@ steps = filter(lambda step: step.id in step_ids, steps) return sorted(steps, key=lambda step: step.order_index) - def cancel(self): + def cancel(self) -> None: """ Cancel this invocation. @@ -713,14 +762,14 @@ self.__init__(inv_dict, gi=self.gi) # type: ignore[misc] return self - def run_step_actions(self, steps: List[InvocationStep], actions): + def run_step_actions(self, steps: List[InvocationStep], actions: List[object]) -> None: """ Run actions for active steps of this invocation. :type steps: list of InvocationStep :param steps: list of steps to run actions on - :type actions: list of str + :type actions: list of objects :param actions: list of actions to run .. note:: @@ -737,7 +786,7 @@ for step, step_dict in zip(steps, step_dict_list): step.__init__(step_dict, parent=self, gi=self.gi) # type: ignore[misc] - def summary(self): + def summary(self) -> Dict[str, Any]: """ Get a summary for this invocation. @@ -746,7 +795,7 @@ """ return self.gi.gi.invocations.get_invocation_summary(self.id) - def step_jobs_summary(self): + def step_jobs_summary(self) -> List[Dict[str, Any]]: """ Get a summary for this invocation's step jobs. @@ -755,7 +804,7 @@ """ return self.gi.gi.invocations.get_invocation_step_jobs_summary(self.id) - def report(self): + def report(self) -> Dict[str, Any]: """ Get a dictionary containing a Markdown report for this invocation. @@ -764,7 +813,7 @@ """ return self.gi.gi.invocations.get_invocation_report(self.id) - def save_report_pdf(self, file_path, chunk_size=bioblend.CHUNK_SIZE): + def save_report_pdf(self, file_path: str, chunk_size: int = bioblend.CHUNK_SIZE) -> None: """ Download a PDF report for this invocation. @@ -776,7 +825,7 @@ """ self.gi.gi.invocations.get_invocation_report_pdf(self.id, file_path, chunk_size) - def biocompute_object(self): + def biocompute_object(self) -> Dict[str, Any]: """ Get a BioCompute object for this invocation. @@ -785,7 +834,7 @@ """ return self.gi.gi.invocations.get_invocation_biocompute_object(self.id) - def wait(self, maxwait: int = 12000, interval: int = 3, check: bool = True): + def wait(self, maxwait: float = 12000, interval: float = 3, check: bool = True) -> None: """ Wait for this invocation to reach a terminal state. @@ -805,6 +854,9 @@ self.__init__(inv_dict, gi=self.gi) # type: ignore[misc] +DatasetSubtype = TypeVar("DatasetSubtype", bound="Dataset") + + class Dataset(Wrapper, metaclass=abc.ABCMeta): """ Abstract base class for Galaxy datasets. @@ -821,29 +873,32 @@ "state", ) container: "DatasetContainer" + genome_build: str gi: "GalaxyInstance" + misc_info: str + name: str POLLING_INTERVAL = 1 # for state monitoring + state: str - def __init__(self, ds_dict, container: "DatasetContainer", gi: "GalaxyInstance"): + def __init__(self, ds_dict: Dict[str, Any], container: "DatasetContainer", gi: "GalaxyInstance") -> None: super().__init__(ds_dict, gi=gi) object.__setattr__(self, "container", container) @property @abc.abstractmethod - def _stream_url(self): + def _stream_url(self) -> str: """ Return the URL to stream this dataset. 
""" - pass - def get_stream(self, chunk_size=bioblend.CHUNK_SIZE): + def get_stream(self, chunk_size: int = bioblend.CHUNK_SIZE) -> Iterator[bytes]: """ Open dataset for reading and return an iterator over its contents. :type chunk_size: int :param chunk_size: read this amount of bytes at a time """ - kwargs = {"stream": True} + kwargs: Dict[str, Any] = {"stream": True} if isinstance(self, LibraryDataset): kwargs["params"] = {"ld_ids%5B%5D": self.id} r = self.gi.gi.make_get_request(self._stream_url, **kwargs) @@ -854,7 +909,7 @@ r.raise_for_status() return r.iter_content(chunk_size) # FIXME: client can't close r - def peek(self, chunk_size=bioblend.CHUNK_SIZE): + def peek(self, chunk_size: int = bioblend.CHUNK_SIZE) -> bytes: """ Open dataset for reading and return the first chunk. @@ -865,7 +920,7 @@ except StopIteration: return b"" - def download(self, file_object, chunk_size=bioblend.CHUNK_SIZE): + def download(self, file_object: IO[bytes], chunk_size: int = bioblend.CHUNK_SIZE) -> None: """ Open dataset for reading and save its contents to ``file_object``. @@ -877,7 +932,7 @@ for chunk in self.get_stream(chunk_size=chunk_size): file_object.write(chunk) - def get_contents(self, chunk_size=bioblend.CHUNK_SIZE): + def get_contents(self, chunk_size: int = bioblend.CHUNK_SIZE) -> bytes: """ Open dataset for reading and return its **full** contents. @@ -885,7 +940,7 @@ """ return b"".join(self.get_stream(chunk_size=chunk_size)) - def refresh(self) -> "Dataset": + def refresh(self: DatasetSubtype) -> DatasetSubtype: """ Re-fetch the attributes pertaining to this object. @@ -896,7 +951,7 @@ self.__init__(ds_dict, self.container, self.gi) # type: ignore[misc] return self - def wait(self, polling_interval=POLLING_INTERVAL, break_on_error=True): + def wait(self, polling_interval: float = POLLING_INTERVAL, break_on_error: bool = True) -> None: """ Wait for this dataset to come out of the pending states. @@ -923,13 +978,16 @@ BASE_ATTRS = Dataset.BASE_ATTRS + ("annotation", "deleted", "purged", "tags", "visible") SRC = "hda" + annotation: str + deleted: bool + purged: bool @property - def _stream_url(self): + def _stream_url(self) -> str: base_url = self.gi.gi.histories._make_url(module_id=self.container.id, contents=True) return f"{base_url}/{self.id}/display" - def get_stream(self, chunk_size=bioblend.CHUNK_SIZE): + def get_stream(self, chunk_size: int = bioblend.CHUNK_SIZE) -> Iterator[bytes]: """ Open dataset for reading and return an iterator over its contents. @@ -942,7 +1000,7 @@ ) return r.iter_content(chunk_size) # FIXME: client can't close r - def update(self, **kwds) -> "HistoryDatasetAssociation": + def update(self, **kwargs: Any) -> "HistoryDatasetAssociation": """ Update this history dataset metadata. Some of the attributes that can be modified are documented below. @@ -962,13 +1020,13 @@ :type visible: bool :param visible: Mark or unmark history dataset as visible """ - res = self.gi.gi.histories.update_dataset(self.container.id, self.id, **kwds) + res = self.gi.gi.histories.update_dataset(self.container.id, self.id, **kwargs) # Refresh also the history because the dataset may have been (un)deleted self.container.refresh() self.__init__(res, self.container, gi=self.gi) # type: ignore[misc] return self - def delete(self, purge=False): + def delete(self, purge: bool = False) -> None: """ Delete this history dataset. 
@@ -985,6 +1043,9 @@ self.refresh() +DatasetCollectionSubtype = TypeVar("DatasetCollectionSubtype", bound="DatasetCollection") + + class DatasetCollection(Wrapper, metaclass=abc.ABCMeta): """ Abstract base class for Galaxy dataset collections. @@ -998,13 +1059,17 @@ ) container: Union["DatasetCollection", "History"] API_MODULE = "dataset_collections" + collection_type: str + deleted: bool gi: "GalaxyInstance" - def __init__(self, dsc_dict, container, gi: "GalaxyInstance"): + def __init__( + self, dsc_dict: Dict[str, Any], container: Union["DatasetCollection", "History"], gi: "GalaxyInstance" + ) -> None: super().__init__(dsc_dict, gi=gi) object.__setattr__(self, "container", container) - def refresh(self) -> "DatasetCollection": + def refresh(self: DatasetCollectionSubtype) -> DatasetCollectionSubtype: """ Re-fetch the attributes pertaining to this object. @@ -1016,8 +1081,10 @@ return self @abc.abstractmethod - def delete(self): - pass + def delete(self) -> None: + """ + Delete this dataset collection. + """ class HistoryDatasetCollectionAssociation(DatasetCollection): @@ -1027,11 +1094,9 @@ BASE_ATTRS = DatasetCollection.BASE_ATTRS + ("tags", "visible", "elements") SRC = "hdca" + elements: List[Dict] - def delete(self): - """ - Delete this dataset collection. - """ + def delete(self) -> None: self.gi.gi.histories.delete_dataset_collection(self.container.id, self.id) self.container.refresh() self.refresh() @@ -1044,7 +1109,7 @@ """ @property - def _stream_url(self): + def _stream_url(self) -> str: base_url = self.gi.gi.libraries._make_url() return f"{base_url}/datasets/download/uncompressed" @@ -1064,8 +1129,9 @@ """ SRC = "ld" + file_name: str - def delete(self, purged=False): + def delete(self, purged: bool = False) -> None: """ Delete this library dataset. @@ -1076,7 +1142,7 @@ self.container.refresh() self.refresh() - def update(self, **kwds) -> "LibraryDataset": + def update(self, **kwargs: Any) -> "LibraryDataset": """ Update this library dataset metadata. Some of the attributes that can be modified are documented below. @@ -1087,7 +1153,7 @@ :type genome_build: str :param genome_build: Replace history dataset genome build (dbkey) """ - res = self.gi.gi.libraries.update_library_dataset(self.id, **kwds) + res = self.gi.gi.libraries.update_library_dataset(self.id, **kwargs) self.container.refresh() self.__init__(res, self.container, gi=self.gi) # type: ignore[misc] return self @@ -1122,7 +1188,10 @@ BASE_ATTRS = ContentInfo.BASE_ATTRS + ("deleted", "state", "visible") -class DatasetContainer(Wrapper, metaclass=abc.ABCMeta): +DatasetContainerSubtype = TypeVar("DatasetContainerSubtype", bound="DatasetContainer") + + +class DatasetContainer(Wrapper, Generic[DatasetSubtype], metaclass=abc.ABCMeta): """ Abstract base class for dataset containers (histories and libraries). 
""" @@ -1132,11 +1201,20 @@ "name", ) API_MODULE: str + CONTENT_INFO_TYPE: Type[ContentInfo] + DS_TYPE: ClassVar[Callable] content_infos: List[ContentInfo] + deleted: bool gi: "GalaxyInstance" + name: str obj_gi_client: "client.ObjDatasetContainerClient" - def __init__(self, c_dict, content_infos=None, gi: "GalaxyInstance" = None): + def __init__( + self, + c_dict: Dict[str, Any], + content_infos: Optional[List[ContentInfo]] = None, + gi: Optional["GalaxyInstance"] = None, + ) -> None: """ :type content_infos: list of :class:`ContentInfo` :param content_infos: info objects for the container's contents @@ -1148,13 +1226,14 @@ object.__setattr__(self, "obj_gi_client", getattr(self.gi, self.API_MODULE)) @property - def dataset_ids(self): + def dataset_ids(self) -> List[str]: """ Return the ids of the contained datasets. """ return [_.id for _ in self.content_infos if _.type == "file"] - def preview(self): + # I think we should deprecate this method - NS + def preview(self) -> "DatasetContainerPreview": getf = self.obj_gi_client.get_previews # self.state could be stale: check both regular and deleted containers try: @@ -1166,7 +1245,7 @@ raise ValueError(f"no object for id {self.id}") return p - def refresh(self) -> "DatasetContainer": + def refresh(self: DatasetContainerSubtype) -> DatasetContainerSubtype: """ Re-fetch the attributes pertaining to this object. @@ -1176,7 +1255,7 @@ self.__init__(fresh.wrapped, content_infos=fresh.content_infos, gi=self.gi) # type: ignore[misc] return self - def get_dataset(self, ds_id): + def get_dataset(self, ds_id: str) -> DatasetSubtype: """ Retrieve the dataset corresponding to the given id. @@ -1191,7 +1270,7 @@ ds_dict = gi_client.show_dataset(self.id, ds_id) return self.DS_TYPE(ds_dict, self, gi=self.gi) - def get_datasets(self, name=None): + def get_datasets(self, name: Optional[str] = None) -> List[DatasetSubtype]: """ Get all datasets contained inside this dataset container. @@ -1216,8 +1295,14 @@ ds_ids = [_.id for _ in self.content_infos if _.name == name] return [self.get_dataset(_) for _ in ds_ids] + @abc.abstractmethod + def delete(self) -> None: + """ + Delete this dataset container. + """ + -class History(DatasetContainer): +class History(DatasetContainer[HistoryDatasetAssociation]): """ Maps to a Galaxy history. """ @@ -1234,8 +1319,11 @@ DSC_TYPE = HistoryDatasetCollectionAssociation CONTENT_INFO_TYPE = HistoryContentInfo API_MODULE = "histories" + annotation: str + published: bool + tags: List[str] - def update(self, **kwds): + def update(self, **kwargs: Any) -> "History": """ Update history metadata information. Some of the attributes that can be modified are documented below. @@ -1262,11 +1350,11 @@ :param tags: Replace history tags with the given list """ # TODO: wouldn't it be better if name and annotation were attributes? - self.gi.gi.histories.update_history(self.id, **kwds) + self.gi.gi.histories.update_history(self.id, **kwargs) self.refresh() return self - def delete(self, purge=False): + def delete(self, purge: bool = False) -> None: """ Delete this history. @@ -1282,7 +1370,7 @@ self.refresh() self.unmap() - def import_dataset(self, lds): + def import_dataset(self, lds: LibraryDataset) -> HistoryDatasetAssociation: """ Import a dataset into the history from a library. @@ -1302,7 +1390,7 @@ self.refresh() return self.get_dataset(res["id"]) - def upload_file(self, path, **kwargs): + def upload_file(self, path: str, **kwargs: Any) -> HistoryDatasetAssociation: """ Upload the file specified by ``path`` to this history. 
@@ -1321,7 +1409,7 @@ upload_dataset = upload_file - def upload_from_ftp(self, path, **kwargs): + def upload_from_ftp(self, path: str, **kwargs: Any) -> HistoryDatasetAssociation: """ Upload the file specified by ``path`` from the user's FTP directory to this history. @@ -1339,7 +1427,7 @@ self.refresh() return self.get_dataset(out_dict["outputs"][0]["id"]) - def paste_content(self, content, **kwargs): + def paste_content(self, content: str, **kwargs: Any) -> HistoryDatasetAssociation: """ Upload a string to a new dataset in this history. @@ -1356,7 +1444,14 @@ self.refresh() return self.get_dataset(out_dict["outputs"][0]["id"]) - def export(self, gzip=True, include_hidden=False, include_deleted=False, wait=False, maxwait=None): + def export( + self, + gzip: bool = True, + include_hidden: bool = False, + include_deleted: bool = False, + wait: bool = False, + maxwait: Optional[int] = None, + ) -> str: """ Start a job to create an export archive for this history. See :meth:`~bioblend.galaxy.histories.HistoryClient.export_history` @@ -1371,7 +1466,7 @@ maxwait=maxwait, ) - def download(self, jeha_id, outf, chunk_size=bioblend.CHUNK_SIZE): + def download(self, jeha_id: str, outf: IO[bytes], chunk_size: int = bioblend.CHUNK_SIZE) -> None: """ Download an export archive for this history. Use :meth:`export` to create an export and get the required ``jeha_id``. See @@ -1380,7 +1475,9 @@ """ return self.gi.gi.histories.download_history(self.id, jeha_id, outf, chunk_size=chunk_size) - def create_dataset_collection(self, collection_description): + def create_dataset_collection( + self, collection_description: bioblend.galaxy.dataset_collections.CollectionDescription + ) -> "HistoryDatasetCollectionAssociation": """ Create a new dataset collection in the history by providing a collection description. @@ -1394,7 +1491,7 @@ self.refresh() return self.get_dataset_collection(dataset_collection["id"]) - def get_dataset_collection(self, dsc_id): + def get_dataset_collection(self, dsc_id: str) -> "HistoryDatasetCollectionAssociation": """ Retrieve the dataset collection corresponding to the given id. @@ -1408,7 +1505,7 @@ return self.DSC_TYPE(dsc_dict, self, gi=self.gi) -class Library(DatasetContainer): +class Library(DatasetContainer[LibraryDataset]): """ Maps to a Galaxy library. """ @@ -1417,15 +1514,17 @@ DS_TYPE = LibraryDataset CONTENT_INFO_TYPE = LibraryContentInfo API_MODULE = "libraries" + description: str + synopsis: str @property - def folder_ids(self): + def folder_ids(self) -> List[str]: """ Return the ids of the contained folders. """ return [_.id for _ in self.content_infos if _.type == "folder"] - def delete(self): + def delete(self) -> None: """ Delete this library. """ @@ -1433,7 +1532,7 @@ self.refresh() self.unmap() - def _pre_upload(self, folder): + def _pre_upload(self, folder: Optional["Folder"]) -> Optional[str]: """ Return the id of the given folder, after sanity checking. """ @@ -1441,7 +1540,7 @@ raise RuntimeError("library is not mapped to a Galaxy object") return None if folder is None else folder.id - def upload_data(self, data, folder=None, **kwargs): + def upload_data(self, data: str, folder: Optional["Folder"] = None, **kwargs: Any) -> LibraryDataset: """ Upload data to this library. @@ -1461,7 +1560,7 @@ self.refresh() return self.get_dataset(res[0]["id"]) - def upload_from_url(self, url, folder=None, **kwargs): + def upload_from_url(self, url: str, folder: Optional["Folder"] = None, **kwargs: Any) -> LibraryDataset: """ Upload data to this library from the given URL. 
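``History.create_dataset_collection`` takes a ``CollectionDescription`` built from the ``bioblend.galaxy.dataset_collections`` helpers. A sketch, assuming ``h`` is a ``History`` wrapper and ``hda1``/``hda2`` two of its datasets (element names are arbitrary)::

    from bioblend.galaxy.dataset_collections import (
        CollectionDescription,
        HistoryDatasetElement,
    )

    description = CollectionDescription(
        name="my list collection",
        type="list",  # the default; other collection types such as "paired" also exist
        elements=[
            HistoryDatasetElement(name="first", id=hda1.id),
            HistoryDatasetElement(name="second", id=hda2.id),
        ],
    )
    hdca = h.create_dataset_collection(description)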
@@ -1475,7 +1574,7 @@ self.refresh() return self.get_dataset(res[0]["id"]) - def upload_from_local(self, path, folder=None, **kwargs): + def upload_from_local(self, path: str, folder: Optional["Folder"] = None, **kwargs: Any) -> LibraryDataset: """ Upload data to this library from a local file. @@ -1489,7 +1588,13 @@ self.refresh() return self.get_dataset(res[0]["id"]) - def upload_from_galaxy_fs(self, paths, folder=None, link_data_only=None, **kwargs): + def upload_from_galaxy_fs( + self, + paths: Union[str, Iterable[str]], + folder: Optional["Folder"] = None, + link_data_only: Literal["copy_files", "link_to_files"] = "copy_files", + **kwargs: Any, + ) -> List[LibraryDataset]: """ Upload data to this library from filesystem paths on the server. @@ -1522,11 +1627,12 @@ raise RuntimeError("upload_from_galaxy_filesystem: no reply") if not isinstance(res, Sequence): raise RuntimeError(f"upload_from_galaxy_filesystem: unexpected reply: {res!r}") - new_datasets = [self.get_dataset(ds_info["id"]) for ds_info in res] self.refresh() - return new_datasets + return [self.get_dataset(ds_info["id"]) for ds_info in res] - def copy_from_dataset(self, hda, folder=None, message=""): + def copy_from_dataset( + self, hda: HistoryDatasetAssociation, folder: Optional["Folder"] = None, message: str = "" + ) -> LibraryDataset: """ Copy a history dataset into this library. @@ -1540,7 +1646,9 @@ self.refresh() return self.get_dataset(res["library_dataset_id"]) - def create_folder(self, name, description=None, base_folder=None): + def create_folder( + self, name: str, description: Optional[str] = None, base_folder: Optional["Folder"] = None + ) -> "Folder": """ Create a folder in this library. @@ -1562,7 +1670,7 @@ self.refresh() return self.get_folder(res[0]["id"]) - def get_folder(self, f_id): + def get_folder(self, f_id: str) -> "Folder": """ Retrieve the folder corresponding to the given id. @@ -1573,7 +1681,7 @@ return Folder(f_dict, self, gi=self.gi) @property - def root_folder(self): + def root_folder(self) -> "Folder": """ The root folder of this library. @@ -1595,10 +1703,12 @@ "name", ) container: Library + description: str gi: "GalaxyInstance" + name: str _cached_parent: Optional["Folder"] - def __init__(self, f_dict, container, gi: "GalaxyInstance"): + def __init__(self, f_dict: Dict[str, Any], container: Library, gi: "GalaxyInstance") -> None: super().__init__(f_dict, gi=gi) object.__setattr__(self, "container", container) @@ -1644,9 +1754,12 @@ "name", "version", ) + gi: "GalaxyInstance" POLLING_INTERVAL = 10 # for output state monitoring - def run(self, inputs, history, wait=False, polling_interval=POLLING_INTERVAL): + def run( + self, inputs: Dict[str, Any], history: History, wait: bool = False, polling_interval: float = POLLING_INTERVAL + ) -> List[HistoryDatasetAssociation]: """ Execute this tool in the given history with inputs from dict ``inputs``. 
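``Tool.run`` accepts ``Dataset`` wrappers directly as input values and converts them to ``{"src": ..., "id": ...}`` dictionaries before calling the tools API, as the hunk below shows. A sketch, assuming ``gi`` is an object-oriented ``GalaxyInstance``; the tool id and parameter names are illustrative and depend on the target Galaxy::

    h = gi.histories.create(name="tool run demo")
    hda = h.upload_file("data.tsv", file_type="tabular")
    tool = gi.tools.get("random_lines1")
    outputs = tool.run(
        {"num_lines": "1", "input": hda},  # the Dataset wrapper is converted automatically
        h,
        wait=True,                         # poll until all outputs leave the pending states
    )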
@@ -1678,7 +1791,7 @@ """ for k, v in inputs.items(): if isinstance(v, Dataset): - inputs[k] = {"src": v.SRC, "id": v.id} + inputs[k] = {"src": v.SRC, "id": v.id} # type: ignore out_dict = self.gi.gi.tools.run_tool(history.id, self.id, inputs) outputs = [history.get_dataset(_["id"]) for _ in out_dict["outputs"]] if wait: @@ -1694,6 +1807,9 @@ BASE_ATTRS = Wrapper.BASE_ATTRS + ("state",) +DatasetContainerPreviewSubtype = TypeVar("DatasetContainerPreviewSubtype", bound="DatasetContainerPreview") + + @abstractclass class DatasetContainerPreview(Wrapper): """ @@ -1767,6 +1883,11 @@ "uuid", "workflow_id", ) + history_id: str + state: str + update_time: str + uuid: str + workflow_id: str class JobPreview(Wrapper): diff -Nru python-bioblend-0.18.0/bioblend/galaxy/quotas/__init__.py python-bioblend-1.0.0/bioblend/galaxy/quotas/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/quotas/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/quotas/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,32 @@ """ Contains possible interactions with the Galaxy Quota """ + +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, +) + +from typing_extensions import Literal + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + +QuotaOperations = Literal["+", "-", "="] + class QuotaClient(Client): module = "quotas" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_quotas(self, deleted=False): + def get_quotas(self, deleted: bool = False) -> List[Dict[str, Any]]: """ Get a list of quotas @@ -32,7 +48,7 @@ """ return self._get(deleted=deleted) - def show_quota(self, quota_id, deleted=False): + def show_quota(self, quota_id: str, deleted: bool = False) -> Dict[str, Any]: """ Display information on a quota @@ -59,7 +75,16 @@ """ return self._get(id=quota_id, deleted=deleted) - def create_quota(self, name, description, amount, operation, default="no", in_users=None, in_groups=None): + def create_quota( + self, + name: str, + description: str, + amount: str, + operation: QuotaOperations, + default: Optional[Literal["no", "registered", "unregistered"]] = "no", + in_users: Optional[List[str]] = None, + in_groups: Optional[List[str]] = None, + ) -> Dict[str, Any]: """ Create a new quota @@ -96,7 +121,7 @@ 'id': '386f14984287a0f7', 'name': 'Testing'} """ - payload = { + payload: Dict[str, Any] = { "name": name, "description": description, "amount": amount, @@ -113,15 +138,16 @@ def update_quota( self, - quota_id, - name=None, - description=None, - amount=None, - operation=None, - default="no", - in_users=None, - in_groups=None, - ): + quota_id: str, + name: Optional[str] = None, + description: Optional[str] = None, + amount: Optional[str] = None, + operation: Optional[QuotaOperations] = None, + default: str = "no", + in_users: Optional[List[str]] = None, + in_groups: Optional[List[str]] = None, + ) -> str: + """ Update an existing quota @@ -162,7 +188,7 @@ "Quota 'Testing-A' has been renamed to 'Testing-B'; Quota 'Testing-e' is now '-100.0 GB'; Quota 'Testing-B' is now the default for unregistered users" """ - payload = {"default": default} + payload: Dict[str, Any] = {"default": default} if name: payload["name"] = name @@ -183,7 +209,7 @@ return self._put(id=quota_id, payload=payload) - def delete_quota(self, quota_id): + def delete_quota(self, quota_id: str) -> str: """ Delete a quota @@ -200,7 +226,7 @@ 
""" return self._delete(id=quota_id) - def undelete_quota(self, quota_id): + def undelete_quota(self, quota_id: str) -> str: """ Undelete a quota diff -Nru python-bioblend-0.18.0/bioblend/galaxy/roles/__init__.py python-bioblend-1.0.0/bioblend/galaxy/roles/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/roles/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/roles/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,27 @@ """ Contains possible interactions with the Galaxy Roles """ +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class RolesClient(Client): module = "roles" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_roles(self): + def get_roles(self) -> List[Dict[str, Any]]: """ Displays a collection (list) of roles. @@ -29,7 +40,7 @@ """ return self._get() - def show_role(self, role_id): + def show_role(self, role_id: str) -> Dict[str, Any]: """ Display information on a single role @@ -49,7 +60,13 @@ """ return self._get(id=role_id) - def create_role(self, role_name, description, user_ids=None, group_ids=None): + def create_role( + self, + role_name: str, + description: str, + user_ids: Optional[List[str]] = None, + group_ids: Optional[List[str]] = None, + ) -> Dict[str, Any]: """ Create a new role. diff -Nru python-bioblend-0.18.0/bioblend/galaxy/tool_data/__init__.py python-bioblend-1.0.0/bioblend/galaxy/tool_data/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/tool_data/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/tool_data/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,26 @@ """ Contains possible interactions with the Galaxy Tool data tables """ +from typing import ( + Any, + Dict, + List, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class ToolDataClient(Client): module = "tool_data" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_data_tables(self): + def get_data_tables(self) -> List[Dict[str, Any]]: """ Get the list of all data tables. @@ -23,7 +33,7 @@ """ return self._get() - def show_data_table(self, data_table_id): + def show_data_table(self, data_table_id: str) -> Dict[str, Any]: """ Get details of a given data table. @@ -45,7 +55,7 @@ """ return self._get(id=data_table_id) - def reload_data_table(self, data_table_id): + def reload_data_table(self, data_table_id: str) -> Dict[str, Any]: """ Reload a data table. @@ -67,7 +77,7 @@ url = self._make_url(data_table_id) + "/reload" return self._get(url=url) - def delete_data_table(self, data_table_id, values): + def delete_data_table(self, data_table_id: str, values: str) -> Dict[str, Any]: """ Delete an item from a data table. diff -Nru python-bioblend-0.18.0/bioblend/galaxy/tool_dependencies/__init__.py python-bioblend-1.0.0/bioblend/galaxy/tool_dependencies/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/tool_dependencies/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/tool_dependencies/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,24 +1,35 @@ """ Contains interactions dealing with Galaxy dependency resolvers. 
""" +from typing import ( + List, + Optional, + TYPE_CHECKING, +) + +from typing_extensions import Literal + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class ToolDependenciesClient(Client): module = "dependency_resolvers" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) def summarize_toolbox( self, - index=None, - tool_ids=None, - resolver_type=None, - include_containers=False, - container_type=None, - index_by="requirements", - ): + index: Optional[int] = None, + tool_ids: Optional[List[str]] = None, + resolver_type: Optional[str] = None, + include_containers: bool = False, + container_type: Optional[str] = None, + index_by: Literal["requirements", "tools"] = "requirements", + ) -> list: """ Summarize requirements across toolbox (for Tool Management grid). diff -Nru python-bioblend-0.18.0/bioblend/galaxy/tools/__init__.py python-bioblend-1.0.0/bioblend/galaxy/tools/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/tools/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/tools/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -4,22 +4,35 @@ import warnings from os.path import basename from typing import ( + Any, + Dict, List, Optional, + TYPE_CHECKING, + Union, ) +from typing_extensions import Literal + from bioblend.galaxy.client import Client from bioblend.galaxyclient import UPLOAD_CHUNK_SIZE from bioblend.util import attach_file +from .inputs import InputsBuilder + +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance class ToolClient(Client): + gi: "GalaxyInstance" module = "tools" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_tools(self, tool_id=None, name=None, trackster=None): + def get_tools( + self, tool_id: Optional[str] = None, name: Optional[str] = None, trackster: Optional[bool] = None + ) -> List[Dict[str, Any]]: """ Get all tools, or select a subset by specifying optional arguments for filtering (e.g. a tool name). @@ -58,7 +71,7 @@ tools = [_ for _ in tools if _["name"] == name] return tools - def get_tool_panel(self): + def get_tool_panel(self) -> List[Dict[str, Any]]: """ Get a list of available tool elements in Galaxy's configured toolbox. @@ -70,13 +83,14 @@ """ return self._raw_get_tool(in_panel=True) - def _raw_get_tool(self, in_panel=None, trackster=None): - params = {} - params["in_panel"] = in_panel - params["trackster"] = trackster + def _raw_get_tool(self, in_panel: Optional[bool] = None, trackster: Optional[bool] = None) -> List[Dict[str, Any]]: + params = { + "in_panel": in_panel, + "trackster": trackster, + } return self._get(params=params) - def requirements(self, tool_id): + def requirements(self, tool_id: str) -> List[Dict[str, Any]]: """ Return the resolver status for a specific tool. @@ -148,7 +162,7 @@ url = self._make_url(tool_id) + "/citations" return self._get(url=url) - def install_dependencies(self, tool_id): + def install_dependencies(self, tool_id: str) -> List[Dict[str, Any]]: """ Install dependencies for a given tool via a resolver. This works only for Conda currently. @@ -156,8 +170,8 @@ :type tool_id: str :param tool_id: id of the requested tool - :rtype: dict - :return: Tool requirement status + :rtype: list of dicts + :return: List of tool requirement status dictionaries .. note:: This method works only if the user is a Galaxy admin. 
@@ -182,7 +196,7 @@ url = self._make_url(tool_id) + "/dependencies" return self._delete(url=url) - def show_tool(self, tool_id, io_details=False, link_details=False): + def show_tool(self, tool_id: str, io_details: bool = False, link_details: bool = False) -> Dict[str, Any]: """ Get details of a given tool. @@ -198,12 +212,19 @@ :rtype: dict :return: Information about the tool's interface """ - params = {} - params["io_details"] = io_details - params["link_details"] = link_details + params = { + "io_details": io_details, + "link_details": link_details, + } return self._get(id=tool_id, params=params) - def build(self, tool_id, inputs=None, tool_version=None, history_id=None): + def build( + self, + tool_id: str, + inputs: Optional[Dict[str, Any]] = None, + tool_version: Optional[str] = None, + history_id: Optional[str] = None, + ) -> Dict[str, Any]: """ This method returns the tool model, which includes an updated input parameter array for the given tool, based on user-defined "inputs". @@ -301,7 +322,7 @@ } """ - params = {} + params: Dict[str, Union[str, Dict]] = {} if inputs: params["inputs"] = inputs @@ -316,7 +337,14 @@ return self._post(payload=params, url=url) - def run_tool(self, history_id, tool_id, tool_inputs, input_format="legacy"): + def run_tool( + self, + history_id: str, + tool_id: str, + tool_inputs: Union[InputsBuilder, dict], + input_format: Literal["21.01", "legacy"] = "legacy", + ) -> Dict[str, Any]: + """ Runs tool specified by ``tool_id`` in history indicated by ``history_id`` with inputs from ``dict`` ``tool_inputs``. @@ -389,13 +417,15 @@ You can also check the examples in `Galaxy's API test suite `_. """ - payload = {} - payload["history_id"] = history_id - payload["tool_id"] = tool_id - payload["input_format"] = input_format - try: + payload: Dict[str, Union[str, Dict]] = { + "history_id": history_id, + "tool_id": tool_id, + "input_format": input_format, + } + + if isinstance(tool_inputs, InputsBuilder): payload["inputs"] = tool_inputs.to_dict() - except AttributeError: + else: payload["inputs"] = tool_inputs return self._post(payload) @@ -406,8 +436,8 @@ storage: Optional[str] = None, metadata: Optional[dict] = None, chunk_size: Optional[int] = UPLOAD_CHUNK_SIZE, - **keywords, - ): + **kwargs: Any, + ) -> Dict[str, Any]: """ Upload the file specified by ``path`` to the history specified by ``history_id``. @@ -454,25 +484,25 @@ :return: Information about the created upload job .. note:: - The following options work only on Galaxy 22.01 or later: ``storage``, - ``metadata``, ``chunk_size``, ``auto_decompress``. + The following parameters work only on Galaxy 22.01 or later: + ``storage``, ``metadata``, ``chunk_size``, ``auto_decompress``. 
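Callers don't need to select the tus code path explicitly; ``upload_file`` checks the server version and falls back to the classic upload tool on older Galaxies. A sketch (URL, key and file name are placeholders)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("https://galaxy.example.org", key="...")
    history = gi.histories.create_history(name="upload demo")
    job = gi.tools.upload_file(
        "reads.fastq",
        history["id"],
        file_type="fastqsanger",
        dbkey="hg38",
    )
    new_dataset_id = job["outputs"][0]["id"]  # id of the dataset being created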
""" if self.gi.config.get_version()["version_major"] >= "22.01": # Use the tus protocol uploader = self.gi.get_tus_uploader(path, storage=storage, metadata=metadata, chunk_size=chunk_size) uploader.upload() - return self.post_to_fetch(path, history_id, uploader.session_id, **keywords) + return self.post_to_fetch(path, history_id, uploader.session_id, **kwargs) else: - if "file_name" not in keywords: - keywords["file_name"] = basename(path) - payload = self._upload_payload(history_id, **keywords) - payload["files_0|file_data"] = attach_file(path, name=keywords["file_name"]) + if "file_name" not in kwargs: + kwargs["file_name"] = basename(path) + payload = self._upload_payload(history_id, **kwargs) + payload["files_0|file_data"] = attach_file(path, name=kwargs["file_name"]) try: return self._post(payload, files_attached=True) finally: payload["files_0|file_data"].close() - def post_to_fetch(self, path: str, history_id: str, session_id: str, **keywords): + def post_to_fetch(self, path: str, history_id: str, session_id: str, **kwargs: Any) -> Dict[str, Any]: """ Make a POST request to the Fetch API after performing a tus upload. @@ -494,11 +524,11 @@ :rtype: dict :return: Information about the created upload job """ - payload = self._fetch_payload(path, history_id, session_id, **keywords) + payload = self._fetch_payload(path, history_id, session_id, **kwargs) url = "/".join((self.gi.url, "tools/fetch")) return self._post(payload, url=url) - def upload_from_ftp(self, path, history_id, **keywords): + def upload_from_ftp(self, path: str, history_id: str, **kwargs: Any) -> Dict[str, Any]: """ Upload the file specified by ``path`` from the user's FTP directory to the history specified by ``history_id``. @@ -514,11 +544,11 @@ :rtype: dict :return: Information about the created upload job """ - payload = self._upload_payload(history_id, **keywords) + payload = self._upload_payload(history_id, **kwargs) payload["files_0|ftp_files"] = path return self._post(payload) - def paste_content(self, content, history_id, **kwds): + def paste_content(self, content: str, history_id: str, **kwargs: Any) -> Dict[str, Any]: """ Upload a string to a new dataset in the history specified by ``history_id``. @@ -535,37 +565,38 @@ See :meth:`upload_file` for the optional parameters. 
""" - payload = self._upload_payload(history_id, **kwds) + payload = self._upload_payload(history_id, **kwargs) payload["files_0|url_paste"] = content return self._post(payload, files_attached=False) put_url = paste_content - def _upload_payload(self, history_id, **keywords): - payload = {} - payload["history_id"] = history_id - payload["tool_id"] = keywords.get("tool_id", "upload1") - tool_input = {} - tool_input["file_type"] = keywords.get("file_type", "auto") - tool_input["dbkey"] = keywords.get("dbkey", "?") - if not keywords.get("to_posix_lines", True): + def _upload_payload(self, history_id: str, **kwargs: Any) -> Dict[str, Any]: + tool_input: Dict[str, Any] = { + "file_type": kwargs.get("file_type", "auto"), + "dbkey": kwargs.get("dbkey", "?"), + "files_0|type": "upload_dataset", + } + if not kwargs.get("to_posix_lines", True): tool_input["files_0|to_posix_lines"] = False - elif keywords.get("space_to_tab", False): + elif kwargs.get("space_to_tab", False): tool_input["files_0|space_to_tab"] = "Yes" - if "file_name" in keywords: - tool_input["files_0|NAME"] = keywords["file_name"] - tool_input["files_0|type"] = "upload_dataset" - payload["inputs"] = tool_input - return payload + if "file_name" in kwargs: + tool_input["files_0|NAME"] = kwargs["file_name"] + return { + "history_id": history_id, + "tool_id": kwargs.get("tool_id", "upload1"), + "inputs": tool_input, + } - def _fetch_payload(self, path: str, history_id: str, session_id: str, **keywords) -> dict: - file_name = keywords.get("file_name", basename(path)) + def _fetch_payload(self, path: str, history_id: str, session_id: str, **kwargs: Any) -> dict: + file_name = kwargs.get("file_name", basename(path)) element = { "src": "files", - "ext": keywords.get("file_type", "auto"), - "dbkey": keywords.get("dbkey", "?"), - "to_posix_lines": keywords.get("to_posix_lines", True), - "space_to_tab": keywords.get("space_to_tab", False), + "ext": kwargs.get("file_type", "auto"), + "dbkey": kwargs.get("dbkey", "?"), + "to_posix_lines": kwargs.get("to_posix_lines", True), + "space_to_tab": kwargs.get("space_to_tab", False), "name": file_name, } payload = { @@ -577,6 +608,6 @@ } ], "files_0|file_data": {"session_id": session_id, "name": file_name}, - "auto_decompress": keywords.get("auto_decompress", False), + "auto_decompress": kwargs.get("auto_decompress", False), } return payload diff -Nru python-bioblend-0.18.0/bioblend/galaxy/tools/inputs.py python-bioblend-1.0.0/bioblend/galaxy/tools/inputs.py --- python-bioblend-0.18.0/bioblend/galaxy/tools/inputs.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/tools/inputs.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,20 +1,31 @@ +from typing import ( + Any, + Dict, + Iterator, + List, + Optional, + Tuple, + Union, +) + + class InputsBuilder: """ """ - def __init__(self): - self._input_dict = {} + def __init__(self) -> None: + self._input_dict: Dict[str, Any] = {} - def set(self, name, input): + def set(self, name: str, input: Any) -> "InputsBuilder": self._input_dict[name] = input return self - def set_param(self, name, value): + def set_param(self, name: str, value: Any) -> "InputsBuilder": return self.set(name, param(value=value)) - def set_dataset_param(self, name, value, src="hda"): + def set_dataset_param(self, name: str, value: str, src: str = "hda") -> "InputsBuilder": return self.set(name, dataset(value, src=src)) - def to_dict(self): + def to_dict(self) -> Dict[str, Any]: values = {} for key, value in self.flat_iter(): if hasattr(value, "value"): @@ -22,7 +33,7 
@@ values[key] = value return values - def flat_iter(self, prefix=None): + def flat_iter(self, prefix: Optional[str] = None) -> Iterator[Tuple[str, Any]]: for key, value in self._input_dict.items(): effective_key = key if prefix is None else f"{prefix}|{key}" if hasattr(value, "flat_iter"): @@ -32,26 +43,26 @@ class RepeatBuilder: - def __init__(self): - self._instances = [] + def __init__(self) -> None: + self._instances: List[InputsBuilder] = [] - def instance(self, inputs): + def instance(self, inputs: InputsBuilder) -> "RepeatBuilder": self._instances.append(inputs) return self - def flat_iter(self, prefix=None): + def flat_iter(self, prefix: str) -> Iterator[Tuple[str, Any]]: for index, instance in enumerate(self._instances): index_prefix = f"{prefix}_{index}" yield from instance.flat_iter(index_prefix) class Param: - def __init__(self, value): + def __init__(self, value: Any) -> None: self.value = value class DatasetParam(Param): - def __init__(self, value, src="hda"): + def __init__(self, value: Union[Dict[str, str], str], src: str = "hda") -> None: if not isinstance(value, dict): value = dict(src=src, id=value) super().__init__(value) diff -Nru python-bioblend-0.18.0/bioblend/galaxy/toolshed/__init__.py python-bioblend-1.0.0/bioblend/galaxy/toolshed/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/toolshed/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/toolshed/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,28 @@ """ Interaction with a Galaxy Tool Shed. """ + +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class ToolShedClient(Client): module = "tool_shed_repositories" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_repositories(self): + def get_repositories(self) -> List[Dict[str, Any]]: """ Get the list of all installed Tool Shed repositories on this Galaxy instance. @@ -35,7 +47,7 @@ """ return self._get() - def show_repository(self, toolShed_id): + def show_repository(self, toolShed_id: str) -> Dict[str, Any]: """ Get details of a given Tool Shed repository as it is installed on this Galaxy instance. @@ -61,16 +73,16 @@ def install_repository_revision( self, - tool_shed_url, - name, - owner, - changeset_revision, - install_tool_dependencies=False, - install_repository_dependencies=False, - install_resolver_dependencies=False, - tool_panel_section_id=None, - new_tool_panel_section_label=None, - ): + tool_shed_url: str, + name: str, + owner: str, + changeset_revision: str, + install_tool_dependencies: bool = False, + install_repository_dependencies: bool = False, + install_resolver_dependencies: bool = False, + tool_panel_section_id: Optional[str] = None, + new_tool_panel_section_label: Optional[str] = None, + ) -> Dict[str, Any]: """ Install a specified repository revision from a specified Tool Shed into this Galaxy instance. This example demonstrates installation of a repository @@ -132,7 +144,7 @@ that should be created and the repository installed into. 
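Stepping back to the ``bioblend.galaxy.tools.inputs`` helpers shown a little earlier: they flatten nested tool state into the ``parent|child`` / ``repeat_0|param`` keys expected by the legacy input format, and ``run_tool`` now detects ``InputsBuilder`` instances explicitly instead of duck-typing on ``to_dict``. A sketch; the parameter names and dataset id are invented::

    from bioblend.galaxy.tools.inputs import inputs, repeat

    tool_inputs = (
        inputs()
        .set_param("num_lines", "1")
        .set_dataset_param("input", "f2db41e1fa331b3e")  # encoded dataset id (placeholder)
        .set("queries", repeat().instance(inputs().set_param("column", "2")))
    )
    print(tool_inputs.to_dict())
    # {'num_lines': '1',
    #  'input': {'src': 'hda', 'id': 'f2db41e1fa331b3e'},
    #  'queries_0|column': '2'}

The builder can also be passed directly as ``tool_inputs`` to ``ToolClient.run_tool``, which calls ``to_dict()`` itself.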
""" - payload = {} + payload: Dict[str, Any] = {} payload["tool_shed_url"] = tool_shed_url payload["name"] = name payload["owner"] = owner @@ -148,7 +160,9 @@ url = self._make_url() + "/new/install_repository_revision" return self._post(url=url, payload=payload) - def uninstall_repository_revision(self, name, owner, changeset_revision, tool_shed_url, remove_from_disk=True): + def uninstall_repository_revision( + self, name: str, owner: str, changeset_revision: str, tool_shed_url: str, remove_from_disk: bool = True + ) -> Dict[str, Any]: """ Uninstalls a specified repository revision from this Galaxy instance. @@ -172,7 +186,7 @@ :rtype: dict :return: If successful, a dictionary with a message noting the removal """ - payload = { + payload: Dict[str, Any] = { "tool_shed_url": tool_shed_url, "name": name, "owner": owner, diff -Nru python-bioblend-0.18.0/bioblend/galaxy/users/__init__.py python-bioblend-1.0.0/bioblend/galaxy/users/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/users/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/users/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -3,16 +3,33 @@ Most of these methods must be executed by a registered Galaxy admin user. """ +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class UserClient(Client): module = "users" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_users(self, deleted=False, f_email=None, f_name=None, f_any=None): + def get_users( + self, + deleted: bool = False, + f_email: Optional[str] = None, + f_name: Optional[str] = None, + f_any: Optional[str] = None, + ) -> List[Dict[str, Any]]: """ Get a list of all registered users. If ``deleted`` is set to ``True``, get a list of deleted users. @@ -20,7 +37,6 @@ :type deleted: bool :param deleted: Whether to include deleted users - :type f_email: str :param f_email: filter for user emails. The filter will be active for non-admin users only if the Galaxy instance has the @@ -48,7 +64,7 @@ 'url': '/api/users/dda47097d9189f15'}] """ - params = {} + params: Dict[str, Any] = {} if f_email: params["f_email"] = f_email if f_name: @@ -57,7 +73,7 @@ params["f_any"] = f_any return self._get(deleted=deleted, params=params) - def show_user(self, user_id, deleted=False): + def show_user(self, user_id: str, deleted: bool = False) -> Dict[str, Any]: """ Display information about a user. @@ -72,7 +88,7 @@ """ return self._get(id=user_id, deleted=deleted) - def create_remote_user(self, user_email): + def create_remote_user(self, user_email: str) -> Dict[str, Any]: """ Create a new Galaxy remote user. @@ -90,11 +106,12 @@ :rtype: dict :return: a dictionary containing information about the created user """ - payload = {} - payload["remote_user_email"] = user_email + payload = { + "remote_user_email": user_email, + } return self._post(payload) - def create_local_user(self, username, user_email, password): + def create_local_user(self, username: str, user_email: str, password: str) -> Dict[str, Any]: """ Create a new Galaxy local user. 
@@ -116,13 +133,14 @@ :rtype: dict :return: a dictionary containing information about the created user """ - payload = {} - payload["username"] = username - payload["email"] = user_email - payload["password"] = password + payload = { + "username": username, + "email": user_email, + "password": password, + } return self._post(payload) - def get_current_user(self): + def get_current_user(self) -> Dict[str, Any]: """ Display information about the user associated with this Galaxy connection. @@ -133,7 +151,7 @@ url = self._make_url() + "/current" return self._get(url=url) - def create_user_apikey(self, user_id): + def create_user_apikey(self, user_id: str) -> str: """ Create a new API key for a given user. @@ -144,11 +162,12 @@ :return: the API key for the user """ url = self._make_url(user_id) + "/api_key" - payload = {} - payload["user_id"] = user_id + payload = { + "user_id": user_id, + } return self._post(payload, url=url) - def delete_user(self, user_id, purge=False): + def delete_user(self, user_id: str, purge: bool = False) -> Dict[str, Any]: """ Delete a user. @@ -171,7 +190,7 @@ params["purge"] = purge return self._delete(id=user_id, params=params) - def get_user_apikey(self, user_id): + def get_user_apikey(self, user_id: str) -> str: """ Get the current API key for a given user. @@ -179,12 +198,30 @@ :param user_id: encoded user ID :rtype: str - :return: the API key for the user + :return: the API key for the user, or 'Not available.' if it doesn't + exist yet. """ url = self._make_url(user_id) + "/api_key/inputs" return self._get(url=url)["inputs"][0]["value"] - def update_user(self, user_id, **kwds): + def get_or_create_user_apikey(self, user_id: str) -> str: + """ + Get the current API key for a given user, creating one if it doesn't + exist yet. + + :type user_id: str + :param user_id: encoded user ID + + :rtype: str + :return: the API key for the user + + .. note:: + This method works only on Galaxy 21.01 or later. + """ + url = self._make_url(user_id) + "/api_key" + return self._get(url=url) + + def update_user(self, user_id: str, **kwargs: Any) -> Dict[str, Any]: """ Update user information. Some of the attributes that can be modified are documented below. @@ -202,4 +239,4 @@ :return: details of the updated user """ url = self._make_url(user_id) + "/information/inputs" - return self._put(url=url, payload=kwds, id=user_id) + return self._put(url=url, payload=kwargs, id=user_id) diff -Nru python-bioblend-0.18.0/bioblend/galaxy/visual/__init__.py python-bioblend-1.0.0/bioblend/galaxy/visual/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/visual/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/visual/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,26 @@ """ Contains possible interactions with the Galaxy visualization """ +from typing import ( + Any, + Dict, + List, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + class VisualClient(Client): module = "visualizations" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) - def get_visualizations(self): + def get_visualizations(self) -> List[Dict[str, Any]]: """ Get the list of all visualizations. @@ -31,7 +41,7 @@ """ return self._get() - def show_visualization(self, visual_id): + def show_visualization(self, visual_id: str) -> Dict[str, Any]: """ Get details of a given visualization. 
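The new ``get_or_create_user_apikey`` rounds out the admin user-management calls above. A sketch; the credentials are placeholders::

    # assuming gi is a bioblend.galaxy.GalaxyInstance created with an admin key
    new_user = gi.users.create_local_user("alice", "alice@example.org", "a-strong-password")
    api_key = gi.users.get_or_create_user_apikey(new_user["id"])  # requires Galaxy 21.01+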
diff -Nru python-bioblend-0.18.0/bioblend/galaxy/workflows/__init__.py python-bioblend-1.0.0/bioblend/galaxy/workflows/__init__.py --- python-bioblend-0.18.0/bioblend/galaxy/workflows/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxy/workflows/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -7,20 +7,31 @@ from typing import ( Any, Dict, + List, Optional, + TYPE_CHECKING, ) +from typing_extensions import Literal + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.galaxy import GalaxyInstance + +InputsBy = Literal["step_index|step_uuid", "step_index", "step_id", "step_uuid", "name"] # type: ignore[name-defined] + class WorkflowClient(Client): module = "workflows" - def __init__(self, galaxy_instance): + def __init__(self, galaxy_instance: "GalaxyInstance") -> None: super().__init__(galaxy_instance) # the 'deleted' option is not available for workflows - def get_workflows(self, workflow_id=None, name=None, published=False): + def get_workflows( + self, workflow_id: Optional[str] = None, name: Optional[str] = None, published: bool = False + ) -> List[Dict[str, Any]]: """ Get all workflows, or select a subset by specifying optional arguments for filtering (e.g. a workflow name). @@ -54,7 +65,7 @@ ) if workflow_id is not None and name is not None: raise ValueError("Provide only one argument between name or workflow_id, but not both") - params = {} + params: Dict[str, Any] = {} if published: params["show_published"] = True workflows = self._get(params=params) @@ -65,7 +76,7 @@ workflows = [_ for _ in workflows if _["name"] == name] return workflows - def show_workflow(self, workflow_id, version=None): + def show_workflow(self, workflow_id: str, version: Optional[int] = None) -> Dict[str, Any]: """ Display information needed to run a workflow. @@ -84,13 +95,13 @@ 'name': 'Simple', 'url': '/api/workflows/92c56938c2f9b315'} """ - params = {} + params: Dict[str, Any] = {} if version is not None: params["version"] = version return self._get(id=workflow_id, params=params) - def get_workflow_inputs(self, workflow_id, label): + def get_workflow_inputs(self, workflow_id: str, label: str) -> List[str]: """ Get a list of workflow input IDs that match the given label. If no input matches the given label, an empty list is returned. @@ -108,7 +119,7 @@ inputs = wf["inputs"] return [id for id in inputs if inputs[id]["label"] == label] - def import_workflow_dict(self, workflow_dict, publish=False): + def import_workflow_dict(self, workflow_dict: Dict[str, Any], publish: bool = False) -> Dict[str, Any]: """ Imports a new workflow given a dictionary representing a previously exported workflow. @@ -140,7 +151,7 @@ url = self._make_url() + "/upload" return self._post(url=url, payload=payload) - def import_workflow_from_local_path(self, file_local_path, publish=False): + def import_workflow_from_local_path(self, file_local_path: str, publish: bool = False) -> Dict[str, Any]: """ Imports a new workflow given the path to a file containing a previously exported workflow. @@ -173,7 +184,7 @@ return self.import_workflow_dict(workflow_json, publish) - def import_shared_workflow(self, workflow_id): + def import_shared_workflow(self, workflow_id: str) -> Dict[str, Any]: """ Imports a new workflow from the shared published workflows. 
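The import/export helpers below round-trip a workflow's JSON representation. A sketch; the file paths are placeholders::

    # assuming gi is a bioblend.galaxy.GalaxyInstance
    wf = gi.workflows.import_workflow_from_local_path("my_workflow.ga", publish=False)
    gi.workflows.export_workflow_to_local_path(wf["id"], "/tmp/exported", use_default_filename=True)
    print([w["name"] for w in gi.workflows.get_workflows()])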
@@ -195,7 +206,7 @@ url = self._make_url() return self._post(url=url, payload=payload) - def export_workflow_dict(self, workflow_id, version=None): + def export_workflow_dict(self, workflow_id: str, version: Optional[int] = None) -> Dict[str, Any]: """ Exports a workflow. @@ -208,14 +219,16 @@ :rtype: dict :return: Dictionary representing the requested workflow """ - params = {} + params: Dict[str, Any] = {} if version is not None: params["version"] = version url = "/".join((self._make_url(), "download", workflow_id)) return self._get(url=url, params=params) - def export_workflow_to_local_path(self, workflow_id, file_local_path, use_default_filename=True): + def export_workflow_to_local_path( + self, workflow_id: str, file_local_path: str, use_default_filename: bool = True + ) -> None: """ Exports a workflow in JSON format to a given local path. @@ -244,7 +257,7 @@ with open(file_local_path, "w") as fp: json.dump(workflow_dict, fp) - def update_workflow(self, workflow_id, **kwds): + def update_workflow(self, workflow_id: str, **kwargs: Any) -> Dict[str, Any]: """ Update a given workflow. @@ -272,7 +285,7 @@ :rtype: dict :return: Dictionary representing the updated workflow """ - return self._put(payload=kwds, id=workflow_id) + return self._put(payload=kwargs, id=workflow_id) def invoke_workflow( self, @@ -284,9 +297,10 @@ import_inputs_to_history: bool = False, replacement_params: Optional[dict] = None, allow_tool_state_corrections: bool = False, - inputs_by: Optional[str] = None, + inputs_by: Optional[InputsBy] = None, parameters_normalized: bool = False, - ) -> dict: + require_exact_tool_versions: bool = True, + ) -> Dict[str, Any]: """ Invoke the workflow identified by ``workflow_id``. This will cause a workflow to be scheduled and return an object describing @@ -351,6 +365,11 @@ ``False``, but when setting ``params`` for a subworkflow, ``True`` is required. + :type require_exact_tool_versions: bool + :param require_exact_tool_versions: Whether invocation should fail if + Galaxy does not have the exact tool versions. Default is ``True``. + Parameter does not any effect for Galaxy versions < 22.05. + :rtype: dict :return: A dict containing the workflow invocation describing the scheduling of the workflow. For example:: @@ -479,12 +498,13 @@ payload["allow_tool_state_corrections"] = allow_tool_state_corrections if inputs_by is not None: payload["inputs_by"] = inputs_by + payload["require_exact_tool_versions"] = require_exact_tool_versions if parameters_normalized: payload["parameters_normalized"] = parameters_normalized url = self._invocations_url(workflow_id) return self._post(payload, url=url) - def show_invocation(self, workflow_id, invocation_id): + def show_invocation(self, workflow_id: str, invocation_id: str) -> Dict[str, Any]: """ Get a workflow invocation object representing the scheduling of a workflow. This object may be sparse at first (missing inputs and @@ -535,7 +555,7 @@ url = self._invocation_url(workflow_id, invocation_id) return self._get(url=url) - def get_invocations(self, workflow_id): + def get_invocations(self, workflow_id: str) -> List[Dict[str, Any]]: """ Get a list containing all the workflow invocations corresponding to the specified workflow. @@ -560,7 +580,7 @@ url = self._invocations_url(workflow_id) return self._get(url=url) - def cancel_invocation(self, workflow_id, invocation_id): + def cancel_invocation(self, workflow_id: str, invocation_id: str) -> Dict[str, Any]: """ Cancel the scheduling of a workflow. 
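``invoke_workflow`` is the scheduling entry point the rest of the invocation methods build on. A sketch showing dataset inputs keyed by step index and the new ``require_exact_tool_versions`` flag; all ids are placeholders::

    workflow_id = "92c56938c2f9b315"
    invocation = gi.workflows.invoke_workflow(
        workflow_id,
        inputs={"0": {"src": "hda", "id": "29beef4fadeed09f"}},
        history_name="invocation demo",
        require_exact_tool_versions=False,  # tolerate tool version drift (honoured on Galaxy >= 22.05)
    )
    details = gi.workflows.show_invocation(workflow_id, invocation["id"])
    print(details["state"])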
@@ -576,7 +596,7 @@ url = self._invocation_url(workflow_id, invocation_id) return self._delete(url=url) - def show_invocation_step(self, workflow_id, invocation_id, step_id): + def show_invocation_step(self, workflow_id: str, invocation_id: str, step_id: str) -> Dict[str, Any]: """ See the details of a particular workflow invocation step. @@ -607,7 +627,9 @@ url = self._invocation_step_url(workflow_id, invocation_id, step_id) return self._get(url=url) - def run_invocation_step_action(self, workflow_id, invocation_id, step_id, action): + def run_invocation_step_action( + self, workflow_id: str, invocation_id: str, step_id: str, action: Any + ) -> Dict[str, Any]: """Execute an action for an active workflow invocation step. The nature of this action and what is expected will vary based on the the type of workflow step (the only currently valid action is True/False @@ -633,7 +655,7 @@ payload = {"action": action} return self._put(payload=payload, url=url) - def delete_workflow(self, workflow_id): + def delete_workflow(self, workflow_id: str) -> str: """ Delete a workflow identified by `workflow_id`. @@ -649,7 +671,9 @@ """ return self._delete(id=workflow_id) - def refactor_workflow(self, workflow_id, actions, dry_run=False): + def refactor_workflow( + self, workflow_id: str, actions: List[Dict[str, Any]], dry_run: bool = False + ) -> Dict[str, Any]: """ Refactor workflow with given actions. @@ -691,8 +715,13 @@ return self._put(payload=payload, url=url) def extract_workflow_from_history( - self, history_id, workflow_name, job_ids=None, dataset_hids=None, dataset_collection_hids=None - ): + self, + history_id: str, + workflow_name: str, + job_ids: Optional[List[str]] = None, + dataset_hids: Optional[List[str]] = None, + dataset_collection_hids: Optional[List[str]] = None, + ) -> Dict[str, Any]: """ Extract a workflow from a history. @@ -725,7 +754,7 @@ } return self._post(payload=payload) - def show_versions(self, workflow_id): + def show_versions(self, workflow_id: str) -> List[Dict[str, Any]]: """ Get versions for a workflow. 
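``extract_workflow_from_history`` rebuilds a workflow from jobs already executed in a history. A sketch; the history id and hids are placeholders::

    new_wf = gi.workflows.extract_workflow_from_history(
        history_id="f597429621d6eb2b",
        workflow_name="extracted from history",
        dataset_hids=["1", "2"],  # history datasets to expose as workflow inputs
    )
    print(new_wf)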
@@ -738,13 +767,13 @@ url = self._make_url(workflow_id) + "/versions" return self._get(url=url) - def _invocation_step_url(self, workflow_id, invocation_id, step_id): + def _invocation_step_url(self, workflow_id: str, invocation_id: str, step_id: str) -> str: return "/".join((self._invocation_url(workflow_id, invocation_id), "steps", step_id)) - def _invocation_url(self, workflow_id, invocation_id): + def _invocation_url(self, workflow_id: str, invocation_id: str) -> str: return "/".join((self._invocations_url(workflow_id), invocation_id)) - def _invocations_url(self, workflow_id): + def _invocations_url(self, workflow_id: str) -> str: return "/".join((self._make_url(workflow_id), "invocations")) diff -Nru python-bioblend-0.18.0/bioblend/galaxyclient.py python-bioblend-1.0.0/bioblend/galaxyclient.py --- python-bioblend-0.18.0/bioblend/galaxyclient.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/galaxyclient.py 2022-10-13 18:17:37.000000000 +0000 @@ -9,7 +9,10 @@ import contextlib import json import logging -from typing import Optional +from typing import ( + Any, + Optional, +) from urllib.parse import urljoin import requests @@ -28,7 +31,15 @@ class GalaxyClient: - def __init__(self, url, key=None, email=None, password=None, verify=True, timeout=None): + def __init__( + self, + url: str, + key: Optional[str] = None, + email: Optional[str] = None, + password: Optional[str] = None, + verify: bool = True, + timeout: Optional[float] = None, + ) -> None: """ :param verify: Whether to verify the server's TLS certificate :type verify: bool @@ -61,16 +72,55 @@ # If key has been supplied, use it; otherwise just set email and # password and grab user's key before first request. if key: - self._key = key + self._key: Optional[str] = key else: self._key = None self.email = email self.password = password - self.json_headers = {"Content-Type": "application/json"} + self.json_headers: dict = {"Content-Type": "application/json"} # json_headers needs to be set before key can be defined, otherwise authentication with email/password causes an error self.json_headers["x-api-key"] = self.key + # Number of attempts before giving up on a GET request. + self._max_get_attempts = 1 + # Delay in seconds between subsequent retries. + self._get_retry_delay = 10.0 + + @property + def max_get_attempts(self) -> int: + """ + The maximum number of attempts for a GET request. Default: 1 + """ + return self._max_get_attempts + + @max_get_attempts.setter + def max_get_attempts(self, value: int) -> None: + """ + Set the maximum number of attempts for GET requests. A value greater + than one causes failed GET requests to be retried `value` - 1 times. + """ + if value < 1: + raise ValueError(f"Number of attempts must be >= 1 (got: {value})") + self._max_get_attempts = value + + @property + def get_retry_delay(self) -> float: + """ + The delay (in seconds) to wait before retrying a failed GET request. + Default: 10.0 + """ + return self._get_retry_delay - def make_get_request(self, url, **kwargs): + @get_retry_delay.setter + def get_retry_delay(self, value: float) -> None: + """ + Set the delay (in seconds) to wait before retrying a failed GET + request. + """ + if value < 0: + raise ValueError(f"Retry delay must be >= 0 (got: {value})") + self._get_retry_delay = value + + def make_get_request(self, url: str, **kwargs: Any) -> requests.Response: """ Make a GET request using the provided ``url``. @@ -78,11 +128,6 @@ If ``verify`` is not provided, ``self.verify`` will be used. 
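The new ``max_get_attempts`` / ``get_retry_delay`` properties make failed GET requests retryable on a per-client basis. A sketch; URL and key are placeholders::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("https://galaxy.example.org", key="...")
    gi.max_get_attempts = 3   # retry each failed GET up to 2 more times
    gi.get_retry_delay = 5.0  # wait 5 seconds between attempts
    histories = gi.histories.get_histories()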
- If the ``params`` are not provided, use ``default_params`` class field. - If params are provided and the provided dict does not have ``key`` key, - the default ``self.key`` value will be included in what's passed to - the server via the request. - :rtype: requests.Response :return: the response object. """ @@ -92,22 +137,19 @@ r = requests.get(url, headers=headers, **kwargs) return r - def make_post_request(self, url, payload=None, params=None, files_attached=False): + def make_post_request( + self, url: str, payload: Optional[dict] = None, params: Optional[dict] = None, files_attached: bool = False + ) -> Any: """ Make a POST request using the provided ``url`` and ``payload``. The ``payload`` must be a dict that contains the request values. The payload dict may contain file handles (in which case the files_attached flag must be set to true). - If the ``params`` are not provided, use ``default_params`` class field. - If params are provided and the provided dict does not have ``key`` key, - the default ``self.key`` value will be included in what's passed to - the server via the request. - :return: The decoded response. """ - def my_dumps(d): + def my_dumps(d: dict) -> dict: """ Apply ``json.dumps()`` to the values of the dict ``d`` if they are not of type ``FileStream``. @@ -121,23 +163,22 @@ # leveraging the requests-toolbelt library if any files have # been attached. if files_attached: + payload_copy = payload.copy() if payload is not None else {} if params: - payload.update(params) - payload = my_dumps(payload) - payload = MultipartEncoder(fields=payload) + payload_copy.update(params) + data = MultipartEncoder(fields=my_dumps(payload_copy)) headers = self.json_headers.copy() - headers["Content-Type"] = payload.content_type + headers["Content-Type"] = data.content_type post_params = None else: - if payload is not None: - payload = json.dumps(payload) + data = json.dumps(payload) if payload is not None else None headers = self.json_headers post_params = params r = requests.post( url, params=post_params, - data=payload, + data=data, headers=headers, timeout=self.timeout, allow_redirects=False, @@ -159,29 +200,25 @@ status_code=r.status_code, ) - def make_delete_request(self, url, payload=None, params=None): + def make_delete_request( + self, url: str, payload: Optional[dict] = None, params: Optional[dict] = None + ) -> requests.Response: """ Make a DELETE request using the provided ``url`` and the optional arguments. - If the ``params`` are not provided, use ``default_params`` class field. - If params are provided and the provided dict does not have ``key`` key, - the default ``self.key`` value will be included in what's passed to - the server via the request. - :type payload: dict :param payload: a JSON-serializable dictionary :rtype: requests.Response :return: the response object. """ - if payload is not None: - payload = json.dumps(payload) + data = json.dumps(payload) if payload is not None else None headers = self.json_headers r = requests.delete( url, params=params, - data=payload, + data=data, headers=headers, timeout=self.timeout, allow_redirects=False, @@ -189,7 +226,7 @@ ) return r - def make_put_request(self, url, payload=None, params=None): + def make_put_request(self, url: str, payload: Optional[dict] = None, params: Optional[dict] = None) -> Any: """ Make a PUT request using the provided ``url`` with required payload. @@ -198,13 +235,12 @@ :return: The decoded response. 
""" - if payload is not None: - payload = json.dumps(payload) + data = json.dumps(payload) if payload is not None else None headers = self.json_headers r = requests.put( url, params=params, - data=payload, + data=data, headers=headers, timeout=self.timeout, allow_redirects=False, @@ -226,7 +262,7 @@ status_code=r.status_code, ) - def make_patch_request(self, url, payload=None, params=None): + def make_patch_request(self, url: str, payload: Optional[dict] = None, params: Optional[dict] = None) -> Any: """ Make a PATCH request using the provided ``url`` with required payload. @@ -235,13 +271,12 @@ :return: The decoded response. """ - if payload is not None: - payload = json.dumps(payload) + data = json.dumps(payload) if payload is not None else None headers = self.json_headers r = requests.patch( url, params=params, - data=payload, + data=data, headers=headers, timeout=self.timeout, allow_redirects=False, @@ -266,7 +301,7 @@ def get_tus_uploader( self, path: str, - url: Optional[str] = "/upload/resumable_upload", + url: str = "/upload/resumable_upload", storage: Optional[str] = None, metadata: Optional[dict] = None, chunk_size: Optional[int] = UPLOAD_CHUNK_SIZE, @@ -312,15 +347,15 @@ ) @property - def key(self): + def key(self) -> Optional[str]: if not self._key and self.email is not None and self.password is not None: unencoded_credentials = f"{self.email}:{self.password}" authorization = base64.b64encode(unencoded_credentials.encode()) headers = self.json_headers.copy() headers["Authorization"] = authorization auth_url = f"{self.url}/authenticate/baseauth" - # make_post_request uses default_params, which uses this and - # sets wrong headers - so using lower level method. + # Use lower level method instead of make_get_request() because we + # need the additional Authorization header. r = requests.get( auth_url, headers=headers, @@ -337,7 +372,7 @@ return self._key -def _tus_uploader_session_id(self) -> str: +def _tus_uploader_session_id(self: tusclient.uploader.Uploader) -> str: return self.url.rsplit("/", 1)[1] diff -Nru python-bioblend-0.18.0/bioblend/__init__.py python-bioblend-1.0.0/bioblend/__init__.py --- python-bioblend-0.18.0/bioblend/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,6 +1,11 @@ import contextlib import logging +import logging.config import os +from typing import ( + Optional, + Union, +) from bioblend.config import ( BioBlendConfigLocations, @@ -8,7 +13,7 @@ ) # Current version of the library -__version__ = "0.18.0" +__version__ = "1.0.0" # default chunk size (in bytes) for reading remote data try: @@ -22,14 +27,14 @@ config = Config() -def get_version(): +def get_version() -> str: """ Returns a string with the current version of the library (e.g., "0.2.0") """ return __version__ -def init_logging(): +def init_logging() -> None: """ Initialize BioBlend's logging from a configuration file. 
""" @@ -39,7 +44,7 @@ class NullHandler(logging.Handler): - def emit(self, record): + def emit(self, record: logging.LogRecord) -> None: pass @@ -59,7 +64,9 @@ # bioblend.set_stream_logger(__name__) -def set_file_logger(name, filepath, level=logging.INFO, format_string=None): +def set_file_logger( + name: str, filepath: str, level: Union[int, str] = logging.INFO, format_string: Optional[str] = None +) -> None: global log if not format_string: format_string = default_format_string @@ -73,7 +80,7 @@ log = logger -def set_stream_logger(name, level=logging.DEBUG, format_string=None): +def set_stream_logger(name: str, level: Union[int, str] = logging.DEBUG, format_string: Optional[str] = None) -> None: global log if not format_string: format_string = default_format_string @@ -96,13 +103,15 @@ @see: body attribute to see the content of the http response """ - def __init__(self, message, body=None, status_code=None): + def __init__( + self, message: str, body: Optional[Union[bytes, str]] = None, status_code: Optional[int] = None + ) -> None: super().__init__(message) self.body = body self.status_code = status_code - def __str__(self): - return f"{self.args[0]}: {self.body}" + def __str__(self) -> str: + return f"{self.args[0]}: {self.body!s}" class TimeoutException(Exception): diff -Nru python-bioblend-0.18.0/bioblend/_tests/CloudmanTestBase.py python-bioblend-1.0.0/bioblend/_tests/CloudmanTestBase.py --- python-bioblend-0.18.0/bioblend/_tests/CloudmanTestBase.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/CloudmanTestBase.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,68 +0,0 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" -import contextlib -import os -import unittest - -from bioblend.util import Bunch -from . import test_util - - -class CloudmanTestBase(unittest.TestCase): - @classmethod - @test_util.skip_unless_cloudman() - def setUpClass(cls): - if os.environ.get("BIOBLEND_CLOUD_TYPE") == "EC2": - cls.access_key = os.environ["BIOBLEND_ACCESS_KEY"] - cls.secret_key = os.environ["BIOBLEND_SECRET_KEY"] - cls.cluster_name = "Blend CloudMan" - cls.ami_id = os.environ["BIOBLEND_AMI_ID"] - cls.instance_type = "m1.small" - cls.password = "password" - cls.cloud_metadata = Bunch( - id="1", # for compatibility w/ DB representation - name="Amazon", - cloud_type="ec2", - bucket_default="cloudman", - region_name="us-east-1", - region_endpoint="ec2.amazonaws.com", - ec2_port="", - ec2_conn_path="/", - cidr_range="", - is_secure=True, - s3_host="s3.amazonaws.com", - s3_port="", - s3_conn_path="/", - ) - else: - # Assume OpenStack/NeCTAR - cls.access_key = os.environ["BIOBLEND_ACCESS_KEY"] - cls.secret_key = os.environ["BIOBLEND_SECRET_KEY"] - cls.cloud_metadata = Bunch( - id="-1", - name="NeCTAR", - cloud_type="openstack", - bucket_default="cloudman-os", - region_name="melbourne", - region_endpoint="nova.rc.nectar.org.au", - ec2_port=8773, - ec2_conn_path="/services/Cloud", - cidr_range="115.146.92.0/22", - is_secure=True, - s3_host="swift.rc.nectar.org.au", - s3_port=8888, - s3_conn_path="/", - ) - cls.cluster_name = "Blend CloudMan" - cls.ami_id = os.environ["BIOBLEND_AMI_ID"] - cls.instance_type = "m1.small" - cls.password = "password" - - @classmethod - @test_util.skip_unless_cloudman() - def tearDownClass(cls): - with contextlib.suppress(Exception): - # TODO: cloudman's terminate method has a bug. 
Needs fix - cls.cmi.terminate(delete_cluster=True) diff -Nru python-bioblend-0.18.0/bioblend/_tests/GalaxyTestBase.py python-bioblend-1.0.0/bioblend/_tests/GalaxyTestBase.py --- python-bioblend-0.18.0/bioblend/_tests/GalaxyTestBase.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/GalaxyTestBase.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,8 +1,14 @@ import os import unittest +from typing import ( + Any, + Dict, +) + +from typing_extensions import Literal import bioblend -import bioblend.galaxy +from bioblend.galaxy import GalaxyInstance from . import test_util bioblend.set_stream_logger("test", level="INFO") @@ -12,15 +18,36 @@ @test_util.skip_unless_galaxy() class GalaxyTestBase(unittest.TestCase): - def setUp(self): + gi: GalaxyInstance + + @classmethod + def setUpClass(cls): galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"] galaxy_url = os.environ["BIOBLEND_GALAXY_URL"] - self.gi = bioblend.galaxy.GalaxyInstance(url=galaxy_url, key=galaxy_key) + cls.gi = GalaxyInstance(url=galaxy_url, key=galaxy_key) - def _test_dataset(self, history_id, contents="1\t2\t3", **kwds): - tool_output = self.gi.tools.paste_content(contents, history_id, **kwds) + def _test_dataset(self, history_id: str, contents: str = "1\t2\t3", **kwargs: Any) -> str: + tool_output = self.gi.tools.paste_content(contents, history_id, **kwargs) return tool_output["outputs"][0]["id"] - def _wait_and_verify_dataset(self, dataset_id, expected_contents, timeout_seconds=BIOBLEND_TEST_JOB_TIMEOUT): + def _wait_and_verify_dataset( + self, dataset_id: str, expected_contents: bytes, timeout_seconds: float = BIOBLEND_TEST_JOB_TIMEOUT + ) -> None: dataset_contents = self.gi.datasets.download_dataset(dataset_id, maxwait=timeout_seconds) - self.assertEqual(dataset_contents, expected_contents) + assert dataset_contents == expected_contents + + def _run_random_lines1( + self, history_id: str, dataset_id: str, input_format: Literal["21.01", "legacy"] = "legacy" + ) -> Dict[str, Any]: + tool_inputs = { + "num_lines": "1", + "input": {"src": "hda", "id": dataset_id}, + } + if input_format == "21.01": + tool_inputs.update({"seed_source": {"seed_source_selector": "set_seed", "seed": "asdf"}}) + else: + # legacy format + tool_inputs.update({"seed_source|seed_source_selector": "set_seed", "seed_source|seed": "asdf"}) + return self.gi.tools.run_tool( + history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs, input_format=input_format + ) diff -Nru python-bioblend-0.18.0/bioblend/_tests/pytest_galaxy_test_wrapper.py python-bioblend-1.0.0/bioblend/_tests/pytest_galaxy_test_wrapper.py --- python-bioblend-0.18.0/bioblend/_tests/pytest_galaxy_test_wrapper.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/pytest_galaxy_test_wrapper.py 2022-10-13 18:17:37.000000000 +0000 @@ -6,11 +6,16 @@ """ import os import sys +from typing import ( + List, + NoReturn, + Optional, +) try: import pytest except ImportError: - pytest = None # type: ignore + pytest = None DIRECTORY = os.path.abspath(os.path.dirname(__file__)) BIOBLEND_TEST_SUITE = os.environ.get("BIOBLEND_TEST_SUITE", "full") @@ -24,7 +29,7 @@ ] -def main(args=None): +def main(args: Optional[List[str]] = None) -> NoReturn: """Entry point that delegates to pytest.main.""" if pytest is None: raise Exception("pytest is required to use this script.") diff -Nru python-bioblend-0.18.0/bioblend/_tests/README.TXT python-bioblend-1.0.0/bioblend/_tests/README.TXT --- python-bioblend-0.18.0/bioblend/_tests/README.TXT 2022-07-07 
19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/README.TXT 2022-10-13 18:17:37.000000000 +0000 @@ -1,11 +1,3 @@ -To run the cloud tests, the following environment variables must be set: - -BIOBLEND_ACCESS_KEY = -BIOBLEND_SECRET_KEY = -BIOBLEND_CLOUD_TYPE = -BIOBLEND_AMI_ID = - - To run Galaxy tests, the following environment variables must be set: BIOBLEND_GALAXY_API_KEY = diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestCloudmanLaunch.py python-bioblend-1.0.0/bioblend/_tests/TestCloudmanLaunch.py --- python-bioblend-0.18.0/bioblend/_tests/TestCloudmanLaunch.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestCloudmanLaunch.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,71 +0,0 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" -import contextlib - -from bioblend.cloudman import ( - CloudManConfig, - CloudManInstance, -) -from . import ( - CloudmanTestBase, - test_util, -) - - -@test_util.skip_unless_cloudman() -class TestCloudmanLaunch(CloudmanTestBase.CloudmanTestBase): - def setUp(self): - super().setUp() - - def test_validate_valid_config(self): - """ - Tests whether a valid config is validated properly. - """ - # cfg = CloudManConfig(self.access_key, self.secret_key, self.cluster_name, self.ami_id, self.instance_type, self.password, cloud_metadata=self.cloud_metadata) - cls = TestCloudmanLaunch - cfg = CloudManConfig( - cls.access_key, - cls.secret_key, - cls.cluster_name, - cls.ami_id, - cls.instance_type, - cls.password, - cloud_metadata=cls.cloud_metadata, - ) - result = cfg.validate() - self.assertIsNone(result, "Validation did not return null to indicate success!") - - def test_validate_invalid_config(self): - """ - Tests whether an invalid config is validated properly. - """ - cfg = CloudManConfig() - result = cfg.validate() - self.assertIsNotNone(result, "Validation should have returned a value since the configuration was invalid!") - - def test_launch_and_terminate(self): - cls = TestCloudmanLaunch - cfg = CloudManConfig( - cls.access_key, - cls.secret_key, - cls.cluster_name, - cls.ami_id, - cls.instance_type, - cls.password, - cloud_metadata=cls.cloud_metadata, - ) - cmi = CloudManInstance.launch_instance(cfg) - status = cmi.get_status() - self.assertNotEqual( - status["cluster_status"], - "ERROR", - "instance.get_status() returned ERROR. Should return a successful status!", - ) - with contextlib.suppress(Exception): - # TODO: The terminate method is unpredictable! Needs fix. - result = cmi.terminate(delete_cluster=True) - self.assertEqual( - result["cluster_status"], "SHUTDOWN", "Cluster should be in status SHUTDOWN after call to terminate!" - ) diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestCloudmanMock.py python-bioblend-1.0.0/bioblend/_tests/TestCloudmanMock.py --- python-bioblend-0.18.0/bioblend/_tests/TestCloudmanMock.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestCloudmanMock.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,136 +0,0 @@ -""" -Tests the functionality of the BioBlend CloudMan API, without actually making -calls to a remote CloudMan instance/server. These don't actually ensure -that BioBlend is integrated with CloudMan correctly. They only ensure that -if you refactor the BioBlend CloudMan API code, that it will maintain its -current behaviour. 
-""" -import unittest -from unittest.mock import MagicMock - -from bioblend import cloudman - - -class TestCloudmanMock(unittest.TestCase): - def setUp(self): - url = "http://127.0.0.1:42284" - password = "password" - self.cm = cloudman.CloudManInstance(url, password) - - # def test_initialize(self): - # self.cm._make_get_request = MagicMock(return_value="{}") - # - # ## Set cluster type - # self.cm.initialize(type="Galaxy") - # - # params = {'startup_opt': 'Galaxy'} - # self.cm._make_get_request.assert_called_with("initialize_cluster", parameters=params) - - def test_get_status(self): - # Set return value of call - self.cm._make_get_request = MagicMock(return_value={}) - - status = self.cm.get_status() - self.assertNotEqual(status, None) - self.assertEqual(status, {}) - - # Check that the correct URL was called - self.cm._make_get_request.assert_called_with("instance_state_json") - - def test_get_nodes(self): - # Set return value of call - self.cm._make_get_request = MagicMock(return_value={"instances": []}) - - nodes = self.cm.get_nodes() - self.assertIsNotNone(nodes) - self.assertEqual(len(nodes), 0) - - # Check that the correct URL was called - self.cm._make_get_request.assert_called_with("instance_feed_json") - - def test_add_nodes(self): - self.cm._make_get_request = MagicMock(return_value="{}") - num_nodes = 10 - status = self.cm.add_nodes(num_nodes) - self.assertIsNotNone(status) - - # Check that the correct URL was called - params = {"number_nodes": 10, "instance_type": "", "spot_price": ""} - self.cm._make_get_request.assert_called_with("add_instances", parameters=params) - - def test_remove_nodes(self): - self.cm._make_get_request = MagicMock(return_value="{}") - num_nodes = 10 - status = self.cm.remove_nodes(num_nodes, force=True) - self.assertIsNotNone(status) - - # Check that the correct URL was called - params = {"number_nodes": 10, "force_termination": True} - self.cm._make_get_request.assert_called_with("remove_instances", parameters=params) - - def test_remove_node(self): - self.cm._make_get_request = MagicMock(return_value="{}") - instance_id = "abcdef" - self.cm.remove_node(instance_id, force=True) - - # Check that the correct URL was called - params = {"instance_id": "abcdef"} - self.cm._make_get_request.assert_called_with("remove_instance", parameters=params) - - def test_reboot_node(self): - self.cm._make_get_request = MagicMock(return_value="{}") - instance_id = "abcdef" - self.cm.reboot_node(instance_id) - - # Check that the correct URL was called - params = {"instance_id": "abcdef"} - self.cm._make_get_request.assert_called_with("reboot_instance", parameters=params) - - def test_autoscaling_enabled_true(self): - return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "3", "as_min": "1"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.assertTrue(self.cm.autoscaling_enabled()) - - def test_autoscaling_enabled_false(self): - return_json_string = {"autoscaling": {"use_autoscaling": False, "as_max": "3", "as_min": "1"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.assertFalse(self.cm.autoscaling_enabled()) - - def test_enable_autoscaling(self): - return_json_string = {"autoscaling": {"use_autoscaling": False, "as_max": "N/A", "as_min": "N/A"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.assertFalse(self.cm.autoscaling_enabled()) - self.cm.enable_autoscaling(minimum_nodes=0, maximum_nodes=19) - - # Check that the correct URL was called - params = 
{"as_min": 0, "as_max": 19} - self.cm._make_get_request.assert_called_with("toggle_autoscaling", parameters=params) - - return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "19", "as_min": "0"}} - self.cm.enable_autoscaling(minimum_nodes=0, maximum_nodes=19) - - # Check that the correct URL was called - params = {"as_min": 0, "as_max": 19} - self.cm._make_get_request.assert_called_with("toggle_autoscaling", parameters=params) - - def test_disable_autoscaling(self): - return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "3", "as_min": "1"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.cm.disable_autoscaling() - - self.cm._make_get_request.assert_called_with("toggle_autoscaling") - - def test_adjust_autoscaling(self): - return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "3", "as_min": "1"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.cm.adjust_autoscaling(minimum_nodes=3, maximum_nodes=4) - params = {"as_min_adj": 3, "as_max_adj": 4} - self.cm._make_get_request.assert_called_with("adjust_autoscaling", parameters=params) - - def test_get_galaxy_state_stopped(self): - return_json = {"status": "'Galaxy' is not running", "srvc": "Galaxy"} - self.cm._make_get_request = MagicMock(return_value=return_json) - - self.assertEqual(self.cm.get_galaxy_state()["status"], "'Galaxy' is not running") - params = {"srvc": "Galaxy"} - self.cm._make_get_request.assert_called_with("get_srvc_status", parameters=params) diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestCloudmanServices.py python-bioblend-1.0.0/bioblend/_tests/TestCloudmanServices.py --- python-bioblend-0.18.0/bioblend/_tests/TestCloudmanServices.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestCloudmanServices.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,69 +0,0 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" -from bioblend.cloudman import ( - CloudManConfig, - CloudManInstance, -) -from . 
import ( - CloudmanTestBase, - test_util, -) - - -@test_util.skip_unless_cloudman() -class TestCloudmanServices(CloudmanTestBase.CloudmanTestBase): - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.cfg = CloudManConfig( - cls.access_key, - cls.secret_key, - cls.cluster_name, - cls.ami_id, - cls.instance_type, - cls.password, - cloud_metadata=cls.cloud_metadata, - ) - cls.cmi = CloudManInstance.launch_instance(cls.cfg) - - def setUp(self): - self.cmi = self.__class__.cmi - - def test_get_status(self): - status = self.cmi.get_status() - self.assertIsNotNone(status) - - def test_get_nodes(self): - nodes = self.cmi.get_nodes() - self.assertIsNotNone(nodes) - - def test_add_nodes(self): - num_nodes = 1 - status = self.cmi.add_nodes(num_nodes) - self.assertIsNotNone(status) - - def test_reboot_node(self): - instance_id = self.cmi.instance_id - self.cmi.reboot_node(instance_id) - - def test_remove_node(self): - instance_id = self.cmi.instance_id - self.cmi.remove_node(instance_id, force=True) - - def test_enable_autoscaling(self): - self.assertFalse(self.cmi.autoscaling_enabled()) - self.cmi.enable_autoscaling(minimum_nodes=0, maximum_nodes=19) - self.assertTrue(self.cmi.autoscaling_enabled()) - - def test_disable_autoscaling(self): - self.cmi.disable_autoscaling() - self.assertFalse(self.cmi.autoscaling_enabled()) - - def test_adjust_autoscaling(self): - self.cmi.adjust_autoscaling(minimum_nodes=3, maximum_nodes=4) - - -# def test_get_galaxy_state_stopped(self): -# self.assertEqual(self.cmi.get_galaxy_state(), "'Galaxy' is not running") diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyConfig.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyConfig.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyConfig.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyConfig.py 2022-10-13 18:17:37.000000000 +0000 @@ -4,15 +4,15 @@ class TestGalaxyConfig(GalaxyTestBase.GalaxyTestBase): def test_get_config(self): response = self.gi.config.get_config() - self.assertTrue(isinstance(response, dict)) - self.assertTrue("brand" in response.keys()) + assert isinstance(response, dict) + assert "brand" in response.keys() def test_get_version(self): response = self.gi.config.get_version() - self.assertTrue(isinstance(response, dict)) - self.assertTrue("version_major" in response.keys()) + assert isinstance(response, dict) + assert "version_major" in response.keys() def test_whoami(self): response = self.gi.config.whoami() - self.assertTrue(isinstance(response, dict)) - self.assertTrue("username" in response.keys()) + assert isinstance(response, dict) + assert "username" in response.keys() diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyDatasetCollections.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyDatasetCollections.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyDatasetCollections.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyDatasetCollections.py 2022-10-13 18:17:37.000000000 +0000 @@ -2,6 +2,11 @@ import tarfile import tempfile from inspect import signature +from typing import ( + Any, + Dict, + Union, +) from zipfile import ZipFile from bioblend.galaxy import dataset_collections @@ -28,15 +33,15 @@ ], ), ) - self.assertEqual(collection_response["name"], "MyDatasetList") - self.assertEqual(collection_response["collection_type"], "list") + assert collection_response["name"] == "MyDatasetList" + assert collection_response["collection_type"] == "list" elements = 
collection_response["elements"] - self.assertEqual(len(elements), 3) - self.assertEqual(elements[0]["element_index"], 0) - self.assertEqual(elements[0]["object"]["id"], dataset1_id) - self.assertEqual(elements[1]["object"]["id"], dataset2_id) - self.assertEqual(elements[2]["object"]["id"], dataset3_id) - self.assertEqual(elements[2]["element_identifier"], "sample3") + assert len(elements) == 3 + assert elements[0]["element_index"] == 0 + assert elements[0]["object"]["id"] == dataset1_id + assert elements[1]["object"]["id"] == dataset2_id + assert elements[2]["object"]["id"] == dataset3_id + assert elements[2]["element_identifier"] == "sample3" def test_create_list_of_paired_datasets_in_history(self): history_id = self.gi.histories.create_history(name="TestDSListCreate")["id"] @@ -69,62 +74,62 @@ ], ), ) - self.assertEqual(collection_response["name"], "MyListOfPairedDatasets") - self.assertEqual(collection_response["collection_type"], "list:paired") + assert collection_response["name"] == "MyListOfPairedDatasets" + assert collection_response["collection_type"] == "list:paired" elements = collection_response["elements"] - self.assertEqual(len(elements), 2) - self.assertEqual(elements[0]["element_index"], 0) + assert len(elements) == 2 + assert elements[0]["element_index"] == 0 created_pair1 = elements[0]["object"] - self.assertEqual(created_pair1["collection_type"], "paired") - self.assertEqual(len(created_pair1["elements"]), 2) + assert created_pair1["collection_type"] == "paired" + assert len(created_pair1["elements"]) == 2 forward_element1 = created_pair1["elements"][0] - self.assertEqual(forward_element1["element_identifier"], "forward") - self.assertEqual(forward_element1["element_index"], 0) + assert forward_element1["element_identifier"] == "forward" + assert forward_element1["element_index"] == 0 forward_dataset1 = forward_element1["object"] - self.assertEqual(forward_dataset1["id"], dataset1_id) + assert forward_dataset1["id"] == dataset1_id - self.assertEqual(elements[1]["element_index"], 1) + assert elements[1]["element_index"] == 1 created_pair2 = elements[1]["object"] - self.assertEqual(created_pair2["collection_type"], "paired") - self.assertEqual(len(created_pair2["elements"]), 2) + assert created_pair2["collection_type"] == "paired" + assert len(created_pair2["elements"]) == 2 reverse_element2 = created_pair2["elements"][1] reverse_dataset2 = reverse_element2["object"] - self.assertEqual(reverse_element2["element_identifier"], "reverse") - self.assertEqual(reverse_element2["element_index"], 1) - self.assertEqual(reverse_dataset2["id"], dataset4_id) + assert reverse_element2["element_identifier"] == "reverse" + assert reverse_element2["element_index"] == 1 + assert reverse_dataset2["id"] == dataset4_id def test_collections_in_history_index(self): history_id = self.gi.histories.create_history(name="TestHistoryDSIndex")["id"] history_dataset_collection = self._create_pair_in_history(history_id) contents = self.gi.histories.show_history(history_id, contents=True) - self.assertEqual(len(contents), 3) - self.assertEqual(contents[2]["id"], history_dataset_collection["id"]) - self.assertEqual(contents[2]["name"], "MyTestPair") - self.assertEqual(contents[2]["collection_type"], "paired") + assert len(contents) == 3 + assert contents[2]["id"] == history_dataset_collection["id"] + assert contents[2]["name"] == "MyTestPair" + assert contents[2]["collection_type"] == "paired" def test_show_history_dataset_collection(self): history_id = 
self.gi.histories.create_history(name="TestHistoryDSIndexShow")["id"] history_dataset_collection = self._create_pair_in_history(history_id) show_response = self.gi.histories.show_dataset_collection(history_id, history_dataset_collection["id"]) for key in ["collection_type", "elements", "name", "deleted", "visible"]: - self.assertIn(key, show_response) - self.assertFalse(show_response["deleted"]) - self.assertTrue(show_response["visible"]) + assert key in show_response + assert not show_response["deleted"] + assert show_response["visible"] def test_delete_history_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestHistoryDSDelete")["id"] history_dataset_collection = self._create_pair_in_history(history_id) self.gi.histories.delete_dataset_collection(history_id, history_dataset_collection["id"]) show_response = self.gi.histories.show_dataset_collection(history_id, history_dataset_collection["id"]) - self.assertTrue(show_response["deleted"]) + assert show_response["deleted"] def test_update_history_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestHistoryDSDelete")["id"] history_dataset_collection = self._create_pair_in_history(history_id) self.gi.histories.update_dataset_collection(history_id, history_dataset_collection["id"], visible=False) show_response = self.gi.histories.show_dataset_collection(history_id, history_dataset_collection["id"]) - self.assertFalse(show_response["visible"]) + assert not show_response["visible"] def test_show_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestDatasetCollectionShow")["id"] @@ -141,12 +146,12 @@ "url", "visible", ): - self.assertEqual(dataset_collection1[key], dataset_collection2[key]) + assert dataset_collection1[key] == dataset_collection2[key] for element1, element2 in zip(dataset_collection1["elements"], dataset_collection2["elements"]): - self.assertEqual(element1["id"], element2["id"]) - self.assertEqual(element1.keys(), element2.keys()) + assert element1["id"] == element2["id"] + assert element1.keys() == element2.keys() for key in element1["object"].keys(): - self.assertIn(key, element2["object"].keys()) + assert key in element2["object"].keys() @test_util.skip_unless_galaxy("release_18.01") def test_download_dataset_collection(self): @@ -164,7 +169,7 @@ os.mkdir(extract_dir_path) if archive_type == "zip": - archive = ZipFile(archive_path) + archive: Union[ZipFile, tarfile.TarFile] = ZipFile(archive_path) elif archive_type == "tgz": archive = tarfile.open(archive_path) @@ -173,7 +178,7 @@ dataset_dir_path = os.path.join(extract_dir_path, fname) file_path = os.path.join(dataset_dir_path, os.listdir(dataset_dir_path)[0]) with open(file_path) as f: - self.assertEqual(expected_contents, f.read()) + assert expected_contents == f.read() archive.close() def test_wait_for_dataset_collection(self): @@ -181,9 +186,9 @@ dataset_collection_id = self._create_pair_in_history(history_id)["id"] dataset_collection = self.gi.dataset_collections.wait_for_dataset_collection(dataset_collection_id) for element in dataset_collection["elements"]: - self.assertEqual(element["object"]["state"], "ok") + assert element["object"]["state"] == "ok" - def _create_pair_in_history(self, history_id): + def _create_pair_in_history(self, history_id: str) -> Dict[str, Any]: dataset1_id = self._test_dataset(history_id) dataset2_id = self._test_dataset(history_id) collection_response = self.gi.histories.create_dataset_collection( diff -Nru 
python-bioblend-0.18.0/bioblend/_tests/TestGalaxyDatasets.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyDatasets.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyDatasets.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyDatasets.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,6 +1,8 @@ import shutil import tempfile +import pytest + from bioblend import ( ConnectionError, galaxy, @@ -24,16 +26,15 @@ @test_util.skip_unless_galaxy("release_19.05") def test_show_nonexistent_dataset(self): - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): self.gi.datasets.show_dataset("nonexistent_id") def test_show_dataset(self): self.gi.datasets.show_dataset(self.dataset_id) def test_download_dataset(self): - with self.assertRaises(Exception) as ctx: - self.gi.datasets.download_dataset(None) - self.assertIsInstance(ctx.exception, (TypeError, ConnectionError)) + with pytest.raises((TypeError, ConnectionError)): + self.gi.datasets.download_dataset(None) # type: ignore[call-overload] expected_contents = ("\n".join(self.dataset_contents.splitlines()) + "\n").encode() # download_dataset() with file_path=None is already tested in TestGalaxyTools.test_paste_content() # self._wait_and_verify_dataset(self.dataset_id, expected_contents) @@ -42,9 +43,9 @@ downloaded_dataset = self.gi.datasets.download_dataset( self.dataset_id, file_path=tempdir, maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT * 2 ) - self.assertTrue(downloaded_dataset.startswith(tempdir)) + assert downloaded_dataset.startswith(tempdir) with open(downloaded_dataset, "rb") as f: - self.assertEqual(f.read(), expected_contents) + assert f.read() == expected_contents finally: shutil.rmtree(tempdir) with tempfile.NamedTemporaryFile(prefix="bioblend_test_") as f: @@ -54,119 +55,123 @@ use_default_filename=False, maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT, ) - self.assertEqual(download_filename, f.name) + assert download_filename == f.name f.flush() - self.assertEqual(f.read(), expected_contents) + assert f.read() == expected_contents @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets(self): datasets = self.gi.datasets.get_datasets() dataset_ids = [dataset["id"] for dataset in datasets] - self.assertIn(self.dataset_id, dataset_ids) + assert self.dataset_id in dataset_ids @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_history(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_limit_offset(self): datasets = self.gi.datasets.get_datasets(limit=1) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, offset=1) - self.assertEqual(datasets, []) + assert datasets == [] @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_name(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name="Pasted Entry") - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name="Wrong Name") - self.assertEqual(datasets, []) + assert datasets == [] @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_time(self): dataset = self.gi.datasets.show_dataset(self.dataset_id) ct = dataset["create_time"] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_min=ct) - 
self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_max=ct) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_min="2100-01-01T00:00:00") - self.assertEqual(datasets, []) + assert datasets == [] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_max="2000-01-01T00:00:00") - self.assertEqual(datasets, []) + assert datasets == [] ut = dataset["update_time"] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_min=ut) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_max=ut) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_min="2100-01-01T00:00:00") - self.assertEqual(datasets, []) + assert datasets == [] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_max="2000-01-01T00:00:00") - self.assertEqual(datasets, []) + assert datasets == [] @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_extension(self): - datasets = self.gi.datasets.get_datasets(history_id=self.history_id) datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension="txt") - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension="bam") - self.assertEqual(datasets, []) + assert datasets == [] + + @test_util.skip_unless_galaxy("release_22.01") + def test_get_datasets_extension_list(self): + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension=["bam", "txt"]) + assert len(datasets) == 1 @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_state(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state="ok") - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state="queued") - self.assertEqual(datasets, []) - with self.assertRaises(ConnectionError): + assert datasets == [] + with pytest.raises(ConnectionError): self.gi.datasets.get_datasets(history_id=self.history_id, state="nonexistent_state") datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state=["ok", "queued"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_visible(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, visible=True) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, visible=False) - self.assertEqual(len(datasets), 0) + assert len(datasets) == 0 @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_ordering(self): self.dataset_id2 = self._test_dataset(self.history_id, contents=self.dataset_contents) self.gi.datasets.wait_for_dataset(self.dataset_id2) datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="create_time-dsc") - self.assertEqual(datasets[0]["id"], self.dataset_id2) + assert datasets[0]["id"] == self.dataset_id2 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="create_time-asc") - self.assertEqual(datasets[0]["id"], self.dataset_id) + assert 
datasets[0]["id"] == self.dataset_id datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="hid-dsc") - self.assertEqual(datasets[0]["id"], self.dataset_id2) + assert datasets[0]["id"] == self.dataset_id2 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="hid-asc") - self.assertEqual(datasets[0]["id"], self.dataset_id) + assert datasets[0]["id"] == self.dataset_id @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_deleted(self): deleted_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, deleted=True) - self.assertEqual(deleted_datasets, []) + assert deleted_datasets == [] self.gi.histories.delete_dataset(self.history_id, self.dataset_id) deleted_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, deleted=True) - self.assertEqual(len(deleted_datasets), 1) + assert len(deleted_datasets) == 1 purged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, purged=True) - self.assertEqual(purged_datasets, []) + assert purged_datasets == [] self.gi.histories.delete_dataset(self.history_id, self.dataset_id, purge=True) purged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, purged=True) - self.assertEqual(len(purged_datasets), 1) + assert len(purged_datasets) == 1 @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_tool_id_and_tag(self): cat1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id="cat1") - self.assertEqual(cat1_datasets, []) + assert cat1_datasets == [] upload1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id="upload1") - self.assertEqual(len(upload1_datasets), 1) + assert len(upload1_datasets) == 1 self.gi.histories.update_dataset(self.history_id, self.dataset_id, tags=["test"]) tagged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tag="test") - self.assertEqual(len(tagged_datasets), 1) + assert len(tagged_datasets) == 1 def test_wait_for_dataset(self): history_id = self.gi.histories.create_history(name="TestWaitForDataset")["id"] @@ -174,7 +179,7 @@ dataset_id = self._test_dataset(history_id, contents=dataset_contents) dataset = self.gi.datasets.wait_for_dataset(dataset_id) - self.assertEqual(dataset["state"], "ok") + assert dataset["state"] == "ok" self.gi.histories.delete_history(history_id, purge=True) @@ -188,17 +193,17 @@ sharing_role = self.gi.roles.create_role("sharing_role", "sharing_role", [user_id, admin_user_id])["id"] self.gi.datasets.publish_dataset(self.dataset_id, published=False) - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): anonymous_gi.datasets.show_dataset(self.dataset_id) self.gi.datasets.publish_dataset(self.dataset_id, published=True) # now dataset is public, i.e. 
accessible to anonymous users - self.assertEqual(anonymous_gi.datasets.show_dataset(self.dataset_id)["id"], self.dataset_id) + assert anonymous_gi.datasets.show_dataset(self.dataset_id)["id"] == self.dataset_id self.gi.datasets.publish_dataset(self.dataset_id, published=False) - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): user_gi.datasets.show_dataset(self.dataset_id) self.gi.datasets.update_permissions(self.dataset_id, access_ids=[sharing_role], manage_ids=[sharing_role]) - self.assertEqual(user_gi.datasets.show_dataset(self.dataset_id)["id"], self.dataset_id) + assert user_gi.datasets.show_dataset(self.dataset_id)["id"] == self.dataset_id # anonymous access now fails because sharing is only with the shared user role - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): anonymous_gi.datasets.show_dataset(self.dataset_id) diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyFolders.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyFolders.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyFolders.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyFolders.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,3 +1,8 @@ +from typing import ( + Dict, + List, +) + from . import GalaxyTestBase FOO_DATA = "foo\nbar\n" @@ -18,18 +23,18 @@ self.gi.libraries.delete_library(self.library["id"]) def test_create_folder(self): - self.assertEqual(self.folder["name"], self.name) - self.assertEqual(self.folder["description"], "automatically created folder") + assert self.folder["name"] == self.name + assert self.folder["description"] == "automatically created folder" def test_show_folder(self): f2 = self.gi.folders.show_folder(self.folder["id"]) - self.assertEqual(f2["id"], self.folder["id"]) + assert f2["id"] == self.folder["id"] def test_show_folder_contents(self): f2 = self.gi.folders.show_folder(self.folder["id"], contents=True) - self.assertIn("folder_contents", f2) - self.assertIn("metadata", f2) - self.assertEqual(self.name, f2["metadata"]["folder_name"]) + assert "folder_contents" in f2 + assert "metadata" in f2 + assert self.name == f2["metadata"]["folder_name"] def test_delete_folder(self): self.sub_folder = self.gi.folders.create_folder(self.folder["id"], self.name) @@ -37,22 +42,22 @@ def test_update_folder(self): self.folder = self.gi.folders.update_folder(self.folder["id"], "new-name", "new-description") - self.assertEqual(self.folder["name"], "new-name") - self.assertEqual(self.folder["description"], "new-description") + assert self.folder["name"] == "new-name" + assert self.folder["description"] == "new-description" def test_get_set_permissions(self): - empty_permission = { + empty_permission: Dict[str, List] = { "add_library_item_role_list": [], "modify_folder_role_list": [], "manage_folder_role_list": [], } # They should be empty to start with - self.assertEqual(self.gi.folders.get_permissions(self.folder["id"], scope="current"), empty_permission) - self.assertEqual(self.gi.folders.get_permissions(self.folder["id"], scope="available"), empty_permission) + assert self.gi.folders.get_permissions(self.folder["id"], scope="current") == empty_permission + assert self.gi.folders.get_permissions(self.folder["id"], scope="available") == empty_permission # Then we'll add a role role = self.gi.roles.get_roles()[0] self.gi.folders.set_permissions(self.folder["id"], add_ids=[role["id"]]) - self.assertTrue( + assert ( role["id"] in self.gi.folders.get_permissions(self.folder["id"], 
scope="available")["add_library_item_role_list"][0] ) diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyGroups.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyGroups.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyGroups.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyGroups.py 2022-10-13 18:17:37.000000000 +0000 @@ -17,46 +17,46 @@ pass def test_create_group(self): - self.assertEqual(self.group["name"], self.name) - self.assertIsNotNone(self.group["id"]) + assert self.group["name"] == self.name + assert self.group["id"] is not None def test_get_groups(self): groups = self.gi.groups.get_groups() for group in groups: - self.assertIsNotNone(group["id"]) - self.assertIsNotNone(group["name"]) + assert group["id"] is not None + assert group["name"] is not None def test_show_group(self): group_data = self.gi.groups.show_group(self.group["id"]) - self.assertEqual(self.group["id"], group_data["id"]) - self.assertEqual(self.group["name"], group_data["name"]) + assert self.group["id"] == group_data["id"] + assert self.group["name"] == group_data["name"] def test_get_group_users(self): group_users = self.gi.groups.get_group_users(self.group["id"]) - self.assertEqual(group_users, []) + assert group_users == [] def test_get_group_roles(self): group_roles = self.gi.groups.get_group_roles(self.group["id"]) - self.assertEqual(group_roles, []) + assert group_roles == [] def test_update_group(self): new_name = f"test_{uuid.uuid4().hex}" new_users = [self.gi.users.get_current_user()["id"]] self.gi.groups.update_group(self.group["id"], new_name, user_ids=new_users) updated_group = self.gi.groups.show_group(self.group["id"]) - self.assertEqual(self.group["id"], updated_group["id"]) - self.assertEqual(updated_group["name"], new_name) + assert self.group["id"] == updated_group["id"] + assert updated_group["name"] == new_name updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])] - self.assertEqual(set(updated_group_users), set(new_users)) + assert set(updated_group_users) == set(new_users) updated_group_roles = [_["id"] for _ in self.gi.groups.get_group_roles(self.group["id"])] - self.assertEqual(set(updated_group_roles), set()) + assert set(updated_group_roles) == set() def test_add_delete_group_user(self): new_user = self.gi.users.get_current_user()["id"] ret = self.gi.groups.add_group_user(self.group["id"], new_user) - self.assertEqual(ret["id"], new_user) + assert ret["id"] == new_user updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])] - self.assertIn(new_user, updated_group_users) + assert new_user in updated_group_users self.gi.groups.delete_group_user(self.group["id"], new_user) updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])] - self.assertNotIn(new_user, updated_group_users) + assert new_user not in updated_group_users diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyHistories.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyHistories.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyHistories.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyHistories.py 2022-10-13 18:17:37.000000000 +0000 @@ -5,7 +5,12 @@ import tarfile import tempfile -import bioblend.galaxy +import pytest + +from bioblend import ( + ConnectionError, + galaxy, +) from . 
import ( GalaxyTestBase, test_util, @@ -21,9 +26,9 @@ def test_create_history(self): history_name = "another buildbot - automated test" new_history = self.gi.histories.create_history(name=history_name) - self.assertIsNotNone(new_history["id"]) - self.assertEqual(new_history["name"], history_name) - self.assertIsNotNone(new_history["url"]) + assert new_history["id"] is not None + assert new_history["name"] == history_name + assert new_history["url"] is not None def test_update_history(self): new_name = "buildbot - automated test renamed" @@ -34,39 +39,39 @@ ) if "id" not in updated_hist: updated_hist = self.gi.histories.show_history(self.history["id"]) - self.assertEqual(self.history["id"], updated_hist["id"]) - self.assertEqual(updated_hist["name"], new_name) - self.assertEqual(updated_hist["annotation"], new_annotation) - self.assertEqual(updated_hist["tags"], new_tags) + assert self.history["id"] == updated_hist["id"] + assert updated_hist["name"] == new_name + assert updated_hist["annotation"] == new_annotation + assert updated_hist["tags"] == new_tags def test_publish_history(self): # Verify that searching for published histories does not return the test history published_histories = self.gi.histories.get_histories(published=True) - self.assertFalse(any(h["id"] == self.history["id"] for h in published_histories)) + assert not any(h["id"] == self.history["id"] for h in published_histories) updated_hist = self.gi.histories.update_history(self.history["id"], published=True) if "id" not in updated_hist: updated_hist = self.gi.histories.show_history(self.history["id"]) - self.assertEqual(self.history["id"], updated_hist["id"]) - self.assertTrue(updated_hist["published"]) + assert self.history["id"] == updated_hist["id"] + assert updated_hist["published"] # Verify that searching for published histories now returns the test history published_histories = self.gi.histories.get_histories(published=True) - self.assertTrue(any(h["id"] == self.history["id"] for h in published_histories)) + assert any(h["id"] == self.history["id"] for h in published_histories) # Verify that get_published_histories as an anonymous user also returns the test history - anonymous_gi = bioblend.galaxy.GalaxyInstance(url=self.gi.base_url, key=None) + anonymous_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=None) published_histories = anonymous_gi.histories.get_published_histories() - self.assertTrue(any(h["id"] == self.history["id"] for h in published_histories)) + assert any(h["id"] == self.history["id"] for h in published_histories) history_from_slug = anonymous_gi.histories.get_published_histories(slug=updated_hist["slug"]) - self.assertTrue(len(history_from_slug) == 1) - self.assertEqual(self.history["id"], history_from_slug[0]["id"]) + assert len(history_from_slug) == 1 + assert self.history["id"] == history_from_slug[0]["id"] def test_get_histories(self): # Make sure there's at least one value - the one we created all_histories = self.gi.histories.get_histories() - self.assertGreater(len(all_histories), 0) + assert len(all_histories) > 0 # Check whether id is present, when searched by name histories = self.gi.histories.get_histories(name=self.default_history_name) - self.assertEqual(len([h for h in histories if h["id"] == self.history["id"]]), 1) + assert len([h for h in histories if h["id"] == self.history["id"]]) == 1 # TODO: check whether deleted history is returned correctly # At the moment, get_histories() returns only not-deleted histories @@ -76,73 +81,101 @@ # get_histories() will return both 
not-deleted and deleted histories # and we can uncomment the following test. # deleted_history = self.gi.histories.get_histories(deleted=True) - # self.assertGreaterEqual(len(all_histories), len(deleted_history)) + # assert len(all_histories) >= len(deleted_history) + + @test_util.skip_unless_galaxy("release_20.01") + def test_other_users_histories(self): + username = "newuser4" + user_id = self.gi.users.create_local_user(username, f"{username}@example.org", "secret")["id"] + user_api_key = self.gi.users.create_user_apikey(user_id) + user_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=user_api_key) + # Normal users cannot use the `all` parameter + with pytest.raises(ConnectionError): + other_user_histories = user_gi.histories.get_histories(all=True) + user_history_id = user_gi.histories.create_history(name=f"History for {username}")["id"] + # Get all users' histories from an admin account + other_user_histories = self.gi.histories.get_histories(all=True) + assert user_history_id in [h["id"] for h in other_user_histories] def test_show_history(self): history_data = self.gi.histories.show_history(self.history["id"]) - self.assertEqual(self.history["id"], history_data["id"]) - self.assertEqual(self.history["name"], history_data["name"]) - self.assertEqual("new", history_data["state"]) + assert self.history["id"] == history_data["id"] + assert self.history["name"] == history_data["name"] + assert "new" == history_data["state"] def test_show_history_with_contents(self): history_id = self.history["id"] contents = self.gi.histories.show_history(history_id, contents=True) # Empty history has no datasets, content length should be 0 - self.assertEqual(len(contents), 0) + assert len(contents) == 0 self._test_dataset(history_id) contents = self.gi.histories.show_history(history_id, contents=True) # history has 1 dataset, content length should be 1 - self.assertEqual(len(contents), 1) + assert len(contents) == 1 contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset"]) # filtering for dataset, content length should still be 1 - self.assertEqual(len(contents), 1) + assert len(contents) == 1 contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset_collection"]) # filtering for dataset collection but there's no collection in the history - self.assertEqual(len(contents), 0) + assert len(contents) == 0 contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset", "dataset_collection"]) - self.assertEqual(len(contents), 1) + assert len(contents) == 1 def test_create_history_tag(self): new_tag = "tag1" self.gi.histories.create_history_tag(self.history["id"], new_tag) updated_hist = self.gi.histories.show_history(self.history["id"]) - self.assertEqual(self.history["id"], updated_hist["id"]) - self.assertIn(new_tag, updated_hist["tags"]) + assert self.history["id"] == updated_hist["id"] + assert new_tag in updated_hist["tags"] def test_show_dataset(self): history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) dataset = self.gi.histories.show_dataset(history_id, dataset1_id) for key in ["name", "hid", "id", "deleted", "history_id", "visible"]: - self.assertIn(key, dataset) - self.assertEqual(dataset["history_id"], history_id) - self.assertEqual(dataset["hid"], 1) - self.assertEqual(dataset["id"], dataset1_id) - self.assertEqual(dataset["deleted"], False) - self.assertEqual(dataset["visible"], True) - - def test_show_dataset_provenance(self): + assert key in dataset + assert dataset["history_id"] == 
history_id + assert dataset["hid"] == 1 + assert dataset["id"] == dataset1_id + assert not dataset["deleted"] + assert dataset["visible"] + + @test_util.skip_unless_galaxy("release_22.01") + def test_show_dataset_provenance(self) -> None: + MINIMAL_PROV_KEYS = ("id", "uuid") + OTHER_PROV_KEYS = ("job_id", "parameters", "stderr", "stdout", "tool_id") + ALL_PROV_KEYS = MINIMAL_PROV_KEYS + OTHER_PROV_KEYS history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) - prov = self.gi.histories.show_dataset_provenance(history_id, dataset1_id) - for key in ["id", "job_id", "parameters", "stderr", "stdout", "tool_id"]: - self.assertIn(key, prov) + dataset2_id = self._run_random_lines1(history_id, dataset1_id)["outputs"][0]["id"] + prov = self.gi.histories.show_dataset_provenance(history_id, dataset2_id) + for key in ALL_PROV_KEYS: + assert key in prov + for key in MINIMAL_PROV_KEYS: + assert key in prov["parameters"]["input"] + for key in OTHER_PROV_KEYS: + assert key not in prov["parameters"]["input"] + recursive_prov = self.gi.histories.show_dataset_provenance(history_id, dataset2_id, follow=True) + for key in ALL_PROV_KEYS: + assert key in recursive_prov + for key in ALL_PROV_KEYS: + assert key in recursive_prov["parameters"]["input"] def test_delete_dataset(self): history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) self.gi.histories.delete_dataset(history_id, dataset1_id) dataset = self.gi.histories.show_dataset(history_id, dataset1_id) - self.assertTrue(dataset["deleted"]) - self.assertFalse(dataset["purged"]) + assert dataset["deleted"] + assert not dataset["purged"] def test_purge_dataset(self): history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) self.gi.histories.delete_dataset(history_id, dataset1_id, purge=True) dataset = self.gi.histories.show_dataset(history_id, dataset1_id) - self.assertTrue(dataset["deleted"]) - self.assertTrue(dataset["purged"]) + assert dataset["deleted"] + assert dataset["purged"] def test_update_dataset(self): history_id = self.history["id"] @@ -150,7 +183,7 @@ updated_dataset = self.gi.histories.update_dataset(history_id, dataset1_id, visible=False) if "id" not in updated_dataset: updated_dataset = self.gi.histories.show_dataset(history_id, dataset1_id) - self.assertFalse(updated_dataset["visible"]) + assert not updated_dataset["visible"] def test_upload_dataset_from_library(self): pass @@ -159,39 +192,39 @@ def test_delete_history(self): result = self.gi.histories.delete_history(self.history["id"]) - self.assertTrue(result["deleted"]) + assert result["deleted"] all_histories = self.gi.histories.get_histories() - self.assertTrue(not any(d["id"] == self.history["id"] for d in all_histories)) + assert not any(d["id"] == self.history["id"] for d in all_histories) def test_undelete_history(self): self.gi.histories.delete_history(self.history["id"]) self.gi.histories.undelete_history(self.history["id"]) all_histories = self.gi.histories.get_histories() - self.assertTrue(any(d["id"] == self.history["id"] for d in all_histories)) + assert any(d["id"] == self.history["id"] for d in all_histories) def test_get_status(self): state = self.gi.histories.get_status(self.history["id"]) - self.assertEqual("new", state["state"]) + assert "new" == state["state"] def test_get_most_recently_used_history(self): most_recently_used_history = self.gi.histories.get_most_recently_used_history() # if the user has been created via the API, it does not have # a session, therefore no history if most_recently_used_history is 
not None: - self.assertIsNotNone(most_recently_used_history["id"]) - self.assertIsNotNone(most_recently_used_history["name"]) - self.assertIsNotNone(most_recently_used_history["state"]) + assert most_recently_used_history["id"] is not None + assert most_recently_used_history["name"] is not None + assert most_recently_used_history["state"] is not None def test_download_history(self): jeha_id = self.gi.histories.export_history(self.history["id"], wait=True, maxwait=60) - self.assertTrue(jeha_id) + assert jeha_id tempdir = tempfile.mkdtemp(prefix="bioblend_test_") temp_fn = os.path.join(tempdir, "export.tar.gz") try: with open(temp_fn, "wb") as fo: self.gi.histories.download_history(self.history["id"], jeha_id, fo) - self.assertTrue(tarfile.is_tarfile(temp_fn)) + assert tarfile.is_tarfile(temp_fn) finally: shutil.rmtree(tempdir) @@ -225,7 +258,7 @@ history_id = self.history["id"] dataset_id = self._test_dataset(history_id) extra_files = self.gi.histories.get_extra_files(history_id, dataset_id) - self.assertEqual(extra_files, []) + assert extra_files == [] def tearDown(self): self.gi.histories.delete_history(self.history["id"], purge=True) diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyInstance.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyInstance.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyInstance.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyInstance.py 2022-10-13 18:17:37.000000000 +0000 @@ -5,9 +5,10 @@ import time import unittest +import pytest + from bioblend import ConnectionError from bioblend.galaxy import GalaxyInstance -from bioblend.galaxy.client import Client from . import test_util @@ -16,29 +17,29 @@ # "connect" to a fake Galaxy instance self.gi = GalaxyInstance("http://localhost:56789", key="whatever") - def test_set_max_get_retries(self): + def test_set_max_get_attempts(self): self.gi.max_get_attempts = 3 - self.assertEqual(3, Client.max_get_retries()) + assert 3 == self.gi.max_get_attempts def test_set_retry_delay(self): - self.gi.get_retry_delay = 5 - self.assertEqual(5, Client.get_retry_delay()) + self.gi.get_retry_delay = 5.0 + assert 5.0 == self.gi.get_retry_delay def test_get_retry(self): # We set the client to try twice, with a delay of 5 seconds between # attempts. So, we expect the call to take at least 5 seconds before # failing. - self.gi.max_get_attempts = 2 - self.gi.get_retry_delay = 5 + self.gi.max_get_attempts = 3 + self.gi.get_retry_delay = 2 start = time.time() - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): self.gi.libraries.get_libraries() end = time.time() duration = end - start - self.assertGreater(duration, self.gi.get_retry_delay, "Didn't seem to retry long enough") + assert duration > self.gi.get_retry_delay * (self.gi.max_get_attempts - 1), "Didn't seem to retry long enough" def test_missing_scheme_fake_url(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): GalaxyInstance("localhost:56789", key="whatever") @test_util.skip_unless_galaxy() diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyInvocations.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyInvocations.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyInvocations.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyInvocations.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,6 +1,10 @@ import contextlib import os import time +from typing import ( + Any, + Dict, +) from . 
import ( GalaxyTestBase, @@ -25,11 +29,11 @@ invocation_id = invocation["id"] invocations = self.gi.invocations.get_invocations() - self.assertEqual(len(invocations), 1) - self.assertEqual(invocations[0]["id"], invocation_id) + assert len(invocations) == 1 + assert invocations[0]["id"] == invocation_id self.gi.invocations.cancel_invocation(invocation_id) invocation = self.gi.invocations.show_invocation(invocation_id) - self.assertEqual(invocation["state"], "cancelled") + assert invocation["state"] == "cancelled" @test_util.skip_unless_galaxy("release_20.01") def test_get_invocations(self): @@ -55,20 +59,20 @@ # Test filtering by workflow ID for wf_id, expected_invoc_num in {self.workflow_id: 2, workflow2_id: 1}.items(): invocs = self.gi.invocations.get_invocations(workflow_id=wf_id) - self.assertEqual(len(invocs), expected_invoc_num) + assert len(invocs) == expected_invoc_num for invoc in invocs: - self.assertEqual(invoc["workflow_id"], wf_id) + assert invoc["workflow_id"] == wf_id # Test filtering by history ID for hist_id, expected_invoc_num in {self.history_id: 1, hist2_id: 2}.items(): invocs = self.gi.invocations.get_invocations(history_id=hist_id) - self.assertEqual(len(invocs), expected_invoc_num) + assert len(invocs) == expected_invoc_num for invoc in invocs: - self.assertEqual(invoc["history_id"], hist_id) + assert invoc["history_id"] == hist_id # Test limiting limit_invocs = self.gi.invocations.get_invocations(limit=2) - self.assertEqual(len(limit_invocs), 2) + assert len(limit_invocs) == 2 self.gi.histories.delete_history(hist2_id, purge=True) @@ -90,17 +94,17 @@ self.gi.invocations.wait_for_invocation(invocation["id"]) biocompute_object = self.gi.invocations.get_invocation_biocompute_object(invocation["id"]) - self.assertEqual(len(biocompute_object["description_domain"]["pipeline_steps"]), 1) + assert len(biocompute_object["description_domain"]["pipeline_steps"]) == 1 @test_util.skip_unless_galaxy("release_19.09") def test_get_invocation_jobs_summary(self): invocation = self._invoke_workflow() self.gi.invocations.wait_for_invocation(invocation["id"]) jobs_summary = self.gi.invocations.get_invocation_summary(invocation["id"]) - self.assertEqual(jobs_summary["populated_state"], "ok") + assert jobs_summary["populated_state"] == "ok" step_jobs_summary = self.gi.invocations.get_invocation_step_jobs_summary(invocation["id"]) - self.assertEqual(len(step_jobs_summary), 1) - self.assertEqual(step_jobs_summary[0]["populated_state"], "ok") + assert len(step_jobs_summary) == 1 + assert step_jobs_summary[0]["populated_state"] == "ok" @test_util.skip_unless_galaxy("release_19.09") @test_util.skip_unless_tool("cat1") @@ -116,7 +120,7 @@ ) invocation_id = invocation["id"] - def invocation_steps_by_order_index(): + def invocation_steps_by_order_index() -> Dict[int, Dict[str, Any]]: invocation = self.gi.invocations.show_invocation(invocation_id) return {s["order_index"]: s for s in invocation["steps"]} @@ -126,9 +130,9 @@ time.sleep(0.5) steps = invocation_steps_by_order_index() pause_step = steps[2] - self.assertIsNone(self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"]) + assert self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"] is None self.gi.invocations.run_invocation_step_action(invocation_id, pause_step["id"], action=True) - self.assertTrue(self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"]) + assert self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"] 
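The invocation tests above exercise BioBlend's invocations client: listing and filtering invocations, cancelling them, waiting for completion and summarising their jobs. A minimal usage sketch of the same calls against a live server (the URL and API key below are placeholders, not values from the test suite):

    from bioblend.galaxy import GalaxyInstance

    # Placeholder server URL and API key.
    gi = GalaxyInstance("https://galaxy.example.org", key="...")

    # List invocations; the tests also filter by workflow_id, history_id and limit.
    invocations = gi.invocations.get_invocations(limit=10)
    for inv in invocations:
        print(inv["id"], inv["state"])

    # Wait for one invocation to finish scheduling, then summarise its jobs.
    inv_id = invocations[0]["id"]
    gi.invocations.wait_for_invocation(inv_id)
    print(gi.invocations.get_invocation_summary(inv_id)["populated_state"])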
self.gi.invocations.wait_for_invocation(invocation["id"]) @test_util.skip_unless_galaxy("release_21.01") @@ -138,9 +142,9 @@ rerun_invocation = self.gi.invocations.rerun_invocation(invocation["id"], import_inputs_to_history=True) self.gi.invocations.wait_for_invocation(rerun_invocation["id"]) history = self.gi.histories.show_history(rerun_invocation["history_id"], contents=True) - self.assertEqual(len(history), 3) + assert len(history) == 3 - def _invoke_workflow(self): + def _invoke_workflow(self) -> Dict[str, Any]: dataset = {"src": "hda", "id": self.dataset_id} return self.gi.workflows.invoke_workflow( diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyJobs.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyJobs.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyJobs.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyJobs.py 2022-10-13 18:17:37.000000000 +0000 @@ -5,6 +5,8 @@ ) from operator import itemgetter +from typing_extensions import Literal + from bioblend.galaxy.tools.inputs import ( dataset, inputs, @@ -31,7 +33,7 @@ tool_output = self.gi.tools.run_tool(history_id=self.history_id, tool_id="cat1", tool_inputs=tool_inputs) job_id = tool_output["jobs"][0]["id"] job = self.gi.jobs.wait_for_job(job_id) - self.assertEqual(job["state"], "ok") + assert job["state"] == "ok" @test_util.skip_unless_tool("random_lines1") def test_get_jobs(self): @@ -39,17 +41,17 @@ self._run_tool() jobs = self.gi.jobs.get_jobs(tool_id="random_lines1", history_id=self.history_id) - self.assertEqual(len(jobs), 2) + assert len(jobs) == 2 jobs = self.gi.jobs.get_jobs(history_id=self.history_id, state="failed") - self.assertEqual(len(jobs), 0) + assert len(jobs) == 0 yesterday = datetime.today() - timedelta(days=1) jobs = self.gi.jobs.get_jobs(date_range_max=yesterday.strftime("%Y-%m-%d"), history_id=self.history_id) - self.assertEqual(len(jobs), 0) + assert len(jobs) == 0 tomorrow = datetime.today() + timedelta(days=1) jobs = self.gi.jobs.get_jobs(date_range_min=tomorrow.strftime("%Y-%m-%d")) - self.assertEqual(len(jobs), 0) + assert len(jobs) == 0 jobs = self.gi.jobs.get_jobs(date_range_min=datetime.today().strftime("%Y-%m-%d"), history_id=self.history_id) - self.assertEqual(len(jobs), 3) + assert len(jobs) == 3 @test_util.skip_unless_galaxy("release_21.05") def test_get_jobs_with_filtering(self): @@ -72,17 +74,17 @@ self.gi.invocations.wait_for_invocation(invocation2["id"]) all_jobs = self.gi.jobs.get_jobs(history_id=self.history_id, order_by="create_time") - self.assertEqual(len(all_jobs), 3) + assert len(all_jobs) == 3 job1_id = all_jobs[1]["id"] jobs = self.gi.jobs.get_jobs(history_id=self.history_id, limit=1, offset=1, order_by="create_time") - self.assertEqual(len(jobs), 1) - self.assertEqual(jobs[0]["id"], job1_id) + assert len(jobs) == 1 + assert jobs[0]["id"] == job1_id jobs = self.gi.jobs.get_jobs(invocation_id=invocation1["id"]) - self.assertEqual(len(jobs), 1) + assert len(jobs) == 1 job_id_inv = jobs[0]["id"] jobs = self.gi.jobs.get_jobs(workflow_id=workflow_id) - self.assertEqual(len(jobs), 2) - self.assertIn(job_id_inv, [job["id"] for job in jobs]) + assert len(jobs) == 2 + assert job_id_inv in [job["id"] for job in jobs] @test_util.skip_unless_galaxy("release_21.01") @test_util.skip_unless_tool("random_lines1") @@ -93,7 +95,7 @@ rerun_output = self.gi.jobs.rerun_job(original_job_id) original_output_content = self.gi.datasets.download_dataset(original_output["outputs"][0]["id"]) rerun_output_content = 
self.gi.datasets.download_dataset(rerun_output["outputs"][0]["id"]) - self.assertEqual(rerun_output_content, original_output_content) + assert rerun_output_content == original_output_content @test_util.skip_unless_galaxy("release_21.01") @test_util.skip_unless_tool("Show beginning1") @@ -116,16 +118,16 @@ raise Exception("The job should have failed") history_contents = self.gi.histories.show_history(self.history_id, contents=True) - self.assertEqual(len(history_contents), 3) - self.assertEqual(history_contents[1]["state"], "error") - self.assertEqual(history_contents[2]["state"], "paused") + assert len(history_contents) == 3 + assert history_contents[1]["state"] == "error" + assert history_contents[2]["state"] == "paused" # resume the paused step job - resumed_job = self.gi.jobs.resume_job(job_steps[-1]["job_id"]) - self.assertEqual(resumed_job[0]["name"], "out_file1") + resumed_outputs = self.gi.jobs.resume_job(job_steps[-1]["job_id"]) + assert resumed_outputs[0]["name"] == "out_file1" # the following does not pass stably - the job goes back to paused too quickly # history_contents_resumed = self.gi.histories.show_history(self.history_id, contents=True) - # self.assertNotEqual(history_contents_resumed[2]['state'], 'paused') + # assert history_contents_resumed[2]["state"] != "paused" # now rerun and remap with correct input param failed_job_id = self.gi.datasets.show_dataset(history_contents[1]["id"])["creating_job"] @@ -139,8 +141,8 @@ self.gi.jobs.wait_for_job(new_job_id) self.gi.jobs.resume_job(last_job_id) # last_job can get stuck on paused - resume it in case self.gi.jobs.wait_for_job(last_job_id) - self.assertEqual(last_dataset["hid"], 3) - self.assertEqual(last_dataset["id"], history_contents[2]["id"]) + assert last_dataset["hid"] == 3 + assert last_dataset["id"] == history_contents[2]["id"] self._wait_and_verify_dataset(last_dataset["id"], b"line 1\tline 1\n") @test_util.skip_unless_galaxy("release_19.05") @@ -148,29 +150,33 @@ def test_get_common_problems(self): job_id = self._run_tool()["jobs"][0]["id"] response = self.gi.jobs.get_common_problems(job_id) - self.assertEqual(response, {"has_duplicate_inputs": False, "has_empty_inputs": True}) + assert response == {"has_duplicate_inputs": False, "has_empty_inputs": True} @test_util.skip_unless_tool("random_lines1") def test_get_inputs(self): job_id = self._run_tool()["jobs"][0]["id"] response = self.gi.jobs.get_inputs(job_id) - self.assertEqual(response, [{"name": "input", "dataset": {"src": "hda", "id": self.dataset_id}}]) + assert response == [{"name": "input", "dataset": {"src": "hda", "id": self.dataset_id}}] @test_util.skip_unless_tool("random_lines1") def test_get_outputs(self): output = self._run_tool() job_id, output_id = output["jobs"][0]["id"], output["outputs"][0]["id"] response = self.gi.jobs.get_outputs(job_id) - self.assertEqual(response, [{"name": "out_file1", "dataset": {"src": "hda", "id": output_id}}]) + assert response == [{"name": "out_file1", "dataset": {"src": "hda", "id": output_id}}] @test_util.skip_unless_galaxy("release_20.05") @test_util.skip_unless_tool("random_lines1") def test_get_destination_params(self): job_id = self._run_tool()["jobs"][0]["id"] + # In Galaxy 20.05 and 20.09 we need to wait for the job, otherwise + # `get_destination_params()` receives a 500 error code. 
Fixed upstream + # in https://github.com/galaxyproject/galaxy/commit/3e7f03cd1f229b8c9421ade02002728a33e131d8 + self.gi.jobs.wait_for_job(job_id) response = self.gi.jobs.get_destination_params(job_id) - self.assertIn("Runner", response) - self.assertIn("Runner Job ID", response) - self.assertIn("Handler", response) + assert "Runner" in response + assert "Runner Job ID" in response + assert "Handler" in response @test_util.skip_unless_galaxy("release_18.01") @test_util.skip_unless_tool("random_lines1") @@ -183,7 +189,7 @@ "seed_source|seed": "asdf", } response = self.gi.jobs.search_jobs("random_lines1", inputs) - self.assertIn(job_id, [job["id"] for job in response]) + assert job_id in [job["id"] for job in response] @test_util.skip_unless_galaxy("release_20.01") @test_util.skip_unless_tool("random_lines1") @@ -192,47 +198,33 @@ job_id, output_id = output["jobs"][0]["id"], output["outputs"][0]["id"] response = self.gi.jobs.report_error(job_id, output_id, "Test error") # expected response when the Galaxy server does not have mail configured - self.assertEqual( - response, - { - "messages": [ - [ - "An error occurred sending the report by email: Mail is not configured for this Galaxy instance", - "danger", - ] + assert response == { + "messages": [ + [ + "An error occurred sending the report by email: Mail is not configured for this Galaxy instance", + "danger", ] - }, - ) + ] + } @test_util.skip_unless_galaxy("release_20.05") def test_show_job_lock(self): status = self.gi.jobs.show_job_lock() - self.assertFalse(status) + assert not status @test_util.skip_unless_galaxy("release_20.05") def test_update_job_lock(self): status = self.gi.jobs.update_job_lock(active=True) - self.assertTrue(status) + assert status status = self.gi.jobs.update_job_lock(active=False) - self.assertFalse(status) + assert not status @test_util.skip_unless_galaxy("release_18.01") def test_cancel_job(self): job_id = self._run_tool()["jobs"][0]["id"] - job_state = self.gi.jobs.show_job(job_id)["state"] - self.assertTrue(job_state, "deleted") + self.gi.jobs.cancel_job(job_id) + job = self.gi.jobs.wait_for_job(job_id, check=False) + assert job["state"] in ("deleted", "deleting") - def _run_tool(self, input_format: str = "legacy") -> dict: - tool_inputs = { - "num_lines": "1", - "input": {"src": "hda", "id": self.dataset_id}, - } - if input_format == "21.01": - tool_inputs.update({"seed_source": {"seed_source_selector": "set_seed", "seed": "asdf"}}) - else: - # legacy format - tool_inputs.update({"seed_source|seed_source_selector": "set_seed", "seed_source|seed": "asdf"}) - - return self.gi.tools.run_tool( - history_id=self.history_id, tool_id="random_lines1", tool_inputs=tool_inputs, input_format=input_format - ) + def _run_tool(self, input_format: Literal["21.01", "legacy"] = "legacy") -> dict: + return super()._run_random_lines1(self.history_id, self.dataset_id, input_format=input_format) diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyLibraries.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyLibraries.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyLibraries.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyLibraries.py 2022-10-13 18:17:37.000000000 +0000 @@ -22,12 +22,12 @@ self.gi.libraries.delete_library(self.library["id"]) def test_create_library(self): - self.assertEqual(self.library["name"], self.name) - self.assertIsNotNone(self.library["id"]) + assert self.library["name"] == self.name + assert self.library["id"] is not None def 
test_get_libraries(self): libraries_with_name = self.gi.libraries.get_libraries(name=self.name) - self.assertEqual(len([l for l in libraries_with_name if l["id"] == self.library["id"]]), 1) + assert len([l for l in libraries_with_name if l["id"] == self.library["id"]]) == 1 deleted_name = "deleted test library" deleted_library_id = self.gi.libraries.create_library( @@ -35,24 +35,24 @@ )["id"] self.gi.libraries.delete_library(deleted_library_id) deleted_libraries_with_name = self.gi.libraries.get_libraries(name=deleted_name, deleted=True) - self.assertEqual(len([l for l in deleted_libraries_with_name if l["id"] == deleted_library_id]), 1) + assert len([l for l in deleted_libraries_with_name if l["id"] == deleted_library_id]) == 1 all_non_deleted_libraries = self.gi.libraries.get_libraries(deleted=False) - self.assertEqual(len([l for l in all_non_deleted_libraries if l["id"] == self.library["id"]]), 1) - self.assertEqual([l for l in all_non_deleted_libraries if l["id"] == deleted_library_id], []) + assert len([l for l in all_non_deleted_libraries if l["id"] == self.library["id"]]) == 1 + assert [l for l in all_non_deleted_libraries if l["id"] == deleted_library_id] == [] all_deleted_libraries = self.gi.libraries.get_libraries(deleted=True) - self.assertEqual([l for l in all_deleted_libraries if l["id"] == self.library["id"]], []) - self.assertEqual(len([l for l in all_deleted_libraries if l["id"] == deleted_library_id]), 1) + assert [l for l in all_deleted_libraries if l["id"] == self.library["id"]] == [] + assert len([l for l in all_deleted_libraries if l["id"] == deleted_library_id]) == 1 all_libraries = self.gi.libraries.get_libraries(deleted=None) - self.assertEqual(len([l for l in all_libraries if l["id"] == self.library["id"]]), 1) - self.assertEqual(len([l for l in all_libraries if l["id"] == deleted_library_id]), 1) + assert len([l for l in all_libraries if l["id"] == self.library["id"]]) == 1 + assert len([l for l in all_libraries if l["id"] == deleted_library_id]) == 1 def test_show_library(self): library_data = self.gi.libraries.show_library(self.library["id"]) - self.assertEqual(self.library["id"], library_data["id"]) - self.assertEqual(self.library["name"], library_data["name"]) + assert self.library["id"] == library_data["id"] + assert self.library["name"] == library_data["name"] def test_upload_file_from_url(self): self.gi.libraries.upload_file_from_url( @@ -83,13 +83,13 @@ ret = self.gi.libraries.upload_from_galaxy_filesystem(self.library["id"], filesystem_paths) for dataset_dict in ret: dataset = self.gi.libraries.wait_for_dataset(self.library["id"], dataset_dict["id"]) - self.assertEqual(dataset["state"], "ok") + assert dataset["state"] == "ok" ret = self.gi.libraries.upload_from_galaxy_filesystem( self.library["id"], filesystem_paths, link_data_only="link_to_files" ) for dataset_dict in ret: dataset = self.gi.libraries.wait_for_dataset(self.library["id"], dataset_dict["id"]) - self.assertEqual(dataset["state"], "ok") + assert dataset["state"] == "ok" finally: shutil.rmtree(tempdir) @@ -104,8 +104,8 @@ updated_dataset = self.gi.libraries.update_library_dataset( dataset1[0]["id"], name="Modified name", misc_info="Modified the name succesfully" ) - self.assertEqual(updated_dataset["name"], "Modified name") - self.assertEqual(updated_dataset["misc_info"], "Modified the name succesfully") + assert updated_dataset["name"] == "Modified name" + assert updated_dataset["misc_info"] == "Modified the name succesfully" def test_library_permissions(self): current_user = 
self.gi.users.get_current_user() @@ -118,10 +118,10 @@ manage_in=user_id_list_new, ) ret = self.gi.libraries.get_library_permissions(self.library["id"]) - self.assertEqual({_[1] for _ in ret["access_library_role_list"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["modify_library_role_list"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["add_library_item_role_list"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["manage_library_role_list"]}, set(user_id_list_new)) + assert {_[1] for _ in ret["access_library_role_list"]} == set(user_id_list_new) + assert {_[1] for _ in ret["modify_library_role_list"]} == set(user_id_list_new) + assert {_[1] for _ in ret["add_library_item_role_list"]} == set(user_id_list_new) + assert {_[1] for _ in ret["manage_library_role_list"]} == set(user_id_list_new) def test_dataset_permissions(self): current_user = self.gi.users.get_current_user() @@ -131,28 +131,28 @@ ret = self.gi.libraries.set_dataset_permissions( dataset1[0]["id"], access_in=user_id_list_new, modify_in=user_id_list_new, manage_in=user_id_list_new ) - self.assertEqual({_[1] for _ in ret["access_dataset_roles"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["modify_item_roles"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["manage_dataset_roles"]}, set(user_id_list_new)) + assert {_[1] for _ in ret["access_dataset_roles"]} == set(user_id_list_new) + assert {_[1] for _ in ret["modify_item_roles"]} == set(user_id_list_new) + assert {_[1] for _ in ret["manage_dataset_roles"]} == set(user_id_list_new) # test get_dataset_permissions ret_get = self.gi.libraries.get_dataset_permissions(dataset1[0]["id"]) - self.assertEqual({_[1] for _ in ret_get["access_dataset_roles"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret_get["modify_item_roles"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret_get["manage_dataset_roles"]}, set(user_id_list_new)) + assert {_[1] for _ in ret_get["access_dataset_roles"]} == set(user_id_list_new) + assert {_[1] for _ in ret_get["modify_item_roles"]} == set(user_id_list_new) + assert {_[1] for _ in ret_get["manage_dataset_roles"]} == set(user_id_list_new) @test_util.skip_unless_galaxy("release_19.09") def test_upload_file_contents_with_tags(self): datasets = self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA, tags=["name:foobar", "barfoo"]) dataset_show = self.gi.libraries.show_dataset(self.library["id"], datasets[0]["id"]) - self.assertEqual(dataset_show["tags"], "name:foobar, barfoo") + assert dataset_show["tags"] == "name:foobar, barfoo" @test_util.skip_unless_galaxy("release_19.09") def test_update_dataset_tags(self): datasets = self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA) dataset_show = self.gi.libraries.show_dataset(self.library["id"], datasets[0]["id"]) - self.assertEqual(dataset_show["tags"], "") + assert dataset_show["tags"] == "" updated_dataset = self.gi.libraries.update_library_dataset(datasets[0]["id"], tags=["name:foobar", "barfoo"]) dataset_show = self.gi.libraries.show_dataset(self.library["id"], updated_dataset["id"]) - self.assertEqual(dataset_show["tags"], "name:foobar, barfoo") + assert dataset_show["tags"] == "name:foobar, barfoo" diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyObjects.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyObjects.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyObjects.py 2022-07-07 19:03:35.000000000 +0000 +++ 
python-bioblend-1.0.0/bioblend/_tests/TestGalaxyObjects.py 2022-10-13 18:17:37.000000000 +0000 @@ -9,9 +9,23 @@ import unittest import uuid from ssl import SSLError +from typing import ( + Any, + Callable, + Collection, + Dict, + Iterable, + List, + Set, + Tuple, + Union, +) from urllib.error import URLError from urllib.request import urlopen +import pytest +from typing_extensions import Literal + import bioblend from bioblend.galaxy import dataset_collections from bioblend.galaxy.objects import ( @@ -74,7 +88,7 @@ "tags": [], "url": "/api/workflows/9005c5112febe774", } -SAMPLE_INV_DICT = { +SAMPLE_INV_DICT: Dict[str, Any] = { "history_id": "2f94e8ae9edff68a", "id": "df7a1f0c02a5b08e", "inputs": {"0": {"id": "a7db2fac67043c7e", "src": "hda", "uuid": "7932ffe0-2340-4952-8857-dbaa50f1f46a"}}, @@ -112,7 +126,7 @@ } -def is_reachable(url): +def is_reachable(url: str) -> bool: res = None try: res = urlopen(url, timeout=5) @@ -123,7 +137,9 @@ return True -def upload_from_fs(lib, bnames, **kwargs): +def upload_from_fs( + lib: wrappers.Library, bnames: Iterable[str], **kwargs: Any +) -> Tuple[List[wrappers.LibraryDataset], List[str]]: tempdir = tempfile.mkdtemp(prefix="bioblend_test_") try: fnames = [os.path.join(tempdir, _) for _ in bnames] @@ -138,53 +154,55 @@ class MockWrapper(wrappers.Wrapper): BASE_ATTRS = ("a", "b") + a: int + b: List[int] - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) class TestWrapper(unittest.TestCase): def setUp(self): - self.d = {"a": 1, "b": [2, 3], "c": {"x": 4}} - with self.assertRaises(TypeError): + self.d: Dict[str, Any] = {"a": 1, "b": [2, 3], "c": {"x": 4}} + with pytest.raises(TypeError): wrappers.Wrapper(self.d) self.w = MockWrapper(self.d) def test_initialize(self): for k in MockWrapper.BASE_ATTRS: - self.assertEqual(getattr(self.w, k), self.d[k]) + assert getattr(self.w, k) == self.d[k] self.w.a = 222 self.w.b[0] = 222 - self.assertEqual(self.w.a, 222) - self.assertEqual(self.w.b[0], 222) - self.assertEqual(self.d["a"], 1) - self.assertEqual(self.d["b"][0], 2) - with self.assertRaises(AttributeError): - self.w.foo - with self.assertRaises(AttributeError): - self.w.foo = 0 + assert self.w.a == 222 + assert self.w.b[0] == 222 + assert self.d["a"] == 1 + assert self.d["b"][0] == 2 + with pytest.raises(AttributeError): + self.w.foo # type: ignore[attr-defined] + with pytest.raises(AttributeError): + self.w.foo = 0 # type: ignore[assignment] def test_taint(self): - self.assertFalse(self.w.is_modified) + assert not self.w.is_modified self.w.a = 111 # pylint: disable=W0201 - self.assertTrue(self.w.is_modified) + assert self.w.is_modified def test_serialize(self): w = MockWrapper.from_json(self.w.to_json()) - self.assertEqual(w.wrapped, self.w.wrapped) + assert w.wrapped == self.w.wrapped def test_clone(self): w = self.w.clone() - self.assertEqual(w.wrapped, self.w.wrapped) + assert w.wrapped == self.w.wrapped w.b[0] = 111 - self.assertEqual(self.w.b[0], 2) + assert self.w.b[0] == 2 def test_kwargs(self): parent = MockWrapper({"a": 10}) w = MockWrapper(self.d, parent=parent) - self.assertIs(w.parent, parent) - with self.assertRaises(AttributeError): - w.parent = 0 + assert w.parent is parent + with pytest.raises(AttributeError): + w.parent = 0 # type: ignore[assignment,misc] @test_util.skip_unless_galaxy() @@ -192,7 +210,7 @@ gi: galaxy_instance.GalaxyInstance @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"] 
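As in the setUpClass above, the object-oriented client is built from the BIOBLEND_GALAXY_URL and BIOBLEND_GALAXY_API_KEY environment variables. A minimal sketch of connecting the same way and working with a History wrapper (the history name is illustrative only):

    import os

    from bioblend.galaxy.objects import GalaxyInstance

    # The test suite takes the server location and key from these environment variables.
    gi = GalaxyInstance(os.environ["BIOBLEND_GALAXY_URL"], os.environ["BIOBLEND_GALAXY_API_KEY"])

    # Wrapper objects expose attributes instead of raw dictionaries.
    hist = gi.histories.create("bioblend example history")
    print(hist.id, hist.name)
    hist.delete(purge=True)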
galaxy_url = os.environ["BIOBLEND_GALAXY_URL"] cls.gi = galaxy_instance.GalaxyInstance(galaxy_url, galaxy_key) @@ -203,48 +221,49 @@ self.wf = wrappers.Workflow(SAMPLE_WF_DICT) def test_initialize(self): - self.assertEqual(self.wf.id, "9005c5112febe774") - self.assertEqual(self.wf.name, "paste_columns") - self.assertEqual(self.wf.deleted, False) - self.assertEqual(self.wf.owner, "user_foo") - self.assertEqual(self.wf.published, False) - self.assertEqual(self.wf.tags, []) - self.assertEqual(self.wf.input_labels_to_ids, {"Input Dataset": {"571", "572"}}) - self.assertEqual(self.wf.tool_labels_to_ids, {"Paste1": {"573"}}) - self.assertEqual(self.wf.data_input_ids, {"571", "572"}) - self.assertEqual(self.wf.source_ids, {"571", "572"}) - self.assertEqual(self.wf.sink_ids, {"573"}) + assert self.wf.id == "9005c5112febe774" + assert self.wf.name == "paste_columns" + assert not self.wf.deleted + assert self.wf.owner == "user_foo" + assert not self.wf.published + assert self.wf.tags == [] + assert self.wf.input_labels_to_ids == {"Input Dataset": {"571", "572"}} + assert self.wf.tool_labels_to_ids == {"Paste1": {"573"}} + assert self.wf.data_input_ids == {"571", "572"} + assert self.wf.source_ids == {"571", "572"} + assert self.wf.sink_ids == {"573"} def test_dag(self): - inv_dag = {} + inv_dag: Dict[str, Set[str]] = {} for h, tails in self.wf.dag.items(): for t in tails: inv_dag.setdefault(str(t), set()).add(h) - self.assertEqual(self.wf.inv_dag, inv_dag) + assert self.wf.inv_dag == inv_dag heads = set(self.wf.dag) - self.assertEqual(heads, set.union(*self.wf.inv_dag.values())) + assert heads == set.union(*self.wf.inv_dag.values()) tails = set(self.wf.inv_dag) - self.assertEqual(tails, set.union(*self.wf.dag.values())) + assert tails == set.union(*self.wf.dag.values()) ids = self.wf.sorted_step_ids() - self.assertEqual(set(ids), heads | tails) + assert set(ids) == heads | tails for h, tails in self.wf.dag.items(): for t in tails: - self.assertLess(ids.index(h), ids.index(t)) + assert ids.index(h) < ids.index(t) def test_steps(self): steps = SAMPLE_WF_DICT["steps"] + assert isinstance(steps, dict) for sid, s in self.wf.steps.items(): - self.assertIsInstance(s, wrappers.Step) - self.assertEqual(s.id, sid) - self.assertIn(sid, steps) - self.assertIs(s.parent, self.wf) - self.assertEqual(self.wf.data_input_ids, {"571", "572"}) - self.assertEqual(self.wf.tool_ids, {"573"}) + assert isinstance(s, wrappers.Step) + assert s.id == sid + assert sid in steps + assert s.parent is self.wf + assert self.wf.data_input_ids == {"571", "572"} + assert self.wf.tool_ids == {"573"} def test_taint(self): - self.assertFalse(self.wf.is_modified) + assert not self.wf.is_modified self.wf.steps["571"].tool_id = "foo" - self.assertTrue(self.wf.is_modified) + assert self.wf.is_modified def test_input_map(self): history = wrappers.History({}, gi=self.gi) @@ -252,20 +271,23 @@ hda = wrappers.HistoryDatasetAssociation({"id": "hda_id"}, container=history, gi=self.gi) ldda = wrappers.LibraryDatasetDatasetAssociation({"id": "ldda_id"}, container=library, gi=self.gi) input_map = self.wf._convert_input_map({"0": hda, "1": ldda, "2": {"id": "hda2_id", "src": "hda"}}) - self.assertEqual( - input_map, - { - "0": {"id": "hda_id", "src": "hda"}, - "1": {"id": "ldda_id", "src": "ldda"}, - "2": {"id": "hda2_id", "src": "hda"}, - }, - ) + assert input_map == { + "0": {"id": "hda_id", "src": "hda"}, + "1": {"id": "ldda_id", "src": "ldda"}, + "2": {"id": "hda2_id", "src": "hda"}, + } @test_util.skip_unless_galaxy("release_19.09") class 
TestInvocation(GalaxyObjectsTestBase): + dataset: wrappers.HistoryDatasetAssociation + history: wrappers.History + inv: wrappers.Invocation + workflow: wrappers.Workflow + workflow_pause: wrappers.Workflow + @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: super().setUpClass() cls.inv = wrappers.Invocation(SAMPLE_INV_DICT, gi=cls.gi) with open(SAMPLE_FN) as f: @@ -281,63 +303,64 @@ cls.history.delete(purge=True) def test_initialize(self): - self.assertEqual(self.inv.workflow_id, "03501d7626bd192f") - self.assertEqual(self.inv.history_id, "2f94e8ae9edff68a") - self.assertEqual(self.inv.id, "df7a1f0c02a5b08e") - self.assertEqual(self.inv.state, "ready") - self.assertEqual(self.inv.update_time, "2015-10-31T22:00:26") - self.assertEqual(self.inv.uuid, "c8aa2b1c-801a-11e5-a9e5-8ca98228593c") + assert self.inv.workflow_id == "03501d7626bd192f" + assert self.inv.history_id == "2f94e8ae9edff68a" + assert self.inv.id == "df7a1f0c02a5b08e" + assert self.inv.state == "ready" + assert self.inv.update_time == "2015-10-31T22:00:26" + assert self.inv.uuid == "c8aa2b1c-801a-11e5-a9e5-8ca98228593c" def test_initialize_steps(self): for step, step_dict in zip(self.inv.steps, SAMPLE_INV_DICT["steps"]): - self.assertIsInstance(step, wrappers.InvocationStep) - self.assertIs(step.parent, self.inv) - self.assertEqual(step.id, step_dict["id"]) - self.assertEqual(step.job_id, step_dict["job_id"]) - self.assertEqual(step.order_index, step_dict["order_index"]) - self.assertEqual(step.state, step_dict["state"]) - self.assertEqual(step.update_time, step_dict["update_time"]) - self.assertEqual(step.workflow_step_id, step_dict["workflow_step_id"]) - self.assertEqual(step.workflow_step_label, step_dict["workflow_step_label"]) - self.assertEqual(step.workflow_step_uuid, step_dict["workflow_step_uuid"]) + assert isinstance(step_dict, dict) + assert isinstance(step, wrappers.InvocationStep) + assert step.parent is self.inv + assert step.id == step_dict["id"] + assert step.job_id == step_dict["job_id"] + assert step.order_index == step_dict["order_index"] + assert step.state == step_dict["state"] + assert step.update_time == step_dict["update_time"] + assert step.workflow_step_id == step_dict["workflow_step_id"] + assert step.workflow_step_label == step_dict["workflow_step_label"] + assert step.workflow_step_uuid == step_dict["workflow_step_uuid"] def test_initialize_inputs(self): for i, input in enumerate(self.inv.inputs): - self.assertEqual(input, {**SAMPLE_INV_DICT["inputs"][str(i)], "label": str(i)}) + assert input == {**SAMPLE_INV_DICT["inputs"][str(i)], "label": str(i)} def test_sorted_step_ids(self): - self.assertListEqual(self.inv.sorted_step_ids(), ["d413a19dec13d11e", "2f94e8ae9edff68a"]) + assert self.inv.sorted_step_ids() == ["d413a19dec13d11e", "2f94e8ae9edff68a"] def test_step_states(self): - self.assertSetEqual(self.inv.step_states(), {None, "new"}) + assert self.inv.step_states() == {None, "new"} def test_number_of_steps(self): - self.assertEqual(self.inv.number_of_steps(), 2) + assert self.inv.number_of_steps() == 2 def test_sorted_steps_by(self): - self.assertEqual(len(self.inv.sorted_steps_by()), 2) + assert len(self.inv.sorted_steps_by()) == 2 steps = self.inv.sorted_steps_by(step_ids={"2f94e8ae9edff68a"}) - self.assertEqual(len(steps), 1) - self.assertEqual(steps[0].id, "2f94e8ae9edff68a") - self.assertListEqual(self.inv.sorted_steps_by(step_ids={"unmatched_id"}), []) + assert len(steps) == 1 + assert steps[0].id == "2f94e8ae9edff68a" + assert 
self.inv.sorted_steps_by(step_ids={"unmatched_id"}) == [] steps = self.inv.sorted_steps_by(states={"new"}) - self.assertEqual(len(steps), 1) - self.assertEqual(steps[0].state, "new") - self.assertListEqual(self.inv.sorted_steps_by(states={"unmatched_state"}), []) + assert len(steps) == 1 + assert steps[0].state == "new" + assert self.inv.sorted_steps_by(states={"unmatched_state"}) == [] steps = self.inv.sorted_steps_by(indices={0}, states={None, "new"}) - self.assertEqual(len(steps), 1) - self.assertEqual(steps[0].order_index, 0) - self.assertListEqual(self.inv.sorted_steps_by(indices={2}), []) + assert len(steps) == 1 + assert steps[0].order_index == 0 + assert self.inv.sorted_steps_by(indices={2}) == [] def test_cancel(self): inv = self._obj_invoke_workflow() inv.cancel() - self.assertEqual(inv.state, "cancelled") + assert inv.state == "cancelled" def test_wait(self): inv = self._obj_invoke_workflow() inv.wait() - self.assertEqual(inv.state, "scheduled") + assert inv.state == "scheduled" def test_refresh(self): inv = self._obj_invoke_workflow() @@ -345,7 +368,7 @@ # use wait_for_invocation() directly, because inv.wait() will update inv automatically self.gi.gi.invocations.wait_for_invocation(inv.id) inv.refresh() - self.assertEqual(inv.state, "scheduled") + assert inv.state == "scheduled" def test_run_step_actions(self): inv = self.workflow_pause.invoke( @@ -353,27 +376,27 @@ history=self.history, ) for _ in range(20): - with self.assertRaises(bioblend.TimeoutException): + with pytest.raises(bioblend.TimeoutException): inv.wait(maxwait=0.5, interval=0.5) inv.refresh() if len(inv.steps) >= 3: break - self.assertEqual(inv.steps[2].action, None) + assert inv.steps[2].action is None inv.run_step_actions([inv.steps[2]], [True]) - self.assertEqual(inv.steps[2].action, True) + assert inv.steps[2].action is True def test_summary(self): inv = self._obj_invoke_workflow() inv.wait() summary = inv.summary() - self.assertEqual(summary["populated_state"], "ok") + assert summary["populated_state"] == "ok" def test_step_jobs_summary(self): inv = self._obj_invoke_workflow() inv.wait() step_jobs_summary = inv.step_jobs_summary() - self.assertEqual(len(step_jobs_summary), 1) - self.assertEqual(step_jobs_summary[0]["populated_state"], "ok") + assert len(step_jobs_summary) == 1 + assert step_jobs_summary[0]["populated_state"] == "ok" def test_report(self): inv = self._obj_invoke_workflow() @@ -385,9 +408,9 @@ inv = self._obj_invoke_workflow() inv.wait() biocompute_object = inv.biocompute_object() - self.assertEqual(len(biocompute_object["description_domain"]["pipeline_steps"]), 1) + assert len(biocompute_object["description_domain"]["pipeline_steps"]) == 1 - def _obj_invoke_workflow(self): + def _obj_invoke_workflow(self) -> wrappers.Invocation: return self.workflow.invoke( inputs={"Input 1": self.dataset, "Input 2": self.dataset}, history=self.history, @@ -397,8 +420,12 @@ @test_util.skip_unless_galaxy("release_19.09") class TestObjInvocationClient(GalaxyObjectsTestBase): + history: wrappers.History + inv: wrappers.Invocation + workflow: wrappers.Workflow + @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: super().setUpClass() with open(SAMPLE_FN) as f: cls.workflow = cls.gi.workflows.import_new(f.read()) @@ -417,36 +444,36 @@ def test_get(self): inv = self.gi.invocations.get(self.inv.id) - self.assertEqual(inv.id, self.inv.id) - self.assertEqual(inv.workflow_id, self.workflow.id) - self.assertEqual(inv.history_id, self.history.id) - self.assertEqual(inv.state, "scheduled") - 
self.assertEqual(inv.update_time, self.inv.update_time) - self.assertEqual(inv.uuid, self.inv.uuid) + assert inv.id == self.inv.id + assert inv.workflow_id == self.workflow.id + assert inv.history_id == self.history.id + assert inv.state == "scheduled" + assert inv.update_time == self.inv.update_time + assert inv.uuid == self.inv.uuid def test_get_previews(self): previews = self.gi.invocations.get_previews() - self.assertSetEqual({type(preview) for preview in previews}, {wrappers.InvocationPreview}) + assert {type(preview) for preview in previews} == {wrappers.InvocationPreview} inv_preview = next(p for p in previews if p.id == self.inv.id) - self.assertEqual(inv_preview.id, self.inv.id) - self.assertEqual(inv_preview.workflow_id, self.workflow.id) - self.assertEqual(inv_preview.history_id, self.history.id) - self.assertEqual(inv_preview.state, "scheduled") - self.assertEqual(inv_preview.update_time, self.inv.update_time) - self.assertEqual(inv_preview.uuid, self.inv.uuid) + assert inv_preview.id == self.inv.id + assert inv_preview.workflow_id == self.workflow.id + assert inv_preview.history_id == self.history.id + assert inv_preview.state == "scheduled" + assert inv_preview.update_time == self.inv.update_time + assert inv_preview.uuid == self.inv.uuid def test_list(self): invs = self.gi.invocations.list() inv = next(i for i in invs if i.id == self.inv.id) - self.assertEqual(inv.id, self.inv.id) - self.assertEqual(inv.workflow_id, self.workflow.id) - self.assertEqual(inv.history_id, self.history.id) - self.assertEqual(inv.state, "scheduled") - self.assertEqual(inv.update_time, self.inv.update_time) - self.assertEqual(inv.uuid, self.inv.uuid) - self.assertGreater(len(self.inv.steps), 0) + assert inv.id == self.inv.id + assert inv.workflow_id == self.workflow.id + assert inv.history_id == self.history.id + assert inv.state == "scheduled" + assert inv.update_time == self.inv.update_time + assert inv.uuid == self.inv.uuid + assert len(self.inv.steps) > 0 history = self.gi.histories.create(name="TestGalaxyObjInvocationClientList") - self.assertEqual(self.gi.invocations.list(history=history), []) + assert self.gi.invocations.list(history=history) == [] history.delete(purge=True) @@ -455,15 +482,15 @@ name = f"test_{uuid.uuid4().hex}" description, synopsis = "D", "S" lib = self.gi.libraries.create(name, description=description, synopsis=synopsis) - self.assertEqual(lib.name, name) - self.assertEqual(lib.description, description) - self.assertEqual(lib.synopsis, synopsis) - self.assertEqual(len(lib.content_infos), 1) # root folder - self.assertEqual(len(lib.folder_ids), 1) - self.assertEqual(len(lib.dataset_ids), 0) - self.assertIn(lib.id, [_.id for _ in self.gi.libraries.list()]) + assert lib.name == name + assert lib.description == description + assert lib.synopsis == synopsis + assert len(lib.content_infos) == 1 # root folder + assert len(lib.folder_ids) == 1 + assert len(lib.dataset_ids) == 0 + assert lib.id in [_.id for _ in self.gi.libraries.list()] lib.delete() - self.assertFalse(lib.is_mapped) + assert not lib.is_mapped def test_workflow_from_str(self): with open(SAMPLE_FN) as f: @@ -500,8 +527,8 @@ for k in "tool_inputs", "tool_version": wf_dict["steps"][id_][k] = None wf = wrappers.Workflow(wf_dict, gi=self.gi) - self.assertFalse(wf.is_runnable) - with self.assertRaises(RuntimeError): + assert not wf.is_runnable + with pytest.raises(RuntimeError): wf.invoke() wf.delete() @@ -509,31 +536,30 @@ with open(SAMPLE_FN) as f: wf1 = self.gi.workflows.import_new(f.read()) wf2 = 
self.gi.workflows.import_new(wf1.export()) - self.assertNotEqual(wf1.id, wf2.id) + assert wf1.id != wf2.id for wf in wf1, wf2: self._check_and_del_workflow(wf) - def _check_and_del_workflow(self, wf, check_is_public=False): + def _check_and_del_workflow(self, wf: wrappers.Workflow, check_is_public: bool = False) -> None: # Galaxy appends additional text to imported workflow names - self.assertTrue(wf.name.startswith("paste_columns")) - self.assertEqual(type(wf.owner), str) - self.assertEqual(len(wf.steps), 3) + assert wf.name.startswith("paste_columns") + assert len(wf.steps) == 3 for step_id, step in wf.steps.items(): - self.assertIsInstance(step, wrappers.Step) - self.assertEqual(step_id, step.id) - self.assertIsInstance(step.tool_inputs, dict) + assert isinstance(step, wrappers.Step) + assert step_id == step.id + assert isinstance(step.tool_inputs, dict) if step.type == "tool": - self.assertIsNotNone(step.tool_id) - self.assertIsNotNone(step.tool_version) - self.assertIsInstance(step.input_steps, dict) + assert step.tool_id is not None + assert step.tool_version is not None + assert isinstance(step.input_steps, dict) elif step.type in ("data_collection_input", "data_input"): - self.assertIsNone(step.tool_id) - self.assertIsNone(step.tool_version) - self.assertEqual(step.input_steps, {}) + assert step.tool_id is None + assert step.tool_version is None + assert step.input_steps == {} wf_ids = {_.id for _ in self.gi.workflows.list()} - self.assertIn(wf.id, wf_ids) + assert wf.id in wf_ids if check_is_public: - self.assertTrue(wf.published) + assert wf.published wf.delete() # not very accurate: @@ -545,7 +571,7 @@ if pub_only_ids: wf_id = pub_only_ids.pop() imported = self.gi.workflows.import_shared(wf_id) - self.assertIsInstance(imported, wrappers.Workflow) + assert isinstance(imported, wrappers.Workflow) imported.delete() else: self.skipTest("no published workflows, manually publish a workflow to run this test") @@ -559,9 +585,11 @@ def test_get_workflows(self): self._test_multi_get("workflows") - def _normalized_functions(self, obj_type): + def _normalized_functions( + self, obj_type: Literal["histories", "libraries", "workflows"] + ) -> Tuple[Callable, Dict[str, Any]]: if obj_type == "libraries": - create = self.gi.libraries.create + create: Callable = self.gi.libraries.create del_kwargs = {} elif obj_type == "histories": create = self.gi.histories.create @@ -577,31 +605,31 @@ del_kwargs = {} return create, del_kwargs - def _test_multi_get(self, obj_type): + def _test_multi_get(self, obj_type: Literal["histories", "libraries", "workflows"]) -> None: obj_gi_client = getattr(self.gi, obj_type) create, del_kwargs = self._normalized_functions(obj_type) - def ids(seq): + def ids(seq: Iterable[wrappers.Wrapper]) -> Set[str]: return {_.id for _ in seq} names = [f"test_{uuid.uuid4().hex}" for _ in range(2)] objs = [] try: objs = [create(_) for _ in names] - self.assertLessEqual(ids(objs), ids(obj_gi_client.list())) + assert ids(objs) <= ids(obj_gi_client.list()) if obj_type != "workflows": filtered = obj_gi_client.list(name=names[0]) - self.assertEqual(len(filtered), 1) - self.assertEqual(filtered[0].id, objs[0].id) + assert len(filtered) == 1 + assert filtered[0].id == objs[0].id del_id = objs[-1].id objs.pop().delete(**del_kwargs) - self.assertIn(del_id, ids(obj_gi_client.get_previews(deleted=True))) + assert del_id in ids(obj_gi_client.get_previews(deleted=True)) else: # Galaxy appends info strings to imported workflow names prev = obj_gi_client.get_previews()[0] filtered = 
obj_gi_client.list(name=prev.name) - self.assertEqual(len(filtered), 1) - self.assertEqual(filtered[0].id, prev.id) + assert len(filtered) == 1 + assert filtered[0].id == prev.id finally: for o in objs: o.delete(**del_kwargs) @@ -618,27 +646,27 @@ self._test_delete_by_name("workflows") self._test_delete_by_ambiguous_name("workflows") - def _test_delete_by_name(self, obj_type): + def _test_delete_by_name(self, obj_type: Literal["histories", "libraries", "workflows"]) -> None: obj_gi_client = getattr(self.gi, obj_type) create, del_kwargs = self._normalized_functions(obj_type) name = f"test_{uuid.uuid4().hex}" create(name) prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted] - self.assertEqual(len(prevs), 1) + assert len(prevs) == 1 del_kwargs["name"] = name obj_gi_client.delete(**del_kwargs) prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted] - self.assertEqual(len(prevs), 0) + assert len(prevs) == 0 - def _test_delete_by_ambiguous_name(self, obj_type): + def _test_delete_by_ambiguous_name(self, obj_type: Literal["histories", "libraries", "workflows"]) -> None: obj_gi_client = getattr(self.gi, obj_type) create, del_kwargs = self._normalized_functions(obj_type) name = f"test_{uuid.uuid4().hex}" objs = [create(name) for _ in range(2)] prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted] - self.assertEqual(len(prevs), len(objs)) + assert len(prevs) == len(objs) del_kwargs["name"] = name - with self.assertRaises(ValueError): + with pytest.raises(ValueError): obj_gi_client.delete(**del_kwargs) # Cleanup del del_kwargs["name"] @@ -660,33 +688,34 @@ def test_root_folder(self): r = self.lib.root_folder - self.assertIsNone(r.parent) + assert r.parent is None def test_folder(self): name = f"test_{uuid.uuid4().hex}" desc = "D" folder = self.lib.create_folder(name, description=desc) - self.assertEqual(folder.name, name) - self.assertEqual(folder.description, desc) - self.assertIs(folder.container, self.lib) - self.assertEqual(folder.parent.id, self.lib.root_folder.id) - self.assertEqual(len(self.lib.content_infos), 2) - self.assertEqual(len(self.lib.folder_ids), 2) - self.assertIn(folder.id, self.lib.folder_ids) + assert folder.name == name + assert folder.description == desc + assert folder.container is self.lib + assert folder.parent is not None + assert folder.parent.id == self.lib.root_folder.id + assert len(self.lib.content_infos) == 2 + assert len(self.lib.folder_ids) == 2 + assert folder.id in self.lib.folder_ids retrieved = self.lib.get_folder(folder.id) - self.assertEqual(folder.id, retrieved.id) + assert folder.id == retrieved.id - def _check_datasets(self, dss): - self.assertEqual(len(dss), len(self.lib.dataset_ids)) - self.assertEqual({_.id for _ in dss}, set(self.lib.dataset_ids)) + def _check_datasets(self, dss: Collection[wrappers.LibraryDataset]) -> None: + assert len(dss) == len(self.lib.dataset_ids) + assert {_.id for _ in dss} == set(self.lib.dataset_ids) for ds in dss: - self.assertIs(ds.container, self.lib) + assert ds.container is self.lib def test_dataset(self): folder = self.lib.create_folder(f"test_{uuid.uuid4().hex}") ds = self.lib.upload_data(FOO_DATA, folder=folder) - self.assertEqual(len(self.lib.content_infos), 3) - self.assertEqual(len(self.lib.folder_ids), 2) + assert len(self.lib.content_infos) == 3 + assert len(self.lib.folder_ids) == 2 self._check_datasets([ds]) def test_dataset_from_url(self): @@ -709,7 +738,7 @@ self._check_datasets(dss) dss, fnames = upload_from_fs(self.lib, bnames, 
link_data_only="link_to_files") for ds, fn in zip(dss, fnames): - self.assertEqual(ds.file_name, fn) + assert ds.file_name == fn def test_copy_from_dataset(self): hist = self.gi.histories.create(f"test_{uuid.uuid4().hex}") @@ -723,18 +752,18 @@ def test_get_dataset(self): ds = self.lib.upload_data(FOO_DATA) retrieved = self.lib.get_dataset(ds.id) - self.assertEqual(ds.id, retrieved.id) + assert ds.id == retrieved.id def test_get_datasets(self): bnames = [f"f{i}.txt" for i in range(2)] dss, _ = upload_from_fs(self.lib, bnames) retrieved = self.lib.get_datasets() - self.assertEqual(len(dss), len(retrieved)) - self.assertEqual({_.id for _ in dss}, {_.id for _ in retrieved}) + assert len(dss) == len(retrieved) + assert {_.id for _ in dss} == {_.id for _ in retrieved} name = f"/{bnames[0]}" selected = self.lib.get_datasets(name=name) - self.assertEqual(len(selected), 1) - self.assertEqual(selected[0].name, bnames[0]) + assert len(selected) == 1 + assert selected[0].name == bnames[0] class TestLDContents(GalaxyObjectsTestBase): @@ -749,36 +778,36 @@ def test_dataset_get_stream(self): for idx, c in enumerate(self.ds.get_stream(chunk_size=1)): - self.assertEqual(FOO_DATA[idx].encode(), c) + assert FOO_DATA[idx].encode() == c def test_dataset_peek(self): fetched_data = self.ds.peek(chunk_size=4) - self.assertEqual(FOO_DATA[0:4].encode(), fetched_data) + assert FOO_DATA[0:4].encode() == fetched_data def test_dataset_download(self): with tempfile.TemporaryFile() as f: self.ds.download(f) f.seek(0) - self.assertEqual(FOO_DATA.encode(), f.read()) + assert FOO_DATA.encode() == f.read() def test_dataset_get_contents(self): - self.assertEqual(FOO_DATA.encode(), self.ds.get_contents()) + assert FOO_DATA.encode() == self.ds.get_contents() def test_dataset_delete(self): self.ds.delete() # Cannot test this yet because the 'deleted' attribute is not exported # by the API at the moment - # self.assertTrue(self.ds.deleted) + # assert self.ds.deleted def test_dataset_update(self): new_name = f"test_{uuid.uuid4().hex}" new_misc_info = f"Annotation for {new_name}" new_genome_build = "hg19" updated_ldda = self.ds.update(name=new_name, misc_info=new_misc_info, genome_build=new_genome_build) - self.assertEqual(self.ds.id, updated_ldda.id) - self.assertEqual(self.ds.name, new_name) - self.assertEqual(self.ds.misc_info, new_misc_info) - self.assertEqual(self.ds.genome_build, new_genome_build) + assert self.ds.id == updated_ldda.id + assert self.ds.name == new_name + assert self.ds.misc_info == new_misc_info + assert self.ds.genome_build == new_genome_build class TestHistory(GalaxyObjectsTestBase): @@ -792,24 +821,24 @@ def test_create_delete(self): name = f"test_{uuid.uuid4().hex}" hist = self.gi.histories.create(name) - self.assertEqual(hist.name, name) + assert hist.name == name hist_id = hist.id - self.assertIn(hist_id, [_.id for _ in self.gi.histories.list()]) + assert hist_id in [_.id for _ in self.gi.histories.list()] hist.delete(purge=True) - self.assertFalse(hist.is_mapped) + assert not hist.is_mapped h = self.gi.histories.get(hist_id) - self.assertTrue(h.deleted) + assert h.deleted - def _check_dataset(self, hda): - self.assertIsInstance(hda, wrappers.HistoryDatasetAssociation) - self.assertIs(hda.container, self.hist) - self.assertEqual(len(self.hist.dataset_ids), 1) - self.assertEqual(self.hist.dataset_ids[0], hda.id) + def _check_dataset(self, hda: wrappers.HistoryDatasetAssociation) -> None: + assert isinstance(hda, wrappers.HistoryDatasetAssociation) + assert hda.container is self.hist + assert 
len(self.hist.dataset_ids) == 1 + assert self.hist.dataset_ids[0] == hda.id def test_import_dataset(self): lib = self.gi.libraries.create(f"test_{uuid.uuid4().hex}") lds = lib.upload_data(FOO_DATA) - self.assertEqual(len(self.hist.dataset_ids), 0) + assert len(self.hist.dataset_ids) == 0 hda = self.hist.import_dataset(lds) lib.delete() self._check_dataset(hda) @@ -828,7 +857,7 @@ def test_get_dataset(self): hda = self.hist.paste_content(FOO_DATA) retrieved = self.hist.get_dataset(hda.id) - self.assertEqual(hda.id, retrieved.id) + assert hda.id == retrieved.id def test_get_datasets(self): bnames = [f"f{i}.txt" for i in range(2)] @@ -837,21 +866,21 @@ hdas = [self.hist.import_dataset(_) for _ in lds] lib.delete() retrieved = self.hist.get_datasets() - self.assertEqual(len(hdas), len(retrieved)) - self.assertEqual({_.id for _ in hdas}, {_.id for _ in retrieved}) + assert len(hdas) == len(retrieved) + assert {_.id for _ in hdas} == {_.id for _ in retrieved} selected = self.hist.get_datasets(name=bnames[0]) - self.assertEqual(len(selected), 1) - self.assertEqual(selected[0].name, bnames[0]) + assert len(selected) == 1 + assert selected[0].name == bnames[0] def test_export_and_download(self): jeha_id = self.hist.export(wait=True, maxwait=60) - self.assertTrue(jeha_id) + assert jeha_id tempdir = tempfile.mkdtemp(prefix="bioblend_test_") temp_fn = os.path.join(tempdir, "export.tar.gz") try: with open(temp_fn, "wb") as fo: self.hist.download(jeha_id, fo) - self.assertTrue(tarfile.is_tarfile(temp_fn)) + assert tarfile.is_tarfile(temp_fn) finally: shutil.rmtree(tempdir) @@ -860,31 +889,31 @@ new_annotation = f"Annotation for {new_name}" new_tags = ["tag1", "tag2"] updated_hist = self.hist.update(name=new_name, annotation=new_annotation, tags=new_tags) - self.assertEqual(self.hist.id, updated_hist.id) - self.assertEqual(self.hist.name, new_name) - self.assertEqual(self.hist.annotation, new_annotation) - self.assertEqual(self.hist.tags, new_tags) + assert self.hist.id == updated_hist.id + assert self.hist.name == new_name + assert self.hist.annotation == new_annotation + assert self.hist.tags == new_tags updated_hist = self.hist.update(published=True) - self.assertEqual(self.hist.id, updated_hist.id) - self.assertTrue(self.hist.published) + assert self.hist.id == updated_hist.id + assert self.hist.published def test_create_dataset_collection(self): self._create_collection_description() hdca = self.hist.create_dataset_collection(self.collection_description) - self.assertIsInstance(hdca, wrappers.HistoryDatasetCollectionAssociation) - self.assertEqual(hdca.collection_type, "list") - self.assertIs(hdca.container, self.hist) - self.assertEqual(len(hdca.elements), 2) - self.assertEqual(self.dataset1.id, hdca.elements[0]["object"]["id"]) - self.assertEqual(self.dataset2.id, hdca.elements[1]["object"]["id"]) + assert isinstance(hdca, wrappers.HistoryDatasetCollectionAssociation) + assert hdca.collection_type == "list" + assert hdca.container is self.hist + assert len(hdca.elements) == 2 + assert self.dataset1.id == hdca.elements[0]["object"]["id"] + assert self.dataset2.id == hdca.elements[1]["object"]["id"] def test_delete_dataset_collection(self): self._create_collection_description() hdca = self.hist.create_dataset_collection(self.collection_description) hdca.delete() - self.assertTrue(hdca.deleted) + assert hdca.deleted - def _create_collection_description(self): + def _create_collection_description(self) -> None: self.dataset1 = self.hist.paste_content(FOO_DATA) self.dataset2 = 
self.hist.paste_content(FOO_DATA_2) self.collection_description = dataset_collections.CollectionDescription( @@ -907,40 +936,40 @@ def test_dataset_get_stream(self): for idx, c in enumerate(self.ds.get_stream(chunk_size=1)): - self.assertEqual(FOO_DATA[idx].encode(), c) + assert FOO_DATA[idx].encode() == c def test_dataset_peek(self): fetched_data = self.ds.peek(chunk_size=4) - self.assertEqual(FOO_DATA[0:4].encode(), fetched_data) + assert FOO_DATA[0:4].encode() == fetched_data def test_dataset_download(self): with tempfile.TemporaryFile() as f: self.ds.download(f) f.seek(0) - self.assertEqual(FOO_DATA.encode(), f.read()) + assert FOO_DATA.encode() == f.read() def test_dataset_get_contents(self): - self.assertEqual(FOO_DATA.encode(), self.ds.get_contents()) + assert FOO_DATA.encode() == self.ds.get_contents() def test_dataset_update(self): new_name = f"test_{uuid.uuid4().hex}" new_annotation = f"Annotation for {new_name}" new_genome_build = "hg19" updated_hda = self.ds.update(name=new_name, annotation=new_annotation, genome_build=new_genome_build) - self.assertEqual(self.ds.id, updated_hda.id) - self.assertEqual(self.ds.name, new_name) - self.assertEqual(self.ds.annotation, new_annotation) - self.assertEqual(self.ds.genome_build, new_genome_build) + assert self.ds.id == updated_hda.id + assert self.ds.name == new_name + assert self.ds.annotation == new_annotation + assert self.ds.genome_build == new_genome_build def test_dataset_delete(self): self.ds.delete() - self.assertTrue(self.ds.deleted) - self.assertFalse(self.ds.purged) + assert self.ds.deleted + assert not self.ds.purged def test_dataset_purge(self): self.ds.delete(purge=True) - self.assertTrue(self.ds.deleted) - self.assertTrue(self.ds.purged) + assert self.ds.deleted + assert self.ds.purged @test_util.skip_unless_galaxy("release_19.09") @@ -957,13 +986,13 @@ self.wf.delete() self.lib.delete() - def _test(self, existing_hist=False, params=False): + def _test(self, existing_hist: bool = False, pass_params: bool = False) -> None: hist_name = f"test_{uuid.uuid4().hex}" if existing_hist: - hist = self.gi.histories.create(hist_name) + hist: Union[str, wrappers.History] = self.gi.histories.create(hist_name) else: hist = hist_name - if params: + if pass_params: params = {"Paste1": {"delimiter": "U"}} sep = "_" # 'U' maps to '_' in the paste tool else: @@ -976,23 +1005,23 @@ inv.wait() last_step = inv.sorted_steps_by()[-1] out_ds = last_step.get_outputs()["out_file1"] - self.assertEqual(out_ds.container.id, out_hist.id) + assert out_ds.container.id == out_hist.id res = out_ds.get_contents() exp_rows = zip(*(_.splitlines() for _ in self.contents)) exp_res = ("\n".join(sep.join(t) for t in exp_rows) + "\n").encode() - self.assertEqual(res, exp_res) - if existing_hist: - self.assertEqual(out_hist.id, hist.id) + assert res == exp_res + if isinstance(hist, wrappers.History): # i.e. 
existing_hist == True + assert out_hist.id == hist.id out_hist.delete(purge=True) - def test_existing_history(self): + def test_existing_history(self) -> None: self._test(existing_hist=True) - def test_new_history(self): + def test_new_history(self) -> None: self._test(existing_hist=False) - def test_params(self): - self._test(params=True) + def test_params(self) -> None: + self._test(pass_params=True) @test_util.skip_unless_galaxy("release_19.09") @@ -1018,17 +1047,17 @@ ], ) dataset_collection = self.hist.create_dataset_collection(collection_description) - self.assertEqual(len(self.hist.content_infos), 3) + assert len(self.hist.content_infos) == 3 input_map = {"0": dataset_collection, "1": dataset1} inv = self.wf.invoke(input_map, history=self.hist) inv.wait() self.hist.refresh() - self.assertEqual(len(self.hist.content_infos), 6) + assert len(self.hist.content_infos) == 6 last_step = inv.sorted_steps_by()[-1] out_hdca = last_step.get_output_collections()["out_file1"] - self.assertEqual(out_hdca.collection_type, "list") - self.assertEqual(len(out_hdca.elements), 2) - self.assertEqual(out_hdca.container.id, self.hist.id) + assert out_hdca.collection_type == "list" + assert len(out_hdca.elements) == 2 + assert out_hdca.container.id == self.hist.id class TestJob(GalaxyObjectsTestBase): @@ -1036,36 +1065,9 @@ job_prevs = self.gi.jobs.get_previews() if len(job_prevs) > 0: job_prev = job_prevs[0] - self.assertIsInstance(job_prev, wrappers.JobPreview) + assert isinstance(job_prev, wrappers.JobPreview) job = self.gi.jobs.get(job_prev.id) - self.assertIsInstance(job, wrappers.Job) - self.assertEqual(job.id, job_prev.id) + assert isinstance(job, wrappers.Job) + assert job.id == job_prev.id for job in self.gi.jobs.list(): - self.assertIsInstance(job, wrappers.Job) - - -def suite(): - loader = unittest.TestLoader() - s = unittest.TestSuite() - s.addTests( - [ - loader.loadTestsFromTestCase(c) - for c in ( - TestWrapper, - TestWorkflow, - TestGalaxyInstance, - TestLibrary, - TestLDContents, - TestHistory, - TestHDAContents, - TestRunWorkflow, - ) - ] - ) - return s - - -if __name__ == "__main__": - tests = suite() - RUNNER = unittest.TextTestRunner(verbosity=2) - RUNNER.run(tests) + assert isinstance(job, wrappers.Job) diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyQuotas.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyQuotas.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyQuotas.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyQuotas.py 2022-10-13 18:17:37.000000000 +0000 @@ -18,14 +18,14 @@ def test_create_quota(self): quota = self.gi.quotas.show_quota(self.quota["id"]) - self.assertEqual(quota["name"], self.quota_name) - self.assertEqual(quota["bytes"], 107374182400) - self.assertEqual(quota["operation"], "=") - self.assertEqual(quota["description"], "testing") + assert quota["name"] == self.quota_name + assert quota["bytes"] == 107374182400 + assert quota["operation"] == "=" + assert quota["description"] == "testing" def test_get_quotas(self): quotas = self.gi.quotas.get_quotas() - self.assertIn(self.quota["id"], [quota["id"] for quota in quotas]) + assert self.quota["id"] in [quota["id"] for quota in quotas] def test_update_quota(self): response = self.gi.quotas.update_quota( @@ -36,17 +36,17 @@ operation="-", amount=".01 TB", ) - self.assertIn(f"""Quota '{self.quota_name}' has been renamed to '{self.quota_name}-new'""", response) + assert f"""Quota '{self.quota_name}' has been renamed to '{self.quota_name}-new'""" in response 
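The quota tests above cover listing, inspecting and updating quotas. A corresponding sketch against a live server (URL and key are placeholders; quota management needs an admin API key):

    from bioblend.galaxy import GalaxyInstance

    # Placeholder server URL and admin API key.
    gi = GalaxyInstance("https://galaxy.example.org", key="...")

    quotas = gi.quotas.get_quotas()
    quota = gi.quotas.show_quota(quotas[0]["id"])
    print(quota["name"], quota["bytes"], quota["operation"])

    # Rename and shrink the quota; update_quota() returns a status message.
    message = gi.quotas.update_quota(quotas[0]["id"], name=quota["name"] + "-new", amount=".01 TB", operation="-")
    print(message)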
quota = self.gi.quotas.show_quota(self.quota["id"]) - self.assertEqual(quota["name"], self.quota_name + "-new") - self.assertEqual(quota["bytes"], 10995116277) - self.assertEqual(quota["operation"], "-") - self.assertEqual(quota["description"], "asdf") + assert quota["name"] == self.quota_name + "-new" + assert quota["bytes"] == 10995116277 + assert quota["operation"] == "-" + assert quota["description"] == "asdf" def test_delete_undelete_quota(self): self.gi.quotas.update_quota(self.quota["id"], default="no") response = self.gi.quotas.delete_quota(self.quota["id"]) - self.assertEqual(response, "Deleted 1 quotas: " + self.quota_name) + assert response == "Deleted 1 quotas: " + self.quota_name response = self.gi.quotas.undelete_quota(self.quota["id"]) - self.assertEqual(response, "Undeleted 1 quotas: " + self.quota_name) + assert response == "Undeleted 1 quotas: " + self.quota_name diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyRoles.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyRoles.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyRoles.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyRoles.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,7 +1,3 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" import uuid from . import GalaxyTestBase @@ -21,10 +17,10 @@ def test_get_roles(self): roles = self.gi.roles.get_roles() for role in roles: - self.assertIsNotNone(role["id"]) - self.assertIsNotNone(role["name"]) + assert role["id"] is not None + assert role["name"] is not None def test_create_role(self): - self.assertEqual(self.role["name"], self.name) - self.assertEqual(self.role["description"], self.description) - self.assertIsNotNone(self.role["id"]) + assert self.role["name"] == self.name + assert self.role["description"] == self.description + assert self.role["id"] is not None diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyToolData.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyToolData.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyToolData.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyToolData.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,7 +1,3 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" from . 
import GalaxyTestBase @@ -9,11 +5,11 @@ def test_get_data_tables(self): tables = self.gi.tool_data.get_data_tables() for table in tables: - self.assertIsNotNone(table["name"]) + assert table["name"] is not None def test_show_data_table(self): tables = self.gi.tool_data.get_data_tables() table = self.gi.tool_data.show_data_table(tables[0]["name"]) - self.assertIsNotNone(table["columns"]) - self.assertIsNotNone(table["fields"]) - self.assertIsNotNone(table["name"]) + assert table["columns"] is not None + assert table["fields"] is not None + assert table["name"] is not None diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyToolDependencies.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyToolDependencies.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyToolDependencies.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyToolDependencies.py 2022-10-13 18:17:37.000000000 +0000 @@ -11,20 +11,20 @@ @test_util.skip_unless_galaxy("release_20.01") def test_summarize_toolbox(self): toolbox_summary = self.gi.tool_dependencies.summarize_toolbox() - self.assertTrue(isinstance(toolbox_summary, list)) - self.assertGreater(len(toolbox_summary), 0) + assert isinstance(toolbox_summary, list) + assert len(toolbox_summary) > 0 toolbox_summary_by_tool = self.gi.tool_dependencies.summarize_toolbox(index_by="tools") - self.assertTrue(isinstance(toolbox_summary_by_tool, list)) - self.assertGreater(len(toolbox_summary_by_tool), 0) - self.assertTrue(isinstance(toolbox_summary_by_tool[0], dict)) - self.assertTrue("tool_ids" in toolbox_summary_by_tool[0]) - self.assertTrue(isinstance(toolbox_summary_by_tool[0]["tool_ids"], list)) + assert isinstance(toolbox_summary_by_tool, list) + assert len(toolbox_summary_by_tool) > 0 + assert isinstance(toolbox_summary_by_tool[0], dict) + assert "tool_ids" in toolbox_summary_by_tool[0] + assert isinstance(toolbox_summary_by_tool[0]["tool_ids"], list) tool_id = toolbox_summary_by_tool[0]["tool_ids"][0] toolbox_summary_select_tool_ids = self.gi.tool_dependencies.summarize_toolbox( index_by="tools", tool_ids=[tool_id] ) - self.assertTrue(isinstance(toolbox_summary_select_tool_ids, list)) - self.assertEqual(len(toolbox_summary_select_tool_ids), 1) - self.assertEqual(toolbox_summary_select_tool_ids[0]["tool_ids"][0], tool_id) + assert isinstance(toolbox_summary_select_tool_ids, list) + assert len(toolbox_summary_select_tool_ids) == 1 + assert toolbox_summary_select_tool_ids[0]["tool_ids"][0] == tool_id diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyTools.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyTools.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyTools.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyTools.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,6 +1,10 @@ """ """ import os +from typing import ( + Any, + Dict, +) from bioblend.galaxy.tools.inputs import ( conditional, @@ -18,37 +22,37 @@ def test_get_tools(self): # Test requires target Galaxy is configured with at least one tool. tools = self.gi.tools.get_tools() - self.assertGreater(len(tools), 0) - self.assertTrue(all(map(self._assert_is_tool_rep, tools))) + assert len(tools) > 0 + assert all(map(self._assert_is_tool_rep, tools)) def test_get_tool_panel(self): # Test requires target Galaxy is configured with at least one tool # section. 
tool_panel = self.gi.tools.get_tool_panel() sections = [s for s in tool_panel if "elems" in s] - self.assertGreater(len(sections), 0) - self.assertTrue(all(map(self._assert_is_tool_rep, sections[0]["elems"]))) + assert len(sections) > 0 + assert all(map(self._assert_is_tool_rep, sections[0]["elems"])) def _assert_is_tool_rep(self, data): - self.assertTrue(data["model_class"].endswith("Tool")) + assert data["model_class"].endswith("Tool") # Special tools like SetMetadataTool may have different model_class # than Tool - but they all seem to end in tool. for key in ["name", "id", "version"]: - self.assertIn(key, data) + assert key in data return True def test_paste_content(self): history = self.gi.histories.create_history(name="test_paste_data history") paste_text = "line 1\nline 2\rline 3\r\nline 4" tool_output = self.gi.tools.paste_content(paste_text, history["id"]) - self.assertEqual(len(tool_output["outputs"]), 1) + assert len(tool_output["outputs"]) == 1 # All lines in the resulting dataset should end with "\n" expected_contents = ("\n".join(paste_text.splitlines()) + "\n").encode() self._wait_and_verify_dataset(tool_output["outputs"][0]["id"], expected_contents) # Same with space_to_tab=True tool_output = self.gi.tools.paste_content(paste_text, history["id"], space_to_tab=True) - self.assertEqual(len(tool_output["outputs"]), 1) + assert len(tool_output["outputs"]) == 1 expected_contents = ("\n".join("\t".join(_.split()) for _ in paste_text.splitlines()) + "\n").encode() self._wait_and_verify_dataset(tool_output["outputs"][0]["id"], expected_contents) @@ -95,7 +99,7 @@ .set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf")) ) tool_output = self.gi.tools.run_tool(history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs) - self.assertEqual(len(tool_output["outputs"]), 1) + assert len(tool_output["outputs"]) == 1 # TODO: Wait for results and verify has 1 line and is # chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 + @@ -116,7 +120,7 @@ ) ) tool_output = self.gi.tools.run_tool(history_id=history_id, tool_id="cat1", tool_inputs=tool_inputs) - self.assertEqual(len(tool_output["outputs"]), 1) + assert len(tool_output["outputs"]) == 1 # TODO: Wait for results and verify it has 3 lines - 1 2 3, 4 5 6, # and 7 8 9. 
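The hunks above exercise BioBlend's ``inputs`` builder for ``ToolClient.run_tool()``. As a standalone, hedged sketch of the same pattern (the Galaxy URL, API key and dataset ID are placeholders, and the stock ``random_lines1`` tool is assumed to be installed)::

    from bioblend.galaxy import GalaxyInstance
    from bioblend.galaxy.tools.inputs import conditional, dataset, inputs

    gi = GalaxyInstance(url="https://usegalaxy.example.org", key="<api-key>")
    history_id = gi.histories.create_history(name="run_random_lines history")["id"]
    # "<dataset-id>" would normally come from an upload or a previous tool run
    tool_inputs = (
        inputs()
        .set("num_lines", 1)
        .set("input", dataset("<dataset-id>"))
        .set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf"))
    )
    tool_output = gi.tools.run_tool(history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs)
    print(len(tool_output["outputs"]))  # expected to be 1, as asserted in the test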
@@ -124,51 +128,45 @@ @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_tool_dependency_install(self): installed_dependencies = self.gi.tools.install_dependencies("CONVERTER_fasta_to_bowtie_color_index") - self.assertTrue( - any( - True - for d in installed_dependencies - if d.get("name") == "bowtie" and d.get("dependency_type") == "conda" - ), - f"installed_dependencies is {installed_dependencies}", - ) + assert any( + True for d in installed_dependencies if d.get("name") == "bowtie" and d.get("dependency_type") == "conda" + ), f"installed_dependencies is {installed_dependencies}" status = self.gi.tools.uninstall_dependencies("CONVERTER_fasta_to_bowtie_color_index") - self.assertEqual(status[0]["model_class"], "NullDependency", status) + assert status[0]["model_class"] == "NullDependency", status @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_tool_requirements(self): tool_requirements = self.gi.tools.requirements("CONVERTER_fasta_to_bowtie_color_index") - self.assertTrue( - any( - True - for tr in tool_requirements - if {"dependency_type", "version"} <= set(tr.keys()) and tr.get("name") == "bowtie" - ), - f"tool_requirements is {tool_requirements}", - ) + assert any( + True + for tr in tool_requirements + if {"dependency_type", "version"} <= set(tr.keys()) and tr.get("name") == "bowtie" + ), f"tool_requirements is {tool_requirements}" @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_reload(self): response = self.gi.tools.reload("CONVERTER_fasta_to_bowtie_color_index") - self.assertIsInstance(response, dict) - self.assertIn("message", response) - self.assertIn("id", response["message"]) + assert isinstance(response, dict) + assert "message" in response + assert "id" in response["message"] @test_util.skip_unless_tool("sra_source") def test_get_citations(self): citations = self.gi.tools.get_citations("sra_source") - self.assertEqual(len(citations), 2) + assert len(citations) == 2 - def _wait_for_and_verify_upload(self, tool_output, file_name, fn, expected_dbkey="?"): - self.assertEqual(len(tool_output["outputs"]), 1) + def _wait_for_and_verify_upload( + self, tool_output: Dict[str, Any], file_name: str, fn: str, expected_dbkey: str = "?" + ) -> None: + assert len(tool_output["outputs"]) == 1 output = tool_output["outputs"][0] - self.assertEqual(output["name"], file_name) + assert output["name"] == file_name expected_contents = open(fn, "rb").read() self._wait_and_verify_dataset(output["id"], expected_contents) - self.assertEqual(output["genome_build"], expected_dbkey) + assert output["genome_build"] == expected_dbkey @test_util.skip_unless_tool("random_lines1") def test_get_tool_model(self): history_id = self.gi.histories.create_history(name="test_run_random_lines history")["id"] tool_model = self.gi.tools.build(tool_id="random_lines1", history_id=history_id) - self.assertEqual(len(tool_model["inputs"]), 3) + assert len(tool_model["inputs"]) == 3 diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyUsers.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyUsers.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyUsers.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyUsers.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,28 +1,27 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" import bioblend.galaxy -from . import GalaxyTestBase +from . 
import ( + GalaxyTestBase, + test_util, +) class TestGalaxyUsers(GalaxyTestBase.GalaxyTestBase): def test_get_users(self): users = self.gi.users.get_users() for user in users: - self.assertIsNotNone(user["id"]) - self.assertIsNotNone(user["email"]) + assert user["id"] is not None + assert user["email"] is not None def test_show_user(self): current_user = self.gi.users.get_current_user() user = self.gi.users.show_user(current_user["id"]) - self.assertEqual(user["id"], current_user["id"]) - self.assertEqual(user["username"], current_user["username"]) - self.assertEqual(user["email"], current_user["email"]) + assert user["id"] == current_user["id"] + assert user["username"] == current_user["username"] + assert user["email"] == current_user["email"] # The 2 following tests randomly fail - # self.assertEqual(user['nice_total_disk_usage'], current_user['nice_total_disk_usage']) - # self.assertEqual(user['total_disk_usage'], current_user['total_disk_usage']) + # assert user["nice_total_disk_usage"] == current_user["nice_total_disk_usage"] + # assert user["total_disk_usage"] == current_user["total_disk_usage"] def test_create_remote_user(self): # WARNING: only admins can create users! @@ -32,11 +31,11 @@ self.skipTest("This Galaxy instance is not configured to use remote users") new_user_email = "newuser@example.org" user = self.gi.users.create_remote_user(new_user_email) - self.assertEqual(user["email"], new_user_email) + assert user["email"] == new_user_email if self.gi.config.get_config()["allow_user_deletion"]: deleted_user = self.gi.users.delete_user(user["id"]) - self.assertEqual(deleted_user["email"], new_user_email) - self.assertTrue(deleted_user["deleted"]) + assert deleted_user["email"] == new_user_email + assert deleted_user["deleted"] def test_create_local_user(self): # WARNING: only admins can create users! @@ -48,24 +47,24 @@ username = "newuser" password = "secret" user = self.gi.users.create_local_user(username, new_user_email, password) - self.assertEqual(user["username"], username) - self.assertEqual(user["email"], new_user_email) + assert user["username"] == username + assert user["email"] == new_user_email # test a BioBlend GalaxyInstance can be created using username+password user_gi = bioblend.galaxy.GalaxyInstance(url=self.gi.base_url, email=new_user_email, password=password) - self.assertEqual(user_gi.users.get_current_user()["email"], new_user_email) + assert user_gi.users.get_current_user()["email"] == new_user_email # test deletion if self.gi.config.get_config()["allow_user_deletion"]: deleted_user = self.gi.users.delete_user(user["id"]) - self.assertEqual(deleted_user["email"], new_user_email) - self.assertTrue(deleted_user["deleted"]) + assert deleted_user["email"] == new_user_email + assert deleted_user["deleted"] def test_get_current_user(self): user = self.gi.users.get_current_user() - self.assertIsNotNone(user["id"]) - self.assertIsNotNone(user["username"]) - self.assertIsNotNone(user["email"]) - self.assertIsNotNone(user["nice_total_disk_usage"]) - self.assertIsNotNone(user["total_disk_usage"]) + assert user["id"] is not None + assert user["username"] is not None + assert user["email"] is not None + assert user["nice_total_disk_usage"] is not None + assert user["total_disk_usage"] is not None def test_update_user(self): # WARNING: only admins can create users! 
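As the ``test_create_local_user`` hunk above shows, a ``GalaxyInstance`` can also be constructed from an email/password pair instead of an API key; a minimal sketch with placeholder URL and credentials::

    import bioblend.galaxy

    user_gi = bioblend.galaxy.GalaxyInstance(
        url="https://usegalaxy.example.org",  # placeholder URL
        email="newuser@example.org",
        password="secret",
    )
    # The authenticated user's details are then available via the users client
    print(user_gi.users.get_current_user()["email"])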
@@ -75,16 +74,25 @@ self.skipTest("This Galaxy instance is not configured to use local users") new_user_email = "newuser2@example.org" user = self.gi.users.create_local_user("newuser2", new_user_email, "secret") - self.assertEqual(user["username"], "newuser2") - self.assertEqual(user["email"], new_user_email) + assert user["username"] == "newuser2" + assert user["email"] == new_user_email updated_user_email = "updateduser@example.org" updated_username = "updateduser" user_id = user["id"] self.gi.users.update_user(user_id, username=updated_username, email=updated_user_email) user = self.gi.users.show_user(user_id) - self.assertEqual(user["username"], updated_username) - self.assertEqual(user["email"], updated_user_email) + assert user["username"] == updated_username + assert user["email"] == updated_user_email if self.gi.config.get_config()["allow_user_deletion"]: self.gi.users.delete_user(user["id"]) + + def test_get_user_apikey(self): + user_id = self.gi.users.get_current_user()["id"] + assert self.gi.users.get_user_apikey(user_id) + + @test_util.skip_unless_galaxy("release_21.01") + def test_get_or_create_user_apikey(self): + user_id = self.gi.users.get_current_user()["id"] + assert self.gi.users.get_or_create_user_apikey(user_id) diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestGalaxyWorkflows.py python-bioblend-1.0.0/bioblend/_tests/TestGalaxyWorkflows.py --- python-bioblend-0.18.0/bioblend/_tests/TestGalaxyWorkflows.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestGalaxyWorkflows.py 2022-10-13 18:17:37.000000000 +0000 @@ -3,6 +3,13 @@ import shutil import tempfile import time +from typing import ( + Any, + Dict, + List, +) + +import pytest from bioblend import ConnectionError from . import ( @@ -21,10 +28,10 @@ history_id = self.gi.histories.create_history(name="TestWorkflowState")["id"] invocations = self.gi.workflows.get_invocations(workflow_id) - self.assertEqual(len(invocations), 0) + assert len(invocations) == 0 # Try invalid invocation (no input) - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): self.gi.workflows.invoke_workflow(workflow["id"]) dataset1_id = self._test_dataset(history_id) @@ -32,13 +39,13 @@ workflow["id"], inputs={"0": {"src": "hda", "id": dataset1_id}}, ) - self.assertEqual(invocation["state"], "new") + assert invocation["state"] == "new" invocation_id = invocation["id"] invocations = self.gi.workflows.get_invocations(workflow_id) - self.assertEqual(len(invocations), 1) - self.assertEqual(invocations[0]["id"], invocation_id) + assert len(invocations) == 1 + assert invocations[0]["id"] == invocation_id - def invocation_steps_by_order_index(): + def invocation_steps_by_order_index() -> Dict[int, Dict[str, Any]]: invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) return {s["order_index"]: s for s in invocation["steps"]} @@ -48,15 +55,13 @@ time.sleep(0.5) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertEqual(invocation["state"], "ready") + assert invocation["state"] == "ready" steps = invocation_steps_by_order_index() pause_step = steps[2] - self.assertIsNone( - self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"] - ) + assert self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"] is None self.gi.workflows.run_invocation_step_action(workflow_id, invocation_id, pause_step["id"], action=True) - 
self.assertTrue(self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"]) + assert self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"] for _ in range(20): invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) if invocation["state"] == "scheduled": @@ -65,7 +70,7 @@ time.sleep(0.5) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertEqual(invocation["state"], "scheduled") + assert invocation["state"] == "scheduled" @test_util.skip_unless_galaxy("release_19.01") def test_invoke_workflow_parameters_normalized(self): @@ -73,7 +78,7 @@ workflow_id = self.gi.workflows.import_workflow_from_local_path(path)["id"] history_id = self.gi.histories.create_history(name="TestWorkflowInvokeParametersNormalized")["id"] dataset_id = self._test_dataset(history_id) - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): self.gi.workflows.invoke_workflow( workflow_id, inputs={"0": {"src": "hda", "id": dataset_id}}, params={"1": {"1|2": "comma"}} ) @@ -94,7 +99,7 @@ dataset1_id = self._test_dataset(history_id) invocations = self.gi.workflows.get_invocations(workflow_id) - self.assertEqual(len(invocations), 0) + assert len(invocations) == 0 invocation = self.gi.workflows.invoke_workflow( workflow["id"], @@ -102,92 +107,92 @@ ) invocation_id = invocation["id"] invocations = self.gi.workflows.get_invocations(workflow_id) - self.assertEqual(len(invocations), 1) - self.assertEqual(invocations[0]["id"], invocation_id) + assert len(invocations) == 1 + assert invocations[0]["id"] == invocation_id invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertIn(invocation["state"], ["new", "ready"]) + assert invocation["state"] in ["new", "ready"] self.gi.workflows.cancel_invocation(workflow_id, invocation_id) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertEqual(invocation["state"], "cancelled") + assert invocation["state"] == "cancelled" def test_import_export_workflow_from_local_path(self): - with self.assertRaises(TypeError): - self.gi.workflows.import_workflow_from_local_path(None) + with pytest.raises(TypeError): + self.gi.workflows.import_workflow_from_local_path(None) # type: ignore[arg-type] path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) imported_wf = self.gi.workflows.import_workflow_from_local_path(path) - self.assertIsInstance(imported_wf, dict) - self.assertEqual(imported_wf["name"], "paste_columns") - self.assertTrue(imported_wf["url"].startswith("/api/workflows/")) - self.assertFalse(imported_wf["deleted"]) - self.assertFalse(imported_wf["published"]) - with self.assertRaises(TypeError): - self.gi.workflows.export_workflow_to_local_path(None, None, None) + assert isinstance(imported_wf, dict) + assert imported_wf["name"] == "paste_columns" + assert imported_wf["url"].startswith("/api/workflows/") + assert not imported_wf["deleted"] + assert not imported_wf["published"] + with pytest.raises(TypeError): + self.gi.workflows.export_workflow_to_local_path(None, None, None) # type: ignore[arg-type] export_dir = tempfile.mkdtemp(prefix="bioblend_test_") try: self.gi.workflows.export_workflow_to_local_path(imported_wf["id"], export_dir) dir_contents = os.listdir(export_dir) - self.assertEqual(len(dir_contents), 1) + assert len(dir_contents) == 1 export_path = os.path.join(export_dir, dir_contents[0]) with open(export_path) as f: exported_wf_dict = json.load(f) 
finally: shutil.rmtree(export_dir) - self.assertIsInstance(exported_wf_dict, dict) + assert isinstance(exported_wf_dict, dict) def test_import_publish_workflow_from_local_path(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) imported_wf = self.gi.workflows.import_workflow_from_local_path(path, publish=True) - self.assertIsInstance(imported_wf, dict) - self.assertFalse(imported_wf["deleted"]) - self.assertTrue(imported_wf["published"]) + assert isinstance(imported_wf, dict) + assert not imported_wf["deleted"] + assert imported_wf["published"] def test_import_export_workflow_dict(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) with open(path) as f: wf_dict = json.load(f) imported_wf = self.gi.workflows.import_workflow_dict(wf_dict) - self.assertIsInstance(imported_wf, dict) - self.assertEqual(imported_wf["name"], "paste_columns") - self.assertTrue(imported_wf["url"].startswith("/api/workflows/")) - self.assertFalse(imported_wf["deleted"]) - self.assertFalse(imported_wf["published"]) + assert isinstance(imported_wf, dict) + assert imported_wf["name"] == "paste_columns" + assert imported_wf["url"].startswith("/api/workflows/") + assert not imported_wf["deleted"] + assert not imported_wf["published"] exported_wf_dict = self.gi.workflows.export_workflow_dict(imported_wf["id"]) - self.assertIsInstance(exported_wf_dict, dict) + assert isinstance(exported_wf_dict, dict) def test_import_publish_workflow_dict(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) with open(path) as f: wf_dict = json.load(f) imported_wf = self.gi.workflows.import_workflow_dict(wf_dict, publish=True) - self.assertIsInstance(imported_wf, dict) - self.assertFalse(imported_wf["deleted"]) - self.assertTrue(imported_wf["published"]) + assert isinstance(imported_wf, dict) + assert not imported_wf["deleted"] + assert imported_wf["published"] def test_get_workflows(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) workflow = self.gi.workflows.import_workflow_from_local_path(path) all_wfs = self.gi.workflows.get_workflows() - self.assertGreater(len(all_wfs), 0) + assert len(all_wfs) > 0 wfs_with_name = self.gi.workflows.get_workflows(name=workflow["name"]) wf_list = [w for w in wfs_with_name if w["id"] == workflow["id"]] - self.assertEqual(len(wf_list), 1) + assert len(wf_list) == 1 wf_data = wf_list[0] if "create_time" in workflow: # Galaxy >= 20.01 - self.assertEqual(wf_data["create_time"], workflow["create_time"]) + assert wf_data["create_time"] == workflow["create_time"] else: # Galaxy <= 22.01 - self.assertEqual(wf_data["url"], workflow["url"]) + assert wf_data["url"] == workflow["url"] def test_show_workflow(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) wf_data = self.gi.workflows.show_workflow(wf["id"]) - self.assertEqual(wf_data["id"], wf["id"]) - self.assertEqual(wf_data["name"], wf["name"]) - self.assertEqual(wf_data["url"], wf["url"]) - self.assertEqual(len(wf_data["steps"]), 3) - self.assertIsNotNone(wf_data["inputs"]) + assert wf_data["id"] == wf["id"] + assert wf_data["name"] == wf["name"] + assert wf_data["url"] == wf["url"] + assert len(wf_data["steps"]) == 3 + assert wf_data["inputs"] is not None @test_util.skip_unless_galaxy("release_18.05") def test_update_workflow_name(self): @@ -195,17 +200,17 @@ wf = self.gi.workflows.import_workflow_from_local_path(path) new_name = "new name" updated_wf = 
self.gi.workflows.update_workflow(wf["id"], name=new_name) - self.assertEqual(updated_wf["name"], new_name) + assert updated_wf["name"] == new_name @test_util.skip_unless_galaxy("release_21.01") def test_update_workflow_published(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) - self.assertFalse(wf["published"]) + assert not wf["published"] updated_wf = self.gi.workflows.update_workflow(wf["id"], published=True) - self.assertTrue(updated_wf["published"]) + assert updated_wf["published"] updated_wf = self.gi.workflows.update_workflow(wf["id"], published=False) - self.assertFalse(updated_wf["published"]) + assert not updated_wf["published"] @test_util.skip_unless_galaxy( "release_19.09" @@ -214,18 +219,18 @@ path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) wf_data = self.gi.workflows.show_workflow(wf["id"]) - self.assertEqual(wf_data["version"], 0) + assert wf_data["version"] == 0 new_name = "new name" self.gi.workflows.update_workflow(wf["id"], name=new_name) updated_wf = self.gi.workflows.show_workflow(wf["id"]) - self.assertEqual(updated_wf["name"], new_name) - self.assertEqual(updated_wf["version"], 1) + assert updated_wf["name"] == new_name + assert updated_wf["version"] == 1 updated_wf = self.gi.workflows.show_workflow(wf["id"], version=0) - self.assertEqual(updated_wf["name"], "paste_columns") - self.assertEqual(updated_wf["version"], 0) + assert updated_wf["name"] == "paste_columns" + assert updated_wf["version"] == 0 updated_wf = self.gi.workflows.show_workflow(wf["id"], version=1) - self.assertEqual(updated_wf["name"], new_name) - self.assertEqual(updated_wf["version"], 1) + assert updated_wf["name"] == new_name + assert updated_wf["version"] == 1 @test_util.skip_unless_galaxy("release_19.09") def test_extract_workflow_from_history(self): @@ -257,34 +262,34 @@ dataset_hids=dataset_hids, ) wf2 = self.gi.workflows.show_workflow(wf2["id"]) - self.assertEqual(wf2["name"], new_workflow_name) - self.assertEqual(len(wf1["steps"]), len(wf2["steps"])) + assert wf2["name"] == new_workflow_name + assert len(wf1["steps"]) == len(wf2["steps"]) for i in range(len(wf1["steps"])): - self.assertEqual(wf1["steps"][str(i)]["type"], wf2["steps"][str(i)]["type"]) - self.assertEqual(wf1["steps"][str(i)]["tool_id"], wf2["steps"][str(i)]["tool_id"]) + assert wf1["steps"][str(i)]["type"] == wf2["steps"][str(i)]["type"] + assert wf1["steps"][str(i)]["tool_id"] == wf2["steps"][str(i)]["tool_id"] @test_util.skip_unless_galaxy("release_18.09") def test_show_versions(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) versions = self.gi.workflows.show_versions(wf["id"]) - self.assertEqual(len(versions), 1) + assert len(versions) == 1 version = versions[0] - self.assertEqual(version["version"], 0) - self.assertTrue("update_time" in version) - self.assertTrue("steps" in version) + assert version["version"] == 0 + assert "update_time" in version + assert "steps" in version @test_util.skip_unless_galaxy("release_21.01") def test_refactor_workflow(self): - actions = [ + actions: List[Dict[str, Any]] = [ {"action_type": "add_input", "type": "data", "label": "foo"}, {"action_type": "update_step_label", "label": "bar", "step": {"label": "foo"}}, ] path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = 
self.gi.workflows.import_workflow_from_local_path(path) response = self.gi.workflows.refactor_workflow(wf["id"], actions, dry_run=True) - self.assertEqual(len(response["action_executions"]), len(actions)) - self.assertEqual(response["dry_run"], True) + assert len(response["action_executions"]) == len(actions) + assert response["dry_run"] is True updated_steps = response["workflow"]["steps"] - self.assertEqual(len(updated_steps), 4) - self.assertEqual({step["label"] for step in updated_steps.values()}, {"bar", None, "Input 1", "Input 2"}) + assert len(updated_steps) == 4 + assert {step["label"] for step in updated_steps.values()} == {"bar", None, "Input 1", "Input 2"} diff -Nru python-bioblend-0.18.0/bioblend/_tests/TestToolshed.py python-bioblend-1.0.0/bioblend/_tests/TestToolshed.py --- python-bioblend-0.18.0/bioblend/_tests/TestToolshed.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/TestToolshed.py 2022-10-13 18:17:37.000000000 +0000 @@ -9,42 +9,47 @@ @test_util.skip_unless_toolshed() class TestToolshed(unittest.TestCase): def setUp(self): - toolshed_url = os.environ.get("BIOBLEND_TOOLSHED_URL", "https://testtoolshed.g2.bx.psu.edu/") + toolshed_url = os.environ["BIOBLEND_TOOLSHED_URL"] self.ts = bioblend.toolshed.ToolShedInstance(url=toolshed_url) def test_categories_client(self): # get_categories categories = self.ts.categories.get_categories() - self.assertIn("Assembly", [c["name"] for c in categories]) + assert "Assembly" in [c["name"] for c in categories] # we cannot test get_categories with deleted=True as it requires administrator status # show_category visualization_category_id = [c for c in categories if c["name"] == "Visualization"][0]["id"] visualization_category = self.ts.categories.show_category(visualization_category_id) - self.assertEqual(visualization_category["description"], "Tools for visualizing data") + assert visualization_category["description"] == "Tools for visualizing data" # get_repositories repositories = self.ts.categories.get_repositories(visualization_category_id) repositories_reversed = self.ts.categories.get_repositories(visualization_category_id, sort_order="desc") - self.assertEqual(repositories["repositories"][0]["model_class"], "Repository") - self.assertGreater(len(repositories["repositories"]), 200) - self.assertEqual(repositories["repositories"][0], repositories_reversed["repositories"][-1]) + assert repositories["repositories"][0]["model_class"] == "Repository" + assert len(repositories["repositories"]) > 200 + assert repositories["repositories"][0] == repositories_reversed["repositories"][-1] def test_repositories_client(self): # get_repositories repositories = self.ts.repositories.get_repositories() - self.assertGreater(len(repositories), 5000) - self.assertEqual(repositories[0]["model_class"], "Repository") + assert len(repositories) > 5000 + assert repositories[0]["model_class"] == "Repository" + + repositories = self.ts.repositories.get_repositories(name="bam_to_sam", owner="devteam") + assert len(repositories) == 1 + bam_to_sam_repo = repositories[0] + assert bam_to_sam_repo["name"] == "bam_to_sam" + assert bam_to_sam_repo["owner"] == "devteam" # search_repositories samtools_search = self.ts.repositories.search_repositories("samtools", page_size=5) - self.assertGreater(int(samtools_search["total_results"]), 20) - self.assertEqual(len(samtools_search["hits"]), 5) + assert int(samtools_search["total_results"]) > 20 + assert len(samtools_search["hits"]) == 5 # show_repository - bam_to_sam_repo = [r for r in 
repositories if r["name"] == "bam_to_sam"][0] show_bam_to_sam_repo = self.ts.repositories.show_repository(bam_to_sam_repo["id"]) - self.assertIn("SAM", show_bam_to_sam_repo["long_description"]) + assert "SAM" in show_bam_to_sam_repo["long_description"] # test_create_repository # need to provide an API key to test this @@ -55,19 +60,19 @@ def test_repositories_revisions(self): # get_ordered_installable_revisions bam_to_sam_revisions = self.ts.repositories.get_ordered_installable_revisions("bam_to_sam", "devteam") - self.assertGreaterEqual(len(bam_to_sam_revisions), 4) + assert len(bam_to_sam_revisions) >= 4 # get_repository_revision_install_info bam_to_sam_revision_install_info = self.ts.repositories.get_repository_revision_install_info( "bam_to_sam", "devteam", bam_to_sam_revisions[0] ) - self.assertEqual(len(bam_to_sam_revision_install_info), 3) - self.assertEqual(bam_to_sam_revision_install_info[0].get("model_class"), "Repository") - self.assertEqual(bam_to_sam_revision_install_info[1].get("model_class"), "RepositoryMetadata") - self.assertEqual(bam_to_sam_revision_install_info[2].get("model_class"), None) + assert len(bam_to_sam_revision_install_info) == 3 + assert bam_to_sam_revision_install_info[0].get("model_class") == "Repository" + assert bam_to_sam_revision_install_info[1].get("model_class") == "RepositoryMetadata" + assert bam_to_sam_revision_install_info[2].get("model_class") is None def test_tools_client(self): # search_tools samtools_search = self.ts.tools.search_tools("samtools", page_size=5) - self.assertGreater(int(samtools_search["total_results"]), 2000) - self.assertEqual(len(samtools_search["hits"]), 5) + assert int(samtools_search["total_results"]) > 2000 + assert len(samtools_search["hits"]) == 5 diff -Nru python-bioblend-0.18.0/bioblend/_tests/test_util.py python-bioblend-1.0.0/bioblend/_tests/test_util.py --- python-bioblend-0.18.0/bioblend/_tests/test_util.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/_tests/test_util.py 2022-10-13 18:17:37.000000000 +0000 @@ -4,38 +4,28 @@ import random import string import unittest - -import bioblend - -NO_CLOUDMAN_MESSAGE = "CloudMan required and no CloudMan AMI configured." -NO_GALAXY_MESSAGE = "Externally configured Galaxy required, but not found. Set BIOBLEND_GALAXY_URL and BIOBLEND_GALAXY_API_KEY to run this test." -NO_TOOLSHED_MESSAGE = ( - "Externally configured ToolShed required, but not found. Set BIOBLEND_TOOLSHED_URL to run this test." +from typing import ( + Callable, + Optional, ) -OLD_GALAXY_RELEASE = "Testing on Galaxy %s, but need %s to run this test." -MISSING_TOOL_MESSAGE = "Externally configured Galaxy instance requires tool %s to run test." +import bioblend.galaxy -def skip_unless_cloudman(): - """Decorate tests with this to skip the test if CloudMan is not - configured. - """ - if "BIOBLEND_AMI_ID" not in os.environ: - return unittest.skip(NO_CLOUDMAN_MESSAGE) - else: - return lambda f: f +NO_GALAXY_MESSAGE = "Externally configured Galaxy required, but not found. Set BIOBLEND_GALAXY_URL and BIOBLEND_GALAXY_API_KEY to run this test." -def skip_unless_toolshed(): +def skip_unless_toolshed() -> Callable: """Decorate tests with this to skip the test if a URL for a ToolShed to run the tests is not provided. """ if "BIOBLEND_TOOLSHED_URL" not in os.environ: - return unittest.skip(NO_TOOLSHED_MESSAGE) + return unittest.skip( + "Externally configured ToolShed required, but not found. Set BIOBLEND_TOOLSHED_URL (e.g. to https://testtoolshed.g2.bx.psu.edu/ ) to run this test." 
+ ) return lambda f: f -def skip_unless_galaxy(min_release=None): +def skip_unless_galaxy(min_release: Optional[str] = None) -> Callable: """Decorate tests with this to skip the test if Galaxy is not configured. """ @@ -47,7 +37,7 @@ if not min_release.startswith("release_"): raise Exception("min_release should start with 'release_'") if galaxy_release[8:] < min_release[8:]: - return unittest.skip(OLD_GALAXY_RELEASE % (galaxy_release, min_release)) + return unittest.skip(f"Testing on Galaxy {galaxy_release}, but need {min_release} to run this test.") if "BIOBLEND_GALAXY_URL" not in os.environ: return unittest.skip(NO_GALAXY_MESSAGE) @@ -71,24 +61,24 @@ galaxy_user_id = user["id"] break + config = gi.config.get_config() if galaxy_user_id is None: - try: - config = gi.config.get_config() - except Exception: - # If older Galaxy for instance just assume use_remote_user is False. - config = {} - if config.get("use_remote_user", False): new_user = gi.users.create_remote_user(galaxy_user_email) else: galaxy_user = galaxy_user_email.split("@", 1)[0] galaxy_password = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(20)) - # Create a new user and get a new API key for her + # Create a new user new_user = gi.users.create_local_user(galaxy_user, galaxy_user_email, galaxy_password) galaxy_user_id = new_user["id"] - api_key = gi.users.create_user_apikey(galaxy_user_id) + if config["version_major"] >= "21.01": + api_key = gi.users.get_or_create_user_apikey(galaxy_user_id) + else: + api_key = gi.users.get_user_apikey(galaxy_user_id) + if not api_key or api_key == "Not available.": + api_key = gi.users.create_user_apikey(galaxy_user_id) os.environ["BIOBLEND_GALAXY_API_KEY"] = api_key if "BIOBLEND_GALAXY_API_KEY" not in os.environ: @@ -97,7 +87,7 @@ return lambda f: f -def skip_unless_tool(tool_id): +def skip_unless_tool(tool_id: str) -> Callable: """Decorate a Galaxy test method as requiring a specific tool, skip the test case if the tool is unavailable. """ @@ -108,11 +98,11 @@ # In panels by default, so flatten out sections... tool_ids = [_["id"] for _ in tools] if tool_id not in tool_ids: - raise unittest.SkipTest(MISSING_TOOL_MESSAGE % tool_id) + raise unittest.SkipTest(f"Externally configured Galaxy instance requires tool {tool_id} to run test.") return method(has_gi, *args, **kwargs) - # Must preserve method name so nose can detect and report tests by + # Must preserve method name so pytest can detect and report tests by # name. 
wrapped_method.__name__ = method.__name__ return wrapped_method @@ -120,5 +110,5 @@ return method_wrapper -def get_abspath(path): +def get_abspath(path: str) -> str: return os.path.abspath(os.path.join(os.path.dirname(__file__), path)) diff -Nru python-bioblend-0.18.0/bioblend/toolshed/categories/__init__.py python-bioblend-1.0.0/bioblend/toolshed/categories/__init__.py --- python-bioblend-0.18.0/bioblend/toolshed/categories/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/toolshed/categories/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,28 @@ """ Interaction with a Tool Shed instance categories """ +from typing import ( + Any, + Dict, + List, + TYPE_CHECKING, +) + +from typing_extensions import Literal + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.toolshed import ToolShedInstance + class ToolShedCategoryClient(Client): module = "categories" - def __init__(self, toolshed_instance): + def __init__(self, toolshed_instance: "ToolShedInstance") -> None: super().__init__(toolshed_instance) - def get_categories(self, deleted=False): + def get_categories(self, deleted: bool = False) -> List[Dict[str, Any]]: """ Returns a list of dictionaries that contain descriptions of the repository categories found on the given Tool Shed instance. @@ -35,7 +47,7 @@ """ return self._get(deleted=deleted) - def show_category(self, category_id): + def show_category(self, category_id: str) -> Dict[str, Any]: """ Get details of a given category. @@ -47,7 +59,9 @@ """ return self._get(id=category_id) - def get_repositories(self, category_id, sort_key="name", sort_order="asc"): + def get_repositories( + self, category_id: str, sort_key: Literal["name", "owner"] = "name", sort_order: Literal["asc", "desc"] = "asc" + ) -> Dict[str, Any]: """ Returns a dictionary of information for a repository category including a list of repositories belonging to the category. @@ -107,7 +121,7 @@ 'url': '/api/categories/589548af7e391bcf'} """ - params = {} + params: Dict[str, Any] = {} if sort_key: params.update({"sort_key": sort_key}) if sort_order: diff -Nru python-bioblend-0.18.0/bioblend/toolshed/__init__.py python-bioblend-1.0.0/bioblend/toolshed/__init__.py --- python-bioblend-0.18.0/bioblend/toolshed/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/toolshed/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,6 +1,8 @@ """ A base representation of an instance of Tool Shed """ +from typing import Optional + from bioblend.galaxyclient import GalaxyClient from bioblend.toolshed import ( categories, @@ -10,7 +12,14 @@ class ToolShedInstance(GalaxyClient): - def __init__(self, url, key=None, email=None, password=None, verify=True): + def __init__( + self, + url: str, + key: Optional[str] = None, + email: Optional[str] = None, + password: Optional[str] = None, + verify: bool = True, + ) -> None: """ A base representation of a connection to a ToolShed instance, identified by the ToolShed URL and user credentials. 
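The newly annotated Tool Shed category client can be driven as in this rough sketch against a public Tool Shed (the URL is illustrative, and ``sort_key``/``sort_order`` accept only the ``Literal`` values declared above)::

    from bioblend.toolshed import ToolShedInstance

    ts = ToolShedInstance(url="https://toolshed.g2.bx.psu.edu/")
    categories = ts.categories.get_categories()
    viz_id = [c for c in categories if c["name"] == "Visualization"][0]["id"]
    # Repositories in the category, sorted by name in descending order
    repos = ts.categories.get_repositories(viz_id, sort_key="name", sort_order="desc")
    print(len(repos["repositories"]))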
diff -Nru python-bioblend-0.18.0/bioblend/toolshed/repositories/__init__.py python-bioblend-1.0.0/bioblend/toolshed/repositories/__init__.py --- python-bioblend-0.18.0/bioblend/toolshed/repositories/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/toolshed/repositories/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,21 +1,40 @@ """ Interaction with a Tool Shed instance repositories """ -from typing import Optional +from typing import ( + Any, + Dict, + List, + Optional, + TYPE_CHECKING, + Union, +) + +from typing_extensions import Literal from bioblend.galaxy.client import Client from bioblend.util import attach_file +if TYPE_CHECKING: + from bioblend.toolshed import ToolShedInstance + class ToolShedRepositoryClient(Client): module = "repositories" - def __init__(self, toolshed_instance): + def __init__(self, toolshed_instance: "ToolShedInstance") -> None: super().__init__(toolshed_instance) - def get_repositories(self): + def get_repositories(self, name: Optional[str] = None, owner: Optional[str] = None) -> List[Dict[str, Any]]: """ - Get a list of all the repositories in a Galaxy Tool Shed. + Get all repositories in a Galaxy Tool Shed, or select a subset by + specifying optional arguments for filtering (e.g. a repository name). + + :type name: str + :param name: Repository name to filter on. + + :type owner: str + :param owner: Repository owner to filter on. :rtype: list :return: Returns a list of dictionaries containing information about @@ -42,9 +61,19 @@ Changed method name from ``get_tools`` to ``get_repositories`` to better align with the Tool Shed concepts. """ - return self._get() + params = {} + if name: + params["name"] = name + if owner: + params["owner"] = owner + return self._get(params=params) - def search_repositories(self, q, page=1, page_size=10): + def search_repositories( + self, + q: str, + page: int = 1, + page_size: int = 10, + ) -> Dict[str, Any]: """ Search for repositories in a Galaxy Tool Shed. @@ -100,7 +129,10 @@ params = dict(q=q, page=page, page_size=page_size) return self._get(params=params) - def show_repository(self, toolShed_id): + def show_repository( + self, + toolShed_id: str, + ) -> Dict[str, Any]: """ Display information of a repository from Tool Shed @@ -134,7 +166,11 @@ """ return self._get(id=toolShed_id) - def get_ordered_installable_revisions(self, name, owner): + def get_ordered_installable_revisions( + self, + name: str, + owner: str, + ) -> List[str]: """ Returns the ordered list of changeset revision hash strings that are associated with installable revisions. As in the changelog, the list is @@ -155,7 +191,12 @@ return r - def get_repository_revision_install_info(self, name, owner, changeset_revision): + def get_repository_revision_install_info( + self, + name: str, + owner: str, + changeset_revision: str, + ) -> List[Dict[str, Any]]: """ Return a list of dictionaries of metadata about a certain changeset revision for a single tool. @@ -259,8 +300,12 @@ return self._get(url=url, params=params) def repository_revisions( - self, downloadable=None, malicious=None, missing_test_components=None, includes_tools=None - ): + self, + downloadable: Optional[bool] = None, + malicious: Optional[bool] = None, + missing_test_components: Optional[bool] = None, + includes_tools: Optional[bool] = None, + ) -> List[Dict[str, Any]]: """ Returns a (possibly filtered) list of dictionaries that include information about all repository revisions. 
The following parameters can @@ -328,7 +373,10 @@ params["includes_tools"] = includes_tools return self._get(url=url, params=params) - def show_repository_revision(self, metadata_id): + def show_repository_revision( + self, + metadata_id: str, + ) -> Dict[str, Any]: """ Returns a dictionary that includes information about a specified repository revision. @@ -364,7 +412,7 @@ url = "/".join((self.gi.url, "repository_revisions", metadata_id)) return self._get(url=url) - def update_repository(self, id, tar_ball_path, commit_message=None): + def update_repository(self, id: str, tar_ball_path: str, commit_message: Optional[str] = None) -> Dict[str, Any]: """ Update the contents of a Tool Shed repository with specified tar ball. @@ -390,7 +438,7 @@ .. versionadded:: 0.5.2 """ url = self._make_url(id) + "/changeset_revision" - payload = {"file": attach_file(tar_ball_path)} + payload: Dict[str, Any] = {"file": attach_file(tar_ball_path)} if commit_message is not None: payload["commit_message"] = commit_message try: @@ -400,14 +448,14 @@ def create_repository( self, - name, - synopsis, - description=None, - type="unrestricted", - remote_repository_url=None, - homepage_url=None, - category_ids=None, - ): + name: str, + synopsis: str, + description: Optional[str] = None, + type: Literal["unrestricted", "repository_suite_definition", "tool_dependency_definition"] = "unrestricted", + remote_repository_url: Optional[str] = None, + homepage_url: Optional[str] = None, + category_ids: Optional[List[str]] = None, + ) -> Dict[str, Any]: """ Create a new repository in a Tool Shed. @@ -453,7 +501,7 @@ "type": "unrestricted", "user_id": "adb5f5c93f827949"} """ - payload = { + payload: Dict[str, Union[str, List[str]]] = { "name": name, "synopsis": synopsis, } @@ -479,8 +527,8 @@ description: Optional[str] = None, remote_repository_url: Optional[str] = None, homepage_url: Optional[str] = None, - category_ids: Optional[str] = None, - ) -> dict: + category_ids: Optional[List[str]] = None, + ) -> Dict[str, Any]: """ Update metadata of a Tool Shed repository. @@ -509,7 +557,7 @@ :rtype: dict :return: a dictionary containing information about the updated repository. """ - payload = {} + payload: Dict[str, Union[str, List[str]]] = {} if name: payload["name"] = name if synopsis: diff -Nru python-bioblend-0.18.0/bioblend/toolshed/tools/__init__.py python-bioblend-1.0.0/bioblend/toolshed/tools/__init__.py --- python-bioblend-0.18.0/bioblend/toolshed/tools/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/toolshed/tools/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,16 +1,26 @@ """ Interaction with a Tool Shed instance tools """ +from typing import ( + Any, + Dict, + TYPE_CHECKING, +) + from bioblend.galaxy.client import Client +if TYPE_CHECKING: + from bioblend.toolshed import ToolShedInstance + class ToolShedToolClient(Client): + gi: "ToolShedInstance" module = "tools" - def __init__(self, toolshed_instance): + def __init__(self, toolshed_instance: "ToolShedInstance") -> None: super().__init__(toolshed_instance) - def search_tools(self, q, page=1, page_size=10): + def search_tools(self, q: str, page: int = 1, page_size: int = 10) -> Dict[str, Any]: """ Search for tools in a Galaxy Tool Shed. 
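For the repository and tool client changes above, a hedged usage sketch (values mirror the test data; a reachable Tool Shed is assumed)::

    from bioblend.toolshed import ToolShedInstance

    ts = ToolShedInstance(url="https://toolshed.g2.bx.psu.edu/")
    # New in 1.0.0: server-side filtering by repository name and owner
    repos = ts.repositories.get_repositories(name="bam_to_sam", owner="devteam")
    # Free-text search over repositories and tools
    repo_hits = ts.repositories.search_repositories("samtools", page_size=5)
    tool_hits = ts.tools.search_tools("samtools", page_size=5)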
@@ -46,5 +56,5 @@ 'page_size': '2', 'total_results': '118'} """ - params = dict(q=q, page=page, page_size=page_size) + params = {"q": q, "page": page, "page_size": page_size} return self._get(params=params) diff -Nru python-bioblend-0.18.0/bioblend/util/__init__.py python-bioblend-1.0.0/bioblend/util/__init__.py --- python-bioblend-0.18.0/bioblend/util/__init__.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/bioblend/util/__init__.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,37 +1,23 @@ import os from typing import ( + Any, IO, NamedTuple, + Optional, + Type, + TypeVar, ) -class Bunch: - """ - A convenience class to allow dict keys to be represented as object fields. - - The end result is that this allows a dict to be to be represented the same - as a database class, thus the two become interchangeable as a data source. - """ - - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - - def __repr__(self): - """ - Return the contents of the dict in a printable representation - """ - return str(self.__dict__) - - class FileStream(NamedTuple): name: str fd: IO - def close(self): + def close(self) -> None: self.fd.close() -def attach_file(path, name=None): +def attach_file(path: str, name: Optional[str] = None) -> FileStream: """ Attach a path to a request payload object. @@ -47,27 +33,31 @@ """ if name is None: name = os.path.basename(path) - attachment = FileStream(name, open(path, "rb")) - return attachment + return FileStream(name, open(path, "rb")) + + +T = TypeVar("T") -def abstractclass(decorated_cls): +def abstractclass(decorated_cls: Type[T]) -> Type[T]: """ Decorator that marks a class as abstract even without any abstract method Adapted from https://stackoverflow.com/a/49013561/4503125 """ - def clsnew(cls, *args, **kwargs): + def clsnew(cls: Type[T], *args: Any, **kwargs: Any) -> T: + # assert issubclass(cls, decorated_cls) if cls is decorated_cls: - raise TypeError(f"Can't instantiate abstract class {decorated_cls.__name__}") - return super(decorated_cls, cls).__new__(cls) + cls_name = getattr(decorated_cls, "__name__", str(decorated_cls)) + raise TypeError(f"Can't instantiate abstract class {cls_name}") + return super(decorated_cls, cls).__new__(cls) # type: ignore[misc] - decorated_cls.__new__ = clsnew + decorated_cls.__new__ = clsnew # type: ignore[assignment] return decorated_cls __all__ = ( - "Bunch", + "abstractclass", "attach_file", ) diff -Nru python-bioblend-0.18.0/CHANGELOG.md python-bioblend-1.0.0/CHANGELOG.md --- python-bioblend-0.18.0/CHANGELOG.md 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/CHANGELOG.md 2022-10-13 18:17:37.000000000 +0000 @@ -1,11 +1,64 @@ +### BioBlend v1.0.0 - 2022-10-13 + +* Drop support for deprecated CloudMan, see + https://galaxyproject.org/blog/2021-10-sunsetting-cloudlaunch/ + +* Added dependency on ``typing-extensions`` package, removed dependencies on + ``boto`` and ``pyyaml``. + +* Deprecated ``max_get_retries()``, ``set_max_get_retries()``, + ``get_retry_delay()`` and ``set_get_retry_delay()`` methods of ``Client``. + +* Moved ``max_get_attempts`` and ``get_retry_delay`` properties from + ``GalaxyInstance`` to ``GalaxyClient``, so they are also available in + ``ToolshedInstance``. + +* Added ``get_or_create_user_apikey()`` method to ``UserClient``. + +* Added ``all`` parameter to ``HistoryClient.get_histories()`` method (thanks to + [Paprikant](https://github.com/Paprikant)). 
+ +* Added ``require_exact_tool_versions`` parameter to + ``WorkflowClient.invoke_workflow()`` method (thanks to + [cat-bro](https://github.com/cat-bro)). + +* Added ``name`` and ``owner`` parameters to + ``ToolShedRepositoryClient.get_repositories()``. + +* Remove unused methods from ``bioblend.config.Config``. If needed, use the + methods inherited from `configparser.ConfigParser` instead. + +* Allowed any 2XX HTTP response status code in ``Client._delete()`` to correctly + support history purging via Celery (thanks to + [Nolan Woods](https://github.com/innovate-invent)). + +* Fixed bug in ``FormsClient.create_form()`` where the ``form_xml_text`` + argument was not passed correctly to the Galaxy API. + +* Fixed bug in ``HistoryClient.show_dataset_provenance()`` where the ``follow`` + argument was not passed to the Galaxy API. + +* BioBlend.objects: Added ``delete()`` abstract method to ``DatasetContainer`` + class. + +* Added Code of Conduct for the project. + +* Finished the full type annotation of the library (thanks to + [cat-bro](https://github.com/cat-bro), + [Fabio Cumbo](https://github.com/cumbof), + [Jayadev Joshi](https://github.com/jaidevjoshi83), + [thepineapplepirate](https://github.com/thepineapplepirate)). + +* Improvements to tests and documentation. + ### BioBlend v0.18.0 - 2022-07-07 * Added support for Galaxy release 22.05. -* Add tus support to ``ToolClient.upload_file()`` (thanks to +* Added tus support to ``ToolClient.upload_file()`` (thanks to [Nate Coraor](https://github.com/natefoo)). -* Format Python code with black and isort. +* Formatted Python code with black and isort. * Improvements to type annotations, tests and documentation. @@ -81,9 +134,9 @@ * Pass the API key for all requests as the ``x-api-key`` header instead of as a parameter (thanks to [rikeshi](https://github.com/rikeshi)). -* Try prepending https:// and http:// if the scheme is missing in the ``url`` - parameter of ``GalaxyClient``, i.e. when initialising a Galaxy or ToolShed - instance. +* Try prepending "https://" and "http://" if the scheme is missing in the + ``url`` argument passed to ``GalaxyClient``, i.e. when initialising a Galaxy + or ToolShed instance. * Added a new ``dataset_collections`` attribute to ``GalaxyInstance`` objects, which is an instance of the new ``DatasetCollectionClient``. This new module @@ -297,15 +350,15 @@ * Added ``maxwait`` parameter to ``HistoryClient.export_history()`` and ``History.export()`` methods. -* Fixed handling of ``type`` parameter in ``HistoryClient.show_history()`` +* Fixed handling of ``type`` argument in ``HistoryClient.show_history()`` (thanks to [Marius van den Beek](https://github.com/mvdbeek)). -* Fixed handling of ``deleted`` parameter in ``LibraryClient.get_libraries()`` +* Fixed handling of ``deleted`` argument in ``LibraryClient.get_libraries()`` (thanks to [Luke Sargent](https://github.com/luke-c-sargent), reported by [Katie](https://github.com/emartchenko)). * Fixed ``LibraryClient.wait_for_dataset()`` when ``maxwait`` or ``interval`` - parameters are of type ``float``. + arguments are of type ``float``. * Unify JSON-encoding of non-file parameters of POST requests inside ``GalaxyClient.make_post_request()``. @@ -439,7 +492,7 @@ Galaxy API request (thanks to @DamCorreia). * Fixed ``HistoryClient.update_history()`` and ``History.update()`` methods - when ``name`` parameter is not specified. + when ``name`` argument is not specified. * Added warning if content size differs from content-length header in ``DatasetClient.download_dataset()``. 
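The 1.0.0 changelog entries above concerning API keys map onto code roughly as follows (a hedged sketch, assuming admin credentials; ``get_or_create_user_apikey()`` requires Galaxy release 21.01 or later)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance(url="https://usegalaxy.example.org", key="<admin-api-key>")
    user_id = gi.users.get_current_user()["id"]
    if gi.config.get_config()["version_major"] >= "21.01":
        api_key = gi.users.get_or_create_user_apikey(user_id)
    else:
        # Older releases: a missing key may be reported as "Not available."
        api_key = gi.users.get_user_apikey(user_id)
        if not api_key or api_key == "Not available.":
            api_key = gi.users.create_user_apikey(user_id)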
@@ -527,7 +580,7 @@ * Updated CloudmanLauncher's ``launch`` method to accept ``subnet_id`` parameter, for VPC support (thanks to Matthew Ralston). -* Properly pass extra parameters to cloud instance userdata. +* Properly pass extra arguments to cloud instance userdata. * Updated placement finding methods and `get_clusters_pd` method to return a dict vs. lists so error messages can be included. diff -Nru python-bioblend-0.18.0/CODE_OF_CONDUCT.md python-bioblend-1.0.0/CODE_OF_CONDUCT.md --- python-bioblend-0.18.0/CODE_OF_CONDUCT.md 1970-01-01 00:00:00.000000000 +0000 +++ python-bioblend-1.0.0/CODE_OF_CONDUCT.md 2022-10-13 18:17:37.000000000 +0000 @@ -0,0 +1,8 @@ +Code of Conduct +=============== + +As part of the Galaxy Community, this project is committed to providing a +welcoming and harassment-free experience for everyone. We therefore expect +participants to abide by our Code of Conduct, which can be found at: + +https://galaxyproject.org/community/coc/ diff -Nru python-bioblend-0.18.0/debian/changelog python-bioblend-1.0.0/debian/changelog --- python-bioblend-0.18.0/debian/changelog 2022-08-25 20:19:08.000000000 +0000 +++ python-bioblend-1.0.0/debian/changelog 2022-11-23 14:17:58.000000000 +0000 @@ -1,3 +1,10 @@ +python-bioblend (1.0.0-1) unstable; urgency=medium + + * Team upload. + * New upstream version + + -- Lance Lin Wed, 23 Nov 2022 21:17:58 +0700 + python-bioblend (0.18.0-1) unstable; urgency=medium * Team upload. diff -Nru python-bioblend-0.18.0/docs/api_docs/cloudman/all.rst python-bioblend-1.0.0/docs/api_docs/cloudman/all.rst --- python-bioblend-0.18.0/docs/api_docs/cloudman/all.rst 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/docs/api_docs/cloudman/all.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -.. _cloudman-api-docs: - -=============================================== -API documentation for interacting with CloudMan -=============================================== - -CloudManLauncher ----------------- - -.. autoclass:: bioblend.cloudman.launch.CloudManLauncher - :members: - :undoc-members: - -.. _cloudman-instance-api: - -CloudManInstance ----------------- - -.. automodule:: bioblend.cloudman - :members: - :undoc-members: diff -Nru python-bioblend-0.18.0/docs/api_docs/cloudman/docs.rst python-bioblend-1.0.0/docs/api_docs/cloudman/docs.rst --- python-bioblend-0.18.0/docs/api_docs/cloudman/docs.rst 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/docs/api_docs/cloudman/docs.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,134 +0,0 @@ -=================== -Usage documentation -=================== - -This page describes some sample use cases for CloudMan API and provides -examples for these API calls. -In addition to this page, there are functional examples of complete scripts in -``docs/examples`` directory of the BioBlend source code repository. - -Setting up custom cloud properties -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -CloudMan supports Amazon, OpenStack, OpenNebula, and Eucalyptus based clouds and -BioBlend can be used to programatically manipulate CloudMan on any of those -clouds. Once launched, the API calls to CloudMan are the same irrespective of -the cloud. In order to launch an instance on a given cloud, cloud properties -need to be provided to ``CloudManLauncher``. If cloud properties are not specified, -``CloudManLauncher`` will default to Amazon cloud properties. - -If we want to use a different cloud provider, we need to specify additional cloud -properties when creating an instance of the ``CloudManLauncher`` class. 
For -example, if we wanted to create a connection to `NeCTAR`_, Australia's national -research cloud, we would use the following properties:: - - from bioblend.util import Bunch - nectar = Bunch( - name='NeCTAR', - cloud_type='openstack', - bucket_default='cloudman-os', - region_name='NeCTAR', - region_endpoint='nova.rc.nectar.org.au', - ec2_port=8773, - ec2_conn_path='/services/Cloud', - cidr_range='115.146.92.0/22', - is_secure=True, - s3_host='swift.rc.nectar.org.au', - s3_port=8888, - s3_conn_path='/') - -.. Note:: These properties are cloud-specific and need to be obtained from a - given cloud provider. - -Launching a new cluster instance -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In order to launch a CloudMan cluster on a chosen cloud, we do the following -(continuing from the previous example):: - - from bioblend.cloudman import CloudManConfig - from bioblend.cloudman import CloudManInstance - cmc = CloudManConfig('', 'm1.medium', 'choose_a_password_here', nectar) - cmi = CloudManInstance.launch_instance(cmc) - -.. Note:: If you already have an existing instance of CloudMan, just create an - instance of the ``CloudManInstance`` object directly by calling its - constructor and connecting to it (the password you provide must match - the password you provided as part of user data when launching this - instance). For example:: - - cmi = CloudManInstance('http://115.146.92.174', 'your_UD_password') - -We now have a ``CloudManInstance`` object that allows us to manage created -CloudMan instance via the API. Once launched, it will take a few minutes for the -instance to boot and CloudMan start. To check on the status of the machine, -(repeatedly) run the following command:: - - >>> cmi.get_machine_status() - {'error': '', - 'instance_state': 'pending', - 'placement': '', - 'public_ip': ''} - >>> cmi.get_machine_status() - {'error': '', - 'instance_state': 'running', - 'placement': 'melbourne-qh2', - 'public_ip': '115.146.86.29'} - -Once the instance is ready, although it may still take a few moments for CloudMan -to start, it is possible to start interacting with the application. - -.. Note:: The ``CloudManInstance`` object (e.g., ``cmi``) is a local representation - of the actual CloudMan instance. As a result, the local object can get - out of sync with the remote instance. 
To update the state of the local - object, call the ``update`` method on the ``cmi`` object:: - - >>> cmi.update() - - -Manipulating an existing cluster -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Having a reference to a ``CloudManInstance`` object, we can manage it via the -available :ref:`cloudman-instance-api` API:: - - >>> cmi.initialized - False - >>> cmi.initialize('SGE') - >>> cmi.get_status() - {'all_fs': [], - 'app_status': 'yellow', - 'autoscaling': {'as_max': 'N/A', - 'as_min': 'N/A', - 'use_autoscaling': False}, - 'cluster_status': 'STARTING', - 'data_status': 'green', - 'disk_usage': {'pct': '0%', 'total': '0', 'used': '0'}, - 'dns': '#', - 'instance_status': {'available': '0', 'idle': '0', 'requested': '0'}, - 'snapshot': {'progress': 'None', 'status': 'None'}} - >>> cmi.get_cluster_size() - 1 - >>> cmi.get_nodes() - [{'id': 'i-00006016', - 'instance_type': 'm1.medium', - 'ld': '0.0 0.025 0.065', - 'public_ip': '115.146.86.29', - 'time_in_state': '2268'}] - >>> cmi.add_nodes(2) - {'all_fs': [], - 'app_status': 'green', - 'autoscaling': {'as_max': 'N/A', - 'as_min': 'N/A', - 'use_autoscaling': False}, - 'cluster_status': 'READY', - 'data_status': 'green', - 'disk_usage': {'pct': '0%', 'total': '0', 'used': '0'}, - 'dns': '#', - 'instance_status': {'available': '0', 'idle': '0', 'requested': '2'}, - 'snapshot': {'progress': 'None', 'status': 'None'}} - >>> cmi.get_cluster_size() - 3 - -.. _NeCTAR: http://www.nectar.org.au/research-cloud - diff -Nru python-bioblend-0.18.0/docs/examples/cloudman_basic_usage_scenario.py python-bioblend-1.0.0/docs/examples/cloudman_basic_usage_scenario.py --- python-bioblend-0.18.0/docs/examples/cloudman_basic_usage_scenario.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/docs/examples/cloudman_basic_usage_scenario.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,52 +0,0 @@ -from bioblend import cloudman - -url = "http://127.0.0.1:42284" -password = "password" -cm = cloudman.CloudManInstance(url, password) - -# Expects CloudMan to be running locally -# Set cluster type and storage size -cm.initialize(type="SGE") - -# Get cluster status -status = cm.get_status() - -# Get nodes -nodes = cm.get_nodes() -# There should be a master node - -# Add node -num_nodes = 1 -status = cm.add_nodes(num_nodes) - -# Remove nodes -status = cm.remove_nodes(num_nodes, force=True) - -instance_id = "abcdef" -cm.remove_node(instance_id, force=True) - -# Reboot instance -cm.reboot_node(instance_id) - -# Autoscaling: -# enable -cm.disable_autoscaling() -cm.enable_autoscaling(minimum_nodes=0, maximum_nodes=19) - -# autoscaling should be enabled now -is_enabled = cm.autoscaling_enabled() - -min_autoscaling = cm.get_status()["autoscaling"]["as_min"] -max_autoscaling = cm.get_status()["autoscaling"]["as_max"] - -# adjust -cm.adjust_autoscaling(minimum_nodes=5, maximum_nodes=10) - -# disable -cm.disable_autoscaling() - -# autoscaling should be disabled -cm.autoscaling_enabled() - -# Get Galaxy DNS/Host -galaxy_state = cm.get_galaxy_state() # RUNNING, STARTING..... diff -Nru python-bioblend-0.18.0/docs/examples/start_cloudman.py python-bioblend-1.0.0/docs/examples/start_cloudman.py --- python-bioblend-0.18.0/docs/examples/start_cloudman.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/docs/examples/start_cloudman.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,77 +0,0 @@ -""" -A quick way to start and initialize an instance of the CloudMan platform -directly from the command line. 
- -Usage:: - - python start_cloudman.py - -```` can be 'Galaxy', 'Data', or 'SGE' -(see https://galaxyproject.org/cloudman/ for more details on types) - -Example: -python start_cloudman.py "cluster bioblend" pwd SGE m1.small ami-00000032 -""" - -import sys - -from bioblend.cloudman import ( - CloudManConfig, - CloudManInstance, -) -from bioblend.util import Bunch - - -def start_cloudman(name, pwd, cm_type, inst_type, ami, ak, sk): - """ - Start an instance of CloudMan with the provided arguments. - - Returns a tuple: an instance of ``CloudManConfig`` pointing to the - settings used to launch this instance of CloudMan; and an instance - of ``CloudManInstance`` pointing to the given instance of CloudMan. - """ - cloud = None # If left as None, BioBlend will default to Amazon - # Define properties for the NeCTAR cloud - cloud = Bunch( - id="-1", - name="NeCTAR", - cloud_type="openstack", - bucket_default="cloudman-os", - region_name="melbourne", - region_endpoint="nova.rc.nectar.org.au", - ec2_port=8773, - ec2_conn_path="/services/Cloud", - cidr_range="115.146.92.0/22", - is_secure=True, - s3_host="swift.rc.nectar.org.au", - s3_port=8888, - s3_conn_path="/", - ) - # Create an instance of the CloudManConfig class and launch a CloudMan instance - cmc = CloudManConfig( - ak, - sk, - name, - ami, - inst_type, - pwd, - cloud_metadata=cloud, - cloudman_type=cm_type, - initial_storage_size=2, - placement="melbourne-np", - ) - print("Configured an instance; waiting to launch and boot...") - cmi = CloudManInstance.launch_instance(cmc) - print(f"Done! CloudMan IP is {cmi.cloudman_url}") - return cmc, cmi - - -if __name__ == "__main__": - if len(sys.argv) != 8: - print( - "\nUsage:\n" - "python start_cloudman.py " - " " - ) - sys.exit(1) - cml, cm = start_cloudman(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5], sys.argv[6], sys.argv[7]) diff -Nru python-bioblend-0.18.0/docs/index.rst python-bioblend-1.0.0/docs/index.rst --- python-bioblend-0.18.0/docs/index.rst 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/docs/index.rst 2022-10-13 18:17:37.000000000 +0000 @@ -94,18 +94,6 @@ api_docs/toolshed/* -CloudMan API -~~~~~~~~~~~~ - -API used to manipulate the instantiated infrastructure. For example, scale the -size of the compute cluster, get infrastructure status, get service status. - -.. toctree:: - :maxdepth: 2 - :glob: - - api_docs/cloudman/* - Configuration ============= BioBlend allows library-wide configuration to be set in external files. diff -Nru python-bioblend-0.18.0/README.rst python-bioblend-1.0.0/README.rst --- python-bioblend-0.18.0/README.rst 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/README.rst 2022-10-13 18:17:37.000000000 +0000 @@ -11,8 +11,7 @@ :target: https://gitter.im/galaxyproject/bioblend?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge -BioBlend is a Python library for interacting with `Galaxy`_ and `CloudMan`_ -APIs. +BioBlend is a Python library for interacting with the `Galaxy`_ API. BioBlend is supported and tested on: @@ -23,5 +22,4 @@ overview also available in `ABOUT.rst <./ABOUT.rst>`_. .. References/hyperlinks used above -.. _CloudMan: https://galaxyproject.org/cloudman/ .. 
_Galaxy: https://galaxyproject.org/ diff -Nru python-bioblend-0.18.0/setup.cfg python-bioblend-1.0.0/setup.cfg --- python-bioblend-0.18.0/setup.cfg 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/setup.cfg 2022-10-13 18:17:37.000000000 +0000 @@ -27,7 +27,7 @@ Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Topic :: Scientific/Engineering :: Bio-Informatics -description = Galaxy and CloudMan API library +description = Library for interacting with the Galaxy API license = MIT license_files = CITATION @@ -45,17 +45,30 @@ version = attr: bioblend.__version__ [mypy] -pretty = True +check_untyped_defs = True +disallow_incomplete_defs = True +disallow_subclassing_any = True +disallow_untyped_calls = True +disallow_untyped_decorators = True +disallow_untyped_defs = True ignore_missing_imports = True +no_implicit_optional = True +no_implicit_reexport = True +pretty = True show_error_codes = True +strict_equality = True +warn_redundant_casts = True +warn_unused_ignores = True +warn_unreachable = True +[mypy-bioblend._tests.*] +disallow_untyped_defs = False [options] install_requires = - boto>=2.9.7 - pyyaml requests>=2.20.0 requests-toolbelt>=0.5.1,!=0.9.0 tuspy + typing-extensions packages = find: python_requires = >=3.7 diff -Nru python-bioblend-0.18.0/tests/CloudmanTestBase.py python-bioblend-1.0.0/tests/CloudmanTestBase.py --- python-bioblend-0.18.0/tests/CloudmanTestBase.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/CloudmanTestBase.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,68 +0,0 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" -import contextlib -import os -import unittest - -from bioblend.util import Bunch -from . import test_util - - -class CloudmanTestBase(unittest.TestCase): - @classmethod - @test_util.skip_unless_cloudman() - def setUpClass(cls): - if os.environ.get("BIOBLEND_CLOUD_TYPE") == "EC2": - cls.access_key = os.environ["BIOBLEND_ACCESS_KEY"] - cls.secret_key = os.environ["BIOBLEND_SECRET_KEY"] - cls.cluster_name = "Blend CloudMan" - cls.ami_id = os.environ["BIOBLEND_AMI_ID"] - cls.instance_type = "m1.small" - cls.password = "password" - cls.cloud_metadata = Bunch( - id="1", # for compatibility w/ DB representation - name="Amazon", - cloud_type="ec2", - bucket_default="cloudman", - region_name="us-east-1", - region_endpoint="ec2.amazonaws.com", - ec2_port="", - ec2_conn_path="/", - cidr_range="", - is_secure=True, - s3_host="s3.amazonaws.com", - s3_port="", - s3_conn_path="/", - ) - else: - # Assume OpenStack/NeCTAR - cls.access_key = os.environ["BIOBLEND_ACCESS_KEY"] - cls.secret_key = os.environ["BIOBLEND_SECRET_KEY"] - cls.cloud_metadata = Bunch( - id="-1", - name="NeCTAR", - cloud_type="openstack", - bucket_default="cloudman-os", - region_name="melbourne", - region_endpoint="nova.rc.nectar.org.au", - ec2_port=8773, - ec2_conn_path="/services/Cloud", - cidr_range="115.146.92.0/22", - is_secure=True, - s3_host="swift.rc.nectar.org.au", - s3_port=8888, - s3_conn_path="/", - ) - cls.cluster_name = "Blend CloudMan" - cls.ami_id = os.environ["BIOBLEND_AMI_ID"] - cls.instance_type = "m1.small" - cls.password = "password" - - @classmethod - @test_util.skip_unless_cloudman() - def tearDownClass(cls): - with contextlib.suppress(Exception): - # TODO: cloudman's terminate method has a bug. 
Needs fix - cls.cmi.terminate(delete_cluster=True) diff -Nru python-bioblend-0.18.0/tests/GalaxyTestBase.py python-bioblend-1.0.0/tests/GalaxyTestBase.py --- python-bioblend-0.18.0/tests/GalaxyTestBase.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/GalaxyTestBase.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,8 +1,14 @@ import os import unittest +from typing import ( + Any, + Dict, +) + +from typing_extensions import Literal import bioblend -import bioblend.galaxy +from bioblend.galaxy import GalaxyInstance from . import test_util bioblend.set_stream_logger("test", level="INFO") @@ -12,15 +18,36 @@ @test_util.skip_unless_galaxy() class GalaxyTestBase(unittest.TestCase): - def setUp(self): + gi: GalaxyInstance + + @classmethod + def setUpClass(cls): galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"] galaxy_url = os.environ["BIOBLEND_GALAXY_URL"] - self.gi = bioblend.galaxy.GalaxyInstance(url=galaxy_url, key=galaxy_key) + cls.gi = GalaxyInstance(url=galaxy_url, key=galaxy_key) - def _test_dataset(self, history_id, contents="1\t2\t3", **kwds): - tool_output = self.gi.tools.paste_content(contents, history_id, **kwds) + def _test_dataset(self, history_id: str, contents: str = "1\t2\t3", **kwargs: Any) -> str: + tool_output = self.gi.tools.paste_content(contents, history_id, **kwargs) return tool_output["outputs"][0]["id"] - def _wait_and_verify_dataset(self, dataset_id, expected_contents, timeout_seconds=BIOBLEND_TEST_JOB_TIMEOUT): + def _wait_and_verify_dataset( + self, dataset_id: str, expected_contents: bytes, timeout_seconds: float = BIOBLEND_TEST_JOB_TIMEOUT + ) -> None: dataset_contents = self.gi.datasets.download_dataset(dataset_id, maxwait=timeout_seconds) - self.assertEqual(dataset_contents, expected_contents) + assert dataset_contents == expected_contents + + def _run_random_lines1( + self, history_id: str, dataset_id: str, input_format: Literal["21.01", "legacy"] = "legacy" + ) -> Dict[str, Any]: + tool_inputs = { + "num_lines": "1", + "input": {"src": "hda", "id": dataset_id}, + } + if input_format == "21.01": + tool_inputs.update({"seed_source": {"seed_source_selector": "set_seed", "seed": "asdf"}}) + else: + # legacy format + tool_inputs.update({"seed_source|seed_source_selector": "set_seed", "seed_source|seed": "asdf"}) + return self.gi.tools.run_tool( + history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs, input_format=input_format + ) diff -Nru python-bioblend-0.18.0/tests/pytest_galaxy_test_wrapper.py python-bioblend-1.0.0/tests/pytest_galaxy_test_wrapper.py --- python-bioblend-0.18.0/tests/pytest_galaxy_test_wrapper.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/pytest_galaxy_test_wrapper.py 2022-10-13 18:17:37.000000000 +0000 @@ -6,11 +6,16 @@ """ import os import sys +from typing import ( + List, + NoReturn, + Optional, +) try: import pytest except ImportError: - pytest = None # type: ignore + pytest = None DIRECTORY = os.path.abspath(os.path.dirname(__file__)) BIOBLEND_TEST_SUITE = os.environ.get("BIOBLEND_TEST_SUITE", "full") @@ -24,7 +29,7 @@ ] -def main(args=None): +def main(args: Optional[List[str]] = None) -> NoReturn: """Entry point that delegates to pytest.main.""" if pytest is None: raise Exception("pytest is required to use this script.") diff -Nru python-bioblend-0.18.0/tests/README.TXT python-bioblend-1.0.0/tests/README.TXT --- python-bioblend-0.18.0/tests/README.TXT 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/README.TXT 2022-10-13 18:17:37.000000000 +0000 @@ -1,11 +1,3 @@ 
-To run the cloud tests, the following environment variables must be set: - -BIOBLEND_ACCESS_KEY = -BIOBLEND_SECRET_KEY = -BIOBLEND_CLOUD_TYPE = -BIOBLEND_AMI_ID = - - To run Galaxy tests, the following environment variables must be set: BIOBLEND_GALAXY_API_KEY = diff -Nru python-bioblend-0.18.0/tests/TestCloudmanLaunch.py python-bioblend-1.0.0/tests/TestCloudmanLaunch.py --- python-bioblend-0.18.0/tests/TestCloudmanLaunch.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestCloudmanLaunch.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,71 +0,0 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" -import contextlib - -from bioblend.cloudman import ( - CloudManConfig, - CloudManInstance, -) -from . import ( - CloudmanTestBase, - test_util, -) - - -@test_util.skip_unless_cloudman() -class TestCloudmanLaunch(CloudmanTestBase.CloudmanTestBase): - def setUp(self): - super().setUp() - - def test_validate_valid_config(self): - """ - Tests whether a valid config is validated properly. - """ - # cfg = CloudManConfig(self.access_key, self.secret_key, self.cluster_name, self.ami_id, self.instance_type, self.password, cloud_metadata=self.cloud_metadata) - cls = TestCloudmanLaunch - cfg = CloudManConfig( - cls.access_key, - cls.secret_key, - cls.cluster_name, - cls.ami_id, - cls.instance_type, - cls.password, - cloud_metadata=cls.cloud_metadata, - ) - result = cfg.validate() - self.assertIsNone(result, "Validation did not return null to indicate success!") - - def test_validate_invalid_config(self): - """ - Tests whether an invalid config is validated properly. - """ - cfg = CloudManConfig() - result = cfg.validate() - self.assertIsNotNone(result, "Validation should have returned a value since the configuration was invalid!") - - def test_launch_and_terminate(self): - cls = TestCloudmanLaunch - cfg = CloudManConfig( - cls.access_key, - cls.secret_key, - cls.cluster_name, - cls.ami_id, - cls.instance_type, - cls.password, - cloud_metadata=cls.cloud_metadata, - ) - cmi = CloudManInstance.launch_instance(cfg) - status = cmi.get_status() - self.assertNotEqual( - status["cluster_status"], - "ERROR", - "instance.get_status() returned ERROR. Should return a successful status!", - ) - with contextlib.suppress(Exception): - # TODO: The terminate method is unpredictable! Needs fix. - result = cmi.terminate(delete_cluster=True) - self.assertEqual( - result["cluster_status"], "SHUTDOWN", "Cluster should be in status SHUTDOWN after call to terminate!" - ) diff -Nru python-bioblend-0.18.0/tests/TestCloudmanMock.py python-bioblend-1.0.0/tests/TestCloudmanMock.py --- python-bioblend-0.18.0/tests/TestCloudmanMock.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestCloudmanMock.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,136 +0,0 @@ -""" -Tests the functionality of the BioBlend CloudMan API, without actually making -calls to a remote CloudMan instance/server. These don't actually ensure -that BioBlend is integrated with CloudMan correctly. They only ensure that -if you refactor the BioBlend CloudMan API code, that it will maintain its -current behaviour. 
-""" -import unittest -from unittest.mock import MagicMock - -from bioblend import cloudman - - -class TestCloudmanMock(unittest.TestCase): - def setUp(self): - url = "http://127.0.0.1:42284" - password = "password" - self.cm = cloudman.CloudManInstance(url, password) - - # def test_initialize(self): - # self.cm._make_get_request = MagicMock(return_value="{}") - # - # ## Set cluster type - # self.cm.initialize(type="Galaxy") - # - # params = {'startup_opt': 'Galaxy'} - # self.cm._make_get_request.assert_called_with("initialize_cluster", parameters=params) - - def test_get_status(self): - # Set return value of call - self.cm._make_get_request = MagicMock(return_value={}) - - status = self.cm.get_status() - self.assertNotEqual(status, None) - self.assertEqual(status, {}) - - # Check that the correct URL was called - self.cm._make_get_request.assert_called_with("instance_state_json") - - def test_get_nodes(self): - # Set return value of call - self.cm._make_get_request = MagicMock(return_value={"instances": []}) - - nodes = self.cm.get_nodes() - self.assertIsNotNone(nodes) - self.assertEqual(len(nodes), 0) - - # Check that the correct URL was called - self.cm._make_get_request.assert_called_with("instance_feed_json") - - def test_add_nodes(self): - self.cm._make_get_request = MagicMock(return_value="{}") - num_nodes = 10 - status = self.cm.add_nodes(num_nodes) - self.assertIsNotNone(status) - - # Check that the correct URL was called - params = {"number_nodes": 10, "instance_type": "", "spot_price": ""} - self.cm._make_get_request.assert_called_with("add_instances", parameters=params) - - def test_remove_nodes(self): - self.cm._make_get_request = MagicMock(return_value="{}") - num_nodes = 10 - status = self.cm.remove_nodes(num_nodes, force=True) - self.assertIsNotNone(status) - - # Check that the correct URL was called - params = {"number_nodes": 10, "force_termination": True} - self.cm._make_get_request.assert_called_with("remove_instances", parameters=params) - - def test_remove_node(self): - self.cm._make_get_request = MagicMock(return_value="{}") - instance_id = "abcdef" - self.cm.remove_node(instance_id, force=True) - - # Check that the correct URL was called - params = {"instance_id": "abcdef"} - self.cm._make_get_request.assert_called_with("remove_instance", parameters=params) - - def test_reboot_node(self): - self.cm._make_get_request = MagicMock(return_value="{}") - instance_id = "abcdef" - self.cm.reboot_node(instance_id) - - # Check that the correct URL was called - params = {"instance_id": "abcdef"} - self.cm._make_get_request.assert_called_with("reboot_instance", parameters=params) - - def test_autoscaling_enabled_true(self): - return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "3", "as_min": "1"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.assertTrue(self.cm.autoscaling_enabled()) - - def test_autoscaling_enabled_false(self): - return_json_string = {"autoscaling": {"use_autoscaling": False, "as_max": "3", "as_min": "1"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.assertFalse(self.cm.autoscaling_enabled()) - - def test_enable_autoscaling(self): - return_json_string = {"autoscaling": {"use_autoscaling": False, "as_max": "N/A", "as_min": "N/A"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.assertFalse(self.cm.autoscaling_enabled()) - self.cm.enable_autoscaling(minimum_nodes=0, maximum_nodes=19) - - # Check that the correct URL was called - params = 
{"as_min": 0, "as_max": 19} - self.cm._make_get_request.assert_called_with("toggle_autoscaling", parameters=params) - - return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "19", "as_min": "0"}} - self.cm.enable_autoscaling(minimum_nodes=0, maximum_nodes=19) - - # Check that the correct URL was called - params = {"as_min": 0, "as_max": 19} - self.cm._make_get_request.assert_called_with("toggle_autoscaling", parameters=params) - - def test_disable_autoscaling(self): - return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "3", "as_min": "1"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.cm.disable_autoscaling() - - self.cm._make_get_request.assert_called_with("toggle_autoscaling") - - def test_adjust_autoscaling(self): - return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "3", "as_min": "1"}} - self.cm._make_get_request = MagicMock(return_value=return_json_string) - self.cm.adjust_autoscaling(minimum_nodes=3, maximum_nodes=4) - params = {"as_min_adj": 3, "as_max_adj": 4} - self.cm._make_get_request.assert_called_with("adjust_autoscaling", parameters=params) - - def test_get_galaxy_state_stopped(self): - return_json = {"status": "'Galaxy' is not running", "srvc": "Galaxy"} - self.cm._make_get_request = MagicMock(return_value=return_json) - - self.assertEqual(self.cm.get_galaxy_state()["status"], "'Galaxy' is not running") - params = {"srvc": "Galaxy"} - self.cm._make_get_request.assert_called_with("get_srvc_status", parameters=params) diff -Nru python-bioblend-0.18.0/tests/TestCloudmanServices.py python-bioblend-1.0.0/tests/TestCloudmanServices.py --- python-bioblend-0.18.0/tests/TestCloudmanServices.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestCloudmanServices.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,69 +0,0 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" -from bioblend.cloudman import ( - CloudManConfig, - CloudManInstance, -) -from . 
import ( - CloudmanTestBase, - test_util, -) - - -@test_util.skip_unless_cloudman() -class TestCloudmanServices(CloudmanTestBase.CloudmanTestBase): - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.cfg = CloudManConfig( - cls.access_key, - cls.secret_key, - cls.cluster_name, - cls.ami_id, - cls.instance_type, - cls.password, - cloud_metadata=cls.cloud_metadata, - ) - cls.cmi = CloudManInstance.launch_instance(cls.cfg) - - def setUp(self): - self.cmi = self.__class__.cmi - - def test_get_status(self): - status = self.cmi.get_status() - self.assertIsNotNone(status) - - def test_get_nodes(self): - nodes = self.cmi.get_nodes() - self.assertIsNotNone(nodes) - - def test_add_nodes(self): - num_nodes = 1 - status = self.cmi.add_nodes(num_nodes) - self.assertIsNotNone(status) - - def test_reboot_node(self): - instance_id = self.cmi.instance_id - self.cmi.reboot_node(instance_id) - - def test_remove_node(self): - instance_id = self.cmi.instance_id - self.cmi.remove_node(instance_id, force=True) - - def test_enable_autoscaling(self): - self.assertFalse(self.cmi.autoscaling_enabled()) - self.cmi.enable_autoscaling(minimum_nodes=0, maximum_nodes=19) - self.assertTrue(self.cmi.autoscaling_enabled()) - - def test_disable_autoscaling(self): - self.cmi.disable_autoscaling() - self.assertFalse(self.cmi.autoscaling_enabled()) - - def test_adjust_autoscaling(self): - self.cmi.adjust_autoscaling(minimum_nodes=3, maximum_nodes=4) - - -# def test_get_galaxy_state_stopped(self): -# self.assertEqual(self.cmi.get_galaxy_state(), "'Galaxy' is not running") diff -Nru python-bioblend-0.18.0/tests/TestGalaxyConfig.py python-bioblend-1.0.0/tests/TestGalaxyConfig.py --- python-bioblend-0.18.0/tests/TestGalaxyConfig.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyConfig.py 2022-10-13 18:17:37.000000000 +0000 @@ -4,15 +4,15 @@ class TestGalaxyConfig(GalaxyTestBase.GalaxyTestBase): def test_get_config(self): response = self.gi.config.get_config() - self.assertTrue(isinstance(response, dict)) - self.assertTrue("brand" in response.keys()) + assert isinstance(response, dict) + assert "brand" in response.keys() def test_get_version(self): response = self.gi.config.get_version() - self.assertTrue(isinstance(response, dict)) - self.assertTrue("version_major" in response.keys()) + assert isinstance(response, dict) + assert "version_major" in response.keys() def test_whoami(self): response = self.gi.config.whoami() - self.assertTrue(isinstance(response, dict)) - self.assertTrue("username" in response.keys()) + assert isinstance(response, dict) + assert "username" in response.keys() diff -Nru python-bioblend-0.18.0/tests/TestGalaxyDatasetCollections.py python-bioblend-1.0.0/tests/TestGalaxyDatasetCollections.py --- python-bioblend-0.18.0/tests/TestGalaxyDatasetCollections.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyDatasetCollections.py 2022-10-13 18:17:37.000000000 +0000 @@ -2,6 +2,11 @@ import tarfile import tempfile from inspect import signature +from typing import ( + Any, + Dict, + Union, +) from zipfile import ZipFile from bioblend.galaxy import dataset_collections @@ -28,15 +33,15 @@ ], ), ) - self.assertEqual(collection_response["name"], "MyDatasetList") - self.assertEqual(collection_response["collection_type"], "list") + assert collection_response["name"] == "MyDatasetList" + assert collection_response["collection_type"] == "list" elements = collection_response["elements"] - self.assertEqual(len(elements), 3) - 
self.assertEqual(elements[0]["element_index"], 0) - self.assertEqual(elements[0]["object"]["id"], dataset1_id) - self.assertEqual(elements[1]["object"]["id"], dataset2_id) - self.assertEqual(elements[2]["object"]["id"], dataset3_id) - self.assertEqual(elements[2]["element_identifier"], "sample3") + assert len(elements) == 3 + assert elements[0]["element_index"] == 0 + assert elements[0]["object"]["id"] == dataset1_id + assert elements[1]["object"]["id"] == dataset2_id + assert elements[2]["object"]["id"] == dataset3_id + assert elements[2]["element_identifier"] == "sample3" def test_create_list_of_paired_datasets_in_history(self): history_id = self.gi.histories.create_history(name="TestDSListCreate")["id"] @@ -69,62 +74,62 @@ ], ), ) - self.assertEqual(collection_response["name"], "MyListOfPairedDatasets") - self.assertEqual(collection_response["collection_type"], "list:paired") + assert collection_response["name"] == "MyListOfPairedDatasets" + assert collection_response["collection_type"] == "list:paired" elements = collection_response["elements"] - self.assertEqual(len(elements), 2) - self.assertEqual(elements[0]["element_index"], 0) + assert len(elements) == 2 + assert elements[0]["element_index"] == 0 created_pair1 = elements[0]["object"] - self.assertEqual(created_pair1["collection_type"], "paired") - self.assertEqual(len(created_pair1["elements"]), 2) + assert created_pair1["collection_type"] == "paired" + assert len(created_pair1["elements"]) == 2 forward_element1 = created_pair1["elements"][0] - self.assertEqual(forward_element1["element_identifier"], "forward") - self.assertEqual(forward_element1["element_index"], 0) + assert forward_element1["element_identifier"] == "forward" + assert forward_element1["element_index"] == 0 forward_dataset1 = forward_element1["object"] - self.assertEqual(forward_dataset1["id"], dataset1_id) + assert forward_dataset1["id"] == dataset1_id - self.assertEqual(elements[1]["element_index"], 1) + assert elements[1]["element_index"] == 1 created_pair2 = elements[1]["object"] - self.assertEqual(created_pair2["collection_type"], "paired") - self.assertEqual(len(created_pair2["elements"]), 2) + assert created_pair2["collection_type"] == "paired" + assert len(created_pair2["elements"]) == 2 reverse_element2 = created_pair2["elements"][1] reverse_dataset2 = reverse_element2["object"] - self.assertEqual(reverse_element2["element_identifier"], "reverse") - self.assertEqual(reverse_element2["element_index"], 1) - self.assertEqual(reverse_dataset2["id"], dataset4_id) + assert reverse_element2["element_identifier"] == "reverse" + assert reverse_element2["element_index"] == 1 + assert reverse_dataset2["id"] == dataset4_id def test_collections_in_history_index(self): history_id = self.gi.histories.create_history(name="TestHistoryDSIndex")["id"] history_dataset_collection = self._create_pair_in_history(history_id) contents = self.gi.histories.show_history(history_id, contents=True) - self.assertEqual(len(contents), 3) - self.assertEqual(contents[2]["id"], history_dataset_collection["id"]) - self.assertEqual(contents[2]["name"], "MyTestPair") - self.assertEqual(contents[2]["collection_type"], "paired") + assert len(contents) == 3 + assert contents[2]["id"] == history_dataset_collection["id"] + assert contents[2]["name"] == "MyTestPair" + assert contents[2]["collection_type"] == "paired" def test_show_history_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestHistoryDSIndexShow")["id"] history_dataset_collection = 
self._create_pair_in_history(history_id) show_response = self.gi.histories.show_dataset_collection(history_id, history_dataset_collection["id"]) for key in ["collection_type", "elements", "name", "deleted", "visible"]: - self.assertIn(key, show_response) - self.assertFalse(show_response["deleted"]) - self.assertTrue(show_response["visible"]) + assert key in show_response + assert not show_response["deleted"] + assert show_response["visible"] def test_delete_history_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestHistoryDSDelete")["id"] history_dataset_collection = self._create_pair_in_history(history_id) self.gi.histories.delete_dataset_collection(history_id, history_dataset_collection["id"]) show_response = self.gi.histories.show_dataset_collection(history_id, history_dataset_collection["id"]) - self.assertTrue(show_response["deleted"]) + assert show_response["deleted"] def test_update_history_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestHistoryDSDelete")["id"] history_dataset_collection = self._create_pair_in_history(history_id) self.gi.histories.update_dataset_collection(history_id, history_dataset_collection["id"], visible=False) show_response = self.gi.histories.show_dataset_collection(history_id, history_dataset_collection["id"]) - self.assertFalse(show_response["visible"]) + assert not show_response["visible"] def test_show_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestDatasetCollectionShow")["id"] @@ -141,12 +146,12 @@ "url", "visible", ): - self.assertEqual(dataset_collection1[key], dataset_collection2[key]) + assert dataset_collection1[key] == dataset_collection2[key] for element1, element2 in zip(dataset_collection1["elements"], dataset_collection2["elements"]): - self.assertEqual(element1["id"], element2["id"]) - self.assertEqual(element1.keys(), element2.keys()) + assert element1["id"] == element2["id"] + assert element1.keys() == element2.keys() for key in element1["object"].keys(): - self.assertIn(key, element2["object"].keys()) + assert key in element2["object"].keys() @test_util.skip_unless_galaxy("release_18.01") def test_download_dataset_collection(self): @@ -164,7 +169,7 @@ os.mkdir(extract_dir_path) if archive_type == "zip": - archive = ZipFile(archive_path) + archive: Union[ZipFile, tarfile.TarFile] = ZipFile(archive_path) elif archive_type == "tgz": archive = tarfile.open(archive_path) @@ -173,7 +178,7 @@ dataset_dir_path = os.path.join(extract_dir_path, fname) file_path = os.path.join(dataset_dir_path, os.listdir(dataset_dir_path)[0]) with open(file_path) as f: - self.assertEqual(expected_contents, f.read()) + assert expected_contents == f.read() archive.close() def test_wait_for_dataset_collection(self): @@ -181,9 +186,9 @@ dataset_collection_id = self._create_pair_in_history(history_id)["id"] dataset_collection = self.gi.dataset_collections.wait_for_dataset_collection(dataset_collection_id) for element in dataset_collection["elements"]: - self.assertEqual(element["object"]["state"], "ok") + assert element["object"]["state"] == "ok" - def _create_pair_in_history(self, history_id): + def _create_pair_in_history(self, history_id: str) -> Dict[str, Any]: dataset1_id = self._test_dataset(history_id) dataset2_id = self._test_dataset(history_id) collection_response = self.gi.histories.create_dataset_collection( diff -Nru python-bioblend-0.18.0/tests/TestGalaxyDatasets.py python-bioblend-1.0.0/tests/TestGalaxyDatasets.py --- 
python-bioblend-0.18.0/tests/TestGalaxyDatasets.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyDatasets.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,6 +1,8 @@ import shutil import tempfile +import pytest + from bioblend import ( ConnectionError, galaxy, @@ -24,16 +26,15 @@ @test_util.skip_unless_galaxy("release_19.05") def test_show_nonexistent_dataset(self): - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): self.gi.datasets.show_dataset("nonexistent_id") def test_show_dataset(self): self.gi.datasets.show_dataset(self.dataset_id) def test_download_dataset(self): - with self.assertRaises(Exception) as ctx: - self.gi.datasets.download_dataset(None) - self.assertIsInstance(ctx.exception, (TypeError, ConnectionError)) + with pytest.raises((TypeError, ConnectionError)): + self.gi.datasets.download_dataset(None) # type: ignore[call-overload] expected_contents = ("\n".join(self.dataset_contents.splitlines()) + "\n").encode() # download_dataset() with file_path=None is already tested in TestGalaxyTools.test_paste_content() # self._wait_and_verify_dataset(self.dataset_id, expected_contents) @@ -42,9 +43,9 @@ downloaded_dataset = self.gi.datasets.download_dataset( self.dataset_id, file_path=tempdir, maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT * 2 ) - self.assertTrue(downloaded_dataset.startswith(tempdir)) + assert downloaded_dataset.startswith(tempdir) with open(downloaded_dataset, "rb") as f: - self.assertEqual(f.read(), expected_contents) + assert f.read() == expected_contents finally: shutil.rmtree(tempdir) with tempfile.NamedTemporaryFile(prefix="bioblend_test_") as f: @@ -54,119 +55,123 @@ use_default_filename=False, maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT, ) - self.assertEqual(download_filename, f.name) + assert download_filename == f.name f.flush() - self.assertEqual(f.read(), expected_contents) + assert f.read() == expected_contents @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets(self): datasets = self.gi.datasets.get_datasets() dataset_ids = [dataset["id"] for dataset in datasets] - self.assertIn(self.dataset_id, dataset_ids) + assert self.dataset_id in dataset_ids @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_history(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_limit_offset(self): datasets = self.gi.datasets.get_datasets(limit=1) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, offset=1) - self.assertEqual(datasets, []) + assert datasets == [] @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_name(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name="Pasted Entry") - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name="Wrong Name") - self.assertEqual(datasets, []) + assert datasets == [] @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_time(self): dataset = self.gi.datasets.show_dataset(self.dataset_id) ct = dataset["create_time"] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_min=ct) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_max=ct) - 
self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_min="2100-01-01T00:00:00") - self.assertEqual(datasets, []) + assert datasets == [] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_max="2000-01-01T00:00:00") - self.assertEqual(datasets, []) + assert datasets == [] ut = dataset["update_time"] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_min=ut) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_max=ut) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_min="2100-01-01T00:00:00") - self.assertEqual(datasets, []) + assert datasets == [] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_max="2000-01-01T00:00:00") - self.assertEqual(datasets, []) + assert datasets == [] @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_extension(self): - datasets = self.gi.datasets.get_datasets(history_id=self.history_id) datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension="txt") - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension="bam") - self.assertEqual(datasets, []) + assert datasets == [] + + @test_util.skip_unless_galaxy("release_22.01") + def test_get_datasets_extension_list(self): + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension=["bam", "txt"]) + assert len(datasets) == 1 @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_state(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state="ok") - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state="queued") - self.assertEqual(datasets, []) - with self.assertRaises(ConnectionError): + assert datasets == [] + with pytest.raises(ConnectionError): self.gi.datasets.get_datasets(history_id=self.history_id, state="nonexistent_state") datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state=["ok", "queued"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_visible(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, visible=True) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, visible=False) - self.assertEqual(len(datasets), 0) + assert len(datasets) == 0 @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_ordering(self): self.dataset_id2 = self._test_dataset(self.history_id, contents=self.dataset_contents) self.gi.datasets.wait_for_dataset(self.dataset_id2) datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="create_time-dsc") - self.assertEqual(datasets[0]["id"], self.dataset_id2) + assert datasets[0]["id"] == self.dataset_id2 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="create_time-asc") - self.assertEqual(datasets[0]["id"], self.dataset_id) + assert datasets[0]["id"] == self.dataset_id datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="hid-dsc") - self.assertEqual(datasets[0]["id"], 
self.dataset_id2) + assert datasets[0]["id"] == self.dataset_id2 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="hid-asc") - self.assertEqual(datasets[0]["id"], self.dataset_id) + assert datasets[0]["id"] == self.dataset_id @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_deleted(self): deleted_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, deleted=True) - self.assertEqual(deleted_datasets, []) + assert deleted_datasets == [] self.gi.histories.delete_dataset(self.history_id, self.dataset_id) deleted_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, deleted=True) - self.assertEqual(len(deleted_datasets), 1) + assert len(deleted_datasets) == 1 purged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, purged=True) - self.assertEqual(purged_datasets, []) + assert purged_datasets == [] self.gi.histories.delete_dataset(self.history_id, self.dataset_id, purge=True) purged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, purged=True) - self.assertEqual(len(purged_datasets), 1) + assert len(purged_datasets) == 1 @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_tool_id_and_tag(self): cat1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id="cat1") - self.assertEqual(cat1_datasets, []) + assert cat1_datasets == [] upload1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id="upload1") - self.assertEqual(len(upload1_datasets), 1) + assert len(upload1_datasets) == 1 self.gi.histories.update_dataset(self.history_id, self.dataset_id, tags=["test"]) tagged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tag="test") - self.assertEqual(len(tagged_datasets), 1) + assert len(tagged_datasets) == 1 def test_wait_for_dataset(self): history_id = self.gi.histories.create_history(name="TestWaitForDataset")["id"] @@ -174,7 +179,7 @@ dataset_id = self._test_dataset(history_id, contents=dataset_contents) dataset = self.gi.datasets.wait_for_dataset(dataset_id) - self.assertEqual(dataset["state"], "ok") + assert dataset["state"] == "ok" self.gi.histories.delete_history(history_id, purge=True) @@ -188,17 +193,17 @@ sharing_role = self.gi.roles.create_role("sharing_role", "sharing_role", [user_id, admin_user_id])["id"] self.gi.datasets.publish_dataset(self.dataset_id, published=False) - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): anonymous_gi.datasets.show_dataset(self.dataset_id) self.gi.datasets.publish_dataset(self.dataset_id, published=True) # now dataset is public, i.e. 
accessible to anonymous users - self.assertEqual(anonymous_gi.datasets.show_dataset(self.dataset_id)["id"], self.dataset_id) + assert anonymous_gi.datasets.show_dataset(self.dataset_id)["id"] == self.dataset_id self.gi.datasets.publish_dataset(self.dataset_id, published=False) - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): user_gi.datasets.show_dataset(self.dataset_id) self.gi.datasets.update_permissions(self.dataset_id, access_ids=[sharing_role], manage_ids=[sharing_role]) - self.assertEqual(user_gi.datasets.show_dataset(self.dataset_id)["id"], self.dataset_id) + assert user_gi.datasets.show_dataset(self.dataset_id)["id"] == self.dataset_id # anonymous access now fails because sharing is only with the shared user role - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): anonymous_gi.datasets.show_dataset(self.dataset_id) diff -Nru python-bioblend-0.18.0/tests/TestGalaxyFolders.py python-bioblend-1.0.0/tests/TestGalaxyFolders.py --- python-bioblend-0.18.0/tests/TestGalaxyFolders.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyFolders.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,3 +1,8 @@ +from typing import ( + Dict, + List, +) + from . import GalaxyTestBase FOO_DATA = "foo\nbar\n" @@ -18,18 +23,18 @@ self.gi.libraries.delete_library(self.library["id"]) def test_create_folder(self): - self.assertEqual(self.folder["name"], self.name) - self.assertEqual(self.folder["description"], "automatically created folder") + assert self.folder["name"] == self.name + assert self.folder["description"] == "automatically created folder" def test_show_folder(self): f2 = self.gi.folders.show_folder(self.folder["id"]) - self.assertEqual(f2["id"], self.folder["id"]) + assert f2["id"] == self.folder["id"] def test_show_folder_contents(self): f2 = self.gi.folders.show_folder(self.folder["id"], contents=True) - self.assertIn("folder_contents", f2) - self.assertIn("metadata", f2) - self.assertEqual(self.name, f2["metadata"]["folder_name"]) + assert "folder_contents" in f2 + assert "metadata" in f2 + assert self.name == f2["metadata"]["folder_name"] def test_delete_folder(self): self.sub_folder = self.gi.folders.create_folder(self.folder["id"], self.name) @@ -37,22 +42,22 @@ def test_update_folder(self): self.folder = self.gi.folders.update_folder(self.folder["id"], "new-name", "new-description") - self.assertEqual(self.folder["name"], "new-name") - self.assertEqual(self.folder["description"], "new-description") + assert self.folder["name"] == "new-name" + assert self.folder["description"] == "new-description" def test_get_set_permissions(self): - empty_permission = { + empty_permission: Dict[str, List] = { "add_library_item_role_list": [], "modify_folder_role_list": [], "manage_folder_role_list": [], } # They should be empty to start with - self.assertEqual(self.gi.folders.get_permissions(self.folder["id"], scope="current"), empty_permission) - self.assertEqual(self.gi.folders.get_permissions(self.folder["id"], scope="available"), empty_permission) + assert self.gi.folders.get_permissions(self.folder["id"], scope="current") == empty_permission + assert self.gi.folders.get_permissions(self.folder["id"], scope="available") == empty_permission # Then we'll add a role role = self.gi.roles.get_roles()[0] self.gi.folders.set_permissions(self.folder["id"], add_ids=[role["id"]]) - self.assertTrue( + assert ( role["id"] in self.gi.folders.get_permissions(self.folder["id"], scope="available")["add_library_item_role_list"][0] ) 
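The rewritten test modules above and below all follow the same mechanical pattern: ``unittest.TestCase`` assertion helpers are replaced with plain ``assert`` statements, and ``self.assertRaises`` with ``pytest.raises``. A condensed illustration of the rewrite (the checks are illustrative, not an excerpt from the suite)::

    import pytest

    from bioblend import ConnectionError

    # 0.18.0 style:
    #     self.assertEqual(folder["name"], name)
    #     self.assertIn("metadata", contents)
    #     with self.assertRaises(ConnectionError):
    #         self.gi.datasets.show_dataset("nonexistent_id")

    # 1.0.0 style:
    def check_folder(gi, folder, name):
        assert folder["name"] == name
        contents = gi.folders.show_folder(folder["id"], contents=True)
        assert "metadata" in contents
        with pytest.raises(ConnectionError):
            gi.datasets.show_dataset("nonexistent_id")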
diff -Nru python-bioblend-0.18.0/tests/TestGalaxyGroups.py python-bioblend-1.0.0/tests/TestGalaxyGroups.py --- python-bioblend-0.18.0/tests/TestGalaxyGroups.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyGroups.py 2022-10-13 18:17:37.000000000 +0000 @@ -17,46 +17,46 @@ pass def test_create_group(self): - self.assertEqual(self.group["name"], self.name) - self.assertIsNotNone(self.group["id"]) + assert self.group["name"] == self.name + assert self.group["id"] is not None def test_get_groups(self): groups = self.gi.groups.get_groups() for group in groups: - self.assertIsNotNone(group["id"]) - self.assertIsNotNone(group["name"]) + assert group["id"] is not None + assert group["name"] is not None def test_show_group(self): group_data = self.gi.groups.show_group(self.group["id"]) - self.assertEqual(self.group["id"], group_data["id"]) - self.assertEqual(self.group["name"], group_data["name"]) + assert self.group["id"] == group_data["id"] + assert self.group["name"] == group_data["name"] def test_get_group_users(self): group_users = self.gi.groups.get_group_users(self.group["id"]) - self.assertEqual(group_users, []) + assert group_users == [] def test_get_group_roles(self): group_roles = self.gi.groups.get_group_roles(self.group["id"]) - self.assertEqual(group_roles, []) + assert group_roles == [] def test_update_group(self): new_name = f"test_{uuid.uuid4().hex}" new_users = [self.gi.users.get_current_user()["id"]] self.gi.groups.update_group(self.group["id"], new_name, user_ids=new_users) updated_group = self.gi.groups.show_group(self.group["id"]) - self.assertEqual(self.group["id"], updated_group["id"]) - self.assertEqual(updated_group["name"], new_name) + assert self.group["id"] == updated_group["id"] + assert updated_group["name"] == new_name updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])] - self.assertEqual(set(updated_group_users), set(new_users)) + assert set(updated_group_users) == set(new_users) updated_group_roles = [_["id"] for _ in self.gi.groups.get_group_roles(self.group["id"])] - self.assertEqual(set(updated_group_roles), set()) + assert set(updated_group_roles) == set() def test_add_delete_group_user(self): new_user = self.gi.users.get_current_user()["id"] ret = self.gi.groups.add_group_user(self.group["id"], new_user) - self.assertEqual(ret["id"], new_user) + assert ret["id"] == new_user updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])] - self.assertIn(new_user, updated_group_users) + assert new_user in updated_group_users self.gi.groups.delete_group_user(self.group["id"], new_user) updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])] - self.assertNotIn(new_user, updated_group_users) + assert new_user not in updated_group_users diff -Nru python-bioblend-0.18.0/tests/TestGalaxyHistories.py python-bioblend-1.0.0/tests/TestGalaxyHistories.py --- python-bioblend-0.18.0/tests/TestGalaxyHistories.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyHistories.py 2022-10-13 18:17:37.000000000 +0000 @@ -5,7 +5,12 @@ import tarfile import tempfile -import bioblend.galaxy +import pytest + +from bioblend import ( + ConnectionError, + galaxy, +) from . 
import ( GalaxyTestBase, test_util, @@ -21,9 +26,9 @@ def test_create_history(self): history_name = "another buildbot - automated test" new_history = self.gi.histories.create_history(name=history_name) - self.assertIsNotNone(new_history["id"]) - self.assertEqual(new_history["name"], history_name) - self.assertIsNotNone(new_history["url"]) + assert new_history["id"] is not None + assert new_history["name"] == history_name + assert new_history["url"] is not None def test_update_history(self): new_name = "buildbot - automated test renamed" @@ -34,39 +39,39 @@ ) if "id" not in updated_hist: updated_hist = self.gi.histories.show_history(self.history["id"]) - self.assertEqual(self.history["id"], updated_hist["id"]) - self.assertEqual(updated_hist["name"], new_name) - self.assertEqual(updated_hist["annotation"], new_annotation) - self.assertEqual(updated_hist["tags"], new_tags) + assert self.history["id"] == updated_hist["id"] + assert updated_hist["name"] == new_name + assert updated_hist["annotation"] == new_annotation + assert updated_hist["tags"] == new_tags def test_publish_history(self): # Verify that searching for published histories does not return the test history published_histories = self.gi.histories.get_histories(published=True) - self.assertFalse(any(h["id"] == self.history["id"] for h in published_histories)) + assert not any(h["id"] == self.history["id"] for h in published_histories) updated_hist = self.gi.histories.update_history(self.history["id"], published=True) if "id" not in updated_hist: updated_hist = self.gi.histories.show_history(self.history["id"]) - self.assertEqual(self.history["id"], updated_hist["id"]) - self.assertTrue(updated_hist["published"]) + assert self.history["id"] == updated_hist["id"] + assert updated_hist["published"] # Verify that searching for published histories now returns the test history published_histories = self.gi.histories.get_histories(published=True) - self.assertTrue(any(h["id"] == self.history["id"] for h in published_histories)) + assert any(h["id"] == self.history["id"] for h in published_histories) # Verify that get_published_histories as an anonymous user also returns the test history - anonymous_gi = bioblend.galaxy.GalaxyInstance(url=self.gi.base_url, key=None) + anonymous_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=None) published_histories = anonymous_gi.histories.get_published_histories() - self.assertTrue(any(h["id"] == self.history["id"] for h in published_histories)) + assert any(h["id"] == self.history["id"] for h in published_histories) history_from_slug = anonymous_gi.histories.get_published_histories(slug=updated_hist["slug"]) - self.assertTrue(len(history_from_slug) == 1) - self.assertEqual(self.history["id"], history_from_slug[0]["id"]) + assert len(history_from_slug) == 1 + assert self.history["id"] == history_from_slug[0]["id"] def test_get_histories(self): # Make sure there's at least one value - the one we created all_histories = self.gi.histories.get_histories() - self.assertGreater(len(all_histories), 0) + assert len(all_histories) > 0 # Check whether id is present, when searched by name histories = self.gi.histories.get_histories(name=self.default_history_name) - self.assertEqual(len([h for h in histories if h["id"] == self.history["id"]]), 1) + assert len([h for h in histories if h["id"] == self.history["id"]]) == 1 # TODO: check whether deleted history is returned correctly # At the moment, get_histories() returns only not-deleted histories @@ -76,73 +81,101 @@ # get_histories() will return both 
not-deleted and deleted histories # and we can uncomment the following test. # deleted_history = self.gi.histories.get_histories(deleted=True) - # self.assertGreaterEqual(len(all_histories), len(deleted_history)) + # assert len(all_histories) >= len(deleted_history) + + @test_util.skip_unless_galaxy("release_20.01") + def test_other_users_histories(self): + username = "newuser4" + user_id = self.gi.users.create_local_user(username, f"{username}@example.org", "secret")["id"] + user_api_key = self.gi.users.create_user_apikey(user_id) + user_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=user_api_key) + # Normal users cannot use the `all` parameter + with pytest.raises(ConnectionError): + other_user_histories = user_gi.histories.get_histories(all=True) + user_history_id = user_gi.histories.create_history(name=f"History for {username}")["id"] + # Get all users' histories from an admin account + other_user_histories = self.gi.histories.get_histories(all=True) + assert user_history_id in [h["id"] for h in other_user_histories] def test_show_history(self): history_data = self.gi.histories.show_history(self.history["id"]) - self.assertEqual(self.history["id"], history_data["id"]) - self.assertEqual(self.history["name"], history_data["name"]) - self.assertEqual("new", history_data["state"]) + assert self.history["id"] == history_data["id"] + assert self.history["name"] == history_data["name"] + assert "new" == history_data["state"] def test_show_history_with_contents(self): history_id = self.history["id"] contents = self.gi.histories.show_history(history_id, contents=True) # Empty history has no datasets, content length should be 0 - self.assertEqual(len(contents), 0) + assert len(contents) == 0 self._test_dataset(history_id) contents = self.gi.histories.show_history(history_id, contents=True) # history has 1 dataset, content length should be 1 - self.assertEqual(len(contents), 1) + assert len(contents) == 1 contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset"]) # filtering for dataset, content length should still be 1 - self.assertEqual(len(contents), 1) + assert len(contents) == 1 contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset_collection"]) # filtering for dataset collection but there's no collection in the history - self.assertEqual(len(contents), 0) + assert len(contents) == 0 contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset", "dataset_collection"]) - self.assertEqual(len(contents), 1) + assert len(contents) == 1 def test_create_history_tag(self): new_tag = "tag1" self.gi.histories.create_history_tag(self.history["id"], new_tag) updated_hist = self.gi.histories.show_history(self.history["id"]) - self.assertEqual(self.history["id"], updated_hist["id"]) - self.assertIn(new_tag, updated_hist["tags"]) + assert self.history["id"] == updated_hist["id"] + assert new_tag in updated_hist["tags"] def test_show_dataset(self): history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) dataset = self.gi.histories.show_dataset(history_id, dataset1_id) for key in ["name", "hid", "id", "deleted", "history_id", "visible"]: - self.assertIn(key, dataset) - self.assertEqual(dataset["history_id"], history_id) - self.assertEqual(dataset["hid"], 1) - self.assertEqual(dataset["id"], dataset1_id) - self.assertEqual(dataset["deleted"], False) - self.assertEqual(dataset["visible"], True) - - def test_show_dataset_provenance(self): + assert key in dataset + assert dataset["history_id"] == 
history_id + assert dataset["hid"] == 1 + assert dataset["id"] == dataset1_id + assert not dataset["deleted"] + assert dataset["visible"] + + @test_util.skip_unless_galaxy("release_22.01") + def test_show_dataset_provenance(self) -> None: + MINIMAL_PROV_KEYS = ("id", "uuid") + OTHER_PROV_KEYS = ("job_id", "parameters", "stderr", "stdout", "tool_id") + ALL_PROV_KEYS = MINIMAL_PROV_KEYS + OTHER_PROV_KEYS history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) - prov = self.gi.histories.show_dataset_provenance(history_id, dataset1_id) - for key in ["id", "job_id", "parameters", "stderr", "stdout", "tool_id"]: - self.assertIn(key, prov) + dataset2_id = self._run_random_lines1(history_id, dataset1_id)["outputs"][0]["id"] + prov = self.gi.histories.show_dataset_provenance(history_id, dataset2_id) + for key in ALL_PROV_KEYS: + assert key in prov + for key in MINIMAL_PROV_KEYS: + assert key in prov["parameters"]["input"] + for key in OTHER_PROV_KEYS: + assert key not in prov["parameters"]["input"] + recursive_prov = self.gi.histories.show_dataset_provenance(history_id, dataset2_id, follow=True) + for key in ALL_PROV_KEYS: + assert key in recursive_prov + for key in ALL_PROV_KEYS: + assert key in recursive_prov["parameters"]["input"] def test_delete_dataset(self): history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) self.gi.histories.delete_dataset(history_id, dataset1_id) dataset = self.gi.histories.show_dataset(history_id, dataset1_id) - self.assertTrue(dataset["deleted"]) - self.assertFalse(dataset["purged"]) + assert dataset["deleted"] + assert not dataset["purged"] def test_purge_dataset(self): history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) self.gi.histories.delete_dataset(history_id, dataset1_id, purge=True) dataset = self.gi.histories.show_dataset(history_id, dataset1_id) - self.assertTrue(dataset["deleted"]) - self.assertTrue(dataset["purged"]) + assert dataset["deleted"] + assert dataset["purged"] def test_update_dataset(self): history_id = self.history["id"] @@ -150,7 +183,7 @@ updated_dataset = self.gi.histories.update_dataset(history_id, dataset1_id, visible=False) if "id" not in updated_dataset: updated_dataset = self.gi.histories.show_dataset(history_id, dataset1_id) - self.assertFalse(updated_dataset["visible"]) + assert not updated_dataset["visible"] def test_upload_dataset_from_library(self): pass @@ -159,39 +192,39 @@ def test_delete_history(self): result = self.gi.histories.delete_history(self.history["id"]) - self.assertTrue(result["deleted"]) + assert result["deleted"] all_histories = self.gi.histories.get_histories() - self.assertTrue(not any(d["id"] == self.history["id"] for d in all_histories)) + assert not any(d["id"] == self.history["id"] for d in all_histories) def test_undelete_history(self): self.gi.histories.delete_history(self.history["id"]) self.gi.histories.undelete_history(self.history["id"]) all_histories = self.gi.histories.get_histories() - self.assertTrue(any(d["id"] == self.history["id"] for d in all_histories)) + assert any(d["id"] == self.history["id"] for d in all_histories) def test_get_status(self): state = self.gi.histories.get_status(self.history["id"]) - self.assertEqual("new", state["state"]) + assert "new" == state["state"] def test_get_most_recently_used_history(self): most_recently_used_history = self.gi.histories.get_most_recently_used_history() # if the user has been created via the API, it does not have # a session, therefore no history if most_recently_used_history is 
not None: - self.assertIsNotNone(most_recently_used_history["id"]) - self.assertIsNotNone(most_recently_used_history["name"]) - self.assertIsNotNone(most_recently_used_history["state"]) + assert most_recently_used_history["id"] is not None + assert most_recently_used_history["name"] is not None + assert most_recently_used_history["state"] is not None def test_download_history(self): jeha_id = self.gi.histories.export_history(self.history["id"], wait=True, maxwait=60) - self.assertTrue(jeha_id) + assert jeha_id tempdir = tempfile.mkdtemp(prefix="bioblend_test_") temp_fn = os.path.join(tempdir, "export.tar.gz") try: with open(temp_fn, "wb") as fo: self.gi.histories.download_history(self.history["id"], jeha_id, fo) - self.assertTrue(tarfile.is_tarfile(temp_fn)) + assert tarfile.is_tarfile(temp_fn) finally: shutil.rmtree(tempdir) @@ -225,7 +258,7 @@ history_id = self.history["id"] dataset_id = self._test_dataset(history_id) extra_files = self.gi.histories.get_extra_files(history_id, dataset_id) - self.assertEqual(extra_files, []) + assert extra_files == [] def tearDown(self): self.gi.histories.delete_history(self.history["id"], purge=True) diff -Nru python-bioblend-0.18.0/tests/TestGalaxyInstance.py python-bioblend-1.0.0/tests/TestGalaxyInstance.py --- python-bioblend-0.18.0/tests/TestGalaxyInstance.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyInstance.py 2022-10-13 18:17:37.000000000 +0000 @@ -5,9 +5,10 @@ import time import unittest +import pytest + from bioblend import ConnectionError from bioblend.galaxy import GalaxyInstance -from bioblend.galaxy.client import Client from . import test_util @@ -16,29 +17,29 @@ # "connect" to a fake Galaxy instance self.gi = GalaxyInstance("http://localhost:56789", key="whatever") - def test_set_max_get_retries(self): + def test_set_max_get_attempts(self): self.gi.max_get_attempts = 3 - self.assertEqual(3, Client.max_get_retries()) + assert 3 == self.gi.max_get_attempts def test_set_retry_delay(self): - self.gi.get_retry_delay = 5 - self.assertEqual(5, Client.get_retry_delay()) + self.gi.get_retry_delay = 5.0 + assert 5.0 == self.gi.get_retry_delay def test_get_retry(self): # We set the client to try twice, with a delay of 5 seconds between # attempts. So, we expect the call to take at least 5 seconds before # failing. - self.gi.max_get_attempts = 2 - self.gi.get_retry_delay = 5 + self.gi.max_get_attempts = 3 + self.gi.get_retry_delay = 2 start = time.time() - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): self.gi.libraries.get_libraries() end = time.time() duration = end - start - self.assertGreater(duration, self.gi.get_retry_delay, "Didn't seem to retry long enough") + assert duration > self.gi.get_retry_delay * (self.gi.max_get_attempts - 1), "Didn't seem to retry long enough" def test_missing_scheme_fake_url(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): GalaxyInstance("localhost:56789", key="whatever") @test_util.skip_unless_galaxy() diff -Nru python-bioblend-0.18.0/tests/TestGalaxyInvocations.py python-bioblend-1.0.0/tests/TestGalaxyInvocations.py --- python-bioblend-0.18.0/tests/TestGalaxyInvocations.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyInvocations.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,6 +1,10 @@ import contextlib import os import time +from typing import ( + Any, + Dict, +) from . 
import ( GalaxyTestBase, @@ -25,11 +29,11 @@ invocation_id = invocation["id"] invocations = self.gi.invocations.get_invocations() - self.assertEqual(len(invocations), 1) - self.assertEqual(invocations[0]["id"], invocation_id) + assert len(invocations) == 1 + assert invocations[0]["id"] == invocation_id self.gi.invocations.cancel_invocation(invocation_id) invocation = self.gi.invocations.show_invocation(invocation_id) - self.assertEqual(invocation["state"], "cancelled") + assert invocation["state"] == "cancelled" @test_util.skip_unless_galaxy("release_20.01") def test_get_invocations(self): @@ -55,20 +59,20 @@ # Test filtering by workflow ID for wf_id, expected_invoc_num in {self.workflow_id: 2, workflow2_id: 1}.items(): invocs = self.gi.invocations.get_invocations(workflow_id=wf_id) - self.assertEqual(len(invocs), expected_invoc_num) + assert len(invocs) == expected_invoc_num for invoc in invocs: - self.assertEqual(invoc["workflow_id"], wf_id) + assert invoc["workflow_id"] == wf_id # Test filtering by history ID for hist_id, expected_invoc_num in {self.history_id: 1, hist2_id: 2}.items(): invocs = self.gi.invocations.get_invocations(history_id=hist_id) - self.assertEqual(len(invocs), expected_invoc_num) + assert len(invocs) == expected_invoc_num for invoc in invocs: - self.assertEqual(invoc["history_id"], hist_id) + assert invoc["history_id"] == hist_id # Test limiting limit_invocs = self.gi.invocations.get_invocations(limit=2) - self.assertEqual(len(limit_invocs), 2) + assert len(limit_invocs) == 2 self.gi.histories.delete_history(hist2_id, purge=True) @@ -90,17 +94,17 @@ self.gi.invocations.wait_for_invocation(invocation["id"]) biocompute_object = self.gi.invocations.get_invocation_biocompute_object(invocation["id"]) - self.assertEqual(len(biocompute_object["description_domain"]["pipeline_steps"]), 1) + assert len(biocompute_object["description_domain"]["pipeline_steps"]) == 1 @test_util.skip_unless_galaxy("release_19.09") def test_get_invocation_jobs_summary(self): invocation = self._invoke_workflow() self.gi.invocations.wait_for_invocation(invocation["id"]) jobs_summary = self.gi.invocations.get_invocation_summary(invocation["id"]) - self.assertEqual(jobs_summary["populated_state"], "ok") + assert jobs_summary["populated_state"] == "ok" step_jobs_summary = self.gi.invocations.get_invocation_step_jobs_summary(invocation["id"]) - self.assertEqual(len(step_jobs_summary), 1) - self.assertEqual(step_jobs_summary[0]["populated_state"], "ok") + assert len(step_jobs_summary) == 1 + assert step_jobs_summary[0]["populated_state"] == "ok" @test_util.skip_unless_galaxy("release_19.09") @test_util.skip_unless_tool("cat1") @@ -116,7 +120,7 @@ ) invocation_id = invocation["id"] - def invocation_steps_by_order_index(): + def invocation_steps_by_order_index() -> Dict[int, Dict[str, Any]]: invocation = self.gi.invocations.show_invocation(invocation_id) return {s["order_index"]: s for s in invocation["steps"]} @@ -126,9 +130,9 @@ time.sleep(0.5) steps = invocation_steps_by_order_index() pause_step = steps[2] - self.assertIsNone(self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"]) + assert self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"] is None self.gi.invocations.run_invocation_step_action(invocation_id, pause_step["id"], action=True) - self.assertTrue(self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"]) + assert self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"] 
self.gi.invocations.wait_for_invocation(invocation["id"]) @test_util.skip_unless_galaxy("release_21.01") @@ -138,9 +142,9 @@ rerun_invocation = self.gi.invocations.rerun_invocation(invocation["id"], import_inputs_to_history=True) self.gi.invocations.wait_for_invocation(rerun_invocation["id"]) history = self.gi.histories.show_history(rerun_invocation["history_id"], contents=True) - self.assertEqual(len(history), 3) + assert len(history) == 3 - def _invoke_workflow(self): + def _invoke_workflow(self) -> Dict[str, Any]: dataset = {"src": "hda", "id": self.dataset_id} return self.gi.workflows.invoke_workflow( diff -Nru python-bioblend-0.18.0/tests/TestGalaxyJobs.py python-bioblend-1.0.0/tests/TestGalaxyJobs.py --- python-bioblend-0.18.0/tests/TestGalaxyJobs.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyJobs.py 2022-10-13 18:17:37.000000000 +0000 @@ -5,6 +5,8 @@ ) from operator import itemgetter +from typing_extensions import Literal + from bioblend.galaxy.tools.inputs import ( dataset, inputs, @@ -31,7 +33,7 @@ tool_output = self.gi.tools.run_tool(history_id=self.history_id, tool_id="cat1", tool_inputs=tool_inputs) job_id = tool_output["jobs"][0]["id"] job = self.gi.jobs.wait_for_job(job_id) - self.assertEqual(job["state"], "ok") + assert job["state"] == "ok" @test_util.skip_unless_tool("random_lines1") def test_get_jobs(self): @@ -39,17 +41,17 @@ self._run_tool() jobs = self.gi.jobs.get_jobs(tool_id="random_lines1", history_id=self.history_id) - self.assertEqual(len(jobs), 2) + assert len(jobs) == 2 jobs = self.gi.jobs.get_jobs(history_id=self.history_id, state="failed") - self.assertEqual(len(jobs), 0) + assert len(jobs) == 0 yesterday = datetime.today() - timedelta(days=1) jobs = self.gi.jobs.get_jobs(date_range_max=yesterday.strftime("%Y-%m-%d"), history_id=self.history_id) - self.assertEqual(len(jobs), 0) + assert len(jobs) == 0 tomorrow = datetime.today() + timedelta(days=1) jobs = self.gi.jobs.get_jobs(date_range_min=tomorrow.strftime("%Y-%m-%d")) - self.assertEqual(len(jobs), 0) + assert len(jobs) == 0 jobs = self.gi.jobs.get_jobs(date_range_min=datetime.today().strftime("%Y-%m-%d"), history_id=self.history_id) - self.assertEqual(len(jobs), 3) + assert len(jobs) == 3 @test_util.skip_unless_galaxy("release_21.05") def test_get_jobs_with_filtering(self): @@ -72,17 +74,17 @@ self.gi.invocations.wait_for_invocation(invocation2["id"]) all_jobs = self.gi.jobs.get_jobs(history_id=self.history_id, order_by="create_time") - self.assertEqual(len(all_jobs), 3) + assert len(all_jobs) == 3 job1_id = all_jobs[1]["id"] jobs = self.gi.jobs.get_jobs(history_id=self.history_id, limit=1, offset=1, order_by="create_time") - self.assertEqual(len(jobs), 1) - self.assertEqual(jobs[0]["id"], job1_id) + assert len(jobs) == 1 + assert jobs[0]["id"] == job1_id jobs = self.gi.jobs.get_jobs(invocation_id=invocation1["id"]) - self.assertEqual(len(jobs), 1) + assert len(jobs) == 1 job_id_inv = jobs[0]["id"] jobs = self.gi.jobs.get_jobs(workflow_id=workflow_id) - self.assertEqual(len(jobs), 2) - self.assertIn(job_id_inv, [job["id"] for job in jobs]) + assert len(jobs) == 2 + assert job_id_inv in [job["id"] for job in jobs] @test_util.skip_unless_galaxy("release_21.01") @test_util.skip_unless_tool("random_lines1") @@ -93,7 +95,7 @@ rerun_output = self.gi.jobs.rerun_job(original_job_id) original_output_content = self.gi.datasets.download_dataset(original_output["outputs"][0]["id"]) rerun_output_content = self.gi.datasets.download_dataset(rerun_output["outputs"][0]["id"]) - 
self.assertEqual(rerun_output_content, original_output_content) + assert rerun_output_content == original_output_content @test_util.skip_unless_galaxy("release_21.01") @test_util.skip_unless_tool("Show beginning1") @@ -116,16 +118,16 @@ raise Exception("The job should have failed") history_contents = self.gi.histories.show_history(self.history_id, contents=True) - self.assertEqual(len(history_contents), 3) - self.assertEqual(history_contents[1]["state"], "error") - self.assertEqual(history_contents[2]["state"], "paused") + assert len(history_contents) == 3 + assert history_contents[1]["state"] == "error" + assert history_contents[2]["state"] == "paused" # resume the paused step job - resumed_job = self.gi.jobs.resume_job(job_steps[-1]["job_id"]) - self.assertEqual(resumed_job[0]["name"], "out_file1") + resumed_outputs = self.gi.jobs.resume_job(job_steps[-1]["job_id"]) + assert resumed_outputs[0]["name"] == "out_file1" # the following does not pass stably - the job goes back to paused too quickly # history_contents_resumed = self.gi.histories.show_history(self.history_id, contents=True) - # self.assertNotEqual(history_contents_resumed[2]['state'], 'paused') + # assert history_contents_resumed[2]["state"] != "paused" # now rerun and remap with correct input param failed_job_id = self.gi.datasets.show_dataset(history_contents[1]["id"])["creating_job"] @@ -139,8 +141,8 @@ self.gi.jobs.wait_for_job(new_job_id) self.gi.jobs.resume_job(last_job_id) # last_job can get stuck on paused - resume it in case self.gi.jobs.wait_for_job(last_job_id) - self.assertEqual(last_dataset["hid"], 3) - self.assertEqual(last_dataset["id"], history_contents[2]["id"]) + assert last_dataset["hid"] == 3 + assert last_dataset["id"] == history_contents[2]["id"] self._wait_and_verify_dataset(last_dataset["id"], b"line 1\tline 1\n") @test_util.skip_unless_galaxy("release_19.05") @@ -148,29 +150,33 @@ def test_get_common_problems(self): job_id = self._run_tool()["jobs"][0]["id"] response = self.gi.jobs.get_common_problems(job_id) - self.assertEqual(response, {"has_duplicate_inputs": False, "has_empty_inputs": True}) + assert response == {"has_duplicate_inputs": False, "has_empty_inputs": True} @test_util.skip_unless_tool("random_lines1") def test_get_inputs(self): job_id = self._run_tool()["jobs"][0]["id"] response = self.gi.jobs.get_inputs(job_id) - self.assertEqual(response, [{"name": "input", "dataset": {"src": "hda", "id": self.dataset_id}}]) + assert response == [{"name": "input", "dataset": {"src": "hda", "id": self.dataset_id}}] @test_util.skip_unless_tool("random_lines1") def test_get_outputs(self): output = self._run_tool() job_id, output_id = output["jobs"][0]["id"], output["outputs"][0]["id"] response = self.gi.jobs.get_outputs(job_id) - self.assertEqual(response, [{"name": "out_file1", "dataset": {"src": "hda", "id": output_id}}]) + assert response == [{"name": "out_file1", "dataset": {"src": "hda", "id": output_id}}] @test_util.skip_unless_galaxy("release_20.05") @test_util.skip_unless_tool("random_lines1") def test_get_destination_params(self): job_id = self._run_tool()["jobs"][0]["id"] + # In Galaxy 20.05 and 20.09 we need to wait for the job, otherwise + # `get_destination_params()` receives a 500 error code. 
Fixed upstream + # in https://github.com/galaxyproject/galaxy/commit/3e7f03cd1f229b8c9421ade02002728a33e131d8 + self.gi.jobs.wait_for_job(job_id) response = self.gi.jobs.get_destination_params(job_id) - self.assertIn("Runner", response) - self.assertIn("Runner Job ID", response) - self.assertIn("Handler", response) + assert "Runner" in response + assert "Runner Job ID" in response + assert "Handler" in response @test_util.skip_unless_galaxy("release_18.01") @test_util.skip_unless_tool("random_lines1") @@ -183,7 +189,7 @@ "seed_source|seed": "asdf", } response = self.gi.jobs.search_jobs("random_lines1", inputs) - self.assertIn(job_id, [job["id"] for job in response]) + assert job_id in [job["id"] for job in response] @test_util.skip_unless_galaxy("release_20.01") @test_util.skip_unless_tool("random_lines1") @@ -192,47 +198,33 @@ job_id, output_id = output["jobs"][0]["id"], output["outputs"][0]["id"] response = self.gi.jobs.report_error(job_id, output_id, "Test error") # expected response when the Galaxy server does not have mail configured - self.assertEqual( - response, - { - "messages": [ - [ - "An error occurred sending the report by email: Mail is not configured for this Galaxy instance", - "danger", - ] + assert response == { + "messages": [ + [ + "An error occurred sending the report by email: Mail is not configured for this Galaxy instance", + "danger", ] - }, - ) + ] + } @test_util.skip_unless_galaxy("release_20.05") def test_show_job_lock(self): status = self.gi.jobs.show_job_lock() - self.assertFalse(status) + assert not status @test_util.skip_unless_galaxy("release_20.05") def test_update_job_lock(self): status = self.gi.jobs.update_job_lock(active=True) - self.assertTrue(status) + assert status status = self.gi.jobs.update_job_lock(active=False) - self.assertFalse(status) + assert not status @test_util.skip_unless_galaxy("release_18.01") def test_cancel_job(self): job_id = self._run_tool()["jobs"][0]["id"] - job_state = self.gi.jobs.show_job(job_id)["state"] - self.assertTrue(job_state, "deleted") + self.gi.jobs.cancel_job(job_id) + job = self.gi.jobs.wait_for_job(job_id, check=False) + assert job["state"] in ("deleted", "deleting") - def _run_tool(self, input_format: str = "legacy") -> dict: - tool_inputs = { - "num_lines": "1", - "input": {"src": "hda", "id": self.dataset_id}, - } - if input_format == "21.01": - tool_inputs.update({"seed_source": {"seed_source_selector": "set_seed", "seed": "asdf"}}) - else: - # legacy format - tool_inputs.update({"seed_source|seed_source_selector": "set_seed", "seed_source|seed": "asdf"}) - - return self.gi.tools.run_tool( - history_id=self.history_id, tool_id="random_lines1", tool_inputs=tool_inputs, input_format=input_format - ) + def _run_tool(self, input_format: Literal["21.01", "legacy"] = "legacy") -> dict: + return super()._run_random_lines1(self.history_id, self.dataset_id, input_format=input_format) diff -Nru python-bioblend-0.18.0/tests/TestGalaxyLibraries.py python-bioblend-1.0.0/tests/TestGalaxyLibraries.py --- python-bioblend-0.18.0/tests/TestGalaxyLibraries.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyLibraries.py 2022-10-13 18:17:37.000000000 +0000 @@ -22,12 +22,12 @@ self.gi.libraries.delete_library(self.library["id"]) def test_create_library(self): - self.assertEqual(self.library["name"], self.name) - self.assertIsNotNone(self.library["id"]) + assert self.library["name"] == self.name + assert self.library["id"] is not None def test_get_libraries(self): libraries_with_name = 
self.gi.libraries.get_libraries(name=self.name) - self.assertEqual(len([l for l in libraries_with_name if l["id"] == self.library["id"]]), 1) + assert len([l for l in libraries_with_name if l["id"] == self.library["id"]]) == 1 deleted_name = "deleted test library" deleted_library_id = self.gi.libraries.create_library( @@ -35,24 +35,24 @@ )["id"] self.gi.libraries.delete_library(deleted_library_id) deleted_libraries_with_name = self.gi.libraries.get_libraries(name=deleted_name, deleted=True) - self.assertEqual(len([l for l in deleted_libraries_with_name if l["id"] == deleted_library_id]), 1) + assert len([l for l in deleted_libraries_with_name if l["id"] == deleted_library_id]) == 1 all_non_deleted_libraries = self.gi.libraries.get_libraries(deleted=False) - self.assertEqual(len([l for l in all_non_deleted_libraries if l["id"] == self.library["id"]]), 1) - self.assertEqual([l for l in all_non_deleted_libraries if l["id"] == deleted_library_id], []) + assert len([l for l in all_non_deleted_libraries if l["id"] == self.library["id"]]) == 1 + assert [l for l in all_non_deleted_libraries if l["id"] == deleted_library_id] == [] all_deleted_libraries = self.gi.libraries.get_libraries(deleted=True) - self.assertEqual([l for l in all_deleted_libraries if l["id"] == self.library["id"]], []) - self.assertEqual(len([l for l in all_deleted_libraries if l["id"] == deleted_library_id]), 1) + assert [l for l in all_deleted_libraries if l["id"] == self.library["id"]] == [] + assert len([l for l in all_deleted_libraries if l["id"] == deleted_library_id]) == 1 all_libraries = self.gi.libraries.get_libraries(deleted=None) - self.assertEqual(len([l for l in all_libraries if l["id"] == self.library["id"]]), 1) - self.assertEqual(len([l for l in all_libraries if l["id"] == deleted_library_id]), 1) + assert len([l for l in all_libraries if l["id"] == self.library["id"]]) == 1 + assert len([l for l in all_libraries if l["id"] == deleted_library_id]) == 1 def test_show_library(self): library_data = self.gi.libraries.show_library(self.library["id"]) - self.assertEqual(self.library["id"], library_data["id"]) - self.assertEqual(self.library["name"], library_data["name"]) + assert self.library["id"] == library_data["id"] + assert self.library["name"] == library_data["name"] def test_upload_file_from_url(self): self.gi.libraries.upload_file_from_url( @@ -83,13 +83,13 @@ ret = self.gi.libraries.upload_from_galaxy_filesystem(self.library["id"], filesystem_paths) for dataset_dict in ret: dataset = self.gi.libraries.wait_for_dataset(self.library["id"], dataset_dict["id"]) - self.assertEqual(dataset["state"], "ok") + assert dataset["state"] == "ok" ret = self.gi.libraries.upload_from_galaxy_filesystem( self.library["id"], filesystem_paths, link_data_only="link_to_files" ) for dataset_dict in ret: dataset = self.gi.libraries.wait_for_dataset(self.library["id"], dataset_dict["id"]) - self.assertEqual(dataset["state"], "ok") + assert dataset["state"] == "ok" finally: shutil.rmtree(tempdir) @@ -104,8 +104,8 @@ updated_dataset = self.gi.libraries.update_library_dataset( dataset1[0]["id"], name="Modified name", misc_info="Modified the name succesfully" ) - self.assertEqual(updated_dataset["name"], "Modified name") - self.assertEqual(updated_dataset["misc_info"], "Modified the name succesfully") + assert updated_dataset["name"] == "Modified name" + assert updated_dataset["misc_info"] == "Modified the name succesfully" def test_library_permissions(self): current_user = self.gi.users.get_current_user() @@ -118,10 +118,10 @@ 
manage_in=user_id_list_new, ) ret = self.gi.libraries.get_library_permissions(self.library["id"]) - self.assertEqual({_[1] for _ in ret["access_library_role_list"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["modify_library_role_list"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["add_library_item_role_list"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["manage_library_role_list"]}, set(user_id_list_new)) + assert {_[1] for _ in ret["access_library_role_list"]} == set(user_id_list_new) + assert {_[1] for _ in ret["modify_library_role_list"]} == set(user_id_list_new) + assert {_[1] for _ in ret["add_library_item_role_list"]} == set(user_id_list_new) + assert {_[1] for _ in ret["manage_library_role_list"]} == set(user_id_list_new) def test_dataset_permissions(self): current_user = self.gi.users.get_current_user() @@ -131,28 +131,28 @@ ret = self.gi.libraries.set_dataset_permissions( dataset1[0]["id"], access_in=user_id_list_new, modify_in=user_id_list_new, manage_in=user_id_list_new ) - self.assertEqual({_[1] for _ in ret["access_dataset_roles"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["modify_item_roles"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret["manage_dataset_roles"]}, set(user_id_list_new)) + assert {_[1] for _ in ret["access_dataset_roles"]} == set(user_id_list_new) + assert {_[1] for _ in ret["modify_item_roles"]} == set(user_id_list_new) + assert {_[1] for _ in ret["manage_dataset_roles"]} == set(user_id_list_new) # test get_dataset_permissions ret_get = self.gi.libraries.get_dataset_permissions(dataset1[0]["id"]) - self.assertEqual({_[1] for _ in ret_get["access_dataset_roles"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret_get["modify_item_roles"]}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret_get["manage_dataset_roles"]}, set(user_id_list_new)) + assert {_[1] for _ in ret_get["access_dataset_roles"]} == set(user_id_list_new) + assert {_[1] for _ in ret_get["modify_item_roles"]} == set(user_id_list_new) + assert {_[1] for _ in ret_get["manage_dataset_roles"]} == set(user_id_list_new) @test_util.skip_unless_galaxy("release_19.09") def test_upload_file_contents_with_tags(self): datasets = self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA, tags=["name:foobar", "barfoo"]) dataset_show = self.gi.libraries.show_dataset(self.library["id"], datasets[0]["id"]) - self.assertEqual(dataset_show["tags"], "name:foobar, barfoo") + assert dataset_show["tags"] == "name:foobar, barfoo" @test_util.skip_unless_galaxy("release_19.09") def test_update_dataset_tags(self): datasets = self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA) dataset_show = self.gi.libraries.show_dataset(self.library["id"], datasets[0]["id"]) - self.assertEqual(dataset_show["tags"], "") + assert dataset_show["tags"] == "" updated_dataset = self.gi.libraries.update_library_dataset(datasets[0]["id"], tags=["name:foobar", "barfoo"]) dataset_show = self.gi.libraries.show_dataset(self.library["id"], updated_dataset["id"]) - self.assertEqual(dataset_show["tags"], "name:foobar, barfoo") + assert dataset_show["tags"] == "name:foobar, barfoo" diff -Nru python-bioblend-0.18.0/tests/TestGalaxyObjects.py python-bioblend-1.0.0/tests/TestGalaxyObjects.py --- python-bioblend-0.18.0/tests/TestGalaxyObjects.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyObjects.py 2022-10-13 18:17:37.000000000 +0000 @@ -9,9 +9,23 @@ import unittest import uuid from 
ssl import SSLError +from typing import ( + Any, + Callable, + Collection, + Dict, + Iterable, + List, + Set, + Tuple, + Union, +) from urllib.error import URLError from urllib.request import urlopen +import pytest +from typing_extensions import Literal + import bioblend from bioblend.galaxy import dataset_collections from bioblend.galaxy.objects import ( @@ -74,7 +88,7 @@ "tags": [], "url": "/api/workflows/9005c5112febe774", } -SAMPLE_INV_DICT = { +SAMPLE_INV_DICT: Dict[str, Any] = { "history_id": "2f94e8ae9edff68a", "id": "df7a1f0c02a5b08e", "inputs": {"0": {"id": "a7db2fac67043c7e", "src": "hda", "uuid": "7932ffe0-2340-4952-8857-dbaa50f1f46a"}}, @@ -112,7 +126,7 @@ } -def is_reachable(url): +def is_reachable(url: str) -> bool: res = None try: res = urlopen(url, timeout=5) @@ -123,7 +137,9 @@ return True -def upload_from_fs(lib, bnames, **kwargs): +def upload_from_fs( + lib: wrappers.Library, bnames: Iterable[str], **kwargs: Any +) -> Tuple[List[wrappers.LibraryDataset], List[str]]: tempdir = tempfile.mkdtemp(prefix="bioblend_test_") try: fnames = [os.path.join(tempdir, _) for _ in bnames] @@ -138,53 +154,55 @@ class MockWrapper(wrappers.Wrapper): BASE_ATTRS = ("a", "b") + a: int + b: List[int] - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) class TestWrapper(unittest.TestCase): def setUp(self): - self.d = {"a": 1, "b": [2, 3], "c": {"x": 4}} - with self.assertRaises(TypeError): + self.d: Dict[str, Any] = {"a": 1, "b": [2, 3], "c": {"x": 4}} + with pytest.raises(TypeError): wrappers.Wrapper(self.d) self.w = MockWrapper(self.d) def test_initialize(self): for k in MockWrapper.BASE_ATTRS: - self.assertEqual(getattr(self.w, k), self.d[k]) + assert getattr(self.w, k) == self.d[k] self.w.a = 222 self.w.b[0] = 222 - self.assertEqual(self.w.a, 222) - self.assertEqual(self.w.b[0], 222) - self.assertEqual(self.d["a"], 1) - self.assertEqual(self.d["b"][0], 2) - with self.assertRaises(AttributeError): - self.w.foo - with self.assertRaises(AttributeError): - self.w.foo = 0 + assert self.w.a == 222 + assert self.w.b[0] == 222 + assert self.d["a"] == 1 + assert self.d["b"][0] == 2 + with pytest.raises(AttributeError): + self.w.foo # type: ignore[attr-defined] + with pytest.raises(AttributeError): + self.w.foo = 0 # type: ignore[assignment] def test_taint(self): - self.assertFalse(self.w.is_modified) + assert not self.w.is_modified self.w.a = 111 # pylint: disable=W0201 - self.assertTrue(self.w.is_modified) + assert self.w.is_modified def test_serialize(self): w = MockWrapper.from_json(self.w.to_json()) - self.assertEqual(w.wrapped, self.w.wrapped) + assert w.wrapped == self.w.wrapped def test_clone(self): w = self.w.clone() - self.assertEqual(w.wrapped, self.w.wrapped) + assert w.wrapped == self.w.wrapped w.b[0] = 111 - self.assertEqual(self.w.b[0], 2) + assert self.w.b[0] == 2 def test_kwargs(self): parent = MockWrapper({"a": 10}) w = MockWrapper(self.d, parent=parent) - self.assertIs(w.parent, parent) - with self.assertRaises(AttributeError): - w.parent = 0 + assert w.parent is parent + with pytest.raises(AttributeError): + w.parent = 0 # type: ignore[assignment,misc] @test_util.skip_unless_galaxy() @@ -192,7 +210,7 @@ gi: galaxy_instance.GalaxyInstance @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"] galaxy_url = os.environ["BIOBLEND_GALAXY_URL"] cls.gi = galaxy_instance.GalaxyInstance(galaxy_url, galaxy_key) @@ -203,48 +221,49 @@ self.wf = 
wrappers.Workflow(SAMPLE_WF_DICT) def test_initialize(self): - self.assertEqual(self.wf.id, "9005c5112febe774") - self.assertEqual(self.wf.name, "paste_columns") - self.assertEqual(self.wf.deleted, False) - self.assertEqual(self.wf.owner, "user_foo") - self.assertEqual(self.wf.published, False) - self.assertEqual(self.wf.tags, []) - self.assertEqual(self.wf.input_labels_to_ids, {"Input Dataset": {"571", "572"}}) - self.assertEqual(self.wf.tool_labels_to_ids, {"Paste1": {"573"}}) - self.assertEqual(self.wf.data_input_ids, {"571", "572"}) - self.assertEqual(self.wf.source_ids, {"571", "572"}) - self.assertEqual(self.wf.sink_ids, {"573"}) + assert self.wf.id == "9005c5112febe774" + assert self.wf.name == "paste_columns" + assert not self.wf.deleted + assert self.wf.owner == "user_foo" + assert not self.wf.published + assert self.wf.tags == [] + assert self.wf.input_labels_to_ids == {"Input Dataset": {"571", "572"}} + assert self.wf.tool_labels_to_ids == {"Paste1": {"573"}} + assert self.wf.data_input_ids == {"571", "572"} + assert self.wf.source_ids == {"571", "572"} + assert self.wf.sink_ids == {"573"} def test_dag(self): - inv_dag = {} + inv_dag: Dict[str, Set[str]] = {} for h, tails in self.wf.dag.items(): for t in tails: inv_dag.setdefault(str(t), set()).add(h) - self.assertEqual(self.wf.inv_dag, inv_dag) + assert self.wf.inv_dag == inv_dag heads = set(self.wf.dag) - self.assertEqual(heads, set.union(*self.wf.inv_dag.values())) + assert heads == set.union(*self.wf.inv_dag.values()) tails = set(self.wf.inv_dag) - self.assertEqual(tails, set.union(*self.wf.dag.values())) + assert tails == set.union(*self.wf.dag.values()) ids = self.wf.sorted_step_ids() - self.assertEqual(set(ids), heads | tails) + assert set(ids) == heads | tails for h, tails in self.wf.dag.items(): for t in tails: - self.assertLess(ids.index(h), ids.index(t)) + assert ids.index(h) < ids.index(t) def test_steps(self): steps = SAMPLE_WF_DICT["steps"] + assert isinstance(steps, dict) for sid, s in self.wf.steps.items(): - self.assertIsInstance(s, wrappers.Step) - self.assertEqual(s.id, sid) - self.assertIn(sid, steps) - self.assertIs(s.parent, self.wf) - self.assertEqual(self.wf.data_input_ids, {"571", "572"}) - self.assertEqual(self.wf.tool_ids, {"573"}) + assert isinstance(s, wrappers.Step) + assert s.id == sid + assert sid in steps + assert s.parent is self.wf + assert self.wf.data_input_ids == {"571", "572"} + assert self.wf.tool_ids == {"573"} def test_taint(self): - self.assertFalse(self.wf.is_modified) + assert not self.wf.is_modified self.wf.steps["571"].tool_id = "foo" - self.assertTrue(self.wf.is_modified) + assert self.wf.is_modified def test_input_map(self): history = wrappers.History({}, gi=self.gi) @@ -252,20 +271,23 @@ hda = wrappers.HistoryDatasetAssociation({"id": "hda_id"}, container=history, gi=self.gi) ldda = wrappers.LibraryDatasetDatasetAssociation({"id": "ldda_id"}, container=library, gi=self.gi) input_map = self.wf._convert_input_map({"0": hda, "1": ldda, "2": {"id": "hda2_id", "src": "hda"}}) - self.assertEqual( - input_map, - { - "0": {"id": "hda_id", "src": "hda"}, - "1": {"id": "ldda_id", "src": "ldda"}, - "2": {"id": "hda2_id", "src": "hda"}, - }, - ) + assert input_map == { + "0": {"id": "hda_id", "src": "hda"}, + "1": {"id": "ldda_id", "src": "ldda"}, + "2": {"id": "hda2_id", "src": "hda"}, + } @test_util.skip_unless_galaxy("release_19.09") class TestInvocation(GalaxyObjectsTestBase): + dataset: wrappers.HistoryDatasetAssociation + history: wrappers.History + inv: wrappers.Invocation + workflow: 
wrappers.Workflow + workflow_pause: wrappers.Workflow + @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: super().setUpClass() cls.inv = wrappers.Invocation(SAMPLE_INV_DICT, gi=cls.gi) with open(SAMPLE_FN) as f: @@ -281,63 +303,64 @@ cls.history.delete(purge=True) def test_initialize(self): - self.assertEqual(self.inv.workflow_id, "03501d7626bd192f") - self.assertEqual(self.inv.history_id, "2f94e8ae9edff68a") - self.assertEqual(self.inv.id, "df7a1f0c02a5b08e") - self.assertEqual(self.inv.state, "ready") - self.assertEqual(self.inv.update_time, "2015-10-31T22:00:26") - self.assertEqual(self.inv.uuid, "c8aa2b1c-801a-11e5-a9e5-8ca98228593c") + assert self.inv.workflow_id == "03501d7626bd192f" + assert self.inv.history_id == "2f94e8ae9edff68a" + assert self.inv.id == "df7a1f0c02a5b08e" + assert self.inv.state == "ready" + assert self.inv.update_time == "2015-10-31T22:00:26" + assert self.inv.uuid == "c8aa2b1c-801a-11e5-a9e5-8ca98228593c" def test_initialize_steps(self): for step, step_dict in zip(self.inv.steps, SAMPLE_INV_DICT["steps"]): - self.assertIsInstance(step, wrappers.InvocationStep) - self.assertIs(step.parent, self.inv) - self.assertEqual(step.id, step_dict["id"]) - self.assertEqual(step.job_id, step_dict["job_id"]) - self.assertEqual(step.order_index, step_dict["order_index"]) - self.assertEqual(step.state, step_dict["state"]) - self.assertEqual(step.update_time, step_dict["update_time"]) - self.assertEqual(step.workflow_step_id, step_dict["workflow_step_id"]) - self.assertEqual(step.workflow_step_label, step_dict["workflow_step_label"]) - self.assertEqual(step.workflow_step_uuid, step_dict["workflow_step_uuid"]) + assert isinstance(step_dict, dict) + assert isinstance(step, wrappers.InvocationStep) + assert step.parent is self.inv + assert step.id == step_dict["id"] + assert step.job_id == step_dict["job_id"] + assert step.order_index == step_dict["order_index"] + assert step.state == step_dict["state"] + assert step.update_time == step_dict["update_time"] + assert step.workflow_step_id == step_dict["workflow_step_id"] + assert step.workflow_step_label == step_dict["workflow_step_label"] + assert step.workflow_step_uuid == step_dict["workflow_step_uuid"] def test_initialize_inputs(self): for i, input in enumerate(self.inv.inputs): - self.assertEqual(input, {**SAMPLE_INV_DICT["inputs"][str(i)], "label": str(i)}) + assert input == {**SAMPLE_INV_DICT["inputs"][str(i)], "label": str(i)} def test_sorted_step_ids(self): - self.assertListEqual(self.inv.sorted_step_ids(), ["d413a19dec13d11e", "2f94e8ae9edff68a"]) + assert self.inv.sorted_step_ids() == ["d413a19dec13d11e", "2f94e8ae9edff68a"] def test_step_states(self): - self.assertSetEqual(self.inv.step_states(), {None, "new"}) + assert self.inv.step_states() == {None, "new"} def test_number_of_steps(self): - self.assertEqual(self.inv.number_of_steps(), 2) + assert self.inv.number_of_steps() == 2 def test_sorted_steps_by(self): - self.assertEqual(len(self.inv.sorted_steps_by()), 2) + assert len(self.inv.sorted_steps_by()) == 2 steps = self.inv.sorted_steps_by(step_ids={"2f94e8ae9edff68a"}) - self.assertEqual(len(steps), 1) - self.assertEqual(steps[0].id, "2f94e8ae9edff68a") - self.assertListEqual(self.inv.sorted_steps_by(step_ids={"unmatched_id"}), []) + assert len(steps) == 1 + assert steps[0].id == "2f94e8ae9edff68a" + assert self.inv.sorted_steps_by(step_ids={"unmatched_id"}) == [] steps = self.inv.sorted_steps_by(states={"new"}) - self.assertEqual(len(steps), 1) - self.assertEqual(steps[0].state, "new") - 
self.assertListEqual(self.inv.sorted_steps_by(states={"unmatched_state"}), []) + assert len(steps) == 1 + assert steps[0].state == "new" + assert self.inv.sorted_steps_by(states={"unmatched_state"}) == [] steps = self.inv.sorted_steps_by(indices={0}, states={None, "new"}) - self.assertEqual(len(steps), 1) - self.assertEqual(steps[0].order_index, 0) - self.assertListEqual(self.inv.sorted_steps_by(indices={2}), []) + assert len(steps) == 1 + assert steps[0].order_index == 0 + assert self.inv.sorted_steps_by(indices={2}) == [] def test_cancel(self): inv = self._obj_invoke_workflow() inv.cancel() - self.assertEqual(inv.state, "cancelled") + assert inv.state == "cancelled" def test_wait(self): inv = self._obj_invoke_workflow() inv.wait() - self.assertEqual(inv.state, "scheduled") + assert inv.state == "scheduled" def test_refresh(self): inv = self._obj_invoke_workflow() @@ -345,7 +368,7 @@ # use wait_for_invocation() directly, because inv.wait() will update inv automatically self.gi.gi.invocations.wait_for_invocation(inv.id) inv.refresh() - self.assertEqual(inv.state, "scheduled") + assert inv.state == "scheduled" def test_run_step_actions(self): inv = self.workflow_pause.invoke( @@ -353,27 +376,27 @@ history=self.history, ) for _ in range(20): - with self.assertRaises(bioblend.TimeoutException): + with pytest.raises(bioblend.TimeoutException): inv.wait(maxwait=0.5, interval=0.5) inv.refresh() if len(inv.steps) >= 3: break - self.assertEqual(inv.steps[2].action, None) + assert inv.steps[2].action is None inv.run_step_actions([inv.steps[2]], [True]) - self.assertEqual(inv.steps[2].action, True) + assert inv.steps[2].action is True def test_summary(self): inv = self._obj_invoke_workflow() inv.wait() summary = inv.summary() - self.assertEqual(summary["populated_state"], "ok") + assert summary["populated_state"] == "ok" def test_step_jobs_summary(self): inv = self._obj_invoke_workflow() inv.wait() step_jobs_summary = inv.step_jobs_summary() - self.assertEqual(len(step_jobs_summary), 1) - self.assertEqual(step_jobs_summary[0]["populated_state"], "ok") + assert len(step_jobs_summary) == 1 + assert step_jobs_summary[0]["populated_state"] == "ok" def test_report(self): inv = self._obj_invoke_workflow() @@ -385,9 +408,9 @@ inv = self._obj_invoke_workflow() inv.wait() biocompute_object = inv.biocompute_object() - self.assertEqual(len(biocompute_object["description_domain"]["pipeline_steps"]), 1) + assert len(biocompute_object["description_domain"]["pipeline_steps"]) == 1 - def _obj_invoke_workflow(self): + def _obj_invoke_workflow(self) -> wrappers.Invocation: return self.workflow.invoke( inputs={"Input 1": self.dataset, "Input 2": self.dataset}, history=self.history, @@ -397,8 +420,12 @@ @test_util.skip_unless_galaxy("release_19.09") class TestObjInvocationClient(GalaxyObjectsTestBase): + history: wrappers.History + inv: wrappers.Invocation + workflow: wrappers.Workflow + @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: super().setUpClass() with open(SAMPLE_FN) as f: cls.workflow = cls.gi.workflows.import_new(f.read()) @@ -417,36 +444,36 @@ def test_get(self): inv = self.gi.invocations.get(self.inv.id) - self.assertEqual(inv.id, self.inv.id) - self.assertEqual(inv.workflow_id, self.workflow.id) - self.assertEqual(inv.history_id, self.history.id) - self.assertEqual(inv.state, "scheduled") - self.assertEqual(inv.update_time, self.inv.update_time) - self.assertEqual(inv.uuid, self.inv.uuid) + assert inv.id == self.inv.id + assert inv.workflow_id == self.workflow.id + assert inv.history_id 
== self.history.id + assert inv.state == "scheduled" + assert inv.update_time == self.inv.update_time + assert inv.uuid == self.inv.uuid def test_get_previews(self): previews = self.gi.invocations.get_previews() - self.assertSetEqual({type(preview) for preview in previews}, {wrappers.InvocationPreview}) + assert {type(preview) for preview in previews} == {wrappers.InvocationPreview} inv_preview = next(p for p in previews if p.id == self.inv.id) - self.assertEqual(inv_preview.id, self.inv.id) - self.assertEqual(inv_preview.workflow_id, self.workflow.id) - self.assertEqual(inv_preview.history_id, self.history.id) - self.assertEqual(inv_preview.state, "scheduled") - self.assertEqual(inv_preview.update_time, self.inv.update_time) - self.assertEqual(inv_preview.uuid, self.inv.uuid) + assert inv_preview.id == self.inv.id + assert inv_preview.workflow_id == self.workflow.id + assert inv_preview.history_id == self.history.id + assert inv_preview.state == "scheduled" + assert inv_preview.update_time == self.inv.update_time + assert inv_preview.uuid == self.inv.uuid def test_list(self): invs = self.gi.invocations.list() inv = next(i for i in invs if i.id == self.inv.id) - self.assertEqual(inv.id, self.inv.id) - self.assertEqual(inv.workflow_id, self.workflow.id) - self.assertEqual(inv.history_id, self.history.id) - self.assertEqual(inv.state, "scheduled") - self.assertEqual(inv.update_time, self.inv.update_time) - self.assertEqual(inv.uuid, self.inv.uuid) - self.assertGreater(len(self.inv.steps), 0) + assert inv.id == self.inv.id + assert inv.workflow_id == self.workflow.id + assert inv.history_id == self.history.id + assert inv.state == "scheduled" + assert inv.update_time == self.inv.update_time + assert inv.uuid == self.inv.uuid + assert len(self.inv.steps) > 0 history = self.gi.histories.create(name="TestGalaxyObjInvocationClientList") - self.assertEqual(self.gi.invocations.list(history=history), []) + assert self.gi.invocations.list(history=history) == [] history.delete(purge=True) @@ -455,15 +482,15 @@ name = f"test_{uuid.uuid4().hex}" description, synopsis = "D", "S" lib = self.gi.libraries.create(name, description=description, synopsis=synopsis) - self.assertEqual(lib.name, name) - self.assertEqual(lib.description, description) - self.assertEqual(lib.synopsis, synopsis) - self.assertEqual(len(lib.content_infos), 1) # root folder - self.assertEqual(len(lib.folder_ids), 1) - self.assertEqual(len(lib.dataset_ids), 0) - self.assertIn(lib.id, [_.id for _ in self.gi.libraries.list()]) + assert lib.name == name + assert lib.description == description + assert lib.synopsis == synopsis + assert len(lib.content_infos) == 1 # root folder + assert len(lib.folder_ids) == 1 + assert len(lib.dataset_ids) == 0 + assert lib.id in [_.id for _ in self.gi.libraries.list()] lib.delete() - self.assertFalse(lib.is_mapped) + assert not lib.is_mapped def test_workflow_from_str(self): with open(SAMPLE_FN) as f: @@ -500,8 +527,8 @@ for k in "tool_inputs", "tool_version": wf_dict["steps"][id_][k] = None wf = wrappers.Workflow(wf_dict, gi=self.gi) - self.assertFalse(wf.is_runnable) - with self.assertRaises(RuntimeError): + assert not wf.is_runnable + with pytest.raises(RuntimeError): wf.invoke() wf.delete() @@ -509,31 +536,30 @@ with open(SAMPLE_FN) as f: wf1 = self.gi.workflows.import_new(f.read()) wf2 = self.gi.workflows.import_new(wf1.export()) - self.assertNotEqual(wf1.id, wf2.id) + assert wf1.id != wf2.id for wf in wf1, wf2: self._check_and_del_workflow(wf) - def _check_and_del_workflow(self, wf, 
check_is_public=False): + def _check_and_del_workflow(self, wf: wrappers.Workflow, check_is_public: bool = False) -> None: # Galaxy appends additional text to imported workflow names - self.assertTrue(wf.name.startswith("paste_columns")) - self.assertEqual(type(wf.owner), str) - self.assertEqual(len(wf.steps), 3) + assert wf.name.startswith("paste_columns") + assert len(wf.steps) == 3 for step_id, step in wf.steps.items(): - self.assertIsInstance(step, wrappers.Step) - self.assertEqual(step_id, step.id) - self.assertIsInstance(step.tool_inputs, dict) + assert isinstance(step, wrappers.Step) + assert step_id == step.id + assert isinstance(step.tool_inputs, dict) if step.type == "tool": - self.assertIsNotNone(step.tool_id) - self.assertIsNotNone(step.tool_version) - self.assertIsInstance(step.input_steps, dict) + assert step.tool_id is not None + assert step.tool_version is not None + assert isinstance(step.input_steps, dict) elif step.type in ("data_collection_input", "data_input"): - self.assertIsNone(step.tool_id) - self.assertIsNone(step.tool_version) - self.assertEqual(step.input_steps, {}) + assert step.tool_id is None + assert step.tool_version is None + assert step.input_steps == {} wf_ids = {_.id for _ in self.gi.workflows.list()} - self.assertIn(wf.id, wf_ids) + assert wf.id in wf_ids if check_is_public: - self.assertTrue(wf.published) + assert wf.published wf.delete() # not very accurate: @@ -545,7 +571,7 @@ if pub_only_ids: wf_id = pub_only_ids.pop() imported = self.gi.workflows.import_shared(wf_id) - self.assertIsInstance(imported, wrappers.Workflow) + assert isinstance(imported, wrappers.Workflow) imported.delete() else: self.skipTest("no published workflows, manually publish a workflow to run this test") @@ -559,9 +585,11 @@ def test_get_workflows(self): self._test_multi_get("workflows") - def _normalized_functions(self, obj_type): + def _normalized_functions( + self, obj_type: Literal["histories", "libraries", "workflows"] + ) -> Tuple[Callable, Dict[str, Any]]: if obj_type == "libraries": - create = self.gi.libraries.create + create: Callable = self.gi.libraries.create del_kwargs = {} elif obj_type == "histories": create = self.gi.histories.create @@ -577,31 +605,31 @@ del_kwargs = {} return create, del_kwargs - def _test_multi_get(self, obj_type): + def _test_multi_get(self, obj_type: Literal["histories", "libraries", "workflows"]) -> None: obj_gi_client = getattr(self.gi, obj_type) create, del_kwargs = self._normalized_functions(obj_type) - def ids(seq): + def ids(seq: Iterable[wrappers.Wrapper]) -> Set[str]: return {_.id for _ in seq} names = [f"test_{uuid.uuid4().hex}" for _ in range(2)] objs = [] try: objs = [create(_) for _ in names] - self.assertLessEqual(ids(objs), ids(obj_gi_client.list())) + assert ids(objs) <= ids(obj_gi_client.list()) if obj_type != "workflows": filtered = obj_gi_client.list(name=names[0]) - self.assertEqual(len(filtered), 1) - self.assertEqual(filtered[0].id, objs[0].id) + assert len(filtered) == 1 + assert filtered[0].id == objs[0].id del_id = objs[-1].id objs.pop().delete(**del_kwargs) - self.assertIn(del_id, ids(obj_gi_client.get_previews(deleted=True))) + assert del_id in ids(obj_gi_client.get_previews(deleted=True)) else: # Galaxy appends info strings to imported workflow names prev = obj_gi_client.get_previews()[0] filtered = obj_gi_client.list(name=prev.name) - self.assertEqual(len(filtered), 1) - self.assertEqual(filtered[0].id, prev.id) + assert len(filtered) == 1 + assert filtered[0].id == prev.id finally: for o in objs: 
o.delete(**del_kwargs) @@ -618,27 +646,27 @@ self._test_delete_by_name("workflows") self._test_delete_by_ambiguous_name("workflows") - def _test_delete_by_name(self, obj_type): + def _test_delete_by_name(self, obj_type: Literal["histories", "libraries", "workflows"]) -> None: obj_gi_client = getattr(self.gi, obj_type) create, del_kwargs = self._normalized_functions(obj_type) name = f"test_{uuid.uuid4().hex}" create(name) prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted] - self.assertEqual(len(prevs), 1) + assert len(prevs) == 1 del_kwargs["name"] = name obj_gi_client.delete(**del_kwargs) prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted] - self.assertEqual(len(prevs), 0) + assert len(prevs) == 0 - def _test_delete_by_ambiguous_name(self, obj_type): + def _test_delete_by_ambiguous_name(self, obj_type: Literal["histories", "libraries", "workflows"]) -> None: obj_gi_client = getattr(self.gi, obj_type) create, del_kwargs = self._normalized_functions(obj_type) name = f"test_{uuid.uuid4().hex}" objs = [create(name) for _ in range(2)] prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted] - self.assertEqual(len(prevs), len(objs)) + assert len(prevs) == len(objs) del_kwargs["name"] = name - with self.assertRaises(ValueError): + with pytest.raises(ValueError): obj_gi_client.delete(**del_kwargs) # Cleanup del del_kwargs["name"] @@ -660,33 +688,34 @@ def test_root_folder(self): r = self.lib.root_folder - self.assertIsNone(r.parent) + assert r.parent is None def test_folder(self): name = f"test_{uuid.uuid4().hex}" desc = "D" folder = self.lib.create_folder(name, description=desc) - self.assertEqual(folder.name, name) - self.assertEqual(folder.description, desc) - self.assertIs(folder.container, self.lib) - self.assertEqual(folder.parent.id, self.lib.root_folder.id) - self.assertEqual(len(self.lib.content_infos), 2) - self.assertEqual(len(self.lib.folder_ids), 2) - self.assertIn(folder.id, self.lib.folder_ids) + assert folder.name == name + assert folder.description == desc + assert folder.container is self.lib + assert folder.parent is not None + assert folder.parent.id == self.lib.root_folder.id + assert len(self.lib.content_infos) == 2 + assert len(self.lib.folder_ids) == 2 + assert folder.id in self.lib.folder_ids retrieved = self.lib.get_folder(folder.id) - self.assertEqual(folder.id, retrieved.id) + assert folder.id == retrieved.id - def _check_datasets(self, dss): - self.assertEqual(len(dss), len(self.lib.dataset_ids)) - self.assertEqual({_.id for _ in dss}, set(self.lib.dataset_ids)) + def _check_datasets(self, dss: Collection[wrappers.LibraryDataset]) -> None: + assert len(dss) == len(self.lib.dataset_ids) + assert {_.id for _ in dss} == set(self.lib.dataset_ids) for ds in dss: - self.assertIs(ds.container, self.lib) + assert ds.container is self.lib def test_dataset(self): folder = self.lib.create_folder(f"test_{uuid.uuid4().hex}") ds = self.lib.upload_data(FOO_DATA, folder=folder) - self.assertEqual(len(self.lib.content_infos), 3) - self.assertEqual(len(self.lib.folder_ids), 2) + assert len(self.lib.content_infos) == 3 + assert len(self.lib.folder_ids) == 2 self._check_datasets([ds]) def test_dataset_from_url(self): @@ -709,7 +738,7 @@ self._check_datasets(dss) dss, fnames = upload_from_fs(self.lib, bnames, link_data_only="link_to_files") for ds, fn in zip(dss, fnames): - self.assertEqual(ds.file_name, fn) + assert ds.file_name == fn def test_copy_from_dataset(self): hist = 
self.gi.histories.create(f"test_{uuid.uuid4().hex}") @@ -723,18 +752,18 @@ def test_get_dataset(self): ds = self.lib.upload_data(FOO_DATA) retrieved = self.lib.get_dataset(ds.id) - self.assertEqual(ds.id, retrieved.id) + assert ds.id == retrieved.id def test_get_datasets(self): bnames = [f"f{i}.txt" for i in range(2)] dss, _ = upload_from_fs(self.lib, bnames) retrieved = self.lib.get_datasets() - self.assertEqual(len(dss), len(retrieved)) - self.assertEqual({_.id for _ in dss}, {_.id for _ in retrieved}) + assert len(dss) == len(retrieved) + assert {_.id for _ in dss} == {_.id for _ in retrieved} name = f"/{bnames[0]}" selected = self.lib.get_datasets(name=name) - self.assertEqual(len(selected), 1) - self.assertEqual(selected[0].name, bnames[0]) + assert len(selected) == 1 + assert selected[0].name == bnames[0] class TestLDContents(GalaxyObjectsTestBase): @@ -749,36 +778,36 @@ def test_dataset_get_stream(self): for idx, c in enumerate(self.ds.get_stream(chunk_size=1)): - self.assertEqual(FOO_DATA[idx].encode(), c) + assert FOO_DATA[idx].encode() == c def test_dataset_peek(self): fetched_data = self.ds.peek(chunk_size=4) - self.assertEqual(FOO_DATA[0:4].encode(), fetched_data) + assert FOO_DATA[0:4].encode() == fetched_data def test_dataset_download(self): with tempfile.TemporaryFile() as f: self.ds.download(f) f.seek(0) - self.assertEqual(FOO_DATA.encode(), f.read()) + assert FOO_DATA.encode() == f.read() def test_dataset_get_contents(self): - self.assertEqual(FOO_DATA.encode(), self.ds.get_contents()) + assert FOO_DATA.encode() == self.ds.get_contents() def test_dataset_delete(self): self.ds.delete() # Cannot test this yet because the 'deleted' attribute is not exported # by the API at the moment - # self.assertTrue(self.ds.deleted) + # assert self.ds.deleted def test_dataset_update(self): new_name = f"test_{uuid.uuid4().hex}" new_misc_info = f"Annotation for {new_name}" new_genome_build = "hg19" updated_ldda = self.ds.update(name=new_name, misc_info=new_misc_info, genome_build=new_genome_build) - self.assertEqual(self.ds.id, updated_ldda.id) - self.assertEqual(self.ds.name, new_name) - self.assertEqual(self.ds.misc_info, new_misc_info) - self.assertEqual(self.ds.genome_build, new_genome_build) + assert self.ds.id == updated_ldda.id + assert self.ds.name == new_name + assert self.ds.misc_info == new_misc_info + assert self.ds.genome_build == new_genome_build class TestHistory(GalaxyObjectsTestBase): @@ -792,24 +821,24 @@ def test_create_delete(self): name = f"test_{uuid.uuid4().hex}" hist = self.gi.histories.create(name) - self.assertEqual(hist.name, name) + assert hist.name == name hist_id = hist.id - self.assertIn(hist_id, [_.id for _ in self.gi.histories.list()]) + assert hist_id in [_.id for _ in self.gi.histories.list()] hist.delete(purge=True) - self.assertFalse(hist.is_mapped) + assert not hist.is_mapped h = self.gi.histories.get(hist_id) - self.assertTrue(h.deleted) + assert h.deleted - def _check_dataset(self, hda): - self.assertIsInstance(hda, wrappers.HistoryDatasetAssociation) - self.assertIs(hda.container, self.hist) - self.assertEqual(len(self.hist.dataset_ids), 1) - self.assertEqual(self.hist.dataset_ids[0], hda.id) + def _check_dataset(self, hda: wrappers.HistoryDatasetAssociation) -> None: + assert isinstance(hda, wrappers.HistoryDatasetAssociation) + assert hda.container is self.hist + assert len(self.hist.dataset_ids) == 1 + assert self.hist.dataset_ids[0] == hda.id def test_import_dataset(self): lib = self.gi.libraries.create(f"test_{uuid.uuid4().hex}") lds = 
lib.upload_data(FOO_DATA) - self.assertEqual(len(self.hist.dataset_ids), 0) + assert len(self.hist.dataset_ids) == 0 hda = self.hist.import_dataset(lds) lib.delete() self._check_dataset(hda) @@ -828,7 +857,7 @@ def test_get_dataset(self): hda = self.hist.paste_content(FOO_DATA) retrieved = self.hist.get_dataset(hda.id) - self.assertEqual(hda.id, retrieved.id) + assert hda.id == retrieved.id def test_get_datasets(self): bnames = [f"f{i}.txt" for i in range(2)] @@ -837,21 +866,21 @@ hdas = [self.hist.import_dataset(_) for _ in lds] lib.delete() retrieved = self.hist.get_datasets() - self.assertEqual(len(hdas), len(retrieved)) - self.assertEqual({_.id for _ in hdas}, {_.id for _ in retrieved}) + assert len(hdas) == len(retrieved) + assert {_.id for _ in hdas} == {_.id for _ in retrieved} selected = self.hist.get_datasets(name=bnames[0]) - self.assertEqual(len(selected), 1) - self.assertEqual(selected[0].name, bnames[0]) + assert len(selected) == 1 + assert selected[0].name == bnames[0] def test_export_and_download(self): jeha_id = self.hist.export(wait=True, maxwait=60) - self.assertTrue(jeha_id) + assert jeha_id tempdir = tempfile.mkdtemp(prefix="bioblend_test_") temp_fn = os.path.join(tempdir, "export.tar.gz") try: with open(temp_fn, "wb") as fo: self.hist.download(jeha_id, fo) - self.assertTrue(tarfile.is_tarfile(temp_fn)) + assert tarfile.is_tarfile(temp_fn) finally: shutil.rmtree(tempdir) @@ -860,31 +889,31 @@ new_annotation = f"Annotation for {new_name}" new_tags = ["tag1", "tag2"] updated_hist = self.hist.update(name=new_name, annotation=new_annotation, tags=new_tags) - self.assertEqual(self.hist.id, updated_hist.id) - self.assertEqual(self.hist.name, new_name) - self.assertEqual(self.hist.annotation, new_annotation) - self.assertEqual(self.hist.tags, new_tags) + assert self.hist.id == updated_hist.id + assert self.hist.name == new_name + assert self.hist.annotation == new_annotation + assert self.hist.tags == new_tags updated_hist = self.hist.update(published=True) - self.assertEqual(self.hist.id, updated_hist.id) - self.assertTrue(self.hist.published) + assert self.hist.id == updated_hist.id + assert self.hist.published def test_create_dataset_collection(self): self._create_collection_description() hdca = self.hist.create_dataset_collection(self.collection_description) - self.assertIsInstance(hdca, wrappers.HistoryDatasetCollectionAssociation) - self.assertEqual(hdca.collection_type, "list") - self.assertIs(hdca.container, self.hist) - self.assertEqual(len(hdca.elements), 2) - self.assertEqual(self.dataset1.id, hdca.elements[0]["object"]["id"]) - self.assertEqual(self.dataset2.id, hdca.elements[1]["object"]["id"]) + assert isinstance(hdca, wrappers.HistoryDatasetCollectionAssociation) + assert hdca.collection_type == "list" + assert hdca.container is self.hist + assert len(hdca.elements) == 2 + assert self.dataset1.id == hdca.elements[0]["object"]["id"] + assert self.dataset2.id == hdca.elements[1]["object"]["id"] def test_delete_dataset_collection(self): self._create_collection_description() hdca = self.hist.create_dataset_collection(self.collection_description) hdca.delete() - self.assertTrue(hdca.deleted) + assert hdca.deleted - def _create_collection_description(self): + def _create_collection_description(self) -> None: self.dataset1 = self.hist.paste_content(FOO_DATA) self.dataset2 = self.hist.paste_content(FOO_DATA_2) self.collection_description = dataset_collections.CollectionDescription( @@ -907,40 +936,40 @@ def test_dataset_get_stream(self): for idx, c in 
enumerate(self.ds.get_stream(chunk_size=1)): - self.assertEqual(FOO_DATA[idx].encode(), c) + assert FOO_DATA[idx].encode() == c def test_dataset_peek(self): fetched_data = self.ds.peek(chunk_size=4) - self.assertEqual(FOO_DATA[0:4].encode(), fetched_data) + assert FOO_DATA[0:4].encode() == fetched_data def test_dataset_download(self): with tempfile.TemporaryFile() as f: self.ds.download(f) f.seek(0) - self.assertEqual(FOO_DATA.encode(), f.read()) + assert FOO_DATA.encode() == f.read() def test_dataset_get_contents(self): - self.assertEqual(FOO_DATA.encode(), self.ds.get_contents()) + assert FOO_DATA.encode() == self.ds.get_contents() def test_dataset_update(self): new_name = f"test_{uuid.uuid4().hex}" new_annotation = f"Annotation for {new_name}" new_genome_build = "hg19" updated_hda = self.ds.update(name=new_name, annotation=new_annotation, genome_build=new_genome_build) - self.assertEqual(self.ds.id, updated_hda.id) - self.assertEqual(self.ds.name, new_name) - self.assertEqual(self.ds.annotation, new_annotation) - self.assertEqual(self.ds.genome_build, new_genome_build) + assert self.ds.id == updated_hda.id + assert self.ds.name == new_name + assert self.ds.annotation == new_annotation + assert self.ds.genome_build == new_genome_build def test_dataset_delete(self): self.ds.delete() - self.assertTrue(self.ds.deleted) - self.assertFalse(self.ds.purged) + assert self.ds.deleted + assert not self.ds.purged def test_dataset_purge(self): self.ds.delete(purge=True) - self.assertTrue(self.ds.deleted) - self.assertTrue(self.ds.purged) + assert self.ds.deleted + assert self.ds.purged @test_util.skip_unless_galaxy("release_19.09") @@ -957,13 +986,13 @@ self.wf.delete() self.lib.delete() - def _test(self, existing_hist=False, params=False): + def _test(self, existing_hist: bool = False, pass_params: bool = False) -> None: hist_name = f"test_{uuid.uuid4().hex}" if existing_hist: - hist = self.gi.histories.create(hist_name) + hist: Union[str, wrappers.History] = self.gi.histories.create(hist_name) else: hist = hist_name - if params: + if pass_params: params = {"Paste1": {"delimiter": "U"}} sep = "_" # 'U' maps to '_' in the paste tool else: @@ -976,23 +1005,23 @@ inv.wait() last_step = inv.sorted_steps_by()[-1] out_ds = last_step.get_outputs()["out_file1"] - self.assertEqual(out_ds.container.id, out_hist.id) + assert out_ds.container.id == out_hist.id res = out_ds.get_contents() exp_rows = zip(*(_.splitlines() for _ in self.contents)) exp_res = ("\n".join(sep.join(t) for t in exp_rows) + "\n").encode() - self.assertEqual(res, exp_res) - if existing_hist: - self.assertEqual(out_hist.id, hist.id) + assert res == exp_res + if isinstance(hist, wrappers.History): # i.e. 
existing_hist == True + assert out_hist.id == hist.id out_hist.delete(purge=True) - def test_existing_history(self): + def test_existing_history(self) -> None: self._test(existing_hist=True) - def test_new_history(self): + def test_new_history(self) -> None: self._test(existing_hist=False) - def test_params(self): - self._test(params=True) + def test_params(self) -> None: + self._test(pass_params=True) @test_util.skip_unless_galaxy("release_19.09") @@ -1018,17 +1047,17 @@ ], ) dataset_collection = self.hist.create_dataset_collection(collection_description) - self.assertEqual(len(self.hist.content_infos), 3) + assert len(self.hist.content_infos) == 3 input_map = {"0": dataset_collection, "1": dataset1} inv = self.wf.invoke(input_map, history=self.hist) inv.wait() self.hist.refresh() - self.assertEqual(len(self.hist.content_infos), 6) + assert len(self.hist.content_infos) == 6 last_step = inv.sorted_steps_by()[-1] out_hdca = last_step.get_output_collections()["out_file1"] - self.assertEqual(out_hdca.collection_type, "list") - self.assertEqual(len(out_hdca.elements), 2) - self.assertEqual(out_hdca.container.id, self.hist.id) + assert out_hdca.collection_type == "list" + assert len(out_hdca.elements) == 2 + assert out_hdca.container.id == self.hist.id class TestJob(GalaxyObjectsTestBase): @@ -1036,36 +1065,9 @@ job_prevs = self.gi.jobs.get_previews() if len(job_prevs) > 0: job_prev = job_prevs[0] - self.assertIsInstance(job_prev, wrappers.JobPreview) + assert isinstance(job_prev, wrappers.JobPreview) job = self.gi.jobs.get(job_prev.id) - self.assertIsInstance(job, wrappers.Job) - self.assertEqual(job.id, job_prev.id) + assert isinstance(job, wrappers.Job) + assert job.id == job_prev.id for job in self.gi.jobs.list(): - self.assertIsInstance(job, wrappers.Job) - - -def suite(): - loader = unittest.TestLoader() - s = unittest.TestSuite() - s.addTests( - [ - loader.loadTestsFromTestCase(c) - for c in ( - TestWrapper, - TestWorkflow, - TestGalaxyInstance, - TestLibrary, - TestLDContents, - TestHistory, - TestHDAContents, - TestRunWorkflow, - ) - ] - ) - return s - - -if __name__ == "__main__": - tests = suite() - RUNNER = unittest.TextTestRunner(verbosity=2) - RUNNER.run(tests) + assert isinstance(job, wrappers.Job) diff -Nru python-bioblend-0.18.0/tests/TestGalaxyQuotas.py python-bioblend-1.0.0/tests/TestGalaxyQuotas.py --- python-bioblend-0.18.0/tests/TestGalaxyQuotas.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyQuotas.py 2022-10-13 18:17:37.000000000 +0000 @@ -18,14 +18,14 @@ def test_create_quota(self): quota = self.gi.quotas.show_quota(self.quota["id"]) - self.assertEqual(quota["name"], self.quota_name) - self.assertEqual(quota["bytes"], 107374182400) - self.assertEqual(quota["operation"], "=") - self.assertEqual(quota["description"], "testing") + assert quota["name"] == self.quota_name + assert quota["bytes"] == 107374182400 + assert quota["operation"] == "=" + assert quota["description"] == "testing" def test_get_quotas(self): quotas = self.gi.quotas.get_quotas() - self.assertIn(self.quota["id"], [quota["id"] for quota in quotas]) + assert self.quota["id"] in [quota["id"] for quota in quotas] def test_update_quota(self): response = self.gi.quotas.update_quota( @@ -36,17 +36,17 @@ operation="-", amount=".01 TB", ) - self.assertIn(f"""Quota '{self.quota_name}' has been renamed to '{self.quota_name}-new'""", response) + assert f"""Quota '{self.quota_name}' has been renamed to '{self.quota_name}-new'""" in response quota = 
self.gi.quotas.show_quota(self.quota["id"]) - self.assertEqual(quota["name"], self.quota_name + "-new") - self.assertEqual(quota["bytes"], 10995116277) - self.assertEqual(quota["operation"], "-") - self.assertEqual(quota["description"], "asdf") + assert quota["name"] == self.quota_name + "-new" + assert quota["bytes"] == 10995116277 + assert quota["operation"] == "-" + assert quota["description"] == "asdf" def test_delete_undelete_quota(self): self.gi.quotas.update_quota(self.quota["id"], default="no") response = self.gi.quotas.delete_quota(self.quota["id"]) - self.assertEqual(response, "Deleted 1 quotas: " + self.quota_name) + assert response == "Deleted 1 quotas: " + self.quota_name response = self.gi.quotas.undelete_quota(self.quota["id"]) - self.assertEqual(response, "Undeleted 1 quotas: " + self.quota_name) + assert response == "Undeleted 1 quotas: " + self.quota_name diff -Nru python-bioblend-0.18.0/tests/TestGalaxyRoles.py python-bioblend-1.0.0/tests/TestGalaxyRoles.py --- python-bioblend-0.18.0/tests/TestGalaxyRoles.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyRoles.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,7 +1,3 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" import uuid from . import GalaxyTestBase @@ -21,10 +17,10 @@ def test_get_roles(self): roles = self.gi.roles.get_roles() for role in roles: - self.assertIsNotNone(role["id"]) - self.assertIsNotNone(role["name"]) + assert role["id"] is not None + assert role["name"] is not None def test_create_role(self): - self.assertEqual(self.role["name"], self.name) - self.assertEqual(self.role["description"], self.description) - self.assertIsNotNone(self.role["id"]) + assert self.role["name"] == self.name + assert self.role["description"] == self.description + assert self.role["id"] is not None diff -Nru python-bioblend-0.18.0/tests/TestGalaxyToolData.py python-bioblend-1.0.0/tests/TestGalaxyToolData.py --- python-bioblend-0.18.0/tests/TestGalaxyToolData.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyToolData.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,7 +1,3 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" from . 
import GalaxyTestBase @@ -9,11 +5,11 @@ def test_get_data_tables(self): tables = self.gi.tool_data.get_data_tables() for table in tables: - self.assertIsNotNone(table["name"]) + assert table["name"] is not None def test_show_data_table(self): tables = self.gi.tool_data.get_data_tables() table = self.gi.tool_data.show_data_table(tables[0]["name"]) - self.assertIsNotNone(table["columns"]) - self.assertIsNotNone(table["fields"]) - self.assertIsNotNone(table["name"]) + assert table["columns"] is not None + assert table["fields"] is not None + assert table["name"] is not None diff -Nru python-bioblend-0.18.0/tests/TestGalaxyToolDependencies.py python-bioblend-1.0.0/tests/TestGalaxyToolDependencies.py --- python-bioblend-0.18.0/tests/TestGalaxyToolDependencies.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyToolDependencies.py 2022-10-13 18:17:37.000000000 +0000 @@ -11,20 +11,20 @@ @test_util.skip_unless_galaxy("release_20.01") def test_summarize_toolbox(self): toolbox_summary = self.gi.tool_dependencies.summarize_toolbox() - self.assertTrue(isinstance(toolbox_summary, list)) - self.assertGreater(len(toolbox_summary), 0) + assert isinstance(toolbox_summary, list) + assert len(toolbox_summary) > 0 toolbox_summary_by_tool = self.gi.tool_dependencies.summarize_toolbox(index_by="tools") - self.assertTrue(isinstance(toolbox_summary_by_tool, list)) - self.assertGreater(len(toolbox_summary_by_tool), 0) - self.assertTrue(isinstance(toolbox_summary_by_tool[0], dict)) - self.assertTrue("tool_ids" in toolbox_summary_by_tool[0]) - self.assertTrue(isinstance(toolbox_summary_by_tool[0]["tool_ids"], list)) + assert isinstance(toolbox_summary_by_tool, list) + assert len(toolbox_summary_by_tool) > 0 + assert isinstance(toolbox_summary_by_tool[0], dict) + assert "tool_ids" in toolbox_summary_by_tool[0] + assert isinstance(toolbox_summary_by_tool[0]["tool_ids"], list) tool_id = toolbox_summary_by_tool[0]["tool_ids"][0] toolbox_summary_select_tool_ids = self.gi.tool_dependencies.summarize_toolbox( index_by="tools", tool_ids=[tool_id] ) - self.assertTrue(isinstance(toolbox_summary_select_tool_ids, list)) - self.assertEqual(len(toolbox_summary_select_tool_ids), 1) - self.assertEqual(toolbox_summary_select_tool_ids[0]["tool_ids"][0], tool_id) + assert isinstance(toolbox_summary_select_tool_ids, list) + assert len(toolbox_summary_select_tool_ids) == 1 + assert toolbox_summary_select_tool_ids[0]["tool_ids"][0] == tool_id diff -Nru python-bioblend-0.18.0/tests/TestGalaxyTools.py python-bioblend-1.0.0/tests/TestGalaxyTools.py --- python-bioblend-0.18.0/tests/TestGalaxyTools.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyTools.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,6 +1,10 @@ """ """ import os +from typing import ( + Any, + Dict, +) from bioblend.galaxy.tools.inputs import ( conditional, @@ -18,37 +22,37 @@ def test_get_tools(self): # Test requires target Galaxy is configured with at least one tool. tools = self.gi.tools.get_tools() - self.assertGreater(len(tools), 0) - self.assertTrue(all(map(self._assert_is_tool_rep, tools))) + assert len(tools) > 0 + assert all(map(self._assert_is_tool_rep, tools)) def test_get_tool_panel(self): # Test requires target Galaxy is configured with at least one tool # section. 
tool_panel = self.gi.tools.get_tool_panel() sections = [s for s in tool_panel if "elems" in s] - self.assertGreater(len(sections), 0) - self.assertTrue(all(map(self._assert_is_tool_rep, sections[0]["elems"]))) + assert len(sections) > 0 + assert all(map(self._assert_is_tool_rep, sections[0]["elems"])) def _assert_is_tool_rep(self, data): - self.assertTrue(data["model_class"].endswith("Tool")) + assert data["model_class"].endswith("Tool") # Special tools like SetMetadataTool may have different model_class # than Tool - but they all seem to end in tool. for key in ["name", "id", "version"]: - self.assertIn(key, data) + assert key in data return True def test_paste_content(self): history = self.gi.histories.create_history(name="test_paste_data history") paste_text = "line 1\nline 2\rline 3\r\nline 4" tool_output = self.gi.tools.paste_content(paste_text, history["id"]) - self.assertEqual(len(tool_output["outputs"]), 1) + assert len(tool_output["outputs"]) == 1 # All lines in the resulting dataset should end with "\n" expected_contents = ("\n".join(paste_text.splitlines()) + "\n").encode() self._wait_and_verify_dataset(tool_output["outputs"][0]["id"], expected_contents) # Same with space_to_tab=True tool_output = self.gi.tools.paste_content(paste_text, history["id"], space_to_tab=True) - self.assertEqual(len(tool_output["outputs"]), 1) + assert len(tool_output["outputs"]) == 1 expected_contents = ("\n".join("\t".join(_.split()) for _ in paste_text.splitlines()) + "\n").encode() self._wait_and_verify_dataset(tool_output["outputs"][0]["id"], expected_contents) @@ -95,7 +99,7 @@ .set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf")) ) tool_output = self.gi.tools.run_tool(history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs) - self.assertEqual(len(tool_output["outputs"]), 1) + assert len(tool_output["outputs"]) == 1 # TODO: Wait for results and verify has 1 line and is # chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 + @@ -116,7 +120,7 @@ ) ) tool_output = self.gi.tools.run_tool(history_id=history_id, tool_id="cat1", tool_inputs=tool_inputs) - self.assertEqual(len(tool_output["outputs"]), 1) + assert len(tool_output["outputs"]) == 1 # TODO: Wait for results and verify it has 3 lines - 1 2 3, 4 5 6, # and 7 8 9. 
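
The ``random_lines1`` run exercised above can be reproduced outside the test suite with the same ``bioblend.galaxy.tools.inputs`` builder; in this sketch the server URL, API key and dataset id are placeholders::

    from bioblend.galaxy import GalaxyInstance
    from bioblend.galaxy.tools.inputs import conditional, dataset, inputs

    gi = GalaxyInstance("https://galaxy.example.org", key="...")   # placeholder server and key
    history_id = gi.histories.create_history(name="random_lines example")["id"]
    hda_id = "..."                                                  # id of an existing dataset in that history
    tool_inputs = (
        inputs()
        .set("num_lines", 1)
        .set("input", dataset(hda_id))
        .set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf"))
    )
    # Returns a dict whose "outputs" list describes the new dataset(s)
    tool_output = gi.tools.run_tool(history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs)
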
@@ -124,51 +128,45 @@ @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_tool_dependency_install(self): installed_dependencies = self.gi.tools.install_dependencies("CONVERTER_fasta_to_bowtie_color_index") - self.assertTrue( - any( - True - for d in installed_dependencies - if d.get("name") == "bowtie" and d.get("dependency_type") == "conda" - ), - f"installed_dependencies is {installed_dependencies}", - ) + assert any( + True for d in installed_dependencies if d.get("name") == "bowtie" and d.get("dependency_type") == "conda" + ), f"installed_dependencies is {installed_dependencies}" status = self.gi.tools.uninstall_dependencies("CONVERTER_fasta_to_bowtie_color_index") - self.assertEqual(status[0]["model_class"], "NullDependency", status) + assert status[0]["model_class"] == "NullDependency", status @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_tool_requirements(self): tool_requirements = self.gi.tools.requirements("CONVERTER_fasta_to_bowtie_color_index") - self.assertTrue( - any( - True - for tr in tool_requirements - if {"dependency_type", "version"} <= set(tr.keys()) and tr.get("name") == "bowtie" - ), - f"tool_requirements is {tool_requirements}", - ) + assert any( + True + for tr in tool_requirements + if {"dependency_type", "version"} <= set(tr.keys()) and tr.get("name") == "bowtie" + ), f"tool_requirements is {tool_requirements}" @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_reload(self): response = self.gi.tools.reload("CONVERTER_fasta_to_bowtie_color_index") - self.assertIsInstance(response, dict) - self.assertIn("message", response) - self.assertIn("id", response["message"]) + assert isinstance(response, dict) + assert "message" in response + assert "id" in response["message"] @test_util.skip_unless_tool("sra_source") def test_get_citations(self): citations = self.gi.tools.get_citations("sra_source") - self.assertEqual(len(citations), 2) + assert len(citations) == 2 - def _wait_for_and_verify_upload(self, tool_output, file_name, fn, expected_dbkey="?"): - self.assertEqual(len(tool_output["outputs"]), 1) + def _wait_for_and_verify_upload( + self, tool_output: Dict[str, Any], file_name: str, fn: str, expected_dbkey: str = "?" + ) -> None: + assert len(tool_output["outputs"]) == 1 output = tool_output["outputs"][0] - self.assertEqual(output["name"], file_name) + assert output["name"] == file_name expected_contents = open(fn, "rb").read() self._wait_and_verify_dataset(output["id"], expected_contents) - self.assertEqual(output["genome_build"], expected_dbkey) + assert output["genome_build"] == expected_dbkey @test_util.skip_unless_tool("random_lines1") def test_get_tool_model(self): history_id = self.gi.histories.create_history(name="test_run_random_lines history")["id"] tool_model = self.gi.tools.build(tool_id="random_lines1", history_id=history_id) - self.assertEqual(len(tool_model["inputs"]), 3) + assert len(tool_model["inputs"]) == 3 diff -Nru python-bioblend-0.18.0/tests/TestGalaxyUsers.py python-bioblend-1.0.0/tests/TestGalaxyUsers.py --- python-bioblend-0.18.0/tests/TestGalaxyUsers.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyUsers.py 2022-10-13 18:17:37.000000000 +0000 @@ -1,28 +1,27 @@ -""" -Tests the functionality of the Blend CloudMan API. These tests require working -credentials to supported cloud infrastructure. -""" import bioblend.galaxy -from . import GalaxyTestBase +from . 
import ( + GalaxyTestBase, + test_util, +) class TestGalaxyUsers(GalaxyTestBase.GalaxyTestBase): def test_get_users(self): users = self.gi.users.get_users() for user in users: - self.assertIsNotNone(user["id"]) - self.assertIsNotNone(user["email"]) + assert user["id"] is not None + assert user["email"] is not None def test_show_user(self): current_user = self.gi.users.get_current_user() user = self.gi.users.show_user(current_user["id"]) - self.assertEqual(user["id"], current_user["id"]) - self.assertEqual(user["username"], current_user["username"]) - self.assertEqual(user["email"], current_user["email"]) + assert user["id"] == current_user["id"] + assert user["username"] == current_user["username"] + assert user["email"] == current_user["email"] # The 2 following tests randomly fail - # self.assertEqual(user['nice_total_disk_usage'], current_user['nice_total_disk_usage']) - # self.assertEqual(user['total_disk_usage'], current_user['total_disk_usage']) + # assert user["nice_total_disk_usage"] == current_user["nice_total_disk_usage"] + # assert user["total_disk_usage"] == current_user["total_disk_usage"] def test_create_remote_user(self): # WARNING: only admins can create users! @@ -32,11 +31,11 @@ self.skipTest("This Galaxy instance is not configured to use remote users") new_user_email = "newuser@example.org" user = self.gi.users.create_remote_user(new_user_email) - self.assertEqual(user["email"], new_user_email) + assert user["email"] == new_user_email if self.gi.config.get_config()["allow_user_deletion"]: deleted_user = self.gi.users.delete_user(user["id"]) - self.assertEqual(deleted_user["email"], new_user_email) - self.assertTrue(deleted_user["deleted"]) + assert deleted_user["email"] == new_user_email + assert deleted_user["deleted"] def test_create_local_user(self): # WARNING: only admins can create users! @@ -48,24 +47,24 @@ username = "newuser" password = "secret" user = self.gi.users.create_local_user(username, new_user_email, password) - self.assertEqual(user["username"], username) - self.assertEqual(user["email"], new_user_email) + assert user["username"] == username + assert user["email"] == new_user_email # test a BioBlend GalaxyInstance can be created using username+password user_gi = bioblend.galaxy.GalaxyInstance(url=self.gi.base_url, email=new_user_email, password=password) - self.assertEqual(user_gi.users.get_current_user()["email"], new_user_email) + assert user_gi.users.get_current_user()["email"] == new_user_email # test deletion if self.gi.config.get_config()["allow_user_deletion"]: deleted_user = self.gi.users.delete_user(user["id"]) - self.assertEqual(deleted_user["email"], new_user_email) - self.assertTrue(deleted_user["deleted"]) + assert deleted_user["email"] == new_user_email + assert deleted_user["deleted"] def test_get_current_user(self): user = self.gi.users.get_current_user() - self.assertIsNotNone(user["id"]) - self.assertIsNotNone(user["username"]) - self.assertIsNotNone(user["email"]) - self.assertIsNotNone(user["nice_total_disk_usage"]) - self.assertIsNotNone(user["total_disk_usage"]) + assert user["id"] is not None + assert user["username"] is not None + assert user["email"] is not None + assert user["nice_total_disk_usage"] is not None + assert user["total_disk_usage"] is not None def test_update_user(self): # WARNING: only admins can create users! 
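
The user tests above rely on a ``GalaxyInstance`` being opened either with an API key or with the email and password of an existing account; a minimal sketch, with placeholder URL and credentials::

    import bioblend.galaxy

    # Authenticate with an API key
    gi = bioblend.galaxy.GalaxyInstance(url="https://galaxy.example.org", key="...")
    # ...or with email + password, as test_create_local_user does for the newly created account
    user_gi = bioblend.galaxy.GalaxyInstance(
        url="https://galaxy.example.org", email="newuser@example.org", password="secret"
    )
    print(user_gi.users.get_current_user()["email"])
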
@@ -75,16 +74,25 @@ self.skipTest("This Galaxy instance is not configured to use local users") new_user_email = "newuser2@example.org" user = self.gi.users.create_local_user("newuser2", new_user_email, "secret") - self.assertEqual(user["username"], "newuser2") - self.assertEqual(user["email"], new_user_email) + assert user["username"] == "newuser2" + assert user["email"] == new_user_email updated_user_email = "updateduser@example.org" updated_username = "updateduser" user_id = user["id"] self.gi.users.update_user(user_id, username=updated_username, email=updated_user_email) user = self.gi.users.show_user(user_id) - self.assertEqual(user["username"], updated_username) - self.assertEqual(user["email"], updated_user_email) + assert user["username"] == updated_username + assert user["email"] == updated_user_email if self.gi.config.get_config()["allow_user_deletion"]: self.gi.users.delete_user(user["id"]) + + def test_get_user_apikey(self): + user_id = self.gi.users.get_current_user()["id"] + assert self.gi.users.get_user_apikey(user_id) + + @test_util.skip_unless_galaxy("release_21.01") + def test_get_or_create_user_apikey(self): + user_id = self.gi.users.get_current_user()["id"] + assert self.gi.users.get_or_create_user_apikey(user_id) diff -Nru python-bioblend-0.18.0/tests/TestGalaxyWorkflows.py python-bioblend-1.0.0/tests/TestGalaxyWorkflows.py --- python-bioblend-0.18.0/tests/TestGalaxyWorkflows.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestGalaxyWorkflows.py 2022-10-13 18:17:37.000000000 +0000 @@ -3,6 +3,13 @@ import shutil import tempfile import time +from typing import ( + Any, + Dict, + List, +) + +import pytest from bioblend import ConnectionError from . import ( @@ -21,10 +28,10 @@ history_id = self.gi.histories.create_history(name="TestWorkflowState")["id"] invocations = self.gi.workflows.get_invocations(workflow_id) - self.assertEqual(len(invocations), 0) + assert len(invocations) == 0 # Try invalid invocation (no input) - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): self.gi.workflows.invoke_workflow(workflow["id"]) dataset1_id = self._test_dataset(history_id) @@ -32,13 +39,13 @@ workflow["id"], inputs={"0": {"src": "hda", "id": dataset1_id}}, ) - self.assertEqual(invocation["state"], "new") + assert invocation["state"] == "new" invocation_id = invocation["id"] invocations = self.gi.workflows.get_invocations(workflow_id) - self.assertEqual(len(invocations), 1) - self.assertEqual(invocations[0]["id"], invocation_id) + assert len(invocations) == 1 + assert invocations[0]["id"] == invocation_id - def invocation_steps_by_order_index(): + def invocation_steps_by_order_index() -> Dict[int, Dict[str, Any]]: invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) return {s["order_index"]: s for s in invocation["steps"]} @@ -48,15 +55,13 @@ time.sleep(0.5) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertEqual(invocation["state"], "ready") + assert invocation["state"] == "ready" steps = invocation_steps_by_order_index() pause_step = steps[2] - self.assertIsNone( - self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"] - ) + assert self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"] is None self.gi.workflows.run_invocation_step_action(workflow_id, invocation_id, pause_step["id"], action=True) - self.assertTrue(self.gi.workflows.show_invocation_step(workflow_id, invocation_id, 
pause_step["id"])["action"]) + assert self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"] for _ in range(20): invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) if invocation["state"] == "scheduled": @@ -65,7 +70,7 @@ time.sleep(0.5) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertEqual(invocation["state"], "scheduled") + assert invocation["state"] == "scheduled" @test_util.skip_unless_galaxy("release_19.01") def test_invoke_workflow_parameters_normalized(self): @@ -73,7 +78,7 @@ workflow_id = self.gi.workflows.import_workflow_from_local_path(path)["id"] history_id = self.gi.histories.create_history(name="TestWorkflowInvokeParametersNormalized")["id"] dataset_id = self._test_dataset(history_id) - with self.assertRaises(ConnectionError): + with pytest.raises(ConnectionError): self.gi.workflows.invoke_workflow( workflow_id, inputs={"0": {"src": "hda", "id": dataset_id}}, params={"1": {"1|2": "comma"}} ) @@ -94,7 +99,7 @@ dataset1_id = self._test_dataset(history_id) invocations = self.gi.workflows.get_invocations(workflow_id) - self.assertEqual(len(invocations), 0) + assert len(invocations) == 0 invocation = self.gi.workflows.invoke_workflow( workflow["id"], @@ -102,92 +107,92 @@ ) invocation_id = invocation["id"] invocations = self.gi.workflows.get_invocations(workflow_id) - self.assertEqual(len(invocations), 1) - self.assertEqual(invocations[0]["id"], invocation_id) + assert len(invocations) == 1 + assert invocations[0]["id"] == invocation_id invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertIn(invocation["state"], ["new", "ready"]) + assert invocation["state"] in ["new", "ready"] self.gi.workflows.cancel_invocation(workflow_id, invocation_id) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertEqual(invocation["state"], "cancelled") + assert invocation["state"] == "cancelled" def test_import_export_workflow_from_local_path(self): - with self.assertRaises(TypeError): - self.gi.workflows.import_workflow_from_local_path(None) + with pytest.raises(TypeError): + self.gi.workflows.import_workflow_from_local_path(None) # type: ignore[arg-type] path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) imported_wf = self.gi.workflows.import_workflow_from_local_path(path) - self.assertIsInstance(imported_wf, dict) - self.assertEqual(imported_wf["name"], "paste_columns") - self.assertTrue(imported_wf["url"].startswith("/api/workflows/")) - self.assertFalse(imported_wf["deleted"]) - self.assertFalse(imported_wf["published"]) - with self.assertRaises(TypeError): - self.gi.workflows.export_workflow_to_local_path(None, None, None) + assert isinstance(imported_wf, dict) + assert imported_wf["name"] == "paste_columns" + assert imported_wf["url"].startswith("/api/workflows/") + assert not imported_wf["deleted"] + assert not imported_wf["published"] + with pytest.raises(TypeError): + self.gi.workflows.export_workflow_to_local_path(None, None, None) # type: ignore[arg-type] export_dir = tempfile.mkdtemp(prefix="bioblend_test_") try: self.gi.workflows.export_workflow_to_local_path(imported_wf["id"], export_dir) dir_contents = os.listdir(export_dir) - self.assertEqual(len(dir_contents), 1) + assert len(dir_contents) == 1 export_path = os.path.join(export_dir, dir_contents[0]) with open(export_path) as f: exported_wf_dict = json.load(f) finally: shutil.rmtree(export_dir) - self.assertIsInstance(exported_wf_dict, dict) + 
assert isinstance(exported_wf_dict, dict) def test_import_publish_workflow_from_local_path(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) imported_wf = self.gi.workflows.import_workflow_from_local_path(path, publish=True) - self.assertIsInstance(imported_wf, dict) - self.assertFalse(imported_wf["deleted"]) - self.assertTrue(imported_wf["published"]) + assert isinstance(imported_wf, dict) + assert not imported_wf["deleted"] + assert imported_wf["published"] def test_import_export_workflow_dict(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) with open(path) as f: wf_dict = json.load(f) imported_wf = self.gi.workflows.import_workflow_dict(wf_dict) - self.assertIsInstance(imported_wf, dict) - self.assertEqual(imported_wf["name"], "paste_columns") - self.assertTrue(imported_wf["url"].startswith("/api/workflows/")) - self.assertFalse(imported_wf["deleted"]) - self.assertFalse(imported_wf["published"]) + assert isinstance(imported_wf, dict) + assert imported_wf["name"] == "paste_columns" + assert imported_wf["url"].startswith("/api/workflows/") + assert not imported_wf["deleted"] + assert not imported_wf["published"] exported_wf_dict = self.gi.workflows.export_workflow_dict(imported_wf["id"]) - self.assertIsInstance(exported_wf_dict, dict) + assert isinstance(exported_wf_dict, dict) def test_import_publish_workflow_dict(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) with open(path) as f: wf_dict = json.load(f) imported_wf = self.gi.workflows.import_workflow_dict(wf_dict, publish=True) - self.assertIsInstance(imported_wf, dict) - self.assertFalse(imported_wf["deleted"]) - self.assertTrue(imported_wf["published"]) + assert isinstance(imported_wf, dict) + assert not imported_wf["deleted"] + assert imported_wf["published"] def test_get_workflows(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) workflow = self.gi.workflows.import_workflow_from_local_path(path) all_wfs = self.gi.workflows.get_workflows() - self.assertGreater(len(all_wfs), 0) + assert len(all_wfs) > 0 wfs_with_name = self.gi.workflows.get_workflows(name=workflow["name"]) wf_list = [w for w in wfs_with_name if w["id"] == workflow["id"]] - self.assertEqual(len(wf_list), 1) + assert len(wf_list) == 1 wf_data = wf_list[0] if "create_time" in workflow: # Galaxy >= 20.01 - self.assertEqual(wf_data["create_time"], workflow["create_time"]) + assert wf_data["create_time"] == workflow["create_time"] else: # Galaxy <= 22.01 - self.assertEqual(wf_data["url"], workflow["url"]) + assert wf_data["url"] == workflow["url"] def test_show_workflow(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) wf_data = self.gi.workflows.show_workflow(wf["id"]) - self.assertEqual(wf_data["id"], wf["id"]) - self.assertEqual(wf_data["name"], wf["name"]) - self.assertEqual(wf_data["url"], wf["url"]) - self.assertEqual(len(wf_data["steps"]), 3) - self.assertIsNotNone(wf_data["inputs"]) + assert wf_data["id"] == wf["id"] + assert wf_data["name"] == wf["name"] + assert wf_data["url"] == wf["url"] + assert len(wf_data["steps"]) == 3 + assert wf_data["inputs"] is not None @test_util.skip_unless_galaxy("release_18.05") def test_update_workflow_name(self): @@ -195,17 +200,17 @@ wf = self.gi.workflows.import_workflow_from_local_path(path) new_name = "new name" updated_wf = self.gi.workflows.update_workflow(wf["id"], name=new_name) - self.assertEqual(updated_wf["name"], 
new_name) + assert updated_wf["name"] == new_name @test_util.skip_unless_galaxy("release_21.01") def test_update_workflow_published(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) - self.assertFalse(wf["published"]) + assert not wf["published"] updated_wf = self.gi.workflows.update_workflow(wf["id"], published=True) - self.assertTrue(updated_wf["published"]) + assert updated_wf["published"] updated_wf = self.gi.workflows.update_workflow(wf["id"], published=False) - self.assertFalse(updated_wf["published"]) + assert not updated_wf["published"] @test_util.skip_unless_galaxy( "release_19.09" @@ -214,18 +219,18 @@ path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) wf_data = self.gi.workflows.show_workflow(wf["id"]) - self.assertEqual(wf_data["version"], 0) + assert wf_data["version"] == 0 new_name = "new name" self.gi.workflows.update_workflow(wf["id"], name=new_name) updated_wf = self.gi.workflows.show_workflow(wf["id"]) - self.assertEqual(updated_wf["name"], new_name) - self.assertEqual(updated_wf["version"], 1) + assert updated_wf["name"] == new_name + assert updated_wf["version"] == 1 updated_wf = self.gi.workflows.show_workflow(wf["id"], version=0) - self.assertEqual(updated_wf["name"], "paste_columns") - self.assertEqual(updated_wf["version"], 0) + assert updated_wf["name"] == "paste_columns" + assert updated_wf["version"] == 0 updated_wf = self.gi.workflows.show_workflow(wf["id"], version=1) - self.assertEqual(updated_wf["name"], new_name) - self.assertEqual(updated_wf["version"], 1) + assert updated_wf["name"] == new_name + assert updated_wf["version"] == 1 @test_util.skip_unless_galaxy("release_19.09") def test_extract_workflow_from_history(self): @@ -257,34 +262,34 @@ dataset_hids=dataset_hids, ) wf2 = self.gi.workflows.show_workflow(wf2["id"]) - self.assertEqual(wf2["name"], new_workflow_name) - self.assertEqual(len(wf1["steps"]), len(wf2["steps"])) + assert wf2["name"] == new_workflow_name + assert len(wf1["steps"]) == len(wf2["steps"]) for i in range(len(wf1["steps"])): - self.assertEqual(wf1["steps"][str(i)]["type"], wf2["steps"][str(i)]["type"]) - self.assertEqual(wf1["steps"][str(i)]["tool_id"], wf2["steps"][str(i)]["tool_id"]) + assert wf1["steps"][str(i)]["type"] == wf2["steps"][str(i)]["type"] + assert wf1["steps"][str(i)]["tool_id"] == wf2["steps"][str(i)]["tool_id"] @test_util.skip_unless_galaxy("release_18.09") def test_show_versions(self): path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) versions = self.gi.workflows.show_versions(wf["id"]) - self.assertEqual(len(versions), 1) + assert len(versions) == 1 version = versions[0] - self.assertEqual(version["version"], 0) - self.assertTrue("update_time" in version) - self.assertTrue("steps" in version) + assert version["version"] == 0 + assert "update_time" in version + assert "steps" in version @test_util.skip_unless_galaxy("release_21.01") def test_refactor_workflow(self): - actions = [ + actions: List[Dict[str, Any]] = [ {"action_type": "add_input", "type": "data", "label": "foo"}, {"action_type": "update_step_label", "label": "bar", "step": {"label": "foo"}}, ] path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) response = self.gi.workflows.refactor_workflow(wf["id"], actions, dry_run=True) - 
self.assertEqual(len(response["action_executions"]), len(actions)) - self.assertEqual(response["dry_run"], True) + assert len(response["action_executions"]) == len(actions) + assert response["dry_run"] is True updated_steps = response["workflow"]["steps"] - self.assertEqual(len(updated_steps), 4) - self.assertEqual({step["label"] for step in updated_steps.values()}, {"bar", None, "Input 1", "Input 2"}) + assert len(updated_steps) == 4 + assert {step["label"] for step in updated_steps.values()} == {"bar", None, "Input 1", "Input 2"} diff -Nru python-bioblend-0.18.0/tests/TestToolshed.py python-bioblend-1.0.0/tests/TestToolshed.py --- python-bioblend-0.18.0/tests/TestToolshed.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/TestToolshed.py 2022-10-13 18:17:37.000000000 +0000 @@ -9,42 +9,47 @@ @test_util.skip_unless_toolshed() class TestToolshed(unittest.TestCase): def setUp(self): - toolshed_url = os.environ.get("BIOBLEND_TOOLSHED_URL", "https://testtoolshed.g2.bx.psu.edu/") + toolshed_url = os.environ["BIOBLEND_TOOLSHED_URL"] self.ts = bioblend.toolshed.ToolShedInstance(url=toolshed_url) def test_categories_client(self): # get_categories categories = self.ts.categories.get_categories() - self.assertIn("Assembly", [c["name"] for c in categories]) + assert "Assembly" in [c["name"] for c in categories] # we cannot test get_categories with deleted=True as it requires administrator status # show_category visualization_category_id = [c for c in categories if c["name"] == "Visualization"][0]["id"] visualization_category = self.ts.categories.show_category(visualization_category_id) - self.assertEqual(visualization_category["description"], "Tools for visualizing data") + assert visualization_category["description"] == "Tools for visualizing data" # get_repositories repositories = self.ts.categories.get_repositories(visualization_category_id) repositories_reversed = self.ts.categories.get_repositories(visualization_category_id, sort_order="desc") - self.assertEqual(repositories["repositories"][0]["model_class"], "Repository") - self.assertGreater(len(repositories["repositories"]), 200) - self.assertEqual(repositories["repositories"][0], repositories_reversed["repositories"][-1]) + assert repositories["repositories"][0]["model_class"] == "Repository" + assert len(repositories["repositories"]) > 200 + assert repositories["repositories"][0] == repositories_reversed["repositories"][-1] def test_repositories_client(self): # get_repositories repositories = self.ts.repositories.get_repositories() - self.assertGreater(len(repositories), 5000) - self.assertEqual(repositories[0]["model_class"], "Repository") + assert len(repositories) > 5000 + assert repositories[0]["model_class"] == "Repository" + + repositories = self.ts.repositories.get_repositories(name="bam_to_sam", owner="devteam") + assert len(repositories) == 1 + bam_to_sam_repo = repositories[0] + assert bam_to_sam_repo["name"] == "bam_to_sam" + assert bam_to_sam_repo["owner"] == "devteam" # search_repositories samtools_search = self.ts.repositories.search_repositories("samtools", page_size=5) - self.assertGreater(int(samtools_search["total_results"]), 20) - self.assertEqual(len(samtools_search["hits"]), 5) + assert int(samtools_search["total_results"]) > 20 + assert len(samtools_search["hits"]) == 5 # show_repository - bam_to_sam_repo = [r for r in repositories if r["name"] == "bam_to_sam"][0] show_bam_to_sam_repo = self.ts.repositories.show_repository(bam_to_sam_repo["id"]) - self.assertIn("SAM", 
show_bam_to_sam_repo["long_description"]) + assert "SAM" in show_bam_to_sam_repo["long_description"] # test_create_repository # need to provide an API key to test this @@ -55,19 +60,19 @@ def test_repositories_revisions(self): # get_ordered_installable_revisions bam_to_sam_revisions = self.ts.repositories.get_ordered_installable_revisions("bam_to_sam", "devteam") - self.assertGreaterEqual(len(bam_to_sam_revisions), 4) + assert len(bam_to_sam_revisions) >= 4 # get_repository_revision_install_info bam_to_sam_revision_install_info = self.ts.repositories.get_repository_revision_install_info( "bam_to_sam", "devteam", bam_to_sam_revisions[0] ) - self.assertEqual(len(bam_to_sam_revision_install_info), 3) - self.assertEqual(bam_to_sam_revision_install_info[0].get("model_class"), "Repository") - self.assertEqual(bam_to_sam_revision_install_info[1].get("model_class"), "RepositoryMetadata") - self.assertEqual(bam_to_sam_revision_install_info[2].get("model_class"), None) + assert len(bam_to_sam_revision_install_info) == 3 + assert bam_to_sam_revision_install_info[0].get("model_class") == "Repository" + assert bam_to_sam_revision_install_info[1].get("model_class") == "RepositoryMetadata" + assert bam_to_sam_revision_install_info[2].get("model_class") is None def test_tools_client(self): # search_tools samtools_search = self.ts.tools.search_tools("samtools", page_size=5) - self.assertGreater(int(samtools_search["total_results"]), 2000) - self.assertEqual(len(samtools_search["hits"]), 5) + assert int(samtools_search["total_results"]) > 2000 + assert len(samtools_search["hits"]) == 5 diff -Nru python-bioblend-0.18.0/tests/test_util.py python-bioblend-1.0.0/tests/test_util.py --- python-bioblend-0.18.0/tests/test_util.py 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tests/test_util.py 2022-10-13 18:17:37.000000000 +0000 @@ -4,38 +4,28 @@ import random import string import unittest - -import bioblend - -NO_CLOUDMAN_MESSAGE = "CloudMan required and no CloudMan AMI configured." -NO_GALAXY_MESSAGE = "Externally configured Galaxy required, but not found. Set BIOBLEND_GALAXY_URL and BIOBLEND_GALAXY_API_KEY to run this test." -NO_TOOLSHED_MESSAGE = ( - "Externally configured ToolShed required, but not found. Set BIOBLEND_TOOLSHED_URL to run this test." +from typing import ( + Callable, + Optional, ) -OLD_GALAXY_RELEASE = "Testing on Galaxy %s, but need %s to run this test." -MISSING_TOOL_MESSAGE = "Externally configured Galaxy instance requires tool %s to run test." +import bioblend.galaxy -def skip_unless_cloudman(): - """Decorate tests with this to skip the test if CloudMan is not - configured. - """ - if "BIOBLEND_AMI_ID" not in os.environ: - return unittest.skip(NO_CLOUDMAN_MESSAGE) - else: - return lambda f: f +NO_GALAXY_MESSAGE = "Externally configured Galaxy required, but not found. Set BIOBLEND_GALAXY_URL and BIOBLEND_GALAXY_API_KEY to run this test." -def skip_unless_toolshed(): +def skip_unless_toolshed() -> Callable: """Decorate tests with this to skip the test if a URL for a ToolShed to run the tests is not provided. """ if "BIOBLEND_TOOLSHED_URL" not in os.environ: - return unittest.skip(NO_TOOLSHED_MESSAGE) + return unittest.skip( + "Externally configured ToolShed required, but not found. Set BIOBLEND_TOOLSHED_URL (e.g. to https://testtoolshed.g2.bx.psu.edu/ ) to run this test." 
+ ) return lambda f: f -def skip_unless_galaxy(min_release=None): +def skip_unless_galaxy(min_release: Optional[str] = None) -> Callable: """Decorate tests with this to skip the test if Galaxy is not configured. """ @@ -47,7 +37,7 @@ if not min_release.startswith("release_"): raise Exception("min_release should start with 'release_'") if galaxy_release[8:] < min_release[8:]: - return unittest.skip(OLD_GALAXY_RELEASE % (galaxy_release, min_release)) + return unittest.skip(f"Testing on Galaxy {galaxy_release}, but need {min_release} to run this test.") if "BIOBLEND_GALAXY_URL" not in os.environ: return unittest.skip(NO_GALAXY_MESSAGE) @@ -71,24 +61,24 @@ galaxy_user_id = user["id"] break + config = gi.config.get_config() if galaxy_user_id is None: - try: - config = gi.config.get_config() - except Exception: - # If older Galaxy for instance just assume use_remote_user is False. - config = {} - if config.get("use_remote_user", False): new_user = gi.users.create_remote_user(galaxy_user_email) else: galaxy_user = galaxy_user_email.split("@", 1)[0] galaxy_password = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(20)) - # Create a new user and get a new API key for her + # Create a new user new_user = gi.users.create_local_user(galaxy_user, galaxy_user_email, galaxy_password) galaxy_user_id = new_user["id"] - api_key = gi.users.create_user_apikey(galaxy_user_id) + if config["version_major"] >= "21.01": + api_key = gi.users.get_or_create_user_apikey(galaxy_user_id) + else: + api_key = gi.users.get_user_apikey(galaxy_user_id) + if not api_key or api_key == "Not available.": + api_key = gi.users.create_user_apikey(galaxy_user_id) os.environ["BIOBLEND_GALAXY_API_KEY"] = api_key if "BIOBLEND_GALAXY_API_KEY" not in os.environ: @@ -97,7 +87,7 @@ return lambda f: f -def skip_unless_tool(tool_id): +def skip_unless_tool(tool_id: str) -> Callable: """Decorate a Galaxy test method as requiring a specific tool, skip the test case if the tool is unavailable. """ @@ -108,11 +98,11 @@ # In panels by default, so flatten out sections... tool_ids = [_["id"] for _ in tools] if tool_id not in tool_ids: - raise unittest.SkipTest(MISSING_TOOL_MESSAGE % tool_id) + raise unittest.SkipTest(f"Externally configured Galaxy instance requires tool {tool_id} to run test.") return method(has_gi, *args, **kwargs) - # Must preserve method name so nose can detect and report tests by + # Must preserve method name so pytest can detect and report tests by # name. wrapped_method.__name__ = method.__name__ return wrapped_method @@ -120,5 +110,5 @@ return method_wrapper -def get_abspath(path): +def get_abspath(path: str) -> str: return os.path.abspath(os.path.join(os.path.dirname(__file__), path)) diff -Nru python-bioblend-0.18.0/tox.ini python-bioblend-1.0.0/tox.ini --- python-bioblend-0.18.0/tox.ini 2022-07-07 19:03:35.000000000 +0000 +++ python-bioblend-1.0.0/tox.ini 2022-10-13 18:17:37.000000000 +0000 @@ -12,8 +12,8 @@ [testenv:lint] commands = flake8 . - lint: black --check --diff . - lint: isort --check --diff . + black --check --diff . + isort --check --diff . mypy bioblend/ deps = black @@ -22,7 +22,5 @@ flake8-sfs isort mypy - types-boto - types-PyYAML types-requests -skip_install = True +skip_install = true
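
The API-key bootstrapping added to ``tests/test_util.py`` amounts to the version-gated pattern below, which may also be useful when scripting against Galaxy servers of mixed releases; the server URL and admin key are placeholders::

    import bioblend.galaxy

    gi = bioblend.galaxy.GalaxyInstance(url="https://galaxy.example.org", key="...")  # admin API key
    user_id = gi.users.get_current_user()["id"]
    if gi.config.get_config()["version_major"] >= "21.01":
        # Galaxy 21.01+ can return the existing key or create one in a single call
        api_key = gi.users.get_or_create_user_apikey(user_id)
    else:
        # Older releases: read the current key, creating one only if none is set
        api_key = gi.users.get_user_apikey(user_id)
        if not api_key or api_key == "Not available.":
            api_key = gi.users.create_user_apikey(user_id)
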