diff -Nru python-saharaclient-0.10.1/debian/changelog python-saharaclient-0.11.0/debian/changelog
--- python-saharaclient-0.10.1/debian/changelog	2015-09-07 12:23:54.000000000 +0000
+++ python-saharaclient-0.11.0/debian/changelog	2015-09-28 07:54:32.000000000 +0000
@@ -1,3 +1,17 @@
+python-saharaclient (0.11.0-1~ubuntu15.10.1~ppa201509280854) wily; urgency=medium
+
+  * No-change backport to wily
+
+ -- James Page  Mon, 28 Sep 2015 08:54:32 +0100
+
+python-saharaclient (0.11.0-1) experimental; urgency=medium
+
+  * New upstream release.
+  * d/control: Align dependencies and versions with upstream.
+  * d/control: Update uploaders.
+
+ -- Corey Bryant  Thu, 24 Sep 2015 14:36:59 -0400
+
 python-saharaclient (0.10.1-1) experimental; urgency=medium
 
   * New upstream release.
diff -Nru python-saharaclient-0.10.1/debian/control python-saharaclient-0.11.0/debian/control
--- python-saharaclient-0.10.1/debian/control	2015-09-07 12:23:54.000000000 +0000
+++ python-saharaclient-0.11.0/debian/control	2015-09-26 15:52:05.000000000 +0000
@@ -2,7 +2,8 @@
 Section: python
 Priority: extra
 Maintainer: PKG OpenStack
-Uploaders: Thomas Goirand
+Uploaders: Thomas Goirand ,
+           Corey Bryant ,
 Build-Depends: debhelper (>= 9),
                dh-python,
                openstack-pkg-tools,
@@ -11,6 +12,7 @@
                python-setuptools,
                python-sphinx,
 Build-Depends-Indep: python-babel,
+                     python-cliff (>= 1.14.0),
                      python-coverage,
                      python-hacking (>= 0.10.0),
                      python-keystoneclient (>= 1:1.6.0),
@@ -18,9 +20,10 @@
                      python-netaddr (>= 0.7.12),
                      python-neutronclient (>= 1:2.6.0),
                      python-novaclient (>= 2:2.26.0),
-                     python-oslo.config (>= 1:1.11.0),
+                     python-openstackclient (>= 1.5.0),
+                     python-oslo.config (>= 1:2.3.0),
                      python-oslo.i18n (>= 1.5.0),
-                     python-oslo.utils (>= 1.9.0),
+                     python-oslo.utils (>= 2.0.0),
                      python-oslosphinx (>= 2.5.0),
                      python-prettytable (>= 0.7),
                      python-requests (>= 2.5.2),
@@ -38,10 +41,12 @@
 Package: python-saharaclient
 Architecture: all
 Depends: python-babel (>= 1.3),
+         python-cliff (>= 1.14.0),
          python-keystoneclient (>= 1:1.6.0),
          python-netaddr (>= 0.7.12),
+         python-openstackclient (>= 1.5.0),
          python-oslo.i18n (>= 1.5.0),
-         python-oslo.utils (>= 1.9.0),
+         python-oslo.utils (>= 2.0.0),
          python-pbr (>= 1.6),
          python-prettytable (>= 0.7),
          python-requests (>= 2.5.2),
diff -Nru python-saharaclient-0.10.1/requirements.txt python-saharaclient-0.11.0/requirements.txt
--- python-saharaclient-0.10.1/requirements.txt	2015-08-19 09:57:38.000000000 +0000
+++ python-saharaclient-0.11.0/requirements.txt	2015-08-31 15:36:20.000000000 +0000
@@ -2,13 +2,15 @@
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.
-pbr<2.0,>=1.4 +pbr<2.0,>=1.6 Babel>=1.3 -netaddr>=0.7.12 +cliff>=1.14.0 # Apache-2.0 +netaddr!=0.7.16,>=0.7.12 oslo.i18n>=1.5.0 # Apache-2.0 -oslo.utils>=1.9.0 # Apache-2.0 +oslo.utils>=2.0.0 # Apache-2.0 python-keystoneclient>=1.6.0 +python-openstackclient>=1.5.0 requests>=2.5.2 six>=1.9.0 PrettyTable<0.8,>=0.7 diff -Nru python-saharaclient-0.10.1/saharaclient/api/base.py python-saharaclient-0.11.0/saharaclient/api/base.py --- python-saharaclient-0.10.1/saharaclient/api/base.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/base.py 2015-08-31 15:36:20.000000000 +0000 @@ -119,6 +119,24 @@ data = get_json(resp)[response_key] else: data = get_json(resp) + + return self.resource_class(self, data) + + def _patch(self, url, data, response_key=None, dump_json=True): + if dump_json: + kwargs = {'json': data} + else: + kwargs = {'data': data} + + resp = self.api.patch(url, **kwargs) + + if resp.status_code != 202: + self._raise_api_exception(resp) + if response_key is not None: + data = get_json(resp)[response_key] + else: + data = get_json(resp) + return self.resource_class(self, data) def _list(self, url, response_key): @@ -187,7 +205,7 @@ def get_query_string(search_opts): if search_opts: qparams = sorted(search_opts.items(), key=lambda x: x[0]) - query_string = "?%s" % parse.urlencode(qparams) + query_string = "?%s" % parse.urlencode(qparams, doseq=True) else: query_string = "" return query_string diff -Nru python-saharaclient-0.10.1/saharaclient/api/clusters.py python-saharaclient-0.11.0/saharaclient/api/clusters.py --- python-saharaclient-0.10.1/saharaclient/api/clusters.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/clusters.py 2015-08-31 15:36:20.000000000 +0000 @@ -30,7 +30,8 @@ is_transient=None, description=None, cluster_configs=None, node_groups=None, user_keypair_id=None, anti_affinity=None, net_id=None, count=None, - use_autoconfig=None, shares=None): + use_autoconfig=None, shares=None, + is_public=None, is_protected=None): data = { 'name': name, @@ -56,7 +57,9 @@ neutron_management_network=net_id, count=count, use_autoconfig=use_autoconfig, - shares=shares) + shares=shares, + is_public=is_public, + is_protected=is_protected) if count: return self._create('/clusters/multiple', data) @@ -79,3 +82,12 @@ def delete(self, cluster_id): self._delete('/clusters/%s' % cluster_id) + + def update(self, cluster_id, name=None, description=None, is_public=None, + is_protected=None): + + data = {} + self._copy_if_defined(data, name=name, description=description, + is_public=is_public, is_protected=is_protected) + + return self._patch('/clusters/%s' % cluster_id, data) diff -Nru python-saharaclient-0.10.1/saharaclient/api/cluster_templates.py python-saharaclient-0.11.0/saharaclient/api/cluster_templates.py --- python-saharaclient-0.10.1/saharaclient/api/cluster_templates.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/cluster_templates.py 2015-08-31 15:36:20.000000000 +0000 @@ -26,7 +26,8 @@ def _assign_field(self, name, plugin_name, hadoop_version, description=None, cluster_configs=None, node_groups=None, anti_affinity=None, net_id=None, default_image_id=None, - use_autoconfig=None, shares=None): + use_autoconfig=None, shares=None, is_public=None, + is_protected=None): data = { 'name': name, 'plugin_name': plugin_name, @@ -41,28 +42,33 @@ neutron_management_network=net_id, default_image_id=default_image_id, use_autoconfig=use_autoconfig, - shares=shares) + shares=shares, + 
is_public=is_public, + is_protected=is_protected) return data def create(self, name, plugin_name, hadoop_version, description=None, cluster_configs=None, node_groups=None, anti_affinity=None, net_id=None, default_image_id=None, use_autoconfig=None, - shares=None): + shares=None, is_public=None, is_protected=None): data = self._assign_field(name, plugin_name, hadoop_version, description, cluster_configs, node_groups, anti_affinity, net_id, default_image_id, - use_autoconfig, shares) + use_autoconfig, shares, is_public, + is_protected) return self._create('/cluster-templates', data, 'cluster_template') def update(self, cluster_template_id, name, plugin_name, hadoop_version, description=None, cluster_configs=None, node_groups=None, anti_affinity=None, net_id=None, default_image_id=None, - use_autoconfig=None, shares=None): + use_autoconfig=None, shares=None, is_public=None, + is_protected=None): data = self._assign_field(name, plugin_name, hadoop_version, description, cluster_configs, node_groups, anti_affinity, net_id, default_image_id, - use_autoconfig, shares) + use_autoconfig, shares, is_public, + is_protected) return self._update('/cluster-templates/%s' % cluster_template_id, data, 'cluster_template') diff -Nru python-saharaclient-0.10.1/saharaclient/api/data_sources.py python-saharaclient-0.11.0/saharaclient/api/data_sources.py --- python-saharaclient-0.10.1/saharaclient/api/data_sources.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/data_sources.py 2015-08-31 15:36:20.000000000 +0000 @@ -24,7 +24,8 @@ resource_class = DataSources def create(self, name, description, data_source_type, - url, credential_user=None, credential_pass=None): + url, credential_user=None, credential_pass=None, + is_public=None, is_protected=None): data = { 'name': name, 'description': description, @@ -35,6 +36,10 @@ self._copy_if_defined(data['credentials'], user=credential_user, password=credential_pass) + + self._copy_if_defined(data, is_public=is_public, + is_protected=is_protected) + return self._create('/data-sources', data, 'data_source') def list(self, search_opts=None): diff -Nru python-saharaclient-0.10.1/saharaclient/api/job_binaries.py python-saharaclient-0.11.0/saharaclient/api/job_binaries.py --- python-saharaclient-0.10.1/saharaclient/api/job_binaries.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/job_binaries.py 2015-08-31 15:36:20.000000000 +0000 @@ -23,7 +23,8 @@ class JobBinariesManager(base.ResourceManager): resource_class = JobBinaries - def create(self, name, url, description, extra): + def create(self, name, url, description, extra, is_public=None, + is_protected=None): data = { "name": name, "url": url, @@ -31,6 +32,9 @@ "extra": extra } + self._copy_if_defined(data, is_public=is_public, + is_protected=is_protected) + return self._create('/job-binaries', data, 'job_binary') def list(self, search_opts=None): diff -Nru python-saharaclient-0.10.1/saharaclient/api/job_binary_internals.py python-saharaclient-0.11.0/saharaclient/api/job_binary_internals.py --- python-saharaclient-0.10.1/saharaclient/api/job_binary_internals.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/job_binary_internals.py 2015-08-31 15:36:20.000000000 +0000 @@ -40,3 +40,12 @@ def delete(self, job_binary_id): self._delete('/job-binary-internals/%s' % job_binary_id) + + def update(self, job_binary_id, name=None, is_public=None, + is_protected=None): + + data = {} + self._copy_if_defined(data, name=name, 
is_public=is_public, + is_protected=is_protected) + + return self._patch('/job-binary-internals/%s' % job_binary_id, data) diff -Nru python-saharaclient-0.10.1/saharaclient/api/job_executions.py python-saharaclient-0.11.0/saharaclient/api/job_executions.py --- python-saharaclient-0.10.1/saharaclient/api/job_executions.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/job_executions.py 2015-08-31 15:36:20.000000000 +0000 @@ -34,7 +34,9 @@ self._delete('/job-executions/%s' % obj_id) def create(self, job_id, cluster_id, input_id, - output_id, configs, interface=None): + output_id, configs, interface=None, is_public=None, + is_protected=None): + url = "/jobs/%s/execute" % job_id data = { "cluster_id": cluster_id, @@ -52,4 +54,14 @@ if value is not None: data.update({key: value}) + self._copy_if_defined(data, is_public=is_public, + is_protected=is_protected) + return self._create(url, data, 'job_execution') + + def update(self, obj_id, is_public=None, is_protected=None): + + data = {} + self._copy_if_defined(data, is_public=is_public, + is_protected=is_protected) + return self._patch('/job-executions/%s' % obj_id, data) diff -Nru python-saharaclient-0.10.1/saharaclient/api/jobs.py python-saharaclient-0.11.0/saharaclient/api/jobs.py --- python-saharaclient-0.10.1/saharaclient/api/jobs.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/jobs.py 2015-08-31 15:36:20.000000000 +0000 @@ -49,3 +49,12 @@ def delete(self, job_id): self._delete('/jobs/%s' % job_id) + + def update(self, job_id, name=None, description=None, is_public=None, + is_protected=None): + + data = {} + self._copy_if_defined(data, name=name, description=description, + is_public=is_public, is_protected=is_protected) + + return self._patch('/jobs/%s' % job_id, data) diff -Nru python-saharaclient-0.10.1/saharaclient/api/node_group_templates.py python-saharaclient-0.11.0/saharaclient/api/node_group_templates.py --- python-saharaclient-0.10.1/saharaclient/api/node_group_templates.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/node_group_templates.py 2015-08-31 15:36:20.000000000 +0000 @@ -31,7 +31,7 @@ availability_zone=None, volumes_availability_zone=None, volume_type=None, image_id=None, is_proxy_gateway=None, volume_local_to_instance=None, use_autoconfig=None, - shares=None): + shares=None, is_public=None, is_protected=None): data = { 'name': name, @@ -51,7 +51,10 @@ image_id=image_id, is_proxy_gateway=is_proxy_gateway, use_autoconfig=use_autoconfig, - shares=shares) + shares=shares, + is_public=is_public, + is_protected=is_protected + ) if volumes_per_node: data.update({"volumes_per_node": volumes_per_node, @@ -74,7 +77,7 @@ availability_zone=None, volumes_availability_zone=None, volume_type=None, image_id=None, is_proxy_gateway=None, volume_local_to_instance=None, use_autoconfig=None, - shares=None): + shares=None, is_public=None, is_protected=None): data = self._assign_field(name, plugin_name, hadoop_version, flavor_id, description, volumes_per_node, volumes_size, @@ -84,7 +87,7 @@ volumes_availability_zone, volume_type, image_id, is_proxy_gateway, volume_local_to_instance, use_autoconfig, - shares) + shares, is_public, is_protected) return self._create('/node-group-templates', data, 'node_group_template') @@ -97,7 +100,7 @@ volumes_availability_zone=None, volume_type=None, image_id=None, is_proxy_gateway=None, volume_local_to_instance=None, use_autoconfig=None, - shares=None): + shares=None, is_public=None, 
is_protected=None): data = self._assign_field(name, plugin_name, hadoop_version, flavor_id, description, volumes_per_node, volumes_size, @@ -107,7 +110,7 @@ volumes_availability_zone, volume_type, image_id, is_proxy_gateway, volume_local_to_instance, use_autoconfig, - shares) + shares, is_public, is_protected) return self._update('/node-group-templates/%s' % ng_template_id, data, 'node_group_template') diff -Nru python-saharaclient-0.10.1/saharaclient/api/shell.py python-saharaclient-0.11.0/saharaclient/api/shell.py --- python-saharaclient-0.10.1/saharaclient/api/shell.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/api/shell.py 2015-08-31 15:36:20.000000000 +0000 @@ -320,7 +320,7 @@ @utils.arg('--count', default=1, type=int, - help='Number of clusters to be created.') + help='Number of clusters to create.') def do_cluster_create(cs, args): """Create a cluster.""" # TODO(mattf): improve template validation, e.g. template w/o name key @@ -346,7 +346,7 @@ type=argparse.FileType('r'), help='JSON representation of cluster scale.') def do_cluster_scale(cs, args): - """Scale a cluster """ + """Scale a cluster.""" cluster_id = args.id or _get_by_id_or_name(cs.clusters, name=args.name).id scale_template = json.loads(args.json.read()) _show_cluster(cs.clusters.scale(cluster_id, **scale_template)) @@ -436,11 +436,11 @@ help='Name of the node group template to update.') @utils.arg('--id', metavar='', - help='ID of the node group template to update') + help='ID of the node group template to update.') @utils.arg('--json', default=sys.stdin, type=argparse.FileType('r'), - help='JSON representation of the node group template update') + help='JSON representation of the node group template update.') def do_node_group_template_update(cs, args): """Update a node group template.""" template = _get_by_id_or_name(cs.node_group_templates, @@ -531,7 +531,7 @@ help='Name of the cluster template to update.') @utils.arg('--id', metavar='', - help='Id of the cluster template to update.') + help='ID of the cluster template to update.') @utils.arg('--json', default=sys.stdin, type=argparse.FileType('r'), @@ -766,7 +766,7 @@ help='Name of the job binary to update.') @utils.arg('--id', metavar='', - help='Id of the job binary to update.') + help='ID of the job binary to update.') @utils.arg('--json', default=sys.stdin, type=argparse.FileType('r'), @@ -982,7 +982,7 @@ @utils.arg('--type', metavar='', default=None, - help='Report only on this job type') + help='Report only on this job type.') @utils.arg('--plugin', metavar='', default=None, diff -Nru python-saharaclient-0.10.1/saharaclient/osc/plugin.py python-saharaclient-0.11.0/saharaclient/osc/plugin.py --- python-saharaclient-0.10.1/saharaclient/osc/plugin.py 1970-01-01 00:00:00.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/osc/plugin.py 2015-08-31 15:36:20.000000000 +0000 @@ -0,0 +1,57 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from openstackclient.common import utils +from oslo_log import log as logging + +LOG = logging.getLogger(__name__) + +DEFAULT_DATA_PROCESSING_API_VERSION = "1.1" +API_VERSION_OPTION = "os_data_processing_api_version" +API_NAME = "data_processing" +API_VERSIONS = { + "1.1": "saharaclient.api.client.Client" +} + + +def make_client(instance): + data_processing_client = utils.get_client_class( + API_NAME, + instance._api_version[API_NAME], + API_VERSIONS) + LOG.debug('Instantiating data-processing client: %s', + data_processing_client) + + client = data_processing_client( + session=instance.session, + region_name=instance._region_name, + cacert=instance._cacert, + insecure=instance._insecure + ) + return client + + +def build_option_parser(parser): + """Hook to add global options.""" + parser.add_argument( + "--os-data-processing-api-version", + metavar="", + default=utils.env( + 'OS_DATA_PROCESSING_API_VERSION', + default=DEFAULT_DATA_PROCESSING_API_VERSION), + help=("Data processing API version, default=" + + DEFAULT_DATA_PROCESSING_API_VERSION + + ' (Env: OS_DATA_PROCESSING_API_VERSION)')) + return parser diff -Nru python-saharaclient-0.10.1/saharaclient/osc/v1/data_sources.py python-saharaclient-0.11.0/saharaclient/osc/v1/data_sources.py --- python-saharaclient-0.10.1/saharaclient/osc/v1/data_sources.py 1970-01-01 00:00:00.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/osc/v1/data_sources.py 2015-08-31 15:36:20.000000000 +0000 @@ -0,0 +1,179 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from cliff import command +from cliff import lister +from cliff import show +from openstackclient.common import utils as osc_utils +from oslo_log import log as logging + +from saharaclient.osc.v1 import utils + + +class CreateDataSource(show.ShowOne): + """Creates data source""" + + log = logging.getLogger(__name__ + ".CreateDataSource") + + def get_parser(self, prog_name): + parser = super(CreateDataSource, self).get_parser(prog_name) + + parser.add_argument( + 'name', + metavar="", + help="Name of the data source", + ) + parser.add_argument( + '--type', + metavar="", + choices=["swift", "hdfs", "maprfs"], + help="Type of the data source (swift, hdfs or maprfs) [REQUIRED]", + required=True + ) + parser.add_argument( + '--url', + metavar="", + help="Url for the data source [REQUIRED]", + required=True + ) + parser.add_argument( + '--username', + metavar="", + help="Username for accessing the data source url" + ) + parser.add_argument( + '--password', + metavar="", + help="Password for accessing the data source url" + ) + parser.add_argument( + '--description', + metavar="", + help="Description of the data source" + ) + return parser + + def take_action(self, parsed_args): + self.log.debug("take_action(%s)" % parsed_args) + client = self.app.client_manager.data_processing + + description = parsed_args.description or '' + data = client.data_sources.create( + name=parsed_args.name, description=description, + data_source_type=parsed_args.type, url=parsed_args.url, + credential_user=parsed_args.username, + credential_pass=parsed_args.password).to_dict() + + fields = ['name', 'id', 'type', 'url', 'description'] + data = utils.prepare_data(data, fields) + + return self.dict2columns(data) + + +class ListDataSources(lister.Lister): + """Lists data sources""" + + log = logging.getLogger(__name__ + ".ListDataSources") + + def get_parser(self, prog_name): + parser = super(ListDataSources, self).get_parser(prog_name) + parser.add_argument( + '--long', + action='store_true', + default=False, + help='List additional fields in output', + ) + parser.add_argument( + '--type', + metavar="", + choices=["swift", "hdfs", "maprfs"], + help="List data sources of specific type (swift, hdfs or maprfs)" + ) + + return parser + + def take_action(self, parsed_args): + self.log.debug("take_action(%s)" % parsed_args) + client = self.app.client_manager.data_processing + search_opts = {'type': parsed_args.type} if parsed_args.type else {} + + data = client.data_sources.list(search_opts=search_opts) + + if parsed_args.long: + columns = ('name', 'id', 'type', 'url', 'description') + column_headers = [c.capitalize() for c in columns] + + else: + columns = ('name', 'id', 'type') + column_headers = [c.capitalize() for c in columns] + + return ( + column_headers, + (osc_utils.get_item_properties( + s, + columns + ) for s in data) + ) + + +class ShowDataSource(show.ShowOne): + """Display data source details""" + + log = logging.getLogger(__name__ + ".ShowDataSource") + + def get_parser(self, prog_name): + parser = super(ShowDataSource, self).get_parser(prog_name) + parser.add_argument( + "data_source", + metavar="", + help="Name or id of the data source to display", + ) + + return parser + + def take_action(self, parsed_args): + self.log.debug("take_action(%s)" % parsed_args) + client = self.app.client_manager.data_processing + + data = utils.get_resource( + client.data_sources, parsed_args.data_source).to_dict() + + fields = ['name', 'id', 'type', 'url', 'description'] + data = utils.prepare_data(data, fields) + + return 
self.dict2columns(data) + + +class DeleteDataSource(command.Command): + """Delete data source""" + + log = logging.getLogger(__name__ + ".DeleteDataSource") + + def get_parser(self, prog_name): + parser = super(DeleteDataSource, self).get_parser(prog_name) + parser.add_argument( + "data_source", + metavar="", + help="Name or id of the data source to delete", + ) + + return parser + + def take_action(self, parsed_args): + self.log.debug("take_action(%s)" % parsed_args) + client = self.app.client_manager.data_processing + data_source_id = utils.get_resource( + client.data_sources, parsed_args.data_source).id + client.data_sources.delete(data_source_id) diff -Nru python-saharaclient-0.10.1/saharaclient/osc/v1/plugins.py python-saharaclient-0.11.0/saharaclient/osc/v1/plugins.py --- python-saharaclient-0.10.1/saharaclient/osc/v1/plugins.py 1970-01-01 00:00:00.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/osc/v1/plugins.py 2015-08-31 15:36:20.000000000 +0000 @@ -0,0 +1,135 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from os import path + +from cliff import command +from cliff import lister +from cliff import show +from openstackclient.common import utils +from oslo_log import log as logging +from oslo_serialization import jsonutils + + +class ListPlugins(lister.Lister): + """Lists plugins""" + + log = logging.getLogger(__name__ + ".ListPlugins") + + def get_parser(self, prog_name): + parser = super(ListPlugins, self).get_parser(prog_name) + parser.add_argument( + '--long', + action='store_true', + default=False, + help='List additional fields in output', + ) + + return parser + + def take_action(self, parsed_args): + self.log.debug("take_action(%s)" % parsed_args) + client = self.app.client_manager.data_processing + data = client.plugins.list() + + if parsed_args.long: + columns = ('name', 'title', 'versions', 'description') + column_headers = [c.capitalize() for c in columns] + + else: + columns = ('name', 'versions') + column_headers = [c.capitalize() for c in columns] + + return ( + column_headers, + (utils.get_item_properties( + s, + columns, + formatters={ + 'versions': utils.format_list + }, + ) for s in data) + ) + + +class ShowPlugin(show.ShowOne): + """Display plugin details""" + + log = logging.getLogger(__name__ + ".ShowPlugin") + + def get_parser(self, prog_name): + parser = super(ShowPlugin, self).get_parser(prog_name) + parser.add_argument( + "plugin", + metavar="", + help="Name of the plugin to display", + ) + + return parser + + def take_action(self, parsed_args): + self.log.debug("take_action(%s)" % parsed_args) + client = self.app.client_manager.data_processing + + data = client.plugins.get(parsed_args.plugin).to_dict() + data['versions'] = utils.format_list(data['versions']) + + return self.dict2columns(data) + + +class GetPluginConfigs(command.Command): + """Get plugin configs""" + + log = logging.getLogger(__name__ + ".GetPluginConfigs") + + def get_parser(self, prog_name): + parser = super(GetPluginConfigs, 
self).get_parser(prog_name) + parser.add_argument( + "plugin", + metavar="", + help="Name of the plugin to provide config information about", + ) + parser.add_argument( + "version", + metavar="", + help="Version of the plugin to provide config information about", + ) + parser.add_argument( + '--file', + metavar="", + help='Destination file (defaults to plugin name)', + ) + return parser + + def take_action(self, parsed_args): + self.log.debug("take_action(%s)" % parsed_args) + client = self.app.client_manager.data_processing + + if not parsed_args.file: + parsed_args.file = parsed_args.plugin + + data = client.plugins.get_version_details( + parsed_args.plugin, parsed_args.version).to_dict() + + if path.exists(parsed_args.file): + self.log.error('File "%s" already exists. Chose another one with ' + '--file argument.' % parsed_args.file) + else: + with open(parsed_args.file, 'w') as f: + jsonutils.dump(data, f, indent=4) + self.log.info( + '"%(plugin)s" plugin configs was saved in "%(file)s"' + 'file' % {'plugin': parsed_args.plugin, + 'file': parsed_args.file}) diff -Nru python-saharaclient-0.10.1/saharaclient/osc/v1/utils.py python-saharaclient-0.11.0/saharaclient/osc/v1/utils.py --- python-saharaclient-0.10.1/saharaclient/osc/v1/utils.py 1970-01-01 00:00:00.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/osc/v1/utils.py 2015-08-31 15:36:20.000000000 +0000 @@ -0,0 +1,43 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from openstackclient.common import exceptions +from openstackclient.common import utils + + +def get_resource(manager, name_or_id): + resource = utils.find_resource(manager, name_or_id) + if isinstance(resource, list): + if not resource: + msg = "No %s with a name or ID of '%s' exists." % \ + (manager.resource_class.__name__.lower(), name_or_id) + raise exceptions.CommandError(msg) + if len(resource) > 1: + msg = "More than one %s exists with the name '%s'." 
% \ + (manager.resource_class.__name__.lower(), name_or_id) + raise exceptions.CommandError(msg) + return resource[0] + + else: + return resource + + +def prepare_data(data, fields): + new_data = {} + for f in fields: + if f in data: + new_data[f.replace('_', ' ').capitalize()] = data[f] + + return new_data diff -Nru python-saharaclient-0.10.1/saharaclient/shell.py python-saharaclient-0.11.0/saharaclient/shell.py --- python-saharaclient-0.10.1/saharaclient/shell.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/shell.py 2015-08-31 15:36:20.000000000 +0000 @@ -306,8 +306,10 @@ metavar='', default=cliutils.env( 'SAHARA_ENDPOINT_TYPE', + 'OS_ENDPOINT_TYPE', default=DEFAULT_ENDPOINT_TYPE), - help='Defaults to env[SAHARA_ENDPOINT_TYPE] or ' + help=('Defaults to env[SAHARA_ENDPOINT_TYPE] or' + ' env[OS_ENDPOINT_TYPE] or ') + DEFAULT_ENDPOINT_TYPE + '.') # NOTE(dtroyer): We can't add --endpoint_type here due to argparse # thinking usage-list --end is ambiguous; but it diff -Nru python-saharaclient-0.10.1/saharaclient/tests/unit/osc/test_plugin.py python-saharaclient-0.11.0/saharaclient/tests/unit/osc/test_plugin.py --- python-saharaclient-0.10.1/saharaclient/tests/unit/osc/test_plugin.py 1970-01-01 00:00:00.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/tests/unit/osc/test_plugin.py 2015-08-31 15:36:20.000000000 +0000 @@ -0,0 +1,38 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +from saharaclient.osc import plugin +from saharaclient.tests.unit import base + + +class TestDataProcessingPlugin(base.BaseTestCase): + + @mock.patch("saharaclient.api.client.Client") + def test_make_client(self, p_client): + + instance = mock.Mock() + instance._api_version = {"data_processing": '1.1'} + instance.session = 'session' + instance._region_name = 'region_name' + instance._cacert = 'cacert' + instance._insecure = 'insecure' + + plugin.make_client(instance) + p_client.assert_called_with(session='session', + region_name='region_name', + cacert='cacert', + insecure='insecure') diff -Nru python-saharaclient-0.10.1/saharaclient/tests/unit/osc/v1/fakes.py python-saharaclient-0.11.0/saharaclient/tests/unit/osc/v1/fakes.py --- python-saharaclient-0.10.1/saharaclient/tests/unit/osc/v1/fakes.py 1970-01-01 00:00:00.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/tests/unit/osc/v1/fakes.py 2015-08-31 15:36:20.000000000 +0000 @@ -0,0 +1,26 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +import mock +from openstackclient.tests import utils + + +class TestDataProcessing(utils.TestCommand): + + def setUp(self): + super(TestDataProcessing, self).setUp() + + self.app.client_manager.data_processing = mock.Mock() diff -Nru python-saharaclient-0.10.1/saharaclient/tests/unit/osc/v1/test_data_sources.py python-saharaclient-0.11.0/saharaclient/tests/unit/osc/v1/test_data_sources.py --- python-saharaclient-0.10.1/saharaclient/tests/unit/osc/v1/test_data_sources.py 1970-01-01 00:00:00.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/tests/unit/osc/v1/test_data_sources.py 2015-08-31 15:36:20.000000000 +0000 @@ -0,0 +1,201 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from openstackclient.tests import utils as osc_utils + +from saharaclient.api import data_sources as api_ds +from saharaclient.osc.v1 import data_sources as osc_ds +from saharaclient.tests.unit.osc.v1 import fakes + + +DS_INFO = {'id': 'id', 'name': 'source', 'type': 'swift', + 'url': 'swift://container.sahara/object', + 'description': 'Data Source for tests'} + + +class TestDataSources(fakes.TestDataProcessing): + def setUp(self): + super(TestDataSources, self).setUp() + self.ds_mock = ( + self.app.client_manager.data_processing.data_sources) + self.ds_mock.reset_mock() + + +class TestCreateDataSource(TestDataSources): + def setUp(self): + super(TestCreateDataSource, self).setUp() + self.ds_mock.create.return_value = api_ds.DataSources( + None, DS_INFO) + + # Command to test + self.cmd = osc_ds.CreateDataSource(self.app, None) + + def test_data_sources_create_no_options(self): + arglist = [] + verifylist = [] + + self.assertRaises(osc_utils.ParserException, self.check_parser, + self.cmd, arglist, verifylist) + + def test_data_sources_create_required_options(self): + arglist = ['source', '--type', 'swift', '--url', + 'swift://container.sahara/object'] + verifylist = [('name', 'source'), ('type', 'swift'), + ('url', 'swift://container.sahara/object')] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + columns, data = self.cmd.take_action(parsed_args) + + # Check that data source was created with correct arguments + called_args = {'credential_pass': None, 'credential_user': None, + 'data_source_type': 'swift', 'name': 'source', + 'description': '', + 'url': 'swift://container.sahara/object'} + self.ds_mock.create.assert_called_once_with(**called_args) + + # Check that columns are correct + expected_columns = ('Description', 'Id', 'Name', 'Type', 'Url') + self.assertEqual(expected_columns, columns) + + # Check that data is correct + expected_data = ('Data Source for tests', 'id', 'source', 'swift', + 'swift://container.sahara/object') + self.assertEqual(expected_data, data) + + def test_data_sources_create_all_options(self): + arglist = ['source', '--type', 'swift', '--url', + 'swift://container.sahara/object', '--username', 'user', + 
'--password', 'pass', '--description', + 'Data Source for tests'] + verifylist = [('name', 'source'), ('type', 'swift'), + ('url', 'swift://container.sahara/object'), + ('username', 'user'), ('password', 'pass'), + ('description', 'Data Source for tests')] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + columns, data = self.cmd.take_action(parsed_args) + + # Check that data source was created with correct arguments + called_args = {'credential_pass': 'pass', 'credential_user': 'user', + 'data_source_type': 'swift', 'name': 'source', + 'description': 'Data Source for tests', + 'url': 'swift://container.sahara/object'} + self.ds_mock.create.assert_called_once_with(**called_args) + + # Check that columns are correct + expected_columns = ('Description', 'Id', 'Name', 'Type', 'Url') + self.assertEqual(expected_columns, columns) + + # Check that data is correct + expected_data = ('Data Source for tests', 'id', 'source', 'swift', + 'swift://container.sahara/object') + self.assertEqual(expected_data, data) + + +class TestListDataSources(TestDataSources): + def setUp(self): + super(TestListDataSources, self).setUp() + self.ds_mock.list.return_value = [api_ds.DataSources( + None, DS_INFO)] + + # Command to test + self.cmd = osc_ds.ListDataSources(self.app, None) + + def test_data_sources_list_no_options(self): + arglist = [] + verifylist = [] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + columns, data = self.cmd.take_action(parsed_args) + + # Check that columns are correct + expected_columns = ['Name', 'Id', 'Type'] + self.assertEqual(expected_columns, columns) + + # Check that data is correct + expected_data = [('source', 'id', 'swift')] + self.assertEqual(expected_data, list(data)) + + def test_data_sources_list_long(self): + arglist = ['--long'] + verifylist = [('long', True)] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + columns, data = self.cmd.take_action(parsed_args) + + # Check that columns are correct + expected_columns = ['Name', 'Id', 'Type', 'Url', 'Description'] + self.assertEqual(expected_columns, columns) + + # Check that data is correct + expected_data = [('source', 'id', 'swift', + 'swift://container.sahara/object', + 'Data Source for tests')] + self.assertEqual(expected_data, list(data)) + + +class TestShowDataSource(TestDataSources): + def setUp(self): + super(TestShowDataSource, self).setUp() + self.ds_mock.get.return_value = api_ds.DataSources( + None, DS_INFO) + + # Command to test + self.cmd = osc_ds.ShowDataSource(self.app, None) + + def test_data_sources_show(self): + arglist = ['source'] + verifylist = [('data_source', 'source')] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + columns, data = self.cmd.take_action(parsed_args) + + # Check that correct arguments was passed + self.ds_mock.get.assert_called_once_with('source') + + # Check that columns are correct + expected_columns = ('Description', 'Id', 'Name', 'Type', 'Url') + self.assertEqual(expected_columns, columns) + + # Check that data is correct + expected_data = ['Data Source for tests', 'id', 'source', 'swift', + 'swift://container.sahara/object'] + self.assertEqual(expected_data, list(data)) + + +class TestDeleteDataSource(TestDataSources): + def setUp(self): + super(TestDeleteDataSource, self).setUp() + self.ds_mock.get.return_value = api_ds.DataSources( + None, DS_INFO) + + # Command to test + self.cmd = osc_ds.DeleteDataSource(self.app, None) + + def test_data_sources_delete(self): + arglist = ['source'] + verifylist 
= [('data_source', 'source')] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + self.cmd.take_action(parsed_args) + + # Check that correct arguments was passed + self.ds_mock.delete.assert_called_once_with('id') diff -Nru python-saharaclient-0.10.1/saharaclient/tests/unit/osc/v1/test_plugins.py python-saharaclient-0.11.0/saharaclient/tests/unit/osc/v1/test_plugins.py --- python-saharaclient-0.10.1/saharaclient/tests/unit/osc/v1/test_plugins.py 1970-01-01 00:00:00.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/tests/unit/osc/v1/test_plugins.py 2015-08-31 15:36:20.000000000 +0000 @@ -0,0 +1,160 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +from saharaclient.api import plugins as api_plugins +from saharaclient.osc.v1 import plugins as osc_plugins +from saharaclient.tests.unit.osc.v1 import fakes + + +PLUGIN_INFO = {'name': 'fake', + 'title': 'Fake Plugin', + 'versions': ['0.1', '0.2'], + 'description': 'Plugin for tests'} + + +class TestPlugins(fakes.TestDataProcessing): + def setUp(self): + super(TestPlugins, self).setUp() + self.plugins_mock = self.app.client_manager.data_processing.plugins + self.plugins_mock.reset_mock() + + +class TestListPlugins(TestPlugins): + def setUp(self): + super(TestListPlugins, self).setUp() + self.plugins_mock.list.return_value = [api_plugins.Plugin( + None, PLUGIN_INFO)] + + # Command to test + self.cmd = osc_plugins.ListPlugins(self.app, None) + + def test_plugins_list_no_options(self): + arglist = [] + verifylist = [] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + columns, data = self.cmd.take_action(parsed_args) + + # Check that columns are correct + expected_columns = ['Name', 'Versions'] + self.assertEqual(expected_columns, columns) + + # Check that data is correct + expected_data = [('fake', '0.1, 0.2')] + self.assertEqual(expected_data, list(data)) + + def test_plugins_list_long(self): + arglist = ['--long'] + verifylist = [('long', True)] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + columns, data = self.cmd.take_action(parsed_args) + + # Check that columns are correct + expected_columns = ['Name', 'Title', 'Versions', 'Description'] + self.assertEqual(expected_columns, columns) + + # Check that data is correct + expected_data = [('fake', 'Fake Plugin', '0.1, 0.2', + 'Plugin for tests')] + self.assertEqual(expected_data, list(data)) + + +class TestShowPlugin(TestPlugins): + def setUp(self): + super(TestShowPlugin, self).setUp() + self.plugins_mock.get.return_value = api_plugins.Plugin( + None, PLUGIN_INFO) + + # Command to test + self.cmd = osc_plugins.ShowPlugin(self.app, None) + + def test_plugin_show(self): + arglist = ['fake'] + verifylist = [] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + columns, data = self.cmd.take_action(parsed_args) + + # Check that correct arguments was passed + self.plugins_mock.get.assert_called_once_with('fake') + + # Check that columns are correct + 
expected_columns = ('description', 'name', 'title', 'versions') + self.assertEqual(expected_columns, columns) + + # Check that data is correct + expected_data = ('Plugin for tests', 'fake', 'Fake Plugin', '0.1, 0.2') + self.assertEqual(expected_data, data) + + +class TestGetPluginConfigs(TestPlugins): + def setUp(self): + super(TestGetPluginConfigs, self).setUp() + self.plugins_mock.get_version_details.return_value = ( + api_plugins.Plugin(None, PLUGIN_INFO)) + + # Command to test + self.cmd = osc_plugins.GetPluginConfigs(self.app, None) + + @mock.patch('oslo_serialization.jsonutils.dump') + def test_get_plugin_configs_default_file(self, p_dump): + m_open = mock.mock_open() + with mock.patch('six.moves.builtins.open', m_open, create=True): + arglist = ['fake', '0.1'] + verifylist = [('plugin', 'fake'), ('version', '0.1')] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + self.cmd.take_action(parsed_args) + + # Check that correct arguments was passed + self.plugins_mock.get_version_details.assert_called_once_with( + 'fake', '0.1') + + args_to_dump = p_dump.call_args[0] + # Check that the right data will be saved + + self.assertEqual(PLUGIN_INFO, args_to_dump[0]) + # Check that data will be saved to the right file + self.assertEqual('fake', m_open.call_args[0][0]) + + @mock.patch('oslo_serialization.jsonutils.dump') + def test_get_plugin_configs_specified_file(self, p_dump): + m_open = mock.mock_open() + with mock.patch('six.moves.builtins.open', m_open): + arglist = ['fake', '0.1', '--file', 'testfile'] + verifylist = [('plugin', 'fake'), ('version', '0.1'), + ('file', 'testfile')] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + self.cmd.take_action(parsed_args) + + # Check that correct arguments was passed + self.plugins_mock.get_version_details.assert_called_once_with( + 'fake', '0.1') + + args_to_dump = p_dump.call_args[0] + # Check that the right data will be saved + + self.assertEqual(PLUGIN_INFO, args_to_dump[0]) + # Check that data will be saved to the right file + self.assertEqual('testfile', m_open.call_args[0][0]) diff -Nru python-saharaclient-0.10.1/saharaclient/tests/unit/osc/v1/test_utils.py python-saharaclient-0.11.0/saharaclient/tests/unit/osc/v1/test_utils.py --- python-saharaclient-0.10.1/saharaclient/tests/unit/osc/v1/test_utils.py 1970-01-01 00:00:00.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/tests/unit/osc/v1/test_utils.py 2015-08-31 15:36:20.000000000 +0000 @@ -0,0 +1,76 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from openstackclient.common import exceptions + +from saharaclient.osc.v1 import utils +from saharaclient.tests.unit import base + + +class TestUtils(base.BaseTestCase): + def test_prepare_data(self): + data = {'id': '123', 'name_of_res': 'name', 'description': 'descr'} + + fields = ['id', 'name_of_res', 'description'] + expected_data = {'Description': 'descr', 'Id': '123', + 'Name of res': 'name'} + self.assertEqual(expected_data, utils.prepare_data(data, fields)) + + fields = ['id', 'name_of_res'] + expected_data = {'Id': '123', 'Name of res': 'name'} + self.assertEqual(expected_data, utils.prepare_data(data, fields)) + + fields = ['name_of_res'] + expected_data = {'Name of res': 'name'} + self.assertEqual(expected_data, utils.prepare_data(data, fields)) + + def test_get_resource_id(self): + class TestResource(object): + def __init__(self, id): + self.id = id + + class TestManager(object): + + resource_class = TestResource + + def get(self, id): + if id == 'id': + return TestResource('from_id') + else: + raise + + def find(self, name): + if name == 'name': + return [TestResource('from_name')] + if name == 'null': + return [] + if name == 'mult': + return [TestResource('1'), TestResource('2')] + + # check case when resource id is passed + self.assertEqual('from_id', utils.get_resource( + TestManager(), 'id').id) + + # check case when resource name is passed + self.assertEqual('from_name', utils.get_resource( + TestManager(), 'name').id) + + # check that error is raised when resource doesn't exists + self.assertRaises(exceptions.CommandError, utils.get_resource, + TestManager(), 'null') + + # check that error is raised when multiple resources choice + self.assertRaises(exceptions.CommandError, utils.get_resource, + TestManager(), 'mult') diff -Nru python-saharaclient-0.10.1/saharaclient/tests/unit/test_clusters.py python-saharaclient-0.11.0/saharaclient/tests/unit/test_clusters.py --- python-saharaclient-0.10.1/saharaclient/tests/unit/test_clusters.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/tests/unit/test_clusters.py 2015-08-31 15:36:20.000000000 +0000 @@ -148,3 +148,21 @@ self.client.clusters.delete('id') self.assertEqual(url, self.responses.last_request.url) + + def test_clusters_update(self): + url = self.URL + '/clusters/id' + + update_body = { + 'name': 'new_name', + 'description': 'descr' + } + + self.responses.patch(url, status_code=202, json=update_body) + + resp = self.client.clusters.update('id', name='new_name', + description='descr') + + self.assertEqual(url, self.responses.last_request.url) + self.assertIsInstance(resp, cl.Cluster) + self.assertEqual(update_body, + json.loads(self.responses.last_request.body)) diff -Nru python-saharaclient-0.10.1/saharaclient/tests/unit/test_job_binary_internals.py python-saharaclient-0.11.0/saharaclient/tests/unit/test_job_binary_internals.py --- python-saharaclient-0.10.1/saharaclient/tests/unit/test_job_binary_internals.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/tests/unit/test_job_binary_internals.py 2015-08-31 15:36:20.000000000 +0000 @@ -12,6 +12,8 @@ # License for the specific language governing permissions and limitations # under the License. 
+import json + from saharaclient.api import job_binary_internals as jbi from saharaclient.tests.unit import base @@ -62,3 +64,19 @@ self.client.job_binary_internals.delete('id') self.assertEqual(url, self.responses.last_request.url) + + def test_job_binary_update(self): + url = self.URL + '/job-binary-internals/id' + + update_body = { + 'name': 'new_name' + } + + self.responses.patch(url, status_code=202, json=update_body) + + resp = self.client.job_binary_internals.update('id', name='new_name') + + self.assertEqual(url, self.responses.last_request.url) + self.assertIsInstance(resp, jbi.JobBinaryInternal) + self.assertEqual(update_body, + json.loads(self.responses.last_request.body)) diff -Nru python-saharaclient-0.10.1/saharaclient/tests/unit/test_job_executions.py python-saharaclient-0.11.0/saharaclient/tests/unit/test_job_executions.py --- python-saharaclient-0.10.1/saharaclient/tests/unit/test_job_executions.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/tests/unit/test_job_executions.py 2015-08-31 15:36:20.000000000 +0000 @@ -32,6 +32,11 @@ 'job_configs': {} } + update_json = { + 'is_public': True, + 'is_protected': True, + } + def test_create_job_execution_with_io(self): url = self.URL + '/jobs/job_id/execute' @@ -92,3 +97,13 @@ self.client.job_executions.delete('id') self.assertEqual(url, self.responses.last_request.url) + + def test_job_executions_update(self): + url = self.URL + '/job-executions/id' + self.responses.patch(url, status_code=202, json=self.update_json) + + resp = self.client.job_executions.update("id", **self.update_json) + self.assertEqual(url, self.responses.last_request.url) + self.assertIsInstance(resp, je.JobExecution) + self.assertEqual(self.update_json, + json.loads(self.responses.last_request.body)) diff -Nru python-saharaclient-0.10.1/saharaclient/tests/unit/test_jobs.py python-saharaclient-0.11.0/saharaclient/tests/unit/test_jobs.py --- python-saharaclient-0.10.1/saharaclient/tests/unit/test_jobs.py 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/saharaclient/tests/unit/test_jobs.py 2015-08-31 15:36:20.000000000 +0000 @@ -83,3 +83,21 @@ self.client.jobs.delete('id') self.assertEqual(url, self.responses.last_request.url) + + def test_jobs_update(self): + url = self.URL + '/jobs/id' + + update_body = { + 'name': 'new_name', + 'description': 'description' + } + + self.responses.patch(url, status_code=202, json=update_body) + + resp = self.client.jobs.update('id', name='new_name', + description='description') + + self.assertEqual(url, self.responses.last_request.url) + self.assertIsInstance(resp, jobs.Job) + self.assertEqual(update_body, + json.loads(self.responses.last_request.body)) diff -Nru python-saharaclient-0.10.1/setup.cfg python-saharaclient-0.11.0/setup.cfg --- python-saharaclient-0.10.1/setup.cfg 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/setup.cfg 2015-08-31 15:36:20.000000000 +0000 @@ -25,13 +25,27 @@ setup-hooks = pbr.hooks.setup_hook [files] -packages = +packages = saharaclient [entry_points] console_scripts = sahara = saharaclient.shell:main +openstack.cli.extension = + data_processing = saharaclient.osc.plugin + +openstack.data_processing.v1 = + dataprocessing_plugin_list = saharaclient.osc.v1.plugins:ListPlugins + dataprocessing_plugin_show = saharaclient.osc.v1.plugins:ShowPlugin + dataprocessing_plugin_configs_get = saharaclient.osc.v1.plugins:GetPluginConfigs + + dataprocessing_data_source_create = saharaclient.osc.v1.data_sources:CreateDataSource + 
dataprocessing_data_source_list = saharaclient.osc.v1.data_sources:ListDataSources + dataprocessing_data_source_show = saharaclient.osc.v1.data_sources:ShowDataSource + dataprocessing_data_source_update = saharaclient.osc.v1.data_sources:UpdateDataSource + dataprocessing_data_source_delete = saharaclient.osc.v1.data_sources:DeleteDataSource + [build_sphinx] all_files = 1 build-dir = doc/build diff -Nru python-saharaclient-0.10.1/test-requirements.txt python-saharaclient-0.11.0/test-requirements.txt --- python-saharaclient-0.10.1/test-requirements.txt 2015-08-19 09:57:38.000000000 +0000 +++ python-saharaclient-0.11.0/test-requirements.txt 2015-08-31 15:36:20.000000000 +0000 @@ -7,7 +7,7 @@ coverage>=3.6 discover mock>=1.2 -oslo.config>=1.11.0 # Apache-2.0 +oslo.config>=2.3.0 # Apache-2.0 oslosphinx>=2.5.0 # Apache-2.0 python-neutronclient<3,>=2.6.0 python-novaclient>=2.26.0
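
The hunks above add a generic _patch() helper to saharaclient/api/base.py, expose it through new update() methods on the clusters, jobs, job-binary-internals and job-executions managers, and thread is_public/is_protected through the create calls. The sketch below only illustrates how that new 0.11.0 Python API might be exercised; the Client keyword arguments, endpoint URL, credentials and object IDs are assumed placeholders, not values taken from this diff.

# Illustrative sketch only; assumes a reachable Sahara endpoint and valid IDs.
from saharaclient.api import client as sahara_client

# Constructor keywords are an assumption about the 0.11.x Client signature;
# adjust them (or pass an authenticated keystone session) for a real cloud.
sahara = sahara_client.Client(
    username='demo',
    api_key='secret',
    project_name='demo',
    auth_url='http://keystone.example.com:5000/v2.0',
    sahara_url='http://sahara.example.com:8386/v1.1/<project-id>')

# New in 0.11.0: data sources can be created public and/or protected.
ds = sahara.data_sources.create(
    name='input-data',
    description='demo input',
    data_source_type='swift',
    url='swift://container.sahara/object',
    credential_user='demo',
    credential_pass='secret',
    is_public=True,
    is_protected=False)

# New in 0.11.0: update() helpers send PATCH requests through _patch().
sahara.clusters.update('<cluster-id>', description='managed by demo project',
                       is_protected=True)
sahara.jobs.update('<job-id>', name='wordcount-v2')
sahara.job_binary_internals.update('<binary-id>', name='wordcount-v2.jar')
sahara.job_executions.update('<job-execution-id>', is_public=True)

The setup.cfg hunk also registers these managers behind the new OpenStackClient plugin entry points, so with python-openstackclient (>= 1.5.0) installed the same operations should be reachable as "openstack dataprocessing plugin list", "openstack dataprocessing data source create" and so on; the exact command names derive from the openstack.data_processing.v1 entries above.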