diff -Nru python-pykmip-0.7.0/bin/run_server.py python-pykmip-0.8.0/bin/run_server.py --- python-pykmip-0.7.0/bin/run_server.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/bin/run_server.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2016 The Johns Hopkins University/Applied Physics Laboratory -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from kmip.services.server import server - - -if __name__ == '__main__': - server.main() diff -Nru python-pykmip-0.7.0/bin/run_tests.sh python-pykmip-0.8.0/bin/run_tests.sh --- python-pykmip-0.7.0/bin/run_tests.sh 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/bin/run_tests.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -#!/bin/bash - -set -eu - -tox - diff -Nru python-pykmip-0.7.0/CHANGES.txt python-pykmip-0.8.0/CHANGES.txt --- python-pykmip-0.7.0/CHANGES.txt 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/CHANGES.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,149 +0,0 @@ -0.7.0 - November 14, 2017 -* Add support for Python 3.6 -* Add support for the InitialDate attribute -* Add server support for the GetAttributeList operation -* Add server support for the Locate operation -* Add client and server support for the MAC operation -* Add client and server support for the Revoke operation -* Add client and server support for the Encrypt operation -* Add client and server support for the Decrypt operation -* Add 
client and server support for the DeriveKey operation -* Add client and server support for the Sign operation -* Add client and server support for the SignatureVerify operation -* Add client and server support for retrieving wrapped keys -* Add client and server support for storing wrapped keys -* Add KMIP 1.4 enumerations -* Add server config option enabling certificate extension checks -* Add server config option defining set of usable TLS ciphers -* Add server config option setting the server log level -* Update the server to enforce checking object state and usage masks -* Update server Locate support to allow object name filtering -* Remove support for Python 2.6 -* Fix bug with multithreading support with the SQLite backend -* Fix bug with how open() is mocked in the server test suite -* Fix bug with mismapped polymorphic identity for certificate objects -* Fix bug with socket interrupt handling under Python 3.5 -* Fix bug with detached instance errors in the server test suite - -0.6.0 - December 14, 2016 -* Add support for Python 3.5 -* Add support for the State and OperationPolicyName attributes -* Add server support for the Activate and GetAttributes operations -* Add server support for certificate-based client authentication -* Add server support for object access control via operation policies -* Add server support for loading of user-defined operation policies -* Add client support for the GetAttributes operation -* Update clients to support operation policy names with objects -* Update ProxyKmipClient to support names when creating new objects -* Remove coveralls integration -* Fix bug with early server termination on missing request credential -* Fix bug with closing the client while unconnected to a server -* Fix bug with default values overriding server config file settings -* Fix bug with early server termination on bad client certificates -* Fix bug with deprecated usage of the bandit config file -* Fix bug with ProxyKmipClient registering unset 
object attributes - -0.5.0 - April 14, 2016 -* Add KmipServer server implementation -* Add KmipSession to manage threaded client/server connections -* Add KmipEngine for processing core server application logic -* Add KmipEngine support for CRUD operations for managed objects -* Add SQLAlchemy/SQLite support for KmipEngine data storage -* Add CryptographyEngine component for cryptographic operations -* Add pending deprecation warning for Python 2.6 support -* Add pending deprecation warning for the KMIPServer implementation -* Add support for building Sphinx documentation -* Add support for SQLAlchemy tables to all Pie objects -* Add Python magic methods to Attribute and Name objects -* Add Attribute class unit tests -* Add bin script to run the KmipServer -* Add setup entry points to run the KmipServer -* Update DiscoverVersions demo with optional versions argument -* Update all demo scripts to setup their own logging infrastructure -* Update README with information on the KmipServer implementation -* Remove expired certificate files from the integration test suite -* Remove default package log configuration and configuration file -* Fix bug with Locate payload parsing optional values -* Fix bug with DateTime string tests and move to UTC representation - -0.4.1 - December 2, 2015 -* Add support for the GetAttributeList operation -* Add integration with Travis CI, Codecov/Coveralls, and Bandit -* Add client/server failover support using multiple IP addresses -* Add additional attribute unit tests -* Update implementations of KMIP primitives -* Reorganize server code to prepare for refactoring -* Remove use of exec when handling library version numbers -* Remove broken server script - -0.4.0 - August 14, 2015 -* Add the official Pie API for a simpler KMIP interface -* Add the ProxyKmipClient implementation of the Pie API -* Add key, secret, and opaque objects to the Pie object hierarchy -* Add unit demos for all ProxyKmipClient operations -* Add complete unit and 
integration test suites for the Pie package -* Add KMIPProxy client support/demos for the Activate and Revoke operations -* Add KMIPProxy client connection timeout support -* Add KMIPProxy integration tests for asymmetric key and secret/opaque objects -* Add improved request error logging for the KMIPServer -* Update README with additional information about the clients and Pie API -* Remove AUTHORS in favor of Git commit history -* Fix bug with dangling file handle when setting __version__ -* Fix bug with dangling socket connection upon client destruction - -0.3.3 - June 25, 2015 -* Add the core ManagedObject class hierarchy for the new Pie API -* Add updated Boolean primitive implementation and test suite -* Add integration tests for symmetric key creation and registration -* Update demo and client logging to log at the INFO level by default -* Update README with improved testing instructions -* Fix bug causing enumerations to be encoded as signed integers -* Fix bug with mismatched EncodingOption tag -* Fix bug with relative path use for version number handling -* Fix bug with Integer primitive breaking on valid long integer values - -0.3.2 - June 11, 2015 -* Add support for registering and retrieving Certificates -* Update unit demos to work with Certificates -* Reorganize test suite into unit and integration test suites -* Remove old demo scripts -* Fix bug with incorrect KeyMaterialStruct tag -* Fix bug causing infinite recursion with object inheritance - -0.3.1 - April 23, 2015 -* Add KMIP profile information to the client -* Add support for registering/retrieving SecretData and Opaque objects -* Update the SecretFactory to build Public/PrivateKeys with user data - -0.3.0 - March 14, 2015 -* Add client support for the DiscoverVersions and Query operations -* Add client support for the CreateKeyPair and ReKeyKeyPair operations -* Add support for registering and retrieving PublicKeys and PrivateKeys -* Add unit demos demonstrating how to use individual KMIP 
client operations -* Add custom configuration support to the KMIP client -* Add inline documentation for new KMIP objects, attributes and payloads -* Add additional unit test suites for new KMIP objects, attributes and payloads -* Add dependency for the six library to handle Python version support -* Update README with a condensed description and breakdown of the library -* Fix bug with unindexed format strings (impacts Python 2.6) -* Fix missing certificate file issue when installing library from PyPI - -0.2.0 - November 17, 2014 -* Add configuration file support -* Add client support for the Locate operation -* Update README with additional information and reStructuredText format - -0.1.1 - September 12, 2014 -* Fix bug with auto-installing third party dependencies - -0.1.0 - August 28, 2014 -* Add support for Python 3.3 and 3.4 -* Add support for KMIP client/server SSL connections -* Remove all Thrift library dependencies - -0.0.1 - August 12, 2014 -* Initial release -* Add support for Python 2.6 and 2.7 -* Add KMIP client and server -* Add client/server support for Create, Get, Register, and Destroy operations -* Add unit test suite \ No newline at end of file diff -Nru python-pykmip-0.7.0/debian/changelog python-pykmip-0.8.0/debian/changelog --- python-pykmip-0.7.0/debian/changelog 2018-02-25 20:46:12.000000000 +0000 +++ python-pykmip-0.8.0/debian/changelog 2018-06-14 19:20:01.000000000 +0000 @@ -1,3 +1,18 @@ +python-pykmip (0.8.0-0ubuntu1~ubuntu18.10.1~ppa201806141519) cosmic; urgency=medium + + * No-change backport to cosmic + + -- Corey Bryant Thu, 14 Jun 2018 15:20:01 -0400 + +python-pykmip (0.8.0-0ubuntu1) cosmic; urgency=medium + + * d/gbp.conf: Update gbp configuration file. + * d/control: Update Vcs-* links and maintainers. + * New upstream release. + * d/control: Align (Build-)Depends with upstream. + + -- Corey Bryant Thu, 14 Jun 2018 15:09:05 -0400 + python-pykmip (0.7.0-2) unstable; urgency=medium * Uploading to unstable. 
diff -Nru python-pykmip-0.7.0/debian/control python-pykmip-0.8.0/debian/control --- python-pykmip-0.7.0/debian/control 2018-02-25 20:46:12.000000000 +0000 +++ python-pykmip-0.8.0/debian/control 2018-06-14 19:09:05.000000000 +0000 @@ -1,7 +1,8 @@ Source: python-pykmip Section: python Priority: optional -Maintainer: Debian OpenStack +Maintainer: Ubuntu Developers +XSBC-Original-Maintainer: Debian OpenStack Uploaders: Thomas Goirand , Build-Depends: @@ -20,6 +21,7 @@ python-fixtures, python-mock, python-pytest, + python-requests, python-six, python-sqlalchemy, python-testresources, @@ -30,6 +32,7 @@ python3-fixtures, python3-mock, python3-pytest, + python3-requests, python3-six, python3-sqlalchemy, python3-subunit, @@ -39,8 +42,8 @@ subunit, testrepository, Standards-Version: 4.1.3 -Vcs-Browser: https://salsa.debian.org/openstack-team/python/python-pykmip -Vcs-Git: https://salsa.debian.org/openstack-team/python/python-pykmip.git +Vcs-Browser: https://git.launchpad.net/~ubuntu-server-dev/ubuntu/+source/python-pykmip +Vcs-Git: https://git.launchpad.net/~ubuntu-server-dev/ubuntu/+source/python-pykmip Homepage: https://github.com/OpenKMIP/PyKMIP Package: python-pykmip @@ -50,6 +53,7 @@ Depends: python-cryptography, python-enum34, + python-requests, python-six, python-sqlalchemy, ${misc:Depends}, @@ -77,6 +81,7 @@ python3-kmip, Depends: python3-cryptography, + python3-requests, python3-six, python3-sqlalchemy, ${misc:Depends}, diff -Nru python-pykmip-0.7.0/debian/gbp.conf python-pykmip-0.8.0/debian/gbp.conf --- python-pykmip-0.7.0/debian/gbp.conf 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/debian/gbp.conf 2018-06-14 19:09:05.000000000 +0000 @@ -0,0 +1,7 @@ +[DEFAULT] +debian-branch = master +upstream-tag = %(version)s +pristine-tar = True + +[buildpackage] +export-dir = ../build-area diff -Nru python-pykmip-0.7.0/docs/conf.py python-pykmip-0.8.0/docs/conf.py --- python-pykmip-0.7.0/docs/conf.py 2017-11-14 06:30:49.000000000 +0000 +++ 
python-pykmip-0.8.0/docs/conf.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,359 +0,0 @@ -# -*- coding: utf-8 -*- -# -# PyKMIP documentation build configuration file, created by -# sphinx-quickstart on Mon Jan 25 17:12:29 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.mathjax', - 'sphinx.ext.ifconfig', -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'PyKMIP' -copyright = u'2016, JHUAPL' -author = u'JHUAPL' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. 
-# -# The short X.Y version. -version = u'0.4' -# The full version, including alpha/beta/rc tags. -release = u'0.4.1' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. 
For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'PyKMIPdoc' - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). 
-latex_documents = [ - (master_doc, 'PyKMIP.tex', u'PyKMIP Documentation', - u'JHUAPL', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'pykmip', u'PyKMIP Documentation', - [author], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'PyKMIP', u'PyKMIP Documentation', - author, 'PyKMIP', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False - - -# -- Options for Epub output ---------------------------------------------- - -# Bibliographic Dublin Core info. 
-epub_title = project -epub_author = author -epub_publisher = author -epub_copyright = copyright - -# The basename for the epub file. It defaults to the project name. -#epub_basename = project - -# The HTML theme for the epub output. Since the default themes are not -# optimized for small screen space, using the same theme for HTML and epub -# output is usually not wise. This defaults to 'epub', a theme designed to save -# visual space. -#epub_theme = 'epub' - -# The language of the text. It defaults to the language option -# or 'en' if the language is not set. -#epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. -#epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -#epub_identifier = '' - -# A unique identification for the text. -#epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. -#epub_cover = () - -# A sequence of (type, uri, title) tuples for the guide element of content.opf. -#epub_guide = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_pre_files = [] - -# HTML files that should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_post_files = [] - -# A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] - -# The depth of the table of contents in toc.ncx. -#epub_tocdepth = 3 - -# Allow duplicate toc entries. -#epub_tocdup = True - -# Choose between 'default' and 'includehidden'. -#epub_tocscope = 'default' - -# Fix unsupported image types using the Pillow. -#epub_fix_images = False - -# Scale large images. -#epub_max_image_width = 0 - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#epub_show_urls = 'inline' - -# If false, no index is generated. 
-#epub_use_index = True diff -Nru python-pykmip-0.7.0/docs/index.rst python-pykmip-0.8.0/docs/index.rst --- python-pykmip-0.7.0/docs/index.rst 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/docs/index.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -.. PyKMIP documentation master file, created by - sphinx-quickstart on Mon Jan 25 17:12:29 2016. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to PyKMIP's documentation! -================================== - -Contents: - -.. toctree:: - :maxdepth: 2 - - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - diff -Nru python-pykmip-0.7.0/docs/make.bat python-pykmip-0.8.0/docs/make.bat --- python-pykmip-0.7.0/docs/make.bat 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/docs/make.bat 1970-01-01 00:00:00.000000000 +0000 @@ -1,263 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. 
man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - echo. coverage to run coverage check of the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -REM Check if sphinx-build is available and fallback to Python version if any -%SPHINXBUILD% 1>NUL 2>NUL -if errorlevel 9009 goto sphinx_python -goto sphinx_ok - -:sphinx_python - -set SPHINXBUILD=python -m sphinx.__init__ -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -:sphinx_ok - - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 
- goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\PyKMIP.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\PyKMIP.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %~dp0 - echo. 
- echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "coverage" ( - %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage - if errorlevel 1 exit /b 1 - echo. - echo.Testing of coverage in the sources finished, look at the ^ -results in %BUILDDIR%/coverage/python.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. 
- goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. - goto end -) - -:end diff -Nru python-pykmip-0.7.0/examples/policy.json python-pykmip-0.8.0/examples/policy.json --- python-pykmip-0.7.0/examples/policy.json 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/examples/policy.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,166 +0,0 @@ -{ - "example": { - "CERTIFICATE": { - "LOCATE": "ALLOW_ALL", - "CHECK": "ALLOW_ALL", - "GET": "ALLOW_ALL", - "GET_ATTRIBUTES": "ALLOW_ALL", - "GET_ATTRIBUTE_LIST": "ALLOW_ALL", - "ADD_ATTRIBUTE": "ALLOW_OWNER", - "MODIFY_ATTRIBUTE": "ALLOW_OWNER", - "DELETE_ATTRIBUTE": "ALLOW_OWNER", - "OBTAIN_LEASE": "ALLOW_ALL", - "ACTIVATE": "ALLOW_OWNER", - "REVOKE": "ALLOW_OWNER", - "DESTROY": "ALLOW_OWNER", - "ARCHIVE": "ALLOW_OWNER", - "RECOVER": "ALLOW_OWNER" - }, - "SYMMETRIC_KEY": { - "REKEY": "ALLOW_OWNER", - "REKEY_KEY_PAIR": "ALLOW_OWNER", - "DERIVE_KEY": "ALLOW_OWNER", - "LOCATE": "ALLOW_OWNER", - "CHECK": "ALLOW_OWNER", - "GET": "ALLOW_OWNER", - "GET_ATTRIBUTES": "ALLOW_OWNER", - "GET_ATTRIBUTE_LIST": "ALLOW_OWNER", - "ADD_ATTRIBUTE": "ALLOW_OWNER", - "MODIFY_ATTRIBUTE": "ALLOW_OWNER", - "DELETE_ATTRIBUTE": "ALLOW_OWNER", - "OBTAIN_LEASE": "ALLOW_OWNER", - "GET_USAGE_ALLOCATION": "ALLOW_OWNER", - "ACTIVATE": "ALLOW_OWNER", - "REVOKE": "ALLOW_OWNER", - "DESTROY": "ALLOW_OWNER", - "ARCHIVE": "ALLOW_OWNER", - "RECOVER": "ALLOW_OWNER" - }, - "PUBLIC_KEY": { - "LOCATE": "ALLOW_ALL", - "CHECK": "ALLOW_ALL", - "GET": "ALLOW_ALL", - "GET_ATTRIBUTES": "ALLOW_ALL", - "GET_ATTRIBUTE_LIST": "ALLOW_ALL", - "ADD_ATTRIBUTE": "ALLOW_OWNER", - "MODIFY_ATTRIBUTE": "ALLOW_OWNER", - "DELETE_ATTRIBUTE": "ALLOW_OWNER", - "OBTAIN_LEASE": "ALLOW_ALL", - "ACTIVATE": "ALLOW_OWNER", - "REVOKE": "ALLOW_OWNER", - "DESTROY": "ALLOW_OWNER", - "ARCHIVE": "ALLOW_OWNER", - "RECOVER": "ALLOW_OWNER" 
- }, - "PRIVATE_KEY": { - "REKEY": "ALLOW_OWNER", - "REKEY_KEY_PAIR": "ALLOW_OWNER", - "DERIVE_KEY": "ALLOW_OWNER", - "LOCATE": "ALLOW_OWNER", - "CHECK": "ALLOW_OWNER", - "GET": "ALLOW_OWNER", - "GET_ATTRIBUTES": "ALLOW_OWNER", - "GET_ATTRIBUTE_LIST": "ALLOW_OWNER", - "ADD_ATTRIBUTE": "ALLOW_OWNER", - "MODIFY_ATTRIBUTE": "ALLOW_OWNER", - "DELETE_ATTRIBUTE": "ALLOW_OWNER", - "OBTAIN_LEASE": "ALLOW_OWNER", - "GET_USAGE_ALLOCATION": "ALLOW_OWNER", - "ACTIVATE": "ALLOW_OWNER", - "REVOKE": "ALLOW_OWNER", - "DESTROY": "ALLOW_OWNER", - "ARCHIVE": "ALLOW_OWNER", - "RECOVER": "ALLOW_OWNER" - }, - "SPLIT_KEY": { - "REKEY": "ALLOW_OWNER", - "REKEY_KEY_PAIR": "ALLOW_OWNER", - "DERIVE_KEY": "ALLOW_OWNER", - "LOCATE": "ALLOW_OWNER", - "CHECK": "ALLOW_OWNER", - "GET": "ALLOW_OWNER", - "GET_ATTRIBUTES": "ALLOW_OWNER", - "GET_ATTRIBUTE_LIST": "ALLOW_OWNER", - "ADD_ATTRIBUTE": "ALLOW_OWNER", - "MODIFY_ATTRIBUTE": "ALLOW_OWNER", - "DELETE_ATTRIBUTE": "ALLOW_OWNER", - "OBTAIN_LEASE": "ALLOW_OWNER", - "GET_USAGE_ALLOCATION": "ALLOW_OWNER", - "ACTIVATE": "ALLOW_OWNER", - "REVOKE": "ALLOW_OWNER", - "DESTROY": "ALLOW_OWNER", - "ARCHIVE": "ALLOW_OWNER", - "RECOVER": "ALLOW_OWNER" - }, - "TEMPLATE": { - "LOCATE": "ALLOW_OWNER", - "GET": "ALLOW_OWNER", - "GET_ATTRIBUTES": "ALLOW_OWNER", - "GET_ATTRIBUTE_LIST": "ALLOW_OWNER", - "ADD_ATTRIBUTE": "ALLOW_OWNER", - "MODIFY_ATTRIBUTE": "ALLOW_OWNER", - "DELETE_ATTRIBUTE": "ALLOW_OWNER", - "DESTROY": "ALLOW_OWNER" - }, - "SECRET_DATA": { - "REKEY": "ALLOW_OWNER", - "REKEY_KEY_PAIR": "ALLOW_OWNER", - "DERIVE_KEY": "ALLOW_OWNER", - "LOCATE": "ALLOW_OWNER", - "CHECK": "ALLOW_OWNER", - "GET": "ALLOW_OWNER", - "GET_ATTRIBUTES": "ALLOW_OWNER", - "GET_ATTRIBUTE_LIST": "ALLOW_OWNER", - "ADD_ATTRIBUTE": "ALLOW_OWNER", - "MODIFY_ATTRIBUTE": "ALLOW_OWNER", - "DELETE_ATTRIBUTE": "ALLOW_OWNER", - "OBTAIN_LEASE": "ALLOW_OWNER", - "GET_USAGE_ALLOCATION": "ALLOW_OWNER", - "ACTIVATE": "ALLOW_OWNER", - "REVOKE": "ALLOW_OWNER", - "DESTROY": "ALLOW_OWNER", - 
"ARCHIVE": "ALLOW_OWNER", - "RECOVER": "ALLOW_OWNER" - }, - "OPAQUE_DATA": { - "REKEY": "ALLOW_OWNER", - "REKEY_KEY_PAIR": "ALLOW_OWNER", - "DERIVE_KEY": "ALLOW_OWNER", - "LOCATE": "ALLOW_OWNER", - "CHECK": "ALLOW_OWNER", - "GET": "ALLOW_OWNER", - "GET_ATTRIBUTES": "ALLOW_OWNER", - "GET_ATTRIBUTE_LIST": "ALLOW_OWNER", - "ADD_ATTRIBUTE": "ALLOW_OWNER", - "MODIFY_ATTRIBUTE": "ALLOW_OWNER", - "DELETE_ATTRIBUTE": "ALLOW_OWNER", - "OBTAIN_LEASE": "ALLOW_OWNER", - "GET_USAGE_ALLOCATION": "ALLOW_OWNER", - "ACTIVATE": "ALLOW_OWNER", - "REVOKE": "ALLOW_OWNER", - "DESTROY": "ALLOW_OWNER", - "ARCHIVE": "ALLOW_OWNER", - "RECOVER": "ALLOW_OWNER" - }, - "PGP_KEY": { - "REKEY": "ALLOW_OWNER", - "REKEY_KEY_PAIR": "ALLOW_OWNER", - "DERIVE_KEY": "ALLOW_OWNER", - "LOCATE": "ALLOW_OWNER", - "CHECK": "ALLOW_OWNER", - "GET": "ALLOW_OWNER", - "GET_ATTRIBUTES": "ALLOW_OWNER", - "GET_ATTRIBUTE_LIST": "ALLOW_OWNER", - "ADD_ATTRIBUTE": "ALLOW_OWNER", - "MODIFY_ATTRIBUTE": "ALLOW_OWNER", - "DELETE_ATTRIBUTE": "ALLOW_OWNER", - "OBTAIN_LEASE": "ALLOW_OWNER", - "GET_USAGE_ALLOCATION": "ALLOW_OWNER", - "ACTIVATE": "ALLOW_OWNER", - "REVOKE": "ALLOW_OWNER", - "DESTROY": "ALLOW_OWNER", - "ARCHIVE": "ALLOW_OWNER", - "RECOVER": "ALLOW_OWNER" - } - } -} diff -Nru python-pykmip-0.7.0/examples/pykmip.conf python-pykmip-0.8.0/examples/pykmip.conf --- python-pykmip-0.7.0/examples/pykmip.conf 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/examples/pykmip.conf 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -[client] -host=127.0.0.1 -port=5696 -keyfile=/etc/pykmip/certs/client_private_key.pem -certfile=/etc/pykmip/certs/client_cert.pem -cert_reqs=CERT_REQUIRED -ssl_version=PROTOCOL_SSLv23 -ca_certs=/etc/pykmip/certs/server_ca_cert.pem -do_handshake_on_connect=True -suppress_ragged_eofs=True -username=example_username -password=example_password diff -Nru python-pykmip-0.7.0/examples/server.conf python-pykmip-0.8.0/examples/server.conf --- python-pykmip-0.7.0/examples/server.conf 2017-11-14 
06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/examples/server.conf 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -[server] -hostname=127.0.0.1 -port=5696 -certificate_path=/etc/pykmip/certs/server_cert.pem -key_path=/etc/pykmip/certs/server_private_key.pem -ca_path=/etc/pykmip/certs/server_ca_cert.pem -auth_suite=Basic -policy_path=/etc/pykmip/policies -enable_tls_client_auth=True -tls_cipher_suites= - EXAMPLE_CIPHER_SUITE_1 - EXAMPLE_CIPHER_SUITE_2 - EXAMPLE_CIPHER_SUITE_3 -logging_level=INFO diff -Nru python-pykmip-0.7.0/.gitignore python-pykmip-0.8.0/.gitignore --- python-pykmip-0.7.0/.gitignore 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/.gitignore 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -.project -.pydevproject -*.pyc - -.cache/ -.coverage -.idea/ -.tox/ -ChangeLog -PyKMIP.egg-info/ -dist/ -htmlcov/ -tags -build/ diff -Nru python-pykmip-0.7.0/kmip/core/attributes.py python-pykmip-0.8.0/kmip/core/attributes.py --- python-pykmip-0.7.0/kmip/core/attributes.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/attributes.py 2017-12-08 17:36:18.000000000 +0000 @@ -16,13 +16,12 @@ import six from kmip.core import enums +from kmip.core import exceptions from kmip.core.enums import HashingAlgorithm as HashingAlgorithmEnum from kmip.core.enums import KeyFormatType as KeyFormatTypeEnum from kmip.core.enums import Tags -from kmip.core.errors import ErrorStrings - from kmip.core.misc import KeyFormatType from kmip.core.primitives import Boolean @@ -138,7 +137,7 @@ def __validate(self): name = Name.__name__ - msg = ErrorStrings.BAD_EXP_RECV + msg = exceptions.ErrorStrings.BAD_EXP_RECV if self.name_value and \ not isinstance(self.name_value, Name.NameValue) and \ not isinstance(self.name_value, str): @@ -165,7 +164,7 @@ value = cls.NameValue(name_value) else: name = 'Name' - msg = ErrorStrings.BAD_EXP_RECV + msg = exceptions.ErrorStrings.BAD_EXP_RECV member = 'name_value' raise 
TypeError(msg.format('{0}.{1}'.format(name, member), 'name_value', type(Name.NameValue), @@ -177,7 +176,7 @@ n_type = cls.NameType(name_type) else: name = 'Name' - msg = ErrorStrings.BAD_EXP_RECV + msg = exceptions.ErrorStrings.BAD_EXP_RECV member = 'name_type' raise TypeError(msg.format('{0}.{1}'.format(name, member), 'name_type', type(Name.NameType), diff -Nru python-pykmip-0.7.0/kmip/core/config_helper.py python-pykmip-0.8.0/kmip/core/config_helper.py --- python-pykmip-0.7.0/kmip/core/config_helper.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/config_helper.py 2018-04-17 20:03:33.000000000 +0000 @@ -45,15 +45,20 @@ # Timeout measured in seconds DEFAULT_TIMEOUT = 30 - def __init__(self): + def __init__(self, path=None): self.logger = logging.getLogger(__name__) self.conf = SafeConfigParser() - if self.conf.read(CONFIG_FILE): - self.logger.debug("Using config file at {0}".format(CONFIG_FILE)) + + filenames = path + if not path: + filenames = CONFIG_FILE + + if self.conf.read(filenames): + self.logger.debug("Using config file at {0}".format(filenames)) else: self.logger.warning( - "Config file {0} not found".format(CONFIG_FILE)) + "Config file {0} not found".format(filenames)) def get_valid_value(self, direct_value, config_section, config_option_name, default_value): diff -Nru python-pykmip-0.7.0/kmip/core/errors.py python-pykmip-0.8.0/kmip/core/errors.py --- python-pykmip-0.7.0/kmip/core/errors.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/errors.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,130 +0,0 @@ -# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -class ErrorStrings: - BAD_EXP_RECV = "Bad {0} {1}: expected {2}, received {3}" - BAD_ENCODING = "Bad {0} {1}: encoding mismatch" - - -class BaseError(Exception): - """Base class for exceptions defined in this module.""" - - def __init__(self, args): - [setattr(self, k, v) for k, v in args.items() if k is not 'self'] - - -class KMIPServerError(BaseError): - """Base Exception for KMIP server errors.""" - def __init__(self, args): - super(KMIPServerError, self).__init__(args) - - -class KMIPServerZombieError(KMIPServerError): - """KMIP server error for hung and persistent live KMIP servers.""" - def __init__(self, pid): - message = 'KMIP server alive after termination: PID {0}'.format(pid) - super(KMIPServerZombieError, self).__init__({'message': message}) - - def __str__(self): - return self.message - - -class KMIPServerSuicideError(KMIPServerError): - """KMIP server error for prematurely dead KMIP servers.""" - def __init__(self, pid): - message = 'KMIP server dead prematurely: PID {0}'.format(pid) - super(KMIPServerSuicideError, self).__init__({'message': message}) - - def __str__(self): - return self.message - - -class InitError(BaseError): - """Exception thrown for bad initializations.""" - def __init__(self, cls, exp, recv): - super(InitError, self).__init__(locals()) - - def __str__(self): - msg = "Tried to initialize {0} instance with bad type: " - msg += "expected {1}, received {2}" - return msg.format(self.cls, self.exp, self.recv) - - -class WriteValueError(BaseError): - def __init__(self, cls, attr, value): - super(WriteValueError, 
self).__init__(locals()) - - def __str__(self): - msg = "Tried to write {0}.{1} with invalid value: {2}" - return msg.format(self.cls, self.attr, self.value) - - -class WriteTypeError(BaseError): - def __init__(self, cls, attr, value): - super(WriteTypeError, self).__init__(locals()) - - def __str__(self): - msg = "Tried to write {0}.{1} with invalid type: {2}" - return msg.format(self.cls, self.attr, self.value) - - -class WriteOverflowError(BaseError): - def __init__(self, cls, attr, exp, recv): - super(WriteOverflowError, self).__init__(locals()) - - def __str__(self): - msg = "Tried to write {0}.{1} with too many bytes: " - msg += "expected {2}, received {3}" - return msg.format(self.cls, self.attr, self.exp, self.recv) - - -class ReadValueError(BaseError): - def __init__(self, cls, attr, exp, recv): - super(ReadValueError, self).__init__(locals()) - - def __str__(self): - msg = "Tried to read {0}.{1}: expected {2}, received {3}" - return msg.format(self.cls, self.attr, self.exp, self.recv) - - -class InvalidLengthError(ValueError): - def __init__(self, cls, exp, recv): - msg = "Invalid length read for {0}: expected {1}, received {2}" - super(InvalidLengthError, self).__init__(msg.format(cls, exp, recv)) - - -class StreamNotEmptyError(BaseError): - def __init__(self, cls, extra): - super(StreamNotEmptyError, self).__init__(locals()) - - def __str__(self): - msg = "Invalid length used to read {0}, bytes remaining: {1}" - return msg.format(self.cls, self.extra) - - -class StateTypeError(TypeError): - def __init__(self, cls, exp, recv): - msg = "Tried to initialize {0} instance with bad type: " - msg += "expected {1}, received {2}" - super(StateTypeError, self).__init__(msg.format(cls, exp, recv)) - - -class StateOverflowError(ValueError): - def __init__(self, cls, attr, exp, recv): - msg = "Tried to write {0}.{1} with too many bytes: " - msg += "expected {2}, received {3}" - super(StateOverflowError, self).__init__(msg.format(cls, attr, exp, - recv)) diff -Nru 
python-pykmip-0.7.0/kmip/core/exceptions.py python-pykmip-0.8.0/kmip/core/exceptions.py --- python-pykmip-0.7.0/kmip/core/exceptions.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/exceptions.py 2018-04-12 06:42:35.000000000 +0000 @@ -38,6 +38,25 @@ self.status = status self.reason = reason + def __eq__(self, other): + if isinstance(other, KmipError): + if str(self) != str(other): + return False + elif self.status != other.status: + return False + elif self.reason != other.reason: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, KmipError): + return self == other + else: + return NotImplemented + class CryptographicFailure(KmipError): """ @@ -290,3 +309,79 @@ lengths. """ pass + + +class ShutdownError(Exception): + """ + An error generated when a problem occurs with shutting down the server. + """ + + +class StreamNotEmptyError(Exception): + def __init__(self, cls, extra): + super(StreamNotEmptyError, self).__init__() + + self.cls = cls + self.extra = extra + + def __str__(self): + msg = "Invalid length used to read {0}, bytes remaining: {1}" + return msg.format(self.cls, self.extra) + + +class ReadValueError(Exception): + def __init__(self, cls, attr, exp, recv): + super(ReadValueError, self).__init__() + + self.cls = cls + self.attr = attr + self.exp = exp + self.recv = recv + + def __str__(self): + msg = "Tried to read {0}.{1}: expected {2}, received {3}" + return msg.format(self.cls, self.attr, self.exp, self.recv) + + +class WriteOverflowError(Exception): + def __init__(self, cls, attr, exp, recv): + super(WriteOverflowError, self).__init__() + + self.cls = cls + self.attr = attr + self.exp = exp + self.recv = recv + + def __str__(self): + msg = "Tried to write {0}.{1} with too many bytes: " + msg += "expected {2}, received {3}" + return msg.format(self.cls, self.attr, self.exp, self.recv) + + +class KMIPServerZombieError(Exception): + """KMIP server error for hung 
and persistent live KMIP servers.""" + def __init__(self, pid): + super(KMIPServerZombieError, self).__init__() + + self.message = 'KMIP server alive after termination: PID {0}'.format( + pid + ) + + def __str__(self): + return self.message + + +class KMIPServerSuicideError(Exception): + """KMIP server error for prematurely dead KMIP servers.""" + def __init__(self, pid): + super(KMIPServerSuicideError, self).__init__() + + self.message = 'KMIP server dead prematurely: PID {0}'.format(pid) + + def __str__(self): + return self.message + + +class ErrorStrings: + BAD_EXP_RECV = "Bad {0} {1}: expected {2}, received {3}" + BAD_ENCODING = "Bad {0} {1}: encoding mismatch" diff -Nru python-pykmip-0.7.0/kmip/core/factories/credentials.py python-pykmip-0.8.0/kmip/core/factories/credentials.py --- python-pykmip-0.7.0/kmip/core/factories/credentials.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/factories/credentials.py 2018-04-02 17:12:18.000000000 +0000 @@ -13,43 +13,41 @@ # License for the specific language governing permissions and limitations # under the License. 
-from kmip.core.enums import CredentialType - -from kmip.core.objects import Credential +from kmip.core import enums +from kmip.core import objects class CredentialFactory(object): - def __init__(self): - pass - - def _create_credential(self, credential_type, credential_value): - credential_type = Credential.CredentialType(credential_type) - return Credential(credential_type=credential_type, - credential_value=credential_value) - def create_credential(self, cred_type, value): + def create_credential(self, credential_type, credential_value): # Switch on the type of the credential - if cred_type is CredentialType.USERNAME_AND_PASSWORD: - value = self._create_username_password_credential(value) - elif cred_type is CredentialType.DEVICE: - value = self._create_device_credential(value) + if credential_type is enums.CredentialType.USERNAME_AND_PASSWORD: + credential_value = self.create_username_password_credential( + credential_value + ) + elif credential_type is enums.CredentialType.DEVICE: + credential_value = self.create_device_credential(credential_value) else: msg = 'Unrecognized credential type: {0}' - raise ValueError(msg.format(cred_type)) + raise ValueError(msg.format(credential_type)) - return self._create_credential(cred_type, value) + return objects.Credential( + credential_type=credential_type, + credential_value=credential_value + ) - def _create_username_password_credential(self, value): + @staticmethod + def create_username_password_credential(value): username = value.get('Username') password = value.get('Password') - username = Credential.UsernamePasswordCredential.Username(username) - password = Credential.UsernamePasswordCredential.Password(password) - - return Credential.UsernamePasswordCredential(username=username, - password=password) + return objects.UsernamePasswordCredential( + username=username, + password=password + ) - def _create_device_credential(self, value): + @staticmethod + def create_device_credential(value): dsn = value.get('Device 
Serial Number') password = value.get('Password') dev_id = value.get('Device Identifier') @@ -57,16 +55,11 @@ mach_id = value.get('Machine Identifier') med_id = value.get('Media Identifier') - dsn = Credential.DeviceCredential.DeviceSerialNumber(dsn) - password = Credential.DeviceCredential.Password(password) - dev_id = Credential.DeviceCredential.DeviceIdentifier(dev_id) - net_id = Credential.DeviceCredential.NetworkIdentifier(net_id) - mach_id = Credential.DeviceCredential.MachineIdentifier(mach_id) - med_id = Credential.DeviceCredential.MediaIdentifier(med_id) - - return Credential.DeviceCredential(device_serial_number=dsn, - password=password, - device_identifier=dev_id, - network_identifier=net_id, - machine_identifier=mach_id, - media_identifier=med_id) + return objects.DeviceCredential( + device_serial_number=dsn, + password=password, + device_identifier=dev_id, + network_identifier=net_id, + machine_identifier=mach_id, + media_identifier=med_id + ) diff -Nru python-pykmip-0.7.0/kmip/core/factories/keys.py python-pykmip-0.8.0/kmip/core/factories/keys.py --- python-pykmip-0.7.0/kmip/core/factories/keys.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/factories/keys.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,132 +0,0 @@ -# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from kmip.core.enums import KeyFormatType - -from kmip.core.keys import ECPrivateKey -from kmip.core.keys import OpaqueKey -from kmip.core.keys import PKCS1Key -from kmip.core.keys import PKCS8Key -from kmip.core.keys import RawKey -from kmip.core.keys import TransparentSymmetricKey -from kmip.core.keys import X509Key - - -class KeyFactory(object): - - def create_key(self, key_format, value=None): - if value is None: - value = {} - - # Switch on the format type of the key - if key_format is KeyFormatType.RAW: - return self._create_raw_key(value) - elif key_format is KeyFormatType.OPAQUE: - return self._create_opaque_key() - elif key_format is KeyFormatType.PKCS_1: - return self._create_pkcs_1_key() - elif key_format is KeyFormatType.PKCS_8: - return self._create_pkcs_8_key() - elif key_format is KeyFormatType.X_509: - return self._create_x_509_key() - elif key_format is KeyFormatType.EC_PRIVATE_KEY: - return self._create_ec_private_key() - elif key_format is KeyFormatType.TRANSPARENT_SYMMETRIC_KEY: - return self._create_transparent_symmetric_key() - elif key_format is KeyFormatType.TRANSPARENT_DSA_PRIVATE_KEY: - return self._create_transparent_dsa_private_key(value) - elif key_format is KeyFormatType.TRANSPARENT_DSA_PUBLIC_KEY: - return self._create_transparent_dsa_public_key(value) - elif key_format is KeyFormatType.TRANSPARENT_RSA_PRIVATE_KEY: - return self._create_transparent_rsa_private_key(value) - elif key_format is KeyFormatType.TRANSPARENT_RSA_PUBLIC_KEY: - return self._create_transparent_rsa_public_key(value) - elif key_format is KeyFormatType.TRANSPARENT_DH_PRIVATE_KEY: - return self._create_transparent_dh_private_key(value) - elif key_format is KeyFormatType.TRANSPARENT_DH_PUBLIC_KEY: - return self._create_transparent_dh_public_key(value) - elif key_format is KeyFormatType.TRANSPARENT_ECDSA_PRIVATE_KEY: - return self._create_transparent_ecdsa_private_key(value) - elif key_format is KeyFormatType.TRANSPARENT_ECDSA_PUBLIC_KEY: - return 
self._create_transparent_ecdsa_public_key(value) - elif key_format is KeyFormatType.TRANSPARENT_ECDH_PRIVATE_KEY: - return self._create_transparent_ecdh_private_key(value) - elif key_format is KeyFormatType.TRANSPARENT_ECDH_PUBLIC_KEY: - return self._create_transparent_ecdh_public_key(value) - elif key_format is KeyFormatType.TRANSPARENT_ECMQV_PRIVATE_KEY: - return self._create_transparent_ecmqv_private_key(value) - elif key_format is KeyFormatType.TRANSPARENT_ECMQV_PUBLIC_KEY: - return self._create_transparent_ecmqv_public_key(value) - else: - msg = 'Unrecognized key format type: {0}' - raise ValueError(msg.format(key_format)) - - def _create_raw_key(self, value): - data = value.get('bytes') - return RawKey(data) - - def _create_opaque_key(self): - return OpaqueKey() - - def _create_pkcs_1_key(self): - return PKCS1Key() - - def _create_pkcs_8_key(self): - return PKCS8Key() - - def _create_x_509_key(self): - return X509Key() - - def _create_ec_private_key(self): - return ECPrivateKey() - - def _create_transparent_symmetric_key(self): - return TransparentSymmetricKey() - - def _create_transparent_dsa_private_key(self, value): - raise NotImplementedError() - - def _create_transparent_dsa_public_key(self, value): - raise NotImplementedError() - - def _create_transparent_rsa_private_key(self, value): - raise NotImplementedError() - - def _create_transparent_rsa_public_key(self, value): - raise NotImplementedError() - - def _create_transparent_dh_private_key(self, value): - raise NotImplementedError() - - def _create_transparent_dh_public_key(self, value): - raise NotImplementedError() - - def _create_transparent_ecdsa_private_key(self, value): - raise NotImplementedError() - - def _create_transparent_ecdsa_public_key(self, value): - raise NotImplementedError() - - def _create_transparent_ecdh_private_key(self, value): - raise NotImplementedError() - - def _create_transparent_ecdh_public_key(self, value): - raise NotImplementedError() - - def 
_create_transparent_ecmqv_private_key(self, value): - raise NotImplementedError() - - def _create_transparent_ecmqv_public_key(self, value): - raise NotImplementedError() diff -Nru python-pykmip-0.7.0/kmip/core/factories/payloads/request.py python-pykmip-0.8.0/kmip/core/factories/payloads/request.py --- python-pykmip-0.7.0/kmip/core/factories/payloads/request.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/factories/payloads/request.py 2018-04-17 13:06:07.000000000 +0000 @@ -31,12 +31,18 @@ def _create_derive_key_payload(self): return payloads.DeriveKeyRequestPayload() + def _create_rekey_payload(self): + return payloads.RekeyRequestPayload() + def _create_rekey_key_pair_payload(self): return payloads.RekeyKeyPairRequestPayload() def _create_locate_payload(self): return payloads.LocateRequestPayload() + def _create_check_payload(self): + return payloads.CheckRequestPayload() + def _create_get_payload(self): return payloads.GetRequestPayload() diff -Nru python-pykmip-0.7.0/kmip/core/factories/payloads/response.py python-pykmip-0.8.0/kmip/core/factories/payloads/response.py --- python-pykmip-0.7.0/kmip/core/factories/payloads/response.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/factories/payloads/response.py 2018-04-17 13:06:07.000000000 +0000 @@ -31,12 +31,18 @@ def _create_derive_key_payload(self): return payloads.DeriveKeyResponsePayload() + def _create_rekey_payload(self): + return payloads.RekeyResponsePayload() + def _create_rekey_key_pair_payload(self): return payloads.RekeyKeyPairResponsePayload() def _create_locate_payload(self): return payloads.LocateResponsePayload() + def _create_check_payload(self): + return payloads.CheckResponsePayload() + def _create_get_payload(self): return payloads.GetResponsePayload() diff -Nru python-pykmip-0.7.0/kmip/core/factories/secrets.py python-pykmip-0.8.0/kmip/core/factories/secrets.py --- python-pykmip-0.7.0/kmip/core/factories/secrets.py 2017-11-14 06:30:49.000000000 
+0000 +++ python-pykmip-0.8.0/kmip/core/factories/secrets.py 2017-12-08 17:36:18.000000000 +0000 @@ -13,13 +13,10 @@ # License for the specific language governing permissions and limitations # under the License. -from kmip.core.factories.keys import KeyFactory - from kmip.core.attributes import CryptographicAlgorithm from kmip.core.attributes import CryptographicLength from kmip.core.enums import ObjectType -from kmip.core.errors import ErrorStrings from kmip.core.misc import KeyFormatType @@ -38,14 +35,13 @@ from kmip.core.secrets import Template from kmip.core import utils +from kmip.core import exceptions class SecretFactory(object): def __init__(self): - self.key_factory = KeyFactory() - - self.base_error = ErrorStrings.BAD_EXP_RECV + self.base_error = exceptions.ErrorStrings.BAD_EXP_RECV self.template_input = self.base_error.format('Template', '{0}', '{1}', '{2}') diff -Nru python-pykmip-0.7.0/kmip/core/keys.py python-pykmip-0.8.0/kmip/core/keys.py --- python-pykmip-0.7.0/kmip/core/keys.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/keys.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,102 +0,0 @@ -# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This module defines classes representing all of the different key types -# used by KMIP, including the more detailed structures of the Transparent -# Keys defined in Section 2.1.7. 
- -from kmip.core.enums import Tags - -from kmip.core.primitives import Struct -from kmip.core.primitives import ByteString - -from kmip.core.utils import BytearrayStream - - -class RawKey(ByteString): - - def __init__(self, value=None): - super(RawKey, self).__init__(value, Tags.KEY_MATERIAL) - - -class OpaqueKey(ByteString): - - def __init__(self, value=None): - super(OpaqueKey, self).__init__(value, Tags.KEY_MATERIAL) - - -class PKCS1Key(ByteString): - - def __init__(self, value=None): - super(PKCS1Key, self).__init__(value, Tags.KEY_MATERIAL) - - -class PKCS8Key(ByteString): - - def __init__(self, value=None): - super(PKCS8Key, self).__init__(value, Tags.KEY_MATERIAL) - - -class X509Key(ByteString): - - def __init__(self, value=None): - super(X509Key, self).__init__(value, Tags.KEY_MATERIAL) - - -class ECPrivateKey(ByteString): - - def __init__(self, value=None): - super(ECPrivateKey, self).__init__(value, Tags.KEY_MATERIAL) - - -# 2.1.7.1 -class TransparentSymmetricKey(Struct): - - class Key(ByteString): - - def __init__(self, value=None): - super(TransparentSymmetricKey.Key, self).__init__(value, Tags.KEY) - - def __init__(self, key=None): - super(TransparentSymmetricKey, self).__init__(Tags.KEY_MATERIAL) - self.key = key - self.validate() - - def read(self, istream): - super(TransparentSymmetricKey, self).read(istream) - tstream = BytearrayStream(istream.read(self.length)) - - self.key = TransparentSymmetricKey.Key() - self.key.read(tstream) - - self.is_oversized(tstream) - self.validate() - - def write(self, ostream): - tstream = BytearrayStream() - - self.key.write(tstream) - - # Write the length and value of the key wrapping data - self.length = tstream.length() - super(TransparentSymmetricKey, self).write(ostream) - ostream.write(tstream.buffer) - - def validate(self): - self.__validate() - - def __validate(self): - # TODO (peter-hamilton) Finish implementation. 
- pass diff -Nru python-pykmip-0.7.0/kmip/core/messages/contents.py python-pykmip-0.8.0/kmip/core/messages/contents.py --- python-pykmip-0.7.0/kmip/core/messages/contents.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/messages/contents.py 2018-04-02 17:12:18.000000000 +0000 @@ -13,10 +13,13 @@ # License for the specific language governing permissions and limitations # under the License. +import six + from kmip.core import enums from kmip.core import objects from kmip.core import utils +from kmip.core import primitives from kmip.core.primitives import Struct from kmip.core.primitives import Integer from kmip.core.primitives import Enumeration @@ -26,105 +29,170 @@ from kmip.core.primitives import DateTime -# 6.1 -class ProtocolVersion(Struct): - - class ProtocolVersionMajor(Integer): - def __init__(self, value=None): - super(ProtocolVersion.ProtocolVersionMajor, self).\ - __init__(value, enums.Tags.PROTOCOL_VERSION_MAJOR) - - class ProtocolVersionMinor(Integer): - def __init__(self, value=None): - super(ProtocolVersion.ProtocolVersionMinor, self).\ - __init__(value, enums.Tags.PROTOCOL_VERSION_MINOR) - - def __init__(self, - protocol_version_major=None, - protocol_version_minor=None): +class ProtocolVersion(primitives.Struct): + """ + A struct representing a ProtocolVersion number. + + Attributes: + major: The major protocol version number. + minor: The minor protocol version number. + """ + + def __init__(self, major=None, minor=None): + """ + Construct a ProtocolVersion struct. + + Args: + major (int): The major protocol version number. Optional, defaults + to None. + minor (int): The minor protocol version number. Optional, defaults + to None. 
+ """ super(ProtocolVersion, self).__init__(enums.Tags.PROTOCOL_VERSION) - if protocol_version_major is None: - self.protocol_version_major = \ - ProtocolVersion.ProtocolVersionMajor() - else: - self.protocol_version_major = protocol_version_major - - if protocol_version_minor is None: - self.protocol_version_minor = \ - ProtocolVersion.ProtocolVersionMinor() - else: - self.protocol_version_minor = protocol_version_minor - - self.validate() - - def read(self, istream): - super(ProtocolVersion, self).read(istream) - tstream = utils.BytearrayStream(istream.read(self.length)) - - # Read the major and minor portions of the version number - self.protocol_version_major.read(tstream) - self.protocol_version_minor.read(tstream) - - self.is_oversized(tstream) - - def write(self, ostream): - tstream = utils.BytearrayStream() - - # Write the major and minor portions of the protocol version - self.protocol_version_major.write(tstream) - self.protocol_version_minor.write(tstream) - - # Write the length and value of the protocol version - self.length = tstream.length() - super(ProtocolVersion, self).write(ostream) - ostream.write(tstream.buffer) - - def validate(self): - self.__validate() - - def __validate(self): - if not isinstance(self.protocol_version_major, - ProtocolVersion.ProtocolVersionMajor): - msg = "invalid protocol version major" - msg += "; expected {0}, received {1}".format( - ProtocolVersion.ProtocolVersionMajor, - self.protocol_version_major) - raise TypeError(msg) - - if not isinstance(self.protocol_version_minor, - ProtocolVersion.ProtocolVersionMinor): - msg = "invalid protocol version minor" - msg += "; expected {0}, received {1}".format( - ProtocolVersion.ProtocolVersionMinor, - self.protocol_version_minor) - raise TypeError(msg) + self._major = None + self._minor = None + + self.major = major + self.minor = minor + + @property + def major(self): + if self._major: + return self._major.value + else: + return None + + @major.setter + def major(self, value): + 
if value is None: + self._major = None + elif isinstance(value, six.integer_types): + self._major = primitives.Integer( + value=value, + tag=enums.Tags.PROTOCOL_VERSION_MAJOR + ) + else: + raise TypeError( + "Major protocol version number must be an integer." + ) + + @property + def minor(self): + if self._minor: + return self._minor.value + else: + return None + + @minor.setter + def minor(self, value): + if value is None: + self._minor = None + elif isinstance(value, six.integer_types): + self._minor = primitives.Integer( + value=value, + tag=enums.Tags.PROTOCOL_VERSION_MINOR + ) + else: + raise TypeError( + "Minor protocol version number must be an integer." + ) + + def read(self, input_stream): + """ + Read the data encoding the ProtocolVersion struct and decode it into + its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if either the major or minor protocol versions + are missing from the encoding. + """ + super(ProtocolVersion, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.PROTOCOL_VERSION_MAJOR, local_stream): + self._major = primitives.Integer( + tag=enums.Tags.PROTOCOL_VERSION_MAJOR + ) + self._major.read(local_stream) + else: + raise ValueError( + "Invalid encoding missing the major protocol version number." + ) + + if self.is_tag_next(enums.Tags.PROTOCOL_VERSION_MINOR, local_stream): + self._minor = primitives.Integer( + tag=enums.Tags.PROTOCOL_VERSION_MINOR + ) + self._minor.read(local_stream) + else: + raise ValueError( + "Invalid encoding missing the minor protocol version number." + ) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the ProtocolVersion struct to a stream. 
+ + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is not defined. + """ + local_stream = utils.BytearrayStream() + + if self._major: + self._major.write(local_stream) + else: + raise ValueError( + "Invalid struct missing the major protocol version number." + ) + + if self._minor: + self._minor.write(local_stream) + else: + raise ValueError( + "Invalid struct missing the minor protocol version number." + ) + + self.length = local_stream.length() + super(ProtocolVersion, self).write(output_stream) + output_stream.write(local_stream.buffer) def __eq__(self, other): if isinstance(other, ProtocolVersion): - if ((self.protocol_version_major == - other.protocol_version_major) and - (self.protocol_version_minor == - other.protocol_version_minor)): - return True - else: + if self.major != other.major: + return False + elif self.minor != other.minor: return False + else: + return True else: return NotImplemented def __ne__(self, other): if isinstance(other, ProtocolVersion): - return not self.__eq__(other) + return not (self == other) else: return NotImplemented def __lt__(self, other): if isinstance(other, ProtocolVersion): - if self.protocol_version_major < other.protocol_version_major: + if self.major < other.major: return True - elif self.protocol_version_major > other.protocol_version_major: + elif self.major > other.major: return False - elif self.protocol_version_minor < other.protocol_version_minor: + elif self.minor < other.minor: return True else: return False @@ -133,24 +201,16 @@ def __gt__(self, other): if isinstance(other, ProtocolVersion): - if self.protocol_version_major > other.protocol_version_major: - return True - elif self.protocol_version_major < other.protocol_version_major: + if (self == other) or (self < other): return False - elif self.protocol_version_minor > other.protocol_version_minor: - return 
True else: - return False + return True else: return NotImplemented def __le__(self, other): if isinstance(other, ProtocolVersion): - if self.protocol_version_major < other.protocol_version_major: - return True - elif self.protocol_version_major > other.protocol_version_major: - return False - elif self.protocol_version_minor <= other.protocol_version_minor: + if (self == other) or (self < other): return True else: return False @@ -159,11 +219,7 @@ def __ge__(self, other): if isinstance(other, ProtocolVersion): - if self.protocol_version_major > other.protocol_version_major: - return True - elif self.protocol_version_major < other.protocol_version_major: - return False - elif self.protocol_version_minor >= other.protocol_version_minor: + if (self == other) or (self > other): return True else: return False @@ -171,15 +227,14 @@ return NotImplemented def __repr__(self): - major = self.protocol_version_major.value - minor = self.protocol_version_minor.value - return "{0}.{1}".format(major, minor) - - @classmethod - def create(cls, major, minor): - major = cls.ProtocolVersionMajor(major) - minor = cls.ProtocolVersionMinor(minor) - return ProtocolVersion(major, minor) + args = ", ".join([ + "major={}".format(self.major), + "minor={}".format(self.minor) + ]) + return "ProtocolVersion({})".format(args) + + def __str__(self): + return "{}.{}".format(self.major, self.minor) # 6.2 @@ -210,37 +265,120 @@ super(TimeStamp, self).__init__(value, enums.Tags.TIME_STAMP) -# 6.6 class Authentication(Struct): + """ + A struct representing an Authentication bundle. - def __init__(self, credential=None): + Attributes: + credentials: A list of Credential structs to be used for + authentication. + """ + + def __init__(self, credentials=None): + """ + Construct an Authentication struct. + + Args: + credentials (list): A list of Credential structs to be used for + authentication. Optional, defaults to None. 
+ """ super(Authentication, self).__init__(enums.Tags.AUTHENTICATION) - self.credential = credential - - def read(self, istream): - super(Authentication, self).read(istream) - tstream = utils.BytearrayStream(istream.read(self.length)) - # Read the credential - self.credential = objects.Credential() - self.credential.read(tstream) + self._credentials = [] + self.credentials = credentials - self.is_oversized(tstream) - - def write(self, ostream): - tstream = utils.BytearrayStream() + @property + def credentials(self): + return self._credentials + + @credentials.setter + def credentials(self, value): + if value is None: + self._credentials = [] + elif isinstance(value, list): + credentials = [] + for i in range(len(value)): + credential = value[i] + if not isinstance(credential, objects.Credential): + raise TypeError( + "Credentials must be a list of Credential structs. " + "Item {} has type: {}".format(i + 1, type(credential)) + ) + credentials.append(credential) + self._credentials = credentials + else: + raise TypeError( + "Credentials must be a list of Credential structs." + ) + + def read(self, input_stream): + """ + Read the data encoding the Authentication struct and decode it into + its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + """ + super(Authentication, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + credentials = [] + while self.is_tag_next(enums.Tags.CREDENTIAL, local_stream): + credential = objects.Credential() + credential.read(local_stream) + credentials.append(credential) + if len(credentials) == 0: + raise ValueError("Authentication encoding missing credentials.") + self._credentials = credentials + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the Authentication struct to a stream. 
+ + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + """ + local_stream = utils.BytearrayStream() + + if len(self._credentials) == 0: + raise ValueError("Authentication struct missing credentials.") + for credential in self._credentials: + credential.write(local_stream) + + self.length = local_stream.length() + super(Authentication, self).write(output_stream) + output_stream.write(local_stream.buffer) - # Write the credential - self.credential.write(tstream) + def __eq__(self, other): + if isinstance(other, Authentication): + if self.credentials != other.credentials: + return False + else: + return True + else: + return NotImplemented - # Write the length and value of the protocol version - self.length = tstream.length() - super(Authentication, self).write(ostream) - ostream.write(tstream.buffer) + def __ne__(self, other): + if isinstance(other, Authentication): + return not (self == other) + else: + return NotImplemented - def validate(self): - # TODO (peter-hamilton) Finish implementation. 
- pass + def __repr__(self): + args = ", ".join([ + "credentials={}".format([x for x in self.credentials]) + ]) + return "Authentication({})".format(args) + + def __str__(self): + credentials = ", ".join([str(x) for x in self.credentials]) + return "{'credentials': [" + credentials + "]}" # 6.7 diff -Nru python-pykmip-0.7.0/kmip/core/messages/.gitignore python-pykmip-0.8.0/kmip/core/messages/.gitignore --- python-pykmip-0.7.0/kmip/core/messages/.gitignore 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/messages/.gitignore 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -/test_in.txt -/test_out.txt diff -Nru python-pykmip-0.7.0/kmip/core/messages/messages.py python-pykmip-0.8.0/kmip/core/messages/messages.py --- python-pykmip-0.7.0/kmip/core/messages/messages.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/messages/messages.py 2018-04-02 17:12:18.000000000 +0000 @@ -157,9 +157,6 @@ ostream.write(tstream.buffer) def validate(self): - if self.protocol_version is not None: - # TODO (peter-hamilton) conduct type check - self.protocol_version.validate() if self.time_stamp is not None: # TODO (peter-hamilton) conduct type check self.time_stamp.validate() diff -Nru python-pykmip-0.7.0/kmip/core/messages/payloads/archive.py python-pykmip-0.8.0/kmip/core/messages/payloads/archive.py --- python-pykmip-0.7.0/kmip/core/messages/payloads/archive.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/messages/payloads/archive.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,246 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import six + +from kmip import enums +from kmip.core import primitives +from kmip.core import utils + + +class ArchiveRequestPayload(primitives.Struct): + """ + A request payload for the Archive operation. + + Attributes: + unique_identifier: The unique ID of the object to archive. + """ + + def __init__(self, unique_identifier=None): + """ + Construct an Archive request payload struct. + + Args: + unique_identifier (string): The ID of the managed object (e.g., + a public key) to archive. Optional, defaults to None. + """ + super(ArchiveRequestPayload, self).__init__( + enums.Tags.REQUEST_PAYLOAD + ) + + self._unique_identifier = None + self.unique_identifier = unique_identifier + + @property + def unique_identifier(self): + if self._unique_identifier: + return self._unique_identifier.value + else: + return None + + @unique_identifier.setter + def unique_identifier(self, value): + if value is None: + self._unique_identifier = None + elif isinstance(value, six.string_types): + self._unique_identifier = primitives.TextString( + value=value, + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + else: + raise TypeError("Unique identifier must be a string.") + + def read(self, input_stream): + """ + Read the data encoding the Archive request payload and decode it into + its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is missing from the + encoded payload. 
+ """ + super(ArchiveRequestPayload, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream): + self._unique_identifier = primitives.TextString( + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + self._unique_identifier.read(local_stream) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the Archive request payload to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is not defined. + """ + local_stream = utils.BytearrayStream() + + if self._unique_identifier: + self._unique_identifier.write(local_stream) + + self.length = local_stream.length() + super(ArchiveRequestPayload, self).write(output_stream) + output_stream.write(local_stream.buffer) + + def __eq__(self, other): + if isinstance(other, ArchiveRequestPayload): + if self.unique_identifier != other.unique_identifier: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, ArchiveRequestPayload): + return not (self == other) + else: + return NotImplemented + + def __repr__(self): + args = "unique_identifier='{0}'".format(self.unique_identifier) + return "ArchiveRequestPayload({0})".format(args) + + def __str__(self): + return str({ + 'unique_identifier': self.unique_identifier + }) + + +class ArchiveResponsePayload(primitives.Struct): + """ + A response payload for the Archive operation. + + Attributes: + unique_identifier: The unique ID of the object that was archived. + """ + + def __init__(self, unique_identifier=None): + """ + Construct an Archive response payload struct. + + Args: + unique_identifier (string): The ID of the managed object (e.g., + a public key) that was archived. Optional, defaults to None. 
+ """ + super(ArchiveResponsePayload, self).__init__( + enums.Tags.RESPONSE_PAYLOAD + ) + + self._unique_identifier = None + self.unique_identifier = unique_identifier + + @property + def unique_identifier(self): + if self._unique_identifier: + return self._unique_identifier.value + else: + return None + + @unique_identifier.setter + def unique_identifier(self, value): + if value is None: + self._unique_identifier = None + elif isinstance(value, six.string_types): + self._unique_identifier = primitives.TextString( + value=value, + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + else: + raise TypeError("Unique identifier must be a string.") + + def read(self, input_stream): + """ + Read the data encoding the Archive response payload and decode it + into its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is missing from the + encoded payload. + """ + super(ArchiveResponsePayload, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream): + self._unique_identifier = primitives.TextString( + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + self._unique_identifier.read(local_stream) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the Archive response payload to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is not defined. 
+ """ + local_stream = utils.BytearrayStream() + + if self._unique_identifier: + self._unique_identifier.write(local_stream) + + self.length = local_stream.length() + super(ArchiveResponsePayload, self).write(output_stream) + output_stream.write(local_stream.buffer) + + def __eq__(self, other): + if isinstance(other, ArchiveResponsePayload): + if self.unique_identifier != other.unique_identifier: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, ArchiveResponsePayload): + return not (self == other) + else: + return NotImplemented + + def __repr__(self): + args = "unique_identifier='{0}'".format(self.unique_identifier) + return "ArchiveResponsePayload({0})".format(args) + + def __str__(self): + return str({ + 'unique_identifier': self.unique_identifier, + }) diff -Nru python-pykmip-0.7.0/kmip/core/messages/payloads/cancel.py python-pykmip-0.8.0/kmip/core/messages/payloads/cancel.py --- python-pykmip-0.7.0/kmip/core/messages/payloads/cancel.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/messages/payloads/cancel.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,308 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import six + +from kmip import enums +from kmip.core import primitives +from kmip.core import utils + + +class CancelRequestPayload(primitives.Struct): + """ + A request payload for the Cancel operation. + + Attributes: + asynchronous_correlation_value: The unique ID, in bytes, of the + operation to cancel. + """ + + def __init__(self, asynchronous_correlation_value=None): + """ + Construct a Cancel request payload struct. + + Args: + asynchronous_correlation_value (bytes): The ID of a pending + operation to cancel, in bytes. Optional, defaults to None. + """ + super(CancelRequestPayload, self).__init__( + enums.Tags.REQUEST_PAYLOAD + ) + + self._asynchronous_correlation_value = None + self.asynchronous_correlation_value = asynchronous_correlation_value + + @property + def asynchronous_correlation_value(self): + if self._asynchronous_correlation_value: + return self._asynchronous_correlation_value.value + else: + return None + + @asynchronous_correlation_value.setter + def asynchronous_correlation_value(self, value): + if value is None: + self._asynchronous_correlation_value = None + elif isinstance(value, six.binary_type): + self._asynchronous_correlation_value = primitives.ByteString( + value=value, + tag=enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE + ) + else: + raise TypeError("Asynchronous correlation value must be bytes.") + + def read(self, input_stream): + """ + Read the data encoding the Cancel request payload and decode it into + its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is missing from the + encoded payload. 
+ """ + super(CancelRequestPayload, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next( + enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE, + local_stream + ): + self._asynchronous_correlation_value = primitives.ByteString( + tag=enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE + ) + self._asynchronous_correlation_value.read(local_stream) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the Cancel request payload to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is not defined. + """ + local_stream = utils.BytearrayStream() + + if self._asynchronous_correlation_value: + self._asynchronous_correlation_value.write(local_stream) + + self.length = local_stream.length() + super(CancelRequestPayload, self).write(output_stream) + output_stream.write(local_stream.buffer) + + def __eq__(self, other): + if isinstance(other, CancelRequestPayload): + if self.asynchronous_correlation_value != \ + other.asynchronous_correlation_value: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, CancelRequestPayload): + return not (self == other) + else: + return NotImplemented + + def __repr__(self): + args = "asynchronous_correlation_value={0}".format( + self.asynchronous_correlation_value + ) + return "CancelRequestPayload({0})".format(args) + + def __str__(self): + return str({ + 'asynchronous_correlation_value': + self.asynchronous_correlation_value + }) + + +class CancelResponsePayload(primitives.Struct): + """ + A response payload for the Cancel operation. + + Attributes: + asynchronous_correlation_value: The unique ID, in bytes, of the + operation that was cancelled. + cancellation_result: The result of canceling the operation. 
+ """ + + def __init__(self, + asynchronous_correlation_value=None, + cancellation_result=None): + """ + Construct a Cancel response payload struct. + + Args: + asynchronous_correlation_value (bytes): The ID of a pending + operation that was cancelled, in bytes. Optional, defaults to + None. + cancellation_result (enum): A CancellationResult enumeration + specifying the result of canceling the operation. Optional, + defaults to None. + """ + super(CancelResponsePayload, self).__init__( + enums.Tags.RESPONSE_PAYLOAD + ) + + self._asynchronous_correlation_value = None + self._cancellation_result = None + + self.asynchronous_correlation_value = asynchronous_correlation_value + self.cancellation_result = cancellation_result + + @property + def asynchronous_correlation_value(self): + if self._asynchronous_correlation_value: + return self._asynchronous_correlation_value.value + else: + return None + + @asynchronous_correlation_value.setter + def asynchronous_correlation_value(self, value): + if value is None: + self._asynchronous_correlation_value = None + elif isinstance(value, six.binary_type): + self._asynchronous_correlation_value = primitives.ByteString( + value=value, + tag=enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE + ) + else: + raise TypeError("Asynchronous correlation value must be bytes.") + + @property + def cancellation_result(self): + if self._cancellation_result: + return self._cancellation_result.value + else: + return None + + @cancellation_result.setter + def cancellation_result(self, value): + if value is None: + self._cancellation_result = None + elif isinstance(value, enums.CancellationResult): + self._cancellation_result = primitives.Enumeration( + enums.CancellationResult, + value=value, + tag=enums.Tags.CANCELLATION_RESULT + ) + else: + raise TypeError( + "Cancellation result must be a CancellationResult enumeration." 
+ ) + + def read(self, input_stream): + """ + Read the data encoding the Cancel response payload and decode it into + its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is missing from the + encoded payload. + """ + super(CancelResponsePayload, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next( + enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE, + local_stream + ): + self._asynchronous_correlation_value = primitives.ByteString( + tag=enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE + ) + self._asynchronous_correlation_value.read(local_stream) + if self.is_tag_next(enums.Tags.CANCELLATION_RESULT, local_stream): + self._cancellation_result = primitives.Enumeration( + enums.CancellationResult, + tag=enums.Tags.CANCELLATION_RESULT + ) + self._cancellation_result.read(local_stream) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the Cancel response payload to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is not defined. 
+ """ + local_stream = utils.BytearrayStream() + + if self._asynchronous_correlation_value: + self._asynchronous_correlation_value.write(local_stream) + if self._cancellation_result: + self._cancellation_result.write(local_stream) + + self.length = local_stream.length() + super(CancelResponsePayload, self).write(output_stream) + output_stream.write(local_stream.buffer) + + def __eq__(self, other): + if isinstance(other, CancelResponsePayload): + if self.asynchronous_correlation_value != \ + other.asynchronous_correlation_value: + return False + elif self.cancellation_result != other.cancellation_result: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, CancelResponsePayload): + return not (self == other) + else: + return NotImplemented + + def __repr__(self): + args = ", ".join([ + "asynchronous_correlation_value={0}".format( + self.asynchronous_correlation_value + ), + "cancellation_result={0}".format(self.cancellation_result) + ]) + return "CancelResponsePayload({0})".format(args) + + def __str__(self): + return str({ + 'asynchronous_correlation_value': + self.asynchronous_correlation_value, + 'cancellation_result': self.cancellation_result + }) diff -Nru python-pykmip-0.7.0/kmip/core/messages/payloads/check.py python-pykmip-0.8.0/kmip/core/messages/payloads/check.py --- python-pykmip-0.7.0/kmip/core/messages/payloads/check.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/messages/payloads/check.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,484 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import six + +from kmip import enums +from kmip.core import primitives +from kmip.core import utils + + +class CheckRequestPayload(primitives.Struct): + """ + A request payload for the Check operation. + + Attributes: + unique_identifier: The unique ID of the object to be checked. + usage_limits_count: The number of usage limits units that should be + available on the checked object. + cryptographic_usage_mask: The numeric representation of a set of usage + masks that should be set on the checked object. + lease_time: The date in seconds since the epoch that a lease should be + available for on the checked object. + """ + + def __init__(self, + unique_identifier=None, + usage_limits_count=None, + cryptographic_usage_mask=None, + lease_time=None): + """ + Construct a Check request payload struct. + + Args: + unique_identifier (string): The ID of the managed object (e.g., + a public key) to be checked. Optional, defaults to None. + usage_limits_count (int): The number of usage limits units that + should be available on the checked object. Optional, defaults + to None. + cryptographic_usage_mask (int): The numeric representation of a + set of usage masks that should be set on the checked object. + Optional, defaults to None. + lease_time (int): The date in seconds since the epoch that a + lease should be available for on the checked object. Optional, + defaults to None. 
+ """ + super(CheckRequestPayload, self).__init__(enums.Tags.REQUEST_PAYLOAD) + + self._unique_identifier = None + self._usage_limits_count = None + self._cryptographic_usage_mask = None + self._lease_time = None + + self.unique_identifier = unique_identifier + self.usage_limits_count = usage_limits_count + self.cryptographic_usage_mask = cryptographic_usage_mask + self.lease_time = lease_time + + @property + def unique_identifier(self): + if self._unique_identifier: + return self._unique_identifier.value + else: + return None + + @unique_identifier.setter + def unique_identifier(self, value): + if value is None: + self._unique_identifier = None + elif isinstance(value, six.string_types): + self._unique_identifier = primitives.TextString( + value=value, + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + else: + raise TypeError("Unique identifier must be a string.") + + @property + def usage_limits_count(self): + if self._usage_limits_count: + return self._usage_limits_count.value + else: + return None + + @usage_limits_count.setter + def usage_limits_count(self, value): + if value is None: + self._usage_limits_count = None + elif isinstance(value, six.integer_types): + self._usage_limits_count = primitives.LongInteger( + value=value, + tag=enums.Tags.USAGE_LIMITS_COUNT + ) + else: + raise TypeError("Usage limits count must be an integer.") + + @property + def cryptographic_usage_mask(self): + if self._cryptographic_usage_mask: + return self._cryptographic_usage_mask.value + else: + return None + + @cryptographic_usage_mask.setter + def cryptographic_usage_mask(self, value): + if value is None: + self._cryptographic_usage_mask = None + elif isinstance(value, six.integer_types): + self._cryptographic_usage_mask = primitives.Integer( + value=value, + tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK + ) + else: + raise TypeError("Cryptographic usage mask must be an integer.") + + @property + def lease_time(self): + if self._lease_time: + return self._lease_time.value + else: + return 
None + + @lease_time.setter + def lease_time(self, value): + if value is None: + self._lease_time = None + elif isinstance(value, six.integer_types): + self._lease_time = primitives.Interval( + value=value, + tag=enums.Tags.LEASE_TIME + ) + else: + raise TypeError("Lease time must be an integer.") + + def read(self, input_stream): + """ + Read the data encoding the Check request payload and decode it into + its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is missing from the + encoded payload. + """ + super(CheckRequestPayload, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream): + self._unique_identifier = primitives.TextString( + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + self._unique_identifier.read(local_stream) + if self.is_tag_next(enums.Tags.USAGE_LIMITS_COUNT, local_stream): + self._usage_limits_count = primitives.LongInteger( + tag=enums.Tags.USAGE_LIMITS_COUNT + ) + self._usage_limits_count.read(local_stream) + if self.is_tag_next(enums.Tags.CRYPTOGRAPHIC_USAGE_MASK, local_stream): + self._cryptographic_usage_mask = primitives.Integer( + tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK + ) + self._cryptographic_usage_mask.read(local_stream) + if self.is_tag_next(enums.Tags.LEASE_TIME, local_stream): + self._lease_time = primitives.Interval( + tag=enums.Tags.LEASE_TIME + ) + self._lease_time.read(local_stream) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the Check request payload to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is not defined. 
+ """ + local_stream = utils.BytearrayStream() + + if self._unique_identifier: + self._unique_identifier.write(local_stream) + if self._usage_limits_count: + self._usage_limits_count.write(local_stream) + if self._cryptographic_usage_mask: + self._cryptographic_usage_mask.write(local_stream) + if self._lease_time: + self._lease_time.write(local_stream) + + self.length = local_stream.length() + super(CheckRequestPayload, self).write(output_stream) + output_stream.write(local_stream.buffer) + + def __eq__(self, other): + if isinstance(other, CheckRequestPayload): + if self.unique_identifier != other.unique_identifier: + return False + elif self.usage_limits_count != other.usage_limits_count: + return False + elif self.cryptographic_usage_mask != \ + other.cryptographic_usage_mask: + return False + elif self.lease_time != other.lease_time: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, CheckRequestPayload): + return not (self == other) + else: + return NotImplemented + + def __repr__(self): + args = ", ".join([ + "unique_identifier='{0}'".format(self.unique_identifier), + "usage_limits_count={0}".format(self.usage_limits_count), + "cryptographic_usage_mask={0}".format( + self.cryptographic_usage_mask + ), + "lease_time={0}".format(self.lease_time) + ]) + return "CheckRequestPayload({0})".format(args) + + def __str__(self): + return str({ + 'unique_identifier': self.unique_identifier, + 'usage_limits_count': self.usage_limits_count, + 'cryptographic_usage_mask': self.cryptographic_usage_mask, + 'lease_time': self.lease_time + }) + + +class CheckResponsePayload(primitives.Struct): + """ + A response payload for the Check operation. + + Attributes: + unique_identifier: The unique ID of the object that was checked. + usage_limits_count: The number of usage limits units that should be + available on the checked object. 
+ cryptographic_usage_mask: The numeric representation of a set of usage + masks that should be set on the checked object. + lease_time: The date in seconds since the epoch that a lease should be + available for on the checked object. + """ + + def __init__(self, + unique_identifier=None, + usage_limits_count=None, + cryptographic_usage_mask=None, + lease_time=None): + """ + Construct a Check response payload struct. + + Args: + unique_identifier (string): The ID of the managed object (e.g., + a public key) that was checked. Optional, defaults to None. + usage_limits_count (int): The number of usage limits units that + should be available on the checked object. Optional, defaults + to None. + cryptographic_usage_mask (int): The numeric representation of a + set of usage masks that should be set on the checked object. + Optional, defaults to None. + lease_time (int): The date in seconds since the epoch that a + lease should be available for on the checked object. Optional, + defaults to None. 
+ """ + super(CheckResponsePayload, self).__init__(enums.Tags.RESPONSE_PAYLOAD) + + self._unique_identifier = None + self._usage_limits_count = None + self._cryptographic_usage_mask = None + self._lease_time = None + + self.unique_identifier = unique_identifier + self.usage_limits_count = usage_limits_count + self.cryptographic_usage_mask = cryptographic_usage_mask + self.lease_time = lease_time + + @property + def unique_identifier(self): + if self._unique_identifier: + return self._unique_identifier.value + else: + return None + + @unique_identifier.setter + def unique_identifier(self, value): + if value is None: + self._unique_identifier = None + elif isinstance(value, six.string_types): + self._unique_identifier = primitives.TextString( + value=value, + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + else: + raise TypeError("Unique identifier must be a string.") + + @property + def usage_limits_count(self): + if self._usage_limits_count: + return self._usage_limits_count.value + else: + return None + + @usage_limits_count.setter + def usage_limits_count(self, value): + if value is None: + self._usage_limits_count = None + elif isinstance(value, six.integer_types): + self._usage_limits_count = primitives.LongInteger( + value=value, + tag=enums.Tags.USAGE_LIMITS_COUNT + ) + else: + raise TypeError("Usage limits count must be an integer.") + + @property + def cryptographic_usage_mask(self): + if self._cryptographic_usage_mask: + return self._cryptographic_usage_mask.value + else: + return None + + @cryptographic_usage_mask.setter + def cryptographic_usage_mask(self, value): + if value is None: + self._cryptographic_usage_mask = None + elif isinstance(value, six.integer_types): + self._cryptographic_usage_mask = primitives.Integer( + value=value, + tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK + ) + else: + raise TypeError("Cryptographic usage mask must be an integer.") + + @property + def lease_time(self): + if self._lease_time: + return self._lease_time.value + else: + return 
None + + @lease_time.setter + def lease_time(self, value): + if value is None: + self._lease_time = None + elif isinstance(value, six.integer_types): + self._lease_time = primitives.Interval( + value=value, + tag=enums.Tags.LEASE_TIME + ) + else: + raise TypeError("Lease time must be an integer.") + + def read(self, input_stream): + """ + Read the data encoding the Check response payload and decode it into + its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is missing from the + encoded payload. + """ + super(CheckResponsePayload, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream): + self._unique_identifier = primitives.TextString( + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + self._unique_identifier.read(local_stream) + if self.is_tag_next(enums.Tags.USAGE_LIMITS_COUNT, local_stream): + self._usage_limits_count = primitives.LongInteger( + tag=enums.Tags.USAGE_LIMITS_COUNT + ) + self._usage_limits_count.read(local_stream) + if self.is_tag_next(enums.Tags.CRYPTOGRAPHIC_USAGE_MASK, local_stream): + self._cryptographic_usage_mask = primitives.Integer( + tag=enums.Tags.CRYPTOGRAPHIC_USAGE_MASK + ) + self._cryptographic_usage_mask.read(local_stream) + if self.is_tag_next(enums.Tags.LEASE_TIME, local_stream): + self._lease_time = primitives.Interval( + tag=enums.Tags.LEASE_TIME + ) + self._lease_time.read(local_stream) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the Check response payload to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is not defined. 
+ """ + local_stream = utils.BytearrayStream() + + if self._unique_identifier: + self._unique_identifier.write(local_stream) + if self._usage_limits_count: + self._usage_limits_count.write(local_stream) + if self._cryptographic_usage_mask: + self._cryptographic_usage_mask.write(local_stream) + if self._lease_time: + self._lease_time.write(local_stream) + + self.length = local_stream.length() + super(CheckResponsePayload, self).write(output_stream) + output_stream.write(local_stream.buffer) + + def __eq__(self, other): + if isinstance(other, CheckResponsePayload): + if self.unique_identifier != other.unique_identifier: + return False + elif self.usage_limits_count != other.usage_limits_count: + return False + elif self.cryptographic_usage_mask != \ + other.cryptographic_usage_mask: + return False + elif self.lease_time != other.lease_time: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, CheckResponsePayload): + return not (self == other) + else: + return NotImplemented + + def __repr__(self): + args = ", ".join([ + "unique_identifier='{0}'".format(self.unique_identifier), + "usage_limits_count={0}".format(self.usage_limits_count), + "cryptographic_usage_mask={0}".format( + self.cryptographic_usage_mask + ), + "lease_time={0}".format(self.lease_time) + ]) + return "CheckResponsePayload({0})".format(args) + + def __str__(self): + return str({ + 'unique_identifier': self.unique_identifier, + 'usage_limits_count': self.usage_limits_count, + 'cryptographic_usage_mask': self.cryptographic_usage_mask, + 'lease_time': self.lease_time + }) diff -Nru python-pykmip-0.7.0/kmip/core/messages/payloads/get_usage_allocation.py python-pykmip-0.8.0/kmip/core/messages/payloads/get_usage_allocation.py --- python-pykmip-0.7.0/kmip/core/messages/payloads/get_usage_allocation.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/messages/payloads/get_usage_allocation.py 2017-12-08 
class GetUsageAllocationRequestPayload(primitives.Struct):
    """
    A request payload for the GetUsageAllocation operation.

    Attributes:
        unique_identifier: The unique ID of the object for which to obtain
            a usage allocation.
        usage_limits_count: The number of usage limits units that should be
            reserved for the object.
    """

    def __init__(self, unique_identifier=None, usage_limits_count=None):
        """
        Construct a GetUsageAllocation request payload struct.

        Args:
            unique_identifier (string): The ID of the managed object (e.g.,
                a public key) to obtain a usage allocation for. Optional,
                defaults to None.
            usage_limits_count (int): The number of usage limits units that
                should be reserved for the object. Optional, defaults to
                None.
        """
        super(GetUsageAllocationRequestPayload, self).__init__(
            enums.Tags.REQUEST_PAYLOAD
        )

        self._unique_identifier = None
        self._usage_limits_count = None

        self.unique_identifier = unique_identifier
        self.usage_limits_count = usage_limits_count

    @property
    def unique_identifier(self):
        # Explicit None check so a set-but-falsy value (e.g. an empty
        # string identifier) still round-trips through the property.
        if self._unique_identifier is not None:
            return self._unique_identifier.value
        return None

    @unique_identifier.setter
    def unique_identifier(self, value):
        if value is None:
            self._unique_identifier = None
        elif isinstance(value, six.string_types):
            self._unique_identifier = primitives.TextString(
                value=value,
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
        else:
            raise TypeError("Unique identifier must be a string.")

    @property
    def usage_limits_count(self):
        # Explicit None check so a zero count is still reported.
        if self._usage_limits_count is not None:
            return self._usage_limits_count.value
        return None

    @usage_limits_count.setter
    def usage_limits_count(self, value):
        if value is None:
            self._usage_limits_count = None
        elif isinstance(value, six.integer_types):
            self._usage_limits_count = primitives.LongInteger(
                value=value,
                tag=enums.Tags.USAGE_LIMITS_COUNT
            )
        else:
            raise TypeError("Usage limits count must be an integer.")

    def read(self, input_stream):
        """
        Read the data encoding the GetUsageAllocation request payload and
        decode it into its constituent parts.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.
        """
        super(GetUsageAllocationRequestPayload, self).read(input_stream)
        local_stream = utils.BytearrayStream(input_stream.read(self.length))

        # Both fields are optional; decode each only when its tag is next.
        if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
            self._unique_identifier = primitives.TextString(
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
            self._unique_identifier.read(local_stream)
        if self.is_tag_next(enums.Tags.USAGE_LIMITS_COUNT, local_stream):
            self._usage_limits_count = primitives.LongInteger(
                tag=enums.Tags.USAGE_LIMITS_COUNT
            )
            self._usage_limits_count.read(local_stream)

        self.is_oversized(local_stream)

    def write(self, output_stream):
        """
        Write the data encoding the GetUsageAllocation request payload to a
        stream.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.
        """
        local_stream = utils.BytearrayStream()

        # Explicit None checks so set-but-falsy primitives are encoded.
        if self._unique_identifier is not None:
            self._unique_identifier.write(local_stream)
        if self._usage_limits_count is not None:
            self._usage_limits_count.write(local_stream)

        self.length = local_stream.length()
        super(GetUsageAllocationRequestPayload, self).write(output_stream)
        output_stream.write(local_stream.buffer)

    def __eq__(self, other):
        if not isinstance(other, GetUsageAllocationRequestPayload):
            return NotImplemented
        return (
            self.unique_identifier == other.unique_identifier and
            self.usage_limits_count == other.usage_limits_count
        )

    def __ne__(self, other):
        if not isinstance(other, GetUsageAllocationRequestPayload):
            return NotImplemented
        return not self.__eq__(other)

    def __repr__(self):
        args = ", ".join([
            "unique_identifier='{0}'".format(self.unique_identifier),
            "usage_limits_count={0}".format(self.usage_limits_count)
        ])
        return "GetUsageAllocationRequestPayload({0})".format(args)

    def __str__(self):
        return str({
            'unique_identifier': self.unique_identifier,
            'usage_limits_count': self.usage_limits_count
        })
class GetUsageAllocationResponsePayload(primitives.Struct):
    """
    A response payload for the GetUsageAllocation operation.

    Attributes:
        unique_identifier: The unique ID of the object that was allocated.
    """

    def __init__(self, unique_identifier=None):
        """
        Construct a GetUsageAllocation response payload struct.

        Args:
            unique_identifier (string): The ID of the managed object (e.g.,
                a public key) that was allocated. Optional, defaults to
                None.
        """
        super(GetUsageAllocationResponsePayload, self).__init__(
            enums.Tags.RESPONSE_PAYLOAD
        )

        self._unique_identifier = None
        self.unique_identifier = unique_identifier

    @property
    def unique_identifier(self):
        # Explicit None check so a set-but-falsy value (e.g. an empty
        # string identifier) still round-trips through the property.
        if self._unique_identifier is not None:
            return self._unique_identifier.value
        return None

    @unique_identifier.setter
    def unique_identifier(self, value):
        if value is None:
            self._unique_identifier = None
        elif isinstance(value, six.string_types):
            self._unique_identifier = primitives.TextString(
                value=value,
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
        else:
            raise TypeError("Unique identifier must be a string.")

    def read(self, input_stream):
        """
        Read the data encoding the GetUsageAllocation response payload and
        decode it into its constituent parts.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.
        """
        super(GetUsageAllocationResponsePayload, self).read(input_stream)
        local_stream = utils.BytearrayStream(input_stream.read(self.length))

        # The identifier is optional; decode it only when its tag is next.
        if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
            self._unique_identifier = primitives.TextString(
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
            self._unique_identifier.read(local_stream)

        self.is_oversized(local_stream)

    def write(self, output_stream):
        """
        Write the data encoding the GetUsageAllocation response payload to a
        stream.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.
        """
        local_stream = utils.BytearrayStream()

        # Explicit None check so a set-but-falsy identifier is encoded.
        if self._unique_identifier is not None:
            self._unique_identifier.write(local_stream)

        self.length = local_stream.length()
        super(GetUsageAllocationResponsePayload, self).write(output_stream)
        output_stream.write(local_stream.buffer)

    def __eq__(self, other):
        if not isinstance(other, GetUsageAllocationResponsePayload):
            return NotImplemented
        return self.unique_identifier == other.unique_identifier

    def __ne__(self, other):
        if not isinstance(other, GetUsageAllocationResponsePayload):
            return NotImplemented
        return not self.__eq__(other)

    def __repr__(self):
        args = "unique_identifier='{0}'".format(self.unique_identifier)
        return "GetUsageAllocationResponsePayload({0})".format(args)

    def __str__(self):
        return str({
            'unique_identifier': self.unique_identifier,
        })
ActivateResponsePayload ) +from kmip.core.messages.payloads.archive import ( + ArchiveRequestPayload, + ArchiveResponsePayload +) +from kmip.core.messages.payloads.cancel import ( + CancelRequestPayload, + CancelResponsePayload +) +from kmip.core.messages.payloads.check import ( + CheckRequestPayload, + CheckResponsePayload +) from kmip.core.messages.payloads.create import ( CreateRequestPayload, CreateResponsePayload @@ -57,6 +69,10 @@ GetAttributesRequestPayload, GetAttributesResponsePayload ) +from kmip.core.messages.payloads.get_usage_allocation import ( + GetUsageAllocationRequestPayload, + GetUsageAllocationResponsePayload +) from kmip.core.messages.payloads.locate import ( LocateRequestPayload, LocateResponsePayload @@ -65,10 +81,21 @@ MACRequestPayload, MACResponsePayload ) +from kmip.core.messages.payloads.obtain_lease import ( + ObtainLeaseRequestPayload, + ObtainLeaseResponsePayload +) +from kmip.core.messages.payloads.poll import ( + PollRequestPayload +) from kmip.core.messages.payloads.query import ( QueryRequestPayload, QueryResponsePayload ) +from kmip.core.messages.payloads.recover import ( + RecoverRequestPayload, + RecoverResponsePayload +) from kmip.core.messages.payloads.register import ( RegisterRequestPayload, RegisterResponsePayload @@ -77,6 +104,10 @@ RekeyKeyPairRequestPayload, RekeyKeyPairResponsePayload ) +from kmip.core.messages.payloads.rekey import ( + RekeyRequestPayload, + RekeyResponsePayload +) from kmip.core.messages.payloads.revoke import ( RevokeRequestPayload, RevokeResponsePayload @@ -94,6 +125,12 @@ __all__ = [ "ActivateRequestPayload", "ActivateResponsePayload", + "ArchiveRequestPayload", + "ArchiveResponsePayload", + "CancelRequestPayload", + "CancelResponsePayload", + "CheckRequestPayload", + "CheckResponsePayload", "CreateRequestPayload", "CreateResponsePayload", "CreateKeyPairRequestPayload", @@ -114,16 +151,25 @@ "GetAttributeListResponsePayload", "GetAttributesRequestPayload", "GetAttributesResponsePayload", + 
"GetUsageAllocationRequestPayload", + "GetUsageAllocationResponsePayload", "LocateRequestPayload", "LocateResponsePayload", "MACRequestPayload", "MACResponsePayload", + "ObtainLeaseRequestPayload", + "ObtainLeaseResponsePayload", + "PollRequestPayload", "QueryRequestPayload", "QueryResponsePayload", + "RecoverRequestPayload", + "RecoverResponsePayload", "RegisterRequestPayload", "RegisterResponsePayload", "RekeyKeyPairRequestPayload", "RekeyKeyPairResponsePayload", + "RekeyRequestPayload", + "RekeyResponsePayload", "RevokeRequestPayload", "RevokeResponsePayload", "SignRequestPayload", diff -Nru python-pykmip-0.7.0/kmip/core/messages/payloads/obtain_lease.py python-pykmip-0.8.0/kmip/core/messages/payloads/obtain_lease.py --- python-pykmip-0.7.0/kmip/core/messages/payloads/obtain_lease.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/messages/payloads/obtain_lease.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,328 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import six + +from kmip import enums +from kmip.core import primitives +from kmip.core import utils + + +class ObtainLeaseRequestPayload(primitives.Struct): + """ + A request payload for the ObtainLease operation. + + Attributes: + unique_identifier: The unique ID of the object to be leased. 
+ """ + + def __init__(self, unique_identifier=None): + """ + Construct an ObtainLease request payload struct. + + Args: + unique_identifier (string): The ID of the managed object (e.g., + a public key) to obtain a lease for. Optional, defaults to + None. + """ + super(ObtainLeaseRequestPayload, self).__init__( + enums.Tags.REQUEST_PAYLOAD + ) + + self._unique_identifier = None + self.unique_identifier = unique_identifier + + @property + def unique_identifier(self): + if self._unique_identifier: + return self._unique_identifier.value + else: + return None + + @unique_identifier.setter + def unique_identifier(self, value): + if value is None: + self._unique_identifier = None + elif isinstance(value, six.string_types): + self._unique_identifier = primitives.TextString( + value=value, + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + else: + raise TypeError("Unique identifier must be a string.") + + def read(self, input_stream): + """ + Read the data encoding the ObtainLease request payload and decode it + into its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is missing from the + encoded payload. + """ + super(ObtainLeaseRequestPayload, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream): + self._unique_identifier = primitives.TextString( + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + self._unique_identifier.read(local_stream) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the ObtainLease request payload to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is not defined. 
+ """ + local_stream = utils.BytearrayStream() + + if self._unique_identifier: + self._unique_identifier.write(local_stream) + + self.length = local_stream.length() + super(ObtainLeaseRequestPayload, self).write(output_stream) + output_stream.write(local_stream.buffer) + + def __eq__(self, other): + if isinstance(other, ObtainLeaseRequestPayload): + if self.unique_identifier != other.unique_identifier: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, ObtainLeaseRequestPayload): + return not (self == other) + else: + return NotImplemented + + def __repr__(self): + args = "unique_identifier='{0}'".format(self.unique_identifier) + return "ObtainLeaseRequestPayload({0})".format(args) + + def __str__(self): + return str({ + 'unique_identifier': self.unique_identifier + }) + + +class ObtainLeaseResponsePayload(primitives.Struct): + """ + A response payload for the ObtainLease operation. + + Attributes: + unique_identifier: The unique ID of the object that was leased. + lease_time: The amount of time, in seconds, that the object lease is + in effect. + last_change_date: The date, in seconds since the epoch, representing + the last time a change was made to the object or one of its + attributes. + """ + + def __init__(self, + unique_identifier=None, + lease_time=None, + last_change_date=None): + """ + Construct an ObtainLease response payload struct. + + Args: + unique_identifier (string): The ID of the managed object (e.g., + a public key) a lease was obtained for. Optional, defaults to + None. + lease_time (int): The amount of time, in seconds, that the object + lease is in effect for. Optional, defaults to None. + last_change_date (int): The date, in seconds since the epoch, + when the last change was made to the object or one of its + attributes. Optional, defaults to None. 
+ """ + super(ObtainLeaseResponsePayload, self).__init__( + enums.Tags.RESPONSE_PAYLOAD + ) + + self._unique_identifier = None + self._lease_time = None + self._last_change_date = None + + self.unique_identifier = unique_identifier + self.lease_time = lease_time + self.last_change_date = last_change_date + + @property + def unique_identifier(self): + if self._unique_identifier: + return self._unique_identifier.value + else: + return None + + @unique_identifier.setter + def unique_identifier(self, value): + if value is None: + self._unique_identifier = None + elif isinstance(value, six.string_types): + self._unique_identifier = primitives.TextString( + value=value, + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + else: + raise TypeError("Unique identifier must be a string.") + + @property + def lease_time(self): + if self._lease_time: + return self._lease_time.value + else: + return None + + @lease_time.setter + def lease_time(self, value): + if value is None: + self._lease_time = None + elif isinstance(value, six.integer_types): + self._lease_time = primitives.Interval( + value=value, + tag=enums.Tags.LEASE_TIME + ) + else: + raise TypeError("Lease time must be an integer.") + + @property + def last_change_date(self): + if self._last_change_date: + return self._last_change_date.value + else: + return None + + @last_change_date.setter + def last_change_date(self, value): + if value is None: + self._last_change_date = None + elif isinstance(value, six.integer_types): + self._last_change_date = primitives.DateTime( + value=value, + tag=enums.Tags.LAST_CHANGE_DATE + ) + else: + raise TypeError("Last change date must be an integer.") + + def read(self, input_stream): + """ + Read the data encoding the ObtainLease response payload and decode it + into its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. 
+ + Raises: + ValueError: Raised if the data attribute is missing from the + encoded payload. + """ + super(ObtainLeaseResponsePayload, self).read(input_stream) + local_stream = utils.BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream): + self._unique_identifier = primitives.TextString( + tag=enums.Tags.UNIQUE_IDENTIFIER + ) + self._unique_identifier.read(local_stream) + if self.is_tag_next(enums.Tags.LEASE_TIME, local_stream): + self._lease_time = primitives.Interval( + tag=enums.Tags.LEASE_TIME + ) + self._lease_time.read(local_stream) + if self.is_tag_next(enums.Tags.LAST_CHANGE_DATE, local_stream): + self._last_change_date = primitives.DateTime( + tag=enums.Tags.LAST_CHANGE_DATE + ) + self._last_change_date.read(local_stream) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the ObtainLease response payload to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if the data attribute is not defined. 
class PollRequestPayload(primitives.Struct):
    """
    A request payload for the Poll operation.

    Attributes:
        asynchronous_correlation_value: The unique ID, in bytes, of the
            operation to poll.
    """

    def __init__(self, asynchronous_correlation_value=None):
        """
        Construct a Poll request payload struct.

        Args:
            asynchronous_correlation_value (bytes): The ID of a pending
                operation to poll the status of, in bytes. Optional,
                defaults to None.
        """
        super(PollRequestPayload, self).__init__(
            enums.Tags.REQUEST_PAYLOAD
        )

        self._asynchronous_correlation_value = None
        self.asynchronous_correlation_value = asynchronous_correlation_value

    @property
    def asynchronous_correlation_value(self):
        if self._asynchronous_correlation_value:
            return self._asynchronous_correlation_value.value
        else:
            return None

    @asynchronous_correlation_value.setter
    def asynchronous_correlation_value(self, value):
        if value is None:
            self._asynchronous_correlation_value = None
        elif isinstance(value, six.binary_type):
            self._asynchronous_correlation_value = primitives.ByteString(
                value=value,
                tag=enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE
            )
        else:
            raise TypeError("Asynchronous correlation value must be bytes.")

    def read(self, input_stream):
        """
        Read the data encoding the Poll request payload and decode it into
        its constituent parts.

        The asynchronous correlation value is optional in the encoding, so
        nothing is raised if it is absent.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.
        """
        super(PollRequestPayload, self).read(input_stream)
        local_stream = utils.BytearrayStream(input_stream.read(self.length))

        if self.is_tag_next(
                enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE,
                local_stream
        ):
            self._asynchronous_correlation_value = primitives.ByteString(
                tag=enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE
            )
            self._asynchronous_correlation_value.read(local_stream)

        self.is_oversized(local_stream)

    def write(self, output_stream):
        """
        Write the data encoding the Poll request payload to a stream.

        The asynchronous correlation value is optional and is simply
        omitted from the encoding if unset.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.
        """
        local_stream = utils.BytearrayStream()

        if self._asynchronous_correlation_value:
            self._asynchronous_correlation_value.write(local_stream)

        self.length = local_stream.length()
        super(PollRequestPayload, self).write(output_stream)
        output_stream.write(local_stream.buffer)

    def __eq__(self, other):
        if isinstance(other, PollRequestPayload):
            if self.asynchronous_correlation_value != \
                    other.asynchronous_correlation_value:
                return False
            else:
                return True
        else:
            return NotImplemented

    def __ne__(self, other):
        if isinstance(other, PollRequestPayload):
            return not (self == other)
        else:
            return NotImplemented

    def __repr__(self):
        args = "asynchronous_correlation_value={0}".format(
            self.asynchronous_correlation_value
        )
        return "PollRequestPayload({0})".format(args)

    def __str__(self):
        return str({
            'asynchronous_correlation_value':
                self.asynchronous_correlation_value
        })
class RecoverRequestPayload(primitives.Struct):
    """
    A request payload for the Recover operation.

    Attributes:
        unique_identifier: The unique ID of the object to recover.
    """

    def __init__(self, unique_identifier=None):
        """
        Construct a Recover request payload struct.

        Args:
            unique_identifier (string): The ID of the managed object (e.g.,
                a public key) to recover. Optional, defaults to None.
        """
        super(RecoverRequestPayload, self).__init__(
            enums.Tags.REQUEST_PAYLOAD
        )

        self._unique_identifier = None
        self.unique_identifier = unique_identifier

    @property
    def unique_identifier(self):
        if self._unique_identifier:
            return self._unique_identifier.value
        else:
            return None

    @unique_identifier.setter
    def unique_identifier(self, value):
        if value is None:
            self._unique_identifier = None
        elif isinstance(value, six.string_types):
            self._unique_identifier = primitives.TextString(
                value=value,
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
        else:
            raise TypeError("Unique identifier must be a string.")

    def read(self, input_stream):
        """
        Read the data encoding the Recover request payload and decode it
        into its constituent parts.

        The unique identifier is optional in the encoding, so nothing is
        raised if it is absent.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.
        """
        super(RecoverRequestPayload, self).read(input_stream)
        local_stream = utils.BytearrayStream(input_stream.read(self.length))

        if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
            self._unique_identifier = primitives.TextString(
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
            self._unique_identifier.read(local_stream)

        self.is_oversized(local_stream)

    def write(self, output_stream):
        """
        Write the data encoding the Recover request payload to a stream.

        The unique identifier is optional and is simply omitted from the
        encoding if unset.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.
        """
        local_stream = utils.BytearrayStream()

        if self._unique_identifier:
            self._unique_identifier.write(local_stream)

        self.length = local_stream.length()
        super(RecoverRequestPayload, self).write(output_stream)
        output_stream.write(local_stream.buffer)

    def __eq__(self, other):
        if isinstance(other, RecoverRequestPayload):
            if self.unique_identifier != other.unique_identifier:
                return False
            else:
                return True
        else:
            return NotImplemented

    def __ne__(self, other):
        if isinstance(other, RecoverRequestPayload):
            return not (self == other)
        else:
            return NotImplemented

    def __repr__(self):
        args = "unique_identifier='{0}'".format(self.unique_identifier)
        return "RecoverRequestPayload({0})".format(args)

    def __str__(self):
        return str({
            'unique_identifier': self.unique_identifier
        })


class RecoverResponsePayload(primitives.Struct):
    """
    A response payload for the Recover operation.

    Attributes:
        unique_identifier: The unique ID of the object that was recovered.
    """

    def __init__(self, unique_identifier=None):
        """
        Construct a Recover response payload struct.

        Args:
            unique_identifier (string): The ID of the managed object (e.g.,
                a public key) that was recovered. Optional, defaults to None.
        """
        super(RecoverResponsePayload, self).__init__(
            enums.Tags.RESPONSE_PAYLOAD
        )

        self._unique_identifier = None
        self.unique_identifier = unique_identifier

    @property
    def unique_identifier(self):
        if self._unique_identifier:
            return self._unique_identifier.value
        else:
            return None

    @unique_identifier.setter
    def unique_identifier(self, value):
        if value is None:
            self._unique_identifier = None
        elif isinstance(value, six.string_types):
            self._unique_identifier = primitives.TextString(
                value=value,
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
        else:
            raise TypeError("Unique identifier must be a string.")

    def read(self, input_stream):
        """
        Read the data encoding the Recover response payload and decode it
        into its constituent parts.

        The unique identifier is optional in the encoding, so nothing is
        raised if it is absent.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.
        """
        super(RecoverResponsePayload, self).read(input_stream)
        local_stream = utils.BytearrayStream(input_stream.read(self.length))

        if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
            self._unique_identifier = primitives.TextString(
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
            self._unique_identifier.read(local_stream)

        self.is_oversized(local_stream)

    def write(self, output_stream):
        """
        Write the data encoding the Recover response payload to a stream.

        The unique identifier is optional and is simply omitted from the
        encoding if unset.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.
        """
        local_stream = utils.BytearrayStream()

        if self._unique_identifier:
            self._unique_identifier.write(local_stream)

        self.length = local_stream.length()
        super(RecoverResponsePayload, self).write(output_stream)
        output_stream.write(local_stream.buffer)

    def __eq__(self, other):
        if isinstance(other, RecoverResponsePayload):
            if self.unique_identifier != other.unique_identifier:
                return False
            else:
                return True
        else:
            return NotImplemented

    def __ne__(self, other):
        if isinstance(other, RecoverResponsePayload):
            return not (self == other)
        else:
            return NotImplemented

    def __repr__(self):
        args = "unique_identifier='{0}'".format(self.unique_identifier)
        return "RecoverResponsePayload({0})".format(args)

    def __str__(self):
        return str({
            'unique_identifier': self.unique_identifier
        })
class RekeyRequestPayload(primitives.Struct):
    """
    A request payload for the Rekey operation.

    Attributes:
        unique_identifier: The unique ID of the symmetric key to rekey.
        offset: The interval between the initialization and activation dates
            of the replacement key.
        template_attribute: A collection of attributes that should be set on
            the replacement key.
    """
    def __init__(self,
                 unique_identifier=None,
                 offset=None,
                 template_attribute=None):
        """
        Construct a Rekey request payload struct.

        Args:
            unique_identifier (string): The ID of the symmetric key to rekey.
                Optional, defaults to None.
            offset (int): The number of seconds between the initialization
                and activation dates of the replacement key. Optional,
                defaults to None.
            template_attribute (TemplateAttribute): A structure containing a
                set of attributes (e.g., cryptographic algorithm,
                cryptographic length) that should be set on the replacement
                key. Optional, defaults to None.
        """
        super(RekeyRequestPayload, self).__init__(
            enums.Tags.REQUEST_PAYLOAD
        )

        self._unique_identifier = None
        self._offset = None
        self._template_attribute = None

        self.unique_identifier = unique_identifier
        self.offset = offset
        self.template_attribute = template_attribute

    @property
    def unique_identifier(self):
        if self._unique_identifier:
            return self._unique_identifier.value
        else:
            return None

    @unique_identifier.setter
    def unique_identifier(self, value):
        if value is None:
            self._unique_identifier = None
        elif isinstance(value, six.string_types):
            self._unique_identifier = primitives.TextString(
                value=value,
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
        else:
            raise TypeError("Unique identifier must be a string.")

    @property
    def offset(self):
        if self._offset:
            return self._offset.value
        else:
            return None

    @offset.setter
    def offset(self, value):
        if value is None:
            self._offset = None
        elif isinstance(value, six.integer_types):
            self._offset = primitives.Interval(
                value=value,
                tag=enums.Tags.OFFSET
            )
        else:
            raise TypeError("Offset must be an integer.")

    @property
    def template_attribute(self):
        if self._template_attribute:
            return self._template_attribute
        else:
            return None

    @template_attribute.setter
    def template_attribute(self, value):
        if value is None:
            self._template_attribute = None
        elif isinstance(value, objects.TemplateAttribute):
            self._template_attribute = value
        else:
            raise TypeError(
                "Template attribute must be a TemplateAttribute struct."
            )

    def read(self, input_stream):
        """
        Read the data encoding the Rekey request payload and decode it into
        its constituent parts.

        All fields are optional in the encoding, so nothing is raised if
        any of them are absent.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.
        """
        super(RekeyRequestPayload, self).read(input_stream)
        local_stream = utils.BytearrayStream(input_stream.read(self.length))

        if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
            self._unique_identifier = primitives.TextString(
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
            self._unique_identifier.read(local_stream)

        if self.is_tag_next(enums.Tags.OFFSET, local_stream):
            self._offset = primitives.Interval(
                tag=enums.Tags.OFFSET
            )
            self._offset.read(local_stream)

        if self.is_tag_next(enums.Tags.TEMPLATE_ATTRIBUTE, local_stream):
            self._template_attribute = objects.TemplateAttribute()
            self._template_attribute.read(local_stream)

        self.is_oversized(local_stream)

    def write(self, output_stream):
        """
        Write the data encoding the Rekey request payload to a stream.

        All fields are optional and unset fields are simply omitted from
        the encoding.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.
        """
        local_stream = utils.BytearrayStream()

        if self._unique_identifier is not None:
            self._unique_identifier.write(local_stream)
        if self._offset is not None:
            self._offset.write(local_stream)
        if self._template_attribute is not None:
            self._template_attribute.write(local_stream)

        self.length = local_stream.length()
        super(RekeyRequestPayload, self).write(output_stream)
        output_stream.write(local_stream.buffer)

    def __eq__(self, other):
        if isinstance(other, RekeyRequestPayload):
            if self.unique_identifier != other.unique_identifier:
                return False
            elif self.offset != other.offset:
                return False
            elif self.template_attribute != other.template_attribute:
                return False
            else:
                return True
        else:
            return NotImplemented

    def __ne__(self, other):
        if isinstance(other, RekeyRequestPayload):
            return not (self == other)
        else:
            return NotImplemented

    def __repr__(self):
        args = ", ".join([
            "unique_identifier='{0}'".format(self.unique_identifier),
            "offset={0}".format(self.offset),
            "template_attribute={0}".format(repr(self.template_attribute))
        ])
        return "RekeyRequestPayload({0})".format(args)

    def __str__(self):
        return str({
            'unique_identifier': self.unique_identifier,
            'offset': self.offset,
            'template_attribute': str(self.template_attribute)
        })


class RekeyResponsePayload(primitives.Struct):
    """
    A response payload for the Rekey operation.

    Attributes:
        unique_identifier: The unique ID of the replacement key.
        template_attribute: A collection of server attributes that were set
            on the replacement key.
    """
    def __init__(self,
                 unique_identifier=None,
                 template_attribute=None):
        """
        Construct a Rekey response payload struct.

        Args:
            unique_identifier (string): The ID of the replacement key.
                Optional, defaults to None. Required for read/write.
            template_attribute (TemplateAttribute): A structure containing a
                set of attributes (e.g., cryptographic algorithm,
                cryptographic length) that were set by the server on the
                replacement key. Optional, defaults to None.
        """
        super(RekeyResponsePayload, self).__init__(
            enums.Tags.RESPONSE_PAYLOAD
        )

        self._unique_identifier = None
        self._template_attribute = None

        self.unique_identifier = unique_identifier
        self.template_attribute = template_attribute

    @property
    def unique_identifier(self):
        if self._unique_identifier:
            return self._unique_identifier.value
        else:
            return None

    @unique_identifier.setter
    def unique_identifier(self, value):
        if value is None:
            self._unique_identifier = None
        elif isinstance(value, six.string_types):
            self._unique_identifier = primitives.TextString(
                value=value,
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
        else:
            raise TypeError("Unique identifier must be a string.")

    @property
    def template_attribute(self):
        if self._template_attribute:
            return self._template_attribute
        else:
            return None

    @template_attribute.setter
    def template_attribute(self, value):
        if value is None:
            self._template_attribute = None
        elif isinstance(value, objects.TemplateAttribute):
            self._template_attribute = value
        else:
            raise TypeError(
                "Template attribute must be a TemplateAttribute struct."
            )

    def read(self, input_stream):
        """
        Read the data encoding the Rekey response payload and decode it into
        its constituent parts.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.

        Raises:
            ValueError: Raised if the unique identifier attribute is missing
                from the encoded payload.
        """
        super(RekeyResponsePayload, self).read(input_stream)
        local_stream = utils.BytearrayStream(input_stream.read(self.length))

        if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
            self._unique_identifier = primitives.TextString(
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
            self._unique_identifier.read(local_stream)
        else:
            raise ValueError(
                "The Rekey response payload encoding is missing the unique "
                "identifier."
            )

        if self.is_tag_next(enums.Tags.TEMPLATE_ATTRIBUTE, local_stream):
            self._template_attribute = objects.TemplateAttribute()
            self._template_attribute.read(local_stream)

        self.is_oversized(local_stream)

    def write(self, output_stream):
        """
        Write the data encoding the Rekey response payload to a stream.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.

        Raises:
            ValueError: Raised if the payload is missing the unique
                identifier.
        """
        local_stream = utils.BytearrayStream()

        if self._unique_identifier is not None:
            self._unique_identifier.write(local_stream)
        else:
            raise ValueError(
                "The Rekey response payload is missing the unique identifier."
            )
        if self._template_attribute is not None:
            self._template_attribute.write(local_stream)

        self.length = local_stream.length()
        super(RekeyResponsePayload, self).write(output_stream)
        output_stream.write(local_stream.buffer)

    def __eq__(self, other):
        if isinstance(other, RekeyResponsePayload):
            if self.unique_identifier != other.unique_identifier:
                return False
            elif self.template_attribute != other.template_attribute:
                return False
            else:
                return True
        else:
            return NotImplemented

    def __ne__(self, other):
        if isinstance(other, RekeyResponsePayload):
            return not (self == other)
        else:
            return NotImplemented

    def __repr__(self):
        args = ", ".join([
            "unique_identifier='{0}'".format(self.unique_identifier),
            "template_attribute={0}".format(repr(self.template_attribute))
        ])
        return "RekeyResponsePayload({0})".format(args)

    def __str__(self):
        return str({
            'unique_identifier': self.unique_identifier,
            'template_attribute': str(self.template_attribute)
        })
+import abc import six from six.moves import xrange @@ -25,10 +26,9 @@ from kmip.core.enums import AttributeType from kmip.core.enums import Tags from kmip.core.enums import Types -from kmip.core.enums import CredentialType from kmip.core.enums import RevocationReasonCode as RevocationReasonCodeEnum +from kmip.core import exceptions -from kmip.core.errors import ErrorStrings from kmip.core.misc import KeyFormatType from kmip.core import primitives @@ -112,53 +112,933 @@ enum_type = name value = self.value_factory.create_attribute_value(enum_type, None) + if value is None: + raise Exception("No value type for {}".format(enum_name)) self.attribute_value = value self.attribute_value.tag = Tags.ATTRIBUTE_VALUE self.attribute_value.read(tstream) - self.is_oversized(tstream) + self.is_oversized(tstream) + + def write(self, ostream): + tstream = BytearrayStream() + + self.attribute_name.write(tstream) + if self.attribute_index is not None: + self.attribute_index.write(tstream) + self.attribute_value.write(tstream) + + # Write the length and value of the attribute + self.length = tstream.length() + super(Attribute, self).write(ostream) + ostream.write(tstream.buffer) + + def __repr__(self): + attribute_name = "attribute_name={0}".format(repr(self.attribute_name)) + attribute_index = "attribute_index={0}".format( + repr(self.attribute_index) + ) + attribute_value = "attribute_value={0}".format( + repr(self.attribute_value) + ) + return "Attribute({0}, {1}, {2})".format( + attribute_name, + attribute_index, + attribute_value + ) + + def __str__(self): + return str({ + 'attribute_name': str(self.attribute_name), + 'attribute_index': str(self.attribute_index), + 'attribute_value': str(self.attribute_value) + }) + + def __eq__(self, other): + if isinstance(other, Attribute): + if self.attribute_name != other.attribute_name: + return False + elif self.attribute_index != other.attribute_index: + return False + elif self.attribute_value != other.attribute_value: + return False + 
class Nonce(primitives.Struct):
    """
    A struct representing a Nonce object.

    Attributes:
        nonce_id (bytes): A binary string identifying the nonce value.
        nonce_value (bytes): A binary string holding a random value.
    """

    def __init__(self, nonce_id=None, nonce_value=None):
        """
        Construct a Nonce struct.

        Args:
            nonce_id (bytes): A binary string representing the ID of the
                nonce value. Optional, defaults to None. Required for
                encoding and decoding.
            nonce_value (bytes): A binary string representing a random
                value. Optional, defaults to None. Required for encoding
                and decoding.
        """
        super(Nonce, self).__init__(tag=enums.Tags.NONCE)

        self._nonce_id = None
        self._nonce_value = None

        self.nonce_id = nonce_id
        self.nonce_value = nonce_value

    @property
    def nonce_id(self):
        return self._nonce_id.value if self._nonce_id else None

    @nonce_id.setter
    def nonce_id(self, value):
        if value is None:
            self._nonce_id = None
            return
        if not isinstance(value, six.binary_type):
            raise TypeError("Nonce ID must be bytes.")
        self._nonce_id = primitives.ByteString(
            value=value,
            tag=enums.Tags.NONCE_ID
        )

    @property
    def nonce_value(self):
        return self._nonce_value.value if self._nonce_value else None

    @nonce_value.setter
    def nonce_value(self, value):
        if value is None:
            self._nonce_value = None
            return
        if not isinstance(value, six.binary_type):
            raise TypeError("Nonce value must be bytes.")
        self._nonce_value = primitives.ByteString(
            value=value,
            tag=enums.Tags.NONCE_VALUE
        )

    def read(self, input_stream):
        """
        Read the data encoding the Nonce struct and decode it into its
        constituent parts.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.

        Raises:
            ValueError: Raised if the nonce ID or nonce value is missing
                from the encoding.
        """
        super(Nonce, self).read(input_stream)
        payload = BytearrayStream(input_stream.read(self.length))

        # Both fields are mandatory; fail fast if either is absent.
        if not self.is_tag_next(enums.Tags.NONCE_ID, payload):
            raise ValueError(
                "Nonce encoding missing the nonce ID."
            )
        self._nonce_id = primitives.ByteString(
            tag=enums.Tags.NONCE_ID
        )
        self._nonce_id.read(payload)

        if not self.is_tag_next(enums.Tags.NONCE_VALUE, payload):
            raise ValueError(
                "Nonce encoding missing the nonce value."
            )
        self._nonce_value = primitives.ByteString(
            tag=enums.Tags.NONCE_VALUE
        )
        self._nonce_value.read(payload)

        self.is_oversized(payload)

    def write(self, output_stream):
        """
        Write the data encoding the Nonce struct to a stream.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.

        Raises:
            ValueError: Raised if the nonce ID or nonce value is not
                defined.
        """
        payload = BytearrayStream()

        if not self._nonce_id:
            raise ValueError("Nonce struct is missing the nonce ID.")
        self._nonce_id.write(payload)

        if not self._nonce_value:
            raise ValueError("Nonce struct is missing the nonce value.")
        self._nonce_value.write(payload)

        self.length = payload.length()
        super(Nonce, self).write(output_stream)
        output_stream.write(payload.buffer)

    def __eq__(self, other):
        if not isinstance(other, Nonce):
            return NotImplemented
        return (
            self.nonce_id == other.nonce_id and
            self.nonce_value == other.nonce_value
        )

    def __ne__(self, other):
        if not isinstance(other, Nonce):
            return NotImplemented
        return not (self == other)

    def __repr__(self):
        return "Nonce(nonce_id={}, nonce_value={})".format(
            self.nonce_id,
            self.nonce_value
        )

    def __str__(self):
        return "{{'nonce_id': {}, 'nonce_value': {}}}".format(
            self.nonce_id,
            self.nonce_value
        )
class UsernamePasswordCredential(CredentialValue):
    """
    A struct representing a UsernamePasswordCredential object.

    Attributes:
        username: The username identifying the credential.
        password: The password associated with the username.
    """

    def __init__(self, username=None, password=None):
        """
        Construct a UsernamePasswordCredential struct.

        Args:
            username (string): The username identifying the credential.
                Optional, defaults to None. Required for encoding and
                decoding.
            password (string): The password associated with the username.
                Optional, defaults to None.
        """
        super(UsernamePasswordCredential, self).__init__(
            tag=Tags.CREDENTIAL_VALUE
        )

        self._username = None
        self._password = None

        self.username = username
        self.password = password

    @property
    def username(self):
        return self._username.value if self._username else None

    @username.setter
    def username(self, value):
        if value is None:
            self._username = None
            return
        if not isinstance(value, six.string_types):
            raise TypeError("Username must be a string.")
        self._username = primitives.TextString(
            value=value,
            tag=enums.Tags.USERNAME
        )

    @property
    def password(self):
        return self._password.value if self._password else None

    @password.setter
    def password(self, value):
        if value is None:
            self._password = None
            return
        if not isinstance(value, six.string_types):
            raise TypeError("Password must be a string.")
        self._password = primitives.TextString(
            value=value,
            tag=enums.Tags.PASSWORD
        )

    def read(self, input_stream):
        """
        Read the data encoding the UsernamePasswordCredential struct and
        decode it into its constituent parts.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.

        Raises:
            ValueError: Raised if the username is missing from the encoding.
        """
        super(UsernamePasswordCredential, self).read(input_stream)
        payload = BytearrayStream(input_stream.read(self.length))

        # The username is required; fail fast if it is not present.
        if not self.is_tag_next(enums.Tags.USERNAME, payload):
            raise ValueError(
                "Username/password credential encoding missing the username."
            )
        self._username = primitives.TextString(
            tag=enums.Tags.USERNAME
        )
        self._username.read(payload)

        # The password is optional and only decoded when present.
        if self.is_tag_next(enums.Tags.PASSWORD, payload):
            self._password = primitives.TextString(
                tag=enums.Tags.PASSWORD
            )
            self._password.read(payload)

        self.is_oversized(payload)

    def write(self, output_stream):
        """
        Write the data encoding the UsernamePasswordCredential struct to a
        stream.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.

        Raises:
            ValueError: Raised if the username is not defined.
        """
        payload = BytearrayStream()

        if not self._username:
            raise ValueError(
                "Username/password credential struct missing the username."
            )
        self._username.write(payload)

        if self._password:
            self._password.write(payload)

        self.length = payload.length()
        super(UsernamePasswordCredential, self).write(output_stream)
        output_stream.write(payload.buffer)

    def __eq__(self, other):
        if not isinstance(other, UsernamePasswordCredential):
            return NotImplemented
        return (
            self.username == other.username and
            self.password == other.password
        )

    def __ne__(self, other):
        if not isinstance(other, UsernamePasswordCredential):
            return NotImplemented
        return not (self == other)

    def __repr__(self):
        return "UsernamePasswordCredential(username='{}', password='{}')".format(
            self.username,
            self.password
        )

    def __str__(self):
        return str({
            "username": self.username,
            "password": self.password
        })
+ machine_identifier: The machine identifier for the credential. + media_identifier: The media identifier for the credential. + """ + + def __init__(self, + device_serial_number=None, + password=None, + device_identifier=None, + network_identifier=None, + machine_identifier=None, + media_identifier=None): + """ + Construct a DeviceCredential struct. + + Args: + device_serial_number (string): The device serial number for the + credential. Optional, defaults to None. + password (string): The password associated with the credential. + Optional, defaults to None. + device_identifier (string): The device identifier for the + credential. Optional, defaults to None. + network_identifier (string): The network identifier for the + credential. Optional, defaults to None. + machine_identifier (string): The machine identifier for the + credential. Optional, defaults to None. + media_identifier (string): The media identifier for the + credential. Optional, defaults to None. + """ + super(DeviceCredential, self).__init__(tag=Tags.CREDENTIAL_VALUE) + + self._device_serial_number = None + self._password = None + self._device_identifier = None + self._network_identifier = None + self._machine_identifier = None + self._media_identifier = None + + self.device_serial_number = device_serial_number + self.password = password + self.device_identifier = device_identifier + self.network_identifier = network_identifier + self.machine_identifier = machine_identifier + self.media_identifier = media_identifier + + @property + def device_serial_number(self): + if self._device_serial_number: + return self._device_serial_number.value + else: + return None + + @device_serial_number.setter + def device_serial_number(self, value): + if value is None: + self._device_serial_number = None + elif isinstance(value, six.string_types): + self._device_serial_number = primitives.TextString( + value=value, + tag=enums.Tags.DEVICE_SERIAL_NUMBER + ) + else: + raise TypeError("Device serial number must be a 
string.") + + @property + def password(self): + if self._password: + return self._password.value + else: + return None + + @password.setter + def password(self, value): + if value is None: + self._password = None + elif isinstance(value, six.string_types): + self._password = primitives.TextString( + value=value, + tag=enums.Tags.PASSWORD + ) + else: + raise TypeError("Password must be a string.") + + @property + def device_identifier(self): + if self._device_identifier: + return self._device_identifier.value + else: + return None + + @device_identifier.setter + def device_identifier(self, value): + if value is None: + self._device_identifier = None + elif isinstance(value, six.string_types): + self._device_identifier = primitives.TextString( + value=value, + tag=enums.Tags.DEVICE_IDENTIFIER + ) + else: + raise TypeError("Device identifier must be a string.") + + @property + def network_identifier(self): + if self._network_identifier: + return self._network_identifier.value + else: + return None + + @network_identifier.setter + def network_identifier(self, value): + if value is None: + self._network_identifier = None + elif isinstance(value, six.string_types): + self._network_identifier = primitives.TextString( + value=value, + tag=enums.Tags.NETWORK_IDENTIFIER + ) + else: + raise TypeError("Network identifier must be a string.") + + @property + def machine_identifier(self): + if self._machine_identifier: + return self._machine_identifier.value + else: + return None + + @machine_identifier.setter + def machine_identifier(self, value): + if value is None: + self._machine_identifier = None + elif isinstance(value, six.string_types): + self._machine_identifier = primitives.TextString( + value=value, + tag=enums.Tags.MACHINE_IDENTIFIER + ) + else: + raise TypeError("Machine identifier must be a string.") + + @property + def media_identifier(self): + if self._media_identifier: + return self._media_identifier.value + else: + return None + + @media_identifier.setter + def 
media_identifier(self, value): + if value is None: + self._media_identifier = None + elif isinstance(value, six.string_types): + self._media_identifier = primitives.TextString( + value=value, + tag=enums.Tags.MEDIA_IDENTIFIER + ) + else: + raise TypeError("Media identifier must be a string.") + + def read(self, input_stream): + """ + Read the data encoding the DeviceCredential struct and decode it into + its constituent parts. + + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object.. + """ + super(DeviceCredential, self).read(input_stream) + local_stream = BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.DEVICE_SERIAL_NUMBER, local_stream): + self._device_serial_number = primitives.TextString( + tag=enums.Tags.DEVICE_SERIAL_NUMBER + ) + self._device_serial_number.read(local_stream) + + if self.is_tag_next(enums.Tags.PASSWORD, local_stream): + self._password = primitives.TextString( + tag=enums.Tags.PASSWORD + ) + self._password.read(local_stream) + + if self.is_tag_next(enums.Tags.DEVICE_IDENTIFIER, local_stream): + self._device_identifier = primitives.TextString( + tag=enums.Tags.DEVICE_IDENTIFIER + ) + self._device_identifier.read(local_stream) + + if self.is_tag_next(enums.Tags.NETWORK_IDENTIFIER, local_stream): + self._network_identifier = primitives.TextString( + tag=enums.Tags.NETWORK_IDENTIFIER + ) + self._network_identifier.read(local_stream) + + if self.is_tag_next(enums.Tags.MACHINE_IDENTIFIER, local_stream): + self._machine_identifier = primitives.TextString( + tag=enums.Tags.MACHINE_IDENTIFIER + ) + self._machine_identifier.read(local_stream) + + if self.is_tag_next(enums.Tags.MEDIA_IDENTIFIER, local_stream): + self._media_identifier = primitives.TextString( + tag=enums.Tags.MEDIA_IDENTIFIER + ) + self._media_identifier.read(local_stream) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data 
encoding the DeviceCredential struct to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + """ + local_stream = BytearrayStream() + + if self._device_serial_number is not None: + self._device_serial_number.write(local_stream) + if self._password is not None: + self._password.write(local_stream) + if self._device_identifier is not None: + self._device_identifier.write(local_stream) + if self._network_identifier is not None: + self._network_identifier.write(local_stream) + if self._machine_identifier is not None: + self._machine_identifier.write(local_stream) + if self._media_identifier is not None: + self._media_identifier.write(local_stream) + + self.length = local_stream.length() + super(DeviceCredential, self).write(output_stream) + output_stream.write(local_stream.buffer) + + def __eq__(self, other): + if isinstance(other, DeviceCredential): + if self.device_serial_number != other.device_serial_number: + return False + elif self.password != other.password: + return False + elif self.device_identifier != other.device_identifier: + return False + elif self.network_identifier != other.network_identifier: + return False + elif self.machine_identifier != other.machine_identifier: + return False + elif self.media_identifier != other.media_identifier: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, DeviceCredential): + return not (self == other) + else: + return NotImplemented + + def __repr__(self): + args = ", ".join([ + "device_serial_number='{}'".format(self.device_serial_number), + "password='{}'".format(self.password), + "device_identifier='{}'".format(self.device_identifier), + "network_identifier='{}'".format(self.network_identifier), + "machine_identifier='{}'".format(self.machine_identifier), + "media_identifier='{}'".format(self.media_identifier), + ]) + return 
"DeviceCredential({})".format(args) + + def __str__(self): + return str({ + "device_serial_number": self.device_serial_number, + "password": self.password, + "device_identifier": self.device_identifier, + "network_identifier": self.network_identifier, + "machine_identifier": self.machine_identifier, + "media_identifier": self.media_identifier + }) + + +class AttestationCredential(CredentialValue): + """ + A struct representing an AttestationCredential object. + + Attributes: + nonce: A nonce value obtained from the key management server. + attestation_type: The type of attestation being used. + attestation_measurement: The attestation measurement of the client. + attestation_assertion: The attestation assertion from a third party. + """ + + def __init__(self, + nonce=None, + attestation_type=None, + attestation_measurement=None, + attestation_assertion=None): + """ + Construct an AttestationCredential struct. + + Args: + nonce (Nonce): A Nonce structure containing nonce data obtained + from the key management server. Optional, defaults to None. + Required for encoding and decoding. + attestation_type (enum): An AttestationType enumeration specifying + the type of attestation being used. Optional, defaults to None. + Required for encoding and decoding. + attestation_measurement (bytes): The device identifier for the + credential. Optional, defaults to None. Required for encoding + and decoding if the attestation assertion is not provided. + attestation_assertion (bytes): The network identifier for the + credential. Optional, defaults to None. Required for encoding + and decoding if the attestation measurement is not provided. 
+ """ + super(AttestationCredential, self).__init__(tag=Tags.CREDENTIAL_VALUE) + + self._nonce = None + self._attestation_type = None + self._attestation_measurement = None + self._attestation_assertion = None + + self.nonce = nonce + self.attestation_type = attestation_type + self.attestation_measurement = attestation_measurement + self.attestation_assertion = attestation_assertion + + @property + def nonce(self): + return self._nonce + + @nonce.setter + def nonce(self, value): + if value is None: + self._nonce = None + elif isinstance(value, Nonce): + self._nonce = value + else: + raise TypeError("Nonce must be a Nonce struct.") + + @property + def attestation_type(self): + if self._attestation_type: + return self._attestation_type.value + else: + return None + + @attestation_type.setter + def attestation_type(self, value): + if value is None: + self._attestation_type = None + elif isinstance(value, enums.AttestationType): + self._attestation_type = Enumeration( + enums.AttestationType, + value=value, + tag=Tags.ATTESTATION_TYPE + ) + else: + raise TypeError( + "Attestation type must be an AttestationType enumeration." 
+ ) + + @property + def attestation_measurement(self): + if self._attestation_measurement: + return self._attestation_measurement.value + else: + return None - def write(self, ostream): - tstream = BytearrayStream() + @attestation_measurement.setter + def attestation_measurement(self, value): + if value is None: + self._attestation_measurement = None + elif isinstance(value, six.binary_type): + self._attestation_measurement = primitives.ByteString( + value=value, + tag=enums.Tags.ATTESTATION_MEASUREMENT + ) + else: + raise TypeError("Attestation measurement must be bytes.") - self.attribute_name.write(tstream) - if self.attribute_index is not None: - self.attribute_index.write(tstream) - self.attribute_value.write(tstream) + @property + def attestation_assertion(self): + if self._attestation_assertion: + return self._attestation_assertion.value + else: + return None - # Write the length and value of the attribute - self.length = tstream.length() - super(Attribute, self).write(ostream) - ostream.write(tstream.buffer) + @attestation_assertion.setter + def attestation_assertion(self, value): + if value is None: + self._attestation_assertion = None + elif isinstance(value, six.binary_type): + self._attestation_assertion = primitives.ByteString( + value=value, + tag=enums.Tags.ATTESTATION_ASSERTION + ) + else: + raise TypeError("Attestation assertion must be bytes.") - def __repr__(self): - attribute_name = "attribute_name={0}".format(repr(self.attribute_name)) - attribute_index = "attribute_index={0}".format( - repr(self.attribute_index) - ) - attribute_value = "attribute_value={0}".format( - repr(self.attribute_value) - ) - return "Attribute({0}, {1}, {2})".format( - attribute_name, - attribute_index, - attribute_value - ) + def read(self, input_stream): + """ + Read the data encoding the AttestationCredential struct and decode it + into its constituent parts. 
- def __str__(self): - return str({ - 'attribute_name': str(self.attribute_name), - 'attribute_index': str(self.attribute_index), - 'attribute_value': str(self.attribute_value) - }) + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if either the nonce or attestation type are + missing from the encoding. Also raised if neither the + attestation measurement nor the attestation assertion are + included in the encoding. + + """ + super(AttestationCredential, self).read(input_stream) + local_stream = BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.NONCE, local_stream): + self._nonce = Nonce() + self._nonce.read(local_stream) + else: + raise ValueError( + "Attestation credential encoding is missing the nonce." + ) + + if self.is_tag_next(enums.Tags.ATTESTATION_TYPE, local_stream): + self._attestation_type = primitives.Enumeration( + enums.AttestationType, + tag=enums.Tags.ATTESTATION_TYPE + ) + self._attestation_type.read(local_stream) + else: + raise ValueError( + "Attestation credential encoding is missing the attestation " + "type." + ) + + self._attestation_measurement = None + if self.is_tag_next(enums.Tags.ATTESTATION_MEASUREMENT, local_stream): + self._attestation_measurement = primitives.ByteString( + tag=enums.Tags.ATTESTATION_MEASUREMENT + ) + self._attestation_measurement.read(local_stream) + + self._attestation_assertion = None + if self.is_tag_next(enums.Tags.ATTESTATION_ASSERTION, local_stream): + self._attestation_assertion = primitives.ByteString( + tag=enums.Tags.ATTESTATION_ASSERTION + ) + self._attestation_assertion.read(local_stream) + + if ((self._attestation_measurement is None) and + (self._attestation_assertion is None)): + raise ValueError( + "Attestation credential encoding is missing either the " + "attestation measurement or the attestation assertion." 
+ ) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the AttestationCredential struct to a stream. + + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if either the nonce or attestation type are + not defined. Also raised if neither the attestation measurement + nor the attestation assertion are defined. + """ + local_stream = BytearrayStream() + + if self._nonce: + self._nonce.write(local_stream) + else: + raise ValueError( + "Attestation credential struct is missing the nonce." + ) + + if self._attestation_type: + self._attestation_type.write(local_stream) + else: + raise ValueError( + "Attestation credential struct is missing the attestation " + "type." + ) + + if self._attestation_measurement: + self._attestation_measurement.write(local_stream) + if self._attestation_assertion: + self._attestation_assertion.write(local_stream) + + if ((self._attestation_measurement is None) and + (self._attestation_assertion is None)): + raise ValueError( + "Attestation credential struct is missing either the " + "attestation measurement or the attestation assertion." 
+ ) + + self.length = local_stream.length() + super(AttestationCredential, self).write(output_stream) + output_stream.write(local_stream.buffer) def __eq__(self, other): - if isinstance(other, Attribute): - if self.attribute_name != other.attribute_name: + if isinstance(other, AttestationCredential): + if self.nonce != other.nonce: return False - elif self.attribute_index != other.attribute_index: + elif self.attestation_type != other.attestation_type: return False - elif self.attribute_value != other.attribute_value: + elif self.attestation_measurement != other.attestation_measurement: + return False + elif self.attestation_assertion != other.attestation_assertion: return False else: return True @@ -166,230 +1046,209 @@ return NotImplemented def __ne__(self, other): - if isinstance(other, Attribute): - return not self.__eq__(other) + if isinstance(other, AttestationCredential): + return not (self == other) else: return NotImplemented + def __repr__(self): + args = ", ".join([ + "nonce={}".format(repr(self.nonce)), + "attestation_type={}".format(self.attestation_type), + "attestation_measurement={}".format(self.attestation_measurement), + "attestation_assertion={}".format(self.attestation_assertion) + ]) + return "AttestationCredential({})".format(args) -# 2.1.2 -class Credential(Struct): - - class CredentialType(Enumeration): - - def __init__(self, value=None): - super(Credential.CredentialType, self).__init__( - CredentialType, value, Tags.CREDENTIAL_TYPE) + def __str__(self): + return "{" \ + "'nonce': " + str(self.nonce) + ", " \ + "'attestation_type': " + str(self.attestation_type) + ", " \ + "'attestation_measurement': " + \ + str(self.attestation_measurement) + ", " \ + "'attestation_assertion': " + \ + str(self.attestation_assertion) + "}" - class UsernamePasswordCredential(Struct): - class Username(TextString): - def __init__(self, value=None): - super(Credential.UsernamePasswordCredential.Username, - self).__init__( - value, Tags.USERNAME) - - class 
Password(TextString): - def __init__(self, value=None): - super(Credential.UsernamePasswordCredential.Password, - self).__init__( - value, Tags.PASSWORD) - - def __init__(self, username=None, password=None): - super(Credential.UsernamePasswordCredential, self).__init__( - tag=Tags.CREDENTIAL_VALUE) - self.username = username - self.password = password - self.validate() - - def read(self, istream): - super(Credential.UsernamePasswordCredential, self).read(istream) - tstream = BytearrayStream(istream.read(self.length)) - - # Read the username of the credential - self.username = self.Username() - self.username.read(tstream) - - # Read the password if it is next - if self.is_tag_next(Tags.PASSWORD, tstream): - self.password = self.Password() - self.password.read(tstream) - - self.is_oversized(tstream) - self.validate() - - def write(self, ostream): - tstream = BytearrayStream() - - self.username.write(tstream) - if self.password is not None: - self.password.write(tstream) - - # Write the length and value of the credential - self.length = tstream.length() - super(Credential.UsernamePasswordCredential, self).write(ostream) - ostream.write(tstream.buffer) - - def validate(self): - pass - - class DeviceCredential(Struct): - - class DeviceSerialNumber(TextString): - - def __init__(self, value=None): - super(Credential.DeviceCredential.DeviceSerialNumber, self).\ - __init__(value, Tags.DEVICE_SERIAL_NUMBER) - - class Password(TextString): - - def __init__(self, value=None): - super(Credential.DeviceCredential.Password, self).\ - __init__(value, Tags.PASSWORD) - - class DeviceIdentifier(TextString): - - def __init__(self, value=None): - super(Credential.DeviceCredential.DeviceIdentifier, self).\ - __init__(value, Tags.DEVICE_IDENTIFIER) - - class NetworkIdentifier(TextString): - - def __init__(self, value=None): - super(Credential.DeviceCredential.NetworkIdentifier, self).\ - __init__(value, Tags.NETWORK_IDENTIFIER) - - class MachineIdentifier(TextString): - - def 
__init__(self, value=None): - super(Credential.DeviceCredential.MachineIdentifier, self).\ - __init__(value, Tags.MACHINE_IDENTIFIER) - - class MediaIdentifier(TextString): - - def __init__(self, value=None): - super(Credential.DeviceCredential.MediaIdentifier, self).\ - __init__(value, Tags.MEDIA_IDENTIFIER) - - def __init__(self, - device_serial_number=None, - password=None, - device_identifier=None, - network_identifier=None, - machine_identifier=None, - media_identifier=None): - super(Credential.DeviceCredential, self).__init__( - tag=Tags.CREDENTIAL_VALUE) - self.device_serial_number = device_serial_number - self.password = password - self.device_identifier = device_identifier - self.network_identifier = network_identifier - self.machine_identifier = machine_identifier - self.media_identifier = media_identifier - - def read(self, istream): - super(Credential.DeviceCredential, self).read(istream) - tstream = BytearrayStream(istream.read(self.length)) - - # Read the password if it is next - if self.is_tag_next(Tags.DEVICE_SERIAL_NUMBER, tstream): - self.device_serial_number = self.DeviceSerialNumber() - self.device_serial_number.read(tstream) - - # Read the password if it is next - if self.is_tag_next(Tags.PASSWORD, tstream): - self.password = self.Password() - self.password.read(tstream) - - # Read the password if it is next - if self.is_tag_next(Tags.DEVICE_IDENTIFIER, tstream): - self.device_identifier = self.DeviceIdentifier() - self.device_identifier.read(tstream) - - # Read the password if it is next - if self.is_tag_next(Tags.NETWORK_IDENTIFIER, tstream): - self.network_identifier = self.NetworkIdentifier() - self.network_identifier.read(tstream) - - # Read the password if it is next - if self.is_tag_next(Tags.MACHINE_IDENTIFIER, tstream): - self.machine_identifier = self.MachineIdentifier() - self.machine_identifier.read(tstream) - - # Read the password if it is next - if self.is_tag_next(Tags.MEDIA_IDENTIFIER, tstream): - self.media_identifier = 
self.MediaIdentifier() - self.media_identifier.read(tstream) - - self.is_oversized(tstream) - self.validate() - - def write(self, ostream): - tstream = BytearrayStream() - - if self.device_serial_number is not None: - self.device_serial_number.write(tstream) - if self.password is not None: - self.password.write(tstream) - if self.device_identifier is not None: - self.device_identifier.write(tstream) - if self.network_identifier is not None: - self.network_identifier.write(tstream) - if self.machine_identifier is not None: - self.machine_identifier.write(tstream) - if self.media_identifier is not None: - self.media_identifier.write(tstream) - - # Write the length and value of the credential - self.length = tstream.length() - super(Credential.DeviceCredential, self).write(ostream) - ostream.write(tstream.buffer) +class Credential(primitives.Struct): + """ + A struct representing a Credential object. - def validate(self): - pass + Attributes: + credential_type: The credential type, a CredentialType enumeration. + credential_value: The credential value, a CredentialValue instance. + """ def __init__(self, credential_type=None, credential_value=None): + """ + Construct a Credential struct. + + Args: + credential_type (CredentialType): An enumeration value that + specifies the type of the credential struct. Optional, + defaults to None. Required for encoding and decoding. + credential_value (CredentialValue): The credential value + corresponding to the credential type. Optional, defaults to + None. Required for encoding and decoding. 
+ """ super(Credential, self).__init__(tag=Tags.CREDENTIAL) + + self._credential_type = None + self._credential_value = None + self.credential_type = credential_type self.credential_value = credential_value - def read(self, istream): - super(Credential, self).read(istream) - tstream = BytearrayStream(istream.read(self.length)) + @property + def credential_type(self): + if self._credential_type: + return self._credential_type.value + else: + return None - # Read the type of the credential - self.credential_type = self.CredentialType() - self.credential_type.read(tstream) - - # Use the type to determine what credential value to read - if self.credential_type.value is CredentialType.USERNAME_AND_PASSWORD: - self.credential_value = self.UsernamePasswordCredential() - elif self.credential_type.value is CredentialType.DEVICE: - self.credential_value = self.DeviceCredential() - else: - # TODO (peter-hamilton) Use more descriptive error here - raise NotImplementedError() - self.credential_value.read(tstream) + @credential_type.setter + def credential_type(self, value): + if value is None: + self._credential_type = None + elif isinstance(value, enums.CredentialType): + self._credential_type = Enumeration( + enums.CredentialType, + value=value, + tag=Tags.CREDENTIAL_TYPE + ) + else: + raise TypeError( + "Credential type must be a CredentialType enumeration." + ) - self.is_oversized(tstream) - self.validate() + @property + def credential_value(self): + return self._credential_value - def write(self, ostream): - tstream = BytearrayStream() + @credential_value.setter + def credential_value(self, value): + if value is None: + self._credential_value = None + elif isinstance(value, CredentialValue): + self._credential_value = value + else: + raise TypeError( + "Credential value must be a CredentialValue struct." 
+ ) - self.credential_type.write(tstream) - self.credential_value.write(tstream) + def read(self, input_stream): + """ + Read the data encoding the Credential struct and decode it into its + constituent parts. - # Write the length and value of the credential - self.length = tstream.length() - super(Credential, self).write(ostream) - ostream.write(tstream.buffer) + Args: + input_stream (stream): A data stream containing encoded object + data, supporting a read method; usually a BytearrayStream + object. - def validate(self): - pass + Raises: + ValueError: Raised if either the credential type or value are + missing from the encoding. + """ + super(Credential, self).read(input_stream) + local_stream = BytearrayStream(input_stream.read(self.length)) + + if self.is_tag_next(enums.Tags.CREDENTIAL_TYPE, local_stream): + self._credential_type = primitives.Enumeration( + enum=enums.CredentialType, + tag=enums.Tags.CREDENTIAL_TYPE + ) + self._credential_type.read(local_stream) + else: + raise ValueError( + "Credential encoding missing the credential type." + ) + + if self.is_tag_next(enums.Tags.CREDENTIAL_VALUE, local_stream): + if self.credential_type == \ + enums.CredentialType.USERNAME_AND_PASSWORD: + self._credential_value = UsernamePasswordCredential() + elif self.credential_type == enums.CredentialType.DEVICE: + self._credential_value = DeviceCredential() + elif self.credential_type == enums.CredentialType.ATTESTATION: + self._credential_value = AttestationCredential() + else: + raise ValueError( + "Credential encoding includes unrecognized credential " + "type." + ) + self._credential_value.read(local_stream) + else: + raise ValueError( + "Credential encoding missing the credential value." + ) + + self.is_oversized(local_stream) + + def write(self, output_stream): + """ + Write the data encoding the Credential struct to a stream. 
+ + Args: + output_stream (stream): A data stream in which to encode object + data, supporting a write method; usually a BytearrayStream + object. + + Raises: + ValueError: Raised if either the credential type or value are not + defined. + """ + local_stream = BytearrayStream() + + if self._credential_type: + self._credential_type.write(local_stream) + else: + raise ValueError( + "Credential struct missing the credential type." + ) + + if self._credential_value: + self._credential_value.write(local_stream) + else: + raise ValueError( + "Credential struct missing the credential value." + ) + + self.length = local_stream.length() + super(Credential, self).write(output_stream) + output_stream.write(local_stream.buffer) + + def __eq__(self, other): + if isinstance(other, Credential): + if self.credential_type != other.credential_type: + return False + elif self.credential_value != other.credential_value: + return False + else: + return True + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, Credential): + return not (self == other) + else: + return NotImplemented + + def __repr__(self): + args = ", ".join([ + "credential_type={}".format(self.credential_type), + "credential_value={}".format(repr(self.credential_value)) + ]) + return "Credential({})".format(args) + + def __str__(self): + return str({ + "credential_type": self.credential_type, + "credential_value": str(self.credential_value) + }) -# 2.1.3 class KeyBlock(Struct): class KeyCompressionType(Enumeration): @@ -474,8 +1333,12 @@ member = 'KeyBlock.key_format_type' exp_type = KeyFormatType rcv_type = type(self.key_format_type) - msg = ErrorStrings.BAD_EXP_RECV.format(member, 'type', - exp_type, rcv_type) + msg = exceptions.ErrorStrings.BAD_EXP_RECV.format( + member, + 'type', + exp_type, + rcv_type + ) raise TypeError(msg) @@ -598,67 +1461,6 @@ raise TypeError(msg) -# 2.1.5 -class WrappingMethod(Enumeration): - - def __init__(self, value=None): - super(WrappingMethod, 
self).__init__( - enums.WrappingMethod, value, Tags.WRAPPING_METHOD) - - -class EncodingOption(Enumeration): - - def __init__(self, value=None): - super(EncodingOption, self).__init__( - enums.EncodingOption, value, Tags.ENCODING_OPTION) - - -class KeyInformation(Struct): - - def __init__(self, - unique_identifier=None, - cryptographic_parameters=None, - tag=Tags.ENCRYPTION_KEY_INFORMATION): - super(KeyInformation, self).__init__(tag=tag) - self.unique_identifier = unique_identifier - self.cryptographic_parameters = cryptographic_parameters - self.validate() - - def read(self, istream): - super(KeyInformation, self).read(istream) - tstream = BytearrayStream(istream.read(self.length)) - - self.unique_identifier = attributes.UniqueIdentifier() - self.unique_identifier.read(tstream) - - if self.is_tag_next(Tags.CRYPTOGRAPHIC_PARAMETERS, tstream): - self.cryptographic_parameters = CryptographicParameters() - self.cryptographic_parameters.read(tstream) - - self.is_oversized(tstream) - self.validate() - - def write(self, ostream): - tstream = BytearrayStream() - - self.unique_identifier.write(tstream) - - if self.cryptographic_parameters is not None: - self.cryptographic_parameters.write(tstream) - - # Write the length and value of the template attribute - self.length = tstream.length() - super(KeyInformation, self).write(ostream) - ostream.write(tstream.buffer) - - def validate(self): - self.__validate() - - def __validate(self): - # TODO (peter-hamilton) Finish implementation. - pass - - class EncryptionKeyInformation(Struct): """ A set of values detailing how an encrypted value was encrypted. @@ -1653,6 +2455,8 @@ if len(self.attributes) != len(other.attributes): return False + # TODO (peter-hamilton) Allow order independence? 
+ for i in xrange(len(self.names)): a = self.names[i] b = other.names[i] diff -Nru python-pykmip-0.7.0/kmip/core/policy.py python-pykmip-0.8.0/kmip/core/policy.py --- python-pykmip-0.7.0/kmip/core/policy.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/policy.py 2018-05-17 22:30:38.000000000 +0000 @@ -19,240 +19,277 @@ from kmip.core import enums -def read_policy_from_file(path): - with open(path, 'r') as f: +def parse_policy(policy): + result = {} + + for object_type, operation_policies in six.iteritems(policy): + processed_operation_policies = {} + + for operation, permission in six.iteritems(operation_policies): + try: + enum_operation = enums.Operation[operation] + except Exception: + raise ValueError( + "'{0}' is not a valid Operation value.".format( + operation + ) + ) + try: + enum_policy = enums.Policy[permission] + except Exception: + raise ValueError( + "'{0}' is not a valid Policy value.".format( + permission + ) + ) + + processed_operation_policies[enum_operation] = enum_policy + try: - policy_blob = json.loads(f.read()) - except Exception as e: + enum_type = enums.ObjectType[object_type] + except Exception: raise ValueError( - "An error occurred while attempting to parse the JSON " - "file. 
{0}".format(e) + "'{0}' is not a valid ObjectType value.".format( + object_type + ) ) - policies = dict() + result[enum_type] = processed_operation_policies - for name, object_policies in six.iteritems(policy_blob): - processed_object_policies = dict() + return result - for object_type, operation_policies in six.iteritems(object_policies): - processed_operation_policies = dict() - for operation, permission in six.iteritems(operation_policies): - - try: - enum_operation = enums.Operation[operation] - except Exception: - raise ValueError( - "'{0}' is not a valid Operation value.".format( - operation - ) - ) - try: - enum_policy = enums.Policy[permission] - except Exception: - raise ValueError( - "'{0}' is not a valid Policy value.".format( - permission - ) - ) +def read_policy_from_file(path): + policy_blob = {} - processed_operation_policies.update([ - (enum_operation, enum_policy) - ]) + with open(path, 'r') as f: + try: + policy_blob = json.loads(f.read()) + except Exception as e: + raise ValueError( + "Loading the policy file '{}' generated a JSON error: " + "{}".format(path, e) + ) - try: - enum_type = enums.ObjectType[object_type] - except Exception: - raise ValueError( - "'{0}' is not a valid ObjectType value.".format( - object_type + policy_sections = {'groups', 'preset'} + object_types = set([t.name for t in enums.ObjectType]) + result = {} + + for name, object_policy in policy_blob.items(): + if len(object_policy.keys()) == 0: + continue + + # Use subset checking to determine what type of policy we have + sections = set([s for s in six.iterkeys(object_policy)]) + if sections <= policy_sections: + parsed_policies = dict() + + default_policy = object_policy.get('preset') + if default_policy: + parsed_policies['preset'] = parse_policy(default_policy) + + group_policies = object_policy.get('groups') + if group_policies: + parsed_group_policies = dict() + for group_name, group_policy in six.iteritems(group_policies): + parsed_group_policies[group_name] = 
parse_policy( + group_policy ) - ) - - processed_object_policies.update([ - (enum_type, processed_operation_policies) - ]) + parsed_policies['groups'] = parsed_group_policies - policies.update([(name, processed_object_policies)]) + result[name] = parsed_policies + elif sections <= object_types: + policy = parse_policy(object_policy) + result[name] = {'preset': policy} + else: + invalid_sections = sections - policy_sections - object_types + raise ValueError( + "Policy '{}' contains an invalid section named: " + "{}".format(name, invalid_sections.pop()) + ) - return policies + return result policies = { 'default': { - enums.ObjectType.CERTIFICATE: { - enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, - enums.Operation.CHECK: enums.Policy.ALLOW_ALL, - enums.Operation.GET: enums.Policy.ALLOW_ALL, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_ALL, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_ALL, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_ALL, - enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, - enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, - enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, - enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, - enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER - }, - enums.ObjectType.SYMMETRIC_KEY: { - enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, - enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, - enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, - enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, - enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, - enums.Operation.GET: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - 
enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, - enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, - enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, - enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, - enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, - enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER - }, - enums.ObjectType.PUBLIC_KEY: { - enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, - enums.Operation.CHECK: enums.Policy.ALLOW_ALL, - enums.Operation.GET: enums.Policy.ALLOW_ALL, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_ALL, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_ALL, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_ALL, - enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, - enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, - enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, - enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, - enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER - }, - enums.ObjectType.PRIVATE_KEY: { - enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, - enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, - enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, - enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, - enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, - enums.Operation.GET: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - 
enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, - enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, - enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, - enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, - enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, - enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER - }, - enums.ObjectType.SPLIT_KEY: { - enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, - enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, - enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, - enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, - enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, - enums.Operation.GET: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, - enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, - enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, - enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, - enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, - enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER - }, - enums.ObjectType.TEMPLATE: { - enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, - enums.Operation.GET: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER - }, - enums.ObjectType.SECRET_DATA: { - enums.Operation.REKEY: 
enums.Policy.ALLOW_OWNER, - enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, - enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, - enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, - enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, - enums.Operation.GET: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, - enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, - enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, - enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, - enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, - enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER - }, - enums.ObjectType.OPAQUE_DATA: { - enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, - enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, - enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, - enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, - enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, - enums.Operation.GET: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, - enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, - enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, - enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, - enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, - 
enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER - }, - enums.ObjectType.PGP_KEY: { - enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, - enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, - enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, - enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, - enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, - enums.Operation.GET: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, - enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, - enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, - enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, - enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, - enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, - enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, - enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER + 'preset': { + enums.ObjectType.CERTIFICATE: { + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + enums.Operation.CHECK: enums.Policy.ALLOW_ALL, + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_ALL, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_ALL, + enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_ALL, + enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, + enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, + enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, + enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER + }, + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.REKEY: 
enums.Policy.ALLOW_OWNER, + enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, + enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, + enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, + enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, + enums.Operation.GET: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, + enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, + enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, + enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, + enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, + enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER + }, + enums.ObjectType.PUBLIC_KEY: { + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + enums.Operation.CHECK: enums.Policy.ALLOW_ALL, + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_ALL, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_ALL, + enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_ALL, + enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, + enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, + enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, + enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER + }, + enums.ObjectType.PRIVATE_KEY: { + enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, + enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, + enums.Operation.DERIVE_KEY: 
enums.Policy.ALLOW_OWNER, + enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, + enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, + enums.Operation.GET: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, + enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, + enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, + enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, + enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, + enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER + }, + enums.ObjectType.SPLIT_KEY: { + enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, + enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, + enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, + enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, + enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, + enums.Operation.GET: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, + enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, + enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, + enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, + enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, + enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER + }, + enums.ObjectType.TEMPLATE: { + enums.Operation.LOCATE: 
enums.Policy.ALLOW_OWNER, + enums.Operation.GET: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, + enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER + }, + enums.ObjectType.SECRET_DATA: { + enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, + enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, + enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, + enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, + enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, + enums.Operation.GET: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, + enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, + enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, + enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, + enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, + enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER + }, + enums.ObjectType.OPAQUE_DATA: { + enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, + enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, + enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, + enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, + enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, + enums.Operation.GET: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, + 
enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, + enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, + enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, + enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, + enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER + }, + enums.ObjectType.PGP_KEY: { + enums.Operation.REKEY: enums.Policy.ALLOW_OWNER, + enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER, + enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER, + enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER, + enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, + enums.Operation.GET: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER, + enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER, + enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER, + enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER, + enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER, + enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER, + enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER, + enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER + } } }, 'public': { - enums.ObjectType.TEMPLATE: { - enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, - enums.Operation.GET: enums.Policy.ALLOW_ALL, - enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_ALL, - enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_ALL, - enums.Operation.ADD_ATTRIBUTE: enums.Policy.DISALLOW_ALL, - enums.Operation.MODIFY_ATTRIBUTE: 
enums.Policy.DISALLOW_ALL, - enums.Operation.DELETE_ATTRIBUTE: enums.Policy.DISALLOW_ALL, - enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + 'preset': { + enums.ObjectType.TEMPLATE: { + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_ALL, + enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_ALL, + enums.Operation.ADD_ATTRIBUTE: enums.Policy.DISALLOW_ALL, + enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.DISALLOW_ALL, + enums.Operation.DELETE_ATTRIBUTE: enums.Policy.DISALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } } } } diff -Nru python-pykmip-0.7.0/kmip/core/primitives.py python-pykmip-0.8.0/kmip/core/primitives.py --- python-pykmip-0.7.0/kmip/core/primitives.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/primitives.py 2017-12-08 17:36:18.000000000 +0000 @@ -22,10 +22,7 @@ from struct import pack, unpack -from kmip.core.errors import ErrorStrings - from kmip.core import enums -from kmip.core import errors from kmip.core import exceptions from kmip.core import utils @@ -45,7 +42,7 @@ def is_oversized(self, stream): extra = len(stream.peek()) if extra > 0: - raise errors.StreamNotEmptyError(Base.__name__, extra) + raise exceptions.StreamNotEmptyError(Base.__name__, extra) def read_tag(self, istream): # Read in the bytes for the tag @@ -56,8 +53,12 @@ # Verify that the tag matches for the current object if enum_tag is not self.tag: - raise errors.ReadValueError(Base.__name__, 'tag', - hex(self.tag.value), hex(tag)) + raise exceptions.ReadValueError( + Base.__name__, + 'tag', + hex(self.tag.value), + hex(tag) + ) def read_type(self, istream): # Read in the bytes for the type @@ -65,15 +66,23 @@ num_bytes = len(tts) if num_bytes != self.TYPE_SIZE: min_bytes = 'a minimum of {0} bytes'.format(self.TYPE_SIZE) - raise errors.ReadValueError(Base.__name__, 'type', min_bytes, - '{0} bytes'.format(num_bytes)) + raise 
exceptions.ReadValueError( + Base.__name__, + 'type', + min_bytes, + '{0} bytes'.format(num_bytes) + ) typ = unpack('!B', tts)[0] enum_typ = enums.Types(typ) if enum_typ is not self.type: - raise errors.ReadValueError(Base.__name__, 'type', - self.type.value, typ) + raise exceptions.ReadValueError( + Base.__name__, + 'type', + self.type.value, + typ + ) def read_length(self, istream): # Read in the bytes for the length @@ -81,8 +90,12 @@ num_bytes = len(lst) if num_bytes != self.LENGTH_SIZE: min_bytes = 'a minimum of {0} bytes'.format(self.LENGTH_SIZE) - raise errors.ReadValueError(Base.__name__, 'length', min_bytes, - '{0} bytes'.format(num_bytes)) + raise exceptions.ReadValueError( + Base.__name__, + 'length', + min_bytes, + '{0} bytes'.format(num_bytes) + ) self.length = unpack('!I', lst)[0] def read_value(self, istream): @@ -99,20 +112,24 @@ def write_type(self, ostream): if type(self.type) is not enums.Types: - msg = ErrorStrings.BAD_EXP_RECV + msg = exceptions.ErrorStrings.BAD_EXP_RECV raise TypeError(msg.format(Base.__name__, 'type', enums.Types, type(self.type))) ostream.write(pack('!B', self.type.value)) def write_length(self, ostream): if type(self.length) is not int: - msg = ErrorStrings.BAD_EXP_RECV + msg = exceptions.ErrorStrings.BAD_EXP_RECV raise TypeError(msg.format(Base.__name__, 'length', int, type(self.length))) num_bytes = utils.count_bytes(self.length) if num_bytes > self.LENGTH_SIZE: - raise errors.WriteOverflowError(Base.__name__, 'length', - self.LENGTH_SIZE, num_bytes) + raise exceptions.WriteOverflowError( + Base.__name__, + 'length', + self.LENGTH_SIZE, + num_bytes + ) ostream.write(pack('!I', self.length)) def write_value(self, ostream): @@ -188,15 +205,23 @@ def read_value(self, istream): if self.length is not self.LENGTH: - raise errors.ReadValueError(Integer.__name__, 'length', - self.LENGTH, self.length) + raise exceptions.ReadValueError( + Integer.__name__, + 'length', + self.LENGTH, + self.length + ) self.value = 
unpack(self.pack_string, istream.read(self.length))[0] pad = unpack(self.pack_string, istream.read(self.padding_length))[0] if pad is not 0: - raise errors.ReadValueError(Integer.__name__, 'pad', 0, - pad) + raise exceptions.ReadValueError( + Integer.__name__, + 'pad', + 0, + pad + ) self.validate() def read(self, istream): @@ -799,8 +824,12 @@ for _ in range(self.padding_length): pad = unpack('!B', istream.read(1))[0] if pad is not 0: - raise errors.ReadValueError(TextString.__name__, 'pad', 0, - pad) + raise exceptions.ReadValueError( + TextString.__name__, + 'pad', + 0, + pad + ) def read(self, istream): super(TextString, self).read(istream) @@ -826,7 +855,7 @@ def __validate(self): if self.value is not None: if not isinstance(self.value, six.string_types): - msg = ErrorStrings.BAD_EXP_RECV + msg = exceptions.ErrorStrings.BAD_EXP_RECV raise TypeError(msg.format('TextString', 'value', str, type(self.value))) @@ -890,8 +919,12 @@ for _ in range(self.padding_length): pad = unpack('!B', istream.read(1))[0] if pad is not 0: - raise errors.ReadValueError(TextString.__name__, 'pad', 0, - pad) + raise exceptions.ReadValueError( + TextString.__name__, + 'pad', + 0, + pad + ) def read(self, istream): super(ByteString, self).read(istream) @@ -919,7 +952,7 @@ if self.value is not None: data_type = type(self.value) if data_type is not bytes: - msg = ErrorStrings.BAD_EXP_RECV + msg = exceptions.ErrorStrings.BAD_EXP_RECV raise TypeError(msg.format('ByteString', 'value', bytes, data_type)) diff -Nru python-pykmip-0.7.0/kmip/core/server.py python-pykmip-0.8.0/kmip/core/server.py --- python-pykmip-0.7.0/kmip/core/server.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/server.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,53 +0,0 @@ -# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -class KMIP(object): - - def __init__(self): - pass - - def create(self, object_type, template_attribute, credential=None): - raise NotImplementedError() - - def create_key_pair(self, common_template_attribute, - private_key_template_attribute, - public_key_template_attribute): - raise NotImplementedError() - - def register(self, object_type, template_attribute, secret, - credential=None): - raise NotImplementedError() - - def rekey_key_pair(self, private_key_unique_identifier, - offset, common_template_attribute, - private_key_template_attribute, - public_key_template_attribute): - raise NotImplementedError() - - def get(self, uuid=None, key_format_type=None, key_compression_type=None, - key_wrapping_specification=None, credential=None): - raise NotImplementedError() - - def destroy(self, uuid, credential=None): - raise NotImplementedError() - - def locate(self, maximum_items=None, storate_status_mask=None, - object_group_member=None, attributes=None, - credential=None): - raise NotImplementedError() - - def discover_versions(self, protocol_versions=None): - raise NotImplementedError() diff -Nru python-pykmip-0.7.0/kmip/core/utils.py python-pykmip-0.8.0/kmip/core/utils.py --- python-pykmip-0.7.0/kmip/core/utils.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/core/utils.py 2017-12-08 17:36:18.000000000 +0000 @@ -16,7 +16,7 @@ from binascii import hexlify import io -from kmip.core.errors import 
ErrorStrings +from kmip.core import exceptions def bit_length(num): @@ -52,9 +52,8 @@ def build_er_error(class_object, descriptor, expected, received, attribute=None): - msg = ErrorStrings.BAD_EXP_RECV + msg = exceptions.ErrorStrings.BAD_EXP_RECV - class_string = '' if attribute is None: class_string = '{0}'.format(class_object.__name__) else: diff -Nru python-pykmip-0.7.0/kmip/demos/pie/create_key_pair.py python-pykmip-0.8.0/kmip/demos/pie/create_key_pair.py --- python-pykmip-0.7.0/kmip/demos/pie/create_key_pair.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/create_key_pair.py 2018-04-17 20:03:33.000000000 +0000 @@ -43,9 +43,16 @@ algorithm = getattr(enums.CryptographicAlgorithm, algorithm, None) # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: - public_uid, private_uid = client.create_key_pair(algorithm, length) + public_uid, private_uid = client.create_key_pair( + algorithm, + length, + operation_policy_name=opts.operation_policy_name + ) logger.info("Successfully created public key with ID: {0}".format( public_uid)) logger.info("Successfully created private key with ID: {0}".format( diff -Nru python-pykmip-0.7.0/kmip/demos/pie/create.py python-pykmip-0.8.0/kmip/demos/pie/create.py --- python-pykmip-0.7.0/kmip/demos/pie/create.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/create.py 2018-04-17 20:03:33.000000000 +0000 @@ -44,9 +44,16 @@ algorithm = getattr(enums.CryptographicAlgorithm, algorithm, None) # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: - uid = client.create(algorithm, length) + uid = client.create( + algorithm, + length, + operation_policy_name=opts.operation_policy_name + ) 
logger.info("Successfully created symmetric key with ID: " "{0}".format(uid)) except Exception as e: diff -Nru python-pykmip-0.7.0/kmip/demos/pie/decrypt.py python-pykmip-0.8.0/kmip/demos/pie/decrypt.py --- python-pykmip-0.7.0/kmip/demos/pie/decrypt.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/decrypt.py 2018-04-17 20:03:33.000000000 +0000 @@ -54,7 +54,10 @@ message = binascii.unhexlify(message[1:]) # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: # Decrypt the cipher text with the encryption key. try: plain_text = client.decrypt( diff -Nru python-pykmip-0.7.0/kmip/demos/pie/derive_key.py python-pykmip-0.8.0/kmip/demos/pie/derive_key.py --- python-pykmip-0.7.0/kmip/demos/pie/derive_key.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/derive_key.py 2018-04-17 20:03:33.000000000 +0000 @@ -30,7 +30,10 @@ config = opts.config # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: # Create keys to use for derivation try: key_id = client.create( diff -Nru python-pykmip-0.7.0/kmip/demos/pie/destroy.py python-pykmip-0.8.0/kmip/demos/pie/destroy.py --- python-pykmip-0.7.0/kmip/demos/pie/destroy.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/destroy.py 2018-04-17 20:03:33.000000000 +0000 @@ -37,7 +37,10 @@ sys.exit() # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: client.destroy(uid) logger.info("Successfully destroyed secret with ID: {0}".format( diff -Nru python-pykmip-0.7.0/kmip/demos/pie/encrypt.py python-pykmip-0.8.0/kmip/demos/pie/encrypt.py --- 
python-pykmip-0.7.0/kmip/demos/pie/encrypt.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/encrypt.py 2018-04-17 20:03:33.000000000 +0000 @@ -50,7 +50,10 @@ message = bytes(message, 'utf-8') # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: # Create an encryption key. try: key_id = client.create( diff -Nru python-pykmip-0.7.0/kmip/demos/pie/get_attribute_list.py python-pykmip-0.8.0/kmip/demos/pie/get_attribute_list.py --- python-pykmip-0.7.0/kmip/demos/pie/get_attribute_list.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/get_attribute_list.py 2018-04-17 20:03:33.000000000 +0000 @@ -37,7 +37,10 @@ sys.exit() # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: attribute_names = client.get_attribute_list(uid) logger.info("Successfully retrieved {0} attribute names:".format( diff -Nru python-pykmip-0.7.0/kmip/demos/pie/get.py python-pykmip-0.8.0/kmip/demos/pie/get.py --- python-pykmip-0.7.0/kmip/demos/pie/get.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/get.py 2018-04-17 20:03:33.000000000 +0000 @@ -37,7 +37,10 @@ sys.exit() # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: secret = client.get(uid) logger.info("Successfully retrieved secret with ID: {0}".format( diff -Nru python-pykmip-0.7.0/kmip/demos/pie/locate.py python-pykmip-0.8.0/kmip/demos/pie/locate.py --- python-pykmip-0.7.0/kmip/demos/pie/locate.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/locate.py 2018-04-17 20:03:33.000000000 +0000 @@ -53,7 +53,10 
@@ attributes = [name_obj] # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: uuids = client.locate(attributes=attributes) logger.info("Located uuids: {0}".format(uuids)) diff -Nru python-pykmip-0.7.0/kmip/demos/pie/mac.py python-pykmip-0.8.0/kmip/demos/pie/mac.py --- python-pykmip-0.7.0/kmip/demos/pie/mac.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/mac.py 2018-04-17 20:03:33.000000000 +0000 @@ -48,7 +48,10 @@ algorithm = getattr(enums.CryptographicAlgorithm, algorithm, None) # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: uid, mac_data = client.mac(data, uid, algorithm) logger.info("Successfully done MAC using key with ID: " diff -Nru python-pykmip-0.7.0/kmip/demos/pie/register_certificate.py python-pykmip-0.8.0/kmip/demos/pie/register_certificate.py --- python-pykmip-0.7.0/kmip/demos/pie/register_certificate.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/register_certificate.py 2018-04-17 20:03:33.000000000 +0000 @@ -88,9 +88,13 @@ name = 'Demo X.509 Certificate' cert = objects.X509Certificate(value, usage_mask, name) + cert.operation_policy_name = opts.operation_policy_name # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: uid = client.register(cert) logger.info("Successfully registered certificate with ID: " diff -Nru python-pykmip-0.7.0/kmip/demos/pie/register_opaque_object.py python-pykmip-0.8.0/kmip/demos/pie/register_opaque_object.py --- python-pykmip-0.7.0/kmip/demos/pie/register_opaque_object.py 2017-11-14 06:30:49.000000000 +0000 +++ 
python-pykmip-0.8.0/kmip/demos/pie/register_opaque_object.py 2018-04-17 20:03:33.000000000 +0000 @@ -36,9 +36,13 @@ name = 'Demo Opaque Object' obj = objects.OpaqueObject(value, opaque_type, name) + obj.operation_policy_name = opts.operation_policy_name # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: uid = client.register(obj) logger.info("Successfully registered opaque object with ID: " diff -Nru python-pykmip-0.7.0/kmip/demos/pie/register_private_key.py python-pykmip-0.8.0/kmip/demos/pie/register_private_key.py --- python-pykmip-0.7.0/kmip/demos/pie/register_private_key.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/register_private_key.py 2018-04-17 20:03:33.000000000 +0000 @@ -115,9 +115,13 @@ key = objects.PrivateKey( algorithm, length, value, format_type, usage_mask, name) + key.operation_policy_name = opts.operation_policy_name # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: uid = client.register(key) logger.info("Successfully registered private key with ID: " diff -Nru python-pykmip-0.7.0/kmip/demos/pie/register_public_key.py python-pykmip-0.8.0/kmip/demos/pie/register_public_key.py --- python-pykmip-0.7.0/kmip/demos/pie/register_public_key.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/register_public_key.py 2018-04-17 20:03:33.000000000 +0000 @@ -57,9 +57,13 @@ key = objects.PublicKey( algorithm, length, value, format_type, usage_mask, name) + key.operation_policy_name = opts.operation_policy_name # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: 
uid = client.register(key) logger.info("Successfully registered public key with ID: " diff -Nru python-pykmip-0.7.0/kmip/demos/pie/register_secret_data.py python-pykmip-0.8.0/kmip/demos/pie/register_secret_data.py --- python-pykmip-0.7.0/kmip/demos/pie/register_secret_data.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/register_secret_data.py 2018-04-17 20:03:33.000000000 +0000 @@ -38,9 +38,13 @@ name = 'Demo Secret Data' secret = objects.SecretData(value, data_type, usage_mask, name) + secret.operation_policy_name = opts.operation_policy_name # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: uid = client.register(secret) logger.info( diff -Nru python-pykmip-0.7.0/kmip/demos/pie/register_symmetric_key.py python-pykmip-0.8.0/kmip/demos/pie/register_symmetric_key.py --- python-pykmip-0.7.0/kmip/demos/pie/register_symmetric_key.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/register_symmetric_key.py 2018-04-17 20:03:33.000000000 +0000 @@ -41,9 +41,13 @@ name = 'Demo Symmetric Key' key = objects.SymmetricKey(algorithm, length, value, usage_mask, name) + key.operation_policy_name = opts.operation_policy_name # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: try: uid = client.register(key) logger.info("Successfully registered symmetric key with ID: " diff -Nru python-pykmip-0.7.0/kmip/demos/pie/signature_verify.py python-pykmip-0.8.0/kmip/demos/pie/signature_verify.py --- python-pykmip-0.7.0/kmip/demos/pie/signature_verify.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/signature_verify.py 2018-04-17 20:03:33.000000000 +0000 @@ -31,7 +31,10 @@ config = opts.config # Build the client and 
connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: # Create keys to use for derivation try: signing_key_id = client.register( diff -Nru python-pykmip-0.7.0/kmip/demos/pie/sign.py python-pykmip-0.8.0/kmip/demos/pie/sign.py --- python-pykmip-0.7.0/kmip/demos/pie/sign.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/pie/sign.py 2018-04-17 20:03:33.000000000 +0000 @@ -31,7 +31,10 @@ config = opts.config # Build the client and connect to the server - with client.ProxyKmipClient(config=config) as client: + with client.ProxyKmipClient( + config=config, + config_file=opts.config_file + ) as client: # Create keys to use for derivation try: signing_key_id = client.register( diff -Nru python-pykmip-0.7.0/kmip/demos/units/activate.py python-pykmip-0.8.0/kmip/demos/units/activate.py --- python-pykmip-0.7.0/kmip/demos/units/activate.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/activate.py 2018-04-17 20:03:33.000000000 +0000 @@ -40,7 +40,7 @@ sys.exit() # Build the client and connect to the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() # Activate the object diff -Nru python-pykmip-0.7.0/kmip/demos/units/create_key_pair.py python-pykmip-0.8.0/kmip/demos/units/create_key_pair.py --- python-pykmip-0.7.0/kmip/demos/units/create_key_pair.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/create_key_pair.py 2018-04-17 20:03:33.000000000 +0000 @@ -13,6 +13,7 @@ # License for the specific language governing permissions and limitations # under the License. 
+from kmip.core import enums from kmip.core.enums import AttributeType from kmip.core.enums import CredentialType from kmip.core.enums import CryptographicAlgorithm @@ -86,7 +87,7 @@ credential = credential_factory.create_credential(credential_type, credential_value) # Build the client and connect to the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() algorithm_obj = attribute_factory.create_attribute(attribute_type, @@ -108,6 +109,14 @@ length) attributes = [algorithm_obj, length_obj, name, usage_mask] + + if opts.operation_policy_name is not None: + opn = attribute_factory.create_attribute( + enums.AttributeType.OPERATION_POLICY_NAME, + opts.operation_policy_name + ) + attributes.append(opn) + common = CommonTemplateAttribute(attributes=attributes) private = PrivateKeyTemplateAttribute(attributes=attributes) public = PublicKeyTemplateAttribute(attributes=attributes) diff -Nru python-pykmip-0.7.0/kmip/demos/units/create.py python-pykmip-0.8.0/kmip/demos/units/create.py --- python-pykmip-0.7.0/kmip/demos/units/create.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/create.py 2018-04-17 20:03:33.000000000 +0000 @@ -13,6 +13,7 @@ # License for the specific language governing permissions and limitations # under the License. 
+from kmip.core import enums from kmip.core.enums import AttributeType from kmip.core.enums import CredentialType from kmip.core.enums import CryptographicAlgorithm @@ -73,7 +74,7 @@ credential = credential_factory.create_credential(credential_type, credential_value) # Build the client and connect to the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() # Build the different object attributes @@ -108,6 +109,14 @@ name = Attribute(attribute_name=name, attribute_value=value) attributes = [algorithm_obj, usage_mask, length_obj, name] + + if opts.operation_policy_name is not None: + opn = attribute_factory.create_attribute( + enums.AttributeType.OPERATION_POLICY_NAME, + opts.operation_policy_name + ) + attributes.append(opn) + template_attribute = TemplateAttribute(attributes=attributes) # Create the SYMMETRIC_KEY object diff -Nru python-pykmip-0.7.0/kmip/demos/units/destroy.py python-pykmip-0.8.0/kmip/demos/units/destroy.py --- python-pykmip-0.7.0/kmip/demos/units/destroy.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/destroy.py 2018-04-17 20:03:33.000000000 +0000 @@ -59,7 +59,7 @@ credential = credential_factory.create_credential(credential_type, credential_value) # Build the client and connect to the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() # Destroy the SYMMETRIC_KEY object diff -Nru python-pykmip-0.7.0/kmip/demos/units/discover_versions.py python-pykmip-0.8.0/kmip/demos/units/discover_versions.py --- python-pykmip-0.7.0/kmip/demos/units/discover_versions.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/discover_versions.py 2018-04-17 20:03:33.000000000 +0000 @@ -42,11 +42,10 @@ if opts.protocol_versions is not None: for version in re.split(',| ', opts.protocol_versions): mm = re.split('\.', version) - 
protocol_versions.append(ProtocolVersion.create(int(mm[0]), - int(mm[1]))) + protocol_versions.append(ProtocolVersion(int(mm[0]), int(mm[1]))) # Build the client and connect to the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() result = client.discover_versions(protocol_versions=protocol_versions) diff -Nru python-pykmip-0.7.0/kmip/demos/units/get.py python-pykmip-0.8.0/kmip/demos/units/get.py --- python-pykmip-0.7.0/kmip/demos/units/get.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/get.py 2018-04-17 20:03:33.000000000 +0000 @@ -77,7 +77,7 @@ key_format_type = KeyFormatType(format_type_enum) # Build the client and connect to the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() # Retrieve the SYMMETRIC_KEY object diff -Nru python-pykmip-0.7.0/kmip/demos/units/locate.py python-pykmip-0.8.0/kmip/demos/units/locate.py --- python-pykmip-0.7.0/kmip/demos/units/locate.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/locate.py 2018-04-17 20:03:33.000000000 +0000 @@ -64,7 +64,7 @@ credential = credential_factory.create_credential(credential_type, credential_value) # Build the client and connect to the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() # Build name attribute diff -Nru python-pykmip-0.7.0/kmip/demos/units/query.py python-pykmip-0.8.0/kmip/demos/units/query.py --- python-pykmip-0.7.0/kmip/demos/units/query.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/query.py 2018-04-17 20:03:33.000000000 +0000 @@ -56,7 +56,7 @@ QueryFunction(QueryFunctionEnum.QUERY_EXTENSION_MAP)) # Build the client and connect to the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() result = 
client.query(query_functions=query_functions) diff -Nru python-pykmip-0.7.0/kmip/demos/units/register.py python-pykmip-0.8.0/kmip/demos/units/register.py --- python-pykmip-0.7.0/kmip/demos/units/register.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/register.py 2018-04-17 20:03:33.000000000 +0000 @@ -13,11 +13,13 @@ # License for the specific language governing permissions and limitations # under the License. +from kmip.core import enums from kmip.core.enums import KeyFormatType from kmip.core.enums import ObjectType from kmip.core.enums import Operation from kmip.core.enums import ResultStatus +from kmip.core.factories.attributes import AttributeFactory from kmip.core.objects import TemplateAttribute from kmip.demos import utils @@ -51,16 +53,26 @@ logger.error( "Invalid key format type specified; exiting early from demo") + attribute_factory = AttributeFactory() + # Create the template attribute for the secret and then build the secret usage_mask = utils.build_cryptographic_usage_mask(logger, object_type) attributes = [usage_mask] + + if opts.operation_policy_name is not None: + opn = attribute_factory.create_attribute( + enums.AttributeType.OPERATION_POLICY_NAME, + opts.operation_policy_name + ) + attributes.append(opn) + template_attribute = TemplateAttribute(attributes=attributes) secret = utils.build_object(logger, object_type, key_format_type) # Build the client, connect to the server, register the secret, and # disconnect from the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() result = client.register(object_type, template_attribute, secret) diff -Nru python-pykmip-0.7.0/kmip/demos/units/revoke.py python-pykmip-0.8.0/kmip/demos/units/revoke.py --- python-pykmip-0.7.0/kmip/demos/units/revoke.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/units/revoke.py 2018-04-17 20:03:33.000000000 +0000 @@ -41,7 +41,7 @@ sys.exit() # 
Build the client and connect to the server - client = KMIPProxy(config=config) + client = KMIPProxy(config=config, config_file=opts.config_file) client.open() # Activate the object diff -Nru python-pykmip-0.7.0/kmip/demos/utils.py python-pykmip-0.8.0/kmip/demos/utils.py --- python-pykmip-0.7.0/kmip/demos/utils.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/demos/utils.py 2018-04-17 20:03:33.000000000 +0000 @@ -86,6 +86,15 @@ default="client", dest="config", help="Client configuration group to load from configuration file") + parser.add_option( + "-s", + "--config-file", + action="store", + type="str", + default=None, + dest="config_file", + help="Path to the client configuration file." + ) if operation is Operation.CREATE: parser.add_option( @@ -104,6 +113,15 @@ default=None, dest="length", help="Key length in bits (e.g., 128, 256)") + parser.add_option( + "-o", + "--operation-policy-name", + action="store", + type="str", + default=None, + dest="operation_policy_name", + help="Operation policy name for the secret (e.g., 'default')" + ) elif operation is Operation.CREATE_KEY_PAIR: parser.add_option( "-a", @@ -112,7 +130,7 @@ type="str", default=None, dest="algorithm", - help="Encryption algorithm for the secret (e.g., AES)") + help="Encryption algorithm for the secret (e.g., RSA)") parser.add_option( "-l", "--length", @@ -120,7 +138,7 @@ type="int", default=None, dest="length", - help="Key length in bits (e.g., 128, 256)") + help="Key length in bits (e.g., 512, 1024, 2048)") parser.add_option( "-n", "--name", @@ -129,6 +147,16 @@ default=None, dest="name", help="Name of key pair to create") + parser.add_option( + "-o", + "--operation-policy-name", + action="store", + type="str", + default=None, + dest="operation_policy_name", + help="Operation policy name for the secrets (e.g., 'default')" + ) + elif operation is Operation.DESTROY: parser.add_option( "-i", @@ -212,6 +240,15 @@ help=("Type of the object to register. 
Supported types include: " "CERTIFICATE, PRIVATE_KEY, PUBLIC_KEY, SYMMETRIC_KEY, " "SECRET_DATA")) + parser.add_option( + "-o", + "--operation-policy-name", + action="store", + type="str", + default=None, + dest="operation_policy_name", + help="Operation policy name for the secret (e.g., 'default')" + ) elif operation is Operation.DISCOVER_VERSIONS: parser.add_option( "-v", diff -Nru python-pykmip-0.7.0/kmip/__init__.py python-pykmip-0.8.0/kmip/__init__.py --- python-pykmip-0.7.0/kmip/__init__.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/__init__.py 2017-12-04 19:45:49.000000000 +0000 @@ -25,7 +25,7 @@ version_path = os.path.join(os.path.dirname( os.path.realpath(__file__)), 'version.py') with open(version_path, 'r') as version_file: - mo = re.search(r"^.*= '(\d\.\d\.\d)'$", version_file.read(), re.MULTILINE) + mo = re.search(r"^.*= '(\d\.\d\..*)'$", version_file.read(), re.MULTILINE) __version__ = mo.group(1) diff -Nru python-pykmip-0.7.0/kmip/pie/client.py python-pykmip-0.8.0/kmip/pie/client.py --- python-pykmip-0.7.0/kmip/pie/client.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/pie/client.py 2018-04-17 20:03:33.000000000 +0000 @@ -61,7 +61,8 @@ ssl_version=None, username=None, password=None, - config='client'): + config='client', + config_file=None): """ Construct a ProxyKmipClient. @@ -88,6 +89,9 @@ file. Use to load a specific set of configuration settings from the configuration file, instead of specifying them manually. Optional, defaults to the default client section, 'client'. + config_file (string): The path to the client's configuration file. + Optional, defaults to None. + """ self.logger = logging.getLogger() @@ -104,7 +108,9 @@ ssl_version=ssl_version, username=username, password=password, - config=config) + config=config, + config_file=config_file + ) # TODO (peter-hamilton) Add a multiprocessing lock for synchronization. 
self._is_open = False @@ -227,13 +233,13 @@ length (int): The length in bits for the key pair. operation_policy_name (string): The name of the operation policy to use for the new key pair. Optional, defaults to None. - public_name (string): The name to give the public key. - Optional, defaults to None. + public_name (string): The name to give the public key. Optional, + defaults to None. public_usage_mask (list): A list of CryptographicUsageMask enumerations indicating how the public key should be used. Optional, defaults to None. - private_name (string): The name to give the public key. - Optional, defaults to None. + private_name (string): The name to give the public key. Optional, + defaults to None. private_usage_mask (list): A list of CryptographicUsageMask enumerations indicating how the private key should be used. Optional, defaults to None. @@ -258,9 +264,20 @@ common_attributes = self._build_common_attributes( operation_policy_name ) - key_attributes = self._build_key_attributes(algorithm, length) - key_attributes.extend(common_attributes) - template = cobjects.CommonTemplateAttribute(attributes=key_attributes) + + algorithm_attribute = self.attribute_factory.create_attribute( + enums.AttributeType.CRYPTOGRAPHIC_ALGORITHM, + algorithm + ) + length_attribute = self.attribute_factory.create_attribute( + enums.AttributeType.CRYPTOGRAPHIC_LENGTH, + length + ) + + common_attributes.extend([algorithm_attribute, length_attribute]) + template = cobjects.CommonTemplateAttribute( + attributes=common_attributes + ) # Create public / private specific attributes public_template = None @@ -353,6 +370,14 @@ managed_object.operation_policy_name ) object_attributes.append(opn_attribute) + if hasattr(managed_object, 'names'): + if managed_object.names: + for name in managed_object.names: + name_attribute = self.attribute_factory.create_attribute( + enums.AttributeType.NAME, + name + ) + object_attributes.append(name_attribute) template = 
cobjects.TemplateAttribute(attributes=object_attributes) object_type = managed_object.object_type @@ -371,6 +396,94 @@ raise exceptions.KmipOperationFailure(status, reason, message) @is_connected + def rekey(self, + uid=None, + offset=None, + **kwargs): + """ + Rekey an existing key. + + Args: + uid (string): The unique ID of the symmetric key to rekey. + Optional, defaults to None. + offset (int): The time delta, in seconds, between the new key's + initialization date and activation date. Optional, defaults + to None. + **kwargs (various): A placeholder for object attributes that + should be set on the newly rekeyed key. Currently + supported attributes include: + activation_date (int) + process_start_date (int) + protect_stop_date (int) + deactivation_date (int) + + Returns: + string: The unique ID of the newly rekeyed key. + + Raises: + ClientConnectionNotOpen: if the client connection is unusable + KmipOperationFailure: if the operation result is a failure + TypeError: if the input arguments are invalid + """ + if uid is not None: + if not isinstance(uid, six.string_types): + raise TypeError("The unique identifier must be a string.") + if offset is not None: + if not isinstance(offset, six.integer_types): + raise TypeError("The offset must be an integer.") + + # TODO (peter-hamilton) Unify attribute handling across operations + attributes = [] + if kwargs.get('activation_date'): + attributes.append( + self.attribute_factory.create_attribute( + enums.AttributeType.ACTIVATION_DATE, + kwargs.get('activation_date') + ) + ) + if kwargs.get('process_start_date'): + attributes.append( + self.attribute_factory.create_attribute( + enums.AttributeType.PROCESS_START_DATE, + kwargs.get('process_start_date') + ) + ) + if kwargs.get('protect_stop_date'): + attributes.append( + self.attribute_factory.create_attribute( + enums.AttributeType.PROTECT_STOP_DATE, + kwargs.get('protect_stop_date') + ) + ) + if kwargs.get('deactivation_date'): + attributes.append( + 
self.attribute_factory.create_attribute( + enums.AttributeType.DEACTIVATION_DATE, + kwargs.get('deactivation_date') + ) + ) + template_attribute = cobjects.TemplateAttribute( + attributes=attributes + ) + + # Derive the new key/data and handle the results + result = self.proxy.rekey( + uuid=uid, + offset=offset, + template_attribute=template_attribute + ) + + status = result.get('result_status') + if status == enums.ResultStatus.SUCCESS: + return result.get('unique_identifier') + else: + raise exceptions.KmipOperationFailure( + status, + result.get('result_reason'), + result.get('result_message') + ) + + @is_connected def derive_key(self, object_type, unique_identifiers, @@ -476,6 +589,13 @@ kwargs.get('cryptographic_algorithm') ) ) + if kwargs.get('cryptographic_usage_mask'): + attributes.append( + self.attribute_factory.create_attribute( + enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK, + kwargs.get('cryptographic_usage_mask') + ) + ) template_attribute = cobjects.TemplateAttribute( attributes=attributes ) @@ -558,6 +678,63 @@ raise exceptions.KmipOperationFailure(status, reason, message) @is_connected + def check(self, + uid=None, + usage_limits_count=None, + cryptographic_usage_mask=None, + lease_time=None): + """ + Check the constraints for a managed object. + + Args: + uid (string): The unique ID of the managed object to check. + Optional, defaults to None. + usage_limits_count (int): The number of items that can be secured + with the specified managed object. Optional, defaults to None. + cryptographic_usage_mask (list): A list of CryptographicUsageMask + enumerations specifying the operations possible with the + specified managed object. Optional, defaults to None. + lease_time (int): The number of seconds that can be leased for the + specified managed object. Optional, defaults to None. 
+ """ + if uid is not None: + if not isinstance(uid, six.string_types): + raise TypeError("The unique identifier must be a string.") + if usage_limits_count is not None: + if not isinstance(usage_limits_count, six.integer_types): + raise TypeError("The usage limits count must be an integer.") + if cryptographic_usage_mask is not None: + if not isinstance(cryptographic_usage_mask, list) or \ + not all(isinstance( + x, + enums.CryptographicUsageMask + ) for x in cryptographic_usage_mask): + raise TypeError( + "The cryptographic usage mask must be a list of " + "CryptographicUsageMask enumerations." + ) + if lease_time is not None: + if not isinstance(lease_time, six.integer_types): + raise TypeError("The lease time must be an integer.") + + result = self.proxy.check( + uid, + usage_limits_count, + cryptographic_usage_mask, + lease_time + ) + + status = result.get('result_status') + if status == enums.ResultStatus.SUCCESS: + return result.get('unique_identifier') + else: + raise exceptions.KmipOperationFailure( + status, + result.get('result_reason'), + result.get('result_message') + ) + + @is_connected def get(self, uid=None, key_wrapping_specification=None): """ Get a managed object from a KMIP appliance. @@ -1235,12 +1412,15 @@ CryptographicParameters struct. 
Returns: + None: if value is None CryptographicParameters: a CryptographicParameters struct Raises: TypeError: if the input argument is invalid """ - if not isinstance(value, dict): + if value is None: + return None + elif not isinstance(value, dict): raise TypeError("Cryptographic parameters must be a dictionary.") cryptographic_parameters = CryptographicParameters( diff -Nru python-pykmip-0.7.0/kmip/services/kmip_client.py python-pykmip-0.8.0/kmip/services/kmip_client.py --- python-pykmip-0.7.0/kmip/services/kmip_client.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/kmip_client.py 2018-04-17 20:03:33.000000000 +0000 @@ -31,6 +31,7 @@ from kmip.core import attributes as attr +from kmip.core import enums from kmip.core.enums import AuthenticationSuite from kmip.core.enums import ConformanceClause from kmip.core.enums import CredentialType @@ -39,7 +40,6 @@ from kmip.core.factories.credentials import CredentialFactory from kmip.core import objects -from kmip.core.server import KMIP from kmip.core.messages.contents import Authentication from kmip.core.messages.contents import BatchCount @@ -59,6 +59,7 @@ import logging import logging.config import os +import six import socket import ssl @@ -66,23 +67,34 @@ CONFIG_FILE = os.path.normpath(os.path.join(FILE_PATH, '../kmipconfig.ini')) -class KMIPProxy(KMIP): +class KMIPProxy: def __init__(self, host=None, port=None, keyfile=None, certfile=None, cert_reqs=None, ssl_version=None, ca_certs=None, do_handshake_on_connect=None, suppress_ragged_eofs=None, - username=None, password=None, timeout=30, config='client'): - super(KMIPProxy, self).__init__() + username=None, password=None, timeout=30, config='client', + config_file=None): self.logger = logging.getLogger(__name__) self.credential_factory = CredentialFactory() self.config = config + if config_file: + if not isinstance(config_file, six.string_types): + raise ValueError( + "The client configuration file argument must be a string." 
+ ) + if not os.path.exists(config_file): + raise ValueError( + "The client configuration file '{}' does not " + "exist.".format(config_file) + ) + self._set_variables(host, port, keyfile, certfile, cert_reqs, ssl_version, ca_certs, do_handshake_on_connect, suppress_ragged_eofs, - username, password, timeout) + username, password, timeout, config_file) self.batch_items = [] self.conformance_clauses = [ @@ -284,6 +296,83 @@ """ return self._activate(uuid, credential=credential) + def rekey(self, + uuid=None, + offset=None, + template_attribute=None, + credential=None): + """ + Check object usage according to specific constraints. + + Args: + uuid (string): The unique identifier of a managed cryptographic + object that should be checked. Optional, defaults to None. + offset (int): An integer specifying, in seconds, the difference + between the rekeyed objects initialization date and activation + date. Optional, defaults to None. + template_attribute (TemplateAttribute): A TemplateAttribute struct + containing the attributes to set on the newly rekeyed object. + Optional, defaults to None. + credential (Credential): A Credential struct containing a set of + authorization parameters for the operation. Optional, defaults + to None. + + Returns: + dict: The results of the check operation, containing the following + key/value pairs: + + Key | Value + ---------------------------|----------------------------------- + 'unique_identifier' | (string) The unique ID of the + | checked cryptographic object. + 'template_attribute' | (TemplateAttribute) A struct + | containing attribute set by the + | server. Optional. + 'result_status' | (ResultStatus) An enumeration + | indicating the status of the + | operation result. + 'result_reason' | (ResultReason) An enumeration + | providing context for the result + | status. + 'result_message' | (string) A message providing + | additional context for the + | operation result. 
+ """ + operation = Operation(OperationEnum.REKEY) + request_payload = payloads.RekeyRequestPayload( + unique_identifier=uuid, + offset=offset, + template_attribute=template_attribute + ) + batch_item = messages.RequestBatchItem( + operation=operation, + request_payload=request_payload + ) + + request = self._build_request_message(credential, [batch_item]) + response = self._send_and_receive_message(request) + batch_item = response.batch_items[0] + payload = batch_item.response_payload + + result = {} + + if payload: + result['unique_identifier'] = payload.unique_identifier + if payload.template_attribute is not None: + result['template_attribute'] = payload.template_attribute + + result['result_status'] = batch_item.result_status.value + try: + result['result_reason'] = batch_item.result_reason.value + except Exception: + result['result_reason'] = batch_item.result_reason + try: + result['result_message'] = batch_item.result_message.value + except Exception: + result['result_message'] = batch_item.result_message + + return result + def derive_key(self, object_type, unique_identifiers, @@ -366,6 +455,107 @@ return result + def check(self, + uuid=None, + usage_limits_count=None, + cryptographic_usage_mask=None, + lease_time=None, + credential=None): + """ + Check object usage according to specific constraints. + + Args: + uuid (string): The unique identifier of a managed cryptographic + object that should be checked. Optional, defaults to None. + usage_limits_count (int): An integer specifying the number of + items that can be secured with the specified cryptographic + object. Optional, defaults to None. + cryptographic_usage_mask (list): A list of CryptographicUsageMask + enumerations specifying the operations possible with the + specified cryptographic object. Optional, defaults to None. + lease_time (int): The number of seconds that can be leased for the + specified cryptographic object. Optional, defaults to None. 
+ credential (Credential): A Credential struct containing a set of + authorization parameters for the operation. Optional, defaults + to None. + + Returns: + dict: The results of the check operation, containing the following + key/value pairs: + + Key | Value + ---------------------------|----------------------------------- + 'unique_identifier' | (string) The unique ID of the + | checked cryptographic object. + 'usage_limits_count' | (int) The value provided as input + | if the value exceeds server + | constraints. + 'cryptographic_usage_mask' | (list) The value provided as input + | if the value exceeds server + | constraints. + 'lease_time' | (int) The value provided as input + | if the value exceeds server + | constraints. + 'result_status' | (ResultStatus) An enumeration + | indicating the status of the + | operation result. + 'result_reason' | (ResultReason) An enumeration + | providing context for the result + | status. + 'result_message' | (string) A message providing + | additional context for the + | operation result. + """ + # TODO (peter-hamilton) Push this into the Check request. 
+ mask = 0 + for m in cryptographic_usage_mask: + mask |= m.value + + operation = Operation(OperationEnum.CHECK) + request_payload = payloads.CheckRequestPayload( + unique_identifier=uuid, + usage_limits_count=usage_limits_count, + cryptographic_usage_mask=mask, + lease_time=lease_time + ) + batch_item = messages.RequestBatchItem( + operation=operation, + request_payload=request_payload + ) + + request = self._build_request_message(credential, [batch_item]) + response = self._send_and_receive_message(request) + batch_item = response.batch_items[0] + payload = batch_item.response_payload + + result = {} + + if payload: + result['unique_identifier'] = payload.unique_identifier + if payload.usage_limits_count is not None: + result['usage_limits_count'] = payload.usage_limits_count + if payload.cryptographic_usage_mask is not None: + # TODO (peter-hamilton) Push this into the Check response. + masks = [] + for enumeration in enums.CryptographicUsageMask: + if payload.cryptographic_usage_mask & enumeration.value: + masks.append(enumeration) + result['cryptographic_usage_mask'] = masks + if payload.lease_time is not None: + result['lease_time'] = payload.lease_time + + result['result_status'] = batch_item.result_status.value + try: + result['result_reason'] = batch_item.result_reason.value + except Exception: + result['result_reason'] = batch_item.result_reason + try: + result['result_message'] = batch_item.result_message.value + except Exception: + result['result_message'] = batch_item.result_message + + return result + def get(self, uuid=None, key_format_type=None, key_compression_type=None, key_wrapping_specification=None, credential=None): return self._get( @@ -1341,14 +1531,14 @@ return credential def _build_request_message(self, credential, batch_items): - protocol_version = ProtocolVersion.create(1, 2) + protocol_version = ProtocolVersion(1, 2) if credential is None: credential = self._build_credential() authentication = None if credential is not None: - 
authentication = Authentication(credential) + authentication = Authentication([credential]) batch_count = BatchCount(len(batch_items)) req_header = messages.RequestHeader(protocol_version=protocol_version, @@ -1376,8 +1566,8 @@ def _set_variables(self, host, port, keyfile, certfile, cert_reqs, ssl_version, ca_certs, do_handshake_on_connect, suppress_ragged_eofs, - username, password, timeout): - conf = ConfigHelper() + username, password, timeout, config_file): + conf = ConfigHelper(config_file) # TODO: set this to a host list self.host_list_str = conf.get_valid_value( diff -Nru python-pykmip-0.7.0/kmip/services/server/auth/api.py python-pykmip-0.8.0/kmip/services/server/auth/api.py --- python-pykmip-0.7.0/kmip/services/server/auth/api.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/auth/api.py 2018-04-02 17:12:18.000000000 +0000 @@ -0,0 +1,45 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import abc +import six + + +@six.add_metaclass(abc.ABCMeta) +class AuthAPI: + """ + The base class for an authentication API connector. + """ + + @abc.abstractmethod + def authenticate(self, + connection_certificate=None, + connection_info=None, + request_credentials=None): + """ + Query the configured authentication service with the given credentials. 
+ + Args: + connection_certificate (cryptography.x509.Certificate): An X.509 + certificate object obtained from the connection being + authenticated. Optional, defaults to None. + connection_info (tuple): A tuple of information pertaining to the + connection being authenticated, including the source IP address + and a timestamp (e.g., ('127.0.0.1', 1519759267.467451)). + Optional, defaults to None. + request_credentials (list): A list of KMIP Credential structures + containing credential information to use for authentication. + Optional, defaults to None. + """ diff -Nru python-pykmip-0.7.0/kmip/services/server/auth/__init__.py python-pykmip-0.8.0/kmip/services/server/auth/__init__.py --- python-pykmip-0.7.0/kmip/services/server/auth/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/auth/__init__.py 2018-04-02 17:12:18.000000000 +0000 @@ -0,0 +1,34 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from kmip.services.server.auth.api import AuthAPI +from kmip.services.server.auth.slugs import SLUGSConnector + +from kmip.services.server.auth.utils import get_certificate_from_connection +from kmip.services.server.auth.utils import \ + get_client_identity_from_certificate +from kmip.services.server.auth.utils import get_common_names_from_certificate +from kmip.services.server.auth.utils import \ + get_extended_key_usage_from_certificate + + +__all__ = [ + 'AuthAPI', + 'SLUGSConnector', + 'get_certificate_from_connection', + 'get_client_identity_from_certificate', + 'get_common_names_from_certificate', + 'get_extended_key_usage_from_certificate' +] diff -Nru python-pykmip-0.7.0/kmip/services/server/auth/slugs.py python-pykmip-0.8.0/kmip/services/server/auth/slugs.py --- python-pykmip-0.7.0/kmip/services/server/auth/slugs.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/auth/slugs.py 2018-04-02 17:12:18.000000000 +0000 @@ -0,0 +1,108 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import requests +import six + +from kmip.core import exceptions +from kmip.services.server.auth import api +from kmip.services.server.auth import utils + + +class SLUGSConnector(api.AuthAPI): + """ + An authentication API connector for a SLUGS service. + """ + + def __init__(self, url=None): + """ + Construct a SLUGSConnector. 
+ + Args: + url (string): The base URL for the remote SLUGS instance. Optional, + defaults to None. Required for authentication. + """ + self._url = None + self.users_url = None + self.groups_url = None + + self.url = url + + @property + def url(self): + return self._url + + @url.setter + def url(self, value): + if value is None: + self._url = None + self.users_url = None + self.groups_url = None + elif isinstance(value, six.string_types): + self._url = value + if not self._url.endswith("/"): + self._url += "/" + self.users_url = self._url + "users/{}" + self.groups_url = self.users_url + "/groups" + else: + raise TypeError("URL must be a string.") + + def authenticate(self, + connection_certificate=None, + connection_info=None, + request_credentials=None): + """ + Query the configured SLUGS service with the provided credentials. + + Args: + connection_certificate (cryptography.x509.Certificate): An X.509 + certificate object obtained from the connection being + authenticated. Required for SLUGS authentication. + connection_info (tuple): A tuple of information pertaining to the + connection being authenticated, including the source IP address + and a timestamp (e.g., ('127.0.0.1', 1519759267.467451)). + Optional, defaults to None. Ignored for SLUGS authentication. + request_credentials (list): A list of KMIP Credential structures + containing credential information to use for authentication. + Optional, defaults to None. Ignored for SLUGS authentication. + """ + if (self.users_url is None) or (self.groups_url is None): + raise exceptions.ConfigurationError( + "The SLUGS URL must be specified." + ) + + user_id = utils.get_client_identity_from_certificate( + connection_certificate + ) + + try: + response = requests.get(self.users_url.format(user_id)) + except Exception: + raise exceptions.ConfigurationError( + "A connection could not be established using the SLUGS URL." 
+ ) + if response.status_code == 404: + raise exceptions.PermissionDenied( + "Unrecognized user ID: {}".format(user_id) + ) + + response = requests.get(self.groups_url.format(user_id)) + if response.status_code == 404: + raise exceptions.PermissionDenied( + "Group information could not be retrieved for user ID: " + "{}".format(user_id) + ) + + return user_id, response.json().get('groups') diff -Nru python-pykmip-0.7.0/kmip/services/server/auth/utils.py python-pykmip-0.8.0/kmip/services/server/auth/utils.py --- python-pykmip-0.7.0/kmip/services/server/auth/utils.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/auth/utils.py 2018-04-02 17:12:18.000000000 +0000 @@ -0,0 +1,75 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from cryptography import x509 +from cryptography.hazmat import backends + +from kmip.core import exceptions + + +def get_certificate_from_connection(connection): + """ + Extract an X.509 certificate from a socket connection. + """ + certificate = connection.getpeercert(binary_form=True) + if certificate: + return x509.load_der_x509_certificate( + certificate, + backends.default_backend() + ) + return None + + +def get_extended_key_usage_from_certificate(certificate): + """ + Given an X.509 certificate, extract and return the extendedKeyUsage + extension. 
+ """ + try: + return certificate.extensions.get_extension_for_oid( + x509.oid.ExtensionOID.EXTENDED_KEY_USAGE + ).value + except x509.ExtensionNotFound: + return None + + +def get_common_names_from_certificate(certificate): + """ + Given an X.509 certificate, extract and return all common names. + """ + + common_names = certificate.subject.get_attributes_for_oid( + x509.oid.NameOID.COMMON_NAME + ) + return [common_name.value for common_name in common_names] + + +def get_client_identity_from_certificate(certificate): + """ + Given an X.509 certificate, extract and return the client identity. + """ + client_ids = get_common_names_from_certificate(certificate) + + if len(client_ids) > 0: + if len(client_ids) > 1: + raise exceptions.PermissionDenied( + "Multiple client identities found." + ) + return client_ids[0] + else: + raise exceptions.PermissionDenied( + "The certificate does not define any subject common names. " + "Client identity unavailable." + ) diff -Nru python-pykmip-0.7.0/kmip/services/server/config.py python-pykmip-0.8.0/kmip/services/server/config.py --- python-pykmip-0.7.0/kmip/services/server/config.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/config.py 2018-04-16 19:14:22.000000000 +0000 @@ -37,6 +37,7 @@ self.settings['enable_tls_client_auth'] = True self.settings['tls_cipher_suites'] = [] self.settings['logging_level'] = logging.INFO + self.settings['auth_plugins'] = [] self._expected_settings = [ 'hostname', @@ -50,7 +51,8 @@ 'policy_path', 'enable_tls_client_auth', 'tls_cipher_suites', - 'logging_level' + 'logging_level', + 'database_path' ] def set_setting(self, setting, value): @@ -92,8 +94,10 @@ self._set_enable_tls_client_auth(value) elif setting == 'tls_cipher_suites': self._set_tls_cipher_suites(value) - else: + elif setting == 'logging_level': self._set_logging_level(value) + else: + self._set_database_path(value) def load_settings(self, path): """ @@ -121,6 +125,12 @@ parser = 
configparser.SafeConfigParser() parser.read(path) self._parse_settings(parser) + self.parse_auth_settings(parser) + + def parse_auth_settings(self, parser): + sections = [x for x in parser.sections() if x.startswith("auth:")] + configs = [(x, dict(parser.items(x))) for x in sections] + self.settings['auth_plugins'] = configs def _parse_settings(self, parser): if not parser.has_section('server'): @@ -172,6 +182,8 @@ self._set_logging_level( parser.get('server', 'logging_level') ) + if parser.has_option('server', 'database_path'): + self._set_database_path(parser.get('server', 'database_path')) def _set_hostname(self, value): if isinstance(value, six.string_types): @@ -327,3 +339,14 @@ "The logging level must be a string representing a valid " "logging level." ) + + def _set_database_path(self, value): + if not value: + self.settings['database_path'] = None + elif isinstance(value, six.string_types): + self.settings['database_path'] = value + else: + raise exceptions.ConfigurationError( + "The database path, if specified, must be a valid path to a " + "SQLite database file." + ) diff -Nru python-pykmip-0.7.0/kmip/services/server/engine.py python-pykmip-0.8.0/kmip/services/server/engine.py --- python-pykmip-0.7.0/kmip/services/server/engine.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/engine.py 2018-05-17 22:30:38.000000000 +0000 @@ -15,7 +15,6 @@ import copy import logging -import os import six import sqlalchemy @@ -42,8 +41,6 @@ from kmip.core import misc -from kmip.core import policy as operation_policy - from kmip.pie import factory from kmip.pie import objects from kmip.pie import sqltypes @@ -77,7 +74,7 @@ * Cryptographic usage mask enforcement per object type """ - def __init__(self, policy_path=None): + def __init__(self, policies=None, database_path=None): """ Create a KmipEngine. @@ -85,13 +82,20 @@ policy_path (string): The path to the filesystem directory containing PyKMIP server operation policy JSON files. 
Optional, defaults to None. + database_path (string): The path to the SQLite database file + used to store all server data. Optional, defaults to None. + If none, database path defaults to '/tmp/pykmip.database'. """ self._logger = logging.getLogger('kmip.server.engine') self._cryptography_engine = engine.CryptographyEngine() + self.database_path = 'sqlite:///{}'.format(database_path) + if not database_path: + self.database_path = 'sqlite:////tmp/pykmip.database' + self._data_store = sqlalchemy.create_engine( - 'sqlite:////tmp/pykmip.database', + self.database_path, echo=False, connect_args={'check_same_thread': False} ) @@ -105,9 +109,9 @@ self._id_placeholder = None self._protocol_versions = [ - contents.ProtocolVersion.create(1, 2), - contents.ProtocolVersion.create(1, 1), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 2), + contents.ProtocolVersion(1, 1), + contents.ProtocolVersion(1, 0) ] self._protocol_version = self._protocol_versions[0] @@ -124,68 +128,8 @@ } self._attribute_policy = policy.AttributePolicy(self._protocol_version) - self._operation_policies = copy.deepcopy(operation_policy.policies) - self._load_operation_policies(policy_path) - - self._client_identity = None - - def _load_operation_policies(self, policy_path): - if (policy_path is None) or (not os.path.isdir(policy_path)): - self._logger.warning( - "The specified operation policy directory{0} is not " - "valid. 
No user-defined policies will be loaded.".format( - " (" + policy_path + ")" if policy_path else '' - ) - ) - return dict() - else: - self._logger.info( - "Loading user-defined operation policy files from: {0}".format( - policy_path - ) - ) - - for filename in os.listdir(policy_path): - file_path = os.path.join(policy_path, filename) - if os.path.isfile(file_path): - self._logger.info( - "Loading user-defined operation policies " - "from file: {0}".format(file_path) - ) - - try: - policies = operation_policy.read_policy_from_file( - file_path - ) - except ValueError as e: - self._logger.error( - "A failure occurred while loading policies." - ) - self._logger.exception(e) - continue - - reserved_policies = ['default', 'public'] - for policy_name in six.iterkeys(policies): - if policy_name in reserved_policies: - self._logger.warning( - "Loaded policy '{0}' overwrites a reserved " - "policy and will be thrown out.".format( - policy_name - ) - ) - elif policy_name in six.iterkeys( - self._operation_policies - ): - self._logger.warning( - "Loaded policy '{0}' overwrites a " - "preexisting policy and will be thrown " - "out.".format(policy_name) - ) - else: - self._operation_policies.update([( - policy_name, - policies.get(policy_name) - )]) + self._operation_policies = policies + self._client_identity = [None, None] def _get_enum_string(self, e): return ''.join([x.capitalize() for x in e.name.split('_')]) @@ -262,7 +206,7 @@ ResponseMessage: The response containing all of the results from the request batch items. 
""" - self._client_identity = None + self._client_identity = [None, None] header = request.request_header # Process the protocol version @@ -328,9 +272,13 @@ # Process the authentication credentials if header.authentication: - auth_credentials = header.authentication.credential + if header.authentication.credentials: + auth_credentials = header.authentication.credentials[0] + else: + auth_credentials = None else: auth_credentials = None + self._verify_credential(auth_credentials, credential) # Process the batch error continuation option @@ -850,24 +798,94 @@ def _is_allowed_by_operation_policy( self, - operation_policy, + policy_name, session_identity, object_owner, object_type, operation ): - policy_set = self._operation_policies.get(operation_policy) - if not policy_set: + session_user = session_identity[0] + session_groups = session_identity[1] + + if session_groups is None: + session_groups = [None] + + for session_group in session_groups: + allowed = self.is_allowed( + policy_name, + session_user, + session_group, + object_owner, + object_type, + operation + ) + if allowed: + return True + + return False + + def get_relevant_policy_section(self, policy_name, group=None): + """ + Look up the policy corresponding to the provided policy name and + group (optional). Log any issues found during the look up. 
+ """ + policy_bundle = self._operation_policies.get(policy_name) + + if not policy_bundle: self._logger.warning( - "The '{0}' policy does not exist.".format(operation_policy) + "The '{}' policy does not exist.".format(policy_name) ) + return None + + if group: + groups_policy_bundle = policy_bundle.get('groups') + if not groups_policy_bundle: + self._logger.debug( + "The '{}' policy does not support groups.".format( + policy_name + ) + ) + return None + else: + group_policy = groups_policy_bundle.get(group) + if not group_policy: + self._logger.debug( + "The '{}' policy does not support group '{}'.".format( + policy_name, + group + ) + ) + return None + else: + return group_policy + else: + return policy_bundle.get('preset') + + def is_allowed( + self, + policy_name, + session_user, + session_group, + object_owner, + object_type, + operation + ): + """ + Determine if object access is allowed for the provided policy and + session settings. + """ + policy_section = self.get_relevant_policy_section( + policy_name, + session_group + ) + if policy_section is None: return False - object_policy = policy_set.get(object_type) + object_policy = policy_section.get(object_type) if not object_policy: self._logger.warning( "The '{0}' policy does not apply to {1} objects.".format( - operation_policy, + policy_name, self._get_enum_string(object_type) ) ) @@ -878,7 +896,7 @@ self._logger.warning( "The '{0}' policy does not apply to {1} operations on {2} " "objects.".format( - operation_policy, + policy_name, self._get_enum_string(operation), self._get_enum_string(object_type) ) @@ -888,7 +906,7 @@ if operation_object_policy == enums.Policy.ALLOW_ALL: return True elif operation_object_policy == enums.Policy.ALLOW_OWNER: - if session_identity == object_owner: + if session_user == object_owner: return True else: return False @@ -908,6 +926,8 @@ object_type.unique_identifier == uid ).one() + # TODO (peter-hamilton) Add debug log with policy contents? 
+ # Determine if the request should be carried out under the object's # operation policy. If not, feign ignorance of the object. is_allowed = self._is_allowed_by_operation_policy( @@ -918,7 +938,7 @@ operation ) if not is_allowed: - raise exceptions.ItemNotFound( + raise exceptions.PermissionDenied( "Could not locate object: {0}".format(uid) ) @@ -1057,7 +1077,7 @@ ) # TODO (peterhamilton) Set additional server-only attributes. - managed_object._owner = self._client_identity + managed_object._owner = self._client_identity[0] managed_object.initial_date = int(time.time()) self._data_session.add(managed_object) @@ -1225,9 +1245,9 @@ ) # TODO (peterhamilton) Set additional server-only attributes. - public_key._owner = self._client_identity + public_key._owner = self._client_identity[0] public_key.initial_date = int(time.time()) - private_key._owner = self._client_identity + private_key._owner = self._client_identity[0] private_key.initial_date = public_key.initial_date self._data_session.add(public_key) @@ -1302,7 +1322,7 @@ ) # TODO (peterhamilton) Set additional server-only attributes. - managed_object._owner = self._client_identity + managed_object._owner = self._client_identity[0] managed_object.initial_date = int(time.time()) self._data_session.add(managed_object) @@ -1490,7 +1510,7 @@ ) # TODO (peterhamilton) Set additional server-only attributes. 
- managed_object._owner = self._client_identity + managed_object._owner = self._client_identity[0] managed_object.initial_date = int(time.time()) self._data_session.add(managed_object) @@ -1927,11 +1947,11 @@ contents.Operation(enums.Operation.QUERY) ]) - if self._protocol_version >= contents.ProtocolVersion.create(1, 1): + if self._protocol_version >= contents.ProtocolVersion(1, 1): operations.extend([ contents.Operation(enums.Operation.DISCOVER_VERSIONS) ]) - if self._protocol_version >= contents.ProtocolVersion.create(1, 2): + if self._protocol_version >= contents.ProtocolVersion(1, 2): operations.extend([ contents.Operation(enums.Operation.ENCRYPT), contents.Operation(enums.Operation.DECRYPT), diff -Nru python-pykmip-0.7.0/kmip/services/server/monitor.py python-pykmip-0.8.0/kmip/services/server/monitor.py --- python-pykmip-0.7.0/kmip/services/server/monitor.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/monitor.py 2018-04-12 06:42:35.000000000 +0000 @@ -0,0 +1,175 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging +import multiprocessing +import os +import signal +import time + +from kmip.core import policy as operation_policy + + +def get_json_files(p): + """ + Scan the provided policy directory for all JSON policy files. 
+ """ + f = [os.path.join(p, x) for x in os.listdir(p) if x.endswith(".json")] + return sorted(f) + + +class PolicyDirectoryMonitor(multiprocessing.Process): + """ + A file monitor that tracks modifications made within the policy directory. + """ + + def __init__(self, policy_directory, policy_store, live_monitoring=True): + """ + Set up the file monitor with the policy directory to track. + + Args: + policy_directory (string): The system path of the policy directory + that should be monitored. Required. + policy_store (DictProxy): A dictionary proxy created by the server + multiprocessing resource manager. Used to store and share the + policy information across server processes and threads. + Required. + live_monitoring (boolean): A boolean indicating whether or not + live monitoring should continue indefinitely. Optional, + defaults to True. + """ + super(PolicyDirectoryMonitor, self).__init__() + + self.halt_trigger = multiprocessing.Event() + self.policy_directory = policy_directory + self.live_monitoring = live_monitoring + + self.file_timestamps = None + self.policy_cache = None + self.policy_files = None + self.policy_map = None + self.policy_store = policy_store + + self.reserved_policies = ['default', 'public'] + + def interrupt_handler(trigger, frame): + self.stop() + signal.signal(signal.SIGINT, interrupt_handler) + signal.signal(signal.SIGTERM, interrupt_handler) + + self.logger = logging.getLogger("kmip.server.monitor") + self.initialize_tracking_structures() + + def stop(self): + self.halt_trigger.set() + + def scan_policies(self): + """ + Scan the policy directory for policy data. 
+ """ + policy_files = get_json_files(self.policy_directory) + for f in set(policy_files) - set(self.policy_files): + self.file_timestamps[f] = 0 + for f in set(self.policy_files) - set(policy_files): + self.logger.info("Removing policies for file: {}".format(f)) + self.file_timestamps.pop(f, None) + for p in self.policy_cache.keys(): + self.disassociate_policy_and_file(p, f) + for p in [k for k, v in self.policy_map.items() if v == f]: + self.restore_or_delete_policy(p) + self.policy_files = policy_files + + for f in sorted(self.file_timestamps.keys()): + t = os.path.getmtime(f) + if t > self.file_timestamps[f]: + self.logger.info("Loading policies for file: {}".format(f)) + self.file_timestamps[f] = t + old_p = [k for k, v in self.policy_map.items() if v == f] + try: + new_p = operation_policy.read_policy_from_file(f) + except ValueError: + self.logger.error("Failure loading file: {}".format(f)) + self.logger.debug("", exc_info=True) + continue + for p in new_p.keys(): + self.logger.info("Loading policy: {}".format(p)) + if p in self.reserved_policies: + self.logger.warning( + "Policy '{}' overwrites a reserved policy and " + "will be thrown out.".format(p) + ) + continue + if p in sorted(self.policy_store.keys()): + self.logger.debug( + "Policy '{}' overwrites an existing " + "policy.".format(p) + ) + if f != self.policy_map.get(p): + self.policy_cache.get(p).append( + ( + time.time(), + self.policy_map.get(p), + self.policy_store.get(p) + ) + ) + else: + self.policy_cache[p] = [] + self.policy_store[p] = new_p.get(p) + self.policy_map[p] = f + for p in set(old_p) - set(new_p.keys()): + self.disassociate_policy_and_file(p, f) + self.restore_or_delete_policy(p) + + def run(self): + """ + Start monitoring operation policy files. 
+ """ + self.initialize_tracking_structures() + + if self.live_monitoring: + self.logger.info("Starting up the operation policy file monitor.") + while not self.halt_trigger.is_set(): + time.sleep(1) + self.scan_policies() + self.logger.info("Stopping the operation policy file monitor.") + else: + self.scan_policies() + + def initialize_tracking_structures(self): + self.file_timestamps = {} + self.policy_cache = {} + self.policy_files = [] + self.policy_map = {} + + for k in self.policy_store.keys(): + if k not in self.reserved_policies: + self.policy_store.pop(k, None) + + def disassociate_policy_and_file(self, policy, file_name): + c = self.policy_cache.get(policy, []) + for i in [c.index(e) for e in c if e[1] == file_name][::-1]: + c.pop(i) + + def restore_or_delete_policy(self, policy): + c = self.policy_cache.get(policy, []) + if len(c) == 0: + self.logger.info("Removing policy: {}".format(policy)) + self.policy_store.pop(policy, None) + self.policy_map.pop(policy, None) + self.policy_cache.pop(policy, None) + else: + e = c.pop() + self.policy_store[policy] = e[2] + self.policy_map[policy] = e[1] diff -Nru python-pykmip-0.7.0/kmip/services/server/policy.py python-pykmip-0.8.0/kmip/services/server/policy.py --- python-pykmip-0.7.0/kmip/services/server/policy.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/policy.py 2018-04-02 17:12:18.000000000 +0000 @@ -157,7 +157,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Name': AttributeRuleSet( False, @@ -181,7 +181,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Object Type': AttributeRuleSet( True, @@ -210,7 +210,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Cryptographic Algorithm': 
AttributeRuleSet( True, @@ -237,7 +237,7 @@ enums.ObjectType.SPLIT_KEY, enums.ObjectType.TEMPLATE ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Cryptographic Length': AttributeRuleSet( True, @@ -264,7 +264,7 @@ enums.ObjectType.SPLIT_KEY, enums.ObjectType.TEMPLATE ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Cryptographic Parameters': AttributeRuleSet( False, @@ -286,7 +286,7 @@ enums.ObjectType.SPLIT_KEY, enums.ObjectType.TEMPLATE ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Cryptographic Domain Parameters': AttributeRuleSet( False, @@ -304,7 +304,7 @@ enums.ObjectType.PRIVATE_KEY, enums.ObjectType.TEMPLATE ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Certificate Type': AttributeRuleSet( True, @@ -321,7 +321,7 @@ ( enums.ObjectType.CERTIFICATE, ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Certificate Length': AttributeRuleSet( True, @@ -338,7 +338,7 @@ ( enums.ObjectType.CERTIFICATE, ), - contents.ProtocolVersion.create(1, 1) + contents.ProtocolVersion(1, 1) ), 'X.509 Certificate Identifier': AttributeRuleSet( True, @@ -356,7 +356,7 @@ # TODO (peterhamilton) Enforce only on X.509 certificates enums.ObjectType.CERTIFICATE, ), - contents.ProtocolVersion.create(1, 1) + contents.ProtocolVersion(1, 1) ), 'X.509 Certificate Subject': AttributeRuleSet( True, @@ -374,7 +374,7 @@ # TODO (peterhamilton) Enforce only on X.509 certificates enums.ObjectType.CERTIFICATE, ), - contents.ProtocolVersion.create(1, 1) + contents.ProtocolVersion(1, 1) ), 'X.509 Certificate Issuer': AttributeRuleSet( True, @@ -392,7 +392,7 @@ # TODO (peterhamilton) Enforce only on X.509 certificates enums.ObjectType.CERTIFICATE, ), - contents.ProtocolVersion.create(1, 1) + contents.ProtocolVersion(1, 1) ), 'Certificate Identifier': AttributeRuleSet( True, @@ -409,8 +409,8 @@ ( enums.ObjectType.CERTIFICATE, ), - 
contents.ProtocolVersion.create(1, 0), - contents.ProtocolVersion.create(1, 1) + contents.ProtocolVersion(1, 0), + contents.ProtocolVersion(1, 1) ), 'Certificate Subject': AttributeRuleSet( True, @@ -427,8 +427,8 @@ ( enums.ObjectType.CERTIFICATE, ), - contents.ProtocolVersion.create(1, 0), - contents.ProtocolVersion.create(1, 1) + contents.ProtocolVersion(1, 0), + contents.ProtocolVersion(1, 1) ), 'Certificate Issuer': AttributeRuleSet( True, @@ -445,8 +445,8 @@ ( enums.ObjectType.CERTIFICATE, ), - contents.ProtocolVersion.create(1, 0), - contents.ProtocolVersion.create(1, 1) + contents.ProtocolVersion(1, 0), + contents.ProtocolVersion(1, 1) ), 'Digital Signature Algorithm': AttributeRuleSet( True, @@ -464,7 +464,7 @@ ( enums.ObjectType.CERTIFICATE, ), - contents.ProtocolVersion.create(1, 1) + contents.ProtocolVersion(1, 1) ), 'Digest': AttributeRuleSet( True, # If the server has access to the data @@ -492,7 +492,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Operation Policy Name': AttributeRuleSet( False, @@ -521,7 +521,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Cryptographic Usage Mask': AttributeRuleSet( True, @@ -549,7 +549,7 @@ enums.ObjectType.TEMPLATE, enums.ObjectType.SECRET_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Lease Time': AttributeRuleSet( False, @@ -576,7 +576,7 @@ enums.ObjectType.SPLIT_KEY, enums.ObjectType.SECRET_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Usage Limits': AttributeRuleSet( False, @@ -601,7 +601,7 @@ enums.ObjectType.SPLIT_KEY, enums.ObjectType.TEMPLATE ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'State': AttributeRuleSet( True, @@ -631,7 +631,7 @@ enums.ObjectType.SPLIT_KEY, enums.ObjectType.SECRET_DATA ), - 
contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Initial Date': AttributeRuleSet( True, @@ -660,7 +660,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Activation Date': AttributeRuleSet( False, @@ -689,7 +689,7 @@ enums.ObjectType.TEMPLATE, enums.ObjectType.SECRET_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Process Start Date': AttributeRuleSet( False, @@ -710,7 +710,7 @@ enums.ObjectType.SPLIT_KEY, enums.ObjectType.TEMPLATE ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Protect Stop Date': AttributeRuleSet( False, @@ -731,7 +731,7 @@ enums.ObjectType.SPLIT_KEY, enums.ObjectType.TEMPLATE ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Deactivation Date': AttributeRuleSet( False, @@ -760,7 +760,7 @@ enums.ObjectType.TEMPLATE, enums.ObjectType.SECRET_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Destroy Date': AttributeRuleSet( False, @@ -781,7 +781,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Compromise Occurrence Date': AttributeRuleSet( False, @@ -802,7 +802,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Compromise Date': AttributeRuleSet( False, @@ -823,7 +823,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Revocation Reason': AttributeRuleSet( False, @@ -844,7 +844,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Archive Date': AttributeRuleSet( False, @@ -866,7 +866,7 @@ enums.ObjectType.SECRET_DATA, 
enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Object Group': AttributeRuleSet( False, @@ -895,7 +895,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Fresh': AttributeRuleSet( False, @@ -924,7 +924,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 1) + contents.ProtocolVersion(1, 1) ), 'Link': AttributeRuleSet( False, @@ -951,7 +951,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Application Specific Information': AttributeRuleSet( False, @@ -975,7 +975,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Contact Information': AttributeRuleSet( False, @@ -1004,7 +1004,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Last Change Date': AttributeRuleSet( True, @@ -1042,7 +1042,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), 'Custom Attribute': AttributeRuleSet( False, @@ -1074,7 +1074,7 @@ enums.ObjectType.SECRET_DATA, enums.ObjectType.OPAQUE_DATA ), - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ), } diff -Nru python-pykmip-0.7.0/kmip/services/server/server.py python-pykmip-0.8.0/kmip/services/server/server.py --- python-pykmip-0.7.0/kmip/services/server/server.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/server.py 2018-05-09 05:46:08.000000000 +0000 @@ -13,20 +13,25 @@ # License for the specific language governing permissions and limitations # under the License. 
+import copy import logging import logging.handlers as handlers +import multiprocessing import optparse import os import signal +import six import socket import ssl import sys import threading from kmip.core import exceptions +from kmip.core import policy as operation_policy from kmip.services import auth from kmip.services.server import config from kmip.services.server import engine +from kmip.services.server import monitor from kmip.services.server import session @@ -40,6 +45,7 @@ shutting down all server components upon receiving a termination signal. """ + # TODO (peter-hamilton) Move to using **kwargs for all server parameters. def __init__( self, hostname=None, @@ -53,7 +59,9 @@ policy_path=None, enable_tls_client_auth=None, tls_cipher_suites=None, - logging_level=None + logging_level=None, + live_policies=False, + database_path=None ): """ Create a KmipServer. @@ -113,6 +121,12 @@ level for the server. All log messages logged at this level or higher in criticality will be logged. All log messages lower in criticality will not be logged. Optional, defaults to None. + live_policies (boolean): A boolean indicating if the operation + policy directory should be actively monitored to autoload any + policy changes while the server is running. Optional, defaults + to False. + database_path (string): The path to the server's SQLite database + file. Optional, defaults to None. 
""" self._logger = logging.getLogger('kmip.server') self._setup_logging(log_path) @@ -129,8 +143,11 @@ policy_path, enable_tls_client_auth, tls_cipher_suites, - logging_level + logging_level, + database_path ) + self.live_policies = live_policies + self.policies = {} self._logger.setLevel(self.config.settings.get('logging_level')) @@ -140,9 +157,6 @@ else: self.auth_suite = auth.BasicAuthenticationSuite(cipher_suites) - self._engine = engine.KmipEngine( - self.config.settings.get('policy_path') - ) self._session_id = 1 self._is_serving = False @@ -179,7 +193,8 @@ policy_path=None, enable_tls_client_auth=None, tls_cipher_suites=None, - logging_level=None + logging_level=None, + database_path=None ): if path: self.config.load_settings(path) @@ -210,6 +225,8 @@ ) if logging_level: self.config.set_setting('logging_level', logging_level) + if database_path: + self.config.set_setting('database_path', database_path) def start(self): """ @@ -223,9 +240,34 @@ NetworkingError: Raised if the TLS socket cannot be bound to the network address. """ + self.manager = multiprocessing.Manager() + self.policies = self.manager.dict() + policies = copy.deepcopy(operation_policy.policies) + for policy_name, policy_set in six.iteritems(policies): + self.policies[policy_name] = policy_set + + self.policy_monitor = monitor.PolicyDirectoryMonitor( + self.config.settings.get('policy_path'), + self.policies, + self.live_policies + ) + + def interrupt_handler(trigger, frame): + self.policy_monitor.stop() + signal.signal(signal.SIGINT, interrupt_handler) + signal.signal(signal.SIGTERM, interrupt_handler) + + self.policy_monitor.start() + + self._engine = engine.KmipEngine( + policies=self.policies, + database_path=self.config.settings.get('database_path') + ) + self._logger.info("Starting server socket handler.") # Create a TCP stream socket and configure it for immediate reuse. 
+ socket.setdefaulttimeout(10) self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) @@ -328,6 +370,16 @@ "Server failed to shutdown socket handler." ) + if hasattr(self, "policy_monitor"): + try: + self.policy_monitor.stop() + self.policy_monitor.join() + except Exception as e: + self._logger.exception(e) + raise exceptions.ShutdownError( + "Server failed to clean up the policy monitor." + ) + def serve(self): """ Serve client connections. @@ -356,6 +408,11 @@ while self._is_serving: try: connection, address = self._socket.accept() + except socket.timeout: + # Setting the default socket timeout to break hung connections + # will cause accept to periodically raise socket.timeout. This + # is expected behavior, so ignore it and retry accept. + pass except socket.error as e: self._logger.warning( "Error detected while establishing new connection." @@ -398,10 +455,12 @@ s = session.KmipSession( self._engine, connection, + address, name=session_name, enable_tls_client_auth=self.config.settings.get( 'enable_tls_client_auth' - ) + ), + auth_settings=self.config.settings.get('auth_plugins') ) s.daemon = True s.start() @@ -560,6 +619,18 @@ "DEBUG, INFO). Optional, defaults to None." ) ) + parser.add_option( + "-d", + "--database_path", + action="store", + type="str", + default=None, + dest="database_path", + help=( + "A string representing a path to the server's SQLite database " + "file. Optional, defaults to None." + ), + ) return parser @@ -592,6 +663,10 @@ kwargs['enable_tls_client_auth'] = False if opts.logging_level: kwargs['logging_level'] = opts.logging_level + if opts.database_path: + kwargs['database_path'] = opts.database_path + + kwargs['live_policies'] = True # Create and start the server. 
s = KmipServer(**kwargs) diff -Nru python-pykmip-0.7.0/kmip/services/server/session.py python-pykmip-0.8.0/kmip/services/server/session.py --- python-pykmip-0.7.0/kmip/services/server/session.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/services/server/session.py 2018-04-02 17:12:18.000000000 +0000 @@ -17,9 +17,9 @@ import socket import struct import threading +import time from cryptography import x509 -from cryptography.hazmat import backends from kmip.core import enums from kmip.core import exceptions @@ -27,6 +27,8 @@ from kmip.core.messages import messages from kmip.core import utils +from kmip.services.server import auth + class KmipSession(threading.Thread): """ @@ -36,8 +38,10 @@ def __init__(self, engine, connection, + address, name=None, - enable_tls_client_auth=True): + enable_tls_client_auth=True, + auth_settings=None): """ Create a KmipSession. @@ -46,12 +50,19 @@ that handles message processing. Required. connection (socket): A client socket.socket TLS connection representing a new KMIP connection. Required. + address (tuple): The address tuple produced with the session + connection. Contains the IP address and port number of the + remote connection endpoint. Required. name (str): The name of the KmipSession. Optional, defaults to None. enable_tls_client_auth (bool): A flag that enables a strict check for the client auth flag in the extended key usage extension in client certificates when establishing the client/server TLS connection. Optional, defaults to True. + auth_settings (list): A list of tuples, each containing (1) the + name of the 'auth:' settings block from the server config file, + and (2) a dictionary of configuration settings for a specific + authentication plugin. Optional, defaults to None. 
""" super(KmipSession, self).__init__( group=None, @@ -67,9 +78,12 @@ self._engine = engine self._connection = connection + self._address = address self._enable_tls_client_auth = enable_tls_client_auth + self._auth_settings = [] if auth_settings is None else auth_settings + self._session_time = time.time() self._max_buffer_size = 4096 self._max_request_size = 1048576 self._max_response_size = 1048576 @@ -96,61 +110,6 @@ self._connection.close() self._logger.info("Stopping session: {0}".format(self.name)) - def _get_client_identity(self): - certificate_data = self._connection.getpeercert(binary_form=True) - try: - cert = x509.load_der_x509_certificate( - certificate_data, - backends.default_backend() - ) - except Exception: - # This should never get raised "in theory," as the ssl socket - # should fail to connect non-TLS connections before the session - # gets created. This is a failsafe in case that protection fails. - raise exceptions.PermissionDenied( - "Failure loading the client certificate from the session " - "connection. Could not retrieve client identity." - ) - - if self._enable_tls_client_auth: - try: - extended_key_usage = cert.extensions.get_extension_for_oid( - x509.oid.ExtensionOID.EXTENDED_KEY_USAGE - ).value - except x509.ExtensionNotFound: - raise exceptions.PermissionDenied( - "The extended key usage extension is missing from the " - "client certificate. Session client identity unavailable." - ) - - if x509.oid.ExtendedKeyUsageOID.CLIENT_AUTH not in \ - extended_key_usage: - raise exceptions.PermissionDenied( - "The extended key usage extension is not marked for " - "client authentication in the client certificate. Session " - "client identity unavailable." - ) - - client_identities = cert.subject.get_attributes_for_oid( - x509.oid.NameOID.COMMON_NAME - ) - if len(client_identities) > 0: - if len(client_identities) > 1: - self._logger.warning( - "Multiple client identities found. Using the first " - "one processed." 
- ) - client_identity = client_identities[0].value - self._logger.info( - "Session client identity: {0}".format(client_identity) - ) - return client_identity - else: - raise exceptions.PermissionDenied( - "The client certificate does not define a subject common " - "name. Session client identity unavailable." - ) - def _handle_message_loop(self): request_data = self._receive_request() request = messages.RequestMessage() @@ -170,42 +129,90 @@ self._connection.cipher() ) ) - client_identity = self._get_client_identity() + + certificate = auth.get_certificate_from_connection( + self._connection + ) + if certificate is None: + raise exceptions.PermissionDenied( + "The client certificate could not be loaded from the " + "session connection." + ) + + if self._enable_tls_client_auth: + extension = auth.get_extended_key_usage_from_certificate( + certificate + ) + if extension is None: + raise exceptions.PermissionDenied( + "The extended key usage extension is missing from " + "the client certificate." + ) + if x509.oid.ExtendedKeyUsageOID.CLIENT_AUTH not in extension: + raise exceptions.PermissionDenied( + "The extended key usage extension is not marked for " + "client authentication in the client certificate." + ) + request.read(request_data) + except exceptions.PermissionDenied as e: + self._logger.warning("Failure verifying the client certificate.") + self._logger.exception(e) + response = self._engine.build_error_response( + contents.ProtocolVersion(1, 0), + enums.ResultReason.AUTHENTICATION_NOT_SUCCESSFUL, + "Error verifying the client certificate. " + "See server logs for more information." + ) except Exception as e: self._logger.warning("Failure parsing request message.") self._logger.exception(e) response = self._engine.build_error_response( - contents.ProtocolVersion.create(1, 0), + contents.ProtocolVersion(1, 0), enums.ResultReason.INVALID_MESSAGE, "Error parsing request message. See server logs for more " "information." 
) else: try: - response, max_response_size = self._engine.process_request( - request, - client_identity - ) - if max_response_size: - max_size = max_response_size - except exceptions.KmipError as e: - response = self._engine.build_error_response( - request.request_header.protocol_version, - e.reason, - str(e) + client_identity = self.authenticate(certificate, request) + self._logger.info( + "Session client identity: {}".format(client_identity[0]) ) - except Exception as e: - self._logger.warning( - "An unexpected error occurred while processing request." - ) - self._logger.exception(e) + except Exception: + self._logger.warning("Authentication failed.") response = self._engine.build_error_response( request.request_header.protocol_version, - enums.ResultReason.GENERAL_FAILURE, - "An unexpected error occurred while processing request. " + enums.ResultReason.AUTHENTICATION_NOT_SUCCESSFUL, + "An error occurred during client authentication. " "See server logs for more information." ) + else: + try: + response, max_response_size = self._engine.process_request( + request, + client_identity + ) + if max_response_size: + max_size = max_response_size + except exceptions.KmipError as e: + response = self._engine.build_error_response( + request.request_header.protocol_version, + e.reason, + str(e) + ) + except Exception as e: + self._logger.warning( + "An unexpected error occurred while processing " + "request." + ) + self._logger.exception(e) + response = self._engine.build_error_response( + request.request_header.protocol_version, + enums.ResultReason.GENERAL_FAILURE, + "An unexpected error occurred while processing " + "request. See server logs for more information." 
+ ) response_data = utils.BytearrayStream() response.write(response_data) @@ -229,6 +236,68 @@ self._send_response(response_data.buffer) + def authenticate(self, certificate, request): + credentials = [] + if request.request_header.authentication is not None: + credentials = request.request_header.authentication.credentials + + plugin_enabled = False + + for auth_settings in self._auth_settings: + plugin_name, plugin_config = auth_settings + + if plugin_name.startswith("auth:slugs"): + if plugin_config.get("enabled") == "True": + plugin_enabled = True + plugin = auth.SLUGSConnector(plugin_config.get("url")) + self._logger.debug( + "Authenticating with plugin: {}".format(plugin_name) + ) + try: + client_identity = plugin.authenticate( + certificate, + (self._address, self._session_time), + credentials + ) + except Exception as e: + self._logger.warning( + "Authentication failed." + ) + self._logger.exception(e) + else: + self._logger.debug( + "Authentication succeeded for client identity: " + "{}".format(client_identity[0]) + ) + return client_identity + else: + self._logger.warning( + "Authentication plugin '{}' is not " + "supported.".format(plugin_name) + ) + + if not plugin_enabled: + self._logger.debug( + "No authentication plugins are enabled. The client identity " + "will be extracted from the client certificate." 
+ ) + try: + client_identity = auth.get_client_identity_from_certificate( + certificate + ) + except Exception as e: + self._logger.warning("Client identity extraction failed.") + self._logger.exception(e) + else: + self._logger.debug( + "Extraction succeeded for client identity: {}".format( + client_identity + ) + ) + return tuple([client_identity, None]) + + raise exceptions.PermissionDenied("Authentication failed.") + def _receive_request(self): header = self._receive_bytes(8) message_size = struct.unpack('!I', header[4:])[0] diff -Nru python-pykmip-0.7.0/kmip/tests/functional/conftest.py python-pykmip-0.8.0/kmip/tests/functional/conftest.py --- python-pykmip-0.7.0/kmip/tests/functional/conftest.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/functional/conftest.py 2018-05-17 17:16:56.000000000 +0000 @@ -0,0 +1,29 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import pytest + + +def pytest_addoption(parser): + parser.addoption( + "--config-file", + action="store", + help="Config file path for client configuration settings" + ) + + +@pytest.fixture(scope="class") +def config_file(request): + request.cls.config_file = request.config.getoption("--config-file") diff -Nru python-pykmip-0.7.0/kmip/tests/functional/services/test_authentication.py python-pykmip-0.8.0/kmip/tests/functional/services/test_authentication.py --- python-pykmip-0.7.0/kmip/tests/functional/services/test_authentication.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/functional/services/test_authentication.py 2018-05-17 17:16:56.000000000 +0000 @@ -0,0 +1,263 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import os +import pytest +import six +import testtools +import time + +from kmip.core import enums +from kmip.pie import client +from kmip.pie import exceptions +from kmip.pie import objects + + +@pytest.mark.usefixtures("config_file") +class TestSLUGSAuthenticationAndAccessControl(testtools.TestCase): + + def setUp(self): + super(TestSLUGSAuthenticationAndAccessControl, self).setUp() + + self.client_john_doe = client.ProxyKmipClient( + config='john_doe', + config_file=self.config_file + ) + self.client_jane_doe = client.ProxyKmipClient( + config='jane_doe', + config_file=self.config_file + ) + self.client_john_smith = client.ProxyKmipClient( + config='john_smith', + config_file=self.config_file + ) + self.client_jane_smith = client.ProxyKmipClient( + config='jane_smith', + config_file=self.config_file + ) + + def tearDown(self): + super(TestSLUGSAuthenticationAndAccessControl, self).tearDown() + + def test_group_level_access_control(self): + """ + Test that: + 1. a user in Group A can create and retrieve a symmetric key + 2. a user in Group B can also retrieve the same symmetric key + 3. a user in both Groups can also retrieve the same symmetric key + 4. a user in Group B cannot destroy the same symmetric key, and + 5. a user in Group A can destroy the same symmetric key. 
+ """ + with self.client_john_doe as c: + uid = c.create( + enums.CryptographicAlgorithm.AES, + 256, + operation_policy_name="policy_1" + ) + self.assertIsInstance(uid, six.string_types) + + key = c.get(uid) + self.assertIsInstance(key, objects.SymmetricKey) + self.assertEqual( + key.cryptographic_algorithm, + enums.CryptographicAlgorithm.AES) + self.assertEqual(key.cryptographic_length, 256) + + with self.client_jane_doe as c: + key = c.get(uid) + self.assertIsInstance(key, objects.SymmetricKey) + self.assertEqual( + key.cryptographic_algorithm, + enums.CryptographicAlgorithm.AES) + self.assertEqual(key.cryptographic_length, 256) + + with self.client_john_smith as c: + key = c.get(uid) + self.assertIsInstance(key, objects.SymmetricKey) + self.assertEqual( + key.cryptographic_algorithm, + enums.CryptographicAlgorithm.AES) + self.assertEqual(key.cryptographic_length, 256) + + self.assertRaises(exceptions.KmipOperationFailure, c.destroy, uid) + + with self.client_john_doe as c: + c.destroy(uid) + self.assertRaises( + exceptions.KmipOperationFailure, c.get, uid) + self.assertRaises( + exceptions.KmipOperationFailure, c.destroy, uid) + + def test_policy_live_loading(self): + """ + Test that: + 1. a user in Group A can create and retrieve a symmetric key + 2. a user in Group B can also retrieve the same symmetric key + 3. a user in Group B cannot destroy the same symmetric key + 4. a policy is uploaded if created after server start up + 5. a user in Group A cannot retrieve the same symmetric key, and + 6. a user in Group B can destroy the same symmetric key. 
+ """ + with self.client_john_doe as c: + uid = c.create( + enums.CryptographicAlgorithm.AES, + 256, + operation_policy_name="policy_1" + ) + self.assertIsInstance(uid, six.string_types) + + key = c.get(uid) + self.assertIsInstance(key, objects.SymmetricKey) + self.assertEqual( + key.cryptographic_algorithm, + enums.CryptographicAlgorithm.AES) + self.assertEqual(key.cryptographic_length, 256) + + with self.client_john_smith as c: + key = c.get(uid) + self.assertIsInstance(key, objects.SymmetricKey) + self.assertEqual( + key.cryptographic_algorithm, + enums.CryptographicAlgorithm.AES) + self.assertEqual(key.cryptographic_length, 256) + + self.assertRaises(exceptions.KmipOperationFailure, c.destroy, uid) + + with open("/tmp/pykmip/policies/policy_overwrite.json", "w") as f: + f.write('{\n') + f.write(' "policy_1": {\n') + f.write(' "groups": {\n') + f.write(' "Group A": {\n') + f.write(' "SYMMETRIC_KEY": {\n') + f.write(' "GET": "DISALLOW_ALL",\n') + f.write(' "DESTROY": "DISALLOW_ALL"\n') + f.write(' }\n') + f.write(' },\n') + f.write(' "Group B": {\n') + f.write(' "SYMMETRIC_KEY": {\n') + f.write(' "GET": "ALLOW_ALL",\n') + f.write(' "DESTROY": "ALLOW_ALL"\n') + f.write(' }\n') + f.write(' }\n') + f.write(' }\n') + f.write(' }\n') + f.write('}\n') + time.sleep(1) + + with self.client_john_doe as c: + self.assertRaises(exceptions.KmipOperationFailure, c.get, uid) + self.assertRaises(exceptions.KmipOperationFailure, c.destroy, uid) + + with self.client_john_smith as c: + key = c.get(uid) + self.assertIsInstance(key, objects.SymmetricKey) + self.assertEqual( + key.cryptographic_algorithm, + enums.CryptographicAlgorithm.AES) + self.assertEqual(key.cryptographic_length, 256) + + c.destroy(uid) + self.assertRaises( + exceptions.KmipOperationFailure, c.get, uid) + self.assertRaises( + exceptions.KmipOperationFailure, c.destroy, uid) + + os.remove("/tmp/pykmip/policies/policy_overwrite.json") + time.sleep(1) + + def test_policy_caching(self): + """ + Test that: + 1. 
a user in Group A can create and retrieve a symmetric key + 2. a policy is uploaded if created after server start up + 3. a user in Group A cannot retrieve or destroy the same symmetric key + 4. the original policy is restored after the new policy is removed, and + 5. a user in Group A can retrieve and destroy the same symmetric key. + """ + with self.client_john_doe as c: + uid = c.create( + enums.CryptographicAlgorithm.AES, + 256, + operation_policy_name="policy_1" + ) + self.assertIsInstance(uid, six.string_types) + + key = c.get(uid) + self.assertIsInstance(key, objects.SymmetricKey) + self.assertEqual( + key.cryptographic_algorithm, + enums.CryptographicAlgorithm.AES) + self.assertEqual(key.cryptographic_length, 256) + + with open("/tmp/pykmip/policies/policy_caching.json", "w") as f: + f.write('{\n') + f.write(' "policy_1": {\n') + f.write(' "groups": {\n') + f.write(' "Group A": {\n') + f.write(' "SYMMETRIC_KEY": {\n') + f.write(' "GET": "DISALLOW_ALL",\n') + f.write(' "DESTROY": "DISALLOW_ALL"\n') + f.write(' }\n') + f.write(' }\n') + f.write(' }\n') + f.write(' }\n') + f.write('}\n') + time.sleep(1) + + self.assertRaises(exceptions.KmipOperationFailure, c.get, uid) + self.assertRaises(exceptions.KmipOperationFailure, c.destroy, uid) + + os.remove("/tmp/pykmip/policies/policy_caching.json") + time.sleep(1) + + key = c.get(uid) + self.assertIsInstance(key, objects.SymmetricKey) + self.assertEqual( + key.cryptographic_algorithm, + enums.CryptographicAlgorithm.AES) + self.assertEqual(key.cryptographic_length, 256) + + c.destroy(uid) + self.assertRaises( + exceptions.KmipOperationFailure, c.get, uid) + self.assertRaises( + exceptions.KmipOperationFailure, c.destroy, uid) + + def test_authenticating_unrecognized_user(self): + """ + Test that an unrecognized user is blocked from submitting a request. 
+ """ + with open("/tmp/slugs/user_group_mapping.csv", "w") as f: + f.write('Jane Doe,Group A\n') + f.write('Jane Doe,Group B\n') + f.write('John Smith,Group B\n') + time.sleep(1) + + args = (enums.CryptographicAlgorithm.AES, 256) + kwargs = {'operation_policy_name': 'policy_1'} + with self.client_john_doe as c: + self.assertRaises( + exceptions.KmipOperationFailure, + c.create, + *args, + **kwargs + ) + + with open("/tmp/slugs/user_group_mapping.csv", "w") as f: + f.write('John Doe,Group A\n') + f.write('Jane Doe,Group A\n') + f.write('Jane Doe,Group B\n') + f.write('John Smith,Group B\n') + time.sleep(1) diff -Nru python-pykmip-0.7.0/kmip/tests/integration/services/test_kmip_client.py python-pykmip-0.8.0/kmip/tests/integration/services/test_kmip_client.py --- python-pykmip-0.7.0/kmip/tests/integration/services/test_kmip_client.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/integration/services/test_kmip_client.py 2017-12-08 17:36:18.000000000 +0000 @@ -35,8 +35,8 @@ from kmip.core.enums import ResultStatus from kmip.core.enums import ResultReason -from kmip.core.errors import KMIPServerSuicideError -from kmip.core.errors import KMIPServerZombieError +from kmip.core.exceptions import KMIPServerSuicideError +from kmip.core.exceptions import KMIPServerZombieError from kmip.core.factories.attributes import AttributeFactory from kmip.core.factories.credentials import CredentialFactory diff -Nru python-pykmip-0.7.0/kmip/tests/integration/services/test_proxykmipclient.py python-pykmip-0.8.0/kmip/tests/integration/services/test_proxykmipclient.py --- python-pykmip-0.7.0/kmip/tests/integration/services/test_proxykmipclient.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/integration/services/test_proxykmipclient.py 2018-04-12 06:42:35.000000000 +0000 @@ -111,7 +111,9 @@ enums.CryptographicAlgorithm.AES, 128, (b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D\x0E' - b'\x0F')) + b'\x0F'), + name="Test Symmetric 
Key" + ) uid = self.client.register(key) self.assertIsInstance(uid, six.string_types) @@ -179,7 +181,11 @@ asymmetric key pair. """ public_uid, private_uid = self.client.create_key_pair( - enums.CryptographicAlgorithm.RSA, 2048) + enums.CryptographicAlgorithm.RSA, + 2048, + public_usage_mask=[enums.CryptographicUsageMask.ENCRYPT], + private_usage_mask=[enums.CryptographicUsageMask.DECRYPT] + ) self.assertIsInstance(public_uid, six.string_types) self.assertIsInstance(private_uid, six.string_types) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/factories/payloads/test_request.py python-pykmip-0.8.0/kmip/tests/unit/core/factories/payloads/test_request.py --- python-pykmip-0.7.0/kmip/tests/unit/core/factories/payloads/test_request.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/factories/payloads/test_request.py 2018-04-17 13:06:07.000000000 +0000 @@ -53,7 +53,8 @@ self._test_payload_type(payload, payloads.RegisterRequestPayload) def test_create_rekey_payload(self): - self._test_not_implemented(self.factory.create, enums.Operation.REKEY) + payload = self.factory.create(enums.Operation.REKEY) + self._test_payload_type(payload, payloads.RekeyRequestPayload) def test_create_derive_key_payload(self): payload = self.factory.create(enums.Operation.DERIVE_KEY) @@ -76,7 +77,8 @@ self._test_payload_type(payload, payloads.LocateRequestPayload) def test_create_check_payload(self): - self._test_not_implemented(self.factory.create, enums.Operation.CHECK) + payload = self.factory.create(enums.Operation.CHECK) + self._test_payload_type(payload, payloads.CheckRequestPayload) def test_create_get_payload(self): payload = self.factory.create(enums.Operation.GET) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/factories/payloads/test_response.py python-pykmip-0.8.0/kmip/tests/unit/core/factories/payloads/test_response.py --- python-pykmip-0.7.0/kmip/tests/unit/core/factories/payloads/test_response.py 2017-11-14 06:30:49.000000000 +0000 +++ 
python-pykmip-0.8.0/kmip/tests/unit/core/factories/payloads/test_response.py 2018-04-17 13:06:07.000000000 +0000 @@ -53,7 +53,8 @@ self._test_payload_type(payload, payloads.RegisterResponsePayload) def test_create_rekey_payload(self): - self._test_not_implemented(self.factory.create, enums.Operation.REKEY) + payload = self.factory.create(enums.Operation.REKEY) + self._test_payload_type(payload, payloads.RekeyResponsePayload) def test_create_derive_key_payload(self): payload = self.factory.create(enums.Operation.DERIVE_KEY) @@ -76,7 +77,8 @@ self._test_payload_type(payload, payloads.LocateResponsePayload) def test_create_check_payload(self): - self._test_not_implemented(self.factory.create, enums.Operation.CHECK) + payload = self.factory.create(enums.Operation.CHECK) + self._test_payload_type(payload, payloads.CheckResponsePayload) def test_create_get_payload(self): payload = self.factory.create(enums.Operation.GET) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/contents/test_authentication.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/contents/test_authentication.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/contents/test_authentication.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/contents/test_authentication.py 2018-04-02 17:12:18.000000000 +0000 @@ -0,0 +1,988 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +import testtools + +from kmip.core import enums +from kmip.core import objects +from kmip.core import utils +from kmip.core.messages import contents + + +class TestAuthentication(testtools.TestCase): + """ + Test suite for the Authentication struct. + """ + + def setUp(self): + super(TestAuthentication, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 11.1. + # + # This encoding matches the following set of values: + # Authentication + # Credential + # CredentialType - Username and Password + # CredentialValue + # Username - Fred + # Password - password1 + self.username_password_encoding = utils.BytearrayStream( + b'\x42\x00\x0C\x01\x00\x00\x00\x48' + b'\x42\x00\x23\x01\x00\x00\x00\x40' + b'\x42\x00\x24\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + b'\x42\x00\x25\x01\x00\x00\x00\x28' + b'\x42\x00\x99\x07\x00\x00\x00\x04' + b'\x46\x72\x65\x64\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x09' + b'\x70\x61\x73\x73\x77\x6F\x72\x64\x31\x00\x00\x00\x00\x00\x00\x00' + ) + self.encoding_missing_credentials = utils.BytearrayStream( + b'\x42\x00\x0C\x01\x00\x00\x00\x00' + ) + + # Encoding obtained from the KMIP 1.1 testing document, Section 11.2. 
+ # + # This encoding matches the following set of values: + # Authentication + # Credential + # CredentialType - Device + # CredentialValue + # Device Serial Number - serNum123456 + # Password - secret + # Device Identifier - devID2233 + # Network Identifier - netID9000 + # Machine Identifier - machineID1 + # Media Identifier - mediaID313 + self.device_encoding = utils.BytearrayStream( + b'\x42\x00\x0C\x01\x00\x00\x00\xA8' + b'\x42\x00\x23\x01\x00\x00\x00\xA0' + b'\x42\x00\x24\x05\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00' + b'\x42\x00\x25\x01\x00\x00\x00\x88' + b'\x42\x00\xB0\x07\x00\x00\x00\x0C' + b'\x73\x65\x72\x4E\x75\x6D\x31\x32\x33\x34\x35\x36\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x06' + b'\x73\x65\x63\x72\x65\x74\x00\x00' + b'\x42\x00\xA2\x07\x00\x00\x00\x09' + b'\x64\x65\x76\x49\x44\x32\x32\x33\x33\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAB\x07\x00\x00\x00\x09' + b'\x6E\x65\x74\x49\x44\x39\x30\x30\x30\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xA9\x07\x00\x00\x00\x0A' + b'\x6D\x61\x63\x68\x69\x6E\x65\x49\x44\x31\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAA\x07\x00\x00\x00\x0A' + b'\x6D\x65\x64\x69\x61\x49\x44\x33\x31\x33\x00\x00\x00\x00\x00\x00' + ) + + # Encoding obtained from the KMIP 1.1 testing document, combining + # encodings from Sections 11.1 and 11.2. 
+ # + # This encoding matches the following set of values: + # Authentication + # Credential + # CredentialType - Username and Password + # CredentialValue + # Username - Fred + # Password - password1 + # Credential + # CredentialType - Device + # CredentialValue + # Device Serial Number - serNum123456 + # Password - secret + # Device Identifier - devID2233 + # Network Identifier - netID9000 + # Machine Identifier - machineID1 + # Media Identifier - mediaID313 + self.multiple_credentials_encoding = utils.BytearrayStream( + b'\x42\x00\x0C\x01\x00\x00\x00\xF0' + b'\x42\x00\x23\x01\x00\x00\x00\x40' + b'\x42\x00\x24\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + b'\x42\x00\x25\x01\x00\x00\x00\x28' + b'\x42\x00\x99\x07\x00\x00\x00\x04' + b'\x46\x72\x65\x64\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x09' + b'\x70\x61\x73\x73\x77\x6F\x72\x64\x31\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\x23\x01\x00\x00\x00\xA0' + b'\x42\x00\x24\x05\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00' + b'\x42\x00\x25\x01\x00\x00\x00\x88' + b'\x42\x00\xB0\x07\x00\x00\x00\x0C' + b'\x73\x65\x72\x4E\x75\x6D\x31\x32\x33\x34\x35\x36\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x06' + b'\x73\x65\x63\x72\x65\x74\x00\x00' + b'\x42\x00\xA2\x07\x00\x00\x00\x09' + b'\x64\x65\x76\x49\x44\x32\x32\x33\x33\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAB\x07\x00\x00\x00\x09' + b'\x6E\x65\x74\x49\x44\x39\x30\x30\x30\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xA9\x07\x00\x00\x00\x0A' + b'\x6D\x61\x63\x68\x69\x6E\x65\x49\x44\x31\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAA\x07\x00\x00\x00\x0A' + b'\x6D\x65\x64\x69\x61\x49\x44\x33\x31\x33\x00\x00\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestAuthentication, self).tearDown() + + def test_init(self): + """ + Test that an Authentication struct can be constructed without + arguments. 
+ """ + authentication = contents.Authentication() + + self.assertEqual([], authentication.credentials) + + def test_init_with_args(self): + """ + Test that an Authentication struct can be constructed with arguments. + """ + authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="John", + password="abc123" + ) + ) + ] + ) + + self.assertEqual(1, len(authentication.credentials)) + self.assertEqual( + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="John", + password="abc123" + ) + ), + authentication.credentials[0] + ) + + def test_invalid_credentials(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the credentials of an Authentication struct. + """ + kwargs = {'credentials': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Credentials must be a list of Credential structs.", + contents.Authentication, + **kwargs + ) + + authentication = contents.Authentication() + args = (authentication, "credentials", 'invalid') + self.assertRaisesRegexp( + TypeError, + "Credentials must be a list of Credential structs.", + setattr, + *args + ) + + def test_invalid_credentials_list(self): + """ + Test that a TypeError is raised when an invalid list is used to set + the credentials of an Authentication struct. + """ + kwargs = { + 'credentials': [ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="John", + password="abc123" + ) + ), + 'invalid' + ] + } + self.assertRaisesRegexp( + TypeError, + "Credentials must be a list of Credential structs. 
Item 2 has " + "type: {}".format(type('invalid')), + contents.Authentication, + **kwargs + ) + + authentication = contents.Authentication() + args = ( + authentication, + "credentials", + [ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="John", + password="abc123" + ) + ), + 'invalid' + ] + ) + self.assertRaisesRegexp( + TypeError, + "Credentials must be a list of Credential structs. Item 2 has " + "type: {}".format(type('invalid')), + setattr, + *args + ) + + def test_read(self): + """ + Test that an Authentication struct can be read from a data stream. + """ + # Test with a single UsernamePasswordCredential. + authentication = contents.Authentication() + + self.assertEqual([], authentication.credentials) + + authentication.read(self.username_password_encoding) + + self.assertEqual(1, len(authentication.credentials)) + self.assertEqual( + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ), + authentication.credentials[0] + ) + + # Test with a single DeviceCredential. + authentication = contents.Authentication() + + self.assertEqual([], authentication.credentials) + + authentication.read(self.device_encoding) + + self.assertEqual(1, len(authentication.credentials)) + self.assertEqual( + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ), + authentication.credentials[0] + ) + + # Test with multiple Credentials. 
+ authentication = contents.Authentication() + + self.assertEqual([], authentication.credentials) + + authentication.read(self.multiple_credentials_encoding) + + self.assertEqual(2, len(authentication.credentials)) + self.assertEqual( + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ), + authentication.credentials[0] + ) + self.assertEqual( + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ), + authentication.credentials[1] + ) + + def test_read_missing_credentials(self): + """ + Test that a ValueError gets raised when attempting to read an + Authentication struct from a data stream missing credentials data. + """ + authentication = contents.Authentication() + + self.assertEqual([], authentication.credentials) + + args = (self.encoding_missing_credentials, ) + self.assertRaisesRegexp( + ValueError, + "Authentication encoding missing credentials.", + authentication.read, + *args + ) + + def test_write(self): + """ + Test that an Authentication struct can be written to a data stream. + """ + # Test with a single UsernamePasswordCredential. + authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + ] + ) + stream = utils.BytearrayStream() + + authentication.write(stream) + + self.assertEqual(len(self.username_password_encoding), len(stream)) + self.assertEqual(str(self.username_password_encoding), str(stream)) + + # Test with a single DeviceCredential. 
+ authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + stream = utils.BytearrayStream() + + authentication.write(stream) + + self.assertEqual(len(self.device_encoding), len(stream)) + self.assertEqual(str(self.device_encoding), str(stream)) + + # Test with multiple Credentials. + authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ), + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + stream = utils.BytearrayStream() + + authentication.write(stream) + + self.assertEqual(len(self.multiple_credentials_encoding), len(stream)) + self.assertEqual(str(self.multiple_credentials_encoding), str(stream)) + + def test_write_missing_credentials(self): + """ + Test that a ValueError gets raised when attempting to write a + Authentication struct missing credentials data to a data stream. + """ + authentication = contents.Authentication() + stream = utils.BytearrayStream() + + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Authentication struct missing credentials.", + authentication.write, + *args + ) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + Authentication structs with the same data. 
+ """ + a = contents.Authentication() + b = contents.Authentication() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + # Test with a single UsernamePasswordCredential. + a = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + ] + ) + b = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + ] + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + # Test with a single DeviceCredential. + a = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + b = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + # Test with multiple Credentials. 
+ a = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ), + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + b = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ), + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_credentials(self): + """ + Test that the equality operator returns False when comparing two + Authentication structs with different credentials. 
+ """ + a = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + ] + ) + b = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + Authentication structs with different types. + """ + a = contents.Authentication() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Authentication structs with the same data. + """ + a = contents.Authentication() + b = contents.Authentication() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + # Test with a single UsernamePasswordCredential. + a = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + ] + ) + b = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + ] + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + # Test with a single DeviceCredential. 
+ a = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + b = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + # Test with multiple Credentials. + a = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ), + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + b = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ), + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + + self.assertFalse(a != b) + 
self.assertFalse(b != a) + + def test_not_equal_on_not_equal_credentials(self): + """ + Test that the inequality operator returns True when comparing two + Authentication structs with different credentials. + """ + a = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + ] + ) + b = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Authentication structs with different types. + """ + a = contents.Authentication() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to an Authentication struct. + """ + # Test with a UsernamePasswordCredential. + authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + ] + ) + expected = ( + "Authentication(" + "credentials=[" + "Credential(" + "credential_type=CredentialType.USERNAME_AND_PASSWORD, " + "credential_value=UsernamePasswordCredential(" + "username='Fred', " + "password='password1'))])" + ) + observed = repr(authentication) + + self.assertEqual(expected, observed) + + # Test with a DeviceCredential. 
+ authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + expected = ( + "Authentication(" + "credentials=[" + "Credential(" + "credential_type=CredentialType.DEVICE, " + "credential_value=DeviceCredential(" + "device_serial_number='serNum123456', " + "password='secret', " + "device_identifier='devID2233', " + "network_identifier='netID9000', " + "machine_identifier='machineID1', " + "media_identifier='mediaID313'))])" + ) + observed = repr(authentication) + + self.assertEqual(expected, observed) + + # Test with multiple Credentials. + authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ), + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + expected = ( + "Authentication(" + "credentials=[" + "Credential(" + "credential_type=CredentialType.USERNAME_AND_PASSWORD, " + "credential_value=UsernamePasswordCredential(" + "username='Fred', " + "password='password1')), " + "Credential(" + "credential_type=CredentialType.DEVICE, " + "credential_value=DeviceCredential(" + "device_serial_number='serNum123456', " + "password='secret', " + "device_identifier='devID2233', " + "network_identifier='netID9000', " + "machine_identifier='machineID1', " + "media_identifier='mediaID313'))])" + ) + observed 
= repr(authentication) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to an Authentication struct. + """ + # Test with a UsernamePasswordCredential. + authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + ] + ) + expected = str({ + "credentials": [ + { + "credential_type": + enums.CredentialType.USERNAME_AND_PASSWORD, + "credential_value": str({ + "username": "Fred", + "password": "password1" + }) + } + ] + }) + observed = str(authentication) + + self.assertEqual(expected, observed) + + # Test with a DeviceCredential. + authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + expected = str({ + "credentials": [ + { + "credential_type": enums.CredentialType.DEVICE, + "credential_value": str({ + "device_serial_number": "serNum123456", + "password": "secret", + "device_identifier": "devID2233", + "network_identifier": "netID9000", + "machine_identifier": "machineID1", + "media_identifier": "mediaID313" + }) + } + ] + }) + observed = str(authentication) + + self.assertEqual(expected, observed) + + # Test with multiple Credentials. 
+ authentication = contents.Authentication( + credentials=[ + objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ), + objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + ] + ) + expected = str({ + "credentials": [ + { + "credential_type": + enums.CredentialType.USERNAME_AND_PASSWORD, + "credential_value": str({ + "username": "Fred", + "password": "password1" + }) + }, + { + "credential_type": enums.CredentialType.DEVICE, + "credential_value": str({ + "device_serial_number": "serNum123456", + "password": "secret", + "device_identifier": "devID2233", + "network_identifier": "netID9000", + "machine_identifier": "machineID1", + "media_identifier": "mediaID313" + }) + } + ] + }) + observed = str(authentication) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/contents/test_protocol_version.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/contents/test_protocol_version.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/contents/test_protocol_version.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/contents/test_protocol_version.py 2018-04-02 17:12:18.000000000 +0000 @@ -13,171 +13,243 @@ # License for the specific language governing permissions and limitations # under the License. 
-from testtools import TestCase +import testtools -from kmip.core.messages.contents import ProtocolVersion -from kmip.core.utils import BytearrayStream +from kmip.core.messages import contents +from kmip.core import utils -class TestProtocolVersion(TestCase): +class TestProtocolVersion(testtools.TestCase): def setUp(self): super(TestProtocolVersion, self).setUp() - self.major_default = ProtocolVersion.ProtocolVersionMajor() - self.minor_default = ProtocolVersion.ProtocolVersionMinor() - self.major = ProtocolVersion.ProtocolVersionMajor(1) - self.minor = ProtocolVersion.ProtocolVersionMinor(1) - - self.encoding_default = BytearrayStream(( - b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04' - b'\x00\x00\x00\x00\x00\x00\x00\x00\x42\x00\x6B\x02\x00\x00\x00\x04' - b'\x00\x00\x00\x00\x00\x00\x00\x00')) - self.encoding = BytearrayStream(( - b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04' - b'\x00\x00\x00\x01\x00\x00\x00\x00\x42\x00\x6B\x02\x00\x00\x00\x04' - b'\x00\x00\x00\x01\x00\x00\x00\x00')) + # Encoding obtained from the KMIP 1.1 testing document, Section 3.1.1. 
+ # + # This encoding matches the following set of values: + # ProtocolVersion + # ProtocolVersionMajor - 1 + # ProtocolVersionMinor - 1 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x69\x01\x00\x00\x00\x20' + b'\x42\x00\x6A\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + b'\x42\x00\x6B\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + ) + + self.encoding_no_major_number = utils.BytearrayStream( + b'\x42\x00\x69\x01\x00\x00\x00\x10' + b'\x42\x00\x6B\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + ) + + self.encoding_no_minor_number = utils.BytearrayStream( + b'\x42\x00\x69\x01\x00\x00\x00\x10' + b'\x42\x00\x6A\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + ) def tearDown(self): super(TestProtocolVersion, self).tearDown() - def _test_init(self, protocol_version_major, protocol_version_minor): - protocol_version = ProtocolVersion( - protocol_version_major, protocol_version_minor) - - if protocol_version_major is None: - self.assertEqual(ProtocolVersion.ProtocolVersionMajor(), - protocol_version.protocol_version_major) - else: - self.assertEqual(protocol_version_major, - protocol_version.protocol_version_major) - - if protocol_version_minor is None: - self.assertEqual(ProtocolVersion.ProtocolVersionMinor(), - protocol_version.protocol_version_minor) - else: - self.assertEqual(protocol_version_minor, - protocol_version.protocol_version_minor) + def test_init(self): + """ + Test that a ProtocolVersion struct can be constructed with no + arguments. + """ + struct = contents.ProtocolVersion() - def test_init_with_none(self): - self._test_init(None, None) + self.assertEqual(None, struct.major) + self.assertEqual(None, struct.minor) def test_init_with_args(self): - major = ProtocolVersion.ProtocolVersionMajor(1) - minor = ProtocolVersion.ProtocolVersionMinor(0) + """ + Test that a ProtocolVersion struct can be constructed with valid + values. 
+ """ + struct = contents.ProtocolVersion(1, 1) - self._test_init(major, minor) + self.assertEqual(1, struct.major) + self.assertEqual(1, struct.minor) - def test_validate_on_invalid_protocol_version_major(self): - major = "invalid" - minor = ProtocolVersion.ProtocolVersionMinor(0) - args = [major, minor] + def test_invalid_protocol_version_major(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the major protocol version number of a ProtocolVersion struct. + """ + struct = contents.ProtocolVersion() + args = (struct, 'major', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Major protocol version number must be an integer.", + setattr, + *args + ) + def test_invalid_protocol_version_minor(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the minor protocol version number of a ProtocolVersion struct. + """ + struct = contents.ProtocolVersion() + args = (struct, 'minor', 'invalid') self.assertRaisesRegexp( - TypeError, "invalid protocol version major", self._test_init, - *args) + TypeError, + "Minor protocol version number must be an integer.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a ProtocolVersion struct can be read from a data stream. + """ + struct = contents.ProtocolVersion() - def test_validate_on_invalid_protocol_version_minor(self): - major = ProtocolVersion.ProtocolVersionMajor(1) - minor = "invalid" - args = [major, minor] + self.assertEqual(None, struct.major) + self.assertEqual(None, struct.minor) + struct.read(self.full_encoding) + + self.assertEqual(1, struct.major) + self.assertEqual(1, struct.minor) + + def test_read_missing_major_number(self): + """ + Test that a ValueError gets raised when a required ProtocolVersion + struct attribute is missing from the struct encoding. 
+ """ + struct = contents.ProtocolVersion() + args = (self.encoding_no_major_number, ) self.assertRaisesRegexp( - TypeError, "invalid protocol version minor", self._test_init, - *args) + ValueError, + "Invalid encoding missing the major protocol version number.", + struct.read, + *args + ) + + def test_read_missing_minor_number(self): + """ + Test that a ValueError gets raised when a required ProtocolVersion + struct attribute is missing from the struct encoding. + """ + struct = contents.ProtocolVersion() + args = (self.encoding_no_minor_number, ) + self.assertRaisesRegexp( + ValueError, + "Invalid encoding missing the minor protocol version number.", + struct.read, + *args + ) + + def test_write(self): + """ + Test that a ProtocolVersion struct can be written to a data stream. + """ + struct = contents.ProtocolVersion(1, 1) + stream = utils.BytearrayStream() + struct.write(stream) - def _test_read(self, stream, major, minor): - protocol_version = ProtocolVersion() - protocol_version.read(stream) - - msg = "protocol version major decoding mismatch" - msg += "; expected {0}, received {1}".format( - major, protocol_version.protocol_version_major) - self.assertEqual(major, protocol_version.protocol_version_major, msg) - - msg = "protocol version minor decoding mismatch" - msg += "; expected {0}, received {1}".format( - minor, protocol_version.protocol_version_minor) - self.assertEqual(minor, protocol_version.protocol_version_minor, msg) - - def test_read_with_none(self): - self._test_read(self.encoding_default, self.major_default, - self.minor_default) - - def test_read_with_args(self): - self._test_read(self.encoding, self.major, self.minor) - - def _test_write(self, stream_expected, major, minor): - stream_observed = BytearrayStream() - protocol_version = ProtocolVersion(major, minor) - protocol_version.write(stream_observed) - - length_expected = len(stream_expected) - length_observed = len(stream_observed) - - msg = "encoding lengths not equal" - msg += "; 
expected {0}, received {1}".format( - length_expected, length_observed) - self.assertEqual(length_expected, length_observed, msg) - - msg = "encoding mismatch" - msg += ";\nexpected:\n{0}\nreceived:\n{1}".format( - stream_expected, stream_observed) - - self.assertEqual(stream_expected, stream_observed, msg) - - def test_write_with_none(self): - self._test_write(self.encoding_default, self.major_default, - self.minor_default) + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) - def test_write_with_args(self): - self._test_write(self.encoding, self.major, self.minor) + def test_write_missing_major_number(self): + """ + Test that a ValueError gets raised when a required ProtocolVersion + struct attribute is missing when encoding the struct. + """ + struct = contents.ProtocolVersion(None, 1) + stream = utils.BytearrayStream() + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Invalid struct missing the major protocol version number.", + struct.write, + *args + ) + + def test_write_missing_minor_number(self): + """ + Test that a ValueError gets raised when a required ProtocolVersion + struct attribute is missing when encoding the struct. + """ + struct = contents.ProtocolVersion(1, None) + stream = utils.BytearrayStream() + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Invalid struct missing the minor protocol version number.", + struct.write, + *args + ) def test_equal_on_equal(self): - a = ProtocolVersion.create(1, 0) - b = ProtocolVersion.create(1, 0) + """ + Test that the equality operator returns True when comparing two + ProtocolVersion structs with the same data. 
+ """ + a = contents.ProtocolVersion(1, 0) + b = contents.ProtocolVersion(1, 0) self.assertTrue(a == b) def test_equal_on_not_equal(self): - a = ProtocolVersion.create(1, 0) - b = ProtocolVersion.create(0, 1) + """ + Test that the equality operator returns False when comparing two + ProtocolVersion structs with different data. + """ + a = contents.ProtocolVersion(1, 0) + b = contents.ProtocolVersion(0, 1) self.assertFalse(a == b) def test_equal_on_type_mismatch(self): - a = ProtocolVersion.create(1, 0) + """ + Test that the equality operator returns False when comparing two + ProtocolVersion structs with different types. + """ + a = contents.ProtocolVersion(1, 0) b = "invalid" self.assertFalse(a == b) def test_not_equal_on_equal(self): - a = ProtocolVersion.create(1, 0) - b = ProtocolVersion.create(1, 0) + """ + Test that the inequality operator returns False when comparing two + ProtocolVersion structs with the same data. + """ + a = contents.ProtocolVersion(1, 0) + b = contents.ProtocolVersion(1, 0) self.assertFalse(a != b) def test_not_equal_on_not_equal(self): - a = ProtocolVersion.create(1, 0) - b = ProtocolVersion.create(0, 1) + """ + Test that the inequality operator returns True when comparing two + ProtocolVersion structs with different data. + """ + a = contents.ProtocolVersion(1, 0) + b = contents.ProtocolVersion(0, 1) self.assertTrue(a != b) def test_not_equal_on_type_mismatch(self): - a = ProtocolVersion.create(1, 0) + """ + Test that the inequality operator returns True when comparing two + ProtocolVersion structs with different types. + """ + a = contents.ProtocolVersion(1, 0) b = "invalid" self.assertTrue(a != b) def test_less_than(self): """ - Test that the less than operator returns True/False when comparing - two different ProtocolVersions. + Test that the less than operator correctly returns True/False when + comparing two different ProtocolVersions. 
""" - a = ProtocolVersion.create(1, 0) - b = ProtocolVersion.create(1, 1) - c = ProtocolVersion.create(2, 0) - d = ProtocolVersion.create(0, 2) + a = contents.ProtocolVersion(1, 0) + b = contents.ProtocolVersion(1, 1) + c = contents.ProtocolVersion(2, 0) + d = contents.ProtocolVersion(0, 2) self.assertTrue(a < b) self.assertFalse(b < a) @@ -187,15 +259,19 @@ self.assertFalse(c < d) self.assertTrue(d < c) + # A direct call to __lt__ is required here due to differences in how + # Python 2 and Python 3 treat comparison operators. + self.assertEqual(NotImplemented, a.__lt__('invalid')) + def test_greater_than(self): """ - Test that the greater than operator returns True/False when + Test that the greater than operator correctly returns True/False when comparing two different ProtocolVersions. """ - a = ProtocolVersion.create(1, 0) - b = ProtocolVersion.create(1, 1) - c = ProtocolVersion.create(2, 0) - d = ProtocolVersion.create(0, 2) + a = contents.ProtocolVersion(1, 0) + b = contents.ProtocolVersion(1, 1) + c = contents.ProtocolVersion(2, 0) + d = contents.ProtocolVersion(0, 2) self.assertFalse(a > b) self.assertTrue(b > a) @@ -205,15 +281,19 @@ self.assertTrue(c > d) self.assertFalse(d > c) + # A direct call to __gt__ is required here due to differences in how + # Python 2 and Python 3 treat comparison operators. + self.assertEqual(NotImplemented, a.__gt__('invalid')) + def test_less_than_or_equal(self): """ - Test that the less than or equal operator returns True/False when - comparing two different ProtocolVersions. + Test that the less than or equal operator correctly returns True/False + when comparing two different ProtocolVersions. 
""" - a = ProtocolVersion.create(1, 0) - b = ProtocolVersion.create(1, 1) - c = ProtocolVersion.create(2, 0) - d = ProtocolVersion.create(0, 2) + a = contents.ProtocolVersion(1, 0) + b = contents.ProtocolVersion(1, 1) + c = contents.ProtocolVersion(2, 0) + d = contents.ProtocolVersion(0, 2) self.assertTrue(a <= b) self.assertFalse(b <= a) @@ -223,15 +303,19 @@ self.assertFalse(c <= d) self.assertTrue(d <= c) + # A direct call to __le__ is required here due to differences in how + # Python 2 and Python 3 treat comparison operators. + self.assertEqual(NotImplemented, a.__le__('invalid')) + def test_greater_than_or_equal(self): """ - Test that the greater than or equal operator returns True/False when - comparing two different ProtocolVersions. + Test that the greater than or equal operator correctly returns + True/False when comparing two different ProtocolVersions. """ - a = ProtocolVersion.create(1, 0) - b = ProtocolVersion.create(1, 1) - c = ProtocolVersion.create(2, 0) - d = ProtocolVersion.create(0, 2) + a = contents.ProtocolVersion(1, 0) + b = contents.ProtocolVersion(1, 1) + c = contents.ProtocolVersion(2, 0) + d = contents.ProtocolVersion(0, 2) self.assertFalse(a >= b) self.assertTrue(b >= a) @@ -241,30 +325,25 @@ self.assertTrue(c >= d) self.assertFalse(d >= c) - def test_repr(self): - a = ProtocolVersion.create(1, 0) - - self.assertEqual("1.0", "{0}".format(a)) - - def _test_create(self, major, minor): - protocol_version = ProtocolVersion.create(major, minor) + # A direct call to __ge__ is required here due to differences in how + # Python 2 and Python 3 treat comparison operators. 
+ self.assertEqual(NotImplemented, a.__ge__('invalid')) - if major is None: - expected = ProtocolVersion.ProtocolVersionMajor() - else: - expected = ProtocolVersion.ProtocolVersionMajor(major) - - self.assertEqual(expected, protocol_version.protocol_version_major) - - if minor is None: - expected = ProtocolVersion.ProtocolVersionMinor() - else: - expected = ProtocolVersion.ProtocolVersionMinor(minor) + def test_repr(self): + """ + Test that repr can be applied to a ProtocolVersion struct. + """ + struct = contents.ProtocolVersion(1, 0) - self.assertEqual(expected, protocol_version.protocol_version_minor) + self.assertEqual( + "ProtocolVersion(major=1, minor=0)", + "{}".format(repr(struct)) + ) - def test_create_with_none(self): - self._test_create(None, None) + def test_str(self): + """ + Test that str can be applied to a ProtocolVersion struct. + """ + struct = contents.ProtocolVersion(1, 0) - def test_create_with_args(self): - self._test_create(1, 0) + self.assertEqual("1.0", str(struct)) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_archive.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_archive.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_archive.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_archive.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,523 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +import testtools + +from kmip.core import utils +from kmip.core.messages import payloads + + +class TestArchiveRequestPayload(testtools.TestCase): + """ + Test suite for the Archive request payload. + """ + + def setUp(self): + super(TestArchiveRequestPayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 10.1. + # + # This encoding matches the following set of values: + # Request Payload + # Unique Identifier - f613dba1-b557-489a-87c5-3c0ecd4294e3 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x30' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x66\x36\x31\x33\x64\x62\x61\x31\x2D\x62\x35\x35\x37\x2D\x34\x38' + b'\x39\x61\x2D\x38\x37\x63\x35\x2D\x33\x63\x30\x65\x63\x64\x34\x32' + b'\x39\x34\x65\x33\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestArchiveRequestPayload, self).tearDown() + + def test_init(self): + """ + Test that an Archive request payload can be constructed with no + arguments. + """ + payload = payloads.ArchiveRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + + def test_init_with_args(self): + """ + Test that an Archive request payload can be constructed with valid + values. + """ + payload = payloads.ArchiveRequestPayload( + unique_identifier='00000000-1111-2222-3333-444444444444' + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of an Archive request payload. 
+ """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.ArchiveRequestPayload, + **kwargs + ) + + payload = payloads.ArchiveRequestPayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_read(self): + """ + Test that an Archive request payload can be read from a data stream. + """ + payload = payloads.ArchiveRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.full_encoding) + + self.assertEqual( + 'f613dba1-b557-489a-87c5-3c0ecd4294e3', + payload.unique_identifier + ) + + def test_read_empty(self): + """ + Test that an Archive request payload can be read from an empty data + stream. + """ + payload = payloads.ArchiveRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + + def test_write(self): + """ + Test that an Archive request payload can be written to a data stream. + """ + payload = payloads.ArchiveRequestPayload( + unique_identifier='f613dba1-b557-489a-87c5-3c0ecd4294e3' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Archive request payload can be written + to a data stream. + """ + payload = payloads.ArchiveRequestPayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + Archive request payloads with the same data. 
+ """ + a = payloads.ArchiveRequestPayload() + b = payloads.ArchiveRequestPayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.ArchiveRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.ArchiveRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + Archive request payloads with different unique identifiers. + """ + a = payloads.ArchiveRequestPayload( + unique_identifier='a' + ) + b = payloads.ArchiveRequestPayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + Archive request payloads with different types. + """ + a = payloads.ArchiveRequestPayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Archive request payloads with the same data. + """ + a = payloads.ArchiveRequestPayload() + b = payloads.ArchiveRequestPayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.ArchiveRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.ArchiveRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + Archive request payloads with different unique identifiers. 
+ """ + a = payloads.ArchiveRequestPayload( + unique_identifier='a' + ) + b = payloads.ArchiveRequestPayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Archive request payloads with different types. + """ + a = payloads.ArchiveRequestPayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to an Archive request payload. + """ + payload = payloads.ArchiveRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + expected = ( + "ArchiveRequestPayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038')" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to an Archive request payload. + """ + payload = payloads.ArchiveRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038' + }) + observed = str(payload) + + self.assertEqual(expected, observed) + + +class TestArchiveResponsePayload(testtools.TestCase): + """ + Test suite for the Archive response payload. + """ + + def setUp(self): + super(TestArchiveResponsePayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 10.1. 
+ # + # This encoding matches the following set of values: + # Response Payload + # Unique Identifier - f613dba1-b557-489a-87c5-3c0ecd4294e3 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x30' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x66\x36\x31\x33\x64\x62\x61\x31\x2D\x62\x35\x35\x37\x2D\x34\x38' + b'\x39\x61\x2D\x38\x37\x63\x35\x2D\x33\x63\x30\x65\x63\x64\x34\x32' + b'\x39\x34\x65\x33\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestArchiveResponsePayload, self).tearDown() + + def test_init(self): + """ + Test that an Archive response payload can be constructed with no + arguments. + """ + payload = payloads.ArchiveResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + + def test_init_with_args(self): + """ + Test that an Archive response payload can be constructed with valid + values. + """ + payload = payloads.ArchiveResponsePayload( + unique_identifier='00000000-1111-2222-3333-444444444444' + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of an Archive response payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.ArchiveResponsePayload, + **kwargs + ) + + payload = payloads.ArchiveResponsePayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_read(self): + """ + Test that an Archive response payload can be read from a data stream. 
+ """ + payload = payloads.ArchiveResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.full_encoding) + + self.assertEqual( + 'f613dba1-b557-489a-87c5-3c0ecd4294e3', + payload.unique_identifier + ) + + def test_read_empty(self): + """ + Test that an Archive response payload can be read from an empty data + stream. + """ + payload = payloads.ArchiveResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + + def test_write(self): + """ + Test that an Archive response payload can be written to a data stream. + """ + payload = payloads.ArchiveResponsePayload( + unique_identifier='f613dba1-b557-489a-87c5-3c0ecd4294e3' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Archive response payload can be written to a data + stream. + """ + payload = payloads.ArchiveResponsePayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + Archive response payloads with the same data. 
+ """ + a = payloads.ArchiveResponsePayload() + b = payloads.ArchiveResponsePayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.ArchiveResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.ArchiveResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + Archive response payloads with different unique identifiers. + """ + a = payloads.ArchiveResponsePayload( + unique_identifier='a' + ) + b = payloads.ArchiveResponsePayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + Archive response payloads with different types. + """ + a = payloads.ArchiveResponsePayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Archive response payloads with the same data. + """ + a = payloads.ArchiveResponsePayload() + b = payloads.ArchiveResponsePayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.ArchiveResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.ArchiveResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + Archive response payloads with different unique identifiers. 
+ """ + a = payloads.ArchiveResponsePayload( + unique_identifier='a' + ) + b = payloads.ArchiveResponsePayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Archive response payloads with different types. + """ + a = payloads.ArchiveResponsePayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Archive response payload. + """ + payload = payloads.ArchiveResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + expected = ( + "ArchiveResponsePayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038')" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Archive response payload + """ + payload = payloads.ArchiveResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038' + }) + observed = str(payload) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_cancel.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_cancel.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_cancel.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_cancel.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,590 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import testtools + +from kmip import enums +from kmip.core import utils +from kmip.core.messages import payloads + + +class TestCancelRequestPayload(testtools.TestCase): + """ + Test suite for the Cancel request payload. + """ + + def setUp(self): + super(TestCancelRequestPayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 10.1. + # + # This encoding matches the following set of values: + # Request Payload + # Asynchronous Correlation Value - 0x583B0036C1A2DD01 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x10' + b'\x42\x00\x06\x08\x00\x00\x00\x08\x58\x3B\x00\x36\xC1\xA2\xDD\x01' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestCancelRequestPayload, self).tearDown() + + def test_init(self): + """ + Test that a Cancel request payload can be constructed with no + arguments. + """ + payload = payloads.CancelRequestPayload() + + self.assertEqual(None, payload.asynchronous_correlation_value) + + def test_init_with_args(self): + """ + Test that a Cancel request payload can be constructed with valid + values. + """ + payload = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\x01' + ) + + self.assertEqual(b'\x01', payload.asynchronous_correlation_value) + + def test_invalid_asynchronous_correlation_value(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the asynchronous correlation value of an Cancel request payload. 
+ """ + kwargs = {'asynchronous_correlation_value': 0} + self.assertRaisesRegexp( + TypeError, + "Asynchronous correlation value must be bytes.", + payloads.CancelRequestPayload, + **kwargs + ) + + payload = payloads.CancelRequestPayload() + args = (payload, 'asynchronous_correlation_value', 0) + self.assertRaisesRegexp( + TypeError, + "Asynchronous correlation value must be bytes.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Cancel request payload can be read from a data stream. + """ + payload = payloads.CancelRequestPayload() + + self.assertEqual(None, payload.asynchronous_correlation_value) + + payload.read(self.full_encoding) + + self.assertEqual( + b'\x58\x3B\x00\x36\xC1\xA2\xDD\x01', + payload.asynchronous_correlation_value + ) + + def test_read_empty(self): + """ + Test that an Cancel request payload can be read from an empty data + stream. + """ + payload = payloads.CancelRequestPayload() + + self.assertEqual(None, payload.asynchronous_correlation_value) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.asynchronous_correlation_value) + + def test_write(self): + """ + Test that a Cancel request payload can be written to a data stream. + """ + payload = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\x58\x3B\x00\x36\xC1\xA2\xDD\x01' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Cancel request payload can be written to a data + stream. 
+ """ + payload = payloads.CancelRequestPayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two Cancel + request payloads with the same data. + """ + a = payloads.CancelRequestPayload() + b = payloads.CancelRequestPayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88' + ) + b = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_asynchronous_correlation_value(self): + """ + Test that the equality operator returns False when comparing two Cancel + request payloads with different asynchronous correlation values. + """ + a = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\xaa' + ) + b = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\xbb' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two Cancel + request payloads with different types. + """ + a = payloads.CancelRequestPayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Cancel request payloads with the same data. 
+ """ + a = payloads.CancelRequestPayload() + b = payloads.CancelRequestPayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88' + ) + b = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_asynchronous_correlation_value(self): + """ + Test that the inequality operator returns True when comparing two + Cancel request payloads with different asynchronous correlation values. + """ + a = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\xaa' + ) + b = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\xbb' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Cancel request payloads with different types. + """ + a = payloads.CancelRequestPayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Cancel request payload. + """ + payload = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\xaa' + ) + expected = ( + "CancelRequestPayload(" + "asynchronous_correlation_value=" + str(b'\xaa') + ")" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Cancel request payload. + """ + payload = payloads.CancelRequestPayload( + asynchronous_correlation_value=b'\xaa' + ) + + expected = str({ + 'asynchronous_correlation_value': b'\xaa' + }) + observed = str(payload) + + self.assertEqual(expected, observed) + + +class TestCancelResponsePayload(testtools.TestCase): + """ + Test suite for the Cancel response payload. 
+ """ + + def setUp(self): + super(TestCancelResponsePayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 10.1. + # + # This encoding matches the following set of values: + # Response Payload + # Asynchronous Correlation Value - 0x583B0036C1A2DD01 + # Cancellation Result - 1 (Canceled) + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x20' + b'\x42\x00\x06\x08\x00\x00\x00\x08\x58\x3B\x00\x36\xC1\xA2\xDD\x01' + b'\x42\x00\x12\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestCancelResponsePayload, self).tearDown() + + def test_init(self): + """ + Test that a Cancel response payload can be constructed with no + arguments. + """ + payload = payloads.CancelRequestPayload() + + self.assertEqual(None, payload.asynchronous_correlation_value) + + def test_init_with_args(self): + """ + Test that a Cancel response payload can be constructed with valid + values. + """ + payload = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\x01', + cancellation_result=enums.CancellationResult.FAILED + ) + + self.assertEqual(b'\x01', payload.asynchronous_correlation_value) + self.assertEqual( + enums.CancellationResult.FAILED, + payload.cancellation_result + ) + + def test_invalid_asynchronous_correlation_value(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the asynchronous correlation value of an Cancel response payload. 
+ """ + kwargs = {'asynchronous_correlation_value': 0} + self.assertRaisesRegexp( + TypeError, + "Asynchronous correlation value must be bytes.", + payloads.CancelResponsePayload, + **kwargs + ) + + payload = payloads.CancelResponsePayload() + args = (payload, 'asynchronous_correlation_value', 0) + self.assertRaisesRegexp( + TypeError, + "Asynchronous correlation value must be bytes.", + setattr, + *args + ) + + def test_invalid_cancellation_result(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the cancellation result of an Cancel response payload. + """ + kwargs = {'cancellation_result': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Cancellation result must be a CancellationResult enumeration.", + payloads.CancelResponsePayload, + **kwargs + ) + + payload = payloads.CancelResponsePayload() + args = (payload, 'cancellation_result', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Cancellation result must be a CancellationResult enumeration.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Cancel response payload can be read from a data stream. + """ + payload = payloads.CancelResponsePayload() + + self.assertEqual(None, payload.asynchronous_correlation_value) + self.assertEqual(None, payload.cancellation_result) + + payload.read(self.full_encoding) + + self.assertEqual( + b'\x58\x3B\x00\x36\xC1\xA2\xDD\x01', + payload.asynchronous_correlation_value + ) + self.assertEqual( + enums.CancellationResult.CANCELED, + payload.cancellation_result + ) + + def test_read_empty(self): + """ + Test that an Cancel response payload can be read from an empty data + stream. 
+ """ + payload = payloads.CancelResponsePayload() + + self.assertEqual(None, payload.asynchronous_correlation_value) + self.assertEqual(None, payload.cancellation_result) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.asynchronous_correlation_value) + self.assertEqual(None, payload.cancellation_result) + + def test_write(self): + """ + Test that a Cancel response payload can be written to a data stream. + """ + payload = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\x58\x3B\x00\x36\xC1\xA2\xDD\x01', + cancellation_result=enums.CancellationResult.CANCELED + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Cancel response payload can be written to a data + stream. + """ + payload = payloads.CancelResponsePayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two Cancel + response payloads with the same data. 
+ """ + a = payloads.CancelResponsePayload() + b = payloads.CancelResponsePayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88', + cancellation_result=enums.CancellationResult.COMPLETED + ) + b = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88', + cancellation_result=enums.CancellationResult.COMPLETED + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_asynchronous_correlation_value(self): + """ + Test that the equality operator returns False when comparing two Cancel + response payloads with different asynchronous correlation values. + """ + a = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\xaa' + ) + b = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\xbb' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_cancellation_result(self): + """ + Test that the equality operator returns False when comparing two Cancel + response payloads with different cancellation results. + """ + a = payloads.CancelResponsePayload( + cancellation_result=enums.CancellationResult.FAILED + ) + b = payloads.CancelResponsePayload( + cancellation_result=enums.CancellationResult.COMPLETED + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two Cancel + response payloads with different types. + """ + a = payloads.CancelResponsePayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Cancel response payloads with the same data. 
+ """ + a = payloads.CancelResponsePayload() + b = payloads.CancelResponsePayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88', + cancellation_result=enums.CancellationResult.COMPLETED + ) + b = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88', + cancellation_result=enums.CancellationResult.COMPLETED + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_asynchronous_correlation_value(self): + """ + Test that the inequality operator returns True when comparing two + Cancel response payloads with different asynchronous correlation + values. + """ + a = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\xaa' + ) + b = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\xbb' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_cancellation_result(self): + """ + Test that the inequality operator returns True when comparing two + Cancel response payloads with different cancellation results. + """ + a = payloads.CancelResponsePayload( + cancellation_result=enums.CancellationResult.FAILED + ) + b = payloads.CancelResponsePayload( + cancellation_result=enums.CancellationResult.COMPLETED + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Cancel response payloads with different types. + """ + a = payloads.CancelResponsePayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Cancel response payload. 
+ """ + payload = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\xaa', + cancellation_result=enums.CancellationResult.UNABLE_TO_CANCEL + ) + expected = ( + "CancelResponsePayload(" + "asynchronous_correlation_value=" + str(b'\xaa') + ", " + "cancellation_result=" + str( + enums.CancellationResult.UNABLE_TO_CANCEL + ) + ")" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Cancel response payload. + """ + payload = payloads.CancelResponsePayload( + asynchronous_correlation_value=b'\xaa', + cancellation_result=enums.CancellationResult.UNAVAILABLE + ) + + expected = str({ + 'asynchronous_correlation_value': b'\xaa', + 'cancellation_result': enums.CancellationResult.UNAVAILABLE + }) + observed = str(payload) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_check.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_check.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_check.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_check.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,1043 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import testtools + +from kmip.core import utils +from kmip.core.messages import payloads + + +class TestCheckRequestPayload(testtools.TestCase): + """ + Test suite for the Check request payload. + """ + + def setUp(self): + super(TestCheckRequestPayload, self).setUp() + + # Encoding obtained in part from the KMIP 1.1 testing document, + # Section 5.1. The rest of the encoding was built by hand. + # + # This encoding matches the following set of values: + # Request Payload + # Unique Identifier - 2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6 + # Usage Limits Count - 500 + # Cryptographic Usage Mask - Encrypt | Decrypt (4 | 8 -> 12 or C) + # Lease Time - 0 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x60' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x32\x63\x32\x33\x32\x31\x37\x65\x2D\x66\x35\x33\x63\x2D\x34\x62' + b'\x64\x66\x2D\x61\x64\x30\x61\x2D\x35\x38\x61\x33\x31\x66\x64\x33' + b'\x64\x34\x62\x36\x00\x00\x00\x00' + b'\x42\x00\x96\x03\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x01\xF4' + b'\x42\x00\x2C\x02\x00\x00\x00\x04\x00\x00\x00\x0C\x00\x00\x00\x00' + b'\x42\x00\x49\x0A\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00' + ) + + # This encoding matches the following set of values: + # Request Payload + # Unique Identifier - 2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6 + # Usage Limits Count - 500 + self.partial_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x40' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x32\x63\x32\x33\x32\x31\x37\x65\x2D\x66\x35\x33\x63\x2D\x34\x62' + b'\x64\x66\x2D\x61\x64\x30\x61\x2D\x35\x38\x61\x33\x31\x66\x64\x33' + b'\x64\x34\x62\x36\x00\x00\x00\x00' + b'\x42\x00\x96\x03\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x01\xF4' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestCheckRequestPayload, self).tearDown() + + def test_init(self): + """ + Test that a Check request payload can be constructed with no arguments. 
+ """ + payload = payloads.CheckRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + def test_init_with_args(self): + """ + Test that a Check request payload can be constructed with valid values. + """ + payload = payloads.CheckRequestPayload( + unique_identifier='00000000-1111-2222-3333-444444444444', + usage_limits_count=10, + cryptographic_usage_mask=12, + lease_time=1000000000 + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + self.assertEqual(10, payload.usage_limits_count) + self.assertEqual(12, payload.cryptographic_usage_mask) + self.assertEqual(1000000000, payload.lease_time) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of a Check request payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.CheckRequestPayload, + **kwargs + ) + + payload = payloads.CheckRequestPayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_invalid_usage_limits_count(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the usage limits count of a Check request payload. 
+ """ + kwargs = {'usage_limits_count': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Usage limits count must be an integer.", + payloads.CheckRequestPayload, + **kwargs + ) + + payload = payloads.CheckRequestPayload() + args = (payload, 'usage_limits_count', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Usage limits count must be an integer.", + setattr, + *args + ) + + def test_invalid_cryptographic_usage_mask(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the cryptographic usage mask of a Check request payload. + """ + kwargs = {'cryptographic_usage_mask': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Cryptographic usage mask must be an integer.", + payloads.CheckRequestPayload, + **kwargs + ) + + payload = payloads.CheckRequestPayload() + args = (payload, 'cryptographic_usage_mask', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Cryptographic usage mask must be an integer.", + setattr, + *args + ) + + def test_invalid_lease_time(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the lease time of a Check request payload. + """ + kwargs = {'lease_time': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Lease time must be an integer.", + payloads.CheckRequestPayload, + **kwargs + ) + + payload = payloads.CheckRequestPayload() + args = (payload, 'lease_time', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Lease time must be an integer.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Check request payload can be read from a data stream. 
+ """ + payload = payloads.CheckRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + payload.read(self.full_encoding) + + self.assertEqual( + '2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + payload.unique_identifier + ) + self.assertEqual(500, payload.usage_limits_count) + self.assertEqual(12, payload.cryptographic_usage_mask) + self.assertEqual(0, payload.lease_time) + + def test_read_partial(self): + """ + Test that a Check request payload can be read from a partial data + stream. + """ + payload = payloads.CheckRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + payload.read(self.partial_encoding) + + self.assertEqual( + '2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + payload.unique_identifier + ) + self.assertEqual(500, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + def test_read_empty(self): + """ + Test that a Check request payload can be read from an empty data + stream. + """ + payload = payloads.CheckRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + def test_write(self): + """ + Test that a Check request payload can be written to a data stream. 
+ """ + payload = payloads.CheckRequestPayload( + unique_identifier='2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + usage_limits_count=500, + cryptographic_usage_mask=12, + lease_time=0 + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_partial(self): + """ + Test that a partial Check request payload can be written to a data + stream. + """ + payload = payloads.CheckRequestPayload( + unique_identifier='2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + usage_limits_count=500 + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.partial_encoding), len(stream)) + self.assertEqual(str(self.partial_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Check request payload can be written to a data + stream. + """ + payload = payloads.CheckRequestPayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + Check request payloads with the same data. 
+ """ + a = payloads.CheckRequestPayload() + b = payloads.CheckRequestPayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.CheckRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200, + cryptographic_usage_mask=4, + lease_time=1511882848 + ) + b = payloads.CheckRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200, + cryptographic_usage_mask=4, + lease_time=1511882848 + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + Check request payloads with different unique identifiers. + """ + a = payloads.CheckRequestPayload( + unique_identifier='a' + ) + b = payloads.CheckRequestPayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_usage_limits_count(self): + """ + Test that the equality operator returns False when comparing two + Check request payloads with different usage limits counts. + """ + a = payloads.CheckRequestPayload( + usage_limits_count=0 + ) + b = payloads.CheckRequestPayload( + usage_limits_count=1 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_cryptographic_usage_mask(self): + """ + Test that the equality operator returns False when comparing two + Check request payloads with different cryptographic usage masks. + """ + a = payloads.CheckRequestPayload( + cryptographic_usage_mask=4 + ) + b = payloads.CheckRequestPayload( + cryptographic_usage_mask=12 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_lease_time(self): + """ + Test that the equality operator returns False when comparing two + Check request payloads with different lease times. 
+ """ + a = payloads.CheckRequestPayload( + lease_time=0 + ) + b = payloads.CheckRequestPayload( + lease_time=1511882848 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + Check request payloads with different types. + """ + a = payloads.CheckRequestPayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Check request payloads with the same data. + """ + a = payloads.CheckRequestPayload() + b = payloads.CheckRequestPayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.CheckRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200, + cryptographic_usage_mask=4, + lease_time=1511882848 + ) + b = payloads.CheckRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200, + cryptographic_usage_mask=4, + lease_time=1511882848 + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + Check request payloads with different unique identifiers. + """ + a = payloads.CheckRequestPayload( + unique_identifier='a' + ) + b = payloads.CheckRequestPayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_usage_limits_count(self): + """ + Test that the inequality operator returns True when comparing two + Check request payloads with different usage limits counts. 
+ """ + a = payloads.CheckRequestPayload( + usage_limits_count=0 + ) + b = payloads.CheckRequestPayload( + usage_limits_count=1 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_cryptographic_usage_mask(self): + """ + Test that the inequality operator returns True when comparing two + Check request payloads with different cryptographic usage masks. + """ + a = payloads.CheckRequestPayload( + cryptographic_usage_mask=4 + ) + b = payloads.CheckRequestPayload( + cryptographic_usage_mask=12 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_lease_time(self): + """ + Test that the inequality operator returns True when comparing two + Check request payloads with different lease times. + """ + a = payloads.CheckRequestPayload( + lease_time=0 + ) + b = payloads.CheckRequestPayload( + lease_time=1511882848 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Check request payloads with different types. + """ + a = payloads.CheckRequestPayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Check request payload. 
+ """ + payload = payloads.CheckRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=1000, + cryptographic_usage_mask=8, + lease_time=1511882898 + ) + expected = ( + "CheckRequestPayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', " + "usage_limits_count=1000, " + "cryptographic_usage_mask=8, " + "lease_time=1511882898)" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Check request payload + """ + payload = payloads.CheckRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=1000, + cryptographic_usage_mask=8, + lease_time=1511882898 + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038', + 'usage_limits_count': 1000, + 'cryptographic_usage_mask': 8, + 'lease_time': 1511882898 + }) + observed = str(payload) + + self.assertEqual(expected, observed) + + +class TestCheckResponsePayload(testtools.TestCase): + """ + Test suite for the Check response payload. + """ + + def setUp(self): + super(TestCheckResponsePayload, self).setUp() + + # Encoding obtained in part from the KMIP 1.1 testing document, + # Section 5.1. The rest of the encoding was built by hand. 
+ # + # This encoding matches the following set of values: + # Response Payload + # Unique Identifier - 2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6 + # Usage Limits Count - 500 + # Cryptographic Usage Mask - Encrypt | Decrypt (4 | 8 -> 12 or C) + # Lease Time - 0 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x60' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x32\x63\x32\x33\x32\x31\x37\x65\x2D\x66\x35\x33\x63\x2D\x34\x62' + b'\x64\x66\x2D\x61\x64\x30\x61\x2D\x35\x38\x61\x33\x31\x66\x64\x33' + b'\x64\x34\x62\x36\x00\x00\x00\x00' + b'\x42\x00\x96\x03\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x01\xF4' + b'\x42\x00\x2C\x02\x00\x00\x00\x04\x00\x00\x00\x0C\x00\x00\x00\x00' + b'\x42\x00\x49\x0A\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00' + ) + + # This encoding matches the following set of values: + # Response Payload + # Unique Identifier - 2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6 + # Usage Limits Count - 500 + self.partial_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x40' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x32\x63\x32\x33\x32\x31\x37\x65\x2D\x66\x35\x33\x63\x2D\x34\x62' + b'\x64\x66\x2D\x61\x64\x30\x61\x2D\x35\x38\x61\x33\x31\x66\x64\x33' + b'\x64\x34\x62\x36\x00\x00\x00\x00' + b'\x42\x00\x96\x03\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x01\xF4' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestCheckResponsePayload, self).tearDown() + + def test_init(self): + """ + Test that a Check response payload can be constructed with no + arguments. + """ + payload = payloads.CheckResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + def test_init_with_args(self): + """ + Test that a Check response payload can be constructed with valid + values. 
+ """ + payload = payloads.CheckResponsePayload( + unique_identifier='00000000-1111-2222-3333-444444444444', + usage_limits_count=10, + cryptographic_usage_mask=12, + lease_time=1000000000 + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + self.assertEqual(10, payload.usage_limits_count) + self.assertEqual(12, payload.cryptographic_usage_mask) + self.assertEqual(1000000000, payload.lease_time) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of a Check response payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.CheckResponsePayload, + **kwargs + ) + + payload = payloads.CheckResponsePayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_invalid_usage_limits_count(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the usage limits count of a Check response payload. + """ + kwargs = {'usage_limits_count': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Usage limits count must be an integer.", + payloads.CheckResponsePayload, + **kwargs + ) + + payload = payloads.CheckResponsePayload() + args = (payload, 'usage_limits_count', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Usage limits count must be an integer.", + setattr, + *args + ) + + def test_invalid_cryptographic_usage_mask(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the cryptographic usage mask of a Check response payload. 
+ """ + kwargs = {'cryptographic_usage_mask': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Cryptographic usage mask must be an integer.", + payloads.CheckResponsePayload, + **kwargs + ) + + payload = payloads.CheckResponsePayload() + args = (payload, 'cryptographic_usage_mask', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Cryptographic usage mask must be an integer.", + setattr, + *args + ) + + def test_invalid_lease_time(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the lease time of a Check response payload. + """ + kwargs = {'lease_time': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Lease time must be an integer.", + payloads.CheckResponsePayload, + **kwargs + ) + + payload = payloads.CheckResponsePayload() + args = (payload, 'lease_time', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Lease time must be an integer.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Check response payload can be read from a data stream. + """ + payload = payloads.CheckResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + payload.read(self.full_encoding) + + self.assertEqual( + '2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + payload.unique_identifier + ) + self.assertEqual(500, payload.usage_limits_count) + self.assertEqual(12, payload.cryptographic_usage_mask) + self.assertEqual(0, payload.lease_time) + + def test_read_partial(self): + """ + Test that a Check response payload can be read from a partial data + stream. 
+ """ + payload = payloads.CheckResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + payload.read(self.partial_encoding) + + self.assertEqual( + '2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + payload.unique_identifier + ) + self.assertEqual(500, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + def test_read_empty(self): + """ + Test that a Check response payload can be read from an empty data + stream. + """ + payload = payloads.CheckResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + self.assertEqual(None, payload.cryptographic_usage_mask) + self.assertEqual(None, payload.lease_time) + + def test_write(self): + """ + Test that a Check response payload can be written to a data stream. + """ + payload = payloads.CheckResponsePayload( + unique_identifier='2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + usage_limits_count=500, + cryptographic_usage_mask=12, + lease_time=0 + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_partial(self): + """ + Test that a partial Check response payload can be written to a data + stream. 
+ """ + payload = payloads.CheckResponsePayload( + unique_identifier='2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + usage_limits_count=500 + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.partial_encoding), len(stream)) + self.assertEqual(str(self.partial_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Check response payload can be written to a data + stream. + """ + payload = payloads.CheckResponsePayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + Check response payloads with the same data. + """ + a = payloads.CheckResponsePayload() + b = payloads.CheckResponsePayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.CheckResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200, + cryptographic_usage_mask=4, + lease_time=1511882848 + ) + b = payloads.CheckResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200, + cryptographic_usage_mask=4, + lease_time=1511882848 + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + Check response payloads with different unique identifiers. + """ + a = payloads.CheckResponsePayload( + unique_identifier='a' + ) + b = payloads.CheckResponsePayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_usage_limits_count(self): + """ + Test that the equality operator returns False when comparing two + Check response payloads with different usage limits counts. 
+ """ + a = payloads.CheckResponsePayload( + usage_limits_count=0 + ) + b = payloads.CheckResponsePayload( + usage_limits_count=1 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_cryptographic_usage_mask(self): + """ + Test that the equality operator returns False when comparing two + Check response payloads with different cryptographic usage masks. + """ + a = payloads.CheckResponsePayload( + cryptographic_usage_mask=4 + ) + b = payloads.CheckResponsePayload( + cryptographic_usage_mask=12 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_lease_time(self): + """ + Test that the equality operator returns False when comparing two + Check response payloads with different lease times. + """ + a = payloads.CheckResponsePayload( + lease_time=0 + ) + b = payloads.CheckResponsePayload( + lease_time=1511882848 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + Check response payloads with different types. + """ + a = payloads.CheckResponsePayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Check response payloads with the same data. 
+ """ + a = payloads.CheckResponsePayload() + b = payloads.CheckResponsePayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.CheckResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200, + cryptographic_usage_mask=4, + lease_time=1511882848 + ) + b = payloads.CheckResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200, + cryptographic_usage_mask=4, + lease_time=1511882848 + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + Check response payloads with different unique identifiers. + """ + a = payloads.CheckResponsePayload( + unique_identifier='a' + ) + b = payloads.CheckResponsePayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_usage_limits_count(self): + """ + Test that the inequality operator returns True when comparing two + Check response payloads with different usage limits counts. + """ + a = payloads.CheckResponsePayload( + usage_limits_count=0 + ) + b = payloads.CheckResponsePayload( + usage_limits_count=1 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_cryptographic_usage_mask(self): + """ + Test that the inequality operator returns True when comparing two + Check response payloads with different cryptographic usage masks. + """ + a = payloads.CheckResponsePayload( + cryptographic_usage_mask=4 + ) + b = payloads.CheckResponsePayload( + cryptographic_usage_mask=12 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_lease_time(self): + """ + Test that the inequality operator returns True when comparing two + Check response payloads with different lease times. 
+ """ + a = payloads.CheckResponsePayload( + lease_time=0 + ) + b = payloads.CheckResponsePayload( + lease_time=1511882848 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Check response payloads with different types. + """ + a = payloads.CheckResponsePayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Check response payload. + """ + payload = payloads.CheckResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=1000, + cryptographic_usage_mask=8, + lease_time=1511882898 + ) + expected = ( + "CheckResponsePayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', " + "usage_limits_count=1000, " + "cryptographic_usage_mask=8, " + "lease_time=1511882898)" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Check response payload + """ + payload = payloads.CheckResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=1000, + cryptographic_usage_mask=8, + lease_time=1511882898 + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038', + 'usage_limits_count': 1000, + 'cryptographic_usage_mask': 8, + 'lease_time': 1511882898 + }) + observed = str(payload) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_discover_versions.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_discover_versions.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_discover_versions.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_discover_versions.py 2018-04-02 17:12:18.000000000 +0000 @@ -30,10 +30,10 @@ 
self.protocol_versions_empty = list() self.protocol_versions_one = list() - self.protocol_versions_one.append(ProtocolVersion.create(1, 0)) + self.protocol_versions_one.append(ProtocolVersion(1, 0)) self.protocol_versions_two = list() - self.protocol_versions_two.append(ProtocolVersion.create(1, 1)) - self.protocol_versions_two.append(ProtocolVersion.create(1, 0)) + self.protocol_versions_two.append(ProtocolVersion(1, 1)) + self.protocol_versions_two.append(ProtocolVersion(1, 0)) self.encoding_empty = utils.BytearrayStream(( b'\x42\x00\x79\x01\x00\x00\x00\x00')) @@ -157,10 +157,10 @@ self.protocol_versions_empty = list() self.protocol_versions_one = list() - self.protocol_versions_one.append(ProtocolVersion.create(1, 0)) + self.protocol_versions_one.append(ProtocolVersion(1, 0)) self.protocol_versions_two = list() - self.protocol_versions_two.append(ProtocolVersion.create(1, 1)) - self.protocol_versions_two.append(ProtocolVersion.create(1, 0)) + self.protocol_versions_two.append(ProtocolVersion(1, 1)) + self.protocol_versions_two.append(ProtocolVersion(1, 0)) self.encoding_empty = utils.BytearrayStream(( b'\x42\x00\x7C\x01\x00\x00\x00\x00')) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_get_usage_allocation.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_get_usage_allocation.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_get_usage_allocation.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_get_usage_allocation.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,634 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import testtools + +from kmip.core import utils +from kmip.core.messages import payloads + + +class TestGetUsageAllocationRequestPayload(testtools.TestCase): + """ + Test suite for the GetUsageAllocation request payload. + """ + + def setUp(self): + super(TestGetUsageAllocationRequestPayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 5.1. + # + # This encoding matches the following set of values: + # Request Payload + # Unique Identifier - 2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6 + # Usage Limits Count - 500 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x40' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x32\x63\x32\x33\x32\x31\x37\x65\x2D\x66\x35\x33\x63\x2D\x34\x62' + b'\x64\x66\x2D\x61\x64\x30\x61\x2D\x35\x38\x61\x33\x31\x66\x64\x33' + b'\x64\x34\x62\x36\x00\x00\x00\x00' + b'\x42\x00\x96\x03\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x01\xF4' + ) + + # This encoding matches the following set of values: + # Request Payload + # Usage Limits Count - 500 + self.partial_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x10' + b'\x42\x00\x96\x03\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x01\xF4' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestGetUsageAllocationRequestPayload, self).tearDown() + + def test_init(self): + """ + Test that a GetUsageAllocation request payload can be constructed with + no arguments. 
+ """ + payload = payloads.GetUsageAllocationRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + + def test_init_with_args(self): + """ + Test that a GetUsageAllocation request payload can be constructed with + valid values. + """ + payload = payloads.GetUsageAllocationRequestPayload( + unique_identifier='00000000-1111-2222-3333-444444444444', + usage_limits_count=10 + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + self.assertEqual(10, payload.usage_limits_count) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of a GetUsageAllocation request payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.GetUsageAllocationRequestPayload, + **kwargs + ) + + payload = payloads.GetUsageAllocationRequestPayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_invalid_usage_limits_count(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the usage limits count of a GetUsageAllocation request payload. + """ + kwargs = {'usage_limits_count': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Usage limits count must be an integer.", + payloads.GetUsageAllocationRequestPayload, + **kwargs + ) + + payload = payloads.GetUsageAllocationRequestPayload() + args = (payload, 'usage_limits_count', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Usage limits count must be an integer.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a GetUsageAllocation request payload can be read from a data + stream. 
+ """ + payload = payloads.GetUsageAllocationRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + + payload.read(self.full_encoding) + + self.assertEqual( + '2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + payload.unique_identifier + ) + self.assertEqual(500, payload.usage_limits_count) + + def test_read_partial(self): + """ + Test that a GetUsageAllocation request payload can be read from a + partial data stream. + """ + payload = payloads.GetUsageAllocationRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + + payload.read(self.partial_encoding) + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(500, payload.usage_limits_count) + + def test_read_empty(self): + """ + Test that a GetUsageAllocation request payload can be read from an + empty data stream. + """ + payload = payloads.GetUsageAllocationRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.usage_limits_count) + + def test_write(self): + """ + Test that a GetUsageAllocation request payload can be written to a + data stream. + """ + payload = payloads.GetUsageAllocationRequestPayload( + unique_identifier='2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + usage_limits_count=500 + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_partial(self): + """ + Test that a partial GetUsageAllocation request payload can be written + to a data stream. 
+ """ + payload = payloads.GetUsageAllocationRequestPayload( + usage_limits_count=500 + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.partial_encoding), len(stream)) + self.assertEqual(str(self.partial_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty GetUsageAllocation request payload can be written + to a data stream. + """ + payload = payloads.GetUsageAllocationRequestPayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + GetUsageAllocation request payloads with the same data. + """ + a = payloads.GetUsageAllocationRequestPayload() + b = payloads.GetUsageAllocationRequestPayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.GetUsageAllocationRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200 + ) + b = payloads.GetUsageAllocationRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200 + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + GetUsageAllocation request payloads with different unique identifiers. + """ + a = payloads.GetUsageAllocationRequestPayload( + unique_identifier='a' + ) + b = payloads.GetUsageAllocationRequestPayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_usage_limits_count(self): + """ + Test that the equality operator returns False when comparing two + GetUsageAllocation request payloads with different usage limits counts. 
+ """ + a = payloads.GetUsageAllocationRequestPayload( + usage_limits_count=0 + ) + b = payloads.GetUsageAllocationRequestPayload( + usage_limits_count=1 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + GetUsageAllocation request payloads with different types. + """ + a = payloads.GetUsageAllocationRequestPayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + GetUsageAllocation request payloads with the same data. + """ + a = payloads.GetUsageAllocationRequestPayload() + b = payloads.GetUsageAllocationRequestPayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.GetUsageAllocationRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200 + ) + b = payloads.GetUsageAllocationRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=200 + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + GetUsageAllocation request payloads with different unique identifiers. + """ + a = payloads.GetUsageAllocationRequestPayload( + unique_identifier='a' + ) + b = payloads.GetUsageAllocationRequestPayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_usage_limits_count(self): + """ + Test that the inequality operator returns True when comparing two + GetUsageAllocation request payloads with different usage limits counts. 
+ """ + a = payloads.GetUsageAllocationRequestPayload( + usage_limits_count=0 + ) + b = payloads.GetUsageAllocationRequestPayload( + usage_limits_count=1 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + GetUsageAllocation request payloads with different types. + """ + a = payloads.GetUsageAllocationRequestPayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a GetUsageAllocation request payload. + """ + payload = payloads.GetUsageAllocationRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=1000 + ) + expected = ( + "GetUsageAllocationRequestPayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', " + "usage_limits_count=1000)" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a GetUsageAllocation request payload. + """ + payload = payloads.GetUsageAllocationRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + usage_limits_count=1000 + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038', + 'usage_limits_count': 1000 + }) + observed = str(payload) + + self.assertEqual(expected, observed) + + +class TestGetUsageAllocationResponsePayload(testtools.TestCase): + """ + Test suite for the GetUsageAllocation response payload. + """ + + def setUp(self): + super(TestGetUsageAllocationResponsePayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 5.1. 
+ # + # This encoding matches the following set of values: + # Response Payload + # Unique Identifier - 2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x30' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x32\x63\x32\x33\x32\x31\x37\x65\x2D\x66\x35\x33\x63\x2D\x34\x62' + b'\x64\x66\x2D\x61\x64\x30\x61\x2D\x35\x38\x61\x33\x31\x66\x64\x33' + b'\x64\x34\x62\x36\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestGetUsageAllocationResponsePayload, self).tearDown() + + def test_init(self): + """ + Test that a GetUsageAllocation response payload can be constructed + with no arguments. + """ + payload = payloads.GetUsageAllocationResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + + def test_init_with_args(self): + """ + Test that a GetUsageAllocation response payload can be constructed + with valid values. + """ + payload = payloads.GetUsageAllocationResponsePayload( + unique_identifier='00000000-1111-2222-3333-444444444444' + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of a GetUsageAllocation response payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.GetUsageAllocationResponsePayload, + **kwargs + ) + + payload = payloads.GetUsageAllocationResponsePayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a GetUsageAllocation response payload can be read from a + data stream. 
+ """ + payload = payloads.GetUsageAllocationResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.full_encoding) + + self.assertEqual( + '2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6', + payload.unique_identifier + ) + + def test_read_empty(self): + """ + Test that a GetUsageAllocation response payload can be read from an + empty data stream. + """ + payload = payloads.GetUsageAllocationResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + + def test_write(self): + """ + Test that a GetUsageAllocation response payload can be written to a + data stream. + """ + payload = payloads.GetUsageAllocationResponsePayload( + unique_identifier='2c23217e-f53c-4bdf-ad0a-58a31fd3d4b6' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty GetUsageAllocation response payload can be written + to a data stream. + """ + payload = payloads.GetUsageAllocationResponsePayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + GetUsageAllocation response payloads with the same data. 
+ """ + a = payloads.GetUsageAllocationResponsePayload() + b = payloads.GetUsageAllocationResponsePayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.GetUsageAllocationResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.GetUsageAllocationResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + GetUsageAllocation response payloads with different unique identifiers. + """ + a = payloads.GetUsageAllocationResponsePayload( + unique_identifier='a' + ) + b = payloads.GetUsageAllocationResponsePayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + GetUsageAllocation response payloads with different types. + """ + a = payloads.GetUsageAllocationResponsePayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + GetUsageAllocation response payloads with the same data. 
+ """ + a = payloads.GetUsageAllocationResponsePayload() + b = payloads.GetUsageAllocationResponsePayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.GetUsageAllocationResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.GetUsageAllocationResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + GetUsageAllocation response payloads with different unique identifiers. + """ + a = payloads.GetUsageAllocationResponsePayload( + unique_identifier='a' + ) + b = payloads.GetUsageAllocationResponsePayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + GetUsageAllocation response payloads with different types. + """ + a = payloads.GetUsageAllocationResponsePayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a GetUsageAllocation response payload. 
+ """ + payload = payloads.GetUsageAllocationResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + expected = ( + "GetUsageAllocationResponsePayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038')" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a GetUsageAllocation response payload + """ + payload = payloads.GetUsageAllocationResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038' + }) + observed = str(payload) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_obtain_lease.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_obtain_lease.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_obtain_lease.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_obtain_lease.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,712 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import testtools + +from kmip.core import utils +from kmip.core.messages import payloads + + +class TestObtainLeaseRequestPayload(testtools.TestCase): + """ + Test suite for the ObtainLease request payload. 
+ """ + + def setUp(self): + super(TestObtainLeaseRequestPayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 9.5. + # + # This encoding matches the following set of values: + # Request Payload + # Unique Identifier - f4152f17-9312-431a-b3fb-4fe86a86a7a1 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x30' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x66\x34\x31\x35\x32\x66\x31\x37\x2D\x39\x33\x31\x32\x2D\x34\x33' + b'\x31\x61\x2D\x62\x33\x66\x62\x2D\x34\x66\x65\x38\x36\x61\x38\x36' + b'\x61\x37\x61\x31\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestObtainLeaseRequestPayload, self).tearDown() + + def test_init(self): + """ + Test that an ObtainLease request payload can be constructed with no + arguments. + """ + payload = payloads.ObtainLeaseRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + + def test_init_with_args(self): + """ + Test that an ObtainLease request payload can be constructed with valid + values. + """ + payload = payloads.ObtainLeaseRequestPayload( + unique_identifier='00000000-1111-2222-3333-444444444444' + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of an ObtainLease request payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.ObtainLeaseRequestPayload, + **kwargs + ) + + payload = payloads.ObtainLeaseRequestPayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_read(self): + """ + Test that an ObtainLease request payload can be read from a data + stream. 
+ """ + payload = payloads.ObtainLeaseRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.full_encoding) + + self.assertEqual( + 'f4152f17-9312-431a-b3fb-4fe86a86a7a1', + payload.unique_identifier + ) + + def test_read_empty(self): + """ + Test that an ObtainLease request payload can be read from an empty + data stream. + """ + payload = payloads.ObtainLeaseRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + + def test_write(self): + """ + Test that an ObtainLease request payload can be written to a data + stream. + """ + payload = payloads.ObtainLeaseRequestPayload( + unique_identifier='f4152f17-9312-431a-b3fb-4fe86a86a7a1' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty ObtainLease request payload can be written to a + data stream. + """ + payload = payloads.ObtainLeaseRequestPayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + ObtainLease request payloads with the same data. 
+ """ + a = payloads.ObtainLeaseRequestPayload() + b = payloads.ObtainLeaseRequestPayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.ObtainLeaseRequestPayload( + unique_identifier='f4152f17-9312-431a-b3fb-4fe86a86a7a1' + ) + b = payloads.ObtainLeaseRequestPayload( + unique_identifier='f4152f17-9312-431a-b3fb-4fe86a86a7a1' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + ObtainLease request payloads with different unique identifiers. + """ + a = payloads.ObtainLeaseRequestPayload( + unique_identifier='a' + ) + b = payloads.ObtainLeaseRequestPayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + ObtainLease request payloads with different types. + """ + a = payloads.ObtainLeaseRequestPayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + ObtainLease request payloads with the same data. + """ + a = payloads.ObtainLeaseRequestPayload() + b = payloads.ObtainLeaseRequestPayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.ObtainLeaseRequestPayload( + unique_identifier='f4152f17-9312-431a-b3fb-4fe86a86a7a1' + ) + b = payloads.ObtainLeaseRequestPayload( + unique_identifier='f4152f17-9312-431a-b3fb-4fe86a86a7a1' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + ObtainLease request payloads with different unique identifiers. 
+ """ + a = payloads.ObtainLeaseRequestPayload( + unique_identifier='a' + ) + b = payloads.ObtainLeaseRequestPayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + ObtainLease request payloads with different types. + """ + a = payloads.ObtainLeaseRequestPayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to an ObtainLease request payload. + """ + payload = payloads.ObtainLeaseRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + expected = ( + "ObtainLeaseRequestPayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038')" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to an ObtainLease request payload + """ + payload = payloads.ObtainLeaseRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038' + }) + observed = str(payload) + + self.assertEqual(expected, observed) + + +class TestObtainLeaseResponsePayload(testtools.TestCase): + """ + Test suite for the ObtainLease response payload. + """ + + def setUp(self): + super(TestObtainLeaseResponsePayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 9.5. 
+ # + # This encoding matches the following set of values: + # Response Payload + # Unique Identifier - f4152f17-9312-431a-b3fb-4fe86a86a7a1 + # Lease Time - 0 + # Last Change Date - 0x4F9A5564 (Fri Apr 27 10:14:28 CEST 2012) + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x50' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x66\x34\x31\x35\x32\x66\x31\x37\x2D\x39\x33\x31\x32\x2D\x34\x33' + b'\x31\x61\x2D\x62\x33\x66\x62\x2D\x34\x66\x65\x38\x36\x61\x38\x36' + b'\x61\x37\x61\x31\x00\x00\x00\x00' + b'\x42\x00\x49\x0A\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\x48\x09\x00\x00\x00\x08\x00\x00\x00\x00\x4F\x9A\x55\x64' + ) + + # This encoding matches the following set of values: + # Response Payload + # Unique Identifier - f4152f17-9312-431a-b3fb-4fe86a86a7a1 + self.partial_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x30' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x66\x34\x31\x35\x32\x66\x31\x37\x2D\x39\x33\x31\x32\x2D\x34\x33' + b'\x31\x61\x2D\x62\x33\x66\x62\x2D\x34\x66\x65\x38\x36\x61\x38\x36' + b'\x61\x37\x61\x31\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestObtainLeaseResponsePayload, self).tearDown() + + def test_init(self): + """ + Test that an ObtainLease response payload can be constructed with no + arguments. + """ + payload = payloads.ObtainLeaseResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.lease_time) + self.assertEqual(None, payload.last_change_date) + + def test_init_with_args(self): + """ + Test that an ObtainLease response payload can be constructed with valid + values. 
+ """ + payload = payloads.ObtainLeaseResponsePayload( + unique_identifier='00000000-1111-2222-3333-444444444444', + lease_time=1000000000, + last_change_date=1512400153 + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + self.assertEqual(1000000000, payload.lease_time) + self.assertEqual(1512400153, payload.last_change_date) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of an ObtainLease response payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.ObtainLeaseResponsePayload, + **kwargs + ) + + payload = payloads.ObtainLeaseResponsePayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_invalid_lease_time(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the lease time of an ObtainLease response payload. + """ + kwargs = {'lease_time': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Lease time must be an integer.", + payloads.ObtainLeaseResponsePayload, + **kwargs + ) + + payload = payloads.ObtainLeaseResponsePayload() + args = (payload, 'lease_time', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Lease time must be an integer.", + setattr, + *args + ) + + def test_invalid_last_change_date(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the last change date of an ObtainLease response payload. 
+ """ + kwargs = {'last_change_date': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Last change date must be an integer.", + payloads.ObtainLeaseResponsePayload, + **kwargs + ) + + payload = payloads.ObtainLeaseResponsePayload() + args = (payload, 'last_change_date', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Last change date must be an integer.", + setattr, + *args + ) + + def test_read(self): + """ + Test that an ObtainLease response payload can be read from a data + stream. + """ + payload = payloads.ObtainLeaseResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.lease_time) + self.assertEqual(None, payload.last_change_date) + + payload.read(self.full_encoding) + + self.assertEqual( + 'f4152f17-9312-431a-b3fb-4fe86a86a7a1', + payload.unique_identifier + ) + self.assertEqual(0, payload.lease_time) + self.assertEqual(1335514468, payload.last_change_date) + + def test_read_partial(self): + """ + Test that an ObtainLease response payload can be read from a partial + data stream. + """ + payload = payloads.ObtainLeaseResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.lease_time) + self.assertEqual(None, payload.last_change_date) + + payload.read(self.partial_encoding) + + self.assertEqual( + 'f4152f17-9312-431a-b3fb-4fe86a86a7a1', + payload.unique_identifier + ) + self.assertEqual(None, payload.lease_time) + self.assertEqual(None, payload.last_change_date) + + def test_read_empty(self): + """ + Test that an ObtainLease response payload can be read from an empty + data stream. 
+ """ + payload = payloads.ObtainLeaseResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.lease_time) + self.assertEqual(None, payload.last_change_date) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.lease_time) + self.assertEqual(None, payload.last_change_date) + + def test_write(self): + """ + Test that an ObtainLease response payload can be written to a data + stream. + """ + payload = payloads.ObtainLeaseResponsePayload( + unique_identifier='f4152f17-9312-431a-b3fb-4fe86a86a7a1', + lease_time=0, + last_change_date=1335514468 + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_partial(self): + """ + Test that a partial ObtainLease response payload can be written to a + data stream. + """ + payload = payloads.ObtainLeaseResponsePayload( + unique_identifier='f4152f17-9312-431a-b3fb-4fe86a86a7a1' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.partial_encoding), len(stream)) + self.assertEqual(str(self.partial_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty ObtainLease response payload can be written to a + data stream. + """ + payload = payloads.ObtainLeaseResponsePayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + ObtainLease response payloads with the same data. 
+ """ + a = payloads.ObtainLeaseResponsePayload() + b = payloads.ObtainLeaseResponsePayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.ObtainLeaseResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + lease_time=1511882848, + last_change_date=1512410153 + ) + b = payloads.ObtainLeaseResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + lease_time=1511882848, + last_change_date=1512410153 + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + ObtainLease response payloads with different unique identifiers. + """ + a = payloads.ObtainLeaseResponsePayload( + unique_identifier='a' + ) + b = payloads.ObtainLeaseResponsePayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_lease_time(self): + """ + Test that the equality operator returns False when comparing two + ObtainLease response payloads with different lease times. + """ + a = payloads.ObtainLeaseResponsePayload( + lease_time=0 + ) + b = payloads.ObtainLeaseResponsePayload( + lease_time=1511882848 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_last_change_date(self): + """ + Test that the equality operator returns False when comparing two + ObtainLease response payloads with different last change dates. + """ + a = payloads.ObtainLeaseResponsePayload( + last_change_date=0 + ) + b = payloads.ObtainLeaseResponsePayload( + last_change_date=1511882848 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + ObtainLease response payloads with different types. 
+ """ + a = payloads.ObtainLeaseResponsePayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + ObtainLease response payloads with the same data. + """ + a = payloads.ObtainLeaseResponsePayload() + b = payloads.ObtainLeaseResponsePayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.ObtainLeaseResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + lease_time=1511882848, + last_change_date=0 + ) + b = payloads.ObtainLeaseResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + lease_time=1511882848, + last_change_date=0 + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + ObtainLease response payloads with different unique identifiers. + """ + a = payloads.ObtainLeaseResponsePayload( + unique_identifier='a' + ) + b = payloads.ObtainLeaseResponsePayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_lease_time(self): + """ + Test that the inequality operator returns True when comparing two + ObtainLease response payloads with different lease times. + """ + a = payloads.ObtainLeaseResponsePayload( + lease_time=0 + ) + b = payloads.ObtainLeaseResponsePayload( + lease_time=1511882848 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_last_change_time(self): + """ + Test that the inequality operator returns True when comparing two + ObtainLease response payloads with different last change time. 
+ """ + a = payloads.ObtainLeaseResponsePayload( + lease_time=0 + ) + b = payloads.ObtainLeaseResponsePayload( + lease_time=1511882848 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + ObtainLease response payloads with different types. + """ + a = payloads.ObtainLeaseResponsePayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to an ObtainLease response payload. + """ + payload = payloads.ObtainLeaseResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + lease_time=1511882898, + last_change_date=1512410153 + ) + expected = ( + "ObtainLeaseResponsePayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', " + "lease_time=1511882898, " + "last_change_date=1512410153)" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to an ObtainLease response payload. + """ + payload = payloads.ObtainLeaseResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + lease_time=1511882898, + last_change_date=1512410153 + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038', + 'lease_time': 1511882898, + 'last_change_date': 1512410153 + }) + observed = str(payload) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_poll.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_poll.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_poll.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_poll.py 2017-12-08 19:18:07.000000000 +0000 @@ -0,0 +1,264 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import testtools + +from kmip.core import utils +from kmip.core.messages import payloads + + +class TestPollRequestPayload(testtools.TestCase): + """ + Test suite for the Poll request payload. + """ + + def setUp(self): + super(TestPollRequestPayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 10.1. + # + # This encoding matches the following set of values: + # Request Payload + # Asynchronous Correlation Value - 0xE7125DE85B3C90A6 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x10' + b'\x42\x00\x06\x08\x00\x00\x00\x08\xE7\x12\x5D\xE8\x5B\x3C\x90\xA6' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestPollRequestPayload, self).tearDown() + + def test_init(self): + """ + Test that a Poll request payload can be constructed with no arguments. + """ + payload = payloads.PollRequestPayload() + + self.assertEqual(None, payload.asynchronous_correlation_value) + + def test_init_with_args(self): + """ + Test that an Poll request payload can be constructed with valid + values. 
+ """ + payload = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\x01' + ) + + self.assertEqual(b'\x01', payload.asynchronous_correlation_value) + + def test_invalid_asynchronous_correlation_value(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the asynchronous correlation value of an Poll request payload. + """ + kwargs = {'asynchronous_correlation_value': 0} + self.assertRaisesRegexp( + TypeError, + "Asynchronous correlation value must be bytes.", + payloads.PollRequestPayload, + **kwargs + ) + + payload = payloads.PollRequestPayload() + args = (payload, 'asynchronous_correlation_value', 0) + self.assertRaisesRegexp( + TypeError, + "Asynchronous correlation value must be bytes.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Poll request payload can be read from a data stream. + """ + payload = payloads.PollRequestPayload() + + self.assertEqual(None, payload.asynchronous_correlation_value) + + payload.read(self.full_encoding) + + self.assertEqual( + b'\xE7\x12\x5D\xE8\x5B\x3C\x90\xA6', + payload.asynchronous_correlation_value + ) + + def test_read_empty(self): + """ + Test that an Poll request payload can be read from an empty data + stream. + """ + payload = payloads.PollRequestPayload() + + self.assertEqual(None, payload.asynchronous_correlation_value) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.asynchronous_correlation_value) + + def test_write(self): + """ + Test that a Poll request payload can be written to a data stream. + """ + payload = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\xE7\x12\x5D\xE8\x5B\x3C\x90\xA6' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Poll request payload can be written to a data + stream. 
+ """ + payload = payloads.PollRequestPayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two Poll + request payloads with the same data. + """ + a = payloads.PollRequestPayload() + b = payloads.PollRequestPayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88' + ) + b = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_asynchronous_correlation_value(self): + """ + Test that the equality operator returns False when comparing two Poll + request payloads with different asynchronous correlation values. + """ + a = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\xaa' + ) + b = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\xbb' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two Poll + request payloads with different types. + """ + a = payloads.PollRequestPayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two Poll + request payloads with the same data. 
+ """ + a = payloads.PollRequestPayload() + b = payloads.PollRequestPayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88' + ) + b = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\x49\xa1\xca\x88' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_asynchronous_correlation_value(self): + """ + Test that the inequality operator returns True when comparing two Poll + request payloads with different asynchronous correlation values. + """ + a = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\xaa' + ) + b = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\xbb' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two Poll + request payloads with different types. + """ + a = payloads.PollRequestPayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Poll request payload. + """ + payload = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\xaa' + ) + expected = ( + "PollRequestPayload(" + "asynchronous_correlation_value=" + str(b'\xaa') + ")" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Poll request payload. 
+ """ + payload = payloads.PollRequestPayload( + asynchronous_correlation_value=b'\xaa' + ) + + expected = str({ + 'asynchronous_correlation_value': b'\xaa' + }) + observed = str(payload) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_recover.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_recover.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_recover.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_recover.py 2017-12-08 17:36:18.000000000 +0000 @@ -0,0 +1,523 @@ +# Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import testtools + +from kmip.core import utils +from kmip.core.messages import payloads + + +class TestRecoverRequestPayload(testtools.TestCase): + """ + Test suite for the Recover request payload. + """ + + def setUp(self): + super(TestRecoverRequestPayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 10.1. 
+ # + # This encoding matches the following set of values: + # Request Payload + # Unique Identifier - f613dba1-b557-489a-87c5-3c0ecd4294e3 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x30' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x66\x36\x31\x33\x64\x62\x61\x31\x2D\x62\x35\x35\x37\x2D\x34\x38' + b'\x39\x61\x2D\x38\x37\x63\x35\x2D\x33\x63\x30\x65\x63\x64\x34\x32' + b'\x39\x34\x65\x33\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestRecoverRequestPayload, self).tearDown() + + def test_init(self): + """ + Test that a Recover request payload can be constructed with no + arguments. + """ + payload = payloads.RecoverRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + + def test_init_with_args(self): + """ + Test that a Recover request payload can be constructed with valid + values. + """ + payload = payloads.RecoverRequestPayload( + unique_identifier='00000000-1111-2222-3333-444444444444' + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of a Recover request payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.RecoverRequestPayload, + **kwargs + ) + + payload = payloads.RecoverRequestPayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Recover request payload can be read from a data stream. 
+ """ + payload = payloads.RecoverRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.full_encoding) + + self.assertEqual( + 'f613dba1-b557-489a-87c5-3c0ecd4294e3', + payload.unique_identifier + ) + + def test_read_empty(self): + """ + Test that a Recover request payload can be read from an empty data + stream. + """ + payload = payloads.RecoverRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + + def test_write(self): + """ + Test that a Recover request payload can be written to a data stream. + """ + payload = payloads.RecoverRequestPayload( + unique_identifier='f613dba1-b557-489a-87c5-3c0ecd4294e3' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Recover request payload can be written + to a data stream. + """ + payload = payloads.RecoverRequestPayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + Recover request payloads with the same data. 
+ """ + a = payloads.RecoverRequestPayload() + b = payloads.RecoverRequestPayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.RecoverRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.RecoverRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + Recover request payloads with different unique identifiers. + """ + a = payloads.RecoverRequestPayload( + unique_identifier='a' + ) + b = payloads.RecoverRequestPayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + Recover request payloads with different types. + """ + a = payloads.RecoverRequestPayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Recover request payloads with the same data. + """ + a = payloads.RecoverRequestPayload() + b = payloads.RecoverRequestPayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.RecoverRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.RecoverRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + Recover request payloads with different unique identifiers. 
+ """ + a = payloads.RecoverRequestPayload( + unique_identifier='a' + ) + b = payloads.RecoverRequestPayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Recover request payloads with different types. + """ + a = payloads.RecoverRequestPayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Recover request payload. + """ + payload = payloads.RecoverRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + expected = ( + "RecoverRequestPayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038')" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Recover request payload. + """ + payload = payloads.RecoverRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038' + }) + observed = str(payload) + + self.assertEqual(expected, observed) + + +class TestRecoverResponsePayload(testtools.TestCase): + """ + Test suite for the Recover response payload. + """ + + def setUp(self): + super(TestRecoverResponsePayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 10.1. 
+ # + # This encoding matches the following set of values: + # Response Payload + # Unique Identifier - f613dba1-b557-489a-87c5-3c0ecd4294e3 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x30' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x66\x36\x31\x33\x64\x62\x61\x31\x2D\x62\x35\x35\x37\x2D\x34\x38' + b'\x39\x61\x2D\x38\x37\x63\x35\x2D\x33\x63\x30\x65\x63\x64\x34\x32' + b'\x39\x34\x65\x33\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestRecoverResponsePayload, self).tearDown() + + def test_init(self): + """ + Test that a Recover response payload can be constructed with no + arguments. + """ + payload = payloads.RecoverResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + + def test_init_with_args(self): + """ + Test that a Recover response payload can be constructed with valid + values. + """ + payload = payloads.RecoverResponsePayload( + unique_identifier='00000000-1111-2222-3333-444444444444' + ) + + self.assertEqual( + '00000000-1111-2222-3333-444444444444', + payload.unique_identifier + ) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of a Recover response payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.RecoverResponsePayload, + **kwargs + ) + + payload = payloads.RecoverResponsePayload() + args = (payload, 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Recover response payload can be read from a data stream. 
+ """ + payload = payloads.RecoverResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.full_encoding) + + self.assertEqual( + 'f613dba1-b557-489a-87c5-3c0ecd4294e3', + payload.unique_identifier + ) + + def test_read_empty(self): + """ + Test that a Recover response payload can be read from an empty data + stream. + """ + payload = payloads.RecoverResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + + def test_write(self): + """ + Test that a Recover response payload can be written to a data stream. + """ + payload = payloads.RecoverResponsePayload( + unique_identifier='f613dba1-b557-489a-87c5-3c0ecd4294e3' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Recover response payload can be written to a data + stream. + """ + payload = payloads.RecoverResponsePayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + Recover response payloads with the same data. 
+ """ + a = payloads.RecoverResponsePayload() + b = payloads.RecoverResponsePayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.RecoverResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.RecoverResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two + Recover response payloads with different unique identifiers. + """ + a = payloads.RecoverResponsePayload( + unique_identifier='a' + ) + b = payloads.RecoverResponsePayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + Recover response payloads with different types. + """ + a = payloads.RecoverResponsePayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Recover response payloads with the same data. + """ + a = payloads.RecoverResponsePayload() + b = payloads.RecoverResponsePayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.RecoverResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + b = payloads.RecoverResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two + Recover response payloads with different unique identifiers. 
+ """ + a = payloads.RecoverResponsePayload( + unique_identifier='a' + ) + b = payloads.RecoverResponsePayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Recover response payloads with different types. + """ + a = payloads.RecoverResponsePayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Recover response payload. + """ + payload = payloads.RecoverResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + expected = ( + "RecoverResponsePayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038')" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Recover response payload + """ + payload = payloads.RecoverResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038' + ) + + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038' + }) + observed = str(payload) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_rekey.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_rekey.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/payloads/test_rekey.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/payloads/test_rekey.py 2018-04-16 18:14:10.000000000 +0000 @@ -0,0 +1,1304 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import testtools + +from kmip.core import enums +from kmip.core import objects +from kmip.core import primitives +from kmip.core import utils + +from kmip.core.messages import payloads + + +class TestRekeyRequestPayload(testtools.TestCase): + """ + Test suite for the Rekey request payload. + """ + + def setUp(self): + super(TestRekeyRequestPayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, + # Sections 9.2 and 9.4. + # + # This encoding matches the following set of values: + # Request Payload + # Unique Identifier - 1346d253-69d6-474c-8cd5-ad475a3e0a81 + # Offset - 0 + # Template Attribute + # Attribute + # Attribute Name - Activation Date + # Attribute Value - Sun Jan 01 12:00:00 CET 2006 + # Attribute + # Attribute Name - Process Start Date + # Attribute Value - Sun Jan 01 12:00:00 CET 2006 + # Attribute + # Attribute Name - Protect Stop Date + # Attribute Value - Wed Jan 01 12:00:00 CET 2020 + # Attribute + # Attribute Name - Deactivation Date + # Attribute Value - Wed Jan 01 12:00:00 CET 2020 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x01\x20' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x31\x33\x34\x36\x64\x32\x35\x33\x2D\x36\x39\x64\x36\x2D\x34\x37' + b'\x34\x63\x2D\x38\x63\x64\x35\x2D\x61\x64\x34\x37\x35\x61\x33\x65' + b'\x30\x61\x38\x31\x00\x00\x00\x00' + b'\x42\x00\x58\x0A\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\x91\x01\x00\x00\x00\xD8' + b'\x42\x00\x08\x01\x00\x00\x00\x28' + b'\x42\x00\x0A\x07\x00\x00\x00\x0F' + 
b'\x41\x63\x74\x69\x76\x61\x74\x69\x6F\x6E\x20\x44\x61\x74\x65\x00' + b'\x42\x00\x0B\x09\x00\x00\x00\x08\x00\x00\x00\x00\x43\xB7\xB6\x30' + b'\x42\x00\x08\x01\x00\x00\x00\x30' + b'\x42\x00\x0A\x07\x00\x00\x00\x12' + b'\x50\x72\x6F\x63\x65\x73\x73\x20\x53\x74\x61\x72\x74\x20\x44\x61' + b'\x74\x65\x00\x00\x00\x00\x00\x00' + b'\x42\x00\x0B\x09\x00\x00\x00\x08\x00\x00\x00\x00\x43\xB7\xB6\x30' + b'\x42\x00\x08\x01\x00\x00\x00\x30' + b'\x42\x00\x0A\x07\x00\x00\x00\x11' + b'\x50\x72\x6F\x74\x65\x63\x74\x20\x53\x74\x6F\x70\x20\x44\x61\x74' + b'\x65\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\x0B\x09\x00\x00\x00\x08\x00\x00\x00\x00\x5E\x0C\x7B\xB0' + b'\x42\x00\x08\x01\x00\x00\x00\x30' + b'\x42\x00\x0A\x07\x00\x00\x00\x11' + b'\x44\x65\x61\x63\x74\x69\x76\x61\x74\x69\x6F\x6E\x20\x44\x61\x74' + b'\x65\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\x0B\x09\x00\x00\x00\x08\x00\x00\x00\x00\x5E\x0C\x7B\xB0' + ) + + # Encoding obtained from the KMIP 1.1 testing document, Section 9.1. + # + # This encoding matches the following set of values: + # Request Payload + # Unique Identifier - 964d3dd2-5f06-4529-8bb8-ae630b6ca2e0 + + self.partial_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x30' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x39\x36\x34\x64\x33\x64\x64\x32\x2D\x35\x66\x30\x36\x2D\x34\x35' + b'\x32\x39\x2D\x38\x62\x62\x38\x2D\x61\x65\x36\x33\x30\x62\x36\x63' + b'\x61\x32\x65\x30\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x79\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestRekeyRequestPayload, self).tearDown() + + def test_init(self): + """ + Test that a Rekey request payload can be constructed with no arguments. 
+ """ + payload = payloads.RekeyRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.offset) + self.assertEqual(None, payload.template_attribute) + + def test_init_with_args(self): + """ + Test that a Rekey request payload can be constructed with valid values. + """ + payload = payloads.RekeyRequestPayload( + unique_identifier='00000000-2222-4444-6666-888888888888', + offset=0, + template_attribute=objects.TemplateAttribute() + ) + + self.assertEqual( + '00000000-2222-4444-6666-888888888888', + payload.unique_identifier + ) + self.assertEqual(0, payload.offset) + self.assertEqual( + objects.TemplateAttribute(), + payload.template_attribute + ) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of a Rekey request payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.RekeyRequestPayload, + **kwargs + ) + + args = (payloads.RekeyRequestPayload(), 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_invalid_offset(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the offset of a Rekey request payload. + """ + kwargs = {'offset': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Offset must be an integer.", + payloads.RekeyRequestPayload, + **kwargs + ) + + args = (payloads.RekeyRequestPayload(), 'offset', 'invalid') + self.assertRaisesRegexp( + TypeError, + "Offset must be an integer.", + setattr, + *args + ) + + def test_invalid_template_attribute(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the template attribute of a Rekey request payload. 
+ """ + kwargs = {'template_attribute': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Template attribute must be a TemplateAttribute struct.", + payloads.RekeyRequestPayload, + **kwargs + ) + + args = ( + payloads.RekeyRequestPayload(), + 'template_attribute', + 'invalid' + ) + self.assertRaisesRegexp( + TypeError, + "Template attribute must be a TemplateAttribute struct.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Rekey request payload can be read from a data stream. + """ + payload = payloads.RekeyRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.offset) + self.assertEqual(None, payload.template_attribute) + + payload.read(self.full_encoding) + + self.assertEqual( + '1346d253-69d6-474c-8cd5-ad475a3e0a81', + payload.unique_identifier + ) + self.assertEqual(0, payload.offset) + self.assertEqual( + objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Activation Date' + ), + attribute_value=primitives.DateTime( + value=1136113200, + tag=enums.Tags.ACTIVATION_DATE + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Process Start Date' + ), + attribute_value=primitives.DateTime( + value=1136113200, + tag=enums.Tags.PROCESS_START_DATE + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Protect Stop Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.PROTECT_STOP_DATE + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Deactivation Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.DEACTIVATION_DATE + ) + ) + ] + ), + payload.template_attribute + ) + + def test_read_partial(self): + """ + Test that a Rekey request payload can be read from a partial data + stream. 
+ """ + payload = payloads.RekeyRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.offset) + self.assertEqual(None, payload.template_attribute) + + payload.read(self.partial_encoding) + + self.assertEqual( + '964d3dd2-5f06-4529-8bb8-ae630b6ca2e0', + payload.unique_identifier + ) + self.assertEqual(None, payload.offset) + self.assertEqual(None, payload.template_attribute) + + def test_read_empty(self): + """ + Test that a Rekey request payload can be read from an empty data + stream. + """ + payload = payloads.RekeyRequestPayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.offset) + self.assertEqual(None, payload.template_attribute) + + payload.read(self.empty_encoding) + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.offset) + self.assertEqual(None, payload.template_attribute) + + def test_write(self): + """ + Test that a Rekey request payload can be written to a data stream. 
+ """ + payload = payloads.RekeyRequestPayload( + unique_identifier='1346d253-69d6-474c-8cd5-ad475a3e0a81', + offset=0, + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Activation Date' + ), + attribute_value=primitives.DateTime( + value=1136113200, + tag=enums.Tags.ACTIVATION_DATE + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Process Start Date' + ), + attribute_value=primitives.DateTime( + value=1136113200, + tag=enums.Tags.PROCESS_START_DATE + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Protect Stop Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.PROTECT_STOP_DATE + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Deactivation Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.DEACTIVATION_DATE + ) + ) + ] + ) + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_partial(self): + """ + Test that a partial Rekey request payload can be written to a data + stream. + """ + payload = payloads.RekeyRequestPayload( + unique_identifier='964d3dd2-5f06-4529-8bb8-ae630b6ca2e0' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.partial_encoding), len(stream)) + self.assertEqual(str(self.partial_encoding), str(stream)) + + def test_write_empty(self): + """ + Test that an empty Rekey request payload can be written to a data + stream. 
+ """ + payload = payloads.RekeyRequestPayload() + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.empty_encoding), len(stream)) + self.assertEqual(str(self.empty_encoding), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two Rekey + request payloads with the same data. + """ + a = payloads.RekeyRequestPayload() + b = payloads.RekeyRequestPayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.RekeyRequestPayload( + unique_identifier='1346d253-69d6-474c-8cd5-ad475a3e0a81', + offset=0, + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Activation Date' + ), + attribute_value=primitives.DateTime( + value=1136113200, + tag=enums.Tags.ACTIVATION_DATE + ) + ) + ] + ) + ) + b = payloads.RekeyRequestPayload( + unique_identifier='1346d253-69d6-474c-8cd5-ad475a3e0a81', + offset=0, + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Activation Date' + ), + attribute_value=primitives.DateTime( + value=1136113200, + tag=enums.Tags.ACTIVATION_DATE + ) + ) + ] + ) + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two Rekey + request payloads with different unique identifiers. + """ + a = payloads.RekeyRequestPayload( + unique_identifier='a' + ) + b = payloads.RekeyRequestPayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_offset(self): + """ + Test that the equality operator returns False when comparing two Rekey + request payloads with different offsets. 
+ """ + a = payloads.RekeyRequestPayload( + offset=0 + ) + b = payloads.RekeyRequestPayload( + offset=1 + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_template_attribute(self): + """ + Test that the equality operator returns False when comparing two Rekey + request payloads with different template attributes. + """ + a = payloads.RekeyRequestPayload( + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Protect Stop Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.PROTECT_STOP_DATE + ) + ) + ] + ) + ) + b = payloads.RekeyRequestPayload( + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Deactivation Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.DEACTIVATION_DATE + ) + ) + ] + ) + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two Rekey + request payloads with different types. + """ + a = payloads.RekeyRequestPayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Rekey request payloads with the same data. 
+ """ + a = payloads.RekeyRequestPayload() + b = payloads.RekeyRequestPayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.RekeyRequestPayload( + unique_identifier='1346d253-69d6-474c-8cd5-ad475a3e0a81', + offset=0, + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Activation Date' + ), + attribute_value=primitives.DateTime( + value=1136113200, + tag=enums.Tags.ACTIVATION_DATE + ) + ) + ] + ) + ) + b = payloads.RekeyRequestPayload( + unique_identifier='1346d253-69d6-474c-8cd5-ad475a3e0a81', + offset=0, + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Activation Date' + ), + attribute_value=primitives.DateTime( + value=1136113200, + tag=enums.Tags.ACTIVATION_DATE + ) + ) + ] + ) + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns True when comparing two Rekey + request payloads with different unique identifiers. + """ + a = payloads.RekeyRequestPayload( + unique_identifier='a' + ) + b = payloads.RekeyRequestPayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_offset(self): + """ + Test that the inequality operator returns True when comparing two Rekey + request payloads with different offsets. + """ + a = payloads.RekeyRequestPayload( + offset=0 + ) + b = payloads.RekeyRequestPayload( + offset=1 + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_template_attribute(self): + """ + Test that the inequality operator returns True when comparing two Rekey + request payloads with different template attributes. 
+ """ + a = payloads.RekeyRequestPayload( + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Protect Stop Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.PROTECT_STOP_DATE + ) + ) + ] + ) + ) + b = payloads.RekeyRequestPayload( + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Deactivation Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.DEACTIVATION_DATE + ) + ) + ] + ) + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two Rekey + request payloads with different types. + """ + a = payloads.RekeyRequestPayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Rekey request payload. 
+ """ + payload = payloads.RekeyRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + offset=0, + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Deactivation Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.DEACTIVATION_DATE + ) + ) + ] + ) + ) + + # TODO (peter-hamilton) Update this when TemplateAttributes have repr + expected = ( + "RekeyRequestPayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', " + "offset=0, " + "template_attribute=Struct())" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Rekey request payload + """ + payload = payloads.RekeyRequestPayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + offset=0, + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Deactivation Date' + ), + attribute_value=primitives.DateTime( + value=1577876400, + tag=enums.Tags.DEACTIVATION_DATE + ) + ) + ] + ) + ) + + # TODO (peter-hamilton) Update this when TemplateAttributes have str + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038', + 'offset': 0, + 'template_attribute': 'Struct()' + }) + observed = str(payload) + + self.assertEqual(expected, observed) + + +class TestRekeyResponsePayload(testtools.TestCase): + """ + Test suite for the Rekey response payload. + """ + + def setUp(self): + super(TestRekeyResponsePayload, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, + # Sections 3.1.1 and 9.2. 
+ # + # This encoding matches the following set of values: + # Response Payload + # Unique Identifier - 8efbbd67-2847-46b5-b7e7-4ab3b5e175de + # Template Attribute + # Attribute + # Attribute Name - Cryptographic Algorithm + # Attribute Value - AES + # Attribute + # Attribute Name - Cryptographic Length + # Attribute Value - 128 + + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\xA8' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x38\x65\x66\x62\x62\x64\x36\x37\x2D\x32\x38\x34\x37\x2D\x34\x36' + b'\x62\x35\x2D\x62\x37\x65\x37\x2D\x34\x61\x62\x33\x62\x35\x65\x31' + b'\x37\x35\x64\x65\x00\x00\x00\x00' + b'\x42\x00\x91\x01\x00\x00\x00\x70' + b'\x42\x00\x08\x01\x00\x00\x00\x30' + b'\x42\x00\x0A\x07\x00\x00\x00\x17' + b'\x43\x72\x79\x70\x74\x6F\x67\x72\x61\x70\x68\x69\x63\x20\x41\x6C' + b'\x67\x6F\x72\x69\x74\x68\x6D\x00' + b'\x42\x00\x0B\x05\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00' + b'\x42\x00\x08\x01\x00\x00\x00\x30' + b'\x42\x00\x0A\x07\x00\x00\x00\x14' + b'\x43\x72\x79\x70\x74\x6F\x67\x72\x61\x70\x68\x69\x63\x20\x4C\x65' + b'\x6E\x67\x74\x68\x00\x00\x00\x00' + b'\x42\x00\x0B\x02\x00\x00\x00\x04\x00\x00\x00\x80\x00\x00\x00\x00' + ) + + # Encoding obtained from the KMIP 1.1 testing document, + # Sections 3.1.1 and 9.2. + # + # This encoding matches the following set of values: + # Response Payload + # Unique Identifier - 8efbbd67-2847-46b5-b7e7-4ab3b5e175de + + self.partial_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x30' + b'\x42\x00\x94\x07\x00\x00\x00\x24' + b'\x38\x65\x66\x62\x62\x64\x36\x37\x2D\x32\x38\x34\x37\x2D\x34\x36' + b'\x62\x35\x2D\x62\x37\x65\x37\x2D\x34\x61\x62\x33\x62\x35\x65\x31' + b'\x37\x35\x64\x65\x00\x00\x00\x00' + ) + + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x7C\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestRekeyResponsePayload, self).tearDown() + + def test_init(self): + """ + Test that a Rekey response payload can be constructed with no + arguments. 
+ """ + payload = payloads.RekeyResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.template_attribute) + + def test_init_with_args(self): + """ + Test that a Rekey response payload can be constructed with valid + values. + """ + payload = payloads.RekeyResponsePayload( + unique_identifier='00000000-2222-4444-6666-888888888888', + template_attribute=objects.TemplateAttribute() + ) + + self.assertEqual( + '00000000-2222-4444-6666-888888888888', + payload.unique_identifier + ) + self.assertEqual( + objects.TemplateAttribute(), + payload.template_attribute + ) + + def test_invalid_unique_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the unique identifier of a Rekey response payload. + """ + kwargs = {'unique_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + payloads.RekeyResponsePayload, + **kwargs + ) + + args = (payloads.RekeyResponsePayload(), 'unique_identifier', 0) + self.assertRaisesRegexp( + TypeError, + "Unique identifier must be a string.", + setattr, + *args + ) + + def test_invalid_template_attribute(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the template attribute of a Rekey response payload. + """ + kwargs = {'template_attribute': 'invalid'} + self.assertRaisesRegexp( + TypeError, + "Template attribute must be a TemplateAttribute struct.", + payloads.RekeyResponsePayload, + **kwargs + ) + + args = ( + payloads.RekeyResponsePayload(), + 'template_attribute', + 'invalid' + ) + self.assertRaisesRegexp( + TypeError, + "Template attribute must be a TemplateAttribute struct.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Rekey response payload can be read from a data stream. 
+ """ + payload = payloads.RekeyResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.template_attribute) + + payload.read(self.full_encoding) + + self.assertEqual( + '8efbbd67-2847-46b5-b7e7-4ab3b5e175de', + payload.unique_identifier + ) + self.assertEqual( + objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ), + payload.template_attribute + ) + + def test_read_partial(self): + """ + Test that a Rekey response payload can be read from a partial data + stream. + """ + payload = payloads.RekeyResponsePayload() + + self.assertEqual(None, payload.unique_identifier) + self.assertEqual(None, payload.template_attribute) + + payload.read(self.partial_encoding) + + self.assertEqual( + '8efbbd67-2847-46b5-b7e7-4ab3b5e175de', + payload.unique_identifier + ) + self.assertEqual(None, payload.template_attribute) + + def test_read_invalid(self): + """ + Test that a ValueError gets raised when a required Rekey response + payload attribute is missing from the payload encoding. + """ + payload = payloads.RekeyResponsePayload() + args = (self.empty_encoding, ) + self.assertRaisesRegexp( + ValueError, + "The Rekey response payload encoding is missing the unique " + "identifier.", + payload.read, + *args + ) + + def test_write(self): + """ + Test that a Rekey response payload can be written to a data stream. 
+ """ + payload = payloads.RekeyResponsePayload( + unique_identifier='8efbbd67-2847-46b5-b7e7-4ab3b5e175de', + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_partial(self): + """ + Test that a partial Rekey response payload can be written to a data + stream. + """ + payload = payloads.RekeyResponsePayload( + unique_identifier='8efbbd67-2847-46b5-b7e7-4ab3b5e175de' + ) + stream = utils.BytearrayStream() + payload.write(stream) + + self.assertEqual(len(self.partial_encoding), len(stream)) + self.assertEqual(str(self.partial_encoding), str(stream)) + + def test_write_invalid(self): + """ + Test that a ValueError gets raised when a required Rekey response + payload attribute is missing when encoding the payload. + """ + payload = payloads.RekeyResponsePayload() + stream = utils.BytearrayStream() + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "The Rekey response payload is missing the unique identifier.", + payload.write, + *args + ) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two Rekey + response payloads with the same data. 
+ """ + a = payloads.RekeyResponsePayload() + b = payloads.RekeyResponsePayload() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = payloads.RekeyResponsePayload( + unique_identifier='1346d253-69d6-474c-8cd5-ad475a3e0a81', + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + b = payloads.RekeyResponsePayload( + unique_identifier='1346d253-69d6-474c-8cd5-ad475a3e0a81', + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_unique_identifier(self): + """ + Test that the equality operator returns False when comparing two Rekey + response payloads with different unique identifiers. 
+ """ + a = payloads.RekeyResponsePayload( + unique_identifier='a' + ) + b = payloads.RekeyResponsePayload( + unique_identifier='b' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_template_attribute(self): + """ + Test that the equality operator returns False when comparing two Rekey + response payloads with different template attributes. + """ + a = payloads.RekeyResponsePayload( + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ) + ] + ) + ) + b = payloads.RekeyResponsePayload( + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two Rekey + response payloads with different types. + """ + a = payloads.RekeyResponsePayload() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Rekey response payloads with the same data. 
+ """ + a = payloads.RekeyResponsePayload() + b = payloads.RekeyResponsePayload() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = payloads.RekeyResponsePayload( + unique_identifier='1346d253-69d6-474c-8cd5-ad475a3e0a81', + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + b = payloads.RekeyResponsePayload( + unique_identifier='1346d253-69d6-474c-8cd5-ad475a3e0a81', + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_unique_identifier(self): + """ + Test that the inequality operator returns True when comparing two Rekey + response payloads with different unique identifiers. 
+ """ + a = payloads.RekeyResponsePayload( + unique_identifier='a' + ) + b = payloads.RekeyResponsePayload( + unique_identifier='b' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_template_attribute(self): + """ + Test that the inequality operator returns True when comparing two Rekey + response payloads with different template attributes. + """ + a = payloads.RekeyResponsePayload( + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ) + ] + ) + ) + b = payloads.RekeyResponsePayload( + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two Rekey + response payloads with different types. + """ + a = payloads.RekeyResponsePayload() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Rekey response payload. 
+ """ + payload = payloads.RekeyResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + + # TODO (peter-hamilton) Update this when TemplateAttributes have repr + expected = ( + "RekeyResponsePayload(" + "unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', " + "template_attribute=Struct())" + ) + observed = repr(payload) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Rekey response payload + """ + payload = payloads.RekeyResponsePayload( + unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038', + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + + # TODO (peter-hamilton) Update this when TemplateAttributes have str + expected = str({ + 'unique_identifier': '49a1ca88-6bea-4fb2-b450-7e58802c3038', + 'template_attribute': 'Struct()' + }) + observed = str(payload) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/messages/test_messages.py python-pykmip-0.8.0/kmip/tests/unit/core/messages/test_messages.py --- python-pykmip-0.7.0/kmip/tests/unit/core/messages/test_messages.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/messages/test_messages.py 2018-04-02 17:12:18.000000000 +0000 @@ -16,7 +16,6 @@ from testtools import TestCase import binascii -from kmip.core.factories.keys import KeyFactory from kmip.core.factories.secrets import SecretFactory from kmip.core.factories.attributes import AttributeFactory @@ -36,9 +35,7 
@@ from kmip.core.enums import CryptographicUsageMask from kmip.core.enums import NameType -from kmip.core import errors -from kmip.core.errors import ErrorStrings - +from kmip.core import exceptions from kmip.core import objects from kmip.core.messages import contents @@ -61,7 +58,7 @@ super(TestRequestMessage, self).setUp() self.stream = BytearrayStream() self.attribute_factory = AttributeFactory() - self.msg = errors.ErrorStrings.BAD_EXP_RECV + self.msg = exceptions.ErrorStrings.BAD_EXP_RECV self.create = ( b'\x42\x00\x78\x01\x00\x00\x01\x20\x42\x00\x77\x01\x00\x00\x00\x38' b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04' @@ -186,25 +183,25 @@ msg.format(contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major + protocol_version_major = protocol_version.major msg = "Bad protocol version major type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version major value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_major.value, - msg.format(1, protocol_version_major.value)) + self.assertEqual(1, protocol_version_major, + msg.format(1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor + protocol_version_minor = protocol_version.minor msg = "Bad protocol version minor type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version minor value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_minor.value, - msg.format(1, protocol_version_minor.value)) + self.assertEqual(1, protocol_version_minor, + msg.format(1, 
protocol_version_minor)) batch_count = request_header.batch_count msg = "Bad batch count type: expected {0}, received {1}" @@ -355,7 +352,7 @@ exp_value, attribute_value.value)) def test_create_request_write(self): - prot_ver = contents.ProtocolVersion.create(1, 1) + prot_ver = contents.ProtocolVersion(1, 1) batch_count = contents.BatchCount(1) request_header = messages.RequestHeader(protocol_version=prot_ver, @@ -417,25 +414,25 @@ msg.format(contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major + protocol_version_major = protocol_version.major msg = "Bad protocol version major type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version major value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_major.value, - msg.format(1, protocol_version_major.value)) + self.assertEqual(1, protocol_version_major, + msg.format(1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor + protocol_version_minor = protocol_version.minor msg = "Bad protocol version minor type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version minor value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_minor.value, - msg.format(1, protocol_version_minor.value)) + self.assertEqual(1, protocol_version_minor, + msg.format(1, protocol_version_minor)) batch_count = request_header.batch_count msg = "Bad batch count type: expected {0}, received {1}" @@ -489,7 +486,7 @@ ) def test_get_request_write(self): - prot_ver = contents.ProtocolVersion.create(1, 1) + 
prot_ver = contents.ProtocolVersion(1, 1) batch_count = contents.BatchCount(1) req_header = messages.RequestHeader(protocol_version=prot_ver, @@ -535,25 +532,25 @@ msg.format(contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major + protocol_version_major = protocol_version.major msg = "Bad protocol version major type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version major value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_major.value, - msg.format(1, protocol_version_major.value)) + self.assertEqual(1, protocol_version_major, + msg.format(1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor + protocol_version_minor = protocol_version.minor msg = "Bad protocol version minor type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version minor value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_minor.value, - msg.format(1, protocol_version_minor.value)) + self.assertEqual(1, protocol_version_minor, + msg.format(1, protocol_version_minor)) batch_count = request_header.batch_count msg = "Bad batch count type: expected {0}, received {1}" @@ -608,7 +605,7 @@ msg.format(exp_value, rcv_value)) def test_destroy_request_write(self): - prot_ver = contents.ProtocolVersion.create(1, 1) + prot_ver = contents.ProtocolVersion(1, 1) batch_count = contents.BatchCount(1) req_header = messages.RequestHeader(protocol_version=prot_ver, @@ -654,25 +651,25 @@ msg.format(contents.ProtocolVersion, type(protocol_version))) - 
protocol_version_major = protocol_version.protocol_version_major + protocol_version_major = protocol_version.major msg = "Bad protocol version major type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version major value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_major.value, - msg.format(1, protocol_version_major.value)) + self.assertEqual(1, protocol_version_major, + msg.format(1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor + protocol_version_minor = protocol_version.minor msg = "Bad protocol version minor type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version minor value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_minor.value, - msg.format(1, protocol_version_minor.value)) + self.assertEqual(1, protocol_version_minor, + msg.format(1, protocol_version_minor)) batch_count = request_header.batch_count msg = "Bad batch count type: expected {0}, received {1}" @@ -736,33 +733,49 @@ names = template_attribute.names exp_type = list rcv_type = type(names) - msg = ErrorStrings.BAD_EXP_RECV.format('TemplateAttribute.names', - 'type', '{0}', '{0}') + msg = exceptions.ErrorStrings.BAD_EXP_RECV.format( + 'TemplateAttribute.names', + 'type', + '{0}', + '{0}' + ) self.assertIsInstance(names, exp_type, msg.format(exp_type, rcv_type)) exp_length = 0 rcv_length = len(names) - msg = ErrorStrings.BAD_EXP_RECV.format('TemplateAttribute.names', - 'length', '{0}', '{0}') + msg = exceptions.ErrorStrings.BAD_EXP_RECV.format( + 'TemplateAttribute.names', + 'length', + '{0}', + '{0}' + 
) self.assertEqual(exp_length, rcv_length, msg.format(exp_length, rcv_length)) attributes = template_attribute.attributes exp_type = list rcv_type = type(attributes) - msg = ErrorStrings.BAD_EXP_RECV.format( - 'TemplateAttribute.attributes', 'type', '{0}', '{1}') + msg = exceptions.ErrorStrings.BAD_EXP_RECV.format( + 'TemplateAttribute.attributes', + 'type', + '{0}', + '{1}' + ) self.assertIsInstance(names, exp_type, msg.format(exp_type, rcv_type)) exp_length = 0 rcv_length = len(attributes) - msg = ErrorStrings.BAD_EXP_RECV.format( - 'TemplateAttribute.attributes', 'length', '{0}', '{1}') + msg = exceptions.ErrorStrings.BAD_EXP_RECV.format( + 'TemplateAttribute.attributes', + 'length', + '{0}', + '{1}' + ) self.assertEqual(exp_length, rcv_length, msg.format(exp_length, rcv_length)) def test_register_request_write(self): - prot_ver = contents.ProtocolVersion.create(1, 1) + prot_ver = contents.ProtocolVersion(1, 1) batch_count = contents.BatchCount(1) req_header = messages.RequestHeader(protocol_version=prot_ver, @@ -854,25 +867,25 @@ msg.format(contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major + protocol_version_major = protocol_version.major msg = "Bad protocol version major type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version major value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_major.value, - msg.format(1, protocol_version_major.value)) + self.assertEqual(1, protocol_version_major, + msg.format(1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor + protocol_version_minor = protocol_version.minor msg = "Bad protocol version minor type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMinor 
+ exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version minor value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_minor.value, - msg.format(1, protocol_version_minor.value)) + self.assertEqual(1, protocol_version_minor, + msg.format(1, protocol_version_minor)) batch_count = request_header.batch_count msg = "Bad batch count type: expected {0}, received {1}" @@ -999,25 +1012,25 @@ msg.format(contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major + protocol_version_major = protocol_version.major msg = "Bad protocol version major type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version major value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_major.value, - msg.format(1, protocol_version_major.value)) + self.assertEqual(1, protocol_version_major, + msg.format(1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor + protocol_version_minor = protocol_version.minor msg = "Bad protocol version minor type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version minor value: expected {0}, received {1}" - self.assertEqual(2, protocol_version_minor.value, - msg.format(2, protocol_version_minor.value)) + self.assertEqual(2, protocol_version_minor, + msg.format(2, protocol_version_minor)) batch_count = request_header.batch_count msg = "Bad batch count type: expected {0}, received {1}" @@ -1101,7 +1114,7 @@ ) def 
test_mac_request_write(self): - prot_ver = contents.ProtocolVersion.create(1, 2) + prot_ver = contents.ProtocolVersion(1, 2) batch_count = contents.BatchCount(1) req_header = messages.RequestHeader(protocol_version=prot_ver, @@ -1144,9 +1157,8 @@ def setUp(self): super(TestResponseMessage, self).setUp() self.stream = BytearrayStream() - self.key_factory = KeyFactory() self.secret_factory = SecretFactory() - self.msg = errors.ErrorStrings.BAD_EXP_RECV + self.msg = exceptions.ErrorStrings.BAD_EXP_RECV self.create = ( b'\x42\x00\x7B\x01\x00\x00\x00\xC0\x42\x00\x7A\x01\x00\x00\x00\x48' b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04' @@ -1272,26 +1284,26 @@ contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + protocol_version_major = protocol_version.major + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, self.msg.format('protocol version major', 'type', exp_type, rcv_type)) - self.assertEqual(1, protocol_version_major.value, + self.assertEqual(1, protocol_version_major, self.msg.format('protocol version major', 'value', - 1, protocol_version_major.value)) + 1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + protocol_version_minor = protocol_version.minor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, - contents.ProtocolVersion.ProtocolVersionMinor, + int, self.msg.format('protocol version minor', 'type', exp_type, rcv_type)) - self.assertEqual(1, protocol_version_minor.value, + self.assertEqual(1, protocol_version_minor, self.msg.format('protocol version minor', 'value', - 1, protocol_version_minor.value)) + 1, protocol_version_minor)) time_stamp = response_header.time_stamp value = 0x4f9a54e5 # Fri Apr 27 
10:12:21 CEST 2012 @@ -1371,7 +1383,7 @@ unique_identifier.value, value)) def test_create_response_write(self): - prot_ver = contents.ProtocolVersion.create(1, 1) + prot_ver = contents.ProtocolVersion(1, 1) # Fri Apr 27 10:12:21 CEST 2012 time_stamp = contents.TimeStamp(0x4f9a54e5) @@ -1424,25 +1436,25 @@ contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + protocol_version_major = protocol_version.major + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, self.msg.format('protocol version major', 'type', exp_type, rcv_type)) - self.assertEqual(1, protocol_version_major.value, + self.assertEqual(1, protocol_version_major, self.msg.format('protocol version major', 'value', - 1, protocol_version_major.value)) + 1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + protocol_version_minor = protocol_version.minor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, self.msg.format('protocol version minor', 'type', exp_type, rcv_type)) - self.assertEqual(1, protocol_version_minor.value, + self.assertEqual(1, protocol_version_minor, self.msg.format('protocol version minor', 'value', - 1, protocol_version_minor.value)) + 1, protocol_version_minor)) time_stamp = response_header.time_stamp value = 0x4f9a54e7 # Fri Apr 27 10:12:23 CEST 2012 @@ -1572,7 +1584,7 @@ 'value', exp, obs)) def test_get_response_write(self): - prot_ver = contents.ProtocolVersion.create(1, 1) + prot_ver = contents.ProtocolVersion(1, 1) # Fri Apr 27 10:12:23 CEST 2012 time_stamp = contents.TimeStamp(0x4f9a54e7) @@ -1653,25 +1665,25 @@ msg.format(contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major 
+ protocol_version_major = protocol_version.major msg = "Bad protocol version major type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version major value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_major.value, - msg.format(1, protocol_version_major.value)) + self.assertEqual(1, protocol_version_major, + msg.format(1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor + protocol_version_minor = protocol_version.minor msg = "Bad protocol version minor type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version minor value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_minor.value, - msg.format(1, protocol_version_minor.value)) + self.assertEqual(1, protocol_version_minor, + msg.format(1, protocol_version_minor)) time_stamp = response_header.time_stamp value = 0x4f9a54e5 # Fri Apr 27 10:12:21 CEST 2012 @@ -1746,7 +1758,7 @@ msg.format(exp_value, rcv_value)) def test_destroy_response_write(self): - prot_ver = contents.ProtocolVersion.create(1, 1) + prot_ver = contents.ProtocolVersion(1, 1) # Fri Apr 27 10:12:21 CEST 2012 time_stamp = contents.TimeStamp(0x4f9a54e5) @@ -1796,25 +1808,25 @@ msg.format(contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major + protocol_version_major = protocol_version.major msg = "Bad protocol version major type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + exp_type = int rcv_type = type(protocol_version_major) 
self.assertIsInstance(protocol_version_major, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version major value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_major.value, - msg.format(1, protocol_version_major.value)) + self.assertEqual(1, protocol_version_major, + msg.format(1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor + protocol_version_minor = protocol_version.minor msg = "Bad protocol version minor type: expected {0}, received {1}" - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, msg.format(exp_type, rcv_type)) msg = "Bad protocol version minor value: expected {0}, received {1}" - self.assertEqual(1, protocol_version_minor.value, - msg.format(1, protocol_version_minor.value)) + self.assertEqual(1, protocol_version_minor, + msg.format(1, protocol_version_minor)) time_stamp = response_header.time_stamp value = 0x4f9a54e5 # Fri Apr 27 10:12:21 CEST 2012 @@ -1889,7 +1901,7 @@ msg.format(exp_value, rcv_value)) def test_register_response_write(self): - prot_ver = contents.ProtocolVersion.create(1, 1) + prot_ver = contents.ProtocolVersion(1, 1) # Fri Apr 27 10:12:21 CEST 2012 time_stamp = contents.TimeStamp(0x4f9a54e5) @@ -1922,7 +1934,7 @@ self.assertEqual(self.register, result, msg) def test_locate_response_write(self): - prot_ver = contents.ProtocolVersion.create(1, 1) + prot_ver = contents.ProtocolVersion(1, 1) # Fri Apr 27 10:12:22 CEST 2012 time_stamp = contents.TimeStamp(0x4f9a54e6) @@ -1973,25 +1985,25 @@ contents.ProtocolVersion, type(protocol_version))) - protocol_version_major = protocol_version.protocol_version_major - exp_type = contents.ProtocolVersion.ProtocolVersionMajor + protocol_version_major = protocol_version.major + exp_type = int rcv_type = type(protocol_version_major) self.assertIsInstance(protocol_version_major, exp_type, 
self.msg.format('protocol version major', 'type', exp_type, rcv_type)) - self.assertEqual(1, protocol_version_major.value, + self.assertEqual(1, protocol_version_major, self.msg.format('protocol version major', 'value', - 1, protocol_version_major.value)) + 1, protocol_version_major)) - protocol_version_minor = protocol_version.protocol_version_minor - exp_type = contents.ProtocolVersion.ProtocolVersionMinor + protocol_version_minor = protocol_version.minor + exp_type = int rcv_type = type(protocol_version_minor) self.assertIsInstance(protocol_version_minor, exp_type, self.msg.format('protocol version minor', 'type', exp_type, rcv_type)) - self.assertEqual(2, protocol_version_minor.value, + self.assertEqual(2, protocol_version_minor, self.msg.format('protocol version minor', 'value', - 2, protocol_version_minor.value)) + 2, protocol_version_minor)) time_stamp = response_header.time_stamp value = 0x588a3f23 @@ -2080,7 +2092,7 @@ binascii.hexlify(value))) def test_mac_response_write(self): - prot_ver = contents.ProtocolVersion.create(1, 2) + prot_ver = contents.ProtocolVersion(1, 2) # Fri Apr 27 10:12:23 CEST 2012 time_stamp = contents.TimeStamp(0x588a3f23) @@ -2130,7 +2142,7 @@ def test_message_invalid_response_write(self): # Batch item of 'INVALID MESSAGE' response # has no 'operation' attribute - prot_ver = contents.ProtocolVersion.create(1, 1) + prot_ver = contents.ProtocolVersion(1, 1) # Time stamp Tue Mar 29 10:58:37 2016 time_stamp = contents.TimeStamp(0x56fa43bd) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/objects/test_credentials.py python-pykmip-0.8.0/kmip/tests/unit/core/objects/test_credentials.py --- python-pykmip-0.7.0/kmip/tests/unit/core/objects/test_credentials.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/objects/test_credentials.py 2018-04-02 17:12:18.000000000 +0000 @@ -0,0 +1,3159 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import enum +import mock +import testtools + +from kmip import enums +from kmip.core import objects +from kmip.core import utils + + +class TestNonce(testtools.TestCase): + """ + Test suite for the Nonce struct. + """ + + def setUp(self): + super(TestNonce, self).setUp() + + # There are no Nonce encodings available in any of the KMIP testing + # documents. The following encodings were adapted from other structure + # encodings present in the KMIP testing suite. + # + # This encoding matches the following set of values: + # Nonce + # Nonce ID - 1 + # Nonce Value - 0x0001020304050607 + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\xC8\x01\x00\x00\x00\x20' + b'\x42\x00\xC9\x08\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xCA\x08\x00\x00\x00\x08\x00\x01\x02\x03\x04\x05\x06\x07' + ) + self.encoding_missing_nonce_id = utils.BytearrayStream( + b'\x42\x00\xC8\x01\x00\x00\x00\x10' + b'\x42\x00\xCA\x08\x00\x00\x00\x08\x00\x01\x02\x03\x04\x05\x06\x07' + ) + self.encoding_missing_nonce_value = utils.BytearrayStream( + b'\x42\x00\xC8\x01\x00\x00\x00\x10' + b'\x42\x00\xC9\x08\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestNonce, self).tearDown() + + def test_init(self): + """ + Test that a Nonce struct can be constructed without arguments. 
+ """ + nonce = objects.Nonce() + + self.assertEqual(None, nonce.nonce_id) + self.assertEqual(None, nonce.nonce_value) + + def test_init_with_args(self): + """ + Test that a Nonce struct can be constructed with arguments. + """ + nonce = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ) + + self.assertEqual(b'\x01', nonce.nonce_id) + self.assertEqual( + b'\x00\x01\x02\x03\x04\x05\x06\x07', + nonce.nonce_value + ) + + def test_invalid_nonce_id(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the nonce ID of a Nonce struct. + """ + kwargs = {'nonce_id': 0} + self.assertRaisesRegexp( + TypeError, + "Nonce ID must be bytes.", + objects.Nonce, + **kwargs + ) + + nonce = objects.Nonce() + args = (nonce, "nonce_id", 0) + self.assertRaisesRegexp( + TypeError, + "Nonce ID must be bytes.", + setattr, + *args + ) + + def test_invalid_nonce_value(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the nonce value of a Nonce struct. + """ + kwargs = {'nonce_value': 0} + self.assertRaisesRegexp( + TypeError, + "Nonce value must be bytes.", + objects.Nonce, + **kwargs + ) + + nonce = objects.Nonce() + args = (nonce, "nonce_value", 0) + self.assertRaisesRegexp( + TypeError, + "Nonce value must be bytes.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Nonce struct can be read from a data stream. + """ + nonce = objects.Nonce() + + self.assertEqual(None, nonce.nonce_id) + self.assertEqual(None, nonce.nonce_value) + + nonce.read(self.full_encoding) + + self.assertEqual(b'\x01', nonce.nonce_id) + self.assertEqual( + b'\x00\x01\x02\x03\x04\x05\x06\x07', + nonce.nonce_value + ) + + def test_read_missing_nonce_id(self): + """ + Test that a ValueError gets raised when attempting to read a + Nonce struct from a data stream missing the nonce ID data. 
+ """ + nonce = objects.Nonce() + + self.assertEqual(None, nonce.nonce_id) + self.assertEqual(None, nonce.nonce_value) + + args = (self.encoding_missing_nonce_id, ) + self.assertRaisesRegexp( + ValueError, + "Nonce encoding missing the nonce ID.", + nonce.read, + *args + ) + + def test_read_missing_nonce_value(self): + """ + Test that a ValueError gets raised when attempting to read a + Nonce struct from a data stream missing the nonce value data. + """ + nonce = objects.Nonce() + + self.assertEqual(None, nonce.nonce_id) + self.assertEqual(None, nonce.nonce_value) + + args = (self.encoding_missing_nonce_value, ) + self.assertRaisesRegexp( + ValueError, + "Nonce encoding missing the nonce value.", + nonce.read, + *args + ) + + def test_write(self): + """ + Test that a Nonce struct can be written to a data stream. + """ + nonce = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ) + stream = utils.BytearrayStream() + + nonce.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_missing_nonce_id(self): + """ + Test that a ValueError gets raised when attempting to write a + Nonce struct missing nonce ID data to a data stream. + """ + nonce = objects.Nonce( + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ) + stream = utils.BytearrayStream() + + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Nonce struct is missing the nonce ID.", + nonce.write, + *args + ) + + def test_write_missing_nonce_value(self): + """ + Test that a ValueError gets raised when attempting to write a + Nonce struct missing nonce value data to a data stream. 
+ """ + nonce = objects.Nonce( + nonce_id=b'\x01' + ) + stream = utils.BytearrayStream() + + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Nonce struct is missing the nonce value.", + nonce.write, + *args + ) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + Nonce structs with the same data. + """ + a = objects.Nonce() + b = objects.Nonce() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03' + ) + b = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_nonce_id(self): + """ + Test that the equality operator returns False when comparing two + Nonce structs with different nonce IDs. + """ + a = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03' + ) + b = objects.Nonce( + nonce_id=b'\x02', + nonce_value=b'\x00\x01\x02\x03' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_nonce_value(self): + """ + Test that the equality operator returns False when comparing two + Nonce structs with different nonce values. + """ + a = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03' + ) + b = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x03\x02\x01\x00' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + Nonce structs with different types. + """ + a = objects.Nonce() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Nonce structs with the same data. 
+ """ + a = objects.Nonce() + b = objects.Nonce() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03' + ) + b = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_nonce_id(self): + """ + Test that the inequality operator returns True when comparing two + Nonce structs with different nonce IDs. + """ + a = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03' + ) + b = objects.Nonce( + nonce_id=b'\x02', + nonce_value=b'\x00\x01\x02\x03' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_nonce_value(self): + """ + Test that the inequality operator returns True when comparing two + Nonce structs with different nonce values. + """ + a = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03' + ) + b = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x03\x02\x01\x00' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Nonce structs with different types. + """ + a = objects.Nonce() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Nonce struct. + """ + credential = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ) + expected = ( + "Nonce(" + "nonce_id=" + str(b'\x01') + ", " + "nonce_value=" + str(b'\x00\x01\x02\x03\x04\x05\x06\x07') + ")" + ) + observed = repr(credential) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Nonce struct. 
+ """ + credential = objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ) + expected = ", ".join([ + "'nonce_id': {}".format(b'\x01'), + "'nonce_value': {}".format(b'\x00\x01\x02\x03\x04\x05\x06\x07') + ]) + expected = "{" + expected + "}" + observed = str(credential) + + self.assertEqual(expected, observed) + + +class TestUsernamePasswordCredential(testtools.TestCase): + """ + Test suite for the UsernamePasswordCredential struct. + """ + + def setUp(self): + super(TestUsernamePasswordCredential, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 11.1. + # + # This encoding matches the following set of values: + # UsernamePasswordCredential + # Username - Fred + # Password - password1 + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x28' + b'\x42\x00\x99\x07\x00\x00\x00\x04' + b'\x46\x72\x65\x64\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x09' + b'\x70\x61\x73\x73\x77\x6F\x72\x64\x31\x00\x00\x00\x00\x00\x00\x00' + ) + + self.encoding_missing_username = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x18' + b'\x42\x00\xA1\x07\x00\x00\x00\x09' + b'\x70\x61\x73\x73\x77\x6F\x72\x64\x31\x00\x00\x00\x00\x00\x00\x00' + ) + + self.encoding_missing_password = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x10' + b'\x42\x00\x99\x07\x00\x00\x00\x04' + b'\x46\x72\x65\x64\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestUsernamePasswordCredential, self).tearDown() + + def test_init(self): + """ + Test that a UsernamePasswordCredential struct can be constructed + without arguments. + """ + credential = objects.UsernamePasswordCredential() + + self.assertEqual(None, credential.username) + self.assertEqual(None, credential.password) + + def test_init_with_args(self): + """ + Test that a UsernamePasswordCredential struct can be constructed with + arguments. 
+ """ + credential = objects.UsernamePasswordCredential( + username="John", + password="abc123" + ) + + self.assertEqual("John", credential.username) + self.assertEqual("abc123", credential.password) + + def test_invalid_username(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the username of a UsernamePasswordCredential struct. + """ + kwargs = {'username': 0} + self.assertRaisesRegexp( + TypeError, + "Username must be a string.", + objects.UsernamePasswordCredential, + **kwargs + ) + + credential = objects.UsernamePasswordCredential() + args = (credential, "username", 0) + self.assertRaisesRegexp( + TypeError, + "Username must be a string.", + setattr, + *args + ) + + def test_invalid_password(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the password of a UsernamePasswordCredential struct. + """ + kwargs = {'password': 0} + self.assertRaisesRegexp( + TypeError, + "Password must be a string.", + objects.UsernamePasswordCredential, + **kwargs + ) + + credential = objects.UsernamePasswordCredential() + args = (credential, "password", 0) + self.assertRaisesRegexp( + TypeError, + "Password must be a string.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a UsernamePasswordCredential struct can be read from a data + stream. + """ + credential = objects.UsernamePasswordCredential() + + self.assertEqual(None, credential.username) + self.assertEqual(None, credential.password) + + credential.read(self.full_encoding) + + self.assertEqual("Fred", credential.username) + self.assertEqual("password1", credential.password) + + def test_read_missing_username(self): + """ + Test that a ValueError gets raised when attempting to read a + UsernamePasswordCredential struct from a data stream missing the + username data. 
+ """ + credential = objects.UsernamePasswordCredential() + + self.assertEqual(None, credential.username) + self.assertEqual(None, credential.password) + + args = (self.encoding_missing_username, ) + self.assertRaisesRegexp( + ValueError, + "Username/password credential encoding missing the username.", + credential.read, + *args + ) + + def test_read_missing_password(self): + """ + Test that a UsernamePasswordCredential struct can be read from a data + stream missing the password data. + """ + credential = objects.UsernamePasswordCredential() + + self.assertEqual(None, credential.username) + self.assertEqual(None, credential.password) + + credential.read(self.encoding_missing_password) + + self.assertEqual("Fred", credential.username) + self.assertEqual(None, credential.password) + + def test_write(self): + """ + Test that a UsernamePasswordCredential struct can be written to a + data stream. + """ + credential = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_missing_username(self): + """ + Test that a ValueError gets raised when attempting to write a + UsernamePasswordCredential struct missing username data to a data + stream. + """ + credential = objects.UsernamePasswordCredential( + password="password1" + ) + stream = utils.BytearrayStream() + + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Username/password credential struct missing the username.", + credential.write, + *args + ) + + def test_write_missing_password(self): + """ + Test that a UsernamePasswordCredential struct missing password data + can be written to a data stream. 
+ """ + credential = objects.UsernamePasswordCredential( + username="Fred" + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual(len(self.encoding_missing_password), len(stream)) + self.assertEqual(str(self.encoding_missing_password), str(stream)) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + UsernamePasswordCredential structs with the same data. + """ + a = objects.UsernamePasswordCredential() + b = objects.UsernamePasswordCredential() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + b = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_username(self): + """ + Test that the equality operator returns False when comparing two + UsernamePasswordCredential structs with different usernames. + """ + a = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + b = objects.UsernamePasswordCredential( + username="Wilma", + password="password1" + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_password(self): + """ + Test that the equality operator returns False when comparing two + UsernamePasswordCredential structs with different passwords. + """ + a = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + b = objects.UsernamePasswordCredential( + username="Fred", + password="1password" + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + UsernamePasswordCredential structs with different types. 
+ """ + a = objects.UsernamePasswordCredential() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + UsernamePasswordCredential structs with the same data. + """ + a = objects.UsernamePasswordCredential() + b = objects.UsernamePasswordCredential() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + b = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_username(self): + """ + Test that the inequality operator returns True when comparing two + UsernamePasswordCredential structs with different usernames. + """ + a = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + b = objects.UsernamePasswordCredential( + username="Wilma", + password="password1" + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_password(self): + """ + Test that the inequality operator returns True when comparing two + UsernamePasswordCredential structs with different passwords. + """ + a = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + b = objects.UsernamePasswordCredential( + username="Fred", + password="1password" + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + UsernamePasswordCredential structs with different types. + """ + a = objects.UsernamePasswordCredential() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a UsernamePasswordCredential struct. 
+ """ + credential = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + expected = ( + "UsernamePasswordCredential(" + "username='Fred', " + "password='password1')" + ) + observed = repr(credential) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a UsernamePasswordCredential struct. + """ + credential = objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + expected = str({"username": "Fred", "password": "password1"}) + observed = str(credential) + + self.assertEqual(expected, observed) + + +class TestDeviceCredential(testtools.TestCase): + """ + Test suite for the DeviceCredential struct. + """ + + def setUp(self): + super(TestDeviceCredential, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 11.2. + # + # This encoding matches the following set of values: + # DeviceCredential + # Device Serial Number - serNum123456 + # Password - secret + # Device Identifier - devID2233 + # Network Identifier - netID9000 + # Machine Identifier - machineID1 + # Media Identifier - mediaID313 + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x88' + b'\x42\x00\xB0\x07\x00\x00\x00\x0C' + b'\x73\x65\x72\x4E\x75\x6D\x31\x32\x33\x34\x35\x36\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x06' + b'\x73\x65\x63\x72\x65\x74\x00\x00' + b'\x42\x00\xA2\x07\x00\x00\x00\x09' + b'\x64\x65\x76\x49\x44\x32\x32\x33\x33\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAB\x07\x00\x00\x00\x09' + b'\x6E\x65\x74\x49\x44\x39\x30\x30\x30\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xA9\x07\x00\x00\x00\x0A' + b'\x6D\x61\x63\x68\x69\x6E\x65\x49\x44\x31\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAA\x07\x00\x00\x00\x0A' + b'\x6D\x65\x64\x69\x61\x49\x44\x33\x31\x33\x00\x00\x00\x00\x00\x00' + ) + self.encoding_missing_device_serial_number = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x70' + b'\x42\x00\xA1\x07\x00\x00\x00\x06' + 
b'\x73\x65\x63\x72\x65\x74\x00\x00' + b'\x42\x00\xA2\x07\x00\x00\x00\x09' + b'\x64\x65\x76\x49\x44\x32\x32\x33\x33\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAB\x07\x00\x00\x00\x09' + b'\x6E\x65\x74\x49\x44\x39\x30\x30\x30\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xA9\x07\x00\x00\x00\x0A' + b'\x6D\x61\x63\x68\x69\x6E\x65\x49\x44\x31\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAA\x07\x00\x00\x00\x0A' + b'\x6D\x65\x64\x69\x61\x49\x44\x33\x31\x33\x00\x00\x00\x00\x00\x00' + ) + self.encoding_missing_password = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x78' + b'\x42\x00\xB0\x07\x00\x00\x00\x0C' + b'\x73\x65\x72\x4E\x75\x6D\x31\x32\x33\x34\x35\x36\x00\x00\x00\x00' + b'\x42\x00\xA2\x07\x00\x00\x00\x09' + b'\x64\x65\x76\x49\x44\x32\x32\x33\x33\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAB\x07\x00\x00\x00\x09' + b'\x6E\x65\x74\x49\x44\x39\x30\x30\x30\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xA9\x07\x00\x00\x00\x0A' + b'\x6D\x61\x63\x68\x69\x6E\x65\x49\x44\x31\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAA\x07\x00\x00\x00\x0A' + b'\x6D\x65\x64\x69\x61\x49\x44\x33\x31\x33\x00\x00\x00\x00\x00\x00' + ) + self.encoding_missing_device_identifier = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x70' + b'\x42\x00\xB0\x07\x00\x00\x00\x0C' + b'\x73\x65\x72\x4E\x75\x6D\x31\x32\x33\x34\x35\x36\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x06' + b'\x73\x65\x63\x72\x65\x74\x00\x00' + b'\x42\x00\xAB\x07\x00\x00\x00\x09' + b'\x6E\x65\x74\x49\x44\x39\x30\x30\x30\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xA9\x07\x00\x00\x00\x0A' + b'\x6D\x61\x63\x68\x69\x6E\x65\x49\x44\x31\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAA\x07\x00\x00\x00\x0A' + b'\x6D\x65\x64\x69\x61\x49\x44\x33\x31\x33\x00\x00\x00\x00\x00\x00' + ) + self.encoding_missing_network_identifier = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x70' + b'\x42\x00\xB0\x07\x00\x00\x00\x0C' + b'\x73\x65\x72\x4E\x75\x6D\x31\x32\x33\x34\x35\x36\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x06' + 
b'\x73\x65\x63\x72\x65\x74\x00\x00' + b'\x42\x00\xA2\x07\x00\x00\x00\x09' + b'\x64\x65\x76\x49\x44\x32\x32\x33\x33\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xA9\x07\x00\x00\x00\x0A' + b'\x6D\x61\x63\x68\x69\x6E\x65\x49\x44\x31\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAA\x07\x00\x00\x00\x0A' + b'\x6D\x65\x64\x69\x61\x49\x44\x33\x31\x33\x00\x00\x00\x00\x00\x00' + ) + self.encoding_missing_machine_identifier = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x70' + b'\x42\x00\xB0\x07\x00\x00\x00\x0C' + b'\x73\x65\x72\x4E\x75\x6D\x31\x32\x33\x34\x35\x36\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x06' + b'\x73\x65\x63\x72\x65\x74\x00\x00' + b'\x42\x00\xA2\x07\x00\x00\x00\x09' + b'\x64\x65\x76\x49\x44\x32\x32\x33\x33\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAB\x07\x00\x00\x00\x09' + b'\x6E\x65\x74\x49\x44\x39\x30\x30\x30\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAA\x07\x00\x00\x00\x0A' + b'\x6D\x65\x64\x69\x61\x49\x44\x33\x31\x33\x00\x00\x00\x00\x00\x00' + ) + self.encoding_missing_media_identifier = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x70' + b'\x42\x00\xB0\x07\x00\x00\x00\x0C' + b'\x73\x65\x72\x4E\x75\x6D\x31\x32\x33\x34\x35\x36\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x06' + b'\x73\x65\x63\x72\x65\x74\x00\x00' + b'\x42\x00\xA2\x07\x00\x00\x00\x09' + b'\x64\x65\x76\x49\x44\x32\x32\x33\x33\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAB\x07\x00\x00\x00\x09' + b'\x6E\x65\x74\x49\x44\x39\x30\x30\x30\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xA9\x07\x00\x00\x00\x0A' + b'\x6D\x61\x63\x68\x69\x6E\x65\x49\x44\x31\x00\x00\x00\x00\x00\x00' + ) + self.empty_encoding = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestDeviceCredential, self).tearDown() + + def test_init(self): + """ + Test that a DeviceCredential struct can be constructed without + arguments. 
+ """ + credential = objects.DeviceCredential() + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + def test_init_with_args(self): + """ + Test that a DeviceCredential struct can be constructed with arguments. + """ + credential = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + + self.assertEqual("serNum123456", credential.device_serial_number) + self.assertEqual("secret", credential.password) + self.assertEqual("devID2233", credential.device_identifier) + self.assertEqual("netID9000", credential.network_identifier) + self.assertEqual("machineID1", credential.machine_identifier) + self.assertEqual("mediaID313", credential.media_identifier) + + def test_invalid_device_serial_number(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the device serial number of a DeviceCredential struct. + """ + kwargs = {'device_serial_number': 0} + self.assertRaisesRegexp( + TypeError, + "Device serial number must be a string.", + objects.DeviceCredential, + **kwargs + ) + + credential = objects.DeviceCredential() + args = (credential, "device_serial_number", 0) + self.assertRaisesRegexp( + TypeError, + "Device serial number must be a string.", + setattr, + *args + ) + + def test_invalid_password(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the password of a DeviceCredential struct. 
+ """ + kwargs = {'password': 0} + self.assertRaisesRegexp( + TypeError, + "Password must be a string.", + objects.DeviceCredential, + **kwargs + ) + + credential = objects.DeviceCredential() + args = (credential, "password", 0) + self.assertRaisesRegexp( + TypeError, + "Password must be a string.", + setattr, + *args + ) + + def test_invalid_device_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the device identifier of a DeviceCredential struct. + """ + kwargs = {'device_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Device identifier must be a string.", + objects.DeviceCredential, + **kwargs + ) + + credential = objects.DeviceCredential() + args = (credential, "device_identifier", 0) + self.assertRaisesRegexp( + TypeError, + "Device identifier must be a string.", + setattr, + *args + ) + + def test_invalid_network_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the network identifier of a DeviceCredential struct. + """ + kwargs = {'network_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Network identifier must be a string.", + objects.DeviceCredential, + **kwargs + ) + + credential = objects.DeviceCredential() + args = (credential, "network_identifier", 0) + self.assertRaisesRegexp( + TypeError, + "Network identifier must be a string.", + setattr, + *args + ) + + def test_invalid_machine_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the machine identifier of a DeviceCredential struct. 
+ """ + kwargs = {'machine_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Machine identifier must be a string.", + objects.DeviceCredential, + **kwargs + ) + + credential = objects.DeviceCredential() + args = (credential, "machine_identifier", 0) + self.assertRaisesRegexp( + TypeError, + "Machine identifier must be a string.", + setattr, + *args + ) + + def test_invalid_media_identifier(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the media identifier of a DeviceCredential struct. + """ + kwargs = {'media_identifier': 0} + self.assertRaisesRegexp( + TypeError, + "Media identifier must be a string.", + objects.DeviceCredential, + **kwargs + ) + + credential = objects.DeviceCredential() + args = (credential, "media_identifier", 0) + self.assertRaisesRegexp( + TypeError, + "Media identifier must be a string.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a DeviceCredential struct can be read from a data stream. + """ + credential = objects.DeviceCredential() + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + credential.read(self.full_encoding) + + self.assertEqual("serNum123456", credential.device_serial_number) + self.assertEqual("secret", credential.password) + self.assertEqual("devID2233", credential.device_identifier) + self.assertEqual("netID9000", credential.network_identifier) + self.assertEqual("machineID1", credential.machine_identifier) + self.assertEqual("mediaID313", credential.media_identifier) + + def test_read_missing_device_serial_number(self): + """ + Test that a DeviceCredential struct can be read from a data stream + missing the device serial number data. 
+ """ + credential = objects.DeviceCredential() + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + credential.read(self.encoding_missing_device_serial_number) + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual("secret", credential.password) + self.assertEqual("devID2233", credential.device_identifier) + self.assertEqual("netID9000", credential.network_identifier) + self.assertEqual("machineID1", credential.machine_identifier) + self.assertEqual("mediaID313", credential.media_identifier) + + def test_read_missing_password(self): + """ + Test that a DeviceCredential struct can be read from a data stream + missing the password data. + """ + credential = objects.DeviceCredential() + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + credential.read(self.encoding_missing_password) + + self.assertEqual("serNum123456", credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual("devID2233", credential.device_identifier) + self.assertEqual("netID9000", credential.network_identifier) + self.assertEqual("machineID1", credential.machine_identifier) + self.assertEqual("mediaID313", credential.media_identifier) + + def test_read_missing_device_identifier(self): + """ + Test that a DeviceCredential struct can be read from a data stream + missing the device identifier data. 
+ """ + credential = objects.DeviceCredential() + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + credential.read(self.encoding_missing_device_identifier) + + self.assertEqual("serNum123456", credential.device_serial_number) + self.assertEqual("secret", credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual("netID9000", credential.network_identifier) + self.assertEqual("machineID1", credential.machine_identifier) + self.assertEqual("mediaID313", credential.media_identifier) + + def test_read_missing_network_identifier(self): + """ + Test that a DeviceCredential struct can be read from a data stream + missing the network identifier data. + """ + credential = objects.DeviceCredential() + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + credential.read(self.encoding_missing_network_identifier) + + self.assertEqual("serNum123456", credential.device_serial_number) + self.assertEqual("secret", credential.password) + self.assertEqual("devID2233", credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual("machineID1", credential.machine_identifier) + self.assertEqual("mediaID313", credential.media_identifier) + + def test_read_missing_machine_identifier(self): + """ + Test that a DeviceCredential struct can be read from a data stream + missing the machine identifier data. 
+ """ + credential = objects.DeviceCredential() + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + credential.read(self.encoding_missing_machine_identifier) + + self.assertEqual("serNum123456", credential.device_serial_number) + self.assertEqual("secret", credential.password) + self.assertEqual("devID2233", credential.device_identifier) + self.assertEqual("netID9000", credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual("mediaID313", credential.media_identifier) + + def test_read_missing_media_identifier(self): + """ + Test that a DeviceCredential struct can be read from a data stream + missing the media identifier data. + """ + credential = objects.DeviceCredential() + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + credential.read(self.encoding_missing_media_identifier) + + self.assertEqual("serNum123456", credential.device_serial_number) + self.assertEqual("secret", credential.password) + self.assertEqual("devID2233", credential.device_identifier) + self.assertEqual("netID9000", credential.network_identifier) + self.assertEqual("machineID1", credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + def test_read_missing_everything(self): + """ + Test that a DeviceCredential struct can be read from a data stream + missing all data. 
+ """ + credential = objects.DeviceCredential() + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + credential.read(self.empty_encoding) + + self.assertEqual(None, credential.device_serial_number) + self.assertEqual(None, credential.password) + self.assertEqual(None, credential.device_identifier) + self.assertEqual(None, credential.network_identifier) + self.assertEqual(None, credential.machine_identifier) + self.assertEqual(None, credential.media_identifier) + + def test_write(self): + """ + Test that a DeviceCredential struct can be written to a data stream. + """ + credential = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_missing_device_serial_number(self): + """ + Test that a DeviceCredential struct missing device serial number data + can be written to a data stream. 
+ """ + credential = objects.DeviceCredential( + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual( + len(self.encoding_missing_device_serial_number), + len(stream) + ) + self.assertEqual( + str(self.encoding_missing_device_serial_number), + str(stream) + ) + + def test_write_missing_password(self): + """ + Test that a DeviceCredential struct missing password data can be + written to a data stream. + """ + credential = objects.DeviceCredential( + device_serial_number="serNum123456", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual(len(self.encoding_missing_password), len(stream)) + self.assertEqual(str(self.encoding_missing_password), str(stream)) + + def test_write_missing_device_identifier(self): + """ + Test that a DeviceCredential struct missing device identifier data can + be written to a data stream. + """ + credential = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual( + len(self.encoding_missing_device_identifier), + len(stream) + ) + self.assertEqual( + str(self.encoding_missing_device_identifier), + str(stream) + ) + + def test_write_missing_network_identifier(self): + """ + Test that a DeviceCredential struct missing network identifier data + can be written to a data stream. 
+ """ + credential = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual( + len(self.encoding_missing_network_identifier), + len(stream) + ) + self.assertEqual( + str(self.encoding_missing_network_identifier), + str(stream) + ) + + def test_write_missing_machine_identifier(self): + """ + Test that a DeviceCredential struct missing machine identifier data + can be written to a data stream. + """ + credential = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + media_identifier="mediaID313" + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual( + len(self.encoding_missing_machine_identifier), + len(stream) + ) + self.assertEqual( + str(self.encoding_missing_machine_identifier), + str(stream) + ) + + def test_write_missing_media_identifier(self): + """ + Test that a DeviceCredential struct missing media identifier data can + be written to a data stream. + """ + credential = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1" + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual( + len(self.encoding_missing_media_identifier), + len(stream) + ) + self.assertEqual( + str(self.encoding_missing_media_identifier), + str(stream) + ) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + DeviceCredential structs with the same data. 
+ """ + a = objects.DeviceCredential() + b = objects.DeviceCredential() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + b = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_device_serial_number(self): + """ + Test that the equality operator returns False when comparing two + DeviceCredential structs with different device serial numbers. + """ + a = objects.DeviceCredential( + device_serial_number="serNum123456" + ) + b = objects.DeviceCredential( + device_serial_number="serNum654321" + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_password(self): + """ + Test that the equality operator returns False when comparing two + DeviceCredential structs with different passwords. + """ + a = objects.DeviceCredential( + password="secret" + ) + b = objects.DeviceCredential( + password="public" + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_device_identifier(self): + """ + Test that the equality operator returns False when comparing two + DeviceCredential structs with different device identifiers. + """ + a = objects.DeviceCredential( + device_identifier="devID2233" + ) + b = objects.DeviceCredential( + device_identifier="devID0011" + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_network_identifier(self): + """ + Test that the equality operator returns False when comparing two + DeviceCredential structs with different network identifiers. 
+ """ + a = objects.DeviceCredential( + network_identifier="netID9000" + ) + b = objects.DeviceCredential( + network_identifier="netID0999" + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_machine_identifier(self): + """ + Test that the inequality operator returns True when comparing two + DeviceCredential structs with different machine identifiers. + """ + a = objects.DeviceCredential( + machine_identifier="machineID1" + ) + b = objects.DeviceCredential( + machine_identifier="machineID2" + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_media_identifier(self): + """ + Test that the equality operator returns False when comparing two + DeviceCredential structs with different media identifiers. + """ + a = objects.DeviceCredential( + media_identifier="mediaID313" + ) + b = objects.DeviceCredential( + media_identifier="mediaID828" + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + DeviceCredential structs with different types. + """ + a = objects.DeviceCredential() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + DeviceCredential structs with the same data. 
+ """ + a = objects.DeviceCredential() + b = objects.DeviceCredential() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + b = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_device_serial_number(self): + """ + Test that the inequality operator returns True when comparing two + DeviceCredential structs with different device serial numbers. + """ + a = objects.DeviceCredential( + device_serial_number="serNum123456" + ) + b = objects.DeviceCredential( + device_serial_number="serNum654321" + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_password(self): + """ + Test that the inequality operator returns True when comparing two + DeviceCredential structs with different passwords. + """ + a = objects.DeviceCredential( + password="secret" + ) + b = objects.DeviceCredential( + password="public" + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_device_identifier(self): + """ + Test that the inequality operator returns True when comparing two + DeviceCredential structs with different device identifiers. + """ + a = objects.DeviceCredential( + device_identifier="devID2233" + ) + b = objects.DeviceCredential( + device_identifier="devID0011" + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_network_identifier(self): + """ + Test that the inequality operator returns True when comparing two + DeviceCredential structs with different network identifiers. 
+ """ + a = objects.DeviceCredential( + network_identifier="netID9000" + ) + b = objects.DeviceCredential( + network_identifier="netID0999" + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_machine_identifier(self): + """ + Test that the inequality operator returns True when comparing two + DeviceCredential structs with different machine identifiers. + """ + a = objects.DeviceCredential( + machine_identifier="machineID1" + ) + b = objects.DeviceCredential( + machine_identifier="machineID2" + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_media_identifier(self): + """ + Test that the inequality operator returns True when comparing two + DeviceCredential structs with different media identifiers. + """ + a = objects.DeviceCredential( + media_identifier="mediaID313" + ) + b = objects.DeviceCredential( + media_identifier="mediaID828" + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + DeviceCredential structs with different types. + """ + a = objects.DeviceCredential() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a DeviceCredential struct. 
+ """ + credential = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + expected = ( + "DeviceCredential(" + "device_serial_number='serNum123456', " + "password='secret', " + "device_identifier='devID2233', " + "network_identifier='netID9000', " + "machine_identifier='machineID1', " + "media_identifier='mediaID313')" + ) + observed = repr(credential) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a DeviceCredential struct. + """ + credential = objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + expected = str( + { + "device_serial_number": "serNum123456", + "password": "secret", + "device_identifier": "devID2233", + "network_identifier": "netID9000", + "machine_identifier": "machineID1", + "media_identifier": "mediaID313" + } + ) + observed = str(credential) + + self.assertEqual(expected, observed) + + +class TestAttestationCredential(testtools.TestCase): + """ + Test suite for the AttestationCredential struct. + """ + + def setUp(self): + super(TestAttestationCredential, self).setUp() + + # There are no AttestationCredential encodings available in any of the + # KMIP testing documents. The following encodings were adapted from + # other structure encodings present in the KMIP testing suite. 
+ # + # This encoding matches the following set of values: + # AttestationCredential + # Nonce + # Nonce ID - 1 + # Nonce Value - 0x0001020304050607 + # AttestationType - TPM Quote + # AttestationMeasurement - 0xFFFFFFFFFFFFFFFF + # AttestationAssertion - 0x1111111111111111 + self.full_encoding = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x58' + b'\x42\x00\xC8\x01\x00\x00\x00\x20' + b'\x42\x00\xC9\x08\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xCA\x08\x00\x00\x00\x08\x00\x01\x02\x03\x04\x05\x06\x07' + b'\x42\x00\xC7\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + b'\x42\x00\xCB\x08\x00\x00\x00\x08\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF' + b'\x42\x00\xCC\x08\x00\x00\x00\x08\x11\x11\x11\x11\x11\x11\x11\x11' + ) + self.encoding_missing_nonce = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x30' + b'\x42\x00\xC7\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + b'\x42\x00\xCB\x08\x00\x00\x00\x08\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF' + b'\x42\x00\xCC\x08\x00\x00\x00\x08\x11\x11\x11\x11\x11\x11\x11\x11' + ) + self.encoding_missing_attestation_type = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x48' + b'\x42\x00\xC8\x01\x00\x00\x00\x20' + b'\x42\x00\xC9\x08\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xCA\x08\x00\x00\x00\x08\x00\x01\x02\x03\x04\x05\x06\x07' + b'\x42\x00\xCB\x08\x00\x00\x00\x08\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF' + b'\x42\x00\xCC\x08\x00\x00\x00\x08\x11\x11\x11\x11\x11\x11\x11\x11' + ) + self.encoding_missing_attestation_measurement = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x48' + b'\x42\x00\xC8\x01\x00\x00\x00\x20' + b'\x42\x00\xC9\x08\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xCA\x08\x00\x00\x00\x08\x00\x01\x02\x03\x04\x05\x06\x07' + b'\x42\x00\xC7\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + b'\x42\x00\xCC\x08\x00\x00\x00\x08\x11\x11\x11\x11\x11\x11\x11\x11' + ) + self.encoding_missing_attestation_assertion = utils.BytearrayStream( + 
b'\x42\x00\x25\x01\x00\x00\x00\x48' + b'\x42\x00\xC8\x01\x00\x00\x00\x20' + b'\x42\x00\xC9\x08\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xCA\x08\x00\x00\x00\x08\x00\x01\x02\x03\x04\x05\x06\x07' + b'\x42\x00\xC7\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + b'\x42\x00\xCB\x08\x00\x00\x00\x08\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF' + ) + self.encoding_missing_attestation = utils.BytearrayStream( + b'\x42\x00\x25\x01\x00\x00\x00\x38' + b'\x42\x00\xC8\x01\x00\x00\x00\x20' + b'\x42\x00\xC9\x08\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xCA\x08\x00\x00\x00\x08\x00\x01\x02\x03\x04\x05\x06\x07' + b'\x42\x00\xC7\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestAttestationCredential, self).tearDown() + + def test_init(self): + """ + Test that an AttestationCredential struct can be constructed without + arguments. + """ + credential = objects.AttestationCredential() + + self.assertEqual(None, credential.nonce) + self.assertEqual(None, credential.attestation_type) + self.assertEqual(None, credential.attestation_measurement) + self.assertEqual(None, credential.attestation_assertion) + + def test_init_with_args(self): + """ + Test that an AttestationCredential struct can be constructed with + arguments. 
+ """ + credential = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + + self.assertEqual( + objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + credential.nonce + ) + self.assertEqual( + enums.AttestationType.TPM_QUOTE, + credential.attestation_type + ) + self.assertEqual( + b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + credential.attestation_measurement + ) + self.assertEqual( + b'\x11\x11\x11\x11\x11\x11\x11\x11', + credential.attestation_assertion + ) + + def test_invalid_nonce(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the nonce of an AttestationCredential struct. + """ + kwargs = {"nonce": "invalid"} + self.assertRaisesRegexp( + TypeError, + "Nonce must be a Nonce struct.", + objects.AttestationCredential, + **kwargs + ) + + credential = objects.AttestationCredential() + args = (credential, "nonce", 0) + self.assertRaisesRegexp( + TypeError, + "Nonce must be a Nonce struct.", + setattr, + *args + ) + + def test_invalid_attestation_type(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the attestation type of an AttestationCredential struct. 
+ """ + kwargs = {"attestation_type": "invalid"} + self.assertRaisesRegexp( + TypeError, + "Attestation type must be an AttestationType enumeration.", + objects.AttestationCredential, + **kwargs + ) + + credential = objects.AttestationCredential() + args = (credential, "attestation_type", 0) + self.assertRaisesRegexp( + TypeError, + "Attestation type must be an AttestationType enumeration.", + setattr, + *args + ) + + def test_invalid_attestation_measurement(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the attestation measurement of an AttestationCredential struct. + """ + kwargs = {"attestation_measurement": 0} + self.assertRaisesRegexp( + TypeError, + "Attestation measurement must be bytes.", + objects.AttestationCredential, + **kwargs + ) + + credential = objects.AttestationCredential() + args = (credential, "attestation_measurement", 0) + self.assertRaisesRegexp( + TypeError, + "Attestation measurement must be bytes.", + setattr, + *args + ) + + def test_invalid_attestation_assertion(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the attestation assertion of an AttestationCredential struct. + """ + kwargs = {"attestation_assertion": 0} + self.assertRaisesRegexp( + TypeError, + "Attestation assertion must be bytes.", + objects.AttestationCredential, + **kwargs + ) + + credential = objects.AttestationCredential() + args = (credential, "attestation_assertion", 0) + self.assertRaisesRegexp( + TypeError, + "Attestation assertion must be bytes.", + setattr, + *args + ) + + def test_read(self): + """ + Test that an AttestationCredential struct can be read from a data + stream. 
+ """ + credential = objects.AttestationCredential() + + self.assertEqual(None, credential.nonce) + self.assertEqual(None, credential.attestation_type) + self.assertEqual(None, credential.attestation_measurement) + self.assertEqual(None, credential.attestation_assertion) + + credential.read(self.full_encoding) + + self.assertEqual( + objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + credential.nonce + ) + self.assertEqual( + enums.AttestationType.TPM_QUOTE, + credential.attestation_type + ) + self.assertEqual( + b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + credential.attestation_measurement + ) + self.assertEqual( + b'\x11\x11\x11\x11\x11\x11\x11\x11', + credential.attestation_assertion + ) + + def test_read_missing_nonce(self): + """ + Test that a ValueError gets raised when attempting to read an + AttestationCredential struct from a data stream missing the nonce data. + """ + credential = objects.AttestationCredential() + + self.assertEqual(None, credential.nonce) + self.assertEqual(None, credential.attestation_type) + self.assertEqual(None, credential.attestation_measurement) + self.assertEqual(None, credential.attestation_assertion) + + args = (self.encoding_missing_nonce, ) + self.assertRaisesRegexp( + ValueError, + "Attestation credential encoding is missing the nonce.", + credential.read, + *args + ) + + def test_read_missing_attestation_type(self): + """ + Test that a ValueError gets raised when attempting to read an + AttestationCredential struct from a data stream missing the + attestation type data. 
+ """ + credential = objects.AttestationCredential() + + self.assertEqual(None, credential.nonce) + self.assertEqual(None, credential.attestation_type) + self.assertEqual(None, credential.attestation_measurement) + self.assertEqual(None, credential.attestation_assertion) + + args = (self.encoding_missing_attestation_type, ) + self.assertRaisesRegexp( + ValueError, + "Attestation credential encoding is missing the attestation type.", + credential.read, + *args + ) + + def test_read_missing_attestation_measurement(self): + """ + Test that an AttestationCredential struct can be read from a data + stream missing the attestation measurement data. + """ + credential = objects.AttestationCredential() + + self.assertEqual(None, credential.nonce) + self.assertEqual(None, credential.attestation_type) + self.assertEqual(None, credential.attestation_measurement) + self.assertEqual(None, credential.attestation_assertion) + + credential.read(self.encoding_missing_attestation_measurement) + + self.assertEqual( + objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + credential.nonce + ) + self.assertEqual( + enums.AttestationType.TPM_QUOTE, + credential.attestation_type + ) + self.assertEqual(None, credential.attestation_measurement) + self.assertEqual( + b'\x11\x11\x11\x11\x11\x11\x11\x11', + credential.attestation_assertion + ) + + def test_read_missing_attestation_assertion(self): + """ + Test that an AttestationCredential struct can be read from a data + stream missing the attestation assertion data. 
+ """ + + credential = objects.AttestationCredential() + + self.assertEqual(None, credential.nonce) + self.assertEqual(None, credential.attestation_type) + self.assertEqual(None, credential.attestation_measurement) + self.assertEqual(None, credential.attestation_assertion) + + credential.read(self.encoding_missing_attestation_assertion) + + self.assertEqual( + objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + credential.nonce + ) + self.assertEqual( + enums.AttestationType.TPM_QUOTE, + credential.attestation_type + ) + self.assertEqual( + b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + credential.attestation_measurement + ) + self.assertEqual(None, credential.attestation_assertion) + + def test_read_missing_attestation_measurement_and_assertion(self): + """ + Test that a ValueError gets raised when attempting to read an + AttestationCredential struct from a data stream missing both the + attestation measurement and attestation assertion data. + """ + credential = objects.AttestationCredential() + + self.assertEqual(None, credential.nonce) + self.assertEqual(None, credential.attestation_type) + self.assertEqual(None, credential.attestation_measurement) + self.assertEqual(None, credential.attestation_assertion) + + args = (self.encoding_missing_attestation, ) + self.assertRaisesRegexp( + ValueError, + "Attestation credential encoding is missing either the " + "attestation measurement or the attestation assertion.", + credential.read, + *args + ) + + def test_write(self): + """ + Test that an AttestationCredential struct can be written to a data + stream. 
+ """ + credential = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual(len(self.full_encoding), len(stream)) + self.assertEqual(str(self.full_encoding), str(stream)) + + def test_write_missing_nonce(self): + """ + Test that a ValueError gets raised when attempting to write an + AttestationCredential struct missing nonce data to a data stream. + """ + credential = objects.AttestationCredential( + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + stream = utils.BytearrayStream() + + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Attestation credential struct is missing the nonce.", + credential.write, + *args + ) + + def test_write_missing_attestation_type(self): + """ + Test that a ValueError gets raised when attempting to write an + AttestationCredential struct missing nonce data to a data stream. + """ + credential = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + stream = utils.BytearrayStream() + + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Attestation credential struct is missing the attestation type.", + credential.write, + *args + ) + + def test_write_missing_attestation_measurement(self): + """ + Test that an AttestationCredential struct can be written to a data + stream missing attestation measurement data. 
+ """ + credential = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual( + len(self.encoding_missing_attestation_measurement), + len(stream) + ) + self.assertEqual( + str(self.encoding_missing_attestation_measurement), + str(stream) + ) + + def test_write_missing_attestation_assertion(self): + """ + Test that an AttestationCredential struct can be written to a data + stream missing attestation assertion data. + """ + credential = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF' + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual( + len(self.encoding_missing_attestation_assertion), + len(stream) + ) + self.assertEqual( + str(self.encoding_missing_attestation_assertion), + str(stream) + ) + + def test_write_missing_attestation_measurement_and_assertion(self): + """ + Test that a ValueError gets raised when attempting to write an + AttestationCredential struct missing both attestation measurement and + attestation assertion data to a data stream. 
+ """ + credential = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE + ) + stream = utils.BytearrayStream() + + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Attestation credential struct is missing either the attestation " + "measurement or the attestation assertion.", + credential.write, + *args + ) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + AttestationCredential structs with the same data. + """ + a = objects.AttestationCredential() + b = objects.AttestationCredential() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + a = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + b = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_nonce(self): + """ + Test that the equality operator returns False when comparing two + AttestationCredential structs with different nonce values. 
+ """ + a = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ) + ) + b = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x02', + nonce_value=b'\x07\x06\x05\x04\x03\x02\x01\x00' + ) + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_attestation_type(self): + """ + Test that the equality operator returns False when comparing two + AttestationCredential structs with different attestation types. + """ + a = objects.AttestationCredential( + attestation_type=enums.AttestationType.TPM_QUOTE + ) + b = objects.AttestationCredential( + attestation_type=enums.AttestationType.SAML_ASSERTION + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_attestation_measurement(self): + """ + Test that the equality operator returns False when comparing two + AttestationCredential structs with different attestation measurements. + """ + a = objects.AttestationCredential( + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF' + ) + b = objects.AttestationCredential( + attestation_measurement=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_attestation_assertion(self): + """ + Test that the equality operator returns False when comparing two + AttestationCredential structs with different attestation assertions. + """ + a = objects.AttestationCredential( + attestation_assertion=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF' + ) + b = objects.AttestationCredential( + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + AttestationCredential structs with different types. 
+ """ + a = objects.AttestationCredential() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + AttestationCredential structs with the same data. + """ + a = objects.AttestationCredential() + b = objects.AttestationCredential() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + a = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + b = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_nonce(self): + """ + Test that the inequality operator returns True when comparing two + AttestationCredential structs with different nonce values. + """ + a = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ) + ) + b = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x02', + nonce_value=b'\x07\x06\x05\x04\x03\x02\x01\x00' + ) + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_attestation_type(self): + """ + Test that the inequality operator returns True when comparing two + AttestationCredential structs with different attestation types. 
+ """ + a = objects.AttestationCredential( + attestation_type=enums.AttestationType.TPM_QUOTE + ) + b = objects.AttestationCredential( + attestation_type=enums.AttestationType.SAML_ASSERTION + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_attestation_measurement(self): + """ + Test that the inequality operator returns True when comparing two + AttestationCredential structs with different attestation measurements. + """ + a = objects.AttestationCredential( + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF' + ) + b = objects.AttestationCredential( + attestation_measurement=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_attestation_assertion(self): + """ + Test that the inequality operator returns True when comparing two + AttestationCredential structs with different attestation assertions. + """ + a = objects.AttestationCredential( + attestation_assertion=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF' + ) + b = objects.AttestationCredential( + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + AttestationCredential structs with different types. + """ + a = objects.AttestationCredential() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to an AttestationCredential struct. 
+ """ + credential = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + expected = ( + "AttestationCredential(" + "nonce=Nonce(" + "nonce_id=" + str(b'\x01') + ", " + "nonce_value=" + str(b'\x00\x01\x02\x03\x04\x05\x06\x07') + "), " + "attestation_type=AttestationType.TPM_QUOTE, " + "attestation_measurement=" + + str(b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF') + ", " + "attestation_assertion=" + + str(b'\x11\x11\x11\x11\x11\x11\x11\x11') + ")" + ) + observed = repr(credential) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to an AttestationCredential struct. + """ + credential = objects.AttestationCredential( + nonce=objects.Nonce( + nonce_id=b'\x01', + nonce_value=b'\x00\x01\x02\x03\x04\x05\x06\x07' + ), + attestation_type=enums.AttestationType.TPM_QUOTE, + attestation_measurement=b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF', + attestation_assertion=b'\x11\x11\x11\x11\x11\x11\x11\x11' + ) + expected = "{" \ + "'nonce': {" \ + "'nonce_id': " + str(b'\x01') + ", " \ + "'nonce_value': " + \ + str(b'\x00\x01\x02\x03\x04\x05\x06\x07') + "}, " \ + "'attestation_type': " + \ + str(enums.AttestationType.TPM_QUOTE) + ", " \ + "'attestation_measurement': " + \ + str(b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF') + ", " \ + "'attestation_assertion': " + \ + str(b'\x11\x11\x11\x11\x11\x11\x11\x11') + "}" + observed = str(credential) + + self.assertEqual(expected, observed) + + +class TestCredential(testtools.TestCase): + """ + Test suite for the Credential struct. + """ + + def setUp(self): + super(TestCredential, self).setUp() + + # Encoding obtained from the KMIP 1.1 testing document, Section 11.1. 
+ # + # This encoding matches the following set of values: + # Credential + # CredentialType - Username and Password + # CredentialValue + # Username - Fred + # Password - password1 + self.username_password_encoding = utils.BytearrayStream( + b'\x42\x00\x23\x01\x00\x00\x00\x40' + b'\x42\x00\x24\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + b'\x42\x00\x25\x01\x00\x00\x00\x28' + b'\x42\x00\x99\x07\x00\x00\x00\x04' + b'\x46\x72\x65\x64\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x09' + b'\x70\x61\x73\x73\x77\x6F\x72\x64\x31\x00\x00\x00\x00\x00\x00\x00' + ) + self.encoding_missing_credential_type = utils.BytearrayStream( + b'\x42\x00\x23\x01\x00\x00\x00\x30' + b'\x42\x00\x25\x01\x00\x00\x00\x28' + b'\x42\x00\x99\x07\x00\x00\x00\x04' + b'\x46\x72\x65\x64\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x09' + b'\x70\x61\x73\x73\x77\x6F\x72\x64\x31\x00\x00\x00\x00\x00\x00\x00' + ) + self.encoding_missing_credential_value = utils.BytearrayStream( + b'\x42\x00\x23\x01\x00\x00\x00\x10' + b'\x42\x00\x24\x05\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00' + ) + self.encoding_unknown_credential_type = utils.BytearrayStream( + b'\x42\x00\x23\x01\x00\x00\x00\x40' + b'\x42\x00\x24\x05\x00\x00\x00\x04\x00\x00\x00\xFF\x00\x00\x00\x00' + b'\x42\x00\x25\x01\x00\x00\x00\x28' + b'\x42\x00\x99\x07\x00\x00\x00\x04' + b'\x46\x72\x65\x64\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x09' + b'\x70\x61\x73\x73\x77\x6F\x72\x64\x31\x00\x00\x00\x00\x00\x00\x00' + ) + + # Encoding obtained from the KMIP 1.1 testing document, Section 11.2. 
+ # + # This encoding matches the following set of values: + # Credential + # CredentialType - Device + # CredentialValue + # Device Serial Number - serNum123456 + # Password - secret + # Device Identifier - devID2233 + # Network Identifier - netID9000 + # Machine Identifier - machineID1 + # Media Identifier - mediaID313 + self.device_encoding = utils.BytearrayStream( + b'\x42\x00\x23\x01\x00\x00\x00\xA0' + b'\x42\x00\x24\x05\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00' + b'\x42\x00\x25\x01\x00\x00\x00\x88' + b'\x42\x00\xB0\x07\x00\x00\x00\x0C' + b'\x73\x65\x72\x4E\x75\x6D\x31\x32\x33\x34\x35\x36\x00\x00\x00\x00' + b'\x42\x00\xA1\x07\x00\x00\x00\x06' + b'\x73\x65\x63\x72\x65\x74\x00\x00' + b'\x42\x00\xA2\x07\x00\x00\x00\x09' + b'\x64\x65\x76\x49\x44\x32\x32\x33\x33\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAB\x07\x00\x00\x00\x09' + b'\x6E\x65\x74\x49\x44\x39\x30\x30\x30\x00\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xA9\x07\x00\x00\x00\x0A' + b'\x6D\x61\x63\x68\x69\x6E\x65\x49\x44\x31\x00\x00\x00\x00\x00\x00' + b'\x42\x00\xAA\x07\x00\x00\x00\x0A' + b'\x6D\x65\x64\x69\x61\x49\x44\x33\x31\x33\x00\x00\x00\x00\x00\x00' + ) + + def tearDown(self): + super(TestCredential, self).tearDown() + + def test_init(self): + """ + Test that a Credential struct can be constructed without arguments. + """ + credential = objects.Credential() + + self.assertEqual(None, credential.credential_type) + self.assertEqual(None, credential.credential_value) + + def test_init_with_args(self): + """ + Test that a Credential struct can be constructed with arguments. 
+ """ + credential = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="John", + password="abc123" + ) + ) + + self.assertEqual( + enums.CredentialType.USERNAME_AND_PASSWORD, + credential.credential_type + ) + self.assertEqual( + objects.UsernamePasswordCredential( + username="John", + password="abc123" + ), + credential.credential_value + ) + + def test_invalid_credential_type(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the credential type of a Credential struct. + """ + kwargs = {"credential_type": "invalid"} + self.assertRaisesRegexp( + TypeError, + "Credential type must be a CredentialType enumeration.", + objects.Credential, + **kwargs + ) + + credential = objects.Credential() + args = (credential, "credential_type", 0) + self.assertRaisesRegexp( + TypeError, + "Credential type must be a CredentialType enumeration.", + setattr, + *args + ) + + def test_invalid_credential_value(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the credential value of a Credential struct. + """ + kwargs = {"credential_value": "invalid"} + self.assertRaisesRegexp( + TypeError, + "Credential value must be a CredentialValue struct.", + objects.Credential, + **kwargs + ) + + credential = objects.Credential() + args = (credential, "credential_value", 0) + self.assertRaisesRegexp( + TypeError, + "Credential value must be a CredentialValue struct.", + setattr, + *args + ) + + def test_read(self): + """ + Test that a Credential struct can be read from a data stream. + """ + # Test with a UsernamePasswordCredential. 
+ credential = objects.Credential() + + self.assertEqual(None, credential.credential_type) + self.assertEqual(None, credential.credential_value) + + credential.read(self.username_password_encoding) + + self.assertEqual( + enums.CredentialType.USERNAME_AND_PASSWORD, + credential.credential_type + ) + self.assertEqual( + objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ), + credential.credential_value + ) + + # Test with a DeviceCredential + credential = objects.Credential() + + self.assertEqual(None, credential.credential_type) + self.assertEqual(None, credential.credential_value) + + credential.read(self.device_encoding) + + self.assertEqual( + enums.CredentialType.DEVICE, + credential.credential_type + ) + self.assertEqual( + objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ), + credential.credential_value + ) + + def test_read_missing_credential_type(self): + """ + Test that a ValueError gets raised when attempting to read a + Credential struct from a data stream missing the credential type data. + """ + credential = objects.Credential() + + self.assertEqual(None, credential.credential_type) + self.assertEqual(None, credential.credential_value) + + args = (self.encoding_missing_credential_type, ) + self.assertRaisesRegexp( + ValueError, + "Credential encoding missing the credential type.", + credential.read, + *args + ) + + @mock.patch( + 'kmip.core.enums.CredentialType', + enum.Enum( + 'FakeCredentialType', + [(i.name, i.value) for i in enums.CredentialType] + + [('UNKNOWN', 0x000000FF)] + ) + ) + def test_read_unknown_credential_type(self): + """ + Test that a ValueError gets raised when attempting to read a + Credential struct from a data stream with an unknown credential + type. 
+ """ + credential = objects.Credential() + + self.assertEqual(None, credential.credential_type) + self.assertEqual(None, credential.credential_value) + + args = (self.encoding_unknown_credential_type, ) + self.assertRaisesRegexp( + ValueError, + "Credential encoding includes unrecognized credential type.", + credential.read, + *args + ) + + def test_read_missing_credential_value(self): + """ + Test that a ValueError gets raised when attempting to read a + Credential struct from a data stream missing the credential value + data. + """ + credential = objects.Credential() + + self.assertEqual(None, credential.credential_type) + self.assertEqual(None, credential.credential_value) + + args = (self.encoding_missing_credential_value, ) + self.assertRaisesRegexp( + ValueError, + "Credential encoding missing the credential value.", + credential.read, + *args + ) + + def test_write(self): + """ + Test that a Credential struct can be written to a data stream. + """ + # Test with a UsernamePasswordCredential. + credential = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual(len(self.username_password_encoding), len(stream)) + self.assertEqual(str(self.username_password_encoding), str(stream)) + + # Test with a DeviceCredential. 
+ credential = objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + stream = utils.BytearrayStream() + + credential.write(stream) + + self.assertEqual(len(self.device_encoding), len(stream)) + self.assertEqual(str(self.device_encoding), str(stream)) + + def test_write_missing_credential_type(self): + """ + Test that a ValueError gets raised when attempting to write a + Credential struct missing credential type data to a data stream. + """ + credential = objects.Credential( + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + stream = utils.BytearrayStream() + + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Credential struct missing the credential type.", + credential.write, + *args + ) + + def test_write_missing_credential_value(self): + """ + Test that a ValueError gets raised when attempting to write a + Credential struct missing credential value data to a data stream. + """ + credential = objects.Credential( + credential_type=enums.CredentialType.DEVICE + ) + stream = utils.BytearrayStream() + + args = (stream, ) + self.assertRaisesRegexp( + ValueError, + "Credential struct missing the credential value.", + credential.write, + *args + ) + + def test_equal_on_equal(self): + """ + Test that the equality operator returns True when comparing two + Credential structs with the same data. + """ + a = objects.Credential() + b = objects.Credential() + + self.assertTrue(a == b) + self.assertTrue(b == a) + + # Test with a UsernamePasswordCredential. 
+ a = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + b = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + # Test with a DeviceCredential. + a = objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + b = objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + + self.assertTrue(a == b) + self.assertTrue(b == a) + + def test_equal_on_not_equal_credential_type(self): + """ + Test that the equality operator returns False when comparing two + Credential structs with different credential types. + """ + a = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD + ) + b = objects.Credential( + credential_type=enums.CredentialType.DEVICE + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_not_equal_credential_value(self): + """ + Test that the equality operator returns False when comparing two + Credential structs with different credential values. 
+ """ + a = objects.Credential( + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + b = objects.Credential( + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_equal_on_type_mismatch(self): + """ + Test that the equality operator returns False when comparing two + Credential structs with different types. + """ + a = objects.Credential() + b = 'invalid' + + self.assertFalse(a == b) + self.assertFalse(b == a) + + def test_not_equal_on_equal(self): + """ + Test that the inequality operator returns False when comparing two + Credential structs with the same data. + """ + a = objects.Credential() + b = objects.Credential() + + self.assertFalse(a != b) + self.assertFalse(b != a) + + # Test with a UsernamePasswordCredential. + a = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + b = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + # Test with a DeviceCredential. 
+ a = objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + b = objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + + self.assertFalse(a != b) + self.assertFalse(b != a) + + def test_not_equal_on_not_equal_credential_type(self): + """ + Test that the inequality operator returns True when comparing two + Credential structs with different credential types. + """ + a = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD + ) + b = objects.Credential( + credential_type=enums.CredentialType.DEVICE + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_not_equal_credential_value(self): + """ + Test that the inequality operator returns True when comparing two + Credential structs with different credential values. + """ + a = objects.Credential( + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + b = objects.Credential( + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_not_equal_on_type_mismatch(self): + """ + Test that the inequality operator returns True when comparing two + Credential structs with different types. 
+ """ + a = objects.Credential() + b = 'invalid' + + self.assertTrue(a != b) + self.assertTrue(b != a) + + def test_repr(self): + """ + Test that repr can be applied to a Credential struct. + """ + # Test with a UsernamePasswordCredential. + credential = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + expected = ( + "Credential(" + "credential_type=CredentialType.USERNAME_AND_PASSWORD, " + "credential_value=UsernamePasswordCredential(" + "username='Fred', " + "password='password1'))" + ) + observed = repr(credential) + + self.assertEqual(expected, observed) + + # Test with a DeviceCredential. + credential = objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + expected = ( + "Credential(" + "credential_type=CredentialType.DEVICE, " + "credential_value=DeviceCredential(" + "device_serial_number='serNum123456', " + "password='secret', " + "device_identifier='devID2233', " + "network_identifier='netID9000', " + "machine_identifier='machineID1', " + "media_identifier='mediaID313'))" + ) + observed = repr(credential) + + self.assertEqual(expected, observed) + + def test_str(self): + """ + Test that str can be applied to a Credential struct. + """ + # Test with a UsernamePasswordCredential. 
+ credential = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="Fred", + password="password1" + ) + ) + expected = str({ + "credential_type": enums.CredentialType.USERNAME_AND_PASSWORD, + "credential_value": str({ + "username": "Fred", + "password": "password1" + }) + }) + observed = str(credential) + + self.assertEqual(expected, observed) + + # Test with a DeviceCredential. + credential = objects.Credential( + credential_type=enums.CredentialType.DEVICE, + credential_value=objects.DeviceCredential( + device_serial_number="serNum123456", + password="secret", + device_identifier="devID2233", + network_identifier="netID9000", + machine_identifier="machineID1", + media_identifier="mediaID313" + ) + ) + expected = str({ + "credential_type": enums.CredentialType.DEVICE, + "credential_value": str({ + "device_serial_number": "serNum123456", + "password": "secret", + "device_identifier": "devID2233", + "network_identifier": "netID9000", + "machine_identifier": "machineID1", + "media_identifier": "mediaID313" + }) + }) + observed = str(credential) + + self.assertEqual(expected, observed) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/primitives/test_base.py python-pykmip-0.8.0/kmip/tests/unit/core/primitives/test_base.py --- python-pykmip-0.7.0/kmip/tests/unit/core/primitives/test_base.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/primitives/test_base.py 2017-12-08 17:36:18.000000000 +0000 @@ -15,7 +15,7 @@ import testtools -from kmip.core import errors +from kmip.core import exceptions from kmip.core import primitives from kmip.core import utils @@ -26,11 +26,11 @@ super(TestBase, self).setUp() self.stream = utils.BytearrayStream() self.bad_init = 'Bad Base initialization: attribute {0} missing' - self.bad_write = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_write = exceptions.ErrorStrings.BAD_EXP_RECV.format( 
'primitives.Base.{0}', 'write', '{1}', '{2}') - self.bad_encoding = errors.ErrorStrings.BAD_ENCODING.format( + self.bad_encoding = exceptions.ErrorStrings.BAD_ENCODING.format( 'primitives.Base.{0}', 'write') - self.bad_match = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_match = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.Base.{0}', 'comparison', '{1}', '{2}') def tearDown(self): @@ -44,7 +44,10 @@ self.stream.write(b'\x00') base = primitives.Base() self.assertRaises( - errors.StreamNotEmptyError, base.is_oversized, self.stream) + exceptions.StreamNotEmptyError, + base.is_oversized, + self.stream + ) def test_read_tag(self): encoding = (b'\x42\x00\x00') @@ -56,7 +59,11 @@ encoding = (b'\x42\x00\x01') base = primitives.Base() self.stream = utils.BytearrayStream(encoding) - self.assertRaises(errors.ReadValueError, base.read_tag, self.stream) + self.assertRaises( + exceptions.ReadValueError, + base.read_tag, + self.stream + ) def test_read_type(self): self.stream.write(b'\x00') @@ -66,11 +73,19 @@ def test_read_type_error(self): self.stream.write(b'\x01') base = primitives.Base() - self.assertRaises(errors.ReadValueError, base.read_type, self.stream) + self.assertRaises( + exceptions.ReadValueError, + base.read_type, + self.stream + ) def test_read_type_underflow(self): base = primitives.Base() - self.assertRaises(errors.ReadValueError, base.read_type, self.stream) + self.assertRaises( + exceptions.ReadValueError, + base.read_type, + self.stream + ) def test_read_type_overflow(self): self.stream.write(b'\x00\x00') @@ -87,7 +102,11 @@ self.stream.write(b'\x00') base = primitives.Base() base.length = 4 - self.assertRaises(errors.ReadValueError, base.read_length, self.stream) + self.assertRaises( + exceptions.ReadValueError, + base.read_length, + self.stream + ) def test_read_length_overflow(self): self.stream.write(b'\x00\x00\x00\x04\x00') diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/primitives/test_byte_string.py 
python-pykmip-0.8.0/kmip/tests/unit/core/primitives/test_byte_string.py --- python-pykmip-0.7.0/kmip/tests/unit/core/primitives/test_byte_string.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/primitives/test_byte_string.py 2017-12-08 17:36:18.000000000 +0000 @@ -15,7 +15,7 @@ import testtools -from kmip.core import errors +from kmip.core import exceptions from kmip.core import primitives from kmip.core import utils @@ -25,17 +25,17 @@ def setUp(self): super(TestByteString, self).setUp() self.stream = utils.BytearrayStream() - self.bad_type = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_type = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.ByteString.{0}', 'type', '{1}', '{2}') - self.bad_value = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_value = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.ByteString.{0}', 'value', '{1}', '{2}') - self.bad_read = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_read = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.ByteString.{0}', '', '{1}', '{2}') - self.bad_write = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_write = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.ByteString.{0}', 'write', '{1}', '{2}') - self.bad_encoding = errors.ErrorStrings.BAD_ENCODING.format( + self.bad_encoding = exceptions.ErrorStrings.BAD_ENCODING.format( 'primitives.ByteString', '') - self.bad_length = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_length = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.ByteString', 'length', '{0} bytes', '{1} bytes') def tearDown(self): @@ -142,7 +142,7 @@ self.stream = utils.BytearrayStream(encoding) bs = primitives.ByteString() - self.assertRaises(errors.ReadValueError, bs.read, self.stream) + self.assertRaises(exceptions.ReadValueError, bs.read, self.stream) def test_write_value(self): encoding = b'\x01\x02\x03\x00\x00\x00\x00\x00' diff -Nru 
python-pykmip-0.7.0/kmip/tests/unit/core/primitives/test_integer.py python-pykmip-0.8.0/kmip/tests/unit/core/primitives/test_integer.py --- python-pykmip-0.7.0/kmip/tests/unit/core/primitives/test_integer.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/primitives/test_integer.py 2017-12-08 17:36:18.000000000 +0000 @@ -15,7 +15,7 @@ import testtools -from kmip.core import errors +from kmip.core import exceptions from kmip.core import primitives from kmip.core import utils @@ -146,7 +146,11 @@ self.stream = utils.BytearrayStream(encoding) i = primitives.Integer() - self.assertRaises(errors.ReadValueError, i.read, self.stream) + self.assertRaises( + exceptions.ReadValueError, + i.read, + self.stream + ) def test_read_on_invalid_padding(self): encoding = ( @@ -155,7 +159,11 @@ self.stream = utils.BytearrayStream(encoding) i = primitives.Integer() - self.assertRaises(errors.ReadValueError, i.read, self.stream) + self.assertRaises( + exceptions.ReadValueError, + i.read, + self.stream + ) def test_write_value(self): encoding = (b'\x00\x00\x00\x01\x00\x00\x00\x00') diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/primitives/test_text_string.py python-pykmip-0.8.0/kmip/tests/unit/core/primitives/test_text_string.py --- python-pykmip-0.7.0/kmip/tests/unit/core/primitives/test_text_string.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/primitives/test_text_string.py 2017-12-08 17:36:18.000000000 +0000 @@ -16,7 +16,7 @@ import six import testtools -from kmip.core import errors +from kmip.core import exceptions from kmip.core import primitives from kmip.core import utils @@ -26,17 +26,17 @@ def setUp(self): super(TestTextString, self).setUp() self.stream = utils.BytearrayStream() - self.bad_type = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_type = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.TextString.{0}', 'type', '{1}', '{2}') - self.bad_value = 
errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_value = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.TextString.{0}', 'value', '{1}', '{2}') - self.bad_read = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_read = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.TextString.{0}', '', '{1}', '{2}') - self.bad_write = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_write = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.TextString.{0}', 'write', '{1}', '{2}') - self.bad_encoding = errors.ErrorStrings.BAD_ENCODING.format( + self.bad_encoding = exceptions.ErrorStrings.BAD_ENCODING.format( 'primitives.TextString', '') - self.bad_length = errors.ErrorStrings.BAD_EXP_RECV.format( + self.bad_length = exceptions.ErrorStrings.BAD_EXP_RECV.format( 'primitives.TextString', 'length', '{0} bytes', '{1} bytes') def tearDown(self): @@ -139,7 +139,7 @@ self.stream = utils.BytearrayStream(encoding) ts = primitives.TextString() - self.assertRaises(errors.ReadValueError, ts.read, self.stream) + self.assertRaises(exceptions.ReadValueError, ts.read, self.stream) def test_write_value(self): encoding = ( diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/test_policy.py python-pykmip-0.8.0/kmip/tests/unit/core/test_policy.py --- python-pykmip-0.7.0/kmip/tests/unit/core/test_policy.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/test_policy.py 2018-05-17 22:30:38.000000000 +0000 @@ -32,14 +32,82 @@ def tearDown(self): super(TestPolicy, self).tearDown() + def test_parse_policy(self): + """ + Test that parsing a text-based policy works correctly. 
+ """ + object_policy = {"CERTIFICATE": {"LOCATE": "ALLOW_ALL"}} + observed = policy.parse_policy(object_policy) + + expected = { + enums.ObjectType.CERTIFICATE: { + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL + } + } + + self.assertEqual(expected, observed) + + def test_parse_policy_with_bad_object_type(self): + """ + Test that policy parsing correctly handles an invalid object type + string. + """ + object_policy = {"INVALID": {"LOCATE": "ALLOW_ALL"}} + + args = (object_policy, ) + regex = "'INVALID' is not a valid ObjectType value." + self.assertRaisesRegexp( + ValueError, + regex, + policy.parse_policy, + *args + ) + + def test_parse_policy_with_bad_operation(self): + """ + Test that policy parsing correctly handles an invalid operation string. + """ + object_policy = {"CERTIFICATE": {"INVALID": "ALLOW_ALL"}} + + args = (object_policy, ) + regex = "'INVALID' is not a valid Operation value." + self.assertRaisesRegexp( + ValueError, + regex, + policy.parse_policy, + *args + ) + + def test_parse_policy_with_bad_permission(self): + """ + Test that policy parsing correctly handles an invalid permission + string. + """ + object_policy = {"CERTIFICATE": {"LOCATE": "INVALID"}} + + args = (object_policy, ) + regex = "'INVALID' is not a valid Policy value." + self.assertRaisesRegexp( + ValueError, + regex, + policy.parse_policy, + *args + ) + def test_read_policy_from_file(self): + """ + Test that reading a policy file works correctly. 
+ """ policy_file = tempfile.NamedTemporaryFile( dir=self.temp_dir, delete=False ) with open(policy_file.name, 'w') as f: f.write( - '{"test": {"CERTIFICATE": {"LOCATE": "ALLOW_ALL"}}}' + '{"test": {' + '"groups": {"group_A": {"SPLIT_KEY": {"GET": "ALLOW_ALL"}}}, ' + '"preset": {"SPLIT_KEY": {"GET": "ALLOW_ALL"}}}' + '}' ) policies = policy.read_policy_from_file(policy_file.name) @@ -47,43 +115,102 @@ self.assertEqual(1, len(policies)) self.assertIn('test', policies.keys()) - test_policy = { - enums.ObjectType.CERTIFICATE: { - enums.Operation.LOCATE: enums.Policy.ALLOW_ALL + expected = { + 'groups': { + 'group_A': { + enums.ObjectType.SPLIT_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL + } + } + }, + 'preset': { + enums.ObjectType.SPLIT_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL + } } } - self.assertEqual(test_policy, policies.get('test')) + self.assertEqual(expected, policies.get('test')) - def test_read_policy_from_file_empty(self): + def test_read_policy_from_file_groups_only(self): + """ + Test that reading a policy file with only a groups section works + correctly. + """ policy_file = tempfile.NamedTemporaryFile( dir=self.temp_dir, delete=False ) with open(policy_file.name, 'w') as f: - f.write('') + f.write( + '{"test": ' + '{"groups": {"group_A": {"SPLIT_KEY": {"GET": "ALLOW_ALL"}}}}}' + ) - args = (policy_file.name, ) - regex = "An error occurred while attempting to parse the JSON file." - self.assertRaisesRegexp( - ValueError, - regex, - policy.read_policy_from_file, - *args + policies = policy.read_policy_from_file(policy_file.name) + + self.assertEqual(1, len(policies)) + self.assertIn('test', policies.keys()) + + expected = { + 'groups': { + 'group_A': { + enums.ObjectType.SPLIT_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL + } + } + } + } + + self.assertEqual(expected, policies.get('test')) + + def test_read_policy_from_file_default_only(self): + """ + Test that reading a policy file with only a preset section works + correctly. 
+ """ + policy_file = tempfile.NamedTemporaryFile( + dir=self.temp_dir, + delete=False ) + with open(policy_file.name, 'w') as f: + f.write( + '{"test": ' + '{"preset": {"SPLIT_KEY": {"GET": "ALLOW_ALL"}}}}' + ) + + policies = policy.read_policy_from_file(policy_file.name) - def test_read_policy_from_file_bad_object_type(self): + self.assertEqual(1, len(policies)) + self.assertIn('test', policies.keys()) + + expected = { + 'preset': { + enums.ObjectType.SPLIT_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL + } + } + } + + self.assertEqual(expected, policies.get('test')) + + def test_read_policy_from_file_invalid_section(self): + """ + Test that reading a policy file with an invalid section generates + the right error. + """ policy_file = tempfile.NamedTemporaryFile( dir=self.temp_dir, delete=False ) with open(policy_file.name, 'w') as f: f.write( - '{"test": {"INVALID": {"LOCATE": "ALLOW_ALL"}}}' + '{"test": {' + '"invalid": {"group_A": {"SPLIT_KEY": {"GET": "ALLOW_ALL"}}}}}' ) args = (policy_file.name, ) - regex = "'INVALID' is not a valid ObjectType value." + regex = "Policy 'test' contains an invalid section named: invalid" self.assertRaisesRegexp( ValueError, regex, @@ -91,18 +218,51 @@ *args ) - def test_read_policy_from_file_bad_operation(self): + def test_read_policy_from_file_legacy(self): + """ + Test that reading a legacy policy file works correctly. + + Note: legacy policy file support may be removed in the future. 
+ """ policy_file = tempfile.NamedTemporaryFile( dir=self.temp_dir, delete=False ) with open(policy_file.name, 'w') as f: f.write( - '{"test": {"CERTIFICATE": {"INVALID": "ALLOW_ALL"}}}' + '{"test": {"CERTIFICATE": {"LOCATE": "ALLOW_ALL"}}}' ) + policies = policy.read_policy_from_file(policy_file.name) + + self.assertEqual(1, len(policies)) + self.assertIn('test', policies.keys()) + + expected = { + 'preset': { + enums.ObjectType.CERTIFICATE: { + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL + } + } + } + + self.assertEqual(expected, policies.get('test')) + + def test_read_policy_from_file_empty(self): + """ + Test that reading an empty policy file generates the right error. + """ + policy_file = tempfile.NamedTemporaryFile( + dir=self.temp_dir, + delete=False + ) + with open(policy_file.name, 'w') as f: + f.write('') + args = (policy_file.name, ) - regex = "'INVALID' is not a valid Operation value." + regex = "Loading the policy file '{}' generated a JSON error:".format( + policy_file.name + ) self.assertRaisesRegexp( ValueError, regex, @@ -110,21 +270,19 @@ *args ) - def test_read_policy_from_file_bad_permission(self): + def test_read_policy_from_file_empty_policy(self): + """ + Test that reading a file with an empty policy is handled correctly. + """ policy_file = tempfile.NamedTemporaryFile( dir=self.temp_dir, delete=False ) with open(policy_file.name, 'w') as f: f.write( - '{"test": {"CERTIFICATE": {"LOCATE": "INVALID"}}}' + '{"test": {}}' ) - args = (policy_file.name, ) - regex = "'INVALID' is not a valid Policy value." 
- self.assertRaisesRegexp( - ValueError, - regex, - policy.read_policy_from_file, - *args - ) + policies = policy.read_policy_from_file(policy_file.name) + + self.assertEqual(0, len(policies)) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/core/test_utils.py python-pykmip-0.8.0/kmip/tests/unit/core/test_utils.py --- python-pykmip-0.7.0/kmip/tests/unit/core/test_utils.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/core/test_utils.py 2017-12-08 17:36:18.000000000 +0000 @@ -15,8 +15,7 @@ from testtools import TestCase -from kmip.core.errors import ErrorStrings - +from kmip.core import exceptions from kmip.core import utils @@ -62,12 +61,24 @@ super(TestBytearrayStream, self).setUp() self.stream = utils.BytearrayStream() - self.bad_type = ErrorStrings.BAD_EXP_RECV.format('BytearrayStream.{0}', - 'type', '{1}', '{2}') - self.bad_len = ErrorStrings.BAD_EXP_RECV.format('BytearrayStream.{0}', - 'length', '{1}', '{2}') - self.bad_val = ErrorStrings.BAD_EXP_RECV.format('BytearrayStream.{0}', - 'value', '{1}', '{2}') + self.bad_type = exceptions.ErrorStrings.BAD_EXP_RECV.format( + 'BytearrayStream.{0}', + 'type', + '{1}', + '{2}' + ) + self.bad_len = exceptions.ErrorStrings.BAD_EXP_RECV.format( + 'BytearrayStream.{0}', + 'length', + '{1}', + '{2}' + ) + self.bad_val = exceptions.ErrorStrings.BAD_EXP_RECV.format( + 'BytearrayStream.{0}', + 'value', + '{1}', + '{2}' + ) def tearDown(self): super(TestBytearrayStream, self).tearDown() diff -Nru python-pykmip-0.7.0/kmip/tests/unit/pie/test_client.py python-pykmip-0.8.0/kmip/tests/unit/pie/test_client.py --- python-pykmip-0.7.0/kmip/tests/unit/pie/test_client.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/pie/test_client.py 2018-04-16 18:14:10.000000000 +0000 @@ -450,12 +450,8 @@ enums.AttributeType.CRYPTOGRAPHIC_ALGORITHM, algorithm) length_attribute = self.attribute_factory.create_attribute( enums.AttributeType.CRYPTOGRAPHIC_LENGTH, length) - mask_attribute = 
self.attribute_factory.create_attribute( - enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK, - [enums.CryptographicUsageMask.ENCRYPT, - enums.CryptographicUsageMask.DECRYPT]) - attributes = [algorithm_attribute, length_attribute, mask_attribute] + attributes = [algorithm_attribute, length_attribute] template = obj.CommonTemplateAttribute(attributes=attributes) status = enums.ResultStatus.SUCCESS @@ -470,7 +466,9 @@ client.proxy.create_key_pair.return_value = result public_uid, private_uid = client.create_key_pair( - enums.CryptographicAlgorithm.RSA, 2048) + enums.CryptographicAlgorithm.RSA, + 2048 + ) kwargs = {'common_template_attribute': template, 'private_key_template_attribute': None, @@ -494,20 +492,15 @@ enums.AttributeType.CRYPTOGRAPHIC_ALGORITHM, algorithm) length_attribute = self.attribute_factory.create_attribute( enums.AttributeType.CRYPTOGRAPHIC_LENGTH, length) - mask_attribute = self.attribute_factory.create_attribute( - enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK, - [enums.CryptographicUsageMask.ENCRYPT, - enums.CryptographicUsageMask.DECRYPT]) opn_attribute = self.attribute_factory.create_attribute( enums.AttributeType.OPERATION_POLICY_NAME, 'test' ) pair_attributes = [ + opn_attribute, algorithm_attribute, - length_attribute, - mask_attribute, - opn_attribute + length_attribute ] template = obj.CommonTemplateAttribute(attributes=pair_attributes) @@ -548,10 +541,6 @@ enums.AttributeType.CRYPTOGRAPHIC_ALGORITHM, algorithm) length_attribute = self.attribute_factory.create_attribute( enums.AttributeType.CRYPTOGRAPHIC_LENGTH, length) - mask_attribute = self.attribute_factory.create_attribute( - enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK, - [enums.CryptographicUsageMask.ENCRYPT, - enums.CryptographicUsageMask.DECRYPT]) private_name_attribute = self.attribute_factory.create_attribute( enums.AttributeType.NAME, "private") @@ -560,8 +549,8 @@ pair_attributes = [ algorithm_attribute, - length_attribute, - mask_attribute] + length_attribute + ] template = 
obj.CommonTemplateAttribute(attributes=pair_attributes) private_template = obj.PrivateKeyTemplateAttribute( @@ -607,10 +596,6 @@ enums.AttributeType.CRYPTOGRAPHIC_ALGORITHM, algorithm) length_attribute = self.attribute_factory.create_attribute( enums.AttributeType.CRYPTOGRAPHIC_LENGTH, length) - mask_attribute = self.attribute_factory.create_attribute( - enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK, - [enums.CryptographicUsageMask.ENCRYPT, - enums.CryptographicUsageMask.DECRYPT]) private_usage_mask = self.attribute_factory.create_attribute( enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK, @@ -623,8 +608,8 @@ pair_attributes = [ algorithm_attribute, - length_attribute, - mask_attribute] + length_attribute + ] template = obj.CommonTemplateAttribute(attributes=pair_attributes) private_template = obj.PrivateKeyTemplateAttribute( @@ -717,6 +702,273 @@ KmipOperationFailure, error_msg, client.create_key_pair, *args) + @mock.patch( + 'kmip.pie.client.KMIPProxy', mock.MagicMock(spec_set=KMIPProxy) + ) + def test_rekey(self): + """ + Test that the client can rekey an object. + """ + result = { + 'unique_identifier': '2', + 'result_status': enums.ResultStatus.SUCCESS + } + + client = ProxyKmipClient() + client.open() + client.proxy.rekey.return_value = result + + checked_id = client.rekey( + uid='1', + offset=0, + activation_date=1000000, + process_start_date=1000001, + protect_stop_date=1000002, + deactivation_date=1000003 + ) + + self.assertEqual('2', checked_id) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_rekey_on_invalid_unique_identifier(self): + """ + Test that a TypeError exception is raised when trying to rekey an + object with an invalid unique identifier. 
+ """ + kwargs = {'uid': 0} + with ProxyKmipClient() as client: + self.assertRaisesRegexp( + TypeError, + "The unique identifier must be a string.", + client.rekey, + **kwargs + ) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_rekey_on_invalid_offset(self): + """ + Test that a TypeError exception is raised when trying to rekey an + object with an invalid offset. + """ + kwargs = {'offset': 'invalid'} + with ProxyKmipClient() as client: + self.assertRaisesRegexp( + TypeError, + "The offset must be an integer.", + client.rekey, + **kwargs + ) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_rekey_on_closed(self): + """ + Test that a ClientConnectionNotOpen exception is raised when trying + to rekey an object on an unopened client connection. + """ + client = ProxyKmipClient() + kwargs = { + 'uid': '1', + 'offset': 10 + } + + self.assertRaises( + ClientConnectionNotOpen, + client.rekey, + **kwargs + ) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_rekey_on_operation_failure(self): + """ + Test that a KmipOperationFailure exception is raised when the + backend fails to rekey a key. + """ + status = enums.ResultStatus.OPERATION_FAILED + reason = enums.ResultReason.GENERAL_FAILURE + message = "Test failure message" + + result = { + 'result_status': status, + 'result_reason': reason, + 'result_message': message + } + error_message = str(KmipOperationFailure(status, reason, message)) + + client = ProxyKmipClient() + client.open() + client.proxy.rekey.return_value = result + kwargs = { + 'uid': '1', + 'offset': 1, + 'deactivation_date': 10000 + } + + self.assertRaisesRegexp( + KmipOperationFailure, + error_message, + client.rekey, + **kwargs + ) + + @mock.patch( + 'kmip.pie.client.KMIPProxy', mock.MagicMock(spec_set=KMIPProxy) + ) + def test_check(self): + """ + Test that the client can check an object. 
+ """ + result = { + 'unique_identifier': '1', + 'result_status': enums.ResultStatus.SUCCESS + } + + client = ProxyKmipClient() + client.open() + client.proxy.check.return_value = result + + checked_id = client.check( + uid='1', + usage_limits_count=100, + cryptographic_usage_mask=[ + enums.CryptographicUsageMask.ENCRYPT, + enums.CryptographicUsageMask.DECRYPT + ], + lease_time=10000 + ) + + self.assertEqual('1', checked_id) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_check_on_invalid_unique_identifier(self): + """ + Test that a TypeError exception is raised when trying to check an + object with an invalid unique identifier. + """ + kwargs = {'uid': 0} + with ProxyKmipClient() as client: + self.assertRaisesRegexp( + TypeError, + "The unique identifier must be a string.", + client.check, + **kwargs + ) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_check_on_invalid_usage_limits_count(self): + """ + Test that a TypeError exception is raised when trying to check an + object with an invalid usage limits count. + """ + kwargs = {'usage_limits_count': 'invalid'} + with ProxyKmipClient() as client: + self.assertRaisesRegexp( + TypeError, + "The usage limits count must be an integer.", + client.check, + **kwargs + ) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_check_on_invalid_cryptographic_usage_mask(self): + """ + Test that a TypeError exception is raised when trying to check an + object with an invalid cryptographic usage mask. 
+ """ + kwargs = {'cryptographic_usage_mask': 'invalid'} + with ProxyKmipClient() as client: + self.assertRaisesRegexp( + TypeError, + "The cryptographic usage mask must be a list of " + "CryptographicUsageMask enumerations.", + client.check, + **kwargs + ) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_check_on_invalid_lease_time(self): + """ + Test that a TypeError exception is raised when trying to check an + object with an invalid lease time. + """ + kwargs = {'lease_time': 'invalid'} + with ProxyKmipClient() as client: + self.assertRaisesRegexp( + TypeError, + "The lease time must be an integer.", + client.check, + **kwargs + ) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_check_on_closed(self): + """ + Test that a ClientConnectionNotOpen exception is raised when trying + to check an object on an unopened client connection. + """ + client = ProxyKmipClient() + kwargs = { + 'uid': '1', + 'usage_limits_count': 100, + 'cryptographic_usage_mask': [ + enums.CryptographicUsageMask.ENCRYPT, + enums.CryptographicUsageMask.DECRYPT + ], + 'lease_time': 10000 + } + + self.assertRaises( + ClientConnectionNotOpen, + client.check, + **kwargs + ) + + @mock.patch('kmip.pie.client.KMIPProxy', + mock.MagicMock(spec_set=KMIPProxy)) + def test_check_on_operation_failure(self): + """ + Test that a KmipOperationFailure exception is raised when the + backend fails to derive a key. 
+ """ + status = enums.ResultStatus.OPERATION_FAILED + reason = enums.ResultReason.GENERAL_FAILURE + message = "Test failure message" + + result = { + 'result_status': status, + 'result_reason': reason, + 'result_message': message + } + error_message = str(KmipOperationFailure(status, reason, message)) + + client = ProxyKmipClient() + client.open() + client.proxy.check.return_value = result + kwargs = { + 'uid': '1', + 'usage_limits_count': 100, + 'cryptographic_usage_mask': [ + enums.CryptographicUsageMask.ENCRYPT, + enums.CryptographicUsageMask.DECRYPT + ], + 'lease_time': 10000 + } + + self.assertRaisesRegexp( + KmipOperationFailure, + error_message, + client.check, + **kwargs + ) + @mock.patch('kmip.pie.client.KMIPProxy', mock.MagicMock(spec_set=KMIPProxy)) def test_get(self): @@ -1431,7 +1683,11 @@ 'derivation_data': b'\xFF\xFE\xFE\xFC' }, cryptographic_length=128, - cryptographic_algorithm=enums.CryptographicAlgorithm.AES + cryptographic_algorithm=enums.CryptographicAlgorithm.AES, + cryptographic_usage_mask=[ + enums.CryptographicUsageMask.ENCRYPT, + enums.CryptographicUsageMask.DECRYPT + ] ) self.assertEqual('1', derived_id) @@ -2600,6 +2856,15 @@ self.assertRaises( ClientConnectionNotOpen, client.locate, *args) + def test_build_cryptographic_parameters_with_none(self): + """ + Test that an empty set of cryptographic parameters is processed + correctly. 
+ """ + client = ProxyKmipClient() + result = client._build_cryptographic_parameters(None) + self.assertEqual(None, result) + def test_build_cryptographic_parameters_invalid(self): """ Test that the right error is raised when attempting to build diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/server/auth/__init__.py python-pykmip-0.8.0/kmip/tests/unit/services/server/auth/__init__.py --- python-pykmip-0.7.0/kmip/tests/unit/services/server/auth/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/server/auth/__init__.py 2018-04-02 17:12:18.000000000 +0000 @@ -0,0 +1,14 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/server/auth/test_slugs.py python-pykmip-0.8.0/kmip/tests/unit/services/server/auth/test_slugs.py --- python-pykmip-0.7.0/kmip/tests/unit/services/server/auth/test_slugs.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/server/auth/test_slugs.py 2018-04-02 17:12:18.000000000 +0000 @@ -0,0 +1,251 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import mock +import requests +import testtools + +from kmip.core import exceptions +from kmip.services.server import auth + + +class TestSLUGSConnector(testtools.TestCase): + """ + Test suite for the SLUGSConnector. + """ + + def setUp(self): + super(TestSLUGSConnector, self).setUp() + + def tearDown(self): + super(TestSLUGSConnector, self).tearDown() + + def test_init(self): + """ + Test that a SLUGSConnector can be constructed without arguments. + """ + auth.SLUGSConnector() + + def test_init_with_args(self): + """ + Test that a SLUGSConnector can be constructed with arguments. + """ + connector = auth.SLUGSConnector(url='http://127.0.0.1:8080/slugs/') + + self.assertEqual('http://127.0.0.1:8080/slugs/', connector.url) + self.assertEqual( + 'http://127.0.0.1:8080/slugs/users/{}', + connector.users_url + ) + self.assertEqual( + 'http://127.0.0.1:8080/slugs/users/{}/groups', + connector.groups_url + ) + + def test_url_formatting(self): + """ + Test that a URL without a trailing slash is handled properly when used + to set the URL of a SLUGSConnector. 
+ """ + connector = auth.SLUGSConnector(url="http://127.0.0.1:8080/slugs") + + self.assertEqual('http://127.0.0.1:8080/slugs/', connector.url) + self.assertEqual( + 'http://127.0.0.1:8080/slugs/users/{}', + connector.users_url + ) + self.assertEqual( + 'http://127.0.0.1:8080/slugs/users/{}/groups', + connector.groups_url + ) + + connector = auth.SLUGSConnector() + + self.assertEqual(None, connector.url) + self.assertEqual(None, connector.users_url) + self.assertEqual(None, connector.groups_url) + + connector.url = "http://127.0.0.1:8080/slugs" + + self.assertEqual('http://127.0.0.1:8080/slugs/', connector.url) + self.assertEqual( + 'http://127.0.0.1:8080/slugs/users/{}', + connector.users_url + ) + self.assertEqual( + 'http://127.0.0.1:8080/slugs/users/{}/groups', + connector.groups_url + ) + + def test_invalid_url(self): + """ + Test that a TypeError is raised when an invalid value is used to set + the URL of a SLUGSConnector. + """ + kwargs = {'url': 0} + self.assertRaisesRegexp( + TypeError, + "URL must be a string.", + auth.SLUGSConnector, + **kwargs + ) + + connector = auth.SLUGSConnector() + args = (connector, "url", 0) + self.assertRaisesRegexp( + TypeError, + "URL must be a string.", + setattr, + *args + ) + + @mock.patch('requests.get') + @mock.patch( + 'kmip.services.server.auth.utils.get_client_identity_from_certificate' + ) + def test_authenticate(self, mock_get_client_identity, mock_request_get): + """ + Test that a call to authenticate with the SLUGSConnector triggers the + right utility and SLUGS API calls. 
+ """ + mock_get_client_identity.return_value = "John Doe" + + users_response = mock.MagicMock(requests.Response) + users_response.status_code = 200 + groups_response = mock.MagicMock(requests.Response) + groups_response.status_code = 200 + groups_response.json.return_value = {'groups': ['Group A', 'Group B']} + + mock_request_get.side_effect = [users_response, groups_response] + + connector = auth.SLUGSConnector( + url="http://127.0.0.1:8080/test/slugs/" + ) + result = connector.authenticate("test") + + mock_get_client_identity.assert_called_once_with("test") + mock_request_get.assert_any_call( + "http://127.0.0.1:8080/test/slugs/users/John Doe" + ) + mock_request_get.assert_any_call( + "http://127.0.0.1:8080/test/slugs/users/John Doe/groups" + ) + self.assertEqual(('John Doe', ['Group A', 'Group B']), result) + + @mock.patch('requests.get') + @mock.patch( + 'kmip.services.server.auth.utils.get_client_identity_from_certificate' + ) + def test_authenticate_with_url_unset(self, + mock_get_client_identity, + mock_request_get): + """ + Test that a ConfigurationError is raised when attempting to + authenticate with an unset URL. + """ + connector = auth.SLUGSConnector() + + args = ("test", ) + self.assertRaisesRegexp( + exceptions.ConfigurationError, + "The SLUGS URL must be specified.", + connector.authenticate, + *args + ) + + @mock.patch('requests.get') + @mock.patch( + 'kmip.services.server.auth.utils.get_client_identity_from_certificate' + ) + def test_authenticate_with_connection_failure(self, + mock_get_client_identity, + mock_request_get): + """ + Test that a ConfigurationError is raised when attempting to + authenticate with an invalid URL. 
+ """ + mock_get_client_identity.return_value = "John Doe" + mock_request_get.side_effect = [requests.exceptions.ConnectionError()] + + connector = auth.SLUGSConnector( + url="http://127.0.0.1:8080/test/slugs/" + ) + args = ("test", ) + self.assertRaisesRegexp( + exceptions.ConfigurationError, + "A connection could not be established using the SLUGS URL.", + connector.authenticate, + *args + ) + + @mock.patch('requests.get') + @mock.patch( + 'kmip.services.server.auth.utils.get_client_identity_from_certificate' + ) + def test_authenticate_with_users_failure(self, + mock_get_client_identity, + mock_request_get): + """ + Test that a PermissionDenied error is raised when an invalid user ID + is used to query SLUGS. + """ + mock_get_client_identity.return_value = "John Doe" + + users_response = mock.MagicMock(requests.Response) + users_response.status_code = 404 + + mock_request_get.return_value = users_response + + connector = auth.SLUGSConnector( + url="http://127.0.0.1:8080/test/slugs/" + ) + args = ("test", ) + self.assertRaisesRegexp( + exceptions.PermissionDenied, + "Unrecognized user ID: John Doe", + connector.authenticate, + *args + ) + + @mock.patch('requests.get') + @mock.patch( + 'kmip.services.server.auth.utils.get_client_identity_from_certificate' + ) + def test_authenticate_with_groups_failure(self, + mock_get_client_identity, + mock_request_get): + """ + Test that a PermissionDenied error is raised when a groups request to + SLUGS fails. 
+ """ + mock_get_client_identity.return_value = "John Doe" + + users_response = mock.MagicMock(requests.Response) + users_response.status_code = 200 + groups_response = mock.MagicMock(requests.Response) + groups_response.status_code = 404 + + mock_request_get.side_effect = [users_response, groups_response] + + connector = auth.SLUGSConnector( + url="http://127.0.0.1:8080/test/slugs/" + ) + args = ("test", ) + self.assertRaisesRegexp( + exceptions.PermissionDenied, + "Group information could not be retrieved for user ID: John Doe", + connector.authenticate, + *args + ) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/server/auth/test_utils.py python-pykmip-0.8.0/kmip/tests/unit/services/server/auth/test_utils.py --- python-pykmip-0.7.0/kmip/tests/unit/services/server/auth/test_utils.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/server/auth/test_utils.py 2018-04-02 17:12:18.000000000 +0000 @@ -0,0 +1,270 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from cryptography import x509 +from cryptography.hazmat import backends +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import rsa + +import datetime +import mock +import ssl +import testtools + +from kmip.core import exceptions +from kmip.services.server.auth import utils + + +class TestUtils(testtools.TestCase): + """ + Test suite for authentication utilities. + """ + + def setUp(self): + super(TestUtils, self).setUp() + + self.certificate_bytes = ( + b'\x30\x82\x03\x7c\x30\x82\x02\x64\xa0\x03\x02\x01\x02\x02\x01\x02' + b'\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x30' + b'\x45\x31\x0b\x30\x09\x06\x03\x55\x04\x06\x13\x02\x55\x53\x31\x1f' + b'\x30\x1d\x06\x03\x55\x04\x0a\x13\x16\x54\x65\x73\x74\x20\x43\x65' + b'\x72\x74\x69\x66\x69\x63\x61\x74\x65\x73\x20\x32\x30\x31\x31\x31' + b'\x15\x30\x13\x06\x03\x55\x04\x03\x13\x0c\x54\x72\x75\x73\x74\x20' + b'\x41\x6e\x63\x68\x6f\x72\x30\x1e\x17\x0d\x31\x30\x30\x31\x30\x31' + b'\x30\x38\x33\x30\x30\x30\x5a\x17\x0d\x33\x30\x31\x32\x33\x31\x30' + b'\x38\x33\x30\x30\x30\x5a\x30\x40\x31\x0b\x30\x09\x06\x03\x55\x04' + b'\x06\x13\x02\x55\x53\x31\x1f\x30\x1d\x06\x03\x55\x04\x0a\x13\x16' + b'\x54\x65\x73\x74\x20\x43\x65\x72\x74\x69\x66\x69\x63\x61\x74\x65' + b'\x73\x20\x32\x30\x31\x31\x31\x10\x30\x0e\x06\x03\x55\x04\x03\x13' + b'\x07\x47\x6f\x6f\x64\x20\x43\x41\x30\x82\x01\x22\x30\x0d\x06\x09' + b'\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05\x00\x03\x82\x01\x0f\x00' + b'\x30\x82\x01\x0a\x02\x82\x01\x01\x00\x90\x58\x9a\x47\x62\x8d\xfb' + b'\x5d\xf6\xfb\xa0\x94\x8f\x7b\xe5\xaf\x7d\x39\x73\x20\x6d\xb5\x59' + b'\x0e\xcc\xc8\xc6\xc6\xb4\xaf\xe6\xf2\x67\xa3\x0b\x34\x7a\x73\xe7' + b'\xff\xa4\x98\x44\x1f\xf3\x9c\x0d\x23\x2c\x5e\xaf\x21\xe6\x45\xda' + b'\x04\x6a\x96\x2b\xeb\xd2\xc0\x3f\xcf\xce\x9e\x4e\x60\x6a\x6d\x5e' + b'\x61\x8f\x72\xd8\x43\xb4\x0c\x25\xad\xa7\xe4\x18\xe4\xb8\x1a\xa2' + b'\x09\xf3\xe9\x3d\x5c\x62\xac\xfa\xf4\x14\x5c\x92\xac\x3a\x4e\x3b' + 
b'\x46\xec\xc3\xe8\xf6\x6e\xa6\xae\x2c\xd7\xac\x5a\x2d\x5a\x98\x6d' + b'\x40\xb6\xe9\x47\x18\xd3\xc1\xa9\x9e\x82\xcd\x1c\x96\x52\xfc\x49' + b'\x97\xc3\x56\x59\xdd\xde\x18\x66\x33\x65\xa4\x8a\x56\x14\xd1\xe7' + b'\x50\x69\x9d\x88\x62\x97\x50\xf5\xff\xf4\x7d\x1f\x56\x32\x00\x69' + b'\x0c\x23\x9c\x60\x1b\xa6\x0c\x82\xba\x65\xa0\xcc\x8c\x0f\xa5\x7f' + b'\x84\x94\x53\x94\xaf\x7c\xfb\x06\x85\x67\x14\xa8\x48\x5f\x37\xbe' + b'\x56\x64\x06\x49\x6c\x59\xc6\xf5\x83\x50\xdf\x74\x52\x5d\x2d\x2c' + b'\x4a\x4b\x82\x4d\xce\x57\x15\x01\xe1\x55\x06\xb9\xfd\x79\x38\x93' + b'\xa9\x82\x8d\x71\x89\xb2\x0d\x3e\x65\xad\xd7\x85\x5d\x6b\x63\x7d' + b'\xca\xb3\x4a\x96\x82\x46\x64\xda\x8b\x02\x03\x01\x00\x01\xa3\x7c' + b'\x30\x7a\x30\x1f\x06\x03\x55\x1d\x23\x04\x18\x30\x16\x80\x14\xe4' + b'\x7d\x5f\xd1\x5c\x95\x86\x08\x2c\x05\xae\xbe\x75\xb6\x65\xa7\xd9' + b'\x5d\xa8\x66\x30\x1d\x06\x03\x55\x1d\x0e\x04\x16\x04\x14\x58\x01' + b'\x84\x24\x1b\xbc\x2b\x52\x94\x4a\x3d\xa5\x10\x72\x14\x51\xf5\xaf' + b'\x3a\xc9\x30\x0e\x06\x03\x55\x1d\x0f\x01\x01\xff\x04\x04\x03\x02' + b'\x01\x06\x30\x17\x06\x03\x55\x1d\x20\x04\x10\x30\x0e\x30\x0c\x06' + b'\x0a\x60\x86\x48\x01\x65\x03\x02\x01\x30\x01\x30\x0f\x06\x03\x55' + b'\x1d\x13\x01\x01\xff\x04\x05\x30\x03\x01\x01\xff\x30\x0d\x06\x09' + b'\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x03\x82\x01\x01\x00' + b'\x35\x87\x97\x16\xe6\x75\x35\xcd\xc0\x12\xff\x96\x5c\x21\x42\xac' + b'\x27\x6b\x32\xbb\x08\x2d\x96\xb1\x70\x41\xaa\x03\x4f\x5a\x3e\xe6' + b'\xb6\xf4\x3e\x68\xb1\xbc\xff\x9d\x10\x73\x64\xae\x9f\xba\x36\x56' + b'\x7c\x05\xf4\x3d\x7c\x51\x47\xbc\x1a\x3d\xee\x3d\x46\x07\xfa\x84' + b'\x88\xd6\xf0\xdd\xc8\xa7\x23\x98\xc6\xca\x45\x4e\x2b\x93\x47\xa8' + b'\xdd\x41\xcd\x0d\x7c\x2a\x21\x57\x3d\x09\x04\xbd\xb2\x6c\x95\xfb' + b'\x1d\x47\x0b\x02\xf8\x4d\x3a\xea\xf8\xb5\xcb\x2b\x1f\xea\x56\x28' + b'\xf4\x62\xa9\x3e\x50\x97\xc0\xb6\xb8\x36\x8e\x76\x0a\x5e\xc0\xae' + b'\x14\xc0\x50\x42\x75\x82\x1a\xbc\x1a\xd6\x0d\x53\xa6\x14\x69\xfd' + 
b'\x19\x98\x1e\x73\x32\x9d\x81\x66\x66\xb5\xed\xcc\x5c\xfe\x53\xd5' + b'\xc4\x03\xb0\xbe\x80\xfa\xb8\x92\xa0\xc8\xfe\x25\x5f\x21\x3d\x6c' + b'\xea\x50\x6d\x74\x1e\x74\x96\xb0\xd5\xc2\x5d\xa8\x61\xf0\x2f\x5b' + b'\xfe\xac\x0b\x6b\x1e\xd9\x09\x5e\x66\x27\x54\x9a\xbc\xe2\x54\xd3' + b'\xf8\xa0\x47\x97\x20\xda\x24\x53\xa4\xfa\xa7\xff\xc7\x33\x51\x46' + b'\x41\x8c\x36\x8c\xeb\xe9\x29\xc2\xad\x58\x24\x80\x9d\xe8\x04\x6e' + b'\x0b\x06\x63\x30\x13\x2a\x39\x8f\x24\xf2\x74\x9e\x91\xc5\xab\x33' + ) + + private_key = rsa.generate_private_key( + public_exponent=65537, + key_size=2048, + backend=backends.default_backend() + ) + subject = issuer = x509.Name([ + x509.NameAttribute(x509.NameOID.COMMON_NAME, u"Jane Doe") + ]) + subject_no_common_name = issuer_no_common_name = x509.Name([ + x509.NameAttribute(x509.NameOID.ORGANIZATION_NAME, u"Test, Inc.") + ]) + self.certificate = x509.CertificateBuilder().subject_name( + subject + ).issuer_name( + issuer + ).public_key( + private_key.public_key() + ).serial_number( + x509.random_serial_number() + ).not_valid_before( + datetime.datetime.utcnow() + ).not_valid_after( + datetime.datetime.utcnow() + datetime.timedelta(days=1) + ).add_extension( + x509.ExtendedKeyUsage([x509.ExtendedKeyUsageOID.CLIENT_AUTH]), + critical=True + ).sign(private_key, hashes.SHA256(), backends.default_backend()) + + self.certificate_no_name = x509.CertificateBuilder().subject_name( + subject_no_common_name + ).issuer_name( + issuer_no_common_name + ).public_key( + private_key.public_key() + ).serial_number( + x509.random_serial_number() + ).not_valid_before( + datetime.datetime.utcnow() + ).not_valid_after( + datetime.datetime.utcnow() + datetime.timedelta(days=1) + ).sign(private_key, hashes.SHA256(), backends.default_backend()) + + self.certificate_no_extension = x509.CertificateBuilder().subject_name( + subject + ).issuer_name( + issuer + ).public_key( + private_key.public_key() + ).serial_number( + x509.random_serial_number() + ).not_valid_before( + 
datetime.datetime.utcnow() + ).not_valid_after( + datetime.datetime.utcnow() + datetime.timedelta(days=1) + ).sign(private_key, hashes.SHA256(), backends.default_backend()) + + def tearDown(self): + super(TestUtils, self).tearDown() + + def test_get_certificate_from_connection(self): + """ + Test that the certificate can be retrieved from a provided connection. + """ + mock_connection = mock.MagicMock(ssl.SSLSocket) + mock_connection.getpeercert.return_value = self.certificate_bytes + result = utils.get_certificate_from_connection( + mock_connection + ) + + self.assertIsInstance(result, x509.Certificate) + + def test_get_certificate_from_connection_with_load_failure(self): + """ + Test that the right value is returned when the certificate cannot be + retrieved from the provided connection. + """ + mock_connection = mock.MagicMock(ssl.SSLSocket) + mock_connection.getpeercert.return_value = None + result = utils.get_certificate_from_connection( + mock_connection + ) + + self.assertEqual(None, result) + + def test_get_extended_key_usage_from_certificate(self): + """ + Test that the ExtendedKeyUsage extension can be retrieved from a + certificate. + """ + extension = utils.get_extended_key_usage_from_certificate( + self.certificate + ) + + self.assertIsInstance(extension, x509.ExtendedKeyUsage) + self.assertIn(x509.ExtendedKeyUsageOID.CLIENT_AUTH, extension) + + def test_get_extended_key_usage_from_certificate_with_no_extension(self): + """ + Test that the right value is returned when the ExtendedKeyUsage + extension cannot be retrieved from a certificate. + """ + extension = utils.get_extended_key_usage_from_certificate( + self.certificate_no_extension + ) + + self.assertEqual(None, extension) + + def test_get_common_names_from_certificate(self): + """ + Test that the common names can be retrieved from a certificate. 
+ """ + common_names = utils.get_common_names_from_certificate( + self.certificate + ) + + self.assertEqual(["Jane Doe"], common_names) + + def test_get_common_names_from_certificate_no_common_names(self): + """ + Test that the right value is returned when no common names can be + retrieved from a certificate. + """ + common_names = utils.get_common_names_from_certificate( + self.certificate_no_name + ) + + self.assertEqual([], common_names) + + def test_get_client_identity_from_certificate(self): + """ + Test that the common names from a certificate can be processed into a + client identity. + """ + result = utils.get_client_identity_from_certificate(self.certificate) + + self.assertEqual("Jane Doe", result) + + @mock.patch( + 'kmip.services.server.auth.utils.get_common_names_from_certificate' + ) + def test_get_client_identity_from_certificate_multiple_names(self, + mock_get): + """ + Test that the a PermissionDenied error is raised if multiple possible + client identities are discovered. + """ + mock_get.return_value = ["John Doe", "Jane Doe"] + + args = ("test", ) + self.assertRaisesRegexp( + exceptions.PermissionDenied, + "Multiple client identities found.", + utils.get_client_identity_from_certificate, + *args + ) + + @mock.patch( + 'kmip.services.server.auth.utils.get_common_names_from_certificate' + ) + def test_get_client_identity_from_certificate_no_names(self, mock_get): + """ + Test that the a PermissionDenied error is raised if no possible client + identities are discovered. + """ + mock_get.return_value = [] + + args = ("test", ) + self.assertRaisesRegexp( + exceptions.PermissionDenied, + "The certificate does not define any subject common names. 
Client " + "identity unavailable.", + utils.get_client_identity_from_certificate, + *args + ) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/server/test_config.py python-pykmip-0.8.0/kmip/tests/unit/services/server/test_config.py --- python-pykmip-0.7.0/kmip/tests/unit/services/server/test_config.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/server/test_config.py 2018-04-16 19:14:22.000000000 +0000 @@ -16,6 +16,7 @@ import logging import mock +import six from six.moves import configparser import testtools @@ -67,6 +68,7 @@ c._set_enable_tls_client_auth = mock.MagicMock() c._set_tls_cipher_suites = mock.MagicMock() c._set_logging_level = mock.MagicMock() + c._set_database_path = mock.MagicMock() # Test the right error is generated when setting an unsupported # setting. @@ -114,6 +116,9 @@ c.set_setting('logging_level', 'WARNING') c._set_logging_level.assert_called_once_with('WARNING') + c.set_setting('database_path', '/var/pykmip/pykmip.db') + c._set_database_path.assert_called_once_with('/var/pykmip/pykmip.db') + def test_load_settings(self): """ Test that the right calls are made and the right errors generated when @@ -123,6 +128,7 @@ c = config.KmipServerConfig() c._logger = mock.MagicMock() c._parse_settings = mock.MagicMock() + c.parse_auth_settings = mock.MagicMock() # Test that the right calls are made when correctly processing the # configuration file. @@ -138,6 +144,7 @@ ) parser_mock.assert_called_with("/test/path/server.conf") self.assertTrue(c._parse_settings.called) + self.assertTrue(c.parse_auth_settings.called) # Test that a ConfigurationError is generated when the path is invalid. c._logger.reset_mock() @@ -151,6 +158,66 @@ *args ) + def test_parse_auth_settings(self): + """ + Test that server authentication plugin settings are parsed correctly. 
+ """ + parser = configparser.SafeConfigParser() + parser.add_section('server') + parser.add_section('auth:slugs') + parser.set('auth:slugs', 'enabled', 'True') + parser.set('auth:slugs', 'url', 'http://127.0.0.1:8080/slugs/') + parser.add_section('auth:ldap') + parser.set('auth:ldap', 'enabled', 'False') + parser.set('auth:ldap', 'url', 'http://127.0.0.1:8080/ldap/') + + c = config.KmipServerConfig() + c._logger = mock.MagicMock() + + self.assertEqual([], c.settings['auth_plugins']) + + c.parse_auth_settings(parser) + configs = c.settings['auth_plugins'] + + self.assertIsInstance(configs, list) + self.assertEqual(2, len(configs)) + + for c in configs: + self.assertIsInstance(c, tuple) + self.assertEqual(2, len(c)) + self.assertIn(c[0], ['auth:slugs', 'auth:ldap']) + self.assertIsInstance(c[1], dict) + + if c[0] == 'auth:slugs': + self.assertIn('enabled', six.iterkeys(c[1])) + self.assertEqual('True', c[1]['enabled']) + self.assertIn('url', six.iterkeys(c[1])) + self.assertEqual('http://127.0.0.1:8080/slugs/', c[1]['url']) + elif c[0] == 'auth:ldap': + self.assertIn('enabled', six.iterkeys(c[1])) + self.assertEqual('False', c[1]['enabled']) + self.assertIn('url', six.iterkeys(c[1])) + self.assertEqual('http://127.0.0.1:8080/ldap/', c[1]['url']) + + def test_parse_auth_settings_no_config(self): + """ + Test that server authentication plugin settings are parsed correctly, + even when not specified. 
+ """ + parser = configparser.SafeConfigParser() + parser.add_section('server') + + c = config.KmipServerConfig() + c._logger = mock.MagicMock() + + self.assertEqual([], c.settings['auth_plugins']) + + c.parse_auth_settings(parser) + configs = c.settings['auth_plugins'] + + self.assertIsInstance(configs, list) + self.assertEqual(0, len(configs)) + def test_parse_settings(self): """ Test that the right methods are called and the right errors generated @@ -169,6 +236,7 @@ c._set_enable_tls_client_auth = mock.MagicMock() c._set_tls_cipher_suites = mock.MagicMock() c._set_logging_level = mock.MagicMock() + c._set_database_path = mock.MagicMock() # Test that the right calls are made when correctly parsing settings. parser = configparser.SafeConfigParser() @@ -187,6 +255,7 @@ "\n TLS_RSA_WITH_AES_256_CBC_SHA256" ) parser.set('server', 'logging_level', 'ERROR') + parser.set('server', 'database_path', '/var/pykmip/pykmip.db') c._parse_settings(parser) @@ -204,6 +273,7 @@ "\n TLS_RSA_WITH_AES_256_CBC_SHA256" ) c._set_logging_level.assert_called_once_with('ERROR') + c._set_database_path.assert_called_once_with('/var/pykmip/pykmip.db') # Test that a ConfigurationError is generated when the expected # section is missing. @@ -756,3 +826,55 @@ c._set_logging_level, *args ) + + def test_set_database_path(self): + """ + Test that the database_path configuration property can be set + correctly. + """ + c = config.KmipServerConfig() + c._logger = mock.MagicMock() + + self.assertNotIn('database_path', c.settings.keys()) + + with mock.patch('os.path.exists') as os_mock: + os_mock.return_value = True + c._set_database_path('/test/path/database.db') + + self.assertIn('database_path', c.settings.keys()) + self.assertEqual( + '/test/path/database.db', + c.settings.get('database_path') + ) + + def test_set_database_path_default(self): + """ + Test that the database_path configuration property can be set correctly + without specifying a value. 
+ """ + c = config.KmipServerConfig() + c._logger = mock.MagicMock() + + self.assertNotIn('database_path', c.settings.keys()) + + c._set_database_path(None) + self.assertIn('database_path', c.settings.keys()) + self.assertEqual(None, c.settings.get('database_path')) + + def test_set_database_path_invalid_value(self): + """ + Test that the right error is raised when an invalid value is used to + set the database_path configuration property. + """ + c = config.KmipServerConfig() + c._logger = mock.MagicMock() + + self.assertNotIn('database_path', c.settings.keys()) + + args = (1, ) + self.assertRaises( + exceptions.ConfigurationError, + c._set_database_path, + *args + ) + self.assertNotEqual(1, c.settings.get('database_path')) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/server/test_engine.py python-pykmip-0.8.0/kmip/tests/unit/services/server/test_engine.py --- python-pykmip-0.7.0/kmip/tests/unit/services/server/test_engine.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/server/test_engine.py 2018-05-17 22:30:38.000000000 +0000 @@ -89,7 +89,7 @@ ) ] - protocol = contents.ProtocolVersion.create(1, 0) + protocol = contents.ProtocolVersion(1, 0) max_size = contents.MaximumResponseSize(2 ** 20) asynch = contents.AsynchronousIndicator(False) @@ -148,175 +148,6 @@ } create_engine_mock.assert_called_once_with(*args, **fargs) - def test_load_operation_policies(self): - """ - Test that the KmipEngine can correctly load operation policies. 
- """ - e = engine.KmipEngine() - e._logger = mock.MagicMock() - - policy_file = tempfile.NamedTemporaryFile( - dir=self.temp_dir - ) - with open(policy_file.name, 'w') as f: - f.write( - '{"test": {"CERTIFICATE": {"LOCATE": "ALLOW_ALL"}}}' - ) - - self.assertEqual(2, len(e._operation_policies)) - - e._load_operation_policies(self.temp_dir) - e._logger.info.assert_any_call( - "Loading user-defined operation policy files from: {0}".format( - self.temp_dir - ) - ) - e._logger.info.assert_any_call( - "Loading user-defined operation policies from file: {0}".format( - policy_file.name - ) - ) - - self.assertEqual(3, len(e._operation_policies)) - self.assertIn('test', e._operation_policies.keys()) - - test_policy = { - enums.ObjectType.CERTIFICATE: { - enums.Operation.LOCATE: enums.Policy.ALLOW_ALL - } - } - - self.assertEqual(test_policy, e._operation_policies.get('test')) - - def test_load_operation_policies_with_file_read_error(self): - """ - Test that the KmipEngine can correctly handle load errors. - """ - e = engine.KmipEngine() - e._logger = mock.MagicMock() - - policy_file = tempfile.NamedTemporaryFile( - dir=self.temp_dir - ) - with open(policy_file.name, 'w') as f: - f.write( - '{"test": {"INVALID": {"LOCATE": "ALLOW_ALL"}}}' - ) - - self.assertEqual(2, len(e._operation_policies)) - - e._load_operation_policies(self.temp_dir) - e._logger.info.assert_any_call( - "Loading user-defined operation policy files from: {0}".format( - self.temp_dir - ) - ) - e._logger.info.assert_any_call( - "Loading user-defined operation policies from file: {0}".format( - policy_file.name - ) - ) - e._logger.error.assert_called_once_with( - "A failure occurred while loading policies." - ) - e._logger.exception.assert_called_once() - - self.assertEqual(2, len(e._operation_policies)) - - def test_load_operation_policies_with_reserved(self): - """ - Test that the KmipEngine can correctly load operation policies, even - when a policy attempts to overwrite a reserved one. 
- """ - e = engine.KmipEngine() - e._logger = mock.MagicMock() - - policy_file = tempfile.NamedTemporaryFile( - dir=self.temp_dir - ) - with open(policy_file.name, 'w') as f: - f.write( - '{"public": {"CERTIFICATE": {"LOCATE": "ALLOW_ALL"}}}' - ) - - self.assertEqual(2, len(e._operation_policies)) - - e._load_operation_policies(self.temp_dir) - e._logger.info.assert_any_call( - "Loading user-defined operation policy files from: {0}".format( - self.temp_dir - ) - ) - e._logger.info.assert_any_call( - "Loading user-defined operation policies from file: {0}".format( - policy_file.name - ) - ) - e._logger.warning.assert_called_once_with( - "Loaded policy 'public' overwrites a reserved policy and will " - "be thrown out." - ) - - self.assertEqual(2, len(e._operation_policies)) - - def test_load_operation_policies_with_duplicate(self): - """ - Test that the KmipEngine can correctly load operation policies, even - when a policy is defined multiple times. - """ - e = engine.KmipEngine() - e._logger = mock.MagicMock() - - policy_file_a = tempfile.NamedTemporaryFile( - dir=self.temp_dir - ) - with open(policy_file_a.name, 'w') as f: - f.write( - '{"test": {"CERTIFICATE": {"LOCATE": "ALLOW_ALL"}}}' - ) - - policy_file_b = tempfile.NamedTemporaryFile( - dir=self.temp_dir - ) - with open(policy_file_b.name, 'w') as f: - f.write( - '{"test": {"CERTIFICATE": {"LOCATE": "ALLOW_ALL"}}}' - ) - - self.assertEqual(2, len(e._operation_policies)) - - e._load_operation_policies(self.temp_dir) - e._logger.info.assert_any_call( - "Loading user-defined operation policy files from: {0}".format( - self.temp_dir - ) - ) - e._logger.info.assert_any_call( - "Loading user-defined operation policies from file: {0}".format( - policy_file_a.name - ) - ) - e._logger.info.assert_any_call( - "Loading user-defined operation policies from file: {0}".format( - policy_file_b.name - ) - ) - e._logger.warning.assert_called_once_with( - "Loaded policy 'test' overwrites a preexisting policy and will " - "be 
thrown out." - ) - - self.assertEqual(3, len(e._operation_policies)) - self.assertIn('test', e._operation_policies.keys()) - - test_policy = { - enums.ObjectType.CERTIFICATE: { - enums.Operation.LOCATE: enums.Policy.ALLOW_ALL - } - } - - self.assertEqual(test_policy, e._operation_policies.get('test')) - def test_version_operation_match(self): """ Test that a valid response is generated when trying to invoke an @@ -335,7 +166,7 @@ """ e = engine.KmipEngine() e._logger = mock.MagicMock() - e._protocol_version = contents.ProtocolVersion.create(1, 0) + e._protocol_version = contents.ProtocolVersion(1, 0) args = (None, ) regex = "DiscoverVersions is not supported by KMIP {0}".format( @@ -356,7 +187,7 @@ e = engine.KmipEngine() e._logger = mock.MagicMock() - protocol = contents.ProtocolVersion.create(1, 1) + protocol = contents.ProtocolVersion(1, 1) header = messages.RequestHeader( protocol_version=protocol, maximum_response_size=contents.MaximumResponseSize(2 ** 20), @@ -398,7 +229,7 @@ self.assertIsNotNone(header) self.assertEqual( - contents.ProtocolVersion.create(1, 1), + contents.ProtocolVersion(1, 1), header.protocol_version ) self.assertIsInstance(header.time_stamp, contents.TimeStamp) @@ -438,7 +269,7 @@ e = engine.KmipEngine() e._logger = mock.MagicMock() - protocol = contents.ProtocolVersion.create(0, 1) + protocol = contents.ProtocolVersion(0, 1) header = messages.RequestHeader( protocol_version=protocol ) @@ -466,7 +297,7 @@ e = engine.KmipEngine() e._logger = mock.MagicMock() - protocol = contents.ProtocolVersion.create(1, 0) + protocol = contents.ProtocolVersion(1, 0) header = messages.RequestHeader( protocol_version=protocol, time_stamp=contents.TimeStamp(0) @@ -499,7 +330,7 @@ e = engine.KmipEngine() e._logger = mock.MagicMock() - protocol = contents.ProtocolVersion.create(1, 0) + protocol = contents.ProtocolVersion(1, 0) header = messages.RequestHeader( protocol_version=protocol, time_stamp=contents.TimeStamp(10 ** 10) @@ -532,7 +363,7 @@ e = 
engine.KmipEngine() e._logger = mock.MagicMock() - protocol = contents.ProtocolVersion.create(1, 1) + protocol = contents.ProtocolVersion(1, 1) header = messages.RequestHeader( protocol_version=protocol, asynchronous_indicator=contents.AsynchronousIndicator(True) @@ -559,7 +390,7 @@ e = engine.KmipEngine() e._logger = mock.MagicMock() - protocol = contents.ProtocolVersion.create(1, 1) + protocol = contents.ProtocolVersion(1, 1) header = messages.RequestHeader( protocol_version=protocol, authentication=contents.Authentication(), @@ -589,7 +420,7 @@ e = engine.KmipEngine() e._logger = mock.MagicMock() - protocol = contents.ProtocolVersion.create(1, 1) + protocol = contents.ProtocolVersion(1, 1) header = messages.RequestHeader( protocol_version=protocol, authentication=None, @@ -625,7 +456,7 @@ e._logger = mock.MagicMock() response = e.build_error_response( - contents.ProtocolVersion.create(1, 1), + contents.ProtocolVersion(1, 1), enums.ResultReason.GENERAL_FAILURE, "A general test failure occurred." 
) @@ -635,7 +466,7 @@ header = response.response_header self.assertEqual( - contents.ProtocolVersion.create(1, 1), + contents.ProtocolVersion(1, 1), header.protocol_version ) self.assertIsNotNone(header.time_stamp) @@ -756,7 +587,7 @@ """ e = engine.KmipEngine() e._logger = mock.MagicMock() - e._protocol_version = contents.ProtocolVersion.create(1, 0) + e._protocol_version = contents.ProtocolVersion(1, 0) batch = list([ messages.RequestBatchItem( @@ -953,6 +784,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() obj_a = pie_objects.OpaqueObject(b'', enums.OpaqueDataType.NONE) @@ -977,6 +809,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() args = ('1', ) @@ -1003,6 +836,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) test_exception = exc.MultipleResultsFound() e._data_session.query = mock.MagicMock(side_effect=test_exception) e._logger = mock.MagicMock() @@ -1029,6 +863,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() obj_a = pie_objects.OpaqueObject(b'', enums.OpaqueDataType.NONE) @@ -1415,6 +1250,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() symmetric_key = pie_objects.SymmetricKey( @@ -1477,6 
+1313,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() symmetric_key = pie_objects.SymmetricKey( @@ -2081,244 +1918,438 @@ *args ) - def test_is_allowed_by_operation_policy(self): + def test_is_allowed_by_operation_policy_granted(self): """ - Test that an allowed operation is correctly allowed by the operation - policy. + Test that access granted by operation policy is processed correctly. """ e = engine.KmipEngine() - e._operation_policies = { - 'test': { - enums.ObjectType.SYMMETRIC_KEY: { - enums.Operation.GET: enums.Policy.ALLOW_OWNER - } - } - } + e.is_allowed = mock.Mock(return_value=True) - is_allowed = e._is_allowed_by_operation_policy( - 'test', - 'test', - 'test', + result = e._is_allowed_by_operation_policy( + 'test_policy', + ['test_user', ['test_group_A', 'test_group_B']], + 'test_user', enums.ObjectType.SYMMETRIC_KEY, enums.Operation.GET ) - self.assertTrue(is_allowed) + e.is_allowed.assert_called_once_with( + 'test_policy', + 'test_user', + 'test_group_A', + 'test_user', + enums.ObjectType.SYMMETRIC_KEY, + enums.Operation.GET + ) + self.assertTrue(result) - def test_is_allowed_by_operation_policy_blocked(self): + def test_is_allowed_by_operation_policy_denied(self): """ - Test that an unallowed operation is correctly blocked by the operation - policy. + Test that access denied by operation policy is processed correctly. 
""" e = engine.KmipEngine() - e._operation_policies = { - 'test': { - enums.ObjectType.SYMMETRIC_KEY: { - enums.Operation.GET: enums.Policy.ALLOW_OWNER - } - } - } + e.is_allowed = mock.Mock(return_value=False) - is_allowed = e._is_allowed_by_operation_policy( - 'test', - 'random', - 'test', + result = e._is_allowed_by_operation_policy( + 'test_policy', + ['test_user', ['test_group_A', 'test_group_B']], + 'test_user', enums.ObjectType.SYMMETRIC_KEY, enums.Operation.GET ) - self.assertFalse(is_allowed) + e.is_allowed.assert_any_call( + 'test_policy', + 'test_user', + 'test_group_A', + 'test_user', + enums.ObjectType.SYMMETRIC_KEY, + enums.Operation.GET + ) + e.is_allowed.assert_any_call( + 'test_policy', + 'test_user', + 'test_group_B', + 'test_user', + enums.ObjectType.SYMMETRIC_KEY, + enums.Operation.GET + ) + self.assertFalse(result) - def test_is_allowed_by_operation_public(self): + def test_is_allowed_by_operation_policy_no_groups(self): """ - Test that a public operation is allowed by the operation policy. + Test that access by operation policy is processed correctly when no + user groups are provided. """ e = engine.KmipEngine() - e._operation_policies = { - 'test': { - enums.ObjectType.SYMMETRIC_KEY: { - enums.Operation.GET: enums.Policy.ALLOW_ALL - } - } - } + e.is_allowed = mock.Mock(return_value=True) - is_allowed = e._is_allowed_by_operation_policy( - 'test', - 'test', - 'test', + result = e._is_allowed_by_operation_policy( + 'test_policy', + ['test_user', None], + 'test_user', enums.ObjectType.SYMMETRIC_KEY, enums.Operation.GET ) - self.assertTrue(is_allowed) + e.is_allowed.assert_called_once_with( + 'test_policy', + 'test_user', + None, + 'test_user', + enums.ObjectType.SYMMETRIC_KEY, + enums.Operation.GET + ) + self.assertTrue(result) + + def test_is_allowed_by_operation_policy_groups_empty(self): + """ + Test that access by operation policy is processed correctly when the + provided set of user groups is empty. 
+ + Note that _is_allowed will always return True here, but because there + are no groups to check, access is by default denied. + """ + e = engine.KmipEngine() + e.is_allowed = mock.Mock(return_value=True) - is_allowed = e._is_allowed_by_operation_policy( - 'test', - 'random', - 'test', + result = e._is_allowed_by_operation_policy( + 'test_policy', + ['test_user', []], + 'test_user', enums.ObjectType.SYMMETRIC_KEY, enums.Operation.GET ) - self.assertTrue(is_allowed) + e.is_allowed.assert_not_called() + self.assertFalse(result) + + def test_get_relevant_policy_section_policy_missing(self): + """ + Test that the lookup for a non-existent policy is handled correctly. + """ + e = engine.KmipEngine() + e._operation_policies = {} + e._logger = mock.MagicMock() + + result = e.get_relevant_policy_section('invalid') - def test_is_allowed_by_operation_block_all(self): + e._logger.warning.assert_called_once_with( + "The 'invalid' policy does not exist." + ) + self.assertIsNone(result) + + def test_get_relevant_policy_section_no_group(self): """ - Test that a blocked operation is blocked by the operation policy. + Test that the lookup for a policy with no group specified is handled + correctly. 
""" e = engine.KmipEngine() e._operation_policies = { - 'test': { - enums.ObjectType.SYMMETRIC_KEY: { - enums.Operation.GET: enums.Policy.DISALLOW_ALL + 'test_policy': { + 'preset': { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_OWNER + } } } } - is_allowed = e._is_allowed_by_operation_policy( - 'test', - 'test', - 'test', - enums.ObjectType.SYMMETRIC_KEY, - enums.Operation.GET - ) + expected = { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_OWNER + } + } - self.assertFalse(is_allowed) + result = e.get_relevant_policy_section('test_policy') + self.assertEqual(expected, result) - is_allowed = e._is_allowed_by_operation_policy( - 'test', - 'random', - 'test', - enums.ObjectType.SYMMETRIC_KEY, - enums.Operation.GET - ) + def test_get_relevant_policy_section_group(self): + """ + Test that the lookup for a policy with a group specified is handled + correctly. + """ + e = engine.KmipEngine() + e._operation_policies = { + 'test_policy': { + 'preset': { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_OWNER + } + }, + 'groups': { + 'test_group': { + enums.ObjectType.CERTIFICATE: { + enums.Operation.CREATE: enums.Policy.ALLOW_ALL + } + } + } + } + } + + expected = { + enums.ObjectType.CERTIFICATE: { + enums.Operation.CREATE: enums.Policy.ALLOW_ALL + } + } - self.assertFalse(is_allowed) + result = e.get_relevant_policy_section('test_policy', 'test_group') + self.assertEqual(expected, result) - def test_is_allowed_by_operation_safety_check(self): + def test_get_relevant_policy_section_group_not_supported(self): """ - Test that an unknown operation is blocked by the operation policy. + Test that the lookup for a policy with a group specified but not + supported is handled correctly. 
""" e = engine.KmipEngine() + e._logger = mock.MagicMock() e._operation_policies = { - 'test': { - enums.ObjectType.SYMMETRIC_KEY: { - enums.Operation.GET: 'unknown value' + 'test_policy': { + 'preset': { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_OWNER + } + }, + 'groups': { + 'test_group_B': { + enums.ObjectType.CERTIFICATE: { + enums.Operation.CREATE: enums.Policy.ALLOW_ALL + } + } } } } - is_allowed = e._is_allowed_by_operation_policy( - 'test', - 'test', - 'test', + result = e.get_relevant_policy_section('test_policy', 'test_group_A') + + e._logger.debug.assert_called_once_with( + "The 'test_policy' policy does not support group 'test_group_A'." + ) + self.assertIsNone(result) + + def test_get_relevant_policy_section_groups_not_supported(self): + """ + Test that the lookup for a group-less policy with a group specified is + handled correctly. + """ + e = engine.KmipEngine() + e._logger = mock.MagicMock() + e._operation_policies = { + 'test_policy': { + 'preset': { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_OWNER + } + } + } + } + + result = e.get_relevant_policy_section('test_policy', 'test_group_A') + + e._logger.debug.assert_called_once_with( + "The 'test_policy' policy does not support groups." + ) + self.assertIsNone(result) + + def test_is_allowed_policy_not_found(self): + """ + Test that an access check using a non-existent policy is handled + correctly. + """ + e = engine.KmipEngine() + e.get_relevant_policy_section = mock.Mock(return_value=None) + + result = e.is_allowed( + 'test_policy', + 'test_user', + 'test_group', + 'test_user', enums.ObjectType.SYMMETRIC_KEY, enums.Operation.GET ) + self.assertFalse(result) - self.assertFalse(is_allowed) + def test_is_allowed_policy_object_type_mismatch(self): + """ + Test that an access check using a policy that does not support the + specified object type is handled correctly. 
+ """ + e = engine.KmipEngine() + e._logger = mock.Mock() + e._get_enum_string = mock.Mock(return_value="Certificate") + e.get_relevant_policy_section = mock.Mock( + return_value={ + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_OWNER + } + } + ) - is_allowed = e._is_allowed_by_operation_policy( - 'test', - 'random', - 'test', - enums.ObjectType.SYMMETRIC_KEY, + result = e.is_allowed( + 'test_policy', + 'test_user', + 'test_group', + 'test_user', + enums.ObjectType.CERTIFICATE, enums.Operation.GET ) - self.assertFalse(is_allowed) + e._logger.warning.assert_called_once_with( + "The 'test_policy' policy does not apply to Certificate objects." + ) + self.assertFalse(result) - def test_is_allowed_by_operation_policy_nonexistent_policy(self): + def test_is_allowed_policy_operation_mismatch(self): """ - Test that a check with a non-existent policy yields a logging warning - and a blocked operation. + Test that an access check using a policy that does not support the + specified operation is handled correctly. """ e = engine.KmipEngine() - e._logger = mock.MagicMock() + e._logger = mock.Mock() + e._get_enum_string = mock.Mock(side_effect=["Create", "SymmetricKey"]) + e.get_relevant_policy_section = mock.Mock( + return_value={ + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_OWNER + } + } + ) - policy = 'nonexistent-policy' - is_allowed = e._is_allowed_by_operation_policy( - policy, - 'test', - 'test', + result = e.is_allowed( + 'test_policy', + 'test_user', + 'test_group', + 'test_user', enums.ObjectType.SYMMETRIC_KEY, - enums.Operation.GET + enums.Operation.CREATE ) - self.assertFalse(is_allowed) e._logger.warning.assert_called_once_with( - "The '{0}' policy does not exist.".format(policy) + "The 'test_policy' policy does not apply to Create operations on " + "SymmetricKey objects." 
) + self.assertFalse(result) - def test_is_allowed_by_operation_policy_not_object_applicable(self): + def test_is_allowed_allow_all(self): """ - Test that a check for an object with a non-applicable policy yields - a logging warning and a blocked operation. + Test that an access check resulting in an "Allow All" policy is + processed correctly. """ e = engine.KmipEngine() - e._logger = mock.MagicMock() - e._operation_policies = { - 'test': { + e.get_relevant_policy_section = mock.Mock( + return_value={ enums.ObjectType.SYMMETRIC_KEY: { - enums.Operation.GET: enums.Policy.ALLOW_OWNER + enums.Operation.GET: enums.Policy.ALLOW_ALL } } - } + ) - policy = 'test' - object_type = enums.ObjectType.PRIVATE_KEY - is_allowed = e._is_allowed_by_operation_policy( - policy, - 'test', - 'test', - object_type, + result = e.is_allowed( + 'test_policy', + 'test_user', + 'test_group', + 'test_user', + enums.ObjectType.SYMMETRIC_KEY, enums.Operation.GET ) + self.assertTrue(result) - self.assertFalse(is_allowed) - e._logger.warning.assert_called_once_with( - "The '{0}' policy does not apply to {1} objects.".format( - policy, - e._get_enum_string(object_type) - ) + def test_is_allowed_allow_owner(self): + """ + Test that an access check resulting in an "Allow Owner" policy is + processed correctly. + """ + e = engine.KmipEngine() + e.get_relevant_policy_section = mock.Mock( + return_value={ + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_OWNER + } + } ) - def test_is_allowed_by_operation_policy_not_applicable(self): + result = e.is_allowed( + 'test_policy', + 'test_user', + 'test_group', + 'test_user', + enums.ObjectType.SYMMETRIC_KEY, + enums.Operation.GET + ) + self.assertTrue(result) + + def test_is_allowed_allow_owner_not_owner(self): """ - Test that a check with a non-applicable policy yields a logging - warning and a blocked operation. 
+ Test that an access check resulting in an "Allow Owner" policy is + processed correctly when the user requesting access is not the owner. """ e = engine.KmipEngine() - e._logger = mock.MagicMock() - e._operation_policies = { - 'test': { + e.get_relevant_policy_section = mock.Mock( + return_value={ enums.ObjectType.SYMMETRIC_KEY: { enums.Operation.GET: enums.Policy.ALLOW_OWNER } } - } + ) - policy = 'test' - object_type = enums.ObjectType.SYMMETRIC_KEY - operation = enums.Operation.CREATE - is_allowed = e._is_allowed_by_operation_policy( - policy, - 'test', - 'test', - object_type, - operation + result = e.is_allowed( + 'test_policy', + 'test_user_A', + 'test_group', + 'test_user_B', + enums.ObjectType.SYMMETRIC_KEY, + enums.Operation.GET ) + self.assertFalse(result) - self.assertFalse(is_allowed) - e._logger.warning.assert_called_once_with( - "The '{0}' policy does not apply to {1} operations on {2} " - "objects.".format( - policy, - e._get_enum_string(operation), - e._get_enum_string(object_type) - ) + def test_is_allowed_disallow_all(self): + """ + Test that an access check resulting in an "Disallow All" policy is + processed correctly. + """ + e = engine.KmipEngine() + e.get_relevant_policy_section = mock.Mock( + return_value={ + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.DISALLOW_ALL + } + } ) + result = e.is_allowed( + 'test_policy', + 'test_user', + 'test_group', + 'test_user', + enums.ObjectType.SYMMETRIC_KEY, + enums.Operation.GET + ) + self.assertFalse(result) + + def test_is_allowed_invalid_permission(self): + """ + Test that an access check resulting in an invalid policy option is + processed correctly. 
+ """ + e = engine.KmipEngine() + e.get_relevant_policy_section = mock.Mock( + return_value={ + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: 'invalid' + } + } + ) + + result = e.is_allowed( + 'test_policy', + 'test_user', + 'test_group', + 'test_user', + enums.ObjectType.SYMMETRIC_KEY, + enums.Operation.GET + ) + self.assertFalse(result) + def test_get_object_with_access_controls(self): """ Test that an unallowed object access request is handled correctly. @@ -2327,6 +2358,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=False) e._logger = mock.MagicMock() e._client_identity = 'test' @@ -2344,7 +2376,7 @@ args = [id_a, enums.Operation.GET] six.assertRaisesRegex( self, - exceptions.ItemNotFound, + exceptions.PermissionDenied, "Could not locate object: {0}".format(id_a), e._get_object_with_access_controls, *args @@ -3480,6 +3512,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -3654,6 +3687,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -3771,6 +3805,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -3806,6 +3841,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = 
e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -3845,6 +3881,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -3965,6 +4002,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -4022,6 +4060,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -4083,6 +4122,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -4140,6 +4180,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -4205,6 +4246,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() obj_a = pie_objects.OpaqueObject(b'', enums.OpaqueDataType.NONE) @@ -4288,6 +4330,7 
@@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() key = (b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' @@ -4383,6 +4426,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() obj_a = pie_objects.OpaqueObject(b'', enums.OpaqueDataType.NONE) @@ -4489,6 +4533,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() obj_a = pie_objects.SymmetricKey( @@ -4596,6 +4641,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=False) e._logger = mock.MagicMock() e._client_identity = 'test' @@ -4613,7 +4659,7 @@ args = [payload] six.assertRaisesRegex( self, - exceptions.ItemNotFound, + exceptions.PermissionDenied, "Could not locate object: {0}".format(id_a), e._process_get, *args @@ -4627,6 +4673,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -4740,6 +4787,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -4806,6 +4854,7 @@ e._data_store = self.engine 
e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -4870,6 +4919,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -4914,6 +4964,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -4965,6 +5016,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -5029,6 +5081,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -5093,6 +5146,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -5158,6 +5212,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() 
e._cryptography_engine.logger = mock.MagicMock() @@ -5223,6 +5278,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -5288,6 +5344,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() secret = pie_objects.SymmetricKey( @@ -5344,6 +5401,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() secret = pie_objects.SymmetricKey( @@ -5427,6 +5485,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=False) e._logger = mock.MagicMock() e._client_identity = 'test' @@ -5446,7 +5505,7 @@ # be retrieved. 
args = [payload] self.assertRaisesRegex( - exceptions.ItemNotFound, + exceptions.PermissionDenied, "Could not locate object: {0}".format(id_a), e._process_get_attributes, *args @@ -5460,6 +5519,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() secret = pie_objects.SymmetricKey( @@ -5537,6 +5597,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() secret = pie_objects.SymmetricKey( @@ -5613,6 +5674,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=False) e._logger = mock.MagicMock() e._client_identity = 'test' @@ -5632,7 +5694,7 @@ # be retrieved. 
args = [payload] self.assertRaisesRegex( - exceptions.ItemNotFound, + exceptions.PermissionDenied, "Could not locate object: {0}".format(id_a), e._process_get_attribute_list, *args @@ -5646,6 +5708,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() managed_object = pie_objects.SymmetricKey( @@ -5716,6 +5779,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() managed_object = pie_objects.OpaqueObject( @@ -5756,6 +5820,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() managed_object = pie_objects.SymmetricKey( @@ -5792,6 +5857,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=False) e._logger = mock.MagicMock() e._client_identity = 'test' @@ -5810,7 +5876,7 @@ # Test by specifying the ID of the object to activate. 
args = [payload] self.assertRaisesRegex( - exceptions.ItemNotFound, + exceptions.PermissionDenied, "Could not locate object: {0}".format(id_a), e._process_activate, *args @@ -5824,6 +5890,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() managed_object = pie_objects.SymmetricKey( @@ -5972,6 +6039,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() managed_object = pie_objects.SymmetricKey( @@ -6014,6 +6082,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() managed_object = pie_objects.SymmetricKey( @@ -6058,6 +6127,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() managed_object = pie_objects.OpaqueObject( @@ -6102,6 +6172,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=False) e._logger = mock.MagicMock() e._client_identity = 'test' @@ -6126,7 +6197,7 @@ args = [payload] self.assertRaisesRegex( - exceptions.ItemNotFound, + exceptions.PermissionDenied, "Could not locate object: {0}".format(id_a), e._process_revoke, *args @@ -6140,6 +6211,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = 
mock.Mock(return_value=True) e._logger = mock.MagicMock() obj_a = pie_objects.OpaqueObject(b'', enums.OpaqueDataType.NONE) @@ -6248,6 +6320,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=False) e._logger = mock.MagicMock() e._client_identity = 'test' @@ -6267,7 +6340,7 @@ args = [payload] six.assertRaisesRegex( self, - exceptions.ItemNotFound, + exceptions.PermissionDenied, "Could not locate object: {0}".format(id_a), e._process_destroy, *args @@ -6282,6 +6355,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() key = (b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' @@ -6318,7 +6392,7 @@ e = engine.KmipEngine() e._logger = mock.MagicMock() - e._protocol_version = contents.ProtocolVersion.create(1, 0) + e._protocol_version = contents.ProtocolVersion(1, 0) payload = payloads.QueryRequestPayload([ misc.QueryFunction(enums.QueryFunction.QUERY_OPERATIONS), @@ -6404,7 +6478,7 @@ e = engine.KmipEngine() e._logger = mock.MagicMock() - e._protocol_version = contents.ProtocolVersion.create(1, 1) + e._protocol_version = contents.ProtocolVersion(1, 1) payload = payloads.QueryRequestPayload([ misc.QueryFunction(enums.QueryFunction.QUERY_OPERATIONS), @@ -6494,7 +6568,7 @@ e = engine.KmipEngine() e._logger = mock.MagicMock() - e._protocol_version = contents.ProtocolVersion.create(1, 2) + e._protocol_version = contents.ProtocolVersion(1, 2) payload = payloads.QueryRequestPayload([ misc.QueryFunction(enums.QueryFunction.QUERY_OPERATIONS), @@ -6620,22 +6694,22 @@ self.assertIsNotNone(result.protocol_versions) self.assertEqual(3, len(result.protocol_versions)) self.assertEqual( - contents.ProtocolVersion.create(1, 2), + contents.ProtocolVersion(1, 2), 
result.protocol_versions[0] ) self.assertEqual( - contents.ProtocolVersion.create(1, 1), + contents.ProtocolVersion(1, 1), result.protocol_versions[1] ) self.assertEqual( - contents.ProtocolVersion.create(1, 0), + contents.ProtocolVersion(1, 0), result.protocol_versions[2] ) # Test detailed request. e._logger = mock.MagicMock() payload = payloads.DiscoverVersionsRequestPayload([ - contents.ProtocolVersion.create(1, 0) + contents.ProtocolVersion(1, 0) ]) result = e._process_discover_versions(payload) @@ -6646,14 +6720,14 @@ self.assertIsNotNone(result.protocol_versions) self.assertEqual(1, len(result.protocol_versions)) self.assertEqual( - contents.ProtocolVersion.create(1, 0), + contents.ProtocolVersion(1, 0), result.protocol_versions[0] ) # Test disjoint request. e._logger = mock.MagicMock() payload = payloads.DiscoverVersionsRequestPayload([ - contents.ProtocolVersion.create(0, 1) + contents.ProtocolVersion(0, 1) ]) result = e._process_discover_versions(payload) @@ -6674,6 +6748,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -6740,6 +6815,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -6802,6 +6878,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -6854,6 +6931,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = 
e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -6905,6 +6983,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -6961,6 +7040,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7020,6 +7100,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7089,6 +7170,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7141,6 +7223,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7192,6 +7275,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7248,6 +7332,7 @@ 
e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7304,6 +7389,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7440,6 +7526,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7493,6 +7580,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7535,6 +7623,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7593,6 +7682,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7650,6 +7740,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = 
mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7718,6 +7809,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() key = (b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' @@ -7795,6 +7887,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7843,6 +7936,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -7892,6 +7986,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() attribute_factory = factory.AttributeFactory() @@ -8017,6 +8112,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() attribute_factory = factory.AttributeFactory() @@ -8227,6 +8323,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() attribute_factory = factory.AttributeFactory() @@ -8368,6 +8465,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = 
e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() attribute_factory = factory.AttributeFactory() @@ -8576,6 +8674,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -8669,6 +8768,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -8749,6 +8849,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -8793,6 +8894,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() @@ -8880,6 +8982,7 @@ e._data_store = self.engine e._data_store_session_factory = self.session_factory e._data_session = e._data_store_session_factory() + e._is_allowed_by_operation_policy = mock.Mock(return_value=True) e._logger = mock.MagicMock() e._cryptography_engine.logger = mock.MagicMock() diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/server/test_monitor.py python-pykmip-0.8.0/kmip/tests/unit/services/server/test_monitor.py --- python-pykmip-0.7.0/kmip/tests/unit/services/server/test_monitor.py 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/server/test_monitor.py 
2018-04-12 06:42:35.000000000 +0000 @@ -0,0 +1,2153 @@ +# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging +import mock +import multiprocessing +import os +import shutil +import signal +import tempfile +import testtools + +from kmip.core import enums +from kmip.services.server import monitor + + +class TestMonitorUtilities(testtools.TestCase): + + def setUp(self): + super(TestMonitorUtilities, self).setUp() + + self.tmp_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, self.tmp_dir) + + def test_get_json_files(self): + """ + Test that all files ending in .json can be collected from a directory. 
+ """ + with open(os.path.join(self.tmp_dir, "policy_1.json"), "w") as f: + f.write('{"policy_1": {}}\n') + with open(os.path.join(self.tmp_dir, "policy_2.json"), "w") as f: + f.write('{"policy_2": {}}\n') + with open(os.path.join(self.tmp_dir, "policy_3.txt"), "w") as f: + f.write('{"policy_3": {}}\n') + + result = monitor.get_json_files(self.tmp_dir) + + self.assertIsInstance(result, list) + self.assertEqual(2, len(result)) + self.assertIn(os.path.join(self.tmp_dir, "policy_1.json"), result) + self.assertIn(os.path.join(self.tmp_dir, "policy_2.json"), result) + + +POLICY_1 = """ +{ + "policy_A": { + "groups": { + "group_A": { + "SYMMETRIC_KEY": { + "GET": "ALLOW_ALL", + "DESTROY": "ALLOW_ALL" + } + } + } + } +} +""" +POLICY_2 = """ +{ + "policy_B": { + "groups": { + "group_B": { + "SYMMETRIC_KEY": { + "GET": "ALLOW_ALL", + "LOCATE": "ALLOW_ALL", + "DESTROY": "ALLOW_ALL" + } + } + } + }, + "policy_C": { + "groups": { + "group_C": { + "SYMMETRIC_KEY": { + "GET": "ALLOW_ALL", + "DESTROY": "DISALLOW_ALL" + } + } + } + } +} +""" +POLICY_3 = """ +{ + "policy_B": { + "groups": { + "group_B": { + "SYMMETRIC_KEY": { + "GET": "DISALLOW_ALL", + "LOCATE": "DISALLOW_ALL", + "DESTROY": "DISALLOW_ALL" + } + } + } + } +} +""" +POLICY_4 = """ +{ + "default": { + "groups": { + "group_B": { + "SYMMETRIC_KEY": { + "GET": "DISALLOW_ALL", + "LOCATE": "DISALLOW_ALL", + "DESTROY": "DISALLOW_ALL" + } + } + } + } +} +""" +POLICY_5 = """ +{ + "policy_B": { + "groups": { + "group_B": { + "SYMMETRIC_KEY": { + "GET": "ALLOW_ALL", + "LOCATE": "ALLOW_ALL", + "DESTROY": "ALLOW_ALL" + } + } + } + }, + "policy_D": { + "groups": { + "group_D": { + "SYMMETRIC_KEY": { + "GET": "ALLOW_ALL", + "DESTROY": "DISALLOW_ALL" + } + } + } + } +} +""" +POLICY_6 = """ +{ + "policy_A": { + "groups": { + "group_A": { + "SYMMETRIC_KEY": { + "GET": "ALLOW_ALL", + "DESTROY": "ALLOW_ALL" + } + } + } + }, + "policy_E": { + "groups": { + "group_E": { + "SYMMETRIC_KEY": { + "GET": "ALLOW_ALL", + "CHECK": "ALLOW_OWNER", + 
"DESTROY": "ALLOW_ALL" + } + } + } + } +} +""" +POLICY_7 = """ +{ + "policy_D": { + "groups": { + "group_D": { + "SYMMETRIC_KEY": { + "GET": "DISALLOW_ALL", + "LOCATE": "DISALLOW_ALL", + "DESTROY": "DISALLOW_ALL" + } + } + } + } +} +""" + + +def write_file(path, file_name, content): + with open(os.path.join(path, file_name), "w") as f: + f.write("{}\n".format(content)) + + +def side_effects(effects): + for effect in effects: + if isinstance(effect, bool): + yield effect + else: + effect() + yield False + + +def build_write_effect(path, file_name, content): + def side_effect(): + write_file(path, file_name, content) + return side_effect + + +def build_delete_effect(path, file_name): + def side_effect(): + os.remove(os.path.join(path, file_name)) + return side_effect + + +class TestPolicyDirectoryMonitor(testtools.TestCase): + + def setUp(self): + super(TestPolicyDirectoryMonitor, self).setUp() + + self.tmp_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, self.tmp_dir) + + def test_init(self): + """ + Test that the PolicyDirectoryMonitor can be instantiated without error. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + + self.assertIsInstance( + m.halt_trigger, + multiprocessing.synchronize.Event + ) + self.assertEqual(self.tmp_dir, m.policy_directory) + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + self.assertEqual(['default', 'public'], m.reserved_policies) + self.assertIsInstance(m.logger, logging.Logger) + + def test_signal_handler(self): + """ + Test that the signal handler for SIGINT and SIGTERM correctly stops + the monitor. 
+ """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.stop = mock.MagicMock() + handler = signal.getsignal(signal.SIGINT) + + m.stop.assert_not_called() + handler(None, None) + m.stop.assert_called() + + def test_stop(self): + """ + Test that the PolicyDirectoryMonitor processes stop calls correctly. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + + self.assertFalse(m.halt_trigger.is_set()) + + m.stop() + + self.assertTrue(m.halt_trigger.is_set()) + + def test_run(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = [False, True] + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + write_file(self.tmp_dir, "policy_2.json", POLICY_2) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." + ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." 
+ ) + + self.assertEqual(2, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + self.assertEqual( + { + "policy_A": [], + "policy_B": [], + "policy_C": [] + }, + m.policy_cache + ) + + self.assertEqual(3, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + + def test_run_with_policy_overloading(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when one policy overloads another existing policy. 
+ """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = [False, True] + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + write_file(self.tmp_dir, "policy_2.json", POLICY_2) + write_file(self.tmp_dir, "policy_3.json", POLICY_3) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." + ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_3.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.debug.assert_any_call( + "Policy 'policy_B' overwrites an existing policy." + ) + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." 
+ ) + + self.assertEqual(3, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + path = os.path.join(self.tmp_dir, "policy_3.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + + cache = m.policy_cache.get("policy_A") + self.assertEqual(0, len(cache)) + cache = m.policy_cache.get("policy_B") + self.assertEqual(1, len(cache)) + self.assertEqual( + os.path.join(self.tmp_dir, "policy_2.json"), + cache[0][1] + ) + self.assertEqual( + { + 'groups': { + 'group_B': { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: + enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: + enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: + enums.Policy.ALLOW_ALL + } + } + } + }, + cache[0][2] + ) + + self.assertEqual(3, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.DISALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.DISALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: 
enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + + def test_run_with_policy_load_failure(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when one policy can't be loaded properly. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = [False, True] + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + write_file(self.tmp_dir, "policy_2.json", "not a JSON blob") + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." + ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.error.assert_any_call( + "Failure loading file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.debug.assert_called() + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." 
+ ) + + self.assertEqual(2, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + + self.assertEqual( + { + "policy_A": [] + }, + m.policy_cache + ) + + self.assertEqual(1, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + + def test_run_with_policy_load_failure_and_fix(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when one policy can't be loaded properly and is + then fixed while tracking is active. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = side_effects( + [ + False, + build_write_effect( + self.tmp_dir, + "policy_2.json", + "invalid JSON" + ), + False, + build_write_effect(self.tmp_dir, "policy_2.json", POLICY_2), + False, + True + ] + ) + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." 
+ ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.error.assert_any_call( + "Failure loading file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.debug.assert_called() + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." + ) + + self.assertEqual(2, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + self.assertEqual( + { + "policy_A": [], + "policy_B": [], + "policy_C": [] + }, + m.policy_cache + ) + + self.assertEqual(3, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + 
enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + + def test_run_with_policy_overloading_reserved(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when one policy can't be loaded properly. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = [False, True] + + write_file(self.tmp_dir, "policy_3.json", POLICY_3) + write_file(self.tmp_dir, "policy_4.json", POLICY_4) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." + ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_3.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_4.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: default") + m.logger.warning.assert_any_call( + "Policy 'default' overwrites a reserved policy and will be " + "thrown out." + ) + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." 
+ ) + + self.assertEqual(2, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_3.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + + path = os.path.join(self.tmp_dir, "policy_4.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + + self.assertEqual( + { + "policy_B": [] + }, + m.policy_cache + ) + + self.assertEqual(1, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.DISALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.DISALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + + def test_run_with_edit_modifying_existing_file(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when an existing policy file is modified while + tracking is active. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = side_effects( + [ + False, + build_write_effect(self.tmp_dir, "policy_2.json", POLICY_5), + True + ] + ) + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + write_file(self.tmp_dir, "policy_2.json", POLICY_2) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." 
+ ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_D") + m.logger.info.assert_any_call("Removing policy: policy_C") + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." + ) + + self.assertEqual(2, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + self.assertEqual(path, m.policy_map.get("policy_D", None)) + + self.assertEqual( + { + "policy_A": [], + "policy_B": [], + "policy_D": [] + }, + m.policy_cache + ) + + self.assertEqual(3, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: 
enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_D": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_D", None) + ) + + def test_run_with_edit_adding_to_existing_file(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when an existing policy file is added to while + tracking is active. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = side_effects( + [ + False, + build_write_effect(self.tmp_dir, "policy_1.json", POLICY_6), + True + ] + ) + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + write_file(self.tmp_dir, "policy_2.json", POLICY_2) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." 
+ ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call("Loading policy: policy_E") + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." + ) + + self.assertEqual(2, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + self.assertEqual(path, m.policy_map.get("policy_E", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + self.assertEqual( + { + "policy_A": [], + "policy_B": [], + "policy_C": [], + "policy_E": [] + }, + m.policy_cache + ) + + self.assertEqual(4, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + 
enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + self.assertEqual( + { + "groups": { + "group_E": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.CHECK: enums.Policy.ALLOW_OWNER, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_E", None) + ) + + def test_run_with_edit_deleting_from_existing_file(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when an existing policy file has content removed + while tracking is active. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = side_effects( + [ + False, + build_write_effect(self.tmp_dir, "policy_1.json", POLICY_1), + True + ] + ) + + write_file(self.tmp_dir, "policy_1.json", POLICY_6) + write_file(self.tmp_dir, "policy_2.json", POLICY_2) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." 
+ ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call("Loading policy: policy_E") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call("Removing policy: policy_E") + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." + ) + + self.assertEqual(2, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + self.assertEqual( + { + "policy_A": [], + "policy_B": [], + "policy_C": [] + }, + m.policy_cache + ) + + self.assertEqual(3, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: 
enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + + def test_run_with_deleting_existing_file(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when an existing policy file is removed while + tracking is active. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = side_effects( + [ + False, + build_delete_effect(self.tmp_dir, "policy_1.json"), + True + ] + ) + + write_file(self.tmp_dir, "policy_1.json", POLICY_6) + write_file(self.tmp_dir, "policy_2.json", POLICY_2) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." 
+ ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call("Loading policy: policy_E") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Removing policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Removing policy: policy_A") + m.logger.info.assert_any_call("Removing policy: policy_E") + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." + ) + + self.assertEqual(1, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(2, len(m.policy_map.keys())) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + self.assertEqual( + { + "policy_B": [], + "policy_C": [] + }, + m.policy_cache + ) + + self.assertEqual(2, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + + def test_run_with_adding_new_file(self): + """ + Test that the 
PolicyDirectoryMonitor can load policy files and track + them properly, even when a new policy file is added while tracking is + active. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = side_effects( + [ + False, + build_write_effect(self.tmp_dir, "policy_2.json", POLICY_2), + True + ] + ) + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." + ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.debug.assert_not_called() + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." 
+ ) + + self.assertEqual(2, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + self.assertEqual( + { + "policy_A": [], + "policy_B": [], + "policy_C": [] + }, + m.policy_cache + ) + + self.assertEqual(3, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + + def test_run_with_adding_new_file_overloading(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when new policy files are added overwritting + existing policies while tracking is active. 
+ """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = side_effects( + [ + False, + build_write_effect(self.tmp_dir, "policy_3.json", POLICY_2), + build_write_effect(self.tmp_dir, "policy_4.json", POLICY_3), + True + ] + ) + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + write_file(self.tmp_dir, "policy_2.json", POLICY_2) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." + ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_3.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.debug.assert_any_call( + "Policy 'policy_B' overwrites an existing policy." + ) + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.debug.assert_any_call( + "Policy 'policy_C' overwrites an existing policy." + ) + + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_4.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.debug.assert_any_call( + "Policy 'policy_B' overwrites an existing policy." 
+ ) + + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." + ) + + self.assertEqual(4, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + + path = os.path.join(self.tmp_dir, "policy_3.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + path = os.path.join(self.tmp_dir, "policy_4.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + + self.assertEqual([], m.policy_cache.get("policy_A")) + cache = m.policy_cache.get("policy_B") + self.assertEqual(2, len(cache)) + self.assertEqual( + os.path.join(self.tmp_dir, "policy_2.json"), + cache[0][1] + ) + self.assertEqual( + { + 'groups': { + 'group_B': { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: + enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: + enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: + enums.Policy.ALLOW_ALL + } + } + } + }, + cache[0][2] + ) + self.assertEqual( + os.path.join(self.tmp_dir, "policy_3.json"), + cache[1][1] + ) + self.assertEqual( + { + 'groups': { + 'group_B': { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: + enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: + enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: + enums.Policy.ALLOW_ALL + } + } + } + }, + cache[1][2] + ) + cache = m.policy_cache.get("policy_C") + self.assertEqual(1, len(cache)) + self.assertEqual( + 
os.path.join(self.tmp_dir, "policy_2.json"), + cache[0][1] + ) + self.assertEqual( + { + 'groups': { + 'group_C': { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: + enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: + enums.Policy.DISALLOW_ALL + } + } + } + }, + cache[0][2] + ) + + self.assertEqual(3, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.DISALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.DISALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + + def test_run_with_adding_new_file_editing_overloading(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly, even when new policy files are added overwritting + existing policies while tracking is active. 
+ """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = side_effects( + [ + False, + build_write_effect(self.tmp_dir, "policy_3.json", POLICY_2), + build_write_effect(self.tmp_dir, "policy_4.json", POLICY_3), + build_delete_effect(self.tmp_dir, "policy_2.json"), + build_write_effect(self.tmp_dir, "policy_4.json", POLICY_7), + True + ] + ) + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + write_file(self.tmp_dir, "policy_2.json", POLICY_2) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Starting up the operation policy file monitor." + ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_3.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.debug.assert_any_call( + "Policy 'policy_B' overwrites an existing policy." + ) + m.logger.info.assert_any_call("Loading policy: policy_C") + m.logger.debug.assert_any_call( + "Policy 'policy_C' overwrites an existing policy." 
+ ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_4.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.debug.assert_any_call( + "Policy 'policy_B' overwrites an existing policy." + ) + m.logger.info.assert_any_call( + "Removing policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_4.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_D") + m.logger.info.assert_any_call( + "Stopping the operation policy file monitor." + ) + + self.assertEqual(3, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_3.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + path = os.path.join(self.tmp_dir, "policy_4.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_D", None)) + + self.assertEqual([], m.policy_cache.get("policy_A")) + self.assertEqual([], m.policy_cache.get("policy_B")) + self.assertEqual([], m.policy_cache.get("policy_C")) + + self.assertEqual(4, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_A", None) + ) + self.assertEqual( + 
{ + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + self.assertEqual( + { + "groups": { + "group_D": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.DISALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.DISALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_D", None) + ) + + def test_run_without_live_monitoring(self): + """ + Test that the PolicyDirectoryMonitor can load policy files and track + them properly even when operating in a one-shot scanning mode. 
+ """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict(), + live_monitoring=False + ) + m.logger = mock.MagicMock(logging.Logger) + m.halt_trigger = mock.MagicMock(multiprocessing.synchronize.Event) + m.halt_trigger.is_set.side_effect = [False, True] + + write_file(self.tmp_dir, "policy_1.json", POLICY_1) + write_file(self.tmp_dir, "policy_2.json", POLICY_2) + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual([], m.policy_store.keys()) + + m.run() + + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_1.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_A") + m.logger.info.assert_any_call( + "Loading policies for file: {}".format( + os.path.join(self.tmp_dir, "policy_2.json") + ) + ) + m.logger.info.assert_any_call("Loading policy: policy_B") + m.logger.info.assert_any_call("Loading policy: policy_C") + + self.assertEqual(2, len(m.policy_files)) + path = os.path.join(self.tmp_dir, "policy_1.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_A", None)) + + path = os.path.join(self.tmp_dir, "policy_2.json") + self.assertEqual( + os.path.getmtime(path), + m.file_timestamps.get(path, None) + ) + self.assertIn(path, m.policy_files) + self.assertEqual(path, m.policy_map.get("policy_B", None)) + self.assertEqual(path, m.policy_map.get("policy_C", None)) + + self.assertEqual( + { + "policy_A": [], + "policy_B": [], + "policy_C": [] + }, + m.policy_cache + ) + + self.assertEqual(3, len(m.policy_store.keys())) + self.assertEqual( + { + "groups": { + "group_A": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, 
+ m.policy_store.get("policy_A", None) + ) + self.assertEqual( + { + "groups": { + "group_B": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.LOCATE: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.ALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_B", None) + ) + self.assertEqual( + { + "groups": { + "group_C": { + enums.ObjectType.SYMMETRIC_KEY: { + enums.Operation.GET: enums.Policy.ALLOW_ALL, + enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL + } + } + } + }, + m.policy_store.get("policy_C", None) + ) + + def test_initialize_tracking_structures(self): + """ + Test that the PolicyDirectoryMonitor can correctly initialize/reset the + various tracking structures used for file monitoring. + """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + + m.file_timestamps["a"] = 1234 + m.policy_cache["a"] = (123.12, "b", {"c": 2}) + m.policy_files = ["a", "b"] + m.policy_map["a"] = "b" + m.policy_store["a"] = {"c": 2} + m.policy_store["default"] = {"c": 3} + + m.initialize_tracking_structures() + + self.assertEqual({}, m.file_timestamps) + self.assertEqual({}, m.policy_cache) + self.assertEqual([], m.policy_files) + self.assertEqual({}, m.policy_map) + self.assertEqual(["default"], m.policy_store.keys()) + self.assertEqual({"c": 3}, m.policy_store.get("default")) + + def test_disassociate_policy_and_file(self): + """ + Test that the PolicyDirectoryMonitor can correctly unlink a policy and + a policy file in its tracking structures. 
+ """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + + m.policy_cache = { + "policy_A": [ + ( + 1480043060.870089, + os.path.join(self.tmp_dir, "policy_1.json"), + {} + ), + ( + 1480043062.02171, + os.path.join(self.tmp_dir, "policy_2.json"), + {} + ), + ( + 1480043062.645776, + os.path.join(self.tmp_dir, "policy_1.json"), + {} + ), + ( + 1480043063.453713, + os.path.join(self.tmp_dir, "policy_3.json"), + {} + ) + ], + "policy_B": [ + ( + 1480043123.65311, + os.path.join(self.tmp_dir, "policy_1.json"), + {} + ) + ] + } + + m.disassociate_policy_and_file( + "policy_A", + os.path.join(self.tmp_dir, "policy_1.json") + ) + + self.assertEqual( + [ + ( + 1480043062.02171, + os.path.join(self.tmp_dir, "policy_2.json"), + {} + ), + ( + 1480043063.453713, + os.path.join(self.tmp_dir, "policy_3.json"), + {} + ) + ], + m.policy_cache.get("policy_A", []) + ) + self.assertEqual( + [ + ( + 1480043123.65311, + os.path.join(self.tmp_dir, "policy_1.json"), + {} + ) + ], + m.policy_cache.get("policy_B", []) + ) + + def test_restore_or_delete_policy_restore(self): + """ + Test that the PolicyDirectoryMonitor can correctly restore policy data + upon a policy file change. 
+ """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + + m.policy_cache = { + "policy_A": [ + ( + 1480043060.870089, + os.path.join(self.tmp_dir, "policy_1.json"), + {'{"policy_1"}'} + ), + ( + 1480043062.02171, + os.path.join(self.tmp_dir, "policy_2.json"), + {'{"policy_2"}'} + ), + ( + 1480043063.453713, + os.path.join(self.tmp_dir, "policy_3.json"), + {'{"policy_3"}'} + ) + ] + } + m.policy_store["policy_A"] = {'{"policy_4"}'} + m.policy_map["policy_A"] = os.path.join(self.tmp_dir, "policy_4.json") + + m.restore_or_delete_policy("policy_A") + + m.logger.info.assert_not_called() + self.assertEqual( + [ + ( + 1480043060.870089, + os.path.join(self.tmp_dir, "policy_1.json"), + {'{"policy_1"}'} + ), + ( + 1480043062.02171, + os.path.join(self.tmp_dir, "policy_2.json"), + {'{"policy_2"}'} + ) + ], + m.policy_cache.get("policy_A", []) + ) + self.assertEqual( + {'{"policy_3"}'}, + m.policy_store.get("policy_A", {}) + ) + self.assertEqual( + os.path.join(self.tmp_dir, "policy_3.json"), + m.policy_map.get("policy_A", None) + ) + + def test_restore_or_delete_policy_delete(self): + """ + Test that the PolicyDirectoryMonitor can correctly delete policy data + upon a policy file change. 
+ """ + m = monitor.PolicyDirectoryMonitor( + self.tmp_dir, + multiprocessing.Manager().dict() + ) + m.logger = mock.MagicMock(logging.Logger) + + m.policy_cache = { + "policy_A": [] + } + m.policy_store["policy_A"] = {'{"policy_4"}'} + m.policy_map["policy_A"] = os.path.join(self.tmp_dir, "policy_4.json") + + m.restore_or_delete_policy("policy_A") + + m.logger.info.assert_called_once_with("Removing policy: policy_A") + self.assertNotIn("policy_A", m.policy_cache.keys()) + self.assertNotIn("policy_A", m.policy_store.keys()) + self.assertNotIn("policy_A", m.policy_map.keys()) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/server/test_policy.py python-pykmip-0.8.0/kmip/tests/unit/services/server/test_policy.py --- python-pykmip-0.7.0/kmip/tests/unit/services/server/test_policy.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/server/test_policy.py 2018-04-02 17:12:18.000000000 +0000 @@ -35,14 +35,14 @@ """ Test that an AttributePolicy can be built without any errors. """ - policy.AttributePolicy(contents.ProtocolVersion.create(1, 0)) + policy.AttributePolicy(contents.ProtocolVersion(1, 0)) def test_is_attribute_supported(self): """ Test that is_attribute_supported returns the expected results in all cases. """ - rules = policy.AttributePolicy(contents.ProtocolVersion.create(1, 0)) + rules = policy.AttributePolicy(contents.ProtocolVersion(1, 0)) attribute_a = 'Unique Identifier' attribute_b = 'Certificate Length' attribute_c = 'invalid' @@ -61,7 +61,7 @@ Test that is_attribute_deprecated returns the expected results in all cases. 
""" - rules = policy.AttributePolicy(contents.ProtocolVersion.create(1, 0)) + rules = policy.AttributePolicy(contents.ProtocolVersion(1, 0)) attribute_a = 'Name' attribute_b = 'Certificate Subject' @@ -71,7 +71,7 @@ result = rules.is_attribute_deprecated(attribute_b) self.assertFalse(result) - rules = policy.AttributePolicy(contents.ProtocolVersion.create(1, 1)) + rules = policy.AttributePolicy(contents.ProtocolVersion(1, 1)) result = rules.is_attribute_deprecated(attribute_b) self.assertTrue(result) @@ -81,7 +81,7 @@ Test that is_attribute_applicable_to_object_type returns the expected results in all cases. """ - rules = policy.AttributePolicy(contents.ProtocolVersion.create(1, 0)) + rules = policy.AttributePolicy(contents.ProtocolVersion(1, 0)) attribute = 'Cryptographic Algorithm' object_type_a = enums.ObjectType.SYMMETRIC_KEY object_type_b = enums.ObjectType.OPAQUE_DATA @@ -103,7 +103,7 @@ Test that is_attribute_multivalued returns the expected results in all cases. """ - rules = policy.AttributePolicy(contents.ProtocolVersion.create(1, 0)) + rules = policy.AttributePolicy(contents.ProtocolVersion(1, 0)) attribute_a = 'Object Type' attribute_b = 'Link' @@ -118,7 +118,7 @@ Test that get_all_attribute_names returns a complete list of the names of all spec-defined attributes. 
""" - rules = policy.AttributePolicy(contents.ProtocolVersion.create(1, 0)) + rules = policy.AttributePolicy(contents.ProtocolVersion(1, 0)) attribute_names = [ 'Unique Identifier', 'Name', diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/server/test_server.py python-pykmip-0.8.0/kmip/tests/unit/services/server/test_server.py --- python-pykmip-0.7.0/kmip/tests/unit/services/server/test_server.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/server/test_server.py 2018-05-09 05:46:08.000000000 +0000 @@ -51,7 +51,6 @@ self.assertTrue(logging_mock.called) self.assertIsInstance(s.auth_suite, auth.BasicAuthenticationSuite) - self.assertIsNotNone(s._engine) self.assertEqual(1, s._session_id) self.assertFalse(s._is_serving) @@ -122,7 +121,8 @@ '/etc/pykmip/policies', False, 'TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA', - 'DEBUG' + 'DEBUG', + '/var/pykmip/pykmip.db' ) s.config.load_settings.assert_called_with('/etc/pykmip/server.conf') @@ -157,6 +157,10 @@ ] ) s.config.set_setting.assert_any_call('logging_level', 'DEBUG') + s.config.set_setting.assert_any_call( + 'database_path', + '/var/pykmip/pykmip.db' + ) # Test that an attempt is made to instantiate the TLS 1.2 auth suite s = server.KmipServer( @@ -167,13 +171,27 @@ self.assertEqual('TLS1.2', s.config.settings.get('auth_suite')) self.assertIsNotNone(s.auth_suite) + @mock.patch('multiprocessing.Manager') + @mock.patch('kmip.services.server.monitor.PolicyDirectoryMonitor') @mock.patch('kmip.services.server.engine.KmipEngine') @mock.patch('kmip.services.server.server.KmipServer._setup_logging') - def test_start(self, logging_mock, engine_mock): + def test_start(self, + logging_mock, + engine_mock, + monitor_mock, + manager_mock): """ Test that starting the KmipServer either runs as expected or generates the expected error. 
""" + monitor_instance_mock = mock.MagicMock() + monitor_mock.return_value = monitor_instance_mock + + dict_mock = mock.MagicMock() + manager_instance_mock = mock.MagicMock() + manager_instance_mock.dict.return_value = dict_mock + manager_mock.return_value = manager_instance_mock + a_mock = mock.MagicMock() b_mock = mock.MagicMock() @@ -196,7 +214,18 @@ socket_mock.return_value = a_mock ssl_mock.return_value = b_mock + manager_mock.assert_not_called() + monitor_mock.assert_not_called() + s.start() + + manager_mock.assert_called_once_with() + monitor_mock.assert_called_once_with( + None, + dict_mock, + False + ) + self.assertIsNotNone(s._engine) s._logger.info.assert_any_call( "Starting server socket handler." ) @@ -223,8 +252,20 @@ "127.0.0.1:5696" ) + monitor_instance_mock.stop.assert_not_called() + handler = signal.getsignal(signal.SIGINT) + handler(None, None) + monitor_instance_mock.stop.assert_called_once_with() + monitor_instance_mock.stop.reset_mock() + monitor_instance_mock.stop.assert_not_called() + handler = signal.getsignal(signal.SIGTERM) + handler(None, None) + monitor_instance_mock.stop.assert_called_once_with() + self.assertTrue(s._is_serving) + manager_mock.reset_mock() + monitor_mock.reset_mock() a_mock.reset_mock() b_mock.reset_mock() @@ -237,6 +278,9 @@ test_exception = Exception() b_mock.bind.side_effect = test_exception + manager_mock.assert_not_called() + monitor_mock.assert_not_called() + regex = ( "Server failed to bind socket handler to 127.0.0.1:5696" ) @@ -245,6 +289,13 @@ regex, s.start ) + + manager_mock.assert_called_once_with() + monitor_mock.assert_called_once_with( + None, + dict_mock, + False + ) s._logger.info.assert_any_call( "Starting server socket handler." 
) @@ -370,6 +421,50 @@ @mock.patch('kmip.services.server.engine.KmipEngine') @mock.patch('kmip.services.server.server.KmipServer._setup_logging') + def test_stop_with_monitor_shutdown_error(self, logging_mock, engine_mock): + """ + Test that the right calls and log messages are triggered when stopping + the server results in an error while shutting down the policy monitor. + """ + s = server.KmipServer( + hostname='127.0.0.1', + port=5696, + config_path=None, + policy_path=None + ) + s._logger = mock.MagicMock() + s._socket = mock.MagicMock() + s.policy_monitor = mock.MagicMock() + test_exception = Exception() + s.policy_monitor.join.side_effect = test_exception + + # Test the expected behavior for a normal server stop sequence + thread_mock = mock.MagicMock() + thread_mock.join = mock.MagicMock() + thread_mock.is_alive = mock.MagicMock(return_value=False) + thread_mock.name = 'TestThread' + + regex = "Server failed to clean up the policy monitor." + self.assertRaisesRegexp( + exceptions.ShutdownError, + regex, + s.stop + ) + s._logger.info.assert_any_call( + "Cleaning up remaining connection threads." + ) + s._logger.info.assert_any_call( + "Shutting down server socket handler." 
+ ) + s._socket.shutdown.assert_called_once_with(socket.SHUT_RDWR) + s._socket.close.assert_called_once_with() + + s.policy_monitor.stop.assert_called_once_with() + s.policy_monitor.join.assert_called_once_with() + s._logger.exception(test_exception) + + @mock.patch('kmip.services.server.engine.KmipEngine') + @mock.patch('kmip.services.server.server.KmipServer._setup_logging') def test_serve(self, logging_mock, engine_mock): """ Test that the right calls and log messages are triggered while @@ -390,7 +485,11 @@ # Test the expected behavior for a normal server/interrupt sequence s._socket.accept = mock.MagicMock( - side_effect=[('connection', 'address'), expected_error] + side_effect=[ + ('connection', 'address'), + socket.timeout, + expected_error + ] ) s.serve() @@ -468,6 +567,7 @@ policy_path=None ) s._logger = mock.MagicMock() + s._engine = engine_mock # Test that the right calls and log messages are made when # starting a new session. diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/server/test_session.py python-pykmip-0.8.0/kmip/tests/unit/services/server/test_session.py --- python-pykmip-0.7.0/kmip/tests/unit/services/server/test_session.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/server/test_session.py 2018-04-02 17:12:18.000000000 +0000 @@ -16,7 +16,6 @@ from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import rsa @@ -28,6 +27,7 @@ from kmip.core import enums from kmip.core import exceptions +from kmip.core import objects from kmip.core import utils from kmip.core.messages import contents @@ -122,20 +122,20 @@ """ Test that a KmipSession can be created without errors. 
""" - session.KmipSession(None, None, 'name') + session.KmipSession(None, None, None, 'name') def test_init_without_name(self): """ Test that a KmipSession without 'name' can be created without errors. """ - session.KmipSession(None, None, None) + session.KmipSession(None, None, None, None) def test_run(self): """ Test that the message handling loop is handled properly on normal execution. """ - kmip_session = session.KmipSession(None, None, 'name') + kmip_session = session.KmipSession(None, None, None, 'name') kmip_session._logger = mock.MagicMock() kmip_session._handle_message_loop = mock.MagicMock( side_effect=[ @@ -160,7 +160,7 @@ Test that the correct logging and error handling occurs when the thread encounters an error with the message handling loop. """ - kmip_session = session.KmipSession(None, None, 'name') + kmip_session = session.KmipSession(None, None, None, 'name') kmip_session._logger = mock.MagicMock() kmip_session._connection = mock.MagicMock() @@ -186,147 +186,9 @@ kmip_session._connection.close.assert_called_once_with() kmip_session._logger.info.assert_called_with("Stopping session: name") - def test_get_client_identity(self): - """ - Test that a client identity is obtained from a valid client - certificate. 
- """ - client_certificate = build_certificate([u'Test Identity']) - der_encoding = client_certificate.public_bytes( - serialization.Encoding.DER - ) - - kmip_session = session.KmipSession(None, None, 'name') - kmip_session._logger = mock.MagicMock() - kmip_session._connection = mock.MagicMock() - kmip_session._connection.getpeercert.return_value = der_encoding - - identity = kmip_session._get_client_identity() - self.assertEqual(u'Test Identity', identity) - - kmip_session._logger.info.assert_called_once_with( - "Session client identity: Test Identity" - ) - kmip_session._logger.warning.assert_not_called() - - def test_get_client_identity_with_no_certificate(self): - """ - Test that the right error is generated when no certificate is - available to provide the client identity. - """ - kmip_session = session.KmipSession(None, None, 'name') - kmip_session._logger = mock.MagicMock() - kmip_session._connection = mock.MagicMock() - kmip_session._connection.getpeercert.return_value = None - - self.assertRaisesRegexp( - exceptions.PermissionDenied, - "Failure loading the client certificate from the session " - "connection. Could not retrieve client identity.", - kmip_session._get_client_identity - ) - - def test_get_client_identity_with_no_extended_key_usage(self): - """ - Test that the right error is generated when the client certificate - is missing its extended key usage extension. - """ - client_certificate = build_certificate([u'Test Identity'], False) - der_encoding = client_certificate.public_bytes( - serialization.Encoding.DER - ) - - kmip_session = session.KmipSession(None, None, 'name') - kmip_session._logger = mock.MagicMock() - kmip_session._connection = mock.MagicMock() - kmip_session._connection.getpeercert.return_value = der_encoding - - self.assertRaisesRegexp( - exceptions.PermissionDenied, - "The extended key usage extension is missing from the client " - "certificate. 
Session client identity unavailable.", - kmip_session._get_client_identity - ) - - def test_get_client_identity_with_no_common_name(self): - """ - Test that the right error is generated when the client certificate - does not define a subject common name. - """ - client_certificate = build_certificate([]) - der_encoding = client_certificate.public_bytes( - serialization.Encoding.DER - ) - - kmip_session = session.KmipSession(None, None, 'name') - kmip_session._logger = mock.MagicMock() - kmip_session._connection = mock.MagicMock() - kmip_session._connection.getpeercert.return_value = der_encoding - - self.assertRaisesRegexp( - exceptions.PermissionDenied, - "The client certificate does not define a subject common " - "name. Session client identity unavailable.", - kmip_session._get_client_identity - ) - - def test_get_client_identity_with_multiple_common_names(self): - """ - Test that the right client identity is returned when the client - certificate has multiple subject common names. - """ - client_certificate = build_certificate([ - u'Test Identity 1', - u'Test Identity 2' - ]) - der_encoding = client_certificate.public_bytes( - serialization.Encoding.DER - ) - - kmip_session = session.KmipSession(None, None, 'name') - kmip_session._logger = mock.MagicMock() - kmip_session._connection = mock.MagicMock() - kmip_session._connection.getpeercert.return_value = der_encoding - - identity = kmip_session._get_client_identity() - self.assertEqual(u'Test Identity 1', identity) - - kmip_session._logger.info.assert_called_once_with( - "Session client identity: Test Identity 1" - ) - kmip_session._logger.warning.assert_called_once_with( - "Multiple client identities found. Using the first one processed." - ) - - def test_get_client_identity_with_incorrect_extended_key_usage(self): - """ - Test that the right error is generated when the client certificate - does not have client authentication set in its extended key usage - extension. 
- """ - client_certificate = build_certificate( - [u'Test Identity'], - bad_extension=True - ) - der_encoding = client_certificate.public_bytes( - serialization.Encoding.DER - ) - - kmip_session = session.KmipSession(None, None, 'name') - kmip_session._logger = mock.MagicMock() - kmip_session._connection = mock.MagicMock() - kmip_session._connection.getpeercert.return_value = der_encoding - - self.assertRaisesRegexp( - exceptions.PermissionDenied, - "The extended key usage extension is not marked for client " - "authentication in the client certificate. Session client " - "identity unavailable.", - kmip_session._get_client_identity - ) - + @mock.patch('kmip.services.server.auth.get_certificate_from_connection') @mock.patch('kmip.core.messages.messages.RequestMessage') - def test_handle_message_loop(self, request_mock): + def test_handle_message_loop(self, request_mock, cert_mock): """ Test that the correct logging and error handling occurs during the message handling loop. @@ -345,7 +207,7 @@ ) batch_items = [batch_item] header = messages.ResponseHeader( - protocol_version=contents.ProtocolVersion.create(1, 0), + protocol_version=contents.ProtocolVersion(1, 0), time_stamp=contents.TimeStamp(int(time.time())), batch_count=contents.BatchCount(len(batch_items)) ) @@ -354,12 +216,22 @@ batch_items=batch_items ) + cert_mock.return_value = 'test_certificate' kmip_engine = engine.KmipEngine() kmip_engine._logger = mock.MagicMock() - kmip_session = session.KmipSession(kmip_engine, None, 'name') + kmip_session = session.KmipSession( + kmip_engine, + None, + None, + name='name', + enable_tls_client_auth=False + ) kmip_session._engine = mock.MagicMock() - kmip_session._get_client_identity = mock.MagicMock() - kmip_session._get_client_identity.return_value = 'test' + kmip_session.authenticate = mock.MagicMock() + kmip_session.authenticate.return_value = ( + 'test', + ['group A', 'group B'] + ) kmip_session._engine.process_request = mock.MagicMock( return_value=(message, 
kmip_session._max_response_size) ) @@ -376,11 +248,16 @@ ) kmip_session._receive_request = mock.MagicMock(return_value=data) kmip_session._send_response = mock.MagicMock() + kmip_session.authenticate = mock.MagicMock( + return_value=("John Doe", ["Group A"]) + ) kmip_session._handle_message_loop() kmip_session._receive_request.assert_called_once_with() - kmip_session._logger.info.assert_not_called() + kmip_session._logger.info.assert_any_call( + "Session client identity: John Doe" + ) kmip_session._logger.debug.assert_any_call( "Possible session ciphers: 2" ) @@ -399,19 +276,30 @@ kmip_session._logger.exception.assert_not_called() self.assertTrue(kmip_session._send_response.called) + @mock.patch('kmip.services.server.auth.get_certificate_from_connection') @mock.patch('kmip.core.messages.messages.RequestMessage.read', mock.MagicMock(side_effect=Exception())) - def test_handle_message_loop_with_parse_failure(self): + def test_handle_message_loop_with_parse_failure(self, cert_mock): """ Test that the correct logging and error handling occurs during the message handling loop. 
""" data = utils.BytearrayStream(()) + cert_mock.return_value = 'test_certificate' kmip_engine = engine.KmipEngine() - kmip_session = session.KmipSession(kmip_engine, None, 'name') - kmip_session._get_client_identity = mock.MagicMock() - kmip_session._get_client_identity.return_value = 'test' + kmip_session = session.KmipSession( + kmip_engine, + None, + None, + name='name', + enable_tls_client_auth=False + ) + kmip_session.authenticate = mock.MagicMock() + kmip_session.authenticate.return_value = ( + 'test', + ['group A', 'group B'] + ) kmip_session._logger = mock.MagicMock() kmip_session._connection = mock.MagicMock() kmip_session._receive_request = mock.MagicMock(return_value=data) @@ -427,18 +315,31 @@ kmip_session._logger.error.assert_not_called() self.assertTrue(kmip_session._send_response.called) + @mock.patch('kmip.services.server.auth.get_certificate_from_connection') @mock.patch('kmip.core.messages.messages.RequestMessage') - def test_handle_message_loop_with_response_too_long(self, request_mock): + def test_handle_message_loop_with_response_too_long(self, + request_mock, + cert_mock): """ Test that the correct logging and error handling occurs during the message handling loop. 
""" data = utils.BytearrayStream(()) + cert_mock.return_value = 'test_certificate' kmip_engine = engine.KmipEngine() - kmip_session = session.KmipSession(kmip_engine, None, 'name') - kmip_session._get_client_identity = mock.MagicMock() - kmip_session._get_client_identity.return_value = 'test' + kmip_session = session.KmipSession( + kmip_engine, + None, + None, + name='name', + enable_tls_client_auth=False + ) + kmip_session.authenticate = mock.MagicMock() + kmip_session.authenticate.return_value = ( + 'test', + ['group A', 'group B'] + ) kmip_session._logger = mock.MagicMock() kmip_session._connection = mock.MagicMock() kmip_session._receive_request = mock.MagicMock(return_value=data) @@ -448,24 +349,36 @@ kmip_session._handle_message_loop() kmip_session._receive_request.assert_called_once_with() -# kmip_session._logger.info.assert_not_called() self.assertTrue(kmip_session._logger.warning.called) kmip_session._logger.exception.assert_not_called() self.assertTrue(kmip_session._send_response.called) + @mock.patch('kmip.services.server.auth.get_certificate_from_connection') @mock.patch('kmip.core.messages.messages.RequestMessage') - def test_handle_message_loop_with_unexpected_error(self, request_mock): + def test_handle_message_loop_with_unexpected_error(self, + request_mock, + cert_mock): """ Test that the correct logging and error handling occurs when an unexpected error is generated while processing a request. 
""" data = utils.BytearrayStream(()) + cert_mock.return_value = 'test_certificate' kmip_engine = engine.KmipEngine() kmip_engine._logger = mock.MagicMock() - kmip_session = session.KmipSession(kmip_engine, None, 'name') - kmip_session._get_client_identity = mock.MagicMock() - kmip_session._get_client_identity.return_value = 'test' + kmip_session = session.KmipSession( + kmip_engine, + None, + None, + name='name', + enable_tls_client_auth=False + ) + kmip_session.authenticate = mock.MagicMock() + kmip_session.authenticate.return_value = ( + 'test', + ['group A', 'group B'] + ) kmip_session._engine = mock.MagicMock() test_exception = Exception("Unexpected error.") kmip_session._engine.process_request = mock.MagicMock( @@ -479,13 +392,406 @@ kmip_session._handle_message_loop() kmip_session._receive_request.assert_called_once_with() -# kmip_session._logger.info.assert_not_called() kmip_session._logger.warning.assert_called_once_with( "An unexpected error occurred while processing request." ) kmip_session._logger.exception.assert_called_once_with(test_exception) self.assertTrue(kmip_session._send_response.called) + @mock.patch('kmip.services.server.auth.get_certificate_from_connection') + @mock.patch('kmip.core.messages.messages.RequestMessage') + def test_handle_message_loop_with_authentication_failure(self, + request_mock, + cert_mock): + """ + Test that the correct logging and error handling occurs when an + authentication error is generated while processing a request. + """ + data = utils.BytearrayStream(()) + + cert_mock.return_value = 'test_certificate' + kmip_engine = engine.KmipEngine() + kmip_engine._logger = mock.MagicMock() + kmip_session = session.KmipSession( + kmip_engine, + None, + None, + name='name', + enable_tls_client_auth=False + ) + kmip_session.authenticate = mock.MagicMock() + kmip_session.authenticate.side_effect = exceptions.PermissionDenied( + "Authentication failed." 
+ ) + kmip_session._engine = mock.MagicMock() + kmip_session._logger = mock.MagicMock() + kmip_session._connection = mock.MagicMock() + kmip_session._receive_request = mock.MagicMock(return_value=data) + kmip_session._send_response = mock.MagicMock() + fake_version = contents.ProtocolVersion(1, 2) + fake_credential = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="John Doe", + password="secret" + ) + ) + fake_header = messages.RequestHeader( + protocol_version=fake_version, + authentication=contents.Authentication( + credentials=[fake_credential] + ) + ) + fake_request = messages.RequestMessage() + fake_request.request_header = fake_header + fake_request.read = mock.MagicMock() + request_mock.return_value = fake_request + + kmip_session._handle_message_loop() + + kmip_session._receive_request.assert_called_once_with() + fake_request.read.assert_called_once_with(data) + kmip_session.authenticate.assert_called_once_with( + "test_certificate", + fake_request + ) + kmip_session._logger.warning.assert_called_once_with( + "Authentication failed." + ) + kmip_session._engine.build_error_response.assert_called_once_with( + fake_version, + enums.ResultReason.AUTHENTICATION_NOT_SUCCESSFUL, + "An error occurred during client authentication. " + "See server logs for more information." + ) + kmip_session._logger.exception.assert_not_called() + self.assertTrue(kmip_session._send_response.called) + + @mock.patch('kmip.services.server.auth.get_certificate_from_connection') + @mock.patch('kmip.core.messages.messages.RequestMessage') + def test_handle_message_loop_no_certificate(self, + request_mock, + cert_mock): + """ + Test that the correct logging and error handling occurs when no + certificate is encountered while processing a request. 
+ """ + data = utils.BytearrayStream(()) + + cert_mock.return_value = None + kmip_engine = engine.KmipEngine() + kmip_engine._logger = mock.MagicMock() + kmip_session = session.KmipSession( + kmip_engine, + None, + None, + name='name', + enable_tls_client_auth=True + ) + kmip_session.authenticate = mock.MagicMock() + kmip_session._engine = mock.MagicMock() + kmip_session._logger = mock.MagicMock() + kmip_session._connection = mock.MagicMock() + kmip_session._receive_request = mock.MagicMock(return_value=data) + kmip_session._send_response = mock.MagicMock() + + kmip_session._handle_message_loop() + + kmip_session._receive_request.assert_called_once_with() + kmip_session._logger.warning( + "Failure verifying the client certificate." + ) + kmip_session._logger.exception.assert_called_once_with( + exceptions.PermissionDenied( + "The client certificate could not be loaded from the session " + "connection." + ) + ) + kmip_session._engine.build_error_response.assert_called_once_with( + contents.ProtocolVersion(1, 0), + enums.ResultReason.AUTHENTICATION_NOT_SUCCESSFUL, + "Error verifying the client certificate. " + "See server logs for more information." + ) + self.assertTrue(kmip_session._send_response.called) + + @mock.patch( + 'kmip.services.server.auth.get_extended_key_usage_from_certificate' + ) + @mock.patch('kmip.services.server.auth.get_certificate_from_connection') + @mock.patch('kmip.core.messages.messages.RequestMessage') + def test_handle_message_loop_no_certificate_extension(self, + request_mock, + cert_mock, + ext_mock): + """ + Test that the correct logging and error handling occurs when an + invalid certificate is encountered while processing a request. 
+ """ + data = utils.BytearrayStream(()) + + cert_mock.return_value = 'test_certificate' + ext_mock.return_value = None + kmip_engine = engine.KmipEngine() + kmip_engine._logger = mock.MagicMock() + kmip_session = session.KmipSession( + kmip_engine, + None, + None, + name='name', + enable_tls_client_auth=True + ) + kmip_session.authenticate = mock.MagicMock() + kmip_session._engine = mock.MagicMock() + kmip_session._logger = mock.MagicMock() + kmip_session._connection = mock.MagicMock() + kmip_session._receive_request = mock.MagicMock(return_value=data) + kmip_session._send_response = mock.MagicMock() + + kmip_session._handle_message_loop() + + kmip_session._receive_request.assert_called_once_with() + kmip_session._logger.warning( + "Failure verifying the client certificate." + ) + kmip_session._logger.exception.assert_called_once_with( + exceptions.PermissionDenied( + "The extended key usage extension is missing from the client " + "certificate." + ) + ) + kmip_session._engine.build_error_response.assert_called_once_with( + contents.ProtocolVersion(1, 0), + enums.ResultReason.AUTHENTICATION_NOT_SUCCESSFUL, + "Error verifying the client certificate. " + "See server logs for more information." + ) + self.assertTrue(kmip_session._send_response.called) + + @mock.patch( + 'kmip.services.server.auth.get_extended_key_usage_from_certificate' + ) + @mock.patch('kmip.services.server.auth.get_certificate_from_connection') + @mock.patch('kmip.core.messages.messages.RequestMessage') + def test_handle_message_loop_invalid_certificate_extension(self, + request_mock, + cert_mock, + ext_mock): + """ + Test that the correct logging and error handling occurs when an + invalid certificate is encountered while processing a request. 
+ """ + data = utils.BytearrayStream(()) + + cert_mock.return_value = 'test_certificate' + ext_mock.return_value = [] + kmip_engine = engine.KmipEngine() + kmip_engine._logger = mock.MagicMock() + kmip_session = session.KmipSession( + kmip_engine, + None, + None, + name='name', + enable_tls_client_auth=True + ) + kmip_session.authenticate = mock.MagicMock() + kmip_session._engine = mock.MagicMock() + kmip_session._logger = mock.MagicMock() + kmip_session._connection = mock.MagicMock() + kmip_session._receive_request = mock.MagicMock(return_value=data) + kmip_session._send_response = mock.MagicMock() + + kmip_session._handle_message_loop() + + kmip_session._receive_request.assert_called_once_with() + kmip_session._logger.warning( + "Failure verifying the client certificate." + ) + kmip_session._logger.exception.assert_called_once_with( + exceptions.PermissionDenied( + "The extended key usage extension is not marked for client " + "authentication in the client certificate." + ) + ) + kmip_session._engine.build_error_response.assert_called_once_with( + contents.ProtocolVersion(1, 0), + enums.ResultReason.AUTHENTICATION_NOT_SUCCESSFUL, + "Error verifying the client certificate. " + "See server logs for more information." + ) + self.assertTrue(kmip_session._send_response.called) + + @mock.patch( + "kmip.services.server.auth.get_client_identity_from_certificate" + ) + def test_authenticate(self, mock_get): + """ + Test that the session correctly uses the authentication plugin + framework to authenticate new connections. + """ + mock_get.return_value = "John Doe" + kmip_session = session.KmipSession( + None, + None, + None, + name='TestSession' + ) + kmip_session._logger = mock.MagicMock() + fake_request = messages.RequestMessage( + request_header=messages.RequestHeader() + ) + + session_identity = kmip_session.authenticate( + "fake_certificate", + fake_request + ) + + kmip_session._logger.debug.assert_any_call( + "No authentication plugins are enabled. 
The client identity will " + "be extracted from the client certificate." + ) + mock_get.assert_any_call("fake_certificate") + kmip_session._logger.debug.assert_any_call( + "Extraction succeeded for client identity: John Doe" + ) + self.assertEqual(("John Doe", None), session_identity) + + @mock.patch("kmip.services.server.auth.SLUGSConnector") + def test_authenticate_against_slugs(self, mock_connector): + """ + Test that the session correctly handles authentication with SLUGS. + """ + mock_instance = mock.MagicMock() + mock_instance.authenticate.return_value = ("John Doe", ["Group A"]) + mock_connector.return_value = mock_instance + kmip_session = session.KmipSession( + None, + None, + ("127.0.0.1", 48026), + name='TestSession', + auth_settings=[( + "auth:slugs", + {"enabled": "True", "url": "test_url"} + )] + ) + kmip_session._logger = mock.MagicMock() + fake_credential = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="John Doe", + password="secret" + ) + ) + fake_request = messages.RequestMessage( + request_header=messages.RequestHeader( + authentication=contents.Authentication( + credentials=[fake_credential] + ) + ) + ) + + result = kmip_session.authenticate( + "fake_certificate", + fake_request + ) + + mock_connector.assert_any_call("test_url") + kmip_session._logger.debug.assert_any_call( + "Authenticating with plugin: auth:slugs" + ) + mock_instance.authenticate.assert_any_call( + "fake_certificate", + (("127.0.0.1", 48026), kmip_session._session_time), + fake_request.request_header.authentication.credentials + ) + kmip_session._logger.debug( + "Authentication succeeded for client identity: John Doe" + ) + self.assertEqual(2, len(result)) + self.assertEqual("John Doe", result[0]) + self.assertEqual(["Group A"], result[1]) + + @mock.patch("kmip.services.server.auth.SLUGSConnector") + def test_authenticate_against_slugs_with_failure(self, mock_connector): + """ 
+ Test that the session correctly handles a SLUGS authentication error. + """ + mock_instance = mock.MagicMock() + test_exception = exceptions.PermissionDenied( + "Unrecognized user ID: John Doe" + ) + mock_instance.authenticate.side_effect = test_exception + mock_connector.return_value = mock_instance + kmip_session = session.KmipSession( + None, + None, + ("127.0.0.1", 48026), + name='TestSession', + auth_settings=[( + "auth:slugs", + {"enabled": "True", "url": "test_url"} + )] + ) + kmip_session._logger = mock.MagicMock() + fake_credential = objects.Credential( + credential_type=enums.CredentialType.USERNAME_AND_PASSWORD, + credential_value=objects.UsernamePasswordCredential( + username="John Doe", + password="secret" + ) + ) + fake_request = messages.RequestMessage( + request_header=messages.RequestHeader( + authentication=contents.Authentication( + credentials=[fake_credential] + ) + ) + ) + + args = ("fake_certificate", fake_request) + self.assertRaisesRegexp( + exceptions.PermissionDenied, + "Authentication failed.", + kmip_session.authenticate, + *args + ) + + mock_connector.assert_any_call("test_url") + kmip_session._logger.debug.assert_any_call( + "Authenticating with plugin: auth:slugs" + ) + kmip_session._logger.warning.assert_any_call("Authentication failed.") + kmip_session._logger.exception.assert_any_call(test_exception) + + def test_authenticate_against_unrecognized_plugin(self): + """ + Test that the session correctly handles an unrecognized plugin + configuration. 
+ """ + kmip_session = session.KmipSession( + None, + None, + None, + name='TestSession', + auth_settings=[("auth:unrecognized", {})] + ) + kmip_session._logger = mock.MagicMock() + fake_request = messages.RequestMessage( + request_header=messages.RequestHeader() + ) + + args = ("fake_certificate", fake_request) + self.assertRaisesRegexp( + exceptions.PermissionDenied, + "Authentication failed.", + kmip_session.authenticate, + *args + ) + + kmip_session._logger.warning.assert_any_call( + "Authentication plugin 'auth:unrecognized' is not supported." + ) + def test_receive_request(self): """ Test that the session can correctly receive and parse a message @@ -494,7 +800,7 @@ content = b'\x00\x00\x00\x00\x00\x00\x00\x00' expected = utils.BytearrayStream((content)) - kmip_session = session.KmipSession(None, None, 'name') + kmip_session = session.KmipSession(None, None, None, 'name') kmip_session._receive_bytes = mock.MagicMock( side_effect=[content, b''] ) @@ -512,7 +818,7 @@ """ content = b'\x00\x00\x00\x00\x00\x00\x00\x00' - kmip_session = session.KmipSession(None, None, 'name') + kmip_session = session.KmipSession(None, None, None, 'name') kmip_session._connection = mock.MagicMock() kmip_session._connection.recv = mock.MagicMock( side_effect=[content, content] @@ -542,7 +848,7 @@ """ content = b'\x00\x00\x00\x00\x00\x00\x00\x00' - kmip_session = session.KmipSession(None, None, 'name') + kmip_session = session.KmipSession(None, None, None, 'name') kmip_session._connection = mock.MagicMock() kmip_session._connection.recv = mock.MagicMock( side_effect=[content, content, None] @@ -563,7 +869,7 @@ )) buffer_empty = utils.BytearrayStream() - kmip_session = session.KmipSession(None, None, 'name') + kmip_session = session.KmipSession(None, None, None, 'name') kmip_session._connection = mock.MagicMock() kmip_session._send_response(buffer_empty.buffer) diff -Nru python-pykmip-0.7.0/kmip/tests/unit/services/test_kmip_client.py 
python-pykmip-0.8.0/kmip/tests/unit/services/test_kmip_client.py --- python-pykmip-0.7.0/kmip/tests/unit/services/test_kmip_client.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/tests/unit/services/test_kmip_client.py 2018-04-17 20:03:33.000000000 +0000 @@ -49,10 +49,12 @@ from kmip.core.misc import ServerInformation from kmip.core.misc import VendorIdentification +from kmip.core import objects from kmip.core.objects import TemplateAttribute from kmip.core.objects import CommonTemplateAttribute from kmip.core.objects import PrivateKeyTemplateAttribute from kmip.core.objects import PublicKeyTemplateAttribute +from kmip.core import primitives from kmip.services.kmip_client import KMIPProxy @@ -64,8 +66,6 @@ from kmip.services.results import QueryResult from kmip.services.results import RekeyKeyPairResult -import kmip.core.utils as utils - import mock import os import socket @@ -96,6 +96,32 @@ def tearDown(self): super(TestKMIPClient, self).tearDown() + def test_init_with_invalid_config_file_value(self): + """ + Test that the right error is raised when an invalid configuration file + value is provided to the client. + """ + kwargs = {'config_file': 1} + self.assertRaisesRegexp( + ValueError, + "The client configuration file argument must be a string.", + KMIPProxy, + **kwargs + ) + + def test_init_with_invalid_config_file_path(self): + """ + Test that the right error is raised when an invalid configuration file + path is provided to the client. + """ + kwargs = {'config_file': 'invalid'} + self.assertRaisesRegexp( + ValueError, + "The client configuration file 'invalid' does not exist.", + KMIPProxy, + **kwargs + ) + def test_close(self): """ Test that calling close on the client works as expected. 
@@ -144,31 +170,17 @@ def test_build_credential(self): username = 'username' password = 'password' - cred_type = CredentialType.USERNAME_AND_PASSWORD self.client.username = username self.client.password = password credential = self.client._build_credential() - message = utils.build_er_error(credential.__class__, 'type', - cred_type, - credential.credential_type.value, - 'value') - self.assertEqual(CredentialType.USERNAME_AND_PASSWORD, - credential.credential_type.value, - message) - - message = utils.build_er_error( - credential.__class__, 'type', username, - credential.credential_value.username.value, 'value') - self.assertEqual(username, credential.credential_value.username.value, - message) - - message = utils.build_er_error( - credential.__class__, 'type', password, - credential.credential_value.password.value, 'value') - self.assertEqual(password, credential.credential_value.password.value, - message) + self.assertEqual( + CredentialType.USERNAME_AND_PASSWORD, + credential.credential_type + ) + self.assertEqual(username, credential.credential_value.username) + self.assertEqual(password, credential.credential_value.password) def test_build_credential_no_username(self): username = None @@ -385,7 +397,7 @@ self.assertEqual(protocol_versions, observed, msg) def test_build_discover_versions_batch_item_with_input(self): - protocol_versions = [ProtocolVersion.create(1, 0)] + protocol_versions = [ProtocolVersion(1, 0)] self._test_build_discover_versions_batch_item(protocol_versions) def test_build_discover_versions_batch_item_no_input(self): @@ -612,7 +624,7 @@ self.assertEqual(protocol_versions, result.protocol_versions, msg) def test_process_discover_versions_batch_item_with_results(self): - protocol_versions = [ProtocolVersion.create(1, 0)] + protocol_versions = [ProtocolVersion(1, 0)] self._test_process_discover_versions_batch_item(protocol_versions) def test_process_discover_versions_batch_item_no_results(self): @@ -660,13 +672,21 @@ host_list_string = 
'127.0.0.1,127.0.0.3, 127.0.0.5' host_list_expected = ['127.0.0.1', '127.0.0.3', '127.0.0.5'] - self.client._set_variables(host=host_list_string, - port=None, keyfile=None, certfile=None, - cert_reqs=None, ssl_version=None, - ca_certs=None, - do_handshake_on_connect=False, - suppress_ragged_eofs=None, username=None, - password=None, timeout=None) + self.client._set_variables( + host=host_list_string, + port=None, + keyfile=None, + certfile=None, + cert_reqs=None, + ssl_version=None, + ca_certs=None, + do_handshake_on_connect=False, + suppress_ragged_eofs=None, + username=None, + password=None, + timeout=None, + config_file=None + ) self.assertEqual(host_list_expected, self.client.host_list) def test_host_is_invalid_input(self): @@ -725,6 +745,159 @@ @mock.patch( 'kmip.services.kmip_client.KMIPProxy._build_request_message' + ) + @mock.patch( + 'kmip.services.kmip_client.KMIPProxy._send_and_receive_message' + ) + def test_check(self, send_mock, build_mock): + """ + Test that the client can correctly build, send, and process a Check + request. 
+ """ + payload = payloads.CheckResponsePayload( + unique_identifier='1', + usage_limits_count=100, + cryptographic_usage_mask=12, + lease_time=10000 + ) + batch_item = ResponseBatchItem( + operation=Operation(OperationEnum.CHECK), + result_status=ResultStatus(ResultStatusEnum.SUCCESS), + response_payload=payload + ) + response = ResponseMessage(batch_items=[batch_item]) + + build_mock.return_value = None + send_mock.return_value = response + + result = self.client.check( + '1', + 100, + [ + enums.CryptographicUsageMask.ENCRYPT, + enums.CryptographicUsageMask.DECRYPT + ], + 10000 + ) + + self.assertEqual('1', result.get('unique_identifier')) + self.assertEqual(100, result.get('usage_limits_count')) + self.assertEqual( + [ + enums.CryptographicUsageMask.ENCRYPT, + enums.CryptographicUsageMask.DECRYPT + ], + result.get('cryptographic_usage_mask') + ) + self.assertEqual(10000, result.get('lease_time')) + self.assertEqual( + ResultStatusEnum.SUCCESS, + result.get('result_status') + ) + self.assertEqual(None, result.get('result_reason')) + self.assertEqual(None, result.get('result_message')) + + @mock.patch( + 'kmip.services.kmip_client.KMIPProxy._build_request_message' + ) + @mock.patch( + 'kmip.services.kmip_client.KMIPProxy._send_and_receive_message' + ) + def test_rekey(self, send_mock, build_mock): + """ + Test that the client can correctly build, send, and process a Rekey + request. 
+ """ + payload = payloads.RekeyResponsePayload( + unique_identifier='1', + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ) + ) + batch_item = ResponseBatchItem( + operation=Operation(OperationEnum.REKEY), + result_status=ResultStatus(ResultStatusEnum.SUCCESS), + response_payload=payload + ) + response = ResponseMessage(batch_items=[batch_item]) + + build_mock.return_value = None + send_mock.return_value = response + + result = self.client.rekey( + uuid='1', + offset=0, + template_attribute=objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Activation Date' + ), + attribute_value=primitives.DateTime( + value=1136113200, + tag=enums.Tags.ACTIVATION_DATE + ) + ) + ] + ) + ) + + self.assertEqual('1', result.get('unique_identifier')) + self.assertEqual( + objects.TemplateAttribute( + attributes=[ + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Algorithm' + ), + attribute_value=primitives.Enumeration( + enums.CryptographicAlgorithm, + value=enums.CryptographicAlgorithm.AES, + tag=enums.Tags.CRYPTOGRAPHIC_ALGORITHM + ) + ), + objects.Attribute( + attribute_name=objects.Attribute.AttributeName( + 'Cryptographic Length' + ), + attribute_value=primitives.Integer( + value=128, + tag=enums.Tags.CRYPTOGRAPHIC_LENGTH + ) + ) + ] + ), + result.get('template_attribute') + ) + self.assertEqual( + ResultStatusEnum.SUCCESS, + result.get('result_status') + ) + self.assertEqual(None, 
result.get('result_reason')) + self.assertEqual(None, result.get('result_message')) + + @mock.patch( + 'kmip.services.kmip_client.KMIPProxy._build_request_message' ) @mock.patch( 'kmip.services.kmip_client.KMIPProxy._send_and_receive_message' diff -Nru python-pykmip-0.7.0/kmip/version.py python-pykmip-0.8.0/kmip/version.py --- python-pykmip-0.7.0/kmip/version.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/kmip/version.py 2018-05-17 22:30:38.000000000 +0000 @@ -13,4 +13,4 @@ # License for the specific language governing permissions and limitations # under the License. -__version__ = '0.7.0' +__version__ = '0.8.0' diff -Nru python-pykmip-0.7.0/LICENSE.txt python-pykmip-0.8.0/LICENSE.txt --- python-pykmip-0.7.0/LICENSE.txt 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/LICENSE.txt 2018-02-14 22:02:52.000000000 +0000 @@ -172,30 +172,3 @@ defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2013, Rackspace (http://www.rackspace.com) - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff -Nru python-pykmip-0.7.0/PKG-INFO python-pykmip-0.8.0/PKG-INFO --- python-pykmip-0.7.0/PKG-INFO 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/PKG-INFO 2018-05-17 23:17:08.000000000 +0000 @@ -0,0 +1,24 @@ +Metadata-Version: 1.1 +Name: PyKMIP +Version: 0.8.0 +Summary: KMIP v1.1 library +Home-page: https://github.com/OpenKMIP/PyKMIP +Author: Peter Hamilton +Author-email: peter.hamilton@jhuapl.edu +License: Apache License, Version 2.0 +Description-Content-Type: UNKNOWN +Description: UNKNOWN +Keywords: KMIP +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Natural Language :: English +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 diff -Nru python-pykmip-0.7.0/PyKMIP.egg-info/dependency_links.txt python-pykmip-0.8.0/PyKMIP.egg-info/dependency_links.txt --- python-pykmip-0.7.0/PyKMIP.egg-info/dependency_links.txt 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/PyKMIP.egg-info/dependency_links.txt 2018-05-17 23:17:05.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru python-pykmip-0.7.0/PyKMIP.egg-info/entry_points.txt python-pykmip-0.8.0/PyKMIP.egg-info/entry_points.txt --- 
python-pykmip-0.7.0/PyKMIP.egg-info/entry_points.txt 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/PyKMIP.egg-info/entry_points.txt 2018-05-17 23:17:05.000000000 +0000 @@ -0,0 +1,3 @@ +[console_scripts] +pykmip-server = kmip.services.server.server:main + diff -Nru python-pykmip-0.7.0/PyKMIP.egg-info/PKG-INFO python-pykmip-0.8.0/PyKMIP.egg-info/PKG-INFO --- python-pykmip-0.7.0/PyKMIP.egg-info/PKG-INFO 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/PyKMIP.egg-info/PKG-INFO 2018-05-17 23:17:05.000000000 +0000 @@ -0,0 +1,24 @@ +Metadata-Version: 1.1 +Name: PyKMIP +Version: 0.8.0 +Summary: KMIP v1.1 library +Home-page: https://github.com/OpenKMIP/PyKMIP +Author: Peter Hamilton +Author-email: peter.hamilton@jhuapl.edu +License: Apache License, Version 2.0 +Description-Content-Type: UNKNOWN +Description: UNKNOWN +Keywords: KMIP +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Natural Language :: English +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 diff -Nru python-pykmip-0.7.0/PyKMIP.egg-info/requires.txt python-pykmip-0.8.0/PyKMIP.egg-info/requires.txt --- python-pykmip-0.7.0/PyKMIP.egg-info/requires.txt 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/PyKMIP.egg-info/requires.txt 2018-05-17 23:17:05.000000000 +0000 @@ -0,0 +1,4 @@ +cryptography +enum34 +six +sqlalchemy diff -Nru python-pykmip-0.7.0/PyKMIP.egg-info/SOURCES.txt python-pykmip-0.8.0/PyKMIP.egg-info/SOURCES.txt --- python-pykmip-0.7.0/PyKMIP.egg-info/SOURCES.txt 1970-01-01 00:00:00.000000000 +0000 +++ 
python-pykmip-0.8.0/PyKMIP.egg-info/SOURCES.txt 2018-05-17 23:17:07.000000000 +0000 @@ -0,0 +1,236 @@ +LICENSE.txt +MANIFEST.in +README.rst +setup.py +PyKMIP.egg-info/PKG-INFO +PyKMIP.egg-info/SOURCES.txt +PyKMIP.egg-info/dependency_links.txt +PyKMIP.egg-info/entry_points.txt +PyKMIP.egg-info/requires.txt +PyKMIP.egg-info/top_level.txt +kmip/__init__.py +kmip/version.py +kmip/core/__init__.py +kmip/core/attributes.py +kmip/core/config_helper.py +kmip/core/enums.py +kmip/core/exceptions.py +kmip/core/misc.py +kmip/core/objects.py +kmip/core/policy.py +kmip/core/primitives.py +kmip/core/secrets.py +kmip/core/utils.py +kmip/core/factories/__init__.py +kmip/core/factories/attribute_values.py +kmip/core/factories/attributes.py +kmip/core/factories/credentials.py +kmip/core/factories/secrets.py +kmip/core/factories/payloads/__init__.py +kmip/core/factories/payloads/request.py +kmip/core/factories/payloads/response.py +kmip/core/messages/__init__.py +kmip/core/messages/contents.py +kmip/core/messages/messages.py +kmip/core/messages/payloads/__init__.py +kmip/core/messages/payloads/activate.py +kmip/core/messages/payloads/archive.py +kmip/core/messages/payloads/cancel.py +kmip/core/messages/payloads/check.py +kmip/core/messages/payloads/create.py +kmip/core/messages/payloads/create_key_pair.py +kmip/core/messages/payloads/decrypt.py +kmip/core/messages/payloads/derive_key.py +kmip/core/messages/payloads/destroy.py +kmip/core/messages/payloads/discover_versions.py +kmip/core/messages/payloads/encrypt.py +kmip/core/messages/payloads/get.py +kmip/core/messages/payloads/get_attribute_list.py +kmip/core/messages/payloads/get_attributes.py +kmip/core/messages/payloads/get_usage_allocation.py +kmip/core/messages/payloads/locate.py +kmip/core/messages/payloads/mac.py +kmip/core/messages/payloads/obtain_lease.py +kmip/core/messages/payloads/poll.py +kmip/core/messages/payloads/query.py +kmip/core/messages/payloads/recover.py +kmip/core/messages/payloads/register.py 
+kmip/core/messages/payloads/rekey.py +kmip/core/messages/payloads/rekey_key_pair.py +kmip/core/messages/payloads/revoke.py +kmip/core/messages/payloads/sign.py +kmip/core/messages/payloads/signature_verify.py +kmip/demos/__init__.py +kmip/demos/utils.py +kmip/demos/certs/server.crt +kmip/demos/certs/server.key +kmip/demos/pie/__init__.py +kmip/demos/pie/create.py +kmip/demos/pie/create_key_pair.py +kmip/demos/pie/decrypt.py +kmip/demos/pie/derive_key.py +kmip/demos/pie/destroy.py +kmip/demos/pie/encrypt.py +kmip/demos/pie/get.py +kmip/demos/pie/get_attribute_list.py +kmip/demos/pie/locate.py +kmip/demos/pie/mac.py +kmip/demos/pie/register_certificate.py +kmip/demos/pie/register_opaque_object.py +kmip/demos/pie/register_private_key.py +kmip/demos/pie/register_public_key.py +kmip/demos/pie/register_secret_data.py +kmip/demos/pie/register_symmetric_key.py +kmip/demos/pie/sign.py +kmip/demos/pie/signature_verify.py +kmip/demos/units/__init__.py +kmip/demos/units/activate.py +kmip/demos/units/create.py +kmip/demos/units/create_key_pair.py +kmip/demos/units/destroy.py +kmip/demos/units/discover_versions.py +kmip/demos/units/get.py +kmip/demos/units/locate.py +kmip/demos/units/query.py +kmip/demos/units/register.py +kmip/demos/units/revoke.py +kmip/pie/__init__.py +kmip/pie/client.py +kmip/pie/exceptions.py +kmip/pie/factory.py +kmip/pie/objects.py +kmip/pie/sqltypes.py +kmip/services/__init__.py +kmip/services/auth.py +kmip/services/kmip_client.py +kmip/services/results.py +kmip/services/server/__init__.py +kmip/services/server/config.py +kmip/services/server/engine.py +kmip/services/server/kmip_protocol.py +kmip/services/server/monitor.py +kmip/services/server/policy.py +kmip/services/server/server.py +kmip/services/server/session.py +kmip/services/server/auth/__init__.py +kmip/services/server/auth/api.py +kmip/services/server/auth/slugs.py +kmip/services/server/auth/utils.py +kmip/services/server/crypto/__init__.py +kmip/services/server/crypto/api.py 
+kmip/services/server/crypto/engine.py +kmip/tests/__init__.py +kmip/tests/functional/__init__.py +kmip/tests/functional/conftest.py +kmip/tests/functional/services/__init__.py +kmip/tests/functional/services/test_authentication.py +kmip/tests/integration/__init__.py +kmip/tests/integration/conftest.py +kmip/tests/integration/services/__init__.py +kmip/tests/integration/services/test_integration.py +kmip/tests/integration/services/test_kmip_client.py +kmip/tests/integration/services/test_proxykmipclient.py +kmip/tests/unit/__init__.py +kmip/tests/unit/test_kmip.py +kmip/tests/unit/core/__init__.py +kmip/tests/unit/core/test_config_helper.py +kmip/tests/unit/core/test_policy.py +kmip/tests/unit/core/test_utils.py +kmip/tests/unit/core/attributes/__init__.py +kmip/tests/unit/core/attributes/test_application_specific_information.py +kmip/tests/unit/core/attributes/test_attributes.py +kmip/tests/unit/core/attributes/test_digest.py +kmip/tests/unit/core/factories/__init__.py +kmip/tests/unit/core/factories/test_attribute.py +kmip/tests/unit/core/factories/test_attribute_values.py +kmip/tests/unit/core/factories/payloads/__init__.py +kmip/tests/unit/core/factories/payloads/test_payload.py +kmip/tests/unit/core/factories/payloads/test_request.py +kmip/tests/unit/core/factories/payloads/test_response.py +kmip/tests/unit/core/messages/__init__.py +kmip/tests/unit/core/messages/test_messages.py +kmip/tests/unit/core/messages/test_operations.py +kmip/tests/unit/core/messages/contents/__init__.py +kmip/tests/unit/core/messages/contents/test_authentication.py +kmip/tests/unit/core/messages/contents/test_protocol_version.py +kmip/tests/unit/core/messages/payloads/__init__.py +kmip/tests/unit/core/messages/payloads/test_activate.py +kmip/tests/unit/core/messages/payloads/test_archive.py +kmip/tests/unit/core/messages/payloads/test_cancel.py +kmip/tests/unit/core/messages/payloads/test_check.py +kmip/tests/unit/core/messages/payloads/test_create.py 
+kmip/tests/unit/core/messages/payloads/test_create_key_pair.py +kmip/tests/unit/core/messages/payloads/test_decrypt.py +kmip/tests/unit/core/messages/payloads/test_derive_key.py +kmip/tests/unit/core/messages/payloads/test_destroy.py +kmip/tests/unit/core/messages/payloads/test_discover_versions.py +kmip/tests/unit/core/messages/payloads/test_encrypt.py +kmip/tests/unit/core/messages/payloads/test_get.py +kmip/tests/unit/core/messages/payloads/test_get_attribute_list.py +kmip/tests/unit/core/messages/payloads/test_get_attributes.py +kmip/tests/unit/core/messages/payloads/test_get_usage_allocation.py +kmip/tests/unit/core/messages/payloads/test_locate.py +kmip/tests/unit/core/messages/payloads/test_mac.py +kmip/tests/unit/core/messages/payloads/test_obtain_lease.py +kmip/tests/unit/core/messages/payloads/test_poll.py +kmip/tests/unit/core/messages/payloads/test_query.py +kmip/tests/unit/core/messages/payloads/test_recover.py +kmip/tests/unit/core/messages/payloads/test_register.py +kmip/tests/unit/core/messages/payloads/test_rekey.py +kmip/tests/unit/core/messages/payloads/test_rekey_key_pair.py +kmip/tests/unit/core/messages/payloads/test_revoke.py +kmip/tests/unit/core/messages/payloads/test_sign.py +kmip/tests/unit/core/messages/payloads/test_signature_verify.py +kmip/tests/unit/core/misc/__init__.py +kmip/tests/unit/core/misc/test_misc.py +kmip/tests/unit/core/misc/test_server_information.py +kmip/tests/unit/core/objects/__init__.py +kmip/tests/unit/core/objects/test_attribute.py +kmip/tests/unit/core/objects/test_credentials.py +kmip/tests/unit/core/objects/test_extension_information.py +kmip/tests/unit/core/objects/test_objects.py +kmip/tests/unit/core/primitives/__init__.py +kmip/tests/unit/core/primitives/test_base.py +kmip/tests/unit/core/primitives/test_big_integer.py +kmip/tests/unit/core/primitives/test_boolean.py +kmip/tests/unit/core/primitives/test_byte_string.py +kmip/tests/unit/core/primitives/test_date_time.py 
+kmip/tests/unit/core/primitives/test_enumeration.py +kmip/tests/unit/core/primitives/test_integer.py +kmip/tests/unit/core/primitives/test_interval.py +kmip/tests/unit/core/primitives/test_long_integer.py +kmip/tests/unit/core/primitives/test_text_string.py +kmip/tests/unit/core/secrets/__init__.py +kmip/tests/unit/core/secrets/test_certificate.py +kmip/tests/unit/pie/__init__.py +kmip/tests/unit/pie/test_client.py +kmip/tests/unit/pie/test_exceptions.py +kmip/tests/unit/pie/test_factory.py +kmip/tests/unit/pie/objects/__init__.py +kmip/tests/unit/pie/objects/test_certificate.py +kmip/tests/unit/pie/objects/test_cryptographic_object.py +kmip/tests/unit/pie/objects/test_key.py +kmip/tests/unit/pie/objects/test_managed_object.py +kmip/tests/unit/pie/objects/test_opaque_object.py +kmip/tests/unit/pie/objects/test_private_key.py +kmip/tests/unit/pie/objects/test_public_key.py +kmip/tests/unit/pie/objects/test_secret_data.py +kmip/tests/unit/pie/objects/test_sqltypes.py +kmip/tests/unit/pie/objects/test_symmetric_key.py +kmip/tests/unit/pie/objects/test_x509_certificate.py +kmip/tests/unit/services/__init__.py +kmip/tests/unit/services/test_auth.py +kmip/tests/unit/services/test_kmip_client.py +kmip/tests/unit/services/test_kmip_protocol.py +kmip/tests/unit/services/server/__init__.py +kmip/tests/unit/services/server/test_config.py +kmip/tests/unit/services/server/test_engine.py +kmip/tests/unit/services/server/test_monitor.py +kmip/tests/unit/services/server/test_policy.py +kmip/tests/unit/services/server/test_server.py +kmip/tests/unit/services/server/test_session.py +kmip/tests/unit/services/server/auth/__init__.py +kmip/tests/unit/services/server/auth/test_slugs.py +kmip/tests/unit/services/server/auth/test_utils.py +kmip/tests/unit/services/server/crypto/__init__.py +kmip/tests/unit/services/server/crypto/test_engine.py \ No newline at end of file diff -Nru python-pykmip-0.7.0/PyKMIP.egg-info/top_level.txt python-pykmip-0.8.0/PyKMIP.egg-info/top_level.txt --- 
python-pykmip-0.7.0/PyKMIP.egg-info/top_level.txt 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/PyKMIP.egg-info/top_level.txt 2018-05-17 23:17:05.000000000 +0000 @@ -0,0 +1 @@ +kmip diff -Nru python-pykmip-0.7.0/pytest.ini python-pykmip-0.8.0/pytest.ini --- python-pykmip-0.7.0/pytest.ini 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/pytest.ini 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -[pytest] -markers = - ignore: skip the given test object diff -Nru python-pykmip-0.7.0/README.rst python-pykmip-0.8.0/README.rst --- python-pykmip-0.7.0/README.rst 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/README.rst 2017-12-08 17:36:18.000000000 +0000 @@ -7,367 +7,52 @@ |python-versions| PyKMIP is a Python implementation of the Key Management Interoperability -Protocol (KMIP). KMIP is a client/server communication protocol for the -storage and maintenance of key, certificate, and secret objects. The standard -is governed by the `Organization for the Advancement of Structured Information -Standards`_ (OASIS). PyKMIP supports a subset of features in versions -1.0 - 1.2 of the KMIP specification. - -For a high-level overview of KMIP, check out the `KMIP Wikipedia page`_. For -comprehensive documentation from OASIS and information about the KMIP -community, visit the `KMIP Technical Committee home page`_. - -.. _Usage: - -Usage -===== -Client ------- -There are two implementations of the KMIP client. The first, -``kmip.services.kmip_client.KMIPProxy``, is the original client and provides -support for the following operations: - -* ``Create`` -* ``CreateKeyPair`` -* ``Register`` -* ``Locate`` -* ``Get`` -* ``GetAttributes`` -* ``GetAttributeList`` -* ``Activate`` -* ``Revoke`` -* ``Destroy`` -* ``Query`` -* ``DiscoverVersions`` - -The second client, ``kmip.pie.client.ProxyKmipClient``, wraps the original -``KMIPProxy`` and provides a simpler interface. 
It provides support for the -following operations: - -* ``Create`` -* ``CreateKeyPair`` -* ``Register`` -* ``Get`` -* ``GetAttributes`` -* ``GetAttributeList`` -* ``Destroy`` - -For examples of how to create and use the different clients, see the scripts -in ``kmip/demos``. - -Configuration -************* -A KMIP client can be configured in different ways to connect to a KMIP server. -The first method is the default approach, which uses settings found in the -PyKMIP configuration file. The configuration file can be stored in several -different locations, including: - -* ``/.pykmip/pykmip.conf`` -* ``/etc/pykmip/pykmip.conf`` -* ``/kmip/pykmip.conf`` -* ``/kmip/kmipconfig.ini`` - -These locations are searched in order. For example, configuration data found -in ``/etc`` will take priority over configuration information found in the -PyKMIP installation directory. The ``kmipconfig.ini`` file name is supported -for legacy installations. Users can specify the connection configuration -settings to use on client instantiation, allowing applications to support -multiple key storage backends simultaneously, one client per backend. - -An example client configuration settings block is shown below:: - - [client] - host=127.0.0.1 - port=5696 - keyfile=/path/to/key/file - certfile=/path/to/cert/file - cert_reqs=CERT_REQUIRED - ssl_version=PROTOCOL_SSLv23 - ca_certs=/path/to/ca/cert/file - do_handshake_on_connect=True - suppress_ragged_eofs=True - username=user - password=password - -The second configuration approach allows developers to specify the -configuration settings when creating the client at run time. 
The following -example demonstrates how to create the ``ProxyKmipClient``, directly -specifying the different configuration values:: - - client = ProxyKmipClient( - hostname='127.0.0.1', - port=5696, - cert='/path/to/cert/file/', - key='/path/to/key/file/', - ca='/path/to/ca/cert/file/', - ssl_version='PROTOCOL_SSLv23', - username='user', - password='password', - config='client' - ) - -A KMIP client will load the configuration settings found in the ``client`` -settings block by default. Settings specified at runtime, as in the above -example, will take precedence over the default values found in the -configuration file. - -Many of these settings correspond to the settings for ``ssl.wrap_socket``, -which is used to establish secure connections to KMIP backends. For more -information, check out the `Python SSL library documentation`_. - -Server ------- -In addition to the KMIP clients, PyKMIP provides a basic software -implementation of a KMIP server, ``kmip.services.server.KmipServer``. -However, the server is intended for use only in testing and demonstration -environments. The server is **not** intended to be a substitute for a secure, -hardware-based key management appliance. The PyKMIP client should be used for -operational purposes **only** with a hardware-based KMIP server. - -The KMIP server provides support for the following operations: - -* ``Create`` -* ``CreateKeyPair`` -* ``Register`` -* ``Get`` -* ``GetAttributes`` -* ``Activate`` -* ``Destroy`` -* ``Query`` -* ``DiscoverVersions`` - -Configuration -************* -The PyKMIP software server can be configured via configuration file, by -default located at ``/etc/pykmip/server.conf``. 
An example server -configuration settings block, as found in the configuration file, is shown -below:: - - [server] - hostname=127.0.0.1 - port=5696 - certificate_path=/path/to/certificate/file - key_path=/path/to/certificate/key/file - ca_path=/path/to/ca/certificate/file - auth_suite=Basic - policy_path=/path/to/policy/file - -The server can also be configured manually. The following example shows how -to create the ``KmipServer`` in Python code, directly specifying the -different configuration values:: - - server = KmipServer( - hostname='127.0.0.1', - port=5696, - certificate_path='/path/to/certificate/file/', - key_path='/path/to/certificate/key/file/', - ca_path='/path/to/ca/certificate/file/', - auth_suite='Basic', - config_path='/etc/pykmip/server.conf', - log_path='/var/log/pykmip/server.log', - policy_path='/etc/pykmip/policies' - ) - -**NOTE:** The ``kmip_server.KMIPServer`` implementation of the software -server is deprecated and will be removed in a future version of PyKMIP. - -The different configuration options are defined below: - -* ``hostname`` - A string representing either a hostname in Internet domain notation or an - IPv4 address. -* ``port`` - An integer representing a port number. Recommended to be ``5696`` - according to the KMIP specification. -* ``certificate_path`` - A string representing a path to a PEM-encoded server certificate file. For - more information, see the `Python SSL library documentation`_. -* ``key_path`` - A string representing a path to a PEM-encoded server certificate key file. - The private key contained in the file must correspond to the certificate - pointed to by ``certificate_path``. For more information, see the - `Python SSL library documentation`_. -* ``ca_path`` - A string representing a path to a PEM-encoded certificate authority - certificate file. If using a self-signed certificate, the ``ca_path`` and - the ``certificate_path`` should be identical. 
For more information, see - the `Python SSL library documentation`_. -* ``auth_suite`` - A string representing the type of authentication suite to use when - establishing TLS connections. Acceptable values are ``Basic`` and - ``TLS1.2``. - **Note:** ``TLS1.2`` can only be used with versions of Python that support - TLS 1.2 (e.g,. Python 2.7.9+ or Python 3.4+). If you are running on an - older version of Python, you will only be able to use basic TLS 1.0 - authentication. For more information, see the - `Python SSL library documentation`_ and the - `Key Management Interoperability Protocol Profiles Version 1.1`_ - documentation. -* ``config_path`` - A string representing a path to a server configuration file, as shown - above. Only set via the ``KmipServer`` constructor. Defaults to - ``/etc/pykmip/server.conf``. -* ``log_path`` - A string representing a path to a log file. The server will set up a - rotating file logger on this file. Only set via the ``KmipServer`` - constructor. Defaults to ``/var/log/pykmip/server.log``. -* ``policy_path`` - A string representing a path to the filesystem directory containing - PyKMIP server operation policy JSON files. - -**NOTE:** When installing PyKMIP and deploying the KMIP software server, you -must manually set up the server configuration file. It **will not** be placed -in ``/etc/pykmip`` automatically. - -Usage -***** -The software server can be run using the ``bin/run_server.py`` startup script. -If you are currently in the PyKMIP root directory, use the following command:: - - $ python bin/run_server.py - -If you need more information about running the startup script, pass ``-h`` -to it:: - - $ python bin/run_server.py -h - -**NOTE:** You may need to run the server as root, depending on the -permissions of the configuration, log, and certificate file directories. - -If PyKMIP is installed and you are able to ``import kmip`` in Python, you can -copy the startup script and run it from any directory you choose. 
- -Identity & Ownership -******************** -The software server determines client identity using the client's TLS -certificate. Specifically, the common name of the certificate subject is used -as the client ID. Additionally, the client certificate must have an extended -key usage extension marked for client authentication. If this extension is -not included in the client certificate and/or the client does not define a -subject and common name, the server will fail to establish a client session. -For more information on certificates and their use in authentication, see -`RFC 5280`_. - -The client identity described above is used to anchor object ownership. -Object ownership and access is governed by an object's operation policy, -defined on object creation. By default the KMIP specification defines two -operation policies, a ``default`` policy covering all objects and a -``public`` policy applied only to ``Template`` objects. - -For example, if user A creates a symmetric key, user B will only be able -to retrieve that key if the key's operation policy indicates that the -key is accessible to all users. If the operation policy specifies that -the key is only available to the owner, only user A will be able to access -it. - -Users can create their own operation policies by placing operation policy -JSON files in the policy directory pointed to by the ``policy_path`` -configuration option. The server will load all policies from that directory -upon start up, allowing users to use those policies for their objects. A -template for the operation policy JSON file can be found under ``examples``. -Note that the ``default`` and ``public`` policies are reserved and cannot -be redefined by a user's policy. - -Profiles -======== -The KMIP standard includes various profiles that tailor the standard for -specific use cases (e.g., symmetric key storage with TLS 1.2). These profiles -specify conformance to certain operations and attributes. 
- -The PyKMIP ``KMIPProxy`` client provides full support for the following -profile(s): - -* Basic Discover Versions Client KMIP Profile - -Development -=========== -Roadmap -------- -The development plan for PyKMIP follows the requirements for the following -KMIP profiles. The foundation for symmetric and asymmetric key operation -support is already built into the library. - -Client profiles: - -* Basic Baseline Client KMIP Profile -* Basic Symmetric Key Store Client KMIP Profile -* Basic Symmetric Key Foundry Client KMIP Profile -* Basic Asymmetric Key Store Client KMIP Profile -* Basic Asymmetric Key Foundry Client KMIP Profile - -Server profiles: - -* Basic Discover Versions Server KMIP Profile -* Basic Baseline Server KMIP Profile -* Basic Symmetric Key Store and Server KMIP Profile -* Basic Symmetric Key Foundry and Server KMIP Profile -* Basic Asymmetric Key Store Server KMIP Profile -* Basic Asymmetric Key Foundry and Server KMIP Profile - -Testing -------- -The PyKMIP test suite is composed of two parts, a unit test suite and an -integration test suite that runs various tests against instantiations of the -software KMIP server and real KMIP appliances. The tests are managed by a -combination of the ``tox``, ``pytest``, and ``flake8`` libraries. - -There are several ways to run different versions of the tests. To run, use one -of the following commands in the PyKMIP root directory. - -To run all of the unit tests:: - - $ tox - -To run the Python syntax and format compliance tests:: - - $ tox -e pep8 - -To run the unit test suite against Python 2.7:: - - $ tox -e py27 - -The integration tests require a configuration flag whose value corresponds to -the name of a client configuration section in the ``pykmip.conf`` -configuration file. See the Usage_ section for more information. 
- -To run the integration test suite with a specific configuration setup:: - - $ tox -e integration -- --config - -For more information and a list of supported ``tox`` environments, see -``tox.ini`` in the PyKMIP root directory. - -Platforms -========= -PyKMIP has been tested and runs on the following platform(s): - -* Ubuntu: 12.04 LTS, 14.04 LTS, 16.04 LTS - -PyKMIP is supported by Python 2.7 and 3.3 - 3.6. - -References -========== -The source code for PyKMIP is hosted on GitHub and the library is available -for installation from the Python Package Index (PyPI): - -* `PyKMIP on GitHub `_ -* `PyKMIP on PyPI `_ - -For more information on KMIP version 1.1, see the following documentation: - -* `Key Management Interoperability Protocol Specification Version 1.1`_ -* `Key Management Interoperability Protocol Profiles Version 1.1`_ -* `Key Management Interoperability Protocol Test Cases Version 1.1`_ - -.. _code base: https://github.com/OpenKMIP/PyKMIP -.. _Organization for the Advancement of Structured Information Standards: https://www.oasis-open.org/ -.. _Key Management Interoperability Protocol Specification Version 1.1: http://docs.oasis-open.org/kmip/spec/v1.1/os/kmip-spec-v1.1-os.html -.. _Key Management Interoperability Protocol Profiles Version 1.1: http://docs.oasis-open.org/kmip/profiles/v1.1/os/kmip-profiles-v1.1-os.html -.. _Key Management Interoperability Protocol Test Cases Version 1.1: http://docs.oasis-open.org/kmip/testcases/v1.1/cn01/kmip-testcases-v1.1-cn01.html -.. _Python SSL library documentation: https://docs.python.org/dev/library/ssl.html#socket-creation -.. _KMIP Wikipedia page: https://en.wikipedia.org/wiki/Key_Management_Interoperability_Protocol -.. _KMIP Technical Committee home page: https://www.oasis-open.org/committees/tc_home.php?wg_abbrev=kmip -.. _RFC 5280: https://tools.ietf.org/html/rfc5280 +Protocol (KMIP), an `OASIS`_ communication standard for the management of +objects stored and maintained by key management systems. 
KMIP defines how key +management operations and operation data should be encoded and communicated +between client and server applications. Supported operations include the full +`CRUD`_ key management lifecycle, including operations for managing object +metadata and for conducting cryptographic operations. Supported object types +include: + +* symmetric/asymmetric encryption keys +* passwords/passphrases +* certificates +* opaque data blobs, and more + +For more information on KMIP, check out the `OASIS KMIP Technical Committee`_ +and the `OASIS KMIP Documentation`_. + +For more information on PyKMIP, check out the project `Documentation`_. + +Installation +------------ +You can install PyKMIP via ``pip``: + +.. code-block:: console + + $ pip install pykmip + +See `Installation`_ for more information. + +Community +--------- +The PyKMIP community has various forums and resources you can use: + +* `Source code`_ +* `Issue tracker`_ +* IRC: ``#pykmip`` on ``irc.freenode.net`` +* Twitter: ``@pykmip`` + + +.. _`CRUD`: https://en.wikipedia.org/wiki/Create,_read,_update_and_delete +.. _`OASIS`: https://www.oasis-open.org +.. _`OASIS KMIP Technical Committee`: https://www.oasis-open.org/committees/tc_home.php?wg_abbrev=kmip +.. _`OASIS KMIP Documentation`: https://docs.oasis-open.org/kmip/spec/ +.. _`Documentation`: https://pykmip.readthedocs.io/en/latest/index.html +.. _`Installation`: https://pykmip.readthedocs.io/en/latest/installation.html +.. _`Source code`: https://github.com/openkmip/pykmip +.. _`Issue tracker`: https://github.com/openkmip/pykmip/issues .. 
|pypi-version| image:: https://img.shields.io/pypi/v/pykmip.svg :target: https://pypi.python.org/pypi/pykmip diff -Nru python-pykmip-0.7.0/requirements.txt python-pykmip-0.8.0/requirements.txt --- python-pykmip-0.7.0/requirements.txt 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/requirements.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -cryptography>=1.3 -enum34 -six>=1.9.0 -sqlalchemy>=1.0 diff -Nru python-pykmip-0.7.0/setup.cfg python-pykmip-0.8.0/setup.cfg --- python-pykmip-0.7.0/setup.cfg 1970-01-01 00:00:00.000000000 +0000 +++ python-pykmip-0.8.0/setup.cfg 2018-05-17 23:17:08.000000000 +0000 @@ -0,0 +1,4 @@ +[egg_info] +tag_build = +tag_date = 0 + diff -Nru python-pykmip-0.7.0/setup.py python-pykmip-0.8.0/setup.py --- python-pykmip-0.7.0/setup.py 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/setup.py 2017-12-08 17:36:18.000000000 +0000 @@ -21,7 +21,7 @@ version_path = os.path.join(os.path.dirname( os.path.realpath(__file__)), 'kmip', 'version.py') with open(version_path, 'r') as version_file: - mo = re.search(r"^.*= '(\d\.\d\.\d)'$", version_file.read(), re.MULTILINE) + mo = re.search(r"^.*= '(\d\.\d\..*)'$", version_file.read(), re.MULTILINE) __version__ = mo.group(1) setuptools.setup( @@ -57,7 +57,6 @@ "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", diff -Nru python-pykmip-0.7.0/test-requirements.txt python-pykmip-0.8.0/test-requirements.txt --- python-pykmip-0.7.0/test-requirements.txt 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/test-requirements.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -# TODO: add any test requirements here -coverage -pytest -flake8 -testtools -fixtures -testresources -mock -testscenarios -testrepository -sphinx -bandit diff -Nru 
python-pykmip-0.7.0/tox.ini python-pykmip-0.8.0/tox.ini --- python-pykmip-0.7.0/tox.ini 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/tox.ini 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -[tox] -envlist = pep8,py27,py33,py34,py35,py36,bandit - -[testenv] -passenv = TRAVIS TRAVIS_JOB_ID TRAVIS_BRANCH - -deps = -r{toxinidir}/requirements.txt - -r{toxinidir}/test-requirements.txt - -commands = - coverage run --source=kmip/ --omit=kmip/demos/*,kmip/tests/* -m pytest --strict kmip/tests/unit - coverage report -m - coverage html - -[testenv:pep8] -commands = flake8 kmip/ - -[testenv:integration] -# Note: This requires local or remote access to a KMIP appliance or service -deps = {[testenv]deps} -basepython=python2.7 -commands = - py.test --strict kmip/tests/integration -m "not ignore" {posargs} - -[testenv:bandit] -deps = {[testenv]deps} -commands = bandit -r kmip -n5 -x kmip/tests - -[testenv:docs] -deps = sphinx -commands = - sphinx-apidoc -o {toxinidir}/docs {toxinidir}/kmip/ - sphinx-build -b html {toxinidir}/docs {toxinidir}/docs/_build - -[flake8] -exclude = .git,.tox,dist,rpmbuild,*.egg-info diff -Nru python-pykmip-0.7.0/.travis.yml python-pykmip-0.8.0/.travis.yml --- python-pykmip-0.7.0/.travis.yml 2017-11-14 06:30:49.000000000 +0000 +++ python-pykmip-0.8.0/.travis.yml 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -language: python -matrix: - include: - - python: 2.7 - os: linux - dist: precise - env: TOXENV=py27 - - python: 2.7 - os: linux - dist: trusty - env: TOXENV=py27 - - python: 3.3 - os: linux - dist: precise - env: TOXENV=py33 - - python: 3.3 - os: linux - dist: trusty - env: TOXENV=py33 - - python: 3.4 - os: linux - dist: precise - env: TOXENV=py34 - - python: 3.4 - os: linux - dist: trusty - env: TOXENV=py34 - - python: 3.5 - os: linux - dist: precise - env: TOXENV=py35 - - python: 3.5 - os: linux - dist: trusty - env: TOXENV=py35 - - python: 3.6 - os: linux - dist: precise - env: TOXENV=py36 - - python: 3.6 - os: linux - 
dist: trusty - env: TOXENV=py36 - - python: 2.7 - os: linux - dist: precise - env: TOXENV=pep8 - - python: 2.7 - os: linux - dist: trusty - env: TOXENV=pep8 - - python: 2.7 - os: linux - dist: precise - env: TOXENV=bandit - - python: 2.7 - os: linux - dist: trusty - env: TOXENV=bandit -install: - - pip install tox - - pip install bandit - - pip install codecov -script: - - tox -after_success: - - codecov