diff -Nru ubuntu-image-0.11+16.04ubuntu1/coverage.ini ubuntu-image-0.12+16.04ubuntu1/coverage.ini --- ubuntu-image-0.11+16.04ubuntu1/coverage.ini 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/coverage.ini 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -[run] -branch = true -parallel = true -omit = - setup* - .tox/coverage/lib/python*/site-packages/* - ubuntu_image/tests/* - ubuntu_image/testing/* - /usr/lib/python3/dist-packages/* - -[paths] -source = - ubuntu_image - .tox/coverage/lib/python*/site-packages/ubuntu_image - -[report] -#fail_under = 100 diff -Nru ubuntu-image-0.11+16.04ubuntu1/debian/changelog ubuntu-image-0.12+16.04ubuntu1/debian/changelog --- ubuntu-image-0.11+16.04ubuntu1/debian/changelog 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/debian/changelog 2016-11-08 22:31:21.000000000 +0000 @@ -1,3 +1,49 @@ +ubuntu-image (0.12+16.04ubuntu1) xenial; urgency=medium + + * SRU tracking bug LP: #1646608 + * Revert previous change which sorted structure volumes by their + offset. Instead, we preserve gadget.yaml order for purposes of + partition numbering, but we still provide implicit offsets when they + are missing, and we still sanity check for partition overlap. + (LP:1642999) + * Provide human-readable error messages on gadget.yaml parser failures, + not Python tracebacks (unless --debug is given). (LP:1617421) + * Change the way we invoke the autopkgtests. + - Use a standard d/t/control file instead of d/t/control.autodep8. We + were only using the Python 3 import test anyway. + - Add an import.sh stanza to reproduce the Python 3 import bits we + lost by removing autodep8. + - Turn the `tox -e coverage` Test-Command into a separate test + script so that we can manipulate the $UBUNTU_IMAGE_CODENAME + environment variable. This is used by tox.ini to select an + appropriate *-coverage.ini file since Xenial does not and cannot + cover certain code paths. Everything after Xenial gets named 'devel'. + - Narrow the dependencies so that they aren't just importing all + binary packages. The effects may be similar, but EIBTI. + * d/control: Drop Testsuite header; we're not doing autodep8 anymore. + * Rename the environment variable $UBUNTUIMAGE_MOCK_SNAP to + $UBUNTU_IMAGE_MOCK_SNAP for consistency. + * Skip some tests which touch the actual store when running without + network access (e.g. during package build time). + * Move the __version__ calculation to the package's __init__.py + * Parse all YAML integer-like values as strings, and then turn them into + integers if necessary during post-processing. (LP:1640523) + * gadget.yaml files can include an optional `format` key which names the + version number supported by the gadget.yaml. Currently only format 0 + is supported, which is a legacy version (omitting the format is + equivalent). (LP:1638926) + * d/control: Add run-time dependencies which are missing from snapd but + are required for `snap prepare-image`. (LP:1642427) + * Structures with type='mbr' are deprecated. Use structure role + instead. (LP:1638660) + * mbr structures must start at offset 0. (LP:1630769) + * Fixed sanity checking of --image-size argument for out-of-offset-order + structure definitions. (LP:1643598) + * Prevent wrapping blobs in disk partitions by using the `type: bare` + structure key. 
(LP:1645750) + + -- Barry Warsaw Tue, 08 Nov 2016 17:31:21 -0500 + ubuntu-image (0.11+16.04ubuntu1) xenial; urgency=medium * SRU tracking bug LP: #1639381 diff -Nru ubuntu-image-0.11+16.04ubuntu1/debian/control ubuntu-image-0.12+16.04ubuntu1/debian/control --- ubuntu-image-0.11+16.04ubuntu1/debian/control 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/debian/control 2016-11-08 22:31:21.000000000 +0000 @@ -6,6 +6,7 @@ dh-python, dosfstools, gdisk, + grub-common, mtools, python3-all, python3-attr, @@ -19,11 +20,9 @@ python3-voluptuous, python3-yaml, snapd (>= 2.0.11~ppa0), - grub-common, tox, Standards-Version: 3.9.8 Homepage: http://launchpad.net/ubuntu-image -Testsuite: autopkgtest-pkg-python Package: ubuntu-image Architecture: all @@ -37,8 +36,10 @@ Package: python3-ubuntu-image Architecture: all Section: python -Depends: dosfstools, +Depends: ca-certificates, + dosfstools, gdisk, + grub-common, mtools (>= 4.0.18-2ubuntu0), python3-attr, python3-pkg-resources, diff -Nru ubuntu-image-0.11+16.04ubuntu1/debian/tests/control ubuntu-image-0.12+16.04ubuntu1/debian/tests/control --- ubuntu-image-0.11+16.04ubuntu1/debian/tests/control 1970-01-01 00:00:00.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/debian/tests/control 2016-11-08 22:31:21.000000000 +0000 @@ -0,0 +1,29 @@ +Tests: import.sh +Depends: python3-ubuntu-image + +Test-Command: ubuntu-image --help +Depends: ubuntu-image + +Test-Command: ubuntu-image --version +Depends: ubuntu-image + +Test-Command: man ubuntu-image +Depends: ubuntu-image, man-db + +# Something in the tox/pip/setuptools stack requires git. + +Test-Command: tox -e py35 +Depends: @builddeps@, git +Restrictions: needs-root + +Test-Command: tox -e qa +Depends: @builddeps@, git +Restrictions: needs-root + +Tests: coverage.sh +Depends: @builddeps@, git, lsb-release +Restrictions: needs-root + +Tests: mount +Restrictions: needs-root, allow-stderr +Depends: @, kpartx diff -Nru ubuntu-image-0.11+16.04ubuntu1/debian/tests/control.autodep8 ubuntu-image-0.12+16.04ubuntu1/debian/tests/control.autodep8 --- ubuntu-image-0.11+16.04ubuntu1/debian/tests/control.autodep8 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/debian/tests/control.autodep8 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -Test-Command: ubuntu-image --help - -Test-Command: ubuntu-image --version - -Test-Command: man ubuntu-image -Depends: @, man-db - -# Something in the tox/pip/setuptools stack requires git. - -Test-Command: tox -e py35 -Depends: @builddeps@, git -Restrictions: needs-root - -Test-Command: tox -e qa -Depends: @builddeps@, git -Restrictions: needs-root - -Test-Command: tox -e coverage -Depends: @builddeps@, git -Restrictions: needs-root - -Tests: mount -Restrictions: needs-root, allow-stderr -Depends: @, kpartx, ca-certificates, grub-common diff -Nru ubuntu-image-0.11+16.04ubuntu1/debian/tests/coverage.sh ubuntu-image-0.12+16.04ubuntu1/debian/tests/coverage.sh --- ubuntu-image-0.11+16.04ubuntu1/debian/tests/coverage.sh 1970-01-01 00:00:00.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/debian/tests/coverage.sh 2016-11-08 22:31:21.000000000 +0000 @@ -0,0 +1,12 @@ +#!/bin/sh + +export UBUNTU_IMAGE_CODENAME=`lsb_release -cs` + +# Xenial is special; all the others are share a configuration. 
+ +if [ "$UBUNTU_IMAGE_CODENAME" != 'xenial' ] +then + export UBUNTU_IMAGE_CODENAME="devel" +fi + +tox -e coverage diff -Nru ubuntu-image-0.11+16.04ubuntu1/debian/tests/import.sh ubuntu-image-0.12+16.04ubuntu1/debian/tests/import.sh --- ubuntu-image-0.11+16.04ubuntu1/debian/tests/import.sh 1970-01-01 00:00:00.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/debian/tests/import.sh 2016-11-08 22:31:21.000000000 +0000 @@ -0,0 +1,4 @@ +cd $ADTTMP + +python3 -c "import ubuntu_image; print(ubuntu_image)" +python3 -c "import ubuntu_image; print(ubuntu_image.__version__)" diff -Nru ubuntu-image-0.11+16.04ubuntu1/devel-coverage.ini ubuntu-image-0.12+16.04ubuntu1/devel-coverage.ini --- ubuntu-image-0.11+16.04ubuntu1/devel-coverage.ini 1970-01-01 00:00:00.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/devel-coverage.ini 2016-11-08 22:31:21.000000000 +0000 @@ -0,0 +1,17 @@ +[run] +branch = true +parallel = true +omit = + setup* + .tox/coverage/lib/python*/site-packages/* + ubuntu_image/tests/* + ubuntu_image/testing/* + /usr/lib/python3/dist-packages/* + +[paths] +source = + ubuntu_image + .tox/coverage/lib/python*/site-packages/ubuntu_image + +[report] +#fail_under = 100 diff -Nru ubuntu-image-0.11+16.04ubuntu1/setup.py ubuntu-image-0.12+16.04ubuntu1/setup.py --- ubuntu-image-0.11+16.04ubuntu1/setup.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/setup.py 2016-11-08 22:31:21.000000000 +0000 @@ -58,6 +58,11 @@ author_email='snapcraft@lists.ubuntu.com', url='https://github.com/CanonicalLtd/ubuntu-image', packages=find_packages(), + install_requires=[ + 'attrs', + 'pyyaml', + 'voluptuous', + ], include_package_data=True, scripts=['ubuntu-image'], entry_points={ diff -Nru ubuntu-image-0.11+16.04ubuntu1/snapcraft.yaml ubuntu-image-0.12+16.04ubuntu1/snapcraft.yaml --- ubuntu-image-0.11+16.04ubuntu1/snapcraft.yaml 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/snapcraft.yaml 2016-11-08 22:31:21.000000000 +0000 @@ -2,7 +2,7 @@ summary: Create Ubuntu images description: | Use this tool to create Ubuntu images. -version: 0.11+real1 +version: 0.12+real1 confinement: devmode apps: diff -Nru ubuntu-image-0.11+16.04ubuntu1/tox.ini ubuntu-image-0.12+16.04ubuntu1/tox.ini --- ubuntu-image-0.11+16.04ubuntu1/tox.ini 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/tox.ini 2016-11-08 22:31:21.000000000 +0000 @@ -11,11 +11,11 @@ sitepackages = True passenv = PYTHON* - UBUNTUIMAGE_* + UBUNTU_IMAGE_* *_proxy [coverage] -rcfile = {toxinidir}/coverage.ini +rcfile = {toxinidir}/{env:UBUNTU_IMAGE_CODENAME:devel}-coverage.ini rc = --rcfile={[coverage]rcfile} [testenv:coverage] diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/assertions.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/assertions.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/assertions.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/assertions.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -from io import StringIO -from ubuntu_image._unstable import load_rfc822_records - - -__all__ = ('Assertion', 'ModelAssertion') - - -class Assertion: - """Assertion is a fact encoded in a text file. - - Assertions are used heavily in snappy. The format of an assertion is - similar to RFC822 but they are not meant to be read or written by humans - and they contain a cryptographic signature. 
- """ - - def __init__(self, headers, body=None): - """Initialize an assertion with the following data (headers).""" - self.headers = headers - self.body = body - - @classmethod - def from_string(cls, text): - """Load assertion from a string.""" - # XXX: This is a temporary stop-gap. The proper way to do this is to - # invoke yet-unimplemented go executable that reads an assertion on - # stdin, validates it and outputs the same assertion as JSON on stdout. - # - # XXX: At worst, we should probably use the stdlib email package - # parser and invoke it with a policy for field value assertions if - # necessary. - fields, signature = text.rsplit('\n\n', 1) - records = load_rfc822_records(StringIO(fields)) - if len(records) != 1: - raise ValueError('Expected exactly one assertion') - headers = records[0].data - body = None # XXX: body is not required here - return cls(headers, body) - - -class Header: - """Descriptor for accessing assertion headers conveniently.""" - - def __init__(self, name): - self.name = name - - def __repr__(self): - return 'Header({!a})'.format(self.name) - - def __get__(self, instance, owner): - if instance is None: - return self - return instance.headers[self.name] - - -class ModelAssertion(Assertion): - """Model assertion describes a class of devices sharing the model name. - - The assertion contains, among other things, the identifier of the store - and of the three key snaps (core, kernel, gadget) that have to be - installed. - """ - type = Header('type') - authority_id = Header('authority-id') - series = Header('series') - brand_id = Header('brand-id') - os = Header('os') - architecture = Header('architecture') - kernel = Header('kernel') - gadget = Header('gadget') - required_snaps = Header('required-snaps') diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/builder.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/builder.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/builder.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/builder.py 2016-11-08 22:31:21.000000000 +0000 @@ -1,6 +1,5 @@ """Flow for building a disk image.""" - import os import shutil import logging @@ -43,6 +42,7 @@ # Information passed between states. 
self.rootfs = None self.rootfs_size = 0 + self.rootfs_offset = 0 self.image_size = 0 self.bootfs = None self.bootfs_sizes = None @@ -64,16 +64,18 @@ boot_images=self.boot_images, bootfs=self.bootfs, bootfs_sizes=self.bootfs_sizes, + cloud_init=self.cloud_init, disk_img=self.disk_img, + exitcode=self.exitcode, gadget=self.gadget, + image_size=self.image_size, images=self.images, output=self.output, root_img=self.root_img, rootfs=self.rootfs, rootfs_size=self.rootfs_size, - image_size=self.image_size, + rootfs_offset=self.rootfs_offset, unpackdir=self.unpackdir, - cloud_init=self.cloud_init, ) return state @@ -83,16 +85,23 @@ self.boot_images = state['boot_images'] self.bootfs = state['bootfs'] self.bootfs_sizes = state['bootfs_sizes'] + self.cloud_init = state['cloud_init'] self.disk_img = state['disk_img'] + self.exitcode = state['exitcode'] self.gadget = state['gadget'] + self.image_size = state['image_size'] self.images = state['images'] self.output = state['output'] self.root_img = state['root_img'] self.rootfs = state['rootfs'] self.rootfs_size = state['rootfs_size'] - self.image_size = state['image_size'] + self.rootfs_offset = state['rootfs_offset'] self.unpackdir = state['unpackdir'] - self.cloud_init = state['cloud_init'] + + def _log_exception(self, name): + # Only log the exception if we're in debug mode. + if self.args.debug: + super()._log_exception(name) def make_temporary_directories(self): self.rootfs = os.path.join(self.workdir, 'root') @@ -129,7 +138,7 @@ dst = os.path.join(self.rootfs, 'system-data') for subdir in os.listdir(src): # LP: #1632134 - copy everything under the image directory except - # /boot which goos to the boot partition. + # /boot which goes to the boot partition. if subdir != 'boot': shutil.move(os.path.join(src, subdir), os.path.join(dst, subdir)) @@ -275,6 +284,11 @@ assert len(volumes) == 1, 'For now, only one volume is allowed' volume = list(volumes)[0] for partnum, part in enumerate(volume.structures): + # The root file system implicitly lives right after the farthest + # out structure, which may not be the last named structure in the + # gadget.yaml. + self.rootfs_offset = max( + self.rootfs_offset, (part.offset + part.size)) part_img = os.path.join(self.images, 'part{}.img'.format(partnum)) self.boot_images.append(part_img) run('dd if=/dev/zero of={} count=0 bs={} seek=1'.format( @@ -289,14 +303,12 @@ # XXX: hard-coding of sector size run('mkfs.vfat -s 1 -S 512 -F 32 {} {}'.format( label_option, part_img)) - # XXX: Does not handle the case of partitions at the end of the - # image. - next_avail = part.offset + part.size - # The image for the root partition. - # - # XXX: Hard-codes last 34 512-byte sectors for backup GPT, - # empirically derived from sgdisk behavior. - calculated = ceil((self.rootfs_size + next_avail) / 1024 + 17) * 1024 + # Sanity check whether everything will fit on the disk if an explicit + # size was given on the command line. XXX: This hard-codes last 34 + # 512-byte sectors for backup GPT, empirically derived from sgdisk + # behavior. + calculated = ceil( + (self.rootfs_offset + self.rootfs_size) / 1024 + 17) * 1024 if self.args.image_size is None: self.image_size = calculated else: @@ -307,6 +319,7 @@ self.image_size = calculated else: self.image_size = self.args.image_size + # The image for the root partition. self.root_img = os.path.join(self.images, 'root.img') # Create empty file with holes. 
with open(self.root_img, 'w'): @@ -374,13 +387,13 @@ volume = list(volumes)[0] # XXX: This ought to be a single constructor that figures out the # class for us when we pass in the schema. - if volume.schema == VolumeSchema.mbr: + if volume.schema is VolumeSchema.mbr: image = MBRImage(self.disk_img, self.image_size) else: image = Image(self.disk_img, self.image_size) offset_writes = [] part_offsets = {} - next_offset = 1 + root_offset = 0 for i, part in enumerate(volume.structures): if part.name is not None: part_offsets[part.name] = part.offset @@ -390,7 +403,7 @@ bs='1M', seek=part.offset // MiB(1), count=ceil(part.size / MiB(1)), conv='notrunc') - if part.type == 'mbr': + if part.type in ('bare', 'mbr'): continue # sgdisk takes either a sector or a KiB/MiB argument; assume # that the offset and size are always multiples of 1MiB. @@ -409,17 +422,22 @@ partition_args['change_name'] = part.name image.partition(part_id, **partition_args) part_id += 1 - next_offset = (part.offset + part.size) // MiB(1) - # Create main snappy writable partition as the last partition. + # Put the rootfs after the farthest out structure, which may not + # be the last named structure in the gadget.yaml. + root_offset = max(root_offset, (part.offset + part.size)) + # Create and write the main snappy writable partition (i.e. rootfs) as + # the last partition. + root_offset_mib = self.rootfs_offset // MiB(1) + root_size_kib = ceil(self.rootfs_size / 1024) image.partition( part_id, - new='{}M:+{}K'.format(next_offset, ceil(self.rootfs_size / 1024)), + new='{}M:+{}K'.format(root_offset_mib, root_size_kib), typecode=('83', '0FC63DAF-8483-4772-8E79-3D69D8477DE4')) if volume.schema is VolumeSchema.gpt: image.partition(part_id, change_name='writable') image.copy_blob( self.root_img, - bs='1M', seek=next_offset, + bs='1M', seek=root_offset_mib, count=ceil(self.rootfs_size / MiB(1)), conv='notrunc') for value, dest in offset_writes: diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/helpers.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/helpers.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/helpers.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/helpers.py 2016-11-08 22:31:21.000000000 +0000 @@ -19,7 +19,6 @@ 'run', 'snap', 'sparse_copy', - 'transform', ] @@ -60,21 +59,15 @@ def as_size(size, min=0, max=None): - # Check for int-ness and just return what you get if so. YAML parsers - # will turn values like '108' into ints automatically, but voluptuous will - # always try to coerce the value to an as_size. - if isinstance(size, int): - value = size - else: - mo = re.match('(\d+)([a-zA-Z]*)', size) - if mo is None: - raise ValueError(size) - size_in_bytes = mo.group(1) - value = { - '': straight_up_bytes, - 'G': GiB, - 'M': MiB, - }[mo.group(2)](int(size_in_bytes)) + mo = re.match('(\d+)([a-zA-Z]*)', size) + if mo is None: + raise ValueError(size) + size_in_bytes = mo.group(1) + value = { + '': straight_up_bytes, + 'G': GiB, + 'M': MiB, + }[mo.group(2)](int(size_in_bytes)) if max is None: if value < min: raise ValueError('Value outside range: {} < {}'.format(value, min)) @@ -84,29 +77,6 @@ return value -def transform(caught_excs, new_exc): - """Transform any caught exceptions into a new exception. - - This is a decorator which runs the decorated function, catching all - specified exceptions. If one of those exceptions occurs, it is - transformed (i.e. re-raised) into a new exception. The original exception - is retained via exception chaining. 
- - :param caught_excs: The exception or exceptions to catch. - :type caught_excs: A single exception, or a tuple of exceptions. - :param new_exc: The new exception to re-raise. - :type new_exc: An exception. - """ - def outer(func): - def inner(*args, **kws): - try: - return func(*args, **kws) - except caught_excs as exception: - raise new_exc from exception - return inner - return outer - - def run(command, *, check=True, **args): runnable_command = ( command.split() if isinstance(command, str) and 'shell' not in args @@ -179,7 +149,7 @@ proc = run(cmd, check=False) if proc.returncode == 0: # We have a new enough e2fsprogs, so we're done. - return # pragma: nocover + return # pragma: noxenial run('mkfs.ext4 -L {} -T default -O uninit_bg {}'.format(label, img_file)) # Only do this if the directory is non-empty. if not os.listdir(contents_dir): diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/image.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/image.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/image.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/image.py 2016-11-08 22:31:21.000000000 +0000 @@ -185,6 +185,14 @@ if isinstance(value, tuple): value = value[0] command_input.append('type={}'.format(value)) + elif key == 'change_name': + # If the gadget.yaml has an mbr schema with a named structure, + # the builder will try to set the partition's name to this + # value. That's not supported by sfdisk and is probably not + # allowed by the mbr partition table format. Rather than + # change the builder (i.e. in case we find a workaround), for + # now, just ignore the argument. + pass else: raise ValueError('{} option not supported for MBR partitions' .format(key)) diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/__init__.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/__init__.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/__init__.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/__init__.py 2016-11-08 22:31:21.000000000 +0000 @@ -0,0 +1,17 @@ +import os + +from pkg_resources import resource_string as resource_bytes + + +# Try to get the version number, which will be different if we're living in a +# snap world or a deb. Actually, I'd prefer to not even have the -NubuntuY +# version string when we're running from source, but that's trickier, so don't +# worry about it. +__version__ = os.environ.get('SNAP_VERSION') +if __version__ is None: # pragma: nocover + try: + __version__ = resource_bytes( + 'ubuntu_image', 'version.txt').decode('utf-8') + except FileNotFoundError: + # Probably, setup.py hasn't been run yet to generate the version.txt. 
+ __version__ = 'dev' diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/__main__.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/__main__.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/__main__.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/__main__.py 2016-11-08 22:31:21.000000000 +0000 @@ -1,6 +1,5 @@ """Allows the package to be run with `python3 -m ubuntu_image`.""" - import os import sys import logging @@ -8,27 +7,15 @@ from contextlib import suppress from pickle import dump, load -from pkg_resources import resource_string as resource_bytes +from ubuntu_image import __version__ from ubuntu_image.builder import ModelAssertionBuilder from ubuntu_image.helpers import as_size from ubuntu_image.i18n import _ +from ubuntu_image.parser import GadgetSpecificationError _logger = logging.getLogger('ubuntu-image') -# Try to get the version number, which will be different if we're living in a -# snap world or a deb. Actually, I'd prefer to not even have the -NubuntuY -# version string when we're running from source, but that's trickier, so don't -# worry about it. -__version__ = os.environ.get('SNAP_VERSION') -if __version__ is None: # pragma: nocover - try: - __version__ = resource_bytes( - 'ubuntu_image', 'version.txt').decode('utf-8') - except FileNotFoundError: - # Probably, setup.py hasn't been run yet to generate the version.txt. - __version__ = 'dev' - PROGRAM = 'ubuntu-image' @@ -169,6 +156,12 @@ state_machine.run_until(args.until) else: list(state_machine) + except GadgetSpecificationError as error: + if args.debug: + _logger.exception('gadget.yaml parse error') + else: + _logger.error('gadget.yaml parse error: {}'.format(error)) + _logger.error('Use --debug for more information') except: _logger.exception('Crash in state machine') return 1 diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/parser.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/parser.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/parser.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/parser.py 2016-11-08 22:31:21.000000000 +0000 @@ -2,16 +2,26 @@ import re import attr +import logging from enum import Enum from io import StringIO from operator import attrgetter, methodcaller -from ubuntu_image.helpers import GiB, MiB, as_size, transform +from ubuntu_image.helpers import GiB, MiB, as_size from uuid import UUID from voluptuous import Any, Coerce, Invalid, Match, Optional, Required, Schema +from warnings import warn from yaml import load from yaml.loader import SafeLoader -from yaml.parser import ParserError +from yaml.parser import ParserError, ScannerError + + +COLON = ':' +_logger = logging.getLogger('ubuntu-image') + + +class GadgetSpecificationError(Exception): + """An exception occurred during the parsing of the gadget.yaml file.""" # By default PyYAML allows duplicate mapping keys, even though the YAML spec @@ -26,42 +36,69 @@ mapping = {} for key, value in pairs: if key in mapping: - raise ValueError('Duplicate key: {}'.format(key)) + raise GadgetSpecificationError('Duplicate key: {}'.format(key)) mapping[key] = value return mapping StrictLoader.add_constructor( 'tag:yaml.org,2002:map', StrictLoader.construct_mapping) +# LP: #1640523 +StrictLoader.add_constructor( + 'tag:yaml.org,2002:int', StrictLoader.construct_yaml_str) +# Decorator for naming the path -as best we can statically- within the +# gadget.yaml file where this enum is found. Used in error reporting. 
+def yaml_path(path): + def inner(cls): + cls.yaml_path = path + return cls + return inner + + +@yaml_path('volumes::bootloader') class BootLoader(Enum): uboot = 'u-boot' grub = 'grub' +@yaml_path('volumes::schema') class VolumeSchema(Enum): mbr = 'mbr' gpt = 'gpt' +@yaml_path('volumes::structure::filesystem') class FileSystemType(Enum): none = 'none' ext4 = 'ext4' vfat = 'vfat' +@yaml_path('volumes::structure::role') +class StructureRole(Enum): + mbr = 'mbr' + system_boot = 'system-boot' + system_data = 'system-data' + + class Enumify: def __init__(self, enum_class, msg=None, preprocessor=None): self.enum_class = enum_class self.preprocessor = preprocessor def __call__(self, v): - # Voluptuous will catch any exceptions. - return self.enum_class[ - v if self.preprocessor is None - else self.preprocessor(v) - ] + # Turn KeyErrors into spec errors. + try: + return self.enum_class[ + v if self.preprocessor is None + else self.preprocessor(v) + ] + except KeyError as error: + raise GadgetSpecificationError( + "Invalid gadget.yaml value '{}' @ {}".format( + v, self.enum_class.yaml_path)) from error def Size32bit(v): @@ -71,15 +108,8 @@ def Id(v): """Coerce to either a hex UUID, a 2-digit hex value.""" - if isinstance(v, int): - # Okay, here's the problem. If the id value is something like '80' in - # the yaml file, the yaml parser will turn that into the decimal - # integer 80, but that's really not what we want! We want it to be - # the hex value 0x80. So we have to turn it back into a string and - # allow the 2-digit validation matcher to go from there. - if v >= 100 or v < 0: - raise ValueError(str(v)) - v = '{:02d}'.format(v) + # Yes, we actually do want this function to raise ValueErrors instead of + # GadgetSpecificationErrors. try: return UUID(hex=v) except ValueError: @@ -92,15 +122,16 @@ def HybridId(v): """Like above, but allows for hybrid Ids.""" - if isinstance(v, str): - code, comma, guid = v.partition(',') - if comma == ',': - # Two digit hex code must appear before GUID. - if len(code) != 2 or len(guid) != 36: - raise ValueError(v) - hex_code = Id(code) - guid_code = Id(guid) - return hex_code, guid_code + # Yes, we actually do want this function to raise ValueErrors instead of + # GadgetSpecificationErrors. + code, comma, guid = v.partition(',') + if comma == ',': + # Two digit hex code must appear before GUID. + if len(code) != 2 or len(guid) != 36: + raise ValueError(v) + hex_code = Id(code) + guid_code = Id(guid) + return hex_code, guid_code return Id(v) @@ -110,12 +141,30 @@ It may be specified relative to another structure item with the syntax ``label+1234``. """ + # Yes, we actually do want this function to raise ValueErrors instead of + # GadgetSpecificationErrors. label, plus, offset = v.partition('+') if len(label) == 0 or plus != '+' or len(offset) == 0: raise ValueError(v) return label, Size32bit(offset) +def YAMLFormat(v): + """Verify supported gadget.yaml format versions.""" + # Allow ValueError to percolate up. 
+ unsupported = False + try: + value = int(v) + except ValueError: + unsupported = True + else: + unsupported = (value != 0) + if unsupported: + raise GadgetSpecificationError( + 'Unsupported gadget.yaml format version: {}'.format(v)) + return value + + GadgetYAML = Schema({ Optional('defaults'): { Match('^[a-zA-Z0-9]+$'): { @@ -124,6 +173,7 @@ }, Optional('device-tree-origin', default='gadget'): str, Optional('device-tree'): str, + Optional('format'): YAMLFormat, Required('volumes'): { Match('^[-a-zA-Z0-9]+$'): Schema({ Optional('schema', default=VolumeSchema.gpt): @@ -137,7 +187,10 @@ Optional('offset-write'): Any( Coerce(Size32bit), RelativeOffset), Required('size'): Coerce(as_size), - Required('type'): Any('mbr', Coerce(HybridId)), + Required('type'): Any('mbr', 'bare', Coerce(HybridId)), + Optional('role'): Enumify( + StructureRole, + preprocessor=methodcaller('replace', '-', '_')), Optional('id'): Coerce(UUID), Optional('filesystem', default=FileSystemType.none): Enumify(FileSystemType), @@ -199,6 +252,7 @@ size = attr.ib() type = attr.ib() id = attr.ib() + role = attr.ib() filesystem = attr.ib() filesystem_label = attr.ib() content = attr.ib() @@ -218,9 +272,9 @@ device_tree = attr.ib() volumes = attr.ib() defaults = attr.ib() + format = attr.ib() -@transform((KeyError, Invalid, ParserError), ValueError) def parse(stream_or_string): """Parse the YAML read from the stream or string. @@ -233,7 +287,7 @@ :type stream_or_string: str or file-like object :return: A specification of the gadget. :rtype: GadgetSpec - :raises ValueError: If the schema is violated. + :raises GadgetSpecificationError: If the schema is violated. """ # Do the basic schema validation steps. There some interdependencies that # require post-validation. E.g. you cannot define the fs-type if the role @@ -241,11 +295,25 @@ stream = (StringIO(stream_or_string) if isinstance(stream_or_string, str) else stream_or_string) - yaml = load(stream, Loader=StrictLoader) - validated = GadgetYAML(yaml) + try: + yaml = load(stream, Loader=StrictLoader) + except (ParserError, ScannerError) as error: + raise GadgetSpecificationError( + 'gadget.yaml file is not valid YAML') from error + try: + validated = GadgetYAML(yaml) + except Invalid as error: + if len(error.path) == 0: + raise GadgetSpecificationError('Empty gadget.yaml') + path = COLON.join(str(component) for component in error.path) + # It doesn't look like voluptuous gives us the bogus value, but it + # does give us the path to it. The str(error) contains some + # additional information of dubious value, so just use the path. + raise GadgetSpecificationError('Invalid gadget.yaml @ {}'.format(path)) device_tree_origin = validated.get('device-tree-origin') device_tree = validated.get('device-tree') defaults = validated.get('defaults') + format = validated.get('format') volume_specs = {} bootloader_seen = False # This item is a dictionary so it can't possibly have duplicate keys. @@ -264,25 +332,74 @@ offset_write = structure.get('offset-write') size = structure['size'] structure_type = structure['type'] - # Validate structure types. These can be either GUIDs, two hex - # digits, hybrids, or the special 'mbr' type. The basic syntactic - # validation happens above in the Voluptuous schema, but here we - # need to ensure cross-attribute constraints. Specifically, - # hybrids and 'mbr' are allowed for either schema, but GUID-only - # is only allowed for GPT, while 2-digit-only is only allowed for - # MBR. Note too that 2-item tuples are also already ensured. 
+ structure_role = structure.get('role') + # Structure types and roles work together to define how the + # structure is laid out on disk, along with any disk partitions + # wrapping the structure. In general, the type field names the + # disk partition type code for the wrapping partition, and it will + # either be a GUID for GPT disk schemas, a two hex digit string + # for MBR disk schemas, or a hybrid type where a tuple-like string + # names both type codes. + # + # The role specifies how the structure is to be used. It may be a + # partition holding boot assets, or a partition holding the + # operating system data. Without a role specification, we drop + # back to the filesystem label to determine this. + # + # There are two complications. Disks can have a special + # non-partition wrapped Master Boot Record section on the disk + # containing bootstrapping code. MBRs must start at offset 0 and + # be no larger than 446 bytes (there is some variability for other + # MBR layouts, but 446 is the max). The Wikipedia page has some + # good diagrams: https://en.wikipedia.org/wiki/Master_boot_record + # + # MBR sections are identified by a role:mbr key, and in that case, + # the gadget.yaml may not include a type field (since the MBR + # isn't a partition). + # + # Some use cases involve putting bootstrapping code at other + # locations on the disk, with offsets other than zero and + # arbitrary sizes. This bootstrapping code is also not wrapped in + # a disk partition. For these, type:none is the way to specify + # that, but in that case, you cannot include a role key. + # Technically speaking though, role:mbr is allowed, but somewhat + # redundant. All other roles with type:none are prohibited. + # + # For backward compatibility, we still allow the type:mbr field, + # which is exactly equivalent to the preferred role:mbr field, + # however a deprecation warning is issued in the former case. + if structure_type == 'mbr': + if structure_role: + raise GadgetSpecificationError( + 'Type mbr and role fields assigned at the same time, ' + 'please use the mbr role instead') + warn("volumes::structure::type = 'mbr' is " + 'deprecated; use role instead', DeprecationWarning) + structure_role = StructureRole.mbr + # For now, the structure type value can be of several Python + # types. 1) a UUID for GPT schemas; 2) a 2-letter str for MBR + # schemas; 3) a 2-tuple of #1 and #2 for mixed schemas; 4) the + # special strings 'mbr' and 'bare' which can appear for either GPT + # or MBR schemas. type:mbr is deprecated and will eventually go + # away. What we're doing here is some simple validation of #1 and + # #2. if (isinstance(structure_type, UUID) and schema is not VolumeSchema.gpt): - raise ValueError('GUID structure type with non-GPT') + raise GadgetSpecificationError( + 'MBR structure type with non-MBR schema') + elif structure_type == 'bare': + if structure_role not in (None, StructureRole.mbr): + raise GadgetSpecificationError( + 'Invalid gadget.yaml: structure role/type conflict') elif (isinstance(structure_type, str) and - structure_type != 'mbr' and + structure_role is not StructureRole.mbr and schema is not VolumeSchema.mbr): - raise ValueError('MBR structure type with non-MBR') + raise GadgetSpecificationError( + 'GUID structure type with non-GPT schema') # Check for implicit vs. explicit partition offset. if offset is None: - # XXX: Ensure the special case of the 'mbr' type doesn't - # extend beyond the confines of the mbr. 
- if structure_type != 'mbr' and last_offset < MiB(1): + if (structure_role is not StructureRole.mbr and + last_offset < MiB(1)): offset = MiB(1) else: offset = last_offset @@ -290,38 +407,66 @@ # Extract the rest of the structure data. structure_id = structure.get('id') filesystem = structure['filesystem'] - if structure_type == 'mbr': + if structure_role is StructureRole.mbr: + if size > 446: + raise GadgetSpecificationError( + 'mbr structures cannot be larger than 446 bytes.') + if offset != 0: + raise GadgetSpecificationError( + 'mbr structure must start at offset 0') if structure_id is not None: - raise ValueError('mbr type must not specify partition id') - elif filesystem is not FileSystemType.none: - raise ValueError('mbr type must not specify a file system') + raise GadgetSpecificationError( + 'mbr structures must not specify partition id') + if filesystem is not FileSystemType.none: + raise GadgetSpecificationError( + 'mbr structures must not specify a file system') filesystem_label = structure.get('filesystem-label', name) + # The content will be one of two formats, and no mixing is + # allowed. I.e. even though multiple content sections are allowed + # in a single structure, they must all be of type A or type B. If + # the filesystem type is vfat or ext4, then type A *must* be used; + # likewise if filesystem is none or missing, type B must be used. content = structure.get('content') content_specs = [] - content_spec_class = ( - ContentSpecB if filesystem is FileSystemType.none - else ContentSpecA) if content is not None: - for item in content: - content_specs.append(content_spec_class.from_yaml(item)) + if filesystem is FileSystemType.none: + for item in content: + try: + spec = ContentSpecB.from_yaml(item) + except KeyError: + raise GadgetSpecificationError( + 'filesystem: none missing image file name') + else: + content_specs.append(spec) + else: + for item in content: + try: + spec = ContentSpecA.from_yaml(item) + except KeyError: + raise GadgetSpecificationError( + 'filesystem: vfat|ext4 missing source/target') + else: + content_specs.append(spec) structures.append(StructureSpec( name, offset, offset_write, size, - structure_type, structure_id, filesystem, filesystem_label, + structure_type, structure_id, structure_role, + filesystem, filesystem_label, content_specs)) # Sort structures by their offset. volume_specs[image_name] = VolumeSpec( - schema, bootloader, image_id, - sorted(structures, key=attrgetter('offset'))) + schema, bootloader, image_id, structures) # Sanity check the partition offsets to ensure that there is no # overlap conflict where a part's offset begins before the previous # part's end. last_end = -1 - for part in volume_specs[image_name].structures: + for part in sorted(structures, key=attrgetter('offset')): if part.offset < last_end: - raise ValueError('Structure conflict! {}: {} < {}'.format( - part.type if part.name is None else part.name, - part.offset, last_end)) + raise GadgetSpecificationError( + 'Structure conflict! 
{}: {} < {}'.format( + part.type if part.name is None else part.name, + part.offset, last_end)) last_end = part.offset + part.size if not bootloader_seen: - raise ValueError('No bootloader volume named') - return GadgetSpec(device_tree_origin, device_tree, volume_specs, defaults) + raise GadgetSpecificationError('No bootloader structure named') + return GadgetSpec(device_tree_origin, device_tree, volume_specs, + defaults, format) diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/state.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/state.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/state.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/state.py 2016-11-08 22:31:21.000000000 +0000 @@ -62,6 +62,11 @@ log.debug('-> [{:2}] {}'.format(self._debug_step, name)) return step, name + def _log_exception(self, name): + log.exception( + 'uncaught exception in state machine step: [{}] {}'.format( + self._debug_step, name)) + def __next__(self): try: step, name = self._pop() @@ -72,9 +77,7 @@ self.close() raise StopIteration from None except: - log.exception( - 'uncaught exception in state machine step: [{}] {}'.format( - self._debug_step, name)) + self._log_exception(name) self.close() raise diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/testing/helpers.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/testing/helpers.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/testing/helpers.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/testing/helpers.py 2016-11-08 22:31:21.000000000 +0000 @@ -23,8 +23,8 @@ meta_dir = os.path.join(gadget_dir, 'meta') os.makedirs(meta_dir, exist_ok=True) shutil.copy( - resource_filename('ubuntu_image.tests.data', 'gadget.yaml'), - os.path.join(meta_dir, self.gadget_yaml)) + resource_filename('ubuntu_image.tests.data', self.gadget_yaml), + os.path.join(meta_dir, 'gadget.yaml')) shutil.copy( resource_filename('ubuntu_image.tests.data', 'grubx64.efi'), os.path.join(gadget_dir, 'grubx64.efi')) diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/testing/nose.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/testing/nose.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/testing/nose.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/testing/nose.py 2016-11-08 22:31:21.000000000 +0000 @@ -175,7 +175,7 @@ # How should we mock `snap prepare-image`? If set to 'always' (case # insensitive), then use the sample data in the .zip file. Any other # truthy value says to use a second-and-onward mock. 
- should_we_mock = os.environ.get('UBUNTUIMAGE_MOCK_SNAP', 'yes') + should_we_mock = os.environ.get('UBUNTU_IMAGE_MOCK_SNAP', 'yes') if should_we_mock.lower() == 'always': mock_class = AlwaysMock elif as_bool(should_we_mock): diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/tests/data/bad-gadget.yaml ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/tests/data/bad-gadget.yaml --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/tests/data/bad-gadget.yaml 1970-01-01 00:00:00.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/tests/data/bad-gadget.yaml 2016-11-08 22:31:21.000000000 +0000 @@ -0,0 +1,6 @@ +volumes: + first-image: + bootloader: u-boot + structure: + - type: ef + size: 400M diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/tests/test_assertions.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/tests/test_assertions.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/tests/test_assertions.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/tests/test_assertions.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,53 +0,0 @@ -from ubuntu_image.assertions import ModelAssertion -from unittest import TestCase - - -fake_model_assertion = """ -type: model -authority-id: nobody -series: 16 -brand-id: zygoon -model: kvm-demo -os: ubuntu-core -architecture: amd64 -kernel: canonical-pc-linux -gadget: canonical-pc -required-snaps: links -# preinstalled-snaps: a, b -# prefetched-snaps: docker -body-size: 0 - -openpgpg 2cln -""" -# TODO: add a fake signature once the parser understands it. - - -two_records_assertion = """ -type: model - -authority-id: nobody - -openpgpg 2cln -""" - - -class TestModelAssertion(TestCase): - def test_smoke(self): - model = ModelAssertion.from_string(fake_model_assertion) - self.assertEqual(model.type, 'model') - self.assertEqual(model.authority_id, 'nobody') - self.assertEqual(model.series, '16') - self.assertEqual(model.brand_id, 'zygoon') - self.assertEqual(model.os, 'ubuntu-core') - self.assertEqual(model.architecture, 'amd64') - self.assertEqual(model.kernel, 'canonical-pc-linux') - self.assertEqual(model.gadget, 'canonical-pc') - self.assertEqual(model.required_snaps, 'links') - - def test_two_records(self): - self.assertRaises(ValueError, - ModelAssertion.from_string, - two_records_assertion) - - def test_header_repr(self): - self.assertEqual(repr(ModelAssertion.type), "Header('type')") diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/tests/test_builder.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/tests/test_builder.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/tests/test_builder.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/tests/test_builder.py 2016-11-08 22:31:21.000000000 +0000 @@ -57,11 +57,12 @@ output = self._resources.enter_context(NamedTemporaryFile()) args = SimpleNamespace( channel='edge', - workdir=None, - output=output, - model_assertion=self.model_assertion, cloud_init=None, + debug=False, extra_snaps=None, + model_assertion=self.model_assertion, + output=output, + workdir=None, ) state = self._resources.enter_context(XXXModelAssertionBuilder(args)) state.run_thru('calculate_bootfs_size') @@ -89,7 +90,8 @@ seed_patterns = [ '^pc-kernel_[0-9]+.snap$', '^pc_[0-9]+.snap$', - '^ubuntu-core_[0-9]+.snap$', + # This snap's name is undergoing transition. + '^(ubuntu-)?core_[0-9]+.snap$', ] # Make sure every file matches a pattern and every pattern matches a # file. 
@@ -106,12 +108,11 @@ patterns_unmatched = set(seed_patterns) - patterns_matched files_unmatched = snaps - files_matched self.assertEqual( - len(patterns_unmatched), 0, - 'Unmatched patterns: {}'.format(COMMASPACE.join( - patterns_unmatched))) - self.assertEqual( - len(files_unmatched), 0, - 'Unmatched files: {}'.format(COMMASPACE.join(files_unmatched))) + (len(patterns_unmatched), len(files_unmatched)), + (0, 0), + 'Unmatched patterns: {}\nUnmatched files: {}'.format( + COMMASPACE.join(patterns_unmatched), + COMMASPACE.join(files_unmatched))) def test_populate_rootfs_contents_without_cloud_init(self): with ExitStack() as resources: @@ -120,11 +121,11 @@ print('cloud init user data', end='', flush=True, file=cloud_init) args = SimpleNamespace( channel='edge', - workdir=None, - model_assertion=self.model_assertion, - output=None, cloud_init=None, extra_snaps=None, + model_assertion=self.model_assertion, + output=None, + workdir=None, ) state = resources.enter_context(XXXModelAssertionBuilder(args)) # Fake some state expected by the method under test. @@ -156,11 +157,11 @@ print('cloud init user data', end='', flush=True, file=cloud_init) args = SimpleNamespace( channel='edge', - workdir=None, - model_assertion=self.model_assertion, - output=None, cloud_init=cloud_init.name, extra_snaps=None, + model_assertion=self.model_assertion, + output=None, + workdir=None, ) state = resources.enter_context(XXXModelAssertionBuilder(args)) # Fake some state expected by the method under test. @@ -191,11 +192,11 @@ with ExitStack() as resources: args = SimpleNamespace( channel='edge', - workdir=None, - model_assertion=self.model_assertion, - output=None, cloud_init=None, extra_snaps=None, + model_assertion=self.model_assertion, + output=None, + workdir=None, ) state = resources.enter_context(XXXModelAssertionBuilder(args)) # Fake some state expected by the method under test. @@ -237,10 +238,10 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) state = resources.enter_context(XXXModelAssertionBuilder(args)) state._next.pop() @@ -290,10 +291,11 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + debug=False, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) state = resources.enter_context(XXXModelAssertionBuilder(args)) state._next.pop() @@ -331,10 +333,10 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) state = resources.enter_context(XXXModelAssertionBuilder(args)) state._next.pop() @@ -402,10 +404,11 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. 
args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + debug=False, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) state = resources.enter_context(XXXModelAssertionBuilder(args)) state._next.pop() @@ -447,10 +450,10 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) state = resources.enter_context(XXXModelAssertionBuilder(args)) state._next.pop() @@ -476,10 +479,10 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) # Jump right to the method under test. state = resources.enter_context(XXXModelAssertionBuilder(args)) @@ -561,10 +564,10 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) # Jump right to the method under test. state = resources.enter_context(XXXModelAssertionBuilder(args)) @@ -622,10 +625,11 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + debug=False, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) # Jump right to the method under test. state = resources.enter_context(XXXModelAssertionBuilder(args)) @@ -675,10 +679,11 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + debug=False, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) # Jump right to the method under test. state = resources.enter_context(XXXModelAssertionBuilder(args)) @@ -718,10 +723,10 @@ unpackdir = resources.enter_context(TemporaryDirectory()) # Fast forward a state machine to the method under test. args = SimpleNamespace( - workdir=workdir, - unpackdir=unpackdir, - output=None, cloud_init=None, + output=None, + unpackdir=unpackdir, + workdir=workdir, ) # Jump right to the method under test. state = resources.enter_context(XXXModelAssertionBuilder(args)) @@ -737,6 +742,7 @@ part0 = SimpleNamespace( name='alpha', type='da', + role=None, size=MiB(1), offset=MiB(2), offset_write=100, @@ -744,6 +750,7 @@ part1 = SimpleNamespace( name='beta', type='ef', + role=None, size=MiB(1), offset=MiB(4), offset_write=200, @@ -751,12 +758,13 @@ part2 = SimpleNamespace( name='gamma', type='mbr', + role=None, size=MiB(1), offset=0, offset_write=None, ) volume = SimpleNamespace( - # Ordered by offset, as would happen by the parser. + # gadget.yaml appearance order. 
structures=[part2, part0, part1], schema=VolumeSchema.gpt, ) @@ -777,6 +785,7 @@ fp.write(b'\3' * 13) state.root_img = os.path.join(state.images, 'root.img') state.rootfs_size = MiB(1) + state.rootfs_offset = MiB(5) with open(state.root_img, 'wb') as fp: fp.write(b'\4' * 14) state.boot_images = [part2_img, part0_img, part1_img] @@ -821,6 +830,198 @@ self.assertEqual(partitions[1], ('beta', 8192)) self.assertEqual(partitions[2], ('writable', 10240)) + def test_make_disk_with_bare_parts(self): + # The second structure has a role:bare meaning it is not wrapped in a + # disk partition. + with ExitStack() as resources: + workdir = resources.enter_context(TemporaryDirectory()) + unpackdir = resources.enter_context(TemporaryDirectory()) + # Fast forward a state machine to the method under test. + args = SimpleNamespace( + cloud_init=None, + output=None, + unpackdir=unpackdir, + workdir=workdir, + ) + # Jump right to the method under test. + state = resources.enter_context(XXXModelAssertionBuilder(args)) + state._next.pop() + state._next.append(state.make_disk) + # Set up expected state. + state.unpackdir = unpackdir + state.images = os.path.join(workdir, '.images') + state.disk_img = os.path.join(workdir, 'disk.img') + state.image_size = MiB(10) + os.makedirs(state.images) + # Craft a gadget schema. + part0 = SimpleNamespace( + name='assets', + size=MiB(1), + offset=0, + offset_write=None, + type='bare', + ) + volume = SimpleNamespace( + # gadget.yaml appearance order. + structures=[part0], + schema=VolumeSchema.gpt, + ) + state.gadget = SimpleNamespace( + volumes=dict(volume1=volume), + ) + # Set up images for the targeted test. These represent the image + # files that would have already been crafted to write to the disk + # in early (untested here) stages of the state machine. + part0_img = os.path.join(state.images, 'part0.img') + with open(part0_img, 'wb') as fp: + fp.write(b'\1' * 11) + state.root_img = os.path.join(state.images, 'root.img') + state.rootfs_size = MiB(1) + state.rootfs_offset = MiB(5) + with open(state.root_img, 'wb') as fp: + fp.write(b'\4' * 14) + state.boot_images = [part0_img] + # Create the disk. + next(state) + # Verify some parts of the disk.img's content. First, that we've + # written the part offsets at the right place.y + with open(state.disk_img, 'rb') as fp: + fp.seek(0) + self.assertEqual(fp.read(15), b'\1' * 11 + b'\0' * 4) + # The root file system lives at the end, which in this case is + # at the 5MiB location (e.g. the farthest out non-mbr + # partition is at 4MiB and has 1MiB in size. + fp.seek(MiB(5)) + self.assertEqual(fp.read(15), b'\4' * 14 + b'\0') + # Verify the disk image's partition table. + proc = run('sfdisk --json {}'.format(state.disk_img)) + layout = json.loads(proc.stdout) + partitions = [ + (part['name'], part['start']) + for part in layout['partitiontable']['partitions'] + ] + self.assertEqual(len(partitions), 1) + self.assertEqual(partitions[0], ('writable', 10240)) + + def test_make_disk_with_out_of_order_structures(self): + # Structures get partition numbers matching their appearance in the + # gadget.yaml, even if these are out of order with respect to their + # disk offsets. LP: #1642999 + with ExitStack() as resources: + workdir = resources.enter_context(TemporaryDirectory()) + unpackdir = resources.enter_context(TemporaryDirectory()) + # Fast forward a state machine to the method under test. + args = SimpleNamespace( + cloud_init=None, + output=None, + unpackdir=unpackdir, + workdir=workdir, + ) + # Jump right to the method under test. 
+ state = resources.enter_context(XXXModelAssertionBuilder(args)) + state._next.pop() + state._next.append(state.make_disk) + # Set up expected state. + state.unpackdir = unpackdir + state.images = os.path.join(workdir, '.images') + state.disk_img = os.path.join(workdir, 'disk.img') + state.image_size = MiB(10) + os.makedirs(state.images) + # Craft a gadget schema. + part0 = SimpleNamespace( + name='alpha', + type='aa', + role=None, + size=MiB(1), + offset=MiB(2), + offset_write=100, + ) + part1 = SimpleNamespace( + name='beta', + type='bb', + role=None, + size=MiB(1), + offset=MiB(4), + offset_write=200, + ) + part2 = SimpleNamespace( + name='gamma', + type='mbr', + role=None, + size=MiB(1), + offset=0, + offset_write=None, + ) + volume = SimpleNamespace( + # gadget.yaml appearance order which does not match the disk + # offset order. LP: #1642999 + structures=[part1, part0, part2], + schema=VolumeSchema.gpt, + ) + state.gadget = SimpleNamespace( + volumes=dict(volume1=volume), + ) + # Set up images for the targeted test. These represent the image + # files that would have already been crafted to write to the disk + # in early (untested here) stages of the state machine. + part0_img = os.path.join(state.images, 'part0.img') + with open(part0_img, 'wb') as fp: + fp.write(b'\1' * 11) + part1_img = os.path.join(state.images, 'part1.img') + with open(part1_img, 'wb') as fp: + fp.write(b'\2' * 12) + part2_img = os.path.join(state.images, 'part2.img') + with open(part2_img, 'wb') as fp: + fp.write(b'\3' * 13) + state.root_img = os.path.join(state.images, 'root.img') + state.rootfs_size = MiB(1) + state.rootfs_offset = MiB(5) + with open(state.root_img, 'wb') as fp: + fp.write(b'\4' * 14) + state.boot_images = [part1_img, part0_img, part2_img] + # Create the disk. + next(state) + # Verify some parts of the disk.img's content. First, that we've + # written the part offsets at the right place.y + with open(state.disk_img, 'rb') as fp: + # Part 0's offset is written at position 100. + fp.seek(100) + # Read a 32-bit little-ending integer. Remember + # struct.unpack() always returns tuples, and the values are + # written in sector units, which are hard-coded as 512 bytes. 
+ offset = unpack(':structure::filesystem')) - def test_content_spec_a(self): + def test_volume_structure_role(self): gadget_spec = parse("""\ volumes: first-image: - schema: gpt + schema: mbr bootloader: u-boot structure: - - type: 00000000-0000-0000-0000-0000deadbeef - size: 400M - filesystem: ext4 - content: - - source: subdir/ - target: / + - type: ef + size: 100 + role: mbr + second-image: + structure: + - type: 00000000-0000-0000-0000-0000feedface + size: 200 + role: system-boot + third-image: + structure: + - type: 00000000-0000-0000-0000-0000deafbead + size: 300 + role: system-data + fourth-image: + structure: + - type: 00000000-0000-0000-0000-0000deafbead + size: 400 """) - volume0 = gadget_spec.volumes['first-image'] - partition0 = volume0.structures[0] - self.assertEqual(len(partition0.content), 1) - content0 = partition0.content[0] - self.assertEqual(content0.source, 'subdir/') - self.assertEqual(content0.target, '/') + self.assertEqual(len(gadget_spec.volumes), 4) + self.assertEqual({ + 'first-image': StructureRole.mbr, + 'second-image': StructureRole.system_boot, + 'third-image': StructureRole.system_data, + 'fourth-image': None, + }, + {key: gadget_spec.volumes[key].structures[0].role + for key in gadget_spec.volumes} + ) - def test_content_spec_b(self): - gadget_spec = parse("""\ + def test_volume_structure_type_none(self): + gadget_spec = parse(""" volumes: first-image: - schema: gpt + schema: mbr bootloader: u-boot structure: - - type: 00000000-0000-0000-0000-0000deadbeef - size: 400M - filesystem: none - content: - - image: foo.img + - type: ef + size: 100 + role: mbr + second-image: + structure: + - type: bare + size: 200 """) - volume0 = gadget_spec.volumes['first-image'] - partition0 = volume0.structures[0] - self.assertEqual(len(partition0.content), 1) - content0 = partition0.content[0] - self.assertEqual(content0.image, 'foo.img') - self.assertIsNone(content0.offset) - self.assertIsNone(content0.offset_write) - self.assertIsNone(content0.size) + self.assertEqual(len(gadget_spec.volumes), 2) + self.assertEqual({ + 'first-image': 'EF', + 'second-image': 'bare', + }, + {key: gadget_spec.volumes[key].structures[0].type + for key in gadget_spec.volumes} + ) - def test_content_spec_b_offset(self): - gadget_spec = parse("""\ + def test_volume_structure_type_role_conflict_1(self): + # type:none means there's no partition, so you can't have a role of + # system-{boot,data}. + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse(""" volumes: first-image: - schema: gpt + schema: mbr bootloader: u-boot structure: - - type: 00000000-0000-0000-0000-0000deadbeef + - type: ef + size: 100 + role: mbr + second-image: + structure: + - type: bare + size: 200 + role: system-boot +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml: structure role/type conflict') + + def test_volume_structure_type_role_conflict_2(self): + # type:none means there's no partition, so you can't have a role of + # system-{boot,data}. 
+ with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse(""" +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: ef + size: 100 + role: mbr + second-image: + structure: + - type: bare + size: 200 + role: system-data +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml: structure role/type conflict') + + def test_volume_structure_type_role_redundant(self): + # type:none means there's no partition. It's valid, but redundant to + # also give a role:mbr. + gadget_spec = parse(""" +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: bare + size: 100 + role: mbr +""") + self.assertEqual(len(gadget_spec.volumes), 1) + volume = gadget_spec.volumes['first-image'] + self.assertEqual(len(volume.structures), 1) + structure = volume.structures[0] + self.assertEqual(structure.role, StructureRole.mbr) + self.assertEqual(structure.type, 'bare') + + def test_volume_structure_invalid_role(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 100 + role: foobar +""") + self.assertEqual( + str(cm.exception), + ("Invalid gadget.yaml value 'foobar' @ " + 'volumes::structure::role')) + + def test_volume_structure_mbr_role_sizes(self): + exception = 'mbr structures cannot be larger than 446 bytes.' + cases = { + '445': False, + '446': False, + '447': True, + '1M': True, + } + for size, raises in cases.items(): + with ExitStack() as resources: + if raises: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: ef + size: {} + role: mbr +""".format(size)) + if raises: + self.assertEqual( + str(cm.exception), + exception) + + def test_volume_structure_mbr_conflicting_id(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: ef + role: mbr + size: 100 + id: 00000000-0000-0000-0000-0000deadbeef +""") + self.assertEqual( + str(cm.exception), + 'mbr structures must not specify partition id') + + def test_volume_structure_mbr_conflicting_filesystem(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: ef + role: mbr + size: 100 + filesystem: ext4 +""") + self.assertEqual( + str(cm.exception), + 'mbr structures must not specify a file system') + + def test_volume_special_type_mbr(self): + gadget_spec = parse("""\ +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: mbr + size: 100 +""") + volume0 = gadget_spec.volumes['first-image'] + partition0 = volume0.structures[0] + self.assertEqual(partition0.type, 'mbr') + self.assertEqual(partition0.role, StructureRole.mbr) + self.assertEqual(partition0.filesystem, FileSystemType.none) + + def test_volume_special_type_mbr_and_role(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: mbr + role: mbr + size: 100 +""") + 
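The 446-byte ceiling enforced a few tests above follows from the classic MBR layout: a 512-byte sector minus the 64-byte partition table (four 16-byte entries) and the 2-byte boot signature leaves 446 bytes for boot code. A short worked check of that bound (the constant names are illustrative; the parser itself raises GadgetSpecificationError):

    SECTOR_SIZE = 512
    PARTITION_TABLE = 4 * 16   # four 16-byte partition entries
    BOOT_SIGNATURE = 2         # trailing 0x55AA marker
    MBR_BOOT_CODE_MAX = SECTOR_SIZE - PARTITION_TABLE - BOOT_SIGNATURE

    assert MBR_BOOT_CODE_MAX == 446
    # Hence sizes of 445 and 446 are accepted, while 447 and 1M are rejected.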
self.assertEqual( + str(cm.exception), + 'Type mbr and role fields assigned at the same time, please use ' + 'the mbr role instead') + + def test_volume_special_type_mbr_and_filesystem(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: mbr + size: 100 + filesystem: ext4 +""") + self.assertEqual( + str(cm.exception), + 'mbr structures must not specify a file system') + + def test_content_spec_a(self): + gadget_spec = parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef size: 400M + filesystem: ext4 content: - - image: foo.img - offset: 2112 + - source: subdir/ + target: / """) volume0 = gadget_spec.volumes['first-image'] partition0 = volume0.structures[0] self.assertEqual(len(partition0.content), 1) content0 = partition0.content[0] - self.assertEqual(content0.image, 'foo.img') - self.assertEqual(content0.offset, 2112) - self.assertIsNone(content0.offset_write) - self.assertIsNone(content0.size) + self.assertEqual(content0.source, 'subdir/') + self.assertEqual(content0.target, '/') - def test_content_spec_b_offset_suffix(self): + def test_content_spec_b(self): gadget_spec = parse("""\ volumes: first-image: @@ -751,20 +811,20 @@ structure: - type: 00000000-0000-0000-0000-0000deadbeef size: 400M + filesystem: none content: - image: foo.img - offset: 1M """) volume0 = gadget_spec.volumes['first-image'] partition0 = volume0.structures[0] self.assertEqual(len(partition0.content), 1) content0 = partition0.content[0] self.assertEqual(content0.image, 'foo.img') - self.assertEqual(content0.offset, MiB(1)) + self.assertIsNone(content0.offset) self.assertIsNone(content0.offset_write) self.assertIsNone(content0.size) - def test_content_spec_b_offset_write(self): + def test_content_spec_b_offset(self): gadget_spec = parse("""\ volumes: first-image: @@ -775,18 +835,18 @@ size: 400M content: - image: foo.img - offset-write: 2112 + offset: 2112 """) volume0 = gadget_spec.volumes['first-image'] partition0 = volume0.structures[0] self.assertEqual(len(partition0.content), 1) content0 = partition0.content[0] self.assertEqual(content0.image, 'foo.img') - self.assertIsNone(content0.offset) - self.assertEqual(content0.offset_write, 2112) + self.assertEqual(content0.offset, 2112) + self.assertIsNone(content0.offset_write) self.assertIsNone(content0.size) - def test_content_spec_b_offset_write_suffix(self): + def test_content_spec_b_offset_suffix(self): gadget_spec = parse("""\ volumes: first-image: @@ -797,18 +857,18 @@ size: 400M content: - image: foo.img - offset-write: 1M + offset: 1M """) volume0 = gadget_spec.volumes['first-image'] partition0 = volume0.structures[0] self.assertEqual(len(partition0.content), 1) content0 = partition0.content[0] self.assertEqual(content0.image, 'foo.img') - self.assertIsNone(content0.offset) - self.assertEqual(content0.offset_write, MiB(1)) + self.assertEqual(content0.offset, MiB(1)) + self.assertIsNone(content0.offset_write) self.assertIsNone(content0.size) - def test_content_spec_b_offset_write_label(self): + def test_content_spec_b_offset_write(self): gadget_spec = parse("""\ volumes: first-image: @@ -819,7 +879,7 @@ size: 400M content: - image: foo.img - offset-write: label+2112 + offset-write: 2112 """) volume0 = gadget_spec.volumes['first-image'] partition0 = volume0.structures[0] @@ -827,11 +887,11 @@ content0 = partition0.content[0] 
self.assertEqual(content0.image, 'foo.img') self.assertIsNone(content0.offset) - self.assertEqual(content0.offset_write, ('label', 2112)) + self.assertEqual(content0.offset_write, 2112) self.assertIsNone(content0.size) - def test_content_spec_b_offset_write_larger_than_32bit(self): - self.assertRaises(ValueError, parse, """\ + def test_content_spec_b_offset_write_suffix(self): + gadget_spec = parse("""\ volumes: first-image: schema: gpt @@ -841,12 +901,19 @@ size: 400M content: - image: foo.img - offset-write: 8G + offset-write: 1M """) + volume0 = gadget_spec.volumes['first-image'] + partition0 = volume0.structures[0] + self.assertEqual(len(partition0.content), 1) + content0 = partition0.content[0] + self.assertEqual(content0.image, 'foo.img') + self.assertIsNone(content0.offset) + self.assertEqual(content0.offset_write, MiB(1)) + self.assertIsNone(content0.size) - def test_content_spec_b_offset_write_is_4G(self): - # 4GiB is just outside 32 bits. - self.assertRaises(ValueError, parse, """\ + def test_content_spec_b_offset_write_label(self): + gadget_spec = parse("""\ volumes: first-image: schema: gpt @@ -856,8 +923,16 @@ size: 400M content: - image: foo.img - offset-write: 4G + offset-write: label+2112 """) + volume0 = gadget_spec.volumes['first-image'] + partition0 = volume0.structures[0] + self.assertEqual(len(partition0.content), 1) + content0 = partition0.content[0] + self.assertEqual(content0.image, 'foo.img') + self.assertIsNone(content0.offset) + self.assertEqual(content0.offset_write, ('label', 2112)) + self.assertIsNone(content0.size) def test_content_spec_b_offset_write_is_just_under_4G(self): gadget_spec = parse("""\ @@ -921,51 +996,6 @@ self.assertIsNone(content0.offset_write) self.assertEqual(content0.size, MiB(1)) - def test_wrong_content_1(self): - self.assertRaises(ValueError, parse, """\ -volumes: - first-image: - schema: gpt - bootloader: u-boot - structure: - - type: 00000000-0000-0000-0000-0000deadbeef - size: 400M - filesystem: none - content: - - source: subdir/ - target: / -""") - - def test_wrong_content_2(self): - self.assertRaises(ValueError, parse, """\ -volumes: - first-image: - schema: gpt - bootloader: u-boot - structure: - - type: 00000000-0000-0000-0000-0000deadbeef - size: 400M - filesystem: ext4 - content: - - image: foo.img -""") - - def test_content_conflict(self): - self.assertRaises(ValueError, parse, """\ -volumes: - first-image: - schema: gpt - bootloader: u-boot - structure: - - type: 00000000-0000-0000-0000-0000deadbeef - size: 400M - filesystem: ext4 - content: - - source: subdir/ - target: / - - image: foo.img -""") - def test_content_a_multiple(self): gadget_spec = parse("""\ volumes: @@ -1020,26 +1050,6 @@ self.assertIsNone(content1.offset_write) self.assertIsNone(content1.size) - def test_multiple_volumes_no_bootloader(self): - self.assertRaises(ValueError, parse, """\ -volumes: - first-image: - schema: gpt - structure: - - type: ef - size: 400M - second-image: - schema: gpt - structure: - - type: a0 - size: 400M - third-image: - schema: gpt - structure: - - type: b1 - size: 400M -""") - def test_multiple_volumes_with_bootloader(self): gadget_spec = parse("""\ volumes: @@ -1101,45 +1111,206 @@ 'some-value') self.assertEqual( gadget_spec.defaults['mfq0tsAY']['other-key'], - 42) - - -class TestPartOrder(TestCase): - # LP: #1631423 - maxDiff = None + '42') - def test_implicit_offset_ordering(self): - # None of the structures have an offset. 
They get ordered in yaml - # appearance order with offsets calculated to be the end of the - # previous structure. + def test_parser_format_version(self): gadget_spec = parse("""\ +format: 0 volumes: - first: - schema: gpt - bootloader: grub + first-image: + bootloader: u-boot structure: - - type: 00000000-0000-0000-0000-dd00deadbeef + - type: 00000000-0000-0000-0000-0000deadbeef size: 400M - - type: 00000000-0000-0000-0000-cc00deadbeef - size: 500M - - type: 00000000-0000-0000-0000-bb00deadbeef - size: 100M - - type: 00000000-0000-0000-0000-aa00deadbeef - size: 100M """) - parts = gadget_spec.volumes['first'].structures - self.assertEqual( - [(part.type, part.offset) for part in parts], [ - (UUID('00000000-0000-0000-0000-dd00deadbeef'), MiB(1)), - (UUID('00000000-0000-0000-0000-cc00deadbeef'), MiB(401)), - (UUID('00000000-0000-0000-0000-bb00deadbeef'), MiB(901)), - (UUID('00000000-0000-0000-0000-aa00deadbeef'), MiB(1001)), - ]) + self.assertEqual(gadget_spec.format, 0) - def test_explicit_offset_ordering(self): - # All of the structures have an offset, so they get ordered that way. + def test_parser_format_version_legacy(self): gadget_spec = parse("""\ volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertIsNone(gadget_spec.format) + + +class TestParserErrors(TestCase): + # Test corner cases, as well as YAML, schema, and specification violations. + + def test_not_yaml(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""foo: bar: baz""") + self.assertEqual(str(cm.exception), + 'gadget.yaml file is not valid YAML') + + def test_bad_schema(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: bad + bootloader: u-boot + structure: + - type: ef + size: 400M +""") + self.assertEqual( + str(cm.exception), + "Invalid gadget.yaml value 'bad' @ volumes::schema") + + def test_implicit_gpt_with_two_digit_type(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: ef + size: 400M +""") + self.assertEqual(str(cm.exception), + 'GUID structure type with non-GPT schema') + + def test_explicit_gpt_with_two_digit_type(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: ef + size: 400M +""") + self.assertEqual(str(cm.exception), + 'GUID structure type with non-GPT schema') + + def test_mbr_with_guid_type(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual(str(cm.exception), + 'MBR structure type with non-MBR schema') + + def test_mbr_with_bogus_type(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: mbr + bootloader: u-boot + structure: + - type: 801 + size: 400M +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:structure:0:type') + + def 
test_bad_bootloader(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boat + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual( + str(cm.exception), + ("Invalid gadget.yaml value 'u-boat' @ " + 'volumes::bootloader')) + + def test_missing_bootloader(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual(str(cm.exception), 'No bootloader structure named') + + def test_missing_bootloader_multiple_volumes(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + second-image: + schema: gpt + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + third-image: + schema: gpt + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual(str(cm.exception), 'No bootloader structure named') + + def test_no_nuthin(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse('') + self.assertEqual(str(cm.exception), 'Empty gadget.yaml') + + def test_no_volumes(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +device-tree-origin: kernel +device-tree: dtree +""") + self.assertEqual(str(cm.exception), 'Invalid gadget.yaml @ volumes') + + def test_mixed_offset_conflict(self): + # Most of the structures have an offset, but one doesn't. The + # bb00deadbeef part is implicitly offset at 700MiB which because it is + # 200MiB in size, conflicts with the offset @ 800M of dd00deadbeef. + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: first: schema: gpt bootloader: grub @@ -1151,24 +1322,534 @@ size: 500M offset: 200M - type: 00000000-0000-0000-0000-bb00deadbeef + size: 200M + - type: 00000000-0000-0000-0000-aa00deadbeef + size: 100M + offset: 1M +""") + self.assertEqual( + str(cm.exception), + ('Structure conflict! 00000000-0000-0000-0000-dd00deadbeef: ' + '838860800 < 943718400')) + + def test_explicit_offset_conflict(self): + # All of the structures have an offset, but they conflict. + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first: + schema: gpt + bootloader: grub + structure: + - type: 00000000-0000-0000-0000-dd00deadbeef + size: 400M + offset: 800M + name: dd + - type: 00000000-0000-0000-0000-cc00deadbeef + size: 500M + offset: 350M + name: cc + - type: 00000000-0000-0000-0000-bb00deadbeef size: 100M offset: 1200M + name: bb - type: 00000000-0000-0000-0000-aa00deadbeef size: 100M offset: 1M + name: aa +""") + self.assertEqual(str(cm.exception), + 'Structure conflict! 
dd: 838860800 < 891289600') + + def test_bad_volume_id(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + id: 3g + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual(str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:id') + + def test_bad_integer_volume_id(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + id: 801 + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual(str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:id') + + def test_disallow_hybrid_volume_id(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + id: 80,00000000-0000-0000-0000-0000deadbeef """) - parts = gadget_spec.volumes['first'].structures self.assertEqual( - [(part.type, part.offset) for part in parts], [ - (UUID('00000000-0000-0000-0000-aa00deadbeef'), MiB(1)), - (UUID('00000000-0000-0000-0000-cc00deadbeef'), MiB(200)), - (UUID('00000000-0000-0000-0000-dd00deadbeef'), MiB(800)), - (UUID('00000000-0000-0000-0000-bb00deadbeef'), MiB(1200)), - ]) + str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:structure:0:id') - def test_mixed_offset_ordering(self): - # Most of the structures have an offset, but one doesn't. The - # bb00deadbeef part is implicitly offset at 700MiB. + def test_volume_id_not_guid(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + id: ef +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:structure:0:id') + + def test_no_structure(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:structure') + + def test_volume_offset_write_relative_syntax_error(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + offset-write: some_label%2112 +""") + self.assertEqual( + str(cm.exception), + ('Invalid gadget.yaml @ ' + 'volumes:first-image:structure:0:offset-write')) + + def test_volume_offset_write_larger_than_32bit(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + offset-write: 8G +""") + self.assertEqual( + str(cm.exception), + ('Invalid gadget.yaml @ ' + 'volumes:first-image:structure:0:offset-write')) + + def test_volume_offset_write_is_4G(self): + # 4GiB is just outside 32 bits. 
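The arithmetic behind that comment: 4GiB equals exactly 2**32, one past the largest value a 32-bit field can hold, so it is the smallest round size that must be rejected. A purely illustrative check:

    MAX_32BIT = 2 ** 32 - 1           # 4294967295
    assert 4 * 2 ** 30 == 2 ** 32     # 4GiB
    assert 4 * 2 ** 30 > MAX_32BIT    # so 4G fails, while 4G - 1 still fits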
+ with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + offset-write: 4G +""") + self.assertEqual( + str(cm.exception), + ('Invalid gadget.yaml @ ' + 'volumes:first-image:structure:0:offset-write')) + + def test_no_size(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:structure:0:size') + + def test_bad_hybrid_volume_type_1(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef,80 + size: 400M +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:structure:0:type') + + def test_bad_hybrid_volume_type_2(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef,\ +00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:structure:0:type') + + def test_bad_hybrid_volume_type_3(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: 80,ab + size: 400M +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:structure:0:type') + + def test_bad_hybrid_volume_type_4(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: 80, + size: 400M +""") + self.assertEqual( + str(cm.exception), + 'Invalid gadget.yaml @ volumes:first-image:structure:0:type') + + def test_bad_hybrid_volume_type_5(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: ,00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual(str(cm.exception), + 'gadget.yaml file is not valid YAML') + + def test_volume_filesystem_bad(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + filesystem: zfs +""") + self.assertEqual( + str(cm.exception), + ("Invalid gadget.yaml value 'zfs' @ " + 'volumes::structure::filesystem')) + + def test_content_spec_b_offset_write_larger_than_32bit(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + content: + - image: foo.img + offset-write: 8G +""") + # XXX 
https://github.com/alecthomas/voluptuous/issues/239 + front, colon, end = str(cm.exception).rpartition(':') + self.assertEqual( + front, + 'Invalid gadget.yaml @ volumes:first-image:structure:0:content:0') + self.assertIn(end, ['offset-write', 'image']) + + def test_content_spec_b_offset_write_is_4G(self): + # 4GiB is just outside 32 bits. + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + content: + - image: foo.img + offset-write: 4G +""") + # XXX https://github.com/alecthomas/voluptuous/issues/239 + front, colon, end = str(cm.exception).rpartition(':') + self.assertEqual( + front, + 'Invalid gadget.yaml @ volumes:first-image:structure:0:content:0') + self.assertIn(end, ['offset-write', 'image']) + + def test_wrong_content_1(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + filesystem: none + content: + - source: subdir/ + target: / +""") + self.assertEqual(str(cm.exception), + 'filesystem: none missing image file name') + + def test_wrong_content_2(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + filesystem: ext4 + content: + - image: foo.img +""") + self.assertEqual(str(cm.exception), + 'filesystem: vfat|ext4 missing source/target') + + def test_content_conflict_1(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + filesystem: ext4 + content: + - source: subdir/ + target: / + - image: foo.img +""") + # https://github.com/alecthomas/voluptuous/issues/239 + # + # XXX Because of the above bug, we get a sort of confusing error + # message. The voluptuous constraint engine will throw an exception + # with a less than helpful message, but it's the best we can do. + self.assertEqual( + str(cm.exception), + ('Invalid gadget.yaml @ ' + 'volumes:first-image:structure:0:content:1:image')) + + def test_content_conflict_2(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + schema: gpt + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M + filesystem: ext4 + content: + - image: foo.img + - source: subdir/ + target: / +""") + # https://github.com/alecthomas/voluptuous/issues/239 + # + # XXX Because of the above bug, we get a sort of confusing error + # message. The voluptuous constraint engine will throw an exception + # with a less than helpful message, but it's the best we can do. 
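The tests a little earlier in this file cope with that limitation by splitting the message on its last colon and accepting either trailing key. The same idea in isolation (the message literal is just an example of the shape these errors take, not output captured from a run):

    message = ('Invalid gadget.yaml @ '
               'volumes:first-image:structure:0:content:0:image')
    front, _, end = message.rpartition(':')
    assert front == ('Invalid gadget.yaml @ '
                     'volumes:first-image:structure:0:content:0')
    assert end in ('offset-write', 'image')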
+ self.assertEqual( + str(cm.exception), + ('Invalid gadget.yaml @ ' + 'volumes:first-image:structure:0:content:0:image')) + + def test_parser_format_version_error(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +format: 1 +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual(str(cm.exception), + 'Unsupported gadget.yaml format version: 1') + + def test_parser_format_version_negative(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +format: -1 +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual(str(cm.exception), + 'Unsupported gadget.yaml format version: -1') + + def test_parser_format_version_bogus(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +format: bogus +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 400M +""") + self.assertEqual(str(cm.exception), + 'Unsupported gadget.yaml format version: bogus') + + def test_mbr_structure_not_at_offset_zero_explicit(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - role: mbr + type: 00000000-0000-0000-0000-0000deadbeef + size: 446 + offset: 10 +""") + self.assertEqual(str(cm.exception), + 'mbr structure must start at offset 0') + + def test_mbr_structure_not_at_offset_zero_implicit(self): + with ExitStack() as resources: + cm = resources.enter_context( + self.assertRaises(GadgetSpecificationError)) + parse("""\ +volumes: + first-image: + bootloader: u-boot + structure: + - type: 00000000-0000-0000-0000-0000deadbeef + size: 2M + - role: mbr + type: 00000000-0000-0000-0000-0000deadbeef + size: 446 +""") + self.assertEqual(str(cm.exception), + 'mbr structure must start at offset 0') + + +class TestPartOrder(TestCase): + # LP: #1631423 + maxDiff = None + + def test_implicit_offset_ordering(self): + # None of the structures have an offset. They get ordered in yaml + # appearance order with offsets calculated to be the end of the + # previous structure. gadget_spec = parse("""\ volumes: first: @@ -1177,30 +1858,26 @@ structure: - type: 00000000-0000-0000-0000-dd00deadbeef size: 400M - offset: 800M - type: 00000000-0000-0000-0000-cc00deadbeef size: 500M - offset: 200M - type: 00000000-0000-0000-0000-bb00deadbeef size: 100M - type: 00000000-0000-0000-0000-aa00deadbeef size: 100M - offset: 1M """) parts = gadget_spec.volumes['first'].structures self.assertEqual( [(part.type, part.offset) for part in parts], [ - (UUID('00000000-0000-0000-0000-aa00deadbeef'), MiB(1)), - (UUID('00000000-0000-0000-0000-cc00deadbeef'), MiB(200)), - (UUID('00000000-0000-0000-0000-bb00deadbeef'), MiB(700)), - (UUID('00000000-0000-0000-0000-dd00deadbeef'), MiB(800)), + (UUID('00000000-0000-0000-0000-dd00deadbeef'), MiB(1)), + (UUID('00000000-0000-0000-0000-cc00deadbeef'), MiB(401)), + (UUID('00000000-0000-0000-0000-bb00deadbeef'), MiB(901)), + (UUID('00000000-0000-0000-0000-aa00deadbeef'), MiB(1001)), ]) - def test_mixed_offset_conflict(self): - # Most of the structures have an offset, but one doesn't. 
The - # bb00deadbeef part is implicitly offset at 700MiB which because it is - # 200MiB in size, conflicts with the offset @ 800M of dd00deadbeef. - self.assertRaises(ValueError, parse, """\ + def test_explicit_offset_ordering(self): + # All of the structures have an offset, specified in an order that + # does not match their partitioning order. + gadget_spec = parse("""\ volumes: first: schema: gpt @@ -1213,15 +1890,25 @@ size: 500M offset: 200M - type: 00000000-0000-0000-0000-bb00deadbeef - size: 200M + size: 100M + offset: 1200M - type: 00000000-0000-0000-0000-aa00deadbeef size: 100M offset: 1M """) + parts = gadget_spec.volumes['first'].structures + self.assertEqual( + [(part.type, part.offset) for part in parts], [ + (UUID('00000000-0000-0000-0000-dd00deadbeef'), MiB(800)), + (UUID('00000000-0000-0000-0000-cc00deadbeef'), MiB(200)), + (UUID('00000000-0000-0000-0000-bb00deadbeef'), MiB(1200)), + (UUID('00000000-0000-0000-0000-aa00deadbeef'), MiB(1)), + ]) - def test_explicit_offset_conflict(self): - # All of the structures have an offset, but they conflict. - self.assertRaises(ValueError, parse, """\ + def test_mixed_offset_ordering(self): + # Most of the structures have an offset, but one doesn't. The + # bb00deadbeef part is implicitly offset at 700MiB. + gadget_spec = parse("""\ volumes: first: schema: gpt @@ -1232,11 +1919,18 @@ offset: 800M - type: 00000000-0000-0000-0000-cc00deadbeef size: 500M - offset: 350M + offset: 200M - type: 00000000-0000-0000-0000-bb00deadbeef size: 100M - offset: 1200M - type: 00000000-0000-0000-0000-aa00deadbeef size: 100M offset: 1M """) + parts = gadget_spec.volumes['first'].structures + self.assertEqual( + [(part.type, part.offset) for part in parts], [ + (UUID('00000000-0000-0000-0000-dd00deadbeef'), MiB(800)), + (UUID('00000000-0000-0000-0000-cc00deadbeef'), MiB(200)), + (UUID('00000000-0000-0000-0000-bb00deadbeef'), MiB(700)), + (UUID('00000000-0000-0000-0000-aa00deadbeef'), MiB(1)), + ]) diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/tests/test_unstable.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/tests/test_unstable.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/tests/test_unstable.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/tests/test_unstable.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,896 +0,0 @@ -"""Test definitions for ubuntu_image.unstable module.""" - -from io import StringIO -from ubuntu_image._unstable import ( - FileTextSource, Origin, OriginMode, RFC822Record, RFC822SyntaxError, - UnknownTextSource, load_rfc822_records, normalize_rfc822_value) -from unittest import TestCase - - -class NormalizationTests(TestCase): - """Tests for normalize_rfc822_value()""" - - def test_smoke(self): - n = normalize_rfc822_value - self.assertEqual(n('foo'), 'foo') - self.assertEqual(n(' foo'), 'foo') - self.assertEqual(n('foo '), 'foo') - self.assertEqual(n(' foo '), 'foo') - self.assertEqual(n(' foo\n' - ' bar\n'), - ('foo\n' - 'bar')) - - def test_dot_handling(self): - n = normalize_rfc822_value - # single leading dot is stripped - self.assertEqual(n('foo\n' - '.\n' - 'bar\n'), - ('foo\n' - '\n' - 'bar')) - # the dot is stripped even if whitespace is present - self.assertEqual(n(' foo\n' - ' .\n' - ' bar\n'), - ('foo\n' - '\n' - 'bar')) - # Two dots don't invoke the special behaviour though - self.assertEqual(n(' foo\n' - ' ..\n' - ' bar\n'), - ('foo\n' - '..\n' - 'bar')) - # Regardless of whitespace - self.assertEqual(n('foo\n' - '..\n' - 'bar\n'), - ('foo\n' - '..\n' - 'bar')) - - -class 
TestRFC822Record(TestCase): - def setUp(self): - self.raw_data = dict(key=' value') - self.data = dict(key='value') - self.origin = Origin(FileTextSource('file.txt'), 1, 1) - self.record = RFC822Record(self.data, self.origin, self.raw_data) - - def test_repr(self): - self.assertEqual( - repr(self.record), - ">") - - def test_eq_something_else(self): - # Use assertFalse() so we can actually trigger the __eq__() method - # path we're testing. - self.assertFalse(self.record == 7) - - def test_ne_something_else(self): - # Use assertTrue() so we can actually trigger the __ne__() method path - # we're testing. - self.assertTrue(self.record != 7) - - def test_raw_data(self): - self.assertEqual(self.record.raw_data, self.raw_data) - - def test_data(self): - self.assertEqual(self.record.data, self.data) - - def test_origin(self): - self.assertEqual(self.record.origin, self.origin) - - def test_equality(self): - # Equality is compared by normalized data, the raw data doesn't count. - other_raw_data = dict(key='value ') - # This other raw data is actually different to the one we're going to - # test against. - self.assertNotEqual(other_raw_data, self.raw_data) - # Let's make another record with different raw data. - other_record = RFC822Record(self.data, self.origin, other_raw_data) - # The normalized data is identical. - self.assertEqual(other_record.data, self.record.data) - # The raw data is not. - self.assertNotEqual(other_record.raw_data, self.record.raw_data) - # The origin is the same (just a sanity check). - self.assertEqual(other_record.origin, self.record.origin) - # Let's look at the whole object, they should be equal. Use - # assertTrue() and assertFalse() to definitively test the method - # paths. - self.assertTrue(other_record == self.record) - self.assertFalse(other_record != self.record) - - -class RFC822ParserTests(TestCase): - loader = load_rfc822_records - - def test_empty(self): - with StringIO('') as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 0) - - def test_parsing_strings_preserves_newlines(self): - # Ensure that the special behavior, when a string is passed instead of - # a stream, is parsed the same way as regular streams are, that is, - # that newlines are preserved. 
- text = ('key:\n' - ' line1\n' - ' line2\n') - records_str = type(self).loader(text) - with StringIO(text) as stream: - records_stream = type(self).loader(stream) - self.assertEqual(records_str, records_stream) - - def test_preserves_whitespace1(self): - with StringIO('key: value ') as stream: - records = type(self).loader(stream) - self.assertEqual(records[0].data, {'key': 'value'}) - self.assertEqual(records[0].raw_data, {'key': 'value '}) - - def test_preserves_whitespace2(self): - with StringIO('key:\n value ') as stream: - records = type(self).loader(stream) - self.assertEqual(records[0].data, {'key': 'value'}) - self.assertEqual(records[0].raw_data, {'key': 'value '}) - - def test_strips_newlines1(self): - with StringIO('key: value \n') as stream: - records = type(self).loader(stream) - self.assertEqual(records[0].data, {'key': 'value'}) - self.assertEqual(records[0].raw_data, {'key': 'value \n'}) - - def test_strips_newlines2(self): - with StringIO('key:\n value \n') as stream: - records = type(self).loader(stream) - self.assertEqual(records[0].data, {'key': 'value'}) - self.assertEqual(records[0].raw_data, {'key': 'value \n'}) - - def test_single_record(self): - with StringIO('key:value') as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': 'value'}) - self.assertEqual(records[0].raw_data, {'key': 'value'}) - - def test_single_record_with_source(self): - with StringIO('key: value') as stream: - records = type(self).loader(stream, source='file.txt') - self.assertEqual(len(records), 1) - self.assertEqual(records[0].origin.source, 'file.txt') - - def test_comments(self): - # Ensure that comments are stripped and don't break multi-line - # handling. - text = ( - '# this is a comment\n' - 'key:\n' - ' multi-line value\n' - '# this is a comment\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': 'multi-line value'}) - self.assertEqual(records[0].raw_data, {'key': 'multi-line value\n'}) - - def test_dot_escape(self): - # Ensure that the dot is not processed in any way.. - - # This part of the code is now handled by another layer. 
- text = ( - 'key: something\n' - ' .\n' - ' .this\n' - ' ..should\n' - ' ...work\n' - ) - expected_value = ( - 'something\n' - '\n' - '.this\n' - '..should\n' - '...work' - ) - expected_raw_value = ( - 'something\n' - '.\n' - '.this\n' - '..should\n' - '...work\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': expected_value}) - self.assertEqual(records[0].raw_data, {'key': expected_raw_value}) - - def test_many_newlines(self): - text = ( - '\n' - '\n' - 'key1:value1\n' - '\n' - '\n' - '\n' - 'key2:value2\n' - '\n' - '\n' - 'key3:value3\n' - '\n' - '\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 3) - self.assertEqual(records[0].data, {'key1': 'value1'}) - self.assertEqual(records[1].data, {'key2': 'value2'}) - self.assertEqual(records[2].data, {'key3': 'value3'}) - self.assertEqual(records[0].raw_data, {'key1': 'value1\n'}) - self.assertEqual(records[1].raw_data, {'key2': 'value2\n'}) - self.assertEqual(records[2].raw_data, {'key3': 'value3\n'}) - - def test_many_records(self): - text = ( - 'key1:value1\n' - '\n' - 'key2:value2\n' - '\n' - 'key3:value3\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 3) - self.assertEqual(records[0].data, {'key1': 'value1'}) - self.assertEqual(records[1].data, {'key2': 'value2'}) - self.assertEqual(records[2].data, {'key3': 'value3'}) - self.assertEqual(records[0].raw_data, {'key1': 'value1\n'}) - self.assertEqual(records[1].raw_data, {'key2': 'value2\n'}) - self.assertEqual(records[2].raw_data, {'key3': 'value3\n'}) - - def test_multiline_value(self): - text = ( - 'key:\n' - ' longer\n' - ' value\n' - ) - expected_value = ( - 'longer\n' - 'value' - ) - expected_raw_value = ( - 'longer\n' - 'value\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': expected_value}) - self.assertEqual(records[0].raw_data, {'key': expected_raw_value}) - - def test_multiline_value_with_space(self): - text = ( - 'key:\n' - ' longer\n' - ' .\n' - ' value\n' - ) - expected_value = ( - 'longer\n' - '\n' - 'value' - ) - expected_raw_value = ( - 'longer\n' - '.\n' - 'value\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': expected_value}) - self.assertEqual(records[0].raw_data, {'key': expected_raw_value}) - - def test_multiline_value_with_space__deep_indent(self): - # Ensure that equally indented spaces are removed, even if multiple - # spaces are used (more than one that is typically removed). The raw - # value should have just the one space removed. - text = ( - 'key:\n' - ' longer\n' - ' .\n' - ' value\n' - ) - expected_value = ( - 'longer\n' - '\n' - 'value' - ) - # HINT: exactly as the original above but one space shorter. - expected_raw_value = ( - ' longer\n' - ' .\n' - ' value\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': expected_value}) - self.assertEqual(records[0].raw_data, {'key': expected_raw_value}) - - def test_multiline_value_with_period(self): - # Ensure that the dot is not processed in any way. - # - # This part of the code is now handled by another layer. 
- text = ( - 'key:\n' - ' longer\n' - ' ..\n' - ' value\n' - ) - expected_value = ( - 'longer\n' - '..\n' - 'value' - ) - expected_raw_value = ( - 'longer\n' - '..\n' - 'value\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': expected_value}) - self.assertEqual(records[0].raw_data, {'key': expected_raw_value}) - - def test_many_multiline_values(self): - text = ( - 'key1:initial\n' - ' longer\n' - ' value 1\n' - '\n' - 'key2:\n' - ' longer\n' - ' value 2\n' - ) - expected_value1 = ( - 'initial\n' - 'longer\n' - 'value 1' - ) - expected_value2 = ( - 'longer\n' - 'value 2' - ) - expected_raw_value1 = ( - 'initial\n' - 'longer\n' - 'value 1\n' - ) - expected_raw_value2 = ( - 'longer\n' - 'value 2\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 2) - self.assertEqual(records[0].data, {'key1': expected_value1}) - self.assertEqual(records[1].data, {'key2': expected_value2}) - self.assertEqual(records[0].raw_data, {'key1': expected_raw_value1}) - self.assertEqual(records[1].raw_data, {'key2': expected_raw_value2}) - - def test_proper_parsing_nested_multiline(self): - text = ( - 'key:\n' - ' nested: stuff\n' - ' even:\n' - ' more\n' - ' text\n' - ) - expected_value = ( - 'nested: stuff\n' - 'even:\n' - ' more\n' - ' text' - ) - expected_raw_value = ( - 'nested: stuff\n' - 'even:\n' - ' more\n' - ' text\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': expected_value}) - self.assertEqual(records[0].raw_data, {'key': expected_raw_value}) - - def test_proper_parsing_nested_multiline__deep_indent(self): - text = ( - 'key:\n' - ' nested: stuff\n' - ' even:\n' - ' more\n' - ' text\n' - ) - expected_value = ( - 'nested: stuff\n' - 'even:\n' - ' more\n' - ' text' - ) - # HINT: exactly as the original above but one space shorter. 
- expected_raw_value = ( - ' nested: stuff\n' - ' even:\n' - ' more\n' - ' text\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': expected_value}) - self.assertEqual(records[0].raw_data, {'key': expected_raw_value}) - - def test_irrelevant_whitespace(self): - text = 'key : value ' - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': 'value'}) - self.assertEqual(records[0].raw_data, {'key': 'value '}) - - def test_relevant_whitespace(self): - text = ( - 'key:\n' - ' value\n' - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': 'value'}) - self.assertEqual(records[0].raw_data, {'key': 'value\n'}) - - def test_bad_multiline(self): - text = ' extra value' - with StringIO(text) as stream: - with self.assertRaises(RFC822SyntaxError) as call: - type(self).loader(stream) - self.assertEqual(call.exception.msg, 'Unexpected multi-line value') - - def test_garbage(self): - text = 'garbage' - with StringIO(text) as stream: - with self.assertRaises(RFC822SyntaxError) as call: - type(self).loader(stream) - self.assertEqual( - call.exception.msg, - "Unexpected non-empty line: 'garbage'") - - def test_syntax_error(self): - text = 'key1 = value1' - with StringIO(text) as stream: - with self.assertRaises(RFC822SyntaxError) as call: - type(self).loader(stream) - self.assertEqual( - call.exception.msg, - "Unexpected non-empty line: 'key1 = value1'") - - def test_duplicate_error(self): - text = ( - "key1: value1\n" - "key1: value2\n" - ) - with StringIO(text) as stream: - with self.assertRaises(RFC822SyntaxError) as call: - type(self).loader(stream) - self.assertEqual(call.exception.msg, ( - "Job has a duplicate key 'key1' with old value 'value1\\n'" - " and new value 'value2\\n'")) - - def test_origin_from_stream_is_Unknown(self): - # Verify that gen_rfc822_records() uses origin instances with source - # equal to UnknownTextSource, when no explicit source is provided and - # the stream has no name to infer a FileTextSource() from. - expected_origin = Origin(UnknownTextSource(), 1, 1) - with StringIO("key:value") as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': 'value'}) - self.assertEqual(records[0].origin, expected_origin) - - def test_origin_from_filename_is_filename(self): - # If the test's origin has a filename, we need a valid origin - # with proper data. - # - # We're faking the name by using a StringIO subclass with a - # name property, which is how rfc822 gets that data. 
- expected_origin = Origin(FileTextSource("file.txt"), 1, 1) - with NamedStringIO("key:value", - fake_filename="file.txt") as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].data, {'key': 'value'}) - self.assertEqual(records[0].origin, expected_origin) - - def test_field_offset_map_is_computed(self): - text = ( - "a: value-a\n" # offset 0 - "b: value-b\n" # offset 1 - "# comment\n" # offset 2 - "c:\n" # offset 3 - " value-c.1\n" # offset 4 - " value-c.2\n" # offset 5 - "\n" - "d: value-d\n" # offset 0 - ) - with StringIO(text) as stream: - records = type(self).loader(stream) - self.assertEqual(len(records), 2) - self.assertEqual(records[0].data, { - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c.1\nvalue-c.2', - }) - self.assertEqual(records[0].field_offset_map, { - 'a': 0, - 'b': 1, - 'c': 4, - }) - self.assertEqual(records[1].data, { - 'd': 'value-d', - }) - self.assertEqual(records[1].field_offset_map, { - 'd': 0, - }) - - -class NamedStringIO(StringIO): - """Subclass of StringIO with a name attribute.""" - - def __init__(self, string, fake_filename=None): - super(NamedStringIO, self).__init__(string) - self._fake_filename = fake_filename - - @property - def name(self): - return(self._fake_filename) - - -class RFC822WriterTests(TestCase): - """Tests for the :meth:`RFC822Record.dump()` method.""" - - def test_single_record(self): - with StringIO() as stream: - RFC822Record({'key': 'value'}).dump(stream) - self.assertEqual(stream.getvalue(), "key: value\n\n") - - def test_multiple_record(self): - with StringIO() as stream: - RFC822Record({'key1': 'value1', 'key2': 'value2'}).dump(stream) - self.assertIn( - stream.getvalue(), ( - "key1: value1\nkey2: value2\n\n", - "key2: value2\nkey1: value1\n\n")) - - def test_multiline_value(self): - text = ( - "key:\n" - " longer\n" - " value\n\n" - ) - with StringIO() as stream: - RFC822Record({'key': 'longer\nvalue'}).dump(stream) - self.assertEqual(stream.getvalue(), text) - - def test_multiline_value_with_space(self): - text = ( - "key:\n" - " longer\n" - " .\n" - " value\n\n" - ) - with StringIO() as stream: - RFC822Record({'key': 'longer\n\nvalue'}).dump(stream) - self.assertEqual(stream.getvalue(), text) - - def test_multiline_value_with_period(self): - text = ( - "key:\n" - " longer\n" - " ..\n" - " value\n\n" - ) - with StringIO() as stream: - RFC822Record({'key': 'longer\n.\nvalue'}).dump(stream) - self.assertEqual(stream.getvalue(), text) - - def test_type_error(self): - with StringIO() as stream: - with self.assertRaises(AttributeError): - RFC822Record(['key', 'value']).dump(stream) - - def test_dump_list(self): - with StringIO() as stream: - RFC822Record({'key': ['one', 'two']}).dump(stream) - self.assertEqual(stream.getvalue(), - 'key:\n' - ' one\n' - ' two\n\n') - - def test_dump_multiline(self): - with StringIO() as stream: - RFC822Record({'key': 'one\ntwo\n\n'}).dump(stream) - self.assertEqual(stream.getvalue(), - 'key:\n' - ' one\n' - ' two\n' - ' .\n\n') - - -class RFC822SyntaxErrorTests(TestCase): - """Tests for RFC822SyntaxError class.""" - - def test_hash(self): - # Verify that RFC822SyntaxError is hashable. 
- self.assertEqual( - hash(RFC822SyntaxError('file.txt', 10, 'msg')), - hash(RFC822SyntaxError('file.txt', 10, 'msg'))) - - def test_repr(self): - self.assertEqual( - repr(RFC822SyntaxError('file.txt', 10, 'msg')), - "RFC822SyntaxError('file.txt', 10, 'msg')") - - def test_eq(self): - first = RFC822SyntaxError('file.txt', 10, 'msg') - second = RFC822SyntaxError('file.txt', 10, 'msg') - self.assertTrue(first == second) - - def test_eq_other(self): - self.assertFalse(RFC822SyntaxError('file.txt', 10, 'msg') == 7) - - def test_ne(self): - first = RFC822SyntaxError('file.txt', 10, 'msg') - second = RFC822SyntaxError('file.txt', 20, 'msg') - self.assertTrue(first != second) - - def test_ne_other(self): - self.assertTrue(RFC822SyntaxError('file.txt', 10, 'msg') != 7) - - -class UnknownTextSourceTests(TestCase): - """Tests for UnknownTextSource class.""" - - def setUp(self): - self.src = UnknownTextSource() - - def test_str(self): - # Verify how UnknownTextSource. __str__() works. - self.assertEqual(str(self.src), "???") - - def test_repr(self): - # Verify how UnknownTextSource.__repr__() works. - self.assertEqual(repr(self.src), "UnknownTextSource()") - - def test_eq(self): - # Verify instances of UnknownTextSource are all equal to each other - # but not equal to any other object. - other_src = UnknownTextSource() - self.assertTrue(self.src == other_src) - self.assertFalse(self.src == "???") - - def test_eq_others(self): - # Verify instances of UnknownTextSource are unequal to instances of - # other classes. - self.assertTrue(self.src != object()) - self.assertFalse(self.src == object()) - - def test_gt(self): - # Verify that instances of UnknownTextSource are not ordered. - other_src = UnknownTextSource() - self.assertFalse(self.src < other_src) - self.assertFalse(other_src < self.src) - - def test_gt_others(self): - # Verify that instances of UnknownTextSource are not comparable to - # other objects. - with self.assertRaises(TypeError): - self.src < object() - with self.assertRaises(TypeError): - object() < self.src - - -class FileTextSourceTests(TestCase): - """Tests for FileTextSource class.""" - - _FILENAME = "filename" - _CLS = FileTextSource - - def setUp(self): - self.src = self._CLS(self._FILENAME) - - def test_filename(self): - """verify that FileTextSource.filename works.""" - self.assertEqual(self._FILENAME, self.src.filename) - - def test_str(self): - """verify that FileTextSource.__str__() works.""" - self.assertEqual(str(self.src), self._FILENAME) - - def test_repr(self): - """verify that FileTextSource.__repr__() works.""" - self.assertEqual( - repr(self.src), - "{}({!r})".format(self._CLS.__name__, self._FILENAME)) - - def test_eq(self): - # Verify that FileTextSource compares equal to other instances with - # the same filename and unequal to instances with different filenames. - self.assertTrue(self._CLS('foo') == self._CLS('foo')) - self.assertTrue(self._CLS('foo') != self._CLS('bar')) - - def test_eq_others(self): - # Verify instances of FileTextSource are not equal to instances of - # other classes. - self.assertTrue(self._CLS('foo') != object()) - self.assertFalse(self._CLS('foo') == object()) - - def test_gt(self): - # Verify that FileTextSource is ordered by filename. - self.assertTrue(self._CLS('a') < self._CLS('b') < self._CLS('c')) - self.assertTrue(self._CLS('c') > self._CLS('b') > self._CLS('a')) - - def test_gt_others(self): - # Verify that instances of FileTextSource are not comparable to other - # objects. 
- with self.assertRaises(TypeError): - self.src < object() - with self.assertRaises(TypeError): - object() < self.src - - def test_relative_to(self): - # Verify that FileTextSource.relative_to() works. - self.assertEqual( - self._CLS('/path/to/file.txt').relative_to('/path/to'), - self._CLS('file.txt')) - - -class OriginTests(TestCase): - """Tests for Origin class.""" - - def setUp(self): - self.origin = Origin(FileTextSource('file.txt'), 10, 12) - - def test_smoke(self): - # verify that all three instance attributes actually work. - self.assertEqual(self.origin.source.filename, 'file.txt') - self.assertEqual(self.origin.line_start, 10) - self.assertEqual(self.origin.line_end, 12) - - def test_repr(self): - # verify that Origin.__repr__() works. - expected = ("") - observed = repr(self.origin) - self.assertEqual(expected, observed) - - def test_str(self): - # verify that Origin.__str__() works. - expected = 'file.txt:10-12' - observed = str(self.origin) - self.assertEqual(expected, observed) - - def test_str__single_line(self): - # verify that Origin.__str__() behaves differently when the range - # describes a single line. - expected = 'file.txt:15' - observed = str(Origin(FileTextSource('file.txt'), 15, 15)) - self.assertEqual(expected, observed) - - def test_str__whole_file(self): - # verify that Origin.__str__() behaves differently when the range - # is empty. - expected = 'file.txt' - observed = str(Origin(FileTextSource('file.txt'))) - self.assertEqual(expected, observed) - - def test_eq(self): - # Verify instances of Origin are all equal to other instances with the - # same instance attributes but not equal to instances with different - # attributes. - origin1 = Origin( - self.origin.source, self.origin.line_start, self.origin.line_end) - origin2 = Origin( - self.origin.source, self.origin.line_start, self.origin.line_end) - self.assertTrue(origin1 == origin2) - origin_other1 = Origin( - self.origin.source, self.origin.line_start + 1, - self.origin.line_end) - self.assertTrue(origin1 != origin_other1) - self.assertFalse(origin1 == origin_other1) - origin_other2 = Origin( - self.origin.source, self.origin.line_start, - self.origin.line_end + 1) - self.assertTrue(origin1 != origin_other2) - self.assertFalse(origin1 == origin_other2) - origin_other3 = Origin( - FileTextSource('unrelated'), self.origin.line_start, - self.origin.line_end) - self.assertTrue(origin1 != origin_other3) - self.assertFalse(origin1 == origin_other3) - - def test_eq_other(self): - # verify instances of UnknownTextSource are unequal to instances of - # other classes. - self.assertTrue(self.origin != object()) - self.assertFalse(self.origin == object()) - - def test_gt(self): - # Verify that Origin instances are ordered by their constituting - # components. - self.assertTrue( - Origin(FileTextSource('file.txt'), 1, 1) < - Origin(FileTextSource('file.txt'), 1, 2) < - Origin(FileTextSource('file.txt'), 1, 3)) - self.assertTrue( - Origin(FileTextSource('file.txt'), 1, 10) < - Origin(FileTextSource('file.txt'), 2, 10) < - Origin(FileTextSource('file.txt'), 3, 10)) - self.assertTrue( - Origin(FileTextSource('file1.txt'), 1, 10) < - Origin(FileTextSource('file2.txt'), 1, 10) < - Origin(FileTextSource('file3.txt'), 1, 10)) - - def test_gt_other(self): - # Verify that Origin instances are not comparable to other objects. 
- with self.assertRaises(TypeError): - self.origin < object() - with self.assertRaises(TypeError): - object() < self.origin - - def test_relative_to(self): - # Verify how Origin.relative_to() works in various situations. - # - # If the source does not have relative_to method, nothing is changed. - origin = Origin(UnknownTextSource(), 1, 2) - self.assertIs(origin.relative_to('/some/path'), origin) - # otherwise the source is replaced and a new origin is returned - self.assertEqual( - Origin( - FileTextSource('/some/path/file.txt'), 1, 2 - ).relative_to('/some/path'), - Origin(FileTextSource('file.txt'), 1, 2)) - - def test_with_offset(self): - # Verify how Origin.with_offset() works as expected. - origin1 = Origin(UnknownTextSource(), 1, 2) - origin2 = origin1.with_offset(10) - self.assertEqual(origin2.line_start, 11) - self.assertEqual(origin2.line_end, 12) - self.assertIs(origin2.source, origin1.source) - - def test_with_offset_whole_file(self): - origin1 = Origin(UnknownTextSource()) - self.assertEqual(origin1.mode(), OriginMode.whole_file) - self.assertEqual(origin1.with_offset(10), origin1) - - def test_just_line(self): - origin1 = Origin(UnknownTextSource(), 1, 2) - origin2 = origin1.just_line() - self.assertEqual(origin2.line_start, origin1.line_start) - self.assertEqual(origin2.line_end, origin1.line_start) - self.assertIs(origin2.source, origin1.source) - - def test_just_file(self): - origin1 = Origin(UnknownTextSource(), 1, 2) - origin2 = origin1.just_file() - self.assertEqual(origin2.line_start, None) - self.assertEqual(origin2.line_end, None) - self.assertIs(origin2.source, origin1.source) diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/_unstable.py ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/_unstable.py --- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/_unstable.py 2016-11-04 22:54:16.000000000 +0000 +++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/_unstable.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,584 +0,0 @@ -"""Internal, unstable API compatibility module. - -This module contains unstable APIs for things that will be delegated to other -tools later. While we will do our best not to break those APIs when the -implementation changes, this is not guaranteed. -""" - -import os -import re -import gettext -import logging -import textwrap - -from enum import Enum -from functools import total_ordering - - -_ = gettext.gettext -_logger = logging.getLogger('ubuntu-image') - - -def normalize_rfc822_value(value): - # Remove the multi-line dot marker. - value = re.sub('^(\s*)\.$', '\\1', value, flags=re.M) - # Remove consistent indentation. - value = textwrap.dedent(value) - # Strip the remaining whitespace. - return value.strip() - - -class OriginMode(Enum): - """Possible "modes" an :class:`Origin` can operate in.""" - - whole_file = 'whole-file' - single_line = 'single-line' - line_range = 'line-range' - - -@total_ordering -class Origin: - """Simple class for tracking where something came from. - - This class supports "pinpointing" something in a block of text. The - block is described by the source attribute. The actual range is - described by line_start (inclusive) and line_end (exclusive). - - :attribute source: - Something that describes where the text came from. - - :attribute line_start: - The number of the line where the record begins. This can be None - when the intent is to cover the whole file. This can also be equal - to line_end (when not None) if the intent is to show a single line. 
- - :attribute line_end: - The number of the line where the record ends - """ - - __slots__ = ['source', 'line_start', 'line_end'] - - def __init__(self, source, line_start=None, line_end=None): - self.source = source - self.line_start = line_start - self.line_end = line_end - - def mode(self): - """Compute the "mode" of this origin instance. - - :returns: - :attr:`OriginMode.whole_file`, :attr:`OriginMode.single_line` - or :attr:`OriginMode.line_range`. - - The mode tells if this instance is describing the whole file, - a range of lines or just a single line. It is mostly used internally - by the implementation. - """ - if self.line_start is None and self.line_end is None: - return OriginMode.whole_file - elif self.line_start == self.line_end: - return OriginMode.single_line - else: - return OriginMode.line_range - - def __repr__(self): - return '<{} source:{!r} line_start:{} line_end:{}>'.format( - self.__class__.__name__, - self.source, self.line_start, self.line_end) - - def __str__(self): - mode = self.mode() - if mode is OriginMode.whole_file: - return str(self.source) - elif mode is OriginMode.single_line: - return '{}:{}'.format(self.source, self.line_start) - elif mode is OriginMode.line_range: - return '{}:{}-{}'.format( - self.source, self.line_start, self.line_end) - else: # pragma: nocover - raise AssertionError('Bad mode: {}'.format(mode)) - - def relative_to(self, base_dir): - """Create a new relative Origin. - - The new Origin has a source relative to the specified base directory. - - :param base_dir: - A base directory name - :returns: - A new Origin with source replaced by the result of calling - relative_to(base_dir) on the current source *iff* the current - source has that method, self otherwise. - - This method is useful for obtaining user friendly Origin objects that - have short, understandable filenames. - """ - relative_source = self.source.relative_to(base_dir) - if relative_source is not self.source: - return Origin(relative_source, self.line_start, self.line_end) - else: - return self - - def with_offset(self, offset): - """Create a new Origin offset by a number of lines. - - The new Origin adds an offset of a specific number of lines. - - :param offset: - Number of lines to add (or subtract) - :returns: - A new Origin object. - """ - mode = self.mode() - if mode is OriginMode.whole_file: - return self - elif mode in (OriginMode.single_line, OriginMode.line_range): - return Origin( - self.source, self.line_start + offset, self.line_end + offset) - else: # pragma: nocover - raise AssertionError('Bad mode: {}'.format(mode)) - - def just_line(self): - """Create a new Origin that points to the start line. - - :returns: - A new Origin with the end_line equal to start_line. - This effectively makes the origin describe a single line. - """ - return Origin(self.source, self.line_start, self.line_start) - - def just_file(self): - """Create a new Origin that points to the whole file. - - :returns: - A new Origin with line_end and line_start both set to None. 
- """ - return Origin(self.source) - - def __eq__(self, other): - if isinstance(other, Origin): - return ((self.source, self.line_start, self.line_end) == - (other.source, other.line_start, other.line_end)) - else: - return NotImplemented - - def __gt__(self, other): - if isinstance(other, Origin): - return ((self.source, self.line_start, self.line_end) > - (other.source, other.line_start, other.line_end)) - else: - return NotImplemented - - -@total_ordering -class UnknownTextSource: - """Class indicating that the source of text is unknown. - - This instances of this class are constructed by gen_rfc822_records() when - no explicit source is provided and the stream has no name. - """ - - def __str__(self): - return '???' - - def __repr__(self): - return '{}()'.format(self.__class__.__name__) - - def __eq__(self, other): - if isinstance(other, UnknownTextSource): - return True - else: - return False - - def __gt__(self, other): - if isinstance(other, UnknownTextSource): - return False - else: - return NotImplemented - - def relative_to(self, path): - return self - - -@total_ordering -class FileTextSource: - """Class indicating that text came from a file. - - :attribute filename: - name of the file something comes from - """ - - def __init__(self, filename): - self.filename = filename - - def __str__(self): - return self.filename - - def __repr__(self): - return '{}({!r})'.format( - self.__class__.__name__, self.filename) - - def __eq__(self, other): - if isinstance(other, FileTextSource): - return self.filename == other.filename - else: - return False - - def __gt__(self, other): - if isinstance(other, FileTextSource): - return self.filename > other.filename - else: - return NotImplemented - - def relative_to(self, base_dir): - """Return a FileTextSource with a new path. - - This computes a FileTextSource with the filename being a - relative path from the specified base directory. - - :param base_dir: - A base directory name - :returns: - A new FileTextSource with filename relative to that base_dir. - """ - return self.__class__(os.path.relpath(self.filename, base_dir)) - - -class RFC822Record: - """Class for tracking RFC822 records. - - This is a simple container for the dictionary of data. The data is - represented by two copies, one original and one after value normalization. - Value normalization strips out excess whitespace and processes the magic - leading dot syntax that is essential for empty newlines. - - Comparison is performed on the normalized data only, raw data is stored for - reference but does not differentiate records. - - Each instance also holds the origin of the data (location of the - file/stream where it was parsed from). - """ - - def __init__(self, data, origin=None, raw_data=None, - field_offset_map=None): - """Initialize a new record. - - :param data: - A dictionary with normalized record data. - :param origin: - A :class:`Origin` instance that describes where the data came from. - :param raw_data: - An optional dictionary with raw record data. If omitted then it - will default to normalized data (as the same object, without making - a copy). - :param field_offset_map: - An optional dictionary with offsets (in line numbers) of - each field. 
- """ - self._data = data - self._raw_data = (data if raw_data is None else raw_data) - self._origin = origin - self._field_offset_map = field_offset_map - - def __repr__(self): - return '<{} data:{!r} origin:{!r}>'.format( - self.__class__.__name__, self._data, self._origin) - - def __eq__(self, other): - if isinstance(other, RFC822Record): - return (self._data, self._origin) == (other._data, other._origin) - return NotImplemented - - def __ne__(self, other): - if isinstance(other, RFC822Record): - return (self._data, self._origin) != (other._data, other._origin) - return NotImplemented - - @property - def data(self): - """The normalized version of the data set (dictionary) - - This property exposes the normalized version of the data encapsulated - in this record. Normalization is performed with - :func:`normalize_rfc822_value()`. Only values are normalized, keys are - left intact. - """ - return self._data - - @property - def raw_data(self): - """The raw version of data set (dictionary). - - This property exposes the raw (original) version of the data - encapsulated by this record. This data is as it was originally parsed, - including all the whitespace layout. - - In some records this may be 'normal' data object itself (same object). - """ - return self._raw_data - - @property - def origin(self): - """The origin of the record.""" - return self._origin - - @property - def field_offset_map(self): - """The field-to-line-number-offset mapping. - - A dictionary mapping field name to offset (in lines) relative to the - origin where that field definition commences. - - Note: the return value may be None - """ - return self._field_offset_map - - def dump(self, stream): - """Dump this record to a stream.""" - def _dump_part(stream, key, values): - stream.write('{}:\n'.format(key)) - for value in values: - if not value: - stream.write(' .\n') - elif value == '.': - stream.write(' ..\n') - else: - stream.write(' {}\n'.format(value)) - for key, value in self.data.items(): - if isinstance(value, (list, tuple)): - _dump_part(stream, key, value) - elif isinstance(value, str) and '\n' in value: - values = value.split('\n') - if not values[-1]: - values = values[:-1] - _dump_part(stream, key, values) - else: - stream.write('{}: {}\n'.format(key, value)) - stream.write('\n') - - -class RFC822SyntaxError(SyntaxError): - """SyntaxError subclass for RFC822 parsing functions""" - - def __init__(self, filename, lineno, msg): - self.filename = filename - self.lineno = lineno - self.msg = msg - - def __repr__(self): - return '{}({!r}, {!r}, {!r})'.format( - self.__class__.__name__, self.filename, self.lineno, self.msg) - - def __eq__(self, other): - if isinstance(other, RFC822SyntaxError): - return ((self.filename, self.lineno, self.msg) == ( - other.filename, other.lineno, other.msg)) - return NotImplemented - - def __ne__(self, other): - if isinstance(other, RFC822SyntaxError): - return ((self.filename, self.lineno, self.msg) != ( - other.filename, other.lineno, other.msg)) - return NotImplemented - - def __hash__(self): - return hash((self.filename, self.lineno, self.msg)) - - -def load_rfc822_records(stream, data_cls=dict, source=None): - """Load a sequence of rfc822-like records from a text stream. - - :param stream: - A file-like object from which to load the rfc822 data. - :param data_cls: - The class of the dictionary-like type to hold the results. This is - mainly there so that callers may pass collections.OrderedDict. - :param source: - An object that describes where stream data is coming from. 
- - If None, it will be inferred from the stream (if possible). Specialized - callers should provider a custom source object to allow developers to - accurately keep track of where (possibly problematic) RFC822 data is - coming from. If this is None and inferring fails then all of the loaded - records will have a None origin. - - Each record consists of any number of key-value pairs. Subsequent records - are separated by one blank line. A record key may have a multi-line value - if the line starts with whitespace character. - - Returns a list of subsequent values as instances RFC822Record class. If - the optional data_cls argument is collections.OrderedDict then the values - retain their original ordering. - """ - return list(gen_rfc822_records(stream, data_cls, source)) - - -def gen_rfc822_records(stream, data_cls=dict, source=None): - """Load a sequence of rfc822-like records from a text stream. - - :param stream: - A file-like object from which to load the rfc822 data. - :param data_cls: - The class of the dictionary-like type to hold the results. This is - mainly there so that callers may pass collections.OrderedDict. - :param source: - An object that describes where stream data is coming from. - - If None, it will be inferred from the stream (if possible). Specialized - callers should provider a custom source object to allow developers to - accurately keep track of where (possibly problematic) RFC822 data is - coming from. If this is None and inferring fails then all of the loaded - records will have a None origin. - - Each record consists of any number of key-value pairs. Subsequent records - are separated by one blank line. A record key may have a multi-line value - if the line starts with whitespace character. - - Returns a list of subsequent values as instances RFC822Record class. If - the optional data_cls argument is collections.OrderedDict then the values - retain their original ordering. - """ - record = None - key = None - value_list = None - origin = None - field_offset_map = None - # If the source was not provided then try constructing a FileTextSource - # from the name of the stream. If that fails, keep using None. 
- if source is None: - try: - source = FileTextSource(stream.name) - except AttributeError: - source = UnknownTextSource() - - def _syntax_error(msg): - """Report a syntax error in the current line.""" - try: - filename = stream.name - except AttributeError: - filename = None - return RFC822SyntaxError(filename, lineno, msg) - - def _new_record(): - """Reset local state to track new record.""" - nonlocal key - nonlocal value_list - nonlocal record - nonlocal origin - nonlocal field_offset_map - key = None - value_list = None - origin = Origin(source, None, None) - field_offset_map = {} - record = RFC822Record(data_cls(), origin, data_cls(), field_offset_map) - - def _commit_key_value_if_needed(): - """Finalize the most recently seen key: value pair.""" - nonlocal key - if key is not None: - raw_value = ''.join(value_list) - normalized_value = normalize_rfc822_value(raw_value) - record.raw_data[key] = raw_value - record.data[key] = normalized_value - _logger.debug(_('Committed key/value %r=%r'), key, - normalized_value) - key = None - - def _set_start_lineno_if_needed(): - """Remember the line number of the record start unless already set.""" - if origin and record.origin.line_start is None: - record.origin.line_start = lineno - - def _update_end_lineno(): - """Update the line number of the record tail.""" - record.origin.line_end = lineno - - # Start with an empty record. - _new_record() - # Support simple text strings. - if isinstance(stream, str): - stream = iter(stream.splitlines(keepends=True)) - # Iterate over subsequent lines of the stream - for lineno, line in enumerate(stream, start=1): - _logger.debug(_('Looking at line %d:%r'), lineno, line) - # Treat # as comments - if line.startswith('#'): - pass - # Treat empty lines as record separators - elif line.strip() == '': - # Commit the current record so that the multi-line value of the - # last key, if any, is saved as a string - _commit_key_value_if_needed() - # If data is non-empty, yield the record, this allows us to safely - # use newlines for formatting - if record.data: - _logger.debug(_('yielding record: %r'), record) - yield record - # Reset local state so that we can build a new record - _new_record() - # Treat lines staring with whitespace as multi-line continuation of the - # most recently seen key-value - elif line.startswith(' '): - if key is None: - # If we have not seen any keys yet then this is a syntax error - raise _syntax_error(_('Unexpected multi-line value')) - # Strip the initial space. This matches the behavior of xgettext - # scanning our job definitions with multi-line values. - line = line[1:] - # Append the current line to the list of values of the most recent - # key. This prevents quadratic complexity of string concatenation - value_list.append(line) - # Update the end line location of this record - _update_end_lineno() - # Treat lines with a colon as new key-value pairs - elif ':' in line: - # Since this is actual data let's try to remember where it came - # from. This may be a no-operation if there were any preceding - # key-value pairs. - _set_start_lineno_if_needed() - # Since we have a new, key-value pair we need to commit any - # previous key that we may have (regardless of multi-line or - # single-line values). - _commit_key_value_if_needed() - # Parse the line by splitting on the colon, getting rid of - # all surrounding whitespace from the key and getting rid of the - # leading whitespace from the value. 
-            key, value = line.split(':', 1)
-            key = key.strip()
-            value = value.lstrip()
-            # Check if the key already exist in this message
-            if key in record.data:
-                raise _syntax_error(_(
-                    'Job has a duplicate key {!r} '
-                    'with old value {!r} and new value {!r}'
-                ).format(key, record.raw_data[key], value))
-            if value.strip() != '':
-                # Construct initial value list out of the (only) value that we
-                # have so far. Additional multi-line values will just append to
-                # value_list
-                value_list = [value]
-                # Store the offset of the filed in the offset map
-                field_offset_map[key] = lineno - origin.line_start
-            else:
-                # The initial line may be empty, in that case the spaces and
-                # newlines there are discarded
-                value_list = []
-                # Store the offset of the filed in the offset map
-                # The +1 is for the fact that value is empty (or just
-                # whitespace) and that is stripped away in the normalized data
-                # part of the RFC822 record. To keep line tracking accurate
-                # we just assume that the field actually starts on
-                # the following line.
-                field_offset_map[key] = lineno - origin.line_start + 1
-            # Update the end-line location
-            _update_end_lineno()
-        # Treat all other lines as syntax errors
-        else:
-            raise _syntax_error(
-                _('Unexpected non-empty line: {!r}').format(line))
-    # Make sure to commit the last key from the record
-    _commit_key_value_if_needed()
-    # Once we've seen the whole file return the last record, if any
-    if record.data:
-        _logger.debug(_('yielding record: %r'), record)
-        yield record
diff -Nru ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/version.txt ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/version.txt
--- ubuntu-image-0.11+16.04ubuntu1/ubuntu_image/version.txt 2016-11-04 22:54:16.000000000 +0000
+++ ubuntu-image-0.12+16.04ubuntu1/ubuntu_image/version.txt 2016-11-08 22:31:21.000000000 +0000
@@ -1 +1 @@
-0.11+16.04ubuntu1
+0.12+16.04ubuntu1
diff -Nru ubuntu-image-0.11+16.04ubuntu1/update-sample-data ubuntu-image-0.12+16.04ubuntu1/update-sample-data
--- ubuntu-image-0.11+16.04ubuntu1/update-sample-data 2016-11-04 22:54:16.000000000 +0000
+++ ubuntu-image-0.12+16.04ubuntu1/update-sample-data 2016-11-08 22:31:21.000000000 +0000
@@ -15,7 +15,7 @@
 By doing an off-line run and zip, we can avoid having to hit the store at all. For now, this isn't the default, but if you set the environment variable
-`UBUNTUIMAGE_MOCK_SNAP=always`, it enables the very fast path, where only
+`UBUNTU_IMAGE_MOCK_SNAP=always`, it enables the very fast path, where only
 sample data is used and the store is never touched. This also allows you to run the test suite without having to sudo (although you still currently need sudo to generate the .zip file - `snap prepare-image` will eventually fix
diff -Nru ubuntu-image-0.11+16.04ubuntu1/xenial-coverage.ini ubuntu-image-0.12+16.04ubuntu1/xenial-coverage.ini
--- ubuntu-image-0.11+16.04ubuntu1/xenial-coverage.ini 1970-01-01 00:00:00.000000000 +0000
+++ ubuntu-image-0.12+16.04ubuntu1/xenial-coverage.ini 2016-11-08 22:31:21.000000000 +0000
@@ -0,0 +1,20 @@
+[run]
+branch = true
+parallel = true
+omit =
+    setup*
+    .tox/coverage/lib/python*/site-packages/*
+    ubuntu_image/tests/*
+    ubuntu_image/testing/*
+    /usr/lib/python3/dist-packages/*
+
+[paths]
+source =
+    ubuntu_image
+    .tox/coverage/lib/python*/site-packages/ubuntu_image
+
+[report]
+#fail_under = 100
+exclude_lines =
+    pragma: nocover
+    pragma: noxenial
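
For reference, a minimal sketch of how the rfc822 helpers deleted above were driven under the 0.11 API. The input text and field names here are hypothetical, and the ubuntu_image._unstable module no longer exists in 0.12, so this is illustrative only, not part of the package.

    from io import StringIO
    from collections import OrderedDict

    # Present in 0.11 only; the module is removed by this diff.
    from ubuntu_image._unstable import load_rfc822_records

    # Hypothetical input: records are separated by a blank line, continuation
    # lines start with a space, and a lone '.' stands for an empty line in a
    # multi-line value.
    text = StringIO(
        'name: first\n'
        'description:\n'
        ' A multi-line value.\n'
        ' .\n'
        ' The dot above stands for an empty line.\n'
        '\n'
        'name: second\n'
    )

    for record in load_rfc822_records(text, data_cls=OrderedDict):
        # record.data holds normalized values; record.origin tracks the
        # source and the line range the record was parsed from.
        print(record.origin, dict(record.data))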