diff -Nru simplestreams-0.1.0~bzr354/debian/changelog simplestreams-0.1.0~bzr378/debian/changelog
--- simplestreams-0.1.0~bzr354/debian/changelog 2014-08-22 14:06:20.000000000 +0000
+++ simplestreams-0.1.0~bzr378/debian/changelog 2015-05-13 17:03:51.000000000 +0000
@@ -1,3 +1,14 @@
+simplestreams (0.1.0~bzr378-0ubuntu1) wily; urgency=medium
+
+  * New upstream snapshot.
+    - GlanceMirror: identify images as i686 not i386 (LP: #1454775)
+    - sstream-mirror: debug statement about filtered items
+    - GlanceMirror: do not strip version information from endpoints
+      (LP: #1346935)
+    - general fixes to tools/ that are upstream only, not packaged.
+
+ -- Scott Moser <smoser@ubuntu.com>  Wed, 13 May 2015 13:03:50 -0400
+
 simplestreams (0.1.0~bzr354-0ubuntu1) utopic; urgency=medium
 
   * New upstream snapshot.
diff -Nru simplestreams-0.1.0~bzr354/setup.py simplestreams-0.1.0~bzr378/setup.py
--- simplestreams-0.1.0~bzr354/setup.py 2014-08-22 14:05:32.000000000 +0000
+++ simplestreams-0.1.0~bzr378/setup.py 2015-05-13 16:49:55.000000000 +0000
@@ -21,8 +21,8 @@
               'simplestreams.objectstores'],
     scripts=glob('bin/*'),
     data_files=[
-        ('/usr/lib/simplestreams', glob('tools/hook-*')),
-        ('/usr/share/doc/simplestreams',
+        ('lib/simplestreams', glob('tools/hook-*')),
+        ('share/doc/simplestreams',
          [f for f in glob('doc/*') if is_f(f)]),
     ]
)
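The setup.py hunk above swaps absolute data_files targets for prefix-relative ones. Distutils interprets a relative data_files directory as relative to the installation prefix, so the same setup.py now works for system installs, virtualenvs, and --user installs alike. A small illustration of how the new entries resolve (the prefix value depends on the environment):

    import sys

    # Relative data_files entries are interpreted relative to the install
    # prefix, so the two directories above resolve as:
    for reldir in ("lib/simplestreams", "share/doc/simplestreams"):
        print("%s -> %s/%s" % (reldir, sys.prefix, reldir))
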
diff -Nru simplestreams-0.1.0~bzr354/simplestreams/mirrors/glance.py simplestreams-0.1.0~bzr378/simplestreams/mirrors/glance.py
--- simplestreams-0.1.0~bzr354/simplestreams/mirrors/glance.py 2014-08-22 14:05:32.000000000 +0000
+++ simplestreams-0.1.0~bzr378/simplestreams/mirrors/glance.py 2015-05-13 16:49:55.000000000 +0000
@@ -25,9 +25,13 @@
 import errno
 import glanceclient
 import os
+import re
 
 
 def get_glanceclient(version='1', **kwargs):
+    # newer versions of the glanceclient will do this 'strip_version' for
+    # us, but older versions do not.
+    kwargs['endpoint'] = _strip_version(kwargs['endpoint'])
     pt = ('endpoint', 'token', 'insecure', 'cacert')
     kskw = {k: kwargs.get(k) for k in pt if k in kwargs}
     return glanceclient.Client(version, **kskw)
@@ -175,6 +179,8 @@
             t_item['arch'] = arch
             if arch == "amd64":
                 arch = "x86_64"
+            if arch == "i386":
+                arch = "i686"
             props['architecture'] = arch
 
         fullname = self.name_prefix + name
@@ -334,4 +340,17 @@
 
     return (os.path.getsize(path), md5)
 
+
+def _strip_version(endpoint):
+    """Strip a version from the last component of an endpoint if present"""
+
+    # Get rid of trailing '/' if present
+    if endpoint.endswith('/'):
+        endpoint = endpoint[:-1]
+    url_bits = endpoint.split('/')
+    # regex to match 'v1' or 'v2.0' etc
+    if re.match(r'v\d+\.?\d*', url_bits[-1]):
+        endpoint = '/'.join(url_bits[:-1])
+    return endpoint
+
 # vi: ts=4 expandtab syntax=python
diff -Nru simplestreams-0.1.0~bzr354/simplestreams/mirrors/__init__.py simplestreams-0.1.0~bzr378/simplestreams/mirrors/__init__.py
--- simplestreams-0.1.0~bzr354/simplestreams/mirrors/__init__.py 2014-08-22 14:05:32.000000000 +0000
+++ simplestreams-0.1.0~bzr378/simplestreams/mirrors/__init__.py 2015-05-13 16:49:55.000000000 +0000
@@ -303,6 +303,7 @@
             for itemname, item in version.get('items', {}).items():
                 pgree = (prodname, vername, itemname)
                 if not self.filter_item(item, src, target, pgree):
+                    LOG.debug("Filtered out item: %s/%s", itemname, item)
                     continue
 
                 added_items.append(itemname)
diff -Nru simplestreams-0.1.0~bzr354/simplestreams/openstack.py simplestreams-0.1.0~bzr378/simplestreams/openstack.py
--- simplestreams-0.1.0~bzr354/simplestreams/openstack.py 2014-08-22 14:05:32.000000000 +0000
+++ simplestreams-0.1.0~bzr378/simplestreams/openstack.py 2015-05-13 16:49:55.000000000 +0000
@@ -17,7 +17,6 @@
 from keystoneclient.v2_0 import client as ksclient
 
 import os
-import re
 
 OS_ENV_VARS = (
    'OS_AUTH_TOKEN', 'OS_AUTH_URL', 'OS_CACERT', 'OS_IMAGE_API_VERSION',
@@ -119,17 +118,4 @@
         endpoint_kwargs['filter_value'] = kwargs.get('region_name')
 
     endpoint = client.service_catalog.url_for(**endpoint_kwargs)
-    return _strip_version(endpoint)
-
-
-def _strip_version(endpoint):
-    """Strip a version from the last component of an endpoint if present"""
-
-    # Get rid of trailing '/' if present
-    if endpoint.endswith('/'):
-        endpoint = endpoint[:-1]
-    url_bits = endpoint.split('/')
-    # regex to match 'v1' or 'v2.0' etc
-    if re.match(r'v\d+\.?\d*', url_bits[-1]):
-        endpoint = '/'.join(url_bits[:-1])
     return endpoint
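With this change the version stripping happens only when the glance client itself is constructed; openstack.get_service_conn_info() now hands back the endpoint unmodified, which is the fix LP: #1346935 asked for. The relocated _strip_version() helper is easy to pin down in isolation; a standalone rehearsal using a made-up endpoint URL:

    import re

    def _strip_version(endpoint):
        # copy of the helper added to glance.py above
        if endpoint.endswith('/'):
            endpoint = endpoint[:-1]
        url_bits = endpoint.split('/')
        if re.match(r'v\d+\.?\d*', url_bits[-1]):
            endpoint = '/'.join(url_bits[:-1])
        return endpoint

    base = "http://glance.example.com:9292"   # hypothetical endpoint
    assert _strip_version(base + "/v1") == base
    assert _strip_version(base + "/v2.0/") == base
    assert _strip_version(base) == base       # nothing to strip
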
"streams/v1/index.json") + + unexpected = [f for f in objectstore.data if 'disk' in f] + + assert len(unexpected) == 0 + assert len(objectstore.data) != 0 diff -Nru simplestreams-0.1.0~bzr354/tools/make-test-data simplestreams-0.1.0~bzr378/tools/make-test-data --- simplestreams-0.1.0~bzr354/tools/make-test-data 2014-08-22 14:05:32.000000000 +0000 +++ simplestreams-0.1.0~bzr378/tools/make-test-data 2015-05-13 16:49:55.000000000 +0000 @@ -17,21 +17,26 @@ # along with Simplestreams. If not, see . import argparse +import hashlib +import errno import json import os import os.path import sys from simplestreams import util +from simplestreams import contentsource as cs try: # this is just python2 or python3 compatible prepping for get_url_len import urllib.request url_request = urllib.request.Request url_open = urllib.request.urlopen + url_error = urllib.error except ImportError as e: import urllib2 url_request = urllib2.Request url_open = urllib2.urlopen + url_error = urllib2 import toolutil @@ -40,21 +45,17 @@ # http://cloud-images-archive.ubuntu.com/ # file:///srv/ec2-images # -BASE_URLS = ("http://cloud-images.ubuntu.com/",) +BASE_URLS = ( + "http://cloud-images.ubuntu.com/", +) FAKE_DATA = { - 'root.tar.gz': { - 'size': 10240, 'md5': '1276481102f218c981e0324180bafd9f', - 'sha256': '84ff92691f909a05b224e1c56abb4864f01b4f8e3c854e4bb4c7baf1d3f6d652'}, - 'tar.gz': { - 'size': 11264, 'md5': '820a81e0916bac82838fd7e74ab29b15', - 'sha256': '5309e677c79cffae49a65728c61b436d3cdc2a2bab4c81bf0038415f74a56880'}, - 'disk1.img': { - 'size': 12288, 'md5': '4072783b8efb99a9e5817067d68f61c6', - 'sha256': 'f3cc103136423a57975750907ebc1d367e2985ac6338976d4d5a439f50323f4a'}, - 'uefi1.img': { - 'size': 12421, 'md5': 'd41d8cd98f00b204e9800998ecf8427e', - 'sha256': '8ca9c39f2200d299b011f5018c9d27a5a70f5a6b4c24f2fe06a94bc0e8c1213f'}, + 'root.tar.gz': {'size': 10240}, + 'tar.gz': {'size': 11264}, + 'disk1.img': {'size': 12288}, + 'uefi1.img': {'size': 12421}, + 'manifest': {'size': 10241}, + 'ova': {'size': 12399}, } EC2_ENDPOINTS = { @@ -90,6 +91,12 @@ FILE_DATA[dirname][bname][field] = value +def store_cache_entry(path, data): + for k, v in data.items(): + if k in ('size', 'md5', 'sha256'): + store_cache_data(path, k, v) + + def save_cache(): if FILE_DATA: hashcache = FILE_DATA['filename'] @@ -97,41 +104,129 @@ hfp.write(json.dumps(FILE_DATA, indent=1)) -def get_cloud_images_file_hash(path): - md5 = get_cache_data(path, 'md5') - sha256 = get_cache_data(path, 'sha256') - if md5 and sha256: - return {'md5': md5, 'sha256': sha256} - - found = {} - dirname = os.path.dirname(path) +def load_sums_from_sumfiles(path): for cksum in ("md5", "sha256"): content = None - for burl in BASE_URLS: - dir_url = burl + dirname + sfile_url = path + "/%sSUMS" % cksum.upper() + if get_cache_data(sfile_url, 'size'): + continue - try: - url = dir_url + "/%sSUMS" % cksum.upper() - sys.stderr.write("reading %s\n" % url) - content = util.read_url(url).decode("utf-8") - break - except Exception as error: - pass - - if not content: - raise error + sfile_info = load_url(sfile_url) + content = sfile_info['content'].decode('utf-8') for line in content.splitlines(): (hexsum, fname) = line.split() if fname.startswith("*"): fname = fname[1:] - found[cksum] = hexsum - store_cache_data(dirname + "/" + fname, cksum, hexsum) + fpath = path + "/" + fname + store_cache_data(fpath, cksum, hexsum) + get_cloud_images_file_size(fpath, save=False) + + store_cache_entry(sfile_url, sfile_info) + + +class NonExistingUrl(Exception): + pass + + +def load_url(path, 
diff -Nru simplestreams-0.1.0~bzr354/tools/make-test-data simplestreams-0.1.0~bzr378/tools/make-test-data
--- simplestreams-0.1.0~bzr354/tools/make-test-data 2014-08-22 14:05:32.000000000 +0000
+++ simplestreams-0.1.0~bzr378/tools/make-test-data 2015-05-13 16:49:55.000000000 +0000
@@ -17,21 +17,26 @@
 # along with Simplestreams.  If not, see <http://www.gnu.org/licenses/>.
 
 import argparse
+import hashlib
+import errno
 import json
 import os
 import os.path
 import sys
 
 from simplestreams import util
+from simplestreams import contentsource as cs
 
 try:
     # this is just python2 or python3 compatible prepping for get_url_len
     import urllib.request
     url_request = urllib.request.Request
     url_open = urllib.request.urlopen
+    url_error = urllib.error
 except ImportError as e:
     import urllib2
     url_request = urllib2.Request
     url_open = urllib2.urlopen
+    url_error = urllib2
 
 import toolutil
@@ -40,21 +45,17 @@
 #    http://cloud-images-archive.ubuntu.com/
 #    file:///srv/ec2-images
 #
-BASE_URLS = ("http://cloud-images.ubuntu.com/",)
+BASE_URLS = (
+    "http://cloud-images.ubuntu.com/",
+)
 
 FAKE_DATA = {
-    'root.tar.gz': {
-        'size': 10240, 'md5': '1276481102f218c981e0324180bafd9f',
-        'sha256': '84ff92691f909a05b224e1c56abb4864f01b4f8e3c854e4bb4c7baf1d3f6d652'},
-    'tar.gz': {
-        'size': 11264, 'md5': '820a81e0916bac82838fd7e74ab29b15',
-        'sha256': '5309e677c79cffae49a65728c61b436d3cdc2a2bab4c81bf0038415f74a56880'},
-    'disk1.img': {
-        'size': 12288, 'md5': '4072783b8efb99a9e5817067d68f61c6',
-        'sha256': 'f3cc103136423a57975750907ebc1d367e2985ac6338976d4d5a439f50323f4a'},
-    'uefi1.img': {
-        'size': 12421, 'md5': 'd41d8cd98f00b204e9800998ecf8427e',
-        'sha256': '8ca9c39f2200d299b011f5018c9d27a5a70f5a6b4c24f2fe06a94bc0e8c1213f'},
+    'root.tar.gz': {'size': 10240},
+    'tar.gz': {'size': 11264},
+    'disk1.img': {'size': 12288},
+    'uefi1.img': {'size': 12421},
+    'manifest': {'size': 10241},
+    'ova': {'size': 12399},
 }
 
 EC2_ENDPOINTS = {
@@ -90,6 +91,12 @@
             FILE_DATA[dirname][bname][field] = value
 
 
+def store_cache_entry(path, data):
+    for k, v in data.items():
+        if k in ('size', 'md5', 'sha256'):
+            store_cache_data(path, k, v)
+
+
 def save_cache():
     if FILE_DATA:
         hashcache = FILE_DATA['filename']
@@ -97,41 +104,129 @@
             hfp.write(json.dumps(FILE_DATA, indent=1))
 
 
-def get_cloud_images_file_hash(path):
-    md5 = get_cache_data(path, 'md5')
-    sha256 = get_cache_data(path, 'sha256')
-    if md5 and sha256:
-        return {'md5': md5, 'sha256': sha256}
-
-    found = {}
-    dirname = os.path.dirname(path)
+def load_sums_from_sumfiles(path):
     for cksum in ("md5", "sha256"):
         content = None
-        for burl in BASE_URLS:
-            dir_url = burl + dirname
+        sfile_url = path + "/%sSUMS" % cksum.upper()
+        if get_cache_data(sfile_url, 'size'):
+            continue
 
-            try:
-                url = dir_url + "/%sSUMS" % cksum.upper()
-                sys.stderr.write("reading %s\n" % url)
-                content = util.read_url(url).decode("utf-8")
-                break
-            except Exception as error:
-                pass
-
-        if not content:
-            raise error
+        sfile_info = load_url(sfile_url)
+        content = sfile_info['content'].decode('utf-8')
 
         for line in content.splitlines():
             (hexsum, fname) = line.split()
             if fname.startswith("*"):
                 fname = fname[1:]
-            found[cksum] = hexsum
-            store_cache_data(dirname + "/" + fname, cksum, hexsum)
+            fpath = path + "/" + fname
+            store_cache_data(fpath, cksum, hexsum)
+            get_cloud_images_file_size(fpath, save=False)
+
+        store_cache_entry(sfile_url, sfile_info)
+
+
+class NonExistingUrl(Exception):
+    pass
+
+
+def load_url(path, hashes=None, base_urls=None):
+    if base_urls is None:
+        base_urls = BASE_URLS
+    url = base_urls[0] + path
+    mirrors = [u + path for u in base_urls[1:]]
+    try:
+        data = cs.UrlContentSource(url, mirrors=mirrors).read()
+        if b'403 Forbidden' in data:
+            raise NonExistingUrl("%s: 403 Forbidden (s3 404)" % path)
+    except url_error.HTTPError as e:
+        if e.code == 403:
+            raise NonExistingUrl("%s: 403" % path)
+        elif e.code == 404:
+            raise NonExistingUrl("%s: 404" % path)
+        else:
+            raise e
+    except IOError as e:
+        if e.errno != errno.ENOENT:
+            raise e
+        else:
+            raise NonExistingUrl("%s: ENOENT" % path)
+
+    sys.stderr.write("read url %s\n" % path)
+
+    raw_content = data
+
+    ret = {'size': len(raw_content)}
+    ret['content'] = raw_content
+
+    if hashes is None:
+        hashes = ["sha256", "md5"]
+    for hashname in hashes:
+        t = hashlib.new(hashname)
+        t.update(raw_content)
+        ret[hashname] = t.hexdigest()
+    return ret
+
+
+def load_data_in_dir(path):
+    qfile = ".qindex.json"
+    qpath_loaded = False
+    qpath = path + "/" + qfile
+
+    if get_cache_data(qpath, "size"):
+        sys.stderr.write("dir[cache]: %s\n" % path)
+        return
+
+    try:
+        ret = load_url(qpath)
+        content = ret['content'].decode("utf-8")
+        try:
+            for fpath, data in json.loads(content).items():
+                store_cache_entry(path + "/" + fpath, data)
+            qpath_loaded = True
+            store_cache_entry(qpath, ret)
+        except ValueError as e:
+            sys.stderr.write("qindex parse failed %s" % path)
+            raise e
+    except NonExistingUrl as e:
+        # sys.stderr.write("%s: 404 (%s)" % (qpath, e))
+        pass
+
+    # fall back to loading sumfiles and statting sizes
+    if qpath_loaded:
+        sys.stderr.write("dir[qindex]: %s\n" % path)
+    else:
+        load_sums_from_sumfiles(path)
+        sys.stderr.write("dir[sumfiles]: %s\n" % path)
 
-    md5 = get_cache_data(path, 'md5')
-    sha256 = get_cache_data(path, 'sha256')
     save_cache()
-    return {'md5': md5, 'sha256': sha256}
+    return
+
+
+def get_cloud_images_file_info(path):
+    keys = ('md5', 'sha256', 'size')
+    cached = {k: get_cache_data(path, k) for k in keys}
+    if all(cached.values()):
+        return cached
+
+    dirname = os.path.dirname(path)
+
+    load_data_in_dir(dirname)
+
+    # if we were missing an md5 or a sha256 for the manifest
+    # file, then get them ourselves.
+    ret = {k: get_cache_data(path, k) for k in keys}
+    if path.endswith(".manifest") and not all(ret.values()):
+        loaded = load_url(path)
+        store_cache_entry(path, loaded)
+        save_cache()
+        ret = {k: loaded[k] for k in keys}
+
+    missing = [h for h in ret if not ret[h]]
+    if missing:
+        raise Exception("Unable to get checksums (%s) for %s" %
+                        (missing, path))
+
+    return ret
 
 
 def get_url_len(url):
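load_url() above always hands back the same dict shape: the raw bytes, their size, and one hex digest per requested hash. A self-contained rehearsal of that shape, with fake content standing in for a mirror fetch:

    import hashlib

    def describe(raw_content, hashes=("sha256", "md5")):
        # same dict shape load_url() builds: size, content, and hex digests
        ret = {'size': len(raw_content), 'content': raw_content}
        for hashname in hashes:
            t = hashlib.new(hashname)
            t.update(raw_content)
            ret[hashname] = t.hexdigest()
        return ret

    info = describe(b"fake image payload")
    print(info['size'], info['md5'])
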
@@ -142,35 +237,37 @@
         return os.stat(url).st_size
 
     # http://stackoverflow.com/questions/4421170/python-head-request-with-urllib2
-    sys.stderr.write("getting size for %s\n" % url)
     request = url_request(url)
-    request.get_method = lambda : 'HEAD'
+    request.get_method = lambda: 'HEAD'
     response = url_open(request)
     return int(response.headers.get('content-length', 0))
 
 
-def get_cloud_images_file_size(path):
+def get_cloud_images_file_size(path, save=True):
     size = get_cache_data(path, 'size')
     if size:
         return size
 
+    error = None
+    sys.stderr.write(" size: %s\n" % path)
     for burl in BASE_URLS:
         try:
             size = int(get_url_len(burl + path))
             break
         except Exception as error:
+            sys.stderr.write(" size stat failed: %s\n" % (burl + path))
             pass
 
     if not size:
         raise error
 
     store_cache_data(path, 'size', size)
-    save_cache()
+    if save:
+        save_cache()
 
     return size
 
 
 def create_fake_file(prefix, item):
     fpath = os.path.join(prefix, item['path'])
-    path = item['path']
 
     data = FAKE_DATA[item['ftype']]
 
@@ -179,10 +276,16 @@
     with open(fpath, "w") as fp:
         fp.truncate(data['size'])
 
+    pwd_mirror = "file://" + os.getcwd() + "/"
+    if 'md5' not in FAKE_DATA[item['ftype']]:
+        # load the url to get checksums and update the sparse FAKE_DATA
+        fdata = load_url(fpath, base_urls=[pwd_mirror])
+        FAKE_DATA[item['ftype']].update(fdata)
+
     item.update(data)
 
     for cksum in util.CHECKSUMS:
-        if cksum in item and not cksum in data:
+        if cksum in item and cksum not in data:
             del item[data]
 
     return
@@ -264,6 +367,7 @@
     "southwest": "sw",
     "west": "ww",
     "northwest": "nw",
+    "central": "cc",
 }
 
 itmap = {
     'pv': {'instance': "pi", "ebs": "pe", "ssd": "es", "io1": "eo"},
@@ -312,7 +416,7 @@
     # create the item key:
     #  - 2 letter country code (us) . 3 for govcloud (gww)
-    #  - 2 letter direction ('nn' for north, 'nw' for northwest)
+    #  - 2 letter direction (nn=north, nw=northwest, cc=central)
     #  - 1 digit number
     #  - 1 char for virt type
     #  - 1 char for root-store type
@@ -353,11 +457,8 @@
     ts = util.timestamp()
 
     tree = dl_load_query(query_tree)
 
-    def update_hashes(item, tree, pedigree):
-        item.update(get_cloud_images_file_hash(item['path']))
-
-    def update_sizes(item, tree, pedigree):
-        item.update({'size': get_cloud_images_file_size(item['path'])})
+    def update_data(item, tree, pedigree):
+        item.update(get_cloud_images_file_info(item['path']))
 
     cid_fmt = "com.ubuntu.cloud:%s:download"
     for stream in tree:
@@ -366,8 +467,7 @@
         cid = cid_fmt % stream
 
         if REAL_DATA:
-            util.walk_products(tree[stream], cb_item=update_hashes)
-            util.walk_products(tree[stream], cb_item=update_sizes)
+            util.walk_products(tree[stream], cb_item=update_data)
         else:
             util.walk_products(tree[stream], cb_item=create_file)
 
@@ -481,7 +581,7 @@
     for root, dirs, files in os.walk(args.out_d):
         for f in [f for f in files if f.endswith(".json")]:
             toolutil.signjson_file(os.path.join(root, f),
-                                  status_cb=printstatus)
+                                   status_cb=printstatus)
 
     return
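For reference, the HEAD trick that get_url_len() keeps relying on can be exercised on its own; the python3 spelling below (the URL in the comment is a placeholder) reads Content-Length without downloading the body:

    import urllib.request

    def head_length(url):
        # issue a HEAD request; only headers come back, not the body
        request = urllib.request.Request(url)
        request.get_method = lambda: 'HEAD'
        response = urllib.request.urlopen(request)
        return int(response.headers.get('content-length', 0))

    # head_length("http://cloud-images.ubuntu.com/streams/v1/index.json")
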
diff -Nru simplestreams-0.1.0~bzr354/tools/sstream-mirror-glance simplestreams-0.1.0~bzr378/tools/sstream-mirror-glance
--- simplestreams-0.1.0~bzr354/tools/sstream-mirror-glance 2014-08-22 14:05:32.000000000 +0000
+++ simplestreams-0.1.0~bzr378/tools/sstream-mirror-glance 2015-05-13 16:49:55.000000000 +0000
@@ -31,6 +31,7 @@
 from simplestreams import util
 from simplestreams.mirrors import glance
 
+DEFAULT_FILTERS = ['ftype~(disk1.img|disk.img)', 'arch~(x86_64|amd64|i386)']
 
 def error(msg):
     sys.stderr.write(msg)
@@ -67,6 +68,8 @@
     parser.add_argument('--mirror', action='append', default=[],
                         dest="mirrors",
                         help='additional mirrors to find referenced files')
+    parser.add_argument('--path', default=None,
+                        help='sync from index or products file in mirror')
     parser.add_argument('--output-dir', metavar="DIR", default=False,
                         help='write image data to storage in dir')
     parser.add_argument('--output-swift', metavar="prefix", default=False,
@@ -94,9 +97,7 @@
                         help='The keyring for gpg --keyring')
 
     parser.add_argument('source_mirror')
-    parser.add_argument('path', nargs='?', default="streams/v1/index.sjson")
-    parser.add_argument('--item-filter', action='append', default=[],
-                        dest="item_filters",
+    parser.add_argument('item_filters', nargs='*', default=DEFAULT_FILTERS,
                         help="Filter expression for mirrored items. "
                         "Multiple filter arguments can be specified"
                         "and will be combined with logical AND. "
@@ -114,13 +115,15 @@
                'modify_hook': modify_hook,
                'item_filters': args.item_filters}
 
+    (mirror_url, args.path) = util.path_from_mirror_url(args.source_mirror,
+                                                        args.path)
+
     def policy(content, path):  # pylint: disable=W0613
         if args.path.endswith('sjson'):
             return util.read_signed(content, keyring=args.keyring)
         else:
             return content
 
-    smirror = mirrors.UrlMirrorReader(args.source_mirror, mirrors=args.mirrors,
+    smirror = mirrors.UrlMirrorReader(mirror_url, mirrors=args.mirrors,
                                       policy=policy)
 
     if args.output_dir and args.output_swift:
         error("--output-dir and --output-swift are mutually exclusive\n")
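The util.path_from_mirror_url() call is what lets the old positional 'path' argument become the optional --path flag: when no path is given, the helper splits a combined mirror URL into (mirror_url, path), and otherwise falls back to the previous default of streams/v1/index.sjson. A hedged approximation of that contract (the real logic lives in simplestreams/util.py and may differ in detail):

    import re

    def path_from_mirror_url(mirror, path):
        # approximation for illustration; see simplestreams/util.py
        if path is not None:
            return (mirror, path)
        match = re.search(r"streams/v[0-9]+/.*[.]s?json$", mirror)
        if match:
            return (mirror[:match.start()], mirror[match.start():])
        return (mirror, "streams/v1/index.sjson")

    print(path_from_mirror_url(
        "http://cloud-images.ubuntu.com/releases/streams/v1/index.sjson",
        None))
    # -> ('http://cloud-images.ubuntu.com/releases/',
    #     'streams/v1/index.sjson')
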
" @@ -114,13 +115,15 @@ 'modify_hook': modify_hook, 'item_filters': args.item_filters} + (mirror_url, args.path) = util.path_from_mirror_url(args.source_mirror, + args.path) def policy(content, path): # pylint: disable=W0613 if args.path.endswith('sjson'): return util.read_signed(content, keyring=args.keyring) else: return content - smirror = mirrors.UrlMirrorReader(args.source_mirror, mirrors=args.mirrors, + smirror = mirrors.UrlMirrorReader(mirror_url, mirrors=args.mirrors, policy=policy) if args.output_dir and args.output_swift: error("--output-dir and --output-swift are mutually exclusive\n") diff -Nru simplestreams-0.1.0~bzr354/tools/tab2streams simplestreams-0.1.0~bzr378/tools/tab2streams --- simplestreams-0.1.0~bzr354/tools/tab2streams 2014-08-22 14:05:32.000000000 +0000 +++ simplestreams-0.1.0~bzr378/tools/tab2streams 2015-05-13 16:49:55.000000000 +0000 @@ -111,7 +111,8 @@ not_copied_up = ['content_id'] for content_id in trees: - util.products_condense(trees[content_id]) + util.products_condense(trees[content_id], + sticky=['path', 'sha256', 'md5', 'size']) content = trees[content_id] index['index'][content_id] = { 'path': "%s/%s.json" % (streamdir, content_id), diff -Nru simplestreams-0.1.0~bzr354/tools/toolutil.py simplestreams-0.1.0~bzr378/tools/toolutil.py --- simplestreams-0.1.0~bzr354/tools/toolutil.py 2014-08-22 14:05:32.000000000 +0000 +++ simplestreams-0.1.0~bzr378/tools/toolutil.py 2015-05-13 16:49:55.000000000 +0000 @@ -20,29 +20,17 @@ import os.path from simplestreams import util +from ubuntu_versions import REL2VER -REL2VER = { - "hardy": {'version': "8.04", 'devname': "Hardy Heron"}, - "lucid": {'version': "10.04", 'devname': "Lucid Lynx"}, - "oneiric": {'version': "11.10", 'devname': "Oneiric Ocelot"}, - "precise": {'version': "12.04", 'devname': "Precise Pangolin"}, - "quantal": {'version': "12.10", 'devname': "Quantal Quetzal"}, - "raring": {'version': "13.04", 'devname': "Raring Ringtail"}, - "saucy": {'version': "13.10", 'devname': "Saucy Salamander"}, - "trusty": {'version': "14.04", 'devname': "Trusty Tahr"}, - "utopic": {'version': "14.10", 'devname': "Utopic Unicorn"}, -} - -RELEASES = [k for k in REL2VER if k != "hardy"] +BLACKLIST_RELS = ('hardy', 'intrepid', 'jaunty', 'karmic', 'maverick', 'natty') +RELEASES = [k for k in REL2VER if k not in BLACKLIST_RELS] BUILDS = ("server") NUM_DAILIES = 4 def is_expected(repl, fields): - rel = fields[0] - serial = fields[3] - arch = fields[4] + rel, bname, label, serial, arch, path, pubname = fields if repl == "-root.tar.gz": if rel in ("lucid", "oneiric"): # lucid, oneiric do not have -root.tar.gz @@ -66,9 +54,17 @@ return False if arch == "ppc64el": - if rel < "trusty" or serial <= "20140122": + if rel < "trusty" or serial <= "20140326": + return False + if repl not in (".tar.gz", "-root.tar.gz", "-disk1.img", ".manifest"): + return False + + if repl == ".ova": + # OVA images become available after 20150407.4 (vivid beta-3) + # and only for trusty and later x86 + if rel < "trusty" or serial < "20150407.4": return False - if repl not in (".tar.gz", "-root.tar.gz"): + if arch not in ('i386', 'amd64'): return False # if some data in /query is not truely available, fill up this array @@ -87,7 +83,8 @@ if rels is None: rels = RELEASES - suffixes = (".tar.gz", "-root.tar.gz", "-disk1.img", "-uefi1.img") + suffixes = (".tar.gz", "-root.tar.gz", "-disk1.img", "-uefi1.img", + ".manifest", ".ova") streams = [f[0:-len(".latest.txt")] for f in os.listdir(path) if f.endswith("latest.txt")] diff -Nru 
simplestreams-0.1.0~bzr354/tools/ubuntu_versions.py simplestreams-0.1.0~bzr378/tools/ubuntu_versions.py
--- simplestreams-0.1.0~bzr354/tools/ubuntu_versions.py 1970-01-01 00:00:00.000000000 +0000
+++ simplestreams-0.1.0~bzr378/tools/ubuntu_versions.py 2015-05-13 16:49:55.000000000 +0000
@@ -0,0 +1,101 @@
+#!/usr/bin/python3
+# Copyright (C) 2013 Canonical Ltd.
+#
+# Author: Scott Moser <scott.moser@canonical.com>
+#
+# Simplestreams is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+#
+# Simplestreams is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Affero General Public
+# License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with Simplestreams.  If not, see <http://www.gnu.org/licenses/>.
+
+# this is only used if no distro_info available
+HARDCODED_REL2VER = {
+    "hardy": {'version': "8.04", 'devname': "Hardy Heron"},
+    "lucid": {'version': "10.04", 'devname': "Lucid Lynx"},
+    "oneiric": {'version': "11.10", 'devname': "Oneiric Ocelot"},
+    "precise": {'version': "12.04", 'devname': "Precise Pangolin"},
+    "quantal": {'version': "12.10", 'devname': "Quantal Quetzal"},
+    "raring": {'version': "13.04", 'devname': "Raring Ringtail"},
+    "saucy": {'version': "13.10", 'devname': "Saucy Salamander"},
+    "trusty": {'version': "14.04", 'devname': "Trusty Tahr"},
+    "utopic": {'version': "14.10", 'devname': "Utopic Unicorn"},
+    "vivid": {'version': "15.04", 'devname': "Vivid Vervet"},
+    "wily": {'version': "15.10", 'devname': "Wily Werewolf"},
+}
+
+from simplestreams.log import LOG
+
+
+def get_ubuntu_info(date=None):
+    # this returns a sorted list of dicts
+    # each dict has information about an ubuntu release.
+    # Notably absent is any date information (release or eol)
+    # it's harder than you'd like to get at data via the distro_info library
+    #
+    # The resultant dicts look like this:
+    # {'codename': 'saucy', 'devel': True,
+    #  'full_codename': 'Saucy Salamander',
+    #  'fullname': 'Ubuntu 13.10 "Saucy Salamander"',
+    #  'lts': False, 'supported': True, 'version': '13.10'}
+
+    udi = distro_info.UbuntuDistroInfo()
+    # 'all' is an attribute, not a function, so we can't ask for it formatted.
+    # s2all and us2all are lists, the value of each is the index
+    # where that release should fall in 'all'.
+    allcn = udi.all
+    s2all = [allcn.index(c) for c in
+             udi.supported(result="codename", date=date)]
+    us2all = [allcn.index(c) for c in
+              udi.unsupported(result="codename", date=date)]
+
+    def getall(result, date):
+        ret = [None for f in range(0, len(allcn))]
+        for i, r in enumerate(udi.supported(result=result, date=date)):
+            ret[s2all[i]] = r
+        for i, r in enumerate(udi.unsupported(result=result, date=date)):
+            ret[us2all[i]] = r
+        return [r for r in ret if r is not None]
+
+    codenames = getall(result="codename", date=date)
+    fullnames = getall(result="fullname", date=date)
+    lts = [bool('LTS' in f) for f in fullnames]
+    versions = [x.replace(" LTS", "") for x in
+                getall(result="release", date=date)]
+    full_codenames = [x.split('"')[1] for x in fullnames]
+    supported = udi.supported(date=date)
+    try:
+        devel = udi.devel(date=date)
+    except distro_info.DistroDataOutdated as e:
+        LOG.warn("distro_info.UbuntuDistroInfo() raised exception (%s)."
+ " Using stable release as devel.", e) + devel = udi.stable(date=date) + ret = [] + for i, codename in enumerate(codenames): + ret.append({'lts': lts[i], 'version': versions[i], + 'supported': codename in supported, + 'fullname': fullnames[i], 'codename': codename, + 'devname': full_codenames[i], + 'devel': bool(codename == devel)}) + + return ret + + +try: + import distro_info + info = get_ubuntu_info() + REL2VER = {} + for r in info: + if r['codename'] < "hardy": + continue + REL2VER[r['codename']] = {x: r[x] for x in ("version", "devname")} + +except ImportError: + REL2VER = HARDCODED_REL2VER