diff -Nru swift-1.4.8/debian/changelog swift-1.4.8/debian/changelog --- swift-1.4.8/debian/changelog 2012-04-12 16:05:49.000000000 +0000 +++ swift-1.4.8/debian/changelog 2015-07-27 16:17:55.000000000 +0000 @@ -1,3 +1,62 @@ +swift (1.4.8-0ubuntu2.5) precise-security; urgency=medium + + [ Marc Deslauriers ] + * SECURITY UPDATE: metadata constraint bypass via multiple requests + - debian/patches/CVE-2014-7960.patch: add metadata checks to + swift/account/server.py, swift/common/constraints.py, + swift/common/db.py, swift/container/server.py, added tests to + test/unit/common/test_db.py, + test/functionalnosetests/test_account.py, + test/functionalnosetests/test_container.py. + - CVE-2014-7960 + + [ Jamie Strandboge ] + * debian/patches/CVE-2014-7960.patch: + - adjust unittests since we use webob.exc and not the newer swob + - adjust functional tests to properly skip if test environment is not + specified and to not interfere with other functional tests + * debian/control: Build-Depends on python-mock + + -- Jamie Strandboge Mon, 27 Jul 2015 10:48:47 -0500 + +swift (1.4.8-0ubuntu2.4) precise-security; urgency=medium + + * SECURITY UPDATE: timing side-channel attack in TempURL + - debian/patches/CVE-2014-0006.patch: use constant time comparison in + swift/common/middleware/tempurl.py. + - CVE-2014-0006 + + -- Marc Deslauriers Fri, 14 Mar 2014 14:22:18 -0400 + +swift (1.4.8-0ubuntu2.3) precise-security; urgency=low + + * SECURITY UPDATE: Fix handling of DELETE obj reqs with old timestamp + - debian/patches/CVE-2013-4155.patch: don't create tombstone files when + a file with a newer timestamp exists + - CVE-2013-4155 + - LP: #1196932 + + -- Jamie Strandboge Thu, 22 Aug 2013 15:40:33 -0500 + +swift (1.4.8-0ubuntu2.2) precise-security; urgency=low + + * SECURITY UPDATE: fix unchecked input in XML responses + - debian/patches/CVE-2013-2161.patch: use saxutils.quoteattr() on account + name + - CVE-2013-2161 + - LP: #1183884 + * SECURITY UPDATE: optionally allow using secure json serialization instead + of pickle. + - debian/patches/CVE-2012-4406.patch: add memcache_serialization_support + option and update man pages + - debian/patches/memcache_serialization_support-default-to-zero.patch: + default to insecure pickle configuration for people upgrading. 
+ Interested users can adjust this as desired + - CVE-2012-4406 + - LP: #1006414 + + -- Jamie Strandboge Mon, 17 Jun 2013 14:56:56 -0500 + swift (1.4.8-0ubuntu2) precise; urgency=low * debian/patches/fix-ubuntu-unittests.patch: Refreshed diff -Nru swift-1.4.8/debian/control swift-1.4.8/debian/control --- swift-1.4.8/debian/control 2012-04-10 13:23:59.000000000 +0000 +++ swift-1.4.8/debian/control 2015-07-23 23:18:52.000000000 +0000 @@ -21,7 +21,8 @@ python-nose, python-paste, python-pastedeploy, - python-sphinx (>= 1.0) + python-sphinx (>= 1.0), + python-mock Standards-Version: 3.9.2 Homepage: http://launchpad.net/swift Vcs-Browser: http://bazaar.launchpad.net/~ubuntu-server-dev/swift/essex diff -Nru swift-1.4.8/debian/patches/CVE-2012-4406.patch swift-1.4.8/debian/patches/CVE-2012-4406.patch --- swift-1.4.8/debian/patches/CVE-2012-4406.patch 1970-01-01 00:00:00.000000000 +0000 +++ swift-1.4.8/debian/patches/CVE-2012-4406.patch 2013-06-17 19:43:02.000000000 +0000 @@ -0,0 +1,357 @@ +From: Vincent Untz +Date: Thu, 21 Jun 2012 12:37:41 +0000 (+0200) +Subject: Do not use pickle for serialization in memcache, but JSON +X-Git-Url: https://review.openstack.org/gitweb?p=openstack%2Fswift.git;a=commitdiff_plain;h=d81c5f1985739e0db56864fb9ba87a1a3f62b249 + +Do not use pickle for serialization in memcache, but JSON + +We don't want to use pickle as it can execute arbitrary code. JSON is +safer. However, note that it supports serialization for only some +specific subset of object types; this should be enough for what we need, +though. + +To avoid issues on upgrades (unability to read pickled values, and cache +poisoning for old servers not understanding JSON), we add a +memcache_serialization_support configuration option, with the following +values: + + 0 = older, insecure pickle serialization + 1 = json serialization but pickles can still be read (still insecure) + 2 = json serialization only (secure and the default) + +To avoid an instant full cache flush, existing installations should +upgrade with 0, then set to 1 and reload, then after some time (24 +hours) set to 2 and reload. Support for 0 and 1 will be removed in +future versions. + +Part of bug 1006414. + +Patch Set 2: Added Vincent Untz to AUTHORS + +Change-Id: Id7d6d547b103b4f23ebf5be98b88f09ec6027ce4 +--- + +diff --git a/AUTHORS b/AUTHORS +index be0783a..677e7fb 100644 +--- a/AUTHORS ++++ b/AUTHORS +@@ -60,6 +60,7 @@ Rainer Toebbicke (Rainer.Toebbicke@cern.ch) + Fujita Tomonori (fujita.tomonori@lab.ntt.co.jp) + Kapil Thangavelu (kapil.foss@gmail.com) + Dean Troyer (dtroyer@gmail.com) ++Vincent Untz (vuntz@suse.com) + Daniele Valeriani (daniele@dvaleriani.net) + Chris Wedgwood (cw@f00f.org) + Conrad Weidenkeller (conrad.weidenkeller@rackspace.com) +diff --git a/doc/manpages/proxy-server.conf.5 b/doc/manpages/proxy-server.conf.5 +index 4979e4d..e8581d1 100644 +--- a/doc/manpages/proxy-server.conf.5 ++++ b/doc/manpages/proxy-server.conf.5 +@@ -205,6 +205,21 @@ Enables the ability to log request headers. The default is False. + .IP \fBmemcache_servers\fR + The memcache servers that are available. This can be a list separated by commas. The default + is 127.0.0.1:11211. 
++.IP \fBmemcache_serialization_support\fR ++This sets how memcache values are serialized and deserialized: ++.RE ++ ++.PD 0 ++.RS 10 ++.IP "0 = older, insecure pickle serialization" ++.IP "1 = json serialization but pickles can still be read (still insecure)" ++.IP "2 = json serialization only (secure and the default)" ++.RE ++ ++.RS 10 ++To avoid an instant full cache flush, existing installations should upgrade with 0, then set to 1 and reload, then after some time (24 hours) set to 2 and reload. In the future, the ability to use pickle serialization will be removed. ++ ++If not set in the configuration file, the value for memcache_serialization_support will be read from /etc/swift/memcache.conf if it exists (see memcache.conf-sample). Otherwise, the default value as indicated above will be used. + .RE + + +diff --git a/etc/memcache.conf-sample b/etc/memcache.conf-sample +index 580d94a..5ad48ab 100644 +--- a/etc/memcache.conf-sample ++++ b/etc/memcache.conf-sample +@@ -3,3 +3,13 @@ + # several other conf files under [filter:cache] for example. You can specify + # multiple servers separated with commas, as in: 10.1.2.3:11211,10.1.2.4:11211 + # memcache_servers = 127.0.0.1:11211 ++# ++# Sets how memcache values are serialized and deserialized: ++# 0 = older, insecure pickle serialization ++# 1 = json serialization but pickles can still be read (still insecure) ++# 2 = json serialization only (secure and the default) ++# To avoid an instant full cache flush, existing installations should ++# upgrade with 0, then set to 1 and reload, then after some time (24 hours) ++# set to 2 and reload. ++# In the future, the ability to use pickle serialization will be removed. ++# memcache_serialization_support = 2 +diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample +index 148616b..00ddc7e 100644 +--- a/etc/proxy-server.conf-sample ++++ b/etc/proxy-server.conf-sample +@@ -122,6 +122,18 @@ use = egg:swift#memcache + # default to the value below. You can specify multiple servers separated with + # commas, as in: 10.1.2.3:11211,10.1.2.4:11211 + # memcache_servers = 127.0.0.1:11211 ++# ++# Sets how memcache values are serialized and deserialized: ++# 0 = older, insecure pickle serialization ++# 1 = json serialization but pickles can still be read (still insecure) ++# 2 = json serialization only (secure and the default) ++# If not set here, the value for memcache_serialization_support will be read ++# from /etc/swift/memcache.conf (see memcache.conf-sample). ++# To avoid an instant full cache flush, existing installations should ++# upgrade with 0, then set to 1 and reload, then after some time (24 hours) ++# set to 2 and reload. ++# In the future, the ability to use pickle serialization will be removed. 
++# memcache_serialization_support = 2 + + [filter:ratelimit] + use = egg:swift#ratelimit +diff --git a/swift/common/memcached.py b/swift/common/memcached.py +index ecd9332..82ebb7a 100644 +--- a/swift/common/memcached.py ++++ b/swift/common/memcached.py +@@ -27,11 +27,17 @@ import time + from bisect import bisect + from hashlib import md5 + ++try: ++ import simplejson as json ++except ImportError: ++ import json ++ + DEFAULT_MEMCACHED_PORT = 11211 + + CONN_TIMEOUT = 0.3 + IO_TIMEOUT = 2.0 + PICKLE_FLAG = 1 ++JSON_FLAG = 2 + NODE_WEIGHT = 50 + PICKLE_PROTOCOL = 2 + TRY_COUNT = 3 +@@ -57,7 +63,8 @@ class MemcacheRing(object): + """ + + def __init__(self, servers, connect_timeout=CONN_TIMEOUT, +- io_timeout=IO_TIMEOUT, tries=TRY_COUNT): ++ io_timeout=IO_TIMEOUT, tries=TRY_COUNT, ++ allow_pickle=False, allow_unpickle=False): + self._ring = {} + self._errors = dict(((serv, []) for serv in servers)) + self._error_limited = dict(((serv, 0) for serv in servers)) +@@ -69,6 +76,8 @@ class MemcacheRing(object): + self._client_cache = dict(((server, []) for server in servers)) + self._connect_timeout = connect_timeout + self._io_timeout = io_timeout ++ self._allow_pickle = allow_pickle ++ self._allow_unpickle = allow_unpickle or allow_pickle + + def _exception_occurred(self, server, e, action='talking'): + if isinstance(e, socket.timeout): +@@ -130,16 +139,21 @@ class MemcacheRing(object): + + :param key: key + :param value: value +- :param serialize: if True, value is pickled before sending to memcache ++ :param serialize: if True, value is serialized with JSON before sending ++ to memcache, or with pickle if configured to use ++ pickle instead of JSON (to avoid cache poisoning) + :param timeout: ttl in memcache + """ + key = md5hash(key) + if timeout > 0: + timeout += time.time() + flags = 0 +- if serialize: ++ if serialize and self._allow_pickle: + value = pickle.dumps(value, PICKLE_PROTOCOL) + flags |= PICKLE_FLAG ++ elif serialize: ++ value = json.dumps(value) ++ flags |= JSON_FLAG + for (server, fp, sock) in self._get_conns(key): + try: + sock.sendall('set %s %d %d %s noreply\r\n%s\r\n' % \ +@@ -151,8 +165,9 @@ class MemcacheRing(object): + + def get(self, key): + """ +- Gets the object specified by key. It will also unpickle the object +- before returning if it is pickled in memcache. ++ Gets the object specified by key. It will also unserialize the object ++ before returning if it is serialized in memcache with JSON, or if it ++ is pickled and unpickling is allowed. 
+ + :param key: key + :returns: value of the key in memcache +@@ -168,7 +183,12 @@ class MemcacheRing(object): + size = int(line[3]) + value = fp.read(size) + if int(line[2]) & PICKLE_FLAG: +- value = pickle.loads(value) ++ if self._allow_unpickle: ++ value = pickle.loads(value) ++ else: ++ value = None ++ elif int(line[2]) & JSON_FLAG: ++ value = json.loads(value) + fp.readline() + line = fp.readline().strip().split() + self._return_conn(server, fp, sock) +@@ -258,7 +278,9 @@ class MemcacheRing(object): + :param mapping: dictonary of keys and values to be set in memcache + :param servery_key: key to use in determining which server in the ring + is used +- :param serialize: if True, value is pickled before sending to memcache ++ :param serialize: if True, value is serialized with JSON before sending ++ to memcache, or with pickle if configured to use ++ pickle instead of JSON (to avoid cache poisoning) + :param timeout: ttl for memcache + """ + server_key = md5hash(server_key) +@@ -268,9 +290,12 @@ class MemcacheRing(object): + for key, value in mapping.iteritems(): + key = md5hash(key) + flags = 0 +- if serialize: ++ if serialize and self._allow_pickle: + value = pickle.dumps(value, PICKLE_PROTOCOL) + flags |= PICKLE_FLAG ++ elif serialize: ++ value = json.dumps(value) ++ flags |= JSON_FLAG + msg += ('set %s %d %d %s noreply\r\n%s\r\n' % + (key, flags, timeout, len(value), value)) + for (server, fp, sock) in self._get_conns(server_key): +@@ -302,7 +327,12 @@ class MemcacheRing(object): + size = int(line[3]) + value = fp.read(size) + if int(line[2]) & PICKLE_FLAG: +- value = pickle.loads(value) ++ if self._allow_unpickle: ++ value = pickle.loads(value) ++ else: ++ value = None ++ elif int(line[2]) & JSON_FLAG: ++ value = json.loads(value) + responses[line[1]] = value + fp.readline() + line = fp.readline().strip().split() +diff --git a/swift/common/middleware/memcache.py b/swift/common/middleware/memcache.py +index eb988bd..c79ba24 100644 +--- a/swift/common/middleware/memcache.py ++++ b/swift/common/middleware/memcache.py +@@ -27,20 +27,36 @@ class MemcacheMiddleware(object): + def __init__(self, app, conf): + self.app = app + self.memcache_servers = conf.get('memcache_servers') +- if not self.memcache_servers: ++ serialization_format = conf.get('memcache_serialization_support') ++ ++ if not self.memcache_servers or serialization_format is None: + path = os.path.join(conf.get('swift_dir', '/etc/swift'), + 'memcache.conf') + memcache_conf = ConfigParser() + if memcache_conf.read(path): +- try: +- self.memcache_servers = \ +- memcache_conf.get('memcache', 'memcache_servers') +- except (NoSectionError, NoOptionError): +- pass ++ if not self.memcache_servers: ++ try: ++ self.memcache_servers = \ ++ memcache_conf.get('memcache', 'memcache_servers') ++ except (NoSectionError, NoOptionError): ++ pass ++ if serialization_format is None: ++ try: ++ serialization_format = \ ++ memcache_conf.get('memcache', ++ 'memcache_serialization_support') ++ except (NoSectionError, NoOptionError): ++ pass ++ + if not self.memcache_servers: + self.memcache_servers = '127.0.0.1:11211' ++ if serialization_format is None: ++ serialization_format = 2 ++ + self.memcache = MemcacheRing( +- [s.strip() for s in self.memcache_servers.split(',') if s.strip()]) ++ [s.strip() for s in self.memcache_servers.split(',') if s.strip()], ++ allow_pickle=(serialization_format == 0), ++ allow_unpickle=(serialization_format <= 1)) + + def __call__(self, env, start_response): + env['swift.cache'] = self.memcache +diff --git 
a/test/unit/common/middleware/test_memcache.py b/test/unit/common/middleware/test_memcache.py +index 6b94bd1..e217a96 100644 +--- a/test/unit/common/middleware/test_memcache.py ++++ b/test/unit/common/middleware/test_memcache.py +@@ -47,6 +47,8 @@ class SetConfigParser(object): + if section == 'memcache': + if option == 'memcache_servers': + return '1.2.3.4:5' ++ elif option == 'memcache_serialization_support': ++ return '2' + else: + raise NoOptionError(option) + else: +@@ -86,7 +88,8 @@ class TestCacheMiddleware(unittest.TestCase): + exc = None + try: + app = memcache.MemcacheMiddleware( +- FakeApp(), {'memcache_servers': '1.2.3.4:5'}) ++ FakeApp(), {'memcache_servers': '1.2.3.4:5', ++ 'memcache_serialization_support': '2'}) + except Exception, err: + exc = err + finally: +diff --git a/test/unit/common/test_memcached.py b/test/unit/common/test_memcached.py +index dff6e80..3016d10 100644 +--- a/test/unit/common/test_memcached.py ++++ b/test/unit/common/test_memcached.py +@@ -1,3 +1,4 @@ ++ # -*- coding: utf8 -*- + # Copyright (c) 2010-2012 OpenStack, LLC. + # + # Licensed under the Apache License, Version 2.0 (the "License"); +@@ -166,6 +167,9 @@ class TestMemcached(unittest.TestCase): + self.assertEquals(memcache_client.get('some_key'), [1, 2, 3]) + memcache_client.set('some_key', [4, 5, 6]) + self.assertEquals(memcache_client.get('some_key'), [4, 5, 6]) ++ memcache_client.set('some_key', ['simple str', 'utf8 str éà']) ++ # As per http://wiki.openstack.org/encoding, we should expect to have unicode ++ self.assertEquals(memcache_client.get('some_key'), ['simple str', u'utf8 str éà']) + self.assert_(float(mock.cache.values()[0][1]) == 0) + esttimeout = time.time() + 10 + memcache_client.set('some_key', [1, 2, 3], timeout=10) +@@ -244,6 +248,24 @@ class TestMemcached(unittest.TestCase): + self.assertEquals(memcache_client.get_multi(('some_key2', 'some_key1', + 'not_exists'), 'multi_key'), [[4, 5, 6], [1, 2, 3], None]) + ++ def test_serialization(self): ++ memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'], ++ allow_pickle=True) ++ mock = MockMemcached() ++ memcache_client._client_cache['1.2.3.4:11211'] = [(mock, mock)] * 2 ++ memcache_client.set('some_key', [1, 2, 3]) ++ self.assertEquals(memcache_client.get('some_key'), [1, 2, 3]) ++ memcache_client._allow_pickle = False ++ memcache_client._allow_unpickle = True ++ self.assertEquals(memcache_client.get('some_key'), [1, 2, 3]) ++ memcache_client._allow_unpickle = False ++ self.assertEquals(memcache_client.get('some_key'), None) ++ memcache_client.set('some_key', [1, 2, 3]) ++ self.assertEquals(memcache_client.get('some_key'), [1, 2, 3]) ++ memcache_client._allow_unpickle = True ++ self.assertEquals(memcache_client.get('some_key'), [1, 2, 3]) ++ memcache_client._allow_pickle = True ++ self.assertEquals(memcache_client.get('some_key'), [1, 2, 3]) + + if __name__ == '__main__': + unittest.main() diff -Nru swift-1.4.8/debian/patches/CVE-2013-2161.patch swift-1.4.8/debian/patches/CVE-2013-2161.patch --- swift-1.4.8/debian/patches/CVE-2013-2161.patch 1970-01-01 00:00:00.000000000 +0000 +++ swift-1.4.8/debian/patches/CVE-2013-2161.patch 2013-06-17 20:21:29.000000000 +0000 @@ -0,0 +1,66 @@ +Origin: backport, 4eed6bf5b5028409f730be97ddcfb6bfa893c976 + 92d7eadd328797d392758c79e258c8455874c80e +Description: Check user input in XML responses +Bug: https://launchpad.net/bugs/1183884 + +Index: swift-1.4.8/swift/account/server.py +=================================================================== +--- swift-1.4.8.orig/swift/account/server.py 
2013-06-17 15:19:26.000000000 -0500 ++++ swift-1.4.8/swift/account/server.py 2013-06-17 15:19:26.000000000 -0500 +@@ -238,7 +238,7 @@ + account_list = '[' + ','.join(json_out) + ']' + elif out_content_type.endswith('/xml'): + output_list = ['', +- '' % account] ++ '' % saxutils.quoteattr(account)] + for (name, object_count, bytes_used, is_subdir) in account_list: + name = saxutils.escape(name) + if is_subdir: +Index: swift-1.4.8/test/unit/account/test_server.py +=================================================================== +--- swift-1.4.8.orig/test/unit/account/test_server.py 2013-06-17 15:19:26.000000000 -0500 ++++ swift-1.4.8/test/unit/account/test_server.py 2013-06-17 15:21:25.000000000 -0500 +@@ -537,6 +537,43 @@ + self.assertEquals(node.firstChild.nodeValue, '4') + self.assertEquals(resp.charset, 'utf-8') + ++ def test_GET_xml_escapes_account_name(self): ++ req = Request.blank( ++ '/sda1/p/%22%27', # "' ++ environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'}) ++ self.controller.PUT(req) ++ ++ req = Request.blank( ++ '/sda1/p/%22%27?format=xml', ++ environ={'REQUEST_METHOD': 'GET', 'HTTP_X_TIMESTAMP': '1'}) ++ resp = self.controller.GET(req) ++ ++ dom = xml.dom.minidom.parseString(resp.body) ++ self.assertEquals(dom.firstChild.attributes['name'].value, '"\'') ++ ++ def test_GET_xml_escapes_container_name(self): ++ req = Request.blank( ++ '/sda1/p/a', ++ environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'}) ++ self.controller.PUT(req) ++ ++ req = Request.blank( ++ '/sda1/p/a/%22%3Cword', # "= request.headers['x-timestamp']: ++ return HTTPConflict(request=request) + metadata = {'X-Timestamp': request.headers['x-timestamp']} + metadata.update(val for val in request.headers.iteritems() + if val[0].lower().startswith('x-object-meta-')) +@@ -551,6 +556,8 @@ + file = DiskFile(self.devices, device, partition, account, container, + obj, self.logger, disk_chunk_size=self.disk_chunk_size) + orig_timestamp = file.metadata.get('X-Timestamp') ++ if orig_timestamp and orig_timestamp >= request.headers['x-timestamp']: ++ return HTTPConflict(request=request) + upload_expiration = time.time() + self.max_upload_time + etag = md5() + upload_size = 0 +@@ -743,7 +750,6 @@ + content_type='text/plain') + if self.mount_check and not check_mount(self.devices, device): + return Response(status='507 %s is not mounted' % device) +- response_class = HTTPNoContent + file = DiskFile(self.devices, device, partition, account, container, + obj, self.logger, disk_chunk_size=self.disk_chunk_size) + if 'x-if-delete-at' in request.headers and \ +@@ -751,23 +757,26 @@ + int(file.metadata.get('X-Delete-At') or 0): + return HTTPPreconditionFailed(request=request, + body='X-If-Delete-At and X-Delete-At do not match') +- orig_timestamp = file.metadata.get('X-Timestamp') ++ old_delete_at = int(file.metadata.get('X-Delete-At') or 0) ++ if old_delete_at: ++ self.delete_at_update('DELETE', old_delete_at, account, ++ container, obj, request.headers, device) ++ orig_timestamp = file.metadata.get('X-Timestamp', 0) ++ req_timestamp = request.headers['X-Timestamp'] + if file.is_deleted(): + response_class = HTTPNotFound +- metadata = { +- 'X-Timestamp': request.headers['X-Timestamp'], 'deleted': True, +- } +- with file.mkstemp() as (fd, tmppath): +- old_delete_at = int(file.metadata.get('X-Delete-At') or 0) +- if old_delete_at: +- self.delete_at_update('DELETE', old_delete_at, account, +- container, obj, request.headers, device) +- file.put(fd, tmppath, metadata, extension='.ts') +- 
file.unlinkold(metadata['X-Timestamp']) +- if not orig_timestamp or \ +- orig_timestamp < request.headers['x-timestamp']: ++ else: ++ if orig_timestamp < req_timestamp: ++ response_class = HTTPNoContent ++ else: ++ response_class = HTTPConflict ++ if orig_timestamp < req_timestamp: ++ metadata = {'X-Timestamp': req_timestamp} ++ with file.mkstemp() as (fd, tmppath): ++ file.put(fd, tmppath, metadata, extension='.ts') ++ file.unlinkold(req_timestamp) + self.container_update('DELETE', account, container, obj, +- request.headers, {'x-timestamp': metadata['X-Timestamp'], ++ request.headers, {'x-timestamp': req_timestamp, + 'x-trans-id': request.headers.get('x-trans-id', '-')}, + device) + resp = response_class(request=request) +Index: swift-1.4.8/test/unit/obj/test_server.py +=================================================================== +--- swift-1.4.8.orig/test/unit/obj/test_server.py 2013-08-22 15:20:39.000000000 -0500 ++++ swift-1.4.8/test/unit/obj/test_server.py 2013-08-22 15:36:44.000000000 -0500 +@@ -423,6 +423,41 @@ + "X-Object-Meta-3" in resp.headers) + self.assertEquals(resp.headers['Content-Type'], 'application/x-test') + ++ def test_POST_old_timestamp(self): ++ ts = time() ++ timestamp = normalize_timestamp(ts) ++ req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, ++ headers={'X-Timestamp': timestamp, ++ 'Content-Type': 'application/x-test', ++ 'X-Object-Meta-1': 'One', ++ 'X-Object-Meta-Two': 'Two'}) ++ req.body = 'VERIFY' ++ resp = self.object_controller.PUT(req) ++ self.assertEquals(resp.status_int, 201) ++ ++ # Same timestamp should result in 409 ++ req = Request.blank('/sda1/p/a/c/o', ++ environ={'REQUEST_METHOD': 'POST'}, ++ headers={'X-Timestamp': timestamp, ++ 'X-Object-Meta-3': 'Three', ++ 'X-Object-Meta-4': 'Four', ++ 'Content-Encoding': 'gzip', ++ 'Content-Type': 'application/x-test'}) ++ resp = self.object_controller.POST(req) ++ self.assertEquals(resp.status_int, 409) ++ ++ # Earlier timestamp should result in 409 ++ timestamp = normalize_timestamp(ts - 1) ++ req = Request.blank('/sda1/p/a/c/o', ++ environ={'REQUEST_METHOD': 'POST'}, ++ headers={'X-Timestamp': timestamp, ++ 'X-Object-Meta-5': 'Five', ++ 'X-Object-Meta-6': 'Six', ++ 'Content-Encoding': 'gzip', ++ 'Content-Type': 'application/x-test'}) ++ resp = self.object_controller.POST(req) ++ self.assertEquals(resp.status_int, 409) ++ + def test_POST_not_exist(self): + timestamp = normalize_timestamp(time()) + req = Request.blank('/sda1/p/a/c/fail', +@@ -471,11 +506,15 @@ + + old_http_connect = object_server.http_connect + try: +- timestamp = normalize_timestamp(time()) ++ ts = time() ++ timestamp = normalize_timestamp(ts) + req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': + 'POST'}, headers={'X-Timestamp': timestamp, 'Content-Type': + 'text/plain', 'Content-Length': '0'}) + resp = self.object_controller.PUT(req) ++ self.assertEquals(resp.status_int, 201) ++ ++ timestamp = normalize_timestamp(ts + 1) + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Timestamp': timestamp, +@@ -487,6 +526,8 @@ + object_server.http_connect = mock_http_connect(202) + resp = self.object_controller.POST(req) + self.assertEquals(resp.status_int, 202) ++ ++ timestamp = normalize_timestamp(ts + 2) + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Timestamp': timestamp, +@@ -498,6 +539,8 @@ + object_server.http_connect = mock_http_connect(202, with_exc=True) + resp = self.object_controller.POST(req) + 
self.assertEquals(resp.status_int, 202) ++ ++ timestamp = normalize_timestamp(ts + 3) + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Timestamp': timestamp, +@@ -640,6 +683,32 @@ + 'name': '/a/c/o', + 'Content-Encoding': 'gzip'}) + ++ def test_PUT_old_timestamp(self): ++ ts = time() ++ req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, ++ headers={'X-Timestamp': normalize_timestamp(ts), ++ 'Content-Length': '6', ++ 'Content-Type': 'application/octet-stream'}) ++ req.body = 'VERIFY' ++ resp = self.object_controller.PUT(req) ++ self.assertEquals(resp.status_int, 201) ++ ++ req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, ++ headers={'X-Timestamp': normalize_timestamp(ts), ++ 'Content-Type': 'text/plain', ++ 'Content-Encoding': 'gzip'}) ++ req.body = 'VERIFY TWO' ++ resp = self.object_controller.PUT(req) ++ self.assertEquals(resp.status_int, 409) ++ ++ req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, ++ headers={'X-Timestamp': normalize_timestamp(ts - 1), ++ 'Content-Type': 'text/plain', ++ 'Content-Encoding': 'gzip'}) ++ req.body = 'VERIFY THREE' ++ resp = self.object_controller.PUT(req) ++ self.assertEquals(resp.status_int, 409) ++ + def test_PUT_no_etag(self): + if ubuntu_buildd_disable: + raise SkipTest +@@ -1258,12 +1327,32 @@ + self.assertEquals(resp.status_int, 400) + # self.assertRaises(KeyError, self.object_controller.DELETE, req) + ++ # The following should have created a tombstone file + timestamp = normalize_timestamp(time()) + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Timestamp': timestamp}) + resp = self.object_controller.DELETE(req) + self.assertEquals(resp.status_int, 404) ++ objfile = os.path.join(self.testdir, 'sda1', ++ storage_directory(object_server.DATADIR, 'p', ++ hash_path('a', 'c', 'o')), ++ timestamp + '.ts') ++ self.assert_(os.path.isfile(objfile)) ++ ++ # The following should *not* have created a tombstone file. ++ timestamp = normalize_timestamp(float(timestamp) - 1) ++ req = Request.blank('/sda1/p/a/c/o', ++ environ={'REQUEST_METHOD': 'DELETE'}, ++ headers={'X-Timestamp': timestamp}) ++ resp = self.object_controller.DELETE(req) ++ self.assertEquals(resp.status_int, 404) ++ objfile = os.path.join(self.testdir, 'sda1', ++ storage_directory(object_server.DATADIR, 'p', ++ hash_path('a', 'c', 'o')), ++ timestamp + '.ts') ++ self.assertFalse(os.path.isfile(objfile)) ++ self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) + + sleep(.00001) + timestamp = normalize_timestamp(time()) +@@ -1277,17 +1366,19 @@ + resp = self.object_controller.PUT(req) + self.assertEquals(resp.status_int, 201) + ++ # The following should *not* have created a tombstone file. 
+ timestamp = normalize_timestamp(float(timestamp) - 1) + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Timestamp': timestamp}) + resp = self.object_controller.DELETE(req) +- self.assertEquals(resp.status_int, 204) ++ self.assertEquals(resp.status_int, 409) + objfile = os.path.join(self.testdir, 'sda1', + storage_directory(object_server.DATADIR, 'p', + hash_path('a', 'c', 'o')), + timestamp + '.ts') +- self.assert_(os.path.isfile(objfile)) ++ self.assertFalse(os.path.exists(objfile)) ++ self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) + + sleep(.00001) + timestamp = normalize_timestamp(time()) +@@ -1302,6 +1393,103 @@ + timestamp + '.ts') + self.assert_(os.path.isfile(objfile)) + ++ def test_DELETE_container_updates(self): ++ # Test swift.object_server.ObjectController.DELETE and container ++ # updates, making sure container update is called in the correct ++ # state. ++ timestamp = normalize_timestamp(time()) ++ req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, ++ headers={ ++ 'X-Timestamp': timestamp, ++ 'Content-Type': 'application/octet-stream', ++ 'Content-Length': '4', ++ }) ++ req.body = 'test' ++ resp = self.object_controller.PUT(req) ++ self.assertEquals(resp.status_int, 201) ++ ++ calls_made = [0] ++ ++ def our_container_update(*args, **kwargs): ++ calls_made[0] += 1 ++ ++ orig_cu = self.object_controller.container_update ++ self.object_controller.container_update = our_container_update ++ try: ++ # The following request should return 409 (HTTP Conflict). A ++ # tombstone file should not have been created with this timestamp. ++ timestamp = normalize_timestamp(float(timestamp) - 1) ++ req = Request.blank('/sda1/p/a/c/o', ++ environ={'REQUEST_METHOD': 'DELETE'}, ++ headers={'X-Timestamp': timestamp}) ++ resp = self.object_controller.DELETE(req) ++ self.assertEquals(resp.status_int, 409) ++ objfile = os.path.join(self.testdir, 'sda1', ++ storage_directory(object_server.DATADIR, 'p', ++ hash_path('a', 'c', 'o')), ++ timestamp + '.ts') ++ self.assertFalse(os.path.isfile(objfile)) ++ self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) ++ self.assertEquals(0, calls_made[0]) ++ ++ # The following request should return 204, and the object should ++ # be truly deleted (container update is performed) because this ++ # timestamp is newer. A tombstone file should have been created ++ # with this timestamp. ++ sleep(.00001) ++ timestamp = normalize_timestamp(time()) ++ req = Request.blank('/sda1/p/a/c/o', ++ environ={'REQUEST_METHOD': 'DELETE'}, ++ headers={'X-Timestamp': timestamp}) ++ resp = self.object_controller.DELETE(req) ++ self.assertEquals(resp.status_int, 204) ++ objfile = os.path.join(self.testdir, 'sda1', ++ storage_directory(object_server.DATADIR, 'p', ++ hash_path('a', 'c', 'o')), ++ timestamp + '.ts') ++ self.assert_(os.path.isfile(objfile)) ++ self.assertEquals(1, calls_made[0]) ++ self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) ++ ++ # The following request should return a 404, as the object should ++ # already have been deleted, but it should have also performed a ++ # container update because the timestamp is newer, and a tombstone ++ # file should also exist with this timestamp. 
++ sleep(.00001) ++ timestamp = normalize_timestamp(time()) ++ req = Request.blank('/sda1/p/a/c/o', ++ environ={'REQUEST_METHOD': 'DELETE'}, ++ headers={'X-Timestamp': timestamp}) ++ resp = self.object_controller.DELETE(req) ++ self.assertEquals(resp.status_int, 404) ++ objfile = os.path.join(self.testdir, 'sda1', ++ storage_directory(object_server.DATADIR, 'p', ++ hash_path('a', 'c', 'o')), ++ timestamp + '.ts') ++ self.assert_(os.path.isfile(objfile)) ++ self.assertEquals(2, calls_made[0]) ++ self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) ++ ++ # The following request should return a 404, as the object should ++ # already have been deleted, and it should not have performed a ++ # container update because the timestamp is older, or created a ++ # tombstone file with this timestamp. ++ timestamp = normalize_timestamp(float(timestamp) - 1) ++ req = Request.blank('/sda1/p/a/c/o', ++ environ={'REQUEST_METHOD': 'DELETE'}, ++ headers={'X-Timestamp': timestamp}) ++ resp = self.object_controller.DELETE(req) ++ self.assertEquals(resp.status_int, 404) ++ objfile = os.path.join(self.testdir, 'sda1', ++ storage_directory(object_server.DATADIR, 'p', ++ hash_path('a', 'c', 'o')), ++ timestamp + '.ts') ++ self.assertFalse(os.path.isfile(objfile)) ++ self.assertEquals(2, calls_made[0]) ++ self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) ++ finally: ++ self.object_controller.container_update = orig_cu ++ + def test_call(self): + """ Test swift.object_server.ObjectController.__call__ """ + inbuf = StringIO() diff -Nru swift-1.4.8/debian/patches/CVE-2014-0006.patch swift-1.4.8/debian/patches/CVE-2014-0006.patch --- swift-1.4.8/debian/patches/CVE-2014-0006.patch 1970-01-01 00:00:00.000000000 +0000 +++ swift-1.4.8/debian/patches/CVE-2014-0006.patch 2014-03-14 18:23:25.000000000 +0000 @@ -0,0 +1,45 @@ +Backport of: + +From c0eed792a22865b280f99cbb79076fa7ad19fcbb Mon Sep 17 00:00:00 2001 +From: Samuel Merritt +Date: Thu, 16 Jan 2014 12:45:52 +0000 +Subject: Use constant time comparison in tempURL + +Use constant time comparison when evaluating tempURL to avoid timing +attacks (CVE-2014-0006). This is the grizzly backport of the master +patch. + +Fixes bug 1265665 + +Change-Id: I11e4ad83cc4077e52adf54a0bd0f9749294b2a48 +--- +Index: swift-1.4.8/swift/common/middleware/tempurl.py +=================================================================== +--- swift-1.4.8.orig/swift/common/middleware/tempurl.py 2014-03-14 14:21:30.131487800 -0400 ++++ swift-1.4.8/swift/common/middleware/tempurl.py 2014-03-14 14:21:55.067488468 -0400 +@@ -87,6 +87,7 @@ + from urlparse import parse_qs + + from swift.common.utils import get_logger ++from swift.common.utils import streq_const_time + + + #: Default headers to remove from incoming requests. 
Simply a whitespace +@@ -234,14 +235,14 @@ + if env['REQUEST_METHOD'] == 'HEAD': + hmac_val = self._get_hmac(env, temp_url_expires, key, + request_method='GET') +- if temp_url_sig != hmac_val: ++ if not streq_const_time(temp_url_sig, hmac_val): + hmac_val = self._get_hmac(env, temp_url_expires, key, + request_method='PUT') +- if temp_url_sig != hmac_val: ++ if not streq_const_time(temp_url_sig, hmac_val): + return self._invalid(env, start_response) + else: + hmac_val = self._get_hmac(env, temp_url_expires, key) +- if temp_url_sig != hmac_val: ++ if not streq_const_time(temp_url_sig, hmac_val): + return self._invalid(env, start_response) + self._clean_incoming_headers(env) + env['swift.authorize'] = lambda req: None diff -Nru swift-1.4.8/debian/patches/CVE-2014-7960.patch swift-1.4.8/debian/patches/CVE-2014-7960.patch --- swift-1.4.8/debian/patches/CVE-2014-7960.patch 1970-01-01 00:00:00.000000000 +0000 +++ swift-1.4.8/debian/patches/CVE-2014-7960.patch 2015-07-27 16:11:29.000000000 +0000 @@ -0,0 +1,401 @@ +Backport of: + +From 2c4622a28ea04e1c6b2382189b0a1f6cccdc9c0f Mon Sep 17 00:00:00 2001 +From: "Richard (Rick) Hawkins" +Date: Wed, 1 Oct 2014 09:37:47 -0400 +Subject: [PATCH] Fix metadata overall limits bug + +Currently metadata limits are checked on a per request basis. If +multiple requests are sent within the per request limits, it is +possible to exceed the overall limits. This patch adds an overall +metadata check to ensure that multiple requests to add metadata to +an account/container will check overall limits before adding +the additional metadata. + +This is a backport to the stable/icehouse branch for commit SHA +5b2c27a5874c2b5b0a333e4955b03544f6a8119f. + +Closes-Bug: 1365350 + +Conflicts: + swift/common/db.py + swift/container/server.py + +Change-Id: Id9fca209c9c1216f1949de7108bbe332808f1045 +--- + swift/account/server.py | 4 +- + swift/common/constraints.py | 5 ++- + swift/common/db.py | 34 ++++++++++++++- + swift/container/server.py | 4 +- + test/functional/test_account.py | 66 ++++++++++++++++++++++++++++ + test/functional/test_container.py | 20 +++++++++ + test/unit/common/test_db.py | 90 ++++++++++++++++++++++++++++++++++++++- + 7 files changed, 216 insertions(+), 7 deletions(-) + +Index: swift-1.4.8/swift/account/server.py +=================================================================== +--- swift-1.4.8.orig/swift/account/server.py ++++ swift-1.4.8/swift/account/server.py +@@ -125,7 +125,7 @@ class AccountController(object): + for key, value in req.headers.iteritems() + if key.lower().startswith('x-account-meta-')) + if metadata: +- broker.update_metadata(metadata) ++ broker.update_metadata(metadata, validate_metadata=True) + if created: + return HTTPCreated(request=req) + else: +@@ -302,7 +302,7 @@ class AccountController(object): + for key, value in req.headers.iteritems() + if key.lower().startswith('x-account-meta-')) + if metadata: +- broker.update_metadata(metadata) ++ broker.update_metadata(metadata, validate_metadata=True) + return HTTPNoContent(request=req) + + def __call__(self, env, start_response): +Index: swift-1.4.8/swift/common/constraints.py +=================================================================== +--- swift-1.4.8.orig/swift/common/constraints.py ++++ swift-1.4.8/swift/common/constraints.py +@@ -41,7 +41,10 @@ MAX_CONTAINER_NAME_LENGTH = 256 + + def check_metadata(req, target_type): + """ +- Check metadata sent in the request headers. ++ Check metadata sent in the request headers. 
This should only check ++ that the metadata in the request given is valid. Checks against ++ account/container overall metadata should be forwarded on to its ++ respective server to be checked. + + :param req: request object + :param target_type: str: one of: object, container, or account: indicates +Index: swift-1.4.8/swift/common/db.py +=================================================================== +--- swift-1.4.8.orig/swift/common/db.py ++++ swift-1.4.8/swift/common/db.py +@@ -34,7 +34,9 @@ import sqlite3 + + from swift.common.utils import normalize_timestamp, renamer, \ + mkdirs, lock_parent_directory, fallocate ++from swift.common.constraints import MAX_META_COUNT, MAX_META_OVERALL_SIZE + from swift.common.exceptions import LockTimeout ++from webob.exc import HTTPBadRequest + + + #: Timeout for trying to connect to a DB +@@ -551,7 +553,35 @@ class DatabaseBroker(object): + metadata = {} + return metadata + +- def update_metadata(self, metadata_updates): ++ @staticmethod ++ def validate_metadata(metadata): ++ """ ++ Validates that metadata_falls within acceptable limits. ++ ++ :param metadata: to be validated ++ :raises: HTTPBadRequest if MAX_META_COUNT or MAX_META_OVERALL_SIZE ++ is exceeded ++ """ ++ meta_count = 0 ++ meta_size = 0 ++ for key, (value, timestamp) in metadata.iteritems(): ++ key = key.lower() ++ if value != '' and (key.startswith('x-account-meta') or ++ key.startswith('x-container-meta')): ++ prefix = 'x-account-meta-' ++ if key.startswith('x-container-meta-'): ++ prefix = 'x-container-meta-' ++ key = key[len(prefix):] ++ meta_count = meta_count + 1 ++ meta_size = meta_size + len(key) + len(value) ++ if meta_count > MAX_META_COUNT: ++ raise HTTPBadRequest('Too many metadata items; max %d' ++ % MAX_META_COUNT) ++ if meta_size > MAX_META_OVERALL_SIZE: ++ raise HTTPBadRequest('Total metadata too large; max %d' ++ % MAX_META_OVERALL_SIZE) ++ ++ def update_metadata(self, metadata_updates, validate_metadata=False): + """ + Updates the metadata dict for the database. The metadata dict values + are tuples of (value, timestamp) where the timestamp indicates when +@@ -583,6 +613,8 @@ class DatabaseBroker(object): + value, timestamp = value_timestamp + if key not in md or timestamp > md[key][1]: + md[key] = value_timestamp ++ if validate_metadata: ++ DatabaseBroker.validate_metadata(md) + conn.execute('UPDATE %s_stat SET metadata = ?' 
% self.db_type, + (json.dumps(md),)) + conn.commit() +Index: swift-1.4.8/swift/container/server.py +=================================================================== +--- swift-1.4.8.orig/swift/container/server.py ++++ swift-1.4.8/swift/container/server.py +@@ -221,7 +221,7 @@ class ContainerController(object): + metadata['X-Container-Sync-To'][0] != \ + broker.metadata['X-Container-Sync-To'][0]: + broker.set_x_container_sync_points(-1, -1) +- broker.update_metadata(metadata) ++ broker.update_metadata(metadata, validate_metadata=True) + resp = self.account_update(req, account, container, broker) + if resp: + return resp +@@ -424,7 +424,7 @@ class ContainerController(object): + metadata['X-Container-Sync-To'][0] != \ + broker.metadata['X-Container-Sync-To'][0]: + broker.set_x_container_sync_points(-1, -1) +- broker.update_metadata(metadata) ++ broker.update_metadata(metadata, validate_metadata=True) + return HTTPNoContent(request=req) + + def __call__(self, env, start_response): +Index: swift-1.4.8/test/unit/common/test_db.py +=================================================================== +--- swift-1.4.8.orig/test/unit/common/test_db.py ++++ swift-1.4.8/test/unit/common/test_db.py +@@ -26,12 +26,16 @@ from uuid import uuid4 + + import simplejson + import sqlite3 ++from mock import patch + + import swift.common.db ++from swift.common.constraints import \ ++ MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE + from swift.common.db import AccountBroker, chexor, ContainerBroker, \ + DatabaseBroker, DatabaseConnectionError, dict_factory, get_db_connection + from swift.common.utils import normalize_timestamp + from swift.common.exceptions import LockTimeout ++from webob.exc import HTTPException, HTTPBadRequest + + + class TestDatabaseConnectionError(unittest.TestCase): +@@ -145,7 +149,7 @@ class TestDatabaseBroker(unittest.TestCa + conn.execute('CREATE TABLE test (one TEXT)') + conn.execute('CREATE TABLE test_stat (id TEXT)') + conn.execute('INSERT INTO test_stat (id) VALUES (?)', +- (str(uuid4),)) ++ (str(uuid4),)) + conn.execute('INSERT INTO test (one) VALUES ("1")') + conn.commit() + stub_called = [False] +@@ -579,6 +583,97 @@ class TestDatabaseBroker(unittest.TestCa + [first_value, first_timestamp]) + self.assert_('Second' not in broker.metadata) + ++ @patch.object(DatabaseBroker, 'validate_metadata') ++ def test_validate_metadata_is_called_from_update_metadata(self, mock): ++ broker = self.get_replication_info_tester(metadata=True) ++ first_timestamp = normalize_timestamp(1) ++ first_value = '1' ++ metadata = {'First': [first_value, first_timestamp]} ++ broker.update_metadata(metadata, validate_metadata=True) ++ self.assertTrue(mock.called) ++ ++ @patch.object(DatabaseBroker, 'validate_metadata') ++ def test_validate_metadata_is_not_called_from_update_metadata(self, mock): ++ broker = self.get_replication_info_tester(metadata=True) ++ first_timestamp = normalize_timestamp(1) ++ first_value = '1' ++ metadata = {'First': [first_value, first_timestamp]} ++ broker.update_metadata(metadata) ++ self.assertFalse(mock.called) ++ ++ def test_metadata_with_max_count(self): ++ metadata = {} ++ for c in xrange(MAX_META_COUNT): ++ key = 'X-Account-Meta-F{0}'.format(c) ++ metadata[key] = ('B', normalize_timestamp(1)) ++ key = 'X-Account-Meta-Foo'.format(c) ++ metadata[key] = ('', normalize_timestamp(1)) ++ try: ++ DatabaseBroker.validate_metadata(metadata) ++ except HTTPException: ++ self.fail('Unexpected HTTPException') ++ ++ def test_metadata_raises_exception_over_max_count(self): ++ 
metadata = {} ++ for c in xrange(MAX_META_COUNT + 1): ++ key = 'X-Account-Meta-F{0}'.format(c) ++ metadata[key] = ('B', normalize_timestamp(1)) ++ message = '' ++ try: ++ DatabaseBroker.validate_metadata(metadata) ++ except HTTPBadRequest as e: ++ message = str(e) ++ except HTTPException: ++ self.fail('Unexpected HTTPException') ++ self.assertEqual(message, 'Too many metadata items; max %d' % ++ MAX_META_COUNT) ++ ++ def test_metadata_with_max_overall_size(self): ++ metadata = {} ++ metadata_value = 'v' * MAX_META_VALUE_LENGTH ++ size = 0 ++ x = 0 ++ while size < (MAX_META_OVERALL_SIZE - 4 ++ - MAX_META_VALUE_LENGTH): ++ size += 4 + MAX_META_VALUE_LENGTH ++ metadata['X-Account-Meta-%04d' % x] = (metadata_value, ++ normalize_timestamp(1)) ++ x += 1 ++ if MAX_META_OVERALL_SIZE - size > 1: ++ metadata['X-Account-Meta-k'] = ( ++ 'v' * (MAX_META_OVERALL_SIZE - size - 1), ++ normalize_timestamp(1)) ++ try: ++ DatabaseBroker.validate_metadata(metadata) ++ except HTTPException: ++ self.fail('Unexpected HTTPException') ++ ++ def test_metadata_raises_exception_over_max_overall_size(self): ++ metadata = {} ++ metadata_value = 'k' * MAX_META_VALUE_LENGTH ++ size = 0 ++ x = 0 ++ while size < (MAX_META_OVERALL_SIZE - 4 ++ - MAX_META_VALUE_LENGTH): ++ size += 4 + MAX_META_VALUE_LENGTH ++ metadata['X-Account-Meta-%04d' % x] = (metadata_value, ++ normalize_timestamp(1)) ++ x += 1 ++ if MAX_META_OVERALL_SIZE - size > 1: ++ metadata['X-Account-Meta-k'] = ( ++ 'v' * (MAX_META_OVERALL_SIZE - size - 1), ++ normalize_timestamp(1)) ++ metadata['X-Account-Meta-k2'] = ('v', normalize_timestamp(1)) ++ message = '' ++ try: ++ DatabaseBroker.validate_metadata(metadata) ++ except HTTPBadRequest as e: ++ message = str(e) ++ except HTTPException: ++ self.fail('Unexpected HTTPException') ++ self.assertEqual(message, 'Total metadata too large; max %d' % ++ MAX_META_OVERALL_SIZE) ++ + + class TestContainerBroker(unittest.TestCase): + """ Tests for swift.common.db.ContainerBroker """ +Index: swift-1.4.8/test/functionalnosetests/test_account.py +=================================================================== +--- swift-1.4.8.orig/test/functionalnosetests/test_account.py ++++ swift-1.4.8/test/functionalnosetests/test_account.py +@@ -11,6 +11,42 @@ from swift_testing import check_response + + class TestAccount(unittest.TestCase): + ++ def setUp(self): ++ if skip: ++ return ++ def head(url, token, parsed, conn): ++ conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token}) ++ return check_response(conn) ++ resp = retry(head) ++ resp.read() ++ self.existing_metadata = set([ ++ k for k, v in resp.getheaders() if ++ k.lower().startswith('x-account-meta')]) ++ ++ def tearDown(self): ++ if skip: ++ return ++ def head(url, token, parsed, conn): ++ conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token}) ++ return check_response(conn) ++ resp = retry(head) ++ resp.read() ++ new_metadata = set( ++ [k for k, v in resp.getheaders() if ++ k.lower().startswith('x-account-meta')]) ++ ++ def clear_meta(url, token, parsed, conn, remove_metadata_keys): ++ headers = {'X-Auth-Token': token} ++ headers.update((k, '') for k in remove_metadata_keys) ++ conn.request('POST', parsed.path, '', headers) ++ return check_response(conn) ++ extra_metadata = list(self.existing_metadata ^ new_metadata) ++ for i in range(0, len(extra_metadata), 90): ++ batch = extra_metadata[i:i + 90] ++ resp = retry(clear_meta, batch) ++ resp.read() ++ self.assertEqual(resp.status // 100, 2) ++ + def test_metadata(self): + if skip: + raise SkipTest +@@ -99,6 
+135,16 @@ class TestAccount(unittest.TestCase): + resp.read() + self.assertEquals(resp.status, 400) + ++ def test_bad_metadata2(self): ++ if skip: ++ raise SkipTest ++ ++ def post(url, token, parsed, conn, extra_headers): ++ headers = {'X-Auth-Token': token} ++ headers.update(extra_headers) ++ conn.request('POST', parsed.path, '', headers) ++ return check_response(conn) ++ + headers = {} + for x in xrange(MAX_META_COUNT): + headers['X-Account-Meta-%d' % x] = 'v' +@@ -112,6 +158,16 @@ class TestAccount(unittest.TestCase): + resp.read() + self.assertEquals(resp.status, 400) + ++ def test_bad_metadata3(self): ++ if skip: ++ raise SkipTest ++ ++ def post(url, token, parsed, conn, extra_headers): ++ headers = {'X-Auth-Token': token} ++ headers.update(extra_headers) ++ conn.request('POST', parsed.path, '', headers) ++ return check_response(conn) ++ + headers = {} + header_value = 'k' * MAX_META_VALUE_LENGTH + size = 0 +Index: swift-1.4.8/test/functionalnosetests/test_container.py +=================================================================== +--- swift-1.4.8.orig/test/functionalnosetests/test_container.py ++++ swift-1.4.8/test/functionalnosetests/test_container.py +@@ -291,6 +291,16 @@ class TestContainer(unittest.TestCase): + resp.read() + self.assertEquals(resp.status, 400) + ++ def test_POST_bad_metadata2(self): ++ if skip: ++ raise SkipTest ++ ++ def post(url, token, parsed, conn, extra_headers): ++ headers = {'X-Auth-Token': token} ++ headers.update(extra_headers) ++ conn.request('POST', parsed.path + '/' + self.name, '', headers) ++ return check_response(conn) ++ + headers = {} + for x in xrange(MAX_META_COUNT): + headers['X-Container-Meta-%d' % x] = 'v' +@@ -304,6 +314,16 @@ class TestContainer(unittest.TestCase): + resp.read() + self.assertEquals(resp.status, 400) + ++ def test_POST_bad_metadata3(self): ++ if skip: ++ raise SkipTest ++ ++ def post(url, token, parsed, conn, extra_headers): ++ headers = {'X-Auth-Token': token} ++ headers.update(extra_headers) ++ conn.request('POST', parsed.path + '/' + self.name, '', headers) ++ return check_response(conn) ++ + headers = {} + header_value = 'k' * MAX_META_VALUE_LENGTH + size = 0 diff -Nru swift-1.4.8/debian/patches/memcache_serialization_support-default-to-zero.patch swift-1.4.8/debian/patches/memcache_serialization_support-default-to-zero.patch --- swift-1.4.8/debian/patches/memcache_serialization_support-default-to-zero.patch 1970-01-01 00:00:00.000000000 +0000 +++ swift-1.4.8/debian/patches/memcache_serialization_support-default-to-zero.patch 2013-06-17 19:56:52.000000000 +0000 @@ -0,0 +1,100 @@ +Author: Jamie Strandboge +Description: default to using insecure pickle serialization, the previous + default +Forwarded: no +Bug: https://launchpad.net/bugs/1006414 + +Index: swift-1.4.8/doc/manpages/proxy-server.conf.5 +=================================================================== +--- swift-1.4.8.orig/doc/manpages/proxy-server.conf.5 2013-06-17 14:45:58.000000000 -0500 ++++ swift-1.4.8/doc/manpages/proxy-server.conf.5 2013-06-17 14:47:32.000000000 -0500 +@@ -211,9 +211,9 @@ + + .PD 0 + .RS 10 +-.IP "0 = older, insecure pickle serialization" ++.IP "0 = older, insecure pickle serialization (insecure, default)" + .IP "1 = json serialization but pickles can still be read (still insecure)" +-.IP "2 = json serialization only (secure and the default)" ++.IP "2 = json serialization only (secure)" + .RE + + .RS 10 +Index: swift-1.4.8/etc/memcache.conf-sample +=================================================================== +--- 
swift-1.4.8.orig/etc/memcache.conf-sample 2013-06-17 14:45:58.000000000 -0500 ++++ swift-1.4.8/etc/memcache.conf-sample 2013-06-17 14:47:57.000000000 -0500 +@@ -5,11 +5,11 @@ + # memcache_servers = 127.0.0.1:11211 + # + # Sets how memcache values are serialized and deserialized: +-# 0 = older, insecure pickle serialization ++# 0 = older, insecure pickle serialization (insecure, default) + # 1 = json serialization but pickles can still be read (still insecure) +-# 2 = json serialization only (secure and the default) ++# 2 = json serialization only (secure) + # To avoid an instant full cache flush, existing installations should + # upgrade with 0, then set to 1 and reload, then after some time (24 hours) + # set to 2 and reload. + # In the future, the ability to use pickle serialization will be removed. +-# memcache_serialization_support = 2 ++# memcache_serialization_support = 0 +Index: swift-1.4.8/etc/proxy-server.conf-sample +=================================================================== +--- swift-1.4.8.orig/etc/proxy-server.conf-sample 2013-06-17 14:45:58.000000000 -0500 ++++ swift-1.4.8/etc/proxy-server.conf-sample 2013-06-17 14:48:18.000000000 -0500 +@@ -124,16 +124,16 @@ + # memcache_servers = 127.0.0.1:11211 + # + # Sets how memcache values are serialized and deserialized: +-# 0 = older, insecure pickle serialization ++# 0 = older, insecure pickle serialization (insecure, default) + # 1 = json serialization but pickles can still be read (still insecure) +-# 2 = json serialization only (secure and the default) ++# 2 = json serialization only (secure) + # If not set here, the value for memcache_serialization_support will be read + # from /etc/swift/memcache.conf (see memcache.conf-sample). + # To avoid an instant full cache flush, existing installations should + # upgrade with 0, then set to 1 and reload, then after some time (24 hours) + # set to 2 and reload. + # In the future, the ability to use pickle serialization will be removed. 
+-# memcache_serialization_support = 2 ++# memcache_serialization_support = 0 + + [filter:ratelimit] + use = egg:swift#ratelimit +Index: swift-1.4.8/test/unit/common/middleware/test_memcache.py +=================================================================== +--- swift-1.4.8.orig/test/unit/common/middleware/test_memcache.py 2013-06-17 14:45:58.000000000 -0500 ++++ swift-1.4.8/test/unit/common/middleware/test_memcache.py 2013-06-17 14:49:15.000000000 -0500 +@@ -48,7 +48,7 @@ + if option == 'memcache_servers': + return '1.2.3.4:5' + elif option == 'memcache_serialization_support': +- return '2' ++ return '0' + else: + raise NoOptionError(option) + else: +@@ -89,7 +89,7 @@ + try: + app = memcache.MemcacheMiddleware( + FakeApp(), {'memcache_servers': '1.2.3.4:5', +- 'memcache_serialization_support': '2'}) ++ 'memcache_serialization_support': '0'}) + except Exception, err: + exc = err + finally: +Index: swift-1.4.8/swift/common/middleware/memcache.py +=================================================================== +--- swift-1.4.8.orig/swift/common/middleware/memcache.py 2013-06-17 14:45:58.000000000 -0500 ++++ swift-1.4.8/swift/common/middleware/memcache.py 2013-06-17 14:52:48.000000000 -0500 +@@ -51,7 +51,7 @@ + if not self.memcache_servers: + self.memcache_servers = '127.0.0.1:11211' + if serialization_format is None: +- serialization_format = 2 ++ serialization_format = 0 + + self.memcache = MemcacheRing( + [s.strip() for s in self.memcache_servers.split(',') if s.strip()], diff -Nru swift-1.4.8/debian/patches/series swift-1.4.8/debian/patches/series --- swift-1.4.8/debian/patches/series 2012-04-10 13:23:59.000000000 +0000 +++ swift-1.4.8/debian/patches/series 2015-07-22 19:17:20.000000000 +0000 @@ -1,2 +1,8 @@ fix-ubuntu-unittests.patch fix-doc-no-network.patch +CVE-2013-2161.patch +CVE-2012-4406.patch +memcache_serialization_support-default-to-zero.patch +CVE-2013-4155.patch +CVE-2014-0006.patch +CVE-2014-7960.patch
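
The following is an editorial sketch, not part of the debdiff: a minimal standalone rendering of the serialization scheme that CVE-2012-4406.patch adds to swift/common/memcached.py. Values are written as JSON unless pickle is explicitly allowed, and pickled values are only read back while unpickling is still permitted. The flag constants are taken from the patch; the helper names serialize and deserialize are illustrative and are not Swift's API.

import json
import pickle

PICKLE_FLAG = 1       # flag values as defined in the patched memcached.py
JSON_FLAG = 2
PICKLE_PROTOCOL = 2

def serialize(value, allow_pickle):
    # Mirrors the patched MemcacheRing.set() path: pickle only when the
    # deployment is still at memcache_serialization_support level 0.
    if allow_pickle:
        return pickle.dumps(value, PICKLE_PROTOCOL), PICKLE_FLAG
    return json.dumps(value), JSON_FLAG

def deserialize(payload, flags, allow_unpickle):
    # Mirrors the patched MemcacheRing.get() path: at level 2 a pickled
    # cache entry is refused and treated as a miss rather than unpickled.
    if flags & PICKLE_FLAG:
        return pickle.loads(payload) if allow_unpickle else None
    if flags & JSON_FLAG:
        return json.loads(payload)
    return payload

# Level 1 behaviour: write JSON, but still read old pickled entries.
payload, flags = serialize([1, 2, 3], allow_pickle=False)
assert deserialize(payload, flags, allow_unpickle=True) == [1, 2, 3]

The memcache middleware in the patch maps the configuration value onto these two switches: level 0 sets allow_pickle, level 1 sets only allow_unpickle, and level 2 sets neither. Upstream defaults to 2, while this Ubuntu update keeps the previous pickle default (0) for upgraders via memcache_serialization_support-default-to-zero.patch.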
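
CVE-2014-0006.patch replaces the short-circuiting temp_url_sig != hmac_val comparisons in the tempURL middleware with streq_const_time() from swift.common.utils. The sketch below shows a generic constant-time comparison in the same spirit; it is not Swift's implementation, and current Python code would typically use hmac.compare_digest instead.

def streq_const_time_sketch(s1, s2):
    # Compare two strings in time that depends only on their lengths, so
    # response timing no longer reveals how many leading characters of a
    # guessed tempURL signature were correct.
    if len(s1) != len(s2):
        return False
    result = 0
    for a, b in zip(s1, s2):
        result |= ord(a) ^ ord(b)
    return result == 0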
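
CVE-2014-7960.patch enforces account and container metadata limits against the accumulated metadata in DatabaseBroker.update_metadata(), via the new validate_metadata() static method, rather than only per request; checking per request is what allowed the bypass through repeated requests. A simplified standalone version is sketched below. The limit values are the usual swift.common.constraints defaults and are assumptions here, and it raises ValueError where the real code raises webob's HTTPBadRequest.

MAX_META_COUNT = 90            # assumed defaults from swift.common.constraints
MAX_META_OVERALL_SIZE = 4096

def validate_metadata(metadata):
    # metadata maps header name -> (value, timestamp); empty values mark a
    # key for removal and are not counted against the limits.
    meta_count = 0
    meta_size = 0
    for key, (value, _timestamp) in metadata.items():
        key = key.lower()
        if value != '' and (key.startswith('x-account-meta') or
                            key.startswith('x-container-meta')):
            prefix = 'x-account-meta-'
            if key.startswith('x-container-meta-'):
                prefix = 'x-container-meta-'
            key = key[len(prefix):]
            meta_count += 1
            meta_size += len(key) + len(value)
    if meta_count > MAX_META_COUNT:
        raise ValueError('Too many metadata items; max %d' % MAX_META_COUNT)
    if meta_size > MAX_META_OVERALL_SIZE:
        raise ValueError('Total metadata too large; max %d'
                         % MAX_META_OVERALL_SIZE)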