diff -Nru storm-0.18/debian/changelog storm-0.19/debian/changelog --- storm-0.18/debian/changelog 2011-05-31 22:16:37.000000000 +0000 +++ storm-0.19/debian/changelog 2011-11-14 00:24:03.000000000 +0000 @@ -1,3 +1,10 @@ +storm (0.19-1) unstable; urgency=low + + * New upstream release. + * Update copyright dates. + + -- Miguel Landaeta Thu, 10 Nov 2011 19:03:56 -0430 + storm (0.18-1) unstable; urgency=low * Initial release in Debian. (Closes: #511253). diff -Nru storm-0.18/debian/copyright storm-0.19/debian/copyright --- storm-0.18/debian/copyright 2011-05-31 20:18:22.000000000 +0000 +++ storm-0.19/debian/copyright 2011-11-11 01:10:42.000000000 +0000 @@ -5,7 +5,7 @@ Comment: Upstream author is Gustavo Niemeyer . Files: * -Copyright: © 2006-2010, Canonical. +Copyright: © 2006-2011, Canonical. License: LGPL-2.1 Files: tests/mocker.py diff -Nru storm-0.18/NEWS storm-0.19/NEWS --- storm-0.18/NEWS 2010-10-23 20:27:31.000000000 +0000 +++ storm-0.19/NEWS 2011-10-03 14:55:04.000000000 +0000 @@ -1,3 +1,68 @@ +0.19 (2011-10-03) +================= + +Improvements +------------ + - A new Cast expressions compiles an input and the type to cast it to + into a call the CAST function (bug #681121). + - The storm.zope.testing.ZStormResourceManager now supports applying database + schemas using a custom URI, typically for connecting to the database using a + different user with greater privileges than the user running the tests. Note + that the format of the 'databases' parameter passed to the constructor of + the ZStormResourceManager class has changed. So now you have to create your + resource manager roughly like this:: + + databases = [{"name": "test", + "uri": "postgres://user@host/db", + "schema": Schema(...), + "schema-uri: "postgres://schema_user@host/db"}] + manager = ZStormResourceManager(databases) + + where the "schema-uri" key is optional and would default to "uri" if + not given. The old format of the 'databases' parameter is still supported + but deprecated. 
(bug #772258) + - A new storm.twisted.transact module has been added with facilities to + integrate Storm with twisted, by running transactions in a separate + thread from the main one in order to not block the reactor. (bug #777047) + - ResultSet.config's "distinct" argument now also accepts a tuple of + columns, which will be turned into a DISTINCT ON clause. + - Provide wrapped cursor objects in the Django integration + layer. Catch some disconnection errors that might otherwise be + missed, leading to broken connections. (bug #816049) + - A new JSON property is available. It's similar to the existing + Pickle property, except that it serializes data as JSON, and must + back onto a text column rather than a byte column. (bug #726799, #846867) + - Cache the compilation of columns and tables (bug #826170, #848925). + - Add two new tracers extracted from the Launchpad codebase. + BaseStatementTracer provides statements with parameters substituted + to its subclasses. TimelineTracer records queries in a timeline + (useful for OOPS reports). + - New ROW constructor (bug #698344). + - Add support for Postgres DISTINCT ON queries. (bug #374777) + +Bug fixes +--------- + - When retrieving and using an object with Store.get(), Storm will no + longer issue two queries when there is a live invalidated object + (bug #697513). + - When a datetime object is returned by the database driver, + DateVariable failed to detect and convert it to a date() object + (bug #391601). + - The ISQLObjectResultSet declares an is_empty method, which matches + the existing implementation. This makes it possible to call the + method in security proxied environments (bug #759384). + - The UUIDVariable correctly converts inputs to unicode before + sending them to the database. This makes the UUID property usable + (bug #691752). 
+ - Move the firing of the register-transaction event in + Connection.execute before the connection checking, to make sure + that the store gets registered properly for future rollbacks (bug + #819282). + - Skip tests/zope/README.txt when zope.security is not found. (bug #848848) + - Fix the handling of disconnection errors in the storm.django bridge. (bug + #854787) + + 0.18 (2010-10-25) ================= diff -Nru storm-0.18/PKG-INFO storm-0.19/PKG-INFO --- storm-0.18/PKG-INFO 2010-10-23 20:27:56.000000000 +0000 +++ storm-0.19/PKG-INFO 2011-10-05 13:11:24.000000000 +0000 @@ -1,6 +1,6 @@ Metadata-Version: 1.0 Name: storm -Version: 0.18 +Version: 0.19 Summary: Storm is an object-relational mapper (ORM) for Python developed at Canonical. Home-page: https://storm.canonical.com Author: Storm Developers @@ -9,7 +9,7 @@ Download-URL: https://launchpad.net/storm/+download Description: UNKNOWN Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta +Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) Classifier: Programming Language :: Python diff -Nru storm-0.18/setup.cfg storm-0.19/setup.cfg --- storm-0.18/setup.cfg 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/setup.cfg 2011-10-05 13:11:24.000000000 +0000 @@ -1,2 +1,8 @@ [sdist] -formats = bztar +formats = bztar, gztar + +[egg_info] +tag_build = +tag_date = 0 +tag_svn_revision = 0 + diff -Nru storm-0.18/setup.py storm-0.19/setup.py --- storm-0.18/setup.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/setup.py 2011-09-25 18:45:14.000000000 +0000 @@ -45,7 +45,7 @@ include_package_data=True, package_data={"": ["*.zcml"]}, classifiers=[ - "Development Status :: 4 - Beta", + "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Programming Language :: Python", 
diff -Nru storm-0.18/storm/compat.py storm-0.19/storm/compat.py --- storm-0.18/storm/compat.py 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/storm/compat.py 2011-09-25 18:45:14.000000000 +0000 @@ -0,0 +1,31 @@ +# +# Copyright (c) 2011 Canonical +# +# Written by Gustavo Niemeyer +# +# This file is part of Storm Object Relational Mapper. +# +# Storm is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as +# published by the Free Software Foundation; either version 2.1 of +# the License, or (at your option) any later version. +# +# Storm is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program. If not, see . +# + +__all__ = ["json"] + + +try: + import json +except ImportError: + try: + import simplejson as json + except ImportError: + json = None diff -Nru storm-0.18/storm/database.py storm-0.19/storm/database.py --- storm-0.18/storm/database.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/database.py 2011-10-03 14:28:37.000000000 +0000 @@ -26,7 +26,8 @@ """ from storm.expr import Expr, State, compile -from storm.tracer import trace +# Circular import: imported at the end of the module. 
+# from storm.tracer import trace from storm.variables import Variable from storm.exceptions import ( ClosedError, ConnectionBlockedError, DatabaseError, DisconnectionError, @@ -224,9 +225,9 @@ raise ClosedError("Connection is closed") if self._blocked: raise ConnectionBlockedError("Access to connection is blocked") - self._ensure_connected() if self._event: self._event.emit("register-transaction") + self._ensure_connected() if isinstance(statement, Expr): if params is not None: raise ValueError("Can't pass parameters with expressions") @@ -352,7 +353,7 @@ else: self._state = STATE_CONNECTED - def is_disconnection_error(self, exc): + def is_disconnection_error(self, exc, extra_disconnection_errors=()): """Check whether an exception represents a database disconnection. This should be overridden by backends to detect whichever @@ -362,10 +363,14 @@ def _check_disconnect(self, function, *args, **kwargs): """Run the given function, checking for database disconnections.""" + # Allow the caller to specify additional exception types that + # should be treated as possible disconnection errors. + extra_disconnection_errors = kwargs.pop( + 'extra_disconnection_errors', ()) try: return function(*args, **kwargs) - except Error, exc: - if self.is_disconnection_error(exc): + except Exception, exc: + if self.is_disconnection_error(exc, extra_disconnection_errors): self._state = STATE_DISCONNECTED self._raw_connection = None raise DisconnectionError(str(exc)) @@ -458,3 +463,6 @@ None, None, [""]) factory = module.create_from_uri return factory(uri) + +# Deal with circular import. 
+from storm.tracer import trace diff -Nru storm-0.18/storm/databases/mysql.py storm-0.19/storm/databases/mysql.py --- storm-0.18/storm/databases/mysql.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/databases/mysql.py 2011-10-03 14:28:37.000000000 +0000 @@ -114,9 +114,10 @@ else: yield param - def is_disconnection_error(self, exc): + def is_disconnection_error(self, exc, extra_disconnection_errors=()): # http://dev.mysql.com/doc/refman/5.0/en/gone-away.html - return (isinstance(exc, OperationalError) and + return (isinstance(exc, (OperationalError, + extra_disconnection_errors)) and exc.args[0] in (2006, 2013)) # (SERVER_GONE_ERROR, SERVER_LOST) diff -Nru storm-0.18/storm/databases/postgres.py storm-0.19/storm/databases/postgres.py --- storm-0.18/storm/databases/postgres.py 2010-10-23 18:39:42.000000000 +0000 +++ storm-0.19/storm/databases/postgres.py 2011-10-03 14:28:37.000000000 +0000 @@ -276,9 +276,9 @@ else: yield param - def is_disconnection_error(self, exc): + def is_disconnection_error(self, exc, extra_disconnection_errors=()): if not isinstance(exc, (InterfaceError, OperationalError, - ProgrammingError)): + ProgrammingError, extra_disconnection_errors)): return False # XXX: 2007-09-17 jamesh @@ -291,7 +291,8 @@ "no connection to the server" in msg or "connection not open" in msg or "connection already closed" in msg or - "losed the connection unexpectedly" in msg) + "losed the connection unexpectedly" in msg or + "could not receive data from server" in msg) class Postgres(Database): diff -Nru storm-0.18/storm/django/backend/base.py storm-0.19/storm/django/backend/base.py --- storm-0.18/storm/django/backend/base.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/django/backend/base.py 2011-10-03 14:28:37.000000000 +0000 @@ -18,6 +18,10 @@ def _get_connection(self): if self._store is None: self._store = get_store(settings.DATABASE_NAME) + # Make sure that the store is registered with the transaction + # manager: we don't know what the 
connection will be used for. + self._store._event.emit("register-transaction") + self._store._connection._ensure_connected() return self._store._connection._raw_connection def _set_connection(self, connection): @@ -26,10 +30,13 @@ connection = property(_get_connection, _set_connection) + def _valid_connection(self): + # Storm handles the connection liveness checks. + return True + def _cursor(self, *args): cursor = super(StormDatabaseWrapperMixin, self)._cursor(*args) - self._store._event.emit("register-transaction") - return cursor + return StormCursorWrapper(self._store, cursor) def _commit(self): #print "commit" @@ -45,6 +52,50 @@ self._store = None +class StormCursorWrapper(object): + """A cursor wrapper that checks for disconnection errors.""" + + def __init__(self, store, cursor): + self._connection = store._connection + self._cursor = cursor + + def _check_disconnect(self, *args, **kwargs): + from django.db import DatabaseError as DjangoDatabaseError + kwargs['extra_disconnection_errors'] = DjangoDatabaseError + return self._connection._check_disconnect(*args, **kwargs) + + def execute(self, statement, *args): + """Execute an SQL statement.""" + return self._check_disconnect(self._cursor.execute, statement, *args) + + def fetchone(self): + """Fetch one row from the result.""" + return self._check_disconnect(self._cursor.fetchone) + + def fetchall(self): + """Fetch all rows from the result.""" + return self._check_disconnect(self._cursor.fetchall) + + def fetchmany(self, *args): + """Fetch multiple rows from the result.""" + return self._check_disconnect(self._cursor.fetchmany, *args) + + @property + def description(self): + """Fetch the description of the result.""" + return self._check_disconnect(getattr, self._cursor, "description") + + @property + def rowcount(self): + """Fetch the number of rows in the result.""" + return self._check_disconnect(getattr, self._cursor, "rowcount") + + @property + def query(self): + """Fetch the last executed query.""" + 
return self._check_disconnect(getattr, self._cursor, "query") + + PostgresStormDatabaseWrapper = None MySQLStormDatabaseWrapper = None diff -Nru storm-0.18/storm/expr.py storm-0.19/storm/expr.py --- storm-0.18/storm/expr.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/expr.py 2011-09-25 18:45:14.000000000 +0000 @@ -79,10 +79,10 @@ Use this as: - @compile.when(TypeA, TypeB) - def compile_type_a_or_b(compile, expr, state): - ... - return "THE COMPILED SQL STATEMENT" + >>> @compile.when(TypeA, TypeB) + >>> def compile_type_a_or_b(compile, expr, state): + >>> ... + >>> return "THE COMPILED SQL STATEMENT" """ return _when(self, types) @@ -656,10 +656,13 @@ @compile.when(Select) def compile_select(compile, select, state): tokens = ["SELECT "] - if select.distinct: - tokens.append("DISTINCT ") state.push("auto_tables", []) state.push("context", COLUMN) + if select.distinct: + tokens.append("DISTINCT ") + if isinstance(select.distinct, (tuple, list)): + tokens.append( + "ON (%s) " % compile(select.distinct, state, raw=True)) tokens.append(compile(select.columns, state)) tables_pos = len(tokens) parameters_pos = len(state.parameters) @@ -797,7 +800,8 @@ @ivar variable_factory: Factory producing C{Variable} instances typed according to this column. 
""" - __slots__ = ("name", "table", "primary", "variable_factory") + __slots__ = ("name", "table", "primary", "variable_factory", + "compile_cache", "compile_id") def __init__(self, name=Undef, table=Undef, primary=False, variable_factory=None): @@ -805,6 +809,8 @@ self.table = table self.primary = int(primary) self.variable_factory = variable_factory or Variable + self.compile_cache = None + self.compile_id = None @compile.when(Column) def compile_column(compile, column, state): @@ -816,11 +822,17 @@ alias = state.aliases.get(column) if alias is not None: return compile(alias.name, state, token=True) - return compile(column.name, state, token=True) + if column.compile_id != id(compile): + column.compile_cache = compile(column.name, state, token=True) + column.compile_id = id(compile) + return column.compile_cache state.push("context", COLUMN_PREFIX) table = compile(column.table, state, token=True) state.pop() - return "%s.%s" % (table, compile(column.name, state, token=True)) + if column.compile_id != id(compile): + column.compile_cache = compile(column.name, state, token=True) + column.compile_id = id(compile) + return "%s.%s" % (table, column.compile_cache) @compile_python.when(Column) def compile_python_column(compile, column, state): @@ -867,14 +879,20 @@ class Table(FromExpr): - __slots__ = ("name",) + __slots__ = ("name", "compile_cache", "compile_id") def __init__(self, name): self.name = name + self.compile_cache = None + self.compile_id = None + @compile.when(Table) def compile_table(compile, table, state): - return compile(table.name, state, token=True) + if table.compile_id != id(compile): + table.compile_cache = compile(table.name, state, token=True) + table.compile_id = id(compile) + return table.compile_cache class JoinExpr(FromExpr): @@ -1286,6 +1304,31 @@ name = "COALESCE" +class Row(NamedFunc): + __slots__ = () + name = "ROW" + + +class Cast(FuncExpr): + """A representation of C{CAST} clauses. 
e.g., C{CAST(bar AS TEXT)}.""" + __slots__ = ("column", "type") + name = "CAST" + + def __init__(self, column, type): + """Create a cast of C{column} as C{type}.""" + self.column = column + self.type = type + + +@compile.when(Cast) +def compile_cast(compile, cast, state): + """Compile L{Cast} expressions.""" + state.push("context", EXPR) + column = compile(cast.column, state) + state.pop() + return "CAST(%s AS %s)" % (column, cast.type) + + # -------------------------------------------------------------------- # Prefix and suffix expressions diff -Nru storm-0.18/storm/info.py storm-0.19/storm/info.py --- storm-0.18/storm/info.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/info.py 2011-09-25 18:45:14.000000000 +0000 @@ -18,11 +18,11 @@ # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see . # -from weakref import ref, WeakKeyDictionary +from weakref import ref from storm.exceptions import ClassInfoError -from storm.expr import Expr, FromExpr, Column, Desc, TABLE -from storm.expr import SQLToken, CompileError, compile +from storm.expr import Column, Desc, TABLE +from storm.expr import compile, Table from storm.event import EventSystem from storm import Undef, has_cextensions @@ -40,9 +40,11 @@ obj_info = ObjectInfo(obj) return obj.__dict__.setdefault("__storm_object_info__", obj_info) + def set_obj_info(obj, obj_info): obj.__dict__["__storm_object_info__"] = obj_info + def get_cls_info(cls): if "__storm_class_info__" in cls.__dict__: # Can't use attribute access here, otherwise subclassing won't work. @@ -51,6 +53,7 @@ cls.__storm_class_info__ = ClassInfo(cls) return cls.__storm_class_info__ + class ClassInfo(dict): """Persistent storm-related information of a class. 
@@ -71,7 +74,7 @@ self.cls = cls if isinstance(self.table, basestring): - self.table = SQLToken(self.table) + self.table = Table(self.table) pairs = [] for attr in dir(cls): @@ -79,7 +82,6 @@ if isinstance(column, Column): pairs.append((attr, column)) - pairs.sort() self.columns = tuple(pair[1] for pair in pairs) @@ -121,7 +123,6 @@ self.primary_key_pos = tuple(id_positions[id(column)] for column in self.primary_key) - __order__ = getattr(cls, "__storm_order__", None) if __order__ is None: self.default_order = Undef @@ -151,7 +152,7 @@ __hash__ = object.__hash__ # For get_obj_info(), an ObjectInfo is its own obj_info. - __storm_object_info__ = property(lambda self:self) + __storm_object_info__ = property(lambda self: self) def __init__(self, obj): # FASTPATH This method is part of the fast path. Be careful when @@ -171,7 +172,7 @@ column.variable_factory(column=column, event=event, validator_object_factory=self.get_obj) - + self.primary_vars = tuple(variables[column] for column in self.cls_info.primary_key) @@ -199,7 +200,6 @@ from storm.cextensions import ObjectInfo, get_obj_info - class ClassAlias(object): """Create a named alias for a Storm class for use in queries. @@ -229,8 +229,7 @@ cls._storm_alias_cache = {} elif name in cache: return cache[name] - cls_info = get_cls_info(cls) - alias_cls = type(cls.__name__+"Alias", (self_cls,), + alias_cls = type(cls.__name__ + "Alias", (self_cls,), {"__storm_table__": name}) alias_cls.__bases__ = (cls, self_cls) alias_cls_info = get_cls_info(alias_cls) diff -Nru storm-0.18/storm/__init__.py storm-0.19/storm/__init__.py --- storm-0.18/storm/__init__.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/__init__.py 2011-10-03 14:56:13.000000000 +0000 @@ -28,7 +28,7 @@ # that this isn't the 0.15 release version while also allowing us to release # an 0.15.1 if need be. Release versions should use 2-digit version numbers, # with 0.16 being the next release version in this example. 
-version = "0.18" +version = "0.19" version_info = tuple([int(x) for x in version.split(".")]) diff -Nru storm-0.18/storm/locals.py storm-0.19/storm/locals.py --- storm-0.18/storm/locals.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/locals.py 2011-09-25 18:45:14.000000000 +0000 @@ -18,9 +18,9 @@ # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see . # -from storm.properties import Bool, Int, Float, RawStr, Chars, Unicode, Pickle -from storm.properties import List, Decimal, DateTime, Date, Time, Enum -from storm.properties import TimeDelta +from storm.properties import Bool, Int, Float, RawStr, Chars, Unicode +from storm.properties import List, Decimal, DateTime, Date, Time, Enum, UUID +from storm.properties import TimeDelta, Pickle, JSON from storm.references import Reference, ReferenceSet, Proxy from storm.database import create_database from storm.exceptions import StormError diff -Nru storm-0.18/storm/properties.py storm-0.19/storm/properties.py --- storm-0.18/storm/properties.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/properties.py 2011-09-25 18:45:14.000000000 +0000 @@ -29,14 +29,14 @@ Variable, VariableFactory, BoolVariable, IntVariable, FloatVariable, DecimalVariable, RawStrVariable, UnicodeVariable, DateTimeVariable, DateVariable, TimeVariable, TimeDeltaVariable, UUIDVariable, - PickleVariable, ListVariable, EnumVariable) + PickleVariable, JSONVariable, ListVariable, EnumVariable) __all__ = ["Property", "SimpleProperty", "Bool", "Int", "Float", "Decimal", "RawStr", "Unicode", "DateTime", "Date", "Time", "TimeDelta", "UUID", "Enum", - "Pickle", "List", "PropertyRegistry"] + "Pickle", "JSON", "List", "PropertyRegistry"] class Property(object): @@ -172,6 +172,9 @@ class Pickle(SimpleProperty): variable_class = PickleVariable +class JSON(SimpleProperty): + variable_class = JSONVariable + class List(SimpleProperty): variable_class = ListVariable diff -Nru 
storm-0.18/storm/sqlobject.py storm-0.19/storm/sqlobject.py --- storm-0.18/storm/sqlobject.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/sqlobject.py 2011-09-25 18:45:14.000000000 +0000 @@ -552,7 +552,7 @@ return item in result_set def __nonzero__(self): - """Return C{True} if this result set doesn't contain any results. + """Return C{True} if this result set contains any results. @note: This method is provided for compatibility with SQL Object. For new code, prefer L{is_empty}. It's compatible with L{ResultSet} diff -Nru storm-0.18/storm/store.py storm-0.19/storm/store.py --- storm-0.18/storm/store.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/store.py 2011-09-25 18:45:14.000000000 +0000 @@ -167,12 +167,7 @@ primary_values = tuple(var.get(to_db=True) for var in primary_vars) obj_info = self._alive.get((cls_info.cls, primary_values)) - if obj_info is not None: - if obj_info.get("invalidated"): - try: - self._validate_alive(obj_info) - except LostObjectError: - return None + if obj_info is not None and not obj_info.get("invalidated"): return self._get_object(obj_info) where = compare_columns(cls_info.primary_key, primary_vars) @@ -940,8 +935,9 @@ def config(self, distinct=None, offset=None, limit=None): """Configure this result object in-place. All parameters are optional. - @param distinct: Boolean enabling/disabling usage of the DISTINCT - keyword in the query made. + @param distinct: If True, enables usage of the DISTINCT keyword in + the query. If a tuple or list of columns, inserts a DISTINCT ON + (only supported by PostgreSQL). @param offset: Offset where results will start to be retrieved from the result set. 
@param limit: Limit the number of objects retrieved from the diff -Nru storm-0.18/storm/tracer.py storm-0.19/storm/tracer.py --- storm-0.18/storm/tracer.py 2010-10-23 18:39:05.000000000 +0000 +++ storm-0.19/storm/tracer.py 2011-09-25 18:45:14.000000000 +0000 @@ -1,6 +1,10 @@ from datetime import datetime +import re import sys +import threading +# Circular import: imported at the end of the module. +# from storm.database import convert_param_marks from storm.exceptions import TimeoutError from storm.expr import Variable @@ -104,6 +108,103 @@ % self.__class__.__name__) +class BaseStatementTracer(object): + """Storm tracer base class that does query interpolation.""" + + def connection_raw_execute(self, connection, raw_cursor, + statement, params): + statement_to_log = statement + if params: + # There are some bind parameters so we want to insert them into + # the sql statement so we can log the statement. + query_params = list(connection.to_database(params)) + if connection.param_mark == '%s': + # Double the %'s in the string so that python string formatting + # can restore them to the correct number. Note that %s needs to + # be preserved as that is where we are substituting values in. + quoted_statement = re.sub( + "%%%", "%%%%", re.sub("%([^s])", r"%%\1", statement)) + else: + # Double all the %'s in the statement so that python string + # formatting can restore them to the correct number. Any %s in + # the string should be preserved because the param_mark is not + # %s. + quoted_statement = re.sub("%", "%%", statement) + quoted_statement = convert_param_marks( + quoted_statement, connection.param_mark, "%s") + # We need to massage the query parameters a little to deal with + # string parameters which represent encoded binary data. 
+ render_params = [] + for param in query_params: + if isinstance(param, unicode): + render_params.append(repr(param.encode('utf8'))) + else: + render_params.append(repr(param)) + try: + statement_to_log = quoted_statement % tuple(render_params) + except TypeError: + statement_to_log = \ + "Unformattable query: %r with params %r." % ( + statement, query_params) + self._expanded_raw_execute(connection, raw_cursor, statement_to_log) + + def _expanded_raw_execute(self, connection, raw_cursor, statement): + """Called by connection_raw_execute after parameter substitution.""" + raise NotImplementedError(self._expanded_raw_execute) + + +class TimelineTracer(BaseStatementTracer): + """Storm tracer class to insert executed statements into a L{Timeline}. + + For more information on timelines see the module at + http://pypi.python.org/pypi/timeline. + + The timeline to use is obtained by calling the timeline_factory supplied to + the constructor. This simple function takes no parameters and returns a + timeline to use. If it returns None, the tracer is bypassed. + """ + + def __init__(self, timeline_factory, prefix='SQL-'): + """Create a TimelineTracer. + + @param timeline_factory: A factory function to produce the timeline to + record a query against. + @param prefix: A prefix to give the connection name when starting an + action. Connection names are found by trying a getattr for 'name' + on the connection object. If no name has been assigned, '' + is used instead. + """ + super(TimelineTracer, self).__init__() + self.timeline_factory = timeline_factory + self.prefix = prefix + # Stores the action in progress in a given thread. 
+ self.threadinfo = threading.local() + + def _expanded_raw_execute(self, connection, raw_cursor, statement): + timeline = self.timeline_factory() + if timeline is None: + return + connection_name = getattr(connection, 'name', '') + action = timeline.start(self.prefix + connection_name, statement) + self.threadinfo.action = action + + def connection_raw_execute_success(self, connection, raw_cursor, + statement, params): + + # action may be None if the tracer was installed after the statement + # was submitted. + action = getattr(self.threadinfo, 'action', None) + if action is not None: + action.finish() + + def connection_raw_execute_error(self, connection, raw_cursor, + statement, params, error): + # Since we are just logging durations, we execute the same + # hook code for errors as successes. + self.connection_raw_execute_success( + connection, raw_cursor, statement, params) + + _tracers = [] def trace(name, *args, **kwargs): @@ -130,3 +231,6 @@ remove_tracer_type(DebugTracer) if flag: install_tracer(DebugTracer(stream=stream)) + +# Deal with circular import. +from storm.database import convert_param_marks diff -Nru storm-0.18/storm/twisted/testing.py storm-0.19/storm/twisted/testing.py --- storm-0.18/storm/twisted/testing.py 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/storm/twisted/testing.py 2011-09-25 18:45:14.000000000 +0000 @@ -0,0 +1,57 @@ +import transaction + +from twisted.python.failure import Failure +from twisted.internet.defer import execute + + +class FakeThreadPool(object): + """ + A fake L{twisted.python.threadpool.ThreadPool}, running functions inside + the main thread instead for easing tests. 
+ """ + + def callInThreadWithCallback(self, onResult, func, *args, **kw): + success = True + try: + result = func(*args, **kw) + except: + result = Failure() + success = False + + onResult(success, result) + + +class FakeTransactor(object): + """ + A fake C{Transactor} wrapper that runs the given function in the main + thread and performs basic checks on its return value. If it has a + C{__storm_table__} property a C{RuntimeError} is raised because Storm + objects cannot be used outside the thread in which they were created. + + @see L{Transactor}. + """ + + def __init__(self, _transaction=None): + if _transaction is None: + _transaction = transaction + self._transaction = _transaction + + def run(self, function, *args, **kwargs): + return execute(self._wrap, function, *args, **kwargs) + + def _wrap(self, function, *args, **kwargs): + try: + result = function(*args, **kwargs) + if getattr(result, "__storm_table__", None) is not None: + raise RuntimeError("Attempted to return a Storm object from a " + "transaction") + except: + self._transaction.abort() + raise + else: + try: + self._transaction.commit() + except: + self._transaction.abort() + raise + return result diff -Nru storm-0.18/storm/twisted/transact.py storm-0.19/storm/twisted/transact.py --- storm-0.18/storm/twisted/transact.py 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/storm/twisted/transact.py 2011-09-25 18:45:14.000000000 +0000 @@ -0,0 +1,130 @@ +import time +import random +import transaction + +from functools import wraps + +from zope.component import getUtility + +from storm.zope.interfaces import IZStorm +from storm.exceptions import IntegrityError, DisconnectionError + +from twisted.internet.threads import deferToThreadPool + + +class Transactor(object): + """Run in a thread code that needs to interact with the database. 
+ + This class makes sure that code interacting with the database is run + in a separate thread and that the associated transaction is aborted or + committed in the same thread. + + @param threadpool: The C{ThreadPool} to get threads from. + @param _transaction: The C{TransactionManager} to use, for test cases only. + + @ivar retries: Maximum number of retries upon retriable exceptions. The + default is to retry a function up to 2 times upon possibly transient + or spurious errors like L{IntegrityError} and L{DisconnectionError}. + + @see: C{twisted.python.threadpool.ThreadPool} + """ + retries = 2 + + def __init__(self, threadpool, _transaction=None): + self._threadpool = threadpool + if _transaction is None: + _transaction = transaction + self._transaction = _transaction + self._retriable_errors = (DisconnectionError, IntegrityError) + try: + from psycopg2.extensions import TransactionRollbackError + self._retriable_errors += (TransactionRollbackError,) + except ImportError: + pass + + def run(self, function, *args, **kwargs): + """Run C{function} in a thread. + + The function is run in a thread by a function wrapper, which + commits the transaction if the function runs successfully. If it + raises an exception the transaction is aborted. + + @param function: The function to run. + @param args: Positional arguments to pass to C{function}. + @param kwargs: Keyword arguments to pass to C{function}. + @return: A C{Deferred} that will fire after the function has been run. 
+ """ + # Inline the reactor import here for sake of safeness, in case a + # custom reactor needs to be installed + from twisted.internet import reactor + return deferToThreadPool( + reactor, self._threadpool, self._wrap, function, *args, **kwargs) + + def _wrap(self, function, *args, **kwargs): + retries = 0 + while True: + try: + result = function(*args, **kwargs) + self._transaction.commit() + except self._retriable_errors, error: + if isinstance(error, DisconnectionError): + # If we got a disconnection, calling rollback may not be + # enough because psycopg2 doesn't necessarily use the + # connection, so we call a dummy query to be sure that all + # the stores are correct. + zstorm = getUtility(IZStorm) + for name, store in zstorm.iterstores(): + try: + store.execute("SELECT 1") + except DisconnectionError: + pass + self._transaction.abort() + if retries < self.retries: + retries += 1 + time.sleep(random.uniform(1, 2 ** retries)) + continue + else: + raise + except: + self._transaction.abort() + raise + else: + return result + + +def transact(method): + """Decorate L{method} so that it is invoked via L{Transactor.run}. + + @param method: The method to decorate. + @return: A decorated method. + + @note: The return value of the decorated method should *not* contain + any reference to Storm objects, because they were retrieved in + a different thread and cannot be used outside it. + + Example: + + from twisted.python.threadpool import ThreadPool + from storm.twisted.transact import Transactor, transact + + class Foo(object): + + def __init__(self, transactor): + self.transactor = transactor + + @transact + def bar(self): + # code that uses Storm + + threadpool = ThreadPool(0, 10) + threadpool.start() + transactor = Transactor(threadpool) + foo = Foo(transactor) + deferred = foo.bar() + deferred.addCallback(...) 
+ """ + + @wraps(method) + def wrapper(self, *args, **kwargs): + return self.transactor.run(method, self, *args, **kwargs) + return wrapper diff -Nru storm-0.18/storm/variables.py storm-0.19/storm/variables.py --- storm-0.18/storm/variables.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/variables.py 2011-09-25 18:45:14.000000000 +0000 @@ -27,6 +27,7 @@ except ImportError: uuid = None +from storm.compat import json from storm.exceptions import NoneError from storm import Undef, has_cextensions @@ -48,6 +49,7 @@ "EnumVariable", "UUIDVariable", "PickleVariable", + "JSONVariable", "ListVariable", ] @@ -436,6 +438,8 @@ if from_db: if value is None: return None + if isinstance(value, datetime): + return value.date() if isinstance(value, date): return value if not isinstance(value, (str, unicode)): @@ -507,7 +511,7 @@ def parse_get(self, value, to_db): if to_db: - return str(value) + return unicode(value) return value @@ -564,7 +568,7 @@ if (self._checkpoint_state is not Undef and self.get_state() != self._checkpoint_state): self.event.emit("changed", self, None, self._value, False) - + def _detect_changes_and_stop(self, obj_info): self._detect_changes(obj_info) if self._event_system is not None: @@ -584,29 +588,67 @@ super(MutableValueVariable, self).set(value, from_db) -class PickleVariable(MutableValueVariable): +class EncodedValueVariable(MutableValueVariable): + __slots__ = () def parse_set(self, value, from_db): if from_db: if isinstance(value, buffer): value = str(value) - return pickle.loads(value) + return self._loads(value) else: return value def parse_get(self, value, to_db): if to_db: - return pickle.dumps(value, -1) + return self._dumps(value) else: return value def get_state(self): - return (self._lazy_value, pickle.dumps(self._value, -1)) + return (self._lazy_value, self._dumps(self._value)) def set_state(self, state): self._lazy_value = state[0] - self._value = pickle.loads(state[1]) + self._value = self._loads(state[1]) + + +class 
PickleVariable(EncodedValueVariable): + + def _loads(self, value): + return pickle.loads(value) + + def _dumps(self, value): + return pickle.dumps(value, -1) + + +class JSONVariable(EncodedValueVariable): + + __slots__ = () + + def __init__(self, *args, **kwargs): + assert json is not None, ( + "Neither the json nor the simplejson module was found.") + super(JSONVariable, self).__init__(*args, **kwargs) + + def _loads(self, value): + if not isinstance(value, unicode): + raise TypeError( + "Cannot safely assume encoding of byte string %r." % value) + return json.loads(value) + + def _dumps(self, value): + # http://www.ietf.org/rfc/rfc4627.txt states that JSON is text-based + # and so we treat it as such here. In other words, this method returns + # unicode and never str. + dump = json.dumps(value, ensure_ascii=False) + if not isinstance(dump, unicode): + # json.dumps() does not always return unicode. See + # http://code.google.com/p/simplejson/issues/detail?id=40 for one + # of many discussions of str/unicode handling in simplejson. + dump = dump.decode("utf-8") + return dump class ListVariable(MutableValueVariable): diff -Nru storm-0.18/storm/wsgi.py storm-0.19/storm/wsgi.py --- storm-0.18/storm/wsgi.py 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/storm/wsgi.py 2011-09-25 18:45:14.000000000 +0000 @@ -0,0 +1,58 @@ +# +# Copyright (c) 2006, 2007 Canonical +# +# Written by Robert Collins +# +# This file is part of Storm Object Relational Mapper. +# +# Storm is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as +# published by the Free Software Foundation; either version 2.1 of +# the License, or (at your option) any later version. +# +# Storm is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+
+"""Glue to wire a storm timeline tracer up to a WSGI app."""
+
+import functools
+import threading
+
+__all__ = ['make_app']
+
+def make_app(app):
+ """Capture the per-request timeline object needed for storm tracing.
+
+ To use, first make your app and then wrap it with this make_app::
+
+ >>> app, find_timeline = make_app(app)
+
+ Then wrap the returned app with the timeline app (or anything that sets
+ environ['timeline.timeline'])::
+
+ >>> app = timeline.wsgi.make_app(app)
+
+ Finally install a timeline tracer to capture storm queries::
+
+ >>> install_tracer(TimelineTracer(find_timeline))
+
+ @return: A wrapped WSGI app and a timeline factory function for use with
+ TimelineTracer.
+ """
+ timeline_map = threading.local()
+ def wrapper(environ, start_response):
+ timeline = environ.get('timeline.timeline')
+ timeline_map.timeline = timeline
+ try:
+ gen = app(environ, start_response)
+ for bytes in gen:
+ yield bytes
+ finally:
+ del timeline_map.timeline
+ return wrapper, functools.partial(getattr, timeline_map, 'timeline', None) diff -Nru storm-0.18/storm/zope/interfaces.py storm-0.19/storm/zope/interfaces.py --- storm-0.18/storm/zope/interfaces.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/zope/interfaces.py 2011-09-25 18:45:14.000000000 +0000 @@ -39,28 +39,28 @@ The rationale behind the exposed attributes is: - 1. Model code constructs a L{ResultSet} and returns a security proxied - object to the view code. + 1. Model code constructs a L{ResultSet} and returns a security proxied + object to the view code. - 2. View code treats the L{ResultSet} as an immutable sequence/iterable and - presents the data to the user. + 2. View code treats the L{ResultSet} as an immutable sequence/iterable + and presents the data to the user. 
Therefore several attributes of L{ResultSet} are not included here: - - Both C{set()} and C{remove()} can be used to modify the contained - objects, which will bypass the security proxies on those objects. - - - C{get_select_expr()} will return a L{Select} object, which has no - security declarations (and it isn't clear that any would be desirable). - - - C{find()}, C{group_by()} and C{having()} are really used to configure - result sets, so are mostly intended for use on the model side. - - - There may be an argument for exposing C{difference()}, C{intersection()} - and C{union()} as a way for view code to combine multiple results, but - it isn't clear how often it makes sense to do this on the view side - rather than model side. + - Both C{set()} and C{remove()} can be used to modify the contained + objects, which will bypass the security proxies on those objects. + - C{get_select_expr()} will return a L{Select} object, which has no + security declarations (and it isn't clear that any would be + desirable). + + - C{find()}, C{group_by()} and C{having()} are really used to configure + result sets, so are mostly intended for use on the model side. + + - There may be an argument for exposing C{difference()}, + C{intersection()} and C{union()} as a way for view code to combine + multiple results, but it isn't clear how often it makes sense to do + this on the view side rather than model side. """ def copy(): @@ -164,9 +164,14 @@ """Return the number of items in the result set.""" def __nonzero__(): - """Boolean emulation.""" + """Return C{True} if this result set contains any results. + + @note: This method is provided for compatibility with SQL Object. For + new code, prefer L{is_empty}. It's compatible with L{ResultSet} + which doesn't have a C{__nonzero__} implementation. 
+ """ - def __contains__(): + def __contains__(item): """Support C{if FooObject in Foo.select(query)}.""" def intersect(otherSelect, intersectAll=False, orderBy=None): @@ -177,6 +182,9 @@ @param orderBy: the order the result set should use. """ + def is_empty(): + """Return C{True} if this result set doesn't contain any results.""" + def prejoin(prejoins): """Return a new L{SelectResults} with the list of attributes prejoined. diff -Nru storm-0.18/storm/zope/testing.py storm-0.19/storm/zope/testing.py --- storm-0.18/storm/zope/testing.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/storm/zope/testing.py 2011-09-25 18:45:14.000000000 +0000 @@ -20,9 +20,10 @@ # import transaction from testresources import TestResourceManager -from zope.component import provideUtility +from zope.component import provideUtility, getUtility from storm.zope.zstorm import ZStorm +from storm.zope.interfaces import IZStorm class ZStormResourceManager(TestResourceManager): @@ -32,17 +33,26 @@ in the provided L{ZStore} resource. Then the C{make} and C{clean} methods make sure that such L{Store}s are properly setup and cleaned for each test. - @param databases: A C{dict} with the form C{name: (uri, schema)}, where - 'name' is the name of the store to be registered in the L{ZStorm} - resource, 'uri' is the database URI needed to create the store and - 'schema' is the L{Schema} for the tables in the store. + @param databases: A C{list} of C{dict}s holding the following keys: + - 'name', the name of the store to be registered. + - 'uri', the database URI to use to create the store. + - 'schema', the L{Schema} for the tables in the store. + - 'schema-uri', optionally an alternate URI to use for applying the + schema, if not given it defaults to 'uri'. + + @ivar force_delete: If C{True} for running L{Schema.delete} on a L{Store} + even if no commit was performed by the test. Useful when running a test + in a subprocess that might commit behind our back. 
""" + force_delete = False def __init__(self, databases): super(ZStormResourceManager, self).__init__() self._databases = databases self._zstorm = None + self._schema_zstorm = None self._commits = {} + self._schemas = {} def make(self, dependencies): """Create a L{ZStorm} resource to be used by tests. @@ -51,17 +61,42 @@ this resource manager. """ if self._zstorm is None: + zstorm = ZStorm() - provideUtility(zstorm) - for name, (uri, schema) in self._databases.iteritems(): + schema_zstorm = ZStorm() + databases = self._databases + + # Adapt the old databases format to the new one, for backward + # compatibility. This should be eventually dropped. + if isinstance(databases, dict): + databases = [{"name": name, "uri": uri, "schema": schema} + for name, (uri, schema) in databases.iteritems()] + + for database in databases: + name = database["name"] + uri = database["uri"] + schema = database["schema"] + schema_uri = database.get("schema-uri", uri) + self._schemas[name] = schema zstorm.set_default_uri(name, uri) + schema_zstorm.set_default_uri(name, schema_uri) store = zstorm.get(name) self._set_commit_proxy(store) - schema.upgrade(store) + schema_store = schema_zstorm.get(name) + schema.upgrade(schema_store) # Clean up tables here to ensure that the first test run starts # with an empty db - schema.delete(store) + schema.delete(schema_store) + + provideUtility(zstorm) self._zstorm = zstorm + self._schema_zstorm = schema_zstorm + + elif getUtility(IZStorm) is not self._zstorm: + # This probably means that the test code has overwritten our + # utility, let's re-register it. 
+ provideUtility(self._zstorm) + return self._zstorm def _set_commit_proxy(self, store): @@ -85,12 +120,17 @@ for name, store in self._zstorm.iterstores(): # Ensure that the store is in a consistent state store.flush() + # Clear the alive cache *before* abort is called, + # to prevent a useless loop in Store.invalidate + # over the alive objects + store._alive.clear() finally: transaction.abort() # Clean up tables after each test if a commit was made for name, store in self._zstorm.iterstores(): - if store in self._commits: - _, schema = self._databases[name] - schema.delete(store) + if self.force_delete or store in self._commits: + schema_store = self._schema_zstorm.get(name) + schema = self._schemas[name] + schema.delete(schema_store) self._commits = {} diff -Nru storm-0.18/storm.egg-info/dependency_links.txt storm-0.19/storm.egg-info/dependency_links.txt --- storm-0.18/storm.egg-info/dependency_links.txt 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/storm.egg-info/dependency_links.txt 2011-10-05 13:11:24.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru storm-0.18/storm.egg-info/not-zip-safe storm-0.19/storm.egg-info/not-zip-safe --- storm-0.18/storm.egg-info/not-zip-safe 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/storm.egg-info/not-zip-safe 2011-10-03 18:02:55.000000000 +0000 @@ -0,0 +1 @@ + diff -Nru storm-0.18/storm.egg-info/PKG-INFO storm-0.19/storm.egg-info/PKG-INFO --- storm-0.18/storm.egg-info/PKG-INFO 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/storm.egg-info/PKG-INFO 2011-10-05 13:11:24.000000000 +0000 @@ -0,0 +1,18 @@ +Metadata-Version: 1.0 +Name: storm +Version: 0.19 +Summary: Storm is an object-relational mapper (ORM) for Python developed at Canonical. 
+Home-page: https://storm.canonical.com +Author: Storm Developers +Author-email: storm@lists.canonical.com +License: LGPL +Download-URL: https://launchpad.net/storm/+download +Description: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Programming Language :: Python +Classifier: Topic :: Database +Classifier: Topic :: Database :: Front-Ends +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff -Nru storm-0.18/storm.egg-info/SOURCES.txt storm-0.19/storm.egg-info/SOURCES.txt --- storm-0.18/storm.egg-info/SOURCES.txt 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/storm.egg-info/SOURCES.txt 2011-10-05 13:11:24.000000000 +0000 @@ -0,0 +1,101 @@ +LICENSE +MANIFEST.in +Makefile +NEWS +README +TODO +setup.cfg +setup.py +test +storm/__init__.py +storm/base.py +storm/cache.py +storm/cextensions.c +storm/compat.py +storm/database.py +storm/event.py +storm/exceptions.py +storm/expr.py +storm/info.py +storm/locals.py +storm/properties.py +storm/references.py +storm/sqlobject.py +storm/store.py +storm/tracer.py +storm/tz.py +storm/uri.py +storm/variables.py +storm/wsgi.py +storm.egg-info/PKG-INFO +storm.egg-info/SOURCES.txt +storm.egg-info/dependency_links.txt +storm.egg-info/not-zip-safe +storm.egg-info/top_level.txt +storm/databases/__init__.py +storm/databases/mysql.py +storm/databases/postgres.py +storm/databases/sqlite.py +storm/django/__init__.py +storm/django/middleware.py +storm/django/stores.py +storm/django/backend/__init__.py +storm/django/backend/base.py +storm/schema/__init__.py +storm/schema/patch.py +storm/schema/schema.py +storm/twisted/__init__.py +storm/twisted/testing.py +storm/twisted/transact.py +storm/zope/__init__.py +storm/zope/adapters.py +storm/zope/configure.zcml +storm/zope/interfaces.py +storm/zope/meta.zcml +storm/zope/metaconfigure.py 
+storm/zope/metadirectives.py +storm/zope/schema.py +storm/zope/testing.py +storm/zope/zstorm.py +tests/__init__.py +tests/base.py +tests/cache.py +tests/database.py +tests/event.py +tests/expr.py +tests/helper.py +tests/info.py +tests/infoheritance.txt +tests/mocker.py +tests/properties.py +tests/sqlobject.py +tests/tracer.py +tests/tutorial.txt +tests/uri.py +tests/variables.py +tests/wsgi.py +tests/databases/__init__.py +tests/databases/base.py +tests/databases/mysql.py +tests/databases/postgres.py +tests/databases/proxy.py +tests/databases/sqlite.py +tests/django/__init__.py +tests/django/backend.py +tests/django/middleware.py +tests/django/stores.py +tests/schema/__init__.py +tests/schema/patch.py +tests/schema/schema.py +tests/store/__init__.py +tests/store/base.py +tests/store/mysql.py +tests/store/postgres.py +tests/store/sqlite.py +tests/twisted/__init__.py +tests/twisted/transact.py +tests/zope/README.txt +tests/zope/__init__.py +tests/zope/adapters.py +tests/zope/testing.py +tests/zope/zstorm.py \ No newline at end of file diff -Nru storm-0.18/storm.egg-info/top_level.txt storm-0.19/storm.egg-info/top_level.txt --- storm-0.18/storm.egg-info/top_level.txt 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/storm.egg-info/top_level.txt 2011-10-05 13:11:24.000000000 +0000 @@ -0,0 +1 @@ +storm diff -Nru storm-0.18/test storm-0.19/test --- storm-0.18/test 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/test 2011-09-25 18:45:14.000000000 +0000 @@ -68,8 +68,11 @@ if relpath == os.path.join("tests", "zope", "README.txt"): # Special case the inclusion of the Zope-dependent # ZStorm doctest. 
- from tests.zope import has_zope - load_test = has_zope + import tests.zope as ztest + load_test = ( + ztest.has_transaction and + ztest.has_zope_component and + ztest.has_zope_security) if load_test: parent_path = os.path.dirname(relpath).replace( os.path.sep, ".") diff -Nru storm-0.18/tests/cache.py storm-0.19/tests/cache.py --- storm-0.18/tests/cache.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/cache.py 2011-09-25 18:45:14.000000000 +0000 @@ -69,8 +69,9 @@ cache.add(obj_info2) cache.add(obj_info2) cache.add(obj_info1) - self.assertEquals([hash(obj_info) for obj_info in cache.get_cached()], - [hash(obj_info1), hash(obj_info2)]) + self.assertEquals(sorted([hash(obj_info) + for obj_info in cache.get_cached()]), + sorted([hash(obj_info1), hash(obj_info2)])) def test_remove(self): cache = self.Cache(5) diff -Nru storm-0.18/tests/database.py storm-0.19/tests/database.py --- storm-0.18/tests/database.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/database.py 2011-10-03 14:28:37.000000000 +0000 @@ -236,7 +236,8 @@ install_tracer(tracer_mock) self.connection.is_disconnection_error = ( - lambda exc: 'connection closed' in str(exc)) + lambda exc, extra_disconnection_errors=(): + 'connection closed' in str(exc)) self.assertRaises(DisconnectionError, self.connection.execute, "something") @@ -251,7 +252,8 @@ install_tracer(tracer_mock) self.connection.is_disconnection_error = ( - lambda exc: 'connection closed' in str(exc)) + lambda exc, extra_disconnection_errors=(): + 'connection closed' in str(exc)) self.assertRaises(DisconnectionError, self.connection.execute, "something") @@ -270,7 +272,8 @@ install_tracer(tracer_mock) self.connection.is_disconnection_error = ( - lambda exc: 'connection closed' in str(exc)) + lambda exc, extra_disconnection_errors=(): + 'connection closed' in str(exc)) self.assertRaises(DisconnectionError, self.connection.execute, "something") @@ -356,7 +359,8 @@ class FakeException(DatabaseError): """A fake database 
exception that indicates a disconnection.""" self.connection.is_disconnection_error = ( - lambda exc: isinstance(exc, FakeException)) + lambda exc, extra_disconnection_errors=(): + isinstance(exc, FakeException)) self.assertEqual(self.connection._state, storm.database.STATE_CONNECTED) @@ -369,6 +373,32 @@ storm.database.STATE_DISCONNECTED) self.assertEqual(self.connection._raw_connection, None) + def test_wb_check_disconnection_extra_errors(self): + """Ensure that _check_disconnect() can check for additional + exceptions.""" + class FakeException(DatabaseError): + """A fake database exception that indicates a disconnection.""" + self.connection.is_disconnection_error = ( + lambda exc, extra_disconnection_errors=(): + isinstance(exc, extra_disconnection_errors)) + + self.assertEqual(self.connection._state, + storm.database.STATE_CONNECTED) + # Error is converted to DisconnectionError: + def raise_exception(): + raise FakeException + # Exception passes through as normal. + self.assertRaises(FakeException, + self.connection._check_disconnect, raise_exception) + self.assertEqual(self.connection._state, + storm.database.STATE_CONNECTED) + # Exception treated as a disconnection when keyword argument passed. 
+ self.assertRaises(DisconnectionError, + self.connection._check_disconnect, raise_exception, + extra_disconnection_errors=FakeException) + self.assertEqual(self.connection._state, + storm.database.STATE_DISCONNECTED) + def test_wb_rollback_clears_disconnected_connection(self): """Check that rollback clears the DISCONNECTED state.""" self.connection._state = storm.database.STATE_DISCONNECTED diff -Nru storm-0.18/tests/databases/base.py storm-0.19/tests/databases/base.py --- storm-0.18/tests/databases/base.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/databases/base.py 2011-09-25 18:45:14.000000000 +0000 @@ -493,21 +493,21 @@ sys.modules.update(dbapi_modules) -class DatabaseDisconnectionTest(object): +class DatabaseDisconnectionMixin(object): environment_variable = "" host_environment_variable = "" default_port = None def setUp(self): - super(DatabaseDisconnectionTest, self).setUp() + super(DatabaseDisconnectionMixin, self).setUp() self.create_database_and_proxy() self.create_connection() def tearDown(self): self.drop_database() self.proxy.close() - super(DatabaseDisconnectionTest, self).tearDown() + super(DatabaseDisconnectionMixin, self).tearDown() def is_supported(self): return bool(self.get_uri()) @@ -546,6 +546,7 @@ uri = self.get_uri() self.proxy = ProxyTCPServer((uri.host, uri.port)) uri.host, uri.port = self.proxy.server_address + self.proxy_uri = uri self.database = create_database(uri) def create_connection(self): @@ -554,6 +555,8 @@ def drop_database(self): pass +class DatabaseDisconnectionTest(DatabaseDisconnectionMixin): + def test_proxy_works(self): """Ensure that we can talk to the database through the proxy.""" result = self.connection.execute("SELECT 1") diff -Nru storm-0.18/tests/django/backend.py storm-0.19/tests/django/backend.py --- storm-0.18/tests/django/backend.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/django/backend.py 2011-10-03 14:28:37.000000000 +0000 @@ -1,4 +1,4 @@ -# + # Copyright (c) 2008 Canonical # # 
Written by James Henstridge @@ -32,7 +32,29 @@ from storm.django import stores from storm.zope.zstorm import global_zstorm, StoreDataManager +import storm.database +from storm.exceptions import DisconnectionError + from tests.helper import TestHelper +from tests.databases.base import DatabaseDisconnectionMixin + + +def make_wrapper(): + from storm.django.backend import base + if django.VERSION >= (1, 1): + wrapper = base.DatabaseWrapper({ + 'DATABASE_HOST': settings.DATABASE_HOST, + 'DATABASE_NAME': settings.DATABASE_NAME, + 'DATABASE_OPTIONS': settings.DATABASE_OPTIONS, + 'DATABASE_PASSWORD': settings.DATABASE_PASSWORD, + 'DATABASE_PORT': settings.DATABASE_PORT, + 'DATABASE_USER': settings.DATABASE_USER, + 'TIME_ZONE': settings.TIME_ZONE, + 'OPTIONS': {}, + }) + else: + wrapper = base.DatabaseWrapper(**settings.DATABASE_OPTIONS) + return wrapper class DjangoBackendTests(object): @@ -76,24 +98,8 @@ def drop_tables(self): raise NotImplementedError - def make_wrapper(self): - from storm.django.backend import base - if django.VERSION >= (1, 1): - wrapper = base.DatabaseWrapper({ - 'DATABASE_HOST': settings.DATABASE_HOST, - 'DATABASE_NAME': settings.DATABASE_NAME, - 'DATABASE_OPTIONS': settings.DATABASE_OPTIONS, - 'DATABASE_PASSWORD': settings.DATABASE_PASSWORD, - 'DATABASE_PORT': settings.DATABASE_PORT, - 'DATABASE_USER': settings.DATABASE_USER, - 'TIME_ZONE': settings.TIME_ZONE, - }) - else: - wrapper = base.DatabaseWrapper(**settings.DATABASE_OPTIONS) - return wrapper - def test_create_wrapper(self): - wrapper = self.make_wrapper() + wrapper = make_wrapper() self.assertTrue(isinstance(wrapper, self.get_wrapper_class())) # The wrapper uses the same database connection as the store. 
@@ -113,13 +119,13 @@ "%r should be joined to the transaction" % store) def test_using_wrapper_joins_transaction(self): - wrapper = self.make_wrapper() + wrapper = make_wrapper() cursor = wrapper.cursor() cursor.execute("SELECT 1") self.assertInTransaction(stores.get_store("django")) def test_commit(self): - wrapper = self.make_wrapper() + wrapper = make_wrapper() cursor = wrapper.cursor() cursor.execute("INSERT INTO django_test (title) VALUES ('foo')") wrapper._commit() @@ -131,7 +137,7 @@ self.assertEqual(result[0][0], "foo") def test_rollback(self): - wrapper = self.make_wrapper() + wrapper = make_wrapper() cursor = wrapper.cursor() cursor.execute("INSERT INTO django_test (title) VALUES ('foo')") wrapper._rollback() @@ -141,6 +147,100 @@ result = cursor.fetchall() self.assertEqual(len(result), 0) + def test_register_transaction(self): + wrapper = make_wrapper() + store = global_zstorm.get("django") + # Watch for register-transaction calls. + calls = [] + def register_transaction(owner): + calls.append(owner) + store._event.hook("register-transaction", register_transaction) + + cursor = wrapper.cursor() + cursor.execute("SELECT 1") + self.assertNotEqual(calls, []) + + +class DjangoBackendDisconnectionTests(DatabaseDisconnectionMixin): + + def is_supported(self): + if not have_django_and_transaction: + return False + return DatabaseDisconnectionMixin.is_supported(self) + + def setUp(self): + super(DjangoBackendDisconnectionTests, self).setUp() + settings.configure(STORM_STORES={}) + + settings.DATABASE_ENGINE = "storm.django.backend" + settings.DATABASE_NAME = "django" + settings.STORM_STORES["django"] = str(self.proxy_uri) + stores.have_configured_stores = False + + def tearDown(self): + transaction.abort() + if django.VERSION >= (1, 1): + settings._wrapped = None + else: + settings._target = None + global_zstorm._reset() + stores.have_configured_stores = False + transaction.manager.free(transaction.get()) + super(DjangoBackendDisconnectionTests, self).tearDown() 
+ + def test_wb_disconnect(self): + wrapper = make_wrapper() + store = global_zstorm.get("django") + cursor = wrapper.cursor() + cursor.execute("SELECT 'about to reset connection'") + wrapper._rollback() + cursor = wrapper.cursor() + self.proxy.restart() + self.assertRaises(DisconnectionError, cursor.execute, "SELECT 1") + self.assertEqual( + store._connection._state, storm.database.STATE_DISCONNECTED) + wrapper._rollback() + + self.assertEqual( + store._connection._state, storm.database.STATE_RECONNECT) + cursor = wrapper.cursor() + cursor.execute("SELECT 1") + + def test_wb_transaction_registration(self): + wrapper = make_wrapper() + store = global_zstorm.get("django") + # Watch for register-transaction calls. + calls = [] + def register_transaction(owner): + calls.append(owner) + store._event.hook("register-transaction", register_transaction) + + # Simulate a disconnection, and put the connection into a + # state where it would attempt to reconnect. + store._connection._raw_connection = None + store._connection._state = storm.database.STATE_RECONNECT + self.proxy.stop() + + self.assertRaises(DisconnectionError, wrapper.cursor) + # The connection is in the disconnected state, and has been + # registered with any listening transaction manager. + self.assertNotEqual(calls, []) + self.assertEqual( + store._connection._state, storm.database.STATE_DISCONNECTED) + + wrapper._rollback() + del calls[:] + + # Now reconnect: + self.proxy.start() + cursor = wrapper.cursor() + cursor.execute("SELECT 1") + # The connection is up, and has been registered with any + # listening transaction manager. 
+ self.assertNotEqual(calls, []) + self.assertEqual( + store._connection._state, storm.database.STATE_CONNECTED) + class PostgresDjangoBackendTests(DjangoBackendTests, TestHelper): @@ -184,3 +284,18 @@ store = stores.get_store("django") store.execute("DROP TABLE django_test") transaction.commit() + +class PostgresDjangoBackendDisconnectionTests( + DjangoBackendDisconnectionTests, TestHelper): + + environment_variable = "STORM_POSTGRES_URI" + host_environment_variable = "STORM_POSTGRES_HOST_URI" + default_port = 5432 + + +class MySQLDjangoBackendDisconnectionTests( + DjangoBackendDisconnectionTests, TestHelper): + + environment_variable = "STORM_MYSQL_URI" + host_environment_variable = "STORM_MYSQL_HOST_URI" + default_port = 3306 diff -Nru storm-0.18/tests/expr.py storm-0.19/tests/expr.py --- storm-0.18/tests/expr.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/expr.py 2011-09-25 18:45:14.000000000 +0000 @@ -135,6 +135,7 @@ expr = Column() self.assertEquals(expr.name, Undef) self.assertEquals(expr.table, Undef) + self.assertIdentical(expr.compile_cache, None) # Test for identity. We don't want False there. 
self.assertTrue(expr.primary is 0) @@ -657,6 +658,16 @@ 'SELECT DISTINCT column1, column2 FROM "table 1"') self.assertEquals(state.parameters, []) + def test_select_distinct_on(self): + expr = Select([column1, column2], Undef, [table1], + distinct=[column2, column1]) + state = State() + statement = compile(expr, state) + self.assertEquals(statement, + 'SELECT DISTINCT ON (column2, column1) ' + 'column1, column2 FROM "table 1"') + self.assertEquals(state.parameters, []) + def test_select_where(self): expr = Select([column1, Func1()], Func1(), @@ -997,13 +1008,16 @@ statement = compile(expr, state) self.assertEquals(statement, "column1") self.assertEquals(state.parameters, []) + self.assertEquals(expr.compile_cache, "column1") def test_column_table(self): - expr = Select(Column(column1, Func1())) + column = Column(column1, Func1()) + expr = Select(column) state = State() statement = compile(expr, state) self.assertEquals(statement, "SELECT func1().column1 FROM func1()") self.assertEquals(state.parameters, []) + self.assertEquals(column.compile_cache, "column1") def test_column_contexts(self): table, = track_contexts(1) @@ -1018,6 +1032,11 @@ self.assertEquals(statement, 'SELECT "table 1"."name 1" FROM "table 1"') + def test_row(self): + expr = Row(column1, column2) + statement = compile(expr) + self.assertEquals(statement, "ROW(column1, column2)") + def test_variable(self): expr = Variable("value") state = State() @@ -1380,6 +1399,17 @@ def test_count_distinct_all(self): self.assertRaises(ValueError, Count, distinct=True) + def test_cast(self): + """ + The L{Cast} expression renders a C{CAST} function call with a + user-defined input value and the type to cast it to. 
+ """ + expr = Cast(Func1(), "TEXT") + state = State() + statement = compile(expr, state) + self.assertEquals(statement, "CAST(func1() AS TEXT)") + self.assertEquals(state.parameters, []) + def test_max(self): expr = Max(Func1()) state = State() @@ -1558,10 +1588,12 @@ def test_table(self): expr = Table(table1) + self.assertIdentical(expr.compile_cache, None) state = State() statement = compile(expr, state) self.assertEquals(statement, '"table 1"') self.assertEquals(state.parameters, []) + self.assertEquals(expr.compile_cache, '"table 1"') def test_alias(self): expr = Alias(Table(table1), "name") diff -Nru storm-0.18/tests/info.py storm-0.19/tests/info.py --- storm-0.18/tests/info.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/info.py 2011-09-25 18:45:14.000000000 +0000 @@ -94,7 +94,7 @@ (self.Class.prop1, self.Class.prop2)) def test_table(self): - self.assertEquals(self.cls_info.table, "table") + self.assertEquals(self.cls_info.table.name, "table") def test_primary_key(self): # Can't use == for props. 
@@ -202,7 +202,7 @@ def test_variables(self): self.assertTrue(isinstance(self.obj_info.variables, dict)) - + for column in self.cls_info.columns: variable = self.obj_info.variables.get(column) self.assertTrue(isinstance(variable, Variable)) @@ -227,7 +227,7 @@ def test_primary_vars(self): self.assertTrue(isinstance(self.obj_info.primary_vars, tuple)) - + for column, variable in zip(self.cls_info.primary_key, self.obj_info.primary_vars): self.assertEquals(self.obj_info.variables.get(column), @@ -316,7 +316,7 @@ self.obj_info.checkpoint() obj = object() - + self.obj_info.event.hook("changed", object_changed1, obj) self.obj_info.event.hook("changed", object_changed2, obj) @@ -326,7 +326,7 @@ self.assertEquals(changes1, [(1, self.obj_info, self.variable2, Undef, 10, False, obj), (1, self.obj_info, self.variable1, Undef, 20, False, obj)]) - self.assertEquals(changes2, + self.assertEquals(changes2, [(2, self.obj_info, self.variable2, Undef, 10, False, obj), (2, self.obj_info, self.variable1, Undef, 20, False, obj)]) @@ -339,7 +339,7 @@ self.assertEquals(changes1, [(1, self.obj_info, self.variable1, 20, None, False, obj), (1, self.obj_info, self.variable2, 10, None, False, obj)]) - self.assertEquals(changes2, + self.assertEquals(changes2, [(2, self.obj_info, self.variable1, 20, None, False, obj), (2, self.obj_info, self.variable2, 10, None, False, obj)]) @@ -352,7 +352,7 @@ self.assertEquals(changes1, [(1, self.obj_info, self.variable1, None, Undef, False, obj), (1, self.obj_info, self.variable2, None, Undef, False, obj)]) - self.assertEquals(changes2, + self.assertEquals(changes2, [(2, self.obj_info, self.variable1, None, Undef, False, obj), (2, self.obj_info, self.variable2, None, Undef, False, obj)]) @@ -528,11 +528,11 @@ prop1 = Property("column1", primary=True) self.Class = Class self.ClassAlias = ClassAlias(self.Class, "alias") - + def test_cls_info_cls(self): cls_info = get_cls_info(self.ClassAlias) self.assertEquals(cls_info.cls, self.Class) - 
self.assertEquals(cls_info.table, "alias") + self.assertEquals(cls_info.table.name, "alias") self.assertEquals(self.ClassAlias.prop1.name, "column1") self.assertEquals(self.ClassAlias.prop1.table, self.ClassAlias) diff -Nru storm-0.18/tests/properties.py storm-0.19/tests/properties.py --- storm-0.18/tests/properties.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/properties.py 2011-09-25 18:45:14.000000000 +0000 @@ -26,6 +26,7 @@ except ImportError: uuid = None +from storm.compat import json from storm.exceptions import NoneError, PropertyPathError from storm.properties import PropertyPublisherMeta from storm.properties import * @@ -693,6 +694,67 @@ del self.obj self.assertEquals(changes, [(self.variable1, None, ["a"], False)]) + def test_json(self): + # Skip test if json support is not available. + if json is None: + return + + self.setup(JSON, default_factory=dict, allow_none=False) + + self.assertTrue(isinstance(self.column1, Column)) + self.assertTrue(isinstance(self.column2, Column)) + self.assertEquals(self.column1.name, "column1") + self.assertEquals(self.column1.table, self.SubClass) + self.assertEquals(self.column2.name, "prop2") + self.assertEquals(self.column2.table, self.SubClass) + self.assertTrue(isinstance(self.variable1, JSONVariable)) + self.assertTrue(isinstance(self.variable2, JSONVariable)) + + self.assertEquals(self.obj.prop1, {}) + self.assertRaises(NoneError, setattr, self.obj, "prop1", None) + self.obj.prop2 = None + self.assertEquals(self.obj.prop2, None) + + self.obj.prop1 = [] + self.assertEquals(self.obj.prop1, []) + self.obj.prop1.append("a") + self.assertEquals(self.obj.prop1, ["a"]) + + def test_json_events(self): + # Skip test if json support is not available. + if json is None: + return + + self.setup(JSON, default_factory=list, allow_none=False) + + changes = [] + def changed(owner, variable, old_value, new_value, fromdb): + changes.append((variable, old_value, new_value, fromdb)) + + # Can't checkpoint Undef. 
+ self.obj.prop2 = [] + + self.obj_info.checkpoint() + self.obj_info.event.emit("start-tracking-changes", self.obj_info.event) + self.obj_info.event.hook("changed", changed) + + self.assertEquals(self.obj.prop1, []) + self.assertEquals(changes, []) + self.obj.prop1.append("a") + self.assertEquals(changes, []) + + # Check "flush" event. Notice that the other variable wasn't + # listed, since it wasn't changed. + self.obj_info.event.emit("flush") + self.assertEquals(changes, [(self.variable1, None, ["a"], False)]) + + del changes[:] + + # Check "object-deleted" event. Notice that the other variable + # wasn't listed again, since it wasn't changed. + del self.obj + self.assertEquals(changes, [(self.variable1, None, ["a"], False)]) + def test_list(self): self.setup(List, default_factory=list, allow_none=False) diff -Nru storm-0.18/tests/store/base.py storm-0.19/tests/store/base.py --- storm-0.18/tests/store/base.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/store/base.py 2011-09-25 18:45:14.000000000 +0000 @@ -24,11 +24,13 @@ import decimal import gc import operator +from uuid import uuid4 import weakref from storm.references import Reference, ReferenceSet, Proxy -from storm.database import Result -from storm.properties import Int, Float, RawStr, Unicode, Property, Pickle +from storm.database import Result, STATE_DISCONNECTED +from storm.properties import ( + Int, Float, RawStr, Unicode, Property, Pickle, UUID) from storm.properties import PropertyPublisherMeta, Decimal from storm.variables import PickleVariable from storm.expr import ( @@ -38,7 +40,7 @@ from storm.exceptions import ( ClosedError, ConnectionBlockedError, FeatureError, LostObjectError, NoStoreError, NotFlushedError, NotOneError, OrderLoopError, UnorderedError, - WrongStoreError) + WrongStoreError, DisconnectionError) from storm.cache import Cache from storm.store import AutoReload, EmptyResultSet, Store, ResultSet from storm.tracer import debug @@ -59,6 +61,12 @@ foo_id = Int() foo = 
Reference(foo_id, Foo.id) +class UniqueID(object): + __storm_table__ = "unique_id" + id = UUID(primary=True) + def __init__(self, id): + self.id = id + class Blob(object): __storm_table__ = "bin" id = Int(primary=True) @@ -259,7 +267,7 @@ def drop_tables(self): for table in ["foo", "bar", "bin", "link", "money", "selfref", - "foovalue"]: + "foovalue", "unique_id"]: try: self.connection.execute("DROP TABLE %s" % table) self.connection.commit() @@ -1783,6 +1791,10 @@ self.assertEquals(type(foo.id), int) self.assertEquals(foo.title, u"Default Title") + def test_add_uuid(self): + unique_id = self.store.add(UniqueID(uuid4())) + self.assertEqual(unique_id, self.store.find(UniqueID).one()) + def test_remove_commit(self): foo = self.store.get(Foo, 20) self.store.remove(foo) @@ -5366,12 +5378,13 @@ self.store.invalidate(foo) self.assertRaises(LostObjectError, setattr, foo, "title", u"Title 40") - def test_invalidate_and_get_returns_autoreloaded(self): + def test_invalidated_objects_reloaded_by_get(self): foo = self.store.get(Foo, 20) self.store.invalidate(foo) foo = self.store.get(Foo, 20) - self.assertEquals(get_obj_info(foo).variables[Foo.title].get_lazy(), - AutoReload) + title_variable = get_obj_info(foo).variables[Foo.title] + self.assertEquals(title_variable.get_lazy(), None) + self.assertEquals(title_variable.get(), u"Title 20") self.assertEquals(foo.title, "Title 20") def test_invalidated_hook(self): @@ -5869,7 +5882,7 @@ try: self.assertEquals(myfoo.title, title) except AssertionError, e: - raise AssertionError(str(e) + + raise AssertionError(unicode(e, 'replace') + " (ensure your database was created with CREATE DATABASE" " ... CHARACTER SET utf8)") @@ -5946,6 +5959,20 @@ self.assertEqual(len(calls), 1) self.assertEqual(calls[0], self.store) + def test_wb_event_before_check_connection(self): + """ + The register-transaction event is emitted before checking the state of + the connection. 
+ """ + calls = [] + def register_transaction(owner): + calls.append(owner) + self.store._event.hook("register-transaction", register_transaction) + self.store._connection._state = STATE_DISCONNECTED + self.assertRaises(DisconnectionError, self.store.execute, "SELECT 1") + self.assertEqual(len(calls), 1) + self.assertEqual(calls[0], self.store) + def test_add_sends_event(self): """Adding an object emits the register-transaction event.""" calls = [] diff -Nru storm-0.18/tests/store/mysql.py storm-0.19/tests/store/mysql.py --- storm-0.18/tests/store/mysql.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/store/mysql.py 2011-09-25 18:45:14.000000000 +0000 @@ -74,7 +74,11 @@ connection.execute("CREATE TABLE foovalue " "(id INT PRIMARY KEY AUTO_INCREMENT," " foo_id INTEGER," - " value1 INTEGER, value2 INTEGER)") + " value1 INTEGER, value2 INTEGER) " + "ENGINE=InnoDB") + connection.execute("CREATE TABLE unique_id " + "(id VARCHAR(36) PRIMARY KEY) " + "ENGINE=InnoDB") connection.commit() diff -Nru storm-0.18/tests/store/postgres.py storm-0.19/tests/store/postgres.py --- storm-0.18/tests/store/postgres.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/store/postgres.py 2011-09-25 18:45:14.000000000 +0000 @@ -91,6 +91,8 @@ connection.execute("CREATE TABLE foovalue " "(id SERIAL PRIMARY KEY, foo_id INTEGER," " value1 INTEGER, value2 INTEGER)") + connection.execute("CREATE TABLE unique_id " + "(id UUID PRIMARY KEY)") connection.commit() def drop_tables(self): diff -Nru storm-0.18/tests/store/sqlite.py storm-0.19/tests/store/sqlite.py --- storm-0.18/tests/store/sqlite.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/store/sqlite.py 2011-09-25 18:45:14.000000000 +0000 @@ -63,6 +63,8 @@ connection.execute("CREATE TABLE foovalue " "(id INTEGER PRIMARY KEY, foo_id INTEGER," " value1 INTEGER, value2 INTEGER)") + connection.execute("CREATE TABLE unique_id " + "(id VARCHAR PRIMARY KEY)") connection.commit() def drop_tables(self): diff -Nru 
storm-0.18/tests/tracer.py storm-0.19/tests/tracer.py --- storm-0.18/tests/tracer.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/tracer.py 2011-09-25 18:45:14.000000000 +0000 @@ -1,9 +1,18 @@ import datetime import sys +from unittest import TestCase + +try: + # Optional dependency, if missing TimelineTracer tests are skipped. + import timeline +except ImportError: + timeline = None from storm.tracer import (trace, install_tracer, get_tracers, remove_tracer_type, remove_all_tracers, debug, - DebugTracer, TimeoutTracer, TimeoutError, _tracers) + BaseStatementTracer, DebugTracer, TimeoutTracer, + TimelineTracer, TimeoutError, _tracers) +from storm.database import Connection from storm.expr import Variable from tests.helper import TestHelper @@ -79,7 +88,7 @@ def __init__(self, value): self._value = value - def get(self): + def get(self, to_db=False): return self._value @@ -319,3 +328,166 @@ self.execute() self.execute() + + +class BaseStatementTracerTest(TestCase): + + class LoggingBaseStatementTracer(BaseStatementTracer): + def _expanded_raw_execute(self, connection, raw_cursor, statement): + self.__dict__.setdefault('calls', []).append( + (connection, raw_cursor, statement)) + + class StubConnection(Connection): + + def __init__(self): + self._database = None + self._event = None + self._raw_connection = None + + def test_no_params(self): + """With no parameters the statement is passed through verbatim.""" + tracer = self.LoggingBaseStatementTracer() + tracer.connection_raw_execute('foo', 'bar', 'baz ? %s', ()) + self.assertEqual([('foo', 'bar', 'baz ? 
%s')], tracer.calls) + + def test_params_substituted_pyformat(self): + tracer = self.LoggingBaseStatementTracer() + conn = self.StubConnection() + conn.param_mark = '%s' + var1 = MockVariable(u'VAR1') + tracer.connection_raw_execute( + conn, 'cursor', 'SELECT * FROM person where name = %s', [var1]) + self.assertEqual( + [(conn, 'cursor', "SELECT * FROM person where name = 'VAR1'")], + tracer.calls) + + def test_params_substituted_single_string(self): + """String parameters are formatted as a single quoted string.""" + tracer = self.LoggingBaseStatementTracer() + conn = self.StubConnection() + var1 = MockVariable(u'VAR1') + tracer.connection_raw_execute( + conn, 'cursor', 'SELECT * FROM person where name = ?', [var1]) + self.assertEqual( + [(conn, 'cursor', "SELECT * FROM person where name = 'VAR1'")], + tracer.calls) + + def test_qmark_percent_s_literal_preserved(self): + """With ? parameters %s in the statement can be kept intact.""" + tracer = self.LoggingBaseStatementTracer() + conn = self.StubConnection() + var1 = MockVariable(1) + tracer.connection_raw_execute( + conn, 'cursor', + "SELECT * FROM person where id > ? AND name LIKE '%s'", [var1]) + self.assertEqual( + [(conn, 'cursor', + "SELECT * FROM person where id > 1 AND name LIKE '%s'")], + tracer.calls) + + def test_int_variable_as_int(self): + """Int parameters are formatted as an int literal.""" + tracer = self.LoggingBaseStatementTracer() + conn = self.StubConnection() + var1 = MockVariable(1) + tracer.connection_raw_execute( + conn, 'cursor', "SELECT * FROM person where id = ?", [var1]) + self.assertEqual( + [(conn, 'cursor', "SELECT * FROM person where id = 1")], + tracer.calls) + + def test_like_clause_preserved(self): + """% operators in LIKE statements are preserved.""" + tracer = self.LoggingBaseStatementTracer() + conn = self.StubConnection() + var1 = MockVariable(u'substring') + tracer.connection_raw_execute( + conn, 'cursor', + "SELECT * FROM person WHERE name LIKE '%%' || ? 
|| '-suffix%%'", + [var1]) + self.assertEqual( + [(conn, 'cursor', "SELECT * FROM person WHERE name " + "LIKE '%%' || 'substring' || '-suffix%%'")], + tracer.calls) + + def test_unformattable_statements_are_handled(self): + tracer = self.LoggingBaseStatementTracer() + conn = self.StubConnection() + var1 = MockVariable(u'substring') + tracer.connection_raw_execute( + conn, 'cursor', "%s %s", + [var1]) + self.assertEqual( + [(conn, 'cursor', + "Unformattable query: '%s %s' with params [u'substring'].")], + tracer.calls) + + +class TimelineTracerTest(TestHelper): + + def is_supported(self): + return timeline is not None + + def factory(self): + self.timeline = timeline.Timeline() + return self.timeline + + def test_separate_tracers_own_state(self): + """"Check that multiple TimelineTracer's could be used at once.""" + tracer1 = TimelineTracer(self.factory) + tracer2 = TimelineTracer(self.factory) + tracer1.threadinfo.action = 'foo' + self.assertEqual(None, getattr(tracer2.threadinfo, 'action', None)) + + def test_error_finishes_action(self): + tracer = TimelineTracer(self.factory) + action = timeline.Timeline().start('foo', 'bar') + tracer.threadinfo.action = action + tracer.connection_raw_execute_error( + 'conn', 'cursor', 'statement', 'params', 'error') + self.assertNotEqual(None, action.duration) + + def test_success_finishes_action(self): + tracer = TimelineTracer(self.factory) + action = timeline.Timeline().start('foo', 'bar') + tracer.threadinfo.action = action + tracer.connection_raw_execute_success( + 'conn', 'cursor', 'statement', 'params') + self.assertNotEqual(None, action.duration) + + def test_finds_timeline_from_factory(self): + factory_result = timeline.Timeline() + factory = lambda:factory_result + tracer = TimelineTracer(lambda:factory_result) + tracer._expanded_raw_execute('conn', 'cursor', 'statement') + self.assertEqual(1, len(factory_result.actions)) + + def test_action_details_are_statement(self): + """The detail in the timeline action is the 
formatted SQL statement.""" + tracer = TimelineTracer(self.factory) + tracer._expanded_raw_execute('conn', 'cursor', 'statement') + self.assertEqual('statement', self.timeline.actions[-1].detail) + + def test_category_from_prefix_and_connection_name(self): + class StubConnection(Connection): + + def __init__(self): + self._database = None + self._event = None + self._raw_connection = None + self.name = 'Foo' + tracer = TimelineTracer(self.factory, prefix='bar-') + tracer._expanded_raw_execute(StubConnection(), 'cursor', 'statement') + self.assertEqual('bar-Foo', self.timeline.actions[-1].category) + + def test_unnamed_connection(self): + """A connection with no name has put in as a placeholder.""" + tracer = TimelineTracer(self.factory, prefix='bar-') + tracer._expanded_raw_execute('conn', 'cursor', 'statement') + self.assertEqual('bar-', self.timeline.actions[-1].category) + + def test_default_prefix(self): + """By default the prefix "SQL-" is added to the action's category.""" + tracer = TimelineTracer(self.factory) + tracer._expanded_raw_execute('conn', 'cursor', 'statement') + self.assertEqual('SQL-', self.timeline.actions[-1].category) diff -Nru storm-0.18/tests/twisted/__init__.py storm-0.19/tests/twisted/__init__.py --- storm-0.18/tests/twisted/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/tests/twisted/__init__.py 2011-09-25 18:45:14.000000000 +0000 @@ -0,0 +1,18 @@ +__all__ = [ + 'has_twisted', + 'has_psycopg', + ] + +try: + import twisted +except ImportError: + has_twisted = False +else: + has_twisted = True + +try: + import psycopg2 +except ImportError: + has_psycopg = False +else: + has_psycopg = True diff -Nru storm-0.18/tests/twisted/transact.py storm-0.19/tests/twisted/transact.py --- storm-0.18/tests/twisted/transact.py 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/tests/twisted/transact.py 2011-09-25 18:45:14.000000000 +0000 @@ -0,0 +1,283 @@ +import time +import random + +from tests.helper import TestHelper +from tests.zope 
import has_transaction, has_zope_component +from tests.twisted import (has_twisted, has_psycopg) + +if has_transaction and has_zope_component and has_twisted: + import transaction + + from twisted.trial.unittest import TestCase + from zope.component import getUtility + + from storm.zope.interfaces import IZStorm + from storm.exceptions import IntegrityError, DisconnectionError + + from storm.twisted.transact import Transactor, transact + from storm.twisted.testing import FakeThreadPool +else: + # We can't use trial's TestCase as base + TestCase = TestHelper + TestHelper = object + +if has_psycopg: + from psycopg2.extensions import TransactionRollbackError + + +class TransactorTest(TestCase, TestHelper): + + def is_supported(self): + return has_transaction and has_zope_component and has_twisted + + def setUp(self): + TestCase.setUp(self) + TestHelper.setUp(self) + self.threadpool = FakeThreadPool() + self.transaction = self.mocker.mock() + self.transactor = Transactor(self.threadpool, self.transaction) + self.function = self.mocker.mock() + + def test_run(self): + """ + L{Transactor.run} executes a function in a thread, commits + the transaction and returns a deferred firing the function result. + """ + self.mocker.order() + self.expect(self.function(1, arg=2)).result(3) + self.expect(self.transaction.commit()) + self.mocker.replay() + deferred = self.transactor.run(self.function, 1, arg=2) + deferred.addCallback(self.assertEqual, 3) + return deferred + + def test_run_with_function_failure(self): + """ + If the given function raises an error, then L{Transactor.run} + aborts the transaction and re-raises the same error. 
+ """ + self.mocker.order() + self.expect(self.function()).throw(ZeroDivisionError()) + self.expect(self.transaction.abort()) + self.mocker.replay() + deferred = self.transactor.run(self.function) + self.assertFailure(deferred, ZeroDivisionError) + return deferred + + def test_run_with_disconnection_error(self): + """ + If the given function raises a L{DisconnectionError}, then a C{SELECT + 1} will be executed in each registered store such that C{psycopg} + actually detects the disconnection. + """ + self.transactor.retries = 0 + self.mocker.order() + zstorm = self.mocker.mock() + store1 = self.mocker.mock() + store2 = self.mocker.mock() + gu = self.mocker.replace(getUtility) + self.expect(self.function()).throw(DisconnectionError()) + self.expect(gu(IZStorm)).result(zstorm) + self.expect(zstorm.iterstores()).result(iter((("store1", store1), + ("store2", store2)))) + self.expect(store1.execute("SELECT 1")) + self.expect(store2.execute("SELECT 1")) + self.expect(self.transaction.abort()) + self.mocker.replay() + deferred = self.transactor.run(self.function) + self.assertFailure(deferred, DisconnectionError) + return deferred + + def test_run_with_disconnection_error_in_execute_is_ignored(self): + """ + If the given function raises a L{DisconnectionError}, then a C{SELECT + 1} will be executed in each registered store such that C{psycopg} + actually detects the disconnection. If another L{DisconnectionError} + happens during C{execute}, then it is ignored. 
+ """ + self.transactor.retries = 0 + zstorm = self.mocker.mock() + store1 = self.mocker.mock() + store2 = self.mocker.mock() + gu = self.mocker.replace(getUtility) + self.mocker.order() + self.expect(self.function()).throw(DisconnectionError()) + self.expect(gu(IZStorm)).result(zstorm) + self.expect(zstorm.iterstores()).result(iter((("store1", store1), + ("store2", store2)))) + self.expect(store1.execute("SELECT 1")).throw(DisconnectionError()) + self.expect(store2.execute("SELECT 1")) + self.expect(self.transaction.abort()) + self.mocker.replay() + deferred = self.transactor.run(self.function) + self.assertFailure(deferred, DisconnectionError) + return deferred + + def test_run_with_commit_failure(self): + """ + If the given function succeeds but the transaction fails to commit, + then L{Transactor.run} aborts the transaction and re-raises + the commit exception. + """ + self.mocker.order() + self.expect(self.function()) + self.expect(self.transaction.commit()).throw(ZeroDivisionError()) + self.expect(self.transaction.abort()) + self.mocker.replay() + deferred = self.transactor.run(self.function) + self.assertFailure(deferred, ZeroDivisionError) + return deferred + + def test_wb_default_transaction(self): + """ + By default L{Transact} uses the global transaction manager. + """ + transactor = Transactor(self.threadpool) + self.assertIdentical(transaction, transactor._transaction) + + def test_decorate(self): + """ + A L{transact} decorator can be used with methods of an object that + contains a L{Transactor} instance as a C{transactor} instance variable, + ensuring that the decorated function is called via L{Transactor.run}. + """ + self.mocker.order() + self.expect(self.transaction.commit()) + self.mocker.replay() + + @transact + def function(self): + """docstring""" + return "result" + + # Function metadata is copied to the wrapper. 
+ self.assertEqual("docstring", function.__doc__) + deferred = function(self) + deferred.addCallback(self.assertEqual, "result") + return deferred + + def test_run_with_integrity_error_retries(self): + """ + If the given function raises a L{IntegrityError}, then the function + will be retried another two times before letting the exception bubble + up. + """ + sleep = self.mocker.replace(time.sleep) + uniform = self.mocker.replace(random).uniform + self.mocker.order() + + self.expect(self.function()).throw(IntegrityError()) + self.expect(self.transaction.abort()) + self.expect(uniform(1, 2 ** 1)).result(1) + self.expect(sleep(1)) + + self.expect(self.function()).throw(IntegrityError()) + self.expect(self.transaction.abort()) + self.expect(uniform(1, 2 ** 2)).result(2) + self.expect(sleep(2)) + + self.expect(self.function()).throw(IntegrityError()) + self.expect(self.transaction.abort()) + self.mocker.replay() + + deferred = self.transactor.run(self.function) + self.assertFailure(deferred, IntegrityError) + return deferred + + def test_run_with_transaction_rollback_error_retries(self): + """ + If the given function raises a L{TransactionRollbackError}, then the + function will be retried another two times before letting the exception + bubble up. 
+ """ + if not has_psycopg: + return + + sleep = self.mocker.replace(time.sleep) + uniform = self.mocker.replace(random).uniform + self.mocker.order() + + self.expect(self.function()).throw(TransactionRollbackError()) + self.expect(self.transaction.abort()) + self.expect(uniform(1, 2 ** 1)).result(1) + self.expect(sleep(1)) + + self.expect(self.function()).throw(TransactionRollbackError()) + self.expect(self.transaction.abort()) + self.expect(uniform(1, 2 ** 2)).result(2) + self.expect(sleep(2)) + + self.expect(self.function()).throw(TransactionRollbackError()) + self.expect(self.transaction.abort()) + self.mocker.replay() + + deferred = self.transactor.run(self.function) + self.assertFailure(deferred, TransactionRollbackError) + return deferred + + def test_run_with_disconnection_error_retries(self): + """ + If the given function raises a L{DisconnectionError}, then the + function will be retried another two times before letting the exception + bubble up. + """ + zstorm = self.mocker.mock() + gu = self.mocker.replace(getUtility) + sleep = self.mocker.replace(time.sleep) + uniform = self.mocker.replace(random).uniform + self.mocker.order() + + self.expect(self.function()).throw(DisconnectionError()) + self.expect(gu(IZStorm)).result(zstorm) + self.expect(zstorm.iterstores()).result(iter(())) + self.expect(self.transaction.abort()) + self.expect(uniform(1, 2 ** 1)).result(1) + self.expect(sleep(1)) + + self.expect(self.function()).throw(DisconnectionError()) + self.expect(gu(IZStorm)).result(zstorm) + self.expect(zstorm.iterstores()).result(iter(())) + self.expect(self.transaction.abort()) + self.expect(uniform(1, 2 ** 2)).result(2) + self.expect(sleep(2)) + + self.expect(self.function()).throw(DisconnectionError()) + self.expect(gu(IZStorm)).result(zstorm) + self.expect(zstorm.iterstores()).result(iter(())) + self.expect(self.transaction.abort()) + self.mocker.replay() + + deferred = self.transactor.run(self.function) + self.assertFailure(deferred, 
DisconnectionError) + return deferred + + def test_run_with_integrity_error_on_commit_retries(self): + """ + If the given function raises a L{IntegrityError}, then the function + will be retried another two times before letting the exception bubble + up. + """ + sleep = self.mocker.replace(time.sleep) + uniform = self.mocker.replace(random).uniform + self.mocker.order() + + self.expect(self.function()) + self.expect(self.transaction.commit()).throw(IntegrityError()) + self.expect(self.transaction.abort()) + self.expect(uniform(1, 2 ** 1)).result(1) + self.expect(sleep(1)) + + self.expect(self.function()) + self.expect(self.transaction.commit()).throw(IntegrityError()) + self.expect(self.transaction.abort()) + self.expect(uniform(1, 2 ** 2)).result(2) + self.expect(sleep(2)) + + self.expect(self.function()) + self.expect(self.transaction.commit()).throw(IntegrityError()) + self.expect(self.transaction.abort()) + self.mocker.replay() + + deferred = self.transactor.run(self.function) + self.assertFailure(deferred, IntegrityError) + return deferred diff -Nru storm-0.18/tests/variables.py storm-0.19/tests/variables.py --- storm-0.18/tests/variables.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/variables.py 2011-09-25 18:45:14.000000000 +0000 @@ -28,6 +28,7 @@ except ImportError: uuid = None +from storm.compat import json from storm.exceptions import NoneError from storm.variables import * from storm.event import EventSystem @@ -554,6 +555,7 @@ date_str = "1977-05-04" date_uni = unicode(date_str) date_obj = date(1977, 5, 4) + datetime_obj = datetime(1977, 5, 4, 0, 0, 0) variable = DateVariable() @@ -563,6 +565,8 @@ self.assertEquals(variable.get(), date_obj) variable.set(date_obj, from_db=True) self.assertEquals(variable.get(), date_obj) + variable.set(datetime_obj, from_db=True) + self.assertEquals(variable.get(), date_obj) self.assertRaises(TypeError, variable.set, 0, from_db=True) self.assertRaises(TypeError, variable.set, marker, from_db=True) @@ 
-822,13 +826,16 @@ self.assertEquals(variable.get(), value) -class PickleVariableTest(TestHelper): +class EncodedValueVariableTestMixin(object): + + encoding = None + variable_type = None def test_get_set(self): d = {"a": 1} - d_dump = pickle.dumps(d, -1) + d_dump = self.encode(d) - variable = PickleVariable() + variable = self.variable_type() variable.set(d) self.assertEquals(variable.get(), d) @@ -850,7 +857,7 @@ def test_pickle_events(self): event = EventSystem(marker) - variable = PickleVariable(event=event, value_factory=list) + variable = self.variable_type(event=event, value_factory=list) changes = [] def changed(owner, variable, old_value, new_value, fromdb): @@ -884,6 +891,34 @@ self.assertEquals(changes, [(variable, None, ["a"], False)]) +class PickleVariableTest(EncodedValueVariableTestMixin, TestHelper): + + encode = staticmethod(lambda data: pickle.dumps(data, -1)) + variable_type = PickleVariable + + +class JSONVariableTest(EncodedValueVariableTestMixin, TestHelper): + + encode = staticmethod(lambda data: json.dumps(data).decode("utf-8")) + variable_type = JSONVariable + + def is_supported(self): + return json is not None + + def test_unicode_from_db_required(self): + # JSONVariable._loads() complains loudly if it does not receive a + # unicode string because it has no way of knowing its encoding. + variable = self.variable_type() + self.assertRaises(TypeError, variable.set, '"abc"', from_db=True) + + def test_unicode_to_db(self): + # JSONVariable._dumps() works around unicode/str handling issues in + # simplejson/json. 
+ variable = self.variable_type() + variable.set({u"a": 1}) + self.assertTrue(isinstance(variable.get(to_db=True), unicode)) + + class ListVariableTest(TestHelper): def test_get_set(self): diff -Nru storm-0.18/tests/wsgi.py storm-0.19/tests/wsgi.py --- storm-0.18/tests/wsgi.py 1970-01-01 00:00:00.000000000 +0000 +++ storm-0.19/tests/wsgi.py 2011-09-25 18:45:14.000000000 +0000 @@ -0,0 +1,117 @@ +import Queue +from unittest import TestCase +import threading +import time + +from storm.wsgi import make_app + +class TestMakeApp(TestCase): + + def stub_app(self, environ, start_response): + if getattr(self, 'in_request', None): + self.in_request() + getattr(self, 'calls', []).append('stub_app') + start_response('200 OK', []) + yield '' + if getattr(self, 'in_generator', None): + self.in_generator() + + def stub_start_response(self, status, headers): + pass + + def test_find_timeline_outside_request(self): + app, find_timeline = make_app(self.stub_app) + # outside a request, find_timeline returns nothing: + self.assertEqual(None, find_timeline()) + + def test_find_timeline_in_request_not_set(self): + # In a request, with no timeline object in the environ, find_timeline + # returns None: + app, find_timeline = make_app(self.stub_app) + self.in_request = lambda:self.assertEqual(None, find_timeline()) + self.calls = [] + list(app({}, self.stub_start_response)) + # And we definitely got into the call: + self.assertEqual(['stub_app'], self.calls) + + def test_find_timeline_set_in_environ(self): + # If a timeline object is known, find_timeline finds it: + app, find_timeline = make_app(self.stub_app) + timeline = object() + self.in_request = lambda:self.assertEqual(timeline, find_timeline()) + list(app({'timeline.timeline': timeline}, self.stub_start_response)) + # Having left the request, no timeline is known: + self.assertEqual(None, find_timeline()) + + def test_find_timeline_set_in_environ_during_generator(self): + # If a timeline object is known, find_timeline finds it: + 
app, find_timeline = make_app(self.stub_app) + timeline = object() + self.in_generator = lambda:self.assertEqual(timeline, find_timeline()) + list(app({'timeline.timeline': timeline}, self.stub_start_response)) + # Having left the request, no timeline is known: + self.assertEqual(None, find_timeline()) + + def raiser(self): + raise ValueError('foo') + + def test_find_timeline_exception_in_app_still_gets_cleared(self): + self.in_request = self.raiser + app, find_timeline = make_app(self.stub_app) + timeline = object() + self.assertRaises( + ValueError, lambda: list(app({'timeline.timeline': timeline}, + self.stub_start_response))) + self.assertEqual(None, find_timeline()) + + def test_find_timeline_exception_in_generator_still_gets_cleared(self): + self.in_generator = self.raiser + app, find_timeline = make_app(self.stub_app) + timeline = object() + self.assertRaises( + ValueError, lambda: list(app({'timeline.timeline': timeline}, + self.stub_start_response))) + self.assertEqual(None, find_timeline()) + + def test_lookups_are_threaded(self): + # with two threads in a request at once, each only sees their own + # timeline. + app, find_timeline = make_app(self.stub_app) + errors = Queue.Queue() + sync = threading.Condition() + waiting = [] + def check_timeline(): + timeline = object() + def start_response(status, headers): + # Block on the condition, so all test threads are in + # start_response when the test resumes. 
+ sync.acquire() + waiting.append('x') + sync.wait() + sync.release() + found_timeline = find_timeline() + if found_timeline != timeline: + errors.put((found_timeline, timeline)) + list(app({'timeline.timeline': timeline}, start_response)) + t1 = threading.Thread(target=check_timeline) + t2 = threading.Thread(target=check_timeline) + t1.start() + try: + t2.start() + try: + while True: + sync.acquire() + if len(waiting) == 2: + break + sync.release() + time.sleep(0) + sync.notify() + sync.notify() + sync.release() + finally: + t2.join() + finally: + t1.join() + if errors.qsize(): + found_timeline, timeline = errors.get(False) + self.assertEqual(timeline, found_timeline) diff -Nru storm-0.18/tests/zope/__init__.py storm-0.19/tests/zope/__init__.py --- storm-0.18/tests/zope/__init__.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/zope/__init__.py 2011-09-25 18:45:14.000000000 +0000 @@ -20,8 +20,8 @@ __all__ = [ 'has_transaction', - 'has_zope', 'has_zope_component', + 'has_zope_security', 'has_testresources', ] @@ -40,10 +40,15 @@ has_zope_component = True try: + import zope.security +except ImportError: + has_zope_security = False +else: + has_zope_security = True + +try: import testresources except ImportError: has_testresources = False else: has_testresources = True - -has_zope = has_transaction and has_zope_component diff -Nru storm-0.18/tests/zope/testing.py storm-0.19/tests/zope/testing.py --- storm-0.18/tests/zope/testing.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/zope/testing.py 2011-09-25 18:45:14.000000000 +0000 @@ -22,12 +22,15 @@ import sys from tests.helper import TestHelper -from tests.zope import has_zope, has_testresources +from tests.zope import has_transaction, has_zope_component, has_testresources from storm.locals import create_database, Store, Unicode, Int from storm.exceptions import IntegrityError -if has_zope and has_testresources: +if has_transaction and has_zope_component and has_testresources: + from 
zope.component import provideUtility, getUtility + from storm.zope.zstorm import ZStorm + from storm.zope.interfaces import IZStorm from storm.zope.schema import ZSchema from storm.zope.testing import ZStormResourceManager @@ -41,7 +44,7 @@ class ZStormResourceManagerTest(TestHelper): def is_supported(self): - return has_zope and has_testresources + return has_transaction and has_zope_component and has_testresources def setUp(self): super(ZStormResourceManagerTest, self).setUp() @@ -56,9 +59,10 @@ create = ["CREATE TABLE test (foo TEXT UNIQUE, bar INT)"] drop = ["DROP TABLE test"] delete = ["DELETE FROM test"] - schema = ZSchema(create, drop, delete, patch_package) uri = "sqlite:///%s" % self.makeFile() - self.resource = ZStormResourceManager({"test": (uri, schema)}) + schema = ZSchema(create, drop, delete, patch_package) + self.databases = [{"name": "test", "uri": uri, "schema": schema}] + self.resource = ZStormResourceManager(self.databases) self.store = Store(create_database(uri)) def tearDown(self): @@ -103,12 +107,21 @@ store = zstorm.get("test") self.assertEqual([], list(store.execute("SELECT foo FROM test"))) + def test_make_zstorm_overwritten(self): + """ + L{ZStormResourceManager.make} registers its own ZStorm again if a test + has registered a new ZStorm utility overwriting the resource one. + """ + zstorm = self.resource.make([]) + provideUtility(ZStorm()) + self.resource.make([]) + self.assertIs(zstorm, getUtility(IZStorm)) + def test_clean_flush(self): """ L{ZStormResourceManager.clean} tries to flush the stores to make sure that they are all in a consistent state. """ - class Test(object): __storm_table__ = "test" foo = Unicode() @@ -135,3 +148,76 @@ store.commit() self.resource.clean(zstorm) self.assertEqual([], list(self.store.execute("SELECT * FROM test"))) + + def test_clean_with_force_delete(self): + """ + If L{ZStormResourceManager.force_delete} is C{True}, L{Schema.delete} + is always invoked upon test cleanup. 
+ """ + zstorm = self.resource.make([]) + self.store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)") + self.store.commit() + self.resource.force_delete = True + self.resource.clean(zstorm) + self.assertEqual([], list(self.store.execute("SELECT * FROM test"))) + + def test_wb_clean_clears_alive_cache_before_abort(self): + """ + L{ZStormResourceManager.clean} clears the alive cache before + aborting the transaction. + """ + class Test(object): + __storm_table__ = "test" + bar = Int(primary=True) + + def __init__(self, bar): + self.bar = bar + + zstorm = self.resource.make([]) + store = zstorm.get("test") + store.add(Test(1)) + store.add(Test(2)) + real_invalidate = store.invalidate + + def invalidate_proxy(): + self.assertEqual(0, len(store._alive.values())) + real_invalidate() + store.invalidate = invalidate_proxy + + self.resource.clean(zstorm) + + def test_schema_uri(self): + """ + It's possible to specify an alternate URI for applying the schema + and cleaning up tables after a test. + """ + schema_uri = "sqlite:///%s" % self.makeFile() + self.databases[0]["schema-uri"] = schema_uri + zstorm = self.resource.make([]) + store = zstorm.get("test") + schema_store = Store(create_database(schema_uri)) + + # The schema was applied using the alternate schema URI + statement = "SELECT name FROM sqlite_master WHERE name='patch'" + self.assertEqual([], list(store.execute(statement))) + self.assertEqual([("patch",)], list(schema_store.execute(statement))) + + # The cleanup is performed with the alternate schema URI + store.commit() + schema_store.execute("INSERT INTO test (foo) VALUES ('data')") + schema_store.commit() + self.resource.clean(zstorm) + self.assertEqual([], list(schema_store.execute("SELECT * FROM test"))) + + def test_deprecated_database_format(self): + """ + The old deprecated format of the 'database' constructor parameter is + still supported. 
+ """ + import patch_package + uri = "sqlite:///%s" % self.makeFile() + schema = ZSchema([], [], [], patch_package) + resource = ZStormResourceManager({"test": (uri, schema)}) + zstorm = resource.make([]) + store = zstorm.get("test") + self.assertIsNot(None, store) diff -Nru storm-0.18/tests/zope/zstorm.py storm-0.19/tests/zope/zstorm.py --- storm-0.18/tests/zope/zstorm.py 2010-10-22 14:28:52.000000000 +0000 +++ storm-0.19/tests/zope/zstorm.py 2011-09-25 18:45:14.000000000 +0000 @@ -266,7 +266,7 @@ class ZStormUtilityTest(TestHelper): def is_supported(self): - return has_zope_component + return has_transaction and has_zope_component def test_utility(self): provideUtility(ZStorm())