diff --git a/libs/sqlalchemy/__init__.py b/libs/sqlalchemy/__init__.py index 03293b5d..9a21a70f 100644 --- a/libs/sqlalchemy/__init__.py +++ b/libs/sqlalchemy/__init__.py @@ -1,5 +1,5 @@ # sqlalchemy/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -50,6 +50,8 @@ from sqlalchemy.sql import ( ) from sqlalchemy.types import ( + BIGINT, + BINARY, BLOB, BOOLEAN, BigInteger, @@ -87,6 +89,7 @@ from sqlalchemy.types import ( TypeDecorator, Unicode, UnicodeText, + VARBINARY, VARCHAR, ) @@ -117,7 +120,7 @@ from sqlalchemy.engine import create_engine, engine_from_config __all__ = sorted(name for name, obj in locals().items() if not (name.startswith('_') or inspect.ismodule(obj))) -__version__ = '0.7.6' +__version__ = '0.7.10' del inspect, sys diff --git a/libs/sqlalchemy/cextension/resultproxy.c b/libs/sqlalchemy/cextension/resultproxy.c index 3494ccae..ca9d28e6 100644 --- a/libs/sqlalchemy/cextension/resultproxy.c +++ b/libs/sqlalchemy/cextension/resultproxy.c @@ -13,8 +13,8 @@ typedef int Py_ssize_t; #define PY_SSIZE_T_MAX INT_MAX #define PY_SSIZE_T_MIN INT_MIN typedef Py_ssize_t (*lenfunc)(PyObject *); -#define PyInt_FromSsize_t(x) PyInt_FromLong(x) -typedef intargfunc ssizeargfunc; +#define PyInt_FromSsize_t(x) PyInt_FromLong(x) +typedef intargfunc ssizeargfunc; #endif @@ -241,12 +241,13 @@ static PyObject * BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key) { PyObject *processors, *values; - PyObject *processor, *value; + PyObject *processor, *value, *processed_value; PyObject *row, *record, *result, *indexobject; - PyObject *exc_module, *exception; + PyObject *exc_module, *exception, *cstr_obj; char *cstr_key; long index; int key_fallback = 0; + int tuple_check = 0; if (PyInt_CheckExact(key)) { index = PyInt_AS_LONG(key); @@ -299,9 
+300,16 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key) if (exception == NULL) return NULL; - cstr_key = PyString_AsString(key); - if (cstr_key == NULL) + // wow. this seems quite excessive. + cstr_obj = PyObject_Str(key); + if (cstr_obj == NULL) return NULL; + cstr_key = PyString_AsString(cstr_obj); + if (cstr_key == NULL) { + Py_DECREF(cstr_obj); + return NULL; + } + Py_DECREF(cstr_obj); PyErr_Format(exception, "Ambiguous column name '%.200s' in result set! " @@ -319,17 +327,28 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key) return NULL; row = self->row; - if (PyTuple_CheckExact(row)) + if (PyTuple_CheckExact(row)) { value = PyTuple_GetItem(row, index); - else + tuple_check = 1; + } + else { value = PySequence_GetItem(row, index); + tuple_check = 0; + } + if (value == NULL) return NULL; if (processor != Py_None) { - return PyObject_CallFunctionObjArgs(processor, value, NULL); + processed_value = PyObject_CallFunctionObjArgs(processor, value, NULL); + if (!tuple_check) { + Py_DECREF(value); + } + return processed_value; } else { - Py_INCREF(value); + if (tuple_check) { + Py_INCREF(value); + } return value; } } @@ -356,7 +375,7 @@ BaseRowProxy_getattro(BaseRowProxy *self, PyObject *name) tmp = BaseRowProxy_subscript(self, name); if (tmp == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Format( - PyExc_AttributeError, + PyExc_AttributeError, "Could not locate column in row for column '%.200s'", PyString_AsString(name) ); diff --git a/libs/sqlalchemy/connectors/__init__.py b/libs/sqlalchemy/connectors/__init__.py index 5a0e2eb2..a4e017c4 100644 --- a/libs/sqlalchemy/connectors/__init__.py +++ b/libs/sqlalchemy/connectors/__init__.py @@ -1,5 +1,5 @@ # connectors/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php 
diff --git a/libs/sqlalchemy/connectors/mxodbc.py b/libs/sqlalchemy/connectors/mxodbc.py index e46253b4..2848f200 100644 --- a/libs/sqlalchemy/connectors/mxodbc.py +++ b/libs/sqlalchemy/connectors/mxodbc.py @@ -1,5 +1,5 @@ # connectors/mxodbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -117,7 +117,7 @@ class MxODBCConnector(Connector): return False def _get_server_version_info(self, connection): - # eGenix suggests using conn.dbms_version instead + # eGenix suggests using conn.dbms_version instead # of what we're doing here dbapi_con = connection.connection version = [] diff --git a/libs/sqlalchemy/connectors/mysqldb.py b/libs/sqlalchemy/connectors/mysqldb.py index b5a9f05a..be1f3530 100644 --- a/libs/sqlalchemy/connectors/mysqldb.py +++ b/libs/sqlalchemy/connectors/mysqldb.py @@ -63,6 +63,7 @@ class MySQLDBConnector(Connector): util.coerce_kw_type(opts, 'compress', bool) util.coerce_kw_type(opts, 'connect_timeout', int) + util.coerce_kw_type(opts, 'read_timeout', int) util.coerce_kw_type(opts, 'client_flag', int) util.coerce_kw_type(opts, 'local_infile', int) # Note: using either of the below will cause all strings to be returned diff --git a/libs/sqlalchemy/connectors/pyodbc.py b/libs/sqlalchemy/connectors/pyodbc.py index 439b8f4f..5be65d2d 100644 --- a/libs/sqlalchemy/connectors/pyodbc.py +++ b/libs/sqlalchemy/connectors/pyodbc.py @@ -1,5 +1,5 @@ # connectors/pyodbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -37,6 +37,10 @@ class PyODBCConnector(Connector): # if the libessqlsrv.so is detected easysoft = False + def 
__init__(self, supports_unicode_binds=None, **kw): + super(PyODBCConnector, self).__init__(**kw) + self._user_supports_unicode_binds = supports_unicode_binds + @classmethod def dbapi(cls): return __import__('pyodbc') @@ -66,7 +70,7 @@ class PyODBCConnector(Connector): if 'port' in keys and not 'port' in query: port = ',%d' % int(keys.pop('port')) - connectors = ["DRIVER={%s}" % + connectors = ["DRIVER={%s}" % keys.pop('driver', self.pyodbc_driver_name), 'Server=%s%s' % (keys.pop('host', ''), port), 'Database=%s' % keys.pop('database', '') ] @@ -79,9 +83,9 @@ class PyODBCConnector(Connector): connectors.append("Trusted_Connection=Yes") # if set to 'Yes', the ODBC layer will try to automagically - # convert textual data from your database encoding to your - # client encoding. This should obviously be set to 'No' if - # you query a cp1253 encoded database from a latin1 client... + # convert textual data from your database encoding to your + # client encoding. This should obviously be set to 'No' if + # you query a cp1253 encoded database from a latin1 client... if 'odbc_autotranslate' in keys: connectors.append("AutoTranslate=%s" % keys.pop("odbc_autotranslate")) @@ -119,8 +123,12 @@ class PyODBCConnector(Connector): # have not tried pyodbc + python3.1 yet. # Py2K self.supports_unicode_statements = not self.freetds and not self.easysoft - self.supports_unicode_binds = (not self.freetds or - self.freetds_driver_version >= '0.91') and not self.easysoft + if self._user_supports_unicode_binds is not None: + self.supports_unicode_binds = self._user_supports_unicode_binds + else: + self.supports_unicode_binds = (not self.freetds or + self.freetds_driver_version >= '0.91' + ) and not self.easysoft # end Py2K # run other initialization which asks for user name, etc. 
diff --git a/libs/sqlalchemy/connectors/zxJDBC.py b/libs/sqlalchemy/connectors/zxJDBC.py index 5bc25f4a..e2bfed2e 100644 --- a/libs/sqlalchemy/connectors/zxJDBC.py +++ b/libs/sqlalchemy/connectors/zxJDBC.py @@ -1,5 +1,5 @@ # connectors/zxJDBC.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -33,7 +33,7 @@ class ZxJDBCConnector(Connector): def _create_jdbc_url(self, url): """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`""" return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host, - url.port is not None + url.port is not None and ':%s' % url.port or '', url.database) @@ -41,8 +41,8 @@ class ZxJDBCConnector(Connector): opts = self._driver_kwargs() opts.update(url.query) return [ - [self._create_jdbc_url(url), - url.username, url.password, + [self._create_jdbc_url(url), + url.username, url.password, self.jdbc_driver_name], opts] diff --git a/libs/sqlalchemy/databases/__init__.py b/libs/sqlalchemy/databases/__init__.py index fe638837..bb0b370e 100644 --- a/libs/sqlalchemy/databases/__init__.py +++ b/libs/sqlalchemy/databases/__init__.py @@ -1,5 +1,5 @@ # databases/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/__init__.py b/libs/sqlalchemy/dialects/__init__.py index 2d483241..a427cde4 100644 --- a/libs/sqlalchemy/dialects/__init__.py +++ b/libs/sqlalchemy/dialects/__init__.py @@ -1,5 +1,5 @@ # dialects/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of 
SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/access/base.py b/libs/sqlalchemy/dialects/access/base.py index 7c62dcc3..f107c9c8 100644 --- a/libs/sqlalchemy/dialects/access/base.py +++ b/libs/sqlalchemy/dialects/access/base.py @@ -9,9 +9,10 @@ """ Support for the Microsoft Access database. -This dialect is *not* ported to SQLAlchemy 0.6 or 0.7. +.. note:: -This dialect is *not* tested on SQLAlchemy 0.6 or 0.7. + The Access dialect is **non-functional as of SQLAlchemy 0.6**, + pending development efforts to bring it up-to-date. """ @@ -124,7 +125,7 @@ class AccessExecutionContext(default.DefaultExecutionContext): # self._last_inserted_ids[0] is None: self.cursor.execute("SELECT @@identity AS lastrowid") row = self.cursor.fetchone() - self._last_inserted_ids = [int(row[0])] + self._last_inserted_ids = [int(row[0])] #+ self._last_inserted_ids[1:] # print "LAST ROW ID", self._last_inserted_ids @@ -259,7 +260,7 @@ class AccessDialect(default.DefaultDialect): colargs = \ { - 'nullable': not(col.Required or + 'nullable': not(col.Required or col.Attributes & const.dbAutoIncrField), } default = col.DefaultValue @@ -286,7 +287,7 @@ class AccessDialect(default.DefaultDialect): if isinstance(thecol.type, AcInteger) and \ not (thecol.default and isinstance( - thecol.default.arg, + thecol.default.arg, schema.Sequence )): thecol.autoincrement = False @@ -321,7 +322,7 @@ class AccessDialect(default.DefaultDialect): # This is necessary, so we get the latest updates dtbs = daoEngine.OpenDatabase(connection.engine.url.database) - names = [t.Name for t in dtbs.TableDefs + names = [t.Name for t in dtbs.TableDefs if t.Name[:4] != "MSys" and t.Name[:4] != "~TMP"] dtbs.Close() return names @@ -372,7 +373,7 @@ class AccessCompiler(compiler.SQLCompiler): 'length': 'len', } def visit_function(self, func): - """Access function names differ from the ANSI SQL names; + """Access function names differ 
from the ANSI SQL names; rewrite common ones""" func.name = self.function_rewrites.get(func.name, func.name) return super(AccessCompiler, self).visit_function(func) diff --git a/libs/sqlalchemy/dialects/drizzle/__init__.py b/libs/sqlalchemy/dialects/drizzle/__init__.py index bbd716f5..1392b8e2 100644 --- a/libs/sqlalchemy/dialects/drizzle/__init__.py +++ b/libs/sqlalchemy/dialects/drizzle/__init__.py @@ -1,18 +1,22 @@ from sqlalchemy.dialects.drizzle import base, mysqldb -# default dialect base.dialect = mysqldb.dialect from sqlalchemy.dialects.drizzle.base import \ - BIGINT, BINARY, BLOB, BOOLEAN, CHAR, DATE, DATETIME, \ - DECIMAL, DOUBLE, ENUM, \ - FLOAT, INTEGER, \ - NUMERIC, REAL, TEXT, TIME, TIMESTAMP, \ - VARBINARY, VARCHAR, dialect - + BIGINT, BINARY, BLOB, \ + BOOLEAN, CHAR, DATE, \ + DATETIME, DECIMAL, DOUBLE, \ + ENUM, FLOAT, INTEGER, \ + NUMERIC, REAL, TEXT, \ + TIME, TIMESTAMP, VARBINARY, \ + VARCHAR, dialect + __all__ = ( -'BIGINT', 'BINARY', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'DOUBLE', -'ENUM', 'FLOAT', 'INTEGER', -'NUMERIC', 'SET', 'REAL', 'TEXT', 'TIME', 'TIMESTAMP', -'VARBINARY', 'VARCHAR', 'dialect' + 'BIGINT', 'BINARY', 'BLOB', + 'BOOLEAN', 'CHAR', 'DATE', + 'DATETIME', 'DECIMAL', 'DOUBLE', + 'ENUM', 'FLOAT', 'INTEGER', + 'NUMERIC', 'REAL', 'TEXT', + 'TIME', 'TIMESTAMP', 'VARBINARY', + 'VARCHAR', 'dialect' ) diff --git a/libs/sqlalchemy/dialects/drizzle/base.py b/libs/sqlalchemy/dialects/drizzle/base.py index 62967174..0165a2aa 100644 --- a/libs/sqlalchemy/dialects/drizzle/base.py +++ b/libs/sqlalchemy/dialects/drizzle/base.py @@ -1,138 +1,36 @@ # drizzle/base.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # Copyright (C) 2010-2011 Monty Taylor # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php + """Support for the Drizzle database. 
-Supported Versions and Features -------------------------------- +Drizzle is a variant of MySQL. Unlike MySQL, Drizzle's default storage engine +is InnoDB (transactions, foreign-keys) rather than MyISAM. For more +`Notable Differences `_, visit +the `Drizzle Documentation `_. -SQLAlchemy supports the Drizzle database starting with 2010.08. -with capabilities increasing with more modern servers. - -Most available DBAPI drivers are supported; see below. - -===================================== =============== -Feature Minimum Version -===================================== =============== -sqlalchemy.orm 2010.08 -Table Reflection 2010.08 -DDL Generation 2010.08 -utf8/Full Unicode Connections 2010.08 -Transactions 2010.08 -Two-Phase Transactions 2010.08 -Nested Transactions 2010.08 -===================================== =============== - -See the official Drizzle documentation for detailed information about features -supported in any given server release. +The SQLAlchemy Drizzle dialect leans heavily on the MySQL dialect, so much of +the :doc:`SQLAlchemy MySQL ` documentation is also relevant. Connecting ---------- -See the API documentation on individual drivers for details on connecting. - -Connection Timeouts -------------------- - -Drizzle features an automatic connection close behavior, for connections that -have been idle for eight hours or more. To circumvent having this issue, use -the ``pool_recycle`` option which controls the maximum age of any connection:: - - engine = create_engine('drizzle+mysqldb://...', pool_recycle=3600) - -Storage Engines ---------------- - -Drizzle defaults to the ``InnoDB`` storage engine, which is transactional. - -Storage engines can be elected when creating tables in SQLAlchemy by supplying -a ``drizzle_engine='whatever'`` to the ``Table`` constructor. 
Any Drizzle table -creation option can be specified in this syntax:: - - Table('mytable', metadata, - Column('data', String(32)), - drizzle_engine='InnoDB', - ) - -Keys ----- - -Not all Drizzle storage engines support foreign keys. For ``BlitzDB`` and -similar engines, the information loaded by table reflection will not include -foreign keys. For these tables, you may supply a -:class:`~sqlalchemy.ForeignKeyConstraint` at reflection time:: - - Table('mytable', metadata, - ForeignKeyConstraint(['other_id'], ['othertable.other_id']), - autoload=True - ) - -When creating tables, SQLAlchemy will automatically set ``AUTO_INCREMENT`` on -an integer primary key column:: - - >>> t = Table('mytable', metadata, - ... Column('mytable_id', Integer, primary_key=True) - ... ) - >>> t.create() - CREATE TABLE mytable ( - id INTEGER NOT NULL AUTO_INCREMENT, - PRIMARY KEY (id) - ) - -You can disable this behavior by supplying ``autoincrement=False`` to the -:class:`~sqlalchemy.Column`. This flag can also be used to enable -auto-increment on a secondary column in a multi-column key for some storage -engines:: - - Table('mytable', metadata, - Column('gid', Integer, primary_key=True, autoincrement=False), - Column('id', Integer, primary_key=True) - ) - -Drizzle SQL Extensions ----------------------- - -Many of the Drizzle SQL extensions are handled through SQLAlchemy's generic -function and operator support:: - - table.select(table.c.password==func.md5('plaintext')) - table.select(table.c.username.op('regexp')('^[a-d]')) - -And of course any valid Drizzle statement can be executed as a string as well. - -Some limited direct support for Drizzle extensions to SQL is currently -available. - -* SELECT pragma:: - - select(..., prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT']) - -* UPDATE with LIMIT:: - - update(..., drizzle_limit=10) +See the individual driver sections below for details on connecting. 
""" -import datetime, inspect, re, sys - -from sqlalchemy import schema as sa_schema -from sqlalchemy import exc, log, sql, util -from sqlalchemy.sql import operators as sql_operators -from sqlalchemy.sql import functions as sql_functions -from sqlalchemy.sql import compiler -from array import array as _array - -from sqlalchemy.engine import reflection -from sqlalchemy.engine import base as engine_base, default +from sqlalchemy import exc +from sqlalchemy import log from sqlalchemy import types as sqltypes +from sqlalchemy.engine import reflection from sqlalchemy.dialects.mysql import base as mysql_dialect - from sqlalchemy.types import DATE, DATETIME, BOOLEAN, TIME, \ - BLOB, BINARY, VARBINARY + BLOB, BINARY, VARBINARY + class _NumericType(object): """Base for Drizzle numeric types.""" @@ -140,6 +38,7 @@ class _NumericType(object): def __init__(self, **kw): super(_NumericType, self).__init__(**kw) + class _FloatType(_NumericType, sqltypes.Float): def __init__(self, precision=None, scale=None, asdecimal=True, **kw): if isinstance(self, (REAL, DOUBLE)) and \ @@ -147,23 +46,22 @@ class _FloatType(_NumericType, sqltypes.Float): (precision is None and scale is not None) or (precision is not None and scale is None) ): - raise exc.ArgumentError( - "You must specify both precision and scale or omit " - "both altogether.") + raise exc.ArgumentError( + "You must specify both precision and scale or omit " + "both altogether.") - super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw) + super(_FloatType, self).__init__(precision=precision, + asdecimal=asdecimal, **kw) self.scale = scale + class _StringType(mysql_dialect._StringType): """Base for Drizzle string types.""" - def __init__(self, collation=None, - binary=False, - **kw): + def __init__(self, collation=None, binary=False, **kw): kw['national'] = False - super(_StringType, self).__init__(collation=collation, - binary=binary, - **kw) + super(_StringType, self).__init__(collation=collation, 
binary=binary, + **kw) class NUMERIC(_NumericType, sqltypes.NUMERIC): @@ -180,7 +78,9 @@ class NUMERIC(_NumericType, sqltypes.NUMERIC): :param scale: The number of digits after the decimal point. """ - super(NUMERIC, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw) + + super(NUMERIC, self).__init__(precision=precision, scale=scale, + asdecimal=asdecimal, **kw) class DECIMAL(_NumericType, sqltypes.DECIMAL): @@ -215,9 +115,11 @@ class DOUBLE(_FloatType): :param scale: The number of digits after the decimal point. """ + super(DOUBLE, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw) + class REAL(_FloatType, sqltypes.REAL): """Drizzle REAL type.""" @@ -232,9 +134,11 @@ class REAL(_FloatType, sqltypes.REAL): :param scale: The number of digits after the decimal point. """ + super(REAL, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw) + class FLOAT(_FloatType, sqltypes.FLOAT): """Drizzle FLOAT type.""" @@ -249,42 +153,46 @@ class FLOAT(_FloatType, sqltypes.FLOAT): :param scale: The number of digits after the decimal point. """ + super(FLOAT, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw) def bind_processor(self, dialect): return None + class INTEGER(sqltypes.INTEGER): """Drizzle INTEGER type.""" __visit_name__ = 'INTEGER' def __init__(self, **kw): - """Construct an INTEGER. + """Construct an INTEGER.""" - """ super(INTEGER, self).__init__(**kw) + class BIGINT(sqltypes.BIGINT): """Drizzle BIGINTEGER type.""" __visit_name__ = 'BIGINT' def __init__(self, **kw): - """Construct a BIGINTEGER. 
+ """Construct a BIGINTEGER.""" - """ super(BIGINT, self).__init__(**kw) class _DrizzleTime(mysql_dialect._MSTime): """Drizzle TIME type.""" + class TIMESTAMP(sqltypes.TIMESTAMP): """Drizzle TIMESTAMP type.""" + __visit_name__ = 'TIMESTAMP' + class TEXT(_StringType, sqltypes.TEXT): """Drizzle TEXT type, for text up to 2^16 characters.""" @@ -306,8 +214,10 @@ class TEXT(_StringType, sqltypes.TEXT): only the collation of character data. """ + super(TEXT, self).__init__(length=length, **kw) + class VARCHAR(_StringType, sqltypes.VARCHAR): """Drizzle VARCHAR type, for variable-length character data.""" @@ -325,8 +235,10 @@ class VARCHAR(_StringType, sqltypes.VARCHAR): only the collation of character data. """ + super(VARCHAR, self).__init__(length=length, **kwargs) + class CHAR(_StringType, sqltypes.CHAR): """Drizzle CHAR type, for fixed-length character data.""" @@ -345,8 +257,10 @@ class CHAR(_StringType, sqltypes.CHAR): compatible with the national character set. """ + super(CHAR, self).__init__(length=length, **kwargs) + class ENUM(mysql_dialect.ENUM): """Drizzle ENUM type.""" @@ -363,8 +277,9 @@ class ENUM(mysql_dialect.ENUM): :param strict: Defaults to False: ensure that a given value is in this ENUM's range of permissible values when inserting or updating rows. - Note that Drizzle will not raise a fatal error if you attempt to store - an out of range value- an alternate value will be stored instead. + Note that Drizzle will not raise a fatal error if you attempt to + store an out of range value- an alternate value will be stored + instead. (See Drizzle ENUM documentation.) :param collation: Optional, a column-level collation for this string @@ -390,12 +305,15 @@ class ENUM(mysql_dialect.ENUM): literals for you. This is a transitional option. 
""" + super(ENUM, self).__init__(*enums, **kw) + class _DrizzleBoolean(sqltypes.Boolean): def get_dbapi_type(self, dbapi): return dbapi.NUMERIC + colspecs = { sqltypes.Numeric: NUMERIC, sqltypes.Float: FLOAT, @@ -404,6 +322,7 @@ colspecs = { sqltypes.Boolean: _DrizzleBoolean, } + # All the types we have in Drizzle ischema_names = { 'BIGINT': BIGINT, @@ -427,6 +346,7 @@ ischema_names = { 'VARCHAR': VARCHAR, } + class DrizzleCompiler(mysql_dialect.MySQLCompiler): def visit_typeclause(self, typeclause): @@ -439,7 +359,7 @@ class DrizzleCompiler(mysql_dialect.MySQLCompiler): def visit_cast(self, cast, **kwargs): type_ = self.process(cast.typeclause) if type_ is None: - return self.process(cast.clause) + return self.process(cast.clause) return 'CAST(%s AS %s)' % (self.process(cast.clause), type_) @@ -447,12 +367,13 @@ class DrizzleCompiler(mysql_dialect.MySQLCompiler): class DrizzleDDLCompiler(mysql_dialect.MySQLDDLCompiler): pass + class DrizzleTypeCompiler(mysql_dialect.MySQLTypeCompiler): def _extend_numeric(self, type_, spec): return spec def _extend_string(self, type_, defaults, spec): - """Extend a string-type declaration with standard SQL + """Extend a string-type declaration with standard SQL COLLATE annotations and Drizzle specific extensions. """ @@ -492,11 +413,16 @@ class DrizzleTypeCompiler(mysql_dialect.MySQLTypeCompiler): class DrizzleExecutionContext(mysql_dialect.MySQLExecutionContext): pass + class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer): pass + class DrizzleDialect(mysql_dialect.MySQLDialect): - """Details of the Drizzle dialect. Not used directly in application code.""" + """Details of the Drizzle dialect. + + Not used directly in application code. 
+ """ name = 'drizzle' @@ -505,7 +431,6 @@ class DrizzleDialect(mysql_dialect.MySQLDialect): supports_native_boolean = True supports_views = False - default_paramstyle = 'format' colspecs = colspecs @@ -516,8 +441,8 @@ class DrizzleDialect(mysql_dialect.MySQLDialect): preparer = DrizzleIdentifierPreparer def on_connect(self): - """Force autocommit - Drizzle Bug#707842 doesn't set this - properly""" + """Force autocommit - Drizzle Bug#707842 doesn't set this properly""" + def connect(conn): conn.autocommit(False) return connect @@ -535,6 +460,7 @@ class DrizzleDialect(mysql_dialect.MySQLDialect): @reflection.cache def get_table_names(self, connection, schema=None, **kw): """Return a Unicode SHOW TABLES from a given schema.""" + if schema is not None: current_schema = schema else: @@ -554,8 +480,8 @@ class DrizzleDialect(mysql_dialect.MySQLDialect): Cached per-connection. This value can not change without a server restart. - """ + return 0 def _detect_collations(self, connection): @@ -566,7 +492,9 @@ class DrizzleDialect(mysql_dialect.MySQLDialect): collations = {} charset = self._connection_charset - rs = connection.execute('SELECT CHARACTER_SET_NAME, COLLATION_NAME from data_dictionary.COLLATIONS') + rs = connection.execute( + 'SELECT CHARACTER_SET_NAME, COLLATION_NAME FROM' + ' data_dictionary.COLLATIONS') for row in self._compat_fetchall(rs, charset): collations[row[0]] = row[1] return collations @@ -575,8 +503,7 @@ class DrizzleDialect(mysql_dialect.MySQLDialect): """Detect and adjust for the ANSI_QUOTES sql mode.""" self._server_ansiquotes = False - self._backslash_escapes = False -log.class_logger(DrizzleDialect) +log.class_logger(DrizzleDialect) diff --git a/libs/sqlalchemy/dialects/drizzle/mysqldb.py b/libs/sqlalchemy/dialects/drizzle/mysqldb.py index 01116fa9..ce9518a8 100644 --- a/libs/sqlalchemy/dialects/drizzle/mysqldb.py +++ b/libs/sqlalchemy/dialects/drizzle/mysqldb.py @@ -1,11 +1,9 @@ -"""Support for the Drizzle database via the Drizzle-python 
adapter. +"""Support for the Drizzle database via the mysql-python adapter. -Drizzle-Python is available at: +MySQL-Python is available at: http://sourceforge.net/projects/mysql-python -At least version 1.2.1 or 1.2.2 should be used. - Connecting ----------- @@ -13,37 +11,22 @@ Connect string format:: drizzle+mysqldb://:@[:]/ -Unicode -------- - -Drizzle accommodates Python ``unicode`` objects directly and -uses the ``utf8`` encoding in all cases. - -Known Issues -------------- - -Drizzle-python at least as of version 1.2.2 has a serious memory leak related -to unicode conversion, a feature which is disabled via ``use_unicode=0``. -The recommended connection form with SQLAlchemy is:: - - engine = create_engine('mysql://scott:tiger@localhost/test?charset=utf8&use_unicode=0', pool_recycle=3600) - - """ -from sqlalchemy.dialects.drizzle.base import (DrizzleDialect, - DrizzleExecutionContext, - DrizzleCompiler, DrizzleIdentifierPreparer) +from sqlalchemy.dialects.drizzle.base import ( + DrizzleDialect, + DrizzleExecutionContext, + DrizzleCompiler, + DrizzleIdentifierPreparer) from sqlalchemy.connectors.mysqldb import ( - MySQLDBExecutionContext, - MySQLDBCompiler, - MySQLDBIdentifierPreparer, - MySQLDBConnector - ) + MySQLDBExecutionContext, + MySQLDBCompiler, + MySQLDBIdentifierPreparer, + MySQLDBConnector) -class DrizzleExecutionContext_mysqldb( - MySQLDBExecutionContext, - DrizzleExecutionContext): + +class DrizzleExecutionContext_mysqldb(MySQLDBExecutionContext, + DrizzleExecutionContext): pass @@ -51,11 +34,11 @@ class DrizzleCompiler_mysqldb(MySQLDBCompiler, DrizzleCompiler): pass -class DrizzleIdentifierPreparer_mysqldb( - MySQLDBIdentifierPreparer, - DrizzleIdentifierPreparer): +class DrizzleIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer, + DrizzleIdentifierPreparer): pass + class DrizzleDialect_mysqldb(MySQLDBConnector, DrizzleDialect): execution_ctx_cls = DrizzleExecutionContext_mysqldb statement_compiler = DrizzleCompiler_mysqldb @@ -63,6 +46,7 @@ 
class DrizzleDialect_mysqldb(MySQLDBConnector, DrizzleDialect): def _detect_charset(self, connection): """Sniff out the character set in use for connection results.""" + return 'utf8' diff --git a/libs/sqlalchemy/dialects/firebird/__init__.py b/libs/sqlalchemy/dialects/firebird/__init__.py index 665e3226..2a3b756f 100644 --- a/libs/sqlalchemy/dialects/firebird/__init__.py +++ b/libs/sqlalchemy/dialects/firebird/__init__.py @@ -1,5 +1,5 @@ # firebird/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -14,7 +14,7 @@ from sqlalchemy.dialects.firebird.base import \ dialect __all__ = ( - 'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME', + 'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME', 'TEXT', 'NUMERIC', 'FLOAT', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB', 'dialect' ) diff --git a/libs/sqlalchemy/dialects/firebird/base.py b/libs/sqlalchemy/dialects/firebird/base.py index 031c6891..a0bb9c20 100644 --- a/libs/sqlalchemy/dialects/firebird/base.py +++ b/libs/sqlalchemy/dialects/firebird/base.py @@ -1,5 +1,5 @@ # firebird/base.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -135,7 +135,7 @@ class VARCHAR(_StringType, sqltypes.VARCHAR): __visit_name__ = 'VARCHAR' def __init__(self, length = None, **kwargs): - super(VARCHAR, self).__init__(length=length, **kwargs) + super(VARCHAR, self).__init__(length=length, **kwargs) class CHAR(_StringType, sqltypes.CHAR): """Firebird CHAR type""" @@ -164,7 +164,7 @@ ischema_names = { } -# TODO: date conversion types (should be implemented as _FBDateTime, +# TODO: date conversion types 
(should be implemented as _FBDateTime, # _FBDate, etc. as bind/result functionality is required) class FBTypeCompiler(compiler.GenericTypeCompiler): @@ -198,7 +198,7 @@ class FBTypeCompiler(compiler.GenericTypeCompiler): class FBCompiler(sql.compiler.SQLCompiler): - """Firebird specific idiosincrasies""" + """Firebird specific idiosyncrasies""" def visit_mod(self, binary, **kw): # Firebird lacks a builtin modulo operator, but there is @@ -293,7 +293,7 @@ class FBCompiler(sql.compiler.SQLCompiler): class FBDDLCompiler(sql.compiler.DDLCompiler): - """Firebird syntactic idiosincrasies""" + """Firebird syntactic idiosyncrasies""" def visit_create_sequence(self, create): """Generate a ``CREATE GENERATOR`` statement for the sequence.""" @@ -339,7 +339,7 @@ class FBExecutionContext(default.DefaultExecutionContext): """Get the next value from the sequence using ``gen_id()``.""" return self._execute_scalar( - "SELECT gen_id(%s, 1) FROM rdb$database" % + "SELECT gen_id(%s, 1) FROM rdb$database" % self.dialect.identifier_preparer.format_sequence(seq), type_ ) @@ -418,7 +418,7 @@ class FBDialect(default.DefaultDialect): return name def has_table(self, connection, table_name, schema=None): - """Return ``True`` if the given table exists, ignoring + """Return ``True`` if the given table exists, ignoring the `schema`.""" tblqry = """ @@ -489,8 +489,8 @@ class FBDialect(default.DefaultDialect): return pkfields @reflection.cache - def get_column_sequence(self, connection, - table_name, column_name, + def get_column_sequence(self, connection, + table_name, column_name, schema=None, **kw): tablename = self.denormalize_name(table_name) colname = self.denormalize_name(column_name) @@ -528,7 +528,7 @@ class FBDialect(default.DefaultDialect): COALESCE(cs.rdb$bytes_per_character,1) AS flen, f.rdb$field_precision AS fprec, f.rdb$field_scale AS fscale, - COALESCE(r.rdb$default_source, + COALESCE(r.rdb$default_source, f.rdb$default_source) AS fdefault FROM rdb$relation_fields r JOIN 
rdb$fields f ON r.rdb$field_source=f.rdb$field_name @@ -563,7 +563,7 @@ class FBDialect(default.DefaultDialect): coltype = sqltypes.NULLTYPE elif colspec == 'INT64': coltype = coltype( - precision=row['fprec'], + precision=row['fprec'], scale=row['fscale'] * -1) elif colspec in ('VARYING', 'CSTRING'): coltype = coltype(row['flen']) @@ -582,7 +582,7 @@ class FBDialect(default.DefaultDialect): if row['fdefault'] is not None: # the value comes down as "DEFAULT 'value'": there may be # more than one whitespace around the "DEFAULT" keyword - # and it may also be lower case + # and it may also be lower case # (see also http://tracker.firebirdsql.org/browse/CORE-356) defexpr = row['fdefault'].lstrip() assert defexpr[:8].rstrip().upper() == \ diff --git a/libs/sqlalchemy/dialects/firebird/kinterbasdb.py b/libs/sqlalchemy/dialects/firebird/kinterbasdb.py index d9d749b3..ddca91db 100644 --- a/libs/sqlalchemy/dialects/firebird/kinterbasdb.py +++ b/libs/sqlalchemy/dialects/firebird/kinterbasdb.py @@ -1,5 +1,5 @@ # firebird/kinterbasdb.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -17,20 +17,20 @@ Kinterbasedb backend specific keyword arguments are: SQLAlchemy uses 200 with Unicode, datetime and decimal support (see details__). -* concurrency_level - set the backend policy with regards to threading +* concurrency_level - set the backend policy with regards to threading issues: by default SQLAlchemy uses policy 1 (see details__). -* enable_rowcount - True by default, setting this to False disables - the usage of "cursor.rowcount" with the +* enable_rowcount - True by default, setting this to False disables + the usage of "cursor.rowcount" with the Kinterbasdb dialect, which SQLAlchemy ordinarily calls upon automatically - after any UPDATE or DELETE statement. 
When disabled, SQLAlchemy's - ResultProxy will return -1 for result.rowcount. The rationale here is - that Kinterbasdb requires a second round trip to the database when - .rowcount is called - since SQLA's resultproxy automatically closes - the cursor after a non-result-returning statement, rowcount must be + after any UPDATE or DELETE statement. When disabled, SQLAlchemy's + ResultProxy will return -1 for result.rowcount. The rationale here is + that Kinterbasdb requires a second round trip to the database when + .rowcount is called - since SQLA's resultproxy automatically closes + the cursor after a non-result-returning statement, rowcount must be called, if at all, before the result object is returned. Additionally, cursor.rowcount may not return correct results with older versions - of Firebird, and setting this flag to False will also cause the + of Firebird, and setting this flag to False will also cause the SQLAlchemy ORM to ignore its usage. The behavior can also be controlled on a per-execution basis using the `enable_rowcount` option with :meth:`execution_options()`:: @@ -64,7 +64,7 @@ class _FBNumeric_kinterbasdb(sqltypes.Numeric): class FBExecutionContext_kinterbasdb(FBExecutionContext): @property def rowcount(self): - if self.execution_options.get('enable_rowcount', + if self.execution_options.get('enable_rowcount', self.dialect.enable_rowcount): return self.cursor.rowcount else: @@ -135,7 +135,7 @@ class FBDialect_kinterbasdb(FBDialect): # that for backward compatibility reasons returns a string like # LI-V6.3.3.12981 Firebird 2.0 # where the first version is a fake one resembling the old - # Interbase signature. + # Interbase signature. 
fbconn = connection.connection version = fbconn.server_version @@ -159,7 +159,7 @@ class FBDialect_kinterbasdb(FBDialect): msg = str(e) return ('Unable to complete network request to host' in msg or 'Invalid connection state' in msg or - 'Invalid cursor state' in msg or + 'Invalid cursor state' in msg or 'connection shutdown' in msg) else: return False diff --git a/libs/sqlalchemy/dialects/informix/__init__.py b/libs/sqlalchemy/dialects/informix/__init__.py index e500bea5..bd633da5 100644 --- a/libs/sqlalchemy/dialects/informix/__init__.py +++ b/libs/sqlalchemy/dialects/informix/__init__.py @@ -1,5 +1,5 @@ # informix/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/informix/base.py b/libs/sqlalchemy/dialects/informix/base.py index 867cc408..07561f8d 100644 --- a/libs/sqlalchemy/dialects/informix/base.py +++ b/libs/sqlalchemy/dialects/informix/base.py @@ -1,5 +1,5 @@ # informix/base.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # coding: gbk # # This module is part of SQLAlchemy and is released under @@ -7,8 +7,11 @@ """Support for the Informix database. -This dialect is mostly functional as of SQLAlchemy 0.6.5. +.. note:: + The Informix dialect functions on current SQLAlchemy versions + but is not regularly tested, and may have many issues and + caveats not currently handled. 
""" @@ -23,7 +26,7 @@ from sqlalchemy import types as sqltypes RESERVED_WORDS = set( ["abs", "absolute", "access", "access_method", "acos", "active", "add", "address", "add_months", "admin", "after", "aggregate", "alignment", - "all", "allocate", "all_rows", "altere", "and", "ansi", "any", "append", + "all", "allocate", "all_rows", "alter", "and", "ansi", "any", "append", "array", "as", "asc", "ascii", "asin", "at", "atan", "atan2", "attach", "attributes", "audit", "authentication", "authid", "authorization", "authorized", "auto", "autofree", "auto_reprepare", "auto_stat_mode", @@ -463,7 +466,7 @@ class InformixDialect(default.DefaultDialect): c = connection.execute( """select t1.constrname as cons_name, t4.colname as local_column, t7.tabname as remote_table, - t6.colname as remote_column, t7.owner as remote_owner + t6.colname as remote_column, t7.owner as remote_owner from sysconstraints as t1 , systables as t2 , sysindexes as t3 , syscolumns as t4 , sysreferences as t5 , syscolumns as t6 , systables as t7 , @@ -472,7 +475,7 @@ class InformixDialect(default.DefaultDialect): and t3.tabid = t2.tabid and t3.idxname = t1.idxname and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3, t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10, - t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16) + t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16) and t5.constrid = t1.constrid and t8.constrid = t5.primary and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3, t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10, @@ -519,7 +522,7 @@ class InformixDialect(default.DefaultDialect): # Select the column positions from sysindexes for sysconstraints data = connection.execute( - """select t2.* + """select t2.* from systables as t1, sysindexes as t2, sysconstraints as t3 where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=? 
and t2.idxname=t3.idxname and t3.constrtype='P'""", @@ -541,7 +544,7 @@ class InformixDialect(default.DefaultDialect): c = connection.execute( """select t1.colname from syscolumns as t1, systables as t2 - where t2.tabname=? and t1.tabid = t2.tabid and + where t2.tabname=? and t1.tabid = t2.tabid and t1.colno in (%s)""" % place_holder, table_name, *colpositions ).fetchall() @@ -565,7 +568,7 @@ class InformixDialect(default.DefaultDialect): c = connection.execute( """select t1.colname from syscolumns as t1, systables as t2 - where t2.tabname=? and t1.tabid = t2.tabid and + where t2.tabname=? and t1.tabid = t2.tabid and t1.colno in (%s)""" % place_holder, table_name, *colnames ).fetchall() diff --git a/libs/sqlalchemy/dialects/informix/informixdb.py b/libs/sqlalchemy/dialects/informix/informixdb.py index b771e150..8b543467 100644 --- a/libs/sqlalchemy/dialects/informix/informixdb.py +++ b/libs/sqlalchemy/dialects/informix/informixdb.py @@ -1,5 +1,5 @@ # informix/informixdb.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/maxdb/__init__.py b/libs/sqlalchemy/dialects/maxdb/__init__.py index c045f115..9d1d6418 100644 --- a/libs/sqlalchemy/dialects/maxdb/__init__.py +++ b/libs/sqlalchemy/dialects/maxdb/__init__.py @@ -1,5 +1,5 @@ # maxdb/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/maxdb/base.py b/libs/sqlalchemy/dialects/maxdb/base.py index ce3aaaa1..68ae630e 100644 --- a/libs/sqlalchemy/dialects/maxdb/base.py +++ b/libs/sqlalchemy/dialects/maxdb/base.py 
@@ -1,14 +1,15 @@ # maxdb/base.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Support for the MaxDB database. -This dialect is *not* ported to SQLAlchemy 0.6 or 0.7. +.. note:: -This dialect is *not* tested on SQLAlchemy 0.6 or 0.7. + The MaxDB dialect is **non-functional as of SQLAlchemy 0.6**, + pending development efforts to bring it up-to-date. Overview -------- @@ -254,7 +255,7 @@ class MaxTimestamp(sqltypes.DateTime): value[20:])]) else: raise exc.InvalidRequestError( - "datetimeformat '%s' is not supported." % + "datetimeformat '%s' is not supported." % dialect.datetimeformat) return process @@ -282,18 +283,18 @@ class MaxDate(sqltypes.Date): if value is None: return None else: - return datetime.date(int(value[0:4]), int(value[4:6]), + return datetime.date(int(value[0:4]), int(value[4:6]), int(value[6:8])) elif dialect.datetimeformat == 'iso': def process(value): if value is None: return None else: - return datetime.date(int(value[0:4]), int(value[5:7]), + return datetime.date(int(value[0:4]), int(value[5:7]), int(value[8:10])) else: raise exc.InvalidRequestError( - "datetimeformat '%s' is not supported." % + "datetimeformat '%s' is not supported." % dialect.datetimeformat) return process @@ -321,7 +322,7 @@ class MaxTime(sqltypes.Time): if value is None: return None else: - return datetime.time(int(value[0:4]), int(value[4:6]), + return datetime.time(int(value[0:4]), int(value[4:6]), int(value[6:8])) elif dialect.datetimeformat == 'iso': def process(value): @@ -332,7 +333,7 @@ class MaxTime(sqltypes.Time): int(value[8:10])) else: raise exc.InvalidRequestError( - "datetimeformat '%s' is not supported." % + "datetimeformat '%s' is not supported." 
% dialect.datetimeformat) return process diff --git a/libs/sqlalchemy/dialects/maxdb/sapdb.py b/libs/sqlalchemy/dialects/maxdb/sapdb.py index 280411b2..51f272a2 100644 --- a/libs/sqlalchemy/dialects/maxdb/sapdb.py +++ b/libs/sqlalchemy/dialects/maxdb/sapdb.py @@ -1,5 +1,5 @@ # maxdb/sapdb.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/mssql/__init__.py b/libs/sqlalchemy/dialects/mssql/__init__.py index 8a2101c5..b3acbf3a 100644 --- a/libs/sqlalchemy/dialects/mssql/__init__.py +++ b/libs/sqlalchemy/dialects/mssql/__init__.py @@ -1,5 +1,5 @@ # mssql/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -18,9 +18,9 @@ from sqlalchemy.dialects.mssql.base import \ __all__ = ( - 'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR', + 'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR', 'NCHAR', 'TEXT', 'NTEXT', 'DECIMAL', 'NUMERIC', 'FLOAT', 'DATETIME', - 'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME', + 'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME', 'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP', 'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'dialect' ) \ No newline at end of file diff --git a/libs/sqlalchemy/dialects/mssql/adodbapi.py b/libs/sqlalchemy/dialects/mssql/adodbapi.py index 21e63288..05ac6d6f 100644 --- a/libs/sqlalchemy/dialects/mssql/adodbapi.py +++ b/libs/sqlalchemy/dialects/mssql/adodbapi.py @@ -1,5 +1,5 @@ # mssql/adodbapi.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and 
contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -16,7 +16,7 @@ import sys class MSDateTime_adodbapi(MSDateTime): def result_processor(self, dialect, coltype): def process(value): - # adodbapi will return datetimes with empty time + # adodbapi will return datetimes with empty time # values as datetime.date() objects. # Promote them back to full datetime.datetime() if type(value) is datetime.date: @@ -49,7 +49,7 @@ class MSDialect_adodbapi(MSDialect): connectors = ["Provider=SQLOLEDB"] if 'port' in keys: - connectors.append ("Data Source=%s, %s" % + connectors.append ("Data Source=%s, %s" % (keys.get("host"), keys.get("port"))) else: connectors.append ("Data Source=%s" % keys.get("host")) diff --git a/libs/sqlalchemy/dialects/mssql/base.py b/libs/sqlalchemy/dialects/mssql/base.py index 103b0a3e..b6e0d881 100644 --- a/libs/sqlalchemy/dialects/mssql/base.py +++ b/libs/sqlalchemy/dialects/mssql/base.py @@ -1,5 +1,5 @@ # mssql/base.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -74,7 +74,7 @@ will yield:: SELECT TOP n If using SQL Server 2005 or above, LIMIT with OFFSET -support is available through the ``ROW_NUMBER OVER`` construct. +support is available through the ``ROW_NUMBER OVER`` construct. For versions below 2005, LIMIT with OFFSET usage will fail. Nullability @@ -107,10 +107,10 @@ Compatibility Levels -------------------- MSSQL supports the notion of setting compatibility levels at the database level. 
This allows, for instance, to run a database that -is compatibile with SQL2000 while running on a SQL2005 database +is compatible with SQL2000 while running on a SQL2005 database server. ``server_version_info`` will always return the database server version information (in this case SQL2005) and not the -compatibiility level information. Because of this, if running under +compatibility level information. Because of this, if running under a backwards compatibility mode SQAlchemy may attempt to use T-SQL statements that are unable to be parsed by the database server. @@ -119,14 +119,14 @@ Triggers SQLAlchemy by default uses OUTPUT INSERTED to get at newly generated primary key values via IDENTITY columns or other -server side defaults. MS-SQL does not +server side defaults. MS-SQL does not allow the usage of OUTPUT INSERTED on tables that have triggers. To disable the usage of OUTPUT INSERTED on a per-table basis, specify ``implicit_returning=False`` for each :class:`.Table` which has triggers:: - Table('mytable', metadata, - Column('id', Integer, primary_key=True), + Table('mytable', metadata, + Column('id', Integer, primary_key=True), # ..., implicit_returning=False ) @@ -144,11 +144,11 @@ This option can also be specified engine-wide using the Enabling Snapshot Isolation --------------------------- -Not necessarily specific to SQLAlchemy, SQL Server has a default transaction +Not necessarily specific to SQLAlchemy, SQL Server has a default transaction isolation mode that locks entire tables, and causes even mildly concurrent -applications to have long held locks and frequent deadlocks. -Enabling snapshot isolation for the database as a whole is recommended -for modern levels of concurrency support. This is accomplished via the +applications to have long held locks and frequent deadlocks. +Enabling snapshot isolation for the database as a whole is recommended +for modern levels of concurrency support. 
This is accomplished via the following ALTER DATABASE commands executed at the SQL prompt:: ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON @@ -161,14 +161,15 @@ http://msdn.microsoft.com/en-us/library/ms175095.aspx. Scalar Select Comparisons ------------------------- -The MSSQL dialect contains a legacy behavior whereby comparing -a scalar select to a value using the ``=`` or ``!=`` operator -will resolve to IN or NOT IN, respectively. This behavior is -deprecated and will be removed in 0.8 - the ``s.in_()``/``~s.in_()`` operators -should be used when IN/NOT IN are desired. +.. deprecated:: 0.8 + The MSSQL dialect contains a legacy behavior whereby comparing + a scalar select to a value using the ``=`` or ``!=`` operator + will resolve to IN or NOT IN, respectively. This behavior + will be removed in 0.8 - the ``s.in_()``/``~s.in_()`` operators + should be used when IN/NOT IN are desired. For the time being, the existing behavior prevents a comparison -between scalar select and another value that actually wants to use ``=``. +between scalar select and another value that actually wants to use ``=``. 
To remove this behavior in a forwards-compatible way, apply this compilation rule by placing the following code at the module import level:: @@ -176,7 +177,7 @@ level:: from sqlalchemy.ext.compiler import compiles from sqlalchemy.sql.expression import _BinaryExpression from sqlalchemy.sql.compiler import SQLCompiler - + @compiles(_BinaryExpression, 'mssql') def override_legacy_binary(element, compiler, **kw): return SQLCompiler.visit_binary(compiler, element, **kw) @@ -272,7 +273,7 @@ class _MSDate(sqltypes.Date): return value.date() elif isinstance(value, basestring): return datetime.date(*[ - int(x or 0) + int(x or 0) for x in self._reg.match(value).groups() ]) else: @@ -303,7 +304,7 @@ class TIME(sqltypes.TIME): return value.time() elif isinstance(value, basestring): return datetime.time(*[ - int(x or 0) + int(x or 0) for x in self._reg.match(value).groups()]) else: return value @@ -608,7 +609,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): return self._extend("TEXT", type_) def visit_VARCHAR(self, type_): - return self._extend("VARCHAR", type_, + return self._extend("VARCHAR", type_, length = type_.length or 'max') def visit_CHAR(self, type_): @@ -618,7 +619,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): return self._extend("NCHAR", type_) def visit_NVARCHAR(self, type_): - return self._extend("NVARCHAR", type_, + return self._extend("NVARCHAR", type_, length = type_.length or 'max') def visit_date(self, type_): @@ -641,8 +642,8 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): def visit_VARBINARY(self, type_): return self._extend( - "VARBINARY", - type_, + "VARBINARY", + type_, length=type_.length or 'max') def visit_boolean(self, type_): @@ -689,18 +690,22 @@ class MSExecutionContext(default.DefaultExecutionContext): not self.executemany if self._enable_identity_insert: - self.cursor.execute("SET IDENTITY_INSERT %s ON" % - self.dialect.identifier_preparer.format_table(tbl)) + self.root_connection._cursor_execute(self.cursor, + "SET 
IDENTITY_INSERT %s ON" % + self.dialect.identifier_preparer.format_table(tbl), + ()) def post_exec(self): """Disable IDENTITY_INSERT if enabled.""" + conn = self.root_connection if self._select_lastrowid: if self.dialect.use_scope_identity: - self.cursor.execute( - "SELECT scope_identity() AS lastrowid", ()) + conn._cursor_execute(self.cursor, + "SELECT scope_identity() AS lastrowid", ()) else: - self.cursor.execute("SELECT @@identity AS lastrowid", ()) + conn._cursor_execute(self.cursor, + "SELECT @@identity AS lastrowid", ()) # fetchall() ensures the cursor is consumed without closing it row = self.cursor.fetchall()[0] self._lastrowid = int(row[0]) @@ -710,10 +715,11 @@ class MSExecutionContext(default.DefaultExecutionContext): self._result_proxy = base.FullyBufferedResultProxy(self) if self._enable_identity_insert: - self.cursor.execute( + conn._cursor_execute(self.cursor, "SET IDENTITY_INSERT %s OFF" % self.dialect.identifier_preparer. - format_table(self.compiled.statement.table) + format_table(self.compiled.statement.table), + () ) def get_lastrowid(self): @@ -723,7 +729,7 @@ class MSExecutionContext(default.DefaultExecutionContext): if self._enable_identity_insert: try: self.cursor.execute( - "SET IDENTITY_INSERT %s OFF" % + "SET IDENTITY_INSERT %s OFF" % self.dialect.identifier_preparer.\ format_table(self.compiled.statement.table) ) @@ -766,12 +772,12 @@ class MSSQLCompiler(compiler.SQLCompiler): def visit_concat_op(self, binary, **kw): return "%s + %s" % \ - (self.process(binary.left, **kw), + (self.process(binary.left, **kw), self.process(binary.right, **kw)) def visit_match_op(self, binary, **kw): return "CONTAINS (%s, %s)" % ( - self.process(binary.left, **kw), + self.process(binary.left, **kw), self.process(binary.right, **kw)) def get_select_precolumns(self, select): @@ -803,27 +809,28 @@ class MSSQLCompiler(compiler.SQLCompiler): so tries to wrap it in a subquery with ``row_number()`` criterion. 
""" - if not getattr(select, '_mssql_visit', None) and select._offset: + if select._offset and not getattr(select, '_mssql_visit', None): # to use ROW_NUMBER(), an ORDER BY is required. - orderby = self.process(select._order_by_clause) - if not orderby: + if not select._order_by_clause.clauses: raise exc.CompileError('MSSQL requires an order_by when ' 'using an offset.') _offset = select._offset _limit = select._limit + _order_by_clauses = select._order_by_clause.clauses + select = select._generate() select._mssql_visit = True select = select.column( - sql.literal_column("ROW_NUMBER() OVER (ORDER BY %s)" \ - % orderby).label("mssql_rn") + sql.func.ROW_NUMBER().over(order_by=_order_by_clauses) + .label("mssql_rn") ).order_by(None).alias() mssql_rn = sql.column('mssql_rn') limitselect = sql.select([c for c in select.c if - c.key!='mssql_rn']) - limitselect.append_whereclause(mssql_rn> _offset) + c.key != 'mssql_rn']) + limitselect.append_whereclause(mssql_rn > _offset) if _limit is not None: - limitselect.append_whereclause(mssql_rn<=(_limit + _offset)) + limitselect.append_whereclause(mssql_rn <= (_limit + _offset)) return self.process(limitselect, iswrapper=True, **kwargs) else: return compiler.SQLCompiler.visit_select(self, select, **kwargs) @@ -861,7 +868,7 @@ class MSSQLCompiler(compiler.SQLCompiler): return "SAVE TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt) def visit_rollback_to_savepoint(self, savepoint_stmt): - return ("ROLLBACK TRANSACTION %s" + return ("ROLLBACK TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt)) def visit_column(self, column, result_map=None, **kwargs): @@ -875,15 +882,16 @@ class MSSQLCompiler(compiler.SQLCompiler): if result_map is not None: result_map[column.name.lower()] = \ - (column.name, (column, ), + (column.name, (column, column.name, + column.key), column.type) return super(MSSQLCompiler, self).\ - visit_column(converted, + visit_column(converted, result_map=None, **kwargs) - return 
super(MSSQLCompiler, self).visit_column(column, - result_map=result_map, + return super(MSSQLCompiler, self).visit_column(column, + result_map=result_map, **kwargs) def visit_binary(self, binary, **kwargs): @@ -892,27 +900,27 @@ class MSSQLCompiler(compiler.SQLCompiler): """ if ( - isinstance(binary.left, expression._BindParamClause) + isinstance(binary.left, expression._BindParamClause) and binary.operator == operator.eq and not isinstance(binary.right, expression._BindParamClause) ): return self.process( - expression._BinaryExpression(binary.right, - binary.left, - binary.operator), + expression._BinaryExpression(binary.right, + binary.left, + binary.operator), **kwargs) else: if ( - (binary.operator is operator.eq or - binary.operator is operator.ne) + (binary.operator is operator.eq or + binary.operator is operator.ne) and ( - (isinstance(binary.left, expression._FromGrouping) - and isinstance(binary.left.element, - expression._ScalarSelect)) - or (isinstance(binary.right, expression._FromGrouping) - and isinstance(binary.right.element, - expression._ScalarSelect)) - or isinstance(binary.left, expression._ScalarSelect) + (isinstance(binary.left, expression._FromGrouping) + and isinstance(binary.left.element, + expression._ScalarSelect)) + or (isinstance(binary.right, expression._FromGrouping) + and isinstance(binary.right.element, + expression._ScalarSelect)) + or isinstance(binary.left, expression._ScalarSelect) or isinstance(binary.right, expression._ScalarSelect) ) ): @@ -944,10 +952,10 @@ class MSSQLCompiler(compiler.SQLCompiler): columns = [ self.process( - col_label(c), - within_columns_clause=True, + col_label(c), + within_columns_clause=True, result_map=self.result_map - ) + ) for c in expression._select_iterables(returning_cols) ] return 'OUTPUT ' + ', '.join(columns) @@ -967,7 +975,7 @@ class MSSQLCompiler(compiler.SQLCompiler): label_select_column(select, column, asfrom) def for_update_clause(self, select): - # "FOR UPDATE" is only allowed on 
"DECLARE CURSOR" which + # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which # SQLAlchemy doesn't use return '' @@ -980,6 +988,22 @@ class MSSQLCompiler(compiler.SQLCompiler): else: return "" + def update_from_clause(self, update_stmt, + from_table, extra_froms, + from_hints, + **kw): + """Render the UPDATE..FROM clause specific to MSSQL. + + In MSSQL, if the UPDATE statement involves an alias of the table to + be updated, then the table itself must be added to the FROM list as + well. Otherwise, it is optional. Here, we add it regardless. + + """ + return "FROM " + ', '.join( + t._compiler_dispatch(self, asfrom=True, + fromhints=from_hints, **kw) + for t in [from_table] + extra_froms) + class MSSQLStrictCompiler(MSSQLCompiler): """A subclass of MSSQLCompiler which disables the usage of bind parameters where not allowed natively by MS-SQL. @@ -993,14 +1017,14 @@ class MSSQLStrictCompiler(MSSQLCompiler): def visit_in_op(self, binary, **kw): kw['literal_binds'] = True return "%s IN %s" % ( - self.process(binary.left, **kw), + self.process(binary.left, **kw), self.process(binary.right, **kw) ) def visit_notin_op(self, binary, **kw): kw['literal_binds'] = True return "%s NOT IN %s" % ( - self.process(binary.left, **kw), + self.process(binary.left, **kw), self.process(binary.right, **kw) ) @@ -1029,7 +1053,7 @@ class MSSQLStrictCompiler(MSSQLCompiler): class MSDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): - colspec = (self.preparer.format_column(column) + " " + colspec = (self.preparer.format_column(column) + " " + self.dialect.type_compiler.process(column.type)) if column.nullable is not None: @@ -1040,7 +1064,7 @@ class MSDDLCompiler(compiler.DDLCompiler): if column.table is None: raise exc.CompileError( - "mssql requires Table-bound columns " + "mssql requires Table-bound columns " "in order to generate DDL") seq_col = column.table._autoincrement_column @@ -1075,7 +1099,7 @@ class 
MSIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = RESERVED_WORDS def __init__(self, dialect): - super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[', + super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[', final_quote=']') def _escape_identifier(self, value): @@ -1140,7 +1164,7 @@ class MSDialect(default.DefaultDialect): super(MSDialect, self).initialize(connection) if self.server_version_info[0] not in range(8, 17): # FreeTDS with version 4.2 seems to report here - # a number like "95.10.255". Don't know what + # a number like "95.10.255". Don't know what # that is. So emit warning. util.warn( "Unrecognized server version info '%s'. Version specific " @@ -1241,11 +1265,11 @@ class MSDialect(default.DefaultDialect): "join sys.schemas as sch on sch.schema_id=tab.schema_id " "where tab.name = :tabname " "and sch.name=:schname " - "and ind.is_primary_key=0", + "and ind.is_primary_key=0", bindparams=[ - sql.bindparam('tabname', tablename, + sql.bindparam('tabname', tablename, sqltypes.String(convert_unicode=True)), - sql.bindparam('schname', current_schema, + sql.bindparam('schname', current_schema, sqltypes.String(convert_unicode=True)) ], typemap = { @@ -1272,9 +1296,9 @@ class MSDialect(default.DefaultDialect): "where tab.name=:tabname " "and sch.name=:schname", bindparams=[ - sql.bindparam('tabname', tablename, + sql.bindparam('tabname', tablename, sqltypes.String(convert_unicode=True)), - sql.bindparam('schname', current_schema, + sql.bindparam('schname', current_schema, sqltypes.String(convert_unicode=True)) ], typemap = { @@ -1302,9 +1326,9 @@ class MSDialect(default.DefaultDialect): "views.schema_id=sch.schema_id and " "views.name=:viewname and sch.name=:schname", bindparams=[ - sql.bindparam('viewname', viewname, + sql.bindparam('viewname', viewname, sqltypes.String(convert_unicode=True)), - sql.bindparam('schname', current_schema, + sql.bindparam('schname', current_schema, 
sqltypes.String(convert_unicode=True)) ] ) @@ -1332,7 +1356,7 @@ class MSDialect(default.DefaultDialect): row = c.fetchone() if row is None: break - (name, type, nullable, charlen, + (name, type, nullable, charlen, numericprec, numericscale, default, collation) = ( row[columns.c.column_name], row[columns.c.data_type], @@ -1346,7 +1370,7 @@ class MSDialect(default.DefaultDialect): coltype = self.ischema_names.get(type, None) kwargs = {} - if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText, + if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText, MSNText, MSBinary, MSVarBinary, sqltypes.LargeBinary): kwargs['length'] = charlen @@ -1358,7 +1382,7 @@ class MSDialect(default.DefaultDialect): if coltype is None: util.warn( - "Did not recognize type '%s' of column '%s'" % + "Did not recognize type '%s' of column '%s'" % (type, name)) coltype = sqltypes.NULLTYPE else: @@ -1382,7 +1406,7 @@ class MSDialect(default.DefaultDialect): colmap[col['name']] = col # We also run an sp_columns to check for identity columns: cursor = connection.execute("sp_columns @table_name = '%s', " - "@table_owner = '%s'" + "@table_owner = '%s'" % (tablename, current_schema)) ic = None while True: @@ -1401,7 +1425,7 @@ class MSDialect(default.DefaultDialect): if ic is not None and self.server_version_info >= MS_2005_VERSION: table_fullname = "%s.%s" % (current_schema, tablename) cursor = connection.execute( - "select ident_seed('%s'), ident_incr('%s')" + "select ident_seed('%s'), ident_incr('%s')" % (table_fullname, table_fullname) ) @@ -1421,16 +1445,17 @@ class MSDialect(default.DefaultDialect): RR = ischema.ref_constraints # information_schema.table_constraints TC = ischema.constraints - # information_schema.constraint_column_usage: + # information_schema.constraint_column_usage: # the constrained column - C = ischema.key_constraints.alias('C') - # information_schema.constraint_column_usage: + C = ischema.key_constraints.alias('C') + # 
information_schema.constraint_column_usage: # the referenced column - R = ischema.key_constraints.alias('R') + R = ischema.key_constraints.alias('R') # Primary key constraints s = sql.select([C.c.column_name, TC.c.constraint_type], sql.and_(TC.c.constraint_name == C.c.constraint_name, + TC.c.table_schema == C.c.table_schema, C.c.table_name == tablename, C.c.table_schema == current_schema) ) @@ -1448,12 +1473,12 @@ class MSDialect(default.DefaultDialect): RR = ischema.ref_constraints # information_schema.table_constraints TC = ischema.constraints - # information_schema.constraint_column_usage: + # information_schema.constraint_column_usage: # the constrained column - C = ischema.key_constraints.alias('C') - # information_schema.constraint_column_usage: + C = ischema.key_constraints.alias('C') + # information_schema.constraint_column_usage: # the referenced column - R = ischema.key_constraints.alias('R') + R = ischema.key_constraints.alias('R') # Foreign key constraints s = sql.select([C.c.column_name, diff --git a/libs/sqlalchemy/dialects/mssql/information_schema.py b/libs/sqlalchemy/dialects/mssql/information_schema.py index 29a9de34..0dcddae9 100644 --- a/libs/sqlalchemy/dialects/mssql/information_schema.py +++ b/libs/sqlalchemy/dialects/mssql/information_schema.py @@ -1,5 +1,5 @@ # mssql/information_schema.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -15,8 +15,10 @@ class CoerceUnicode(TypeDecorator): impl = Unicode def process_bind_param(self, value, dialect): + # Py2K if isinstance(value, str): value = value.decode(dialect.encoding) + # end Py2K return value schemata = Table("SCHEMATA", ischema, diff --git a/libs/sqlalchemy/dialects/mssql/mxodbc.py b/libs/sqlalchemy/dialects/mssql/mxodbc.py index 76599869..56a72f41 100644 --- 
a/libs/sqlalchemy/dialects/mssql/mxodbc.py +++ b/libs/sqlalchemy/dialects/mssql/mxodbc.py @@ -1,5 +1,5 @@ # mssql/mxodbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -41,13 +41,13 @@ simplistic statements. For this reason, the mxODBC dialect uses the "native" mode by default only for INSERT, UPDATE, and DELETE statements, and uses the escaped string mode for -all other statements. +all other statements. This behavior can be controlled via :meth:`~sqlalchemy.sql.expression.Executable.execution_options` using the ``native_odbc_execute`` flag with a value of ``True`` or ``False``, where a value of ``True`` will unconditionally use native bind parameters and a value -of ``False`` will uncondtionally use string-escaped parameters. +of ``False`` will unconditionally use string-escaped parameters. """ @@ -55,7 +55,7 @@ of ``False`` will uncondtionally use string-escaped parameters. 
from sqlalchemy import types as sqltypes from sqlalchemy.connectors.mxodbc import MxODBCConnector from sqlalchemy.dialects.mssql.pyodbc import MSExecutionContext_pyodbc -from sqlalchemy.dialects.mssql.base import (MSDialect, +from sqlalchemy.dialects.mssql.base import (MSDialect, MSSQLStrictCompiler, _MSDateTime, _MSDate, TIME) diff --git a/libs/sqlalchemy/dialects/mssql/pymssql.py b/libs/sqlalchemy/dialects/mssql/pymssql.py index 9cc42c09..8229d6ce 100644 --- a/libs/sqlalchemy/dialects/mssql/pymssql.py +++ b/libs/sqlalchemy/dialects/mssql/pymssql.py @@ -1,5 +1,5 @@ # mssql/pymssql.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -21,8 +21,8 @@ Sample connect string:: mssql+pymssql://:@ Adding "?charset=utf8" or similar will cause pymssql to return -strings as Python unicode objects. This can potentially improve -performance in some scenarios as decoding of strings is +strings as Python unicode objects. This can potentially improve +performance in some scenarios as decoding of strings is handled natively. 
Limitations diff --git a/libs/sqlalchemy/dialects/mssql/pyodbc.py b/libs/sqlalchemy/dialects/mssql/pyodbc.py index 7b47004e..389018c6 100644 --- a/libs/sqlalchemy/dialects/mssql/pyodbc.py +++ b/libs/sqlalchemy/dialects/mssql/pyodbc.py @@ -1,5 +1,5 @@ # mssql/pyodbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -35,14 +35,14 @@ Examples of pyodbc connection string URLs: dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english -* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection +* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection that would appear like:: DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass * ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection string which includes the port - information using the comma syntax. This will create the following + information using the comma syntax. This will create the following connection string:: DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass @@ -80,6 +80,34 @@ the python shell. For example:: >>> urllib.quote_plus('dsn=mydsn;Database=db') 'dsn%3Dmydsn%3BDatabase%3Ddb' +Unicode Binds +^^^^^^^^^^^^^ + +The current state of PyODBC on a unix backend with FreeTDS and/or +EasySoft is poor regarding unicode; different OS platforms and versions of UnixODBC +versus IODBC versus FreeTDS/EasySoft versus PyODBC itself dramatically +alter how strings are received. 
The PyODBC dialect attempts to use all the information +it knows to determine whether or not a Python unicode literal can be +passed directly to the PyODBC driver or not; while SQLAlchemy can encode +these to bytestrings first, some users have reported that PyODBC mis-handles +bytestrings for certain encodings and requires a Python unicode object, +while the author has observed widespread cases where a Python unicode +is completely misinterpreted by PyODBC, particularly when dealing with +the information schema tables used in table reflection, and the value +must first be encoded to a bytestring. + +It is for this reason that whether or not unicode literals for bound +parameters be sent to PyODBC can be controlled using the +``supports_unicode_binds`` parameter to ``create_engine()``. When +left at its default of ``None``, the PyODBC dialect will use its +best guess as to whether or not the driver deals with unicode literals +well. When ``False``, unicode literals will be encoded first, and when +``True`` unicode literals will be passed straight through. This is an interim +flag that hopefully should not be needed when the unicode situation stabilizes +for unix + PyODBC. + +.. versionadded:: 0.7.7 + ``supports_unicode_binds`` parameter to ``create_engine()``\ . """ @@ -171,7 +199,7 @@ class MSExecutionContext_pyodbc(MSExecutionContext): super(MSExecutionContext_pyodbc, self).pre_exec() - # don't embed the scope_identity select into an + # don't embed the scope_identity select into an # "INSERT .. DEFAULT VALUES" if self._select_lastrowid and \ self.dialect.use_scope_identity and \ @@ -183,11 +211,11 @@ class MSExecutionContext_pyodbc(MSExecutionContext): def post_exec(self): if self._embedded_scope_identity: # Fetch the last inserted id from the manipulated statement - # We may have to skip over a number of result sets with + # We may have to skip over a number of result sets with # no data (due to triggers, etc.) 
while True: try: - # fetchall() ensures the cursor is consumed + # fetchall() ensures the cursor is consumed # without closing it (FreeTDS particularly) row = self.cursor.fetchall()[0] break @@ -217,7 +245,8 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect): def __init__(self, description_encoding='latin-1', **params): super(MSDialect_pyodbc, self).__init__(**params) self.description_encoding = description_encoding - self.use_scope_identity = self.dbapi and \ + self.use_scope_identity = self.use_scope_identity and \ + self.dbapi and \ hasattr(self.dbapi.Cursor, 'nextset') self._need_decimal_fix = self.dbapi and \ self._dbapi_version() < (2, 1, 8) diff --git a/libs/sqlalchemy/dialects/mssql/zxjdbc.py b/libs/sqlalchemy/dialects/mssql/zxjdbc.py index bfa358c0..842225da 100644 --- a/libs/sqlalchemy/dialects/mssql/zxjdbc.py +++ b/libs/sqlalchemy/dialects/mssql/zxjdbc.py @@ -1,5 +1,5 @@ # mssql/zxjdbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -68,7 +68,7 @@ class MSDialect_zxjdbc(ZxJDBCConnector, MSDialect): def _get_server_version_info(self, connection): return tuple( - int(x) + int(x) for x in connection.connection.dbversion.split('.') ) diff --git a/libs/sqlalchemy/dialects/mysql/__init__.py b/libs/sqlalchemy/dialects/mysql/__init__.py index 135ff37d..c41dd0b1 100644 --- a/libs/sqlalchemy/dialects/mysql/__init__.py +++ b/libs/sqlalchemy/dialects/mysql/__init__.py @@ -1,11 +1,12 @@ # mysql/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from sqlalchemy.dialects.mysql import base, mysqldb, oursql, \ - pyodbc, zxjdbc, 
mysqlconnector, pymysql + pyodbc, zxjdbc, mysqlconnector, pymysql,\ + gaerdbms # default dialect base.dialect = mysqldb.dialect diff --git a/libs/sqlalchemy/dialects/mysql/base.py b/libs/sqlalchemy/dialects/mysql/base.py index d9ab5a34..ea180eee 100644 --- a/libs/sqlalchemy/dialects/mysql/base.py +++ b/libs/sqlalchemy/dialects/mysql/base.py @@ -1,5 +1,5 @@ # mysql/base.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,29 +9,11 @@ Supported Versions and Features ------------------------------- -SQLAlchemy supports 6 major MySQL versions: 3.23, 4.0, 4.1, 5.0, 5.1 and 6.0, -with capabilities increasing with more modern servers. - -Versions 4.1 and higher support the basic SQL functionality that SQLAlchemy -uses in the ORM and SQL expressions. These versions pass the applicable tests -in the suite 100%. No heroic measures are taken to work around major missing -SQL features- if your server version does not support sub-selects, for +SQLAlchemy supports MySQL starting with version 4.1 through modern releases. +However, no heroic measures are taken to work around major missing +SQL features - if your server version does not support sub-selects, for example, they won't work in SQLAlchemy either. -Most available DBAPI drivers are supported; see below. - -===================================== =============== -Feature Minimum Version -===================================== =============== -sqlalchemy.orm 4.1.1 -Table Reflection 3.23.x -DDL Generation 4.1.1 -utf8/Full Unicode Connections 4.1.1 -Transactions 3.23.15 -Two-Phase Transactions 5.0.3 -Nested Transactions 5.0.3 -===================================== =============== - See the official MySQL documentation for detailed information about features supported in any given server release. 
@@ -44,18 +26,21 @@ Connection Timeouts ------------------- MySQL features an automatic connection close behavior, for connections that have -been idle for eight hours or more. To circumvent having this issue, use the +been idle for eight hours or more. To circumvent having this issue, use the ``pool_recycle`` option which controls the maximum age of any connection:: engine = create_engine('mysql+mysqldb://...', pool_recycle=3600) +.. _mysql_storage_engines: + Storage Engines --------------- Most MySQL server installations have a default table type of ``MyISAM``, a non-transactional table type. During a transaction, non-transactional storage engines do not participate and continue to store table changes in autocommit -mode. For fully atomic transactions, all participating tables must use a +mode. For fully atomic transactions as well as support for foreign key +constraints, all participating tables must use a transactional engine such as ``InnoDB``, ``Falcon``, ``SolidDB``, `PBXT`, etc. Storage engines can be elected when creating tables in SQLAlchemy by supplying @@ -68,6 +53,10 @@ creation option can be specified in this syntax:: mysql_charset='utf8' ) +.. seealso:: + + `The InnoDB Storage Engine `_ - on the MySQL website. + Case Sensitivity and Table Reflection ------------------------------------- @@ -87,19 +76,19 @@ to be used. Transaction Isolation Level --------------------------- -:func:`.create_engine` accepts an ``isolation_level`` -parameter which results in the command ``SET SESSION -TRANSACTION ISOLATION LEVEL `` being invoked for +:func:`.create_engine` accepts an ``isolation_level`` +parameter which results in the command ``SET SESSION +TRANSACTION ISOLATION LEVEL `` being invoked for every new connection. 
Valid values for this parameter are -``READ COMMITTED``, ``READ UNCOMMITTED``, +``READ COMMITTED``, ``READ UNCOMMITTED``, ``REPEATABLE READ``, and ``SERIALIZABLE``:: engine = create_engine( - "mysql://scott:tiger@localhost/test", + "mysql://scott:tiger@localhost/test", isolation_level="READ UNCOMMITTED" ) -(new in 0.7.6) +.. versionadded:: 0.7.6 Keys ---- @@ -185,6 +174,24 @@ available. update(..., mysql_limit=10) +rowcount Support +---------------- + +SQLAlchemy standardizes the DBAPI ``cursor.rowcount`` attribute to be the +usual definition of "number of rows matched by an UPDATE or DELETE" statement. +This is in contradiction to the default setting on most MySQL DBAPI drivers, +which is "number of rows actually modified/deleted". For this reason, the +SQLAlchemy MySQL dialects always set the ``constants.CLIENT.FOUND_ROWS`` flag, +or whatever is equivalent for the DBAPI in use, on connect, unless the flag value +is overridden using DBAPI-specific options +(such as ``client_flag`` for the MySQL-Python driver, ``found_rows`` for the +OurSQL driver). + +See also: + +:attr:`.ResultProxy.rowcount` + + CAST Support ------------ @@ -242,7 +249,7 @@ Index Types ~~~~~~~~~~~~~ Some MySQL storage engines permit you to specify an index type when creating -an index or primary key constraint. SQLAlchemy provides this feature via the +an index or primary key constraint. SQLAlchemy provides this feature via the ``mysql_using`` parameter on :class:`.Index`:: Index('my_index', my_table.c.data, mysql_using='hash') @@ -252,7 +259,7 @@ As well as the ``mysql_using`` parameter on :class:`.PrimaryKeyConstraint`:: PrimaryKeyConstraint("data", mysql_using='hash') The value passed to the keyword argument will be simply passed through to the -underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index +underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index type for your MySQL storage engine. 
More information can be found at: @@ -344,9 +351,9 @@ class _FloatType(_NumericType, sqltypes.Float): (precision is None and scale is not None) or (precision is not None and scale is None) ): - raise exc.ArgumentError( - "You must specify both precision and scale or omit " - "both altogether.") + raise exc.ArgumentError( + "You must specify both precision and scale or omit " + "both altogether.") super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw) self.scale = scale @@ -1273,11 +1280,11 @@ class MySQLCompiler(compiler.SQLCompiler): def visit_cast(self, cast, **kwargs): # No cast until 4, no decimals until 5. if not self.dialect._supports_cast: - return self.process(cast.clause) + return self.process(cast.clause.self_group()) type_ = self.process(cast.typeclause) if type_ is None: - return self.process(cast.clause) + return self.process(cast.clause.self_group()) return 'CAST(%s AS %s)' % (self.process(cast.clause), type_) @@ -1289,13 +1296,13 @@ class MySQLCompiler(compiler.SQLCompiler): def get_select_precolumns(self, select): """Add special MySQL keywords in place of DISTINCT. - - .. note:: - + + .. note:: + this usage is deprecated. :meth:`.Select.prefix_with` should be used for special keywords at the start of a SELECT. - + """ if isinstance(select._distinct, basestring): return select._distinct.upper() + " " @@ -1343,16 +1350,16 @@ class MySQLCompiler(compiler.SQLCompiler): if limit is None: # hardwire the upper limit. 
Currently # needed by OurSQL with Python 3 - # (https://bugs.launchpad.net/oursql/+bug/686232), + # (https://bugs.launchpad.net/oursql/+bug/686232), # but also is consistent with the usage of the upper # bound as part of MySQL's "syntax" for OFFSET with # no LIMIT return ' \n LIMIT %s, %s' % ( - self.process(sql.literal(offset)), + self.process(sql.literal(offset)), "18446744073709551615") else: return ' \n LIMIT %s, %s' % ( - self.process(sql.literal(offset)), + self.process(sql.literal(offset)), self.process(sql.literal(limit))) else: # No offset provided, so just use the limit @@ -1366,10 +1373,10 @@ class MySQLCompiler(compiler.SQLCompiler): return None def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): - return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw) + return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw) for t in [from_table] + list(extra_froms)) - def update_from_clause(self, update_stmt, from_table, + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): return None @@ -1395,8 +1402,12 @@ class MySQLDDLCompiler(compiler.DDLCompiler): auto_inc_column is not list(table.primary_key)[0]: if constraint_string: constraint_string += ", \n\t" - constraint_string += "KEY `idx_autoinc_%s`(`%s`)" % (auto_inc_column.name, \ - self.preparer.format_column(auto_inc_column)) + constraint_string += "KEY %s (%s)" % ( + self.preparer.quote( + "idx_autoinc_%s" % auto_inc_column.name, None + ), + self.preparer.format_column(auto_inc_column) + ) return constraint_string @@ -1431,7 +1442,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler): opts = dict( ( - k[len(self.dialect.name)+1:].upper(), + k[len(self.dialect.name)+1:].upper(), v ) for k, v in table.kwargs.items() @@ -1447,7 +1458,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler): arg = "'%s'" % arg.replace("\\", "\\\\").replace("'", "''") if opt in ('DATA_DIRECTORY', 'INDEX_DIRECTORY', - 'DEFAULT_CHARACTER_SET', 'CHARACTER_SET', + 
'DEFAULT_CHARACTER_SET', 'CHARACTER_SET', 'DEFAULT_CHARSET', 'DEFAULT_COLLATE'): opt = opt.replace('_', ' ') @@ -1467,7 +1478,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler): table = preparer.format_table(index.table) columns = [preparer.quote(c.name, c.quote) for c in index.columns] name = preparer.quote( - self._index_identifier(index.name), + self._index_identifier(index.name), index.quote) text = "CREATE " @@ -1576,24 +1587,24 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): if type_.precision is None: return self._extend_numeric(type_, "NUMERIC") elif type_.scale is None: - return self._extend_numeric(type_, - "NUMERIC(%(precision)s)" % + return self._extend_numeric(type_, + "NUMERIC(%(precision)s)" % {'precision': type_.precision}) else: - return self._extend_numeric(type_, - "NUMERIC(%(precision)s, %(scale)s)" % + return self._extend_numeric(type_, + "NUMERIC(%(precision)s, %(scale)s)" % {'precision': type_.precision, 'scale' : type_.scale}) def visit_DECIMAL(self, type_): if type_.precision is None: return self._extend_numeric(type_, "DECIMAL") elif type_.scale is None: - return self._extend_numeric(type_, - "DECIMAL(%(precision)s)" % + return self._extend_numeric(type_, + "DECIMAL(%(precision)s)" % {'precision': type_.precision}) else: - return self._extend_numeric(type_, - "DECIMAL(%(precision)s, %(scale)s)" % + return self._extend_numeric(type_, + "DECIMAL(%(precision)s, %(scale)s)" % {'precision': type_.precision, 'scale' : type_.scale}) def visit_DOUBLE(self, type_): @@ -1616,7 +1627,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): if self._mysql_type(type_) and \ type_.scale is not None and \ type_.precision is not None: - return self._extend_numeric(type_, + return self._extend_numeric(type_, "FLOAT(%s, %s)" % (type_.precision, type_.scale)) elif type_.precision is not None: return self._extend_numeric(type_, "FLOAT(%s)" % (type_.precision,)) @@ -1625,24 +1636,24 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): def 
visit_INTEGER(self, type_): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "INTEGER(%(display_width)s)" % + return self._extend_numeric(type_, + "INTEGER(%(display_width)s)" % {'display_width': type_.display_width}) else: return self._extend_numeric(type_, "INTEGER") def visit_BIGINT(self, type_): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "BIGINT(%(display_width)s)" % + return self._extend_numeric(type_, + "BIGINT(%(display_width)s)" % {'display_width': type_.display_width}) else: return self._extend_numeric(type_, "BIGINT") def visit_MEDIUMINT(self, type_): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "MEDIUMINT(%(display_width)s)" % + return self._extend_numeric(type_, + "MEDIUMINT(%(display_width)s)" % {'display_width': type_.display_width}) else: return self._extend_numeric(type_, "MEDIUMINT") @@ -1655,8 +1666,8 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): def visit_SMALLINT(self, type_): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "SMALLINT(%(display_width)s)" % + return self._extend_numeric(type_, + "SMALLINT(%(display_width)s)" % {'display_width': type_.display_width} ) else: @@ -1706,7 +1717,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length) else: raise exc.CompileError( - "VARCHAR requires a length on dialect %s" % + "VARCHAR requires a length on dialect %s" % self.dialect.name) def visit_CHAR(self, type_): @@ -1722,7 +1733,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): return self._extend_string(type_, {'national':True}, "VARCHAR(%(length)s)" % {'length': type_.length}) else: raise exc.CompileError( - "NVARCHAR requires a length on dialect %s" % + "NVARCHAR requires a length on dialect %s" % self.dialect.name) def 
visit_NCHAR(self, type_): @@ -1783,8 +1794,8 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer): quote = '"' super(MySQLIdentifierPreparer, self).__init__( - dialect, - initial_quote=quote, + dialect, + initial_quote=quote, escape_quote=quote) def _quote_free_identifiers(self, *ids): @@ -1817,7 +1828,7 @@ class MySQLDialect(default.DefaultDialect): preparer = MySQLIdentifierPreparer # default SQL compilation settings - - # these are modified upon initialize(), + # these are modified upon initialize(), # i.e. first connect _backslash_escapes = True _server_ansiquotes = False @@ -1834,7 +1845,7 @@ class MySQLDialect(default.DefaultDialect): else: return None - _isolation_lookup = set(['SERIALIZABLE', + _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ']) def set_isolation_level(self, connection, level): @@ -1842,7 +1853,7 @@ class MySQLDialect(default.DefaultDialect): if level not in self._isolation_lookup: raise exc.ArgumentError( "Invalid value '%s' for isolation_level. 
" - "Valid isolation levels for %s are %s" % + "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) ) cursor = connection.cursor() @@ -1914,7 +1925,7 @@ class MySQLDialect(default.DefaultDialect): return self._extract_error_code(e) in \ (2006, 2013, 2014, 2045, 2055) elif isinstance(e, self.dbapi.InterfaceError): - # if underlying connection is closed, + # if underlying connection is closed, # this is the error you get return "(0, '')" in str(e) else: @@ -2017,7 +2028,6 @@ class MySQLDialect(default.DefaultDialect): @reflection.cache def get_view_names(self, connection, schema=None, **kw): - charset = self._connection_charset if self.server_version_info < (5, 0, 2): raise NotImplementedError if schema is None: @@ -2028,7 +2038,7 @@ class MySQLDialect(default.DefaultDialect): rp = connection.execute("SHOW FULL TABLES FROM %s" % self.identifier_preparer.quote_identifier(schema)) return [row[0] for row in self._compat_fetchall(rp, charset=charset)\ - if row[1] == 'VIEW'] + if row[1] in ('VIEW', 'SYSTEM VIEW')] @reflection.cache def get_table_options(self, connection, table_name, schema=None, **kw): @@ -2129,9 +2139,9 @@ class MySQLDialect(default.DefaultDialect): def _parsed_state_or_create(self, connection, table_name, schema=None, **kw): return self._setup_parser( - connection, - table_name, - schema, + connection, + table_name, + schema, info_cache=kw.get('info_cache', None) ) @@ -2139,7 +2149,7 @@ class MySQLDialect(default.DefaultDialect): def _tabledef_parser(self): """return the MySQLTableDefinitionParser, generate if needed. - The deferred creation ensures that the dialect has + The deferred creation ensures that the dialect has retrieved server version information first. 
""" diff --git a/libs/sqlalchemy/dialects/mysql/gaerdbms.py b/libs/sqlalchemy/dialects/mysql/gaerdbms.py new file mode 100644 index 00000000..2203504f --- /dev/null +++ b/libs/sqlalchemy/dialects/mysql/gaerdbms.py @@ -0,0 +1,84 @@ +# mysql/gaerdbms.py +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""Support for Google Cloud SQL on Google App Engine. + +This dialect is based primarily on the :mod:`.mysql.mysqldb` dialect with minimal +changes. + +.. versionadded:: 0.7.8 + +Connecting +---------- + +Connect string format:: + + mysql+gaerdbms:/// + +E.g.:: + + create_engine('mysql+gaerdbms:///mydb', + connect_args={"instance":"instancename"}) + +Pooling +------- + +Google App Engine connections appear to be randomly recycled, +so the dialect does not pool connections. The :class:`.NullPool` +implementation is installed within the :class:`.Engine` by +default. 
+ +""" + +from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb +from sqlalchemy.pool import NullPool +import re + + +class MySQLDialect_gaerdbms(MySQLDialect_mysqldb): + + @classmethod + def dbapi(cls): + # from django: + # http://code.google.com/p/googleappengine/source/ + # browse/trunk/python/google/storage/speckle/ + # python/django/backend/base.py#118 + # see also [ticket:2649] + # see also http://stackoverflow.com/q/14224679/34549 + from google.appengine.api import apiproxy_stub_map + + if apiproxy_stub_map.apiproxy.GetStub('rdbms'): + from google.storage.speckle.python.api import rdbms_apiproxy + return rdbms_apiproxy + else: + from google.storage.speckle.python.api import rdbms_googleapi + return rdbms_googleapi + + @classmethod + def get_pool_class(cls, url): + # Cloud SQL connections die at any moment + return NullPool + + def create_connect_args(self, url): + opts = url.translate_connect_args() + # 'dsn' and 'instance' are because we are skipping + # the traditional google.api.rdbms wrapper + + opts['dsn'] = '' + opts['instance'] = url.query['instance'] + return [], opts + + def _extract_error_code(self, exception): + match = re.compile(r"^(\d+):").match(str(exception)) + # The rdbms api will wrap then re-raise some types of errors + # making this regex return no matches. 
+ if match: + code = match.group(1) + else: + code = None + if code: + return int(code) + +dialect = MySQLDialect_gaerdbms diff --git a/libs/sqlalchemy/dialects/mysql/mysqlconnector.py b/libs/sqlalchemy/dialects/mysql/mysqlconnector.py index be99367b..bd8ee013 100644 --- a/libs/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/libs/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -1,5 +1,5 @@ # mysql/mysqlconnector.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -105,15 +105,11 @@ class MySQLDialect_mysqlconnector(MySQLDialect): def _get_server_version_info(self, connection): dbapi_con = connection.connection - - from mysql.connector.constants import ClientFlag - dbapi_con.set_client_flag(ClientFlag.FOUND_ROWS) - version = dbapi_con.get_server_version() return tuple(version) def _detect_charset(self, connection): - return connection.connection.get_characterset_info() + return connection.connection.charset def _extract_error_code(self, exception): return exception.errno diff --git a/libs/sqlalchemy/dialects/mysql/mysqldb.py b/libs/sqlalchemy/dialects/mysql/mysqldb.py index 656e105a..c6ae5333 100644 --- a/libs/sqlalchemy/dialects/mysql/mysqldb.py +++ b/libs/sqlalchemy/dialects/mysql/mysqldb.py @@ -1,5 +1,5 @@ # mysql/mysqldb.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -26,20 +26,20 @@ MySQLdb will accommodate Python ``unicode`` objects if the ``use_unicode=1`` parameter, or the ``charset`` parameter, is passed as a connection argument. 
-Without this setting, many MySQL server installations default to +Without this setting, many MySQL server installations default to a ``latin1`` encoding for client connections, which has the effect -of all data being converted into ``latin1``, even if you have ``utf8`` +of all data being converted into ``latin1``, even if you have ``utf8`` or another character set configured on your tables and columns. With versions 4.1 and higher, you can change the connection character set either through server configuration or by including the ``charset`` parameter. The ``charset`` -parameter as received by MySQL-Python also has the side-effect of +parameter as received by MySQL-Python also has the side-effect of enabling ``use_unicode=1``:: # set client encoding to utf8; all strings come back as unicode create_engine('mysql+mysqldb:///mydb?charset=utf8') -Manually configuring ``use_unicode=0`` will cause MySQL-python to +Manually configuring ``use_unicode=0`` will cause MySQL-python to return encoded strings:: # set client encoding to utf8; all strings come back as utf8 str @@ -57,9 +57,9 @@ It is strongly advised to use the latest version of MySQL-Python. 
from sqlalchemy.dialects.mysql.base import (MySQLDialect, MySQLExecutionContext, MySQLCompiler, MySQLIdentifierPreparer) from sqlalchemy.connectors.mysqldb import ( - MySQLDBExecutionContext, - MySQLDBCompiler, - MySQLDBIdentifierPreparer, + MySQLDBExecutionContext, + MySQLDBCompiler, + MySQLDBIdentifierPreparer, MySQLDBConnector ) diff --git a/libs/sqlalchemy/dialects/mysql/oursql.py b/libs/sqlalchemy/dialects/mysql/oursql.py index 2a3c6b09..d6d8e9ff 100644 --- a/libs/sqlalchemy/dialects/mysql/oursql.py +++ b/libs/sqlalchemy/dialects/mysql/oursql.py @@ -1,5 +1,5 @@ # mysql/oursql.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -108,9 +108,9 @@ class MySQLDialect_oursql(MySQLDialect): arg = "'%s'" % arg connection.execution_options(_oursql_plain_query=True).execute(query % arg) - # Because mysql is bad, these methods have to be + # Because mysql is bad, these methods have to be # reimplemented to use _PlainQuery. Basically, some queries - # refuse to return any data if they're run through + # refuse to return any data if they're run through # the parameterized query API, or refuse to be parameterized # in the first place. def do_begin_twophase(self, connection, xid): @@ -135,7 +135,7 @@ class MySQLDialect_oursql(MySQLDialect): # Q: why didn't we need all these "plain_query" overrides earlier ? # am i on a newer/older version of OurSQL ? 
def has_table(self, connection, table_name, schema=None): - return MySQLDialect.has_table(self, + return MySQLDialect.has_table(self, connection.connect().\ execution_options(_oursql_plain_query=True), table_name, schema) @@ -183,7 +183,7 @@ class MySQLDialect_oursql(MySQLDialect): def initialize(self, connection): return MySQLDialect.initialize( - self, + self, connection.execution_options(_oursql_plain_query=True) ) @@ -208,6 +208,7 @@ class MySQLDialect_oursql(MySQLDialect): util.coerce_kw_type(opts, 'port', int) util.coerce_kw_type(opts, 'compress', bool) util.coerce_kw_type(opts, 'autoping', bool) + util.coerce_kw_type(opts, 'raise_on_warnings', bool) util.coerce_kw_type(opts, 'default_charset', bool) if opts.pop('default_charset', False): @@ -222,7 +223,7 @@ class MySQLDialect_oursql(MySQLDialect): opts.setdefault('found_rows', True) ssl = {} - for key in ['ssl_ca', 'ssl_key', 'ssl_cert', + for key in ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']: if key in opts: ssl[key[4:]] = opts[key] diff --git a/libs/sqlalchemy/dialects/mysql/pymysql.py b/libs/sqlalchemy/dialects/mysql/pymysql.py index e05d50d3..f5aaa122 100644 --- a/libs/sqlalchemy/dialects/mysql/pymysql.py +++ b/libs/sqlalchemy/dialects/mysql/pymysql.py @@ -1,5 +1,5 @@ # mysql/pymysql.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -20,20 +20,20 @@ Connect string:: MySQL-Python Compatibility -------------------------- -The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver, -and targets 100% compatibility. Most behavioral notes for MySQL-python apply to +The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver, +and targets 100% compatibility. Most behavioral notes for MySQL-python apply to the pymysql driver as well. 
""" -from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb +from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb -class MySQLDialect_pymysql(MySQLDialect_mysqldb): +class MySQLDialect_pymysql(MySQLDialect_mysqldb): driver = 'pymysql' description_encoding = None - @classmethod - def dbapi(cls): - return __import__('pymysql') + @classmethod + def dbapi(cls): + return __import__('pymysql') -dialect = MySQLDialect_pymysql \ No newline at end of file +dialect = MySQLDialect_pymysql \ No newline at end of file diff --git a/libs/sqlalchemy/dialects/mysql/pyodbc.py b/libs/sqlalchemy/dialects/mysql/pyodbc.py index 20a16988..5d631afb 100644 --- a/libs/sqlalchemy/dialects/mysql/pyodbc.py +++ b/libs/sqlalchemy/dialects/mysql/pyodbc.py @@ -1,5 +1,5 @@ # mysql/pyodbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -20,7 +20,7 @@ Connect string:: Limitations ----------- -The mysql-pyodbc dialect is subject to unresolved character encoding issues +The mysql-pyodbc dialect is subject to unresolved character encoding issues which exist within the current ODBC drivers available. (see http://code.google.com/p/pyodbc/issues/detail?id=25). Consider usage of OurSQL, MySQLdb, or MySQL-connector/Python. 
diff --git a/libs/sqlalchemy/dialects/mysql/zxjdbc.py b/libs/sqlalchemy/dialects/mysql/zxjdbc.py index 816e10ba..df479043 100644 --- a/libs/sqlalchemy/dialects/mysql/zxjdbc.py +++ b/libs/sqlalchemy/dialects/mysql/zxjdbc.py @@ -1,5 +1,5 @@ # mysql/zxjdbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/oracle/__init__.py b/libs/sqlalchemy/dialects/oracle/__init__.py index 3b3a3b12..a1e2a8dd 100644 --- a/libs/sqlalchemy/dialects/oracle/__init__.py +++ b/libs/sqlalchemy/dialects/oracle/__init__.py @@ -1,5 +1,5 @@ # oracle/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,7 +12,7 @@ from sqlalchemy.dialects.oracle.base import \ VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, NUMBER,\ BLOB, BFILE, CLOB, NCLOB, TIMESTAMP, RAW,\ FLOAT, DOUBLE_PRECISION, LONG, dialect, INTERVAL,\ - VARCHAR2, NVARCHAR2 + VARCHAR2, NVARCHAR2, ROWID __all__ = ( diff --git a/libs/sqlalchemy/dialects/oracle/base.py b/libs/sqlalchemy/dialects/oracle/base.py index dd761ae8..f82991bc 100644 --- a/libs/sqlalchemy/dialects/oracle/base.py +++ b/libs/sqlalchemy/dialects/oracle/base.py @@ -1,5 +1,5 @@ # oracle/base.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -14,7 +14,7 @@ for that driver. 
Connect Arguments ----------------- -The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which +The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which affect the behavior of the dialect regardless of driver in use. * *use_ansi* - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults @@ -29,32 +29,32 @@ Auto Increment Behavior SQLAlchemy Table objects which include integer primary keys are usually assumed to have "autoincrementing" behavior, meaning they can generate their own primary key values upon -INSERT. Since Oracle has no "autoincrement" feature, SQLAlchemy relies upon sequences +INSERT. Since Oracle has no "autoincrement" feature, SQLAlchemy relies upon sequences to produce these values. With the Oracle dialect, *a sequence must always be explicitly -specified to enable autoincrement*. This is divergent with the majority of documentation +specified to enable autoincrement*. This is divergent with the majority of documentation examples which assume the usage of an autoincrement-capable database. To specify sequences, use the sqlalchemy.schema.Sequence object which is passed to a Column construct:: - t = Table('mytable', metadata, + t = Table('mytable', metadata, Column('id', Integer, Sequence('id_seq'), primary_key=True), Column(...), ... ) This step is also required when using table reflection, i.e. autoload=True:: - t = Table('mytable', metadata, + t = Table('mytable', metadata, Column('id', Integer, Sequence('id_seq'), primary_key=True), autoload=True - ) + ) Identifier Casing ----------------- -In Oracle, the data dictionary represents all case insensitive identifier names +In Oracle, the data dictionary represents all case insensitive identifier names using UPPERCASE text. SQLAlchemy on the other hand considers an all-lower case identifier name to be case insensitive. 
The Oracle dialect converts all case insensitive identifiers to and from those two formats during schema level communication, such as reflection of -tables and indexes. Using an UPPERCASE name on the SQLAlchemy side indicates a +tables and indexes. Using an UPPERCASE name on the SQLAlchemy side indicates a case sensitive identifier, and SQLAlchemy will quote the name - this will cause mismatches against data dictionary data received from Oracle, so unless identifier names have been truly created as case sensitive (i.e. using quoted names), all lowercase names should be @@ -63,23 +63,25 @@ used on the SQLAlchemy side. Unicode ------- -SQLAlchemy 0.6 uses the "native unicode" mode provided as of cx_oracle 5. cx_oracle 5.0.2 -or greater is recommended for support of NCLOB. If not using cx_oracle 5, the NLS_LANG -environment variable needs to be set in order for the oracle client library to use -proper encoding, such as "AMERICAN_AMERICA.UTF8". +.. versionchanged:: 0.6 + SQLAlchemy uses the "native unicode" mode provided as of cx_oracle 5. + cx_oracle 5.0.2 or greater is recommended for support of NCLOB. + If not using cx_oracle 5, the NLS_LANG environment variable needs + to be set in order for the oracle client library to use proper encoding, + such as "AMERICAN_AMERICA.UTF8". Also note that Oracle supports unicode data through the NVARCHAR and NCLOB data types. When using the SQLAlchemy Unicode and UnicodeText types, these DDL types will be used -within CREATE TABLE statements. Usage of VARCHAR2 and CLOB with unicode text still +within CREATE TABLE statements. Usage of VARCHAR2 and CLOB with unicode text still requires NLS_LANG to be set. LIMIT/OFFSET Support -------------------- -Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses -a wrapped subquery approach in conjunction with ROWNUM. The exact methodology +Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses +a wrapped subquery approach in conjunction with ROWNUM. 
The exact methodology is taken from -http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html . +http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html . There are two options which affect its behavior: @@ -87,13 +89,13 @@ There are two options which affect its behavior: optimization directive, specify ``optimize_limits=True`` to :func:`.create_engine`. * the values passed for the limit/offset are sent as bound parameters. Some users have observed that Oracle produces a poor query plan when the values are sent as binds and not - rendered literally. To render the limit/offset values literally within the SQL + rendered literally. To render the limit/offset values literally within the SQL statement, specify ``use_binds_for_limits=False`` to :func:`.create_engine`. -Some users have reported better performance when the entirely different approach of a -window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to provide LIMIT/OFFSET (note -that the majority of users don't observe this). To suit this case the -method used for LIMIT/OFFSET can be replaced entirely. See the recipe at +Some users have reported better performance when the entirely different approach of a +window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to provide LIMIT/OFFSET (note +that the majority of users don't observe this). To suit this case the +method used for LIMIT/OFFSET can be replaced entirely. See the recipe at http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault which installs a select compiler that overrides the generation of limit/offset with a window function. @@ -101,11 +103,11 @@ a window function. ON UPDATE CASCADE ----------------- -Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based solution +Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based solution is available at http://asktom.oracle.com/tkyte/update_cascade/index.html . 
When using the SQLAlchemy ORM, the ORM has limited ability to manually issue -cascading updates - specify ForeignKey objects using the +cascading updates - specify ForeignKey objects using the "deferrable=True, initially='deferred'" keyword arguments, and specify "passive_updates=False" on each relationship(). @@ -119,21 +121,21 @@ behaviors: JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN makes use of Oracle's (+) operator. -* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when - the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued +* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when + the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued instead. This because these types don't seem to work correctly on Oracle 8 - even though they are available. The :class:`~sqlalchemy.types.NVARCHAR` + even though they are available. The :class:`~sqlalchemy.types.NVARCHAR` and :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate NVARCHAR2 and NCLOB. -* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy +* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy encodes all Python unicode objects to "string" before passing in as bind parameters. Synonym/DBLINK Reflection ------------------------- When using reflection with Table objects, the dialect can optionally search for tables -indicated by synonyms that reference DBLINK-ed tables by passing the flag -oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK +indicated by synonyms that reference DBLINK-ed tables by passing the flag +oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK is not in use this flag should be left off. 
""" @@ -215,8 +217,8 @@ class LONG(sqltypes.Text): class INTERVAL(sqltypes.TypeEngine): __visit_name__ = 'INTERVAL' - def __init__(self, - day_precision=None, + def __init__(self, + day_precision=None, second_precision=None): """Construct an INTERVAL. @@ -301,10 +303,10 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): def visit_INTERVAL(self, type_): return "INTERVAL DAY%s TO SECOND%s" % ( - type_.day_precision is not None and + type_.day_precision is not None and "(%d)" % type_.day_precision or "", - type_.second_precision is not None and + type_.second_precision is not None and "(%d)" % type_.second_precision or "", ) @@ -338,7 +340,7 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): else: return "%(name)s(%(precision)s, %(scale)s)" % {'name':name,'precision': precision, 'scale' : scale} - def visit_string(self, type_): + def visit_string(self, type_): return self.visit_VARCHAR2(type_) def visit_VARCHAR2(self, type_): @@ -354,10 +356,10 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): def _visit_varchar(self, type_, n, num): if not n and self.dialect._supports_char_length: return "VARCHAR%(two)s(%(length)s CHAR)" % { - 'length' : type_.length, + 'length' : type_.length, 'two':num} else: - return "%(n)sVARCHAR%(two)s(%(length)s)" % {'length' : type_.length, + return "%(n)sVARCHAR%(two)s(%(length)s)" % {'length' : type_.length, 'two':num, 'n':n} def visit_text(self, type_): @@ -429,7 +431,7 @@ class OracleCompiler(compiler.SQLCompiler): return "" def default_from(self): - """Called when a ``SELECT`` statement has no froms, + """Called when a ``SELECT`` statement has no froms, and no ``FROM`` clause is to be appended. The Oracle compiler tacks a "FROM DUAL" to the statement. 
@@ -564,7 +566,7 @@ class OracleCompiler(compiler.SQLCompiler): if not self.dialect.use_binds_for_limits: max_row = sql.literal_column("%d" % max_row) limitselect.append_whereclause( - sql.literal_column("ROWNUM")<=max_row) + sql.literal_column("ROWNUM") <= max_row) # If needed, add the ora_rn, and wrap again with offset. if select._offset is None: @@ -611,7 +613,7 @@ class OracleDDLCompiler(compiler.DDLCompiler): if constraint.ondelete is not None: text += " ON DELETE %s" % constraint.ondelete - # oracle has no ON UPDATE CASCADE - + # oracle has no ON UPDATE CASCADE - # its only available via triggers http://asktom.oracle.com/tkyte/update_cascade/index.html if constraint.onupdate is not None: util.warn( @@ -641,8 +643,8 @@ class OracleIdentifierPreparer(compiler.IdentifierPreparer): class OracleExecutionContext(default.DefaultExecutionContext): def fire_sequence(self, seq, type_): - return self._execute_scalar("SELECT " + - self.dialect.identifier_preparer.format_sequence(seq) + + return self._execute_scalar("SELECT " + + self.dialect.identifier_preparer.format_sequence(seq) + ".nextval FROM DUAL", type_) class OracleDialect(default.DefaultDialect): @@ -674,9 +676,9 @@ class OracleDialect(default.DefaultDialect): reflection_options = ('oracle_resolve_synonyms', ) - def __init__(self, - use_ansi=True, - optimize_limits=False, + def __init__(self, + use_ansi=True, + optimize_limits=False, use_binds_for_limits=True, **kwargs): default.DefaultDialect.__init__(self, **kwargs) @@ -806,8 +808,8 @@ class OracleDialect(default.DefaultDialect): if resolve_synonyms: actual_name, owner, dblink, synonym = self._resolve_synonym( - connection, - desired_owner=self.denormalize_name(schema), + connection, + desired_owner=self.denormalize_name(schema), desired_synonym=self.denormalize_name(table_name) ) else: @@ -874,11 +876,11 @@ class OracleDialect(default.DefaultDialect): char_length_col = 'char_length' else: char_length_col = 'data_length' - + c = connection.execute(sql.text( 
"SELECT column_name, data_type, %(char_length_col)s, data_precision, data_scale, " "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s " - "WHERE table_name = :table_name AND owner = :owner " + "WHERE table_name = :table_name AND owner = :owner " "ORDER BY column_id" % {'dblink': dblink, 'char_length_col':char_length_col}), table_name=table_name, owner=schema) @@ -890,7 +892,7 @@ class OracleDialect(default.DefaultDialect): coltype = NUMBER(precision, scale) elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'): coltype = self.ischema_names.get(coltype)(length) - elif 'WITH TIME ZONE' in coltype: + elif 'WITH TIME ZONE' in coltype: coltype = TIMESTAMP(timezone=True) else: coltype = re.sub(r'\(\d+\)', '', coltype) @@ -927,8 +929,8 @@ class OracleDialect(default.DefaultDialect): indexes = [] q = sql.text(""" SELECT a.index_name, a.column_name, b.uniqueness - FROM ALL_IND_COLUMNS%(dblink)s a, - ALL_INDEXES%(dblink)s b + FROM ALL_IND_COLUMNS%(dblink)s a, + ALL_INDEXES%(dblink)s b WHERE a.index_name = b.index_name AND a.table_owner = b.table_owner @@ -1110,8 +1112,8 @@ class OracleDialect(default.DefaultDialect): if resolve_synonyms: ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \ self._resolve_synonym( - connection, - desired_owner=self.denormalize_name(remote_owner), + connection, + desired_owner=self.denormalize_name(remote_owner), desired_table=self.denormalize_name(remote_table) ) if ref_synonym: diff --git a/libs/sqlalchemy/dialects/oracle/cx_oracle.py b/libs/sqlalchemy/dialects/oracle/cx_oracle.py index 5001acca..0154180d 100644 --- a/libs/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/libs/sqlalchemy/dialects/oracle/cx_oracle.py @@ -1,5 +1,5 @@ # oracle/cx_oracle.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,19 +9,19 @@ Driver 
------ -The Oracle dialect uses the cx_oracle driver, available at -http://cx-oracle.sourceforge.net/ . The dialect has several behaviors +The Oracle dialect uses the cx_oracle driver, available at +http://cx-oracle.sourceforge.net/ . The dialect has several behaviors which are specifically tailored towards compatibility with this module. Version 5.0 or greater is **strongly** recommended, as SQLAlchemy makes -extensive use of the cx_oracle output converters for numeric and +extensive use of the cx_oracle output converters for numeric and string conversions. Connecting ---------- -Connecting with create_engine() uses the standard URL approach of -``oracle://user:pass@host:port/dbname[?key=value&key=value...]``. If dbname is present, the -host, port, and dbname tokens are converted to a TNS name using the cx_oracle +Connecting with create_engine() uses the standard URL approach of +``oracle://user:pass@host:port/dbname[?key=value&key=value...]``. If dbname is present, the +host, port, and dbname tokens are converted to a TNS name using the cx_oracle :func:`makedsn()` function. Otherwise, the host token is taken directly as a TNS name. Additional arguments which may be specified either as query string arguments on the @@ -53,7 +53,7 @@ handler so that all string based result values are returned as unicode as well. Generally, the ``NLS_LANG`` environment variable determines the nature of the encoding to be used. -Note that this behavior is disabled when Oracle 8 is detected, as it has been +Note that this behavior is disabled when Oracle 8 is detected, as it has been observed that issues remain when passing Python unicodes to cx_oracle with Oracle 8. LOB Objects @@ -71,8 +71,40 @@ To disable this processing, pass ``auto_convert_lobs=False`` to :func:`create_en Two Phase Transaction Support ----------------------------- -Two Phase transactions are implemented using XA transactions. Success has been reported -with this feature but it should be regarded as experimental. 
+Two Phase transactions are implemented using XA transactions, and are known +to work in a rudimental fashion with recent versions of cx_Oracle +as of SQLAlchemy 0.8.0b2, 0.7.10. However, the mechanism is not yet +considered to be robust and should still be regarded as experimental. + +In particular, the cx_Oracle DBAPI as recently as 5.1.2 has a bug regarding +two phase which prevents +a particular DBAPI connection from being consistently usable in both +prepared transactions as well as traditional DBAPI usage patterns; therefore +once a particular connection is used via :meth:`.Connection.begin_prepared`, +all subsequent usages of the underlying DBAPI connection must be within +the context of prepared transactions. + +The default behavior of :class:`.Engine` is to maintain a pool of DBAPI +connections. Therefore, due to the above glitch, a DBAPI connection that has +been used in a two-phase operation, and is then returned to the pool, will +not be usable in a non-two-phase context. To avoid this situation, +the application can make one of several choices: + +* Disable connection pooling using :class:`.NullPool` + +* Ensure that the particular :class:`.Engine` in use is only used + for two-phase operations. A :class:`.Engine` bound to an ORM + :class:`.Session` which includes ``twophase=True`` will consistently + use the two-phase transaction style. + +* For ad-hoc two-phase operations without disabling pooling, the DBAPI + connection in use can be evicted from the connection pool using the + :class:`.Connection.detach` method. + +.. versionchanged:: 0.8.0b2,0.7.10 + Support for cx_oracle prepared transactions has been implemented + and tested. 
+ Precision Numerics ------------------ @@ -95,13 +127,14 @@ If precision numerics aren't required, the decimal handling can be disabled by passing the flag ``coerce_to_decimal=False`` to :func:`.create_engine`:: - engine = create_engine("oracle+cx_oracle://dsn", + engine = create_engine("oracle+cx_oracle://dsn", coerce_to_decimal=False) -The ``coerce_to_decimal`` flag is new in 0.7.6. +.. versionadded:: 0.7.6 + Add the ``coerce_to_decimal`` flag. -Another alternative to performance is to use the -`cdecimal `_ library; +Another alternative to performance is to use the +`cdecimal `_ library; see :class:`.Numeric` for additional notes. The handler attempts to use the "precision" and "scale" @@ -128,21 +161,23 @@ environment variable. Upon first connection, the dialect runs a test to determine the current "decimal" character, which can be a comma "," for european locales. From that point forward the outputtypehandler uses that character to represent a decimal -point (this behavior is new in version 0.6.6). Note that -cx_oracle 5.0.3 or greater is required when dealing with -numerics with locale settings that don't use a period "." as the -decimal character. +point. Note that cx_oracle 5.0.3 or greater is required +when dealing with numerics with locale settings that don't use +a period "." as the decimal character. + +.. versionchanged:: 0.6.6 + The outputtypehandler uses a comma "," character to represent + a decimal point. .. 
_OCI: http://www.oracle.com/technetwork/database/features/oci/index.html """ from sqlalchemy.dialects.oracle.base import OracleCompiler, OracleDialect, \ - RESERVED_WORDS, OracleExecutionContext + OracleExecutionContext from sqlalchemy.dialects.oracle import base as oracle from sqlalchemy.engine import base from sqlalchemy import types as sqltypes, util, exc, processors -from datetime import datetime import random import collections from sqlalchemy.util.compat import decimal @@ -156,7 +191,7 @@ class _OracleNumeric(sqltypes.Numeric): def result_processor(self, dialect, coltype): # we apply a cx_oracle type handler to all connections # that converts floating point strings to Decimal(). - # However, in some subquery situations, Oracle doesn't + # However, in some subquery situations, Oracle doesn't # give us enough information to determine int or Decimal. # It could even be int/Decimal differently on each row, # regardless of the scale given for the originating type. @@ -186,7 +221,7 @@ class _OracleNumeric(sqltypes.Numeric): else: return None else: - # cx_oracle 4 behavior, will assume + # cx_oracle 4 behavior, will assume # floats return super(_OracleNumeric, self).\ result_processor(dialect, coltype) @@ -233,7 +268,7 @@ class _NativeUnicodeMixin(object): # end Py2K # we apply a connection output handler that returns - # unicode in all cases, so the "native_unicode" flag + # unicode in all cases, so the "native_unicode" flag # will be set for the default String.result_processor. class _OracleChar(_NativeUnicodeMixin, sqltypes.CHAR): @@ -248,6 +283,13 @@ class _OracleText(_LOBMixin, sqltypes.Text): def get_dbapi_type(self, dbapi): return dbapi.CLOB +class _OracleLong(oracle.LONG): + # a raw LONG is a text type, but does *not* + # get the LobMixin with cx_oracle. 
+ + def get_dbapi_type(self, dbapi): + return dbapi.LONG_STRING + class _OracleString(_NativeUnicodeMixin, sqltypes.String): pass @@ -296,13 +338,13 @@ class _OracleRowid(oracle.ROWID): return dbapi.ROWID class OracleCompiler_cx_oracle(OracleCompiler): - def bindparam_string(self, name): + def bindparam_string(self, name, **kw): if self.preparer._bindparam_requires_quotes(name): quoted_name = '"%s"' % name self._quoted_bind_names[name] = quoted_name - return OracleCompiler.bindparam_string(self, quoted_name) + return OracleCompiler.bindparam_string(self, quoted_name, **kw) else: - return OracleCompiler.bindparam_string(self, name) + return OracleCompiler.bindparam_string(self, name, **kw) class OracleExecutionContext_cx_oracle(OracleExecutionContext): @@ -312,15 +354,15 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): getattr(self.compiled, '_quoted_bind_names', None) if quoted_bind_names: if not self.dialect.supports_unicode_statements: - # if DBAPI doesn't accept unicode statements, + # if DBAPI doesn't accept unicode statements, # keys in self.parameters would have been encoded # here. so convert names in quoted_bind_names # to encoded as well. 
quoted_bind_names = \ dict( - (fromname.encode(self.dialect.encoding), - toname.encode(self.dialect.encoding)) - for fromname, toname in + (fromname.encode(self.dialect.encoding), + toname.encode(self.dialect.encoding)) + for fromname, toname in quoted_bind_names.items() ) for param in self.parameters: @@ -329,11 +371,11 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): del param[fromname] if self.dialect.auto_setinputsizes: - # cx_oracle really has issues when you setinputsizes + # cx_oracle really has issues when you setinputsizes # on String, including that outparams/RETURNING # breaks for varchars - self.set_input_sizes(quoted_bind_names, - exclude_types=self.dialect._cx_oracle_string_types + self.set_input_sizes(quoted_bind_names, + exclude_types=self.dialect._cx_oracle_exclude_setinputsizes ) # if a single execute, check for outparams @@ -365,7 +407,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): def get_result_proxy(self): if hasattr(self, 'out_parameters') and self.compiled.returning: returning_params = dict( - (k, v.getvalue()) + (k, v.getvalue()) for k, v in self.out_parameters.items() ) return ReturningResultProxy(self, returning_params) @@ -391,7 +433,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): impl_type = type.dialect_impl(self.dialect) dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi) result_processor = impl_type.\ - result_processor(self.dialect, + result_processor(self.dialect, dbapi_type) if result_processor is not None: out_parameters[name] = \ @@ -400,7 +442,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): out_parameters[name] = self.out_parameters[name].getvalue() else: result.out_parameters = dict( - (k, v.getvalue()) + (k, v.getvalue()) for k, v in self.out_parameters.items() ) @@ -409,13 +451,13 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle): 
"""Support WITH_UNICODE in Python 2.xx. - WITH_UNICODE allows cx_Oracle's Python 3 unicode handling - behavior under Python 2.x. This mode in some cases disallows - and in other cases silently passes corrupted data when - non-Python-unicode strings (a.k.a. plain old Python strings) - are passed as arguments to connect(), the statement sent to execute(), + WITH_UNICODE allows cx_Oracle's Python 3 unicode handling + behavior under Python 2.x. This mode in some cases disallows + and in other cases silently passes corrupted data when + non-Python-unicode strings (a.k.a. plain old Python strings) + are passed as arguments to connect(), the statement sent to execute(), or any of the bind parameter keys or values sent to execute(). - This optional context therefore ensures that all statements are + This optional context therefore ensures that all statements are passed as Python unicode objects. """ @@ -446,7 +488,7 @@ class ReturningResultProxy(base.FullyBufferedResultProxy): return ret def _buffer_rows(self): - return collections.deque([tuple(self._returning_params["ret_%d" % i] + return collections.deque([tuple(self._returning_params["ret_%d" % i] for i, c in enumerate(self._returning_params))]) class OracleDialect_cx_oracle(OracleDialect): @@ -467,6 +509,11 @@ class OracleDialect_cx_oracle(OracleDialect): sqltypes.String : _OracleString, sqltypes.UnicodeText : _OracleUnicodeText, sqltypes.CHAR : _OracleChar, + + # a raw LONG is a text type, but does *not* + # get the LobMixin with cx_oracle. + oracle.LONG: _OracleLong, + sqltypes.Integer : _OracleInteger, # this is only needed for OUT parameters. # it would be nice if we could not use it otherwise. 
oracle.RAW: _OracleRaw, @@ -478,11 +525,11 @@ class OracleDialect_cx_oracle(OracleDialect): execute_sequence_format = list - def __init__(self, - auto_setinputsizes=True, - auto_convert_lobs=True, - threaded=True, - allow_twophase=True, + def __init__(self, + auto_setinputsizes=True, + auto_convert_lobs=True, + threaded=True, + allow_twophase=True, coerce_to_decimal=True, arraysize=50, **kwargs): OracleDialect.__init__(self, **kwargs) @@ -503,13 +550,14 @@ class OracleDialect_cx_oracle(OracleDialect): getattr(self.dbapi, name, None) for name in names ]).difference([None]) + self._cx_oracle_exclude_setinputsizes = types("STRING", "UNICODE") self._cx_oracle_string_types = types("STRING", "UNICODE", "NCLOB", "CLOB") self._cx_oracle_unicode_types = types("UNICODE", "NCLOB") - self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB") + self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB") self.supports_unicode_binds = self.cx_oracle_ver >= (5, 0) self.supports_native_decimal = ( - self.cx_oracle_ver >= (5, 0) and + self.cx_oracle_ver >= (5, 0) and coerce_to_decimal ) @@ -551,13 +599,13 @@ class OracleDialect_cx_oracle(OracleDialect): # expect encoded strings or unicodes, etc. self.dbapi_type_map = { self.dbapi.CLOB: oracle.CLOB(), - self.dbapi.NCLOB:oracle.NCLOB(), + self.dbapi.NCLOB: oracle.NCLOB(), self.dbapi.BLOB: oracle.BLOB(), self.dbapi.BINARY: oracle.RAW(), } @classmethod def dbapi(cls): - import cx_Oracle + cx_Oracle = __import__('cx_Oracle') return cx_Oracle def initialize(self, connection): @@ -567,12 +615,12 @@ class OracleDialect_cx_oracle(OracleDialect): self._detect_decimal_char(connection) def _detect_decimal_char(self, connection): - """detect if the decimal separator character is not '.', as + """detect if the decimal separator character is not '.', as is the case with european locale settings for NLS_LANG. 
cx_oracle itself uses similar logic when it formats Python - Decimal objects to strings on the bind side (as of 5.0.3), - as Oracle sends/receives string numerics only in the + Decimal objects to strings on the bind side (as of 5.0.3), + as Oracle sends/receives string numerics only in the current locale. """ @@ -583,14 +631,14 @@ class OracleDialect_cx_oracle(OracleDialect): cx_Oracle = self.dbapi conn = connection.connection - # override the output_type_handler that's - # on the cx_oracle connection with a plain + # override the output_type_handler that's + # on the cx_oracle connection with a plain # one on the cursor - def output_type_handler(cursor, name, defaultType, + def output_type_handler(cursor, name, defaultType, size, precision, scale): return cursor.var( - cx_Oracle.STRING, + cx_Oracle.STRING, 255, arraysize=cursor.arraysize) cursor = conn.cursor() @@ -620,7 +668,7 @@ class OracleDialect_cx_oracle(OracleDialect): return cx_Oracle = self.dbapi - def output_type_handler(cursor, name, defaultType, + def output_type_handler(cursor, name, defaultType, size, precision, scale): # convert all NUMBER with precision + positive scale to Decimal # this almost allows "native decimal" mode. @@ -628,22 +676,22 @@ class OracleDialect_cx_oracle(OracleDialect): defaultType == cx_Oracle.NUMBER and \ precision and scale > 0: return cursor.var( - cx_Oracle.STRING, - 255, - outconverter=self._to_decimal, + cx_Oracle.STRING, + 255, + outconverter=self._to_decimal, arraysize=cursor.arraysize) # if NUMBER with zero precision and 0 or neg scale, this appears - # to indicate "ambiguous". Use a slower converter that will - # make a decision based on each value received - the type + # to indicate "ambiguous". Use a slower converter that will + # make a decision based on each value received - the type # may change from row to row (!). This kills # off "native decimal" mode, handlers still needed. 
elif self.supports_native_decimal and \ defaultType == cx_Oracle.NUMBER \ and not precision and scale <= 0: return cursor.var( - cx_Oracle.STRING, - 255, - outconverter=self._detect_decimal, + cx_Oracle.STRING, + 255, + outconverter=self._detect_decimal, arraysize=cursor.arraysize) # allow all strings to come back natively as Unicode elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR): @@ -707,7 +755,7 @@ class OracleDialect_cx_oracle(OracleDialect): def _get_server_version_info(self, connection): return tuple( - int(x) + int(x) for x in connection.connection.version.split('.') ) @@ -739,15 +787,23 @@ class OracleDialect_cx_oracle(OracleDialect): connection.connection.begin(*xid) def do_prepare_twophase(self, connection, xid): - connection.connection.prepare() + result = connection.connection.prepare() + connection.info['cx_oracle_prepared'] = result - def do_rollback_twophase(self, connection, xid, is_prepared=True, recover=False): + def do_rollback_twophase(self, connection, xid, is_prepared=True, + recover=False): self.do_rollback(connection.connection) - def do_commit_twophase(self, connection, xid, is_prepared=True, recover=False): - self.do_commit(connection.connection) + def do_commit_twophase(self, connection, xid, is_prepared=True, + recover=False): + if not is_prepared: + self.do_commit(connection.connection) + else: + oci_prepared = connection.info['cx_oracle_prepared'] + if oci_prepared: + self.do_commit(connection.connection) def do_recover_twophase(self, connection): - pass + connection.info.pop('cx_oracle_prepared', None) dialect = OracleDialect_cx_oracle diff --git a/libs/sqlalchemy/dialects/oracle/zxjdbc.py b/libs/sqlalchemy/dialects/oracle/zxjdbc.py index fa86294b..e4a12ce0 100644 --- a/libs/sqlalchemy/dialects/oracle/zxjdbc.py +++ b/libs/sqlalchemy/dialects/oracle/zxjdbc.py @@ -1,5 +1,5 @@ # oracle/zxjdbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and 
contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/postgres.py b/libs/sqlalchemy/dialects/postgres.py index 8c5097d6..82d1a39c 100644 --- a/libs/sqlalchemy/dialects/postgres.py +++ b/libs/sqlalchemy/dialects/postgres.py @@ -1,5 +1,5 @@ # dialects/postgres.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/postgresql/__init__.py b/libs/sqlalchemy/dialects/postgresql/__init__.py index bc0c3127..04ae413c 100644 --- a/libs/sqlalchemy/dialects/postgresql/__init__.py +++ b/libs/sqlalchemy/dialects/postgresql/__init__.py @@ -1,5 +1,5 @@ # postgresql/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -14,7 +14,7 @@ from sqlalchemy.dialects.postgresql.base import \ DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect __all__ = ( -'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', 'FLOAT', 'REAL', 'INET', +'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', 'FLOAT', 'REAL', 'INET', 'CIDR', 'UUID', 'BIT', 'MACADDR', 'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN', 'INTERVAL', 'ARRAY', 'ENUM', 'dialect' ) diff --git a/libs/sqlalchemy/dialects/postgresql/base.py b/libs/sqlalchemy/dialects/postgresql/base.py index c4c2bbdb..384b7616 100644 --- a/libs/sqlalchemy/dialects/postgresql/base.py +++ b/libs/sqlalchemy/dialects/postgresql/base.py @@ -1,5 +1,5 @@ # postgresql/base.py -# Copyright (C) 2005-2012 the SQLAlchemy 
authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -21,7 +21,7 @@ default corresponding to the column. To specify a specific named sequence to be used for primary key generation, use the :func:`~sqlalchemy.schema.Sequence` construct:: - Table('sometable', metadata, + Table('sometable', metadata, Column('id', Integer, Sequence('some_id_seq'), primary_key=True) ) @@ -51,7 +51,7 @@ parameter are ``READ COMMITTED``, ``READ UNCOMMITTED``, ``REPEATABLE READ``, and ``SERIALIZABLE``:: engine = create_engine( - "postgresql+pg8000://scott:tiger@localhost/test", + "postgresql+pg8000://scott:tiger@localhost/test", isolation_level="READ UNCOMMITTED" ) @@ -75,18 +75,19 @@ the current ``search_path``, the "schema" attribute of the resulting remote table matches that of the referencing table, and the "schema" argument was explicitly stated on the referencing table. -The best practice here is to not use the ``schema`` argument +The best practice here is to not use the ``schema`` argument on :class:`.Table` for any schemas that are present in ``search_path``. ``search_path`` defaults to "public", but care should be taken to inspect the actual value using:: SHOW search_path; -Prior to version 0.7.3, cross-schema foreign keys when the schemas -were also in the ``search_path`` could make an incorrect assumption -if the schemas were explicitly stated on each :class:`.Table`. +.. versionchanged:: 0.7.3 + Prior to this version, cross-schema foreign keys when the schemas + were also in the ``search_path`` could make an incorrect assumption + if the schemas were explicitly stated on each :class:`.Table`. 
-Background on PG's ``search_path`` is at: +Background on PG's ``search_path`` is at: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH INSERT/UPDATE...RETURNING @@ -125,7 +126,7 @@ to the PostgreSQL dialect. Partial Indexes ^^^^^^^^^^^^^^^^ -Partial indexes add criterion to the index definition so that the index is +Partial indexes add criterion to the index definition so that the index is applied to a subset of rows. These can be specified on :class:`.Index` using the ``postgresql_where`` keyword argument:: @@ -137,13 +138,16 @@ Operator Classes PostgreSQL allows the specification of an *operator class* for each column of an index (see http://www.postgresql.org/docs/8.3/interactive/indexes-opclass.html). The :class:`.Index` construct allows these to be specified via the ``postgresql_ops`` -keyword argument (new as of SQLAlchemy 0.7.2):: +keyword argument:: - Index('my_index', my_table.c.id, my_table.c.data, + Index('my_index', my_table.c.id, my_table.c.data, postgresql_ops={ - 'data': 'text_pattern_ops', + 'data': 'text_pattern_ops', 'id': 'int4_ops' - }) + }) + +.. versionadded:: 0.7.2 + ``postgresql_ops`` keyword argument to :class:`.Index` construct. Note that the keys in the ``postgresql_ops`` dictionary are the "key" name of the :class:`.Column`, i.e. the name used to access it from the ``.c`` collection @@ -345,24 +349,27 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine): the fly :param mutable=False: Specify whether lists passed to this - class should be considered mutable - this enables - "mutable types" mode in the ORM. Be sure to read the - notes for :class:`.MutableType` regarding ORM - performance implications (default changed from ``True`` in - 0.7.0). + class should be considered mutable - this enables + "mutable types" mode in the ORM. Be sure to read the + notes for :class:`.MutableType` regarding ORM + performance implications. - .. 
note:: - - This functionality is now superseded by the - ``sqlalchemy.ext.mutable`` extension described in - :ref:`mutable_toplevel`. + .. versionchanged:: 0.7.0 + Default changed from ``True``\ . + + .. versionchanged:: 0.7 + This functionality is now superseded by the + ``sqlalchemy.ext.mutable`` extension described in + :ref:`mutable_toplevel`. :param as_tuple=False: Specify whether return results should be converted to tuples from lists. DBAPIs such as psycopg2 return lists by default. When tuples are returned, the results are hashable. This flag can only be set to ``True`` when ``mutable`` is set to - ``False``. (new in 0.6.5) + ``False``. + + .. versionadded:: 0.6.5 """ if isinstance(item_type, ARRAY): @@ -444,37 +451,37 @@ PGArray = ARRAY class ENUM(sqltypes.Enum): """Postgresql ENUM type. - + This is a subclass of :class:`.types.Enum` which includes support for PG's ``CREATE TYPE``. - - :class:`~.postgresql.ENUM` is used automatically when + + :class:`~.postgresql.ENUM` is used automatically when using the :class:`.types.Enum` type on PG assuming - the ``native_enum`` is left as ``True``. However, the + the ``native_enum`` is left as ``True``. However, the :class:`~.postgresql.ENUM` class can also be instantiated directly in order to access some additional Postgresql-specific - options, namely finer control over whether or not + options, namely finer control over whether or not ``CREATE TYPE`` should be emitted. - - Note that both :class:`.types.Enum` as well as + + Note that both :class:`.types.Enum` as well as :class:`~.postgresql.ENUM` feature create/drop methods; the base :class:`.types.Enum` type ultimately delegates to the :meth:`~.postgresql.ENUM.create` and :meth:`~.postgresql.ENUM.drop` methods present here. - + """ def __init__(self, *enums, **kw): """Construct an :class:`~.postgresql.ENUM`. - + Arguments are the same as that of :class:`.types.Enum`, but also including the following parameters. - - :param create_type: Defaults to True. 
- Indicates that ``CREATE TYPE`` should be - emitted, after optionally checking for the - presence of the type, when the parent + + :param create_type: Defaults to True. + Indicates that ``CREATE TYPE`` should be + emitted, after optionally checking for the + presence of the type, when the parent table is being created; and additionally that ``DROP TYPE`` is called when the table is dropped. When ``False``, no check @@ -485,31 +492,32 @@ class ENUM(sqltypes.Enum): are called directly. Setting to ``False`` is helpful when invoking a creation scheme to a SQL file - without access to the actual database - + without access to the actual database - the :meth:`~.postgresql.ENUM.create` and :meth:`~.postgresql.ENUM.drop` methods can be used to emit SQL to a target bind. - (new in 0.7.4) - + + .. versionadded:: 0.7.4 + """ self.create_type = kw.pop("create_type", True) super(ENUM, self).__init__(*enums, **kw) def create(self, bind=None, checkfirst=True): - """Emit ``CREATE TYPE`` for this + """Emit ``CREATE TYPE`` for this :class:`~.postgresql.ENUM`. - + If the underlying dialect does not support Postgresql CREATE TYPE, no action is taken. - + :param bind: a connectable :class:`.Engine`, :class:`.Connection`, or similar object to emit SQL. - :param checkfirst: if ``True``, a query against + :param checkfirst: if ``True``, a query against the PG catalog will be first performed to see if the type does not exist already before creating. - + """ if not bind.dialect.supports_native_enum: return @@ -519,19 +527,19 @@ class ENUM(sqltypes.Enum): bind.execute(CreateEnumType(self)) def drop(self, bind=None, checkfirst=True): - """Emit ``DROP TYPE`` for this + """Emit ``DROP TYPE`` for this :class:`~.postgresql.ENUM`. - + If the underlying dialect does not support Postgresql DROP TYPE, no action is taken. - + :param bind: a connectable :class:`.Engine`, :class:`.Connection`, or similar object to emit SQL. 
- :param checkfirst: if ``True``, a query against + :param checkfirst: if ``True``, a query against the PG catalog will be first performed to see if the type actually exists before dropping. - + """ if not bind.dialect.supports_native_enum: return @@ -543,7 +551,7 @@ class ENUM(sqltypes.Enum): def _check_for_name_in_memos(self, checkfirst, kw): """Look in the 'ddl runner' for 'memos', then note our name in that collection. - + This to ensure a particular named enum is operated upon only once within any kind of create/drop sequence without relying upon "checkfirst". @@ -620,14 +628,14 @@ class PGCompiler(compiler.SQLCompiler): def visit_match_op(self, binary, **kw): return "%s @@ to_tsquery(%s)" % ( - self.process(binary.left), + self.process(binary.left), self.process(binary.right)) def visit_ilike_op(self, binary, **kw): escape = binary.modifiers.get("escape", None) return '%s ILIKE %s' % \ (self.process(binary.left), self.process(binary.right)) \ - + (escape and + + (escape and (' ESCAPE ' + self.render_literal_value(escape, None)) or '') @@ -635,7 +643,7 @@ class PGCompiler(compiler.SQLCompiler): escape = binary.modifiers.get("escape", None) return '%s NOT ILIKE %s' % \ (self.process(binary.left), self.process(binary.right)) \ - + (escape and + + (escape and (' ESCAPE ' + self.render_literal_value(escape, None)) or '') @@ -675,6 +683,10 @@ class PGCompiler(compiler.SQLCompiler): def for_update_clause(self, select): if select.for_update == 'nowait': return " FOR UPDATE NOWAIT" + elif select.for_update == 'read': + return " FOR SHARE" + elif select.for_update == 'read_nowait': + return " FOR SHARE NOWAIT" else: return super(PGCompiler, self).for_update_clause(select) @@ -682,9 +694,9 @@ class PGCompiler(compiler.SQLCompiler): columns = [ self.process( - self.label_select_column(None, c, asfrom=False), - within_columns_clause=True, - result_map=self.result_map) + self.label_select_column(None, c, asfrom=False), + within_columns_clause=True, + 
result_map=self.result_map) for c in expression._select_iterables(returning_cols) ] @@ -698,8 +710,8 @@ class PGCompiler(compiler.SQLCompiler): affinity = None casts = { - sqltypes.Date:'date', - sqltypes.DateTime:'timestamp', + sqltypes.Date:'date', + sqltypes.DateTime:'timestamp', sqltypes.Interval:'interval', sqltypes.Time:'time' } cast = casts.get(affinity, None) @@ -718,7 +730,7 @@ class PGDDLCompiler(compiler.DDLCompiler): column is column.table._autoincrement_column and \ not isinstance(impl_type, sqltypes.SmallInteger) and \ ( - column.default is None or + column.default is None or ( isinstance(column.default, schema.Sequence) and column.default.optional @@ -773,7 +785,7 @@ class PGDDLCompiler(compiler.DDLCompiler): text += "(%s)" \ % ( ', '.join([ - preparer.format_column(c) + + preparer.format_column(c) + (c.key in ops and (' ' + ops[c.key]) or '') for c in index.columns]) ) @@ -831,14 +843,14 @@ class PGTypeCompiler(compiler.GenericTypeCompiler): def visit_TIMESTAMP(self, type_): return "TIMESTAMP%s %s" % ( - getattr(type_, 'precision', None) and "(%d)" % + getattr(type_, 'precision', None) and "(%d)" % type_.precision or "", (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" ) def visit_TIME(self, type_): return "TIME%s %s" % ( - getattr(type_, 'precision', None) and "(%d)" % + getattr(type_, 'precision', None) and "(%d)" % type_.precision or "", (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" ) @@ -920,21 +932,21 @@ class PGExecutionContext(default.DefaultExecutionContext): return self._execute_scalar("select %s" % column.server_default.arg, column.type) - elif (column.default is None or + elif (column.default is None or (column.default.is_sequence and column.default.optional)): - # execute the sequence associated with a SERIAL primary + # execute the sequence associated with a SERIAL primary # key column. for non-primary-key SERIAL, the ID just # generates server side. 
try: seq_name = column._postgresql_seq_name except AttributeError: - tab = column.table.name - col = column.name - tab = tab[0:29 + max(0, (29 - len(col)))] - col = col[0:29 + max(0, (29 - len(tab)))] + tab = column.table.name + col = column.name + tab = tab[0:29 + max(0, (29 - len(col)))] + col = col[0:29 + max(0, (29 - len(tab)))] column._postgresql_seq_name = seq_name = "%s_%s_seq" % (tab, col) sch = column.table.schema @@ -1004,7 +1016,7 @@ class PGDialect(default.DefaultDialect): else: return None - _isolation_lookup = set(['SERIALIZABLE', + _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ']) def set_isolation_level(self, connection, level): @@ -1012,9 +1024,9 @@ class PGDialect(default.DefaultDialect): if level not in self._isolation_lookup: raise exc.ArgumentError( "Invalid value '%s' for isolation_level. " - "Valid isolation levels for %s are %s" % + "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) cursor = connection.cursor() cursor.execute( "SET SESSION CHARACTERISTICS AS TRANSACTION " @@ -1035,13 +1047,13 @@ class PGDialect(default.DefaultDialect): def do_prepare_twophase(self, connection, xid): connection.execute("PREPARE TRANSACTION '%s'" % xid) - def do_rollback_twophase(self, connection, xid, + def do_rollback_twophase(self, connection, xid, is_prepared=True, recover=False): if is_prepared: if recover: - #FIXME: ugly hack to get out of transaction - # context when commiting recoverable transactions - # Must find out a way how to make the dbapi not + #FIXME: ugly hack to get out of transaction + # context when committing recoverable transactions + # Must find out a way how to make the dbapi not # open a transaction. 
connection.execute("ROLLBACK") connection.execute("ROLLBACK PREPARED '%s'" % xid) @@ -1050,7 +1062,7 @@ class PGDialect(default.DefaultDialect): else: self.do_rollback(connection.connection) - def do_commit_twophase(self, connection, xid, + def do_commit_twophase(self, connection, xid, is_prepared=True, recover=False): if is_prepared: if recover: @@ -1102,10 +1114,10 @@ class PGDialect(default.DefaultDialect): "n.oid=c.relnamespace where n.nspname=:schema and " "relname=:name", bindparams=[ - sql.bindparam('name', + sql.bindparam('name', unicode(table_name), type_=sqltypes.Unicode), - sql.bindparam('schema', - unicode(schema), type_=sqltypes.Unicode)] + sql.bindparam('schema', + unicode(schema), type_=sqltypes.Unicode)] ) ) return bool(cursor.first()) @@ -1121,7 +1133,7 @@ class PGDialect(default.DefaultDialect): bindparams=[ sql.bindparam('name', unicode(sequence_name), type_=sqltypes.Unicode) - ] + ] ) ) else: @@ -1133,7 +1145,7 @@ class PGDialect(default.DefaultDialect): bindparams=[ sql.bindparam('name', unicode(sequence_name), type_=sqltypes.Unicode), - sql.bindparam('schema', + sql.bindparam('schema', unicode(schema), type_=sqltypes.Unicode) ] ) @@ -1261,13 +1273,13 @@ class PGDialect(default.DefaultDialect): SELECT relname FROM pg_class c WHERE relkind = 'v' - AND '%(schema)s' = (select nspname from pg_namespace n + AND '%(schema)s' = (select nspname from pg_namespace n where n.oid = c.relnamespace) """ % dict(schema=current_schema) # Py3K #view_names = [row[0] for row in connection.execute(s)] # Py2K - view_names = [row[0].decode(self.encoding) + view_names = [row[0].decode(self.encoding) for row in connection.execute(s)] # end Py2K return view_names @@ -1301,10 +1313,10 @@ class PGDialect(default.DefaultDialect): SQL_COLS = """ SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), - (SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid) - for 128) + (SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid) + for 128) FROM 
pg_catalog.pg_attrdef d - WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum + WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef) AS DEFAULT, a.attnotnull, a.attnum, a.attrelid as table_oid @@ -1313,8 +1325,8 @@ class PGDialect(default.DefaultDialect): AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum """ - s = sql.text(SQL_COLS, - bindparams=[sql.bindparam('table_oid', type_=sqltypes.Integer)], + s = sql.text(SQL_COLS, + bindparams=[sql.bindparam('table_oid', type_=sqltypes.Integer)], typemap={'attname':sqltypes.Unicode, 'default':sqltypes.Unicode} ) c = connection.execute(s, table_oid=table_oid) @@ -1325,7 +1337,7 @@ class PGDialect(default.DefaultDialect): # format columns columns = [] for name, format_type, default, notnull, attnum, table_oid in rows: - ## strip (5) from character varying(5), timestamp(5) + ## strip (5) from character varying(5), timestamp(5) # with time zone, etc attype = re.sub(r'\([\d,]+\)', '', format_type) @@ -1350,13 +1362,13 @@ class PGDialect(default.DefaultDialect): args = (53, ) elif attype == 'integer': args = () - elif attype in ('timestamp with time zone', + elif attype in ('timestamp with time zone', 'time with time zone'): kwargs['timezone'] = True if charlen: kwargs['precision'] = int(charlen) args = () - elif attype in ('timestamp without time zone', + elif attype in ('timestamp without time zone', 'time without time zone', 'time'): kwargs['timezone'] = False if charlen: @@ -1397,7 +1409,7 @@ class PGDialect(default.DefaultDialect): # A table can't override whether the domain is nullable. nullable = domain['nullable'] if domain['default'] and not default: - # It can, however, override the default + # It can, however, override the default # value, but can't set it to null. default = domain['default'] continue @@ -1423,7 +1435,7 @@ class PGDialect(default.DefaultDialect): sch = schema if '.' not in match.group(2) and sch is not None: # unconditionally quote the schema name. 
this could - # later be enhanced to obey quoting rules / + # later be enhanced to obey quoting rules / # "quote schema" default = match.group(1) + \ ('"%s"' % sch) + '.' + \ @@ -1439,27 +1451,40 @@ class PGDialect(default.DefaultDialect): table_oid = self.get_table_oid(connection, table_name, schema, info_cache=kw.get('info_cache')) - PK_SQL = """ - SELECT a.attname - FROM + if self.server_version_info < (8, 4): + # unnest() and generate_subscripts() both introduced in + # version 8.4 + PK_SQL = """ + SELECT a.attname + FROM pg_class t join pg_index ix on t.oid = ix.indrelid - join pg_attribute a + join pg_attribute a on t.oid=a.attrelid and a.attnum=ANY(ix.indkey) - WHERE - t.oid = :table_oid and - ix.indisprimary = 't' - ORDER BY - a.attnum - """ - t = sql.text(PK_SQL, typemap={'attname':sqltypes.Unicode}) + WHERE + t.oid = :table_oid and ix.indisprimary = 't' + ORDER BY a.attnum + """ + else: + PK_SQL = """ + SELECT a.attname + FROM pg_attribute a JOIN ( + SELECT unnest(ix.indkey) attnum, + generate_subscripts(ix.indkey, 1) ord + FROM pg_index ix + WHERE ix.indrelid = :table_oid AND ix.indisprimary + ) k ON a.attnum=k.attnum + WHERE a.attrelid = :table_oid + ORDER BY k.ord + """ + t = sql.text(PK_SQL, typemap={'attname': sqltypes.Unicode}) c = connection.execute(t, table_oid=table_oid) primary_keys = [r[0] for r in c.fetchall()] return primary_keys @reflection.cache def get_pk_constraint(self, connection, table_name, schema=None, **kw): - cols = self.get_primary_keys(connection, table_name, + cols = self.get_primary_keys(connection, table_name, schema=schema, **kw) table_oid = self.get_table_oid(connection, table_name, schema, @@ -1486,14 +1511,14 @@ class PGDialect(default.DefaultDialect): info_cache=kw.get('info_cache')) FK_SQL = """ - SELECT r.conname, + SELECT r.conname, pg_catalog.pg_get_constraintdef(r.oid, true) as condef, n.nspname as conschema FROM pg_catalog.pg_constraint r, pg_namespace n, pg_class c - WHERE r.conrelid = :table AND + WHERE r.conrelid 
= :table AND r.contype = 'f' AND c.oid = confrelid AND n.oid = c.relnamespace @@ -1510,7 +1535,7 @@ class PGDialect(default.DefaultDialect): '(?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups() constrained_columns, referred_schema, \ referred_table, referred_columns = m - constrained_columns = [preparer._unquote_identifier(x) + constrained_columns = [preparer._unquote_identifier(x) for x in re.split(r'\s*,\s*', constrained_columns)] if referred_schema: @@ -1525,7 +1550,7 @@ class PGDialect(default.DefaultDialect): # and an explicit schema was given for the referencing table. referred_schema = schema referred_table = preparer._unquote_identifier(referred_table) - referred_columns = [preparer._unquote_identifier(x) + referred_columns = [preparer._unquote_identifier(x) for x in re.split(r'\s*,\s', referred_columns)] fkey_d = { 'name' : conname, @@ -1548,11 +1573,11 @@ class PGDialect(default.DefaultDialect): ix.indisunique, ix.indexprs, ix.indpred, a.attname FROM - pg_class t + pg_class t join pg_index ix on t.oid = ix.indrelid join pg_class i on i.oid=ix.indexrelid - left outer join - pg_attribute a + left outer join + pg_attribute a on t.oid=a.attrelid and a.attnum=ANY(ix.indkey) WHERE t.relkind = 'r' @@ -1604,13 +1629,12 @@ class PGDialect(default.DefaultDialect): SQL_ENUMS = """ SELECT t.typname as "name", -- no enum defaults in 8.4 at least - -- t.typdefault as "default", + -- t.typdefault as "default", pg_catalog.pg_type_is_visible(t.oid) as "visible", n.nspname as "schema", e.enumlabel as "label" FROM pg_catalog.pg_type t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace - LEFT JOIN pg_catalog.pg_constraint r ON t.oid = r.contypid LEFT JOIN pg_catalog.pg_enum e ON t.oid = e.enumtypid WHERE t.typtype = 'e' ORDER BY "name", e.oid -- e.oid gives us label order @@ -1625,8 +1649,8 @@ class PGDialect(default.DefaultDialect): for enum in c.fetchall(): if enum['visible']: # 'visible' just means whether or not the enum is in a - # schema that's on the search path 
-- or not overriden by - # a schema with higher presedence. If it's not visible, + # schema that's on the search path -- or not overridden by + # a schema with higher precedence. If it's not visible, # it will be prefixed with the schema-name when it's used. name = enum['name'] else: @@ -1652,7 +1676,6 @@ class PGDialect(default.DefaultDialect): n.nspname as "schema" FROM pg_catalog.pg_type t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace - LEFT JOIN pg_catalog.pg_constraint r ON t.oid = r.contypid WHERE t.typtype = 'd' """ @@ -1665,16 +1688,16 @@ class PGDialect(default.DefaultDialect): attype = re.search('([^\(]+)', domain['attype']).group(1) if domain['visible']: # 'visible' just means whether or not the domain is in a - # schema that's on the search path -- or not overriden by - # a schema with higher presedence. If it's not visible, + # schema that's on the search path -- or not overridden by + # a schema with higher precedence. If it's not visible, # it will be prefixed with the schema-name when it's used. 
name = domain['name'] else: name = "%s.%s" % (domain['schema'], domain['name']) domains[name] = { - 'attype':attype, - 'nullable': domain['nullable'], + 'attype':attype, + 'nullable': domain['nullable'], 'default': domain['default'] } diff --git a/libs/sqlalchemy/dialects/postgresql/pg8000.py b/libs/sqlalchemy/dialects/postgresql/pg8000.py index 9a65bf08..dc72555e 100644 --- a/libs/sqlalchemy/dialects/postgresql/pg8000.py +++ b/libs/sqlalchemy/dialects/postgresql/pg8000.py @@ -1,5 +1,5 @@ # postgresql/pg8000.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/postgresql/psycopg2.py b/libs/sqlalchemy/dialects/postgresql/psycopg2.py index 5aa93978..ecc8d331 100644 --- a/libs/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/libs/sqlalchemy/dialects/postgresql/psycopg2.py @@ -1,5 +1,5 @@ # postgresql/psycopg2.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,7 +10,7 @@ Driver ------ The psycopg2 driver is available at http://pypi.python.org/pypi/psycopg2/ . -The dialect has several behaviors which are specifically tailored towards compatibility +The dialect has several behaviors which are specifically tailored towards compatibility with this module. Note that psycopg1 is **not** supported. 
@@ -48,7 +48,7 @@ which specifies Unix-domain communication rather than TCP/IP communication:: create_engine("postgresql+psycopg2://user:password@/dbname") By default, the socket file used is to connect to a Unix-domain socket -in ``/tmp``, or whatever socket directory was specified when PostgreSQL +in ``/tmp``, or whatever socket directory was specified when PostgreSQL was built. This value can be overridden by passing a pathname to psycopg2, using ``host`` as an additional keyword argument:: @@ -61,11 +61,11 @@ See also: Per-Statement/Connection Execution Options ------------------------------------------- -The following DBAPI-specific options are respected when used with +The following DBAPI-specific options are respected when used with :meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`, :meth:`.Query.execution_options`, in addition to those not specific to DBAPIs: -* isolation_level - Set the transaction isolation level for the lifespan of a +* isolation_level - Set the transaction isolation level for the lifespan of a :class:`.Connection` (can only be set on a connection, not a statement or query). This includes the options ``SERIALIZABLE``, ``READ COMMITTED``, ``READ UNCOMMITTED`` and ``REPEATABLE READ``. @@ -79,8 +79,8 @@ By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE`` extension, such that the DBAPI receives and returns all strings as Python Unicode objects directly - SQLAlchemy passes these values through without change. Psycopg2 here will encode/decode string values based on the -current "client encoding" setting; by default this is the value in -the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. +current "client encoding" setting; by default this is the value in +the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. 
Typically, this can be changed to ``utf-8``, as a more useful default:: #client_encoding = sql_ascii # actually, defaults to database @@ -90,26 +90,27 @@ Typically, this can be changed to ``utf-8``, as a more useful default:: A second way to affect the client encoding is to set it within Psycopg2 locally. SQLAlchemy will call psycopg2's ``set_client_encoding()`` method (see: http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding) -on all new connections based on the value passed to +on all new connections based on the value passed to :func:`.create_engine` using the ``client_encoding`` parameter:: engine = create_engine("postgresql://user:pass@host/dbname", client_encoding='utf8') This overrides the encoding specified in the Postgresql client configuration. -The psycopg2-specific ``client_encoding`` parameter to :func:`.create_engine` is new as of -SQLAlchemy 0.7.3. + +.. versionadded:: 0.7.3 + The psycopg2-specific ``client_encoding`` parameter to :func:`.create_engine`. SQLAlchemy can also be instructed to skip the usage of the psycopg2 ``UNICODE`` extension and to instead utilize it's own unicode encode/decode -services, which are normally reserved only for those DBAPIs that don't -fully support unicode directly. Passing ``use_native_unicode=False`` +services, which are normally reserved only for those DBAPIs that don't +fully support unicode directly. Passing ``use_native_unicode=False`` to :func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``. -SQLAlchemy will instead encode data itself into Python bytestrings on the way +SQLAlchemy will instead encode data itself into Python bytestrings on the way in and coerce from bytes on the way back, -using the value of the :func:`.create_engine` ``encoding`` parameter, which +using the value of the :func:`.create_engine` ``encoding`` parameter, which defaults to ``utf-8``. 
SQLAlchemy's own unicode encode/decode functionality is steadily becoming -obsolete as more DBAPIs support unicode fully along with the approach of +obsolete as more DBAPIs support unicode fully along with the approach of Python 3; in modern usage psycopg2 should be relied upon to handle unicode. Transactions @@ -131,7 +132,7 @@ at the API level what level should be used. NOTICE logging --------------- -The psycopg2 dialect will log Postgresql NOTICE messages via the +The psycopg2 dialect will log Postgresql NOTICE messages via the ``sqlalchemy.dialects.postgresql`` logger:: import logging @@ -219,8 +220,8 @@ class PGExecutionContext_psycopg2(PGExecutionContext): (self.compiled and isinstance(self.compiled.statement, expression.Selectable) \ or \ ( - (not self.compiled or - isinstance(self.compiled.statement, expression._TextClause)) + (not self.compiled or + isinstance(self.compiled.statement, expression._TextClause)) and self.statement and SERVER_SIDE_CURSOR_RE.match(self.statement)) ) ) @@ -248,7 +249,7 @@ class PGExecutionContext_psycopg2(PGExecutionContext): def _log_notices(self, cursor): for notice in cursor.connection.notices: - # NOTICE messages have a + # NOTICE messages have a # newline character at the end logger.info(notice.rstrip()) @@ -290,7 +291,7 @@ class PGDialect_psycopg2(PGDialect): } ) - def __init__(self, server_side_cursors=False, use_native_unicode=True, + def __init__(self, server_side_cursors=False, use_native_unicode=True, client_encoding=None, **kwargs): PGDialect.__init__(self, **kwargs) self.server_side_cursors = server_side_cursors @@ -298,12 +299,12 @@ class PGDialect_psycopg2(PGDialect): self.supports_unicode_binds = use_native_unicode self.client_encoding = client_encoding if self.dbapi and hasattr(self.dbapi, '__version__'): - m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', + m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', self.dbapi.__version__) if m: self.psycopg2_version = tuple( - int(x) - for x in m.group(1, 2, 3) + int(x) + for x in 
m.group(1, 2, 3) if x is not None) @classmethod @@ -315,8 +316,8 @@ class PGDialect_psycopg2(PGDialect): def _isolation_lookup(self): extensions = __import__('psycopg2.extensions').extensions return { - 'READ COMMITTED':extensions.ISOLATION_LEVEL_READ_COMMITTED, - 'READ UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, + 'READ COMMITTED':extensions.ISOLATION_LEVEL_READ_COMMITTED, + 'READ UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, 'REPEATABLE READ':extensions.ISOLATION_LEVEL_REPEATABLE_READ, 'SERIALIZABLE':extensions.ISOLATION_LEVEL_SERIALIZABLE } @@ -327,9 +328,9 @@ class PGDialect_psycopg2(PGDialect): except KeyError: raise exc.ArgumentError( "Invalid value '%s' for isolation_level. " - "Valid isolation levels for %s are %s" % + "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) connection.set_isolation_level(level) @@ -369,9 +370,10 @@ class PGDialect_psycopg2(PGDialect): def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.OperationalError): # these error messages from libpq: interfaces/libpq/fe-misc.c. - # TODO: these are sent through gettext in libpq and we can't - # check within other locales - consider using connection.closed - return 'closed the connection' in str(e) or \ + # TODO: these are sent through gettext in libpq and we can't + # check within other locales - consider using connection.closed + return 'terminating connection' in str(e) or \ + 'closed the connection' in str(e) or \ 'connection not open' in str(e) or \ 'could not receive data from server' in str(e) elif isinstance(e, self.dbapi.InterfaceError): @@ -379,7 +381,7 @@ class PGDialect_psycopg2(PGDialect): return 'connection already closed' in str(e) or \ 'cursor already closed' in str(e) elif isinstance(e, self.dbapi.ProgrammingError): - # not sure where this path is originally from, it may + # not sure where this path is originally from, it may # be obsolete. 
It really says "losed", not "closed". return "losed the connection unexpectedly" in str(e) else: diff --git a/libs/sqlalchemy/dialects/postgresql/pypostgresql.py b/libs/sqlalchemy/dialects/postgresql/pypostgresql.py index e902c684..5303d047 100644 --- a/libs/sqlalchemy/dialects/postgresql/pypostgresql.py +++ b/libs/sqlalchemy/dialects/postgresql/pypostgresql.py @@ -1,5 +1,5 @@ # postgresql/pypostgresql.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -40,7 +40,7 @@ class PGDialect_pypostgresql(PGDialect): default_paramstyle = 'pyformat' # requires trunk version to support sane rowcounts - # TODO: use dbapi version information to set this flag appropariately + # TODO: use dbapi version information to set this flag appropriately supports_sane_rowcount = True supports_sane_multi_rowcount = False diff --git a/libs/sqlalchemy/dialects/postgresql/zxjdbc.py b/libs/sqlalchemy/dialects/postgresql/zxjdbc.py index 17969954..4aea9c9b 100644 --- a/libs/sqlalchemy/dialects/postgresql/zxjdbc.py +++ b/libs/sqlalchemy/dialects/postgresql/zxjdbc.py @@ -1,5 +1,5 @@ # postgresql/zxjdbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/sqlite/__init__.py b/libs/sqlalchemy/dialects/sqlite/__init__.py index 0958c813..c1157b63 100644 --- a/libs/sqlalchemy/dialects/sqlite/__init__.py +++ b/libs/sqlalchemy/dialects/sqlite/__init__.py @@ -1,5 +1,5 @@ # sqlite/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is 
part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/sqlite/base.py b/libs/sqlalchemy/dialects/sqlite/base.py index 10a0d882..9118ace2 100644 --- a/libs/sqlalchemy/dialects/sqlite/base.py +++ b/libs/sqlalchemy/dialects/sqlite/base.py @@ -1,5 +1,5 @@ # sqlite/base.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,7 +12,7 @@ section regarding that driver. Date and Time Types ------------------- -SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide +SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide out of the box functionality for translating values between Python `datetime` objects and a SQLite-supported format. SQLAlchemy's own :class:`~sqlalchemy.types.DateTime` and related types provide date formatting and parsing functionality when SQlite is used. @@ -36,23 +36,91 @@ Two things to note: This is regardless of the AUTOINCREMENT keyword being present or not. To specifically render the AUTOINCREMENT keyword on the primary key -column when rendering DDL, add the flag ``sqlite_autoincrement=True`` +column when rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table construct:: Table('sometable', metadata, - Column('id', Integer, primary_key=True), + Column('id', Integer, primary_key=True), sqlite_autoincrement=True) Transaction Isolation Level --------------------------- -:func:`.create_engine` accepts an ``isolation_level`` parameter which results in -the command ``PRAGMA read_uncommitted `` being invoked for every new -connection. 
Valid values for this parameter are ``SERIALIZABLE`` and +:func:`.create_engine` accepts an ``isolation_level`` parameter which results in +the command ``PRAGMA read_uncommitted `` being invoked for every new +connection. Valid values for this parameter are ``SERIALIZABLE`` and ``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively. See the section :ref:`pysqlite_serializable` for an important workaround when using serializable isolation with Pysqlite. +Database Locking Behavior / Concurrency +--------------------------------------- + +Note that SQLite is not designed for a high level of concurrency. The database +itself, being a file, is locked completely during write operations and within +transactions, meaning exactly one connection has exclusive access to the database +during this period - all other connections will be blocked during this time. + +The Python DBAPI specification also calls for a connection model that is always +in a transaction; there is no BEGIN method, only commit and rollback. This implies +that a SQLite DBAPI driver would technically allow only serialized access to a +particular database file at all times. The pysqlite driver attempts to ameliorate this by +deferring the actual BEGIN statement until the first DML (INSERT, UPDATE, or +DELETE) is received within a transaction. While this breaks serializable isolation, +it at least delays the exclusive locking inherent in SQLite's design. + +SQLAlchemy's default mode of usage with the ORM is known +as "autocommit=False", which means the moment the :class:`.Session` begins to be +used, a transaction is begun. As the :class:`.Session` is used, the autoflush +feature, also on by default, will flush out pending changes to the database +before each query. The effect of this is that a :class:`.Session` used in its +default mode will often emit DML early on, long before the transaction is actually +committed. 
This again will have the effect of serializing access to the SQLite +database. If highly concurrent reads are desired against the SQLite database, +it is advised that the autoflush feature be disabled, and potentially even +that autocommit be re-enabled, which has the effect of each SQL statement and +flush committing changes immediately. + +For more information on SQLite's lack of concurrency by design, please +see `Situations Where Another RDBMS May Work Better - High Concurrency `_ +near the bottom of the page. + +.. _sqlite_foreign_keys: + +Foreign Key Support +------------------- + +SQLite supports FOREIGN KEY syntax when emitting CREATE statements for tables, +however by default these constraints have no effect on the operation +of the table. + +Constraint checking on SQLite has three prerequisites: + +* At least version 3.6.19 of SQLite must be in use +* The SQLite libary must be compiled *without* the SQLITE_OMIT_FOREIGN_KEY + or SQLITE_OMIT_TRIGGER symbols enabled. +* The ``PRAGMA foreign_keys = ON`` statement must be emitted on all connections + before use. + +SQLAlchemy allows for the ``PRAGMA`` statement to be emitted automatically +for new connections through the usage of events:: + + from sqlalchemy.engine import Engine + from sqlalchemy import event + + @event.listens_for(Engine, "connect") + def set_sqlite_pragma(dbapi_connection, connection_record): + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() + +.. seealso:: + + `SQLite Foreign Key Support `_ - + on the SQLite web site. + + :ref:`event_toplevel` - SQLAlchemy event API. + """ import datetime, re @@ -80,36 +148,36 @@ class _DateTimeMixin(object): class DATETIME(_DateTimeMixin, sqltypes.DateTime): """Represent a Python datetime object in SQLite using a string. 
- + The default string storage format is:: - - "%04d-%02d-%02d %02d:%02d:%02d.%06d" % (value.year, + + "%04d-%02d-%02d %02d:%02d:%02d.%06d" % (value.year, value.month, value.day, - value.hour, value.minute, + value.hour, value.minute, value.second, value.microsecond) - + e.g.:: - + 2011-03-15 12:05:57.10558 - - The storage format can be customized to some degree using the + + The storage format can be customized to some degree using the ``storage_format`` and ``regexp`` parameters, such as:: - + import re from sqlalchemy.dialects.sqlite import DATETIME - + dt = DATETIME( storage_format="%04d/%02d/%02d %02d-%02d-%02d-%06d", regexp=re.compile("(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)(?:-(\d+))?") ) - - :param storage_format: format string which will be appled to the + + :param storage_format: format string which will be applied to the tuple ``(value.year, value.month, value.day, value.hour, value.minute, value.second, value.microsecond)``, given a Python datetime.datetime() object. - - :param regexp: regular expression which will be applied to - incoming result rows. The resulting match object is appled to + + :param regexp: regular expression which will be applied to + incoming result rows. The resulting match object is applied to the Python datetime() constructor via ``*map(int, match_obj.groups(0))``. """ @@ -146,16 +214,16 @@ class DATE(_DateTimeMixin, sqltypes.Date): """Represent a Python date object in SQLite using a string. 
The default string storage format is:: - + "%04d-%02d-%02d" % (value.year, value.month, value.day) - + e.g.:: - + 2011-03-15 - - The storage format can be customized to some degree using the + + The storage format can be customized to some degree using the ``storage_format`` and ``regexp`` parameters, such as:: - + import re from sqlalchemy.dialects.sqlite import DATE @@ -163,16 +231,16 @@ class DATE(_DateTimeMixin, sqltypes.Date): storage_format="%02d/%02d/%02d", regexp=re.compile("(\d+)/(\d+)/(\d+)") ) - - :param storage_format: format string which will be appled to the + + :param storage_format: format string which will be applied to the tuple ``(value.year, value.month, value.day)``, given a Python datetime.date() object. - - :param regexp: regular expression which will be applied to - incoming result rows. The resulting match object is appled to + + :param regexp: regular expression which will be applied to + incoming result rows. The resulting match object is applied to the Python date() constructor via ``*map(int, match_obj.groups(0))``. - + """ _storage_format = "%04d-%02d-%02d" @@ -199,20 +267,20 @@ class DATE(_DateTimeMixin, sqltypes.Date): class TIME(_DateTimeMixin, sqltypes.Time): """Represent a Python time object in SQLite using a string. 
- + The default string storage format is:: - - "%02d:%02d:%02d.%06d" % (value.hour, value.minute, + + "%02d:%02d:%02d.%06d" % (value.hour, value.minute, value.second, value.microsecond) - + e.g.:: - + 12:05:57.10558 - - The storage format can be customized to some degree using the + + The storage format can be customized to some degree using the ``storage_format`` and ``regexp`` parameters, such as:: - + import re from sqlalchemy.dialects.sqlite import TIME @@ -220,13 +288,13 @@ class TIME(_DateTimeMixin, sqltypes.Time): storage_format="%02d-%02d-%02d-%06d", regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?") ) - - :param storage_format: format string which will be appled + + :param storage_format: format string which will be applied to the tuple ``(value.hour, value.minute, value.second, value.microsecond)``, given a Python datetime.time() object. - - :param regexp: regular expression which will be applied to - incoming result rows. The resulting match object is appled to + + :param regexp: regular expression which will be applied to + incoming result rows. The resulting match object is applied to the Python time() constructor via ``*map(int, match_obj.groups(0))``. @@ -302,6 +370,9 @@ class SQLiteCompiler(compiler.SQLCompiler): def visit_now_func(self, fn, **kw): return "CURRENT_TIMESTAMP" + def visit_localtimestamp_func(self, func, **kw): + return 'DATETIME(CURRENT_TIMESTAMP, "localtime")' + def visit_true(self, expr, **kw): return '1' @@ -373,7 +444,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler): issubclass(c.type._type_affinity, sqltypes.Integer) and \ not c.foreign_keys: return None - + return super(SQLiteDDLCompiler, self).\ visit_primary_key_constraint(constraint) @@ -441,6 +512,22 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): result = self.quote_schema(index.table.schema, index.table.quote_schema) + "." 
+ result return result +class SQLiteExecutionContext(default.DefaultExecutionContext): + @util.memoized_property + def _preserve_raw_colnames(self): + return self.execution_options.get("sqlite_raw_colnames", False) + + def _translate_colname(self, colname): + # adjust for dotted column names. SQLite + # in the case of UNION may store col names as + # "tablename.colname" + # in cursor.description + if not self._preserve_raw_colnames and "." in colname: + return colname.split(".")[1], colname + else: + return colname, None + + class SQLiteDialect(default.DefaultDialect): name = 'sqlite' supports_alter = False @@ -451,6 +538,7 @@ class SQLiteDialect(default.DefaultDialect): supports_cast = True default_paramstyle = 'qmark' + execution_ctx_cls = SQLiteExecutionContext statement_compiler = SQLiteCompiler ddl_compiler = SQLiteDDLCompiler type_compiler = SQLiteTypeCompiler @@ -462,13 +550,15 @@ class SQLiteDialect(default.DefaultDialect): supports_cast = True supports_default_values = True + _broken_fk_pragma_quotes = False + def __init__(self, isolation_level=None, native_datetime=False, **kwargs): default.DefaultDialect.__init__(self, **kwargs) self.isolation_level = isolation_level # this flag used by pysqlite dialect, and perhaps others in the # future, to indicate the driver is handling date/timestamp - # conversions (and perhaps datetime/time as well on some + # conversions (and perhaps datetime/time as well on some # hypothetical driver ?) 
self.native_datetime = native_datetime @@ -478,6 +568,12 @@ class SQLiteDialect(default.DefaultDialect): self.supports_cast = \ self.dbapi.sqlite_version_info >= (3, 2, 3) + # see http://www.sqlalchemy.org/trac/ticket/2568 + # as well as http://www.sqlite.org/src/info/600482d161 + self._broken_fk_pragma_quotes = \ + self.dbapi.sqlite_version_info < (3, 6, 14) + + _isolation_lookup = { 'READ UNCOMMITTED':1, 'SERIALIZABLE':0 @@ -488,9 +584,9 @@ class SQLiteDialect(default.DefaultDialect): except KeyError: raise exc.ArgumentError( "Invalid value '%s' for isolation_level. " - "Valid isolation levels for %s are %s" % + "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) cursor = connection.cursor() cursor.execute("PRAGMA read_uncommitted = %d" % isolation_level) cursor.close() @@ -501,11 +597,11 @@ class SQLiteDialect(default.DefaultDialect): res = cursor.fetchone() if res: value = res[0] - else: + else: # http://www.sqlite.org/changes.html#version_3_3_3 - # "Optional READ UNCOMMITTED isolation (instead of the - # default isolation level of SERIALIZABLE) and - # table level locking when database connections + # "Optional READ UNCOMMITTED isolation (instead of the + # default isolation level of SERIALIZABLE) and + # table level locking when database connections # share a common cache."" # pre-SQLite 3.3.0 default to 0 value = 0 @@ -525,16 +621,6 @@ class SQLiteDialect(default.DefaultDialect): else: return None - def _translate_colname(self, colname): - # adjust for dotted column names. SQLite - # in the case of UNION may store col names as - # "tablename.colname" - # in cursor.description - if "." 
in colname: - return colname.split(".")[1], colname - else: - return colname, None - @reflection.cache def get_table_names(self, connection, schema=None, **kw): if schema is not None: @@ -631,45 +717,52 @@ class SQLiteDialect(default.DefaultDialect): pragma = "PRAGMA " qtable = quote(table_name) c = _pragma_cursor( - connection.execute("%stable_info(%s)" % + connection.execute("%stable_info(%s)" % (pragma, qtable))) - found_table = False - columns = [] - while True: - row = c.fetchone() - if row is None: - break - (name, type_, nullable, default, has_default, primary_key) = \ - (row[1], row[2].upper(), not row[3], - row[4], row[4] is not None, row[5]) - name = re.sub(r'^\"|\"$', '', name) - match = re.match(r'(\w+)(\(.*?\))?', type_) - if match: - coltype = match.group(1) - args = match.group(2) - else: - coltype = "VARCHAR" - args = '' - try: - coltype = self.ischema_names[coltype] - if args is not None: - args = re.findall(r'(\d+)', args) - coltype = coltype(*[int(a) for a in args]) - except KeyError: - util.warn("Did not recognize type '%s' of column '%s'" % - (coltype, name)) - coltype = sqltypes.NullType() - columns.append({ - 'name' : name, - 'type' : coltype, - 'nullable' : nullable, - 'default' : default, - 'autoincrement':default is None, - 'primary_key': primary_key - }) + rows = c.fetchall() + columns = [] + for row in rows: + (name, type_, nullable, default, primary_key) = \ + (row[1], row[2].upper(), not row[3], + row[4], row[5]) + + columns.append(self._get_column_info(name, type_, nullable, + default, primary_key)) return columns + def _get_column_info(self, name, type_, nullable, + default, primary_key): + + match = re.match(r'(\w+)(\(.*?\))?', type_) + if match: + coltype = match.group(1) + args = match.group(2) + else: + coltype = "VARCHAR" + args = '' + try: + coltype = self.ischema_names[coltype] + if args is not None: + args = re.findall(r'(\d+)', args) + coltype = coltype(*[int(a) for a in args]) + except KeyError: + util.warn("Did not 
recognize type '%s' of column '%s'" % + (coltype, name)) + coltype = sqltypes.NullType() + + if default is not None: + default = unicode(default) + + return { + 'name': name, + 'type': coltype, + 'nullable': nullable, + 'default': default, + 'autoincrement': default is None, + 'primary_key': primary_key + } + @reflection.cache def get_primary_keys(self, connection, table_name, schema=None, **kw): cols = self.get_columns(connection, table_name, schema, **kw) @@ -687,7 +780,8 @@ class SQLiteDialect(default.DefaultDialect): else: pragma = "PRAGMA " qtable = quote(table_name) - c = _pragma_cursor(connection.execute("%sforeign_key_list(%s)" % (pragma, qtable))) + statement = "%sforeign_key_list(%s)" % (pragma, qtable) + c = _pragma_cursor(connection.execute(statement)) fkeys = [] fks = {} while True: @@ -695,34 +789,38 @@ class SQLiteDialect(default.DefaultDialect): if row is None: break (numerical_id, rtbl, lcol, rcol) = (row[0], row[2], row[3], row[4]) - # sqlite won't return rcol if the table - # was created with REFERENCES , no col - if rcol is None: - rcol = lcol - rtbl = re.sub(r'^\"|\"$', '', rtbl) - lcol = re.sub(r'^\"|\"$', '', lcol) - rcol = re.sub(r'^\"|\"$', '', rcol) - try: - fk = fks[numerical_id] - except KeyError: - fk = { - 'name' : None, - 'constrained_columns' : [], - 'referred_schema' : None, - 'referred_table' : rtbl, - 'referred_columns' : [] - } - fkeys.append(fk) - fks[numerical_id] = fk - # look up the table based on the given table's engine, not 'self', - # since it could be a ProxyEngine - if lcol not in fk['constrained_columns']: - fk['constrained_columns'].append(lcol) - if rcol not in fk['referred_columns']: - fk['referred_columns'].append(rcol) + self._parse_fk(fks, fkeys, numerical_id, rtbl, lcol, rcol) return fkeys + def _parse_fk(self, fks, fkeys, numerical_id, rtbl, lcol, rcol): + # sqlite won't return rcol if the table + # was created with REFERENCES , no col + if rcol is None: + rcol = lcol + + if self._broken_fk_pragma_quotes: + 
rtbl = re.sub(r'^[\"\[`\']|[\"\]`\']$', '', rtbl) + + try: + fk = fks[numerical_id] + except KeyError: + fk = { + 'name': None, + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': rtbl, + 'referred_columns': [] + } + fkeys.append(fk) + fks[numerical_id] = fk + + if lcol not in fk['constrained_columns']: + fk['constrained_columns'].append(lcol) + if rcol not in fk['referred_columns']: + fk['referred_columns'].append(rcol) + return fk + @reflection.cache def get_indexes(self, connection, table_name, schema=None, **kw): quote = self.identifier_preparer.quote_identifier @@ -757,9 +855,10 @@ class SQLiteDialect(default.DefaultDialect): def _pragma_cursor(cursor): - """work around SQLite issue whereby cursor.description + """work around SQLite issue whereby cursor.description is blank when PRAGMA returns no rows.""" if cursor.closed: cursor.fetchone = lambda: None + cursor.fetchall = lambda: [] return cursor diff --git a/libs/sqlalchemy/dialects/sqlite/pysqlite.py b/libs/sqlalchemy/dialects/sqlite/pysqlite.py index f4c3338d..826eefd8 100644 --- a/libs/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/libs/sqlalchemy/dialects/sqlite/pysqlite.py @@ -1,5 +1,5 @@ # sqlite/pysqlite.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,15 +12,15 @@ module included with the Python distribution. Driver ------ -When using Python 2.5 and above, the built in ``sqlite3`` driver is +When using Python 2.5 and above, the built in ``sqlite3`` driver is already installed and no additional installation is needed. Otherwise, the ``pysqlite2`` driver needs to be present. This is the same driver as ``sqlite3``, just with a different name. The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3`` is loaded. 
This allows an explicitly installed pysqlite driver to take -precedence over the built in one. As with all dialects, a specific -DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control +precedence over the built in one. As with all dialects, a specific +DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control this explicitly:: from sqlite3 import dbapi2 as sqlite @@ -64,25 +64,25 @@ The sqlite ``:memory:`` identifier is the default if no filepath is present. Sp Compatibility with sqlite3 "native" date and datetime types ----------------------------------------------------------- -The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and +The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and sqlite3.PARSE_COLNAMES options, which have the effect of any column or expression explicitly cast as "date" or "timestamp" will be converted -to a Python date or datetime object. The date and datetime types provided -with the pysqlite dialect are not currently compatible with these options, -since they render the ISO date/datetime including microseconds, which +to a Python date or datetime object. The date and datetime types provided +with the pysqlite dialect are not currently compatible with these options, +since they render the ISO date/datetime including microseconds, which pysqlite's driver does not. Additionally, SQLAlchemy does not at -this time automatically render the "cast" syntax required for the +this time automatically render the "cast" syntax required for the freestanding functions "current_timestamp" and "current_date" to return -datetime/date types natively. Unfortunately, pysqlite +datetime/date types natively. Unfortunately, pysqlite does not provide the standard DBAPI types in ``cursor.description``, -leaving SQLAlchemy with no way to detect these types on the fly +leaving SQLAlchemy with no way to detect these types on the fly without expensive per-row type checks. 
Keeping in mind that pysqlite's parsing option is not recommended, -nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES +nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES can be forced if one configures "native_datetime=True" on create_engine():: - engine = create_engine('sqlite://', + engine = create_engine('sqlite://', connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES}, native_datetime=True ) @@ -97,37 +97,40 @@ Threading/Pooling Behavior --------------------------- Pysqlite's default behavior is to prohibit the usage of a single connection -in more than one thread. This is controlled by the ``check_same_thread`` -Pysqlite flag. This default is intended to work with older versions -of SQLite that did not support multithreaded operation under +in more than one thread. This is originally intended to work with older versions +of SQLite that did not support multithreaded operation under various circumstances. In particular, older SQLite versions did not allow a ``:memory:`` database to be used in multiple threads under any circumstances. +Pysqlite does include a now-undocumented flag known as +``check_same_thread`` which will disable this check, however note that pysqlite +connections are still not safe to use in concurrently in multiple threads. +In particular, any statement execution calls would need to be externally +mutexed, as Pysqlite does not provide for thread-safe propagation of error +messages among other things. So while even ``:memory:`` databases can be +shared among threads in modern SQLite, Pysqlite doesn't provide enough +thread-safety to make this usage worth it. + SQLAlchemy sets up pooling to work with Pysqlite's default behavior: * When a ``:memory:`` SQLite database is specified, the dialect by default will use :class:`.SingletonThreadPool`. 
This pool maintains a single connection per thread, so that all access to the engine within the current thread use the - same ``:memory:`` database - other threads would access a different + same ``:memory:`` database - other threads would access a different ``:memory:`` database. -* When a file-based database is specified, the dialect will use :class:`.NullPool` +* When a file-based database is specified, the dialect will use :class:`.NullPool` as the source of connections. This pool closes and discards connections which are returned to the pool immediately. SQLite file-based connections have extremely low overhead, so pooling is not necessary. The scheme also prevents a connection from being used again in a different thread and works best with SQLite's coarse-grained file locking. - .. note:: - - The default selection of :class:`.NullPool` for SQLite file-based databases - is new in SQLAlchemy 0.7. Previous versions - select :class:`.SingletonThreadPool` by - default for all SQLite databases. + .. versionchanged:: 0.7 + Default selection of :class:`.NullPool` for SQLite file-based databases. + Previous versions select :class:`.SingletonThreadPool` by + default for all SQLite databases. -Modern versions of SQLite no longer have the threading restrictions, and assuming -the sqlite3/pysqlite library was built with SQLite's default threading mode -of "Serialized", even ``:memory:`` databases can be shared among threads. Using a Memory Database in Multiple Threads ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -143,7 +146,7 @@ can be passed to Pysqlite as ``False``:: connect_args={'check_same_thread':False}, poolclass=StaticPool) -Note that using a ``:memory:`` database in multiple threads requires a recent +Note that using a ``:memory:`` database in multiple threads requires a recent version of SQLite. 
Using Temporary Tables with SQLite @@ -177,8 +180,8 @@ Unicode The pysqlite driver only returns Python ``unicode`` objects in result sets, never plain strings, and accommodates ``unicode`` objects within bound parameter -values in all cases. Regardless of the SQLAlchemy string type in use, -string-based result values will by Python ``unicode`` in Python 2. +values in all cases. Regardless of the SQLAlchemy string type in use, +string-based result values will be Python ``unicode`` in Python 2. The :class:`.Unicode` type should still be used to indicate those columns that require unicode, however, so that non-``unicode`` values passed inadvertently will emit a warning. Pysqlite will emit an error if a non-``unicode`` string @@ -193,7 +196,7 @@ The pysqlite DBAPI driver has a long-standing bug in which transactional state is not begun until the first DML statement, that is INSERT, UPDATE or DELETE, is emitted. A SELECT statement will not cause transactional state to begin. While this mode of usage is fine for typical situations -and has the advantage that the SQLite database file is not prematurely +and has the advantage that the SQLite database file is not prematurely locked, it breaks serializable transaction isolation, which requires that the database file be locked upon any SQL being emitted.
diff --git a/libs/sqlalchemy/dialects/sybase/__init__.py b/libs/sqlalchemy/dialects/sybase/__init__.py index 7a91ca67..528ebf23 100644 --- a/libs/sqlalchemy/dialects/sybase/__init__.py +++ b/libs/sqlalchemy/dialects/sybase/__init__.py @@ -1,5 +1,5 @@ # sybase/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/sybase/base.py b/libs/sqlalchemy/dialects/sybase/base.py index 4b8cc08b..f551bff9 100644 --- a/libs/sqlalchemy/dialects/sybase/base.py +++ b/libs/sqlalchemy/dialects/sybase/base.py @@ -1,8 +1,8 @@ # sybase/base.py # Copyright (C) 2010-2011 the SQLAlchemy authors and contributors # get_select_precolumns(), limit_clause() implementation -# copyright (C) 2007 Fisch Asset Management -# AG http://www.fam.ch, with coding by Alexander Houben +# copyright (C) 2007 Fisch Asset Management +# AG http://www.fam.ch, with coding by Alexander Houben # alexander.houben@thor-solutions.ch # # This module is part of SQLAlchemy and is released under @@ -10,8 +10,12 @@ """Support for Sybase Adaptive Server Enterprise (ASE). -Note that this dialect is no longer specific to Sybase iAnywhere. -ASE is the primary support platform. +.. note:: + + The Sybase dialect functions on current SQLAlchemy versions + but is not regularly tested, and may have many issues and + caveats not currently handled. In particular, the table + and database reflection features are not implemented. 
""" @@ -126,7 +130,7 @@ class UNIQUEIDENTIFIER(sqltypes.TypeEngine): class IMAGE(sqltypes.LargeBinary): __visit_name__ = 'IMAGE' - + class SybaseTypeCompiler(compiler.GenericTypeCompiler): def visit_large_binary(self, type_): @@ -220,12 +224,12 @@ class SybaseExecutionContext(default.DefaultExecutionContext): self._enable_identity_insert = False if self._enable_identity_insert: - self.cursor.execute("SET IDENTITY_INSERT %s ON" % + self.cursor.execute("SET IDENTITY_INSERT %s ON" % self.dialect.identifier_preparer.format_table(tbl)) if self.isddl: # TODO: to enhance this, we can detect "ddl in tran" on the - # database settings. this error message should be improved to + # database settings. this error message should be improved to # include a note about that. if not self.should_autocommit: raise exc.InvalidRequestError( @@ -236,7 +240,7 @@ class SybaseExecutionContext(default.DefaultExecutionContext): "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')") self.set_ddl_autocommit( - self.root_connection.connection.connection, + self.root_connection.connection.connection, True) @@ -300,7 +304,7 @@ class SybaseSQLCompiler(compiler.SQLCompiler): field, self.process(extract.expr, **kw)) def for_update_clause(self, select): - # "FOR UPDATE" is only allowed on "DECLARE CURSOR" + # "FOR UPDATE" is only allowed on "DECLARE CURSOR" # which SQLAlchemy doesn't use return '' diff --git a/libs/sqlalchemy/dialects/sybase/mxodbc.py b/libs/sqlalchemy/dialects/sybase/mxodbc.py index f88b1ed3..db60b9b2 100644 --- a/libs/sqlalchemy/dialects/sybase/mxodbc.py +++ b/libs/sqlalchemy/dialects/sybase/mxodbc.py @@ -1,5 +1,5 @@ # sybase/mxodbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/dialects/sybase/pyodbc.py 
b/libs/sqlalchemy/dialects/sybase/pyodbc.py index 35d8d154..8e3729b3 100644 --- a/libs/sqlalchemy/dialects/sybase/pyodbc.py +++ b/libs/sqlalchemy/dialects/sybase/pyodbc.py @@ -1,5 +1,5 @@ # sybase/pyodbc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -17,7 +17,7 @@ Connect strings are of the form:: Unicode Support --------------- -The pyodbc driver currently supports usage of these Sybase types with +The pyodbc driver currently supports usage of these Sybase types with Unicode or multibyte strings:: CHAR @@ -43,7 +43,7 @@ from sqlalchemy.util.compat import decimal class _SybNumeric_pyodbc(sqltypes.Numeric): """Turns Decimals with adjusted() < -6 into floats. - It's not yet known how to get decimals with many + It's not yet known how to get decimals with many significant digits or very large adjusted() into Sybase via pyodbc. 
diff --git a/libs/sqlalchemy/dialects/sybase/pysybase.py b/libs/sqlalchemy/dialects/sybase/pysybase.py index e12cf07d..bf8c2096 100644 --- a/libs/sqlalchemy/dialects/sybase/pysybase.py +++ b/libs/sqlalchemy/dialects/sybase/pysybase.py @@ -38,7 +38,7 @@ class SybaseExecutionContext_pysybase(SybaseExecutionContext): def set_ddl_autocommit(self, dbapi_connection, value): if value: # call commit() on the Sybase connection directly, - # to avoid any side effects of calling a Connection + # to avoid any side effects of calling a Connection # transactional method inside of pre_exec() dbapi_connection.commit() @@ -52,7 +52,7 @@ class SybaseExecutionContext_pysybase(SybaseExecutionContext): class SybaseSQLCompiler_pysybase(SybaseSQLCompiler): - def bindparam_string(self, name): + def bindparam_string(self, name, **kw): return "@" + name class SybaseDialect_pysybase(SybaseDialect): @@ -82,10 +82,10 @@ class SybaseDialect_pysybase(SybaseDialect): cursor.execute(statement, param) def _get_server_version_info(self, connection): - vers = connection.scalar("select @@version_number") - # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0), - # (12, 5, 0, 0) - return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10) + vers = connection.scalar("select @@version_number") + # i.e. 
15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0), + # (12, 5, 0, 0) + return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10) def is_disconnect(self, e, connection, cursor): if isinstance(e, (self.dbapi.OperationalError, diff --git a/libs/sqlalchemy/engine/__init__.py b/libs/sqlalchemy/engine/__init__.py index 23b4b0b3..6ff8ba15 100644 --- a/libs/sqlalchemy/engine/__init__.py +++ b/libs/sqlalchemy/engine/__init__.py @@ -1,5 +1,5 @@ # engine/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -101,8 +101,8 @@ default_strategy = 'plain' def create_engine(*args, **kwargs): """Create a new :class:`.Engine` instance. - The standard calling form is to send the URL as the - first positional argument, usually a string + The standard calling form is to send the URL as the + first positional argument, usually a string that indicates database dialect and connection arguments. Additional keyword arguments may then follow it which establish various options on the resulting :class:`.Engine` @@ -111,14 +111,14 @@ def create_engine(*args, **kwargs): The string form of the URL is ``dialect+driver://user:password@host/dbname[?key=value..]``, where - ``dialect`` is a database name such as ``mysql``, ``oracle``, - ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as - ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively, + ``dialect`` is a database name such as ``mysql``, ``oracle``, + ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as + ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively, the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`. - ``**kwargs`` takes a wide variety of options which are routed - towards their appropriate components. 
Arguments may be - specific to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the + ``**kwargs`` takes a wide variety of options which are routed + towards their appropriate components. Arguments may be + specific to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the :class:`.Pool`. Specific dialects also accept keyword arguments that are unique to that dialect. Here, we describe the parameters that are common to most :func:`.create_engine()` usage. @@ -136,11 +136,11 @@ def create_engine(*args, **kwargs): :ref:`engines_toplevel` :ref:`connections_toplevel` - + :param assert_unicode: Deprecated. This flag sets an engine-wide default value for - the ``assert_unicode`` flag on the - :class:`.String` type - see that + the ``assert_unicode`` flag on the + :class:`.String` type - see that type for further details. :param connect_args: a dictionary of options which will be @@ -151,16 +151,16 @@ def create_engine(*args, **kwargs): :param convert_unicode=False: if set to True, sets the default behavior of ``convert_unicode`` on the :class:`.String` type to ``True``, regardless - of a setting of ``False`` on an individual + of a setting of ``False`` on an individual :class:`.String` type, thus causing all :class:`.String` -based columns to accommodate Python ``unicode`` objects. This flag - is useful as an engine-wide setting when using a + is useful as an engine-wide setting when using a DBAPI that does not natively support Python ``unicode`` objects and raises an error when one is received (such as pyodbc with FreeTDS). - - See :class:`.String` for further details on + + See :class:`.String` for further details on what this flag indicates. :param creator: a callable which returns a DBAPI connection. @@ -184,43 +184,43 @@ def create_engine(*args, **kwargs): :ref:`dbengine_logging` for information on how to configure logging directly. - :param encoding: Defaults to ``utf-8``. 
This is the string - encoding used by SQLAlchemy for string encode/decode - operations which occur within SQLAlchemy, **outside of - the DBAPI.** Most modern DBAPIs feature some degree of + :param encoding: Defaults to ``utf-8``. This is the string + encoding used by SQLAlchemy for string encode/decode + operations which occur within SQLAlchemy, **outside of + the DBAPI.** Most modern DBAPIs feature some degree of direct support for Python ``unicode`` objects, what you see in Python 2 as a string of the form - ``u'some string'``. For those scenarios where the + ``u'some string'``. For those scenarios where the DBAPI is detected as not supporting a Python ``unicode`` - object, this encoding is used to determine the + object, this encoding is used to determine the source/destination encoding. It is **not used** for those cases where the DBAPI handles unicode directly. - + To properly configure a system to accommodate Python - ``unicode`` objects, the DBAPI should be + ``unicode`` objects, the DBAPI should be configured to handle unicode to the greatest degree as is appropriate - see the notes on unicode pertaining to the specific - target database in use at :ref:`dialect_toplevel`. - - Areas where string encoding may need to be accommodated - outside of the DBAPI include zero or more of: - - * the values passed to bound parameters, corresponding to + target database in use at :ref:`dialect_toplevel`. 
+ + Areas where string encoding may need to be accommodated + outside of the DBAPI include zero or more of: + + * the values passed to bound parameters, corresponding to the :class:`.Unicode` type or the :class:`.String` type when ``convert_unicode`` is ``True``; - * the values returned in result set columns corresponding - to the :class:`.Unicode` type or the :class:`.String` + * the values returned in result set columns corresponding + to the :class:`.Unicode` type or the :class:`.String` type when ``convert_unicode`` is ``True``; - * the string SQL statement passed to the DBAPI's - ``cursor.execute()`` method; - * the string names of the keys in the bound parameter - dictionary passed to the DBAPI's ``cursor.execute()`` + * the string SQL statement passed to the DBAPI's + ``cursor.execute()`` method; + * the string names of the keys in the bound parameter + dictionary passed to the DBAPI's ``cursor.execute()`` as well as ``cursor.setinputsizes()`` methods; - * the string column names retrieved from the DBAPI's + * the string column names retrieved from the DBAPI's ``cursor.description`` attribute. - + When using Python 3, the DBAPI is required to support *all* of the above values as Python ``unicode`` objects, which in Python 3 are just known as ``str``. In Python 2, @@ -236,9 +236,9 @@ def create_engine(*args, **kwargs): :param implicit_returning=True: When ``True``, a RETURNING- compatible construct, if available, will be used to fetch newly generated primary key values when a single row - INSERT statement is emitted with no existing returning() - clause. This applies to those backends which support RETURNING - or a compatible construct, including Postgresql, Firebird, Oracle, + INSERT statement is emitted with no existing returning() + clause. This applies to those backends which support RETURNING + or a compatible construct, including Postgresql, Firebird, Oracle, Microsoft SQL Server. Set this to ``False`` to disable the automatic usage of RETURNING. 
@@ -248,13 +248,13 @@ def create_engine(*args, **kwargs): "_(counter)". If ``None``, the value of ``dialect.max_identifier_length`` is used instead. - :param listeners: A list of one or more - :class:`~sqlalchemy.interfaces.PoolListener` objects which will + :param listeners: A list of one or more + :class:`~sqlalchemy.interfaces.PoolListener` objects which will receive connection pool events. :param logging_name: String identifier which will be used within the "name" field of logging records generated within the - "sqlalchemy.engine" logger. Defaults to a hexstring of the + "sqlalchemy.engine" logger. Defaults to a hexstring of the object's id. :param max_overflow=10: the number of connections to allow in @@ -286,8 +286,8 @@ def create_engine(*args, **kwargs): of pool to be used. :param pool_logging_name: String identifier which will be used within - the "name" field of logging records generated within the - "sqlalchemy.pool" logger. Defaults to a hexstring of the object's + the "name" field of logging records generated within the + "sqlalchemy.pool" logger. Defaults to a hexstring of the object's id. :param pool_size=5: the number of connections to keep open @@ -307,18 +307,29 @@ def create_engine(*args, **kwargs): server configuration as well). :param pool_reset_on_return='rollback': set the "reset on return" - behavior of the pool, which is whether ``rollback()``, + behavior of the pool, which is whether ``rollback()``, ``commit()``, or nothing is called upon connections being returned to the pool. See the docstring for - ``reset_on_return`` at :class:`.Pool`. (new as of 0.7.6) + ``reset_on_return`` at :class:`.Pool`. + + .. versionadded:: 0.7.6 :param pool_timeout=30: number of seconds to wait before giving up on getting a connection from the pool. This is only used with :class:`~sqlalchemy.pool.QueuePool`. :param strategy='plain': selects alternate engine implementations. 
- Currently available is the ``threadlocal`` - strategy, which is described in :ref:`threadlocal_strategy`. + Currently available are: + + * the ``threadlocal`` strategy, which is described in + :ref:`threadlocal_strategy`; + * the ``mock`` strategy, which dispatches all statement + execution to a function passed as the argument ``executor``. + See `example in the FAQ `_. + + :param executor=None: a function taking arguments + ``(sql, *multiparams, **params)``, to which the ``mock`` strategy will + dispatch all statement execution. Used only by ``strategy='mock'``. """ diff --git a/libs/sqlalchemy/engine/base.py b/libs/sqlalchemy/engine/base.py index d16fc9c6..302fb779 100644 --- a/libs/sqlalchemy/engine/base.py +++ b/libs/sqlalchemy/engine/base.py @@ -1,5 +1,5 @@ # engine/base.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -118,10 +118,10 @@ class Dialect(object): Postgresql. implicit_returning - use RETURNING or equivalent during INSERT execution in order to load + use RETURNING or equivalent during INSERT execution in order to load newly generated primary keys and other column defaults in one execution, which are then available via inserted_primary_key. - If an insert statement has returning() specified explicitly, + If an insert statement has returning() specified explicitly, the "implicit" functionality is not used and inserted_primary_key will not be available. @@ -200,7 +200,7 @@ class Dialect(object): Allows dialects to configure options based on server version info or other properties. - The connection passed here is a SQLAlchemy Connection object, + The connection passed here is a SQLAlchemy Connection object, with full capabilities. 
The initalize() method of the base dialect should be called via @@ -219,8 +219,8 @@ class Dialect(object): set) is specified, limit the autoload to the given column names. - The default implementation uses the - :class:`~sqlalchemy.engine.reflection.Inspector` interface to + The default implementation uses the + :class:`~sqlalchemy.engine.reflection.Inspector` interface to provide the output, building upon the granular table/column/ constraint etc. methods of :class:`.Dialect`. @@ -355,7 +355,7 @@ class Dialect(object): raise NotImplementedError() def normalize_name(self, name): - """convert the given name to lowercase if it is detected as + """convert the given name to lowercase if it is detected as case insensitive. this method is only used if the dialect defines @@ -407,7 +407,7 @@ class Dialect(object): raise NotImplementedError() def _get_default_schema_name(self, connection): - """Return the string name of the currently selected schema from + """Return the string name of the currently selected schema from the given connection. 
This is used by the default implementation to populate the @@ -419,13 +419,13 @@ class Dialect(object): raise NotImplementedError() def do_begin(self, connection): - """Provide an implementation of *connection.begin()*, given a + """Provide an implementation of *connection.begin()*, given a DB-API connection.""" raise NotImplementedError() def do_rollback(self, connection): - """Provide an implementation of *connection.rollback()*, given + """Provide an implementation of *connection.rollback()*, given a DB-API connection.""" raise NotImplementedError() @@ -441,7 +441,7 @@ class Dialect(object): raise NotImplementedError() def do_commit(self, connection): - """Provide an implementation of *connection.commit()*, given a + """Provide an implementation of *connection.commit()*, given a DB-API connection.""" raise NotImplementedError() @@ -520,7 +520,7 @@ class Dialect(object): def connect(self): """return a callable which sets up a newly created DBAPI connection. - The callable accepts a single argument "conn" which is the + The callable accepts a single argument "conn" which is the DBAPI connection itself. It has no return value. This is used to set dialect-wide per-connection options such as @@ -644,13 +644,13 @@ class ExecutionContext(object): raise NotImplementedError() def handle_dbapi_exception(self, e): - """Receive a DBAPI exception which occurred upon execute, result + """Receive a DBAPI exception which occurred upon execute, result fetch, etc.""" raise NotImplementedError() def should_autocommit_text(self, statement): - """Parse the given textual statement and return True if it refers to + """Parse the given textual statement and return True if it refers to a "committable" statement""" raise NotImplementedError() @@ -663,13 +663,10 @@ class ExecutionContext(object): raise NotImplementedError() def get_rowcount(self): - """Return the number of rows produced (by a SELECT query) - or affected (by an INSERT/UPDATE/DELETE statement). 
+ """Return the DBAPI ``cursor.rowcount`` value, or in some + cases an interpreted value. - Note that this row count may not be properly implemented - in some dialects; this is indicated by the - ``supports_sane_rowcount`` and ``supports_sane_multi_rowcount`` - dialect attributes. + See :attr:`.ResultProxy.rowcount` for details on this. """ @@ -696,7 +693,7 @@ class Compiled(object): :param statement: ``ClauseElement`` to be compiled. - :param bind: Optional Engine or Connection to compile this + :param bind: Optional Engine or Connection to compile this statement against. """ @@ -734,7 +731,7 @@ class Compiled(object): def construct_params(self, params=None): """Return the bind params for this compiled object. - :param params: a dict of string/object pairs whos values will + :param params: a dict of string/object pairs whose values will override bind values compiled in to the statement. """ @@ -757,7 +754,7 @@ class Compiled(object): return e._execute_compiled(self, multiparams, params) def scalar(self, *multiparams, **params): - """Execute this compiled object and return the result's + """Execute this compiled object and return the result's scalar value.""" return self.execute(*multiparams, **params).scalar() @@ -788,7 +785,7 @@ class Connectable(object): """Return a :class:`.Connection` object. Depending on context, this may be ``self`` if this object - is already an instance of :class:`.Connection`, or a newly + is already an instance of :class:`.Connection`, or a newly procured :class:`.Connection` if this object is an instance of :class:`.Engine`. @@ -799,7 +796,7 @@ class Connectable(object): context. Depending on context, this may be ``self`` if this object - is already an instance of :class:`.Connection`, or a newly + is already an instance of :class:`.Connection`, or a newly procured :class:`.Connection` if this object is an instance of :class:`.Engine`. 
@@ -834,7 +831,7 @@ class Connectable(object): """ raise NotImplementedError() - def _run_visitor(self, visitorcallable, element, + def _run_visitor(self, visitorcallable, element, **kwargs): raise NotImplementedError() @@ -902,7 +899,7 @@ class Connection(Connectable): """ return self.engine._connection_cls( - self.engine, + self.engine, self.__connection, _branch=True) def _clone(self): @@ -920,7 +917,7 @@ class Connection(Connectable): self.close() def execution_options(self, **opt): - """ Set non-SQL options for the connection which take effect + """ Set non-SQL options for the connection which take effect during execution. The method returns a copy of this :class:`.Connection` which references @@ -935,11 +932,11 @@ class Connection(Connectable): :meth:`.Connection.execution_options` accepts all options as those accepted by :meth:`.Executable.execution_options`. Additionally, - it includes options that are applicable only to + it includes options that are applicable only to :class:`.Connection`. :param autocommit: Available on: Connection, statement. - When True, a COMMIT will be invoked after execution + When True, a COMMIT will be invoked after execution when executed in 'autocommit' mode, i.e. when an explicit transaction is not begun on the connection. Note that DBAPI connections by default are always in a transaction - SQLAlchemy uses @@ -954,17 +951,17 @@ class Connection(Connectable): :param compiled_cache: Available on: Connection. A dictionary where :class:`.Compiled` objects - will be cached when the :class:`.Connection` compiles a clause + will be cached when the :class:`.Connection` compiles a clause expression into a :class:`.Compiled` object. 
It is the user's responsibility to manage the size of this dictionary, which will have keys corresponding to the dialect, clause element, the column - names within the VALUES or SET clause of an INSERT or UPDATE, + names within the VALUES or SET clause of an INSERT or UPDATE, as well as the "batch" mode for an INSERT or UPDATE statement. The format of this dictionary is not guaranteed to stay the same in future releases. - Note that the ORM makes use of its own "compiled" caches for + Note that the ORM makes use of its own "compiled" caches for some operations, including flush operations. The caching used by the ORM internally supersedes a cache dictionary specified here. @@ -974,31 +971,33 @@ class Connection(Connectable): the lifespan of this connection. Valid values include those string values accepted by the ``isolation_level`` parameter passed to :func:`.create_engine`, and are - database specific, including those for :ref:`sqlite_toplevel`, + database specific, including those for :ref:`sqlite_toplevel`, :ref:`postgresql_toplevel` - see those dialect's documentation for further info. - Note that this option necessarily affects the underying - DBAPI connection for the lifespan of the originating - :class:`.Connection`, and is not per-execution. This - setting is not removed until the underying DBAPI connection + Note that this option necessarily affects the underlying + DBAPI connection for the lifespan of the originating + :class:`.Connection`, and is not per-execution. This + setting is not removed until the underlying DBAPI connection is returned to the connection pool, i.e. the :meth:`.Connection.close` method is called. - :param no_parameters: When ``True``, if the final parameter - list or dictionary is totally empty, will invoke the + :param no_parameters: When ``True``, if the final parameter + list or dictionary is totally empty, will invoke the statement on the cursor as ``cursor.execute(statement)``, not passing the parameter collection at all. 
Some DBAPIs such as psycopg2 and mysql-python consider - percent signs as significant only when parameters are + percent signs as significant only when parameters are present; this option allows code to generate SQL containing percent signs (and possibly other characters) that is neutral regarding whether it's executed by the DBAPI - or piped into a script that's later invoked by - command line tools. New in 0.7.6. - + or piped into a script that's later invoked by + command line tools. + + .. versionadded:: 0.7.6 + :param stream_results: Available on: Connection, statement. - Indicate to the dialect that results should be + Indicate to the dialect that results should be "streamed" and not pre-buffered, if possible. This is a limitation of many DBAPIs. The flag is currently understood only by the psycopg2 dialect. @@ -1011,7 +1010,7 @@ class Connection(Connectable): return c def _set_isolation_level(self): - self.dialect.set_isolation_level(self.connection, + self.dialect.set_isolation_level(self.connection, self._execution_options['isolation_level']) self.connection._connection_record.finalize_callback = \ self.dialect.reset_isolation_level @@ -1074,7 +1073,7 @@ class Connection(Connectable): """Returns self. This ``Connectable`` interface method returns self, allowing - Connections to be used interchangably with Engines in most + Connections to be used interchangeably with Engines in most situations that require a bind. """ @@ -1084,14 +1083,14 @@ class Connection(Connectable): """Returns self. This ``Connectable`` interface method returns self, allowing - Connections to be used interchangably with Engines in most + Connections to be used interchangeably with Engines in most situations that require a bind. """ return self def invalidate(self, exception=None): - """Invalidate the underlying DBAPI connection associated with + """Invalidate the underlying DBAPI connection associated with this Connection. 
The underlying DB-API connection is literally closed (if @@ -1123,7 +1122,7 @@ class Connection(Connectable): def detach(self): """Detach the underlying DB-API connection from its connection pool. - This Connection instance will remain useable. When closed, + This Connection instance will remain usable. When closed, the DB-API connection will be literally closed and not returned to its pool. The pool will typically lazily create a new connection to replace the detached connection. @@ -1148,26 +1147,26 @@ class Connection(Connectable): Nested calls to :meth:`.begin` on the same :class:`.Connection` will return new :class:`.Transaction` objects that represent - an emulated transaction within the scope of the enclosing + an emulated transaction within the scope of the enclosing transaction, that is:: - + trans = conn.begin() # outermost transaction - trans2 = conn.begin() # "nested" + trans2 = conn.begin() # "nested" trans2.commit() # does nothing trans.commit() # actually commits - - Calls to :meth:`.Transaction.commit` only have an effect + + Calls to :meth:`.Transaction.commit` only have an effect when invoked via the outermost :class:`.Transaction` object, though the :meth:`.Transaction.rollback` method of any of the :class:`.Transaction` objects will roll back the transaction. See also: - + :meth:`.Connection.begin_nested` - use a SAVEPOINT - + :meth:`.Connection.begin_twophase` - use a two phase /XID transaction - + :meth:`.Engine.begin` - context manager available from :class:`.Engine`. """ @@ -1189,7 +1188,7 @@ class Connection(Connectable): still controls the overall ``commit`` or ``rollback`` of the transaction of a whole. - See also :meth:`.Connection.begin`, + See also :meth:`.Connection.begin`, :meth:`.Connection.begin_twophase`. """ @@ -1208,10 +1207,10 @@ class Connection(Connectable): :class:`.Transaction`, also provides a :meth:`~.TwoPhaseTransaction.prepare` method. - :param xid: the two phase transaction id. 
If not supplied, a + :param xid: the two phase transaction id. If not supplied, a random id will be generated. - See also :meth:`.Connection.begin`, + See also :meth:`.Connection.begin`, :meth:`.Connection.begin_twophase`. """ @@ -1386,12 +1385,12 @@ class Connection(Connectable): def execute(self, object, *multiparams, **params): """Executes the a SQL statement construct and returns a :class:`.ResultProxy`. - :param object: The statement to be executed. May be + :param object: The statement to be executed. May be one of: * a plain string * any :class:`.ClauseElement` construct that is also - a subclass of :class:`.Executable`, such as a + a subclass of :class:`.Executable`, such as a :func:`~.expression.select` construct * a :class:`.FunctionElement`, such as that generated by :attr:`.func`, will be automatically wrapped in @@ -1406,7 +1405,7 @@ class Connection(Connectable): dictionaries passed to \*multiparams:: conn.execute( - table.insert(), + table.insert(), {"id":1, "value":"v1"}, {"id":2, "value":"v2"} ) @@ -1417,10 +1416,10 @@ class Connection(Connectable): table.insert(), id=1, value="v1" ) - In the case that a plain SQL string is passed, and the underlying + In the case that a plain SQL string is passed, and the underlying DBAPI accepts positional bind parameters, a collection of tuples or individual values in \*multiparams may be passed:: - + conn.execute( "INSERT INTO table (id, value) VALUES (?, ?)", (1, "v1"), (2, "v2") @@ -1432,9 +1431,9 @@ class Connection(Connectable): ) Note above, the usage of a question mark "?" or other - symbol is contingent upon the "paramstyle" accepted by the DBAPI + symbol is contingent upon the "paramstyle" accepted by the DBAPI in use, which may be any of "qmark", "named", "pyformat", "format", - "numeric". See `pep-249 `_ + "numeric". See `pep-249 `_ for details on paramstyle. 
To execute a textual SQL statement which uses bound parameters in a @@ -1444,9 +1443,9 @@ class Connection(Connectable): for c in type(object).__mro__: if c in Connection.executors: return Connection.executors[c]( - self, + self, object, - multiparams, + multiparams, params) else: raise exc.InvalidRequestError( @@ -1471,7 +1470,8 @@ class Connection(Connectable): elif len(multiparams) == 1: zero = multiparams[0] if isinstance(zero, (list, tuple)): - if not zero or hasattr(zero[0], '__iter__'): + if not zero or hasattr(zero[0], '__iter__') and \ + not hasattr(zero[0], 'strip'): return zero else: return [zero] @@ -1480,7 +1480,8 @@ class Connection(Connectable): else: return [[zero]] else: - if hasattr(multiparams[0], '__iter__'): + if hasattr(multiparams[0], '__iter__') and \ + not hasattr(multiparams[0], 'strip'): return multiparams else: return [multiparams] @@ -1488,7 +1489,7 @@ class Connection(Connectable): def _execute_function(self, func, multiparams, params): """Execute a sql.FunctionElement object.""" - return self._execute_clauseelement(func.select(), + return self._execute_clauseelement(func.select(), multiparams, params) def _execute_default(self, default, multiparams, params): @@ -1517,7 +1518,7 @@ class Connection(Connectable): self.close() if self._has_events: - self.engine.dispatch.after_execute(self, + self.engine.dispatch.after_execute(self, default, multiparams, params, ret) return ret @@ -1536,12 +1537,12 @@ class Connection(Connectable): ret = self._execute_context( dialect, dialect.execution_ctx_cls._init_ddl, - compiled, + compiled, None, compiled ) if self._has_events: - self.engine.dispatch.after_execute(self, + self.engine.dispatch.after_execute(self, ddl, multiparams, params, ret) return ret @@ -1566,24 +1567,24 @@ class Connection(Connectable): compiled_sql = self._execution_options['compiled_cache'][key] else: compiled_sql = elem.compile( - dialect=dialect, column_keys=keys, + dialect=dialect, column_keys=keys, 
inline=len(distilled_params) > 1) self._execution_options['compiled_cache'][key] = compiled_sql else: compiled_sql = elem.compile( - dialect=dialect, column_keys=keys, + dialect=dialect, column_keys=keys, inline=len(distilled_params) > 1) ret = self._execute_context( dialect, dialect.execution_ctx_cls._init_compiled, - compiled_sql, + compiled_sql, distilled_params, compiled_sql, distilled_params ) if self._has_events: - self.engine.dispatch.after_execute(self, + self.engine.dispatch.after_execute(self, elem, multiparams, params, ret) return ret @@ -1600,12 +1601,12 @@ class Connection(Connectable): ret = self._execute_context( dialect, dialect.execution_ctx_cls._init_compiled, - compiled, + compiled, parameters, compiled, parameters ) if self._has_events: - self.engine.dispatch.after_execute(self, + self.engine.dispatch.after_execute(self, compiled, multiparams, params, ret) return ret @@ -1622,17 +1623,17 @@ class Connection(Connectable): ret = self._execute_context( dialect, dialect.execution_ctx_cls._init_statement, - statement, + statement, parameters, statement, parameters ) if self._has_events: - self.engine.dispatch.after_execute(self, + self.engine.dispatch.after_execute(self, statement, multiparams, params, ret) return ret - def _execute_context(self, dialect, constructor, - statement, parameters, + def _execute_context(self, dialect, constructor, + statement, parameters, *args): """Create an :class:`.ExecutionContext` and execute, returning a :class:`.ResultProxy`.""" @@ -1645,8 +1646,8 @@ class Connection(Connectable): context = constructor(dialect, self, conn, *args) except Exception, e: - self._handle_dbapi_exception(e, - str(statement), parameters, + self._handle_dbapi_exception(e, + str(statement), parameters, None, None) raise @@ -1663,46 +1664,46 @@ class Connection(Connectable): if self._has_events: for fn in self.engine.dispatch.before_cursor_execute: statement, parameters = \ - fn(self, cursor, statement, parameters, + fn(self, cursor, 
statement, parameters, context, context.executemany) if self._echo: self.engine.logger.info(statement) - self.engine.logger.info("%r", + self.engine.logger.info("%r", sql_util._repr_params(parameters, batches=10)) try: if context.executemany: self.dialect.do_executemany( - cursor, - statement, - parameters, + cursor, + statement, + parameters, context) elif not parameters and context.no_parameters: self.dialect.do_execute_no_params( - cursor, - statement, + cursor, + statement, context) else: self.dialect.do_execute( - cursor, - statement, - parameters, + cursor, + statement, + parameters, context) except Exception, e: self._handle_dbapi_exception( - e, - statement, - parameters, - cursor, + e, + statement, + parameters, + cursor, context) raise if self._has_events: - self.engine.dispatch.after_cursor_execute(self, cursor, - statement, - parameters, - context, + self.engine.dispatch.after_cursor_execute(self, cursor, + statement, + parameters, + context, context.executemany) if context.compiled: @@ -1714,7 +1715,6 @@ class Connection(Connectable): # create a resultproxy, get rowcount/implicit RETURNING # rows, close cursor if no further results pending result = context.get_result_proxy() - if context.isinsert: if context._is_implicit_returning: context._fetch_implicit_returning(result) @@ -1722,7 +1722,7 @@ class Connection(Connectable): elif not context._is_explicit_returning: result.close(_autoclose_connection=False) elif result._metadata is None: - # no results, get rowcount + # no results, get rowcount # (which requires open cursor on some drivers # such as kintersbasdb, mxodbc), result.rowcount @@ -1743,7 +1743,7 @@ class Connection(Connectable): This method is used by DefaultDialect for special-case executions, such as for sequences and column defaults. - The path of statement execution in the majority of cases + The path of statement execution in the majority of cases terminates at _execute_context(). 
""" @@ -1752,14 +1752,14 @@ class Connection(Connectable): self.engine.logger.info("%r", parameters) try: self.dialect.do_execute( - cursor, - statement, + cursor, + statement, parameters) except Exception, e: self._handle_dbapi_exception( - e, - statement, - parameters, + e, + statement, + parameters, cursor, None) raise @@ -1781,20 +1781,20 @@ class Connection(Connectable): if isinstance(e, (SystemExit, KeyboardInterrupt)): raise - def _handle_dbapi_exception(self, - e, - statement, - parameters, - cursor, + def _handle_dbapi_exception(self, + e, + statement, + parameters, + cursor, context): if getattr(self, '_reentrant_error', False): # Py3K - #raise exc.DBAPIError.instance(statement, parameters, e, + #raise exc.DBAPIError.instance(statement, parameters, e, # self.dialect.dbapi.Error) from e # Py2K - raise exc.DBAPIError.instance(statement, - parameters, - e, + raise exc.DBAPIError.instance(statement, + parameters, + e, self.dialect.dbapi.Error), \ None, sys.exc_info()[2] # end Py2K @@ -1806,10 +1806,19 @@ class Connection(Connectable): (statement is not None and context is None) if should_wrap and context: + if self._has_events: + self.engine.dispatch.dbapi_error(self, + cursor, + statement, + parameters, + context, + e) context.handle_dbapi_exception(e) is_disconnect = isinstance(e, self.dialect.dbapi.Error) and \ self.dialect.is_disconnect(e, self.__connection, cursor) + + if is_disconnect: self.invalidate(e) self.engine.dispose() @@ -1825,17 +1834,17 @@ class Connection(Connectable): # Py3K #raise exc.DBAPIError.instance( - # statement, - # parameters, - # e, + # statement, + # parameters, + # e, # self.dialect.dbapi.Error, # connection_invalidated=is_disconnect) \ # from e # Py2K raise exc.DBAPIError.instance( - statement, - parameters, - e, + statement, + parameters, + e, self.dialect.dbapi.Error, connection_invalidated=is_disconnect), \ None, sys.exc_info()[2] @@ -1881,8 +1890,8 @@ class Connection(Connectable): set) is specified, limit the autoload to 
the given column names. - The default implementation uses the - :class:`.Inspector` interface to + The default implementation uses the + :class:`.Inspector` interface to provide the output, building upon the granular table/column/ constraint etc. methods of :class:`.Dialect`. @@ -1895,18 +1904,18 @@ class Connection(Connectable): def transaction(self, callable_, *args, **kwargs): """Execute the given function within a transaction boundary. - The function is passed this :class:`.Connection` + The function is passed this :class:`.Connection` as the first argument, followed by the given \*args and \**kwargs, e.g.:: - + def do_something(conn, x, y): conn.execute("some statement", {'x':x, 'y':y}) conn.transaction(do_something, 5, 10) The operations inside the function are all invoked within the - context of a single :class:`.Transaction`. - Upon success, the transaction is committed. If an + context of a single :class:`.Transaction`. + Upon success, the transaction is committed. If an exception is raised, the transaction is rolled back before propagating the exception. @@ -1915,20 +1924,20 @@ class Connection(Connectable): The :meth:`.transaction` method is superseded by the usage of the Python ``with:`` statement, which can be used with :meth:`.Connection.begin`:: - + with conn.begin(): conn.execute("some statement", {'x':5, 'y':10}) - + As well as with :meth:`.Engine.begin`:: - + with engine.begin() as conn: conn.execute("some statement", {'x':5, 'y':10}) - + See also: - - :meth:`.Engine.begin` - engine-level transactional + + :meth:`.Engine.begin` - engine-level transactional context - + :meth:`.Engine.transaction` - engine-level version of :meth:`.Connection.transaction` @@ -1950,7 +1959,7 @@ class Connection(Connectable): The given \*args and \**kwargs are passed subsequent to the :class:`.Connection` argument. 
- This function, along with :meth:`.Engine.run_callable`, + This function, along with :meth:`.Engine.run_callable`, allows a function to be run with a :class:`.Connection` or :class:`.Engine` object without the need to know which one is being dealt with. @@ -1966,7 +1975,7 @@ class Connection(Connectable): class Transaction(object): """Represent a database transaction in progress. - The :class:`.Transaction` object is procured by + The :class:`.Transaction` object is procured by calling the :meth:`~.Connection.begin` method of :class:`.Connection`:: @@ -1978,9 +1987,9 @@ class Transaction(object): trans.commit() The object provides :meth:`.rollback` and :meth:`.commit` - methods in order to control transaction boundaries. It - also implements a context manager interface so that - the Python ``with`` statement can be used with the + methods in order to control transaction boundaries. It + also implements a context manager interface so that + the Python ``with`` statement can be used with the :meth:`.Connection.begin` method:: with connection.begin(): @@ -2126,11 +2135,11 @@ class TwoPhaseTransaction(Transaction): class Engine(Connectable, log.Identified): """ - Connects a :class:`~sqlalchemy.pool.Pool` and - :class:`~sqlalchemy.engine.base.Dialect` together to provide a source + Connects a :class:`~sqlalchemy.pool.Pool` and + :class:`~sqlalchemy.engine.base.Dialect` together to provide a source of database connectivity and behavior. - An :class:`.Engine` object is instantiated publically using the + An :class:`.Engine` object is instantiated publicly using the :func:`~sqlalchemy.create_engine` function. 
See also: @@ -2145,7 +2154,7 @@ class Engine(Connectable, log.Identified): _has_events = False _connection_cls = Connection - def __init__(self, pool, dialect, url, + def __init__(self, pool, dialect, url, logging_name=None, echo=None, proxy=None, execution_options=None ): @@ -2172,13 +2181,13 @@ class Engine(Connectable, log.Identified): dispatch = event.dispatcher(events.ConnectionEvents) def update_execution_options(self, **opt): - """Update the default execution_options dictionary + """Update the default execution_options dictionary of this :class:`.Engine`. The given keys/values in \**opt are added to the - default execution options that will be used for + default execution options that will be used for all connections. The initial contents of this dictionary - can be sent via the ``execution_options`` paramter + can be sent via the ``execution_options`` parameter to :func:`.create_engine`. See :meth:`.Connection.execution_options` for more @@ -2212,27 +2221,27 @@ class Engine(Connectable, log.Identified): A new connection pool is created immediately after the old one has been disposed. This new pool, like all SQLAlchemy connection pools, - does not make any actual connections to the database until one is + does not make any actual connections to the database until one is first requested. This method has two general use cases: * When a dropped connection is detected, it is assumed that all - connections held by the pool are potentially dropped, and + connections held by the pool are potentially dropped, and the entire pool is replaced. - * An application may want to use :meth:`dispose` within a test + * An application may want to use :meth:`dispose` within a test suite that is creating multiple engines. It is critical to note that :meth:`dispose` does **not** guarantee that the application will release all open database connections - only those connections that are checked into the pool are closed. 
Connections which remain checked out or have been detached from - the engine are not affected. + the engine are not affected. """ self.pool.dispose() - self.pool = self.pool.recreate() + self.pool = self.pool._replace() @util.deprecated("0.7", "Use the create() method on the given schema " "object directly, i.e. :meth:`.Table.create`, " @@ -2242,7 +2251,7 @@ class Engine(Connectable, log.Identified): from sqlalchemy.engine import ddl - self._run_visitor(ddl.SchemaGenerator, entity, + self._run_visitor(ddl.SchemaGenerator, entity, connection=connection, **kwargs) @util.deprecated("0.7", "Use the drop() method on the given schema " @@ -2253,7 +2262,7 @@ class Engine(Connectable, log.Identified): from sqlalchemy.engine import ddl - self._run_visitor(ddl.SchemaDropper, entity, + self._run_visitor(ddl.SchemaDropper, entity, connection=connection, **kwargs) def _execute_default(self, default): @@ -2264,15 +2273,15 @@ class Engine(Connectable, log.Identified): connection.close() @property - @util.deprecated("0.7", + @util.deprecated("0.7", "Use :attr:`~sqlalchemy.sql.expression.func` to create function constructs.") def func(self): return expression._FunctionGenerator(bind=self) - @util.deprecated("0.7", + @util.deprecated("0.7", "Use :func:`.expression.text` to create text constructs.") def text(self, text, *args, **kwargs): - """Return a :func:`~sqlalchemy.sql.expression.text` construct, + """Return a :func:`~sqlalchemy.sql.expression.text` construct, bound to this engine. This is equivalent to:: @@ -2283,7 +2292,7 @@ class Engine(Connectable, log.Identified): return expression.text(text, bind=self, *args, **kwargs) - def _run_visitor(self, visitorcallable, element, + def _run_visitor(self, visitorcallable, element, connection=None, **kwargs): if connection is None: conn = self.contextual_connect(close_with_result=False) @@ -2317,15 +2326,15 @@ class Engine(Connectable, log.Identified): with a :class:`.Transaction` established. 
E.g.:: - + with engine.begin() as conn: conn.execute("insert into table (x, y, z) values (1, 2, 3)") conn.execute("my_special_procedure(5)") - Upon successful operation, the :class:`.Transaction` + Upon successful operation, the :class:`.Transaction` is committed. If an error is raised, the :class:`.Transaction` - is rolled back. - + is rolled back. + The ``close_with_result`` flag is normally ``False``, and indicates that the :class:`.Connection` will be closed when the operation is complete. When set to ``True``, it indicates the :class:`.Connection` @@ -2334,10 +2343,10 @@ class Engine(Connectable, log.Identified): close the :class:`.Connection` when that :class:`.ResultProxy` has exhausted all result rows. - New in 0.7.6. - + .. versionadded:: 0.7.6 + See also: - + :meth:`.Engine.connect` - procure a :class:`.Connection` from an :class:`.Engine`. @@ -2346,26 +2355,30 @@ class Engine(Connectable, log.Identified): """ conn = self.contextual_connect(close_with_result=close_with_result) - trans = conn.begin() + try: + trans = conn.begin() + except: + conn.close() + raise return Engine._trans_ctx(conn, trans, close_with_result) def transaction(self, callable_, *args, **kwargs): """Execute the given function within a transaction boundary. The function is passed a :class:`.Connection` newly procured - from :meth:`.Engine.contextual_connect` as the first argument, + from :meth:`.Engine.contextual_connect` as the first argument, followed by the given \*args and \**kwargs. - + e.g.:: - + def do_something(conn, x, y): conn.execute("some statement", {'x':x, 'y':y}) engine.transaction(do_something, 5, 10) - + The operations inside the function are all invoked within the - context of a single :class:`.Transaction`. - Upon success, the transaction is committed. If an + context of a single :class:`.Transaction`. + Upon success, the transaction is committed. If an exception is raised, the transaction is rolled back before propagating the exception. 
@@ -2374,15 +2387,15 @@ class Engine(Connectable, log.Identified): The :meth:`.transaction` method is superseded by the usage of the Python ``with:`` statement, which can be used with :meth:`.Engine.begin`:: - + with engine.begin() as conn: conn.execute("some statement", {'x':5, 'y':10}) - + See also: - - :meth:`.Engine.begin` - engine-level transactional + + :meth:`.Engine.begin` - engine-level transactional context - + :meth:`.Connection.transaction` - connection-level version of :meth:`.Engine.transaction` @@ -2401,7 +2414,7 @@ class Engine(Connectable, log.Identified): The given \*args and \**kwargs are passed subsequent to the :class:`.Connection` argument. - This function, along with :meth:`.Connection.run_callable`, + This function, along with :meth:`.Connection.run_callable`, allows a function to be run with a :class:`.Connection` or :class:`.Engine` object without the need to know which one is being dealt with. @@ -2472,9 +2485,9 @@ class Engine(Connectable, log.Identified): """ - return self._connection_cls(self, - self.pool.connect(), - close_with_result=close_with_result, + return self._connection_cls(self, + self.pool.connect(), + close_with_result=close_with_result, **kwargs) def table_names(self, schema=None, connection=None): @@ -2505,7 +2518,7 @@ class Engine(Connectable, log.Identified): Uses the given :class:`.Connection`, or if None produces its own :class:`.Connection`, and passes the ``table`` - and ``include_columns`` arguments onto that + and ``include_columns`` arguments onto that :class:`.Connection` object's :meth:`.Connection.reflecttable` method. The :class:`.Table` object is then populated with new attributes. @@ -2527,7 +2540,7 @@ class Engine(Connectable, log.Identified): def raw_connection(self): """Return a "raw" DBAPI connection from the connection pool. - The returned object is a proxied version of the DBAPI + The returned object is a proxied version of the DBAPI connection object used by the underlying driver in use. 
The object will have all the same behavior as the real DBAPI connection, except that its ``close()`` method will result in the @@ -2552,8 +2565,8 @@ try: # __setstate__. from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor - # The extra function embedding is needed so that the - # reconstructor function has the same signature whether or not + # The extra function embedding is needed so that the + # reconstructor function has the same signature whether or not # the extension is present. def rowproxy_reconstructor(cls, state): return safe_rowproxy_reconstructor(cls, state) @@ -2687,7 +2700,7 @@ class RowProxy(BaseRowProxy): return iter(self) try: - # Register RowProxy with Sequence, + # Register RowProxy with Sequence, # so sequence protocol is implemented from collections import Sequence Sequence.register(RowProxy) @@ -2710,7 +2723,7 @@ class ResultMetaData(object): context = parent.context dialect = context.dialect typemap = dialect.dbapi_type_map - translate_colname = dialect._translate_colname + translate_colname = context._translate_colname # high precedence key values. primary_keymap = {} @@ -2745,10 +2758,10 @@ class ResultMetaData(object): primary_keymap[i] = rec # populate primary keymap, looking for conflicts. - if primary_keymap.setdefault(name.lower(), rec) is not rec: + if primary_keymap.setdefault(name.lower(), rec) is not rec: # place a record that doesn't have the "index" - this # is interpreted later as an AmbiguousColumnError, - # but only when actually accessed. Columns + # but only when actually accessed. Columns # colliding by name is not a problem if those names # aren't used; integer and ColumnElement access is always # unambiguous. @@ -2779,7 +2792,7 @@ class ResultMetaData(object): def _set_keymap_synonym(self, name, origname): """Set a synonym for the given name. - Some dialects (SQLite at the moment) may use this to + Some dialects (SQLite at the moment) may use this to adjust the column names that are significant within a row. 
@@ -2795,7 +2808,7 @@ class ResultMetaData(object): result = map.get(key.lower()) # fallback for targeting a ColumnElement to a textual expression # this is a rare use case which only occurs when matching text() - # or colummn('name') constructs to ColumnElements, or after a + # or colummn('name') constructs to ColumnElements, or after a # pickle/unpickle roundtrip elif isinstance(key, expression.ColumnElement): if key._label and key._label.lower() in map: @@ -2803,7 +2816,7 @@ class ResultMetaData(object): elif hasattr(key, 'name') and key.name.lower() in map: # match is only on name. result = map[key.name.lower()] - # search extra hard to make sure this + # search extra hard to make sure this # isn't a column/label name overlap. # this check isn't currently available if the row # was unpickled. @@ -2817,7 +2830,7 @@ class ResultMetaData(object): if result is None: if raiseerr: raise exc.NoSuchColumnError( - "Could not locate column in row for column '%s'" % + "Could not locate column in row for column '%s'" % expression._string_or_unprintable(key)) else: return None @@ -2870,7 +2883,7 @@ class ResultProxy(object): col3 = row[mytable.c.mycol] # access via Column object. ``ResultProxy`` also handles post-processing of result column - data using ``TypeEngine`` objects, which are referenced from + data using ``TypeEngine`` objects, which are referenced from the originating SQL statement that produced this result set. """ @@ -2907,18 +2920,41 @@ class ResultProxy(object): def rowcount(self): """Return the 'rowcount' for this result. - The 'rowcount' reports the number of rows affected - by an UPDATE or DELETE statement. It has *no* other - uses and is not intended to provide the number of rows - present from a SELECT. + The 'rowcount' reports the number of rows *matched* + by the WHERE criterion of an UPDATE or DELETE statement. 
- Note that this row count may not be properly implemented in some - dialects; this is indicated by - :meth:`~sqlalchemy.engine.base.ResultProxy.supports_sane_rowcount()` - and - :meth:`~sqlalchemy.engine.base.ResultProxy.supports_sane_multi_rowcount()`. - ``rowcount()`` also may not work at this time for a statement that - uses ``returning()``. + .. note:: + + Notes regarding :attr:`.ResultProxy.rowcount`: + + + * This attribute returns the number of rows *matched*, + which is not necessarily the same as the number of rows + that were actually *modified* - an UPDATE statement, for example, + may have no net change on a given row if the SET values + given are the same as those present in the row already. + Such a row would be matched but not modified. + On backends that feature both styles, such as MySQL, + rowcount is configured by default to return the match + count in all cases. + + * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction + with an UPDATE or DELETE statement. Contrary to what the Python + DBAPI says, it does *not* return the + number of rows available from the results of a SELECT statement + as DBAPIs cannot support this functionality when rows are + unbuffered. + + * :attr:`.ResultProxy.rowcount` may not be fully implemented by + all dialects. In particular, most DBAPIs do not support an + aggregate rowcount result from an executemany call. + The :meth:`.ResultProxy.supports_sane_rowcount` and + :meth:`.ResultProxy.supports_sane_multi_rowcount` methods + will report from the dialect if each usage is known to be + supported. + + * Statements that use RETURNING may not return a correct + rowcount. """ try: @@ -2934,7 +2970,7 @@ class ResultProxy(object): This is a DBAPI specific method and is only functional for those backends which support it, for statements - where it is appropriate. It's behavior is not + where it is appropriate. It's behavior is not consistent across backends. 
Usage of this method is normally unnecessary; the @@ -2947,7 +2983,7 @@ class ResultProxy(object): return self._saved_cursor.lastrowid except Exception, e: self.connection._handle_dbapi_exception( - e, None, None, + e, None, None, self._saved_cursor, self.context) raise @@ -2955,8 +2991,8 @@ class ResultProxy(object): def returns_rows(self): """True if this :class:`.ResultProxy` returns rows. - I.e. if it is legal to call the methods - :meth:`~.ResultProxy.fetchone`, + I.e. if it is legal to call the methods + :meth:`~.ResultProxy.fetchone`, :meth:`~.ResultProxy.fetchmany` :meth:`~.ResultProxy.fetchall`. @@ -2966,10 +3002,10 @@ class ResultProxy(object): @property def is_insert(self): """True if this :class:`.ResultProxy` is the result - of a executing an expression language compiled + of a executing an expression language compiled :func:`.expression.insert` construct. - When True, this implies that the + When True, this implies that the :attr:`inserted_primary_key` attribute is accessible, assuming the statement did not include a user defined "returning" construct. @@ -3022,20 +3058,20 @@ class ResultProxy(object): def inserted_primary_key(self): """Return the primary key for the row just inserted. - The return value is a list of scalar values + The return value is a list of scalar values corresponding to the list of primary key columns in the target table. - This only applies to single row :func:`.insert` - constructs which did not explicitly specify + This only applies to single row :func:`.insert` + constructs which did not explicitly specify :meth:`.Insert.returning`. 
Note that primary key columns which specify a - server_default clause, + server_default clause, or otherwise do not qualify as "autoincrement" columns (see the notes at :class:`.Column`), and were generated using the database-side default, will - appear in this list as ``None`` unless the backend + appear in this list as ``None`` unless the backend supports "returning" and the insert statement executed with the "implicit returning" enabled. @@ -3098,12 +3134,20 @@ class ResultProxy(object): return self.context.prefetch_cols def supports_sane_rowcount(self): - """Return ``supports_sane_rowcount`` from the dialect.""" + """Return ``supports_sane_rowcount`` from the dialect. + + See :attr:`.ResultProxy.rowcount` for background. + + """ return self.dialect.supports_sane_rowcount def supports_sane_multi_rowcount(self): - """Return ``supports_sane_multi_rowcount`` from the dialect.""" + """Return ``supports_sane_multi_rowcount`` from the dialect. + + See :attr:`.ResultProxy.rowcount` for background. 
+ + """ return self.dialect.supports_sane_multi_rowcount @@ -3162,7 +3206,7 @@ class ResultProxy(object): return l except Exception, e: self.connection._handle_dbapi_exception( - e, None, None, + e, None, None, self.cursor, self.context) raise @@ -3182,7 +3226,7 @@ class ResultProxy(object): return l except Exception, e: self.connection._handle_dbapi_exception( - e, None, None, + e, None, None, self.cursor, self.context) raise @@ -3202,7 +3246,7 @@ class ResultProxy(object): return None except Exception, e: self.connection._handle_dbapi_exception( - e, None, None, + e, None, None, self.cursor, self.context) raise @@ -3219,7 +3263,7 @@ class ResultProxy(object): row = self._fetchone_impl() except Exception, e: self.connection._handle_dbapi_exception( - e, None, None, + e, None, None, self.cursor, self.context) raise diff --git a/libs/sqlalchemy/engine/ddl.py b/libs/sqlalchemy/engine/ddl.py index d6fdaee2..c3b32505 100644 --- a/libs/sqlalchemy/engine/ddl.py +++ b/libs/sqlalchemy/engine/ddl.py @@ -28,7 +28,7 @@ class SchemaGenerator(DDLBase): if table.schema: self.dialect.validate_identifier(table.schema) return not self.checkfirst or \ - not self.dialect.has_table(self.connection, + not self.dialect.has_table(self.connection, table.name, schema=table.schema) def _can_create_sequence(self, sequence): @@ -39,8 +39,8 @@ class SchemaGenerator(DDLBase): ( not self.checkfirst or not self.dialect.has_sequence( - self.connection, - sequence.name, + self.connection, + sequence.name, schema=sequence.schema) ) ) @@ -50,9 +50,9 @@ class SchemaGenerator(DDLBase): tables = self.tables else: tables = metadata.tables.values() - collection = [t for t in sql_util.sort_tables(tables) + collection = [t for t in sql_util.sort_tables(tables) if self._can_create_table(t)] - seq_coll = [s for s in metadata._sequences.values() + seq_coll = [s for s in metadata._sequences.values() if s.column is None and self._can_create_sequence(s)] metadata.dispatch.before_create(metadata, self.connection, 
@@ -95,7 +95,7 @@ class SchemaGenerator(DDLBase): def visit_sequence(self, sequence, create_ok=False): if not create_ok and not self._can_create_sequence(sequence): - return + return self.connection.execute(schema.CreateSequence(sequence)) def visit_index(self, index): @@ -116,9 +116,9 @@ class SchemaDropper(DDLBase): tables = self.tables else: tables = metadata.tables.values() - collection = [t for t in reversed(sql_util.sort_tables(tables)) + collection = [t for t in reversed(sql_util.sort_tables(tables)) if self._can_drop_table(t)] - seq_coll = [s for s in metadata._sequences.values() + seq_coll = [s for s in metadata._sequences.values() if s.column is None and self._can_drop_sequence(s)] metadata.dispatch.before_drop(metadata, self.connection, @@ -141,7 +141,7 @@ class SchemaDropper(DDLBase): self.dialect.validate_identifier(table.name) if table.schema: self.dialect.validate_identifier(table.schema) - return not self.checkfirst or self.dialect.has_table(self.connection, + return not self.checkfirst or self.dialect.has_table(self.connection, table.name, schema=table.schema) def _can_drop_sequence(self, sequence): @@ -150,8 +150,8 @@ class SchemaDropper(DDLBase): not sequence.optional) and (not self.checkfirst or self.dialect.has_sequence( - self.connection, - sequence.name, + self.connection, + sequence.name, schema=sequence.schema)) ) diff --git a/libs/sqlalchemy/engine/default.py b/libs/sqlalchemy/engine/default.py index 5c2d9814..f3dfd95e 100644 --- a/libs/sqlalchemy/engine/default.py +++ b/libs/sqlalchemy/engine/default.py @@ -1,5 +1,5 @@ # engine/default.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -89,17 +89,13 @@ class DefaultDialect(base.Dialect): server_version_info = None - # indicates symbol names are + # indicates symbol names 
are # UPPERCASEd if they are case insensitive # within the database. # if this is True, the methods normalize_name() # and denormalize_name() must be provided. requires_name_normalize = False - # a hook for SQLite's translation of - # result column names - _translate_colname = None - reflection_options = () def __init__(self, convert_unicode=False, assert_unicode=False, @@ -190,7 +186,7 @@ class DefaultDialect(base.Dialect): self.returns_unicode_strings = self._check_unicode_returns(connection) self.do_rollback(connection.connection) - + def on_connect(self): """return a callable which sets up a newly created DBAPI connection. @@ -220,7 +216,7 @@ class DefaultDialect(base.Dialect): try: cursor.execute( cast_to( - expression.select( + expression.select( [expression.cast( expression.literal_column( "'test %s returns'" % formatstr), type_) @@ -264,20 +260,20 @@ class DefaultDialect(base.Dialect): return insp.reflecttable(table, include_columns, exclude_columns) def get_pk_constraint(self, conn, table_name, schema=None, **kw): - """Compatiblity method, adapts the result of get_primary_keys() + """Compatibility method, adapts the result of get_primary_keys() for those dialects which don't implement get_pk_constraint(). 
""" return { 'constrained_columns': - self.get_primary_keys(conn, table_name, + self.get_primary_keys(conn, table_name, schema=schema, **kw) } def validate_identifier(self, ident): if len(ident) > self.max_identifier_length: raise exc.IdentifierError( - "Identifier '%s' exceeds maximum length of %d characters" % + "Identifier '%s' exceeds maximum length of %d characters" % (ident, self.max_identifier_length) ) @@ -341,8 +337,8 @@ class DefaultDialect(base.Dialect): return False def reset_isolation_level(self, dbapi_conn): - # default_isolation_level is read from the first conenction - # after the initial set of 'isolation_level', if any, so is + # default_isolation_level is read from the first connection + # after the initial set of 'isolation_level', if any, so is # the configured default of this dialect. self.set_isolation_level(dbapi_conn, self.default_isolation_level) @@ -355,9 +351,15 @@ class DefaultExecutionContext(base.ExecutionContext): result_map = None compiled = None statement = None + postfetch_cols = None + prefetch_cols = None _is_implicit_returning = False _is_explicit_returning = False + # a hook for SQLite's translation of + # result column names + _translate_colname = None + @classmethod def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl): """Initialize execution context for a DDLElement construct.""" @@ -449,8 +451,8 @@ class DefaultExecutionContext(base.ExecutionContext): processors = compiled._bind_processors - # Convert the dictionary of bind parameter values - # into a dict or list to be sent to the DBAPI's + # Convert the dictionary of bind parameter values + # into a dict or list to be sent to the DBAPI's # execute() or executemany() method. 
parameters = [] if dialect.positional: @@ -513,7 +515,7 @@ class DefaultExecutionContext(base.ExecutionContext): for d in parameters ] or [{}] else: - self.parameters = [dialect.execute_sequence_format(p) + self.parameters = [dialect.execute_sequence_format(p) for p in parameters] self.executemany = len(parameters) > 1 @@ -550,10 +552,10 @@ class DefaultExecutionContext(base.ExecutionContext): @util.memoized_property def should_autocommit(self): - autocommit = self.execution_options.get('autocommit', - not self.compiled and + autocommit = self.execution_options.get('autocommit', + not self.compiled and self.statement and - expression.PARSE_AUTOCOMMIT + expression.PARSE_AUTOCOMMIT or False) if autocommit is expression.PARSE_AUTOCOMMIT: @@ -586,7 +588,7 @@ class DefaultExecutionContext(base.ExecutionContext): if type_ is not None: # apply type post processors to the result proc = type_._cached_result_processor( - self.dialect, + self.dialect, self.cursor.description[0][1] ) if proc: @@ -623,7 +625,7 @@ class DefaultExecutionContext(base.ExecutionContext): and when no explicit id value was bound to the statement. - The function is called once, directly after + The function is called once, directly after post_exec() and before the transaction is committed or ResultProxy is generated. If the post_exec() method assigns a value to `self._lastrowid`, the @@ -672,7 +674,7 @@ class DefaultExecutionContext(base.ExecutionContext): self.inserted_primary_key = [ c is autoinc_col and lastrowid or v for c, v in zip( - table.primary_key, + table.primary_key, self.inserted_primary_key) ] @@ -698,7 +700,7 @@ class DefaultExecutionContext(base.ExecutionContext): style of ``setinputsizes()`` on the cursor, using DB-API types from the bind parameter's ``TypeEngine`` objects. - This method only called by those dialects which require it, + This method only called by those dialects which require it, currently cx_oracle. 
""" @@ -743,7 +745,7 @@ class DefaultExecutionContext(base.ExecutionContext): elif default.is_callable: return default.arg(self) elif default.is_clause_element: - # TODO: expensive branching here should be + # TODO: expensive branching here should be # pulled into _exec_scalar() conn = self.connection c = expression.select([default.arg]).compile(bind=conn) @@ -809,7 +811,7 @@ class DefaultExecutionContext(base.ExecutionContext): if self.isinsert: self.inserted_primary_key = [ - self.compiled_parameters[0].get(c.key, None) + self.compiled_parameters[0].get(c.key, None) for c in self.compiled.\ statement.table.primary_key ] diff --git a/libs/sqlalchemy/engine/reflection.py b/libs/sqlalchemy/engine/reflection.py index 71d97e65..6d34a279 100644 --- a/libs/sqlalchemy/engine/reflection.py +++ b/libs/sqlalchemy/engine/reflection.py @@ -1,5 +1,5 @@ # engine/reflection.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -38,8 +38,8 @@ def cache(fn, self, con, *args, **kw): if info_cache is None: return fn(self, con, *args, **kw) key = ( - fn.__name__, - tuple(a for a in args if isinstance(a, basestring)), + fn.__name__, + tuple(a for a in args if isinstance(a, basestring)), tuple((k, v) for k, v in kw.iteritems() if isinstance(v, (basestring, int, float))) ) ret = info_cache.get(key) @@ -72,9 +72,9 @@ class Inspector(object): def __init__(self, bind): """Initialize a new :class:`.Inspector`. - :param bind: a :class:`~sqlalchemy.engine.base.Connectable`, - which is typically an instance of - :class:`~sqlalchemy.engine.base.Engine` or + :param bind: a :class:`~sqlalchemy.engine.base.Connectable`, + which is typically an instance of + :class:`~sqlalchemy.engine.base.Engine` or :class:`~sqlalchemy.engine.base.Connection`. 
For a dialect-specific instance of :class:`.Inspector`, see @@ -101,9 +101,9 @@ class Inspector(object): def from_engine(cls, bind): """Construct a new dialect-specific Inspector object from the given engine or connection. - :param bind: a :class:`~sqlalchemy.engine.base.Connectable`, - which is typically an instance of - :class:`~sqlalchemy.engine.base.Engine` or + :param bind: a :class:`~sqlalchemy.engine.base.Connectable`, + which is typically an instance of + :class:`~sqlalchemy.engine.base.Engine` or :class:`~sqlalchemy.engine.base.Connection`. This method differs from direct a direct constructor call of :class:`.Inspector` @@ -320,7 +320,7 @@ class Inspector(object): def reflecttable(self, table, include_columns, exclude_columns=()): """Given a Table object, load its internal constructs based on introspection. - This is the underlying method used by most dialects to produce + This is the underlying method used by most dialects to produce table reflection. Direct usage is like:: from sqlalchemy import create_engine, MetaData, Table @@ -379,7 +379,7 @@ class Inspector(object): coltype = col_d['type'] col_kw = { - 'nullable':col_d['nullable'], + 'nullable': col_d['nullable'], } for k in ('autoincrement', 'quote', 'info', 'key'): if k in col_d: @@ -414,11 +414,11 @@ class Inspector(object): # Primary keys pk_cons = self.get_pk_constraint(table_name, schema, **tblkw) if pk_cons: - pk_cols = [table.c[pk] - for pk in pk_cons['constrained_columns'] + pk_cols = [table.c[pk] + for pk in pk_cons['constrained_columns'] if pk in table.c and pk not in exclude_columns ] + [pk for pk in table.primary_key if pk.key in exclude_columns] - primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'), + primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'), *pk_cols ) @@ -452,7 +452,7 @@ class Inspector(object): table.append_constraint( sa_schema.ForeignKeyConstraint(constrained_columns, refspec, conname, link_to_name=True)) - # 
Indexes + # Indexes indexes = self.get_indexes(table_name, schema) for index_d in indexes: name = index_d['name'] @@ -465,5 +465,5 @@ class Inspector(object): "Omitting %s KEY for (%s), key covers omitted columns." % (flavor, ', '.join(columns))) continue - sa_schema.Index(name, *[table.columns[c] for c in columns], + sa_schema.Index(name, *[table.columns[c] for c in columns], **dict(unique=unique)) diff --git a/libs/sqlalchemy/engine/strategies.py b/libs/sqlalchemy/engine/strategies.py index 4d5a4b34..fab97975 100644 --- a/libs/sqlalchemy/engine/strategies.py +++ b/libs/sqlalchemy/engine/strategies.py @@ -1,5 +1,5 @@ # engine/strategies.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -24,7 +24,7 @@ strategies = {} class EngineStrategy(object): - """An adaptor that processes input arguements and produces an Engine. + """An adaptor that processes input arguments and produces an Engine. Provides a ``create`` method that receives input arguments and produces an instance of base.Engine or a subclass. 
@@ -41,7 +41,7 @@ class EngineStrategy(object): class DefaultEngineStrategy(EngineStrategy): - """Base class for built-in stratgies.""" + """Base class for built-in strategies.""" def create(self, name_or_url, **kwargs): # create url.URL object @@ -80,7 +80,7 @@ class DefaultEngineStrategy(EngineStrategy): return dialect.connect(*cargs, **cparams) except Exception, e: # Py3K - #raise exc.DBAPIError.instance(None, None, + #raise exc.DBAPIError.instance(None, None, # e, dialect.dbapi.Error, # connection_invalidated= # dialect.is_disconnect(e, None, None) @@ -180,7 +180,7 @@ PlainEngineStrategy() class ThreadLocalEngineStrategy(DefaultEngineStrategy): - """Strategy for configuring an Engine with thredlocal behavior.""" + """Strategy for configuring an Engine with threadlocal behavior.""" name = 'threadlocal' engine_cls = threadlocal.TLEngine @@ -245,8 +245,8 @@ class MockEngineStrategy(EngineStrategy): from sqlalchemy.engine import ddl ddl.SchemaDropper(self.dialect, self, **kwargs).traverse_single(entity) - def _run_visitor(self, visitorcallable, element, - connection=None, + def _run_visitor(self, visitorcallable, element, + connection=None, **kwargs): kwargs['checkfirst'] = False visitorcallable(self.dialect, self, diff --git a/libs/sqlalchemy/engine/threadlocal.py b/libs/sqlalchemy/engine/threadlocal.py index f0d6803d..c8a16272 100644 --- a/libs/sqlalchemy/engine/threadlocal.py +++ b/libs/sqlalchemy/engine/threadlocal.py @@ -1,5 +1,5 @@ # engine/threadlocal.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,7 +7,7 @@ """Provides a thread-local transactional wrapper around the root Engine class. The ``threadlocal`` module is invoked when using the ``strategy="threadlocal"`` flag -with :func:`~sqlalchemy.engine.create_engine`. 
This module is semi-private and is +with :func:`~sqlalchemy.engine.create_engine`. This module is semi-private and is invoked automatically when the threadlocal engine strategy is used. """ diff --git a/libs/sqlalchemy/engine/url.py b/libs/sqlalchemy/engine/url.py index 392ecda1..9cabc8dc 100644 --- a/libs/sqlalchemy/engine/url.py +++ b/libs/sqlalchemy/engine/url.py @@ -1,5 +1,5 @@ # engine/url.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -110,7 +110,7 @@ class URL(object): module = self._load_entry_point() if module is None: raise exc.ArgumentError( - "Could not determine dialect for '%s'." % + "Could not determine dialect for '%s'." % self.drivername) return module.dialect diff --git a/libs/sqlalchemy/event.py b/libs/sqlalchemy/event.py index cd70b3a7..dabebb81 100644 --- a/libs/sqlalchemy/event.py +++ b/libs/sqlalchemy/event.py @@ -1,5 +1,5 @@ # sqlalchemy/event.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,6 +7,7 @@ """Base event API.""" from sqlalchemy import util, exc +import weakref CANCEL = util.symbol('CANCEL') NO_RETVAL = util.symbol('NO_RETVAL') @@ -25,8 +26,8 @@ def listen(target, identifier, fn, *args, **kw): list(const.columns)[0].name ) event.listen( - UniqueConstraint, - "after_parent_attach", + UniqueConstraint, + "after_parent_attach", unique_constraint_name) """ @@ -37,7 +38,7 @@ def listen(target, identifier, fn, *args, **kw): tgt.dispatch._listen(tgt, identifier, fn, *args, **kw) return raise exc.InvalidRequestError("No such event '%s' for target '%s'" % - (identifier,target)) + (identifier, target)) 
def listens_for(target, identifier, *args, **kw): """Decorate a function as a listener for the given target + identifier. @@ -69,7 +70,7 @@ def remove(target, identifier, fn): """ for evt_cls in _registrars[identifier]: for tgt in evt_cls._accept_with(target): - tgt.dispatch._remove(identifier, tgt, fn, *args, **kw) + tgt.dispatch._remove(identifier, tgt, fn) return _registrars = util.defaultdict(list) @@ -90,12 +91,12 @@ class _UnpickleDispatch(object): raise AttributeError("No class with a 'dispatch' member present.") class _Dispatch(object): - """Mirror the event listening definitions of an Events class with + """Mirror the event listening definitions of an Events class with listener collections. - Classes which define a "dispatch" member will return a - non-instantiated :class:`._Dispatch` subclass when the member - is accessed at the class level. When the "dispatch" member is + Classes which define a "dispatch" member will return a + non-instantiated :class:`._Dispatch` subclass when the member + is accessed at the class level. When the "dispatch" member is accessed at the instance level of its owner, an instance of the :class:`._Dispatch` class is returned. @@ -103,7 +104,7 @@ class _Dispatch(object): class defined, by the :func:`._create_dispatcher_class` function. The original :class:`.Events` classes remain untouched. This decouples the construction of :class:`.Events` subclasses from - the implementation used by the event internals, and allows + the implementation used by the event internals, and allows inspecting tools like Sphinx to work in an unsurprising way against the public API. 
@@ -120,13 +121,14 @@ class _Dispatch(object): object.""" for ls in _event_descriptors(other): - getattr(self, ls.name)._update(ls, only_propagate=only_propagate) + getattr(self, ls.name).\ + for_modify(self)._update(ls, only_propagate=only_propagate) def _event_descriptors(target): return [getattr(target, k) for k in dir(target) if _is_event_name(k)] class _EventMeta(type): - """Intercept new Event subclasses and create + """Intercept new Event subclasses and create associated _Dispatch classes.""" def __init__(cls, classname, bases, dict_): @@ -134,14 +136,14 @@ class _EventMeta(type): return type.__init__(cls, classname, bases, dict_) def _create_dispatcher_class(cls, classname, bases, dict_): - """Create a :class:`._Dispatch` class corresponding to an + """Create a :class:`._Dispatch` class corresponding to an :class:`.Events` class.""" # there's all kinds of ways to do this, # i.e. make a Dispatch class that shares the '_listen' method # of the Event class, this is the straight monkeypatch. 
dispatch_base = getattr(cls, 'dispatch', _Dispatch) - cls.dispatch = dispatch_cls = type("%sDispatch" % classname, + cls.dispatch = dispatch_cls = type("%sDispatch" % classname, (dispatch_base, ), {}) dispatch_cls._listen = cls._listen dispatch_cls._clear = cls._clear @@ -180,9 +182,11 @@ class Events(object): @classmethod def _listen(cls, target, identifier, fn, propagate=False, insert=False): if insert: - getattr(target.dispatch, identifier).insert(fn, target, propagate) + getattr(target.dispatch, identifier).\ + for_modify(target.dispatch).insert(fn, target, propagate) else: - getattr(target.dispatch, identifier).append(fn, target, propagate) + getattr(target.dispatch, identifier).\ + for_modify(target.dispatch).append(fn, target, propagate) @classmethod def _remove(cls, target, identifier, fn): @@ -200,7 +204,12 @@ class _DispatchDescriptor(object): def __init__(self, fn): self.__name__ = fn.__name__ self.__doc__ = fn.__doc__ - self._clslevel = util.defaultdict(list) + self._clslevel = weakref.WeakKeyDictionary() + self._empty_listeners = weakref.WeakKeyDictionary() + + def _contains(self, cls, evt): + return cls in self._clslevel and \ + evt in self._clslevel[cls] def insert(self, obj, target, propagate): assert isinstance(target, type), \ @@ -212,6 +221,8 @@ class _DispatchDescriptor(object): if cls is not target and cls not in self._clslevel: self.update_subclass(cls) else: + if cls not in self._clslevel: + self._clslevel[cls] = [] self._clslevel[cls].insert(0, obj) def append(self, obj, target, propagate): @@ -225,15 +236,19 @@ class _DispatchDescriptor(object): if cls is not target and cls not in self._clslevel: self.update_subclass(cls) else: + if cls not in self._clslevel: + self._clslevel[cls] = [] self._clslevel[cls].append(obj) def update_subclass(self, target): + if target not in self._clslevel: + self._clslevel[target] = [] clslevel = self._clslevel[target] for cls in target.__mro__[1:]: if cls in self._clslevel: clslevel.extend([ - fn for fn - in 
self._clslevel[cls] + fn for fn + in self._clslevel[cls] if fn not in clslevel ]) @@ -242,7 +257,8 @@ class _DispatchDescriptor(object): while stack: cls = stack.pop(0) stack.extend(cls.__subclasses__()) - self._clslevel[cls].remove(obj) + if cls in self._clslevel: + self._clslevel[cls].remove(obj) def clear(self): """Clear all class level listeners""" @@ -250,18 +266,91 @@ class _DispatchDescriptor(object): for dispatcher in self._clslevel.values(): dispatcher[:] = [] + def for_modify(self, obj): + """Return an event collection which can be modified. + + For _DispatchDescriptor at the class level of + a dispatcher, this returns self. + + """ + return self + def __get__(self, obj, cls): if obj is None: return self - obj.__dict__[self.__name__] = result = \ - _ListenerCollection(self, obj._parent_cls) + elif obj._parent_cls in self._empty_listeners: + ret = self._empty_listeners[obj._parent_cls] + else: + self._empty_listeners[obj._parent_cls] = ret = \ + _EmptyListener(self, obj._parent_cls) + # assigning it to __dict__ means + # memoized for fast re-access. but more memory. + obj.__dict__[self.__name__] = ret + return ret + +class _EmptyListener(object): + """Serves as a class-level interface to the events + served by a _DispatchDescriptor, when there are no + instance-level events present. + + Is replaced by _ListenerCollection when instance-level + events are added. + + """ + def __init__(self, parent, target_cls): + if target_cls not in parent._clslevel: + parent.update_subclass(target_cls) + self.parent = parent + self.parent_listeners = parent._clslevel[target_cls] + self.name = parent.__name__ + self.propagate = frozenset() + self.listeners = () + + def for_modify(self, obj): + """Return an event collection which can be modified. + + For _EmptyListener at the instance level of + a dispatcher, this generates a new + _ListenerCollection, applies it to the instance, + and returns it. 
+ + """ + obj.__dict__[self.name] = result = _ListenerCollection( + self.parent, obj._parent_cls) return result + def _needs_modify(self, *args, **kw): + raise NotImplementedError("need to call for_modify()") + + exec_once = insert = append = remove = clear = _needs_modify + + def __call__(self, *args, **kw): + """Execute this event.""" + + for fn in self.parent_listeners: + fn(*args, **kw) + + def __len__(self): + return len(self.parent_listeners) + + def __iter__(self): + return iter(self.parent_listeners) + + def __getitem__(self, index): + return (self.parent_listeners)[index] + + def __nonzero__(self): + return bool(self.parent_listeners) + + class _ListenerCollection(object): """Instance-level attributes on instances of :class:`._Dispatch`. Represents a collection of listeners. + As of 0.7.9, _ListenerCollection is only first + created via the _EmptyListener.for_modify() method. + """ _exec_once = False @@ -274,6 +363,15 @@ class _ListenerCollection(object): self.listeners = [] self.propagate = set() + def for_modify(self, obj): + """Return an event collection which can be modified. + + For _ListenerCollection at the instance level of + a dispatcher, this returns self. + + """ + return self + def exec_once(self, *args, **kw): """Execute this event, but only if it has not been executed already for this collection.""" @@ -294,11 +392,9 @@ class _ListenerCollection(object): # but this allows class-level listeners to be added # at any point. # - # alternatively, _DispatchDescriptor could notify - # all _ListenerCollection objects, but then we move - # to a higher memory model, i.e.weakrefs to all _ListenerCollection - # objects, the _DispatchDescriptor collection repeated - # for all instances. + # In the absense of instance-level listeners, + # we stay with the _EmptyListener object when called + # at the instance level. 
def __len__(self): return len(self.parent_listeners + self.listeners) @@ -319,8 +415,8 @@ class _ListenerCollection(object): existing_listeners = self.listeners existing_listener_set = set(existing_listeners) self.propagate.update(other.propagate) - existing_listeners.extend([l for l - in other.listeners + existing_listeners.extend([l for l + in other.listeners if l not in existing_listener_set and not only_propagate or l in self.propagate ]) @@ -347,7 +443,7 @@ class _ListenerCollection(object): self.propagate.clear() class dispatcher(object): - """Descriptor used by target classes to + """Descriptor used by target classes to deliver the _Dispatch class at the class level and produce new _Dispatch instances for target instances. diff --git a/libs/sqlalchemy/events.py b/libs/sqlalchemy/events.py index 504dfe15..e7aa34f2 100644 --- a/libs/sqlalchemy/events.py +++ b/libs/sqlalchemy/events.py @@ -1,5 +1,5 @@ # sqlalchemy/events.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -17,11 +17,11 @@ class DDLEvents(event.Events): that is, :class:`.SchemaItem` and :class:`.SchemaEvent` subclasses, including :class:`.MetaData`, :class:`.Table`, :class:`.Column`. - + :class:`.MetaData` and :class:`.Table` support events specifically regarding when CREATE and DROP - DDL is emitted to the database. - + DDL is emitted to the database. 
+ Attachment events are also provided to customize behavior whenever a child schema element is associated with a parent, such as, when a :class:`.Column` is associated @@ -37,14 +37,14 @@ class DDLEvents(event.Events): some_table = Table('some_table', m, Column('data', Integer)) def after_create(target, connection, **kw): - connection.execute("ALTER TABLE %s SET name=foo_%s" % + connection.execute("ALTER TABLE %s SET name=foo_%s" % (target.name, target.name)) event.listen(some_table, "after_create", after_create) - DDL events integrate closely with the + DDL events integrate closely with the :class:`.DDL` class and the :class:`.DDLElement` hierarchy - of DDL clause constructs, which are themselves appropriate + of DDL clause constructs, which are themselves appropriate as listener callables:: from sqlalchemy import DDL @@ -81,7 +81,7 @@ class DDLEvents(event.Events): to the event. The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level - event, the checkfirst flag, and other + event, the checkfirst flag, and other elements used by internal events. """ @@ -97,7 +97,7 @@ class DDLEvents(event.Events): to the event. The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level - event, the checkfirst flag, and other + event, the checkfirst flag, and other elements used by internal events. """ @@ -113,7 +113,7 @@ class DDLEvents(event.Events): to the event. The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level - event, the checkfirst flag, and other + event, the checkfirst flag, and other elements used by internal events. """ @@ -129,52 +129,52 @@ class DDLEvents(event.Events): to the event. 
The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level - event, the checkfirst flag, and other + event, the checkfirst flag, and other elements used by internal events. """ def before_parent_attach(self, target, parent): - """Called before a :class:`.SchemaItem` is associated with + """Called before a :class:`.SchemaItem` is associated with a parent :class:`.SchemaItem`. - + :param target: the target object :param parent: the parent to which the target is being attached. - + :func:`.event.listen` also accepts a modifier for this event: - + :param propagate=False: When True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`.Table.tometadata` is used. - + """ def after_parent_attach(self, target, parent): - """Called after a :class:`.SchemaItem` is associated with + """Called after a :class:`.SchemaItem` is associated with a parent :class:`.SchemaItem`. :param target: the target object :param parent: the parent to which the target is being attached. - + :func:`.event.listen` also accepts a modifier for this event: - + :param propagate=False: When True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`.Table.tometadata` is used. - + """ def column_reflect(self, table, column_info): """Called for each unit of 'column info' retrieved when - a :class:`.Table` is being reflected. - + a :class:`.Table` is being reflected. + The dictionary of column information as returned by the dialect is passed, and can be modified. The dictionary - is that returned in each element of the list returned - by :meth:`.reflection.Inspector.get_columns`. - + is that returned in each element of the list returned + by :meth:`.reflection.Inspector.get_columns`. 
+ The event is called before any action is taken against this dictionary, and the contents can be modified. The :class:`.Column` specific arguments ``info``, ``key``, @@ -182,45 +182,45 @@ class DDLEvents(event.Events): will be passed to the constructor of :class:`.Column`. Note that this event is only meaningful if either - associated with the :class:`.Table` class across the + associated with the :class:`.Table` class across the board, e.g.:: - + from sqlalchemy.schema import Table from sqlalchemy import event def listen_for_reflect(table, column_info): "receive a column_reflect event" # ... - + event.listen( - Table, - 'column_reflect', + Table, + 'column_reflect', listen_for_reflect) - + ...or with a specific :class:`.Table` instance using the ``listeners`` argument:: - + def listen_for_reflect(table, column_info): "receive a column_reflect event" # ... - + t = Table( - 'sometable', + 'sometable', autoload=True, listeners=[ ('column_reflect', listen_for_reflect) ]) - + This because the reflection process initiated by ``autoload=True`` completes within the scope of the constructor for :class:`.Table`. - + """ class SchemaEventTarget(object): """Base class for elements that are the targets of :class:`.DDLEvents` events. - + This includes :class:`.SchemaItem` as well as :class:`.SchemaType`. - + """ dispatch = event.dispatcher(DDLEvents) @@ -230,9 +230,9 @@ class SchemaEventTarget(object): raise NotImplementedError() def _set_parent_with_dispatch(self, parent): - self.dispatch.before_parent_attach(self, parent) - self._set_parent(parent) - self.dispatch.after_parent_attach(self, parent) + self.dispatch.before_parent_attach(self, parent) + self._set_parent(parent) + self.dispatch.after_parent_attach(self, parent) class PoolEvents(event.Events): """Available events for :class:`.Pool`. @@ -350,10 +350,10 @@ class ConnectionEvents(event.Events): Some events allow modifiers to the listen() function. 
- :param retval=False: Applies to the :meth:`.before_execute` and + :param retval=False: Applies to the :meth:`.before_execute` and :meth:`.before_cursor_execute` events only. When True, the user-defined event function must have a return value, which - is a tuple of parameters that replace the given statement + is a tuple of parameters that replace the given statement and parameters. See those methods for a description of specific return arguments. @@ -372,9 +372,9 @@ class ConnectionEvents(event.Events): fn = wrap elif identifier == 'before_cursor_execute': orig_fn = fn - def wrap(conn, cursor, statement, + def wrap(conn, cursor, statement, parameters, context, executemany): - orig_fn(conn, cursor, statement, + orig_fn(conn, cursor, statement, parameters, context, executemany) return statement, parameters fn = wrap @@ -393,14 +393,44 @@ class ConnectionEvents(event.Events): def after_execute(self, conn, clauseelement, multiparams, params, result): """Intercept high level execute() events.""" - def before_cursor_execute(self, conn, cursor, statement, + def before_cursor_execute(self, conn, cursor, statement, parameters, context, executemany): """Intercept low-level cursor execute() events.""" - def after_cursor_execute(self, conn, cursor, statement, + def after_cursor_execute(self, conn, cursor, statement, parameters, context, executemany): """Intercept low-level cursor execute() events.""" + def dbapi_error(self, conn, cursor, statement, parameters, + context, exception): + """Intercept a raw DBAPI error. + + This event is called with the DBAPI exception instance + received from the DBAPI itself, *before* SQLAlchemy wraps the + exception with it's own exception wrappers, and before any + other operations are performed on the DBAPI cursor; the + existing transaction remains in effect as well as any state + on the cursor. + + The use case here is to inject low-level exception handling + into an :class:`.Engine`, typically for logging and + debugging purposes. 
In general, user code should **not** modify + any state or throw any exceptions here as this will + interfere with SQLAlchemy's cleanup and error handling + routines. + + Subsequent to this hook, SQLAlchemy may attempt any + number of operations on the connection/cursor, including + closing the cursor, rolling back of the transaction in the + case of connectionless execution, and disposing of the entire + connection pool if a "disconnect" was detected. The + exception is then wrapped in a SQLAlchemy DBAPI exception + wrapper and re-thrown. + + .. versionadded:: 0.7.7 + + """ + def begin(self, conn): """Intercept begin() events.""" diff --git a/libs/sqlalchemy/exc.py b/libs/sqlalchemy/exc.py index 91ffc281..febee3fe 100644 --- a/libs/sqlalchemy/exc.py +++ b/libs/sqlalchemy/exc.py @@ -1,5 +1,5 @@ # sqlalchemy/exc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -28,21 +28,21 @@ class ArgumentError(SQLAlchemyError): class CircularDependencyError(SQLAlchemyError): """Raised by topological sorts when a circular dependency is detected. - + There are two scenarios where this error occurs: - + * In a Session flush operation, if two objects are mutually dependent - on each other, they can not be inserted or deleted via INSERT or + on each other, they can not be inserted or deleted via INSERT or DELETE statements alone; an UPDATE will be needed to post-associate or pre-deassociate one of the foreign key constrained values. - The ``post_update`` flag described at :ref:`post_update` can resolve + The ``post_update`` flag described at :ref:`post_update` can resolve this cycle. 
* In a :meth:`.MetaData.create_all`, :meth:`.MetaData.drop_all`, :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey` or :class:`.ForeignKeyConstraint` objects mutually refer to each other. Apply the ``use_alter=True`` flag to one or both, see :ref:`use_alter`. - + """ def __init__(self, message, cycles, edges, msg=None): if msg is None: @@ -54,7 +54,7 @@ class CircularDependencyError(SQLAlchemyError): self.edges = edges def __reduce__(self): - return self.__class__, (None, self.cycles, + return self.__class__, (None, self.cycles, self.edges, self.args[0]) class CompileError(SQLAlchemyError): @@ -70,9 +70,9 @@ class DisconnectionError(SQLAlchemyError): """A disconnect is detected on a raw DB-API connection. This error is raised and consumed internally by a connection pool. It can - be raised by the :meth:`.PoolEvents.checkout` event + be raised by the :meth:`.PoolEvents.checkout` event so that the host pool forces a retry; the exception will be caught - three times in a row before the pool gives up and raises + three times in a row before the pool gives up and raises :class:`~sqlalchemy.exc.InvalidRequestError` regarding the connection attempt. """ @@ -121,7 +121,7 @@ class NoReferencedColumnError(NoReferenceError): self.column_name = cname def __reduce__(self): - return self.__class__, (self.args[0], self.table_name, + return self.__class__, (self.args[0], self.table_name, self.column_name) class NoSuchTableError(InvalidRequestError): @@ -136,20 +136,20 @@ class DontWrapMixin(object): """A mixin class which, when applied to a user-defined Exception class, will not be wrapped inside of :class:`.StatementError` if the error is emitted within the process of executing a statement. 
- + E.g.:: from sqlalchemy.exc import DontWrapMixin - + class MyCustomException(Exception, DontWrapMixin): pass - + class MySpecialType(TypeDecorator): impl = String - + def process_bind_param(self, value, dialect): if value == 'invalid': raise MyCustomException("invalid!") - + """ import sys if sys.version_info < (2, 5): @@ -161,15 +161,15 @@ UnmappedColumnError = None class StatementError(SQLAlchemyError): """An error occurred during execution of a SQL statement. - + :class:`StatementError` wraps the exception raised during execution, and features :attr:`.statement` and :attr:`.params` attributes which supply context regarding the specifics of the statement which had an issue. - The wrapped exception object is available in + The wrapped exception object is available in the :attr:`.orig` attribute. - + """ statement = None @@ -188,7 +188,7 @@ class StatementError(SQLAlchemyError): self.orig = orig def __reduce__(self): - return self.__class__, (self.args[0], self.statement, + return self.__class__, (self.args[0], self.statement, self.params, self.orig) def __str__(self): @@ -211,7 +211,7 @@ class DBAPIError(StatementError): :class:`DBAPIError` features :attr:`~.StatementError.statement` and :attr:`~.StatementError.params` attributes which supply context regarding - the specifics of the statement which had an issue, for the + the specifics of the statement which had an issue, for the typical case when the error was raised within the context of emitting a SQL statement. 
@@ -221,8 +221,8 @@ class DBAPIError(StatementError): """ @classmethod - def instance(cls, statement, params, - orig, + def instance(cls, statement, params, + orig, dbapi_base_err, connection_invalidated=False): # Don't ever wrap these, just return them directly as if @@ -236,7 +236,7 @@ class DBAPIError(StatementError): if not isinstance(orig, dbapi_base_err) and statement: return StatementError( "%s (original cause: %s)" % ( - str(orig), + str(orig), traceback.format_exception_only(orig.__class__, orig)[-1].strip() ), statement, params, orig) @@ -247,7 +247,7 @@ class DBAPIError(StatementError): return cls(statement, params, orig, connection_invalidated) def __reduce__(self): - return self.__class__, (self.statement, self.params, + return self.__class__, (self.statement, self.params, self.orig, self.connection_invalidated) def __init__(self, statement, params, orig, connection_invalidated=False): @@ -258,7 +258,7 @@ class DBAPIError(StatementError): except Exception, e: text = 'Error in str() of DB-API-generated exception: ' + str(e) StatementError.__init__( - self, + self, '(%s) %s' % (orig.__class__.__name__, text), statement, params, diff --git a/libs/sqlalchemy/ext/__init__.py b/libs/sqlalchemy/ext/__init__.py index 7558ac26..4a6e1952 100644 --- a/libs/sqlalchemy/ext/__init__.py +++ b/libs/sqlalchemy/ext/__init__.py @@ -1,5 +1,5 @@ # ext/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/ext/associationproxy.py b/libs/sqlalchemy/ext/associationproxy.py index 8b3416ea..d5b0ab69 100644 --- a/libs/sqlalchemy/ext/associationproxy.py +++ b/libs/sqlalchemy/ext/associationproxy.py @@ -1,5 +1,5 @@ # ext/associationproxy.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 
the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -24,17 +24,17 @@ from sqlalchemy.sql import not_ def association_proxy(target_collection, attr, **kw): """Return a Python property implementing a view of a target - attribute which references an attribute on members of the + attribute which references an attribute on members of the target. - + The returned value is an instance of :class:`.AssociationProxy`. - + Implements a Python property representing a relationship as a collection of simpler values, or a scalar value. The proxied property will mimic the collection type of the target (list, dict or set), or, in the case of a one to one relationship, a simple scalar value. - :param target_collection: Name of the attribute we'll proxy to. + :param target_collection: Name of the attribute we'll proxy to. This attribute is typically mapped by :func:`~sqlalchemy.orm.relationship` to link to a target collection, but can also be a many-to-one or non-scalar relationship. @@ -80,15 +80,15 @@ class AssociationProxy(object): """A descriptor that presents a read/write view of an object attribute.""" def __init__(self, target_collection, attr, creator=None, - getset_factory=None, proxy_factory=None, + getset_factory=None, proxy_factory=None, proxy_bulk_set=None): """Construct a new :class:`.AssociationProxy`. - + The :func:`.association_proxy` function is provided as the usual entrypoint here, though :class:`.AssociationProxy` can be instantiated and/or subclassed directly. - :param target_collection: Name of the collection we'll proxy to, + :param target_collection: Name of the collection we'll proxy to, usually created with :func:`.relationship`. :param attr: Attribute on the collected instances we'll proxy for. 
For example, @@ -120,7 +120,7 @@ class AssociationProxy(object): collection implementation, you may supply a factory function to produce those collections. Only applicable to non-scalar relationships. - :param proxy_bulk_set: Optional, use with proxy_factory. See + :param proxy_bulk_set: Optional, use with proxy_factory. See the _set() method for details. """ @@ -140,11 +140,11 @@ class AssociationProxy(object): def remote_attr(self): """The 'remote' :class:`.MapperProperty` referenced by this :class:`.AssociationProxy`. - - New in 0.7.3. - + + .. versionadded:: 0.7.3 + See also: - + :attr:`.AssociationProxy.attr` :attr:`.AssociationProxy.local_attr` @@ -157,10 +157,10 @@ class AssociationProxy(object): """The 'local' :class:`.MapperProperty` referenced by this :class:`.AssociationProxy`. - New in 0.7.3. - + .. versionadded:: 0.7.3 + See also: - + :attr:`.AssociationProxy.attr` :attr:`.AssociationProxy.remote_attr` @@ -171,20 +171,20 @@ class AssociationProxy(object): @property def attr(self): """Return a tuple of ``(local_attr, remote_attr)``. - - This attribute is convenient when specifying a join + + This attribute is convenient when specifying a join using :meth:`.Query.join` across two relationships:: - + sess.query(Parent).join(*Parent.proxied.attr) - New in 0.7.3. - + .. versionadded:: 0.7.3 + See also: - + :attr:`.AssociationProxy.local_attr` :attr:`.AssociationProxy.remote_attr` - + """ return (self.local_attr, self.remote_attr) @@ -195,10 +195,10 @@ class AssociationProxy(object): @util.memoized_property def target_class(self): """The intermediary class handled by this :class:`.AssociationProxy`. - + Intercepted append/set/assignment events will result in the generation of new instances of this class. - + """ return self._get_property().mapper.class_ @@ -333,10 +333,10 @@ class AssociationProxy(object): def any(self, criterion=None, **kwargs): """Produce a proxied 'any' expression using EXISTS. 
- + This expression will be a composed product using the :meth:`.RelationshipProperty.Comparator.any` - and/or :meth:`.RelationshipProperty.Comparator.has` + and/or :meth:`.RelationshipProperty.Comparator.has` operators of the underlying proxied attributes. """ @@ -360,12 +360,12 @@ class AssociationProxy(object): def has(self, criterion=None, **kwargs): """Produce a proxied 'has' expression using EXISTS. - + This expression will be a composed product using the :meth:`.RelationshipProperty.Comparator.any` - and/or :meth:`.RelationshipProperty.Comparator.has` + and/or :meth:`.RelationshipProperty.Comparator.has` operators of the underlying proxied attributes. - + """ return self._comparator.has( @@ -375,7 +375,7 @@ class AssociationProxy(object): def contains(self, obj): """Produce a proxied 'contains' expression using EXISTS. - + This expression will be a composed product using the :meth:`.RelationshipProperty.Comparator.any` , :meth:`.RelationshipProperty.Comparator.has`, diff --git a/libs/sqlalchemy/ext/compiler.py b/libs/sqlalchemy/ext/compiler.py index 47221fa6..9bd9b42e 100644 --- a/libs/sqlalchemy/ext/compiler.py +++ b/libs/sqlalchemy/ext/compiler.py @@ -1,5 +1,5 @@ # ext/compiler.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -91,9 +91,9 @@ Produces:: "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z FROM mytable WHERE mytable.x > :x_1)" -.. note:: +.. note:: - The above ``InsertFromSelect`` construct probably wants to have "autocommit" + The above ``InsertFromSelect`` construct probably wants to have "autocommit" enabled. See :ref:`enabling_compiled_autocommit` for this step. 
Cross Compiling between SQL and DDL compilers @@ -118,12 +118,12 @@ Enabling Autocommit on a Construct Recall from the section :ref:`autocommit` that the :class:`.Engine`, when asked to execute a construct in the absence of a user-defined transaction, detects if the given -construct represents DML or DDL, that is, a data modification or data definition statement, which +construct represents DML or DDL, that is, a data modification or data definition statement, which requires (or may require, in the case of DDL) that the transaction generated by the DBAPI be committed -(recall that DBAPI always has a transaction going on regardless of what SQLAlchemy does). Checking +(recall that DBAPI always has a transaction going on regardless of what SQLAlchemy does). Checking for this is actually accomplished by checking for the "autocommit" execution option on the construct. When building a construct like -an INSERT derivation, a new DDL type, or perhaps a stored procedure that alters data, the "autocommit" +an INSERT derivation, a new DDL type, or perhaps a stored procedure that alters data, the "autocommit" option needs to be set in order for the statement to function with "connectionless" execution (as described in :ref:`dbengine_implicit`). @@ -146,13 +146,13 @@ can be used, which already is a subclass of :class:`.Executable`, :class:`.Claus class MyInsertThing(UpdateBase): def __init__(self, ...): ... - - - + + + DDL elements that subclass :class:`.DDLElement` already have the "autocommit" flag turned on. - + Changing the default compilation of existing constructs @@ -163,7 +163,7 @@ the compilation of a built in SQL construct, the @compiles decorator is invoked the appropriate class (be sure to use the class, i.e. ``Insert`` or ``Select``, instead of the creation function such as ``insert()`` or ``select()``). 
Within the new compilation function, to get at the "original" compilation routine, -use the appropriate visit_XXX method - this because compiler.process() will call upon the +use the appropriate visit_XXX method - this because compiler.process() will call upon the overriding routine and cause an endless loop. Such as, to add "prefix" to all insert statements:: from sqlalchemy.sql.expression import Insert @@ -205,7 +205,7 @@ A synopsis is as follows: expression class. Any SQL expression can be derived from this base, and is probably the best choice for longer constructs such as specialized INSERT statements. - + * :class:`~sqlalchemy.sql.expression.ColumnElement` - The root of all "column-like" elements. Anything that you'd place in the "columns" clause of a SELECT statement (as well as order by and group by) can derive from this - @@ -218,7 +218,7 @@ A synopsis is as follows: class timestamp(ColumnElement): type = TIMESTAMP() - + * :class:`~sqlalchemy.sql.expression.FunctionElement` - This is a hybrid of a ``ColumnElement`` and a "from clause" like object, and represents a SQL function or stored procedure type of call. Since most databases support @@ -250,7 +250,7 @@ A synopsis is as follows: * :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which should be used with any expression class that represents a "standalone" SQL statement that - can be passed directly to an ``execute()`` method. It is already implicit + can be passed directly to an ``execute()`` method. It is already implicit within ``DDLElement`` and ``FunctionElement``. Further Examples @@ -263,15 +263,15 @@ A function that works like "CURRENT_TIMESTAMP" except applies the appropriate co so that the time is in UTC time. Timestamps are best stored in relational databases as UTC, without time zones. 
UTC so that your database doesn't think time has gone backwards in the hour when daylight savings ends, without timezones because timezones -are like character encodings - they're best applied only at the endpoints of an +are like character encodings - they're best applied only at the endpoints of an application (i.e. convert to UTC upon user input, re-apply desired timezone upon display). For Postgresql and Microsoft SQL Server:: - + from sqlalchemy.sql import expression from sqlalchemy.ext.compiler import compiles from sqlalchemy.types import DateTime - + class utcnow(expression.FunctionElement): type = DateTime() @@ -284,7 +284,7 @@ For Postgresql and Microsoft SQL Server:: return "GETUTCDATE()" Example usage:: - + from sqlalchemy import ( Table, Column, Integer, String, DateTime, MetaData ) @@ -299,8 +299,8 @@ Example usage:: ------------------- The "GREATEST" function is given any number of arguments and returns the one that is -of the highest value - it's equivalent to Python's ``max`` function. A SQL -standard version versus a CASE based version which only accommodates two +of the highest value - it's equivalent to Python's ``max`` function. 
A SQL +standard version versus a CASE based version which only accommodates two arguments:: from sqlalchemy.sql import expression @@ -332,7 +332,7 @@ Example usage:: Session.query(Account).\\ filter( greatest( - Account.checking_balance, + Account.checking_balance, Account.savings_balance) > 10000 ) @@ -340,10 +340,10 @@ Example usage:: ------------------ Render a "false" constant expression, rendering as "0" on platforms that don't have a "false" constant:: - + from sqlalchemy.sql import expression from sqlalchemy.ext.compiler import compiles - + class sql_false(expression.ColumnElement): pass @@ -358,14 +358,14 @@ Render a "false" constant expression, rendering as "0" on platforms that don't h return "0" Example usage:: - + from sqlalchemy import select, union_all exp = union_all( select([users.c.name, sql_false().label("enrolled")]), select([customers.c.name, customers.c.enrolled]) ) - + """ from sqlalchemy import exc diff --git a/libs/sqlalchemy/ext/declarative.py b/libs/sqlalchemy/ext/declarative.py index faf575da..b0876f0d 100755 --- a/libs/sqlalchemy/ext/declarative.py +++ b/libs/sqlalchemy/ext/declarative.py @@ -1,5 +1,5 @@ # ext/declarative.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -51,7 +51,7 @@ automatically named with the name of the attribute to which they are assigned. To name columns explicitly with a name distinct from their mapped attribute, -just give the column a name. Below, column "some_table_id" is mapped to the +just give the column a name. 
Below, column "some_table_id" is mapped to the "id" attribute of `SomeClass`, but in SQL will be represented as "some_table_id":: class SomeClass(Base): @@ -68,7 +68,7 @@ added to the underlying :class:`.Table` and Classes which are constructed using declarative can interact freely with classes that are mapped explicitly with :func:`mapper`. -It is recommended, though not required, that all tables +It is recommended, though not required, that all tables share the same underlying :class:`~sqlalchemy.schema.MetaData` object, so that string-configured :class:`~sqlalchemy.schema.ForeignKey` references can be resolved without issue. @@ -86,21 +86,11 @@ CREATE statements for all tables:: engine = create_engine('sqlite://') Base.metadata.create_all(engine) -The usual techniques of associating :class:`.MetaData:` with :class:`.Engine` -apply, such as assigning to the ``bind`` attribute:: - - Base.metadata.bind = create_engine('sqlite://') - -To associate the engine with the :func:`declarative_base` at time -of construction, the ``bind`` argument is accepted:: - - Base = declarative_base(bind=create_engine('sqlite://')) - :func:`declarative_base` can also receive a pre-existing :class:`.MetaData` object, which allows a -declarative setup to be associated with an already +declarative setup to be associated with an already existing traditional collection of :class:`~sqlalchemy.schema.Table` -objects:: +objects:: mymetadata = MetaData() Base = declarative_base(metadata=mymetadata) @@ -113,7 +103,7 @@ feature that the class specified to :func:`~sqlalchemy.orm.relationship` may be a string name. 
The "class registry" associated with ``Base`` is used at mapper compilation time to resolve the name into the actual class object, which is expected to have been defined once the mapper -configuration is used:: +configuration is used:: class User(Base): __tablename__ = 'users' @@ -131,7 +121,7 @@ configuration is used:: Column constructs, since they are just that, are immediately usable, as below where we define a primary join condition on the ``Address`` -class using them:: +class using them:: class Address(Base): __tablename__ = 'addresses' @@ -148,15 +138,15 @@ evaluated as Python expressions. The full namespace available within this evaluation includes all classes mapped for this declarative base, as well as the contents of the ``sqlalchemy`` package, including expression functions like :func:`~sqlalchemy.sql.expression.desc` and -:attr:`~sqlalchemy.sql.expression.func`:: +:attr:`~sqlalchemy.sql.expression.func`:: class User(Base): # .... addresses = relationship("Address", - order_by="desc(Address.email)", + order_by="desc(Address.email)", primaryjoin="Address.user_id==User.id") -As an alternative to string-based attributes, attributes may also be +As an alternative to string-based attributes, attributes may also be defined after all classes have been created. Just add them to the target class after the fact:: @@ -169,8 +159,8 @@ Configuring Many-to-Many Relationships Many-to-many relationships are also declared in the same way with declarative as with traditional mappings. The ``secondary`` argument to -:func:`.relationship` is as usual passed a -:class:`.Table` object, which is typically declared in the +:func:`.relationship` is as usual passed a +:class:`.Table` object, which is typically declared in the traditional way. 
The :class:`.Table` usually shares the :class:`.MetaData` object used by the declarative base:: @@ -185,7 +175,7 @@ the :class:`.MetaData` object used by the declarative base:: id = Column(Integer, primary_key=True) keywords = relationship("Keyword", secondary=keywords) -Like other :func:`.relationship` arguments, a string is accepted as well, +Like other :func:`.relationship` arguments, a string is accepted as well, passing the string name of the table as defined in the ``Base.metadata.tables`` collection:: @@ -194,7 +184,7 @@ collection:: id = Column(Integer, primary_key=True) keywords = relationship("Keyword", secondary="keywords") -As with traditional mapping, its generally not a good idea to use +As with traditional mapping, its generally not a good idea to use a :class:`.Table` as the "secondary" argument which is also mapped to a class, unless the :class:`.relationship` is declared with ``viewonly=True``. Otherwise, the unit-of-work system may attempt duplicate INSERT and @@ -219,7 +209,7 @@ This attribute accommodates both positional as well as keyword arguments that are normally sent to the :class:`~sqlalchemy.schema.Table` constructor. The attribute can be specified in one of two forms. One is as a -dictionary:: +dictionary:: class MyClass(Base): __tablename__ = 'sometable' @@ -235,7 +225,7 @@ The other, a tuple, where each argument is positional UniqueConstraint('foo'), ) -Keyword arguments can be specified with the above form by +Keyword arguments can be specified with the above form by specifying the last argument as a dictionary:: class MyClass(Base): @@ -253,7 +243,7 @@ As an alternative to ``__tablename__``, a direct :class:`~sqlalchemy.schema.Table` construct may be used. 
The :class:`~sqlalchemy.schema.Column` objects, which in this case require their names, will be added to the mapping just like a regular mapping -to a table:: +to a table:: class MyClass(Base): __table__ = Table('my_table', Base.metadata, @@ -277,9 +267,9 @@ and pass it to declarative classes:: class Address(Base): __table__ = metadata.tables['address'] -Some configuration schemes may find it more appropriate to use ``__table__``, -such as those which already take advantage of the data-driven nature of -:class:`.Table` to customize and/or automate schema definition. +Some configuration schemes may find it more appropriate to use ``__table__``, +such as those which already take advantage of the data-driven nature of +:class:`.Table` to customize and/or automate schema definition. Note that when the ``__table__`` approach is used, the object is immediately usable as a plain :class:`.Table` within the class declaration body itself, @@ -292,15 +282,15 @@ by using the ``id`` column in the ``primaryjoin`` condition of a :func:`.relatio Column('name', String(50)) ) - widgets = relationship(Widget, + widgets = relationship(Widget, primaryjoin=Widget.myclass_id==__table__.c.id) -Similarly, mapped attributes which refer to ``__table__`` can be placed inline, +Similarly, mapped attributes which refer to ``__table__`` can be placed inline, as below where we assign the ``name`` column to the attribute ``_name``, generating a synonym for ``name``:: from sqlalchemy.ext.declarative import synonym_for - + class MyClass(Base): __table__ = Table('my_table', Base.metadata, Column('id', Integer, primary_key=True), @@ -320,14 +310,14 @@ It's easy to set up a :class:`.Table` that uses ``autoload=True`` in conjunction with a mapped class:: class MyClass(Base): - __table__ = Table('mytable', Base.metadata, + __table__ = Table('mytable', Base.metadata, autoload=True, autoload_with=some_engine) -However, one improvement that can be made here is to not -require the :class:`.Engine` to be 
available when classes are +However, one improvement that can be made here is to not +require the :class:`.Engine` to be available when classes are being first declared. To achieve this, use the example -described at :ref:`examples_declarative_reflection` to build a -declarative base that sets up mappings only after a special +described at :ref:`examples_declarative_reflection` to build a +declarative base that sets up mappings only after a special ``prepare(engine)`` step is called:: Base = declarative_base(cls=DeclarativeReflectedBase) @@ -339,14 +329,14 @@ declarative base that sets up mappings only after a special class Bar(Base): __tablename__ = 'bar' - # illustrate overriding of "bar.foo_id" to have + # illustrate overriding of "bar.foo_id" to have # a foreign key constraint otherwise not # reflected, such as when using MySQL foo_id = Column(Integer, ForeignKey('foo.id')) Base.prepare(e) - + Mapper Configuration ==================== @@ -354,7 +344,7 @@ Declarative makes use of the :func:`~.orm.mapper` function internally when it creates the mapping to the declared table. The options for :func:`~.orm.mapper` are passed directly through via the ``__mapper_args__`` class attribute. As always, arguments which reference locally -mapped columns can reference them directly from within the +mapped columns can reference them directly from within the class declaration:: from datetime import datetime @@ -383,7 +373,7 @@ as declarative will determine this from the class itself. 
The various Joined Table Inheritance ~~~~~~~~~~~~~~~~~~~~~~~~ -Joined table inheritance is defined as a subclass that defines its own +Joined table inheritance is defined as a subclass that defines its own table:: class Person(Base): @@ -400,8 +390,8 @@ table:: Note that above, the ``Engineer.id`` attribute, since it shares the same attribute name as the ``Person.id`` attribute, will in fact -represent the ``people.id`` and ``engineers.id`` columns together, and -will render inside a query as ``"people.id"``. +represent the ``people.id`` and ``engineers.id`` columns together, +with the "Engineer.id" column taking precedence if queried directly. To provide the ``Engineer`` class with an attribute that represents only the ``engineers.id`` column, give it a different attribute name:: @@ -412,12 +402,17 @@ only the ``engineers.id`` column, give it a different attribute name:: primary_key=True) primary_language = Column(String(50)) + +.. versionchanged:: 0.7 joined table inheritance favors the subclass + column over that of the superclass, such as querying above + for ``Engineer.id``. Prior to 0.7 this was the reverse. + Single Table Inheritance ~~~~~~~~~~~~~~~~~~~~~~~~ Single table inheritance is defined as a subclass that does not have its own table; you just leave out the ``__table__`` and ``__tablename__`` -attributes:: +attributes:: class Person(Base): __tablename__ = 'people' @@ -506,29 +501,31 @@ before the class is built:: Using the Concrete Helpers ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -New helper classes released in 0.7.3 provides a simpler pattern for concrete inheritance. +Helper classes provides a simpler pattern for concrete inheritance. With these objects, the ``__declare_last__`` helper is used to configure the "polymorphic" loader for the mapper after all subclasses have been declared. +.. 
versionadded:: 0.7.3 + An abstract base can be declared using the :class:`.AbstractConcreteBase` class:: from sqlalchemy.ext.declarative import AbstractConcreteBase - + class Employee(AbstractConcreteBase, Base): pass To have a concrete ``employee`` table, use :class:`.ConcreteBase` instead:: from sqlalchemy.ext.declarative import ConcreteBase - + class Employee(ConcreteBase, Base): __tablename__ = 'employee' employee_id = Column(Integer, primary_key=True) name = Column(String(50)) __mapper_args__ = { - 'polymorphic_identity':'employee', + 'polymorphic_identity':'employee', 'concrete':True} - + Either ``Employee`` base can be used in the normal fashion:: @@ -538,7 +535,7 @@ Either ``Employee`` base can be used in the normal fashion:: name = Column(String(50)) manager_data = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity':'manager', + 'polymorphic_identity':'manager', 'concrete':True} class Engineer(Employee): @@ -546,7 +543,7 @@ Either ``Employee`` base can be used in the normal fashion:: employee_id = Column(Integer, primary_key=True) name = Column(String(50)) engineer_info = Column(String(40)) - __mapper_args__ = {'polymorphic_identity':'engineer', + __mapper_args__ = {'polymorphic_identity':'engineer', 'concrete':True} @@ -569,7 +566,7 @@ mappings are declared. An example of some commonly mixed-in idioms is below:: from sqlalchemy.ext.declarative import declared_attr - + class MyMixin(object): @declared_attr @@ -586,29 +583,29 @@ idioms is below:: Where above, the class ``MyModel`` will contain an "id" column as the primary key, a ``__tablename__`` attribute that derives -from the name of the class itself, as well as ``__table_args__`` +from the name of the class itself, as well as ``__table_args__`` and ``__mapper_args__`` defined by the ``MyMixin`` mixin class. -There's no fixed convention over whether ``MyMixin`` precedes -``Base`` or not. 
Normal Python method resolution rules apply, and +There's no fixed convention over whether ``MyMixin`` precedes +``Base`` or not. Normal Python method resolution rules apply, and the above example would work just as well with:: class MyModel(Base, MyMixin): name = Column(String(1000)) -This works because ``Base`` here doesn't define any of the -variables that ``MyMixin`` defines, i.e. ``__tablename__``, -``__table_args__``, ``id``, etc. If the ``Base`` did define -an attribute of the same name, the class placed first in the -inherits list would determine which attribute is used on the +This works because ``Base`` here doesn't define any of the +variables that ``MyMixin`` defines, i.e. ``__tablename__``, +``__table_args__``, ``id``, etc. If the ``Base`` did define +an attribute of the same name, the class placed first in the +inherits list would determine which attribute is used on the newly defined class. Augmenting the Base ~~~~~~~~~~~~~~~~~~~ -In addition to using a pure mixin, most of the techniques in this +In addition to using a pure mixin, most of the techniques in this section can also be applied to the base class itself, for patterns that -should apply to all classes derived from a particular base. This +should apply to all classes derived from a particular base. 
This is achieved using the ``cls`` argument of the :func:`.declarative_base` function:: from sqlalchemy.ext.declarative import declared_attr @@ -617,26 +614,26 @@ is achieved using the ``cls`` argument of the :func:`.declarative_base` function @declared_attr def __tablename__(cls): return cls.__name__.lower() - + __table_args__ = {'mysql_engine': 'InnoDB'} id = Column(Integer, primary_key=True) from sqlalchemy.ext.declarative import declarative_base - + Base = declarative_base(cls=Base) class MyModel(Base): name = Column(String(1000)) -Where above, ``MyModel`` and all other classes that derive from ``Base`` will have -a table name derived from the class name, an ``id`` primary key column, as well as +Where above, ``MyModel`` and all other classes that derive from ``Base`` will have +a table name derived from the class name, an ``id`` primary key column, as well as the "InnoDB" engine for MySQL. Mixing in Columns ~~~~~~~~~~~~~~~~~ -The most basic way to specify a column on a mixin is by simple +The most basic way to specify a column on a mixin is by simple declaration:: class TimestampMixin(object): @@ -649,30 +646,29 @@ declaration:: name = Column(String(1000)) Where above, all declarative classes that include ``TimestampMixin`` -will also have a column ``created_at`` that applies a timestamp to +will also have a column ``created_at`` that applies a timestamp to all row insertions. -Those familiar with the SQLAlchemy expression language know that +Those familiar with the SQLAlchemy expression language know that the object identity of clause elements defines their role in a schema. -Two ``Table`` objects ``a`` and ``b`` may both have a column called -``id``, but the way these are differentiated is that ``a.c.id`` +Two ``Table`` objects ``a`` and ``b`` may both have a column called +``id``, but the way these are differentiated is that ``a.c.id`` and ``b.c.id`` are two distinct Python objects, referencing their parent tables ``a`` and ``b`` respectively. 
In the case of the mixin column, it seems that only one -:class:`.Column` object is explicitly created, yet the ultimate +:class:`.Column` object is explicitly created, yet the ultimate ``created_at`` column above must exist as a distinct Python object for each separate destination class. To accomplish this, the declarative -extension creates a **copy** of each :class:`.Column` object encountered on +extension creates a **copy** of each :class:`.Column` object encountered on a class that is detected as a mixin. This copy mechanism is limited to simple columns that have no foreign keys, as a :class:`.ForeignKey` itself contains references to columns -which can't be properly recreated at this level. For columns that +which can't be properly recreated at this level. For columns that have foreign keys, as well as for the variety of mapper-level constructs that require destination-explicit context, the -:func:`~.declared_attr` decorator (renamed from ``sqlalchemy.util.classproperty`` in 0.6.5) -is provided so that +:func:`~.declared_attr` decorator is provided so that patterns common to many classes can be defined as callables:: from sqlalchemy.ext.declarative import declared_attr @@ -686,14 +682,17 @@ patterns common to many classes can be defined as callables:: __tablename__ = 'user' id = Column(Integer, primary_key=True) -Where above, the ``address_id`` class-level callable is executed at the +Where above, the ``address_id`` class-level callable is executed at the point at which the ``User`` class is constructed, and the declarative extension can use the resulting :class:`.Column` object as returned by the method without the need to copy it. +.. versionchanged:: > 0.6.5 + Rename 0.6.5 ``sqlalchemy.util.classproperty`` into :func:`~.declared_attr`. 
+ Columns generated by :func:`~.declared_attr` can also be -referenced by ``__mapper_args__`` to a limited degree, currently -by ``polymorphic_on`` and ``version_id_col``, by specifying the +referenced by ``__mapper_args__`` to a limited degree, currently +by ``polymorphic_on`` and ``version_id_col``, by specifying the classdecorator itself into the dictionary - the declarative extension will resolve them at class construction time:: @@ -713,7 +712,7 @@ Mixing in Relationships Relationships created by :func:`~sqlalchemy.orm.relationship` are provided with declarative mixin classes exclusively using the -:func:`.declared_attr` approach, eliminating any ambiguity +:class:`.declared_attr` approach, eliminating any ambiguity which could arise when copying a relationship and its possibly column-bound contents. Below is an example which combines a foreign key column and a relationship so that two classes ``Foo`` and ``Bar`` can both be configured to @@ -741,10 +740,10 @@ reference a common target class via many-to-one:: id = Column(Integer, primary_key=True) :func:`~sqlalchemy.orm.relationship` definitions which require explicit -primaryjoin, order_by etc. expressions should use the string forms +primaryjoin, order_by etc. expressions should use the string forms for these arguments, so that they are evaluated as late as possible. To reference the mixin class in these expressions, use the given ``cls`` -to get it's name:: +to get its name:: class RefTargetMixin(object): @declared_attr @@ -763,8 +762,8 @@ Mixing in deferred(), column_property(), etc. Like :func:`~sqlalchemy.orm.relationship`, all :class:`~sqlalchemy.orm.interfaces.MapperProperty` subclasses such as :func:`~sqlalchemy.orm.deferred`, :func:`~sqlalchemy.orm.column_property`, -etc. ultimately involve references to columns, and therefore, when -used with declarative mixins, have the :func:`.declared_attr` +etc. 
ultimately involve references to columns, and therefore, when +used with declarative mixins, have the :class:`.declared_attr` requirement so that no reliance on copying is needed:: class SomethingMixin(object): @@ -781,7 +780,7 @@ Controlling table inheritance with mixins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ``__tablename__`` attribute in conjunction with the hierarchy of -classes involved in a declarative mixin scenario controls what type of +classes involved in a declarative mixin scenario controls what type of table inheritance, if any, is configured by the declarative extension. @@ -816,7 +815,7 @@ return a ``__tablename__`` in the event that no table is already mapped in the inheritance hierarchy. To help with this, a :func:`~sqlalchemy.ext.declarative.has_inherited_table` helper function is provided that returns ``True`` if a parent class already -has a mapped table. +has a mapped table. As an example, here's a mixin that will only allow single table inheritance:: @@ -879,7 +878,7 @@ In the case of ``__table_args__`` or ``__mapper_args__`` specified with declarative mixins, you may want to combine some parameters from several mixins with those you wish to define on the class iteself. 
The -:func:`.declared_attr` decorator can be used +:class:`.declared_attr` decorator can be used here to create user-defined collation routines that pull from multiple collections:: @@ -906,7 +905,7 @@ from multiple collections:: Creating Indexes with Mixins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To define a named, potentially multicolumn :class:`.Index` that applies to all +To define a named, potentially multicolumn :class:`.Index` that applies to all tables derived from a mixin, use the "inline" form of :class:`.Index` and establish it as part of ``__table_args__``:: @@ -928,7 +927,7 @@ Special Directives ``__declare_last__()`` ~~~~~~~~~~~~~~~~~~~~~~ -The ``__declare_last__()`` hook, introduced in 0.7.3, allows definition of +The ``__declare_last__()`` hook allows definition of a class level function that is automatically called by the :meth:`.MapperEvents.after_configured` event, which occurs after mappings are assumed to be completed and the 'configure' step has finished:: @@ -939,29 +938,31 @@ has finished:: "" # do something with mappings +.. versionadded:: 0.7.3 + .. _declarative_abstract: ``__abstract__`` ~~~~~~~~~~~~~~~~~~~ -``__abstract__`` is introduced in 0.7.3 and causes declarative to skip the production +``__abstract__`` causes declarative to skip the production of a table or mapper for the class entirely. 
A class can be added within a hierarchy in the same way as mixin (see :ref:`declarative_mixins`), allowing subclasses to extend just from the special class:: class SomeAbstractBase(Base): __abstract__ = True - + def some_helpful_method(self): "" - + @declared_attr def __mapper_args__(cls): return {"helpful mapper arguments":True} class MyMappedClass(SomeAbstractBase): "" - + One possible use of ``__abstract__`` is to use a distinct :class:`.MetaData` for different bases:: @@ -975,13 +976,15 @@ bases:: __abstract__ = True metadata = MetaData() -Above, classes which inherit from ``DefaultBase`` will use one :class:`.MetaData` as the -registry of tables, and those which inherit from ``OtherBase`` will use a different one. +Above, classes which inherit from ``DefaultBase`` will use one :class:`.MetaData` as the +registry of tables, and those which inherit from ``OtherBase`` will use a different one. The tables themselves can then be created perhaps within distinct databases:: DefaultBase.metadata.create_all(some_engine) OtherBase.metadata_create_all(some_other_engine) +.. versionadded:: 0.7.3 + Class Constructor ================= @@ -1006,7 +1009,7 @@ setup using :func:`~sqlalchemy.orm.scoped_session` might look like:: Base = declarative_base() Mapped instances then make usage of -:class:`~sqlalchemy.orm.session.Session` in the usual way. +:class:`~sqlalchemy.orm.session.Session` in the usual way. """ @@ -1028,7 +1031,7 @@ __all__ = 'declarative_base', 'synonym_for', \ def instrument_declarative(cls, registry, metadata): """Given a class, configure the class declaratively, using the given registry, which can be any dictionary, and - MetaData object. + MetaData object. 
""" if '_decl_class_registry' in cls.__dict__: @@ -1071,7 +1074,7 @@ def _as_declarative(cls, classname, dict_): def go(): cls.__declare_last__() if '__abstract__' in base.__dict__: - if (base is cls or + if (base is cls or (base in cls.__bases__ and not _is_declarative_inherits) ): return @@ -1083,19 +1086,19 @@ def _as_declarative(cls, classname, dict_): for name,obj in vars(base).items(): if name == '__mapper_args__': if not mapper_args and ( - not class_mapped or + not class_mapped or isinstance(obj, declarative_props) ): mapper_args = cls.__mapper_args__ elif name == '__tablename__': if not tablename and ( - not class_mapped or + not class_mapped or isinstance(obj, declarative_props) ): tablename = cls.__tablename__ elif name == '__table_args__': if not table_args and ( - not class_mapped or + not class_mapped or isinstance(obj, declarative_props) ): table_args = cls.__table_args__ @@ -1110,7 +1113,7 @@ def _as_declarative(cls, classname, dict_): util.warn("Regular (i.e. not __special__) " "attribute '%s.%s' uses @declared_attr, " "but owning class %s is mapped - " - "not applying to subclass %s." + "not applying to subclass %s." % (base.__name__, name, base, cls)) continue elif base is not cls: @@ -1122,7 +1125,7 @@ def _as_declarative(cls, classname, dict_): "must be declared as @declared_attr callables " "on declarative mixin classes. 
") if name not in dict_ and not ( - '__table__' in dict_ and + '__table__' in dict_ and (obj.name or name) in dict_['__table__'].c ) and name not in potential_columns: potential_columns[name] = \ @@ -1151,7 +1154,7 @@ def _as_declarative(cls, classname, dict_): if inherited_table_args and not tablename: table_args = None - # make sure that column copies are used rather + # make sure that column copies are used rather # than the original columns from any mixins for k in ('version_id_col', 'polymorphic_on',): if k in mapper_args: @@ -1204,7 +1207,7 @@ def _as_declarative(cls, classname, dict_): elif isinstance(c, Column): _undefer_column_name(key, c) cols.add(c) - # if the column is the same name as the key, + # if the column is the same name as the key, # remove it from the explicit properties dict. # the normal rules for assigning column-based properties # will take over, including precedence of columns @@ -1291,7 +1294,7 @@ def _as_declarative(cls, classname, dict_): if c.name in inherited_table.c: raise exc.ArgumentError( "Column '%s' on class %s conflicts with " - "existing column '%s'" % + "existing column '%s'" % (c, cls, inherited_table.c[c.name]) ) inherited_table.append_column(c) @@ -1310,7 +1313,7 @@ def _as_declarative(cls, classname, dict_): if c not in inherited_mapper._columntoproperty]) exclude_properties.difference_update([c.key for c in cols]) - # look through columns in the current mapper that + # look through columns in the current mapper that # are keyed to a propname different than the colname # (if names were the same, we'd have popped it out above, # in which case the mapper makes this combination). @@ -1322,25 +1325,21 @@ def _as_declarative(cls, classname, dict_): if k in inherited_mapper._props: p = inherited_mapper._props[k] if isinstance(p, ColumnProperty): - # note here we place the superclass column - # first. this corresponds to the - # append() in mapper._configure_property(). 
- # change this ordering when we do [ticket:1892] - our_stuff[k] = p.columns + [col] + # note here we place the subclass column + # first. See [ticket:1892] for background. + our_stuff[k] = [col] + p.columns - cls.__mapper__ = mapper_cls(cls, - table, - properties=our_stuff, + cls.__mapper__ = mapper_cls(cls, + table, + properties=our_stuff, **mapper_args) class DeclarativeMeta(type): def __init__(cls, classname, bases, dict_): - if '_decl_class_registry' in cls.__dict__: - return type.__init__(cls, classname, bases, dict_) - else: + if '_decl_class_registry' not in cls.__dict__: _as_declarative(cls, classname, cls.__dict__) - return type.__init__(cls, classname, bases, dict_) + type.__init__(cls, classname, bases, dict_) def __setattr__(cls, key, value): if '__mapper__' in cls.__dict__: @@ -1356,7 +1355,7 @@ class DeclarativeMeta(type): cls.__mapper__.add_property(key, value) elif isinstance(value, MapperProperty): cls.__mapper__.add_property( - key, + key, _deferred_relationship(cls, value) ) else: @@ -1423,7 +1422,7 @@ def _deferred_relationship(cls, prop): "When initializing mapper %s, expression %r failed to " "locate a name (%r). If this is a class name, consider " "adding this relationship() to the %r class after " - "both dependent classes have been defined." % + "both dependent classes have been defined." % (prop.parent, arg, n.args[0], cls) ) return return_cls @@ -1493,15 +1492,14 @@ class declared_attr(property): """Mark a class-level method as representing the definition of a mapped property or special declarative member name. - .. note:: - - @declared_attr is available as - ``sqlalchemy.util.classproperty`` for SQLAlchemy versions - 0.6.2, 0.6.3, 0.6.4. + .. versionchanged:: 0.6.{2,3,4} + ``@declared_attr`` is available as + ``sqlalchemy.util.classproperty`` for SQLAlchemy versions + 0.6.2, 0.6.3, 0.6.4. @declared_attr turns the attribute into a scalar-like property that can be invoked from the uninstantiated class. 
- Declarative treats attributes specifically marked with + Declarative treats attributes specifically marked with @declared_attr as returning a construct that is specific to mapping or declarative table configuration. The name of the attribute is that of what the non-dynamic version @@ -1533,7 +1531,7 @@ class declared_attr(property): def __mapper_args__(cls): if cls.__name__ == 'Employee': return { - "polymorphic_on":cls.type, + "polymorphic_on":cls.type, "polymorphic_identity":"Employee" } else: @@ -1581,8 +1579,8 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object, :param bind: An optional :class:`~sqlalchemy.engine.base.Connectable`, will be assigned - the ``bind`` attribute on the :class:`~sqlalchemy.MetaData` - instance. + the ``bind`` attribute on the :class:`~sqlalchemy.MetaData` + instance. :param metadata: An optional :class:`~sqlalchemy.MetaData` instance. All @@ -1613,13 +1611,13 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object, no __init__ will be provided and construction will fall back to cls.__init__ by way of the normal Python semantics. - :param class_registry: optional dictionary that will serve as the + :param class_registry: optional dictionary that will serve as the registry of class names-> mapped classes when string names - are used to identify classes inside of :func:`.relationship` + are used to identify classes inside of :func:`.relationship` and others. Allows two or more declarative base classes - to share the same registry of class names for simplified + to share the same registry of class names for simplified inter-base relationships. - + :param metaclass: Defaults to :class:`.DeclarativeMeta`. A metaclass or __metaclass__ compatible callable to use as the meta type of the generated @@ -1652,7 +1650,7 @@ def _undefer_column_name(key, column): class ConcreteBase(object): """A helper class for 'concrete' declarative mappings. 
- + :class:`.ConcreteBase` will use the :func:`.polymorphic_union` function automatically, against all tables mapped as a subclass to this class. The function is called via the @@ -1662,7 +1660,7 @@ class ConcreteBase(object): :class:`.ConcreteBase` produces a mapped table for the class itself. Compare to :class:`.AbstractConcreteBase`, which does not. - + Example:: from sqlalchemy.ext.declarative import ConcreteBase @@ -1672,7 +1670,7 @@ class ConcreteBase(object): employee_id = Column(Integer, primary_key=True) name = Column(String(50)) __mapper_args__ = { - 'polymorphic_identity':'employee', + 'polymorphic_identity':'employee', 'concrete':True} class Manager(Employee): @@ -1681,7 +1679,7 @@ class ConcreteBase(object): name = Column(String(50)) manager_data = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity':'manager', + 'polymorphic_identity':'manager', 'concrete':True} """ @@ -1706,17 +1704,17 @@ class ConcreteBase(object): class AbstractConcreteBase(ConcreteBase): """A helper class for 'concrete' declarative mappings. - + :class:`.AbstractConcreteBase` will use the :func:`.polymorphic_union` function automatically, against all tables mapped as a subclass to this class. The function is called via the ``__declare_last__()`` function, which is essentially a hook for the :func:`.MapperEvents.after_configured` event. - + :class:`.AbstractConcreteBase` does not produce a mapped table for the class itself. Compare to :class:`.ConcreteBase`, which does. 
- + Example:: from sqlalchemy.ext.declarative import ConcreteBase @@ -1730,7 +1728,7 @@ class AbstractConcreteBase(ConcreteBase): name = Column(String(50)) manager_data = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity':'manager', + 'polymorphic_identity':'manager', 'concrete':True} """ diff --git a/libs/sqlalchemy/ext/horizontal_shard.py b/libs/sqlalchemy/ext/horizontal_shard.py index 6befabe8..05b45e03 100644 --- a/libs/sqlalchemy/ext/horizontal_shard.py +++ b/libs/sqlalchemy/ext/horizontal_shard.py @@ -1,5 +1,5 @@ # ext/horizontal_shard.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,8 +9,8 @@ Defines a rudimental 'horizontal sharding' system which allows a Session to distribute queries and persistence operations across multiple databases. -For a usage example, see the :ref:`examples_sharding` example included in -the source distrbution. +For a usage example, see the :ref:`examples_sharding` example included in +the source distribution. """ @@ -31,7 +31,7 @@ class ShardedQuery(Query): def set_shard(self, shard_id): """return a new query, limited to a single shard ID. - all subsequent operations with the returned query will + all subsequent operations with the returned query will be against the single shard regardless of other state. 
""" @@ -45,7 +45,7 @@ class ShardedQuery(Query): result = self._connection_from_session( mapper=self._mapper_zero(), shard_id=shard_id).execute( - context.statement, + context.statement, self._params) return self.instances(result, context) @@ -56,7 +56,7 @@ class ShardedQuery(Query): for shard_id in self.query_chooser(self): partial.extend(iter_for_shard(shard_id)) - # if some kind of in memory 'sorting' + # if some kind of in memory 'sorting' # were done, this is where it would happen return iter(partial) @@ -73,7 +73,7 @@ class ShardedQuery(Query): return None class ShardedSession(Session): - def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None, + def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None, query_cls=ShardedQuery, **kwargs): """Construct a ShardedSession. @@ -113,8 +113,8 @@ class ShardedSession(Session): if self.transaction is not None: return self.transaction.connection(mapper, shard_id=shard_id) else: - return self.get_bind(mapper, - shard_id=shard_id, + return self.get_bind(mapper, + shard_id=shard_id, instance=instance).contextual_connect(**kwargs) def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw): diff --git a/libs/sqlalchemy/ext/hybrid.py b/libs/sqlalchemy/ext/hybrid.py index 8734181e..038898e4 100644 --- a/libs/sqlalchemy/ext/hybrid.py +++ b/libs/sqlalchemy/ext/hybrid.py @@ -1,5 +1,5 @@ # ext/hybrid.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,8 +10,8 @@ class level and at the instance level. The :mod:`~sqlalchemy.ext.hybrid` extension provides a special form of method -decorator, is around 50 lines of code and has almost no dependencies on the rest -of SQLAlchemy. 
It can, in theory, work with any descriptor-based expression +decorator, is around 50 lines of code and has almost no dependencies on the rest +of SQLAlchemy. It can, in theory, work with any descriptor-based expression system. Consider a mapping ``Interval``, representing integer ``start`` and ``end`` @@ -25,9 +25,9 @@ as the class itself:: from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import Session, aliased from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method - + Base = declarative_base() - + class Interval(Base): __tablename__ = 'interval' @@ -50,7 +50,7 @@ as the class itself:: @hybrid_method def intersects(self, other): return self.contains(other.start) | self.contains(other.end) - + Above, the ``length`` property returns the difference between the ``end`` and ``start`` attributes. With an instance of ``Interval``, this subtraction occurs in Python, using normal Python descriptor mechanics:: @@ -60,33 +60,33 @@ in Python, using normal Python descriptor mechanics:: 5 When dealing with the ``Interval`` class itself, the :class:`.hybrid_property` -descriptor evaluates the function body given the ``Interval`` class as +descriptor evaluates the function body given the ``Interval`` class as the argument, which when evaluated with SQLAlchemy expression mechanics returns a new SQL expression:: - + >>> print Interval.length interval."end" - interval.start - + >>> print Session().query(Interval).filter(Interval.length > 10) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end - FROM interval + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval WHERE interval."end" - interval.start > :param_1 - -ORM methods such as :meth:`~.Query.filter_by` generally use ``getattr()`` to + +ORM methods such as :meth:`~.Query.filter_by` generally use ``getattr()`` to locate attributes, so can also be used with hybrid attributes:: 
>>> print Session().query(Interval).filter_by(length=5) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end - FROM interval + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval WHERE interval."end" - interval.start = :param_1 The ``Interval`` class example also illustrates two methods, ``contains()`` and ``intersects()``, decorated with :class:`.hybrid_method`. This decorator applies the same idea to methods that :class:`.hybrid_property` applies -to attributes. The methods return boolean values, and take advantage -of the Python ``|`` and ``&`` bitwise operators to produce equivalent instance-level and +to attributes. The methods return boolean values, and take advantage +of the Python ``|`` and ``&`` bitwise operators to produce equivalent instance-level and SQL expression-level boolean behavior:: >>> i1.contains(6) @@ -97,24 +97,24 @@ SQL expression-level boolean behavior:: True >>> i1.intersects(Interval(25, 29)) False - + >>> print Session().query(Interval).filter(Interval.contains(15)) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end - FROM interval + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval WHERE interval.start <= :start_1 AND interval."end" > :end_1 >>> ia = aliased(Interval) >>> print Session().query(Interval, ia).filter(Interval.intersects(ia)) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end, interval_1.id AS interval_1_id, - interval_1.start AS interval_1_start, interval_1."end" AS interval_1_end - FROM interval, interval AS interval_1 - WHERE interval.start <= interval_1.start - AND interval."end" > interval_1.start - OR interval.start <= interval_1."end" + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS 
interval_end, interval_1.id AS interval_1_id, + interval_1.start AS interval_1_start, interval_1."end" AS interval_1_end + FROM interval, interval AS interval_1 + WHERE interval.start <= interval_1.start + AND interval."end" > interval_1.start + OR interval.start <= interval_1."end" AND interval."end" > interval_1."end" - + Defining Expression Behavior Distinct from Attribute Behavior -------------------------------------------------------------- @@ -122,18 +122,18 @@ Our usage of the ``&`` and ``|`` bitwise operators above was fortunate, consider our functions operated on two boolean values to return a new one. In many cases, the construction of an in-Python function and a SQLAlchemy SQL expression have enough differences that two separate Python expressions should be defined. The :mod:`~sqlalchemy.ext.hybrid` decorators -define the :meth:`.hybrid_property.expression` modifier for this purpose. As an example we'll +define the :meth:`.hybrid_property.expression` modifier for this purpose. As an example we'll define the radius of the interval, which requires the usage of the absolute value function:: from sqlalchemy import func - + class Interval(object): # ... - + @hybrid_property def radius(self): return abs(self.length) / 2 - + @radius.expression def radius(cls): return func.abs(cls.length) / 2 @@ -143,22 +143,22 @@ Above the Python function ``abs()`` is used for instance-level operations, the S >>> i1.radius 2 - + >>> print Session().query(Interval).filter(Interval.radius > 5) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end - FROM interval + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval WHERE abs(interval."end" - interval.start) / :abs_1 > :param_1 Defining Setters ---------------- -Hybrid properties can also define setter methods. If we wanted ``length`` above, when +Hybrid properties can also define setter methods. 
If we wanted ``length`` above, when set, to modify the endpoint value:: class Interval(object): # ... - + @hybrid_property def length(self): return self.end - self.start @@ -179,17 +179,24 @@ The ``length(self, value)`` method is now called upon set:: Working with Relationships -------------------------- -There's no essential difference when creating hybrids that work with related objects as -opposed to column-based data. The need for distinct expressions tends to be greater. -Consider the following declarative mapping which relates a ``User`` to a ``SavingsAccount``:: +There's no essential difference when creating hybrids that work with +related objects as opposed to column-based data. The need for distinct +expressions tends to be greater. Two variants of we'll illustrate +are the "join-dependent" hybrid, and the "correlated subquery" hybrid. + +Join-Dependent Relationship Hybrid +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Consider the following declarative +mapping which relates a ``User`` to a ``SavingsAccount``:: from sqlalchemy import Column, Integer, ForeignKey, Numeric, String from sqlalchemy.orm import relationship from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.hybrid import hybrid_property - + Base = declarative_base() - + class SavingsAccount(Base): __tablename__ = 'account' id = Column(Integer, primary_key=True) @@ -200,9 +207,9 @@ Consider the following declarative mapping which relates a ``User`` to a ``Savin __tablename__ = 'user' id = Column(Integer, primary_key=True) name = Column(String(100), nullable=False) - + accounts = relationship("SavingsAccount", backref="owner") - + @hybrid_property def balance(self): if self.accounts: @@ -222,30 +229,88 @@ Consider the following declarative mapping which relates a ``User`` to a ``Savin def balance(cls): return SavingsAccount.balance -The above hybrid property ``balance`` works with the first ``SavingsAccount`` entry in the list of -accounts for this user. 
The in-Python getter/setter methods can treat ``accounts`` as a Python -list available on ``self``. +The above hybrid property ``balance`` works with the first +``SavingsAccount`` entry in the list of accounts for this user. The +in-Python getter/setter methods can treat ``accounts`` as a Python +list available on ``self``. -However, at the expression level, we can't travel along relationships to column attributes -directly since SQLAlchemy is explicit about joins. So here, it's expected that the ``User`` class will be -used in an appropriate context such that an appropriate join to ``SavingsAccount`` will be present:: +However, at the expression level, it's expected that the ``User`` class will be used +in an appropriate context such that an appropriate join to +``SavingsAccount`` will be present:: - >>> print Session().query(User, User.balance).join(User.accounts).filter(User.balance > 5000) - SELECT "user".id AS user_id, "user".name AS user_name, account.balance AS account_balance - FROM "user" JOIN account ON "user".id = account.user_id + >>> print Session().query(User, User.balance).\\ + ... join(User.accounts).filter(User.balance > 5000) + SELECT "user".id AS user_id, "user".name AS user_name, + account.balance AS account_balance + FROM "user" JOIN account ON "user".id = account.user_id WHERE account.balance > :balance_1 -Note however, that while the instance level accessors need to worry about whether ``self.accounts`` -is even present, this issue expresses itself differently at the SQL expression level, where we basically +Note however, that while the instance level accessors need to worry +about whether ``self.accounts`` is even present, this issue expresses +itself differently at the SQL expression level, where we basically would use an outer join:: >>> from sqlalchemy import or_ >>> print (Session().query(User, User.balance).outerjoin(User.accounts). ... 
filter(or_(User.balance < 5000, User.balance == None))) - SELECT "user".id AS user_id, "user".name AS user_name, account.balance AS account_balance - FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id + SELECT "user".id AS user_id, "user".name AS user_name, + account.balance AS account_balance + FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id WHERE account.balance < :balance_1 OR account.balance IS NULL +Correlated Subquery Relationship Hybrid +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +We can, of course, forego being dependent on the enclosing query's usage +of joins in favor of the correlated +subquery, which can portably be packed into a single colunn expression. +A correlated subquery is more portable, but often performs more poorly +at the SQL level. +Using the same technique illustrated at :ref:`mapper_column_property_sql_expressions`, +we can adjust our ``SavingsAccount`` example to aggregate the balances for +*all* accounts, and use a correlated subquery for the column expression:: + + from sqlalchemy import Column, Integer, ForeignKey, Numeric, String + from sqlalchemy.orm import relationship + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.ext.hybrid import hybrid_property + from sqlalchemy import select, func + + Base = declarative_base() + + class SavingsAccount(Base): + __tablename__ = 'account' + id = Column(Integer, primary_key=True) + user_id = Column(Integer, ForeignKey('user.id'), nullable=False) + balance = Column(Numeric(15, 5)) + + class User(Base): + __tablename__ = 'user' + id = Column(Integer, primary_key=True) + name = Column(String(100), nullable=False) + + accounts = relationship("SavingsAccount", backref="owner") + + @hybrid_property + def balance(self): + return sum(acc.balance for acc in self.accounts) + + @balance.expression + def balance(cls): + return select([func.sum(SavingsAccount.balance)]).\\ + where(SavingsAccount.user_id==cls.id).\\ + label('total_balance') + +The 
above recipe will give us the ``balance`` column which renders +a correlated SELECT:: + + >>> print s.query(User).filter(User.balance > 400) + SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE (SELECT sum(account.balance) AS sum_1 + FROM account + WHERE account.user_id = "user".id) > :param_1 + .. _hybrid_custom_comparators: Building Custom Comparators @@ -253,7 +318,7 @@ Building Custom Comparators The hybrid property also includes a helper that allows construction of custom comparators. A comparator object allows one to customize the behavior of each SQLAlchemy expression -operator individually. They are useful when creating custom types that have +operator individually. They are useful when creating custom types that have some highly idiosyncratic behavior on the SQL side. The example class below allows case-insensitive comparisons on the attribute @@ -263,9 +328,9 @@ named ``word_insensitive``:: from sqlalchemy import func, Column, Integer, String from sqlalchemy.orm import Session from sqlalchemy.ext.declarative import declarative_base - + Base = declarative_base() - + class CaseInsensitiveComparator(Comparator): def __eq__(self, other): return func.lower(self.__clause_element__()) == func.lower(other) @@ -274,27 +339,27 @@ named ``word_insensitive``:: __tablename__ = 'searchword' id = Column(Integer, primary_key=True) word = Column(String(255), nullable=False) - + @hybrid_property def word_insensitive(self): return self.word.lower() - + @word_insensitive.comparator def word_insensitive(cls): return CaseInsensitiveComparator(cls.word) -Above, SQL expressions against ``word_insensitive`` will apply the ``LOWER()`` +Above, SQL expressions against ``word_insensitive`` will apply the ``LOWER()`` SQL function to both sides:: >>> print Session().query(SearchWord).filter_by(word_insensitive="Trucks") - SELECT searchword.id AS searchword_id, searchword.word AS searchword_word - FROM searchword + SELECT searchword.id AS searchword_id, 
searchword.word AS searchword_word + FROM searchword WHERE lower(searchword.word) = lower(:lower_1) The ``CaseInsensitiveComparator`` above implements part of the :class:`.ColumnOperators` interface. A "coercion" operation like lowercasing can be applied to all comparison operations (i.e. ``eq``, ``lt``, ``gt``, etc.) using :meth:`.Operators.operate`:: - + class CaseInsensitiveComparator(Comparator): def operate(self, op, other): return op(func.lower(self.__clause_element__()), func.lower(other)) @@ -310,7 +375,7 @@ by ``@word_insensitive.comparator``, only applies to the SQL side. A more comprehensive form of the custom comparator is to construct a *Hybrid Value Object*. This technique applies the target value or expression to a value object which is then returned by the accessor in all cases. The value object allows control -of all operations upon the value as well as how compared values are treated, both +of all operations upon the value as well as how compared values are treated, both on the SQL expression side as well as the Python value side. Replacing the previous ``CaseInsensitiveComparator`` class with a new ``CaseInsensitiveWord`` class:: @@ -342,8 +407,8 @@ previous ``CaseInsensitiveComparator`` class with a new ``CaseInsensitiveWord`` Above, the ``CaseInsensitiveWord`` object represents ``self.word``, which may be a SQL function, or may be a Python native. By overriding ``operate()`` and ``__clause_element__()`` to work in terms of ``self.word``, all comparison operations will work against the -"converted" form of ``word``, whether it be SQL side or Python side. -Our ``SearchWord`` class can now deliver the ``CaseInsensitiveWord`` object unconditionally +"converted" form of ``word``, whether it be SQL side or Python side. 
+Our ``SearchWord`` class can now deliver the ``CaseInsensitiveWord`` object unconditionally from a single hybrid call:: class SearchWord(Base): @@ -356,12 +421,12 @@ from a single hybrid call:: return CaseInsensitiveWord(self.word) The ``word_insensitive`` attribute now has case-insensitive comparison behavior -universally, including SQL expression vs. Python expression (note the Python value is +universally, including SQL expression vs. Python expression (note the Python value is converted to lower case on the Python side here):: >>> print Session().query(SearchWord).filter_by(word_insensitive="Trucks") - SELECT searchword.id AS searchword_id, searchword.word AS searchword_word - FROM searchword + SELECT searchword.id AS searchword_id, searchword.word AS searchword_word + FROM searchword WHERE lower(searchword.word) = :lower_1 SQL expression versus SQL expression:: @@ -369,13 +434,13 @@ SQL expression versus SQL expression:: >>> sw1 = aliased(SearchWord) >>> sw2 = aliased(SearchWord) >>> print Session().query( - ... sw1.word_insensitive, + ... sw1.word_insensitive, ... sw2.word_insensitive).\\ ... filter( ... sw1.word_insensitive > sw2.word_insensitive ... ) - SELECT lower(searchword_1.word) AS lower_1, lower(searchword_2.word) AS lower_2 - FROM searchword AS searchword_1, searchword AS searchword_2 + SELECT lower(searchword_1.word) AS lower_1, lower(searchword_2.word) AS lower_2 + FROM searchword AS searchword_1, searchword AS searchword_2 WHERE lower(searchword_1.word) > lower(searchword_2.word) Python only expression:: @@ -403,7 +468,7 @@ Building Transformers ---------------------- A *transformer* is an object which can receive a :class:`.Query` object and return a -new one. The :class:`.Query` object includes a method :meth:`.with_transformation` +new one. The :class:`.Query` object includes a method :meth:`.with_transformation` that simply returns a new :class:`.Query` transformed by the given function. 
We can combine this with the :class:`.Comparator` class to produce one type @@ -412,18 +477,18 @@ filtering criterion. Consider a mapped class ``Node``, which assembles using adjacency list into a hierarchical tree pattern:: - + from sqlalchemy import Column, Integer, ForeignKey from sqlalchemy.orm import relationship from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() - + class Node(Base): __tablename__ = 'node' id =Column(Integer, primary_key=True) parent_id = Column(Integer, ForeignKey('node.id')) parent = relationship("Node", remote_side=id) - + Suppose we wanted to add an accessor ``grandparent``. This would return the ``parent`` of ``Node.parent``. When we have an instance of ``Node``, this is simple:: @@ -431,7 +496,7 @@ Suppose we wanted to add an accessor ``grandparent``. This would return the ``p class Node(Base): # ... - + @hybrid_property def grandparent(self): return self.parent.parent @@ -460,7 +525,7 @@ attribute and filtered based on the given criterion:: id =Column(Integer, primary_key=True) parent_id = Column(Integer, ForeignKey('node.id')) parent = relationship("Node", remote_side=id) - + @hybrid_property def grandparent(self): return self.parent.parent @@ -486,8 +551,8 @@ using :attr:`.Operators.eq` against the left and right sides, passing into {sql}>>> session.query(Node).\\ ... with_transformation(Node.grandparent==Node(id=5)).\\ ... all() - SELECT node.id AS node_id, node.parent_id AS node_parent_id - FROM node JOIN node AS node_1 ON node_1.id = node.parent_id + SELECT node.id AS node_id, node.parent_id AS node_parent_id + FROM node JOIN node AS node_1 ON node_1.id = node.parent_id WHERE :param_1 = node_1.parent_id {stop} @@ -529,14 +594,14 @@ with each class:: {sql}>>> session.query(Node).\\ ... with_transformation(Node.grandparent.join).\\ ... 
filter(Node.grandparent==Node(id=5)) - SELECT node.id AS node_id, node.parent_id AS node_parent_id - FROM node JOIN node AS node_1 ON node_1.id = node.parent_id + SELECT node.id AS node_id, node.parent_id AS node_parent_id + FROM node JOIN node AS node_1 ON node_1.id = node.parent_id WHERE :param_1 = node_1.parent_id {stop} The "transformer" pattern is an experimental pattern that starts to make usage of some functional programming paradigms. -While it's only recommended for advanced and/or patient developers, +While it's only recommended for advanced and/or patient developers, there's probably a whole lot of amazing things it can be used for. """ @@ -546,26 +611,26 @@ from sqlalchemy.orm import attributes, interfaces class hybrid_method(object): """A decorator which allows definition of a Python object method with both instance-level and class-level behavior. - + """ def __init__(self, func, expr=None): """Create a new :class:`.hybrid_method`. - + Usage is typically via decorator:: - + from sqlalchemy.ext.hybrid import hybrid_method - + class SomeClass(object): @hybrid_method def value(self, x, y): return self._value + x + y - + @value.expression def value(self, x, y): return func.some_function(self._value, x, y) - + """ self.func = func self.expr = expr or func @@ -585,25 +650,25 @@ class hybrid_method(object): class hybrid_property(object): """A decorator which allows definition of a Python descriptor with both instance-level and class-level behavior. - + """ def __init__(self, fget, fset=None, fdel=None, expr=None): """Create a new :class:`.hybrid_property`. 
- + Usage is typically via decorator:: - + from sqlalchemy.ext.hybrid import hybrid_property - + class SomeClass(object): @hybrid_property def value(self): return self._value - + @value.setter def value(self, value): self._value = value - + """ self.fget = fget self.fset = fset @@ -647,10 +712,10 @@ class hybrid_property(object): def comparator(self, comparator): """Provide a modifying decorator that defines a custom comparator producing method. - + The return value of the decorated method should be an instance of :class:`~.hybrid.Comparator`. - + """ proxy_attr = attributes.\ diff --git a/libs/sqlalchemy/ext/mutable.py b/libs/sqlalchemy/ext/mutable.py index 9f1bb892..36e8fcaf 100644 --- a/libs/sqlalchemy/ext/mutable.py +++ b/libs/sqlalchemy/ext/mutable.py @@ -1,5 +1,5 @@ # ext/mutable.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -21,8 +21,8 @@ Establishing Mutability on Scalar Column Values =============================================== A typical example of a "mutable" structure is a Python dictionary. -Following the example introduced in :ref:`types_toplevel`, we -begin with a custom type that marshals Python dictionaries into +Following the example introduced in :ref:`types_toplevel`, we +begin with a custom type that marshals Python dictionaries into JSON strings before being persisted:: from sqlalchemy.types import TypeDecorator, VARCHAR @@ -43,7 +43,7 @@ JSON strings before being persisted:: value = json.loads(value) return value -The usage of ``json`` is only for the purposes of example. The :mod:`sqlalchemy.ext.mutable` +The usage of ``json`` is only for the purposes of example. 
The :mod:`sqlalchemy.ext.mutable` extension can be used with any type whose target Python type may be mutable, including :class:`.PickleType`, :class:`.postgresql.ARRAY`, etc. @@ -86,7 +86,7 @@ The above dictionary class takes the approach of subclassing the Python built-in ``dict`` to produce a dict subclass which routes all mutation events through ``__setitem__``. There are many variants on this approach, such as subclassing ``UserDict.UserDict``, -the newer ``collections.MutableMapping``, etc. The part that's important to this +the newer ``collections.MutableMapping``, etc. The part that's important to this example is that the :meth:`.Mutable.changed` method is called whenever an in-place change to the datastructure takes place. @@ -95,7 +95,7 @@ convert any values that are not instances of ``MutationDict``, such as the plain dictionaries returned by the ``json`` module, into the appropriate type. Defining this method is optional; we could just as well created our ``JSONEncodedDict`` such that it always returns an instance of ``MutationDict``, -and additionally ensured that all calling code uses ``MutationDict`` +and additionally ensured that all calling code uses ``MutationDict`` explicitly. When :meth:`.Mutable.coerce` is not overridden, any values applied to a parent object which are not instances of the mutable type will raise a ``ValueError``. @@ -108,14 +108,14 @@ of this type, applying event listening instrumentation to the mapped attribute. Such as, with classical table metadata:: from sqlalchemy import Table, Column, Integer - + my_data = Table('my_data', metadata, Column('id', Integer, primary_key=True), Column('data', MutationDict.as_mutable(JSONEncodedDict)) ) Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict`` -(if the type object was not an instance already), which will intercept any +(if the type object was not an instance already), which will intercept any attributes which are mapped against this type. 
Below we establish a simple mapping against the ``my_data`` table:: @@ -157,7 +157,7 @@ will flag the attribute as "dirty" on the parent object:: The ``MutationDict`` can be associated with all future instances of ``JSONEncodedDict`` in one step, using :meth:`~.Mutable.associate_with`. This -is similar to :meth:`~.Mutable.as_mutable` except it will intercept +is similar to :meth:`~.Mutable.as_mutable` except it will intercept all occurrences of ``MutationDict`` in all mappings unconditionally, without the need to declare it individually:: @@ -167,8 +167,8 @@ the need to declare it individually:: __tablename__ = 'my_data' id = Column(Integer, primary_key=True) data = Column(JSONEncodedDict) - - + + Supporting Pickling -------------------- @@ -180,7 +180,7 @@ not picklable, due to the fact that they contain weakrefs and function callbacks. In our case, this is a good thing, since if this dictionary were picklable, it could lead to an excessively large pickle size for our value objects that are pickled by themselves outside of the context of the parent. -The developer responsiblity here is only to provide a ``__getstate__`` method +The developer responsibility here is only to provide a ``__getstate__`` method that excludes the :meth:`~.MutableBase._parents` collection from the pickle stream:: @@ -217,12 +217,13 @@ be assigned an object value which represents information "composed" from one or more columns from the underlying mapped table. The usual example is that of a geometric "point", and is introduced in :ref:`mapper_composite`. -As of SQLAlchemy 0.7, the internals of :func:`.orm.composite` have been -greatly simplified and in-place mutation detection is no longer enabled by -default; instead, the user-defined value must detect changes on its own and -propagate them to all owning parents. The :mod:`sqlalchemy.ext.mutable` -extension provides the helper class :class:`.MutableComposite`, which is a -slight variant on the :class:`.Mutable` class. +.. 
versionchanged:: 0.7 + The internals of :func:`.orm.composite` have been + greatly simplified and in-place mutation detection is no longer enabled by + default; instead, the user-defined value must detect changes on its own and + propagate them to all owning parents. The :mod:`sqlalchemy.ext.mutable` + extension provides the helper class :class:`.MutableComposite`, which is a + slight variant on the :class:`.Mutable` class. As is the case with :class:`.Mutable`, the user-defined composite class subclasses :class:`.MutableComposite` as a mixin, and detects and delivers @@ -300,6 +301,31 @@ will flag the attribute as "dirty" on the parent object:: >>> assert v1 in sess.dirty True +Coercing Mutable Composites +--------------------------- + +The :meth:`.MutableBase.coerce` method is also supported on composite types. +In the case of :class:`.MutableComposite`, the :meth:`.MutableBase.coerce` +method is only called for attribute set operations, not load operations. +Overriding the :meth:`.MutableBase.coerce` method is essentially equivalent +to using a :func:`.validates` validation routine for all attributes which +make use of the custom composite type:: + + class Point(MutableComposite): + # other Point methods + # ... + + def coerce(cls, key, value): + if isinstance(value, tuple): + value = Point(*value) + elif not isinstance(value, Point): + raise ValueError("tuple or Point expected") + return value + +.. versionadded:: 0.7.10,0.8.0b2 + Support for the :meth:`.MutableBase.coerce` method in conjunction with + objects of type :class:`.MutableComposite`. + Supporting Pickling -------------------- @@ -313,10 +339,10 @@ the minimal form of our ``Point`` class:: class Point(MutableComposite): # ... 
- + def __getstate__(self): return self.x, self.y - + def __setstate__(self, state): self.x, self.y = state @@ -327,7 +353,7 @@ pickling process of the parent's object-relational state so that the """ from sqlalchemy.orm.attributes import flag_modified from sqlalchemy import event, types -from sqlalchemy.orm import mapper, object_mapper +from sqlalchemy.orm import mapper, object_mapper, Mapper from sqlalchemy.util import memoized_property import weakref @@ -337,20 +363,38 @@ class MutableBase(object): @memoized_property def _parents(self): """Dictionary of parent object->attribute name on the parent. - + This attribute is a so-called "memoized" property. It initializes itself with a new ``weakref.WeakKeyDictionary`` the first time it is accessed, returning the same object upon subsequent access. - + """ return weakref.WeakKeyDictionary() @classmethod def coerce(cls, key, value): - """Given a value, coerce it into this type. + """Given a value, coerce it into the target type. + + Can be overridden by custom subclasses to coerce incoming + data into a particular type. + + By default, raises ``ValueError``. + + This method is called in different scenarios depending on if + the parent class is of type :class:`.Mutable` or of type + :class:`.MutableComposite`. In the case of the former, it is called + for both attribute-set operations as well as during ORM loading + operations. For the latter, it is only called during attribute-set + operations; the mechanics of the :func:`.composite` construct + handle coercion during load operations. + + + :param key: string name of the ORM-mapped attribute being set. + :param value: the incoming value. + :return: the method should return the coerced value, or raise + ``ValueError`` if the coercion cannot be completed. - By default raises ValueError. 
""" if value is None: return None @@ -358,7 +402,7 @@ class MutableBase(object): @classmethod def _listen_on_attribute(cls, attribute, coerce, parent_cls): - """Establish this type as a mutation listener for the given + """Establish this type as a mutation listener for the given mapped descriptor. """ @@ -372,7 +416,7 @@ class MutableBase(object): def load(state, *args): """Listen for objects loaded or refreshed. - Wrap the target data member's value with + Wrap the target data member's value with ``Mutable``. """ @@ -388,7 +432,7 @@ class MutableBase(object): data member. Establish a weak reference to the parent object - on the incoming value, remove it for the one + on the incoming value, remove it for the one outgoing. """ @@ -435,7 +479,7 @@ class Mutable(MutableBase): @classmethod def associate_with_attribute(cls, attribute): - """Establish this type as a mutation listener for the given + """Establish this type as a mutation listener for the given mapped descriptor. """ @@ -443,15 +487,15 @@ class Mutable(MutableBase): @classmethod def associate_with(cls, sqltype): - """Associate this wrapper with all future mapped columns + """Associate this wrapper with all future mapped columns of the given type. This is a convenience method that calls ``associate_with_attribute`` automatically. - .. warning:: - + .. warning:: + The listeners established by this method are *global* - to all mappers, and are *not* garbage collected. Only use + to all mappers, and are *not* garbage collected. Only use :meth:`.associate_with` for types that are permanent to an application, not with ad-hoc types else this will cause unbounded growth in memory usage. @@ -473,7 +517,7 @@ class Mutable(MutableBase): This establishes listeners that will detect ORM mappings against the given type, adding mutation event trackers to those mappings. 
- The type is returned, unconditionally as an instance, so that + The type is returned, unconditionally as an instance, so that :meth:`.as_mutable` can be used inline:: Table('mytable', metadata, @@ -485,15 +529,15 @@ class Mutable(MutableBase): is given, and that only columns which are declared specifically with that type instance receive additional instrumentation. - To associate a particular mutable type with all occurrences of a + To associate a particular mutable type with all occurrences of a particular type, use the :meth:`.Mutable.associate_with` classmethod of the particular :meth:`.Mutable` subclass to establish a global association. - .. warning:: - + .. warning:: + The listeners established by this method are *global* - to all mappers, and are *not* garbage collected. Only use + to all mappers, and are *not* garbage collected. Only use :meth:`.as_mutable` for types that are permanent to an application, not with ad-hoc types else this will cause unbounded growth in memory usage. @@ -511,28 +555,22 @@ class Mutable(MutableBase): return sqltype -class _MutableCompositeMeta(type): - def __init__(cls, classname, bases, dict_): - cls._setup_listeners() - return type.__init__(cls, classname, bases, dict_) - class MutableComposite(MutableBase): """Mixin that defines transparent propagation of change events on a SQLAlchemy "composite" object to its owning parent or parents. - + See the example in :ref:`mutable_composites` for usage information. - - .. warning:: - + + .. warning:: + The listeners established by the :class:`.MutableComposite` - class are *global* to all mappers, and are *not* garbage collected. Only use + class are *global* to all mappers, and are *not* garbage collected. Only use :class:`.MutableComposite` for types that are permanent to an application, not with ad-hoc types else this will cause unbounded growth in memory usage. 
""" - __metaclass__ = _MutableCompositeMeta def changed(self): """Subclasses should call this method whenever change events occur.""" @@ -541,23 +579,18 @@ class MutableComposite(MutableBase): prop = object_mapper(parent).get_property(key) for value, attr_name in zip( - self.__composite_values__(), + self.__composite_values__(), prop._attribute_keys): setattr(parent, attr_name, value) - @classmethod - def _setup_listeners(cls): - """Associate this wrapper with all future mapped composites - of the given type. - - This is a convenience method that calls ``associate_with_attribute`` automatically. - - """ - - def listen_for_type(mapper, class_): - for prop in mapper.iterate_properties: - if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls): - cls._listen_on_attribute(getattr(class_, prop.key), False, class_) - - event.listen(mapper, 'mapper_configured', listen_for_type) +def _setup_composite_listener(): + def _listen_for_type(mapper, class_): + for prop in mapper.iterate_properties: + if (hasattr(prop, 'composite_class') and + issubclass(prop.composite_class, MutableComposite)): + prop.composite_class._listen_on_attribute( + getattr(class_, prop.key), False, class_) + if not Mapper.dispatch.mapper_configured._contains(Mapper, _listen_for_type): + event.listen(Mapper, 'mapper_configured', _listen_for_type) +_setup_composite_listener() diff --git a/libs/sqlalchemy/ext/orderinglist.py b/libs/sqlalchemy/ext/orderinglist.py index 38957250..6a9b6c39 100644 --- a/libs/sqlalchemy/ext/orderinglist.py +++ b/libs/sqlalchemy/ext/orderinglist.py @@ -1,64 +1,77 @@ # ext/orderinglist.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""A custom list that manages index/position information for its children. 
+"""A custom list that manages index/position information for contained +elements. :author: Jason Kirtland -``orderinglist`` is a helper for mutable ordered relationships. It will intercept -list operations performed on a relationship collection and automatically -synchronize changes in list position with an attribute on the related objects. -(See :ref:`advdatamapping_entitycollections` for more information on the general pattern.) +``orderinglist`` is a helper for mutable ordered relationships. It will +intercept list operations performed on a :func:`.relationship`-managed +collection and +automatically synchronize changes in list position onto a target scalar +attribute. -Example: Two tables that store slides in a presentation. Each slide -has a number of bullet points, displayed in order by the 'position' -column on the bullets table. These bullets can be inserted and re-ordered -by your end users, and you need to update the 'position' column of all -affected rows when changes are made. +Example: A ``slide`` table, where each row refers to zero or more entries +in a related ``bullet`` table. The bullets within a slide are +displayed in order based on the value of the ``position`` column in the +``bullet`` table. As entries are reordered in memory, the value of the +``position`` attribute should be updated to reflect the new sort order:: -.. 
sourcecode:: python+sql - slides_table = Table('Slides', metadata, - Column('id', Integer, primary_key=True), - Column('name', String)) + Base = declarative_base() - bullets_table = Table('Bullets', metadata, - Column('id', Integer, primary_key=True), - Column('slide_id', Integer, ForeignKey('Slides.id')), - Column('position', Integer), - Column('text', String)) + class Slide(Base): + __tablename__ = 'slide' - class Slide(object): - pass - class Bullet(object): - pass + id = Column(Integer, primary_key=True) + name = Column(String) - mapper(Slide, slides_table, properties={ - 'bullets': relationship(Bullet, order_by=[bullets_table.c.position]) - }) - mapper(Bullet, bullets_table) + bullets = relationship("Bullet", order_by="Bullet.position") -The standard relationship mapping will produce a list-like attribute on each Slide -containing all related Bullets, but coping with changes in ordering is totally -your responsibility. If you insert a Bullet into that list, there is no -magic- it won't have a position attribute unless you assign it it one, and -you'll need to manually renumber all the subsequent Bullets in the list to -accommodate the insert. + class Bullet(Base): + __tablename__ = 'bullet' + id = Column(Integer, primary_key=True) + slide_id = Column(Integer, ForeignKey('slide.id')) + position = Column(Integer) + text = Column(String) -An ``orderinglist`` can automate this and manage the 'position' attribute on all -related bullets for you. +The standard relationship mapping will produce a list-like attribute on each +``Slide`` containing all related ``Bullet`` objects, +but coping with changes in ordering is not handled automatically. +When appending a ``Bullet`` into ``Slide.bullets``, the ``Bullet.position`` +attribute will remain unset until manually assigned. When the ``Bullet`` +is inserted into the middle of the list, the following ``Bullet`` objects +will also need to be renumbered. -.. 
sourcecode:: python+sql +The :class:`.OrderingList` object automates this task, managing the +``position`` attribute on all ``Bullet`` objects in the collection. It is +constructed using the :func:`.ordering_list` factory:: - mapper(Slide, slides_table, properties={ - 'bullets': relationship(Bullet, - collection_class=ordering_list('position'), - order_by=[bullets_table.c.position]) - }) - mapper(Bullet, bullets_table) + from sqlalchemy.ext.orderinglist import ordering_list + + Base = declarative_base() + + class Slide(Base): + __tablename__ = 'slide' + + id = Column(Integer, primary_key=True) + name = Column(String) + + bullets = relationship("Bullet", order_by="Bullet.position", + collection_class=ordering_list('position')) + + class Bullet(Base): + __tablename__ = 'bullet' + id = Column(Integer, primary_key=True) + slide_id = Column(Integer, ForeignKey('slide.id')) + position = Column(Integer) + text = Column(String) + +With the above mapping the ``Bullet.position`` attribute is managed:: s = Slide() s.bullets.append(Bullet()) @@ -69,73 +82,87 @@ related bullets for you. s.bullets[2].position >>> 2 -Use the ``ordering_list`` function to set up the ``collection_class`` on relationships -(as in the mapper example above). This implementation depends on the list -starting in the proper order, so be SURE to put an order_by on your relationship. +The :class:`.OrderingList` construct only works with **changes** to a collection, +and not the initial load from the database, and requires that the list be +sorted when loaded. Therefore, be sure to +specify ``order_by`` on the :func:`.relationship` against the target ordering +attribute, so that the ordering is correct when first loaded. -.. warning:: +.. warning:: - ``ordering_list`` only provides limited functionality when a primary - key column or unique column is the target of the sort. 
Since changing the order of - entries often means that two rows must trade values, this is not possible when - the value is constrained by a primary key or unique constraint, since one of the rows - would temporarily have to point to a third available value so that the other row - could take its old value. ``ordering_list`` doesn't do any of this for you, + :class:`.OrderingList` only provides limited functionality when a primary + key column or unique column is the target of the sort. Since changing the + order of entries often means that two rows must trade values, this is not + possible when the value is constrained by a primary key or unique + constraint, since one of the rows would temporarily have to point to a + third available value so that the other row could take its old + value. :class:`.OrderingList` doesn't do any of this for you, nor does SQLAlchemy itself. -``ordering_list`` takes the name of the related object's ordering attribute as +:func:`.ordering_list` takes the name of the related object's ordering attribute as an argument. By default, the zero-based integer index of the object's -position in the ``ordering_list`` is synchronized with the ordering attribute: +position in the :func:`.ordering_list` is synchronized with the ordering attribute: index 0 will get position 0, index 1 position 1, etc. To start numbering at 1 or some other integer, provide ``count_from=1``. -Ordering values are not limited to incrementing integers. Almost any scheme -can implemented by supplying a custom ``ordering_func`` that maps a Python list -index to any value you require. - - - """ from sqlalchemy.orm.collections import collection from sqlalchemy import util -__all__ = [ 'ordering_list' ] +__all__ = ['ordering_list'] def ordering_list(attr, count_from=None, **kw): - """Prepares an OrderingList factory for use in mapper definitions. + """Prepares an :class:`OrderingList` factory for use in mapper definitions. 
- Returns an object suitable for use as an argument to a Mapper relationship's - ``collection_class`` option. Arguments are: + Returns an object suitable for use as an argument to a Mapper + relationship's ``collection_class`` option. e.g.:: - attr + from sqlalchemy.ext.orderinglist import ordering_list + + class Slide(Base): + __tablename__ = 'slide' + + id = Column(Integer, primary_key=True) + name = Column(String) + + bullets = relationship("Bullet", order_by="Bullet.position", + collection_class=ordering_list('position')) + + :param attr: Name of the mapped attribute to use for storage and retrieval of ordering information - count_from (optional) + :param count_from: Set up an integer-based ordering, starting at ``count_from``. For example, ``ordering_list('pos', count_from=1)`` would create a 1-based list in SQL, storing the value in the 'pos' column. Ignored if ``ordering_func`` is supplied. - Passes along any keyword arguments to ``OrderingList`` constructor. + Additional arguments are passed to the :class:`.OrderingList` constructor. + """ kw = _unsugar_count_from(count_from=count_from, **kw) return lambda: OrderingList(attr, **kw) + # Ordering utility functions + + def count_from_0(index, collection): """Numbering function: consecutive integers starting at 0.""" return index + def count_from_1(index, collection): """Numbering function: consecutive integers starting at 1.""" return index + 1 + def count_from_n_factory(start): """Numbering function: consecutive integers starting at arbitrary start.""" @@ -147,8 +174,9 @@ def count_from_n_factory(start): pass return f + def _unsugar_count_from(**kw): - """Builds counting functions from keywrod arguments. + """Builds counting functions from keyword arguments. Keyword argument filter, prepares a simple ``ordering_func`` from a ``count_from`` argument, otherwise passes ``ordering_func`` on unchanged. 
@@ -164,12 +192,13 @@ def _unsugar_count_from(**kw): kw['ordering_func'] = count_from_n_factory(count_from) return kw + class OrderingList(list): """A custom list that manages position information for its children. - See the module and __init__ documentation for more details. The - ``ordering_list`` factory function is used to configure ``OrderingList`` - collections in ``mapper`` relationship definitions. + The :class:`.OrderingList` object is normally set up using the + :func:`.ordering_list` factory function, used in conjunction with + the :func:`.relationship` function. """ @@ -184,13 +213,14 @@ class OrderingList(list): This implementation relies on the list starting in the proper order, so be **sure** to put an ``order_by`` on your relationship. - :param ordering_attr: + :param ordering_attr: Name of the attribute that stores the object's order in the relationship. - :param ordering_func: Optional. A function that maps the position in the Python list to a - value to store in the ``ordering_attr``. Values returned are - usually (but need not be!) integers. + :param ordering_func: Optional. A function that maps the position in + the Python list to a value to store in the + ``ordering_attr``. Values returned are usually (but need not be!) + integers. An ``ordering_func`` is called with two positional parameters: the index of the element in the list, and the list itself. @@ -201,7 +231,7 @@ class OrderingList(list): like stepped numbering, alphabetical and Fibonacci numbering, see the unit tests. - :param reorder_on_append: + :param reorder_on_append: Default False. When appending an object with an existing (non-None) ordering value, that value will be left untouched unless ``reorder_on_append`` is true. 
This is an optimization to avoid a @@ -215,7 +245,7 @@ class OrderingList(list): making changes, any of whom happen to load this collection even in passing, all of the sessions would try to "clean up" the numbering in their commits, possibly causing all but one to fail with a - concurrent modification error. Spooky action at a distance. + concurrent modification error. Recommend leaving this with the default of False, and just call ``reorder()`` if you're doing ``append()`` operations with @@ -314,9 +344,24 @@ class OrderingList(list): self._reorder() # end Py2K + def __reduce__(self): + return _reconstitute, (self.__class__, self.__dict__, list(self)) + for func_name, func in locals().items(): if (util.callable(func) and func.func_name == func_name and not func.__doc__ and hasattr(list, func_name)): func.__doc__ = getattr(list, func_name).__doc__ del func_name, func + +def _reconstitute(cls, dict_, items): + """ Reconstitute an :class:`.OrderingList`. + + This is the adjoint to :meth:`.OrderingList.__reduce__`. It is used for + unpickling :class:`.OrderingList` objects. + + """ + obj = cls.__new__(cls) + obj.__dict__.update(dict_) + list.extend(obj, items) + return obj diff --git a/libs/sqlalchemy/ext/serializer.py b/libs/sqlalchemy/ext/serializer.py index ed2dec6c..47121bca 100644 --- a/libs/sqlalchemy/ext/serializer.py +++ b/libs/sqlalchemy/ext/serializer.py @@ -1,10 +1,10 @@ # ext/serializer.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""Serializer/Deserializer objects for usage with SQLAlchemy query structures, +"""Serializer/Deserializer objects for usage with SQLAlchemy query structures, allowing "contextual" deserialization. 
Any SQLAlchemy query structure, either based on sqlalchemy.sql.* @@ -31,19 +31,19 @@ Usage is nearly the same as that of the standard Python pickle module:: print query2.all() -Similar restrictions as when using raw pickle apply; mapped classes must be +Similar restrictions as when using raw pickle apply; mapped classes must be themselves be pickleable, meaning they are importable from a module-level namespace. The serializer module is only appropriate for query structures. It is not needed for: -* instances of user-defined classes. These contain no references to engines, +* instances of user-defined classes. These contain no references to engines, sessions or expression constructs in the typical case and can be serialized directly. * Table metadata that is to be loaded entirely from the serialized structure (i.e. is - not already declared in the application). Regular pickle.loads()/dumps() can - be used to fully dump any ``MetaData`` object, typically one which was reflected + not already declared in the application). Regular pickle.loads()/dumps() can + be used to fully dump any ``MetaData`` object, typically one which was reflected from an existing database at some previous point in time. The serializer module is specifically for the opposite case, where the Table metadata is already present in memory. diff --git a/libs/sqlalchemy/ext/sqlsoup.py b/libs/sqlalchemy/ext/sqlsoup.py index 7d36cdfa..486b09c5 100644 --- a/libs/sqlalchemy/ext/sqlsoup.py +++ b/libs/sqlalchemy/ext/sqlsoup.py @@ -1,22 +1,17 @@ # ext/sqlsoup.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """ -.. note:: - - SQLSoup is now its own project. 
Documentation - and project status are available at: - - http://pypi.python.org/pypi/sqlsoup - - http://readthedocs.org/docs/sqlsoup - - SQLSoup will no longer be included with SQLAlchemy as of - version 0.8. +.. versionchanged:: 0.8 + SQLSoup is now its own project. Documentation + and project status are available at: + http://pypi.python.org/pypi/sqlsoup and + http://readthedocs.org/docs/sqlsoup\ . + SQLSoup will no longer be included with SQLAlchemy. Introduction @@ -62,7 +57,7 @@ Loading objects is as easy as this:: >>> users [ MappedUsers(name=u'Joe Student',email=u'student@example.edu', - password=u'student',classname=None,admin=0), + password=u'student',classname=None,admin=0), MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu', password=u'basepair',classname=None,admin=1) ] @@ -72,7 +67,7 @@ Of course, letting the database do the sort is better:: >>> db.users.order_by(db.users.name).all() [ MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu', - password=u'basepair',classname=None,admin=1), + password=u'basepair',classname=None,admin=1), MappedUsers(name=u'Joe Student',email=u'student@example.edu', password=u'student',classname=None,admin=0) ] @@ -91,7 +86,7 @@ we're at it:: >>> db.users.filter(where).order_by(desc(db.users.name)).all() [ MappedUsers(name=u'Joe Student',email=u'student@example.edu', - password=u'student',classname=None,admin=0), + password=u'student',classname=None,admin=0), MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu', password=u'basepair',classname=None,admin=1) ] @@ -217,15 +212,15 @@ with `with_labels`, to disambiguate columns with their table name (.c is short for .columns):: >>> db.with_labels(join1).c.keys() - [u'users_name', u'users_email', u'users_password', - u'users_classname', u'users_admin', u'loans_book_id', + [u'users_name', u'users_email', u'users_password', + u'users_classname', u'users_admin', u'loans_book_id', u'loans_user_name', u'loans_loan_date'] You can also join 
directly to a labeled object:: >>> labeled_loans = db.with_labels(db.loans) >>> db.join(db.users, labeled_loans, isouter=True).c.keys() - [u'name', u'email', u'password', u'classname', + [u'name', u'email', u'password', u'classname', u'admin', u'loans_book_id', u'loans_user_name', u'loans_loan_date'] @@ -256,8 +251,8 @@ accepts in normal mapper definition: Advanced Use ============ -Sessions, Transations and Application Integration -------------------------------------------------- +Sessions, Transactions and Application Integration +--------------------------------------------------- .. note:: @@ -472,8 +467,8 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs): selectable = expression._clause_element_as_expr(selectable) mapname = 'Mapped' + _selectable_name(selectable) # Py2K - if isinstance(mapname, unicode): - engine_encoding = engine.dialect.encoding + if isinstance(mapname, unicode): + engine_encoding = engine.dialect.encoding mapname = mapname.encode(engine_encoding) # end Py2K @@ -492,7 +487,7 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs): raise TypeError('unable to compare with %s' % o.__class__) return t1, t2 - # python2/python3 compatible system of + # python2/python3 compatible system of # __cmp__ - __lt__ + __eq__ def __lt__(self, o): @@ -529,15 +524,15 @@ class SqlSoup(object): def __init__(self, engine_or_metadata, base=object, session=None): """Initialize a new :class:`.SqlSoup`. - :param engine_or_metadata: a string database URL, :class:`.Engine` + :param engine_or_metadata: a string database URL, :class:`.Engine` or :class:`.MetaData` object to associate with. If the argument is a :class:`.MetaData`, it should be *bound* to an :class:`.Engine`. - :param base: a class which will serve as the default class for + :param base: a class which will serve as the default class for returned mapped classes. Defaults to ``object``. 
:param session: a :class:`.ScopedSession` or :class:`.Session` with which to associate ORM operations for this :class:`.SqlSoup` instance. - If ``None``, a :class:`.ScopedSession` that's local to this + If ``None``, a :class:`.ScopedSession` that's local to this module is used. """ @@ -550,7 +545,7 @@ class SqlSoup(object): elif isinstance(engine_or_metadata, (basestring, Engine)): self._metadata = MetaData(engine_or_metadata) else: - raise ArgumentError("invalid engine or metadata argument %r" % + raise ArgumentError("invalid engine or metadata argument %r" % engine_or_metadata) self._cache = {} @@ -572,7 +567,7 @@ class SqlSoup(object): """Execute a SQL statement. The statement may be a string SQL string, - an :func:`.expression.select` construct, or an :func:`.expression.text` + an :func:`.expression.select` construct, or an :func:`.expression.text` construct. """ @@ -599,7 +594,7 @@ class SqlSoup(object): self.session.flush() def rollback(self): - """Rollback the current transction. + """Rollback the current transaction. See :meth:`.Session.rollback`. @@ -635,14 +630,14 @@ class SqlSoup(object): """ self.session.expunge_all() - def map_to(self, attrname, tablename=None, selectable=None, + def map_to(self, attrname, tablename=None, selectable=None, schema=None, base=None, mapper_args=util.immutabledict()): """Configure a mapping to the given attrname. - This is the "master" method that can be used to create any + This is the "master" method that can be used to create any configuration. - (new in 0.6.6) + .. 
versionadded:: 0.6.6 :param attrname: String attribute name which will be established as an attribute on this :class:.`.SqlSoup` @@ -682,10 +677,10 @@ class SqlSoup(object): raise ArgumentError("'tablename' and 'selectable' " "arguments are mutually exclusive") - selectable = Table(tablename, - self._metadata, - autoload=True, - autoload_with=self.bind, + selectable = Table(tablename, + self._metadata, + autoload=True, + autoload_with=self.bind, schema=schema or self.schema) elif schema: raise ArgumentError("'tablename' argument is required when " @@ -723,8 +718,9 @@ class SqlSoup(object): def map(self, selectable, base=None, **mapper_args): """Map a selectable directly. - The class and its mapping are not cached and will - be discarded once dereferenced (as of 0.6.6). + .. versionchanged:: 0.6.6 + The class and its mapping are not cached and will + be discarded once dereferenced. :param selectable: an :func:`.expression.select` construct. :param base: a Python class which will be used as the @@ -746,11 +742,12 @@ class SqlSoup(object): ) def with_labels(self, selectable, base=None, **mapper_args): - """Map a selectable directly, wrapping the + """Map a selectable directly, wrapping the selectable in a subquery with labels. - The class and its mapping are not cached and will - be discarded once dereferenced (as of 0.6.6). + .. versionchanged:: 0.6.6 + The class and its mapping are not cached and will + be discarded once dereferenced. :param selectable: an :func:`.expression.select` construct. :param base: a Python class which will be used as the @@ -769,12 +766,13 @@ class SqlSoup(object): select(use_labels=True). alias('foo'), base=base, **mapper_args) - def join(self, left, right, onclause=None, isouter=False, + def join(self, left, right, onclause=None, isouter=False, base=None, **mapper_args): """Create an :func:`.expression.join` and map to it. - The class and its mapping are not cached and will - be discarded once dereferenced (as of 0.6.6). + .. 
versionchanged:: 0.6.6 + The class and its mapping are not cached and will + be discarded once dereferenced. :param left: a mapped class or table object. :param right: a mapped class or table object. @@ -794,7 +792,7 @@ class SqlSoup(object): return self.map(j, base=base, **mapper_args) def entity(self, attr, schema=None): - """Return the named entity from this :class:`.SqlSoup`, or + """Return the named entity from this :class:`.SqlSoup`, or create if not present. For more generalized mapping, see :meth:`.map_to`. diff --git a/libs/sqlalchemy/interfaces.py b/libs/sqlalchemy/interfaces.py index ed02ed0a..92fb3154 100644 --- a/libs/sqlalchemy/interfaces.py +++ b/libs/sqlalchemy/interfaces.py @@ -1,5 +1,5 @@ # sqlalchemy/interfaces.py -# Copyright (C) 2007-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2007-2013 the SQLAlchemy authors and contributors # Copyright (C) 2007 Jason Kirtland jek@discorporate.us # # This module is part of SQLAlchemy and is released under @@ -17,8 +17,8 @@ from sqlalchemy import event, util class PoolListener(object): """Hooks into the lifecycle of connections in a :class:`.Pool`. - .. note:: - + .. note:: + :class:`.PoolListener` is deprecated. Please refer to :class:`.PoolEvents`. @@ -27,7 +27,7 @@ class PoolListener(object): class MyListener(PoolListener): def connect(self, dbapi_con, con_record): '''perform connect operations''' - # etc. + # etc. # create a new pool with a listener p = QueuePool(..., listeners=[MyListener()]) @@ -151,8 +151,8 @@ class PoolListener(object): class ConnectionProxy(object): """Allows interception of statement execution by Connections. - .. note:: - + .. note:: + :class:`.ConnectionProxy` is deprecated. Please refer to :class:`.ConnectionEvents`. 
@@ -194,7 +194,7 @@ class ConnectionProxy(object): event.listen(self, 'before_execute', adapt_execute) - def adapt_cursor_execute(conn, cursor, statement, + def adapt_cursor_execute(conn, cursor, statement, parameters,context, executemany, ): def execute_wrapper( diff --git a/libs/sqlalchemy/log.py b/libs/sqlalchemy/log.py index e77730a9..24608fde 100644 --- a/libs/sqlalchemy/log.py +++ b/libs/sqlalchemy/log.py @@ -12,7 +12,7 @@ module. The regular dotted module namespace is used, starting at 'sqlalchemy'. For class-level logging, the class name is appended. The "echo" keyword parameter, available on SQLA :class:`.Engine` -and :class:`.Pool` objects, corresponds to a logger specific to that +and :class:`.Pool` objects, corresponds to a logger specific to that instance only. """ @@ -60,7 +60,7 @@ class InstanceLogger(object): """A logger adapter (wrapper) for :class:`.Identified` subclasses. This allows multiple instances (e.g. Engine or Pool instances) - to share a logger, but have its verbosity controlled on a + to share a logger, but have its verbosity controlled on a per-instance basis. 
The basic functionality is to return a logging level @@ -185,7 +185,7 @@ def instance_logger(instance, echoflag=None): logger = logging.getLogger(name) else: # if a specified echo flag, return an EchoLogger, - # which checks the flag, overrides normal log + # which checks the flag, overrides normal log # levels by calling logger._log() logger = InstanceLogger(echoflag, name) diff --git a/libs/sqlalchemy/orm/__init__.py b/libs/sqlalchemy/orm/__init__.py index 9fd969e3..b4006be4 100644 --- a/libs/sqlalchemy/orm/__init__.py +++ b/libs/sqlalchemy/orm/__init__.py @@ -1,5 +1,5 @@ # orm/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -142,7 +142,7 @@ def scoped_session(session_factory, scopefunc=None): return ScopedSession(session_factory, scopefunc=scopefunc) def create_session(bind=None, **kwargs): - """Create a new :class:`.Session` + """Create a new :class:`.Session` with no automation enabled by default. This function is used primarily for testing. The usual @@ -180,10 +180,8 @@ def create_session(bind=None, **kwargs): def relationship(argument, secondary=None, **kwargs): """Provide a relationship of a primary Mapper to a secondary Mapper. - .. note:: - - :func:`relationship` is historically known as - :func:`relation` prior to version 0.6. + .. versionchanged:: 0.6 + :func:`relationship` is historically known as :func:`relation`. This corresponds to a parent-child or associative table relationship. The constructed class is an instance of :class:`.RelationshipProperty`. 
@@ -194,57 +192,57 @@ def relationship(argument, secondary=None, **kwargs): 'children': relationship(Child) }) - Some arguments accepted by :func:`.relationship` optionally accept a + Some arguments accepted by :func:`.relationship` optionally accept a callable function, which when called produces the desired value. The callable is invoked by the parent :class:`.Mapper` at "mapper initialization" time, which happens only when mappers are first used, and is assumed to be after all mappings have been constructed. This can be used - to resolve order-of-declaration and other dependency issues, such as + to resolve order-of-declaration and other dependency issues, such as if ``Child`` is declared below ``Parent`` in the same file:: - + mapper(Parent, properties={ - "children":relationship(lambda: Child, + "children":relationship(lambda: Child, order_by=lambda: Child.id) }) - + When using the :ref:`declarative_toplevel` extension, the Declarative initializer allows string arguments to be passed to :func:`.relationship`. - These string arguments are converted into callables that evaluate + These string arguments are converted into callables that evaluate the string as Python code, using the Declarative class-registry as a namespace. This allows the lookup of related classes to be automatic via their string name, and removes the need to import related classes at all into the local module space:: - + from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() - + class Parent(Base): __tablename__ = 'parent' id = Column(Integer, primary_key=True) children = relationship("Child", order_by="Child.id") - + A full array of examples and reference documentation regarding :func:`.relationship` is at :ref:`relationship_config_toplevel`. - + :param argument: a mapped class, or actual :class:`.Mapper` instance, representing the target of - the relationship. - + the relationship. 
+ ``argument`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. :param secondary: for a many-to-many relationship, specifies the intermediary - table, and is an instance of :class:`.Table`. The ``secondary`` keyword + table, and is an instance of :class:`.Table`. The ``secondary`` keyword argument should generally only be used for a table that is not otherwise expressed in any class mapping, unless this relationship is declared as view only, otherwise - conflicting persistence operations can occur. - + conflicting persistence operations can occur. + ``secondary`` may - also be passed as a callable function which is evaluated at + also be passed as a callable function which is evaluated at mapper initialization time. :param active_history=False: @@ -260,16 +258,16 @@ def relationship(argument, secondary=None, **kwargs): :param backref: indicates the string name of a property to be placed on the related mapper's class that will handle this relationship in the other - direction. The other property will be created automatically + direction. The other property will be created automatically when the mappers are configured. Can also be passed as a :func:`backref` object to control the configuration of the new relationship. :param back_populates: - Takes a string name and has the same meaning as ``backref``, - except the complementing property is **not** created automatically, - and instead must be configured explicitly on the other mapper. The - complementing property should also indicate ``back_populates`` + Takes a string name and has the same meaning as ``backref``, + except the complementing property is **not** created automatically, + and instead must be configured explicitly on the other mapper. 
The + complementing property should also indicate ``back_populates`` to this relationship to ensure proper functioning. :param cascade: @@ -280,12 +278,12 @@ def relationship(argument, secondary=None, **kwargs): Available cascades are: - * ``save-update`` - cascade the :meth:`.Session.add` + * ``save-update`` - cascade the :meth:`.Session.add` operation. This cascade applies both to future and - past calls to :meth:`~sqlalchemy.orm.session.Session.add`, + past calls to :meth:`~sqlalchemy.orm.session.Session.add`, meaning new items added to a collection or scalar relationship - get placed into the same session as that of the parent, and - also applies to items which have been removed from this + get placed into the same session as that of the parent, and + also applies to items which have been removed from this relationship but are still part of unflushed history. * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge` @@ -297,15 +295,17 @@ def relationship(argument, secondary=None, **kwargs): * ``delete`` - cascade the :meth:`.Session.delete` operation - * ``delete-orphan`` - if an item of the child's type is - detached from its parent, mark it for deletion. - As of version 0.7, this option does not prevent - a new instance of the child object from being persisted - without a parent to start with; to constrain against - that case, ensure the child's foreign key column(s) - is configured as NOT NULL + * ``delete-orphan`` - if an item of the child's type is + detached from its parent, mark it for deletion. - * ``refresh-expire`` - cascade the :meth:`.Session.expire` + .. 
versionchanged:: 0.7 + This option does not prevent + a new instance of the child object from being persisted + without a parent to start with; to constrain against + that case, ensure the child's foreign key column(s) + is configured as NOT NULL + + * ``refresh-expire`` - cascade the :meth:`.Session.expire` and :meth:`~sqlalchemy.orm.session.Session.refresh` operations * ``all`` - shorthand for "save-update,merge, refresh-expire, @@ -313,33 +313,33 @@ def relationship(argument, secondary=None, **kwargs): See the section :ref:`unitofwork_cascades` for more background on configuring cascades. - + :param cascade_backrefs=True: a boolean value indicating if the ``save-update`` cascade should - operate along an assignment event intercepted by a backref. + operate along an assignment event intercepted by a backref. When set to ``False``, the attribute managed by this relationship will not cascade an incoming transient object into the session of a persistent parent, if the event is received via backref. - + That is:: - + mapper(A, a_table, properties={ 'bs':relationship(B, backref="a", cascade_backrefs=False) }) - + If an ``A()`` is present in the session, assigning it to the "a" attribute on a transient ``B()`` will not place - the ``B()`` into the session. To set the flag in the other - direction, i.e. so that ``A().bs.append(B())`` won't add + the ``B()`` into the session. To set the flag in the other + direction, i.e. so that ``A().bs.append(B())`` won't add a transient ``A()`` into the session for a persistent ``B()``:: - + mapper(A, a_table, properties={ - 'bs':relationship(B, + 'bs':relationship(B, backref=backref("a", cascade_backrefs=False) ) }) - + See the section :ref:`unitofwork_cascades` for more background on configuring cascades. @@ -366,9 +366,9 @@ def relationship(argument, secondary=None, **kwargs): a list of columns which are to be used as "foreign key" columns. 
Normally, :func:`relationship` uses the :class:`.ForeignKey` and :class:`.ForeignKeyConstraint` objects present within the - mapped or secondary :class:`.Table` to determine the "foreign" side of + mapped or secondary :class:`.Table` to determine the "foreign" side of the join condition. This is used to construct SQL clauses in order - to load objects, as well as to "synchronize" values from + to load objects, as well as to "synchronize" values from primary key columns to referencing foreign key columns. The ``foreign_keys`` parameter overrides the notion of what's "foreign" in the table metadata, allowing the specification @@ -384,31 +384,33 @@ def relationship(argument, secondary=None, **kwargs): should artificially not be considered as foreign. ``foreign_keys`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. - + :param innerjoin=False: when ``True``, joined eager loads will use an inner join to join against related tables instead of an outer join. The purpose - of this option is strictly one of performance, as inner joins - generally perform better than outer joins. This flag can - be set to ``True`` when the relationship references an object - via many-to-one using local foreign keys that are not nullable, - or when the reference is one-to-one or a collection that is - guaranteed to have one or at least one entry. + of this option is generally one of performance, as inner joins + generally perform better than outer joins. Another reason can be + the use of ``with_lockmode``, which does not support outer joins. + + This flag can be set to ``True`` when the relationship references an + object via many-to-one using local foreign keys that are not nullable, + or when the reference is one-to-one or a collection that is guaranteed + to have one or at least one entry. 
:param join_depth: when non-``None``, an integer value indicating how many levels - deep "eager" loaders should join on a self-referring or cyclical - relationship. The number counts how many times the same Mapper - shall be present in the loading condition along a particular join + deep "eager" loaders should join on a self-referring or cyclical + relationship. The number counts how many times the same Mapper + shall be present in the loading condition along a particular join branch. When left at its default of ``None``, eager loaders - will stop chaining when they encounter a the same target mapper + will stop chaining when they encounter a the same target mapper which is already higher up in the chain. This option applies both to joined- and subquery- eager loaders. - :param lazy='select': specifies - how the related items should be loaded. Default value is + :param lazy='select': specifies + how the related items should be loaded. Default value is ``select``. Values include: * ``select`` - items should be loaded lazily when the property is first @@ -417,7 +419,9 @@ def relationship(argument, secondary=None, **kwargs): * ``immediate`` - items should be loaded as the parents are loaded, using a separate SELECT statement, or identity map fetch for - simple many-to-one references. (new as of 0.6.5) + simple many-to-one references. + + .. versionadded:: 0.6.5 * ``joined`` - items should be loaded "eagerly" in the same query as that of the parent, using a JOIN or LEFT OUTER JOIN. Whether @@ -429,19 +433,19 @@ def relationship(argument, secondary=None, **kwargs): which issues a JOIN to a subquery of the original statement. - * ``noload`` - no loading should occur at any time. This is to + * ``noload`` - no loading should occur at any time. This is to support "write-only" attributes, or attributes which are populated in some manner specific to the application. 
* ``dynamic`` - the attribute will return a pre-configured - :class:`~sqlalchemy.orm.query.Query` object for all read + :class:`~sqlalchemy.orm.query.Query` object for all read operations, onto which further filtering operations can be applied before iterating the results. See the section :ref:`dynamic_relationship` for more details. * True - a synonym for 'select' - * False - a synonyn for 'joined' + * False - a synonym for 'joined' * None - a synonym for 'noload' @@ -460,8 +464,8 @@ def relationship(argument, secondary=None, **kwargs): Note that the load of related objects on a pending or transient object also does not trigger any attribute change events - no user-defined - events will be emitted for these attributes, and if and when the - object is ultimately flushed, only the user-specific foreign key + events will be emitted for these attributes, and if and when the + object is ultimately flushed, only the user-specific foreign key attributes will be part of the modified state. The load_on_pending flag does not improve behavior @@ -475,14 +479,14 @@ def relationship(argument, secondary=None, **kwargs): :param order_by: indicates the ordering that should be applied when loading these items. ``order_by`` is expected to refer to one of the :class:`.Column` - objects to which the target class is mapped, or + objects to which the target class is mapped, or the attribute itself bound to the target class which refers to the column. ``order_by`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. - + :param passive_deletes=False: Indicates loading behavior during delete operations. @@ -562,7 +566,7 @@ def relationship(argument, secondary=None, **kwargs): table). 
``primaryjoin`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. :param remote_side: @@ -570,16 +574,16 @@ def relationship(argument, secondary=None, **kwargs): list of columns that form the "remote side" of the relationship. ``remote_side`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. :param query_class: a :class:`.Query` subclass that will be used as the base of the "appender query" returned by a "dynamic" relationship, that - is, a relationship that specifies ``lazy="dynamic"`` or was + is, a relationship that specifies ``lazy="dynamic"`` or was otherwise constructed using the :func:`.orm.dynamic_loader` function. - + :param secondaryjoin: a SQL expression that will be used as the join of an association table to the child object. By default, this value is @@ -587,7 +591,7 @@ def relationship(argument, secondary=None, **kwargs): child tables. ``secondaryjoin`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. :param single_parent=(True|False): @@ -595,7 +599,7 @@ def relationship(argument, secondary=None, **kwargs): from being associated with more than one parent at a time. This is used for many-to-one or many-to-many relationships that should be treated either as one-to-one or one-to-many. Its - usage is optional unless delete-orphan cascade is also + usage is optional unless delete-orphan cascade is also set on this relationship(), in which case its required. 
:param uselist=(True|False): @@ -628,13 +632,13 @@ def relation(*arg, **kw): def dynamic_loader(argument, **kw): """Construct a dynamically-loading mapper property. - This is essentially the same as + This is essentially the same as using the ``lazy='dynamic'`` argument with :func:`relationship`:: dynamic_loader(SomeClass) - + # is the same as - + relationship(SomeClass, lazy="dynamic") See the section :ref:`dynamic_relationship` for more details @@ -668,7 +672,9 @@ def column_property(*cols, **kw): flag is available for applications that make use of :func:`.attributes.get_history` or :meth:`.Session.is_modified` which also need to know - the "previous" value of the attribute. (new in 0.6.6) + the "previous" value of the attribute. + + .. versionadded:: 0.6.6 :param comparator_factory: a class which extends :class:`.ColumnProperty.Comparator` which provides custom SQL clause @@ -686,29 +692,29 @@ def column_property(*cols, **kw): :param doc: optional string that will be applied as the doc on the class-bound descriptor. - + :param expire_on_flush=True: Disable expiry on flush. A column_property() which refers to a SQL expression (and not a single table-bound column) is considered to be a "read only" property; populating it has no effect on the state of data, and it can only return database state. For this reason a column_property()'s value - is expired whenever the parent object is involved in a + is expired whenever the parent object is involved in a flush, that is, has any kind of "dirty" state within a flush. Setting this parameter to ``False`` will have the effect of leaving any existing value present after the flush proceeds. Note however that the :class:`.Session` with default expiration - settings still expires + settings still expires all attributes after a :meth:`.Session.commit` call, however. - New in 0.7.3. - + + .. 
versionadded:: 0.7.3 :param extension: an :class:`.AttributeExtension` instance, or list of extensions, which will be prepended to the list of attribute listeners for the resulting - descriptor placed on the class. + descriptor placed on the class. **Deprecated.** Please see :class:`.AttributeEvents`. @@ -719,7 +725,7 @@ def column_property(*cols, **kw): def composite(class_, *cols, **kwargs): """Return a composite column-based property for use with a Mapper. - See the mapping documention section :ref:`mapper_composite` for a full + See the mapping documentation section :ref:`mapper_composite` for a full usage example. :param class\_: @@ -732,8 +738,10 @@ def composite(class_, *cols, **kwargs): When ``True``, indicates that the "previous" value for a scalar attribute should be loaded when replaced, if not already loaded. See the same flag on :func:`.column_property`. - (This flag becomes meaningful specifically for - :func:`.composite` in 0.7 - previously it was a placeholder). + + .. versionchanged:: 0.7 + This flag specifically becomes meaningful + - previously it was a placeholder. :param group: A group name for this property when marked as deferred. @@ -767,7 +775,7 @@ def backref(name, **kwargs): Used with the ``backref`` keyword argument to :func:`relationship` in place of a string argument, e.g.:: - + 'items':relationship(SomeItem, backref=backref('parent', lazy='subquery')) """ @@ -781,7 +789,7 @@ def deferred(*columns, **kwargs): Used with the "properties" dictionary sent to :func:`mapper`. See also: - + :ref:`deferred` """ @@ -789,47 +797,47 @@ def deferred(*columns, **kwargs): def mapper(class_, local_table=None, *args, **params): """Return a new :class:`~.Mapper` object. - + This function is typically used behind the scenes via the Declarative extension. When using Declarative, many of the usual :func:`.mapper` arguments are handled by the Declarative extension itself, including ``class_``, ``local_table``, ``properties``, and ``inherits``. 
- Other options are passed to :func:`.mapper` using + Other options are passed to :func:`.mapper` using the ``__mapper_args__`` class variable:: - + class MyClass(Base): __tablename__ = 'my_table' id = Column(Integer, primary_key=True) type = Column(String(50)) alt = Column("some_alt", Integer) - + __mapper_args__ = { 'polymorphic_on' : type } Explicit use of :func:`.mapper` - is often referred to as *classical mapping*. The above + is often referred to as *classical mapping*. The above declarative example is equivalent in classical form to:: - + my_table = Table("my_table", metadata, Column('id', Integer, primary_key=True), Column('type', String(50)), Column("some_alt", Integer) ) - + class MyClass(object): pass - - mapper(MyClass, my_table, - polymorphic_on=my_table.c.type, + + mapper(MyClass, my_table, + polymorphic_on=my_table.c.type, properties={ 'alt':my_table.c.some_alt }) - + See also: - + :ref:`classical_mapping` - discussion of direct usage of :func:`.mapper` @@ -837,10 +845,10 @@ def mapper(class_, local_table=None, *args, **params): this argument is automatically passed as the declared class itself. - :param local_table: The :class:`.Table` or other selectable - to which the class is mapped. May be ``None`` if + :param local_table: The :class:`.Table` or other selectable + to which the class is mapped. May be ``None`` if this mapper inherits from another mapper using single-table - inheritance. When using Declarative, this argument is + inheritance. When using Declarative, this argument is automatically passed by the extension, based on what is configured via the ``__table__`` argument or via the :class:`.Table` produced as a result of the ``__tablename__`` and :class:`.Column` @@ -864,30 +872,30 @@ def mapper(class_, local_table=None, *args, **params): particular primary key value. A "partial primary key" can occur if one has mapped to an OUTER JOIN, for example. 
- :param batch: Defaults to ``True``, indicating that save operations - of multiple entities can be batched together for efficiency. + :param batch: Defaults to ``True``, indicating that save operations + of multiple entities can be batched together for efficiency. Setting to False indicates that an instance will be fully saved before saving the next - instance. This is used in the extremely rare case that a - :class:`.MapperEvents` listener requires being called + instance. This is used in the extremely rare case that a + :class:`.MapperEvents` listener requires being called in between individual row persistence operations. - :param column_prefix: A string which will be prepended + :param column_prefix: A string which will be prepended to the mapped attribute name when :class:`.Column` objects are automatically assigned as attributes to the - mapped class. Does not affect explicitly specified - column-based properties. - + mapped class. Does not affect explicitly specified + column-based properties. + See the section :ref:`column_prefix` for an example. :param concrete: If True, indicates this mapper should use concrete table inheritance with its parent mapper. - + See the section :ref:`concrete_inheritance` for an example. - :param exclude_properties: A list or set of string column names to - be excluded from mapping. - + :param exclude_properties: A list or set of string column names to + be excluded from mapping. + See :ref:`include_exclude_cols` for an example. :param extension: A :class:`.MapperExtension` instance or @@ -896,47 +904,47 @@ def mapper(class_, local_table=None, *args, **params): :class:`.Mapper`. **Deprecated.** Please see :class:`.MapperEvents`. :param include_properties: An inclusive list or set of string column - names to map. - + names to map. + See :ref:`include_exclude_cols` for an example. 
- :param inherits: A mapped class or the corresponding :class:`.Mapper` + :param inherits: A mapped class or the corresponding :class:`.Mapper` of one indicating a superclass to which this :class:`.Mapper` should *inherit* from. The mapped class here must be a subclass of the other mapper's class. When using Declarative, this argument is passed automatically as a result of the natural class - hierarchy of the declared classes. - + hierarchy of the declared classes. + See also: - + :ref:`inheritance_toplevel` - + :param inherit_condition: For joined table inheritance, a SQL expression which will define how the two tables are joined; defaults to a natural join between the two tables. :param inherit_foreign_keys: When ``inherit_condition`` is used and the - columns present are missing a :class:`.ForeignKey` configuration, - this parameter can be used to specify which columns are "foreign". + columns present are missing a :class:`.ForeignKey` configuration, + this parameter can be used to specify which columns are "foreign". In most cases can be left as ``None``. :param non_primary: Specify that this :class:`.Mapper` is in addition to the "primary" mapper, that is, the one used for persistence. The :class:`.Mapper` created here may be used for ad-hoc mapping of the class to an alternate selectable, for loading - only. - + only. + The ``non_primary`` feature is rarely needed with modern usage. :param order_by: A single :class:`.Column` or list of :class:`.Column` objects for which selection operations should use as the default - ordering for entities. By default mappers have no pre-defined + ordering for entities. By default mappers have no pre-defined ordering. :param passive_updates: Indicates UPDATE behavior of foreign key - columns when a primary key column changes on a joined-table inheritance + columns when a primary key column changes on a joined-table inheritance mapping. Defaults to ``True``. 
When True, it is assumed that ON UPDATE CASCADE is configured on @@ -949,41 +957,41 @@ def mapper(class_, local_table=None, *args, **params): operation for an update. The :class:`.Mapper` here will emit an UPDATE statement for the dependent columns during a primary key change. - + See also: - - :ref:`passive_updates` - description of a similar feature as + + :ref:`passive_updates` - description of a similar feature as used with :func:`.relationship` - :param polymorphic_on: Specifies the column, attribute, or - SQL expression used to determine the target class for an + :param polymorphic_on: Specifies the column, attribute, or + SQL expression used to determine the target class for an incoming row, when inheriting classes are present. - + This value is commonly a :class:`.Column` object that's present in the mapped :class:`.Table`:: - + class Employee(Base): __tablename__ = 'employee' - + id = Column(Integer, primary_key=True) discriminator = Column(String(50)) - + __mapper_args__ = { "polymorphic_on":discriminator, "polymorphic_identity":"employee" } - - As of SQLAlchemy 0.7.4, it may also be specified - as a SQL expression, as in this example where we + + It may also be specified + as a SQL expression, as in this example where we use the :func:`.case` construct to provide a conditional approach:: class Employee(Base): __tablename__ = 'employee' - + id = Column(Integer, primary_key=True) discriminator = Column(String(50)) - + __mapper_args__ = { "polymorphic_on":case([ (discriminator == "EN", "engineer"), @@ -991,14 +999,14 @@ def mapper(class_, local_table=None, *args, **params): ], else_="employee"), "polymorphic_identity":"employee" } - - Also as of 0.7.4, it may also refer to any attribute + + It may also refer to any attribute configured with :func:`.column_property`, or to the string name of one:: - + class Employee(Base): __tablename__ = 'employee' - + id = Column(Integer, primary_key=True) discriminator = Column(String(50)) employee_type = column_property( 
@@ -1007,16 +1015,21 @@ def mapper(class_, local_table=None, *args, **params): (discriminator == "MA", "manager"), ], else_="employee") ) - + __mapper_args__ = { "polymorphic_on":employee_type, "polymorphic_identity":"employee" } - + + .. versionchanged:: 0.7.4 + ``polymorphic_on`` may be specified as a SQL expression, + or refer to any attribute configured with + :func:`.column_property`, or to the string name of one. + When setting ``polymorphic_on`` to reference an attribute or expression that's not present in the - locally mapped :class:`.Table`, yet the value - of the discriminator should be persisted to the database, + locally mapped :class:`.Table`, yet the value + of the discriminator should be persisted to the database, the value of the discriminator is not automatically set on new instances; this must be handled by the user, @@ -1026,27 +1039,27 @@ def mapper(class_, local_table=None, *args, **params): from sqlalchemy import event from sqlalchemy.orm import object_mapper - + @event.listens_for(Employee, "init", propagate=True) def set_identity(instance, *arg, **kw): mapper = object_mapper(instance) instance.discriminator = mapper.polymorphic_identity - + Where above, we assign the value of ``polymorphic_identity`` for the mapped class to the ``discriminator`` attribute, thus persisting the value to the ``discriminator`` column in the database. - + See also: - + :ref:`inheritance_toplevel` - - :param polymorphic_identity: Specifies the value which + + :param polymorphic_identity: Specifies the value which identifies this particular class as returned by the column expression referred to by the ``polymorphic_on`` setting. As rows are received, the value corresponding to the ``polymorphic_on`` column expression is compared - to this value, indicating which subclass should + to this value, indicating which subclass should be used for the newly reconstructed object. 
:param properties: A dictionary mapping the string names of object @@ -1064,11 +1077,11 @@ def mapper(class_, local_table=None, *args, **params): This is normally simply the primary key of the ``local_table``, but can be overridden here. - :param version_id_col: A :class:`.Column` + :param version_id_col: A :class:`.Column` that will be used to keep a running version id of mapped entities in the database. This is used during save operations to ensure that no other thread or process has updated the instance during the - lifetime of the entity, else a :class:`~sqlalchemy.orm.exc.StaleDataError` + lifetime of the entity, else a :class:`~sqlalchemy.orm.exc.StaleDataError` exception is thrown. By default the column must be of :class:`.Integer` type, unless ``version_id_generator`` specifies a new generation @@ -1085,13 +1098,13 @@ def mapper(class_, local_table=None, *args, **params): __tablename__ = 'mytable' id = Column(Integer, primary_key=True) version_uuid = Column(String(32)) - + __mapper_args__ = { 'version_id_col':version_uuid, 'version_id_generator':lambda version:uuid.uuid4().hex } - The callable receives the current version identifier as its + The callable receives the current version identifier as its single argument. :param with_polymorphic: A tuple in the form ``(, @@ -1102,28 +1115,27 @@ def mapper(class_, local_table=None, *args, **params): ``'*'`` may be used to indicate all descending classes should be loaded immediately. The second tuple argument indicates a selectable that will be used to query for multiple - classes. - + classes. + See also: - + :ref:`concrete_inheritance` - typically uses ``with_polymorphic`` to specify a UNION statement to select from. 
- - :ref:`with_polymorphic` - usage example of the related + + :ref:`with_polymorphic` - usage example of the related :meth:`.Query.with_polymorphic` method - + """ return Mapper(class_, local_table, *args, **params) -def synonym(name, map_column=False, descriptor=None, +def synonym(name, map_column=False, descriptor=None, comparator_factory=None, doc=None): """Denote an attribute name as a synonym to a mapped property. - .. note:: - - :func:`.synonym` is superseded as of 0.7 by - the :mod:`~sqlalchemy.ext.hybrid` extension. See - the documentation for hybrids at :ref:`hybrids_toplevel`. + .. versionchanged:: 0.7 + :func:`.synonym` is superseded by the :mod:`~sqlalchemy.ext.hybrid` + extension. See the documentation for hybrids + at :ref:`hybrids_toplevel`. Used with the ``properties`` dictionary sent to :func:`~sqlalchemy.orm.mapper`:: @@ -1138,7 +1150,7 @@ def synonym(name, map_column=False, descriptor=None, mapper(MyClass, sometable, properties={ "status":synonym("_status", map_column=True) }) - + Above, the ``status`` attribute of MyClass will produce expression behavior against the table column named ``status``, using the Python attribute ``_status`` on the mapped class @@ -1154,25 +1166,24 @@ def synonym(name, map_column=False, descriptor=None, column to map. """ - return SynonymProperty(name, map_column=map_column, - descriptor=descriptor, + return SynonymProperty(name, map_column=map_column, + descriptor=descriptor, comparator_factory=comparator_factory, doc=doc) def comparable_property(comparator_factory, descriptor=None): - """Provides a method of applying a :class:`.PropComparator` + """Provides a method of applying a :class:`.PropComparator` to any Python descriptor attribute. - .. note:: - - :func:`.comparable_property` is superseded as of 0.7 by - the :mod:`~sqlalchemy.ext.hybrid` extension. See the example - at :ref:`hybrid_custom_comparators`. - - Allows any Python descriptor to behave like a SQL-enabled + .. 
versionchanged:: 0.7 + :func:`.comparable_property` is superseded by + the :mod:`~sqlalchemy.ext.hybrid` extension. See the example + at :ref:`hybrid_custom_comparators`. + + Allows any Python descriptor to behave like a SQL-enabled attribute when used at the class level in queries, allowing redefinition of expression operator behavior. - + In the example below we redefine :meth:`.PropComparator.operate` to wrap both sides of an expression in ``func.lower()`` to produce case-insensitive comparison:: @@ -1186,7 +1197,7 @@ def comparable_property(comparator_factory, descriptor=None): class CaseInsensitiveComparator(PropComparator): def __clause_element__(self): return self.prop - + def operate(self, op, other): return op( func.lower(self.__clause_element__()), @@ -1203,13 +1214,13 @@ def comparable_property(comparator_factory, descriptor=None): CaseInsensitiveComparator(mapper.c.word, mapper) ) - - A mapping like the above allows the ``word_insensitive`` attribute + + A mapping like the above allows the ``word_insensitive`` attribute to render an expression like:: - + >>> print SearchWord.word_insensitive == "Trucks" lower(search_word.word) = lower(:lower_1) - + :param comparator_factory: A PropComparator subclass or factory that defines operator behavior for this property. @@ -1218,7 +1229,7 @@ def comparable_property(comparator_factory, descriptor=None): Optional when used in a ``properties={}`` declaration. The Python descriptor or property to layer comparison behavior on top of. - The like-named descriptor will be automatically retreived from the + The like-named descriptor will be automatically retrieved from the mapped class if left blank in a ``properties`` declaration. """ @@ -1235,7 +1246,7 @@ def clear_mappers(): """Remove all mappers from all classes. This function removes all instrumentation from classes and disposes - of their associated mappers. Once called, the classes are unmapped + of their associated mappers. 
Once called, the classes are unmapped and can be later re-mapped with new mappers. :func:`.clear_mappers` is *not* for normal use, as there is literally no @@ -1246,7 +1257,7 @@ def clear_mappers(): such, :func:`.clear_mappers` is only for usage in test suites that re-use the same classes with different mappings, which is itself an extremely rare use case - the only such use case is in fact SQLAlchemy's own test suite, - and possibly the test suites of other ORM extension libraries which + and possibly the test suites of other ORM extension libraries which intend to test various combinations of mapper construction upon a fixed set of classes. @@ -1267,22 +1278,21 @@ def joinedload(*keys, **kw): """Return a ``MapperOption`` that will convert the property of the given name or series of mapped attributes into an joined eager load. - .. note:: - - This function is known as :func:`eagerload` in all versions - of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4 - series. :func:`eagerload` will remain available for the foreseeable - future in order to enable cross-compatibility. + .. versionchanged:: 0.6beta3 + This function is known as :func:`eagerload` in all versions + of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4 + series. :func:`eagerload` will remain available for the foreseeable + future in order to enable cross-compatibility. Used with :meth:`~sqlalchemy.orm.query.Query.options`. examples:: - # joined-load the "orders" colleciton on "User" + # joined-load the "orders" collection on "User" query(User).options(joinedload(User.orders)) # joined-load the "keywords" collection on each "Item", - # but not the "items" collection on "Order" - those + # but not the "items" collection on "Order" - those # remain lazily loaded. query(Order).options(joinedload(Order.items, Item.keywords)) @@ -1297,17 +1307,17 @@ def joinedload(*keys, **kw): query(Order).options(joinedload(Order.user, innerjoin=True)) - .. note:: - + .. 
note:: + The join created by :func:`joinedload` is anonymously aliased such that it **does not affect the query results**. An :meth:`.Query.order_by` or :meth:`.Query.filter` call **cannot** reference these aliased - tables - so-called "user space" joins are constructed using + tables - so-called "user space" joins are constructed using :meth:`.Query.join`. The rationale for this is that :func:`joinedload` is only applied in order to affect how related objects or collections are loaded as an optimizing detail - it can be added or removed with no impact - on actual results. See the section :ref:`zen_of_eager_loading` for - a detailed description of how this is used, including how to use a single + on actual results. See the section :ref:`zen_of_eager_loading` for + a detailed description of how this is used, including how to use a single explicit JOIN for filtering/ordering and eager loading simultaneously. See also: :func:`subqueryload`, :func:`lazyload` @@ -1316,7 +1326,7 @@ def joinedload(*keys, **kw): innerjoin = kw.pop('innerjoin', None) if innerjoin is not None: return ( - strategies.EagerLazyOption(keys, lazy='joined'), + strategies.EagerLazyOption(keys, lazy='joined'), strategies.EagerJoinOption(keys, innerjoin) ) else: @@ -1324,15 +1334,14 @@ def joinedload(*keys, **kw): def joinedload_all(*keys, **kw): """Return a ``MapperOption`` that will convert all properties along the - given dot-separated path or series of mapped attributes + given dot-separated path or series of mapped attributes into an joined eager load. - .. note:: - - This function is known as :func:`eagerload_all` in all versions - of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4 - series. :func:`eagerload_all` will remain available for the - foreseeable future in order to enable cross-compatibility. + .. versionchanged:: 0.6beta3 + This function is known as :func:`eagerload_all` in all versions + of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4 + series. 
:func:`eagerload_all` will remain available for the + foreseeable future in order to enable cross-compatibility. Used with :meth:`~sqlalchemy.orm.query.Query.options`. @@ -1357,7 +1366,7 @@ def joinedload_all(*keys, **kw): innerjoin = kw.pop('innerjoin', None) if innerjoin is not None: return ( - strategies.EagerLazyOption(keys, lazy='joined', chained=True), + strategies.EagerLazyOption(keys, lazy='joined', chained=True), strategies.EagerJoinOption(keys, innerjoin, chained=True) ) else: @@ -1373,19 +1382,19 @@ def eagerload_all(*args, **kwargs): return joinedload_all(*args, **kwargs) def subqueryload(*keys): - """Return a ``MapperOption`` that will convert the property - of the given name or series of mapped attributes + """Return a ``MapperOption`` that will convert the property + of the given name or series of mapped attributes into an subquery eager load. Used with :meth:`~sqlalchemy.orm.query.Query.options`. examples:: - # subquery-load the "orders" colleciton on "User" + # subquery-load the "orders" collection on "User" query(User).options(subqueryload(User.orders)) # subquery-load the "keywords" collection on each "Item", - # but not the "items" collection on "Order" - those + # but not the "items" collection on "Order" - those # remain lazily loaded. query(Order).options(subqueryload(Order.items, Item.keywords)) @@ -1402,7 +1411,7 @@ def subqueryload(*keys): def subqueryload_all(*keys): """Return a ``MapperOption`` that will convert all properties along the - given dot-separated path or series of mapped attributes + given dot-separated path or series of mapped attributes into a subquery eager load. Used with :meth:`~sqlalchemy.orm.query.Query.options`. @@ -1437,7 +1446,7 @@ def lazyload(*keys): def lazyload_all(*keys): """Return a ``MapperOption`` that will convert all the properties - along the given dot-separated path or series of mapped attributes + along the given dot-separated path or series of mapped attributes into a lazy load. 
Used with :meth:`~sqlalchemy.orm.query.Query.options`. @@ -1453,22 +1462,22 @@ def noload(*keys): Used with :meth:`~sqlalchemy.orm.query.Query.options`. - See also: :func:`lazyload`, :func:`eagerload`, + See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`, :func:`immediateload` """ return strategies.EagerLazyOption(keys, lazy=None) def immediateload(*keys): - """Return a ``MapperOption`` that will convert the property of the given + """Return a ``MapperOption`` that will convert the property of the given name or series of mapped attributes into an immediate load. - + The "immediate" load means the attribute will be fetched - with a separate SELECT statement per parent in the + with a separate SELECT statement per parent in the same way as lazy loading - except the loader is guaranteed to be called at load time before the parent object is returned in the result. - + The normal behavior of lazy loading applies - if the relationship is a simple many-to-one, and the child object is already present in the :class:`.Session`, @@ -1478,7 +1487,7 @@ def immediateload(*keys): See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload` - New as of verison 0.6.5. + .. versionadded:: 0.6.5 """ return strategies.EagerLazyOption(keys, lazy='immediate') @@ -1488,7 +1497,7 @@ def contains_alias(alias): the main table has been aliased. This is used in the very rare case that :func:`.contains_eager` - is being used in conjunction with a user-defined SELECT + is being used in conjunction with a user-defined SELECT statement that aliases the parent table. 
E.g.:: # define an aliased UNION called 'ulist' @@ -1500,18 +1509,18 @@ def contains_alias(alias): statement = statement.outerjoin(addresses).\\ select().apply_labels() - # create query, indicating "ulist" will be an - # alias for the main table, "addresses" + # create query, indicating "ulist" will be an + # alias for the main table, "addresses" # property should be eager loaded query = session.query(User).options( - contains_alias('ulist'), + contains_alias('ulist'), contains_eager('addresses')) # then get results via the statement results = query.from_statement(statement).all() - :param alias: is the string name of an alias, or a - :class:`~.sql.expression.Alias` object representing + :param alias: is the string name of an alias, or a + :class:`~.sql.expression.Alias` object representing the alias. """ @@ -1524,7 +1533,7 @@ def contains_eager(*keys, **kwargs): Used with :meth:`~sqlalchemy.orm.query.Query.options`. - The option is used in conjunction with an explicit join that loads + The option is used in conjunction with an explicit join that loads the desired rows, i.e.:: sess.query(Order).\\ @@ -1545,7 +1554,7 @@ def contains_eager(*keys, **kwargs): join((user_alias, Order.user)).\\ options(contains_eager(Order.user, alias=user_alias)) - See also :func:`eagerload` for the "automatic" version of this + See also :func:`eagerload` for the "automatic" version of this functionality. For additional examples of :func:`contains_eager` see @@ -1565,36 +1574,36 @@ def defer(*key): of the given name into a deferred load. Used with :meth:`.Query.options`. 
- + e.g.:: - + from sqlalchemy.orm import defer - query(MyClass).options(defer("attribute_one"), + query(MyClass).options(defer("attribute_one"), defer("attribute_two")) - + A class bound descriptor is also accepted:: - + query(MyClass).options( - defer(MyClass.attribute_one), + defer(MyClass.attribute_one), defer(MyClass.attribute_two)) - + A "path" can be specified onto a related or collection object using a dotted name. The :func:`.orm.defer` option will be applied to that object when loaded:: - + query(MyClass).options( - defer("related.attribute_one"), + defer("related.attribute_one"), defer("related.attribute_two")) - + To specify a path via class, send multiple arguments:: query(MyClass).options( - defer(MyClass.related, MyOtherClass.attribute_one), + defer(MyClass.related, MyOtherClass.attribute_one), defer(MyClass.related, MyOtherClass.attribute_two)) - + See also: - + :ref:`deferred` :param \*key: A key representing an individual path. Multiple entries @@ -1609,41 +1618,41 @@ def undefer(*key): of the given name into a non-deferred (regular column) load. Used with :meth:`.Query.options`. - + e.g.:: - + from sqlalchemy.orm import undefer - query(MyClass).options(undefer("attribute_one"), + query(MyClass).options(undefer("attribute_one"), undefer("attribute_two")) - + A class bound descriptor is also accepted:: - + query(MyClass).options( - undefer(MyClass.attribute_one), + undefer(MyClass.attribute_one), undefer(MyClass.attribute_two)) - + A "path" can be specified onto a related or collection object using a dotted name. 
The :func:`.orm.undefer` option will be applied to that object when loaded:: - + query(MyClass).options( - undefer("related.attribute_one"), + undefer("related.attribute_one"), undefer("related.attribute_two")) - + To specify a path via class, send multiple arguments:: query(MyClass).options( - undefer(MyClass.related, MyOtherClass.attribute_one), + undefer(MyClass.related, MyOtherClass.attribute_one), undefer(MyClass.related, MyOtherClass.attribute_two)) - + See also: - + :func:`.orm.undefer_group` as a means to "undefer" a group of attributes at once. - + :ref:`deferred` - + :param \*key: A key representing an individual path. Multiple entries are accepted to allow a multiple-token path for a single target, not multiple targets. @@ -1656,21 +1665,21 @@ def undefer_group(name): column properties into a non-deferred (regular column) load. Used with :meth:`.Query.options`. - + e.g.:: - + query(MyClass).options(undefer("group_one")) See also: - + :ref:`deferred` - - :param name: String name of the deferred group. This name is - established using the "group" name to the :func:`.orm.deferred` + + :param name: String name of the deferred group. This name is + established using the "group" name to the :func:`.orm.deferred` configurational function. 
""" return strategies.UndeferGroupOption(name) from sqlalchemy import util as _sa_util -_sa_util.importlater.resolve_all() \ No newline at end of file +_sa_util.importlater.resolve_all() diff --git a/libs/sqlalchemy/orm/attributes.py b/libs/sqlalchemy/orm/attributes.py index d31b970e..b40abd57 100644 --- a/libs/sqlalchemy/orm/attributes.py +++ b/libs/sqlalchemy/orm/attributes.py @@ -1,5 +1,5 @@ # orm/attributes.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -29,7 +29,7 @@ NO_VALUE = util.symbol('NO_VALUE') NEVER_SET = util.symbol('NEVER_SET') PASSIVE_RETURN_NEVER_SET = util.symbol('PASSIVE_RETURN_NEVER_SET', -"""Symbol indicating that loader callables can be +"""Symbol indicating that loader callables can be fired off, but if no callable is applicable and no value is present, the attribute should remain non-initialized. NEVER_SET is returned in this case. @@ -37,14 +37,14 @@ NEVER_SET is returned in this case. PASSIVE_NO_INITIALIZE = util.symbol('PASSIVE_NO_INITIALIZE', """Symbol indicating that loader callables should - not be fired off, and a non-initialized attribute + not be fired off, and a non-initialized attribute should remain that way. """) PASSIVE_NO_FETCH = util.symbol('PASSIVE_NO_FETCH', -"""Symbol indicating that loader callables should not emit SQL, +"""Symbol indicating that loader callables should not emit SQL, but a value can be fetched from the current session. - + Non-initialized attributes should be initialized to an empty value. """) @@ -53,9 +53,9 @@ PASSIVE_NO_FETCH_RELATED = util.symbol('PASSIVE_NO_FETCH_RELATED', """Symbol indicating that loader callables should not emit SQL for loading a related object, but can refresh the attributes of the local instance in order to locate a related object in the current session. 
- + Non-initialized attributes should be initialized to an empty value. - + The unit of work uses this mode to check if history is present on many-to-one attributes with minimal SQL emitted. @@ -81,7 +81,7 @@ PASSIVE_OFF = util.symbol('PASSIVE_OFF', class QueryableAttribute(interfaces.PropComparator): """Base class for class-bound attributes. """ - def __init__(self, class_, key, impl=None, + def __init__(self, class_, key, impl=None, comparator=None, parententity=None): self.class_ = class_ self.key = key @@ -92,7 +92,7 @@ class QueryableAttribute(interfaces.PropComparator): manager = manager_of_class(class_) # manager is None in the case of AliasedClass if manager: - # propagate existing event listeners from + # propagate existing event listeners from # immediate superclass for base in manager._bases: if key in base: @@ -134,8 +134,8 @@ class QueryableAttribute(interfaces.PropComparator): except AttributeError: raise AttributeError( 'Neither %r object nor %r object has an attribute %r' % ( - type(self).__name__, - type(self.comparator).__name__, + type(self).__name__, + type(self.comparator).__name__, key) ) @@ -151,7 +151,7 @@ class InstrumentedAttribute(QueryableAttribute): """Class bound instrumented attribute which adds descriptor methods.""" def __set__(self, instance, value): - self.impl.set(instance_state(instance), + self.impl.set(instance_state(instance), instance_dict(instance), value, None) def __delete__(self, instance): @@ -179,12 +179,12 @@ def create_proxied_attribute(descriptor): class Proxy(QueryableAttribute): """Presents the :class:`.QueryableAttribute` interface as a - proxy on top of a Python descriptor / :class:`.PropComparator` + proxy on top of a Python descriptor / :class:`.PropComparator` combination. 
""" - def __init__(self, class_, key, descriptor, comparator, + def __init__(self, class_, key, descriptor, comparator, adapter=None, doc=None): self.class_ = class_ self.key = key @@ -233,8 +233,8 @@ def create_proxied_attribute(descriptor): except AttributeError: raise AttributeError( 'Neither %r object nor %r object has an attribute %r' % ( - type(descriptor).__name__, - type(self.comparator).__name__, + type(descriptor).__name__, + type(self.comparator).__name__, attribute) ) @@ -250,7 +250,7 @@ class AttributeImpl(object): def __init__(self, class_, key, callable_, dispatch, trackparent=False, extension=None, - compare_function=None, active_history=False, + compare_function=None, active_history=False, parent_token=None, expire_missing=True, **kwargs): """Construct an AttributeImpl. @@ -287,12 +287,12 @@ class AttributeImpl(object): parent_token Usually references the MapperProperty, used as a key for the hasparent() function to identify an "owning" attribute. - Allows multiple AttributeImpls to all match a single + Allows multiple AttributeImpls to all match a single owner attribute. expire_missing if False, don't add an "expiry" callable to this attribute - during state.expire_attributes(None), if no value is present + during state.expire_attributes(None), if no value is present for this key. """ @@ -331,7 +331,7 @@ class AttributeImpl(object): def hasparent(self, state, optimistic=False): - """Return the boolean value of a `hasparent` flag attached to + """Return the boolean value of a `hasparent` flag attached to the given state. The `optimistic` flag determines what the default return value @@ -375,8 +375,8 @@ class AttributeImpl(object): "state %s along attribute '%s', " "but the parent record " "has gone stale, can't be sure this " - "is the most recent parent." % - (mapperutil.state_str(state), + "is the most recent parent." 
% + (mapperutil.state_str(state), mapperutil.state_str(parent_state), self.key)) @@ -406,18 +406,18 @@ class AttributeImpl(object): raise NotImplementedError() def get_all_pending(self, state, dict_): - """Return a list of tuples of (state, obj) - for all objects in this attribute's current state + """Return a list of tuples of (state, obj) + for all objects in this attribute's current state + history. Only applies to object-based attributes. This is an inlining of existing functionality - which roughly correponds to: + which roughly corresponds to: get_state_history( - state, - key, + state, + key, passive=PASSIVE_NO_INITIALIZE).sum() """ @@ -478,14 +478,14 @@ class AttributeImpl(object): self.set(state, dict_, value, initiator, passive=passive) def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF): - self.set(state, dict_, None, initiator, + self.set(state, dict_, None, initiator, passive=passive, check_old=value) def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF): - self.set(state, dict_, None, initiator, + self.set(state, dict_, None, initiator, passive=passive, check_old=value, pop=True) - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, check_old=None, pop=False): raise NotImplementedError() @@ -532,7 +532,7 @@ class ScalarAttributeImpl(AttributeImpl): return History.from_scalar_attribute( self, state, dict_.get(self.key, NO_VALUE)) - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, check_old=None, pop=False): if initiator and initiator.parent_token is self.parent_token: return @@ -543,7 +543,7 @@ class ScalarAttributeImpl(AttributeImpl): old = dict_.get(self.key, NO_VALUE) if self.dispatch.set: - value = self.fire_replace_event(state, dict_, + value = self.fire_replace_event(state, dict_, value, old, initiator) state.modified_event(dict_, self, old) dict_[self.key] = value @@ -575,10 +575,10 @@ 
class MutableScalarAttributeImpl(ScalarAttributeImpl): class_manager, copy_function=None, compare_function=None, **kwargs): super(ScalarAttributeImpl, self).__init__( - class_, - key, + class_, + key, callable_, dispatch, - compare_function=compare_function, + compare_function=compare_function, **kwargs) class_manager.mutable_attributes.add(key) if copy_function is None: @@ -611,15 +611,15 @@ class MutableScalarAttributeImpl(ScalarAttributeImpl): ScalarAttributeImpl.delete(self, state, dict_) state.mutable_dict.pop(self.key) - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, check_old=None, pop=False): - ScalarAttributeImpl.set(self, state, dict_, value, + ScalarAttributeImpl.set(self, state, dict_, value, initiator, passive, check_old=check_old, pop=pop) state.mutable_dict[self.key] = value class ScalarObjectAttributeImpl(ScalarAttributeImpl): - """represents a scalar-holding InstrumentedAttribute, + """represents a scalar-holding InstrumentedAttribute, where the target object is also instrumented. Adds events to delete/set operations. @@ -653,7 +653,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): if current is not None: ret = [(instance_state(current), current)] else: - ret = [] + ret = [(None, None)] if self.key in state.committed_state: original = state.committed_state[self.key] @@ -665,7 +665,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): else: return [] - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, check_old=None, pop=False): """Set a value on the given InstanceState. 
@@ -686,7 +686,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): old is not PASSIVE_NO_RESULT and \ check_old is not old: if pop: - return + return else: raise ValueError( "Object %s not associated with %s on attribute '%s'" % ( @@ -744,12 +744,12 @@ class CollectionAttributeImpl(AttributeImpl): typecallable=None, trackparent=False, extension=None, copy_function=None, compare_function=None, **kwargs): super(CollectionAttributeImpl, self).__init__( - class_, - key, + class_, + key, callable_, dispatch, trackparent=trackparent, extension=extension, - compare_function=compare_function, + compare_function=compare_function, **kwargs) if copy_function is None: @@ -777,11 +777,11 @@ class CollectionAttributeImpl(AttributeImpl): if self.key in state.committed_state: original = state.committed_state[self.key] if original is not NO_VALUE: - current_states = [((c is not None) and - instance_state(c) or None, c) + current_states = [((c is not None) and + instance_state(c) or None, c) for c in current] - original_states = [((c is not None) and - instance_state(c) or None, c) + original_states = [((c is not None) and + instance_state(c) or None, c) for c in original] current_set = dict(current_states) @@ -869,13 +869,13 @@ class CollectionAttributeImpl(AttributeImpl): def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF): try: # TODO: better solution here would be to add - # a "popper" role to collections.py to complement + # a "popper" role to collections.py to complement # "remover". self.remove(state, dict_, value, initiator, passive=passive) except (ValueError, KeyError, IndexError): pass - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, pop=False): """Set a value on the given object. 
@@ -954,7 +954,7 @@ class CollectionAttributeImpl(AttributeImpl): return user_data - def get_collection(self, state, dict_, + def get_collection(self, state, dict_, user_data=None, passive=PASSIVE_OFF): """Retrieve the CollectionAdapter associated with the given state. @@ -973,6 +973,14 @@ def backref_listeners(attribute, key, uselist): # use easily recognizable names for stack traces + parent_token = attribute.impl.parent_token + + def _acceptable_key_err(child_state, initiator): + raise ValueError( + "Object %s not associated with attribute of " + "type %s" % (mapperutil.state_str(child_state), + manager_of_class(initiator.class_)[initiator.key])) + def emit_backref_from_scalar_set_event(state, child, oldchild, initiator): if oldchild is child: return child @@ -983,61 +991,73 @@ def backref_listeners(attribute, key, uselist): old_state, old_dict = instance_state(oldchild),\ instance_dict(oldchild) impl = old_state.manager[key].impl - impl.pop(old_state, - old_dict, - state.obj(), + impl.pop(old_state, + old_dict, + state.obj(), initiator, passive=PASSIVE_NO_FETCH) if child is not None: child_state, child_dict = instance_state(child),\ instance_dict(child) - child_state.manager[key].impl.append( - child_state, - child_dict, - state.obj(), - initiator, - passive=PASSIVE_NO_FETCH) + child_impl = child_state.manager[key].impl + if initiator.parent_token is not parent_token and \ + initiator.parent_token is not child_impl.parent_token: + _acceptable_key_err(state, initiator) + child_impl.append( + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH) return child def emit_backref_from_collection_append_event(state, child, initiator): child_state, child_dict = instance_state(child), \ instance_dict(child) - child_state.manager[key].impl.append( - child_state, - child_dict, - state.obj(), - initiator, - passive=PASSIVE_NO_FETCH) + child_impl = child_state.manager[key].impl + if initiator.parent_token is not parent_token and \ + 
initiator.parent_token is not child_impl.parent_token: + _acceptable_key_err(state, initiator) + child_impl.append( + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH) return child def emit_backref_from_collection_remove_event(state, child, initiator): if child is not None: child_state, child_dict = instance_state(child),\ instance_dict(child) - child_state.manager[key].impl.pop( - child_state, - child_dict, - state.obj(), - initiator, - passive=PASSIVE_NO_FETCH) + child_impl = child_state.manager[key].impl + # can't think of a path that would produce an initiator + # mismatch here, as it would require an existing collection + # mismatch. + child_impl.pop( + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH) if uselist: - event.listen(attribute, "append", - emit_backref_from_collection_append_event, + event.listen(attribute, "append", + emit_backref_from_collection_append_event, retval=True, raw=True) else: - event.listen(attribute, "set", - emit_backref_from_scalar_set_event, + event.listen(attribute, "set", + emit_backref_from_scalar_set_event, retval=True, raw=True) # TODO: need coverage in test/orm/ of remove event - event.listen(attribute, "remove", - emit_backref_from_collection_remove_event, + event.listen(attribute, "remove", + emit_backref_from_collection_remove_event, retval=True, raw=True) _NO_HISTORY = util.symbol('NO_HISTORY') _NO_STATE_SYMBOLS = frozenset([ - id(PASSIVE_NO_RESULT), - id(NO_VALUE), + id(PASSIVE_NO_RESULT), + id(NO_VALUE), id(NEVER_SET)]) class History(tuple): """A 3-tuple of added, unchanged and deleted values, @@ -1078,7 +1098,7 @@ class History(tuple): return not bool( (self.added or self.deleted) or self.unchanged and self.unchanged != [None] - ) + ) def sum(self): """Return a collection of added + unchanged + deleted.""" @@ -1126,11 +1146,11 @@ class History(tuple): return cls((), (), ()) else: return cls((), [current], ()) - # dont let ClauseElement expressions here 
trip things up + # don't let ClauseElement expressions here trip things up elif attribute.is_equal(current, original) is True: return cls((), [current], ()) else: - # current convention on native scalars is to not + # current convention on native scalars is to not # include information # about missing previous value in "deleted", but # we do include None, which helps in some primary @@ -1156,11 +1176,11 @@ class History(tuple): elif current is original: return cls((), [current], ()) else: - # current convention on related objects is to not + # current convention on related objects is to not # include information # about missing previous value in "deleted", and # to also not include None - the dependency.py rules - # ignore the None in any case. + # ignore the None in any case. if id(original) in _NO_STATE_SYMBOLS or original is None: deleted = () else: @@ -1181,11 +1201,11 @@ class History(tuple): return cls((), list(current), ()) else: - current_states = [((c is not None) and instance_state(c) or None, c) - for c in current + current_states = [((c is not None) and instance_state(c) or None, c) + for c in current ] - original_states = [((c is not None) and instance_state(c) or None, c) - for c in original + original_states = [((c is not None) and instance_state(c) or None, c) + for c in original ] current_set = dict(current_states) @@ -1200,7 +1220,7 @@ class History(tuple): HISTORY_BLANK = History(None, None, None) def get_history(obj, key, passive=PASSIVE_OFF): - """Return a :class:`.History` record for the given object + """Return a :class:`.History` record for the given object and attribute key. 
:param obj: an object whose class is instrumented by the @@ -1239,14 +1259,14 @@ def register_attribute(class_, key, **kw): comparator = kw.pop('comparator', None) parententity = kw.pop('parententity', None) doc = kw.pop('doc', None) - desc = register_descriptor(class_, key, + desc = register_descriptor(class_, key, comparator, parententity, doc=doc) register_attribute_impl(class_, key, **kw) return desc def register_attribute_impl(class_, key, - uselist=False, callable_=None, - useobject=False, mutable_scalars=False, + uselist=False, callable_=None, + useobject=False, mutable_scalars=False, impl_class=None, backref=None, **kw): manager = manager_of_class(class_) @@ -1281,7 +1301,7 @@ def register_attribute_impl(class_, key, manager.post_configure_attribute(key) return manager[key] -def register_descriptor(class_, key, comparator=None, +def register_descriptor(class_, key, comparator=None, parententity=None, doc=None): manager = manager_of_class(class_) @@ -1310,7 +1330,7 @@ def init_collection(obj, key): :func:`~sqlalchemy.orm.attributes.set_committed_value`. obj is an instrumented object instance. An InstanceState - is accepted directly for backwards compatibility but + is accepted directly for backwards compatibility but this usage is deprecated. """ @@ -1328,7 +1348,7 @@ def init_state_collection(state, dict_, key): def set_committed_value(instance, key, value): """Set the value of an attribute with no history events. - Cancels any previous history present. The value should be + Cancels any previous history present. The value should be a scalar value for scalar-holding attributes, or an iterable for any collection-holding attribute. @@ -1385,7 +1405,7 @@ def del_attribute(instance, key): def flag_modified(instance, key): """Mark an attribute on an instance as 'modified'. - This sets the 'modified' flag on the instance and + This sets the 'modified' flag on the instance and establishes an unconditional change event for the given attribute. 
""" diff --git a/libs/sqlalchemy/orm/collections.py b/libs/sqlalchemy/orm/collections.py index 160fac8b..e26a5973 100644 --- a/libs/sqlalchemy/orm/collections.py +++ b/libs/sqlalchemy/orm/collections.py @@ -1,5 +1,5 @@ # orm/collections.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -111,27 +111,62 @@ import weakref from sqlalchemy.sql import expression from sqlalchemy import schema, util, exc as sa_exc - - __all__ = ['collection', 'collection_adapter', 'mapped_collection', 'column_mapped_collection', 'attribute_mapped_collection'] __instrumentation_mutex = util.threading.Lock() -class _SerializableColumnGetter(object): - def __init__(self, colkeys): - self.colkeys = colkeys - self.composite = len(colkeys) > 1 + +class _PlainColumnGetter(object): + """Plain column getter, stores collection of Column objects + directly. + + Serializes to a :class:`._SerializableColumnGetterV2` + which has more expensive __call__() performance + and some rare caveats. + + """ + def __init__(self, cols): + self.cols = cols + self.composite = len(cols) > 1 def __reduce__(self): - return _SerializableColumnGetter, (self.colkeys,) + return _SerializableColumnGetterV2._reduce_from_cols(self.cols) + + def _cols(self, mapper): + return self.cols + def __call__(self, value): + state = instance_state(value) + m = _state_mapper(state) + + key = [ + m._get_state_attr_by_column(state, state.dict, col) + for col in self._cols(m) + ] + + if self.composite: + return tuple(key) + else: + return key[0] + +class _SerializableColumnGetter(object): + """Column-based getter used in version 0.7.6 only. + + Remains here for pickle compatibility with 0.7.6. 
+ + """ + def __init__(self, colkeys): + self.colkeys = colkeys + self.composite = len(colkeys) > 1 + def __reduce__(self): + return _SerializableColumnGetter, (self.colkeys,) def __call__(self, value): state = instance_state(value) m = _state_mapper(state) key = [m._get_state_attr_by_column( - state, state.dict, + state, state.dict, m.mapped_table.columns[k]) for k in self.colkeys] if self.composite: @@ -139,6 +174,48 @@ class _SerializableColumnGetter(object): else: return key[0] +class _SerializableColumnGetterV2(_PlainColumnGetter): + """Updated serializable getter which deals with + multi-table mapped classes. + + Two extremely unusual cases are not supported. + Mappings which have tables across multiple metadata + objects, or which are mapped to non-Table selectables + linked across inheriting mappers may fail to function + here. + + """ + + def __init__(self, colkeys): + self.colkeys = colkeys + self.composite = len(colkeys) > 1 + + def __reduce__(self): + return self.__class__, (self.colkeys,) + + @classmethod + def _reduce_from_cols(cls, cols): + def _table_key(c): + if not isinstance(c.table, expression.TableClause): + return None + else: + return c.table.key + colkeys = [(c.key, _table_key(c)) for c in cols] + return _SerializableColumnGetterV2, (colkeys,) + + def _cols(self, mapper): + cols = [] + metadata = getattr(mapper.local_table, 'metadata', None) + for (ckey, tkey) in self.colkeys: + if tkey is None or \ + metadata is None or \ + tkey not in metadata: + cols.append(mapper.local_table.c[ckey]) + else: + cols.append(metadata.tables[tkey].c[ckey]) + return cols + + def column_mapped_collection(mapping_spec): """A dictionary-based collection type with column-based keying. 
@@ -155,10 +232,10 @@ def column_mapped_collection(mapping_spec): from sqlalchemy.orm.util import _state_mapper from sqlalchemy.orm.attributes import instance_state - cols = [c.key for c in [ - expression._only_column_elements(q, "mapping_spec") - for q in util.to_list(mapping_spec)]] - keyfunc = _SerializableColumnGetter(cols) + cols = [expression._only_column_elements(q, "mapping_spec") + for q in util.to_list(mapping_spec) + ] + keyfunc = _PlainColumnGetter(cols) return lambda: MappedCollection(keyfunc) class _SerializableAttrGetter(object): @@ -632,8 +709,8 @@ class CollectionAdapter(object): """ if initiator is not False and item is not None: return self.attr.fire_append_event( - self.owner_state, - self.owner_state.dict, + self.owner_state, + self.owner_state.dict, item, initiator) else: return item @@ -648,8 +725,8 @@ class CollectionAdapter(object): """ if initiator is not False and item is not None: self.attr.fire_remove_event( - self.owner_state, - self.owner_state.dict, + self.owner_state, + self.owner_state.dict, item, initiator) def fire_pre_remove_event(self, initiator=None): @@ -660,8 +737,8 @@ class CollectionAdapter(object): """ self.attr.fire_pre_remove_event( - self.owner_state, - self.owner_state.dict, + self.owner_state, + self.owner_state.dict, initiator=initiator) def __getstate__(self): diff --git a/libs/sqlalchemy/orm/dependency.py b/libs/sqlalchemy/orm/dependency.py index b3789e75..c46969f2 100644 --- a/libs/sqlalchemy/orm/dependency.py +++ b/libs/sqlalchemy/orm/dependency.py @@ -1,5 +1,5 @@ # orm/dependency.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -48,7 +48,7 @@ class DependencyProcessor(object): def hasparent(self, state): """return True if the given object instance has a parent, - according to the 
``InstrumentedAttribute`` handled by this + according to the ``InstrumentedAttribute`` handled by this ``DependencyProcessor``. """ @@ -69,29 +69,29 @@ class DependencyProcessor(object): before_delete = unitofwork.ProcessAll(uow, self, True, True) parent_saves = unitofwork.SaveUpdateAll( - uow, + uow, self.parent.primary_base_mapper ) child_saves = unitofwork.SaveUpdateAll( - uow, + uow, self.mapper.primary_base_mapper ) parent_deletes = unitofwork.DeleteAll( - uow, + uow, self.parent.primary_base_mapper ) child_deletes = unitofwork.DeleteAll( - uow, + uow, self.mapper.primary_base_mapper ) - self.per_property_dependencies(uow, - parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, + self.per_property_dependencies(uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, before_delete ) @@ -99,7 +99,7 @@ class DependencyProcessor(object): def per_state_flush_actions(self, uow, states, isdelete): """establish actions and dependencies related to a flush. - These actions will operate on all relevant states + These actions will operate on all relevant states individually. This occurs only if there are cycles in the 'aggregated' version of events. 
@@ -141,14 +141,14 @@ class DependencyProcessor(object): # check if the "parent" side is part of the cycle if not isdelete: parent_saves = unitofwork.SaveUpdateAll( - uow, + uow, self.parent.base_mapper) parent_deletes = before_delete = None if parent_saves in uow.cycles: parent_in_cycles = True else: parent_deletes = unitofwork.DeleteAll( - uow, + uow, self.parent.base_mapper) parent_saves = after_save = None if parent_deletes in uow.cycles: @@ -165,19 +165,19 @@ class DependencyProcessor(object): continue if isdelete: - before_delete = unitofwork.ProcessState(uow, + before_delete = unitofwork.ProcessState(uow, self, True, state) if parent_in_cycles: parent_deletes = unitofwork.DeleteState( - uow, - state, + uow, + state, parent_base_mapper) else: after_save = unitofwork.ProcessState(uow, self, False, state) if parent_in_cycles: parent_saves = unitofwork.SaveUpdateState( - uow, - state, + uow, + state, parent_base_mapper) if child_in_cycles: @@ -190,24 +190,24 @@ class DependencyProcessor(object): if deleted: child_action = ( unitofwork.DeleteState( - uow, child_state, - child_base_mapper), + uow, child_state, + child_base_mapper), True) else: child_action = ( unitofwork.SaveUpdateState( - uow, child_state, - child_base_mapper), + uow, child_state, + child_base_mapper), False) child_actions.append(child_action) # establish dependencies between our possibly per-state # parent action and our possibly per-state child action. 
for child_action, childisdelete in child_actions: - self.per_state_dependencies(uow, parent_saves, - parent_deletes, - child_action, - after_save, before_delete, + self.per_state_dependencies(uow, parent_saves, + parent_deletes, + child_action, + after_save, before_delete, isdelete, childisdelete) @@ -232,12 +232,12 @@ class DependencyProcessor(object): passive = attributes.PASSIVE_OFF for s in states: - # TODO: add a high speed method + # TODO: add a high speed method # to InstanceState which returns: attribute # has a non-None value, or had one history = uowcommit.get_attribute_history( - s, - self.key, + s, + self.key, passive) if history and not history.empty(): return True @@ -248,7 +248,7 @@ class DependencyProcessor(object): def _verify_canload(self, state): if state is not None and \ - not self.mapper._canload(state, + not self.mapper._canload(state, allow_subtypes=not self.enable_typechecks): if self.mapper._canload(state, allow_subtypes=True): raise exc.FlushError('Attempting to flush an item of type ' @@ -287,11 +287,11 @@ class DependencyProcessor(object): return None process_key = tuple(sorted( - [self.key] + + [self.key] + [p.key for p in self.prop._reverse_property] )) return uow.memo( - ('reverse_key', process_key), + ('reverse_key', process_key), set ) @@ -299,7 +299,7 @@ class DependencyProcessor(object): for x in related: if x is not None: uowcommit.issue_post_update( - state, + state, [r for l, r in self.prop.synchronize_pairs] ) break @@ -312,21 +312,21 @@ class DependencyProcessor(object): class OneToManyDP(DependencyProcessor): - def per_property_dependencies(self, uow, parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, + def per_property_dependencies(self, uow, parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, before_delete, ): if self.post_update: child_post_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, + uow, + self.mapper.primary_base_mapper, False) 
child_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, + uow, + self.mapper.primary_base_mapper, True) uow.dependencies.update([ @@ -352,22 +352,22 @@ class OneToManyDP(DependencyProcessor): (before_delete, child_deletes), ]) - def per_state_dependencies(self, uow, - save_parent, - delete_parent, - child_action, - after_save, before_delete, + def per_state_dependencies(self, uow, + save_parent, + delete_parent, + child_action, + after_save, before_delete, isdelete, childisdelete): if self.post_update: child_post_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, + uow, + self.mapper.primary_base_mapper, False) child_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, + uow, + self.mapper.primary_base_mapper, True) # TODO: this whole block is not covered @@ -393,7 +393,7 @@ class OneToManyDP(DependencyProcessor): else: uow.dependencies.update([ (before_delete, child_pre_updates), - (child_pre_updates, delete_parent), + (child_pre_updates, delete_parent), ]) elif not isdelete: uow.dependencies.update([ @@ -408,16 +408,16 @@ class OneToManyDP(DependencyProcessor): ]) def presort_deletes(self, uowcommit, states): - # head object is being deleted, and we manage its list of - # child objects the child objects have to have their + # head object is being deleted, and we manage its list of + # child objects the child objects have to have their # foreign key to the parent set to NULL should_null_fks = not self.cascade.delete and \ not self.passive_deletes == 'all' for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: for child in history.deleted: @@ -430,7 +430,7 @@ class OneToManyDP(DependencyProcessor): if should_null_fks: for child in history.unchanged: if child is not None: - uowcommit.register_object(child, + uowcommit.register_object(child, operation="delete", prop=self.prop) @@ 
-447,25 +447,25 @@ class OneToManyDP(DependencyProcessor): passive = attributes.PASSIVE_OFF history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, passive) if history: for child in history.added: if child is not None: - uowcommit.register_object(child, cancel_delete=True, - operation="add", + uowcommit.register_object(child, cancel_delete=True, + operation="add", prop=self.prop) children_added.update(history.added) for child in history.deleted: if not self.cascade.delete_orphan: - uowcommit.register_object(child, isdelete=False, - operation='delete', + uowcommit.register_object(child, isdelete=False, + operation='delete', prop=self.prop) elif self.hasparent(child) is False: - uowcommit.register_object(child, isdelete=True, + uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( 'delete', child): @@ -478,16 +478,16 @@ class OneToManyDP(DependencyProcessor): for child in history.unchanged: if child is not None: uowcommit.register_object( - child, - False, + child, + False, self.passive_updates, operation="pk change", prop=self.prop) def process_deletes(self, uowcommit, states): - # head object is being deleted, and we manage its list of - # child objects the child objects have to have their foreign - # key to the parent set to NULL this phase can be called + # head object is being deleted, and we manage its list of + # child objects the child objects have to have their foreign + # key to the parent set to NULL this phase can be called # safely for any cascade but is unnecessary if delete cascade # is on. 
@@ -496,17 +496,17 @@ class OneToManyDP(DependencyProcessor): for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: for child in history.deleted: if child is not None and \ self.hasparent(child) is False: self._synchronize( - state, - child, - None, True, + state, + child, + None, True, uowcommit, False) if self.post_update and child: self._post_update(child, uowcommit, [state]) @@ -516,18 +516,18 @@ class OneToManyDP(DependencyProcessor): difference(children_added): if child is not None: self._synchronize( - state, - child, - None, True, + state, + child, + None, True, uowcommit, False) if self.post_update and child: - self._post_update(child, - uowcommit, + self._post_update(child, + uowcommit, [state]) # technically, we can even remove each child from the - # collection here too. but this would be a somewhat - # inconsistent behavior since it wouldn't happen + # collection here too. but this would be a somewhat + # inconsistent behavior since it wouldn't happen #if the old parent wasn't deleted but child was moved. 
def process_saves(self, uowcommit, states): @@ -538,7 +538,7 @@ class OneToManyDP(DependencyProcessor): attributes.PASSIVE_NO_INITIALIZE) if history: for child in history.added: - self._synchronize(state, child, None, + self._synchronize(state, child, None, False, uowcommit, False) if child is not None and self.post_update: self._post_update(child, uowcommit, [state]) @@ -546,15 +546,15 @@ class OneToManyDP(DependencyProcessor): for child in history.deleted: if not self.cascade.delete_orphan and \ not self.hasparent(child): - self._synchronize(state, child, None, True, + self._synchronize(state, child, None, True, uowcommit, False) if self._pks_changed(uowcommit, state): for child in history.unchanged: - self._synchronize(state, child, None, + self._synchronize(state, child, None, False, uowcommit, True) - def _synchronize(self, state, child, + def _synchronize(self, state, child, associationrow, clearkeys, uowcommit, pks_changed): source = state @@ -566,15 +566,15 @@ class OneToManyDP(DependencyProcessor): if clearkeys: sync.clear(dest, self.mapper, self.prop.synchronize_pairs) else: - sync.populate(source, self.parent, dest, self.mapper, + sync.populate(source, self.parent, dest, self.mapper, self.prop.synchronize_pairs, uowcommit, self.passive_updates and pks_changed) def _pks_changed(self, uowcommit, state): return sync.source_modified( - uowcommit, - state, - self.parent, + uowcommit, + state, + self.parent, self.prop.synchronize_pairs) class ManyToOneDP(DependencyProcessor): @@ -582,22 +582,22 @@ class ManyToOneDP(DependencyProcessor): DependencyProcessor.__init__(self, prop) self.mapper._dependency_processors.append(DetectKeySwitch(prop)) - def per_property_dependencies(self, uow, - parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, + def per_property_dependencies(self, uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, before_delete): if self.post_update: parent_post_updates = 
unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, + uow, + self.parent.primary_base_mapper, False) parent_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, + uow, + self.parent.primary_base_mapper, True) uow.dependencies.update([ @@ -618,19 +618,19 @@ class ManyToOneDP(DependencyProcessor): (parent_deletes, child_deletes) ]) - def per_state_dependencies(self, uow, - save_parent, - delete_parent, - child_action, - after_save, before_delete, + def per_state_dependencies(self, uow, + save_parent, + delete_parent, + child_action, + after_save, before_delete, isdelete, childisdelete): if self.post_update: if not isdelete: parent_post_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, + uow, + self.parent.primary_base_mapper, False) if childisdelete: uow.dependencies.update([ @@ -646,8 +646,8 @@ class ManyToOneDP(DependencyProcessor): ]) else: parent_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, + uow, + self.parent.primary_base_mapper, True) uow.dependencies.update([ @@ -677,8 +677,8 @@ class ManyToOneDP(DependencyProcessor): if self.cascade.delete or self.cascade.delete_orphan: for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: if self.cascade.delete_orphan: @@ -688,7 +688,7 @@ class ManyToOneDP(DependencyProcessor): for child in todelete: if child is None: continue - uowcommit.register_object(child, isdelete=True, + uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( 'delete', child): @@ -700,14 +700,14 @@ class ManyToOneDP(DependencyProcessor): uowcommit.register_object(state, operation="add", prop=self.prop) if self.cascade.delete_orphan: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: ret = True 
for child in history.deleted: if self.hasparent(child) is False: - uowcommit.register_object(child, isdelete=True, + uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( @@ -721,15 +721,15 @@ class ManyToOneDP(DependencyProcessor): not self.cascade.delete_orphan and \ not self.passive_deletes == 'all': - # post_update means we have to update our + # post_update means we have to update our # row to not reference the child object # before we can DELETE the row for state in states: self._synchronize(state, None, None, True, uowcommit) if state and self.post_update: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: self._post_update(state, uowcommit, history.sum()) @@ -737,12 +737,12 @@ class ManyToOneDP(DependencyProcessor): def process_saves(self, uowcommit, states): for state in states: history = uowcommit.get_attribute_history( - state, + state, self.key, attributes.PASSIVE_NO_INITIALIZE) if history: for child in history.added: - self._synchronize(state, child, None, False, + self._synchronize(state, child, None, False, uowcommit, "add") if self.post_update: @@ -759,7 +759,7 @@ class ManyToOneDP(DependencyProcessor): not uowcommit.session._contains_state(child): util.warn( "Object of type %s not in session, %s " - "operation along '%s' won't proceed" % + "operation along '%s' won't proceed" % (mapperutil.state_class_str(child), operation, self.prop)) return @@ -767,14 +767,14 @@ class ManyToOneDP(DependencyProcessor): sync.clear(state, self.parent, self.prop.synchronize_pairs) else: self._verify_canload(child) - sync.populate(child, self.mapper, state, - self.parent, - self.prop.synchronize_pairs, + sync.populate(child, self.mapper, state, + self.parent, + self.prop.synchronize_pairs, uowcommit, - False) + False) class DetectKeySwitch(DependencyProcessor): - """For many-to-one relationships with no one-to-many 
backref, + """For many-to-one relationships with no one-to-many backref, searches for parents through the unit of work when a primary key has changed and updates them. @@ -798,7 +798,7 @@ class DetectKeySwitch(DependencyProcessor): def per_property_flush_actions(self, uow): parent_saves = unitofwork.SaveUpdateAll( - uow, + uow, self.parent.base_mapper) after_save = unitofwork.ProcessAll(uow, self, False, False) uow.dependencies.update([ @@ -837,7 +837,7 @@ class DetectKeySwitch(DependencyProcessor): def _key_switchers(self, uow, states): switched, notswitched = uow.memo( - ('pk_switchers', self), + ('pk_switchers', self), lambda: (set(), set()) ) @@ -865,29 +865,29 @@ class DetectKeySwitch(DependencyProcessor): related is not None: related_state = attributes.instance_state(dict_[self.key]) if related_state in switchers: - uowcommit.register_object(state, - False, + uowcommit.register_object(state, + False, self.passive_updates) sync.populate( - related_state, - self.mapper, state, - self.parent, self.prop.synchronize_pairs, + related_state, + self.mapper, state, + self.parent, self.prop.synchronize_pairs, uowcommit, self.passive_updates) def _pks_changed(self, uowcommit, state): - return bool(state.key) and sync.source_modified(uowcommit, - state, - self.mapper, + return bool(state.key) and sync.source_modified(uowcommit, + state, + self.mapper, self.prop.synchronize_pairs) class ManyToManyDP(DependencyProcessor): - def per_property_dependencies(self, uow, parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, + def per_property_dependencies(self, uow, parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, before_delete ): @@ -896,9 +896,9 @@ class ManyToManyDP(DependencyProcessor): (child_saves, after_save), (after_save, child_deletes), - # a rowswitch on the parent from deleted to saved - # can make this one occur, as the "save" may remove - # an element from the + # a rowswitch on the parent from deleted to saved + # 
can make this one occur, as the "save" may remove + # an element from the # "deleted" list before we have a chance to # process its child rows (before_delete, parent_saves), @@ -908,11 +908,11 @@ class ManyToManyDP(DependencyProcessor): (before_delete, child_saves), ]) - def per_state_dependencies(self, uow, - save_parent, - delete_parent, - child_action, - after_save, before_delete, + def per_state_dependencies(self, uow, + save_parent, + delete_parent, + child_action, + after_save, before_delete, isdelete, childisdelete): if not isdelete: if childisdelete: @@ -933,25 +933,25 @@ class ManyToManyDP(DependencyProcessor): def presort_deletes(self, uowcommit, states): if not self.passive_deletes: - # if no passive deletes, load history on + # if no passive deletes, load history on # the collection, so that prop_has_changes() # returns True for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) def presort_saves(self, uowcommit, states): if not self.passive_updates: - # if no passive updates, load history on + # if no passive updates, load history on # each collection where parent has changed PK, # so that prop_has_changes() returns True for state in states: if self._pks_changed(uowcommit, state): history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, attributes.PASSIVE_OFF) if not self.cascade.delete_orphan: @@ -961,16 +961,16 @@ class ManyToManyDP(DependencyProcessor): # if delete_orphan check is turned on. 
for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, attributes.PASSIVE_NO_INITIALIZE) if history: for child in history.deleted: if self.hasparent(child) is False: - uowcommit.register_object(child, isdelete=True, + uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( - 'delete', + 'delete', child): uowcommit.register_object( st_, isdelete=True) @@ -983,23 +983,23 @@ class ManyToManyDP(DependencyProcessor): processed = self._get_reversed_processed_set(uowcommit) tmp = set() for state in states: - # this history should be cached already, as + # this history should be cached already, as # we loaded it in preprocess_deletes history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: for child in history.non_added(): if child is None or \ - (processed is not None and + (processed is not None and (state, child) in processed): continue associationrow = {} if not self._synchronize( - state, - child, - associationrow, + state, + child, + associationrow, False, uowcommit, "delete"): continue secondary_delete.append(associationrow) @@ -1009,7 +1009,7 @@ class ManyToManyDP(DependencyProcessor): if processed is not None: processed.update(tmp) - self._run_crud(uowcommit, secondary_insert, + self._run_crud(uowcommit, secondary_insert, secondary_update, secondary_delete) def process_saves(self, uowcommit, states): @@ -1022,7 +1022,7 @@ class ManyToManyDP(DependencyProcessor): for state in states: need_cascade_pks = not self.passive_updates and \ - self._pks_changed(uowcommit, state) + self._pks_changed(uowcommit, state) if need_cascade_pks: passive = attributes.PASSIVE_OFF else: @@ -1032,45 +1032,45 @@ class ManyToManyDP(DependencyProcessor): if history: for child in history.added: if child is None or \ - (processed is not None and + (processed is not None and (state, child) in 
processed): continue associationrow = {} - if not self._synchronize(state, - child, - associationrow, + if not self._synchronize(state, + child, + associationrow, False, uowcommit, "add"): continue secondary_insert.append(associationrow) for child in history.deleted: if child is None or \ - (processed is not None and + (processed is not None and (state, child) in processed): continue associationrow = {} - if not self._synchronize(state, - child, - associationrow, + if not self._synchronize(state, + child, + associationrow, False, uowcommit, "delete"): continue secondary_delete.append(associationrow) - tmp.update((c, state) + tmp.update((c, state) for c in history.added + history.deleted) if need_cascade_pks: for child in history.unchanged: associationrow = {} - sync.update(state, - self.parent, - associationrow, - "old_", + sync.update(state, + self.parent, + associationrow, + "old_", self.prop.synchronize_pairs) - sync.update(child, - self.mapper, - associationrow, - "old_", + sync.update(child, + self.mapper, + associationrow, + "old_", self.prop.secondary_synchronize_pairs) secondary_update.append(associationrow) @@ -1078,18 +1078,18 @@ class ManyToManyDP(DependencyProcessor): if processed is not None: processed.update(tmp) - self._run_crud(uowcommit, secondary_insert, + self._run_crud(uowcommit, secondary_insert, secondary_update, secondary_delete) - def _run_crud(self, uowcommit, secondary_insert, + def _run_crud(self, uowcommit, secondary_insert, secondary_update, secondary_delete): connection = uowcommit.transaction.connection(self.mapper) if secondary_delete: associationrow = secondary_delete[0] statement = self.secondary.delete(sql.and_(*[ - c == sql.bindparam(c.key, type_=c.type) - for c in self.secondary.c + c == sql.bindparam(c.key, type_=c.type) + for c in self.secondary.c if c.key in associationrow ])) result = connection.execute(statement, secondary_delete) @@ -1098,7 +1098,7 @@ class ManyToManyDP(DependencyProcessor): result.rowcount != 
len(secondary_delete): raise exc.StaleDataError( "DELETE statement on table '%s' expected to delete %d row(s); " - "Only %d were matched." % + "Only %d were matched." % (self.secondary.description, len(secondary_delete), result.rowcount) ) @@ -1106,8 +1106,8 @@ class ManyToManyDP(DependencyProcessor): if secondary_update: associationrow = secondary_update[0] statement = self.secondary.update(sql.and_(*[ - c == sql.bindparam("old_" + c.key, type_=c.type) - for c in self.secondary.c + c == sql.bindparam("old_" + c.key, type_=c.type) + for c in self.secondary.c if c.key in associationrow ])) result = connection.execute(statement, secondary_update) @@ -1115,7 +1115,7 @@ class ManyToManyDP(DependencyProcessor): result.rowcount != len(secondary_update): raise exc.StaleDataError( "UPDATE statement on table '%s' expected to update %d row(s); " - "Only %d were matched." % + "Only %d were matched." % (self.secondary.description, len(secondary_update), result.rowcount) ) @@ -1124,7 +1124,7 @@ class ManyToManyDP(DependencyProcessor): statement = self.secondary.insert() connection.execute(statement, secondary_insert) - def _synchronize(self, state, child, associationrow, + def _synchronize(self, state, child, associationrow, clearkeys, uowcommit, operation): if associationrow is None: return @@ -1133,13 +1133,13 @@ class ManyToManyDP(DependencyProcessor): if not child.deleted: util.warn( "Object of type %s not in session, %s " - "operation along '%s' won't proceed" % + "operation along '%s' won't proceed" % (mapperutil.state_class_str(child), operation, self.prop)) return False self._verify_canload(child) - sync.populate_dict(state, self.parent, associationrow, + sync.populate_dict(state, self.parent, associationrow, self.prop.synchronize_pairs) sync.populate_dict(child, self.mapper, associationrow, self.prop.secondary_synchronize_pairs) @@ -1148,9 +1148,9 @@ class ManyToManyDP(DependencyProcessor): def _pks_changed(self, uowcommit, state): return sync.source_modified( - 
uowcommit, - state, - self.parent, + uowcommit, + state, + self.parent, self.prop.synchronize_pairs) _direction_to_processor = { diff --git a/libs/sqlalchemy/orm/deprecated_interfaces.py b/libs/sqlalchemy/orm/deprecated_interfaces.py index dfc368f0..d251f52e 100644 --- a/libs/sqlalchemy/orm/deprecated_interfaces.py +++ b/libs/sqlalchemy/orm/deprecated_interfaces.py @@ -1,5 +1,5 @@ # orm/deprecated_interfaces.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,10 +11,10 @@ from interfaces import EXT_CONTINUE class MapperExtension(object): """Base implementation for :class:`.Mapper` event hooks. - .. note:: - + .. note:: + :class:`.MapperExtension` is deprecated. Please - refer to :func:`.event.listen` as well as + refer to :func:`.event.listen` as well as :class:`.MapperEvents`. New extension classes subclass :class:`.MapperExtension` and are specified @@ -42,8 +42,8 @@ class MapperExtension(object): to the next ``MapperExtension`` for processing". For methods that return objects like translated rows or new object instances, EXT_CONTINUE means the result of the method - should be ignored. In some cases it's required for a - default mapper activity to be performed, such as adding a + should be ignored. In some cases it's required for a + default mapper activity to be performed, such as adding a new instance to a result list. 
The symbol EXT_STOP has significance within a chain @@ -91,29 +91,29 @@ class MapperExtension(object): def reconstruct(instance, ctx): ls_meth(self, instance) return reconstruct - event.listen(self.class_manager, 'load', + event.listen(self.class_manager, 'load', go(ls_meth), raw=False, propagate=True) elif meth == 'init_instance': def go(ls_meth): def init_instance(instance, args, kwargs): - ls_meth(self, self.class_, - self.class_manager.original_init, + ls_meth(self, self.class_, + self.class_manager.original_init, instance, args, kwargs) return init_instance - event.listen(self.class_manager, 'init', + event.listen(self.class_manager, 'init', go(ls_meth), raw=False, propagate=True) elif meth == 'init_failed': def go(ls_meth): def init_failed(instance, args, kwargs): - util.warn_exception(ls_meth, self, self.class_, - self.class_manager.original_init, + util.warn_exception(ls_meth, self, self.class_, + self.class_manager.original_init, instance, args, kwargs) return init_failed - event.listen(self.class_manager, 'init_failure', + event.listen(self.class_manager, 'init_failure', go(ls_meth), raw=False, propagate=True) else: - event.listen(self, "%s" % meth, ls_meth, + event.listen(self, "%s" % meth, ls_meth, raw=False, retval=True, propagate=True) @@ -121,7 +121,7 @@ class MapperExtension(object): """Receive a class when the mapper is first constructed, and has applied instrumentation to the mapped class. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -130,25 +130,25 @@ class MapperExtension(object): def init_instance(self, mapper, class_, oldinit, instance, args, kwargs): """Receive an instance when it's constructor is called. - This method is only called during a userland construction of + This method is only called during a userland construction of an object. 
It is not called when an object is loaded from the database. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def init_failed(self, mapper, class_, oldinit, instance, args, kwargs): - """Receive an instance when it's constructor has been called, + """Receive an instance when it's constructor has been called, and raised an exception. - This method is only called during a userland construction of + This method is only called during a userland construction of an object. It is not called when an object is loaded from the database. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -160,9 +160,9 @@ class MapperExtension(object): This is called when the mapper first receives a row, before the object identity or the instance itself has been derived - from that row. The given row may or may not be a + from that row. The given row may or may not be a ``RowProxy`` object - it will always be a dictionary-like - object which contains mapped columns as keys. The + object which contains mapped columns as keys. The returned object should also be a dictionary-like object which recognizes mapped columns as keys. @@ -197,7 +197,7 @@ class MapperExtension(object): """ return EXT_CONTINUE - def append_result(self, mapper, selectcontext, row, instance, + def append_result(self, mapper, selectcontext, row, instance, result, **flags): """Receive an object instance before that instance is appended to a result list. 
@@ -231,7 +231,7 @@ class MapperExtension(object): return EXT_CONTINUE - def populate_instance(self, mapper, selectcontext, row, + def populate_instance(self, mapper, selectcontext, row, instance, **flags): """Receive an instance before that instance has its attributes populated. @@ -247,10 +247,11 @@ class MapperExtension(object): instance population will not proceed, giving this extension an opportunity to populate the instance itself, if desired. - As of 0.5, most usages of this hook are obsolete. For a - generic "object has been newly created from a row" hook, use - ``reconstruct_instance()``, or the ``@orm.reconstructor`` - decorator. + .. deprecated:: 0.5 + Most usages of this hook are obsolete. For a + generic "object has been newly created from a row" hook, use + ``reconstruct_instance()``, or the ``@orm.reconstructor`` + decorator. """ return EXT_CONTINUE @@ -265,11 +266,11 @@ class MapperExtension(object): instance's lifetime. Note that during a result-row load, this method is called upon - the first row received for this instance. Note that some - attributes and collections may or may not be loaded or even + the first row received for this instance. Note that some + attributes and collections may or may not be loaded or even initialized, depending on what's present in the result rows. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -284,12 +285,12 @@ class MapperExtension(object): Column-based attributes can be modified within this method which will result in the new value being inserted. However - *no* changes to the overall flush plan can be made, and + *no* changes to the overall flush plan can be made, and manipulation of the ``Session`` will not have the desired effect. 
- To manipulate the ``Session`` within an extension, use + To manipulate the ``Session`` within an extension, use ``SessionExtension``. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -299,7 +300,7 @@ class MapperExtension(object): def after_insert(self, mapper, connection, instance): """Receive an object instance after that instance is inserted. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -326,12 +327,12 @@ class MapperExtension(object): Column-based attributes can be modified within this method which will result in the new value being updated. However - *no* changes to the overall flush plan can be made, and + *no* changes to the overall flush plan can be made, and manipulation of the ``Session`` will not have the desired effect. - To manipulate the ``Session`` within an extension, use + To manipulate the ``Session`` within an extension, use ``SessionExtension``. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -341,7 +342,7 @@ class MapperExtension(object): def after_update(self, mapper, connection, instance): """Receive an object instance after that instance is updated. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -356,7 +357,7 @@ class MapperExtension(object): desired effect. To manipulate the ``Session`` within an extension, use ``SessionExtension``. 
- The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -377,10 +378,10 @@ class SessionExtension(object): """Base implementation for :class:`.Session` event hooks. - .. note:: - + .. note:: + :class:`.SessionExtension` is deprecated. Please - refer to :func:`.event.listen` as well as + refer to :func:`.event.listen` as well as :class:`.SessionEvents`. Subclasses may be installed into a :class:`.Session` (or @@ -497,10 +498,10 @@ class AttributeExtension(object): """Base implementation for :class:`.AttributeImpl` event hooks, events that fire upon attribute mutations in user code. - .. note:: - + .. note:: + :class:`.AttributeExtension` is deprecated. Please - refer to :func:`.event.listen` as well as + refer to :func:`.event.listen` as well as :class:`.AttributeEvents`. :class:`.AttributeExtension` is used to listen for set, @@ -554,10 +555,10 @@ class AttributeExtension(object): active_history=listener.active_history, raw=True, retval=True) event.listen(self, 'remove', listener.remove, - active_history=listener.active_history, + active_history=listener.active_history, raw=True, retval=True) event.listen(self, 'set', listener.set, - active_history=listener.active_history, + active_history=listener.active_history, raw=True, retval=True) def append(self, state, value, initiator): diff --git a/libs/sqlalchemy/orm/descriptor_props.py b/libs/sqlalchemy/orm/descriptor_props.py index ed0d4924..70cf0e7c 100644 --- a/libs/sqlalchemy/orm/descriptor_props.py +++ b/libs/sqlalchemy/orm/descriptor_props.py @@ -1,5 +1,5 @@ # orm/descriptor_props.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -19,7 +19,7 
@@ from sqlalchemy.sql import expression properties = util.importlater('sqlalchemy.orm', 'properties') class DescriptorProperty(MapperProperty): - """:class:`.MapperProperty` which proxies access to a + """:class:`.MapperProperty` which proxies access to a user-defined descriptor.""" doc = None @@ -35,7 +35,7 @@ class DescriptorProperty(MapperProperty): self.key = key if hasattr(prop, 'get_history'): - def get_history(self, state, dict_, + def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF): return prop.get_history(state, dict_, passive) @@ -62,7 +62,7 @@ class DescriptorProperty(MapperProperty): create_proxied_attribute(self.descriptor)\ ( self.parent.class_, - self.key, + self.key, self.descriptor, lambda: self._comparator_factory(mapper), doc=self.doc @@ -89,7 +89,7 @@ class CompositeProperty(DescriptorProperty): self._setup_event_handlers() def do_init(self): - """Initialization which occurs after the :class:`.CompositeProperty` + """Initialization which occurs after the :class:`.CompositeProperty` has been associated with its parent mapper. """ @@ -97,7 +97,7 @@ class CompositeProperty(DescriptorProperty): self._setup_arguments_on_columns() def _create_descriptor(self): - """Create the Python descriptor that will serve as + """Create the Python descriptor that will serve as the access point on instances of the mapped class. """ @@ -113,12 +113,12 @@ class CompositeProperty(DescriptorProperty): values = [getattr(instance, key) for key in self._attribute_keys] # current expected behavior here is that the composite is - # created on access if the object is persistent or if - # col attributes have non-None. This would be better + # created on access if the object is persistent or if + # col attributes have non-None. This would be better # if the composite were created unconditionally, # but that would be a behavioral change. 
if self.key not in dict_ and ( - state.key is not None or + state.key is not None or not _none_set.issuperset(values) ): dict_[self.key] = self.composite_class(*values) @@ -139,7 +139,7 @@ class CompositeProperty(DescriptorProperty): setattr(instance, key, None) else: for key, value in zip( - self._attribute_keys, + self._attribute_keys, value.__composite_values__()): setattr(instance, key, value) @@ -198,7 +198,7 @@ class CompositeProperty(DescriptorProperty): return # if column elements aren't loaded, skip. - # __get__() will initiate a load for those + # __get__() will initiate a load for those # columns for k in self._attribute_keys: if k not in dict_: @@ -206,7 +206,7 @@ class CompositeProperty(DescriptorProperty): #assert self.key not in dict_ dict_[self.key] = self.composite_class( - *[state.dict[key] for key in + *[state.dict[key] for key in self._attribute_keys] ) @@ -217,16 +217,16 @@ class CompositeProperty(DescriptorProperty): def insert_update_handler(mapper, connection, state): """After an insert or update, some columns may be expired due to server side defaults, or re-populated due to client side - defaults. Pop out the composite value here so that it + defaults. Pop out the composite value here so that it recreates. - + """ state.dict.pop(self.key, None) - event.listen(self.parent, 'after_insert', + event.listen(self.parent, 'after_insert', insert_update_handler, raw=True) - event.listen(self.parent, 'after_update', + event.listen(self.parent, 'after_update', insert_update_handler, raw=True) event.listen(self.parent, 'load', load_handler, raw=True, propagate=True) event.listen(self.parent, 'refresh', load_handler, raw=True, propagate=True) @@ -307,19 +307,19 @@ class CompositeProperty(DescriptorProperty): return str(self.parent.class_.__name__) + "." 
+ self.key class ConcreteInheritedProperty(DescriptorProperty): - """A 'do nothing' :class:`.MapperProperty` that disables + """A 'do nothing' :class:`.MapperProperty` that disables an attribute on a concrete subclass that is only present on the inherited mapper, not the concrete classes' mapper. Cases where this occurs include: - * When the superclass mapper is mapped against a - "polymorphic union", which includes all attributes from + * When the superclass mapper is mapped against a + "polymorphic union", which includes all attributes from all subclasses. * When a relationship() is configured on an inherited mapper, but not on the subclass mapper. Concrete mappers require - that relationship() is configured explicitly on each - subclass. + that relationship() is configured explicitly on each + subclass. """ @@ -337,7 +337,7 @@ class ConcreteInheritedProperty(DescriptorProperty): def warn(): raise AttributeError("Concrete %s does not implement " "attribute %r at the instance level. Add this " - "property explicitly to %s." % + "property explicitly to %s." 
% (self.parent, self.key, self.parent)) class NoninheritedConcreteProp(object): @@ -354,7 +354,7 @@ class ConcreteInheritedProperty(DescriptorProperty): class SynonymProperty(DescriptorProperty): - def __init__(self, name, map_column=None, + def __init__(self, name, map_column=None, descriptor=None, comparator_factory=None, doc=None): self.name = name @@ -387,7 +387,7 @@ class SynonymProperty(DescriptorProperty): if self.key not in parent.mapped_table.c: raise sa_exc.ArgumentError( "Can't compile synonym '%s': no column on table " - "'%s' named '%s'" + "'%s' named '%s'" % (self.name, parent.mapped_table.description, self.key)) elif parent.mapped_table.c[self.key] in \ parent._columntoproperty and \ @@ -397,13 +397,13 @@ class SynonymProperty(DescriptorProperty): raise sa_exc.ArgumentError( "Can't call map_column=True for synonym %r=%r, " "a ColumnProperty already exists keyed to the name " - "%r for column %r" % + "%r for column %r" % (self.key, self.name, self.name, self.key) ) p = properties.ColumnProperty(parent.mapped_table.c[self.key]) parent._configure_property( - self.name, p, - init=init, + self.name, p, + init=init, setparent=True) p._mapped_by_synonym = self.key diff --git a/libs/sqlalchemy/orm/dynamic.py b/libs/sqlalchemy/orm/dynamic.py index edf05287..e3773659 100644 --- a/libs/sqlalchemy/orm/dynamic.py +++ b/libs/sqlalchemy/orm/dynamic.py @@ -1,5 +1,5 @@ # orm/dynamic.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,7 +12,6 @@ basic add/delete mutation. 
""" from sqlalchemy import log, util -from sqlalchemy import exc as sa_exc from sqlalchemy.orm import exc as orm_exc from sqlalchemy.sql import operators from sqlalchemy.orm import ( @@ -20,12 +19,16 @@ from sqlalchemy.orm import ( ) from sqlalchemy.orm.query import Query from sqlalchemy.orm.util import has_identity -from sqlalchemy.orm import attributes, collections +from sqlalchemy.orm import collections class DynaLoader(strategies.AbstractRelationshipLoader): def init_class_attribute(self, mapper): self.is_class_level = True - + if not self.uselist: + util.warn( + "On relationship %s, 'dynamic' loaders cannot be used with " + "many-to-one/one-to-one relationships and/or " + "uselist=False." % self.parent_property) strategies._register_attribute(self, mapper, useobject=True, @@ -63,7 +66,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl): else: return self.query_class(self, state) - def get_collection(self, state, dict_, user_data=None, + def get_collection(self, state, dict_, user_data=None, passive=attributes.PASSIVE_NO_INITIALIZE): if passive is not attributes.PASSIVE_OFF: return self._get_collection_history(state, @@ -97,7 +100,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl): if self.key not in state.committed_state: state.committed_state[self.key] = CollectionHistory(self, state) - state.modified_event(dict_, + state.modified_event(dict_, self, attributes.NEVER_SET) @@ -107,7 +110,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl): return state.committed_state[self.key] def set(self, state, dict_, value, initiator, - passive=attributes.PASSIVE_OFF, + passive=attributes.PASSIVE_OFF, check_old=None, pop=False): if initiator and initiator.parent_token is self.parent_token: return @@ -144,8 +147,8 @@ class DynamicAttributeImpl(attributes.AttributeImpl): def get_all_pending(self, state, dict_): c = self._get_collection_history(state, True) return [ - (attributes.instance_state(x), x) - for x in + (attributes.instance_state(x), x) + for x 
in c.added_items + c.unchanged_items + c.deleted_items ] @@ -160,12 +163,12 @@ class DynamicAttributeImpl(attributes.AttributeImpl): else: return c - def append(self, state, dict_, value, initiator, + def append(self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF): if initiator is not self: self.fire_append_event(state, dict_, value, initiator) - def remove(self, state, dict_, value, initiator, + def remove(self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF): if initiator is not self: self.fire_remove_event(state, dict_, value, initiator) @@ -204,9 +207,9 @@ class AppenderMixin(object): mapper = object_mapper(instance) prop = mapper._props[self.attr.key] self._criterion = prop.compare( - operators.eq, - instance, - value_is_parent=True, + operators.eq, + instance, + value_is_parent=True, alias_secondary=False) if self.attr.order_by: @@ -280,12 +283,12 @@ class AppenderMixin(object): def append(self, item): self.attr.append( - attributes.instance_state(self.instance), + attributes.instance_state(self.instance), attributes.instance_dict(self.instance), item, None) def remove(self, item): self.attr.remove( - attributes.instance_state(self.instance), + attributes.instance_state(self.instance), attributes.instance_dict(self.instance), item, None) diff --git a/libs/sqlalchemy/orm/evaluator.py b/libs/sqlalchemy/orm/evaluator.py index e727c17b..5eaac6c3 100644 --- a/libs/sqlalchemy/orm/evaluator.py +++ b/libs/sqlalchemy/orm/evaluator.py @@ -1,5 +1,5 @@ # orm/evaluator.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -13,10 +13,10 @@ class UnevaluatableError(Exception): pass _straight_ops = set(getattr(operators, op) - for op in ('add', 'mul', 'sub', + for op in ('add', 'mul', 'sub', # Py2K 'div', - # end Py2K + # end Py2K 
'mod', 'truediv', 'lt', 'le', 'ne', 'gt', 'ge', 'eq')) diff --git a/libs/sqlalchemy/orm/events.py b/libs/sqlalchemy/orm/events.py index d319a3bb..3c868d14 100644 --- a/libs/sqlalchemy/orm/events.py +++ b/libs/sqlalchemy/orm/events.py @@ -1,5 +1,5 @@ # orm/events.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -91,11 +91,11 @@ class InstanceEvents(event.Events): When using :class:`.InstanceEvents`, several modifiers are available to the :func:`.event.listen` function. - :param propagate=False: When True, the event listener should - be applied to all inheriting mappers as well as the + :param propagate=False: When True, the event listener should + be applied to all inheriting mappers as well as the mapper which is the target of this listener. :param raw=False: When True, the "target" argument passed - to applicable event listener functions will be the + to applicable event listener functions will be the instance's :class:`.InstanceState` management object, rather than the mapped instance itself. @@ -142,17 +142,17 @@ class InstanceEvents(event.Events): def init(self, target, args, kwargs): """Receive an instance when it's constructor is called. - This method is only called during a userland construction of + This method is only called during a userland construction of an object. It is not called when an object is loaded from the database. """ def init_failure(self, target, args, kwargs): - """Receive an instance when it's constructor has been called, + """Receive an instance when it's constructor has been called, and raised an exception. - This method is only called during a userland construction of + This method is only called during a userland construction of an object. It is not called when an object is loaded from the database. 
@@ -168,12 +168,12 @@ class InstanceEvents(event.Events): instance's lifetime. Note that during a result-row load, this method is called upon - the first row received for this instance. Note that some - attributes and collections may or may not be loaded or even + the first row received for this instance. Note that some + attributes and collections may or may not be loaded or even initialized, depending on what's present in the result rows. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param context: the :class:`.QueryContext` corresponding to the @@ -184,16 +184,16 @@ class InstanceEvents(event.Events): """ def refresh(self, target, context, attrs): - """Receive an object instance after one or more attributes have + """Receive an object instance after one or more attributes have been refreshed from a query. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param context: the :class:`.QueryContext` corresponding to the current :class:`.Query` in progress. - :param attrs: iterable collection of attribute names which + :param attrs: iterable collection of attribute names which were populated, or None if all column-mapped, non-deferred attributes were populated. @@ -206,23 +206,23 @@ class InstanceEvents(event.Events): 'keys' is a list of attribute names. If None, the entire state was expired. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. 
If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param attrs: iterable collection of attribute - names which were expired, or None if all attributes were + names which were expired, or None if all attributes were expired. """ def resurrect(self, target): - """Receive an object instance as it is 'resurrected' from + """Receive an object instance as it is 'resurrected' from garbage collection, which occurs when a "dirty" state falls out of scope. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. @@ -232,28 +232,28 @@ class InstanceEvents(event.Events): """Receive an object instance when its associated state is being pickled. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. - :param state_dict: the dictionary returned by + :param state_dict: the dictionary returned by :class:`.InstanceState.__getstate__`, containing the state to be pickled. - + """ def unpickle(self, target, state_dict): """Receive an object instance after it's associated state has been unpickled. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param state_dict: the dictionary sent to :class:`.InstanceState.__setstate__`, containing the state dictionary which was pickled. 
- + """ class MapperEvents(event.Events): @@ -267,7 +267,7 @@ class MapperEvents(event.Events): # execute a stored procedure upon INSERT, # apply the value to the row to be inserted target.calculated_value = connection.scalar( - "select my_special_function(%d)" + "select my_special_function(%d)" % target.special_number) # associate the listener function with SomeMappedClass, @@ -304,16 +304,16 @@ class MapperEvents(event.Events): When using :class:`.MapperEvents`, several modifiers are available to the :func:`.event.listen` function. - :param propagate=False: When True, the event listener should - be applied to all inheriting mappers as well as the + :param propagate=False: When True, the event listener should + be applied to all inheriting mappers as well as the mapper which is the target of this listener. :param raw=False: When True, the "target" argument passed - to applicable event listener functions will be the + to applicable event listener functions will be the instance's :class:`.InstanceState` management object, rather than the mapped instance itself. :param retval=False: when True, the user-defined event function must have a return value, the purpose of which is either to - control subsequent event propagation, or to otherwise alter + control subsequent event propagation, or to otherwise alter the operation in progress by the mapper. Possible return values are: @@ -322,7 +322,7 @@ class MapperEvents(event.Events): * ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent event handlers in the chain. * other values - the return value specified by specific listeners, - such as :meth:`~.MapperEvents.translate_row` or + such as :meth:`~.MapperEvents.translate_row` or :meth:`~.MapperEvents.create_instance`. 
""" @@ -340,7 +340,7 @@ class MapperEvents(event.Events): return target @classmethod - def _listen(cls, target, identifier, fn, + def _listen(cls, target, identifier, fn, raw=False, retval=False, propagate=False): if not raw or not retval: @@ -370,7 +370,7 @@ class MapperEvents(event.Events): event.Events._listen(target, identifier, fn) def instrument_class(self, mapper, class_): - """Receive a class when the mapper is first constructed, + """Receive a class when the mapper is first constructed, before instrumentation is applied to the mapped class. This event is the earliest phase of mapper construction. @@ -388,8 +388,16 @@ class MapperEvents(event.Events): def mapper_configured(self, mapper, class_): """Called when the mapper for the class is fully configured. - This event is the latest phase of mapper construction. - The mapper should be in its final state. + This event is the latest phase of mapper construction, and + is invoked when the mapped classes are first used, so that relationships + between mappers can be resolved. When the event is called, + the mapper should be in its final state. + + While the configuration event normally occurs automatically, + it can be forced to occur ahead of time, in the case where the event + is needed before any actual mapper usage, by using the + :func:`.configure_mappers` function. + :param mapper: the :class:`.Mapper` which is the target of this event. @@ -404,11 +412,11 @@ class MapperEvents(event.Events): This corresponds to the :func:`.orm.configure_mappers` call, which note is usually called automatically as mappings are first used. - + Theoretically this event is called once per application, but is actually called any time new mappers have been affected by a :func:`.orm.configure_mappers` call. If new mappings - are constructed after existing ones have already been used, + are constructed after existing ones have already been used, this event can be called again. 
""" @@ -420,9 +428,9 @@ class MapperEvents(event.Events): This listener is typically registered with ``retval=True``. It is called when the mapper first receives a row, before the object identity or the instance itself has been derived - from that row. The given row may or may not be a + from that row. The given row may or may not be a :class:`.RowProxy` object - it will always be a dictionary-like - object which contains mapped columns as keys. The + object which contains mapped columns as keys. The returned object should also be a dictionary-like object which recognizes mapped columns as keys. @@ -431,7 +439,7 @@ class MapperEvents(event.Events): :param context: the :class:`.QueryContext`, which includes a handle to the current :class:`.Query` in progress as well as additional state information. - :param row: the result row being handled. This may be + :param row: the result row being handled. This may be an actual :class:`.RowProxy` or may be a dictionary containing :class:`.Column` objects as keys. :return: When configured with ``retval=True``, the function @@ -454,18 +462,18 @@ class MapperEvents(event.Events): :param context: the :class:`.QueryContext`, which includes a handle to the current :class:`.Query` in progress as well as additional state information. - :param row: the result row being handled. This may be + :param row: the result row being handled. This may be an actual :class:`.RowProxy` or may be a dictionary containing :class:`.Column` objects as keys. :param class\_: the mapped class. :return: When configured with ``retval=True``, the return value - should be a newly created instance of the mapped class, + should be a newly created instance of the mapped class, or ``EXT_CONTINUE`` indicating that default object construction should take place. 
""" - def append_result(self, mapper, context, row, target, + def append_result(self, mapper, context, row, target, result, **flags): """Receive an object instance before that instance is appended to a result list. @@ -478,27 +486,27 @@ class MapperEvents(event.Events): :param context: the :class:`.QueryContext`, which includes a handle to the current :class:`.Query` in progress as well as additional state information. - :param row: the result row being handled. This may be + :param row: the result row being handled. This may be an actual :class:`.RowProxy` or may be a dictionary containing :class:`.Column` objects as keys. - :param target: the mapped instance being populated. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being populated. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param result: a list-like object where results are being appended. - :param \**flags: Additional state information about the + :param \**flags: Additional state information about the current handling of the row. :return: If this method is registered with ``retval=True``, a return value of ``EXT_STOP`` will prevent the instance - from being appended to the given result list, whereas a + from being appended to the given result list, whereas a return value of ``EXT_CONTINUE`` will result in the default behavior of appending the value to the result list. """ - def populate_instance(self, mapper, context, row, + def populate_instance(self, mapper, context, row, target, **flags): """Receive an instance before that instance has its attributes populated. @@ -518,11 +526,11 @@ class MapperEvents(event.Events): :param context: the :class:`.QueryContext`, which includes a handle to the current :class:`.Query` in progress as well as additional state information. - :param row: the result row being handled. 
This may be + :param row: the result row being handled. This may be an actual :class:`.RowProxy` or may be a dictionary containing :class:`.Column` objects as keys. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: When configured with ``retval=True``, a return @@ -536,9 +544,9 @@ class MapperEvents(event.Events): """Receive an object instance before an INSERT statement is emitted corresponding to that instance. - This event is used to modify local, non-object related + This event is used to modify local, non-object related attributes on the instance before an INSERT occurs, as well - as to emit additional SQL statements on the given + as to emit additional SQL statements on the given connection. The event is often called for a batch of objects of the @@ -552,23 +560,23 @@ class MapperEvents(event.Events): .. warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. 
* Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -576,12 +584,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit INSERT statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being persisted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -594,7 +602,7 @@ class MapperEvents(event.Events): This event is used to modify in-Python-only state on the instance after an INSERT occurs, as well - as to emit additional SQL statements on the given + as to emit additional SQL statements on the given connection. The event is often called for a batch of objects of the @@ -608,23 +616,23 @@ class MapperEvents(event.Events): .. 
warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -632,12 +640,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit INSERT statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being persisted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being persisted. 
If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -648,9 +656,9 @@ class MapperEvents(event.Events): """Receive an object instance before an UPDATE statement is emitted corresponding to that instance. - This event is used to modify local, non-object related + This event is used to modify local, non-object related attributes on the instance before an UPDATE occurs, as well - as to emit additional SQL statements on the given + as to emit additional SQL statements on the given connection. This method is called for all instances that are @@ -683,23 +691,23 @@ class MapperEvents(event.Events): .. warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. 
* In a ``@validates`` handler, see :ref:`simple_validators` @@ -707,12 +715,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit UPDATE statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being persisted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -724,12 +732,12 @@ class MapperEvents(event.Events): This event is used to modify in-Python-only state on the instance after an UPDATE occurs, as well - as to emit additional SQL statements on the given + as to emit additional SQL statements on the given connection. This method is called for all instances that are marked as "dirty", *even those which have no net changes - to their column-based attributes*, and for which + to their column-based attributes*, and for which no UPDATE statement has proceeded. An object is marked as dirty when any of its column-based attributes have a "set attribute" operation called or when any of its @@ -756,23 +764,23 @@ class MapperEvents(event.Events): .. 
warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -780,12 +788,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit UPDATE statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being persisted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being persisted. 
If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -796,33 +804,33 @@ class MapperEvents(event.Events): """Receive an object instance before a DELETE statement is emitted corresponding to that instance. - This event is used to emit additional SQL statements on + This event is used to emit additional SQL statements on the given connection as well as to perform application specific bookkeeping related to a deletion event. The event is often called for a batch of objects of the same class before their DELETE statements are emitted at - once in a later step. + once in a later step. .. warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. 
* In a ``@validates`` handler, see :ref:`simple_validators` @@ -830,12 +838,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit DELETE statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being deleted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being deleted. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -846,33 +854,33 @@ class MapperEvents(event.Events): """Receive an object instance after a DELETE statement has been emitted corresponding to that instance. - This event is used to emit additional SQL statements on + This event is used to emit additional SQL statements on the given connection as well as to perform application specific bookkeeping related to a deletion event. The event is often called for a batch of objects of the same class after their DELETE statements have been emitted at - once in a previous step. + once in a previous step. .. 
warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -880,12 +888,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit DELETE statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being deleted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being deleted. 
If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -952,7 +960,7 @@ class SessionEvents(event.Events): transaction is ongoing. :param session: The target :class:`.Session`. - + """ def after_commit(self, session): @@ -960,19 +968,19 @@ class SessionEvents(event.Events): Note that this may not be per-flush if a longer running transaction is ongoing. - + :param session: The target :class:`.Session`. - + """ def after_rollback(self, session): """Execute after a real DBAPI rollback has occurred. - + Note that this event only fires when the *actual* rollback against - the database occurs - it does *not* fire each time the - :meth:`.Session.rollback` method is called, if the underlying + the database occurs - it does *not* fire each time the + :meth:`.Session.rollback` method is called, if the underlying DBAPI transaction has already been rolled back. In many - cases, the :class:`.Session` will not be in + cases, the :class:`.Session` will not be in an "active" state during this event, as the current transaction is not valid. To acquire a :class:`.Session` which is active after the outermost rollback has proceeded, @@ -984,30 +992,30 @@ class SessionEvents(event.Events): """ def after_soft_rollback(self, session, previous_transaction): - """Execute after any rollback has occurred, including "soft" + """Execute after any rollback has occurred, including "soft" rollbacks that don't actually emit at the DBAPI level. - + This corresponds to both nested and outer rollbacks, i.e. - the innermost rollback that calls the DBAPI's - rollback() method, as well as the enclosing rollback + the innermost rollback that calls the DBAPI's + rollback() method, as well as the enclosing rollback calls that only pop themselves from the transaction stack. 
- - The given :class:`.Session` can be used to invoke SQL and - :meth:`.Session.query` operations after an outermost rollback + + The given :class:`.Session` can be used to invoke SQL and + :meth:`.Session.query` operations after an outermost rollback by first checking the :attr:`.Session.is_active` flag:: @event.listens_for(Session, "after_soft_rollback") def do_something(session, previous_transaction): if session.is_active: session.execute("select * from some_table") - + :param session: The target :class:`.Session`. :param previous_transaction: The :class:`.SessionTransaction` transactional marker object which was just closed. The current :class:`.SessionTransaction` for the given :class:`.Session` is available via the :attr:`.Session.transaction` attribute. - New in 0.7.3. + .. versionadded:: 0.7.3 """ @@ -1030,7 +1038,7 @@ class SessionEvents(event.Events): Note that the session's state is still in pre-flush, i.e. 'new', 'dirty', and 'deleted' lists still show pre-flush state as well as the history settings on instance attributes. - + :param session: The target :class:`.Session`. :param flush_context: Internal :class:`.UOWTransaction` object which handles the details of the flush. @@ -1044,8 +1052,8 @@ class SessionEvents(event.Events): This will be when the 'new', 'dirty', and 'deleted' lists are in their final state. An actual commit() may or may not have occurred, depending on whether or not the flush started its own - transaction or participated in a larger transaction. - + transaction or participated in a larger transaction. + :param session: The target :class:`.Session`. :param flush_context: Internal :class:`.UOWTransaction` object which handles the details of the flush. @@ -1056,9 +1064,9 @@ class SessionEvents(event.Events): :param session: The target :class:`.Session`. :param transaction: The :class:`.SessionTransaction`. 
- :param connection: The :class:`~.engine.base.Connection` object + :param connection: The :class:`~.engine.base.Connection` object which will be used for SQL statements. - + """ def after_attach(self, session, instance): @@ -1072,7 +1080,7 @@ class SessionEvents(event.Events): This is called as a result of the :meth:`.Query.update` method. :param query: the :class:`.Query` object that this update operation was - called upon. + called upon. :param query_context: The :class:`.QueryContext` object, corresponding to the invocation of an ORM query. :param result: the :class:`.ResultProxy` returned as a result of the @@ -1086,7 +1094,7 @@ class SessionEvents(event.Events): This is called as a result of the :meth:`.Query.delete` method. :param query: the :class:`.Query` object that this update operation was - called upon. + called upon. :param query_context: The :class:`.QueryContext` object, corresponding to the invocation of an ORM query. :param result: the :class:`.ResultProxy` returned as a result of the @@ -1137,15 +1145,15 @@ class AttributeEvents(event.Events): :param propagate=False: When True, the listener function will be established not just for the class attribute given, but - for attributes of the same name on all current subclasses - of that class, as well as all future subclasses of that - class, using an additional listener that listens for + for attributes of the same name on all current subclasses + of that class, as well as all future subclasses of that + class, using an additional listener that listens for instrumentation events. :param raw=False: When True, the "target" argument to the event will be the :class:`.InstanceState` management object, rather than the mapped instance itself. - :param retval=False: when True, the user-defined event - listening must return the "value" argument from the + :param retval=False: when True, the user-defined event + listening must return the "value" argument from the function. 
This gives the listening function the opportunity to change the value that is ultimately used for a "set" or "append" event. @@ -1161,7 +1169,7 @@ class AttributeEvents(event.Events): return target @classmethod - def _listen(cls, target, identifier, fn, active_history=False, + def _listen(cls, target, identifier, fn, active_history=False, raw=False, retval=False, propagate=False): if active_history: @@ -1202,9 +1210,9 @@ class AttributeEvents(event.Events): be the :class:`.InstanceState` object. :param value: the value being appended. If this listener is registered with ``retval=True``, the listener - function must return this value, or a new value which + function must return this value, or a new value which replaces it. - :param initiator: the attribute implementation object + :param initiator: the attribute implementation object which initiated this event. :return: if the event was registered with ``retval=True``, the given value, or a new effective value, should be returned. @@ -1218,7 +1226,7 @@ class AttributeEvents(event.Events): If the listener is registered with ``raw=True``, this will be the :class:`.InstanceState` object. :param value: the value being removed. - :param initiator: the attribute implementation object + :param initiator: the attribute implementation object which initiated this event. :return: No return value is defined for this event. """ @@ -1231,15 +1239,15 @@ class AttributeEvents(event.Events): be the :class:`.InstanceState` object. :param value: the value being set. If this listener is registered with ``retval=True``, the listener - function must return this value, or a new value which + function must return this value, or a new value which replaces it. :param oldvalue: the previous value being replaced. This may also be the symbol ``NEVER_SET`` or ``NO_VALUE``. 
If the listener is registered with ``active_history=True``, the previous value of the attribute will be loaded from - the database if the existing value is currently unloaded + the database if the existing value is currently unloaded or expired. - :param initiator: the attribute implementation object + :param initiator: the attribute implementation object which initiated this event. :return: if the event was registered with ``retval=True``, the given value, or a new effective value, should be returned. diff --git a/libs/sqlalchemy/orm/exc.py b/libs/sqlalchemy/orm/exc.py index 2aaf1490..a116d204 100644 --- a/libs/sqlalchemy/orm/exc.py +++ b/libs/sqlalchemy/orm/exc.py @@ -1,5 +1,5 @@ # orm/exc.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -18,21 +18,23 @@ class StaleDataError(sa.exc.SQLAlchemyError): Conditions which cause this to happen include: * A flush may have attempted to update or delete rows - and an unexpected number of rows were matched during - the UPDATE or DELETE statement. Note that when + and an unexpected number of rows were matched during + the UPDATE or DELETE statement. Note that when version_id_col is used, rows in UPDATE or DELETE statements are also matched against the current known version identifier. - * A mapped object with version_id_col was refreshed, + * A mapped object with version_id_col was refreshed, and the version number coming back from the database does not match that of the object itself. - + * A object is detached from its parent object, however the object was previously attached to a different parent identity which was garbage collected, and a decision cannot be made if the new parent was really the most - recent "parent" (new in 0.7.4). + recent "parent". + + .. 
versionadded:: 0.7.4 """ @@ -50,7 +52,7 @@ class ObjectDereferencedError(sa.exc.SQLAlchemyError): """An operation cannot complete due to an object being garbage collected.""" class DetachedInstanceError(sa.exc.SQLAlchemyError): - """An attempt to access unloaded attributes on a + """An attempt to access unloaded attributes on a mapped instance that is detached.""" class UnmappedInstanceError(UnmappedError): @@ -89,21 +91,21 @@ class UnmappedClassError(UnmappedError): class ObjectDeletedError(sa.exc.InvalidRequestError): """A refresh operation failed to retrieve the database row corresponding to an object's known primary key identity. - - A refresh operation proceeds when an expired attribute is + + A refresh operation proceeds when an expired attribute is accessed on an object, or when :meth:`.Query.get` is used to retrieve an object which is, upon retrieval, detected as expired. A SELECT is emitted for the target row based on primary key; if no row is returned, this exception is raised. - - The true meaning of this exception is simply that + + The true meaning of this exception is simply that no row exists for the primary key identifier associated - with a persistent object. The row may have been + with a persistent object. The row may have been deleted, or in some cases the primary key updated to a new value, outside of the ORM's management of the target - object. - + object. 
+ """ def __init__(self, state, msg=None): if not msg: diff --git a/libs/sqlalchemy/orm/identity.py b/libs/sqlalchemy/orm/identity.py index 59d121de..29e13f2c 100644 --- a/libs/sqlalchemy/orm/identity.py +++ b/libs/sqlalchemy/orm/identity.py @@ -1,5 +1,5 @@ # orm/identity.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -128,7 +128,7 @@ class WeakInstanceDict(IdentityMap): o = existing_state._is_really_none() if o is not None: raise AssertionError("A conflicting state is already " - "present in the identity map for key %r" + "present in the identity map for key %r" % (key, )) else: return diff --git a/libs/sqlalchemy/orm/instrumentation.py b/libs/sqlalchemy/orm/instrumentation.py index af9ef784..0006accb 100644 --- a/libs/sqlalchemy/orm/instrumentation.py +++ b/libs/sqlalchemy/orm/instrumentation.py @@ -1,5 +1,5 @@ # orm/instrumentation.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -91,7 +91,7 @@ class ClassManager(dict): self.originals = {} self._bases = [mgr for mgr in [ - manager_of_class(base) + manager_of_class(base) for base in self.class_.__bases__ if isinstance(base, type) ] if mgr is not None] @@ -139,7 +139,7 @@ class ClassManager(dict): def _instrument_init(self): # TODO: self.class_.__init__ is often the already-instrumented - # __init__ from an instrumented superclass. We still need to make + # __init__ from an instrumented superclass. We still need to make # our own wrapper, but it would # be nice to wrap the original __init__ and not our existing wrapper # of such, since this adds method overhead. 
@@ -212,7 +212,7 @@ class ClassManager(dict): if key in self.mutable_attributes: self.mutable_attributes.remove(key) for cls in self.class_.__subclasses__(): - manager = manager_of_class(cls) + manager = manager_of_class(cls) if manager: manager.uninstrument_attribute(key, True) @@ -277,12 +277,12 @@ class ClassManager(dict): def new_instance(self, state=None): instance = self.class_.__new__(self.class_) - setattr(instance, self.STATE_ATTR, + setattr(instance, self.STATE_ATTR, state or self._state_constructor(instance, self)) return instance def setup_instance(self, instance, state=None): - setattr(instance, self.STATE_ATTR, + setattr(instance, self.STATE_ATTR, state or self._state_constructor(instance, self)) def teardown_instance(self, instance): @@ -387,7 +387,7 @@ class _ClassInstrumentationAdapter(ClassManager): if delegate: return delegate(key, state, factory) else: - return ClassManager.initialize_collection(self, key, + return ClassManager.initialize_collection(self, key, state, factory) def new_instance(self, state=None): @@ -463,7 +463,7 @@ def is_instrumented(instance, key): class InstrumentationRegistry(object): """Private instrumentation registration singleton. - All classes are routed through this registry + All classes are routed through this registry when first instrumented, however the InstrumentationRegistry is not actually needed unless custom ClassManagers are in use. @@ -501,7 +501,7 @@ class InstrumentationRegistry(object): if factory != ClassManager and not self._extended: # somebody invoked a custom ClassManager. - # reinstall global "getter" functions with the more + # reinstall global "getter" functions with the more # expensive ones. 
self._extended = True _install_lookup_strategy(self) @@ -543,7 +543,7 @@ class InstrumentationRegistry(object): return factories def manager_of_class(self, cls): - # this is only called when alternate instrumentation + # this is only called when alternate instrumentation # has been established if cls is None: return None @@ -555,7 +555,7 @@ class InstrumentationRegistry(object): return finder(cls) def state_of(self, instance): - # this is only called when alternate instrumentation + # this is only called when alternate instrumentation # has been established if instance is None: raise AttributeError("None has no persistent state.") @@ -566,7 +566,7 @@ class InstrumentationRegistry(object): instance.__class__) def dict_of(self, instance): - # this is only called when alternate instrumentation + # this is only called when alternate instrumentation # has been established if instance is None: raise AttributeError("None has no persistent state.") @@ -632,7 +632,7 @@ instrumentation_finders.append(find_native_user_instrumentation_hook) def _generate_init(class_, class_manager): """Build an __init__ decorator that triggers ClassManager events.""" - # TODO: we should use the ClassManager's notion of the + # TODO: we should use the ClassManager's notion of the # original '__init__' method, once ClassManager is fixed # to always reference that. 
original__init__ = class_.__init__ diff --git a/libs/sqlalchemy/orm/interfaces.py b/libs/sqlalchemy/orm/interfaces.py index bda48cbb..b911ac29 100644 --- a/libs/sqlalchemy/orm/interfaces.py +++ b/libs/sqlalchemy/orm/interfaces.py @@ -1,5 +1,5 @@ # orm/interfaces.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -61,13 +61,13 @@ class MapperProperty(object): attribute, as well as that attribute as it appears on individual instances of the class, including attribute instrumentation, attribute access, loading behavior, and dependency calculations. - + The most common occurrences of :class:`.MapperProperty` are the - mapped :class:`.Column`, which is represented in a mapping as + mapped :class:`.Column`, which is represented in a mapping as an instance of :class:`.ColumnProperty`, and a reference to another class produced by :func:`.relationship`, represented in the mapping as an instance of :class:`.RelationshipProperty`. - + """ cascade = () @@ -87,7 +87,7 @@ class MapperProperty(object): pass - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): """Return a 3-tuple consisting of three row processing functions. @@ -263,13 +263,13 @@ class PropComparator(operators.ColumnOperators): """Return true if this collection contains any member that meets the given criterion. - The usual implementation of ``any()`` is + The usual implementation of ``any()`` is :meth:`.RelationshipProperty.Comparator.any`. - :param criterion: an optional ClauseElement formulated against the + :param criterion: an optional ClauseElement formulated against the member class' table or attributes. 
- :param \**kwargs: key/value pairs corresponding to member class attribute + :param \**kwargs: key/value pairs corresponding to member class attribute names which will be compared via equality to the corresponding values. @@ -281,13 +281,13 @@ class PropComparator(operators.ColumnOperators): """Return true if this element references a member which meets the given criterion. - The usual implementation of ``has()`` is + The usual implementation of ``has()`` is :meth:`.RelationshipProperty.Comparator.has`. - :param criterion: an optional ClauseElement formulated against the + :param criterion: an optional ClauseElement formulated against the member class' table or attributes. - :param \**kwargs: key/value pairs corresponding to member class attribute + :param \**kwargs: key/value pairs corresponding to member class attribute names which will be compared via equality to the corresponding values. @@ -337,12 +337,12 @@ class StrategizedProperty(MapperProperty): def setup(self, context, entity, path, reduced_path, adapter, **kwargs): self._get_context_strategy(context, reduced_path + (self.key,)).\ - setup_query(context, entity, path, + setup_query(context, entity, path, reduced_path, adapter, **kwargs) def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): return self._get_context_strategy(context, reduced_path + (self.key,)).\ - create_row_processor(context, path, + create_row_processor(context, path, reduced_path, mapper, row, adapter) def do_init(self): @@ -365,7 +365,7 @@ def serialize_path(path): return None return zip( - [m.class_ for m in [path[i] for i in range(0, len(path), 2)]], + [m.class_ for m in [path[i] for i in range(0, len(path), 2)]], [path[i] for i in range(1, len(path), 2)] + [None] ) @@ -382,7 +382,7 @@ class MapperOption(object): """Describe a modification to a Query.""" propagate_to_loaders = False - """if True, indicate this option should be carried along + """if True, indicate this option should be carried along Query 
object generated by scalar or object lazy loaders. """ @@ -464,9 +464,9 @@ class PropertyOption(MapperOption): else: raise sa_exc.ArgumentError( "Can't find property '%s' on any entity " - "specified in this Query. Note the full path " - "from root (%s) to target entity must be specified." - % (token, ",".join(str(x) for + "specified in this Query. Note the full path " + "from root (%s) to target entity must be specified." + % (token, ",".join(str(x) for x in query._mapper_entities)) ) else: @@ -494,7 +494,7 @@ class PropertyOption(MapperOption): l = [] mappers = [] - # _current_path implies we're in a + # _current_path implies we're in a # secondary load with an existing path current_path = list(query._current_path) @@ -520,8 +520,8 @@ class PropertyOption(MapperOption): if not entity: entity = self._find_entity_basestring( - query, - token, + query, + token, raiseerr) if entity is None: return [], [] @@ -555,8 +555,8 @@ class PropertyOption(MapperOption): if not entity: entity = self._find_entity_prop_comparator( query, - prop.key, - token.parententity, + prop.key, + token.parententity, raiseerr) if not entity: return [], [] @@ -587,7 +587,7 @@ class PropertyOption(MapperOption): ) if current_path: - # ran out of tokens before + # ran out of tokens before # current_path was exhausted. assert not tokens return [], [] @@ -630,9 +630,9 @@ def _reduce_path(path): of the mapper referenced by Mapper.prop1. 
""" - return tuple([i % 2 != 0 and - element or - getattr(element, 'base_mapper', element) + return tuple([i % 2 != 0 and + element or + getattr(element, 'base_mapper', element) for i, element in enumerate(path)]) class LoaderStrategy(object): @@ -678,7 +678,7 @@ class LoaderStrategy(object): def setup_query(self, context, entity, path, reduced_path, adapter, **kwargs): pass - def create_row_processor(self, context, path, reduced_path, mapper, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): """Return row processing functions which fulfill the contract specified by MapperProperty.create_row_processor. diff --git a/libs/sqlalchemy/orm/mapper.py b/libs/sqlalchemy/orm/mapper.py index e96b7549..de4d351b 100644 --- a/libs/sqlalchemy/orm/mapper.py +++ b/libs/sqlalchemy/orm/mapper.py @@ -1,5 +1,5 @@ # orm/mapper.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -214,16 +214,16 @@ class Mapper(object): local_table = None """The :class:`.Selectable` which this :class:`.Mapper` manages. - Typically is an instance of :class:`.Table` or :class:`.Alias`. - May also be ``None``. + Typically is an instance of :class:`.Table` or :class:`.Alias`. + May also be ``None``. The "local" table is the - selectable that the :class:`.Mapper` is directly responsible for + selectable that the :class:`.Mapper` is directly responsible for managing from an attribute access and flush perspective. For non-inheriting mappers, the local table is the same as the "mapped" table. For joined-table inheritance mappers, local_table will be the particular sub-table of the overall "join" which - this :class:`.Mapper` represents. If this mapper is a + this :class:`.Mapper` represents. 
If this mapper is a single-table inheriting mapper, local_table will be ``None``. See also :attr:`~.Mapper.mapped_table`. @@ -233,11 +233,11 @@ class Mapper(object): mapped_table = None """The :class:`.Selectable` to which this :class:`.Mapper` is mapped. - Typically an instance of :class:`.Table`, :class:`.Join`, or + Typically an instance of :class:`.Table`, :class:`.Join`, or :class:`.Alias`. - The "mapped" table is the selectable that - the mapper selects from during queries. For non-inheriting + The "mapped" table is the selectable that + the mapper selects from during queries. For non-inheriting mappers, the mapped table is the same as the "local" table. For joined-table inheritance mappers, mapped_table references the full :class:`.Join` representing full rows for this particular @@ -249,7 +249,7 @@ class Mapper(object): """ inherits = None - """References the :class:`.Mapper` which this :class:`.Mapper` + """References the :class:`.Mapper` which this :class:`.Mapper` inherits from, if any. This is a *read only* attribute determined during mapper construction. @@ -268,7 +268,7 @@ class Mapper(object): """ concrete = None - """Represent ``True`` if this :class:`.Mapper` is a concrete + """Represent ``True`` if this :class:`.Mapper` is a concrete inheritance mapper. This is a *read only* attribute determined during mapper construction. @@ -291,7 +291,7 @@ class Mapper(object): primary_key = None """An iterable containing the collection of :class:`.Column` objects - which comprise the 'primary key' of the mapped table, from the + which comprise the 'primary key' of the mapped table, from the perspective of this :class:`.Mapper`. This list is against the selectable in :attr:`~.Mapper.mapped_table`. In the @@ -301,7 +301,7 @@ class Mapper(object): referenced by the :class:`.Join`. 
The list is also not necessarily the same as the primary key column - collection associated with the underlying tables; the :class:`.Mapper` + collection associated with the underlying tables; the :class:`.Mapper` features a ``primary_key`` argument that can override what the :class:`.Mapper` considers as primary key columns. @@ -328,7 +328,7 @@ class Mapper(object): """ single = None - """Represent ``True`` if this :class:`.Mapper` is a single table + """Represent ``True`` if this :class:`.Mapper` is a single table inheritance mapper. :attr:`~.Mapper.local_table` will be ``None`` if this flag is set. @@ -339,8 +339,8 @@ class Mapper(object): """ non_primary = None - """Represent ``True`` if this :class:`.Mapper` is a "non-primary" - mapper, e.g. a mapper that is used only to selet rows but not for + """Represent ``True`` if this :class:`.Mapper` is a "non-primary" + mapper, e.g. a mapper that is used only to selet rows but not for persistence management. This is a *read only* attribute determined during mapper construction. @@ -364,10 +364,10 @@ class Mapper(object): """A mapping of "polymorphic identity" identifiers mapped to :class:`.Mapper` instances, within an inheritance scenario. - The identifiers can be of any type which is comparable to the + The identifiers can be of any type which is comparable to the type of column represented by :attr:`~.Mapper.polymorphic_on`. - An inheritance chain of mappers will all reference the same + An inheritance chain of mappers will all reference the same polymorphic map object. The object is used to correlate incoming result rows to target mappers. @@ -402,10 +402,10 @@ class Mapper(object): """ columns = None - """A collection of :class:`.Column` or other scalar expression + """A collection of :class:`.Column` or other scalar expression objects maintained by this :class:`.Mapper`. 
- The collection behaves the same as that of the ``c`` attribute on + The collection behaves the same as that of the ``c`` attribute on any :class:`.Table` object, except that only those columns included in this mapping are present, and are keyed based on the attribute name defined in the mapping, not necessarily the ``key`` attribute of the @@ -419,11 +419,11 @@ class Mapper(object): validators = None """An immutable dictionary of attributes which have been decorated - using the :func:`~.orm.validates` decorator. - + using the :func:`~.orm.validates` decorator. + The dictionary contains string attribute names as keys mapped to the actual validation method. - + """ c = None @@ -443,13 +443,13 @@ class Mapper(object): self.inherits = class_mapper(self.inherits, compile=False) if not issubclass(self.class_, self.inherits.class_): raise sa_exc.ArgumentError( - "Class '%s' does not inherit from '%s'" % + "Class '%s' does not inherit from '%s'" % (self.class_.__name__, self.inherits.class_.__name__)) if self.non_primary != self.inherits.non_primary: np = not self.non_primary and "primary" or "non-primary" raise sa_exc.ArgumentError( "Inheritance of %s mapper for class '%s' is " - "only allowed from a %s mapper" % + "only allowed from a %s mapper" % (np, self.class_.__name__, np)) # inherit_condition is optional. if self.local_table is None: @@ -472,7 +472,7 @@ class Mapper(object): self.inherits.local_table, self.local_table) self.mapped_table = sql.join( - self.inherits.mapped_table, + self.inherits.mapped_table, self.local_table, self.inherit_condition) @@ -499,7 +499,7 @@ class Mapper(object): "the inherited versioning column. " "version_id_col should only be specified on " "the base-most mapper that includes versioning." 
% - (self.version_id_col.description, + (self.version_id_col.description, self.inherits.version_id_col.description) ) @@ -528,7 +528,7 @@ class Mapper(object): if self.mapped_table is None: raise sa_exc.ArgumentError( - "Mapper '%s' does not have a mapped_table specified." + "Mapper '%s' does not have a mapped_table specified." % self) def _set_with_polymorphic(self, with_polymorphic): @@ -580,6 +580,12 @@ class Mapper(object): self.inherits._inheriting_mappers.add(self) self.passive_updates = self.inherits.passive_updates self._all_tables = self.inherits._all_tables + for key, prop in mapper._props.iteritems(): + if key not in self._props and \ + not self._should_exclude(key, key, local=False, + column=None): + self._adapt_inherited_property(key, prop, False) + def _set_polymorphic_on(self, polymorphic_on): self.polymorphic_on = polymorphic_on @@ -589,7 +595,7 @@ class Mapper(object): if self.inherits: self.dispatch._update(self.inherits.dispatch) - super_extensions = set(chain(*[m._deprecated_extensions + super_extensions = set(chain(*[m._deprecated_extensions for m in self.inherits.iterate_to_root()])) else: super_extensions = set() @@ -600,7 +606,7 @@ class Mapper(object): def _configure_listeners(self): if self.inherits: - super_extensions = set(chain(*[m._deprecated_extensions + super_extensions = set(chain(*[m._deprecated_extensions for m in self.inherits.iterate_to_root()])) else: super_extensions = set() @@ -647,8 +653,8 @@ class Mapper(object): "remove *all* current mappers from all classes." % self.class_) #else: - # a ClassManager may already exist as - # ClassManager.instrument_attribute() creates + # a ClassManager may already exist as + # ClassManager.instrument_attribute() creates # new managers for each subclass if they don't yet exist. 
_mapper_registry[self] = True @@ -662,8 +668,8 @@ class Mapper(object): manager.mapper = self manager.deferred_scalar_loader = self._load_scalar_attributes - - # The remaining members can be added by any mapper, + + # The remaining members can be added by any mapper, # e_name None or not. if manager.info.get(_INSTRUMENTOR, False): return @@ -678,9 +684,10 @@ class Mapper(object): self._reconstructor = method event.listen(manager, 'load', _event_on_load, raw=True) elif hasattr(method, '__sa_validators__'): + include_removes = getattr(method, "__sa_include_removes__", False) for name in method.__sa_validators__: self.validators = self.validators.union( - {name : method} + {name : (method, include_removes)} ) manager.info[_INSTRUMENTOR] = self @@ -746,10 +753,10 @@ class Mapper(object): self._readonly_props = set( self._columntoproperty[col] for col in self._columntoproperty - if not hasattr(col, 'table') or + if not hasattr(col, 'table') or col.table not in self._cols_by_table) - # if explicit PK argument sent, add those columns to the + # if explicit PK argument sent, add those columns to the # primary key mappings if self._primary_key_argument: for k in self._primary_key_argument: @@ -762,23 +769,23 @@ class Mapper(object): len(self._pks_by_table[self.mapped_table]) == 0: raise sa_exc.ArgumentError( "Mapper %s could not assemble any primary " - "key columns for mapped table '%s'" % + "key columns for mapped table '%s'" % (self, self.mapped_table.description)) elif self.local_table not in self._pks_by_table and \ isinstance(self.local_table, schema.Table): util.warn("Could not assemble any primary " "keys for locally mapped table '%s' - " - "no rows will be persisted in this Table." + "no rows will be persisted in this Table." 
% self.local_table.description) if self.inherits and \ not self.concrete and \ not self._primary_key_argument: - # if inheriting, the "primary key" for this mapper is + # if inheriting, the "primary key" for this mapper is # that of the inheriting (unless concrete or explicit) self.primary_key = self.inherits.primary_key else: - # determine primary key from argument or mapped_table pks - + # determine primary key from argument or mapped_table pks - # reduce to the minimal set of columns if self._primary_key_argument: primary_key = sqlutil.reduce_columns( @@ -793,7 +800,7 @@ class Mapper(object): if len(primary_key) == 0: raise sa_exc.ArgumentError( "Mapper %s could not assemble any primary " - "key columns for mapped table '%s'" % + "key columns for mapped table '%s'" % (self, self.mapped_table.description)) self.primary_key = tuple(primary_key) @@ -845,19 +852,19 @@ class Mapper(object): if column in mapper._columntoproperty: column_key = mapper._columntoproperty[column].key - self._configure_property(column_key, - column, - init=False, + self._configure_property(column_key, + column, + init=False, setparent=True) def _configure_polymorphic_setter(self, init=False): - """Configure an attribute on the mapper representing the - 'polymorphic_on' column, if applicable, and not + """Configure an attribute on the mapper representing the + 'polymorphic_on' column, if applicable, and not already generated by _configure_properties (which is typical). Also create a setter function which will assign this attribute to the value of the 'polymorphic_identity' - upon instance construction, also if applicable. This + upon instance construction, also if applicable. This routine will run when an instance is created. """ @@ -906,15 +913,15 @@ class Mapper(object): else: # polymorphic_on is a Column or SQL expression and doesn't # appear to be mapped. - # this means it can be 1. only present in the with_polymorphic + # this means it can be 1. 
only present in the with_polymorphic # selectable or 2. a totally standalone SQL expression which we'd # hope is compatible with this mapper's mapped_table col = self.mapped_table.corresponding_column(self.polymorphic_on) if col is None: - # polymorphic_on doesn't derive from any column/expression + # polymorphic_on doesn't derive from any column/expression # isn't present in the mapped table. - # we will make a "hidden" ColumnProperty for it. - # Just check that if it's directly a schema.Column and we + # we will make a "hidden" ColumnProperty for it. + # Just check that if it's directly a schema.Column and we # have with_polymorphic, it's likely a user error if the # schema.Column isn't represented somehow in either mapped_table or # with_polymorphic. Otherwise as of 0.7.4 we just go with it @@ -932,15 +939,14 @@ class Mapper(object): "loads will not function properly" % col.description) else: - # column/expression that polymorphic_on derives from + # column/expression that polymorphic_on derives from # is present in our mapped table # and is probably mapped, but polymorphic_on itself - # is not. This happens when - # the polymorphic_on is only directly present in the + # is not. This happens when + # the polymorphic_on is only directly present in the # with_polymorphic selectable, as when use polymorphic_union. # we'll make a separate ColumnProperty for it. 
instrument = True - key = getattr(col, 'key', None) if key: if self._should_exclude(col.key, col.key, False, col): @@ -952,7 +958,7 @@ class Mapper(object): key = col.key self._configure_property( - key, + key, properties.ColumnProperty(col, _instrument=instrument), init=init, setparent=True) polymorphic_key = key @@ -998,15 +1004,15 @@ class Mapper(object): self._configure_property(key, prop, init=False, setparent=False) elif key not in self._props: self._configure_property( - key, - properties.ConcreteInheritedProperty(), + key, + properties.ConcreteInheritedProperty(), init=init, setparent=True) def _configure_property(self, key, prop, init=True, setparent=True): self._log("_configure_property(%s, %s)", key, prop.__class__.__name__) if not isinstance(prop, MapperProperty): - # we were passed a Column or a list of Columns; + # we were passed a Column or a list of Columns; # generate a properties.ColumnProperty columns = util.to_list(prop) column = columns[0] @@ -1026,7 +1032,7 @@ class Mapper(object): "explicitly." % (prop.columns[-1], column, key)) - # existing properties.ColumnProperty from an inheriting + # existing properties.ColumnProperty from an inheriting # mapper. make a copy and append our column to it prop = prop.copy() prop.columns.insert(0, column) @@ -1065,14 +1071,14 @@ class Mapper(object): "(including its availability as a foreign key), " "use the 'include_properties' or 'exclude_properties' " "mapper arguments to control specifically which table " - "columns get mapped." % + "columns get mapped." % (key, self, column.key, prop)) if isinstance(prop, properties.ColumnProperty): col = self.mapped_table.corresponding_column(prop.columns[0]) - # if the column is not present in the mapped table, - # test if a column has been added after the fact to the + # if the column is not present in the mapped table, + # test if a column has been added after the fact to the # parent table (or their parent, etc.) 
[ticket:1570] if col is None and self.inherits: path = [self] @@ -1086,20 +1092,20 @@ class Mapper(object): break path.append(m) - # subquery expression, column not present in the mapped + # subquery expression, column not present in the mapped # selectable. if col is None: col = prop.columns[0] - # column is coming in after _readonly_props was + # column is coming in after _readonly_props was # initialized; check for 'readonly' if hasattr(self, '_readonly_props') and \ - (not hasattr(col, 'table') or + (not hasattr(col, 'table') or col.table not in self._cols_by_table): self._readonly_props.add(prop) else: - # if column is coming in after _cols_by_table was + # if column is coming in after _cols_by_table was # initialized, ensure the col is in the right set if hasattr(self, '_cols_by_table') and \ col.table in self._cols_by_table and \ @@ -1199,10 +1205,10 @@ class Mapper(object): def _log_desc(self): return "(" + self.class_.__name__ + \ "|" + \ - (self.local_table is not None and - self.local_table.description or + (self.local_table is not None and + self.local_table.description or str(self.local_table)) +\ - (self.non_primary and + (self.non_primary and "|non-primary" or "") + ")" def _log(self, msg, *args): @@ -1223,7 +1229,7 @@ class Mapper(object): def __str__(self): return "Mapper|%s|%s%s" % ( self.class_.__name__, - self.local_table is not None and + self.local_table is not None and self.local_table.description or None, self.non_primary and "|non-primary" or "" ) @@ -1288,7 +1294,7 @@ class Mapper(object): for m in mappers: if not m.isa(self): raise sa_exc.InvalidRequestError( - "%r does not inherit from %r" % + "%r does not inherit from %r" % (m, self)) else: mappers = [] @@ -1387,7 +1393,7 @@ class Mapper(object): mappers]) ): if getattr(c, '_is_polymorphic_discriminator', False) and \ - (self.polymorphic_on is None or + (self.polymorphic_on is None or c.columns[0] is not self.polymorphic_on): continue yield c @@ -1452,7 +1458,7 @@ class Mapper(object): 
return result def _is_userland_descriptor(self, obj): - if isinstance(obj, (MapperProperty, + if isinstance(obj, (MapperProperty, attributes.QueryableAttribute)): return False elif not hasattr(obj, '__get__'): @@ -1505,7 +1511,7 @@ class Mapper(object): return False def common_parent(self, other): - """Return true if the given mapper shares a + """Return true if the given mapper shares a common inherited parent as this mapper.""" return self.base_mapper is other.base_mapper @@ -1634,7 +1640,7 @@ class Mapper(object): for col in self.primary_key ] - def _get_state_attr_by_column(self, state, dict_, column, + def _get_state_attr_by_column(self, state, dict_, column, passive=attributes.PASSIVE_OFF): prop = self._columntoproperty[column] return state.manager[prop.key].impl.get(state, dict_, passive=passive) @@ -1648,7 +1654,7 @@ class Mapper(object): dict_ = attributes.instance_dict(obj) return self._get_committed_state_attr_by_column(state, dict_, column) - def _get_committed_state_attr_by_column(self, state, dict_, + def _get_committed_state_attr_by_column(self, state, dict_, column, passive=attributes.PASSIVE_OFF): prop = self._columntoproperty[column] @@ -1674,8 +1680,8 @@ class Mapper(object): statement = self._optimized_get_statement(state, attribute_names) if statement is not None: result = session.query(self).from_statement(statement).\ - _load_on_ident(None, - only_load_props=attribute_names, + _load_on_ident(None, + only_load_props=attribute_names, refresh_state=state) if result is False: @@ -1698,16 +1704,16 @@ class Mapper(object): _none_set.issuperset(identity_key): util.warn("Instance %s to be refreshed doesn't " "contain a full primary key - can't be refreshed " - "(and shouldn't be expired, either)." + "(and shouldn't be expired, either)." 
% state_str(state)) return result = session.query(self)._load_on_ident( - identity_key, - refresh_state=state, + identity_key, + refresh_state=state, only_load_props=attribute_names) - # if instance is pending, a refresh operation + # if instance is pending, a refresh operation # may not complete (even if PK attributes are assigned) if has_key and result is None: raise orm_exc.ObjectDeletedError(state) @@ -1716,16 +1722,16 @@ class Mapper(object): """assemble a WHERE clause which retrieves a given state by primary key, using a minimized set of tables. - Applies to a joined-table inheritance mapper where the + Applies to a joined-table inheritance mapper where the requested attribute names are only present on joined tables, - not the base table. The WHERE clause attempts to include + not the base table. The WHERE clause attempts to include only those tables to minimize joins. """ props = self._props tables = set(chain( - *[sqlutil.find_tables(c, check_columns=True) + *[sqlutil.find_tables(c, check_columns=True) for key in attribute_names for c in props[key].columns] )) @@ -1744,8 +1750,8 @@ class Mapper(object): if leftcol.table not in tables: leftval = self._get_committed_state_attr_by_column( - state, state.dict, - leftcol, + state, state.dict, + leftcol, passive=attributes.PASSIVE_NO_INITIALIZE) if leftval is attributes.PASSIVE_NO_RESULT or leftval is None: raise ColumnsNotAvailable() @@ -1753,8 +1759,8 @@ class Mapper(object): type_=binary.right.type) elif rightcol.table not in tables: rightval = self._get_committed_state_attr_by_column( - state, state.dict, - rightcol, + state, state.dict, + rightcol, passive=attributes.PASSIVE_NO_INITIALIZE) if rightval is attributes.PASSIVE_NO_RESULT or rightval is None: raise ColumnsNotAvailable() @@ -1770,8 +1776,8 @@ class Mapper(object): start = True if start and not mapper.single: allconds.append(visitors.cloned_traverse( - mapper.inherit_condition, - {}, + mapper.inherit_condition, + {}, {'binary':visit_binary} ) ) @@ 
-1804,7 +1810,7 @@ class Mapper(object): visited_states = set() prp, mpp = object(), object() - visitables = deque([(deque(self._props.values()), prp, + visitables = deque([(deque(self._props.values()), prp, state, state.dict)]) while visitables: @@ -1817,7 +1823,7 @@ class Mapper(object): prop = iterator.popleft() if type_ not in prop.cascade: continue - queue = deque(prop.cascade_iterator(type_, parent_state, + queue = deque(prop.cascade_iterator(type_, parent_state, parent_dict, visited_states, halt_on)) if queue: visitables.append((queue,mpp, None, None)) @@ -1826,8 +1832,8 @@ class Mapper(object): corresponding_dict = iterator.popleft() yield instance, instance_mapper, \ corresponding_state, corresponding_dict - visitables.append((deque(instance_mapper._props.values()), - prp, corresponding_state, + visitables.append((deque(instance_mapper._props.values()), + prp, corresponding_state, corresponding_dict)) @_memoized_configured_property @@ -1884,7 +1890,7 @@ class Mapper(object): @util.memoized_property def _table_to_equated(self): - """memoized map of tables to collections of columns to be + """memoized map of tables to collections of columns to be synchronized upwards to the base mapper.""" result = util.defaultdict(list) @@ -1900,16 +1906,16 @@ class Mapper(object): return result - def _instance_processor(self, context, path, reduced_path, adapter, - polymorphic_from=None, + def _instance_processor(self, context, path, reduced_path, adapter, + polymorphic_from=None, only_load_props=None, refresh_state=None, polymorphic_discriminator=None): - """Produce a mapper level row processor callable + """Produce a mapper level row processor callable which processes rows into mapped instances.""" # note that this method, most of which exists in a closure - # called _instance(), resists being broken out, as + # called _instance(), resists being broken out, as # attempts to do so tend to add significant function # call overhead. 
_instance() is the most # performance-critical section in the whole ORM. @@ -2019,7 +2025,7 @@ class Mapper(object): identitykey = self._identity_key_from_state(refresh_state) else: identitykey = ( - identity_class, + identity_class, tuple([row[column] for column in pk_cols]) ) @@ -2036,22 +2042,22 @@ class Mapper(object): version_id_col is not None and \ context.version_check and \ self._get_state_attr_by_column( - state, - dict_, + state, + dict_, self.version_id_col) != \ row[version_id_col]: raise orm_exc.StaleDataError( "Instance '%s' has version id '%s' which " - "does not match database-loaded version id '%s'." - % (state_str(state), + "does not match database-loaded version id '%s'." + % (state_str(state), self._get_state_attr_by_column( state, dict_, self.version_id_col), row[version_id_col])) elif refresh_state: # out of band refresh_state detected (i.e. its not in the - # session.identity_map) honor it anyway. this can happen + # session.identity_map) honor it anyway. this can happen # if a _get() occurs within save_obj(), such as # when eager_defaults is True. 
state = refresh_state @@ -2072,7 +2078,7 @@ class Mapper(object): if create_instance: for fn in create_instance: - instance = fn(self, context, + instance = fn(self, context, row, self.class_) if instance is not EXT_CONTINUE: manager = attributes.manager_of_class( @@ -2103,8 +2109,8 @@ class Mapper(object): if populate_instance: for fn in populate_instance: - ret = fn(self, context, row, state, - only_load_props=only_load_props, + ret = fn(self, context, row, state, + only_load_props=only_load_props, instancekey=identitykey, isnew=isnew) if ret is not EXT_CONTINUE: break @@ -2132,8 +2138,8 @@ class Mapper(object): if populate_instance: for fn in populate_instance: - ret = fn(self, context, row, state, - only_load_props=attrs, + ret = fn(self, context, row, state, + only_load_props=attrs, instancekey=identitykey, isnew=isnew) if ret is not EXT_CONTINUE: break @@ -2153,7 +2159,7 @@ class Mapper(object): if result is not None: if append_result: for fn in append_result: - if fn(self, context, row, state, + if fn(self, context, row, state, result, instancekey=identitykey, isnew=isnew) is not EXT_CONTINUE: break @@ -2173,7 +2179,7 @@ class Mapper(object): pops = (new_populators, existing_populators, delayed_populators, eager_populators) for prop in self._props.itervalues(): for i, pop in enumerate(prop.create_row_processor( - context, path, + context, path, reduced_path, self, row, adapter)): if pop is not None: @@ -2196,8 +2202,8 @@ class Mapper(object): if mapper is self: return None - # replace the tip of the path info with the subclass mapper - # being used. that way accurate "load_path" info is available + # replace the tip of the path info with the subclass mapper + # being used. that way accurate "load_path" info is available # for options invoked during deferred loads. # we lose AliasedClass path elements this way, but currently, # those are not needed at this stage. 
@@ -2205,7 +2211,7 @@ class Mapper(object): # this asserts to true #assert mapper.isa(_class_to_mapper(path[-1])) - return mapper._instance_processor(context, path[0:-1] + (mapper,), + return mapper._instance_processor(context, path[0:-1] + (mapper,), reduced_path[0:-1] + (mapper.base_mapper,), adapter, polymorphic_from=self) @@ -2217,14 +2223,14 @@ def configure_mappers(): """Initialize the inter-mapper relationships of all mappers that have been constructed thus far. - This function can be called any number of times, but in + This function can be called any number of times, but in most cases is handled internally. """ global _new_mappers if not _new_mappers: - return + return _call_configured = None _COMPILE_MUTEX.acquire() @@ -2240,8 +2246,8 @@ def configure_mappers(): return # initialize properties on all mappers - # note that _mapper_registry is unordered, which - # may randomly conceal/reveal issues related to + # note that _mapper_registry is unordered, which + # may randomly conceal/reveal issues related to # the order of mapper compilation for mapper in list(_mapper_registry): if getattr(mapper, '_configure_failed', False): @@ -2291,7 +2297,7 @@ def reconstructor(fn): fn.__sa_reconstructor__ = True return fn -def validates(*names): +def validates(*names, **kw): """Decorate a method as a 'validator' for one or more named properties. Designates a method as a validator, a method which receives the @@ -2307,9 +2313,18 @@ def validates(*names): an assertion to avoid recursion overflows. This is a reentrant condition which is not supported. + :param \*names: list of attribute names to be validated. + :param include_removes: if True, "remove" events will be + sent as well - the validation function must accept an additional + argument "is_remove" which will be a boolean. + + .. 
versionadded:: 0.7.7 + """ + include_removes = kw.pop('include_removes', False) def wrap(fn): fn.__sa_validators__ = names + fn.__sa_include_removes__ = include_removes return fn return wrap @@ -2320,7 +2335,7 @@ def _event_on_load(state, ctx): def _event_on_first_init(manager, cls): """Initial mapper compilation trigger. - + instrumentation calls this one when InstanceState is first generated, and is needed for legacy mutable attributes to work. @@ -2333,11 +2348,11 @@ def _event_on_first_init(manager, cls): def _event_on_init(state, args, kwargs): """Run init_instance hooks. - + This also includes mapper compilation, normally not needed here but helps with some piecemeal configuration scenarios (such as in the ORM tutorial). - + """ instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR) diff --git a/libs/sqlalchemy/orm/persistence.py b/libs/sqlalchemy/orm/persistence.py index 55b9bf84..5be57cce 100644 --- a/libs/sqlalchemy/orm/persistence.py +++ b/libs/sqlalchemy/orm/persistence.py @@ -1,5 +1,5 @@ # orm/persistence.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -23,7 +23,7 @@ from sqlalchemy.orm import attributes, sync, \ from sqlalchemy.orm.util import _state_mapper, state_str def save_obj(base_mapper, states, uowtransaction, single=False): - """Issue ``INSERT`` and/or ``UPDATE`` statements for a list + """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects. 
This is called within the context of a UOWTransaction during a @@ -40,30 +40,30 @@ def save_obj(base_mapper, states, uowtransaction, single=False): return states_to_insert, states_to_update = _organize_states_for_save( - base_mapper, - states, + base_mapper, + states, uowtransaction) cached_connections = _cached_connection_dict(base_mapper) for table, mapper in base_mapper._sorted_tables.iteritems(): - insert = _collect_insert_commands(base_mapper, uowtransaction, + insert = _collect_insert_commands(base_mapper, uowtransaction, table, states_to_insert) - update = _collect_update_commands(base_mapper, uowtransaction, + update = _collect_update_commands(base_mapper, uowtransaction, table, states_to_update) if update: - _emit_update_statements(base_mapper, uowtransaction, - cached_connections, + _emit_update_statements(base_mapper, uowtransaction, + cached_connections, mapper, table, update) if insert: - _emit_insert_statements(base_mapper, uowtransaction, - cached_connections, + _emit_insert_statements(base_mapper, uowtransaction, + cached_connections, table, insert) - _finalize_insert_update_commands(base_mapper, uowtransaction, + _finalize_insert_update_commands(base_mapper, uowtransaction, states_to_insert, states_to_update) def post_update(base_mapper, states, uowtransaction, post_update_cols): @@ -74,18 +74,18 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols): cached_connections = _cached_connection_dict(base_mapper) states_to_update = _organize_states_for_post_update( - base_mapper, + base_mapper, states, uowtransaction) for table, mapper in base_mapper._sorted_tables.iteritems(): - update = _collect_post_update_commands(base_mapper, uowtransaction, - table, states_to_update, + update = _collect_post_update_commands(base_mapper, uowtransaction, + table, states_to_update, post_update_cols) if update: - _emit_post_update_statements(base_mapper, uowtransaction, - cached_connections, + _emit_post_update_statements(base_mapper, 
uowtransaction, + cached_connections, mapper, table, update) def delete_obj(base_mapper, states, uowtransaction): @@ -99,19 +99,19 @@ def delete_obj(base_mapper, states, uowtransaction): cached_connections = _cached_connection_dict(base_mapper) states_to_delete = _organize_states_for_delete( - base_mapper, + base_mapper, states, uowtransaction) table_to_mapper = base_mapper._sorted_tables for table in reversed(table_to_mapper.keys()): - delete = _collect_delete_commands(base_mapper, uowtransaction, + delete = _collect_delete_commands(base_mapper, uowtransaction, table, states_to_delete) mapper = table_to_mapper[table] - _emit_delete_statements(base_mapper, uowtransaction, + _emit_delete_statements(base_mapper, uowtransaction, cached_connections, mapper, table, delete) for state, state_dict, mapper, has_identity, connection \ @@ -121,20 +121,20 @@ def delete_obj(base_mapper, states, uowtransaction): def _organize_states_for_save(base_mapper, states, uowtransaction): """Make an initial pass across a set of states for INSERT or UPDATE. - + This includes splitting out into distinct lists for each, calling before_insert/before_update, obtaining key information for each state including its dictionary, mapper, the connection to use for the execution per state, and the identity flag. - + """ states_to_insert = [] states_to_update = [] for state, dict_, mapper, connection in _connections_for_states( - base_mapper, uowtransaction, + base_mapper, uowtransaction, states): has_identity = bool(state.key) @@ -148,9 +148,9 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): else: mapper.dispatch.before_update(mapper, connection, state) - # detect if we have a "pending" instance (i.e. has - # no instance_key attached to it), and another instance - # with the same identity key already exists as persistent. + # detect if we have a "pending" instance (i.e. 
has + # no instance_key attached to it), and another instance + # with the same identity key already exists as persistent. # convert to an UPDATE if so. if not has_identity and \ instance_key in uowtransaction.session.identity_map: @@ -160,14 +160,14 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): if not uowtransaction.is_deleted(existing): raise orm_exc.FlushError( "New instance %s with identity key %s conflicts " - "with persistent instance %s" % + "with persistent instance %s" % (state_str(state), instance_key, state_str(existing))) base_mapper._log_debug( "detected row switch for identity %s. " "will update %s, remove %s from " - "transaction", instance_key, + "transaction", instance_key, state_str(state), state_str(existing)) # remove the "delete" flag from the existing element @@ -176,55 +176,55 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): if not has_identity and not row_switch: states_to_insert.append( - (state, dict_, mapper, connection, + (state, dict_, mapper, connection, has_identity, instance_key, row_switch) ) else: states_to_update.append( - (state, dict_, mapper, connection, + (state, dict_, mapper, connection, has_identity, instance_key, row_switch) ) return states_to_insert, states_to_update -def _organize_states_for_post_update(base_mapper, states, +def _organize_states_for_post_update(base_mapper, states, uowtransaction): """Make an initial pass across a set of states for UPDATE corresponding to post_update. - - This includes obtaining key information for each state - including its dictionary, mapper, the connection to use for + + This includes obtaining key information for each state + including its dictionary, mapper, the connection to use for the execution per state. 
- + """ - return list(_connections_for_states(base_mapper, uowtransaction, + return list(_connections_for_states(base_mapper, uowtransaction, states)) def _organize_states_for_delete(base_mapper, states, uowtransaction): """Make an initial pass across a set of states for DELETE. - + This includes calling out before_delete and obtaining key information for each state including its dictionary, mapper, the connection to use for the execution per state. - + """ states_to_delete = [] for state, dict_, mapper, connection in _connections_for_states( - base_mapper, uowtransaction, + base_mapper, uowtransaction, states): mapper.dispatch.before_delete(mapper, connection, state) - states_to_delete.append((state, dict_, mapper, + states_to_delete.append((state, dict_, mapper, bool(state.key), connection)) return states_to_delete -def _collect_insert_commands(base_mapper, uowtransaction, table, +def _collect_insert_commands(base_mapper, uowtransaction, table, states_to_insert): """Identify sets of values to use in INSERT statements for a list of states. - + """ insert = [] for state, state_dict, mapper, connection, has_identity, \ @@ -242,7 +242,7 @@ def _collect_insert_commands(base_mapper, uowtransaction, table, if col is mapper.version_id_col: params[col.key] = mapper.version_id_generator(None) else: - # pull straight from the dict for + # pull straight from the dict for # pending objects prop = mapper._columntoproperty[col] value = state_dict.get(prop.key, None) @@ -259,15 +259,15 @@ def _collect_insert_commands(base_mapper, uowtransaction, table, else: params[col.key] = value - insert.append((state, state_dict, params, mapper, + insert.append((state, state_dict, params, mapper, connection, value_params, has_all_pks)) return insert -def _collect_update_commands(base_mapper, uowtransaction, +def _collect_update_commands(base_mapper, uowtransaction, table, states_to_update): """Identify sets of values to use in UPDATE statements for a list of states. 
- + This function works intricately with the history system to determine exactly what values should be updated as well as how the row should be matched within an UPDATE @@ -292,14 +292,14 @@ def _collect_update_commands(base_mapper, uowtransaction, if col is mapper.version_id_col: params[col._label] = \ mapper._get_committed_state_attr_by_column( - row_switch or state, - row_switch and row_switch.dict + row_switch or state, + row_switch and row_switch.dict or state_dict, col) prop = mapper._columntoproperty[col] history = attributes.get_state_history( - state, prop.key, + state, prop.key, attributes.PASSIVE_NO_INITIALIZE ) if history.added: @@ -309,20 +309,20 @@ def _collect_update_commands(base_mapper, uowtransaction, params[col.key] = mapper.version_id_generator( params[col._label]) - # HACK: check for history, in case the + # HACK: check for history, in case the # history is only - # in a different table than the one + # in a different table than the one # where the version_id_col is. 
for prop in mapper._columntoproperty.itervalues(): history = attributes.get_state_history( - state, prop.key, + state, prop.key, attributes.PASSIVE_NO_INITIALIZE) if history.added: hasdata = True else: prop = mapper._columntoproperty[col] history = attributes.get_state_history( - state, prop.key, + state, prop.key, attributes.PASSIVE_NO_INITIALIZE) if history.added: if isinstance(history.added[0], @@ -344,7 +344,7 @@ def _collect_update_commands(base_mapper, uowtransaction, value = history.added[0] params[col._label] = value else: - # use the old value to + # use the old value to # locate the row value = history.deleted[0] params[col._label] = value @@ -374,12 +374,12 @@ def _collect_update_commands(base_mapper, uowtransaction, "Can't update table " "using NULL for primary " "key value") - update.append((state, state_dict, params, mapper, + update.append((state, state_dict, params, mapper, connection, value_params)) return update -def _collect_post_update_commands(base_mapper, uowtransaction, table, +def _collect_post_update_commands(base_mapper, uowtransaction, table, states_to_update, post_update_cols): """Identify sets of values to use in UPDATE statements for a list of states within a post_update operation. 
@@ -403,20 +403,20 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table, elif col in post_update_cols: prop = mapper._columntoproperty[col] history = attributes.get_state_history( - state, prop.key, + state, prop.key, attributes.PASSIVE_NO_INITIALIZE) if history.added: value = history.added[0] params[col.key] = value hasdata = True if hasdata: - update.append((state, state_dict, params, mapper, + update.append((state, state_dict, params, mapper, connection)) return update -def _collect_delete_commands(base_mapper, uowtransaction, table, +def _collect_delete_commands(base_mapper, uowtransaction, table, states_to_delete): - """Identify values to use in DELETE statements for a list of + """Identify values to use in DELETE statements for a list of states to be deleted.""" delete = util.defaultdict(list) @@ -448,7 +448,7 @@ def _collect_delete_commands(base_mapper, uowtransaction, table, return delete -def _emit_update_statements(base_mapper, uowtransaction, +def _emit_update_statements(base_mapper, uowtransaction, cached_connections, mapper, table, update): """Emit UPDATE statements corresponding to value lists collected by _collect_update_commands().""" @@ -466,7 +466,7 @@ def _emit_update_statements(base_mapper, uowtransaction, if needs_version_id: clause.clauses.append(mapper.version_id_col ==\ sql.bindparam(mapper.version_id_col._label, - type_=col.type)) + type_=mapper.version_id_col.type)) return table.update(clause) @@ -486,13 +486,13 @@ def _emit_update_statements(base_mapper, uowtransaction, _postfetch( mapper, - uowtransaction, - table, - state, - state_dict, - c.context.prefetch_cols, + uowtransaction, + table, + state, + state_dict, + c.context.prefetch_cols, c.context.postfetch_cols, - c.context.compiled_parameters[0], + c.context.compiled_parameters[0], value_params) rows += c.rowcount @@ -505,11 +505,11 @@ def _emit_update_statements(base_mapper, uowtransaction, elif needs_version_id: util.warn("Dialect %s does not support updated 
rowcount " - "- versioning cannot be verified." % + "- versioning cannot be verified." % c.dialect.dialect_description, stacklevel=12) -def _emit_insert_statements(base_mapper, uowtransaction, +def _emit_insert_statements(base_mapper, uowtransaction, cached_connections, table, insert): """Emit INSERT statements corresponding to value lists collected by _collect_insert_commands().""" @@ -517,10 +517,10 @@ def _emit_insert_statements(base_mapper, uowtransaction, statement = base_mapper._memo(('insert', table), table.insert) for (connection, pkeys, hasvalue, has_all_pks), \ - records in groupby(insert, - lambda rec: (rec[4], - rec[2].keys(), - bool(rec[5]), + records in groupby(insert, + lambda rec: (rec[4], + rec[2].keys(), + bool(rec[5]), rec[6]) ): if has_all_pks and not hasvalue: @@ -529,19 +529,19 @@ def _emit_insert_statements(base_mapper, uowtransaction, c = cached_connections[connection].\ execute(statement, multiparams) - for (state, state_dict, params, mapper, + for (state, state_dict, params, mapper, conn, value_params, has_all_pks), \ last_inserted_params in \ zip(records, c.context.compiled_parameters): _postfetch( mapper, - uowtransaction, + uowtransaction, table, - state, + state, state_dict, c.context.prefetch_cols, c.context.postfetch_cols, - last_inserted_params, + last_inserted_params, value_params) else: @@ -561,31 +561,31 @@ def _emit_insert_statements(base_mapper, uowtransaction, if primary_key is not None: # set primary key attributes - for pk, col in zip(primary_key, + for pk, col in zip(primary_key, mapper._pks_by_table[table]): prop = mapper._columntoproperty[col] if state_dict.get(prop.key) is None: # TODO: would rather say: #state_dict[prop.key] = pk mapper._set_state_attr_by_column( - state, - state_dict, + state, + state_dict, col, pk) _postfetch( mapper, - uowtransaction, - table, - state, + uowtransaction, + table, + state, state_dict, - result.context.prefetch_cols, + result.context.prefetch_cols, result.context.postfetch_cols, - 
result.context.compiled_parameters[0], + result.context.compiled_parameters[0], value_params) -def _emit_post_update_statements(base_mapper, uowtransaction, +def _emit_post_update_statements(base_mapper, uowtransaction, cached_connections, mapper, table, update): """Emit UPDATE statements corresponding to value lists collected by _collect_post_update_commands().""" @@ -603,19 +603,19 @@ def _emit_post_update_statements(base_mapper, uowtransaction, # execute each UPDATE in the order according to the original # list of states to guarantee row access order, but - # also group them into common (connection, cols) sets + # also group them into common (connection, cols) sets # to support executemany(). for key, grouper in groupby( update, lambda rec: (rec[4], rec[2].keys()) ): connection = key[0] - multiparams = [params for state, state_dict, + multiparams = [params for state, state_dict, params, mapper, conn in grouper] cached_connections[connection].\ execute(statement, multiparams) -def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, +def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, mapper, table, delete): """Emit DELETE statements corresponding to value lists collected by _collect_delete_commands().""" @@ -631,9 +631,9 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, if need_version_id: clause.clauses.append( - mapper.version_id_col == + mapper.version_id_col == sql.bindparam( - mapper.version_id_col.key, + mapper.version_id_col.key, type_=mapper.version_id_col.type ) ) @@ -657,13 +657,13 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, if rows != len(del_objects): raise orm_exc.StaleDataError( "DELETE statement on table '%s' expected to " - "delete %d row(s); %d were matched." % + "delete %d row(s); %d were matched." 
% (table.description, len(del_objects), c.rowcount) ) else: util.warn( "Dialect %s does not support deleted rowcount " - "- versioning cannot be verified." % + "- versioning cannot be verified." % connection.dialect.dialect_description, stacklevel=12) connection.execute(statement, del_objects) @@ -671,11 +671,11 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, connection.execute(statement, del_objects) -def _finalize_insert_update_commands(base_mapper, uowtransaction, +def _finalize_insert_update_commands(base_mapper, uowtransaction, states_to_insert, states_to_update): """finalize state on states that have been inserted or updated, including calling after_insert/after_update events. - + """ for state, state_dict, mapper, connection, has_identity, \ instance_key, row_switch in states_to_insert + \ @@ -683,7 +683,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, if mapper._readonly_props: readonly = state.unmodified_intersection( - [p.key for p in mapper._readonly_props + [p.key for p in mapper._readonly_props if p.expire_on_flush or p.key not in state.dict] ) if readonly: @@ -703,7 +703,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, else: mapper.dispatch.after_update(mapper, connection, state) -def _postfetch(mapper, uowtransaction, table, +def _postfetch(mapper, uowtransaction, table, state, dict_, prefetch_cols, postfetch_cols, params, value_params): """Expire attributes in need of newly persisted database state, @@ -718,9 +718,9 @@ def _postfetch(mapper, uowtransaction, table, mapper._set_state_attr_by_column(state, dict_, c, params[c.key]) if postfetch_cols: - state.expire_attributes(state.dict, - [mapper._columntoproperty[c].key - for c in postfetch_cols if c in + state.expire_attributes(state.dict, + [mapper._columntoproperty[c].key + for c in postfetch_cols if c in mapper._columntoproperty] ) @@ -728,33 +728,35 @@ def _postfetch(mapper, uowtransaction, table, # TODO: this still goes 
a little too often. would be nice to # have definitive list of "columns that changed" here for m, equated_pairs in mapper._table_to_equated[table]: - sync.populate(state, m, state, m, - equated_pairs, + sync.populate(state, m, state, m, + equated_pairs, uowtransaction, mapper.passive_updates) def _connections_for_states(base_mapper, uowtransaction, states): """Return an iterator of (state, state.dict, mapper, connection). - + The states are sorted according to _sort_states, then paired with the connection they should be using for the given unit of work transaction. - + """ # if session has a connection callable, - # organize individual states with the connection + # organize individual states with the connection # to use for update if uowtransaction.session.connection_callable: connection_callable = \ uowtransaction.session.connection_callable else: - connection = uowtransaction.transaction.connection( - base_mapper) + connection = None connection_callable = None for state in _sort_states(states): if connection_callable: connection = connection_callable(base_mapper, state.obj()) + elif not connection: + connection = uowtransaction.transaction.connection( + base_mapper) mapper = _state_mapper(state) diff --git a/libs/sqlalchemy/orm/properties.py b/libs/sqlalchemy/orm/properties.py index 59c4cb3d..204232cf 100644 --- a/libs/sqlalchemy/orm/properties.py +++ b/libs/sqlalchemy/orm/properties.py @@ -1,5 +1,5 @@ # orm/properties.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -33,9 +33,9 @@ from descriptor_props import CompositeProperty, SynonymProperty, \ class ColumnProperty(StrategizedProperty): """Describes an object attribute that corresponds to a table column. - + Public constructor is the :func:`.orm.column_property` function. 
- + """ def __init__(self, *columns, **kwargs): @@ -62,7 +62,7 @@ class ColumnProperty(StrategizedProperty): """ self._orig_columns = [expression._labeled(c) for c in columns] - self.columns = [expression._labeled(_orm_deannotate(c)) + self.columns = [expression._labeled(_orm_deannotate(c)) for c in columns] self.group = kwargs.pop('group', None) self.deferred = kwargs.pop('deferred', False) @@ -88,7 +88,7 @@ class ColumnProperty(StrategizedProperty): if kwargs: raise TypeError( "%s received unexpected keyword argument(s): %s" % ( - self.__class__.__name__, + self.__class__.__name__, ', '.join(sorted(kwargs.keys())))) util.set_creation_order(self) @@ -104,9 +104,9 @@ class ColumnProperty(StrategizedProperty): return attributes.register_descriptor( - mapper.class_, - self.key, - comparator=self.comparator_factory(self, mapper), + mapper.class_, + self.key, + comparator=self.comparator_factory(self, mapper), parententity=mapper, doc=self.doc ) @@ -124,19 +124,21 @@ class ColumnProperty(StrategizedProperty): def copy(self): return ColumnProperty( - deferred=self.deferred, - group=self.group, + deferred=self.deferred, + group=self.group, active_history=self.active_history, *self.columns) - def _getcommitted(self, state, dict_, column, + def _getcommitted(self, state, dict_, column, passive=attributes.PASSIVE_OFF): return state.get_impl(self.key).\ get_committed_value(state, dict_, passive=passive) - def merge(self, session, source_state, source_dict, dest_state, + def merge(self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive): - if self.key in source_dict: + if not self.instrument: + return + elif self.key in source_dict: value = source_dict[self.key] if not load: @@ -144,9 +146,8 @@ class ColumnProperty(StrategizedProperty): else: impl = dest_state.get_impl(self.key) impl.set(dest_state, dest_dict, value, None) - else: - if dest_state.has_identity and self.key not in dest_dict: - dest_state.expire_attributes(dest_dict, [self.key]) + elif 
dest_state.has_identity and self.key not in dest_dict: + dest_state.expire_attributes(dest_dict, [self.key]) class Comparator(PropComparator): @util.memoized_instancemethod @@ -176,20 +177,20 @@ log.class_logger(ColumnProperty) class RelationshipProperty(StrategizedProperty): """Describes an object property that holds a single item or list of items that correspond to a related database table. - + Public constructor is the :func:`.orm.relationship` function. - + Of note here is the :class:`.RelationshipProperty.Comparator` class, which implements comparison operations for scalar- and collection-referencing mapped attributes. - + """ strategy_wildcard_key = 'relationship:*' def __init__(self, argument, secondary=None, primaryjoin=None, - secondaryjoin=None, + secondaryjoin=None, foreign_keys=None, uselist=None, order_by=False, @@ -207,7 +208,7 @@ class RelationshipProperty(StrategizedProperty): active_history=False, cascade_backrefs=True, load_on_pending=False, - strategy_class=None, _local_remote_pairs=None, + strategy_class=None, _local_remote_pairs=None, query_class=None): self.uselist = uselist @@ -256,7 +257,7 @@ class RelationshipProperty(StrategizedProperty): self.cascade = CascadeOptions("save-update, merge") if self.passive_deletes == 'all' and \ - ("delete" in self.cascade or + ("delete" in self.cascade or "delete-orphan" in self.cascade): raise sa_exc.ArgumentError( "Can't set passive_deletes='all' in conjunction " @@ -278,9 +279,9 @@ class RelationshipProperty(StrategizedProperty): def instrument_class(self, mapper): attributes.register_descriptor( - mapper.class_, - self.key, - comparator=self.comparator_factory(self, mapper), + mapper.class_, + self.key, + comparator=self.comparator_factory(self, mapper), parententity=mapper, doc=self.doc, ) @@ -292,7 +293,7 @@ class RelationshipProperty(StrategizedProperty): def __init__(self, prop, mapper, of_type=None, adapter=None): """Construction of :class:`.RelationshipProperty.Comparator` is internal to the 
ORM's attribute mechanics. - + """ self.prop = prop self.mapper = mapper @@ -322,29 +323,23 @@ class RelationshipProperty(StrategizedProperty): else: return elem - def operate(self, op, *other, **kwargs): - return op(self, *other, **kwargs) - - def reverse_operate(self, op, other, **kwargs): - return op(self, *other, **kwargs) - def of_type(self, cls): """Produce a construct that represents a particular 'subtype' of attribute for the parent class. - + Currently this is usable in conjunction with :meth:`.Query.join` and :meth:`.Query.outerjoin`. - + """ return RelationshipProperty.Comparator( - self.property, - self.mapper, + self.property, + self.mapper, cls, adapter=self.adapter) def in_(self, other): - """Produce an IN clause - this is not implemented + """Produce an IN clause - this is not implemented for :func:`~.orm.relationship`-based attributes at this time. - + """ raise NotImplementedError('in_() not yet supported for ' 'relationships. For a simple many-to-one, use ' @@ -361,20 +356,20 @@ class RelationshipProperty(StrategizedProperty): this will typically produce a clause such as:: - + mytable.related_id == - - Where ```` is the primary key of the given + + Where ```` is the primary key of the given object. - + The ``==`` operator provides partial functionality for non- many-to-one comparisons: - + * Comparisons against collections are not supported. Use :meth:`~.RelationshipProperty.Comparator.contains`. - * Compared to a scalar one-to-many, will produce a + * Compared to a scalar one-to-many, will produce a clause that compares the target columns in the parent to - the given target. + the given target. * Compared to a scalar many-to-many, an alias of the association table will be rendered as well, forming a natural join that is part of the @@ -448,9 +443,9 @@ class RelationshipProperty(StrategizedProperty): # limit this adapter to annotated only? 
criterion = target_adapter.traverse(criterion) - # only have the "joined left side" of what we + # only have the "joined left side" of what we # return be subject to Query adaption. The right - # side of it is used for an exists() subquery and + # side of it is used for an exists() subquery and # should not correlate or otherwise reach out # to anything in the enclosing query. if criterion is not None: @@ -464,42 +459,42 @@ class RelationshipProperty(StrategizedProperty): def any(self, criterion=None, **kwargs): """Produce an expression that tests a collection against particular criterion, using EXISTS. - + An expression like:: - + session.query(MyClass).filter( MyClass.somereference.any(SomeRelated.x==2) ) - - + + Will produce a query like:: - + SELECT * FROM my_table WHERE - EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id + EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id AND related.x=2) - + Because :meth:`~.RelationshipProperty.Comparator.any` uses a correlated subquery, its performance is not nearly as good when compared against large target tables as that of using a join. - + :meth:`~.RelationshipProperty.Comparator.any` is particularly useful for testing for empty collections:: - + session.query(MyClass).filter( ~MyClass.somereference.any() ) - + will produce:: - + SELECT * FROM my_table WHERE NOT EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id) - + :meth:`~.RelationshipProperty.Comparator.any` is only valid for collections, i.e. a :func:`.relationship` that has ``uselist=True``. For scalar references, use :meth:`~.RelationshipProperty.Comparator.has`. - + """ if not self.property.uselist: raise sa_exc.InvalidRequestError( @@ -514,14 +509,14 @@ class RelationshipProperty(StrategizedProperty): particular criterion, using EXISTS. 
An expression like:: - + session.query(MyClass).filter( MyClass.somereference.has(SomeRelated.x==2) ) - - + + Will produce a query like:: - + SELECT * FROM my_table WHERE EXISTS (SELECT 1 FROM related WHERE related.id==my_table.related_id AND related.x=2) @@ -530,12 +525,12 @@ class RelationshipProperty(StrategizedProperty): a correlated subquery, its performance is not nearly as good when compared against large target tables as that of using a join. - + :meth:`~.RelationshipProperty.Comparator.has` is only valid for scalar references, i.e. a :func:`.relationship` that has ``uselist=False``. For collection references, use :meth:`~.RelationshipProperty.Comparator.any`. - + """ if self.property.uselist: raise sa_exc.InvalidRequestError( @@ -544,46 +539,46 @@ class RelationshipProperty(StrategizedProperty): return self._criterion_exists(criterion, **kwargs) def contains(self, other, **kwargs): - """Return a simple expression that tests a collection for + """Return a simple expression that tests a collection for containment of a particular item. - + :meth:`~.RelationshipProperty.Comparator.contains` is only valid for a collection, i.e. a :func:`~.orm.relationship` that implements one-to-many or many-to-many with ``uselist=True``. - - When used in a simple one-to-many context, an + + When used in a simple one-to-many context, an expression like:: - + MyClass.contains(other) - + Produces a clause like:: - + mytable.id == - + Where ```` is the value of the foreign key attribute on ``other`` which refers to the primary key of its parent object. From this it follows that :meth:`~.RelationshipProperty.Comparator.contains` is very useful when used with simple one-to-many operations. - + For many-to-many operations, the behavior of :meth:`~.RelationshipProperty.Comparator.contains` has more caveats. 
The association table will be rendered in the statement, producing an "implicit" join, that is, includes multiple tables in the FROM clause which are equated in the WHERE clause:: - + query(MyClass).filter(MyClass.contains(other)) - + Produces a query like:: - + SELECT * FROM my_table, my_association_table AS my_association_table_1 WHERE my_table.id = my_association_table_1.parent_id AND my_association_table_1.child_id = - + Where ```` would be the primary key of ``other``. From the above, it is clear that :meth:`~.RelationshipProperty.Comparator.contains` @@ -597,7 +592,7 @@ class RelationshipProperty(StrategizedProperty): a less-performant alternative using EXISTS, or refer to :meth:`.Query.outerjoin` as well as :ref:`ormtutorial_joins` for more details on constructing outer joins. - + """ if not self.property.uselist: raise sa_exc.InvalidRequestError( @@ -635,7 +630,7 @@ class RelationshipProperty(StrategizedProperty): adapt(x) == None) for (x, y) in self.property.local_remote_pairs]) - criterion = sql.and_(*[x==y for (x, y) in + criterion = sql.and_(*[x==y for (x, y) in zip( self.property.mapper.primary_key, self.property.\ @@ -648,26 +643,26 @@ class RelationshipProperty(StrategizedProperty): """Implement the ``!=`` operator. In a many-to-one context, such as:: - + MyClass.some_prop != - + This will typically produce a clause such as:: - + mytable.related_id != - + Where ```` is the primary key of the given object. - + The ``!=`` operator provides partial functionality for non- many-to-one comparisons: - + * Comparisons against collections are not supported. Use :meth:`~.RelationshipProperty.Comparator.contains` in conjunction with :func:`~.expression.not_`. - * Compared to a scalar one-to-many, will produce a + * Compared to a scalar one-to-many, will produce a clause that compares the target columns in the parent to - the given target. + the given target. 
* Compared to a scalar many-to-many, an alias of the association table will be rendered as well, forming a natural join that is part of the @@ -681,7 +676,7 @@ class RelationshipProperty(StrategizedProperty): membership tests. * Comparisons against ``None`` given in a one-to-many or many-to-many context produce an EXISTS clause. - + """ if isinstance(other, (NoneType, expression._Null)): if self.property.direction == MANYTOONE: @@ -702,26 +697,26 @@ class RelationshipProperty(StrategizedProperty): configure_mappers() return self.prop - def compare(self, op, value, - value_is_parent=False, + def compare(self, op, value, + value_is_parent=False, alias_secondary=True): if op == operators.eq: if value is None: if self.uselist: return ~sql.exists([1], self.primaryjoin) else: - return self._optimized_compare(None, + return self._optimized_compare(None, value_is_parent=value_is_parent, alias_secondary=alias_secondary) else: - return self._optimized_compare(value, + return self._optimized_compare(value, value_is_parent=value_is_parent, alias_secondary=alias_secondary) else: return op(self.comparator, value) - def _optimized_compare(self, value, value_is_parent=False, - adapt_source=None, + def _optimized_compare(self, value, value_is_parent=False, + adapt_source=None, alias_secondary=True): if value is not None: value = attributes.instance_state(value) @@ -733,12 +728,12 @@ class RelationshipProperty(StrategizedProperty): def __str__(self): return str(self.parent.class_.__name__) + "." 
+ self.key - def merge(self, + def merge(self, session, source_state, source_dict, dest_state, - dest_dict, + dest_dict, load, _recursive): if load: @@ -848,8 +843,8 @@ class RelationshipProperty(StrategizedProperty): raise AssertionError("Attribute '%s' on class '%s' " "doesn't handle objects " "of type '%s'" % ( - self.key, - self.parent.class_, + self.key, + self.parent.class_, c.__class__ )) @@ -877,11 +872,11 @@ class RelationshipProperty(StrategizedProperty): @util.memoized_property def mapper(self): - """Return the targeted :class:`.Mapper` for this + """Return the targeted :class:`.Mapper` for this :class:`.RelationshipProperty`. - + This is a lazy-initializing static attribute. - + """ if isinstance(self.argument, type): mapper_ = mapper.class_mapper(self.argument, @@ -905,8 +900,8 @@ class RelationshipProperty(StrategizedProperty): @util.memoized_property @util.deprecated("0.7", "Use .target") def table(self): - """Return the selectable linked to this - :class:`.RelationshipProperty` object's target + """Return the selectable linked to this + :class:`.RelationshipProperty` object's target :class:`.Mapper`.""" return self.target @@ -922,7 +917,7 @@ class RelationshipProperty(StrategizedProperty): super(RelationshipProperty, self).do_init() def _check_conflicts(self): - """Test that this relationship is legal, warn about + """Test that this relationship is legal, warn about inheritance conflicts.""" if not self.is_primary() \ @@ -949,11 +944,11 @@ class RelationshipProperty(StrategizedProperty): % (self.key, self.parent, inheriting)) def _process_dependent_arguments(self): - """Convert incoming configuration arguments to their + """Convert incoming configuration arguments to their proper form. - + Callables are resolved, ORM annotations removed. - + """ # accept callables for other attributes which may require # deferred initialization. 
This technique is used @@ -983,20 +978,20 @@ class RelationshipProperty(StrategizedProperty): # remote_side are all columns, not strings. if self.order_by is not False and self.order_by is not None: self.order_by = [ - expression._only_column_elements(x, "order_by") + expression._only_column_elements(x, "order_by") for x in util.to_list(self.order_by)] self._user_defined_foreign_keys = \ util.column_set( - expression._only_column_elements(x, "foreign_keys") + expression._only_column_elements(x, "foreign_keys") for x in util.to_column_set( self._user_defined_foreign_keys )) self.remote_side = \ util.column_set( - expression._only_column_elements(x, "remote_side") + expression._only_column_elements(x, "remote_side") for x in util.to_column_set(self.remote_side)) @@ -1010,10 +1005,10 @@ class RelationshipProperty(StrategizedProperty): def _determine_joins(self): """Determine the 'primaryjoin' and 'secondaryjoin' attributes, if not passed to the constructor already. - + This is based on analysis of the foreign key relationships between the parent and target mapped selectables. - + """ if self.secondaryjoin is not None and self.secondary is None: raise sa_exc.ArgumentError("Property '" + self.key @@ -1029,7 +1024,7 @@ class RelationshipProperty(StrategizedProperty): # for more specificity, then if not found will try the more # general mapped table, which in the case of inheritance is # a join. - return join_condition(mapper.mapped_table, table, + return join_condition(mapper.mapped_table, table, a_subset=mapper.local_table) try: @@ -1053,9 +1048,9 @@ class RelationshipProperty(StrategizedProperty): % self) def _columns_are_mapped(self, *cols): - """Return True if all columns in the given collection are + """Return True if all columns in the given collection are mapped by the tables referenced by this :class:`.Relationship`. 
- + """ for c in cols: if self.secondary is not None \ @@ -1070,11 +1065,11 @@ class RelationshipProperty(StrategizedProperty): """Determine a list of "source"/"destination" column pairs based on the given join condition, as well as the foreign keys argument. - + "source" would be a column referenced by a foreign key, and "destination" would be the column who has a foreign key reference to "source". - + """ fks = self._user_defined_foreign_keys @@ -1083,7 +1078,7 @@ class RelationshipProperty(StrategizedProperty): consider_as_foreign_keys=fks, any_operator=self.viewonly) - # couldn't find any fks, but we have + # couldn't find any fks, but we have # "secondary" - assume the "secondary" columns # are the fks if not eq_pairs and \ @@ -1108,19 +1103,19 @@ class RelationshipProperty(StrategizedProperty): # Filter out just to columns that are mapped. # If viewonly, allow pairs where the FK col # was part of "foreign keys" - the column it references - # may be in an un-mapped table - see + # may be in an un-mapped table - see # test.orm.test_relationships.ViewOnlyComplexJoin.test_basic # for an example of this. eq_pairs = [(l, r) for (l, r) in eq_pairs if self._columns_are_mapped(l, r) - or self.viewonly and + or self.viewonly and r in fks] if eq_pairs: return eq_pairs # from here below is just determining the best error message - # to report. Check for a join condition using any operator + # to report. Check for a join condition using any operator # (not just ==), perhaps they need to turn on "viewonly=True". if not self.viewonly and criterion_as_pairs(join_condition, consider_as_foreign_keys=self._user_defined_foreign_keys, @@ -1130,8 +1125,8 @@ class RelationshipProperty(StrategizedProperty): "foreign-key-equated, locally mapped column "\ "pairs for %s "\ "condition '%s' on relationship %s." 
% ( - primary and 'primaryjoin' or 'secondaryjoin', - join_condition, + primary and 'primaryjoin' or 'secondaryjoin', + join_condition, self ) @@ -1160,10 +1155,10 @@ class RelationshipProperty(StrategizedProperty): "have adequate ForeignKey and/or " "ForeignKeyConstraint objects established " "(in which case 'foreign_keys' is usually " - "unnecessary)?" + "unnecessary)?" % ( primary and 'primaryjoin' or 'secondaryjoin', - join_condition, + join_condition, self, primary and 'mapped' or 'secondary' )) @@ -1174,18 +1169,18 @@ class RelationshipProperty(StrategizedProperty): "referencing Column objects have a " "ForeignKey present, or are otherwise part " "of a ForeignKeyConstraint on their parent " - "Table, or specify the foreign_keys parameter " + "Table, or specify the foreign_keys parameter " "to this relationship." % ( - primary and 'primaryjoin' or 'secondaryjoin', - join_condition, + primary and 'primaryjoin' or 'secondaryjoin', + join_condition, self )) def _determine_synchronize_pairs(self): """Resolve 'primary'/foreign' column pairs from the primaryjoin and secondaryjoin arguments. 
- + """ if self.local_remote_pairs: if not self._user_defined_foreign_keys: @@ -1200,7 +1195,7 @@ class RelationshipProperty(StrategizedProperty): self.synchronize_pairs.append((r, l)) else: self.synchronize_pairs = self._sync_pairs_from_join( - self.primaryjoin, + self.primaryjoin, True) self._calculated_foreign_keys = util.column_set( @@ -1209,7 +1204,7 @@ class RelationshipProperty(StrategizedProperty): if self.secondaryjoin is not None: self.secondary_synchronize_pairs = self._sync_pairs_from_join( - self.secondaryjoin, + self.secondaryjoin, False) self._calculated_foreign_keys.update( r for (l, r) in @@ -1218,12 +1213,12 @@ class RelationshipProperty(StrategizedProperty): self.secondary_synchronize_pairs = None def _determine_direction(self): - """Determine if this relationship is one to many, many to one, + """Determine if this relationship is one to many, many to one, many to many. - + This is derived from the primaryjoin, presence of "secondary", and in the case of self-referential the "remote side". - + """ if self.secondaryjoin is not None: self.direction = MANYTOMANY @@ -1300,19 +1295,19 @@ class RelationshipProperty(StrategizedProperty): % self) def _determine_local_remote_pairs(self): - """Determine pairs of columns representing "local" to + """Determine pairs of columns representing "local" to "remote", where "local" columns are on the parent mapper, "remote" are on the target mapper. - + These pairs are used on the load side only to generate lazy loading clauses. """ if not self.local_remote_pairs and not self.remote_side: - # the most common, trivial case. Derive + # the most common, trivial case. Derive # local/remote pairs from the synchronize pairs. 
eq_pairs = util.unique_list( - self.synchronize_pairs + + self.synchronize_pairs + (self.secondary_synchronize_pairs or [])) if self.direction is MANYTOONE: self.local_remote_pairs = [(r, l) for l, r in eq_pairs] @@ -1474,8 +1469,8 @@ class RelationshipProperty(StrategizedProperty): if not self.viewonly and self._dependency_processor: self._dependency_processor.per_property_preprocessors(uow) - def _create_joins(self, source_polymorphic=False, - source_selectable=None, dest_polymorphic=False, + def _create_joins(self, source_polymorphic=False, + source_selectable=None, dest_polymorphic=False, dest_selectable=None, of_type=None): if source_selectable is None: if source_polymorphic and self.parent.with_polymorphic: @@ -1497,10 +1492,10 @@ class RelationshipProperty(StrategizedProperty): # place a barrier on the destination such that # replacement traversals won't ever dig into it. - # its internal structure remains fixed + # its internal structure remains fixed # regardless of context. dest_selectable = _shallow_annotate( - dest_selectable, + dest_selectable, {'no_replacement_traverse':True}) aliased = aliased or (source_selectable is not None) diff --git a/libs/sqlalchemy/orm/query.py b/libs/sqlalchemy/orm/query.py index aa3dd017..286dbf6b 100644 --- a/libs/sqlalchemy/orm/query.py +++ b/libs/sqlalchemy/orm/query.py @@ -1,19 +1,19 @@ # orm/query.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """The Query class and support. -Defines the :class:`.Query` class, the central +Defines the :class:`.Query` class, the central construct used by the ORM to construct database queries. The :class:`.Query` class should not be confused with the -:class:`.Select` class, which defines database -SELECT operations at the SQL (non-ORM) level. 
``Query`` differs from -``Select`` in that it returns ORM-mapped objects and interacts with an -ORM session, whereas the ``Select`` construct interacts directly with the +:class:`.Select` class, which defines database +SELECT operations at the SQL (non-ORM) level. ``Query`` differs from +``Select`` in that it returns ORM-mapped objects and interacts with an +ORM session, whereas the ``Select`` construct interacts directly with the database to return iterable result sets. """ @@ -55,14 +55,14 @@ class Query(object): """ORM-level SQL construction object. :class:`.Query` is the source of all SELECT statements generated by the - ORM, both those formulated by end-user query operations as well as by - high level internal operations such as related collection loading. It + ORM, both those formulated by end-user query operations as well as by + high level internal operations such as related collection loading. It features a generative interface whereby successive calls return a new - :class:`.Query` object, a copy of the former with additional + :class:`.Query` object, a copy of the former with additional criteria and options associated with it. - :class:`.Query` objects are normally initially generated using the - :meth:`~.Session.query` method of :class:`.Session`. For a full walkthrough + :class:`.Query` objects are normally initially generated using the + :meth:`~.Session.query` method of :class:`.Session`. For a full walkthrough of :class:`.Query` usage, see the :ref:`ormtutorial_toplevel`. 
""" @@ -77,6 +77,7 @@ class Query(object): _group_by = False _having = None _distinct = False + _prefixes = None _offset = None _limit = None _statement = None @@ -133,20 +134,20 @@ class Query(object): with_polymorphic = mapper._with_polymorphic_mappers if mapper.mapped_table not in \ self._polymorphic_adapters: - self._mapper_loads_polymorphically_with(mapper, + self._mapper_loads_polymorphically_with(mapper, sql_util.ColumnAdapter( - selectable, + selectable, mapper._equivalent_columns)) adapter = None elif is_aliased_class: adapter = sql_util.ColumnAdapter( - selectable, + selectable, mapper._equivalent_columns) with_polymorphic = None else: with_polymorphic = adapter = None - d[entity] = (mapper, adapter, selectable, + d[entity] = (mapper, adapter, selectable, is_aliased_class, with_polymorphic) ent.setup_entity(entity, *d[entity]) @@ -198,8 +199,8 @@ class Query(object): def _adapt_col_list(self, cols): return [ self._adapt_clause( - expression._literal_as_text(o), - True, True) + expression._literal_as_text(o), + True, True) for o in cols ] @@ -208,7 +209,7 @@ class Query(object): self._orm_only_adapt = False def _adapt_clause(self, clause, as_filter, orm_only): - """Adapt incoming clauses to transformations which have been applied + """Adapt incoming clauses to transformations which have been applied within this query.""" adapters = [] @@ -227,12 +228,12 @@ class Query(object): if self._from_obj_alias: # for the "from obj" alias, apply extra rule to the - # 'ORM only' check, if this query were generated from a + # 'ORM only' check, if this query were generated from a # subquery of itself, i.e. _from_selectable(), apply adaption # to all SQL constructs. 
adapters.append( ( - getattr(self, '_orm_only_from_obj_alias', orm_only), + getattr(self, '_orm_only_from_obj_alias', orm_only), self._from_obj_alias.replace ) ) @@ -260,8 +261,8 @@ class Query(object): return e return visitors.replacement_traverse( - clause, - {}, + clause, + {}, replace ) @@ -296,7 +297,7 @@ class Query(object): def _only_mapper_zero(self, rationale=None): if len(self._entities) > 1: raise sa_exc.InvalidRequestError( - rationale or + rationale or "This operation requires a Query against a single mapper." ) return self._mapper_zero() @@ -316,7 +317,7 @@ class Query(object): def _only_entity_zero(self, rationale=None): if len(self._entities) > 1: raise sa_exc.InvalidRequestError( - rationale or + rationale or "This operation requires a Query against a single mapper." ) return self._entity_zero() @@ -388,13 +389,13 @@ class Query(object): ): if getattr(self, attr) is not notset: raise sa_exc.InvalidRequestError( - "Can't call Query.%s() when %s has been called" % + "Can't call Query.%s() when %s has been called" % (meth, methname) ) - def _get_options(self, populate_existing=None, - version_check=None, - only_load_props=None, + def _get_options(self, populate_existing=None, + version_check=None, + only_load_props=None, refresh_state=None): if populate_existing: self._populate_existing = populate_existing @@ -431,17 +432,17 @@ class Query(object): return stmt._annotate({'no_replacement_traverse': True}) def subquery(self, name=None): - """return the full SELECT statement represented by this :class:`.Query`, + """return the full SELECT statement represented by this :class:`.Query`, embedded within an :class:`.Alias`. Eager JOIN generation within the query is disabled. The statement will not have disambiguating labels - applied to the list of selected columns unless the + applied to the list of selected columns unless the :meth:`.Query.with_labels` method is used to generate a new :class:`.Query` with the option enabled. 
- :param name: string name to be assigned as the alias; + :param name: string name to be assigned as the alias; this is passed through to :meth:`.FromClause.alias`. If ``None``, a name will be deterministically generated at compile time. @@ -454,13 +455,13 @@ class Query(object): """Return the full SELECT statement represented by this :class:`.Query` represented as a common table expression (CTE). - The :meth:`.Query.cte` method is new in 0.7.6. - - Parameters and usage are the same as those of the - :meth:`._SelectBase.cte` method; see that method for + .. versionadded:: 0.7.6 + + Parameters and usage are the same as those of the + :meth:`._SelectBase.cte` method; see that method for further details. - - Here is the `Postgresql WITH + + Here is the `Postgresql WITH RECURSIVE example `_. Note that, in this example, the ``included_parts`` cte and the ``incl_alias`` alias of it are Core selectables, which @@ -477,8 +478,8 @@ class Query(object): quantity = Column(Integer) included_parts = session.query( - Part.sub_part, - Part.part, + Part.sub_part, + Part.part, Part.quantity).\\ filter(Part.part=="our part").\\ cte(name="included_parts", recursive=True) @@ -487,8 +488,8 @@ class Query(object): parts_alias = aliased(Part, name="p") included_parts = included_parts.union_all( session.query( - parts_alias.part, - parts_alias.sub_part, + parts_alias.part, + parts_alias.sub_part, parts_alias.quantity).\\ filter(parts_alias.part==incl_alias.c.sub_part) ) @@ -500,19 +501,19 @@ class Query(object): group_by(included_parts.c.sub_part) See also: - + :meth:`._SelectBase.cte` """ return self.enable_eagerloads(False).statement.cte(name=name, recursive=recursive) def label(self, name): - """Return the full SELECT statement represented by this :class:`.Query`, converted + """Return the full SELECT statement represented by this :class:`.Query`, converted to a scalar subquery with a label of the given name. Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.label`. - New in 0.6.5. 
+ .. versionadded:: 0.6.5 """ @@ -520,12 +521,12 @@ class Query(object): def as_scalar(self): - """Return the full SELECT statement represented by this :class:`.Query`, converted + """Return the full SELECT statement represented by this :class:`.Query`, converted to a scalar subquery. Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.as_scalar`. - New in 0.6.5. + .. versionadded:: 0.6.5 """ @@ -537,7 +538,7 @@ class Query(object): @_generative() def enable_eagerloads(self, value): - """Control whether or not eager joins and subqueries are + """Control whether or not eager joins and subqueries are rendered. When set to False, the returned Query will not render @@ -573,17 +574,17 @@ class Query(object): def enable_assertions(self, value): """Control whether assertions are generated. - When set to False, the returned Query will - not assert its state before certain operations, + When set to False, the returned Query will + not assert its state before certain operations, including that LIMIT/OFFSET has not been applied when filter() is called, no criterion exists when get() is called, and no "from_statement()" exists when filter()/order_by()/group_by() etc. - is called. This more permissive mode is used by - custom Query subclasses to specify criterion or + is called. This more permissive mode is used by + custom Query subclasses to specify criterion or other modifiers outside of the usual usage patterns. - Care should be taken to ensure that the usage + Care should be taken to ensure that the usage pattern is even possible. A statement applied by from_statement() will override any criterion set by filter() or order_by(), for example. @@ -595,7 +596,7 @@ class Query(object): def whereclause(self): """A readonly attribute which returns the current WHERE criterion for this Query. - This returned value is a SQL expression construct, or ``None`` if no + This returned value is a SQL expression construct, or ``None`` if no criterion has been established. 
""" @@ -603,19 +604,19 @@ class Query(object): @_generative() def _with_current_path(self, path): - """indicate that this query applies to objects loaded + """indicate that this query applies to objects loaded within a certain path. - Used by deferred loaders (see strategies.py) which transfer - query options from an originating query to a newly generated + Used by deferred loaders (see strategies.py) which transfer + query options from an originating query to a newly generated query intended for the deferred load. """ self._current_path = path @_generative(_no_clauseelement_condition) - def with_polymorphic(self, - cls_or_mappers, + def with_polymorphic(self, + cls_or_mappers, selectable=None, discriminator=None): """Load columns for descendant mappers of this Query's mapper. @@ -653,8 +654,8 @@ class Query(object): "No primary mapper set up for this Query.") entity = self._entities[0]._clone() self._entities = [entity] + self._entities[1:] - entity.set_with_polymorphic(self, - cls_or_mappers, + entity.set_with_polymorphic(self, + cls_or_mappers, selectable=selectable, discriminator=discriminator) @@ -667,15 +668,15 @@ class Query(object): overwritten. In particular, it's usually impossible to use this setting with - eagerly loaded collections (i.e. any lazy='joined' or 'subquery') - since those collections will be cleared for a new load when + eagerly loaded collections (i.e. any lazy='joined' or 'subquery') + since those collections will be cleared for a new load when encountered in a subsequent result batch. In the case of 'subquery' loading, the full result for all rows is fetched which generally defeats the purpose of :meth:`~sqlalchemy.orm.query.Query.yield_per`. Also note that many DBAPIs do not "stream" results, pre-buffering - all rows before making them available, including mysql-python and - psycopg2. :meth:`~sqlalchemy.orm.query.Query.yield_per` will also + all rows before making them available, including mysql-python and + psycopg2. 
:meth:`~sqlalchemy.orm.query.Query.yield_per` will also set the ``stream_results`` execution option to ``True``, which currently is only understood by psycopg2 and causes server side cursors to be used. @@ -686,33 +687,33 @@ class Query(object): self._execution_options['stream_results'] = True def get(self, ident): - """Return an instance based on the given primary key identifier, + """Return an instance based on the given primary key identifier, or ``None`` if not found. - + E.g.:: - + my_user = session.query(User).get(5) - + some_object = session.query(VersionedFoo).get((5, 10)) - - :meth:`~.Query.get` is special in that it provides direct + + :meth:`~.Query.get` is special in that it provides direct access to the identity map of the owning :class:`.Session`. If the given primary key identifier is present in the local identity map, the object is returned - directly from this collection and no SQL is emitted, + directly from this collection and no SQL is emitted, unless the object has been marked fully expired. If not present, a SELECT is performed in order to locate the object. - - :meth:`~.Query.get` also will perform a check if - the object is present in the identity map and - marked as expired - a SELECT + + :meth:`~.Query.get` also will perform a check if + the object is present in the identity map and + marked as expired - a SELECT is emitted to refresh the object as well as to ensure that the row is still present. If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. - + :meth:`~.Query.get` is only used to return a single - mapped instance, not multiple instances or + mapped instance, not multiple instances or individual column constructs, and strictly on a single primary key value. The originating :class:`.Query` must be constructed in this way, @@ -721,26 +722,26 @@ class Query(object): options via :meth:`~.Query.options` may be applied however, and will be used if the object is not yet locally present. 
- + A lazy-loading, many-to-one attribute configured by :func:`.relationship`, using a simple - foreign-key-to-primary-key criterion, will also use an + foreign-key-to-primary-key criterion, will also use an operation equivalent to :meth:`~.Query.get` in order to retrieve the target value from the local identity map before querying the database. See :ref:`loading_toplevel` for further details on relationship loading. - + :param ident: A scalar or tuple value representing the primary key. For a composite primary key, the order of identifiers corresponds in most cases - to that of the mapped :class:`.Table` object's + to that of the mapped :class:`.Table` object's primary key columns. For a :func:`.mapper` that was given the ``primary key`` argument during - construction, the order of identifiers corresponds + construction, the order of identifiers corresponds to the elements present in this collection. :return: The object instance, or ``None``. - + """ # convert composite types to individual args @@ -788,14 +789,14 @@ class Query(object): :meth:`.Select.correlate` after coercion to expression constructs. The correlation arguments take effect in such cases - as when :meth:`.Query.from_self` is used, or when - a subquery as returned by :meth:`.Query.subquery` is + as when :meth:`.Query.from_self` is used, or when + a subquery as returned by :meth:`.Query.subquery` is embedded in another :func:`~.expression.select` construct. """ self._correlate = self._correlate.union( - _orm_selectable(s) + _orm_selectable(s) for s in args) @_generative() @@ -812,11 +813,11 @@ class Query(object): @_generative() def populate_existing(self): - """Return a :class:`.Query` that will expire and refresh all instances + """Return a :class:`.Query` that will expire and refresh all instances as they are loaded, or reused from the current :class:`.Session`. 
- :meth:`.populate_existing` does not improve behavior when - the ORM is used normally - the :class:`.Session` object's usual + :meth:`.populate_existing` does not improve behavior when + the ORM is used normally - the :class:`.Session` object's usual behavior of maintaining a transaction and expiring all attributes after rollback or commit handles object state automatically. This method is not intended for general use. @@ -829,7 +830,7 @@ class Query(object): """Set the 'invoke all eagers' flag which causes joined- and subquery loaders to traverse into already-loaded related objects and collections. - + Default is that of :attr:`.Query._invoke_all_eagers`. """ @@ -837,7 +838,7 @@ class Query(object): def with_parent(self, instance, property=None): """Add filtering criterion that relates the given instance - to a child object or collection, using its attribute state + to a child object or collection, using its attribute state as well as an established :func:`.relationship()` configuration. @@ -862,7 +863,7 @@ class Query(object): else: raise sa_exc.InvalidRequestError( "Could not locate a property which relates instances " - "of class '%s' to instances of class '%s'" % + "of class '%s' to instances of class '%s'" % ( self._mapper_zero().class_.__name__, instance.__class__.__name__) @@ -872,7 +873,7 @@ class Query(object): @_generative() def add_entity(self, entity, alias=None): - """add a mapped entity to the list of result columns + """add a mapped entity to the list of result columns to be returned.""" if alias is not None: @@ -891,7 +892,7 @@ class Query(object): self.session = session def from_self(self, *entities): - """return a Query that selects from this Query's + """return a Query that selects from this Query's SELECT statement. 
\*entities - optional list of entities which will replace @@ -913,11 +914,12 @@ class Query(object): @_generative() def _from_selectable(self, fromclause): for attr in ( - '_statement', '_criterion', + '_statement', '_criterion', '_order_by', '_group_by', - '_limit', '_offset', - '_joinpath', '_joinpoint', - '_distinct', '_having' + '_limit', '_offset', + '_joinpath', '_joinpoint', + '_distinct', '_having', + '_prefixes', ): self.__dict__.pop(attr, None) self._set_select_from(fromclause) @@ -932,7 +934,7 @@ class Query(object): e.adapt_to_selectable(self, self._from_obj[0]) def values(self, *columns): - """Return an iterator yielding result tuples corresponding + """Return an iterator yielding result tuples corresponding to the given list of columns""" if not columns: @@ -945,7 +947,7 @@ class Query(object): _values = values def value(self, column): - """Return a scalar result corresponding to the given + """Return a scalar result corresponding to the given column expression.""" try: # Py3K @@ -970,7 +972,7 @@ class Query(object): filter(User.name.like('%ed%')).\\ order_by(Address.email) - # given *only* User.id==5, Address.email, and 'q', what + # given *only* User.id==5, Address.email, and 'q', what # would the *next* User in the result be ? subq = q.with_entities(Address.email).\\ order_by(None).\\ @@ -979,7 +981,7 @@ class Query(object): q = q.join((subq, subq.c.email < Address.email)).\\ limit(1) - New in 0.6.5. + .. 
versionadded:: 0.6.5 """ self._set_entities(entities) @@ -987,7 +989,7 @@ class Query(object): @_generative() def add_columns(self, *column): - """Add one or more column expressions to the list + """Add one or more column expressions to the list of result columns to be returned.""" self._entities = list(self._entities) @@ -998,13 +1000,13 @@ class Query(object): # given arg is a FROM clause self._setup_aliasizers(self._entities[l:]) - @util.pending_deprecation("0.7", - ":meth:`.add_column` is superseded by :meth:`.add_columns`", + @util.pending_deprecation("0.7", + ":meth:`.add_column` is superseded by :meth:`.add_columns`", False) def add_column(self, column): """Add a column expression to the list of result columns to be returned. - Pending deprecation: :meth:`.add_column` will be superseded by + Pending deprecation: :meth:`.add_column` will be superseded by :meth:`.add_columns`. """ @@ -1043,33 +1045,33 @@ class Query(object): def with_transformation(self, fn): """Return a new :class:`.Query` object transformed by the given function. - + E.g.:: - + def filter_something(criterion): def transform(q): return q.filter(criterion) return transform - + q = q.with_transformation(filter_something(x==5)) - + This allows ad-hoc recipes to be created for :class:`.Query` objects. See the example at :ref:`hybrid_transformers`. - :meth:`~.Query.with_transformation` is new in SQLAlchemy 0.7.4. + .. versionadded:: 0.7.4 """ return fn(self) @_generative() def with_hint(self, selectable, text, dialect_name='*'): - """Add an indexing hint for the given entity or selectable to + """Add an indexing hint for the given entity or selectable to this :class:`.Query`. 
- Functionality is passed straight through to - :meth:`~sqlalchemy.sql.expression.Select.with_hint`, - with the addition that ``selectable`` can be a - :class:`.Table`, :class:`.Alias`, or ORM entity / mapped class + Functionality is passed straight through to + :meth:`~sqlalchemy.sql.expression.Select.with_hint`, + with the addition that ``selectable`` can be a + :class:`.Table`, :class:`.Alias`, or ORM entity / mapped class /etc. """ mapper, selectable, is_aliased_class = _entity_info(selectable) @@ -1080,7 +1082,7 @@ class Query(object): def execution_options(self, **kwargs): """ Set non-SQL options which take effect during execution. - The options are the same as those accepted by + The options are the same as those accepted by :meth:`.Connection.execution_options`. Note that the ``stream_results`` execution option is enabled @@ -1103,17 +1105,25 @@ class Query(object): ``FOR UPDATE`` (standard SQL, supported by most dialects) ``'update_nowait'`` - passes ``for_update='nowait'``, which - translates to ``FOR UPDATE NOWAIT`` (supported by Oracle) + translates to ``FOR UPDATE NOWAIT`` (supported by Oracle, + PostgreSQL 8.1 upwards) ``'read'`` - passes ``for_update='read'``, which translates to - ``LOCK IN SHARE MODE`` (supported by MySQL). + ``LOCK IN SHARE MODE`` (for MySQL), and ``FOR SHARE`` (for + PostgreSQL) + + ``'read_nowait'`` - passes ``for_update='read_nowait'``, which + translates to ``FOR SHARE NOWAIT`` (supported by PostgreSQL). + + .. versionadded:: 0.7.7 + ``FOR SHARE`` and ``FOR SHARE NOWAIT`` (PostgreSQL). """ self._lockmode = mode @_generative() def params(self, *args, **kwargs): - """add values for bind parameters which may have been + """add values for bind parameters which may have been specified in filter(). parameters may be specified using \**kwargs, or optionally a single @@ -1137,20 +1147,23 @@ class Query(object): of this :class:`.Query`, using SQL expressions. 
e.g.:: - + session.query(MyClass).filter(MyClass.name == 'some name') - - Multiple criteria are joined together by AND (new in 0.7.5):: - + + Multiple criteria are joined together by AND:: + session.query(MyClass).\\ filter(MyClass.name == 'some name', MyClass.id > 5) - - The criterion is any SQL expression object applicable to the + + The criterion is any SQL expression object applicable to the WHERE clause of a select. String expressions are coerced into SQL expression constructs via the :func:`.text` construct. + .. versionchanged:: 0.7.5 + Multiple criteria joined by AND. + See also: - + :meth:`.Query.filter_by` - filter on keyword expressions. """ @@ -1174,24 +1187,24 @@ class Query(object): def filter_by(self, **kwargs): """apply the given filtering criterion to a copy of this :class:`.Query`, using keyword expressions. - + e.g.:: - + session.query(MyClass).filter_by(name = 'some name') - + Multiple criteria are joined together by AND:: - + session.query(MyClass).\\ filter_by(name = 'some name', id = 5) - - The keyword expressions are extracted from the primary - entity of the query, or the last entity that was the + + The keyword expressions are extracted from the primary + entity of the query, or the last entity that was the target of a call to :meth:`.Query.join`. - + See also: - + :meth:`.Query.filter` - filter on SQL expressions. - + """ clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value @@ -1200,15 +1213,15 @@ class Query(object): @_generative(_no_statement_condition, _no_limit_offset) def order_by(self, *criterion): - """apply one or more ORDER BY criterion to the query and return + """apply one or more ORDER BY criterion to the query and return the newly resulting ``Query`` - All existing ORDER BY settings can be suppressed by + All existing ORDER BY settings can be suppressed by passing ``None`` - this will suppress any ORDER BY configured on mappers as well. 
Alternatively, an existing ORDER BY setting on the Query - object can be entirely cancelled by passing ``False`` + object can be entirely cancelled by passing ``False`` as the value - use this before calling methods where an ORDER BY is invalid. @@ -1232,8 +1245,8 @@ class Query(object): @_generative(_no_statement_condition, _no_limit_offset) def group_by(self, *criterion): - """apply one or more GROUP BY criterion to the query and return - the newly resulting ``Query``""" + """apply one or more GROUP BY criterion to the query and return + the newly resulting :class:`.Query`""" criterion = list(chain(*[_orm_columns(c) for c in criterion])) @@ -1246,8 +1259,20 @@ class Query(object): @_generative(_no_statement_condition, _no_limit_offset) def having(self, criterion): - """apply a HAVING criterion to the query and return the - newly resulting ``Query``.""" + """apply a HAVING criterion to the query and return the + newly resulting :class:`.Query`. + + :meth:`having` is used in conjunction with :meth:`group_by`. + + HAVING criterion makes it possible to use filters on aggregate + functions like COUNT, SUM, AVG, MAX, and MIN, eg.:: + + q = session.query(User.id).\\ + join(User.addresses).\\ + group_by(User.id).\\ + having(func.count(Address.id) > 2) + + """ if isinstance(criterion, basestring): criterion = sql.text(criterion) @@ -1282,7 +1307,7 @@ class Query(object): will nest on each ``union()``, and produces:: - SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION + SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y) UNION SELECT * FROM Z) Whereas:: @@ -1291,14 +1316,14 @@ class Query(object): produces:: - SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION + SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION SELECT * FROM Z) Note that many database backends do not allow ORDER BY to be rendered on a query called within UNION, EXCEPT, etc. 
To disable all ORDER BY clauses including those configured on mappers, issue ``query.order_by(None)`` - the resulting - :class:`.Query` object will not render ORDER BY within + :class:`.Query` object will not render ORDER BY within its SELECT statement. """ @@ -1365,254 +1390,251 @@ class Query(object): def join(self, *props, **kwargs): """Create a SQL JOIN against this :class:`.Query` object's criterion and apply generatively, returning the newly resulting :class:`.Query`. - + **Simple Relationship Joins** - + Consider a mapping between two classes ``User`` and ``Address``, - with a relationship ``User.addresses`` representing a collection - of ``Address`` objects associated with each ``User``. The most common + with a relationship ``User.addresses`` representing a collection + of ``Address`` objects associated with each ``User``. The most common usage of :meth:`~.Query.join` is to create a JOIN along this relationship, using the ``User.addresses`` attribute as an indicator for how this should occur:: - + q = session.query(User).join(User.addresses) - - Where above, the call to :meth:`~.Query.join` along ``User.addresses`` + + Where above, the call to :meth:`~.Query.join` along ``User.addresses`` will result in SQL equivalent to:: - + SELECT user.* FROM user JOIN address ON user.id = address.user_id - + In the above example we refer to ``User.addresses`` as passed to :meth:`~.Query.join` as the *on clause*, that is, it indicates - how the "ON" portion of the JOIN should be constructed. For a + how the "ON" portion of the JOIN should be constructed. For a single-entity query such as the one above (i.e. 
we start by selecting only from - ``User`` and nothing else), the relationship can also be specified by its + ``User`` and nothing else), the relationship can also be specified by its string name:: - + q = session.query(User).join("addresses") - - :meth:`~.Query.join` can also accommodate multiple + + :meth:`~.Query.join` can also accommodate multiple "on clause" arguments to produce a chain of joins, such as below where a join across four related entities is constructed:: - + q = session.query(User).join("orders", "items", "keywords") - + The above would be shorthand for three separate calls to :meth:`~.Query.join`, each using an explicit attribute to indicate the source entity:: - + q = session.query(User).\\ join(User.orders).\\ join(Order.items).\\ join(Item.keywords) - + **Joins to a Target Entity or Selectable** - + A second form of :meth:`~.Query.join` allows any mapped entity - or core selectable construct as a target. In this usage, + or core selectable construct as a target. In this usage, :meth:`~.Query.join` will attempt to create a JOIN along the natural foreign key relationship between two entities:: - + q = session.query(User).join(Address) - - The above calling form of :meth:`.join` will raise an error if - either there are no foreign keys between the two entities, or if + + The above calling form of :meth:`.join` will raise an error if + either there are no foreign keys between the two entities, or if there are multiple foreign key linkages between them. In the - above calling form, :meth:`~.Query.join` is called upon to + above calling form, :meth:`~.Query.join` is called upon to create the "on clause" automatically for us. The target can be any mapped entity or selectable, such as a :class:`.Table`:: - + q = session.query(User).join(addresses_table) - + **Joins to a Target with an ON Clause** - + The third calling form allows both the target entity as well - as the ON clause to be passed explicitly. 
Suppose for + as the ON clause to be passed explicitly. Suppose for example we wanted to join to ``Address`` twice, using - an alias the second time. We use :func:`~sqlalchemy.orm.aliased` + an alias the second time. We use :func:`~sqlalchemy.orm.aliased` to create a distinct alias of ``Address``, and join - to it using the ``target, onclause`` form, so that the + to it using the ``target, onclause`` form, so that the alias can be specified explicitly as the target along with the relationship to instruct how the ON clause should proceed:: - + a_alias = aliased(Address) - + q = session.query(User).\\ join(User.addresses).\\ join(a_alias, User.addresses).\\ filter(Address.email_address=='ed@foo.com').\\ filter(a_alias.email_address=='ed@bar.com') - + Where above, the generated SQL would be similar to:: - - SELECT user.* FROM user + + SELECT user.* FROM user JOIN address ON user.id = address.user_id JOIN address AS address_1 ON user.id=address_1.user_id WHERE address.email_address = :email_address_1 AND address_1.email_address = :email_address_2 - - The two-argument calling form of :meth:`~.Query.join` + + The two-argument calling form of :meth:`~.Query.join` also allows us to construct arbitrary joins with SQL-oriented "on clause" expressions, not relying upon configured relationships at all. Any SQL expression can be passed as the ON clause when using the two-argument form, which should refer to the target entity in some way as well as an applicable source entity:: - + q = session.query(User).join(Address, User.id==Address.user_id) - - .. 
note:: - - In SQLAlchemy 0.6 and earlier, the two argument form of - :meth:`~.Query.join` requires the usage of a tuple:: - - query(User).join((Address, User.id==Address.user_id)) - - This calling form is accepted in 0.7 and further, though - is not necessary unless multiple join conditions are passed to - a single :meth:`~.Query.join` call, which itself is also not - generally necessary as it is now equivalent to multiple - calls (this wasn't always the case). - + + .. versionchanged:: 0.7 + In SQLAlchemy 0.6 and earlier, the two argument form of + :meth:`~.Query.join` requires the usage of a tuple: + ``query(User).join((Address, User.id==Address.user_id))``\ . + This calling form is accepted in 0.7 and further, though + is not necessary unless multiple join conditions are passed to + a single :meth:`~.Query.join` call, which itself is also not + generally necessary as it is now equivalent to multiple + calls (this wasn't always the case). + **Advanced Join Targeting and Adaption** - There is a lot of flexibility in what the "target" can be when using - :meth:`~.Query.join`. As noted previously, it also accepts - :class:`.Table` constructs and other selectables such as :func:`.alias` + There is a lot of flexibility in what the "target" can be when using + :meth:`~.Query.join`. As noted previously, it also accepts + :class:`.Table` constructs and other selectables such as :func:`.alias` and :func:`.select` constructs, with either the one or two-argument forms:: addresses_q = select([Address.user_id]).\\ - filter(Address.email_address.endswith("@bar.com")).\\ + where(Address.email_address.endswith("@bar.com")).\\ alias() q = session.query(User).\\ join(addresses_q, addresses_q.c.user_id==User.id) - - :meth:`~.Query.join` also features the ability to *adapt* a + + :meth:`~.Query.join` also features the ability to *adapt* a :meth:`~sqlalchemy.orm.relationship` -driven ON clause to the target selectable. 
Below we construct a JOIN from ``User`` to a subquery against ``Address``, allowing the relationship denoted by ``User.addresses`` to *adapt* itself to the altered target:: - + address_subq = session.query(Address).\\ filter(Address.email_address == 'ed@foo.com').\\ subquery() q = session.query(User).join(address_subq, User.addresses) - + Producing SQL similar to:: - - SELECT user.* FROM user + + SELECT user.* FROM user JOIN ( - SELECT address.id AS id, - address.user_id AS user_id, - address.email_address AS email_address - FROM address + SELECT address.id AS id, + address.user_id AS user_id, + address.email_address AS email_address + FROM address WHERE address.email_address = :email_address_1 ) AS anon_1 ON user.id = anon_1.user_id - + The above form allows one to fall back onto an explicit ON clause at any time:: - + q = session.query(User).\\ join(address_subq, User.id==address_subq.c.user_id) - + **Controlling what to Join From** - + While :meth:`~.Query.join` exclusively deals with the "right" side of the JOIN, we can also control the "left" side, in those cases where it's needed, using :meth:`~.Query.select_from`. Below we construct a query against ``Address`` but can still make usage of ``User.addresses`` as our ON clause by instructing - the :class:`.Query` to select first from the ``User`` + the :class:`.Query` to select first from the ``User`` entity:: - + q = session.query(Address).select_from(User).\\ join(User.addresses).\\ filter(User.name == 'ed') - + Which will produce SQL similar to:: - - SELECT address.* FROM user - JOIN address ON user.id=address.user_id + + SELECT address.* FROM user + JOIN address ON user.id=address.user_id WHERE user.name = :name_1 - + **Constructing Aliases Anonymously** - + :meth:`~.Query.join` can construct anonymous aliases using the ``aliased=True`` flag. 
This feature is useful when a query is being joined algorithmically, such as when querying self-referentially to an arbitrary depth:: - + q = session.query(Node).\\ join("children", "children", aliased=True) - + When ``aliased=True`` is used, the actual "alias" construct - is not explicitly available. To work with it, methods such as - :meth:`.Query.filter` will adapt the incoming entity to + is not explicitly available. To work with it, methods such as + :meth:`.Query.filter` will adapt the incoming entity to the last join point:: - + q = session.query(Node).\\ join("children", "children", aliased=True).\\ filter(Node.name == 'grandchild 1') - + When using automatic aliasing, the ``from_joinpoint=True`` argument can allow a multi-node join to be broken into multiple calls to :meth:`~.Query.join`, so that each path along the way can be further filtered:: - + q = session.query(Node).\\ join("children", aliased=True).\\ filter(Node.name='child 1').\\ join("children", aliased=True, from_joinpoint=True).\\ filter(Node.name == 'grandchild 1') - + The filtering aliases above can then be reset back to the original ``Node`` entity using :meth:`~.Query.reset_joinpoint`:: - + q = session.query(Node).\\ join("children", "children", aliased=True).\\ filter(Node.name == 'grandchild 1').\\ reset_joinpoint().\\ filter(Node.name == 'parent 1) - - For an example of ``aliased=True``, see the distribution + + For an example of ``aliased=True``, see the distribution example :ref:`examples_xmlpersistence` which illustrates an XPath-like query system using algorithmic joins. 
- - :param *props: A collection of one or more join conditions, - each consisting of a relationship-bound attribute or string - relationship name representing an "on clause", or a single + + :param *props: A collection of one or more join conditions, + each consisting of a relationship-bound attribute or string + relationship name representing an "on clause", or a single target entity, or a tuple in the form of ``(target, onclause)``. A special two-argument calling form of the form ``target, onclause`` is also accepted. - :param aliased=False: If True, indicate that the JOIN target should be + :param aliased=False: If True, indicate that the JOIN target should be anonymously aliased. Subsequent calls to :class:`~.Query.filter` - and similar will adapt the incoming criterion to the target + and similar will adapt the incoming criterion to the target alias, until :meth:`~.Query.reset_joinpoint` is called. :param from_joinpoint=False: When using ``aliased=True``, a setting of True here will cause the join to be from the most recent - joined target, rather than starting back from the original + joined target, rather than starting back from the original FROM clauses of the query. - + See also: - + :ref:`ormtutorial_joins` in the ORM tutorial. :ref:`inheritance_toplevel` for details on how :meth:`~.Query.join` is used for inheritance relationships. - + :func:`.orm.join` - a standalone ORM-level join function, - used internally by :meth:`.Query.join`, which in previous + used internally by :meth:`.Query.join`, which in previous SQLAlchemy versions was the primary ORM-level joining interface. 
- + """ aliased, from_joinpoint = kwargs.pop('aliased', False),\ kwargs.pop('from_joinpoint', False) if kwargs: raise TypeError("unknown arguments: %s" % ','.join(kwargs.iterkeys())) - return self._join(props, - outerjoin=False, create_aliases=aliased, + return self._join(props, + outerjoin=False, create_aliases=aliased, from_joinpoint=from_joinpoint) def outerjoin(self, *props, **kwargs): @@ -1627,8 +1649,8 @@ class Query(object): if kwargs: raise TypeError("unknown arguments: %s" % ','.join(kwargs.iterkeys())) - return self._join(props, - outerjoin=True, create_aliases=aliased, + return self._join(props, + outerjoin=True, create_aliases=aliased, from_joinpoint=from_joinpoint) def _update_joinpoint(self, jp): @@ -1654,9 +1676,9 @@ class Query(object): self._reset_joinpoint() if len(keys) == 2 and \ - isinstance(keys[0], (expression.FromClause, + isinstance(keys[0], (expression.FromClause, type, AliasedClass)) and \ - isinstance(keys[1], (basestring, expression.ClauseElement, + isinstance(keys[1], (basestring, expression.ClauseElement, interfaces.PropComparator)): # detect 2-arg form of join and # convert to a tuple. @@ -1666,7 +1688,7 @@ class Query(object): if isinstance(arg1, tuple): # "tuple" form of join, multiple # tuples are accepted as well. The simpler - # "2-arg" form is preferred. May deprecate + # "2-arg" form is preferred. May deprecate # the "tuple" usage. 
arg1, arg2 = arg1 else: @@ -1740,11 +1762,11 @@ class Query(object): raise NotImplementedError("query.join(a==b) not supported.") self._join_left_to_right( - left_entity, - right_entity, onclause, + left_entity, + right_entity, onclause, outerjoin, create_aliases, prop) - def _join_left_to_right(self, left, right, + def _join_left_to_right(self, left, right, onclause, outerjoin, create_aliases, prop): """append a JOIN to the query's from clause.""" @@ -1760,12 +1782,12 @@ class Query(object): not create_aliases: raise sa_exc.InvalidRequestError( "Can't construct a join from %s to %s, they " - "are the same entity" % + "are the same entity" % (left, right)) right, right_is_aliased, onclause = self._prepare_right_side( right, onclause, - outerjoin, create_aliases, + outerjoin, create_aliases, prop) # if joining on a MapperProperty path, @@ -1780,11 +1802,11 @@ class Query(object): '_joinpoint_entity':right } - self._join_to_left(left, right, - right_is_aliased, + self._join_to_left(left, right, + right_is_aliased, onclause, outerjoin) - def _prepare_right_side(self, right, onclause, outerjoin, + def _prepare_right_side(self, right, onclause, outerjoin, create_aliases, prop): right_mapper, right_selectable, right_is_aliased = _entity_info(right) @@ -1835,11 +1857,11 @@ class Query(object): # until reset_joinpoint() is called. 
if need_adapter: self._filter_aliases = ORMAdapter(right, - equivalents=right_mapper and + equivalents=right_mapper and right_mapper._equivalent_columns or {}, chain_to=self._filter_aliases) - # if the onclause is a ClauseElement, adapt it with any + # if the onclause is a ClauseElement, adapt it with any # adapters that are in place right now if isinstance(onclause, expression.ClauseElement): onclause = self._adapt_clause(onclause, True, True) @@ -1852,7 +1874,7 @@ class Query(object): self._mapper_loads_polymorphically_with( right_mapper, ORMAdapter( - right, + right, equivalents=right_mapper._equivalent_columns ) ) @@ -1862,19 +1884,19 @@ class Query(object): def _join_to_left(self, left, right, right_is_aliased, onclause, outerjoin): left_mapper, left_selectable, left_is_aliased = _entity_info(left) - # this is an overly broad assumption here, but there's a + # this is an overly broad assumption here, but there's a # very wide variety of situations where we rely upon orm.join's # adaption to glue clauses together, with joined-table inheritance's # wide array of variables taking up most of the space. # Setting the flag here is still a guess, so it is a bug - # that we don't have definitive criterion to determine when - # adaption should be enabled (or perhaps that we're even doing the + # that we don't have definitive criterion to determine when + # adaption should be enabled (or perhaps that we're even doing the # whole thing the way we are here). join_to_left = not right_is_aliased and not left_is_aliased if self._from_obj and left_selectable is not None: replace_clause_index, clause = sql_util.find_join_source( - self._from_obj, + self._from_obj, left_selectable) if clause is not None: # the entire query's FROM clause is an alias of itself (i.e. 
@@ -1890,9 +1912,9 @@ class Query(object): join_to_left = False try: - clause = orm_join(clause, - right, - onclause, isouter=outerjoin, + clause = orm_join(clause, + right, + onclause, isouter=outerjoin, join_to_left=join_to_left) except sa_exc.ArgumentError, ae: raise sa_exc.InvalidRequestError( @@ -1922,7 +1944,7 @@ class Query(object): "Could not find a FROM clause to join from") try: - clause = orm_join(clause, right, onclause, + clause = orm_join(clause, right, onclause, isouter=outerjoin, join_to_left=join_to_left) except sa_exc.ArgumentError, ae: raise sa_exc.InvalidRequestError( @@ -1939,10 +1961,10 @@ class Query(object): def reset_joinpoint(self): """Return a new :class:`.Query`, where the "join point" has been reset back to the base FROM entities of the query. - + This method is usually used in conjunction with the ``aliased=True`` feature of the :meth:`~.Query.join` - method. See the example in :meth:`~.Query.join` for how + method. See the example in :meth:`~.Query.join` for how this is used. """ @@ -1953,15 +1975,15 @@ class Query(object): """Set the FROM clause of this :class:`.Query` explicitly. Sending a mapped class or entity here effectively replaces the - "left edge" of any calls to :meth:`~.Query.join`, when no + "left edge" of any calls to :meth:`~.Query.join`, when no joinpoint is otherwise established - usually, the default "join point" is the leftmost entity in the :class:`~.Query` object's list of entities to be selected. Mapped entities or plain :class:`~.Table` or other selectables can be sent here which will form the default FROM clause. - - See the example in :meth:`~.Query.join` for a typical + + See the example in :meth:`~.Query.join` for a typical usage of :meth:`~.Query.select_from`. """ @@ -2048,14 +2070,41 @@ class Query(object): construct. 
""" - if not criterion: - self._distinct = True - else: + if not criterion: + self._distinct = True + else: criterion = self._adapt_col_list(criterion) if isinstance(self._distinct, list): self._distinct += criterion - else: - self._distinct = criterion + else: + self._distinct = criterion + + @_generative() + def prefix_with(self, *prefixes): + """Apply the prefixes to the query and return the newly resulting + ``Query``. + + :param \*prefixes: optional prefixes, typically strings, + not using any commas. In particular is useful for MySQL keywords. + + e.g.:: + + query = sess.query(User.name).\\ + prefix_with('HIGH_PRIORITY').\\ + prefix_with('SQL_SMALL_RESULT', 'ALL') + + Would render:: + + SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL users.name AS users_name + FROM users + + .. versionadded:: 0.7.7 + + """ + if self._prefixes: + self._prefixes += prefixes + else: + self._prefixes = prefixes def all(self): """Return the results represented by this ``Query`` as a list. @@ -2080,7 +2129,7 @@ class Query(object): if isinstance(statement, basestring): statement = sql.text(statement) - if not isinstance(statement, + if not isinstance(statement, (expression._TextClause, expression._SelectBase)): raise sa_exc.ArgumentError( @@ -2090,12 +2139,12 @@ class Query(object): self._statement = statement def first(self): - """Return the first result of this ``Query`` or + """Return the first result of this ``Query`` or None if the result doesn't contain any row. first() applies a limit of one within the generated SQL, so that - only one primary entity row is generated on the server side - (note this may consist of multiple result rows if join-loaded + only one primary entity row is generated on the server side + (note this may consist of multiple result rows if join-loaded collections are present). Calling ``first()`` results in an execution of the underlying query. @@ -2113,21 +2162,23 @@ class Query(object): def one(self): """Return exactly one result or raise an exception. 
- Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects - no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` + Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects + no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` if multiple object identities are returned, or if multiple rows are returned for a query that does not return object identities. Note that an entity query, that is, one which selects one or more mapped classes as opposed to individual column attributes, - may ultimately represent many rows but only one row of + may ultimately represent many rows but only one row of unique entity or entities - this is a successful result for one(). Calling ``one()`` results in an execution of the underlying query. - As of 0.6, ``one()`` fully fetches all results instead of applying - any kind of limit, so that the "unique"-ing of entities does not - conceal multiple object identities. + + .. versionchanged:: 0.6 + ``one()`` fully fetches all results instead of applying + any kind of limit, so that the "unique"-ing of entities does not + conceal multiple object identities. """ ret = list(self) @@ -2193,7 +2244,7 @@ class Query(object): @property def column_descriptions(self): - """Return metadata about the columns which would be + """Return metadata about the columns which would be returned by this :class:`.Query`. Format is a list of dictionaries:: @@ -2334,7 +2385,7 @@ class Query(object): this :class:`.Query` - if these do not correspond, unchecked errors will occur. The 'load' argument is the same as that of :meth:`.Session.merge`. 
- + For an example of how :meth:`~.Query.merge_result` is used, see the source code for the example :ref:`examples_caching`, where :meth:`~.Query.merge_result` is used to efficiently restore state @@ -2354,24 +2405,26 @@ class Query(object): if single_entity: if isinstance(self._entities[0], _MapperEntity): result = [session._merge( - attributes.instance_state(instance), - attributes.instance_dict(instance), + attributes.instance_state(instance), + attributes.instance_dict(instance), load=load, _recursive={}) for instance in iterator] else: result = list(iterator) else: - mapped_entities = [i for i, e in enumerate(self._entities) + mapped_entities = [i for i, e in enumerate(self._entities) if isinstance(e, _MapperEntity)] result = [] + keys = [ent._label_name for ent in self._entities] for row in iterator: newrow = list(row) for i in mapped_entities: - newrow[i] = session._merge( - attributes.instance_state(newrow[i]), - attributes.instance_dict(newrow[i]), - load=load, _recursive={}) - result.append(util.NamedTuple(newrow, row._labels)) + if newrow[i] is not None: + newrow[i] = session._merge( + attributes.instance_state(newrow[i]), + attributes.instance_dict(newrow[i]), + load=load, _recursive={}) + result.append(util.NamedTuple(newrow, keys)) return iter(result) finally: @@ -2379,7 +2432,7 @@ class Query(object): @classmethod def _get_from_identity(cls, session, key, passive): - """Look up the given key in the given session's identity map, + """Look up the given key in the given session's identity map, check the object for expired state if found. """ @@ -2468,6 +2521,7 @@ class Query(object): 'limit':self._limit, 'offset':self._offset, 'distinct':self._distinct, + 'prefixes':self._prefixes, 'group_by':self._group_by or None, 'having':self._having } @@ -2481,39 +2535,39 @@ class Query(object): def count(self): """Return a count of rows this Query would return. 
- + This generates the SQL for this Query as follows:: - + SELECT count(1) AS count_1 FROM ( SELECT ) AS anon_1 - Note the above scheme is newly refined in 0.7 - (as of 0.7b3). - - For fine grained control over specific columns + .. versionchanged:: 0.7 + The above scheme is newly refined as of 0.7b3. + + For fine grained control over specific columns to count, to skip the usage of a subquery or otherwise control of the FROM clause, or to use other aggregate functions, use :attr:`~sqlalchemy.sql.expression.func` expressions in conjunction with :meth:`~.Session.query`, i.e.:: - + from sqlalchemy import func - + # count User records, without # using a subquery. session.query(func.count(User.id)) - + # return count of user "id" grouped # by "name" session.query(func.count(User.id)).\\ group_by(User.name) from sqlalchemy import distinct - + # count distinct "name" values session.query(func.count(distinct(User.name))) - + """ col = sql.func.count(sql.literal_column('*')) return self.from_self(col).scalar() @@ -2541,7 +2595,7 @@ class Query(object): ``'evaluate'`` - Evaluate the query's criteria in Python straight on the objects in the session. If evaluation of the criteria isn't - implemented, an error is raised. In that case you probably + implemented, an error is raised. In that case you probably want to use the 'fetch' strategy as a fallback. The expression evaluator currently doesn't account for differing @@ -2556,13 +2610,13 @@ class Query(object): state of dependent objects subject to delete or delete-orphan cascade to be correctly represented. - Note that the :meth:`.MapperEvents.before_delete` and + Note that the :meth:`.MapperEvents.before_delete` and :meth:`.MapperEvents.after_delete` events are **not** invoked from this method. It instead invokes :meth:`.SessionEvents.after_bulk_delete`. 
""" - #TODO: lots of duplication and ifs - probably needs to be + #TODO: lots of duplication and ifs - probably needs to be # refactored to strategies #TODO: cascades need handling. @@ -2682,7 +2736,7 @@ class Query(object): or call expire_all()) in order for the state of dependent objects subject foreign key cascade to be correctly represented. - Note that the :meth:`.MapperEvents.before_update` and + Note that the :meth:`.MapperEvents.before_update` and :meth:`.MapperEvents.after_update` events are **not** invoked from this method. It instead invokes :meth:`.SessionEvents.after_bulk_update`. @@ -2691,7 +2745,7 @@ class Query(object): #TODO: value keys need to be mapped to corresponding sql cols and # instr.attr.s to string keys - #TODO: updates of manytoone relationships need to be converted to + #TODO: updates of manytoone relationships need to be converted to # fk assignments #TODO: cascades need handling. @@ -2776,7 +2830,7 @@ class Query(object): state.commit(dict_, list(to_evaluate)) - # expire attributes with pending changes + # expire attributes with pending changes # (there was no autoflush, so they are overwritten) state.expire_attributes(dict_, set(evaluated_keys). @@ -2790,7 +2844,7 @@ class Query(object): list(primary_key)) if identity_key in session.identity_map: session.expire( - session.identity_map[identity_key], + session.identity_map[identity_key], [_attr_as_key(k) for k in values] ) @@ -2807,6 +2861,7 @@ class Query(object): if self._lockmode: try: for_update = {'read': 'read', + 'read_nowait': 'read_nowait', 'update': True, 'update_nowait': 'nowait', None: False}[self._lockmode] @@ -2826,11 +2881,11 @@ class Query(object): eager_joins = context.eager_joins.values() if context.from_clause: - # "load from explicit FROMs" mode, + # "load from explicit FROMs" mode, # i.e. when select_from() or join() is used froms = list(context.from_clause) else: - # "load from discrete FROMs" mode, + # "load from discrete FROMs" mode, # i.e. 
when each _MappedEntity has its own FROM froms = context.froms @@ -2849,7 +2904,7 @@ class Query(object): "SELECT from.") if context.multi_row_eager_loaders and self._should_nest_selectable: - # for eager joins present and LIMIT/OFFSET/DISTINCT, + # for eager joins present and LIMIT/OFFSET/DISTINCT, # wrap the query inside a select, # then append eager joins onto that @@ -2870,7 +2925,7 @@ class Query(object): from_obj=froms, use_labels=labels, correlate=False, - # TODO: this order_by is only needed if + # TODO: this order_by is only needed if # LIMIT/OFFSET is present in self._select_args, # else the application on the outside is enough order_by=context.order_by, @@ -2890,17 +2945,17 @@ class Query(object): context.adapter = sql_util.ColumnAdapter(inner, equivs) statement = sql.select( - [inner] + context.secondary_columns, - for_update=for_update, + [inner] + context.secondary_columns, + for_update=for_update, use_labels=labels) from_clause = inner for eager_join in eager_joins: # EagerLoader places a 'stop_on' attribute on the join, - # giving us a marker as to where the "splice point" of + # giving us a marker as to where the "splice point" of # the join should be from_clause = sql_util.splice_joins( - from_clause, + from_clause, eager_join, eager_join.stop_on) statement.append_from(from_clause) @@ -2920,7 +2975,7 @@ class Query(object): if self._distinct and context.order_by: order_by_col_expr = list( chain(*[ - sql_util.unwrap_order_by(o) + sql_util.unwrap_order_by(o) for o in context.order_by ]) ) @@ -3006,7 +3061,7 @@ class _MapperEntity(_QueryEntity): self.entities = [entity] self.entity_zero = self.expr = entity - def setup_entity(self, entity, mapper, adapter, + def setup_entity(self, entity, mapper, adapter, from_obj, is_aliased_class, with_polymorphic): self.mapper = mapper self.adapter = adapter @@ -3027,7 +3082,7 @@ class _MapperEntity(_QueryEntity): self._label_name = self.mapper.class_.__name__ - def set_with_polymorphic(self, query, 
cls_or_mappers, + def set_with_polymorphic(self, query, cls_or_mappers, selectable, discriminator): if cls_or_mappers is None: query._reset_polymorphic_adapter(self.mapper) @@ -3038,12 +3093,12 @@ class _MapperEntity(_QueryEntity): self._with_polymorphic = mappers self._polymorphic_discriminator = discriminator - # TODO: do the wrapped thing here too so that + # TODO: do the wrapped thing here too so that # with_polymorphic() can be applied to aliases if not self.is_aliased_class: self.selectable = from_obj - query._mapper_loads_polymorphically_with(self.mapper, - sql_util.ColumnAdapter(from_obj, + query._mapper_loads_polymorphically_with(self.mapper, + sql_util.ColumnAdapter(from_obj, self.mapper._equivalent_columns)) filter_fn = id @@ -3092,7 +3147,7 @@ class _MapperEntity(_QueryEntity): elif not adapter: adapter = context.adapter - # polymorphic mappers which have concrete tables in + # polymorphic mappers which have concrete tables in # their hierarchy usually # require row aliasing unconditionally. 
if not adapter and self.mapper._requires_row_aliasing: @@ -3102,7 +3157,7 @@ class _MapperEntity(_QueryEntity): if self.primary_entity: _instance = self.mapper._instance_processor( - context, + context, self._path, self._reduced_path, adapter, @@ -3113,7 +3168,7 @@ class _MapperEntity(_QueryEntity): ) else: _instance = self.mapper._instance_processor( - context, + context, self._path, self._reduced_path, adapter, @@ -3256,13 +3311,13 @@ class _ColumnEntity(_QueryEntity): def adapt_to_selectable(self, query, sel): c = _ColumnEntity(query, sel.corresponding_column(self.column)) - c._label_name = self._label_name + c._label_name = self._label_name c.entity_zero = self.entity_zero c.entities = self.entities def setup_entity(self, entity, mapper, adapter, from_obj, is_aliased_class, with_polymorphic): - if 'selectable' not in self.__dict__: + if 'selectable' not in self.__dict__: self.selectable = from_obj self.froms.add(from_obj) diff --git a/libs/sqlalchemy/orm/scoping.py b/libs/sqlalchemy/orm/scoping.py index 3c1cd7f2..b5bd65b2 100644 --- a/libs/sqlalchemy/orm/scoping.py +++ b/libs/sqlalchemy/orm/scoping.py @@ -1,5 +1,5 @@ # orm/scoping.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -16,10 +16,10 @@ __all__ = ['ScopedSession'] class ScopedSession(object): """Provides thread-local management of Sessions. - + Typical invocation is via the :func:`.scoped_session` function:: - + Session = scoped_session(sessionmaker()) The internal registry is accessible, @@ -71,7 +71,7 @@ class ScopedSession(object): self.session_factory.configure(**kwargs) def query_property(self, query_cls=None): - """return a class property which produces a `Query` object + """return a class property which produces a `Query` object against the class when called. 
e.g.:: @@ -122,7 +122,7 @@ def makeprop(name): def get(self): return getattr(self.registry(), name) return property(get, set) -for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map', +for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map', 'is_active', 'autoflush', 'no_autoflush'): setattr(ScopedSession, prop, makeprop(prop)) diff --git a/libs/sqlalchemy/orm/session.py b/libs/sqlalchemy/orm/session.py index 14778705..8994a339 100644 --- a/libs/sqlalchemy/orm/session.py +++ b/libs/sqlalchemy/orm/session.py @@ -1,5 +1,5 @@ # orm/session.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -68,9 +68,9 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False, Session.configure(bind=create_engine('sqlite:///foo.db')) sess = Session() - + For options, see the constructor options for :class:`.Session`. - + """ kwargs['bind'] = bind kwargs['autoflush'] = autoflush @@ -103,39 +103,79 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False, class SessionTransaction(object): - """A Session-level transaction. + """A :class:`.Session`-level transaction. - This corresponds to one or more Core :class:`~.engine.base.Transaction` - instances behind the scenes, with one :class:`~.engine.base.Transaction` - per :class:`~.engine.base.Engine` in use. + :class:`.SessionTransaction` is a mostly behind-the-scenes object + not normally referenced directly by application code. It coordinates + among multiple :class:`.Connection` objects, maintaining a database + transaction for each one individually, committing or rolling them + back all at once. It also provides optional two-phase commit behavior + which can augment this coordination operation. 
- Direct usage of :class:`.SessionTransaction` is not typically - necessary as of SQLAlchemy 0.4; use the :meth:`.Session.rollback` and - :meth:`.Session.commit` methods on :class:`.Session` itself to - control the transaction. - - The current instance of :class:`.SessionTransaction` for a given - :class:`.Session` is available via the :attr:`.Session.transaction` - attribute. + The :attr:`.Session.transaction` attribute of :class:`.Session` refers to the + current :class:`.SessionTransaction` object in use, if any. - The :class:`.SessionTransaction` object is **not** thread-safe. + + A :class:`.SessionTransaction` is associated with a :class:`.Session` + in its default mode of ``autocommit=False`` immediately, associated + with no database connections. As the :class:`.Session` is called upon + to emit SQL on behalf of various :class:`.Engine` or :class:`.Connection` + objects, a corresponding :class:`.Connection` and associated :class:`.Transaction` + is added to a collection within the :class:`.SessionTransaction` object, + becoming one of the connection/transaction pairs maintained by the + :class:`.SessionTransaction`. + + The lifespan of the :class:`.SessionTransaction` ends when the + :meth:`.Session.commit`, :meth:`.Session.rollback` or :meth:`.Session.close` + methods are called. At this point, the :class:`.SessionTransaction` removes + its association with its parent :class:`.Session`. A :class:`.Session` + that is in ``autocommit=False`` mode will create a new + :class:`.SessionTransaction` to replace it immediately, whereas a + :class:`.Session` that's in ``autocommit=True`` + mode will remain without a :class:`.SessionTransaction` until the + :meth:`.Session.begin` method is called. + + Another detail of :class:`.SessionTransaction` behavior is that it is + capable of "nesting". 
This means that the :meth:`.begin` method can + be called while an existing :class:`.SessionTransaction` is already present, + producing a new :class:`.SessionTransaction` that temporarily replaces + the parent :class:`.SessionTransaction`. When a :class:`.SessionTransaction` + is produced as nested, it assigns itself to the :attr:`.Session.transaction` + attribute. When it is ended via :meth:`.Session.commit` or :meth:`.Session.rollback`, + it restores its parent :class:`.SessionTransaction` back onto the + :attr:`.Session.transaction` attribute. The + behavior is effectively a stack, where :attr:`.Session.transaction` refers + to the current head of the stack. + + The purpose of this stack is to allow nesting of :meth:`.rollback` or + :meth:`.commit` calls in context with various flavors of :meth:`.begin`. + This nesting behavior applies to when :meth:`.Session.begin_nested` + is used to emit a SAVEPOINT transaction, and is also used to produce + a so-called "subtransaction" which allows a block of code to use a + begin/rollback/commit sequence regardless of whether or not its enclosing + code block has begun a transaction. The :meth:`.flush` method, whether called + explicitly or via autoflush, is the primary consumer of the "subtransaction" + feature, in that it wishes to guarantee that it works within in a transaction block + regardless of whether or not the :class:`.Session` is in transactional mode + when the method is called. See also: - + :meth:`.Session.rollback` - + :meth:`.Session.commit` + :meth:`.Session.begin` + + :meth:`.Session.begin_nested` + :attr:`.Session.is_active` - + :meth:`.SessionEvents.after_commit` - + :meth:`.SessionEvents.after_rollback` - + :meth:`.SessionEvents.after_soft_rollback` - - .. 
index:: - single: thread safety; SessionTransaction """ @@ -211,6 +251,7 @@ class SessionTransaction(object): if not self._is_transaction_boundary: self._new = self._parent._new self._deleted = self._parent._deleted + self._key_switches = self._parent._key_switches return if not self.session._flushing: @@ -218,6 +259,7 @@ class SessionTransaction(object): self._new = weakref.WeakKeyDictionary() self._deleted = weakref.WeakKeyDictionary() + self._key_switches = weakref.WeakKeyDictionary() def _restore_snapshot(self): assert self._is_transaction_boundary @@ -227,11 +269,16 @@ class SessionTransaction(object): if s.key: del s.key + for s, (oldkey, newkey) in self._key_switches.items(): + self.session.identity_map.discard(s) + s.key = oldkey + self.session.identity_map.replace(s) + for s in set(self._deleted).union(self.session._deleted): if s.deleted: #assert s in self._deleted del s.deleted - self.session._update_impl(s) + self.session._update_impl(s, discard_existing=True) assert not self.session._deleted @@ -294,7 +341,15 @@ class SessionTransaction(object): subtransaction.commit() if not self.session._flushing: - self.session.flush() + for _flush_guard in xrange(100): + if self.session._is_clean(): + break + self.session.flush() + else: + raise exc.FlushError( + "Over 100 subsequent flushes have occurred within " + "session.commit() - is an after_flush() hook " + "creating new objects?") if self._parent is None and self.session.twophase: try: @@ -417,19 +472,19 @@ class Session(object): '__contains__', '__iter__', 'add', 'add_all', 'begin', 'begin_nested', 'close', 'commit', 'connection', 'delete', 'execute', 'expire', 'expire_all', 'expunge', 'expunge_all', 'flush', 'get_bind', - 'is_modified', - 'merge', 'query', 'refresh', 'rollback', + 'is_modified', + 'merge', 'query', 'refresh', 'rollback', 'scalar') def __init__(self, bind=None, autoflush=True, expire_on_commit=True, _enable_transaction_accounting=True, - autocommit=False, twophase=False, + 
autocommit=False, twophase=False, weak_identity_map=True, binds=None, extension=None, query_cls=query.Query): """Construct a new Session. - See also the :func:`.sessionmaker` function which is used to + See also the :func:`.sessionmaker` function which is used to generate a :class:`.Session`-producing callable with a given set of arguments. @@ -448,7 +503,7 @@ class Session(object): by any of these methods, the ``Session`` is ready for the next usage, which will again acquire and maintain a new connection/transaction. - :param autoflush: When ``True``, all query operations will issue a + :param autoflush: When ``True``, all query operations will issue a ``flush()`` call to this ``Session`` before proceeding. This is a convenience feature so that ``flush()`` need not be called repeatedly in order for database queries to retrieve results. It's typical that @@ -496,7 +551,7 @@ class Session(object): attribute/object access subsequent to a completed transaction will load from the most recent database state. - :param extension: An optional + :param extension: An optional :class:`~.SessionExtension` instance, or a list of such instances, which will receive pre- and post- commit and flush events, as well as a post-rollback event. **Deprecated.** @@ -514,9 +569,9 @@ class Session(object): be called. This allows each database to roll back the entire transaction, before each transaction is committed. - :param weak_identity_map: Defaults to ``True`` - when set to - ``False``, objects placed in the :class:`.Session` will be - strongly referenced until explicitly removed or the + :param weak_identity_map: Defaults to ``True`` - when set to + ``False``, objects placed in the :class:`.Session` will be + strongly referenced until explicitly removed or the :class:`.Session` is closed. **Deprecated** - this option is obsolete. 
@@ -573,7 +628,7 @@ class Session(object): transaction or nested transaction, an error is raised, unless ``subtransactions=True`` or ``nested=True`` is specified. - The ``subtransactions=True`` flag indicates that this :meth:`~.Session.begin` + The ``subtransactions=True`` flag indicates that this :meth:`~.Session.begin` can create a subtransaction if a transaction is already in progress. For documentation on subtransactions, please see :ref:`session_subtransactions`. @@ -631,7 +686,7 @@ class Session(object): By default, the :class:`.Session` also expires all database loaded state on all ORM-managed attributes after transaction commit. - This so that subsequent operations load the most recent + This so that subsequent operations load the most recent data from the database. This behavior can be disabled using the ``expire_on_commit=False`` option to :func:`.sessionmaker` or the :class:`.Session` constructor. @@ -672,11 +727,11 @@ class Session(object): self.transaction.prepare() - def connection(self, mapper=None, clause=None, - bind=None, - close_with_result=False, + def connection(self, mapper=None, clause=None, + bind=None, + close_with_result=False, **kw): - """Return a :class:`.Connection` object corresponding to this + """Return a :class:`.Connection` object corresponding to this :class:`.Session` object's transactional state. If this :class:`.Session` is configured with ``autocommit=False``, @@ -684,13 +739,13 @@ class Session(object): is returned, or if no transaction is in progress, a new one is begun and the :class:`.Connection` returned (note that no transactional state is established with the DBAPI until the first SQL statement is emitted). - + Alternatively, if this :class:`.Session` is configured with ``autocommit=True``, - an ad-hoc :class:`.Connection` is returned using :meth:`.Engine.contextual_connect` + an ad-hoc :class:`.Connection` is returned using :meth:`.Engine.contextual_connect` on the underlying :class:`.Engine`. 
Ambiguity in multi-bind or unbound :class:`.Session` objects can be resolved through - any of the optional keyword arguments. This ultimately makes usage of the + any of the optional keyword arguments. This ultimately makes usage of the :meth:`.get_bind` method for resolution. :param bind: @@ -705,27 +760,27 @@ class Session(object): ``clause``. :param clause: - A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, - :func:`~.sql.expression.text`, + A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, + :func:`~.sql.expression.text`, etc.) which will be used to locate a bind, if a bind cannot otherwise be identified. :param close_with_result: Passed to :meth:`Engine.connect`, indicating the :class:`.Connection` should be considered "single use", automatically - closing when the first result set is closed. This flag only has + closing when the first result set is closed. This flag only has an effect if this :class:`.Session` is configured with ``autocommit=True`` and does not already have a transaction in progress. :param \**kw: Additional keyword arguments are sent to :meth:`get_bind()`, - allowing additional arguments to be passed to custom + allowing additional arguments to be passed to custom implementations of :meth:`get_bind`. """ if bind is None: bind = self.get_bind(mapper, clause=clause, **kw) - return self._connection_for_bind(bind, + return self._connection_for_bind(bind, close_with_result=close_with_result) def _connection_for_bind(self, engine, **kwargs): @@ -735,49 +790,99 @@ class Session(object): return engine.contextual_connect(**kwargs) def execute(self, clause, params=None, mapper=None, bind=None, **kw): - """Execute a clause within the current transaction. + """Execute a SQL expression construct or string statement within + the current transaction. Returns a :class:`.ResultProxy` representing results of the statement execution, in the same manner as that of an :class:`.Engine` or :class:`.Connection`. 
+ E.g.:: + + result = session.execute( + user_table.select().where(user_table.c.id == 5) + ) + :meth:`~.Session.execute` accepts any executable clause construct, such as :func:`~.sql.expression.select`, :func:`~.sql.expression.insert`, :func:`~.sql.expression.update`, :func:`~.sql.expression.delete`, and - :func:`~.sql.expression.text`, and additionally accepts - plain strings that represent SQL statements. If a plain string is - passed, it is first converted to a - :func:`~.sql.expression.text` construct, which here means - that bind parameters should be specified using the format ``:param``. - If raw DBAPI statement execution is desired, use :meth:`.Session.connection` - to acquire a :class:`.Connection`, then call its :meth:`~.Connection.execute` - method. + :func:`~.sql.expression.text`. Plain SQL strings can be passed + as well, which in the case of :meth:`.Session.execute` only + will be interpreted the same as if it were passed via a :func:`~.expression.text` + construct. That is, the following usage:: + + result = session.execute( + "SELECT * FROM user WHERE id=:param", + {"param":5} + ) + + is equivalent to:: + + from sqlalchemy import text + result = session.execute( + text("SELECT * FROM user WHERE id=:param"), + {"param":5} + ) + + The second positional argument to :meth:`.Session.execute` is an + optional parameter set. Similar to that of :meth:`.Connection.execute`, whether this + is passed as a single dictionary, or a list of dictionaries, determines + whether the DBAPI cursor's ``execute()`` or ``executemany()`` is used to execute the + statement. 
An INSERT construct may be invoked for a single row:: + + result = session.execute(users.insert(), {"id": 7, "name": "somename"}) + + or for multiple rows:: + + result = session.execute(users.insert(), [ + {"id": 7, "name": "somename7"}, + {"id": 8, "name": "somename8"}, + {"id": 9, "name": "somename9"} + ]) The statement is executed within the current transactional context of - this :class:`.Session`, using the same behavior as that of - the :meth:`.Session.connection` method to determine the active - :class:`.Connection`. The ``close_with_result`` flag is - set to ``True`` so that an ``autocommit=True`` :class:`.Session` - with no active transaction will produce a result that auto-closes - the underlying :class:`.Connection`. - + this :class:`.Session`. The :class:`.Connection` which is used + to execute the statement can also be acquired directly by + calling the :meth:`.Session.connection` method. Both methods use + a rule-based resolution scheme in order to determine the + :class:`.Connection`, which in the average case is derived directly + from the "bind" of the :class:`.Session` itself, and in other cases + can be based on the :func:`.mapper` + and :class:`.Table` objects passed to the method; see the documentation + for :meth:`.Session.get_bind` for a full description of this scheme. + + The :meth:`.Session.execute` method does *not* invoke autoflush. + + The :class:`.ResultProxy` returned by the :meth:`.Session.execute` + method is returned with the "close_with_result" flag set to true; + the significance of this flag is that if this :class:`.Session` is + autocommitting and does not have a transaction-dedicated :class:`.Connection` + available, a temporary :class:`.Connection` is established for the + statement execution, which is closed (meaning, returned to the connection + pool) when the :class:`.ResultProxy` has consumed all available data. 
+ This applies *only* when the :class:`.Session` is configured with + autocommit=True and no transaction has been started. + :param clause: - A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, - :func:`~.sql.expression.text`, etc.) or string SQL statement to be executed. The clause - will also be used to locate a bind, if this :class:`.Session` - is not bound to a single engine already, and the ``mapper`` - and ``bind`` arguments are not passed. + An executable statement (i.e. an :class:`.Executable` expression + such as :func:`.expression.select`) or string SQL statement + to be executed. :param params: - Optional dictionary of bind names mapped to values. + Optional dictionary, or list of dictionaries, containing + bound parameter values. If a single dictionary, single-row + execution occurs; if a list of dictionaries, an + "executemany" will be invoked. The keys in each dictionary + must correspond to parameter names present in the statement. :param mapper: Optional :func:`.mapper` or mapped class, used to identify the appropriate bind. This argument takes precedence over - ``clause`` when locating a bind. + ``clause`` when locating a bind. See :meth:`.Session.get_bind` + for more details. :param bind: Optional :class:`.Engine` to be used as the bind. If @@ -785,11 +890,22 @@ class Session(object): that connection will be used. This argument takes precedence over ``mapper`` and ``clause`` when locating a bind. - + :param \**kw: - Additional keyword arguments are sent to :meth:`get_bind()`, - allowing additional arguments to be passed to custom - implementations of :meth:`get_bind`. + Additional keyword arguments are sent to :meth:`.Session.get_bind()` + to allow extensibility of "bind" schemes. + + .. seealso:: + + :ref:`sqlexpression_toplevel` - Tutorial on using Core SQL + constructs. + + :ref:`connections_toplevel` - Further information on direct + statement execution. 
+ + :meth:`.Connection.execute` - core level statement execution + method, which is :meth:`.Session.execute` ultimately uses + in order to execute the statement. """ clause = expression._literal_as_text(clause) @@ -881,39 +997,39 @@ class Session(object): def get_bind(self, mapper=None, clause=None): """Return a "bind" to which this :class:`.Session` is bound. - - The "bind" is usually an instance of :class:`.Engine`, + + The "bind" is usually an instance of :class:`.Engine`, except in the case where the :class:`.Session` has been explicitly bound directly to a :class:`.Connection`. - For a multiply-bound or unbound :class:`.Session`, the - ``mapper`` or ``clause`` arguments are used to determine the + For a multiply-bound or unbound :class:`.Session`, the + ``mapper`` or ``clause`` arguments are used to determine the appropriate bind to return. - + Note that the "mapper" argument is usually present when :meth:`.Session.get_bind` is called via an ORM - operation such as a :meth:`.Session.query`, each - individual INSERT/UPDATE/DELETE operation within a + operation such as a :meth:`.Session.query`, each + individual INSERT/UPDATE/DELETE operation within a :meth:`.Session.flush`, call, etc. - + The order of resolution is: - + 1. if mapper given and session.binds is present, locate a bind based on mapper. 2. if clause given and session.binds is present, locate a bind based on :class:`.Table` objects found in the given clause present in session.binds. 3. if session.bind is present, return that. - 4. if clause given, attempt to return a bind + 4. if clause given, attempt to return a bind linked to the :class:`.MetaData` ultimately associated with the clause. 5. if mapper given, attempt to return a bind - linked to the :class:`.MetaData` ultimately + linked to the :class:`.MetaData` ultimately associated with the :class:`.Table` or other selectable to which the mapper is mapped. 6. No bind can be found, :class:`.UnboundExecutionError` is raised. 
- + :param mapper: Optional :func:`.mapper` mapped class or instance of :class:`.Mapper`. The bind can be derived from a :class:`.Mapper` @@ -923,11 +1039,11 @@ class Session(object): is mapped for a bind. :param clause: - A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, - :func:`~.sql.expression.text`, + A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, + :func:`~.sql.expression.text`, etc.). If the ``mapper`` argument is not present or could not produce a bind, the given expression construct will be searched for a bound - element, typically a :class:`.Table` associated with bound + element, typically a :class:`.Table` associated with bound :class:`.MetaData`. """ @@ -982,23 +1098,23 @@ class Session(object): @util.contextmanager def no_autoflush(self): """Return a context manager that disables autoflush. - + e.g.:: - + with session.no_autoflush: - + some_object = SomeClass() session.add(some_object) # won't autoflush some_object.related_thing = session.query(SomeRelated).first() - + Operations that proceed within the ``with:`` block will not be subject to flushes occurring upon query access. This is useful when initializing a series of objects which involve existing database queries, where the uncompleted object should not yet be flushed. - - New in 0.7.6. + + .. versionadded:: 0.7.6 """ autoflush = self.autoflush @@ -1035,10 +1151,10 @@ class Session(object): mode is turned on. :param attribute_names: optional. An iterable collection of - string attribute names indicating a subset of attributes to + string attribute names indicating a subset of attributes to be refreshed. - :param lockmode: Passed to the :class:`~sqlalchemy.orm.query.Query` + :param lockmode: Passed to the :class:`~sqlalchemy.orm.query.Query` as used by :meth:`~sqlalchemy.orm.query.Query.with_lockmode`. """ @@ -1060,22 +1176,22 @@ class Session(object): def expire_all(self): """Expires all persistent instances within this Session. 
- When any attributes on a persistent instance is next accessed, + When any attributes on a persistent instance is next accessed, a query will be issued using the :class:`.Session` object's current transactional context in order to load all expired attributes for the given instance. Note that - a highly isolated transaction will return the same values as were + a highly isolated transaction will return the same values as were previously read in that same transaction, regardless of changes in database state outside of that transaction. - To expire individual objects and individual attributes + To expire individual objects and individual attributes on those objects, use :meth:`Session.expire`. - The :class:`.Session` object's default behavior is to + The :class:`.Session` object's default behavior is to expire all state whenever the :meth:`Session.rollback` or :meth:`Session.commit` methods are called, so that new state can be loaded for the new transaction. For this reason, - calling :meth:`Session.expire_all` should not be needed when + calling :meth:`Session.expire_all` should not be needed when autocommit is ``False``, assuming the transaction is isolated. """ @@ -1089,14 +1205,14 @@ class Session(object): attribute is next accessed, a query will be issued to the :class:`.Session` object's current transactional context in order to load all expired attributes for the given instance. Note that - a highly isolated transaction will return the same values as were + a highly isolated transaction will return the same values as were previously read in that same transaction, regardless of changes in database state outside of that transaction. To expire all objects in the :class:`.Session` simultaneously, use :meth:`Session.expire_all`. 
- The :class:`.Session` object's default behavior is to + The :class:`.Session` object's default behavior is to expire all state whenever the :meth:`Session.rollback` or :meth:`Session.commit` methods are called, so that new state can be loaded for the new transaction. For this reason, @@ -1210,10 +1326,15 @@ class Session(object): if state.key is None: state.key = instance_key elif state.key != instance_key: - # primary key switch. use discard() in case another - # state has already replaced this one in the identity + # primary key switch. use discard() in case another + # state has already replaced this one in the identity # map (see test/orm/test_naturalpks.py ReversePKsTest) self.identity_map.discard(state) + if state in self.transaction._key_switches: + orig_key = self.transaction._key_switches[state][0] + else: + orig_key = state.key + self.transaction._key_switches[state] = (orig_key, instance_key) state.key = instance_key self.identity_map.replace(state) @@ -1261,8 +1382,8 @@ class Session(object): mapper = _state_mapper(state) for o, m, st_, dct_ in mapper.cascade_iterator( - 'save-update', - state, + 'save-update', + state, halt_on=self._contains_state): self._save_or_update_impl(st_) @@ -1285,7 +1406,7 @@ class Session(object): if state in self._deleted: return - # ensure object is attached to allow the + # ensure object is attached to allow the # cascade operation to load deferred attributes # and collections self._attach(state) @@ -1303,20 +1424,47 @@ class Session(object): self._delete_impl(st_) def merge(self, instance, load=True, **kw): - """Copy the state an instance onto the persistent instance with the - same identifier. + """Copy the state of a given instance into a corresponding instance + within this :class:`.Session`. - If there is no persistent instance currently associated with the - session, it will be loaded. Return the persistent instance. If the - given instance is unsaved, save a copy of and return it as a newly - persistent instance. 
The given instance does not become associated - with the session. + :meth:`.Session.merge` examines the primary key attributes of the + source instance, and attempts to reconcile it with an instance of the + same primary key in the session. If not found locally, it attempts + to load the object from the database based on primary key, and if + none can be located, creates a new instance. The state of each attribute + on the source instance is then copied to the target instance. + The resulting target instance is then returned by the method; the + original source instance is left unmodified, and un-associated with the + :class:`.Session` if not already. This operation cascades to associated instances if the association is mapped with ``cascade="merge"``. See :ref:`unitofwork_merging` for a detailed discussion of merging. + :param instance: Instance to be merged. + :param load: Boolean, when False, :meth:`.merge` switches into + a "high performance" mode which causes it to forego emitting history + events as well as all database access. This flag is used for + cases such as transferring graphs of objects into a :class:`.Session` + from a second level cache, or to transfer just-loaded objects + into the :class:`.Session` owned by a worker thread or process + without re-querying the database. + + The ``load=False`` use case adds the caveat that the given + object has to be in a "clean" state, that is, has no pending changes + to be flushed - even if the incoming object is detached from any + :class:`.Session`. This is so that when + the merge operation populates local attributes and + cascades to related objects and + collections, the values can be "stamped" onto the + target object as is, without generating any history or attribute + events, and without the need to reconcile the incoming data with + any existing related objects or collections that might not + be loaded. 
The resulting objects from ``load=False`` are always + produced as "clean", so it is only appropriate that the given objects + should be "clean" as well, else this suggests a mis-use of the method. + """ if 'dont_load' in kw: load = not kw['dont_load'] @@ -1334,8 +1482,8 @@ class Session(object): try: self.autoflush = False return self._merge( - attributes.instance_state(instance), - attributes.instance_dict(instance), + attributes.instance_state(instance), + attributes.instance_dict(instance), load=load, _recursive=_recursive) finally: self.autoflush = autoflush @@ -1373,7 +1521,7 @@ class Session(object): new_instance = True elif not _none_set.issubset(key[1]) or \ - (mapper.allow_partial_pks and + (mapper.allow_partial_pks and not _none_set.issuperset(key[1])): merged = self.query(mapper.class_).get(key[1]) else: @@ -1397,14 +1545,14 @@ class Session(object): # version check if applicable if mapper.version_id_col is not None: existing_version = mapper._get_state_attr_by_column( - state, - state_dict, + state, + state_dict, mapper.version_id_col, passive=attributes.PASSIVE_NO_INITIALIZE) merged_version = mapper._get_state_attr_by_column( - merged_state, - merged_dict, + merged_state, + merged_dict, mapper.version_id_col, passive=attributes.PASSIVE_NO_INITIALIZE) @@ -1426,8 +1574,8 @@ class Session(object): merged_state.load_options = state.load_options for prop in mapper.iterate_properties: - prop.merge(self, state, state_dict, - merged_state, merged_dict, + prop.merge(self, state, state_dict, + merged_state, merged_dict, load, _recursive) if not load: @@ -1465,7 +1613,7 @@ class Session(object): self._new[state] = state.obj() state.insert_order = len(self._new) - def _update_impl(self, state): + def _update_impl(self, state, discard_existing=False): if (self.identity_map.contains_state(state) and state not in self._deleted): return @@ -1481,6 +1629,10 @@ class Session(object): "function to send this object back to the transient state." 
% mapperutil.state_str(state) ) + if discard_existing: + existing = self.identity_map.get(state.key) + if existing is not None: + self.identity_map.discard(attributes.instance_state(existing)) self._attach(state) self._deleted.pop(state, None) self.identity_map.add(state) @@ -1555,7 +1707,7 @@ class Session(object): Database operations will be issued in the current transactional context and do not affect the state of the transaction, unless an - error occurs, in which case the entire transaction is rolled back. + error occurs, in which case the entire transaction is rolled back. You may flush() as often as you like within a transaction to move changes from Python to the database's transaction buffer. @@ -1563,20 +1715,15 @@ class Session(object): will create a transaction on the fly that surrounds the entire set of operations int the flush. - objects - Optional; a list or tuple collection. Restricts the flush operation - to only these objects, rather than all pending changes. - Deprecated - this flag prevents the session from properly maintaining - accounting among inter-object relations and can cause invalid results. + :param objects: Optional; restricts the flush operation to operate + only on elements that are in the given collection. + + This feature is for an extremely narrow set of use cases where + particular objects may need to be operated upon before the + full flush() occurs. It is not intended for general use. 
""" - if objects: - util.warn_deprecated( - "The 'objects' argument to session.flush() is deprecated; " - "Please do not add objects to the session which should not " - "yet be persisted.") - if self._flushing: raise sa_exc.InvalidRequestError("Session is already flushing") @@ -1677,42 +1824,41 @@ class Session(object): raise - def is_modified(self, instance, include_collections=True, + def is_modified(self, instance, include_collections=True, passive=attributes.PASSIVE_OFF): - """Return ``True`` if the given instance has locally + """Return ``True`` if the given instance has locally modified attributes. This method retrieves the history for each instrumented attribute on the instance and performs a comparison of the current value to its previously committed value, if any. - + It is in effect a more expensive and accurate - version of checking for the given instance in the - :attr:`.Session.dirty` collection; a full test for + version of checking for the given instance in the + :attr:`.Session.dirty` collection; a full test for each attribute's net "dirty" status is performed. - + E.g.:: - + return session.is_modified(someobject, passive=True) - - .. note:: - - In SQLAlchemy 0.7 and earlier, the ``passive`` - flag should **always** be explicitly set to ``True``. - The current default value of :data:`.attributes.PASSIVE_OFF` - for this flag is incorrect, in that it loads unloaded - collections and attributes which by definition - have no modified state, and furthermore trips off - autoflush which then causes all subsequent, possibly - modified attributes to lose their modified state. - The default value of the flag will be changed in 0.8. - + + .. versionchanged:: 0.8 + In SQLAlchemy 0.7 and earlier, the ``passive`` + flag should **always** be explicitly set to ``True``. 
+ The current default value of :data:`.attributes.PASSIVE_OFF` + for this flag is incorrect, in that it loads unloaded + collections and attributes which by definition + have no modified state, and furthermore trips off + autoflush which then causes all subsequent, possibly + modified attributes to lose their modified state. + The default value of the flag will be changed in 0.8. + A few caveats to this method apply: - * Instances present in the :attr:`.Session.dirty` collection may report - ``False`` when tested with this method. This is because + * Instances present in the :attr:`.Session.dirty` collection may report + ``False`` when tested with this method. This is because the object may have received change events via attribute - mutation, thus placing it in :attr:`.Session.dirty`, + mutation, thus placing it in :attr:`.Session.dirty`, but ultimately the state is the same as that loaded from the database, resulting in no net change here. * Scalar attributes may not have recorded the previously set @@ -1724,36 +1870,37 @@ class Session(object): it skips the expense of a SQL call if the old value isn't present, based on the assumption that an UPDATE of the scalar value is usually needed, and in those few cases where it isn't, is less - expensive on average than issuing a defensive SELECT. + expensive on average than issuing a defensive SELECT. The "old" value is fetched unconditionally only if the attribute container has the ``active_history`` flag set to ``True``. This flag is set typically for primary key attributes and scalar object references - that are not a simple many-to-one. To set this flag for + that are not a simple many-to-one. To set this flag for any arbitrary mapped column, use the ``active_history`` argument with :func:`.column_property`. - + :param instance: mapped instance to be tested for pending changes. :param include_collections: Indicates if multivalued collections should be included in the operation. 
Setting this to ``False`` is a way to detect only local-column based properties (i.e. scalar columns or many-to-one foreign keys) that would result in an UPDATE for this instance upon flush. - :param passive: Indicates if unloaded attributes and - collections should be loaded in the course of performing + :param passive: Indicates if unloaded attributes and + collections should be loaded in the course of performing this test. If set to ``False``, or left at its default value of :data:`.PASSIVE_OFF`, unloaded attributes - will be loaded. If set to ``True`` or - :data:`.PASSIVE_NO_INITIALIZE`, unloaded - collections and attributes will remain unloaded. As + will be loaded. If set to ``True`` or + :data:`.PASSIVE_NO_INITIALIZE`, unloaded + collections and attributes will remain unloaded. As noted previously, the existence of this flag here - is a bug, as unloaded attributes by definition have + is a bug, as unloaded attributes by definition have no changes, and the load operation also triggers an autoflush which then cancels out subsequent changes. - This flag should **always be set to - True**. In 0.8 the flag will be deprecated and the default - set to ``True``. + This flag should **always be set to True**. + .. versionchanged:: 0.8 + The flag will be deprecated and the default + set to ``True``. """ try: @@ -1770,7 +1917,7 @@ class Session(object): for attr in state.manager.attributes: if \ ( - not include_collections and + not include_collections and hasattr(attr.impl, 'get_collection') ) or not hasattr(attr.impl, 'get_history'): continue @@ -1784,33 +1931,71 @@ class Session(object): @property def is_active(self): - """True if this :class:`.Session` has an active transaction. - - This indicates if the :class:`.Session` is capable of emitting - SQL, as from the :meth:`.Session.execute`, :meth:`.Session.query`, - or :meth:`.Session.flush` methods. 
If False, it indicates - that the innermost transaction has been rolled back, but enclosing - :class:`.SessionTransaction` objects remain in the transactional - stack, which also must be rolled back. - - This flag is generally only useful with a :class:`.Session` - configured in its default mode of ``autocommit=False``. + """True if this :class:`.Session` is in "transaction mode" and + is not in "partial rollback" state. + + The :class:`.Session` in its default mode of ``autocommit=False`` + is essentially always in "transaction mode", in that a + :class:`.SessionTransaction` is associated with it as soon as + it is instantiated. This :class:`.SessionTransaction` is immediately + replaced with a new one as soon as it is ended, due to a rollback, + commit, or close operation. + + "Transaction mode" does *not* indicate whether + or not actual database connection resources are in use; the + :class:`.SessionTransaction` object coordinates among zero or more + actual database transactions, and starts out with none, accumulating + individual DBAPI connections as different data sources are used + within its scope. The best way to track when a particular + :class:`.Session` has actually begun to use DBAPI resources is to + implement a listener using the :meth:`.SessionEvents.after_begin` + method, which will deliver both the :class:`.Session` as well as the + target :class:`.Connection` to a user-defined event listener. + + The "partial rollback" state refers to when an "inner" transaction, + typically used during a flush, encounters an error and emits + a rollback of the DBAPI connection. At this point, the :class:`.Session` + is in "partial rollback" and awaits for the user to call :meth:`.rollback`, + in order to close out the transaction stack. It is in this "partial + rollback" period that the :attr:`.is_active` flag returns False. 
After + the call to :meth:`.rollback`, the :class:`.SessionTransaction` is replaced + with a new one and :attr:`.is_active` returns ``True`` again. + + When a :class:`.Session` is used in ``autocommit=True`` mode, the + :class:`.SessionTransaction` is only instantiated within the scope + of a flush call, or when :meth:`.Session.begin` is called. So + :attr:`.is_active` will always be ``False`` outside of a flush or + :meth:`.begin` block in this mode, and will be ``True`` within the + :meth:`.begin` block as long as it doesn't enter "partial rollback" + state. + + From all the above, it follows that the only purpose to this flag is + for application frameworks that wish to detect is a "rollback" is + necessary within a generic error handling routine, for :class:`.Session` + objects that would otherwise be in "partial rollback" mode. In + a typical integration case, this is also not necessary as it is standard + practice to emit :meth:`.Session.rollback` unconditionally within the + outermost exception catch. + + To track the transactional state of a :class:`.Session` fully, + use event listeners, primarily the :meth:`.SessionEvents.after_begin`, + :meth:`.SessionEvents.after_commit`, :meth:`.SessionEvents.after_rollback` + and related events. """ - return self.transaction and self.transaction.is_active identity_map = None """A mapping of object identities to objects themselves. - + Iterating through ``Session.identity_map.values()`` provides - access to the full set of persistent objects (i.e., those + access to the full set of persistent objects (i.e., those that have row identity) currently in the session. - + See also: - + :func:`.identity_key` - operations involving identity keys. - + """ @property @@ -1826,9 +2011,9 @@ class Session(object): @property def dirty(self): """The set of all persistent instances considered dirty. 
- + E.g.:: - + some_mapped_object in session.dirty Instances are considered dirty when they were modified but not @@ -1869,7 +2054,7 @@ _sessions = weakref.WeakValueDictionary() def make_transient(instance): """Make the given instance 'transient'. - This will remove its association with any + This will remove its association with any session and additionally will remove its "identity key", such that it's as though the object were newly constructed, except retaining its values. It also resets the @@ -1877,7 +2062,7 @@ def make_transient(instance): had been explicitly deleted by its session. Attributes which were "expired" or deferred at the - instance level are reverted to undefined, and + instance level are reverted to undefined, and will not trigger any loads. """ @@ -1886,7 +2071,7 @@ def make_transient(instance): if s: s._expunge_state(state) - # remove expired state and + # remove expired state and # deferred callables state.callables.clear() if state.key: diff --git a/libs/sqlalchemy/orm/shard.py b/libs/sqlalchemy/orm/shard.py index 73e56310..93bc7a6b 100644 --- a/libs/sqlalchemy/orm/shard.py +++ b/libs/sqlalchemy/orm/shard.py @@ -1,5 +1,5 @@ # orm/shard.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/orm/state.py b/libs/sqlalchemy/orm/state.py index 4803ecdc..b9a9c463 100644 --- a/libs/sqlalchemy/orm/state.py +++ b/libs/sqlalchemy/orm/state.py @@ -1,5 +1,5 @@ # orm/state.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -124,7 +124,7 @@ class InstanceState(object): return [] elif hasattr(impl, 
'get_collection'): return [ - (attributes.instance_state(o), o) for o in + (attributes.instance_state(o), o) for o in impl.get_collection(self, dict_, x, passive=passive) ] else: @@ -134,10 +134,10 @@ class InstanceState(object): d = {'instance':self.obj()} d.update( (k, self.__dict__[k]) for k in ( - 'committed_state', 'pending', 'modified', 'expired', + 'committed_state', 'pending', 'modified', 'expired', 'callables', 'key', 'parents', 'load_options', 'mutable_dict', 'class_', - ) if k in self.__dict__ + ) if k in self.__dict__ ) if self.load_path: d['load_path'] = interfaces.serialize_path(self.load_path) @@ -181,26 +181,26 @@ class InstanceState(object): self.__dict__.update([ (k, state[k]) for k in ( 'key', 'load_options', 'mutable_dict' - ) if k in state + ) if k in state ]) if 'load_path' in state: self.load_path = interfaces.deserialize_path(state['load_path']) - # setup _sa_instance_state ahead of time so that + # setup _sa_instance_state ahead of time so that # unpickle events can access the object normally. # see [ticket:2362] manager.setup_instance(inst, self) manager.dispatch.unpickle(self, state) def initialize(self, key): - """Set this attribute to an empty value or collection, + """Set this attribute to an empty value or collection, based on the AttributeImpl in use.""" self.manager.get_impl(key).initialize(self, self.dict) def reset(self, dict_, key): - """Remove the given attribute and any + """Remove the given attribute and any callables associated with it.""" dict_.pop(key, None) @@ -284,7 +284,7 @@ class InstanceState(object): self.manager.deferred_scalar_loader(self, toload) - # if the loader failed, or this + # if the loader failed, or this # instance state didn't have an identity, # the attributes still might be in the callables # dict. ensure they are removed. 
@@ -321,7 +321,7 @@ class InstanceState(object): @property def expired_attributes(self): """Return the set of keys which are 'expired' to be loaded by - the manager's deferred scalar loader, assuming no pending + the manager's deferred scalar loader, assuming no pending changes. see also the ``unmodified`` collection which is intersected @@ -348,7 +348,7 @@ class InstanceState(object): self.committed_state[attr.key] = previous - # the "or not self.modified" is defensive at + # the "or not self.modified" is defensive at # this point. The assertion below is expected # to be True: # assert self._strong_obj is None or self.modified @@ -363,9 +363,9 @@ class InstanceState(object): raise orm_exc.ObjectDereferencedError( "Can't emit change event for attribute '%s' - " "parent object of type %s has been garbage " - "collected." + "collected." % ( - self.manager[attr.key], + self.manager[attr.key], orm_util.state_class_str(self) )) self.modified = True @@ -433,7 +433,7 @@ class InstanceState(object): self._strong_obj = None class MutableAttrInstanceState(InstanceState): - """InstanceState implementation for objects that reference 'mutable' + """InstanceState implementation for objects that reference 'mutable' attributes. Has a more involved "cleanup" handler that checks mutable attributes @@ -491,7 +491,7 @@ class MutableAttrInstanceState(InstanceState): This would be called in the extremely rare race condition that the weakref returned None but - the cleanup handler had not yet established the + the cleanup handler had not yet established the __resurrect callable as its replacement. 
""" diff --git a/libs/sqlalchemy/orm/strategies.py b/libs/sqlalchemy/orm/strategies.py index 5f4b182d..2cde3f67 100644 --- a/libs/sqlalchemy/orm/strategies.py +++ b/libs/sqlalchemy/orm/strategies.py @@ -1,10 +1,10 @@ # orm/strategies.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""sqlalchemy.orm.interfaces.LoaderStrategy +"""sqlalchemy.orm.interfaces.LoaderStrategy implementations, and related MapperOptions.""" from sqlalchemy import exc as sa_exc @@ -23,15 +23,15 @@ from sqlalchemy.orm.query import Query import itertools def _register_attribute(strategy, mapper, useobject, - compare_function=None, + compare_function=None, typecallable=None, - copy_function=None, - mutable_scalars=False, + copy_function=None, + mutable_scalars=False, uselist=False, - callable_=None, - proxy_property=None, + callable_=None, + proxy_property=None, active_history=False, - impl_class=None, + impl_class=None, **kw ): @@ -45,11 +45,11 @@ def _register_attribute(strategy, mapper, useobject, listen_hooks.append(single_parent_validator) if prop.key in prop.parent.validators: + fn, include_removes = prop.parent.validators[prop.key] listen_hooks.append( - lambda desc, prop: mapperutil._validator_events(desc, - prop.key, - prop.parent.validators[prop.key]) - ) + lambda desc, prop: mapperutil._validator_events(desc, + prop.key, fn, include_removes) + ) if useobject: listen_hooks.append(unitofwork.track_cascade_events) @@ -59,8 +59,8 @@ def _register_attribute(strategy, mapper, useobject, backref = kw.pop('backref', None) if backref: listen_hooks.append( - lambda desc, prop: attributes.backref_listeners(desc, - backref, + lambda desc, prop: attributes.backref_listeners(desc, + backref, uselist) ) @@ -68,18 +68,18 @@ def _register_attribute(strategy, mapper, useobject, if prop 
is m._props.get(prop.key): desc = attributes.register_attribute_impl( - m.class_, - prop.key, + m.class_, + prop.key, parent_token=prop, mutable_scalars=mutable_scalars, - uselist=uselist, - copy_function=copy_function, - compare_function=compare_function, + uselist=uselist, + copy_function=copy_function, + compare_function=compare_function, useobject=useobject, - extension=attribute_ext, - trackparent=useobject and (prop.single_parent or prop.direction is interfaces.ONETOMANY), + extension=attribute_ext, + trackparent=useobject and (prop.single_parent or prop.direction is interfaces.ONETOMANY), typecallable=typecallable, - callable_=callable_, + callable_=callable_, active_history=active_history, impl_class=impl_class, doc=prop.doc, @@ -99,7 +99,7 @@ class UninstrumentedColumnLoader(LoaderStrategy): def init(self): self.columns = self.parent_property.columns - def setup_query(self, context, entity, path, reduced_path, adapter, + def setup_query(self, context, entity, path, reduced_path, adapter, column_collection=None, **kwargs): for c in self.columns: if adapter: @@ -116,7 +116,7 @@ class ColumnLoader(LoaderStrategy): self.columns = self.parent_property.columns self.is_composite = hasattr(self.parent_property, 'composite_class') - def setup_query(self, context, entity, path, reduced_path, + def setup_query(self, context, entity, path, reduced_path, adapter, column_collection, **kwargs): for c in self.columns: if adapter: @@ -137,7 +137,7 @@ class ColumnLoader(LoaderStrategy): active_history = active_history ) - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): key = self.key # look through list of columns represented here @@ -199,10 +199,10 @@ class DeferredColumnLoader(LoaderStrategy): expire_missing=False ) - def setup_query(self, context, entity, path, reduced_path, adapter, + def setup_query(self, context, entity, path, reduced_path, adapter, only_load_props=None, 
**kwargs): if ( - self.group is not None and + self.group is not None and context.attributes.get(('undefer', self.group), False) ) or (only_load_props and self.key in only_load_props): self.parent_property._get_strategy(ColumnLoader).\ @@ -220,10 +220,10 @@ class DeferredColumnLoader(LoaderStrategy): if self.group: toload = [ - p.key for p in - localparent.iterate_properties - if isinstance(p, StrategizedProperty) and - isinstance(p.strategy, DeferredColumnLoader) and + p.key for p in + localparent.iterate_properties + if isinstance(p, StrategizedProperty) and + isinstance(p.strategy, DeferredColumnLoader) and p.group==self.group ] else: @@ -236,12 +236,12 @@ class DeferredColumnLoader(LoaderStrategy): if session is None: raise orm_exc.DetachedInstanceError( "Parent instance %s is not bound to a Session; " - "deferred load operation of attribute '%s' cannot proceed" % + "deferred load operation of attribute '%s' cannot proceed" % (mapperutil.state_str(state), self.key) ) query = session.query(localparent) - if query._load_on_ident(state.key, + if query._load_on_ident(state.key, only_load_props=group, refresh_state=state) is None: raise orm_exc.ObjectDeletedError(state) @@ -297,14 +297,14 @@ class AbstractRelationshipLoader(LoaderStrategy): class NoLoader(AbstractRelationshipLoader): """Provide loading behavior for a :class:`.RelationshipProperty` with "lazy=None". - + """ def init_class_attribute(self, mapper): self.is_class_level = True _register_attribute(self, mapper, - useobject=True, + useobject=True, uselist=self.parent_property.uselist, typecallable = self.parent_property.collection_class, ) @@ -319,7 +319,7 @@ log.class_logger(NoLoader) class LazyLoader(AbstractRelationshipLoader): """Provide loading behavior for a :class:`.RelationshipProperty` with "lazy=True", that is loads when first accessed. 
- + """ def init(self): @@ -331,7 +331,7 @@ class LazyLoader(AbstractRelationshipLoader): self._rev_lazywhere, \ self._rev_bind_to_col, \ self._rev_equated_columns = self._create_lazy_clause( - self.parent_property, + self.parent_property, reverse_direction=True) self.logger.info("%s lazy loading clause %s", self, self._lazywhere) @@ -341,8 +341,8 @@ class LazyLoader(AbstractRelationshipLoader): #from sqlalchemy.orm import query self.use_get = not self.uselist and \ self.mapper._get_clause[0].compare( - self._lazywhere, - use_proxies=True, + self._lazywhere, + use_proxies=True, equivalents=self.mapper._equivalent_columns ) @@ -358,13 +358,13 @@ class LazyLoader(AbstractRelationshipLoader): def init_class_attribute(self, mapper): self.is_class_level = True - # MANYTOONE currently only needs the + # MANYTOONE currently only needs the # "old" value for delete-orphan - # cascades. the required _SingleParentValidator + # cascades. the required _SingleParentValidator # will enable active_history - # in that case. otherwise we don't need the + # in that case. otherwise we don't need the # "old" value during backref operations. 
- _register_attribute(self, + _register_attribute(self, mapper, useobject=True, callable_=self._load_for_state, @@ -378,12 +378,12 @@ class LazyLoader(AbstractRelationshipLoader): not self.use_get, ) - def lazy_clause(self, state, reverse_direction=False, - alias_secondary=False, + def lazy_clause(self, state, reverse_direction=False, + alias_secondary=False, adapt_source=None): if state is None: return self._lazy_none_clause( - reverse_direction, + reverse_direction, adapt_source=adapt_source) if not reverse_direction: @@ -414,14 +414,14 @@ class LazyLoader(AbstractRelationshipLoader): if bindparam._identifying_key in bind_to_col: bindparam.callable = \ lambda: mapper._get_committed_state_attr_by_column( - state, dict_, + state, dict_, bind_to_col[bindparam._identifying_key]) else: def visit_bindparam(bindparam): if bindparam._identifying_key in bind_to_col: bindparam.callable = \ lambda: mapper._get_state_attr_by_column( - state, dict_, + state, dict_, bind_to_col[bindparam._identifying_key]) @@ -465,10 +465,10 @@ class LazyLoader(AbstractRelationshipLoader): if ( (passive is attributes.PASSIVE_NO_FETCH or \ - passive is attributes.PASSIVE_NO_FETCH_RELATED) and + passive is attributes.PASSIVE_NO_FETCH_RELATED) and not self.use_get ) or ( - passive is attributes.PASSIVE_ONLY_PERSISTENT and + passive is attributes.PASSIVE_ONLY_PERSISTENT and pending ): return attributes.PASSIVE_NO_RESULT @@ -477,11 +477,11 @@ class LazyLoader(AbstractRelationshipLoader): if not session: raise orm_exc.DetachedInstanceError( "Parent instance %s is not bound to a Session; " - "lazy load operation of attribute '%s' cannot proceed" % + "lazy load operation of attribute '%s' cannot proceed" % (mapperutil.state_str(state), self.key) ) - # if we have a simple primary key load, check the + # if we have a simple primary key load, check the # identity map without generating a Query at all if self.use_get: ident = self._get_ident_for_use_get( @@ -555,7 +555,7 @@ class 
LazyLoader(AbstractRelationshipLoader): q = q.order_by(*util.to_list(self.parent_property.order_by)) for rev in self.parent_property._reverse_property: - # reverse props that are MANYTOONE are loading *this* + # reverse props that are MANYTOONE are loading *this* # object from get(), so don't need to eager out to those. if rev.direction is interfaces.MANYTOONE and \ rev._use_get and \ @@ -580,7 +580,7 @@ class LazyLoader(AbstractRelationshipLoader): if l > 1: util.warn( "Multiple rows returned with " - "uselist=False for lazily-loaded attribute '%s' " + "uselist=False for lazily-loaded attribute '%s' " % self.parent_property) return result[0] @@ -588,30 +588,30 @@ class LazyLoader(AbstractRelationshipLoader): return None - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): key = self.key if not self.is_class_level: def set_lazy_callable(state, dict_, row): - # we are not the primary manager for this attribute + # we are not the primary manager for this attribute # on this class - set up a - # per-instance lazyloader, which will override the + # per-instance lazyloader, which will override the # class-level behavior. - # this currently only happens when using a + # this currently only happens when using a # "lazyload" option on a "no load" - # attribute - "eager" attributes always have a + # attribute - "eager" attributes always have a # class-level lazyloader installed. state.set_callable(dict_, key, LoadLazyAttribute(state, key)) return set_lazy_callable, None, None else: def reset_for_lazy_callable(state, dict_, row): - # we are the primary manager for this attribute on + # we are the primary manager for this attribute on # this class - reset its - # per-instance attribute state, so that the class-level + # per-instance attribute state, so that the class-level # lazy loader is # executed when next referenced on this instance. 
# this is needed in - # populate_existing() types of scenarios to reset + # populate_existing() types of scenarios to reset # any existing state. state.reset(dict_, key) @@ -648,7 +648,7 @@ class LazyLoader(AbstractRelationshipLoader): if prop.secondaryjoin is None or not reverse_direction: lazywhere = visitors.replacement_traverse( - lazywhere, {}, col_to_bind) + lazywhere, {}, col_to_bind) if prop.secondaryjoin is not None: secondaryjoin = prop.secondaryjoin @@ -685,12 +685,12 @@ class ImmediateLoader(AbstractRelationshipLoader): _get_strategy(LazyLoader).\ init_class_attribute(mapper) - def setup_query(self, context, entity, + def setup_query(self, context, entity, path, reduced_path, adapter, column_collection=None, parentmapper=None, **kwargs): pass - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): def load_immediate(state, dict_, row): state.get_impl(self.key).get(state, dict_) @@ -707,8 +707,8 @@ class SubqueryLoader(AbstractRelationshipLoader): _get_strategy(LazyLoader).\ init_class_attribute(mapper) - def setup_query(self, context, entity, - path, reduced_path, adapter, + def setup_query(self, context, entity, + path, reduced_path, adapter, column_collection=None, parentmapper=None, **kwargs): @@ -738,7 +738,7 @@ class SubqueryLoader(AbstractRelationshipLoader): self._get_leftmost(subq_path) orig_query = context.attributes.get( - ("orig_query", SubqueryLoader), + ("orig_query", SubqueryLoader), context.query) # generate a new Query from the original, then @@ -748,7 +748,7 @@ class SubqueryLoader(AbstractRelationshipLoader): leftmost_attr, subq_path ) - # generate another Query that will join the + # generate another Query that will join the # left alias to the target relationships. # basically doing a longhand # "from_self()". 
(from_self() itself not quite industrial @@ -770,7 +770,7 @@ class SubqueryLoader(AbstractRelationshipLoader): q = self._setup_options(q, subq_path, orig_query) q = self._setup_outermost_orderby(q) - # add new query to attributes to be picked up + # add new query to attributes to be picked up # by create_row_processor context.attributes[('subquery', reduced_path)] = q @@ -801,7 +801,7 @@ class SubqueryLoader(AbstractRelationshipLoader): # to look only for significant columns q = orig_query._clone() - # TODO: why does polymporphic etc. require hardcoding + # TODO: why does polymporphic etc. require hardcoding # into _adapt_col_list ? Does query.add_columns(...) work # with polymorphic loading ? q._set_entities(q._adapt_col_list(leftmost_attr)) @@ -823,7 +823,7 @@ class SubqueryLoader(AbstractRelationshipLoader): def _prep_for_joins(self, left_alias, subq_path): # figure out what's being joined. a.k.a. the fun part to_join = [ - (subq_path[i], subq_path[i+1]) + (subq_path[i], subq_path[i+1]) for i in xrange(0, len(subq_path), 2) ] @@ -836,13 +836,13 @@ class SubqueryLoader(AbstractRelationshipLoader): parent_alias = left_alias elif subq_path[-2].isa(self.parent): # In the case of multiple levels, retrieve - # it from subq_path[-2]. This is the same as self.parent - # in the vast majority of cases, and [ticket:2014] + # it from subq_path[-2]. 
This is the same as self.parent + # in the vast majority of cases, and [ticket:2014] # illustrates a case where sub_path[-2] is a subclass # of self.parent parent_alias = mapperutil.AliasedClass(subq_path[-2]) else: - # if of_type() were used leading to this relationship, + # if of_type() were used leading to this relationship, # self.parent is more specific than subq_path[-2] parent_alias = mapperutil.AliasedClass(self.parent) @@ -860,10 +860,10 @@ class SubqueryLoader(AbstractRelationshipLoader): for i, (mapper, key) in enumerate(to_join): # we need to use query.join() as opposed to - # orm.join() here because of the - # rich behavior it brings when dealing with + # orm.join() here because of the + # rich behavior it brings when dealing with # "with_polymorphic" mappers. "aliased" - # and "from_joinpoint" take care of most of + # and "from_joinpoint" take care of most of # the chaining and aliasing for us. first = i == 0 @@ -897,12 +897,14 @@ class SubqueryLoader(AbstractRelationshipLoader): # these will fire relative to subq_path. q = q._with_current_path(subq_path) q = q._conditional_options(*orig_query._with_options) + if orig_query._populate_existing: + q._populate_existing = orig_query._populate_existing return q def _setup_outermost_orderby(self, q): if self.parent_property.order_by: # if there's an ORDER BY, alias it the same - # way joinedloader does, but we have to pull out + # way joinedloader does, but we have to pull out # the "eagerjoin" from the query. # this really only picks up the "secondary" table # right now. 
@@ -917,12 +919,12 @@ class SubqueryLoader(AbstractRelationshipLoader): q = q.order_by(*eager_order_by) return q - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( "'%s' does not support object " - "population - eager loading cannot be applied." % + "population - eager loading cannot be applied." % self) reduced_path = reduced_path + (self.key,) @@ -934,10 +936,16 @@ class SubqueryLoader(AbstractRelationshipLoader): q = context.attributes[('subquery', reduced_path)] - collections = dict( - (k, [v[0] for v in v]) + # cache the loaded collections in the context + # so that inheriting mappers don't re-load when they + # call upon create_row_processor again + if ('collections', reduced_path) in context.attributes: + collections = context.attributes[('collections', reduced_path)] + else: + collections = context.attributes[('collections', reduced_path)] = dict( + (k, [v[0] for v in v]) for k, v in itertools.groupby( - q, + q, lambda x:x[1:] )) @@ -952,7 +960,7 @@ class SubqueryLoader(AbstractRelationshipLoader): def _create_collection_loader(self, collections, local_cols): def load_collection_from_subq(state, dict_, row): collection = collections.get( - tuple([row[col] for col in local_cols]), + tuple([row[col] for col in local_cols]), () ) state.get_impl(self.key).\ @@ -963,7 +971,7 @@ class SubqueryLoader(AbstractRelationshipLoader): def _create_scalar_loader(self, collections, local_cols): def load_scalar_from_subq(state, dict_, row): collection = collections.get( - tuple([row[col] for col in local_cols]), + tuple([row[col] for col in local_cols]), (None,) ) if len(collection) > 1: @@ -983,7 +991,7 @@ log.class_logger(SubqueryLoader) class JoinedLoader(AbstractRelationshipLoader): """Provide loading behavior for a :class:`.RelationshipProperty` using joined eager loading. 
- + """ def init(self): super(JoinedLoader, self).init() @@ -1014,7 +1022,7 @@ class JoinedLoader(AbstractRelationshipLoader): ) else: # check for join_depth or basic recursion, - # if the current path was not explicitly stated as + # if the current path was not explicitly stated as # a desired "loaderstrategy" (i.e. via query.options()) if ("loaderstrategy", reduced_path) not in context.attributes: if self.join_depth: @@ -1035,16 +1043,16 @@ class JoinedLoader(AbstractRelationshipLoader): for value in self.mapper._polymorphic_properties: value.setup( - context, - entity, - path, + context, + entity, + path, reduced_path, - clauses, - parentmapper=self.mapper, + clauses, + parentmapper=self.mapper, column_collection=add_to_collection, allow_innerjoin=allow_innerjoin) - def _get_user_defined_adapter(self, context, entity, + def _get_user_defined_adapter(self, context, entity, reduced_path, adapter): clauses = context.attributes[ ("user_defined_eager_row_processor", @@ -1063,12 +1071,12 @@ class JoinedLoader(AbstractRelationshipLoader): add_to_collection = context.primary_columns return clauses, adapter, add_to_collection - def _generate_row_adapter(self, + def _generate_row_adapter(self, context, entity, path, reduced_path, adapter, column_collection, parentmapper, allow_innerjoin ): clauses = mapperutil.ORMAdapter( - mapperutil.AliasedClass(self.mapper), + mapperutil.AliasedClass(self.mapper), equivalents=self.mapper._equivalent_columns, adapt_required=True) @@ -1076,7 +1084,7 @@ class JoinedLoader(AbstractRelationshipLoader): context.multi_row_eager_loaders = True innerjoin = allow_innerjoin and context.attributes.get( - ("eager_join_type", path), + ("eager_join_type", path), self.parent_property.innerjoin) if not innerjoin: # if this is an outer join, all eager joins from @@ -1084,8 +1092,8 @@ class JoinedLoader(AbstractRelationshipLoader): allow_innerjoin = False context.create_eager_joins.append( - (self._create_eager_join, context, - entity, path, adapter, + 
(self._create_eager_join, context, + entity, path, adapter, parentmapper, clauses, innerjoin) ) @@ -1095,8 +1103,8 @@ class JoinedLoader(AbstractRelationshipLoader): ] = clauses return clauses, adapter, add_to_collection, allow_innerjoin - def _create_eager_join(self, context, entity, - path, adapter, parentmapper, + def _create_eager_join(self, context, entity, + path, adapter, parentmapper, clauses, innerjoin): if parentmapper is None: @@ -1105,7 +1113,7 @@ class JoinedLoader(AbstractRelationshipLoader): localparent = parentmapper # whether or not the Query will wrap the selectable in a subquery, - # and then attach eager load joins to that (i.e., in the case of + # and then attach eager load joins to that (i.e., in the case of # LIMIT/OFFSET etc.) should_nest_selectable = context.multi_row_eager_loaders and \ context.query._should_nest_selectable @@ -1120,7 +1128,7 @@ class JoinedLoader(AbstractRelationshipLoader): if clause is not None: # join to an existing FROM clause on the query. # key it to its list index in the eager_joins dict. - # Query._compile_context will adapt as needed and + # Query._compile_context will adapt as needed and # append to the FROM clause of the select(). entity_key, default_towrap = index, clause @@ -1138,14 +1146,14 @@ class JoinedLoader(AbstractRelationshipLoader): else: onclause = getattr( mapperutil.AliasedClass( - self.parent, + self.parent, adapter.selectable - ), + ), self.key, self.parent_property ) if onclause is self.parent_property: - # TODO: this is a temporary hack to + # TODO: this is a temporary hack to # account for polymorphic eager loads where # the eagerload is referencing via of_type(). 
join_to_left = True @@ -1154,10 +1162,10 @@ class JoinedLoader(AbstractRelationshipLoader): context.eager_joins[entity_key] = eagerjoin = \ mapperutil.join( - towrap, - clauses.aliased_class, - onclause, - join_to_left=join_to_left, + towrap, + clauses.aliased_class, + onclause, + join_to_left=join_to_left, isouter=not innerjoin ) @@ -1167,11 +1175,11 @@ class JoinedLoader(AbstractRelationshipLoader): if self.parent_property.secondary is None and \ not parentmapper: # for parentclause that is the non-eager end of the join, - # ensure all the parent cols in the primaryjoin are actually + # ensure all the parent cols in the primaryjoin are actually # in the - # columns clause (i.e. are not deferred), so that aliasing applied + # columns clause (i.e. are not deferred), so that aliasing applied # by the Query propagates those columns outward. - # This has the effect + # This has the effect # of "undefering" those columns. for col in sql_util.find_columns( self.parent_property.primaryjoin): @@ -1196,7 +1204,7 @@ class JoinedLoader(AbstractRelationshipLoader): decorator = context.attributes[ ("user_defined_eager_row_processor", reduced_path)] - # user defined eagerloads are part of the "primary" + # user defined eagerloads are part of the "primary" # portion of the load. # the adapters applied to the Query should be honored. if context.adapter and decorator: @@ -1213,7 +1221,7 @@ class JoinedLoader(AbstractRelationshipLoader): self.mapper.identity_key_from_row(row, decorator) return decorator except KeyError: - # no identity key - dont return a row + # no identity key - dont return a row # processor, will cause a degrade to lazy return False @@ -1221,23 +1229,23 @@ class JoinedLoader(AbstractRelationshipLoader): if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( "'%s' does not support object " - "population - eager loading cannot be applied." % + "population - eager loading cannot be applied." 
% self) our_path = path + (self.key,) our_reduced_path = reduced_path + (self.key,) eager_adapter = self._create_eager_adapter( - context, - row, + context, + row, adapter, our_path, our_reduced_path) if eager_adapter is not False: key = self.key _instance = self.mapper._instance_processor( - context, - our_path + (self.mapper,), + context, + our_path + (self.mapper,), our_reduced_path + (self.mapper.base_mapper,), eager_adapter) @@ -1249,7 +1257,7 @@ class JoinedLoader(AbstractRelationshipLoader): return self.parent_property.\ _get_strategy(LazyLoader).\ create_row_processor( - context, path, + context, path, reduced_path, mapper, row, adapter) @@ -1380,7 +1388,7 @@ class LoadEagerFromAliasOption(PropertyOption): prop = root_mapper._props[propname] adapter = query._polymorphic_adapters.get(prop.mapper, None) query._attributes.setdefault( - ("user_defined_eager_row_processor", + ("user_defined_eager_row_processor", interfaces._reduce_path(path)), adapter) if self.alias is not None: @@ -1389,7 +1397,7 @@ class LoadEagerFromAliasOption(PropertyOption): prop = root_mapper._props[propname] self.alias = prop.target.alias(self.alias) query._attributes[ - ("user_defined_eager_row_processor", + ("user_defined_eager_row_processor", interfaces._reduce_path(paths[-1])) ] = sql_util.ColumnAdapter(self.alias) else: @@ -1397,18 +1405,18 @@ class LoadEagerFromAliasOption(PropertyOption): prop = root_mapper._props[propname] adapter = query._polymorphic_adapters.get(prop.mapper, None) query._attributes[ - ("user_defined_eager_row_processor", + ("user_defined_eager_row_processor", interfaces._reduce_path(paths[-1]))] = adapter def single_parent_validator(desc, prop): def _do_check(state, value, oldvalue, initiator): if value is not None and initiator.key == prop.key: hasparent = initiator.hasparent(attributes.instance_state(value)) - if hasparent and oldvalue is not value: + if hasparent and oldvalue is not value: raise sa_exc.InvalidRequestError( "Instance %s is already associated 
with an instance " "of %s via its %s attribute, and is only allowed a " - "single parent." % + "single parent." % (mapperutil.instance_str(value), state.class_, prop) ) return value diff --git a/libs/sqlalchemy/orm/sync.py b/libs/sqlalchemy/orm/sync.py index a20e871e..3094386b 100644 --- a/libs/sqlalchemy/orm/sync.py +++ b/libs/sqlalchemy/orm/sync.py @@ -1,17 +1,17 @@ # orm/sync.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""private module containing functions used for copying data +"""private module containing functions used for copying data between instances based on join conditions. """ from sqlalchemy.orm import exc, util as mapperutil, attributes -def populate(source, source_mapper, dest, dest_mapper, +def populate(source, source_mapper, dest, dest_mapper, synchronize_pairs, uowcommit, flag_cascaded_pks): source_dict = source.dict dest_dict = dest.dict @@ -20,7 +20,7 @@ def populate(source, source_mapper, dest, dest_mapper, try: # inline of source_mapper._get_state_attr_by_column prop = source_mapper._columntoproperty[l] - value = source.manager[prop.key].impl.get(source, source_dict, + value = source.manager[prop.key].impl.get(source, source_dict, attributes.PASSIVE_OFF) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, dest_mapper, r) @@ -47,7 +47,7 @@ def clear(dest, dest_mapper, synchronize_pairs): if r.primary_key: raise AssertionError( "Dependency rule tried to blank-out primary key " - "column '%s' on instance '%s'" % + "column '%s' on instance '%s'" % (r, mapperutil.state_str(dest)) ) try: @@ -75,7 +75,7 @@ def populate_dict(source, source_mapper, dict_, synchronize_pairs): dict_[r.key] = value def source_modified(uowcommit, source, source_mapper, synchronize_pairs): - """return true if the source object 
has changes from an old to a + """return true if the source object has changes from an old to a new value on the given synchronize pairs """ @@ -84,7 +84,7 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs): prop = source_mapper._columntoproperty[l] except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) - history = uowcommit.get_attribute_history(source, prop.key, + history = uowcommit.get_attribute_history(source, prop.key, attributes.PASSIVE_NO_INITIALIZE) return bool(history.deleted) else: @@ -103,6 +103,6 @@ def _raise_col_to_prop(isdest, source_mapper, source_column, dest_mapper, dest_c "Can't execute sync rule for source column '%s'; mapper '%s' " "does not map this column. Try using an explicit `foreign_keys`" " collection which does not include destination column '%s' (or " - "use a viewonly=True relation)." % + "use a viewonly=True relation)." % (source_column, source_mapper, dest_column) ) diff --git a/libs/sqlalchemy/orm/unitofwork.py b/libs/sqlalchemy/orm/unitofwork.py index 8fc5f139..003d7ae7 100644 --- a/libs/sqlalchemy/orm/unitofwork.py +++ b/libs/sqlalchemy/orm/unitofwork.py @@ -1,5 +1,5 @@ # orm/unitofwork.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -26,7 +26,7 @@ def track_cascade_events(descriptor, prop): key = prop.key def append(state, item, initiator): - # process "save_update" cascade rules for when + # process "save_update" cascade rules for when # an instance is appended to the list of another instance sess = session._state_session(state) @@ -51,7 +51,7 @@ def track_cascade_events(descriptor, prop): sess.expunge(item) def set_(state, newvalue, oldvalue, initiator): - # process "save_update" cascade rules for when an instance + # process "save_update" cascade rules 
for when an instance # is attached to another instance if oldvalue is newvalue: return newvalue @@ -86,12 +86,12 @@ class UOWTransaction(object): def __init__(self, session): self.session = session - # dictionary used by external actors to + # dictionary used by external actors to # store arbitrary state information. self.attributes = {} - # dictionary of mappers to sets of - # DependencyProcessors, which are also + # dictionary of mappers to sets of + # DependencyProcessors, which are also # set to be part of the sorted flush actions, # which have that mapper as a parent. self.deps = util.defaultdict(set) @@ -106,7 +106,7 @@ class UOWTransaction(object): # and determine if a flush action is needed self.presort_actions = {} - # dictionary of PostSortRec objects, each + # dictionary of PostSortRec objects, each # one issues work during the flush within # a certain ordering. self.postsort_actions = {} @@ -124,7 +124,7 @@ class UOWTransaction(object): # tracks InstanceStates which will be receiving # a "post update" call. Keys are mappers, - # values are a set of states and a set of the + # values are a set of states and a set of the # columns which should be included in the update. 
self.post_update_states = util.defaultdict(lambda: (set(), set())) @@ -133,7 +133,7 @@ class UOWTransaction(object): return bool(self.states) def is_deleted(self, state): - """return true if the given state is marked as deleted + """return true if the given state is marked as deleted within this uowtransaction.""" return state in self.states and self.states[state][0] @@ -152,7 +152,7 @@ class UOWTransaction(object): self.states[state] = (isdelete, True) - def get_attribute_history(self, state, key, + def get_attribute_history(self, state, key, passive=attributes.PASSIVE_NO_INITIALIZE): """facade to attributes.get_state_history(), including caching of results.""" @@ -164,12 +164,12 @@ class UOWTransaction(object): if hashkey in self.attributes: history, state_history, cached_passive = self.attributes[hashkey] - # if the cached lookup was "passive" and now + # if the cached lookup was "passive" and now # we want non-passive, do a non-passive lookup and re-cache if cached_passive is not attributes.PASSIVE_OFF \ and passive is attributes.PASSIVE_OFF: impl = state.manager[key].impl - history = impl.get_history(state, state.dict, + history = impl.get_history(state, state.dict, attributes.PASSIVE_OFF) if history and impl.uses_objects: state_history = history.as_state() @@ -197,13 +197,13 @@ class UOWTransaction(object): if key not in self.presort_actions: self.presort_actions[key] = Preprocess(processor, fromparent) - def register_object(self, state, isdelete=False, + def register_object(self, state, isdelete=False, listonly=False, cancel_delete=False, operation=None, prop=None): if not self.session._contains_state(state): if not state.deleted and operation is not None: util.warn("Object of type %s not in session, %s operation " - "along '%s' will not proceed" % + "along '%s' will not proceed" % (mapperutil.state_class_str(state), operation, prop)) return False @@ -228,8 +228,8 @@ class UOWTransaction(object): @util.memoized_property def _mapper_for_dep(self): - """return 
a dynamic mapping of (Mapper, DependencyProcessor) to - True or False, indicating if the DependencyProcessor operates + """return a dynamic mapping of (Mapper, DependencyProcessor) to + True or False, indicating if the DependencyProcessor operates on objects of that Mapper. The result is stored in the dictionary persistently once @@ -241,7 +241,7 @@ class UOWTransaction(object): ) def filter_states_for_dep(self, dep, states): - """Filter the given list of InstanceStates to those relevant to the + """Filter the given list of InstanceStates to those relevant to the given DependencyProcessor. """ @@ -273,7 +273,7 @@ class UOWTransaction(object): # see if the graph of mapper dependencies has cycles. self.cycles = cycles = topological.find_cycles( - self.dependencies, + self.dependencies, self.postsort_actions.values()) if cycles: @@ -319,14 +319,14 @@ class UOWTransaction(object): # execute if self.cycles: for set_ in topological.sort_as_subsets( - self.dependencies, + self.dependencies, postsort_actions): while set_: n = set_.pop() n.execute_aggregate(self, set_) else: for rec in topological.sort( - self.dependencies, + self.dependencies, postsort_actions): rec.execute(self) @@ -470,7 +470,7 @@ class SaveUpdateAll(PostSortRec): assert mapper is mapper.base_mapper def execute(self, uow): - persistence.save_obj(self.mapper, + persistence.save_obj(self.mapper, uow.states_for_mapper_hierarchy(self.mapper, False, False), uow ) @@ -478,8 +478,8 @@ class SaveUpdateAll(PostSortRec): def per_state_flush_actions(self, uow): states = list(uow.states_for_mapper_hierarchy(self.mapper, False, False)) for rec in self.mapper._per_state_flush_actions( - uow, - states, + uow, + states, False): yield rec @@ -501,8 +501,8 @@ class DeleteAll(PostSortRec): def per_state_flush_actions(self, uow): states = list(uow.states_for_mapper_hierarchy(self.mapper, True, False)) for rec in self.mapper._per_state_flush_actions( - uow, - states, + uow, + states, True): yield rec @@ -520,8 +520,8 @@ 
class ProcessState(PostSortRec): cls_ = self.__class__ dependency_processor = self.dependency_processor delete = self.delete - our_recs = [r for r in recs - if r.__class__ is cls_ and + our_recs = [r for r in recs + if r.__class__ is cls_ and r.dependency_processor is dependency_processor and r.delete is delete] recs.difference_update(our_recs) @@ -547,13 +547,13 @@ class SaveUpdateState(PostSortRec): def execute_aggregate(self, uow, recs): cls_ = self.__class__ mapper = self.mapper - our_recs = [r for r in recs - if r.__class__ is cls_ and + our_recs = [r for r in recs + if r.__class__ is cls_ and r.mapper is mapper] recs.difference_update(our_recs) persistence.save_obj(mapper, - [self.state] + - [r.state for r in our_recs], + [self.state] + + [r.state for r in our_recs], uow) def __repr__(self): @@ -570,13 +570,13 @@ class DeleteState(PostSortRec): def execute_aggregate(self, uow, recs): cls_ = self.__class__ mapper = self.mapper - our_recs = [r for r in recs - if r.__class__ is cls_ and + our_recs = [r for r in recs + if r.__class__ is cls_ and r.mapper is mapper] recs.difference_update(our_recs) states = [self.state] + [r.state for r in our_recs] persistence.delete_obj(mapper, - [s for s in states if uow.states[s][0]], + [s for s in states if uow.states[s][0]], uow) def __repr__(self): diff --git a/libs/sqlalchemy/orm/util.py b/libs/sqlalchemy/orm/util.py index 0c5f203a..a8cc80ce 100644 --- a/libs/sqlalchemy/orm/util.py +++ b/libs/sqlalchemy/orm/util.py @@ -1,5 +1,5 @@ # orm/util.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -30,15 +30,15 @@ class CascadeOptions(frozenset): def __new__(cls, arg): values = set([ - c for c + c for c in re.split('\s*,\s*', arg or "") if c ]) if values.difference(cls._allowed_cascades): raise 
sa_exc.ArgumentError( - "Invalid cascade option(s): %s" % - ", ".join([repr(x) for x in + "Invalid cascade option(s): %s" % + ", ".join([repr(x) for x in sorted( values.difference(cls._allowed_cascades) )]) @@ -68,30 +68,42 @@ class CascadeOptions(frozenset): ",".join([x for x in sorted(self)]) ) -def _validator_events(desc, key, validator): +def _validator_events(desc, key, validator, include_removes): """Runs a validation method on an attribute value to be set or appended.""" - def append(state, value, initiator): - return validator(state.obj(), key, value) + if include_removes: + def append(state, value, initiator): + return validator(state.obj(), key, value, False) - def set_(state, value, oldvalue, initiator): - return validator(state.obj(), key, value) + def set_(state, value, oldvalue, initiator): + return validator(state.obj(), key, value, False) + + def remove(state, value, initiator): + validator(state.obj(), key, value, True) + else: + def append(state, value, initiator): + return validator(state.obj(), key, value) + + def set_(state, value, oldvalue, initiator): + return validator(state.obj(), key, value) event.listen(desc, 'append', append, raw=True, retval=True) event.listen(desc, 'set', set_, raw=True, retval=True) + if include_removes: + event.listen(desc, "remove", remove, raw=True, retval=True) def polymorphic_union(table_map, typecolname, aliasname='p_union', cast_nulls=True): """Create a ``UNION`` statement used by a polymorphic mapper. See :ref:`concrete_inheritance` for an example of how this is used. - - :param table_map: mapping of polymorphic identities to + + :param table_map: mapping of polymorphic identities to :class:`.Table` objects. - :param typecolname: string name of a "discriminator" column, which will be + :param typecolname: string name of a "discriminator" column, which will be derived from the query, producing the polymorphic identity for each row. If ``None``, no polymorphic discriminator is generated. 
- :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()` + :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()` construct generated. :param cast_nulls: if True, non-existent columns, which are represented as labeled NULLs, will be passed into CAST. This is a legacy behavior that is problematic @@ -105,7 +117,7 @@ def polymorphic_union(table_map, typecolname, aliasname='p_union', cast_nulls=Tr for key in table_map.keys(): table = table_map[key] - # mysql doesnt like selecting from a select; + # mysql doesnt like selecting from a select; # make it an alias of the select if isinstance(table, sql.Select): table = table.alias() @@ -187,7 +199,7 @@ def identity_key(*args, **kwargs): % ", ".join(kwargs.keys())) mapper = class_mapper(class_) if "ident" in locals(): - return mapper.identity_key_from_primary_key(ident) + return mapper.identity_key_from_primary_key(util.to_list(ident)) return mapper.identity_key_from_row(row) instance = kwargs.pop("instance") if kwargs: @@ -203,14 +215,14 @@ class ORMAdapter(sql_util.ColumnAdapter): and the AliasedClass if any is referenced. """ - def __init__(self, entity, equivalents=None, + def __init__(self, entity, equivalents=None, chain_to=None, adapt_required=False): self.mapper, selectable, is_aliased_class = _entity_info(entity) if is_aliased_class: self.aliased_class = entity else: self.aliased_class = None - sql_util.ColumnAdapter.__init__(self, selectable, + sql_util.ColumnAdapter.__init__(self, selectable, equivalents, chain_to, adapt_required=adapt_required) @@ -236,51 +248,51 @@ class AliasedClass(object): session.query(User, user_alias).\\ join((user_alias, User.id > user_alias.id)).\\ filter(User.name==user_alias.name) - + The resulting object is an instance of :class:`.AliasedClass`, however it implements a ``__getattribute__()`` scheme which will proxy attribute access to that of the ORM class being aliased. 
All classmethods - on the mapped entity should also be available here, including + on the mapped entity should also be available here, including hybrids created with the :ref:`hybrids_toplevel` extension, which will receive the :class:`.AliasedClass` as the "class" argument when classmethods are called. - + :param cls: ORM mapped entity which will be "wrapped" around an alias. :param alias: a selectable, such as an :func:`.alias` or :func:`.select` construct, which will be rendered in place of the mapped table of the - ORM entity. If left as ``None``, an ordinary :class:`.Alias` of the + ORM entity. If left as ``None``, an ordinary :class:`.Alias` of the ORM entity's mapped table will be generated. :param name: A name which will be applied both to the :class:`.Alias` if one is generated, as well as the name present in the "named tuple" returned by the :class:`.Query` object when results are returned. :param adapt_on_names: if True, more liberal "matching" will be used when - mapping the mapped columns of the ORM entity to those of the given selectable - - a name-based match will be performed if the given selectable doesn't - otherwise have a column that corresponds to one on the entity. The + mapping the mapped columns of the ORM entity to those of the given selectable - + a name-based match will be performed if the given selectable doesn't + otherwise have a column that corresponds to one on the entity. The use case for this is when associating an entity with some derived selectable such as one that uses aggregate functions:: - + class UnitPrice(Base): __tablename__ = 'unit_price' ... 
unit_id = Column(Integer) price = Column(Numeric) - + aggregated_unit_price = Session.query( func.sum(UnitPrice.price).label('price') ).group_by(UnitPrice.unit_id).subquery() - + aggregated_unit_price = aliased(UnitPrice, alias=aggregated_unit_price, adapt_on_names=True) - + Above, functions on ``aggregated_unit_price`` which refer to ``.price`` will return the ``fund.sum(UnitPrice.price).label('price')`` column, as it is matched on the name "price". Ordinarily, the "price" function wouldn't have any "column correspondence" to the actual ``UnitPrice.price`` column as it is not a proxy of the original. - - ``adapt_on_names`` is new in 0.7.3. - + + .. versionadded:: 0.7.3 + """ def __init__(self, cls, alias=None, name=None, adapt_on_names=False): self.__mapper = _class_to_mapper(cls) @@ -299,8 +311,8 @@ class AliasedClass(object): def __getstate__(self): return { - 'mapper':self.__mapper, - 'alias':self.__alias, + 'mapper':self.__mapper, + 'alias':self.__alias, 'name':self._sa_label_name, 'adapt_on_names':self.__adapt_on_names, } @@ -321,7 +333,7 @@ class AliasedClass(object): def __adapt_element(self, elem): return self.__adapter.traverse(elem).\ _annotate({ - 'parententity': self, + 'parententity': self, 'parentmapper':self.__mapper} ) @@ -388,7 +400,7 @@ class _ORMJoin(expression.Join): __visit_name__ = expression.Join.__visit_name__ - def __init__(self, left, right, onclause=None, + def __init__(self, left, right, onclause=None, isouter=False, join_to_left=True): adapt_from = None @@ -447,7 +459,7 @@ class _ORMJoin(expression.Join): def join(left, right, onclause=None, isouter=False, join_to_left=True): """Produce an inner join between left and right clauses. 
- + :func:`.orm.join` is an extension to the core join interface provided by :func:`.sql.expression.join()`, where the left and right selectables may be not only core selectable @@ -460,30 +472,30 @@ def join(left, right, onclause=None, isouter=False, join_to_left=True): in whatever form it is passed, to the selectable passed as the left side. If False, the onclause is used as is. - + :func:`.orm.join` is not commonly needed in modern usage, as its functionality is encapsulated within that of the :meth:`.Query.join` method, which features a significant amount of automation beyond :func:`.orm.join` - by itself. Explicit usage of :func:`.orm.join` - with :class:`.Query` involves usage of the + by itself. Explicit usage of :func:`.orm.join` + with :class:`.Query` involves usage of the :meth:`.Query.select_from` method, as in:: - + from sqlalchemy.orm import join session.query(User).\\ select_from(join(User, Address, User.addresses)).\\ filter(Address.email_address=='foo@bar.com') - - In modern SQLAlchemy the above join can be written more + + In modern SQLAlchemy the above join can be written more succinctly as:: - + session.query(User).\\ join(User.addresses).\\ filter(Address.email_address=='foo@bar.com') See :meth:`.Query.join` for information on modern usage of ORM level joins. - + """ return _ORMJoin(left, right, onclause, isouter, join_to_left) @@ -504,23 +516,24 @@ def with_parent(instance, prop): The SQL rendered is the same as that rendered when a lazy loader would fire off from the given parent on that attribute, meaning - that the appropriate state is taken from the parent object in + that the appropriate state is taken from the parent object in Python without the need to render joins to the parent table in the rendered statement. - As of 0.6.4, this method accepts parent instances in all - persistence states, including transient, persistent, and detached. - Only the requisite primary key/foreign key attributes need to - be populated. 
Previous versions didn't work with transient - instances. + .. versionchanged:: 0.6.4 + This method accepts parent instances in all + persistence states, including transient, persistent, and detached. + Only the requisite primary key/foreign key attributes need to + be populated. Previous versions didn't work with transient + instances. :param instance: An instance which has some :func:`.relationship`. :param property: String property name, or class-bound attribute, which indicates - what relationship from the instance should be used to reconcile the - parent/child relationship. + what relationship from the instance should be used to reconcile the + parent/child relationship. """ if isinstance(prop, basestring): @@ -529,8 +542,8 @@ def with_parent(instance, prop): elif isinstance(prop, attributes.QueryableAttribute): prop = prop.property - return prop.compare(operators.eq, - instance, + return prop.compare(operators.eq, + instance, value_is_parent=True) @@ -584,7 +597,7 @@ def _entity_descriptor(entity, key): return getattr(entity, key) except AttributeError: raise sa_exc.InvalidRequestError( - "Entity '%s' has no property '%s'" % + "Entity '%s' has no property '%s'" % (description, key) ) @@ -626,7 +639,7 @@ def object_mapper(instance): raise exc.UnmappedInstanceError(instance) def class_mapper(class_, compile=True): - """Given a class, return the primary :class:`.Mapper` associated + """Given a class, return the primary :class:`.Mapper` associated with the key. Raises :class:`.UnmappedClassError` if no mapping is configured @@ -640,8 +653,8 @@ def class_mapper(class_, compile=True): mapper = class_manager.mapper except exc.NO_STATE: - if not isinstance(class_, type): - raise sa_exc.ArgumentError("Class object expected, got '%r'." % class_) + if not isinstance(class_, type): + raise sa_exc.ArgumentError("Class object expected, got '%r'." 
% class_) raise exc.UnmappedClassError(class_) if compile and mapperlib.module._new_mappers: @@ -672,7 +685,7 @@ def has_identity(object): return state.has_identity def _is_mapped_class(cls): - """Return True if the given object is a mapped class, + """Return True if the given object is a mapped class, :class:`.Mapper`, or :class:`.AliasedClass`.""" if isinstance(cls, (AliasedClass, mapperlib.Mapper)): @@ -685,7 +698,7 @@ def _is_mapped_class(cls): return False def _mapper_or_none(cls): - """Return the :class:`.Mapper` for the given class or None if the + """Return the :class:`.Mapper` for the given class or None if the class is not mapped.""" manager = attributes.manager_of_class(cls) diff --git a/libs/sqlalchemy/pool.py b/libs/sqlalchemy/pool.py index 6254a4ba..0d04998c 100644 --- a/libs/sqlalchemy/pool.py +++ b/libs/sqlalchemy/pool.py @@ -1,5 +1,5 @@ # sqlalchemy/pool.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -16,17 +16,18 @@ regular DB-API connect() methods to be transparently managed by a SQLAlchemy connection pool. """ -import weakref, time, traceback +import weakref +import time +import traceback from sqlalchemy import exc, log, event, events, interfaces, util from sqlalchemy.util import queue as sqla_queue from sqlalchemy.util import threading, memoized_property, \ chop_traceback - proxies = {} def manage(module, **params): - """Return a proxy for a DB-API module that automatically + """Return a proxy for a DB-API module that automatically pools connections. 
Given a DB-API 2.0 module and pool management parameters, returns @@ -65,11 +66,11 @@ reset_none = util.symbol('reset_none') class Pool(log.Identified): """Abstract base class for connection pools.""" - def __init__(self, - creator, recycle=-1, echo=None, + def __init__(self, + creator, recycle=-1, echo=None, use_threadlocal=False, logging_name=None, - reset_on_return=True, + reset_on_return=True, listeners=None, events=None, _dispatch=None): @@ -86,8 +87,8 @@ class Pool(log.Identified): replaced with a newly opened connection. Defaults to -1. :param logging_name: String identifier which will be used within - the "name" field of logging records generated within the - "sqlalchemy.pool" logger. Defaults to a hexstring of the object's + the "name" field of logging records generated within the + "sqlalchemy.pool" logger. Defaults to a hexstring of the object's id. :param echo: If True, connections being pulled and retrieved @@ -120,7 +121,7 @@ class Pool(log.Identified): :class:`~sqlalchemy.interfaces.PoolListener`-like objects or dictionaries of callables that receive events when DB-API connections are created, checked out and checked in to the - pool. This has been superseded by + pool. This has been superseded by :func:`~sqlalchemy.event.listen`. """ @@ -142,7 +143,7 @@ class Pool(log.Identified): self._reset_on_return = reset_commit else: raise exc.ArgumentError( - "Invalid value for 'reset_on_return': %r" + "Invalid value for 'reset_on_return': %r" % reset_on_return) self.echo = echo @@ -191,8 +192,8 @@ class Pool(log.Identified): """Return a new :class:`.Pool`, of the same class as this one and configured with identical creation arguments. - This method is used in conjunection with :meth:`dispose` - to close out an entire :class:`.Pool` and create a new one in + This method is used in conjunection with :meth:`dispose` + to close out an entire :class:`.Pool` and create a new one in its place. 
""" @@ -205,18 +206,29 @@ class Pool(log.Identified): This method leaves the possibility of checked-out connections remaining open, as it only affects connections that are idle in the pool. - + See also the :meth:`Pool.recreate` method. """ raise NotImplementedError() + def _replace(self): + """Dispose + recreate this pool. + + Subclasses may employ special logic to + move threads waiting on this pool to the + new one. + + """ + self.dispose() + return self.recreate() + def connect(self): """Return a DBAPI connection from the pool. - The connection is instrumented such that when its - ``close()`` method is called, the connection will be returned to + The connection is instrumented such that when its + ``close()`` method is called, the connection will be returned to the pool. """ @@ -270,7 +282,9 @@ class _ConnectionRecord(object): self.connection = self.__connect() self.info = {} - pool.dispatch.first_connect.exec_once(self.connection, self) + pool.dispatch.first_connect.\ + for_modify(pool.dispatch).\ + exec_once(self.connection, self) pool.dispatch.connect(self.connection, self) def close(self): @@ -360,11 +374,11 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo): if connection_record is not None: connection_record.fairy = None if echo: - pool.logger.debug("Connection %r being returned to pool", + pool.logger.debug("Connection %r being returned to pool", connection) if connection_record.finalize_callback: connection_record.finalize_callback(connection) - del connection_record.finalize_callback + del connection_record.finalize_callback if pool.dispatch.checkin: pool.dispatch.checkin(connection, connection_record) pool._return_conn(connection_record) @@ -387,13 +401,13 @@ class _ConnectionFairy(object): rec = self._connection_record = pool._do_get() conn = self.connection = self._connection_record.get_connection() rec.fairy = weakref.ref( - self, + self, lambda ref:_finalize_fairy and _finalize_fairy(conn, rec, pool, ref, _echo) ) 
_refs.add(rec) except: # helps with endless __getattr__ loops later on - self.connection = None + self.connection = None self._connection_record = None raise if self._echo: @@ -455,7 +469,7 @@ class _ConnectionFairy(object): attempts = 2 while attempts > 0: try: - self._pool.dispatch.checkout(self.connection, + self._pool.dispatch.checkout(self.connection, self._connection_record, self) return self @@ -498,7 +512,7 @@ class _ConnectionFairy(object): self._close() def _close(self): - _finalize_fairy(self.connection, self._connection_record, + _finalize_fairy(self.connection, self._connection_record, self._pool, None, self._echo) self.connection = None self._connection_record = None @@ -511,7 +525,7 @@ class SingletonThreadPool(Pool): Options are the same as those of :class:`.Pool`, as well as: - :param pool_size: The number of threads in which to maintain connections + :param pool_size: The number of threads in which to maintain connections at once. Defaults to five. :class:`.SingletonThreadPool` is used by the SQLite dialect @@ -529,12 +543,12 @@ class SingletonThreadPool(Pool): def recreate(self): self.logger.info("Pool recreating") - return self.__class__(self._creator, - pool_size=self.size, - recycle=self._recycle, - echo=self.echo, + return self.__class__(self._creator, + pool_size=self.size, + recycle=self._recycle, + echo=self.echo, logging_name=self._orig_logging_name, - use_threadlocal=self._use_threadlocal, + use_threadlocal=self._use_threadlocal, _dispatch=self.dispatch) def dispose(self): @@ -578,10 +592,16 @@ class SingletonThreadPool(Pool): self._cleanup() return c +class DummyLock(object): + def acquire(self, wait=True): + return True + def release(self): + pass + class QueuePool(Pool): """A :class:`.Pool` that imposes a limit on the number of open connections. 
- :class:`.QueuePool` is the default pooling implementation used for + :class:`.QueuePool` is the default pooling implementation used for all :class:`.Engine` objects, unless the SQLite dialect is in use. """ @@ -640,37 +660,39 @@ class QueuePool(Pool): :meth:`unique_connection` method is provided to bypass the threadlocal behavior installed into :meth:`connect`. - :param reset_on_return: Determine steps to take on - connections as they are returned to the pool. - As of SQLAlchemy 0.7.6, reset_on_return can have any - of these values: - + :param reset_on_return: Determine steps to take on + connections as they are returned to the pool. + reset_on_return can have any of these values: + * 'rollback' - call rollback() on the connection, to release locks and transaction resources. This is the default value. The vast majority of use cases should leave this value set. - * True - same as 'rollback', this is here for + * True - same as 'rollback', this is here for backwards compatibility. * 'commit' - call commit() on the connection, - to release locks and transaction resources. + to release locks and transaction resources. A commit here may be desirable for databases that cache query plans if a commit is emitted, such as Microsoft SQL Server. However, this value is more dangerous than 'rollback' because any data changes present on the transaction are committed unconditionally. - * None - don't do anything on the connection. - This setting should only be made on a database - that has no transaction support at all, - namely MySQL MyISAM. By not doing anything, - performance can be improved. This - setting should **never be selected** for a - database that supports transactions, - as it will lead to deadlocks and stale - state. - * False - same as None, this is here for - backwards compatibility. - + * None - don't do anything on the connection. + This setting should only be made on a database + that has no transaction support at all, + namely MySQL MyISAM. 
By not doing anything, + performance can be improved. This + setting should **never be selected** for a + database that supports transactions, + as it will lead to deadlocks and stale + state. + * False - same as None, this is here for + backwards compatibility. + + .. versionchanged:: 0.7.6 + ``reset_on_return`` accepts values. + :param listeners: A list of :class:`~sqlalchemy.interfaces.PoolListener`-like objects or dictionaries of callables that receive events when DB-API @@ -684,37 +706,26 @@ class QueuePool(Pool): self._max_overflow = max_overflow self._timeout = timeout self._overflow_lock = self._max_overflow > -1 and \ - threading.Lock() or None - - def recreate(self): - self.logger.info("Pool recreating") - return self.__class__(self._creator, pool_size=self._pool.maxsize, - max_overflow=self._max_overflow, - timeout=self._timeout, - recycle=self._recycle, echo=self.echo, - logging_name=self._orig_logging_name, - use_threadlocal=self._use_threadlocal, - _dispatch=self.dispatch) + threading.Lock() or DummyLock() def _do_return_conn(self, conn): try: self._pool.put(conn, False) except sqla_queue.Full: conn.close() - if self._overflow_lock is None: + self._overflow_lock.acquire() + try: self._overflow -= 1 - else: - self._overflow_lock.acquire() - try: - self._overflow -= 1 - finally: - self._overflow_lock.release() + finally: + self._overflow_lock.release() def _do_get(self): try: wait = self._max_overflow > -1 and \ self._overflow >= self._max_overflow return self._pool.get(wait, self._timeout) + except sqla_queue.SAAbort, aborted: + return aborted.context._do_get() except sqla_queue.Empty: if self._max_overflow > -1 and \ self._overflow >= self._max_overflow: @@ -723,25 +734,30 @@ class QueuePool(Pool): else: raise exc.TimeoutError( "QueuePool limit of size %d overflow %d reached, " - "connection timed out, timeout %d" % + "connection timed out, timeout %d" % (self.size(), self.overflow(), self._timeout)) - if self._overflow_lock is not None: - 
self._overflow_lock.acquire() - - if self._max_overflow > -1 and \ - self._overflow >= self._max_overflow: - if self._overflow_lock is not None: - self._overflow_lock.release() - return self._do_get() - + self._overflow_lock.acquire() try: - con = self._create_connection() - self._overflow += 1 + if self._max_overflow > -1 and \ + self._overflow >= self._max_overflow: + return self._do_get() + else: + con = self._create_connection() + self._overflow += 1 + return con finally: - if self._overflow_lock is not None: - self._overflow_lock.release() - return con + self._overflow_lock.release() + + def recreate(self): + self.logger.info("Pool recreating") + return self.__class__(self._creator, pool_size=self._pool.maxsize, + max_overflow=self._max_overflow, + timeout=self._timeout, + recycle=self._recycle, echo=self.echo, + logging_name=self._orig_logging_name, + use_threadlocal=self._use_threadlocal, + _dispatch=self.dispatch) def dispose(self): while True: @@ -754,12 +770,18 @@ class QueuePool(Pool): self._overflow = 0 - self.size() self.logger.info("Pool disposed. %s", self.status()) + def _replace(self): + self.dispose() + np = self.recreate() + self._pool.abort(np) + return np + def status(self): return "Pool size: %d Connections in pool: %d "\ "Current Overflow: %d Current Checked out "\ - "connections: %d" % (self.size(), - self.checkedin(), - self.overflow(), + "connections: %d" % (self.size(), + self.checkedin(), + self.overflow(), self.checkedout()) def size(self): @@ -784,9 +806,9 @@ class NullPool(Pool): invalidation are not supported by this Pool implementation, since no connections are held persistently. - :class:`.NullPool` is used by the SQlite dilalect automatically - when a file-based database is used (as of SQLAlchemy 0.7). - See :ref:`sqlite_toplevel`. + .. versionchanged:: 0.7 + :class:`.NullPool` is used by the SQlite dialect automatically + when a file-based database is used. See :ref:`sqlite_toplevel`. 
""" @@ -802,11 +824,11 @@ class NullPool(Pool): def recreate(self): self.logger.info("Pool recreating") - return self.__class__(self._creator, - recycle=self._recycle, - echo=self.echo, + return self.__class__(self._creator, + recycle=self._recycle, + echo=self.echo, logging_name=self._orig_logging_name, - use_threadlocal=self._use_threadlocal, + use_threadlocal=self._use_threadlocal, _dispatch=self.dispatch) def dispose(self): @@ -865,10 +887,11 @@ class AssertionPool(Pool): This will raise an exception if more than one connection is checked out at a time. Useful for debugging code that is using more connections than desired. - - :class:`.AssertionPool` also logs a traceback of where - the original connection was checked out, and reports - this in the assertion error raised (new in 0.7). + + .. versionchanged:: 0.7 + :class:`.AssertionPool` also logs a traceback of where + the original connection was checked out, and reports + this in the assertion error raised. """ def __init__(self, *args, **kw): @@ -894,7 +917,7 @@ class AssertionPool(Pool): def recreate(self): self.logger.info("Pool recreating") - return self.__class__(self._creator, echo=self.echo, + return self.__class__(self._creator, echo=self.echo, logging_name=self._orig_logging_name, _dispatch=self.dispatch) @@ -961,7 +984,7 @@ class _DBProxy(object): try: if key not in self.pools: kw.pop('sa_pool_key', None) - pool = self.poolclass(lambda: + pool = self.poolclass(lambda: self.module.connect(*args, **kw), **self.kw) self.pools[key] = pool return pool @@ -1000,6 +1023,6 @@ class _DBProxy(object): return kw['sa_pool_key'] return tuple( - list(args) + + list(args) + [(k, kw[k]) for k in sorted(kw)] ) diff --git a/libs/sqlalchemy/processors.py b/libs/sqlalchemy/processors.py index c4bac283..bc5c3909 100644 --- a/libs/sqlalchemy/processors.py +++ b/libs/sqlalchemy/processors.py @@ -5,7 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: 
http://www.opensource.org/licenses/mit-license.php -"""defines generic type conversion functions, as used in bind and result +"""defines generic type conversion functions, as used in bind and result processors. They all share one common characteristic: None is passed through unchanged. @@ -110,9 +110,9 @@ try: def to_decimal_processor_factory(target_class, scale=10): # Note that the scale argument is not taken into account for integer - # values in the C implementation while it is in the Python one. - # For example, the Python implementation might return - # Decimal('5.00000') whereas the C implementation will + # values in the C implementation while it is in the Python one. + # For example, the Python implementation might return + # Decimal('5.00000') whereas the C implementation will # return Decimal('5'). These are equivalent of course. return DecimalResultProcessor(target_class, "%%.%df" % scale).process diff --git a/libs/sqlalchemy/schema.py b/libs/sqlalchemy/schema.py index d2951437..154fb5f7 100644 --- a/libs/sqlalchemy/schema.py +++ b/libs/sqlalchemy/schema.py @@ -1,5 +1,5 @@ # sqlalchemy/schema.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -84,7 +84,7 @@ def _validate_dialect_kwargs(kwargs, name): # validate remaining kwargs that they all specify DB prefixes if len([k for k in kwargs if not re.match( - r'^(?:%s)_' % + r'^(?:%s)_' % '|'.join(dialects.__all__), k ) ]): @@ -97,7 +97,7 @@ class Table(SchemaItem, expression.TableClause): e.g.:: - mytable = Table("mytable", metadata, + mytable = Table("mytable", metadata, Column('mytable_id', Integer, primary_key=True), Column('value', String(50)) ) @@ -105,7 +105,7 @@ class Table(SchemaItem, expression.TableClause): The :class:`.Table` object constructs a unique instance of itself based on 
its name and optional schema name within the given :class:`.MetaData` object. Calling the :class:`.Table` - constructor with the same name and same :class:`.MetaData` argument + constructor with the same name and same :class:`.MetaData` argument a second time will return the *same* :class:`.Table` object - in this way the :class:`.Table` constructor acts as a registry function. @@ -115,7 +115,7 @@ class Table(SchemaItem, expression.TableClause): Constructor arguments are as follows: - :param name: The name of this table as represented in the database. + :param name: The name of this table as represented in the database. This property, along with the *schema*, indicates the *singleton identity* of this table in relation to its parent :class:`.MetaData`. @@ -126,13 +126,13 @@ class Table(SchemaItem, expression.TableClause): will be treated as case insensitive names, and will not be quoted unless they are a reserved word. Names with any number of upper case characters will be quoted and sent exactly. Note that this - behavior applies even for databases which standardize upper + behavior applies even for databases which standardize upper case names as case insensitive such as Oracle. - :param metadata: a :class:`.MetaData` object which will contain this + :param metadata: a :class:`.MetaData` object which will contain this table. The metadata is used as a point of association of this table with other tables which are referenced via foreign key. It also - may be used to associate this table with a particular + may be used to associate this table with a particular :class:`~sqlalchemy.engine.base.Connectable`. :param \*args: Additional positional arguments are used primarily @@ -141,21 +141,23 @@ class Table(SchemaItem, expression.TableClause): :class:`.SchemaItem` constructs may be added here, including :class:`.PrimaryKeyConstraint`, and :class:`.ForeignKeyConstraint`. 
- :param autoload: Defaults to False: the Columns for this table should + :param autoload: Defaults to False: the Columns for this table should be reflected from the database. Usually there will be no Column objects in the constructor if this property is set. - :param autoload_replace: If ``True``, when using ``autoload=True`` + :param autoload_replace: If ``True``, when using ``autoload=True`` and ``extend_existing=True``, replace ``Column`` objects already present in the ``Table`` that's - in the ``MetaData`` registry with + in the ``MetaData`` registry with what's reflected. Otherwise, all existing columns will be excluded from the reflection process. Note that this does not impact ``Column`` objects specified in the same call to ``Table`` which includes ``autoload``, those always take precedence. - Defaults to ``True``. New in 0.7.5. + Defaults to ``True``. - :param autoload_with: If autoload==True, this is an optional Engine + .. versionadded:: 0.7.5 + + :param autoload_with: If autoload==True, this is an optional Engine or Connection instance to be used for the table reflection. If ``None``, the underlying MetaData's bound connectable will be used. @@ -164,15 +166,17 @@ class Table(SchemaItem, expression.TableClause): the constructor to the existing :class:`.Table`. If ``extend_existing`` or ``keep_existing`` are not set, an error is - raised if additional table modifiers are specified when + raised if additional table modifiers are specified when the given :class:`.Table` is already present in the :class:`.MetaData`. - As of version 0.7.4, ``extend_existing`` will work in conjunction - with ``autoload=True`` to run a new reflection operation against - the database; new :class:`.Column` objects will be produced - from database metadata to replace those existing with the same - name, and additional :class:`.Column` objects not present - in the :class:`.Table` will be added. + .. 
versionchanged:: 0.7.4 + ``extend_existing`` will work in conjunction + with ``autoload=True`` to run a new reflection operation against + the database; new :class:`.Column` objects will be produced + from database metadata to replace those existing with the same + name, and additional :class:`.Column` objects not present + in the :class:`.Table` will be added. + As is always the case with ``autoload=True``, :class:`.Column` objects can be specified in the same :class:`.Table` constructor, which will take precedence. I.e.:: @@ -184,17 +188,17 @@ class Table(SchemaItem, expression.TableClause): autoload_with=engine ) - The above will overwrite all columns within ``mytable`` which + The above will overwrite all columns within ``mytable`` which are present in the database, except for ``y`` which will be used as is from the above definition. If the ``autoload_replace`` flag is set to False, no existing columns will be replaced. - :param implicit_returning: True by default - indicates that - RETURNING can be used by default to fetch newly inserted primary key - values, for backends which support this. Note that + :param implicit_returning: True by default - indicates that + RETURNING can be used by default to fetch newly inserted primary key + values, for backends which support this. Note that create_engine() also provides an implicit_returning flag. - :param include_columns: A list of strings indicating a subset of + :param include_columns: A list of strings indicating a subset of columns to be loaded via the ``autoload`` operation; table columns who aren't present in this list will not be represented on the resulting ``Table`` object. Defaults to ``None`` which indicates all columns @@ -203,7 +207,7 @@ class Table(SchemaItem, expression.TableClause): :param info: A dictionary which defaults to ``{}``. A space to store application specific data. This must be a dictionary. 
- :param keep_existing: When ``True``, indicates that if this Table + :param keep_existing: When ``True``, indicates that if this Table is already present in the given :class:`.MetaData`, ignore further arguments within the constructor to the existing :class:`.Table`, and return the :class:`.Table` object as @@ -214,13 +218,13 @@ class Table(SchemaItem, expression.TableClause): being applied a second time. Also see extend_existing. If extend_existing or keep_existing are not set, an error is - raised if additional table modifiers are specified when + raised if additional table modifiers are specified when the given :class:`.Table` is already present in the :class:`.MetaData`. :param listeners: A list of tuples of the form ``(, )`` - which will be passed to :func:`.event.listen` upon construction. + which will be passed to :func:`.event.listen` upon construction. This alternate hook to :func:`.event.listen` allows the establishment - of a listener function specific to this :class:`.Table` before + of a listener function specific to this :class:`.Table` before the "autoload" process begins. Particularly useful for the :meth:`.events.column_reflect` event:: @@ -229,13 +233,13 @@ class Table(SchemaItem, expression.TableClause): # ... t = Table( - 'sometable', + 'sometable', autoload=True, listeners=[ ('column_reflect', listen_for_reflect) ]) - :param mustexist: When ``True``, indicates that this Table must already + :param mustexist: When ``True``, indicates that this Table must already be present in the given :class:`.MetaData`` collection, else an exception is raised. @@ -246,14 +250,14 @@ class Table(SchemaItem, expression.TableClause): :param quote: Force quoting of this table's name on or off, corresponding to ``True`` or ``False``. 
When left at its default of ``None``, the column identifier will be quoted according to whether the name is - case sensitive (identifiers with at least one upper case character are - treated as case sensitive), or if it's a reserved word. This flag + case sensitive (identifiers with at least one upper case character are + treated as case sensitive), or if it's a reserved word. This flag is only needed to force quoting of a reserved word which is not known by the SQLAlchemy dialect. :param quote_schema: same as 'quote' but applies to the schema identifier. - :param schema: The *schema name* for this table, which is required if + :param schema: The *schema name* for this table, which is required if the table resides in a schema other than the default selected schema for the engine's database connection. Defaults to ``None``. @@ -326,7 +330,7 @@ class Table(SchemaItem, expression.TableClause): for constructor arguments. """ - # __init__ is overridden to prevent __new__ from + # __init__ is overridden to prevent __new__ from # calling the superclass constructor. 
def _init(self, name, metadata, *args, **kwargs): @@ -342,7 +346,7 @@ class Table(SchemaItem, expression.TableClause): self.indexes = set() self.constraints = set() self._columns = expression.ColumnCollection() - PrimaryKeyConstraint()._set_parent_with_dispatch(self) + PrimaryKeyConstraint()._set_parent_with_dispatch(self) self.foreign_keys = set() self._extra_dependencies = set() self.kwargs = {} @@ -383,7 +387,7 @@ class Table(SchemaItem, expression.TableClause): def _autoload(self, metadata, autoload_with, include_columns, exclude_columns=()): if self.primary_key.columns: PrimaryKeyConstraint(*[ - c for c in self.primary_key.columns + c for c in self.primary_key.columns if c.key in exclude_columns ])._set_parent_with_dispatch(self) @@ -393,7 +397,7 @@ class Table(SchemaItem, expression.TableClause): self, include_columns, exclude_columns ) else: - bind = _bind_or_error(metadata, + bind = _bind_or_error(metadata, msg="No engine is bound to this Table's MetaData. " "Pass an engine to the Table via " "autoload_with=, " @@ -452,14 +456,13 @@ class Table(SchemaItem, expression.TableClause): def _init_collections(self): pass - @util.memoized_property def _autoincrement_column(self): for col in self.primary_key: if col.autoincrement and \ col.type._type_affinity is not None and \ issubclass(col.type._type_affinity, sqltypes.Integer) and \ - (not col.foreign_keys or col.autoincrement=='ignore_fk') and \ + (not col.foreign_keys or col.autoincrement == 'ignore_fk') and \ isinstance(col.default, (type(None), Sequence)) and \ (col.server_default is None or col.server_default.reflected): return col @@ -489,8 +492,8 @@ class Table(SchemaItem, expression.TableClause): This is another Table object which must be created first before this one can, or dropped after this one. - Usually, dependencies between tables are determined via - ForeignKey objects. However, for other situations that + Usually, dependencies between tables are determined via + ForeignKey objects. 
However, for other situations that create dependencies outside of foreign keys (rules, inheriting), this method can manually establish such a link. @@ -507,11 +510,11 @@ class Table(SchemaItem, expression.TableClause): UPDATE, etc. statements generated from this :class:`~.schema.Table` construct. - Note that this does **not** change the definition of the table + Note that this does **not** change the definition of the table as it exists within any underlying database, assuming that - table has already been created in the database. Relational - databases support the addition of columns to existing tables - using the SQL ALTER command, which would need to be + table has already been created in the database. Relational + databases support the addition of columns to existing tables + using the SQL ALTER command, which would need to be emitted for an already-existing table that doesn't contain the newly added column. @@ -523,11 +526,11 @@ class Table(SchemaItem, expression.TableClause): """Append a :class:`~.schema.Constraint` to this :class:`~.schema.Table`. This has the effect of the constraint being included in any - future CREATE TABLE statement, assuming specific DDL creation - events have not been associated with the given :class:`~.schema.Constraint` + future CREATE TABLE statement, assuming specific DDL creation + events have not been associated with the given :class:`~.schema.Constraint` object. - Note that this does **not** produce the constraint within the + Note that this does **not** produce the constraint within the relational database automatically, for a table that already exists in the database. 
To add a constraint to an existing relational database table, the SQL ALTER command must @@ -554,7 +557,7 @@ class Table(SchemaItem, expression.TableClause): metadata._add_table(self.name, self.schema, self) self.metadata = metadata - def get_children(self, column_collections=True, + def get_children(self, column_collections=True, schema_visitor=False, **kw): if not schema_visitor: return expression.TableClause.get_children( @@ -571,11 +574,11 @@ class Table(SchemaItem, expression.TableClause): if bind is None: bind = _bind_or_error(self) - return bind.run_callable(bind.dialect.has_table, + return bind.run_callable(bind.dialect.has_table, self.name, schema=self.schema) def create(self, bind=None, checkfirst=False): - """Issue a ``CREATE`` statement for this + """Issue a ``CREATE`` statement for this :class:`.Table`, using the given :class:`.Connectable` for connectivity. @@ -585,13 +588,13 @@ class Table(SchemaItem, expression.TableClause): if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaGenerator, - self, + bind._run_visitor(ddl.SchemaGenerator, + self, checkfirst=checkfirst) def drop(self, bind=None, checkfirst=False): - """Issue a ``DROP`` statement for this + """Issue a ``DROP`` statement for this :class:`.Table`, using the given :class:`.Connectable` for connectivity. @@ -600,8 +603,8 @@ class Table(SchemaItem, expression.TableClause): """ if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaDropper, - self, + bind._run_visitor(ddl.SchemaDropper, + self, checkfirst=checkfirst) @@ -664,7 +667,7 @@ class Column(SchemaItem, expression.ColumnClause): """ Construct a new ``Column`` object. - :param name: The name of this column as represented in the database. + :param name: The name of this column as represented in the database. This argument may be the first positional argument, or specified via keyword. 
@@ -672,15 +675,15 @@ class Column(SchemaItem, expression.ColumnClause): will be treated as case insensitive names, and will not be quoted unless they are a reserved word. Names with any number of upper case characters will be quoted and sent exactly. Note that this - behavior applies even for databases which standardize upper + behavior applies even for databases which standardize upper case names as case insensitive such as Oracle. The name field may be omitted at construction time and applied - later, at any time before the Column is associated with a + later, at any time before the Column is associated with a :class:`.Table`. This is to support convenient usage within the :mod:`~sqlalchemy.ext.declarative` extension. - :param type\_: The column's type, indicated using an instance which + :param type\_: The column's type, indicated using an instance which subclasses :class:`~sqlalchemy.types.TypeEngine`. If no arguments are required for the type, the class of the type can be sent as well, e.g.:: @@ -694,30 +697,30 @@ class Column(SchemaItem, expression.ColumnClause): The ``type`` argument may be the second positional argument or specified by keyword. - There is partial support for automatic detection of the - type based on that of a :class:`.ForeignKey` associated - with this column, if the type is specified as ``None``. - However, this feature is not fully implemented and + There is partial support for automatic detection of the + type based on that of a :class:`.ForeignKey` associated + with this column, if the type is specified as ``None``. + However, this feature is not fully implemented and may not function in all cases. - :param \*args: Additional positional arguments include various - :class:`.SchemaItem` derived constructs which will be applied - as options to the column. These include instances of - :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`, - and :class:`.Sequence`. 
In some cases an equivalent keyword + :param \*args: Additional positional arguments include various + :class:`.SchemaItem` derived constructs which will be applied + as options to the column. These include instances of + :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`, + and :class:`.Sequence`. In some cases an equivalent keyword argument is available such as ``server_default``, ``default`` and ``unique``. - :param autoincrement: This flag may be set to ``False`` to + :param autoincrement: This flag may be set to ``False`` to indicate an integer primary key column that should not be considered to be the "autoincrement" column, that is - the integer primary key column which generates values + the integer primary key column which generates values implicitly upon INSERT and whose value is usually returned via the DBAPI cursor.lastrowid attribute. It defaults to ``True`` to satisfy the common use case of a table with a single integer primary key column. If the table has a composite primary key consisting of more than one - integer column, set this flag to True only on the + integer column, set this flag to True only on the column that should be considered "autoincrement". The setting *only* has an effect for columns which are: @@ -727,7 +730,9 @@ class Column(SchemaItem, expression.ColumnClause): * Part of the primary key * Are not referenced by any foreign keys, unless - the value is specified as ``'ignore_fk'`` (new in 0.7.4) + the value is specified as ``'ignore_fk'`` + + .. versionadded:: 0.7.4 * have no server side or client side defaults (with the exception of Postgresql SERIAL). @@ -738,25 +743,26 @@ class Column(SchemaItem, expression.ColumnClause): * DDL issued for the column will include database-specific keywords intended to signify this column as an "autoincrement" column, such as AUTO INCREMENT on MySQL, - SERIAL on Postgresql, and IDENTITY on MS-SQL. It does + SERIAL on Postgresql, and IDENTITY on MS-SQL. 
It does *not* issue AUTOINCREMENT for SQLite since this is a special SQLite flag that is not required for autoincrementing behavior. See the SQLite dialect documentation for information on SQLite's AUTOINCREMENT. - * The column will be considered to be available as + * The column will be considered to be available as cursor.lastrowid or equivalent, for those dialects which "post fetch" newly inserted identifiers after a row has - been inserted (SQLite, MySQL, MS-SQL). It does not have - any effect in this regard for databases that use sequences - to generate primary key identifiers (i.e. Firebird, Postgresql, + been inserted (SQLite, MySQL, MS-SQL). It does not have + any effect in this regard for databases that use sequences + to generate primary key identifiers (i.e. Firebird, Postgresql, Oracle). - As of 0.7.4, ``autoincrement`` accepts a special value ``'ignore_fk'`` - to indicate that autoincrementing status regardless of foreign key - references. This applies to certain composite foreign key - setups, such as the one demonstrated in the ORM documentation - at :ref:`post_update`. + .. versionchanged:: 0.7.4 + ``autoincrement`` accepts a special value ``'ignore_fk'`` + to indicate that autoincrementing status regardless of foreign key + references. This applies to certain composite foreign key + setups, such as the one demonstrated in the ORM documentation + at :ref:`post_update`. :param default: A scalar, Python callable, or :class:`~sqlalchemy.sql.expression.ClauseElement` representing the @@ -765,7 +771,7 @@ class Column(SchemaItem, expression.ColumnClause): the insert. This is a shortcut to using :class:`.ColumnDefault` as a positional argument. - Contrast this argument to ``server_default`` which creates a + Contrast this argument to ``server_default`` which creates a default generator on the database side. 
:param doc: optional String that can be used by the ORM or similar @@ -787,7 +793,7 @@ class Column(SchemaItem, expression.ColumnClause): :param info: A dictionary which defaults to ``{}``. A space to store application specific data. This must be a dictionary. - :param nullable: If set to the default of ``True``, indicates the + :param nullable: If set to the default of ``True``, indicates the column will be rendered as allowing NULL, else it's rendered as NOT NULL. This parameter is only used when issuing CREATE TABLE statements. @@ -917,7 +923,7 @@ class Column(SchemaItem, expression.ColumnClause): if self.server_default is not None: if isinstance(self.server_default, FetchedValue): - args.append(self.server_default) + args.append(self.server_default._as_for_update(False)) else: args.append(DefaultClause(self.server_default)) @@ -929,7 +935,7 @@ class Column(SchemaItem, expression.ColumnClause): if self.server_onupdate is not None: if isinstance(self.server_onupdate, FetchedValue): - args.append(self.server_onupdate) + args.append(self.server_onupdate._as_for_update(True)) else: args.append(DefaultClause(self.server_onupdate, for_update=True)) @@ -989,7 +995,7 @@ class Column(SchemaItem, expression.ColumnClause): [repr(self.name)] + [repr(self.type)] + [repr(x) for x in self.foreign_keys if x is not None] + [repr(x) for x in self.constraints] + - [(self.table is not None and "table=<%s>" % + [(self.table is not None and "table=<%s>" % self.table.description or "table=None")] + ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg]) @@ -1004,7 +1010,7 @@ class Column(SchemaItem, expression.ColumnClause): existing = getattr(self, 'table', None) if existing is not None and existing is not table: raise exc.ArgumentError( - "Column object already assigned to Table '%s'" % + "Column object already assigned to Table '%s'" % existing.description) if self.key in table._columns: @@ -1022,6 +1028,7 @@ class Column(SchemaItem, expression.ColumnClause): if self.primary_key: 
table.primary_key._replace(self) + Table._autoincrement_column._reset(table) elif self.key in table.primary_key: raise exc.ArgumentError( "Trying to redefine primary-key column '%s' as a " @@ -1064,15 +1071,15 @@ class Column(SchemaItem, expression.ColumnClause): [c.copy(**kw) for c in self.foreign_keys if not c.constraint] c = self._constructor( - name=self.name, - type_=self.type, - key = self.key, - primary_key = self.primary_key, - nullable = self.nullable, - unique = self.unique, - quote=self.quote, - index=self.index, - autoincrement=self.autoincrement, + name=self.name, + type_=self.type, + key = self.key, + primary_key = self.primary_key, + nullable = self.nullable, + unique = self.unique, + quote=self.quote, + index=self.index, + autoincrement=self.autoincrement, default=self.default, server_default=self.server_default, onupdate=self.onupdate, @@ -1100,11 +1107,11 @@ class Column(SchemaItem, expression.ColumnClause): "been assigned.") try: c = self._constructor( - expression._as_truncated(name or self.name), - self.type, - key = name or self.key, - primary_key = self.primary_key, - nullable = self.nullable, + expression._as_truncated(name or self.name), + self.type, + key = name or self.key, + primary_key = self.primary_key, + nullable = self.nullable, quote=self.quote, _proxies=[self], *fk) except TypeError, e: # Py3K @@ -1135,7 +1142,7 @@ class Column(SchemaItem, expression.ColumnClause): def get_children(self, schema_visitor=False, **kwargs): if schema_visitor: - return [x for x in (self.default, self.onupdate) + return [x for x in (self.default, self.onupdate) if x is not None] + \ list(self.foreign_keys) + list(self.constraints) else: @@ -1148,7 +1155,7 @@ class ForeignKey(SchemaItem): ``ForeignKey`` is specified as an argument to a :class:`.Column` object, e.g.:: - t = Table("remote_table", metadata, + t = Table("remote_table", metadata, Column("remote_id", ForeignKey("main_table.id")) ) @@ -1156,7 +1163,7 @@ class ForeignKey(SchemaItem): a 
dependency between two columns. The actual constraint is in all cases represented by the :class:`.ForeignKeyConstraint` object. This object will be generated automatically when - a ``ForeignKey`` is associated with a :class:`.Column` which + a ``ForeignKey`` is associated with a :class:`.Column` which in turn is associated with a :class:`.Table`. Conversely, when :class:`.ForeignKeyConstraint` is applied to a :class:`.Table`, ``ForeignKey`` markers are automatically generated to be @@ -1170,7 +1177,7 @@ class ForeignKey(SchemaItem): to the :class:`.Table`. The associated ``ForeignKey`` objects are created automatically. - The ``ForeignKey`` objects associated with an individual + The ``ForeignKey`` objects associated with an individual :class:`.Column` object are available in the `foreign_keys` collection of that column. @@ -1199,9 +1206,9 @@ class ForeignKey(SchemaItem): (defaults to the column name itself), unless ``link_to_name`` is ``True`` in which case the rendered name of the column is used. - Note that if the schema name is not included, and the underlying - :class:`.MetaData` has a "schema", that value will be used. - (new in 0.7.4) + .. versionadded:: 0.7.4 + Note that if the schema name is not included, and the underlying + :class:`.MetaData` has a "schema", that value will be used. :param name: Optional string. An in-database name for the key if `constraint` is not provided. @@ -1236,7 +1243,7 @@ class ForeignKey(SchemaItem): # the linked ForeignKeyConstraint. # ForeignKey will create this when parent Column # is attached to a Table, *or* ForeignKeyConstraint - # object passes itself in when creating ForeignKey + # object passes itself in when creating ForeignKey # markers. 
self.constraint = _constraint @@ -1308,7 +1315,7 @@ class ForeignKey(SchemaItem): return table.corresponding_column(self.column) is not None def get_referent(self, table): - """Return the :class:`.Column` in the given :class:`.Table` + """Return the :class:`.Column` in the given :class:`.Table` referenced by this :class:`.ForeignKey`. Returns None if this :class:`.ForeignKey` does not reference the given @@ -1328,7 +1335,7 @@ class ForeignKey(SchemaItem): process to locate the referenced remote :class:`.Column`. The resolution process traverses to the parent :class:`.Column`, :class:`.Table`, and - :class:`.MetaData` to proceed - if any of these aren't + :class:`.MetaData` to proceed - if any of these aren't yet present, an error is raised. """ @@ -1407,7 +1414,7 @@ class ForeignKey(SchemaItem): raise exc.NoReferencedColumnError( "Could not create ForeignKey '%s' on table '%s': " "table '%s' has no column named '%s'" % ( - self._colspec, parenttable.name, table.name, key), + self._colspec, parenttable.name, table.name, key), table.name, key) elif hasattr(self._colspec, '__clause_element__'): @@ -1488,7 +1495,7 @@ class DefaultGenerator(_NotAColumnExpr, SchemaItem): class ColumnDefault(DefaultGenerator): """A plain default value on a column. - This could correspond to a constant, a callable function, + This could correspond to a constant, a callable function, or a SQL clause. :class:`.ColumnDefault` is generated automatically @@ -1602,7 +1609,7 @@ class Sequence(DefaultGenerator): is_sequence = True def __init__(self, name, start=None, increment=None, schema=None, - optional=False, quote=None, metadata=None, + optional=False, quote=None, metadata=None, quote_schema=None, for_update=False): """Construct a :class:`.Sequence` object. @@ -1610,10 +1617,10 @@ class Sequence(DefaultGenerator): :param name: The name of the sequence. :param start: the starting index of the sequence. 
This value is used when the CREATE SEQUENCE command is emitted to the database - as the value of the "START WITH" clause. If ``None``, the + as the value of the "START WITH" clause. If ``None``, the clause is omitted, which on most platforms indicates a starting value of 1. - :param increment: the increment value of the sequence. This + :param increment: the increment value of the sequence. This value is used when the CREATE SEQUENCE command is emitted to the database as the value of the "INCREMENT BY" clause. If ``None``, the clause is omitted, which on most platforms indicates an @@ -1630,21 +1637,24 @@ class Sequence(DefaultGenerator): forces quoting of the schema name on or off. When left at its default of ``None``, normal quoting rules based on casing and reserved words take place. - :param metadata: optional :class:`.MetaData` object which will be + :param metadata: optional :class:`.MetaData` object which will be associated with this :class:`.Sequence`. A :class:`.Sequence` - that is associated with a :class:`.MetaData` gains access to the + that is associated with a :class:`.MetaData` gains access to the ``bind`` of that :class:`.MetaData`, meaning the :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods will make usage of that engine - automatically. Additionally, the appropriate CREATE SEQUENCE/ - DROP SEQUENCE DDL commands will be emitted corresponding to this - :class:`.Sequence` when :meth:`.MetaData.create_all` and - :meth:`.MetaData.drop_all` are invoked (new in 0.7). + automatically. - Note that when a :class:`.Sequence` is applied to a :class:`.Column`, - the :class:`.Sequence` is automatically associated with the - :class:`.MetaData` object of that column's parent :class:`.Table`, + .. versionchanged:: 0.7 + Additionally, the appropriate CREATE SEQUENCE/ + DROP SEQUENCE DDL commands will be emitted corresponding to this + :class:`.Sequence` when :meth:`.MetaData.create_all` and + :meth:`.MetaData.drop_all` are invoked. 
+ + Note that when a :class:`.Sequence` is applied to a :class:`.Column`, + the :class:`.Sequence` is automatically associated with the + :class:`.MetaData` object of that column's parent :class:`.Table`, when that association is made. The :class:`.Sequence` will then - be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding + be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding to when the :class:`.Table` object itself is created or dropped, rather than that of the :class:`.MetaData` object overall. :param for_update: Indicates this :class:`.Sequence`, when associated @@ -1709,8 +1719,8 @@ class Sequence(DefaultGenerator): if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaGenerator, - self, + bind._run_visitor(ddl.SchemaGenerator, + self, checkfirst=checkfirst) def drop(self, bind=None, checkfirst=True): @@ -1718,8 +1728,8 @@ class Sequence(DefaultGenerator): if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaDropper, - self, + bind._run_visitor(ddl.SchemaDropper, + self, checkfirst=checkfirst) def _not_a_column_expr(self): @@ -1727,7 +1737,7 @@ class Sequence(DefaultGenerator): "This %s cannot be used directly " "as a column expression. Use func.next_value(sequence) " "to produce a 'next value' function that's usable " - "as a column element." + "as a column element." 
% self.__class__.__name__) @@ -1753,6 +1763,19 @@ class FetchedValue(_NotAColumnExpr, events.SchemaEventTarget): def __init__(self, for_update=False): self.for_update = for_update + def _as_for_update(self, for_update): + if for_update == self.for_update: + return self + else: + return self._clone(for_update) + + def _clone(self, for_update): + n = self.__class__.__new__(self.__class__) + n.__dict__.update(self.__dict__) + n.__dict__.pop('column', None) + n.for_update = for_update + return n + def _set_parent(self, column): self.column = column if self.for_update: @@ -1802,10 +1825,11 @@ class DefaultClause(FetchedValue): class PassiveDefault(DefaultClause): """A DDL-specified DEFAULT column value. - .. deprecated:: 0.6 :class:`.PassiveDefault` is deprecated. + .. deprecated:: 0.6 + :class:`.PassiveDefault` is deprecated. Use :class:`.DefaultClause`. """ - @util.deprecated("0.6", + @util.deprecated("0.6", ":class:`.PassiveDefault` is deprecated. " "Use :class:`.DefaultClause`.", False) @@ -1817,8 +1841,8 @@ class Constraint(SchemaItem): __visit_name__ = 'constraint' - def __init__(self, name=None, deferrable=None, initially=None, - _create_rule=None, + def __init__(self, name=None, deferrable=None, initially=None, + _create_rule=None, **kw): """Create a SQL constraint. @@ -1849,8 +1873,8 @@ class Constraint(SchemaItem): _create_rule is used by some types to create constraints. Currently, its call signature is subject to change at any time. - - :param \**kwargs: + + :param \**kwargs: Dialect-specific keyword parameters, see the documentation for various dialects and constraints regarding options here. 
@@ -1885,7 +1909,7 @@ class Constraint(SchemaItem): class ColumnCollectionMixin(object): def __init__(self, *columns): self.columns = expression.ColumnCollection() - self._pending_colargs = [_to_schema_column_or_string(c) + self._pending_colargs = [_to_schema_column_or_string(c) for c in columns] if self._pending_colargs and \ isinstance(self._pending_colargs[0], Column) and \ @@ -1938,7 +1962,7 @@ class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint): return self.columns.contains_column(col) def __iter__(self): - # inlining of + # inlining of # return iter(self.columns) # ColumnCollection->OrderedProperties->OrderedDict ordered_dict = self.columns._data @@ -1954,7 +1978,7 @@ class CheckConstraint(Constraint): Can be included in the definition of a Table or Column. """ - def __init__(self, sqltext, name=None, deferrable=None, + def __init__(self, sqltext, name=None, deferrable=None, initially=None, table=None, _create_rule=None): """Construct a CHECK constraint. @@ -1989,7 +2013,7 @@ class CheckConstraint(Constraint): __visit_name__ = property(__visit_name__) def copy(self, **kw): - c = CheckConstraint(self.sqltext, + c = CheckConstraint(self.sqltext, name=self.name, initially=self.initially, deferrable=self.deferrable, @@ -2068,16 +2092,16 @@ class ForeignKeyConstraint(Constraint): # standalone ForeignKeyConstraint - create # associated ForeignKey objects which will be applied to hosted - # Column objects (in col.foreign_keys), either now or when attached + # Column objects (in col.foreign_keys), either now or when attached # to the Table for string-specified names for col, refcol in zip(columns, refcolumns): self._elements[col] = ForeignKey( - refcol, - _constraint=self, - name=self.name, - onupdate=self.onupdate, - ondelete=self.ondelete, - use_alter=self.use_alter, + refcol, + _constraint=self, + name=self.name, + onupdate=self.onupdate, + ondelete=self.ondelete, + use_alter=self.use_alter, link_to_name=self.link_to_name ) @@ -2121,11 +2145,11 @@ 
class ForeignKeyConstraint(Constraint): def copy(self, **kw): fkc = ForeignKeyConstraint( - [x.parent.name for x in self._elements.values()], - [x._get_colspec(**kw) for x in self._elements.values()], - name=self.name, - onupdate=self.onupdate, - ondelete=self.ondelete, + [x.parent.key for x in self._elements.values()], + [x._get_colspec(**kw) for x in self._elements.values()], + name=self.name, + onupdate=self.onupdate, + ondelete=self.ondelete, use_alter=self.use_alter, deferrable=self.deferrable, initially=self.initially, @@ -2208,8 +2232,6 @@ class Index(ColumnCollectionMixin, SchemaItem): self.table = None # will call _set_parent() if table-bound column # objects are present - if not columns: - util.warn("No column names or expressions given for Index.") ColumnCollectionMixin.__init__(self, *columns) self.name = name self.unique = kw.pop('unique', False) @@ -2243,7 +2265,7 @@ class Index(ColumnCollectionMixin, SchemaItem): return self.table.bind def create(self, bind=None): - """Issue a ``CREATE`` statement for this + """Issue a ``CREATE`` statement for this :class:`.Index`, using the given :class:`.Connectable` for connectivity. @@ -2256,7 +2278,7 @@ class Index(ColumnCollectionMixin, SchemaItem): return self def drop(self, bind=None): - """Issue a ``DROP`` statement for this + """Issue a ``DROP`` statement for this :class:`.Index`, using the given :class:`.Connectable` for connectivity. @@ -2270,7 +2292,7 @@ class Index(ColumnCollectionMixin, SchemaItem): def __repr__(self): return 'Index(%s)' % ( ", ".join( - [repr(self.name)] + + [repr(self.name)] + [repr(c) for c in self.columns] + (self.unique and ["unique=True"] or []) )) @@ -2278,7 +2300,7 @@ class Index(ColumnCollectionMixin, SchemaItem): class MetaData(SchemaItem): """A collection of :class:`.Table` objects and their associated schema constructs. 
- Holds a collection of :class:`.Table` objects as well as + Holds a collection of :class:`.Table` objects as well as an optional binding to an :class:`.Engine` or :class:`.Connection`. If bound, the :class:`.Table` objects in the collection and their columns may participate in implicit SQL @@ -2304,8 +2326,6 @@ class MetaData(SchemaItem): :ref:`metadata_describing` - Introduction to database metadata - :ref:`metadata_binding` - Information on binding connectables to :class:`.MetaData` - .. index:: single: thread safety; MetaData @@ -2330,12 +2350,14 @@ class MetaData(SchemaItem): :param schema: The default schema to use for the :class:`.Table`, :class:`.Sequence`, and other objects associated with this :class:`.MetaData`. - Defaults to ``None``. New in 0.7.4. - + Defaults to ``None``. + :param quote_schema: Sets the ``quote_schema`` flag for those :class:`.Table`, :class:`.Sequence`, and other objects which make usage of the local ``schema`` name. - New in 0.7.4. + + .. versionadded:: 0.7.4 + ``schema`` and ``quote_schema`` parameters. """ self.tables = util.immutabledict() @@ -2369,14 +2391,14 @@ class MetaData(SchemaItem): key = _get_table_key(name, schema) dict.pop(self.tables, key, None) if self._schemas: - self._schemas = set([t.schema - for t in self.tables.values() + self._schemas = set([t.schema + for t in self.tables.values() if t.schema is not None]) def __getstate__(self): return {'tables': self.tables, 'schema':self.schema, 'quote_schema':self.quote_schema, - 'schemas':self._schemas, + 'schemas':self._schemas, 'sequences':self._sequences} def __setstate__(self, state): @@ -2393,11 +2415,20 @@ class MetaData(SchemaItem): return self._bind is not None def bind(self): - """An Engine or Connection to which this MetaData is bound. + """An :class:`.Engine` or :class:`.Connection` to which this + :class:`.MetaData` is bound. 
- This property may be assigned an ``Engine`` or ``Connection``, or - assigned a string or URL to automatically create a basic ``Engine`` - for this bind with ``create_engine()``. + Typically, a :class:`.Engine` is assigned to this attribute + so that "implicit execution" may be used, or alternatively + as a means of providing engine binding information to an + ORM :class:`.Session` object:: + + engine = create_engine("someurl://") + metadata.bind = engine + + .. seealso:: + + :ref:`dbengine_implicit` - background on "bound metadata" """ return self._bind @@ -2467,13 +2498,20 @@ class MetaData(SchemaItem): arguments and should return a true value for any table to reflect. """ - reflect_opts = {'autoload': True} if bind is None: bind = _bind_or_error(self) - conn = None + + if bind.engine is not bind: + conn = bind + close = False else: - reflect_opts['autoload_with'] = bind conn = bind.contextual_connect() + close = True + + reflect_opts = { + 'autoload': True, + 'autoload_with': bind + } if schema is None: schema = self.schema @@ -2486,7 +2524,7 @@ class MetaData(SchemaItem): connection=conn)) if views: available.update( - bind.dialect.get_view_names(conn or bind, schema) + bind.dialect.get_view_names(conn, schema) ) current = set(self.tables.iterkeys()) @@ -2502,15 +2540,14 @@ class MetaData(SchemaItem): s = schema and (" schema '%s'" % schema) or '' raise exc.InvalidRequestError( 'Could not reflect: requested table(s) not available ' - 'in %s%s: (%s)' % + 'in %s%s: (%s)' % (bind.engine.url, s, ', '.join(missing))) load = [name for name in only if name not in current] for name in load: Table(name, self, **reflect_opts) finally: - if conn is not None and \ - conn is not bind: + if close: conn.close() def append_ddl_listener(self, event_name, listener): @@ -2547,8 +2584,8 @@ class MetaData(SchemaItem): """ if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaGenerator, - self, + bind._run_visitor(ddl.SchemaGenerator, + self, 
checkfirst=checkfirst, tables=tables) @@ -2574,8 +2611,8 @@ class MetaData(SchemaItem): """ if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaDropper, - self, + bind._run_visitor(ddl.SchemaDropper, + self, checkfirst=checkfirst, tables=tables) @@ -2702,8 +2739,8 @@ class DDLElement(expression.Executable, expression.ClauseElement): ``.bind`` property. :param target: - Optional, defaults to None. The target SchemaItem for the - execute call. Will be passed to the ``on`` callable if any, + Optional, defaults to None. The target SchemaItem for the + execute call. Will be passed to the ``on`` callable if any, and may also provide string expansion data for the statement. See ``execute_at`` for more information. @@ -2737,7 +2774,7 @@ class DDLElement(expression.Executable, expression.ClauseElement): The Table or MetaData instance for which this DDLElement will be associated with. - A DDLElement instance can be linked to any number of schema items. + A DDLElement instance can be linked to any number of schema items. ``execute_at`` builds on the ``append_ddl_listener`` interface of :class:`.MetaData` and :class:`.Table` objects. @@ -2749,7 +2786,7 @@ class DDLElement(expression.Executable, expression.ClauseElement): """ def call_event(target, connection, **kw): - if self._should_execute_deprecated(event_name, + if self._should_execute_deprecated(event_name, target, connection, **kw): return connection.execute(self.against(target)) @@ -2763,14 +2800,14 @@ class DDLElement(expression.Executable, expression.ClauseElement): @expression._generative def execute_if(self, dialect=None, callable_=None, state=None): - """Return a callable that will execute this + """Return a callable that will execute this DDLElement conditionally. 
Used to provide a wrapper for event listening:: event.listen( metadata, - 'before_create', + 'before_create', DDL("my_ddl").execute_if(dialect='postgresql') ) @@ -2784,15 +2821,15 @@ class DDLElement(expression.Executable, expression.ClauseElement): DDL('something').execute_if(dialect=('postgresql', 'mysql')) - :param callable_: A callable, which will be invoked with - four positional arguments as well as optional keyword + :param callable_: A callable, which will be invoked with + four positional arguments as well as optional keyword arguments: :ddl: This DDL element. :target: - The :class:`.Table` or :class:`.MetaData` object which is the target of + The :class:`.Table` or :class:`.MetaData` object which is the target of this event. May be None if the DDL is executed explicitly. :bind: @@ -2809,13 +2846,13 @@ class DDLElement(expression.Executable, expression.ClauseElement): :checkfirst: Keyword argument, will be True if the 'checkfirst' flag was - set during the call to ``create()``, ``create_all()``, + set during the call to ``create()``, ``create_all()``, ``drop()``, ``drop_all()``. If the callable returns a true value, the DDL statement will be executed. - :param state: any value which will be passed to the callable_ + :param state: any value which will be passed to the callable_ as the ``state`` keyword argument. See also: @@ -2864,7 +2901,7 @@ class DDLElement(expression.Executable, expression.ClauseElement): def _check_ddl_on(self, on): if (on is not None and - (not isinstance(on, (basestring, tuple, list, set)) and + (not isinstance(on, (basestring, tuple, list, set)) and not util.callable(on))): raise exc.ArgumentError( "Expected the name of a database dialect, a tuple " @@ -2892,10 +2929,10 @@ class DDLElement(expression.Executable, expression.ClauseElement): class DDL(DDLElement): """A literal DDL statement. - Specifies literal SQL DDL to be executed by the database. DDL objects + Specifies literal SQL DDL to be executed by the database. 
DDL objects function as DDL event listeners, and can be subscribed to those events listed in :class:`.DDLEvents`, using either :class:`.Table` or :class:`.MetaData` - objects as targets. Basic templating support allows a single DDL instance + objects as targets. Basic templating support allows a single DDL instance to handle repetitive tasks for multiple tables. Examples:: @@ -2962,7 +2999,7 @@ class DDL(DDLElement): 'after-create' Will be None if the DDL is executed explicitly. :target: - The ``Table`` or ``MetaData`` object which is the target of + The ``Table`` or ``MetaData`` object which is the target of this event. May be None if the DDL is executed explicitly. :connection: @@ -3047,7 +3084,7 @@ class _CreateDropBase(DDLElement): def _create_rule_disable(self, compiler): """Allow disable of _create_rule using a callable. - Pass to _create_rule using + Pass to _create_rule using util.portable_instancemethod(self._create_rule_disable) to retain serializability. @@ -3057,7 +3094,7 @@ class _CreateDropBase(DDLElement): class CreateSchema(_CreateDropBase): """Represent a CREATE SCHEMA statement. - New in 0.7.4. + .. versionadded:: 0.7.4 The argument here is the string name of the schema. @@ -3076,7 +3113,8 @@ class DropSchema(_CreateDropBase): The argument here is the string name of the schema. - New in 0.7.4. + .. 
versionadded:: 0.7.4 + """ __visit_name__ = "drop_schema" diff --git a/libs/sqlalchemy/sql/__init__.py b/libs/sqlalchemy/sql/__init__.py index eac845dc..77fbfc84 100644 --- a/libs/sqlalchemy/sql/__init__.py +++ b/libs/sqlalchemy/sql/__init__.py @@ -1,5 +1,5 @@ # sql/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/sql/compiler.py b/libs/sqlalchemy/sql/compiler.py index c5c6f9ec..9dc56d1f 100644 --- a/libs/sqlalchemy/sql/compiler.py +++ b/libs/sqlalchemy/sql/compiler.py @@ -1,5 +1,5 @@ # sql/compiler.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -17,7 +17,7 @@ strings :class:`~sqlalchemy.sql.compiler.GenericTypeCompiler` - renders type specification strings. -To generate user-defined SQL strings, see +To generate user-defined SQL strings, see :module:`~sqlalchemy.ext.compiler`. """ @@ -29,6 +29,7 @@ from sqlalchemy.sql import operators, functions, util as sql_util, \ visitors from sqlalchemy.sql import expression as sql import decimal +import itertools RESERVED_WORDS = set([ 'all', 'analyse', 'analyze', 'and', 'any', 'array', @@ -59,7 +60,7 @@ BIND_TEMPLATES = { 'pyformat':"%%(%(name)s)s", 'qmark':"?", 'format':"%%s", - 'numeric':":%(position)s", + 'numeric':":[_POSITION]", 'named':":%(name)s" } @@ -214,7 +215,7 @@ class SQLCompiler(engine.Compiled): driver/DB enforces this """ - def __init__(self, dialect, statement, column_keys=None, + def __init__(self, dialect, statement, column_keys=None, inline=False, **kwargs): """Construct a new ``DefaultCompiler`` object. 
@@ -252,16 +253,14 @@ class SQLCompiler(engine.Compiled): # column targeting self.result_map = {} - # collect CTEs to tack on top of a SELECT - self.ctes = util.OrderedDict() - self.ctes_recursive = False - # true if the paramstyle is positional self.positional = dialect.positional if self.positional: self.positiontup = [] self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle] + self.ctes = None + # an IdentifierPreparer that formats the quoting of identifiers self.preparer = dialect.identifier_preparer self.label_length = dialect.label_length \ @@ -276,7 +275,29 @@ class SQLCompiler(engine.Compiled): self.truncated_names = {} engine.Compiled.__init__(self, dialect, statement, **kwargs) + if self.positional and dialect.paramstyle == 'numeric': + self._apply_numbered_params() + @util.memoized_instancemethod + def _init_cte_state(self): + """Initialize collections related to CTEs only if + a CTE is located, to save on the overhead of + these collections otherwise. + + """ + # collect CTEs to tack on top of a SELECT + self.ctes = util.OrderedDict() + self.ctes_by_name = {} + self.ctes_recursive = False + if self.positional: + self.cte_positional = [] + + def _apply_numbered_params(self): + poscount = itertools.count(1) + self.string = re.sub( + r'\[_POSITION\]', + lambda m:str(util.next(poscount)), + self.string) @util.memoized_property def _bind_processors(self): @@ -309,11 +330,11 @@ class SQLCompiler(engine.Compiled): if _group_number: raise exc.InvalidRequestError( "A value is required for bind parameter %r, " - "in parameter group %d" % + "in parameter group %d" % (bindparam.key, _group_number)) else: raise exc.InvalidRequestError( - "A value is required for bind parameter %r" + "A value is required for bind parameter %r" % bindparam.key) else: pd[name] = bindparam.effective_value @@ -325,18 +346,18 @@ class SQLCompiler(engine.Compiled): if _group_number: raise exc.InvalidRequestError( "A value is required for bind parameter %r, " - "in parameter group %d" % + 
"in parameter group %d" % (bindparam.key, _group_number)) else: raise exc.InvalidRequestError( - "A value is required for bind parameter %r" + "A value is required for bind parameter %r" % bindparam.key) pd[self.bind_names[bindparam]] = bindparam.effective_value return pd @property def params(self): - """Return the bind param dictionary embedded into this + """Return the bind param dictionary embedded into this compiled object, for those values that are present.""" return self.construct_params(_check=False) @@ -352,8 +373,8 @@ class SQLCompiler(engine.Compiled): def visit_grouping(self, grouping, asfrom=False, **kwargs): return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")" - def visit_label(self, label, result_map=None, - within_label_clause=False, + def visit_label(self, label, result_map=None, + within_label_clause=False, within_columns_clause=False, **kw): # only render labels within the columns clause # or ORDER BY clause of a select. dialect-specific compilers @@ -366,20 +387,20 @@ class SQLCompiler(engine.Compiled): if result_map is not None: result_map[labelname.lower()] = ( - label.name, - (label, label.element, labelname, ) + + label.name, + (label, label.element, labelname, ) + label._alt_names, label.type) - return label.element._compiler_dispatch(self, + return label.element._compiler_dispatch(self, within_columns_clause=True, - within_label_clause=True, + within_label_clause=True, **kw) + \ OPERATORS[operators.as_] + \ self.preparer.format_label(label, labelname) else: - return label.element._compiler_dispatch(self, - within_columns_clause=False, + return label.element._compiler_dispatch(self, + within_columns_clause=False, **kw) def visit_column(self, column, result_map=None, **kwargs): @@ -393,8 +414,8 @@ class SQLCompiler(engine.Compiled): name = self._truncated_identifier("colident", name) if result_map is not None: - result_map[name.lower()] = (orig_name, - (column, name, column.key), + result_map[name.lower()] = (orig_name, + 
(column, name, column.key), column.type) if is_literal: @@ -408,7 +429,7 @@ class SQLCompiler(engine.Compiled): else: if table.schema: schema_prefix = self.preparer.quote_schema( - table.schema, + table.schema, table.quote_schema) + '.' else: schema_prefix = '' @@ -448,7 +469,7 @@ class SQLCompiler(engine.Compiled): if name in textclause.bindparams: return self.process(textclause.bindparams[name]) else: - return self.bindparam_string(name) + return self.bindparam_string(name, **kwargs) # un-escape any \:params return BIND_PARAMS_ESC.sub(lambda m: m.group(1), @@ -472,8 +493,8 @@ class SQLCompiler(engine.Compiled): else: sep = OPERATORS[clauselist.operator] return sep.join( - s for s in - (c._compiler_dispatch(self, **kwargs) + s for s in + (c._compiler_dispatch(self, **kwargs) for c in clauselist.clauses) if s) @@ -499,21 +520,21 @@ class SQLCompiler(engine.Compiled): cast.typeclause._compiler_dispatch(self, **kwargs)) def visit_over(self, over, **kwargs): - x ="%s OVER (" % over.func._compiler_dispatch(self, **kwargs) - if over.partition_by is not None: - x += "PARTITION BY %s" % \ - over.partition_by._compiler_dispatch(self, **kwargs) - if over.order_by is not None: - x += " " - if over.order_by is not None: - x += "ORDER BY %s" % \ - over.order_by._compiler_dispatch(self, **kwargs) - x += ")" - return x + return "%s OVER (%s)" % ( + over.func._compiler_dispatch(self, **kwargs), + ' '.join( + '%s BY %s' % (word, clause._compiler_dispatch(self, **kwargs)) + for word, clause in ( + ('PARTITION', over.partition_by), + ('ORDER', over.order_by) + ) + if clause is not None and len(clause) + ) + ) def visit_extract(self, extract, **kwargs): field = self.extract_map.get(extract.field, extract.field) - return "EXTRACT(%s FROM %s)" % (field, + return "EXTRACT(%s FROM %s)" % (field, extract.expr._compiler_dispatch(self, **kwargs)) def visit_function(self, func, result_map=None, **kwargs): @@ -526,7 +547,7 @@ class SQLCompiler(engine.Compiled): else: name = 
FUNCTIONS.get(func.__class__, func.name + "%(expr)s") return ".".join(list(func.packagenames) + [name]) % \ - {'expr':self.function_argspec(func, **kwargs)} + {'expr': self.function_argspec(func, **kwargs)} def visit_next_value_func(self, next_value, **kw): return self.visit_sequence(next_value.sequence) @@ -539,16 +560,17 @@ class SQLCompiler(engine.Compiled): def function_argspec(self, func, **kwargs): return func.clause_expr._compiler_dispatch(self, **kwargs) - def visit_compound_select(self, cs, asfrom=False, - parens=True, compound_index=1, **kwargs): + def visit_compound_select(self, cs, asfrom=False, + parens=True, compound_index=0, **kwargs): entry = self.stack and self.stack[-1] or {} - self.stack.append({'from':entry.get('from', None), 'iswrapper':True}) + self.stack.append({'from': entry.get('from', None), + 'iswrapper': not entry}) keyword = self.compound_keywords.get(cs.keyword) text = (" " + keyword + " ").join( - (c._compiler_dispatch(self, - asfrom=asfrom, parens=False, + (c._compiler_dispatch(self, + asfrom=asfrom, parens=False, compound_index=i, **kwargs) for i, c in enumerate(cs.selects)) ) @@ -562,6 +584,10 @@ class SQLCompiler(engine.Compiled): text += (cs._limit is not None or cs._offset is not None) and \ self.limit_clause(cs) or "" + if self.ctes and \ + compound_index == 0 and not entry: + text = self._render_cte_clause() + text + self.stack.pop(-1) if asfrom and parens: return "(" + text + ")" @@ -585,8 +611,8 @@ class SQLCompiler(engine.Compiled): return self._operator_dispatch(binary.operator, binary, - lambda opstr: binary.left._compiler_dispatch(self, **kw) + - opstr + + lambda opstr: binary.left._compiler_dispatch(self, **kw) + + opstr + binary.right._compiler_dispatch( self, **kw), **kw @@ -595,36 +621,36 @@ class SQLCompiler(engine.Compiled): def visit_like_op(self, binary, **kw): escape = binary.modifiers.get("escape", None) return '%s LIKE %s' % ( - binary.left._compiler_dispatch(self, **kw), + binary.left._compiler_dispatch(self, 
**kw), binary.right._compiler_dispatch(self, **kw)) \ - + (escape and + + (escape and (' ESCAPE ' + self.render_literal_value(escape, None)) or '') def visit_notlike_op(self, binary, **kw): escape = binary.modifiers.get("escape", None) return '%s NOT LIKE %s' % ( - binary.left._compiler_dispatch(self, **kw), + binary.left._compiler_dispatch(self, **kw), binary.right._compiler_dispatch(self, **kw)) \ - + (escape and + + (escape and (' ESCAPE ' + self.render_literal_value(escape, None)) or '') def visit_ilike_op(self, binary, **kw): escape = binary.modifiers.get("escape", None) return 'lower(%s) LIKE lower(%s)' % ( - binary.left._compiler_dispatch(self, **kw), + binary.left._compiler_dispatch(self, **kw), binary.right._compiler_dispatch(self, **kw)) \ - + (escape and + + (escape and (' ESCAPE ' + self.render_literal_value(escape, None)) or '') def visit_notilike_op(self, binary, **kw): escape = binary.modifiers.get("escape", None) return 'lower(%s) NOT LIKE lower(%s)' % ( - binary.left._compiler_dispatch(self, **kw), + binary.left._compiler_dispatch(self, **kw), binary.right._compiler_dispatch(self, **kw)) \ - + (escape and + + (escape and (' ESCAPE ' + self.render_literal_value(escape, None)) or '') @@ -668,7 +694,7 @@ class SQLCompiler(engine.Compiled): "bindparam() name '%s' is reserved " "for automatic usage in the VALUES or SET " "clause of this " - "insert/update statement. Please use a " + "insert/update statement. Please use a " "name other than column name when using bindparam() " "with insert() or update() (for example, 'b_%s')." 
% (bindparam.key, bindparam.key) @@ -676,7 +702,7 @@ class SQLCompiler(engine.Compiled): self.binds[bindparam.key] = self.binds[name] = bindparam - return self.bindparam_string(name) + return self.bindparam_string(name, **kwargs) def render_literal_bindparam(self, bindparam, **kw): value = bindparam.value @@ -688,7 +714,7 @@ class SQLCompiler(engine.Compiled): def render_literal_value(self, value, type_): """Render the value of a bind parameter as a quoted literal. - This is used for statement sections that do not accept bind paramters + This is used for statement sections that do not accept bind parameters on the target driver/database. This should be implemented by subclasses using the quoting services @@ -746,20 +772,45 @@ class SQLCompiler(engine.Compiled): self.anon_map[derived] = anonymous_counter + 1 return derived + "_" + str(anonymous_counter) - def bindparam_string(self, name): + def bindparam_string(self, name, positional_names=None, **kw): if self.positional: - self.positiontup.append(name) - return self.bindtemplate % { - 'name':name, 'position':len(self.positiontup)} - else: - return self.bindtemplate % {'name':name} + if positional_names is not None: + positional_names.append(name) + else: + self.positiontup.append(name) + return self.bindtemplate % {'name':name} + + def visit_cte(self, cte, asfrom=False, ashint=False, + fromhints=None, + **kwargs): + self._init_cte_state() + if self.positional: + kwargs['positional_names'] = self.cte_positional - def visit_cte(self, cte, asfrom=False, ashint=False, - fromhints=None, **kwargs): if isinstance(cte.name, sql._truncated_label): cte_name = self._truncated_identifier("alias", cte.name) else: cte_name = cte.name + + if cte_name in self.ctes_by_name: + existing_cte = self.ctes_by_name[cte_name] + # we've generated a same-named CTE that we are enclosed in, + # or this is the same CTE. just return the name. 
+ if cte in existing_cte._restates or cte is existing_cte: + return cte_name + elif existing_cte in cte._restates: + # we've generated a same-named CTE that is + # enclosed in us - we take precedence, so + # discard the text for the "inner". + del self.ctes[existing_cte] + else: + raise exc.CompileError( + "Multiple, unrelated CTEs found with " + "the same name: %r" % + cte_name) + + self.ctes_by_name[cte_name] = cte + if cte.cte_alias: if isinstance(cte.cte_alias, sql._truncated_label): cte_alias = self._truncated_identifier("alias", cte.cte_alias) @@ -776,10 +827,13 @@ class SQLCompiler(engine.Compiled): col_source = cte.original.selects[0] else: assert False - recur_cols = [c.key for c in util.unique_list(col_source.inner_columns) + recur_cols = [c for c in + util.unique_list(col_source.inner_columns) if c is not None] - text += "(%s)" % (", ".join(recur_cols)) + text += "(%s)" % (", ".join( + self.preparer.format_column(ident) + for ident in recur_cols)) text += " AS \n" + \ cte.original._compiler_dispatch( self, asfrom=True, **kwargs @@ -793,7 +847,7 @@ class SQLCompiler(engine.Compiled): return self.preparer.format_alias(cte, cte_name) return text - def visit_alias(self, alias, asfrom=False, ashint=False, + def visit_alias(self, alias, asfrom=False, ashint=False, fromhints=None, **kwargs): if asfrom or ashint: if isinstance(alias.name, sql._truncated_label): @@ -804,7 +858,7 @@ class SQLCompiler(engine.Compiled): if ashint: return self.preparer.format_alias(alias, alias_name) elif asfrom: - ret = alias.original._compiler_dispatch(self, + ret = alias.original._compiler_dispatch(self, asfrom=True, **kwargs) + \ " AS " + \ self.preparer.format_alias(alias, alias_name) @@ -828,8 +882,8 @@ class SQLCompiler(engine.Compiled): select.use_labels and \ column._label: return _CompileLabel( - column, - column._label, + column, + column._label, alt_names=(column._key_label, ) ) @@ -839,9 +893,9 @@ class SQLCompiler(engine.Compiled): not column.is_literal and \ 
column.table is not None and \ not isinstance(column.table, sql.Select): - return _CompileLabel(column, sql._as_truncated(column.name), + return _CompileLabel(column, sql._as_truncated(column.name), alt_names=(column.key,)) - elif not isinstance(column, + elif not isinstance(column, (sql._UnaryExpression, sql._TextClause)) \ and (not hasattr(column, 'name') or \ isinstance(column, sql.Function)): @@ -858,9 +912,10 @@ class SQLCompiler(engine.Compiled): def get_crud_hint_text(self, table, text): return None - def visit_select(self, select, asfrom=False, parens=True, - iswrapper=False, fromhints=None, - compound_index=1, **kwargs): + def visit_select(self, select, asfrom=False, parens=True, + iswrapper=False, fromhints=None, + compound_index=0, + positional_names=None, **kwargs): entry = self.stack and self.stack[-1] or {} @@ -875,13 +930,18 @@ class SQLCompiler(engine.Compiled): # to outermost if existingfroms: correlate_froms = # correlate_froms.union(existingfroms) - self.stack.append({'from': correlate_froms, 'iswrapper' - : iswrapper}) + populate_result_map = compound_index == 0 and ( + not entry or \ + entry.get('iswrapper', False) + ) - if compound_index==1 and not entry or entry.get('iswrapper', False): - column_clause_args = {'result_map':self.result_map} + self.stack.append({'from': correlate_froms, 'iswrapper': iswrapper}) + + if populate_result_map: + column_clause_args = {'result_map': self.result_map, + 'positional_names': positional_names} else: - column_clause_args = {} + column_clause_args = {'positional_names': positional_names} # the actual list of columns to print in the SELECT column list. 
inner_columns = [ @@ -889,7 +949,7 @@ class SQLCompiler(engine.Compiled): self.label_select_column(select, co, asfrom=asfrom).\ _compiler_dispatch(self, within_columns_clause=True, - **column_clause_args) + **column_clause_args) for co in util.unique_list(select.inner_columns) ] if c is not None @@ -902,9 +962,9 @@ class SQLCompiler(engine.Compiled): (from_, hinttext % { 'name':from_._compiler_dispatch( self, ashint=True) - }) - for (from_, dialect), hinttext in - select._hints.iteritems() + }) + for (from_, dialect), hinttext in + select._hints.iteritems() if dialect in ('*', self.dialect.name) ]) hint_text = self.get_select_hint_text(byfrom) @@ -913,7 +973,7 @@ class SQLCompiler(engine.Compiled): if select._prefixes: text += " ".join( - x._compiler_dispatch(self, **kwargs) + x._compiler_dispatch(self, **kwargs) for x in select._prefixes) + " " text += self.get_select_precolumns(select) text += ', '.join(inner_columns) @@ -922,13 +982,13 @@ class SQLCompiler(engine.Compiled): text += " \nFROM " if select._hints: - text += ', '.join([f._compiler_dispatch(self, - asfrom=True, fromhints=byfrom, - **kwargs) + text += ', '.join([f._compiler_dispatch(self, + asfrom=True, fromhints=byfrom, + **kwargs) for f in froms]) else: - text += ', '.join([f._compiler_dispatch(self, - asfrom=True, **kwargs) + text += ', '.join([f._compiler_dispatch(self, + asfrom=True, **kwargs) for f in froms]) else: text += self.default_from() @@ -957,13 +1017,8 @@ class SQLCompiler(engine.Compiled): text += self.for_update_clause(select) if self.ctes and \ - compound_index==1 and not entry: - cte_text = self.get_cte_preamble(self.ctes_recursive) + " " - cte_text += ", \n".join( - [txt for txt in self.ctes.values()] - ) - cte_text += "\n " - text = cte_text + text + compound_index == 0 and not entry: + text = self._render_cte_clause() + text self.stack.pop(-1) @@ -972,6 +1027,16 @@ class SQLCompiler(engine.Compiled): else: return text + def _render_cte_clause(self): + if self.positional: + 
self.positiontup = self.cte_positional + self.positiontup + cte_text = self.get_cte_preamble(self.ctes_recursive) + " " + cte_text += ", \n".join( + [txt for txt in self.ctes.values()] + ) + cte_text += "\n " + return cte_text + def get_cte_preamble(self, recursive): if recursive: return "WITH RECURSIVE" @@ -1008,7 +1073,7 @@ class SQLCompiler(engine.Compiled): text += " OFFSET " + self.process(sql.literal(select._offset)) return text - def visit_table(self, table, asfrom=False, ashint=False, + def visit_table(self, table, asfrom=False, ashint=False, fromhints=None, **kwargs): if asfrom or ashint: if getattr(table, "schema", None): @@ -1028,10 +1093,10 @@ class SQLCompiler(engine.Compiled): def visit_join(self, join, asfrom=False, **kwargs): return ( - join.left._compiler_dispatch(self, asfrom=True, **kwargs) + - (join.isouter and " LEFT OUTER JOIN " or " JOIN ") + - join.right._compiler_dispatch(self, asfrom=True, **kwargs) + - " ON " + + join.left._compiler_dispatch(self, asfrom=True, **kwargs) + + (join.isouter and " LEFT OUTER JOIN " or " JOIN ") + + join.right._compiler_dispatch(self, asfrom=True, **kwargs) + + " ON " + join.onclause._compiler_dispatch(self, **kwargs) ) @@ -1043,7 +1108,7 @@ class SQLCompiler(engine.Compiled): not self.dialect.supports_default_values and \ not self.dialect.supports_empty_insert: raise exc.CompileError("The version of %s you are using does " - "not support empty inserts." % + "not support empty inserts." 
% self.dialect.name) preparer = self.preparer @@ -1061,13 +1126,13 @@ class SQLCompiler(engine.Compiled): if insert_stmt._hints: dialect_hints = dict([ (table, hint_text) - for (table, dialect), hint_text in + for (table, dialect), hint_text in insert_stmt._hints.items() if dialect in ('*', self.dialect.name) ]) if insert_stmt.table in dialect_hints: text += " " + self.get_crud_hint_text( - insert_stmt.table, + insert_stmt.table, dialect_hints[insert_stmt.table] ) @@ -1098,7 +1163,7 @@ class SQLCompiler(engine.Compiled): """Provide a hook for MySQL to add LIMIT to the UPDATE""" return None - def update_tables_clause(self, update_stmt, from_table, + def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): """Provide a hook to override the initial table clause in an UPDATE statement. @@ -1108,19 +1173,19 @@ class SQLCompiler(engine.Compiled): """ return self.preparer.format_table(from_table) - def update_from_clause(self, update_stmt, - from_table, extra_froms, + def update_from_clause(self, update_stmt, + from_table, extra_froms, from_hints, **kw): - """Provide a hook to override the generation of an + """Provide a hook to override the generation of an UPDATE..FROM clause. - MySQL overrides this. + MySQL and MSSQL override this. 
""" return "FROM " + ', '.join( - t._compiler_dispatch(self, asfrom=True, - fromhints=from_hints, **kw) + t._compiler_dispatch(self, asfrom=True, + fromhints=from_hints, **kw) for t in extra_froms) def visit_update(self, update_stmt, **kw): @@ -1133,20 +1198,20 @@ class SQLCompiler(engine.Compiled): colparams = self._get_colparams(update_stmt, extra_froms) text = "UPDATE " + self.update_tables_clause( - update_stmt, - update_stmt.table, + update_stmt, + update_stmt.table, extra_froms, **kw) if update_stmt._hints: dialect_hints = dict([ (table, hint_text) - for (table, dialect), hint_text in + for (table, dialect), hint_text in update_stmt._hints.items() if dialect in ('*', self.dialect.name) ]) if update_stmt.table in dialect_hints: text += " " + self.get_crud_hint_text( - update_stmt.table, + update_stmt.table, dialect_hints[update_stmt.table] ) else: @@ -1155,12 +1220,12 @@ class SQLCompiler(engine.Compiled): text += ' SET ' if extra_froms and self.render_table_with_column_in_update_from: text += ', '.join( - self.visit_column(c[0]) + + self.visit_column(c[0]) + '=' + c[1] for c in colparams ) else: text += ', '.join( - self.preparer.quote(c[0].name, c[0].quote) + + self.preparer.quote(c[0].name, c[0].quote) + '=' + c[1] for c in colparams ) @@ -1172,9 +1237,9 @@ class SQLCompiler(engine.Compiled): if extra_froms: extra_from_text = self.update_from_clause( - update_stmt, - update_stmt.table, - extra_froms, + update_stmt, + update_stmt.table, + extra_froms, dialect_hints, **kw) if extra_from_text: text += " " + extra_from_text @@ -1195,7 +1260,7 @@ class SQLCompiler(engine.Compiled): return text def _create_crud_bind_param(self, col, value, required=False): - bindparam = sql.bindparam(col.key, value, + bindparam = sql.bindparam(col.key, value, type_=col.type, required=required) bindparam._is_crud = True return bindparam._compiler_dispatch(self) @@ -1220,8 +1285,8 @@ class SQLCompiler(engine.Compiled): # compiled params - return binds for all columns if 
self.column_keys is None and stmt.parameters is None: return [ - (c, self._create_crud_bind_param(c, - None, required=True)) + (c, self._create_crud_bind_param(c, + None, required=True)) for c in stmt.table.columns ] @@ -1233,8 +1298,8 @@ class SQLCompiler(engine.Compiled): parameters = {} else: parameters = dict((sql._column_as_key(key), required) - for key in self.column_keys - if not stmt.parameters or + for key in self.column_keys + if not stmt.parameters or key not in stmt.parameters) if stmt.parameters is not None: @@ -1255,7 +1320,7 @@ class SQLCompiler(engine.Compiled): postfetch_lastrowid = need_pks and self.dialect.postfetch_lastrowid check_columns = {} - # special logic that only occurs for multi-table UPDATE + # special logic that only occurs for multi-table UPDATE # statements if extra_tables and stmt.parameters: assert self.isupdate @@ -1274,7 +1339,7 @@ class SQLCompiler(engine.Compiled): value = self.process(value.self_group()) values.append((c, value)) # determine tables which are actually - # to be updated - process onupdate and + # to be updated - process onupdate and # server_onupdate for these for t in affected_tables: for c in t.c: @@ -1295,7 +1360,7 @@ class SQLCompiler(engine.Compiled): self.postfetch.append(c) # iterating through columns at the top to maintain ordering. - # otherwise we might iterate through individual sets of + # otherwise we might iterate through individual sets of # "defaults", "primary key cols", etc. 
for c in stmt.table.columns: if c.key in parameters and c.key not in check_columns: @@ -1315,8 +1380,8 @@ class SQLCompiler(engine.Compiled): if c.primary_key and \ need_pks and \ ( - implicit_returning or - not postfetch_lastrowid or + implicit_returning or + not postfetch_lastrowid or c is not stmt.table._autoincrement_column ): @@ -1402,7 +1467,7 @@ class SQLCompiler(engine.Compiled): ).difference(check_columns) if check: util.warn( - "Unconsumed column names: %s" % + "Unconsumed column names: %s" % (", ".join(check)) ) @@ -1417,13 +1482,13 @@ class SQLCompiler(engine.Compiled): if delete_stmt._hints: dialect_hints = dict([ (table, hint_text) - for (table, dialect), hint_text in + for (table, dialect), hint_text in delete_stmt._hints.items() if dialect in ('*', self.dialect.name) ]) if delete_stmt.table in dialect_hints: text += " " + self.get_crud_hint_text( - delete_stmt.table, + delete_stmt.table, dialect_hints[delete_stmt.table] ) else: @@ -1517,7 +1582,7 @@ class DDLCompiler(engine.Compiled): text += separator separator = ", \n" text += "\t" + self.get_column_specification( - column, + column, first_pk=column.primary_key and \ not first_pk ) @@ -1529,16 +1594,16 @@ class DDLCompiler(engine.Compiled): text += " " + const except exc.CompileError, ce: # Py3K - #raise exc.CompileError("(in table '%s', column '%s'): %s" + #raise exc.CompileError("(in table '%s', column '%s'): %s" # % ( - # table.description, - # column.name, + # table.description, + # column.name, # ce.args[0] # )) from ce # Py2K - raise exc.CompileError("(in table '%s', column '%s'): %s" + raise exc.CompileError("(in table '%s', column '%s'): %s" % ( - table.description, + table.description, column.name, ce.args[0] )), None, sys.exc_info()[2] @@ -1559,17 +1624,17 @@ class DDLCompiler(engine.Compiled): if table.primary_key: constraints.append(table.primary_key) - constraints.extend([c for c in table._sorted_constraints + constraints.extend([c for c in table._sorted_constraints if c is not 
table.primary_key]) return ", \n\t".join(p for p in - (self.process(constraint) - for constraint in constraints + (self.process(constraint) + for constraint in constraints if ( constraint._create_rule is None or constraint._create_rule(self)) and ( - not self.dialect.supports_alter or + not self.dialect.supports_alter or not getattr(constraint, 'use_alter', False) )) if p is not None ) @@ -1582,13 +1647,12 @@ class DDLCompiler(engine.Compiled): max = self.dialect.max_index_name_length or \ self.dialect.max_identifier_length if len(ident) > max: - return ident[0:max - 8] + \ + ident = ident[0:max - 8] + \ "_" + util.md5_hex(ident)[-4:] - else: - return ident else: self.dialect.validate_identifier(ident) - return ident + + return ident def visit_create_index(self, create): index = create.element @@ -1597,7 +1661,7 @@ class DDLCompiler(engine.Compiled): if index.unique: text += "UNIQUE " text += "INDEX %s ON %s (%s)" \ - % (preparer.quote(self._index_identifier(index.name), + % (preparer.quote(self._index_identifier(index.name), index.quote), preparer.format_table(index.table), ', '.join(preparer.quote(c.name, c.quote) @@ -1606,9 +1670,20 @@ class DDLCompiler(engine.Compiled): def visit_drop_index(self, drop): index = drop.element - return "\nDROP INDEX " + \ - self.preparer.quote( - self._index_identifier(index.name), index.quote) + if index.table is not None and index.table.schema: + schema = index.table.schema + schema_name = self.preparer.quote_schema(schema, + index.table.quote_schema) + else: + schema_name = None + + index_name = self.preparer.quote( + self._index_identifier(index.name), + index.quote) + + if schema_name: + index_name = schema_name + "." 
+ index_name + return "\nDROP INDEX " + index_name def visit_add_constraint(self, create): preparer = self.preparer @@ -1723,7 +1798,7 @@ class DDLCompiler(engine.Compiled): text += "CONSTRAINT %s " % \ self.preparer.format_constraint(constraint) text += "UNIQUE (%s)" % ( - ', '.join(self.preparer.quote(c.name, c.quote) + ', '.join(self.preparer.quote(c.name, c.quote) for c in constraint)) text += self.define_constraint_deferrability(constraint) return text @@ -1769,7 +1844,7 @@ class GenericTypeCompiler(engine.TypeCompiler): {'precision': type_.precision} else: return "NUMERIC(%(precision)s, %(scale)s)" % \ - {'precision': type_.precision, + {'precision': type_.precision, 'scale' : type_.scale} def visit_DECIMAL(self, type_): @@ -1826,25 +1901,25 @@ class GenericTypeCompiler(engine.TypeCompiler): def visit_large_binary(self, type_): return self.visit_BLOB(type_) - def visit_boolean(self, type_): + def visit_boolean(self, type_): return self.visit_BOOLEAN(type_) - def visit_time(self, type_): + def visit_time(self, type_): return self.visit_TIME(type_) - def visit_datetime(self, type_): + def visit_datetime(self, type_): return self.visit_DATETIME(type_) - def visit_date(self, type_): + def visit_date(self, type_): return self.visit_DATE(type_) - def visit_big_integer(self, type_): + def visit_big_integer(self, type_): return self.visit_BIGINT(type_) - def visit_small_integer(self, type_): + def visit_small_integer(self, type_): return self.visit_SMALLINT(type_) - def visit_integer(self, type_): + def visit_integer(self, type_): return self.visit_INTEGER(type_) def visit_real(self, type_): @@ -1853,19 +1928,19 @@ class GenericTypeCompiler(engine.TypeCompiler): def visit_float(self, type_): return self.visit_FLOAT(type_) - def visit_numeric(self, type_): + def visit_numeric(self, type_): return self.visit_NUMERIC(type_) - def visit_string(self, type_): + def visit_string(self, type_): return self.visit_VARCHAR(type_) - def visit_unicode(self, type_): + def 
visit_unicode(self, type_): return self.visit_VARCHAR(type_) - def visit_text(self, type_): + def visit_text(self, type_): return self.visit_TEXT(type_) - def visit_unicode_text(self, type_): + def visit_unicode_text(self, type_): return self.visit_TEXT(type_) def visit_enum(self, type_): @@ -1889,7 +1964,7 @@ class IdentifierPreparer(object): illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS - def __init__(self, dialect, initial_quote='"', + def __init__(self, dialect, initial_quote='"', final_quote=None, escape_quote='"', omit_schema=False): """Construct a new ``IdentifierPreparer`` object. @@ -1953,7 +2028,7 @@ class IdentifierPreparer(object): def quote_schema(self, schema, force): """Quote a schema. - Subclasses should override this to provide database-dependent + Subclasses should override this to provide database-dependent quoting behavior. """ return self.quote(schema, force) @@ -2010,7 +2085,7 @@ class IdentifierPreparer(object): return self.quote(name, quote) - def format_column(self, column, use_table=False, + def format_column(self, column, use_table=False, name=None, table_name=None): """Prepare a quoted column name.""" @@ -2019,14 +2094,14 @@ class IdentifierPreparer(object): if not getattr(column, 'is_literal', False): if use_table: return self.format_table( - column.table, use_schema=False, + column.table, use_schema=False, name=table_name) + "." 
+ \ self.quote(name, column.quote) else: return self.quote(name, column.quote) else: - # literal textual elements get stuck into ColumnClause alot, - # which shouldnt get quoted + # literal textual elements get stuck into ColumnClause a lot, + # which shouldn't get quoted if use_table: return self.format_table(column.table, diff --git a/libs/sqlalchemy/sql/expression.py b/libs/sqlalchemy/sql/expression.py index aa67f44f..c90a3dcb 100644 --- a/libs/sqlalchemy/sql/expression.py +++ b/libs/sqlalchemy/sql/expression.py @@ -1,5 +1,5 @@ # sql/expression.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -114,20 +114,20 @@ def outerjoin(left, right, onclause=None): The returned object is an instance of :class:`.Join`. - Similar functionality is also available via the - :meth:`~.FromClause.outerjoin()` method on any + Similar functionality is also available via the + :meth:`~.FromClause.outerjoin()` method on any :class:`.FromClause`. :param left: The left side of the join. :param right: The right side of the join. - :param onclause: Optional criterion for the ``ON`` clause, is - derived from foreign key relationships established between + :param onclause: Optional criterion for the ``ON`` clause, is + derived from foreign key relationships established between left and right otherwise. - To chain joins together, use the :meth:`.FromClause.join` or - :meth:`.FromClause.outerjoin` methods on the resulting + To chain joins together, use the :meth:`.FromClause.join` or + :meth:`.FromClause.outerjoin` methods on the resulting :class:`.Join` object. """ @@ -138,20 +138,20 @@ def join(left, right, onclause=None, isouter=False): The returned object is an instance of :class:`.Join`. 
- Similar functionality is also available via the - :meth:`~.FromClause.join()` method on any + Similar functionality is also available via the + :meth:`~.FromClause.join()` method on any :class:`.FromClause`. :param left: The left side of the join. :param right: The right side of the join. - :param onclause: Optional criterion for the ``ON`` clause, is - derived from foreign key relationships established between + :param onclause: Optional criterion for the ``ON`` clause, is + derived from foreign key relationships established between left and right otherwise. - To chain joins together, use the :meth:`.FromClause.join` or - :meth:`.FromClause.outerjoin` methods on the resulting + To chain joins together, use the :meth:`.FromClause.join` or + :meth:`.FromClause.outerjoin` methods on the resulting :class:`.Join` object. @@ -208,7 +208,7 @@ def select(columns=None, whereclause=None, from_obj=[], **kwargs): to set the autocommit option. :param bind=None: - an :class:`~.base.Engine` or :class:`~.base.Connection` instance + an :class:`~.base.Engine` or :class:`~.base.Connection` instance to which the resulting :class:`.Select` object will be bound. The :class:`.Select` object will otherwise automatically bind to whatever @@ -236,19 +236,27 @@ def select(columns=None, whereclause=None, from_obj=[], **kwargs): ``distinct`` is also available via the :meth:`~.Select.distinct` generative method. - .. note:: - + .. note:: + The ``distinct`` keyword's acceptance of a string argument for usage with MySQL is deprecated. Use the ``prefixes`` argument or :meth:`~.Select.prefix_with`. :param for_update=False: when ``True``, applies ``FOR UPDATE`` to the end of the - resulting statement. Certain database dialects also support - alternate values for this parameter, for example mysql - supports "read" which translates to ``LOCK IN SHARE MODE``, - and oracle supports "nowait" which translates to ``FOR UPDATE - NOWAIT``. + resulting statement. 
+ + Certain database dialects also support + alternate values for this parameter: + + * With the MySQL dialect, the value ``"read"`` translates to + ``LOCK IN SHARE MODE``. + * With the Oracle and Postgresql dialects, the value ``"nowait"`` + translates to ``FOR UPDATE NOWAIT``. + * With the Postgresql dialect, the values "read" and ``"read_nowait"`` + translate to ``FOR SHARE`` and ``FOR SHARE NOWAIT``, respectively. + + .. versionadded:: 0.7.7 :param group_by: a list of :class:`.ClauseElement` objects which will comprise the @@ -298,7 +306,7 @@ def select(columns=None, whereclause=None, from_obj=[], **kwargs): **kwargs) def subquery(alias, *args, **kwargs): - """Return an :class:`.Alias` object derived + """Return an :class:`.Alias` object derived from a :class:`.Select`. name @@ -313,7 +321,7 @@ def subquery(alias, *args, **kwargs): return Select(*args, **kwargs).alias(alias) def insert(table, values=None, inline=False, **kwargs): - """Represent an ``INSERT`` statement via the :class:`.Insert` SQL + """Represent an ``INSERT`` statement via the :class:`.Insert` SQL construct. Similar functionality is available via the :meth:`~.TableClause.insert` method on @@ -362,20 +370,20 @@ def insert(table, values=None, inline=False, **kwargs): return Insert(table, values, inline=inline, **kwargs) def update(table, whereclause=None, values=None, inline=False, **kwargs): - """Represent an ``UPDATE`` statement via the :class:`.Update` SQL + """Represent an ``UPDATE`` statement via the :class:`.Update` SQL construct. 
E.g.:: from sqlalchemy import update - + stmt = update(users).where(users.c.id==5).\\ values(name='user #5') Similar functionality is available via the :meth:`~.TableClause.update` method on :class:`.Table`:: - - + + stmt = users.update().\\ where(users.c.id==5).\\ values(name='user #5') @@ -385,40 +393,43 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs): :param whereclause: Optional SQL expression describing the ``WHERE`` condition of the ``UPDATE`` statement. Modern applications - may prefer to use the generative :meth:`~Update.where()` + may prefer to use the generative :meth:`~Update.where()` method to specify the ``WHERE`` clause. - - The WHERE clause can refer to multiple tables as of version 0.7.4. + + The WHERE clause can refer to multiple tables. For databases which support this, an ``UPDATE FROM`` clause will - be generated, or on MySQL, a multi-table update. The statement + be generated, or on MySQL, a multi-table update. The statement will fail on databases that don't have support for multi-table update statements. A SQL-standard method of referring to additional tables in the WHERE clause is to use a correlated subquery:: - + users.update().values(name='ed').where( users.c.name==select([addresses.c.email_address]).\\ where(addresses.c.user_id==users.c.id).\\ as_scalar() ) + .. versionchanged:: 0.7.4 + The WHERE clause can refer to multiple tables. + :param values: Optional dictionary which specifies the ``SET`` conditions of the ``UPDATE``. If left as ``None``, the ``SET`` - conditions are determined from those parameters passed to the - statement during the execution and/or compilation of the + conditions are determined from those parameters passed to the + statement during the execution and/or compilation of the statement. When compiled standalone without any parameters, the ``SET`` clause generates for all columns. 
- - Modern applications may prefer to use the generative - :meth:`.Update.values` method to set the values of the + + Modern applications may prefer to use the generative + :meth:`.Update.values` method to set the values of the UPDATE statement. :param inline: - if True, SQL defaults present on :class:`.Column` objects via + if True, SQL defaults present on :class:`.Column` objects via the ``default`` keyword will be compiled 'inline' into the statement and not pre-executed. This means that their values will not - be available in the dictionary returned from + be available in the dictionary returned from :meth:`.ResultProxy.last_updated_params`. If both ``values`` and compile-time bind parameters are present, the @@ -430,25 +441,25 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs): :class:`.Column`, normally but not necessarily equivalent to its "name"). Normally, the :class:`.Column` objects used here are expected to be - part of the target :class:`.Table` that is the table + part of the target :class:`.Table` that is the table to be updated. However when using MySQL, a multiple-table UPDATE statement can refer to columns from any of the tables referred to in the WHERE clause. - + The values referred to in ``values`` are typically: - + * a literal data value (i.e. string, number, etc.) * a SQL expression, such as a related :class:`.Column`, - a scalar-returning :func:`.select` construct, + a scalar-returning :func:`.select` construct, etc. 
When combining :func:`.select` constructs within the values clause of an :func:`.update` construct, - the subquery represented by the :func:`.select` should be + the subquery represented by the :func:`.select` should be *correlated* to the parent table, that is, providing criterion which links the table inside the subquery to the outer table being updated:: - + users.update().values( name=select([addresses.c.email_address]).\\ where(addresses.c.user_id==users.c.id).\\ @@ -457,20 +468,20 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs): See also: - :ref:`inserts_and_updates` - SQL Expression + :ref:`inserts_and_updates` - SQL Expression Language Tutorial - - + + """ return Update( - table, - whereclause=whereclause, - values=values, - inline=inline, + table, + whereclause=whereclause, + values=values, + inline=inline, **kwargs) def delete(table, whereclause = None, **kwargs): - """Represent a ``DELETE`` statement via the :class:`.Delete` SQL + """Represent a ``DELETE`` statement via the :class:`.Delete` SQL construct. Similar functionality is available via the :meth:`~.TableClause.delete` method on @@ -483,7 +494,7 @@ def delete(table, whereclause = None, **kwargs): :meth:`~Delete.where()` generative method may be used instead. See also: - + :ref:`deletes` - SQL Expression Tutorial """ @@ -571,7 +582,7 @@ def case(whens, value=None, else_=None): when specified as strings, will be interpreted as bound values. To specify textual SQL expressions for these, use the :func:`literal_column` - construct. + construct. 
The expressions used for the WHEN criterion may only be literal strings when "value" is @@ -596,7 +607,7 @@ def case(whens, value=None, else_=None): can be specified which determines the type of the :func:`case()` construct overall:: - case([(orderline.c.qty > 100, + case([(orderline.c.qty > 100, literal_column("'greaterthan100'", String)), (orderline.c.qty > 10, literal_column("'greaterthan10'", String)) @@ -642,8 +653,8 @@ def collate(expression, collation): expr = _literal_as_binds(expression) return _BinaryExpression( - expr, - _literal_as_text(collation), + expr, + _literal_as_text(collation), operators.collate, type_=expr.type) def exists(*args, **kwargs): @@ -774,7 +785,7 @@ def alias(selectable, name=None): with an alternate name assigned within SQL, typically using the ``AS`` clause when generated, e.g. ``SELECT * FROM table AS aliasname``. - Similar functionality is available via the + Similar functionality is available via the :meth:`~.FromClause.alias` method available on all :class:`.FromClause` subclasses. @@ -833,11 +844,11 @@ def tuple_(*expr): ) .. warning:: - - The composite IN construct is not supported by all backends, + + The composite IN construct is not supported by all backends, and is currently known to work on Postgresql and MySQL, but not SQLite. Unsupported backends will raise - a subclass of :class:`~sqlalchemy.exc.DBAPIError` when such + a subclass of :class:`~sqlalchemy.exc.DBAPIError` when such an expression is invoked. """ @@ -873,7 +884,7 @@ def type_coerce(expr, type_): conn.execute( select([type_coerce(mytable.c.ident, AsGuid)]).\\ where( - type_coerce(mytable.c.ident, AsGuid) == + type_coerce(mytable.c.ident, AsGuid) == uuid.uuid3(uuid.NAMESPACE_URL, 'bar') ) ) @@ -925,11 +936,11 @@ def column(text, type_=None): from sqlalchemy.sql import table, column - :param text: the name of the column. Quoting rules will be applied + :param text: the name of the column. Quoting rules will be applied to the clause like any other column name. 
For textual column constructs that are not to be quoted, use the :func:`literal_column` function. - :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` object + :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` object which will provide result-set translation for this column. See :class:`.ColumnClause` for further examples. @@ -963,8 +974,8 @@ def table(name, *columns): """Represent a textual table clause. The object returned is an instance of :class:`.TableClause`, which represents the - "syntactical" portion of the schema-level :class:`~.schema.Table` object. - It may be used to construct lightweight table constructs. + "syntactical" portion of the schema-level :class:`~.schema.Table` object. + It may be used to construct lightweight table constructs. Note that the :func:`~.expression.table` function is not part of the ``sqlalchemy`` namespace. It must be imported from the ``sql`` package:: @@ -1019,11 +1030,11 @@ def bindparam(key, value=None, type_=None, unique=False, required=False, callabl """ if isinstance(key, ColumnClause): - return _BindParamClause(key.name, value, type_=key.type, + return _BindParamClause(key.name, value, type_=key.type, callable_=callable_, unique=unique, required=required) else: - return _BindParamClause(key, value, type_=type_, + return _BindParamClause(key, value, type_=type_, callable_=callable_, unique=unique, required=required) @@ -1050,8 +1061,8 @@ def text(text, bind=None, *args, **kwargs): The advantages :func:`text` provides over a plain string are backend-neutral support for bind parameters, per-statement - execution options, as well as - bind parameter and result-column typing behavior, allowing + execution options, as well as + bind parameter and result-column typing behavior, allowing SQLAlchemy type constructs to play a role when executing a statement that is specified literally. 
@@ -1061,7 +1072,7 @@ def text(text, bind=None, *args, **kwargs): t = text("SELECT * FROM users WHERE id=:user_id") result = connection.execute(t, user_id=12) - To invoke SQLAlchemy typing logic for bind parameters, the + To invoke SQLAlchemy typing logic for bind parameters, the ``bindparams`` list allows specification of :func:`bindparam` constructs which specify the type for a given name:: @@ -1103,8 +1114,8 @@ def text(text, bind=None, *args, **kwargs): Note that SQLAlchemy's usual "autocommit" behavior applies to :func:`text` constructs - that is, statements which begin - with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``, - or a variety of other phrases specific to certain backends, will + with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``, + or a variety of other phrases specific to certain backends, will be eligible for autocommit if no transaction is in progress. :param text: @@ -1130,7 +1141,7 @@ def text(text, bind=None, *args, **kwargs): a dictionary mapping the names of columns represented in the columns clause of a ``SELECT`` statement to type objects, which will be used to perform post-processing on columns within - the result set. This argument applies to any expression + the result set. This argument applies to any expression that returns result sets. """ @@ -1161,7 +1172,7 @@ def over(func, partition_by=None, order_by=None): This function is also available from the :attr:`~.expression.func` construct itself via the :meth:`.FunctionElement.over` method. - New in 0.7. + .. versionadded:: 0.7 """ return _Over(func, partition_by=partition_by, order_by=order_by) @@ -1173,14 +1184,14 @@ def null(): return _Null() def true(): - """Return a :class:`_True` object, which compiles to ``true``, or the + """Return a :class:`_True` object, which compiles to ``true``, or the boolean equivalent for the target dialect. 
""" return _True() def false(): - """Return a :class:`_False` object, which compiles to ``false``, or the + """Return a :class:`_False` object, which compiles to ``false``, or the boolean equivalent for the target dialect. """ @@ -1291,23 +1302,23 @@ class _truncated_label(unicode): return self # for backwards compatibility in case -# someone is re-implementing the +# someone is re-implementing the # _truncated_identifier() sequence in a custom # compiler _generated_label = _truncated_label class _anonymous_label(_truncated_label): - """A unicode subclass used to identify anonymously + """A unicode subclass used to identify anonymously generated names.""" def __add__(self, other): return _anonymous_label( - unicode(self) + + unicode(self) + unicode(other)) def __radd__(self, other): return _anonymous_label( - unicode(other) + + unicode(other) + unicode(self)) def apply_map(self, map_): @@ -1315,8 +1326,8 @@ class _anonymous_label(_truncated_label): def _as_truncated(value): """coerce the given value to :class:`._truncated_label`. - - Existing :class:`._truncated_label` and + + Existing :class:`._truncated_label` and :class:`._anonymous_label` objects are passed unchanged. """ @@ -1346,7 +1357,7 @@ def _expand_cloned(elements): return itertools.chain(*[x._cloned_set for x in elements]) def _select_iterables(elements): - """expand tables into individual columns in the + """expand tables into individual columns in the given list of column expressions. """ @@ -1356,7 +1367,7 @@ def _cloned_intersection(a, b): """return the intersection of sets a and b, counting any overlap between 'cloned' predecessors. - The returned set is in terms of the enties present within 'a'. + The returned set is in terms of the entities present within 'a'. 
""" all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b)) @@ -1476,8 +1487,8 @@ def _corresponding_column_or_error(fromclause, column, raise exc.InvalidRequestError( "Given column '%s', attached to table '%s', " "failed to locate a corresponding column from table '%s'" - % - (column, + % + (column, getattr(column, 'table', None),fromclause.description) ) return c @@ -1535,7 +1546,7 @@ class ClauseElement(Visitable): def _constructor(self): """return the 'constructor' for this ClauseElement. - This is for the purposes for creating a new object of + This is for the purposes for creating a new object of this type. Usually, its just the element's __class__. However, the "Annotated" version of the object overrides to return the class of its proxied element. @@ -1545,7 +1556,7 @@ class ClauseElement(Visitable): @util.memoized_property def _cloned_set(self): - """Return the set consisting all cloned anscestors of this + """Return the set consisting all cloned ancestors of this ClauseElement. Includes this ClauseElement. This accessor tends to be used for @@ -1591,7 +1602,7 @@ class ClauseElement(Visitable): return self._clone() def unique_params(self, *optionaldict, **kwargs): - """Return a copy with :func:`bindparam()` elments replaced. + """Return a copy with :func:`bindparam()` elements replaced. Same functionality as ``params()``, except adds `unique=True` to affected bind parameters so that multiple statements can be @@ -1601,7 +1612,7 @@ class ClauseElement(Visitable): return self._params(True, optionaldict, kwargs) def params(self, *optionaldict, **kwargs): - """Return a copy with :func:`bindparam()` elments replaced. + """Return a copy with :func:`bindparam()` elements replaced. Returns a copy of this ClauseElement with :func:`bindparam()` elements replaced with values taken from the given dictionary:: @@ -1672,21 +1683,21 @@ class ClauseElement(Visitable): def self_group(self, against=None): """Apply a 'grouping' to this :class:`.ClauseElement`. 
- This method is overridden by subclasses to return a + This method is overridden by subclasses to return a "grouping" construct, i.e. parenthesis. In particular it's used by "binary" expressions to provide a grouping - around themselves when placed into a larger expression, + around themselves when placed into a larger expression, as well as by :func:`.select` constructs when placed into - the FROM clause of another :func:`.select`. (Note that - subqueries should be normally created using the + the FROM clause of another :func:`.select`. (Note that + subqueries should be normally created using the :func:`.Select.alias` method, as many platforms require nested SELECT statements to be named). As expressions are composed together, the application of - :meth:`self_group` is automatic - end-user code should never + :meth:`self_group` is automatic - end-user code should never need to use this method directly. Note that SQLAlchemy's - clause constructs take operator precedence into account - - so parenthesis might not be needed, for example, in + clause constructs take operator precedence into account - + so parenthesis might not be needed, for example, in an expression like ``x OR (y AND z)`` - AND takes precedence over OR. @@ -1741,7 +1752,7 @@ class ClauseElement(Visitable): compiled statement. If ``None``, all columns from the target table object are rendered. - :param dialect: A ``Dialect`` instance frmo which a ``Compiled`` + :param dialect: A ``Dialect`` instance from which a ``Compiled`` will be acquired. This argument takes precedence over the `bind` argument as well as this :class:`.ClauseElement`'s bound engine, if any. 
@@ -1789,15 +1800,15 @@ class ClauseElement(Visitable): return self._negate() def __nonzero__(self): - raise TypeError("Boolean value of this clause is not defined") + raise TypeError("Boolean value of this clause is not defined") def _negate(self): if hasattr(self, 'negation_clause'): return self.negation_clause else: return _UnaryExpression( - self.self_group(against=operators.inv), - operator=operators.inv, + self.self_group(against=operators.inv), + operator=operators.inv, negate=None) def __repr__(self): @@ -1826,7 +1837,7 @@ class _CompareMixin(ColumnOperators): """Defines comparison and math operations for :class:`.ClauseElement` instances. - See :class:`.ColumnOperators` and :class:`.Operators` for descriptions + See :class:`.ColumnOperators` and :class:`.Operators` for descriptions of all operations. """ @@ -1835,10 +1846,10 @@ class _CompareMixin(ColumnOperators): **kwargs ): if obj is None or isinstance(obj, _Null): - if op == operators.eq: + if op in (operators.eq, operators.is_): return _BinaryExpression(self, null(), operators.is_, negate=operators.isnot) - elif op == operators.ne: + elif op in (operators.ne, operators.isnot): return _BinaryExpression(self, null(), operators.isnot, negate=operators.is_) else: @@ -1848,16 +1859,16 @@ class _CompareMixin(ColumnOperators): obj = self._check_literal(op, obj) if reverse: - return _BinaryExpression(obj, - self, - op, - type_=sqltypes.BOOLEANTYPE, + return _BinaryExpression(obj, + self, + op, + type_=sqltypes.BOOLEANTYPE, negate=negate, modifiers=kwargs) else: - return _BinaryExpression(self, - obj, - op, - type_=sqltypes.BOOLEANTYPE, + return _BinaryExpression(self, + obj, + op, + type_=sqltypes.BOOLEANTYPE, negate=negate, modifiers=kwargs) def __operate(self, op, obj, reverse=False): @@ -1899,6 +1910,8 @@ class _CompareMixin(ColumnOperators): operators.eq : (__compare, operators.ne), operators.like_op : (__compare, operators.notlike_op), operators.ilike_op : (__compare, operators.notilike_op), + 
operators.is_ : (__compare, operators.is_), + operators.isnot : (__compare, operators.isnot), } def operate(self, op, *other, **kwargs): @@ -1982,7 +1995,7 @@ class _CompareMixin(ColumnOperators): """See :meth:`.ColumnOperators.endswith`.""" return self.__compare( operators.like_op, - literal_column("'%'", type_=sqltypes.String) + + literal_column("'%'", type_=sqltypes.String) + self._check_literal(operators.like_op, other), escape=escape) @@ -2054,8 +2067,8 @@ class _CompareMixin(ColumnOperators): return lambda other: self.__operate(operator, other) def _bind_param(self, operator, obj): - return _BindParamClause(None, obj, - _compared_to_operator=operator, + return _BindParamClause(None, obj, + _compared_to_operator=operator, _compared_to_type=self.type, unique=True) def _check_literal(self, operator, other): @@ -2127,7 +2140,7 @@ class ColumnElement(ClauseElement, _CompareMixin): return s def shares_lineage(self, othercolumn): - """Return True if the given :class:`.ColumnElement` + """Return True if the given :class:`.ColumnElement` has a common ancestor to this :class:`.ColumnElement`.""" return bool(self.proxy_set.intersection(othercolumn.proxy_set)) @@ -2154,14 +2167,14 @@ class ColumnElement(ClauseElement, _CompareMixin): else: key = name - co = ColumnClause(_as_truncated(name), - selectable, + co = ColumnClause(_as_truncated(name), + selectable, type_=getattr(self, 'type', None)) co.proxies = [self] if selectable._is_clone_of is not None: co._is_clone_of = \ - selectable._is_clone_of.columns[key] + selectable._is_clone_of.columns.get(key) selectable._columns[key] = co return co @@ -2175,7 +2188,7 @@ class ColumnElement(ClauseElement, _CompareMixin): :param equivalents: a dictionary of columns as keys mapped to sets of columns. If the given "other" column is present in this - dictionary, if any of the columns in the correponding set() pass the + dictionary, if any of the columns in the corresponding set() pass the comparison test, the result is True. 
This is used to expand the comparison to other columns that may be known to be equivalent to this one via foreign key or other criterion. @@ -2366,7 +2379,7 @@ class Selectable(ClauseElement): __visit_name__ = 'selectable' class FromClause(Selectable): - """Represent an element that can be used within the ``FROM`` + """Represent an element that can be used within the ``FROM`` clause of a ``SELECT`` statement. """ @@ -2375,7 +2388,7 @@ class FromClause(Selectable): _hide_froms = [] quote = None schema = None - _memoized_property = util.group_expirable_memoized_property(["_columns"]) + _memoized_property = util.group_expirable_memoized_property(["_columns"]) def count(self, whereclause=None, **params): """return a SELECT COUNT generated against this @@ -2386,9 +2399,9 @@ class FromClause(Selectable): else: col = list(self.columns)[0] return select( - [func.count(col).label('tbl_row_count')], - whereclause, - from_obj=[self], + [func.count(col).label('tbl_row_count')], + whereclause, + from_obj=[self], **params) def select(self, whereclause=None, **params): @@ -2429,10 +2442,23 @@ class FromClause(Selectable): An example would be an Alias of a Table is derived from that Table. """ + # this is essentially an "identity" check in the base class. + # Other constructs override this to traverse through + # contained elements. return fromclause in self._cloned_set + def _is_lexical_equivalent(self, other): + """Return True if this FromClause and the other represent + the same lexical identity. + + This tests if either one is a copy of the other, or + if they are the same via annotation identity. + + """ + return self._cloned_set.intersection(other._cloned_set) + def replace_selectable(self, old, alias): - """replace all occurrences of FromClause 'old' with the given Alias + """replace all occurrences of FromClause 'old' with the given Alias object, returning a copy of this :class:`.FromClause`. 
""" @@ -2456,7 +2482,7 @@ class FromClause(Selectable): """Given a :class:`.ColumnElement`, return the exported :class:`.ColumnElement` object from this :class:`.Selectable` which corresponds to that original - :class:`~sqlalchemy.schema.Column` via a common anscestor + :class:`~sqlalchemy.schema.Column` via a common ancestor column. :param column: the target :class:`.ColumnElement` to be matched @@ -2465,7 +2491,7 @@ class FromClause(Selectable): the given :class:`.ColumnElement`, if the given :class:`.ColumnElement` is actually present within a sub-element of this :class:`.FromClause`. Normally the column will match if - it merely shares a common anscestor with one of the exported + it merely shares a common ancestor with one of the exported columns of this :class:`.FromClause`. """ @@ -2477,7 +2503,7 @@ class FromClause(Selectable): return False return True - # dont dig around if the column is locally present + # don't dig around if the column is locally present if self.c.contains_column(column): return column col, intersect = None, None @@ -2590,9 +2616,9 @@ class _BindParamClause(ColumnElement): __visit_name__ = 'bindparam' quote = None - def __init__(self, key, value, type_=None, unique=False, + def __init__(self, key, value, type_=None, unique=False, callable_=None, - isoutparam=False, required=False, + isoutparam=False, required=False, _compared_to_operator=None, _compared_to_type=None): """Construct a _BindParamClause. @@ -2615,7 +2641,7 @@ class _BindParamClause(ColumnElement): will be called at statement execution time to determine the ultimate value. Used for scenarios where the actual bind value cannot be determined at the point at which the clause - construct is created, but embeded bind values are still desirable. + construct is created, but embedded bind values are still desirable. 
:param type\_: A ``TypeEngine`` object that will be used to pre-process the @@ -2643,12 +2669,12 @@ class _BindParamClause(ColumnElement): self.key = key or _anonymous_label('%%(%d param)s' % id(self)) - # identifiying key that won't change across + # identifying key that won't change across # clones, used to identify the bind's logical # identity self._identifying_key = self.key - # key that was passed in the first place, used to + # key that was passed in the first place, used to # generate new keys self._orig_key = key or 'param' @@ -2672,13 +2698,13 @@ class _BindParamClause(ColumnElement): @property def effective_value(self): - """Return the value of this bound parameter, + """Return the value of this bound parameter, taking into account if the ``callable`` parameter - was set. - + was set. + The ``callable`` value will be evaluated and returned if present, else ``value``. - + """ if self.callable: return self.callable() @@ -2764,12 +2790,12 @@ class Executable(_Generative): """ Set non-SQL options for the statement which take effect during execution. - Execution options can be set on a per-statement or - per :class:`.Connection` basis. Additionally, the + Execution options can be set on a per-statement or + per :class:`.Connection` basis. Additionally, the :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide access to execution options which they in turn configure upon connections. - The :meth:`execution_options` method is generative. A new + The :meth:`execution_options` method is generative. A new instance of this statement is returned that contains the options:: statement = select([table.c.x, table.c.y]) @@ -2778,7 +2804,7 @@ class Executable(_Generative): Note that only a subset of possible execution options can be applied to a statement - these include "autocommit" and "stream_results", but not "isolation_level" or "compiled_cache". 
- See :meth:`.Connection.execution_options` for a full list of + See :meth:`.Connection.execution_options` for a full list of possible options. See also: @@ -2823,7 +2849,7 @@ class Executable(_Generative): @property def bind(self): - """Returns the :class:`.Engine` or :class:`.Connection` to + """Returns the :class:`.Engine` or :class:`.Connection` to which this :class:`.Executable` is bound, or None if none found. This is a traversal which checks locally, then @@ -2933,6 +2959,10 @@ class _Null(ColumnElement): def __init__(self): self.type = sqltypes.NULLTYPE + def compare(self, other): + return isinstance(other, _Null) + + class _False(ColumnElement): """Represent the ``false`` keyword in a SQL statement. @@ -3086,12 +3116,12 @@ class _Case(ColumnElement): if value is not None: whenlist = [ - (_literal_as_binds(c).self_group(), + (_literal_as_binds(c).self_group(), _literal_as_binds(r)) for (c, r) in whens ] else: whenlist = [ - (_no_literals(c).self_group(), + (_no_literals(c).self_group(), _literal_as_binds(r)) for (c, r) in whens ] @@ -3115,7 +3145,7 @@ class _Case(ColumnElement): def _copy_internals(self, clone=_clone, **kw): if self.value is not None: self.value = clone(self.value, **kw) - self.whens = [(clone(x, **kw), clone(y, **kw)) + self.whens = [(clone(x, **kw), clone(y, **kw)) for x, y in self.whens] if self.else_ is not None: self.else_ = clone(self.else_, **kw) @@ -3127,7 +3157,7 @@ class _Case(ColumnElement): yield x yield y if self.else_ is not None: - yield self.else_ + yield self.else_ @property def _from_objects(self): @@ -3150,7 +3180,7 @@ class FunctionElement(Executable, ColumnElement, FromClause): @property def columns(self): - """Fulfill the 'columns' contrct of :class:`.ColumnElement`. + """Fulfill the 'columns' contract of :class:`.ColumnElement`. Returns a single-element list consisting of this object. 
@@ -3182,7 +3212,7 @@ class FunctionElement(Executable, ColumnElement, FromClause): See :func:`~.expression.over` for a full description. - New in 0.7. + .. versionadded:: 0.7 """ return over(self, partition_by=partition_by, order_by=order_by) @@ -3192,7 +3222,7 @@ class FunctionElement(Executable, ColumnElement, FromClause): return self.clauses._from_objects def get_children(self, **kwargs): - return self.clause_expr, + return self.clause_expr, def _copy_internals(self, clone=_clone, **kw): self.clause_expr = clone(self.clause_expr, **kw) @@ -3200,7 +3230,7 @@ class FunctionElement(Executable, ColumnElement, FromClause): util.reset_memoized(self, 'clauses') def select(self): - """Produce a :func:`~.expression.select` construct + """Produce a :func:`~.expression.select` construct against this :class:`.FunctionElement`. This is shorthand for:: @@ -3217,10 +3247,10 @@ class FunctionElement(Executable, ColumnElement, FromClause): """Execute this :class:`.FunctionElement` against an embedded 'bind' and return a scalar value. - This first calls :meth:`~.FunctionElement.select` to + This first calls :meth:`~.FunctionElement.select` to produce a SELECT construct. - Note that :class:`.FunctionElement` can be passed to + Note that :class:`.FunctionElement` can be passed to the :meth:`.Connectable.scalar` method of :class:`.Connection` or :class:`.Engine`. @@ -3231,10 +3261,10 @@ class FunctionElement(Executable, ColumnElement, FromClause): """Execute this :class:`.FunctionElement` against an embedded 'bind'. - This first calls :meth:`~.FunctionElement.select` to + This first calls :meth:`~.FunctionElement.select` to produce a SELECT construct. - Note that :class:`.FunctionElement` can be passed to + Note that :class:`.FunctionElement` can be passed to the :meth:`.Connectable.execute` method of :class:`.Connection` or :class:`.Engine`. 
@@ -3242,7 +3272,7 @@ class FunctionElement(Executable, ColumnElement, FromClause): return self.select().execute() def _bind_param(self, operator, obj): - return _BindParamClause(None, obj, _compared_to_operator=operator, + return _BindParamClause(None, obj, _compared_to_operator=operator, _compared_to_type=self.type, unique=True) @@ -3259,7 +3289,7 @@ class Function(FunctionElement): def __init__(self, name, *clauses, **kw): """Construct a :class:`.Function`. - The :attr:`.func` construct is normally used to construct + The :attr:`.func` construct is normally used to construct new :class:`.Function` instances. """ @@ -3322,7 +3352,7 @@ class _UnaryExpression(ColumnElement): __visit_name__ = 'unary' - def __init__(self, element, operator=None, modifier=None, + def __init__(self, element, operator=None, modifier=None, type_=None, negate=None): self.operator = operator self.modifier = modifier @@ -3377,7 +3407,7 @@ class _BinaryExpression(ColumnElement): __visit_name__ = 'binary' - def __init__(self, left, right, operator, type_=None, + def __init__(self, left, right, operator, type_=None, negate=None, modifiers=None): self.left = _literal_as_text(left).self_group(against=operator) self.right = _literal_as_text(right).self_group(against=operator) @@ -3407,7 +3437,7 @@ class _BinaryExpression(ColumnElement): return self.left, self.right def compare(self, other, **kw): - """Compare this :class:`_BinaryExpression` against the + """Compare this :class:`_BinaryExpression` against the given :class:`_BinaryExpression`.""" return ( @@ -3568,10 +3598,10 @@ class Join(FromClause): where(whereclause).\\ select_from(j) - :param whereclause: the WHERE criterion that will be sent to + :param whereclause: the WHERE criterion that will be sent to the :func:`select()` function - :param fold_equivalents: based on the join criterion of this + :param fold_equivalents: based on the join criterion of this :class:`.Join`, do not include repeat column names in the column list of the 
resulting select, for columns that are calculated to be "equivalent" @@ -3579,7 +3609,7 @@ class Join(FromClause): recursively apply to any joins directly nested by this one as well. - :param \**kwargs: all other kwargs are sent to the + :param \**kwargs: all other kwargs are sent to the underlying :func:`select()` function. """ @@ -3599,9 +3629,9 @@ class Join(FromClause): Used against a :class:`.Join` object, :meth:`~.Join.alias` calls the :meth:`~.Join.select` - method first so that a subquery against a + method first so that a subquery against a :func:`.select` construct is generated. - the :func:`~expression.select` construct also has the + the :func:`~expression.select` construct also has the ``correlate`` flag set to ``False`` and will not auto-correlate inside an enclosing :func:`~expression.select` construct. @@ -3618,7 +3648,7 @@ class Join(FromClause): name=name ) - See :func:`~.expression.alias` for further details on + See :func:`~.expression.alias` for further details on aliases. """ @@ -3721,21 +3751,24 @@ class Alias(FromClause): class CTE(Alias): """Represent a Common Table Expression. - + The :class:`.CTE` object is obtained using the :meth:`._SelectBase.cte` method from any selectable. See that method for complete examples. - - New in 0.7.6. + + .. 
versionadded:: 0.7.6 """ __visit_name__ = 'cte' - def __init__(self, selectable, - name=None, - recursive=False, - cte_alias=False): + + def __init__(self, selectable, + name=None, + recursive=False, + cte_alias=False, + _restates=frozenset()): self.recursive = recursive self.cte_alias = cte_alias + self._restates = _restates super(CTE, self).__init__(selectable, name=name) def alias(self, name=None): @@ -3750,14 +3783,16 @@ class CTE(Alias): return CTE( self.original.union(other), name=self.name, - recursive=self.recursive + recursive=self.recursive, + _restates=self._restates.union([self]) ) def union_all(self, other): return CTE( self.original.union_all(other), name=self.name, - recursive=self.recursive + recursive=self.recursive, + _restates=self._restates.union([self]) ) @@ -3845,7 +3880,7 @@ class _FromGrouping(FromClause): class _Over(ColumnElement): """Represent an OVER clause. - This is a special operator against a so-called + This is a special operator against a so-called "window" function, as well as any aggregate function, which produces results relative to the result set itself. 
It's supported only by certain database @@ -3869,8 +3904,8 @@ class _Over(ColumnElement): return self.func.type def get_children(self, **kwargs): - return [c for c in - (self.func, self.partition_by, self.order_by) + return [c for c in + (self.func, self.partition_by, self.order_by) if c is not None] def _copy_internals(self, clone=_clone, **kw): @@ -3883,8 +3918,8 @@ class _Over(ColumnElement): @property def _from_objects(self): return list(itertools.chain( - *[c._from_objects for c in - (self.func, self.partition_by, self.order_by) + *[c._from_objects for c in + (self.func, self.partition_by, self.order_by) if c is not None] )) @@ -3929,8 +3964,8 @@ class _Label(ColumnElement): def self_group(self, against=None): sub_element = self._element.self_group(against=against) if sub_element is not self._element: - return _Label(self.name, - sub_element, + return _Label(self.name, + sub_element, type_=self._type) else: return self @@ -3975,7 +4010,7 @@ class ColumnClause(_Immutable, ColumnElement): s = select([c1, c2]).where(c1==5) There is also a variant on :func:`~.expression.column` known - as :func:`~.expression.literal_column` - the difference is that + as :func:`~.expression.literal_column` - the difference is that in the latter case, the string value is assumed to be an exact expression, rather than a column name, so that no quoting rules or similar are applied:: @@ -3984,8 +4019,8 @@ class ColumnClause(_Immutable, ColumnElement): s = select([literal_column("5 + 7")]) - :class:`.ColumnClause` can also be used in a table-like - fashion by combining the :func:`~.expression.column` function + :class:`.ColumnClause` can also be used in a table-like + fashion by combining the :func:`~.expression.column` function with the :func:`~.expression.table` function, to produce a "lightweight" form of table metadata:: @@ -4005,10 +4040,10 @@ class ColumnClause(_Immutable, ColumnElement): :param selectable: parent selectable. 
- :param type: :class:`.types.TypeEngine` object which can associate + :param type: :class:`.types.TypeEngine` object which can associate this :class:`.ColumnClause` with a type. - :param is_literal: if True, the :class:`.ColumnClause` is assumed to + :param is_literal: if True, the :class:`.ColumnClause` is assumed to be an exact expression that will be delivered to the output with no quoting rules applied regardless of case sensitive settings. the :func:`literal_column()` function is usually used to create such a @@ -4019,7 +4054,7 @@ class ColumnClause(_Immutable, ColumnElement): onupdate = default = server_default = server_onupdate = None - _memoized_property = util.group_expirable_memoized_property() + _memoized_property = util.group_expirable_memoized_property() def __init__(self, text, selectable=None, type_=None, is_literal=False): self.key = self.name = text @@ -4028,11 +4063,15 @@ class ColumnClause(_Immutable, ColumnElement): self.is_literal = is_literal def _compare_name_for_result(self, other): - if self.table is not None and hasattr(other, 'proxy_set'): - return other.proxy_set.intersection(self.proxy_set) - else: + if self.is_literal or \ + self.table is None or \ + not hasattr(other, 'proxy_set') or ( + isinstance(other, ColumnClause) and other.is_literal + ): return super(ColumnClause, self).\ _compare_name_for_result(other) + else: + return other.proxy_set.intersection(self.proxy_set) def _get_table(self): return self.__dict__['table'] @@ -4096,9 +4135,9 @@ class ColumnClause(_Immutable, ColumnElement): return name def label(self, name): - # currently, anonymous labels don't occur for + # currently, anonymous labels don't occur for # ColumnClause. The use at the moment - # is that they do not generate nicely for + # is that they do not generate nicely for # is_literal clauses. We would like to change # this so that label(None) acts as would be expected. # See [ticket:2168]. 
@@ -4119,15 +4158,15 @@ class ColumnClause(_Immutable, ColumnElement): # otherwise its considered to be a label is_literal = self.is_literal and (name is None or name == self.name) c = self._constructor( - _as_truncated(name or self.name), - selectable=selectable, - type_=self.type, + _as_truncated(name or self.name), + selectable=selectable, + type_=self.type, is_literal=is_literal ) c.proxies = [self] if selectable._is_clone_of is not None: c._is_clone_of = \ - selectable._is_clone_of.columns[c.name] + selectable._is_clone_of.columns.get(c.name) if attach: selectable._columns[c.name] = c @@ -4137,8 +4176,8 @@ class TableClause(_Immutable, FromClause): """Represents a minimal "table" construct. The constructor for :class:`.TableClause` is the - :func:`~.expression.table` function. This produces - a lightweight table object that has only a name and a + :func:`~.expression.table` function. This produces + a lightweight table object that has only a name and a collection of columns, which are typically produced by the :func:`~.expression.column` function:: @@ -4156,7 +4195,7 @@ class TableClause(_Immutable, FromClause): the ``.c.`` collection and statement generation methods. It does **not** provide all the additional schema-level services - of :class:`~.schema.Table`, including constraints, references to other + of :class:`~.schema.Table`, including constraints, references to other tables, or support for :class:`.MetaData`-level services. It's useful on its own as an ad-hoc construct used to generate quick SQL statements when a more fully fledged :class:`~.schema.Table` is not on hand. 
@@ -4206,21 +4245,21 @@ class TableClause(_Immutable, FromClause): else: col = list(self.columns)[0] return select( - [func.count(col).label('tbl_row_count')], - whereclause, - from_obj=[self], + [func.count(col).label('tbl_row_count')], + whereclause, + from_obj=[self], **params) def insert(self, values=None, inline=False, **kwargs): """Generate an :func:`.insert` construct against this :class:`.TableClause`. - + E.g.:: - + table.insert().values(name='foo') - + See :func:`.insert` for argument and usage information. - + """ return insert(self, values=values, inline=inline, **kwargs) @@ -4228,28 +4267,28 @@ class TableClause(_Immutable, FromClause): def update(self, whereclause=None, values=None, inline=False, **kwargs): """Generate an :func:`.update` construct against this :class:`.TableClause`. - + E.g.:: - + table.update().where(table.c.id==7).values(name='foo') - + See :func:`.update` for argument and usage information. - + """ - return update(self, whereclause=whereclause, + return update(self, whereclause=whereclause, values=values, inline=inline, **kwargs) def delete(self, whereclause=None, **kwargs): """Generate a :func:`.delete` construct against this :class:`.TableClause`. - + E.g.:: - + table.delete().where(table.c.id==7) - + See :func:`.delete` for argument and usage information. - + """ return delete(self, whereclause, **kwargs) @@ -4302,7 +4341,7 @@ class _SelectBase(Executable, FromClause): Typically, a select statement which has only one column in its columns clause is eligible to be used as a scalar expression. - The returned object is an instance of + The returned object is an instance of :class:`_ScalarSelect`. """ @@ -4332,36 +4371,36 @@ class _SelectBase(Executable, FromClause): def cte(self, name=None, recursive=False): """Return a new :class:`.CTE`, or Common Table Expression instance. 
- + Common table expressions are a SQL standard whereby SELECT statements can draw upon secondary statements specified along with the primary statement, using a clause called "WITH". - Special semantics regarding UNION can also be employed to - allow "recursive" queries, where a SELECT statement can draw + Special semantics regarding UNION can also be employed to + allow "recursive" queries, where a SELECT statement can draw upon the set of rows that have previously been selected. - + SQLAlchemy detects :class:`.CTE` objects, which are treated similarly to :class:`.Alias` objects, as special elements to be delivered to the FROM clause of the statement as well as to a WITH clause at the top of the statement. - The :meth:`._SelectBase.cte` method is new in 0.7.6. - + .. versionadded:: 0.7.6 + :param name: name given to the common table expression. Like :meth:`._FromClause.alias`, the name can be left as ``None`` in which case an anonymous symbol will be used at query compile time. :param recursive: if ``True``, will render ``WITH RECURSIVE``. - A recursive common table expression is intended to be used in + A recursive common table expression is intended to be used in conjunction with UNION ALL in order to derive rows from those already selected. - The following examples illustrate two examples from + The following examples illustrate two examples from Postgresql's documentation at http://www.postgresql.org/docs/8.4/static/queries-with.html. 
- + Example 1, non recursive:: - + from sqlalchemy import Table, Column, String, Integer, MetaData, \\ select, func @@ -4375,30 +4414,30 @@ class _SelectBase(Executable, FromClause): ) regional_sales = select([ - orders.c.region, + orders.c.region, func.sum(orders.c.amount).label('total_sales') ]).group_by(orders.c.region).cte("regional_sales") top_regions = select([regional_sales.c.region]).\\ where( - regional_sales.c.total_sales > + regional_sales.c.total_sales > select([ func.sum(regional_sales.c.total_sales)/10 ]) ).cte("top_regions") statement = select([ - orders.c.region, - orders.c.product, - func.sum(orders.c.quantity).label("product_units"), + orders.c.region, + orders.c.product, + func.sum(orders.c.quantity).label("product_units"), func.sum(orders.c.amount).label("product_sales") ]).where(orders.c.region.in_( select([top_regions.c.region]) )).group_by(orders.c.region, orders.c.product) - + result = conn.execute(statement).fetchall() - + Example 2, WITH RECURSIVE:: from sqlalchemy import Table, Column, String, Integer, MetaData, \\ @@ -4413,8 +4452,8 @@ class _SelectBase(Executable, FromClause): ) included_parts = select([ - parts.c.sub_part, - parts.c.part, + parts.c.sub_part, + parts.c.part, parts.c.quantity]).\\ where(parts.c.part=='our part').\\ cte(recursive=True) @@ -4424,15 +4463,15 @@ class _SelectBase(Executable, FromClause): parts_alias = parts.alias() included_parts = included_parts.union_all( select([ - parts_alias.c.part, - parts_alias.c.sub_part, + parts_alias.c.part, + parts_alias.c.sub_part, parts_alias.c.quantity ]). 
where(parts_alias.c.part==incl_alias.c.sub_part) ) statement = select([ - included_parts.c.sub_part, + included_parts.c.sub_part, func.sum(included_parts.c.quantity).label('total_quantity') ]).\ select_from(included_parts.join(parts, @@ -4441,9 +4480,9 @@ class _SelectBase(Executable, FromClause): result = conn.execute(statement).fetchall() - + See also: - + :meth:`.orm.query.Query.cte` - ORM version of :meth:`._SelectBase.cte`. """ @@ -4560,7 +4599,7 @@ class _ScalarSelect(_Grouping): return list(self.inner_columns)[0]._make_proxy(selectable, name) class CompoundSelect(_SelectBase): - """Forms the basis of ``UNION``, ``UNION ALL``, and other + """Forms the basis of ``UNION``, ``UNION ALL``, and other SELECT-based set operations.""" __visit_name__ = 'compound_select' @@ -4680,14 +4719,14 @@ class Select(_SelectBase): _memoized_property = _SelectBase._memoized_property - def __init__(self, - columns, - whereclause=None, - from_obj=None, - distinct=False, - having=None, - correlate=True, - prefixes=None, + def __init__(self, + columns, + whereclause=None, + from_obj=None, + distinct=False, + having=None, + correlate=True, + prefixes=None, **kwargs): """Construct a Select object. @@ -4715,14 +4754,14 @@ class Select(_SelectBase): self._distinct = True else: self._distinct = [ - _literal_as_text(e) + _literal_as_text(e) for e in util.to_list(distinct) ] self._correlate = set() if from_obj is not None: self._from_obj = util.OrderedSet( - _literal_as_text(f) + _literal_as_text(f) for f in util.to_list(from_obj)) else: self._from_obj = util.OrderedSet() @@ -4762,7 +4801,7 @@ class Select(_SelectBase): def _froms(self): # would love to cache this, # but there's just enough edge cases, particularly now that - # declarative encourages construction of SQL expressions + # declarative encourages construction of SQL expressions # without tables present, to just regen this each time. 
froms = [] seen = set() @@ -4795,6 +4834,17 @@ class Select(_SelectBase): toremove = set(itertools.chain(*[f._hide_froms for f in froms])) if toremove: + # if we're maintaining clones of froms, + # add the copies out to the toremove list. only include + # clones that are lexical equivalents. + if self._from_cloned: + toremove.update( + self._from_cloned[f] for f in + toremove.intersection(self._from_cloned) + if self._from_cloned[f]._is_lexical_equivalent(f) + ) + # filter out to FROM clauses not in the list, + # using a list to maintain ordering froms = [f for f in froms if f not in toremove] if len(froms) > 1 or self._correlate: @@ -4941,14 +4991,14 @@ class Select(_SelectBase): return (column_collections and list(self.columns) or []) + \ self._raw_columns + list(self._froms) + \ - [x for x in - (self._whereclause, self._having, - self._order_by_clause, self._group_by_clause) + [x for x in + (self._whereclause, self._having, + self._order_by_clause, self._group_by_clause) if x is not None] @_generative def column(self, column): - """return a new select() construct with the given column expression + """return a new select() construct with the given column expression added to its columns clause. """ @@ -4956,44 +5006,43 @@ class Select(_SelectBase): @_generative def with_only_columns(self, columns): - """Return a new :func:`.select` construct with its columns + """Return a new :func:`.select` construct with its columns clause replaced with the given columns. - - .. note:: - - Due to a bug fix, this method has a slight - behavioral change as of version 0.7.3. - Prior to version 0.7.3, the FROM clause of - a :func:`.select` was calculated upfront and as new columns - were added; in 0.7.3 and later it's calculated - at compile time, fixing an issue regarding late binding - of columns to parent tables. 
This changes the behavior of - :meth:`.Select.with_only_columns` in that FROM clauses no - longer represented in the new list are dropped, - but this behavior is more consistent in - that the FROM clauses are consistently derived from the - current columns clause. The original intent of this method - is to allow trimming of the existing columns list to be fewer - columns than originally present; the use case of replacing - the columns list with an entirely different one hadn't - been anticipated until 0.7.3 was released; the usage - guidelines below illustrate how this should be done. - - This method is exactly equivalent to as if the original - :func:`.select` had been called with the given columns + + .. versionchanged:: 0.7.3 + Due to a bug fix, this method has a slight + behavioral change as of version 0.7.3. + Prior to version 0.7.3, the FROM clause of + a :func:`.select` was calculated upfront and as new columns + were added; in 0.7.3 and later it's calculated + at compile time, fixing an issue regarding late binding + of columns to parent tables. This changes the behavior of + :meth:`.Select.with_only_columns` in that FROM clauses no + longer represented in the new list are dropped, + but this behavior is more consistent in + that the FROM clauses are consistently derived from the + current columns clause. The original intent of this method + is to allow trimming of the existing columns list to be fewer + columns than originally present; the use case of replacing + the columns list with an entirely different one hadn't + been anticipated until 0.7.3 was released; the usage + guidelines below illustrate how this should be done. + + This method is exactly equivalent to as if the original + :func:`.select` had been called with the given columns clause. I.e. 
a statement:: - + s = select([table1.c.a, table1.c.b]) s = s.with_only_columns([table1.c.b]) - + should be exactly equivalent to:: - + s = select([table1.c.b]) - - This means that FROM clauses which are only derived - from the column list will be discarded if the new column + + This means that FROM clauses which are only derived + from the column list will be discarded if the new column list no longer contains that FROM:: - + >>> table1 = table('t1', column('a'), column('b')) >>> table2 = table('t2', column('a'), column('b')) >>> s1 = select([table1.c.a, table2.c.b]) @@ -5002,45 +5051,45 @@ class Select(_SelectBase): >>> s2 = s1.with_only_columns([table2.c.b]) >>> print s2 SELECT t2.b FROM t1 - + The preferred way to maintain a specific FROM clause in the construct, assuming it won't be represented anywhere - else (i.e. not in the WHERE clause, etc.) is to set it using + else (i.e. not in the WHERE clause, etc.) is to set it using :meth:`.Select.select_from`:: - + >>> s1 = select([table1.c.a, table2.c.b]).\\ ... select_from(table1.join(table2, table1.c.a==table2.c.a)) >>> s2 = s1.with_only_columns([table2.c.b]) >>> print s2 SELECT t2.b FROM t1 JOIN t2 ON t1.a=t2.a - + Care should also be taken to use the correct set of column objects passed to :meth:`.Select.with_only_columns`. Since the method is essentially equivalent to calling the - :func:`.select` construct in the first place with the given - columns, the columns passed to :meth:`.Select.with_only_columns` - should usually be a subset of those which were passed + :func:`.select` construct in the first place with the given + columns, the columns passed to :meth:`.Select.with_only_columns` + should usually be a subset of those which were passed to the :func:`.select` construct, not those which are available from the ``.c`` collection of that :func:`.select`. 
That is:: - + s = select([table1.c.a, table1.c.b]).select_from(table1) s = s.with_only_columns([table1.c.b]) - + and **not**:: - + # usually incorrect s = s.with_only_columns([s.c.b]) The latter would produce the SQL:: - SELECT b - FROM (SELECT t1.a AS a, t1.b AS b + SELECT b + FROM (SELECT t1.a AS a, t1.b AS b FROM t1), t1 - + Since the :func:`.select` construct is essentially being asked to select both from ``table1`` as well as itself. - + """ self._reset_exported() rc = [] @@ -5090,14 +5139,14 @@ class Select(_SelectBase): @_generative def prefix_with(self, *expr): """return a new select() construct which will apply the given - expressions, typically strings, to the start of its columns clause, + expressions, typically strings, to the start of its columns clause, not using any commas. In particular is useful for MySQL keywords. e.g.:: - select(['a', 'b']).prefix_with('HIGH_PRIORITY', - 'SQL_SMALL_RESULT', + select(['a', 'b']).prefix_with('HIGH_PRIORITY', + 'SQL_SMALL_RESULT', 'ALL') Would render:: @@ -5112,31 +5161,31 @@ class Select(_SelectBase): def select_from(self, fromclause): """return a new :func:`.select` construct with the given FROM expression merged into its list of FROM objects. - + E.g.:: - + table1 = table('t1', column('a')) table2 = table('t2', column('b')) s = select([table1.c.a]).\\ select_from( table1.join(table2, table1.c.a==table2.c.b) ) - + The "from" list is a unique set on the identity of each element, so adding an already present :class:`.Table` or other selectable will have no effect. Passing a :class:`.Join` that refers - to an already present :class:`.Table` or other selectable will have - the effect of concealing the presence of that selectable as + to an already present :class:`.Table` or other selectable will have + the effect of concealing the presence of that selectable as an individual element in the rendered FROM list, instead rendering it into a JOIN clause. 
- + While the typical purpose of :meth:`.Select.select_from` is to replace the default, derived FROM clause with a join, it can also be called with - individual table elements, multiple times if desired, in the case that the + individual table elements, multiple times if desired, in the case that the FROM clause cannot be fully derived from the columns clause:: - + select([func.count('*')]).select_from(table1) - + """ self.append_from(fromclause) @@ -5230,8 +5279,8 @@ class Select(_SelectBase): def _populate_column_collection(self): for c in self.inner_columns: if hasattr(c, '_make_proxy'): - c._make_proxy(self, - name=self.use_labels + c._make_proxy(self, + name=self.use_labels and c._label or None) def self_group(self, against=None): @@ -5375,17 +5424,17 @@ class UpdateBase(Executable, ClauseElement): column expression. :class:`~sqlalchemy.schema.Table` objects will be expanded into their individual columns. - Upon compilation, a RETURNING clause, or database equivalent, - will be rendered within the statement. For INSERT and UPDATE, - the values are the newly inserted/updated values. For DELETE, + Upon compilation, a RETURNING clause, or database equivalent, + will be rendered within the statement. For INSERT and UPDATE, + the values are the newly inserted/updated values. For DELETE, the values are those of the rows which were deleted. Upon execution, the values of the columns to be returned are made available via the result set and can be iterated using ``fetchone()`` and similar. For DBAPIs which do not - natively support returning values (i.e. cx_oracle), + natively support returning values (i.e. cx_oracle), SQLAlchemy will approximate this behavior at the result level - so that a reasonable amount of behavioral neutrality is + so that a reasonable amount of behavioral neutrality is provided. Note that not all databases/DBAPIs @@ -5393,8 +5442,8 @@ class UpdateBase(Executable, ClauseElement): an exception is raised upon compilation and/or execution. 
For those who do support it, the functionality across backends varies greatly, including restrictions on executemany() - and other statements which return multiple rows. Please - read the documentation notes for the database in use in + and other statements which return multiple rows. Please + read the documentation notes for the database in use in order to determine the availability of RETURNING. """ @@ -5402,20 +5451,20 @@ class UpdateBase(Executable, ClauseElement): @_generative def with_hint(self, text, selectable=None, dialect_name="*"): - """Add a table hint for a single table to this + """Add a table hint for a single table to this INSERT/UPDATE/DELETE statement. .. note:: - :meth:`.UpdateBase.with_hint` currently applies only to + :meth:`.UpdateBase.with_hint` currently applies only to Microsoft SQL Server. For MySQL INSERT hints, use - :meth:`.Insert.prefix_with`. UPDATE/DELETE hints for + :meth:`.Insert.prefix_with`. UPDATE/DELETE hints for MySQL will be added in a future release. - + The text of the hint is rendered in the appropriate location for the database backend in use, relative to the :class:`.Table` that is the subject of this - statement, or optionally to that of the given + statement, or optionally to that of the given :class:`.Table` passed as the ``selectable`` argument. The ``dialect_name`` option will limit the rendering of a particular @@ -5424,7 +5473,7 @@ class UpdateBase(Executable, ClauseElement): mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql") - New in 0.7.6. + .. versionadded:: 0.7.6 :param text: Text of the hint. :param selectable: optional :class:`.Table` that specifies @@ -5453,7 +5502,7 @@ class ValuesBase(UpdateBase): """specify the VALUES clause for an INSERT statement, or the SET clause for an UPDATE. 
- :param \**kwargs: key value pairs representing the string key + :param \**kwargs: key value pairs representing the string key of a :class:`.Column` mapped to the value to be rendered into the VALUES or SET clause:: @@ -5471,13 +5520,13 @@ class ValuesBase(UpdateBase): See also: - :ref:`inserts_and_updates` - SQL Expression + :ref:`inserts_and_updates` - SQL Expression Language Tutorial :func:`~.expression.insert` - produce an ``INSERT`` statement :func:`~.expression.update` - produce an ``UPDATE`` statement - + """ if args: v = args[0] @@ -5506,12 +5555,12 @@ class Insert(ValuesBase): _prefixes = () - def __init__(self, - table, - values=None, - inline=False, - bind=None, - prefixes=None, + def __init__(self, + table, + values=None, + inline=False, + bind=None, + prefixes=None, returning=None, **kwargs): ValuesBase.__init__(self, table, values) @@ -5554,12 +5603,12 @@ class Update(ValuesBase): """ __visit_name__ = 'update' - def __init__(self, - table, - whereclause, - values=None, - inline=False, - bind=None, + def __init__(self, + table, + whereclause, + values=None, + inline=False, + bind=None, returning=None, **kwargs): ValuesBase.__init__(self, table, values) @@ -5621,10 +5670,10 @@ class Delete(UpdateBase): __visit_name__ = 'delete' - def __init__(self, - table, - whereclause, - bind=None, + def __init__(self, + table, + whereclause, + bind=None, returning =None, **kwargs): self._bind = bind diff --git a/libs/sqlalchemy/sql/functions.py b/libs/sqlalchemy/sql/functions.py index aac97cff..95781d70 100644 --- a/libs/sqlalchemy/sql/functions.py +++ b/libs/sqlalchemy/sql/functions.py @@ -1,5 +1,5 @@ # sql/functions.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -33,11 +33,11 @@ class GenericFunction(Function): class next_value(Function): 
"""Represent the 'next value', given a :class:`.Sequence` as it's single argument. - + Compiles into the appropriate function on each backend, or will raise NotImplementedError if used on a backend that does not provide support for sequences. - + """ type = sqltypes.Integer() name = "next_value" diff --git a/libs/sqlalchemy/sql/operators.py b/libs/sqlalchemy/sql/operators.py index 89f0aaee..9e796506 100644 --- a/libs/sqlalchemy/sql/operators.py +++ b/libs/sqlalchemy/sql/operators.py @@ -1,5 +1,5 @@ # sql/operators.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -21,25 +21,25 @@ from sqlalchemy.util import symbol class Operators(object): """Base of comparison and logical operators. - + Implements base methods :meth:`operate` and :meth:`reverse_operate`, as well as :meth:`__and__`, :meth:`__or__`, :meth:`__invert__`. - + Usually is used via its most common subclass :class:`.ColumnOperators`. - + """ def __and__(self, other): """Implement the ``&`` operator. - + When used with SQL expressions, results in an AND operation, equivalent to :func:`~.expression.and_`, that is:: - + a & b - + is equivalent to:: - + from sqlalchemy import and_ and_(a, b) @@ -47,7 +47,7 @@ class Operators(object): operator precedence; the ``&`` operator has the highest precedence. The operands should be enclosed in parenthesis if they contain further sub expressions:: - + (a == 2) & (b == 4) """ @@ -55,15 +55,15 @@ class Operators(object): def __or__(self, other): """Implement the ``|`` operator. 
- + When used with SQL expressions, results in an OR operation, equivalent to :func:`~.expression.or_`, that is:: - + a | b - + is equivalent to:: - + from sqlalchemy import or_ or_(a, b) @@ -71,7 +71,7 @@ class Operators(object): operator precedence; the ``|`` operator has the highest precedence. The operands should be enclosed in parenthesis if they contain further sub expressions:: - + (a == 2) | (b == 4) """ @@ -79,15 +79,15 @@ class Operators(object): def __invert__(self): """Implement the ``~`` operator. - - When used with SQL expressions, results in a - NOT operation, equivalent to + + When used with SQL expressions, results in a + NOT operation, equivalent to :func:`~.expression.not_`, that is:: - + ~a - + is equivalent to:: - + from sqlalchemy import not_ not_(a) @@ -123,16 +123,16 @@ class Operators(object): def operate(self, op, *other, **kwargs): """Operate on an argument. - + This is the lowest level of operation, raises :class:`NotImplementedError` by default. - - Overriding this on a subclass can allow common - behavior to be applied to all operations. + + Overriding this on a subclass can allow common + behavior to be applied to all operations. For example, overriding :class:`.ColumnOperators` - to apply ``func.lower()`` to the left and right + to apply ``func.lower()`` to the left and right side:: - + class MyComparator(ColumnOperators): def operate(self, op, other): return op(func.lower(self), func.lower(other)) @@ -142,48 +142,48 @@ class Operators(object): be a single scalar for most operations. :param \**kwargs: modifiers. These may be passed by special operators such as :meth:`ColumnOperators.contains`. - - + + """ raise NotImplementedError(str(op)) def reverse_operate(self, op, other, **kwargs): """Reverse operate on an argument. - + Usage is the same as :meth:`operate`. - + """ raise NotImplementedError(str(op)) class ColumnOperators(Operators): """Defines comparison and math operations. 
- + By default all methods call down to :meth:`Operators.operate` or :meth:`Operators.reverse_operate` - passing in the appropriate operator function from the + passing in the appropriate operator function from the Python builtin ``operator`` module or - a SQLAlchemy-specific operator function from + a SQLAlchemy-specific operator function from :mod:`sqlalchemy.expression.operators`. For example the ``__eq__`` function:: - + def __eq__(self, other): return self.operate(operators.eq, other) Where ``operators.eq`` is essentially:: - + def eq(a, b): return a == b - + A SQLAlchemy construct like :class:`.ColumnElement` ultimately overrides :meth:`.Operators.operate` and others - to return further :class:`.ClauseElement` constructs, + to return further :class:`.ClauseElement` constructs, so that the ``==`` operation above is replaced by a clause construct. - + The docstrings here will describe column-oriented behavior of each operator. For ORM-based operators on related objects and collections, see :class:`.RelationshipProperty.Comparator`. - + """ timetuple = None @@ -191,17 +191,17 @@ class ColumnOperators(Operators): def __lt__(self, other): """Implement the ``<`` operator. - + In a column context, produces the clause ``a < b``. - + """ return self.operate(lt, other) def __le__(self, other): """Implement the ``<=`` operator. - + In a column context, produces the clause ``a <= b``. - + """ return self.operate(le, other) @@ -209,7 +209,7 @@ class ColumnOperators(Operators): def __eq__(self, other): """Implement the ``==`` operator. - + In a column context, produces the clause ``a = b``. If the target is ``None``, produces ``a IS NULL``. @@ -221,98 +221,128 @@ class ColumnOperators(Operators): In a column context, produces the clause ``a != b``. If the target is ``None``, produces ``a IS NOT NULL``. - + """ return self.operate(ne, other) def __gt__(self, other): """Implement the ``>`` operator. - + In a column context, produces the clause ``a > b``. 
- + """ return self.operate(gt, other) def __ge__(self, other): """Implement the ``>=`` operator. - + In a column context, produces the clause ``a >= b``. - + """ return self.operate(ge, other) def __neg__(self): """Implement the ``-`` operator. - + In a column context, produces the clause ``-a``. - + """ return self.operate(neg) def concat(self, other): """Implement the 'concat' operator. - + In a column context, produces the clause ``a || b``, or uses the ``concat()`` operator on MySQL. - + """ return self.operate(concat_op, other) def like(self, other, escape=None): """Implement the ``like`` operator. - + In a column context, produces the clause ``a LIKE other``. - + """ return self.operate(like_op, other, escape=escape) def ilike(self, other, escape=None): """Implement the ``ilike`` operator. - + In a column context, produces the clause ``a ILIKE other``. - + """ return self.operate(ilike_op, other, escape=escape) def in_(self, other): """Implement the ``in`` operator. - + In a column context, produces the clause ``a IN other``. "other" may be a tuple/list of column expressions, or a :func:`~.expression.select` construct. - + """ return self.operate(in_op, other) + def is_(self, other): + """Implement the ``IS`` operator. + + Normally, ``IS`` is generated automatically when comparing to a + value of ``None``, which resolves to ``NULL``. However, explicit + usage of ``IS`` may be desirable if comparing to boolean values + on certain platforms. + + .. versionadded:: 0.7.9 + + .. seealso:: :meth:`.ColumnOperators.isnot` + + """ + return self.operate(is_, other) + + def isnot(self, other): + """Implement the ``IS NOT`` operator. + + Normally, ``IS NOT`` is generated automatically when comparing to a + value of ``None``, which resolves to ``NULL``. However, explicit + usage of ``IS NOT`` may be desirable if comparing to boolean values + on certain platforms. + + .. versionadded:: 0.7.9 + + .. 
seealso:: :meth:`.ColumnOperators.is_` + + """ + return self.operate(isnot, other) + def startswith(self, other, **kwargs): """Implement the ``startwith`` operator. In a column context, produces the clause ``LIKE '%'`` - + """ return self.operate(startswith_op, other, **kwargs) def endswith(self, other, **kwargs): """Implement the 'endswith' operator. - + In a column context, produces the clause ``LIKE '%'`` - + """ return self.operate(endswith_op, other, **kwargs) def contains(self, other, **kwargs): """Implement the 'contains' operator. - + In a column context, produces the clause ``LIKE '%%'`` - + """ return self.operate(contains_op, other, **kwargs) def match(self, other, **kwargs): """Implements the 'match' operator. - - In a column context, this produces a MATCH clause, i.e. - ``MATCH ''``. The allowed contents of ``other`` + + In a column context, this produces a MATCH clause, i.e. + ``MATCH ''``. The allowed contents of ``other`` are database backend specific. """ @@ -347,7 +377,7 @@ class ColumnOperators(Operators): """Implement the ``+`` operator in reverse. See :meth:`__add__`. - + """ return self.reverse_operate(add, other) @@ -355,7 +385,7 @@ class ColumnOperators(Operators): """Implement the ``-`` operator in reverse. See :meth:`__sub__`. - + """ return self.reverse_operate(sub, other) @@ -363,7 +393,7 @@ class ColumnOperators(Operators): """Implement the ``*`` operator in reverse. See :meth:`__mul__`. - + """ return self.reverse_operate(mul, other) @@ -371,7 +401,7 @@ class ColumnOperators(Operators): """Implement the ``/`` operator in reverse. See :meth:`__div__`. - + """ return self.reverse_operate(div, other) @@ -386,61 +416,61 @@ class ColumnOperators(Operators): def __add__(self, other): """Implement the ``+`` operator. - + In a column context, produces the clause ``a + b`` if the parent object has non-string affinity. 
- If the parent object has a string affinity, + If the parent object has a string affinity, produces the concatenation operator, ``a || b`` - see :meth:`concat`. - + """ return self.operate(add, other) def __sub__(self, other): """Implement the ``-`` operator. - + In a column context, produces the clause ``a - b``. - + """ return self.operate(sub, other) def __mul__(self, other): """Implement the ``*`` operator. - + In a column context, produces the clause ``a * b``. - + """ return self.operate(mul, other) def __div__(self, other): """Implement the ``/`` operator. - + In a column context, produces the clause ``a / b``. - + """ return self.operate(div, other) def __mod__(self, other): """Implement the ``%`` operator. - + In a column context, produces the clause ``a % b``. - + """ return self.operate(mod, other) def __truediv__(self, other): """Implement the ``//`` operator. - + In a column context, produces the clause ``a / b``. - + """ return self.operate(truediv, other) def __rtruediv__(self, other): """Implement the ``//`` operator in reverse. - + See :meth:`__truediv__`. 
- + """ return self.reverse_operate(truediv, other) @@ -469,13 +499,13 @@ def like_op(a, b, escape=None): return a.like(b, escape=escape) def notlike_op(a, b, escape=None): - raise NotImplementedError() + return ~a.like(b, escape=escape) def ilike_op(a, b, escape=None): return a.ilike(b, escape=escape) def notilike_op(a, b, escape=None): - raise NotImplementedError() + return ~a.ilike(b, escape=escape) def between_op(a, b, c): return a.between(b, c) @@ -484,7 +514,7 @@ def in_op(a, b): return a.in_(b) def notin_op(a, b): - raise NotImplementedError() + return ~a.in_(b) def distinct_op(a): return a.distinct() @@ -525,7 +555,7 @@ def is_commutative(op): return op in _commutative def is_ordering_modifier(op): - return op in (asc_op, desc_op, + return op in (asc_op, desc_op, nullsfirst_op, nullslast_op) _associative = _commutative.union([concat_op, and_, or_]) diff --git a/libs/sqlalchemy/sql/util.py b/libs/sqlalchemy/sql/util.py index 97975441..0a00674c 100644 --- a/libs/sqlalchemy/sql/util.py +++ b/libs/sqlalchemy/sql/util.py @@ -1,5 +1,5 @@ # sql/util.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -27,8 +27,8 @@ def sort_tables(tables): tuples.append((parent_table, child_table)) for table in tables: - visitors.traverse(table, - {'schema_visitor':True}, + visitors.traverse(table, + {'schema_visitor':True}, {'foreign_key':visit_foreign_key}) tuples.extend( @@ -38,9 +38,9 @@ def sort_tables(tables): return list(topological.sort(tuples, tables)) def find_join_source(clauses, join_to): - """Given a list of FROM clauses and a selectable, - return the first index and element from the list of - clauses which can be joined against the selectable. 
returns + """Given a list of FROM clauses and a selectable, + return the first index and element from the list of + clauses which can be joined against the selectable. returns None, None if no match is found. e.g.:: @@ -62,8 +62,8 @@ def find_join_source(clauses, join_to): else: return None, None -def find_tables(clause, check_columns=False, - include_aliases=False, include_joins=False, +def find_tables(clause, check_columns=False, + include_aliases=False, include_joins=False, include_selects=False, include_crud=False): """locate Table objects within the given expression.""" @@ -112,7 +112,7 @@ def unwrap_order_by(clause): ( not isinstance(t, expression._UnaryExpression) or \ not operators.is_ordering_modifier(t.modifier) - ): + ): cols.add(t) else: for c in t.get_children(): @@ -167,7 +167,7 @@ def _quote_ddl_expr(element): class _repr_params(object): """A string view of bound parameters, truncating display to the given number of 'multi' parameter sets. - + """ def __init__(self, params, batches): self.params = params @@ -187,7 +187,7 @@ class _repr_params(object): def expression_as_ddl(clause): - """Given a SQL expression, convert for usage in DDL, such as + """Given a SQL expression, convert for usage in DDL, such as CREATE INDEX and CHECK CONSTRAINT. 
Converts bind params into quoted literals, column identifiers @@ -259,7 +259,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None): if left is None: continue for fk in sorted( - b.foreign_keys, + b.foreign_keys, key=lambda fk:fk.parent._creation_order): try: col = fk.get_referent(left) @@ -274,7 +274,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None): constraints.add(fk.constraint) if left is not b: for fk in sorted( - left.foreign_keys, + left.foreign_keys, key=lambda fk:fk.parent._creation_order): try: col = fk.get_referent(b) @@ -317,12 +317,12 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None): class Annotated(object): """clones a ClauseElement and applies an 'annotations' dictionary. - Unlike regular clones, this clone also mimics __hash__() and + Unlike regular clones, this clone also mimics __hash__() and __cmp__() of the original element so that it takes its place in hashed collections. A reference to the original element is maintained, for the important - reason of keeping its hash value current. When GC'ed, the + reason of keeping its hash value current. When GC'ed, the hash value may be reused, causing conflicts. 
""" @@ -338,13 +338,13 @@ class Annotated(object): try: cls = annotated_classes[element.__class__] except KeyError: - cls = annotated_classes[element.__class__] = type.__new__(type, - "Annotated%s" % element.__class__.__name__, + cls = annotated_classes[element.__class__] = type.__new__(type, + "Annotated%s" % element.__class__.__name__, (Annotated, element.__class__), {}) return object.__new__(cls) def __init__(self, element, values): - # force FromClause to generate their internal + # force FromClause to generate their internal # collections into __dict__ if isinstance(element, expression.FromClause): element.c @@ -404,22 +404,30 @@ for cls in expression.__dict__.values() + [schema.Column, schema.Table]: exec "annotated_classes[cls] = Annotated%s" % (cls.__name__) def _deep_annotate(element, annotations, exclude=None): - """Deep copy the given ClauseElement, annotating each element with the given annotations dictionary. + """Deep copy the given ClauseElement, annotating each element + with the given annotations dictionary. Elements within the exclude collection will be cloned but not annotated. """ + cloned = util.column_dict() + def clone(elem): # check if element is present in the exclude list. # take into account proxying relationships. 
- if exclude and \ + if elem in cloned: + return cloned[elem] + elif exclude and \ hasattr(elem, 'proxy_set') and \ elem.proxy_set.intersection(exclude): - elem = elem._clone() + newelem = elem._clone() elif annotations != elem._annotations: - elem = elem._annotate(annotations.copy()) - elem._copy_internals(clone=clone) - return elem + newelem = elem._annotate(annotations) + else: + newelem = elem + newelem._copy_internals(clone=clone) + cloned[elem] = newelem + return newelem if element is not None: element = clone(element) @@ -428,26 +436,30 @@ def _deep_annotate(element, annotations, exclude=None): def _deep_deannotate(element): """Deep copy the given element, removing all annotations.""" + cloned = util.column_dict() + def clone(elem): - elem = elem._deannotate() - elem._copy_internals(clone=clone) - return elem + if elem not in cloned: + newelem = elem._deannotate() + newelem._copy_internals(clone=clone) + cloned[elem] = newelem + return cloned[elem] if element is not None: element = clone(element) return element -def _shallow_annotate(element, annotations): - """Annotate the given ClauseElement and copy its internals so that - internal objects refer to the new annotated object. +def _shallow_annotate(element, annotations): + """Annotate the given ClauseElement and copy its internals so that + internal objects refer to the new annotated object. - Basically used to apply a "dont traverse" annotation to a - selectable, without digging throughout the whole - structure wasting time. - """ - element = element._annotate(annotations) - element._copy_internals() - return element + Basically used to apply a "dont traverse" annotation to a + selectable, without digging throughout the whole + structure wasting time. 
+ """ + element = element._annotate(annotations) + element._copy_internals() + return element def splice_joins(left, right, stop_on=None): if left is None: @@ -526,7 +538,7 @@ def reduce_columns(columns, *clauses, **kw): return expression.ColumnSet(columns.difference(omit)) -def criterion_as_pairs(expression, consider_as_foreign_keys=None, +def criterion_as_pairs(expression, consider_as_foreign_keys=None, consider_as_referenced_keys=None, any_operator=False): """traverse an expression and locate binary criterion pairs.""" @@ -544,20 +556,20 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None, if consider_as_foreign_keys: if binary.left in consider_as_foreign_keys and \ - (binary.right is binary.left or + (binary.right is binary.left or binary.right not in consider_as_foreign_keys): pairs.append((binary.right, binary.left)) elif binary.right in consider_as_foreign_keys and \ - (binary.left is binary.right or + (binary.left is binary.right or binary.left not in consider_as_foreign_keys): pairs.append((binary.left, binary.right)) elif consider_as_referenced_keys: if binary.left in consider_as_referenced_keys and \ - (binary.right is binary.left or + (binary.right is binary.left or binary.right not in consider_as_referenced_keys): pairs.append((binary.left, binary.right)) elif binary.right in consider_as_referenced_keys and \ - (binary.left is binary.right or + (binary.left is binary.right or binary.left not in consider_as_referenced_keys): pairs.append((binary.right, binary.left)) else: @@ -574,17 +586,17 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None, def folded_equivalents(join, equivs=None): """Return a list of uniquely named columns. - The column list of the given Join will be narrowed + The column list of the given Join will be narrowed down to a list of all equivalently-named, equated columns folded into one column, where 'equated' means they are equated to each other in the ON clause of this join. 
This function is used by Join.select(fold_equivalents=True). - Deprecated. This function is used for a certain kind of + Deprecated. This function is used for a certain kind of "polymorphic_union" which is designed to achieve joined table inheritance where the base table has no "discriminator" - column; [ticket:1131] will provide a better way to + column; [ticket:1131] will provide a better way to achieve this. """ @@ -679,12 +691,12 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor): def _corresponding_column(self, col, require_embedded, _seen=util.EMPTY_SET): newcol = self.selectable.corresponding_column( - col, + col, require_embedded=require_embedded) if newcol is None and col in self.equivalents and col not in _seen: for equiv in self.equivalents[col]: - newcol = self._corresponding_column(equiv, - require_embedded=require_embedded, + newcol = self._corresponding_column(equiv, + require_embedded=require_embedded, _seen=_seen.union([col])) if newcol is not None: return newcol @@ -710,14 +722,14 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor): class ColumnAdapter(ClauseAdapter): """Extends ClauseAdapter with extra utility functions. - Provides the ability to "wrap" this ClauseAdapter + Provides the ability to "wrap" this ClauseAdapter around another, a columns dictionary which returns - adapted elements given an original, and an + adapted elements given an original, and an adapted_row() factory. """ - def __init__(self, selectable, equivalents=None, - chain_to=None, include=None, + def __init__(self, selectable, equivalents=None, + chain_to=None, include=None, exclude=None, adapt_required=False): ClauseAdapter.__init__(self, selectable, equivalents, include, exclude) if chain_to: @@ -753,7 +765,7 @@ class ColumnAdapter(ClauseAdapter): c = c.label(None) # adapt_required indicates that if we got the same column - # back which we put in (i.e. it passed through), + # back which we put in (i.e. it passed through), # it's not correct. 
this is used by eagerloading which # knows that all columns and expressions need to be adapted # to a result row, and a "passthrough" is definitely targeting diff --git a/libs/sqlalchemy/sql/visitors.py b/libs/sqlalchemy/sql/visitors.py index 5354fbcb..d236063d 100644 --- a/libs/sqlalchemy/sql/visitors.py +++ b/libs/sqlalchemy/sql/visitors.py @@ -1,5 +1,5 @@ # sql/visitors.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,15 +8,15 @@ SQLAlchemy schema and expression constructs rely on a Python-centric version of the classic "visitor" pattern as the primary way in which -they apply functionality. The most common use of this pattern -is statement compilation, where individual expression classes match -up to rendering methods that produce a string result. Beyond this, -the visitor system is also used to inspect expressions for various -information and patterns, as well as for usage in +they apply functionality. The most common use of this pattern +is statement compilation, where individual expression classes match +up to rendering methods that produce a string result. Beyond this, +the visitor system is also used to inspect expressions for various +information and patterns, as well as for usage in some kinds of expression transformation. Other kinds of transformation use a non-visitor traversal system. -For many examples of how the visit system is used, see the +For many examples of how the visit system is used, see the sqlalchemy.sql.util and the sqlalchemy.sql.compiler modules. 
For an introduction to clause adaption, see http://techspot.zzzeek.org/2008/01/23/expression-transformations/ @@ -28,18 +28,18 @@ import re from sqlalchemy import util import operator -__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor', - 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate', +__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor', + 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate', 'iterate_depthfirst', 'traverse_using', 'traverse', 'cloned_traverse', 'replacement_traverse'] class VisitableType(type): """Metaclass which assigns a `_compiler_dispatch` method to classes having a `__visit_name__` attribute. - + The _compiler_dispatch attribute becomes an instance method which looks approximately like the following:: - + def _compiler_dispatch (self, visitor, **kw): '''Look for an attribute named "visit_" + self.__visit_name__ on the visitor, and call it with the same kw params.''' @@ -92,7 +92,7 @@ class Visitable(object): __metaclass__ = VisitableType class ClauseVisitor(object): - """Base class for visitor objects which can traverse using + """Base class for visitor objects which can traverse using the traverse() function. """ @@ -144,7 +144,7 @@ class ClauseVisitor(object): return self class CloningVisitor(ClauseVisitor): - """Base class for visitor objects which can traverse using + """Base class for visitor objects which can traverse using the cloned_traverse() function. """ @@ -160,7 +160,7 @@ class CloningVisitor(ClauseVisitor): return cloned_traverse(obj, self.__traverse_options__, self._visitor_dict) class ReplacingCloningVisitor(CloningVisitor): - """Base class for visitor objects which can traverse using + """Base class for visitor objects which can traverse using the replacement_traverse() function. """ @@ -168,8 +168,8 @@ class ReplacingCloningVisitor(CloningVisitor): def replace(self, elem): """receive pre-copied elements during a cloning traversal. 
- If the method returns a new element, the element is used - instead of creating a simple copy of the element. Traversal + If the method returns a new element, the element is used + instead of creating a simple copy of the element. Traversal will halt on the newly returned element if it is re-encountered. """ return None @@ -232,7 +232,7 @@ def traverse_depthfirst(obj, opts, visitors): return traverse_using(iterate_depthfirst(obj, opts), obj, visitors) def cloned_traverse(obj, opts, visitors): - """clone the given expression structure, allowing + """clone the given expression structure, allowing modifications by visitors.""" cloned = util.column_dict() @@ -256,7 +256,7 @@ def cloned_traverse(obj, opts, visitors): def replacement_traverse(obj, opts, replace): - """clone the given expression structure, allowing element + """clone the given expression structure, allowing element replacement by a given replacement function.""" cloned = util.column_dict() diff --git a/libs/sqlalchemy/types.py b/libs/sqlalchemy/types.py index 512ac626..5fe2ba20 100644 --- a/libs/sqlalchemy/types.py +++ b/libs/sqlalchemy/types.py @@ -1,5 +1,5 @@ # sqlalchemy/types.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -13,12 +13,12 @@ For more information see the SQLAlchemy documentation on types. 
""" __all__ = [ 'TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType', 'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR','TEXT', 'Text', - 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME', - 'CLOB', 'BLOB', 'BOOLEAN', 'SMALLINT', 'INTEGER', 'DATE', 'TIME', - 'String', 'Integer', 'SmallInteger', 'BigInteger', 'Numeric', - 'Float', 'DateTime', 'Date', 'Time', 'LargeBinary', 'Binary', - 'Boolean', 'Unicode', 'MutableType', 'Concatenable', - 'UnicodeText','PickleType', 'Interval', 'Enum' ] + 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME', + 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT', 'SMALLINT', + 'INTEGER', 'DATE', 'TIME', 'String', 'Integer', 'SmallInteger', + 'BigInteger', 'Numeric', 'Float', 'DateTime', 'Date', 'Time', + 'LargeBinary', 'Binary', 'Boolean', 'Unicode', 'Concatenable', + 'UnicodeText','PickleType', 'Interval', 'Enum', 'MutableType' ] import inspect import datetime as dt @@ -39,7 +39,7 @@ if util.jython: import array class AbstractType(Visitable): - """Base for all types - not needed except for backwards + """Base for all types - not needed except for backwards compatibility.""" class TypeEngine(AbstractType): @@ -89,13 +89,13 @@ class TypeEngine(AbstractType): This allows systems like the ORM to know if a column value can be considered 'not changed' by comparing the identity of objects alone. Values such as dicts, lists which - are serialized into strings are examples of "mutable" + are serialized into strings are examples of "mutable" column structures. - .. note:: - + .. note:: + This functionality is now superseded by the - ``sqlalchemy.ext.mutable`` extension described in + ``sqlalchemy.ext.mutable`` extension described in :ref:`mutable_toplevel`. When this method is overridden, :meth:`copy_value` should @@ -117,15 +117,15 @@ class TypeEngine(AbstractType): @property def python_type(self): """Return the Python type object expected to be returned - by instances of this type, if known. 
- + by instances of this type, if known. + Basically, for those types which enforce a return type, - or are known across the board to do such for all common + or are known across the board to do such for all common DBAPIs (like ``int`` for example), will return that type. - + If a return type is not defined, raises ``NotImplementedError``. - + Note that any type also accommodates NULL in SQL which means you can also get back ``None`` from any type in practice. @@ -134,7 +134,7 @@ class TypeEngine(AbstractType): raise NotImplementedError() def with_variant(self, type_, dialect_name): - """Produce a new type object that will utilize the given + """Produce a new type object that will utilize the given type when applied to the dialect of the given name. e.g.:: @@ -149,16 +149,16 @@ class TypeEngine(AbstractType): The construction of :meth:`.TypeEngine.with_variant` is always from the "fallback" type to that which is dialect specific. The returned type is an instance of :class:`.Variant`, which - itself provides a :meth:`~sqlalchemy.types.Variant.with_variant` that can + itself provides a :meth:`~sqlalchemy.types.Variant.with_variant` that can be called repeatedly. :param type_: a :class:`.TypeEngine` that will be selected as a variant from the originating type, when a dialect of the given name is in use. - :param dialect_name: base name of the dialect which uses + :param dialect_name: base name of the dialect which uses this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.) - New in 0.7.2. + .. 
versionadded:: 0.7.2 """ return Variant(self, {dialect_name:type_}) @@ -216,7 +216,7 @@ class TypeEngine(AbstractType): return rp def _dialect_info(self, dialect): - """Return a dialect-specific registry which + """Return a dialect-specific registry which caches a dialect-specific implementation, bind processing function, and one or more result processing functions.""" @@ -235,10 +235,10 @@ class TypeEngine(AbstractType): return dialect.type_descriptor(self) def adapt(self, cls, **kw): - """Produce an "adapted" form of this type, given an "impl" class - to work with. + """Produce an "adapted" form of this type, given an "impl" class + to work with. - This method is used internally to associate generic + This method is used internally to associate generic types with "implementation" types that are specific to a particular dialect. """ @@ -251,7 +251,7 @@ class TypeEngine(AbstractType): to return a type which the value should be coerced into. The default behavior here is conservative; if the right-hand - side is already coerced into a SQL type based on its + side is already coerced into a SQL type based on its Python type, it is usually left alone. End-user functionality extension here should generally be via @@ -361,7 +361,7 @@ class UserDefinedType(TypeEngine): def adapt_operator(self, op): """A hook which allows the given operator to be adapted - to something new. + to something new. See also UserDefinedType._adapt_expression(), an as-yet- semi-public method with greater capability in this regard. @@ -374,7 +374,7 @@ class TypeDecorator(TypeEngine): to an existing type. This method is preferred to direct subclassing of SQLAlchemy's - built-in types as it ensures that all required functionality of + built-in types as it ensures that all required functionality of the underlying type is kept in place. 
Typical usage:: @@ -411,9 +411,9 @@ class TypeDecorator(TypeEngine): mytable.c.somecol + datetime.date(2009, 5, 15) - Above, if "somecol" is an ``Integer`` variant, it makes sense that + Above, if "somecol" is an ``Integer`` variant, it makes sense that we're doing date arithmetic, where above is usually interpreted - by databases as adding a number of days to the given date. + by databases as adding a number of days to the given date. The expression system does the right thing by not attempting to coerce the "date()" value into an integer-oriented bind parameter. @@ -455,14 +455,14 @@ class TypeDecorator(TypeEngine): def __init__(self, *args, **kwargs): """Construct a :class:`.TypeDecorator`. - Arguments sent here are passed to the constructor + Arguments sent here are passed to the constructor of the class assigned to the ``impl`` class level attribute, assuming the ``impl`` is a callable, and the resulting object is assigned to the ``self.impl`` instance attribute (thus overriding the class attribute of the same name). - + If the class level ``impl`` is not a callable (the unusual case), - it will be assigned to the same instance attribute 'as-is', + it will be assigned to the same instance attribute 'as-is', ignoring those arguments passed to the constructor. Subclasses can override this to customize the generation @@ -529,7 +529,7 @@ class TypeDecorator(TypeEngine): This is an end-user override hook that can be used to provide differing types depending on the given dialect. It is used - by the :class:`.TypeDecorator` implementation of :meth:`type_engine` + by the :class:`.TypeDecorator` implementation of :meth:`type_engine` to help determine what type should ultimately be returned for a given :class:`.TypeDecorator`. 
@@ -548,11 +548,11 @@ class TypeDecorator(TypeEngine): Subclasses override this method to return the value that should be passed along to the underlying - :class:`.TypeEngine` object, and from there to the + :class:`.TypeEngine` object, and from there to the DBAPI ``execute()`` method. The operation could be anything desired to perform custom - behavior, such as transforming or serializing data. + behavior, such as transforming or serializing data. This could also be used as a hook for validating logic. This operation should be designed with the reverse operation @@ -580,7 +580,7 @@ class TypeDecorator(TypeEngine): from the DBAPI cursor method ``fetchone()`` or similar. The operation could be anything desired to perform custom - behavior, such as transforming or serializing data. + behavior, such as transforming or serializing data. This could also be used as a hook for validating logic. :param value: Data to operate upon, of any type expected by @@ -595,12 +595,12 @@ class TypeDecorator(TypeEngine): raise NotImplementedError() def bind_processor(self, dialect): - """Provide a bound value processing function for the + """Provide a bound value processing function for the given :class:`.Dialect`. - This is the method that fulfills the :class:`.TypeEngine` + This is the method that fulfills the :class:`.TypeEngine` contract for bound value conversion. :class:`.TypeDecorator` - will wrap a user-defined implementation of + will wrap a user-defined implementation of :meth:`process_bind_param` here. User-defined code can override this method directly, @@ -632,9 +632,9 @@ class TypeDecorator(TypeEngine): def result_processor(self, dialect, coltype): """Provide a result value processing function for the given :class:`.Dialect`. - This is the method that fulfills the :class:`.TypeEngine` + This is the method that fulfills the :class:`.TypeEngine` contract for result value conversion. 
:class:`.TypeDecorator` - will wrap a user-defined implementation of + will wrap a user-defined implementation of :meth:`process_result_value` here. User-defined code can override this method directly, @@ -669,7 +669,7 @@ class TypeDecorator(TypeEngine): """Suggest a type for a 'coerced' Python value in an expression. By default, returns self. This method is called by - the expression system when an object using this type is + the expression system when an object using this type is on the left or right side of an expression against a plain Python object which does not yet have a SQLAlchemy type assigned:: @@ -691,7 +691,7 @@ class TypeDecorator(TypeEngine): def copy(self): """Produce a copy of this :class:`.TypeDecorator` instance. - This is a shallow copy and is provided to fulfill part of + This is a shallow copy and is provided to fulfill part of the :class:`.TypeEngine` contract. It usually does not need to be overridden unless the user-defined :class:`.TypeDecorator` has local state that should be deep-copied. @@ -705,7 +705,7 @@ class TypeDecorator(TypeEngine): def get_dbapi_type(self, dbapi): """Return the DBAPI type object represented by this :class:`.TypeDecorator`. - By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the + By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the underlying "impl". """ return self.impl.get_dbapi_type(dbapi) @@ -713,12 +713,12 @@ class TypeDecorator(TypeEngine): def copy_value(self, value): """Given a value, produce a copy of it. - By default this calls upon :meth:`.TypeEngine.copy_value` + By default this calls upon :meth:`.TypeEngine.copy_value` of the underlying "impl". :meth:`.copy_value` will return the object itself, assuming "mutability" is not enabled. - Only the :class:`.MutableType` mixin provides a copy + Only the :class:`.MutableType` mixin provides a copy function that actually produces a new object. 
The copying function is used by the ORM when "mutable" types are used, to memoize the original @@ -726,7 +726,7 @@ class TypeDecorator(TypeEngine): which is then compared to the possibly mutated version to check for changes. - Modern implementations should use the + Modern implementations should use the ``sqlalchemy.ext.mutable`` extension described in :ref:`mutable_toplevel` for intercepting in-place changes to values. @@ -737,7 +737,7 @@ class TypeDecorator(TypeEngine): def compare_values(self, x, y): """Given two values, compare them for equality. - By default this calls upon :meth:`.TypeEngine.compare_values` + By default this calls upon :meth:`.TypeEngine.compare_values` of the underlying "impl", which in turn usually uses the Python equals operator ``==``. @@ -755,13 +755,13 @@ class TypeDecorator(TypeEngine): This allows systems like the ORM to know if a column value can be considered 'not changed' by comparing the identity of objects alone. Values such as dicts, lists which - are serialized into strings are examples of "mutable" + are serialized into strings are examples of "mutable" column structures. - .. note:: - + .. note:: + This functionality is now superseded by the - ``sqlalchemy.ext.mutable`` extension described in + ``sqlalchemy.ext.mutable`` extension described in :ref:`mutable_toplevel`. """ @@ -777,24 +777,27 @@ class TypeDecorator(TypeEngine): else: return op, typ + def __repr__(self): + return util.generic_repr(self, to_inspect=self.impl) + class Variant(TypeDecorator): """A wrapping type that selects among a variety of implementations based on dialect in use. - + The :class:`.Variant` type is typically constructed using the :meth:`.TypeEngine.with_variant` method. - - New in 0.7.2. - + + .. versionadded:: 0.7.2 + """ def __init__(self, base, mapping): """Construct a new :class:`.Variant`. 
- + :param base: the base 'fallback' type - :param mapping: dictionary of string dialect names to :class:`.TypeEngine` + :param mapping: dictionary of string dialect names to :class:`.TypeEngine` instances. - + """ self.impl = base self.mapping = mapping @@ -807,17 +810,15 @@ class Variant(TypeDecorator): def with_variant(self, type_, dialect_name): """Return a new :class:`.Variant` which adds the given - type + dialect name to the mapping, in addition to the + type + dialect name to the mapping, in addition to the mapping present in this :class:`.Variant`. - + :param type_: a :class:`.TypeEngine` that will be selected as a variant from the originating type, when a dialect of the given name is in use. - :param dialect_name: base name of the dialect which uses + :param dialect_name: base name of the dialect which uses this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.) - New in 0.7.2. - """ if dialect_name in self.mapping: @@ -833,27 +834,26 @@ class MutableType(object): a mutable Python object type. This functionality is used only by the ORM. - .. note:: - - :class:`.MutableType` is superseded as of SQLAlchemy 0.7 + .. versionchanged:: 0.7 + :class:`.MutableType` is superseded by the ``sqlalchemy.ext.mutable`` extension described in :ref:`mutable_toplevel`. This extension provides an event driven approach to in-place mutation detection that does not incur the severe performance penalty of the :class:`.MutableType` approach. - "mutable" means that changes can occur in place to a value + "mutable" means that changes can occur in place to a value of this type. Examples includes Python lists, dictionaries, and sets, as well as user-defined objects. The primary - need for identification of "mutable" types is by the ORM, - which applies special rules to such values in order to guarantee - that changes are detected. 
These rules may have a significant + need for identification of "mutable" types is by the ORM, + which applies special rules to such values in order to guarantee + that changes are detected. These rules may have a significant performance impact, described below. A :class:`.MutableType` usually allows a flag called ``mutable=False`` to enable/disable the "mutability" flag, - represented on this class by :meth:`is_mutable`. Examples - include :class:`.PickleType` and + represented on this class by :meth:`is_mutable`. Examples + include :class:`.PickleType` and :class:`~sqlalchemy.dialects.postgresql.base.ARRAY`. Setting this flag to ``True`` enables mutability-specific behavior by the ORM. @@ -863,8 +863,8 @@ class MutableType(object): type - implementing subclasses should override these appropriately. - .. warning:: - + .. warning:: + The usage of mutable types has significant performance implications when using the ORM. In order to detect changes, the ORM must create a copy of the value when it is first @@ -880,18 +880,19 @@ class MutableType(object): the 6000 objects that have mutable attributes, possibly resulting in tens of thousands of additional method calls for every query. - As of SQLAlchemy 0.7, the ``sqlalchemy.ext.mutable`` is provided which - allows an event driven approach to in-place mutation detection. This - approach should now be favored over the usage of :class:`.MutableType` - with ``mutable=True``. ``sqlalchemy.ext.mutable`` is described in - :ref:`mutable_toplevel`. + .. versionchanged:: 0.7 + As of SQLAlchemy 0.7, the ``sqlalchemy.ext.mutable`` is provided + which allows an event driven approach to in-place + mutation detection. This approach should now be favored over + the usage of :class:`.MutableType` with ``mutable=True``. + ``sqlalchemy.ext.mutable`` is described in :ref:`mutable_toplevel`. """ def is_mutable(self): """Return True if the target Python type is 'mutable'. 
- For :class:`.MutableType`, this method is set to + For :class:`.MutableType`, this method is set to return ``True``. """ @@ -938,6 +939,7 @@ def adapt_type(typeobj, colspecs): + class NullType(TypeEngine): """An unknown type. @@ -1007,7 +1009,7 @@ class String(Concatenable, TypeEngine): __visit_name__ = 'string' - def __init__(self, length=None, convert_unicode=False, + def __init__(self, length=None, convert_unicode=False, assert_unicode=None, unicode_error=None, _warn_on_bytestring=False ): @@ -1022,23 +1024,23 @@ class String(Concatenable, TypeEngine): with no length is included. Whether the value is interpreted as bytes or characters is database specific. - :param convert_unicode: When set to ``True``, the + :param convert_unicode: When set to ``True``, the :class:`.String` type will assume that input is to be passed as Python ``unicode`` objects, and results returned as Python ``unicode`` objects. If the DBAPI in use does not support Python unicode (which is fewer and fewer these days), SQLAlchemy - will encode/decode the value, using the - value of the ``encoding`` parameter passed to + will encode/decode the value, using the + value of the ``encoding`` parameter passed to :func:`.create_engine` as the encoding. - + When using a DBAPI that natively supports Python - unicode objects, this flag generally does not + unicode objects, this flag generally does not need to be set. For columns that are explicitly intended to store non-ASCII data, the :class:`.Unicode` - or :class:`UnicodeText` + or :class:`UnicodeText` types should be used regardless, which feature - the same behavior of ``convert_unicode`` but + the same behavior of ``convert_unicode`` but also indicate an underlying column type that directly supports unicode, such as ``NVARCHAR``. @@ -1049,11 +1051,11 @@ class String(Concatenable, TypeEngine): cause SQLAlchemy's encode/decode services to be used unconditionally. - :param assert_unicode: Deprecated. 
A warning is emitted - when a non-``unicode`` object is passed to the - :class:`.Unicode` subtype of :class:`.String`, - or the :class:`.UnicodeText` subtype of :class:`.Text`. - See :class:`.Unicode` for information on how to + :param assert_unicode: Deprecated. A warning is emitted + when a non-``unicode`` object is passed to the + :class:`.Unicode` subtype of :class:`.String`, + or the :class:`.UnicodeText` subtype of :class:`.Text`. + See :class:`.Unicode` for information on how to control this warning. :param unicode_error: Optional, a method to use to handle Unicode @@ -1120,7 +1122,7 @@ class String(Concatenable, TypeEngine): def result_processor(self, dialect, coltype): wants_unicode = self.convert_unicode or dialect.convert_unicode needs_convert = wants_unicode and \ - (dialect.returns_unicode_strings is not True or + (dialect.returns_unicode_strings is not True or self.convert_unicode == 'force') if needs_convert: @@ -1172,36 +1174,36 @@ class Unicode(String): that assumes input and output as Python ``unicode`` data, and in that regard is equivalent to the usage of the ``convert_unicode`` flag with the :class:`.String` type. - However, unlike plain :class:`.String`, it also implies an + However, unlike plain :class:`.String`, it also implies an underlying column type that is explicitly supporting of non-ASCII data, such as ``NVARCHAR`` on Oracle and SQL Server. - This can impact the output of ``CREATE TABLE`` statements - and ``CAST`` functions at the dialect level, and can + This can impact the output of ``CREATE TABLE`` statements + and ``CAST`` functions at the dialect level, and can also affect the handling of bound parameters in some specific DBAPI scenarios. 
- + The encoding used by the :class:`.Unicode` type is usually - determined by the DBAPI itself; most modern DBAPIs + determined by the DBAPI itself; most modern DBAPIs feature support for Python ``unicode`` objects as bound values and result set values, and the encoding should be configured as detailed in the notes for the target DBAPI in the :ref:`dialect_toplevel` section. - + For those DBAPIs which do not support, or are not configured to accommodate Python ``unicode`` objects directly, SQLAlchemy does the encoding and decoding - outside of the DBAPI. The encoding in this scenario - is determined by the ``encoding`` flag passed to + outside of the DBAPI. The encoding in this scenario + is determined by the ``encoding`` flag passed to :func:`.create_engine`. - When using the :class:`.Unicode` type, it is only appropriate + When using the :class:`.Unicode` type, it is only appropriate to pass Python ``unicode`` objects, and not plain ``str``. If a plain ``str`` is passed under Python 2, a warning - is emitted. If you notice your application emitting these warnings but - you're not sure of the source of them, the Python - ``warnings`` filter, documented at - http://docs.python.org/library/warnings.html, - can be used to turn these warnings into exceptions + is emitted. If you notice your application emitting these warnings but + you're not sure of the source of them, the Python + ``warnings`` filter, documented at + http://docs.python.org/library/warnings.html, + can be used to turn these warnings into exceptions which will illustrate a stack trace:: import warnings @@ -1209,7 +1211,7 @@ class Unicode(String): For an application that wishes to pass plain bytestrings and Python ``unicode`` objects to the ``Unicode`` type - equally, the bytestrings must first be decoded into + equally, the bytestrings must first be decoded into unicode. The recipe at :ref:`coerce_to_unicode` illustrates how this is done. 
@@ -1225,7 +1227,7 @@ class Unicode(String): def __init__(self, length=None, **kwargs): """ Create a :class:`.Unicode` object. - + Parameters are the same as that of :class:`.String`, with the exception that ``convert_unicode`` defaults to ``True``. @@ -1241,8 +1243,8 @@ class UnicodeText(Text): See :class:`.Unicode` for details on the unicode behavior of this object. - Like :class:`.Unicode`, usage the :class:`.UnicodeText` type implies a - unicode-capable type being used on the backend, such as + Like :class:`.Unicode`, usage the :class:`.UnicodeText` type implies a + unicode-capable type being used on the backend, such as ``NCLOB``, ``NTEXT``. """ @@ -1335,8 +1337,8 @@ class Numeric(_DateAffinity, TypeEngine): ``decimal.Decimal`` objects by default, applying conversion as needed. - .. note:: - + .. note:: + The `cdecimal `_ library is a high performing alternative to Python's built-in ``decimal.Decimal`` type, which performs very poorly in high volume @@ -1355,9 +1357,9 @@ class Numeric(_DateAffinity, TypeEngine): import cdecimal sys.modules["decimal"] = cdecimal - While the global patch is a little ugly, it's particularly - important to use just one decimal library at a time since - Python Decimal and cdecimal Decimal objects + While the global patch is a little ugly, it's particularly + important to use just one decimal library at a time since + Python Decimal and cdecimal Decimal objects are not currently compatible *with each other*:: >>> import cdecimal @@ -1365,7 +1367,7 @@ class Numeric(_DateAffinity, TypeEngine): >>> decimal.Decimal("10") == cdecimal.Decimal("10") False - SQLAlchemy will provide more natural support of + SQLAlchemy will provide more natural support of cdecimal if and when it becomes a standard part of Python installations and is supported by all DBAPIs. 
@@ -1392,15 +1394,15 @@ class Numeric(_DateAffinity, TypeEngine): that the asdecimal setting is apppropriate for the DBAPI in use - when Numeric applies a conversion from Decimal->float or float-> Decimal, this conversion incurs an additional performance overhead - for all result columns received. + for all result columns received. - DBAPIs that return Decimal natively (e.g. psycopg2) will have + DBAPIs that return Decimal natively (e.g. psycopg2) will have better accuracy and higher performance with a setting of ``True``, as the native translation to Decimal reduces the amount of floating- point issues at play, and the Numeric type itself doesn't need - to apply any further conversions. However, another DBAPI which - returns floats natively *will* incur an additional conversion - overhead, and is still subject to floating point data loss - in + to apply any further conversions. However, another DBAPI which + returns floats natively *will* incur an additional conversion + overhead, and is still subject to floating point data loss - in which case ``asdecimal=False`` will at least remove the extra conversion overhead. @@ -1504,10 +1506,10 @@ class Float(Numeric): results in floating point conversion. :param \**kwargs: deprecated. Additional arguments here are ignored - by the default :class:`.Float` type. For database specific - floats that support additional arguments, see that dialect's + by the default :class:`.Float` type. For database specific + floats that support additional arguments, see that dialect's documentation for details, such as :class:`sqlalchemy.dialects.mysql.FLOAT`. - + """ self.precision = precision self.asdecimal = asdecimal @@ -1560,12 +1562,12 @@ class DateTime(_DateAffinity, TypeEngine): def __init__(self, timezone=False): """Construct a new :class:`.DateTime`. - + :param timezone: boolean. If True, and supported by the backend, will produce 'TIMESTAMP WITH TIMEZONE'. For backends that don't support timezone aware timestamps, has no effect. 
- + """ self.timezone = timezone @@ -1619,7 +1621,7 @@ class Date(_DateAffinity,TypeEngine): Interval:DateTime, # date - datetime = interval, - # this one is not in the PG docs + # this one is not in the PG docs # but works DateTime:Interval, }, @@ -1681,7 +1683,7 @@ class _Binary(TypeEngine): return None return process - # Python 3 has native bytes() type + # Python 3 has native bytes() type # both sqlite3 and pg8000 seem to return it # (i.e. and not 'memoryview') # Py2K @@ -1752,7 +1754,7 @@ class SchemaType(events.SchemaEventTarget): as well as types that are complimented by table or schema level constraints, triggers, and other rules. - :class:`.SchemaType` classes can also be targets for the + :class:`.SchemaType` classes can also be targets for the :meth:`.DDLEvents.before_parent_attach` and :meth:`.DDLEvents.after_parent_attach` events, where the events fire off surrounding the association of the type object with a parent :class:`.Column`. @@ -1850,17 +1852,17 @@ class SchemaType(events.SchemaEventTarget): class Enum(String, SchemaType): """Generic Enum Type. - The Enum type provides a set of possible string values which the + The Enum type provides a set of possible string values which the column is constrained towards. - By default, uses the backend's native ENUM type if available, + By default, uses the backend's native ENUM type if available, else uses VARCHAR + a CHECK constraint. - + See also: - + :class:`~.postgresql.ENUM` - PostgreSQL-specific type, which has additional functionality. 
- + """ __visit_name__ = 'enum' @@ -1911,7 +1913,7 @@ class Enum(String, SchemaType): """ self.enums = enums self.native_enum = kw.pop('native_enum', True) - convert_unicode= kw.pop('convert_unicode', None) + convert_unicode = kw.pop('convert_unicode', None) if convert_unicode is None: for e in enums: if isinstance(e, unicode): @@ -1921,15 +1923,21 @@ class Enum(String, SchemaType): convert_unicode = False if self.enums: - length =max(len(x) for x in self.enums) + length = max(len(x) for x in self.enums) else: length = 0 - String.__init__(self, - length =length, - convert_unicode=convert_unicode, + String.__init__(self, + length=length, + convert_unicode=convert_unicode, ) SchemaType.__init__(self, **kw) + def __repr__(self): + return util.generic_repr(self, [ + ("native_enum", True), + ("name", None) + ]) + def _should_create_constraint(self, compiler): return not self.native_enum or \ not compiler.dialect.supports_native_enum @@ -1949,9 +1957,9 @@ class Enum(String, SchemaType): def adapt(self, impltype, **kw): if issubclass(impltype, Enum): - return impltype(name=self.name, - quote=self.quote, - schema=self.schema, + return impltype(name=self.name, + quote=self.quote, + schema=self.schema, metadata=self.metadata, convert_unicode=self.convert_unicode, native_enum=self.native_enum, @@ -1973,7 +1981,7 @@ class PickleType(MutableType, TypeDecorator): impl = LargeBinary - def __init__(self, protocol=pickle.HIGHEST_PROTOCOL, + def __init__(self, protocol=pickle.HIGHEST_PROTOCOL, pickler=None, mutable=False, comparator=None): """ Construct a PickleType. @@ -1986,21 +1994,23 @@ class PickleType(MutableType, TypeDecorator): :param mutable: defaults to False; implements :meth:`AbstractType.is_mutable`. When ``True``, incoming - objects will be compared against copies of themselves - using the Python "equals" operator, unless the + objects will be compared against copies of themselves + using the Python "equals" operator, unless the ``comparator`` argument is present. 
See :class:`.MutableType` for details on "mutable" type - behavior. (default changed from ``True`` in - 0.7.0). + behavior. + + .. versionchanged:: 0.7.0 + Default changed from ``True``. + + .. note:: - .. note:: - This functionality is now superseded by the - ``sqlalchemy.ext.mutable`` extension described in + ``sqlalchemy.ext.mutable`` extension described in :ref:`mutable_toplevel`. :param comparator: a 2-arg callable predicate used - to compare values of this type. If left as ``None``, + to compare values of this type. If left as ``None``, the Python "equals" operator is used to compare values. """ @@ -2011,9 +2021,9 @@ class PickleType(MutableType, TypeDecorator): super(PickleType, self).__init__() def __reduce__(self): - return PickleType, (self.protocol, - None, - self.mutable, + return PickleType, (self.protocol, + None, + self.mutable, self.comparator) def bind_processor(self, dialect): @@ -2085,7 +2095,7 @@ class Boolean(TypeEngine, SchemaType): def __init__(self, create_constraint=True, name=None): """Construct a Boolean. - :param create_constraint: defaults to True. If the boolean + :param create_constraint: defaults to True. If the boolean is generated as an int/smallint, also create a CHECK constraint on the table that ensures 1 or 0 as a value. @@ -2147,22 +2157,22 @@ class Interval(_DateAffinity, TypeDecorator): impl = DateTime epoch = dt.datetime.utcfromtimestamp(0) - def __init__(self, native=True, - second_precision=None, + def __init__(self, native=True, + second_precision=None, day_precision=None): """Construct an Interval object. :param native: when True, use the actual INTERVAL type provided by the database, if supported (currently Postgresql, Oracle). - Otherwise, represent the interval data as + Otherwise, represent the interval data as an epoch value regardless. :param second_precision: For native interval types which support a "fractional seconds precision" parameter, i.e. 
Oracle and Postgresql - :param day_precision: for native interval types which + :param day_precision: for native interval types which support a "day precision" parameter, i.e. Oracle. """ @@ -2176,8 +2186,8 @@ class Interval(_DateAffinity, TypeDecorator): return cls._adapt_from_generic_interval(self, **kw) else: return self.__class__( - native=self.native, - second_precision=self.second_precision, + native=self.native, + second_precision=self.second_precision, day_precision=self.day_precision, **kw) diff --git a/libs/sqlalchemy/util/__init__.py b/libs/sqlalchemy/util/__init__.py index 13914aa7..8cb4c65b 100644 --- a/libs/sqlalchemy/util/__init__.py +++ b/libs/sqlalchemy/util/__init__.py @@ -1,13 +1,13 @@ # util/__init__.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from compat import callable, cmp, reduce, defaultdict, py25_dict, \ - threading, py3k_warning, jython, pypy, win32, set_types, buffer, pickle, \ - update_wrapper, partial, md5_hex, decode_slice, dottedgetter,\ - parse_qsl, any, contextmanager + threading, py3k_warning, jython, pypy, cpython, win32, set_types, buffer, \ + pickle, update_wrapper, partial, md5_hex, decode_slice, dottedgetter,\ + parse_qsl, any, contextmanager, next from _collections import NamedTuple, ImmutableContainer, immutabledict, \ Properties, OrderedProperties, ImmutableProperties, OrderedDict, \ diff --git a/libs/sqlalchemy/util/_collections.py b/libs/sqlalchemy/util/_collections.py index 1c407324..5a09dca6 100644 --- a/libs/sqlalchemy/util/_collections.py +++ b/libs/sqlalchemy/util/_collections.py @@ -1,5 +1,5 @@ # util/_collections.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is 
released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -210,7 +210,7 @@ class OrderedDict(dict): try: self._list.append(key) except AttributeError: - # work around Python pickle loads() with + # work around Python pickle loads() with # dict subclass (seems to ignore __setstate__?) self._list = [key] dict.__setitem__(self, key, object) @@ -585,7 +585,7 @@ else: self[key] = value = self.creator(key) return value -# define collections that are capable of storing +# define collections that are capable of storing # ColumnElement objects as hashable keys/elements. column_set = set column_dict = dict @@ -595,12 +595,12 @@ populate_column_dict = PopulateDict def unique_list(seq, hashfunc=None): seen = {} if not hashfunc: - return [x for x in seq - if x not in seen + return [x for x in seq + if x not in seen and not seen.__setitem__(x, True)] else: - return [x for x in seq - if hashfunc(x) not in seen + return [x for x in seq + if hashfunc(x) not in seen and not seen.__setitem__(hashfunc(x), True)] class UniqueAppender(object): @@ -801,15 +801,15 @@ class LRUCache(dict): def _manage_size(self): while len(self) > self.capacity + self.capacity * self.threshold: - by_counter = sorted(dict.values(self), + by_counter = sorted(dict.values(self), key=operator.itemgetter(2), reverse=True) for item in by_counter[self.capacity:]: try: del self[item[0]] except KeyError: - # if we couldnt find a key, most - # likely some other thread broke in + # if we couldnt find a key, most + # likely some other thread broke in # on us. loop around and try again break @@ -870,7 +870,7 @@ class ScopedRegistry(object): pass class ThreadLocalRegistry(ScopedRegistry): - """A :class:`.ScopedRegistry` that uses a ``threading.local()`` + """A :class:`.ScopedRegistry` that uses a ``threading.local()`` variable for storage. 
""" diff --git a/libs/sqlalchemy/util/compat.py b/libs/sqlalchemy/util/compat.py index 99b92b1e..18ea2815 100644 --- a/libs/sqlalchemy/util/compat.py +++ b/libs/sqlalchemy/util/compat.py @@ -1,5 +1,5 @@ # util/compat.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -19,6 +19,7 @@ py3k_warning = getattr(sys, 'py3kwarning', False) or sys.version_info >= (3, 0) jython = sys.platform.startswith('java') pypy = hasattr(sys, 'pypy_version_info') win32 = sys.platform.startswith('win') +cpython = not pypy and not jython # TODO: something better for this ? if py3k_warning: set_types = set @@ -40,6 +41,11 @@ else: set_types = set, sets.Set +if sys.version_info < (2, 6): + def next(iter): + return iter.next() +else: + next = next if py3k_warning: import pickle else: @@ -50,7 +56,7 @@ else: # a controversial feature, required by MySQLdb currently def buffer(x): - return x + return x # Py2K buffer = buffer @@ -193,7 +199,7 @@ import time if win32 or jython: time_func = time.clock else: - time_func = time.time + time_func = time.time if sys.version_info >= (2, 5): any = any diff --git a/libs/sqlalchemy/util/deprecations.py b/libs/sqlalchemy/util/deprecations.py index fc156f39..330d35db 100644 --- a/libs/sqlalchemy/util/deprecations.py +++ b/libs/sqlalchemy/util/deprecations.py @@ -1,5 +1,5 @@ # util/deprecations.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/libs/sqlalchemy/util/langhelpers.py b/libs/sqlalchemy/util/langhelpers.py index b6c89b11..b7c5132d 100644 --- a/libs/sqlalchemy/util/langhelpers.py +++ 
b/libs/sqlalchemy/util/langhelpers.py @@ -1,5 +1,5 @@ # util/langhelpers.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -239,14 +239,16 @@ def unbound_method_to_callable(func_or_cls): else: return func_or_cls -def generic_repr(obj): +def generic_repr(obj, additional_kw=(), to_inspect=None): """Produce a __repr__() based on direct association of the __init__() specification vs. same-named attributes present. - + """ + if to_inspect is None: + to_inspect = obj def genargs(): try: - (args, vargs, vkw, defaults) = inspect.getargspec(obj.__init__) + (args, vargs, vkw, defaults) = inspect.getargspec(to_inspect.__init__) except TypeError: return @@ -267,6 +269,15 @@ def generic_repr(obj): yield '%s=%r' % (arg, val) except: pass + if additional_kw: + for arg, defval in additional_kw: + try: + val = getattr(obj, arg, None) + if val != defval: + yield '%s=%r' % (arg, val) + except: + pass + return "%s(%s)" % (obj.__class__.__name__, ", ".join(genargs())) class portable_instancemethod(object): @@ -485,6 +496,8 @@ class memoized_property(object): obj.__dict__[self.__name__] = result = self.fget(obj) return result + def _reset(self, obj): + obj.__dict__.pop(self.__name__, None) class memoized_instancemethod(object): """Decorate a method memoize its return value. @@ -551,10 +564,10 @@ class importlater(object): from mypackage.somemodule import somesubmod except evaluted upon attribute access to "somesubmod". - + importlater() currently requires that resolve_all() be called, typically at the bottom of a package's __init__.py. - This is so that __import__ still called only at + This is so that __import__ still called only at module import time, and not potentially within a non-main thread later on. 
@@ -597,14 +610,14 @@ class importlater(object): importlater._unresolved.discard(self) if self._il_addtl: self._initial_import = __import__( - self._il_path, globals(), locals(), + self._il_path, globals(), locals(), [self._il_addtl]) else: self._initial_import = __import__(self._il_path) def __getattr__(self, key): if key == 'module': - raise ImportError("Could not resolve module %s" + raise ImportError("Could not resolve module %s" % self._full_path) try: attr = getattr(self.module, key) @@ -860,8 +873,8 @@ def warn(msg, stacklevel=3): If msg is a string, :class:`.exc.SAWarning` is used as the category. - .. note:: - + .. note:: + This function is swapped out when the test suite runs, with a compatible version that uses warnings.warn_explicit, so that the warnings registry can diff --git a/libs/sqlalchemy/util/queue.py b/libs/sqlalchemy/util/queue.py index e71ceb45..acccf3c5 100644 --- a/libs/sqlalchemy/util/queue.py +++ b/libs/sqlalchemy/util/queue.py @@ -1,5 +1,5 @@ # util/queue.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -16,6 +16,15 @@ condition.""" from collections import deque from time import time as _time from sqlalchemy.util import threading +import sys + +if sys.version_info < (2, 6): + def notify_all(condition): + condition.notify() +else: + def notify_all(condition): + condition.notify_all() + __all__ = ['Empty', 'Full', 'Queue'] @@ -29,6 +38,11 @@ class Full(Exception): pass +class SAAbort(Exception): + "Special SQLA exception to abort waiting" + def __init__(self, context): + self.context = context + class Queue: def __init__(self, maxsize=0): """Initialize a queue object with a given maximum size. @@ -49,6 +63,9 @@ class Queue: # a thread waiting to put is notified then. 
self.not_full = threading.Condition(self.mutex) + # when this is set, SAAbort is raised within get(). + self._sqla_abort_context = False + def qsize(self): """Return the approximate size of the queue (not reliable!).""" @@ -138,6 +155,8 @@ class Queue: elif timeout is None: while self._empty(): self.not_empty.wait() + if self._sqla_abort_context: + raise SAAbort(self._sqla_abort_context) else: if timeout < 0: raise ValueError("'timeout' must be a positive number") @@ -147,12 +166,27 @@ class Queue: if remaining <= 0.0: raise Empty self.not_empty.wait(remaining) + if self._sqla_abort_context: + raise SAAbort(self._sqla_abort_context) item = self._get() self.not_full.notify() return item finally: self.not_empty.release() + def abort(self, context): + """Issue an 'abort', will force any thread waiting on get() + to stop waiting and raise SAAbort. + + """ + self._sqla_abort_context = context + if not self.not_full.acquire(False): + return + try: + notify_all(self.not_empty) + finally: + self.not_full.release() + def get_nowait(self): """Remove and return an item from the queue without blocking. 
diff --git a/libs/sqlalchemy/util/topological.py b/libs/sqlalchemy/util/topological.py index 2ba86b23..86e42c1f 100644 --- a/libs/sqlalchemy/util/topological.py +++ b/libs/sqlalchemy/util/topological.py @@ -1,5 +1,5 @@ # util/topological.py -# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -29,7 +29,7 @@ def sort_as_subsets(tuples, allitems): if not output: raise CircularDependencyError( "Circular dependency detected.", - find_cycles(tuples, allitems), + find_cycles(tuples, allitems), _gen_edges(edges) ) @@ -56,7 +56,7 @@ def find_cycles(tuples, allitems): output = set() - # we'd like to find all nodes that are + # we'd like to find all nodes that are # involved in cycles, so we do the full # pass through the whole thing for each # node in the original list. @@ -86,7 +86,7 @@ def find_cycles(tuples, allitems): def _gen_edges(edges): return set([ - (right, left) - for left in edges - for right in edges[left] + (right, left) + for left in edges + for right in edges[left] ])