mirror of https://github.com/janeczku/calibre-web (synced 2025-11-16 15:07:13 +00:00)
Initial Fork from https://bitbucket.org/raphaelmutschler/calibreserver/
357
lib/sqlalchemy/engine/__init__.py
Normal file
@@ -0,0 +1,357 @@
# engine/__init__.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""SQL connections, SQL execution and high-level DB-API interface.

The engine package defines the basic components used to interface
DB-API modules with higher-level statement construction,
connection-management, execution and result contexts.  The primary
"entry point" class into this package is the Engine and its public
constructor ``create_engine()``.

This package includes:

base.py
    Defines interface classes and some implementation classes which
    comprise the basic components used to interface between a DB-API,
    constructed and plain-text statements, connections, transactions,
    and results.

default.py
    Contains default implementations of some of the components defined
    in base.py.  All current database dialects use the classes in
    default.py as base classes for their own database-specific
    implementations.

strategies.py
    The mechanics of constructing ``Engine`` objects are represented
    here.  Defines the ``EngineStrategy`` class which represents how
    to go from arguments specified to the ``create_engine()``
    function, to a fully constructed ``Engine``, including
    initialization of connection pooling, dialects, and specific
    subclasses of ``Engine``.

threadlocal.py
    The ``TLEngine`` class is defined here, which is a subclass of
    the generic ``Engine`` and tracks ``Connection`` and
    ``Transaction`` objects against the identity of the current
    thread.  This allows certain programming patterns based around
    the concept of a "thread-local connection" to be possible.
    The ``TLEngine`` is created by using the "threadlocal" engine
    strategy in conjunction with the ``create_engine()`` function.

url.py
    Defines the ``URL`` class which represents the individual
    components of a string URL passed to ``create_engine()``.  Also
    defines a basic module-loading strategy for the dialect specifier
    within a URL.

"""

# not sure what this was used for
#import sqlalchemy.databases

from .interfaces import (
    Compiled,
    Connectable,
    Dialect,
    ExecutionContext,
    TypeCompiler
)

from .base import (
    Connection,
    Engine,
    NestedTransaction,
    RootTransaction,
    Transaction,
    TwoPhaseTransaction,
)

from .result import (
    BufferedColumnResultProxy,
    BufferedColumnRow,
    BufferedRowResultProxy,
    FullyBufferedResultProxy,
    ResultProxy,
    RowProxy,
)

from .util import (
    connection_memoize
)

from . import util, strategies

default_strategy = 'plain'


def create_engine(*args, **kwargs):
    """Create a new :class:`.Engine` instance.

    The standard calling form is to send the URL as the
    first positional argument, usually a string
    that indicates database dialect and connection arguments.
    Additional keyword arguments may then follow it which
    establish various options on the resulting :class:`.Engine`
    and its underlying :class:`.Dialect` and :class:`.Pool`
    constructs.

    The string form of the URL is
    ``dialect+driver://user:password@host/dbname[?key=value..]``, where
    ``dialect`` is a database name such as ``mysql``, ``oracle``,
    ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
    ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc.  Alternatively,
    the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.

    ``**kwargs`` takes a wide variety of options which are routed
    towards their appropriate components.  Arguments may be specific
    to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as
    the :class:`.Pool`.  Specific dialects also accept keyword
    arguments that are unique to that dialect.  Here, we describe the
    parameters that are common to most :func:`.create_engine()` usage.

    Once established, the newly resulting :class:`.Engine` will
    request a connection from the underlying :class:`.Pool` once
    :meth:`.Engine.connect` is called, or a method which depends on it
    such as :meth:`.Engine.execute` is invoked.  The :class:`.Pool` in turn
    will establish the first actual DBAPI connection when this request
    is received.  The :func:`.create_engine` call itself does **not**
    establish any actual DBAPI connections directly.

    See also:

    :doc:`/core/engines`

    :ref:`connections_toplevel`

    :param case_sensitive=True: if False, result column names
        will match in a case-insensitive fashion, that is,
        ``row['SomeColumn']``.

        .. versionchanged:: 0.8
            By default, result row names match case-sensitively.
            In version 0.7 and prior, all matches were case-insensitive.

    :param connect_args: a dictionary of options which will be
        passed directly to the DBAPI's ``connect()`` method as
        additional keyword arguments.  See the example
        at :ref:`custom_dbapi_args`.

    :param convert_unicode=False: if set to True, sets
        the default behavior of ``convert_unicode`` on the
        :class:`.String` type to ``True``, regardless
        of a setting of ``False`` on an individual
        :class:`.String` type, thus causing all :class:`.String`
        -based columns
        to accommodate Python ``unicode`` objects.  This flag
        is useful as an engine-wide setting when using a
        DBAPI that does not natively support Python
        ``unicode`` objects and raises an error when
        one is received (such as pyodbc with FreeTDS).

        See :class:`.String` for further details on
        what this flag indicates.

    :param creator: a callable which returns a DBAPI connection.
        This creation function will be passed to the underlying
        connection pool and will be used to create all new database
        connections.  Usage of this function causes connection
        parameters specified in the URL argument to be bypassed.

    :param echo=False: if True, the Engine will log all statements
        as well as a repr() of their parameter lists to the engine's
        logger, which defaults to sys.stdout.  The ``echo`` attribute of
        ``Engine`` can be modified at any time to turn logging on and
        off.  If set to the string ``"debug"``, result rows will be
        printed to the standard output as well.  This flag ultimately
        controls a Python logger; see :ref:`dbengine_logging` for
        information on how to configure logging directly.

    :param echo_pool=False: if True, the connection pool will log
        all checkouts/checkins to the logging stream, which defaults to
        sys.stdout.  This flag ultimately controls a Python logger; see
        :ref:`dbengine_logging` for information on how to configure logging
        directly.

    :param encoding: Defaults to ``utf-8``.  This is the string
        encoding used by SQLAlchemy for string encode/decode
        operations which occur within SQLAlchemy, **outside of
        the DBAPI.**  Most modern DBAPIs feature some degree of
        direct support for Python ``unicode`` objects,
        what you see in Python 2 as a string of the form
        ``u'some string'``.  For those scenarios where the
        DBAPI is detected as not supporting a Python ``unicode``
        object, this encoding is used to determine the
        source/destination encoding.  It is **not used**
        for those cases where the DBAPI handles unicode
        directly.

        To properly configure a system to accommodate Python
        ``unicode`` objects, the DBAPI should be
        configured to handle unicode to the greatest
        degree as is appropriate - see
        the notes on unicode pertaining to the specific
        target database in use at :ref:`dialect_toplevel`.

        Areas where string encoding may need to be accommodated
        outside of the DBAPI include zero or more of:

        * the values passed to bound parameters, corresponding to
          the :class:`.Unicode` type or the :class:`.String` type
          when ``convert_unicode`` is ``True``;
        * the values returned in result set columns corresponding
          to the :class:`.Unicode` type or the :class:`.String`
          type when ``convert_unicode`` is ``True``;
        * the string SQL statement passed to the DBAPI's
          ``cursor.execute()`` method;
        * the string names of the keys in the bound parameter
          dictionary passed to the DBAPI's ``cursor.execute()``
          as well as ``cursor.setinputsizes()`` methods;
        * the string column names retrieved from the DBAPI's
          ``cursor.description`` attribute.

        When using Python 3, the DBAPI is required to support
        *all* of the above values as Python ``unicode`` objects,
        which in Python 3 are just known as ``str``.  In Python 2,
        the DBAPI does not specify unicode behavior at all,
        so SQLAlchemy must make decisions for each of the above
        values on a per-DBAPI basis - implementations are
        completely inconsistent in their behavior.

    :param execution_options: Dictionary execution options which will
        be applied to all connections.  See
        :meth:`~sqlalchemy.engine.Connection.execution_options`

    :param implicit_returning=True: When ``True``, a RETURNING-
        compatible construct, if available, will be used to
        fetch newly generated primary key values when a single row
        INSERT statement is emitted with no existing returning()
        clause.  This applies to those backends which support RETURNING
        or a compatible construct, including Postgresql, Firebird, Oracle,
        Microsoft SQL Server.  Set this to ``False`` to disable
        the automatic usage of RETURNING.

    :param label_length=None: optional integer value which limits
        the size of dynamically generated column labels to that many
        characters.  If less than 6, labels are generated as
        "_(counter)".  If ``None``, the value of
        ``dialect.max_identifier_length`` is used instead.

    :param listeners: A list of one or more
        :class:`~sqlalchemy.interfaces.PoolListener` objects which will
        receive connection pool events.

    :param logging_name: String identifier which will be used within
        the "name" field of logging records generated within the
        "sqlalchemy.engine" logger.  Defaults to a hexstring of the
        object's id.

    :param max_overflow=10: the number of connections to allow in
        connection pool "overflow", that is connections that can be
        opened above and beyond the pool_size setting, which defaults
        to five.  This is only used with :class:`~sqlalchemy.pool.QueuePool`.

    :param module=None: reference to a Python module object (the module
        itself, not its string name).  Specifies an alternate DBAPI module to
        be used by the engine's dialect.  Each sub-dialect references a
        specific DBAPI which will be imported before first connect.  This
        parameter causes the import to be bypassed, and the given module to
        be used instead.  Can be used for testing of DBAPIs as well as to
        inject "mock" DBAPI implementations into the :class:`.Engine`.

    :param pool=None: an already-constructed instance of
        :class:`~sqlalchemy.pool.Pool`, such as a
        :class:`~sqlalchemy.pool.QueuePool` instance.  If non-None, this
        pool will be used directly as the underlying connection pool
        for the engine, bypassing whatever connection parameters are
        present in the URL argument.  For information on constructing
        connection pools manually, see :ref:`pooling_toplevel`.

    :param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
        subclass, which will be used to create a connection pool
        instance using the connection parameters given in the URL.  Note
        this differs from ``pool`` in that you don't actually
        instantiate the pool in this case, you just indicate what type
        of pool to be used.

    :param pool_logging_name: String identifier which will be used within
        the "name" field of logging records generated within the
        "sqlalchemy.pool" logger.  Defaults to a hexstring of the object's
        id.

    :param pool_size=5: the number of connections to keep open
        inside the connection pool.  This is used with
        :class:`~sqlalchemy.pool.QueuePool` as
        well as :class:`~sqlalchemy.pool.SingletonThreadPool`.  With
        :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
        of 0 indicates no limit; to disable pooling, set ``poolclass`` to
        :class:`~sqlalchemy.pool.NullPool` instead.

    :param pool_recycle=-1: this setting causes the pool to recycle
        connections after the given number of seconds has passed.  It
        defaults to -1, or no timeout.  For example, setting to 3600
        means connections will be recycled after one hour.  Note that
        MySQL in particular will disconnect automatically if no
        activity is detected on a connection for eight hours (although
        this is configurable with the MySQLdb connection itself and the
        server configuration as well).

    :param pool_reset_on_return='rollback': set the "reset on return"
        behavior of the pool, which is whether ``rollback()``,
        ``commit()``, or nothing is called upon connections
        being returned to the pool.  See the docstring for
        ``reset_on_return`` at :class:`.Pool`.

        .. versionadded:: 0.7.6

    :param pool_timeout=30: number of seconds to wait before giving
        up on getting a connection from the pool.  This is only used
        with :class:`~sqlalchemy.pool.QueuePool`.

    :param strategy='plain': selects alternate engine implementations.
        Currently available are:

        * the ``threadlocal`` strategy, which is described in
          :ref:`threadlocal_strategy`;
        * the ``mock`` strategy, which dispatches all statement
          execution to a function passed as the argument ``executor``.
          See `example in the FAQ
          <http://www.sqlalchemy.org/trac/wiki/FAQ#HowcanIgettheCREATETABLEDROPTABLEoutputasastring>`_.

    :param executor=None: a function taking arguments
        ``(sql, *multiparams, **params)``, to which the ``mock`` strategy will
        dispatch all statement execution.  Used only by ``strategy='mock'``.

    """

    strategy = kwargs.pop('strategy', default_strategy)
    strategy = strategies.strategies[strategy]
    return strategy.create(*args, **kwargs)
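

# --- Illustrative usage (editor's sketch; not part of the original module) ---
# A minimal, hedged example of the URL forms described in the docstring
# above.  The URLs, credentials and option values are made up, and the
# function is never called by the library itself.

def _example_create_engine_usage():
    """Editor's illustration only."""
    # In-memory SQLite: the dialect alone is enough, no driver segment.
    memory_engine = create_engine('sqlite://')

    # dialect+driver://user:password@host/dbname plus Engine/Pool options
    # (hypothetical credentials; connecting would require psycopg2).
    pg_engine = create_engine(
        'postgresql+psycopg2://scott:tiger@localhost/test',
        echo=True, pool_size=10, max_overflow=20)

    # No DBAPI connection exists yet; the first one is acquired lazily.
    conn = memory_engine.connect()
    conn.close()
    return pg_engine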


def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Create a new Engine instance using a configuration dictionary.

    The dictionary is typically produced from a config file where keys
    are prefixed, such as sqlalchemy.url, sqlalchemy.echo, etc.  The
    'prefix' argument indicates the prefix to be searched for.

    A select set of keyword arguments will be "coerced" to their
    expected type based on string values.  In a future release, this
    functionality will be expanded and include dialect-specific
    arguments.
    """

    opts = util._coerce_config(configuration, prefix)
    opts.update(kwargs)
    url = opts.pop('url')
    return create_engine(url, **opts)
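

# --- Illustrative usage (editor's sketch; not part of the original module) ---
# The keys below mimic what a parsed .ini file might produce: values arrive
# as strings, and a known subset (e.g. ``echo``) is coerced by
# util._coerce_config() before being handed to create_engine().

def _example_engine_from_config_usage():
    """Editor's illustration only."""
    configuration = {
        'sqlalchemy.url': 'sqlite://',
        'sqlalchemy.echo': 'true',       # coerced from string to boolean
        'unrelated.setting': 'ignored',  # non-matching prefix is skipped
    }
    return engine_from_config(configuration, prefix='sqlalchemy.')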


__all__ = (
    'create_engine',
    'engine_from_config',
)
1729
lib/sqlalchemy/engine/base.py
Normal file
File diff suppressed because it is too large
193
lib/sqlalchemy/engine/ddl.py
Normal file
@@ -0,0 +1,193 @@
# engine/ddl.py
# Copyright (C) 2009-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Routines to handle CREATE/DROP workflow."""

from .. import schema
from ..sql import util as sql_util


class DDLBase(schema.SchemaVisitor):
    def __init__(self, connection):
        self.connection = connection


class SchemaGenerator(DDLBase):

    def __init__(self, dialect, connection, checkfirst=False,
                 tables=None, **kwargs):
        super(SchemaGenerator, self).__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        self.memo = {}

    def _can_create_table(self, table):
        self.dialect.validate_identifier(table.name)
        if table.schema:
            self.dialect.validate_identifier(table.schema)
        return not self.checkfirst or \
            not self.dialect.has_table(self.connection,
                                       table.name, schema=table.schema)

    def _can_create_sequence(self, sequence):
        return self.dialect.supports_sequences and \
            (
                (not self.dialect.sequences_optional or
                 not sequence.optional) and
                (
                    not self.checkfirst or
                    not self.dialect.has_sequence(
                        self.connection,
                        sequence.name,
                        schema=sequence.schema)
                )
            )

    def visit_metadata(self, metadata):
        if self.tables is not None:
            tables = self.tables
        else:
            tables = metadata.tables.values()
        collection = [t for t in sql_util.sort_tables(tables)
                      if self._can_create_table(t)]
        seq_coll = [s for s in metadata._sequences.values()
                    if s.column is None and self._can_create_sequence(s)]

        metadata.dispatch.before_create(metadata, self.connection,
                                        tables=collection,
                                        checkfirst=self.checkfirst,
                                        _ddl_runner=self)

        for seq in seq_coll:
            self.traverse_single(seq, create_ok=True)

        for table in collection:
            self.traverse_single(table, create_ok=True)

        metadata.dispatch.after_create(metadata, self.connection,
                                       tables=collection,
                                       checkfirst=self.checkfirst,
                                       _ddl_runner=self)

    def visit_table(self, table, create_ok=False):
        if not create_ok and not self._can_create_table(table):
            return

        table.dispatch.before_create(table, self.connection,
                                     checkfirst=self.checkfirst,
                                     _ddl_runner=self)

        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        self.connection.execute(schema.CreateTable(table))

        if hasattr(table, 'indexes'):
            for index in table.indexes:
                self.traverse_single(index)

        table.dispatch.after_create(table, self.connection,
                                    checkfirst=self.checkfirst,
                                    _ddl_runner=self)

    def visit_sequence(self, sequence, create_ok=False):
        if not create_ok and not self._can_create_sequence(sequence):
            return
        self.connection.execute(schema.CreateSequence(sequence))

    def visit_index(self, index):
        self.connection.execute(schema.CreateIndex(index))


class SchemaDropper(DDLBase):

    def __init__(self, dialect, connection, checkfirst=False,
                 tables=None, **kwargs):
        super(SchemaDropper, self).__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        self.memo = {}

    def visit_metadata(self, metadata):
        if self.tables is not None:
            tables = self.tables
        else:
            tables = metadata.tables.values()

        collection = [
            t
            for t in reversed(sql_util.sort_tables(tables))
            if self._can_drop_table(t)
        ]

        seq_coll = [
            s
            for s in metadata._sequences.values()
            if s.column is None and self._can_drop_sequence(s)
        ]

        metadata.dispatch.before_drop(
            metadata, self.connection, tables=collection,
            checkfirst=self.checkfirst, _ddl_runner=self)

        for table in collection:
            self.traverse_single(table, drop_ok=True)

        for seq in seq_coll:
            self.traverse_single(seq, drop_ok=True)

        metadata.dispatch.after_drop(
            metadata, self.connection, tables=collection,
            checkfirst=self.checkfirst, _ddl_runner=self)

    def _can_drop_table(self, table):
        self.dialect.validate_identifier(table.name)
        if table.schema:
            self.dialect.validate_identifier(table.schema)
        return not self.checkfirst or self.dialect.has_table(
            self.connection, table.name, schema=table.schema)

    def _can_drop_sequence(self, sequence):
        return self.dialect.supports_sequences and \
            ((not self.dialect.sequences_optional or
              not sequence.optional) and
             (not self.checkfirst or
              self.dialect.has_sequence(
                  self.connection,
                  sequence.name,
                  schema=sequence.schema))
             )

    def visit_index(self, index):
        self.connection.execute(schema.DropIndex(index))

    def visit_table(self, table, drop_ok=False):
        if not drop_ok and not self._can_drop_table(table):
            return

        table.dispatch.before_drop(table, self.connection,
                                   checkfirst=self.checkfirst,
                                   _ddl_runner=self)

        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        self.connection.execute(schema.DropTable(table))

        table.dispatch.after_drop(table, self.connection,
                                  checkfirst=self.checkfirst,
                                  _ddl_runner=self)

    def visit_sequence(self, sequence, drop_ok=False):
        if not drop_ok and not self._can_drop_sequence(sequence):
            return
        self.connection.execute(schema.DropSequence(sequence))
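

# --- Illustrative usage (editor's sketch; not part of the original module) ---
# These visitors are normally driven indirectly: MetaData.create_all() and
# drop_all() ask the Engine to walk SchemaGenerator / SchemaDropper over the
# dependency-sorted tables, which emits the CREATE / DROP statements above.

def _example_ddl_workflow(engine, metadata):
    """Editor's illustration only; equivalent to the public MetaData API."""
    metadata.create_all(bind=engine, checkfirst=True)  # -> SchemaGenerator
    metadata.drop_all(bind=engine, checkfirst=True)    # -> SchemaDropper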
837
lib/sqlalchemy/engine/default.py
Normal file
@@ -0,0 +1,837 @@
# engine/default.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Default implementations of per-dialect sqlalchemy.engine classes.

These are semi-private implementation classes which are only of importance
to database dialect authors; dialects will usually use the classes here
as the base class for their own corresponding classes.

"""

import re
import random
from . import reflection, interfaces, result
from ..sql import compiler, expression
from .. import exc, types as sqltypes, util, pool, processors
import codecs
import weakref

AUTOCOMMIT_REGEXP = re.compile(
    r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
    re.I | re.UNICODE)
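

# --- Editor's note (illustrative; not part of the original module) ---
# The regexp above drives "autocommit" detection for plain string SQL:
# leading whitespace is allowed, matching is case-insensitive, and only
# the start of the statement is examined.

def _example_autocommit_match():
    """Editor's illustration only."""
    assert AUTOCOMMIT_REGEXP.match("  INSERT INTO t VALUES (1)")
    assert AUTOCOMMIT_REGEXP.match("update t set x = 1")
    assert AUTOCOMMIT_REGEXP.match("SELECT * FROM t") is None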


class DefaultDialect(interfaces.Dialect):
    """Default implementation of Dialect"""

    statement_compiler = compiler.SQLCompiler
    ddl_compiler = compiler.DDLCompiler
    type_compiler = compiler.GenericTypeCompiler
    preparer = compiler.IdentifierPreparer
    supports_alter = True

    # the first value we'd get for an autoincrement
    # column.
    default_sequence_base = 1

    # most DBAPIs happy with this for execute().
    # not cx_oracle.
    execute_sequence_format = tuple

    supports_views = True
    supports_sequences = False
    sequences_optional = False
    preexecute_autoincrement_sequences = False
    postfetch_lastrowid = True
    implicit_returning = False

    supports_native_enum = False
    supports_native_boolean = False

    # if the NUMERIC type
    # returns decimal.Decimal.
    # *not* the FLOAT type however.
    supports_native_decimal = False

    # Py3K
    #supports_unicode_statements = True
    #supports_unicode_binds = True
    #returns_unicode_strings = True
    #description_encoding = None
    # Py2K
    supports_unicode_statements = False
    supports_unicode_binds = False
    returns_unicode_strings = False
    description_encoding = 'use_encoding'
    # end Py2K

    name = 'default'

    # length at which to truncate
    # any identifier.
    max_identifier_length = 9999

    # length at which to truncate
    # the name of an index.
    # Usually None to indicate
    # 'use max_identifier_length'.
    # thanks to MySQL, sigh
    max_index_name_length = None

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    dbapi_type_map = {}
    colspecs = {}
    default_paramstyle = 'named'
    supports_default_values = False
    supports_empty_insert = True
    supports_multivalues_insert = False

    server_version_info = None

    # indicates symbol names are
    # UPPERCASEd if they are case insensitive
    # within the database.
    # if this is True, the methods normalize_name()
    # and denormalize_name() must be provided.
    requires_name_normalize = False

    reflection_options = ()

    def __init__(self, convert_unicode=False,
                 encoding='utf-8', paramstyle=None, dbapi=None,
                 implicit_returning=None,
                 case_sensitive=True,
                 label_length=None, **kwargs):

        if not getattr(self, 'ported_sqla_06', True):
            util.warn(
                "The %s dialect is not yet ported to the 0.6 format" %
                self.name)

        self.convert_unicode = convert_unicode
        self.encoding = encoding
        self.positional = False
        self._ischema = None
        self.dbapi = dbapi
        if paramstyle is not None:
            self.paramstyle = paramstyle
        elif self.dbapi is not None:
            self.paramstyle = self.dbapi.paramstyle
        else:
            self.paramstyle = self.default_paramstyle
        if implicit_returning is not None:
            self.implicit_returning = implicit_returning
        self.positional = self.paramstyle in ('qmark', 'format', 'numeric')
        self.identifier_preparer = self.preparer(self)
        self.type_compiler = self.type_compiler(self)

        self.case_sensitive = case_sensitive

        if label_length and label_length > self.max_identifier_length:
            raise exc.ArgumentError(
                "Label length of %d is greater than this dialect's"
                " maximum identifier length of %d" %
                (label_length, self.max_identifier_length))
        self.label_length = label_length

        if self.description_encoding == 'use_encoding':
            self._description_decoder = \
                processors.to_unicode_processor_factory(
                    encoding
                )
        elif self.description_encoding is not None:
            self._description_decoder = \
                processors.to_unicode_processor_factory(
                    self.description_encoding
                )
        self._encoder = codecs.getencoder(self.encoding)
        self._decoder = processors.to_unicode_processor_factory(self.encoding)

    @util.memoized_property
    def _type_memos(self):
        return weakref.WeakKeyDictionary()

    @property
    def dialect_description(self):
        return self.name + "+" + self.driver

    @classmethod
    def get_pool_class(cls, url):
        return getattr(cls, 'poolclass', pool.QueuePool)

    def initialize(self, connection):
        try:
            self.server_version_info = \
                self._get_server_version_info(connection)
        except NotImplementedError:
            self.server_version_info = None
        try:
            self.default_schema_name = \
                self._get_default_schema_name(connection)
        except NotImplementedError:
            self.default_schema_name = None

        try:
            self.default_isolation_level = \
                self.get_isolation_level(connection.connection)
        except NotImplementedError:
            self.default_isolation_level = None

        self.returns_unicode_strings = self._check_unicode_returns(connection)

        self.do_rollback(connection.connection)

    def on_connect(self):
        """return a callable which sets up a newly created DBAPI connection.

        This is used to set dialect-wide per-connection options such as
        isolation modes, unicode modes, etc.

        If a callable is returned, it will be assembled into a pool listener
        that receives the direct DBAPI connection, with all wrappers removed.

        If None is returned, no listener will be generated.

        """
        return None

    def _check_unicode_returns(self, connection):
        # Py2K
        if self.supports_unicode_statements:
            cast_to = unicode
        else:
            cast_to = str
        # end Py2K
        # Py3K
        #cast_to = str

        def check_unicode(formatstr, type_):
            cursor = connection.connection.cursor()
            try:
                try:
                    cursor.execute(
                        cast_to(
                            expression.select(
                                [expression.cast(
                                    expression.literal_column(
                                        "'test %s returns'" % formatstr),
                                    type_)
                                ]).compile(dialect=self)
                        )
                    )
                    row = cursor.fetchone()

                    return isinstance(row[0], unicode)
                except self.dbapi.Error, de:
                    util.warn("Exception attempting to "
                              "detect unicode returns: %r" % de)
                    return False
            finally:
                cursor.close()

        # detect plain VARCHAR
        unicode_for_varchar = check_unicode("plain", sqltypes.VARCHAR(60))

        # detect if there's an NVARCHAR type with different behavior available
        unicode_for_unicode = check_unicode("unicode", sqltypes.Unicode(60))

        if unicode_for_unicode and not unicode_for_varchar:
            return "conditional"
        else:
            return unicode_for_varchar

    def type_descriptor(self, typeobj):
        """Provide a database-specific :class:`.TypeEngine` object, given
        the generic object which comes from the types module.

        This method looks for a dictionary called
        ``colspecs`` as a class or instance-level variable,
        and passes on to :func:`.types.adapt_type`.

        """
        return sqltypes.adapt_type(typeobj, self.colspecs)

    def reflecttable(self, connection, table, include_columns,
                     exclude_columns=None):
        insp = reflection.Inspector.from_engine(connection)
        return insp.reflecttable(table, include_columns, exclude_columns)

    def get_pk_constraint(self, conn, table_name, schema=None, **kw):
        """Compatibility method, adapts the result of get_primary_keys()
        for those dialects which don't implement get_pk_constraint().

        """
        return {
            'constrained_columns':
                self.get_primary_keys(conn, table_name,
                                      schema=schema, **kw)
        }

    def validate_identifier(self, ident):
        if len(ident) > self.max_identifier_length:
            raise exc.IdentifierError(
                "Identifier '%s' exceeds maximum length of %d characters" %
                (ident, self.max_identifier_length)
            )

    def connect(self, *cargs, **cparams):
        return self.dbapi.connect(*cargs, **cparams)

    def create_connect_args(self, url):
        opts = url.translate_connect_args()
        opts.update(url.query)
        return [[], opts]

    def do_begin(self, dbapi_connection):
        pass

    def do_rollback(self, dbapi_connection):
        dbapi_connection.rollback()

    def do_commit(self, dbapi_connection):
        dbapi_connection.commit()

    def do_close(self, dbapi_connection):
        dbapi_connection.close()

    def create_xid(self):
        """Create a random two-phase transaction ID.

        This id will be passed to do_begin_twophase(), do_rollback_twophase(),
        do_commit_twophase().  Its format is unspecified.
        """

        return "_sa_%032x" % random.randint(0, 2 ** 128)

    def do_savepoint(self, connection, name):
        connection.execute(expression.SavepointClause(name))

    def do_rollback_to_savepoint(self, connection, name):
        connection.execute(expression.RollbackToSavepointClause(name))

    def do_release_savepoint(self, connection, name):
        connection.execute(expression.ReleaseSavepointClause(name))

    def do_executemany(self, cursor, statement, parameters, context=None):
        cursor.executemany(statement, parameters)

    def do_execute(self, cursor, statement, parameters, context=None):
        cursor.execute(statement, parameters)

    def do_execute_no_params(self, cursor, statement, context=None):
        cursor.execute(statement)

    def is_disconnect(self, e, connection, cursor):
        return False

    def reset_isolation_level(self, dbapi_conn):
        # default_isolation_level is read from the first connection
        # after the initial set of 'isolation_level', if any, so is
        # the configured default of this dialect.
        self.set_isolation_level(dbapi_conn, self.default_isolation_level)


class DefaultExecutionContext(interfaces.ExecutionContext):
    isinsert = False
    isupdate = False
    isdelete = False
    isddl = False
    executemany = False
    result_map = None
    compiled = None
    statement = None
    postfetch_cols = None
    prefetch_cols = None
    _is_implicit_returning = False
    _is_explicit_returning = False

    # a hook for SQLite's translation of
    # result column names
    _translate_colname = None

    @classmethod
    def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl):
        """Initialize execution context for a DDLElement construct."""

        self = cls.__new__(cls)
        self.dialect = dialect
        self.root_connection = connection
        self._dbapi_connection = dbapi_connection
        self.engine = connection.engine

        self.compiled = compiled = compiled_ddl
        self.isddl = True

        self.execution_options = compiled.statement._execution_options
        if connection._execution_options:
            self.execution_options = dict(self.execution_options)
            self.execution_options.update(connection._execution_options)

        if not dialect.supports_unicode_statements:
            self.unicode_statement = unicode(compiled)
            self.statement = dialect._encoder(self.unicode_statement)[0]
        else:
            self.statement = self.unicode_statement = unicode(compiled)

        self.cursor = self.create_cursor()
        self.compiled_parameters = []

        if dialect.positional:
            self.parameters = [dialect.execute_sequence_format()]
        else:
            self.parameters = [{}]

        return self

    @classmethod
    def _init_compiled(cls, dialect, connection, dbapi_connection,
                       compiled, parameters):
        """Initialize execution context for a Compiled construct."""

        self = cls.__new__(cls)
        self.dialect = dialect
        self.root_connection = connection
        self._dbapi_connection = dbapi_connection
        self.engine = connection.engine

        self.compiled = compiled

        if not compiled.can_execute:
            raise exc.ArgumentError("Not an executable clause")

        self.execution_options = compiled.statement._execution_options
        if connection._execution_options:
            self.execution_options = dict(self.execution_options)
            self.execution_options.update(connection._execution_options)

        # compiled clauseelement.  process bind params, process table defaults,
        # track collections used by ResultProxy to target and process results

        self.result_map = compiled.result_map

        self.unicode_statement = unicode(compiled)
        if not dialect.supports_unicode_statements:
            self.statement = self.unicode_statement.encode(
                self.dialect.encoding)
        else:
            self.statement = self.unicode_statement

        self.isinsert = compiled.isinsert
        self.isupdate = compiled.isupdate
        self.isdelete = compiled.isdelete

        if self.isinsert or self.isupdate or self.isdelete:
            self._is_explicit_returning = bool(compiled.statement._returning)
            self._is_implicit_returning = bool(
                compiled.returning and not compiled.statement._returning)

        if not parameters:
            self.compiled_parameters = [compiled.construct_params()]
        else:
            self.compiled_parameters = \
                [compiled.construct_params(m, _group_number=grp) for
                 grp, m in enumerate(parameters)]

            self.executemany = len(parameters) > 1

        self.cursor = self.create_cursor()
        if self.isinsert or self.isupdate:
            self.postfetch_cols = self.compiled.postfetch
            self.prefetch_cols = self.compiled.prefetch
            self.__process_defaults()

        processors = compiled._bind_processors

        # Convert the dictionary of bind parameter values
        # into a dict or list to be sent to the DBAPI's
        # execute() or executemany() method.
        parameters = []
        if dialect.positional:
            for compiled_params in self.compiled_parameters:
                param = []
                for key in self.compiled.positiontup:
                    if key in processors:
                        param.append(processors[key](compiled_params[key]))
                    else:
                        param.append(compiled_params[key])
                parameters.append(dialect.execute_sequence_format(param))
        else:
            encode = not dialect.supports_unicode_statements
            for compiled_params in self.compiled_parameters:
                param = {}
                if encode:
                    for key in compiled_params:
                        if key in processors:
                            param[dialect._encoder(key)[0]] = \
                                processors[key](compiled_params[key])
                        else:
                            param[dialect._encoder(key)[0]] = \
                                compiled_params[key]
                else:
                    for key in compiled_params:
                        if key in processors:
                            param[key] = processors[key](compiled_params[key])
                        else:
                            param[key] = compiled_params[key]
                parameters.append(param)
        self.parameters = dialect.execute_sequence_format(parameters)

        return self
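
    # Editor's note (illustrative; not part of the original module): with a
    # positional paramstyle such as "qmark", the conversion above turns a
    # compiled_parameters entry like {'x': 1, 'y': 2} into the tuple (1, 2),
    # ordered by compiled.positiontup; with a named paramstyle it remains a
    # dict, with keys (and values) run through any bind processors.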

    @classmethod
    def _init_statement(cls, dialect, connection, dbapi_connection,
                        statement, parameters):
        """Initialize execution context for a string SQL statement."""

        self = cls.__new__(cls)
        self.dialect = dialect
        self.root_connection = connection
        self._dbapi_connection = dbapi_connection
        self.engine = connection.engine

        # plain text statement
        self.execution_options = connection._execution_options

        if not parameters:
            if self.dialect.positional:
                self.parameters = [dialect.execute_sequence_format()]
            else:
                self.parameters = [{}]
        elif isinstance(parameters[0], dialect.execute_sequence_format):
            self.parameters = parameters
        elif isinstance(parameters[0], dict):
            if dialect.supports_unicode_statements:
                self.parameters = parameters
            else:
                self.parameters = [
                    dict((dialect._encoder(k)[0], d[k]) for k in d)
                    for d in parameters
                ] or [{}]
        else:
            self.parameters = [dialect.execute_sequence_format(p)
                               for p in parameters]

        self.executemany = len(parameters) > 1

        if not dialect.supports_unicode_statements and \
                isinstance(statement, unicode):
            self.unicode_statement = statement
            self.statement = dialect._encoder(statement)[0]
        else:
            self.statement = self.unicode_statement = statement

        self.cursor = self.create_cursor()
        return self

    @classmethod
    def _init_default(cls, dialect, connection, dbapi_connection):
        """Initialize execution context for a ColumnDefault construct."""

        self = cls.__new__(cls)
        self.dialect = dialect
        self.root_connection = connection
        self._dbapi_connection = dbapi_connection
        self.engine = connection.engine
        self.execution_options = connection._execution_options
        self.cursor = self.create_cursor()
        return self

    @util.memoized_property
    def no_parameters(self):
        return self.execution_options.get("no_parameters", False)

    @util.memoized_property
    def is_crud(self):
        return self.isinsert or self.isupdate or self.isdelete

    @util.memoized_property
    def should_autocommit(self):
        autocommit = self.execution_options.get('autocommit',
                                                not self.compiled and
                                                self.statement and
                                                expression.PARSE_AUTOCOMMIT
                                                or False)

        if autocommit is expression.PARSE_AUTOCOMMIT:
            return self.should_autocommit_text(self.unicode_statement)
        else:
            return autocommit

    def _execute_scalar(self, stmt, type_):
        """Execute a string statement on the current cursor, returning a
        scalar result.

        Used to fire off sequences, default phrases, and "select lastrowid"
        types of statements individually or in the context of a parent INSERT
        or UPDATE statement.

        """

        conn = self.root_connection
        if isinstance(stmt, unicode) and \
                not self.dialect.supports_unicode_statements:
            stmt = self.dialect._encoder(stmt)[0]

        if self.dialect.positional:
            default_params = self.dialect.execute_sequence_format()
        else:
            default_params = {}

        conn._cursor_execute(self.cursor, stmt, default_params, context=self)
        r = self.cursor.fetchone()[0]
        if type_ is not None:
            # apply type post processors to the result
            proc = type_._cached_result_processor(
                self.dialect,
                self.cursor.description[0][1]
            )
            if proc:
                return proc(r)
        return r

    @property
    def connection(self):
        return self.root_connection._branch()

    def should_autocommit_text(self, statement):
        return AUTOCOMMIT_REGEXP.match(statement)

    def create_cursor(self):
        return self._dbapi_connection.cursor()

    def pre_exec(self):
        pass

    def post_exec(self):
        pass

    def get_result_processor(self, type_, colname, coltype):
        """Return a 'result processor' for a given type as present in
        cursor.description.

        This has a default implementation that dialects can override
        for context-sensitive result type handling.

        """
        return type_._cached_result_processor(self.dialect, coltype)

    def get_lastrowid(self):
        """return self.cursor.lastrowid, or equivalent, after an INSERT.

        This may involve calling special cursor functions,
        issuing a new SELECT on the cursor (or a new one),
        or returning a stored value that was
        calculated within post_exec().

        This function will only be called for dialects
        which support "implicit" primary key generation,
        keep preexecute_autoincrement_sequences set to False,
        and when no explicit id value was bound to the
        statement.

        The function is called once, directly after
        post_exec() and before the transaction is committed
        or ResultProxy is generated.  If the post_exec()
        method assigns a value to `self._lastrowid`, the
        value is used in place of calling get_lastrowid().

        Note that this method is *not* equivalent to the
        ``lastrowid`` method on ``ResultProxy``, which is a
        direct proxy to the DBAPI ``lastrowid`` accessor
        in all cases.

        """
        return self.cursor.lastrowid

    def handle_dbapi_exception(self, e):
        pass

    def get_result_proxy(self):
        return result.ResultProxy(self)

    @property
    def rowcount(self):
        return self.cursor.rowcount

    def supports_sane_rowcount(self):
        return self.dialect.supports_sane_rowcount

    def supports_sane_multi_rowcount(self):
        return self.dialect.supports_sane_multi_rowcount

    def post_insert(self):
        if not self._is_implicit_returning and \
                not self._is_explicit_returning and \
                not self.compiled.inline and \
                self.dialect.postfetch_lastrowid and \
                (not self.inserted_primary_key or
                 None in self.inserted_primary_key):

            table = self.compiled.statement.table
            lastrowid = self.get_lastrowid()
            autoinc_col = table._autoincrement_column
            if autoinc_col is not None:
                # apply type post processors to the lastrowid
                proc = autoinc_col.type._cached_result_processor(
                    self.dialect, None)
                if proc is not None:
                    lastrowid = proc(lastrowid)

            self.inserted_primary_key = [
                lastrowid if c is autoinc_col else v
                for c, v in zip(
                    table.primary_key,
                    self.inserted_primary_key)
            ]

    def _fetch_implicit_returning(self, resultproxy):
        table = self.compiled.statement.table
        row = resultproxy.fetchone()

        ipk = []
        for c, v in zip(table.primary_key, self.inserted_primary_key):
            if v is not None:
                ipk.append(v)
            else:
                ipk.append(row[c])

        self.inserted_primary_key = ipk

    def lastrow_has_defaults(self):
        return (self.isinsert or self.isupdate) and \
            bool(self.postfetch_cols)

    def set_input_sizes(self, translate=None, exclude_types=None):
        """Given a cursor and ClauseParameters, call the appropriate
        style of ``setinputsizes()`` on the cursor, using DB-API types
        from the bind parameter's ``TypeEngine`` objects.

        This method is only called by those dialects which require it,
        currently cx_oracle.

        """

        if not hasattr(self.compiled, 'bind_names'):
            return

        types = dict(
            (self.compiled.bind_names[bindparam], bindparam.type)
            for bindparam in self.compiled.bind_names)

        if self.dialect.positional:
            inputsizes = []
            for key in self.compiled.positiontup:
                typeengine = types[key]
                dbtype = typeengine.dialect_impl(self.dialect).\
                    get_dbapi_type(self.dialect.dbapi)
                if dbtype is not None and \
                        (not exclude_types or dbtype not in exclude_types):
                    inputsizes.append(dbtype)
            try:
                self.cursor.setinputsizes(*inputsizes)
            except Exception, e:
                self.root_connection._handle_dbapi_exception(
                    e, None, None, None, self)
        else:
            inputsizes = {}
            for key in self.compiled.bind_names.values():
                typeengine = types[key]
                dbtype = typeengine.dialect_impl(self.dialect).\
                    get_dbapi_type(self.dialect.dbapi)
                if dbtype is not None and \
                        (not exclude_types or dbtype not in exclude_types):
                    if translate:
                        key = translate.get(key, key)
                    if not self.dialect.supports_unicode_binds:
                        key = self.dialect._encoder(key)[0]
                    inputsizes[key] = dbtype
            try:
                self.cursor.setinputsizes(**inputsizes)
            except Exception, e:
                self.root_connection._handle_dbapi_exception(
                    e, None, None, None, self)

    def _exec_default(self, default, type_):
        if default.is_sequence:
            return self.fire_sequence(default, type_)
        elif default.is_callable:
            return default.arg(self)
        elif default.is_clause_element:
            # TODO: expensive branching here should be
            # pulled into _exec_scalar()
            conn = self.connection
            c = expression.select([default.arg]).compile(bind=conn)
            return conn._execute_compiled(c, (), {}).scalar()
        else:
            return default.arg

    def get_insert_default(self, column):
        if column.default is None:
            return None
        else:
            return self._exec_default(column.default, column.type)

    def get_update_default(self, column):
        if column.onupdate is None:
            return None
        else:
            return self._exec_default(column.onupdate, column.type)

    def __process_defaults(self):
        """Generate default values for compiled insert/update statements,
        and generate inserted_primary_key collection.
        """

        if self.executemany:
            if len(self.compiled.prefetch):
                scalar_defaults = {}

                # pre-determine scalar Python-side defaults
                # to avoid many calls of get_insert_default()/
                # get_update_default()
                for c in self.prefetch_cols:
                    if self.isinsert and c.default and c.default.is_scalar:
                        scalar_defaults[c] = c.default.arg
                    elif self.isupdate and c.onupdate and c.onupdate.is_scalar:
                        scalar_defaults[c] = c.onupdate.arg

                for param in self.compiled_parameters:
                    self.current_parameters = param
                    for c in self.prefetch_cols:
                        if c in scalar_defaults:
                            val = scalar_defaults[c]
                        elif self.isinsert:
                            val = self.get_insert_default(c)
                        else:
                            val = self.get_update_default(c)
                        if val is not None:
                            param[c.key] = val
                    del self.current_parameters
        else:
            self.current_parameters = compiled_parameters = \
                self.compiled_parameters[0]

            for c in self.compiled.prefetch:
                if self.isinsert:
                    val = self.get_insert_default(c)
                else:
                    val = self.get_update_default(c)

                if val is not None:
                    compiled_parameters[c.key] = val
            del self.current_parameters

            if self.isinsert:
                self.inserted_primary_key = [
                    self.compiled_parameters[0].get(c.key, None)
                    for c in self.compiled.statement.table.primary_key
                ]


DefaultDialect.execution_ctx_cls = DefaultExecutionContext
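
# --- Editor's note (illustrative; not part of the original module) ---
# The attribute is assigned after both class bodies because the two classes
# refer to one another.  A concrete dialect typically swaps in its own
# context subclass; a hypothetical sketch:
#
#     class MyExecutionContext(DefaultExecutionContext):
#         def fire_sequence(self, seq, type_):
#             return self._execute_scalar(
#                 "SELECT nextval('%s')" % seq.name, type_)
#
#     class MyDialect(DefaultDialect):
#         execution_ctx_cls = MyExecutionContext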
925
lib/sqlalchemy/engine/interfaces.py
Normal file
@@ -0,0 +1,925 @@
|
||||
# engine/interfaces.py
|
||||
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Define core interfaces used by the engine system."""
|
||||
|
||||
from .. import util, event, events
|
||||
|
||||
|
||||
class Dialect(object):
|
||||
"""Define the behavior of a specific database and DB-API combination.
|
||||
|
||||
Any aspect of metadata definition, SQL query generation,
|
||||
execution, result-set handling, or anything else which varies
|
||||
between databases is defined under the general category of the
|
||||
Dialect. The Dialect acts as a factory for other
|
||||
database-specific object implementations including
|
||||
ExecutionContext, Compiled, DefaultGenerator, and TypeEngine.
|
||||
|
||||
All Dialects implement the following attributes:
|
||||
|
||||
name
|
||||
identifying name for the dialect from a DBAPI-neutral point of view
|
||||
(i.e. 'sqlite')
|
||||
|
||||
driver
|
||||
identifying name for the dialect's DBAPI
|
||||
|
||||
positional
|
||||
True if the paramstyle for this Dialect is positional.
|
||||
|
||||
paramstyle
|
||||
the paramstyle to be used (some DB-APIs support multiple
|
||||
paramstyles).
|
||||
|
||||
convert_unicode
|
||||
True if Unicode conversion should be applied to all ``str``
|
||||
types.
|
||||
|
||||
encoding
|
||||
type of encoding to use for unicode, usually defaults to
|
||||
'utf-8'.
|
||||
|
||||
    statement_compiler
      a :class:`.Compiled` class used to compile SQL statements

    ddl_compiler
      a :class:`.Compiled` class used to compile DDL statements

    server_version_info
      a tuple containing a version number for the DB backend in use.
      This value is only available for supporting dialects, and is
      typically populated during the initial connection to the database.

    default_schema_name
      the name of the default schema.  This value is only available for
      supporting dialects, and is typically populated during the
      initial connection to the database.

    execution_ctx_cls
      a :class:`.ExecutionContext` class used to handle statement execution

    execute_sequence_format
      either the 'tuple' or 'list' type, depending on what cursor.execute()
      accepts for the second argument (they vary).

    preparer
      a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to
      quote identifiers.

    supports_alter
      ``True`` if the database supports ``ALTER TABLE``.

    max_identifier_length
      The maximum length of identifier names.

    supports_unicode_statements
      Indicate whether the DB-API can receive SQL statements as Python
      unicode strings

    supports_unicode_binds
      Indicate whether the DB-API can receive string bind parameters
      as Python unicode strings

    supports_sane_rowcount
      Indicate whether the dialect properly implements rowcount for
      ``UPDATE`` and ``DELETE`` statements.

    supports_sane_multi_rowcount
      Indicate whether the dialect properly implements rowcount for
      ``UPDATE`` and ``DELETE`` statements when executed via
      executemany.

    preexecute_autoincrement_sequences
      True if 'implicit' primary key functions must be executed separately
      in order to get their value.  This is currently oriented towards
      Postgresql.

    implicit_returning
      use RETURNING or equivalent during INSERT execution in order to load
      newly generated primary keys and other column defaults in one
      execution, which are then available via inserted_primary_key.
      If an insert statement has returning() specified explicitly,
      the "implicit" functionality is not used and inserted_primary_key
      will not be available.

    dbapi_type_map
      A mapping of DB-API type objects present in this Dialect's
      DB-API implementation mapped to TypeEngine implementations used
      by the dialect.

      This is used to apply types to result sets based on the DB-API
      types present in cursor.description; it only takes effect for
      result sets against textual statements where no explicit
      typemap was present.

    colspecs
      A dictionary of TypeEngine classes from sqlalchemy.types mapped
      to subclasses that are specific to the dialect class.  This
      dictionary is class-level only and is not accessed from the
      dialect instance itself.

    supports_default_values
      Indicates if the construct ``INSERT INTO tablename DEFAULT
      VALUES`` is supported

    supports_sequences
      Indicates if the dialect supports CREATE SEQUENCE or similar.

    sequences_optional
      If True, indicates if the "optional" flag on the Sequence() construct
      should signal to not generate a CREATE SEQUENCE.  Applies only to
      dialects that support sequences.  Currently used only to allow
      Postgresql SERIAL to be used on a column that specifies Sequence()
      for usage on other backends.

    supports_native_enum
      Indicates if the dialect supports a native ENUM construct.
      This will prevent types.Enum from generating a CHECK
      constraint when that type is used.

    supports_native_boolean
      Indicates if the dialect supports a native boolean construct.
      This will prevent types.Boolean from generating a CHECK
      constraint when that type is used.

    """

    def create_connect_args(self, url):
        """Build DB-API compatible connection arguments.

        Given a :class:`~sqlalchemy.engine.url.URL` object, returns a tuple
        consisting of a `*args`/`**kwargs` suitable to send directly
        to the dbapi's connect function.

        """

        raise NotImplementedError()

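    # A minimal sketch (not from this file) of how a concrete dialect commonly
    # implements this hook, mapping URL fields onto the DBAPI connect() call;
    # the keyword renames shown are illustrative:
    #
    #     def create_connect_args(self, url):
    #         opts = url.translate_connect_args(username='user')
    #         opts.update(url.query)
    #         return [], opts
    #
    # translate_connect_args() is the helper on
    # :class:`~sqlalchemy.engine.url.URL` intended for this purpose.
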
    @classmethod
    def type_descriptor(cls, typeobj):
        """Transform a generic type to a dialect-specific type.

        Dialect classes will usually use the
        :func:`.types.adapt_type` function in the types module to
        accomplish this.

        The returned result is cached *per dialect class* so can
        contain no dialect-instance state.

        """

        raise NotImplementedError()

    def initialize(self, connection):
        """Called during strategized creation of the dialect with a
        connection.

        Allows dialects to configure options based on server version info or
        other properties.

        The connection passed here is a SQLAlchemy Connection object,
        with full capabilities.

        The initialize() method of the base dialect should be called via
        super().

        """

        pass

    def reflecttable(self, connection, table, include_columns=None):
        """Load table description from the database.

        Given a :class:`.Connection` and a
        :class:`~sqlalchemy.schema.Table` object, reflect its columns and
        properties from the database.  If include_columns (a list or
        set) is specified, limit the autoload to the given column
        names.

        The default implementation uses the
        :class:`~sqlalchemy.engine.reflection.Inspector` interface to
        provide the output, building upon the granular table/column/
        constraint etc. methods of :class:`.Dialect`.

        """

        raise NotImplementedError()

    def get_columns(self, connection, table_name, schema=None, **kw):
        """Return information about columns in `table_name`.

        Given a :class:`.Connection`, a string
        `table_name`, and an optional string `schema`, return column
        information as a list of dictionaries with these keys:

        name
          the column's name

        type
          [sqlalchemy.types#TypeEngine]

        nullable
          boolean

        default
          the column's default value

        autoincrement
          boolean

        sequence
          a dictionary of the form
              {'name': str, 'start': int, 'increment': int}

        Additional column attributes may be present.
        """

        raise NotImplementedError()

    def get_primary_keys(self, connection, table_name, schema=None, **kw):
        """Return information about primary keys in `table_name`.

        Deprecated.  This method is only called by the default
        implementation of :meth:`.Dialect.get_pk_constraint`.  Dialects
        should instead implement the :meth:`.Dialect.get_pk_constraint`
        method directly.

        """

        raise NotImplementedError()

    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        """Return information about the primary key constraint on
        `table_name`.

        Given a :class:`.Connection`, a string
        `table_name`, and an optional string `schema`, return primary
        key information as a dictionary with these keys:

        constrained_columns
          a list of column names that make up the primary key

        name
          optional name of the primary key constraint.

        """
        raise NotImplementedError()

    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        """Return information about foreign_keys in `table_name`.

        Given a :class:`.Connection`, a string
        `table_name`, and an optional string `schema`, return foreign
        key information as a list of dicts with these keys:

        name
          the constraint's name

        constrained_columns
          a list of column names that make up the foreign key

        referred_schema
          the name of the referred schema

        referred_table
          the name of the referred table

        referred_columns
          a list of column names in the referred table that correspond to
          constrained_columns
        """

        raise NotImplementedError()

    def get_table_names(self, connection, schema=None, **kw):
        """Return a list of table names for `schema`."""

        raise NotImplementedError()

    def get_view_names(self, connection, schema=None, **kw):
        """Return a list of all view names available in the database.

        schema:
          Optional, retrieve names from a non-default schema.
        """

        raise NotImplementedError()

    def get_view_definition(self, connection, view_name, schema=None, **kw):
        """Return view definition.

        Given a :class:`.Connection`, a string
        `view_name`, and an optional string `schema`, return the view
        definition.
        """

        raise NotImplementedError()

    def get_indexes(self, connection, table_name, schema=None, **kw):
        """Return information about indexes in `table_name`.

        Given a :class:`.Connection`, a string
        `table_name` and an optional string `schema`, return index
        information as a list of dictionaries with these keys:

        name
          the index's name

        column_names
          list of column names in order

        unique
          boolean
        """

        raise NotImplementedError()

    def normalize_name(self, name):
        """convert the given name to lowercase if it is detected as
        case insensitive.

        this method is only used if the dialect defines
        requires_name_normalize=True.

        """
        raise NotImplementedError()

    def denormalize_name(self, name):
        """convert the given name to a case insensitive identifier
        for the backend if it is an all-lowercase name.

        this method is only used if the dialect defines
        requires_name_normalize=True.

        """
        raise NotImplementedError()

    def has_table(self, connection, table_name, schema=None):
        """Check the existence of a particular table in the database.

        Given a :class:`.Connection` object and a string
        `table_name`, return True if the given table (possibly within
        the specified `schema`) exists in the database, False
        otherwise.
        """

        raise NotImplementedError()

    def has_sequence(self, connection, sequence_name, schema=None):
        """Check the existence of a particular sequence in the database.

        Given a :class:`.Connection` object and a string
        `sequence_name`, return True if the given sequence exists in
        the database, False otherwise.
        """

        raise NotImplementedError()

    def _get_server_version_info(self, connection):
        """Retrieve the server version info from the given connection.

        This is used by the default implementation to populate the
        "server_version_info" attribute and is called exactly
        once upon first connect.

        """

        raise NotImplementedError()

    def _get_default_schema_name(self, connection):
        """Return the string name of the currently selected schema from
        the given connection.

        This is used by the default implementation to populate the
        "default_schema_name" attribute and is called exactly
        once upon first connect.

        """

        raise NotImplementedError()

    def do_begin(self, dbapi_connection):
        """Provide an implementation of ``connection.begin()``, given a
        DB-API connection.

        The DBAPI has no dedicated "begin" method and it is expected
        that transactions are implicit.  This hook is provided for those
        DBAPIs that might need additional help in this area.

        Note that :meth:`.Dialect.do_begin` is not called unless a
        :class:`.Transaction` object is in use.  The
        :meth:`.Dialect.do_autocommit`
        hook is provided for DBAPIs that need some extra commands emitted
        after a commit in order to enter the next transaction, when the
        SQLAlchemy :class:`.Connection` is used in its default "autocommit"
        mode.

        :param dbapi_connection: a DBAPI connection, typically
         proxied within a :class:`.ConnectionFairy`.

        """

        raise NotImplementedError()

    def do_rollback(self, dbapi_connection):
        """Provide an implementation of ``connection.rollback()``, given
        a DB-API connection.

        :param dbapi_connection: a DBAPI connection, typically
         proxied within a :class:`.ConnectionFairy`.

        """

        raise NotImplementedError()

    def do_commit(self, dbapi_connection):
        """Provide an implementation of ``connection.commit()``, given a
        DB-API connection.

        :param dbapi_connection: a DBAPI connection, typically
         proxied within a :class:`.ConnectionFairy`.

        """

        raise NotImplementedError()

    def do_close(self, dbapi_connection):
        """Provide an implementation of ``connection.close()``, given a DBAPI
        connection.

        This hook is called by the :class:`.Pool` when a connection has been
        detached from the pool, or is being returned beyond the normal
        capacity of the pool.

        .. versionadded:: 0.8

        """

        raise NotImplementedError()

    def create_xid(self):
        """Create a two-phase transaction ID.

        This id will be passed to do_begin_twophase(),
        do_rollback_twophase(), do_commit_twophase().  Its format is
        unspecified.
        """

        raise NotImplementedError()

    def do_savepoint(self, connection, name):
        """Create a savepoint with the given name.

        :param connection: a :class:`.Connection`.
        :param name: savepoint name.

        """

        raise NotImplementedError()

    def do_rollback_to_savepoint(self, connection, name):
        """Rollback a connection to the named savepoint.

        :param connection: a :class:`.Connection`.
        :param name: savepoint name.

        """

        raise NotImplementedError()

    def do_release_savepoint(self, connection, name):
        """Release the named savepoint on a connection.

        :param connection: a :class:`.Connection`.
        :param name: savepoint name.
        """

        raise NotImplementedError()

    def do_begin_twophase(self, connection, xid):
        """Begin a two phase transaction on the given connection.

        :param connection: a :class:`.Connection`.
        :param xid: xid

        """

        raise NotImplementedError()

    def do_prepare_twophase(self, connection, xid):
        """Prepare a two phase transaction on the given connection.

        :param connection: a :class:`.Connection`.
        :param xid: xid

        """

        raise NotImplementedError()

    def do_rollback_twophase(self, connection, xid, is_prepared=True,
                             recover=False):
        """Rollback a two phase transaction on the given connection.

        :param connection: a :class:`.Connection`.
        :param xid: xid
        :param is_prepared: whether or not
         :meth:`.TwoPhaseTransaction.prepare` was called.
        :param recover: if the recover flag was passed.

        """

        raise NotImplementedError()

    def do_commit_twophase(self, connection, xid, is_prepared=True,
                           recover=False):
        """Commit a two phase transaction on the given connection.

        :param connection: a :class:`.Connection`.
        :param xid: xid
        :param is_prepared: whether or not
         :meth:`.TwoPhaseTransaction.prepare` was called.
        :param recover: if the recover flag was passed.

        """

        raise NotImplementedError()

    def do_recover_twophase(self, connection):
        """Recover list of uncommitted prepared two phase transaction
        identifiers on the given connection.

        :param connection: a :class:`.Connection`.

        """

        raise NotImplementedError()

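    # A sketch of how the two-phase hooks above fit together when a
    # TwoPhaseTransaction is in play (the call ordering follows the
    # docstrings above; the concrete dialect supplies the actual SQL):
    #
    #     xid = dialect.create_xid()
    #     dialect.do_begin_twophase(connection, xid)
    #     # ... statements execute on the connection ...
    #     dialect.do_prepare_twophase(connection, xid)
    #     dialect.do_commit_twophase(connection, xid, is_prepared=True)
    #
    # On crash recovery, do_recover_twophase() lists outstanding xids, which
    # can then be passed to do_commit_twophase() or do_rollback_twophase()
    # with recover=True.
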
    def do_executemany(self, cursor, statement, parameters, context=None):
        """Provide an implementation of ``cursor.executemany(statement,
        parameters)``."""

        raise NotImplementedError()

    def do_execute(self, cursor, statement, parameters, context=None):
        """Provide an implementation of ``cursor.execute(statement,
        parameters)``."""

        raise NotImplementedError()

    def do_execute_no_params(self, cursor, statement, parameters,
                             context=None):
        """Provide an implementation of ``cursor.execute(statement)``.

        The parameter collection should not be sent.

        """

        raise NotImplementedError()

    def is_disconnect(self, e, connection, cursor):
        """Return True if the given DB-API error indicates an invalid
        connection"""

        raise NotImplementedError()

    def connect(self):
        """return a callable which sets up a newly created DBAPI connection.

        The callable accepts a single argument "conn" which is the
        DBAPI connection itself.  It has no return value.

        This is used to set dialect-wide per-connection options such as
        isolation modes, unicode modes, etc.

        If a callable is returned, it will be assembled into a pool listener
        that receives the direct DBAPI connection, with all wrappers removed.

        If None is returned, no listener will be generated.

        """
        return None

    def reset_isolation_level(self, dbapi_conn):
        """Given a DBAPI connection, revert its isolation to the default."""

        raise NotImplementedError()

    def set_isolation_level(self, dbapi_conn, level):
        """Given a DBAPI connection, set its isolation level."""

        raise NotImplementedError()

    def get_isolation_level(self, dbapi_conn):
        """Given a DBAPI connection, return its isolation level."""

        raise NotImplementedError()


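# A minimal sketch of a Dialect subclass wiring up a few of the hooks above.
# 'MyDBAPIDialect' and the disconnect test are hypothetical; real dialects
# derive from default.DefaultDialect, which fills in most of this interface:
#
#     from sqlalchemy.engine import default
#
#     class MyDBAPIDialect(default.DefaultDialect):
#         name = 'mydbapi'
#
#         def do_rollback(self, dbapi_connection):
#             dbapi_connection.rollback()
#
#         def do_commit(self, dbapi_connection):
#             dbapi_connection.commit()
#
#         def is_disconnect(self, e, connection, cursor):
#             return 'connection closed' in str(e)
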
class ExecutionContext(object):
    """A messenger object for a Dialect that corresponds to a single
    execution.

    ExecutionContext should have these data members:

    connection
      Connection object which can be freely used by default value
      generators to execute SQL.  This Connection should reference the
      same underlying connection/transactional resources of
      root_connection.

    root_connection
      Connection object which is the source of this ExecutionContext.  This
      Connection may have close_with_result=True set, in which case it can
      only be used once.

    dialect
      dialect which created this ExecutionContext.

    cursor
      DB-API cursor procured from the connection,

    compiled
      if passed to constructor, sqlalchemy.engine.base.Compiled object
      being executed,

    statement
      string version of the statement to be executed.  Is either
      passed to the constructor, or must be created from the
      sql.Compiled object by the time pre_exec() has completed.

    parameters
      bind parameters passed to the execute() method.  For compiled
      statements, this is a dictionary or list of dictionaries.  For
      textual statements, it should be in a format suitable for the
      dialect's paramstyle (i.e. dict or list of dicts for non
      positional, list or list of lists/tuples for positional).

    isinsert
      True if the statement is an INSERT.

    isupdate
      True if the statement is an UPDATE.

    should_autocommit
      True if the statement is a "committable" statement.

    prefetch_cols
      a list of Column objects for which a client-side default
      was fired off.  Applies to inserts and updates.

    postfetch_cols
      a list of Column objects for which a server-side default or
      inline SQL expression value was fired off.  Applies to inserts
      and updates.
    """

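    # The lifecycle implied by the members and methods below, roughly
    # (a sketch of the calling sequence driven by Connection, pieced
    # together from the docstrings in this class; not literal code):
    #
    #     cursor = context.create_cursor()
    #     context.pre_exec()          # statement/parameters must be ready
    #     dialect.do_execute(cursor, context.statement, parameters, context)
    #     context.post_exec()         # defaults/last inserted ids available
    #     result = context.result()   # ResultProxy wrapping the cursor
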
    def create_cursor(self):
        """Return a new cursor generated from this ExecutionContext's
        connection.

        Some dialects may wish to change the behavior of
        connection.cursor(), such as postgresql which may return a PG
        "server side" cursor.
        """

        raise NotImplementedError()

    def pre_exec(self):
        """Called before an execution of a compiled statement.

        If a compiled statement was passed to this ExecutionContext,
        the `statement` and `parameters` datamembers must be
        initialized after this statement is complete.
        """

        raise NotImplementedError()

    def post_exec(self):
        """Called after the execution of a compiled statement.

        If a compiled statement was passed to this ExecutionContext,
        the `last_insert_ids`, `last_inserted_params`, etc.
        datamembers should be available after this method completes.
        """

        raise NotImplementedError()

    def result(self):
        """Return a result object corresponding to this ExecutionContext.

        Returns a ResultProxy.
        """

        raise NotImplementedError()

    def handle_dbapi_exception(self, e):
        """Receive a DBAPI exception which occurred upon execute, result
        fetch, etc."""

        raise NotImplementedError()

    def should_autocommit_text(self, statement):
        """Parse the given textual statement and return True if it refers to
        a "committable" statement"""

        raise NotImplementedError()

    def lastrow_has_defaults(self):
        """Return True if the last INSERT or UPDATE row contained
        inlined or database-side defaults.
        """

        raise NotImplementedError()

    def get_rowcount(self):
        """Return the DBAPI ``cursor.rowcount`` value, or in some
        cases an interpreted value.

        See :attr:`.ResultProxy.rowcount` for details on this.

        """

        raise NotImplementedError()


class Compiled(object):
    """Represent a compiled SQL or DDL expression.

    The ``__str__`` method of the ``Compiled`` object should produce
    the actual text of the statement.  ``Compiled`` objects are
    specific to their underlying database dialect, and also may
    or may not be specific to the columns referenced within a
    particular set of bind parameters.  In no case should the
    ``Compiled`` object be dependent on the actual values of those
    bind parameters, even though it may reference those values as
    defaults.
    """

    def __init__(self, dialect, statement, bind=None,
                 compile_kwargs=util.immutabledict()):
        """Construct a new ``Compiled`` object.

        :param dialect: ``Dialect`` to compile against.

        :param statement: ``ClauseElement`` to be compiled.

        :param bind: Optional Engine or Connection to compile this
          statement against.

        :param compile_kwargs: additional kwargs that will be
         passed to the initial call to :meth:`.Compiled.process`.

         .. versionadded:: 0.8

        """

        self.dialect = dialect
        self.bind = bind
        if statement is not None:
            self.statement = statement
            self.can_execute = statement.supports_execution
            self.string = self.process(self.statement, **compile_kwargs)

    @util.deprecated("0.7", ":class:`.Compiled` objects now compile "
                     "within the constructor.")
    def compile(self):
        """Produce the internal string representation of this element."""
        pass

    @property
    def sql_compiler(self):
        """Return a Compiled that is capable of processing SQL expressions.

        If this compiler is one, it would likely just return 'self'.

        """

        raise NotImplementedError()

    def process(self, obj, **kwargs):
        return obj._compiler_dispatch(self, **kwargs)

    def __str__(self):
        """Return the string text of the generated SQL or DDL."""

        return self.string or ''

    def construct_params(self, params=None):
        """Return the bind params for this compiled object.

        :param params: a dict of string/object pairs whose values will
                       override bind values compiled in to the
                       statement.
        """

        raise NotImplementedError()

    @property
    def params(self):
        """Return the bind params for this compiled object."""
        return self.construct_params()

    def execute(self, *multiparams, **params):
        """Execute this compiled object."""

        e = self.bind
        if e is None:
            raise exc.UnboundExecutionError(
                "This Compiled object is not bound to any Engine "
                "or Connection.")
        return e._execute_compiled(self, multiparams, params)

    def scalar(self, *multiparams, **params):
        """Execute this compiled object and return the result's
        scalar value."""

        return self.execute(*multiparams, **params).scalar()

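    # Typical usage of a Compiled object (a sketch; 'users' is a hypothetical
    # Table and 'engine' a configured Engine).  str() of the compiled object
    # yields the dialect-specific SQL text:
    #
    #     from sqlalchemy import select
    #     stmt = select([users])
    #     compiled = stmt.compile(dialect=engine.dialect)
    #     print compiled          # e.g. "SELECT users.id, ... FROM users"
    #     print compiled.params   # default bind parameter values
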
class TypeCompiler(object):
    """Produces DDL specification for TypeEngine objects."""

    def __init__(self, dialect):
        self.dialect = dialect

    def process(self, type_):
        return type_._compiler_dispatch(self)


class Connectable(object):
    """Interface for an object which supports execution of SQL constructs.

    The two implementations of :class:`.Connectable` are
    :class:`.Connection` and :class:`.Engine`.

    Connectable must also implement the 'dialect' member which references a
    :class:`.Dialect` instance.

    """

    dispatch = event.dispatcher(events.ConnectionEvents)

    def connect(self, **kwargs):
        """Return a :class:`.Connection` object.

        Depending on context, this may be ``self`` if this object
        is already an instance of :class:`.Connection`, or a newly
        procured :class:`.Connection` if this object is an instance
        of :class:`.Engine`.

        """

    def contextual_connect(self):
        """Return a :class:`.Connection` object which may be part of an
        ongoing context.

        Depending on context, this may be ``self`` if this object
        is already an instance of :class:`.Connection`, or a newly
        procured :class:`.Connection` if this object is an instance
        of :class:`.Engine`.

        """

        raise NotImplementedError()

    @util.deprecated("0.7",
                     "Use the create() method on the given schema "
                     "object directly, i.e. :meth:`.Table.create`, "
                     ":meth:`.Index.create`, :meth:`.MetaData.create_all`")
    def create(self, entity, **kwargs):
        """Emit CREATE statements for the given schema entity."""

        raise NotImplementedError()

    @util.deprecated("0.7",
                     "Use the drop() method on the given schema "
                     "object directly, i.e. :meth:`.Table.drop`, "
                     ":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
    def drop(self, entity, **kwargs):
        """Emit DROP statements for the given schema entity."""

        raise NotImplementedError()

    def execute(self, object, *multiparams, **params):
        """Executes the given construct and returns a
        :class:`.ResultProxy`."""
        raise NotImplementedError()

    def scalar(self, object, *multiparams, **params):
        """Executes and returns the first column of the first row.

        The underlying cursor is closed after execution.
        """
        raise NotImplementedError()

    def _run_visitor(self, visitorcallable, element,
                     **kwargs):
        raise NotImplementedError()

    def _execute_clauseelement(self, elem, multiparams=None, params=None):
        raise NotImplementedError()
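
    # Both implementations can be used interchangeably wherever a Connectable
    # is accepted (a sketch, assuming a configured 'engine'):
    #
    #     result = engine.execute("select 1")    # Engine as Connectable
    #     conn = engine.connect()
    #     result = conn.execute("select 1")      # Connection as Connectable
    #     conn.close()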
526
lib/sqlalchemy/engine/reflection.py
Normal file
@@ -0,0 +1,526 @@
# engine/reflection.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Provides an abstraction for obtaining database schema information.

Usage Notes:

Here are some general conventions when accessing the low level inspector
methods such as get_table_names, get_columns, etc.

1. Inspector methods return lists of dicts in most cases for the following
   reasons:

   * They're both standard types that can be serialized.
   * Using a dict instead of a tuple allows easy expansion of attributes.
   * Using a list for the outer structure maintains order and is easy to work
     with (e.g. list comprehension [d['name'] for d in cols]).

2. Records that contain a name, such as the column name in a column record
   use the key 'name'.  So for most return values, each record will have a
   'name' attribute.
"""

from .. import exc, sql
from .. import schema as sa_schema
from .. import util
from ..types import TypeEngine
from ..util import deprecated
from ..util import topological
from .. import inspection
from .base import Connectable


@util.decorator
def cache(fn, self, con, *args, **kw):
    # memoize the result of a reflection method in the caller-supplied
    # 'info_cache' dictionary, keyed on the method name plus its hashable
    # arguments; bypass caching entirely when no cache is supplied.
    info_cache = kw.get('info_cache', None)
    if info_cache is None:
        return fn(self, con, *args, **kw)
    key = (
        fn.__name__,
        tuple(a for a in args if isinstance(a, basestring)),
        tuple((k, v) for k, v in kw.iteritems()
              if isinstance(v, (basestring, int, float)))
    )
    ret = info_cache.get(key)
    if ret is None:
        ret = fn(self, con, *args, **kw)
        info_cache[key] = ret
    return ret

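# How the decorator above is used in practice: dialects wrap their reflection
# methods with it, and Inspector passes its per-instance dict as 'info_cache'
# so repeated calls with the same arguments hit the cache (a sketch following
# this module's own pattern):
#
#     @cache
#     def get_table_names(self, connection, schema=None, **kw):
#         ...  # expensive catalog query runs at most once per key
#
#     dialect.get_table_names(conn, schema, info_cache=inspector.info_cache)

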
class Inspector(object):
    """Performs database schema inspection.

    The Inspector acts as a proxy to the reflection methods of the
    :class:`~sqlalchemy.engine.interfaces.Dialect`, providing a
    consistent interface as well as caching support for previously
    fetched metadata.

    A :class:`.Inspector` object is usually created via the
    :func:`.inspect` function::

        from sqlalchemy import inspect, create_engine
        engine = create_engine('...')
        insp = inspect(engine)

    The inspection method above is equivalent to using the
    :meth:`.Inspector.from_engine` method, i.e.::

        engine = create_engine('...')
        insp = Inspector.from_engine(engine)

    Where above, the :class:`~sqlalchemy.engine.interfaces.Dialect` may opt
    to return an :class:`.Inspector` subclass that provides additional
    methods specific to the dialect's target database.

    """

    def __init__(self, bind):
        """Initialize a new :class:`.Inspector`.

        :param bind: a :class:`~sqlalchemy.engine.Connectable`,
          which is typically an instance of
          :class:`~sqlalchemy.engine.Engine` or
          :class:`~sqlalchemy.engine.Connection`.

        For a dialect-specific instance of :class:`.Inspector`, see
        :meth:`.Inspector.from_engine`

        """
        # this might not be a connection, it could be an engine.
        self.bind = bind

        # set the engine
        if hasattr(bind, 'engine'):
            self.engine = bind.engine
        else:
            self.engine = bind

        if self.engine is bind:
            # if engine, ensure initialized
            bind.connect().close()

        self.dialect = self.engine.dialect
        self.info_cache = {}

    @classmethod
    def from_engine(cls, bind):
        """Construct a new dialect-specific Inspector object from the given
        engine or connection.

        :param bind: a :class:`~sqlalchemy.engine.Connectable`,
          which is typically an instance of
          :class:`~sqlalchemy.engine.Engine` or
          :class:`~sqlalchemy.engine.Connection`.

        This method differs from a direct constructor call of
        :class:`.Inspector` in that the
        :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance to
        provide a dialect-specific :class:`.Inspector` instance, which may
        provide additional methods.

        See the example at :class:`.Inspector`.

        """
        if hasattr(bind.dialect, 'inspector'):
            return bind.dialect.inspector(bind)
        return Inspector(bind)

    @inspection._inspects(Connectable)
    def _insp(bind):
        return Inspector.from_engine(bind)

    @property
    def default_schema_name(self):
        """Return the default schema name presented by the dialect
        for the current engine's database user.

        E.g. this is typically ``public`` for Postgresql and ``dbo``
        for SQL Server.

        """
        return self.dialect.default_schema_name

    def get_schema_names(self):
        """Return all schema names.
        """

        if hasattr(self.dialect, 'get_schema_names'):
            return self.dialect.get_schema_names(
                self.bind, info_cache=self.info_cache)
        return []

    def get_table_names(self, schema=None, order_by=None):
        """Return all table names within a particular schema.

        The names are expected to be real tables only, not views.
        Views are instead returned using the :meth:`.get_view_names`
        method.

        :param schema: Schema name.  If ``schema`` is left at ``None``, the
         database's default schema is
         used, else the named schema is searched.  If the database does not
         support named schemas, behavior is undefined if ``schema`` is not
         passed as ``None``.

        :param order_by: Optional, may be the string "foreign_key" to sort
         the result on foreign key dependencies.

         .. versionchanged:: 0.8 the "foreign_key" sorting sorts tables
            in order of dependee to dependent; that is, in creation
            order, rather than in drop order.  This is to maintain
            consistency with similar features such as
            :attr:`.MetaData.sorted_tables` and :func:`.util.sort_tables`.

        .. seealso::

            :attr:`.MetaData.sorted_tables`

        """

        if hasattr(self.dialect, 'get_table_names'):
            tnames = self.dialect.get_table_names(
                self.bind, schema, info_cache=self.info_cache)
        else:
            tnames = self.engine.table_names(schema)
        if order_by == 'foreign_key':
            tuples = []
            for tname in tnames:
                for fkey in self.get_foreign_keys(tname, schema):
                    if tname != fkey['referred_table']:
                        tuples.append((fkey['referred_table'], tname))
            tnames = list(topological.sort(tuples, tnames))
        return tnames

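    # Usage sketch (assumes a populated database): creation-order sorting
    # puts referenced tables before the tables that reference them:
    #
    #     insp = Inspector.from_engine(engine)
    #     names = insp.get_table_names(order_by='foreign_key')
    #     # e.g. ['user', 'address'] where address has a FK to user
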
    def get_table_options(self, table_name, schema=None, **kw):
        """Return a dictionary of options specified when the table of the
        given name was created.

        This currently includes some options that apply to MySQL tables.

        """
        if hasattr(self.dialect, 'get_table_options'):
            return self.dialect.get_table_options(
                self.bind, table_name, schema,
                info_cache=self.info_cache, **kw)
        return {}

    def get_view_names(self, schema=None):
        """Return all view names in `schema`.

        :param schema: Optional, retrieve names from a non-default schema.
        """

        return self.dialect.get_view_names(
            self.bind, schema, info_cache=self.info_cache)

    def get_view_definition(self, view_name, schema=None):
        """Return definition for `view_name`.

        :param schema: Optional, retrieve names from a non-default schema.
        """

        return self.dialect.get_view_definition(
            self.bind, view_name, schema, info_cache=self.info_cache)

    def get_columns(self, table_name, schema=None, **kw):
        """Return information about columns in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        column information as a list of dicts with these keys:

        name
          the column's name

        type
          :class:`~sqlalchemy.types.TypeEngine`

        nullable
          boolean

        default
          the column's default value

        attrs
          dict containing optional column attributes
        """

        col_defs = self.dialect.get_columns(self.bind, table_name, schema,
                                            info_cache=self.info_cache,
                                            **kw)
        for col_def in col_defs:
            # make this easy and only return instances for coltype
            coltype = col_def['type']
            if not isinstance(coltype, TypeEngine):
                col_def['type'] = coltype()
        return col_defs

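    # Usage sketch, iterating the dicts described above ('user' is a
    # hypothetical table name):
    #
    #     insp = Inspector.from_engine(engine)
    #     for col in insp.get_columns('user'):
    #         print col['name'], col['type'], col['nullable']
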
    @deprecated('0.7', 'Call to deprecated method get_primary_keys.'
                '  Use get_pk_constraint instead.')
    def get_primary_keys(self, table_name, schema=None, **kw):
        """Return information about primary keys in `table_name`.

        Given a string `table_name`, and an optional string `schema`, return
        primary key information as a list of column names.
        """

        return self.dialect.get_pk_constraint(self.bind, table_name, schema,
                                              info_cache=self.info_cache,
                                              **kw)['constrained_columns']

    def get_pk_constraint(self, table_name, schema=None, **kw):
        """Return information about primary key constraint on `table_name`.

        Given a string `table_name`, and an optional string `schema`, return
        primary key information as a dictionary with these keys:

        constrained_columns
          a list of column names that make up the primary key

        name
          optional name of the primary key constraint.

        """
        return self.dialect.get_pk_constraint(self.bind, table_name, schema,
                                              info_cache=self.info_cache,
                                              **kw)

    def get_foreign_keys(self, table_name, schema=None, **kw):
        """Return information about foreign_keys in `table_name`.

        Given a string `table_name`, and an optional string `schema`, return
        foreign key information as a list of dicts with these keys:

        constrained_columns
          a list of column names that make up the foreign key

        referred_schema
          the name of the referred schema

        referred_table
          the name of the referred table

        referred_columns
          a list of column names in the referred table that correspond to
          constrained_columns

        name
          optional name of the foreign key constraint.

        \**kw
          other options passed to the dialect's get_foreign_keys() method.

        """

        return self.dialect.get_foreign_keys(self.bind, table_name, schema,
                                             info_cache=self.info_cache,
                                             **kw)

    def get_indexes(self, table_name, schema=None, **kw):
        """Return information about indexes in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        index information as a list of dicts with these keys:

        name
          the index's name

        column_names
          list of column names in order

        unique
          boolean

        \**kw
          other options passed to the dialect's get_indexes() method.
        """

        return self.dialect.get_indexes(self.bind, table_name,
                                        schema,
                                        info_cache=self.info_cache, **kw)

    def reflecttable(self, table, include_columns, exclude_columns=()):
        """Given a Table object, load its internal constructs based on
        introspection.

        This is the underlying method used by most dialects to produce
        table reflection.  Direct usage is like::

            from sqlalchemy import create_engine, MetaData, Table
            from sqlalchemy.engine import reflection

            engine = create_engine('...')
            meta = MetaData()
            user_table = Table('user', meta)
            insp = Inspector.from_engine(engine)
            insp.reflecttable(user_table, None)

        :param table: a :class:`~sqlalchemy.schema.Table` instance.
        :param include_columns: a list of string column names to include
          in the reflection process.  If ``None``, all columns are reflected.

        """
        dialect = self.bind.dialect

        # table attributes we might need.
        reflection_options = dict(
            (k, table.kwargs.get(k))
            for k in dialect.reflection_options if k in table.kwargs)

        schema = table.schema
        table_name = table.name

        # apply table options
        tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
        if tbl_opts:
            table.kwargs.update(tbl_opts)

        # table.kwargs will need to be passed to each reflection method.
        # Make sure keywords are strings.
        tblkw = table.kwargs.copy()
        for (k, v) in tblkw.items():
            del tblkw[k]
            tblkw[str(k)] = v

        # Py2K
        if isinstance(schema, str):
            schema = schema.decode(dialect.encoding)
        if isinstance(table_name, str):
            table_name = table_name.decode(dialect.encoding)
        # end Py2K

        # columns
        found_table = False
        cols_by_orig_name = {}

        for col_d in self.get_columns(table_name, schema, **tblkw):
            found_table = True
            orig_name = col_d['name']

            table.dispatch.column_reflect(self, table, col_d)

            name = col_d['name']
            if include_columns and name not in include_columns:
                continue
            if exclude_columns and name in exclude_columns:
                continue

            coltype = col_d['type']
            col_kw = {
                'nullable': col_d['nullable'],
            }
            for k in ('autoincrement', 'quote', 'info', 'key'):
                if k in col_d:
                    col_kw[k] = col_d[k]

            colargs = []
            if col_d.get('default') is not None:
                # the "default" value is assumed to be a literal SQL
                # expression, so is wrapped in text() so that no quoting
                # occurs on re-issuance.
                colargs.append(
                    sa_schema.DefaultClause(
                        sql.text(col_d['default']), _reflected=True
                    )
                )

            if 'sequence' in col_d:
                # TODO: mssql, maxdb and sybase are using this.
                seq = col_d['sequence']
                sequence = sa_schema.Sequence(seq['name'], 1, 1)
                if 'start' in seq:
                    sequence.start = seq['start']
                if 'increment' in seq:
                    sequence.increment = seq['increment']
                colargs.append(sequence)

            cols_by_orig_name[orig_name] = col = \
                sa_schema.Column(name, coltype, *colargs, **col_kw)

            table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # Primary keys
        pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
        if pk_cons:
            pk_cols = [
                cols_by_orig_name[pk]
                for pk in pk_cons['constrained_columns']
                if pk in cols_by_orig_name and pk not in exclude_columns
            ]
            pk_cols += [
                pk
                for pk in table.primary_key
                if pk.key in exclude_columns
            ]
            primary_key_constraint = sa_schema.PrimaryKeyConstraint(
                name=pk_cons.get('name'),
                *pk_cols
            )

            table.append_constraint(primary_key_constraint)

        # Foreign keys
        fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            constrained_columns = [
                cols_by_orig_name[c].key
                if c in cols_by_orig_name else c
                for c in fkey_d['constrained_columns']
            ]
            if exclude_columns and set(constrained_columns).intersection(
                    exclude_columns):
                continue
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            refspec = []
            if referred_schema is not None:
                sa_schema.Table(referred_table, table.metadata,
                                autoload=True, schema=referred_schema,
                                autoload_with=self.bind,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table, table.metadata,
                                autoload=True,
                                autoload_with=self.bind,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
                                               conname, link_to_name=True))

        # Indexes
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            sa_schema.Index(name, *[
                cols_by_orig_name[c] if c in cols_by_orig_name
                else table.c[c]
                for c in columns
            ],
                **dict(unique=unique))
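
    # The public equivalent of the method above is Table autoloading, which
    # routes through Inspector.reflecttable() internally (a sketch):
    #
    #     from sqlalchemy import MetaData, Table
    #     meta = MetaData()
    #     user_table = Table('user', meta,
    #                        autoload=True, autoload_with=engine)
    #     # columns, primary key, FKs and indexes are now populated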
997
lib/sqlalchemy/engine/result.py
Normal file
@@ -0,0 +1,997 @@
# engine/result.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Define result set constructs including :class:`.ResultProxy`
and :class:`.RowProxy`."""


from itertools import izip
from .. import exc, types, util
from ..sql import expression
import collections

# This reconstructor is necessary so that pickles with the C extension or
# without use the same Binary format.
try:
    # We need a different reconstructor on the C extension so that we can
    # add extra checks that fields have correctly been initialized by
    # __setstate__.
    from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor

    # The extra function embedding is needed so that the
    # reconstructor function has the same signature whether or not
    # the extension is present.
    def rowproxy_reconstructor(cls, state):
        return safe_rowproxy_reconstructor(cls, state)
except ImportError:
    def rowproxy_reconstructor(cls, state):
        obj = cls.__new__(cls)
        obj.__setstate__(state)
        return obj

try:
    from sqlalchemy.cresultproxy import BaseRowProxy
except ImportError:
    class BaseRowProxy(object):
        __slots__ = ('_parent', '_row', '_processors', '_keymap')

        def __init__(self, parent, row, processors, keymap):
            """RowProxy objects are constructed by ResultProxy objects."""

            self._parent = parent
            self._row = row
            self._processors = processors
            self._keymap = keymap

        def __reduce__(self):
            return (rowproxy_reconstructor,
                    (self.__class__, self.__getstate__()))

        def values(self):
            """Return the values represented by this RowProxy as a list."""
            return list(self)

        def __iter__(self):
            for processor, value in izip(self._processors, self._row):
                if processor is None:
                    yield value
                else:
                    yield processor(value)

        def __len__(self):
            return len(self._row)

        def __getitem__(self, key):
            try:
                processor, obj, index = self._keymap[key]
            except KeyError:
                processor, obj, index = self._parent._key_fallback(key)
            except TypeError:
                if isinstance(key, slice):
                    l = []
                    for processor, value in izip(self._processors[key],
                                                 self._row[key]):
                        if processor is None:
                            l.append(value)
                        else:
                            l.append(processor(value))
                    return tuple(l)
                else:
                    raise
            if index is None:
                raise exc.InvalidRequestError(
                    "Ambiguous column name '%s' in result set! "
                    "try 'use_labels' option on select statement." % key)
            if processor is not None:
                return processor(self._row[index])
            else:
                return self._row[index]

        def __getattr__(self, name):
            try:
                return self[name]
            except KeyError, e:
                raise AttributeError(e.args[0])


class RowProxy(BaseRowProxy):
    """Proxy values from a single cursor row.

    Mostly follows "ordered dictionary" behavior, mapping result
    values to the string-based column name, the integer position of
    the result in the row, as well as Column instances which can be
    mapped to the original Columns that produced this result set (for
    results that correspond to constructed SQL expressions).
    """
    __slots__ = ()

    def __contains__(self, key):
        return self._parent._has_key(self._row, key)

    def __getstate__(self):
        return {
            '_parent': self._parent,
            '_row': tuple(self)
        }

    def __setstate__(self, state):
        self._parent = parent = state['_parent']
        self._row = state['_row']
        self._processors = parent._processors
        self._keymap = parent._keymap

    __hash__ = None

    def __eq__(self, other):
        return other is self or other == tuple(self)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return repr(tuple(self))

    def has_key(self, key):
        """Return True if this RowProxy contains the given key."""

        return self._parent._has_key(self._row, key)

    def items(self):
        """Return a list of tuples, each tuple containing a key/value
        pair."""
        # TODO: no coverage here
        return [(key, self[key]) for key in self.iterkeys()]

    def keys(self):
        """Return the list of keys as strings represented by this
        RowProxy."""

        return self._parent.keys

    def iterkeys(self):
        return iter(self._parent.keys)

    def itervalues(self):
        return iter(self)

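# Access patterns supported by RowProxy (a sketch; 'users' is a hypothetical
# Table bound to an engine):
#
#     row = engine.execute(users.select()).fetchone()
#     row[0]                 # by integer position
#     row['name']            # by string key (case-insensitive by default)
#     row[users.c.name]      # by Column object
#     row.name               # attribute access via __getattr__
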
try:
    # Register RowProxy with Sequence,
    # so sequence protocol is implemented
    from collections import Sequence
    Sequence.register(RowProxy)
except ImportError:
    pass


class ResultMetaData(object):
    """Handle cursor.description, applying additional info from an execution
    context."""

    def __init__(self, parent, metadata):
        self._processors = processors = []

        # We do not strictly need to store the processor in the key mapping,
        # though it is faster in the Python version (probably because of the
        # saved attribute lookup self._processors)
        self._keymap = keymap = {}
        self.keys = []
        context = parent.context
        dialect = context.dialect
        typemap = dialect.dbapi_type_map
        translate_colname = context._translate_colname
        self.case_sensitive = dialect.case_sensitive

        # high precedence key values.
        primary_keymap = {}

        for i, rec in enumerate(metadata):
            colname = rec[0]
            coltype = rec[1]

            if dialect.description_encoding:
                colname = dialect._description_decoder(colname)

            if translate_colname:
                colname, untranslated = translate_colname(colname)

            if dialect.requires_name_normalize:
                colname = dialect.normalize_name(colname)

            if context.result_map:
                try:
                    name, obj, type_ = context.result_map[
                        colname if self.case_sensitive
                        else colname.lower()]
                except KeyError:
                    name, obj, type_ = \
                        colname, None, typemap.get(coltype, types.NULLTYPE)
            else:
                name, obj, type_ = \
                    colname, None, typemap.get(coltype, types.NULLTYPE)

            processor = context.get_result_processor(type_, colname, coltype)

            processors.append(processor)
            rec = (processor, obj, i)

            # indexes as keys. This is only needed for the Python version of
            # RowProxy (the C version uses a faster path for integer indexes).
            primary_keymap[i] = rec

            # populate primary keymap, looking for conflicts.
            if primary_keymap.setdefault(
                    name if self.case_sensitive
                    else name.lower(),
                    rec) is not rec:
                # place a record that doesn't have the "index" - this
                # is interpreted later as an AmbiguousColumnError,
                # but only when actually accessed.   Columns
                # colliding by name is not a problem if those names
                # aren't used; integer access is always
                # unambiguous.
                primary_keymap[name
                               if self.case_sensitive
                               else name.lower()] = rec = (None, obj, None)

            self.keys.append(colname)
            if obj:
                for o in obj:
                    keymap[o] = rec
                    # technically we should be doing this but we
                    # are saving on callcounts by not doing so.
                    # if keymap.setdefault(o, rec) is not rec:
                    #     keymap[o] = (None, obj, None)

            if translate_colname and \
                    untranslated:
                keymap[untranslated] = rec

        # overwrite keymap values with those of the
        # high precedence keymap.
        keymap.update(primary_keymap)

        if parent._echo:
            context.engine.logger.debug(
                "Col %r", tuple(x[0] for x in metadata))

    @util.pending_deprecation("0.8", "sqlite dialect uses "
                              "_translate_colname() now")
    def _set_keymap_synonym(self, name, origname):
        """Set a synonym for the given name.

        Some dialects (SQLite at the moment) may use this to
        adjust the column names that are significant within a
        row.

        """
        rec = (processor, obj, i) = self._keymap[origname if
                                                 self.case_sensitive
                                                 else origname.lower()]
        if self._keymap.setdefault(name, rec) is not rec:
            self._keymap[name] = (processor, obj, None)

    def _key_fallback(self, key, raiseerr=True):
        map = self._keymap
        result = None
        if isinstance(key, basestring):
            result = map.get(key if self.case_sensitive else key.lower())
        # fallback for targeting a ColumnElement to a textual expression
        # this is a rare use case which only occurs when matching text()
        # or column('name') constructs to ColumnElements, or after a
        # pickle/unpickle roundtrip
        elif isinstance(key, expression.ColumnElement):
            if key._label and (
                    key._label
                    if self.case_sensitive
                    else key._label.lower()) in map:
                result = map[key._label
                             if self.case_sensitive
                             else key._label.lower()]
            elif hasattr(key, 'name') and (
                    key.name
                    if self.case_sensitive
                    else key.name.lower()) in map:
                # match is only on name.
                result = map[key.name
                             if self.case_sensitive
                             else key.name.lower()]
            # search extra hard to make sure this
            # isn't a column/label name overlap.
            # this check isn't currently available if the row
            # was unpickled.
            if result is not None and \
                    result[1] is not None:
                for obj in result[1]:
                    if key._compare_name_for_result(obj):
                        break
                else:
                    result = None
        if result is None:
            if raiseerr:
                raise exc.NoSuchColumnError(
                    "Could not locate column in row for column '%s'" %
                    expression._string_or_unprintable(key))
            else:
                return None
        else:
            map[key] = result
        return result

    def _has_key(self, row, key):
        if key in self._keymap:
            return True
        else:
            return self._key_fallback(key, False) is not None

    def __getstate__(self):
        return {
            '_pickled_keymap': dict(
                (key, index)
                for key, (processor, obj, index) in self._keymap.iteritems()
                if isinstance(key, (basestring, int))
            ),
            'keys': self.keys,
            "case_sensitive": self.case_sensitive,
        }

    def __setstate__(self, state):
        # the row has been processed at pickling time so we don't need any
        # processor anymore
        self._processors = [None for _ in xrange(len(state['keys']))]
        self._keymap = keymap = {}
        for key, index in state['_pickled_keymap'].iteritems():
            # not preserving "obj" here, unfortunately our
            # proxy comparison fails with the unpickle
            keymap[key] = (None, None, index)
        self.keys = state['keys']
        self.case_sensitive = state['case_sensitive']
        self._echo = False


class ResultProxy(object):
    """Wraps a DB-API cursor object to provide easier access to row columns.

    Individual columns may be accessed by their integer position,
    case-insensitive column name, or by ``schema.Column``
    object. e.g.::

        row = fetchone()

        col1 = row[0]    # access via integer position

        col2 = row['col2']   # access via name

        col3 = row[mytable.c.mycol]  # access via Column object.

    ``ResultProxy`` also handles post-processing of result column
    data using ``TypeEngine`` objects, which are referenced from
    the originating SQL statement that produced this result set.

    """

    _process_row = RowProxy
    out_parameters = None
    _can_close_connection = False
    _metadata = None

    def __init__(self, context):
        self.context = context
        self.dialect = context.dialect
        self.closed = False
        self.cursor = self._saved_cursor = context.cursor
        self.connection = context.root_connection
        self._echo = self.connection._echo and \
            context.engine._should_log_debug()
        self._init_metadata()

    def _init_metadata(self):
        metadata = self._cursor_description()
        if metadata is not None:
            self._metadata = ResultMetaData(self, metadata)

    def keys(self):
        """Return the current set of string keys for rows."""
        if self._metadata:
            return self._metadata.keys
        else:
            return []

    @util.memoized_property
    def rowcount(self):
        """Return the 'rowcount' for this result.

        The 'rowcount' reports the number of rows *matched*
        by the WHERE criterion of an UPDATE or DELETE statement.

        .. note::

           Notes regarding :attr:`.ResultProxy.rowcount`:

           * This attribute returns the number of rows *matched*,
             which is not necessarily the same as the number of rows
             that were actually *modified* - an UPDATE statement, for example,
             may have no net change on a given row if the SET values
             given are the same as those present in the row already.
             Such a row would be matched but not modified.
             On backends that feature both styles, such as MySQL,
             rowcount is configured by default to return the match
             count in all cases.

           * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
             with an UPDATE or DELETE statement.  Contrary to what the Python
             DBAPI says, it does *not* return the
             number of rows available from the results of a SELECT statement
             as DBAPIs cannot support this functionality when rows are
             unbuffered.

           * :attr:`.ResultProxy.rowcount` may not be fully implemented by
             all dialects.  In particular, most DBAPIs do not support an
             aggregate rowcount result from an executemany call.
             The :meth:`.ResultProxy.supports_sane_rowcount` and
             :meth:`.ResultProxy.supports_sane_multi_rowcount` methods
             will report from the dialect if each usage is known to be
             supported.

           * Statements that use RETURNING may not return a correct
             rowcount.

        """
        try:
            return self.context.rowcount
        except Exception, e:
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context)

    @property
    def lastrowid(self):
        """return the 'lastrowid' accessor on the DBAPI cursor.

        This is a DBAPI specific method and is only functional
        for those backends which support it, for statements
        where it is appropriate.  Its behavior is not
        consistent across backends.

        Usage of this method is normally unnecessary when
        using insert() expression constructs; the
        :attr:`~ResultProxy.inserted_primary_key` attribute provides a
        tuple of primary key values for a newly inserted row,
        regardless of database backend.

        """
        try:
            return self._saved_cursor.lastrowid
        except Exception, e:
            self.connection._handle_dbapi_exception(
                e, None, None,
                self._saved_cursor, self.context)

    @property
    def returns_rows(self):
        """True if this :class:`.ResultProxy` returns rows.

        I.e. if it is legal to call the methods
        :meth:`~.ResultProxy.fetchone`,
        :meth:`~.ResultProxy.fetchmany` and
        :meth:`~.ResultProxy.fetchall`.

        """
        return self._metadata is not None

    @property
    def is_insert(self):
        """True if this :class:`.ResultProxy` is the result
        of executing an expression language compiled
        :func:`.expression.insert` construct.

        When True, this implies that the
        :attr:`inserted_primary_key` attribute is accessible,
        assuming the statement did not include
        a user defined "returning" construct.

        """
        return self.context.isinsert

    def _cursor_description(self):
        """May be overridden by subclasses."""

        return self._saved_cursor.description

    def close(self, _autoclose_connection=True):
        """Close this ResultProxy.

        Closes the underlying DBAPI cursor corresponding to the execution.

        Note that any data cached within this ResultProxy is still available.
        For some types of results, this may include buffered rows.

        If this ResultProxy was generated from an implicit execution,
        the underlying Connection will also be closed (returns the
        underlying DBAPI connection to the connection pool.)

        This method is called automatically when:

        * all result rows are exhausted using the fetchXXX() methods.
        * cursor.description is None.

        """

        if not self.closed:
            self.closed = True
            self.connection._safe_close_cursor(self.cursor)
            if _autoclose_connection and \
                    self.connection.should_close_with_result:
                self.connection.close()
        # allow consistent errors
        self.cursor = None

    def __iter__(self):
        while True:
            row = self.fetchone()
            if row is None:
                raise StopIteration
            else:
                yield row

    @util.memoized_property
    def inserted_primary_key(self):
        """Return the primary key for the row just inserted.

        The return value is a list of scalar values
        corresponding to the list of primary key columns
        in the target table.

        This only applies to single row :func:`.insert`
        constructs which did not explicitly specify
        :meth:`.Insert.returning`.

        Note that primary key columns which specify a
        server_default clause,
        or otherwise do not qualify as "autoincrement"
        columns (see the notes at :class:`.Column`), and were
        generated using the database-side default, will
        appear in this list as ``None`` unless the backend
        supports "returning" and the insert statement was executed
        with "implicit returning" enabled.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an insert() construct.

        """

        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled "
                "expression construct.")
        elif not self.context.isinsert:
            raise exc.InvalidRequestError(
                "Statement is not an insert() "
                "expression construct.")
        elif self.context._is_explicit_returning:
            raise exc.InvalidRequestError(
                "Can't call inserted_primary_key "
                "when returning() "
                "is used.")

        return self.context.inserted_primary_key

    def last_updated_params(self):
        """Return the collection of updated parameters from this
        execution.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an update() construct.

        """
        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled "
                "expression construct.")
        elif not self.context.isupdate:
            raise exc.InvalidRequestError(
                "Statement is not an update() "
                "expression construct.")
        elif self.context.executemany:
            return self.context.compiled_parameters
        else:
            return self.context.compiled_parameters[0]

    def last_inserted_params(self):
        """Return the collection of inserted parameters from this
        execution.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an insert() construct.

        """
        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled "
                "expression construct.")
        elif not self.context.isinsert:
            raise exc.InvalidRequestError(
                "Statement is not an insert() "
                "expression construct.")
        elif self.context.executemany:
            return self.context.compiled_parameters
        else:
            return self.context.compiled_parameters[0]

    def lastrow_has_defaults(self):
        """Return ``lastrow_has_defaults()`` from the underlying
        :class:`.ExecutionContext`.

        See :class:`.ExecutionContext` for details.

        """

        return self.context.lastrow_has_defaults()

    def postfetch_cols(self):
        """Return ``postfetch_cols()`` from the underlying
        :class:`.ExecutionContext`.

        See :class:`.ExecutionContext` for details.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an insert() or update() construct.

        """

        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled "
                "expression construct.")
        elif not self.context.isinsert and not self.context.isupdate:
            raise exc.InvalidRequestError(
                "Statement is not an insert() or update() "
                "expression construct.")
        return self.context.postfetch_cols

    def prefetch_cols(self):
        """Return ``prefetch_cols()`` from the underlying
        :class:`.ExecutionContext`.

        See :class:`.ExecutionContext` for details.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an insert() or update() construct.

        """

        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled "
                "expression construct.")
        elif not self.context.isinsert and not self.context.isupdate:
            raise exc.InvalidRequestError(
                "Statement is not an insert() or update() "
                "expression construct.")
        return self.context.prefetch_cols

    def supports_sane_rowcount(self):
        """Return ``supports_sane_rowcount`` from the dialect.

        See :attr:`.ResultProxy.rowcount` for background.

        """

        return self.dialect.supports_sane_rowcount

    def supports_sane_multi_rowcount(self):
        """Return ``supports_sane_multi_rowcount`` from the dialect.

        See :attr:`.ResultProxy.rowcount` for background.

        """

        return self.dialect.supports_sane_multi_rowcount

    def _fetchone_impl(self):
        try:
            return self.cursor.fetchone()
        except AttributeError:
            self._non_result()

    def _fetchmany_impl(self, size=None):
        try:
            if size is None:
                return self.cursor.fetchmany()
            else:
                return self.cursor.fetchmany(size)
        except AttributeError:
            self._non_result()

    def _fetchall_impl(self):
        try:
            return self.cursor.fetchall()
        except AttributeError:
            self._non_result()

    def _non_result(self):
        if self._metadata is None:
            raise exc.ResourceClosedError(
                "This result object does not return rows. "
                "It has been closed automatically.",
            )
        else:
            raise exc.ResourceClosedError("This result object is closed.")

    def process_rows(self, rows):
        process_row = self._process_row
        metadata = self._metadata
        keymap = metadata._keymap
        processors = metadata._processors
        if self._echo:
            log = self.context.engine.logger.debug
            l = []
            for row in rows:
                log("Row %r", row)
                l.append(process_row(metadata, row, processors, keymap))
            return l
        else:
            return [process_row(metadata, row, processors, keymap)
                    for row in rows]

    def fetchall(self):
        """Fetch all rows, just like DB-API ``cursor.fetchall()``."""

        try:
            l = self.process_rows(self._fetchall_impl())
            self.close()
            return l
        except Exception, e:
            self.connection._handle_dbapi_exception(
                e, None, None,
                self.cursor, self.context)

    def fetchmany(self, size=None):
        """Fetch many rows, just like DB-API
        ``cursor.fetchmany(size=cursor.arraysize)``.

        If rows are present, the cursor remains open after this is called.
        Else the cursor is automatically closed and an empty list is returned.

        """

        try:
            l = self.process_rows(self._fetchmany_impl(size))
            if len(l) == 0:
                self.close()
            return l
        except Exception, e:
            self.connection._handle_dbapi_exception(
                e, None, None,
                self.cursor, self.context)

    def fetchone(self):
        """Fetch one row, just like DB-API ``cursor.fetchone()``.

        If a row is present, the cursor remains open after this is called.
        Else the cursor is automatically closed and None is returned.

        """
        try:
            row = self._fetchone_impl()
            if row is not None:
                return self.process_rows([row])[0]
            else:
                self.close()
                return None
        except Exception, e:
            self.connection._handle_dbapi_exception(
                e, None, None,
                self.cursor, self.context)

    def first(self):
        """Fetch the first row and then close the result set unconditionally.

        Returns None if no row is present.

        """
        if self._metadata is None:
            self._non_result()

        try:
            row = self._fetchone_impl()
        except Exception, e:
            self.connection._handle_dbapi_exception(
                e, None, None,
                self.cursor, self.context)

        try:
            if row is not None:
                return self.process_rows([row])[0]
            else:
                return None
        finally:
            self.close()

    def scalar(self):
        """Fetch the first column of the first row, and close the result set.

        Returns None if no row is present.

        """
        row = self.first()
        if row is not None:
            return row[0]
        else:
            return None


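# Illustrative sketch, not part of the original source: the fetch and
# rowcount semantics documented above, exercised against a throwaway
# in-memory SQLite engine.  Table and column names are hypothetical.
def _demo_result_proxy_usage():
    from sqlalchemy import create_engine, MetaData, Table, Column, Integer

    engine = create_engine('sqlite://')
    conn = engine.connect()

    meta = MetaData()
    t = Table('t', meta,
              Column('id', Integer, primary_key=True),
              Column('x', Integer))
    meta.create_all(conn)

    # inserted_primary_key applies to a single-row insert() construct
    result = conn.execute(t.insert().values(x=1))
    assert result.inserted_primary_key == [1]
    conn.execute(t.insert().values(x=1))

    # rowcount counts rows *matched* by the WHERE clause; both rows
    # match here even though SET assigns the value they already hold
    result = conn.execute(t.update().values(x=1))
    assert result.rowcount == 2

    # scalar() returns the first column of the first row, then closes
    assert conn.execute("SELECT count(*) FROM t").scalar() == 2

    # fetchone() keeps the cursor open while rows remain; once it
    # returns None, the result has closed itself
    result = conn.execute("SELECT id FROM t ORDER BY id")
    ids = []
    while True:
        row = result.fetchone()
        if row is None:
            break
        ids.append(row[0])
    assert ids == [1, 2]

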
class BufferedRowResultProxy(ResultProxy):
    """A ResultProxy with row buffering behavior.

    ``ResultProxy`` that buffers the contents of a selection of rows
    before ``fetchone()`` is called.  This is to allow the results of
    ``cursor.description`` to be available immediately, when
    interfacing with a DB-API that requires rows to be consumed before
    this information is available (currently psycopg2, when used with
    server-side cursors).

    The pre-fetching behavior fetches only one row initially, and then
    grows its buffer size by a fixed amount with each successive need
    for additional rows up to a size of 1000.
    """

    def _init_metadata(self):
        self.__buffer_rows()
        super(BufferedRowResultProxy, self)._init_metadata()

    # this is a "growth chart" for the buffering of rows.
    # each successive __buffer_rows call will use the next
    # value in the list for the buffer size until the max
    # is reached
    size_growth = {
        1: 5,
        5: 10,
        10: 20,
        20: 50,
        50: 100,
        100: 250,
        250: 500,
        500: 1000
    }

    def __buffer_rows(self):
        size = getattr(self, '_bufsize', 1)
        self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
        self._bufsize = self.size_growth.get(size, size)

def _fetchone_impl(self):
|
||||
if self.closed:
|
||||
return None
|
||||
if not self.__rowbuffer:
|
||||
self.__buffer_rows()
|
||||
if not self.__rowbuffer:
|
||||
return None
|
||||
return self.__rowbuffer.popleft()
|
||||
|
||||
def _fetchmany_impl(self, size=None):
|
||||
if size is None:
|
||||
return self._fetchall_impl()
|
||||
result = []
|
||||
for x in range(0, size):
|
||||
row = self._fetchone_impl()
|
||||
if row is None:
|
||||
break
|
||||
result.append(row)
|
||||
return result
|
||||
|
||||
def _fetchall_impl(self):
|
||||
self.__rowbuffer.extend(self.cursor.fetchall())
|
||||
ret = self.__rowbuffer
|
||||
self.__rowbuffer = collections.deque()
|
||||
return ret
|
||||
|
||||
|
||||
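# Illustrative sketch, not part of the original source: walking the
# ``size_growth`` chart above shows the fetchmany() batch sizes a
# BufferedRowResultProxy would request on successive buffer refills.
def _demo_buffer_growth():
    sizes = []
    size = 1
    for _ in range(10):
        sizes.append(size)
        size = BufferedRowResultProxy.size_growth.get(size, size)
    # the batch grows on each refill, then stays capped at 1000
    assert sizes == [1, 5, 10, 20, 50, 100, 250, 500, 1000, 1000]

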
class FullyBufferedResultProxy(ResultProxy):
    """A result proxy that buffers rows fully upon creation.

    Used for operations where a result is to be delivered
    after the database conversation cannot be continued,
    such as MSSQL INSERT...OUTPUT after an autocommit.

    """
    def _init_metadata(self):
        super(FullyBufferedResultProxy, self)._init_metadata()
        self.__rowbuffer = self._buffer_rows()

    def _buffer_rows(self):
        return collections.deque(self.cursor.fetchall())

    def _fetchone_impl(self):
        if self.__rowbuffer:
            return self.__rowbuffer.popleft()
        else:
            return None

    def _fetchmany_impl(self, size=None):
        if size is None:
            return self._fetchall_impl()
        result = []
        for x in range(0, size):
            row = self._fetchone_impl()
            if row is None:
                break
            result.append(row)
        return result

    def _fetchall_impl(self):
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()
        return ret


class BufferedColumnRow(RowProxy):
    def __init__(self, parent, row, processors, keymap):
        # preprocess row
        row = list(row)
        # this is a tad faster than using enumerate
        index = 0
        for processor in parent._orig_processors:
            if processor is not None:
                row[index] = processor(row[index])
            index += 1
        row = tuple(row)
        super(BufferedColumnRow, self).__init__(parent, row,
                                                processors, keymap)


class BufferedColumnResultProxy(ResultProxy):
    """A ResultProxy with column buffering behavior.

    ``ResultProxy`` that loads all columns into memory each time
    fetchone() is called.  If fetchmany() or fetchall() are called,
    the full grid of results is fetched.  This is to operate with
    databases where result rows contain "live" results that fall out
    of scope unless explicitly fetched.  Currently this includes
    cx_Oracle LOB objects.

    """

    _process_row = BufferedColumnRow

    def _init_metadata(self):
        super(BufferedColumnResultProxy, self)._init_metadata()
        metadata = self._metadata
        # orig_processors will be used to preprocess each row when they are
        # constructed.
        metadata._orig_processors = metadata._processors
        # replace all the type processors with None processors.
        metadata._processors = [None for _ in xrange(len(metadata.keys))]
        keymap = {}
        for k, (func, obj, index) in metadata._keymap.iteritems():
            keymap[k] = (None, obj, index)
        self._metadata._keymap = keymap

    def fetchall(self):
        # can't call cursor.fetchall(), since rows must be
        # fully processed before requesting more from the DBAPI.
        l = []
        while True:
            row = self.fetchone()
            if row is None:
                break
            l.append(row)
        return l

    def fetchmany(self, size=None):
        # can't call cursor.fetchmany(), since rows must be
        # fully processed before requesting more from the DBAPI.
        if size is None:
            return self.fetchall()
        l = []
        for i in xrange(size):
            row = self.fetchone()
            if row is None:
                break
            l.append(row)
        return l
261
lib/sqlalchemy/engine/strategies.py
Normal file
@@ -0,0 +1,261 @@
# engine/strategies.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Strategies for creating new instances of Engine types.

These are semi-private implementation classes which provide the
underlying behavior for the "strategy" keyword argument available on
:func:`~sqlalchemy.engine.create_engine`.  Current available options are
``plain``, ``threadlocal``, and ``mock``.

New strategies can be added via new ``EngineStrategy`` classes.
"""

from operator import attrgetter

from sqlalchemy.engine import base, threadlocal, url
from sqlalchemy import util, exc, event
from sqlalchemy import pool as poollib

strategies = {}


class EngineStrategy(object):
    """An adaptor that processes input arguments and produces an Engine.

    Provides a ``create`` method that receives input arguments and
    produces an instance of base.Engine or a subclass.

    """

    def __init__(self):
        strategies[self.name] = self

    def create(self, *args, **kwargs):
        """Given arguments, returns a new Engine instance."""

        raise NotImplementedError()


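# Illustrative sketch, not part of the original source: instantiating an
# EngineStrategy subclass is all it takes to register it, since __init__
# adds ``self`` to the module-level ``strategies`` dict keyed by name.
# The class below is hypothetical; ``create_engine(..., strategy='noop')``
# could then locate it by name.
def _demo_register_strategy():
    class _NoopEngineStrategy(EngineStrategy):
        name = 'noop'

        def create(self, *args, **kwargs):
            return None

    _NoopEngineStrategy()
    assert 'noop' in strategies

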
class DefaultEngineStrategy(EngineStrategy):
    """Base class for built-in strategies."""

    def create(self, name_or_url, **kwargs):
        # create url.URL object
        u = url.make_url(name_or_url)

        dialect_cls = u.get_dialect()

        dialect_args = {}
        # consume dialect arguments from kwargs
        for k in util.get_cls_kwargs(dialect_cls):
            if k in kwargs:
                dialect_args[k] = kwargs.pop(k)

        dbapi = kwargs.pop('module', None)
        if dbapi is None:
            dbapi_args = {}
            for k in util.get_func_kwargs(dialect_cls.dbapi):
                if k in kwargs:
                    dbapi_args[k] = kwargs.pop(k)
            dbapi = dialect_cls.dbapi(**dbapi_args)

        dialect_args['dbapi'] = dbapi

        # create dialect
        dialect = dialect_cls(**dialect_args)

        # assemble connection arguments
        (cargs, cparams) = dialect.create_connect_args(u)
        cparams.update(kwargs.pop('connect_args', {}))

        # look for existing pool or create
        pool = kwargs.pop('pool', None)
        if pool is None:
            def connect():
                try:
                    return dialect.connect(*cargs, **cparams)
                except Exception, e:
                    invalidated = dialect.is_disconnect(e, None, None)
                    # Py3K
                    #raise exc.DBAPIError.instance(None, None,
                    #    e, dialect.dbapi.Error,
                    #    connection_invalidated=invalidated
                    #) from e
                    # Py2K
                    import sys
                    raise exc.DBAPIError.instance(
                        None, None, e, dialect.dbapi.Error,
                        connection_invalidated=invalidated
                    ), None, sys.exc_info()[2]
                    # end Py2K

            creator = kwargs.pop('creator', connect)

            poolclass = kwargs.pop('poolclass', None)
            if poolclass is None:
                poolclass = dialect_cls.get_pool_class(u)
            pool_args = {}

            # consume pool arguments from kwargs, translating a few of
            # the arguments
            translate = {'logging_name': 'pool_logging_name',
                         'echo': 'echo_pool',
                         'timeout': 'pool_timeout',
                         'recycle': 'pool_recycle',
                         'events': 'pool_events',
                         'use_threadlocal': 'pool_threadlocal',
                         'reset_on_return': 'pool_reset_on_return'}
            for k in util.get_cls_kwargs(poolclass):
                tk = translate.get(k, k)
                if tk in kwargs:
                    pool_args[k] = kwargs.pop(tk)
            pool = poolclass(creator, **pool_args)
        else:
            if isinstance(pool, poollib._DBProxy):
                pool = pool.get_pool(*cargs, **cparams)
            else:
                pool = pool

        # create engine.
        engineclass = self.engine_cls
        engine_args = {}
        for k in util.get_cls_kwargs(engineclass):
            if k in kwargs:
                engine_args[k] = kwargs.pop(k)

        _initialize = kwargs.pop('_initialize', True)

        # all kwargs should be consumed
        if kwargs:
            raise TypeError(
                "Invalid argument(s) %s sent to create_engine(), "
                "using configuration %s/%s/%s.  Please check that the "
                "keyword arguments are appropriate for this combination "
                "of components." % (','.join("'%s'" % k for k in kwargs),
                                    dialect.__class__.__name__,
                                    pool.__class__.__name__,
                                    engineclass.__name__))

        engine = engineclass(pool, dialect, u, **engine_args)

        if _initialize:
            do_on_connect = dialect.on_connect()
            if do_on_connect:
                def on_connect(dbapi_connection, connection_record):
                    conn = getattr(
                        dbapi_connection, '_sqla_unwrap', dbapi_connection)
                    if conn is None:
                        return
                    do_on_connect(conn)

                event.listen(pool, 'first_connect', on_connect)
                event.listen(pool, 'connect', on_connect)

            @util.only_once
            def first_connect(dbapi_connection, connection_record):
                c = base.Connection(engine, connection=dbapi_connection)

                # TODO: removing this allows the on connect activities
                # to generate events.  tests currently assume these aren't
                # sent.  do we want users to get all the initial connect
                # activities as events ?
                c._has_events = False

                dialect.initialize(c)
            event.listen(pool, 'first_connect', first_connect)

        return engine


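# Illustrative sketch, not part of the original source: the ``translate``
# dict above maps pool-constructor argument names to the prefixed names
# accepted by create_engine(), so ``pool_recycle`` reaches the pool as
# ``recycle``.  The ``_recycle`` attribute checked below is an internal
# pool detail, used here only for illustration.
def _demo_pool_arg_translation():
    from sqlalchemy import create_engine

    engine = create_engine('sqlite://', pool_recycle=3600)
    assert engine.pool._recycle == 3600

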
class PlainEngineStrategy(DefaultEngineStrategy):
    """Strategy for configuring a regular Engine."""

    name = 'plain'
    engine_cls = base.Engine

PlainEngineStrategy()


class ThreadLocalEngineStrategy(DefaultEngineStrategy):
    """Strategy for configuring an Engine with threadlocal behavior."""

    name = 'threadlocal'
    engine_cls = threadlocal.TLEngine

ThreadLocalEngineStrategy()


class MockEngineStrategy(EngineStrategy):
    """Strategy for configuring an Engine-like object with mocked execution.

    Produces a single mock Connectable object which dispatches
    statement execution to a passed-in function.

    """

    name = 'mock'

    def create(self, name_or_url, executor, **kwargs):
        # create url.URL object
        u = url.make_url(name_or_url)

        dialect_cls = u.get_dialect()

        dialect_args = {}
        # consume dialect arguments from kwargs
        for k in util.get_cls_kwargs(dialect_cls):
            if k in kwargs:
                dialect_args[k] = kwargs.pop(k)

        # create dialect
        dialect = dialect_cls(**dialect_args)

        return MockEngineStrategy.MockConnection(dialect, executor)

    class MockConnection(base.Connectable):
        def __init__(self, dialect, execute):
            self._dialect = dialect
            self.execute = execute

        engine = property(lambda s: s)
        dialect = property(attrgetter('_dialect'))
        name = property(lambda s: s._dialect.name)

        def contextual_connect(self, **kwargs):
            return self

        def execution_options(self, **kw):
            return self

        def compiler(self, statement, parameters, **kwargs):
            return self._dialect.compiler(
                statement, parameters, engine=self, **kwargs)

        def create(self, entity, **kwargs):
            kwargs['checkfirst'] = False
            from sqlalchemy.engine import ddl

            ddl.SchemaGenerator(
                self.dialect, self, **kwargs).traverse_single(entity)

        def drop(self, entity, **kwargs):
            kwargs['checkfirst'] = False
            from sqlalchemy.engine import ddl
            ddl.SchemaDropper(
                self.dialect, self, **kwargs).traverse_single(entity)

        def _run_visitor(self, visitorcallable, element,
                         connection=None,
                         **kwargs):
            kwargs['checkfirst'] = False
            visitorcallable(self.dialect, self,
                            **kwargs).traverse_single(element)

        def execute(self, object, *multiparams, **params):
            raise NotImplementedError()

MockEngineStrategy()
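
# Illustrative sketch, not part of the original source: the 'mock'
# strategy is handy for capturing DDL as strings instead of executing
# it.  The executor below simply collects compiled statements.
def _demo_mock_strategy():
    from sqlalchemy import create_engine, MetaData, Table, Column, Integer

    statements = []

    def executor(sql, *multiparams, **params):
        statements.append(str(sql.compile(dialect=mock.dialect)))

    mock = create_engine('sqlite://', strategy='mock', executor=executor)

    meta = MetaData()
    Table('t', meta, Column('id', Integer, primary_key=True))
    meta.create_all(mock)
    assert 'CREATE TABLE t' in statements[0]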
134
lib/sqlalchemy/engine/threadlocal.py
Normal file
@@ -0,0 +1,134 @@
# engine/threadlocal.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Provides a thread-local transactional wrapper around the root Engine class.

The ``threadlocal`` module is invoked when using the
``strategy="threadlocal"`` flag with :func:`~sqlalchemy.engine.create_engine`.
This module is semi-private and is invoked automatically when the threadlocal
engine strategy is used.
"""

from .. import util
from . import base
import weakref


class TLConnection(base.Connection):

    def __init__(self, *arg, **kw):
        super(TLConnection, self).__init__(*arg, **kw)
        self.__opencount = 0

    def _increment_connect(self):
        self.__opencount += 1
        return self

    def close(self):
        if self.__opencount == 1:
            base.Connection.close(self)
        self.__opencount -= 1

    def _force_close(self):
        self.__opencount = 0
        base.Connection.close(self)


class TLEngine(base.Engine):
    """An Engine that includes support for thread-local managed
    transactions.

    """
    _tl_connection_cls = TLConnection

    def __init__(self, *args, **kwargs):
        super(TLEngine, self).__init__(*args, **kwargs)
        self._connections = util.threading.local()

    def contextual_connect(self, **kw):
        if not hasattr(self._connections, 'conn'):
            connection = None
        else:
            connection = self._connections.conn()

        if connection is None or connection.closed:
            # guards against pool-level reapers, if desired.
            # or not connection.connection.is_valid:
            connection = self._tl_connection_cls(
                self, self.pool.connect(), **kw)
            self._connections.conn = weakref.ref(connection)

        return connection._increment_connect()

    def begin_twophase(self, xid=None):
        if not hasattr(self._connections, 'trans'):
            self._connections.trans = []
        self._connections.trans.append(
            self.contextual_connect().begin_twophase(xid=xid))
        return self

    def begin_nested(self):
        if not hasattr(self._connections, 'trans'):
            self._connections.trans = []
        self._connections.trans.append(
            self.contextual_connect().begin_nested())
        return self

    def begin(self):
        if not hasattr(self._connections, 'trans'):
            self._connections.trans = []
        self._connections.trans.append(self.contextual_connect().begin())
        return self

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        if type is None:
            self.commit()
        else:
            self.rollback()

    def prepare(self):
        if not hasattr(self._connections, 'trans') or \
                not self._connections.trans:
            return
        self._connections.trans[-1].prepare()

    def commit(self):
        if not hasattr(self._connections, 'trans') or \
                not self._connections.trans:
            return
        trans = self._connections.trans.pop(-1)
        trans.commit()

    def rollback(self):
        if not hasattr(self._connections, 'trans') or \
                not self._connections.trans:
            return
        trans = self._connections.trans.pop(-1)
        trans.rollback()

    def dispose(self):
        self._connections = util.threading.local()
        super(TLEngine, self).dispose()

    @property
    def closed(self):
        return not hasattr(self._connections, 'conn') or \
            self._connections.conn() is None or \
            self._connections.conn().closed

    def close(self):
        if not self.closed:
            self.contextual_connect().close()
            connection = self._connections.conn()
            connection._force_close()
            del self._connections.conn
            self._connections.trans = []

    def __repr__(self):
        return 'TLEngine(%s)' % str(self.url)
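
# Illustrative sketch, not part of the original source: with the
# threadlocal strategy, begin()/commit() on the engine itself scope a
# transaction to the current thread, and bare engine.execute() calls
# participate in it.  The table is hypothetical.
def _demo_threadlocal_engine():
    from sqlalchemy import create_engine

    engine = create_engine('sqlite://', strategy='threadlocal')
    engine.execute("CREATE TABLE t (x INTEGER)")

    engine.begin()
    try:
        engine.execute("INSERT INTO t (x) VALUES (1)")
        engine.commit()
    except:
        engine.rollback()
        raise
    assert engine.execute("SELECT count(*) FROM t").scalar() == 1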
222
lib/sqlalchemy/engine/url.py
Normal file
@@ -0,0 +1,222 @@
# engine/url.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates
information about a database connection specification.

The URL object is created automatically when
:func:`~sqlalchemy.engine.create_engine` is called with a string
argument; alternatively, the URL is a public-facing construct which can
be used directly and is also accepted directly by ``create_engine()``.
"""

import re
import urllib
from .. import exc, util
from . import Dialect


class URL(object):
    """
    Represent the components of a URL used to connect to a database.

    This object is suitable to be passed directly to a
    ``create_engine()`` call.  The fields of the URL are parsed from a
    string by the module-level ``make_url()`` function.  The string
    format of the URL is an RFC-1738-style string.

    All initialization parameters are available as public attributes.

    :param drivername: the name of the database backend.
      This name will correspond to a module in sqlalchemy/databases
      or a third party plug-in.

    :param username: The user name.

    :param password: database password.

    :param host: The name of the host.

    :param port: The port number.

    :param database: The database name.

    :param query: A dictionary of options to be passed to the
      dialect and/or the DBAPI upon connect.

    """

    def __init__(self, drivername, username=None, password=None,
                 host=None, port=None, database=None, query=None):
        self.drivername = drivername
        self.username = username
        self.password = password
        self.host = host
        if port is not None:
            self.port = int(port)
        else:
            self.port = None
        self.database = database
        self.query = query or {}

    def __to_string__(self, hide_password=True):
        s = self.drivername + "://"
        if self.username is not None:
            s += self.username
            if self.password is not None:
                s += ':' + ('***' if hide_password
                            else urllib.quote_plus(self.password))
            s += "@"
        if self.host is not None:
            if ':' in self.host:
                s += "[%s]" % self.host
            else:
                s += self.host
        if self.port is not None:
            s += ':' + str(self.port)
        if self.database is not None:
            s += '/' + self.database
        if self.query:
            keys = self.query.keys()
            keys.sort()
            s += '?' + "&".join("%s=%s" % (k, self.query[k]) for k in keys)
        return s

    def __str__(self):
        return self.__to_string__(hide_password=False)

    def __repr__(self):
        return self.__to_string__()

    def __hash__(self):
        return hash(str(self))

    def __eq__(self, other):
        return \
            isinstance(other, URL) and \
            self.drivername == other.drivername and \
            self.username == other.username and \
            self.password == other.password and \
            self.host == other.host and \
            self.database == other.database and \
            self.query == other.query

    def get_dialect(self):
        """Return the SQLAlchemy database dialect class corresponding
        to this URL's driver name.
        """

        if '+' not in self.drivername:
            name = self.drivername
        else:
            name = self.drivername.replace('+', '.')
        from sqlalchemy.dialects import registry
        cls = registry.load(name)
        # check for legacy dialects that
        # would return a module with 'dialect' as the
        # actual class
        if hasattr(cls, 'dialect') and \
                isinstance(cls.dialect, type) and \
                issubclass(cls.dialect, Dialect):
            return cls.dialect
        else:
            return cls

    def translate_connect_args(self, names=[], **kw):
        """Translate url attributes into a dictionary of connection arguments.

        Returns attributes of this url (`host`, `database`, `username`,
        `password`, `port`) as a plain dictionary.  The attribute names are
        used as the keys by default.  Unset or false attributes are omitted
        from the final dictionary.

        :param \**kw: Optional, alternate key names for url attributes.

        :param names: Deprecated.  Same purpose as the keyword-based alternate
          names, but correlates the name to the original positionally.
        """

        translated = {}
        attribute_names = ['host', 'database', 'username', 'password', 'port']
        for sname in attribute_names:
            if names:
                name = names.pop(0)
            elif sname in kw:
                name = kw[sname]
            else:
                name = sname
            if name is not None and getattr(self, sname, False):
                translated[name] = getattr(self, sname)
        return translated


def make_url(name_or_url):
    """Given a string or unicode instance, produce a new URL instance.

    The given string is parsed according to the RFC 1738 spec.  If an
    existing URL object is passed, just returns the object.
    """

    if isinstance(name_or_url, basestring):
        return _parse_rfc1738_args(name_or_url)
    else:
        return name_or_url


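# Illustrative sketch, not part of the original source: parsing a URL
# string exposes its components as attributes, and
# translate_connect_args() renames them for a DBAPI's connect() call.
# The URL below is a made-up example.
def _demo_make_url():
    u = make_url('postgresql+psycopg2://scott:tiger@localhost:5432/test')
    assert u.drivername == 'postgresql+psycopg2'
    assert u.host == 'localhost' and u.port == 5432
    assert u.database == 'test'

    # e.g. rename 'username' to 'user' and 'database' to 'dbname'
    args = u.translate_connect_args(username='user', database='dbname')
    assert args['user'] == 'scott' and args['dbname'] == 'test'

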
def _parse_rfc1738_args(name):
|
||||
pattern = re.compile(r'''
|
||||
(?P<name>[\w\+]+)://
|
||||
(?:
|
||||
(?P<username>[^:/]*)
|
||||
(?::(?P<password>[^/]*))?
|
||||
@)?
|
||||
(?:
|
||||
(?:
|
||||
\[(?P<ipv6host>[^/]+)\] |
|
||||
(?P<ipv4host>[^/:]+)
|
||||
)?
|
||||
(?::(?P<port>[^/]*))?
|
||||
)?
|
||||
(?:/(?P<database>.*))?
|
||||
''', re.X)
|
||||
|
||||
m = pattern.match(name)
|
||||
if m is not None:
|
||||
components = m.groupdict()
|
||||
if components['database'] is not None:
|
||||
tokens = components['database'].split('?', 2)
|
||||
components['database'] = tokens[0]
|
||||
query = (len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
|
||||
# Py2K
|
||||
if query is not None:
|
||||
query = dict((k.encode('ascii'), query[k]) for k in query)
|
||||
# end Py2K
|
||||
else:
|
||||
query = None
|
||||
components['query'] = query
|
||||
|
||||
if components['password'] is not None:
|
||||
components['password'] = \
|
||||
urllib.unquote_plus(components['password'])
|
||||
|
||||
ipv4host = components.pop('ipv4host')
|
||||
ipv6host = components.pop('ipv6host')
|
||||
components['host'] = ipv4host or ipv6host
|
||||
name = components.pop('name')
|
||||
return URL(name, **components)
|
||||
else:
|
||||
raise exc.ArgumentError(
|
||||
"Could not parse rfc1738 URL from string '%s'" % name)
|
||||
|
||||
|
||||
def _parse_keyvalue_args(name):
    m = re.match(r'(\w+)://(.*)', name)
    if m is not None:
        (name, args) = m.group(1, 2)
        opts = dict(util.parse_qsl(args))
        # pass the parsed options as keyword arguments
        return URL(name, **opts)
    else:
        return None
94
lib/sqlalchemy/engine/util.py
Normal file
@@ -0,0 +1,94 @@
# engine/util.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .. import util


def _coerce_config(configuration, prefix):
    """Convert configuration values to expected types."""

    options = dict((key[len(prefix):], configuration[key])
                   for key in configuration
                   if key.startswith(prefix))
    for option, type_ in (
        ('convert_unicode', util.bool_or_str('force')),
        ('pool_timeout', int),
        ('echo', util.bool_or_str('debug')),
        ('echo_pool', util.bool_or_str('debug')),
        ('pool_recycle', int),
        ('pool_size', int),
        ('max_overflow', int),
        ('pool_threadlocal', bool),
        ('use_native_unicode', bool),
    ):
        util.coerce_kw_type(options, option, type_)
    return options


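# Illustrative sketch, not part of the original source: string values
# from a config file are stripped of their prefix and coerced to the
# types create_engine() expects.  The ini-style dict is hypothetical.
def _demo_coerce_config():
    ini = {'sqlalchemy.url': 'sqlite://',
           'sqlalchemy.echo': 'true',
           'sqlalchemy.pool_recycle': '3600'}
    options = _coerce_config(ini, 'sqlalchemy.')
    assert options['pool_recycle'] == 3600
    assert options['echo'] is True
    assert options['url'] == 'sqlite://'

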
def connection_memoize(key):
    """Decorator, memoize a function in a connection.info stash.

    Only applicable to functions which take no arguments other than a
    connection.  The memo will be stored in ``connection.info[key]``.
    """

    @util.decorator
    def decorated(fn, self, connection):
        connection = connection.connect()
        try:
            return connection.info[key]
        except KeyError:
            connection.info[key] = val = fn(self, connection)
            return val

    return decorated


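# Illustrative sketch, not part of the original source: a method taking
# only a connection can be memoized per-connection; a second call returns
# the value stashed in connection.info instead of recomputing it.  The
# class and key below are hypothetical.
def _demo_connection_memoize():
    class SomeDialectHelper(object):
        calls = [0]

        @connection_memoize('_demo_server_version')
        def server_version(self, connection):
            self.calls[0] += 1
            return connection.scalar("SELECT sqlite_version()")

    from sqlalchemy import create_engine
    conn = create_engine('sqlite://').connect()
    helper = SomeDialectHelper()
    v1 = helper.server_version(conn)
    v2 = helper.server_version(conn)
    assert v1 == v2 and helper.calls[0] == 1

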
def py_fallback():
    def _distill_params(multiparams, params):
        """Given arguments from the calling form *multiparams, **params,
        return a list of bind parameter structures, usually a list of
        dictionaries.

        In the case of 'raw' execution which accepts positional parameters,
        it may be a list of tuples or lists.

        """

        if not multiparams:
            if params:
                return [params]
            else:
                return []
        elif len(multiparams) == 1:
            zero = multiparams[0]
            if isinstance(zero, (list, tuple)):
                if not zero or hasattr(zero[0], '__iter__') and \
                        not hasattr(zero[0], 'strip'):
                    # execute(stmt, [{}, {}, {}, ...])
                    # execute(stmt, [(), (), (), ...])
                    return zero
                else:
                    # execute(stmt, ("value", "value"))
                    return [zero]
            elif hasattr(zero, 'keys'):
                # execute(stmt, {"key":"value"})
                return [zero]
            else:
                # execute(stmt, "value")
                return [[zero]]
        else:
            if hasattr(multiparams[0], '__iter__') and \
                    not hasattr(multiparams[0], 'strip'):
                return multiparams
            else:
                return [multiparams]

    return locals()

try:
    from sqlalchemy.cutils import _distill_params
except ImportError:
    globals().update(py_fallback())
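
# Illustrative sketch, not part of the original source: the distilled
# forms produced by _distill_params for the common execute() calling
# conventions.
def _demo_distill_params():
    distill = py_fallback()['_distill_params']

    # execute(stmt) / execute(stmt, **params)
    assert distill((), {}) == []
    assert distill((), {'x': 1}) == [{'x': 1}]

    # execute(stmt, {"key": "value"})
    assert distill(({'x': 1},), {}) == [{'x': 1}]

    # execute(stmt, [{}, {}, ...]) -- executemany
    assert distill(([{'x': 1}, {'x': 2}],), {}) == [{'x': 1}, {'x': 2}]

    # execute(stmt, "value", "value") -- raw positional
    assert distill(('a', 'b'), {}) == [('a', 'b')]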