ss
postgresql/__init__.py (new file, 116 lines)
@@ -0,0 +1,116 @@
# postgresql/__init__.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base
from . import pg8000  # noqa
from . import psycopg2  # noqa
from . import psycopg2cffi  # noqa
from . import pygresql  # noqa
from . import pypostgresql  # noqa
from . import zxjdbc  # noqa
from .array import All
from .array import Any
from .array import ARRAY
from .array import array
from .base import BIGINT
from .base import BIT
from .base import BOOLEAN
from .base import BYTEA
from .base import CHAR
from .base import CIDR
from .base import CreateEnumType
from .base import DATE
from .base import DOUBLE_PRECISION
from .base import DropEnumType
from .base import ENUM
from .base import FLOAT
from .base import INET
from .base import INTEGER
from .base import INTERVAL
from .base import MACADDR
from .base import MONEY
from .base import NUMERIC
from .base import OID
from .base import REAL
from .base import REGCLASS
from .base import SMALLINT
from .base import TEXT
from .base import TIME
from .base import TIMESTAMP
from .base import TSVECTOR
from .base import UUID
from .base import VARCHAR
from .dml import Insert
from .dml import insert
from .ext import aggregate_order_by
from .ext import array_agg
from .ext import ExcludeConstraint
from .hstore import HSTORE
from .hstore import hstore
from .json import JSON
from .json import JSONB
from .ranges import DATERANGE
from .ranges import INT4RANGE
from .ranges import INT8RANGE
from .ranges import NUMRANGE
from .ranges import TSRANGE
from .ranges import TSTZRANGE


base.dialect = dialect = psycopg2.dialect


__all__ = (
    "INTEGER",
    "BIGINT",
    "SMALLINT",
    "VARCHAR",
    "CHAR",
    "TEXT",
    "NUMERIC",
    "FLOAT",
    "REAL",
    "INET",
    "CIDR",
    "UUID",
    "BIT",
    "MACADDR",
    "MONEY",
    "OID",
    "REGCLASS",
    "DOUBLE_PRECISION",
    "TIMESTAMP",
    "TIME",
    "DATE",
    "BYTEA",
    "BOOLEAN",
    "INTERVAL",
    "ARRAY",
    "ENUM",
    "dialect",
    "array",
    "HSTORE",
    "hstore",
    "INT4RANGE",
    "INT8RANGE",
    "NUMRANGE",
    "DATERANGE",
    "TSVECTOR",
    "TSRANGE",
    "TSTZRANGE",
    "JSON",
    "JSONB",
    "Any",
    "All",
    "DropEnumType",
    "CreateEnumType",
    "ExcludeConstraint",
    "aggregate_order_by",
    "array_agg",
    "insert",
    "Insert",
)
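
The package file above only re-exports names and wires the default dialect to psycopg2. A minimal usage sketch (not part of the vendored file; table and column names are placeholders) showing how the re-exported types are typically addressed:

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.dialects import postgresql

metadata = MetaData()

# PG-specific column types come straight from the dialect package
example = Table(
    "example",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("tags", postgresql.ARRAY(postgresql.TEXT)),
    Column("payload", postgresql.JSONB),
)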
14 binary files not shown.
.venv/Lib/site-packages/sqlalchemy/dialects/postgresql/array.py (new file, 355 lines)
@@ -0,0 +1,355 @@
# postgresql/array.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .base import colspecs
from .base import ischema_names
from ... import types as sqltypes
from ...sql import expression
from ...sql import operators


try:
    from uuid import UUID as _python_UUID  # noqa
except ImportError:
    _python_UUID = None


def Any(other, arrexpr, operator=operators.eq):
    """A synonym for the :meth:`.ARRAY.Comparator.any` method.

    This method is legacy and is here for backwards-compatibility.

    .. seealso::

        :func:`.expression.any_`

    """

    return arrexpr.any(other, operator)


def All(other, arrexpr, operator=operators.eq):
    """A synonym for the :meth:`.ARRAY.Comparator.all` method.

    This method is legacy and is here for backwards-compatibility.

    .. seealso::

        :func:`.expression.all_`

    """

    return arrexpr.all(other, operator)


class array(expression.Tuple):

    """A PostgreSQL ARRAY literal.

    This is used to produce ARRAY literals in SQL expressions, e.g.::

        from sqlalchemy.dialects.postgresql import array
        from sqlalchemy.dialects import postgresql
        from sqlalchemy import select, func

        stmt = select([
            array([1,2]) + array([3,4,5])
        ])

        print(stmt.compile(dialect=postgresql.dialect()))

    Produces the SQL::

        SELECT ARRAY[%(param_1)s, %(param_2)s] ||
            ARRAY[%(param_3)s, %(param_4)s, %(param_5)s]) AS anon_1

    An instance of :class:`.array` will always have the datatype
    :class:`.ARRAY`.  The "inner" type of the array is inferred from
    the values present, unless the ``type_`` keyword argument is passed::

        array(['foo', 'bar'], type_=CHAR)

    Multidimensional arrays are produced by nesting :class:`.array` constructs.
    The dimensionality of the final :class:`.ARRAY` type is calculated by
    recursively adding the dimensions of the inner :class:`.ARRAY` type::

        stmt = select([
            array([
                array([1, 2]), array([3, 4]), array([column('q'), column('x')])
            ])
        ])
        print(stmt.compile(dialect=postgresql.dialect()))

    Produces::

        SELECT ARRAY[ARRAY[%(param_1)s, %(param_2)s],
        ARRAY[%(param_3)s, %(param_4)s], ARRAY[q, x]] AS anon_1

    .. versionadded:: 1.3.6 added support for multidimensional array literals

    .. seealso::

        :class:`.postgresql.ARRAY`

    """

    __visit_name__ = "array"

    def __init__(self, clauses, **kw):
        super(array, self).__init__(*clauses, **kw)
        if isinstance(self.type, ARRAY):
            self.type = ARRAY(
                self.type.item_type,
                dimensions=self.type.dimensions + 1
                if self.type.dimensions is not None
                else 2,
            )
        else:
            self.type = ARRAY(self.type)

    def _bind_param(self, operator, obj, _assume_scalar=False, type_=None):
        if _assume_scalar or operator is operators.getitem:
            # if getitem->slice were called, Indexable produces
            # a Slice object from that
            assert isinstance(obj, int)
            return expression.BindParameter(
                None,
                obj,
                _compared_to_operator=operator,
                type_=type_,
                _compared_to_type=self.type,
                unique=True,
            )

        else:
            return array(
                [
                    self._bind_param(
                        operator, o, _assume_scalar=True, type_=type_
                    )
                    for o in obj
                ]
            )

    def self_group(self, against=None):
        if against in (operators.any_op, operators.all_op, operators.getitem):
            return expression.Grouping(self)
        else:
            return self


CONTAINS = operators.custom_op("@>", precedence=5)

CONTAINED_BY = operators.custom_op("<@", precedence=5)

OVERLAP = operators.custom_op("&&", precedence=5)


class ARRAY(sqltypes.ARRAY):

    """PostgreSQL ARRAY type.

    .. versionchanged:: 1.1 The :class:`.postgresql.ARRAY` type is now
       a subclass of the core :class:`.types.ARRAY` type.

    The :class:`.postgresql.ARRAY` type is constructed in the same way
    as the core :class:`.types.ARRAY` type; a member type is required, and a
    number of dimensions is recommended if the type is to be used for more
    than one dimension::

        from sqlalchemy.dialects import postgresql

        mytable = Table("mytable", metadata,
            Column("data", postgresql.ARRAY(Integer, dimensions=2))
        )

    The :class:`.postgresql.ARRAY` type provides all operations defined on the
    core :class:`.types.ARRAY` type, including support for "dimensions",
    indexed access, and simple matching such as
    :meth:`.types.ARRAY.Comparator.any` and
    :meth:`.types.ARRAY.Comparator.all`.  :class:`.postgresql.ARRAY` class also
    provides PostgreSQL-specific methods for containment operations, including
    :meth:`.postgresql.ARRAY.Comparator.contains`
    :meth:`.postgresql.ARRAY.Comparator.contained_by`, and
    :meth:`.postgresql.ARRAY.Comparator.overlap`, e.g.::

        mytable.c.data.contains([1, 2])

    The :class:`.postgresql.ARRAY` type may not be supported on all
    PostgreSQL DBAPIs; it is currently known to work on psycopg2 only.

    Additionally, the :class:`.postgresql.ARRAY` type does not work directly in
    conjunction with the :class:`.ENUM` type.  For a workaround, see the
    special type at :ref:`postgresql_array_of_enum`.

    .. seealso::

        :class:`.types.ARRAY` - base array type

        :class:`.postgresql.array` - produces a literal array value.

    """

    class Comparator(sqltypes.ARRAY.Comparator):

        """Define comparison operations for :class:`.ARRAY`.

        Note that these operations are in addition to those provided
        by the base :class:`.types.ARRAY.Comparator` class, including
        :meth:`.types.ARRAY.Comparator.any` and
        :meth:`.types.ARRAY.Comparator.all`.

        """

        def contains(self, other, **kwargs):
            """Boolean expression.  Test if elements are a superset of the
            elements of the argument array expression.
            """
            return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)

        def contained_by(self, other):
            """Boolean expression.  Test if elements are a proper subset of the
            elements of the argument array expression.
            """
            return self.operate(
                CONTAINED_BY, other, result_type=sqltypes.Boolean
            )

        def overlap(self, other):
            """Boolean expression.  Test if array has elements in common with
            an argument array expression.
            """
            return self.operate(OVERLAP, other, result_type=sqltypes.Boolean)

    comparator_factory = Comparator

    def __init__(
        self, item_type, as_tuple=False, dimensions=None, zero_indexes=False
    ):
        """Construct an ARRAY.

        E.g.::

          Column('myarray', ARRAY(Integer))

        Arguments are:

        :param item_type: The data type of items of this array. Note that
          dimensionality is irrelevant here, so multi-dimensional arrays like
          ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as
          ``ARRAY(ARRAY(Integer))`` or such.

        :param as_tuple=False: Specify whether return results
          should be converted to tuples from lists. DBAPIs such
          as psycopg2 return lists by default. When tuples are
          returned, the results are hashable.

        :param dimensions: if non-None, the ARRAY will assume a fixed
         number of dimensions.  This will cause the DDL emitted for this
         ARRAY to include the exact number of bracket clauses ``[]``,
         and will also optimize the performance of the type overall.
         Note that PG arrays are always implicitly "non-dimensioned",
         meaning they can store any number of dimensions no matter how
         they were declared.

        :param zero_indexes=False: when True, index values will be converted
         between Python zero-based and PostgreSQL one-based indexes, e.g.
         a value of one will be added to all index values before passing
         to the database.

         .. versionadded:: 0.9.5

        """
        if isinstance(item_type, ARRAY):
            raise ValueError(
                "Do not nest ARRAY types; ARRAY(basetype) "
                "handles multi-dimensional arrays of basetype"
            )
        if isinstance(item_type, type):
            item_type = item_type()
        self.item_type = item_type
        self.as_tuple = as_tuple
        self.dimensions = dimensions
        self.zero_indexes = zero_indexes

    @property
    def hashable(self):
        return self.as_tuple

    @property
    def python_type(self):
        return list

    def compare_values(self, x, y):
        return x == y

    def _proc_array(self, arr, itemproc, dim, collection):
        if dim is None:
            arr = list(arr)
        if (
            dim == 1
            or dim is None
            and (
                # this has to be (list, tuple), or at least
                # not hasattr('__iter__'), since Py3K strings
                # etc. have __iter__
                not arr
                or not isinstance(arr[0], (list, tuple))
            )
        ):
            if itemproc:
                return collection(itemproc(x) for x in arr)
            else:
                return collection(arr)
        else:
            return collection(
                self._proc_array(
                    x,
                    itemproc,
                    dim - 1 if dim is not None else None,
                    collection,
                )
                for x in arr
            )

    def bind_processor(self, dialect):
        item_proc = self.item_type.dialect_impl(dialect).bind_processor(
            dialect
        )

        def process(value):
            if value is None:
                return value
            else:
                return self._proc_array(
                    value, item_proc, self.dimensions, list
                )

        return process

    def result_processor(self, dialect, coltype):
        item_proc = self.item_type.dialect_impl(dialect).result_processor(
            dialect, coltype
        )

        def process(value):
            if value is None:
                return value
            else:
                return self._proc_array(
                    value,
                    item_proc,
                    self.dimensions,
                    tuple if self.as_tuple else list,
                )

        return process


colspecs[sqltypes.ARRAY] = ARRAY
ischema_names["_array"] = ARRAY
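
A minimal sketch (not part of the vendored file) of the two constructs defined above, the array() literal and the ARRAY.Comparator.contains() operator, compiled against the PostgreSQL dialect; table and column names are placeholders.

from sqlalchemy import Column, Integer, MetaData, Table, select
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import array

metadata = MetaData()
t = Table("t", metadata, Column("data", postgresql.ARRAY(Integer)))

# ARRAY[...] || ARRAY[...] literal concatenation
literal_stmt = select([array([1, 2]) + array([3, 4])])

# data @> ARRAY[...] via the CONTAINS custom operator defined above
contains_stmt = select([t]).where(t.c.data.contains([1, 2]))

print(literal_stmt.compile(dialect=postgresql.dialect()))
print(contains_stmt.compile(dialect=postgresql.dialect()))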
.venv/Lib/site-packages/sqlalchemy/dialects/postgresql/base.py (new file, 3545 lines; diff not shown because it is too large)
.venv/Lib/site-packages/sqlalchemy/dialects/postgresql/dml.py (new file, 226 lines)
@@ -0,0 +1,226 @@
# postgresql/on_conflict.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import ext
from ... import util
from ...sql import schema
from ...sql.base import _generative
from ...sql.dml import Insert as StandardInsert
from ...sql.elements import ClauseElement
from ...sql.expression import alias
from ...util.langhelpers import public_factory


__all__ = ("Insert", "insert")


class Insert(StandardInsert):
    """PostgreSQL-specific implementation of INSERT.

    Adds methods for PG-specific syntaxes such as ON CONFLICT.

    .. versionadded:: 1.1

    """

    @util.memoized_property
    def excluded(self):
        """Provide the ``excluded`` namespace for an ON CONFLICT statement

        PG's ON CONFLICT clause allows reference to the row that would
        be inserted, known as ``excluded``.  This attribute provides
        all columns in this row to be referenceable.

        .. seealso::

            :ref:`postgresql_insert_on_conflict` - example of how
            to use :attr:`.Insert.excluded`

        """
        return alias(self.table, name="excluded").columns

    @_generative
    def on_conflict_do_update(
        self,
        constraint=None,
        index_elements=None,
        index_where=None,
        set_=None,
        where=None,
    ):
        r"""
        Specifies a DO UPDATE SET action for ON CONFLICT clause.

        Either the ``constraint`` or ``index_elements`` argument is
        required, but only one of these can be specified.

        :param constraint:
         The name of a unique or exclusion constraint on the table,
         or the constraint object itself if it has a .name attribute.

        :param index_elements:
         A sequence consisting of string column names, :class:`.Column`
         objects, or other column expression objects that will be used
         to infer a target index.

        :param index_where:
         Additional WHERE criterion that can be used to infer a
         conditional target index.

        :param set\_:
         Required argument. A dictionary or other mapping object
         with column names as keys and expressions or literals as values,
         specifying the ``SET`` actions to take.
         If the target :class:`.Column` specifies a ".key" attribute distinct
         from the column name, that key should be used.

         .. warning:: This dictionary does **not** take into account
            Python-specified default UPDATE values or generation functions,
            e.g. those specified using :paramref:`.Column.onupdate`.
            These values will not be exercised for an ON CONFLICT style of
            UPDATE, unless they are manually specified in the
            :paramref:`.Insert.on_conflict_do_update.set_` dictionary.

        :param where:
         Optional argument. If present, can be a literal SQL
         string or an acceptable expression for a ``WHERE`` clause
         that restricts the rows affected by ``DO UPDATE SET``. Rows
         not meeting the ``WHERE`` condition will not be updated
         (effectively a ``DO NOTHING`` for those rows).

         .. versionadded:: 1.1


        .. seealso::

            :ref:`postgresql_insert_on_conflict`

        """
        self._post_values_clause = OnConflictDoUpdate(
            constraint, index_elements, index_where, set_, where
        )
        return self

    @_generative
    def on_conflict_do_nothing(
        self, constraint=None, index_elements=None, index_where=None
    ):
        """
        Specifies a DO NOTHING action for ON CONFLICT clause.

        The ``constraint`` and ``index_elements`` arguments
        are optional, but only one of these can be specified.

        :param constraint:
         The name of a unique or exclusion constraint on the table,
         or the constraint object itself if it has a .name attribute.

        :param index_elements:
         A sequence consisting of string column names, :class:`.Column`
         objects, or other column expression objects that will be used
         to infer a target index.

        :param index_where:
         Additional WHERE criterion that can be used to infer a
         conditional target index.

        .. versionadded:: 1.1

        .. seealso::

            :ref:`postgresql_insert_on_conflict`

        """
        self._post_values_clause = OnConflictDoNothing(
            constraint, index_elements, index_where
        )
        return self


insert = public_factory(Insert, ".dialects.postgresql.insert")


class OnConflictClause(ClauseElement):
    def __init__(self, constraint=None, index_elements=None, index_where=None):

        if constraint is not None:
            if not isinstance(constraint, util.string_types) and isinstance(
                constraint,
                (schema.Index, schema.Constraint, ext.ExcludeConstraint),
            ):
                constraint = getattr(constraint, "name") or constraint

        if constraint is not None:
            if index_elements is not None:
                raise ValueError(
                    "'constraint' and 'index_elements' are mutually exclusive"
                )

            if isinstance(constraint, util.string_types):
                self.constraint_target = constraint
                self.inferred_target_elements = None
                self.inferred_target_whereclause = None
            elif isinstance(constraint, schema.Index):
                index_elements = constraint.expressions
                index_where = constraint.dialect_options["postgresql"].get(
                    "where"
                )
            elif isinstance(constraint, ext.ExcludeConstraint):
                index_elements = constraint.columns
                index_where = constraint.where
            else:
                index_elements = constraint.columns
                index_where = constraint.dialect_options["postgresql"].get(
                    "where"
                )

        if index_elements is not None:
            self.constraint_target = None
            self.inferred_target_elements = index_elements
            self.inferred_target_whereclause = index_where
        elif constraint is None:
            self.constraint_target = (
                self.inferred_target_elements
            ) = self.inferred_target_whereclause = None


class OnConflictDoNothing(OnConflictClause):
    __visit_name__ = "on_conflict_do_nothing"


class OnConflictDoUpdate(OnConflictClause):
    __visit_name__ = "on_conflict_do_update"

    def __init__(
        self,
        constraint=None,
        index_elements=None,
        index_where=None,
        set_=None,
        where=None,
    ):
        super(OnConflictDoUpdate, self).__init__(
            constraint=constraint,
            index_elements=index_elements,
            index_where=index_where,
        )

        if (
            self.inferred_target_elements is None
            and self.constraint_target is None
        ):
            raise ValueError(
                "Either constraint or index_elements, "
                "but not both, must be specified unless DO NOTHING"
            )

        if not isinstance(set_, dict) or not set_:
            raise ValueError("set parameter must be a non-empty dictionary")
        self.update_values_to_set = [
            (key, value) for key, value in set_.items()
        ]
        self.update_whereclause = where
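
A minimal sketch (not part of the vendored file) of the ON CONFLICT API defined above, using the excluded namespace documented in Insert.excluded; table and column names are placeholders.

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects.postgresql import insert

metadata = MetaData()
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String),
)

stmt = insert(users).values(id=1, name="spongebob")

# ON CONFLICT (id) DO UPDATE SET name = excluded.name
do_update = stmt.on_conflict_do_update(
    index_elements=["id"],
    set_={"name": stmt.excluded.name},
)

# ON CONFLICT (id) DO NOTHING
do_nothing = stmt.on_conflict_do_nothing(index_elements=["id"])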
.venv/Lib/site-packages/sqlalchemy/dialects/postgresql/ext.py (new file, 224 lines)
@@ -0,0 +1,224 @@
# postgresql/ext.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .array import ARRAY
from ...sql import elements
from ...sql import expression
from ...sql import functions
from ...sql.schema import ColumnCollectionConstraint


class aggregate_order_by(expression.ColumnElement):
    """Represent a PostgreSQL aggregate order by expression.

    E.g.::

        from sqlalchemy.dialects.postgresql import aggregate_order_by
        expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
        stmt = select([expr])

    would represent the expression::

        SELECT array_agg(a ORDER BY b DESC) FROM table;

    Similarly::

        expr = func.string_agg(
            table.c.a,
            aggregate_order_by(literal_column("','"), table.c.a)
        )
        stmt = select([expr])

    Would represent::

        SELECT string_agg(a, ',' ORDER BY a) FROM table;

    .. versionadded:: 1.1

    .. versionchanged:: 1.2.13 - the ORDER BY argument may be multiple terms

    .. seealso::

        :class:`.array_agg`

    """

    __visit_name__ = "aggregate_order_by"

    def __init__(self, target, *order_by):
        self.target = elements._literal_as_binds(target)

        _lob = len(order_by)
        if _lob == 0:
            raise TypeError("at least one ORDER BY element is required")
        elif _lob == 1:
            self.order_by = elements._literal_as_binds(order_by[0])
        else:
            self.order_by = elements.ClauseList(
                *order_by, _literal_as_text=elements._literal_as_binds
            )

    def self_group(self, against=None):
        return self

    def get_children(self, **kwargs):
        return self.target, self.order_by

    def _copy_internals(self, clone=elements._clone, **kw):
        self.target = clone(self.target, **kw)
        self.order_by = clone(self.order_by, **kw)

    @property
    def _from_objects(self):
        return self.target._from_objects + self.order_by._from_objects


class ExcludeConstraint(ColumnCollectionConstraint):
    """A table-level EXCLUDE constraint.

    Defines an EXCLUDE constraint as described in the `postgres
    documentation`__.

    __ http://www.postgresql.org/docs/9.0/static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE

    """  # noqa

    __visit_name__ = "exclude_constraint"

    where = None

    @elements._document_text_coercion(
        "where",
        ":class:`.ExcludeConstraint`",
        ":paramref:`.ExcludeConstraint.where`",
    )
    def __init__(self, *elements, **kw):
        r"""
        Create an :class:`.ExcludeConstraint` object.

        E.g.::

            const = ExcludeConstraint(
                (Column('period'), '&&'),
                (Column('group'), '='),
                where=(Column('group') != 'some group')
            )

        The constraint is normally embedded into the :class:`.Table` construct
        directly, or added later using :meth:`.append_constraint`::

            some_table = Table(
                'some_table', metadata,
                Column('id', Integer, primary_key=True),
                Column('period', TSRANGE()),
                Column('group', String)
            )

            some_table.append_constraint(
                ExcludeConstraint(
                    (some_table.c.period, '&&'),
                    (some_table.c.group, '='),
                    where=some_table.c.group != 'some group',
                    name='some_table_excl_const'
                )
            )

        :param \*elements:

          A sequence of two tuples of the form ``(column, operator)`` where
          "column" is a SQL expression element or a raw SQL string, most
          typically a :class:`.Column` object, and "operator" is a string
          containing the operator to use.   In order to specify a column name
          when a :class:`.Column` object is not available, while ensuring
          that any necessary quoting rules take effect, an ad-hoc
          :class:`.Column` or :func:`.sql.expression.column` object should be
          used.

        :param name:
          Optional, the in-database name of this constraint.

        :param deferrable:
          Optional bool.  If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.

        :param initially:
          Optional string.  If set, emit INITIALLY <value> when issuing DDL
          for this constraint.

        :param using:
          Optional string.  If set, emit USING <index_method> when issuing DDL
          for this constraint. Defaults to 'gist'.

        :param where:
          Optional SQL expression construct or literal SQL string.
          If set, emit WHERE <predicate> when issuing DDL
          for this constraint.

        """
        columns = []
        render_exprs = []
        self.operators = {}

        expressions, operators = zip(*elements)

        for (expr, column, strname, add_element), operator in zip(
            self._extract_col_expression_collection(expressions), operators
        ):
            if add_element is not None:
                columns.append(add_element)

            name = column.name if column is not None else strname

            if name is not None:
                # backwards compat
                self.operators[name] = operator

            expr = expression._literal_as_column(expr)

            render_exprs.append((expr, name, operator))

        self._render_exprs = render_exprs

        ColumnCollectionConstraint.__init__(
            self,
            *columns,
            name=kw.get("name"),
            deferrable=kw.get("deferrable"),
            initially=kw.get("initially")
        )
        self.using = kw.get("using", "gist")
        where = kw.get("where")
        if where is not None:
            self.where = expression._literal_as_text(
                where, allow_coercion_to_text=True
            )

    def copy(self, **kw):
        elements = [(col, self.operators[col]) for col in self.columns.keys()]
        c = self.__class__(
            *elements,
            name=self.name,
            deferrable=self.deferrable,
            initially=self.initially,
            where=self.where,
            using=self.using
        )
        c.dispatch._update(self.dispatch)
        return c


def array_agg(*arg, **kw):
    """PostgreSQL-specific form of :class:`.array_agg`, ensures
    return type is :class:`.postgresql.ARRAY` and not
    the plain :class:`.types.ARRAY`, unless an explicit ``type_``
    is passed.

    .. versionadded:: 1.1

    """
    kw["_default_array_type"] = ARRAY
    return functions.func.array_agg(*arg, **kw)
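
A minimal sketch (not part of the vendored file) combining aggregate_order_by() with the dialect-specific array_agg() defined above; table and column names are placeholders.

from sqlalchemy import Column, Integer, MetaData, String, Table, select
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import aggregate_order_by, array_agg

metadata = MetaData()
t = Table(
    "t",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String),
)

# SELECT array_agg(t.name ORDER BY t.id DESC), with a postgresql.ARRAY result type
stmt = select([array_agg(aggregate_order_by(t.c.name, t.c.id.desc()))])
print(stmt.compile(dialect=postgresql.dialect()))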
.venv/Lib/site-packages/sqlalchemy/dialects/postgresql/hstore.py (new file, 450 lines)
@@ -0,0 +1,450 @@
# postgresql/hstore.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import re

from .array import ARRAY
from .base import ischema_names
from ... import types as sqltypes
from ... import util
from ...sql import functions as sqlfunc
from ...sql import operators


__all__ = ("HSTORE", "hstore")

idx_precedence = operators._PRECEDENCE[operators.json_getitem_op]

GETITEM = operators.custom_op(
    "->",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

HAS_KEY = operators.custom_op(
    "?",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

HAS_ALL = operators.custom_op(
    "?&",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

HAS_ANY = operators.custom_op(
    "?|",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

CONTAINS = operators.custom_op(
    "@>",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

CONTAINED_BY = operators.custom_op(
    "<@",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)


class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine):
    """Represent the PostgreSQL HSTORE type.

    The :class:`.HSTORE` type stores dictionaries containing strings, e.g.::

        data_table = Table('data_table', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', HSTORE)
        )

        with engine.connect() as conn:
            conn.execute(
                data_table.insert(),
                data = {"key1": "value1", "key2": "value2"}
            )

    :class:`.HSTORE` provides for a wide range of operations, including:

    * Index operations::

        data_table.c.data['some key'] == 'some value'

    * Containment operations::

        data_table.c.data.has_key('some key')

        data_table.c.data.has_all(['one', 'two', 'three'])

    * Concatenation::

        data_table.c.data + {"k1": "v1"}

    For a full list of special methods see
    :class:`.HSTORE.comparator_factory`.

    For usage with the SQLAlchemy ORM, it may be desirable to combine
    the usage of :class:`.HSTORE` with :class:`.MutableDict` dictionary
    now part of the :mod:`sqlalchemy.ext.mutable`
    extension.  This extension will allow "in-place" changes to the
    dictionary, e.g. addition of new keys or replacement/removal of existing
    keys to/from the current dictionary, to produce events which will be
    detected by the unit of work::

        from sqlalchemy.ext.mutable import MutableDict

        class MyClass(Base):
            __tablename__ = 'data_table'

            id = Column(Integer, primary_key=True)
            data = Column(MutableDict.as_mutable(HSTORE))

        my_object = session.query(MyClass).one()

        # in-place mutation, requires Mutable extension
        # in order for the ORM to detect
        my_object.data['some_key'] = 'some value'

        session.commit()

    When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM
    will not be alerted to any changes to the contents of an existing
    dictionary, unless that dictionary value is re-assigned to the
    HSTORE-attribute itself, thus generating a change event.

    .. seealso::

        :class:`.hstore` - render the PostgreSQL ``hstore()`` function.


    """

    __visit_name__ = "HSTORE"
    hashable = False
    text_type = sqltypes.Text()

    def __init__(self, text_type=None):
        """Construct a new :class:`.HSTORE`.

        :param text_type: the type that should be used for indexed values.
         Defaults to :class:`.types.Text`.

         .. versionadded:: 1.1.0

        """
        if text_type is not None:
            self.text_type = text_type

    class Comparator(
        sqltypes.Indexable.Comparator, sqltypes.Concatenable.Comparator
    ):
        """Define comparison operations for :class:`.HSTORE`."""

        def has_key(self, other):
            """Boolean expression.  Test for presence of a key.  Note that the
            key may be a SQLA expression.
            """
            return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)

        def has_all(self, other):
            """Boolean expression.  Test for presence of all keys in jsonb
            """
            return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)

        def has_any(self, other):
            """Boolean expression.  Test for presence of any key in jsonb
            """
            return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)

        def contains(self, other, **kwargs):
            """Boolean expression.  Test if keys (or array) are a superset
            of/contained the keys of the argument jsonb expression.
            """
            return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)

        def contained_by(self, other):
            """Boolean expression.  Test if keys are a proper subset of the
            keys of the argument jsonb expression.
            """
            return self.operate(
                CONTAINED_BY, other, result_type=sqltypes.Boolean
            )

        def _setup_getitem(self, index):
            return GETITEM, index, self.type.text_type

        def defined(self, key):
            """Boolean expression.  Test for presence of a non-NULL value for
            the key.  Note that the key may be a SQLA expression.
            """
            return _HStoreDefinedFunction(self.expr, key)

        def delete(self, key):
            """HStore expression.  Returns the contents of this hstore with the
            given key deleted.  Note that the key may be a SQLA expression.
            """
            if isinstance(key, dict):
                key = _serialize_hstore(key)
            return _HStoreDeleteFunction(self.expr, key)

        def slice(self, array):
            """HStore expression.  Returns a subset of an hstore defined by
            array of keys.
            """
            return _HStoreSliceFunction(self.expr, array)

        def keys(self):
            """Text array expression.  Returns array of keys."""
            return _HStoreKeysFunction(self.expr)

        def vals(self):
            """Text array expression.  Returns array of values."""
            return _HStoreValsFunction(self.expr)

        def array(self):
            """Text array expression.  Returns array of alternating keys and
            values.
            """
            return _HStoreArrayFunction(self.expr)

        def matrix(self):
            """Text array expression.  Returns array of [key, value] pairs."""
            return _HStoreMatrixFunction(self.expr)

    comparator_factory = Comparator

    def bind_processor(self, dialect):
        if util.py2k:
            encoding = dialect.encoding

            def process(value):
                if isinstance(value, dict):
                    return _serialize_hstore(value).encode(encoding)
                else:
                    return value

        else:

            def process(value):
                if isinstance(value, dict):
                    return _serialize_hstore(value)
                else:
                    return value

        return process

    def result_processor(self, dialect, coltype):
        if util.py2k:
            encoding = dialect.encoding

            def process(value):
                if value is not None:
                    return _parse_hstore(value.decode(encoding))
                else:
                    return value

        else:

            def process(value):
                if value is not None:
                    return _parse_hstore(value)
                else:
                    return value

        return process


ischema_names["hstore"] = HSTORE


class hstore(sqlfunc.GenericFunction):
    """Construct an hstore value within a SQL expression using the
    PostgreSQL ``hstore()`` function.

    The :class:`.hstore` function accepts one or two arguments as described
    in the PostgreSQL documentation.

    E.g.::

        from sqlalchemy.dialects.postgresql import array, hstore

        select([hstore('key1', 'value1')])

        select([
            hstore(
                array(['key1', 'key2', 'key3']),
                array(['value1', 'value2', 'value3'])
            )
        ])

    .. seealso::

        :class:`.HSTORE` - the PostgreSQL ``HSTORE`` datatype.

    """

    type = HSTORE
    name = "hstore"


class _HStoreDefinedFunction(sqlfunc.GenericFunction):
    type = sqltypes.Boolean
    name = "defined"


class _HStoreDeleteFunction(sqlfunc.GenericFunction):
    type = HSTORE
    name = "delete"


class _HStoreSliceFunction(sqlfunc.GenericFunction):
    type = HSTORE
    name = "slice"


class _HStoreKeysFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = "akeys"


class _HStoreValsFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = "avals"


class _HStoreArrayFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = "hstore_to_array"


class _HStoreMatrixFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = "hstore_to_matrix"


#
# parsing.  note that none of this is used with the psycopg2 backend,
# which provides its own native extensions.
#

# My best guess at the parsing rules of hstore literals, since no formal
# grammar is given.  This is mostly reverse engineered from PG's input parser
# behavior.
HSTORE_PAIR_RE = re.compile(
    r"""
    (
      "(?P<key> (\\ . | [^"])* )"       # Quoted key
    )
    [ ]* => [ ]*                        # Pair operator, optional adjoining whitespace
    (
        (?P<value_null> NULL )          # NULL value
      | "(?P<value> (\\ . | [^"])* )"   # Quoted value
    )
    """,
    re.VERBOSE,
)

HSTORE_DELIMITER_RE = re.compile(
    r"""
    [ ]* , [ ]*
    """,
    re.VERBOSE,
)


def _parse_error(hstore_str, pos):
    """format an unmarshalling error."""

    ctx = 20
    hslen = len(hstore_str)

    parsed_tail = hstore_str[max(pos - ctx - 1, 0) : min(pos, hslen)]
    residual = hstore_str[min(pos, hslen) : min(pos + ctx + 1, hslen)]

    if len(parsed_tail) > ctx:
        parsed_tail = "[...]" + parsed_tail[1:]
    if len(residual) > ctx:
        residual = residual[:-1] + "[...]"

    return "After %r, could not parse residual at position %d: %r" % (
        parsed_tail,
        pos,
        residual,
    )


def _parse_hstore(hstore_str):
    """Parse an hstore from its literal string representation.

    Attempts to approximate PG's hstore input parsing rules as closely as
    possible. Although currently this is not strictly necessary, since the
    current implementation of hstore's output syntax is stricter than what it
    accepts as input, the documentation makes no guarantees that will always
    be the case.

    """
    result = {}
    pos = 0
    pair_match = HSTORE_PAIR_RE.match(hstore_str)

    while pair_match is not None:
        key = pair_match.group("key").replace(r"\"", '"').replace("\\\\", "\\")
        if pair_match.group("value_null"):
            value = None
        else:
            value = (
                pair_match.group("value")
                .replace(r"\"", '"')
                .replace("\\\\", "\\")
            )
        result[key] = value

        pos += pair_match.end()

        delim_match = HSTORE_DELIMITER_RE.match(hstore_str[pos:])
        if delim_match is not None:
            pos += delim_match.end()

        pair_match = HSTORE_PAIR_RE.match(hstore_str[pos:])

    if pos != len(hstore_str):
        raise ValueError(_parse_error(hstore_str, pos))

    return result


def _serialize_hstore(val):
    """Serialize a dictionary into an hstore literal.  Keys and values must
    both be strings (except None for values).

    """

    def esc(s, position):
        if position == "value" and s is None:
            return "NULL"
        elif isinstance(s, util.string_types):
            return '"%s"' % s.replace("\\", "\\\\").replace('"', r"\"")
        else:
            raise ValueError(
                "%r in %s position is not a string." % (s, position)
            )

    return ", ".join(
        "%s=>%s" % (esc(k, "key"), esc(v, "value")) for k, v in val.items()
    )
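
A minimal sketch (not part of the vendored file) of the HSTORE operators defined above, indexing via ->, has_key via ?, and contains via @>; table and column names are placeholders.

from sqlalchemy import Column, Integer, MetaData, Table, select
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import HSTORE

metadata = MetaData()
data_table = Table(
    "data_table",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("data", HSTORE),
)

stmt = (
    select([data_table.c.id])
    .where(data_table.c.data["k1"] == "v1")           # data -> 'k1' = 'v1'
    .where(data_table.c.data.has_key("k2"))           # data ? 'k2'
    .where(data_table.c.data.contains({"k3": "v3"}))  # data @> 'k3=>v3'
)
print(stmt.compile(dialect=postgresql.dialect()))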
.venv/Lib/site-packages/sqlalchemy/dialects/postgresql/json.py (new file, 318 lines)
@@ -0,0 +1,318 @@
# postgresql/json.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import

from .base import colspecs
from .base import ischema_names
from ... import types as sqltypes
from ... import util
from ...sql import operators


__all__ = ("JSON", "JSONB")

idx_precedence = operators._PRECEDENCE[operators.json_getitem_op]

ASTEXT = operators.custom_op(
    "->>",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

JSONPATH_ASTEXT = operators.custom_op(
    "#>>",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)


HAS_KEY = operators.custom_op(
    "?",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

HAS_ALL = operators.custom_op(
    "?&",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

HAS_ANY = operators.custom_op(
    "?|",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

CONTAINS = operators.custom_op(
    "@>",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)

CONTAINED_BY = operators.custom_op(
    "<@",
    precedence=idx_precedence,
    natural_self_precedent=True,
    eager_grouping=True,
)


class JSONPathType(sqltypes.JSON.JSONPathType):
    def bind_processor(self, dialect):
        super_proc = self.string_bind_processor(dialect)

        def process(value):
            assert isinstance(value, util.collections_abc.Sequence)
            tokens = [util.text_type(elem) for elem in value]
            value = "{%s}" % (", ".join(tokens))
            if super_proc:
                value = super_proc(value)
            return value

        return process

    def literal_processor(self, dialect):
        super_proc = self.string_literal_processor(dialect)

        def process(value):
            assert isinstance(value, util.collections_abc.Sequence)
            tokens = [util.text_type(elem) for elem in value]
            value = "{%s}" % (", ".join(tokens))
            if super_proc:
                value = super_proc(value)
            return value

        return process


colspecs[sqltypes.JSON.JSONPathType] = JSONPathType


class JSON(sqltypes.JSON):
    """Represent the PostgreSQL JSON type.

    This type is a specialization of the Core-level :class:`.types.JSON`
    type.  Be sure to read the documentation for :class:`.types.JSON` for
    important tips regarding treatment of NULL values and ORM use.

    .. versionchanged:: 1.1 :class:`.postgresql.JSON` is now a PostgreSQL-
       specific specialization of the new :class:`.types.JSON` type.

    The operators provided by the PostgreSQL version of :class:`.JSON`
    include:

    * Index operations (the ``->`` operator)::

        data_table.c.data['some key']

        data_table.c.data[5]


    * Index operations returning text (the ``->>`` operator)::

        data_table.c.data['some key'].astext == 'some value'

    * Index operations with CAST
      (equivalent to ``CAST(col ->> ['some key'] AS <type>)``)::

        data_table.c.data['some key'].astext.cast(Integer) == 5

    * Path index operations (the ``#>`` operator)::

        data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')]

    * Path index operations returning text (the ``#>>`` operator)::

        data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')].astext == 'some value'

    .. versionchanged:: 1.1  The :meth:`.ColumnElement.cast` operator on
       JSON objects now requires that the :attr:`.JSON.Comparator.astext`
       modifier be called explicitly, if the cast works only from a textual
       string.

    Index operations return an expression object whose type defaults to
    :class:`.JSON` by default, so that further JSON-oriented instructions
    may be called upon the result type.

    Custom serializers and deserializers are specified at the dialect level,
    that is using :func:`.create_engine`.  The reason for this is that when
    using psycopg2, the DBAPI only allows serializers at the per-cursor
    or per-connection level.  E.g.::

        engine = create_engine("postgresql://scott:tiger@localhost/test",
                                json_serializer=my_serialize_fn,
                                json_deserializer=my_deserialize_fn
                        )

    When using the psycopg2 dialect, the json_deserializer is registered
    against the database using ``psycopg2.extras.register_default_json``.

    .. seealso::

        :class:`.types.JSON` - Core level JSON type

        :class:`.JSONB`

    """  # noqa

    astext_type = sqltypes.Text()

    def __init__(self, none_as_null=False, astext_type=None):
        """Construct a :class:`.JSON` type.

        :param none_as_null: if True, persist the value ``None`` as a
         SQL NULL value, not the JSON encoding of ``null``.  Note that
         when this flag is False, the :func:`.null` construct can still
         be used to persist a NULL value::

             from sqlalchemy import null
             conn.execute(table.insert(), data=null())

         .. versionchanged:: 0.9.8 - Added ``none_as_null``, and :func:`.null`
            is now supported in order to persist a NULL value.

         .. seealso::

              :attr:`.JSON.NULL`

        :param astext_type: the type to use for the
         :attr:`.JSON.Comparator.astext`
         accessor on indexed attributes.  Defaults to :class:`.types.Text`.

         .. versionadded:: 1.1

        """
        super(JSON, self).__init__(none_as_null=none_as_null)
        if astext_type is not None:
            self.astext_type = astext_type

    class Comparator(sqltypes.JSON.Comparator):
        """Define comparison operations for :class:`.JSON`."""

        @property
        def astext(self):
            """On an indexed expression, use the "astext" (e.g. "->>")
            conversion when rendered in SQL.

            E.g.::

                select([data_table.c.data['some key'].astext])

            .. seealso::

                :meth:`.ColumnElement.cast`

            """

            if isinstance(self.expr.right.type, sqltypes.JSON.JSONPathType):
                return self.expr.left.operate(
                    JSONPATH_ASTEXT,
                    self.expr.right,
                    result_type=self.type.astext_type,
                )
            else:
                return self.expr.left.operate(
                    ASTEXT, self.expr.right, result_type=self.type.astext_type
                )

    comparator_factory = Comparator


colspecs[sqltypes.JSON] = JSON
ischema_names["json"] = JSON


class JSONB(JSON):
    """Represent the PostgreSQL JSONB type.

    The :class:`.JSONB` type stores arbitrary JSONB format data, e.g.::

        data_table = Table('data_table', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', JSONB)
        )

        with engine.connect() as conn:
            conn.execute(
                data_table.insert(),
                data = {"key1": "value1", "key2": "value2"}
            )

    The :class:`.JSONB` type includes all operations provided by
    :class:`.JSON`, including the same behaviors for indexing operations.
    It also adds additional operators specific to JSONB, including
    :meth:`.JSONB.Comparator.has_key`, :meth:`.JSONB.Comparator.has_all`,
    :meth:`.JSONB.Comparator.has_any`, :meth:`.JSONB.Comparator.contains`,
    and :meth:`.JSONB.Comparator.contained_by`.

    Like the :class:`.JSON` type, the :class:`.JSONB` type does not detect
    in-place changes when used with the ORM, unless the
    :mod:`sqlalchemy.ext.mutable` extension is used.

    Custom serializers and deserializers
    are shared with the :class:`.JSON` class, using the ``json_serializer``
    and ``json_deserializer`` keyword arguments.  These must be specified
    at the dialect level using :func:`.create_engine`.  When using
    psycopg2, the serializers are associated with the jsonb type using
    ``psycopg2.extras.register_default_jsonb`` on a per-connection basis,
    in the same way that ``psycopg2.extras.register_default_json`` is used
    to register these handlers with the json type.

    .. versionadded:: 0.9.7

    .. seealso::

        :class:`.JSON`

    """

    __visit_name__ = "JSONB"

    class Comparator(JSON.Comparator):
        """Define comparison operations for :class:`.JSON`."""

        def has_key(self, other):
            """Boolean expression.  Test for presence of a key.  Note that the
            key may be a SQLA expression.
            """
            return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)

        def has_all(self, other):
            """Boolean expression.  Test for presence of all keys in jsonb
            """
            return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)

        def has_any(self, other):
            """Boolean expression.  Test for presence of any key in jsonb
            """
            return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)

        def contains(self, other, **kwargs):
            """Boolean expression.  Test if keys (or array) are a superset
            of/contained the keys of the argument jsonb expression.
            """
            return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)

        def contained_by(self, other):
            """Boolean expression.  Test if keys are a proper subset of the
            keys of the argument jsonb expression.
            """
            return self.operate(
                CONTAINED_BY, other, result_type=sqltypes.Boolean
            )

    comparator_factory = Comparator


ischema_names["jsonb"] = JSONB
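
A minimal sketch (not part of the vendored file) of the JSON/JSONB operators defined above: ->> via astext, a cast to Integer, and the JSONB-only @> containment test; table and column names are placeholders.

from sqlalchemy import Column, Integer, MetaData, Table, select
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import JSONB

metadata = MetaData()
data_table = Table(
    "data_table",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("data", JSONB),
)

stmt = (
    select([data_table.c.id])
    # data ->> 'some key' = 'some value'
    .where(data_table.c.data["some key"].astext == "some value")
    # CAST(data ->> 'count' AS INTEGER) > 5
    .where(data_table.c.data["count"].astext.cast(Integer) > 5)
    # data @> '{"k1": "v1"}'
    .where(data_table.c.data.contains({"k1": "v1"}))
)
print(stmt.compile(dialect=postgresql.dialect()))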
.venv/Lib/site-packages/sqlalchemy/dialects/postgresql/pg8000.py (new file, 332 lines)
@@ -0,0 +1,332 @@
# postgresql/pg8000.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors <see AUTHORS
# file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
r"""
.. dialect:: postgresql+pg8000
    :name: pg8000
    :dbapi: pg8000
    :connectstring: postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...]
    :url: https://pythonhosted.org/pg8000/

.. note::

    The pg8000 dialect is **not tested as part of SQLAlchemy's continuous
    integration** and may have unresolved issues.  The recommended PostgreSQL
    dialect is psycopg2.

.. _pg8000_unicode:

Unicode
-------

pg8000 will encode / decode string values between it and the server using the
PostgreSQL ``client_encoding`` parameter; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
Typically, this can be changed to ``utf-8``, as a more useful default::

    #client_encoding = sql_ascii    # actually, defaults to database
                                    # encoding
    client_encoding = utf8

The ``client_encoding`` can be overridden for a session by executing the SQL:

SET CLIENT_ENCODING TO 'utf8';

SQLAlchemy will execute this SQL on all new connections based on the value
passed to :func:`.create_engine` using the ``client_encoding`` parameter::

    engine = create_engine(
        "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8')


.. _pg8000_isolation_level:

pg8000 Transaction Isolation Level
-------------------------------------

The pg8000 dialect offers the same isolation level settings as that
of the :ref:`psycopg2 <psycopg2_isolation_level>` dialect:

* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
* ``AUTOCOMMIT``

.. versionadded:: 0.9.5 support for AUTOCOMMIT isolation level when using
   pg8000.

.. seealso::

    :ref:`postgresql_isolation_level`

    :ref:`psycopg2_isolation_level`


"""  # noqa
import decimal
import re

from .base import _DECIMAL_TYPES
from .base import _FLOAT_TYPES
from .base import _INT_TYPES
from .base import PGCompiler
from .base import PGDialect
from .base import PGExecutionContext
from .base import PGIdentifierPreparer
from .base import UUID
from .json import JSON
from ... import exc
from ... import processors
from ... import types as sqltypes
from ... import util
from ...sql.elements import quoted_name


try:
    from uuid import UUID as _python_UUID  # noqa
except ImportError:
    _python_UUID = None


class _PGNumeric(sqltypes.Numeric):
    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal, self._effective_decimal_return_scale
                )
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype
                )
        else:
            if coltype in _FLOAT_TYPES:
                # pg8000 returns float natively for 701
                return None
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                return processors.to_float
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype
                )


class _PGNumericNoBind(_PGNumeric):
    def bind_processor(self, dialect):
        return None


class _PGJSON(JSON):
    def result_processor(self, dialect, coltype):
        if dialect._dbapi_version > (1, 10, 1):
            return None  # Has native JSON
        else:
            return super(_PGJSON, self).result_processor(dialect, coltype)


class _PGUUID(UUID):
    def bind_processor(self, dialect):
        if not self.as_uuid:

            def process(value):
                if value is not None:
                    value = _python_UUID(value)
                return value

            return process

    def result_processor(self, dialect, coltype):
        if not self.as_uuid:

            def process(value):
                if value is not None:
                    value = str(value)
                return value

            return process


class PGExecutionContext_pg8000(PGExecutionContext):
    pass


class PGCompiler_pg8000(PGCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        return (
            self.process(binary.left, **kw)
            + " %% "
            + self.process(binary.right, **kw)
        )

    def post_process_text(self, text):
        if "%%" in text:
            util.warn(
                "The SQLAlchemy postgresql dialect "
                "now automatically escapes '%' in text() "
                "expressions to '%%'."
            )
        return text.replace("%", "%%")


class PGIdentifierPreparer_pg8000(PGIdentifierPreparer):
    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace("%", "%%")


class PGDialect_pg8000(PGDialect):
    driver = "pg8000"

    supports_unicode_statements = True

    supports_unicode_binds = True

    default_paramstyle = "format"
    supports_sane_multi_rowcount = True
    execution_ctx_cls = PGExecutionContext_pg8000
    statement_compiler = PGCompiler_pg8000
    preparer = PGIdentifierPreparer_pg8000
    description_encoding = "use_encoding"

    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: _PGNumericNoBind,
            sqltypes.Float: _PGNumeric,
            JSON: _PGJSON,
            sqltypes.JSON: _PGJSON,
            UUID: _PGUUID,
        },
    )

    def __init__(self, client_encoding=None, **kwargs):
|
||||
PGDialect.__init__(self, **kwargs)
|
||||
self.client_encoding = client_encoding
|
||||
|
||||
def initialize(self, connection):
|
||||
self.supports_sane_multi_rowcount = self._dbapi_version >= (1, 9, 14)
|
||||
super(PGDialect_pg8000, self).initialize(connection)
|
||||
|
||||
@util.memoized_property
|
||||
def _dbapi_version(self):
|
||||
if self.dbapi and hasattr(self.dbapi, "__version__"):
|
||||
return tuple(
|
||||
[
|
||||
int(x)
|
||||
for x in re.findall(
|
||||
r"(\d+)(?:[-\.]?|$)", self.dbapi.__version__
|
||||
)
|
||||
]
|
||||
)
|
||||
else:
|
||||
return (99, 99, 99)
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
return __import__("pg8000")
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = url.translate_connect_args(username="user")
|
||||
if "port" in opts:
|
||||
opts["port"] = int(opts["port"])
|
||||
opts.update(url.query)
|
||||
return ([], opts)
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
return "connection is closed" in str(e)
|
||||
|
||||
def set_isolation_level(self, connection, level):
|
||||
level = level.replace("_", " ")
|
||||
|
||||
# adjust for ConnectionFairy possibly being present
|
||||
if hasattr(connection, "connection"):
|
||||
connection = connection.connection
|
||||
|
||||
if level == "AUTOCOMMIT":
|
||||
connection.autocommit = True
|
||||
elif level in self._isolation_lookup:
|
||||
connection.autocommit = False
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(
|
||||
"SET SESSION CHARACTERISTICS AS TRANSACTION "
|
||||
"ISOLATION LEVEL %s" % level
|
||||
)
|
||||
cursor.execute("COMMIT")
|
||||
cursor.close()
|
||||
else:
|
||||
raise exc.ArgumentError(
|
||||
"Invalid value '%s' for isolation_level. "
|
||||
"Valid isolation levels for %s are %s or AUTOCOMMIT"
|
||||
% (level, self.name, ", ".join(self._isolation_lookup))
|
||||
)
|
||||
|
||||
def set_client_encoding(self, connection, client_encoding):
|
||||
# adjust for ConnectionFairy possibly being present
|
||||
if hasattr(connection, "connection"):
|
||||
connection = connection.connection
|
||||
|
||||
cursor = connection.cursor()
|
||||
cursor.execute("SET CLIENT_ENCODING TO '" + client_encoding + "'")
|
||||
cursor.execute("COMMIT")
|
||||
cursor.close()
|
||||
|
||||
def do_begin_twophase(self, connection, xid):
|
||||
connection.connection.tpc_begin((0, xid, ""))
|
||||
|
||||
def do_prepare_twophase(self, connection, xid):
|
||||
connection.connection.tpc_prepare()
|
||||
|
||||
def do_rollback_twophase(
|
||||
self, connection, xid, is_prepared=True, recover=False
|
||||
):
|
||||
connection.connection.tpc_rollback((0, xid, ""))
|
||||
|
||||
def do_commit_twophase(
|
||||
self, connection, xid, is_prepared=True, recover=False
|
||||
):
|
||||
connection.connection.tpc_commit((0, xid, ""))
|
||||
|
||||
def do_recover_twophase(self, connection):
|
||||
return [row[1] for row in connection.connection.tpc_recover()]
|
||||
|
||||
def on_connect(self):
|
||||
fns = []
|
||||
|
||||
def on_connect(conn):
|
||||
conn.py_types[quoted_name] = conn.py_types[util.text_type]
|
||||
|
||||
fns.append(on_connect)
|
||||
|
||||
if self.client_encoding is not None:
|
||||
|
||||
def on_connect(conn):
|
||||
self.set_client_encoding(conn, self.client_encoding)
|
||||
|
||||
fns.append(on_connect)
|
||||
|
||||
if self.isolation_level is not None:
|
||||
|
||||
def on_connect(conn):
|
||||
self.set_isolation_level(conn, self.isolation_level)
|
||||
|
||||
fns.append(on_connect)
|
||||
|
||||
if len(fns) > 0:
|
||||
|
||||
def on_connect(conn):
|
||||
for fn in fns:
|
||||
fn(conn)
|
||||
|
||||
return on_connect
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
dialect = PGDialect_pg8000
|
||||
@@ -0,0 +1,962 @@
|
||||
# postgresql/psycopg2.py
|
||||
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
r"""
|
||||
.. dialect:: postgresql+psycopg2
|
||||
:name: psycopg2
|
||||
:dbapi: psycopg2
|
||||
:connectstring: postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...]
|
||||
:url: http://pypi.python.org/pypi/psycopg2/
|
||||
|
||||
psycopg2 Connect Arguments
|
||||
-----------------------------------
|
||||
|
||||
psycopg2-specific keyword arguments which are accepted by
|
||||
:func:`.create_engine()` are:
|
||||
|
||||
* ``server_side_cursors``: Enable the usage of "server side cursors" for SQL
|
||||
statements which support this feature. What this essentially means from a
|
||||
psycopg2 point of view is that the cursor is created using a name, e.g.
|
||||
``connection.cursor('some name')``, which has the effect that result rows
|
||||
are not immediately pre-fetched and buffered after statement execution, but
|
||||
are instead left on the server and only retrieved as needed. SQLAlchemy's
|
||||
:class:`~sqlalchemy.engine.ResultProxy` uses special row-buffering
|
||||
behavior when this feature is enabled, such that groups of 100 rows at a
|
||||
time are fetched over the wire to reduce conversational overhead.
|
||||
Note that the :paramref:`.Connection.execution_options.stream_results`
|
||||
execution option is a more targeted
|
||||
way of enabling this mode on a per-execution basis.
|
||||
|
||||
* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
|
||||
per connection. True by default.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`psycopg2_disable_native_unicode`
|
||||
|
||||
* ``isolation_level``: This option, available for all PostgreSQL dialects,
|
||||
includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
|
||||
dialect.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`psycopg2_isolation_level`
|
||||
|
||||
* ``client_encoding``: sets the client encoding in a libpq-agnostic way,
|
||||
using psycopg2's ``set_client_encoding()`` method.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`psycopg2_unicode`
|
||||
|
||||
* ``executemany_mode``, ``executemany_batch_page_size``,
|
||||
``executemany_values_page_size``: Allows use of psycopg2
|
||||
extensions for optimizing "executemany"-style queries. See the referenced
|
||||
section below for details.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`psycopg2_executemany_mode`
|
||||
|
||||
* ``use_batch_mode``: this is the previous setting used to affect "executemany"
|
||||
mode and is now deprecated.
|
||||
|
||||
|
||||
Unix Domain Connections
|
||||
------------------------
|
||||
|
||||
psycopg2 supports connecting via Unix domain connections. When the ``host``
|
||||
portion of the URL is omitted, SQLAlchemy passes ``None`` to psycopg2,
|
||||
which specifies Unix-domain communication rather than TCP/IP communication::
|
||||
|
||||
create_engine("postgresql+psycopg2://user:password@/dbname")
|
||||
|
||||
By default, the connection is made to a Unix-domain socket
|
||||
in ``/tmp``, or whatever socket directory was specified when PostgreSQL
|
||||
was built. This value can be overridden by passing a pathname to psycopg2,
|
||||
using ``host`` as an additional keyword argument::
|
||||
|
||||
create_engine("postgresql+psycopg2://user:password@/dbname?host=/var/lib/postgresql")
|
||||
|
||||
.. seealso::
|
||||
|
||||
`PQconnectdbParams \
|
||||
<http://www.postgresql.org/docs/9.1/static/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_
|
||||
|
||||
Empty DSN Connections / Environment Variable Connections
|
||||
---------------------------------------------------------
|
||||
|
||||
The psycopg2 DBAPI can connect to PostgreSQL by passing an empty DSN to the
|
||||
libpq client library, which by default indicates to connect to a localhost
|
||||
PostgreSQL database that is open for "trust" connections. This behavior can be
|
||||
further tailored using a particular set of environment variables which are
|
||||
prefixed with ``PG...`` (e.g. ``PGHOST``, ``PGUSER``), which are consumed by ``libpq`` to take the place of
|
||||
any or all elements of the connection string.
|
||||
|
||||
For this form, the URL can be passed without any elements other than the
|
||||
initial scheme::
|
||||
|
||||
engine = create_engine('postgresql+psycopg2://')
|
||||
|
||||
In the above form, a blank "dsn" string is passed to the ``psycopg2.connect()``
|
||||
function which in turn represents an empty DSN passed to libpq.
|
||||
|
||||
.. versionadded:: 1.3.2 support for parameter-less connections with psycopg2.
|
||||
|
||||
.. seealso::
|
||||
|
||||
`Environment Variables\
|
||||
<https://www.postgresql.org/docs/current/libpq-envars.html>`_ -
|
||||
PostgreSQL documentation on how to use ``PG...``
|
||||
environment variables for connections.
|
||||
|
||||
.. _psycopg2_execution_options:
|
||||
|
||||
Per-Statement/Connection Execution Options
|
||||
-------------------------------------------
|
||||
|
||||
The following DBAPI-specific options are respected when used with
|
||||
:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
|
||||
:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:
|
||||
|
||||
* ``isolation_level`` - Set the transaction isolation level for the lifespan
|
||||
of a :class:`.Connection` (can only be set on a connection, not a statement
|
||||
or query). See :ref:`psycopg2_isolation_level`.
|
||||
|
||||
* ``stream_results`` - Enable or disable usage of psycopg2 server side
|
||||
cursors - this feature makes use of "named" cursors in combination with
|
||||
special result handling methods so that result rows are not fully buffered.
|
||||
If ``None`` or not set, the ``server_side_cursors`` option of the
|
||||
:class:`.Engine` is used.
|
||||
|
||||
* ``max_row_buffer`` - when using ``stream_results``, an integer value that
|
||||
specifies the maximum number of rows to buffer at a time. This is
|
||||
interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the
|
||||
buffer will grow to ultimately store 1000 rows at a time.
|
||||
|
||||
.. versionadded:: 1.0.6
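
As a minimal sketch of the two options above (the engine, query and
``handle()`` function are placeholders, not part of SQLAlchemy)::

    conn = engine.connect().execution_options(
        stream_results=True, max_row_buffer=500)
    result = conn.execute("SELECT * FROM big_table")
    for row in result:
        handle(row)  # application-specific processing
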
|
||||
|
||||
.. _psycopg2_executemany_mode:
|
||||
|
||||
Psycopg2 Fast Execution Helpers
|
||||
-------------------------------
|
||||
|
||||
Modern versions of psycopg2 include a feature known as
|
||||
`Fast Execution Helpers \
|
||||
<http://initd.org/psycopg/docs/extras.html#fast-execution-helpers>`_, which
|
||||
have been shown in benchmarking to improve psycopg2's executemany()
|
||||
performance, primarily with INSERT statements, by multiple orders of magnitude.
|
||||
SQLAlchemy allows this extension to be used for all ``executemany()`` style
|
||||
calls invoked by an :class:`.Engine` when used with :ref:`multiple parameter
|
||||
sets <execute_multiple>`, which includes the use of this feature both by the
|
||||
Core as well as by the ORM for inserts of objects with non-autogenerated
|
||||
primary key values, by adding the ``executemany_mode`` flag to
|
||||
:func:`.create_engine`::
|
||||
|
||||
engine = create_engine(
|
||||
"postgresql+psycopg2://scott:tiger@host/dbname",
|
||||
executemany_mode='batch')
|
||||
|
||||
|
||||
.. versionchanged:: 1.3.7 - the ``use_batch_mode`` flag has been superseded
|
||||
by a new parameter ``executemany_mode`` which provides support both for
|
||||
psycopg2's ``execute_batch`` helper as well as the ``execute_values``
|
||||
helper.
|
||||
|
||||
Possible options for ``executemany_mode`` include:
|
||||
|
||||
* ``None`` - By default, psycopg2's extensions are not used, and the usual
|
||||
``cursor.executemany()`` method is used when invoking batches of statements.
|
||||
|
||||
* ``'batch'`` - Uses ``psycopg2.extras.execute_batch`` so that multiple copies
|
||||
of a SQL query, each one corresponding to a parameter set passed to
|
||||
``executemany()``, are joined into a single SQL string separated by a
|
||||
semicolon. This is the same behavior as was provided by the
|
||||
``use_batch_mode=True`` flag.
|
||||
|
||||
* ``'values'`` - For Core :func:`.insert` constructs only (including those
|
||||
emitted by the ORM automatically), the ``psycopg2.extras.execute_values``
|
||||
extension is used so that multiple parameter sets are grouped into a single
|
||||
INSERT statement and joined together with multiple VALUES expressions. This
|
||||
method requires that the string text of the VALUES clause inside the
|
||||
INSERT statement is manipulated, so is only supported with a compiled
|
||||
:func:`.insert` construct where the format is predictable. For all other
|
||||
constructs, including plain textual INSERT statements not rendered by the
|
||||
SQLAlchemy expression language compiler, the
|
||||
``psycopg2.extras.execute_batch`` method is used. It is therefore important
|
||||
to note that **"values" mode implies that "batch" mode is also used for
|
||||
all statements for which "values" mode does not apply**.
|
||||
|
||||
For both strategies, the ``executemany_batch_page_size`` and
|
||||
``executemany_values_page_size`` arguments control how many parameter sets
|
||||
should be represented in each execution. Because "values" mode implies a
|
||||
fallback down to "batch" mode for non-INSERT statements, there are two
|
||||
independent page size arguments. For each, the default value of ``None`` means
|
||||
to use psycopg2's defaults, which at the time of this writing are quite low at
|
||||
100. For the ``execute_values`` method, a number as high as 10000 may prove
|
||||
to be performant, whereas for ``execute_batch``, as the number represents
|
||||
full statements repeated, a number closer to the default of 100 is likely
|
||||
more appropriate::
|
||||
|
||||
engine = create_engine(
|
||||
"postgresql+psycopg2://scott:tiger@host/dbname",
|
||||
executemany_mode='values',
|
||||
executemany_values_page_size=10000, executemany_batch_page_size=500)
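
As a sketch of how the fast path is reached (the table and parameter names
below are placeholders), any Core ``executemany()``-style call, such as an
:func:`.insert` executed with a list of parameter dictionaries, is routed
through the configured helper::

    with engine.connect() as conn:
        conn.execute(
            my_table.insert(),
            [{"x": 1}, {"x": 2}, {"x": 3}],
        )
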
|
||||
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`execute_multiple` - General information on using the
|
||||
:class:`.Connection` object to execute statements in such a way as to make
|
||||
use of the DBAPI ``.executemany()`` method.
|
||||
|
||||
.. versionchanged:: 1.3.7 - Added support for
|
||||
``psycopg2.extras.execute_values``. The ``use_batch_mode`` flag is
|
||||
superseded by the ``executemany_mode`` flag.
|
||||
|
||||
|
||||
.. _psycopg2_unicode:
|
||||
|
||||
Unicode with Psycopg2
|
||||
----------------------
|
||||
|
||||
By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
|
||||
extension, such that the DBAPI receives and returns all strings as Python
|
||||
Unicode objects directly - SQLAlchemy passes these values through without
|
||||
change. Psycopg2 here will encode/decode string values based on the
|
||||
current "client encoding" setting; by default this is the value in
|
||||
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
|
||||
Typically, this can be changed to ``utf8``, as a more useful default::
|
||||
|
||||
# postgresql.conf file
|
||||
|
||||
# client_encoding = sql_ascii # actually, defaults to database
|
||||
# encoding
|
||||
client_encoding = utf8
|
||||
|
||||
A second way to affect the client encoding is to set it within Psycopg2
|
||||
locally. SQLAlchemy will call psycopg2's
|
||||
:meth:`psycopg2:connection.set_client_encoding` method
|
||||
on all new connections based on the value passed to
|
||||
:func:`.create_engine` using the ``client_encoding`` parameter::
|
||||
|
||||
# set_client_encoding() setting;
|
||||
# works for *all* PostgreSQL versions
|
||||
engine = create_engine("postgresql://user:pass@host/dbname",
|
||||
client_encoding='utf8')
|
||||
|
||||
This overrides the encoding specified in the PostgreSQL client configuration.
|
||||
When using the parameter in this way, the psycopg2 driver emits
|
||||
``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
|
||||
in all PostgreSQL versions.
|
||||
|
||||
Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
|
||||
is **not the same** as the more recently added ``client_encoding`` parameter
|
||||
now supported by libpq directly. This is enabled when ``client_encoding``
|
||||
is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
|
||||
using the :paramref:`.create_engine.connect_args` parameter::
|
||||
|
||||
engine = create_engine(
|
||||
"postgresql://user:pass@host/dbname",
|
||||
connect_args={'client_encoding': 'utf8'})
|
||||
|
||||
# using the query string is equivalent
|
||||
engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")
|
||||
|
||||
The above parameter was only added to libpq as of version 9.1 of PostgreSQL,
|
||||
so using the previous method is better for cross-version support.
|
||||
|
||||
.. _psycopg2_disable_native_unicode:
|
||||
|
||||
Disabling Native Unicode
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
SQLAlchemy can also be instructed to skip the usage of the psycopg2
|
||||
``UNICODE`` extension and to instead utilize its own unicode encode/decode
|
||||
services, which are normally reserved only for those DBAPIs that don't
|
||||
fully support unicode directly. Passing ``use_native_unicode=False`` to
|
||||
:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``.
|
||||
SQLAlchemy will instead encode data itself into Python bytestrings on the way
|
||||
in and coerce from bytes on the way back,
|
||||
using the value of the :func:`.create_engine` ``encoding`` parameter, which
|
||||
defaults to ``utf-8``.
|
||||
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
|
||||
obsolete as most DBAPIs now support unicode fully.
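
A minimal sketch of disabling the extension (the connection URL is a
placeholder)::

    engine = create_engine(
        "postgresql+psycopg2://user:pass@host/dbname",
        use_native_unicode=False)
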
|
||||
|
||||
Bound Parameter Styles
|
||||
----------------------
|
||||
|
||||
The default parameter style for the psycopg2 dialect is "pyformat", where
|
||||
SQL is rendered using ``%(paramname)s`` style. This format has the limitation
|
||||
that it does not accommodate the unusual case of parameter names that
|
||||
actually contain percent or parenthesis symbols; as SQLAlchemy in many cases
|
||||
generates bound parameter names based on the name of a column, the presence
|
||||
of these characters in a column name can lead to problems.
|
||||
|
||||
There are two solutions to the issue of a :class:`.schema.Column` that contains
|
||||
one of these characters in its name. One is to specify the
|
||||
:paramref:`.schema.Column.key` for columns that have such names::
|
||||
|
||||
measurement = Table('measurement', metadata,
|
||||
Column('Size (meters)', Integer, key='size_meters')
|
||||
)
|
||||
|
||||
Above, an INSERT statement such as ``measurement.insert()`` will use
|
||||
``size_meters`` as the parameter name, and a SQL expression such as
|
||||
``measurement.c.size_meters > 10`` will derive the bound parameter name
|
||||
from the ``size_meters`` key as well.
|
||||
|
||||
.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
|
||||
as the source of naming when anonymous bound parameters are created
|
||||
in SQL expressions; previously, this behavior only applied to
|
||||
:meth:`.Table.insert` and :meth:`.Table.update` parameter names.
|
||||
|
||||
The other solution is to use a positional format; psycopg2 allows use of the
|
||||
"format" paramstyle, which can be passed to
|
||||
:paramref:`.create_engine.paramstyle`::
|
||||
|
||||
engine = create_engine(
|
||||
'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')
|
||||
|
||||
With the above engine, instead of a statement like::
|
||||
|
||||
INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
|
||||
{'Size (meters)': 1}
|
||||
|
||||
we instead see::
|
||||
|
||||
INSERT INTO measurement ("Size (meters)") VALUES (%s)
|
||||
(1, )
|
||||
|
||||
Where above, the dictionary style is converted into a tuple with positional
|
||||
style.
|
||||
|
||||
|
||||
Transactions
|
||||
------------
|
||||
|
||||
The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations.
|
||||
|
||||
.. _psycopg2_isolation_level:
|
||||
|
||||
Psycopg2 Transaction Isolation Level
|
||||
-------------------------------------
|
||||
|
||||
As discussed in :ref:`postgresql_isolation_level`,
|
||||
all PostgreSQL dialects support setting of transaction isolation level
|
||||
both via the ``isolation_level`` parameter passed to :func:`.create_engine`,
|
||||
as well as the ``isolation_level`` argument used by
|
||||
:meth:`.Connection.execution_options`. When using the psycopg2 dialect, these
|
||||
options make use of psycopg2's ``set_isolation_level()`` connection method,
|
||||
rather than emitting a PostgreSQL directive; this is because psycopg2's
|
||||
API-level setting is always emitted at the start of each transaction in any
|
||||
case.
|
||||
|
||||
The psycopg2 dialect supports these constants for isolation level:
|
||||
|
||||
* ``READ COMMITTED``
|
||||
* ``READ UNCOMMITTED``
|
||||
* ``REPEATABLE READ``
|
||||
* ``SERIALIZABLE``
|
||||
* ``AUTOCOMMIT``
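
As a sketch of applying one of the levels above to a single connection
(the engine and statement are placeholders), the
:meth:`.Connection.execution_options` pattern looks like::

    connection = engine.connect()
    connection = connection.execution_options(
        isolation_level="AUTOCOMMIT")
    connection.execute("VACUUM ANALYZE")
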
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`postgresql_isolation_level`
|
||||
|
||||
:ref:`pg8000_isolation_level`
|
||||
|
||||
|
||||
NOTICE logging
|
||||
---------------
|
||||
|
||||
The psycopg2 dialect will log PostgreSQL NOTICE messages
|
||||
via the ``sqlalchemy.dialects.postgresql`` logger. When this logger
|
||||
is set to the ``logging.INFO`` level, notice messages will be logged::
|
||||
|
||||
import logging
|
||||
|
||||
logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)
|
||||
|
||||
Above, it is assumed that logging is configured externally. If this is not
|
||||
the case, configuration such as ``logging.basicConfig()`` must be utilized::
|
||||
|
||||
import logging
|
||||
|
||||
logging.basicConfig() # log messages to stdout
|
||||
logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)
|
||||
|
||||
.. seealso::
|
||||
|
||||
`Logging HOWTO <https://docs.python.org/3/howto/logging.html>`_ - on the python.org website
|
||||
|
||||
.. _psycopg2_hstore:
|
||||
|
||||
HSTORE type
|
||||
------------
|
||||
|
||||
The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of
|
||||
the HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension
|
||||
by default when psycopg2 version 2.4 or greater is used, and
|
||||
it is detected that the target database has the HSTORE type set up for use.
|
||||
In other words, when the dialect makes the first
|
||||
connection, a sequence like the following is performed:
|
||||
|
||||
1. Request the available HSTORE oids using
|
||||
``psycopg2.extras.HstoreAdapter.get_oids()``.
|
||||
If this function returns a list of HSTORE identifiers, we then determine
|
||||
that the ``HSTORE`` extension is present.
|
||||
This function is **skipped** if the version of psycopg2 installed is
|
||||
less than version 2.4.
|
||||
|
||||
2. If the ``use_native_hstore`` flag is at its default of ``True``, and
|
||||
we've detected that ``HSTORE`` oids are available, the
|
||||
``psycopg2.extensions.register_hstore()`` extension is invoked for all
|
||||
connections.
|
||||
|
||||
The ``register_hstore()`` extension has the effect of **all Python
|
||||
dictionaries being accepted as parameters regardless of the type of target
|
||||
column in SQL**. The dictionaries are converted by this extension into a
|
||||
textual HSTORE expression. If this behavior is not desired, disable the
|
||||
use of the hstore extension by setting ``use_native_hstore`` to ``False`` as
|
||||
follows::
|
||||
|
||||
engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test",
|
||||
use_native_hstore=False)
|
||||
|
||||
The ``HSTORE`` type is **still supported** when the
|
||||
``psycopg2.extensions.register_hstore()`` extension is not used. It merely
|
||||
means that the coercion between Python dictionaries and the HSTORE
|
||||
string format, on both the parameter side and the result side, will take
|
||||
place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2``
|
||||
which may be more performant.
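
For illustration (the table and column names are placeholders), with the
extension registered a plain Python dictionary may be passed directly as
the bound parameter for an :class:`.HSTORE` column::

    conn.execute(
        data_table.insert(),
        {"hstore_col": {"key1": "value1", "key2": "value2"}},
    )
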
|
||||
|
||||
""" # noqa
|
||||
from __future__ import absolute_import
|
||||
|
||||
import decimal
|
||||
import logging
|
||||
import re
|
||||
|
||||
from .base import _DECIMAL_TYPES
|
||||
from .base import _FLOAT_TYPES
|
||||
from .base import _INT_TYPES
|
||||
from .base import ENUM
|
||||
from .base import PGCompiler
|
||||
from .base import PGDialect
|
||||
from .base import PGExecutionContext
|
||||
from .base import PGIdentifierPreparer
|
||||
from .base import UUID
|
||||
from .hstore import HSTORE
|
||||
from .json import JSON
|
||||
from .json import JSONB
|
||||
from ... import exc
|
||||
from ... import processors
|
||||
from ... import types as sqltypes
|
||||
from ... import util
|
||||
from ...engine import result as _result
|
||||
from ...util import collections_abc
|
||||
|
||||
try:
|
||||
from uuid import UUID as _python_UUID # noqa
|
||||
except ImportError:
|
||||
_python_UUID = None
|
||||
|
||||
|
||||
logger = logging.getLogger("sqlalchemy.dialects.postgresql")
|
||||
|
||||
|
||||
class _PGNumeric(sqltypes.Numeric):
|
||||
def bind_processor(self, dialect):
|
||||
return None
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if self.asdecimal:
|
||||
if coltype in _FLOAT_TYPES:
|
||||
return processors.to_decimal_processor_factory(
|
||||
decimal.Decimal, self._effective_decimal_return_scale
|
||||
)
|
||||
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
|
||||
# psycopg2 returns Decimal natively for 1700 (numeric)
|
||||
return None
|
||||
else:
|
||||
raise exc.InvalidRequestError(
|
||||
"Unknown PG numeric type: %d" % coltype
|
||||
)
|
||||
else:
|
||||
if coltype in _FLOAT_TYPES:
|
||||
# psycopg2 returns float natively for 701 (float8)
|
||||
return None
|
||||
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
|
||||
return processors.to_float
|
||||
else:
|
||||
raise exc.InvalidRequestError(
|
||||
"Unknown PG numeric type: %d" % coltype
|
||||
)
|
||||
|
||||
|
||||
class _PGEnum(ENUM):
|
||||
def result_processor(self, dialect, coltype):
|
||||
if util.py2k and self._expect_unicode is True:
|
||||
# for py2k, if the enum type needs unicode data (which is set up as
|
||||
# part of the Enum() constructor based on values passed as py2k
|
||||
# unicode objects) we have to use our own converters since
|
||||
# psycopg2's don't work, a rare exception to the "modern DBAPIs
|
||||
# support unicode everywhere" theme of deprecating
|
||||
# convert_unicode=True. Use the special "force_nocheck" directive
|
||||
# which forces unicode conversion to happen on the Python side
|
||||
# without an isinstance() check. in py3k psycopg2 does the right
|
||||
# thing automatically.
|
||||
self._expect_unicode = "force_nocheck"
|
||||
return super(_PGEnum, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGHStore(HSTORE):
|
||||
def bind_processor(self, dialect):
|
||||
if dialect._has_native_hstore:
|
||||
return None
|
||||
else:
|
||||
return super(_PGHStore, self).bind_processor(dialect)
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if dialect._has_native_hstore:
|
||||
return None
|
||||
else:
|
||||
return super(_PGHStore, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGJSON(JSON):
|
||||
def result_processor(self, dialect, coltype):
|
||||
if dialect._has_native_json:
|
||||
return None
|
||||
else:
|
||||
return super(_PGJSON, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGJSONB(JSONB):
|
||||
def result_processor(self, dialect, coltype):
|
||||
if dialect._has_native_jsonb:
|
||||
return None
|
||||
else:
|
||||
return super(_PGJSONB, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGUUID(UUID):
|
||||
def bind_processor(self, dialect):
|
||||
if not self.as_uuid and dialect.use_native_uuid:
|
||||
|
||||
def process(value):
|
||||
if value is not None:
|
||||
value = _python_UUID(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not self.as_uuid and dialect.use_native_uuid:
|
||||
|
||||
def process(value):
|
||||
if value is not None:
|
||||
value = str(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
|
||||
_server_side_id = util.counter()
|
||||
|
||||
|
||||
class PGExecutionContext_psycopg2(PGExecutionContext):
|
||||
def create_server_side_cursor(self):
|
||||
# use server-side cursors:
|
||||
# http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
|
||||
ident = "c_%s_%s" % (hex(id(self))[2:], hex(_server_side_id())[2:])
|
||||
return self._dbapi_connection.cursor(ident)
|
||||
|
||||
def get_result_proxy(self):
|
||||
self._log_notices(self.cursor)
|
||||
|
||||
if self._is_server_side:
|
||||
return _result.BufferedRowResultProxy(self)
|
||||
else:
|
||||
return _result.ResultProxy(self)
|
||||
|
||||
def _log_notices(self, cursor):
|
||||
# check also that notices is an iterable, after it's already
|
||||
# established that we will be iterating through it. This is to get
|
||||
# around test suites such as SQLAlchemy's using a Mock object for
|
||||
# cursor
|
||||
if not cursor.connection.notices or not isinstance(
|
||||
cursor.connection.notices, collections_abc.Iterable
|
||||
):
|
||||
return
|
||||
|
||||
for notice in cursor.connection.notices:
|
||||
# NOTICE messages have a
|
||||
# newline character at the end
|
||||
logger.info(notice.rstrip())
|
||||
|
||||
cursor.connection.notices[:] = []
|
||||
|
||||
|
||||
class PGCompiler_psycopg2(PGCompiler):
|
||||
pass
|
||||
|
||||
|
||||
class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
|
||||
pass
|
||||
|
||||
|
||||
EXECUTEMANY_DEFAULT = util.symbol("executemany_default")
|
||||
EXECUTEMANY_BATCH = util.symbol("executemany_batch")
|
||||
EXECUTEMANY_VALUES = util.symbol("executemany_values")
|
||||
|
||||
|
||||
class PGDialect_psycopg2(PGDialect):
|
||||
driver = "psycopg2"
|
||||
if util.py2k:
|
||||
supports_unicode_statements = False
|
||||
|
||||
supports_server_side_cursors = True
|
||||
|
||||
default_paramstyle = "pyformat"
|
||||
# set to true based on psycopg2 version
|
||||
supports_sane_multi_rowcount = False
|
||||
execution_ctx_cls = PGExecutionContext_psycopg2
|
||||
statement_compiler = PGCompiler_psycopg2
|
||||
preparer = PGIdentifierPreparer_psycopg2
|
||||
psycopg2_version = (0, 0)
|
||||
|
||||
FEATURE_VERSION_MAP = dict(
|
||||
native_json=(2, 5),
|
||||
native_jsonb=(2, 5, 4),
|
||||
sane_multi_rowcount=(2, 0, 9),
|
||||
array_oid=(2, 4, 3),
|
||||
hstore_adapter=(2, 4),
|
||||
)
|
||||
|
||||
_has_native_hstore = False
|
||||
_has_native_json = False
|
||||
_has_native_jsonb = False
|
||||
|
||||
engine_config_types = PGDialect.engine_config_types.union(
|
||||
[("use_native_unicode", util.asbool)]
|
||||
)
|
||||
|
||||
colspecs = util.update_copy(
|
||||
PGDialect.colspecs,
|
||||
{
|
||||
sqltypes.Numeric: _PGNumeric,
|
||||
ENUM: _PGEnum, # needs force_unicode
|
||||
sqltypes.Enum: _PGEnum, # needs force_unicode
|
||||
HSTORE: _PGHStore,
|
||||
JSON: _PGJSON,
|
||||
sqltypes.JSON: _PGJSON,
|
||||
JSONB: _PGJSONB,
|
||||
UUID: _PGUUID,
|
||||
},
|
||||
)
|
||||
|
||||
@util.deprecated_params(
|
||||
use_batch_mode=(
|
||||
"1.3.7",
|
||||
"The psycopg2 use_batch_mode flag is superseded by "
|
||||
"executemany_mode='batch'",
|
||||
)
|
||||
)
|
||||
def __init__(
|
||||
self,
|
||||
server_side_cursors=False,
|
||||
use_native_unicode=True,
|
||||
client_encoding=None,
|
||||
use_native_hstore=True,
|
||||
use_native_uuid=True,
|
||||
executemany_mode=None,
|
||||
executemany_batch_page_size=None,
|
||||
executemany_values_page_size=None,
|
||||
use_batch_mode=None,
|
||||
**kwargs
|
||||
):
|
||||
PGDialect.__init__(self, **kwargs)
|
||||
self.server_side_cursors = server_side_cursors
|
||||
self.use_native_unicode = use_native_unicode
|
||||
self.use_native_hstore = use_native_hstore
|
||||
self.use_native_uuid = use_native_uuid
|
||||
self.supports_unicode_binds = use_native_unicode
|
||||
self.client_encoding = client_encoding
|
||||
|
||||
# Parse executemany_mode argument, allowing it to be only one of the
|
||||
# symbol names
|
||||
self.executemany_mode = util.symbol.parse_user_argument(
|
||||
executemany_mode,
|
||||
{
|
||||
EXECUTEMANY_DEFAULT: [None],
|
||||
EXECUTEMANY_BATCH: ["batch"],
|
||||
EXECUTEMANY_VALUES: ["values"],
|
||||
},
|
||||
"executemany_mode",
|
||||
)
|
||||
if use_batch_mode:
|
||||
self.executemany_mode = EXECUTEMANY_BATCH
|
||||
|
||||
self.executemany_batch_page_size = executemany_batch_page_size
|
||||
self.executemany_values_page_size = executemany_values_page_size
|
||||
|
||||
if self.dbapi and hasattr(self.dbapi, "__version__"):
|
||||
m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", self.dbapi.__version__)
|
||||
if m:
|
||||
self.psycopg2_version = tuple(
|
||||
int(x) for x in m.group(1, 2, 3) if x is not None
|
||||
)
|
||||
|
||||
def initialize(self, connection):
|
||||
super(PGDialect_psycopg2, self).initialize(connection)
|
||||
self._has_native_hstore = (
|
||||
self.use_native_hstore
|
||||
and self._hstore_oids(connection.connection) is not None
|
||||
)
|
||||
self._has_native_json = (
|
||||
self.psycopg2_version >= self.FEATURE_VERSION_MAP["native_json"]
|
||||
)
|
||||
self._has_native_jsonb = (
|
||||
self.psycopg2_version >= self.FEATURE_VERSION_MAP["native_jsonb"]
|
||||
)
|
||||
|
||||
# http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
|
||||
self.supports_sane_multi_rowcount = (
|
||||
self.psycopg2_version
|
||||
>= self.FEATURE_VERSION_MAP["sane_multi_rowcount"]
|
||||
and self.executemany_mode is EXECUTEMANY_DEFAULT
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
import psycopg2
|
||||
|
||||
return psycopg2
|
||||
|
||||
@classmethod
|
||||
def _psycopg2_extensions(cls):
|
||||
from psycopg2 import extensions
|
||||
|
||||
return extensions
|
||||
|
||||
@classmethod
|
||||
def _psycopg2_extras(cls):
|
||||
from psycopg2 import extras
|
||||
|
||||
return extras
|
||||
|
||||
@util.memoized_property
|
||||
def _isolation_lookup(self):
|
||||
extensions = self._psycopg2_extensions()
|
||||
return {
|
||||
"AUTOCOMMIT": extensions.ISOLATION_LEVEL_AUTOCOMMIT,
|
||||
"READ COMMITTED": extensions.ISOLATION_LEVEL_READ_COMMITTED,
|
||||
"READ UNCOMMITTED": extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
|
||||
"REPEATABLE READ": extensions.ISOLATION_LEVEL_REPEATABLE_READ,
|
||||
"SERIALIZABLE": extensions.ISOLATION_LEVEL_SERIALIZABLE,
|
||||
}
|
||||
|
||||
def set_isolation_level(self, connection, level):
|
||||
try:
|
||||
level = self._isolation_lookup[level.replace("_", " ")]
|
||||
except KeyError:
|
||||
raise exc.ArgumentError(
|
||||
"Invalid value '%s' for isolation_level. "
|
||||
"Valid isolation levels for %s are %s"
|
||||
% (level, self.name, ", ".join(self._isolation_lookup))
|
||||
)
|
||||
|
||||
connection.set_isolation_level(level)
|
||||
|
||||
def on_connect(self):
|
||||
extras = self._psycopg2_extras()
|
||||
extensions = self._psycopg2_extensions()
|
||||
|
||||
fns = []
|
||||
if self.client_encoding is not None:
|
||||
|
||||
def on_connect(conn):
|
||||
conn.set_client_encoding(self.client_encoding)
|
||||
|
||||
fns.append(on_connect)
|
||||
|
||||
if self.isolation_level is not None:
|
||||
|
||||
def on_connect(conn):
|
||||
self.set_isolation_level(conn, self.isolation_level)
|
||||
|
||||
fns.append(on_connect)
|
||||
|
||||
if self.dbapi and self.use_native_uuid:
|
||||
|
||||
def on_connect(conn):
|
||||
extras.register_uuid(None, conn)
|
||||
|
||||
fns.append(on_connect)
|
||||
|
||||
if self.dbapi and self.use_native_unicode:
|
||||
|
||||
def on_connect(conn):
|
||||
extensions.register_type(extensions.UNICODE, conn)
|
||||
extensions.register_type(extensions.UNICODEARRAY, conn)
|
||||
|
||||
fns.append(on_connect)
|
||||
|
||||
if self.dbapi and self.use_native_hstore:
|
||||
|
||||
def on_connect(conn):
|
||||
hstore_oids = self._hstore_oids(conn)
|
||||
if hstore_oids is not None:
|
||||
oid, array_oid = hstore_oids
|
||||
kw = {"oid": oid}
|
||||
if util.py2k:
|
||||
kw["unicode"] = True
|
||||
if (
|
||||
self.psycopg2_version
|
||||
>= self.FEATURE_VERSION_MAP["array_oid"]
|
||||
):
|
||||
kw["array_oid"] = array_oid
|
||||
extras.register_hstore(conn, **kw)
|
||||
|
||||
fns.append(on_connect)
|
||||
|
||||
if self.dbapi and self._json_deserializer:
|
||||
|
||||
def on_connect(conn):
|
||||
if self._has_native_json:
|
||||
extras.register_default_json(
|
||||
conn, loads=self._json_deserializer
|
||||
)
|
||||
if self._has_native_jsonb:
|
||||
extras.register_default_jsonb(
|
||||
conn, loads=self._json_deserializer
|
||||
)
|
||||
|
||||
fns.append(on_connect)
|
||||
|
||||
if fns:
|
||||
|
||||
def on_connect(conn):
|
||||
for fn in fns:
|
||||
fn(conn)
|
||||
|
||||
return on_connect
|
||||
else:
|
||||
return None
|
||||
|
||||
# matches the parenthesized VALUES clause of a rendered INSERT statement;
# do_executemany() swaps it for a single "%s" before calling execute_values()
_insert_values_match = re.compile(r".* VALUES (\(.+\))").match
|
||||
|
||||
def do_executemany(self, cursor, statement, parameters, context=None):
|
||||
if self.executemany_mode is EXECUTEMANY_DEFAULT:
|
||||
cursor.executemany(statement, parameters)
|
||||
return
|
||||
|
||||
if (
|
||||
self.executemany_mode is EXECUTEMANY_VALUES
|
||||
and context
|
||||
and context.isinsert
|
||||
):
|
||||
executemany_values = self._insert_values_match(statement)
|
||||
else:
|
||||
executemany_values = None
|
||||
|
||||
if executemany_values:
|
||||
# Currently, SQLAlchemy does not pass "RETURNING" statements
|
||||
# into executemany(), since no DBAPI has ever supported that
|
||||
# until the introduction of psycopg2's executemany_values, so
|
||||
# we are not yet using the fetch=True flag.
|
||||
statement = statement.replace(executemany_values.group(1), "%s")
|
||||
if self.executemany_values_page_size:
|
||||
kwargs = {"page_size": self.executemany_values_page_size}
|
||||
else:
|
||||
kwargs = {}
|
||||
self._psycopg2_extras().execute_values(
|
||||
cursor,
|
||||
statement,
|
||||
parameters,
|
||||
template=executemany_values.group(1),
|
||||
**kwargs
|
||||
)
|
||||
|
||||
else:
|
||||
if self.executemany_batch_page_size:
|
||||
kwargs = {"page_size": self.executemany_batch_page_size}
|
||||
else:
|
||||
kwargs = {}
|
||||
self._psycopg2_extras().execute_batch(
|
||||
cursor, statement, parameters, **kwargs
|
||||
)
|
||||
|
||||
@util.memoized_instancemethod
|
||||
def _hstore_oids(self, conn):
|
||||
if self.psycopg2_version >= self.FEATURE_VERSION_MAP["hstore_adapter"]:
|
||||
extras = self._psycopg2_extras()
|
||||
oids = extras.HstoreAdapter.get_oids(conn)
|
||||
if oids is not None and oids[0]:
|
||||
return oids[0:2]
|
||||
return None
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = url.translate_connect_args(username="user")
|
||||
if opts:
|
||||
if "port" in opts:
|
||||
opts["port"] = int(opts["port"])
|
||||
opts.update(url.query)
|
||||
# send individual dbname, user, password, host, port
|
||||
# parameters to psycopg2.connect()
|
||||
return ([], opts)
|
||||
elif url.query:
|
||||
# any other connection arguments, pass directly
|
||||
opts.update(url.query)
|
||||
return ([], opts)
|
||||
else:
|
||||
# no connection arguments whatsoever; psycopg2.connect()
|
||||
# requires that "dsn" be present as a blank string.
|
||||
return ([""], opts)
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
if isinstance(e, self.dbapi.Error):
|
||||
# check the "closed" flag. this might not be
|
||||
# present on old psycopg2 versions. Also,
|
||||
# this flag doesn't actually help in a lot of disconnect
|
||||
# situations, so don't rely on it.
|
||||
if getattr(connection, "closed", False):
|
||||
return True
|
||||
|
||||
# checks based on strings. in the case that .closed
|
||||
# didn't cut it, fall back onto these.
|
||||
str_e = str(e).partition("\n")[0]
|
||||
for msg in [
|
||||
# these error messages from libpq: interfaces/libpq/fe-misc.c
|
||||
# and interfaces/libpq/fe-secure.c.
|
||||
"terminating connection",
|
||||
"closed the connection",
|
||||
"connection not open",
|
||||
"could not receive data from server",
|
||||
"could not send data to server",
|
||||
# psycopg2 client errors, psycopg2/connection.h,
|
||||
# psycopg2/cursor.h
|
||||
"connection already closed",
|
||||
"cursor already closed",
|
||||
# not sure where this path is originally from, it may
|
||||
# be obsolete. It really says "losed", not "closed".
|
||||
"losed the connection unexpectedly",
|
||||
# these can occur in newer SSL
|
||||
"connection has been closed unexpectedly",
|
||||
"SSL SYSCALL error: Bad file descriptor",
|
||||
"SSL SYSCALL error: EOF detected",
|
||||
"SSL error: decryption failed or bad record mac",
|
||||
"SSL SYSCALL error: Operation timed out",
|
||||
]:
|
||||
idx = str_e.find(msg)
|
||||
if idx >= 0 and '"' not in str_e[:idx]:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
dialect = PGDialect_psycopg2
|
||||
@@ -0,0 +1,59 @@
|
||||
# postgresql/psycopg2cffi.py
|
||||
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
r"""
|
||||
.. dialect:: postgresql+psycopg2cffi
|
||||
:name: psycopg2cffi
|
||||
:dbapi: psycopg2cffi
|
||||
:connectstring: postgresql+psycopg2cffi://user:password@host:port/dbname[?key=value&key=value...]
|
||||
:url: http://pypi.python.org/pypi/psycopg2cffi/
|
||||
|
||||
``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C
|
||||
layer. This makes it suitable for use in e.g. PyPy. Documentation
|
||||
is as per ``psycopg2``.
|
||||
|
||||
.. versionadded:: 1.0.0
|
||||
|
||||
.. seealso::
|
||||
|
||||
:mod:`sqlalchemy.dialects.postgresql.psycopg2`
|
||||
|
||||
""" # noqa
|
||||
from .psycopg2 import PGDialect_psycopg2
|
||||
|
||||
|
||||
class PGDialect_psycopg2cffi(PGDialect_psycopg2):
|
||||
driver = "psycopg2cffi"
|
||||
supports_unicode_statements = True
|
||||
|
||||
# psycopg2cffi's first release is 2.5.0, but reports
|
||||
# __version__ as 2.4.4. Subsequent releases seem to have
|
||||
# fixed this.
|
||||
|
||||
FEATURE_VERSION_MAP = dict(
|
||||
native_json=(2, 4, 4),
|
||||
native_jsonb=(2, 7, 1),
|
||||
sane_multi_rowcount=(2, 4, 4),
|
||||
array_oid=(2, 4, 4),
|
||||
hstore_adapter=(2, 4, 4),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
return __import__("psycopg2cffi")
|
||||
|
||||
@classmethod
|
||||
def _psycopg2_extensions(cls):
|
||||
root = __import__("psycopg2cffi", fromlist=["extensions"])
|
||||
return root.extensions
|
||||
|
||||
@classmethod
|
||||
def _psycopg2_extras(cls):
|
||||
root = __import__("psycopg2cffi", fromlist=["extras"])
|
||||
return root.extras
|
||||
|
||||
|
||||
dialect = PGDialect_psycopg2cffi
|
||||
@@ -0,0 +1,266 @@
|
||||
# postgresql/pygresql.py
|
||||
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
"""
|
||||
.. dialect:: postgresql+pygresql
|
||||
:name: pygresql
|
||||
:dbapi: pgdb
|
||||
:connectstring: postgresql+pygresql://user:password@host:port/dbname[?key=value&key=value...]
|
||||
:url: http://www.pygresql.org/
|
||||
|
||||
.. note::
|
||||
|
||||
The pygresql dialect is **not tested as part of SQLAlchemy's continuous
|
||||
integration** and may have unresolved issues. The recommended PostgreSQL
|
||||
dialect is psycopg2.
|
||||
|
||||
""" # noqa
|
||||
|
||||
import decimal
|
||||
import re
|
||||
|
||||
from .base import _DECIMAL_TYPES
|
||||
from .base import _FLOAT_TYPES
|
||||
from .base import _INT_TYPES
|
||||
from .base import PGCompiler
|
||||
from .base import PGDialect
|
||||
from .base import PGIdentifierPreparer
|
||||
from .base import UUID
|
||||
from .hstore import HSTORE
|
||||
from .json import JSON
|
||||
from .json import JSONB
|
||||
from ... import exc
|
||||
from ... import processors
|
||||
from ... import util
|
||||
from ...sql.elements import Null
|
||||
from ...types import JSON as Json
|
||||
from ...types import Numeric
|
||||
|
||||
|
||||
class _PGNumeric(Numeric):
|
||||
def bind_processor(self, dialect):
|
||||
return None
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not isinstance(coltype, int):
|
||||
coltype = coltype.oid
|
||||
if self.asdecimal:
|
||||
if coltype in _FLOAT_TYPES:
|
||||
return processors.to_decimal_processor_factory(
|
||||
decimal.Decimal, self._effective_decimal_return_scale
|
||||
)
|
||||
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
|
||||
# PyGreSQL returns Decimal natively for 1700 (numeric)
|
||||
return None
|
||||
else:
|
||||
raise exc.InvalidRequestError(
|
||||
"Unknown PG numeric type: %d" % coltype
|
||||
)
|
||||
else:
|
||||
if coltype in _FLOAT_TYPES:
|
||||
# PyGreSQL returns float natively for 701 (float8)
|
||||
return None
|
||||
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
|
||||
return processors.to_float
|
||||
else:
|
||||
raise exc.InvalidRequestError(
|
||||
"Unknown PG numeric type: %d" % coltype
|
||||
)
|
||||
|
||||
|
||||
class _PGHStore(HSTORE):
|
||||
def bind_processor(self, dialect):
|
||||
if not dialect.has_native_hstore:
|
||||
return super(_PGHStore, self).bind_processor(dialect)
|
||||
hstore = dialect.dbapi.Hstore
|
||||
|
||||
def process(value):
|
||||
if isinstance(value, dict):
|
||||
return hstore(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not dialect.has_native_hstore:
|
||||
return super(_PGHStore, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGJSON(JSON):
|
||||
def bind_processor(self, dialect):
|
||||
if not dialect.has_native_json:
|
||||
return super(_PGJSON, self).bind_processor(dialect)
|
||||
json = dialect.dbapi.Json
|
||||
|
||||
def process(value):
|
||||
if value is self.NULL:
|
||||
value = None
|
||||
elif isinstance(value, Null) or (
|
||||
value is None and self.none_as_null
|
||||
):
|
||||
return None
|
||||
if value is None or isinstance(value, (dict, list)):
|
||||
return json(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not dialect.has_native_json:
|
||||
return super(_PGJSON, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGJSONB(JSONB):
|
||||
def bind_processor(self, dialect):
|
||||
if not dialect.has_native_json:
|
||||
return super(_PGJSONB, self).bind_processor(dialect)
|
||||
json = dialect.dbapi.Json
|
||||
|
||||
def process(value):
|
||||
if value is self.NULL:
|
||||
value = None
|
||||
elif isinstance(value, Null) or (
|
||||
value is None and self.none_as_null
|
||||
):
|
||||
return None
|
||||
if value is None or isinstance(value, (dict, list)):
|
||||
return json(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not dialect.has_native_json:
|
||||
return super(_PGJSONB, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGUUID(UUID):
|
||||
def bind_processor(self, dialect):
|
||||
if not dialect.has_native_uuid:
|
||||
return super(_PGUUID, self).bind_processor(dialect)
|
||||
uuid = dialect.dbapi.Uuid
|
||||
|
||||
def process(value):
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, (str, bytes)):
|
||||
if len(value) == 16:
|
||||
return uuid(bytes=value)
|
||||
return uuid(value)
|
||||
if isinstance(value, int):
|
||||
return uuid(int=value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not dialect.has_native_uuid:
|
||||
return super(_PGUUID, self).result_processor(dialect, coltype)
|
||||
if not self.as_uuid:
|
||||
|
||||
def process(value):
|
||||
if value is not None:
|
||||
return str(value)
|
||||
|
||||
return process
|
||||
|
||||
|
||||
class _PGCompiler(PGCompiler):
|
||||
def visit_mod_binary(self, binary, operator, **kw):
|
||||
return (
|
||||
self.process(binary.left, **kw)
|
||||
+ " %% "
|
||||
+ self.process(binary.right, **kw)
|
||||
)
|
||||
|
||||
def post_process_text(self, text):
|
||||
return text.replace("%", "%%")
|
||||
|
||||
|
||||
class _PGIdentifierPreparer(PGIdentifierPreparer):
|
||||
def _escape_identifier(self, value):
|
||||
value = value.replace(self.escape_quote, self.escape_to_quote)
|
||||
return value.replace("%", "%%")
|
||||
|
||||
|
||||
class PGDialect_pygresql(PGDialect):
|
||||
|
||||
driver = "pygresql"
|
||||
|
||||
statement_compiler = _PGCompiler
|
||||
preparer = _PGIdentifierPreparer
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
import pgdb
|
||||
|
||||
return pgdb
|
||||
|
||||
colspecs = util.update_copy(
|
||||
PGDialect.colspecs,
|
||||
{
|
||||
Numeric: _PGNumeric,
|
||||
HSTORE: _PGHStore,
|
||||
Json: _PGJSON,
|
||||
JSON: _PGJSON,
|
||||
JSONB: _PGJSONB,
|
||||
UUID: _PGUUID,
|
||||
},
|
||||
)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(PGDialect_pygresql, self).__init__(**kwargs)
|
||||
try:
|
||||
version = self.dbapi.version
|
||||
m = re.match(r"(\d+)\.(\d+)", version)
|
||||
version = (int(m.group(1)), int(m.group(2)))
|
||||
except (AttributeError, ValueError, TypeError):
|
||||
version = (0, 0)
|
||||
self.dbapi_version = version
|
||||
if version < (5, 0):
|
||||
has_native_hstore = has_native_json = has_native_uuid = False
|
||||
if version != (0, 0):
|
||||
util.warn(
|
||||
"PyGreSQL is only fully supported by SQLAlchemy"
|
||||
" since version 5.0."
|
||||
)
|
||||
else:
|
||||
self.supports_unicode_statements = True
|
||||
self.supports_unicode_binds = True
|
||||
has_native_hstore = has_native_json = has_native_uuid = True
|
||||
self.has_native_hstore = has_native_hstore
|
||||
self.has_native_json = has_native_json
|
||||
self.has_native_uuid = has_native_uuid
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = url.translate_connect_args(username="user")
|
||||
if "port" in opts:
|
||||
opts["host"] = "%s:%s" % (
|
||||
opts.get("host", "").rsplit(":", 1)[0],
|
||||
opts.pop("port"),
|
||||
)
|
||||
opts.update(url.query)
|
||||
return [], opts
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
if isinstance(e, self.dbapi.Error):
|
||||
if not connection:
|
||||
return False
|
||||
try:
|
||||
connection = connection.connection
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
if not connection:
|
||||
return False
|
||||
try:
|
||||
return connection.closed
|
||||
except AttributeError: # PyGreSQL < 5.0
|
||||
return connection._cnx is None
|
||||
return False
|
||||
|
||||
|
||||
dialect = PGDialect_pygresql
@@ -0,0 +1,109 @@
# postgresql/pypostgresql.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: postgresql+pypostgresql
    :name: py-postgresql
    :dbapi: pypostgresql
    :connectstring: postgresql+pypostgresql://user:password@host:port/dbname[?key=value&key=value...]
    :url: http://python.projects.pgfoundry.org/

.. note::

    The pypostgresql dialect is **not tested as part of SQLAlchemy's continuous
    integration** and may have unresolved issues.  The recommended PostgreSQL
    driver is psycopg2.


"""  # noqa

from .base import PGDialect
from .base import PGExecutionContext
from ... import processors
from ... import types as sqltypes
from ... import util


class PGNumeric(sqltypes.Numeric):
    def bind_processor(self, dialect):
        return processors.to_str

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            return None
        else:
            return processors.to_float


class PGExecutionContext_pypostgresql(PGExecutionContext):
    pass


class PGDialect_pypostgresql(PGDialect):
    driver = "pypostgresql"

    supports_unicode_statements = True
    supports_unicode_binds = True
    description_encoding = None
    default_paramstyle = "pyformat"

    # requires trunk version to support sane rowcounts
    # TODO: use dbapi version information to set this flag appropriately
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False

    execution_ctx_cls = PGExecutionContext_pypostgresql
    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: PGNumeric,
            # prevents PGNumeric from being used
            sqltypes.Float: sqltypes.Float,
        },
    )

    @classmethod
    def dbapi(cls):
        from postgresql.driver import dbapi20

        return dbapi20

    _DBAPI_ERROR_NAMES = [
        "Error",
        "InterfaceError",
        "DatabaseError",
        "DataError",
        "OperationalError",
        "IntegrityError",
        "InternalError",
        "ProgrammingError",
        "NotSupportedError",
    ]

    @util.memoized_property
    def dbapi_exception_translation_map(self):
        if self.dbapi is None:
            return {}

        return dict(
            (getattr(self.dbapi, name).__name__, name)
            for name in self._DBAPI_ERROR_NAMES
        )

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username="user")
        if "port" in opts:
            opts["port"] = int(opts["port"])
        else:
            opts["port"] = 5432
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        return "connection is closed" in str(e)


dialect = PGDialect_pypostgresql
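A hedged usage sketch, not part of the committed file: an engine bound to this
dialect would be created from the connect string documented in the module
docstring above. The credentials and database name are placeholders, and a
running PostgreSQL server plus the py-postgresql DBAPI (the "postgresql"
package imported by dbapi() above) are assumed to be installed.

    from sqlalchemy import create_engine, text

    # hypothetical URL; dialect.dbapi() needs the py-postgresql package
    engine = create_engine(
        "postgresql+pypostgresql://scott:tiger@localhost:5432/test"
    )

    with engine.connect() as conn:
        print(conn.scalar(text("SELECT 1")))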
166
.venv/Lib/site-packages/sqlalchemy/dialects/postgresql/ranges.py
Normal file
@@ -0,0 +1,166 @@
# Copyright (C) 2013-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .base import ischema_names
from ... import types as sqltypes


__all__ = ("INT4RANGE", "INT8RANGE", "NUMRANGE")


class RangeOperators(object):
    """
    This mixin provides functionality for the Range Operators
    listed in Table 9-44 of the `postgres documentation`__ for Range
    Functions and Operators. It is used by all the range types
    provided in the ``postgres`` dialect and can likely be used for
    any range types you create yourself.

    __ http://www.postgresql.org/docs/devel/static/functions-range.html

    No extra support is provided for the Range Functions listed in
    Table 9-45 of the postgres documentation. For these, the normal
    :func:`~sqlalchemy.sql.expression.func` object should be used.

    """

    class comparator_factory(sqltypes.Concatenable.Comparator):
        """Define comparison operations for range types."""

        def __ne__(self, other):
            "Boolean expression. Returns true if two ranges are not equal"
            if other is None:
                return super(RangeOperators.comparator_factory, self).__ne__(
                    other
                )
            else:
                return self.expr.op("<>")(other)

        def contains(self, other, **kw):
            """Boolean expression. Returns true if the right hand operand,
            which can be an element or a range, is contained within the
            column.
            """
            return self.expr.op("@>")(other)

        def contained_by(self, other):
            """Boolean expression. Returns true if the column is contained
            within the right hand operand.
            """
            return self.expr.op("<@")(other)

        def overlaps(self, other):
            """Boolean expression. Returns true if the column overlaps
            (has points in common with) the right hand operand.
            """
            return self.expr.op("&&")(other)

        def strictly_left_of(self, other):
            """Boolean expression. Returns true if the column is strictly
            left of the right hand operand.
            """
            return self.expr.op("<<")(other)

        __lshift__ = strictly_left_of

        def strictly_right_of(self, other):
            """Boolean expression. Returns true if the column is strictly
            right of the right hand operand.
            """
            return self.expr.op(">>")(other)

        __rshift__ = strictly_right_of

        def not_extend_right_of(self, other):
            """Boolean expression. Returns true if the range in the column
            does not extend right of the range in the operand.
            """
            return self.expr.op("&<")(other)

        def not_extend_left_of(self, other):
            """Boolean expression. Returns true if the range in the column
            does not extend left of the range in the operand.
            """
            return self.expr.op("&>")(other)

        def adjacent_to(self, other):
            """Boolean expression. Returns true if the range in the column
            is adjacent to the range in the operand.
            """
            return self.expr.op("-|-")(other)

        def __add__(self, other):
            """Range expression. Returns the union of the two ranges.
            Will raise an exception if the resulting range is not
            contiguous.
            """
            return self.expr.op("+")(other)


class INT4RANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL INT4RANGE type.

    """

    __visit_name__ = "INT4RANGE"


ischema_names["int4range"] = INT4RANGE


class INT8RANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL INT8RANGE type.

    """

    __visit_name__ = "INT8RANGE"


ischema_names["int8range"] = INT8RANGE


class NUMRANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL NUMRANGE type.

    """

    __visit_name__ = "NUMRANGE"


ischema_names["numrange"] = NUMRANGE


class DATERANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL DATERANGE type.

    """

    __visit_name__ = "DATERANGE"


ischema_names["daterange"] = DATERANGE


class TSRANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL TSRANGE type.

    """

    __visit_name__ = "TSRANGE"


ischema_names["tsrange"] = TSRANGE


class TSTZRANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL TSTZRANGE type.

    """

    __visit_name__ = "TSTZRANGE"


ischema_names["tstzrange"] = TSTZRANGE
@@ -0,0 +1,48 @@
# postgresql/zxjdbc.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: postgresql+zxjdbc
    :name: zxJDBC for Jython
    :dbapi: zxjdbc
    :connectstring: postgresql+zxjdbc://scott:tiger@localhost/db
    :driverurl: http://jdbc.postgresql.org/


"""
from .base import PGDialect
from .base import PGExecutionContext
from ...connectors.zxJDBC import ZxJDBCConnector


class PGExecutionContext_zxjdbc(PGExecutionContext):
    def create_cursor(self):
        cursor = self._dbapi_connection.cursor()
        cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
        return cursor


class PGDialect_zxjdbc(ZxJDBCConnector, PGDialect):
    jdbc_db_name = "postgresql"
    jdbc_driver_name = "org.postgresql.Driver"

    execution_ctx_cls = PGExecutionContext_zxjdbc

    supports_native_decimal = True

    def __init__(self, *args, **kwargs):
        super(PGDialect_zxjdbc, self).__init__(*args, **kwargs)
        from com.ziclix.python.sql.handler import PostgresqlDataHandler

        self.DataHandler = PostgresqlDataHandler

    def _get_server_version_info(self, connection):
        parts = connection.connection.dbversion.split(".")
        return tuple(int(x) for x in parts)


dialect = PGDialect_zxjdbc
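A small standalone sketch, not part of the committed file, of the version
parsing performed by PGDialect_zxjdbc._get_server_version_info() above; the
dbversion string is hypothetical, standing in for the value exposed by a live
zxJDBC connection.

    # mimic connection.connection.dbversion from a zxJDBC connection
    dbversion = "9.6.2"
    version_info = tuple(int(x) for x in dbversion.split("."))
    print(version_info)  # (9, 6, 2)

Note that a dot-separated part that is not purely numeric would raise
ValueError here; the committed code assumes numeric version components.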