From 6d7ba58f880be618ade07f8ea080fe8c4bf8a896 Mon Sep 17 00:00:00 2001 From: cyfraeviolae Date: Wed, 3 Apr 2024 03:10:44 -0400 Subject: venv --- .../site-packages/sqlalchemy/sql/__init__.py | 145 + .../sql/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 6476 bytes .../__pycache__/_dml_constructors.cpython-311.pyc | Bin 0 -> 4303 bytes .../_elements_constructors.cpython-311.pyc | Bin 0 -> 66541 bytes .../sql/__pycache__/_orm_types.cpython-311.pyc | Bin 0 -> 678 bytes .../sql/__pycache__/_py_util.cpython-311.pyc | Bin 0 -> 3351 bytes .../_selectable_constructors.cpython-311.pyc | Bin 0 -> 21634 bytes .../sql/__pycache__/_typing.cpython-311.pyc | Bin 0 -> 16730 bytes .../sql/__pycache__/annotation.cpython-311.pyc | Bin 0 -> 23147 bytes .../sql/__pycache__/base.cpython-311.pyc | Bin 0 -> 107198 bytes .../sql/__pycache__/cache_key.cpython-311.pyc | Bin 0 -> 39232 bytes .../sql/__pycache__/coercions.cpython-311.pyc | Bin 0 -> 53716 bytes .../sql/__pycache__/compiler.cpython-311.pyc | Bin 0 -> 286030 bytes .../sql/__pycache__/crud.cpython-311.pyc | Bin 0 -> 47495 bytes .../sqlalchemy/sql/__pycache__/ddl.cpython-311.pyc | Bin 0 -> 62979 bytes .../__pycache__/default_comparator.cpython-311.pyc | Bin 0 -> 18086 bytes .../sqlalchemy/sql/__pycache__/dml.cpython-311.pyc | Bin 0 -> 77526 bytes .../sql/__pycache__/elements.cpython-311.pyc | Bin 0 -> 217761 bytes .../sql/__pycache__/events.cpython-311.pyc | Bin 0 -> 19364 bytes .../sql/__pycache__/expression.cpython-311.pyc | Bin 0 -> 7230 bytes .../sql/__pycache__/functions.cpython-311.pyc | Bin 0 -> 80876 bytes .../sql/__pycache__/lambdas.cpython-311.pyc | Bin 0 -> 59594 bytes .../sql/__pycache__/naming.cpython-311.pyc | Bin 0 -> 9203 bytes .../sql/__pycache__/operators.cpython-311.pyc | Bin 0 -> 93289 bytes .../sql/__pycache__/roles.cpython-311.pyc | Bin 0 -> 14938 bytes .../sql/__pycache__/schema.cpython-311.pyc | Bin 0 -> 255679 bytes .../sql/__pycache__/selectable.cpython-311.pyc | Bin 0 -> 272258 bytes 
.../sql/__pycache__/sqltypes.cpython-311.pyc | Bin 0 -> 158830 bytes .../sql/__pycache__/traversals.cpython-311.pyc | Bin 0 -> 49251 bytes .../sql/__pycache__/type_api.cpython-311.pyc | Bin 0 -> 87962 bytes .../sql/__pycache__/util.cpython-311.pyc | Bin 0 -> 59936 bytes .../sql/__pycache__/visitors.cpython-311.pyc | Bin 0 -> 38853 bytes .../sqlalchemy/sql/_dml_constructors.py | 140 + .../sqlalchemy/sql/_elements_constructors.py | 1840 +++++ .../site-packages/sqlalchemy/sql/_orm_types.py | 20 + .../site-packages/sqlalchemy/sql/_py_util.py | 75 + .../sqlalchemy/sql/_selectable_constructors.py | 635 ++ .../site-packages/sqlalchemy/sql/_typing.py | 457 ++ .../site-packages/sqlalchemy/sql/annotation.py | 585 ++ .../site-packages/sqlalchemy/sql/base.py | 2180 ++++++ .../site-packages/sqlalchemy/sql/cache_key.py | 1057 +++ .../site-packages/sqlalchemy/sql/coercions.py | 1389 ++++ .../site-packages/sqlalchemy/sql/compiler.py | 7811 ++++++++++++++++++++ .../site-packages/sqlalchemy/sql/crud.py | 1669 +++++ .../python3.11/site-packages/sqlalchemy/sql/ddl.py | 1378 ++++ .../sqlalchemy/sql/default_comparator.py | 552 ++ .../python3.11/site-packages/sqlalchemy/sql/dml.py | 1817 +++++ .../site-packages/sqlalchemy/sql/elements.py | 5405 ++++++++++++++ .../site-packages/sqlalchemy/sql/events.py | 455 ++ .../site-packages/sqlalchemy/sql/expression.py | 162 + .../site-packages/sqlalchemy/sql/functions.py | 2052 +++++ .../site-packages/sqlalchemy/sql/lambdas.py | 1449 ++++ .../site-packages/sqlalchemy/sql/naming.py | 212 + .../site-packages/sqlalchemy/sql/operators.py | 2573 +++++++ .../site-packages/sqlalchemy/sql/roles.py | 323 + .../site-packages/sqlalchemy/sql/schema.py | 6115 +++++++++++++++ .../site-packages/sqlalchemy/sql/selectable.py | 6913 +++++++++++++++++ .../site-packages/sqlalchemy/sql/sqltypes.py | 3786 ++++++++++ .../site-packages/sqlalchemy/sql/traversals.py | 1022 +++ .../site-packages/sqlalchemy/sql/type_api.py | 2303 ++++++ .../site-packages/sqlalchemy/sql/util.py 
| 1486 ++++ .../site-packages/sqlalchemy/sql/visitors.py | 1165 +++ 62 files changed, 57171 insertions(+) create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__init__.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/__init__.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_dml_constructors.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_elements_constructors.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_orm_types.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_py_util.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_typing.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/annotation.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/base.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/cache_key.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/coercions.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/compiler.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/crud.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/ddl.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/default_comparator.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/dml.cpython-311.pyc create mode 100644 
venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/elements.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/events.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/expression.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/functions.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/lambdas.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/naming.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/operators.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/roles.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/schema.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/selectable.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/sqltypes.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/traversals.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/type_api.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/util.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/visitors.cpython-311.pyc create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/_dml_constructors.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/_elements_constructors.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/_orm_types.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/_py_util.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/_selectable_constructors.py create 
mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/_typing.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/annotation.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/base.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/cache_key.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/coercions.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/compiler.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/crud.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/default_comparator.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/dml.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/elements.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/events.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/expression.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/functions.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/lambdas.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/naming.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/operators.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/roles.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/schema.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/selectable.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/sqltypes.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/traversals.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/type_api.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/util.py create mode 100644 venv/lib/python3.11/site-packages/sqlalchemy/sql/visitors.py (limited to 
'venv/lib/python3.11/site-packages/sqlalchemy/sql') diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__init__.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__init__.py new file mode 100644 index 0000000..9e0d2ca --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__init__.py @@ -0,0 +1,145 @@ +# sql/__init__.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +from typing import Any +from typing import TYPE_CHECKING + +from ._typing import ColumnExpressionArgument as ColumnExpressionArgument +from ._typing import NotNullable as NotNullable +from ._typing import Nullable as Nullable +from .base import Executable as Executable +from .compiler import COLLECT_CARTESIAN_PRODUCTS as COLLECT_CARTESIAN_PRODUCTS +from .compiler import FROM_LINTING as FROM_LINTING +from .compiler import NO_LINTING as NO_LINTING +from .compiler import WARN_LINTING as WARN_LINTING +from .ddl import BaseDDLElement as BaseDDLElement +from .ddl import DDL as DDL +from .ddl import DDLElement as DDLElement +from .ddl import ExecutableDDLElement as ExecutableDDLElement +from .expression import Alias as Alias +from .expression import alias as alias +from .expression import all_ as all_ +from .expression import and_ as and_ +from .expression import any_ as any_ +from .expression import asc as asc +from .expression import between as between +from .expression import bindparam as bindparam +from .expression import case as case +from .expression import cast as cast +from .expression import ClauseElement as ClauseElement +from .expression import collate as collate +from .expression import column as column +from .expression import ColumnCollection as ColumnCollection +from .expression import ColumnElement as ColumnElement +from .expression import CompoundSelect as CompoundSelect +from .expression import cte as cte 
+from .expression import Delete as Delete +from .expression import delete as delete +from .expression import desc as desc +from .expression import distinct as distinct +from .expression import except_ as except_ +from .expression import except_all as except_all +from .expression import exists as exists +from .expression import extract as extract +from .expression import false as false +from .expression import False_ as False_ +from .expression import FromClause as FromClause +from .expression import func as func +from .expression import funcfilter as funcfilter +from .expression import Insert as Insert +from .expression import insert as insert +from .expression import intersect as intersect +from .expression import intersect_all as intersect_all +from .expression import Join as Join +from .expression import join as join +from .expression import label as label +from .expression import LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT +from .expression import ( + LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY, +) +from .expression import LABEL_STYLE_NONE as LABEL_STYLE_NONE +from .expression import ( + LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL, +) +from .expression import lambda_stmt as lambda_stmt +from .expression import LambdaElement as LambdaElement +from .expression import lateral as lateral +from .expression import literal as literal +from .expression import literal_column as literal_column +from .expression import modifier as modifier +from .expression import not_ as not_ +from .expression import null as null +from .expression import nulls_first as nulls_first +from .expression import nulls_last as nulls_last +from .expression import nullsfirst as nullsfirst +from .expression import nullslast as nullslast +from .expression import or_ as or_ +from .expression import outerjoin as outerjoin +from .expression import outparam as outparam +from .expression import over as over +from .expression import quoted_name as quoted_name +from 
.expression import Select as Select +from .expression import select as select +from .expression import Selectable as Selectable +from .expression import SelectLabelStyle as SelectLabelStyle +from .expression import SQLColumnExpression as SQLColumnExpression +from .expression import StatementLambdaElement as StatementLambdaElement +from .expression import Subquery as Subquery +from .expression import table as table +from .expression import TableClause as TableClause +from .expression import TableSample as TableSample +from .expression import tablesample as tablesample +from .expression import text as text +from .expression import true as true +from .expression import True_ as True_ +from .expression import try_cast as try_cast +from .expression import tuple_ as tuple_ +from .expression import type_coerce as type_coerce +from .expression import union as union +from .expression import union_all as union_all +from .expression import Update as Update +from .expression import update as update +from .expression import Values as Values +from .expression import values as values +from .expression import within_group as within_group +from .visitors import ClauseVisitor as ClauseVisitor + + +def __go(lcls: Any) -> None: + from .. import util as _sa_util + + from . import base + from . import coercions + from . import elements + from . import lambdas + from . import selectable + from . import schema + from . import traversals + from . 
import type_api + + if not TYPE_CHECKING: + base.coercions = elements.coercions = coercions + base.elements = elements + base.type_api = type_api + coercions.elements = elements + coercions.lambdas = lambdas + coercions.schema = schema + coercions.selectable = selectable + + from .annotation import _prepare_annotations + from .annotation import Annotated + from .elements import AnnotatedColumnElement + from .elements import ClauseList + from .selectable import AnnotatedFromClause + + _prepare_annotations(ColumnElement, AnnotatedColumnElement) + _prepare_annotations(FromClause, AnnotatedFromClause) + _prepare_annotations(ClauseList, Annotated) + + _sa_util.preloaded.import_prefix("sqlalchemy.sql") + + +__go(locals()) diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..16135d0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_dml_constructors.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_dml_constructors.cpython-311.pyc new file mode 100644 index 0000000..0525743 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_dml_constructors.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_elements_constructors.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_elements_constructors.cpython-311.pyc new file mode 100644 index 0000000..493de78 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_elements_constructors.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_orm_types.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_orm_types.cpython-311.pyc new file mode 100644 index 0000000..76e4a97 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_orm_types.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_py_util.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_py_util.cpython-311.pyc new file mode 100644 index 0000000..ab0a578 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_py_util.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-311.pyc new file mode 100644 index 0000000..29ea597 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_typing.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_typing.cpython-311.pyc new file mode 100644 index 0000000..d5f60fb Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/_typing.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/annotation.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/annotation.cpython-311.pyc new file mode 100644 index 0000000..d52f144 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/annotation.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/base.cpython-311.pyc new file mode 100644 index 0000000..efe2b16 Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/base.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/cache_key.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/cache_key.cpython-311.pyc new file mode 100644 index 0000000..fa315ca Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/cache_key.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/coercions.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/coercions.cpython-311.pyc new file mode 100644 index 0000000..132dce4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/coercions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/compiler.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/compiler.cpython-311.pyc new file mode 100644 index 0000000..1f1a5fc Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/compiler.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/crud.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/crud.cpython-311.pyc new file mode 100644 index 0000000..021ffb3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/crud.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/ddl.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/ddl.cpython-311.pyc new file mode 100644 index 0000000..3f5a4e9 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/ddl.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/default_comparator.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/default_comparator.cpython-311.pyc new file mode 100644 index 0000000..ae92d63 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/default_comparator.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/dml.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/dml.cpython-311.pyc new file mode 100644 index 0000000..14985ca Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/dml.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/elements.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/elements.cpython-311.pyc new file mode 100644 index 0000000..3eda846 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/elements.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/events.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/events.cpython-311.pyc new file mode 100644 index 0000000..6ff8a38 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/events.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/expression.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/expression.cpython-311.pyc new file mode 100644 index 0000000..e3d2b68 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/expression.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/functions.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/functions.cpython-311.pyc new file mode 100644 index 0000000..f03311a Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/functions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/lambdas.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/lambdas.cpython-311.pyc new file mode 100644 index 0000000..9eea091 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/lambdas.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/naming.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/naming.cpython-311.pyc new file mode 100644 index 0000000..86ebcd9 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/naming.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/operators.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/operators.cpython-311.pyc new file mode 100644 index 0000000..36ffdc3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/operators.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/roles.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/roles.cpython-311.pyc new file mode 100644 index 0000000..d3dab5d Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/roles.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/schema.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/schema.cpython-311.pyc new file mode 100644 index 0000000..ca5ea38 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/schema.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/selectable.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/selectable.cpython-311.pyc new file mode 100644 index 0000000..fea6f8f Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/selectable.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/sqltypes.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/sqltypes.cpython-311.pyc new file mode 100644 index 0000000..1214a08 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/sqltypes.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/traversals.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/traversals.cpython-311.pyc new file mode 100644 index 0000000..f1c5425 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/traversals.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/type_api.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/type_api.cpython-311.pyc new file mode 100644 index 0000000..47f73cd Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/type_api.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/util.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/util.cpython-311.pyc new file mode 100644 index 0000000..1a4cda5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/util.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/visitors.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/visitors.cpython-311.pyc new file mode 100644 index 0000000..63c33db Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/sqlalchemy/sql/__pycache__/visitors.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/_dml_constructors.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_dml_constructors.py new file mode 100644 index 0000000..a7ead52 --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_dml_constructors.py @@ -0,0 +1,140 @@ +# sql/_dml_constructors.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from .dml import Delete +from .dml import Insert +from .dml import Update + +if TYPE_CHECKING: + from ._typing import _DMLTableArgument + + +def insert(table: _DMLTableArgument) -> Insert: + """Construct an :class:`_expression.Insert` object. + + E.g.:: + + from sqlalchemy import insert + + stmt = ( + insert(user_table). + values(name='username', fullname='Full Username') + ) + + Similar functionality is available via the + :meth:`_expression.TableClause.insert` method on + :class:`_schema.Table`. + + .. seealso:: + + :ref:`tutorial_core_insert` - in the :ref:`unified_tutorial` + + + :param table: :class:`_expression.TableClause` + which is the subject of the + insert. + + :param values: collection of values to be inserted; see + :meth:`_expression.Insert.values` + for a description of allowed formats here. + Can be omitted entirely; a :class:`_expression.Insert` construct + will also dynamically render the VALUES clause at execution time + based on the parameters passed to :meth:`_engine.Connection.execute`. 
+ + :param inline: if True, no attempt will be made to retrieve the + SQL-generated default values to be provided within the statement; + in particular, + this allows SQL expressions to be rendered 'inline' within the + statement without the need to pre-execute them beforehand; for + backends that support "returning", this turns off the "implicit + returning" feature for the statement. + + If both :paramref:`_expression.insert.values` and compile-time bind + parameters are present, the compile-time bind parameters override the + information specified within :paramref:`_expression.insert.values` on a + per-key basis. + + The keys within :paramref:`_expression.Insert.values` can be either + :class:`~sqlalchemy.schema.Column` objects or their string + identifiers. Each key may reference one of: + + * a literal data value (i.e. string, number, etc.); + * a Column object; + * a SELECT statement. + + If a ``SELECT`` statement is specified which references this + ``INSERT`` statement's table, the statement will be correlated + against the ``INSERT`` statement. + + .. seealso:: + + :ref:`tutorial_core_insert` - in the :ref:`unified_tutorial` + + """ + return Insert(table) + + +def update(table: _DMLTableArgument) -> Update: + r"""Construct an :class:`_expression.Update` object. + + E.g.:: + + from sqlalchemy import update + + stmt = ( + update(user_table). + where(user_table.c.id == 5). + values(name='user #5') + ) + + Similar functionality is available via the + :meth:`_expression.TableClause.update` method on + :class:`_schema.Table`. + + :param table: A :class:`_schema.Table` + object representing the database + table to be updated. + + + .. seealso:: + + :ref:`tutorial_core_update_delete` - in the :ref:`unified_tutorial` + + + """ + return Update(table) + + +def delete(table: _DMLTableArgument) -> Delete: + r"""Construct :class:`_expression.Delete` object. + + E.g.:: + + from sqlalchemy import delete + + stmt = ( + delete(user_table). 
+ where(user_table.c.id == 5) + ) + + Similar functionality is available via the + :meth:`_expression.TableClause.delete` method on + :class:`_schema.Table`. + + :param table: The table to delete rows from. + + .. seealso:: + + :ref:`tutorial_core_update_delete` - in the :ref:`unified_tutorial` + + + """ + return Delete(table) diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/_elements_constructors.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_elements_constructors.py new file mode 100644 index 0000000..77cc2a8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_elements_constructors.py @@ -0,0 +1,1840 @@ +# sql/_elements_constructors.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import typing +from typing import Any +from typing import Callable +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple as typing_Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import coercions +from . 
import roles +from .base import _NoArg +from .coercions import _document_text_coercion +from .elements import BindParameter +from .elements import BooleanClauseList +from .elements import Case +from .elements import Cast +from .elements import CollationClause +from .elements import CollectionAggregate +from .elements import ColumnClause +from .elements import ColumnElement +from .elements import Extract +from .elements import False_ +from .elements import FunctionFilter +from .elements import Label +from .elements import Null +from .elements import Over +from .elements import TextClause +from .elements import True_ +from .elements import TryCast +from .elements import Tuple +from .elements import TypeCoerce +from .elements import UnaryExpression +from .elements import WithinGroup +from .functions import FunctionElement +from ..util.typing import Literal + +if typing.TYPE_CHECKING: + from ._typing import _ByArgument + from ._typing import _ColumnExpressionArgument + from ._typing import _ColumnExpressionOrLiteralArgument + from ._typing import _ColumnExpressionOrStrLabelArgument + from ._typing import _TypeEngineArgument + from .elements import BinaryExpression + from .selectable import FromClause + from .type_api import TypeEngine + +_T = TypeVar("_T") + + +def all_(expr: _ColumnExpressionArgument[_T]) -> CollectionAggregate[bool]: + """Produce an ALL expression. + + For dialects such as that of PostgreSQL, this operator applies + to usage of the :class:`_types.ARRAY` datatype, for that of + MySQL, it may apply to a subquery. 
e.g.:: + + # renders on PostgreSQL: + # '5 = ALL (somearray)' + expr = 5 == all_(mytable.c.somearray) + + # renders on MySQL: + # '5 = ALL (SELECT value FROM table)' + expr = 5 == all_(select(table.c.value)) + + Comparison to NULL may work using ``None``:: + + None == all_(mytable.c.somearray) + + The any_() / all_() operators also feature a special "operand flipping" + behavior such that if any_() / all_() are used on the left side of a + comparison using a standalone operator such as ``==``, ``!=``, etc. + (not including operator methods such as + :meth:`_sql.ColumnOperators.is_`) the rendered expression is flipped:: + + # would render '5 = ALL (column)` + all_(mytable.c.column) == 5 + + Or with ``None``, which note will not perform + the usual step of rendering "IS" as is normally the case for NULL:: + + # would render 'NULL = ALL(somearray)' + all_(mytable.c.somearray) == None + + .. versionchanged:: 1.4.26 repaired the use of any_() / all_() + comparing to NULL on the right side to be flipped to the left. + + The column-level :meth:`_sql.ColumnElement.all_` method (not to be + confused with :class:`_types.ARRAY` level + :meth:`_types.ARRAY.Comparator.all`) is shorthand for + ``all_(col)``:: + + 5 == mytable.c.somearray.all_() + + .. seealso:: + + :meth:`_sql.ColumnOperators.all_` + + :func:`_expression.any_` + + """ + return CollectionAggregate._create_all(expr) + + +def and_( # type: ignore[empty-body] + initial_clause: Union[Literal[True], _ColumnExpressionArgument[bool]], + *clauses: _ColumnExpressionArgument[bool], +) -> ColumnElement[bool]: + r"""Produce a conjunction of expressions joined by ``AND``. 
+ + E.g.:: + + from sqlalchemy import and_ + + stmt = select(users_table).where( + and_( + users_table.c.name == 'wendy', + users_table.c.enrolled == True + ) + ) + + The :func:`.and_` conjunction is also available using the + Python ``&`` operator (though note that compound expressions + need to be parenthesized in order to function with Python + operator precedence behavior):: + + stmt = select(users_table).where( + (users_table.c.name == 'wendy') & + (users_table.c.enrolled == True) + ) + + The :func:`.and_` operation is also implicit in some cases; + the :meth:`_expression.Select.where` + method for example can be invoked multiple + times against a statement, which will have the effect of each + clause being combined using :func:`.and_`:: + + stmt = select(users_table).\ + where(users_table.c.name == 'wendy').\ + where(users_table.c.enrolled == True) + + The :func:`.and_` construct must be given at least one positional + argument in order to be valid; a :func:`.and_` construct with no + arguments is ambiguous. To produce an "empty" or dynamically + generated :func:`.and_` expression, from a given list of expressions, + a "default" element of :func:`_sql.true` (or just ``True``) should be + specified:: + + from sqlalchemy import true + criteria = and_(true(), *expressions) + + The above expression will compile to SQL as the expression ``true`` + or ``1 = 1``, depending on backend, if no other expressions are + present. If expressions are present, then the :func:`_sql.true` value is + ignored as it does not affect the outcome of an AND expression that + has other elements. + + .. deprecated:: 1.4 The :func:`.and_` element now requires that at + least one argument is passed; creating the :func:`.and_` construct + with no arguments is deprecated, and will emit a deprecation warning + while continuing to produce a blank SQL string. + + .. seealso:: + + :func:`.or_` + + """ + ... 
+ + +if not TYPE_CHECKING: + # handle deprecated case which allows zero-arguments + def and_(*clauses): # noqa: F811 + r"""Produce a conjunction of expressions joined by ``AND``. + + E.g.:: + + from sqlalchemy import and_ + + stmt = select(users_table).where( + and_( + users_table.c.name == 'wendy', + users_table.c.enrolled == True + ) + ) + + The :func:`.and_` conjunction is also available using the + Python ``&`` operator (though note that compound expressions + need to be parenthesized in order to function with Python + operator precedence behavior):: + + stmt = select(users_table).where( + (users_table.c.name == 'wendy') & + (users_table.c.enrolled == True) + ) + + The :func:`.and_` operation is also implicit in some cases; + the :meth:`_expression.Select.where` + method for example can be invoked multiple + times against a statement, which will have the effect of each + clause being combined using :func:`.and_`:: + + stmt = select(users_table).\ + where(users_table.c.name == 'wendy').\ + where(users_table.c.enrolled == True) + + The :func:`.and_` construct must be given at least one positional + argument in order to be valid; a :func:`.and_` construct with no + arguments is ambiguous. To produce an "empty" or dynamically + generated :func:`.and_` expression, from a given list of expressions, + a "default" element of :func:`_sql.true` (or just ``True``) should be + specified:: + + from sqlalchemy import true + criteria = and_(true(), *expressions) + + The above expression will compile to SQL as the expression ``true`` + or ``1 = 1``, depending on backend, if no other expressions are + present. If expressions are present, then the :func:`_sql.true` value + is ignored as it does not affect the outcome of an AND expression that + has other elements. + + .. 
deprecated:: 1.4 The :func:`.and_` element now requires that at + least one argument is passed; creating the :func:`.and_` construct + with no arguments is deprecated, and will emit a deprecation warning + while continuing to produce a blank SQL string. + + .. seealso:: + + :func:`.or_` + + """ + return BooleanClauseList.and_(*clauses) + + +def any_(expr: _ColumnExpressionArgument[_T]) -> CollectionAggregate[bool]: + """Produce an ANY expression. + + For dialects such as that of PostgreSQL, this operator applies + to usage of the :class:`_types.ARRAY` datatype, for that of + MySQL, it may apply to a subquery. e.g.:: + + # renders on PostgreSQL: + # '5 = ANY (somearray)' + expr = 5 == any_(mytable.c.somearray) + + # renders on MySQL: + # '5 = ANY (SELECT value FROM table)' + expr = 5 == any_(select(table.c.value)) + + Comparison to NULL may work using ``None`` or :func:`_sql.null`:: + + None == any_(mytable.c.somearray) + + The any_() / all_() operators also feature a special "operand flipping" + behavior such that if any_() / all_() are used on the left side of a + comparison using a standalone operator such as ``==``, ``!=``, etc. + (not including operator methods such as + :meth:`_sql.ColumnOperators.is_`) the rendered expression is flipped:: + + # would render '5 = ANY (column)` + any_(mytable.c.column) == 5 + + Or with ``None``, which note will not perform + the usual step of rendering "IS" as is normally the case for NULL:: + + # would render 'NULL = ANY(somearray)' + any_(mytable.c.somearray) == None + + .. versionchanged:: 1.4.26 repaired the use of any_() / all_() + comparing to NULL on the right side to be flipped to the left. + + The column-level :meth:`_sql.ColumnElement.any_` method (not to be + confused with :class:`_types.ARRAY` level + :meth:`_types.ARRAY.Comparator.any`) is shorthand for + ``any_(col)``:: + + 5 = mytable.c.somearray.any_() + + .. 
seealso:: + + :meth:`_sql.ColumnOperators.any_` + + :func:`_expression.all_` + + """ + return CollectionAggregate._create_any(expr) + + +def asc( + column: _ColumnExpressionOrStrLabelArgument[_T], +) -> UnaryExpression[_T]: + """Produce an ascending ``ORDER BY`` clause element. + + e.g.:: + + from sqlalchemy import asc + stmt = select(users_table).order_by(asc(users_table.c.name)) + + will produce SQL as:: + + SELECT id, name FROM user ORDER BY name ASC + + The :func:`.asc` function is a standalone version of the + :meth:`_expression.ColumnElement.asc` + method available on all SQL expressions, + e.g.:: + + + stmt = select(users_table).order_by(users_table.c.name.asc()) + + :param column: A :class:`_expression.ColumnElement` (e.g. + scalar SQL expression) + with which to apply the :func:`.asc` operation. + + .. seealso:: + + :func:`.desc` + + :func:`.nulls_first` + + :func:`.nulls_last` + + :meth:`_expression.Select.order_by` + + """ + return UnaryExpression._create_asc(column) + + +def collate( + expression: _ColumnExpressionArgument[str], collation: str +) -> BinaryExpression[str]: + """Return the clause ``expression COLLATE collation``. + + e.g.:: + + collate(mycolumn, 'utf8_bin') + + produces:: + + mycolumn COLLATE utf8_bin + + The collation expression is also quoted if it is a case sensitive + identifier, e.g. contains uppercase characters. + + .. versionchanged:: 1.2 quoting is automatically applied to COLLATE + expressions if they are case sensitive. + + """ + return CollationClause._create_collation_expression(expression, collation) + + +def between( + expr: _ColumnExpressionOrLiteralArgument[_T], + lower_bound: Any, + upper_bound: Any, + symmetric: bool = False, +) -> BinaryExpression[bool]: + """Produce a ``BETWEEN`` predicate clause. 
+ + E.g.:: + + from sqlalchemy import between + stmt = select(users_table).where(between(users_table.c.id, 5, 7)) + + Would produce SQL resembling:: + + SELECT id, name FROM user WHERE id BETWEEN :id_1 AND :id_2 + + The :func:`.between` function is a standalone version of the + :meth:`_expression.ColumnElement.between` method available on all + SQL expressions, as in:: + + stmt = select(users_table).where(users_table.c.id.between(5, 7)) + + All arguments passed to :func:`.between`, including the left side + column expression, are coerced from Python scalar values if a + the value is not a :class:`_expression.ColumnElement` subclass. + For example, + three fixed values can be compared as in:: + + print(between(5, 3, 7)) + + Which would produce:: + + :param_1 BETWEEN :param_2 AND :param_3 + + :param expr: a column expression, typically a + :class:`_expression.ColumnElement` + instance or alternatively a Python scalar expression to be coerced + into a column expression, serving as the left side of the ``BETWEEN`` + expression. + + :param lower_bound: a column or Python scalar expression serving as the + lower bound of the right side of the ``BETWEEN`` expression. + + :param upper_bound: a column or Python scalar expression serving as the + upper bound of the right side of the ``BETWEEN`` expression. + + :param symmetric: if True, will render " BETWEEN SYMMETRIC ". Note + that not all databases support this syntax. + + .. seealso:: + + :meth:`_expression.ColumnElement.between` + + """ + col_expr = coercions.expect(roles.ExpressionElementRole, expr) + return col_expr.between(lower_bound, upper_bound, symmetric=symmetric) + + +def outparam( + key: str, type_: Optional[TypeEngine[_T]] = None +) -> BindParameter[_T]: + """Create an 'OUT' parameter for usage in functions (stored procedures), + for databases which support them. + + The ``outparam`` can be used like a regular function parameter. 
+ The "output" value will be available from the + :class:`~sqlalchemy.engine.CursorResult` object via its ``out_parameters`` + attribute, which returns a dictionary containing the values. + + """ + return BindParameter(key, None, type_=type_, unique=False, isoutparam=True) + + +@overload +def not_(clause: BinaryExpression[_T]) -> BinaryExpression[_T]: ... + + +@overload +def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]: ... + + +def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]: + """Return a negation of the given clause, i.e. ``NOT(clause)``. + + The ``~`` operator is also overloaded on all + :class:`_expression.ColumnElement` subclasses to produce the + same result. + + """ + + return coercions.expect(roles.ExpressionElementRole, clause).__invert__() + + +def bindparam( + key: Optional[str], + value: Any = _NoArg.NO_ARG, + type_: Optional[_TypeEngineArgument[_T]] = None, + unique: bool = False, + required: Union[bool, Literal[_NoArg.NO_ARG]] = _NoArg.NO_ARG, + quote: Optional[bool] = None, + callable_: Optional[Callable[[], Any]] = None, + expanding: bool = False, + isoutparam: bool = False, + literal_execute: bool = False, +) -> BindParameter[_T]: + r"""Produce a "bound expression". + + The return value is an instance of :class:`.BindParameter`; this + is a :class:`_expression.ColumnElement` + subclass which represents a so-called + "placeholder" value in a SQL expression, the value of which is + supplied at the point at which the statement in executed against a + database connection. + + In SQLAlchemy, the :func:`.bindparam` construct has + the ability to carry along the actual value that will be ultimately + used at expression time. 
In this way, it serves not just as + a "placeholder" for eventual population, but also as a means of + representing so-called "unsafe" values which should not be rendered + directly in a SQL statement, but rather should be passed along + to the :term:`DBAPI` as values which need to be correctly escaped + and potentially handled for type-safety. + + When using :func:`.bindparam` explicitly, the use case is typically + one of traditional deferment of parameters; the :func:`.bindparam` + construct accepts a name which can then be referred to at execution + time:: + + from sqlalchemy import bindparam + + stmt = select(users_table).where( + users_table.c.name == bindparam("username") + ) + + The above statement, when rendered, will produce SQL similar to:: + + SELECT id, name FROM user WHERE name = :username + + In order to populate the value of ``:username`` above, the value + would typically be applied at execution time to a method + like :meth:`_engine.Connection.execute`:: + + result = connection.execute(stmt, {"username": "wendy"}) + + Explicit use of :func:`.bindparam` is also common when producing + UPDATE or DELETE statements that are to be invoked multiple times, + where the WHERE criterion of the statement is to change on each + invocation, such as:: + + stmt = ( + users_table.update() + .where(user_table.c.name == bindparam("username")) + .values(fullname=bindparam("fullname")) + ) + + connection.execute( + stmt, + [ + {"username": "wendy", "fullname": "Wendy Smith"}, + {"username": "jack", "fullname": "Jack Jones"}, + ], + ) + + SQLAlchemy's Core expression system makes wide use of + :func:`.bindparam` in an implicit sense. It is typical that Python + literal values passed to virtually all SQL expression functions are + coerced into fixed :func:`.bindparam` constructs. 
For example, given + a comparison operation such as:: + + expr = users_table.c.name == 'Wendy' + + The above expression will produce a :class:`.BinaryExpression` + construct, where the left side is the :class:`_schema.Column` object + representing the ``name`` column, and the right side is a + :class:`.BindParameter` representing the literal value:: + + print(repr(expr.right)) + BindParameter('%(4327771088 name)s', 'Wendy', type_=String()) + + The expression above will render SQL such as:: + + user.name = :name_1 + + Where the ``:name_1`` parameter name is an anonymous name. The + actual string ``Wendy`` is not in the rendered string, but is carried + along where it is later used within statement execution. If we + invoke a statement like the following:: + + stmt = select(users_table).where(users_table.c.name == 'Wendy') + result = connection.execute(stmt) + + We would see SQL logging output as:: + + SELECT "user".id, "user".name + FROM "user" + WHERE "user".name = %(name_1)s + {'name_1': 'Wendy'} + + Above, we see that ``Wendy`` is passed as a parameter to the database, + while the placeholder ``:name_1`` is rendered in the appropriate form + for the target database, in this case the PostgreSQL database. + + Similarly, :func:`.bindparam` is invoked automatically when working + with :term:`CRUD` statements as far as the "VALUES" portion is + concerned. 
The :func:`_expression.insert` construct produces an + ``INSERT`` expression which will, at statement execution time, generate + bound placeholders based on the arguments passed, as in:: + + stmt = users_table.insert() + result = connection.execute(stmt, {"name": "Wendy"}) + + The above will produce SQL output as:: + + INSERT INTO "user" (name) VALUES (%(name)s) + {'name': 'Wendy'} + + The :class:`_expression.Insert` construct, at + compilation/execution time, rendered a single :func:`.bindparam` + mirroring the column name ``name`` as a result of the single ``name`` + parameter we passed to the :meth:`_engine.Connection.execute` method. + + :param key: + the key (e.g. the name) for this bind param. + Will be used in the generated + SQL statement for dialects that use named parameters. This + value may be modified when part of a compilation operation, + if other :class:`BindParameter` objects exist with the same + key, or if its length is too long and truncation is + required. + + If omitted, an "anonymous" name is generated for the bound parameter; + when given a value to bind, the end result is equivalent to calling upon + the :func:`.literal` function with a value to bind, particularly + if the :paramref:`.bindparam.unique` parameter is also provided. + + :param value: + Initial value for this bind param. Will be used at statement + execution time as the value for this parameter passed to the + DBAPI, if no other value is indicated to the statement execution + method for this particular parameter name. Defaults to ``None``. + + :param callable\_: + A callable function that takes the place of "value". The function + will be called at statement execution time to determine the + ultimate value. Used for scenarios where the actual bind + value cannot be determined at the point at which the clause + construct is created, but embedded bind values are still desirable. 
+ + :param type\_: + A :class:`.TypeEngine` class or instance representing an optional + datatype for this :func:`.bindparam`. If not passed, a type + may be determined automatically for the bind, based on the given + value; for example, trivial Python types such as ``str``, + ``int``, ``bool`` + may result in the :class:`.String`, :class:`.Integer` or + :class:`.Boolean` types being automatically selected. + + The type of a :func:`.bindparam` is significant especially in that + the type will apply pre-processing to the value before it is + passed to the database. For example, a :func:`.bindparam` which + refers to a datetime value, and is specified as holding the + :class:`.DateTime` type, may apply conversion needed to the + value (such as stringification on SQLite) before passing the value + to the database. + + :param unique: + if True, the key name of this :class:`.BindParameter` will be + modified if another :class:`.BindParameter` of the same name + already has been located within the containing + expression. This flag is used generally by the internals + when producing so-called "anonymous" bound expressions, it + isn't generally applicable to explicitly-named :func:`.bindparam` + constructs. + + :param required: + If ``True``, a value is required at execution time. If not passed, + it defaults to ``True`` if neither :paramref:`.bindparam.value` + or :paramref:`.bindparam.callable` were passed. If either of these + parameters are present, then :paramref:`.bindparam.required` + defaults to ``False``. + + :param quote: + True if this parameter name requires quoting and is not + currently known as a SQLAlchemy reserved word; this currently + only applies to the Oracle backend, where bound names must + sometimes be quoted. + + :param isoutparam: + if True, the parameter should be treated like a stored procedure + "OUT" parameter. This applies to backends such as Oracle which + support OUT parameters. 
+ + :param expanding: + if True, this parameter will be treated as an "expanding" parameter + at execution time; the parameter value is expected to be a sequence, + rather than a scalar value, and the string SQL statement will + be transformed on a per-execution basis to accommodate the sequence + with a variable number of parameter slots passed to the DBAPI. + This is to allow statement caching to be used in conjunction with + an IN clause. + + .. seealso:: + + :meth:`.ColumnOperators.in_` + + :ref:`baked_in` - with baked queries + + .. note:: The "expanding" feature does not support "executemany"- + style parameter sets. + + .. versionadded:: 1.2 + + .. versionchanged:: 1.3 the "expanding" bound parameter feature now + supports empty lists. + + :param literal_execute: + if True, the bound parameter will be rendered in the compile phase + with a special "POSTCOMPILE" token, and the SQLAlchemy compiler will + render the final value of the parameter into the SQL statement at + statement execution time, omitting the value from the parameter + dictionary / list passed to DBAPI ``cursor.execute()``. This + produces a similar effect as that of using the ``literal_binds``, + compilation flag, however takes place as the statement is sent to + the DBAPI ``cursor.execute()`` method, rather than when the statement + is compiled. The primary use of this + capability is for rendering LIMIT / OFFSET clauses for database + drivers that can't accommodate for bound parameters in these + contexts, while allowing SQL constructs to be cacheable at the + compilation level. + + .. versionadded:: 1.4 Added "post compile" bound parameters + + .. seealso:: + + :ref:`change_4808`. + + .. 
seealso:: + + :ref:`tutorial_sending_parameters` - in the + :ref:`unified_tutorial` + + + """ + return BindParameter( + key, + value, + type_, + unique, + required, + quote, + callable_, + expanding, + isoutparam, + literal_execute, + ) + + +def case( + *whens: Union[ + typing_Tuple[_ColumnExpressionArgument[bool], Any], Mapping[Any, Any] + ], + value: Optional[Any] = None, + else_: Optional[Any] = None, +) -> Case[Any]: + r"""Produce a ``CASE`` expression. + + The ``CASE`` construct in SQL is a conditional object that + acts somewhat analogously to an "if/then" construct in other + languages. It returns an instance of :class:`.Case`. + + :func:`.case` in its usual form is passed a series of "when" + constructs, that is, a list of conditions and results as tuples:: + + from sqlalchemy import case + + stmt = select(users_table).\ + where( + case( + (users_table.c.name == 'wendy', 'W'), + (users_table.c.name == 'jack', 'J'), + else_='E' + ) + ) + + The above statement will produce SQL resembling:: + + SELECT id, name FROM user + WHERE CASE + WHEN (name = :name_1) THEN :param_1 + WHEN (name = :name_2) THEN :param_2 + ELSE :param_3 + END + + When simple equality expressions of several values against a single + parent column are needed, :func:`.case` also has a "shorthand" format + used via the + :paramref:`.case.value` parameter, which is passed a column + expression to be compared. In this form, the :paramref:`.case.whens` + parameter is passed as a dictionary containing expressions to be + compared against keyed to result expressions. The statement below is + equivalent to the preceding statement:: + + stmt = select(users_table).\ + where( + case( + {"wendy": "W", "jack": "J"}, + value=users_table.c.name, + else_='E' + ) + ) + + The values which are accepted as result values in + :paramref:`.case.whens` as well as with :paramref:`.case.else_` are + coerced from Python literals into :func:`.bindparam` constructs. + SQL expressions, e.g. 
:class:`_expression.ColumnElement` constructs, + are accepted + as well. To coerce a literal string expression into a constant + expression rendered inline, use the :func:`_expression.literal_column` + construct, + as in:: + + from sqlalchemy import case, literal_column + + case( + ( + orderline.c.qty > 100, + literal_column("'greaterthan100'") + ), + ( + orderline.c.qty > 10, + literal_column("'greaterthan10'") + ), + else_=literal_column("'lessthan10'") + ) + + The above will render the given constants without using bound + parameters for the result values (but still for the comparison + values), as in:: + + CASE + WHEN (orderline.qty > :qty_1) THEN 'greaterthan100' + WHEN (orderline.qty > :qty_2) THEN 'greaterthan10' + ELSE 'lessthan10' + END + + :param \*whens: The criteria to be compared against, + :paramref:`.case.whens` accepts two different forms, based on + whether or not :paramref:`.case.value` is used. + + .. versionchanged:: 1.4 the :func:`_sql.case` + function now accepts the series of WHEN conditions positionally + + In the first form, it accepts multiple 2-tuples passed as positional + arguments; each 2-tuple consists of ``(, )``, + where the SQL expression is a boolean expression and "value" is a + resulting value, e.g.:: + + case( + (users_table.c.name == 'wendy', 'W'), + (users_table.c.name == 'jack', 'J') + ) + + In the second form, it accepts a Python dictionary of comparison + values mapped to a resulting value; this form requires + :paramref:`.case.value` to be present, and values will be compared + using the ``==`` operator, e.g.:: + + case( + {"wendy": "W", "jack": "J"}, + value=users_table.c.name + ) + + :param value: An optional SQL expression which will be used as a + fixed "comparison point" for candidate values within a dictionary + passed to :paramref:`.case.whens`. 
+ + :param else\_: An optional SQL expression which will be the evaluated + result of the ``CASE`` construct if all expressions within + :paramref:`.case.whens` evaluate to false. When omitted, most + databases will produce a result of NULL if none of the "when" + expressions evaluate to true. + + + """ + return Case(*whens, value=value, else_=else_) + + +def cast( + expression: _ColumnExpressionOrLiteralArgument[Any], + type_: _TypeEngineArgument[_T], +) -> Cast[_T]: + r"""Produce a ``CAST`` expression. + + :func:`.cast` returns an instance of :class:`.Cast`. + + E.g.:: + + from sqlalchemy import cast, Numeric + + stmt = select(cast(product_table.c.unit_price, Numeric(10, 4))) + + The above statement will produce SQL resembling:: + + SELECT CAST(unit_price AS NUMERIC(10, 4)) FROM product + + The :func:`.cast` function performs two distinct functions when + used. The first is that it renders the ``CAST`` expression within + the resulting SQL string. The second is that it associates the given + type (e.g. :class:`.TypeEngine` class or instance) with the column + expression on the Python side, which means the expression will take + on the expression operator behavior associated with that type, + as well as the bound-value handling and result-row-handling behavior + of the type. + + An alternative to :func:`.cast` is the :func:`.type_coerce` function. + This function performs the second task of associating an expression + with a specific type, but does not render the ``CAST`` expression + in SQL. + + :param expression: A SQL expression, such as a + :class:`_expression.ColumnElement` + expression or a Python string which will be coerced into a bound + literal value. + + :param type\_: A :class:`.TypeEngine` class or instance indicating + the type to which the ``CAST`` should apply. + + .. seealso:: + + :ref:`tutorial_casts` + + :func:`.try_cast` - an alternative to CAST that results in + NULLs when the cast fails, instead of raising an error. 
+ Only supported by some dialects. + + :func:`.type_coerce` - an alternative to CAST that coerces the type + on the Python side only, which is often sufficient to generate the + correct SQL and data coercion. + + + """ + return Cast(expression, type_) + + +def try_cast( + expression: _ColumnExpressionOrLiteralArgument[Any], + type_: _TypeEngineArgument[_T], +) -> TryCast[_T]: + """Produce a ``TRY_CAST`` expression for backends which support it; + this is a ``CAST`` which returns NULL for un-castable conversions. + + In SQLAlchemy, this construct is supported **only** by the SQL Server + dialect, and will raise a :class:`.CompileError` if used on other + included backends. However, third party backends may also support + this construct. + + .. tip:: As :func:`_sql.try_cast` originates from the SQL Server dialect, + it's importable both from ``sqlalchemy.`` as well as from + ``sqlalchemy.dialects.mssql``. + + :func:`_sql.try_cast` returns an instance of :class:`.TryCast` and + generally behaves similarly to the :class:`.Cast` construct; + at the SQL level, the difference between ``CAST`` and ``TRY_CAST`` + is that ``TRY_CAST`` returns NULL for an un-castable expression, + such as attempting to cast a string ``"hi"`` to an integer value. + + E.g.:: + + from sqlalchemy import select, try_cast, Numeric + + stmt = select( + try_cast(product_table.c.unit_price, Numeric(10, 4)) + ) + + The above would render on Microsoft SQL Server as:: + + SELECT TRY_CAST (product_table.unit_price AS NUMERIC(10, 4)) + FROM product_table + + .. versionadded:: 2.0.14 :func:`.try_cast` has been + generalized from the SQL Server dialect into a general use + construct that may be supported by additional dialects. + + """ + return TryCast(expression, type_) + + +def column( + text: str, + type_: Optional[_TypeEngineArgument[_T]] = None, + is_literal: bool = False, + _selectable: Optional[FromClause] = None, +) -> ColumnClause[_T]: + """Produce a :class:`.ColumnClause` object. 
+ + The :class:`.ColumnClause` is a lightweight analogue to the + :class:`_schema.Column` class. The :func:`_expression.column` + function can + be invoked with just a name alone, as in:: + + from sqlalchemy import column + + id, name = column("id"), column("name") + stmt = select(id, name).select_from("user") + + The above statement would produce SQL like:: + + SELECT id, name FROM user + + Once constructed, :func:`_expression.column` + may be used like any other SQL + expression element such as within :func:`_expression.select` + constructs:: + + from sqlalchemy.sql import column + + id, name = column("id"), column("name") + stmt = select(id, name).select_from("user") + + The text handled by :func:`_expression.column` + is assumed to be handled + like the name of a database column; if the string contains mixed case, + special characters, or matches a known reserved word on the target + backend, the column expression will render using the quoting + behavior determined by the backend. To produce a textual SQL + expression that is rendered exactly without any quoting, + use :func:`_expression.literal_column` instead, + or pass ``True`` as the + value of :paramref:`_expression.column.is_literal`. Additionally, + full SQL + statements are best handled using the :func:`_expression.text` + construct. 
+ + :func:`_expression.column` can be used in a table-like + fashion by combining it with the :func:`.table` function + (which is the lightweight analogue to :class:`_schema.Table` + ) to produce + a working table construct with minimal boilerplate:: + + from sqlalchemy import table, column, select + + user = table("user", + column("id"), + column("name"), + column("description"), + ) + + stmt = select(user.c.description).where(user.c.name == 'wendy') + + A :func:`_expression.column` / :func:`.table` + construct like that illustrated + above can be created in an + ad-hoc fashion and is not associated with any + :class:`_schema.MetaData`, DDL, or events, unlike its + :class:`_schema.Table` counterpart. + + :param text: the text of the element. + + :param type: :class:`_types.TypeEngine` object which can associate + this :class:`.ColumnClause` with a type. + + :param is_literal: if True, the :class:`.ColumnClause` is assumed to + be an exact expression that will be delivered to the output with no + quoting rules applied regardless of case sensitive settings. the + :func:`_expression.literal_column()` function essentially invokes + :func:`_expression.column` while passing ``is_literal=True``. + + .. seealso:: + + :class:`_schema.Column` + + :func:`_expression.literal_column` + + :func:`.table` + + :func:`_expression.text` + + :ref:`tutorial_select_arbitrary_text` + + """ + return ColumnClause(text, type_, is_literal, _selectable) + + +def desc( + column: _ColumnExpressionOrStrLabelArgument[_T], +) -> UnaryExpression[_T]: + """Produce a descending ``ORDER BY`` clause element. 
+ + e.g.:: + + from sqlalchemy import desc + + stmt = select(users_table).order_by(desc(users_table.c.name)) + + will produce SQL as:: + + SELECT id, name FROM user ORDER BY name DESC + + The :func:`.desc` function is a standalone version of the + :meth:`_expression.ColumnElement.desc` + method available on all SQL expressions, + e.g.:: + + + stmt = select(users_table).order_by(users_table.c.name.desc()) + + :param column: A :class:`_expression.ColumnElement` (e.g. + scalar SQL expression) + with which to apply the :func:`.desc` operation. + + .. seealso:: + + :func:`.asc` + + :func:`.nulls_first` + + :func:`.nulls_last` + + :meth:`_expression.Select.order_by` + + """ + return UnaryExpression._create_desc(column) + + +def distinct(expr: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: + """Produce an column-expression-level unary ``DISTINCT`` clause. + + This applies the ``DISTINCT`` keyword to an individual column + expression, and is typically contained within an aggregate function, + as in:: + + from sqlalchemy import distinct, func + stmt = select(func.count(distinct(users_table.c.name))) + + The above would produce an expression resembling:: + + SELECT COUNT(DISTINCT name) FROM user + + The :func:`.distinct` function is also available as a column-level + method, e.g. :meth:`_expression.ColumnElement.distinct`, as in:: + + stmt = select(func.count(users_table.c.name.distinct())) + + The :func:`.distinct` operator is different from the + :meth:`_expression.Select.distinct` method of + :class:`_expression.Select`, + which produces a ``SELECT`` statement + with ``DISTINCT`` applied to the result set as a whole, + e.g. a ``SELECT DISTINCT`` expression. See that method for further + information. + + .. 
seealso:: + + :meth:`_expression.ColumnElement.distinct` + + :meth:`_expression.Select.distinct` + + :data:`.func` + + """ + return UnaryExpression._create_distinct(expr) + + +def bitwise_not(expr: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: + """Produce a unary bitwise NOT clause, typically via the ``~`` operator. + + Not to be confused with boolean negation :func:`_sql.not_`. + + .. versionadded:: 2.0.2 + + .. seealso:: + + :ref:`operators_bitwise` + + + """ + + return UnaryExpression._create_bitwise_not(expr) + + +def extract(field: str, expr: _ColumnExpressionArgument[Any]) -> Extract: + """Return a :class:`.Extract` construct. + + This is typically available as :func:`.extract` + as well as ``func.extract`` from the + :data:`.func` namespace. + + :param field: The field to extract. + + :param expr: A column or Python scalar expression serving as the + right side of the ``EXTRACT`` expression. + + E.g.:: + + from sqlalchemy import extract + from sqlalchemy import table, column + + logged_table = table("user", + column("id"), + column("date_created"), + ) + + stmt = select(logged_table.c.id).where( + extract("YEAR", logged_table.c.date_created) == 2021 + ) + + In the above example, the statement is used to select ids from the + database where the ``YEAR`` component matches a specific value. + + Similarly, one can also select an extracted component:: + + stmt = select( + extract("YEAR", logged_table.c.date_created) + ).where(logged_table.c.id == 1) + + The implementation of ``EXTRACT`` may vary across database backends. + Users are reminded to consult their database documentation. + """ + return Extract(field, expr) + + +def false() -> False_: + """Return a :class:`.False_` construct. + + E.g.: + + .. sourcecode:: pycon+sql + + >>> from sqlalchemy import false + >>> print(select(t.c.x).where(false())) + {printsql}SELECT x FROM t WHERE false + + A backend which does not support true/false constants will render as + an expression against 1 or 0: + + .. 
sourcecode:: pycon+sql + + >>> print(select(t.c.x).where(false())) + {printsql}SELECT x FROM t WHERE 0 = 1 + + The :func:`.true` and :func:`.false` constants also feature + "short circuit" operation within an :func:`.and_` or :func:`.or_` + conjunction: + + .. sourcecode:: pycon+sql + + >>> print(select(t.c.x).where(or_(t.c.x > 5, true()))) + {printsql}SELECT x FROM t WHERE true{stop} + + >>> print(select(t.c.x).where(and_(t.c.x > 5, false()))) + {printsql}SELECT x FROM t WHERE false{stop} + + .. seealso:: + + :func:`.true` + + """ + + return False_._instance() + + +def funcfilter( + func: FunctionElement[_T], *criterion: _ColumnExpressionArgument[bool] +) -> FunctionFilter[_T]: + """Produce a :class:`.FunctionFilter` object against a function. + + Used against aggregate and window functions, + for database backends that support the "FILTER" clause. + + E.g.:: + + from sqlalchemy import funcfilter + funcfilter(func.count(1), MyClass.name == 'some name') + + Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')". + + This function is also available from the :data:`~.expression.func` + construct itself via the :meth:`.FunctionElement.filter` method. + + .. seealso:: + + :ref:`tutorial_functions_within_group` - in the + :ref:`unified_tutorial` + + :meth:`.FunctionElement.filter` + + """ + return FunctionFilter(func, *criterion) + + +def label( + name: str, + element: _ColumnExpressionArgument[_T], + type_: Optional[_TypeEngineArgument[_T]] = None, +) -> Label[_T]: + """Return a :class:`Label` object for the + given :class:`_expression.ColumnElement`. + + A label changes the name of an element in the columns clause of a + ``SELECT`` statement, typically via the ``AS`` SQL keyword. + + This functionality is more conveniently available via the + :meth:`_expression.ColumnElement.label` method on + :class:`_expression.ColumnElement`. + + :param name: label name + + :param obj: a :class:`_expression.ColumnElement`. 
+ + """ + return Label(name, element, type_) + + +def null() -> Null: + """Return a constant :class:`.Null` construct.""" + + return Null._instance() + + +def nulls_first(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: + """Produce the ``NULLS FIRST`` modifier for an ``ORDER BY`` expression. + + :func:`.nulls_first` is intended to modify the expression produced + by :func:`.asc` or :func:`.desc`, and indicates how NULL values + should be handled when they are encountered during ordering:: + + + from sqlalchemy import desc, nulls_first + + stmt = select(users_table).order_by( + nulls_first(desc(users_table.c.name))) + + The SQL expression from the above would resemble:: + + SELECT id, name FROM user ORDER BY name DESC NULLS FIRST + + Like :func:`.asc` and :func:`.desc`, :func:`.nulls_first` is typically + invoked from the column expression itself using + :meth:`_expression.ColumnElement.nulls_first`, + rather than as its standalone + function version, as in:: + + stmt = select(users_table).order_by( + users_table.c.name.desc().nulls_first()) + + .. versionchanged:: 1.4 :func:`.nulls_first` is renamed from + :func:`.nullsfirst` in previous releases. + The previous name remains available for backwards compatibility. + + .. seealso:: + + :func:`.asc` + + :func:`.desc` + + :func:`.nulls_last` + + :meth:`_expression.Select.order_by` + + """ + return UnaryExpression._create_nulls_first(column) + + +def nulls_last(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: + """Produce the ``NULLS LAST`` modifier for an ``ORDER BY`` expression. 
+ + :func:`.nulls_last` is intended to modify the expression produced + by :func:`.asc` or :func:`.desc`, and indicates how NULL values + should be handled when they are encountered during ordering:: + + + from sqlalchemy import desc, nulls_last + + stmt = select(users_table).order_by( + nulls_last(desc(users_table.c.name))) + + The SQL expression from the above would resemble:: + + SELECT id, name FROM user ORDER BY name DESC NULLS LAST + + Like :func:`.asc` and :func:`.desc`, :func:`.nulls_last` is typically + invoked from the column expression itself using + :meth:`_expression.ColumnElement.nulls_last`, + rather than as its standalone + function version, as in:: + + stmt = select(users_table).order_by( + users_table.c.name.desc().nulls_last()) + + .. versionchanged:: 1.4 :func:`.nulls_last` is renamed from + :func:`.nullslast` in previous releases. + The previous name remains available for backwards compatibility. + + .. seealso:: + + :func:`.asc` + + :func:`.desc` + + :func:`.nulls_first` + + :meth:`_expression.Select.order_by` + + """ + return UnaryExpression._create_nulls_last(column) + + +def or_( # type: ignore[empty-body] + initial_clause: Union[Literal[False], _ColumnExpressionArgument[bool]], + *clauses: _ColumnExpressionArgument[bool], +) -> ColumnElement[bool]: + """Produce a conjunction of expressions joined by ``OR``. 
+ + E.g.:: + + from sqlalchemy import or_ + + stmt = select(users_table).where( + or_( + users_table.c.name == 'wendy', + users_table.c.name == 'jack' + ) + ) + + The :func:`.or_` conjunction is also available using the + Python ``|`` operator (though note that compound expressions + need to be parenthesized in order to function with Python + operator precedence behavior):: + + stmt = select(users_table).where( + (users_table.c.name == 'wendy') | + (users_table.c.name == 'jack') + ) + + The :func:`.or_` construct must be given at least one positional + argument in order to be valid; a :func:`.or_` construct with no + arguments is ambiguous. To produce an "empty" or dynamically + generated :func:`.or_` expression, from a given list of expressions, + a "default" element of :func:`_sql.false` (or just ``False``) should be + specified:: + + from sqlalchemy import false + or_criteria = or_(false(), *expressions) + + The above expression will compile to SQL as the expression ``false`` + or ``0 = 1``, depending on backend, if no other expressions are + present. If expressions are present, then the :func:`_sql.false` value is + ignored as it does not affect the outcome of an OR expression which + has other elements. + + .. deprecated:: 1.4 The :func:`.or_` element now requires that at + least one argument is passed; creating the :func:`.or_` construct + with no arguments is deprecated, and will emit a deprecation warning + while continuing to produce a blank SQL string. + + .. seealso:: + + :func:`.and_` + + """ + ... + + +if not TYPE_CHECKING: + # handle deprecated case which allows zero-arguments + def or_(*clauses): # noqa: F811 + """Produce a conjunction of expressions joined by ``OR``. 
+ + E.g.:: + + from sqlalchemy import or_ + + stmt = select(users_table).where( + or_( + users_table.c.name == 'wendy', + users_table.c.name == 'jack' + ) + ) + + The :func:`.or_` conjunction is also available using the + Python ``|`` operator (though note that compound expressions + need to be parenthesized in order to function with Python + operator precedence behavior):: + + stmt = select(users_table).where( + (users_table.c.name == 'wendy') | + (users_table.c.name == 'jack') + ) + + The :func:`.or_` construct must be given at least one positional + argument in order to be valid; a :func:`.or_` construct with no + arguments is ambiguous. To produce an "empty" or dynamically + generated :func:`.or_` expression, from a given list of expressions, + a "default" element of :func:`_sql.false` (or just ``False``) should be + specified:: + + from sqlalchemy import false + or_criteria = or_(false(), *expressions) + + The above expression will compile to SQL as the expression ``false`` + or ``0 = 1``, depending on backend, if no other expressions are + present. If expressions are present, then the :func:`_sql.false` value + is ignored as it does not affect the outcome of an OR expression which + has other elements. + + .. deprecated:: 1.4 The :func:`.or_` element now requires that at + least one argument is passed; creating the :func:`.or_` construct + with no arguments is deprecated, and will emit a deprecation warning + while continuing to produce a blank SQL string. + + .. seealso:: + + :func:`.and_` + + """ + return BooleanClauseList.or_(*clauses) + + +def over( + element: FunctionElement[_T], + partition_by: Optional[_ByArgument] = None, + order_by: Optional[_ByArgument] = None, + range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, + rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, +) -> Over[_T]: + r"""Produce an :class:`.Over` object against a function. 
+ + Used against aggregate or so-called "window" functions, + for database backends that support window functions. + + :func:`_expression.over` is usually called using + the :meth:`.FunctionElement.over` method, e.g.:: + + func.row_number().over(order_by=mytable.c.some_column) + + Would produce:: + + ROW_NUMBER() OVER(ORDER BY some_column) + + Ranges are also possible using the :paramref:`.expression.over.range_` + and :paramref:`.expression.over.rows` parameters. These + mutually-exclusive parameters each accept a 2-tuple, which contains + a combination of integers and None:: + + func.row_number().over( + order_by=my_table.c.some_column, range_=(None, 0)) + + The above would produce:: + + ROW_NUMBER() OVER(ORDER BY some_column + RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) + + A value of ``None`` indicates "unbounded", a + value of zero indicates "current row", and negative / positive + integers indicate "preceding" and "following": + + * RANGE BETWEEN 5 PRECEDING AND 10 FOLLOWING:: + + func.row_number().over(order_by='x', range_=(-5, 10)) + + * ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW:: + + func.row_number().over(order_by='x', rows=(None, 0)) + + * RANGE BETWEEN 2 PRECEDING AND UNBOUNDED FOLLOWING:: + + func.row_number().over(order_by='x', range_=(-2, None)) + + * RANGE BETWEEN 1 FOLLOWING AND 3 FOLLOWING:: + + func.row_number().over(order_by='x', range_=(1, 3)) + + :param element: a :class:`.FunctionElement`, :class:`.WithinGroup`, + or other compatible construct. + :param partition_by: a column element or string, or a list + of such, that will be used as the PARTITION BY clause + of the OVER construct. + :param order_by: a column element or string, or a list + of such, that will be used as the ORDER BY clause + of the OVER construct. + :param range\_: optional range clause for the window. This is a + tuple value which can contain integer values or ``None``, + and will render a RANGE BETWEEN PRECEDING / FOLLOWING clause. 
+ + :param rows: optional rows clause for the window. This is a tuple + value which can contain integer values or None, and will render + a ROWS BETWEEN PRECEDING / FOLLOWING clause. + + This function is also available from the :data:`~.expression.func` + construct itself via the :meth:`.FunctionElement.over` method. + + .. seealso:: + + :ref:`tutorial_window_functions` - in the :ref:`unified_tutorial` + + :data:`.expression.func` + + :func:`_expression.within_group` + + """ + return Over(element, partition_by, order_by, range_, rows) + + +@_document_text_coercion("text", ":func:`.text`", ":paramref:`.text.text`") +def text(text: str) -> TextClause: + r"""Construct a new :class:`_expression.TextClause` clause, + representing + a textual SQL string directly. + + E.g.:: + + from sqlalchemy import text + + t = text("SELECT * FROM users") + result = connection.execute(t) + + The advantages :func:`_expression.text` + provides over a plain string are + backend-neutral support for bind parameters, per-statement + execution options, as well as + bind parameter and result-column typing behavior, allowing + SQLAlchemy type constructs to play a role when executing + a statement that is specified literally. The construct can also + be provided with a ``.c`` collection of column elements, allowing + it to be embedded in other SQL expression constructs as a subquery. + + Bind parameters are specified by name, using the format ``:name``. 
+ E.g.:: + + t = text("SELECT * FROM users WHERE id=:user_id") + result = connection.execute(t, {"user_id": 12}) + + For SQL statements where a colon is required verbatim, as within + an inline string, use a backslash to escape:: + + t = text(r"SELECT * FROM users WHERE name='\:username'") + + The :class:`_expression.TextClause` + construct includes methods which can + provide information about the bound parameters as well as the column + values which would be returned from the textual statement, assuming + it's an executable SELECT type of statement. The + :meth:`_expression.TextClause.bindparams` + method is used to provide bound + parameter detail, and :meth:`_expression.TextClause.columns` + method allows + specification of return columns including names and types:: + + t = text("SELECT * FROM users WHERE id=:user_id").\ + bindparams(user_id=7).\ + columns(id=Integer, name=String) + + for id, name in connection.execute(t): + print(id, name) + + The :func:`_expression.text` construct is used in cases when + a literal string SQL fragment is specified as part of a larger query, + such as for the WHERE clause of a SELECT statement:: + + s = select(users.c.id, users.c.name).where(text("id=:user_id")) + result = connection.execute(s, {"user_id": 12}) + + :func:`_expression.text` is also used for the construction + of a full, standalone statement using plain text. + As such, SQLAlchemy refers + to it as an :class:`.Executable` object and may be used + like any other statement passed to an ``.execute()`` method. + + :param text: + the text of the SQL statement to be created. Use ``:`` + to specify bind parameters; they will be compiled to their + engine-specific format. + + .. seealso:: + + :ref:`tutorial_select_arbitrary_text` + + """ + return TextClause(text) + + +def true() -> True_: + """Return a constant :class:`.True_` construct. + + E.g.: + + .. 
sourcecode:: pycon+sql + + >>> from sqlalchemy import true + >>> print(select(t.c.x).where(true())) + {printsql}SELECT x FROM t WHERE true + + A backend which does not support true/false constants will render as + an expression against 1 or 0: + + .. sourcecode:: pycon+sql + + >>> print(select(t.c.x).where(true())) + {printsql}SELECT x FROM t WHERE 1 = 1 + + The :func:`.true` and :func:`.false` constants also feature + "short circuit" operation within an :func:`.and_` or :func:`.or_` + conjunction: + + .. sourcecode:: pycon+sql + + >>> print(select(t.c.x).where(or_(t.c.x > 5, true()))) + {printsql}SELECT x FROM t WHERE true{stop} + + >>> print(select(t.c.x).where(and_(t.c.x > 5, false()))) + {printsql}SELECT x FROM t WHERE false{stop} + + .. seealso:: + + :func:`.false` + + """ + + return True_._instance() + + +def tuple_( + *clauses: _ColumnExpressionArgument[Any], + types: Optional[Sequence[_TypeEngineArgument[Any]]] = None, +) -> Tuple: + """Return a :class:`.Tuple`. + + Main usage is to produce a composite IN construct using + :meth:`.ColumnOperators.in_` :: + + from sqlalchemy import tuple_ + + tuple_(table.c.col1, table.c.col2).in_( + [(1, 2), (5, 12), (10, 19)] + ) + + .. versionchanged:: 1.3.6 Added support for SQLite IN tuples. + + .. warning:: + + The composite IN construct is not supported by all backends, and is + currently known to work on PostgreSQL, MySQL, and SQLite. + Unsupported backends will raise a subclass of + :class:`~sqlalchemy.exc.DBAPIError` when such an expression is + invoked. + + """ + return Tuple(*clauses, types=types) + + +def type_coerce( + expression: _ColumnExpressionOrLiteralArgument[Any], + type_: _TypeEngineArgument[_T], +) -> TypeCoerce[_T]: + r"""Associate a SQL expression with a particular type, without rendering + ``CAST``. 
+ + E.g.:: + + from sqlalchemy import type_coerce + + stmt = select(type_coerce(log_table.date_string, StringDateTime())) + + The above construct will produce a :class:`.TypeCoerce` object, which + does not modify the rendering in any way on the SQL side, with the + possible exception of a generated label if used in a columns clause + context: + + .. sourcecode:: sql + + SELECT date_string AS date_string FROM log + + When result rows are fetched, the ``StringDateTime`` type processor + will be applied to result rows on behalf of the ``date_string`` column. + + .. note:: the :func:`.type_coerce` construct does not render any + SQL syntax of its own, including that it does not imply + parenthesization. Please use :meth:`.TypeCoerce.self_group` + if explicit parenthesization is required. + + In order to provide a named label for the expression, use + :meth:`_expression.ColumnElement.label`:: + + stmt = select( + type_coerce(log_table.date_string, StringDateTime()).label('date') + ) + + + A type that features bound-value handling will also have that behavior + take effect when literal values or :func:`.bindparam` constructs are + passed to :func:`.type_coerce` as targets. + For example, if a type implements the + :meth:`.TypeEngine.bind_expression` + method or :meth:`.TypeEngine.bind_processor` method or equivalent, + these functions will take effect at statement compilation/execution + time when a literal value is passed, as in:: + + # bound-value handling of MyStringType will be applied to the + # literal value "some string" + stmt = select(type_coerce("some string", MyStringType)) + + When using :func:`.type_coerce` with composed expressions, note that + **parenthesis are not applied**. If :func:`.type_coerce` is being + used in an operator context where the parenthesis normally present from + CAST are necessary, use the :meth:`.TypeCoerce.self_group` method: + + .. 
sourcecode:: pycon+sql + + >>> some_integer = column("someint", Integer) + >>> some_string = column("somestr", String) + >>> expr = type_coerce(some_integer + 5, String) + some_string + >>> print(expr) + {printsql}someint + :someint_1 || somestr{stop} + >>> expr = type_coerce(some_integer + 5, String).self_group() + some_string + >>> print(expr) + {printsql}(someint + :someint_1) || somestr{stop} + + :param expression: A SQL expression, such as a + :class:`_expression.ColumnElement` + expression or a Python string which will be coerced into a bound + literal value. + + :param type\_: A :class:`.TypeEngine` class or instance indicating + the type to which the expression is coerced. + + .. seealso:: + + :ref:`tutorial_casts` + + :func:`.cast` + + """ # noqa + return TypeCoerce(expression, type_) + + +def within_group( + element: FunctionElement[_T], *order_by: _ColumnExpressionArgument[Any] +) -> WithinGroup[_T]: + r"""Produce a :class:`.WithinGroup` object against a function. + + Used against so-called "ordered set aggregate" and "hypothetical + set aggregate" functions, including :class:`.percentile_cont`, + :class:`.rank`, :class:`.dense_rank`, etc. + + :func:`_expression.within_group` is usually called using + the :meth:`.FunctionElement.within_group` method, e.g.:: + + from sqlalchemy import within_group + stmt = select( + department.c.id, + func.percentile_cont(0.5).within_group( + department.c.salary.desc() + ) + ) + + The above statement would produce SQL similar to + ``SELECT department.id, percentile_cont(0.5) + WITHIN GROUP (ORDER BY department.salary DESC)``. + + :param element: a :class:`.FunctionElement` construct, typically + generated by :data:`~.expression.func`. + :param \*order_by: one or more column elements that will be used + as the ORDER BY clause of the WITHIN GROUP construct. + + .. 
seealso:: + + :ref:`tutorial_functions_within_group` - in the + :ref:`unified_tutorial` + + :data:`.expression.func` + + :func:`_expression.over` + + """ + return WithinGroup(element, *order_by) diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/_orm_types.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_orm_types.py new file mode 100644 index 0000000..bccb533 --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_orm_types.py @@ -0,0 +1,20 @@ +# sql/_orm_types.py +# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""ORM types that need to present specifically for **documentation only** of +the Executable.execution_options() method, which includes options that +are meaningful to the ORM. + +""" + + +from __future__ import annotations + +from ..util.typing import Literal + +SynchronizeSessionArgument = Literal[False, "auto", "evaluate", "fetch"] +DMLStrategyArgument = Literal["bulk", "raw", "orm", "auto"] diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/_py_util.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_py_util.py new file mode 100644 index 0000000..df372bf --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_py_util.py @@ -0,0 +1,75 @@ +# sql/_py_util.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import typing +from typing import Any +from typing import Dict +from typing import Tuple +from typing import Union + +from ..util.typing import Literal + +if typing.TYPE_CHECKING: + from .cache_key import CacheConst + + +class prefix_anon_map(Dict[str, str]): + """A map that creates new keys for missing key access. 
+ + Considers keys of the form " " to produce + new symbols "_", where "index" is an incrementing integer + corresponding to . + + Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which + is otherwise usually used for this type of operation. + + """ + + def __missing__(self, key: str) -> str: + (ident, derived) = key.split(" ", 1) + anonymous_counter = self.get(derived, 1) + self[derived] = anonymous_counter + 1 # type: ignore + value = f"{derived}_{anonymous_counter}" + self[key] = value + return value + + +class cache_anon_map( + Dict[Union[int, "Literal[CacheConst.NO_CACHE]"], Union[Literal[True], str]] +): + """A map that creates new keys for missing key access. + + Produces an incrementing sequence given a series of unique keys. + + This is similar to the compiler prefix_anon_map class although simpler. + + Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which + is otherwise usually used for this type of operation. + + """ + + _index = 0 + + def get_anon(self, object_: Any) -> Tuple[str, bool]: + idself = id(object_) + if idself in self: + s_val = self[idself] + assert s_val is not True + return s_val, True + else: + # inline of __missing__ + self[idself] = id_ = str(self._index) + self._index += 1 + + return id_, False + + def __missing__(self, key: int) -> str: + self[key] = val = str(self._index) + self._index += 1 + return val diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/_selectable_constructors.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_selectable_constructors.py new file mode 100644 index 0000000..c2b5008 --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_selectable_constructors.py @@ -0,0 +1,635 @@ +# sql/_selectable_constructors.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import 
annotations + +from typing import Any +from typing import Optional +from typing import overload +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import coercions +from . import roles +from ._typing import _ColumnsClauseArgument +from ._typing import _no_kw +from .elements import ColumnClause +from .selectable import Alias +from .selectable import CompoundSelect +from .selectable import Exists +from .selectable import FromClause +from .selectable import Join +from .selectable import Lateral +from .selectable import LateralFromClause +from .selectable import NamedFromClause +from .selectable import Select +from .selectable import TableClause +from .selectable import TableSample +from .selectable import Values + +if TYPE_CHECKING: + from ._typing import _FromClauseArgument + from ._typing import _OnClauseArgument + from ._typing import _SelectStatementForCompoundArgument + from ._typing import _T0 + from ._typing import _T1 + from ._typing import _T2 + from ._typing import _T3 + from ._typing import _T4 + from ._typing import _T5 + from ._typing import _T6 + from ._typing import _T7 + from ._typing import _T8 + from ._typing import _T9 + from ._typing import _TypedColumnClauseArgument as _TCCA + from .functions import Function + from .selectable import CTE + from .selectable import HasCTE + from .selectable import ScalarSelect + from .selectable import SelectBase + + +_T = TypeVar("_T", bound=Any) + + +def alias( + selectable: FromClause, name: Optional[str] = None, flat: bool = False +) -> NamedFromClause: + """Return a named alias of the given :class:`.FromClause`. + + For :class:`.Table` and :class:`.Join` objects, the return type is the + :class:`_expression.Alias` object. Other kinds of :class:`.NamedFromClause` + objects may be returned for other kinds of :class:`.FromClause` objects. 
+ + The named alias represents any :class:`_expression.FromClause` with an + alternate name assigned within SQL, typically using the ``AS`` clause when + generated, e.g. ``SELECT * FROM table AS aliasname``. + + Equivalent functionality is available via the + :meth:`_expression.FromClause.alias` + method available on all :class:`_expression.FromClause` objects. + + :param selectable: any :class:`_expression.FromClause` subclass, + such as a table, select statement, etc. + + :param name: string name to be assigned as the alias. + If ``None``, a name will be deterministically generated at compile + time. Deterministic means the name is guaranteed to be unique against + other constructs used in the same statement, and will also be the same + name for each successive compilation of the same statement object. + + :param flat: Will be passed through to if the given selectable + is an instance of :class:`_expression.Join` - see + :meth:`_expression.Join.alias` for details. + + """ + return Alias._factory(selectable, name=name, flat=flat) + + +def cte( + selectable: HasCTE, name: Optional[str] = None, recursive: bool = False +) -> CTE: + r"""Return a new :class:`_expression.CTE`, + or Common Table Expression instance. + + Please see :meth:`_expression.HasCTE.cte` for detail on CTE usage. + + """ + return coercions.expect(roles.HasCTERole, selectable).cte( + name=name, recursive=recursive + ) + + +def except_( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return an ``EXCEPT`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + :param \*selects: + a list of :class:`_expression.Select` instances. + + """ + return CompoundSelect._create_except(*selects) + + +def except_all( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return an ``EXCEPT ALL`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. 
+ + :param \*selects: + a list of :class:`_expression.Select` instances. + + """ + return CompoundSelect._create_except_all(*selects) + + +def exists( + __argument: Optional[ + Union[_ColumnsClauseArgument[Any], SelectBase, ScalarSelect[Any]] + ] = None, +) -> Exists: + """Construct a new :class:`_expression.Exists` construct. + + The :func:`_sql.exists` can be invoked by itself to produce an + :class:`_sql.Exists` construct, which will accept simple WHERE + criteria:: + + exists_criteria = exists().where(table1.c.col1 == table2.c.col2) + + However, for greater flexibility in constructing the SELECT, an + existing :class:`_sql.Select` construct may be converted to an + :class:`_sql.Exists`, most conveniently by making use of the + :meth:`_sql.SelectBase.exists` method:: + + exists_criteria = ( + select(table2.c.col2). + where(table1.c.col1 == table2.c.col2). + exists() + ) + + The EXISTS criteria is then used inside of an enclosing SELECT:: + + stmt = select(table1.c.col1).where(exists_criteria) + + The above statement will then be of the form:: + + SELECT col1 FROM table1 WHERE EXISTS + (SELECT table2.col2 FROM table2 WHERE table2.col2 = table1.col1) + + .. seealso:: + + :ref:`tutorial_exists` - in the :term:`2.0 style` tutorial. + + :meth:`_sql.SelectBase.exists` - method to transform a ``SELECT`` to an + ``EXISTS`` clause. + + """ # noqa: E501 + + return Exists(__argument) + + +def intersect( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return an ``INTERSECT`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + :param \*selects: + a list of :class:`_expression.Select` instances. + + """ + return CompoundSelect._create_intersect(*selects) + + +def intersect_all( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return an ``INTERSECT ALL`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. 
+ + :param \*selects: + a list of :class:`_expression.Select` instances. + + + """ + return CompoundSelect._create_intersect_all(*selects) + + +def join( + left: _FromClauseArgument, + right: _FromClauseArgument, + onclause: Optional[_OnClauseArgument] = None, + isouter: bool = False, + full: bool = False, +) -> Join: + """Produce a :class:`_expression.Join` object, given two + :class:`_expression.FromClause` + expressions. + + E.g.:: + + j = join(user_table, address_table, + user_table.c.id == address_table.c.user_id) + stmt = select(user_table).select_from(j) + + would emit SQL along the lines of:: + + SELECT user.id, user.name FROM user + JOIN address ON user.id = address.user_id + + Similar functionality is available given any + :class:`_expression.FromClause` object (e.g. such as a + :class:`_schema.Table`) using + the :meth:`_expression.FromClause.join` method. + + :param left: The left side of the join. + + :param right: the right side of the join; this is any + :class:`_expression.FromClause` object such as a + :class:`_schema.Table` object, and + may also be a selectable-compatible object such as an ORM-mapped + class. + + :param onclause: a SQL expression representing the ON clause of the + join. If left at ``None``, :meth:`_expression.FromClause.join` + will attempt to + join the two tables based on a foreign key relationship. + + :param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN. + + :param full: if True, render a FULL OUTER JOIN, instead of JOIN. + + .. seealso:: + + :meth:`_expression.FromClause.join` - method form, + based on a given left side. + + :class:`_expression.Join` - the type of object produced. + + """ + + return Join(left, right, onclause, isouter, full) + + +def lateral( + selectable: Union[SelectBase, _FromClauseArgument], + name: Optional[str] = None, +) -> LateralFromClause: + """Return a :class:`_expression.Lateral` object. 
+ + :class:`_expression.Lateral` is an :class:`_expression.Alias` + subclass that represents + a subquery with the LATERAL keyword applied to it. + + The special behavior of a LATERAL subquery is that it appears in the + FROM clause of an enclosing SELECT, but may correlate to other + FROM clauses of that SELECT. It is a special case of subquery + only supported by a small number of backends, currently more recent + PostgreSQL versions. + + .. seealso:: + + :ref:`tutorial_lateral_correlation` - overview of usage. + + """ + return Lateral._factory(selectable, name=name) + + +def outerjoin( + left: _FromClauseArgument, + right: _FromClauseArgument, + onclause: Optional[_OnClauseArgument] = None, + full: bool = False, +) -> Join: + """Return an ``OUTER JOIN`` clause element. + + The returned object is an instance of :class:`_expression.Join`. + + Similar functionality is also available via the + :meth:`_expression.FromClause.outerjoin` method on any + :class:`_expression.FromClause`. + + :param left: The left side of the join. + + :param right: The right side of the join. + + :param onclause: Optional criterion for the ``ON`` clause, is + derived from foreign key relationships established between + left and right otherwise. + + To chain joins together, use the :meth:`_expression.FromClause.join` + or + :meth:`_expression.FromClause.outerjoin` methods on the resulting + :class:`_expression.Join` object. + + """ + return Join(left, right, onclause, isouter=True, full=full) + + +# START OVERLOADED FUNCTIONS select Select 1-10 + +# code within this block is **programmatically, +# statically generated** by tools/generate_tuple_map_overloads.py + + +@overload +def select(__ent0: _TCCA[_T0]) -> Select[Tuple[_T0]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] +) -> Select[Tuple[_T0, _T1]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] +) -> Select[Tuple[_T0, _T1, _T2]]: ... 
+ + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], +) -> Select[Tuple[_T0, _T1, _T2, _T3]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], + __ent8: _TCCA[_T8], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8]]: ... + + +@overload +def select( + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + __ent6: _TCCA[_T6], + __ent7: _TCCA[_T7], + __ent8: _TCCA[_T8], + __ent9: _TCCA[_T9], +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9]]: ... + + +# END OVERLOADED FUNCTIONS select + + +@overload +def select( + *entities: _ColumnsClauseArgument[Any], **__kw: Any +) -> Select[Any]: ... + + +def select(*entities: _ColumnsClauseArgument[Any], **__kw: Any) -> Select[Any]: + r"""Construct a new :class:`_expression.Select`. + + + .. 
versionadded:: 1.4 - The :func:`_sql.select` function now accepts + column arguments positionally. The top-level :func:`_sql.select` + function will automatically use the 1.x or 2.x style API based on + the incoming arguments; using :func:`_sql.select` from the + ``sqlalchemy.future`` module will enforce that only the 2.x style + constructor is used. + + Similar functionality is also available via the + :meth:`_expression.FromClause.select` method on any + :class:`_expression.FromClause`. + + .. seealso:: + + :ref:`tutorial_selecting_data` - in the :ref:`unified_tutorial` + + :param \*entities: + Entities to SELECT from. For Core usage, this is typically a series + of :class:`_expression.ColumnElement` and / or + :class:`_expression.FromClause` + objects which will form the columns clause of the resulting + statement. For those objects that are instances of + :class:`_expression.FromClause` (typically :class:`_schema.Table` + or :class:`_expression.Alias` + objects), the :attr:`_expression.FromClause.c` + collection is extracted + to form a collection of :class:`_expression.ColumnElement` objects. + + This parameter will also accept :class:`_expression.TextClause` + constructs as + given, as well as ORM-mapped classes. + + """ + # the keyword args are a necessary element in order for the typing + # to work out w/ the varargs vs. having named "keyword" arguments that + # aren't always present. + if __kw: + raise _no_kw() + return Select(*entities) + + +def table(name: str, *columns: ColumnClause[Any], **kw: Any) -> TableClause: + """Produce a new :class:`_expression.TableClause`. + + The object returned is an instance of + :class:`_expression.TableClause`, which + represents the "syntactical" portion of the schema-level + :class:`_schema.Table` object. + It may be used to construct lightweight table constructs. + + :param name: Name of the table. + + :param columns: A collection of :func:`_expression.column` constructs. 
+ + :param schema: The schema name for this table. + + .. versionadded:: 1.3.18 :func:`_expression.table` can now + accept a ``schema`` argument. + """ + + return TableClause(name, *columns, **kw) + + +def tablesample( + selectable: _FromClauseArgument, + sampling: Union[float, Function[Any]], + name: Optional[str] = None, + seed: Optional[roles.ExpressionElementRole[Any]] = None, +) -> TableSample: + """Return a :class:`_expression.TableSample` object. + + :class:`_expression.TableSample` is an :class:`_expression.Alias` + subclass that represents + a table with the TABLESAMPLE clause applied to it. + :func:`_expression.tablesample` + is also available from the :class:`_expression.FromClause` + class via the + :meth:`_expression.FromClause.tablesample` method. + + The TABLESAMPLE clause allows selecting a randomly selected approximate + percentage of rows from a table. It supports multiple sampling methods, + most commonly BERNOULLI and SYSTEM. + + e.g.:: + + from sqlalchemy import func + + selectable = people.tablesample( + func.bernoulli(1), + name='alias', + seed=func.random()) + stmt = select(selectable.c.people_id) + + Assuming ``people`` with a column ``people_id``, the above + statement would render as:: + + SELECT alias.people_id FROM + people AS alias TABLESAMPLE bernoulli(:bernoulli_1) + REPEATABLE (random()) + + :param sampling: a ``float`` percentage between 0 and 100 or + :class:`_functions.Function`. + + :param name: optional alias name + + :param seed: any real-valued SQL expression. When specified, the + REPEATABLE sub-clause is also rendered. + + """ + return TableSample._factory(selectable, sampling, name=name, seed=seed) + + +def union( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return a ``UNION`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + A similar :func:`union()` method is available on all + :class:`_expression.FromClause` subclasses. 
+ + :param \*selects: + a list of :class:`_expression.Select` instances. + + :param \**kwargs: + available keyword arguments are the same as those of + :func:`select`. + + """ + return CompoundSelect._create_union(*selects) + + +def union_all( + *selects: _SelectStatementForCompoundArgument, +) -> CompoundSelect: + r"""Return a ``UNION ALL`` of multiple selectables. + + The returned object is an instance of + :class:`_expression.CompoundSelect`. + + A similar :func:`union_all()` method is available on all + :class:`_expression.FromClause` subclasses. + + :param \*selects: + a list of :class:`_expression.Select` instances. + + """ + return CompoundSelect._create_union_all(*selects) + + +def values( + *columns: ColumnClause[Any], + name: Optional[str] = None, + literal_binds: bool = False, +) -> Values: + r"""Construct a :class:`_expression.Values` construct. + + The column expressions and the actual data for + :class:`_expression.Values` are given in two separate steps. The + constructor receives the column expressions typically as + :func:`_expression.column` constructs, + and the data is then passed via the + :meth:`_expression.Values.data` method as a list, + which can be called multiple + times to add more data, e.g.:: + + from sqlalchemy import column + from sqlalchemy import values + + value_expr = values( + column('id', Integer), + column('name', String), + name="my_values" + ).data( + [(1, 'name1'), (2, 'name2'), (3, 'name3')] + ) + + :param \*columns: column expressions, typically composed using + :func:`_expression.column` objects. + + :param name: the name for this VALUES construct. If omitted, the + VALUES construct will be unnamed in a SQL expression. Different + backends may have different requirements here. + + :param literal_binds: Defaults to False. Whether or not to render + the data values inline in the SQL output, rather than using bound + parameters. 
+ + """ + return Values(*columns, literal_binds=literal_binds, name=name) diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/_typing.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_typing.py new file mode 100644 index 0000000..c861bae --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/_typing.py @@ -0,0 +1,457 @@ +# sql/_typing.py +# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import operator +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import roles +from .. import exc +from .. 
import util +from ..inspection import Inspectable +from ..util.typing import Literal +from ..util.typing import Protocol +from ..util.typing import TypeAlias + +if TYPE_CHECKING: + from datetime import date + from datetime import datetime + from datetime import time + from datetime import timedelta + from decimal import Decimal + from uuid import UUID + + from .base import Executable + from .compiler import Compiled + from .compiler import DDLCompiler + from .compiler import SQLCompiler + from .dml import UpdateBase + from .dml import ValuesBase + from .elements import ClauseElement + from .elements import ColumnElement + from .elements import KeyedColumnElement + from .elements import quoted_name + from .elements import SQLCoreOperations + from .elements import TextClause + from .lambdas import LambdaElement + from .roles import FromClauseRole + from .schema import Column + from .selectable import Alias + from .selectable import CTE + from .selectable import FromClause + from .selectable import Join + from .selectable import NamedFromClause + from .selectable import ReturnsRows + from .selectable import Select + from .selectable import Selectable + from .selectable import SelectBase + from .selectable import Subquery + from .selectable import TableClause + from .sqltypes import TableValueType + from .sqltypes import TupleType + from .type_api import TypeEngine + from ..engine import Dialect + from ..util.typing import TypeGuard + +_T = TypeVar("_T", bound=Any) +_T_co = TypeVar("_T_co", bound=Any, covariant=True) + + +_CE = TypeVar("_CE", bound="ColumnElement[Any]") + +_CLE = TypeVar("_CLE", bound="ClauseElement") + + +class _HasClauseElement(Protocol, Generic[_T_co]): + """indicates a class that has a __clause_element__() method""" + + def __clause_element__(self) -> roles.ExpressionElementRole[_T_co]: ... + + +class _CoreAdapterProto(Protocol): + """protocol for the ClauseAdapter/ColumnAdapter.traverse() method.""" + + def __call__(self, obj: _CE) -> _CE: ... 
+ + +class _HasDialect(Protocol): + """protocol for Engine/Connection-like objects that have dialect + attribute. + """ + + @property + def dialect(self) -> Dialect: ... + + +# match column types that are not ORM entities +_NOT_ENTITY = TypeVar( + "_NOT_ENTITY", + int, + str, + bool, + "datetime", + "date", + "time", + "timedelta", + "UUID", + float, + "Decimal", +) + +_MAYBE_ENTITY = TypeVar( + "_MAYBE_ENTITY", + roles.ColumnsClauseRole, + Literal["*", 1], + Type[Any], + Inspectable[_HasClauseElement[Any]], + _HasClauseElement[Any], +) + + +# convention: +# XYZArgument - something that the end user is passing to a public API method +# XYZElement - the internal representation that we use for the thing. +# the coercions system is responsible for converting from XYZArgument to +# XYZElement. + +_TextCoercedExpressionArgument = Union[ + str, + "TextClause", + "ColumnElement[_T]", + _HasClauseElement[_T], + roles.ExpressionElementRole[_T], +] + +_ColumnsClauseArgument = Union[ + roles.TypedColumnsClauseRole[_T], + roles.ColumnsClauseRole, + "SQLCoreOperations[_T]", + Literal["*", 1], + Type[_T], + Inspectable[_HasClauseElement[_T]], + _HasClauseElement[_T], +] +"""open-ended SELECT columns clause argument. + +Includes column expressions, tables, ORM mapped entities, a few literal values. + +This type is used for lists of columns / entities to be returned in result +sets; select(...), insert().returning(...), etc. 
+ + +""" + +_TypedColumnClauseArgument = Union[ + roles.TypedColumnsClauseRole[_T], + "SQLCoreOperations[_T]", + Type[_T], +] + +_TP = TypeVar("_TP", bound=Tuple[Any, ...]) + +_T0 = TypeVar("_T0", bound=Any) +_T1 = TypeVar("_T1", bound=Any) +_T2 = TypeVar("_T2", bound=Any) +_T3 = TypeVar("_T3", bound=Any) +_T4 = TypeVar("_T4", bound=Any) +_T5 = TypeVar("_T5", bound=Any) +_T6 = TypeVar("_T6", bound=Any) +_T7 = TypeVar("_T7", bound=Any) +_T8 = TypeVar("_T8", bound=Any) +_T9 = TypeVar("_T9", bound=Any) + + +_ColumnExpressionArgument = Union[ + "ColumnElement[_T]", + _HasClauseElement[_T], + "SQLCoreOperations[_T]", + roles.ExpressionElementRole[_T], + Callable[[], "ColumnElement[_T]"], + "LambdaElement", +] +"See docs in public alias ColumnExpressionArgument." + +ColumnExpressionArgument: TypeAlias = _ColumnExpressionArgument[_T] +"""Narrower "column expression" argument. + +This type is used for all the other "column" kinds of expressions that +typically represent a single SQL column expression, not a set of columns the +way a table or ORM entity does. + +This includes ColumnElement, or ORM-mapped attributes that will have a +``__clause_element__()`` method, it also has the ExpressionElementRole +overall which brings in the TextClause object also. + +.. versionadded:: 2.0.13 + +""" + +_ColumnExpressionOrLiteralArgument = Union[Any, _ColumnExpressionArgument[_T]] + +_ColumnExpressionOrStrLabelArgument = Union[str, _ColumnExpressionArgument[_T]] + +_ByArgument = Union[ + Iterable[_ColumnExpressionOrStrLabelArgument[Any]], + _ColumnExpressionOrStrLabelArgument[Any], +] +"""Used for keyword-based ``order_by`` and ``partition_by`` parameters.""" + + +_InfoType = Dict[Any, Any] +"""the .info dictionary accepted and used throughout Core /ORM""" + +_FromClauseArgument = Union[ + roles.FromClauseRole, + Type[Any], + Inspectable[_HasClauseElement[Any]], + _HasClauseElement[Any], +] +"""A FROM clause, like we would send to select().select_from(). 
+ +Also accommodates ORM entities and related constructs. + +""" + +_JoinTargetArgument = Union[_FromClauseArgument, roles.JoinTargetRole] +"""target for join() builds on _FromClauseArgument to include additional +join target roles such as those which come from the ORM. + +""" + +_OnClauseArgument = Union[_ColumnExpressionArgument[Any], roles.OnClauseRole] +"""target for an ON clause, includes additional roles such as those which +come from the ORM. + +""" + +_SelectStatementForCompoundArgument = Union[ + "SelectBase", roles.CompoundElementRole +] +"""SELECT statement acceptable by ``union()`` and other SQL set operations""" + +_DMLColumnArgument = Union[ + str, + _HasClauseElement[Any], + roles.DMLColumnRole, + "SQLCoreOperations[Any]", +] +"""A DML column expression. This is a "key" inside of insert().values(), +update().values(), and related. + +These are usually strings or SQL table columns. + +There's also edge cases like JSON expression assignment, which we would want +the DMLColumnRole to be able to accommodate. + +""" + +_DMLKey = TypeVar("_DMLKey", bound=_DMLColumnArgument) +_DMLColumnKeyMapping = Mapping[_DMLKey, Any] + + +_DDLColumnArgument = Union[str, "Column[Any]", roles.DDLConstraintColumnRole] +"""DDL column. + +used for :class:`.PrimaryKeyConstraint`, :class:`.UniqueConstraint`, etc. + +""" + +_DMLTableArgument = Union[ + "TableClause", + "Join", + "Alias", + "CTE", + Type[Any], + Inspectable[_HasClauseElement[Any]], + _HasClauseElement[Any], +] + +_PropagateAttrsType = util.immutabledict[str, Any] + +_TypeEngineArgument = Union[Type["TypeEngine[_T]"], "TypeEngine[_T]"] + +_EquivalentColumnMap = Dict["ColumnElement[Any]", Set["ColumnElement[Any]"]] + +_LimitOffsetType = Union[int, _ColumnExpressionArgument[int], None] + +_AutoIncrementType = Union[bool, Literal["auto", "ignore_fk"]] + +if TYPE_CHECKING: + + def is_sql_compiler(c: Compiled) -> TypeGuard[SQLCompiler]: ... + + def is_ddl_compiler(c: Compiled) -> TypeGuard[DDLCompiler]: ... 
+ + def is_named_from_clause( + t: FromClauseRole, + ) -> TypeGuard[NamedFromClause]: ... + + def is_column_element( + c: ClauseElement, + ) -> TypeGuard[ColumnElement[Any]]: ... + + def is_keyed_column_element( + c: ClauseElement, + ) -> TypeGuard[KeyedColumnElement[Any]]: ... + + def is_text_clause(c: ClauseElement) -> TypeGuard[TextClause]: ... + + def is_from_clause(c: ClauseElement) -> TypeGuard[FromClause]: ... + + def is_tuple_type(t: TypeEngine[Any]) -> TypeGuard[TupleType]: ... + + def is_table_value_type( + t: TypeEngine[Any], + ) -> TypeGuard[TableValueType]: ... + + def is_selectable(t: Any) -> TypeGuard[Selectable]: ... + + def is_select_base( + t: Union[Executable, ReturnsRows] + ) -> TypeGuard[SelectBase]: ... + + def is_select_statement( + t: Union[Executable, ReturnsRows] + ) -> TypeGuard[Select[Any]]: ... + + def is_table(t: FromClause) -> TypeGuard[TableClause]: ... + + def is_subquery(t: FromClause) -> TypeGuard[Subquery]: ... + + def is_dml(c: ClauseElement) -> TypeGuard[UpdateBase]: ... 
+ +else: + is_sql_compiler = operator.attrgetter("is_sql") + is_ddl_compiler = operator.attrgetter("is_ddl") + is_named_from_clause = operator.attrgetter("named_with_column") + is_column_element = operator.attrgetter("_is_column_element") + is_keyed_column_element = operator.attrgetter("_is_keyed_column_element") + is_text_clause = operator.attrgetter("_is_text_clause") + is_from_clause = operator.attrgetter("_is_from_clause") + is_tuple_type = operator.attrgetter("_is_tuple_type") + is_table_value_type = operator.attrgetter("_is_table_value") + is_selectable = operator.attrgetter("is_selectable") + is_select_base = operator.attrgetter("_is_select_base") + is_select_statement = operator.attrgetter("_is_select_statement") + is_table = operator.attrgetter("_is_table") + is_subquery = operator.attrgetter("_is_subquery") + is_dml = operator.attrgetter("is_dml") + + +def has_schema_attr(t: FromClauseRole) -> TypeGuard[TableClause]: + return hasattr(t, "schema") + + +def is_quoted_name(s: str) -> TypeGuard[quoted_name]: + return hasattr(s, "quote") + + +def is_has_clause_element(s: object) -> TypeGuard[_HasClauseElement[Any]]: + return hasattr(s, "__clause_element__") + + +def is_insert_update(c: ClauseElement) -> TypeGuard[ValuesBase]: + return c.is_dml and (c.is_insert or c.is_update) # type: ignore + + +def _no_kw() -> exc.ArgumentError: + return exc.ArgumentError( + "Additional keyword arguments are not accepted by this " + "function/method. The presence of **kw is for pep-484 typing purposes" + ) + + +def _unexpected_kw(methname: str, kw: Dict[str, Any]) -> NoReturn: + k = list(kw)[0] + raise TypeError(f"{methname} got an unexpected keyword argument '{k}'") + + +@overload +def Nullable( + val: "SQLCoreOperations[_T]", +) -> "SQLCoreOperations[Optional[_T]]": ... + + +@overload +def Nullable( + val: roles.ExpressionElementRole[_T], +) -> roles.ExpressionElementRole[Optional[_T]]: ... + + +@overload +def Nullable(val: Type[_T]) -> Type[Optional[_T]]: ... 
+ + +def Nullable( + val: _TypedColumnClauseArgument[_T], +) -> _TypedColumnClauseArgument[Optional[_T]]: + """Types a column or ORM class as nullable. + + This can be used in select and other contexts to express that the value of + a column can be null, for example due to an outer join:: + + stmt1 = select(A, Nullable(B)).outerjoin(A.bs) + stmt2 = select(A.data, Nullable(B.data)).outerjoin(A.bs) + + At runtime this method returns the input unchanged. + + .. versionadded:: 2.0.20 + """ + return val + + +@overload +def NotNullable( + val: "SQLCoreOperations[Optional[_T]]", +) -> "SQLCoreOperations[_T]": ... + + +@overload +def NotNullable( + val: roles.ExpressionElementRole[Optional[_T]], +) -> roles.ExpressionElementRole[_T]: ... + + +@overload +def NotNullable(val: Type[Optional[_T]]) -> Type[_T]: ... + + +@overload +def NotNullable(val: Optional[Type[_T]]) -> Type[_T]: ... + + +def NotNullable( + val: Union[_TypedColumnClauseArgument[Optional[_T]], Optional[Type[_T]]], +) -> _TypedColumnClauseArgument[_T]: + """Types a column or ORM class as not nullable. + + This can be used in select and other contexts to express that the value of + a column cannot be null, for example due to a where condition on a + nullable column:: + + stmt = select(NotNullable(A.value)).where(A.value.is_not(None)) + + At runtime this method returns the input unchanged. + + .. 
versionadded:: 2.0.20 + """ + return val # type: ignore diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/annotation.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/annotation.py new file mode 100644 index 0000000..db382b8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/annotation.py @@ -0,0 +1,585 @@ +# sql/annotation.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""The :class:`.Annotated` class and related routines; creates hash-equivalent +copies of SQL constructs which contain context-specific markers and +associations. + +Note that the :class:`.Annotated` concept as implemented in this module is not +related in any way to the pep-593 concept of "Annotated". + + +""" + +from __future__ import annotations + +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import FrozenSet +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar + +from . import operators +from .cache_key import HasCacheKey +from .visitors import anon_map +from .visitors import ExternallyTraversible +from .visitors import InternalTraversal +from .. 
import util +from ..util.typing import Literal +from ..util.typing import Self + +if TYPE_CHECKING: + from .base import _EntityNamespace + from .visitors import _TraverseInternalsType + +_AnnotationDict = Mapping[str, Any] + +EMPTY_ANNOTATIONS: util.immutabledict[str, Any] = util.EMPTY_DICT + + +class SupportsAnnotations(ExternallyTraversible): + __slots__ = () + + _annotations: util.immutabledict[str, Any] = EMPTY_ANNOTATIONS + + proxy_set: util.generic_fn_descriptor[FrozenSet[Any]] + + _is_immutable: bool + + def _annotate(self, values: _AnnotationDict) -> Self: + raise NotImplementedError() + + @overload + def _deannotate( + self, + values: Literal[None] = ..., + clone: bool = ..., + ) -> Self: ... + + @overload + def _deannotate( + self, + values: Sequence[str] = ..., + clone: bool = ..., + ) -> SupportsAnnotations: ... + + def _deannotate( + self, + values: Optional[Sequence[str]] = None, + clone: bool = False, + ) -> SupportsAnnotations: + raise NotImplementedError() + + @util.memoized_property + def _annotations_cache_key(self) -> Tuple[Any, ...]: + anon_map_ = anon_map() + + return self._gen_annotations_cache_key(anon_map_) + + def _gen_annotations_cache_key( + self, anon_map: anon_map + ) -> Tuple[Any, ...]: + return ( + "_annotations", + tuple( + ( + key, + ( + value._gen_cache_key(anon_map, []) + if isinstance(value, HasCacheKey) + else value + ), + ) + for key, value in [ + (key, self._annotations[key]) + for key in sorted(self._annotations) + ] + ), + ) + + +class SupportsWrappingAnnotations(SupportsAnnotations): + __slots__ = () + + _constructor: Callable[..., SupportsWrappingAnnotations] + + if TYPE_CHECKING: + + @util.ro_non_memoized_property + def entity_namespace(self) -> _EntityNamespace: ... + + def _annotate(self, values: _AnnotationDict) -> Self: + """return a copy of this ClauseElement with annotations + updated by the given dictionary. 
+ + """ + return Annotated._as_annotated_instance(self, values) # type: ignore + + def _with_annotations(self, values: _AnnotationDict) -> Self: + """return a copy of this ClauseElement with annotations + replaced by the given dictionary. + + """ + return Annotated._as_annotated_instance(self, values) # type: ignore + + @overload + def _deannotate( + self, + values: Literal[None] = ..., + clone: bool = ..., + ) -> Self: ... + + @overload + def _deannotate( + self, + values: Sequence[str] = ..., + clone: bool = ..., + ) -> SupportsAnnotations: ... + + def _deannotate( + self, + values: Optional[Sequence[str]] = None, + clone: bool = False, + ) -> SupportsAnnotations: + """return a copy of this :class:`_expression.ClauseElement` + with annotations + removed. + + :param values: optional tuple of individual values + to remove. + + """ + if clone: + s = self._clone() + return s + else: + return self + + +class SupportsCloneAnnotations(SupportsWrappingAnnotations): + # SupportsCloneAnnotations extends from SupportsWrappingAnnotations + # to support the structure of having the base ClauseElement + # be a subclass of SupportsWrappingAnnotations. Any ClauseElement + # subclass that wants to extend from SupportsCloneAnnotations + # will inherently also be subclassing SupportsWrappingAnnotations, so + # make that specific here. + + if not typing.TYPE_CHECKING: + __slots__ = () + + _clone_annotations_traverse_internals: _TraverseInternalsType = [ + ("_annotations", InternalTraversal.dp_annotations_key) + ] + + def _annotate(self, values: _AnnotationDict) -> Self: + """return a copy of this ClauseElement with annotations + updated by the given dictionary. 

        """
        # union() preserves existing keys; new values win on collision
        new = self._clone()
        new._annotations = new._annotations.union(values)
        # invalidate memoized cache-key state, since annotations participate
        # in the cache key
        new.__dict__.pop("_annotations_cache_key", None)
        new.__dict__.pop("_generate_cache_key", None)
        return new

    def _with_annotations(self, values: _AnnotationDict) -> Self:
        """return a copy of this ClauseElement with annotations
        replaced by the given dictionary.

        """
        new = self._clone()
        new._annotations = util.immutabledict(values)
        new.__dict__.pop("_annotations_cache_key", None)
        new.__dict__.pop("_generate_cache_key", None)
        return new

    @overload
    def _deannotate(
        self,
        values: Literal[None] = ...,
        clone: bool = ...,
    ) -> Self: ...

    @overload
    def _deannotate(
        self,
        values: Sequence[str] = ...,
        clone: bool = ...,
    ) -> SupportsAnnotations: ...

    def _deannotate(
        self,
        values: Optional[Sequence[str]] = None,
        clone: bool = False,
    ) -> SupportsAnnotations:
        """return a copy of this :class:`_expression.ClauseElement`
        with annotations
        removed.

        :param values: optional tuple of individual values
        to remove.

        """
        # NOTE(review): ``values`` is accepted but not consulted here — this
        # implementation always removes *all* annotations; subclasses such as
        # Annotated provide selective removal.
        if clone or self._annotations:
            # clone is used when we are also copying
            # the expression for a deep deannotation
            new = self._clone()
            new._annotations = util.immutabledict()
            new.__dict__.pop("_annotations_cache_key", None)
            return new
        else:
            return self


class Annotated(SupportsAnnotations):
    """clones a SupportsAnnotations and applies an 'annotations' dictionary.

    Unlike regular clones, this clone also mimics __hash__() and
    __eq__() of the original element so that it takes its place
    in hashed collections.

    A reference to the original element is maintained, for the important
    reason of keeping its hash value current.  When GC'ed, the
    hash value may be reused, causing conflicts.

    .. note::  The rationale for Annotated producing a brand new class,
       rather than placing the functionality directly within ClauseElement,
       is **performance**.  The __hash__() method is absent on plain
       ClauseElement which leads to significantly reduced function call
       overhead, as the use of sets and dictionaries against ClauseElement
       objects is prevalent, but most are not "annotated".

    """

    # set True by _new_annotation_type() for ColumnOperators subclasses,
    # which changes __eq__() semantics below
    _is_column_operators = False

    @classmethod
    def _as_annotated_instance(
        cls, element: SupportsWrappingAnnotations, values: _AnnotationDict
    ) -> Annotated:
        # look up (or lazily generate) the Annotated subclass specific to
        # element's class; see _new_annotation_type()
        try:
            cls = annotated_classes[element.__class__]
        except KeyError:
            cls = _new_annotation_type(element.__class__, cls)
        return cls(element, values)

    _annotations: util.immutabledict[str, Any]
    __element: SupportsWrappingAnnotations  # name-mangled: _Annotated__element
    _hash: int

    def __new__(cls: Type[Self], *args: Any) -> Self:
        # bypass any __new__ behavior of the proxied class
        return object.__new__(cls)

    def __init__(
        self, element: SupportsWrappingAnnotations, values: _AnnotationDict
    ):
        # take over the element's state wholesale, minus memoized cache keys
        self.__dict__ = element.__dict__.copy()
        self.__dict__.pop("_annotations_cache_key", None)
        self.__dict__.pop("_generate_cache_key", None)
        self.__element = element
        self._annotations = util.immutabledict(values)
        # cache the original element's hash so this object hashes identically
        self._hash = hash(element)

    def _annotate(self, values: _AnnotationDict) -> Self:
        _values = self._annotations.union(values)
        new = self._with_annotations(_values)
        return new

    def _with_annotations(self, values: _AnnotationDict) -> Self:
        # shallow-copy ourselves rather than re-wrapping the element,
        # preserving __element/_hash while swapping the annotation dict
        clone = self.__class__.__new__(self.__class__)
        clone.__dict__ = self.__dict__.copy()
        clone.__dict__.pop("_annotations_cache_key", None)
        clone.__dict__.pop("_generate_cache_key", None)
        clone._annotations = util.immutabledict(values)
        return clone

    @overload
    def _deannotate(
        self,
        values: Literal[None] = ...,
        clone: bool = ...,
    ) -> Self: ...

    @overload
    def _deannotate(
        self,
        values: Sequence[str] = ...,
        clone: bool = ...,
    ) -> Annotated: ...

    def _deannotate(
        self,
        values: Optional[Sequence[str]] = None,
        clone: bool = True,
    ) -> SupportsAnnotations:
        # values=None -> unwrap back to the original element entirely;
        # otherwise drop only the named keys, staying Annotated
        if values is None:
            return self.__element
        else:
            return self._with_annotations(
                util.immutabledict(
                    {
                        key: value
                        for key, value in self._annotations.items()
                        if key not in values
                    }
                )
            )

    if not typing.TYPE_CHECKING:
        # manually proxy some methods that need extra attention
        def _compiler_dispatch(self, visitor: Any, **kw: Any) -> Any:
            # dispatch using the *proxied* class's visit name, passing
            # self so annotations remain visible to the visitor
            return self.__element.__class__._compiler_dispatch(
                self, visitor, **kw
            )

        @property
        def _constructor(self):
            return self.__element._constructor

    def _clone(self, **kw: Any) -> Self:
        clone = self.__element._clone(**kw)
        if clone is self.__element:
            # detect immutable, don't change anything
            return self
        else:
            # update the clone with any changes that have occurred
            # to this object's __dict__.
            clone.__dict__.update(self.__dict__)
            return self.__class__(clone, self._annotations)

    def __reduce__(self) -> Tuple[Type[Annotated], Tuple[Any, ...]]:
        # pickle as (class, (element, annotations)); works because the
        # generated Annotated subclasses are module-level (see
        # _new_annotation_type which installs them in globals())
        return self.__class__, (self.__element, self._annotations)

    def __hash__(self) -> int:
        # mimic the original element's hash (cached at construction)
        return self._hash

    def __eq__(self, other: Any) -> bool:
        if self._is_column_operators:
            # ColumnOperators overloads == to produce SQL expressions;
            # defer to the proxied class to keep that behavior
            return self.__element.__class__.__eq__(self, other)
        else:
            return hash(other) == hash(self)

    @util.ro_non_memoized_property
    def entity_namespace(self) -> _EntityNamespace:
        # an "entity_namespace" annotation overrides the element's own
        if "entity_namespace" in self._annotations:
            return cast(
                SupportsWrappingAnnotations,
                self._annotations["entity_namespace"],
            ).entity_namespace
        else:
            return self.__element.entity_namespace


# hard-generate Annotated subclasses.  this technique
# is used instead of on-the-fly types (i.e. type.__new__())
# so that the resulting objects are pickleable; additionally, other
# decisions can be made up front about the type of object being annotated
# just once per class rather than per-instance.
# registry of generated Annotated subclasses, keyed by the proxied class;
# populated by _new_annotation_type()
annotated_classes: Dict[Type[SupportsWrappingAnnotations], Type[Annotated]] = (
    {}
)

_SA = TypeVar("_SA", bound="SupportsAnnotations")


def _safe_annotate(to_annotate: _SA, annotations: _AnnotationDict) -> _SA:
    """Annotate the given object, passing it through unchanged if it does
    not support annotation."""
    try:
        _annotate = to_annotate._annotate
    except AttributeError:
        # skip objects that don't actually have an `_annotate`
        # attribute, namely QueryableAttribute inside of a join
        # condition
        return to_annotate
    else:
        return _annotate(annotations)


def _deep_annotate(
    element: _SA,
    annotations: _AnnotationDict,
    exclude: Optional[Sequence[SupportsAnnotations]] = None,
    *,
    detect_subquery_cols: bool = False,
    ind_cols_on_fromclause: bool = False,
    annotate_callable: Optional[
        Callable[[SupportsAnnotations, _AnnotationDict], SupportsAnnotations]
    ] = None,
) -> _SA:
    """Deep copy the given ClauseElement, annotating each element
    with the given annotations dictionary.

    Elements within the exclude collection will be cloned but not annotated.

    """

    # annotated objects hack the __hash__() method so if we want to
    # uniquely process them we have to use id()

    cloned_ids: Dict[int, SupportsAnnotations] = {}

    def clone(elem: SupportsAnnotations, **kw: Any) -> SupportsAnnotations:
        # ind_cols_on_fromclause means make sure an AnnotatedFromClause
        # has its own .c collection independent of that which its proxying.
        # this is used specifically by orm.LoaderCriteriaOption to break
        # a reference cycle that it's otherwise prone to building,
        # see test_relationship_criteria->
        # test_loader_criteria_subquery_w_same_entity.  logic here was
        # changed for #8796 and made explicit; previously it occurred
        # by accident

        kw["detect_subquery_cols"] = detect_subquery_cols
        id_ = id(elem)

        # memoize per-element so shared subtrees are cloned exactly once
        if id_ in cloned_ids:
            return cloned_ids[id_]

        if (
            exclude
            and hasattr(elem, "proxy_set")
            and elem.proxy_set.intersection(exclude)
        ):
            # excluded: clone but do not annotate
            newelem = elem._clone(clone=clone, **kw)
        elif annotations != elem._annotations:
            if detect_subquery_cols and elem._is_immutable:
                # immutable elements can't be annotated in place; clone first
                to_annotate = elem._clone(clone=clone, **kw)
            else:
                to_annotate = elem
            if annotate_callable:
                newelem = annotate_callable(to_annotate, annotations)
            else:
                newelem = _safe_annotate(to_annotate, annotations)
        else:
            # already carries exactly these annotations; leave as-is
            newelem = elem

        newelem._copy_internals(
            clone=clone, ind_cols_on_fromclause=ind_cols_on_fromclause
        )

        cloned_ids[id_] = newelem
        return newelem

    if element is not None:
        element = cast(_SA, clone(element))
    clone = None  # type: ignore  # remove gc cycles
    return element


@overload
def _deep_deannotate(
    element: Literal[None], values: Optional[Sequence[str]] = None
) -> Literal[None]: ...


@overload
def _deep_deannotate(
    element: _SA, values: Optional[Sequence[str]] = None
) -> _SA: ...


def _deep_deannotate(
    element: Optional[_SA], values: Optional[Sequence[str]] = None
) -> Optional[_SA]:
    """Deep copy the given element, removing annotations."""

    cloned: Dict[Any, SupportsAnnotations] = {}

    def clone(elem: SupportsAnnotations, **kw: Any) -> SupportsAnnotations:
        key: Any
        # with selective removal (values given), elements must be processed
        # uniquely by identity; full deannotation can dedupe by equality
        if values:
            key = id(elem)
        else:
            key = elem

        if key not in cloned:
            newelem = elem._deannotate(values=values, clone=True)
            newelem._copy_internals(clone=clone)
            cloned[key] = newelem
            return newelem
        else:
            return cloned[key]

    if element is not None:
        element = cast(_SA, clone(element))
    clone = None  # type: ignore  # remove gc cycles
    return element


def _shallow_annotate(element: _SA, annotations: _AnnotationDict) -> _SA:
    """Annotate the given ClauseElement and copy its internals so that
    internal objects refer to the new annotated object.

    Basically used to apply a "don't traverse" annotation to a
    selectable, without digging throughout the whole
    structure wasting time.
    """
    element = element._annotate(annotations)
    element._copy_internals()
    return element


def _new_annotation_type(
    cls: Type[SupportsWrappingAnnotations], base_cls: Type[Annotated]
) -> Type[Annotated]:
    """Generates a new class that subclasses Annotated and proxies a given
    element type.

    """
    if issubclass(cls, Annotated):
        return cls
    elif cls in annotated_classes:
        return annotated_classes[cls]

    for super_ in cls.__mro__:
        # check if an Annotated subclass more specific than
        # the given base_cls is already registered, such
        # as AnnotatedColumnElement.
        if super_ in annotated_classes:
            base_cls = annotated_classes[super_]
            break

    annotated_classes[cls] = anno_cls = cast(
        Type[Annotated],
        type("Annotated%s" % cls.__name__, (base_cls, cls), {}),
    )
    # install at module level so the generated class is pickleable
    # (see Annotated.__reduce__)
    globals()["Annotated%s" % cls.__name__] = anno_cls

    # extend the traversal spec with the annotations key so annotations
    # participate in cache keys
    if "_traverse_internals" in cls.__dict__:
        anno_cls._traverse_internals = list(cls._traverse_internals) + [
            ("_annotations", InternalTraversal.dp_annotations_key)
        ]
    elif cls.__dict__.get("inherit_cache", False):
        anno_cls._traverse_internals = list(cls._traverse_internals) + [
            ("_annotations", InternalTraversal.dp_annotations_key)
        ]

    # some classes include this even if they have traverse_internals
    # e.g. BindParameter, add it if present.
    if cls.__dict__.get("inherit_cache", False):
        anno_cls.inherit_cache = True  # type: ignore
    elif "inherit_cache" in cls.__dict__:
        anno_cls.inherit_cache = cls.__dict__["inherit_cache"]  # type: ignore

    anno_cls._is_column_operators = issubclass(cls, operators.ColumnOperators)

    return anno_cls


def _prepare_annotations(
    target_hierarchy: Type[SupportsWrappingAnnotations],
    base_cls: Type[Annotated],
) -> None:
    """Eagerly generate Annotated subclasses for an entire class hierarchy."""
    for cls in util.walk_subclasses(target_hierarchy):
        _new_annotation_type(cls, base_cls)
diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/base.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/base.py
new file mode 100644
index 0000000..5eb32e3
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/base.py
@@ -0,0 +1,2180 @@
# sql/base.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: allow-untyped-defs, allow-untyped-calls

"""Foundational utilities common to many sql modules.

"""


from __future__ import annotations

import collections
from enum import Enum
import itertools
from itertools import zip_longest
import operator
import re
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import FrozenSet
from typing import Generic
from typing import Iterable
from typing import Iterator
from typing import List
from typing import Mapping
from typing import MutableMapping
from typing import NamedTuple
from typing import NoReturn
from typing import Optional
from typing import overload
from typing import Sequence
from typing import Set
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union

from . import roles
from . import visitors
from .cache_key import HasCacheKey  # noqa
from .cache_key import MemoizedHasCacheKey  # noqa
from .traversals import HasCopyInternals  # noqa
from .visitors import ClauseVisitor
from .visitors import ExtendedInternalTraversal
from .visitors import ExternallyTraversible
from .visitors import InternalTraversal
from .. import event
from .. import exc
from .. import util
from ..util import HasMemoized as HasMemoized
from ..util import hybridmethod
from ..util import typing as compat_typing
from ..util.typing import Protocol
from ..util.typing import Self
from ..util.typing import TypeGuard

if TYPE_CHECKING:
    from . import coercions
    from . import elements
    from . import type_api
    from ._orm_types import DMLStrategyArgument
    from ._orm_types import SynchronizeSessionArgument
    from ._typing import _CLE
    from .elements import BindParameter
    from .elements import ClauseList
    from .elements import ColumnClause  # noqa
    from .elements import ColumnElement
    from .elements import KeyedColumnElement
    from .elements import NamedColumn
    from .elements import SQLCoreOperations
    from .elements import TextClause
    from .schema import Column
    from .schema import DefaultGenerator
    from .selectable import _JoinTargetElement
    from .selectable import _SelectIterable
    from .selectable import FromClause
    from ..engine import Connection
    from ..engine import CursorResult
    from ..engine.interfaces import _CoreMultiExecuteParams
    from ..engine.interfaces import _ExecuteOptions
    from ..engine.interfaces import _ImmutableExecuteOptions
    from ..engine.interfaces import CacheStats
    from ..engine.interfaces import Compiled
    from ..engine.interfaces import CompiledCacheType
    from ..engine.interfaces import CoreExecuteOptionsParameter
    from ..engine.interfaces import Dialect
    from ..engine.interfaces import IsolationLevel
    from ..engine.interfaces import SchemaTranslateMapType
    from ..event import dispatcher

if not TYPE_CHECKING:
    # placeholders for modules injected later to avoid circular imports
    coercions = None  # noqa
    elements = None  # noqa
    type_api = None  # noqa


class _NoArg(Enum):
    # sentinel distinguishing "argument not passed" from an explicit None
    NO_ARG = 0

    def __repr__(self):
        return f"_NoArg.{self.name}"


NO_ARG = _NoArg.NO_ARG


class _NoneName(Enum):
    NONE_NAME = 0
    """indicate a 'deferred' name that was ultimately the value None."""


_NONE_NAME = _NoneName.NONE_NAME

_T = TypeVar("_T", bound=Any)

_Fn = TypeVar("_Fn", bound=Callable[..., Any])

_AmbiguousTableNameMap = MutableMapping[str, str]


class _DefaultDescriptionTuple(NamedTuple):
    """lightweight, picklable description of a column default."""

    arg: Any
    is_scalar: Optional[bool]
    is_callable: Optional[bool]
    is_sentinel: Optional[bool]

    @classmethod
    def _from_column_default(
        cls, default: Optional[DefaultGenerator]
    ) -> _DefaultDescriptionTuple:
        # only describe defaults that carry an argument, or sentinel
        # (non-onupdate) defaults; everything else collapses to all-None
        return (
            _DefaultDescriptionTuple(
                default.arg,  # type: ignore
                default.is_scalar,
                default.is_callable,
                default.is_sentinel,
            )
            if default
            and (
                default.has_arg
                or (not default.for_update and default.is_sentinel)
            )
            else _DefaultDescriptionTuple(None, None, None, None)
        )


_never_select_column = operator.attrgetter("_omit_from_statements")


class _EntityNamespace(Protocol):
    def __getattr__(self, key: str) -> SQLCoreOperations[Any]: ...


class _HasEntityNamespace(Protocol):
    @util.ro_non_memoized_property
    def entity_namespace(self) -> _EntityNamespace: ...


def _is_has_entity_namespace(element: Any) -> TypeGuard[_HasEntityNamespace]:
    return hasattr(element, "entity_namespace")


# Remove when https://github.com/python/mypy/issues/14640 will be fixed
_Self = TypeVar("_Self", bound=Any)


class Immutable:
    """mark a ClauseElement as 'immutable' when expressions are cloned.

    "immutable" objects refers to the "mutability" of an object in the
    context of SQL DQL and DML generation.  Such as, in DQL, one can
    compose a SELECT or subquery of varied forms, but one cannot modify
    the structure of a specific table or column within DQL.
    :class:`.Immutable` is mostly intended to follow this concept, and as
    such the primary "immutable" objects are :class:`.ColumnClause`,
    :class:`.Column`, :class:`.TableClause`, :class:`.Table`.

    """

    __slots__ = ()

    _is_immutable = True

    def unique_params(self, *optionaldict, **kwargs):
        raise NotImplementedError("Immutable objects do not support copying")

    def params(self, *optionaldict, **kwargs):
        raise NotImplementedError("Immutable objects do not support copying")

    def _clone(self: _Self, **kw: Any) -> _Self:
        # immutable objects are their own clone
        return self

    def _copy_internals(
        self, *, omit_attrs: Iterable[str] = (), **kw: Any
    ) -> None:
        # nothing to copy; immutable objects have no cloneable internals
        pass


class SingletonConstant(Immutable):
    """Represent SQL constants like NULL, TRUE, FALSE"""

    _is_singleton_constant = True

    _singleton: SingletonConstant

    def __new__(cls: _T, *arg: Any, **kw: Any) -> _T:
        # always hand back the one per-class instance
        return cast(_T, cls._singleton)

    @util.non_memoized_property
    def proxy_set(self) -> FrozenSet[ColumnElement[Any]]:
        raise NotImplementedError()

    @classmethod
    def _create_singleton(cls):
        obj = object.__new__(cls)
        obj.__init__()  # type: ignore

        # for a long time this was an empty frozenset, meaning
        # a SingletonConstant would never be a "corresponding column" in
        # a statement.  This referred to #6259.  However, in #7154 we see
        # that we do in fact need "correspondence" to work when matching cols
        # in result sets, so the non-correspondence was moved to a more
        # specific level when we are actually adapting expressions for SQL
        # render only.
        obj.proxy_set = frozenset([obj])
        cls._singleton = obj


def _from_objects(
    *elements: Union[
        ColumnElement[Any], FromClause, TextClause, _JoinTargetElement
    ]
) -> Iterator[FromClause]:
    """Flatten the _from_objects of each given element into one iterator."""
    return itertools.chain.from_iterable(
        [element._from_objects for element in elements]
    )


def _select_iterables(
    elements: Iterable[roles.ColumnsClauseRole],
) -> _SelectIterable:
    """expand tables into individual columns in the
    given list of column expressions.

    """
    return itertools.chain.from_iterable(
        [c._select_iterable for c in elements]
    )


_SelfGenerativeType = TypeVar("_SelfGenerativeType", bound="_GenerativeType")


class _GenerativeType(compat_typing.Protocol):
    def _generate(self) -> Self: ...


def _generative(fn: _Fn) -> _Fn:
    """non-caching _generative() decorator.

    This is basically the legacy decorator that copies the object and
    runs a method on the new copy.

    """

    @util.decorator
    def _generative(
        fn: _Fn, self: _SelfGenerativeType, *args: Any, **kw: Any
    ) -> _SelfGenerativeType:
        """Mark a method as generative."""

        # copy first, then run the method against the copy
        self = self._generate()
        x = fn(self, *args, **kw)
        assert x is self, "generative methods must return self"
        return self

    decorated = _generative(fn)
    # keep a handle to the undecorated function for in-place invocation
    decorated.non_generative = fn  # type: ignore
    return decorated


def _exclusive_against(*names: str, **kw: Any) -> Callable[[_Fn], _Fn]:
    """Decorator factory raising InvalidRequestError if any of the named
    attributes on self differ from their configured default, i.e. the
    decorated method was already invoked."""
    msgs = kw.pop("msgs", {})

    defaults = kw.pop("defaults", {})

    getters = [
        (name, operator.attrgetter(name), defaults.get(name, None))
        for name in names
    ]

    @util.decorator
    def check(fn, *args, **kw):
        # make pylance happy by not including "self" in the argument
        # list
        self = args[0]
        args = args[1:]
        for name, getter, default_ in getters:
            if getter(self) is not default_:
                msg = msgs.get(
                    name,
                    "Method %s() has already been invoked on this %s construct"
                    % (fn.__name__, self.__class__),
                )
                raise exc.InvalidRequestError(msg)
        return fn(self, *args, **kw)

    return check


def _clone(element, **kw):
    """functional form of element._clone(), usable as a callback."""
    return element._clone(**kw)


def _expand_cloned(
    elements: Iterable[_CLE],
) -> Iterable[_CLE]:
    """expand the given set of ClauseElements to be the set of all 'cloned'
    predecessors.
+ + """ + # TODO: cython candidate + return itertools.chain(*[x._cloned_set for x in elements]) + + +def _de_clone( + elements: Iterable[_CLE], +) -> Iterable[_CLE]: + for x in elements: + while x._is_clone_of is not None: + x = x._is_clone_of + yield x + + +def _cloned_intersection(a: Iterable[_CLE], b: Iterable[_CLE]) -> Set[_CLE]: + """return the intersection of sets a and b, counting + any overlap between 'cloned' predecessors. + + The returned set is in terms of the entities present within 'a'. + + """ + all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b)) + return {elem for elem in a if all_overlap.intersection(elem._cloned_set)} + + +def _cloned_difference(a: Iterable[_CLE], b: Iterable[_CLE]) -> Set[_CLE]: + all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b)) + return { + elem for elem in a if not all_overlap.intersection(elem._cloned_set) + } + + +class _DialectArgView(MutableMapping[str, Any]): + """A dictionary view of dialect-level arguments in the form + _. 
+ + """ + + def __init__(self, obj): + self.obj = obj + + def _key(self, key): + try: + dialect, value_key = key.split("_", 1) + except ValueError as err: + raise KeyError(key) from err + else: + return dialect, value_key + + def __getitem__(self, key): + dialect, value_key = self._key(key) + + try: + opt = self.obj.dialect_options[dialect] + except exc.NoSuchModuleError as err: + raise KeyError(key) from err + else: + return opt[value_key] + + def __setitem__(self, key, value): + try: + dialect, value_key = self._key(key) + except KeyError as err: + raise exc.ArgumentError( + "Keys must be of the form _" + ) from err + else: + self.obj.dialect_options[dialect][value_key] = value + + def __delitem__(self, key): + dialect, value_key = self._key(key) + del self.obj.dialect_options[dialect][value_key] + + def __len__(self): + return sum( + len(args._non_defaults) + for args in self.obj.dialect_options.values() + ) + + def __iter__(self): + return ( + "%s_%s" % (dialect_name, value_name) + for dialect_name in self.obj.dialect_options + for value_name in self.obj.dialect_options[ + dialect_name + ]._non_defaults + ) + + +class _DialectArgDict(MutableMapping[str, Any]): + """A dictionary view of dialect-level arguments for a specific + dialect. + + Maintains a separate collection of user-specified arguments + and dialect-specified default arguments. 
+ + """ + + def __init__(self): + self._non_defaults = {} + self._defaults = {} + + def __len__(self): + return len(set(self._non_defaults).union(self._defaults)) + + def __iter__(self): + return iter(set(self._non_defaults).union(self._defaults)) + + def __getitem__(self, key): + if key in self._non_defaults: + return self._non_defaults[key] + else: + return self._defaults[key] + + def __setitem__(self, key, value): + self._non_defaults[key] = value + + def __delitem__(self, key): + del self._non_defaults[key] + + +@util.preload_module("sqlalchemy.dialects") +def _kw_reg_for_dialect(dialect_name): + dialect_cls = util.preloaded.dialects.registry.load(dialect_name) + if dialect_cls.construct_arguments is None: + return None + return dict(dialect_cls.construct_arguments) + + +class DialectKWArgs: + """Establish the ability for a class to have dialect-specific arguments + with defaults and constructor validation. + + The :class:`.DialectKWArgs` interacts with the + :attr:`.DefaultDialect.construct_arguments` present on a dialect. + + .. seealso:: + + :attr:`.DefaultDialect.construct_arguments` + + """ + + __slots__ = () + + _dialect_kwargs_traverse_internals = [ + ("dialect_options", InternalTraversal.dp_dialect_options) + ] + + @classmethod + def argument_for(cls, dialect_name, argument_name, default): + """Add a new kind of dialect-specific keyword argument for this class. + + E.g.:: + + Index.argument_for("mydialect", "length", None) + + some_index = Index('a', 'b', mydialect_length=5) + + The :meth:`.DialectKWArgs.argument_for` method is a per-argument + way adding extra arguments to the + :attr:`.DefaultDialect.construct_arguments` dictionary. This + dictionary provides a list of argument names accepted by various + schema-level constructs on behalf of a dialect. + + New dialects should typically specify this dictionary all at once as a + data member of the dialect class. 
The use case for ad-hoc addition of + argument names is typically for end-user code that is also using + a custom compilation scheme which consumes the additional arguments. + + :param dialect_name: name of a dialect. The dialect must be + locatable, else a :class:`.NoSuchModuleError` is raised. The + dialect must also include an existing + :attr:`.DefaultDialect.construct_arguments` collection, indicating + that it participates in the keyword-argument validation and default + system, else :class:`.ArgumentError` is raised. If the dialect does + not include this collection, then any keyword argument can be + specified on behalf of this dialect already. All dialects packaged + within SQLAlchemy include this collection, however for third party + dialects, support may vary. + + :param argument_name: name of the parameter. + + :param default: default value of the parameter. + + """ + + construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name] + if construct_arg_dictionary is None: + raise exc.ArgumentError( + "Dialect '%s' does have keyword-argument " + "validation and defaults enabled configured" % dialect_name + ) + if cls not in construct_arg_dictionary: + construct_arg_dictionary[cls] = {} + construct_arg_dictionary[cls][argument_name] = default + + @util.memoized_property + def dialect_kwargs(self): + """A collection of keyword arguments specified as dialect-specific + options to this construct. + + The arguments are present here in their original ``_`` + format. Only arguments that were actually passed are included; + unlike the :attr:`.DialectKWArgs.dialect_options` collection, which + contains all options known by this dialect including defaults. + + The collection is also writable; keys are accepted of the + form ``_`` where the value will be assembled + into the list of options. + + .. 
seealso:: + + :attr:`.DialectKWArgs.dialect_options` - nested dictionary form + + """ + return _DialectArgView(self) + + @property + def kwargs(self): + """A synonym for :attr:`.DialectKWArgs.dialect_kwargs`.""" + return self.dialect_kwargs + + _kw_registry = util.PopulateDict(_kw_reg_for_dialect) + + def _kw_reg_for_dialect_cls(self, dialect_name): + construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name] + d = _DialectArgDict() + + if construct_arg_dictionary is None: + d._defaults.update({"*": None}) + else: + for cls in reversed(self.__class__.__mro__): + if cls in construct_arg_dictionary: + d._defaults.update(construct_arg_dictionary[cls]) + return d + + @util.memoized_property + def dialect_options(self): + """A collection of keyword arguments specified as dialect-specific + options to this construct. + + This is a two-level nested registry, keyed to ```` + and ````. For example, the ``postgresql_where`` + argument would be locatable as:: + + arg = my_object.dialect_options['postgresql']['where'] + + .. versionadded:: 0.9.2 + + .. 
seealso:: + + :attr:`.DialectKWArgs.dialect_kwargs` - flat dictionary form + + """ + + return util.PopulateDict( + util.portable_instancemethod(self._kw_reg_for_dialect_cls) + ) + + def _validate_dialect_kwargs(self, kwargs: Dict[str, Any]) -> None: + # validate remaining kwargs that they all specify DB prefixes + + if not kwargs: + return + + for k in kwargs: + m = re.match("^(.+?)_(.+)$", k) + if not m: + raise TypeError( + "Additional arguments should be " + "named _, got '%s'" % k + ) + dialect_name, arg_name = m.group(1, 2) + + try: + construct_arg_dictionary = self.dialect_options[dialect_name] + except exc.NoSuchModuleError: + util.warn( + "Can't validate argument %r; can't " + "locate any SQLAlchemy dialect named %r" + % (k, dialect_name) + ) + self.dialect_options[dialect_name] = d = _DialectArgDict() + d._defaults.update({"*": None}) + d._non_defaults[arg_name] = kwargs[k] + else: + if ( + "*" not in construct_arg_dictionary + and arg_name not in construct_arg_dictionary + ): + raise exc.ArgumentError( + "Argument %r is not accepted by " + "dialect %r on behalf of %r" + % (k, dialect_name, self.__class__) + ) + else: + construct_arg_dictionary[arg_name] = kwargs[k] + + +class CompileState: + """Produces additional object state necessary for a statement to be + compiled. + + the :class:`.CompileState` class is at the base of classes that assemble + state for a particular statement object that is then used by the + compiler. This process is essentially an extension of the process that + the SQLCompiler.visit_XYZ() method takes, however there is an emphasis + on converting raw user intent into more organized structures rather than + producing string output. The top-level :class:`.CompileState` for the + statement being executed is also accessible when the execution context + works with invoking the statement and collecting results. 

    The production of :class:`.CompileState` is specific to the compiler, such
    as within the :meth:`.SQLCompiler.visit_insert`,
    :meth:`.SQLCompiler.visit_select` etc. methods.  These methods are also
    responsible for associating the :class:`.CompileState` with the
    :class:`.SQLCompiler` itself, if the statement is the "toplevel" statement,
    i.e. the outermost SQL statement that's actually being executed.
    There can be other :class:`.CompileState` objects that are not the
    toplevel, such as when a SELECT subquery or CTE-nested
    INSERT/UPDATE/DELETE is generated.

    .. versionadded:: 1.4

    """

    __slots__ = ("statement", "_ambiguous_table_name_map")

    # registry of (plugin_name, visit_name) -> CompileState subclass,
    # populated via the plugin_for() decorator
    plugins: Dict[Tuple[str, str], Type[CompileState]] = {}

    _ambiguous_table_name_map: Optional[_AmbiguousTableNameMap]

    @classmethod
    def create_for_statement(cls, statement, compiler, **kw):
        # factory construction.

        # resolve the plugin-specific subclass from the statement's
        # propagated attributes, falling back to the "default" plugin
        if statement._propagate_attrs:
            plugin_name = statement._propagate_attrs.get(
                "compile_state_plugin", "default"
            )
            klass = cls.plugins.get(
                (plugin_name, statement._effective_plugin_target), None
            )
            if klass is None:
                klass = cls.plugins[
                    ("default", statement._effective_plugin_target)
                ]

        else:
            klass = cls.plugins[
                ("default", statement._effective_plugin_target)
            ]

        if klass is cls:
            return cls(statement, compiler, **kw)
        else:
            # delegate so subclasses may apply their own factory logic
            return klass.create_for_statement(statement, compiler, **kw)

    def __init__(self, statement, compiler, **kw):
        self.statement = statement

    @classmethod
    def get_plugin_class(
        cls, statement: Executable
    ) -> Optional[Type[CompileState]]:
        plugin_name = statement._propagate_attrs.get(
            "compile_state_plugin", None
        )

        if plugin_name:
            key = (plugin_name, statement._effective_plugin_target)
            if key in cls.plugins:
                return cls.plugins[key]

        # there's no case where we call upon get_plugin_class() and want
        # to get None back, there should always be a default.  return that
        # if there was no plugin-specific class (e.g. Insert with "orm"
        # plugin)
        try:
            return cls.plugins[("default", statement._effective_plugin_target)]
        except KeyError:
            return None

    @classmethod
    def _get_plugin_class_for_plugin(
        cls, statement: Executable, plugin_name: str
    ) -> Optional[Type[CompileState]]:
        try:
            return cls.plugins[
                (plugin_name, statement._effective_plugin_target)
            ]
        except KeyError:
            return None

    @classmethod
    def plugin_for(
        cls, plugin_name: str, visit_name: str
    ) -> Callable[[_Fn], _Fn]:
        """Class decorator registering a CompileState subclass for the
        given (plugin, visit name) combination."""

        def decorate(cls_to_decorate):
            cls.plugins[(plugin_name, visit_name)] = cls_to_decorate
            return cls_to_decorate

        return decorate


class Generative(HasMemoized):
    """Provide a method-chaining pattern in conjunction with the
    @_generative decorator."""

    def _generate(self) -> Self:
        # shallow-copy self, dropping memoized attributes so they are
        # recomputed on the new copy
        skip = self._memoized_keys
        cls = self.__class__
        s = cls.__new__(cls)
        if skip:
            # ensure this iteration remains atomic
            s.__dict__ = {
                k: v for k, v in self.__dict__.copy().items() if k not in skip
            }
        else:
            s.__dict__ = self.__dict__.copy()
        return s


class InPlaceGenerative(HasMemoized):
    """Provide a method-chaining pattern in conjunction with the
    @_generative decorator that mutates in place."""

    __slots__ = ()

    def _generate(self):
        skip = self._memoized_keys
        # note __dict__ needs to be in __slots__ if this is used
        for k in skip:
            self.__dict__.pop(k, None)
        return self


class HasCompileState(Generative):
    """A class that has a :class:`.CompileState` associated with it."""

    _compile_state_plugin: Optional[Type[CompileState]] = None

    _attributes: util.immutabledict[str, Any] = util.EMPTY_DICT

    _compile_state_factory = CompileState.create_for_statement


class _MetaOptions(type):
    """metaclass for the Options class.

    This metaclass is actually necessary despite the availability of the
    ``__init_subclass__()`` hook as this type also provides custom class-level
    behavior for the ``__add__()`` method.

    """

    _cache_attrs: Tuple[str, ...]

    def __add__(self, other):
        # class-level "Options + dict" -> instance with dict applied
        o1 = self()

        if set(other).difference(self._cache_attrs):
            raise TypeError(
                "dictionary contains attributes not covered by "
                "Options class %s: %r"
                % (self, set(other).difference(self._cache_attrs))
            )

        o1.__dict__.update(other)
        return o1

    if TYPE_CHECKING:

        def __getattr__(self, key: str) -> Any: ...

        def __setattr__(self, key: str, value: Any) -> None: ...

        def __delattr__(self, key: str) -> None: ...


class Options(metaclass=_MetaOptions):
    """A cacheable option dictionary with defaults."""

    __slots__ = ()

    _cache_attrs: Tuple[str, ...]

    def __init_subclass__(cls) -> None:
        # snapshot the subclass's declared option names; these form the
        # closed set of attributes allowed in __add__() / safe_merge()
        dict_ = cls.__dict__
        cls._cache_attrs = tuple(
            sorted(
                d
                for d in dict_
                if not d.startswith("__")
                and d not in ("_cache_key_traversal",)
            )
        )
        super().__init_subclass__()

    def __init__(self, **kw):
        self.__dict__.update(kw)

    def __add__(self, other):
        # instance-level "options + dict" -> new instance with overrides
        o1 = self.__class__.__new__(self.__class__)
        o1.__dict__.update(self.__dict__)

        if set(other).difference(self._cache_attrs):
            raise TypeError(
                "dictionary contains attributes not covered by "
                "Options class %s: %r"
                % (self, set(other).difference(self._cache_attrs))
            )

        o1.__dict__.update(other)
        return o1

    def __eq__(self, other):
        # TODO: very inefficient.  This is used only in test suites
        # right now.
        for a, b in zip_longest(self._cache_attrs, other._cache_attrs):
            if getattr(self, a) != getattr(other, b):
                return False
        return True

    def __repr__(self):
        # TODO: fairly inefficient, used only in debugging right now.

        return "%s(%s)" % (
            self.__class__.__name__,
            ", ".join(
                "%s=%r" % (k, self.__dict__[k])
                for k in self._cache_attrs
                if k in self.__dict__
            ),
        )

    @classmethod
    def isinstance(cls, klass: Type[Any]) -> bool:
        return issubclass(cls, klass)

    @hybridmethod
    def add_to_element(self, name, value):
        # append ``value`` onto the existing value of option ``name``
        return self + {name: getattr(self, name) + value}

    @hybridmethod
    def _state_dict_inst(self) -> Mapping[str, Any]:
        return self.__dict__

    _state_dict_const: util.immutabledict[str, Any] = util.EMPTY_DICT

    @_state_dict_inst.classlevel
    def _state_dict(cls) -> Mapping[str, Any]:
        return cls._state_dict_const

    @classmethod
    def safe_merge(cls, other):
        d = other._state_dict()

        # only support a merge with another object of our class
        # and which does not have attrs that we don't.  otherwise
        # we risk having state that might not be part of our cache
        # key strategy

        if (
            cls is not other.__class__
            and other._cache_attrs
            and set(other._cache_attrs).difference(cls._cache_attrs)
        ):
            raise TypeError(
                "other element %r is not empty, is not of type %s, "
                "and contains attributes not covered here %r"
                % (
                    other,
                    cls,
                    set(other._cache_attrs).difference(cls._cache_attrs),
                )
            )
        return cls + d

    @classmethod
    def from_execution_options(
        cls, key, attrs, exec_options, statement_exec_options
    ):
        """process Options argument in terms of execution options.


        e.g.::

            (
                load_options,
                execution_options,
            ) = QueryContext.default_load_options.from_execution_options(
                "_sa_orm_load_options",
                {
                    "populate_existing",
                    "autoflush",
                    "yield_per"
                },
                execution_options,
                statement._execution_options,
            )

        get back the Options and refresh "_sa_orm_load_options" in the
        exec options dict w/ the Options as well

        """

        # common case is that no options we are looking for are
        # in either dictionary, so cancel for that first
        check_argnames = attrs.intersection(
            set(exec_options).union(statement_exec_options)
        )

        existing_options = exec_options.get(key, cls)

        if check_argnames:
            result = {}
            for argname in check_argnames:
                # option attributes are stored with a leading underscore
                local = "_" + argname
                # per-execution options take precedence over the
                # statement-level ones
                if argname in exec_options:
                    result[local] = exec_options[argname]
                elif argname in statement_exec_options:
                    result[local] = statement_exec_options[argname]

            new_options = existing_options + result
            exec_options = util.immutabledict().merge_with(
                exec_options, {key: new_options}
            )
            return new_options, exec_options

        else:
            return existing_options, exec_options

    if TYPE_CHECKING:

        def __getattr__(self, key: str) -> Any: ...

        def __setattr__(self, key: str, value: Any) -> None: ...

        def __delattr__(self, key: str) -> None: ...
+ + +class CacheableOptions(Options, HasCacheKey): + __slots__ = () + + @hybridmethod + def _gen_cache_key_inst(self, anon_map, bindparams): + return HasCacheKey._gen_cache_key(self, anon_map, bindparams) + + @_gen_cache_key_inst.classlevel + def _gen_cache_key(cls, anon_map, bindparams): + return (cls, ()) + + @hybridmethod + def _generate_cache_key(self): + return HasCacheKey._generate_cache_key_for_object(self) + + +class ExecutableOption(HasCopyInternals): + __slots__ = () + + _annotations = util.EMPTY_DICT + + __visit_name__ = "executable_option" + + _is_has_cache_key = False + + _is_core = True + + def _clone(self, **kw): + """Create a shallow copy of this ExecutableOption.""" + c = self.__class__.__new__(self.__class__) + c.__dict__ = dict(self.__dict__) # type: ignore + return c + + +class Executable(roles.StatementRole): + """Mark a :class:`_expression.ClauseElement` as supporting execution. + + :class:`.Executable` is a superclass for all "statement" types + of objects, including :func:`select`, :func:`delete`, :func:`update`, + :func:`insert`, :func:`text`. + + """ + + supports_execution: bool = True + _execution_options: _ImmutableExecuteOptions = util.EMPTY_DICT + _is_default_generator = False + _with_options: Tuple[ExecutableOption, ...] = () + _with_context_options: Tuple[ + Tuple[Callable[[CompileState], None], Any], ... 
+ ] = () + _compile_options: Optional[Union[Type[CacheableOptions], CacheableOptions]] + + _executable_traverse_internals = [ + ("_with_options", InternalTraversal.dp_executable_options), + ( + "_with_context_options", + ExtendedInternalTraversal.dp_with_context_options, + ), + ("_propagate_attrs", ExtendedInternalTraversal.dp_propagate_attrs), + ] + + is_select = False + is_update = False + is_insert = False + is_text = False + is_delete = False + is_dml = False + + if TYPE_CHECKING: + __visit_name__: str + + def _compile_w_cache( + self, + dialect: Dialect, + *, + compiled_cache: Optional[CompiledCacheType], + column_keys: List[str], + for_executemany: bool = False, + schema_translate_map: Optional[SchemaTranslateMapType] = None, + **kw: Any, + ) -> Tuple[ + Compiled, Optional[Sequence[BindParameter[Any]]], CacheStats + ]: ... + + def _execute_on_connection( + self, + connection: Connection, + distilled_params: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> CursorResult[Any]: ... + + def _execute_on_scalar( + self, + connection: Connection, + distilled_params: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> Any: ... + + @util.ro_non_memoized_property + def _all_selected_columns(self): + raise NotImplementedError() + + @property + def _effective_plugin_target(self) -> str: + return self.__visit_name__ + + @_generative + def options(self, *options: ExecutableOption) -> Self: + """Apply options to this statement. + + In the general sense, options are any kind of Python object + that can be interpreted by the SQL compiler for the statement. + These options can be consumed by specific dialects or specific kinds + of compilers. + + The most commonly known kind of option are the ORM level options + that apply "eager load" and other loading behaviors to an ORM + query. However, options can theoretically be used for many other + purposes. 
+ + For background on specific kinds of options for specific kinds of + statements, refer to the documentation for those option objects. + + .. versionchanged:: 1.4 - added :meth:`.Executable.options` to + Core statement objects towards the goal of allowing unified + Core / ORM querying capabilities. + + .. seealso:: + + :ref:`loading_columns` - refers to options specific to the usage + of ORM queries + + :ref:`relationship_loader_options` - refers to options specific + to the usage of ORM queries + + """ + self._with_options += tuple( + coercions.expect(roles.ExecutableOptionRole, opt) + for opt in options + ) + return self + + @_generative + def _set_compile_options(self, compile_options: CacheableOptions) -> Self: + """Assign the compile options to a new value. + + :param compile_options: appropriate CacheableOptions structure + + """ + + self._compile_options = compile_options + return self + + @_generative + def _update_compile_options(self, options: CacheableOptions) -> Self: + """update the _compile_options with new keys.""" + + assert self._compile_options is not None + self._compile_options += options + return self + + @_generative + def _add_context_option( + self, + callable_: Callable[[CompileState], None], + cache_args: Any, + ) -> Self: + """Add a context option to this statement. + + These are callable functions that will + be given the CompileState object upon compilation. + + A second argument cache_args is required, which will be combined with + the ``__code__`` identity of the function itself in order to produce a + cache key. 
+ + """ + self._with_context_options += ((callable_, cache_args),) + return self + + @overload + def execution_options( + self, + *, + compiled_cache: Optional[CompiledCacheType] = ..., + logging_token: str = ..., + isolation_level: IsolationLevel = ..., + no_parameters: bool = False, + stream_results: bool = False, + max_row_buffer: int = ..., + yield_per: int = ..., + insertmanyvalues_page_size: int = ..., + schema_translate_map: Optional[SchemaTranslateMapType] = ..., + populate_existing: bool = False, + autoflush: bool = False, + synchronize_session: SynchronizeSessionArgument = ..., + dml_strategy: DMLStrategyArgument = ..., + render_nulls: bool = ..., + is_delete_using: bool = ..., + is_update_from: bool = ..., + preserve_rowcount: bool = False, + **opt: Any, + ) -> Self: ... + + @overload + def execution_options(self, **opt: Any) -> Self: ... + + @_generative + def execution_options(self, **kw: Any) -> Self: + """Set non-SQL options for the statement which take effect during + execution. + + Execution options can be set at many scopes, including per-statement, + per-connection, or per execution, using methods such as + :meth:`_engine.Connection.execution_options` and parameters which + accept a dictionary of options such as + :paramref:`_engine.Connection.execute.execution_options` and + :paramref:`_orm.Session.execute.execution_options`. + + The primary characteristic of an execution option, as opposed to + other kinds of options such as ORM loader options, is that + **execution options never affect the compiled SQL of a query, only + things that affect how the SQL statement itself is invoked or how + results are fetched**. That is, execution options are not part of + what's accommodated by SQL compilation nor are they considered part of + the cached state of a statement. 
+ + The :meth:`_sql.Executable.execution_options` method is + :term:`generative`, as + is the case for the method as applied to the :class:`_engine.Engine` + and :class:`_orm.Query` objects, which means when the method is called, + a copy of the object is returned, which applies the given parameters to + that new copy, but leaves the original unchanged:: + + statement = select(table.c.x, table.c.y) + new_statement = statement.execution_options(my_option=True) + + An exception to this behavior is the :class:`_engine.Connection` + object, where the :meth:`_engine.Connection.execution_options` method + is explicitly **not** generative. + + The kinds of options that may be passed to + :meth:`_sql.Executable.execution_options` and other related methods and + parameter dictionaries include parameters that are explicitly consumed + by SQLAlchemy Core or ORM, as well as arbitrary keyword arguments not + defined by SQLAlchemy, which means the methods and/or parameter + dictionaries may be used for user-defined parameters that interact with + custom code, which may access the parameters using methods such as + :meth:`_sql.Executable.get_execution_options` and + :meth:`_engine.Connection.get_execution_options`, or within selected + event hooks using a dedicated ``execution_options`` event parameter + such as + :paramref:`_events.ConnectionEvents.before_execute.execution_options` + or :attr:`_orm.ORMExecuteState.execution_options`, e.g.:: + + from sqlalchemy import event + + @event.listens_for(some_engine, "before_execute") + def _process_opt(conn, statement, multiparams, params, execution_options): + "run a SQL function before invoking a statement" + + if execution_options.get("do_special_thing", False): + conn.exec_driver_sql("run_special_function()") + + Within the scope of options that are explicitly recognized by + SQLAlchemy, most apply to specific classes of objects and not others. 
+ The most common execution options include: + + * :paramref:`_engine.Connection.execution_options.isolation_level` - + sets the isolation level for a connection or a class of connections + via an :class:`_engine.Engine`. This option is accepted only + by :class:`_engine.Connection` or :class:`_engine.Engine`. + + * :paramref:`_engine.Connection.execution_options.stream_results` - + indicates results should be fetched using a server side cursor; + this option is accepted by :class:`_engine.Connection`, by the + :paramref:`_engine.Connection.execute.execution_options` parameter + on :meth:`_engine.Connection.execute`, and additionally by + :meth:`_sql.Executable.execution_options` on a SQL statement object, + as well as by ORM constructs like :meth:`_orm.Session.execute`. + + * :paramref:`_engine.Connection.execution_options.compiled_cache` - + indicates a dictionary that will serve as the + :ref:`SQL compilation cache ` + for a :class:`_engine.Connection` or :class:`_engine.Engine`, as + well as for ORM methods like :meth:`_orm.Session.execute`. + Can be passed as ``None`` to disable caching for statements. + This option is not accepted by + :meth:`_sql.Executable.execution_options` as it is inadvisable to + carry along a compilation cache within a statement object. + + * :paramref:`_engine.Connection.execution_options.schema_translate_map` + - a mapping of schema names used by the + :ref:`Schema Translate Map ` feature, accepted + by :class:`_engine.Connection`, :class:`_engine.Engine`, + :class:`_sql.Executable`, as well as by ORM constructs + like :meth:`_orm.Session.execute`. + + .. 
seealso:: + + :meth:`_engine.Connection.execution_options` + + :paramref:`_engine.Connection.execute.execution_options` + + :paramref:`_orm.Session.execute.execution_options` + + :ref:`orm_queryguide_execution_options` - documentation on all + ORM-specific execution options + + """ # noqa: E501 + if "isolation_level" in kw: + raise exc.ArgumentError( + "'isolation_level' execution option may only be specified " + "on Connection.execution_options(), or " + "per-engine using the isolation_level " + "argument to create_engine()." + ) + if "compiled_cache" in kw: + raise exc.ArgumentError( + "'compiled_cache' execution option may only be specified " + "on Connection.execution_options(), not per statement." + ) + self._execution_options = self._execution_options.union(kw) + return self + + def get_execution_options(self) -> _ExecuteOptions: + """Get the non-SQL options which will take effect during execution. + + .. versionadded:: 1.3 + + .. seealso:: + + :meth:`.Executable.execution_options` + """ + return self._execution_options + + +class SchemaEventTarget(event.EventTarget): + """Base class for elements that are the targets of :class:`.DDLEvents` + events. + + This includes :class:`.SchemaItem` as well as :class:`.SchemaType`. 
+ + """ + + dispatch: dispatcher[SchemaEventTarget] + + def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None: + """Associate with this SchemaEvent's parent object.""" + + def _set_parent_with_dispatch( + self, parent: SchemaEventTarget, **kw: Any + ) -> None: + self.dispatch.before_parent_attach(self, parent) + self._set_parent(parent, **kw) + self.dispatch.after_parent_attach(self, parent) + + +class SchemaVisitor(ClauseVisitor): + """Define the visiting for ``SchemaItem`` objects.""" + + __traverse_options__ = {"schema_visitor": True} + + +class _SentinelDefaultCharacterization(Enum): + NONE = "none" + UNKNOWN = "unknown" + CLIENTSIDE = "clientside" + SENTINEL_DEFAULT = "sentinel_default" + SERVERSIDE = "serverside" + IDENTITY = "identity" + SEQUENCE = "sequence" + + +class _SentinelColumnCharacterization(NamedTuple): + columns: Optional[Sequence[Column[Any]]] = None + is_explicit: bool = False + is_autoinc: bool = False + default_characterization: _SentinelDefaultCharacterization = ( + _SentinelDefaultCharacterization.NONE + ) + + +_COLKEY = TypeVar("_COLKEY", Union[None, str], str) + +_COL_co = TypeVar("_COL_co", bound="ColumnElement[Any]", covariant=True) +_COL = TypeVar("_COL", bound="KeyedColumnElement[Any]") + + +class _ColumnMetrics(Generic[_COL_co]): + __slots__ = ("column",) + + column: _COL_co + + def __init__( + self, collection: ColumnCollection[Any, _COL_co], col: _COL_co + ): + self.column = col + + # proxy_index being non-empty means it was initialized. 
+ # so we need to update it + pi = collection._proxy_index + if pi: + for eps_col in col._expanded_proxy_set: + pi[eps_col].add(self) + + def get_expanded_proxy_set(self): + return self.column._expanded_proxy_set + + def dispose(self, collection): + pi = collection._proxy_index + if not pi: + return + for col in self.column._expanded_proxy_set: + colset = pi.get(col, None) + if colset: + colset.discard(self) + if colset is not None and not colset: + del pi[col] + + def embedded( + self, + target_set: Union[ + Set[ColumnElement[Any]], FrozenSet[ColumnElement[Any]] + ], + ) -> bool: + expanded_proxy_set = self.column._expanded_proxy_set + for t in target_set.difference(expanded_proxy_set): + if not expanded_proxy_set.intersection(_expand_cloned([t])): + return False + return True + + +class ColumnCollection(Generic[_COLKEY, _COL_co]): + """Collection of :class:`_expression.ColumnElement` instances, + typically for + :class:`_sql.FromClause` objects. + + The :class:`_sql.ColumnCollection` object is most commonly available + as the :attr:`_schema.Table.c` or :attr:`_schema.Table.columns` collection + on the :class:`_schema.Table` object, introduced at + :ref:`metadata_tables_and_columns`. + + The :class:`_expression.ColumnCollection` has both mapping- and sequence- + like behaviors. A :class:`_expression.ColumnCollection` usually stores + :class:`_schema.Column` objects, which are then accessible both via mapping + style access as well as attribute access style. 
+ + To access :class:`_schema.Column` objects using ordinary attribute-style + access, specify the name like any other object attribute, such as below + a column named ``employee_name`` is accessed:: + + >>> employee_table.c.employee_name + + To access columns that have names with special characters or spaces, + index-style access is used, such as below which illustrates a column named + ``employee ' payment`` is accessed:: + + >>> employee_table.c["employee ' payment"] + + As the :class:`_sql.ColumnCollection` object provides a Python dictionary + interface, common dictionary method names like + :meth:`_sql.ColumnCollection.keys`, :meth:`_sql.ColumnCollection.values`, + and :meth:`_sql.ColumnCollection.items` are available, which means that + database columns that are keyed under these names also need to use indexed + access:: + + >>> employee_table.c["values"] + + + The name for which a :class:`_schema.Column` would be present is normally + that of the :paramref:`_schema.Column.key` parameter. In some contexts, + such as a :class:`_sql.Select` object that uses a label style set + using the :meth:`_sql.Select.set_label_style` method, a column of a certain + key may instead be represented under a particular label name such + as ``tablename_columnname``:: + + >>> from sqlalchemy import select, column, table + >>> from sqlalchemy import LABEL_STYLE_TABLENAME_PLUS_COL + >>> t = table("t", column("c")) + >>> stmt = select(t).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) + >>> subq = stmt.subquery() + >>> subq.c.t_c + + + :class:`.ColumnCollection` also indexes the columns in order and allows + them to be accessible by their integer position:: + + >>> cc[0] + Column('x', Integer(), table=None) + >>> cc[1] + Column('y', Integer(), table=None) + + .. versionadded:: 1.4 :class:`_expression.ColumnCollection` + allows integer-based + index access to the collection. 
+ + Iterating the collection yields the column expressions in order:: + + >>> list(cc) + [Column('x', Integer(), table=None), + Column('y', Integer(), table=None)] + + The base :class:`_expression.ColumnCollection` object can store + duplicates, which can + mean either two columns with the same key, in which case the column + returned by key access is **arbitrary**:: + + >>> x1, x2 = Column('x', Integer), Column('x', Integer) + >>> cc = ColumnCollection(columns=[(x1.name, x1), (x2.name, x2)]) + >>> list(cc) + [Column('x', Integer(), table=None), + Column('x', Integer(), table=None)] + >>> cc['x'] is x1 + False + >>> cc['x'] is x2 + True + + Or it can also mean the same column multiple times. These cases are + supported as :class:`_expression.ColumnCollection` + is used to represent the columns in + a SELECT statement which may include duplicates. + + A special subclass :class:`.DedupeColumnCollection` exists which instead + maintains SQLAlchemy's older behavior of not allowing duplicates; this + collection is used for schema level objects like :class:`_schema.Table` + and + :class:`.PrimaryKeyConstraint` where this deduping is helpful. The + :class:`.DedupeColumnCollection` class also has additional mutation methods + as the schema constructs have more use cases that require removal and + replacement of columns. + + .. versionchanged:: 1.4 :class:`_expression.ColumnCollection` + now stores duplicate + column keys as well as the same column in multiple positions. The + :class:`.DedupeColumnCollection` class is added to maintain the + former behavior in those cases where deduplication as well as + additional replace/remove operations are needed. 
+ + + """ + + __slots__ = "_collection", "_index", "_colset", "_proxy_index" + + _collection: List[Tuple[_COLKEY, _COL_co, _ColumnMetrics[_COL_co]]] + _index: Dict[Union[None, str, int], Tuple[_COLKEY, _COL_co]] + _proxy_index: Dict[ColumnElement[Any], Set[_ColumnMetrics[_COL_co]]] + _colset: Set[_COL_co] + + def __init__( + self, columns: Optional[Iterable[Tuple[_COLKEY, _COL_co]]] = None + ): + object.__setattr__(self, "_colset", set()) + object.__setattr__(self, "_index", {}) + object.__setattr__( + self, "_proxy_index", collections.defaultdict(util.OrderedSet) + ) + object.__setattr__(self, "_collection", []) + if columns: + self._initial_populate(columns) + + @util.preload_module("sqlalchemy.sql.elements") + def __clause_element__(self) -> ClauseList: + elements = util.preloaded.sql_elements + + return elements.ClauseList( + _literal_as_text_role=roles.ColumnsClauseRole, + group=False, + *self._all_columns, + ) + + def _initial_populate( + self, iter_: Iterable[Tuple[_COLKEY, _COL_co]] + ) -> None: + self._populate_separate_keys(iter_) + + @property + def _all_columns(self) -> List[_COL_co]: + return [col for (_, col, _) in self._collection] + + def keys(self) -> List[_COLKEY]: + """Return a sequence of string key names for all columns in this + collection.""" + return [k for (k, _, _) in self._collection] + + def values(self) -> List[_COL_co]: + """Return a sequence of :class:`_sql.ColumnClause` or + :class:`_schema.Column` objects for all columns in this + collection.""" + return [col for (_, col, _) in self._collection] + + def items(self) -> List[Tuple[_COLKEY, _COL_co]]: + """Return a sequence of (key, column) tuples for all columns in this + collection each consisting of a string key name and a + :class:`_sql.ColumnClause` or + :class:`_schema.Column` object. 
+ """ + + return [(k, col) for (k, col, _) in self._collection] + + def __bool__(self) -> bool: + return bool(self._collection) + + def __len__(self) -> int: + return len(self._collection) + + def __iter__(self) -> Iterator[_COL_co]: + # turn to a list first to maintain over a course of changes + return iter([col for _, col, _ in self._collection]) + + @overload + def __getitem__(self, key: Union[str, int]) -> _COL_co: ... + + @overload + def __getitem__( + self, key: Tuple[Union[str, int], ...] + ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ... + + @overload + def __getitem__( + self, key: slice + ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ... + + def __getitem__( + self, key: Union[str, int, slice, Tuple[Union[str, int], ...]] + ) -> Union[ReadOnlyColumnCollection[_COLKEY, _COL_co], _COL_co]: + try: + if isinstance(key, (tuple, slice)): + if isinstance(key, slice): + cols = ( + (sub_key, col) + for (sub_key, col, _) in self._collection[key] + ) + else: + cols = (self._index[sub_key] for sub_key in key) + + return ColumnCollection(cols).as_readonly() + else: + return self._index[key][1] + except KeyError as err: + if isinstance(err.args[0], int): + raise IndexError(err.args[0]) from err + else: + raise + + def __getattr__(self, key: str) -> _COL_co: + try: + return self._index[key][1] + except KeyError as err: + raise AttributeError(key) from err + + def __contains__(self, key: str) -> bool: + if key not in self._index: + if not isinstance(key, str): + raise exc.ArgumentError( + "__contains__ requires a string argument" + ) + return False + else: + return True + + def compare(self, other: ColumnCollection[Any, Any]) -> bool: + """Compare this :class:`_expression.ColumnCollection` to another + based on the names of the keys""" + + for l, r in zip_longest(self, other): + if l is not r: + return False + else: + return True + + def __eq__(self, other: Any) -> bool: + return self.compare(other) + + def get( + self, key: str, default: Optional[_COL_co] = None 
+ ) -> Optional[_COL_co]: + """Get a :class:`_sql.ColumnClause` or :class:`_schema.Column` object + based on a string key name from this + :class:`_expression.ColumnCollection`.""" + + if key in self._index: + return self._index[key][1] + else: + return default + + def __str__(self) -> str: + return "%s(%s)" % ( + self.__class__.__name__, + ", ".join(str(c) for c in self), + ) + + def __setitem__(self, key: str, value: Any) -> NoReturn: + raise NotImplementedError() + + def __delitem__(self, key: str) -> NoReturn: + raise NotImplementedError() + + def __setattr__(self, key: str, obj: Any) -> NoReturn: + raise NotImplementedError() + + def clear(self) -> NoReturn: + """Dictionary clear() is not implemented for + :class:`_sql.ColumnCollection`.""" + raise NotImplementedError() + + def remove(self, column: Any) -> None: + raise NotImplementedError() + + def update(self, iter_: Any) -> NoReturn: + """Dictionary update() is not implemented for + :class:`_sql.ColumnCollection`.""" + raise NotImplementedError() + + # https://github.com/python/mypy/issues/4266 + __hash__ = None # type: ignore + + def _populate_separate_keys( + self, iter_: Iterable[Tuple[_COLKEY, _COL_co]] + ) -> None: + """populate from an iterator of (key, column)""" + + self._collection[:] = collection = [ + (k, c, _ColumnMetrics(self, c)) for k, c in iter_ + ] + self._colset.update(c._deannotate() for _, c, _ in collection) + self._index.update( + {idx: (k, c) for idx, (k, c, _) in enumerate(collection)} + ) + self._index.update({k: (k, col) for k, col, _ in reversed(collection)}) + + def add( + self, column: ColumnElement[Any], key: Optional[_COLKEY] = None + ) -> None: + """Add a column to this :class:`_sql.ColumnCollection`. + + .. note:: + + This method is **not normally used by user-facing code**, as the + :class:`_sql.ColumnCollection` is usually part of an existing + object such as a :class:`_schema.Table`. 
To add a + :class:`_schema.Column` to an existing :class:`_schema.Table` + object, use the :meth:`_schema.Table.append_column` method. + + """ + colkey: _COLKEY + + if key is None: + colkey = column.key # type: ignore + else: + colkey = key + + l = len(self._collection) + + # don't really know how this part is supposed to work w/ the + # covariant thing + + _column = cast(_COL_co, column) + + self._collection.append( + (colkey, _column, _ColumnMetrics(self, _column)) + ) + self._colset.add(_column._deannotate()) + self._index[l] = (colkey, _column) + if colkey not in self._index: + self._index[colkey] = (colkey, _column) + + def __getstate__(self) -> Dict[str, Any]: + return { + "_collection": [(k, c) for k, c, _ in self._collection], + "_index": self._index, + } + + def __setstate__(self, state: Dict[str, Any]) -> None: + object.__setattr__(self, "_index", state["_index"]) + object.__setattr__( + self, "_proxy_index", collections.defaultdict(util.OrderedSet) + ) + object.__setattr__( + self, + "_collection", + [ + (k, c, _ColumnMetrics(self, c)) + for (k, c) in state["_collection"] + ], + ) + object.__setattr__( + self, "_colset", {col for k, col, _ in self._collection} + ) + + def contains_column(self, col: ColumnElement[Any]) -> bool: + """Checks if a column object exists in this collection""" + if col not in self._colset: + if isinstance(col, str): + raise exc.ArgumentError( + "contains_column cannot be used with string arguments. " + "Use ``col_name in table.c`` instead." + ) + return False + else: + return True + + def as_readonly(self) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: + """Return a "read only" form of this + :class:`_sql.ColumnCollection`.""" + + return ReadOnlyColumnCollection(self) + + def _init_proxy_index(self): + """populate the "proxy index", if empty. + + proxy index is added in 2.0 to provide more efficient operation + for the corresponding_column() method. 
+ + For reasons of both time to construct new .c collections as well as + memory conservation for large numbers of large .c collections, the + proxy_index is only filled if corresponding_column() is called. once + filled it stays that way, and new _ColumnMetrics objects created after + that point will populate it with new data. Note this case would be + unusual, if not nonexistent, as it means a .c collection is being + mutated after corresponding_column() were used, however it is tested in + test/base/test_utils.py. + + """ + pi = self._proxy_index + if pi: + return + + for _, _, metrics in self._collection: + eps = metrics.column._expanded_proxy_set + + for eps_col in eps: + pi[eps_col].add(metrics) + + def corresponding_column( + self, column: _COL, require_embedded: bool = False + ) -> Optional[Union[_COL, _COL_co]]: + """Given a :class:`_expression.ColumnElement`, return the exported + :class:`_expression.ColumnElement` object from this + :class:`_expression.ColumnCollection` + which corresponds to that original :class:`_expression.ColumnElement` + via a common + ancestor column. + + :param column: the target :class:`_expression.ColumnElement` + to be matched. + + :param require_embedded: only return corresponding columns for + the given :class:`_expression.ColumnElement`, if the given + :class:`_expression.ColumnElement` + is actually present within a sub-element + of this :class:`_expression.Selectable`. + Normally the column will match if + it merely shares a common ancestor with one of the exported + columns of this :class:`_expression.Selectable`. + + .. seealso:: + + :meth:`_expression.Selectable.corresponding_column` + - invokes this method + against the collection returned by + :attr:`_expression.Selectable.exported_columns`. + + .. versionchanged:: 1.4 the implementation for ``corresponding_column`` + was moved onto the :class:`_expression.ColumnCollection` itself. 
+ + """ + # TODO: cython candidate + + # don't dig around if the column is locally present + if column in self._colset: + return column + + selected_intersection, selected_metrics = None, None + target_set = column.proxy_set + + pi = self._proxy_index + if not pi: + self._init_proxy_index() + + for current_metrics in ( + mm for ts in target_set if ts in pi for mm in pi[ts] + ): + if not require_embedded or current_metrics.embedded(target_set): + if selected_metrics is None: + # no corresponding column yet, pick this one. + selected_metrics = current_metrics + continue + + current_intersection = target_set.intersection( + current_metrics.column._expanded_proxy_set + ) + if selected_intersection is None: + selected_intersection = target_set.intersection( + selected_metrics.column._expanded_proxy_set + ) + + if len(current_intersection) > len(selected_intersection): + # 'current' has a larger field of correspondence than + # 'selected'. i.e. selectable.c.a1_x->a1.c.x->table.c.x + # matches a1.c.x->table.c.x better than + # selectable.c.x->table.c.x does. + + selected_metrics = current_metrics + selected_intersection = current_intersection + elif current_intersection == selected_intersection: + # they have the same field of correspondence. see + # which proxy_set has fewer columns in it, which + # indicates a closer relationship with the root + # column. 
Also take into account the "weight" + # attribute which CompoundSelect() uses to give + # higher precedence to columns based on vertical + # position in the compound statement, and discard + # columns that have no reference to the target + # column (also occurs with CompoundSelect) + + selected_col_distance = sum( + [ + sc._annotations.get("weight", 1) + for sc in ( + selected_metrics.column._uncached_proxy_list() + ) + if sc.shares_lineage(column) + ], + ) + current_col_distance = sum( + [ + sc._annotations.get("weight", 1) + for sc in ( + current_metrics.column._uncached_proxy_list() + ) + if sc.shares_lineage(column) + ], + ) + if current_col_distance < selected_col_distance: + selected_metrics = current_metrics + selected_intersection = current_intersection + + return selected_metrics.column if selected_metrics else None + + +_NAMEDCOL = TypeVar("_NAMEDCOL", bound="NamedColumn[Any]") + + +class DedupeColumnCollection(ColumnCollection[str, _NAMEDCOL]): + """A :class:`_expression.ColumnCollection` + that maintains deduplicating behavior. + + This is useful by schema level objects such as :class:`_schema.Table` and + :class:`.PrimaryKeyConstraint`. The collection includes more + sophisticated mutator methods as well to suit schema objects which + require mutable column collections. + + .. 
versionadded:: 1.4 + + """ + + def add( + self, column: ColumnElement[Any], key: Optional[str] = None + ) -> None: + named_column = cast(_NAMEDCOL, column) + if key is not None and named_column.key != key: + raise exc.ArgumentError( + "DedupeColumnCollection requires columns be under " + "the same key as their .key" + ) + key = named_column.key + + if key is None: + raise exc.ArgumentError( + "Can't add unnamed column to column collection" + ) + + if key in self._index: + existing = self._index[key][1] + + if existing is named_column: + return + + self.replace(named_column) + + # pop out memoized proxy_set as this + # operation may very well be occurring + # in a _make_proxy operation + util.memoized_property.reset(named_column, "proxy_set") + else: + self._append_new_column(key, named_column) + + def _append_new_column(self, key: str, named_column: _NAMEDCOL) -> None: + l = len(self._collection) + self._collection.append( + (key, named_column, _ColumnMetrics(self, named_column)) + ) + self._colset.add(named_column._deannotate()) + self._index[l] = (key, named_column) + self._index[key] = (key, named_column) + + def _populate_separate_keys( + self, iter_: Iterable[Tuple[str, _NAMEDCOL]] + ) -> None: + """populate from an iterator of (key, column)""" + cols = list(iter_) + + replace_col = [] + for k, col in cols: + if col.key != k: + raise exc.ArgumentError( + "DedupeColumnCollection requires columns be under " + "the same key as their .key" + ) + if col.name in self._index and col.key != col.name: + replace_col.append(col) + elif col.key in self._index: + replace_col.append(col) + else: + self._index[k] = (k, col) + self._collection.append((k, col, _ColumnMetrics(self, col))) + self._colset.update(c._deannotate() for (k, c, _) in self._collection) + + self._index.update( + (idx, (k, c)) for idx, (k, c, _) in enumerate(self._collection) + ) + for col in replace_col: + self.replace(col) + + def extend(self, iter_: Iterable[_NAMEDCOL]) -> None: + 
self._populate_separate_keys((col.key, col) for col in iter_) + + def remove(self, column: _NAMEDCOL) -> None: + if column not in self._colset: + raise ValueError( + "Can't remove column %r; column is not in this collection" + % column + ) + del self._index[column.key] + self._colset.remove(column) + self._collection[:] = [ + (k, c, metrics) + for (k, c, metrics) in self._collection + if c is not column + ] + for metrics in self._proxy_index.get(column, ()): + metrics.dispose(self) + + self._index.update( + {idx: (k, col) for idx, (k, col, _) in enumerate(self._collection)} + ) + # delete higher index + del self._index[len(self._collection)] + + def replace( + self, + column: _NAMEDCOL, + extra_remove: Optional[Iterable[_NAMEDCOL]] = None, + ) -> None: + """add the given column to this collection, removing unaliased + versions of this column as well as existing columns with the + same key. + + e.g.:: + + t = Table('sometable', metadata, Column('col1', Integer)) + t.columns.replace(Column('col1', Integer, key='columnone')) + + will remove the original 'col1' from the collection, and add + the new column under the name 'columnname'. + + Used by schema.Column to override columns during table reflection. 
+ + """ + + if extra_remove: + remove_col = set(extra_remove) + else: + remove_col = set() + # remove up to two columns based on matches of name as well as key + if column.name in self._index and column.key != column.name: + other = self._index[column.name][1] + if other.name == other.key: + remove_col.add(other) + + if column.key in self._index: + remove_col.add(self._index[column.key][1]) + + if not remove_col: + self._append_new_column(column.key, column) + return + new_cols: List[Tuple[str, _NAMEDCOL, _ColumnMetrics[_NAMEDCOL]]] = [] + replaced = False + for k, col, metrics in self._collection: + if col in remove_col: + if not replaced: + replaced = True + new_cols.append( + (column.key, column, _ColumnMetrics(self, column)) + ) + else: + new_cols.append((k, col, metrics)) + + if remove_col: + self._colset.difference_update(remove_col) + + for rc in remove_col: + for metrics in self._proxy_index.get(rc, ()): + metrics.dispose(self) + + if not replaced: + new_cols.append((column.key, column, _ColumnMetrics(self, column))) + + self._colset.add(column._deannotate()) + self._collection[:] = new_cols + + self._index.clear() + + self._index.update( + {idx: (k, col) for idx, (k, col, _) in enumerate(self._collection)} + ) + self._index.update({k: (k, col) for (k, col, _) in self._collection}) + + +class ReadOnlyColumnCollection( + util.ReadOnlyContainer, ColumnCollection[_COLKEY, _COL_co] +): + __slots__ = ("_parent",) + + def __init__(self, collection): + object.__setattr__(self, "_parent", collection) + object.__setattr__(self, "_colset", collection._colset) + object.__setattr__(self, "_index", collection._index) + object.__setattr__(self, "_collection", collection._collection) + object.__setattr__(self, "_proxy_index", collection._proxy_index) + + def __getstate__(self): + return {"_parent": self._parent} + + def __setstate__(self, state): + parent = state["_parent"] + self.__init__(parent) # type: ignore + + def add(self, column: Any, key: Any = ...) 
-> Any: + self._readonly() + + def extend(self, elements: Any) -> NoReturn: + self._readonly() + + def remove(self, item: Any) -> NoReturn: + self._readonly() + + +class ColumnSet(util.OrderedSet["ColumnClause[Any]"]): + def contains_column(self, col): + return col in self + + def extend(self, cols): + for col in cols: + self.add(col) + + def __eq__(self, other): + l = [] + for c in other: + for local in self: + if c.shares_lineage(local): + l.append(c == local) + return elements.and_(*l) + + def __hash__(self): + return hash(tuple(x for x in self)) + + +def _entity_namespace( + entity: Union[_HasEntityNamespace, ExternallyTraversible] +) -> _EntityNamespace: + """Return the nearest .entity_namespace for the given entity. + + If not immediately available, does an iterate to find a sub-element + that has one, if any. + + """ + try: + return cast(_HasEntityNamespace, entity).entity_namespace + except AttributeError: + for elem in visitors.iterate(cast(ExternallyTraversible, entity)): + if _is_has_entity_namespace(elem): + return elem.entity_namespace + else: + raise + + +def _entity_namespace_key( + entity: Union[_HasEntityNamespace, ExternallyTraversible], + key: str, + default: Union[SQLCoreOperations[Any], _NoArg] = NO_ARG, +) -> SQLCoreOperations[Any]: + """Return an entry from an entity_namespace. + + + Raises :class:`_exc.InvalidRequestError` rather than attribute error + on not found. 
+ + """ + + try: + ns = _entity_namespace(entity) + if default is not NO_ARG: + return getattr(ns, key, default) + else: + return getattr(ns, key) # type: ignore + except AttributeError as err: + raise exc.InvalidRequestError( + 'Entity namespace for "%s" has no property "%s"' % (entity, key) + ) from err diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/cache_key.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/cache_key.py new file mode 100644 index 0000000..1172d3c --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/cache_key.py @@ -0,0 +1,1057 @@ +# sql/cache_key.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +from __future__ import annotations + +import enum +from itertools import zip_longest +import typing +from typing import Any +from typing import Callable +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import MutableMapping +from typing import NamedTuple +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Union + +from .visitors import anon_map +from .visitors import HasTraversalDispatch +from .visitors import HasTraverseInternals +from .visitors import InternalTraversal +from .visitors import prefix_anon_map +from .. 
import util +from ..inspection import inspect +from ..util import HasMemoized +from ..util.typing import Literal +from ..util.typing import Protocol + +if typing.TYPE_CHECKING: + from .elements import BindParameter + from .elements import ClauseElement + from .elements import ColumnElement + from .visitors import _TraverseInternalsType + from ..engine.interfaces import _CoreSingleExecuteParams + + +class _CacheKeyTraversalDispatchType(Protocol): + def __call__( + s, self: HasCacheKey, visitor: _CacheKeyTraversal + ) -> _CacheKeyTraversalDispatchTypeReturn: ... + + +class CacheConst(enum.Enum): + NO_CACHE = 0 + + +NO_CACHE = CacheConst.NO_CACHE + + +_CacheKeyTraversalType = Union[ + "_TraverseInternalsType", Literal[CacheConst.NO_CACHE], Literal[None] +] + + +class CacheTraverseTarget(enum.Enum): + CACHE_IN_PLACE = 0 + CALL_GEN_CACHE_KEY = 1 + STATIC_CACHE_KEY = 2 + PROPAGATE_ATTRS = 3 + ANON_NAME = 4 + + +( + CACHE_IN_PLACE, + CALL_GEN_CACHE_KEY, + STATIC_CACHE_KEY, + PROPAGATE_ATTRS, + ANON_NAME, +) = tuple(CacheTraverseTarget) + +_CacheKeyTraversalDispatchTypeReturn = Sequence[ + Tuple[ + str, + Any, + Union[ + Callable[..., Tuple[Any, ...]], + CacheTraverseTarget, + InternalTraversal, + ], + ] +] + + +class HasCacheKey: + """Mixin for objects which can produce a cache key. + + This class is usually in a hierarchy that starts with the + :class:`.HasTraverseInternals` base, but this is optional. Currently, + the class should be able to work on its own without including + :class:`.HasTraverseInternals`. + + .. seealso:: + + :class:`.CacheKey` + + :ref:`sql_caching` + + """ + + __slots__ = () + + _cache_key_traversal: _CacheKeyTraversalType = NO_CACHE + + _is_has_cache_key = True + + _hierarchy_supports_caching = True + """private attribute which may be set to False to prevent the + inherit_cache warning from being emitted for a hierarchy of subclasses. + + Currently applies to the :class:`.ExecutableDDLElement` hierarchy which + does not implement caching. 
+ + """ + + inherit_cache: Optional[bool] = None + """Indicate if this :class:`.HasCacheKey` instance should make use of the + cache key generation scheme used by its immediate superclass. + + The attribute defaults to ``None``, which indicates that a construct has + not yet taken into account whether or not its appropriate for it to + participate in caching; this is functionally equivalent to setting the + value to ``False``, except that a warning is also emitted. + + This flag can be set to ``True`` on a particular class, if the SQL that + corresponds to the object does not change based on attributes which + are local to this class, and not its superclass. + + .. seealso:: + + :ref:`compilerext_caching` - General guideslines for setting the + :attr:`.HasCacheKey.inherit_cache` attribute for third-party or user + defined SQL constructs. + + """ + + __slots__ = () + + _generated_cache_key_traversal: Any + + @classmethod + def _generate_cache_attrs( + cls, + ) -> Union[_CacheKeyTraversalDispatchType, Literal[CacheConst.NO_CACHE]]: + """generate cache key dispatcher for a new class. + + This sets the _generated_cache_key_traversal attribute once called + so should only be called once per class. + + """ + inherit_cache = cls.__dict__.get("inherit_cache", None) + inherit = bool(inherit_cache) + + if inherit: + _cache_key_traversal = getattr(cls, "_cache_key_traversal", None) + if _cache_key_traversal is None: + try: + assert issubclass(cls, HasTraverseInternals) + _cache_key_traversal = cls._traverse_internals + except AttributeError: + cls._generated_cache_key_traversal = NO_CACHE + return NO_CACHE + + assert _cache_key_traversal is not NO_CACHE, ( + f"class {cls} has _cache_key_traversal=NO_CACHE, " + "which conflicts with inherit_cache=True" + ) + + # TODO: wouldn't we instead get this from our superclass? + # also, our superclass may not have this yet, but in any case, + # we'd generate for the superclass that has it. 
this is a little + # more complicated, so for the moment this is a little less + # efficient on startup but simpler. + return _cache_key_traversal_visitor.generate_dispatch( + cls, + _cache_key_traversal, + "_generated_cache_key_traversal", + ) + else: + _cache_key_traversal = cls.__dict__.get( + "_cache_key_traversal", None + ) + if _cache_key_traversal is None: + _cache_key_traversal = cls.__dict__.get( + "_traverse_internals", None + ) + if _cache_key_traversal is None: + cls._generated_cache_key_traversal = NO_CACHE + if ( + inherit_cache is None + and cls._hierarchy_supports_caching + ): + util.warn( + "Class %s will not make use of SQL compilation " + "caching as it does not set the 'inherit_cache' " + "attribute to ``True``. This can have " + "significant performance implications including " + "some performance degradations in comparison to " + "prior SQLAlchemy versions. Set this attribute " + "to True if this object can make use of the cache " + "key generated by the superclass. Alternatively, " + "this attribute may be set to False which will " + "disable this warning." % (cls.__name__), + code="cprf", + ) + return NO_CACHE + + return _cache_key_traversal_visitor.generate_dispatch( + cls, + _cache_key_traversal, + "_generated_cache_key_traversal", + ) + + @util.preload_module("sqlalchemy.sql.elements") + def _gen_cache_key( + self, anon_map: anon_map, bindparams: List[BindParameter[Any]] + ) -> Optional[Tuple[Any, ...]]: + """return an optional cache key. + + The cache key is a tuple which can contain any series of + objects that are hashable and also identifies + this object uniquely within the presence of a larger SQL expression + or statement, for the purposes of caching the resulting query. + + The cache key should be based on the SQL compiled structure that would + ultimately be produced. 
That is, two structures that are composed in + exactly the same way should produce the same cache key; any difference + in the structures that would affect the SQL string or the type handlers + should result in a different cache key. + + If a structure cannot produce a useful cache key, the NO_CACHE + symbol should be added to the anon_map and the method should + return None. + + """ + + cls = self.__class__ + + id_, found = anon_map.get_anon(self) + if found: + return (id_, cls) + + dispatcher: Union[ + Literal[CacheConst.NO_CACHE], + _CacheKeyTraversalDispatchType, + ] + + try: + dispatcher = cls.__dict__["_generated_cache_key_traversal"] + except KeyError: + # traversals.py -> _preconfigure_traversals() + # may be used to run these ahead of time, but + # is not enabled right now. + # this block will generate any remaining dispatchers. + dispatcher = cls._generate_cache_attrs() + + if dispatcher is NO_CACHE: + anon_map[NO_CACHE] = True + return None + + result: Tuple[Any, ...] = (id_, cls) + + # inline of _cache_key_traversal_visitor.run_generated_dispatch() + + for attrname, obj, meth in dispatcher( + self, _cache_key_traversal_visitor + ): + if obj is not None: + # TODO: see if C code can help here as Python lacks an + # efficient switch construct + + if meth is STATIC_CACHE_KEY: + sck = obj._static_cache_key + if sck is NO_CACHE: + anon_map[NO_CACHE] = True + return None + result += (attrname, sck) + elif meth is ANON_NAME: + elements = util.preloaded.sql_elements + if isinstance(obj, elements._anonymous_label): + obj = obj.apply_map(anon_map) # type: ignore + result += (attrname, obj) + elif meth is CALL_GEN_CACHE_KEY: + result += ( + attrname, + obj._gen_cache_key(anon_map, bindparams), + ) + + # remaining cache functions are against + # Python tuples, dicts, lists, etc. 
so we can skip + # if they are empty + elif obj: + if meth is CACHE_IN_PLACE: + result += (attrname, obj) + elif meth is PROPAGATE_ATTRS: + result += ( + attrname, + obj["compile_state_plugin"], + ( + obj["plugin_subject"]._gen_cache_key( + anon_map, bindparams + ) + if obj["plugin_subject"] + else None + ), + ) + elif meth is InternalTraversal.dp_annotations_key: + # obj is here is the _annotations dict. Table uses + # a memoized version of it. however in other cases, + # we generate it given anon_map as we may be from a + # Join, Aliased, etc. + # see #8790 + + if self._gen_static_annotations_cache_key: # type: ignore # noqa: E501 + result += self._annotations_cache_key # type: ignore # noqa: E501 + else: + result += self._gen_annotations_cache_key(anon_map) # type: ignore # noqa: E501 + + elif ( + meth is InternalTraversal.dp_clauseelement_list + or meth is InternalTraversal.dp_clauseelement_tuple + or meth + is InternalTraversal.dp_memoized_select_entities + ): + result += ( + attrname, + tuple( + [ + elem._gen_cache_key(anon_map, bindparams) + for elem in obj + ] + ), + ) + else: + result += meth( # type: ignore + attrname, obj, self, anon_map, bindparams + ) + return result + + def _generate_cache_key(self) -> Optional[CacheKey]: + """return a cache key. + + The cache key is a tuple which can contain any series of + objects that are hashable and also identifies + this object uniquely within the presence of a larger SQL expression + or statement, for the purposes of caching the resulting query. + + The cache key should be based on the SQL compiled structure that would + ultimately be produced. That is, two structures that are composed in + exactly the same way should produce the same cache key; any difference + in the structures that would affect the SQL string or the type handlers + should result in a different cache key. 
+ + The cache key returned by this method is an instance of + :class:`.CacheKey`, which consists of a tuple representing the + cache key, as well as a list of :class:`.BindParameter` objects + which are extracted from the expression. While two expressions + that produce identical cache key tuples will themselves generate + identical SQL strings, the list of :class:`.BindParameter` objects + indicates the bound values which may have different values in + each one; these bound parameters must be consulted in order to + execute the statement with the correct parameters. + + a :class:`_expression.ClauseElement` structure that does not implement + a :meth:`._gen_cache_key` method and does not implement a + :attr:`.traverse_internals` attribute will not be cacheable; when + such an element is embedded into a larger structure, this method + will return None, indicating no cache key is available. + + """ + + bindparams: List[BindParameter[Any]] = [] + + _anon_map = anon_map() + key = self._gen_cache_key(_anon_map, bindparams) + if NO_CACHE in _anon_map: + return None + else: + assert key is not None + return CacheKey(key, bindparams) + + @classmethod + def _generate_cache_key_for_object( + cls, obj: HasCacheKey + ) -> Optional[CacheKey]: + bindparams: List[BindParameter[Any]] = [] + + _anon_map = anon_map() + key = obj._gen_cache_key(_anon_map, bindparams) + if NO_CACHE in _anon_map: + return None + else: + assert key is not None + return CacheKey(key, bindparams) + + +class HasCacheKeyTraverse(HasTraverseInternals, HasCacheKey): + pass + + +class MemoizedHasCacheKey(HasCacheKey, HasMemoized): + __slots__ = () + + @HasMemoized.memoized_instancemethod + def _generate_cache_key(self) -> Optional[CacheKey]: + return HasCacheKey._generate_cache_key(self) + + +class SlotsMemoizedHasCacheKey(HasCacheKey, util.MemoizedSlots): + __slots__ = () + + def _memoized_method__generate_cache_key(self) -> Optional[CacheKey]: + return HasCacheKey._generate_cache_key(self) + + +class 
CacheKey(NamedTuple): + """The key used to identify a SQL statement construct in the + SQL compilation cache. + + .. seealso:: + + :ref:`sql_caching` + + """ + + key: Tuple[Any, ...] + bindparams: Sequence[BindParameter[Any]] + + # can't set __hash__ attribute because it interferes + # with namedtuple + # can't use "if not TYPE_CHECKING" because mypy rejects it + # inside of a NamedTuple + def __hash__(self) -> Optional[int]: # type: ignore + """CacheKey itself is not hashable - hash the .key portion""" + return None + + def to_offline_string( + self, + statement_cache: MutableMapping[Any, str], + statement: ClauseElement, + parameters: _CoreSingleExecuteParams, + ) -> str: + """Generate an "offline string" form of this :class:`.CacheKey` + + The "offline string" is basically the string SQL for the + statement plus a repr of the bound parameter values in series. + Whereas the :class:`.CacheKey` object is dependent on in-memory + identities in order to work as a cache key, the "offline" version + is suitable for a cache that will work for other processes as well. + + The given ``statement_cache`` is a dictionary-like object where the + string form of the statement itself will be cached. This dictionary + should be in a longer lived scope in order to reduce the time spent + stringifying statements. 
+ + + """ + if self.key not in statement_cache: + statement_cache[self.key] = sql_str = str(statement) + else: + sql_str = statement_cache[self.key] + + if not self.bindparams: + param_tuple = tuple(parameters[key] for key in sorted(parameters)) + else: + param_tuple = tuple( + parameters.get(bindparam.key, bindparam.value) + for bindparam in self.bindparams + ) + + return repr((sql_str, param_tuple)) + + def __eq__(self, other: Any) -> bool: + return bool(self.key == other.key) + + def __ne__(self, other: Any) -> bool: + return not (self.key == other.key) + + @classmethod + def _diff_tuples(cls, left: CacheKey, right: CacheKey) -> str: + ck1 = CacheKey(left, []) + ck2 = CacheKey(right, []) + return ck1._diff(ck2) + + def _whats_different(self, other: CacheKey) -> Iterator[str]: + k1 = self.key + k2 = other.key + + stack: List[int] = [] + pickup_index = 0 + while True: + s1, s2 = k1, k2 + for idx in stack: + s1 = s1[idx] + s2 = s2[idx] + + for idx, (e1, e2) in enumerate(zip_longest(s1, s2)): + if idx < pickup_index: + continue + if e1 != e2: + if isinstance(e1, tuple) and isinstance(e2, tuple): + stack.append(idx) + break + else: + yield "key%s[%d]: %s != %s" % ( + "".join("[%d]" % id_ for id_ in stack), + idx, + e1, + e2, + ) + else: + pickup_index = stack.pop(-1) + break + + def _diff(self, other: CacheKey) -> str: + return ", ".join(self._whats_different(other)) + + def __str__(self) -> str: + stack: List[Union[Tuple[Any, ...], HasCacheKey]] = [self.key] + + output = [] + sentinel = object() + indent = -1 + while stack: + elem = stack.pop(0) + if elem is sentinel: + output.append((" " * (indent * 2)) + "),") + indent -= 1 + elif isinstance(elem, tuple): + if not elem: + output.append((" " * ((indent + 1) * 2)) + "()") + else: + indent += 1 + stack = list(elem) + [sentinel] + stack + output.append((" " * (indent * 2)) + "(") + else: + if isinstance(elem, HasCacheKey): + repr_ = "<%s object at %s>" % ( + type(elem).__name__, + hex(id(elem)), + ) + else: + repr_ = 
repr(elem) + output.append((" " * (indent * 2)) + " " + repr_ + ", ") + + return "CacheKey(key=%s)" % ("\n".join(output),) + + def _generate_param_dict(self) -> Dict[str, Any]: + """used for testing""" + + _anon_map = prefix_anon_map() + return {b.key % _anon_map: b.effective_value for b in self.bindparams} + + @util.preload_module("sqlalchemy.sql.elements") + def _apply_params_to_element( + self, original_cache_key: CacheKey, target_element: ColumnElement[Any] + ) -> ColumnElement[Any]: + if target_element._is_immutable or original_cache_key is self: + return target_element + + elements = util.preloaded.sql_elements + return elements._OverrideBinds( + target_element, self.bindparams, original_cache_key.bindparams + ) + + +def _ad_hoc_cache_key_from_args( + tokens: Tuple[Any, ...], + traverse_args: Iterable[Tuple[str, InternalTraversal]], + args: Iterable[Any], +) -> Tuple[Any, ...]: + """a quick cache key generator used by reflection.flexi_cache.""" + bindparams: List[BindParameter[Any]] = [] + + _anon_map = anon_map() + + tup = tokens + + for (attrname, sym), arg in zip(traverse_args, args): + key = sym.name + visit_key = key.replace("dp_", "visit_") + + if arg is None: + tup += (attrname, None) + continue + + meth = getattr(_cache_key_traversal_visitor, visit_key) + if meth is CACHE_IN_PLACE: + tup += (attrname, arg) + elif meth in ( + CALL_GEN_CACHE_KEY, + STATIC_CACHE_KEY, + ANON_NAME, + PROPAGATE_ATTRS, + ): + raise NotImplementedError( + f"Haven't implemented symbol {meth} for ad-hoc key from args" + ) + else: + tup += meth(attrname, arg, None, _anon_map, bindparams) + return tup + + +class _CacheKeyTraversal(HasTraversalDispatch): + # very common elements are inlined into the main _get_cache_key() method + # to produce a dramatic savings in Python function call overhead + + visit_has_cache_key = visit_clauseelement = CALL_GEN_CACHE_KEY + visit_clauseelement_list = InternalTraversal.dp_clauseelement_list + visit_annotations_key = 
InternalTraversal.dp_annotations_key + visit_clauseelement_tuple = InternalTraversal.dp_clauseelement_tuple + visit_memoized_select_entities = ( + InternalTraversal.dp_memoized_select_entities + ) + + visit_string = visit_boolean = visit_operator = visit_plain_obj = ( + CACHE_IN_PLACE + ) + visit_statement_hint_list = CACHE_IN_PLACE + visit_type = STATIC_CACHE_KEY + visit_anon_name = ANON_NAME + + visit_propagate_attrs = PROPAGATE_ATTRS + + def visit_with_context_options( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return tuple((fn.__code__, c_key) for fn, c_key in obj) + + def visit_inspectable( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return (attrname, inspect(obj)._gen_cache_key(anon_map, bindparams)) + + def visit_string_list( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return tuple(obj) + + def visit_multi( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + ( + obj._gen_cache_key(anon_map, bindparams) + if isinstance(obj, HasCacheKey) + else obj + ), + ) + + def visit_multi_list( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + ( + elem._gen_cache_key(anon_map, bindparams) + if isinstance(elem, HasCacheKey) + else elem + ) + for elem in obj + ), + ) + + def visit_has_cache_key_tuples( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + return ( + attrname, + tuple( + tuple( + elem._gen_cache_key(anon_map, bindparams) + for elem in 
tup_elem + ) + for tup_elem in obj + ), + ) + + def visit_has_cache_key_list( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + return ( + attrname, + tuple(elem._gen_cache_key(anon_map, bindparams) for elem in obj), + ) + + def visit_executable_options( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + return ( + attrname, + tuple( + elem._gen_cache_key(anon_map, bindparams) + for elem in obj + if elem._is_has_cache_key + ), + ) + + def visit_inspectable_list( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return self.visit_has_cache_key_list( + attrname, [inspect(o) for o in obj], parent, anon_map, bindparams + ) + + def visit_clauseelement_tuples( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return self.visit_has_cache_key_tuples( + attrname, obj, parent, anon_map, bindparams + ) + + def visit_fromclause_ordered_set( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + return ( + attrname, + tuple([elem._gen_cache_key(anon_map, bindparams) for elem in obj]), + ) + + def visit_clauseelement_unordered_set( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + cache_keys = [ + elem._gen_cache_key(anon_map, bindparams) for elem in obj + ] + return ( + attrname, + tuple( + sorted(cache_keys) + ), # cache keys all start with (id_, class) + ) + + def visit_named_ddl_element( + self, + attrname: str, + obj: Any, + parent: Any, + 
anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return (attrname, obj.name) + + def visit_prefix_sequence( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + + return ( + attrname, + tuple( + [ + (clause._gen_cache_key(anon_map, bindparams), strval) + for clause, strval in obj + ] + ), + ) + + def visit_setup_join_tuple( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return tuple( + ( + target._gen_cache_key(anon_map, bindparams), + ( + onclause._gen_cache_key(anon_map, bindparams) + if onclause is not None + else None + ), + ( + from_._gen_cache_key(anon_map, bindparams) + if from_ is not None + else None + ), + tuple([(key, flags[key]) for key in sorted(flags)]), + ) + for (target, onclause, from_, flags) in obj + ) + + def visit_table_hint_list( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + if not obj: + return () + + return ( + attrname, + tuple( + [ + ( + clause._gen_cache_key(anon_map, bindparams), + dialect_name, + text, + ) + for (clause, dialect_name), text in obj.items() + ] + ), + ) + + def visit_plain_dict( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return (attrname, tuple([(key, obj[key]) for key in sorted(obj)])) + + def visit_dialect_options( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + ( + dialect_name, + tuple( + [ + (key, obj[dialect_name][key]) + for key in sorted(obj[dialect_name]) + ] + ), + ) + for dialect_name in sorted(obj) + ), + ) + + def visit_string_clauseelement_dict( + self, + 
attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + (key, obj[key]._gen_cache_key(anon_map, bindparams)) + for key in sorted(obj) + ), + ) + + def visit_string_multi_dict( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + ( + key, + ( + value._gen_cache_key(anon_map, bindparams) + if isinstance(value, HasCacheKey) + else value + ), + ) + for key, value in [(key, obj[key]) for key in sorted(obj)] + ), + ) + + def visit_fromclause_canonical_column_collection( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + # inlining into the internals of ColumnCollection + return ( + attrname, + tuple( + col._gen_cache_key(anon_map, bindparams) + for k, col, _ in obj._collection + ), + ) + + def visit_unknown_structure( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + anon_map[NO_CACHE] = True + return () + + def visit_dml_ordered_values( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + return ( + attrname, + tuple( + ( + ( + key._gen_cache_key(anon_map, bindparams) + if hasattr(key, "__clause_element__") + else key + ), + value._gen_cache_key(anon_map, bindparams), + ) + for key, value in obj + ), + ) + + def visit_dml_values( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + # in py37 we can assume two dictionaries created in the same + # insert ordering will retain that sorting + return ( + attrname, + tuple( + ( + ( + k._gen_cache_key(anon_map, bindparams) + if hasattr(k, "__clause_element__") 
+ else k + ), + obj[k]._gen_cache_key(anon_map, bindparams), + ) + for k in obj + ), + ) + + def visit_dml_multi_values( + self, + attrname: str, + obj: Any, + parent: Any, + anon_map: anon_map, + bindparams: List[BindParameter[Any]], + ) -> Tuple[Any, ...]: + # multivalues are simply not cacheable right now + anon_map[NO_CACHE] = True + return () + + +_cache_key_traversal_visitor = _CacheKeyTraversal() diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/coercions.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/coercions.py new file mode 100644 index 0000000..22d6091 --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/coercions.py @@ -0,0 +1,1389 @@ +# sql/coercions.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +from __future__ import annotations + +import collections.abc as collections_abc +import numbers +import re +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import operators +from . import roles +from . import visitors +from ._typing import is_from_clause +from .base import ExecutableOption +from .base import Options +from .cache_key import HasCacheKey +from .visitors import Visitable +from .. import exc +from .. import inspection +from .. import util +from ..util.typing import Literal + +if typing.TYPE_CHECKING: + # elements lambdas schema selectable are set by __init__ + from . import elements + from . 
import lambdas + from . import schema + from . import selectable + from ._typing import _ColumnExpressionArgument + from ._typing import _ColumnsClauseArgument + from ._typing import _DDLColumnArgument + from ._typing import _DMLTableArgument + from ._typing import _FromClauseArgument + from .dml import _DMLTableElement + from .elements import BindParameter + from .elements import ClauseElement + from .elements import ColumnClause + from .elements import ColumnElement + from .elements import DQLDMLClauseElement + from .elements import NamedColumn + from .elements import SQLCoreOperations + from .schema import Column + from .selectable import _ColumnsClauseElement + from .selectable import _JoinTargetProtocol + from .selectable import FromClause + from .selectable import HasCTE + from .selectable import SelectBase + from .selectable import Subquery + from .visitors import _TraverseCallableType + +_SR = TypeVar("_SR", bound=roles.SQLRole) +_F = TypeVar("_F", bound=Callable[..., Any]) +_StringOnlyR = TypeVar("_StringOnlyR", bound=roles.StringRole) +_T = TypeVar("_T", bound=Any) + + +def _is_literal(element): + """Return whether or not the element is a "literal" in the context + of a SQL expression construct. + + """ + + return not isinstance( + element, + (Visitable, schema.SchemaEventTarget), + ) and not hasattr(element, "__clause_element__") + + +def _deep_is_literal(element): + """Return whether or not the element is a "literal" in the context + of a SQL expression construct. + + does a deeper more esoteric check than _is_literal. is used + for lambda elements that have to distinguish values that would + be bound vs. not without any context. 
+ + """ + + if isinstance(element, collections_abc.Sequence) and not isinstance( + element, str + ): + for elem in element: + if not _deep_is_literal(elem): + return False + else: + return True + + return ( + not isinstance( + element, + ( + Visitable, + schema.SchemaEventTarget, + HasCacheKey, + Options, + util.langhelpers.symbol, + ), + ) + and not hasattr(element, "__clause_element__") + and ( + not isinstance(element, type) + or not issubclass(element, HasCacheKey) + ) + ) + + +def _document_text_coercion( + paramname: str, meth_rst: str, param_rst: str +) -> Callable[[_F], _F]: + return util.add_parameter_text( + paramname, + ( + ".. warning:: " + "The %s argument to %s can be passed as a Python string argument, " + "which will be treated " + "as **trusted SQL text** and rendered as given. **DO NOT PASS " + "UNTRUSTED INPUT TO THIS PARAMETER**." + ) + % (param_rst, meth_rst), + ) + + +def _expression_collection_was_a_list( + attrname: str, + fnname: str, + args: Union[Sequence[_T], Sequence[Sequence[_T]]], +) -> Sequence[_T]: + if args and isinstance(args[0], (list, set, dict)) and len(args) == 1: + if isinstance(args[0], list): + raise exc.ArgumentError( + f'The "{attrname}" argument to {fnname}(), when ' + "referring to a sequence " + "of items, is now passed as a series of positional " + "elements, rather than as a list. " + ) + return cast("Sequence[_T]", args[0]) + + return cast("Sequence[_T]", args) + + +@overload +def expect( + role: Type[roles.TruncatedLabelRole], + element: Any, + **kw: Any, +) -> str: ... + + +@overload +def expect( + role: Type[roles.DMLColumnRole], + element: Any, + *, + as_key: Literal[True] = ..., + **kw: Any, +) -> str: ... + + +@overload +def expect( + role: Type[roles.LiteralValueRole], + element: Any, + **kw: Any, +) -> BindParameter[Any]: ... + + +@overload +def expect( + role: Type[roles.DDLReferredColumnRole], + element: Any, + **kw: Any, +) -> Column[Any]: ... 
+ + +@overload +def expect( + role: Type[roles.DDLConstraintColumnRole], + element: Any, + **kw: Any, +) -> Union[Column[Any], str]: ... + + +@overload +def expect( + role: Type[roles.StatementOptionRole], + element: Any, + **kw: Any, +) -> DQLDMLClauseElement: ... + + +@overload +def expect( + role: Type[roles.LabeledColumnExprRole[Any]], + element: _ColumnExpressionArgument[_T], + **kw: Any, +) -> NamedColumn[_T]: ... + + +@overload +def expect( + role: Union[ + Type[roles.ExpressionElementRole[Any]], + Type[roles.LimitOffsetRole], + Type[roles.WhereHavingRole], + ], + element: _ColumnExpressionArgument[_T], + **kw: Any, +) -> ColumnElement[_T]: ... + + +@overload +def expect( + role: Union[ + Type[roles.ExpressionElementRole[Any]], + Type[roles.LimitOffsetRole], + Type[roles.WhereHavingRole], + Type[roles.OnClauseRole], + Type[roles.ColumnArgumentRole], + ], + element: Any, + **kw: Any, +) -> ColumnElement[Any]: ... + + +@overload +def expect( + role: Type[roles.DMLTableRole], + element: _DMLTableArgument, + **kw: Any, +) -> _DMLTableElement: ... + + +@overload +def expect( + role: Type[roles.HasCTERole], + element: HasCTE, + **kw: Any, +) -> HasCTE: ... + + +@overload +def expect( + role: Type[roles.SelectStatementRole], + element: SelectBase, + **kw: Any, +) -> SelectBase: ... + + +@overload +def expect( + role: Type[roles.FromClauseRole], + element: _FromClauseArgument, + **kw: Any, +) -> FromClause: ... + + +@overload +def expect( + role: Type[roles.FromClauseRole], + element: SelectBase, + *, + explicit_subquery: Literal[True] = ..., + **kw: Any, +) -> Subquery: ... + + +@overload +def expect( + role: Type[roles.ColumnsClauseRole], + element: _ColumnsClauseArgument[Any], + **kw: Any, +) -> _ColumnsClauseElement: ... + + +@overload +def expect( + role: Type[roles.JoinTargetRole], + element: _JoinTargetProtocol, + **kw: Any, +) -> _JoinTargetProtocol: ... 
+ + +# catchall for not-yet-implemented overloads +@overload +def expect( + role: Type[_SR], + element: Any, + **kw: Any, +) -> Any: ... + + +def expect( + role: Type[_SR], + element: Any, + *, + apply_propagate_attrs: Optional[ClauseElement] = None, + argname: Optional[str] = None, + post_inspect: bool = False, + disable_inspection: bool = False, + **kw: Any, +) -> Any: + if ( + role.allows_lambda + # note callable() will not invoke a __getattr__() method, whereas + # hasattr(obj, "__call__") will. by keeping the callable() check here + # we prevent most needless calls to hasattr() and therefore + # __getattr__(), which is present on ColumnElement. + and callable(element) + and hasattr(element, "__code__") + ): + return lambdas.LambdaElement( + element, + role, + lambdas.LambdaOptions(**kw), + apply_propagate_attrs=apply_propagate_attrs, + ) + + # major case is that we are given a ClauseElement already, skip more + # elaborate logic up front if possible + impl = _impl_lookup[role] + + original_element = element + + if not isinstance( + element, + ( + elements.CompilerElement, + schema.SchemaItem, + schema.FetchedValue, + lambdas.PyWrapper, + ), + ): + resolved = None + + if impl._resolve_literal_only: + resolved = impl._literal_coercion(element, **kw) + else: + original_element = element + + is_clause_element = False + + # this is a special performance optimization for ORM + # joins used by JoinTargetImpl that we don't go through the + # work of creating __clause_element__() when we only need the + # original QueryableAttribute, as the former will do clause + # adaption and all that which is just thrown away here. 
+ if ( + impl._skip_clauseelement_for_target_match + and isinstance(element, role) + and hasattr(element, "__clause_element__") + ): + is_clause_element = True + else: + while hasattr(element, "__clause_element__"): + is_clause_element = True + + if not getattr(element, "is_clause_element", False): + element = element.__clause_element__() + else: + break + + if not is_clause_element: + if impl._use_inspection and not disable_inspection: + insp = inspection.inspect(element, raiseerr=False) + if insp is not None: + if post_inspect: + insp._post_inspect + try: + resolved = insp.__clause_element__() + except AttributeError: + impl._raise_for_expected(original_element, argname) + + if resolved is None: + resolved = impl._literal_coercion( + element, argname=argname, **kw + ) + else: + resolved = element + elif isinstance(element, lambdas.PyWrapper): + resolved = element._sa__py_wrapper_literal(**kw) + else: + resolved = element + + if apply_propagate_attrs is not None: + if typing.TYPE_CHECKING: + assert isinstance(resolved, (SQLCoreOperations, ClauseElement)) + + if not apply_propagate_attrs._propagate_attrs and getattr( + resolved, "_propagate_attrs", None + ): + apply_propagate_attrs._propagate_attrs = resolved._propagate_attrs + + if impl._role_class in resolved.__class__.__mro__: + if impl._post_coercion: + resolved = impl._post_coercion( + resolved, + argname=argname, + original_element=original_element, + **kw, + ) + return resolved + else: + return impl._implicit_coercions( + original_element, resolved, argname=argname, **kw + ) + + +def expect_as_key( + role: Type[roles.DMLColumnRole], element: Any, **kw: Any +) -> str: + kw.pop("as_key", None) + return expect(role, element, as_key=True, **kw) + + +def expect_col_expression_collection( + role: Type[roles.DDLConstraintColumnRole], + expressions: Iterable[_DDLColumnArgument], +) -> Iterator[ + Tuple[ + Union[str, Column[Any]], + Optional[ColumnClause[Any]], + Optional[str], + Optional[Union[Column[Any], str]], + 
] +]: + for expr in expressions: + strname = None + column = None + + resolved: Union[Column[Any], str] = expect(role, expr) + if isinstance(resolved, str): + assert isinstance(expr, str) + strname = resolved = expr + else: + cols: List[Column[Any]] = [] + col_append: _TraverseCallableType[Column[Any]] = cols.append + visitors.traverse(resolved, {}, {"column": col_append}) + if cols: + column = cols[0] + add_element = column if column is not None else strname + + yield resolved, column, strname, add_element + + +class RoleImpl: + __slots__ = ("_role_class", "name", "_use_inspection") + + def _literal_coercion(self, element, **kw): + raise NotImplementedError() + + _post_coercion: Any = None + _resolve_literal_only = False + _skip_clauseelement_for_target_match = False + + def __init__(self, role_class): + self._role_class = role_class + self.name = role_class._role_name + self._use_inspection = issubclass(role_class, roles.UsesInspection) + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + self._raise_for_expected(element, argname, resolved) + + def _raise_for_expected( + self, + element: Any, + argname: Optional[str] = None, + resolved: Optional[Any] = None, + advice: Optional[str] = None, + code: Optional[str] = None, + err: Optional[Exception] = None, + **kw: Any, + ) -> NoReturn: + if resolved is not None and resolved is not element: + got = "%r object resolved from %r object" % (resolved, element) + else: + got = repr(element) + + if argname: + msg = "%s expected for argument %r; got %s." % ( + self.name, + argname, + got, + ) + else: + msg = "%s expected, got %s." 
% (self.name, got) + + if advice: + msg += " " + advice + + raise exc.ArgumentError(msg, code=code) from err + + +class _Deannotate: + __slots__ = () + + def _post_coercion(self, resolved, **kw): + from .util import _deep_deannotate + + return _deep_deannotate(resolved) + + +class _StringOnly: + __slots__ = () + + _resolve_literal_only = True + + +class _ReturnsStringKey(RoleImpl): + __slots__ = () + + def _implicit_coercions(self, element, resolved, argname=None, **kw): + if isinstance(element, str): + return element + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion(self, element, **kw): + return element + + +class _ColumnCoercions(RoleImpl): + __slots__ = () + + def _warn_for_scalar_subquery_coercion(self): + util.warn( + "implicitly coercing SELECT object to scalar subquery; " + "please use the .scalar_subquery() method to produce a scalar " + "subquery.", + ) + + def _implicit_coercions(self, element, resolved, argname=None, **kw): + original_element = element + if not getattr(resolved, "is_clause_element", False): + self._raise_for_expected(original_element, argname, resolved) + elif resolved._is_select_base: + self._warn_for_scalar_subquery_coercion() + return resolved.scalar_subquery() + elif resolved._is_from_clause and isinstance( + resolved, selectable.Subquery + ): + self._warn_for_scalar_subquery_coercion() + return resolved.element.scalar_subquery() + elif self._role_class.allows_lambda and resolved._is_lambda_element: + return resolved + else: + self._raise_for_expected(original_element, argname, resolved) + + +def _no_text_coercion( + element: Any, + argname: Optional[str] = None, + exc_cls: Type[exc.SQLAlchemyError] = exc.ArgumentError, + extra: Optional[str] = None, + err: Optional[Exception] = None, +) -> NoReturn: + raise exc_cls( + "%(extra)sTextual SQL expression %(expr)r %(argname)sshould be " + "explicitly declared as text(%(expr)r)" + % { + "expr": util.ellipses_string(element), + "argname": "for 
argument %s" % (argname,) if argname else "", + "extra": "%s " % extra if extra else "", + } + ) from err + + +class _NoTextCoercion(RoleImpl): + __slots__ = () + + def _literal_coercion(self, element, argname=None, **kw): + if isinstance(element, str) and issubclass( + elements.TextClause, self._role_class + ): + _no_text_coercion(element, argname) + else: + self._raise_for_expected(element, argname) + + +class _CoerceLiterals(RoleImpl): + __slots__ = () + _coerce_consts = False + _coerce_star = False + _coerce_numerics = False + + def _text_coercion(self, element, argname=None): + return _no_text_coercion(element, argname) + + def _literal_coercion(self, element, argname=None, **kw): + if isinstance(element, str): + if self._coerce_star and element == "*": + return elements.ColumnClause("*", is_literal=True) + else: + return self._text_coercion(element, argname, **kw) + + if self._coerce_consts: + if element is None: + return elements.Null() + elif element is False: + return elements.False_() + elif element is True: + return elements.True_() + + if self._coerce_numerics and isinstance(element, (numbers.Number)): + return elements.ColumnClause(str(element), is_literal=True) + + self._raise_for_expected(element, argname) + + +class LiteralValueImpl(RoleImpl): + _resolve_literal_only = True + + def _implicit_coercions( + self, + element, + resolved, + argname, + type_=None, + literal_execute=False, + **kw, + ): + if not _is_literal(resolved): + self._raise_for_expected( + element, resolved=resolved, argname=argname, **kw + ) + + return elements.BindParameter( + None, + element, + type_=type_, + unique=True, + literal_execute=literal_execute, + ) + + def _literal_coercion(self, element, argname=None, type_=None, **kw): + return element + + +class _SelectIsNotFrom(RoleImpl): + __slots__ = () + + def _raise_for_expected( + self, + element: Any, + argname: Optional[str] = None, + resolved: Optional[Any] = None, + advice: Optional[str] = None, + code: Optional[str] = 
None, + err: Optional[Exception] = None, + **kw: Any, + ) -> NoReturn: + if ( + not advice + and isinstance(element, roles.SelectStatementRole) + or isinstance(resolved, roles.SelectStatementRole) + ): + advice = ( + "To create a " + "FROM clause from a %s object, use the .subquery() method." + % (resolved.__class__ if resolved is not None else element,) + ) + code = "89ve" + else: + code = None + + super()._raise_for_expected( + element, + argname=argname, + resolved=resolved, + advice=advice, + code=code, + err=err, + **kw, + ) + # never reached + assert False + + +class HasCacheKeyImpl(RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if isinstance(element, HasCacheKey): + return element + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion(self, element, **kw): + return element + + +class ExecutableOptionImpl(RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if isinstance(element, ExecutableOption): + return element + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion(self, element, **kw): + return element + + +class ExpressionElementImpl(_ColumnCoercions, RoleImpl): + __slots__ = () + + def _literal_coercion( + self, element, name=None, type_=None, argname=None, is_crud=False, **kw + ): + if ( + element is None + and not is_crud + and (type_ is None or not type_.should_evaluate_none) + ): + # TODO: there's no test coverage now for the + # "should_evaluate_none" part of this, as outside of "crud" this + # codepath is not normally used except in some special cases + return elements.Null() + else: + try: + return elements.BindParameter( + name, element, type_, unique=True, _is_crud=is_crud + ) + except exc.ArgumentError as err: + self._raise_for_expected(element, err=err) + + def 
_raise_for_expected(self, element, argname=None, resolved=None, **kw): + # select uses implicit coercion with warning instead of raising + if isinstance(element, selectable.Values): + advice = ( + "To create a column expression from a VALUES clause, " + "use the .scalar_values() method." + ) + elif isinstance(element, roles.AnonymizedFromClauseRole): + advice = ( + "To create a column expression from a FROM clause row " + "as a whole, use the .table_valued() method." + ) + else: + advice = None + + return super()._raise_for_expected( + element, argname=argname, resolved=resolved, advice=advice, **kw + ) + + +class BinaryElementImpl(ExpressionElementImpl, RoleImpl): + __slots__ = () + + def _literal_coercion( + self, element, expr, operator, bindparam_type=None, argname=None, **kw + ): + try: + return expr._bind_param(operator, element, type_=bindparam_type) + except exc.ArgumentError as err: + self._raise_for_expected(element, err=err) + + def _post_coercion(self, resolved, expr, bindparam_type=None, **kw): + if resolved.type._isnull and not expr.type._isnull: + resolved = resolved._with_binary_element_type( + bindparam_type if bindparam_type is not None else expr.type + ) + return resolved + + +class InElementImpl(RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved._is_from_clause: + if ( + isinstance(resolved, selectable.Alias) + and resolved.element._is_select_base + ): + self._warn_for_implicit_coercion(resolved) + return self._post_coercion(resolved.element, **kw) + else: + self._warn_for_implicit_coercion(resolved) + return self._post_coercion(resolved.select(), **kw) + else: + self._raise_for_expected(element, argname, resolved) + + def _warn_for_implicit_coercion(self, elem): + util.warn( + "Coercing %s object into a select() for use in IN(); " + "please pass a select() construct explicitly" + % (elem.__class__.__name__) + ) + + def 
_literal_coercion(self, element, expr, operator, **kw): + if util.is_non_string_iterable(element): + non_literal_expressions: Dict[ + Optional[operators.ColumnOperators], + operators.ColumnOperators, + ] = {} + element = list(element) + for o in element: + if not _is_literal(o): + if not isinstance(o, operators.ColumnOperators): + self._raise_for_expected(element, **kw) + + else: + non_literal_expressions[o] = o + elif o is None: + non_literal_expressions[o] = elements.Null() + + if non_literal_expressions: + return elements.ClauseList( + *[ + ( + non_literal_expressions[o] + if o in non_literal_expressions + else expr._bind_param(operator, o) + ) + for o in element + ] + ) + else: + return expr._bind_param(operator, element, expanding=True) + + else: + self._raise_for_expected(element, **kw) + + def _post_coercion(self, element, expr, operator, **kw): + if element._is_select_base: + # for IN, we are doing scalar_subquery() coercion without + # a warning + return element.scalar_subquery() + elif isinstance(element, elements.ClauseList): + assert not len(element.clauses) == 0 + return element.self_group(against=operator) + + elif isinstance(element, elements.BindParameter): + element = element._clone(maintain_key=True) + element.expanding = True + element.expand_op = operator + + return element + elif isinstance(element, selectable.Values): + return element.scalar_values() + else: + return element + + +class OnClauseImpl(_ColumnCoercions, RoleImpl): + __slots__ = () + + _coerce_consts = True + + def _literal_coercion( + self, element, name=None, type_=None, argname=None, is_crud=False, **kw + ): + self._raise_for_expected(element) + + def _post_coercion(self, resolved, original_element=None, **kw): + # this is a hack right now as we want to use coercion on an + # ORM InstrumentedAttribute, but we want to return the object + # itself if it is one, not its clause element. 
+ # ORM context _join and _legacy_join() would need to be improved + # to look for annotations in a clause element form. + if isinstance(original_element, roles.JoinTargetRole): + return original_element + return resolved + + +class WhereHavingImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl): + __slots__ = () + + _coerce_consts = True + + def _text_coercion(self, element, argname=None): + return _no_text_coercion(element, argname) + + +class StatementOptionImpl(_CoerceLiterals, RoleImpl): + __slots__ = () + + _coerce_consts = True + + def _text_coercion(self, element, argname=None): + return elements.TextClause(element) + + +class ColumnArgumentImpl(_NoTextCoercion, RoleImpl): + __slots__ = () + + +class ColumnArgumentOrKeyImpl(_ReturnsStringKey, RoleImpl): + __slots__ = () + + +class StrAsPlainColumnImpl(_CoerceLiterals, RoleImpl): + __slots__ = () + + def _text_coercion(self, element, argname=None): + return elements.ColumnClause(element) + + +class ByOfImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl, roles.ByOfRole): + __slots__ = () + + _coerce_consts = True + + def _text_coercion(self, element, argname=None): + return elements._textual_label_reference(element) + + +class OrderByImpl(ByOfImpl, RoleImpl): + __slots__ = () + + def _post_coercion(self, resolved, **kw): + if ( + isinstance(resolved, self._role_class) + and resolved._order_by_label_element is not None + ): + return elements._label_reference(resolved) + else: + return resolved + + +class GroupByImpl(ByOfImpl, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if is_from_clause(resolved): + return elements.ClauseList(*resolved.c) + else: + return resolved + + +class DMLColumnImpl(_ReturnsStringKey, RoleImpl): + __slots__ = () + + def _post_coercion(self, element, as_key=False, **kw): + if as_key: + return element.key + else: + return element + + +class ConstExprImpl(RoleImpl): + __slots__ = () 
+ + def _literal_coercion(self, element, argname=None, **kw): + if element is None: + return elements.Null() + elif element is False: + return elements.False_() + elif element is True: + return elements.True_() + else: + self._raise_for_expected(element, argname) + + +class TruncatedLabelImpl(_StringOnly, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if isinstance(element, str): + return resolved + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion(self, element, argname=None, **kw): + """coerce the given value to :class:`._truncated_label`. + + Existing :class:`._truncated_label` and + :class:`._anonymous_label` objects are passed + unchanged. + """ + + if isinstance(element, elements._truncated_label): + return element + else: + return elements._truncated_label(element) + + +class DDLExpressionImpl(_Deannotate, _CoerceLiterals, RoleImpl): + __slots__ = () + + _coerce_consts = True + + def _text_coercion(self, element, argname=None): + # see #5754 for why we can't easily deprecate this coercion. + # essentially expressions like postgresql_where would have to be + # text() as they come back from reflection and we don't want to + # have text() elements wired into the inspection dictionaries. 
+ return elements.TextClause(element) + + +class DDLConstraintColumnImpl(_Deannotate, _ReturnsStringKey, RoleImpl): + __slots__ = () + + +class DDLReferredColumnImpl(DDLConstraintColumnImpl): + __slots__ = () + + +class LimitOffsetImpl(RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved is None: + return None + else: + self._raise_for_expected(element, argname, resolved) + + def _literal_coercion(self, element, name, type_, **kw): + if element is None: + return None + else: + value = util.asint(element) + return selectable._OffsetLimitParam( + name, value, type_=type_, unique=True + ) + + +class LabeledColumnExprImpl(ExpressionElementImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if isinstance(resolved, roles.ExpressionElementRole): + return resolved.label(None) + else: + new = super()._implicit_coercions( + element, resolved, argname=argname, **kw + ) + if isinstance(new, roles.ExpressionElementRole): + return new.label(None) + else: + self._raise_for_expected(element, argname, resolved) + + +class ColumnsClauseImpl(_SelectIsNotFrom, _CoerceLiterals, RoleImpl): + __slots__ = () + + _coerce_consts = True + _coerce_numerics = True + _coerce_star = True + + _guess_straight_column = re.compile(r"^\w\S*$", re.I) + + def _raise_for_expected( + self, element, argname=None, resolved=None, advice=None, **kw + ): + if not advice and isinstance(element, list): + advice = ( + f"Did you mean to say select(" + f"{', '.join(repr(e) for e in element)})?" 
+ ) + + return super()._raise_for_expected( + element, argname=argname, resolved=resolved, advice=advice, **kw + ) + + def _text_coercion(self, element, argname=None): + element = str(element) + + guess_is_literal = not self._guess_straight_column.match(element) + raise exc.ArgumentError( + "Textual column expression %(column)r %(argname)sshould be " + "explicitly declared with text(%(column)r), " + "or use %(literal_column)s(%(column)r) " + "for more specificity" + % { + "column": util.ellipses_string(element), + "argname": "for argument %s" % (argname,) if argname else "", + "literal_column": ( + "literal_column" if guess_is_literal else "column" + ), + } + ) + + +class ReturnsRowsImpl(RoleImpl): + __slots__ = () + + +class StatementImpl(_CoerceLiterals, RoleImpl): + __slots__ = () + + def _post_coercion(self, resolved, original_element, argname=None, **kw): + if resolved is not original_element and not isinstance( + original_element, str + ): + # use same method as Connection uses; this will later raise + # ObjectNotExecutableError + try: + original_element._execute_on_connection + except AttributeError: + util.warn_deprecated( + "Object %r should not be used directly in a SQL statement " + "context, such as passing to methods such as " + "session.execute(). This usage will be disallowed in a " + "future release. " + "Please use Core select() / update() / delete() etc. " + "with Session.execute() and other statement execution " + "methods." 
% original_element, + "1.4", + ) + + return resolved + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved._is_lambda_element: + return resolved + else: + return super()._implicit_coercions( + element, resolved, argname=argname, **kw + ) + + +class SelectStatementImpl(_NoTextCoercion, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved._is_text_clause: + return resolved.columns() + else: + self._raise_for_expected(element, argname, resolved) + + +class HasCTEImpl(ReturnsRowsImpl): + __slots__ = () + + +class IsCTEImpl(RoleImpl): + __slots__ = () + + +class JoinTargetImpl(RoleImpl): + __slots__ = () + + _skip_clauseelement_for_target_match = True + + def _literal_coercion(self, element, argname=None, **kw): + self._raise_for_expected(element, argname) + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + legacy: bool = False, + **kw: Any, + ) -> Any: + if isinstance(element, roles.JoinTargetRole): + # note that this codepath no longer occurs as of + # #6550, unless JoinTargetImpl._skip_clauseelement_for_target_match + # were set to False. + return element + elif legacy and resolved._is_select_base: + util.warn_deprecated( + "Implicit coercion of SELECT and textual SELECT " + "constructs into FROM clauses is deprecated; please call " + ".subquery() on any Core select or ORM Query object in " + "order to produce a subquery object.", + version="1.4", + ) + # TODO: doing _implicit_subquery here causes tests to fail, + # how was this working before? 
probably that ORM + # join logic treated it as a select and subquery would happen + # in _ORMJoin->Join + return resolved + else: + self._raise_for_expected(element, argname, resolved) + + +class FromClauseImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + explicit_subquery: bool = False, + allow_select: bool = True, + **kw: Any, + ) -> Any: + if resolved._is_select_base: + if explicit_subquery: + return resolved.subquery() + elif allow_select: + util.warn_deprecated( + "Implicit coercion of SELECT and textual SELECT " + "constructs into FROM clauses is deprecated; please call " + ".subquery() on any Core select or ORM Query object in " + "order to produce a subquery object.", + version="1.4", + ) + return resolved._implicit_subquery + elif resolved._is_text_clause: + return resolved + else: + self._raise_for_expected(element, argname, resolved) + + def _post_coercion(self, element, deannotate=False, **kw): + if deannotate: + return element._deannotate() + else: + return element + + +class StrictFromClauseImpl(FromClauseImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + explicit_subquery: bool = False, + allow_select: bool = False, + **kw: Any, + ) -> Any: + if resolved._is_select_base and allow_select: + util.warn_deprecated( + "Implicit coercion of SELECT and textual SELECT constructs " + "into FROM clauses is deprecated; please call .subquery() " + "on any Core select or ORM Query object in order to produce a " + "subquery object.", + version="1.4", + ) + return resolved._implicit_subquery + else: + self._raise_for_expected(element, argname, resolved) + + +class AnonymizedFromClauseImpl(StrictFromClauseImpl): + __slots__ = () + + def _post_coercion(self, element, flat=False, name=None, **kw): + assert name is None + + return element._anonymous_fromclause(flat=flat) 
+ + +class DMLTableImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl): + __slots__ = () + + def _post_coercion(self, element, **kw): + if "dml_table" in element._annotations: + return element._annotations["dml_table"] + else: + return element + + +class DMLSelectImpl(_NoTextCoercion, RoleImpl): + __slots__ = () + + def _implicit_coercions( + self, + element: Any, + resolved: Any, + argname: Optional[str] = None, + **kw: Any, + ) -> Any: + if resolved._is_from_clause: + if ( + isinstance(resolved, selectable.Alias) + and resolved.element._is_select_base + ): + return resolved.element + else: + return resolved.select() + else: + self._raise_for_expected(element, argname, resolved) + + +class CompoundElementImpl(_NoTextCoercion, RoleImpl): + __slots__ = () + + def _raise_for_expected(self, element, argname=None, resolved=None, **kw): + if isinstance(element, roles.FromClauseRole): + if element._is_subquery: + advice = ( + "Use the plain select() object without " + "calling .subquery() or .alias()." + ) + else: + advice = ( + "To SELECT from any FROM clause, use the .select() method." 
+ ) + else: + advice = None + return super()._raise_for_expected( + element, argname=argname, resolved=resolved, advice=advice, **kw + ) + + +_impl_lookup = {} + + +for name in dir(roles): + cls = getattr(roles, name) + if name.endswith("Role"): + name = name.replace("Role", "Impl") + if name in globals(): + impl = globals()[name](cls) + _impl_lookup[cls] = impl + +if not TYPE_CHECKING: + ee_impl = _impl_lookup[roles.ExpressionElementRole] + + for py_type in (int, bool, str, float): + _impl_lookup[roles.ExpressionElementRole[py_type]] = ee_impl diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/compiler.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/compiler.py new file mode 100644 index 0000000..c354ba8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/compiler.py @@ -0,0 +1,7811 @@ +# sql/compiler.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Base SQL and DDL compiler implementations. + +Classes provided include: + +:class:`.compiler.SQLCompiler` - renders SQL +strings + +:class:`.compiler.DDLCompiler` - renders DDL +(data definition language) strings + +:class:`.compiler.GenericTypeCompiler` - renders +type specification strings. + +To generate user-defined SQL strings, see +:doc:`/ext/compiler`. 
+ +""" +from __future__ import annotations + +import collections +import collections.abc as collections_abc +import contextlib +from enum import IntEnum +import functools +import itertools +import operator +import re +from time import perf_counter +import typing +from typing import Any +from typing import Callable +from typing import cast +from typing import ClassVar +from typing import Dict +from typing import FrozenSet +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import NamedTuple +from typing import NoReturn +from typing import Optional +from typing import Pattern +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from . import base +from . import coercions +from . import crud +from . import elements +from . import functions +from . import operators +from . import roles +from . import schema +from . import selectable +from . import sqltypes +from . import util as sql_util +from ._typing import is_column_element +from ._typing import is_dml +from .base import _de_clone +from .base import _from_objects +from .base import _NONE_NAME +from .base import _SentinelDefaultCharacterization +from .base import Executable +from .base import NO_ARG +from .elements import ClauseElement +from .elements import quoted_name +from .schema import Column +from .sqltypes import TupleType +from .type_api import TypeEngine +from .visitors import prefix_anon_map +from .visitors import Visitable +from .. import exc +from .. 
import util +from ..util import FastIntFlag +from ..util.typing import Literal +from ..util.typing import Protocol +from ..util.typing import TypedDict + +if typing.TYPE_CHECKING: + from .annotation import _AnnotationDict + from .base import _AmbiguousTableNameMap + from .base import CompileState + from .cache_key import CacheKey + from .ddl import ExecutableDDLElement + from .dml import Insert + from .dml import UpdateBase + from .dml import ValuesBase + from .elements import _truncated_label + from .elements import BindParameter + from .elements import ColumnClause + from .elements import ColumnElement + from .elements import Label + from .functions import Function + from .schema import Table + from .selectable import AliasedReturnsRows + from .selectable import CompoundSelectState + from .selectable import CTE + from .selectable import FromClause + from .selectable import NamedFromClause + from .selectable import ReturnsRows + from .selectable import Select + from .selectable import SelectState + from .type_api import _BindProcessorType + from ..engine.cursor import CursorResultMetaData + from ..engine.interfaces import _CoreSingleExecuteParams + from ..engine.interfaces import _DBAPIAnyExecuteParams + from ..engine.interfaces import _DBAPIMultiExecuteParams + from ..engine.interfaces import _DBAPISingleExecuteParams + from ..engine.interfaces import _ExecuteOptions + from ..engine.interfaces import _GenericSetInputSizesType + from ..engine.interfaces import _MutableCoreSingleExecuteParams + from ..engine.interfaces import Dialect + from ..engine.interfaces import SchemaTranslateMapType + +_FromHintsType = Dict["FromClause", str] + +RESERVED_WORDS = { + "all", + "analyse", + "analyze", + "and", + "any", + "array", + "as", + "asc", + "asymmetric", + "authorization", + "between", + "binary", + "both", + "case", + "cast", + "check", + "collate", + "column", + "constraint", + "create", + "cross", + "current_date", + "current_role", + "current_time", + 
"current_timestamp", + "current_user", + "default", + "deferrable", + "desc", + "distinct", + "do", + "else", + "end", + "except", + "false", + "for", + "foreign", + "freeze", + "from", + "full", + "grant", + "group", + "having", + "ilike", + "in", + "initially", + "inner", + "intersect", + "into", + "is", + "isnull", + "join", + "leading", + "left", + "like", + "limit", + "localtime", + "localtimestamp", + "natural", + "new", + "not", + "notnull", + "null", + "off", + "offset", + "old", + "on", + "only", + "or", + "order", + "outer", + "overlaps", + "placing", + "primary", + "references", + "right", + "select", + "session_user", + "set", + "similar", + "some", + "symmetric", + "table", + "then", + "to", + "trailing", + "true", + "union", + "unique", + "user", + "using", + "verbose", + "when", + "where", +} + +LEGAL_CHARACTERS = re.compile(r"^[A-Z0-9_$]+$", re.I) +LEGAL_CHARACTERS_PLUS_SPACE = re.compile(r"^[A-Z0-9_ $]+$", re.I) +ILLEGAL_INITIAL_CHARACTERS = {str(x) for x in range(0, 10)}.union(["$"]) + +FK_ON_DELETE = re.compile( + r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I +) +FK_ON_UPDATE = re.compile( + r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I +) +FK_INITIALLY = re.compile(r"^(?:DEFERRED|IMMEDIATE)$", re.I) +BIND_PARAMS = re.compile(r"(? 
", + operators.ge: " >= ", + operators.eq: " = ", + operators.is_distinct_from: " IS DISTINCT FROM ", + operators.is_not_distinct_from: " IS NOT DISTINCT FROM ", + operators.concat_op: " || ", + operators.match_op: " MATCH ", + operators.not_match_op: " NOT MATCH ", + operators.in_op: " IN ", + operators.not_in_op: " NOT IN ", + operators.comma_op: ", ", + operators.from_: " FROM ", + operators.as_: " AS ", + operators.is_: " IS ", + operators.is_not: " IS NOT ", + operators.collate: " COLLATE ", + # unary + operators.exists: "EXISTS ", + operators.distinct_op: "DISTINCT ", + operators.inv: "NOT ", + operators.any_op: "ANY ", + operators.all_op: "ALL ", + # modifiers + operators.desc_op: " DESC", + operators.asc_op: " ASC", + operators.nulls_first_op: " NULLS FIRST", + operators.nulls_last_op: " NULLS LAST", + # bitwise + operators.bitwise_xor_op: " ^ ", + operators.bitwise_or_op: " | ", + operators.bitwise_and_op: " & ", + operators.bitwise_not_op: "~", + operators.bitwise_lshift_op: " << ", + operators.bitwise_rshift_op: " >> ", +} + +FUNCTIONS: Dict[Type[Function[Any]], str] = { + functions.coalesce: "coalesce", + functions.current_date: "CURRENT_DATE", + functions.current_time: "CURRENT_TIME", + functions.current_timestamp: "CURRENT_TIMESTAMP", + functions.current_user: "CURRENT_USER", + functions.localtime: "LOCALTIME", + functions.localtimestamp: "LOCALTIMESTAMP", + functions.random: "random", + functions.sysdate: "sysdate", + functions.session_user: "SESSION_USER", + functions.user: "USER", + functions.cube: "CUBE", + functions.rollup: "ROLLUP", + functions.grouping_sets: "GROUPING SETS", +} + + +EXTRACT_MAP = { + "month": "month", + "day": "day", + "year": "year", + "second": "second", + "hour": "hour", + "doy": "doy", + "minute": "minute", + "quarter": "quarter", + "dow": "dow", + "week": "week", + "epoch": "epoch", + "milliseconds": "milliseconds", + "microseconds": "microseconds", + "timezone_hour": "timezone_hour", + "timezone_minute": 
"timezone_minute", +} + +COMPOUND_KEYWORDS = { + selectable._CompoundSelectKeyword.UNION: "UNION", + selectable._CompoundSelectKeyword.UNION_ALL: "UNION ALL", + selectable._CompoundSelectKeyword.EXCEPT: "EXCEPT", + selectable._CompoundSelectKeyword.EXCEPT_ALL: "EXCEPT ALL", + selectable._CompoundSelectKeyword.INTERSECT: "INTERSECT", + selectable._CompoundSelectKeyword.INTERSECT_ALL: "INTERSECT ALL", +} + + +class ResultColumnsEntry(NamedTuple): + """Tracks a column expression that is expected to be represented + in the result rows for this statement. + + This normally refers to the columns clause of a SELECT statement + but may also refer to a RETURNING clause, as well as for dialect-specific + emulations. + + """ + + keyname: str + """string name that's expected in cursor.description""" + + name: str + """column name, may be labeled""" + + objects: Tuple[Any, ...] + """sequence of objects that should be able to locate this column + in a RowMapping. This is typically string names and aliases + as well as Column objects. + + """ + + type: TypeEngine[Any] + """Datatype to be associated with this column. This is where + the "result processing" logic directly links the compiled statement + to the rows that come back from the cursor. + + """ + + +class _ResultMapAppender(Protocol): + def __call__( + self, + keyname: str, + name: str, + objects: Sequence[Any], + type_: TypeEngine[Any], + ) -> None: ... + + +# integer indexes into ResultColumnsEntry used by cursor.py. 
+# some profiling showed integer access faster than named tuple +RM_RENDERED_NAME: Literal[0] = 0 +RM_NAME: Literal[1] = 1 +RM_OBJECTS: Literal[2] = 2 +RM_TYPE: Literal[3] = 3 + + +class _BaseCompilerStackEntry(TypedDict): + asfrom_froms: Set[FromClause] + correlate_froms: Set[FromClause] + selectable: ReturnsRows + + +class _CompilerStackEntry(_BaseCompilerStackEntry, total=False): + compile_state: CompileState + need_result_map_for_nested: bool + need_result_map_for_compound: bool + select_0: ReturnsRows + insert_from_select: Select[Any] + + +class ExpandedState(NamedTuple): + """represents state to use when producing "expanded" and + "post compile" bound parameters for a statement. + + "expanded" parameters are parameters that are generated at + statement execution time to suit a number of parameters passed, the most + prominent example being the individual elements inside of an IN expression. + + "post compile" parameters are parameters where the SQL literal value + will be rendered into the SQL statement at execution time, rather than + being passed as separate parameters to the driver. + + To create an :class:`.ExpandedState` instance, use the + :meth:`.SQLCompiler.construct_expanded_state` method on any + :class:`.SQLCompiler` instance. + + """ + + statement: str + """String SQL statement with parameters fully expanded""" + + parameters: _CoreSingleExecuteParams + """Parameter dictionary with parameters fully expanded. + + For a statement that uses named parameters, this dictionary will map + exactly to the names in the statement. For a statement that uses + positional parameters, the :attr:`.ExpandedState.positional_parameters` + will yield a tuple with the positional parameter set. 
+ + """ + + processors: Mapping[str, _BindProcessorType[Any]] + """mapping of bound value processors""" + + positiontup: Optional[Sequence[str]] + """Sequence of string names indicating the order of positional + parameters""" + + parameter_expansion: Mapping[str, List[str]] + """Mapping representing the intermediary link from original parameter + name to list of "expanded" parameter names, for those parameters that + were expanded.""" + + @property + def positional_parameters(self) -> Tuple[Any, ...]: + """Tuple of positional parameters, for statements that were compiled + using a positional paramstyle. + + """ + if self.positiontup is None: + raise exc.InvalidRequestError( + "statement does not use a positional paramstyle" + ) + return tuple(self.parameters[key] for key in self.positiontup) + + @property + def additional_parameters(self) -> _CoreSingleExecuteParams: + """synonym for :attr:`.ExpandedState.parameters`.""" + return self.parameters + + +class _InsertManyValues(NamedTuple): + """represents state to use for executing an "insertmanyvalues" statement. + + The primary consumers of this object are the + :meth:`.SQLCompiler._deliver_insertmanyvalues_batches` and + :meth:`.DefaultDialect._deliver_insertmanyvalues_batches` methods. + + .. versionadded:: 2.0 + + """ + + is_default_expr: bool + """if True, the statement is of the form + ``INSERT INTO TABLE DEFAULT VALUES``, and can't be rewritten as a "batch" + + """ + + single_values_expr: str + """The rendered "values" clause of the INSERT statement. + + This is typically the parenthesized section e.g. "(?, ?, ?)" or similar. + The insertmanyvalues logic uses this string as a search and replace + target. + + """ + + insert_crud_params: List[crud._CrudParamElementStr] + """List of Column / bind names etc. used while rewriting the statement""" + + num_positional_params_counted: int + """the number of bound parameters in a single-row statement. 
+ + This count may be larger or smaller than the actual number of columns + targeted in the INSERT, as it accommodates for SQL expressions + in the values list that may have zero or more parameters embedded + within them. + + This count is part of what's used to organize rewritten parameter lists + when batching. + + """ + + sort_by_parameter_order: bool = False + """if the sort_by_parameter_order parameter were used on the + insert. + + All of the attributes following this will only be used if this is True. + + """ + + includes_upsert_behaviors: bool = False + """if True, we have to accommodate for upsert behaviors. + + This will in some cases downgrade "insertmanyvalues" that requests + deterministic ordering. + + """ + + sentinel_columns: Optional[Sequence[Column[Any]]] = None + """List of sentinel columns that were located. + + This list is only here if the INSERT asked for + sort_by_parameter_order=True, + and dialect-appropriate sentinel columns were located. + + .. versionadded:: 2.0.10 + + """ + + num_sentinel_columns: int = 0 + """how many sentinel columns are in the above list, if any. + + This is the same as + ``len(sentinel_columns) if sentinel_columns is not None else 0`` + + """ + + sentinel_param_keys: Optional[Sequence[str]] = None + """parameter str keys in each param dictionary / tuple + that would link to the client side "sentinel" values for that row, which + we can use to match up parameter sets to result rows. + + This is only present if sentinel_columns is present and the INSERT + statement actually refers to client side values for these sentinel + columns. + + .. versionadded:: 2.0.10 + + ..
versionchanged:: 2.0.29 - the sequence is now string dictionary keys + only, used against the "compiled parameters" collection before + the parameters were converted by bound parameter processors + + """ + + implicit_sentinel: bool = False + """if True, we have exactly one sentinel column and it uses a server side + value, currently has to generate an incrementing integer value. + + The dialect in question would have asserted that it supports receiving + these values back and sorting on that value as a means of guaranteeing + correlation with the incoming parameter list. + + .. versionadded:: 2.0.10 + + """ + + embed_values_counter: bool = False + """Whether to embed an incrementing integer counter in each parameter + set within the VALUES clause as parameters are batched over. + + This is only used for a specific INSERT..SELECT..VALUES..RETURNING syntax + where a subquery is used to produce value tuples. Current support + includes PostgreSQL, Microsoft SQL Server. + + .. versionadded:: 2.0.10 + + """ + + +class _InsertManyValuesBatch(NamedTuple): + """represents an individual batch SQL statement for insertmanyvalues. + + This is passed through the + :meth:`.SQLCompiler._deliver_insertmanyvalues_batches` and + :meth:`.DefaultDialect._deliver_insertmanyvalues_batches` methods out + to the :class:`.Connection` within the + :meth:`.Connection._exec_insertmany_context` method. + + ..
versionadded:: 2.0.10 + + """ + + replaced_statement: str + replaced_parameters: _DBAPIAnyExecuteParams + processed_setinputsizes: Optional[_GenericSetInputSizesType] + batch: Sequence[_DBAPISingleExecuteParams] + sentinel_values: Sequence[Tuple[Any, ...]] + current_batch_size: int + batchnum: int + total_batches: int + rows_sorted: bool + is_downgraded: bool + + +class InsertmanyvaluesSentinelOpts(FastIntFlag): + """bitflag enum indicating styles of PK defaults + which can work as implicit sentinel columns + + """ + + NOT_SUPPORTED = 1 + AUTOINCREMENT = 2 + IDENTITY = 4 + SEQUENCE = 8 + + ANY_AUTOINCREMENT = AUTOINCREMENT | IDENTITY | SEQUENCE + _SUPPORTED_OR_NOT = NOT_SUPPORTED | ANY_AUTOINCREMENT + + USE_INSERT_FROM_SELECT = 16 + RENDER_SELECT_COL_CASTS = 64 + + +class CompilerState(IntEnum): + COMPILING = 0 + """statement is present, compilation phase in progress""" + + STRING_APPLIED = 1 + """statement is present, string form of the statement has been applied. + + Additional processors by subclasses may still be pending. + + """ + + NO_STATEMENT = 2 + """compiler does not have a statement to compile, is used + for method access""" + + +class Linting(IntEnum): + """represent preferences for the 'SQL linting' feature. + + this feature currently includes support for flagging cartesian products + in SQL statements. + + """ + + NO_LINTING = 0 + "Disable all linting." 
+ + COLLECT_CARTESIAN_PRODUCTS = 1 + """Collect data on FROMs and cartesian products and gather into + 'self.from_linter'""" + + WARN_LINTING = 2 + "Emit warnings for linters that find problems" + + FROM_LINTING = COLLECT_CARTESIAN_PRODUCTS | WARN_LINTING + """Warn for cartesian products; combines COLLECT_CARTESIAN_PRODUCTS + and WARN_LINTING""" + + +NO_LINTING, COLLECT_CARTESIAN_PRODUCTS, WARN_LINTING, FROM_LINTING = tuple( + Linting +) + + +class FromLinter(collections.namedtuple("FromLinter", ["froms", "edges"])): + """represents current state for the "cartesian product" detection + feature.""" + + def lint(self, start=None): + froms = self.froms + if not froms: + return None, None + + edges = set(self.edges) + the_rest = set(froms) + + if start is not None: + start_with = start + the_rest.remove(start_with) + else: + start_with = the_rest.pop() + + stack = collections.deque([start_with]) + + while stack and the_rest: + node = stack.popleft() + the_rest.discard(node) + + # comparison of nodes in edges here is based on hash equality, as + # there are "annotated" elements that match the non-annotated ones. + # to remove the need for in-python hash() calls, use native + # containment routines (e.g. "node in edge", "edge.index(node)") + to_remove = {edge for edge in edges if node in edge} + + # appendleft the node in each edge that is not + # the one that matched. + stack.extendleft(edge[not edge.index(node)] for edge in to_remove) + edges.difference_update(to_remove) + + # FROMS left over? boom + if the_rest: + return the_rest, start_with + else: + return None, None + + def warn(self, stmt_type="SELECT"): + the_rest, start_with = self.lint() + + # FROMS left over? boom + if the_rest: + froms = the_rest + if froms: + template = ( + "{stmt_type} statement has a cartesian product between " + "FROM element(s) {froms} and " + 'FROM element "{start}". Apply join condition(s) ' + "between each element to resolve." 
+ ) + froms_str = ", ".join( + f'"{self.froms[from_]}"' for from_ in froms + ) + message = template.format( + stmt_type=stmt_type, + froms=froms_str, + start=self.froms[start_with], + ) + + util.warn(message) + + +class Compiled: + """Represent a compiled SQL or DDL expression. + + The ``__str__`` method of the ``Compiled`` object should produce + the actual text of the statement. ``Compiled`` objects are + specific to their underlying database dialect, and also may + or may not be specific to the columns referenced within a + particular set of bind parameters. In no case should the + ``Compiled`` object be dependent on the actual values of those + bind parameters, even though it may reference those values as + defaults. + """ + + statement: Optional[ClauseElement] = None + "The statement to compile." + string: str = "" + "The string representation of the ``statement``" + + state: CompilerState + """description of the compiler's state""" + + is_sql = False + is_ddl = False + + _cached_metadata: Optional[CursorResultMetaData] = None + + _result_columns: Optional[List[ResultColumnsEntry]] = None + + schema_translate_map: Optional[SchemaTranslateMapType] = None + + execution_options: _ExecuteOptions = util.EMPTY_DICT + """ + Execution options propagated from the statement. In some cases, + sub-elements of the statement can modify these. + """ + + preparer: IdentifierPreparer + + _annotations: _AnnotationDict = util.EMPTY_DICT + + compile_state: Optional[CompileState] = None + """Optional :class:`.CompileState` object that maintains additional + state used by the compiler. + + Major executable objects such as :class:`_expression.Insert`, + :class:`_expression.Update`, :class:`_expression.Delete`, + :class:`_expression.Select` will generate this + state when compiled in order to calculate additional information about the + object. 
For the top level object that is to be executed, the state can be + stored here where it can also have applicability towards result set + processing. + + .. versionadded:: 1.4 + + """ + + dml_compile_state: Optional[CompileState] = None + """Optional :class:`.CompileState` assigned at the same point that + .isinsert, .isupdate, or .isdelete is assigned. + + This will normally be the same object as .compile_state, with the + exception of cases like the :class:`.ORMFromStatementCompileState` + object. + + .. versionadded:: 1.4.40 + + """ + + cache_key: Optional[CacheKey] = None + """The :class:`.CacheKey` that was generated ahead of creating this + :class:`.Compiled` object. + + This is used for routines that need access to the original + :class:`.CacheKey` instance generated when the :class:`.Compiled` + instance was first cached, typically in order to reconcile + the original list of :class:`.BindParameter` objects with a + per-statement list that's generated on each call. + + """ + + _gen_time: float + """Generation time of this :class:`.Compiled`, used for reporting + cache stats.""" + + def __init__( + self, + dialect: Dialect, + statement: Optional[ClauseElement], + schema_translate_map: Optional[SchemaTranslateMapType] = None, + render_schema_translate: bool = False, + compile_kwargs: Mapping[str, Any] = util.immutabledict(), + ): + """Construct a new :class:`.Compiled` object. + + :param dialect: :class:`.Dialect` to compile against. + + :param statement: :class:`_expression.ClauseElement` to be compiled. + + :param schema_translate_map: dictionary of schema names to be + translated when forming the resultant SQL + + .. seealso:: + + :ref:`schema_translating` + + :param compile_kwargs: additional kwargs that will be + passed to the initial call to :meth:`.Compiled.process`. 
+ + + """ + self.dialect = dialect + self.preparer = self.dialect.identifier_preparer + if schema_translate_map: + self.schema_translate_map = schema_translate_map + self.preparer = self.preparer._with_schema_translate( + schema_translate_map + ) + + if statement is not None: + self.state = CompilerState.COMPILING + self.statement = statement + self.can_execute = statement.supports_execution + self._annotations = statement._annotations + if self.can_execute: + if TYPE_CHECKING: + assert isinstance(statement, Executable) + self.execution_options = statement._execution_options + self.string = self.process(self.statement, **compile_kwargs) + + if render_schema_translate: + self.string = self.preparer._render_schema_translates( + self.string, schema_translate_map + ) + + self.state = CompilerState.STRING_APPLIED + else: + self.state = CompilerState.NO_STATEMENT + + self._gen_time = perf_counter() + + def __init_subclass__(cls) -> None: + cls._init_compiler_cls() + return super().__init_subclass__() + + @classmethod + def _init_compiler_cls(cls): + pass + + def _execute_on_connection( + self, connection, distilled_params, execution_options + ): + if self.can_execute: + return connection._execute_compiled( + self, distilled_params, execution_options + ) + else: + raise exc.ObjectNotExecutableError(self.statement) + + def visit_unsupported_compilation(self, element, err, **kw): + raise exc.UnsupportedCompilationError(self, type(element)) from err + + @property + def sql_compiler(self): + """Return a Compiled that is capable of processing SQL expressions. + + If this compiler is one, it would likely just return 'self'. 
+ + """ + + raise NotImplementedError() + + def process(self, obj: Visitable, **kwargs: Any) -> str: + return obj._compiler_dispatch(self, **kwargs) + + def __str__(self) -> str: + """Return the string text of the generated SQL or DDL.""" + + if self.state is CompilerState.STRING_APPLIED: + return self.string + else: + return "" + + def construct_params( + self, + params: Optional[_CoreSingleExecuteParams] = None, + extracted_parameters: Optional[Sequence[BindParameter[Any]]] = None, + escape_names: bool = True, + ) -> Optional[_MutableCoreSingleExecuteParams]: + """Return the bind params for this compiled object. + + :param params: a dict of string/object pairs whose values will + override bind values compiled in to the + statement. + """ + + raise NotImplementedError() + + @property + def params(self): + """Return the bind params for this compiled object.""" + return self.construct_params() + + +class TypeCompiler(util.EnsureKWArg): + """Produces DDL specification for TypeEngine objects.""" + + ensure_kwarg = r"visit_\w+" + + def __init__(self, dialect: Dialect): + self.dialect = dialect + + def process(self, type_: TypeEngine[Any], **kw: Any) -> str: + if ( + type_._variant_mapping + and self.dialect.name in type_._variant_mapping + ): + type_ = type_._variant_mapping[self.dialect.name] + return type_._compiler_dispatch(self, **kw) + + def visit_unsupported_compilation( + self, element: Any, err: Exception, **kw: Any + ) -> NoReturn: + raise exc.UnsupportedCompilationError(self, element) from err + + +# this was a Visitable, but to allow accurate detection of +# column elements this is actually a column element +class _CompileLabel( + roles.BinaryElementRole[Any], elements.CompilerColumnElement +): + """lightweight label object which acts as an expression.Label.""" + + __visit_name__ = "label" + __slots__ = "element", "name", "_alt_names" + + def __init__(self, col, name, alt_names=()): + self.element = col + self.name = name + self._alt_names = (col,) + 
alt_names + + @property + def proxy_set(self): + return self.element.proxy_set + + @property + def type(self): + return self.element.type + + def self_group(self, **kw): + return self + + +class ilike_case_insensitive( + roles.BinaryElementRole[Any], elements.CompilerColumnElement +): + """produce a wrapping element for a case-insensitive portion of + an ILIKE construct. + + The construct usually renders the ``lower()`` function, but on + PostgreSQL will pass silently with the assumption that "ILIKE" + is being used. + + .. versionadded:: 2.0 + + """ + + __visit_name__ = "ilike_case_insensitive_operand" + __slots__ = "element", "comparator" + + def __init__(self, element): + self.element = element + self.comparator = element.comparator + + @property + def proxy_set(self): + return self.element.proxy_set + + @property + def type(self): + return self.element.type + + def self_group(self, **kw): + return self + + def _with_binary_element_type(self, type_): + return ilike_case_insensitive( + self.element._with_binary_element_type(type_) + ) + + +class SQLCompiler(Compiled): + """Default implementation of :class:`.Compiled`. + + Compiles :class:`_expression.ClauseElement` objects into SQL strings. + + """ + + extract_map = EXTRACT_MAP + + bindname_escape_characters: ClassVar[Mapping[str, str]] = ( + util.immutabledict( + { + "%": "P", + "(": "A", + ")": "Z", + ":": "C", + ".": "_", + "[": "_", + "]": "_", + " ": "_", + } + ) + ) + """A mapping (e.g. dict or similar) containing a lookup of + characters keyed to replacement characters which will be applied to all + 'bind names' used in SQL statements as a form of 'escaping'; the given + characters are replaced entirely with the 'replacement' character when + rendered in the SQL statement, and a similar translation is performed + on the incoming names used in parameter dictionaries passed to methods + like :meth:`_engine.Connection.execute`. 
+ + This allows bound parameter names used in :func:`_sql.bindparam` and + other constructs to have any arbitrary characters present without any + concern for characters that aren't allowed at all on the target database. + + Third party dialects can establish their own dictionary here to replace the + default mapping, which will ensure that the particular characters in the + mapping will never appear in a bound parameter name. + + The dictionary is evaluated at **class creation time**, so cannot be + modified at runtime; it must be present on the class when the class + is first declared. + + Note that for dialects that have additional bound parameter rules such + as additional restrictions on leading characters, the + :meth:`_sql.SQLCompiler.bindparam_string` method may need to be augmented. + See the cx_Oracle compiler for an example of this. + + .. versionadded:: 2.0.0rc1 + + """ + + _bind_translate_re: ClassVar[Pattern[str]] + _bind_translate_chars: ClassVar[Mapping[str, str]] + + is_sql = True + + compound_keywords = COMPOUND_KEYWORDS + + isdelete: bool = False + isinsert: bool = False + isupdate: bool = False + """class-level defaults which can be set at the instance + level to define if this Compiled instance represents + INSERT/UPDATE/DELETE + """ + + postfetch: Optional[List[Column[Any]]] + """list of columns that can be post-fetched after INSERT or UPDATE to + receive server-updated values""" + + insert_prefetch: Sequence[Column[Any]] = () + """list of columns for which default values should be evaluated before + an INSERT takes place""" + + update_prefetch: Sequence[Column[Any]] = () + """list of columns for which onupdate default values should be evaluated + before an UPDATE takes place""" + + implicit_returning: Optional[Sequence[ColumnElement[Any]]] = None + """list of "implicit" returning columns for a toplevel INSERT or UPDATE + statement, used to receive newly generated values of columns. + + .. 
versionadded:: 2.0 ``implicit_returning`` replaces the previous + ``returning`` collection, which was not a generalized RETURNING + collection and instead was in fact specific to the "implicit returning" + feature. + + """ + + isplaintext: bool = False + + binds: Dict[str, BindParameter[Any]] + """a dictionary of bind parameter keys to BindParameter instances.""" + + bind_names: Dict[BindParameter[Any], str] + """a dictionary of BindParameter instances to "compiled" names + that are actually present in the generated SQL""" + + stack: List[_CompilerStackEntry] + """major statements such as SELECT, INSERT, UPDATE, DELETE are + tracked in this stack using an entry format.""" + + returning_precedes_values: bool = False + """set to True classwide to generate RETURNING + clauses before the VALUES or WHERE clause (i.e. MSSQL) + """ + + render_table_with_column_in_update_from: bool = False + """set to True classwide to indicate the SET clause + in a multi-table UPDATE statement should qualify + columns with the table name (i.e. MySQL only) + """ + + ansi_bind_rules: bool = False + """SQL 92 doesn't allow bind parameters to be used + in the columns clause of a SELECT, nor does it allow + ambiguous expressions like "? = ?". A compiler + subclass can set this flag to False if the target + driver/DB enforces this + """ + + bindtemplate: str + """template to render bound parameters based on paramstyle.""" + + compilation_bindtemplate: str + """template used by compiler to render parameters before positional + paramstyle application""" + + _numeric_binds_identifier_char: str + """Character that's used to as the identifier of a numerical bind param. + For example if this char is set to ``$``, numerical binds will be rendered + in the form ``$1, $2, $3``. + """ + + _result_columns: List[ResultColumnsEntry] + """relates label names in the final SQL to a tuple of local + column/label name, ColumnElement object (if any) and + TypeEngine. 
CursorResult uses this for type processing and + column targeting""" + + _textual_ordered_columns: bool = False + """tell the result object that the column names as rendered are important, + but they are also "ordered" vs. what is in the compiled object here. + + As of 1.4.42 this condition is only present when the statement is a + TextualSelect, e.g. text("....").columns(...), where it is required + that the columns are considered positionally and not by name. + + """ + + _ad_hoc_textual: bool = False + """tell the result that we encountered text() or '*' constructs in the + middle of the result columns, but we also have compiled columns, so + if the number of columns in cursor.description does not match how many + expressions we have, that means we can't rely on positional at all and + should match on name. + + """ + + _ordered_columns: bool = True + """ + if False, means we can't be sure the list of entries + in _result_columns is actually the rendered order. Usually + True unless using an unordered TextualSelect. + """ + + _loose_column_name_matching: bool = False + """tell the result object that the SQL statement is textual, wants to match + up to Column objects, and may be using the ._tq_label in the SELECT rather + than the base name. + + """ + + _numeric_binds: bool = False + """ + True if paramstyle is "numeric". This paramstyle is trickier than + all the others. + + """ + + _render_postcompile: bool = False + """ + whether to render out POSTCOMPILE params during the compile phase. + + This attribute is used only for end-user invocation of stmt.compile(); + it's never used for actual statement execution, where instead the + dialect internals access and render the internal postcompile structure + directly. 
+ + """ + + _post_compile_expanded_state: Optional[ExpandedState] = None + """When render_postcompile is used, the ``ExpandedState`` used to create + the "expanded" SQL is assigned here, and then used by the ``.params`` + accessor and ``.construct_params()`` methods for their return values. + + .. versionadded:: 2.0.0rc1 + + """ + + _pre_expanded_string: Optional[str] = None + """Stores the original string SQL before 'post_compile' is applied, + for cases where 'post_compile' were used. + + """ + + _pre_expanded_positiontup: Optional[List[str]] = None + + _insertmanyvalues: Optional[_InsertManyValues] = None + + _insert_crud_params: Optional[crud._CrudParamSequence] = None + + literal_execute_params: FrozenSet[BindParameter[Any]] = frozenset() + """bindparameter objects that are rendered as literal values at statement + execution time. + + """ + + post_compile_params: FrozenSet[BindParameter[Any]] = frozenset() + """bindparameter objects that are rendered as bound parameter placeholders + at statement execution time. + + """ + + escaped_bind_names: util.immutabledict[str, str] = util.EMPTY_DICT + """Late escaping of bound parameter names that has to be converted + to the original name when looking in the parameter dictionary. + + """ + + has_out_parameters = False + """if True, there are bindparam() objects that have the isoutparam + flag set.""" + + postfetch_lastrowid = False + """if True, and this in insert, use cursor.lastrowid to populate + result.inserted_primary_key. """ + + _cache_key_bind_match: Optional[ + Tuple[ + Dict[ + BindParameter[Any], + List[BindParameter[Any]], + ], + Dict[ + str, + BindParameter[Any], + ], + ] + ] = None + """a mapping that will relate the BindParameter object we compile + to those that are part of the extracted collection of parameters + in the cache key, if we were given a cache key. 
    positiontup: Optional[List[str]] = None
    """for a compiled construct that uses a positional paramstyle, will be
    a sequence of strings, indicating the names of bound parameters in order.

    This is used in order to render bound parameters in their correct order,
    and is combined with the :attr:`_sql.Compiled.params` dictionary to
    render parameters.

    This sequence always contains the unescaped name of the parameters.

    .. seealso::

        :ref:`faq_sql_expression_string` - includes a usage example for
        debugging use cases.

    """
    _values_bindparam: Optional[List[str]] = None

    _visited_bindparam: Optional[List[str]] = None

    inline: bool = False

    ctes: Optional[MutableMapping[CTE, str]]

    # Detect same CTE references - Dict[(level, name), cte]
    # Level is required for supporting nesting
    ctes_by_level_name: Dict[Tuple[int, str], CTE]

    # To retrieve key/level in ctes_by_level_name -
    # Dict[cte_reference, (level, cte_name, cte_opts)]
    level_name_by_cte: Dict[CTE, Tuple[int, str, selectable._CTEOpts]]

    ctes_recursive: bool

    # regexen used to locate "__[POSTCOMPILE_...]" tokens and
    # pyformat-style "%(name)s" binds inside an already-rendered SQL string
    _post_compile_pattern = re.compile(r"__\[POSTCOMPILE_(\S+?)(~~.+?~~)?\]")
    _pyformat_pattern = re.compile(r"%\(([^)]+?)\)s")
    _positional_pattern = re.compile(
        f"{_pyformat_pattern.pattern}|{_post_compile_pattern.pattern}"
    )

    @classmethod
    def _init_compiler_cls(cls):
        # per-class setup hook; builds the bind-name escaping regex once
        cls._init_bind_translate()

    @classmethod
    def _init_bind_translate(cls):
        # compile a character class matching every character that must be
        # escaped when it appears in a bound parameter name
        reg = re.escape("".join(cls.bindname_escape_characters))
        cls._bind_translate_re = re.compile(f"[{reg}]")
        cls._bind_translate_chars = cls.bindname_escape_characters

    def __init__(
        self,
        dialect: Dialect,
        statement: Optional[ClauseElement],
        cache_key: Optional[CacheKey] = None,
        column_keys: Optional[Sequence[str]] = None,
        for_executemany: bool = False,
        linting: Linting = NO_LINTING,
        _supporting_against: Optional[SQLCompiler] = None,
        **kwargs: Any,
    ):
        """Construct a new :class:`.SQLCompiler` object.

        :param dialect: :class:`.Dialect` to be used

        :param statement: :class:`_expression.ClauseElement` to be compiled

        :param cache_key: optional :class:`.CacheKey` from which bound
         parameters in the compiled statement are correlated back to the
         extracted parameter collection

        :param column_keys: a list of column names to be compiled into an
         INSERT or UPDATE statement.

        :param for_executemany: whether INSERT / UPDATE statements should
         expect that they are to be invoked in an "executemany" style,
         which may impact how the statement will be expected to return the
         values of defaults and autoincrement / sequences and similar.
         Depending on the backend and driver in use, support for retrieving
         these values may be disabled which means SQL expressions may
         be rendered inline, RETURNING may not be rendered, etc.

        :param kwargs: additional keyword arguments to be consumed by the
         superclass.

        """
        self.column_keys = column_keys

        self.cache_key = cache_key

        if cache_key:
            # index the cache key's extracted BindParameters both by .key
            # and identity, so compiled binds can be matched back to them
            cksm = {b.key: b for b in cache_key[1]}
            ckbm = {b: [b] for b in cache_key[1]}
            self._cache_key_bind_match = (ckbm, cksm)

        # compile INSERT/UPDATE defaults/sequences to expect executemany
        # style execution, which may mean no pre-execute of defaults,
        # or no RETURNING
        self.for_executemany = for_executemany

        self.linting = linting

        # a dictionary of bind parameter keys to BindParameter
        # instances.
        self.binds = {}

        # a dictionary of BindParameter instances to "compiled" names
        # that are actually present in the generated SQL
        self.bind_names = util.column_dict()

        # stack which keeps track of nested SELECT statements
        self.stack = []

        self._result_columns = []

        # true if the paramstyle is positional
        self.positional = dialect.positional
        if self.positional:
            self._numeric_binds = nb = dialect.paramstyle.startswith("numeric")
            if nb:
                self._numeric_binds_identifier_char = (
                    "$" if dialect.paramstyle == "numeric_dollar" else ":"
                )

            self.compilation_bindtemplate = _pyformat_template
        else:
            self.compilation_bindtemplate = BIND_TEMPLATES[dialect.paramstyle]

        self.ctes = None

        self.label_length = (
            dialect.label_length or dialect.max_identifier_length
        )

        # a map which tracks "anonymous" identifiers that are created on
        # the fly here
        self.anon_map = prefix_anon_map()

        # a map which tracks "truncated" names based on
        # dialect.label_length or dialect.max_identifier_length
        self.truncated_names: Dict[Tuple[str, str], str] = {}
        self._truncated_counters: Dict[str, int] = {}

        Compiled.__init__(self, dialect, statement, **kwargs)

        if self.isinsert or self.isupdate or self.isdelete:
            if TYPE_CHECKING:
                assert isinstance(statement, UpdateBase)

            if self.isinsert or self.isupdate:
                if TYPE_CHECKING:
                    assert isinstance(statement, ValuesBase)
                if statement._inline:
                    self.inline = True
                elif self.for_executemany and (
                    not self.isinsert
                    or (
                        self.dialect.insert_executemany_returning
                        and statement._return_defaults
                    )
                ):
                    self.inline = True

        self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle]

        if _supporting_against:
            # copy most of the supporting compiler's state, excluding
            # dialect/paramstyle-specific fields that must stay our own
            self.__dict__.update(
                {
                    k: v
                    for k, v in _supporting_against.__dict__.items()
                    if k
                    not in {
                        "state",
                        "dialect",
                        "preparer",
                        "positional",
                        "_numeric_binds",
                        "compilation_bindtemplate",
                        "bindtemplate",
                    }
                }
            )

        if self.state is CompilerState.STRING_APPLIED:
            if self.positional:
                if self._numeric_binds:
                    self._process_numeric()
                else:
                    self._process_positional()

            if self._render_postcompile:
                parameters = self.construct_params(
                    escape_names=False,
                    _no_postcompile=True,
                )

                self._process_parameters_for_postcompile(
                    parameters, _populate_self=True
                )

    @property
    def insert_single_values_expr(self) -> Optional[str]:
        """When an INSERT is compiled with a single set of parameters inside
        a VALUES expression, the string is assigned here, where it can be
        used for insert batching schemes to rewrite the VALUES expression.

        .. versionadded:: 1.3.8

        .. versionchanged:: 2.0  This collection is no longer used by
           SQLAlchemy's built-in dialects, in favor of the currently
           internal ``_insertmanyvalues`` collection that is used only by
           :class:`.SQLCompiler`.

        """
        if self._insertmanyvalues is None:
            return None
        else:
            return self._insertmanyvalues.single_values_expr

    @util.ro_memoized_property
    def effective_returning(self) -> Optional[Sequence[ColumnElement[Any]]]:
        """The effective "returning" columns for INSERT, UPDATE or DELETE.

        This is either the so-called "implicit returning" columns which are
        calculated by the compiler on the fly, or those present based on what's
        present in ``self.statement._returning`` (expanded into individual
        columns using the ``._all_selected_columns`` attribute) i.e. those set
        explicitly using the :meth:`.UpdateBase.returning` method.

        .. versionadded:: 2.0

        """
        if self.implicit_returning:
            return self.implicit_returning
        elif self.statement is not None and is_dml(self.statement):
            return [
                c
                for c in self.statement._all_selected_columns
                if is_column_element(c)
            ]

        else:
            return None

    @property
    def returning(self):
        """backwards compatibility; returns the
        effective_returning collection.

        """
        return self.effective_returning
+ + """ + return self.effective_returning + + @property + def current_executable(self): + """Return the current 'executable' that is being compiled. + + This is currently the :class:`_sql.Select`, :class:`_sql.Insert`, + :class:`_sql.Update`, :class:`_sql.Delete`, + :class:`_sql.CompoundSelect` object that is being compiled. + Specifically it's assigned to the ``self.stack`` list of elements. + + When a statement like the above is being compiled, it normally + is also assigned to the ``.statement`` attribute of the + :class:`_sql.Compiler` object. However, all SQL constructs are + ultimately nestable, and this attribute should never be consulted + by a ``visit_`` method, as it is not guaranteed to be assigned + nor guaranteed to correspond to the current statement being compiled. + + .. versionadded:: 1.3.21 + + For compatibility with previous versions, use the following + recipe:: + + statement = getattr(self, "current_executable", False) + if statement is False: + statement = self.stack[-1]["selectable"] + + For versions 1.4 and above, ensure only .current_executable + is used; the format of "self.stack" may change. + + + """ + try: + return self.stack[-1]["selectable"] + except IndexError as ie: + raise IndexError("Compiler does not have a stack entry") from ie + + @property + def prefetch(self): + return list(self.insert_prefetch) + list(self.update_prefetch) + + @util.memoized_property + def _global_attributes(self) -> Dict[Any, Any]: + return {} + + @util.memoized_instancemethod + def _init_cte_state(self) -> MutableMapping[CTE, str]: + """Initialize collections related to CTEs only if + a CTE is located, to save on the overhead of + these collections otherwise. 
+ + """ + # collect CTEs to tack on top of a SELECT + # To store the query to print - Dict[cte, text_query] + ctes: MutableMapping[CTE, str] = util.OrderedDict() + self.ctes = ctes + + # Detect same CTE references - Dict[(level, name), cte] + # Level is required for supporting nesting + self.ctes_by_level_name = {} + + # To retrieve key/level in ctes_by_level_name - + # Dict[cte_reference, (level, cte_name, cte_opts)] + self.level_name_by_cte = {} + + self.ctes_recursive = False + + return ctes + + @contextlib.contextmanager + def _nested_result(self): + """special API to support the use case of 'nested result sets'""" + result_columns, ordered_columns = ( + self._result_columns, + self._ordered_columns, + ) + self._result_columns, self._ordered_columns = [], False + + try: + if self.stack: + entry = self.stack[-1] + entry["need_result_map_for_nested"] = True + else: + entry = None + yield self._result_columns, self._ordered_columns + finally: + if entry: + entry.pop("need_result_map_for_nested") + self._result_columns, self._ordered_columns = ( + result_columns, + ordered_columns, + ) + + def _process_positional(self): + assert not self.positiontup + assert self.state is CompilerState.STRING_APPLIED + assert not self._numeric_binds + + if self.dialect.paramstyle == "format": + placeholder = "%s" + else: + assert self.dialect.paramstyle == "qmark" + placeholder = "?" 
    def _process_numeric(self):
        """Rewrite the compiled pyformat string into the "numeric" /
        "numeric_dollar" paramstyle (e.g. ``:1`` / ``$1`` placeholders),
        assigning positions in bind-name order.

        """
        assert self._numeric_binds
        assert self.state is CompilerState.STRING_APPLIED

        num = 1
        param_pos: Dict[str, str] = {}
        order: Iterable[str]
        if self._insertmanyvalues and self._values_bindparam is not None:
            # bindparams that are not in values are always placed first.
            # this avoids the need of changing them when using executemany
            # values () ()
            order = itertools.chain(
                (
                    name
                    for name in self.bind_names.values()
                    if name not in self._values_bindparam
                ),
                self.bind_names.values(),
            )
        else:
            order = self.bind_names.values()

        for bind_name in order:
            if bind_name in param_pos:
                continue
            bind = self.binds[bind_name]
            if (
                bind in self.post_compile_params
                or bind in self.literal_execute_params
            ):
                # set to None to just mark the in positiontup, it will not
                # be replaced below.
                param_pos[bind_name] = None  # type: ignore
            else:
                ph = f"{self._numeric_binds_identifier_char}{num}"
                num += 1
                param_pos[bind_name] = ph

        self.next_numeric_pos = num

        self.positiontup = list(param_pos)
        if self.escaped_bind_names:
            len_before = len(param_pos)
            param_pos = {
                self.escaped_bind_names.get(name, name): pos
                for name, pos in param_pos.items()
            }
            assert len(param_pos) == len_before

        # Can't use format here since % chars are not escaped.
        self.string = self._pyformat_pattern.sub(
            lambda m: param_pos[m.group(1)], self.string
        )

        if self._insertmanyvalues:
            single_values_expr = (
                # format is ok here since single_values_expr includes only
                # place-holders
                self._insertmanyvalues.single_values_expr
                % param_pos
            )
            insert_crud_params = [
                (v[0], v[1], "%s", v[3])
                for v in self._insertmanyvalues.insert_crud_params
            ]

            self._insertmanyvalues = self._insertmanyvalues._replace(
                # This has the numbers (:1, :2)
                single_values_expr=single_values_expr,
                # The single binds are instead %s so they can be formatted
                insert_crud_params=insert_crud_params,
            )

    @util.memoized_property
    def _bind_processors(
        self,
    ) -> MutableMapping[
        str, Union[_BindProcessorType[Any], Sequence[_BindProcessorType[Any]]]
    ]:
        # map of compiled bind name -> bind processor callable; a tuple
        # of processors is stored for tuple-typed ("expanding IN") binds.
        # mypy is not able to see the two value types as the above Union,
        # it just sees "object".  don't know how to resolve
        return {
            key: value  # type: ignore
            for key, value in (
                (
                    self.bind_names[bindparam],
                    (
                        bindparam.type._cached_bind_processor(self.dialect)
                        if not bindparam.type._is_tuple_type
                        else tuple(
                            elem_type._cached_bind_processor(self.dialect)
                            for elem_type in cast(
                                TupleType, bindparam.type
                            ).types
                        )
                    ),
                )
                for bindparam in self.bind_names
            )
            if value is not None
        }
    def is_subquery(self):
        # more than one entry on the stack means we are inside a nested
        # SELECT
        return len(self.stack) > 1

    @property
    def sql_compiler(self):
        # an SQLCompiler is its own sql_compiler (contrast with DDL
        # compilers, which delegate)
        return self

    def construct_expanded_state(
        self,
        params: Optional[_CoreSingleExecuteParams] = None,
        escape_names: bool = True,
    ) -> ExpandedState:
        """Return a new :class:`.ExpandedState` for a given parameter set.

        For queries that use "expanding" or other late-rendered parameters,
        this method will provide for both the finalized SQL string as well
        as the parameters that would be used for a particular parameter set.

        .. versionadded:: 2.0.0rc1

        """
        parameters = self.construct_params(
            params,
            escape_names=escape_names,
            _no_postcompile=True,
        )
        return self._process_parameters_for_postcompile(
            parameters,
        )

    def construct_params(
        self,
        params: Optional[_CoreSingleExecuteParams] = None,
        extracted_parameters: Optional[Sequence[BindParameter[Any]]] = None,
        escape_names: bool = True,
        _group_number: Optional[int] = None,
        _check: bool = True,
        _no_postcompile: bool = False,
    ) -> _MutableCoreSingleExecuteParams:
        """return a dictionary of bind parameter keys and values"""

        if self._render_postcompile and not _no_postcompile:
            assert self._post_compile_expanded_state is not None
            if not params:
                return dict(self._post_compile_expanded_state.parameters)
            else:
                raise exc.InvalidRequestError(
                    "can't construct new parameters when render_postcompile "
                    "is used; the statement is hard-linked to the original "
                    "parameters. Use construct_expanded_state to generate a "
                    "new statement and parameters."
                )

        has_escaped_names = escape_names and bool(self.escaped_bind_names)

        if extracted_parameters:
            # related the bound parameters collected in the original cache key
            # to those collected in the incoming cache key.  They will not have
            # matching names but they will line up positionally in the same
            # way.  The parameters present in self.bind_names may be clones of
            # these original cache key params in the case of DML but the .key
            # will be guaranteed to match.
            if self.cache_key is None:
                raise exc.CompileError(
                    "This compiled object has no original cache key; "
                    "can't pass extracted_parameters to construct_params"
                )
            else:
                orig_extracted = self.cache_key[1]

            ckbm_tuple = self._cache_key_bind_match
            assert ckbm_tuple is not None
            ckbm, _ = ckbm_tuple
            resolved_extracted = {
                bind: extracted
                for b, extracted in zip(orig_extracted, extracted_parameters)
                for bind in ckbm[b]
            }
        else:
            resolved_extracted = None

        if params:
            pd = {}
            for bindparam, name in self.bind_names.items():
                escaped_name = (
                    self.escaped_bind_names.get(name, name)
                    if has_escaped_names
                    else name
                )

                if bindparam.key in params:
                    pd[escaped_name] = params[bindparam.key]
                elif name in params:
                    pd[escaped_name] = params[name]

                elif _check and bindparam.required:
                    if _group_number:
                        raise exc.InvalidRequestError(
                            "A value is required for bind parameter %r, "
                            "in parameter group %d"
                            % (bindparam.key, _group_number),
                            code="cd3x",
                        )
                    else:
                        raise exc.InvalidRequestError(
                            "A value is required for bind parameter %r"
                            % bindparam.key,
                            code="cd3x",
                        )
                else:
                    # no user-supplied value; fall back to the bindparam's
                    # own value, possibly swapped for the equivalent param
                    # extracted from the incoming cache key
                    if resolved_extracted:
                        value_param = resolved_extracted.get(
                            bindparam, bindparam
                        )
                    else:
                        value_param = bindparam

                    if bindparam.callable:
                        pd[escaped_name] = value_param.effective_value
                    else:
                        pd[escaped_name] = value_param.value
            return pd
        else:
            pd = {}
            for bindparam, name in self.bind_names.items():
                escaped_name = (
                    self.escaped_bind_names.get(name, name)
                    if has_escaped_names
                    else name
                )

                if _check and bindparam.required:
                    if _group_number:
                        raise exc.InvalidRequestError(
                            "A value is required for bind parameter %r, "
                            "in parameter group %d"
                            % (bindparam.key, _group_number),
                            code="cd3x",
                        )
                    else:
                        raise exc.InvalidRequestError(
                            "A value is required for bind parameter %r"
                            % bindparam.key,
                            code="cd3x",
                        )

                if resolved_extracted:
                    value_param = resolved_extracted.get(bindparam, bindparam)
                else:
                    value_param = bindparam

                if bindparam.callable:
                    pd[escaped_name] = value_param.effective_value
                else:
                    pd[escaped_name] = value_param.value

            return pd
self.escaped_bind_names.get(name, name) + if has_escaped_names + else name + ) + + if _check and bindparam.required: + if _group_number: + raise exc.InvalidRequestError( + "A value is required for bind parameter %r, " + "in parameter group %d" + % (bindparam.key, _group_number), + code="cd3x", + ) + else: + raise exc.InvalidRequestError( + "A value is required for bind parameter %r" + % bindparam.key, + code="cd3x", + ) + + if resolved_extracted: + value_param = resolved_extracted.get(bindparam, bindparam) + else: + value_param = bindparam + + if bindparam.callable: + pd[escaped_name] = value_param.effective_value + else: + pd[escaped_name] = value_param.value + + return pd + + @util.memoized_instancemethod + def _get_set_input_sizes_lookup(self): + dialect = self.dialect + + include_types = dialect.include_set_input_sizes + exclude_types = dialect.exclude_set_input_sizes + + dbapi = dialect.dbapi + + def lookup_type(typ): + dbtype = typ._unwrapped_dialect_impl(dialect).get_dbapi_type(dbapi) + + if ( + dbtype is not None + and (exclude_types is None or dbtype not in exclude_types) + and (include_types is None or dbtype in include_types) + ): + return dbtype + else: + return None + + inputsizes = {} + + literal_execute_params = self.literal_execute_params + + for bindparam in self.bind_names: + if bindparam in literal_execute_params: + continue + + if bindparam.type._is_tuple_type: + inputsizes[bindparam] = [ + lookup_type(typ) + for typ in cast(TupleType, bindparam.type).types + ] + else: + inputsizes[bindparam] = lookup_type(bindparam.type) + + return inputsizes + + @property + def params(self): + """Return the bind param dictionary embedded into this + compiled object, for those values that are present. + + .. seealso:: + + :ref:`faq_sql_expression_string` - includes a usage example for + debugging use cases. 
    def _process_parameters_for_postcompile(
        self,
        parameters: _MutableCoreSingleExecuteParams,
        _populate_self: bool = False,
    ) -> ExpandedState:
        """handle special post compile parameters.

        These include:

        * "expanding" parameters -typically IN tuples that are rendered
          on a per-parameter basis for an otherwise fixed SQL statement string.

        * literal_binds compiled with the literal_execute flag.  Used for
          things like SQL Server "TOP N" where the driver does not accommodate
          N as a bound parameter.

        """

        expanded_parameters = {}
        new_positiontup: Optional[List[str]]

        # start from the pre-expansion SQL string if a previous expansion
        # already took place (render_postcompile case)
        pre_expanded_string = self._pre_expanded_string
        if pre_expanded_string is None:
            pre_expanded_string = self.string

        if self.positional:
            new_positiontup = []

            pre_expanded_positiontup = self._pre_expanded_positiontup
            if pre_expanded_positiontup is None:
                pre_expanded_positiontup = self.positiontup

        else:
            new_positiontup = pre_expanded_positiontup = None

        processors = self._bind_processors
        single_processors = cast(
            "Mapping[str, _BindProcessorType[Any]]", processors
        )
        tuple_processors = cast(
            "Mapping[str, Sequence[_BindProcessorType[Any]]]", processors
        )

        new_processors: Dict[str, _BindProcessorType[Any]] = {}

        replacement_expressions: Dict[str, Any] = {}
        to_update_sets: Dict[str, Any] = {}

        # notes:
        # *unescaped* parameter names in:
        # self.bind_names, self.binds, self._bind_processors, self.positiontup
        #
        # *escaped* parameter names in:
        # construct_params(), replacement_expressions

        numeric_positiontup: Optional[List[str]] = None

        if self.positional and pre_expanded_positiontup is not None:
            names: Iterable[str] = pre_expanded_positiontup
            if self._numeric_binds:
                numeric_positiontup = []
        else:
            names = self.bind_names.values()

        ebn = self.escaped_bind_names
        for name in names:
            escaped_name = ebn.get(name, name) if ebn else name
            parameter = self.binds[name]

            if parameter in self.literal_execute_params:
                if escaped_name not in replacement_expressions:
                    replacement_expressions[escaped_name] = (
                        self.render_literal_bindparam(
                            parameter,
                            render_literal_value=parameters.pop(escaped_name),
                        )
                    )
                continue

            if parameter in self.post_compile_params:
                if escaped_name in replacement_expressions:
                    to_update = to_update_sets[escaped_name]
                    values = None
                else:
                    # we are removing the parameter from parameters
                    # because it is a list value, which is not expected by
                    # TypeEngine objects that would otherwise be asked to
                    # process it. the single name is being replaced with
                    # individual numbered parameters for each value in the
                    # param.
                    #
                    # note we are also inserting *escaped* parameter names
                    # into the given dictionary.  default dialect will
                    # use these param names directly as they will not be
                    # in the escaped_bind_names dictionary.
                    values = parameters.pop(name)

                    leep_res = self._literal_execute_expanding_parameter(
                        escaped_name, parameter, values
                    )
                    (to_update, replacement_expr) = leep_res

                    to_update_sets[escaped_name] = to_update
                    replacement_expressions[escaped_name] = replacement_expr

                if not parameter.literal_execute:
                    parameters.update(to_update)
                    if parameter.type._is_tuple_type:
                        assert values is not None
                        new_processors.update(
                            (
                                "%s_%s_%s" % (name, i, j),
                                tuple_processors[name][j - 1],
                            )
                            for i, tuple_element in enumerate(values, 1)
                            for j, _ in enumerate(tuple_element, 1)
                            if name in tuple_processors
                            and tuple_processors[name][j - 1] is not None
                        )
                    else:
                        new_processors.update(
                            (key, single_processors[name])
                            for key, _ in to_update
                            if name in single_processors
                        )
                    if numeric_positiontup is not None:
                        numeric_positiontup.extend(
                            name for name, _ in to_update
                        )
                    elif new_positiontup is not None:
                        # to_update has escaped names, but that's ok since
                        # these are new names, that aren't in the
                        # escaped_bind_names dict.
                        new_positiontup.extend(name for name, _ in to_update)
                    expanded_parameters[name] = [
                        expand_key for expand_key, _ in to_update
                    ]
            elif new_positiontup is not None:
                new_positiontup.append(name)

        def process_expanding(m):
            # substitute each __[POSTCOMPILE_...] token with its rendered
            # replacement expression
            key = m.group(1)
            expr = replacement_expressions[key]

            # if POSTCOMPILE included a bind_expression, render that
            # around each element
            if m.group(2):
                tok = m.group(2).split("~~")
                be_left, be_right = tok[1], tok[3]
                expr = ", ".join(
                    "%s%s%s" % (be_left, exp, be_right)
                    for exp in expr.split(", ")
                )
            return expr

        statement = re.sub(
            self._post_compile_pattern, process_expanding, pre_expanded_string
        )

        if numeric_positiontup is not None:
            assert new_positiontup is not None
            # the expanded parameters are placed at the end of the
            # numeric sequence, continuing from next_numeric_pos
            param_pos = {
                key: f"{self._numeric_binds_identifier_char}{num}"
                for num, key in enumerate(
                    numeric_positiontup, self.next_numeric_pos
                )
            }
            # Can't use format here since % chars are not escaped.
            statement = self._pyformat_pattern.sub(
                lambda m: param_pos[m.group(1)], statement
            )
            new_positiontup.extend(numeric_positiontup)

        expanded_state = ExpandedState(
            statement,
            parameters,
            new_processors,
            new_positiontup,
            expanded_parameters,
        )

        if _populate_self:
            # this is for the "render_postcompile" flag, which is not
            # otherwise used internally and is for end-user debugging and
            # special use cases.
            self._pre_expanded_string = pre_expanded_string
            self._pre_expanded_positiontup = pre_expanded_positiontup
            self.string = expanded_state.statement
            self.positiontup = (
                list(expanded_state.positiontup or ())
                if self.positional
                else None
            )
            self._post_compile_expanded_state = expanded_state

        return expanded_state
    @util.preload_module("sqlalchemy.engine.cursor")
    def _create_result_map(self):
        """utility method used for unit tests only."""
        cursor = util.preloaded.engine_cursor
        return cursor.CursorResultMetaData._create_description_match_map(
            self._result_columns
        )

    # assigned by crud.py for insert/update statements
    _get_bind_name_for_col: _BindNameForColProtocol

    @util.memoized_property
    def _within_exec_param_key_getter(self) -> Callable[[Any], str]:
        # memoized indirection to the crud.py-assigned bind name getter
        getter = self._get_bind_name_for_col
        return getter

    @util.memoized_property
    @util.preload_module("sqlalchemy.engine.result")
    def _inserted_primary_key_from_lastrowid_getter(self):
        """Build and memoize a callable that produces the
        inserted_primary_key row from cursor.lastrowid plus the INSERT
        parameters.

        """
        result = util.preloaded.engine_result

        param_key_getter = self._within_exec_param_key_getter

        assert self.compile_state is not None
        statement = self.compile_state.statement

        if TYPE_CHECKING:
            assert isinstance(statement, Insert)

        table = statement.table

        getters = [
            (operator.methodcaller("get", param_key_getter(col), None), col)
            for col in table.primary_key
        ]

        autoinc_getter = None
        autoinc_col = table._autoincrement_column
        if autoinc_col is not None:
            # apply type post processors to the lastrowid
            lastrowid_processor = autoinc_col.type._cached_result_processor(
                self.dialect, None
            )
            autoinc_key = param_key_getter(autoinc_col)

            # if a bind value is present for the autoincrement column
            # in the parameters, we need to do the logic dictated by
            # #7998; honor a non-None user-passed parameter over lastrowid.
            # previously in the 1.4 series we weren't fetching lastrowid
            # at all if the key were present in the parameters
            if autoinc_key in self.binds:

                def _autoinc_getter(lastrowid, parameters):
                    param_value = parameters.get(autoinc_key, lastrowid)
                    if param_value is not None:
                        # they supplied non-None parameter, use that.
                        # SQLite at least is observed to return the wrong
                        # cursor.lastrowid for INSERT..ON CONFLICT so it
                        # can't be used in all cases
                        return param_value
                    else:
                        # use lastrowid
                        return lastrowid

                # work around mypy https://github.com/python/mypy/issues/14027
                autoinc_getter = _autoinc_getter

        else:
            lastrowid_processor = None

        row_fn = result.result_tuple([col.key for col in table.primary_key])

        def get(lastrowid, parameters):
            """given cursor.lastrowid value and the parameters used for INSERT,
            return a "row" that represents the primary key, either by
            using the "lastrowid" or by extracting values from the parameters
            that were sent along with the INSERT.

            """
            if lastrowid_processor is not None:
                lastrowid = lastrowid_processor(lastrowid)

            if lastrowid is None:
                return row_fn(getter(parameters) for getter, col in getters)
            else:
                return row_fn(
                    (
                        (
                            autoinc_getter(lastrowid, parameters)
                            if autoinc_getter is not None
                            else lastrowid
                        )
                        if col is autoinc_col
                        else getter(parameters)
                    )
                    for getter, col in getters
                )

        return get
    @util.memoized_property
    @util.preload_module("sqlalchemy.engine.result")
    def _inserted_primary_key_from_returning_getter(self):
        """Build and memoize a callable that produces the
        inserted_primary_key row from a RETURNING row plus the INSERT
        parameters.

        """
        if typing.TYPE_CHECKING:
            from ..engine import result
        else:
            result = util.preloaded.engine_result

        assert self.compile_state is not None
        statement = self.compile_state.statement

        if TYPE_CHECKING:
            assert isinstance(statement, Insert)

        param_key_getter = self._within_exec_param_key_getter
        table = statement.table

        returning = self.implicit_returning
        assert returning is not None
        ret = {col: idx for idx, col in enumerate(returning)}

        # for each PK column, either pull from the RETURNING row
        # (use_row=True) or fall back to the INSERT parameters
        getters = cast(
            "List[Tuple[Callable[[Any], Any], bool]]",
            [
                (
                    (operator.itemgetter(ret[col]), True)
                    if col in ret
                    else (
                        operator.methodcaller(
                            "get", param_key_getter(col), None
                        ),
                        False,
                    )
                )
                for col in table.primary_key
            ],
        )

        row_fn = result.result_tuple([col.key for col in table.primary_key])

        def get(row, parameters):
            return row_fn(
                getter(row) if use_row else getter(parameters)
                for getter, use_row in getters
            )

        return get

    def default_from(self):
        """Called when a SELECT statement has no froms, and no FROM clause is
        to be appended.

        Gives Oracle a chance to tack on a ``FROM DUAL`` to the string output.

        """
        return ""

    def visit_override_binds(self, override_binds, **kw):
        """SQL compile the nested element of an _OverrideBinds with
        bindparams swapped out.

        The _OverrideBinds is not normally expected to be compiled; it
        is meant to be used when an already cached statement is to be used,
        the compilation was already performed, and only the bound params should
        be swapped in at execution time.

        However, there are test cases that exercise this object, and
        additionally the ORM subquery loader is known to feed in expressions
        which include this construct into new queries (discovered in #11173),
        so it has to do the right thing at compile time as well.

        """

        # get SQL text first
        sqltext = override_binds.element._compiler_dispatch(self, **kw)

        # for a test compile that is not for caching, change binds after the
        # fact.  note that we don't try to
        # swap the bindparam as we compile, because our element may be
        # elsewhere in the statement already (e.g. a subquery or perhaps a
        # CTE) and was already visited / compiled.  See
        # test_relationship_criteria.py ->
        # test_selectinload_local_criteria_subquery
        for k in override_binds.translate:
            if k not in self.binds:
                continue
            bp = self.binds[k]

            # so this would work, just change the value of bp in place.
            # but we dont want to mutate things outside.
            # bp.value = override_binds.translate[bp.key]
            # continue

            # instead, need to replace bp with new_bp or otherwise accommodate
            # in all internal collections
            new_bp = bp._with_value(
                override_binds.translate[bp.key],
                maintain_key=True,
                required=False,
            )

            name = self.bind_names[bp]
            self.binds[k] = self.binds[name] = new_bp
            self.bind_names[new_bp] = name
            self.bind_names.pop(bp, None)

            if bp in self.post_compile_params:
                self.post_compile_params |= {new_bp}
            if bp in self.literal_execute_params:
                self.literal_execute_params |= {new_bp}

            ckbm_tuple = self._cache_key_bind_match
            if ckbm_tuple:
                # keep the cache-key bind-match mapping aware of the
                # replacement parameter as well
                ckbm, cksm = ckbm_tuple
                for bp in bp._cloned_set:
                    if bp.key in cksm:
                        cb = cksm[bp.key]
                        ckbm[cb].append(new_bp)

        return sqltext
+ # bp.value = override_binds.translate[bp.key] + # continue + + # instead, need to replace bp with new_bp or otherwise accommodate + # in all internal collections + new_bp = bp._with_value( + override_binds.translate[bp.key], + maintain_key=True, + required=False, + ) + + name = self.bind_names[bp] + self.binds[k] = self.binds[name] = new_bp + self.bind_names[new_bp] = name + self.bind_names.pop(bp, None) + + if bp in self.post_compile_params: + self.post_compile_params |= {new_bp} + if bp in self.literal_execute_params: + self.literal_execute_params |= {new_bp} + + ckbm_tuple = self._cache_key_bind_match + if ckbm_tuple: + ckbm, cksm = ckbm_tuple + for bp in bp._cloned_set: + if bp.key in cksm: + cb = cksm[bp.key] + ckbm[cb].append(new_bp) + + return sqltext + + def visit_grouping(self, grouping, asfrom=False, **kwargs): + return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")" + + def visit_select_statement_grouping(self, grouping, **kwargs): + return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")" + + def visit_label_reference( + self, element, within_columns_clause=False, **kwargs + ): + if self.stack and self.dialect.supports_simple_order_by_label: + try: + compile_state = cast( + "Union[SelectState, CompoundSelectState]", + self.stack[-1]["compile_state"], + ) + except KeyError as ke: + raise exc.CompileError( + "Can't resolve label reference for ORDER BY / " + "GROUP BY / DISTINCT etc." + ) from ke + + ( + with_cols, + only_froms, + only_cols, + ) = compile_state._label_resolve_dict + if within_columns_clause: + resolve_dict = only_froms + else: + resolve_dict = only_cols + + # this can be None in the case that a _label_reference() + # were subject to a replacement operation, in which case + # the replacement of the Label element may have changed + # to something else like a ColumnClause expression. 
+ order_by_elem = element.element._order_by_label_element + + if ( + order_by_elem is not None + and order_by_elem.name in resolve_dict + and order_by_elem.shares_lineage( + resolve_dict[order_by_elem.name] + ) + ): + kwargs["render_label_as_label"] = ( + element.element._order_by_label_element + ) + return self.process( + element.element, + within_columns_clause=within_columns_clause, + **kwargs, + ) + + def visit_textual_label_reference( + self, element, within_columns_clause=False, **kwargs + ): + if not self.stack: + # compiling the element outside of the context of a SELECT + return self.process(element._text_clause) + + try: + compile_state = cast( + "Union[SelectState, CompoundSelectState]", + self.stack[-1]["compile_state"], + ) + except KeyError as ke: + coercions._no_text_coercion( + element.element, + extra=( + "Can't resolve label reference for ORDER BY / " + "GROUP BY / DISTINCT etc." + ), + exc_cls=exc.CompileError, + err=ke, + ) + + with_cols, only_froms, only_cols = compile_state._label_resolve_dict + try: + if within_columns_clause: + col = only_froms[element.element] + else: + col = with_cols[element.element] + except KeyError as err: + coercions._no_text_coercion( + element.element, + extra=( + "Can't resolve label reference for ORDER BY / " + "GROUP BY / DISTINCT etc." + ), + exc_cls=exc.CompileError, + err=err, + ) + else: + kwargs["render_label_as_label"] = col + return self.process( + col, within_columns_clause=within_columns_clause, **kwargs + ) + + def visit_label( + self, + label, + add_to_result_map=None, + within_label_clause=False, + within_columns_clause=False, + render_label_as_label=None, + result_map_targets=(), + **kw, + ): + # only render labels within the columns clause + # or ORDER BY clause of a select. dialect-specific compilers + # can modify this behavior. 
+ render_label_with_as = ( + within_columns_clause and not within_label_clause + ) + render_label_only = render_label_as_label is label + + if render_label_only or render_label_with_as: + if isinstance(label.name, elements._truncated_label): + labelname = self._truncated_identifier("colident", label.name) + else: + labelname = label.name + + if render_label_with_as: + if add_to_result_map is not None: + add_to_result_map( + labelname, + label.name, + (label, labelname) + label._alt_names + result_map_targets, + label.type, + ) + return ( + label.element._compiler_dispatch( + self, + within_columns_clause=True, + within_label_clause=True, + **kw, + ) + + OPERATORS[operators.as_] + + self.preparer.format_label(label, labelname) + ) + elif render_label_only: + return self.preparer.format_label(label, labelname) + else: + return label.element._compiler_dispatch( + self, within_columns_clause=False, **kw + ) + + def _fallback_column_name(self, column): + raise exc.CompileError( + "Cannot compile Column object until its 'name' is assigned." + ) + + def visit_lambda_element(self, element, **kw): + sql_element = element._resolved + return self.process(sql_element, **kw) + + def visit_column( + self, + column: ColumnClause[Any], + add_to_result_map: Optional[_ResultMapAppender] = None, + include_table: bool = True, + result_map_targets: Tuple[Any, ...] 
= (), + ambiguous_table_name_map: Optional[_AmbiguousTableNameMap] = None, + **kwargs: Any, + ) -> str: + name = orig_name = column.name + if name is None: + name = self._fallback_column_name(column) + + is_literal = column.is_literal + if not is_literal and isinstance(name, elements._truncated_label): + name = self._truncated_identifier("colident", name) + + if add_to_result_map is not None: + targets = (column, name, column.key) + result_map_targets + if column._tq_label: + targets += (column._tq_label,) + + add_to_result_map(name, orig_name, targets, column.type) + + if is_literal: + # note we are not currently accommodating for + # literal_column(quoted_name('ident', True)) here + name = self.escape_literal_column(name) + else: + name = self.preparer.quote(name) + table = column.table + if table is None or not include_table or not table.named_with_column: + return name + else: + effective_schema = self.preparer.schema_for_object(table) + + if effective_schema: + schema_prefix = ( + self.preparer.quote_schema(effective_schema) + "." + ) + else: + schema_prefix = "" + + if TYPE_CHECKING: + assert isinstance(table, NamedFromClause) + tablename = table.name + + if ( + not effective_schema + and ambiguous_table_name_map + and tablename in ambiguous_table_name_map + ): + tablename = ambiguous_table_name_map[tablename] + + if isinstance(tablename, elements._truncated_label): + tablename = self._truncated_identifier("alias", tablename) + + return schema_prefix + self.preparer.quote(tablename) + "." 
+ name + + def visit_collation(self, element, **kw): + return self.preparer.format_collation(element.collation) + + def visit_fromclause(self, fromclause, **kwargs): + return fromclause.name + + def visit_index(self, index, **kwargs): + return index.name + + def visit_typeclause(self, typeclause, **kw): + kw["type_expression"] = typeclause + kw["identifier_preparer"] = self.preparer + return self.dialect.type_compiler_instance.process( + typeclause.type, **kw + ) + + def post_process_text(self, text): + if self.preparer._double_percents: + text = text.replace("%", "%%") + return text + + def escape_literal_column(self, text): + if self.preparer._double_percents: + text = text.replace("%", "%%") + return text + + def visit_textclause(self, textclause, add_to_result_map=None, **kw): + def do_bindparam(m): + name = m.group(1) + if name in textclause._bindparams: + return self.process(textclause._bindparams[name], **kw) + else: + return self.bindparam_string(name, **kw) + + if not self.stack: + self.isplaintext = True + + if add_to_result_map: + # text() object is present in the columns clause of a + # select(). 
Add a no-name entry to the result map so that + # row[text()] produces a result + add_to_result_map(None, None, (textclause,), sqltypes.NULLTYPE) + + # un-escape any \:params + return BIND_PARAMS_ESC.sub( + lambda m: m.group(1), + BIND_PARAMS.sub( + do_bindparam, self.post_process_text(textclause.text) + ), + ) + + def visit_textual_select( + self, taf, compound_index=None, asfrom=False, **kw + ): + toplevel = not self.stack + entry = self._default_stack_entry if toplevel else self.stack[-1] + + new_entry: _CompilerStackEntry = { + "correlate_froms": set(), + "asfrom_froms": set(), + "selectable": taf, + } + self.stack.append(new_entry) + + if taf._independent_ctes: + self._dispatch_independent_ctes(taf, kw) + + populate_result_map = ( + toplevel + or ( + compound_index == 0 + and entry.get("need_result_map_for_compound", False) + ) + or entry.get("need_result_map_for_nested", False) + ) + + if populate_result_map: + self._ordered_columns = self._textual_ordered_columns = ( + taf.positional + ) + + # enable looser result column matching when the SQL text links to + # Column objects by name only + self._loose_column_name_matching = not taf.positional and bool( + taf.column_args + ) + + for c in taf.column_args: + self.process( + c, + within_columns_clause=True, + add_to_result_map=self._add_to_result_map, + ) + + text = self.process(taf.element, **kw) + if self.ctes: + nesting_level = len(self.stack) if not toplevel else None + text = self._render_cte_clause(nesting_level=nesting_level) + text + + self.stack.pop(-1) + + return text + + def visit_null(self, expr, **kw): + return "NULL" + + def visit_true(self, expr, **kw): + if self.dialect.supports_native_boolean: + return "true" + else: + return "1" + + def visit_false(self, expr, **kw): + if self.dialect.supports_native_boolean: + return "false" + else: + return "0" + + def _generate_delimited_list(self, elements, separator, **kw): + return separator.join( + s + for s in (c._compiler_dispatch(self, **kw) for c in 
elements) + if s + ) + + def _generate_delimited_and_list(self, clauses, **kw): + lcc, clauses = elements.BooleanClauseList._process_clauses_for_boolean( + operators.and_, + elements.True_._singleton, + elements.False_._singleton, + clauses, + ) + if lcc == 1: + return clauses[0]._compiler_dispatch(self, **kw) + else: + separator = OPERATORS[operators.and_] + return separator.join( + s + for s in (c._compiler_dispatch(self, **kw) for c in clauses) + if s + ) + + def visit_tuple(self, clauselist, **kw): + return "(%s)" % self.visit_clauselist(clauselist, **kw) + + def visit_clauselist(self, clauselist, **kw): + sep = clauselist.operator + if sep is None: + sep = " " + else: + sep = OPERATORS[clauselist.operator] + + return self._generate_delimited_list(clauselist.clauses, sep, **kw) + + def visit_expression_clauselist(self, clauselist, **kw): + operator_ = clauselist.operator + + disp = self._get_operator_dispatch( + operator_, "expression_clauselist", None + ) + if disp: + return disp(clauselist, operator_, **kw) + + try: + opstring = OPERATORS[operator_] + except KeyError as err: + raise exc.UnsupportedCompilationError(self, operator_) from err + else: + kw["_in_operator_expression"] = True + return self._generate_delimited_list( + clauselist.clauses, opstring, **kw + ) + + def visit_case(self, clause, **kwargs): + x = "CASE " + if clause.value is not None: + x += clause.value._compiler_dispatch(self, **kwargs) + " " + for cond, result in clause.whens: + x += ( + "WHEN " + + cond._compiler_dispatch(self, **kwargs) + + " THEN " + + result._compiler_dispatch(self, **kwargs) + + " " + ) + if clause.else_ is not None: + x += ( + "ELSE " + clause.else_._compiler_dispatch(self, **kwargs) + " " + ) + x += "END" + return x + + def visit_type_coerce(self, type_coerce, **kw): + return type_coerce.typed_expression._compiler_dispatch(self, **kw) + + def visit_cast(self, cast, **kwargs): + type_clause = cast.typeclause._compiler_dispatch(self, **kwargs) + match = 
re.match("(.*)( COLLATE .*)", type_clause) + return "CAST(%s AS %s)%s" % ( + cast.clause._compiler_dispatch(self, **kwargs), + match.group(1) if match else type_clause, + match.group(2) if match else "", + ) + + def _format_frame_clause(self, range_, **kw): + return "%s AND %s" % ( + ( + "UNBOUNDED PRECEDING" + if range_[0] is elements.RANGE_UNBOUNDED + else ( + "CURRENT ROW" + if range_[0] is elements.RANGE_CURRENT + else ( + "%s PRECEDING" + % ( + self.process( + elements.literal(abs(range_[0])), **kw + ), + ) + if range_[0] < 0 + else "%s FOLLOWING" + % (self.process(elements.literal(range_[0]), **kw),) + ) + ) + ), + ( + "UNBOUNDED FOLLOWING" + if range_[1] is elements.RANGE_UNBOUNDED + else ( + "CURRENT ROW" + if range_[1] is elements.RANGE_CURRENT + else ( + "%s PRECEDING" + % ( + self.process( + elements.literal(abs(range_[1])), **kw + ), + ) + if range_[1] < 0 + else "%s FOLLOWING" + % (self.process(elements.literal(range_[1]), **kw),) + ) + ) + ), + ) + + def visit_over(self, over, **kwargs): + text = over.element._compiler_dispatch(self, **kwargs) + if over.range_: + range_ = "RANGE BETWEEN %s" % self._format_frame_clause( + over.range_, **kwargs + ) + elif over.rows: + range_ = "ROWS BETWEEN %s" % self._format_frame_clause( + over.rows, **kwargs + ) + else: + range_ = None + + return "%s OVER (%s)" % ( + text, + " ".join( + [ + "%s BY %s" + % (word, clause._compiler_dispatch(self, **kwargs)) + for word, clause in ( + ("PARTITION", over.partition_by), + ("ORDER", over.order_by), + ) + if clause is not None and len(clause) + ] + + ([range_] if range_ else []) + ), + ) + + def visit_withingroup(self, withingroup, **kwargs): + return "%s WITHIN GROUP (ORDER BY %s)" % ( + withingroup.element._compiler_dispatch(self, **kwargs), + withingroup.order_by._compiler_dispatch(self, **kwargs), + ) + + def visit_funcfilter(self, funcfilter, **kwargs): + return "%s FILTER (WHERE %s)" % ( + funcfilter.func._compiler_dispatch(self, **kwargs), + 
funcfilter.criterion._compiler_dispatch(self, **kwargs), + ) + + def visit_extract(self, extract, **kwargs): + field = self.extract_map.get(extract.field, extract.field) + return "EXTRACT(%s FROM %s)" % ( + field, + extract.expr._compiler_dispatch(self, **kwargs), + ) + + def visit_scalar_function_column(self, element, **kw): + compiled_fn = self.visit_function(element.fn, **kw) + compiled_col = self.visit_column(element, **kw) + return "(%s).%s" % (compiled_fn, compiled_col) + + def visit_function( + self, + func: Function[Any], + add_to_result_map: Optional[_ResultMapAppender] = None, + **kwargs: Any, + ) -> str: + if add_to_result_map is not None: + add_to_result_map(func.name, func.name, (), func.type) + + disp = getattr(self, "visit_%s_func" % func.name.lower(), None) + + text: str + + if disp: + text = disp(func, **kwargs) + else: + name = FUNCTIONS.get(func._deannotate().__class__, None) + if name: + if func._has_args: + name += "%(expr)s" + else: + name = func.name + name = ( + self.preparer.quote(name) + if self.preparer._requires_quotes_illegal_chars(name) + or isinstance(name, elements.quoted_name) + else name + ) + name = name + "%(expr)s" + text = ".".join( + [ + ( + self.preparer.quote(tok) + if self.preparer._requires_quotes_illegal_chars(tok) + or isinstance(name, elements.quoted_name) + else tok + ) + for tok in func.packagenames + ] + + [name] + ) % {"expr": self.function_argspec(func, **kwargs)} + + if func._with_ordinality: + text += " WITH ORDINALITY" + return text + + def visit_next_value_func(self, next_value, **kw): + return self.visit_sequence(next_value.sequence) + + def visit_sequence(self, sequence, **kw): + raise NotImplementedError( + "Dialect '%s' does not support sequence increments." 
+ % self.dialect.name + ) + + def function_argspec(self, func, **kwargs): + return func.clause_expr._compiler_dispatch(self, **kwargs) + + def visit_compound_select( + self, cs, asfrom=False, compound_index=None, **kwargs + ): + toplevel = not self.stack + + compile_state = cs._compile_state_factory(cs, self, **kwargs) + + if toplevel and not self.compile_state: + self.compile_state = compile_state + + compound_stmt = compile_state.statement + + entry = self._default_stack_entry if toplevel else self.stack[-1] + need_result_map = toplevel or ( + not compound_index + and entry.get("need_result_map_for_compound", False) + ) + + # indicates there is already a CompoundSelect in play + if compound_index == 0: + entry["select_0"] = cs + + self.stack.append( + { + "correlate_froms": entry["correlate_froms"], + "asfrom_froms": entry["asfrom_froms"], + "selectable": cs, + "compile_state": compile_state, + "need_result_map_for_compound": need_result_map, + } + ) + + if compound_stmt._independent_ctes: + self._dispatch_independent_ctes(compound_stmt, kwargs) + + keyword = self.compound_keywords[cs.keyword] + + text = (" " + keyword + " ").join( + ( + c._compiler_dispatch( + self, asfrom=asfrom, compound_index=i, **kwargs + ) + for i, c in enumerate(cs.selects) + ) + ) + + kwargs["include_table"] = False + text += self.group_by_clause(cs, **dict(asfrom=asfrom, **kwargs)) + text += self.order_by_clause(cs, **kwargs) + if cs._has_row_limiting_clause: + text += self._row_limit_clause(cs, **kwargs) + + if self.ctes: + nesting_level = len(self.stack) if not toplevel else None + text = ( + self._render_cte_clause( + nesting_level=nesting_level, + include_following_stack=True, + ) + + text + ) + + self.stack.pop(-1) + return text + + def _row_limit_clause(self, cs, **kwargs): + if cs._fetch_clause is not None: + return self.fetch_clause(cs, **kwargs) + else: + return self.limit_clause(cs, **kwargs) + + def _get_operator_dispatch(self, operator_, qualifier1, qualifier2): + attrname = 
"visit_%s_%s%s" % ( + operator_.__name__, + qualifier1, + "_" + qualifier2 if qualifier2 else "", + ) + return getattr(self, attrname, None) + + def visit_unary( + self, unary, add_to_result_map=None, result_map_targets=(), **kw + ): + if add_to_result_map is not None: + result_map_targets += (unary,) + kw["add_to_result_map"] = add_to_result_map + kw["result_map_targets"] = result_map_targets + + if unary.operator: + if unary.modifier: + raise exc.CompileError( + "Unary expression does not support operator " + "and modifier simultaneously" + ) + disp = self._get_operator_dispatch( + unary.operator, "unary", "operator" + ) + if disp: + return disp(unary, unary.operator, **kw) + else: + return self._generate_generic_unary_operator( + unary, OPERATORS[unary.operator], **kw + ) + elif unary.modifier: + disp = self._get_operator_dispatch( + unary.modifier, "unary", "modifier" + ) + if disp: + return disp(unary, unary.modifier, **kw) + else: + return self._generate_generic_unary_modifier( + unary, OPERATORS[unary.modifier], **kw + ) + else: + raise exc.CompileError( + "Unary expression has no operator or modifier" + ) + + def visit_truediv_binary(self, binary, operator, **kw): + if self.dialect.div_is_floordiv: + return ( + self.process(binary.left, **kw) + + " / " + # TODO: would need a fast cast again here, + # unless we want to use an implicit cast like "+ 0.0" + + self.process( + elements.Cast( + binary.right, + ( + binary.right.type + if binary.right.type._type_affinity + is sqltypes.Numeric + else sqltypes.Numeric() + ), + ), + **kw, + ) + ) + else: + return ( + self.process(binary.left, **kw) + + " / " + + self.process(binary.right, **kw) + ) + + def visit_floordiv_binary(self, binary, operator, **kw): + if ( + self.dialect.div_is_floordiv + and binary.right.type._type_affinity is sqltypes.Integer + ): + return ( + self.process(binary.left, **kw) + + " / " + + self.process(binary.right, **kw) + ) + else: + return "FLOOR(%s)" % ( + self.process(binary.left, **kw) 
+ + " / " + + self.process(binary.right, **kw) + ) + + def visit_is_true_unary_operator(self, element, operator, **kw): + if ( + element._is_implicitly_boolean + or self.dialect.supports_native_boolean + ): + return self.process(element.element, **kw) + else: + return "%s = 1" % self.process(element.element, **kw) + + def visit_is_false_unary_operator(self, element, operator, **kw): + if ( + element._is_implicitly_boolean + or self.dialect.supports_native_boolean + ): + return "NOT %s" % self.process(element.element, **kw) + else: + return "%s = 0" % self.process(element.element, **kw) + + def visit_not_match_op_binary(self, binary, operator, **kw): + return "NOT %s" % self.visit_binary( + binary, override_operator=operators.match_op + ) + + def visit_not_in_op_binary(self, binary, operator, **kw): + # The brackets are required in the NOT IN operation because the empty + # case is handled using the form "(col NOT IN (null) OR 1 = 1)". + # The presence of the OR makes the brackets required. + return "(%s)" % self._generate_generic_binary( + binary, OPERATORS[operator], **kw + ) + + def visit_empty_set_op_expr(self, type_, expand_op, **kw): + if expand_op is operators.not_in_op: + if len(type_) > 1: + return "(%s)) OR (1 = 1" % ( + ", ".join("NULL" for element in type_) + ) + else: + return "NULL) OR (1 = 1" + elif expand_op is operators.in_op: + if len(type_) > 1: + return "(%s)) AND (1 != 1" % ( + ", ".join("NULL" for element in type_) + ) + else: + return "NULL) AND (1 != 1" + else: + return self.visit_empty_set_expr(type_) + + def visit_empty_set_expr(self, element_types, **kw): + raise NotImplementedError( + "Dialect '%s' does not support empty set expression." + % self.dialect.name + ) + + def _literal_execute_expanding_parameter_literal_binds( + self, parameter, values, bind_expression_template=None + ): + typ_dialect_impl = parameter.type._unwrapped_dialect_impl(self.dialect) + + if not values: + # empty IN expression. 
note we don't need to use + # bind_expression_template here because there are no + # expressions to render. + + if typ_dialect_impl._is_tuple_type: + replacement_expression = ( + "VALUES " if self.dialect.tuple_in_values else "" + ) + self.visit_empty_set_op_expr( + parameter.type.types, parameter.expand_op + ) + + else: + replacement_expression = self.visit_empty_set_op_expr( + [parameter.type], parameter.expand_op + ) + + elif typ_dialect_impl._is_tuple_type or ( + typ_dialect_impl._isnull + and isinstance(values[0], collections_abc.Sequence) + and not isinstance(values[0], (str, bytes)) + ): + if typ_dialect_impl._has_bind_expression: + raise NotImplementedError( + "bind_expression() on TupleType not supported with " + "literal_binds" + ) + + replacement_expression = ( + "VALUES " if self.dialect.tuple_in_values else "" + ) + ", ".join( + "(%s)" + % ( + ", ".join( + self.render_literal_value(value, param_type) + for value, param_type in zip( + tuple_element, parameter.type.types + ) + ) + ) + for i, tuple_element in enumerate(values) + ) + else: + if bind_expression_template: + post_compile_pattern = self._post_compile_pattern + m = post_compile_pattern.search(bind_expression_template) + assert m and m.group( + 2 + ), "unexpected format for expanding parameter" + + tok = m.group(2).split("~~") + be_left, be_right = tok[1], tok[3] + replacement_expression = ", ".join( + "%s%s%s" + % ( + be_left, + self.render_literal_value(value, parameter.type), + be_right, + ) + for value in values + ) + else: + replacement_expression = ", ".join( + self.render_literal_value(value, parameter.type) + for value in values + ) + + return (), replacement_expression + + def _literal_execute_expanding_parameter(self, name, parameter, values): + if parameter.literal_execute: + return self._literal_execute_expanding_parameter_literal_binds( + parameter, values + ) + + dialect = self.dialect + typ_dialect_impl = parameter.type._unwrapped_dialect_impl(dialect) + + if self._numeric_binds: 
+ bind_template = self.compilation_bindtemplate + else: + bind_template = self.bindtemplate + + if ( + self.dialect._bind_typing_render_casts + and typ_dialect_impl.render_bind_cast + ): + + def _render_bindtemplate(name): + return self.render_bind_cast( + parameter.type, + typ_dialect_impl, + bind_template % {"name": name}, + ) + + else: + + def _render_bindtemplate(name): + return bind_template % {"name": name} + + if not values: + to_update = [] + if typ_dialect_impl._is_tuple_type: + replacement_expression = self.visit_empty_set_op_expr( + parameter.type.types, parameter.expand_op + ) + else: + replacement_expression = self.visit_empty_set_op_expr( + [parameter.type], parameter.expand_op + ) + + elif typ_dialect_impl._is_tuple_type or ( + typ_dialect_impl._isnull + and isinstance(values[0], collections_abc.Sequence) + and not isinstance(values[0], (str, bytes)) + ): + assert not typ_dialect_impl._is_array + to_update = [ + ("%s_%s_%s" % (name, i, j), value) + for i, tuple_element in enumerate(values, 1) + for j, value in enumerate(tuple_element, 1) + ] + + replacement_expression = ( + "VALUES " if dialect.tuple_in_values else "" + ) + ", ".join( + "(%s)" + % ( + ", ".join( + _render_bindtemplate( + to_update[i * len(tuple_element) + j][0] + ) + for j, value in enumerate(tuple_element) + ) + ) + for i, tuple_element in enumerate(values) + ) + else: + to_update = [ + ("%s_%s" % (name, i), value) + for i, value in enumerate(values, 1) + ] + replacement_expression = ", ".join( + _render_bindtemplate(key) for key, value in to_update + ) + + return to_update, replacement_expression + + def visit_binary( + self, + binary, + override_operator=None, + eager_grouping=False, + from_linter=None, + lateral_from_linter=None, + **kw, + ): + if from_linter and operators.is_comparison(binary.operator): + if lateral_from_linter is not None: + enclosing_lateral = kw["enclosing_lateral"] + lateral_from_linter.edges.update( + itertools.product( + _de_clone( + 
binary.left._from_objects + [enclosing_lateral] + ), + _de_clone( + binary.right._from_objects + [enclosing_lateral] + ), + ) + ) + else: + from_linter.edges.update( + itertools.product( + _de_clone(binary.left._from_objects), + _de_clone(binary.right._from_objects), + ) + ) + + # don't allow "? = ?" to render + if ( + self.ansi_bind_rules + and isinstance(binary.left, elements.BindParameter) + and isinstance(binary.right, elements.BindParameter) + ): + kw["literal_execute"] = True + + operator_ = override_operator or binary.operator + disp = self._get_operator_dispatch(operator_, "binary", None) + if disp: + return disp(binary, operator_, **kw) + else: + try: + opstring = OPERATORS[operator_] + except KeyError as err: + raise exc.UnsupportedCompilationError(self, operator_) from err + else: + return self._generate_generic_binary( + binary, + opstring, + from_linter=from_linter, + lateral_from_linter=lateral_from_linter, + **kw, + ) + + def visit_function_as_comparison_op_binary(self, element, operator, **kw): + return self.process(element.sql_function, **kw) + + def visit_mod_binary(self, binary, operator, **kw): + if self.preparer._double_percents: + return ( + self.process(binary.left, **kw) + + " %% " + + self.process(binary.right, **kw) + ) + else: + return ( + self.process(binary.left, **kw) + + " % " + + self.process(binary.right, **kw) + ) + + def visit_custom_op_binary(self, element, operator, **kw): + kw["eager_grouping"] = operator.eager_grouping + return self._generate_generic_binary( + element, + " " + self.escape_literal_column(operator.opstring) + " ", + **kw, + ) + + def visit_custom_op_unary_operator(self, element, operator, **kw): + return self._generate_generic_unary_operator( + element, self.escape_literal_column(operator.opstring) + " ", **kw + ) + + def visit_custom_op_unary_modifier(self, element, operator, **kw): + return self._generate_generic_unary_modifier( + element, " " + self.escape_literal_column(operator.opstring), **kw + ) + + def 
_generate_generic_binary( + self, binary, opstring, eager_grouping=False, **kw + ): + _in_operator_expression = kw.get("_in_operator_expression", False) + + kw["_in_operator_expression"] = True + kw["_binary_op"] = binary.operator + text = ( + binary.left._compiler_dispatch( + self, eager_grouping=eager_grouping, **kw + ) + + opstring + + binary.right._compiler_dispatch( + self, eager_grouping=eager_grouping, **kw + ) + ) + + if _in_operator_expression and eager_grouping: + text = "(%s)" % text + return text + + def _generate_generic_unary_operator(self, unary, opstring, **kw): + return opstring + unary.element._compiler_dispatch(self, **kw) + + def _generate_generic_unary_modifier(self, unary, opstring, **kw): + return unary.element._compiler_dispatch(self, **kw) + opstring + + @util.memoized_property + def _like_percent_literal(self): + return elements.literal_column("'%'", type_=sqltypes.STRINGTYPE) + + def visit_ilike_case_insensitive_operand(self, element, **kw): + return f"lower({element.element._compiler_dispatch(self, **kw)})" + + def visit_contains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.concat(binary.right).concat(percent) + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_contains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.concat(binary.right).concat(percent) + return self.visit_not_like_op_binary(binary, operator, **kw) + + def visit_icontains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent.concat( + ilike_case_insensitive(binary.right) + ).concat(percent) + return self.visit_ilike_op_binary(binary, operator, **kw) + + def visit_not_icontains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + 
percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent.concat( + ilike_case_insensitive(binary.right) + ).concat(percent) + return self.visit_not_ilike_op_binary(binary, operator, **kw) + + def visit_startswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent._rconcat(binary.right) + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_startswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent._rconcat(binary.right) + return self.visit_not_like_op_binary(binary, operator, **kw) + + def visit_istartswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent._rconcat(ilike_case_insensitive(binary.right)) + return self.visit_ilike_op_binary(binary, operator, **kw) + + def visit_not_istartswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent._rconcat(ilike_case_insensitive(binary.right)) + return self.visit_not_ilike_op_binary(binary, operator, **kw) + + def visit_endswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.concat(binary.right) + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_endswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.concat(binary.right) + return self.visit_not_like_op_binary(binary, operator, **kw) + + def visit_iendswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = 
ilike_case_insensitive(binary.left) + binary.right = percent.concat(ilike_case_insensitive(binary.right)) + return self.visit_ilike_op_binary(binary, operator, **kw) + + def visit_not_iendswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.left = ilike_case_insensitive(binary.left) + binary.right = percent.concat(ilike_case_insensitive(binary.right)) + return self.visit_not_ilike_op_binary(binary, operator, **kw) + + def visit_like_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + + return "%s LIKE %s" % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw), + ) + ( + " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape is not None + else "" + ) + + def visit_not_like_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + return "%s NOT LIKE %s" % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw), + ) + ( + " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape is not None + else "" + ) + + def visit_ilike_op_binary(self, binary, operator, **kw): + if operator is operators.ilike_op: + binary = binary._clone() + binary.left = ilike_case_insensitive(binary.left) + binary.right = ilike_case_insensitive(binary.right) + # else we assume ilower() has been applied + + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_ilike_op_binary(self, binary, operator, **kw): + if operator is operators.not_ilike_op: + binary = binary._clone() + binary.left = ilike_case_insensitive(binary.left) + binary.right = ilike_case_insensitive(binary.right) + # else we assume ilower() has been applied + + return self.visit_not_like_op_binary(binary, operator, **kw) + + def visit_between_op_binary(self, binary, operator, **kw): + symmetric = binary.modifiers.get("symmetric", False) + return 
self._generate_generic_binary( + binary, " BETWEEN SYMMETRIC " if symmetric else " BETWEEN ", **kw + ) + + def visit_not_between_op_binary(self, binary, operator, **kw): + symmetric = binary.modifiers.get("symmetric", False) + return self._generate_generic_binary( + binary, + " NOT BETWEEN SYMMETRIC " if symmetric else " NOT BETWEEN ", + **kw, + ) + + def visit_regexp_match_op_binary(self, binary, operator, **kw): + raise exc.CompileError( + "%s dialect does not support regular expressions" + % self.dialect.name + ) + + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): + raise exc.CompileError( + "%s dialect does not support regular expressions" + % self.dialect.name + ) + + def visit_regexp_replace_op_binary(self, binary, operator, **kw): + raise exc.CompileError( + "%s dialect does not support regular expression replacements" + % self.dialect.name + ) + + def visit_bindparam( + self, + bindparam, + within_columns_clause=False, + literal_binds=False, + skip_bind_expression=False, + literal_execute=False, + render_postcompile=False, + **kwargs, + ): + + if not skip_bind_expression: + impl = bindparam.type.dialect_impl(self.dialect) + if impl._has_bind_expression: + bind_expression = impl.bind_expression(bindparam) + wrapped = self.process( + bind_expression, + skip_bind_expression=True, + within_columns_clause=within_columns_clause, + literal_binds=literal_binds and not bindparam.expanding, + literal_execute=literal_execute, + render_postcompile=render_postcompile, + **kwargs, + ) + if bindparam.expanding: + # for postcompile w/ expanding, move the "wrapped" part + # of this into the inside + + m = re.match( + r"^(.*)\(__\[POSTCOMPILE_(\S+?)\]\)(.*)$", wrapped + ) + assert m, "unexpected format for expanding parameter" + wrapped = "(__[POSTCOMPILE_%s~~%s~~REPL~~%s~~])" % ( + m.group(2), + m.group(1), + m.group(3), + ) + + if literal_binds: + ret = self.render_literal_bindparam( + bindparam, + within_columns_clause=True, + 
bind_expression_template=wrapped, + **kwargs, + ) + return "(%s)" % ret + + return wrapped + + if not literal_binds: + literal_execute = ( + literal_execute + or bindparam.literal_execute + or (within_columns_clause and self.ansi_bind_rules) + ) + post_compile = literal_execute or bindparam.expanding + else: + post_compile = False + + if literal_binds: + ret = self.render_literal_bindparam( + bindparam, within_columns_clause=True, **kwargs + ) + if bindparam.expanding: + ret = "(%s)" % ret + return ret + + name = self._truncate_bindparam(bindparam) + + if name in self.binds: + existing = self.binds[name] + if existing is not bindparam: + if ( + (existing.unique or bindparam.unique) + and not existing.proxy_set.intersection( + bindparam.proxy_set + ) + and not existing._cloned_set.intersection( + bindparam._cloned_set + ) + ): + raise exc.CompileError( + "Bind parameter '%s' conflicts with " + "unique bind parameter of the same name" % name + ) + elif existing.expanding != bindparam.expanding: + raise exc.CompileError( + "Can't reuse bound parameter name '%s' in both " + "'expanding' (e.g. within an IN expression) and " + "non-expanding contexts. If this parameter is to " + "receive a list/array value, set 'expanding=True' on " + "it for expressions that aren't IN, otherwise use " + "a different parameter name." % (name,) + ) + elif existing._is_crud or bindparam._is_crud: + if existing._is_crud and bindparam._is_crud: + # TODO: this condition is not well understood. + # see tests in test/sql/test_update.py + raise exc.CompileError( + "Encountered unsupported case when compiling an " + "INSERT or UPDATE statement. 
If this is a " + "multi-table " + "UPDATE statement, please provide string-named " + "arguments to the " + "values() method with distinct names; support for " + "multi-table UPDATE statements that " + "target multiple tables for UPDATE is very " + "limited", + ) + else: + raise exc.CompileError( + f"bindparam() name '{bindparam.key}' is reserved " + "for automatic usage in the VALUES or SET " + "clause of this " + "insert/update statement. Please use a " + "name other than column name when using " + "bindparam() " + "with insert() or update() (for example, " + f"'b_{bindparam.key}')." + ) + + self.binds[bindparam.key] = self.binds[name] = bindparam + + # if we are given a cache key that we're going to match against, + # relate the bindparam here to one that is most likely present + # in the "extracted params" portion of the cache key. this is used + # to set up a positional mapping that is used to determine the + # correct parameters for a subsequent use of this compiled with + # a different set of parameter values. here, we accommodate for + # parameters that may have been cloned both before and after the cache + # key was been generated. 
+ ckbm_tuple = self._cache_key_bind_match + + if ckbm_tuple: + ckbm, cksm = ckbm_tuple + for bp in bindparam._cloned_set: + if bp.key in cksm: + cb = cksm[bp.key] + ckbm[cb].append(bindparam) + + if bindparam.isoutparam: + self.has_out_parameters = True + + if post_compile: + if render_postcompile: + self._render_postcompile = True + + if literal_execute: + self.literal_execute_params |= {bindparam} + else: + self.post_compile_params |= {bindparam} + + ret = self.bindparam_string( + name, + post_compile=post_compile, + expanding=bindparam.expanding, + bindparam_type=bindparam.type, + **kwargs, + ) + + if bindparam.expanding: + ret = "(%s)" % ret + + return ret + + def render_bind_cast(self, type_, dbapi_type, sqltext): + raise NotImplementedError() + + def render_literal_bindparam( + self, + bindparam, + render_literal_value=NO_ARG, + bind_expression_template=None, + **kw, + ): + if render_literal_value is not NO_ARG: + value = render_literal_value + else: + if bindparam.value is None and bindparam.callable is None: + op = kw.get("_binary_op", None) + if op and op not in (operators.is_, operators.is_not): + util.warn_limited( + "Bound parameter '%s' rendering literal NULL in a SQL " + "expression; comparisons to NULL should not use " + "operators outside of 'is' or 'is not'", + (bindparam.key,), + ) + return self.process(sqltypes.NULLTYPE, **kw) + value = bindparam.effective_value + + if bindparam.expanding: + leep = self._literal_execute_expanding_parameter_literal_binds + to_update, replacement_expr = leep( + bindparam, + value, + bind_expression_template=bind_expression_template, + ) + return replacement_expr + else: + return self.render_literal_value(value, bindparam.type) + + def render_literal_value(self, value, type_): + """Render the value of a bind parameter as a quoted literal. + + This is used for statement sections that do not accept bind parameters + on the target driver/database. 
+ + This should be implemented by subclasses using the quoting services + of the DBAPI. + + """ + + if value is None and not type_.should_evaluate_none: + # issue #10535 - handle NULL in the compiler without placing + # this onto each type, except for "evaluate None" types + # (e.g. JSON) + return self.process(elements.Null._instance()) + + processor = type_._cached_literal_processor(self.dialect) + if processor: + try: + return processor(value) + except Exception as e: + raise exc.CompileError( + f"Could not render literal value " + f'"{sql_util._repr_single_value(value)}" ' + f"with datatype " + f"{type_}; see parent stack trace for " + "more detail." + ) from e + + else: + raise exc.CompileError( + f"No literal value renderer is available for literal value " + f'"{sql_util._repr_single_value(value)}" ' + f"with datatype {type_}" + ) + + def _truncate_bindparam(self, bindparam): + if bindparam in self.bind_names: + return self.bind_names[bindparam] + + bind_name = bindparam.key + if isinstance(bind_name, elements._truncated_label): + bind_name = self._truncated_identifier("bindparam", bind_name) + + # add to bind_names for translation + self.bind_names[bindparam] = bind_name + + return bind_name + + def _truncated_identifier( + self, ident_class: str, name: _truncated_label + ) -> str: + if (ident_class, name) in self.truncated_names: + return self.truncated_names[(ident_class, name)] + + anonname = name.apply_map(self.anon_map) + + if len(anonname) > self.label_length - 6: + counter = self._truncated_counters.get(ident_class, 1) + truncname = ( + anonname[0 : max(self.label_length - 6, 0)] + + "_" + + hex(counter)[2:] + ) + self._truncated_counters[ident_class] = counter + 1 + else: + truncname = anonname + self.truncated_names[(ident_class, name)] = truncname + return truncname + + def _anonymize(self, name: str) -> str: + return name % self.anon_map + + def bindparam_string( + self, + name: str, + post_compile: bool = False, + expanding: bool = False, + 
escaped_from: Optional[str] = None, + bindparam_type: Optional[TypeEngine[Any]] = None, + accumulate_bind_names: Optional[Set[str]] = None, + visited_bindparam: Optional[List[str]] = None, + **kw: Any, + ) -> str: + # TODO: accumulate_bind_names is passed by crud.py to gather + # names on a per-value basis, visited_bindparam is passed by + # visit_insert() to collect all parameters in the statement. + # see if this gathering can be simplified somehow + if accumulate_bind_names is not None: + accumulate_bind_names.add(name) + if visited_bindparam is not None: + visited_bindparam.append(name) + + if not escaped_from: + if self._bind_translate_re.search(name): + # not quite the translate use case as we want to + # also get a quick boolean if we even found + # unusual characters in the name + new_name = self._bind_translate_re.sub( + lambda m: self._bind_translate_chars[m.group(0)], + name, + ) + escaped_from = name + name = new_name + + if escaped_from: + self.escaped_bind_names = self.escaped_bind_names.union( + {escaped_from: name} + ) + if post_compile: + ret = "__[POSTCOMPILE_%s]" % name + if expanding: + # for expanding, bound parameters or literal values will be + # rendered per item + return ret + + # otherwise, for non-expanding "literal execute", apply + # bind casts as determined by the datatype + if bindparam_type is not None: + type_impl = bindparam_type._unwrapped_dialect_impl( + self.dialect + ) + if type_impl.render_literal_cast: + ret = self.render_bind_cast(bindparam_type, type_impl, ret) + return ret + elif self.state is CompilerState.COMPILING: + ret = self.compilation_bindtemplate % {"name": name} + else: + ret = self.bindtemplate % {"name": name} + + if ( + bindparam_type is not None + and self.dialect._bind_typing_render_casts + ): + type_impl = bindparam_type._unwrapped_dialect_impl(self.dialect) + if type_impl.render_bind_cast: + ret = self.render_bind_cast(bindparam_type, type_impl, ret) + + return ret + + def _dispatch_independent_ctes(self, 
stmt, kw): + local_kw = kw.copy() + local_kw.pop("cte_opts", None) + for cte, opt in zip( + stmt._independent_ctes, stmt._independent_ctes_opts + ): + cte._compiler_dispatch(self, cte_opts=opt, **local_kw) + + def visit_cte( + self, + cte: CTE, + asfrom: bool = False, + ashint: bool = False, + fromhints: Optional[_FromHintsType] = None, + visiting_cte: Optional[CTE] = None, + from_linter: Optional[FromLinter] = None, + cte_opts: selectable._CTEOpts = selectable._CTEOpts(False), + **kwargs: Any, + ) -> Optional[str]: + self_ctes = self._init_cte_state() + assert self_ctes is self.ctes + + kwargs["visiting_cte"] = cte + + cte_name = cte.name + + if isinstance(cte_name, elements._truncated_label): + cte_name = self._truncated_identifier("alias", cte_name) + + is_new_cte = True + embedded_in_current_named_cte = False + + _reference_cte = cte._get_reference_cte() + + nesting = cte.nesting or cte_opts.nesting + + # check for CTE already encountered + if _reference_cte in self.level_name_by_cte: + cte_level, _, existing_cte_opts = self.level_name_by_cte[ + _reference_cte + ] + assert _ == cte_name + + cte_level_name = (cte_level, cte_name) + existing_cte = self.ctes_by_level_name[cte_level_name] + + # check if we are receiving it here with a specific + # "nest_here" location; if so, move it to this location + + if cte_opts.nesting: + if existing_cte_opts.nesting: + raise exc.CompileError( + "CTE is stated as 'nest_here' in " + "more than one location" + ) + + old_level_name = (cte_level, cte_name) + cte_level = len(self.stack) if nesting else 1 + cte_level_name = new_level_name = (cte_level, cte_name) + + del self.ctes_by_level_name[old_level_name] + self.ctes_by_level_name[new_level_name] = existing_cte + self.level_name_by_cte[_reference_cte] = new_level_name + ( + cte_opts, + ) + + else: + cte_level = len(self.stack) if nesting else 1 + cte_level_name = (cte_level, cte_name) + + if cte_level_name in self.ctes_by_level_name: + existing_cte = 
self.ctes_by_level_name[cte_level_name] + else: + existing_cte = None + + if existing_cte is not None: + embedded_in_current_named_cte = visiting_cte is existing_cte + + # we've generated a same-named CTE that we are enclosed in, + # or this is the same CTE. just return the name. + if cte is existing_cte._restates or cte is existing_cte: + is_new_cte = False + elif existing_cte is cte._restates: + # we've generated a same-named CTE that is + # enclosed in us - we take precedence, so + # discard the text for the "inner". + del self_ctes[existing_cte] + + existing_cte_reference_cte = existing_cte._get_reference_cte() + + assert existing_cte_reference_cte is _reference_cte + assert existing_cte_reference_cte is existing_cte + + del self.level_name_by_cte[existing_cte_reference_cte] + else: + # if the two CTEs are deep-copy identical, consider them + # the same, **if** they are clones, that is, they came from + # the ORM or other visit method + if ( + cte._is_clone_of is not None + or existing_cte._is_clone_of is not None + ) and cte.compare(existing_cte): + is_new_cte = False + else: + raise exc.CompileError( + "Multiple, unrelated CTEs found with " + "the same name: %r" % cte_name + ) + + if not asfrom and not is_new_cte: + return None + + if cte._cte_alias is not None: + pre_alias_cte = cte._cte_alias + cte_pre_alias_name = cte._cte_alias.name + if isinstance(cte_pre_alias_name, elements._truncated_label): + cte_pre_alias_name = self._truncated_identifier( + "alias", cte_pre_alias_name + ) + else: + pre_alias_cte = cte + cte_pre_alias_name = None + + if is_new_cte: + self.ctes_by_level_name[cte_level_name] = cte + self.level_name_by_cte[_reference_cte] = cte_level_name + ( + cte_opts, + ) + + if pre_alias_cte not in self.ctes: + self.visit_cte(pre_alias_cte, **kwargs) + + if not cte_pre_alias_name and cte not in self_ctes: + if cte.recursive: + self.ctes_recursive = True + text = self.preparer.format_alias(cte, cte_name) + if cte.recursive: + col_source = 
cte.element + + # TODO: can we get at the .columns_plus_names collection + # that is already (or will be?) generated for the SELECT + # rather than calling twice? + recur_cols = [ + # TODO: proxy_name is not technically safe, + # see test_cte-> + # test_with_recursive_no_name_currently_buggy. not + # clear what should be done with such a case + fallback_label_name or proxy_name + for ( + _, + proxy_name, + fallback_label_name, + c, + repeated, + ) in (col_source._generate_columns_plus_names(True)) + if not repeated + ] + + text += "(%s)" % ( + ", ".join( + self.preparer.format_label_name( + ident, anon_map=self.anon_map + ) + for ident in recur_cols + ) + ) + + assert kwargs.get("subquery", False) is False + + if not self.stack: + # toplevel, this is a stringify of the + # cte directly. just compile the inner + # the way alias() does. + return cte.element._compiler_dispatch( + self, asfrom=asfrom, **kwargs + ) + else: + prefixes = self._generate_prefixes( + cte, cte._prefixes, **kwargs + ) + inner = cte.element._compiler_dispatch( + self, asfrom=True, **kwargs + ) + + text += " AS %s\n(%s)" % (prefixes, inner) + + if cte._suffixes: + text += " " + self._generate_prefixes( + cte, cte._suffixes, **kwargs + ) + + self_ctes[cte] = text + + if asfrom: + if from_linter: + from_linter.froms[cte._de_clone()] = cte_name + + if not is_new_cte and embedded_in_current_named_cte: + return self.preparer.format_alias(cte, cte_name) + + if cte_pre_alias_name: + text = self.preparer.format_alias(cte, cte_pre_alias_name) + if self.preparer._requires_quotes(cte_name): + cte_name = self.preparer.quote(cte_name) + text += self.get_render_as_alias_suffix(cte_name) + return text + else: + return self.preparer.format_alias(cte, cte_name) + + return None + + def visit_table_valued_alias(self, element, **kw): + if element.joins_implicitly: + kw["from_linter"] = None + if element._is_lateral: + return self.visit_lateral(element, **kw) + else: + return self.visit_alias(element, **kw) + + def 
visit_table_valued_column(self, element, **kw): + return self.visit_column(element, **kw) + + def visit_alias( + self, + alias, + asfrom=False, + ashint=False, + iscrud=False, + fromhints=None, + subquery=False, + lateral=False, + enclosing_alias=None, + from_linter=None, + **kwargs, + ): + if lateral: + if "enclosing_lateral" not in kwargs: + # if lateral is set and enclosing_lateral is not + # present, we assume we are being called directly + # from visit_lateral() and we need to set enclosing_lateral. + assert alias._is_lateral + kwargs["enclosing_lateral"] = alias + + # for lateral objects, we track a second from_linter that is... + # lateral! to the level above us. + if ( + from_linter + and "lateral_from_linter" not in kwargs + and "enclosing_lateral" in kwargs + ): + kwargs["lateral_from_linter"] = from_linter + + if enclosing_alias is not None and enclosing_alias.element is alias: + inner = alias.element._compiler_dispatch( + self, + asfrom=asfrom, + ashint=ashint, + iscrud=iscrud, + fromhints=fromhints, + lateral=lateral, + enclosing_alias=alias, + **kwargs, + ) + if subquery and (asfrom or lateral): + inner = "(%s)" % (inner,) + return inner + else: + enclosing_alias = kwargs["enclosing_alias"] = alias + + if asfrom or ashint: + if isinstance(alias.name, elements._truncated_label): + alias_name = self._truncated_identifier("alias", alias.name) + else: + alias_name = alias.name + + if ashint: + return self.preparer.format_alias(alias, alias_name) + elif asfrom: + if from_linter: + from_linter.froms[alias._de_clone()] = alias_name + + inner = alias.element._compiler_dispatch( + self, asfrom=True, lateral=lateral, **kwargs + ) + if subquery: + inner = "(%s)" % (inner,) + + ret = inner + self.get_render_as_alias_suffix( + self.preparer.format_alias(alias, alias_name) + ) + + if alias._supports_derived_columns and alias._render_derived: + ret += "(%s)" % ( + ", ".join( + "%s%s" + % ( + self.preparer.quote(col.name), + ( + " %s" + % 
self.dialect.type_compiler_instance.process( + col.type, **kwargs + ) + if alias._render_derived_w_types + else "" + ), + ) + for col in alias.c + ) + ) + + if fromhints and alias in fromhints: + ret = self.format_from_hint_text( + ret, alias, fromhints[alias], iscrud + ) + + return ret + else: + # note we cancel the "subquery" flag here as well + return alias.element._compiler_dispatch( + self, lateral=lateral, **kwargs + ) + + def visit_subquery(self, subquery, **kw): + kw["subquery"] = True + return self.visit_alias(subquery, **kw) + + def visit_lateral(self, lateral_, **kw): + kw["lateral"] = True + return "LATERAL %s" % self.visit_alias(lateral_, **kw) + + def visit_tablesample(self, tablesample, asfrom=False, **kw): + text = "%s TABLESAMPLE %s" % ( + self.visit_alias(tablesample, asfrom=True, **kw), + tablesample._get_method()._compiler_dispatch(self, **kw), + ) + + if tablesample.seed is not None: + text += " REPEATABLE (%s)" % ( + tablesample.seed._compiler_dispatch(self, **kw) + ) + + return text + + def _render_values(self, element, **kw): + kw.setdefault("literal_binds", element.literal_binds) + tuples = ", ".join( + self.process( + elements.Tuple( + types=element._column_types, *elem + ).self_group(), + **kw, + ) + for chunk in element._data + for elem in chunk + ) + return f"VALUES {tuples}" + + def visit_values(self, element, asfrom=False, from_linter=None, **kw): + v = self._render_values(element, **kw) + + if element._unnamed: + name = None + elif isinstance(element.name, elements._truncated_label): + name = self._truncated_identifier("values", element.name) + else: + name = element.name + + if element._is_lateral: + lateral = "LATERAL " + else: + lateral = "" + + if asfrom: + if from_linter: + from_linter.froms[element._de_clone()] = ( + name if name is not None else "(unnamed VALUES element)" + ) + + if name: + kw["include_table"] = False + v = "%s(%s)%s (%s)" % ( + lateral, + v, + self.get_render_as_alias_suffix(self.preparer.quote(name)), + ( + 
", ".join( + c._compiler_dispatch(self, **kw) + for c in element.columns + ) + ), + ) + else: + v = "%s(%s)" % (lateral, v) + return v + + def visit_scalar_values(self, element, **kw): + return f"({self._render_values(element, **kw)})" + + def get_render_as_alias_suffix(self, alias_name_text): + return " AS " + alias_name_text + + def _add_to_result_map( + self, + keyname: str, + name: str, + objects: Tuple[Any, ...], + type_: TypeEngine[Any], + ) -> None: + if keyname is None or keyname == "*": + self._ordered_columns = False + self._ad_hoc_textual = True + if type_._is_tuple_type: + raise exc.CompileError( + "Most backends don't support SELECTing " + "from a tuple() object. If this is an ORM query, " + "consider using the Bundle object." + ) + self._result_columns.append( + ResultColumnsEntry(keyname, name, objects, type_) + ) + + def _label_returning_column( + self, stmt, column, populate_result_map, column_clause_args=None, **kw + ): + """Render a column with necessary labels inside of a RETURNING clause. + + This method is provided for individual dialects in place of calling + the _label_select_column method directly, so that the two use cases + of RETURNING vs. SELECT can be disambiguated going forward. + + .. 
versionadded:: 1.4.21 + + """ + return self._label_select_column( + None, + column, + populate_result_map, + False, + {} if column_clause_args is None else column_clause_args, + **kw, + ) + + def _label_select_column( + self, + select, + column, + populate_result_map, + asfrom, + column_clause_args, + name=None, + proxy_name=None, + fallback_label_name=None, + within_columns_clause=True, + column_is_repeated=False, + need_column_expressions=False, + include_table=True, + ): + """produce labeled columns present in a select().""" + impl = column.type.dialect_impl(self.dialect) + + if impl._has_column_expression and ( + need_column_expressions or populate_result_map + ): + col_expr = impl.column_expression(column) + else: + col_expr = column + + if populate_result_map: + # pass an "add_to_result_map" callable into the compilation + # of embedded columns. this collects information about the + # column as it will be fetched in the result and is coordinated + # with cursor.description when the query is executed. + add_to_result_map = self._add_to_result_map + + # if the SELECT statement told us this column is a repeat, + # wrap the callable with one that prevents the addition of the + # targets + if column_is_repeated: + _add_to_result_map = add_to_result_map + + def add_to_result_map(keyname, name, objects, type_): + _add_to_result_map(keyname, name, (), type_) + + # if we redefined col_expr for type expressions, wrap the + # callable with one that adds the original column to the targets + elif col_expr is not column: + _add_to_result_map = add_to_result_map + + def add_to_result_map(keyname, name, objects, type_): + _add_to_result_map( + keyname, name, (column,) + objects, type_ + ) + + else: + add_to_result_map = None + + # this method is used by some of the dialects for RETURNING, + # which has different inputs. 
_label_returning_column was added + # as the better target for this now however for 1.4 we will keep + # _label_select_column directly compatible with this use case. + # these assertions right now set up the current expected inputs + assert within_columns_clause, ( + "_label_select_column is only relevant within " + "the columns clause of a SELECT or RETURNING" + ) + if isinstance(column, elements.Label): + if col_expr is not column: + result_expr = _CompileLabel( + col_expr, column.name, alt_names=(column.element,) + ) + else: + result_expr = col_expr + + elif name: + # here, _columns_plus_names has determined there's an explicit + # label name we need to use. this is the default for + # tablenames_plus_columnnames as well as when columns are being + # deduplicated on name + + assert ( + proxy_name is not None + ), "proxy_name is required if 'name' is passed" + + result_expr = _CompileLabel( + col_expr, + name, + alt_names=( + proxy_name, + # this is a hack to allow legacy result column lookups + # to work as they did before; this goes away in 2.0. + # TODO: this only seems to be tested indirectly + # via test/orm/test_deprecations.py. should be a + # resultset test for this + column._tq_label, + ), + ) + else: + # determine here whether this column should be rendered in + # a labelled context or not, as we were given no required label + # name from the caller. Here we apply heuristics based on the kind + # of SQL expression involved. 
+ + if col_expr is not column: + # type-specific expression wrapping the given column, + # so we render a label + render_with_label = True + elif isinstance(column, elements.ColumnClause): + # table-bound column, we render its name as a label if we are + # inside of a subquery only + render_with_label = ( + asfrom + and not column.is_literal + and column.table is not None + ) + elif isinstance(column, elements.TextClause): + render_with_label = False + elif isinstance(column, elements.UnaryExpression): + render_with_label = column.wraps_column_expression or asfrom + elif ( + # general class of expressions that don't have a SQL-column + # addressible name. includes scalar selects, bind parameters, + # SQL functions, others + not isinstance(column, elements.NamedColumn) + # deeper check that indicates there's no natural "name" to + # this element, which accommodates for custom SQL constructs + # that might have a ".name" attribute (but aren't SQL + # functions) but are not implementing this more recently added + # base class. in theory the "NamedColumn" check should be + # enough, however here we seek to maintain legacy behaviors + # as well. + and column._non_anon_label is None + ): + render_with_label = True + else: + render_with_label = False + + if render_with_label: + if not fallback_label_name: + # used by the RETURNING case right now. 
we generate it + # here as 3rd party dialects may be referring to + # _label_select_column method directly instead of the + # just-added _label_returning_column method + assert not column_is_repeated + fallback_label_name = column._anon_name_label + + fallback_label_name = ( + elements._truncated_label(fallback_label_name) + if not isinstance( + fallback_label_name, elements._truncated_label + ) + else fallback_label_name + ) + + result_expr = _CompileLabel( + col_expr, fallback_label_name, alt_names=(proxy_name,) + ) + else: + result_expr = col_expr + + column_clause_args.update( + within_columns_clause=within_columns_clause, + add_to_result_map=add_to_result_map, + include_table=include_table, + ) + return result_expr._compiler_dispatch(self, **column_clause_args) + + def format_from_hint_text(self, sqltext, table, hint, iscrud): + hinttext = self.get_from_hint_text(table, hint) + if hinttext: + sqltext += " " + hinttext + return sqltext + + def get_select_hint_text(self, byfroms): + return None + + def get_from_hint_text(self, table, text): + return None + + def get_crud_hint_text(self, table, text): + return None + + def get_statement_hint_text(self, hint_texts): + return " ".join(hint_texts) + + _default_stack_entry: _CompilerStackEntry + + if not typing.TYPE_CHECKING: + _default_stack_entry = util.immutabledict( + [("correlate_froms", frozenset()), ("asfrom_froms", frozenset())] + ) + + def _display_froms_for_select( + self, select_stmt, asfrom, lateral=False, **kw + ): + # utility method to help external dialects + # get the correct from list for a select. + # specifically the oracle dialect needs this feature + # right now. 
+ toplevel = not self.stack + entry = self._default_stack_entry if toplevel else self.stack[-1] + + compile_state = select_stmt._compile_state_factory(select_stmt, self) + + correlate_froms = entry["correlate_froms"] + asfrom_froms = entry["asfrom_froms"] + + if asfrom and not lateral: + froms = compile_state._get_display_froms( + explicit_correlate_froms=correlate_froms.difference( + asfrom_froms + ), + implicit_correlate_froms=(), + ) + else: + froms = compile_state._get_display_froms( + explicit_correlate_froms=correlate_froms, + implicit_correlate_froms=asfrom_froms, + ) + return froms + + translate_select_structure: Any = None + """if not ``None``, should be a callable which accepts ``(select_stmt, + **kw)`` and returns a select object. this is used for structural changes + mostly to accommodate for LIMIT/OFFSET schemes + + """ + + def visit_select( + self, + select_stmt, + asfrom=False, + insert_into=False, + fromhints=None, + compound_index=None, + select_wraps_for=None, + lateral=False, + from_linter=None, + **kwargs, + ): + assert select_wraps_for is None, ( + "SQLAlchemy 1.4 requires use of " + "the translate_select_structure hook for structural " + "translations of SELECT objects" + ) + + # initial setup of SELECT. the compile_state_factory may now + # be creating a totally different SELECT from the one that was + # passed in. for ORM use this will convert from an ORM-state + # SELECT to a regular "Core" SELECT. other composed operations + # such as computation of joins will be performed. 
+ + kwargs["within_columns_clause"] = False + + compile_state = select_stmt._compile_state_factory( + select_stmt, self, **kwargs + ) + kwargs["ambiguous_table_name_map"] = ( + compile_state._ambiguous_table_name_map + ) + + select_stmt = compile_state.statement + + toplevel = not self.stack + + if toplevel and not self.compile_state: + self.compile_state = compile_state + + is_embedded_select = compound_index is not None or insert_into + + # translate step for Oracle, SQL Server which often need to + # restructure the SELECT to allow for LIMIT/OFFSET and possibly + # other conditions + if self.translate_select_structure: + new_select_stmt = self.translate_select_structure( + select_stmt, asfrom=asfrom, **kwargs + ) + + # if SELECT was restructured, maintain a link to the originals + # and assemble a new compile state + if new_select_stmt is not select_stmt: + compile_state_wraps_for = compile_state + select_wraps_for = select_stmt + select_stmt = new_select_stmt + + compile_state = select_stmt._compile_state_factory( + select_stmt, self, **kwargs + ) + select_stmt = compile_state.statement + + entry = self._default_stack_entry if toplevel else self.stack[-1] + + populate_result_map = need_column_expressions = ( + toplevel + or entry.get("need_result_map_for_compound", False) + or entry.get("need_result_map_for_nested", False) + ) + + # indicates there is a CompoundSelect in play and we are not the + # first select + if compound_index: + populate_result_map = False + + # this was first proposed as part of #3372; however, it is not + # reached in current tests and could possibly be an assertion + # instead. 
+ if not populate_result_map and "add_to_result_map" in kwargs: + del kwargs["add_to_result_map"] + + froms = self._setup_select_stack( + select_stmt, compile_state, entry, asfrom, lateral, compound_index + ) + + column_clause_args = kwargs.copy() + column_clause_args.update( + {"within_label_clause": False, "within_columns_clause": False} + ) + + text = "SELECT " # we're off to a good start ! + + if select_stmt._hints: + hint_text, byfrom = self._setup_select_hints(select_stmt) + if hint_text: + text += hint_text + " " + else: + byfrom = None + + if select_stmt._independent_ctes: + self._dispatch_independent_ctes(select_stmt, kwargs) + + if select_stmt._prefixes: + text += self._generate_prefixes( + select_stmt, select_stmt._prefixes, **kwargs + ) + + text += self.get_select_precolumns(select_stmt, **kwargs) + # the actual list of columns to print in the SELECT column list. + inner_columns = [ + c + for c in [ + self._label_select_column( + select_stmt, + column, + populate_result_map, + asfrom, + column_clause_args, + name=name, + proxy_name=proxy_name, + fallback_label_name=fallback_label_name, + column_is_repeated=repeated, + need_column_expressions=need_column_expressions, + ) + for ( + name, + proxy_name, + fallback_label_name, + column, + repeated, + ) in compile_state.columns_plus_names + ] + if c is not None + ] + + if populate_result_map and select_wraps_for is not None: + # if this select was generated from translate_select, + # rewrite the targeted columns in the result map + + translate = dict( + zip( + [ + name + for ( + key, + proxy_name, + fallback_label_name, + name, + repeated, + ) in compile_state.columns_plus_names + ], + [ + name + for ( + key, + proxy_name, + fallback_label_name, + name, + repeated, + ) in compile_state_wraps_for.columns_plus_names + ], + ) + ) + + self._result_columns = [ + ResultColumnsEntry( + key, name, tuple(translate.get(o, o) for o in obj), type_ + ) + for key, name, obj, type_ in self._result_columns + ] + + text = 
self._compose_select_body( + text, + select_stmt, + compile_state, + inner_columns, + froms, + byfrom, + toplevel, + kwargs, + ) + + if select_stmt._statement_hints: + per_dialect = [ + ht + for (dialect_name, ht) in select_stmt._statement_hints + if dialect_name in ("*", self.dialect.name) + ] + if per_dialect: + text += " " + self.get_statement_hint_text(per_dialect) + + # In compound query, CTEs are shared at the compound level + if self.ctes and (not is_embedded_select or toplevel): + nesting_level = len(self.stack) if not toplevel else None + text = self._render_cte_clause(nesting_level=nesting_level) + text + + if select_stmt._suffixes: + text += " " + self._generate_prefixes( + select_stmt, select_stmt._suffixes, **kwargs + ) + + self.stack.pop(-1) + + return text + + def _setup_select_hints( + self, select: Select[Any] + ) -> Tuple[str, _FromHintsType]: + byfrom = { + from_: hinttext + % {"name": from_._compiler_dispatch(self, ashint=True)} + for (from_, dialect), hinttext in select._hints.items() + if dialect in ("*", self.dialect.name) + } + hint_text = self.get_select_hint_text(byfrom) + return hint_text, byfrom + + def _setup_select_stack( + self, select, compile_state, entry, asfrom, lateral, compound_index + ): + correlate_froms = entry["correlate_froms"] + asfrom_froms = entry["asfrom_froms"] + + if compound_index == 0: + entry["select_0"] = select + elif compound_index: + select_0 = entry["select_0"] + numcols = len(select_0._all_selected_columns) + + if len(compile_state.columns_plus_names) != numcols: + raise exc.CompileError( + "All selectables passed to " + "CompoundSelect must have identical numbers of " + "columns; select #%d has %d columns, select " + "#%d has %d" + % ( + 1, + numcols, + compound_index + 1, + len(select._all_selected_columns), + ) + ) + + if asfrom and not lateral: + froms = compile_state._get_display_froms( + explicit_correlate_froms=correlate_froms.difference( + asfrom_froms + ), + implicit_correlate_froms=(), + ) + else: + 
froms = compile_state._get_display_froms( + explicit_correlate_froms=correlate_froms, + implicit_correlate_froms=asfrom_froms, + ) + + new_correlate_froms = set(_from_objects(*froms)) + all_correlate_froms = new_correlate_froms.union(correlate_froms) + + new_entry: _CompilerStackEntry = { + "asfrom_froms": new_correlate_froms, + "correlate_froms": all_correlate_froms, + "selectable": select, + "compile_state": compile_state, + } + self.stack.append(new_entry) + + return froms + + def _compose_select_body( + self, + text, + select, + compile_state, + inner_columns, + froms, + byfrom, + toplevel, + kwargs, + ): + text += ", ".join(inner_columns) + + if self.linting & COLLECT_CARTESIAN_PRODUCTS: + from_linter = FromLinter({}, set()) + warn_linting = self.linting & WARN_LINTING + if toplevel: + self.from_linter = from_linter + else: + from_linter = None + warn_linting = False + + # adjust the whitespace for no inner columns, part of #9440, + # so that a no-col SELECT comes out as "SELECT WHERE..." or + # "SELECT FROM ...". + # while it would be better to have built the SELECT starting string + # without trailing whitespace first, then add whitespace only if inner + # cols were present, this breaks compatibility with various custom + # compilation schemes that are currently being tested. 
+ if not inner_columns: + text = text.rstrip() + + if froms: + text += " \nFROM " + + if select._hints: + text += ", ".join( + [ + f._compiler_dispatch( + self, + asfrom=True, + fromhints=byfrom, + from_linter=from_linter, + **kwargs, + ) + for f in froms + ] + ) + else: + text += ", ".join( + [ + f._compiler_dispatch( + self, + asfrom=True, + from_linter=from_linter, + **kwargs, + ) + for f in froms + ] + ) + else: + text += self.default_from() + + if select._where_criteria: + t = self._generate_delimited_and_list( + select._where_criteria, from_linter=from_linter, **kwargs + ) + if t: + text += " \nWHERE " + t + + if warn_linting: + assert from_linter is not None + from_linter.warn() + + if select._group_by_clauses: + text += self.group_by_clause(select, **kwargs) + + if select._having_criteria: + t = self._generate_delimited_and_list( + select._having_criteria, **kwargs + ) + if t: + text += " \nHAVING " + t + + if select._order_by_clauses: + text += self.order_by_clause(select, **kwargs) + + if select._has_row_limiting_clause: + text += self._row_limit_clause(select, **kwargs) + + if select._for_update_arg is not None: + text += self.for_update_clause(select, **kwargs) + + return text + + def _generate_prefixes(self, stmt, prefixes, **kw): + clause = " ".join( + prefix._compiler_dispatch(self, **kw) + for prefix, dialect_name in prefixes + if dialect_name in (None, "*") or dialect_name == self.dialect.name + ) + if clause: + clause += " " + return clause + + def _render_cte_clause( + self, + nesting_level=None, + include_following_stack=False, + ): + """ + include_following_stack + Also render the nesting CTEs on the next stack. Useful for + SQL structures like UNION or INSERT that can wrap SELECT + statements containing nesting CTEs. 
+ """ + if not self.ctes: + return "" + + ctes: MutableMapping[CTE, str] + + if nesting_level and nesting_level > 1: + ctes = util.OrderedDict() + for cte in list(self.ctes.keys()): + cte_level, cte_name, cte_opts = self.level_name_by_cte[ + cte._get_reference_cte() + ] + nesting = cte.nesting or cte_opts.nesting + is_rendered_level = cte_level == nesting_level or ( + include_following_stack and cte_level == nesting_level + 1 + ) + if not (nesting and is_rendered_level): + continue + + ctes[cte] = self.ctes[cte] + + else: + ctes = self.ctes + + if not ctes: + return "" + ctes_recursive = any([cte.recursive for cte in ctes]) + + cte_text = self.get_cte_preamble(ctes_recursive) + " " + cte_text += ", \n".join([txt for txt in ctes.values()]) + cte_text += "\n " + + if nesting_level and nesting_level > 1: + for cte in list(ctes.keys()): + cte_level, cte_name, cte_opts = self.level_name_by_cte[ + cte._get_reference_cte() + ] + del self.ctes[cte] + del self.ctes_by_level_name[(cte_level, cte_name)] + del self.level_name_by_cte[cte._get_reference_cte()] + + return cte_text + + def get_cte_preamble(self, recursive): + if recursive: + return "WITH RECURSIVE" + else: + return "WITH" + + def get_select_precolumns(self, select, **kw): + """Called when building a ``SELECT`` statement, position is just + before column list. + + """ + if select._distinct_on: + util.warn_deprecated( + "DISTINCT ON is currently supported only by the PostgreSQL " + "dialect. 
Use of DISTINCT ON for other backends is currently " + "silently ignored, however this usage is deprecated, and will " + "raise CompileError in a future release for all backends " + "that do not support this syntax.", + version="1.4", + ) + return "DISTINCT " if select._distinct else "" + + def group_by_clause(self, select, **kw): + """allow dialects to customize how GROUP BY is rendered.""" + + group_by = self._generate_delimited_list( + select._group_by_clauses, OPERATORS[operators.comma_op], **kw + ) + if group_by: + return " GROUP BY " + group_by + else: + return "" + + def order_by_clause(self, select, **kw): + """allow dialects to customize how ORDER BY is rendered.""" + + order_by = self._generate_delimited_list( + select._order_by_clauses, OPERATORS[operators.comma_op], **kw + ) + + if order_by: + return " ORDER BY " + order_by + else: + return "" + + def for_update_clause(self, select, **kw): + return " FOR UPDATE" + + def returning_clause( + self, + stmt: UpdateBase, + returning_cols: Sequence[ColumnElement[Any]], + *, + populate_result_map: bool, + **kw: Any, + ) -> str: + columns = [ + self._label_returning_column( + stmt, + column, + populate_result_map, + fallback_label_name=fallback_label_name, + column_is_repeated=repeated, + name=name, + proxy_name=proxy_name, + **kw, + ) + for ( + name, + proxy_name, + fallback_label_name, + column, + repeated, + ) in stmt._generate_columns_plus_names( + True, cols=base._select_iterables(returning_cols) + ) + ] + + return "RETURNING " + ", ".join(columns) + + def limit_clause(self, select, **kw): + text = "" + if select._limit_clause is not None: + text += "\n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: + text += "\n LIMIT -1" + text += " OFFSET " + self.process(select._offset_clause, **kw) + return text + + def fetch_clause( + self, + select, + fetch_clause=None, + require_offset=False, + use_literal_execute_for_simple_int=False, + 
**kw, + ): + if fetch_clause is None: + fetch_clause = select._fetch_clause + fetch_clause_options = select._fetch_clause_options + else: + fetch_clause_options = {"percent": False, "with_ties": False} + + text = "" + + if select._offset_clause is not None: + offset_clause = select._offset_clause + if ( + use_literal_execute_for_simple_int + and select._simple_int_clause(offset_clause) + ): + offset_clause = offset_clause.render_literal_execute() + offset_str = self.process(offset_clause, **kw) + text += "\n OFFSET %s ROWS" % offset_str + elif require_offset: + text += "\n OFFSET 0 ROWS" + + if fetch_clause is not None: + if ( + use_literal_execute_for_simple_int + and select._simple_int_clause(fetch_clause) + ): + fetch_clause = fetch_clause.render_literal_execute() + text += "\n FETCH FIRST %s%s ROWS %s" % ( + self.process(fetch_clause, **kw), + " PERCENT" if fetch_clause_options["percent"] else "", + "WITH TIES" if fetch_clause_options["with_ties"] else "ONLY", + ) + return text + + def visit_table( + self, + table, + asfrom=False, + iscrud=False, + ashint=False, + fromhints=None, + use_schema=True, + from_linter=None, + ambiguous_table_name_map=None, + **kwargs, + ): + if from_linter: + from_linter.froms[table] = table.fullname + + if asfrom or ashint: + effective_schema = self.preparer.schema_for_object(table) + + if use_schema and effective_schema: + ret = ( + self.preparer.quote_schema(effective_schema) + + "." 
+ + self.preparer.quote(table.name) + ) + else: + ret = self.preparer.quote(table.name) + + if ( + not effective_schema + and ambiguous_table_name_map + and table.name in ambiguous_table_name_map + ): + anon_name = self._truncated_identifier( + "alias", ambiguous_table_name_map[table.name] + ) + + ret = ret + self.get_render_as_alias_suffix( + self.preparer.format_alias(None, anon_name) + ) + + if fromhints and table in fromhints: + ret = self.format_from_hint_text( + ret, table, fromhints[table], iscrud + ) + return ret + else: + return "" + + def visit_join(self, join, asfrom=False, from_linter=None, **kwargs): + if from_linter: + from_linter.edges.update( + itertools.product( + _de_clone(join.left._from_objects), + _de_clone(join.right._from_objects), + ) + ) + + if join.full: + join_type = " FULL OUTER JOIN " + elif join.isouter: + join_type = " LEFT OUTER JOIN " + else: + join_type = " JOIN " + return ( + join.left._compiler_dispatch( + self, asfrom=True, from_linter=from_linter, **kwargs + ) + + join_type + + join.right._compiler_dispatch( + self, asfrom=True, from_linter=from_linter, **kwargs + ) + + " ON " + # TODO: likely need asfrom=True here? + + join.onclause._compiler_dispatch( + self, from_linter=from_linter, **kwargs + ) + ) + + def _setup_crud_hints(self, stmt, table_text): + dialect_hints = { + table: hint_text + for (table, dialect), hint_text in stmt._hints.items() + if dialect in ("*", self.dialect.name) + } + if stmt.table in dialect_hints: + table_text = self.format_from_hint_text( + table_text, stmt.table, dialect_hints[stmt.table], True + ) + return dialect_hints, table_text + + # within the realm of "insertmanyvalues sentinel columns", + # these lookups match different kinds of Column() configurations + # to specific backend capabilities. 
they are broken into two + # lookups, one for autoincrement columns and the other for non + # autoincrement columns + _sentinel_col_non_autoinc_lookup = util.immutabledict( + { + _SentinelDefaultCharacterization.CLIENTSIDE: ( + InsertmanyvaluesSentinelOpts._SUPPORTED_OR_NOT + ), + _SentinelDefaultCharacterization.SENTINEL_DEFAULT: ( + InsertmanyvaluesSentinelOpts._SUPPORTED_OR_NOT + ), + _SentinelDefaultCharacterization.NONE: ( + InsertmanyvaluesSentinelOpts._SUPPORTED_OR_NOT + ), + _SentinelDefaultCharacterization.IDENTITY: ( + InsertmanyvaluesSentinelOpts.IDENTITY + ), + _SentinelDefaultCharacterization.SEQUENCE: ( + InsertmanyvaluesSentinelOpts.SEQUENCE + ), + } + ) + _sentinel_col_autoinc_lookup = _sentinel_col_non_autoinc_lookup.union( + { + _SentinelDefaultCharacterization.NONE: ( + InsertmanyvaluesSentinelOpts.AUTOINCREMENT + ), + } + ) + + def _get_sentinel_column_for_table( + self, table: Table + ) -> Optional[Sequence[Column[Any]]]: + """given a :class:`.Table`, return a usable sentinel column or + columns for this dialect if any. + + Return None if no sentinel columns could be identified, or raise an + error if a column was marked as a sentinel explicitly but isn't + compatible with this dialect. + + """ + + sentinel_opts = self.dialect.insertmanyvalues_implicit_sentinel + sentinel_characteristics = table._sentinel_column_characteristics + + sent_cols = sentinel_characteristics.columns + + if sent_cols is None: + return None + + if sentinel_characteristics.is_autoinc: + bitmask = self._sentinel_col_autoinc_lookup.get( + sentinel_characteristics.default_characterization, 0 + ) + else: + bitmask = self._sentinel_col_non_autoinc_lookup.get( + sentinel_characteristics.default_characterization, 0 + ) + + if sentinel_opts & bitmask: + return sent_cols + + if sentinel_characteristics.is_explicit: + # a column was explicitly marked as insert_sentinel=True, + # however it is not compatible with this dialect. 
they should + # not indicate this column as a sentinel if they need to include + # this dialect. + + # TODO: do we want non-primary key explicit sentinel cols + # that can gracefully degrade for some backends? + # insert_sentinel="degrade" perhaps. not for the initial release. + # I am hoping people are generally not dealing with this sentinel + # business at all. + + # if is_explicit is True, there will be only one sentinel column. + + raise exc.InvalidRequestError( + f"Column {sent_cols[0]} can't be explicitly " + "marked as a sentinel column when using the " + f"{self.dialect.name} dialect, as the " + "particular type of default generation on this column is " + "not currently compatible with this dialect's specific " + f"INSERT..RETURNING syntax which can receive the " + "server-generated value in " + "a deterministic way. To remove this error, remove " + "insert_sentinel=True from primary key autoincrement " + "columns; these columns are automatically used as " + "sentinels for supported dialects in any case." + ) + + return None + + def _deliver_insertmanyvalues_batches( + self, + statement: str, + parameters: _DBAPIMultiExecuteParams, + compiled_parameters: List[_MutableCoreSingleExecuteParams], + generic_setinputsizes: Optional[_GenericSetInputSizesType], + batch_size: int, + sort_by_parameter_order: bool, + schema_translate_map: Optional[SchemaTranslateMapType], + ) -> Iterator[_InsertManyValuesBatch]: + imv = self._insertmanyvalues + assert imv is not None + + if not imv.sentinel_param_keys: + _sentinel_from_params = None + else: + _sentinel_from_params = operator.itemgetter( + *imv.sentinel_param_keys + ) + + lenparams = len(parameters) + if imv.is_default_expr and not self.dialect.supports_default_metavalue: + # backend doesn't support + # INSERT INTO table (pk_col) VALUES (DEFAULT), (DEFAULT), ... + # at the moment this is basically SQL Server due to + # not being able to use DEFAULT for identity column + # just yield out that many single statements! 
still + # faster than a whole connection.execute() call ;) + # + # note we still are taking advantage of the fact that we know + # we are using RETURNING. The generalized approach of fetching + # cursor.lastrowid etc. still goes through the more heavyweight + # "ExecutionContext per statement" system as it isn't usable + # as a generic "RETURNING" approach + use_row_at_a_time = True + downgraded = False + elif not self.dialect.supports_multivalues_insert or ( + sort_by_parameter_order + and self._result_columns + and (imv.sentinel_columns is None or imv.includes_upsert_behaviors) + ): + # deterministic order was requested and the compiler could + # not organize sentinel columns for this dialect/statement. + # use row at a time + use_row_at_a_time = True + downgraded = True + else: + use_row_at_a_time = False + downgraded = False + + if use_row_at_a_time: + for batchnum, (param, compiled_param) in enumerate( + cast( + "Sequence[Tuple[_DBAPISingleExecuteParams, _MutableCoreSingleExecuteParams]]", # noqa: E501 + zip(parameters, compiled_parameters), + ), + 1, + ): + yield _InsertManyValuesBatch( + statement, + param, + generic_setinputsizes, + [param], + ( + [_sentinel_from_params(compiled_param)] + if _sentinel_from_params + else [] + ), + 1, + batchnum, + lenparams, + sort_by_parameter_order, + downgraded, + ) + return + + if schema_translate_map: + rst = functools.partial( + self.preparer._render_schema_translates, + schema_translate_map=schema_translate_map, + ) + else: + rst = None + + imv_single_values_expr = imv.single_values_expr + if rst: + imv_single_values_expr = rst(imv_single_values_expr) + + executemany_values = f"({imv_single_values_expr})" + statement = statement.replace(executemany_values, "__EXECMANY_TOKEN__") + + # Use optional insertmanyvalues_max_parameters + # to further shrink the batch size so that there are no more than + # insertmanyvalues_max_parameters params. 
+ # Currently used by SQL Server, which limits statements to 2100 bound + # parameters (actually 2099). + max_params = self.dialect.insertmanyvalues_max_parameters + if max_params: + total_num_of_params = len(self.bind_names) + num_params_per_batch = len(imv.insert_crud_params) + num_params_outside_of_batch = ( + total_num_of_params - num_params_per_batch + ) + batch_size = min( + batch_size, + ( + (max_params - num_params_outside_of_batch) + // num_params_per_batch + ), + ) + + batches = cast("List[Sequence[Any]]", list(parameters)) + compiled_batches = cast( + "List[Sequence[Any]]", list(compiled_parameters) + ) + + processed_setinputsizes: Optional[_GenericSetInputSizesType] = None + batchnum = 1 + total_batches = lenparams // batch_size + ( + 1 if lenparams % batch_size else 0 + ) + + insert_crud_params = imv.insert_crud_params + assert insert_crud_params is not None + + if rst: + insert_crud_params = [ + (col, key, rst(expr), st) + for col, key, expr, st in insert_crud_params + ] + + escaped_bind_names: Mapping[str, str] + expand_pos_lower_index = expand_pos_upper_index = 0 + + if not self.positional: + if self.escaped_bind_names: + escaped_bind_names = self.escaped_bind_names + else: + escaped_bind_names = {} + + all_keys = set(parameters[0]) + + def apply_placeholders(keys, formatted): + for key in keys: + key = escaped_bind_names.get(key, key) + formatted = formatted.replace( + self.bindtemplate % {"name": key}, + self.bindtemplate + % {"name": f"{key}__EXECMANY_INDEX__"}, + ) + return formatted + + if imv.embed_values_counter: + imv_values_counter = ", _IMV_VALUES_COUNTER" + else: + imv_values_counter = "" + formatted_values_clause = f"""({', '.join( + apply_placeholders(bind_keys, formatted) + for _, _, formatted, bind_keys in insert_crud_params + )}{imv_values_counter})""" + + keys_to_replace = all_keys.intersection( + escaped_bind_names.get(key, key) + for _, _, _, bind_keys in insert_crud_params + for key in bind_keys + ) + base_parameters = { + key: 
parameters[0][key] + for key in all_keys.difference(keys_to_replace) + } + executemany_values_w_comma = "" + else: + formatted_values_clause = "" + keys_to_replace = set() + base_parameters = {} + + if imv.embed_values_counter: + executemany_values_w_comma = ( + f"({imv_single_values_expr}, _IMV_VALUES_COUNTER), " + ) + else: + executemany_values_w_comma = f"({imv_single_values_expr}), " + + all_names_we_will_expand: Set[str] = set() + for elem in imv.insert_crud_params: + all_names_we_will_expand.update(elem[3]) + + # get the start and end position in a particular list + # of parameters where we will be doing the "expanding". + # statements can have params on either side or both sides, + # given RETURNING and CTEs + if all_names_we_will_expand: + positiontup = self.positiontup + assert positiontup is not None + + all_expand_positions = { + idx + for idx, name in enumerate(positiontup) + if name in all_names_we_will_expand + } + expand_pos_lower_index = min(all_expand_positions) + expand_pos_upper_index = max(all_expand_positions) + 1 + assert ( + len(all_expand_positions) + == expand_pos_upper_index - expand_pos_lower_index + ) + + if self._numeric_binds: + escaped = re.escape(self._numeric_binds_identifier_char) + executemany_values_w_comma = re.sub( + rf"{escaped}\d+", "%s", executemany_values_w_comma + ) + + while batches: + batch = batches[0:batch_size] + compiled_batch = compiled_batches[0:batch_size] + + batches[0:batch_size] = [] + compiled_batches[0:batch_size] = [] + + if batches: + current_batch_size = batch_size + else: + current_batch_size = len(batch) + + if generic_setinputsizes: + # if setinputsizes is present, expand this collection to + # suit the batch length as well + # currently this will be mssql+pyodbc for internal dialects + processed_setinputsizes = [ + (new_key, len_, typ) + for new_key, len_, typ in ( + (f"{key}_{index}", len_, typ) + for index in range(current_batch_size) + for key, len_, typ in generic_setinputsizes + ) + ] + + 
replaced_parameters: Any + if self.positional: + num_ins_params = imv.num_positional_params_counted + + batch_iterator: Iterable[Sequence[Any]] + extra_params_left: Sequence[Any] + extra_params_right: Sequence[Any] + + if num_ins_params == len(batch[0]): + extra_params_left = extra_params_right = () + batch_iterator = batch + else: + extra_params_left = batch[0][:expand_pos_lower_index] + extra_params_right = batch[0][expand_pos_upper_index:] + batch_iterator = ( + b[expand_pos_lower_index:expand_pos_upper_index] + for b in batch + ) + + if imv.embed_values_counter: + expanded_values_string = ( + "".join( + executemany_values_w_comma.replace( + "_IMV_VALUES_COUNTER", str(i) + ) + for i, _ in enumerate(batch) + ) + )[:-2] + else: + expanded_values_string = ( + (executemany_values_w_comma * current_batch_size) + )[:-2] + + if self._numeric_binds and num_ins_params > 0: + # numeric will always number the parameters inside of + # VALUES (and thus order self.positiontup) to be higher + # than non-VALUES parameters, no matter where in the + # statement those non-VALUES parameters appear (this is + # ensured in _process_numeric by numbering first all + # params that are not in _values_bindparam) + # therefore all extra params are always + # on the left side and numbered lower than the VALUES + # parameters + assert not extra_params_right + + start = expand_pos_lower_index + 1 + end = num_ins_params * (current_batch_size) + start + + # need to format here, since statement may contain + # unescaped %, while values_string contains just (%s, %s) + positions = tuple( + f"{self._numeric_binds_identifier_char}{i}" + for i in range(start, end) + ) + expanded_values_string = expanded_values_string % positions + + replaced_statement = statement.replace( + "__EXECMANY_TOKEN__", expanded_values_string + ) + + replaced_parameters = tuple( + itertools.chain.from_iterable(batch_iterator) + ) + + replaced_parameters = ( + extra_params_left + + replaced_parameters + + extra_params_right + 
) + + else: + replaced_values_clauses = [] + replaced_parameters = base_parameters.copy() + + for i, param in enumerate(batch): + fmv = formatted_values_clause.replace( + "EXECMANY_INDEX__", str(i) + ) + if imv.embed_values_counter: + fmv = fmv.replace("_IMV_VALUES_COUNTER", str(i)) + + replaced_values_clauses.append(fmv) + replaced_parameters.update( + {f"{key}__{i}": param[key] for key in keys_to_replace} + ) + + replaced_statement = statement.replace( + "__EXECMANY_TOKEN__", + ", ".join(replaced_values_clauses), + ) + + yield _InsertManyValuesBatch( + replaced_statement, + replaced_parameters, + processed_setinputsizes, + batch, + ( + [_sentinel_from_params(cb) for cb in compiled_batch] + if _sentinel_from_params + else [] + ), + current_batch_size, + batchnum, + total_batches, + sort_by_parameter_order, + False, + ) + batchnum += 1 + + def visit_insert( + self, insert_stmt, visited_bindparam=None, visiting_cte=None, **kw + ): + compile_state = insert_stmt._compile_state_factory( + insert_stmt, self, **kw + ) + insert_stmt = compile_state.statement + + if visiting_cte is not None: + kw["visiting_cte"] = visiting_cte + toplevel = False + else: + toplevel = not self.stack + + if toplevel: + self.isinsert = True + if not self.dml_compile_state: + self.dml_compile_state = compile_state + if not self.compile_state: + self.compile_state = compile_state + + self.stack.append( + { + "correlate_froms": set(), + "asfrom_froms": set(), + "selectable": insert_stmt, + } + ) + + counted_bindparam = 0 + + # reset any incoming "visited_bindparam" collection + visited_bindparam = None + + # for positional, insertmanyvalues needs to know how many + # bound parameters are in the VALUES sequence; there's no simple + # rule because default expressions etc. can have zero or more + # params inside them. 
After multiple attempts to figure this out, + # this very simplistic "count after" works and is + # likely the least amount of callcounts, though looks clumsy + if self.positional and visiting_cte is None: + # if we are inside a CTE, don't count parameters + # here since they wont be for insertmanyvalues. keep + # visited_bindparam at None so no counting happens. + # see #9173 + visited_bindparam = [] + + crud_params_struct = crud._get_crud_params( + self, + insert_stmt, + compile_state, + toplevel, + visited_bindparam=visited_bindparam, + **kw, + ) + + if self.positional and visited_bindparam is not None: + counted_bindparam = len(visited_bindparam) + if self._numeric_binds: + if self._values_bindparam is not None: + self._values_bindparam += visited_bindparam + else: + self._values_bindparam = visited_bindparam + + crud_params_single = crud_params_struct.single_params + + if ( + not crud_params_single + and not self.dialect.supports_default_values + and not self.dialect.supports_default_metavalue + and not self.dialect.supports_empty_insert + ): + raise exc.CompileError( + "The '%s' dialect with current database " + "version settings does not support empty " + "inserts." % self.dialect.name + ) + + if compile_state._has_multi_parameters: + if not self.dialect.supports_multivalues_insert: + raise exc.CompileError( + "The '%s' dialect with current database " + "version settings does not support " + "in-place multirow inserts." % self.dialect.name + ) + elif ( + self.implicit_returning or insert_stmt._returning + ) and insert_stmt._sort_by_parameter_order: + raise exc.CompileError( + "RETURNING cannot be determinstically sorted when " + "using an INSERT which includes multi-row values()." 
+ ) + crud_params_single = crud_params_struct.single_params + else: + crud_params_single = crud_params_struct.single_params + + preparer = self.preparer + supports_default_values = self.dialect.supports_default_values + + text = "INSERT " + + if insert_stmt._prefixes: + text += self._generate_prefixes( + insert_stmt, insert_stmt._prefixes, **kw + ) + + text += "INTO " + table_text = preparer.format_table(insert_stmt.table) + + if insert_stmt._hints: + _, table_text = self._setup_crud_hints(insert_stmt, table_text) + + if insert_stmt._independent_ctes: + self._dispatch_independent_ctes(insert_stmt, kw) + + text += table_text + + if crud_params_single or not supports_default_values: + text += " (%s)" % ", ".join( + [expr for _, expr, _, _ in crud_params_single] + ) + + # look for insertmanyvalues attributes that would have been configured + # by crud.py as it scanned through the columns to be part of the + # INSERT + use_insertmanyvalues = crud_params_struct.use_insertmanyvalues + named_sentinel_params: Optional[Sequence[str]] = None + add_sentinel_cols = None + implicit_sentinel = False + + returning_cols = self.implicit_returning or insert_stmt._returning + if returning_cols: + add_sentinel_cols = crud_params_struct.use_sentinel_columns + if add_sentinel_cols is not None: + assert use_insertmanyvalues + + # search for the sentinel column explicitly present + # in the INSERT columns list, and additionally check that + # this column has a bound parameter name set up that's in the + # parameter list. If both of these cases are present, it means + # we will have a client side value for the sentinel in each + # parameter set. 
+ + _params_by_col = { + col: param_names + for col, _, _, param_names in crud_params_single + } + named_sentinel_params = [] + for _add_sentinel_col in add_sentinel_cols: + if _add_sentinel_col not in _params_by_col: + named_sentinel_params = None + break + param_name = self._within_exec_param_key_getter( + _add_sentinel_col + ) + if param_name not in _params_by_col[_add_sentinel_col]: + named_sentinel_params = None + break + named_sentinel_params.append(param_name) + + if named_sentinel_params is None: + # if we are not going to have a client side value for + # the sentinel in the parameter set, that means it's + # an autoincrement, an IDENTITY, or a server-side SQL + # expression like nextval('seqname'). So this is + # an "implicit" sentinel; we will look for it in + # RETURNING + # only, and then sort on it. For this case on PG, + # SQL Server we have to use a special INSERT form + # that guarantees the server side function lines up with + # the entries in the VALUES. + if ( + self.dialect.insertmanyvalues_implicit_sentinel + & InsertmanyvaluesSentinelOpts.ANY_AUTOINCREMENT + ): + implicit_sentinel = True + else: + # here, we are not using a sentinel at all + # and we are likely the SQLite dialect. + # The first add_sentinel_col that we have should not + # be marked as "insert_sentinel=True". if it was, + # an error should have been raised in + # _get_sentinel_column_for_table. + assert not add_sentinel_cols[0]._insert_sentinel, ( + "sentinel selection rules should have prevented " + "us from getting here for this dialect" + ) + + # always put the sentinel columns last. even if they are + # in the returning list already, they will be there twice + # then. 
+ returning_cols = list(returning_cols) + list(add_sentinel_cols) + + returning_clause = self.returning_clause( + insert_stmt, + returning_cols, + populate_result_map=toplevel, + ) + + if self.returning_precedes_values: + text += " " + returning_clause + + else: + returning_clause = None + + if insert_stmt.select is not None: + # placed here by crud.py + select_text = self.process( + self.stack[-1]["insert_from_select"], insert_into=True, **kw + ) + + if self.ctes and self.dialect.cte_follows_insert: + nesting_level = len(self.stack) if not toplevel else None + text += " %s%s" % ( + self._render_cte_clause( + nesting_level=nesting_level, + include_following_stack=True, + ), + select_text, + ) + else: + text += " %s" % select_text + elif not crud_params_single and supports_default_values: + text += " DEFAULT VALUES" + if use_insertmanyvalues: + self._insertmanyvalues = _InsertManyValues( + True, + self.dialect.default_metavalue_token, + cast( + "List[crud._CrudParamElementStr]", crud_params_single + ), + counted_bindparam, + sort_by_parameter_order=( + insert_stmt._sort_by_parameter_order + ), + includes_upsert_behaviors=( + insert_stmt._post_values_clause is not None + ), + sentinel_columns=add_sentinel_cols, + num_sentinel_columns=( + len(add_sentinel_cols) if add_sentinel_cols else 0 + ), + implicit_sentinel=implicit_sentinel, + ) + elif compile_state._has_multi_parameters: + text += " VALUES %s" % ( + ", ".join( + "(%s)" + % (", ".join(value for _, _, value, _ in crud_param_set)) + for crud_param_set in crud_params_struct.all_multi_params + ), + ) + else: + insert_single_values_expr = ", ".join( + [ + value + for _, _, value, _ in cast( + "List[crud._CrudParamElementStr]", + crud_params_single, + ) + ] + ) + + if use_insertmanyvalues: + if ( + implicit_sentinel + and ( + self.dialect.insertmanyvalues_implicit_sentinel + & InsertmanyvaluesSentinelOpts.USE_INSERT_FROM_SELECT + ) + # this is checking if we have + # INSERT INTO table (id) VALUES (DEFAULT). 
+ and not (crud_params_struct.is_default_metavalue_only) + ): + # if we have a sentinel column that is server generated, + # then for selected backends render the VALUES list as a + # subquery. This is the orderable form supported by + # PostgreSQL and SQL Server. + embed_sentinel_value = True + + render_bind_casts = ( + self.dialect.insertmanyvalues_implicit_sentinel + & InsertmanyvaluesSentinelOpts.RENDER_SELECT_COL_CASTS + ) + + colnames = ", ".join( + f"p{i}" for i, _ in enumerate(crud_params_single) + ) + + if render_bind_casts: + # render casts for the SELECT list. For PG, we are + # already rendering bind casts in the parameter list, + # selectively for the more "tricky" types like ARRAY. + # however, even for the "easy" types, if the parameter + # is NULL for every entry, PG gives up and says + # "it must be TEXT", which fails for other easy types + # like ints. So we cast on this side too. + colnames_w_cast = ", ".join( + self.render_bind_cast( + col.type, + col.type._unwrapped_dialect_impl(self.dialect), + f"p{i}", + ) + for i, (col, *_) in enumerate(crud_params_single) + ) + else: + colnames_w_cast = colnames + + text += ( + f" SELECT {colnames_w_cast} FROM " + f"(VALUES ({insert_single_values_expr})) " + f"AS imp_sen({colnames}, sen_counter) " + "ORDER BY sen_counter" + ) + else: + # otherwise, if no sentinel or backend doesn't support + # orderable subquery form, use a plain VALUES list + embed_sentinel_value = False + text += f" VALUES ({insert_single_values_expr})" + + self._insertmanyvalues = _InsertManyValues( + is_default_expr=False, + single_values_expr=insert_single_values_expr, + insert_crud_params=cast( + "List[crud._CrudParamElementStr]", + crud_params_single, + ), + num_positional_params_counted=counted_bindparam, + sort_by_parameter_order=( + insert_stmt._sort_by_parameter_order + ), + includes_upsert_behaviors=( + insert_stmt._post_values_clause is not None + ), + sentinel_columns=add_sentinel_cols, + num_sentinel_columns=( + 
                        len(add_sentinel_cols) if add_sentinel_cols else 0
                    ),
                    sentinel_param_keys=named_sentinel_params,
                    implicit_sentinel=implicit_sentinel,
                    embed_values_counter=embed_sentinel_value,
                )

            else:
                # insertmanyvalues not in use; render a plain single-row
                # VALUES list
                text += f" VALUES ({insert_single_values_expr})"

        if insert_stmt._post_values_clause is not None:
            # clause rendered after VALUES, e.g. ON CONFLICT / ON DUPLICATE
            # KEY constructs supplied by dialect-specific insert forms
            post_values_clause = self.process(
                insert_stmt._post_values_clause, **kw
            )
            if post_values_clause:
                text += " " + post_values_clause

        if returning_clause and not self.returning_precedes_values:
            text += " " + returning_clause

        if self.ctes and not self.dialect.cte_follows_insert:
            # prepend the WITH clause for dialects where CTEs precede INSERT
            nesting_level = len(self.stack) if not toplevel else None
            text = (
                self._render_cte_clause(
                    nesting_level=nesting_level,
                    include_following_stack=True,
                )
                + text
            )

        self.stack.pop(-1)

        return text

    def update_limit_clause(self, update_stmt):
        """Provide a hook for MySQL to add LIMIT to the UPDATE"""
        # default: no LIMIT support in UPDATE
        return None

    def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw):
        """Provide a hook to override the initial table clause
        in an UPDATE statement.

        MySQL overrides this.

        """
        # "asfrom" so the table renders with schema qualification as in a
        # FROM clause; "iscrud" indicates DML target context
        kw["asfrom"] = True
        return from_table._compiler_dispatch(self, iscrud=True, **kw)

    def update_from_clause(
        self, update_stmt, from_table, extra_froms, from_hints, **kw
    ):
        """Provide a hook to override the generation of an
        UPDATE..FROM clause.

        MySQL and MSSQL override this.
+ + """ + raise NotImplementedError( + "This backend does not support multiple-table " + "criteria within UPDATE" + ) + + def visit_update(self, update_stmt, visiting_cte=None, **kw): + compile_state = update_stmt._compile_state_factory( + update_stmt, self, **kw + ) + update_stmt = compile_state.statement + + if visiting_cte is not None: + kw["visiting_cte"] = visiting_cte + toplevel = False + else: + toplevel = not self.stack + + if toplevel: + self.isupdate = True + if not self.dml_compile_state: + self.dml_compile_state = compile_state + if not self.compile_state: + self.compile_state = compile_state + + if self.linting & COLLECT_CARTESIAN_PRODUCTS: + from_linter = FromLinter({}, set()) + warn_linting = self.linting & WARN_LINTING + if toplevel: + self.from_linter = from_linter + else: + from_linter = None + warn_linting = False + + extra_froms = compile_state._extra_froms + is_multitable = bool(extra_froms) + + if is_multitable: + # main table might be a JOIN + main_froms = set(_from_objects(update_stmt.table)) + render_extra_froms = [ + f for f in extra_froms if f not in main_froms + ] + correlate_froms = main_froms.union(extra_froms) + else: + render_extra_froms = [] + correlate_froms = {update_stmt.table} + + self.stack.append( + { + "correlate_froms": correlate_froms, + "asfrom_froms": correlate_froms, + "selectable": update_stmt, + } + ) + + text = "UPDATE " + + if update_stmt._prefixes: + text += self._generate_prefixes( + update_stmt, update_stmt._prefixes, **kw + ) + + table_text = self.update_tables_clause( + update_stmt, + update_stmt.table, + render_extra_froms, + from_linter=from_linter, + **kw, + ) + crud_params_struct = crud._get_crud_params( + self, update_stmt, compile_state, toplevel, **kw + ) + crud_params = crud_params_struct.single_params + + if update_stmt._hints: + dialect_hints, table_text = self._setup_crud_hints( + update_stmt, table_text + ) + else: + dialect_hints = None + + if update_stmt._independent_ctes: + 
self._dispatch_independent_ctes(update_stmt, kw) + + text += table_text + + text += " SET " + text += ", ".join( + expr + "=" + value + for _, expr, value, _ in cast( + "List[Tuple[Any, str, str, Any]]", crud_params + ) + ) + + if self.implicit_returning or update_stmt._returning: + if self.returning_precedes_values: + text += " " + self.returning_clause( + update_stmt, + self.implicit_returning or update_stmt._returning, + populate_result_map=toplevel, + ) + + if extra_froms: + extra_from_text = self.update_from_clause( + update_stmt, + update_stmt.table, + render_extra_froms, + dialect_hints, + from_linter=from_linter, + **kw, + ) + if extra_from_text: + text += " " + extra_from_text + + if update_stmt._where_criteria: + t = self._generate_delimited_and_list( + update_stmt._where_criteria, from_linter=from_linter, **kw + ) + if t: + text += " WHERE " + t + + limit_clause = self.update_limit_clause(update_stmt) + if limit_clause: + text += " " + limit_clause + + if ( + self.implicit_returning or update_stmt._returning + ) and not self.returning_precedes_values: + text += " " + self.returning_clause( + update_stmt, + self.implicit_returning or update_stmt._returning, + populate_result_map=toplevel, + ) + + if self.ctes: + nesting_level = len(self.stack) if not toplevel else None + text = self._render_cte_clause(nesting_level=nesting_level) + text + + if warn_linting: + assert from_linter is not None + from_linter.warn(stmt_type="UPDATE") + + self.stack.pop(-1) + + return text + + def delete_extra_from_clause( + self, update_stmt, from_table, extra_froms, from_hints, **kw + ): + """Provide a hook to override the generation of an + DELETE..FROM clause. + + This can be used to implement DELETE..USING for example. + + MySQL and MSSQL override this. 
+ + """ + raise NotImplementedError( + "This backend does not support multiple-table " + "criteria within DELETE" + ) + + def delete_table_clause(self, delete_stmt, from_table, extra_froms, **kw): + return from_table._compiler_dispatch( + self, asfrom=True, iscrud=True, **kw + ) + + def visit_delete(self, delete_stmt, visiting_cte=None, **kw): + compile_state = delete_stmt._compile_state_factory( + delete_stmt, self, **kw + ) + delete_stmt = compile_state.statement + + if visiting_cte is not None: + kw["visiting_cte"] = visiting_cte + toplevel = False + else: + toplevel = not self.stack + + if toplevel: + self.isdelete = True + if not self.dml_compile_state: + self.dml_compile_state = compile_state + if not self.compile_state: + self.compile_state = compile_state + + if self.linting & COLLECT_CARTESIAN_PRODUCTS: + from_linter = FromLinter({}, set()) + warn_linting = self.linting & WARN_LINTING + if toplevel: + self.from_linter = from_linter + else: + from_linter = None + warn_linting = False + + extra_froms = compile_state._extra_froms + + correlate_froms = {delete_stmt.table}.union(extra_froms) + self.stack.append( + { + "correlate_froms": correlate_froms, + "asfrom_froms": correlate_froms, + "selectable": delete_stmt, + } + ) + + text = "DELETE " + + if delete_stmt._prefixes: + text += self._generate_prefixes( + delete_stmt, delete_stmt._prefixes, **kw + ) + + text += "FROM " + + try: + table_text = self.delete_table_clause( + delete_stmt, + delete_stmt.table, + extra_froms, + from_linter=from_linter, + ) + except TypeError: + # anticipate 3rd party dialects that don't include **kw + # TODO: remove in 2.1 + table_text = self.delete_table_clause( + delete_stmt, delete_stmt.table, extra_froms + ) + if from_linter: + _ = self.process(delete_stmt.table, from_linter=from_linter) + + crud._get_crud_params(self, delete_stmt, compile_state, toplevel, **kw) + + if delete_stmt._hints: + dialect_hints, table_text = self._setup_crud_hints( + delete_stmt, table_text + ) + 
else: + dialect_hints = None + + if delete_stmt._independent_ctes: + self._dispatch_independent_ctes(delete_stmt, kw) + + text += table_text + + if ( + self.implicit_returning or delete_stmt._returning + ) and self.returning_precedes_values: + text += " " + self.returning_clause( + delete_stmt, + self.implicit_returning or delete_stmt._returning, + populate_result_map=toplevel, + ) + + if extra_froms: + extra_from_text = self.delete_extra_from_clause( + delete_stmt, + delete_stmt.table, + extra_froms, + dialect_hints, + from_linter=from_linter, + **kw, + ) + if extra_from_text: + text += " " + extra_from_text + + if delete_stmt._where_criteria: + t = self._generate_delimited_and_list( + delete_stmt._where_criteria, from_linter=from_linter, **kw + ) + if t: + text += " WHERE " + t + + if ( + self.implicit_returning or delete_stmt._returning + ) and not self.returning_precedes_values: + text += " " + self.returning_clause( + delete_stmt, + self.implicit_returning or delete_stmt._returning, + populate_result_map=toplevel, + ) + + if self.ctes: + nesting_level = len(self.stack) if not toplevel else None + text = self._render_cte_clause(nesting_level=nesting_level) + text + + if warn_linting: + assert from_linter is not None + from_linter.warn(stmt_type="DELETE") + + self.stack.pop(-1) + + return text + + def visit_savepoint(self, savepoint_stmt, **kw): + return "SAVEPOINT %s" % self.preparer.format_savepoint(savepoint_stmt) + + def visit_rollback_to_savepoint(self, savepoint_stmt, **kw): + return "ROLLBACK TO SAVEPOINT %s" % self.preparer.format_savepoint( + savepoint_stmt + ) + + def visit_release_savepoint(self, savepoint_stmt, **kw): + return "RELEASE SAVEPOINT %s" % self.preparer.format_savepoint( + savepoint_stmt + ) + + +class StrSQLCompiler(SQLCompiler): + """A :class:`.SQLCompiler` subclass which allows a small selection + of non-standard SQL features to render into a string value. 

    The :class:`.StrSQLCompiler` is invoked whenever a Core expression
    element is directly stringified without calling upon the
    :meth:`_expression.ClauseElement.compile` method.
    It can render a limited set
    of non-standard SQL constructs to assist in basic stringification,
    however for more substantial custom or dialect-specific SQL constructs,
    it will be necessary to make use of
    :meth:`_expression.ClauseElement.compile`
    directly.

    .. seealso::

        :ref:`faq_sql_expression_string`

    """

    def _fallback_column_name(self, column):
        # NOTE(review): upstream SQLAlchemy returns a "<name unknown>"
        # placeholder here; the empty string in this copy looks like
        # angle-bracket text stripped during extraction -- TODO confirm
        # against upstream before relying on this output.
        return ""

    @util.preload_module("sqlalchemy.engine.url")
    def visit_unsupported_compilation(self, element, err, **kw):
        # if the element targets a specific dialect, delegate to that
        # dialect's real compiler before giving up
        if element.stringify_dialect != "default":
            url = util.preloaded.engine_url
            dialect = url.URL.create(element.stringify_dialect).get_dialect()()

            compiler = dialect.statement_compiler(
                dialect, None, _supporting_against=self
            )
            if not isinstance(compiler, StrSQLCompiler):
                return compiler.process(element, **kw)

        return super().visit_unsupported_compilation(element, err)

    def visit_getitem_binary(self, binary, operator, **kw):
        # generic bracketed-index rendering, e.g. col[5]
        return "%s[%s]" % (
            self.process(binary.left, **kw),
            self.process(binary.right, **kw),
        )

    def visit_json_getitem_op_binary(self, binary, operator, **kw):
        return self.visit_getitem_binary(binary, operator, **kw)

    def visit_json_path_getitem_op_binary(self, binary, operator, **kw):
        return self.visit_getitem_binary(binary, operator, **kw)

    def visit_sequence(self, seq, **kw):
        # NOTE(review): "" has no %s placeholder, so this would raise
        # TypeError at runtime; upstream renders a "<sequence: %s>"
        # placeholder -- the angle-bracket text appears lost to extraction.
        # TODO confirm against upstream.
        return "" % self.preparer.format_sequence(seq)

    def returning_clause(
        self,
        stmt: UpdateBase,
        returning_cols: Sequence[ColumnElement[Any]],
        *,
        populate_result_map: bool,
        **kw: Any,
    ) -> str:
        # stringification supports RETURNING even for dialect-agnostic output
        columns = [
            self._label_select_column(None, c, True, False, {})
            for c in base._select_iterables(returning_cols)
        ]
        return "RETURNING " + ", ".join(columns)

    def update_from_clause(
        self, update_stmt, from_table, extra_froms, from_hints, **kw
    ):
        # permissive UPDATE..FROM rendering for stringification
        kw["asfrom"] = True
        return "FROM " + ", ".join(
            t._compiler_dispatch(self, fromhints=from_hints, **kw)
            for t in extra_froms
        )

    def delete_extra_from_clause(
        self, update_stmt, from_table, extra_froms, from_hints, **kw
    ):
        # permissive multi-table DELETE rendering for stringification
        kw["asfrom"] = True
        return ", " + ", ".join(
            t._compiler_dispatch(self, fromhints=from_hints, **kw)
            for t in extra_froms
        )

    def visit_empty_set_expr(self, type_, **kw):
        return "SELECT 1 WHERE 1!=1"

    def get_from_hint_text(self, table, text):
        return "[%s]" % text

    def visit_regexp_match_op_binary(self, binary, operator, **kw):
        # NOTE(review): upstream renders a "<regexp>" pseudo-operator; the
        # bare " " here looks like stripped angle-bracket text -- TODO
        # confirm against upstream.
        return self._generate_generic_binary(binary, " ", **kw)

    def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
        # NOTE(review): see visit_regexp_match_op_binary -- likely stripped
        # "<not regexp>" pseudo-operator.
        return self._generate_generic_binary(binary, " ", **kw)

    def visit_regexp_replace_op_binary(self, binary, operator, **kw):
        # NOTE(review): upstream prefixes a "<regexp replace>" pseudo-function
        # name; likely stripped here -- TODO confirm against upstream.
        return "(%s, %s)" % (
            binary.left._compiler_dispatch(self, **kw),
            binary.right._compiler_dispatch(self, **kw),
        )

    def visit_try_cast(self, cast, **kwargs):
        return "TRY_CAST(%s AS %s)" % (
            cast.clause._compiler_dispatch(self, **kwargs),
            cast.typeclause._compiler_dispatch(self, **kwargs),
        )


class DDLCompiler(Compiled):
    """Compiler for DDL (CREATE/DROP/ALTER) constructs."""

    is_ddl = True

    if TYPE_CHECKING:

        def __init__(
            self,
            dialect: Dialect,
            statement: ExecutableDDLElement,
            schema_translate_map: Optional[SchemaTranslateMapType] = ...,
            render_schema_translate: bool = ...,
            compile_kwargs: Mapping[str, Any] = ...,
        ): ...

    @util.memoized_property
    def sql_compiler(self):
        # statement compiler used to render embedded SQL expressions
        # (server defaults, CHECK constraints, computed columns)
        return self.dialect.statement_compiler(
            self.dialect, None, schema_translate_map=self.schema_translate_map
        )

    @util.memoized_property
    def type_compiler(self):
        return self.dialect.type_compiler_instance

    def construct_params(
        self,
        params: Optional[_CoreSingleExecuteParams] = None,
        extracted_parameters: Optional[Sequence[BindParameter[Any]]] = None,
        escape_names: bool = True,
    ) -> Optional[_MutableCoreSingleExecuteParams]:
        # DDL statements carry no bound parameters
        return None

    def visit_ddl(self, ddl, **kwargs):
        # table events can substitute table and schema name
        context = ddl.context
        if isinstance(ddl.target, schema.Table):
            context = context.copy()

            preparer = self.preparer
            path = preparer.format_table_seq(ddl.target)
            if len(path) == 1:
                table, sch = path[0], ""
            else:
                table, sch = path[-1], path[0]

            context.setdefault("table", table)
            context.setdefault("schema", sch)
            context.setdefault("fullname", preparer.format_table(ddl.target))

        return self.sql_compiler.post_process_text(ddl.statement % context)

    def visit_create_schema(self, create, **kw):
        text = "CREATE SCHEMA "
        if create.if_not_exists:
            text += "IF NOT EXISTS "
        return text + self.preparer.format_schema(create.element)

    def visit_drop_schema(self, drop, **kw):
        text = "DROP SCHEMA "
        if drop.if_exists:
            text += "IF EXISTS "
        text += self.preparer.format_schema(drop.element)
        if drop.cascade:
            text += " CASCADE"
        return text

    def visit_create_table(self, create, **kw):
        table = create.element
        preparer = self.preparer

        text = "\nCREATE "
        if table._prefixes:
            text += " ".join(table._prefixes) + " "

        text += "TABLE "
        if create.if_not_exists:
            text += "IF NOT EXISTS "

        text += preparer.format_table(table) + " "

        create_table_suffix = self.create_table_suffix(table)
        if create_table_suffix:
            text += create_table_suffix + " "

        text += "("

        separator = "\n"

        # if only one primary key, specify it along with the column
        first_pk = False
        for create_column in create.columns:
            column = create_column.element
            try:
                processed = self.process(
                    create_column, first_pk=column.primary_key and not first_pk
                )
                if processed is not None:
                    text += separator
                    separator = ", \n"
                    text += "\t" + processed
                if column.primary_key:
                    first_pk = True
            except exc.CompileError as ce:
                # wrap with table/column context for a clearer error
                raise exc.CompileError(
                    "(in table '%s', column '%s'): %s"
                    % (table.description, column.name, ce.args[0])
                ) from ce

        const = self.create_table_constraints(
            table,
            _include_foreign_key_constraints=create.include_foreign_key_constraints,  # noqa
        )
        if const:
            text += separator + "\t" + const

        text += "\n)%s\n\n" % self.post_create_table(table)
        return text

    def visit_create_column(self, create, first_pk=False, **kw):
        column = create.element

        if column.system:
            # system columns are never rendered in CREATE TABLE
            return None

        text = self.get_column_specification(column, first_pk=first_pk)
        const = " ".join(
            self.process(constraint) for constraint in column.constraints
        )
        if const:
            text += " " + const

        return text

    def create_table_constraints(
        self, table, _include_foreign_key_constraints=None, **kw
    ):
        # On some DB order is significant: visit PK first, then the
        # other constraints (engine.ReflectionTest.testbasic failed on FB2)
        constraints = []
        if table.primary_key:
            constraints.append(table.primary_key)

        all_fkcs = table.foreign_key_constraints
        if _include_foreign_key_constraints is not None:
            omit_fkcs = all_fkcs.difference(_include_foreign_key_constraints)
        else:
            omit_fkcs = set()

        constraints.extend(
            [
                c
                for c in table._sorted_constraints
                if c is not table.primary_key and c not in omit_fkcs
            ]
        )

        return ", \n\t".join(
            p
            for p in (
                self.process(constraint)
                for constraint in constraints
                if (constraint._should_create_for_compiler(self))
                and (
                    not self.dialect.supports_alter
                    or not getattr(constraint, "use_alter", False)
                )
            )
            if p is not None
        )

    def visit_drop_table(self, drop, **kw):
        text = "\nDROP TABLE "
        if drop.if_exists:
            text += "IF EXISTS "
        return text + self.preparer.format_table(drop.element)

    def visit_drop_view(self, drop, **kw):
        return "\nDROP VIEW " + self.preparer.format_table(drop.element)

    def _verify_index_table(self, index):
        # indexes must be bound to a table before DDL can be emitted
        if index.table is None:
            raise exc.CompileError(
                "Index '%s' is not associated with any table." % index.name
            )

    def visit_create_index(
        self, create, include_schema=False, include_table_schema=True, **kw
    ):
        index = create.element
        self._verify_index_table(index)
        preparer = self.preparer
        text = "CREATE "
        if index.unique:
            text += "UNIQUE "
        if index.name is None:
            raise exc.CompileError(
                "CREATE INDEX requires that the index have a name"
            )

        text += "INDEX "
        if create.if_not_exists:
            text += "IF NOT EXISTS "

        text += "%s ON %s (%s)" % (
            self._prepared_index_name(index, include_schema=include_schema),
            preparer.format_table(
                index.table, use_schema=include_table_schema
            ),
            ", ".join(
                self.sql_compiler.process(
                    expr, include_table=False, literal_binds=True
                )
                for expr in index.expressions
            ),
        )
        return text

    def visit_drop_index(self, drop, **kw):
        index = drop.element

        if index.name is None:
            raise exc.CompileError(
                "DROP INDEX requires that the index have a name"
            )
        text = "\nDROP INDEX "
        if drop.if_exists:
            text += "IF EXISTS "

        return text + self._prepared_index_name(index, include_schema=True)

    def _prepared_index_name(self, index, include_schema=False):
        # format index name, optionally schema-qualified
        if index.table is not None:
            effective_schema = self.preparer.schema_for_object(index.table)
        else:
            effective_schema = None
        if include_schema and effective_schema:
            schema_name = self.preparer.quote_schema(effective_schema)
        else:
            schema_name = None

        index_name = self.preparer.format_index(index)

        if schema_name:
            index_name = schema_name + "." + index_name
        return index_name

    def visit_add_constraint(self, create, **kw):
        return "ALTER TABLE %s ADD %s" % (
            self.preparer.format_table(create.element.table),
            self.process(create.element),
        )

    def visit_set_table_comment(self, create, **kw):
        return "COMMENT ON TABLE %s IS %s" % (
            self.preparer.format_table(create.element),
            self.sql_compiler.render_literal_value(
                create.element.comment, sqltypes.String()
            ),
        )

    def visit_drop_table_comment(self, drop, **kw):
        return "COMMENT ON TABLE %s IS NULL" % self.preparer.format_table(
            drop.element
        )

    def visit_set_column_comment(self, create, **kw):
        return "COMMENT ON COLUMN %s IS %s" % (
            self.preparer.format_column(
                create.element, use_table=True, use_schema=True
            ),
            self.sql_compiler.render_literal_value(
                create.element.comment, sqltypes.String()
            ),
        )

    def visit_drop_column_comment(self, drop, **kw):
        return "COMMENT ON COLUMN %s IS NULL" % self.preparer.format_column(
            drop.element, use_table=True
        )

    def visit_set_constraint_comment(self, create, **kw):
        # not supported generically; dialects that support it override
        raise exc.UnsupportedCompilationError(self, type(create))

    def visit_drop_constraint_comment(self, drop, **kw):
        # not supported generically; dialects that support it override
        raise exc.UnsupportedCompilationError(self, type(drop))

    def get_identity_options(self, identity_options):
        # render sequence/identity options shared by CREATE SEQUENCE and
        # identity columns
        text = []
        if identity_options.increment is not None:
            text.append("INCREMENT BY %d" % identity_options.increment)
        if identity_options.start is not None:
            text.append("START WITH %d" % identity_options.start)
        if identity_options.minvalue is not None:
            text.append("MINVALUE %d" % identity_options.minvalue)
        if identity_options.maxvalue is not None:
            text.append("MAXVALUE %d" % identity_options.maxvalue)
        if identity_options.nominvalue is not None:
            text.append("NO MINVALUE")
        if identity_options.nomaxvalue is not None:
            text.append("NO MAXVALUE")
        if identity_options.cache is not None:
            text.append("CACHE %d" % identity_options.cache)
        if identity_options.cycle is not None:
            text.append("CYCLE" if identity_options.cycle else "NO CYCLE")
        return " ".join(text)

    def visit_create_sequence(self, create, prefix=None, **kw):
        text = "CREATE SEQUENCE "
        if create.if_not_exists:
            text += "IF NOT EXISTS "
        text += self.preparer.format_sequence(create.element)

        if prefix:
            text += prefix
        options = self.get_identity_options(create.element)
        if options:
            text += " " + options
        return text

    def visit_drop_sequence(self, drop, **kw):
        text = "DROP SEQUENCE "
        if drop.if_exists:
            text += "IF EXISTS "
        return text + self.preparer.format_sequence(drop.element)

    def visit_drop_constraint(self, drop, **kw):
        constraint = drop.element
        if constraint.name is not None:
            formatted_name = self.preparer.format_constraint(constraint)
        else:
            formatted_name = None

        if formatted_name is None:
            raise exc.CompileError(
                "Can't emit DROP CONSTRAINT for constraint %r; "
                "it has no name" % drop.element
            )
        return "ALTER TABLE %s DROP CONSTRAINT %s%s%s" % (
            self.preparer.format_table(drop.element.table),
            "IF EXISTS " if drop.if_exists else "",
            formatted_name,
            " CASCADE" if drop.cascade else "",
        )

    def get_column_specification(self, column, **kwargs):
        # name + type, then DEFAULT / computed / identity / NOT NULL
        colspec = (
            self.preparer.format_column(column)
            + " "
            + self.dialect.type_compiler_instance.process(
                column.type, type_expression=column
            )
        )
        default = self.get_column_default_string(column)
        if default is not None:
            colspec += " DEFAULT " + default

        if column.computed is not None:
            colspec += " " + self.process(column.computed)

        if (
            column.identity is not None
            and self.dialect.supports_identity_columns
        ):
            colspec += " " + self.process(column.identity)

        if not column.nullable and (
            not column.identity or not self.dialect.supports_identity_columns
        ):
            colspec += " NOT NULL"
        return colspec

    def create_table_suffix(self, table):
        # hook for dialects to render text after "CREATE TABLE <name>"
        return ""

    def post_create_table(self, table):
        # hook for dialects to render text after the closing paren
        return ""

    def get_column_default_string(self, column):
        if isinstance(column.server_default, schema.DefaultClause):
            return self.render_default_string(column.server_default.arg)
        else:
            return None

    def render_default_string(self, default):
        if isinstance(default, str):
            return self.sql_compiler.render_literal_value(
                default, sqltypes.STRINGTYPE
            )
        else:
            return self.sql_compiler.process(default, literal_binds=True)

    def visit_table_or_column_check_constraint(self, constraint, **kw):
        if constraint.is_column_level:
            return self.visit_column_check_constraint(constraint)
        else:
            return self.visit_check_constraint(constraint)

    def visit_check_constraint(self, constraint, **kw):
        text = ""
        if constraint.name is not None:
            formatted_name = self.preparer.format_constraint(constraint)
            if formatted_name is not None:
                text += "CONSTRAINT %s " % formatted_name
        text += "CHECK (%s)" % self.sql_compiler.process(
            constraint.sqltext, include_table=False, literal_binds=True
        )
        text += self.define_constraint_deferrability(constraint)
        return text

    def visit_column_check_constraint(self, constraint, **kw):
        text = ""
        if constraint.name is not None:
            formatted_name = self.preparer.format_constraint(constraint)
            if formatted_name is not None:
                text += "CONSTRAINT %s " % formatted_name
        text += "CHECK (%s)" % self.sql_compiler.process(
            constraint.sqltext, include_table=False, literal_binds=True
        )
        text += self.define_constraint_deferrability(constraint)
        return text

    def visit_primary_key_constraint(self, constraint, **kw):
        if len(constraint) == 0:
            return ""
        text = ""
        if constraint.name is not None:
            formatted_name = self.preparer.format_constraint(constraint)
            if formatted_name is not None:
                text += "CONSTRAINT %s " % formatted_name
        text += "PRIMARY KEY "
        text += "(%s)" % ", ".join(
            self.preparer.quote(c.name)
            for c in (
                constraint.columns_autoinc_first
                if constraint._implicit_generated
                else constraint.columns
            )
        )
        text += self.define_constraint_deferrability(constraint)
        return text

    def visit_foreign_key_constraint(self, constraint, **kw):
        preparer = self.preparer
        text = ""
        if constraint.name is not None:
            formatted_name = self.preparer.format_constraint(constraint)
            if formatted_name is not None:
                text += "CONSTRAINT %s " % formatted_name
        # all elements of an FK constraint reference the same remote table
        remote_table = list(constraint.elements)[0].column.table
        text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
            ", ".join(
                preparer.quote(f.parent.name) for f in constraint.elements
            ),
            self.define_constraint_remote_table(
                constraint, remote_table, preparer
            ),
            ", ".join(
                preparer.quote(f.column.name) for f in constraint.elements
            ),
        )
        text += self.define_constraint_match(constraint)
        text += self.define_constraint_cascades(constraint)
        text += self.define_constraint_deferrability(constraint)
        return text

    def define_constraint_remote_table(self, constraint, table, preparer):
        """Format the remote table clause of a CREATE CONSTRAINT clause."""

        return preparer.format_table(table)

    def visit_unique_constraint(self, constraint, **kw):
        if len(constraint) == 0:
            return ""
        text = ""
        if constraint.name is not None:
            formatted_name = self.preparer.format_constraint(constraint)
            if formatted_name is not None:
                text += "CONSTRAINT %s " % formatted_name
        text += "UNIQUE %s(%s)" % (
            self.define_unique_constraint_distinct(constraint, **kw),
            ", ".join(self.preparer.quote(c.name) for c in constraint),
        )
        text += self.define_constraint_deferrability(constraint)
        return text

    def define_unique_constraint_distinct(self, constraint, **kw):
        # hook for dialects that support e.g. "UNIQUE NULLS NOT DISTINCT"
        return ""

    def define_constraint_cascades(self, constraint):
        text = ""
        if constraint.ondelete is not None:
            # validate_sql_phrase guards against injection via the option
            text += " ON DELETE %s" % self.preparer.validate_sql_phrase(
                constraint.ondelete, FK_ON_DELETE
            )
        if constraint.onupdate is not None:
            text += " ON UPDATE %s" % self.preparer.validate_sql_phrase(
                constraint.onupdate, FK_ON_UPDATE
            )
        return text

    def define_constraint_deferrability(self, constraint):
        text = ""
        if constraint.deferrable is not None:
            if constraint.deferrable:
                text += " DEFERRABLE"
            else:
                text += " NOT DEFERRABLE"
        if constraint.initially is not None:
            text += " INITIALLY %s" % self.preparer.validate_sql_phrase(
                constraint.initially, FK_INITIALLY
            )
        return text

    def define_constraint_match(self, constraint):
        text = ""
        if constraint.match is not None:
            text += " MATCH %s" % constraint.match
        return text

    def visit_computed_column(self, generated, **kw):
        text = "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process(
            generated.sqltext, include_table=False, literal_binds=True
        )
        if generated.persisted is True:
            text += " STORED"
        elif generated.persisted is False:
            text += " VIRTUAL"
        return text

    def visit_identity_column(self, identity, **kw):
        text = "GENERATED %s AS IDENTITY" % (
            "ALWAYS" if identity.always else "BY DEFAULT",
        )
        options = self.get_identity_options(identity)
        if options:
            text += " (%s)" % options
        return text


class GenericTypeCompiler(TypeCompiler):
    """Renders standard SQL type names; upper-case visit_* methods map to
    SQL standard types, lower-case ones to SQLAlchemy generic types."""

    def visit_FLOAT(self, type_, **kw):
        return "FLOAT"

    def visit_DOUBLE(self, type_, **kw):
        return "DOUBLE"

    def visit_DOUBLE_PRECISION(self, type_, **kw):
        return "DOUBLE PRECISION"

    def visit_REAL(self, type_, **kw):
        return "REAL"

    def visit_NUMERIC(self, type_, **kw):
        if type_.precision is None:
            return "NUMERIC"
        elif type_.scale is None:
            return "NUMERIC(%(precision)s)" % {"precision": type_.precision}
        else:
            return "NUMERIC(%(precision)s, %(scale)s)" % {
                "precision": type_.precision,
                "scale": type_.scale,
            }

    def visit_DECIMAL(self, type_, **kw):
        if type_.precision is None:
            return "DECIMAL"
        elif type_.scale is None:
            return "DECIMAL(%(precision)s)" % {"precision": type_.precision}
        else:
            return "DECIMAL(%(precision)s, %(scale)s)" % {
                "precision": type_.precision,
                "scale": type_.scale,
            }

    def visit_INTEGER(self, type_, **kw):
        return "INTEGER"

    def visit_SMALLINT(self, type_, **kw):
        return "SMALLINT"

    def visit_BIGINT(self, type_, **kw):
        return "BIGINT"

    def visit_TIMESTAMP(self, type_, **kw):
        return "TIMESTAMP"

    def visit_DATETIME(self, type_, **kw):
        return "DATETIME"

    def visit_DATE(self, type_, **kw):
        return "DATE"

    def visit_TIME(self, type_, **kw):
        return "TIME"

    def visit_CLOB(self, type_, **kw):
        return "CLOB"

    def visit_NCLOB(self, type_, **kw):
        return "NCLOB"

    def _render_string_type(self, type_, name, length_override=None):
        # shared renderer for CHAR/VARCHAR-family types with optional
        # length and COLLATE
        text = name
        if length_override:
            text += "(%d)" % length_override
        elif type_.length:
            text += "(%d)" % type_.length
        if type_.collation:
            text += ' COLLATE "%s"' % type_.collation
        return text

    def visit_CHAR(self, type_, **kw):
        return self._render_string_type(type_, "CHAR")

    def visit_NCHAR(self, type_, **kw):
        return self._render_string_type(type_, "NCHAR")

    def visit_VARCHAR(self, type_, **kw):
        return self._render_string_type(type_, "VARCHAR")

    def visit_NVARCHAR(self, type_, **kw):
        return self._render_string_type(type_, "NVARCHAR")

    def visit_TEXT(self, type_, **kw):
        return self._render_string_type(type_, "TEXT")

    def visit_UUID(self, type_, **kw):
        return "UUID"

    def visit_BLOB(self, type_, **kw):
        return "BLOB"

    def visit_BINARY(self, type_, **kw):
        return "BINARY" + (type_.length and "(%d)" % type_.length or "")

    def visit_VARBINARY(self, type_, **kw):
        return "VARBINARY" + (type_.length and "(%d)" % type_.length or "")

    def visit_BOOLEAN(self, type_, **kw):
        return "BOOLEAN"

    def visit_uuid(self, type_, **kw):
        # fall back to a 32-char hex string when native UUID is unavailable
        if not type_.native_uuid or not self.dialect.supports_native_uuid:
            return self._render_string_type(type_, "CHAR", length_override=32)
        else:
            return self.visit_UUID(type_, **kw)

    def visit_large_binary(self, type_, **kw):
        return self.visit_BLOB(type_, **kw)

    def visit_boolean(self, type_, **kw):
        return self.visit_BOOLEAN(type_, **kw)

    def visit_time(self, type_, **kw):
        return self.visit_TIME(type_, **kw)

    def visit_datetime(self, type_, **kw):
        return self.visit_DATETIME(type_, **kw)

    def visit_date(self, type_, **kw):
        return self.visit_DATE(type_, **kw)

    def visit_big_integer(self, type_, **kw):
        return self.visit_BIGINT(type_, **kw)

    def visit_small_integer(self, type_, **kw):
        return self.visit_SMALLINT(type_, **kw)

    def visit_integer(self, type_, **kw):
        return self.visit_INTEGER(type_, **kw)

    def visit_real(self, type_, **kw):
        return self.visit_REAL(type_, **kw)

    def visit_float(self, type_, **kw):
        return self.visit_FLOAT(type_, **kw)

    def visit_double(self, type_, **kw):
        return self.visit_DOUBLE(type_, **kw)

    def visit_numeric(self, type_, **kw):
        return self.visit_NUMERIC(type_, **kw)

    def visit_string(self, type_, **kw):
        return self.visit_VARCHAR(type_, **kw)

    def visit_unicode(self, type_, **kw):
        return self.visit_VARCHAR(type_, **kw)

    def visit_text(self, type_, **kw):
        return self.visit_TEXT(type_, **kw)

    def visit_unicode_text(self, type_, **kw):
        return self.visit_TEXT(type_, **kw)

    def visit_enum(self, type_, **kw):
        return self.visit_VARCHAR(type_, **kw)

    def visit_null(self, type_, **kw):
        # NullType has no DDL representation; the column was never given
        # a real type
        raise exc.CompileError(
            "Can't generate DDL for %r; "
            "did you forget to specify a "
            "type on this Column?" % type_
        )

    def visit_type_decorator(self, type_, **kw):
        # unwrap TypeDecorator to its dialect-level implementation
        return self.process(type_.type_engine(self.dialect), **kw)

    def visit_user_defined(self, type_, **kw):
        return type_.get_col_spec(**kw)


class StrSQLTypeCompiler(GenericTypeCompiler):
    """Type compiler for dialect-agnostic stringification; renders a
    best-effort name for unknown types instead of raising."""

    def process(self, type_, **kw):
        try:
            _compiler_dispatch = type_._compiler_dispatch
        except AttributeError:
            return self._visit_unknown(type_, **kw)
        else:
            return _compiler_dispatch(self, **kw)

    def __getattr__(self, key):
        # any visit_* not defined falls through to the unknown handler
        if key.startswith("visit_"):
            return self._visit_unknown
        else:
            raise AttributeError(key)

    def _visit_unknown(self, type_, **kw):
        # an all-caps class name is assumed to be the SQL type name itself
        if type_.__class__.__name__ == type_.__class__.__name__.upper():
            return type_.__class__.__name__
        else:
            return repr(type_)

    def visit_null(self, type_, **kw):
        return "NULL"

    def visit_user_defined(self, type_, **kw):
        try:
            get_col_spec = type_.get_col_spec
        except AttributeError:
            return repr(type_)
        else:
            return get_col_spec(**kw)


class _SchemaForObjectCallable(Protocol):
    def __call__(self, obj: Any) -> str: ...


class _BindNameForColProtocol(Protocol):
    def __call__(self, col: ColumnClause[Any]) -> str: ...


class IdentifierPreparer:
    """Handle quoting and case-folding of identifiers based on options."""

    reserved_words = RESERVED_WORDS

    legal_characters = LEGAL_CHARACTERS

    illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS

    initial_quote: str

    final_quote: str

    _strings: MutableMapping[str, str]

    schema_for_object: _SchemaForObjectCallable = operator.attrgetter("schema")
    """Return the .schema attribute for an object.

    For the default IdentifierPreparer, the schema for an object is always
    the value of the ".schema" attribute. if the preparer is replaced
    with one that has a non-empty schema_translate_map, the value of the
    ".schema" attribute is rendered a symbol that will be converted to a
    real schema name from the mapping post-compile.
+ + """ + + _includes_none_schema_translate: bool = False + + def __init__( + self, + dialect, + initial_quote='"', + final_quote=None, + escape_quote='"', + quote_case_sensitive_collations=True, + omit_schema=False, + ): + """Construct a new ``IdentifierPreparer`` object. + + initial_quote + Character that begins a delimited identifier. + + final_quote + Character that ends a delimited identifier. Defaults to + `initial_quote`. + + omit_schema + Prevent prepending schema name. Useful for databases that do + not support schemae. + """ + + self.dialect = dialect + self.initial_quote = initial_quote + self.final_quote = final_quote or self.initial_quote + self.escape_quote = escape_quote + self.escape_to_quote = self.escape_quote * 2 + self.omit_schema = omit_schema + self.quote_case_sensitive_collations = quote_case_sensitive_collations + self._strings = {} + self._double_percents = self.dialect.paramstyle in ( + "format", + "pyformat", + ) + + def _with_schema_translate(self, schema_translate_map): + prep = self.__class__.__new__(self.__class__) + prep.__dict__.update(self.__dict__) + + includes_none = None in schema_translate_map + + def symbol_getter(obj): + name = obj.schema + if obj._use_schema_map and (name is not None or includes_none): + if name is not None and ("[" in name or "]" in name): + raise exc.CompileError( + "Square bracket characters ([]) not supported " + "in schema translate name '%s'" % name + ) + return quoted_name( + "__[SCHEMA_%s]" % (name or "_none"), quote=False + ) + else: + return obj.schema + + prep.schema_for_object = symbol_getter + prep._includes_none_schema_translate = includes_none + return prep + + def _render_schema_translates(self, statement, schema_translate_map): + d = schema_translate_map + if None in d: + if not self._includes_none_schema_translate: + raise exc.InvalidRequestError( + "schema translate map which previously did not have " + "`None` present as a key now has `None` present; compiled " + "statement may lack 
adequate placeholders. Please use " + "consistent keys in successive " + "schema_translate_map dictionaries." + ) + + d["_none"] = d[None] + + def replace(m): + name = m.group(2) + if name in d: + effective_schema = d[name] + else: + if name in (None, "_none"): + raise exc.InvalidRequestError( + "schema translate map which previously had `None` " + "present as a key now no longer has it present; don't " + "know how to apply schema for compiled statement. " + "Please use consistent keys in successive " + "schema_translate_map dictionaries." + ) + effective_schema = name + + if not effective_schema: + effective_schema = self.dialect.default_schema_name + if not effective_schema: + # TODO: no coverage here + raise exc.CompileError( + "Dialect has no default schema name; can't " + "use None as dynamic schema target." + ) + return self.quote_schema(effective_schema) + + return re.sub(r"(__\[SCHEMA_([^\]]+)\])", replace, statement) + + def _escape_identifier(self, value: str) -> str: + """Escape an identifier. + + Subclasses should override this to provide database-dependent + escaping behavior. + """ + + value = value.replace(self.escape_quote, self.escape_to_quote) + if self._double_percents: + value = value.replace("%", "%%") + return value + + def _unescape_identifier(self, value: str) -> str: + """Canonicalize an escaped identifier. + + Subclasses should override this to provide database-dependent + unescaping behavior that reverses _escape_identifier. + """ + + return value.replace(self.escape_to_quote, self.escape_quote) + + def validate_sql_phrase(self, element, reg): + """keyword sequence filter. + + a filter for elements that are intended to represent keyword sequences, + such as "INITIALLY", "INITIALLY DEFERRED", etc. no special characters + should be present. + + .. 
versionadded:: 1.3 + + """ + + if element is not None and not reg.match(element): + raise exc.CompileError( + "Unexpected SQL phrase: %r (matching against %r)" + % (element, reg.pattern) + ) + return element + + def quote_identifier(self, value: str) -> str: + """Quote an identifier. + + Subclasses should override this to provide database-dependent + quoting behavior. + """ + + return ( + self.initial_quote + + self._escape_identifier(value) + + self.final_quote + ) + + def _requires_quotes(self, value: str) -> bool: + """Return True if the given identifier requires quoting.""" + lc_value = value.lower() + return ( + lc_value in self.reserved_words + or value[0] in self.illegal_initial_characters + or not self.legal_characters.match(str(value)) + or (lc_value != value) + ) + + def _requires_quotes_illegal_chars(self, value): + """Return True if the given identifier requires quoting, but + not taking case convention into account.""" + return not self.legal_characters.match(str(value)) + + def quote_schema(self, schema: str, force: Any = None) -> str: + """Conditionally quote a schema name. + + + The name is quoted if it is a reserved word, contains quote-necessary + characters, or is an instance of :class:`.quoted_name` which includes + ``quote`` set to ``True``. + + Subclasses can override this to provide database-dependent + quoting behavior for schema names. + + :param schema: string schema name + :param force: unused + + .. deprecated:: 0.9 + + The :paramref:`.IdentifierPreparer.quote_schema.force` + parameter is deprecated and will be removed in a future + release. This flag has no effect on the behavior of the + :meth:`.IdentifierPreparer.quote` method; please refer to + :class:`.quoted_name`. + + """ + if force is not None: + # not using the util.deprecated_params() decorator in this + # case because of the additional function call overhead on this + # very performance-critical spot. 
+ util.warn_deprecated( + "The IdentifierPreparer.quote_schema.force parameter is " + "deprecated and will be removed in a future release. This " + "flag has no effect on the behavior of the " + "IdentifierPreparer.quote method; please refer to " + "quoted_name().", + # deprecated 0.9. warning from 1.3 + version="0.9", + ) + + return self.quote(schema) + + def quote(self, ident: str, force: Any = None) -> str: + """Conditionally quote an identifier. + + The identifier is quoted if it is a reserved word, contains + quote-necessary characters, or is an instance of + :class:`.quoted_name` which includes ``quote`` set to ``True``. + + Subclasses can override this to provide database-dependent + quoting behavior for identifier names. + + :param ident: string identifier + :param force: unused + + .. deprecated:: 0.9 + + The :paramref:`.IdentifierPreparer.quote.force` + parameter is deprecated and will be removed in a future + release. This flag has no effect on the behavior of the + :meth:`.IdentifierPreparer.quote` method; please refer to + :class:`.quoted_name`. + + """ + if force is not None: + # not using the util.deprecated_params() decorator in this + # case because of the additional function call overhead on this + # very performance-critical spot. + util.warn_deprecated( + "The IdentifierPreparer.quote.force parameter is " + "deprecated and will be removed in a future release. This " + "flag has no effect on the behavior of the " + "IdentifierPreparer.quote method; please refer to " + "quoted_name().", + # deprecated 0.9. 
warning from 1.3 + version="0.9", + ) + + force = getattr(ident, "quote", None) + + if force is None: + if ident in self._strings: + return self._strings[ident] + else: + if self._requires_quotes(ident): + self._strings[ident] = self.quote_identifier(ident) + else: + self._strings[ident] = ident + return self._strings[ident] + elif force: + return self.quote_identifier(ident) + else: + return ident + + def format_collation(self, collation_name): + if self.quote_case_sensitive_collations: + return self.quote(collation_name) + else: + return collation_name + + def format_sequence(self, sequence, use_schema=True): + name = self.quote(sequence.name) + + effective_schema = self.schema_for_object(sequence) + + if ( + not self.omit_schema + and use_schema + and effective_schema is not None + ): + name = self.quote_schema(effective_schema) + "." + name + return name + + def format_label( + self, label: Label[Any], name: Optional[str] = None + ) -> str: + return self.quote(name or label.name) + + def format_alias( + self, alias: Optional[AliasedReturnsRows], name: Optional[str] = None + ) -> str: + if name is None: + assert alias is not None + return self.quote(alias.name) + else: + return self.quote(name) + + def format_savepoint(self, savepoint, name=None): + # Running the savepoint name through quoting is unnecessary + # for all known dialects. 
This is here to support potential + # third party use cases + ident = name or savepoint.ident + if self._requires_quotes(ident): + ident = self.quote_identifier(ident) + return ident + + @util.preload_module("sqlalchemy.sql.naming") + def format_constraint(self, constraint, _alembic_quote=True): + naming = util.preloaded.sql_naming + + if constraint.name is _NONE_NAME: + name = naming._constraint_name_for_table( + constraint, constraint.table + ) + + if name is None: + return None + else: + name = constraint.name + + if constraint.__visit_name__ == "index": + return self.truncate_and_render_index_name( + name, _alembic_quote=_alembic_quote + ) + else: + return self.truncate_and_render_constraint_name( + name, _alembic_quote=_alembic_quote + ) + + def truncate_and_render_index_name(self, name, _alembic_quote=True): + # calculate these at format time so that ad-hoc changes + # to dialect.max_identifier_length etc. can be reflected + # as IdentifierPreparer is long lived + max_ = ( + self.dialect.max_index_name_length + or self.dialect.max_identifier_length + ) + return self._truncate_and_render_maxlen_name( + name, max_, _alembic_quote + ) + + def truncate_and_render_constraint_name(self, name, _alembic_quote=True): + # calculate these at format time so that ad-hoc changes + # to dialect.max_identifier_length etc. 
can be reflected + # as IdentifierPreparer is long lived + max_ = ( + self.dialect.max_constraint_name_length + or self.dialect.max_identifier_length + ) + return self._truncate_and_render_maxlen_name( + name, max_, _alembic_quote + ) + + def _truncate_and_render_maxlen_name(self, name, max_, _alembic_quote): + if isinstance(name, elements._truncated_label): + if len(name) > max_: + name = name[0 : max_ - 8] + "_" + util.md5_hex(name)[-4:] + else: + self.dialect.validate_identifier(name) + + if not _alembic_quote: + return name + else: + return self.quote(name) + + def format_index(self, index): + return self.format_constraint(index) + + def format_table(self, table, use_schema=True, name=None): + """Prepare a quoted table and schema name.""" + + if name is None: + name = table.name + + result = self.quote(name) + + effective_schema = self.schema_for_object(table) + + if not self.omit_schema and use_schema and effective_schema: + result = self.quote_schema(effective_schema) + "." + result + return result + + def format_schema(self, name): + """Prepare a quoted schema name.""" + + return self.quote(name) + + def format_label_name( + self, + name, + anon_map=None, + ): + """Prepare a quoted column name.""" + + if anon_map is not None and isinstance( + name, elements._truncated_label + ): + name = name.apply_map(anon_map) + + return self.quote(name) + + def format_column( + self, + column, + use_table=False, + name=None, + table_name=None, + use_schema=False, + anon_map=None, + ): + """Prepare a quoted column name.""" + + if name is None: + name = column.name + + if anon_map is not None and isinstance( + name, elements._truncated_label + ): + name = name.apply_map(anon_map) + + if not getattr(column, "is_literal", False): + if use_table: + return ( + self.format_table( + column.table, use_schema=use_schema, name=table_name + ) + + "." 
+ + self.quote(name) + ) + else: + return self.quote(name) + else: + # literal textual elements get stuck into ColumnClause a lot, + # which shouldn't get quoted + + if use_table: + return ( + self.format_table( + column.table, use_schema=use_schema, name=table_name + ) + + "." + + name + ) + else: + return name + + def format_table_seq(self, table, use_schema=True): + """Format table name and schema as a tuple.""" + + # Dialects with more levels in their fully qualified references + # ('database', 'owner', etc.) could override this and return + # a longer sequence. + + effective_schema = self.schema_for_object(table) + + if not self.omit_schema and use_schema and effective_schema: + return ( + self.quote_schema(effective_schema), + self.format_table(table, use_schema=False), + ) + else: + return (self.format_table(table, use_schema=False),) + + @util.memoized_property + def _r_identifiers(self): + initial, final, escaped_final = ( + re.escape(s) + for s in ( + self.initial_quote, + self.final_quote, + self._escape_identifier(self.final_quote), + ) + ) + r = re.compile( + r"(?:" + r"(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s" + r"|([^\.]+))(?=\.|$))+" + % {"initial": initial, "final": final, "escaped": escaped_final} + ) + return r + + def unformat_identifiers(self, identifiers): + """Unpack 'schema.table.column'-like strings into components.""" + + r = self._r_identifiers + return [ + self._unescape_identifier(i) + for i in [a or b for a, b in r.findall(identifiers)] + ] diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/crud.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/crud.py new file mode 100644 index 0000000..499a19d --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/crud.py @@ -0,0 +1,1669 @@ +# sql/crud.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# 
mypy: allow-untyped-defs, allow-untyped-calls + +"""Functions used by compiler.py to determine the parameters rendered +within INSERT and UPDATE statements. + +""" +from __future__ import annotations + +import functools +import operator +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Iterable +from typing import List +from typing import MutableMapping +from typing import NamedTuple +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from . import coercions +from . import dml +from . import elements +from . import roles +from .base import _DefaultDescriptionTuple +from .dml import isinsert as _compile_state_isinsert +from .elements import ColumnClause +from .schema import default_is_clause_element +from .schema import default_is_sequence +from .selectable import Select +from .selectable import TableClause +from .. import exc +from .. import util +from ..util.typing import Literal + +if TYPE_CHECKING: + from .compiler import _BindNameForColProtocol + from .compiler import SQLCompiler + from .dml import _DMLColumnElement + from .dml import DMLState + from .dml import ValuesBase + from .elements import ColumnElement + from .elements import KeyedColumnElement + from .schema import _SQLExprDefault + from .schema import Column + +REQUIRED = util.symbol( + "REQUIRED", + """ +Placeholder for the value within a :class:`.BindParameter` +which is required to be present when the statement is passed +to :meth:`_engine.Connection.execute`. + +This symbol is typically used when a :func:`_expression.insert` +or :func:`_expression.update` statement is compiled without parameter +values present. 
+ +""", +) + + +def _as_dml_column(c: ColumnElement[Any]) -> ColumnClause[Any]: + if not isinstance(c, ColumnClause): + raise exc.CompileError( + f"Can't create DML statement against column expression {c!r}" + ) + return c + + +_CrudParamElement = Tuple[ + "ColumnElement[Any]", + str, # column name + Optional[ + Union[str, "_SQLExprDefault"] + ], # bound parameter string or SQL expression to apply + Iterable[str], +] +_CrudParamElementStr = Tuple[ + "KeyedColumnElement[Any]", + str, # column name + str, # bound parameter string + Iterable[str], +] +_CrudParamElementSQLExpr = Tuple[ + "ColumnClause[Any]", + str, + "_SQLExprDefault", # SQL expression to apply + Iterable[str], +] + +_CrudParamSequence = List[_CrudParamElement] + + +class _CrudParams(NamedTuple): + single_params: _CrudParamSequence + all_multi_params: List[Sequence[_CrudParamElementStr]] + is_default_metavalue_only: bool = False + use_insertmanyvalues: bool = False + use_sentinel_columns: Optional[Sequence[Column[Any]]] = None + + +def _get_crud_params( + compiler: SQLCompiler, + stmt: ValuesBase, + compile_state: DMLState, + toplevel: bool, + **kw: Any, +) -> _CrudParams: + """create a set of tuples representing column/string pairs for use + in an INSERT or UPDATE statement. + + Also generates the Compiled object's postfetch, prefetch, and + returning column collections, used for default handling and ultimately + populating the CursorResult's prefetch_cols() and postfetch_cols() + collections. + + """ + + # note: the _get_crud_params() system was written with the notion in mind + # that INSERT, UPDATE, DELETE are always the top level statement and + # that there is only one of them. With the addition of CTEs that can + # make use of DML, this assumption is no longer accurate; the DML + # statement is not necessarily the top-level "row returning" thing + # and it is also theoretically possible (fortunately nobody has asked yet) + # to have a single statement with multiple DMLs inside of it via CTEs. 
+ + # the current _get_crud_params() design doesn't accommodate these cases + # right now. It "just works" for a CTE that has a single DML inside of + # it, and for a CTE with multiple DML, it's not clear what would happen. + + # overall, the "compiler.XYZ" collections here would need to be in a + # per-DML structure of some kind, and DefaultDialect would need to + # navigate these collections on a per-statement basis, with additional + # emphasis on the "toplevel returning data" statement. However we + # still need to run through _get_crud_params() for all DML as we have + # Python / SQL generated column defaults that need to be rendered. + + # if there is user need for this kind of thing, it's likely a post 2.0 + # kind of change as it would require deep changes to DefaultDialect + # as well as here. + + compiler.postfetch = [] + compiler.insert_prefetch = [] + compiler.update_prefetch = [] + compiler.implicit_returning = [] + + visiting_cte = kw.get("visiting_cte", None) + if visiting_cte is not None: + # for insert -> CTE -> insert, don't populate an incoming + # _crud_accumulate_bind_names collection; the INSERT we process here + # will not be inline within the VALUES of the enclosing INSERT as the + # CTE is placed on the outside. 
See issue #9173 + kw.pop("accumulate_bind_names", None) + assert ( + "accumulate_bind_names" not in kw + ), "Don't know how to handle insert within insert without a CTE" + + # getters - these are normally just column.key, + # but in the case of mysql multi-table update, the rules for + # .key must conditionally take tablename into account + ( + _column_as_key, + _getattr_col_key, + _col_bind_name, + ) = _key_getters_for_crud_column(compiler, stmt, compile_state) + + compiler._get_bind_name_for_col = _col_bind_name + + if stmt._returning and stmt._return_defaults: + raise exc.CompileError( + "Can't compile statement that includes returning() and " + "return_defaults() simultaneously" + ) + + if compile_state.isdelete: + _setup_delete_return_defaults( + compiler, + stmt, + compile_state, + (), + _getattr_col_key, + _column_as_key, + _col_bind_name, + (), + (), + toplevel, + kw, + ) + return _CrudParams([], []) + + # no parameters in the statement, no parameters in the + # compiled params - return binds for all columns + if compiler.column_keys is None and compile_state._no_parameters: + return _CrudParams( + [ + ( + c, + compiler.preparer.format_column(c), + _create_bind_param(compiler, c, None, required=True), + (c.key,), + ) + for c in stmt.table.columns + if not c._omit_from_statements + ], + [], + ) + + stmt_parameter_tuples: Optional[ + List[Tuple[Union[str, ColumnClause[Any]], Any]] + ] + spd: Optional[MutableMapping[_DMLColumnElement, Any]] + + if ( + _compile_state_isinsert(compile_state) + and compile_state._has_multi_parameters + ): + mp = compile_state._multi_parameters + assert mp is not None + spd = mp[0] + stmt_parameter_tuples = list(spd.items()) + spd_str_key = {_column_as_key(key) for key in spd} + elif compile_state._ordered_values: + spd = compile_state._dict_parameters + stmt_parameter_tuples = compile_state._ordered_values + assert spd is not None + spd_str_key = {_column_as_key(key) for key in spd} + elif compile_state._dict_parameters: + spd = 
compile_state._dict_parameters + stmt_parameter_tuples = list(spd.items()) + spd_str_key = {_column_as_key(key) for key in spd} + else: + stmt_parameter_tuples = spd = spd_str_key = None + + # if we have statement parameters - set defaults in the + # compiled params + if compiler.column_keys is None: + parameters = {} + elif stmt_parameter_tuples: + assert spd_str_key is not None + parameters = { + _column_as_key(key): REQUIRED + for key in compiler.column_keys + if key not in spd_str_key + } + else: + parameters = { + _column_as_key(key): REQUIRED for key in compiler.column_keys + } + + # create a list of column assignment clauses as tuples + values: List[_CrudParamElement] = [] + + if stmt_parameter_tuples is not None: + _get_stmt_parameter_tuples_params( + compiler, + compile_state, + parameters, + stmt_parameter_tuples, + _column_as_key, + values, + kw, + ) + + check_columns: Dict[str, ColumnClause[Any]] = {} + + # special logic that only occurs for multi-table UPDATE + # statements + if dml.isupdate(compile_state) and compile_state.is_multitable: + _get_update_multitable_params( + compiler, + stmt, + compile_state, + stmt_parameter_tuples, + check_columns, + _col_bind_name, + _getattr_col_key, + values, + kw, + ) + + if _compile_state_isinsert(compile_state) and stmt._select_names: + # is an insert from select, is not a multiparams + + assert not compile_state._has_multi_parameters + + _scan_insert_from_select_cols( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + check_columns, + values, + toplevel, + kw, + ) + use_insertmanyvalues = False + use_sentinel_columns = None + else: + use_insertmanyvalues, use_sentinel_columns = _scan_cols( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + check_columns, + values, + toplevel, + kw, + ) + + if parameters and stmt_parameter_tuples: + check = ( + set(parameters) + .intersection(_column_as_key(k) for 
k, v in stmt_parameter_tuples) + .difference(check_columns) + ) + if check: + raise exc.CompileError( + "Unconsumed column names: %s" + % (", ".join("%s" % (c,) for c in check)) + ) + + is_default_metavalue_only = False + + if ( + _compile_state_isinsert(compile_state) + and compile_state._has_multi_parameters + ): + # is a multiparams, is not an insert from a select + assert not stmt._select_names + multi_extended_values = _extend_values_for_multiparams( + compiler, + stmt, + compile_state, + cast( + "Sequence[_CrudParamElementStr]", + values, + ), + cast("Callable[..., str]", _column_as_key), + kw, + ) + return _CrudParams(values, multi_extended_values) + elif ( + not values + and compiler.for_executemany + and compiler.dialect.supports_default_metavalue + ): + # convert an "INSERT DEFAULT VALUES" + # into INSERT (firstcol) VALUES (DEFAULT) which can be turned + # into an in-place multi values. This supports + # insert_executemany_returning mode :) + values = [ + ( + _as_dml_column(stmt.table.columns[0]), + compiler.preparer.format_column(stmt.table.columns[0]), + compiler.dialect.default_metavalue_token, + (), + ) + ] + is_default_metavalue_only = True + + return _CrudParams( + values, + [], + is_default_metavalue_only=is_default_metavalue_only, + use_insertmanyvalues=use_insertmanyvalues, + use_sentinel_columns=use_sentinel_columns, + ) + + +@overload +def _create_bind_param( + compiler: SQLCompiler, + col: ColumnElement[Any], + value: Any, + process: Literal[True] = ..., + required: bool = False, + name: Optional[str] = None, + **kw: Any, +) -> str: ... + + +@overload +def _create_bind_param( + compiler: SQLCompiler, + col: ColumnElement[Any], + value: Any, + **kw: Any, +) -> str: ... 
+ + +def _create_bind_param( + compiler: SQLCompiler, + col: ColumnElement[Any], + value: Any, + process: bool = True, + required: bool = False, + name: Optional[str] = None, + **kw: Any, +) -> Union[str, elements.BindParameter[Any]]: + if name is None: + name = col.key + bindparam = elements.BindParameter( + name, value, type_=col.type, required=required + ) + bindparam._is_crud = True + if process: + return bindparam._compiler_dispatch(compiler, **kw) + else: + return bindparam + + +def _handle_values_anonymous_param(compiler, col, value, name, **kw): + # the insert() and update() constructs as of 1.4 will now produce anonymous + # bindparam() objects in the values() collections up front when given plain + # literal values. This is so that cache key behaviors, which need to + # produce bound parameters in deterministic order without invoking any + # compilation here, can be applied to these constructs when they include + # values() (but not yet multi-values, which are not included in caching + # right now). + # + # in order to produce the desired "crud" style name for these parameters, + # which will also be targetable in engine/default.py through the usual + # conventions, apply our desired name to these unique parameters by + # populating the compiler truncated names cache with the desired name, + # rather than having + # compiler.visit_bindparam()->compiler._truncated_identifier make up a + # name. Saves on call counts also. 
+ + # for INSERT/UPDATE that's a CTE, we don't need names to match to + # external parameters and these would also conflict in the case where + # multiple insert/update are combined together using CTEs + is_cte = "visiting_cte" in kw + + if ( + not is_cte + and value.unique + and isinstance(value.key, elements._truncated_label) + ): + compiler.truncated_names[("bindparam", value.key)] = name + + if value.type._isnull: + # either unique parameter, or other bound parameters that were + # passed in directly + # set type to that of the column unconditionally + value = value._with_binary_element_type(col.type) + + return value._compiler_dispatch(compiler, **kw) + + +def _key_getters_for_crud_column( + compiler: SQLCompiler, stmt: ValuesBase, compile_state: DMLState +) -> Tuple[ + Callable[[Union[str, ColumnClause[Any]]], Union[str, Tuple[str, str]]], + Callable[[ColumnClause[Any]], Union[str, Tuple[str, str]]], + _BindNameForColProtocol, +]: + if dml.isupdate(compile_state) and compile_state._extra_froms: + # when extra tables are present, refer to the columns + # in those extra tables as table-qualified, including in + # dictionaries and when rendering bind param names. + # the "main" table of the statement remains unqualified, + # allowing the most compatibility with a non-multi-table + # statement. 
+ _et = set(compile_state._extra_froms) + + c_key_role = functools.partial( + coercions.expect_as_key, roles.DMLColumnRole + ) + + def _column_as_key( + key: Union[ColumnClause[Any], str] + ) -> Union[str, Tuple[str, str]]: + str_key = c_key_role(key) + if hasattr(key, "table") and key.table in _et: + return (key.table.name, str_key) # type: ignore + else: + return str_key + + def _getattr_col_key( + col: ColumnClause[Any], + ) -> Union[str, Tuple[str, str]]: + if col.table in _et: + return (col.table.name, col.key) # type: ignore + else: + return col.key + + def _col_bind_name(col: ColumnClause[Any]) -> str: + if col.table in _et: + if TYPE_CHECKING: + assert isinstance(col.table, TableClause) + return "%s_%s" % (col.table.name, col.key) + else: + return col.key + + else: + _column_as_key = functools.partial( + coercions.expect_as_key, roles.DMLColumnRole + ) + _getattr_col_key = _col_bind_name = operator.attrgetter("key") # type: ignore # noqa: E501 + + return _column_as_key, _getattr_col_key, _col_bind_name + + +def _scan_insert_from_select_cols( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + check_columns, + values, + toplevel, + kw, +): + cols = [stmt.table.c[_column_as_key(name)] for name in stmt._select_names] + + assert compiler.stack[-1]["selectable"] is stmt + + compiler.stack[-1]["insert_from_select"] = stmt.select + + add_select_cols: List[_CrudParamElementSQLExpr] = [] + if stmt.include_insert_from_select_defaults: + col_set = set(cols) + for col in stmt.table.columns: + # omit columns that were not in the SELECT statement. + # this will omit columns marked as omit_from_statements naturally, + # as long as that col was not explicit in the SELECT. + # if an omit_from_statements col has a "default" on it, then + # we need to include it, as these defaults should still fire off. 
+ # but, if it has that default and it's the "sentinel" default, + # we don't do sentinel default operations for insert_from_select + # here so we again omit it. + if ( + col not in col_set + and col.default + and not col.default.is_sentinel + ): + cols.append(col) + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + parameters.pop(col_key) + values.append((c, compiler.preparer.format_column(c), None, ())) + else: + _append_param_insert_select_hasdefault( + compiler, stmt, c, add_select_cols, kw + ) + + if add_select_cols: + values.extend(add_select_cols) + ins_from_select = compiler.stack[-1]["insert_from_select"] + if not isinstance(ins_from_select, Select): + raise exc.CompileError( + f"Can't extend statement for INSERT..FROM SELECT to include " + f"additional default-holding column(s) " + f"""{ + ', '.join(repr(key) for _, key, _, _ in add_select_cols) + }. Convert the selectable to a subquery() first, or pass """ + "include_defaults=False to Insert.from_select() to skip these " + "columns." 
+ ) + ins_from_select = ins_from_select._generate() + # copy raw_columns + ins_from_select._raw_columns = list(ins_from_select._raw_columns) + [ + expr for _, _, expr, _ in add_select_cols + ] + compiler.stack[-1]["insert_from_select"] = ins_from_select + + +def _scan_cols( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + check_columns, + values, + toplevel, + kw, +): + ( + need_pks, + implicit_returning, + implicit_return_defaults, + postfetch_lastrowid, + use_insertmanyvalues, + use_sentinel_columns, + ) = _get_returning_modifiers(compiler, stmt, compile_state, toplevel) + + assert compile_state.isupdate or compile_state.isinsert + + if compile_state._parameter_ordering: + parameter_ordering = [ + _column_as_key(key) for key in compile_state._parameter_ordering + ] + ordered_keys = set(parameter_ordering) + cols = [ + stmt.table.c[key] + for key in parameter_ordering + if isinstance(key, str) and key in stmt.table.c + ] + [c for c in stmt.table.c if c.key not in ordered_keys] + + else: + cols = stmt.table.columns + + isinsert = _compile_state_isinsert(compile_state) + if isinsert and not compile_state._has_multi_parameters: + # new rules for #7998. 
fetch lastrowid or implicit returning + # for autoincrement column even if parameter is NULL, for DBs that + # override NULL param for primary key (sqlite, mysql/mariadb) + autoincrement_col = stmt.table._autoincrement_column + insert_null_pk_still_autoincrements = ( + compiler.dialect.insert_null_pk_still_autoincrements + ) + else: + autoincrement_col = insert_null_pk_still_autoincrements = None + + if stmt._supplemental_returning: + supplemental_returning = set(stmt._supplemental_returning) + else: + supplemental_returning = set() + + compiler_implicit_returning = compiler.implicit_returning + + # TODO - see TODO(return_defaults_columns) below + # cols_in_params = set() + + for c in cols: + # scan through every column in the target table + + col_key = _getattr_col_key(c) + + if col_key in parameters and col_key not in check_columns: + # parameter is present for the column. use that. + + _append_param_parameter( + compiler, + stmt, + compile_state, + c, + col_key, + parameters, + _col_bind_name, + implicit_returning, + implicit_return_defaults, + postfetch_lastrowid, + values, + autoincrement_col, + insert_null_pk_still_autoincrements, + kw, + ) + + # TODO - see TODO(return_defaults_columns) below + # cols_in_params.add(c) + + elif isinsert: + # no parameter is present and it's an insert. + + if c.primary_key and need_pks: + # it's a primary key column, it will need to be generated by a + # default generator of some kind, and the statement expects + # inserted_primary_key to be available. + + if implicit_returning: + # we can use RETURNING, find out how to invoke this + # column and get the value where RETURNING is an option. + # we can inline server-side functions in this case. + + _append_param_insert_pk_returning( + compiler, stmt, c, values, kw + ) + else: + # otherwise, find out how to invoke this column + # and get its value where RETURNING is not an option. + # if we have to invoke a server-side function, we need + # to pre-execute it. 
or if this is a straight + # autoincrement column and the dialect supports it + # we can use cursor.lastrowid. + + _append_param_insert_pk_no_returning( + compiler, stmt, c, values, kw + ) + + elif c.default is not None: + # column has a default, but it's not a pk column, or it is but + # we don't need to get the pk back. + if not c.default.is_sentinel or ( + use_sentinel_columns is not None + ): + _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, values, kw + ) + + elif c.server_default is not None: + # column has a DDL-level default, and is either not a pk + # column or we don't need the pk. + if implicit_return_defaults and c in implicit_return_defaults: + compiler_implicit_returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + + elif implicit_return_defaults and c in implicit_return_defaults: + compiler_implicit_returning.append(c) + + elif ( + c.primary_key + and c is not stmt.table._autoincrement_column + and not c.nullable + ): + _warn_pk_with_no_anticipated_value(c) + + elif compile_state.isupdate: + # no parameter is present and it's an insert. + + _append_param_update( + compiler, + compile_state, + stmt, + c, + implicit_return_defaults, + values, + kw, + ) + + # adding supplemental cols to implicit_returning in table + # order so that order is maintained between multiple INSERT + # statements which may have different parameters included, but all + # have the same RETURNING clause + if ( + c in supplemental_returning + and c not in compiler_implicit_returning + ): + compiler_implicit_returning.append(c) + + if supplemental_returning: + # we should have gotten every col into implicit_returning, + # however supplemental returning can also have SQL functions etc. 
+ # in it + remaining_supplemental = supplemental_returning.difference( + compiler_implicit_returning + ) + compiler_implicit_returning.extend( + c + for c in stmt._supplemental_returning + if c in remaining_supplemental + ) + + # TODO(return_defaults_columns): there can still be more columns in + # _return_defaults_columns in the case that they are from something like an + # aliased of the table. we can add them here, however this breaks other ORM + # things. so this is for another day. see + # test/orm/dml/test_update_delete_where.py -> test_update_from_alias + + # if stmt._return_defaults_columns: + # compiler_implicit_returning.extend( + # set(stmt._return_defaults_columns) + # .difference(compiler_implicit_returning) + # .difference(cols_in_params) + # ) + + return (use_insertmanyvalues, use_sentinel_columns) + + +def _setup_delete_return_defaults( + compiler, + stmt, + compile_state, + parameters, + _getattr_col_key, + _column_as_key, + _col_bind_name, + check_columns, + values, + toplevel, + kw, +): + (_, _, implicit_return_defaults, *_) = _get_returning_modifiers( + compiler, stmt, compile_state, toplevel + ) + + if not implicit_return_defaults: + return + + if stmt._return_defaults_columns: + compiler.implicit_returning.extend(implicit_return_defaults) + + if stmt._supplemental_returning: + ir_set = set(compiler.implicit_returning) + compiler.implicit_returning.extend( + c for c in stmt._supplemental_returning if c not in ir_set + ) + + +def _append_param_parameter( + compiler, + stmt, + compile_state, + c, + col_key, + parameters, + _col_bind_name, + implicit_returning, + implicit_return_defaults, + postfetch_lastrowid, + values, + autoincrement_col, + insert_null_pk_still_autoincrements, + kw, +): + value = parameters.pop(col_key) + + col_value = compiler.preparer.format_column( + c, use_table=compile_state.include_table_with_column_exprs + ) + + accumulated_bind_names: Set[str] = set() + + if coercions._is_literal(value): + if ( + 
insert_null_pk_still_autoincrements + and c.primary_key + and c is autoincrement_col + ): + # support use case for #7998, fetch autoincrement cols + # even if value was given. + + if postfetch_lastrowid: + compiler.postfetch_lastrowid = True + elif implicit_returning: + compiler.implicit_returning.append(c) + + value = _create_bind_param( + compiler, + c, + value, + required=value is REQUIRED, + name=( + _col_bind_name(c) + if not _compile_state_isinsert(compile_state) + or not compile_state._has_multi_parameters + else "%s_m0" % _col_bind_name(c) + ), + accumulate_bind_names=accumulated_bind_names, + **kw, + ) + elif value._is_bind_parameter: + if ( + insert_null_pk_still_autoincrements + and value.value is None + and c.primary_key + and c is autoincrement_col + ): + # support use case for #7998, fetch autoincrement cols + # even if value was given + if implicit_returning: + compiler.implicit_returning.append(c) + elif compiler.dialect.postfetch_lastrowid: + compiler.postfetch_lastrowid = True + + value = _handle_values_anonymous_param( + compiler, + c, + value, + name=( + _col_bind_name(c) + if not _compile_state_isinsert(compile_state) + or not compile_state._has_multi_parameters + else "%s_m0" % _col_bind_name(c) + ), + accumulate_bind_names=accumulated_bind_names, + **kw, + ) + else: + # value is a SQL expression + value = compiler.process( + value.self_group(), + accumulate_bind_names=accumulated_bind_names, + **kw, + ) + + if compile_state.isupdate: + if implicit_return_defaults and c in implicit_return_defaults: + compiler.implicit_returning.append(c) + + else: + compiler.postfetch.append(c) + else: + if c.primary_key: + if implicit_returning: + compiler.implicit_returning.append(c) + elif compiler.dialect.postfetch_lastrowid: + compiler.postfetch_lastrowid = True + + elif implicit_return_defaults and (c in implicit_return_defaults): + compiler.implicit_returning.append(c) + + else: + # postfetch specifically means, "we can SELECT the row we just + # 
inserted by primary key to get back the server generated + # defaults". so by definition this can't be used to get the + # primary key value back, because we need to have it ahead of + # time. + + compiler.postfetch.append(c) + + values.append((c, col_value, value, accumulated_bind_names)) + + +def _append_param_insert_pk_returning(compiler, stmt, c, values, kw): + """Create a primary key expression in the INSERT statement where + we want to populate result.inserted_primary_key and RETURNING + is available. + + """ + if c.default is not None: + if c.default.is_sequence: + if compiler.dialect.supports_sequences and ( + not c.default.optional + or not compiler.dialect.sequences_optional + ): + accumulated_bind_names: Set[str] = set() + values.append( + ( + c, + compiler.preparer.format_column(c), + compiler.process( + c.default, + accumulate_bind_names=accumulated_bind_names, + **kw, + ), + accumulated_bind_names, + ) + ) + compiler.implicit_returning.append(c) + elif c.default.is_clause_element: + accumulated_bind_names = set() + values.append( + ( + c, + compiler.preparer.format_column(c), + compiler.process( + c.default.arg.self_group(), + accumulate_bind_names=accumulated_bind_names, + **kw, + ), + accumulated_bind_names, + ) + ) + compiler.implicit_returning.append(c) + else: + # client side default. 
OK we can't use RETURNING, need to + # do a "prefetch", which in fact fetches the default value + # on the Python side + values.append( + ( + c, + compiler.preparer.format_column(c), + _create_insert_prefetch_bind_param(compiler, c, **kw), + (c.key,), + ) + ) + elif c is stmt.table._autoincrement_column or c.server_default is not None: + compiler.implicit_returning.append(c) + elif not c.nullable: + # no .default, no .server_default, not autoincrement, we have + # no indication this primary key column will have any value + _warn_pk_with_no_anticipated_value(c) + + +def _append_param_insert_pk_no_returning(compiler, stmt, c, values, kw): + """Create a primary key expression in the INSERT statement where + we want to populate result.inserted_primary_key and we cannot use + RETURNING. + + Depending on the kind of default here we may create a bound parameter + in the INSERT statement and pre-execute a default generation function, + or we may use cursor.lastrowid if supported by the dialect. + + + """ + + if ( + # column has a Python-side default + c.default is not None + and ( + # and it either is not a sequence, or it is and we support + # sequences and want to invoke it + not c.default.is_sequence + or ( + compiler.dialect.supports_sequences + and ( + not c.default.optional + or not compiler.dialect.sequences_optional + ) + ) + ) + ) or ( + # column is the "autoincrement column" + c is stmt.table._autoincrement_column + and ( + # dialect can't use cursor.lastrowid + not compiler.dialect.postfetch_lastrowid + and ( + # column has a Sequence and we support those + ( + c.default is not None + and c.default.is_sequence + and compiler.dialect.supports_sequences + ) + or + # column has no default on it, but dialect can run the + # "autoincrement" mechanism explicitly, e.g. 
PostgreSQL + # SERIAL we know the sequence name + ( + c.default is None + and compiler.dialect.preexecute_autoincrement_sequences + ) + ) + ) + ): + # do a pre-execute of the default + values.append( + ( + c, + compiler.preparer.format_column(c), + _create_insert_prefetch_bind_param(compiler, c, **kw), + (c.key,), + ) + ) + elif ( + c.default is None + and c.server_default is None + and not c.nullable + and c is not stmt.table._autoincrement_column + ): + # no .default, no .server_default, not autoincrement, we have + # no indication this primary key column will have any value + _warn_pk_with_no_anticipated_value(c) + elif compiler.dialect.postfetch_lastrowid: + # finally, where it seems like there will be a generated primary key + # value and we haven't set up any other way to fetch it, and the + # dialect supports cursor.lastrowid, switch on the lastrowid flag so + # that the DefaultExecutionContext calls upon cursor.lastrowid + compiler.postfetch_lastrowid = True + + +def _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, values, kw +): + if c.default.is_sequence: + if compiler.dialect.supports_sequences and ( + not c.default.optional or not compiler.dialect.sequences_optional + ): + accumulated_bind_names: Set[str] = set() + values.append( + ( + c, + compiler.preparer.format_column(c), + compiler.process( + c.default, + accumulate_bind_names=accumulated_bind_names, + **kw, + ), + accumulated_bind_names, + ) + ) + if implicit_return_defaults and c in implicit_return_defaults: + compiler.implicit_returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif c.default.is_clause_element: + accumulated_bind_names = set() + values.append( + ( + c, + compiler.preparer.format_column(c), + compiler.process( + c.default.arg.self_group(), + accumulate_bind_names=accumulated_bind_names, + **kw, + ), + accumulated_bind_names, + ) + ) + + if implicit_return_defaults and c in implicit_return_defaults: + 
compiler.implicit_returning.append(c) + elif not c.primary_key: + # don't add primary key column to postfetch + compiler.postfetch.append(c) + else: + values.append( + ( + c, + compiler.preparer.format_column(c), + _create_insert_prefetch_bind_param(compiler, c, **kw), + (c.key,), + ) + ) + + +def _append_param_insert_select_hasdefault( + compiler: SQLCompiler, + stmt: ValuesBase, + c: ColumnClause[Any], + values: List[_CrudParamElementSQLExpr], + kw: Dict[str, Any], +) -> None: + if default_is_sequence(c.default): + if compiler.dialect.supports_sequences and ( + not c.default.optional or not compiler.dialect.sequences_optional + ): + values.append( + ( + c, + compiler.preparer.format_column(c), + c.default.next_value(), + (), + ) + ) + elif default_is_clause_element(c.default): + values.append( + ( + c, + compiler.preparer.format_column(c), + c.default.arg.self_group(), + (), + ) + ) + else: + values.append( + ( + c, + compiler.preparer.format_column(c), + _create_insert_prefetch_bind_param( + compiler, c, process=False, **kw + ), + (c.key,), + ) + ) + + +def _append_param_update( + compiler, compile_state, stmt, c, implicit_return_defaults, values, kw +): + include_table = compile_state.include_table_with_column_exprs + if c.onupdate is not None and not c.onupdate.is_sequence: + if c.onupdate.is_clause_element: + values.append( + ( + c, + compiler.preparer.format_column( + c, + use_table=include_table, + ), + compiler.process(c.onupdate.arg.self_group(), **kw), + (), + ) + ) + if implicit_return_defaults and c in implicit_return_defaults: + compiler.implicit_returning.append(c) + else: + compiler.postfetch.append(c) + else: + values.append( + ( + c, + compiler.preparer.format_column( + c, + use_table=include_table, + ), + _create_update_prefetch_bind_param(compiler, c, **kw), + (c.key,), + ) + ) + elif c.server_onupdate is not None: + if implicit_return_defaults and c in implicit_return_defaults: + compiler.implicit_returning.append(c) + else: + 
compiler.postfetch.append(c) + elif ( + implicit_return_defaults + and (stmt._return_defaults_columns or not stmt._return_defaults) + and c in implicit_return_defaults + ): + compiler.implicit_returning.append(c) + + +@overload +def _create_insert_prefetch_bind_param( + compiler: SQLCompiler, + c: ColumnElement[Any], + process: Literal[True] = ..., + **kw: Any, +) -> str: ... + + +@overload +def _create_insert_prefetch_bind_param( + compiler: SQLCompiler, + c: ColumnElement[Any], + process: Literal[False], + **kw: Any, +) -> elements.BindParameter[Any]: ... + + +def _create_insert_prefetch_bind_param( + compiler: SQLCompiler, + c: ColumnElement[Any], + process: bool = True, + name: Optional[str] = None, + **kw: Any, +) -> Union[elements.BindParameter[Any], str]: + param = _create_bind_param( + compiler, c, None, process=process, name=name, **kw + ) + compiler.insert_prefetch.append(c) # type: ignore + return param + + +@overload +def _create_update_prefetch_bind_param( + compiler: SQLCompiler, + c: ColumnElement[Any], + process: Literal[True] = ..., + **kw: Any, +) -> str: ... + + +@overload +def _create_update_prefetch_bind_param( + compiler: SQLCompiler, + c: ColumnElement[Any], + process: Literal[False], + **kw: Any, +) -> elements.BindParameter[Any]: ... 
+ + +def _create_update_prefetch_bind_param( + compiler: SQLCompiler, + c: ColumnElement[Any], + process: bool = True, + name: Optional[str] = None, + **kw: Any, +) -> Union[elements.BindParameter[Any], str]: + param = _create_bind_param( + compiler, c, None, process=process, name=name, **kw + ) + compiler.update_prefetch.append(c) # type: ignore + return param + + +class _multiparam_column(elements.ColumnElement[Any]): + _is_multiparam_column = True + + def __init__(self, original, index): + self.index = index + self.key = "%s_m%d" % (original.key, index + 1) + self.original = original + self.default = original.default + self.type = original.type + + def compare(self, other, **kw): + raise NotImplementedError() + + def _copy_internals(self, other, **kw): + raise NotImplementedError() + + def __eq__(self, other): + return ( + isinstance(other, _multiparam_column) + and other.key == self.key + and other.original == self.original + ) + + @util.memoized_property + def _default_description_tuple(self) -> _DefaultDescriptionTuple: + """used by default.py -> _process_execute_defaults()""" + + return _DefaultDescriptionTuple._from_column_default(self.default) + + @util.memoized_property + def _onupdate_description_tuple(self) -> _DefaultDescriptionTuple: + """used by default.py -> _process_execute_defaults()""" + + return _DefaultDescriptionTuple._from_column_default(self.onupdate) + + +def _process_multiparam_default_bind( + compiler: SQLCompiler, + stmt: ValuesBase, + c: KeyedColumnElement[Any], + index: int, + kw: Dict[str, Any], +) -> str: + if not c.default: + raise exc.CompileError( + "INSERT value for column %s is explicitly rendered as a bound" + "parameter in the VALUES clause; " + "a Python-side value or SQL expression is required" % c + ) + elif default_is_clause_element(c.default): + return compiler.process(c.default.arg.self_group(), **kw) + elif c.default.is_sequence: + # these conditions would have been established + # by 
append_param_insert_(?:hasdefault|pk_returning|pk_no_returning) + # in order for us to be here, so these don't need to be + # checked + # assert compiler.dialect.supports_sequences and ( + # not c.default.optional + # or not compiler.dialect.sequences_optional + # ) + return compiler.process(c.default, **kw) + else: + col = _multiparam_column(c, index) + assert isinstance(stmt, dml.Insert) + return _create_insert_prefetch_bind_param( + compiler, col, process=True, **kw + ) + + +def _get_update_multitable_params( + compiler, + stmt, + compile_state, + stmt_parameter_tuples, + check_columns, + _col_bind_name, + _getattr_col_key, + values, + kw, +): + normalized_params = { + coercions.expect(roles.DMLColumnRole, c): param + for c, param in stmt_parameter_tuples or () + } + + include_table = compile_state.include_table_with_column_exprs + + affected_tables = set() + for t in compile_state._extra_froms: + for c in t.c: + if c in normalized_params: + affected_tables.add(t) + check_columns[_getattr_col_key(c)] = c + value = normalized_params[c] + + col_value = compiler.process(c, include_table=include_table) + if coercions._is_literal(value): + value = _create_bind_param( + compiler, + c, + value, + required=value is REQUIRED, + name=_col_bind_name(c), + **kw, # TODO: no test coverage for literal binds here + ) + accumulated_bind_names: Iterable[str] = (c.key,) + elif value._is_bind_parameter: + cbn = _col_bind_name(c) + value = _handle_values_anonymous_param( + compiler, c, value, name=cbn, **kw + ) + accumulated_bind_names = (cbn,) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + accumulated_bind_names = () + values.append((c, col_value, value, accumulated_bind_names)) + # determine tables which are actually to be updated - process onupdate + # and server_onupdate for these + for t in affected_tables: + for c in t.c: + if c in normalized_params: + continue + elif c.onupdate is not None and not c.onupdate.is_sequence: + if 
c.onupdate.is_clause_element: + values.append( + ( + c, + compiler.process(c, include_table=include_table), + compiler.process( + c.onupdate.arg.self_group(), **kw + ), + (), + ) + ) + compiler.postfetch.append(c) + else: + values.append( + ( + c, + compiler.process(c, include_table=include_table), + _create_update_prefetch_bind_param( + compiler, c, name=_col_bind_name(c), **kw + ), + (c.key,), + ) + ) + elif c.server_onupdate is not None: + compiler.postfetch.append(c) + + +def _extend_values_for_multiparams( + compiler: SQLCompiler, + stmt: ValuesBase, + compile_state: DMLState, + initial_values: Sequence[_CrudParamElementStr], + _column_as_key: Callable[..., str], + kw: Dict[str, Any], +) -> List[Sequence[_CrudParamElementStr]]: + values_0 = initial_values + values = [initial_values] + + mp = compile_state._multi_parameters + assert mp is not None + for i, row in enumerate(mp[1:]): + extension: List[_CrudParamElementStr] = [] + + row = {_column_as_key(key): v for key, v in row.items()} + + for col, col_expr, param, accumulated_names in values_0: + if col.key in row: + key = col.key + + if coercions._is_literal(row[key]): + new_param = _create_bind_param( + compiler, + col, + row[key], + name="%s_m%d" % (col.key, i + 1), + **kw, + ) + else: + new_param = compiler.process(row[key].self_group(), **kw) + else: + new_param = _process_multiparam_default_bind( + compiler, stmt, col, i, kw + ) + + extension.append((col, col_expr, new_param, accumulated_names)) + + values.append(extension) + + return values + + +def _get_stmt_parameter_tuples_params( + compiler, + compile_state, + parameters, + stmt_parameter_tuples, + _column_as_key, + values, + kw, +): + for k, v in stmt_parameter_tuples: + colkey = _column_as_key(k) + if colkey is not None: + parameters.setdefault(colkey, v) + else: + # a non-Column expression on the left side; + # add it to values() in an "as-is" state, + # coercing right side to bound param + + # note one of the main use cases for this is array 
slice + # updates on PostgreSQL, as the left side is also an expression. + + col_expr = compiler.process( + k, include_table=compile_state.include_table_with_column_exprs + ) + + if coercions._is_literal(v): + v = compiler.process( + elements.BindParameter(None, v, type_=k.type), **kw + ) + else: + if v._is_bind_parameter and v.type._isnull: + # either unique parameter, or other bound parameters that + # were passed in directly + # set type to that of the column unconditionally + v = v._with_binary_element_type(k.type) + + v = compiler.process(v.self_group(), **kw) + + # TODO: not sure if accumulated_bind_names applies here + values.append((k, col_expr, v, ())) + + +def _get_returning_modifiers(compiler, stmt, compile_state, toplevel): + """determines RETURNING strategy, if any, for the statement. + + This is where it's determined what we need to fetch from the + INSERT or UPDATE statement after it's invoked. + + """ + + dialect = compiler.dialect + + need_pks = ( + toplevel + and _compile_state_isinsert(compile_state) + and not stmt._inline + and ( + not compiler.for_executemany + or (dialect.insert_executemany_returning and stmt._return_defaults) + ) + and not stmt._returning + # and (not stmt._returning or stmt._return_defaults) + and not compile_state._has_multi_parameters + ) + + # check if we have access to simple cursor.lastrowid. we can use that + # after the INSERT if that's all we need. + postfetch_lastrowid = ( + need_pks + and dialect.postfetch_lastrowid + and stmt.table._autoincrement_column is not None + ) + + # see if we want to add RETURNING to an INSERT in order to get + # primary key columns back. This would be instead of postfetch_lastrowid + # if that's set. 
+ implicit_returning = ( + # statement itself can veto it + need_pks + # the dialect can veto it if it just doesnt support RETURNING + # with INSERT + and dialect.insert_returning + # user-defined implicit_returning on Table can veto it + and compile_state._primary_table.implicit_returning + # the compile_state can veto it (SQlite uses this to disable + # RETURNING for an ON CONFLICT insert, as SQLite does not return + # for rows that were updated, which is wrong) + and compile_state._supports_implicit_returning + and ( + # since we support MariaDB and SQLite which also support lastrowid, + # decide if we should use lastrowid or RETURNING. for insert + # that didnt call return_defaults() and has just one set of + # parameters, we can use lastrowid. this is more "traditional" + # and a lot of weird use cases are supported by it. + # SQLite lastrowid times 3x faster than returning, + # Mariadb lastrowid 2x faster than returning + (not postfetch_lastrowid or dialect.favor_returning_over_lastrowid) + or compile_state._has_multi_parameters + or stmt._return_defaults + ) + ) + if implicit_returning: + postfetch_lastrowid = False + + if _compile_state_isinsert(compile_state): + should_implicit_return_defaults = ( + implicit_returning and stmt._return_defaults + ) + explicit_returning = ( + should_implicit_return_defaults + or stmt._returning + or stmt._supplemental_returning + ) + use_insertmanyvalues = ( + toplevel + and compiler.for_executemany + and dialect.use_insertmanyvalues + and ( + explicit_returning or dialect.use_insertmanyvalues_wo_returning + ) + ) + + use_sentinel_columns = None + if ( + use_insertmanyvalues + and explicit_returning + and stmt._sort_by_parameter_order + ): + use_sentinel_columns = compiler._get_sentinel_column_for_table( + stmt.table + ) + + elif compile_state.isupdate: + should_implicit_return_defaults = ( + stmt._return_defaults + and compile_state._primary_table.implicit_returning + and compile_state._supports_implicit_returning + and 
dialect.update_returning + ) + use_insertmanyvalues = False + use_sentinel_columns = None + elif compile_state.isdelete: + should_implicit_return_defaults = ( + stmt._return_defaults + and compile_state._primary_table.implicit_returning + and compile_state._supports_implicit_returning + and dialect.delete_returning + ) + use_insertmanyvalues = False + use_sentinel_columns = None + else: + should_implicit_return_defaults = False # pragma: no cover + use_insertmanyvalues = False + use_sentinel_columns = None + + if should_implicit_return_defaults: + if not stmt._return_defaults_columns: + # TODO: this is weird. See #9685 where we have to + # take an extra step to prevent this from happening. why + # would this ever be *all* columns? but if we set to blank, then + # that seems to break things also in the ORM. So we should + # try to clean this up and figure out what return_defaults + # needs to do w/ the ORM etc. here + implicit_return_defaults = set(stmt.table.c) + else: + implicit_return_defaults = set(stmt._return_defaults_columns) + else: + implicit_return_defaults = None + + return ( + need_pks, + implicit_returning or should_implicit_return_defaults, + implicit_return_defaults, + postfetch_lastrowid, + use_insertmanyvalues, + use_sentinel_columns, + ) + + +def _warn_pk_with_no_anticipated_value(c): + msg = ( + "Column '%s.%s' is marked as a member of the " + "primary key for table '%s', " + "but has no Python-side or server-side default generator indicated, " + "nor does it indicate 'autoincrement=True' or 'nullable=True', " + "and no explicit value is passed. " + "Primary key columns typically may not store NULL." + % (c.table.fullname, c.name, c.table.fullname) + ) + if len(c.table.primary_key) > 1: + msg += ( + " Note that as of SQLAlchemy 1.1, 'autoincrement=True' must be " + "indicated explicitly for composite (e.g. multicolumn) primary " + "keys if AUTO_INCREMENT/SERIAL/IDENTITY " + "behavior is expected for one of the columns in the primary key. 
" + "CREATE TABLE statements are impacted by this change as well on " + "most backends." + ) + util.warn(msg) diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py new file mode 100644 index 0000000..d9e3f67 --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py @@ -0,0 +1,1378 @@ +# sql/ddl.py +# Copyright (C) 2009-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: allow-untyped-defs, allow-untyped-calls + +""" +Provides the hierarchy of DDL-defining schema items as well as routines +to invoke them for a create/drop call. + +""" +from __future__ import annotations + +import contextlib +import typing +from typing import Any +from typing import Callable +from typing import Iterable +from typing import List +from typing import Optional +from typing import Sequence as typing_Sequence +from typing import Tuple + +from . import roles +from .base import _generative +from .base import Executable +from .base import SchemaVisitor +from .elements import ClauseElement +from .. import exc +from .. 
import util +from ..util import topological +from ..util.typing import Protocol +from ..util.typing import Self + +if typing.TYPE_CHECKING: + from .compiler import Compiled + from .compiler import DDLCompiler + from .elements import BindParameter + from .schema import Constraint + from .schema import ForeignKeyConstraint + from .schema import SchemaItem + from .schema import Sequence + from .schema import Table + from .selectable import TableClause + from ..engine.base import Connection + from ..engine.interfaces import CacheStats + from ..engine.interfaces import CompiledCacheType + from ..engine.interfaces import Dialect + from ..engine.interfaces import SchemaTranslateMapType + + +class BaseDDLElement(ClauseElement): + """The root of DDL constructs, including those that are sub-elements + within the "create table" and other processes. + + .. versionadded:: 2.0 + + """ + + _hierarchy_supports_caching = False + """disable cache warnings for all _DDLCompiles subclasses. """ + + def _compiler(self, dialect, **kw): + """Return a compiler appropriate for this ClauseElement, given a + Dialect.""" + + return dialect.ddl_compiler(dialect, self, **kw) + + def _compile_w_cache( + self, + dialect: Dialect, + *, + compiled_cache: Optional[CompiledCacheType], + column_keys: List[str], + for_executemany: bool = False, + schema_translate_map: Optional[SchemaTranslateMapType] = None, + **kw: Any, + ) -> Tuple[ + Compiled, Optional[typing_Sequence[BindParameter[Any]]], CacheStats + ]: + raise NotImplementedError() + + +class DDLIfCallable(Protocol): + def __call__( + self, + ddl: BaseDDLElement, + target: SchemaItem, + bind: Optional[Connection], + tables: Optional[List[Table]] = None, + state: Optional[Any] = None, + *, + dialect: Dialect, + compiler: Optional[DDLCompiler] = ..., + checkfirst: bool, + ) -> bool: ... 
+ + +class DDLIf(typing.NamedTuple): + dialect: Optional[str] + callable_: Optional[DDLIfCallable] + state: Optional[Any] + + def _should_execute( + self, + ddl: BaseDDLElement, + target: SchemaItem, + bind: Optional[Connection], + compiler: Optional[DDLCompiler] = None, + **kw: Any, + ) -> bool: + if bind is not None: + dialect = bind.dialect + elif compiler is not None: + dialect = compiler.dialect + else: + assert False, "compiler or dialect is required" + + if isinstance(self.dialect, str): + if self.dialect != dialect.name: + return False + elif isinstance(self.dialect, (tuple, list, set)): + if dialect.name not in self.dialect: + return False + if self.callable_ is not None and not self.callable_( + ddl, + target, + bind, + state=self.state, + dialect=dialect, + compiler=compiler, + **kw, + ): + return False + + return True + + +class ExecutableDDLElement(roles.DDLRole, Executable, BaseDDLElement): + """Base class for standalone executable DDL expression constructs. + + This class is the base for the general purpose :class:`.DDL` class, + as well as the various create/drop clause constructs such as + :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`, + etc. + + .. versionchanged:: 2.0 :class:`.ExecutableDDLElement` is renamed from + :class:`.DDLElement`, which still exists for backwards compatibility. + + :class:`.ExecutableDDLElement` integrates closely with SQLAlchemy events, + introduced in :ref:`event_toplevel`. An instance of one is + itself an event receiving callable:: + + event.listen( + users, + 'after_create', + AddConstraint(constraint).execute_if(dialect='postgresql') + ) + + .. 
seealso:: + + :class:`.DDL` + + :class:`.DDLEvents` + + :ref:`event_toplevel` + + :ref:`schema_ddl_sequences` + + """ + + _ddl_if: Optional[DDLIf] = None + target: Optional[SchemaItem] = None + + def _execute_on_connection( + self, connection, distilled_params, execution_options + ): + return connection._execute_ddl( + self, distilled_params, execution_options + ) + + @_generative + def against(self, target: SchemaItem) -> Self: + """Return a copy of this :class:`_schema.ExecutableDDLElement` which + will include the given target. + + This essentially applies the given item to the ``.target`` attribute of + the returned :class:`_schema.ExecutableDDLElement` object. This target + is then usable by event handlers and compilation routines in order to + provide services such as tokenization of a DDL string in terms of a + particular :class:`_schema.Table`. + + When a :class:`_schema.ExecutableDDLElement` object is established as + an event handler for the :meth:`_events.DDLEvents.before_create` or + :meth:`_events.DDLEvents.after_create` events, and the event then + occurs for a given target such as a :class:`_schema.Constraint` or + :class:`_schema.Table`, that target is established with a copy of the + :class:`_schema.ExecutableDDLElement` object using this method, which + then proceeds to the :meth:`_schema.ExecutableDDLElement.execute` + method in order to invoke the actual DDL instruction. + + :param target: a :class:`_schema.SchemaItem` that will be the subject + of a DDL operation. + + :return: a copy of this :class:`_schema.ExecutableDDLElement` with the + ``.target`` attribute assigned to the given + :class:`_schema.SchemaItem`. + + .. seealso:: + + :class:`_schema.DDL` - uses tokenization against the "target" when + processing the DDL string. 
+ + """ + self.target = target + return self + + @_generative + def execute_if( + self, + dialect: Optional[str] = None, + callable_: Optional[DDLIfCallable] = None, + state: Optional[Any] = None, + ) -> Self: + r"""Return a callable that will execute this + :class:`_ddl.ExecutableDDLElement` conditionally within an event + handler. + + Used to provide a wrapper for event listening:: + + event.listen( + metadata, + 'before_create', + DDL("my_ddl").execute_if(dialect='postgresql') + ) + + :param dialect: May be a string or tuple of strings. + If a string, it will be compared to the name of the + executing database dialect:: + + DDL('something').execute_if(dialect='postgresql') + + If a tuple, specifies multiple dialect names:: + + DDL('something').execute_if(dialect=('postgresql', 'mysql')) + + :param callable\_: A callable, which will be invoked with + three positional arguments as well as optional keyword + arguments: + + :ddl: + This DDL element. + + :target: + The :class:`_schema.Table` or :class:`_schema.MetaData` + object which is the + target of this event. May be None if the DDL is executed + explicitly. + + :bind: + The :class:`_engine.Connection` being used for DDL execution. + May be None if this construct is being created inline within + a table, in which case ``compiler`` will be present. + + :tables: + Optional keyword argument - a list of Table objects which are to + be created/ dropped within a MetaData.create_all() or drop_all() + method call. + + :dialect: keyword argument, but always present - the + :class:`.Dialect` involved in the operation. + + :compiler: keyword argument. Will be ``None`` for an engine + level DDL invocation, but will refer to a :class:`.DDLCompiler` + if this DDL element is being created inline within a table. + + :state: + Optional keyword argument - will be the ``state`` argument + passed to this function. 
+ + :checkfirst: + Keyword argument, will be True if the 'checkfirst' flag was + set during the call to ``create()``, ``create_all()``, + ``drop()``, ``drop_all()``. + + If the callable returns a True value, the DDL statement will be + executed. + + :param state: any value which will be passed to the callable\_ + as the ``state`` keyword argument. + + .. seealso:: + + :meth:`.SchemaItem.ddl_if` + + :class:`.DDLEvents` + + :ref:`event_toplevel` + + """ + self._ddl_if = DDLIf(dialect, callable_, state) + return self + + def _should_execute(self, target, bind, **kw): + if self._ddl_if is None: + return True + else: + return self._ddl_if._should_execute(self, target, bind, **kw) + + def _invoke_with(self, bind): + if self._should_execute(self.target, bind): + return bind.execute(self) + + def __call__(self, target, bind, **kw): + """Execute the DDL as a ddl_listener.""" + + self.against(target)._invoke_with(bind) + + def _generate(self): + s = self.__class__.__new__(self.__class__) + s.__dict__ = self.__dict__.copy() + return s + + +DDLElement = ExecutableDDLElement +""":class:`.DDLElement` is renamed to :class:`.ExecutableDDLElement`.""" + + +class DDL(ExecutableDDLElement): + """A literal DDL statement. + + Specifies literal SQL DDL to be executed by the database. DDL objects + function as DDL event listeners, and can be subscribed to those events + listed in :class:`.DDLEvents`, using either :class:`_schema.Table` or + :class:`_schema.MetaData` objects as targets. + Basic templating support allows + a single DDL instance to handle repetitive tasks for multiple tables. 
+ + Examples:: + + from sqlalchemy import event, DDL + + tbl = Table('users', metadata, Column('uid', Integer)) + event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger')) + + spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE') + event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb')) + + drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE') + connection.execute(drop_spow) + + When operating on Table events, the following ``statement`` + string substitutions are available:: + + %(table)s - the Table name, with any required quoting applied + %(schema)s - the schema name, with any required quoting applied + %(fullname)s - the Table name including schema, quoted if needed + + The DDL's "context", if any, will be combined with the standard + substitutions noted above. Keys present in the context will override + the standard substitutions. + + """ + + __visit_name__ = "ddl" + + def __init__(self, statement, context=None): + """Create a DDL statement. + + :param statement: + A string or unicode string to be executed. Statements will be + processed with Python's string formatting operator using + a fixed set of string substitutions, as well as additional + substitutions provided by the optional :paramref:`.DDL.context` + parameter. + + A literal '%' in a statement must be escaped as '%%'. + + SQL bind parameters are not available in DDL statements. + + :param context: + Optional dictionary, defaults to None. These values will be + available for use in string substitutions on the DDL statement. + + .. 
seealso:: + + :class:`.DDLEvents` + + :ref:`event_toplevel` + + """ + + if not isinstance(statement, str): + raise exc.ArgumentError( + "Expected a string or unicode SQL statement, got '%r'" + % statement + ) + + self.statement = statement + self.context = context or {} + + def __repr__(self): + parts = [repr(self.statement)] + if self.context: + parts.append(f"context={self.context}") + + return "<%s@%s; %s>" % ( + type(self).__name__, + id(self), + ", ".join(parts), + ) + + +class _CreateDropBase(ExecutableDDLElement): + """Base class for DDL constructs that represent CREATE and DROP or + equivalents. + + The common theme of _CreateDropBase is a single + ``element`` attribute which refers to the element + to be created or dropped. + + """ + + def __init__( + self, + element, + ): + self.element = self.target = element + self._ddl_if = getattr(element, "_ddl_if", None) + + @property + def stringify_dialect(self): + return self.element.create_drop_stringify_dialect + + def _create_rule_disable(self, compiler): + """Allow disable of _create_rule using a callable. + + Pass to _create_rule using + util.portable_instancemethod(self._create_rule_disable) + to retain serializability. + + """ + return False + + +class _CreateBase(_CreateDropBase): + def __init__(self, element, if_not_exists=False): + super().__init__(element) + self.if_not_exists = if_not_exists + + +class _DropBase(_CreateDropBase): + def __init__(self, element, if_exists=False): + super().__init__(element) + self.if_exists = if_exists + + +class CreateSchema(_CreateBase): + """Represent a CREATE SCHEMA statement. + + The argument here is the string name of the schema. + + """ + + __visit_name__ = "create_schema" + + stringify_dialect = "default" + + def __init__( + self, + name, + if_not_exists=False, + ): + """Create a new :class:`.CreateSchema` construct.""" + + super().__init__(element=name, if_not_exists=if_not_exists) + + +class DropSchema(_DropBase): + """Represent a DROP SCHEMA statement. 
+ + The argument here is the string name of the schema. + + """ + + __visit_name__ = "drop_schema" + + stringify_dialect = "default" + + def __init__( + self, + name, + cascade=False, + if_exists=False, + ): + """Create a new :class:`.DropSchema` construct.""" + + super().__init__(element=name, if_exists=if_exists) + self.cascade = cascade + + +class CreateTable(_CreateBase): + """Represent a CREATE TABLE statement.""" + + __visit_name__ = "create_table" + + def __init__( + self, + element: Table, + include_foreign_key_constraints: Optional[ + typing_Sequence[ForeignKeyConstraint] + ] = None, + if_not_exists: bool = False, + ): + """Create a :class:`.CreateTable` construct. + + :param element: a :class:`_schema.Table` that's the subject + of the CREATE + :param on: See the description for 'on' in :class:`.DDL`. + :param include_foreign_key_constraints: optional sequence of + :class:`_schema.ForeignKeyConstraint` objects that will be included + inline within the CREATE construct; if omitted, all foreign key + constraints that do not specify use_alter=True are included. + + :param if_not_exists: if True, an IF NOT EXISTS operator will be + applied to the construct. + + .. versionadded:: 1.4.0b2 + + """ + super().__init__(element, if_not_exists=if_not_exists) + self.columns = [CreateColumn(column) for column in element.columns] + self.include_foreign_key_constraints = include_foreign_key_constraints + + +class _DropView(_DropBase): + """Semi-public 'DROP VIEW' construct. + + Used by the test suite for dialect-agnostic drops of views. + This object will eventually be part of a public "view" API. + + """ + + __visit_name__ = "drop_view" + + +class CreateConstraint(BaseDDLElement): + def __init__(self, element: Constraint): + self.element = element + + +class CreateColumn(BaseDDLElement): + """Represent a :class:`_schema.Column` + as rendered in a CREATE TABLE statement, + via the :class:`.CreateTable` construct. 
+ + This is provided to support custom column DDL within the generation + of CREATE TABLE statements, by using the + compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel` + to extend :class:`.CreateColumn`. + + Typical integration is to examine the incoming :class:`_schema.Column` + object, and to redirect compilation if a particular flag or condition + is found:: + + from sqlalchemy import schema + from sqlalchemy.ext.compiler import compiles + + @compiles(schema.CreateColumn) + def compile(element, compiler, **kw): + column = element.element + + if "special" not in column.info: + return compiler.visit_create_column(element, **kw) + + text = "%s SPECIAL DIRECTIVE %s" % ( + column.name, + compiler.type_compiler.process(column.type) + ) + default = compiler.get_column_default_string(column) + if default is not None: + text += " DEFAULT " + default + + if not column.nullable: + text += " NOT NULL" + + if column.constraints: + text += " ".join( + compiler.process(const) + for const in column.constraints) + return text + + The above construct can be applied to a :class:`_schema.Table` + as follows:: + + from sqlalchemy import Table, Metadata, Column, Integer, String + from sqlalchemy import schema + + metadata = MetaData() + + table = Table('mytable', MetaData(), + Column('x', Integer, info={"special":True}, primary_key=True), + Column('y', String(50)), + Column('z', String(20), info={"special":True}) + ) + + metadata.create_all(conn) + + Above, the directives we've added to the :attr:`_schema.Column.info` + collection + will be detected by our custom compilation scheme:: + + CREATE TABLE mytable ( + x SPECIAL DIRECTIVE INTEGER NOT NULL, + y VARCHAR(50), + z SPECIAL DIRECTIVE VARCHAR(20), + PRIMARY KEY (x) + ) + + The :class:`.CreateColumn` construct can also be used to skip certain + columns when producing a ``CREATE TABLE``. This is accomplished by + creating a compilation rule that conditionally returns ``None``. 
+ This is essentially how to produce the same effect as using the + ``system=True`` argument on :class:`_schema.Column`, which marks a column + as an implicitly-present "system" column. + + For example, suppose we wish to produce a :class:`_schema.Table` + which skips + rendering of the PostgreSQL ``xmin`` column against the PostgreSQL + backend, but on other backends does render it, in anticipation of a + triggered rule. A conditional compilation rule could skip this name only + on PostgreSQL:: + + from sqlalchemy.schema import CreateColumn + + @compiles(CreateColumn, "postgresql") + def skip_xmin(element, compiler, **kw): + if element.element.name == 'xmin': + return None + else: + return compiler.visit_create_column(element, **kw) + + + my_table = Table('mytable', metadata, + Column('id', Integer, primary_key=True), + Column('xmin', Integer) + ) + + Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE`` + which only includes the ``id`` column in the string; the ``xmin`` column + will be omitted, but only against the PostgreSQL backend. + + """ + + __visit_name__ = "create_column" + + def __init__(self, element): + self.element = element + + +class DropTable(_DropBase): + """Represent a DROP TABLE statement.""" + + __visit_name__ = "drop_table" + + def __init__(self, element: Table, if_exists: bool = False): + """Create a :class:`.DropTable` construct. + + :param element: a :class:`_schema.Table` that's the subject + of the DROP. + :param on: See the description for 'on' in :class:`.DDL`. + :param if_exists: if True, an IF EXISTS operator will be applied to the + construct. + + .. 
versionadded:: 1.4.0b2 + + """ + super().__init__(element, if_exists=if_exists) + + +class CreateSequence(_CreateBase): + """Represent a CREATE SEQUENCE statement.""" + + __visit_name__ = "create_sequence" + + def __init__(self, element: Sequence, if_not_exists: bool = False): + super().__init__(element, if_not_exists=if_not_exists) + + +class DropSequence(_DropBase): + """Represent a DROP SEQUENCE statement.""" + + __visit_name__ = "drop_sequence" + + def __init__(self, element: Sequence, if_exists: bool = False): + super().__init__(element, if_exists=if_exists) + + +class CreateIndex(_CreateBase): + """Represent a CREATE INDEX statement.""" + + __visit_name__ = "create_index" + + def __init__(self, element, if_not_exists=False): + """Create a :class:`.Createindex` construct. + + :param element: a :class:`_schema.Index` that's the subject + of the CREATE. + :param if_not_exists: if True, an IF NOT EXISTS operator will be + applied to the construct. + + .. versionadded:: 1.4.0b2 + + """ + super().__init__(element, if_not_exists=if_not_exists) + + +class DropIndex(_DropBase): + """Represent a DROP INDEX statement.""" + + __visit_name__ = "drop_index" + + def __init__(self, element, if_exists=False): + """Create a :class:`.DropIndex` construct. + + :param element: a :class:`_schema.Index` that's the subject + of the DROP. + :param if_exists: if True, an IF EXISTS operator will be applied to the + construct. + + .. 
versionadded:: 1.4.0b2 + + """ + super().__init__(element, if_exists=if_exists) + + +class AddConstraint(_CreateBase): + """Represent an ALTER TABLE ADD CONSTRAINT statement.""" + + __visit_name__ = "add_constraint" + + def __init__(self, element): + super().__init__(element) + element._create_rule = util.portable_instancemethod( + self._create_rule_disable + ) + + +class DropConstraint(_DropBase): + """Represent an ALTER TABLE DROP CONSTRAINT statement.""" + + __visit_name__ = "drop_constraint" + + def __init__(self, element, cascade=False, if_exists=False, **kw): + self.cascade = cascade + super().__init__(element, if_exists=if_exists, **kw) + element._create_rule = util.portable_instancemethod( + self._create_rule_disable + ) + + +class SetTableComment(_CreateDropBase): + """Represent a COMMENT ON TABLE IS statement.""" + + __visit_name__ = "set_table_comment" + + +class DropTableComment(_CreateDropBase): + """Represent a COMMENT ON TABLE '' statement. + + Note this varies a lot across database backends. 
+ + """ + + __visit_name__ = "drop_table_comment" + + +class SetColumnComment(_CreateDropBase): + """Represent a COMMENT ON COLUMN IS statement.""" + + __visit_name__ = "set_column_comment" + + +class DropColumnComment(_CreateDropBase): + """Represent a COMMENT ON COLUMN IS NULL statement.""" + + __visit_name__ = "drop_column_comment" + + +class SetConstraintComment(_CreateDropBase): + """Represent a COMMENT ON CONSTRAINT IS statement.""" + + __visit_name__ = "set_constraint_comment" + + +class DropConstraintComment(_CreateDropBase): + """Represent a COMMENT ON CONSTRAINT IS NULL statement.""" + + __visit_name__ = "drop_constraint_comment" + + +class InvokeDDLBase(SchemaVisitor): + def __init__(self, connection): + self.connection = connection + + @contextlib.contextmanager + def with_ddl_events(self, target, **kw): + """helper context manager that will apply appropriate DDL events + to a CREATE or DROP operation.""" + + raise NotImplementedError() + + +class InvokeCreateDDLBase(InvokeDDLBase): + @contextlib.contextmanager + def with_ddl_events(self, target, **kw): + """helper context manager that will apply appropriate DDL events + to a CREATE or DROP operation.""" + + target.dispatch.before_create( + target, self.connection, _ddl_runner=self, **kw + ) + yield + target.dispatch.after_create( + target, self.connection, _ddl_runner=self, **kw + ) + + +class InvokeDropDDLBase(InvokeDDLBase): + @contextlib.contextmanager + def with_ddl_events(self, target, **kw): + """helper context manager that will apply appropriate DDL events + to a CREATE or DROP operation.""" + + target.dispatch.before_drop( + target, self.connection, _ddl_runner=self, **kw + ) + yield + target.dispatch.after_drop( + target, self.connection, _ddl_runner=self, **kw + ) + + +class SchemaGenerator(InvokeCreateDDLBase): + def __init__( + self, dialect, connection, checkfirst=False, tables=None, **kwargs + ): + super().__init__(connection, **kwargs) + self.checkfirst = checkfirst + self.tables = 
tables + self.preparer = dialect.identifier_preparer + self.dialect = dialect + self.memo = {} + + def _can_create_table(self, table): + self.dialect.validate_identifier(table.name) + effective_schema = self.connection.schema_for_object(table) + if effective_schema: + self.dialect.validate_identifier(effective_schema) + return not self.checkfirst or not self.dialect.has_table( + self.connection, table.name, schema=effective_schema + ) + + def _can_create_index(self, index): + effective_schema = self.connection.schema_for_object(index.table) + if effective_schema: + self.dialect.validate_identifier(effective_schema) + return not self.checkfirst or not self.dialect.has_index( + self.connection, + index.table.name, + index.name, + schema=effective_schema, + ) + + def _can_create_sequence(self, sequence): + effective_schema = self.connection.schema_for_object(sequence) + + return self.dialect.supports_sequences and ( + (not self.dialect.sequences_optional or not sequence.optional) + and ( + not self.checkfirst + or not self.dialect.has_sequence( + self.connection, sequence.name, schema=effective_schema + ) + ) + ) + + def visit_metadata(self, metadata): + if self.tables is not None: + tables = self.tables + else: + tables = list(metadata.tables.values()) + + collection = sort_tables_and_constraints( + [t for t in tables if self._can_create_table(t)] + ) + + seq_coll = [ + s + for s in metadata._sequences.values() + if s.column is None and self._can_create_sequence(s) + ] + + event_collection = [t for (t, fks) in collection if t is not None] + + with self.with_ddl_events( + metadata, + tables=event_collection, + checkfirst=self.checkfirst, + ): + for seq in seq_coll: + self.traverse_single(seq, create_ok=True) + + for table, fkcs in collection: + if table is not None: + self.traverse_single( + table, + create_ok=True, + include_foreign_key_constraints=fkcs, + _is_metadata_operation=True, + ) + else: + for fkc in fkcs: + self.traverse_single(fkc) + + def visit_table( + 
self, + table, + create_ok=False, + include_foreign_key_constraints=None, + _is_metadata_operation=False, + ): + if not create_ok and not self._can_create_table(table): + return + + with self.with_ddl_events( + table, + checkfirst=self.checkfirst, + _is_metadata_operation=_is_metadata_operation, + ): + for column in table.columns: + if column.default is not None: + self.traverse_single(column.default) + + if not self.dialect.supports_alter: + # e.g., don't omit any foreign key constraints + include_foreign_key_constraints = None + + CreateTable( + table, + include_foreign_key_constraints=( + include_foreign_key_constraints + ), + )._invoke_with(self.connection) + + if hasattr(table, "indexes"): + for index in table.indexes: + self.traverse_single(index, create_ok=True) + + if ( + self.dialect.supports_comments + and not self.dialect.inline_comments + ): + if table.comment is not None: + SetTableComment(table)._invoke_with(self.connection) + + for column in table.columns: + if column.comment is not None: + SetColumnComment(column)._invoke_with(self.connection) + + if self.dialect.supports_constraint_comments: + for constraint in table.constraints: + if constraint.comment is not None: + self.connection.execute( + SetConstraintComment(constraint) + ) + + def visit_foreign_key_constraint(self, constraint): + if not self.dialect.supports_alter: + return + + with self.with_ddl_events(constraint): + AddConstraint(constraint)._invoke_with(self.connection) + + def visit_sequence(self, sequence, create_ok=False): + if not create_ok and not self._can_create_sequence(sequence): + return + with self.with_ddl_events(sequence): + CreateSequence(sequence)._invoke_with(self.connection) + + def visit_index(self, index, create_ok=False): + if not create_ok and not self._can_create_index(index): + return + with self.with_ddl_events(index): + CreateIndex(index)._invoke_with(self.connection) + + +class SchemaDropper(InvokeDropDDLBase): + def __init__( + self, dialect, connection, 
checkfirst=False, tables=None, **kwargs + ): + super().__init__(connection, **kwargs) + self.checkfirst = checkfirst + self.tables = tables + self.preparer = dialect.identifier_preparer + self.dialect = dialect + self.memo = {} + + def visit_metadata(self, metadata): + if self.tables is not None: + tables = self.tables + else: + tables = list(metadata.tables.values()) + + try: + unsorted_tables = [t for t in tables if self._can_drop_table(t)] + collection = list( + reversed( + sort_tables_and_constraints( + unsorted_tables, + filter_fn=lambda constraint: ( + False + if not self.dialect.supports_alter + or constraint.name is None + else None + ), + ) + ) + ) + except exc.CircularDependencyError as err2: + if not self.dialect.supports_alter: + util.warn( + "Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s; and backend does " + "not support ALTER. To restore at least a partial sort, " + "apply use_alter=True to ForeignKey and " + "ForeignKeyConstraint " + "objects involved in the cycle to mark these as known " + "cycles that will be ignored." + % (", ".join(sorted([t.fullname for t in err2.cycles]))) + ) + collection = [(t, ()) for t in unsorted_tables] + else: + raise exc.CircularDependencyError( + err2.args[0], + err2.cycles, + err2.edges, + msg="Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s. Please ensure " + "that the ForeignKey and ForeignKeyConstraint objects " + "involved in the cycle have " + "names so that they can be dropped using " + "DROP CONSTRAINT." 
+ % (", ".join(sorted([t.fullname for t in err2.cycles]))), + ) from err2 + + seq_coll = [ + s + for s in metadata._sequences.values() + if self._can_drop_sequence(s) + ] + + event_collection = [t for (t, fks) in collection if t is not None] + + with self.with_ddl_events( + metadata, + tables=event_collection, + checkfirst=self.checkfirst, + ): + for table, fkcs in collection: + if table is not None: + self.traverse_single( + table, + drop_ok=True, + _is_metadata_operation=True, + _ignore_sequences=seq_coll, + ) + else: + for fkc in fkcs: + self.traverse_single(fkc) + + for seq in seq_coll: + self.traverse_single(seq, drop_ok=seq.column is None) + + def _can_drop_table(self, table): + self.dialect.validate_identifier(table.name) + effective_schema = self.connection.schema_for_object(table) + if effective_schema: + self.dialect.validate_identifier(effective_schema) + return not self.checkfirst or self.dialect.has_table( + self.connection, table.name, schema=effective_schema + ) + + def _can_drop_index(self, index): + effective_schema = self.connection.schema_for_object(index.table) + if effective_schema: + self.dialect.validate_identifier(effective_schema) + return not self.checkfirst or self.dialect.has_index( + self.connection, + index.table.name, + index.name, + schema=effective_schema, + ) + + def _can_drop_sequence(self, sequence): + effective_schema = self.connection.schema_for_object(sequence) + return self.dialect.supports_sequences and ( + (not self.dialect.sequences_optional or not sequence.optional) + and ( + not self.checkfirst + or self.dialect.has_sequence( + self.connection, sequence.name, schema=effective_schema + ) + ) + ) + + def visit_index(self, index, drop_ok=False): + if not drop_ok and not self._can_drop_index(index): + return + + with self.with_ddl_events(index): + DropIndex(index)(index, self.connection) + + def visit_table( + self, + table, + drop_ok=False, + _is_metadata_operation=False, + _ignore_sequences=(), + ): + if not drop_ok and 
not self._can_drop_table(table): + return + + with self.with_ddl_events( + table, + checkfirst=self.checkfirst, + _is_metadata_operation=_is_metadata_operation, + ): + DropTable(table)._invoke_with(self.connection) + + # traverse client side defaults which may refer to server-side + # sequences. noting that some of these client side defaults may + # also be set up as server side defaults + # (see https://docs.sqlalchemy.org/en/ + # latest/core/defaults.html + # #associating-a-sequence-as-the-server-side- + # default), so have to be dropped after the table is dropped. + for column in table.columns: + if ( + column.default is not None + and column.default not in _ignore_sequences + ): + self.traverse_single(column.default) + + def visit_foreign_key_constraint(self, constraint): + if not self.dialect.supports_alter: + return + with self.with_ddl_events(constraint): + DropConstraint(constraint)._invoke_with(self.connection) + + def visit_sequence(self, sequence, drop_ok=False): + if not drop_ok and not self._can_drop_sequence(sequence): + return + with self.with_ddl_events(sequence): + DropSequence(sequence)._invoke_with(self.connection) + + +def sort_tables( + tables: Iterable[TableClause], + skip_fn: Optional[Callable[[ForeignKeyConstraint], bool]] = None, + extra_dependencies: Optional[ + typing_Sequence[Tuple[TableClause, TableClause]] + ] = None, +) -> List[Table]: + """Sort a collection of :class:`_schema.Table` objects based on + dependency. + + This is a dependency-ordered sort which will emit :class:`_schema.Table` + objects such that they will follow their dependent :class:`_schema.Table` + objects. + Tables are dependent on another based on the presence of + :class:`_schema.ForeignKeyConstraint` + objects as well as explicit dependencies + added by :meth:`_schema.Table.add_is_dependent_on`. + + .. 
warning:: + + The :func:`._schema.sort_tables` function cannot by itself + accommodate automatic resolution of dependency cycles between + tables, which are usually caused by mutually dependent foreign key + constraints. When these cycles are detected, the foreign keys + of these tables are omitted from consideration in the sort. + A warning is emitted when this condition occurs, which will be an + exception raise in a future release. Tables which are not part + of the cycle will still be returned in dependency order. + + To resolve these cycles, the + :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter may be + applied to those constraints which create a cycle. Alternatively, + the :func:`_schema.sort_tables_and_constraints` function will + automatically return foreign key constraints in a separate + collection when cycles are detected so that they may be applied + to a schema separately. + + .. versionchanged:: 1.3.17 - a warning is emitted when + :func:`_schema.sort_tables` cannot perform a proper sort due to + cyclical dependencies. This will be an exception in a future + release. Additionally, the sort will continue to return + other tables not involved in the cycle in dependency order + which was not the case previously. + + :param tables: a sequence of :class:`_schema.Table` objects. + + :param skip_fn: optional callable which will be passed a + :class:`_schema.ForeignKeyConstraint` object; if it returns True, this + constraint will not be considered as a dependency. Note this is + **different** from the same parameter in + :func:`.sort_tables_and_constraints`, which is + instead passed the owning :class:`_schema.ForeignKeyConstraint` object. + + :param extra_dependencies: a sequence of 2-tuples of tables which will + also be considered as dependent on each other. + + .. 
seealso:: + + :func:`.sort_tables_and_constraints` + + :attr:`_schema.MetaData.sorted_tables` - uses this function to sort + + + """ + + if skip_fn is not None: + fixed_skip_fn = skip_fn + + def _skip_fn(fkc): + for fk in fkc.elements: + if fixed_skip_fn(fk): + return True + else: + return None + + else: + _skip_fn = None # type: ignore + + return [ + t + for (t, fkcs) in sort_tables_and_constraints( + tables, + filter_fn=_skip_fn, + extra_dependencies=extra_dependencies, + _warn_for_cycles=True, + ) + if t is not None + ] + + +def sort_tables_and_constraints( + tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False +): + """Sort a collection of :class:`_schema.Table` / + :class:`_schema.ForeignKeyConstraint` + objects. + + This is a dependency-ordered sort which will emit tuples of + ``(Table, [ForeignKeyConstraint, ...])`` such that each + :class:`_schema.Table` follows its dependent :class:`_schema.Table` + objects. + Remaining :class:`_schema.ForeignKeyConstraint` + objects that are separate due to + dependency rules not satisfied by the sort are emitted afterwards + as ``(None, [ForeignKeyConstraint ...])``. + + Tables are dependent on another based on the presence of + :class:`_schema.ForeignKeyConstraint` objects, explicit dependencies + added by :meth:`_schema.Table.add_is_dependent_on`, + as well as dependencies + stated here using the :paramref:`~.sort_tables_and_constraints.skip_fn` + and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies` + parameters. + + :param tables: a sequence of :class:`_schema.Table` objects. + + :param filter_fn: optional callable which will be passed a + :class:`_schema.ForeignKeyConstraint` object, + and returns a value based on + whether this constraint should definitely be included or excluded as + an inline constraint, or neither. 
If it returns False, the constraint + will definitely be included as a dependency that cannot be subject + to ALTER; if True, it will **only** be included as an ALTER result at + the end. Returning None means the constraint is included in the + table-based result unless it is detected as part of a dependency cycle. + + :param extra_dependencies: a sequence of 2-tuples of tables which will + also be considered as dependent on each other. + + .. seealso:: + + :func:`.sort_tables` + + + """ + + fixed_dependencies = set() + mutable_dependencies = set() + + if extra_dependencies is not None: + fixed_dependencies.update(extra_dependencies) + + remaining_fkcs = set() + for table in tables: + for fkc in table.foreign_key_constraints: + if fkc.use_alter is True: + remaining_fkcs.add(fkc) + continue + + if filter_fn: + filtered = filter_fn(fkc) + + if filtered is True: + remaining_fkcs.add(fkc) + continue + + dependent_on = fkc.referred_table + if dependent_on is not table: + mutable_dependencies.add((dependent_on, table)) + + fixed_dependencies.update( + (parent, table) for parent in table._extra_dependencies + ) + + try: + candidate_sort = list( + topological.sort( + fixed_dependencies.union(mutable_dependencies), + tables, + ) + ) + except exc.CircularDependencyError as err: + if _warn_for_cycles: + util.warn( + "Cannot correctly sort tables; there are unresolvable cycles " + 'between tables "%s", which is usually caused by mutually ' + "dependent foreign key constraints. Foreign key constraints " + "involving these tables will not be considered; this warning " + "may raise an error in a future release." 
+ % (", ".join(sorted(t.fullname for t in err.cycles)),) + ) + for edge in err.edges: + if edge in mutable_dependencies: + table = edge[1] + if table not in err.cycles: + continue + can_remove = [ + fkc + for fkc in table.foreign_key_constraints + if filter_fn is None or filter_fn(fkc) is not False + ] + remaining_fkcs.update(can_remove) + for fkc in can_remove: + dependent_on = fkc.referred_table + if dependent_on is not table: + mutable_dependencies.discard((dependent_on, table)) + candidate_sort = list( + topological.sort( + fixed_dependencies.union(mutable_dependencies), + tables, + ) + ) + + return [ + (table, table.foreign_key_constraints.difference(remaining_fkcs)) + for table in candidate_sort + ] + [(None, list(remaining_fkcs))] diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/default_comparator.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/default_comparator.py new file mode 100644 index 0000000..76131bc --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/default_comparator.py @@ -0,0 +1,552 @@ +# sql/default_comparator.py +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php + +"""Default implementation of SQL comparison operations. +""" + +from __future__ import annotations + +import typing +from typing import Any +from typing import Callable +from typing import Dict +from typing import NoReturn +from typing import Optional +from typing import Tuple +from typing import Type +from typing import Union + +from . import coercions +from . import operators +from . import roles +from . 
import type_api +from .elements import and_ +from .elements import BinaryExpression +from .elements import ClauseElement +from .elements import CollationClause +from .elements import CollectionAggregate +from .elements import ExpressionClauseList +from .elements import False_ +from .elements import Null +from .elements import OperatorExpression +from .elements import or_ +from .elements import True_ +from .elements import UnaryExpression +from .operators import OperatorType +from .. import exc +from .. import util + +_T = typing.TypeVar("_T", bound=Any) + +if typing.TYPE_CHECKING: + from .elements import ColumnElement + from .operators import custom_op + from .type_api import TypeEngine + + +def _boolean_compare( + expr: ColumnElement[Any], + op: OperatorType, + obj: Any, + *, + negate_op: Optional[OperatorType] = None, + reverse: bool = False, + _python_is_types: Tuple[Type[Any], ...] = (type(None), bool), + result_type: Optional[TypeEngine[bool]] = None, + **kwargs: Any, +) -> OperatorExpression[bool]: + if result_type is None: + result_type = type_api.BOOLEANTYPE + + if isinstance(obj, _python_is_types + (Null, True_, False_)): + # allow x ==/!= True/False to be treated as a literal. 
+ # this comes out to "== / != true/false" or "1/0" if those + # constants aren't supported and works on all platforms + if op in (operators.eq, operators.ne) and isinstance( + obj, (bool, True_, False_) + ): + return OperatorExpression._construct_for_op( + expr, + coercions.expect(roles.ConstExprRole, obj), + op, + type_=result_type, + negate=negate_op, + modifiers=kwargs, + ) + elif op in ( + operators.is_distinct_from, + operators.is_not_distinct_from, + ): + return OperatorExpression._construct_for_op( + expr, + coercions.expect(roles.ConstExprRole, obj), + op, + type_=result_type, + negate=negate_op, + modifiers=kwargs, + ) + elif expr._is_collection_aggregate: + obj = coercions.expect( + roles.ConstExprRole, element=obj, operator=op, expr=expr + ) + else: + # all other None uses IS, IS NOT + if op in (operators.eq, operators.is_): + return OperatorExpression._construct_for_op( + expr, + coercions.expect(roles.ConstExprRole, obj), + operators.is_, + negate=operators.is_not, + type_=result_type, + ) + elif op in (operators.ne, operators.is_not): + return OperatorExpression._construct_for_op( + expr, + coercions.expect(roles.ConstExprRole, obj), + operators.is_not, + negate=operators.is_, + type_=result_type, + ) + else: + raise exc.ArgumentError( + "Only '=', '!=', 'is_()', 'is_not()', " + "'is_distinct_from()', 'is_not_distinct_from()' " + "operators can be used with None/True/False" + ) + else: + obj = coercions.expect( + roles.BinaryElementRole, element=obj, operator=op, expr=expr + ) + + if reverse: + return OperatorExpression._construct_for_op( + obj, + expr, + op, + type_=result_type, + negate=negate_op, + modifiers=kwargs, + ) + else: + return OperatorExpression._construct_for_op( + expr, + obj, + op, + type_=result_type, + negate=negate_op, + modifiers=kwargs, + ) + + +def _custom_op_operate( + expr: ColumnElement[Any], + op: custom_op[Any], + obj: Any, + reverse: bool = False, + result_type: Optional[TypeEngine[Any]] = None, + **kw: Any, +) -> 
ColumnElement[Any]: + if result_type is None: + if op.return_type: + result_type = op.return_type + elif op.is_comparison: + result_type = type_api.BOOLEANTYPE + + return _binary_operate( + expr, op, obj, reverse=reverse, result_type=result_type, **kw + ) + + +def _binary_operate( + expr: ColumnElement[Any], + op: OperatorType, + obj: roles.BinaryElementRole[Any], + *, + reverse: bool = False, + result_type: Optional[TypeEngine[_T]] = None, + **kw: Any, +) -> OperatorExpression[_T]: + coerced_obj = coercions.expect( + roles.BinaryElementRole, obj, expr=expr, operator=op + ) + + if reverse: + left, right = coerced_obj, expr + else: + left, right = expr, coerced_obj + + if result_type is None: + op, result_type = left.comparator._adapt_expression( + op, right.comparator + ) + + return OperatorExpression._construct_for_op( + left, right, op, type_=result_type, modifiers=kw + ) + + +def _conjunction_operate( + expr: ColumnElement[Any], op: OperatorType, other: Any, **kw: Any +) -> ColumnElement[Any]: + if op is operators.and_: + return and_(expr, other) + elif op is operators.or_: + return or_(expr, other) + else: + raise NotImplementedError() + + +def _scalar( + expr: ColumnElement[Any], + op: OperatorType, + fn: Callable[[ColumnElement[Any]], ColumnElement[Any]], + **kw: Any, +) -> ColumnElement[Any]: + return fn(expr) + + +def _in_impl( + expr: ColumnElement[Any], + op: OperatorType, + seq_or_selectable: ClauseElement, + negate_op: OperatorType, + **kw: Any, +) -> ColumnElement[Any]: + seq_or_selectable = coercions.expect( + roles.InElementRole, seq_or_selectable, expr=expr, operator=op + ) + if "in_ops" in seq_or_selectable._annotations: + op, negate_op = seq_or_selectable._annotations["in_ops"] + + return _boolean_compare( + expr, op, seq_or_selectable, negate_op=negate_op, **kw + ) + + +def _getitem_impl( + expr: ColumnElement[Any], op: OperatorType, other: Any, **kw: Any +) -> ColumnElement[Any]: + if ( + isinstance(expr.type, type_api.INDEXABLE) + or 
isinstance(expr.type, type_api.TypeDecorator) + and isinstance(expr.type.impl_instance, type_api.INDEXABLE) + ): + other = coercions.expect( + roles.BinaryElementRole, other, expr=expr, operator=op + ) + return _binary_operate(expr, op, other, **kw) + else: + _unsupported_impl(expr, op, other, **kw) + + +def _unsupported_impl( + expr: ColumnElement[Any], op: OperatorType, *arg: Any, **kw: Any +) -> NoReturn: + raise NotImplementedError( + "Operator '%s' is not supported on this expression" % op.__name__ + ) + + +def _inv_impl( + expr: ColumnElement[Any], op: OperatorType, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.__inv__`.""" + + # undocumented element currently used by the ORM for + # relationship.contains() + if hasattr(expr, "negation_clause"): + return expr.negation_clause + else: + return expr._negate() + + +def _neg_impl( + expr: ColumnElement[Any], op: OperatorType, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.__neg__`.""" + return UnaryExpression(expr, operator=operators.neg, type_=expr.type) + + +def _bitwise_not_impl( + expr: ColumnElement[Any], op: OperatorType, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.bitwise_not`.""" + + return UnaryExpression( + expr, operator=operators.bitwise_not_op, type_=expr.type + ) + + +def _match_impl( + expr: ColumnElement[Any], op: OperatorType, other: Any, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.match`.""" + + return _boolean_compare( + expr, + operators.match_op, + coercions.expect( + roles.BinaryElementRole, + other, + expr=expr, + operator=operators.match_op, + ), + result_type=type_api.MATCHTYPE, + negate_op=( + operators.not_match_op + if op is operators.match_op + else operators.match_op + ), + **kw, + ) + + +def _distinct_impl( + expr: ColumnElement[Any], op: OperatorType, **kw: Any +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.distinct`.""" + return UnaryExpression( + expr, 
operator=operators.distinct_op, type_=expr.type + ) + + +def _between_impl( + expr: ColumnElement[Any], + op: OperatorType, + cleft: Any, + cright: Any, + **kw: Any, +) -> ColumnElement[Any]: + """See :meth:`.ColumnOperators.between`.""" + return BinaryExpression( + expr, + ExpressionClauseList._construct_for_list( + operators.and_, + type_api.NULLTYPE, + coercions.expect( + roles.BinaryElementRole, + cleft, + expr=expr, + operator=operators.and_, + ), + coercions.expect( + roles.BinaryElementRole, + cright, + expr=expr, + operator=operators.and_, + ), + group=False, + ), + op, + negate=( + operators.not_between_op + if op is operators.between_op + else operators.between_op + ), + modifiers=kw, + ) + + +def _collate_impl( + expr: ColumnElement[str], op: OperatorType, collation: str, **kw: Any +) -> ColumnElement[str]: + return CollationClause._create_collation_expression(expr, collation) + + +def _regexp_match_impl( + expr: ColumnElement[str], + op: OperatorType, + pattern: Any, + flags: Optional[str], + **kw: Any, +) -> ColumnElement[Any]: + return BinaryExpression( + expr, + coercions.expect( + roles.BinaryElementRole, + pattern, + expr=expr, + operator=operators.comma_op, + ), + op, + negate=operators.not_regexp_match_op, + modifiers={"flags": flags}, + ) + + +def _regexp_replace_impl( + expr: ColumnElement[Any], + op: OperatorType, + pattern: Any, + replacement: Any, + flags: Optional[str], + **kw: Any, +) -> ColumnElement[Any]: + return BinaryExpression( + expr, + ExpressionClauseList._construct_for_list( + operators.comma_op, + type_api.NULLTYPE, + coercions.expect( + roles.BinaryElementRole, + pattern, + expr=expr, + operator=operators.comma_op, + ), + coercions.expect( + roles.BinaryElementRole, + replacement, + expr=expr, + operator=operators.comma_op, + ), + group=False, + ), + op, + modifiers={"flags": flags}, + ) + + +# a mapping of operators with the method they use, along with +# additional keyword arguments to be passed +operator_lookup: Dict[ + str, 
+ Tuple[ + Callable[..., ColumnElement[Any]], + util.immutabledict[ + str, Union[OperatorType, Callable[..., ColumnElement[Any]]] + ], + ], +] = { + "and_": (_conjunction_operate, util.EMPTY_DICT), + "or_": (_conjunction_operate, util.EMPTY_DICT), + "inv": (_inv_impl, util.EMPTY_DICT), + "add": (_binary_operate, util.EMPTY_DICT), + "mul": (_binary_operate, util.EMPTY_DICT), + "sub": (_binary_operate, util.EMPTY_DICT), + "div": (_binary_operate, util.EMPTY_DICT), + "mod": (_binary_operate, util.EMPTY_DICT), + "bitwise_xor_op": (_binary_operate, util.EMPTY_DICT), + "bitwise_or_op": (_binary_operate, util.EMPTY_DICT), + "bitwise_and_op": (_binary_operate, util.EMPTY_DICT), + "bitwise_not_op": (_bitwise_not_impl, util.EMPTY_DICT), + "bitwise_lshift_op": (_binary_operate, util.EMPTY_DICT), + "bitwise_rshift_op": (_binary_operate, util.EMPTY_DICT), + "truediv": (_binary_operate, util.EMPTY_DICT), + "floordiv": (_binary_operate, util.EMPTY_DICT), + "custom_op": (_custom_op_operate, util.EMPTY_DICT), + "json_path_getitem_op": (_binary_operate, util.EMPTY_DICT), + "json_getitem_op": (_binary_operate, util.EMPTY_DICT), + "concat_op": (_binary_operate, util.EMPTY_DICT), + "any_op": ( + _scalar, + util.immutabledict({"fn": CollectionAggregate._create_any}), + ), + "all_op": ( + _scalar, + util.immutabledict({"fn": CollectionAggregate._create_all}), + ), + "lt": (_boolean_compare, util.immutabledict({"negate_op": operators.ge})), + "le": (_boolean_compare, util.immutabledict({"negate_op": operators.gt})), + "ne": (_boolean_compare, util.immutabledict({"negate_op": operators.eq})), + "gt": (_boolean_compare, util.immutabledict({"negate_op": operators.le})), + "ge": (_boolean_compare, util.immutabledict({"negate_op": operators.lt})), + "eq": (_boolean_compare, util.immutabledict({"negate_op": operators.ne})), + "is_distinct_from": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.is_not_distinct_from}), + ), + "is_not_distinct_from": ( + _boolean_compare, + 
util.immutabledict({"negate_op": operators.is_distinct_from}), + ), + "like_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_like_op}), + ), + "ilike_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_ilike_op}), + ), + "not_like_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.like_op}), + ), + "not_ilike_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.ilike_op}), + ), + "contains_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_contains_op}), + ), + "icontains_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_icontains_op}), + ), + "startswith_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_startswith_op}), + ), + "istartswith_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_istartswith_op}), + ), + "endswith_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_endswith_op}), + ), + "iendswith_op": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.not_iendswith_op}), + ), + "desc_op": ( + _scalar, + util.immutabledict({"fn": UnaryExpression._create_desc}), + ), + "asc_op": ( + _scalar, + util.immutabledict({"fn": UnaryExpression._create_asc}), + ), + "nulls_first_op": ( + _scalar, + util.immutabledict({"fn": UnaryExpression._create_nulls_first}), + ), + "nulls_last_op": ( + _scalar, + util.immutabledict({"fn": UnaryExpression._create_nulls_last}), + ), + "in_op": ( + _in_impl, + util.immutabledict({"negate_op": operators.not_in_op}), + ), + "not_in_op": ( + _in_impl, + util.immutabledict({"negate_op": operators.in_op}), + ), + "is_": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.is_}), + ), + "is_not": ( + _boolean_compare, + util.immutabledict({"negate_op": operators.is_not}), + ), + "collate": (_collate_impl, util.EMPTY_DICT), + "match_op": (_match_impl, util.EMPTY_DICT), + "not_match_op": 
(_match_impl, util.EMPTY_DICT), + "distinct_op": (_distinct_impl, util.EMPTY_DICT), + "between_op": (_between_impl, util.EMPTY_DICT), + "not_between_op": (_between_impl, util.EMPTY_DICT), + "neg": (_neg_impl, util.EMPTY_DICT), + "getitem": (_getitem_impl, util.EMPTY_DICT), + "lshift": (_unsupported_impl, util.EMPTY_DICT), + "rshift": (_unsupported_impl, util.EMPTY_DICT), + "contains": (_unsupported_impl, util.EMPTY_DICT), + "regexp_match_op": (_regexp_match_impl, util.EMPTY_DICT), + "not_regexp_match_op": (_regexp_match_impl, util.EMPTY_DICT), + "regexp_replace_op": (_regexp_replace_impl, util.EMPTY_DICT), +} diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/sql/dml.py b/venv/lib/python3.11/site-packages/sqlalchemy/sql/dml.py new file mode 100644 index 0000000..779be1d --- /dev/null +++ b/venv/lib/python3.11/site-packages/sqlalchemy/sql/dml.py @@ -0,0 +1,1817 @@ +# sql/dml.py +# Copyright (C) 2009-2024 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +""" +Provide :class:`_expression.Insert`, :class:`_expression.Update` and +:class:`_expression.Delete`. + +""" +from __future__ import annotations + +import collections.abc as collections_abc +import operator +from typing import Any +from typing import cast +from typing import Dict +from typing import Iterable +from typing import List +from typing import MutableMapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import coercions +from . import roles +from . 
import util as sql_util +from ._typing import _TP +from ._typing import _unexpected_kw +from ._typing import is_column_element +from ._typing import is_named_from_clause +from .base import _entity_namespace_key +from .base import _exclusive_against +from .base import _from_objects +from .base import _generative +from .base import _select_iterables +from .base import ColumnCollection +from .base import CompileState +from .base import DialectKWArgs +from .base import Executable +from .base import Generative +from .base import HasCompileState +from .elements import BooleanClauseList +from .elements import ClauseElement +from .elements import ColumnClause +from .elements import ColumnElement +from .elements import Null +from .selectable import Alias +from .selectable import ExecutableReturnsRows +from .selectable import FromClause +from .selectable import HasCTE +from .selectable import HasPrefixes +from .selectable import Join +from .selectable import SelectLabelStyle +from .selectable import TableClause +from .selectable import TypedReturnsRows +from .sqltypes import NullType +from .visitors import InternalTraversal +from .. import exc +from .. 
import util +from ..util.typing import Self +from ..util.typing import TypeGuard + +if TYPE_CHECKING: + from ._typing import _ColumnExpressionArgument + from ._typing import _ColumnsClauseArgument + from ._typing import _DMLColumnArgument + from ._typing import _DMLColumnKeyMapping + from ._typing import _DMLTableArgument + from ._typing import _T0 # noqa + from ._typing import _T1 # noqa + from ._typing import _T2 # noqa + from ._typing import _T3 # noqa + from ._typing import _T4 # noqa + from ._typing import _T5 # noqa + from ._typing import _T6 # noqa + from ._typing import _T7 # noqa + from ._typing import _TypedColumnClauseArgument as _TCCA # noqa + from .base import ReadOnlyColumnCollection + from .compiler import SQLCompiler + from .elements import KeyedColumnElement + from .selectable import _ColumnsClauseElement + from .selectable import _SelectIterable + from .selectable import Select + from .selectable import Selectable + + def isupdate(dml: DMLState) -> TypeGuard[UpdateDMLState]: ... + + def isdelete(dml: DMLState) -> TypeGuard[DeleteDMLState]: ... + + def isinsert(dml: DMLState) -> TypeGuard[InsertDMLState]: ... 
+ +else: + isupdate = operator.attrgetter("isupdate") + isdelete = operator.attrgetter("isdelete") + isinsert = operator.attrgetter("isinsert") + + +_T = TypeVar("_T", bound=Any) + +_DMLColumnElement = Union[str, ColumnClause[Any]] +_DMLTableElement = Union[TableClause, Alias, Join] + + +class DMLState(CompileState): + _no_parameters = True + _dict_parameters: Optional[MutableMapping[_DMLColumnElement, Any]] = None + _multi_parameters: Optional[ + List[MutableMapping[_DMLColumnElement, Any]] + ] = None + _ordered_values: Optional[List[Tuple[_DMLColumnElement, Any]]] = None + _parameter_ordering: Optional[List[_DMLColumnElement]] = None + _primary_table: FromClause + _supports_implicit_returning = True + + isupdate = False + isdelete = False + isinsert = False + + statement: UpdateBase + + def __init__( + self, statement: UpdateBase, compiler: SQLCompiler, **kw: Any + ): + raise NotImplementedError() + + @classmethod + def get_entity_description(cls, statement: UpdateBase) -> Dict[str, Any]: + return { + "name": ( + statement.table.name + if is_named_from_clause(statement.table) + else None + ), + "table": statement.table, + } + + @classmethod + def get_returning_column_descriptions( + cls, statement: UpdateBase + ) -> List[Dict[str, Any]]: + return [ + { + "name": c.key, + "type": c.type, + "expr": c, + } + for c in statement._all_selected_columns + ] + + @property + def dml_table(self) -> _DMLTableElement: + return self.statement.table + + if TYPE_CHECKING: + + @classmethod + def get_plugin_class(cls, statement: Executable) -> Type[DMLState]: ... 
+ + @classmethod + def _get_multi_crud_kv_pairs( + cls, + statement: UpdateBase, + multi_kv_iterator: Iterable[Dict[_DMLColumnArgument, Any]], + ) -> List[Dict[_DMLColumnElement, Any]]: + return [ + { + coercions.expect(roles.DMLColumnRole, k): v + for k, v in mapping.items() + } + for mapping in multi_kv_iterator + ] + + @classmethod + def _get_crud_kv_pairs( + cls, + statement: UpdateBase, + kv_iterator: Iterable[Tuple[_DMLColumnArgument, Any]], + needs_to_be_cacheable: bool, + ) -> List[Tuple[_DMLColumnElement, Any]]: + return [ + ( + coercions.expect(roles.DMLColumnRole, k), + ( + v + if not needs_to_be_cacheable + else coercions.expect( + roles.ExpressionElementRole, + v, + type_=NullType(), + is_crud=True, + ) + ), + ) + for k, v in kv_iterator + ] + + def _make_extra_froms( + self, statement: DMLWhereBase + ) -> Tuple[FromClause, List[FromClause]]: + froms: List[FromClause] = [] + + all_tables = list(sql_util.tables_from_leftmost(statement.table)) + primary_table = all_tables[0] + seen = {primary_table} + + consider = statement._where_criteria + if self._dict_parameters: + consider += tuple(self._dict_parameters.values()) + + for crit in consider: + for item in _from_objects(crit): + if not seen.intersection(item._cloned_set): + froms.append(item) + seen.update(item._cloned_set) + + froms.extend(all_tables[1:]) + return primary_table, froms + + def _process_values(self, statement: ValuesBase) -> None: + if self._no_parameters: + self._dict_parameters = statement._values + self._no_parameters = False + + def _process_select_values(self, statement: ValuesBase) -> None: + assert statement._select_names is not None + parameters: MutableMapping[_DMLColumnElement, Any] = { + name: Null() for name in statement._select_names + } + + if self._no_parameters: + self._no_parameters = False + self._dict_parameters = parameters + else: + # this condition normally not reachable as the Insert + # does not allow this construction to occur + assert False, "This statement 
already has parameters" + + def _no_multi_values_supported(self, statement: ValuesBase) -> NoReturn: + raise exc.InvalidRequestError( + "%s construct does not support " + "multiple parameter sets." % statement.__visit_name__.upper() + ) + + def _cant_mix_formats_error(self) -> NoReturn: + raise exc.InvalidRequestError( + "Can't mix single and multiple VALUES " + "formats in one INSERT statement; one style appends to a " + "list while the other replaces values, so the intent is " + "ambiguous." + ) + + +@CompileState.plugin_for("default", "insert") +class InsertDMLState(DMLState): + isinsert = True + + include_table_with_column_exprs = False + + _has_multi_parameters = False + + def __init__( + self, + statement: Insert, + compiler: SQLCompiler, + disable_implicit_returning: bool = False, + **kw: Any, + ): + self.statement = statement + self._primary_table = statement.table + + if disable_implicit_returning: + self._supports_implicit_returning = False + + self.isinsert = True + if statement._select_names: + self._process_select_values(statement) + if statement._values is not None: + self._process_values(statement) + if statement._multi_values: + self._process_multi_values(statement) + + @util.memoized_property + def _insert_col_keys(self) -> List[str]: + # this is also done in crud.py -> _key_getters_for_crud_column + return [ + coercions.expect(roles.DMLColumnRole, col, as_key=True) + for col in self._dict_parameters or () + ] + + def _process_values(self, statement: ValuesBase) -> None: + if self._no_parameters: + self._has_multi_parameters = False + self._dict_parameters = statement._values + self._no_parameters = False + elif self._has_multi_parameters: + self._cant_mix_formats_error() + + def _process_multi_values(self, statement: ValuesBase) -> None: + for parameters in statement._multi_values: + multi_parameters: List[MutableMapping[_DMLColumnElement, Any]] = [ + ( + { + c.key: value + for c, value in zip(statement.table.c, parameter_set) + } + if 
isinstance(parameter_set, collections_abc.Sequence) + else parameter_set + ) + for parameter_set in parameters + ] + + if self._no_parameters: + self._no_parameters = False + self._has_multi_parameters = True + self._multi_parameters = multi_parameters + self._dict_parameters = self._multi_parameters[0] + elif not self._has_multi_parameters: + self._cant_mix_formats_error() + else: + assert self._multi_parameters + self._multi_parameters.extend(multi_parameters) + + +@CompileState.plugin_for("default", "update") +class UpdateDMLState(DMLState): + isupdate = True + + include_table_with_column_exprs = False + + def __init__(self, statement: Update, compiler: SQLCompiler, **kw: Any): + self.statement = statement + + self.isupdate = True + if statement._ordered_values is not None: + self._process_ordered_values(statement) + elif statement._values is not None: + self._process_values(statement) + elif statement._multi_values: + self._no_multi_values_supported(statement) + t, ef = self._make_extra_froms(statement) + self._primary_table = t + self._extra_froms = ef + + self.is_multitable = mt = ef + self.include_table_with_column_exprs = bool( + mt and compiler.render_table_with_column_in_update_from + ) + + def _process_ordered_values(self, statement: ValuesBase) -> None: + parameters = statement._ordered_values + + if self._no_parameters: + self._no_parameters = False + assert parameters is not None + self._dict_parameters = dict(parameters) + self._ordered_values = parameters + self._parameter_ordering = [key for key, value in parameters] + else: + raise exc.InvalidRequestError( + "Can only invoke ordered_values() once, and not mixed " + "with any other values() call" + ) + + +@CompileState.plugin_for("default", "delete") +class DeleteDMLState(DMLState): + isdelete = True + + def __init__(self, statement: Delete, compiler: SQLCompiler, **kw: Any): + self.statement = statement + + self.isdelete = True + t, ef = self._make_extra_froms(statement) + self._primary_table = t 
+ self._extra_froms = ef + self.is_multitable = ef + + +class UpdateBase( + roles.DMLRole, + HasCTE, + HasCompileState, + DialectKWArgs, + HasPrefixes, + Generative, + ExecutableReturnsRows, + ClauseElement, +): + """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.""" + + __visit_name__ = "update_base" + + _hints: util.immutabledict[Tuple[_DMLTableElement, str], str] = ( + util.EMPTY_DICT + ) + named_with_column = False + + _label_style: SelectLabelStyle = ( + SelectLabelStyle.LABEL_STYLE_DISAMBIGUATE_ONLY + ) + table: _DMLTableElement + + _return_defaults = False + _return_defaults_columns: Optional[Tuple[_ColumnsClauseElement, ...]] = ( + None + ) + _supplemental_returning: Optional[Tuple[_ColumnsClauseElement, ...]] = None + _returning: Tuple[_ColumnsClauseElement, ...] = () + + is_dml = True + + def _generate_fromclause_column_proxies( + self, fromclause: FromClause + ) -> None: + fromclause._columns._populate_separate_keys( + col._make_proxy(fromclause) + for col in self._all_selected_columns + if is_column_element(col) + ) + + def params(self, *arg: Any, **kw: Any) -> NoReturn: + """Set the parameters for the statement. + + This method raises ``NotImplementedError`` on the base class, + and is overridden by :class:`.ValuesBase` to provide the + SET/VALUES clause of UPDATE and INSERT. + + """ + raise NotImplementedError( + "params() is not supported for INSERT/UPDATE/DELETE statements." + " To set the values for an INSERT or UPDATE statement, use" + " stmt.values(**parameters)." + ) + + @_generative + def with_dialect_options(self, **opt: Any) -> Self: + """Add dialect options to this INSERT/UPDATE/DELETE object. + + e.g.:: + + upd = table.update().dialect_options(mysql_limit=10) + + .. versionadded: 1.4 - this method supersedes the dialect options + associated with the constructor. 
+ + + """ + self._validate_dialect_kwargs(opt) + return self + + @_generative + def return_defaults( + self, + *cols: _DMLColumnArgument, + supplemental_cols: Optional[Iterable[_DMLColumnArgument]] = None, + sort_by_parameter_order: bool = False, + ) -> Self: + """Make use of a :term:`RETURNING` clause for the purpose + of fetching server-side expressions and defaults, for supporting + backends only. + + .. deepalchemy:: + + The :meth:`.UpdateBase.return_defaults` method is used by the ORM + for its internal work in fetching newly generated primary key + and server default values, in particular to provide the underyling + implementation of the :paramref:`_orm.Mapper.eager_defaults` + ORM feature as well as to allow RETURNING support with bulk + ORM inserts. Its behavior is fairly idiosyncratic + and is not really intended for general use. End users should + stick with using :meth:`.UpdateBase.returning` in order to + add RETURNING clauses to their INSERT, UPDATE and DELETE + statements. + + Normally, a single row INSERT statement will automatically populate the + :attr:`.CursorResult.inserted_primary_key` attribute when executed, + which stores the primary key of the row that was just inserted in the + form of a :class:`.Row` object with column names as named tuple keys + (and the :attr:`.Row._mapping` view fully populated as well). The + dialect in use chooses the strategy to use in order to populate this + data; if it was generated using server-side defaults and / or SQL + expressions, dialect-specific approaches such as ``cursor.lastrowid`` + or ``RETURNING`` are typically used to acquire the new primary key + value. + + However, when the statement is modified by calling + :meth:`.UpdateBase.return_defaults` before executing the statement, + additional behaviors take place **only** for backends that support + RETURNING and for :class:`.Table` objects that maintain the + :paramref:`.Table.implicit_returning` parameter at its default value of + ``True``. 
In these cases, when the :class:`.CursorResult` is returned + from the statement's execution, not only will + :attr:`.CursorResult.inserted_primary_key` be populated as always, the + :attr:`.CursorResult.returned_defaults` attribute will also be + populated with a :class:`.Row` named-tuple representing the full range + of server generated + values from that single row, including values for any columns that + specify :paramref:`_schema.Column.server_default` or which make use of + :paramref:`_schema.Column.default` using a SQL expression. + + When invoking INSERT statements with multiple rows using + :ref:`insertmanyvalues `, the + :meth:`.UpdateBase.return_defaults` modifier will have the effect of + the :attr:`_engine.CursorResult.inserted_primary_key_rows` and + :attr:`_engine.CursorResult.returned_defaults_rows` attributes being + fully populated with lists of :class:`.Row` objects representing newly + inserted primary key values as well as newly inserted server generated + values for each row inserted. The + :attr:`.CursorResult.inserted_primary_key` and + :attr:`.CursorResult.returned_defaults` attributes will also continue + to be populated with the first row of these two collections. + + If the backend does not support RETURNING or the :class:`.Table` in use + has disabled :paramref:`.Table.implicit_returning`, then no RETURNING + clause is added and no additional data is fetched, however the + INSERT, UPDATE or DELETE statement proceeds normally. 
+ + E.g.:: + + stmt = table.insert().values(data='newdata').return_defaults() + + result = connection.execute(stmt) + + server_created_at = result.returned_defaults['created_at'] + + When used against an UPDATE statement + :meth:`.UpdateBase.return_defaults` instead looks for columns that + include :paramref:`_schema.Column.onupdate` or + :paramref:`_schema.Column.server_onupdate` parameters assigned, when + constructing the columns that will be included in the RETURNING clause + by default if explicit columns were not specified. When used against a + DELETE statement, no columns are included in RETURNING by default, they + instead must be specified explicitly as there are no columns that + normally change values when a DELETE statement proceeds. + + .. versionadded:: 2.0 :meth:`.UpdateBase.return_defaults` is supported + for DELETE statements also and has been moved from + :class:`.ValuesBase` to :class:`.UpdateBase`. + + The :meth:`.UpdateBase.return_defaults` method is mutually exclusive + against the :meth:`.UpdateBase.returning` method and errors will be + raised during the SQL compilation process if both are used at the same + time on one statement. The RETURNING clause of the INSERT, UPDATE or + DELETE statement is therefore controlled by only one of these methods + at a time. + + The :meth:`.UpdateBase.return_defaults` method differs from + :meth:`.UpdateBase.returning` in these ways: + + 1. :meth:`.UpdateBase.return_defaults` method causes the + :attr:`.CursorResult.returned_defaults` collection to be populated + with the first row from the RETURNING result. This attribute is not + populated when using :meth:`.UpdateBase.returning`. + + 2. :meth:`.UpdateBase.return_defaults` is compatible with existing + logic used to fetch auto-generated primary key values that are then + populated into the :attr:`.CursorResult.inserted_primary_key` + attribute. 
By contrast, using :meth:`.UpdateBase.returning` will + have the effect of the :attr:`.CursorResult.inserted_primary_key` + attribute being left unpopulated. + + 3. :meth:`.UpdateBase.return_defaults` can be called against any + backend. Backends that don't support RETURNING will skip the usage + of the feature, rather than raising an exception, *unless* + ``supplemental_cols`` is passed. The return value + of :attr:`_engine.CursorResult.returned_defaults` will be ``None`` + for backends that don't support RETURNING or for which the target + :class:`.Table` sets :paramref:`.Table.implicit_returning` to + ``False``. + + 4. An INSERT statement invoked with executemany() is supported if the + backend database driver supports the + :ref:`insertmanyvalues ` + feature which is now supported by most SQLAlchemy-included backends. + When executemany is used, the + :attr:`_engine.CursorResult.returned_defaults_rows` and + :attr:`_engine.CursorResult.inserted_primary_key_rows` accessors + will return the inserted defaults and primary keys. + + .. versionadded:: 1.4 Added + :attr:`_engine.CursorResult.returned_defaults_rows` and + :attr:`_engine.CursorResult.inserted_primary_key_rows` accessors. + In version 2.0, the underlying implementation which fetches and + populates the data for these attributes was generalized to be + supported by most backends, whereas in 1.4 they were only + supported by the ``psycopg2`` driver. + + + :param cols: optional list of column key names or + :class:`_schema.Column` that acts as a filter for those columns that + will be fetched. + :param supplemental_cols: optional list of RETURNING expressions, + in the same form as one would pass to the + :meth:`.UpdateBase.returning` method. 
When present, the additional + columns will be included in the RETURNING clause, and the + :class:`.CursorResult` object will be "rewound" when returned, so + that methods like :meth:`.CursorResult.all` will return new rows + mostly as though the statement used :meth:`.UpdateBase.returning` + directly. However, unlike when using :meth:`.UpdateBase.returning` + directly, the **order of the columns is undefined**, so can only be + targeted using names or :attr:`.Row._mapping` keys; they cannot + reliably be targeted positionally. + + .. versionadded:: 2.0 + + :param sort_by_parameter_order: for a batch INSERT that is being + executed against multiple parameter sets, organize the results of + RETURNING so that the returned rows correspond to the order of + parameter sets passed in. This applies only to an :term:`executemany` + execution for supporting dialects and typically makes use of the + :term:`insertmanyvalues` feature. + + .. versionadded:: 2.0.10 + + .. seealso:: + + :ref:`engine_insertmanyvalues_returning_order` - background on + sorting of RETURNING rows for bulk INSERT + + .. 
seealso:: + + :meth:`.UpdateBase.returning` + + :attr:`_engine.CursorResult.returned_defaults` + + :attr:`_engine.CursorResult.returned_defaults_rows` + + :attr:`_engine.CursorResult.inserted_primary_key` + + :attr:`_engine.CursorResult.inserted_primary_key_rows` + + """ + + if self._return_defaults: + # note _return_defaults_columns = () means return all columns, + # so if we have been here before, only update collection if there + # are columns in the collection + if self._return_defaults_columns and cols: + self._return_defaults_columns = tuple( + util.OrderedSet(self._return_defaults_columns).union( + coercions.expect(roles.ColumnsClauseRole, c) + for c in cols + ) + ) + else: + # set for all columns + self._return_defaults_columns = () + else: + self._return_defaults_columns = tuple( + coercions.expect(roles.ColumnsClauseRole, c) for c in cols + ) + self._return_defaults = True + if sort_by_parameter_order: + if not self.is_insert: + raise exc.ArgumentError( + "The 'sort_by_parameter_order' argument to " + "return_defaults() only applies to INSERT statements" + ) + self._sort_by_parameter_order = True + if supplemental_cols: + # uniquifying while also maintaining order (the maintain of order + # is for test suites but also for vertical splicing + supplemental_col_tup = ( + coercions.expect(roles.ColumnsClauseRole, c) + for c in supplemental_cols + ) + + if self._supplemental_returning is None: + self._supplemental_returning = tuple( + util.unique_list(supplemental_col_tup) + ) + else: + self._supplemental_returning = tuple( + util.unique_list( + self._supplemental_returning + + tuple(supplemental_col_tup) + ) + ) + + return self + + @_generative + def returning( + self, + *cols: _ColumnsClauseArgument[Any], + sort_by_parameter_order: bool = False, + **__kw: Any, + ) -> UpdateBase: + r"""Add a :term:`RETURNING` or equivalent clause to this statement. + + e.g.: + + .. sourcecode:: pycon+sql + + >>> stmt = ( + ... table.update() + ... 
.where(table.c.data == "value") + ... .values(status="X") + ... .returning(table.c.server_flag, table.c.updated_timestamp) + ... ) + >>> print(stmt) + {printsql}UPDATE some_table SET status=:status + WHERE some_table.data = :data_1 + RETURNING some_table.server_flag, some_table.updated_timestamp + + The method may be invoked multiple times to add new entries to the + list of expressions to be returned. + + .. versionadded:: 1.4.0b2 The method may be invoked multiple times to + add new entries to the list of expressions to be returned. + + The given collection of column expressions should be derived from the + table that is the target of the INSERT, UPDATE, or DELETE. While + :class:`_schema.Column` objects are typical, the elements can also be + expressions: + + .. sourcecode:: pycon+sql + + >>> stmt = table.insert().returning( + ... (table.c.first_name + " " + table.c.last_name).label("fullname") + ... ) + >>> print(stmt) + {printsql}INSERT INTO some_table (first_name, last_name) + VALUES (:first_name, :last_name) + RETURNING some_table.first_name || :first_name_1 || some_table.last_name AS fullname + + Upon compilation, a RETURNING clause, or database equivalent, + will be rendered within the statement. For INSERT and UPDATE, + the values are the newly inserted/updated values. For DELETE, + the values are those of the rows which were deleted. + + Upon execution, the values of the columns to be returned are made + available via the result set and can be iterated using + :meth:`_engine.CursorResult.fetchone` and similar. + For DBAPIs which do not + natively support returning values (i.e. cx_oracle), SQLAlchemy will + approximate this behavior at the result level so that a reasonable + amount of behavioral neutrality is provided. + + Note that not all databases/DBAPIs + support RETURNING. For those backends with no support, + an exception is raised upon compilation and/or execution. 
+ For those who do support it, the functionality across backends + varies greatly, including restrictions on executemany() + and other statements which return multiple rows. Please + read the documentation notes for the database in use in + order to determine the availability of RETURNING. + + :param \*cols: series of columns, SQL expressions, or whole tables + entities to be returned. + :param sort_by_parameter_order: for a batch INSERT that is being + executed against multiple parameter sets, organize the results of + RETURNING so that the returned rows correspond to the order of + parameter sets passed in. This applies only to an :term:`executemany` + execution for supporting dialects and typically makes use of the + :term:`insertmanyvalues` feature. + + .. versionadded:: 2.0.10 + + .. seealso:: + + :ref:`engine_insertmanyvalues_returning_order` - background on + sorting of RETURNING rows for bulk INSERT (Core level discussion) + + :ref:`orm_queryguide_bulk_insert_returning_ordered` - example of + use with :ref:`orm_queryguide_bulk_insert` (ORM level discussion) + + .. seealso:: + + :meth:`.UpdateBase.return_defaults` - an alternative method tailored + towards efficient fetching of server-side defaults and triggers + for single-row INSERTs or UPDATEs. 
+ + :ref:`tutorial_insert_returning` - in the :ref:`unified_tutorial` + + """ # noqa: E501 + if __kw: + raise _unexpected_kw("UpdateBase.returning()", __kw) + if self._return_defaults: + raise exc.InvalidRequestError( + "return_defaults() is already configured on this statement" + ) + self._returning += tuple( + coercions.expect(roles.ColumnsClauseRole, c) for c in cols + ) + if sort_by_parameter_order: + if not self.is_insert: + raise exc.ArgumentError( + "The 'sort_by_parameter_order' argument to returning() " + "only applies to INSERT statements" + ) + self._sort_by_parameter_order = True + return self + + def corresponding_column( + self, column: KeyedColumnElement[Any], require_embedded: bool = False + ) -> Optional[ColumnElement[Any]]: + return self.exported_columns.corresponding_column( + column, require_embedded=require_embedded + ) + + @util.ro_memoized_property + def _all_selected_columns(self) -> _SelectIterable: + return [c for c in _select_iterables(self._returning)] + + @util.ro_memoized_property + def exported_columns( + self, + ) -> ReadOnlyColumnCollection[Optional[str], ColumnElement[Any]]: + """Return the RETURNING columns as a column collection for this + statement. + + .. versionadded:: 1.4 + + """ + return ColumnCollection( + (c.key, c) + for c in self._all_selected_columns + if is_column_element(c) + ).as_readonly() + + @_generative + def with_hint( + self, + text: str, + selectable: Optional[_DMLTableArgument] = None, + dialect_name: str = "*", + ) -> Self: + """Add a table hint for a single table to this + INSERT/UPDATE/DELETE statement. + + .. note:: + + :meth:`.UpdateBase.with_hint` currently applies only to + Microsoft SQL Server. For MySQL INSERT/UPDATE/DELETE hints, use + :meth:`.UpdateBase.prefix_with`. 
+ + The text of the hint is rendered in the appropriate + location for the database backend in use, relative + to the :class:`_schema.Table` that is the subject of this + statement, or optionally to that of the given + :class:`_schema.Table` passed as the ``selectable`` argument. + + The ``dialect_name`` option will limit the rendering of a particular + hint to a particular backend. Such as, to add a hint + that only takes effect for SQL Server:: + + mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql") + + :param text: Text of the hint. + :param selectable: optional :class:`_schema.Table` that specifies + an element of the FROM clause within an UPDATE or DELETE + to be the subject of the hint - applies only to certain backends. + :param dialect_name: defaults to ``*``, if specified as the name + of a particular dialect, will apply these hints only when + that dialect is in use. + """ + if selectable is None: + selectable = self.table + else: + selectable = coercions.expect(roles.DMLTableRole, selectable) + self._hints = self._hints.union({(selectable, dialect_name): text}) + return self + + @property + def entity_description(self) -> Dict[str, Any]: + """Return a :term:`plugin-enabled` description of the table and/or + entity which this DML construct is operating against. + + This attribute is generally useful when using the ORM, as an + extended structure which includes information about mapped + entities is returned. The section :ref:`queryguide_inspection` + contains more background. + + For a Core statement, the structure returned by this accessor + is derived from the :attr:`.UpdateBase.table` attribute, and + refers to the :class:`.Table` being inserted, updated, or deleted:: + + >>> stmt = insert(user_table) + >>> stmt.entity_description + { + "name": "user_table", + "table": Table("user_table", ...) + } + + .. versionadded:: 1.4.33 + + .. 
seealso:: + + :attr:`.UpdateBase.returning_column_descriptions` + + :attr:`.Select.column_descriptions` - entity information for + a :func:`.select` construct + + :ref:`queryguide_inspection` - ORM background + + """ + meth = DMLState.get_plugin_class(self).get_entity_description + return meth(self) + + @property + def returning_column_descriptions(self) -> List[Dict[str, Any]]: + """Return a :term:`plugin-enabled` description of the columns + which this DML construct is RETURNING against, in other words + the expressions established as part of :meth:`.UpdateBase.returning`. + + This attribute is generally useful when using the ORM, as an + extended structure which includes information about mapped + entities is returned. The section :ref:`queryguide_inspection` + contains more background. + + For a Core statement, the structure returned by this accessor is + derived from the same objects that are returned by the + :attr:`.UpdateBase.exported_columns` accessor:: + + >>> stmt = insert(user_table).returning(user_table.c.id, user_table.c.name) + >>> stmt.entity_description + [ + { + "name": "id", + "type": Integer, + "expr": Column("id", Integer(), table=, ...) + }, + { + "name": "name", + "type": String(), + "expr": Column("name", String(), table=, ...) + }, + ] + + .. versionadded:: 1.4.33 + + .. seealso:: + + :attr:`.UpdateBase.entity_description` + + :attr:`.Select.column_descriptions` - entity information for + a :func:`.select` construct + + :ref:`queryguide_inspection` - ORM background + + """ # noqa: E501 + meth = DMLState.get_plugin_class( + self + ).get_returning_column_descriptions + return meth(self) + + +class ValuesBase(UpdateBase): + """Supplies support for :meth:`.ValuesBase.values` to + INSERT and UPDATE constructs.""" + + __visit_name__ = "values_base" + + _supports_multi_parameters = False + + select: Optional[Select[Any]] = None + """SELECT statement for INSERT .. 
FROM SELECT""" + + _post_values_clause: Optional[ClauseElement] = None + """used by extensions to Insert etc. to add additional syntacitcal + constructs, e.g. ON CONFLICT etc.""" + + _values: Optional[util.immutabledict[_DMLColumnElement, Any]] = None + _multi_values: Tuple[ + Union[ + Sequence[Dict[_DMLColumnElement, Any]], + Sequence[Sequence[Any]], + ], + ..., + ] = () + + _ordered_values: Optional[List[Tuple[_DMLColumnElement, Any]]] = None + + _select_names: Optional[List[str]] = None + _inline: bool = False + + def __init__(self, table: _DMLTableArgument): + self.table = coercions.expect( + roles.DMLTableRole, table, apply_propagate_attrs=self + ) + + @_generative + @_exclusive_against( + "_select_names", + "_ordered_values", + msgs={ + "_select_names": "This construct already inserts from a SELECT", + "_ordered_values": "This statement already has ordered " + "values present", + }, + ) + def values( + self, + *args: Union[ + _DMLColumnKeyMapping[Any], + Sequence[Any], + ], + **kwargs: Any, + ) -> Self: + r"""Specify a fixed VALUES clause for an INSERT statement, or the SET + clause for an UPDATE. + + Note that the :class:`_expression.Insert` and + :class:`_expression.Update` + constructs support + per-execution time formatting of the VALUES and/or SET clauses, + based on the arguments passed to :meth:`_engine.Connection.execute`. + However, the :meth:`.ValuesBase.values` method can be used to "fix" a + particular set of parameters into the statement. + + Multiple calls to :meth:`.ValuesBase.values` will produce a new + construct, each one with the parameter list modified to include + the new parameters sent. In the typical case of a single + dictionary of parameters, the newly passed keys will replace + the same keys in the previous construct. In the case of a list-based + "multiple values" construct, each new list of values is extended + onto the existing list of values. 
+ + :param \**kwargs: key value pairs representing the string key + of a :class:`_schema.Column` + mapped to the value to be rendered into the + VALUES or SET clause:: + + users.insert().values(name="some name") + + users.update().where(users.c.id==5).values(name="some name") + + :param \*args: As an alternative to passing key/value parameters, + a dictionary, tuple, or list of dictionaries or tuples can be passed + as a single positional argument in order to form the VALUES or + SET clause of the statement. The forms that are accepted vary + based on whether this is an :class:`_expression.Insert` or an + :class:`_expression.Update` construct. + + For either an :class:`_expression.Insert` or + :class:`_expression.Update` + construct, a single dictionary can be passed, which works the same as + that of the kwargs form:: + + users.insert().values({"name": "some name"}) + + users.update().values({"name": "some new name"}) + + Also for either form but more typically for the + :class:`_expression.Insert` construct, a tuple that contains an + entry for every column in the table is also accepted:: + + users.insert().values((5, "some name")) + + The :class:`_expression.Insert` construct also supports being + passed a list of dictionaries or full-table-tuples, which on the + server will render the less common SQL syntax of "multiple values" - + this syntax is supported on backends such as SQLite, PostgreSQL, + MySQL, but not necessarily others:: + + users.insert().values([ + {"name": "some name"}, + {"name": "some other name"}, + {"name": "yet another name"}, + ]) + + The above form would render a multiple VALUES statement similar to:: + + INSERT INTO users (name) VALUES + (:name_1), + (:name_2), + (:name_3) + + It is essential to note that **passing multiple values is + NOT the same as using traditional executemany() form**. The above + syntax is a **special** syntax not typically used. 
To emit an + INSERT statement against multiple rows, the normal method is + to pass a multiple values list to the + :meth:`_engine.Connection.execute` + method, which is supported by all database backends and is generally + more efficient for a very large number of parameters. + + .. seealso:: + + :ref:`tutorial_multiple_parameters` - an introduction to + the traditional Core method of multiple parameter set + invocation for INSERTs and other statements. + + The UPDATE construct also supports rendering the SET parameters + in a specific order. For this feature refer to the + :meth:`_expression.Update.ordered_values` method. + + .. seealso:: + + :meth:`_expression.Update.ordered_values` + + + """ + if args: + # positional case. this is currently expensive. we don't + # yet have positional-only args so we have to check the length. + # then we need to check multiparams vs. single dictionary. + # since the parameter format is needed in order to determine + # a cache key, we need to determine this up front. + arg = args[0] + + if kwargs: + raise exc.ArgumentError( + "Can't pass positional and kwargs to values() " + "simultaneously" + ) + elif len(args) > 1: + raise exc.ArgumentError( + "Only a single dictionary/tuple or list of " + "dictionaries/tuples is accepted positionally." + ) + + elif isinstance(arg, collections_abc.Sequence): + if arg and isinstance(arg[0], dict): + multi_kv_generator = DMLState.get_plugin_class( + self + )._get_multi_crud_kv_pairs + self._multi_values += (multi_kv_generator(self, arg),) + return self + + if arg and isinstance(arg[0], (list, tuple)): + self._multi_values += (arg,) + return self + + if TYPE_CHECKING: + # crud.py raises during compilation if this is not the + # case + assert isinstance(self, Insert) + + # tuple values + arg = {c.key: value for c, value in zip(self.table.c, arg)} + + else: + # kwarg path. this is the most common path for non-multi-params + # so this is fairly quick. 
+ arg = cast("Dict[_DMLColumnArgument, Any]", kwargs) + if args: + raise exc.ArgumentError( + "Only a single dictionary/tuple or list of " + "dictionaries/tuples is accepted positionally." + ) + + # for top level values(), convert literals to anonymous bound + # parameters at statement construction time, so that these values can + # participate in the cache key process like any other ClauseElement. + # crud.py now intercepts bound parameters with unique=True from here + # and ensures they get the "crud"-style name when rendered. + + kv_generator = DMLState.get_plugin_class(self)._get_crud_kv_pairs + coerced_arg = dict(kv_generator(self, arg.items(), True)) + if self._values: + self._values = self._values.union(coerced_arg) + else: + self._values = util.immutabledict(coerced_arg) + return self + + +class Insert(ValuesBase): + """Represent an INSERT construct. + + The :class:`_expression.Insert` object is created using the + :func:`_expression.insert()` function. + + """ + + __visit_name__ = "insert" + + _supports_multi_parameters = True + + select = None + include_insert_from_select_defaults = False + + _sort_by_parameter_order: bool = False + + is_insert = True + + table: TableClause + + _traverse_internals = ( + [ + ("table", InternalTraversal.dp_clauseelement), + ("_inline", InternalTraversal.dp_boolean), + ("_select_names", InternalTraversal.dp_string_list), + ("_values", InternalTraversal.dp_dml_values), + ("_multi_values", InternalTraversal.dp_dml_multi_values), + ("select", InternalTraversal.dp_clauseelement), + ("_post_values_clause", InternalTraversal.dp_clauseelement), + ("_returning", InternalTraversal.dp_clauseelement_tuple), + ("_hints", InternalTraversal.dp_table_hint_list), + ("_return_defaults", InternalTraversal.dp_boolean), + ( + "_return_defaults_columns", + InternalTraversal.dp_clauseelement_tuple, + ), + ("_sort_by_parameter_order", InternalTraversal.dp_boolean), + ] + + HasPrefixes._has_prefixes_traverse_internals + + 
DialectKWArgs._dialect_kwargs_traverse_internals + + Executable._executable_traverse_internals + + HasCTE._has_ctes_traverse_internals + ) + + def __init__(self, table: _DMLTableArgument): + super().__init__(table) + + @_generative + def inline(self) -> Self: + """Make this :class:`_expression.Insert` construct "inline" . + + When set, no attempt will be made to retrieve the + SQL-generated default values to be provided within the statement; + in particular, + this allows SQL expressions to be rendered 'inline' within the + statement without the need to pre-execute them beforehand; for + backends that support "returning", this turns off the "implicit + returning" feature for the statement. + + + .. versionchanged:: 1.4 the :paramref:`_expression.Insert.inline` + parameter + is now superseded by the :meth:`_expression.Insert.inline` method. + + """ + self._inline = True + return self + + @_generative + def from_select( + self, + names: Sequence[_DMLColumnArgument], + select: Selectable, + include_defaults: bool = True, + ) -> Self: + """Return a new :class:`_expression.Insert` construct which represents + an ``INSERT...FROM SELECT`` statement. + + e.g.:: + + sel = select(table1.c.a, table1.c.b).where(table1.c.c > 5) + ins = table2.insert().from_select(['a', 'b'], sel) + + :param names: a sequence of string column names or + :class:`_schema.Column` + objects representing the target columns. + :param select: a :func:`_expression.select` construct, + :class:`_expression.FromClause` + or other construct which resolves into a + :class:`_expression.FromClause`, + such as an ORM :class:`_query.Query` object, etc. The order of + columns returned from this FROM clause should correspond to the + order of columns sent as the ``names`` parameter; while this + is not checked before passing along to the database, the database + would normally raise an exception if these column lists don't + correspond. 
+ :param include_defaults: if True, non-server default values and + SQL expressions as specified on :class:`_schema.Column` objects + (as documented in :ref:`metadata_defaults_toplevel`) not + otherwise specified in the list of names will be rendered + into the INSERT and SELECT statements, so that these values are also + included in the data to be inserted. + + .. note:: A Python-side default that uses a Python callable function + will only be invoked **once** for the whole statement, and **not + per row**. + + """ + + if self._values: + raise exc.InvalidRequestError( + "This construct already inserts value expressions" + ) + + self._select_names = [ + coercions.expect(roles.DMLColumnRole, name, as_key=True) + for name in names + ] + self._inline = True + self.include_insert_from_select_defaults = include_defaults + self.select = coercions.expect(roles.DMLSelectRole, select) + return self + + if TYPE_CHECKING: + # START OVERLOADED FUNCTIONS self.returning ReturningInsert 1-8 ", *, sort_by_parameter_order: bool = False" # noqa: E501 + + # code within this block is **programmatically, + # statically generated** by tools/generate_tuple_map_overloads.py + + @overload + def returning( + self, __ent0: _TCCA[_T0], *, sort_by_parameter_order: bool = False + ) -> ReturningInsert[Tuple[_T0]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[Tuple[_T0, _T1]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[Tuple[_T0, _T1, _T2]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + *, + sort_by_parameter_order: bool = False, + ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3]]: ... 
class DMLWhereBase:
    """Mixin granting WHERE-clause support to DML constructs such as
    :class:`.Update` and :class:`.Delete`."""

    # the table (or other DML target) this statement operates upon
    table: _DMLTableElement

    # accumulated WHERE criteria; joined together with AND at compile time
    _where_criteria: Tuple[ColumnElement[Any], ...] = ()

    @_generative
    def where(self, *whereclause: _ColumnExpressionArgument[bool]) -> Self:
        """Return a new construct with the given expression(s) added to
        its WHERE clause, joined to the existing clause via AND, if any.

        Both :meth:`_dml.Update.where` and :meth:`_dml.Delete.where`
        support multiple-table forms, including database-specific
        ``UPDATE...FROM`` as well as ``DELETE..USING``.  For backends that
        don't have multiple-table support, a backend agnostic approach
        to using multiple tables is to make use of correlated subqueries.
        See the linked tutorial sections below for examples.

        .. seealso::

            :ref:`tutorial_correlated_updates`

            :ref:`tutorial_update_from`

            :ref:`tutorial_multi_table_deletes`

        """
        # coerce each criterion through the WHERE/HAVING role, then append
        # to the existing criteria tuple in the given order
        coerced = tuple(
            coercions.expect(
                roles.WhereHavingRole, expr, apply_propagate_attrs=self
            )
            for expr in whereclause
        )
        self._where_criteria += coerced
        return self

    def filter(self, *criteria: roles.ExpressionElementRole[Any]) -> Self:
        """A synonym for the :meth:`_dml.DMLWhereBase.where` method.

        .. versionadded:: 1.4

        """
        return self.where(*criteria)

    def _filter_by_zero(self) -> _DMLTableElement:
        # entity against which filter_by() attribute names are resolved
        return self.table

    def filter_by(self, **kwargs: Any) -> Self:
        r"""apply the given filtering criterion as a WHERE clause
        to this select.

        """
        target = self._filter_by_zero()
        conditions = [
            _entity_namespace_key(target, attr_name) == attr_value
            for attr_name, attr_value in kwargs.items()
        ]
        return self.filter(*conditions)

    @property
    def whereclause(self) -> Optional[ColumnElement[Any]]:
        """Return the completed WHERE clause for this :class:`.DMLWhereBase`
        statement.

        This assembles the current collection of WHERE criteria
        into a single :class:`_expression.BooleanClauseList` construct.


        .. versionadded:: 1.4

        """
        return BooleanClauseList._construct_for_whereclause(
            self._where_criteria
        )
+ + """ + if self._values: + raise exc.ArgumentError( + "This statement already has values present" + ) + elif self._ordered_values: + raise exc.ArgumentError( + "This statement already has ordered values present" + ) + + kv_generator = DMLState.get_plugin_class(self)._get_crud_kv_pairs + self._ordered_values = kv_generator(self, args, True) + return self + + @_generative + def inline(self) -> Self: + """Make this :class:`_expression.Update` construct "inline" . + + When set, SQL defaults present on :class:`_schema.Column` + objects via the + ``default`` keyword will be compiled 'inline' into the statement and + not pre-executed. This means that their values will not be available + in the dictionary returned from + :meth:`_engine.CursorResult.last_updated_params`. + + .. versionchanged:: 1.4 the :paramref:`_expression.update.inline` + parameter + is now superseded by the :meth:`_expression.Update.inline` method. + + """ + self._inline = True + return self + + if TYPE_CHECKING: + # START OVERLOADED FUNCTIONS self.returning ReturningUpdate 1-8 + + # code within this block is **programmatically, + # statically generated** by tools/generate_tuple_map_overloads.py + + @overload + def returning( + self, __ent0: _TCCA[_T0] + ) -> ReturningUpdate[Tuple[_T0]]: ... + + @overload + def returning( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] + ) -> ReturningUpdate[Tuple[_T0, _T1]]: ... + + @overload + def returning( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... 
class ReturningUpdate(Update, TypedReturnsRows[_TP]):
    """Typing-only class that establishes a generic type form of
    :class:`.Update` which tracks returned column types.

    This datatype is delivered when calling the
    :meth:`.Update.returning` method.

    .. versionadded:: 2.0

    """

    # no runtime behavior of its own: _TP carries the Tuple type of the
    # RETURNING columns so type checkers can type the resulting rows
+ + """ + + __visit_name__ = "delete" + + is_delete = True + + _traverse_internals = ( + [ + ("table", InternalTraversal.dp_clauseelement), + ("_where_criteria", InternalTraversal.dp_clauseelement_tuple), + ("_returning", InternalTraversal.dp_clauseelement_tuple), + ("_hints", InternalTraversal.dp_table_hint_list), + ] + + HasPrefixes._has_prefixes_traverse_internals + + DialectKWArgs._dialect_kwargs_traverse_internals + + Executable._executable_traverse_internals + + HasCTE._has_ctes_traverse_internals + ) + + def __init__(self, table: _DMLTableArgument): + self.table = coercions.expect( + roles.DMLTableRole, table, apply_propagate_attrs=self + ) + + if TYPE_CHECKING: + # START OVERLOADED FUNCTIONS self.returning ReturningDelete 1-8 + + # code within this block is **programmatically, + # statically generated** by tools/generate_tuple_map_overloads.py + + @overload + def returning( + self, __ent0: _TCCA[_T0] + ) -> ReturningDelete[Tuple[_T0]]: ... + + @overload + def returning( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] + ) -> ReturningDelete[Tuple[_T0, _T1]]: ... + + @overload + def returning( + self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] + ) -> ReturningDelete[Tuple[_T0, _T1, _T2]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... + + @overload + def returning( + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + __ent3: _TCCA[_T3], + __ent4: _TCCA[_T4], + __ent5: _TCCA[_T5], + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... 
# NOTE: base class corrected from ``Update`` to ``Delete``.  This class is
# the annotated return type of :meth:`.Delete.returning`, so it must
# participate in the Delete hierarchy (``is_delete`` flag,
# ``__visit_name__ == "delete"``); inheriting ``Update`` would mark the
# construct as an UPDATE statement.
class ReturningDelete(Delete, TypedReturnsRows[_TP]):
    """Typing-only class that establishes a generic type form of
    :class:`.Delete` which tracks returned column types.

    This datatype is delivered when calling the
    :meth:`.Delete.returning` method.

    .. versionadded:: 2.0

    """
+ +""" + +from __future__ import annotations + +from decimal import Decimal +from enum import IntEnum +import itertools +import operator +import re +import typing +from typing import AbstractSet +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import FrozenSet +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple as typing_Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from . import coercions +from . import operators +from . import roles +from . import traversals +from . import type_api +from ._typing import has_schema_attr +from ._typing import is_named_from_clause +from ._typing import is_quoted_name +from ._typing import is_tuple_type +from .annotation import Annotated +from .annotation import SupportsWrappingAnnotations +from .base import _clone +from .base import _expand_cloned +from .base import _generative +from .base import _NoArg +from .base import Executable +from .base import Generative +from .base import HasMemoized +from .base import Immutable +from .base import NO_ARG +from .base import SingletonConstant +from .cache_key import MemoizedHasCacheKey +from .cache_key import NO_CACHE +from .coercions import _document_text_coercion # noqa +from .operators import ColumnOperators +from .traversals import HasCopyInternals +from .visitors import cloned_traverse +from .visitors import ExternallyTraversible +from .visitors import InternalTraversal +from .visitors import traverse +from .visitors import Visitable +from .. import exc +from .. import inspection +from .. 
# overload: an explicit ``type_`` determines the BindParameter's generic type
@overload
def literal(
    value: Any,
    type_: _TypeEngineArgument[_T],
    literal_execute: bool = False,
) -> BindParameter[_T]: ...


# overload: with no ``type_``, the generic type is inferred from ``value``
@overload
def literal(
    value: _T,
    type_: None = None,
    literal_execute: bool = False,
) -> BindParameter[_T]: ...


@overload
def literal(
    value: Any,
    type_: Optional[_TypeEngineArgument[Any]] = None,
    literal_execute: bool = False,
) -> BindParameter[Any]: ...


def literal(
    value: Any,
    type_: Optional[_TypeEngineArgument[Any]] = None,
    literal_execute: bool = False,
) -> BindParameter[Any]:
    r"""Return a literal clause, bound to a bind parameter.

    Literal clauses are created automatically when non-
    :class:`_expression.ClauseElement` objects (such as strings, ints, dates,
    etc.) are
    used in a comparison operation with a :class:`_expression.ColumnElement`
    subclass,
    such as a :class:`~sqlalchemy.schema.Column` object.  Use this function
    to force the generation of a literal clause, which will be created as a
    :class:`BindParameter` with a bound value.

    :param value: the value to be bound. Can be any Python object supported by
     the underlying DB-API, or is translatable via the given type argument.

    :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which will
     provide bind-parameter translation for this literal.

    :param literal_execute: optional bool, when True, the SQL engine will
     attempt to render the bound value directly in the SQL statement at
     execution time rather than providing as a parameter value.

     .. versionadded:: 2.0

    """
    # delegate to the coercion system, which constructs the BindParameter
    return coercions.expect(
        roles.LiteralValueRole,
        value,
        type_=type_,
        literal_execute=literal_execute,
    )
class CompilerElement(Visitable):
    """base class for SQL elements that can be compiled to produce a
    SQL string.

    .. versionadded:: 2.0

    """

    __slots__ = ()
    __visit_name__ = "compiler_element"

    # subclasses that represent executable statements override this to True
    supports_execution = False

    # dialect name used when compile()/str() is invoked with neither a
    # ``bind`` nor a ``dialect``; "default" selects the generic
    # string-compilation dialect
    stringify_dialect = "default"

    @util.preload_module("sqlalchemy.engine.default")
    @util.preload_module("sqlalchemy.engine.url")
    def compile(
        self,
        bind: Optional[_HasDialect] = None,
        dialect: Optional[Dialect] = None,
        **kw: Any,
    ) -> Compiled:
        """Compile this SQL expression.

        The return value is a :class:`~.Compiled` object.
        Calling ``str()`` or ``unicode()`` on the returned value will yield a
        string representation of the result. The
        :class:`~.Compiled` object also can return a
        dictionary of bind parameter names and values
        using the ``params`` accessor.

        :param bind: An :class:`.Connection` or :class:`.Engine` which
           can provide a :class:`.Dialect` in order to generate a
           :class:`.Compiled` object.  If the ``bind`` and
           ``dialect`` parameters are both omitted, a default SQL compiler
           is used.

        :param column_keys: Used for INSERT and UPDATE statements, a list of
            column names which should be present in the VALUES clause of the
            compiled statement. If ``None``, all columns from the target table
            object are rendered.

        :param dialect: A :class:`.Dialect` instance which can generate
            a :class:`.Compiled` object.  This argument takes precedence over
            the ``bind`` argument.

        :param compile_kwargs: optional dictionary of additional parameters
            that will be passed through to the compiler within all "visit"
            methods.  This allows any custom flag to be passed through to
            a custom compilation construct, for example.  It is also used
            for the case of passing the ``literal_binds`` flag through::

                from sqlalchemy.sql import table, column, select

                t = table('t', column('x'))

                s = select(t).where(t.c.x == 5)

                print(s.compile(compile_kwargs={"literal_binds": True}))

        .. seealso::

            :ref:`faq_sql_expression_string`

        """

        # dialect resolution order: explicit ``dialect`` argument wins;
        # otherwise the dialect of ``bind``; otherwise fall back to the
        # dialect named by ``stringify_dialect``
        if dialect is None:
            if bind:
                dialect = bind.dialect
            elif self.stringify_dialect == "default":
                # lazily-loaded module (see @preload_module decorators)
                default = util.preloaded.engine_default
                dialect = default.StrCompileDialect()
            else:
                # resolve a named dialect via a synthesized URL
                url = util.preloaded.engine_url
                dialect = url.URL.create(
                    self.stringify_dialect
                ).get_dialect()()

        return self._compiler(dialect, **kw)

    def _compiler(self, dialect: Dialect, **kw: Any) -> Compiled:
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""

        if TYPE_CHECKING:
            assert isinstance(self, ClauseElement)
        return dialect.statement_compiler(dialect, self, **kw)

    def __str__(self) -> str:
        # stringification compiles against the resolved default dialect
        return str(self.compile())
+ + else: + _propagate_attrs = util.EMPTY_DICT + + @util.ro_memoized_property + def description(self) -> Optional[str]: + return None + + _is_clone_of: Optional[Self] = None + + is_clause_element = True + is_selectable = False + is_dml = False + _is_column_element = False + _is_keyed_column_element = False + _is_table = False + _gen_static_annotations_cache_key = False + _is_textual = False + _is_from_clause = False + _is_returns_rows = False + _is_text_clause = False + _is_from_container = False + _is_select_container = False + _is_select_base = False + _is_select_statement = False + _is_bind_parameter = False + _is_clause_list = False + _is_lambda_element = False + _is_singleton_constant = False + _is_immutable = False + _is_star = False + + @property + def _order_by_label_element(self) -> Optional[Label[Any]]: + return None + + _cache_key_traversal: _CacheKeyTraversalType = None + + negation_clause: ColumnElement[bool] + + if typing.TYPE_CHECKING: + + def get_children( + self, *, omit_attrs: typing_Tuple[str, ...] = ..., **kw: Any + ) -> Iterable[ClauseElement]: ... + + @util.ro_non_memoized_property + def _from_objects(self) -> List[FromClause]: + return [] + + def _set_propagate_attrs(self, values: Mapping[str, Any]) -> Self: + # usually, self._propagate_attrs is empty here. one case where it's + # not is a subquery against ORM select, that is then pulled as a + # property of an aliased class. should all be good + + # assert not self._propagate_attrs + + self._propagate_attrs = util.immutabledict(values) + return self + + def _clone(self, **kw: Any) -> Self: + """Create a shallow copy of this ClauseElement. + + This method may be used by a generative API. Its also used as + part of the "deep" copy afforded by a traversal that combines + the _copy_internals() method. 
+ + """ + + skip = self._memoized_keys + c = self.__class__.__new__(self.__class__) + + if skip: + # ensure this iteration remains atomic + c.__dict__ = { + k: v for k, v in self.__dict__.copy().items() if k not in skip + } + else: + c.__dict__ = self.__dict__.copy() + + # this is a marker that helps to "equate" clauses to each other + # when a Select returns its list of FROM clauses. the cloning + # process leaves around a lot of remnants of the previous clause + # typically in the form of column expressions still attached to the + # old table. + cc = self._is_clone_of + c._is_clone_of = cc if cc is not None else self + return c + + def _negate_in_binary(self, negated_op, original_op): + """a hook to allow the right side of a binary expression to respond + to a negation of the binary expression. + + Used for the special case of expanding bind parameter with IN. + + """ + return self + + def _with_binary_element_type(self, type_): + """in the context of binary expression, convert the type of this + object to the one given. + + applies only to :class:`_expression.ColumnElement` classes. + + """ + return self + + @property + def _constructor(self): + """return the 'constructor' for this ClauseElement. + + This is for the purposes for creating a new object of + this type. Usually, its just the element's __class__. + However, the "Annotated" version of the object overrides + to return the class of its proxied element. + + """ + return self.__class__ + + @HasMemoized.memoized_attribute + def _cloned_set(self): + """Return the set consisting all cloned ancestors of this + ClauseElement. + + Includes this ClauseElement. This accessor tends to be used for + FromClause objects to identify 'equivalent' FROM clauses, regardless + of transformative operations. + + """ + s = util.column_set() + f: Optional[ClauseElement] = self + + # note this creates a cycle, asserted in test_memusage. 
however, + # turning this into a plain @property adds tends of thousands of method + # calls to Core / ORM performance tests, so the small overhead + # introduced by the relatively small amount of short term cycles + # produced here is preferable + while f is not None: + s.add(f) + f = f._is_clone_of + return s + + def _de_clone(self): + while self._is_clone_of is not None: + self = self._is_clone_of + return self + + @property + def entity_namespace(self): + raise AttributeError( + "This SQL expression has no entity namespace " + "with which to filter from." + ) + + def __getstate__(self): + d = self.__dict__.copy() + d.pop("_is_clone_of", None) + d.pop("_generate_cache_key", None) + return d + + def _execute_on_connection( + self, + connection: Connection, + distilled_params: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> Result[Any]: + if self.supports_execution: + if TYPE_CHECKING: + assert isinstance(self, Executable) + return connection._execute_clauseelement( + self, distilled_params, execution_options + ) + else: + raise exc.ObjectNotExecutableError(self) + + def _execute_on_scalar( + self, + connection: Connection, + distilled_params: _CoreMultiExecuteParams, + execution_options: CoreExecuteOptionsParameter, + ) -> Any: + """an additional hook for subclasses to provide a different + implementation for connection.scalar() vs. connection.execute(). + + .. versionadded:: 2.0 + + """ + return self._execute_on_connection( + connection, distilled_params, execution_options + ).scalar() + + def _get_embedded_bindparams(self) -> Sequence[BindParameter[Any]]: + """Return the list of :class:`.BindParameter` objects embedded in the + object. + + This accomplishes the same purpose as ``visitors.traverse()`` or + similar would provide, however by making use of the cache key + it takes advantage of memoization of the key to result in fewer + net method calls, assuming the statement is also going to be + executed. 
+ + """ + + key = self._generate_cache_key() + if key is None: + bindparams: List[BindParameter[Any]] = [] + + traverse(self, {}, {"bindparam": bindparams.append}) + return bindparams + + else: + return key.bindparams + + def unique_params( + self, + __optionaldict: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> Self: + """Return a copy with :func:`_expression.bindparam` elements + replaced. + + Same functionality as :meth:`_expression.ClauseElement.params`, + except adds `unique=True` + to affected bind parameters so that multiple statements can be + used. + + """ + return self._replace_params(True, __optionaldict, kwargs) + + def params( + self, + __optionaldict: Optional[Mapping[str, Any]] = None, + **kwargs: Any, + ) -> Self: + """Return a copy with :func:`_expression.bindparam` elements + replaced. + + Returns a copy of this ClauseElement with + :func:`_expression.bindparam` + elements replaced with values taken from the given dictionary:: + + >>> clause = column('x') + bindparam('foo') + >>> print(clause.compile().params) + {'foo':None} + >>> print(clause.params({'foo':7}).compile().params) + {'foo':7} + + """ + return self._replace_params(False, __optionaldict, kwargs) + + def _replace_params( + self, + unique: bool, + optionaldict: Optional[Mapping[str, Any]], + kwargs: Dict[str, Any], + ) -> Self: + if optionaldict: + kwargs.update(optionaldict) + + def visit_bindparam(bind: BindParameter[Any]) -> None: + if bind.key in kwargs: + bind.value = kwargs[bind.key] + bind.required = False + if unique: + bind._convert_to_unique() + + return cloned_traverse( + self, + {"maintain_key": True, "detect_subquery_cols": True}, + {"bindparam": visit_bindparam}, + ) + + def compare(self, other: ClauseElement, **kw: Any) -> bool: + r"""Compare this :class:`_expression.ClauseElement` to + the given :class:`_expression.ClauseElement`. + + Subclasses should override the default behavior, which is a + straight identity comparison. 
+ + \**kw are arguments consumed by subclass ``compare()`` methods and + may be used to modify the criteria for comparison + (see :class:`_expression.ColumnElement`). + + """ + return traversals.compare(self, other, **kw) + + def self_group( + self, against: Optional[OperatorType] = None + ) -> ClauseElement: + """Apply a 'grouping' to this :class:`_expression.ClauseElement`. + + This method is overridden by subclasses to return a "grouping" + construct, i.e. parenthesis. In particular it's used by "binary" + expressions to provide a grouping around themselves when placed into a + larger expression, as well as by :func:`_expression.select` + constructs when placed into the FROM clause of another + :func:`_expression.select`. (Note that subqueries should be + normally created using the :meth:`_expression.Select.alias` method, + as many + platforms require nested SELECT statements to be named). + + As expressions are composed together, the application of + :meth:`self_group` is automatic - end-user code should never + need to use this method directly. Note that SQLAlchemy's + clause constructs take operator precedence into account - + so parenthesis might not be needed, for example, in + an expression like ``x OR (y AND z)`` - AND takes precedence + over OR. + + The base :meth:`self_group` method of + :class:`_expression.ClauseElement` + just returns self. + """ + return self + + def _ungroup(self) -> ClauseElement: + """Return this :class:`_expression.ClauseElement` + without any groupings. 
+ """ + + return self + + def _compile_w_cache( + self, + dialect: Dialect, + *, + compiled_cache: Optional[CompiledCacheType], + column_keys: List[str], + for_executemany: bool = False, + schema_translate_map: Optional[SchemaTranslateMapType] = None, + **kw: Any, + ) -> typing_Tuple[ + Compiled, Optional[Sequence[BindParameter[Any]]], CacheStats + ]: + elem_cache_key: Optional[CacheKey] + + if compiled_cache is not None and dialect._supports_statement_cache: + elem_cache_key = self._generate_cache_key() + else: + elem_cache_key = None + + if elem_cache_key is not None: + if TYPE_CHECKING: + assert compiled_cache is not None + + cache_key, extracted_params = elem_cache_key + key = ( + dialect, + cache_key, + tuple(column_keys), + bool(schema_translate_map), + for_executemany, + ) + compiled_sql = compiled_cache.get(key) + + if compiled_sql is None: + cache_hit = dialect.CACHE_MISS + compiled_sql = self._compiler( + dialect, + cache_key=elem_cache_key, + column_keys=column_keys, + for_executemany=for_executemany, + schema_translate_map=schema_translate_map, + **kw, + ) + compiled_cache[key] = compiled_sql + else: + cache_hit = dialect.CACHE_HIT + else: + extracted_params = None + compiled_sql = self._compiler( + dialect, + cache_key=elem_cache_key, + column_keys=column_keys, + for_executemany=for_executemany, + schema_translate_map=schema_translate_map, + **kw, + ) + + if not dialect._supports_statement_cache: + cache_hit = dialect.NO_DIALECT_SUPPORT + elif compiled_cache is None: + cache_hit = dialect.CACHING_DISABLED + else: + cache_hit = dialect.NO_CACHE_KEY + + return compiled_sql, extracted_params, cache_hit + + def __invert__(self): + # undocumented element currently used by the ORM for + # relationship.contains() + if hasattr(self, "negation_clause"): + return self.negation_clause + else: + return self._negate() + + def _negate(self) -> ClauseElement: + grouped = self.self_group(against=operators.inv) + assert isinstance(grouped, ColumnElement) + return 
class DQLDMLClauseElement(ClauseElement):
    """represents a :class:`.ClauseElement` that compiles to a DQL or DML
    expression, not DDL.

    .. versionadded:: 2.0

    """

    if typing.TYPE_CHECKING:
        # typing-only redeclarations: these members exist on the base
        # classes; they are restated here so type checkers see the more
        # specific SQLCompiler return type for DQL/DML statements.

        def _compiler(self, dialect: Dialect, **kw: Any) -> SQLCompiler:
            """Return a compiler appropriate for this ClauseElement, given a
            Dialect."""
            ...

        def compile(  # noqa: A001
            self,
            bind: Optional[_HasDialect] = None,
            dialect: Optional[Dialect] = None,
            **kw: Any,
        ) -> SQLCompiler: ...


class CompilerColumnElement(
    roles.DMLColumnRole,
    roles.DDLConstraintColumnRole,
    roles.ColumnsClauseRole,
    CompilerElement,
):
    """A compiler-only column element used for ad-hoc string compilations.

    .. versionadded:: 2.0

    """

    __slots__ = ()

    # compiler-only elements never carry ORM propagation state
    _propagate_attrs = util.EMPTY_DICT
    _is_collection_aggregate = False
        # -- typing-only operator stubs, continued (TYPE_CHECKING block) --
        # custom operators: op() is overloaded so that supplying
        # return_type yields a correspondingly-typed BinaryExpression
        @overload
        def op(
            self,
            opstring: str,
            precedence: int = ...,
            is_comparison: bool = ...,
            *,
            return_type: _TypeEngineArgument[_OPT],
            python_impl: Optional[Callable[..., Any]] = None,
        ) -> Callable[[Any], BinaryExpression[_OPT]]: ...

        @overload
        def op(
            self,
            opstring: str,
            precedence: int = ...,
            is_comparison: bool = ...,
            return_type: Optional[_TypeEngineArgument[Any]] = ...,
            python_impl: Optional[Callable[..., Any]] = ...,
        ) -> Callable[[Any], BinaryExpression[Any]]: ...

        def op(
            self,
            opstring: str,
            precedence: int = 0,
            is_comparison: bool = False,
            return_type: Optional[_TypeEngineArgument[Any]] = None,
            python_impl: Optional[Callable[..., Any]] = None,
        ) -> Callable[[Any], BinaryExpression[Any]]: ...

        def bool_op(
            self,
            opstring: str,
            precedence: int = 0,
            python_impl: Optional[Callable[..., Any]] = None,
        ) -> Callable[[Any], BinaryExpression[bool]]: ...

        # logical / comparison operators
        def __and__(self, other: Any) -> BooleanClauseList: ...

        def __or__(self, other: Any) -> BooleanClauseList: ...

        def __invert__(self) -> ColumnElement[_T_co]: ...

        def __lt__(self, other: Any) -> ColumnElement[bool]: ...

        def __le__(self, other: Any) -> ColumnElement[bool]: ...

        # declare also that this class has a hash method otherwise
        # it may be assumed to be None by type checkers since the
        # object defines __eq__ and python sets it to None in that case:
        # https://docs.python.org/3/reference/datamodel.html#object.__hash__
        def __hash__(self) -> int: ...

        def __eq__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override] # noqa: E501
            ...

        def __ne__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override] # noqa: E501
            ...

        def is_distinct_from(self, other: Any) -> ColumnElement[bool]: ...

        def is_not_distinct_from(self, other: Any) -> ColumnElement[bool]: ...

        def __gt__(self, other: Any) -> ColumnElement[bool]: ...

        def __ge__(self, other: Any) -> ColumnElement[bool]: ...

        def __neg__(self) -> UnaryExpression[_T_co]: ...

        def __contains__(self, other: Any) -> ColumnElement[bool]: ...

        def __getitem__(self, index: Any) -> ColumnElement[Any]: ...

        # shift operators: the int-typed overload preserves the int
        # expression type; the fallback is untyped
        @overload
        def __lshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ...

        @overload
        def __lshift__(self, other: Any) -> ColumnElement[Any]: ...

        def __lshift__(self, other: Any) -> ColumnElement[Any]: ...

        @overload
        def __rshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ...

        @overload
        def __rshift__(self, other: Any) -> ColumnElement[Any]: ...

        def __rshift__(self, other: Any) -> ColumnElement[Any]: ...

        @overload
        def concat(self: _SQO[str], other: Any) -> ColumnElement[str]: ...

        @overload
        def concat(self, other: Any) -> ColumnElement[Any]: ...

        def concat(self, other: Any) -> ColumnElement[Any]: ...

        # string pattern matching
        def like(
            self, other: Any, escape: Optional[str] = None
        ) -> BinaryExpression[bool]: ...

        def ilike(
            self, other: Any, escape: Optional[str] = None
        ) -> BinaryExpression[bool]: ...

        # bitwise operators
        def bitwise_xor(self, other: Any) -> BinaryExpression[Any]: ...

        def bitwise_or(self, other: Any) -> BinaryExpression[Any]: ...

        def bitwise_and(self, other: Any) -> BinaryExpression[Any]: ...

        def bitwise_not(self) -> UnaryExpression[_T_co]: ...

        def bitwise_lshift(self, other: Any) -> BinaryExpression[Any]: ...

        def bitwise_rshift(self, other: Any) -> BinaryExpression[Any]: ...

        # IN / NOT IN
        def in_(
            self,
            other: Union[
                Iterable[Any], BindParameter[Any], roles.InElementRole
            ],
        ) -> BinaryExpression[bool]: ...

        def not_in(
            self,
            other: Union[
                Iterable[Any], BindParameter[Any], roles.InElementRole
            ],
        ) -> BinaryExpression[bool]: ...

        def notin_(
            self,
            other: Union[
                Iterable[Any], BindParameter[Any], roles.InElementRole
            ],
        ) -> BinaryExpression[bool]: ...

        def not_like(
            self, other: Any, escape: Optional[str] = None
        ) -> BinaryExpression[bool]: ...

        def notlike(
            self, other: Any, escape: Optional[str] = None
        ) -> BinaryExpression[bool]: ...

        def not_ilike(
            self, other: Any, escape: Optional[str] = None
        ) -> BinaryExpression[bool]: ...

        def notilike(
            self, other: Any, escape: Optional[str] = None
        ) -> BinaryExpression[bool]: ...

        # IS / IS NOT
        def is_(self, other: Any) -> BinaryExpression[bool]: ...

        def is_not(self, other: Any) -> BinaryExpression[bool]: ...

        def isnot(self, other: Any) -> BinaryExpression[bool]: ...

        # prefix / suffix / containment helpers
        def startswith(
            self,
            other: Any,
            escape: Optional[str] = None,
            autoescape: bool = False,
        ) -> ColumnElement[bool]: ...

        def istartswith(
            self,
            other: Any,
            escape: Optional[str] = None,
            autoescape: bool = False,
        ) -> ColumnElement[bool]: ...

        def endswith(
            self,
            other: Any,
            escape: Optional[str] = None,
            autoescape: bool = False,
        ) -> ColumnElement[bool]: ...

        def iendswith(
            self,
            other: Any,
            escape: Optional[str] = None,
            autoescape: bool = False,
        ) -> ColumnElement[bool]: ...

        def contains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ...

        def icontains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ...

        def match(self, other: Any, **kwargs: Any) -> ColumnElement[bool]: ...

        # regular expressions
        def regexp_match(
            self, pattern: Any, flags: Optional[str] = None
        ) -> ColumnElement[bool]: ...

        def regexp_replace(
            self, pattern: Any, replacement: Any, flags: Optional[str] = None
        ) -> ColumnElement[str]: ...

        # ordering modifiers
        def desc(self) -> UnaryExpression[_T_co]: ...

        def asc(self) -> UnaryExpression[_T_co]: ...

        def nulls_first(self) -> UnaryExpression[_T_co]: ...

        def nullsfirst(self) -> UnaryExpression[_T_co]: ...

        def nulls_last(self) -> UnaryExpression[_T_co]: ...

        def nullslast(self) -> UnaryExpression[_T_co]: ...

        def collate(self, collation: str) -> CollationClause: ...

        def between(
            self, cleft: Any, cright: Any, symmetric: bool = False
        ) -> BinaryExpression[bool]: ...

        def distinct(self: _SQO[_T_co]) -> UnaryExpression[_T_co]: ...

        def any_(self) -> CollectionAggregate[Any]: ...

        def all_(self) -> CollectionAggregate[Any]: ...

        # numeric overloads. These need more tweaking
        # in particular they all need to have a variant for Optional[_T]
        # because Optional only applies to the data side, not the expression
        # side

        @overload
        def __add__(
            self: _SQO[_NMT],
            other: Any,
        ) -> ColumnElement[_NMT]: ...

        @overload
        def __add__(
            self: _SQO[str],
            other: Any,
        ) -> ColumnElement[str]: ...

        def __add__(self, other: Any) -> ColumnElement[Any]: ...

        @overload
        def __radd__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ...

        @overload
        def __radd__(self: _SQO[str], other: Any) -> ColumnElement[str]: ...

        def __radd__(self, other: Any) -> ColumnElement[Any]: ...

        @overload
        def __sub__(
            self: _SQO[_NMT],
            other: Any,
        ) -> ColumnElement[_NMT]: ...

        @overload
        def __sub__(self, other: Any) -> ColumnElement[Any]: ...

        def __sub__(self, other: Any) -> ColumnElement[Any]: ...

        @overload
        def __rsub__(
            self: _SQO[_NMT],
            other: Any,
        ) -> ColumnElement[_NMT]: ...

        @overload
        def __rsub__(self, other: Any) -> ColumnElement[Any]: ...

        def __rsub__(self, other: Any) -> ColumnElement[Any]: ...

        @overload
        def __mul__(
            self: _SQO[_NMT],
            other: Any,
        ) -> ColumnElement[_NMT]: ...

        @overload
        def __mul__(self, other: Any) -> ColumnElement[Any]: ...

        def __mul__(self, other: Any) -> ColumnElement[Any]: ...

        @overload
        def __rmul__(
            self: _SQO[_NMT],
            other: Any,
        ) -> ColumnElement[_NMT]: ...

        @overload
        def __rmul__(self, other: Any) -> ColumnElement[Any]: ...

        def __rmul__(self, other: Any) -> ColumnElement[Any]: ...

        @overload
        def __mod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ...

        @overload
        def __mod__(self, other: Any) -> ColumnElement[Any]: ...

        def __mod__(self, other: Any) -> ColumnElement[Any]: ...
+ + @overload + def __rmod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ... + + @overload + def __rmod__(self, other: Any) -> ColumnElement[Any]: ... + + def __rmod__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __truediv__( + self: _SQO[int], other: Any + ) -> ColumnElement[_NUMERIC]: ... + + @overload + def __truediv__(self: _SQO[_NT], other: Any) -> ColumnElement[_NT]: ... + + @overload + def __truediv__(self, other: Any) -> ColumnElement[Any]: ... + + def __truediv__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __rtruediv__( + self: _SQO[_NMT], other: Any + ) -> ColumnElement[_NUMERIC]: ... + + @overload + def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ... + + def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __floordiv__( + self: _SQO[_NMT], other: Any + ) -> ColumnElement[_NMT]: ... + + @overload + def __floordiv__(self, other: Any) -> ColumnElement[Any]: ... + + def __floordiv__(self, other: Any) -> ColumnElement[Any]: ... + + @overload + def __rfloordiv__( + self: _SQO[_NMT], other: Any + ) -> ColumnElement[_NMT]: ... + + @overload + def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ... + + def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ... + + +class SQLColumnExpression( + SQLCoreOperations[_T_co], roles.ExpressionElementRole[_T_co], TypingOnly +): + """A type that may be used to indicate any SQL column element or object + that acts in place of one. + + :class:`.SQLColumnExpression` is a base of + :class:`.ColumnElement`, as well as within the bases of ORM elements + such as :class:`.InstrumentedAttribute`, and may be used in :pep:`484` + typing to indicate arguments or return values that should behave + as column expressions. + + .. 
versionadded:: 2.0.0b4 + + + """ + + __slots__ = () + + +_SQO = SQLCoreOperations + + +class ColumnElement( + roles.ColumnArgumentOrKeyRole, + roles.StatementOptionRole, + roles.WhereHavingRole, + roles.BinaryElementRole[_T], + roles.OrderByRole, + roles.ColumnsClauseRole, + roles.LimitOffsetRole, + roles.DMLColumnRole, + roles.DDLConstraintColumnRole, + roles.DDLExpressionRole, + SQLColumnExpression[_T], + DQLDMLClauseElement, +): + """Represent a column-oriented SQL expression suitable for usage in the + "columns" clause, WHERE clause etc. of a statement. + + While the most familiar kind of :class:`_expression.ColumnElement` is the + :class:`_schema.Column` object, :class:`_expression.ColumnElement` + serves as the basis + for any unit that may be present in a SQL expression, including + the expressions themselves, SQL functions, bound parameters, + literal expressions, keywords such as ``NULL``, etc. + :class:`_expression.ColumnElement` + is the ultimate base class for all such elements. + + A wide variety of SQLAlchemy Core functions work at the SQL expression + level, and are intended to accept instances of + :class:`_expression.ColumnElement` as + arguments. These functions will typically document that they accept a + "SQL expression" as an argument. What this means in terms of SQLAlchemy + usually refers to an input which is either already in the form of a + :class:`_expression.ColumnElement` object, + or a value which can be **coerced** into + one. The coercion rules followed by most, but not all, SQLAlchemy Core + functions with regards to SQL expressions are as follows: + + * a literal Python value, such as a string, integer or floating + point value, boolean, datetime, ``Decimal`` object, or virtually + any other Python object, will be coerced into a "literal bound + value". 
This generally means that a :func:`.bindparam` will be + produced featuring the given value embedded into the construct; the + resulting :class:`.BindParameter` object is an instance of + :class:`_expression.ColumnElement`. + The Python value will ultimately be sent + to the DBAPI at execution time as a parameterized argument to the + ``execute()`` or ``executemany()`` methods, after SQLAlchemy + type-specific converters (e.g. those provided by any associated + :class:`.TypeEngine` objects) are applied to the value. + + * any special object value, typically ORM-level constructs, which + feature an accessor called ``__clause_element__()``. The Core + expression system looks for this method when an object of otherwise + unknown type is passed to a function that is looking to coerce the + argument into a :class:`_expression.ColumnElement` and sometimes a + :class:`_expression.SelectBase` expression. + It is used within the ORM to + convert from ORM-specific objects like mapped classes and + mapped attributes into Core expression objects. + + * The Python ``None`` value is typically interpreted as ``NULL``, + which in SQLAlchemy Core produces an instance of :func:`.null`. + + A :class:`_expression.ColumnElement` provides the ability to generate new + :class:`_expression.ColumnElement` + objects using Python expressions. This means that Python operators + such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations, + and allow the instantiation of further :class:`_expression.ColumnElement` + instances + which are composed from other, more fundamental + :class:`_expression.ColumnElement` + objects. For example, two :class:`.ColumnClause` objects can be added + together with the addition operator ``+`` to produce + a :class:`.BinaryExpression`. + Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses + of :class:`_expression.ColumnElement`: + + .. 
sourcecode:: pycon+sql + + >>> from sqlalchemy.sql import column + >>> column('a') + column('b') + + >>> print(column('a') + column('b')) + {printsql}a + b + + .. seealso:: + + :class:`_schema.Column` + + :func:`_expression.column` + + """ + + __visit_name__ = "column_element" + + primary_key: bool = False + _is_clone_of: Optional[ColumnElement[_T]] + _is_column_element = True + _insert_sentinel: bool = False + _omit_from_statements = False + _is_collection_aggregate = False + + foreign_keys: AbstractSet[ForeignKey] = frozenset() + + @util.memoized_property + def _proxies(self) -> List[ColumnElement[Any]]: + return [] + + @util.non_memoized_property + def _tq_label(self) -> Optional[str]: + """The named label that can be used to target + this column in a result set in a "table qualified" context. + + This label is almost always the label used when + rendering AS