chore: 添加虚拟环境到仓库
- 添加 backend_service/venv 虚拟环境
- 包含所有 Python 依赖包
- 注意:虚拟环境约 393MB,包含 12655 个文件
This commit is contained in:
@@ -0,0 +1,106 @@
|
||||
"""
|
||||
PyPika is divided into a couple of modules, primarily the ``queries`` and ``terms`` modules.
|
||||
|
||||
pypika.queries
|
||||
--------------
|
||||
|
||||
This is where the ``Query`` class can be found which is the core class in PyPika. Also, other top level classes such
|
||||
as ``Table`` can be found here. ``Query`` is a container that holds all of the ``Term`` types together and also
|
||||
serializes the builder to a string.
|
||||
|
||||
pypika.terms
|
||||
------------
|
||||
|
||||
This module contains the classes which represent individual parts of queries that extend the ``Term`` base class.
|
||||
|
||||
pypika.functions
|
||||
----------------
|
||||
|
||||
Wrappers for common SQL functions are stored in this package.
|
||||
|
||||
pypika.enums
|
||||
------------
|
||||
|
||||
Enumerated values are kept in this package which are used as options for Queries and Terms.
|
||||
|
||||
|
||||
pypika.utils
|
||||
------------
|
||||
|
||||
This contains all of the utility classes such as exceptions and decorators.
|
||||
|
||||
"""
|
||||
# noinspection PyUnresolvedReferences
|
||||
from pypika.dialects import (
|
||||
ClickHouseQuery,
|
||||
Dialects,
|
||||
MSSQLQuery,
|
||||
MySQLQuery,
|
||||
OracleQuery,
|
||||
PostgreSQLQuery,
|
||||
RedshiftQuery,
|
||||
SQLLiteQuery,
|
||||
VerticaQuery,
|
||||
)
|
||||
|
||||
# noinspection PyUnresolvedReferences
|
||||
from pypika.enums import (
|
||||
DatePart,
|
||||
JoinType,
|
||||
Order,
|
||||
)
|
||||
|
||||
# noinspection PyUnresolvedReferences
|
||||
from pypika.queries import (
|
||||
AliasedQuery,
|
||||
Query,
|
||||
Schema,
|
||||
Table,
|
||||
Column,
|
||||
Database,
|
||||
make_tables as Tables,
|
||||
make_columns as Columns,
|
||||
)
|
||||
|
||||
# noinspection PyUnresolvedReferences
|
||||
from pypika.terms import (
|
||||
Array,
|
||||
Bracket,
|
||||
Case,
|
||||
Criterion,
|
||||
EmptyCriterion,
|
||||
Field,
|
||||
Index,
|
||||
Interval,
|
||||
JSON,
|
||||
Not,
|
||||
NullValue,
|
||||
SystemTimeValue,
|
||||
Parameter,
|
||||
QmarkParameter,
|
||||
NumericParameter,
|
||||
NamedParameter,
|
||||
FormatParameter,
|
||||
PyformatParameter,
|
||||
Rollup,
|
||||
Tuple,
|
||||
CustomFunction,
|
||||
)
|
||||
|
||||
# noinspection PyUnresolvedReferences
|
||||
from pypika.utils import (
|
||||
CaseException,
|
||||
GroupingException,
|
||||
JoinException,
|
||||
QueryException,
|
||||
RollupException,
|
||||
SetOperationException,
|
||||
FunctionException,
|
||||
)
|
||||
|
||||
__author__ = "Timothy Heys"
__email__ = "theys@kayak.com"
__version__ = "0.48.9"

# Convenience singletons re-exported at package level:
# NULL renders as SQL NULL; SYSTEM_TIME is the keyword value used in
# temporal (FOR SYSTEM_TIME) queries.
NULL = NullValue()
SYSTEM_TIME = SystemTimeValue()
|
||||
@@ -0,0 +1,122 @@
|
||||
"""
|
||||
Package for SQL analytic functions wrappers
|
||||
"""
|
||||
from pypika.terms import (
|
||||
AnalyticFunction,
|
||||
WindowFrameAnalyticFunction,
|
||||
IgnoreNullsAnalyticFunction,
|
||||
)
|
||||
|
||||
__author__ = "Timothy Heys"
|
||||
__email__ = "theys@kayak.com"
|
||||
|
||||
|
||||
class Preceding(WindowFrameAnalyticFunction.Edge):
    """Window-frame edge rendered as ``<n> PRECEDING``."""

    modifier = "PRECEDING"


class Following(WindowFrameAnalyticFunction.Edge):
    """Window-frame edge rendered as ``<n> FOLLOWING``."""

    modifier = "FOLLOWING"


# Literal frame boundary keyword for window specifications.
CURRENT_ROW = "CURRENT ROW"
|
||||
|
||||
|
||||
class Rank(AnalyticFunction):
    """The ``RANK()`` window function."""

    def __init__(self, **kwargs):
        super().__init__("RANK", **kwargs)


class DenseRank(AnalyticFunction):
    """The ``DENSE_RANK()`` window function."""

    def __init__(self, **kwargs):
        super().__init__("DENSE_RANK", **kwargs)


class RowNumber(AnalyticFunction):
    """The ``ROW_NUMBER()`` window function."""

    def __init__(self, **kwargs):
        super().__init__("ROW_NUMBER", **kwargs)


class NTile(AnalyticFunction):
    """The ``NTILE(term)`` window function, bucketing rows into *term* groups."""

    def __init__(self, term, **kwargs):
        super().__init__("NTILE", term, **kwargs)
|
||||
|
||||
|
||||
class FirstValue(WindowFrameAnalyticFunction, IgnoreNullsAnalyticFunction):
    """The ``FIRST_VALUE(...)`` window function (frame- and IGNORE NULLS-aware)."""

    def __init__(self, *terms, **kwargs):
        super().__init__("FIRST_VALUE", *terms, **kwargs)


class LastValue(WindowFrameAnalyticFunction, IgnoreNullsAnalyticFunction):
    """The ``LAST_VALUE(...)`` window function (frame- and IGNORE NULLS-aware)."""

    def __init__(self, *terms, **kwargs):
        super().__init__("LAST_VALUE", *terms, **kwargs)
|
||||
|
||||
|
||||
class Median(AnalyticFunction):
    """The ``MEDIAN(term)`` analytic function."""

    def __init__(self, term, **kwargs):
        super().__init__("MEDIAN", term, **kwargs)


class Avg(WindowFrameAnalyticFunction):
    """The ``AVG(term)`` aggregate used as a window function."""

    def __init__(self, term, **kwargs):
        super().__init__("AVG", term, **kwargs)


class StdDev(WindowFrameAnalyticFunction):
    """The ``STDDEV(term)`` aggregate used as a window function."""

    def __init__(self, term, **kwargs):
        super().__init__("STDDEV", term, **kwargs)


class StdDevPop(WindowFrameAnalyticFunction):
    """The ``STDDEV_POP(term)`` (population std-dev) window function."""

    def __init__(self, term, **kwargs):
        super().__init__("STDDEV_POP", term, **kwargs)


class StdDevSamp(WindowFrameAnalyticFunction):
    """The ``STDDEV_SAMP(term)`` (sample std-dev) window function."""

    def __init__(self, term, **kwargs):
        super().__init__("STDDEV_SAMP", term, **kwargs)


class Variance(WindowFrameAnalyticFunction):
    """The ``VARIANCE(term)`` window function."""

    def __init__(self, term, **kwargs):
        super().__init__("VARIANCE", term, **kwargs)


class VarPop(WindowFrameAnalyticFunction):
    """The ``VAR_POP(term)`` (population variance) window function."""

    def __init__(self, term, **kwargs):
        super().__init__("VAR_POP", term, **kwargs)


class VarSamp(WindowFrameAnalyticFunction):
    """The ``VAR_SAMP(term)`` (sample variance) window function."""

    def __init__(self, term, **kwargs):
        super().__init__("VAR_SAMP", term, **kwargs)
|
||||
|
||||
|
||||
class Count(WindowFrameAnalyticFunction):
    """The ``COUNT(term)`` aggregate used as a window function."""

    def __init__(self, term, **kwargs):
        super().__init__("COUNT", term, **kwargs)


class Sum(WindowFrameAnalyticFunction):
    """The ``SUM(term)`` aggregate used as a window function."""

    def __init__(self, term, **kwargs):
        super().__init__("SUM", term, **kwargs)


class Max(WindowFrameAnalyticFunction):
    """The ``MAX(term)`` aggregate used as a window function."""

    def __init__(self, term, **kwargs):
        super().__init__("MAX", term, **kwargs)


class Min(WindowFrameAnalyticFunction):
    """The ``MIN(term)`` aggregate used as a window function."""

    def __init__(self, term, **kwargs):
        super().__init__("MIN", term, **kwargs)
|
||||
|
||||
|
||||
class Lag(AnalyticFunction):
    """The ``LAG(...)`` window function — value from a preceding row."""

    def __init__(self, *args, **kwargs):
        super().__init__("LAG", *args, **kwargs)


class Lead(AnalyticFunction):
    """The ``LEAD(...)`` window function — value from a following row."""

    def __init__(self, *args, **kwargs):
        super().__init__("LEAD", *args, **kwargs)
|
||||
@@ -0,0 +1,94 @@
|
||||
import abc
from typing import Optional, Union

from pypika.terms import (
    Field,
    Function,
    Term,
)
from pypika.utils import format_alias_sql
|
||||
|
||||
|
||||
class Array(Term):
    """A literal ClickHouse array term, e.g. ``[1,2,3]``.

    When *converter_cls* is given, each element is wrapped in that
    converter and its ``get_sql()`` output is joined into the bracketed
    list; otherwise the Python ``repr`` of the values list is used as-is.
    """

    def __init__(self, values: list, converter_cls=None, converter_options: dict = None, alias: str = None):
        super().__init__(alias)
        self._values = values
        self._converter_cls = converter_cls
        self._converter_options = converter_options or dict()

    def get_sql(self):
        if self._converter_cls:
            # Render each element through the converter, then bracket the list.
            rendered = [
                self._converter_cls(item, **self._converter_options).get_sql()
                for item in self._values
            ]
            sql = "[" + ",".join(rendered) + "]"
        else:
            sql = str(self._values)

        return format_alias_sql(sql, self.alias)
|
||||
|
||||
|
||||
class HasAny(Function):
    """The ClickHouse ``hasAny(left, right)`` function — true when the two
    arrays share at least one element.

    Either side may be a literal :class:`Array` or a table :class:`Field`;
    fields are rendered double-quoted in the generated SQL.

    Bug fixed: the parameters were annotated ``Array or Field``, which is a
    runtime ``or`` of two class objects and evaluates to just ``Array`` —
    the correct spelling is ``Union[Array, Field]``.  ``alias``/``schema``
    defaults of ``None`` are likewise annotated ``Optional[str]``.
    """

    def __init__(
        self,
        left_array: Union[Array, Field],
        right_array: Union[Array, Field],
        alias: Optional[str] = None,
        schema: Optional[str] = None,
    ):
        self._left_array = left_array
        self._right_array = right_array
        self.alias = alias
        self.schema = schema
        # No positional args are stored; rendering is done from the two arrays.
        self.args = ()
        self.name = "hasAny"

    def get_sql(self, with_alias=False, with_namespace=False, quote_char=None, dialect=None, **kwargs):
        left = self._left_array.get_sql()
        right = self._right_array.get_sql()
        # Double-quote only Field references; literal arrays pass through.
        sql = "{name}({left},{right})".format(
            name=self.name,
            left='"%s"' % left if isinstance(self._left_array, Field) else left,
            right='"%s"' % right if isinstance(self._right_array, Field) else right,
        )
        return format_alias_sql(sql, self.alias, **kwargs)
|
||||
|
||||
|
||||
class _AbstractArrayFunction(Function, metaclass=abc.ABCMeta):
    """Base for single-argument ClickHouse array functions.

    Subclasses supply the function name via :meth:`clickhouse_function`.
    The argument may be a literal :class:`Array` or a table :class:`Field`
    (fields are double-quoted in the output).

    Bug fixed: the ``array`` parameter was annotated ``Array or Field``,
    which evaluates to just ``Array`` at runtime — replaced with
    ``Union[Array, Field]``; ``None`` defaults are ``Optional[str]``.
    """

    def __init__(self, array: Union[Array, Field], alias: Optional[str] = None, schema: Optional[str] = None):
        self.schema = schema
        self.alias = alias
        self.name = self.clickhouse_function()
        self._array = array

    def get_sql(self, with_namespace=False, quote_char=None, dialect=None, **kwargs):
        array = self._array.get_sql()
        # Double-quote only Field references; literal arrays pass through.
        sql = "{name}({array})".format(
            name=self.name,
            array='"%s"' % array if isinstance(self._array, Field) else array,
        )
        return format_alias_sql(sql, self.alias, **kwargs)

    @classmethod
    @abc.abstractmethod
    def clickhouse_function(cls) -> str:
        """Return the ClickHouse function name to render, e.g. ``"empty"``."""
        pass
|
||||
|
||||
|
||||
class NotEmpty(_AbstractArrayFunction):
    """The ClickHouse ``notEmpty(array)`` function."""

    @classmethod
    def clickhouse_function(cls) -> str:
        return "notEmpty"


class Empty(_AbstractArrayFunction):
    """The ClickHouse ``empty(array)`` function."""

    @classmethod
    def clickhouse_function(cls) -> str:
        return "empty"


class Length(_AbstractArrayFunction):
    """The ClickHouse ``length(array)`` function."""

    @classmethod
    def clickhouse_function(cls) -> str:
        return "length"
|
||||
@@ -0,0 +1,11 @@
|
||||
from pypika.terms import Function
|
||||
|
||||
|
||||
class If(Function):
    """The ClickHouse ``if(cond, then, else)`` conditional function."""

    def __init__(self, *conditions, **kwargs):
        super().__init__("if", *conditions, **kwargs)


class MultiIf(Function):
    """The ClickHouse ``multiIf(cond1, then1, ..., else)`` function."""

    def __init__(self, *conditions, **kwargs):
        super().__init__("multiIf", *conditions, **kwargs)
|
||||
@@ -0,0 +1,22 @@
|
||||
from pypika import CustomFunction
|
||||
|
||||
# Shared parameter names for the add*/subtract* date-arithmetic wrappers:
# the column/expression and the interval to add or subtract.
_add_subtract_args = ["name", "interval"]

ToYYYYMM = CustomFunction("toYYYYMM")
AddYears = CustomFunction("addYears", _add_subtract_args)
AddMonths = CustomFunction("addMonths", _add_subtract_args)
AddWeeks = CustomFunction("addWeeks", _add_subtract_args)
AddDays = CustomFunction("addDays", _add_subtract_args)
AddHours = CustomFunction("addHours", _add_subtract_args)
AddMinutes = CustomFunction("addMinutes", _add_subtract_args)
AddSeconds = CustomFunction("addSeconds", _add_subtract_args)
AddQuarters = CustomFunction("addQuarters", _add_subtract_args)
SubtractYears = CustomFunction("subtractYears", _add_subtract_args)
SubtractMonths = CustomFunction("subtractMonths", _add_subtract_args)
SubtractWeeks = CustomFunction("subtractWeeks", _add_subtract_args)
SubtractDays = CustomFunction("subtractDays", _add_subtract_args)
SubtractHours = CustomFunction("subtractHours", _add_subtract_args)
SubtractMinutes = CustomFunction("subtractMinutes", _add_subtract_args)
SubtractSeconds = CustomFunction("subtractSeconds", _add_subtract_args)
SubtractQuarters = CustomFunction("subtractQuarters", _add_subtract_args)
FormatDateTime = CustomFunction("formatDateTime", ["name", "dt_format"])
|
||||
@@ -0,0 +1,6 @@
|
||||
from pypika.terms import Function
|
||||
|
||||
|
||||
class IfNull(Function):
    """The ClickHouse ``ifNull(term, alt)`` function — *alt* when *term* is NULL."""

    def __init__(self, term, alt, **kwargs):
        super().__init__("ifNull", term, alt, **kwargs)
|
||||
@@ -0,0 +1,88 @@
|
||||
import abc
|
||||
|
||||
from pypika.terms import Function
|
||||
from pypika.utils import format_alias_sql
|
||||
|
||||
|
||||
class _AbstractSearchString(Function, metaclass=abc.ABCMeta):
    """Base for ClickHouse single-pattern string-search functions.

    Renders ``<fn>(toString("<arg>"),'<pattern>')``.  Subclasses provide
    the concrete function name via :meth:`clickhouse_function`.

    NOTE(review): the pattern is interpolated into the SQL with plain
    string formatting and no escaping — callers must not pass untrusted
    pattern text.
    """

    def __init__(self, name, pattern: str, alias: str = None):
        super(_AbstractSearchString, self).__init__(self.clickhouse_function(), name, alias=alias)

        self._pattern = pattern

    @classmethod
    @abc.abstractmethod
    def clickhouse_function(cls) -> str:
        """Return the ClickHouse function name to render, e.g. ``"match"``."""
        pass

    def get_sql(self, with_alias=False, with_namespace=False, quote_char=None, dialect=None, **kwargs):
        args = []
        for p in self.args:
            # Terms know how to render themselves; anything else is stringified.
            if hasattr(p, "get_sql"):
                args.append('toString("{arg}")'.format(arg=p.get_sql(with_alias=False, **kwargs)))
            else:
                args.append(str(p))

        sql = "{name}({args},'{pattern}')".format(
            name=self.name,
            args=",".join(args),
            pattern=self._pattern,
        )
        return format_alias_sql(sql, self.alias, **kwargs)
|
||||
|
||||
|
||||
class Match(_AbstractSearchString):
    """The ClickHouse ``match`` (regex search) function."""

    @classmethod
    def clickhouse_function(cls) -> str:
        return "match"


class Like(_AbstractSearchString):
    """The ClickHouse ``like`` (SQL LIKE pattern) function."""

    @classmethod
    def clickhouse_function(cls) -> str:
        return "like"


class NotLike(_AbstractSearchString):
    """The ClickHouse ``notLike`` function (negated LIKE)."""

    @classmethod
    def clickhouse_function(cls) -> str:
        return "notLike"
|
||||
|
||||
|
||||
class _AbstractMultiSearchString(Function, metaclass=abc.ABCMeta):
    """Base for ClickHouse multi-pattern string-search functions.

    Renders ``<fn>(toString("<arg>"),['p1','p2',...])``.  Subclasses
    provide the concrete function name via :meth:`clickhouse_function`.

    NOTE(review): patterns are interpolated without escaping — callers
    must not pass untrusted pattern text.
    """

    def __init__(self, name, patterns: list, alias: str = None):
        super(_AbstractMultiSearchString, self).__init__(self.clickhouse_function(), name, alias=alias)

        self._patterns = patterns

    @classmethod
    @abc.abstractmethod
    def clickhouse_function(cls) -> str:
        """Return the ClickHouse function name to render."""
        pass

    def get_sql(self, with_alias=False, with_namespace=False, quote_char=None, dialect=None, **kwargs):
        args = []
        for p in self.args:
            # Terms know how to render themselves; anything else is stringified.
            if hasattr(p, "get_sql"):
                args.append('toString("{arg}")'.format(arg=p.get_sql(with_alias=False, **kwargs)))
            else:
                args.append(str(p))

        sql = "{name}({args},[{patterns}])".format(
            name=self.name,
            args=",".join(args),
            patterns=",".join(["'%s'" % i for i in self._patterns]),
        )
        return format_alias_sql(sql, self.alias, **kwargs)
|
||||
|
||||
|
||||
class MultiSearchAny(_AbstractMultiSearchString):
    """The ClickHouse ``multiSearchAny`` (any-substring match) function."""

    @classmethod
    def clickhouse_function(cls) -> str:
        return "multiSearchAny"


class MultiMatchAny(_AbstractMultiSearchString):
    """The ClickHouse ``multiMatchAny`` (any-regex match) function."""

    @classmethod
    def clickhouse_function(cls) -> str:
        return "multiMatchAny"
|
||||
@@ -0,0 +1,88 @@
|
||||
from pypika.terms import (
|
||||
Field,
|
||||
Function,
|
||||
)
|
||||
from pypika.utils import format_alias_sql
|
||||
|
||||
|
||||
class ToString(Function):
    """The ClickHouse ``toString(x)`` type-conversion function."""

    def __init__(self, name, alias: str = None):
        super(ToString, self).__init__("toString", name, alias=alias)
|
||||
|
||||
|
||||
class ToFixedString(Function):
    """The ClickHouse ``toFixedString(field, length)`` conversion.

    Non-``Field`` arguments are rendered as single-quoted string literals;
    ``Field`` arguments are rendered via their normal term formatting.
    """

    def __init__(self, field, length: int, alias: str = None, schema: str = None):
        self._length = length
        self._field = field
        self.alias = alias
        self.name = "toFixedString"
        self.schema = schema
        # No positional args are stored; rendering uses _field/_length directly.
        self.args = ()

    def get_sql(self, with_alias=False, with_namespace=False, quote_char=None, dialect=None, **kwargs):
        sql = "{name}({field},{length})".format(
            name=self.name,
            # Fields render themselves; anything else becomes a quoted literal.
            field=self._field if isinstance(self._field, Field) else "'%s'" % str(self._field),
            length=self._length,
        )
        return format_alias_sql(sql, self.alias, **kwargs)
|
||||
|
||||
|
||||
class ToInt8(Function):
    """The ClickHouse ``toInt8(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toInt8", name, alias=alias)


class ToInt16(Function):
    """The ClickHouse ``toInt16(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toInt16", name, alias=alias)


class ToInt32(Function):
    """The ClickHouse ``toInt32(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toInt32", name, alias=alias)


class ToInt64(Function):
    """The ClickHouse ``toInt64(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toInt64", name, alias=alias)


class ToUInt8(Function):
    """The ClickHouse ``toUInt8(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toUInt8", name, alias=alias)


class ToUInt16(Function):
    """The ClickHouse ``toUInt16(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toUInt16", name, alias=alias)


class ToUInt32(Function):
    """The ClickHouse ``toUInt32(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toUInt32", name, alias=alias)


class ToUInt64(Function):
    """The ClickHouse ``toUInt64(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toUInt64", name, alias=alias)


class ToFloat32(Function):
    """The ClickHouse ``toFloat32(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toFloat32", name, alias=alias)


class ToFloat64(Function):
    """The ClickHouse ``toFloat64(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toFloat64", name, alias=alias)


class ToDate(Function):
    """The ClickHouse ``toDate(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toDate", name, alias=alias)


class ToDateTime(Function):
    """The ClickHouse ``toDateTime(x)`` conversion."""

    def __init__(self, name, alias: str = None):
        super().__init__("toDateTime", name, alias=alias)
|
||||
@@ -0,0 +1,870 @@
|
||||
import itertools
|
||||
from copy import copy
|
||||
from typing import Any, Optional, Union, Tuple as TypedTuple
|
||||
|
||||
from pypika.enums import Dialects
|
||||
from pypika.queries import (
|
||||
CreateQueryBuilder,
|
||||
Database,
|
||||
DropQueryBuilder,
|
||||
Selectable,
|
||||
Table,
|
||||
Query,
|
||||
QueryBuilder,
|
||||
)
|
||||
from pypika.terms import ArithmeticExpression, Criterion, EmptyCriterion, Field, Function, Star, Term, ValueWrapper
|
||||
from pypika.utils import QueryException, builder, format_quotes
|
||||
|
||||
|
||||
class SnowflakeQuery(Query):
    """
    Defines a query class for use with Snowflake.
    """

    @classmethod
    def _builder(cls, **kwargs: Any) -> "SnowflakeQueryBuilder":
        return SnowflakeQueryBuilder(**kwargs)

    @classmethod
    def create_table(cls, table: Union[str, Table]) -> "SnowflakeCreateQueryBuilder":
        """Start a ``CREATE TABLE`` statement for *table*."""
        return SnowflakeCreateQueryBuilder().create_table(table)

    @classmethod
    def drop_table(cls, table: Union[str, Table]) -> "SnowflakeDropQueryBuilder":
        """Start a ``DROP TABLE`` statement for *table*."""
        return SnowflakeDropQueryBuilder().drop_table(table)
|
||||
|
||||
|
||||
class SnowflakeQueryBuilder(QueryBuilder):
    """SELECT builder for Snowflake: unquoted identifiers, quoted aliases."""

    QUOTE_CHAR = None
    ALIAS_QUOTE_CHAR = '"'
    QUERY_ALIAS_QUOTE_CHAR = ''
    QUERY_CLS = SnowflakeQuery

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(dialect=Dialects.SNOWFLAKE, **kwargs)


class SnowflakeCreateQueryBuilder(CreateQueryBuilder):
    """CREATE TABLE builder for Snowflake (identifiers unquoted)."""

    QUOTE_CHAR = None
    QUERY_CLS = SnowflakeQuery

    def __init__(self) -> None:
        super().__init__(dialect=Dialects.SNOWFLAKE)


class SnowflakeDropQueryBuilder(DropQueryBuilder):
    """DROP TABLE builder for Snowflake (identifiers unquoted)."""

    QUOTE_CHAR = None
    QUERY_CLS = SnowflakeQuery

    def __init__(self) -> None:
        super().__init__(dialect=Dialects.SNOWFLAKE)
|
||||
|
||||
|
||||
class MySQLQuery(Query):
    """
    Defines a query class for use with MySQL.
    """

    @classmethod
    def _builder(cls, **kwargs: Any) -> "MySQLQueryBuilder":
        return MySQLQueryBuilder(**kwargs)

    @classmethod
    def load(cls, fp: str) -> "MySQLLoadQueryBuilder":
        """Start a ``LOAD DATA LOCAL INFILE`` statement for file path *fp*."""
        return MySQLLoadQueryBuilder().load(fp)

    @classmethod
    def create_table(cls, table: Union[str, Table]) -> "MySQLCreateQueryBuilder":
        """Start a ``CREATE TABLE`` statement for *table*."""
        return MySQLCreateQueryBuilder().create_table(table)

    @classmethod
    def drop_table(cls, table: Union[str, Table]) -> "MySQLDropQueryBuilder":
        """Start a ``DROP TABLE`` statement for *table*."""
        return MySQLDropQueryBuilder().drop_table(table)
|
||||
|
||||
|
||||
class MySQLQueryBuilder(QueryBuilder):
    """Query builder emitting MySQL-flavoured SQL: backtick quoting,
    ``ON DUPLICATE KEY`` conflict handling, SELECT modifiers
    (e.g. ``SQL_CALC_FOUND_ROWS``) and ``FOR UPDATE`` options."""

    QUOTE_CHAR = "`"
    QUERY_CLS = MySQLQuery

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(dialect=Dialects.MYSQL, wrap_set_operation_queries=False, **kwargs)
        self._duplicate_updates = []     # (Field, ValueWrapper) pairs for ON DUPLICATE KEY UPDATE
        self._ignore_duplicates = False  # True -> ON DUPLICATE KEY IGNORE
        self._modifiers = []             # SELECT modifiers, rendered after DISTINCT

        # FOR UPDATE options (set via for_update()).
        self._for_update_nowait = False
        self._for_update_skip_locked = False
        self._for_update_of = set()

    def __copy__(self) -> "MySQLQueryBuilder":
        # Copy the mutable conflict-handler state so the clone is independent.
        newone = super().__copy__()
        newone._duplicate_updates = copy(self._duplicate_updates)
        newone._ignore_duplicates = copy(self._ignore_duplicates)
        return newone

    @builder
    def for_update(
        self, nowait: bool = False, skip_locked: bool = False, of: TypedTuple[str, ...] = ()
    ) -> "QueryBuilder":
        # NOWAIT takes precedence over SKIP LOCKED in _for_update_sql.
        self._for_update = True
        self._for_update_skip_locked = skip_locked
        self._for_update_nowait = nowait
        self._for_update_of = set(of)

    @builder
    def on_duplicate_key_update(self, field: Union[Field, str], value: Any) -> "MySQLQueryBuilder":
        # UPDATE and IGNORE conflict handlers are mutually exclusive.
        if self._ignore_duplicates:
            raise QueryException("Can not have two conflict handlers")

        field = Field(field) if not isinstance(field, Field) else field
        self._duplicate_updates.append((field, ValueWrapper(value)))

    @builder
    def on_duplicate_key_ignore(self) -> "MySQLQueryBuilder":
        # UPDATE and IGNORE conflict handlers are mutually exclusive.
        if self._duplicate_updates:
            raise QueryException("Can not have two conflict handlers")

        self._ignore_duplicates = True

    def get_sql(self, **kwargs: Any) -> str:
        # Append the ON DUPLICATE KEY clause only when a base query rendered.
        self._set_kwargs_defaults(kwargs)
        querystring = super(MySQLQueryBuilder, self).get_sql(**kwargs)
        if querystring:
            if self._duplicate_updates:
                querystring += self._on_duplicate_key_update_sql(**kwargs)
            elif self._ignore_duplicates:
                querystring += self._on_duplicate_key_ignore_sql()
        return querystring

    def _for_update_sql(self, **kwargs) -> str:
        """Render the ``FOR UPDATE [OF ...] [NOWAIT | SKIP LOCKED]`` suffix."""
        if self._for_update:
            for_update = ' FOR UPDATE'
            if self._for_update_of:
                for_update += f' OF {", ".join([Table(item).get_sql(**kwargs) for item in self._for_update_of])}'
            if self._for_update_nowait:
                for_update += ' NOWAIT'
            elif self._for_update_skip_locked:
                for_update += ' SKIP LOCKED'
        else:
            for_update = ''

        return for_update

    def _on_duplicate_key_update_sql(self, **kwargs: Any) -> str:
        return " ON DUPLICATE KEY UPDATE {updates}".format(
            updates=",".join(
                "{field}={value}".format(field=field.get_sql(**kwargs), value=value.get_sql(**kwargs))
                for field, value in self._duplicate_updates
            )
        )

    def _on_duplicate_key_ignore_sql(self) -> str:
        return " ON DUPLICATE KEY IGNORE"

    @builder
    def modifier(self, value: str) -> "MySQLQueryBuilder":
        """
        Adds a modifier such as SQL_CALC_FOUND_ROWS to the query.
        https://dev.mysql.com/doc/refman/5.7/en/select.html

        :param value: The modifier value e.g. SQL_CALC_FOUND_ROWS
        """
        self._modifiers.append(value)

    def _select_sql(self, **kwargs: Any) -> str:
        """
        Overridden function to generate the SELECT part of the SQL statement,
        with the addition of a modifier if present.
        """
        return "SELECT {distinct}{modifier}{select}".format(
            distinct="DISTINCT " if self._distinct else "",
            modifier="{} ".format(" ".join(self._modifiers)) if self._modifiers else "",
            select=",".join(term.get_sql(with_alias=True, subquery=True, **kwargs) for term in self._selects),
        )
|
||||
|
||||
|
||||
class MySQLLoadQueryBuilder:
    """Builder for MySQL ``LOAD DATA LOCAL INFILE ... INTO TABLE ...``.

    Both the file path (``load``) and target table (``into``) must be set
    before ``get_sql`` produces any output; otherwise it returns "".
    """

    QUERY_CLS = MySQLQuery

    def __init__(self) -> None:
        self._load_file = None   # path passed to load()
        self._into_table = None  # Table passed to into()

    @builder
    def load(self, fp: str) -> "MySQLLoadQueryBuilder":
        self._load_file = fp

    @builder
    def into(self, table: Union[str, Table]) -> "MySQLLoadQueryBuilder":
        self._into_table = table if isinstance(table, Table) else Table(table)

    def get_sql(self, *args: Any, **kwargs: Any) -> str:
        querystring = ""
        if self._load_file and self._into_table:
            querystring += self._load_file_sql(**kwargs)
            querystring += self._into_table_sql(**kwargs)
            querystring += self._options_sql(**kwargs)

        return querystring

    def _load_file_sql(self, **kwargs: Any) -> str:
        return "LOAD DATA LOCAL INFILE '{}'".format(self._load_file)

    def _into_table_sql(self, **kwargs: Any) -> str:
        return " INTO TABLE `{}`".format(self._into_table.get_sql(**kwargs))

    def _options_sql(self, **kwargs: Any) -> str:
        # Fixed CSV options; not currently configurable.
        return " FIELDS TERMINATED BY ','"

    def __str__(self) -> str:
        return self.get_sql()
|
||||
|
||||
|
||||
class MySQLCreateQueryBuilder(CreateQueryBuilder):
    """CREATE TABLE builder for MySQL (backtick-quoted identifiers)."""

    QUOTE_CHAR = "`"


class MySQLDropQueryBuilder(DropQueryBuilder):
    """DROP TABLE builder for MySQL (backtick-quoted identifiers)."""

    QUOTE_CHAR = "`"
|
||||
|
||||
|
||||
class VerticaQuery(Query):
    """
    Defines a query class for use with Vertica.
    """

    @classmethod
    def _builder(cls, **kwargs) -> "VerticaQueryBuilder":
        return VerticaQueryBuilder(**kwargs)

    @classmethod
    def from_file(cls, fp: str) -> "VerticaCopyQueryBuilder":
        """Start a ``COPY ... FROM LOCAL`` statement for file path *fp*."""
        return VerticaCopyQueryBuilder().from_file(fp)

    @classmethod
    def create_table(cls, table: Union[str, Table]) -> "VerticaCreateQueryBuilder":
        """Start a ``CREATE TABLE`` statement for *table*."""
        return VerticaCreateQueryBuilder().create_table(table)
|
||||
|
||||
|
||||
class VerticaQueryBuilder(QueryBuilder):
    """SELECT builder for Vertica, adding optional ``/*+label(...)*/`` hints."""

    QUERY_CLS = VerticaQuery

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(dialect=Dialects.VERTICA, **kwargs)
        self._hint = None  # label for a /*+label(...)*/ hint, set via hint()

    @builder
    def hint(self, label: str) -> "VerticaQueryBuilder":
        self._hint = label

    def get_sql(self, *args: Any, **kwargs: Any) -> str:
        sql = super().get_sql(*args, **kwargs)

        if self._hint is not None:
            # Splice the hint in directly after the leading "SELECT" keyword.
            # NOTE(review): sql[:7] and sql[6:] overlap at index 6 (the space
            # after SELECT), yielding "SELECT /*+label(x)*/ ..." — confirm this
            # duplication of the space is the intended rendering.
            sql = "".join([sql[:7], "/*+label({hint})*/".format(hint=self._hint), sql[6:]])

        return sql
|
||||
|
||||
|
||||
class VerticaCreateQueryBuilder(CreateQueryBuilder):
    """CREATE TABLE builder for Vertica, adding ``LOCAL`` temporary tables
    and the ``ON COMMIT PRESERVE ROWS`` option."""

    QUERY_CLS = VerticaQuery

    def __init__(self) -> None:
        super().__init__(dialect=Dialects.VERTICA)
        self._local = False          # CREATE LOCAL TEMPORARY TABLE
        self._preserve_rows = False  # append ON COMMIT PRESERVE ROWS

    @builder
    def local(self) -> "VerticaCreateQueryBuilder":
        # LOCAL is only valid on TEMPORARY tables.
        if not self._temporary:
            raise AttributeError("'Query' object has no attribute temporary")

        self._local = True

    @builder
    def preserve_rows(self) -> "VerticaCreateQueryBuilder":
        # PRESERVE ROWS is only valid on TEMPORARY tables.
        if not self._temporary:
            raise AttributeError("'Query' object has no attribute temporary")

        self._preserve_rows = True

    def _create_table_sql(self, **kwargs: Any) -> str:
        return "CREATE {local}{temporary}TABLE {table}".format(
            local="LOCAL " if self._local else "",
            temporary="TEMPORARY " if self._temporary else "",
            table=self._create_table.get_sql(**kwargs),
        )

    def _table_options_sql(self, **kwargs) -> str:
        table_options = super()._table_options_sql(**kwargs)
        table_options += self._preserve_rows_sql()
        return table_options

    def _as_select_sql(self, **kwargs: Any) -> str:
        return "{preserve_rows} AS ({query})".format(
            preserve_rows=self._preserve_rows_sql(),
            query=self._as_select.get_sql(**kwargs),
        )

    def _preserve_rows_sql(self) -> str:
        return " ON COMMIT PRESERVE ROWS" if self._preserve_rows else ""
|
||||
|
||||
|
||||
class VerticaCopyQueryBuilder:
    """Builder for Vertica ``COPY "table" FROM LOCAL 'file' ...``.

    Both the target table (``copy_``) and source file (``from_file``) must
    be set before ``get_sql`` produces any output; otherwise it returns "".
    """

    QUERY_CLS = VerticaQuery

    def __init__(self) -> None:
        self._copy_table = None  # Table passed to copy_()
        self._from_file = None   # path passed to from_file()

    @builder
    def from_file(self, fp: str) -> "VerticaCopyQueryBuilder":
        self._from_file = fp

    @builder
    def copy_(self, table: Union[str, Table]) -> "VerticaCopyQueryBuilder":
        self._copy_table = table if isinstance(table, Table) else Table(table)

    def get_sql(self, *args: Any, **kwargs: Any) -> str:
        querystring = ""
        if self._copy_table and self._from_file:
            querystring += self._copy_table_sql(**kwargs)
            querystring += self._from_file_sql(**kwargs)
            querystring += self._options_sql(**kwargs)

        return querystring

    def _copy_table_sql(self, **kwargs: Any) -> str:
        return 'COPY "{}"'.format(self._copy_table.get_sql(**kwargs))

    def _from_file_sql(self, **kwargs: Any) -> str:
        return " FROM LOCAL '{}'".format(self._from_file)

    def _options_sql(self, **kwargs: Any) -> str:
        # Fixed parser options; not currently configurable.
        return " PARSER fcsvparser(header=false)"

    def __str__(self) -> str:
        return self.get_sql()
|
||||
|
||||
|
||||
class OracleQuery(Query):
    """
    Defines a query class for use with Oracle.
    """

    @classmethod
    def _builder(cls, **kwargs: Any) -> "OracleQueryBuilder":
        return OracleQueryBuilder(**kwargs)


class OracleQueryBuilder(QueryBuilder):
    """SELECT builder for Oracle: unquoted identifiers, no alias in GROUP BY."""

    QUOTE_CHAR = None
    QUERY_CLS = OracleQuery

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(dialect=Dialects.ORACLE, **kwargs)

    def get_sql(self, *args: Any, **kwargs: Any) -> str:
        # Oracle does not support group by a field alias
        # Note: set directly in kwargs as they are re-used down the tree in the case of subqueries!
        kwargs['groupby_alias'] = False
        return super().get_sql(*args, **kwargs)
|
||||
|
||||
|
||||
class PostgreSQLQuery(Query):
    """
    Defines a query class for use with PostgreSQL.
    """

    @classmethod
    def _builder(cls, **kwargs) -> "PostgreSQLQueryBuilder":
        return PostgreSQLQueryBuilder(**kwargs)
|
||||
|
||||
|
||||
class PostgreSQLQueryBuilder(QueryBuilder):
    """Query builder implementing PostgreSQL-specific syntax.

    Adds support for DISTINCT ON, ON CONFLICT (DO NOTHING / DO UPDATE ...
    [WHERE ...]), RETURNING, and the FOR UPDATE locking options
    NOWAIT / SKIP LOCKED / OF.
    """

    ALIAS_QUOTE_CHAR = '"'
    QUERY_CLS = PostgreSQLQuery

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(dialect=Dialects.POSTGRESQL, **kwargs)
        # RETURNING clause state
        self._returns = []
        self._return_star = False

        # ON CONFLICT clause state
        self._on_conflict = False
        self._on_conflict_fields = []
        self._on_conflict_do_nothing = False
        self._on_conflict_do_updates = []
        self._on_conflict_wheres = None
        self._on_conflict_do_update_wheres = None

        # DISTINCT ON(...) terms
        self._distinct_on = []

        # FOR UPDATE options
        self._for_update_nowait = False
        self._for_update_skip_locked = False
        self._for_update_of = set()

    def __copy__(self) -> "PostgreSQLQueryBuilder":
        # Shallow-copy mutable clause lists so the builder-immutability
        # contract (see @builder) holds for this subclass's extra state.
        newone = super().__copy__()
        newone._returns = copy(self._returns)
        newone._on_conflict_do_updates = copy(self._on_conflict_do_updates)
        return newone

    @builder
    def distinct_on(self, *fields: Union[str, Term]) -> "PostgreSQLQueryBuilder":
        """Add DISTINCT ON terms; plain strings are wrapped as Fields."""
        for field in fields:
            if isinstance(field, str):
                self._distinct_on.append(Field(field))
            elif isinstance(field, Term):
                self._distinct_on.append(field)

    @builder
    def for_update(
        self, nowait: bool = False, skip_locked: bool = False, of: TypedTuple[str, ...] = ()
    ) -> "QueryBuilder":
        """Enable FOR UPDATE with optional NOWAIT / SKIP LOCKED / OF <tables>."""
        self._for_update = True
        self._for_update_skip_locked = skip_locked
        self._for_update_nowait = nowait
        self._for_update_of = set(of)

    @builder
    def on_conflict(self, *target_fields: Union[str, Term]) -> "PostgreSQLQueryBuilder":
        """Begin an ON CONFLICT clause targeting the given fields (INSERT only)."""
        if not self._insert_table:
            raise QueryException("On conflict only applies to insert query")

        self._on_conflict = True

        for target_field in target_fields:
            if isinstance(target_field, str):
                self._on_conflict_fields.append(self._conflict_field_str(target_field))
            elif isinstance(target_field, Term):
                self._on_conflict_fields.append(target_field)

    @builder
    def do_nothing(self) -> "PostgreSQLQueryBuilder":
        """ON CONFLICT ... DO NOTHING; mutually exclusive with do_update()."""
        if len(self._on_conflict_do_updates) > 0:
            raise QueryException("Can not have two conflict handlers")
        self._on_conflict_do_nothing = True

    @builder
    def do_update(
        self, update_field: Union[str, Field], update_value: Optional[Any] = None
    ) -> "PostgreSQLQueryBuilder":
        """ON CONFLICT ... DO UPDATE SET field=value.

        When ``update_value`` is None the rendered assignment uses
        ``EXCLUDED.<field>`` (the incoming row's value) instead.
        """
        if self._on_conflict_do_nothing:
            raise QueryException("Can not have two conflict handlers")

        if isinstance(update_field, str):
            field = self._conflict_field_str(update_field)
        elif isinstance(update_field, Field):
            field = update_field
        else:
            raise QueryException("Unsupported update_field")

        # None marks "use EXCLUDED.<field>" at render time (see
        # _on_conflict_action_sql).
        if update_value is not None:
            self._on_conflict_do_updates.append((field, ValueWrapper(update_value)))
        else:
            self._on_conflict_do_updates.append((field, None))

    @builder
    def where(self, criterion: Criterion) -> "PostgreSQLQueryBuilder":
        """Route WHERE criteria to the ON CONFLICT clause when one is active,
        otherwise behave like the base builder's WHERE."""
        if not self._on_conflict:
            return super().where(criterion)

        if isinstance(criterion, EmptyCriterion):
            return

        if self._on_conflict_do_nothing:
            raise QueryException('DO NOTHING doest not support WHERE')

        if self._on_conflict_fields and self._on_conflict_do_updates:
            # A handler is already chosen: the WHERE filters the DO UPDATE action.
            if self._on_conflict_do_update_wheres:
                self._on_conflict_do_update_wheres &= criterion
            else:
                self._on_conflict_do_update_wheres = criterion
        elif self._on_conflict_fields:
            # No handler yet: the WHERE qualifies the conflict target itself.
            if self._on_conflict_wheres:
                self._on_conflict_wheres &= criterion
            else:
                self._on_conflict_wheres = criterion
        else:
            raise QueryException('Can not have fieldless ON CONFLICT WHERE')

    @builder
    def using(self, table: Union[Selectable, str]) -> "QueryBuilder":
        """Append a table to the USING clause."""
        self._using.append(table)

    def _distinct_sql(self, **kwargs: Any) -> str:
        # Prefer DISTINCT ON(...) when terms were given; fall back to plain DISTINCT.
        if self._distinct_on:
            return "DISTINCT ON({distinct_on}) ".format(
                distinct_on=",".join(term.get_sql(with_alias=True, **kwargs) for term in self._distinct_on)
            )
        return super()._distinct_sql(**kwargs)

    def _conflict_field_str(self, term: str) -> Optional[Field]:
        # Bind a conflict field name to the insert table; returns None
        # (implicitly) when there is no insert table.
        if self._insert_table:
            return Field(term, table=self._insert_table)

    def _on_conflict_sql(self, **kwargs: Any) -> str:
        # No handler at all: legal only when no target fields were given either.
        if not self._on_conflict_do_nothing and len(self._on_conflict_do_updates) == 0:
            if not self._on_conflict_fields:
                return ""
            raise QueryException("No handler defined for on conflict")

        if self._on_conflict_do_updates and not self._on_conflict_fields:
            raise QueryException("Can not have fieldless on conflict do update")

        conflict_query = " ON CONFLICT"
        if self._on_conflict_fields:
            fields = [f.get_sql(with_alias=True, **kwargs) for f in self._on_conflict_fields]
            conflict_query += " (" + ', '.join(fields) + ")"

        if self._on_conflict_wheres:
            conflict_query += " WHERE {where}".format(where=self._on_conflict_wheres.get_sql(subquery=True, **kwargs))

        return conflict_query

    def _for_update_sql(self, **kwargs) -> str:
        if self._for_update:
            for_update = ' FOR UPDATE'
            if self._for_update_of:
                for_update += f' OF {", ".join([Table(item).get_sql(**kwargs) for item in self._for_update_of])}'
            # NOWAIT takes precedence when both flags were set.
            if self._for_update_nowait:
                for_update += ' NOWAIT'
            elif self._for_update_skip_locked:
                for_update += ' SKIP LOCKED'
        else:
            for_update = ''

        return for_update

    def _on_conflict_action_sql(self, **kwargs: Any) -> str:
        if self._on_conflict_do_nothing:
            return " DO NOTHING"
        elif len(self._on_conflict_do_updates) > 0:
            updates = []
            for field, value in self._on_conflict_do_updates:
                if value:
                    updates.append(
                        "{field}={value}".format(
                            field=field.get_sql(**kwargs),
                            value=value.get_sql(with_namespace=True, **kwargs),
                        )
                    )
                else:
                    # No explicit value stored: take the incoming row's value.
                    updates.append(
                        "{field}=EXCLUDED.{value}".format(
                            field=field.get_sql(**kwargs),
                            value=field.get_sql(**kwargs),
                        )
                    )
            action_sql = " DO UPDATE SET {updates}".format(updates=",".join(updates))

            if self._on_conflict_do_update_wheres:
                action_sql += " WHERE {where}".format(
                    where=self._on_conflict_do_update_wheres.get_sql(subquery=True, with_namespace=True, **kwargs)
                )
            return action_sql

        return ''

    @builder
    def returning(self, *terms: Any) -> "PostgreSQLQueryBuilder":
        """Add RETURNING terms: fields, field-name strings, non-aggregate
        functions/expressions, or constants."""
        for term in terms:
            if isinstance(term, Field):
                self._return_field(term)
            elif isinstance(term, str):
                self._return_field_str(term)
            elif isinstance(term, (Function, ArithmeticExpression)):
                if term.is_aggregate:
                    raise QueryException("Aggregate functions are not allowed in returning")
                self._return_other(term)
            else:
                self._return_other(self.wrap_constant(term, self._wrapper_cls))

    def _validate_returning_term(self, term: Term) -> None:
        # RETURNING is only valid on INSERT/UPDATE/DELETE and may only reference
        # the statement's own tables (target, base FROM, or joined tables).
        for field in term.fields_():
            if not any([self._insert_table, self._update_table, self._delete_from]):
                raise QueryException("Returning can't be used in this query")

            table_is_insert_or_update_table = field.table in {self._insert_table, self._update_table}
            join_tables = set(itertools.chain.from_iterable([j.criterion.tables_ for j in self._joins]))
            join_and_base_tables = set(self._from) | join_tables
            table_not_base_or_join = bool(term.tables_ - join_and_base_tables)
            if not table_is_insert_or_update_table and table_not_base_or_join:
                raise QueryException("You can't return from other tables")

    def _set_returns_for_star(self) -> None:
        # RETURNING * supersedes previously added table-bound terms.
        self._returns = [returning for returning in self._returns if not hasattr(returning, "table")]
        self._return_star = True

    def _return_field(self, term: Union[str, Field]) -> None:
        if self._return_star:
            # Do not add select terms after a star is selected
            return

        self._validate_returning_term(term)

        if isinstance(term, Star):
            self._set_returns_for_star()

        self._returns.append(term)

    def _return_field_str(self, term: Union[str, Field]) -> None:
        if term == "*":
            self._set_returns_for_star()
            self._returns.append(Star())
            return

        # Bind the bare field name to whichever table this statement targets.
        if self._insert_table:
            self._return_field(Field(term, table=self._insert_table))
        elif self._update_table:
            self._return_field(Field(term, table=self._update_table))
        elif self._delete_from:
            self._return_field(Field(term, table=self._from[0]))
        else:
            raise QueryException("Returning can't be used in this query")

    def _return_other(self, function: Term) -> None:
        self._validate_returning_term(function)
        self._returns.append(function)

    def _returning_sql(self, **kwargs: Any) -> str:
        return " RETURNING {returning}".format(
            returning=",".join(term.get_sql(with_alias=True, **kwargs) for term in self._returns),
        )

    def get_sql(self, with_alias: bool = False, subquery: bool = False, **kwargs: Any) -> str:
        self._set_kwargs_defaults(kwargs)

        querystring = super(PostgreSQLQueryBuilder, self).get_sql(with_alias, subquery, **kwargs)

        # Both helpers return "" when no ON CONFLICT state is set.
        querystring += self._on_conflict_sql(**kwargs)
        querystring += self._on_conflict_action_sql(**kwargs)

        if self._returns:
            # Note: set directly in kwargs as they are re-used down the tree.
            kwargs['with_namespace'] = self._update_table and self.from_
            querystring += self._returning_sql(**kwargs)
        return querystring
|
||||
|
||||
|
||||
class RedshiftQuery(Query):
    """Query entry point producing Amazon Redshift builders."""

    @classmethod
    def _builder(cls, **kwargs: Any) -> "RedShiftQueryBuilder":
        # The dialect is fixed here rather than in the builder's __init__.
        return RedShiftQueryBuilder(dialect=Dialects.REDSHIFT, **kwargs)
|
||||
|
||||
|
||||
class RedShiftQueryBuilder(QueryBuilder):
    """Builder for Amazon Redshift queries.

    No Redshift-specific SQL generation is overridden here; only the query
    class used for materialisation differs from the base builder.
    """

    QUERY_CLS = RedshiftQuery
|
||||
|
||||
|
||||
class MSSQLQuery(Query):
    """Query entry point producing Microsoft SQL Server (T-SQL) builders."""

    @classmethod
    def _builder(cls, **kwargs: Any) -> "MSSQLQueryBuilder":
        # Delegate construction to the T-SQL builder.
        return MSSQLQueryBuilder(**kwargs)
|
||||
|
||||
|
||||
class MSSQLQueryBuilder(QueryBuilder):
    """Builder for T-SQL (Microsoft SQL Server) queries.

    Differences from the base builder:
      * ``TOP`` support via :meth:`top` (with PERCENT / WITH TIES options).
      * Pagination rendered as ``OFFSET ... ROWS FETCH NEXT ... ROWS ONLY``.
      * ``GROUP BY`` never uses select aliases (not supported by MSSQL).
    """

    QUERY_CLS = MSSQLQuery

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(dialect=Dialects.MSSQL, **kwargs)
        # TOP clause state; None means no TOP clause.
        self._top: Union[int, None] = None
        self._top_with_ties: bool = False
        self._top_percent: bool = False

    @builder
    def top(self, value: Union[str, int], percent: bool = False, with_ties: bool = False) -> "MSSQLQueryBuilder":
        """
        Implements support for simple TOP clauses.

        https://docs.microsoft.com/en-us/sql/t-sql/queries/top-transact-sql?view=sql-server-2017
        """
        try:
            self._top = int(value)
        except (TypeError, ValueError):
            # Bug fix: also catch TypeError so non-numeric inputs such as None
            # raise QueryException instead of leaking a raw TypeError.
            raise QueryException("TOP value must be an integer")

        # Reuse the already-converted value instead of calling int() twice.
        if percent and not (0 <= self._top <= 100):
            raise QueryException("TOP value must be between 0 and 100 when `percent` is specified")
        self._top_percent: bool = percent
        self._top_with_ties: bool = with_ties

    @builder
    def fetch_next(self, limit: int) -> "MSSQLQueryBuilder":
        # Overridden to provide a more domain-specific API for T-SQL users
        self._limit = limit

    def _offset_sql(self) -> str:
        # OFFSET is mandatory when FETCH NEXT is present, so default to 0.
        return " OFFSET {offset} ROWS".format(offset=self._offset or 0)

    def _limit_sql(self) -> str:
        return " FETCH NEXT {limit} ROWS ONLY".format(limit=self._limit)

    def _apply_pagination(self, querystring: str) -> str:
        # Note: Overridden as MSSQL specifies offset before the fetch next limit
        if self._limit is not None or self._offset:
            # Offset has to be present if fetch next is specified in a MSSQL query
            querystring += self._offset_sql()

        if self._limit is not None:
            querystring += self._limit_sql()

        return querystring

    def get_sql(self, *args: Any, **kwargs: Any) -> str:
        # MSSQL does not support group by a field alias.
        # Note: set directly in kwargs as they are re-used down the tree in the case of subqueries!
        kwargs['groupby_alias'] = False
        return super().get_sql(*args, **kwargs)

    def _top_sql(self) -> str:
        """Render "TOP (n) [PERCENT] [WITH TIES] " or "" when no TOP was set."""
        _top_statement: str = ""
        if self._top:
            _top_statement = f"TOP ({self._top}) "
            if self._top_percent:
                _top_statement = f"{_top_statement}PERCENT "
            if self._top_with_ties:
                _top_statement = f"{_top_statement}WITH TIES "

        return _top_statement

    def _select_sql(self, **kwargs: Any) -> str:
        return "SELECT {distinct}{top}{select}".format(
            top=self._top_sql(),
            distinct="DISTINCT " if self._distinct else "",
            select=",".join(term.get_sql(with_alias=True, subquery=True, **kwargs) for term in self._selects),
        )
|
||||
|
||||
|
||||
class ClickHouseQuery(Query):
    """
    Defines a query class for use with Yandex ClickHouse.
    """

    # Idiom fix: these are @classmethod's, so the first parameter is the class
    # object — it was previously (misleadingly) named ``self``. Renaming it to
    # ``cls`` is backward compatible since it is never passed by keyword.

    @classmethod
    def _builder(cls, **kwargs: Any) -> "ClickHouseQueryBuilder":
        return ClickHouseQueryBuilder(
            dialect=Dialects.CLICKHOUSE, wrap_set_operation_queries=False, as_keyword=True, **kwargs
        )

    @classmethod
    def drop_database(cls, database: Union[Database, str]) -> "ClickHouseDropQueryBuilder":
        """Start a DROP DATABASE statement."""
        return ClickHouseDropQueryBuilder().drop_database(database)

    @classmethod
    def drop_table(cls, table: Union[Table, str]) -> "ClickHouseDropQueryBuilder":
        """Start a DROP TABLE statement."""
        return ClickHouseDropQueryBuilder().drop_table(table)

    @classmethod
    def drop_dictionary(cls, dictionary: str) -> "ClickHouseDropQueryBuilder":
        """Start a DROP DICTIONARY statement."""
        return ClickHouseDropQueryBuilder().drop_dictionary(dictionary)

    @classmethod
    def drop_quota(cls, quota: str) -> "ClickHouseDropQueryBuilder":
        """Start a DROP QUOTA statement."""
        return ClickHouseDropQueryBuilder().drop_quota(quota)

    @classmethod
    def drop_user(cls, user: str) -> "ClickHouseDropQueryBuilder":
        """Start a DROP USER statement."""
        return ClickHouseDropQueryBuilder().drop_user(user)

    @classmethod
    def drop_view(cls, view: str) -> "ClickHouseDropQueryBuilder":
        """Start a DROP VIEW statement."""
        return ClickHouseDropQueryBuilder().drop_view(view)
|
||||
|
||||
|
||||
class ClickHouseQueryBuilder(QueryBuilder):
    """Builder emitting ClickHouse's ALTER TABLE based UPDATE/DELETE syntax."""

    QUERY_CLS = ClickHouseQuery

    @staticmethod
    def _delete_sql(**kwargs: Any) -> str:
        # ClickHouse expresses deletes as ALTER TABLE mutations.
        return 'ALTER TABLE'

    def _update_sql(self, **kwargs: Any) -> str:
        return "ALTER TABLE {table}".format(table=self._update_table.get_sql(**kwargs))

    def _from_sql(self, with_namespace: bool = False, **kwargs: Any) -> str:
        rendered = (source.get_sql(subquery=True, with_alias=True, **kwargs) for source in self._from)
        selectable = ",".join(rendered)
        # A DELETE renders "<table> DELETE" instead of a FROM clause.
        if self._delete_from:
            return " {selectable} DELETE".format(selectable=selectable)
        return " FROM {selectable}".format(selectable=selectable)

    def _set_sql(self, **kwargs: Any) -> str:
        assignments = []
        for field, value in self._updates:
            # Field names are rendered without a namespace prefix here.
            field_sql = field.get_sql(**dict(kwargs, with_namespace=False))
            assignments.append("{field}={value}".format(field=field_sql, value=value.get_sql(**kwargs)))
        return " UPDATE {set}".format(set=",".join(assignments))
|
||||
|
||||
|
||||
class ClickHouseDropQueryBuilder(DropQueryBuilder):
    """DROP-statement builder for ClickHouse.

    Adds DICTIONARY and QUOTA drop targets and optional ON CLUSTER support.
    """

    QUERY_CLS = ClickHouseQuery

    def __init__(self):
        super().__init__(dialect=Dialects.CLICKHOUSE)
        # Cluster to append as "ON CLUSTER <name>"; None means no clause.
        self._cluster_name = None

    @builder
    def drop_dictionary(self, dictionary: str) -> "ClickHouseDropQueryBuilder":
        """Target a DICTIONARY for dropping."""
        super()._set_target('DICTIONARY', dictionary)

    @builder
    def drop_quota(self, quota: str) -> "ClickHouseDropQueryBuilder":
        """Target a QUOTA for dropping."""
        super()._set_target('QUOTA', quota)

    @builder
    def on_cluster(self, cluster: str) -> "ClickHouseDropQueryBuilder":
        """Attach an ON CLUSTER clause; only one cluster per statement."""
        if self._cluster_name:
            raise AttributeError("'DropQuery' object already has attribute cluster_name")
        self._cluster_name = cluster

    def get_sql(self, **kwargs: Any) -> str:
        query = super().get_sql(**kwargs)

        # DROP DICTIONARY does not accept ON CLUSTER, so skip it for that target.
        if self._drop_target_kind != "DICTIONARY" and self._cluster_name is not None:
            query += " ON CLUSTER " + format_quotes(self._cluster_name, super().QUOTE_CHAR)

        return query
|
||||
|
||||
|
||||
class SQLLiteValueWrapper(ValueWrapper):
    """Value wrapper that renders Python booleans as SQLite's 1/0 literals."""

    def get_value_sql(self, **kwargs: Any) -> str:
        # Only booleans get special treatment; everything else is delegated.
        if not isinstance(self.value, bool):
            return super().get_value_sql(**kwargs)
        return "1" if self.value else "0"
|
||||
|
||||
|
||||
class SQLLiteQuery(Query):
    """
    Defines a query class for use with SQLite.
    """
    # (Docstring previously said "Microsoft SQL Server" — a copy-paste error.)

    @classmethod
    def _builder(cls, **kwargs: Any) -> "SQLLiteQueryBuilder":
        return SQLLiteQueryBuilder(**kwargs)
|
||||
|
||||
|
||||
class SQLLiteQueryBuilder(QueryBuilder):
    """Builder for SQLite queries, adding INSERT OR REPLACE support."""

    QUERY_CLS = SQLLiteQuery

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(dialect=Dialects.SQLLITE, wrapper_cls=SQLLiteValueWrapper, **kwargs)
        # True once insert_or_replace() has been called.
        self._insert_or_replace = False

    @builder
    def insert_or_replace(self, *terms: Any) -> "SQLLiteQueryBuilder":
        """Queue an INSERT OR REPLACE for the given terms."""
        self._apply_terms(*terms)
        self._replace = True
        self._insert_or_replace = True

    def _replace_sql(self, **kwargs: Any) -> str:
        # Prepend "INSERT OR " only for the insert_or_replace() form.
        if self._insert_or_replace:
            return "INSERT OR " + super()._replace_sql(**kwargs)
        return super()._replace_sql(**kwargs)
|
||||
@@ -0,0 +1,157 @@
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
__author__ = "Timothy Heys"
|
||||
__email__ = "theys@kayak.com"
|
||||
|
||||
|
||||
class Arithmetic(Enum):
    """SQL arithmetic and bit-shift operator symbols."""

    add = "+"
    sub = "-"
    mul = "*"
    div = "/"
    lshift = "<<"
    rshift = ">>"
|
||||
|
||||
|
||||
class Comparator(Enum):
    """Marker base for comparison-operator enums (Equality, Matching, Boolean)."""

    pass
|
||||
|
||||
|
||||
class Equality(Comparator):
    """Equality and ordering comparison operator symbols."""

    eq = "="
    ne = "<>"
    gt = ">"
    gte = ">="
    lt = "<"
    lte = "<="
|
||||
|
||||
|
||||
class Matching(Comparator):
    """Pattern-matching operators.

    Values deliberately include surrounding spaces; they are spliced verbatim
    between the two operands when SQL is rendered.
    """

    not_like = " NOT LIKE "
    like = " LIKE "
    not_ilike = " NOT ILIKE "
    ilike = " ILIKE "
    rlike = " RLIKE "
    regex = " REGEX "
    regexp = " REGEXP "
    bin_regex = " REGEX BINARY "
    as_of = " AS OF "
    glob = " GLOB "
|
||||
|
||||
|
||||
class Boolean(Comparator):
    """Boolean connectives and literal keywords."""

    and_ = "AND"
    or_ = "OR"
    xor_ = "XOR"
    true = "TRUE"
    false = "FALSE"
|
||||
|
||||
|
||||
class Order(Enum):
    """Sort direction keywords for ORDER BY."""

    asc = "ASC"
    desc = "DESC"
|
||||
|
||||
|
||||
class JoinType(Enum):
    """JOIN keyword prefixes; ``inner`` renders as a plain JOIN."""

    inner = ""
    left = "LEFT"
    right = "RIGHT"
    outer = "FULL OUTER"
    left_outer = "LEFT OUTER"
    right_outer = "RIGHT OUTER"
    # Same value as `outer`, so Enum makes this an alias member of `outer`.
    full_outer = "FULL OUTER"
    cross = "CROSS"
    hash = "HASH"
|
||||
|
||||
|
||||
class ReferenceOption(Enum):
    """Foreign-key referential actions (ON DELETE / ON UPDATE behaviour)."""

    cascade = "CASCADE"
    no_action = "NO ACTION"
    restrict = "RESTRICT"
    set_null = "SET NULL"
    set_default = "SET DEFAULT"
|
||||
|
||||
|
||||
class SetOperation(Enum):
    """Set-operation keywords for combining query results."""

    union = "UNION"
    union_all = "UNION ALL"
    intersect = "INTERSECT"
    except_of = "EXCEPT"
    minus = "MINUS"
|
||||
|
||||
|
||||
class DatePart(Enum):
    """Date/time component keywords (for EXTRACT, DATE_ADD, etc.)."""

    year = "YEAR"
    quarter = "QUARTER"
    month = "MONTH"
    week = "WEEK"
    day = "DAY"
    hour = "HOUR"
    minute = "MINUTE"
    second = "SECOND"
    microsecond = "MICROSECOND"
|
||||
|
||||
|
||||
class SqlType:
    """A named SQL column type; call the instance to attach a length,
    e.g. ``SqlType("VARCHAR")(10)`` renders as ``VARCHAR(10)``."""

    def __init__(self, name: str) -> None:
        self.name = name

    def __call__(self, length: int) -> "SqlTypeLength":
        return SqlTypeLength(self.name, length)

    def get_sql(self, **kwargs: Any) -> str:
        return f"{self.name}"
|
||||
|
||||
|
||||
class SqlTypeLength:
    """A SQL column type parameterised with a length, e.g. VARCHAR(10)."""

    def __init__(self, name: str, length: int) -> None:
        self.name = name
        self.length = length

    def get_sql(self, **kwargs: Any) -> str:
        return f"{self.name}({self.length})"
|
||||
|
||||
|
||||
class SqlTypes:
    """Namespace of common SQL column types.

    Plain-string members render as bare type names; SqlType members are
    additionally callable to produce a parameterised type,
    e.g. ``SqlTypes.VARCHAR(10)``.
    """

    BOOLEAN = "BOOLEAN"
    INTEGER = "INTEGER"
    FLOAT = "FLOAT"
    NUMERIC = "NUMERIC"
    SIGNED = "SIGNED"
    UNSIGNED = "UNSIGNED"

    DATE = "DATE"
    TIME = "TIME"
    TIMESTAMP = "TIMESTAMP"

    CHAR = SqlType("CHAR")
    VARCHAR = SqlType("VARCHAR")
    LONG_VARCHAR = SqlType("LONG VARCHAR")
    BINARY = SqlType("BINARY")
    VARBINARY = SqlType("VARBINARY")
    LONG_VARBINARY = SqlType("LONG VARBINARY")
|
||||
|
||||
|
||||
class Dialects(Enum):
    """Identifiers for the SQL dialects supported by the query builders."""

    VERTICA = "vertica"
    CLICKHOUSE = "clickhouse"
    ORACLE = "oracle"
    MSSQL = "mssql"
    MYSQL = "mysql"
    # NOTE: value contains a historical typo ("postgressql"); kept as-is
    # because changing it would break code comparing against the raw value.
    POSTGRESQL = "postgressql"
    REDSHIFT = "redshift"
    SQLLITE = "sqllite"
    SNOWFLAKE = "snowflake"
|
||||
|
||||
|
||||
class JSONOperators(Enum):
    """PostgreSQL-style JSON/JSONB operator symbols."""

    HAS_KEY = "?"
    CONTAINS = "@>"
    CONTAINED_BY = "<@"
    HAS_KEYS = "?&"
    HAS_ANY_KEYS = "?|"
    GET_JSON_VALUE = "->"
    GET_TEXT_VALUE = "->>"
    GET_PATH_JSON_VALUE = "#>"
    GET_PATH_TEXT_VALUE = "#>>"
|
||||
@@ -0,0 +1,316 @@
|
||||
"""
|
||||
Package for SQL functions wrappers
|
||||
"""
|
||||
from pypika.enums import SqlTypes
|
||||
from pypika.terms import (
|
||||
AggregateFunction,
|
||||
Function,
|
||||
LiteralValue,
|
||||
Star,
|
||||
)
|
||||
from pypika.utils import builder
|
||||
|
||||
__author__ = "Timothy Heys"
|
||||
__email__ = "theys@kayak.com"
|
||||
|
||||
|
||||
class DistinctOptionFunction(AggregateFunction):
    """Aggregate function that can render an optional DISTINCT modifier."""

    def __init__(self, name, *args, **kwargs):
        alias = kwargs.get("alias")
        super(DistinctOptionFunction, self).__init__(name, *args, alias=alias)
        # Whether to splice "DISTINCT " into the rendered call.
        self._distinct = False

    def get_function_sql(self, **kwargs):
        s = super(DistinctOptionFunction, self).get_function_sql(**kwargs)

        # len(name) + 1 is the index just past "NAME(" in the rendered SQL,
        # i.e. where "DISTINCT " must be inserted.
        n = len(self.name) + 1
        if self._distinct:
            return s[:n] + "DISTINCT " + s[n:]
        return s

    @builder
    def distinct(self):
        """Enable the DISTINCT modifier (returns a new builder instance)."""
        self._distinct = True
|
||||
|
||||
|
||||
class Count(DistinctOptionFunction):
    """SQL COUNT() aggregate; the string "*" is rendered as COUNT(*)."""

    def __init__(self, param, alias=None):
        target = Star() if isinstance(param, str) and param == "*" else param
        super().__init__("COUNT", target, alias=alias)
|
||||
|
||||
|
||||
# Arithmetic Functions
|
||||
class Sum(DistinctOptionFunction):
    """SQL SUM() aggregate, with optional DISTINCT support."""

    def __init__(self, term, alias=None):
        super().__init__("SUM", term, alias=alias)
|
||||
|
||||
|
||||
class Avg(AggregateFunction):
    """SQL AVG() aggregate."""

    def __init__(self, term, alias=None):
        super().__init__("AVG", term, alias=alias)
|
||||
|
||||
|
||||
class Min(AggregateFunction):
    """SQL MIN() aggregate."""

    def __init__(self, term, alias=None):
        super().__init__("MIN", term, alias=alias)
|
||||
|
||||
|
||||
class Max(AggregateFunction):
    """SQL MAX() aggregate."""

    def __init__(self, term, alias=None):
        super().__init__("MAX", term, alias=alias)
|
||||
|
||||
|
||||
class Std(AggregateFunction):
    """SQL STD() aggregate (standard deviation)."""

    def __init__(self, term, alias=None):
        super().__init__("STD", term, alias=alias)
|
||||
|
||||
|
||||
class StdDev(AggregateFunction):
    """SQL STDDEV() aggregate (standard deviation)."""

    def __init__(self, term, alias=None):
        super().__init__("STDDEV", term, alias=alias)
|
||||
|
||||
|
||||
class Abs(AggregateFunction):
    """SQL ABS() function (absolute value)."""

    def __init__(self, term, alias=None):
        super().__init__("ABS", term, alias=alias)
|
||||
|
||||
|
||||
class First(AggregateFunction):
    """SQL FIRST() aggregate."""

    def __init__(self, term, alias=None):
        super().__init__("FIRST", term, alias=alias)
|
||||
|
||||
|
||||
class Last(AggregateFunction):
    """SQL LAST() aggregate."""

    def __init__(self, term, alias=None):
        super().__init__("LAST", term, alias=alias)
|
||||
|
||||
|
||||
class Sqrt(Function):
    """SQL SQRT() function."""

    def __init__(self, term, alias=None):
        super().__init__("SQRT", term, alias=alias)
|
||||
|
||||
|
||||
class Floor(Function):
    """SQL FLOOR() function."""

    def __init__(self, term, alias=None):
        super().__init__("FLOOR", term, alias=alias)
|
||||
|
||||
|
||||
class ApproximatePercentile(AggregateFunction):
    """APPROXIMATE_PERCENTILE aggregate with a USING PARAMETERS clause."""

    def __init__(self, term, percentile, alias=None):
        super().__init__("APPROXIMATE_PERCENTILE", term, alias=alias)
        # Stored as float so it renders consistently in the parameters clause.
        self.percentile = float(percentile)

    def get_special_params_sql(self, **kwargs):
        return "USING PARAMETERS percentile={percentile}".format(percentile=self.percentile)
|
||||
|
||||
|
||||
# Type Functions
|
||||
class Cast(Function):
    """CAST(term AS type); type may be an object with get_sql() or a string."""

    def __init__(self, term, as_type, alias=None):
        super().__init__("CAST", term, alias=alias)
        self.as_type = as_type

    def get_special_params_sql(self, **kwargs):
        # Objects render themselves; plain strings are upper-cased.
        if hasattr(self.as_type, "get_sql"):
            type_sql = self.as_type.get_sql(**kwargs)
        else:
            type_sql = str(self.as_type).upper()
        return "AS {type}".format(type=type_sql)
|
||||
|
||||
|
||||
class Convert(Function):
    """CONVERT(term USING encoding); encoding is read from ``.value``."""

    def __init__(self, term, encoding, alias=None):
        super().__init__("CONVERT", term, alias=alias)
        self.encoding = encoding

    def get_special_params_sql(self, **kwargs):
        return "USING {type}".format(type=self.encoding.value)
|
||||
|
||||
|
||||
class ToChar(Function):
    """TO_CHAR(term, as_type)."""

    def __init__(self, term, as_type, alias=None):
        super().__init__("TO_CHAR", term, as_type, alias=alias)
|
||||
|
||||
|
||||
class Signed(Cast):
    """Shorthand for CAST(term AS SIGNED)."""

    def __init__(self, term, alias=None):
        super().__init__(term, SqlTypes.SIGNED, alias=alias)
|
||||
|
||||
|
||||
class Unsigned(Cast):
    """Shorthand for CAST(term AS UNSIGNED)."""

    def __init__(self, term, alias=None):
        super().__init__(term, SqlTypes.UNSIGNED, alias=alias)
|
||||
|
||||
|
||||
class Date(Function):
    """SQL DATE() function."""

    def __init__(self, term, alias=None):
        super().__init__("DATE", term, alias=alias)
|
||||
|
||||
|
||||
class DateDiff(Function):
    """DATEDIFF(interval, start_date, end_date)."""

    def __init__(self, interval, start_date, end_date, alias=None):
        super().__init__("DATEDIFF", interval, start_date, end_date, alias=alias)
|
||||
|
||||
|
||||
class TimeDiff(Function):
    """TIMEDIFF(start_time, end_time)."""

    def __init__(self, start_time, end_time, alias=None):
        super().__init__("TIMEDIFF", start_time, end_time, alias=alias)
|
||||
|
||||
|
||||
class DateAdd(Function):
    """DATE_ADD(date_part, interval, term)."""

    def __init__(self, date_part, interval, term, alias=None):
        # Accept either a DatePart enum member or its raw string value.
        part = getattr(date_part, "value", date_part)
        super().__init__("DATE_ADD", LiteralValue(part), interval, term, alias=alias)
|
||||
|
||||
|
||||
class ToDate(Function):
    """TO_DATE(value, format_mask)."""

    def __init__(self, value, format_mask, alias=None):
        super().__init__("TO_DATE", value, format_mask, alias=alias)
|
||||
|
||||
|
||||
class Timestamp(Function):
    """SQL TIMESTAMP() function."""

    def __init__(self, term, alias=None):
        super().__init__("TIMESTAMP", term, alias=alias)
|
||||
|
||||
|
||||
class TimestampAdd(Function):
    """TIMESTAMPADD(date_part, interval, term)."""

    def __init__(self, date_part, interval, term, alias=None):
        # Accept either a DatePart enum member or its raw string value.
        part = getattr(date_part, 'value', date_part)
        super().__init__("TIMESTAMPADD", LiteralValue(part), interval, term, alias=alias)
|
||||
|
||||
|
||||
# String Functions
|
||||
class Ascii(Function):
    """SQL ASCII() function."""

    def __init__(self, term, alias=None):
        super().__init__("ASCII", term, alias=alias)
|
||||
|
||||
|
||||
class NullIf(Function):
    """NULLIF(term, condition)."""

    def __init__(self, term, condition, **kwargs):
        super().__init__("NULLIF", term, condition, **kwargs)
|
||||
|
||||
|
||||
class Bin(Function):
    """SQL BIN() function."""

    def __init__(self, term, alias=None):
        super().__init__("BIN", term, alias=alias)
|
||||
|
||||
|
||||
class Concat(Function):
    """CONCAT(...) over any number of terms."""

    def __init__(self, *terms, **kwargs):
        super().__init__("CONCAT", *terms, **kwargs)
|
||||
|
||||
|
||||
class Insert(Function):
    """SQL INSERT(term, start, stop, subterm) string-splice function."""

    def __init__(self, term, start, stop, subterm, alias=None):
        # A dead identity re-assignment of the four arguments
        # (``term, start, stop, subterm = [term for term in [...]]``)
        # was removed — it was a no-op.
        super(Insert, self).__init__("INSERT", term, start, stop, subterm, alias=alias)
|
||||
|
||||
|
||||
class Length(Function):
    """SQL LENGTH() function."""

    def __init__(self, term, alias=None):
        super().__init__("LENGTH", term, alias=alias)
|
||||
|
||||
|
||||
class Upper(Function):
    """SQL UPPER() function."""

    def __init__(self, term, alias=None):
        super().__init__("UPPER", term, alias=alias)
|
||||
|
||||
|
||||
class Lower(Function):
    """SQL LOWER() function."""

    def __init__(self, term, alias=None):
        super().__init__("LOWER", term, alias=alias)
|
||||
|
||||
|
||||
class Substring(Function):
    """SUBSTRING(term, start, stop)."""

    def __init__(self, term, start, stop, alias=None):
        super().__init__("SUBSTRING", term, start, stop, alias=alias)
|
||||
|
||||
|
||||
class Reverse(Function):
    """SQL REVERSE() function."""

    def __init__(self, term, alias=None):
        super().__init__("REVERSE", term, alias=alias)
|
||||
|
||||
|
||||
class Trim(Function):
    """SQL TRIM() function."""

    def __init__(self, term, alias=None):
        super().__init__("TRIM", term, alias=alias)
|
||||
|
||||
|
||||
class SplitPart(Function):
    """SPLIT_PART(term, delimiter, index)."""

    def __init__(self, term, delimiter, index, alias=None):
        super().__init__("SPLIT_PART", term, delimiter, index, alias=alias)
|
||||
|
||||
|
||||
class RegexpMatches(Function):
    """REGEXP_MATCHES(term, pattern[, modifiers])."""

    def __init__(self, term, pattern, modifiers=None, alias=None):
        super().__init__("REGEXP_MATCHES", term, pattern, modifiers, alias=alias)
|
||||
|
||||
|
||||
class RegexpLike(Function):
    """REGEXP_LIKE(term, pattern[, modifiers])."""

    def __init__(self, term, pattern, modifiers=None, alias=None):
        super().__init__("REGEXP_LIKE", term, pattern, modifiers, alias=alias)
|
||||
|
||||
|
||||
class Replace(Function):
    """REPLACE(term, find_string, replace_with)."""

    def __init__(self, term, find_string, replace_with, alias=None):
        super().__init__("REPLACE", term, find_string, replace_with, alias=alias)
|
||||
|
||||
|
||||
# Date/Time Functions
|
||||
class Now(Function):
    """SQL NOW() function."""

    def __init__(self, alias=None):
        super().__init__("NOW", alias=alias)
|
||||
|
||||
|
||||
class UtcTimestamp(Function):
    """SQL UTC_TIMESTAMP() function."""

    def __init__(self, alias=None):
        super().__init__("UTC_TIMESTAMP", alias=alias)
|
||||
|
||||
|
||||
class CurTimestamp(Function):
    """CURRENT_TIMESTAMP, rendered without parentheses."""

    def __init__(self, alias=None):
        super().__init__("CURRENT_TIMESTAMP", alias=alias)

    def get_function_sql(self, **kwargs):
        # CURRENT_TIMESTAMP takes no arguments, so the SQL to generate is quite
        # simple. Note that empty parentheses have been omitted intentionally.
        return "CURRENT_TIMESTAMP"
|
||||
|
||||
|
||||
class CurDate(Function):
    """SQL CURRENT_DATE function."""

    def __init__(self, alias=None):
        super().__init__("CURRENT_DATE", alias=alias)
|
||||
|
||||
|
||||
class CurTime(Function):
    """SQL CURRENT_TIME function."""

    def __init__(self, alias=None):
        super().__init__("CURRENT_TIME", alias=alias)
|
||||
|
||||
|
||||
class Extract(Function):
    """EXTRACT(date_part FROM field)."""

    def __init__(self, date_part, field, alias=None):
        # Accept either a DatePart enum member or its raw string value.
        part = getattr(date_part, "value", date_part)
        super().__init__("EXTRACT", LiteralValue(part), alias=alias)
        self.field = field

    def get_special_params_sql(self, **kwargs):
        return "FROM {field}".format(field=self.field.get_sql(**kwargs))
|
||||
|
||||
|
||||
# Null Functions
|
||||
class IsNull(Function):
    """SQL ISNULL() function."""

    def __init__(self, term, alias=None):
        super().__init__("ISNULL", term, alias=alias)
|
||||
|
||||
|
||||
class Coalesce(Function):
    """COALESCE(term, *default_values)."""

    def __init__(self, term, *default_values, **kwargs):
        super().__init__("COALESCE", term, *default_values, **kwargs)
|
||||
|
||||
|
||||
class IfNull(Function):
    """IFNULL(condition, term)."""

    def __init__(self, condition, term, **kwargs):
        super().__init__("IFNULL", condition, term, **kwargs)
|
||||
|
||||
|
||||
class NVL(Function):
    """NVL(condition, term)."""

    def __init__(self, condition, term, alias=None):
        super().__init__("NVL", condition, term, alias=alias)
|
||||
@@ -0,0 +1,8 @@
|
||||
from .terms import PseudoColumn
|
||||
|
||||
# Oracle pseudocolumns: built-in column-like identifiers that can be selected
# but are not stored in any table. Exposed as module-level constants.
ColumnValue = PseudoColumn("COLUMN_VALUE")
ObjectID = PseudoColumn("OBJECT_ID")
ObjectValue = PseudoColumn("OBJECT_VALUE")
RowNum = PseudoColumn("ROWNUM")
RowID = PseudoColumn("ROWID")
SysDate = PseudoColumn("SYSDATE")
|
||||
2115
backend_service/venv/lib/python3.13/site-packages/pypika/queries.py
Normal file
2115
backend_service/venv/lib/python3.13/site-packages/pypika/queries.py
Normal file
File diff suppressed because it is too large
Load Diff
1655
backend_service/venv/lib/python3.13/site-packages/pypika/terms.py
Normal file
1655
backend_service/venv/lib/python3.13/site-packages/pypika/terms.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,125 @@
|
||||
from typing import Any, Callable, List, Optional, Type
|
||||
|
||||
# Package metadata: original author attribution.
__author__ = "Timothy Heys"
__email__ = "theys@kayak.com"
|
||||
|
||||
|
||||
class QueryException(Exception):
    """Raised when a query is constructed or serialized in an invalid way."""
|
||||
|
||||
|
||||
class GroupingException(Exception):
    """Raised for invalid GROUP BY / grouping configurations."""
|
||||
|
||||
|
||||
class CaseException(Exception):
    """Raised for invalid CASE expression construction."""
|
||||
|
||||
|
||||
class JoinException(Exception):
    """Raised for invalid JOIN construction."""
|
||||
|
||||
|
||||
class SetOperationException(Exception):
    """Raised for invalid set operations (UNION, INTERSECT, ...)."""
|
||||
|
||||
|
||||
class RollupException(Exception):
    """Raised for invalid ROLLUP usage."""
|
||||
|
||||
|
||||
class DialectNotSupported(Exception):
    """Raised when a feature is used with a SQL dialect that does not support it."""
|
||||
|
||||
|
||||
class FunctionException(Exception):
    """Raised for invalid SQL function usage."""
|
||||
|
||||
|
||||
def builder(func: Callable) -> Callable:
    """
    Decorator for wrapper "builder" functions. These are functions on the Query class or other
    classes used for building queries which mutate the query and return self. To make the build
    functions immutable, this decorator makes a *shallow* copy (``copy.copy``) of the current
    instance before calling the wrapped function — unless the instance sets
    ``immutable = False``, in which case it is mutated in place. The decorator returns the
    wrapped function's return value, or the (possibly copied) instance when the wrapped
    function returns None, so inner functions do not need to return self.

    :param func: The builder method being wrapped.
    :return: The wrapping function with the same metadata as ``func``.
    """
    import copy
    import functools

    # functools.wraps preserves func's __name__/__doc__ so introspection and help() still work.
    @functools.wraps(func)
    def _copy(self, *args, **kwargs):
        # Operate on a shallow copy unless the class explicitly opts out of immutability.
        self_copy = copy.copy(self) if getattr(self, "immutable", True) else self
        result = func(self_copy, *args, **kwargs)

        # Return self_copy if the inner function returns None. This way the inner function can
        # return something different (for example when creating joins, a different builder is
        # returned).
        if result is None:
            return self_copy

        return result

    return _copy
|
||||
|
||||
|
||||
def ignore_copy(func: Callable) -> Callable:
    """
    Decorator for a class's ``__getattr__`` when instances of that class get copied. The copy
    machinery probes for magic hooks (``__copy__``, ``__deepcopy__``, pickle state methods);
    a custom ``__getattr__`` that answers those probes causes infinite recursion. Any class
    implementing ``__getattr__`` that is meant to be copied should use this decorator.

    Copying is used by pypika in builder functions (decorated by @builder) to make the results
    immutable. Any data model type class (stored in the Query instance) is copied.

    :param func: The ``__getattr__`` implementation being wrapped.
    :return: A ``__getattr__`` that raises AttributeError for copy/pickle magic names.
    """
    # Magic names the copy/pickle machinery probes for; these must fail the lookup.
    blocked = (
        "__copy__",
        "__deepcopy__",
        "__getstate__",
        "__setstate__",
        "__getnewargs__",
    )

    def _lookup(self, name):
        if name not in blocked:
            return func(self, name)
        raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name))

    return _lookup
|
||||
|
||||
|
||||
def resolve_is_aggregate(values: List[Optional[bool]]) -> Optional[bool]:
    """
    Resolve the is_aggregate flag for an expression composed of multiple terms. Works like a
    voter system: each term votes True, votes False, or abstains with None.

    :param values: A list of booleans (or None) for each term in the expression
    :return: True when every non-abstaining vote is True; None when every term abstained;
        False otherwise.
    """
    votes = [vote for vote in values if vote is not None]
    if not votes:
        return None
    return all(votes)
|
||||
|
||||
|
||||
def format_quotes(value: Any, quote_char: Optional[str]) -> str:
    """Wrap *value* in *quote_char* on both sides; a falsy quote char means no quoting."""
    quote = quote_char or ""
    return f"{quote}{value}{quote}"
|
||||
|
||||
|
||||
def format_alias_sql(
    sql: str,
    alias: Optional[str],
    quote_char: Optional[str] = None,
    alias_quote_char: Optional[str] = None,
    as_keyword: bool = False,
    **kwargs: Any,
) -> str:
    """
    Append an alias to a SQL fragment, quoting the alias when a quote character is configured.

    :param sql: The SQL fragment to alias.
    :param alias: Alias name, or None to return *sql* unchanged.
    :param quote_char: Fallback quote character when no alias-specific one is given.
    :param alias_quote_char: Quote character used for the alias itself, if provided.
    :param as_keyword: When True, join with " AS " instead of a plain space.
    :param kwargs: Ignored; accepted for uniformity with other formatting helpers.
    """
    if alias is None:
        return sql
    quote = alias_quote_char or quote_char or ""
    separator = ' AS ' if as_keyword else ' '
    return f"{sql}{separator}{quote}{alias}{quote}"
|
||||
|
||||
|
||||
def validate(*args: Any, exc: Optional[Exception] = None, type: Optional[Type] = None) -> None:
    """
    Check every positional argument against *type* and raise *exc* on the first mismatch.

    A no-op when *type* is None. NOTE(review): the *type* parameter shadows the builtin, and
    passing exc=None with a failing check raises a TypeError from ``raise None`` — both kept
    as-is for interface compatibility.
    """
    if type is None:
        return
    for candidate in args:
        if not isinstance(candidate, type):
            raise exc
|
||||
Reference in New Issue
Block a user