From 014a366849a55cef5d181bdd638d491c2dd8bbbc Mon Sep 17 00:00:00 2001 From: rodp63 Date: Wed, 21 May 2025 16:36:01 -0500 Subject: [PATCH 1/7] QDB-16709 - Add pybind11-stubgen support --- .gitignore | 3 +++ dev-requirements.txt | 4 ++++ scripts/teamcity/10.build.sh | 16 ++++++++++++---- 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index e574aa1b..2bbb48c2 100644 --- a/.gitignore +++ b/.gitignore @@ -127,6 +127,9 @@ venv # mypy .mypy_cache/ +# Stubs +*.pyi + # Visual Studio Code .vscode/* !.vscode/settings.json diff --git a/dev-requirements.txt b/dev-requirements.txt index 801c0f52..82b45f33 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -37,3 +37,7 @@ setuptools-git == 1.2 # Linting black + +# Stubs +mypy +pybind11-stubgen diff --git a/scripts/teamcity/10.build.sh b/scripts/teamcity/10.build.sh index 042ca998..9de5e792 100755 --- a/scripts/teamcity/10.build.sh +++ b/scripts/teamcity/10.build.sh @@ -33,9 +33,7 @@ function relabel_wheel { fi } -DIST_DIR=dist - -rm -r -f build/ ${DIST_DIR}/ +rm -r -f build/ dist/ if [[ "$OSTYPE" == "darwin"* && $PYTHON == "python3.9"* ]]; then ${VENV_PYTHON} -m pip install --upgrade setuptools==63.0.0b1 wheel @@ -50,6 +48,16 @@ export QDB_TESTS_ENABLED=OFF ${VENV_PYTHON} -m build -w -for whl in ${DIST_DIR}/*.whl; do + +# # Build the stubs and rebuild the api +cd dist +${VENV_PYTHON} -m pip install *.whl +pybind11-stubgen quasardb -o ../ # stubgen (from mypy) is not that good +cd ../ + +${VENV_PYTHON} -m build -w + +## Rename the build +for whl in dist/*.whl; do relabel_wheel "$whl" done From 6ec2a767b78a00757c5978faec139abf3b4dde2a Mon Sep 17 00:00:00 2001 From: rodp63 Date: Fri, 23 May 2025 15:33:55 -0500 Subject: [PATCH 2/7] QDB-16709 - Add basic pyi files --- .gitignore | 6 +- quasardb/__init__.pyi | 128 +++++ quasardb/all.pyi | 778 ++++++++++++++++++++++++++ quasardb/batch_inserter.hpp | 23 +- quasardb/error.hpp | 16 + quasardb/extensions/__init__.pyi | 9 + 
quasardb/extensions/writer.pyi | 30 + quasardb/metrics.pyi | 24 + quasardb/quasardb/__init__.pyi | 4 + quasardb/quasardb/_batch_column.pyi | 5 + quasardb/quasardb/_batch_inserter.pyi | 30 + quasardb/table.hpp | 113 ++-- scripts/teamcity/10.build.sh | 10 - setup.py | 9 +- 14 files changed, 1102 insertions(+), 83 deletions(-) create mode 100644 quasardb/__init__.pyi create mode 100644 quasardb/all.pyi create mode 100644 quasardb/extensions/__init__.pyi create mode 100644 quasardb/extensions/writer.pyi create mode 100644 quasardb/metrics.pyi create mode 100644 quasardb/quasardb/__init__.pyi create mode 100644 quasardb/quasardb/_batch_column.pyi create mode 100644 quasardb/quasardb/_batch_inserter.pyi diff --git a/.gitignore b/.gitignore index 2bbb48c2..1804c981 100644 --- a/.gitignore +++ b/.gitignore @@ -127,9 +127,6 @@ venv # mypy .mypy_cache/ -# Stubs -*.pyi - # Visual Studio Code .vscode/* !.vscode/settings.json @@ -141,6 +138,9 @@ venv # PyCharm .idea +# MacOS +.DS_Store + # Test specific cluster_private.key cluster_public.key diff --git a/quasardb/__init__.pyi b/quasardb/__init__.pyi new file mode 100644 index 00000000..1456d079 --- /dev/null +++ b/quasardb/__init__.pyi @@ -0,0 +1,128 @@ +""" + +.. module: quasardb + :platform: Unix, Windows + :synopsis: quasardb official Python API + +.. moduleauthor: quasardb SAS. 
All rights reserved +""" + +from __future__ import annotations + +import datetime + +from quasardb.quasardb import BatchColumnInfo + +# from quasardb.quasardb import ( +# AliasAlreadyExistsError, +# AliasNotFoundError, +# AsyncPipelineFullError, +# Blob, +# Cluster, +# ColumnInfo, +# ColumnType, +# DirectBlob, +# DirectInteger, +# Double, +# Entry, +# Error, +# ExpirableEntry, +# FindQuery, +# IncompatibleTypeError, +# IndexedColumnInfo, +# InputBufferTooSmallError, +# Integer, +# InternalLocalError, +# InvalidArgumentError, +# InvalidDatetimeError, +# InvalidHandleError, +# InvalidQueryError, +# MaskedArray, +# Node, +# NotImplementedError, +# Options, +# OutOfBoundsError, +# Perf, +# Properties, +# QueryContinuous, +# Reader, +# RetryOptions, +# String, +# Table, +# Tag, +# Timestamp, +# TryAgainError, +# UninitializedError, +# Writer, +# WriterData, +# WriterPushMode, +# build, +# dict_query, +# metrics, +# version, +# ) + +__all__ = [ + # "AliasAlreadyExistsError", + # "AliasNotFoundError", + # "AsyncPipelineFullError", + "BatchColumnInfo", + # "Blob", + # "Cluster", + # "ColumnInfo", + # "ColumnType", + # "DirectBlob", + # "DirectInteger", + # "Double", + # "Entry", + # "Error", + # "ExpirableEntry", + # "FindQuery", + # "IncompatibleTypeError", + # "IndexedColumnInfo", + # "InputBufferTooSmallError", + # "Integer", + # "InternalLocalError", + # "InvalidArgumentError", + # "InvalidDatetimeError", + # "InvalidHandleError", + # "InvalidQueryError", + # "MaskedArray", + # "Node", + # "NotImplementedError", + # "Options", + # "OutOfBoundsError", + # "Perf", + # "Properties", + # "QueryContinuous", + # "Reader", + # "RetryOptions", + # "String", + # "Table", + # "Tag", + # "Timestamp", + # "TryAgainError", + # "UninitializedError", + # "Writer", + # "WriterData", + # "WriterPushMode", + # "build", + # "dict_query", + # "extend_module", + # "extensions", + # "generic_error_msg", + # "glibc_error_msg", + # "link_error_msg", + # "metrics", + # "never_expires", + # 
"quasardb", + # "unknown_error_msg", + # "version", +] + +def generic_error_msg(msg, e=None): ... +def glibc_error_msg(e): ... +def link_error_msg(e): ... +def unknown_error_msg(): ... + +never_expires: datetime.datetime # value = datetime.datetime(1969, 12, 31, 19, 0) diff --git a/quasardb/all.pyi b/quasardb/all.pyi new file mode 100644 index 00000000..bbaa1bfb --- /dev/null +++ b/quasardb/all.pyi @@ -0,0 +1,778 @@ +""" +QuasarDB Official Python API +""" + +from __future__ import annotations + +import datetime +import typing + +import numpy +import numpy.ma + +from . import metrics + +__all__ = [ + "AliasAlreadyExistsError", + "AliasNotFoundError", + "AsyncPipelineFullError", + "Blob", + "Cluster", + "ColumnInfo", + "ColumnType", + "DirectBlob", + "DirectInteger", + "Double", + "Entry", + "Error", + "ExpirableEntry", + "FindQuery", + "IncompatibleTypeError", + "IndexedColumnInfo", + "InputBufferTooSmallError", + "Integer", + "InternalLocalError", + "InvalidArgumentError", + "InvalidDatetimeError", + "InvalidHandleError", + "InvalidQueryError", + "MaskedArray", + "Node", + "NotImplementedError", + "Options", + "OutOfBoundsError", + "Perf", + "Properties", + "QueryContinuous", + "Reader", + "RetryOptions", + "String", + "Table", + "Tag", + "Timestamp", + "TryAgainError", + "UninitializedError", + "Writer", + "WriterData", + "WriterPushMode", + "build", + "dict_query", + "metrics", + "never_expires", + "version", +] + +class AliasAlreadyExistsError(Error): + pass + +class AliasNotFoundError(Error): + pass + +class AsyncPipelineFullError(Error): + pass + +class Blob(ExpirableEntry): + def __init__(self, arg0: ..., arg1: str) -> None: ... + def compare_and_swap(self, new_content: str, comparand: str) -> bytes: ... + def get(self) -> bytes: ... + def get_and_remove(self) -> bytes: ... + def get_and_update(self, data: str) -> bytes: ... + def put(self, data: str) -> None: ... + def remove_if(self, comparand: str) -> None: ... 
+ def update(self, data: str, expiry: datetime.datetime = ...) -> None: ... + +class Cluster: + """ + Represents a connection to the QuasarDB cluster. + """ + + def __enter__(self) -> Cluster: ... + def __exit__( + self, arg0: typing.Any, arg1: typing.Any, arg2: typing.Any + ) -> None: ... + def __init__( + self, + uri: str, + user_name: str = "", + user_private_key: str = "", + cluster_public_key: str = "", + *, + user_security_file: str = "", + cluster_public_key_file: str = "", + timeout: datetime.timedelta = ..., + do_version_check: bool = False, + enable_encryption: bool = False, + compression_mode: Options.Compression = ..., + client_max_parallelism: int = 0, + ) -> None: ... + def blob(self, arg0: str) -> ...: ... + def close(self) -> None: ... + def compact_abort(self) -> None: ... + def compact_full(self) -> None: ... + def compact_progress(self) -> int: ... + def double(self, arg0: str) -> ...: ... + def endpoints(self) -> list[str]: ... + def find(self, arg0: str) -> ...: ... + def get_memory_info(self) -> str: ... + def inserter(self, arg0: list[...]) -> ...: ... + def integer(self, arg0: str) -> ...: ... + def is_open(self) -> bool: ... + def node(self, arg0: str) -> ...: ... + def node_config(self, arg0: str) -> typing.Any: ... + def node_status(self, arg0: str) -> typing.Any: ... + def node_topology(self, arg0: str) -> typing.Any: ... + def options(self) -> Options: ... + def perf(self) -> ...: ... + def pinned_writer(self) -> ...: ... + def prefix_count(self, arg0: str) -> int: ... + def prefix_get(self, arg0: str, arg1: int) -> list[str]: ... + def properties(self) -> ...: ... + def purge_all(self, arg0: datetime.timedelta) -> None: ... + def purge_cache(self, arg0: datetime.timedelta) -> None: ... + def query(self, query: str, blobs: typing.Any = False) -> typing.Any: ... + def query_continuous_full( + self, query: str, pace: datetime.timedelta, blobs: typing.Any = False + ) -> ...: ... 
+ def query_continuous_new_values( + self, query: str, pace: datetime.timedelta, blobs: typing.Any = False + ) -> ...: ... + def query_numpy(self, query: str) -> typing.Any: ... + def reader( + self, + table_names: list[str], + *, + column_names: list[str] = [], + batch_size: int = 0, + ranges: list[tuple] = [], + ) -> ...: ... + def string(self, arg0: str) -> ...: ... + def suffix_count(self, arg0: str) -> int: ... + def suffix_get(self, arg0: str, arg1: int) -> list[str]: ... + def table(self, arg0: str) -> ...: ... + def tag(self, arg0: str) -> ...: ... + def tidy_memory(self) -> None: ... + def timestamp(self, arg0: str) -> ...: ... + def trim_all(self, arg0: datetime.timedelta, arg1: datetime.timedelta) -> None: ... + def ts(self, arg0: str) -> ...: ... + def ts_batch(self, arg0: list[...]) -> ...: ... + def uri(self) -> str: ... + def wait_for_compaction(self) -> None: ... + def writer(self) -> ...: ... + +class ColumnInfo: + name: str + symtable: str + type: ColumnType + @typing.overload + def __init__(self, arg0: ColumnType, arg1: str) -> None: ... + @typing.overload + def __init__(self, arg0: ColumnType, arg1: str, arg2: str) -> None: ... + def __repr__(self) -> str: ... + +class ColumnType: + """ + Column type + + Members: + + Uninitialized + + Double + + Blob + + String + + Symbol + + Int64 + + Timestamp + """ + + Blob: typing.ClassVar[ColumnType] # value = + Double: typing.ClassVar[ColumnType] # value = + Int64: typing.ClassVar[ColumnType] # value = + String: typing.ClassVar[ColumnType] # value = + Symbol: typing.ClassVar[ColumnType] # value = + Timestamp: typing.ClassVar[ColumnType] # value = + Uninitialized: typing.ClassVar[ColumnType] # value = + __members__: typing.ClassVar[ + dict[str, ColumnType] + ] # value = {'Uninitialized': , 'Double': , 'Blob': , 'String': , 'Symbol': , 'Int64': , 'Timestamp': } + def __and__(self, other: typing.Any) -> typing.Any: ... + def __eq__(self, other: typing.Any) -> bool: ... 
+ def __ge__(self, other: typing.Any) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: typing.Any) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> typing.Any: ... + def __le__(self, other: typing.Any) -> bool: ... + def __lt__(self, other: typing.Any) -> bool: ... + def __ne__(self, other: typing.Any) -> bool: ... + def __or__(self, other: typing.Any) -> typing.Any: ... + def __rand__(self, other: typing.Any) -> typing.Any: ... + def __repr__(self) -> str: ... + def __ror__(self, other: typing.Any) -> typing.Any: ... + def __rxor__(self, other: typing.Any) -> typing.Any: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: typing.Any) -> typing.Any: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + +class DirectBlob: + def __init__(self, arg0: ..., arg1: ..., arg2: str) -> None: ... + def get(self) -> bytes: ... + def put(self, data: str) -> None: ... + def remove(self) -> None: ... + def update(self, data: str) -> None: ... + +class DirectInteger: + def __init__(self, arg0: ..., arg1: ..., arg2: str) -> None: ... + def get(self) -> int: ... + def put(self, integer: int) -> None: ... + def remove(self) -> None: ... + def update(self, integer: int) -> None: ... + +class Double(ExpirableEntry): + def __init__(self, arg0: ..., arg1: str) -> None: ... + def add(self, addend: float) -> float: ... + def get(self) -> float: ... + def put(self, double: float) -> None: ... + def update(self, double: float) -> None: ... + +class Entry: + class Metadata: + expiry_time: qdb_timespec_t + modification_time: qdb_timespec_t + size: int + type: Entry.Type + def __init__(self) -> None: ... 
+ + class Type: + """ + Entry type + + Members: + + Uninitialized + + Integer + + HashSet + + Tag + + Deque + + Stream + + Timeseries + """ + + Deque: typing.ClassVar[Entry.Type] # value = + HashSet: typing.ClassVar[Entry.Type] # value = + Integer: typing.ClassVar[Entry.Type] # value = + Stream: typing.ClassVar[Entry.Type] # value = + Tag: typing.ClassVar[Entry.Type] # value = + Timeseries: typing.ClassVar[Entry.Type] # value = + Uninitialized: typing.ClassVar[Entry.Type] # value = + __members__: typing.ClassVar[ + dict[str, Entry.Type] + ] # value = {'Uninitialized': , 'Integer': , 'HashSet': , 'Tag': , 'Deque': , 'Stream': , 'Timeseries': } + def __and__(self, other: typing.Any) -> typing.Any: ... + def __eq__(self, other: typing.Any) -> bool: ... + def __ge__(self, other: typing.Any) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: typing.Any) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> typing.Any: ... + def __le__(self, other: typing.Any) -> bool: ... + def __lt__(self, other: typing.Any) -> bool: ... + def __ne__(self, other: typing.Any) -> bool: ... + def __or__(self, other: typing.Any) -> typing.Any: ... + def __rand__(self, other: typing.Any) -> typing.Any: ... + def __repr__(self) -> str: ... + def __ror__(self, other: typing.Any) -> typing.Any: ... + def __rxor__(self, other: typing.Any) -> typing.Any: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: typing.Any) -> typing.Any: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + + def __init__(self, arg0: ..., arg1: str) -> None: ... + def attach_tag(self, arg0: str) -> bool: ... + def attach_tags(self, arg0: list[str]) -> None: ... + def detach_tag(self, arg0: str) -> bool: ... + def detach_tags(self, arg0: list[str]) -> None: ... 
+ def exists(self) -> bool: + """ + Returns true if the entry exists + """ + + def get_entry_type(self) -> Entry.Type: ... + def get_location(self) -> tuple[str, int]: ... + def get_metadata(self) -> ...: ... + def get_name(self) -> str: ... + def get_tags(self) -> list[str]: ... + def has_tag(self, arg0: str) -> bool: ... + def remove(self) -> None: ... + +class Error(RuntimeError): + pass + +class ExpirableEntry(Entry): + def __init__(self, arg0: ..., arg1: str) -> None: ... + def expires_at(self, arg0: ...) -> None: ... + def expires_from_now(self, arg0: datetime.timedelta) -> None: ... + def get_expiry_time(self) -> ...: ... + +class FindQuery: + def __init__(self, arg0: ..., arg1: str) -> None: ... + def run(self) -> list[str]: ... + +class IncompatibleTypeError(Error): + pass + +class IndexedColumnInfo: + @typing.overload + def __init__(self, arg0: ColumnType, arg1: int) -> None: ... + @typing.overload + def __init__(self, arg0: ColumnType, arg1: int, arg2: str) -> None: ... + @property + def index(self) -> int: ... + @property + def symtable(self) -> str: ... + @property + def type(self) -> ColumnType: ... + +class InputBufferTooSmallError(Error): + pass + +class Integer(ExpirableEntry): + def __init__(self, arg0: ..., arg1: str) -> None: ... + def add(self, addend: int) -> int: ... + def get(self) -> int: ... + def put(self, integer: int) -> None: ... + def update(self, integer: int) -> None: ... + +class InternalLocalError(Error): + pass + +class InvalidArgumentError(Error): + pass + +class InvalidDatetimeError(Error): + pass + +class InvalidHandleError(Error): + pass + +class InvalidQueryError(Error): + pass + +class MaskedArray: + pass + +class Node: + def __init__( + self, + uri: str, + user_name: str = "", + user_private_key: str = "", + cluster_public_key: str = "", + *, + user_security_file: str = "", + cluster_public_key_file: str = "", + enable_encryption: bool = False, + ) -> None: ... + def blob(self, arg0: str) -> ...: ... 
+ def integer(self, arg0: str) -> ...: ... + def prefix_get(self, arg0: str, arg1: int) -> list[str]: ... + +class NotImplementedError(Error): + pass + +class Options: + class Compression: + """ + Compression mode + + Members: + + Disabled + + Best + + Balanced + """ + + Balanced: typing.ClassVar[ + Options.Compression + ] # value = + Best: typing.ClassVar[Options.Compression] # value = + Disabled: typing.ClassVar[ + Options.Compression + ] # value = + __members__: typing.ClassVar[ + dict[str, Options.Compression] + ] # value = {'Disabled': , 'Best': , 'Balanced': } + def __and__(self, other: typing.Any) -> typing.Any: ... + def __eq__(self, other: typing.Any) -> bool: ... + def __ge__(self, other: typing.Any) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: typing.Any) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> typing.Any: ... + def __le__(self, other: typing.Any) -> bool: ... + def __lt__(self, other: typing.Any) -> bool: ... + def __ne__(self, other: typing.Any) -> bool: ... + def __or__(self, other: typing.Any) -> typing.Any: ... + def __rand__(self, other: typing.Any) -> typing.Any: ... + def __repr__(self) -> str: ... + def __ror__(self, other: typing.Any) -> typing.Any: ... + def __rxor__(self, other: typing.Any) -> typing.Any: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: typing.Any) -> typing.Any: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... 
+ + class Encryption: + """ + Encryption type + + Members: + + Disabled + + AES256GCM + """ + + AES256GCM: typing.ClassVar[ + Options.Encryption + ] # value = + Disabled: typing.ClassVar[ + Options.Encryption + ] # value = + __members__: typing.ClassVar[ + dict[str, Options.Encryption] + ] # value = {'Disabled': , 'AES256GCM': } + def __and__(self, other: typing.Any) -> typing.Any: ... + def __eq__(self, other: typing.Any) -> bool: ... + def __ge__(self, other: typing.Any) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: typing.Any) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> typing.Any: ... + def __le__(self, other: typing.Any) -> bool: ... + def __lt__(self, other: typing.Any) -> bool: ... + def __ne__(self, other: typing.Any) -> bool: ... + def __or__(self, other: typing.Any) -> typing.Any: ... + def __rand__(self, other: typing.Any) -> typing.Any: ... + def __repr__(self) -> str: ... + def __ror__(self, other: typing.Any) -> typing.Any: ... + def __rxor__(self, other: typing.Any) -> typing.Any: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: typing.Any) -> typing.Any: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + + def __init__(self, arg0: ...) -> None: ... + def disable_user_properties(self) -> None: ... + def enable_user_properties(self) -> None: ... + def get_client_max_batch_load(self) -> int: + """ + Get the number of shards per thread used for the batch writer. + """ + + def get_client_max_in_buf_size(self) -> int: ... + def get_client_max_parallelism(self) -> int: ... + def get_cluster_max_in_buf_size(self) -> int: ... 
+ def get_connection_per_address_soft_limit(self) -> int: + """ + Get the maximum number of connections per qdbd node + """ + + def get_query_max_length(self) -> int: ... + def get_stabilization_max_wait(self) -> datetime.timedelta: ... + def get_timeout(self) -> datetime.timedelta: ... + def get_timezone(self) -> str: ... + def set_client_max_batch_load(self, arg0: int) -> None: + """ + Adjust the number of shards per thread used for the batch writer. + """ + + def set_client_max_in_buf_size(self, arg0: int) -> None: ... + def set_client_soft_memory_limit(self, limit: int) -> None: ... + def set_cluster_public_key(self, arg0: str) -> None: ... + def set_connection_per_address_soft_limit(self, arg0: int) -> None: + """ + Adjust the maximum number of connections per qdbd node + """ + + def set_encryption(self, arg0: Options.Encryption) -> None: ... + def set_max_cardinality(self, arg0: int) -> None: ... + def set_query_max_length(self, query_max_length: int) -> None: ... + def set_stabilization_max_wait(self, arg0: datetime.timedelta) -> None: ... + def set_timeout(self, arg0: datetime.timedelta) -> None: ... + def set_timezone(self, arg0: str) -> None: ... + def set_user_credentials(self, arg0: str, arg1: str) -> None: ... + +class OutOfBoundsError(Error): + pass + +class Perf: + def __init__(self, arg0: ...) -> None: ... + def clear(self) -> None: ... + def disable(self) -> None: ... + def enable(self) -> None: ... + def get(self, flame: bool = False, outfile: str = "") -> typing.Any: ... + +class Properties: + def __init__(self, arg0: ...) -> None: ... + def clear(self) -> None: ... + def get(self, arg0: str) -> str | None: ... + def put(self, arg0: str, arg1: str) -> None: ... + def remove(self, arg0: str) -> None: ... + +class QueryContinuous: + def __init__(self, arg0: ..., arg1: typing.Any) -> None: ... + def __iter__(self) -> QueryContinuous: ... + def __next__(self) -> list[dict[str, typing.Any]]: ... 
+ def probe_results(self) -> list[dict[str, typing.Any]]: ... + def results(self) -> list[dict[str, typing.Any]]: ... + def run( + self, + arg0: qdb_query_continuous_mode_type_t, + arg1: datetime.timedelta, + arg2: str, + ) -> None: ... + def stop(self) -> None: ... + +class Reader: + def __enter__(self) -> Reader: ... + def __exit__( + self, arg0: typing.Any, arg1: typing.Any, arg2: typing.Any + ) -> None: ... + def __init__( + self, + conn: ..., + table_names: list[str], + *, + column_names: list[str] = [], + batch_size: int = 65536, + ranges: list[tuple] = [], + ) -> None: ... + def __iter__(self) -> typing.Iterator[dict]: ... + def get_batch_size(self) -> int: ... + +class RetryOptions: + delay: datetime.timedelta + exponent: int + jitter: float + retries_left: int + def __init__( + self, + retries: int = 3, + *, + delay: datetime.timedelta = ..., + exponent: int = 2, + jitter: float = 0.1, + ) -> None: ... + def has_next(self) -> bool: ... + def next(self) -> RetryOptions: ... + +class String(ExpirableEntry): + def __init__(self, arg0: ..., arg1: str) -> None: ... + def compare_and_swap(self, new_content: str, comparand: str) -> str: ... + def get(self) -> str: ... + def get_and_remove(self) -> str: ... + def get_and_update(self, data: str) -> str: ... + def put(self, data: str) -> None: ... + def remove_if(self, comparand: str) -> None: ... + def update(self, data: str, expiry: datetime.datetime = ...) -> None: ... + +class Table(Entry): + """ + Table representation + """ + + def __init__(self, arg0: ..., arg1: str) -> None: ... + def __repr__(self) -> str: ... + def blob_get_ranges( + self, column: str, ranges: typing.Any = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def blob_insert( + self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray + ) -> None: ... + def column_id_by_index(self, arg0: int) -> str: ... + def column_index_by_id(self, arg0: str) -> int: ... + def column_info_by_index(self, arg0: int) -> ...: ... 
+ def column_type_by_id(self, arg0: str) -> ColumnType: ... + def column_type_by_index(self, arg0: int) -> ColumnType: ... + def create( + self, + columns: list[...], + shard_size: datetime.timedelta = ..., + ttl: datetime.timedelta = ..., + ) -> None: ... + def double_get_ranges( + self, column: str, ranges: typing.Any = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def double_insert( + self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray + ) -> None: ... + def erase_ranges(self, arg0: str, arg1: typing.Any) -> int: ... + def get_name(self: Entry) -> str: ... + def get_shard_size(self) -> datetime.timedelta: ... + def get_ttl(self) -> datetime.timedelta: ... + def has_ttl(self) -> bool: ... + def insert_columns(self, arg0: list[...]) -> None: ... + def int64_get_ranges( + self, column: str, ranges: typing.Any = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def int64_insert( + self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray + ) -> None: ... + def list_columns(self) -> list[...]: ... + def reader( + self, + *, + column_names: list[str] = [], + batch_size: int = 0, + ranges: list[tuple] = [], + ) -> ...: ... + def retrieve_metadata(self) -> None: ... + def string_get_ranges( + self, column: str, ranges: typing.Any = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def string_insert( + self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray + ) -> None: ... + def subscribe(self, arg0: typing.Any) -> typing.Any: ... + def timestamp_get_ranges( + self, column: str, ranges: typing.Any = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def timestamp_insert( + self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray + ) -> None: ... + +class Tag(Entry): + def __init__(self, arg0: ..., arg1: str) -> None: ... + def count(self) -> int: ... + def get_entries(self) -> list[str]: ... + +class Timestamp(ExpirableEntry): + def __init__(self, arg0: ..., arg1: str) -> None: ... 
+ def add(self, addend: ...) -> ...: ... + def get(self) -> ...: ... + def put(self, timestamp: ...) -> None: ... + def update(self, timestamp: ...) -> None: ... + +class TryAgainError(Error): + pass + +class UninitializedError(Error): + pass + +class WriterData: + def __init__(self) -> None: ... + def append(self, table: Table, index: typing.Any, column_data: list) -> None: + """ + Append new data + """ + + def empty(self) -> bool: + """ + Returns true if underlying data is empty + """ + +class WriterPushMode: + """ + Push Mode + + Members: + + Transactional + + Fast + + Truncate + + Async + """ + + Async: typing.ClassVar[WriterPushMode] # value = + Fast: typing.ClassVar[WriterPushMode] # value = + Transactional: typing.ClassVar[ + WriterPushMode + ] # value = + Truncate: typing.ClassVar[WriterPushMode] # value = + __members__: typing.ClassVar[ + dict[str, WriterPushMode] + ] # value = {'Transactional': , 'Fast': , 'Truncate': , 'Async': } + def __and__(self, other: typing.Any) -> typing.Any: ... + def __eq__(self, other: typing.Any) -> bool: ... + def __ge__(self, other: typing.Any) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: typing.Any) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> typing.Any: ... + def __le__(self, other: typing.Any) -> bool: ... + def __lt__(self, other: typing.Any) -> bool: ... + def __ne__(self, other: typing.Any) -> bool: ... + def __or__(self, other: typing.Any) -> typing.Any: ... + def __rand__(self, other: typing.Any) -> typing.Any: ... + def __repr__(self) -> str: ... + def __ror__(self, other: typing.Any) -> typing.Any: ... + def __rxor__(self, other: typing.Any) -> typing.Any: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: typing.Any) -> typing.Any: ... + @property + def name(self) -> str: ... 
+ @property + def value(self) -> int: ... + +def build() -> str: + """ + Return build number + """ + +def dict_query( + arg0: ..., arg1: str, arg2: typing.Any +) -> list[dict[str, typing.Any]]: ... +def version() -> str: + """ + Return version number + """ + +Writer = None +never_expires: datetime.datetime # value = datetime.datetime(1969, 12, 31, 19, 0) diff --git a/quasardb/batch_inserter.hpp b/quasardb/batch_inserter.hpp index d4299975..7e362bf0 100644 --- a/quasardb/batch_inserter.hpp +++ b/quasardb/batch_inserter.hpp @@ -225,18 +225,21 @@ static inline void register_batch_inserter(Module & m) { namespace py = pybind11; - py::class_{m, "TimeSeriesBatch"} // - .def(py::init &>()) // + py::class_{m, "TimeSeriesBatch"} + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.ts_batch(...)"}; + return nullptr; + })) .def("start_row", &qdb::batch_inserter::start_row, - "Calling this function marks the beginning of processing a new row.") // - .def("set_blob", &qdb::batch_inserter::set_blob) // - .def("set_string", &qdb::batch_inserter::set_string) // - .def("set_double", &qdb::batch_inserter::set_double) // - .def("set_int64", &qdb::batch_inserter::set_int64) // - .def("set_timestamp", &qdb::batch_inserter::set_timestamp) // - .def("push", &qdb::batch_inserter::push, "Regular batch push") // + "Calling this function marks the beginning of processing a new row.") + .def("set_blob", &qdb::batch_inserter::set_blob) + .def("set_string", &qdb::batch_inserter::set_string) + .def("set_double", &qdb::batch_inserter::set_double) + .def("set_int64", &qdb::batch_inserter::set_int64) + .def("set_timestamp", &qdb::batch_inserter::set_timestamp) + .def("push", &qdb::batch_inserter::push, "Regular batch push") .def("push_async", &qdb::batch_inserter::push_async, - "Asynchronous batch push that buffers data inside the QuasarDB daemon") // + "Asynchronous batch push that buffers data inside the QuasarDB daemon") .def("push_fast", 
&qdb::batch_inserter::push_fast, "Fast, in-place batch push that is efficient when doing lots of small, incremental pushes.") .def("push_truncate", &qdb::batch_inserter::push_truncate, diff --git a/quasardb/error.hpp b/quasardb/error.hpp index 8137b9c2..a9c11660 100644 --- a/quasardb/error.hpp +++ b/quasardb/error.hpp @@ -246,6 +246,21 @@ class invalid_datetime_exception : public exception {} }; +class direct_instantiation_exception : public exception +{ +public: + direct_instantiation_exception() noexcept + : exception(qdb_e_internal_local, + std::string("Direct instantiation is not allowed")) + {} + + direct_instantiation_exception(std::string const & correct_instantiation) noexcept + : exception(qdb_e_internal_local, + std::string("Direct instantiation is not allowed, use '") + + correct_instantiation + std::string("' instead.")) + {} +}; + namespace detail { @@ -388,6 +403,7 @@ static inline void register_errors(Module & m) py::register_exception(m, "TryAgainError", base_class); py::register_exception(m, "AsyncPipelineFullError", base_class); py::register_exception(m, "OutOfBoundsError", base_class); + py::register_exception(m, "DirectInstantiationError", base_class); } } // namespace qdb diff --git a/quasardb/extensions/__init__.pyi b/quasardb/extensions/__init__.pyi new file mode 100644 index 00000000..74077aaf --- /dev/null +++ b/quasardb/extensions/__init__.pyi @@ -0,0 +1,9 @@ +from __future__ import annotations + +from quasardb.extensions.writer import extend_writer + +from . import writer + +__all__: list = list() + +def extend_module(m): ... diff --git a/quasardb/extensions/writer.pyi b/quasardb/extensions/writer.pyi new file mode 100644 index 00000000..941dd623 --- /dev/null +++ b/quasardb/extensions/writer.pyi @@ -0,0 +1,30 @@ +from __future__ import annotations + +import copy as copy + +import numpy as np +from numpy import ma + +import quasardb as quasardb + +__all__: list = list() + +def _ensure_ctype(self, idx, ctype): ... 
+def _legacy_current_row(self): ... +def _legacy_next_row(self, table): ... +def _legacy_push(self): ... +def _legacy_set_blob(self, idx, x): ... +def _legacy_set_double(self, idx, x): ... +def _legacy_set_int64(self, idx, x): ... +def _legacy_set_string(self, idx, x): ... +def _legacy_set_timestamp(self, idx, x): ... +def _legacy_start_row(self, table, x): ... +def _wrap_fn(old_fn, replace_fn): ... +def extend_writer(x): + """ + + Extends the writer with the "old", batch inserter API. This is purely + a backwards compatibility layer, and we want to avoid having to maintain that + in C++ with few benefits. + + """ diff --git a/quasardb/metrics.pyi b/quasardb/metrics.pyi new file mode 100644 index 00000000..56605abf --- /dev/null +++ b/quasardb/metrics.pyi @@ -0,0 +1,24 @@ +""" +Keep track of low-level performance metrics +""" + +from __future__ import annotations + +import typing + +__all__ = ["Measure", "clear", "totals"] + +class Measure: + """ + Track all metrics within a block of code + """ + + def __enter__(self) -> Measure: ... + def __exit__( + self, arg0: typing.Any, arg1: typing.Any, arg2: typing.Any + ) -> None: ... + def __init__(self) -> None: ... + def get(self) -> dict[str, int]: ... + +def clear() -> None: ... +def totals() -> dict[str, int]: ... diff --git a/quasardb/quasardb/__init__.pyi b/quasardb/quasardb/__init__.pyi new file mode 100644 index 00000000..ea5cd91e --- /dev/null +++ b/quasardb/quasardb/__init__.pyi @@ -0,0 +1,4 @@ +from ._batch_column import BatchColumnInfo +from ._batch_inserter import TimeSeriesBatch + +__all__ = ["BatchColumnInfo", "TimeSeriesBatch"] diff --git a/quasardb/quasardb/_batch_column.pyi b/quasardb/quasardb/_batch_column.pyi new file mode 100644 index 00000000..f699486e --- /dev/null +++ b/quasardb/quasardb/_batch_column.pyi @@ -0,0 +1,5 @@ +class BatchColumnInfo: + column: str + elements_count_hint: int + timeseries: str + def __init__(self, ts_name: str, col_name: str, size_hint: int = 0) -> None: ... 
diff --git a/quasardb/quasardb/_batch_inserter.pyi b/quasardb/quasardb/_batch_inserter.pyi new file mode 100644 index 00000000..f1fb6a23 --- /dev/null +++ b/quasardb/quasardb/_batch_inserter.pyi @@ -0,0 +1,30 @@ +class TimeSeriesBatch: + def push(self) -> None: + """ + Regular batch push + """ + + def push_async(self) -> None: + """ + Asynchronous batch push that buffers data inside the QuasarDB daemon + """ + + def push_fast(self) -> None: + """ + Fast, in-place batch push that is efficient when doing lots of small, incremental pushes. + """ + + def push_truncate(self, **kwargs) -> None: + """ + Before inserting data, truncates any existing data. This is useful when you want your insertions to be idempotent, e.g. in case of a retry. + """ + + def set_blob(self, index: int, blob: bytes) -> None: ... + def set_double(self, index: int, double: float) -> None: ... + def set_int64(self, index: int, int64: int) -> None: ... + def set_string(self, index: int, string: str) -> None: ... + def set_timestamp(self, index: int, timestamp: object) -> None: ... + def start_row(self, ts: object) -> None: + """ + Calling this function marks the beginning of processing a new row. 
+ """ diff --git a/quasardb/table.hpp b/quasardb/table.hpp index 4deea961..bf4d0581 100644 --- a/quasardb/table.hpp +++ b/quasardb/table.hpp @@ -323,61 +323,64 @@ static inline void register_table(Module & m) .value("Int64", qdb_ts_column_int64) // .value("Timestamp", qdb_ts_column_timestamp); // - py::class_{m, "Table", "Table representation"} // - .def(py::init()) // - .def("__repr__", &qdb::table::repr) // - .def("create", &qdb::table::create, py::arg("columns"), // - py::arg("shard_size") = std::chrono::hours{24}, // - py::arg("ttl") = std::chrono::milliseconds::zero() // - ) // - .def("get_name", &qdb::table::get_name) // - .def("retrieve_metadata", &qdb::table::retrieve_metadata) // - .def("column_index_by_id", &qdb::table::column_index_by_id) // - .def("column_type_by_id", &qdb::table::column_type_by_id) // - .def("column_info_by_index", &qdb::table::column_info_by_index) // - .def("column_type_by_index", &qdb::table::column_type_by_index) // - .def("column_id_by_index", &qdb::table::column_id_by_index) // - .def("insert_columns", &qdb::table::insert_columns) // - .def("list_columns", &qdb::table::list_columns) // - .def("has_ttl", &qdb::table::has_ttl) // - .def("get_ttl", &qdb::table::get_ttl) // - .def("get_shard_size", &qdb::table::get_shard_size) // - // - .def("reader", &qdb::table::reader, // - py::kw_only(), // - py::arg("column_names") = std::vector{}, // - py::arg("batch_size") = std::size_t{0}, // - py::arg("ranges") = std::vector{} // - ) // - // - .def("subscribe", &qdb::table::subscribe) // - .def("erase_ranges", &qdb::table::erase_ranges) // - .def("blob_insert", &qdb::table::blob_insert) // - .def("string_insert", &qdb::table::string_insert) // - .def("double_insert", &qdb::table::double_insert) // - .def("int64_insert", &qdb::table::int64_insert) // - .def("timestamp_insert", &qdb::table::timestamp_insert) // - // - .def("blob_get_ranges", &qdb::table::blob_get_ranges, // - py::arg("column"), // - py::arg("ranges") = py::none{} // - ) // - 
.def("string_get_ranges", &qdb::table::string_get_ranges, // - py::arg("column"), // - py::arg("ranges") = py::none{} // - ) // - .def("double_get_ranges", &qdb::table::double_get_ranges, // - py::arg("column"), // - py::arg("ranges") = py::none{} // - ) // - .def("int64_get_ranges", &qdb::table::int64_get_ranges, // - py::arg("column"), // - py::arg("ranges") = py::none{} // - ) // - .def("timestamp_get_ranges", &qdb::table::timestamp_get_ranges, // - py::arg("column"), // - py::arg("ranges") = py::none{} // - ); // + py::class_{m, "Table", "Table representation"} // + .def(py::init([](py::args, py::kwargs) { // + throw qdb::direct_instantiation_exception{"conn.table(...)"}; // + return nullptr; // + })) // + .def("__repr__", &qdb::table::repr) // + .def("create", &qdb::table::create, py::arg("columns"), // + py::arg("shard_size") = std::chrono::hours{24}, // + py::arg("ttl") = std::chrono::milliseconds::zero() // + ) // + .def("get_name", &qdb::table::get_name) // + .def("retrieve_metadata", &qdb::table::retrieve_metadata) // + .def("column_index_by_id", &qdb::table::column_index_by_id) // + .def("column_type_by_id", &qdb::table::column_type_by_id) // + .def("column_info_by_index", &qdb::table::column_info_by_index) // + .def("column_type_by_index", &qdb::table::column_type_by_index) // + .def("column_id_by_index", &qdb::table::column_id_by_index) // + .def("insert_columns", &qdb::table::insert_columns) // + .def("list_columns", &qdb::table::list_columns) // + .def("has_ttl", &qdb::table::has_ttl) // + .def("get_ttl", &qdb::table::get_ttl) // + .def("get_shard_size", &qdb::table::get_shard_size) // + // + .def("reader", &qdb::table::reader, // + py::kw_only(), // + py::arg("column_names") = std::vector{}, // + py::arg("batch_size") = std::size_t{0}, // + py::arg("ranges") = std::vector{} // + ) // + // + .def("subscribe", &qdb::table::subscribe) // + .def("erase_ranges", &qdb::table::erase_ranges) // + .def("blob_insert", &qdb::table::blob_insert) // + 
.def("string_insert", &qdb::table::string_insert) // + .def("double_insert", &qdb::table::double_insert) // + .def("int64_insert", &qdb::table::int64_insert) // + .def("timestamp_insert", &qdb::table::timestamp_insert) // + // + .def("blob_get_ranges", &qdb::table::blob_get_ranges, // + py::arg("column"), // + py::arg("ranges") = py::none{} // + ) // + .def("string_get_ranges", &qdb::table::string_get_ranges, // + py::arg("column"), // + py::arg("ranges") = py::none{} // + ) // + .def("double_get_ranges", &qdb::table::double_get_ranges, // + py::arg("column"), // + py::arg("ranges") = py::none{} // + ) // + .def("int64_get_ranges", &qdb::table::int64_get_ranges, // + py::arg("column"), // + py::arg("ranges") = py::none{} // + ) // + .def("timestamp_get_ranges", &qdb::table::timestamp_get_ranges, // + py::arg("column"), // + py::arg("ranges") = py::none{} // + ); // } } // namespace qdb diff --git a/scripts/teamcity/10.build.sh b/scripts/teamcity/10.build.sh index 9de5e792..b1b16957 100755 --- a/scripts/teamcity/10.build.sh +++ b/scripts/teamcity/10.build.sh @@ -48,16 +48,6 @@ export QDB_TESTS_ENABLED=OFF ${VENV_PYTHON} -m build -w - -# # Build the stubs and rebuild the api -cd dist -${VENV_PYTHON} -m pip install *.whl -pybind11-stubgen quasardb -o ../ # stubgen (from mypy) is not that good -cd ../ - -${VENV_PYTHON} -m build -w - -## Rename the build for whl in dist/*.whl; do relabel_wheel "$whl" done diff --git a/setup.py b/setup.py index 62054071..9e751577 100644 --- a/setup.py +++ b/setup.py @@ -5,19 +5,17 @@ # pylint: disable=C0103,C0111,C0326,W0201,line-too-long +import glob import os -import re -import sys import platform import subprocess -import glob +import sys +from setuptools import Extension, setup from setuptools.command.build_ext import build_ext -from setuptools import setup, Extension from setuptools.command.install import install # NOTE: Import distutils after setuptools. 
-from pkg_resources import get_build_platform from wheel.bdist_wheel import bdist_wheel as old_bdist_wheel qdb_version = "3.15.0.dev0" @@ -29,6 +27,7 @@ package_name = "quasardb" packages = [ package_name, + "quasardb.quasardb", # stubs "quasardb.pandas", "quasardb.numpy", "quasardb.extensions", From a4b16966fc9aba18039fb01311274de653db9154 Mon Sep 17 00:00:00 2001 From: rodp63 Date: Wed, 28 May 2025 14:29:37 -0500 Subject: [PATCH 3/7] QDB-16709 - Complete pyi files --- dev-requirements.txt | 2 +- quasardb/__init__.pyi | 170 ++-- quasardb/all.pyi | 778 ------------------ quasardb/batch_column.hpp | 16 +- quasardb/blob.hpp | 25 +- quasardb/cluster.cpp | 148 ++-- quasardb/cluster.hpp | 5 +- quasardb/continuous.hpp | 20 +- quasardb/detail/retry.cpp | 32 +- quasardb/detail/ts_column.hpp | 26 +- quasardb/double.hpp | 17 +- quasardb/entry.hpp | 73 +- quasardb/error.hpp | 2 +- quasardb/extensions/__init__.pyi | 9 - quasardb/extensions/writer.pyi | 30 - quasardb/integer.hpp | 17 +- quasardb/module.cpp | 7 +- quasardb/node.hpp | 18 +- quasardb/options.hpp | 87 +- quasardb/perf.hpp | 13 +- quasardb/properties.hpp | 7 +- quasardb/quasardb/__init__.pyi | 96 ++- quasardb/quasardb/_blob.pyi | 16 + quasardb/quasardb/_cluster.pyi | 100 +++ quasardb/quasardb/_continuous.pyi | 16 + quasardb/quasardb/_double.pyi | 7 + quasardb/quasardb/_entry.pyi | 64 ++ quasardb/quasardb/_error.pyi | 15 + quasardb/quasardb/_integer.pyi | 7 + quasardb/quasardb/_node.pyi | 26 + quasardb/quasardb/_options.pyi | 105 +++ quasardb/quasardb/_perf.pyi | 5 + quasardb/quasardb/_properties.pyi | 5 + quasardb/quasardb/_query.pyi | 2 + quasardb/quasardb/_reader.pyi | 9 + quasardb/quasardb/_retry.pyi | 16 + quasardb/quasardb/_string.pyi | 12 + quasardb/quasardb/_table.pyi | 125 +++ quasardb/quasardb/_tag.pyi | 5 + quasardb/quasardb/_timestamp.pyi | 9 + quasardb/quasardb/_writer.pyi | 111 +++ .../metrics/__init__.pyi} | 6 +- quasardb/query.hpp | 9 +- quasardb/reader.cpp | 31 +- quasardb/string.hpp | 25 +- 
quasardb/table.hpp | 133 ++- quasardb/tag.hpp | 11 +- quasardb/timestamp.hpp | 17 +- quasardb/writer.hpp | 27 +- scripts/teamcity/10.build.sh | 2 + setup.py | 3 +- 51 files changed, 1213 insertions(+), 1304 deletions(-) delete mode 100644 quasardb/all.pyi delete mode 100644 quasardb/extensions/__init__.pyi delete mode 100644 quasardb/extensions/writer.pyi create mode 100644 quasardb/quasardb/_blob.pyi create mode 100644 quasardb/quasardb/_cluster.pyi create mode 100644 quasardb/quasardb/_continuous.pyi create mode 100644 quasardb/quasardb/_double.pyi create mode 100644 quasardb/quasardb/_entry.pyi create mode 100644 quasardb/quasardb/_error.pyi create mode 100644 quasardb/quasardb/_integer.pyi create mode 100644 quasardb/quasardb/_node.pyi create mode 100644 quasardb/quasardb/_options.pyi create mode 100644 quasardb/quasardb/_perf.pyi create mode 100644 quasardb/quasardb/_properties.pyi create mode 100644 quasardb/quasardb/_query.pyi create mode 100644 quasardb/quasardb/_reader.pyi create mode 100644 quasardb/quasardb/_retry.pyi create mode 100644 quasardb/quasardb/_string.pyi create mode 100644 quasardb/quasardb/_table.pyi create mode 100644 quasardb/quasardb/_tag.pyi create mode 100644 quasardb/quasardb/_timestamp.pyi create mode 100644 quasardb/quasardb/_writer.pyi rename quasardb/{metrics.pyi => quasardb/metrics/__init__.pyi} (76%) diff --git a/dev-requirements.txt b/dev-requirements.txt index 82b45f33..df185393 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -3,7 +3,7 @@ # FreeBSD), and there are some conflicting requirements. For example, Numpy # doesn't have any version that works on both Python 3.6 and Python 3.10. 
-numpy ~= 1.19.5; python_version <= '3.7' +# numpy ~= 1.19.5; python_version <= '3.7' numpy ~= 1.20.3; python_version == '3.8' numpy ~= 1.20.3; python_version == '3.9' numpy >= 2.0.1; python_version > '3.9' diff --git a/quasardb/__init__.pyi b/quasardb/__init__.pyi index 1456d079..01eae0f0 100644 --- a/quasardb/__init__.pyi +++ b/quasardb/__init__.pyi @@ -9,120 +9,64 @@ from __future__ import annotations -import datetime - -from quasardb.quasardb import BatchColumnInfo - -# from quasardb.quasardb import ( -# AliasAlreadyExistsError, -# AliasNotFoundError, -# AsyncPipelineFullError, -# Blob, -# Cluster, -# ColumnInfo, -# ColumnType, -# DirectBlob, -# DirectInteger, -# Double, -# Entry, -# Error, -# ExpirableEntry, -# FindQuery, -# IncompatibleTypeError, -# IndexedColumnInfo, -# InputBufferTooSmallError, -# Integer, -# InternalLocalError, -# InvalidArgumentError, -# InvalidDatetimeError, -# InvalidHandleError, -# InvalidQueryError, -# MaskedArray, -# Node, -# NotImplementedError, -# Options, -# OutOfBoundsError, -# Perf, -# Properties, -# QueryContinuous, -# Reader, -# RetryOptions, -# String, -# Table, -# Tag, -# Timestamp, -# TryAgainError, -# UninitializedError, -# Writer, -# WriterData, -# WriterPushMode, -# build, -# dict_query, -# metrics, -# version, -# ) +from quasardb.quasardb import ( + AliasAlreadyExistsError, + AliasNotFoundError, + AsyncPipelineFullError, + BatchColumnInfo, + Cluster, + ColumnInfo, + ColumnType, + Error, + IncompatibleTypeError, + IndexedColumnInfo, + InputBufferTooSmallError, + InternalLocalError, + InvalidArgumentError, + InvalidDatetimeError, + InvalidHandleError, + InvalidQueryError, + Node, + NotImplementedError, + OutOfBoundsError, + RetryOptions, + TryAgainError, + UninitializedError, + WriterData, + WriterPushMode, + build, + metrics, + never_expires, + version, +) __all__ = [ - # "AliasAlreadyExistsError", - # "AliasNotFoundError", - # "AsyncPipelineFullError", + "AliasAlreadyExistsError", + "AliasNotFoundError", + 
"AsyncPipelineFullError", "BatchColumnInfo", - # "Blob", - # "Cluster", - # "ColumnInfo", - # "ColumnType", - # "DirectBlob", - # "DirectInteger", - # "Double", - # "Entry", - # "Error", - # "ExpirableEntry", - # "FindQuery", - # "IncompatibleTypeError", - # "IndexedColumnInfo", - # "InputBufferTooSmallError", - # "Integer", - # "InternalLocalError", - # "InvalidArgumentError", - # "InvalidDatetimeError", - # "InvalidHandleError", - # "InvalidQueryError", - # "MaskedArray", - # "Node", - # "NotImplementedError", - # "Options", - # "OutOfBoundsError", - # "Perf", - # "Properties", - # "QueryContinuous", - # "Reader", - # "RetryOptions", - # "String", - # "Table", - # "Tag", - # "Timestamp", - # "TryAgainError", - # "UninitializedError", - # "Writer", - # "WriterData", - # "WriterPushMode", - # "build", - # "dict_query", - # "extend_module", - # "extensions", - # "generic_error_msg", - # "glibc_error_msg", - # "link_error_msg", - # "metrics", - # "never_expires", - # "quasardb", - # "unknown_error_msg", - # "version", + "Cluster", + "ColumnInfo", + "ColumnType", + "Error", + "IncompatibleTypeError", + "IndexedColumnInfo", + "InputBufferTooSmallError", + "InternalLocalError", + "InvalidArgumentError", + "InvalidDatetimeError", + "InvalidHandleError", + "InvalidQueryError", + "Node", + "NotImplementedError", + "OutOfBoundsError", + "RetryOptions", + "TryAgainError", + "UninitializedError", + "WriterData", + "WriterPushMode", + "build", + "metrics", + "never_expires", + "version", ] - -def generic_error_msg(msg, e=None): ... -def glibc_error_msg(e): ... -def link_error_msg(e): ... -def unknown_error_msg(): ... 
- -never_expires: datetime.datetime # value = datetime.datetime(1969, 12, 31, 19, 0) diff --git a/quasardb/all.pyi b/quasardb/all.pyi deleted file mode 100644 index bbaa1bfb..00000000 --- a/quasardb/all.pyi +++ /dev/null @@ -1,778 +0,0 @@ -""" -QuasarDB Official Python API -""" - -from __future__ import annotations - -import datetime -import typing - -import numpy -import numpy.ma - -from . import metrics - -__all__ = [ - "AliasAlreadyExistsError", - "AliasNotFoundError", - "AsyncPipelineFullError", - "Blob", - "Cluster", - "ColumnInfo", - "ColumnType", - "DirectBlob", - "DirectInteger", - "Double", - "Entry", - "Error", - "ExpirableEntry", - "FindQuery", - "IncompatibleTypeError", - "IndexedColumnInfo", - "InputBufferTooSmallError", - "Integer", - "InternalLocalError", - "InvalidArgumentError", - "InvalidDatetimeError", - "InvalidHandleError", - "InvalidQueryError", - "MaskedArray", - "Node", - "NotImplementedError", - "Options", - "OutOfBoundsError", - "Perf", - "Properties", - "QueryContinuous", - "Reader", - "RetryOptions", - "String", - "Table", - "Tag", - "Timestamp", - "TryAgainError", - "UninitializedError", - "Writer", - "WriterData", - "WriterPushMode", - "build", - "dict_query", - "metrics", - "never_expires", - "version", -] - -class AliasAlreadyExistsError(Error): - pass - -class AliasNotFoundError(Error): - pass - -class AsyncPipelineFullError(Error): - pass - -class Blob(ExpirableEntry): - def __init__(self, arg0: ..., arg1: str) -> None: ... - def compare_and_swap(self, new_content: str, comparand: str) -> bytes: ... - def get(self) -> bytes: ... - def get_and_remove(self) -> bytes: ... - def get_and_update(self, data: str) -> bytes: ... - def put(self, data: str) -> None: ... - def remove_if(self, comparand: str) -> None: ... - def update(self, data: str, expiry: datetime.datetime = ...) -> None: ... - -class Cluster: - """ - Represents a connection to the QuasarDB cluster. - """ - - def __enter__(self) -> Cluster: ... 
- def __exit__( - self, arg0: typing.Any, arg1: typing.Any, arg2: typing.Any - ) -> None: ... - def __init__( - self, - uri: str, - user_name: str = "", - user_private_key: str = "", - cluster_public_key: str = "", - *, - user_security_file: str = "", - cluster_public_key_file: str = "", - timeout: datetime.timedelta = ..., - do_version_check: bool = False, - enable_encryption: bool = False, - compression_mode: Options.Compression = ..., - client_max_parallelism: int = 0, - ) -> None: ... - def blob(self, arg0: str) -> ...: ... - def close(self) -> None: ... - def compact_abort(self) -> None: ... - def compact_full(self) -> None: ... - def compact_progress(self) -> int: ... - def double(self, arg0: str) -> ...: ... - def endpoints(self) -> list[str]: ... - def find(self, arg0: str) -> ...: ... - def get_memory_info(self) -> str: ... - def inserter(self, arg0: list[...]) -> ...: ... - def integer(self, arg0: str) -> ...: ... - def is_open(self) -> bool: ... - def node(self, arg0: str) -> ...: ... - def node_config(self, arg0: str) -> typing.Any: ... - def node_status(self, arg0: str) -> typing.Any: ... - def node_topology(self, arg0: str) -> typing.Any: ... - def options(self) -> Options: ... - def perf(self) -> ...: ... - def pinned_writer(self) -> ...: ... - def prefix_count(self, arg0: str) -> int: ... - def prefix_get(self, arg0: str, arg1: int) -> list[str]: ... - def properties(self) -> ...: ... - def purge_all(self, arg0: datetime.timedelta) -> None: ... - def purge_cache(self, arg0: datetime.timedelta) -> None: ... - def query(self, query: str, blobs: typing.Any = False) -> typing.Any: ... - def query_continuous_full( - self, query: str, pace: datetime.timedelta, blobs: typing.Any = False - ) -> ...: ... - def query_continuous_new_values( - self, query: str, pace: datetime.timedelta, blobs: typing.Any = False - ) -> ...: ... - def query_numpy(self, query: str) -> typing.Any: ... 
- def reader( - self, - table_names: list[str], - *, - column_names: list[str] = [], - batch_size: int = 0, - ranges: list[tuple] = [], - ) -> ...: ... - def string(self, arg0: str) -> ...: ... - def suffix_count(self, arg0: str) -> int: ... - def suffix_get(self, arg0: str, arg1: int) -> list[str]: ... - def table(self, arg0: str) -> ...: ... - def tag(self, arg0: str) -> ...: ... - def tidy_memory(self) -> None: ... - def timestamp(self, arg0: str) -> ...: ... - def trim_all(self, arg0: datetime.timedelta, arg1: datetime.timedelta) -> None: ... - def ts(self, arg0: str) -> ...: ... - def ts_batch(self, arg0: list[...]) -> ...: ... - def uri(self) -> str: ... - def wait_for_compaction(self) -> None: ... - def writer(self) -> ...: ... - -class ColumnInfo: - name: str - symtable: str - type: ColumnType - @typing.overload - def __init__(self, arg0: ColumnType, arg1: str) -> None: ... - @typing.overload - def __init__(self, arg0: ColumnType, arg1: str, arg2: str) -> None: ... - def __repr__(self) -> str: ... - -class ColumnType: - """ - Column type - - Members: - - Uninitialized - - Double - - Blob - - String - - Symbol - - Int64 - - Timestamp - """ - - Blob: typing.ClassVar[ColumnType] # value = - Double: typing.ClassVar[ColumnType] # value = - Int64: typing.ClassVar[ColumnType] # value = - String: typing.ClassVar[ColumnType] # value = - Symbol: typing.ClassVar[ColumnType] # value = - Timestamp: typing.ClassVar[ColumnType] # value = - Uninitialized: typing.ClassVar[ColumnType] # value = - __members__: typing.ClassVar[ - dict[str, ColumnType] - ] # value = {'Uninitialized': , 'Double': , 'Blob': , 'String': , 'Symbol': , 'Int64': , 'Timestamp': } - def __and__(self, other: typing.Any) -> typing.Any: ... - def __eq__(self, other: typing.Any) -> bool: ... - def __ge__(self, other: typing.Any) -> bool: ... - def __getstate__(self) -> int: ... - def __gt__(self, other: typing.Any) -> bool: ... - def __hash__(self) -> int: ... - def __index__(self) -> int: ... 
- def __init__(self, value: int) -> None: ... - def __int__(self) -> int: ... - def __invert__(self) -> typing.Any: ... - def __le__(self, other: typing.Any) -> bool: ... - def __lt__(self, other: typing.Any) -> bool: ... - def __ne__(self, other: typing.Any) -> bool: ... - def __or__(self, other: typing.Any) -> typing.Any: ... - def __rand__(self, other: typing.Any) -> typing.Any: ... - def __repr__(self) -> str: ... - def __ror__(self, other: typing.Any) -> typing.Any: ... - def __rxor__(self, other: typing.Any) -> typing.Any: ... - def __setstate__(self, state: int) -> None: ... - def __str__(self) -> str: ... - def __xor__(self, other: typing.Any) -> typing.Any: ... - @property - def name(self) -> str: ... - @property - def value(self) -> int: ... - -class DirectBlob: - def __init__(self, arg0: ..., arg1: ..., arg2: str) -> None: ... - def get(self) -> bytes: ... - def put(self, data: str) -> None: ... - def remove(self) -> None: ... - def update(self, data: str) -> None: ... - -class DirectInteger: - def __init__(self, arg0: ..., arg1: ..., arg2: str) -> None: ... - def get(self) -> int: ... - def put(self, integer: int) -> None: ... - def remove(self) -> None: ... - def update(self, integer: int) -> None: ... - -class Double(ExpirableEntry): - def __init__(self, arg0: ..., arg1: str) -> None: ... - def add(self, addend: float) -> float: ... - def get(self) -> float: ... - def put(self, double: float) -> None: ... - def update(self, double: float) -> None: ... - -class Entry: - class Metadata: - expiry_time: qdb_timespec_t - modification_time: qdb_timespec_t - size: int - type: Entry.Type - def __init__(self) -> None: ... 
- - class Type: - """ - Entry type - - Members: - - Uninitialized - - Integer - - HashSet - - Tag - - Deque - - Stream - - Timeseries - """ - - Deque: typing.ClassVar[Entry.Type] # value = - HashSet: typing.ClassVar[Entry.Type] # value = - Integer: typing.ClassVar[Entry.Type] # value = - Stream: typing.ClassVar[Entry.Type] # value = - Tag: typing.ClassVar[Entry.Type] # value = - Timeseries: typing.ClassVar[Entry.Type] # value = - Uninitialized: typing.ClassVar[Entry.Type] # value = - __members__: typing.ClassVar[ - dict[str, Entry.Type] - ] # value = {'Uninitialized': , 'Integer': , 'HashSet': , 'Tag': , 'Deque': , 'Stream': , 'Timeseries': } - def __and__(self, other: typing.Any) -> typing.Any: ... - def __eq__(self, other: typing.Any) -> bool: ... - def __ge__(self, other: typing.Any) -> bool: ... - def __getstate__(self) -> int: ... - def __gt__(self, other: typing.Any) -> bool: ... - def __hash__(self) -> int: ... - def __index__(self) -> int: ... - def __init__(self, value: int) -> None: ... - def __int__(self) -> int: ... - def __invert__(self) -> typing.Any: ... - def __le__(self, other: typing.Any) -> bool: ... - def __lt__(self, other: typing.Any) -> bool: ... - def __ne__(self, other: typing.Any) -> bool: ... - def __or__(self, other: typing.Any) -> typing.Any: ... - def __rand__(self, other: typing.Any) -> typing.Any: ... - def __repr__(self) -> str: ... - def __ror__(self, other: typing.Any) -> typing.Any: ... - def __rxor__(self, other: typing.Any) -> typing.Any: ... - def __setstate__(self, state: int) -> None: ... - def __str__(self) -> str: ... - def __xor__(self, other: typing.Any) -> typing.Any: ... - @property - def name(self) -> str: ... - @property - def value(self) -> int: ... - - def __init__(self, arg0: ..., arg1: str) -> None: ... - def attach_tag(self, arg0: str) -> bool: ... - def attach_tags(self, arg0: list[str]) -> None: ... - def detach_tag(self, arg0: str) -> bool: ... - def detach_tags(self, arg0: list[str]) -> None: ... 
- def exists(self) -> bool: - """ - Returns true if the entry exists - """ - - def get_entry_type(self) -> Entry.Type: ... - def get_location(self) -> tuple[str, int]: ... - def get_metadata(self) -> ...: ... - def get_name(self) -> str: ... - def get_tags(self) -> list[str]: ... - def has_tag(self, arg0: str) -> bool: ... - def remove(self) -> None: ... - -class Error(RuntimeError): - pass - -class ExpirableEntry(Entry): - def __init__(self, arg0: ..., arg1: str) -> None: ... - def expires_at(self, arg0: ...) -> None: ... - def expires_from_now(self, arg0: datetime.timedelta) -> None: ... - def get_expiry_time(self) -> ...: ... - -class FindQuery: - def __init__(self, arg0: ..., arg1: str) -> None: ... - def run(self) -> list[str]: ... - -class IncompatibleTypeError(Error): - pass - -class IndexedColumnInfo: - @typing.overload - def __init__(self, arg0: ColumnType, arg1: int) -> None: ... - @typing.overload - def __init__(self, arg0: ColumnType, arg1: int, arg2: str) -> None: ... - @property - def index(self) -> int: ... - @property - def symtable(self) -> str: ... - @property - def type(self) -> ColumnType: ... - -class InputBufferTooSmallError(Error): - pass - -class Integer(ExpirableEntry): - def __init__(self, arg0: ..., arg1: str) -> None: ... - def add(self, addend: int) -> int: ... - def get(self) -> int: ... - def put(self, integer: int) -> None: ... - def update(self, integer: int) -> None: ... - -class InternalLocalError(Error): - pass - -class InvalidArgumentError(Error): - pass - -class InvalidDatetimeError(Error): - pass - -class InvalidHandleError(Error): - pass - -class InvalidQueryError(Error): - pass - -class MaskedArray: - pass - -class Node: - def __init__( - self, - uri: str, - user_name: str = "", - user_private_key: str = "", - cluster_public_key: str = "", - *, - user_security_file: str = "", - cluster_public_key_file: str = "", - enable_encryption: bool = False, - ) -> None: ... - def blob(self, arg0: str) -> ...: ... 
- def integer(self, arg0: str) -> ...: ... - def prefix_get(self, arg0: str, arg1: int) -> list[str]: ... - -class NotImplementedError(Error): - pass - -class Options: - class Compression: - """ - Compression mode - - Members: - - Disabled - - Best - - Balanced - """ - - Balanced: typing.ClassVar[ - Options.Compression - ] # value = - Best: typing.ClassVar[Options.Compression] # value = - Disabled: typing.ClassVar[ - Options.Compression - ] # value = - __members__: typing.ClassVar[ - dict[str, Options.Compression] - ] # value = {'Disabled': , 'Best': , 'Balanced': } - def __and__(self, other: typing.Any) -> typing.Any: ... - def __eq__(self, other: typing.Any) -> bool: ... - def __ge__(self, other: typing.Any) -> bool: ... - def __getstate__(self) -> int: ... - def __gt__(self, other: typing.Any) -> bool: ... - def __hash__(self) -> int: ... - def __index__(self) -> int: ... - def __init__(self, value: int) -> None: ... - def __int__(self) -> int: ... - def __invert__(self) -> typing.Any: ... - def __le__(self, other: typing.Any) -> bool: ... - def __lt__(self, other: typing.Any) -> bool: ... - def __ne__(self, other: typing.Any) -> bool: ... - def __or__(self, other: typing.Any) -> typing.Any: ... - def __rand__(self, other: typing.Any) -> typing.Any: ... - def __repr__(self) -> str: ... - def __ror__(self, other: typing.Any) -> typing.Any: ... - def __rxor__(self, other: typing.Any) -> typing.Any: ... - def __setstate__(self, state: int) -> None: ... - def __str__(self) -> str: ... - def __xor__(self, other: typing.Any) -> typing.Any: ... - @property - def name(self) -> str: ... - @property - def value(self) -> int: ... 
- - class Encryption: - """ - Encryption type - - Members: - - Disabled - - AES256GCM - """ - - AES256GCM: typing.ClassVar[ - Options.Encryption - ] # value = - Disabled: typing.ClassVar[ - Options.Encryption - ] # value = - __members__: typing.ClassVar[ - dict[str, Options.Encryption] - ] # value = {'Disabled': , 'AES256GCM': } - def __and__(self, other: typing.Any) -> typing.Any: ... - def __eq__(self, other: typing.Any) -> bool: ... - def __ge__(self, other: typing.Any) -> bool: ... - def __getstate__(self) -> int: ... - def __gt__(self, other: typing.Any) -> bool: ... - def __hash__(self) -> int: ... - def __index__(self) -> int: ... - def __init__(self, value: int) -> None: ... - def __int__(self) -> int: ... - def __invert__(self) -> typing.Any: ... - def __le__(self, other: typing.Any) -> bool: ... - def __lt__(self, other: typing.Any) -> bool: ... - def __ne__(self, other: typing.Any) -> bool: ... - def __or__(self, other: typing.Any) -> typing.Any: ... - def __rand__(self, other: typing.Any) -> typing.Any: ... - def __repr__(self) -> str: ... - def __ror__(self, other: typing.Any) -> typing.Any: ... - def __rxor__(self, other: typing.Any) -> typing.Any: ... - def __setstate__(self, state: int) -> None: ... - def __str__(self) -> str: ... - def __xor__(self, other: typing.Any) -> typing.Any: ... - @property - def name(self) -> str: ... - @property - def value(self) -> int: ... - - def __init__(self, arg0: ...) -> None: ... - def disable_user_properties(self) -> None: ... - def enable_user_properties(self) -> None: ... - def get_client_max_batch_load(self) -> int: - """ - Get the number of shards per thread used for the batch writer. - """ - - def get_client_max_in_buf_size(self) -> int: ... - def get_client_max_parallelism(self) -> int: ... - def get_cluster_max_in_buf_size(self) -> int: ... 
- def get_connection_per_address_soft_limit(self) -> int: - """ - Get the maximum number of connections per qdbd node - """ - - def get_query_max_length(self) -> int: ... - def get_stabilization_max_wait(self) -> datetime.timedelta: ... - def get_timeout(self) -> datetime.timedelta: ... - def get_timezone(self) -> str: ... - def set_client_max_batch_load(self, arg0: int) -> None: - """ - Adjust the number of shards per thread used for the batch writer. - """ - - def set_client_max_in_buf_size(self, arg0: int) -> None: ... - def set_client_soft_memory_limit(self, limit: int) -> None: ... - def set_cluster_public_key(self, arg0: str) -> None: ... - def set_connection_per_address_soft_limit(self, arg0: int) -> None: - """ - Adjust the maximum number of connections per qdbd node - """ - - def set_encryption(self, arg0: Options.Encryption) -> None: ... - def set_max_cardinality(self, arg0: int) -> None: ... - def set_query_max_length(self, query_max_length: int) -> None: ... - def set_stabilization_max_wait(self, arg0: datetime.timedelta) -> None: ... - def set_timeout(self, arg0: datetime.timedelta) -> None: ... - def set_timezone(self, arg0: str) -> None: ... - def set_user_credentials(self, arg0: str, arg1: str) -> None: ... - -class OutOfBoundsError(Error): - pass - -class Perf: - def __init__(self, arg0: ...) -> None: ... - def clear(self) -> None: ... - def disable(self) -> None: ... - def enable(self) -> None: ... - def get(self, flame: bool = False, outfile: str = "") -> typing.Any: ... - -class Properties: - def __init__(self, arg0: ...) -> None: ... - def clear(self) -> None: ... - def get(self, arg0: str) -> str | None: ... - def put(self, arg0: str, arg1: str) -> None: ... - def remove(self, arg0: str) -> None: ... - -class QueryContinuous: - def __init__(self, arg0: ..., arg1: typing.Any) -> None: ... - def __iter__(self) -> QueryContinuous: ... - def __next__(self) -> list[dict[str, typing.Any]]: ... 
- def probe_results(self) -> list[dict[str, typing.Any]]: ... - def results(self) -> list[dict[str, typing.Any]]: ... - def run( - self, - arg0: qdb_query_continuous_mode_type_t, - arg1: datetime.timedelta, - arg2: str, - ) -> None: ... - def stop(self) -> None: ... - -class Reader: - def __enter__(self) -> Reader: ... - def __exit__( - self, arg0: typing.Any, arg1: typing.Any, arg2: typing.Any - ) -> None: ... - def __init__( - self, - conn: ..., - table_names: list[str], - *, - column_names: list[str] = [], - batch_size: int = 65536, - ranges: list[tuple] = [], - ) -> None: ... - def __iter__(self) -> typing.Iterator[dict]: ... - def get_batch_size(self) -> int: ... - -class RetryOptions: - delay: datetime.timedelta - exponent: int - jitter: float - retries_left: int - def __init__( - self, - retries: int = 3, - *, - delay: datetime.timedelta = ..., - exponent: int = 2, - jitter: float = 0.1, - ) -> None: ... - def has_next(self) -> bool: ... - def next(self) -> RetryOptions: ... - -class String(ExpirableEntry): - def __init__(self, arg0: ..., arg1: str) -> None: ... - def compare_and_swap(self, new_content: str, comparand: str) -> str: ... - def get(self) -> str: ... - def get_and_remove(self) -> str: ... - def get_and_update(self, data: str) -> str: ... - def put(self, data: str) -> None: ... - def remove_if(self, comparand: str) -> None: ... - def update(self, data: str, expiry: datetime.datetime = ...) -> None: ... - -class Table(Entry): - """ - Table representation - """ - - def __init__(self, arg0: ..., arg1: str) -> None: ... - def __repr__(self) -> str: ... - def blob_get_ranges( - self, column: str, ranges: typing.Any = None - ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... - def blob_insert( - self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray - ) -> None: ... - def column_id_by_index(self, arg0: int) -> str: ... - def column_index_by_id(self, arg0: str) -> int: ... - def column_info_by_index(self, arg0: int) -> ...: ... 
- def column_type_by_id(self, arg0: str) -> ColumnType: ... - def column_type_by_index(self, arg0: int) -> ColumnType: ... - def create( - self, - columns: list[...], - shard_size: datetime.timedelta = ..., - ttl: datetime.timedelta = ..., - ) -> None: ... - def double_get_ranges( - self, column: str, ranges: typing.Any = None - ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... - def double_insert( - self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray - ) -> None: ... - def erase_ranges(self, arg0: str, arg1: typing.Any) -> int: ... - def get_name(self: Entry) -> str: ... - def get_shard_size(self) -> datetime.timedelta: ... - def get_ttl(self) -> datetime.timedelta: ... - def has_ttl(self) -> bool: ... - def insert_columns(self, arg0: list[...]) -> None: ... - def int64_get_ranges( - self, column: str, ranges: typing.Any = None - ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... - def int64_insert( - self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray - ) -> None: ... - def list_columns(self) -> list[...]: ... - def reader( - self, - *, - column_names: list[str] = [], - batch_size: int = 0, - ranges: list[tuple] = [], - ) -> ...: ... - def retrieve_metadata(self) -> None: ... - def string_get_ranges( - self, column: str, ranges: typing.Any = None - ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... - def string_insert( - self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray - ) -> None: ... - def subscribe(self, arg0: typing.Any) -> typing.Any: ... - def timestamp_get_ranges( - self, column: str, ranges: typing.Any = None - ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... - def timestamp_insert( - self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ma.MaskedArray - ) -> None: ... - -class Tag(Entry): - def __init__(self, arg0: ..., arg1: str) -> None: ... - def count(self) -> int: ... - def get_entries(self) -> list[str]: ... - -class Timestamp(ExpirableEntry): - def __init__(self, arg0: ..., arg1: str) -> None: ... 
- def add(self, addend: ...) -> ...: ... - def get(self) -> ...: ... - def put(self, timestamp: ...) -> None: ... - def update(self, timestamp: ...) -> None: ... - -class TryAgainError(Error): - pass - -class UninitializedError(Error): - pass - -class WriterData: - def __init__(self) -> None: ... - def append(self, table: Table, index: typing.Any, column_data: list) -> None: - """ - Append new data - """ - - def empty(self) -> bool: - """ - Returns true if underlying data is empty - """ - -class WriterPushMode: - """ - Push Mode - - Members: - - Transactional - - Fast - - Truncate - - Async - """ - - Async: typing.ClassVar[WriterPushMode] # value = - Fast: typing.ClassVar[WriterPushMode] # value = - Transactional: typing.ClassVar[ - WriterPushMode - ] # value = - Truncate: typing.ClassVar[WriterPushMode] # value = - __members__: typing.ClassVar[ - dict[str, WriterPushMode] - ] # value = {'Transactional': , 'Fast': , 'Truncate': , 'Async': } - def __and__(self, other: typing.Any) -> typing.Any: ... - def __eq__(self, other: typing.Any) -> bool: ... - def __ge__(self, other: typing.Any) -> bool: ... - def __getstate__(self) -> int: ... - def __gt__(self, other: typing.Any) -> bool: ... - def __hash__(self) -> int: ... - def __index__(self) -> int: ... - def __init__(self, value: int) -> None: ... - def __int__(self) -> int: ... - def __invert__(self) -> typing.Any: ... - def __le__(self, other: typing.Any) -> bool: ... - def __lt__(self, other: typing.Any) -> bool: ... - def __ne__(self, other: typing.Any) -> bool: ... - def __or__(self, other: typing.Any) -> typing.Any: ... - def __rand__(self, other: typing.Any) -> typing.Any: ... - def __repr__(self) -> str: ... - def __ror__(self, other: typing.Any) -> typing.Any: ... - def __rxor__(self, other: typing.Any) -> typing.Any: ... - def __setstate__(self, state: int) -> None: ... - def __str__(self) -> str: ... - def __xor__(self, other: typing.Any) -> typing.Any: ... - @property - def name(self) -> str: ... 
- @property - def value(self) -> int: ... - -def build() -> str: - """ - Return build number - """ - -def dict_query( - arg0: ..., arg1: str, arg2: typing.Any -) -> list[dict[str, typing.Any]]: ... -def version() -> str: - """ - Return version number - """ - -Writer = None -never_expires: datetime.datetime # value = datetime.datetime(1969, 12, 31, 19, 0) diff --git a/quasardb/batch_column.hpp b/quasardb/batch_column.hpp index 885c3cfc..011543f3 100644 --- a/quasardb/batch_column.hpp +++ b/quasardb/batch_column.hpp @@ -67,14 +67,14 @@ static inline void register_batch_column(Module & m) { namespace py = pybind11; - py::class_{m, "BatchColumnInfo"} // - .def(py::init(), // - py::arg("ts_name"), // - py::arg("col_name"), // - py::arg("size_hint") = 0) // - .def_readwrite("timeseries", &qdb::batch_column_info::timeseries) // - .def_readwrite("column", &qdb::batch_column_info::column) // - .def_readwrite("elements_count_hint", &qdb::batch_column_info::elements_count_hint); // + py::class_{m, "BatchColumnInfo"} + .def(py::init(), + py::arg("ts_name"), + py::arg("col_name"), + py::arg("size_hint") = 0) + .def_readwrite("timeseries", &qdb::batch_column_info::timeseries) + .def_readwrite("column", &qdb::batch_column_info::column) + .def_readwrite("elements_count_hint", &qdb::batch_column_info::elements_count_hint); } } // namespace qdb diff --git a/quasardb/blob.hpp b/quasardb/blob.hpp index 7a00adeb..193f3354 100644 --- a/quasardb/blob.hpp +++ b/quasardb/blob.hpp @@ -133,18 +133,21 @@ static inline void register_blob(Module & m) { namespace py = pybind11; - py::class_(m, "Blob") // - .def(py::init()) // - .def("get", &qdb::blob_entry::get) // - .def("put", &qdb::blob_entry::put, py::arg("data")) // + py::class_(m, "Blob") + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.blob(...)"}; + return nullptr; + })) + .def("get", &qdb::blob_entry::get) + .def("put", &qdb::blob_entry::put, py::arg("data")) .def("update", 
&qdb::blob_entry::update, py::arg("data"), - py::arg("expiry") = std::chrono::system_clock::time_point{}) // - .def("remove_if", &qdb::blob_entry::remove_if, py::arg("comparand")) // - .def("get_and_remove", &qdb::blob_entry::get_and_remove) // - .def("get_and_update", &qdb::blob_entry::get_and_update, // - py::arg("data")) // - .def("compare_and_swap", &qdb::blob_entry::compare_and_swap, // - py::arg("new_content"), py::arg("comparand")); // + py::arg("expiry") = std::chrono::system_clock::time_point{}) + .def("remove_if", &qdb::blob_entry::remove_if, py::arg("comparand")) + .def("get_and_remove", &qdb::blob_entry::get_and_remove) + .def("get_and_update", &qdb::blob_entry::get_and_update, + py::arg("data")) + .def("compare_and_swap", &qdb::blob_entry::compare_and_swap, + py::arg("new_content"), py::arg("comparand")); } } // namespace qdb diff --git a/quasardb/cluster.cpp b/quasardb/cluster.cpp index a2f24987..44a0e58a 100644 --- a/quasardb/cluster.cpp +++ b/quasardb/cluster.cpp @@ -116,82 +116,82 @@ void register_cluster(py::module_ & m) namespace py = pybind11; py::class_(m, "Cluster", - "Represents a connection to the QuasarDB cluster. 
") // + "Represents a connection to the QuasarDB cluster.") .def(py::init(), // - py::arg("uri"), // - py::arg("user_name") = std::string{}, // - py::arg("user_private_key") = std::string{}, // - py::arg("cluster_public_key") = std::string{}, // - py::kw_only(), // - py::arg("user_security_file") = std::string{}, // - py::arg("cluster_public_key_file") = std::string{}, // - py::arg("timeout") = std::chrono::minutes{1}, // - py::arg("do_version_check") = false, // - py::arg("enable_encryption") = false, // - py::arg("compression_mode") = qdb_comp_balanced, // - py::arg("client_max_parallelism") = std::size_t{0} // - ) // - .def("__enter__", &qdb::cluster::enter) // - .def("__exit__", &qdb::cluster::exit) // - .def("tidy_memory", &qdb::cluster::tidy_memory) // - .def("get_memory_info", &qdb::cluster::get_memory_info) // - .def("is_open", &qdb::cluster::is_open) // - .def("uri", &qdb::cluster::uri) // - .def("node", &qdb::cluster::node) // - .def("options", &qdb::cluster::options) // - .def("properties", &qdb::cluster::properties) // - .def("perf", &qdb::cluster::perf) // - .def("node_status", &qdb::cluster::node_status) // - .def("node_config", &qdb::cluster::node_config) // - .def("node_topology", &qdb::cluster::node_topology) // - .def("tag", &qdb::cluster::tag) // - .def("blob", &qdb::cluster::blob) // - .def("string", &qdb::cluster::string) // - .def("integer", &qdb::cluster::integer) // - .def("double", &qdb::cluster::double_) // - .def("timestamp", &qdb::cluster::timestamp) // - .def("ts", &qdb::cluster::table) // - .def("table", &qdb::cluster::table) // - .def("ts_batch", &qdb::cluster::inserter) // - .def("inserter", &qdb::cluster::inserter) // - .def("reader", &qdb::cluster::reader, // - py::arg("table_names"), // - py::kw_only(), // - py::arg("column_names") = std::vector{}, // - py::arg("batch_size") = std::size_t{0}, // - py::arg("ranges") = std::vector{} // - ) // - .def("pinned_writer", &qdb::cluster::pinned_writer) // - .def("writer", 
&qdb::cluster::writer) // - .def("find", &qdb::cluster::find) // - .def("query", &qdb::cluster::query, // - py::arg("query"), // - py::arg("blobs") = false) // - .def("query_numpy", &qdb::cluster::query_numpy, // - py::arg("query")) // - .def("query_continuous_full", &qdb::cluster::query_continuous_full, // - py::arg("query"), // - py::arg("pace"), // - py::arg("blobs") = false) // - .def("query_continuous_new_values", &qdb::cluster::query_continuous_new_values, // - py::arg("query"), // - py::arg("pace"), // - py::arg("blobs") = false) // - .def("prefix_get", &qdb::cluster::prefix_get) // - .def("prefix_count", &qdb::cluster::prefix_count) // - .def("suffix_get", &qdb::cluster::suffix_get) // - .def("suffix_count", &qdb::cluster::suffix_count) // - .def("close", &qdb::cluster::close) // - .def("purge_all", &qdb::cluster::purge_all) // - .def("trim_all", &qdb::cluster::trim_all) // - .def("purge_cache", &qdb::cluster::purge_cache) // - .def("compact_full", &qdb::cluster::compact_full) // - .def("compact_progress", &qdb::cluster::compact_progress) // - .def("compact_abort", &qdb::cluster::compact_abort) // - .def("wait_for_compaction", &qdb::cluster::wait_for_compaction) // - .def("endpoints", &qdb::cluster::endpoints); // + std::chrono::milliseconds, bool, bool, qdb_compression_t, std::size_t>(), + py::arg("uri"), + py::arg("user_name") = std::string{}, + py::arg("user_private_key") = std::string{}, + py::arg("cluster_public_key") = std::string{}, + py::kw_only(), + py::arg("user_security_file") = std::string{}, + py::arg("cluster_public_key_file") = std::string{}, + py::arg("timeout") = std::chrono::minutes{1}, + py::arg("do_version_check") = false, + py::arg("enable_encryption") = false, + py::arg("compression_mode") = qdb_comp_balanced, + py::arg("client_max_parallelism") = std::size_t{0} + ) + .def("__enter__", &qdb::cluster::enter) + .def("__exit__", &qdb::cluster::exit) + .def("tidy_memory", &qdb::cluster::tidy_memory) + .def("get_memory_info", 
&qdb::cluster::get_memory_info) + .def("is_open", &qdb::cluster::is_open) + .def("uri", &qdb::cluster::uri) + .def("node", &qdb::cluster::node) + .def("options", &qdb::cluster::options) + .def("properties", &qdb::cluster::properties) + .def("perf", &qdb::cluster::perf) + .def("node_status", &qdb::cluster::node_status) + .def("node_config", &qdb::cluster::node_config) + .def("node_topology", &qdb::cluster::node_topology) + .def("tag", &qdb::cluster::tag) + .def("blob", &qdb::cluster::blob) + .def("string", &qdb::cluster::string) + .def("integer", &qdb::cluster::integer) + .def("double", &qdb::cluster::double_) + .def("timestamp", &qdb::cluster::timestamp) + .def("ts", &qdb::cluster::table) + .def("table", &qdb::cluster::table) + .def("ts_batch", &qdb::cluster::inserter) + .def("inserter", &qdb::cluster::inserter) + .def("reader", &qdb::cluster::reader, + py::arg("table_names"), + py::kw_only(), + py::arg("column_names") = std::vector{}, + py::arg("batch_size") = std::size_t{0}, + py::arg("ranges") = std::vector{} + ) + .def("pinned_writer", &qdb::cluster::pinned_writer) + .def("writer", &qdb::cluster::writer) + .def("find", &qdb::cluster::find) + .def("query", &qdb::cluster::query, + py::arg("query"), + py::arg("blobs") = false) + .def("query_numpy", &qdb::cluster::query_numpy, + py::arg("query")) + .def("query_continuous_full", &qdb::cluster::query_continuous_full, + py::arg("query"), + py::arg("pace"), + py::arg("blobs") = false) + .def("query_continuous_new_values", &qdb::cluster::query_continuous_new_values, + py::arg("query"), + py::arg("pace"), + py::arg("blobs") = false) + .def("prefix_get", &qdb::cluster::prefix_get) + .def("prefix_count", &qdb::cluster::prefix_count) + .def("suffix_get", &qdb::cluster::suffix_get) + .def("suffix_count", &qdb::cluster::suffix_count) + .def("close", &qdb::cluster::close) + .def("purge_all", &qdb::cluster::purge_all) + .def("trim_all", &qdb::cluster::trim_all) + .def("purge_cache", &qdb::cluster::purge_cache) + 
.def("compact_full", &qdb::cluster::compact_full) + .def("compact_progress", &qdb::cluster::compact_progress) + .def("compact_abort", &qdb::cluster::compact_abort) + .def("wait_for_compaction", &qdb::cluster::wait_for_compaction) + .def("endpoints", &qdb::cluster::endpoints); } }; // namespace qdb diff --git a/quasardb/cluster.hpp b/quasardb/cluster.hpp index b6867e88..303912bb 100644 --- a/quasardb/cluster.hpp +++ b/quasardb/cluster.hpp @@ -332,11 +332,12 @@ class cluster } public: - qdb::find_query find(const std::string & query_string) + std::vector find(const std::string & query_string) { check_open(); - return qdb::find_query{_handle, query_string}; + auto o = std::make_shared(_handle, query_string); + return o->run(); } py::object query(const std::string & query_string, const py::object & blobs) diff --git a/quasardb/continuous.hpp b/quasardb/continuous.hpp index 150b9109..ce755fd5 100644 --- a/quasardb/continuous.hpp +++ b/quasardb/continuous.hpp @@ -93,17 +93,19 @@ static inline void register_continuous(Module & m) { namespace py = pybind11; - py::class_>{m, "QueryContinuous"} // - .def(py::init()) // - .def("run", &qdb::query_continuous::run) // - .def("results", &qdb::query_continuous::results) // - .def("probe_results", &qdb::query_continuous::probe_results) // - .def("stop", &qdb::query_continuous::stop) // + py::class_>{m, "QueryContinuous"} + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.query_continuous_full(...)"}; + return nullptr; + })) + .def("run", &qdb::query_continuous::run) + .def("results", &qdb::query_continuous::results) + .def("probe_results", &qdb::query_continuous::probe_results) + .def("stop", &qdb::query_continuous::stop) // required interface to use query_continuous as an iterator - .def("__iter__", [](const std::shared_ptr & cont) { return cont; }) // - .def("__next__", &qdb::query_continuous::results); // + .def("__iter__", [](const std::shared_ptr & cont) { return cont; }) + 
.def("__next__", &qdb::query_continuous::results); } } // namespace qdb diff --git a/quasardb/detail/retry.cpp b/quasardb/detail/retry.cpp index 29a92ede..7969dcb4 100644 --- a/quasardb/detail/retry.cpp +++ b/quasardb/detail/retry.cpp @@ -8,22 +8,22 @@ void register_retry_options(py::module_ & m) { namespace py = pybind11; - py::class_{m, "RetryOptions"} // - .def(py::init(), // - py::arg("retries") = std::size_t{3}, // - py::kw_only(), // - py::arg("delay") = std::chrono::milliseconds{3000}, // - py::arg("exponent") = std::size_t{2}, // - py::arg("jitter") = double{0.1} // - ) // - // - .def_readwrite("retries_left", &retry_options::retries_left) // - .def_readwrite("delay", &retry_options::delay) // - .def_readwrite("exponent", &retry_options::exponent) // - .def_readwrite("jitter", &retry_options::jitter) // - // - .def("has_next", &retry_options::has_next) // - .def("next", &retry_options::next) // + py::class_{m, "RetryOptions"} + .def(py::init(), + py::arg("retries") = std::size_t{3}, + py::kw_only(), + py::arg("delay") = std::chrono::milliseconds{3000}, + py::arg("exponent") = std::size_t{2}, + py::arg("jitter") = double{0.1} + ) + + .def_readwrite("retries_left", &retry_options::retries_left) + .def_readwrite("delay", &retry_options::delay) + .def_readwrite("exponent", &retry_options::exponent) + .def_readwrite("jitter", &retry_options::jitter) + + .def("has_next", &retry_options::has_next) + .def("next", &retry_options::next) ; } diff --git a/quasardb/detail/ts_column.hpp b/quasardb/detail/ts_column.hpp index 30fa6d9f..a3fd9849 100644 --- a/quasardb/detail/ts_column.hpp +++ b/quasardb/detail/ts_column.hpp @@ -203,20 +203,20 @@ static inline void register_ts_column(Module & m) { namespace py = pybind11; - py::class_{m, "ColumnInfo"} // - .def(py::init()) // - .def(py::init()) // + py::class_{m, "ColumnInfo"} + .def(py::init()) + .def(py::init()) .def("__repr__", &column_info::repr) - .def_readwrite("type", &column_info::type) // - .def_readwrite("name", 
&column_info::name) // - .def_readwrite("symtable", &column_info::symtable); // - - py::class_{m, "IndexedColumnInfo"} // - .def(py::init()) // - .def(py::init()) // - .def_readonly("type", &indexed_column_info::type) // - .def_readonly("index", &indexed_column_info::index) // - .def_readonly("symtable", &indexed_column_info::symtable); // + .def_readwrite("type", &column_info::type) + .def_readwrite("name", &column_info::name) + .def_readwrite("symtable", &column_info::symtable); + + py::class_{m, "IndexedColumnInfo"} + .def(py::init()) + .def(py::init()) + .def_readonly("type", &indexed_column_info::type) + .def_readonly("index", &indexed_column_info::index) + .def_readonly("symtable", &indexed_column_info::symtable); } } // namespace detail diff --git a/quasardb/double.hpp b/quasardb/double.hpp index 69954b68..8d259e85 100644 --- a/quasardb/double.hpp +++ b/quasardb/double.hpp @@ -75,13 +75,16 @@ static inline void register_double(Module & m) { namespace py = pybind11; - py::class_(m, "Double") // - .def(py::init()) // - .def("get", &qdb::double_entry::get) // - .def("put", &qdb::double_entry::put, py::arg("double")) // - .def("update", &qdb::double_entry::update, py::arg("double")) // - .def("add", &qdb::double_entry::add, py::arg("addend")) // - ; // + py::class_(m, "Double") + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.double(...)"}; + return nullptr; + })) + .def("get", &qdb::double_entry::get) + .def("put", &qdb::double_entry::put, py::arg("double")) + .def("update", &qdb::double_entry::update, py::arg("double")) + .def("add", &qdb::double_entry::add, py::arg("addend")) + ; } } // namespace qdb diff --git a/quasardb/entry.hpp b/quasardb/entry.hpp index 4c8aa7f8..fdfbbab1 100644 --- a/quasardb/entry.hpp +++ b/quasardb/entry.hpp @@ -228,45 +228,52 @@ static inline void register_entry(Module & m) py::class_ e{m, "Entry"}; - py::enum_{e, "Type", py::arithmetic(), "Entry type"} // - .value("Uninitialized", 
qdb_entry_uninitialized) // - .value("Integer", qdb_entry_integer) // - .value("HashSet", qdb_entry_hset) // - .value("Tag", qdb_entry_tag) // - .value("Deque", qdb_entry_deque) // - .value("Stream", qdb_entry_stream) // - .value("Timeseries", qdb_entry_ts) // + py::enum_{e, "Type", py::arithmetic(), "Entry type"} + .value("Uninitialized", qdb_entry_uninitialized) + .value("Blob", qdb_entry_blob) + .value("Integer", qdb_entry_integer) + .value("HashSet", qdb_entry_hset) + .value("Tag", qdb_entry_tag) + .value("Deque", qdb_entry_deque) + .value("Stream", qdb_entry_stream) + .value("Timeseries", qdb_entry_ts) ; - e.def(py::init()) // - .def("attach_tag", &qdb::entry::attach_tag) // - .def("attach_tags", &qdb::entry::attach_tags) // - .def("detach_tag", &qdb::entry::detach_tag) // - .def("detach_tags", &qdb::entry::detach_tags) // - .def("has_tag", &qdb::entry::has_tag) // - .def("get_tags", &qdb::entry::get_tags) // - .def("remove", &qdb::entry::remove) // - .def("exists", &qdb::entry::exists, // - "Returns true if the entry exists") // - .def("get_location", &qdb::entry::get_location) // - .def("get_entry_type", &qdb::entry::get_entry_type) // - .def("get_metadata", &qdb::entry::get_metadata) // - .def("get_name", &qdb::entry::get_name) // + e.def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{}; + return nullptr; + })) + .def("attach_tag", &qdb::entry::attach_tag) + .def("attach_tags", &qdb::entry::attach_tags) + .def("detach_tag", &qdb::entry::detach_tag) + .def("detach_tags", &qdb::entry::detach_tags) + .def("has_tag", &qdb::entry::has_tag) + .def("get_tags", &qdb::entry::get_tags) + .def("remove", &qdb::entry::remove) + .def("exists", &qdb::entry::exists, + "Returns true if the entry exists") + .def("get_location", &qdb::entry::get_location) + .def("get_entry_type", &qdb::entry::get_entry_type) + .def("get_metadata", &qdb::entry::get_metadata) + .def("get_name", &qdb::entry::get_name) ; - py::class_{e, "Metadata"} // - 
.def(py::init<>()) // - .def_readwrite("type", &qdb::entry::metadata::type) // - .def_readwrite("size", &qdb::entry::metadata::size) // - .def_readwrite("modification_time", &qdb::entry::metadata::modification_time) // - .def_readwrite("expiry_time", &qdb::entry::metadata::expiry_time) // + py::class_{e, "Metadata"} + .def(py::init<>()) + .def_readwrite("type", &qdb::entry::metadata::type) + .def_readwrite("size", &qdb::entry::metadata::size) + .def_readwrite("modification_time", &qdb::entry::metadata::modification_time) + .def_readwrite("expiry_time", &qdb::entry::metadata::expiry_time) ; - py::class_{m, "ExpirableEntry"} // - .def(py::init()) // - .def("expires_at", &qdb::expirable_entry::expires_at) // - .def("expires_from_now", &qdb::expirable_entry::expires_from_now) // - .def("get_expiry_time", &qdb::expirable_entry::get_expiry_time) // + py::class_{m, "ExpirableEntry"} + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{}; + return nullptr; + })) + .def("expires_at", &qdb::expirable_entry::expires_at) + .def("expires_from_now", &qdb::expirable_entry::expires_from_now) + .def("get_expiry_time", &qdb::expirable_entry::get_expiry_time) ; } diff --git a/quasardb/error.hpp b/quasardb/error.hpp index a9c11660..4a78e81e 100644 --- a/quasardb/error.hpp +++ b/quasardb/error.hpp @@ -250,7 +250,7 @@ class direct_instantiation_exception : public exception { public: direct_instantiation_exception() noexcept - : exception(qdb_e_internal_local, + : exception(qdb_e_internal_local, // TODO: check this code std::string("Direct instantiation is not allowed")) {} diff --git a/quasardb/extensions/__init__.pyi b/quasardb/extensions/__init__.pyi deleted file mode 100644 index 74077aaf..00000000 --- a/quasardb/extensions/__init__.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from __future__ import annotations - -from quasardb.extensions.writer import extend_writer - -from . import writer - -__all__: list = list() - -def extend_module(m): ... 
diff --git a/quasardb/extensions/writer.pyi b/quasardb/extensions/writer.pyi deleted file mode 100644 index 941dd623..00000000 --- a/quasardb/extensions/writer.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from __future__ import annotations - -import copy as copy - -import numpy as np -from numpy import ma - -import quasardb as quasardb - -__all__: list = list() - -def _ensure_ctype(self, idx, ctype): ... -def _legacy_current_row(self): ... -def _legacy_next_row(self, table): ... -def _legacy_push(self): ... -def _legacy_set_blob(self, idx, x): ... -def _legacy_set_double(self, idx, x): ... -def _legacy_set_int64(self, idx, x): ... -def _legacy_set_string(self, idx, x): ... -def _legacy_set_timestamp(self, idx, x): ... -def _legacy_start_row(self, table, x): ... -def _wrap_fn(old_fn, replace_fn): ... -def extend_writer(x): - """ - - Extends the writer with the "old", batch inserter API. This is purely - a backwards compatibility layer, and we want to avoid having to maintain that - in C++ with few benefits. 
- - """ diff --git a/quasardb/integer.hpp b/quasardb/integer.hpp index 8d435a84..6942ef8b 100644 --- a/quasardb/integer.hpp +++ b/quasardb/integer.hpp @@ -76,13 +76,16 @@ static inline void register_integer(Module & m) { namespace py = pybind11; - py::class_(m, "Integer") // - .def(py::init()) // - .def("get", &qdb::integer_entry::get) // - .def("put", &qdb::integer_entry::put, py::arg("integer")) // - .def("update", &qdb::integer_entry::update, py::arg("integer")) // - .def("add", &qdb::integer_entry::add, py::arg("addend")) // - ; // + py::class_(m, "Integer") + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.integer(...)"}; + return nullptr; + })) + .def("get", &qdb::integer_entry::get) + .def("put", &qdb::integer_entry::put, py::arg("integer")) + .def("update", &qdb::integer_entry::update, py::arg("integer")) + .def("add", &qdb::integer_entry::add, py::arg("addend")) + ; } } // namespace qdb diff --git a/quasardb/module.cpp b/quasardb/module.cpp index 0bfd3caa..0316f05f 100644 --- a/quasardb/module.cpp +++ b/quasardb/module.cpp @@ -53,7 +53,12 @@ PYBIND11_MODULE(quasardb, m) m.doc() = "QuasarDB Official Python API"; m.def("version", &qdb_version, "Return version number"); m.def("build", &qdb_build, "Return build number"); - m.attr("never_expires") = std::chrono::system_clock::time_point{}; + + // Deal with tz directly + auto py_datetime = py::module_::import("datetime"); + auto py_utc = py_datetime.attr("timezone").attr("utc"); + m.attr("never_expires") = py_datetime.attr("datetime").attr("fromtimestamp")(0, py_utc); + // m.attr("never_expires") = std::chrono::system_clock::time_point{}; qdb::register_errors(m); qdb::register_options(m); diff --git a/quasardb/node.hpp b/quasardb/node.hpp index 9960f11f..12185020 100644 --- a/quasardb/node.hpp +++ b/quasardb/node.hpp @@ -115,15 +115,15 @@ static inline void register_node(Module & m) py::class_(m, "Node") .def(py::init(), - py::arg("uri"), // - py::arg("user_name") = 
std::string{}, // - py::arg("user_private_key") = std::string{}, // - py::arg("cluster_public_key") = std::string{}, // - py::kw_only(), // - py::arg("user_security_file") = std::string{}, // - py::arg("cluster_public_key_file") = std::string{}, // - py::arg("enable_encryption") = false // - ) // + py::arg("uri"), + py::arg("user_name") = std::string{}, + py::arg("user_private_key") = std::string{}, + py::arg("cluster_public_key") = std::string{}, + py::kw_only(), + py::arg("user_security_file") = std::string{}, + py::arg("cluster_public_key_file") = std::string{}, + py::arg("enable_encryption") = false + ) .def("prefix_get", &qdb::node::prefix_get) .def("blob", &qdb::node::blob) .def("integer", &qdb::node::integer); diff --git a/quasardb/options.hpp b/quasardb/options.hpp index c17c1b41..68466843 100644 --- a/quasardb/options.hpp +++ b/quasardb/options.hpp @@ -258,50 +258,53 @@ static inline void register_options(Module & m) { namespace py = pybind11; - py::class_ o(m, "Options"); // + py::class_ o(m, "Options"); // None is reserved keyword in Python - py::enum_{o, "Compression", py::arithmetic(), "Compression mode"} // - .value("Disabled", qdb_comp_none) // - .value("Best", qdb_comp_best) // - .value("Balanced", qdb_comp_balanced); // - - py::enum_{o, "Encryption", py::arithmetic(), "Encryption type"} // - .value("Disabled", qdb_crypt_none) // - .value("AES256GCM", qdb_crypt_aes_gcm_256); // - - o.def(py::init()) // - .def("set_timeout", &qdb::options::set_timeout) // - .def("get_timeout", &qdb::options::get_timeout) // - .def("set_timezone", &qdb::options::set_timezone) // - .def("get_timezone", &qdb::options::get_timezone) // - .def("enable_user_properties", &qdb::options::enable_user_properties) // - .def("disable_user_properties", &qdb::options::disable_user_properties) // - .def("set_stabilization_max_wait", &qdb::options::set_stabilization_max_wait) // - .def("get_stabilization_max_wait", &qdb::options::get_stabilization_max_wait) // - 
.def("set_max_cardinality", &qdb::options::set_max_cardinality) // - .def("set_encryption", &qdb::options::set_encryption) // - .def("set_cluster_public_key", &qdb::options::set_cluster_public_key) // - .def("set_user_credentials", &qdb::options::set_user_credentials) // - .def("set_client_max_in_buf_size", &qdb::options::set_client_max_in_buf_size) // - .def("get_client_max_in_buf_size", &qdb::options::get_client_max_in_buf_size) // - .def("set_client_max_batch_load", &qdb::options::set_client_max_batch_load, // - "Adjust the number of shards per thread used for the batch writer.") // - .def("get_client_max_batch_load", &qdb::options::get_client_max_batch_load, // - "Get the number of shards per thread used for the batch writer.") // - .def("set_connection_per_address_soft_limit", // - &qdb::options::set_connection_per_address_soft_limit, // - "Adjust the maximum number of connections per qdbd node") // - .def("get_connection_per_address_soft_limit", // - &qdb::options::get_connection_per_address_soft_limit, // - "Get the maximum number of connections per qdbd node") // - .def("get_cluster_max_in_buf_size", &qdb::options::get_cluster_max_in_buf_size) // - .def("get_client_max_parallelism", &qdb::options::get_client_max_parallelism) // - .def("set_query_max_length", &qdb::options::set_query_max_length, // - py::arg("query_max_length")) // - .def("get_query_max_length", &qdb::options::get_query_max_length) // - .def("set_client_soft_memory_limit", &qdb::options::set_client_soft_memory_limit, // - py::arg("limit")) // + py::enum_{o, "Compression", py::arithmetic(), "Compression mode"} + .value("Disabled", qdb_comp_none) + .value("Best", qdb_comp_best) + .value("Balanced", qdb_comp_balanced); + + py::enum_{o, "Encryption", py::arithmetic(), "Encryption type"} + .value("Disabled", qdb_crypt_none) + .value("AES256GCM", qdb_crypt_aes_gcm_256); + + o.def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.options(...)"}; + return nullptr; 
+ })) + .def("set_timeout", &qdb::options::set_timeout) + .def("get_timeout", &qdb::options::get_timeout) + .def("set_timezone", &qdb::options::set_timezone) + .def("get_timezone", &qdb::options::get_timezone) + .def("enable_user_properties", &qdb::options::enable_user_properties) + .def("disable_user_properties", &qdb::options::disable_user_properties) + .def("set_stabilization_max_wait", &qdb::options::set_stabilization_max_wait) + .def("get_stabilization_max_wait", &qdb::options::get_stabilization_max_wait) + .def("set_max_cardinality", &qdb::options::set_max_cardinality) + .def("set_encryption", &qdb::options::set_encryption) + .def("set_cluster_public_key", &qdb::options::set_cluster_public_key) + .def("set_user_credentials", &qdb::options::set_user_credentials) + .def("set_client_max_in_buf_size", &qdb::options::set_client_max_in_buf_size) + .def("get_client_max_in_buf_size", &qdb::options::get_client_max_in_buf_size) + .def("set_client_max_batch_load", &qdb::options::set_client_max_batch_load, + "Adjust the number of shards per thread used for the batch writer.") + .def("get_client_max_batch_load", &qdb::options::get_client_max_batch_load, + "Get the number of shards per thread used for the batch writer.") + .def("set_connection_per_address_soft_limit", + &qdb::options::set_connection_per_address_soft_limit, + "Adjust the maximum number of connections per qdbd node") + .def("get_connection_per_address_soft_limit", + &qdb::options::get_connection_per_address_soft_limit, + "Get the maximum number of connections per qdbd node") + .def("get_cluster_max_in_buf_size", &qdb::options::get_cluster_max_in_buf_size) + .def("get_client_max_parallelism", &qdb::options::get_client_max_parallelism) + .def("set_query_max_length", &qdb::options::set_query_max_length, + py::arg("query_max_length")) + .def("get_query_max_length", &qdb::options::get_query_max_length) + .def("set_client_soft_memory_limit", &qdb::options::set_client_soft_memory_limit, + py::arg("limit")) ; } diff 
--git a/quasardb/perf.hpp b/quasardb/perf.hpp index 6921cb82..938478d8 100644 --- a/quasardb/perf.hpp +++ b/quasardb/perf.hpp @@ -326,11 +326,14 @@ static inline void register_perf(Module & m) namespace py = pybind11; py::class_(m, "Perf") - .def(py::init()) // - .def("get", &qdb::perf::get, py::arg("flame") = false, py::arg("outfile") = "") // - .def("clear", &qdb::perf::clear_all_profiles) // - .def("enable", &qdb::perf::enable_client_tracking) // - .def("disable", &qdb::perf::disable_client_tracking); // + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.perf(...)"}; + return nullptr; + })) + .def("get", &qdb::perf::get, py::arg("flame") = false, py::arg("outfile") = "") + .def("clear", &qdb::perf::clear_all_profiles) + .def("enable", &qdb::perf::enable_client_tracking) + .def("disable", &qdb::perf::disable_client_tracking); } } // namespace qdb diff --git a/quasardb/properties.hpp b/quasardb/properties.hpp index 68a5c3ab..b05bdf93 100644 --- a/quasardb/properties.hpp +++ b/quasardb/properties.hpp @@ -73,9 +73,12 @@ class properties static inline void register_properties(py::module_ & m) { - py::class_ p(m, "Properties"); // + py::class_ p(m, "Properties"); - p.def(py::init()) // + p.def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.properties(...)"}; + return nullptr; + })) .def("get", &qdb::properties::get) .def("put", &qdb::properties::put) .def("remove", &qdb::properties::remove) diff --git a/quasardb/quasardb/__init__.pyi b/quasardb/quasardb/__init__.pyi index ea5cd91e..acf6fdb0 100644 --- a/quasardb/quasardb/__init__.pyi +++ b/quasardb/quasardb/__init__.pyi @@ -1,4 +1,98 @@ +import datetime + +import metrics + from ._batch_column import BatchColumnInfo from ._batch_inserter import TimeSeriesBatch +from ._blob import Blob +from ._cluster import Cluster +from ._continuous import QueryContinuous +from ._double import Double +from ._entry import Entry, ExpirableEntry +from 
._error import ( + AliasAlreadyExistsError, + AliasNotFoundError, + AsyncPipelineFullError, + Error, + IncompatibleTypeError, + InputBufferTooSmallError, + InternalLocalError, + InvalidArgumentError, + InvalidDatetimeError, + InvalidHandleError, + InvalidQueryError, + NotImplementedError, + OutOfBoundsError, + TryAgainError, + UninitializedError, +) +from ._integer import Integer +from ._node import DirectBlob, DirectInteger, Node +from ._options import Options +from ._perf import Perf +from ._query import FindQuery +from ._reader import Reader +from ._retry import RetryOptions +from ._string import String +from ._table import ColumnInfo, ColumnType, IndexedColumnInfo, Table +from ._tag import Tag +from ._timestamp import Timestamp +from ._writer import Writer, WriterData, WriterPushMode + +__all__ = [ + "BatchColumnInfo", + "TimeSeriesBatch", + "Blob", + "Cluster", + "QueryContinuous", + "Double", + "Entry", + "ExpirableEntry", + "Error", + "AliasAlreadyExistsError", + "AliasNotFoundError", + "AsyncPipelineFullError", + "IncompatibleTypeError", + "InputBufferTooSmallError", + "InternalLocalError", + "InvalidArgumentError", + "InvalidDatetimeError", + "InvalidHandleError", + "InvalidQueryError", + "NotImplementedError", + "OutOfBoundsError", + "TryAgainError", + "UninitializedError", + "Integer", + "DirectBlob", + "DirectInteger", + "Node", + "Options", + "Perf", + "FindQuery", + "Reader", + "RetryOptions", + "String", + "ColumnInfo", + "ColumnType", + "IndexedColumnInfo", + "Table", + "Tag", + "Timestamp", + "Writer", + "WriterData", + "WriterPushMode", + "metrics", +] + +never_expires: datetime.datetime = ... 
+ +def build() -> str: + """ + Return build number + """ -__all__ = ["BatchColumnInfo", "TimeSeriesBatch"] +def version() -> str: + """ + Return version number + """ diff --git a/quasardb/quasardb/_blob.pyi b/quasardb/quasardb/_blob.pyi new file mode 100644 index 00000000..b4d98910 --- /dev/null +++ b/quasardb/quasardb/_blob.pyi @@ -0,0 +1,16 @@ +import datetime + +from ._entry import ExpirableEntry + +class Blob(ExpirableEntry): + def compare_and_swap(self, new_content: str, comparand: str) -> bytes: ... + def get(self) -> bytes: ... + def get_and_remove(self) -> bytes: ... + def get_and_update(self, data: str) -> bytes: ... + def put(self, data: str) -> None: ... + def remove_if(self, comparand: str) -> None: ... + def update( + self, + data: str, + expiry: datetime.datetime = datetime.datetime.fromtimestamp(0), + ) -> None: ... diff --git a/quasardb/quasardb/_cluster.pyi b/quasardb/quasardb/_cluster.pyi new file mode 100644 index 00000000..095b3a41 --- /dev/null +++ b/quasardb/quasardb/_cluster.pyi @@ -0,0 +1,100 @@ +from __future__ import annotations + +import datetime + +import numpy as np + +from ._batch_column import BatchColumnInfo +from ._batch_inserter import TimeSeriesBatch +from ._blob import Blob +from ._continuous import QueryContinuous +from ._double import Double +from ._integer import Integer +from ._node import Node +from ._options import Options +from ._perf import Perf +from ._properties import Properties +from ._reader import Reader +from ._string import String +from ._table import Table +from ._tag import Tag +from ._timestamp import Timestamp +from ._writer import Writer + +class Cluster: + """ + Represents a connection to the QuasarDB cluster. + """ + + def __enter__(self) -> Cluster: ... + def __exit__(self, exc_type: object, exc_value: object, exc_tb: object) -> None: ... 
+ def __init__( + self, + uri: str, + user_name: str = "", + user_private_key: str = "", + cluster_public_key: str = "", + user_security_file: str = "", + cluster_public_key_file: str = "", + timeout: datetime.timedelta = datetime.timedelta(minutes=1), + do_version_check: bool = False, + enable_encryption: bool = False, + compression_mode: Options.Compression = ..., # balanced + client_max_parallelism: int = 0, + ) -> None: ... + def blob(self, alias: str) -> Blob: ... + def close(self) -> None: ... + def compact_abort(self) -> None: ... + def compact_full(self) -> None: ... + def compact_progress(self) -> int: ... + def double(self, alias: str) -> Double: ... + def endpoints(self) -> list[str]: ... + def find(self, query: str) -> list[str]: ... + def get_memory_info(self) -> str: ... + def inserter(self, column_info_list: list[BatchColumnInfo]) -> TimeSeriesBatch: ... + def integer(self, alias: str) -> Integer: ... + def is_open(self) -> bool: ... + def node(self, uri: str) -> Node: ... + def node_config(self, uri: str) -> dict[str, object]: ... + def node_status(self, uri: str) -> dict[str, object]: ... + def node_topology(self, uri: str) -> dict[str, object]: ... + def options(self) -> Options: ... + def perf(self) -> Perf: ... + def pinned_writer(self) -> Writer: ... + def prefix_count(self, prefix: str) -> int: ... + def prefix_get(self, prefix: str, max_count: int) -> list[str]: ... + def properties(self) -> Properties: ... + def purge_all(self, timeout: datetime.timedelta) -> None: ... + def purge_cache(self, timeout: datetime.timedelta) -> None: ... + def query( + self, query: str, blobs: bool | list[str] = False + ) -> list[dict[str, object]]: ... + def query_continuous_full( + self, query: str, pace: datetime.timedelta, blobs: bool | list[str] = False + ) -> QueryContinuous: ... + def query_continuous_new_values( + self, query: str, pace: datetime.timedelta, blobs: bool | list[str] = False + ) -> QueryContinuous: ... 
+ def query_numpy(self, query: str) -> list[tuple[str, np.ma.MaskedArray]]: ... + def reader( + self, + table_names: list[str], + column_names: list[str] = [], + batch_size: int = 0, + ranges: list[tuple] = [], + ) -> Reader: ... + def string(self, alias: str) -> String: ... + def suffix_count(self, suffix: str) -> int: ... + def suffix_get(self, suffix: str, max_count: int) -> list[str]: ... + def table(self, alias: str) -> Table: ... + def tag(self, alias: str) -> Tag: ... + def tidy_memory(self) -> None: ... + def timestamp(self, alias: str) -> Timestamp: ... + def trim_all( + self, pause: datetime.timedelta, timeout: datetime.timedelta + ) -> None: ... + def ts(self, alias: str) -> Table: ... + def ts_batch(self, column_info_list: list[BatchColumnInfo]) -> TimeSeriesBatch: ... + def uri(self) -> str: ... + def wait_for_compaction(self) -> None: ... + def writer(self) -> Writer: ... diff --git a/quasardb/quasardb/_continuous.pyi b/quasardb/quasardb/_continuous.pyi new file mode 100644 index 00000000..fe4c026b --- /dev/null +++ b/quasardb/quasardb/_continuous.pyi @@ -0,0 +1,16 @@ +from __future__ import annotations + +# import datetime + +class QueryContinuous: + def __iter__(self) -> QueryContinuous: ... + def __next__(self) -> list[dict[str, object]]: ... + def probe_results(self) -> list[dict[str, object]]: ... + def results(self) -> list[dict[str, object]]: ... + # def run( + # self, + # mode: qdb_query_continuous_mode_type_t, + # pace: datetime.timedelta, + # query: str, + # ) -> None: ... + def stop(self) -> None: ... diff --git a/quasardb/quasardb/_double.pyi b/quasardb/quasardb/_double.pyi new file mode 100644 index 00000000..b4e7d8a4 --- /dev/null +++ b/quasardb/quasardb/_double.pyi @@ -0,0 +1,7 @@ +from ._entry import ExpirableEntry + +class Double(ExpirableEntry): + def add(self, addend: float) -> float: ... + def get(self) -> float: ... + def put(self, double: float) -> None: ... + def update(self, double: float) -> None: ... 
diff --git a/quasardb/quasardb/_entry.pyi b/quasardb/quasardb/_entry.pyi new file mode 100644 index 00000000..86e49bb6 --- /dev/null +++ b/quasardb/quasardb/_entry.pyi @@ -0,0 +1,64 @@ +from __future__ import annotations + +import datetime + +class Entry: + class Metadata: ... + # expiry_time: qdb_timespec_t + # modification_time: qdb_timespec_t + # size: int + # type: Entry.Type + + class Type: + Uninitialized: Entry.Type # value = + Blob: Entry.Type # value = + Integer: Entry.Type # value = + HashSet: Entry.Type # value = + Tag: Entry.Type # value = + Deque: Entry.Type # value = + Stream: Entry.Type # value = + Timeseries: Entry.Type # value = + __members__: dict[str, Entry.Type] + def __and__(self, other: object) -> object: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> object: ... + def __le__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __or__(self, other: object) -> object: ... + def __rand__(self, other: object) -> object: ... + def __repr__(self) -> str: ... + def __ror__(self, other: object) -> object: ... + def __rxor__(self, other: object) -> object: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: object) -> object: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + + def attach_tag(self, tag: str) -> bool: ... + def attach_tags(self, tags: list[str]) -> None: ... + def detach_tag(self, tag: str) -> bool: ... + def detach_tags(self, tags: list[str]) -> None: ... + def exists(self) -> bool: ... + def get_entry_type(self) -> Entry.Type: ... 
+ def get_location(self) -> tuple[str, int]: ... + def get_metadata(self) -> ...: ... + def get_name(self) -> str: ... + def get_tags(self) -> list[str]: ... + def has_tag(self, tag: str) -> bool: ... + def remove(self) -> None: ... + +class ExpirableEntry(Entry): + def expires_at(self, expiry_time: datetime.datetime) -> None: ... + def expires_from_now(self, expiry_delta: datetime.timedelta) -> None: ... + def get_expiry_time(self) -> ...: ... diff --git a/quasardb/quasardb/_error.pyi b/quasardb/quasardb/_error.pyi new file mode 100644 index 00000000..61bbdd3c --- /dev/null +++ b/quasardb/quasardb/_error.pyi @@ -0,0 +1,15 @@ +class Error(RuntimeError): ... +class AliasAlreadyExistsError(Error): ... +class AliasNotFoundError(Error): ... +class AsyncPipelineFullError(Error): ... +class IncompatibleTypeError(Error): ... +class InputBufferTooSmallError(Error): ... +class InternalLocalError(Error): ... +class InvalidArgumentError(Error): ... +class InvalidDatetimeError(Error): ... +class InvalidHandleError(Error): ... +class InvalidQueryError(Error): ... +class NotImplementedError(Error): ... +class OutOfBoundsError(Error): ... +class TryAgainError(Error): ... +class UninitializedError(Error): ... diff --git a/quasardb/quasardb/_integer.pyi b/quasardb/quasardb/_integer.pyi new file mode 100644 index 00000000..97fad082 --- /dev/null +++ b/quasardb/quasardb/_integer.pyi @@ -0,0 +1,7 @@ +from ._entry import ExpirableEntry + +class Integer(ExpirableEntry): + def add(self, addend: int) -> int: ... + def get(self) -> int: ... + def put(self, integer: int) -> None: ... + def update(self, integer: int) -> None: ... diff --git a/quasardb/quasardb/_node.pyi b/quasardb/quasardb/_node.pyi new file mode 100644 index 00000000..5cfa0ca8 --- /dev/null +++ b/quasardb/quasardb/_node.pyi @@ -0,0 +1,26 @@ +class DirectBlob: + def get(self) -> bytes: ... + def put(self, data: str) -> None: ... + def remove(self) -> None: ... + def update(self, data: str) -> None: ... 
+ +class DirectInteger: + def get(self) -> int: ... + def put(self, integer: int) -> None: ... + def remove(self) -> None: ... + def update(self, integer: int) -> None: ... + +class Node: + def __init__( + self, + uri: str, + user_name: str = "", + user_private_key: str = "", + cluster_public_key: str = "", + user_security_file: str = "", + cluster_public_key_file: str = "", + enable_encryption: bool = False, + ) -> None: ... + def blob(self, alias: str) -> ...: ... + def integer(self, alias: str) -> ...: ... + def prefix_get(self, prefix: str, max_count: int) -> list[str]: ... diff --git a/quasardb/quasardb/_options.pyi b/quasardb/quasardb/_options.pyi new file mode 100644 index 00000000..764dc90b --- /dev/null +++ b/quasardb/quasardb/_options.pyi @@ -0,0 +1,105 @@ +from __future__ import annotations + +import datetime + +class Options: + class Compression: + Disabled: Options.Compression # value = + Best: Options.Compression # value = + Balanced: Options.Compression # value = + __members__: dict[str, Options.Compression] + def __and__(self, other: object) -> object: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> object: ... + def __le__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __or__(self, other: object) -> object: ... + def __rand__(self, other: object) -> object: ... + def __repr__(self) -> str: ... + def __ror__(self, other: object) -> object: ... + def __rxor__(self, other: object) -> object: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: object) -> object: ... + @property + def name(self) -> str: ... 
+ @property + def value(self) -> int: ... + + class Encryption: + Disabled: Options.Encryption # value = + AES256GCM: Options.Encryption # value = + __members__: dict[str, Options.Encryption] + def __and__(self, other: object) -> object: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> object: ... + def __le__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __or__(self, other: object) -> object: ... + def __rand__(self, other: object) -> object: ... + def __repr__(self) -> str: ... + def __ror__(self, other: object) -> object: ... + def __rxor__(self, other: object) -> object: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: object) -> object: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + + def disable_user_properties(self) -> None: ... + def enable_user_properties(self) -> None: ... + def get_client_max_batch_load(self) -> int: + """ + Get the number of shards per thread used for the batch writer. + """ + + def get_client_max_in_buf_size(self) -> int: ... + def get_client_max_parallelism(self) -> int: ... + def get_cluster_max_in_buf_size(self) -> int: ... + def get_connection_per_address_soft_limit(self) -> int: + """ + Get the maximum number of connections per qdbd node + """ + + def get_query_max_length(self) -> int: ... + def get_stabilization_max_wait(self) -> datetime.timedelta: ... + def get_timeout(self) -> datetime.timedelta: ... + def get_timezone(self) -> str: ... 
+ def set_client_max_batch_load(self, shard_count: int) -> None: + """ + Adjust the number of shards per thread used for the batch writer. + """ + + def set_client_max_in_buf_size(self, max_size: int) -> None: ... + def set_client_soft_memory_limit(self, limit: int) -> None: ... + def set_cluster_public_key(self, key: str) -> None: ... + def set_connection_per_address_soft_limit(self, max_count: int) -> None: + """ + Adjust the maximum number of connections per qdbd node + """ + + def set_encryption(self, algorithm: Options.Encryption) -> None: ... + def set_max_cardinality(self, cardinality: int) -> None: ... + def set_query_max_length(self, query_max_length: int) -> None: ... + def set_stabilization_max_wait(self, max_wait: datetime.timedelta) -> None: ... + def set_timeout(self, timeout: datetime.timedelta) -> None: ... + def set_timezone(self, tz: str) -> None: ... + def set_user_credentials(self, user: str, private_key: str) -> None: ... diff --git a/quasardb/quasardb/_perf.pyi b/quasardb/quasardb/_perf.pyi new file mode 100644 index 00000000..dd26ec93 --- /dev/null +++ b/quasardb/quasardb/_perf.pyi @@ -0,0 +1,5 @@ +class Perf: + def clear(self) -> None: ... + def disable(self) -> None: ... + def enable(self) -> None: ... + def get(self, flame: bool = False, outfile: str = "") -> object: ... diff --git a/quasardb/quasardb/_properties.pyi b/quasardb/quasardb/_properties.pyi new file mode 100644 index 00000000..11e830ac --- /dev/null +++ b/quasardb/quasardb/_properties.pyi @@ -0,0 +1,5 @@ +class Properties: + def clear(self) -> None: ... + def get(self, key: str) -> str | None: ... + def put(self, key: str, value: str) -> None: ... + def remove(self, key: str) -> None: ... diff --git a/quasardb/quasardb/_query.pyi b/quasardb/quasardb/_query.pyi new file mode 100644 index 00000000..b8a894f7 --- /dev/null +++ b/quasardb/quasardb/_query.pyi @@ -0,0 +1,2 @@ +class FindQuery: + def run(self) -> list[str]: ... 
diff --git a/quasardb/quasardb/_reader.pyi b/quasardb/quasardb/_reader.pyi new file mode 100644 index 00000000..fd0ad7e8 --- /dev/null +++ b/quasardb/quasardb/_reader.pyi @@ -0,0 +1,9 @@ +from __future__ import annotations + +import typing + +class Reader: + def __enter__(self) -> Reader: ... + def __exit__(self, exc_type: object, exc_value: object, exc_tb: object) -> None: ... + def __iter__(self) -> typing.Iterator[dict[str, object]]: ... + def get_batch_size(self) -> int: ... diff --git a/quasardb/quasardb/_retry.pyi b/quasardb/quasardb/_retry.pyi new file mode 100644 index 00000000..58048a0f --- /dev/null +++ b/quasardb/quasardb/_retry.pyi @@ -0,0 +1,16 @@ +import datetime + +class RetryOptions: + delay: datetime.timedelta + exponent: int + jitter: float + retries_left: int + def __init__( + self, + retries: int = 3, + delay: datetime.timedelta = ..., + exponent: int = 2, + jitter: float = 0.1, + ) -> None: ... + def has_next(self) -> bool: ... + def next(self) -> RetryOptions: ... diff --git a/quasardb/quasardb/_string.pyi b/quasardb/quasardb/_string.pyi new file mode 100644 index 00000000..7ab7fb59 --- /dev/null +++ b/quasardb/quasardb/_string.pyi @@ -0,0 +1,12 @@ +import datetime + +from ._entry import ExpirableEntry + +class String(ExpirableEntry): + def compare_and_swap(self, new_content: str, comparand: str) -> str: ... + def get(self) -> str: ... + def get_and_remove(self) -> str: ... + def get_and_update(self, data: str) -> str: ... + def put(self, data: str) -> None: ... + def remove_if(self, comparand: str) -> None: ... + def update(self, data: str, expiry: datetime.datetime = ...) -> None: ... 
diff --git a/quasardb/quasardb/_table.pyi b/quasardb/quasardb/_table.pyi new file mode 100644 index 00000000..dd422826 --- /dev/null +++ b/quasardb/quasardb/_table.pyi @@ -0,0 +1,125 @@ +from __future__ import annotations + +import datetime +import typing + +import numpy + +from quasardb.quasardb._reader import Reader + +from ._entry import Entry + +class ColumnType: + Uninitialized: ColumnType # value = + Double: ColumnType # value = + Blob: ColumnType # value = + Int64: ColumnType # value = + Timestamp: ColumnType # value = + String: ColumnType # value = + Symbol: ColumnType # value = + __members__: dict[str, ColumnType] + def __and__(self, other: object) -> object: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> object: ... + def __le__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __or__(self, other: object) -> object: ... + def __rand__(self, other: object) -> object: ... + def __repr__(self) -> str: ... + def __ror__(self, other: object) -> object: ... + def __rxor__(self, other: object) -> object: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: object) -> object: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + +class ColumnInfo: + name: str + symtable: str + type: ColumnType + @typing.overload + def __init__(self, type: ColumnType, name: str) -> None: ... + @typing.overload + def __init__(self, type: ColumnType, name: str, symtable: str) -> None: ... + def __repr__(self) -> str: ... 
+ +class IndexedColumnInfo: + @typing.overload + def __init__(self, type: ColumnType, index: int) -> None: ... + @typing.overload + def __init__(self, type: ColumnType, index: int, symtable: str) -> None: ... + @property + def index(self) -> int: ... + @property + def symtable(self) -> str: ... + @property + def type(self) -> ColumnType: ... + +class Table(Entry): + def __repr__(self) -> str: ... + def blob_get_ranges( + self, column: str, ranges: object = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def blob_insert( + self, column: str, timestamps: numpy.ndarray, values: numpy.ma.MaskedArray + ) -> None: ... + def column_id_by_index(self, index: int) -> str: ... + def column_index_by_id(self, alias: str) -> int: ... + def column_info_by_index(self, index: int) -> ...: ... + def column_type_by_id(self, alias: str) -> ColumnType: ... + def column_type_by_index(self, index: int) -> ColumnType: ... + def create( + self, + columns: list[ColumnInfo], + shard_size: datetime.timedelta = ..., + ttl: datetime.timedelta = ..., + ) -> None: ... + def double_get_ranges( + self, column: str, ranges: object = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def double_insert( + self, column: str, timestamps: numpy.ndarray, values: numpy.ma.MaskedArray + ) -> None: ... + def erase_ranges(self, column: str, ranges: object) -> int: ... + def get_shard_size(self) -> datetime.timedelta: ... + def get_ttl(self) -> datetime.timedelta: ... + def has_ttl(self) -> bool: ... + def insert_columns(self, columns: list[ColumnInfo]) -> None: ... + def int64_get_ranges( + self, column: str, ranges: object = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def int64_insert( + self, column: str, timestamps: numpy.ndarray, values: numpy.ma.MaskedArray + ) -> None: ... + def list_columns(self) -> list[ColumnInfo]: ... + def reader( + self, + column_names: list[str] = [], + batch_size: int = 0, + ranges: list[tuple] = [], + ) -> Reader: ... 
+ def retrieve_metadata(self) -> None: ... + def string_get_ranges( + self, column: str, ranges: object = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def string_insert( + self, column: str, timestamps: numpy.ndarray, values: numpy.ma.MaskedArray + ) -> None: ... + def subscribe(self, conn: object) -> object: ... + def timestamp_get_ranges( + self, column: str, ranges: object = None + ) -> tuple[numpy.ndarray, numpy.ma.MaskedArray]: ... + def timestamp_insert( + self, column: str, timestamps: numpy.ndarray, values: numpy.ma.MaskedArray + ) -> None: ... diff --git a/quasardb/quasardb/_tag.pyi b/quasardb/quasardb/_tag.pyi new file mode 100644 index 00000000..88499ffb --- /dev/null +++ b/quasardb/quasardb/_tag.pyi @@ -0,0 +1,5 @@ +from ._entry import Entry + +class Tag(Entry): + def count(self) -> int: ... + def get_entries(self) -> list[str]: ... diff --git a/quasardb/quasardb/_timestamp.pyi b/quasardb/quasardb/_timestamp.pyi new file mode 100644 index 00000000..9447fb71 --- /dev/null +++ b/quasardb/quasardb/_timestamp.pyi @@ -0,0 +1,9 @@ +import datetime + +from ._entry import ExpirableEntry + +class Timestamp(ExpirableEntry): + def add(self, addend: datetime.datetime) -> datetime.datetime: ... + def get(self) -> datetime.datetime: ... + def put(self, timestamp: datetime.datetime) -> None: ... + def update(self, timestamp: datetime.datetime) -> None: ... diff --git a/quasardb/quasardb/_writer.pyi b/quasardb/quasardb/_writer.pyi new file mode 100644 index 00000000..ef7bc960 --- /dev/null +++ b/quasardb/quasardb/_writer.pyi @@ -0,0 +1,111 @@ +from __future__ import annotations + +from ._table import Table + +class WriterData: + def __init__(self) -> None: ... + def append( + self, table: Table, index: list[object], column_data: list[list[object]] + ) -> None: ... + def empty(self) -> bool: ... 
+ +class WriterPushMode: + Transactional: WriterPushMode # value = + Truncate: WriterPushMode # value = + Fast: WriterPushMode # value = + Async: WriterPushMode # value = + __members__: dict[str, WriterPushMode] + def __and__(self, other: object) -> object: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __getstate__(self) -> int: ... + def __gt__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __invert__(self) -> object: ... + def __le__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __or__(self, other: object) -> object: ... + def __rand__(self, other: object) -> object: ... + def __repr__(self) -> str: ... + def __ror__(self, other: object) -> object: ... + def __rxor__(self, other: object) -> object: ... + def __setstate__(self, state: int) -> None: ... + def __str__(self) -> str: ... + def __xor__(self, other: object) -> object: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + +class Writer: + def push( + self, + data: WriterData, + write_through: bool, + push_mode: WriterPushMode, + deduplication_mode: str, + deduplicate: str, + retries: int, + range: tuple[object, ...], + **kwargs, + ) -> None: ... 
+ def push_fast( + self, + data: WriterData, + write_through: bool, + push_mode: WriterPushMode, + deduplication_mode: str, + deduplicate: str, + retries: int, + range: tuple[object, ...], + **kwargs, + ) -> None: + """Deprecated: Use `writer.push()` instead.""" + + def push_async( + self, + data: WriterData, + write_through: bool, + push_mode: WriterPushMode, + deduplication_mode: str, + deduplicate: str, + retries: int, + range: tuple[object, ...], + **kwargs, + ) -> None: + """Deprecated: Use `writer.push()` instead.""" + + def push_truncate( + self, + data: WriterData, + write_through: bool, + push_mode: WriterPushMode, + deduplication_mode: str, + deduplicate: str, + retries: int, + range: tuple[object, ...], + **kwargs, + ) -> None: + """Deprecated: Use `writer.push()` instead.""" + + def start_row(self, table: object, x: object) -> None: + """Legacy function""" + + def set_double(self, idx: object, value: object) -> object: + """Legacy function""" + + def set_int64(self, idx: object, value: object) -> object: + """Legacy function""" + + def set_string(self, idx: object, value: object) -> object: + """Legacy function""" + + def set_blob(self, idx: object, value: object) -> object: + """Legacy function""" + + def set_timestamp(self, idx: object, value: object) -> object: + """Legacy function""" diff --git a/quasardb/metrics.pyi b/quasardb/quasardb/metrics/__init__.pyi similarity index 76% rename from quasardb/metrics.pyi rename to quasardb/quasardb/metrics/__init__.pyi index 56605abf..1dede248 100644 --- a/quasardb/metrics.pyi +++ b/quasardb/quasardb/metrics/__init__.pyi @@ -4,8 +4,6 @@ Keep track of low-level performance metrics from __future__ import annotations -import typing - __all__ = ["Measure", "clear", "totals"] class Measure: @@ -14,9 +12,7 @@ class Measure: """ def __enter__(self) -> Measure: ... - def __exit__( - self, arg0: typing.Any, arg1: typing.Any, arg2: typing.Any - ) -> None: ... 
+ def __exit__(self, exc_type: object, exc_value: object, exc_tb: object) -> None: ... def __init__(self) -> None: ... def get(self) -> dict[str, int]: ... diff --git a/quasardb/query.hpp b/quasardb/query.hpp index e7fb872c..cfd377ad 100644 --- a/quasardb/query.hpp +++ b/quasardb/query.hpp @@ -82,9 +82,12 @@ static inline void register_query(Module & m) { namespace py = pybind11; - py::class_{m, "FindQuery"} // - .def(py::init()) // - .def("run", &qdb::find_query::run); // + py::class_{m, "FindQuery"} + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{}; + return nullptr; + })) + .def("run", &qdb::find_query::run); m.def("dict_query", &qdb::dict_query); } diff --git a/quasardb/reader.cpp b/quasardb/reader.cpp index 21bbeb62..d87a6373 100644 --- a/quasardb/reader.cpp +++ b/quasardb/reader.cpp @@ -206,29 +206,16 @@ void register_reader(py::module_ & m) // basic interface reader_c - .def(py::init< // - qdb::handle_ptr, // - std::vector const &, // - std::vector const &, // - std::size_t, // - std::vector const &>(), // - py::arg("conn"), // - py::arg("table_names"), // - py::kw_only(), // - py::arg("column_names") = std::vector{}, // - py::arg("batch_size") = std::size_t{1 << 16}, // - py::arg("ranges") = std::vector{} // - ) // - // - .def("get_batch_size", &qdb::reader::get_batch_size) // - // - .def("__enter__", &qdb::reader::enter) // - .def("__exit__", &qdb::reader::exit) // - .def( // - "__iter__", [](qdb::reader & r) { return py::make_iterator(r.begin(), r.end()); }, // + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.reader(...)"}; + return nullptr; + })) + .def("get_batch_size", &qdb::reader::get_batch_size) + .def("__enter__", &qdb::reader::enter) + .def("__exit__", &qdb::reader::exit) + .def( + "__iter__", [](qdb::reader & r) { return py::make_iterator(r.begin(), r.end()); }, py::keep_alive<0, 1>()); - - // } } // namespace qdb diff --git a/quasardb/string.hpp 
b/quasardb/string.hpp index fda49c21..90ac6889 100644 --- a/quasardb/string.hpp +++ b/quasardb/string.hpp @@ -143,18 +143,21 @@ static inline void register_string(Module & m) { namespace py = pybind11; - py::class_(m, "String") // - .def(py::init()) // - .def("get", &qdb::string_entry::get) // - .def("put", &qdb::string_entry::put, py::arg("data")) // + py::class_(m, "String") + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.string(...)"}; + return nullptr; + })) + .def("get", &qdb::string_entry::get) + .def("put", &qdb::string_entry::put, py::arg("data")) .def("update", &qdb::string_entry::update, py::arg("data"), - py::arg("expiry") = std::chrono::system_clock::time_point{}) // - .def("remove_if", &qdb::string_entry::remove_if, py::arg("comparand")) // - .def("get_and_remove", &qdb::string_entry::get_and_remove) // - .def("get_and_update", &qdb::string_entry::get_and_update, // - py::arg("data")) // - .def("compare_and_swap", &qdb::string_entry::compare_and_swap, // - py::arg("new_content"), py::arg("comparand")); // + py::arg("expiry") = std::chrono::system_clock::time_point{}) + .def("remove_if", &qdb::string_entry::remove_if, py::arg("comparand")) + .def("get_and_remove", &qdb::string_entry::get_and_remove) + .def("get_and_update", &qdb::string_entry::get_and_update, + py::arg("data")) + .def("compare_and_swap", &qdb::string_entry::compare_and_swap, + py::arg("new_content"), py::arg("comparand")); } } // namespace qdb diff --git a/quasardb/table.hpp b/quasardb/table.hpp index bf4d0581..b8ff0b71 100644 --- a/quasardb/table.hpp +++ b/quasardb/table.hpp @@ -314,73 +314,72 @@ static inline void register_table(Module & m) { namespace py = pybind11; - py::enum_{m, "ColumnType", py::arithmetic(), "Column type"} // - .value("Uninitialized", qdb_ts_column_uninitialized) // - .value("Double", qdb_ts_column_double) // - .value("Blob", qdb_ts_column_blob) // - .value("String", qdb_ts_column_string) // - .value("Symbol", 
qdb_ts_column_symbol) // - .value("Int64", qdb_ts_column_int64) // - .value("Timestamp", qdb_ts_column_timestamp); // - - py::class_{m, "Table", "Table representation"} // - .def(py::init([](py::args, py::kwargs) { // - throw qdb::direct_instantiation_exception{"conn.table(...)"}; // - return nullptr; // - })) // - .def("__repr__", &qdb::table::repr) // - .def("create", &qdb::table::create, py::arg("columns"), // - py::arg("shard_size") = std::chrono::hours{24}, // - py::arg("ttl") = std::chrono::milliseconds::zero() // - ) // - .def("get_name", &qdb::table::get_name) // - .def("retrieve_metadata", &qdb::table::retrieve_metadata) // - .def("column_index_by_id", &qdb::table::column_index_by_id) // - .def("column_type_by_id", &qdb::table::column_type_by_id) // - .def("column_info_by_index", &qdb::table::column_info_by_index) // - .def("column_type_by_index", &qdb::table::column_type_by_index) // - .def("column_id_by_index", &qdb::table::column_id_by_index) // - .def("insert_columns", &qdb::table::insert_columns) // - .def("list_columns", &qdb::table::list_columns) // - .def("has_ttl", &qdb::table::has_ttl) // - .def("get_ttl", &qdb::table::get_ttl) // - .def("get_shard_size", &qdb::table::get_shard_size) // - // - .def("reader", &qdb::table::reader, // - py::kw_only(), // - py::arg("column_names") = std::vector{}, // - py::arg("batch_size") = std::size_t{0}, // - py::arg("ranges") = std::vector{} // - ) // - // - .def("subscribe", &qdb::table::subscribe) // - .def("erase_ranges", &qdb::table::erase_ranges) // - .def("blob_insert", &qdb::table::blob_insert) // - .def("string_insert", &qdb::table::string_insert) // - .def("double_insert", &qdb::table::double_insert) // - .def("int64_insert", &qdb::table::int64_insert) // - .def("timestamp_insert", &qdb::table::timestamp_insert) // - // - .def("blob_get_ranges", &qdb::table::blob_get_ranges, // - py::arg("column"), // - py::arg("ranges") = py::none{} // - ) // - .def("string_get_ranges", &qdb::table::string_get_ranges, 
// - py::arg("column"), // - py::arg("ranges") = py::none{} // - ) // - .def("double_get_ranges", &qdb::table::double_get_ranges, // - py::arg("column"), // - py::arg("ranges") = py::none{} // - ) // - .def("int64_get_ranges", &qdb::table::int64_get_ranges, // - py::arg("column"), // - py::arg("ranges") = py::none{} // - ) // - .def("timestamp_get_ranges", &qdb::table::timestamp_get_ranges, // - py::arg("column"), // - py::arg("ranges") = py::none{} // - ); // + py::enum_{m, "ColumnType", py::arithmetic(), "Column type"} + .value("Uninitialized", qdb_ts_column_uninitialized) + .value("Double", qdb_ts_column_double) + .value("Blob", qdb_ts_column_blob) + .value("String", qdb_ts_column_string) + .value("Symbol", qdb_ts_column_symbol) + .value("Int64", qdb_ts_column_int64) + .value("Timestamp", qdb_ts_column_timestamp); + + py::class_{m, "Table", "Table representation"} + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.table(...)"}; + return nullptr; + })) + .def("__repr__", &qdb::table::repr) + .def("create", &qdb::table::create, py::arg("columns"), + py::arg("shard_size") = std::chrono::hours{24}, + py::arg("ttl") = std::chrono::milliseconds::zero() + ) + .def("retrieve_metadata", &qdb::table::retrieve_metadata) + .def("column_index_by_id", &qdb::table::column_index_by_id) + .def("column_type_by_id", &qdb::table::column_type_by_id) + .def("column_info_by_index", &qdb::table::column_info_by_index) + .def("column_type_by_index", &qdb::table::column_type_by_index) + .def("column_id_by_index", &qdb::table::column_id_by_index) + .def("insert_columns", &qdb::table::insert_columns) + .def("list_columns", &qdb::table::list_columns) + .def("has_ttl", &qdb::table::has_ttl) + .def("get_ttl", &qdb::table::get_ttl) + .def("get_shard_size", &qdb::table::get_shard_size) + + .def("reader", &qdb::table::reader, + py::kw_only(), + py::arg("column_names") = std::vector{}, + py::arg("batch_size") = std::size_t{0}, + py::arg("ranges") = 
std::vector{} + ) + + .def("subscribe", &qdb::table::subscribe) + .def("erase_ranges", &qdb::table::erase_ranges) + .def("blob_insert", &qdb::table::blob_insert) + .def("string_insert", &qdb::table::string_insert) + .def("double_insert", &qdb::table::double_insert) + .def("int64_insert", &qdb::table::int64_insert) + .def("timestamp_insert", &qdb::table::timestamp_insert) + + .def("blob_get_ranges", &qdb::table::blob_get_ranges, + py::arg("column"), + py::arg("ranges") = py::none{} + ) + .def("string_get_ranges", &qdb::table::string_get_ranges, + py::arg("column"), + py::arg("ranges") = py::none{} + ) + .def("double_get_ranges", &qdb::table::double_get_ranges, + py::arg("column"), + py::arg("ranges") = py::none{} + ) + .def("int64_get_ranges", &qdb::table::int64_get_ranges, + py::arg("column"), + py::arg("ranges") = py::none{} + ) + .def("timestamp_get_ranges", &qdb::table::timestamp_get_ranges, + py::arg("column"), + py::arg("ranges") = py::none{} + ); } } // namespace qdb diff --git a/quasardb/tag.hpp b/quasardb/tag.hpp index a4572c84..eedd1767 100644 --- a/quasardb/tag.hpp +++ b/quasardb/tag.hpp @@ -68,10 +68,13 @@ static inline void register_tag(Module & m) { namespace py = pybind11; - py::class_(m, "Tag") // - .def(py::init()) // - .def("get_entries", &qdb::tag::get_entries) // - .def("count", &qdb::tag::count); // + py::class_(m, "Tag") + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.tag(...)"}; + return nullptr; + })) + .def("get_entries", &qdb::tag::get_entries) + .def("count", &qdb::tag::count); } } // namespace qdb diff --git a/quasardb/timestamp.hpp b/quasardb/timestamp.hpp index 06364245..41e22288 100644 --- a/quasardb/timestamp.hpp +++ b/quasardb/timestamp.hpp @@ -85,13 +85,16 @@ static inline void register_timestamp(Module & m) { namespace py = pybind11; - py::class_(m, "Timestamp") // - .def(py::init()) // - .def("get", &qdb::timestamp_entry::get) // - .def("put", &qdb::timestamp_entry::put, 
py::arg("timestamp")) // - .def("update", &qdb::timestamp_entry::update, py::arg("timestamp")) // - .def("add", &qdb::timestamp_entry::add, py::arg("addend")) // - ; // + py::class_(m, "Timestamp") + .def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.timestamp(...)"}; + return nullptr; + })) + .def("get", &qdb::timestamp_entry::get) + .def("put", &qdb::timestamp_entry::put, py::arg("timestamp")) + .def("update", &qdb::timestamp_entry::update, py::arg("timestamp")) + .def("add", &qdb::timestamp_entry::add, py::arg("addend")) + ; } } // namespace qdb diff --git a/quasardb/writer.hpp b/quasardb/writer.hpp index 9f88a04b..82c0316a 100644 --- a/quasardb/writer.hpp +++ b/quasardb/writer.hpp @@ -305,9 +305,9 @@ class writer // behavior using writer_ptr = std::unique_ptr; -template < // - qdb::concepts::writer_push_strategy PushStrategy, // - qdb::concepts::sleep_strategy SleepStrategy> // +template < + qdb::concepts::writer_push_strategy PushStrategy, + qdb::concepts::sleep_strategy SleepStrategy> static void register_writer(py::module_ & m) { using PS = PushStrategy; @@ -324,28 +324,31 @@ static void register_writer(py::module_ & m) // Different push modes, makes it easy to convert to<>from native types, as we accept the push // mode as a kwarg. 
- py::enum_{m, "WriterPushMode", py::arithmetic(), "Push Mode"} // - .value("Transactional", qdb_exp_batch_push_transactional) // - .value("Fast", qdb_exp_batch_push_fast) // - .value("Truncate", qdb_exp_batch_push_truncate) // - .value("Async", qdb_exp_batch_push_async); // + py::enum_{m, "WriterPushMode", py::arithmetic(), "Push Mode"} + .value("Transactional", qdb_exp_batch_push_transactional) + .value("Fast", qdb_exp_batch_push_fast) + .value("Truncate", qdb_exp_batch_push_truncate) + .value("Async", qdb_exp_batch_push_async); // And the actual pinned writer auto writer_c = py::class_{m, "Writer"}; // basic interface - writer_c.def(py::init()); // + writer_c.def(py::init([](py::args, py::kwargs) { + throw qdb::direct_instantiation_exception{"conn.writer(...)"}; + return nullptr; + })); writer_c.def_readwrite("_legacy_state", &qdb::writer::legacy_state_); // push functions writer_c - .def("push", &qdb::writer::push, "Regular batch push") // + .def("push", &qdb::writer::push, "Regular batch push") .def("push_async", &qdb::writer::push_async, - "Asynchronous batch push that buffers data inside the QuasarDB daemon") // + "Asynchronous batch push that buffers data inside the QuasarDB daemon") .def("push_fast", &qdb::writer::push_fast, "Fast, in-place batch push that is efficient when doing lots of small, incremental " - "pushes.") // + "pushes.") .def("push_truncate", &qdb::writer::push_truncate, "Before inserting data, truncates any existing data. This is useful when you want your " "insertions to be idempotent, e.g. 
in " diff --git a/scripts/teamcity/10.build.sh b/scripts/teamcity/10.build.sh index b1b16957..100033ad 100755 --- a/scripts/teamcity/10.build.sh +++ b/scripts/teamcity/10.build.sh @@ -21,6 +21,8 @@ else VENV_PYTHON="${SCRIPT_DIR}/../../.env/bin/python" fi +${VENV_PYTHON} --version + function relabel_wheel { wheel="$1" diff --git a/setup.py b/setup.py index 9e751577..1747a122 100644 --- a/setup.py +++ b/setup.py @@ -27,10 +27,11 @@ package_name = "quasardb" packages = [ package_name, - "quasardb.quasardb", # stubs "quasardb.pandas", "quasardb.numpy", "quasardb.extensions", + "quasardb.quasardb", # stubs + "quasardb.quasardb.metrics", # stubs ] From b36fb772d07fcaedf78836bd64e9183aae6e2dc8 Mon Sep 17 00:00:00 2001 From: rodp63 Date: Wed, 28 May 2025 15:47:24 -0500 Subject: [PATCH 4/7] QDB-16709 - Fix mypy errors --- quasardb/quasardb/__init__.pyi | 2 -- quasardb/quasardb/_entry.pyi | 8 ++------ quasardb/quasardb/_node.pyi | 4 ++-- 3 files changed, 4 insertions(+), 10 deletions(-) diff --git a/quasardb/quasardb/__init__.pyi b/quasardb/quasardb/__init__.pyi index acf6fdb0..b52327ce 100644 --- a/quasardb/quasardb/__init__.pyi +++ b/quasardb/quasardb/__init__.pyi @@ -1,7 +1,5 @@ import datetime -import metrics - from ._batch_column import BatchColumnInfo from ._batch_inserter import TimeSeriesBatch from ._blob import Blob diff --git a/quasardb/quasardb/_entry.pyi b/quasardb/quasardb/_entry.pyi index 86e49bb6..c5c80710 100644 --- a/quasardb/quasardb/_entry.pyi +++ b/quasardb/quasardb/_entry.pyi @@ -4,10 +4,6 @@ import datetime class Entry: class Metadata: ... - # expiry_time: qdb_timespec_t - # modification_time: qdb_timespec_t - # size: int - # type: Entry.Type class Type: Uninitialized: Entry.Type # value = @@ -52,7 +48,7 @@ class Entry: def exists(self) -> bool: ... def get_entry_type(self) -> Entry.Type: ... def get_location(self) -> tuple[str, int]: ... - def get_metadata(self) -> ...: ... + def get_metadata(self) -> Metadata: ... def get_name(self) -> str: ... 
def get_tags(self) -> list[str]: ... def has_tag(self, tag: str) -> bool: ... @@ -61,4 +57,4 @@ class Entry: class ExpirableEntry(Entry): def expires_at(self, expiry_time: datetime.datetime) -> None: ... def expires_from_now(self, expiry_delta: datetime.timedelta) -> None: ... - def get_expiry_time(self) -> ...: ... + def get_expiry_time(self) -> datetime.datetime: ... diff --git a/quasardb/quasardb/_node.pyi b/quasardb/quasardb/_node.pyi index 5cfa0ca8..ccc2f282 100644 --- a/quasardb/quasardb/_node.pyi +++ b/quasardb/quasardb/_node.pyi @@ -21,6 +21,6 @@ class Node: cluster_public_key_file: str = "", enable_encryption: bool = False, ) -> None: ... - def blob(self, alias: str) -> ...: ... - def integer(self, alias: str) -> ...: ... + def blob(self, alias: str) -> DirectBlob: ... + def integer(self, alias: str) -> DirectInteger: ... def prefix_get(self, prefix: str, max_count: int) -> list[str]: ... From afec01e685bab11e2ed39a8cd34f4d61ebf917c0 Mon Sep 17 00:00:00 2001 From: rodp63 Date: Thu, 29 May 2025 15:37:06 -0500 Subject: [PATCH 5/7] QDB-16709 - Resolve comments --- quasardb/module.cpp | 1 - quasardb/quasardb/__init__.pyi | 1 + quasardb/quasardb/_table.pyi | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) diff --git a/quasardb/module.cpp b/quasardb/module.cpp index 0316f05f..7e7faea6 100644 --- a/quasardb/module.cpp +++ b/quasardb/module.cpp @@ -58,7 +58,6 @@ PYBIND11_MODULE(quasardb, m) auto py_datetime = py::module_::import("datetime"); auto py_utc = py_datetime.attr("timezone").attr("utc"); m.attr("never_expires") = py_datetime.attr("datetime").attr("fromtimestamp")(0, py_utc); - // m.attr("never_expires") = std::chrono::system_clock::time_point{}; qdb::register_errors(m); qdb::register_options(m); diff --git a/quasardb/quasardb/__init__.pyi b/quasardb/quasardb/__init__.pyi index b52327ce..1384a0fe 100644 --- a/quasardb/quasardb/__init__.pyi +++ b/quasardb/quasardb/__init__.pyi @@ -1,5 +1,6 @@ import datetime +from . 
import metrics from ._batch_column import BatchColumnInfo from ._batch_inserter import TimeSeriesBatch from ._blob import Blob diff --git a/quasardb/quasardb/_table.pyi b/quasardb/quasardb/_table.pyi index dd422826..ddd5831f 100644 --- a/quasardb/quasardb/_table.pyi +++ b/quasardb/quasardb/_table.pyi @@ -76,7 +76,7 @@ class Table(Entry): ) -> None: ... def column_id_by_index(self, index: int) -> str: ... def column_index_by_id(self, alias: str) -> int: ... - def column_info_by_index(self, index: int) -> ...: ... + def column_info_by_index(self, index: int) -> ColumnInfo: ... def column_type_by_id(self, alias: str) -> ColumnType: ... def column_type_by_index(self, index: int) -> ColumnType: ... def create( From 7de40bb45644c354571485b75c5161a9f4fa1b21 Mon Sep 17 00:00:00 2001 From: rodp63 Date: Tue, 3 Jun 2025 11:20:58 -0500 Subject: [PATCH 6/7] QDB-16709 - gh workflow / mypy checks --- .github/workflows/lint.yml | 37 ++++++++--- quasardb/__init__.py | 13 ++-- quasardb/numpy/__init__.py | 30 +++++---- quasardb/pool.py | 6 ++ scripts/github_actions/post_lint.py | 95 ----------------------------- 5 files changed, 59 insertions(+), 122 deletions(-) delete mode 100644 scripts/github_actions/post_lint.py diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index aa1ec891..721ee7bc 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,4 +1,4 @@ -name: Lint +name: Format check on: push: @@ -6,11 +6,13 @@ on: - master paths: - '**.py' + - '**.pyi' pull_request: branches: - master paths: - '**.py' + - '**.pyi' jobs: lint: @@ -28,15 +30,30 @@ jobs: run: | pip install -r scripts/github_actions/requirements.txt - - name: Run black - id: run_black + - name: Run black on Python files run: | black --check --verbose . 
- # - name: Run post-lint actions - # if: ${{ !cancelled() }} - # env: - # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - # SLACK_BOT_CHANNEL: ${{ vars.SLACK_BOT_CHANNEL }} - # run: | - # python scripts/github_actions/post_lint.py ${{ steps.run_black.outcome }} + - name: Run black on stub files + run: | + black --include '\.pyi$' --check --verbose . + + typing: + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v4 + + - name: Setup Python version + uses: actions/setup-python@v5 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + pip install -r scripts/github_actions/requirements.txt + pip install -r dev-requirements.txt + + - name: Run mypy + run: | + mypy --check quasardb diff --git a/quasardb/__init__.py b/quasardb/__init__.py index 6704b654..5a8a52ee 100644 --- a/quasardb/__init__.py +++ b/quasardb/__init__.py @@ -36,7 +36,10 @@ """ -def generic_error_msg(msg, e=None): +from typing import List, Optional + + +def generic_error_msg(msg: List[str], e: Optional[BaseException] = None) -> str: msg_str = "\n".join(msg) if e is None: @@ -47,7 +50,7 @@ def generic_error_msg(msg, e=None): ************************************************************************** """.format( - msg_str, type(e), str(e) + msg_str ) else: return """ @@ -68,7 +71,7 @@ def generic_error_msg(msg, e=None): ) -def link_error_msg(e): +def link_error_msg(e: BaseException) -> str: msg = [ "QuasarDB was unable to find all expected symbols in the compiled library.", "This is usually caused by running an incorrect version of the QuasarDB C", @@ -82,7 +85,7 @@ def link_error_msg(e): return generic_error_msg(msg, e) -def glibc_error_msg(e): +def glibc_error_msg(e: BaseException) -> str: msg = [ "QuasarDB was unable to find the expected GLIBC version on this machine.", "This is usually caused by compiling the Python API on a different machine " @@ -99,7 +102,7 @@ def glibc_error_msg(e): return generic_error_msg(msg, e) -def unknown_error_msg(): 
+def unknown_error_msg() -> str: msg = [ "Unable to import quasardb module: unknown error. ", "", diff --git a/quasardb/numpy/__init__.py b/quasardb/numpy/__init__.py index 32479995..d3c390de 100644 --- a/quasardb/numpy/__init__.py +++ b/quasardb/numpy/__init__.py @@ -30,6 +30,8 @@ import logging import time import warnings +from typing import Dict, List, Optional, Tuple, Union +from numpy.typing import DTypeLike import quasardb import quasardb.table_cache as table_cache @@ -62,14 +64,14 @@ class IncompatibleDtypeError(TypeError): Exception raised when a provided dtype is not the expected dtype. """ - def __init__(self, cname=None, ctype=None, expected=None, provided=None): + def __init__(self, cname=None, ctype=None, expected=None, provided=None) -> None: self.cname = cname self.ctype = ctype self.expected = expected self.provided = provided super().__init__(self.msg()) - def msg(self): + def msg(self) -> str: return "Data for column '{}' with type '{}' was provided in dtype '{}' but need '{}'.".format( self.cname, self.ctype, self.provided, self.expected ) @@ -80,11 +82,11 @@ class IncompatibleDtypeErrors(TypeError): Wraps multiple dtype errors """ - def __init__(self, xs): + def __init__(self, xs) -> None: self.xs = xs super().__init__(self.msg()) - def msg(self): + def msg(self) -> str: return "\n".join(x.msg() for x in self.xs) @@ -93,12 +95,12 @@ class InvalidDataCardinalityError(ValueError): Raised when the provided data arrays doesn't match the table's columns. 
""" - def __init__(self, data, cinfos): + def __init__(self, data, cinfos) -> None: self.data = data self.cinfos = cinfos super().__init__(self.msg()) - def msg(self): + def msg(self) -> str: return "Provided data array length '{}' exceeds amount of table columns '{}', unable to map data to columns".format( len(self.data), len(self.cinfos) ) @@ -117,7 +119,7 @@ def msg(self): } -def _best_dtype_for_ctype(ctype: quasardb.quasardb.ColumnType): +def _best_dtype_for_ctype(ctype: quasardb.ColumnType): """ Returns the 'best' DType for a certain column type. For example, for blobs, even though we accept py::bytes, prefer bytestrings (as they are faster to read in c++). @@ -129,7 +131,10 @@ def _best_dtype_for_ctype(ctype: quasardb.quasardb.ColumnType): return possible_dtypes[0] -def _coerce_dtype(dtype, columns): +def _coerce_dtype( + dtype: Union[DTypeLike, Dict[str, DTypeLike], List[DTypeLike]], + columns: List[Tuple[str, quasardb.ColumnInfo]], +) -> List[DTypeLike]: if dtype is None: dtype = [None] * len(columns) @@ -137,11 +142,10 @@ def _coerce_dtype(dtype, columns): dtype = [dtype] if type(dtype) is dict: - # Conveniently look up column index by label offsets = {} for i in range(len(columns)): - (cname, ctype) = columns[i] + (cname, _) = columns[i] offsets[cname] = i # Now convert the provided dtype dict to a list that matches @@ -152,7 +156,7 @@ def _coerce_dtype(dtype, columns): for k, dt in dtype.items(): if not k in offsets: - logger.warn( + logger.warning( "Forced dtype provided for column '%s' = %s, but that column is not found in the table. Skipping...", k, ) @@ -179,7 +183,9 @@ def _coerce_dtype(dtype, columns): return dtype -def _add_desired_dtypes(dtype, columns): +def _add_desired_dtypes( + dtype: List[DTypeLike], columns: List[Tuple[str, quasardb.ColumnInfo]] +) -> List[DTypeLike]: """ When infer_types=True, this function sets the 'desired' dtype for each of the columns. 
`dtype` is expected to be the output of `_coerce_dtype`, that is, a list-like with an diff --git a/quasardb/pool.py b/quasardb/pool.py index 6932488f..d7994c17 100644 --- a/quasardb/pool.py +++ b/quasardb/pool.py @@ -118,6 +118,9 @@ def close(self): logger.debug("closing connection {}".format(conn)) conn.close() + def _do_connect(self): + raise NotImplementedError + def connect(self) -> quasardb.Cluster: """ Acquire a new connection from the pool. Returned connection must either @@ -131,6 +134,9 @@ def connect(self) -> quasardb.Cluster: logger.info("Acquiring connection from pool") return self._do_connect() + def _do_release(self): + raise NotImplementedError + def release(self, conn: quasardb.Cluster): """ Put a connection back into the pool diff --git a/scripts/github_actions/post_lint.py b/scripts/github_actions/post_lint.py deleted file mode 100644 index c4b31180..00000000 --- a/scripts/github_actions/post_lint.py +++ /dev/null @@ -1,95 +0,0 @@ -import logging -import os -import sys - -from slack_sdk import WebClient - -logger = logging.getLogger(__name__) -qdb_users = { - "solatis": "U717WTKJ8", # Leon - "terngkub": "UAKV3RR55", # Nattapol - "igorniebylski": "U061GU3RHAR", # Igor - "rodp63": "U085W1KAGCW", # Joaquin -} - - -def get_slack_data() -> dict[str, str]: - return { - "token": os.getenv("SLACK_BOT_TOKEN", ""), - "channel": os.getenv("SLACK_BOT_CHANNEL", ""), - } - - -def get_user(github_user: str) -> str: - return ( - f"<@{qdb_users[github_user]}> ({github_user})" - if qdb_users.get(github_user) - else github_user - ) - - -def get_job_data() -> dict[str, str]: - server = os.getenv("GITHUB_SERVER_URL") - repository = os.getenv("GITHUB_REPOSITORY") - run_id = os.getenv("GITHUB_RUN_ID") - actor = os.getenv("GITHUB_ACTOR") - event_name = os.getenv("GITHUB_EVENT_NAME") - ref = os.getenv("GITHUB_REF") - workflow = os.getenv("GITHUB_WORKFLOW") - sha = os.getenv("GITHUB_SHA") - - return { - "url": f"{server}/{repository}/actions/runs/{run_id}", - "repository": 
repository, - "run_id": run_id, - "actor": actor, - "event_name": event_name, - "ref": ref, - "workflow": workflow, - "sha": sha, - } - - -def send_slack_message(client: WebClient, channel: str): - job_data = get_job_data() - client.chat_postMessage( - channel=channel, - blocks=[ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "A workflow has failed :broken_heart:", - }, - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": ( - f"*Repository* - {job_data['repository']}\n" - f"*Workflow* - {job_data['workflow']}\n" - f"*Triggered by* - {get_user(job_data['actor'])}\n" - f"*Event type* - {job_data['event_name']}\n" - f"*Event ref* - {job_data['ref']}\n" - f"*Commmit SHA* - {job_data['sha']}\n" - f"*Job link* - <{job_data['url']}|/actions/runs/{job_data['run_id']}>\n" - ), - }, - }, - ], - ) - - -def main(): - job_exit_outcome = sys.argv[1] - if job_exit_outcome == "failure": - slack_data = get_slack_data() - client = WebClient(token=slack_data["token"]) - send_slack_message(client, slack_data["channel"]) - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) From 2aecd48f2e76ec25bc4cef81afdd00851d0e17ee Mon Sep 17 00:00:00 2001 From: rodp63 Date: Wed, 11 Jun 2025 12:25:56 -0500 Subject: [PATCH 7/7] QDB-16709 - disable mypy workflow --- .github/workflows/lint.yml | 38 +++++++++++++++++++------------------- dev-requirements.txt | 2 +- pyproject.toml | 3 +++ 3 files changed, 23 insertions(+), 20 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 721ee7bc..13c46eef 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -38,22 +38,22 @@ jobs: run: | black --include '\.pyi$' --check --verbose . 
- typing: - runs-on: ubuntu-latest - steps: - - name: Check out code - uses: actions/checkout@v4 - - - name: Setup Python version - uses: actions/setup-python@v5 - with: - python-version: '3.13' - - - name: Install dependencies - run: | - pip install -r scripts/github_actions/requirements.txt - pip install -r dev-requirements.txt - - - name: Run mypy - run: | - mypy --check quasardb + # typing: + # runs-on: ubuntu-latest + # steps: + # - name: Check out code + # uses: actions/checkout@v4 + + # - name: Setup Python version + # uses: actions/setup-python@v5 + # with: + # python-version: '3.13' + + # - name: Install dependencies + # run: | + # pip install -r scripts/github_actions/requirements.txt + # pip install -r dev-requirements.txt + + # - name: Run mypy + # run: | + # mypy --check quasardb diff --git a/dev-requirements.txt b/dev-requirements.txt index df185393..9271f11a 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -36,7 +36,7 @@ teamcity-messages == 1.29 setuptools-git == 1.2 # Linting -black +black==24.10.0 # Stubs mypy diff --git a/pyproject.toml b/pyproject.toml index e04d6c10..db220b9c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,3 +24,6 @@ addopts = ["-s", "-x"] xfail_strict = true filterwarnings = [] testpaths = ["tests"] + +# [tool.mypy] +# python_version = "3.9" \ No newline at end of file