diff --git a/.cruft.json b/.cruft.json index 58ab012..0eeeb93 100644 --- a/.cruft.json +++ b/.cruft.json @@ -1,6 +1,6 @@ { "template": "https://github.com/NarrativeScience/cookiecutter-python-lib", - "commit": "f31f5912ab949296517c6d65fc666b11926a5cf8", + "commit": "06d791b4e3ac2362c595a9bcf0617f84e546ec3c", "context": { "cookiecutter": { "author_name": "", diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 50a63f6..f2eb669 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,6 +35,23 @@ repos: # Need to define stages explicitly since `default_stages` was not being respected stages: [commit] + - repo: https://github.com/lk16/detect-missing-init + rev: v0.1.4 + hooks: + - id: detect-missing-init + args: [--create, --track] + + - repo: https://github.com/bgimby-ns/pydocstyle + rev: 305f311b + hooks: + - id: pydocstyle + name: Lint Python docstrings (pydocstyle) + exclude: > + (?x)^( + .*__init__.py$| + .*setup.py$ + )$ + - repo: local hooks: - id: codespell diff --git a/README.md b/README.md index d0a9fc6..60577df 100644 --- a/README.md +++ b/README.md @@ -4,30 +4,39 @@ Pynocular is a lightweight ORM that lets you query your database using Pydantic models and asyncio. -With Pynocular you can decorate your existing Pydantic models to sync them with the corresponding table in your +With Pynocular, you can annotate your existing Pydantic models to sync them with the corresponding table in your database, allowing you to persist changes without ever having to think about the database. Transaction management is automatically handled for you so you can focus on the important parts of your code. This integrates seamlessly with frameworks that use Pydantic models such as FastAPI. 
Features: -- Fully supports asyncio to write to SQL databases +- Fully supports asyncio to write to SQL databases through the [databases](https://www.encode.io/databases/) library - Provides simple methods for basic SQLAlchemy support (create, delete, update, read) - Contains access to more advanced functionality such as custom SQLAlchemy selects - Contains helper functions for creating new database tables -- Advanced transaction management system allows you to conditionally put requests in transactions +- Supports automatic and nested transactions Table of Contents: - [Installation](#installation) -- [Guide](#guide) - - [Basic Usage](#basic-usage) - - [Advanced Usage](#advanced-usage) - - [Creating database tables](#creating-database-tables) +- [Basic Usage](#basic-usage) + - [Defining models](#defining-models) + - [Creating a database and setting the backend](#creating-a-database-and-setting-the-backend) + - [Creating, reading, updating, and deleting database objects](#creating-reading-updating-and-deleting-database-objects) + - [Serialization](#serialization) + - [Special type arguments](#special-type-arguments) +- [Advanced Usage](#advanced-usage) + - [Tables with compound keys](#tables-with-compound-keys) + - [Batch operations on tables](#batch-operations-on-tables) + - [Transactions and asyncio.gather](#transactions-and-asynciogather) + - [Complex queries](#complex-queries) + - [Creating database and tables](#creating-database-and-tables) + - [Unit testing with DatabaseModel](#unit-testing-with-databasemodel) - [Development](#development) ## Installation -Pynocular requires Python 3.6 or above. +Pynocular requires Python 3.9 or above. ```bash pip install pynocular @@ -35,37 +44,17 @@ pip install pynocular poetry add pynocular ``` -## Guide +## Basic Usage -### Basic Usage +### Defining models -Pynocular works by decorating your base Pydantic model with the function `database_model`. 
Once decorated -with the proper information, you can proceed to use that model to interface with your specified database table. - -The first step is to define a `DBInfo` object. This will contain the connection information to your database. +Pynocular works by augmenting Pydantic's `BaseModel` through the `DatabaseModel` class. Once you define a class that extends `DatabaseModel`, you can proceed to use that model to interface with your specified database table. ```python -from pynocular.engines import DatabaseType, DBInfo - +from pydantic import Field +from pynocular import DatabaseModel, UUID_STR -# Example below shows how to connect to a locally-running Postgres database -connection_string = f"postgresql://{db_user_name}:{db_user_password}@localhost:5432/{db_name}?sslmode=disable" -) -db_info = DBInfo(connection_string) -``` - -#### Object Management - -Once you define a `db_info` object, you are ready to decorate your Pydantic models and interact with your database! - -```python -from pydantic import BaseModel, Field -from pynocular.database_model import database_model, UUID_STR - -from my_package import db_info - -@database_model("organizations", db_info) -class Org(BaseModel): +class Org(DatabaseModel, table_name="organizations"): id: Optional[UUID_STR] = Field(primary_key=True, fetch_on_create=True) name: str = Field(max_length=45) @@ -74,9 +63,30 @@ class Org(BaseModel): created_at: Optional[datetime] = Field(fetch_on_create=True) updated_at: Optional[datetime] = Field(fetch_on_update=True) +``` + +### Creating a database and setting the backend + +The first step is to create a database pool and set the Pynocular backend. This will tell the models how to persist data. -#### Object management +Use the [databases](https://www.encode.io/databases/) library to create a database connection using the dialect of your choice and pass the database object to `SQLDatabaseModelBackend`. 
+```python +from pynocular import Database, set_backend, SQLDatabaseModelBackend + +async def main(): + # Example below shows how to connect to a locally-running Postgres database + connection_string = f"postgresql://{db_user_name}:{db_user_password}@localhost:5432/{db_name}?sslmode=disable" + async with Database(connection_string) as db: + with set_backend(SQLDatabaseModelBackend(db)): + print(await Org.get_list()) +``` + +### Creating, reading, updating, and deleting database objects + +Once you define a database model and set a backend, you are ready to interact with your database! + +```python # Create a new Org via `create` org = await Org.create(name="new org", slug="new-org") @@ -115,14 +125,13 @@ assert org3.name == "new org2" ``` -#### Serialization +### Serialization -DatabaseModels have their own serialization functions to convert to and from -dictionaries. +Database models have their own serialization functions to convert to and from dictionaries. ```python # Serializing org with `to_dict()` -org = Org.create(name="org serialize", slug="org-serialize") +org = await Org.create(name="org serialize", slug="org-serialize") org_dict = org.to_dict() expected_org_dict = { "id": "e64f6c7a-1bd1-4169-b482-189bd3598079", @@ -133,163 +142,37 @@ expected_org_dict = { } assert org_dict == expected_org_dict - # De-serializing org with `from_dict()` new_org = Org.from_dict(expected_org_dict) assert org == new_org ``` -#### Using Nested DatabaseModels - -Pynocular also supports basic object relationships. If your database tables have a -foreign key reference you can leverage that in your pydantic models to increase the -accessibility of those related objects. 
- -```python -from pydantic import BaseModel, Field -from pynocular.database_model import database_model, nested_model, UUID_STR - -from my_package import db_info - -@database_model("users", db_info) -class User(BaseModel): +### Special type arguments - id: Optional[UUID_STR] = Field(primary_key=True, fetch_on_create=True) - username: str = Field(max_length=100) - - created_at: Optional[datetime] = Field(fetch_on_create=True) - updated_at: Optional[datetime] = Field(fetch_on_update=True) - -@database_model("organizations", db_info) -class Org(BaseModel): - - id: Optional[UUID_STR] = Field(primary_key=True, fetch_on_create=True) - name: str = Field(max_length=45) - slug: str = Field(max_length=45) - # `organizations`.`tech_owner_id` is a foreign key to `users`.`id` - tech_owner: Optional[nested_model(User, reference_field="tech_owner_id")] - # `organizations`.`business_owner_id` is a foreign key to `users`.`id` - business_owner: nested_model(User, reference_field="business_owner_id") - tag: Optional[str] = Field(max_length=100) - - created_at: Optional[datetime] = Field(fetch_on_create=True) - updated_at: Optional[datetime] = Field(fetch_on_update=True) - - -tech_owner = await User.create("tech owner") -business_owner = await User.create("business owner") - - -# Creating org with only business owner set -org = await Org.create( - name="org name", - slug="org-slug", - business_owner=business_owner -) - -assert org.business_owner == business_owner - -# Add tech owner -org.tech_owner = tech_owner -await org.save() - -# Fetch from the db and check ids -org2 = Org.get(org.id) -assert org2.tech_owner.id == tech_owner.id -assert org2.business_owner.id == business_owner.id - -# Swap user roles -org2.tech_owner = business_owner -org2.business_owner = tech_owner -await org2.save() -org3 = await Org.get(org2.id) -assert org3.tech_owner.id == business_owner.id -assert org3.business_owner.id == tech_owner.id - - -# Serialize org -org_dict = org3.to_dict() -expected_org_dict = { 
- "id": org3.id, - "name": "org name", - "slug": "org-slug", - "business_owner_id": tech_owner.id, - "tech_owner_id": business_owner.id, - "tag": None, - "created_at": org3.created_at, - "updated_at": org3.updated_at -} - -assert org_dict == expected_org_dict - -``` - -When using `DatabaseModel.get(..)`, any foreign references will need to be resolved before any properties besides the primary ID can be accessed. If you try to access a property before calling `fetch()` on the nested model, a `NestedDatabaseModelNotResolved` error will be thrown. - -```python -org_get = await Org.get(org3.id) -org_get.tech_owner.id # Does not raise `NestedDatabaseModelNotResolved` -org_get.tech_owner.username # Raises `NestedDatabaseModelNotResolved` - -org_get = await Org.get(org3.id) -await org_get.tech_owner.fetch() -org_get.tech_owner.username # Does not raise `NestedDatabaseModelNotResolved` -``` - -Alternatively, calling `DatabaseModel.get_with_refs()` instead of `DatabaseModel.get()` will -automatically fetch the referenced records and fully resolve those objects for you. - -```python -org_get_with_refs = await Org.get_with_refs(org3.id) -org_get_with_refs.tech_owner.username # Does not raise `NestedDatabaseModelNotResolved` -``` - -There are some situations where none of the objects have been persisted to the -database yet. In this situation, you can call `Database.save(include_nested_models=True)` -on the object with the references and it will persist all of them in a transaction. - -```python -# We create the objects but dont persist them -tech_owner = User("tech owner") -business_owner = User("business owner") - -org = Org( - name="org name", - slug="org-slug", - business_owner=business_owner -) - -await org.save(include_nested_models=True) -``` - -#### Special Type arguments - -With Pynocular you can set fields to be optional and set by the database. This is useful +With Pynocular you can set fields to be optional and rely on the database server to set its value. 
This is useful if you want to let the database autogenerate your primary key or `created_at` and `updated_at` fields on your table. To do this you must: - Wrap the typehint in `Optional` - Provide keyword arguments of `fetch_on_create=True` or `fetch_on_update=True` to the `Field` class -### Advanced Usage +## Advanced Usage For most use cases, the basic usage defined above should suffice. However, there are certain situations where you don't necessarily want to fetch each object or you need to do more complex queries that are not exposed by the `DatabaseModel` interface. Below are some examples of how those situations can be addressed using Pynocular. -#### Tables with compound keys +### Tables with compound keys Pynocular supports tables that use multiple fields as its primary key such as join tables. ```python -from pydantic import BaseModel, Field -from pynocular.database_model import database_model, nested_model, UUID_STR +from pydantic import Field +from pynocular import DatabaseModel, UUID_STR -from my_package import db_info -@database_model("user_subscriptions", db_info) -class UserSubscriptions(BaseModel): +class UserSubscriptions(DatabaseModel, table_name="user_subscriptions"): user_id: UUID_STR = Field(primary_key=True, fetch_on_create=True) subscription_id: UUID_STR = Field(primary_key=True, fetch_on_create=True) @@ -314,9 +197,9 @@ user_sub_get.name = "change name" await user_sub_get.save() ``` -#### Batch operations on tables +### Batch operations on tables -Sometimes you want to insert a bunch of records into a database and you don't want to do an insert for each one. +Sometimes you want to perform a bulk insert of records into a database table. This can be handled by the `create_list` function. ```python @@ -352,116 +235,118 @@ org = await Org.get("05c0060c-ceb8-40f0-8faa-dfb91266a6cf") assert org.tag == "blue" ``` -#### Complex queries +### Transactions and asyncio.gather + +You should avoid using `asyncio.gather` within a database transaction. 
You can use Pynocular's `gather` function instead, which has the same interface but executes queries sequentially: + +```python +from pynocular import get_backend +from pynocular.util import gather + +async with get_backend().transaction(): + await gather( + Org.create(id="abc", name="foo"), + Org.create(id="def", name="bar"), + ) +``` + +The reason is that concurrent queries can interfere with each other and result in the error: + +```txt +asyncpg.exceptions._base.InterfaceError: cannot perform operation: another operation is in progress +``` + +See: https://github.com/encode/databases/issues/125#issuecomment-511720013 + +### Complex queries Sometimes your application will require performing complex queries, such as getting the count of each unique field value for all records in the table. Because Pynocular is backed by SQLAlchemy, we can access table columns directly to write pure SQLAlchemy queries as well! ```python from sqlalchemy import func, select -from pynocular.engines import DBEngine +from pynocular import get_backend + async def generate_org_stats(): query = ( select([func.count(Org.column.id), Org.column.tag]) .group_by(Org.column.tag) .order_by(func.count().desc()) ) - async with await DBEngine.transaction(Org._database_info, is_conditional=False) as conn: + # Get the active backend and open a database transaction + async with get_backend().transaction(): result = await conn.execute(query) - return [dict(row) async for row in result] + return [dict(row) for row in result] ``` -NOTE: `DBengine.transaction` is used to create a connection to the database using the credentials passed in. -If `is_conditional` is `False`, then it will add the query to any transaction that is opened in the call chain. This allows us to make database calls -in different functions but still have them all be under the same database transaction. 
If there is no transaction opened in the call chain it will open -a new one and any subsequent calls underneath that context manager will be added to the new transaction. - -If `is_conditional` is `True` and there is no transaction in the call chain, then the connection will not create a new transaction. Instead, the query will be performed without a transaction. - ### Creating database and tables -With Pynocular you can use simple python code to create new databases and database tables. All you need is a working connection string to the database host, a `DatabaseInfo` object that contains the information of the database you want to create, and a properly decorated pydantic model. When you decorate a pydantic model with Pynocular, it creates a SQLAlchemy table as a private variable. This can be accessed via the `_table` property -(although accessing private variables is not recommended). +With Pynocular you can use simple Python code to create new databases and database tables. All you need is a working connection string to the database host and a properly defined `DatabaseModel` class. When you define a class that extends `DatabaseModel`, Pynocular creates a SQLAlchemy table under the hood. This can be accessed via the `table` property. 
```python -from pynocular.db_util import create_new_database, create_table +from pynocular import Database +from pynocular.util import create_new_database, create_table -from my_package import Org, db_info +from my_package import Org -connection_string = "postgresql://postgres:XXXX@localhost:5432/postgres?sslmode=disable" +async def main(): + connection_string = "postgresql://postgres:XXXX@localhost:5432/postgres" + await create_new_database(connection_string, "my_new_db") -# Creates a new database and "organizations" table in that database -await create_new_database(connection_string, db_info) -await create_table(db_info, Org._table) + connection_string = "postgresql://postgres:XXXX@localhost:5432/my_new_db" + async with Database(connection_string) as db: + # Creates a new database and "organizations" table in that database + await create_table(db, Org.table) ``` -### Unit Testing with DatabaseModels +### Unit testing with DatabaseModel -Pynocular comes with tooling to write unit tests against your DatabaseModels, giving you +Pynocular comes with tooling to write unit tests against your database models, giving you the ability to test your business logic without the extra work and latency involved in -managing a database. All you have to do is use the `patch_database_model` context -manager provided in Pynocular. +managing a database. All you have to do is set the backend using the `MemoryDatabaseModelBackend` instead of the SQL backend. You don't need to change any of your database model definitions. 
```python -from pynocular.patch_models import patch_database_model +from pynocular import MemoryDatabaseModelBackend, set_backend from my_package import Org, User - -with patch_database_model(Org): +async def main(): orgs = [ Org(id=str(uuid4()), name="orgus borgus", slug="orgus_borgus"), Org(id=str(uuid4()), name="orgus borgus2", slug="orgus_borgus"), ] - await Org.create_list(orgs) - fetched_orgs = await Org.get_list(name=orgs[0].name) - assert orgs[0] == fetched_orgs[0] - -# patch_database_model also works with nested models -users = [ - User(id=str(uuid4()), username="Bob"), - User(id=str(uuid4()), username="Sally"), -] -orgs = [ - Org( - id=str(uuid4()), - name="orgus borgus", - slug="orgus_borgus", - tech_owner=users[0], - business_owner=users[1], - ), -] - -with patch_database_model(Org, models=orgs), patch_database_model( - User, models=users -): - org = await Org.get(orgs[0].id) - org.name = "new test name" - users[0].username = "bberkley" + with set_backend(MemoryDatabaseModelBackend()): + await Org.create_list(orgs) + fetched_orgs = await Org.get_list(name=orgs[0].name) + assert orgs[0] == fetched_orgs[0] - # Save the username update when saving the org model update - await org.save(include_nested_models=True) + users = [ + User(id=str(uuid4()), username="Bob"), + User(id=str(uuid4()), username="Sally"), + ] - # Get the org with the resolved nested model - org_get = await Org.get_with_refs(org.id) - assert org_get.name == "new test name" - assert org_get.tech_owner.username == "bberkley" + # You can also seed the backend with existing records + with MemoryDatabaseModelBackend( + records={ + "orgs": [o.to_dict() for o in orgs], + "users": [u.to_dict() for u in users], + } + ): + org = await Org.get(orgs[0].id) + org.name = "new test name" + await org.save() ``` ## Development -To develop Pynocular, install dependencies and enable the pre-commit hook. - -The example below is using Python 3.9 but you can replace this with any supported version of Python. 
- -Install Python 3.9 and activate it in your shell. +To develop Pynocular, install dependencies and enable the pre-commit hook. Make sure to install Python 3.9 and activate it in your shell. ```bash sudo yum install libffi-devel # Needed for ctypes to install poetry -pyenv install 3.9.7 -pyenv shell 3.9.7 +pyenv install 3.9.12 +pyenv shell 3.9.12 ``` Install dependencies and enable the pre-commit hook. diff --git a/poetry.lock b/poetry.lock index 49b7e94..81f2241 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,38 +1,3 @@ -[[package]] -name = "aenum" -version = "3.1.11" -description = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "aiocontextvars" -version = "0.2.2" -description = "Asyncio support for PEP-567 contextvars backport." -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -contextvars = {version = "2.4", markers = "python_version < \"3.7\""} - -[[package]] -name = "aiopg" -version = "1.3.3" -description = "Postgres integration with asyncio." 
-category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -async-timeout = ">=3.0,<5.0" -psycopg2-binary = ">=2.8.4" -sqlalchemy = {version = ">=1.3,<1.5", extras = ["postgresql_psycopg2binary"], optional = true, markers = "extra == \"sa\""} - -[package.extras] -sa = ["sqlalchemy[postgresql_psycopg2binary] (>=1.3,<1.5)"] - [[package]] name = "arrow" version = "1.2.2" @@ -43,18 +8,19 @@ python-versions = ">=3.6" [package.dependencies] python-dateutil = ">=2.7.0" -typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [[package]] -name = "async-timeout" -version = "4.0.2" -description = "Timeout context manager for asyncio programs" +name = "asyncpg" +version = "0.25.0" +description = "An asyncio PostgreSQL driver" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.0" -[package.dependencies] -typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} +[package.extras] +dev = ["Cython (>=0.29.24,<0.30.0)", "pytest (>=6.0)", "Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "pycodestyle (>=2.7.0,<2.8.0)", "flake8 (>=3.9.2,<3.10.0)", "uvloop (>=0.15.3)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)"] +test = ["pycodestyle (>=2.7.0,<2.8.0)", "flake8 (>=3.9.2,<3.10.0)", "uvloop (>=0.15.3)"] [[package]] name = "atomicwrites" @@ -99,12 +65,10 @@ python-versions = ">=3.6.2" [package.dependencies] click = ">=8.0.0" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -150,15 
+114,14 @@ unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.4" +version = "8.1.2" description = "Composable command line interface toolkit" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "colorama" @@ -168,17 +131,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -[[package]] -name = "contextvars" -version = "2.4" -description = "PEP 567 Backport" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -immutables = ">=0.9" - [[package]] name = "cookiecutter" version = "1.7.3" @@ -209,7 +161,6 @@ python-versions = ">=3.6.2,<4.0.0" click = ">=7.1.2,<9.0.0" cookiecutter = ">=1.6,<2.0" gitpython = ">=3.0,<4.0" -importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} typer = ">=0.4.0,<0.5.0" [package.extras] @@ -217,12 +168,23 @@ pyproject = ["toml (>=0.10,<0.11)"] examples = ["examples (>=1.0.2,<2.0.0)"] [[package]] -name = "dataclasses" -version = "0.8" -description = "A backport of the dataclasses module for Python 3.6" +name = "databases" +version = "0.5.5" +description = "Async database support for Python." category = "main" optional = false -python-versions = ">=3.6, <3.7" +python-versions = ">=3.6" + +[package.dependencies] +asyncpg = {version = "*", optional = true, markers = "extra == \"postgresql\""} +sqlalchemy = ">=1.4,<1.5" + +[package.extras] +mysql = ["aiomysql"] +mysql_asyncmy = ["asyncmy"] +postgresql = ["asyncpg"] +postgresql_aiopg = ["aiopg"] +sqlite = ["aiosqlite"] [[package]] name = "distlib" @@ -234,16 +196,29 @@ python-versions = "*" [[package]] name = "filelock" -version = "3.4.1" +version = "3.6.0" description = "A platform independent file lock." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + [[package]] name = "gitdb" version = "4.0.9" @@ -257,15 +232,14 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.20" -description = "Python Git Library" +version = "3.1.27" +description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} [[package]] name = "greenlet" @@ -280,11 +254,11 @@ docs = ["sphinx"] [[package]] name = "identify" -version = "2.4.4" +version = "2.4.12" description = "File identification library for Python" category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.extras] license = ["ukkonen"] @@ -297,52 +271,6 @@ category = "dev" optional = false python-versions = ">=3.5" -[[package]] -name = "immutables" -version = "0.17" -description = "Immutable Collections" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} - -[package.extras] -test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)", "mypy (>=0.910)", "pytest (>=6.2.4,<6.3.0)"] - -[[package]] -name = "importlib-metadata" -version = "4.8.3" -description = "Read metadata from 
Python packages" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] - -[[package]] -name = "importlib-resources" -version = "5.2.3" -description = "Read resources from Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] - [[package]] name = "iniconfig" version = "1.1.1" @@ -353,11 +281,11 @@ python-versions = "*" [[package]] name = "jinja2" -version = "3.0.3" +version = "3.1.1" description = "A very fast and expressive template engine." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] MarkupSafe = ">=2.0" @@ -379,11 +307,19 @@ jinja2 = "*" [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" [[package]] name = "mypy-extensions" @@ -422,15 +358,15 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "platformdirs" -version = "2.4.0" +version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] [[package]] name = "pluggy" @@ -440,9 +376,6 @@ category = "dev" optional = false python-versions = ">=3.6" -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] @@ -457,17 +390,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pre-commit" -version = "2.17.0" +version = "2.18.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] cfgv = ">=2.0.0" identify = ">=1.0.0" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} -importlib-resources = {version = "<5.3", markers = "python_version < \"3.7\""} nodeenv = ">=0.11.1" pyyaml = ">=5.1" toml = "*" @@ -489,6 +420,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "pydantic" version = "1.9.0" @@ -498,45 +437,51 @@ optional = false python-versions = ">=3.6.1" [package.dependencies] -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} typing-extensions = ">=3.7.4.3" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" +version = "3.0.8" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +diagrams = ["railroad-diagrams", "jinja2"] [[package]] name = "pytest" -version = "6.2.5" +version = "7.1.1" description = "pytest: simple powerful testing with Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = 
"sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" py = ">=1.8.2" -toml = "*" +tomli = ">=1.0.0" [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -552,6 +497,17 @@ pytest = ">=5.4.0" [package.extras] testing = ["coverage", "hypothesis (>=5.7.1)"] +[[package]] +name = "pytest-lazy-fixture" +version = "0.6.3" +description = "It helps to use fixtures in pytest.mark.parametrize" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pytest = ">=3.2.5" + [[package]] name = "python-dateutil" version = "2.8.2" @@ -629,8 +585,6 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" [package.dependencies] greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} -psycopg2-binary = {version = "*", optional = true, markers = "extra == \"postgresql_psycopg2binary\""} [package.extras] aiomysql = ["greenlet (!=0.4.17)", "aiomysql"] @@ -671,19 +625,11 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tomli" -version = "1.2.3" +version = "2.0.1" description = "A lil' TOML parser" category = "dev" optional = false -python-versions = ">=3.6" - -[[package]] -name = "typed-ast" -version = "1.5.3" -description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" -optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" 
[[package]] name = "typer" @@ -704,11 +650,11 @@ test = ["shellingham (>=1.3.0,<2.0.0)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov ( [[package]] name = "typing-extensions" -version = "4.1.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.2.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "urllib3" @@ -734,8 +680,6 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] distlib = ">=0.3.1,<1" filelock = ">=3.2,<4" -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} platformdirs = ">=2,<3" six = ">=1.9.0,<2" @@ -743,44 +687,43 @@ six = ">=1.9.0,<2" docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] -[[package]] -name = "zipp" -version = "3.6.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] - [metadata] lock-version = "1.1" -python-versions = "^3.6.5" -content-hash = "bb884979f923922a0a7465041174e6a0e1150d01bdefd1c28b3305d314d147f6" +python-versions = "^3.9" +content-hash = "f64fc5621cd0d1fe310988c7a7e2f1b793daca2d307953c2ac24d5991f4722cd" [metadata.files] -aenum = [ - 
{file = "aenum-3.1.11-py2-none-any.whl", hash = "sha256:525b4870a27d0b471c265bda692bc657f1e0dd7597ad4186d072c59f9db666f6"}, - {file = "aenum-3.1.11-py3-none-any.whl", hash = "sha256:12ae89967f2e25c0ce28c293955d643f891603488bc3d9946158ba2b35203638"}, - {file = "aenum-3.1.11.tar.gz", hash = "sha256:aed2c273547ae72a0d5ee869719c02a643da16bf507c80958faadc7e038e3f73"}, -] -aiocontextvars = [ - {file = "aiocontextvars-0.2.2-py2.py3-none-any.whl", hash = "sha256:885daf8261818767d8f7cbd79f9d4482d118f024b6586ef6e67980236a27bfa3"}, - {file = "aiocontextvars-0.2.2.tar.gz", hash = "sha256:f027372dc48641f683c559f247bd84962becaacdc9ba711d583c3871fb5652aa"}, -] -aiopg = [ - {file = "aiopg-1.3.3-py3-none-any.whl", hash = "sha256:2842dd8741460eeef940032dcb577bfba4d4115205dd82a73ce13b3271f5bf0a"}, - {file = "aiopg-1.3.3.tar.gz", hash = "sha256:547c6ba4ea0d73c2a11a2f44387d7133cc01d3c6f3b8ed976c0ac1eff4f595d7"}, -] arrow = [ {file = "arrow-1.2.2-py3-none-any.whl", hash = "sha256:d622c46ca681b5b3e3574fcb60a04e5cc81b9625112d5fb2b44220c36c892177"}, {file = "arrow-1.2.2.tar.gz", hash = "sha256:05caf1fd3d9a11a1135b2b6f09887421153b94558e5ef4d090b567b47173ac2b"}, ] -async-timeout = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +asyncpg = [ + {file = "asyncpg-0.25.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf5e3408a14a17d480f36ebaf0401a12ff6ae5457fdf45e4e2775c51cc9517d3"}, + {file = "asyncpg-0.25.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2bc197fc4aca2fd24f60241057998124012469d2e414aed3f992579db0c88e3a"}, + {file = "asyncpg-0.25.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a70783f6ffa34cc7dd2de20a873181414a34fd35a4a208a1f1a7f9f695e4ec4"}, + {file = "asyncpg-0.25.0-cp310-cp310-win32.whl", 
hash = "sha256:43cde84e996a3afe75f325a68300093425c2f47d340c0fc8912765cf24a1c095"}, + {file = "asyncpg-0.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:56d88d7ef4341412cd9c68efba323a4519c916979ba91b95d4c08799d2ff0c09"}, + {file = "asyncpg-0.25.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a84d30e6f850bac0876990bcd207362778e2208df0bee8be8da9f1558255e634"}, + {file = "asyncpg-0.25.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:beaecc52ad39614f6ca2e48c3ca15d56e24a2c15cbfdcb764a4320cc45f02fd5"}, + {file = "asyncpg-0.25.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6f8f5fc975246eda83da8031a14004b9197f510c41511018e7b1bedde6968e92"}, + {file = "asyncpg-0.25.0-cp36-cp36m-win32.whl", hash = "sha256:ddb4c3263a8d63dcde3d2c4ac1c25206bfeb31fa83bd70fd539e10f87739dee4"}, + {file = "asyncpg-0.25.0-cp36-cp36m-win_amd64.whl", hash = "sha256:bf6dc9b55b9113f39eaa2057337ce3f9ef7de99a053b8a16360395ce588925cd"}, + {file = "asyncpg-0.25.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:acb311722352152936e58a8ee3c5b8e791b24e84cd7d777c414ff05b3530ca68"}, + {file = "asyncpg-0.25.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a61fb196ce4dae2f2fa26eb20a778db21bbee484d2e798cb3cc988de13bdd1b"}, + {file = "asyncpg-0.25.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2633331cbc8429030b4f20f712f8d0fbba57fa8555ee9b2f45f981b81328b256"}, + {file = "asyncpg-0.25.0-cp37-cp37m-win32.whl", hash = "sha256:863d36eba4a7caa853fd7d83fad5fd5306f050cc2fe6e54fbe10cdb30420e5e9"}, + {file = "asyncpg-0.25.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fe471ccd915b739ca65e2e4dbd92a11b44a5b37f2e38f70827a1c147dafe0fa8"}, + {file = "asyncpg-0.25.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:72a1e12ea0cf7c1e02794b697e3ca967b2360eaa2ce5d4bfdd8604ec2d6b774b"}, + {file = 
"asyncpg-0.25.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4327f691b1bdb222df27841938b3e04c14068166b3a97491bec2cb982f49f03e"}, + {file = "asyncpg-0.25.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:739bbd7f89a2b2f6bc44cb8bf967dab12c5bc714fcbe96e68d512be45ecdf962"}, + {file = "asyncpg-0.25.0-cp38-cp38-win32.whl", hash = "sha256:18d49e2d93a7139a2fdbd113e320cc47075049997268a61bfbe0dde680c55471"}, + {file = "asyncpg-0.25.0-cp38-cp38-win_amd64.whl", hash = "sha256:191fe6341385b7fdea7dbdcf47fd6db3fd198827dcc1f2b228476d13c05a03c6"}, + {file = "asyncpg-0.25.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fab7f1b2c29e187dd8781fce896249500cf055b63471ad66332e537e9b5f7e"}, + {file = "asyncpg-0.25.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a738f1b2876f30d710d3dc1e7858160a0afe1603ba16bf5f391f5316eb0ed855"}, + {file = "asyncpg-0.25.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4105f57ad1e8fbc8b1e535d8fcefa6ce6c71081228f08680c6dea24384ff0e"}, + {file = "asyncpg-0.25.0-cp39-cp39-win32.whl", hash = "sha256:f55918ded7b85723a5eaeb34e86e7b9280d4474be67df853ab5a7fa0cc7c6bf2"}, + {file = "asyncpg-0.25.0-cp39-cp39-win_amd64.whl", hash = "sha256:649e2966d98cc48d0646d9a4e29abecd8b59d38d55c256d5c857f6b27b7407ac"}, + {file = "asyncpg-0.25.0.tar.gz", hash = "sha256:63f8e6a69733b285497c2855464a34de657f2cccd25aeaeeb5071872e9382540"}, ] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, @@ -836,16 +779,13 @@ charset-normalizer = [ {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ - {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, - {file = "click-8.0.4.tar.gz", hash 
= "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, + {file = "click-8.1.2-py3-none-any.whl", hash = "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"}, + {file = "click-8.1.2.tar.gz", hash = "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] -contextvars = [ - {file = "contextvars-2.4.tar.gz", hash = "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e"}, -] cookiecutter = [ {file = "cookiecutter-1.7.3-py2.py3-none-any.whl", hash = "sha256:f8671531fa96ab14339d0c59b4f662a4f12a2ecacd94a0f70a3500843da588e2"}, {file = "cookiecutter-1.7.3.tar.gz", hash = "sha256:6b9a4d72882e243be077a7397d0f1f76fe66cf3df91f3115dbb5330e214fa457"}, @@ -854,25 +794,29 @@ cruft = [ {file = "cruft-2.10.2-py3-none-any.whl", hash = "sha256:8ccf0b74ea07f4de3bc7c6a798c0fbfb922a02c7c44f472905edd624f383085c"}, {file = "cruft-2.10.2.tar.gz", hash = "sha256:fe7aaace048df17efc0e597c8035cb0deaa7a8734a86eb8c6cca5388971a2a42"}, ] -dataclasses = [ - {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, - {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, +databases = [ + {file = "databases-0.5.5-py3-none-any.whl", hash = "sha256:97d9b9647216d1ab53ca61c059412b5c7b6e1f0bf8ce985477982ebcc7f278f3"}, + {file = "databases-0.5.5.tar.gz", hash = "sha256:02c6b016c1c951c21cca281dc8e2e002c60dc44026c0084aabbd8c37514aeb37"}, ] distlib = [ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, {file = "distlib-0.3.4.zip", hash = 
"sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, ] filelock = [ - {file = "filelock-3.4.1-py3-none-any.whl", hash = "sha256:a4bc51381e01502a30e9f06dd4fa19a1712eab852b6fb0f84fd7cce0793d8ca3"}, - {file = "filelock-3.4.1.tar.gz", hash = "sha256:0f12f552b42b5bf60dba233710bf71337d35494fc8bdd4fd6d9f6d082ad45e06"}, + {file = "filelock-3.6.0-py3-none-any.whl", hash = "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0"}, + {file = "filelock-3.6.0.tar.gz", hash = "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85"}, +] +flake8 = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, ] gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] gitpython = [ - {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, - {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, + {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, + {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] greenlet = [ {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, @@ -932,154 +876,70 @@ greenlet = [ {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, ] identify = [ - {file = "identify-2.4.4-py2.py3-none-any.whl", hash = 
"sha256:aa68609c7454dbcaae60a01ff6b8df1de9b39fe6e50b1f6107ec81dcda624aa6"}, - {file = "identify-2.4.4.tar.gz", hash = "sha256:6b4b5031f69c48bf93a646b90de9b381c6b5f560df4cbe0ed3cf7650ae741e4d"}, + {file = "identify-2.4.12-py2.py3-none-any.whl", hash = "sha256:5f06b14366bd1facb88b00540a1de05b69b310cbc2654db3c7e07fa3a4339323"}, + {file = "identify-2.4.12.tar.gz", hash = "sha256:3f3244a559290e7d3deb9e9adc7b33594c1bc85a9dd82e0f1be519bf12a1ec17"}, ] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] -immutables = [ - {file = "immutables-0.17-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cab10d65a29b2019fffd7a3924f6965a8f785e7bd409641ce36ab2d3335f88c4"}, - {file = "immutables-0.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f73088c9b8595ddfd45a5658f8cce0cb3ae6e5890458381fccba3ed3035081d4"}, - {file = "immutables-0.17-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef632832fa1acae6861d83572b866126f9e35706ab6e581ce6b175b3e0b7a3c4"}, - {file = "immutables-0.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0efdcec7b63859b41f794ffa0cd0d6dc87e77d1be4ff0ec23471a3a1e719235f"}, - {file = "immutables-0.17-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eca96f12bc1535657d24eae2c69816d0b22c4a4bc7f4753115e028a137e8dad"}, - {file = "immutables-0.17-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:01a25b1056754aa486afea5471ca348410d77f458477ccb6fa3baf2d3e3ff3d5"}, - {file = "immutables-0.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c41a6648f7355f1241da677c418edae56fdc45af19ad3540ca8a1e7a81606a7a"}, - {file = "immutables-0.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:0b578bba11bd8ae55dee9536edf8d82be18463d15d4b4c9827e27eeeb73826bf"}, - {file = "immutables-0.17-cp310-cp310-win32.whl", hash = "sha256:a28682e115191e909673aedb9ccea3377da3a6a929f8bd86982a2a76bdfa89db"}, - {file = "immutables-0.17-cp310-cp310-win_amd64.whl", hash = "sha256:293ddb681502945f29b3065e688a962e191e752320040892316b9dd1e3b9c8c9"}, - {file = "immutables-0.17-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ec04fc7d9f76f26d82a5d9d1715df0409d0096309828fc46cd1a2067c7fbab95"}, - {file = "immutables-0.17-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f024f25e9fda42251a2b2167668ca70678c19fb3ab6ed509cef0b4b431d0ff73"}, - {file = "immutables-0.17-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b02083b2052cc201ac5cbd38f34a5da21fcd51016cb4ddd1fb43d7dc113eac17"}, - {file = "immutables-0.17-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea32db31afb82d8369e98f85c5b815ff81610a12fbc837830a34388f1b56f080"}, - {file = "immutables-0.17-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:898a9472d1dd3d17f291114395a1be65be035355fc65af0b2c88238f8fbeaa62"}, - {file = "immutables-0.17-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:736dd3d88d44da0ee48804792bd095c01a344c5d1b0f10beeb9ccb3a00b9c19d"}, - {file = "immutables-0.17-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:15ff4139720f79b902f435a25e3c00f9c8adcc41d79bed64b7e51ae36cfe9620"}, - {file = "immutables-0.17-cp36-cp36m-win32.whl", hash = "sha256:4f018a6c4c3689b82f763ad4f84dec6aa91c83981db7f6bafef963f036e5e815"}, - {file = "immutables-0.17-cp36-cp36m-win_amd64.whl", hash = "sha256:d7400a6753b292ac80102ed026efa8da2c3fedd50c443924cbe9b6448d3b19e4"}, - {file = "immutables-0.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f7a6e0380bddb99c46bb3f12ae5eee9a23d6a66d99bbf0fb10fa552f935c2e8d"}, - {file = 
"immutables-0.17-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7696c42d1f9a16ecda0ee46229848df8706973690b45e8a090d995d647a5ec57"}, - {file = "immutables-0.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:892b6a1619cd8c398fa70302c4cfa9768a694377639330e7a58cc7be111ab23e"}, - {file = "immutables-0.17-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89093d5a85357250b1d5ae218fdcfdbac4097cbb2d8b55004aa7a2ca2a00a09f"}, - {file = "immutables-0.17-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99a8bc6d0623300eb46beea74f7a5061968fb3efc4e072f23f6c0b21c588238d"}, - {file = "immutables-0.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:00380474f8e3b4a2eeb06ce694e0e3cb85a144919140a2b3116defb6c1587471"}, - {file = "immutables-0.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:078e3ed63be0ac36523b80bbabbfb1bb57e55009f4efb5650b0e3b3ed569c3f1"}, - {file = "immutables-0.17-cp37-cp37m-win32.whl", hash = "sha256:14905aecc62b318d86045dcf8d35ef2063803d9d331aeccd88958f03caadc7b0"}, - {file = "immutables-0.17-cp37-cp37m-win_amd64.whl", hash = "sha256:3774d403d1570105a1da2e00c38ce3f04065fd1deff04cf998f8d8e946d0ae13"}, - {file = "immutables-0.17-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e5a9caee1b99eccf1447056ae6bda77edd15c357421293e81fa1a4f28e83448a"}, - {file = "immutables-0.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fed1e1baf1de1bc94a0310da29814892064928d7d40ff5a3b86bcd11d5e7cfff"}, - {file = "immutables-0.17-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d7daa340d76747ba5a8f64816b48def74bd4be45a9508073b34fa954d099fba"}, - {file = "immutables-0.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4644c29fe07fb92ba84b26659708e1799fecaaf781214adf13edd8a4d7495a9"}, - {file = 
"immutables-0.17-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e9ea0e2a31db44fb01617ff875d4c26f962696e1c5ff11ed7767c2d8dedac4"}, - {file = "immutables-0.17-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:64100dfdb29fae2bc84748fff5d66dd6b3997806c717eeb75f7099aeee9b1878"}, - {file = "immutables-0.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5f933e5bf6f2c1afb24bc2fc8bea8b132096a4a6ba54f36be59787981f3e50ff"}, - {file = "immutables-0.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9508a087a47f9f9506adf2fa8383ab14c46a222b57eea8612bc4c2aa9a9550fe"}, - {file = "immutables-0.17-cp38-cp38-win32.whl", hash = "sha256:dfd2c63f15d1e5ea1ed2a05b7c602b5f61a64337415d299df20e103a57ae4906"}, - {file = "immutables-0.17-cp38-cp38-win_amd64.whl", hash = "sha256:301c539660c988c5b24051ccad1e36c040a916f1e58fa3e245e3122fc50dd28d"}, - {file = "immutables-0.17-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:563bc2ddbe75c10faa3b4b0206870653b44a231b97ed23cff8ab8aff503d922d"}, - {file = "immutables-0.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f621ea6130393cd14d0fbd35b306d4dc70bcd0fda550a8cd313db8015e34ca60"}, - {file = "immutables-0.17-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57c2d1b16b716bca70345db334dd6a861bf45c46cb11bb1801277f8a9012e864"}, - {file = "immutables-0.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a08e1a80bd8c5df72c2bf0af24a37ceec17e8ffdb850ed5a62d0bba1d4d86018"}, - {file = "immutables-0.17-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b99155ad112149d43208c611c6c42f19e16716526dacc0fcc16736d2f5d2e20"}, - {file = "immutables-0.17-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ed71e736f8fb82545d00c8969dbc167547c15e85729058edbed3c03b94fca86c"}, - {file = "immutables-0.17-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:19e4b8e5810dd7cab63fa700373f787a369d992166eabc23f4b962e5704d33c5"}, - {file = "immutables-0.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:305062012497d4c4a70fe35e20cef2c6f65744e721b04671092a63354799988d"}, - {file = "immutables-0.17-cp39-cp39-win32.whl", hash = "sha256:f5c6bd012384a8d6af7bb25675719214d76640fe6c336e2b5fba9eef1407ae6a"}, - {file = "immutables-0.17-cp39-cp39-win_amd64.whl", hash = "sha256:615ab26873a794559ccaf4e0e9afdb5aefad0867c15262ba64a55a12a5a41573"}, - {file = "immutables-0.17.tar.gz", hash = "sha256:ad894446355b6f5289a9c84fb46f7c47c6ef2b1bfbdd2be6cb177dbb7f1587ad"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, - {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, -] -importlib-resources = [ - {file = "importlib_resources-5.2.3-py3-none-any.whl", hash = "sha256:ae35ed1cfe8c0d6c1a53ecd168167f01fa93b893d51a62cdf23aea044c67211b"}, - {file = "importlib_resources-5.2.3.tar.gz", hash = "sha256:203d70dda34cfbfbb42324a8d4211196e7d3e858de21a5eb68c6d1cdd99e4e98"}, -] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] jinja2 = [ - {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, - {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, + {file = "Jinja2-3.1.1-py3-none-any.whl", hash = "sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119"}, + {file = "Jinja2-3.1.1.tar.gz", hash = "sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9"}, ] jinja2-time = [ {file = 
"jinja2-time-0.2.0.tar.gz", hash = "sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40"}, {file = "jinja2_time-0.2.0-py2.py3-none-any.whl", hash = "sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = 
"MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = 
"MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = 
"MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", 
hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, @@ -1098,8 +958,8 @@ pathspec = [ {file = "pathspec-0.9.0.tar.gz", hash = 
"sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] platformdirs = [ - {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, - {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, @@ -1110,8 +970,8 @@ poyo = [ {file = "poyo-0.5.0.tar.gz", hash = "sha256:e26956aa780c45f011ca9886f044590e2d8fd8b61db7b1c1cf4e0869f48ed4dd"}, ] pre-commit = [ - {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"}, - {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"}, + {file = "pre_commit-2.18.1-py2.py3-none-any.whl", hash = "sha256:02226e69564ebca1a070bd1f046af866aa1c318dbc430027c50ab832ed2b73f2"}, + {file = "pre_commit-2.18.1.tar.gz", hash = "sha256:5d445ee1fa8738d506881c5d84f83c62bb5be6b2838e32207433647e8e5ebe10"}, ] psycopg2-binary = [ {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, @@ -1175,6 +1035,10 @@ py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] +pycodestyle = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = 
"pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] pydantic = [ {file = "pydantic-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5"}, {file = "pydantic-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4"}, @@ -1212,18 +1076,26 @@ pydantic = [ {file = "pydantic-1.9.0-py3-none-any.whl", hash = "sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3"}, {file = "pydantic-1.9.0.tar.gz", hash = "sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a"}, ] +pyflakes = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, + {file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"}, + {file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"}, ] pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, + {file = "pytest-7.1.1-py3-none-any.whl", hash = "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea"}, + {file = "pytest-7.1.1.tar.gz", hash = "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63"}, ] pytest-asyncio = [ {file = 
"pytest-asyncio-0.15.1.tar.gz", hash = "sha256:2564ceb9612bbd560d19ca4b41347b54e7835c2f792c504f698e05395ed63f6f"}, {file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"}, ] +pytest-lazy-fixture = [ + {file = "pytest-lazy-fixture-0.6.3.tar.gz", hash = "sha256:0e7d0c7f74ba33e6e80905e9bfd81f9d15ef9a790de97993e34213deb5ad10ac"}, + {file = "pytest_lazy_fixture-0.6.3-py3-none-any.whl", hash = "sha256:e0b379f38299ff27a653f03eaa69b08a6fd4484e46fd1c9907d984b9f9daeda6"}, +] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -1326,42 +1198,16 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomli = [ - {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, - {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, -] -typed-ast = [ - {file = "typed_ast-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ad3b48cf2b487be140072fb86feff36801487d4abb7382bb1929aaac80638ea"}, - {file = "typed_ast-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:542cd732351ba8235f20faa0fc7398946fe1a57f2cdb289e5497e1e7f48cfedb"}, - {file = "typed_ast-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc2c11ae59003d4a26dda637222d9ae924387f96acae9492df663843aefad55"}, - {file = "typed_ast-1.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd5df1313915dbd70eaaa88c19030b441742e8b05e6103c631c83b75e0435ccc"}, - {file = "typed_ast-1.5.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:e34f9b9e61333ecb0f7d79c21c28aa5cd63bec15cb7e1310d7d3da6ce886bc9b"}, - {file = "typed_ast-1.5.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f818c5b81966d4728fec14caa338e30a70dfc3da577984d38f97816c4b3071ec"}, - {file = "typed_ast-1.5.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3042bfc9ca118712c9809201f55355479cfcdc17449f9f8db5e744e9625c6805"}, - {file = "typed_ast-1.5.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fff9fdcce59dc61ec1b317bdb319f8f4e6b69ebbe61193ae0a60c5f9333dc49"}, - {file = "typed_ast-1.5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8e0b8528838ffd426fea8d18bde4c73bcb4167218998cc8b9ee0a0f2bfe678a6"}, - {file = "typed_ast-1.5.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ef1d96ad05a291f5c36895d86d1375c0ee70595b90f6bb5f5fdbee749b146db"}, - {file = "typed_ast-1.5.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed44e81517364cb5ba367e4f68fca01fba42a7a4690d40c07886586ac267d9b9"}, - {file = "typed_ast-1.5.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f60d9de0d087454c91b3999a296d0c4558c1666771e3460621875021bf899af9"}, - {file = "typed_ast-1.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9e237e74fd321a55c90eee9bc5d44be976979ad38a29bbd734148295c1ce7617"}, - {file = "typed_ast-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee852185964744987609b40aee1d2eb81502ae63ee8eef614558f96a56c1902d"}, - {file = "typed_ast-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:27e46cdd01d6c3a0dd8f728b6a938a6751f7bd324817501c15fb056307f918c6"}, - {file = "typed_ast-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d64dabc6336ddc10373922a146fa2256043b3b43e61f28961caec2a5207c56d5"}, - {file = "typed_ast-1.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:8cdf91b0c466a6c43f36c1964772918a2c04cfa83df8001ff32a89e357f8eb06"}, - {file = "typed_ast-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:9cc9e1457e1feb06b075c8ef8aeb046a28ec351b1958b42c7c31c989c841403a"}, - {file = "typed_ast-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e20d196815eeffb3d76b75223e8ffed124e65ee62097e4e73afb5fec6b993e7a"}, - {file = "typed_ast-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:37e5349d1d5de2f4763d534ccb26809d1c24b180a477659a12c4bde9dd677d74"}, - {file = "typed_ast-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f1a27592fac87daa4e3f16538713d705599b0a27dfe25518b80b6b017f0a6d"}, - {file = "typed_ast-1.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8831479695eadc8b5ffed06fdfb3e424adc37962a75925668deeb503f446c0a3"}, - {file = "typed_ast-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:20d5118e494478ef2d3a2702d964dae830aedd7b4d3b626d003eea526be18718"}, - {file = "typed_ast-1.5.3.tar.gz", hash = "sha256:27f25232e2dd0edfe1f019d6bfaaf11e86e657d9bdb7b0956db95f560cceb2b3"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] typer = [ {file = "typer-0.4.1-py3-none-any.whl", hash = "sha256:e8467f0ebac0c81366c2168d6ad9f888efdfb6d4e1d3d5b4a004f46fa444b5c3"}, {file = "typer-0.4.1.tar.gz", hash = "sha256:5646aef0d936b2c761a10393f0384ee6b5c7fe0bb3e5cd710b17134ca1d99cff"}, ] typing-extensions = [ - {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, - {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, + {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = 
"sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, + {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] urllib3 = [ {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, @@ -1371,7 +1217,3 @@ virtualenv = [ {file = "virtualenv-20.14.1-py2.py3-none-any.whl", hash = "sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a"}, {file = "virtualenv-20.14.1.tar.gz", hash = "sha256:ef589a79795589aada0c1c5b319486797c03b67ac3984c48c669c0e4f50df3a5"}, ] -zipp = [ - {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, - {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, -] diff --git a/pynocular/__init__.py b/pynocular/__init__.py index 2429f99..a35ff6e 100644 --- a/pynocular/__init__.py +++ b/pynocular/__init__.py @@ -1,6 +1,9 @@ """Lightweight ORM that lets you query your database using Pydantic models and asyncio""" -__version__ = "0.19.0" +__version__ = "2.0.0-rc5" -from pynocular.database_model import DatabaseModel, UUID_STR -from pynocular.engines import DatabaseType, DBInfo +from pynocular.backends.context import get_backend, set_backend +from pynocular.backends.memory import MemoryDatabaseModelBackend +from pynocular.backends.sql import Database, SQLDatabaseModelBackend +from pynocular.database_model import DatabaseModel +from pynocular.util import UUID_STR diff --git a/pynocular/aiopg_transaction.py b/pynocular/aiopg_transaction.py deleted file mode 100644 index ac04d15..0000000 --- a/pynocular/aiopg_transaction.py +++ /dev/null @@ -1,277 +0,0 @@ -"""Module for aiopg transaction utils""" -import asyncio -import sys -from typing import Dict, Optional, Union - -import aiocontextvars as contextvars -from aiopg.sa.connection import SAConnection -import 
aiopg.sa.engine - -transaction_connections_var = contextvars.ContextVar("transaction_connections") - - -def get_current_task() -> asyncio.Task: - """Get the current task when this method is called - - Returns: - The current task the method is called in - - """ - if sys.version_info.major == 3 and sys.version_info.minor > 7: - # If this is version 3.7 or higher then use the new function to get the current task - return asyncio.current_task() - else: - return asyncio.Task.current_task() - - -class LockedConnection(SAConnection): - """A wrapper connection class that won't make multiple queries at once""" - - def __init__(self, connection: SAConnection) -> None: - """Create a new LockedConnection - - Args: - connection: The connection to wrap - - """ - self._conn = connection - self.lock = asyncio.Lock() - - async def execute(self, *args, **kwargs): - """Wrapper around the `execute` method of the wrapped SAConnection""" - async with self.lock: - return await self._conn.execute(*args, **kwargs) - - def __getattr__(self, attr): - """Except for execute, all other attributes should pass through""" - return getattr(self._conn, attr) - - -class TaskContextConnection: - """Interface for managing a connection entry on the asyncio Task context - - The current asyncio.Task has a context attribute that keeps track of various keys. - We'll use this to store the open connection so we can perform our nested/conditional - transaction logic in :py:class:`transaction`. The actual value stored on the context - is a dict of connections keyed by the engine. - """ - - def __init__(self, connection_key: str) -> None: - """Initializer - - Args: - connection_key: Key for getting/setting/clearing from the connection map - - """ - self.connection_key = connection_key - self._token: Optional[contextvars.Token] = None - - # Set the asyncio task context if it's not set already. We'll look in the - # context for an open connection. 
- task = get_current_task() - if not hasattr(task, "context"): - task.context = contextvars.copy_context() - - @classmethod - def _get_connections(cls) -> Dict[str, LockedConnection]: - """Get the map of connections from the task context""" - global transaction_connections_var - return transaction_connections_var.get({}) - - def get(self) -> Optional[LockedConnection]: - """If there is already a connection stored, get it""" - return self._get_connections().get(self.connection_key) - - def set(self, conn: LockedConnection) -> contextvars.Token: - """Set the connection on the context - - Args: - conn: Connection to store - - Returns: - contextvars token used to reset the var in :py:meth:`.clear` - - """ - global transaction_connections_var - connections = self._get_connections() - connections[self.connection_key] = conn - token = transaction_connections_var.set(connections) - self._token = token - return token - - def clear(self) -> None: - """Clear the connection from the context""" - if not self._token: - raise ValueError("Token must be defined") - - global transaction_connections_var - transaction_connections_var.reset(self._token) - - -class transaction: - """A context manager to collect nested calls in a transaction - - To use, anywhere you want to have queries put into a transaction, do - - async with transaction(aiopg_engine) as trx: - ... - - The resulting trx object can be used just like a connection you would - get from `aiopg_engine.acquire()`, but any nested usages of this decorator - will ensure that we do not deadlock from nested acquire calls, and do not - run into errors where we attempt to use the same connection to make - multiple calls at once. - NB: It does this by ensuring that we get the same connection object and - execute serially, so you only want to use this in cases where you are - worried about these issues. 
- - For example, using just aiopg, this is an error: - - async with engine.acquire() as conn: - await asyncio.gather( - conn.execute(TABLE.insert().values(id=uuid(), name="foo")), - conn.execute(TABLE.insert().values(id=uuid(), name="bar"))) - - But using this class, we will not error: - - async with transaction(engine) as conn: - await asyncio.gather( - conn.execute(TABLE.insert().values(id=uuid(), name="foo")), - conn.execute(TABLE.insert().values(id=uuid(), name="bar"))) - - Note: - There are limits to the transaction rollback protection that this - context manager affords. Specifically, a known failure case can be - encountered if a DB connection is created by calling `Engine.acquire` - rather than `transaction(Engine)`, even if the call to `acquire` is - made within a transaction context. For more information, see: - :py:module:`python_core.tests.functional - .test_aiopg_transaction_integrity`. - - """ - - def __init__(self, engine: aiopg.sa.engine.Engine) -> None: - """Create a new transaction context - - Args: - engine: Database engine for making connections - - """ - self._engine = engine - - # Is this the outer-most transaction context? - # If so, this will be set to true in `__aenter__` - self._top = False - - # If we have started a transaction, store it here - self._trx = None - - # Initiatize an interface for managing the connection on the asyncio task context - self.task_connection = TaskContextConnection(str(engine)) - - async def __aenter__(self) -> LockedConnection: - """Establish the transaction context - - Figure out if this is the top level context. If so, get a connection - and start a transaction. If not, then just grab the stored connection. - """ - conn = self.task_connection.get() - if not conn: - # There is no stored connection in this context, so this must be - # the top level call. 
- self._top = True - # Create the connection - conn = LockedConnection(await self._engine.acquire()) - self.task_connection.set(conn) - # Start a transaction - try: - self._trx = await conn.begin() - except Exception: - self.task_connection.clear() - await conn.close() - raise - return conn - - async def __aexit__(self, exc_type, exc_value, tb) -> None: - """Exit the transaction context - - If this is the top level context, then commit the transaction (unless - there was an error, in which case we should rollback instead). - If this is not the top level context, we don't need to do anything, - since everything will be committed or rolled back by that top level - context. - """ - if self._top: - # We may have gotten here from an error, in which case it is - # possible that we are also awaiting for a query to finish - # executing. So before rolling back the connection, make sure we - # can acquire the connection lock to ensure nothing else is - # executing - conn = self.task_connection.get() - async with conn.lock: - try: - if exc_type: # There was an exception - await self._trx.rollback() - else: - await self._trx.commit() - finally: - self.task_connection.clear() - await conn.close() - - -class ConditionalTransaction(transaction): - """Context manager to conditionally collect nested calls in a transaction - - This context manager allows you to conditionally execute code in a - transaction if nested within another transaction. If it is the top level - "transaction", this will behave like a standard `engine.acquire()`. Usage - is otherwise the same as for the parent transaction class. - - Examples: - This Will behave the same as `engine.acquire`, assuming this is not - nested under a transaction elsewhere - - async with ConditionalTransaction(engine) as trx: - ... - - This will behave as a nested transaction: - - async with transaction(engine) as outer_trx: - async with ConditionalTransaction(engine) as inner_trx: - ... 
- - """ - - def __init__(self, engine: aiopg.sa.engine.Engine) -> None: - """Initialize the context manager - - Args: - engine: An aiopg engine - - """ - super().__init__(engine) - # The connection object, if functioning as standard connection - self._conn = None - - async def __aenter__(self) -> Union[LockedConnection, SAConnection]: - """Conditionally establish the transaction context - - Returns: - Either a locked connection or a standard connection, depending on - whether this context manager is nested under a transaction. - - """ - conn = self.task_connection.get() - # If there is already a connection stored, act as a transaction - if conn: - return await super().__aenter__() - # Otherwise behave as a standard connection - self._conn = await self._engine.acquire() - return self._conn - - async def __aexit__(self, exc_type, exc_value, tb) -> None: - """Exit the transaction context""" - if self._conn is not None: - await self._conn.close() - else: - await super().__aexit__(exc_type, exc_value, tb) diff --git a/pynocular/backends/__init__.py b/pynocular/backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pynocular/backends/base.py b/pynocular/backends/base.py new file mode 100644 index 0000000..10fbd5b --- /dev/null +++ b/pynocular/backends/base.py @@ -0,0 +1,147 @@ +"""Contains base classes for defining database backends""" + +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Any, Dict, List, Optional, Set + +from pydantic import Field +from sqlalchemy import Column, Table +from sqlalchemy.sql.elements import BinaryExpression, UnaryExpression + + +@dataclass +class DatabaseModelConfig: + """Data class that holds parsed configuration for a database model. + + This class will be instantiated by a database model class at import time. 
+ """ + + fields: Dict[str, Field] + primary_keys: List[Column] + db_managed_fields: List[str] + table: Table + + @property + def primary_key_names(self) -> Set[str]: + """Set of primary key names""" + return {primary_key.name for primary_key in self.primary_keys} + + +class DatabaseModelBackend(ABC): + """Defines abstract base class that database backends must implement + + The backend is agnostic to the DatabaseModel. This means that the concept of a + DatabaseModel should not show up in any of the backend method implementations. + + * Methods should accept and return raw dictionaries. + * Each method should accept a DatabaseModelConfig instance, which contains references + to a table and columns that can be used to build queries suited to the backend. + + """ + + @abstractmethod + def transaction(self) -> Any: + """Create a new transaction + + Not all backends will be able to implement this method. + """ + pass + + @abstractmethod + async def select( + self, + config: DatabaseModelConfig, + where_expressions: Optional[List[BinaryExpression]] = None, + order_by: Optional[List[UnaryExpression]] = None, + limit: Optional[int] = None, + ) -> List[Dict[str, Any]]: + """Select a group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + where_expressions: A list of BinaryExpressions for the table that will be + `and`ed together for the where clause of the backend query + order_by: A list of criteria for the order_by clause + limit: The number of records to return + + Returns: + A list of record dicts + + """ + pass + + @abstractmethod + async def create_records( + self, config: DatabaseModelConfig, records: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """Create new group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. 
+ records: List of records to persist + + Returns: + list of newly created records + + """ + pass + + @abstractmethod + async def delete_records( + self, config: DatabaseModelConfig, where_expressions: List[BinaryExpression] + ) -> Optional[int]: + """Delete a group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + where_expressions: A list of BinaryExpressions for the table that will be + `and`ed together for the where clause of the backend query + + Returns: + number of records deleted (or None if the backend does not support) + + """ + pass + + @abstractmethod + async def update_records( + self, + config: DatabaseModelConfig, + where_expressions: Optional[List[BinaryExpression]], + values: Dict[str, Any], + ) -> List[Dict[str, Any]]: + """Update a group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + where_expressions: A list of BinaryExpressions for the table that will be + `and`ed together for the where clause of the backend query + values: The map of key-values to update all records to that match the + where_expressions + + Returns: + the updated database records + + """ + pass + + @abstractmethod + async def upsert( + self, config: DatabaseModelConfig, record: Dict[str, Any] + ) -> Dict[str, Any]: + """Upsert a single database record + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. 
+ record: The record to update + + Returns: + the updated record + + """ + pass diff --git a/pynocular/backends/context.py b/pynocular/backends/context.py new file mode 100644 index 0000000..815f904 --- /dev/null +++ b/pynocular/backends/context.py @@ -0,0 +1,37 @@ +"""Contains contextvar and helper functions to manage the active database backend""" + +from contextlib import contextmanager +import contextvars +import logging + +from .base import DatabaseModelBackend + +logger = logging.getLogger("pynocular") +_backend = contextvars.ContextVar("database_model_backend", default=None) + + +@contextmanager +def set_backend(backend: DatabaseModelBackend) -> None: + """Set the database backend in the aio context + + Args: + backend: Database backend instance + + """ + logger.debug("Setting backend") + token = _backend.set(backend) + try: + yield + finally: + _backend.reset(token) + logger.debug("Reset backend") + + +def get_backend() -> DatabaseModelBackend: + """Get the currently active database backend + + Returns: + database backend instance + + """ + return _backend.get() diff --git a/pynocular/backends/memory.py b/pynocular/backends/memory.py new file mode 100644 index 0000000..19a3528 --- /dev/null +++ b/pynocular/backends/memory.py @@ -0,0 +1,400 @@ +"""Contains the MemoryDatabaseModelBackend class""" + +import asyncio +from collections import defaultdict +from copy import deepcopy +from datetime import datetime +import functools +import itertools +from types import TracebackType +from typing import Any, Callable, Dict, Generator, List, Optional, Type +from uuid import uuid4 + +from sqlalchemy import Integer +from sqlalchemy.sql.elements import BinaryExpression, UnaryExpression +from sqlalchemy.sql.operators import desc_op + +from pynocular.backends.base import DatabaseModelBackend, DatabaseModelConfig +from pynocular.evaluate_column_element import evaluate_column_element +from pynocular.util import UUID_STR + + +class MemoryConnection: + """In-memory connection + 
+ This mirrors the databases library implementation. + """ + + def __init__( + self, records: Optional[Dict[str, List[Dict[str, Any]]]] = None + ) -> None: + """In-memory connection + + Args: + records: Optional map of table name to list of records to bootstrap the + in-memory database + + """ + self.records = records or defaultdict(list) + self._tmp_records = None + self._transaction_lock = asyncio.Lock() + self._transaction_stack: list[MemoryTransaction] = [] + + def backup_records(self) -> None: + """Backup the records in the connection to a temporary variable""" + self._tmp_records = deepcopy(self.records) + + def clear_backup(self) -> None: + """Clear the backup""" + self._tmp_records = None + + def restore_records(self) -> None: + """Restore the original copy of records""" + self.records = deepcopy(self._tmp_records) + + +class MemoryTransaction: + """In-memory transaction + + This mirrors the databases library implementation. + """ + + def __init__(self, connection: MemoryConnection) -> None: + """In-memory transaction + + Args: + connection: Connection instance containing records + + """ + self._connection = connection + + async def __aenter__(self) -> "MemoryTransaction": + """Called when entering `async with database.transaction()`""" + await self.start() + return self + + async def __aexit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + """Called when exiting `async with database.transaction()`""" + if exc_type is not None: + await self.rollback() + else: + await self.commit() + + def __await__(self) -> Generator: + """Called if using the low-level `transaction = await database.transaction()`""" + return self.start().__await__() + + def __call__(self, func: Callable) -> Callable: + """Called if using `@database.transaction()` as a decorator.""" + + @functools.wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + async with self: + return await 
func(*args, **kwargs) + + return wrapper + + async def start(self) -> "MemoryTransaction": + """Start a transaction""" + async with self._connection._transaction_lock: + is_root = not self._connection._transaction_stack + if is_root: + self._connection.backup_records() + self._connection._transaction_stack.append(self) + return self + + async def commit(self) -> None: + """Commit the transaction on success""" + async with self._connection._transaction_lock: + assert self._connection._transaction_stack[-1] is self + self._connection._transaction_stack.pop() + is_root = not self._connection._transaction_stack + if is_root: + self._connection.clear_backup() + + async def rollback(self) -> None: + """Rollback the transaction in case of failure""" + async with self._connection._transaction_lock: + assert self._connection._transaction_stack[-1] is self + self._connection._transaction_stack.pop() + is_root = not self._connection._transaction_stack + if is_root: + self._connection.restore_records() + + +class MemoryDatabaseModelBackend(DatabaseModelBackend): + """In-memory database model backend + + This backend stores records in memory. It translates SQLAlchemy expressions into + Python operations. It should only be used in tests. 
+ """ + + def __init__(self, records: Optional[Dict[str, List[Dict[str, Any]]]] = None): + """Initialize a MemoryDatabaseModelBackend + + Args: + records: Optional map of table name to list of records to bootstrap the + in-memory database + + """ + super().__init__() + # Create a "connection" to hold records and interface with transactions + self._connection = MemoryConnection(records) + # Serial primary key generator + self._pk_generator = itertools.count(start=1) + + @property + def records(self) -> Dict[str, List[Dict[str, Any]]]: + """Map of table name to list of records""" + return self._connection.records + + def _set_primary_key_values( + self, + config: DatabaseModelConfig, + record: Dict[str, Any], + ) -> Dict[str, Any]: + """Set default values on a record for the primary keys + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + record: The record to update + + Returns: + updated record + + """ + for primary_key in config.primary_keys: + value = ( + next(self._pk_generator) + if isinstance(primary_key.type, Integer) + else str(uuid4()) + ) + record.setdefault(primary_key.name, value) + + return record + + @staticmethod + def _update_db_managed_fields( + config: DatabaseModelConfig, + record: Dict[str, Any], + fetch_on_create: bool = False, + fetch_on_update: bool = False, + ) -> Dict[str, Any]: + """Update record values for db managed fields + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + record: The record to update + fetch_on_create: Flag that controls whether the db managed field will be + updated if it has the option `fetch_on_create=True`. Defaults to False. + fetch_on_update: Flag that controls whether the db managed field will be + updated if it has the option `fetch_on_update=True`. Defaults to False. 
+ + Raises: + NotImplementedError: if a field sets fetch_on_create or fetch_on_update to + true but its type is not supported + + Returns: + updated record + + """ + for name in config.db_managed_fields: + field = config.fields[name] + if ( + fetch_on_create + and field.field_info.extra.get("fetch_on_create") + and record.get(name) is None + ) or (fetch_on_update and field.field_info.extra.get("fetch_on_update")): + if field.type_ == datetime: + record[name] = datetime.utcnow() + elif field.type_ == UUID_STR: + record[name] = str(uuid4()) + else: + raise NotImplementedError(field.type_) + + return record + + def transaction(self) -> MemoryTransaction: + """Create a new transaction""" + return MemoryTransaction(self._connection) + + async def select( + self, + config: DatabaseModelConfig, + where_expressions: Optional[List[BinaryExpression]] = None, + order_by: Optional[List[UnaryExpression]] = None, + limit: Optional[int] = None, + ) -> List[Dict[str, Any]]: + """Select a group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. 
+ where_expressions: A list of BinaryExpressions for the table that will be + `and`ed together for the where clause of the backend query + order_by: A list of criteria for the order_by clause + limit: The number of records to return + + Returns: + list of records + + Raises: + InvalidFieldValue: The class is missing a database table + + """ + records = self.records[config.table.name] + + if where_expressions: + records = [ + record + for record in records + if all( + evaluate_column_element(expr, record) for expr in where_expressions + ) + ] + + if order_by: + for expr in order_by: + if isinstance(expr, UnaryExpression): + column = expr.element + reverse = expr.modifier == desc_op + else: + # Assume a column was provided with no explicit sorting modifier + column = expr + reverse = False + + records = sorted( + records, key=lambda r: r.get(column.name), reverse=reverse + ) + + if limit is None: + records[:limit] + + return records + + async def create_records( + self, config: DatabaseModelConfig, records: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """Create new group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + records: List of records to persist + + Returns: + list of newly created records + + """ + for record in records: + self._set_primary_key_values(config, record) + self._update_db_managed_fields( + config, record, fetch_on_create=True, fetch_on_update=True + ) + + self.records[config.table.name].extend(records) + + return self.records[config.table.name] + + async def delete_records( + self, config: DatabaseModelConfig, where_expressions: List[BinaryExpression] + ) -> int: + """Delete a group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. 
+ where_expressions: A list of BinaryExpressions for the table that will be + `and`ed together for the where clause of the backend query + + Returns: + number of records deleted + + """ + start_count = len(self.records[config.table.name]) + self.records[config.table.name][:] = [ + record + for record in self.records[config.table.name] + if not all( + evaluate_column_element(expr, record) for expr in where_expressions + ) + ] + return start_count - len(self.records[config.table.name]) + + async def update_records( + self, + config: DatabaseModelConfig, + where_expressions: Optional[List[BinaryExpression]], + values: Dict[str, Any], + ) -> List[Dict[str, Any]]: + """Update a group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + where_expressions: A list of BinaryExpressions for the table that will be + `and`ed together for the where clause of the backend query + values: The map of key-values to update all records to that match the + where_expressions + + Returns: + the updated database records + + """ + records = await self.select(config, where_expressions=where_expressions) + for record in records: + record.update(values) + self._update_db_managed_fields(config, record, fetch_on_update=True) + + return records + + async def upsert( + self, + config: DatabaseModelConfig, + record: Dict[str, Any], + ) -> Dict[str, Any]: + """Upsert a single database record + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. 
+ record: The record to update + + Returns: + the updated record + + """ + where_expressions = [ + primary_key == record.get(primary_key.name) + for primary_key in config.primary_keys + ] + existing_records = await self.select( + config, where_expressions=where_expressions, limit=1 + ) + if ( + all( + record.get(primary_key.name) is not None + for primary_key in config.primary_keys + ) + and existing_records + ): + # All primary keys are already set and a record was found so update + self._update_db_managed_fields(config, record, fetch_on_update=True) + records = await self.update_records(config, where_expressions, record) + return records[0] + else: + # Primary keys have not been set or there were no records found, so this is + # a new record + self._set_primary_key_values(config, record) + self._update_db_managed_fields( + config, record, fetch_on_create=True, fetch_on_update=True + ) + self.records[config.table.name].append(record) + return record diff --git a/pynocular/backends/sql.py b/pynocular/backends/sql.py new file mode 100644 index 0000000..97fa1ff --- /dev/null +++ b/pynocular/backends/sql.py @@ -0,0 +1,199 @@ +"""Contains the SQLDatabaseModelBackend class""" + +import logging +from typing import Any, Dict, List, Optional + +from databases import Database +from databases.core import Transaction +from sqlalchemy import and_ +from sqlalchemy.dialects.postgresql import insert +from sqlalchemy.sql.elements import BinaryExpression, UnaryExpression + +from pynocular.backends.base import DatabaseModelBackend, DatabaseModelConfig +from pynocular.exceptions import InvalidFieldValue, InvalidTextRepresentation + +logger = logging.getLogger("pynocular") + + +class SQLDatabaseModelBackend(DatabaseModelBackend): + """SQL database model backend + + This backend works with SQL dialects supported by https://www.encode.io/databases/. except sqlite* + + * sqlalchemy does not support the `RETURNING` clause. 
See https://github.com/sqlalchemy/sqlalchemy/issues/6195 + """ + + def __init__(self, db: Database): + """Initialize a SQLDatabaseModelBackend + + Args: + db: Database object that has already established a connection pool + + """ + self.db = db + + def transaction(self) -> Transaction: + """Create a new transaction + + Returns: + new transaction to be used as a context manager + + """ + return self.db.transaction() + + async def select( + self, + config: DatabaseModelConfig, + where_expressions: Optional[List[BinaryExpression]] = None, + order_by: Optional[List[UnaryExpression]] = None, + limit: Optional[int] = None, + ) -> List[Dict[str, Any]]: + """Select a group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + where_expressions: A list of BinaryExpressions for the table that will be + `and`ed together for the where clause of the backend query + order_by: A list of criteria for the order_by clause + limit: The number of records to return + + Returns: + list of records + + Raises: + InvalidFieldValue: The class is missing a database table + + """ + query = config.table.select() + if where_expressions is not None and len(where_expressions) > 0: + query = query.where(and_(*where_expressions)) + if order_by is not None and len(order_by) > 0: + query = query.order_by(*order_by) + if limit is not None and limit > 0: + query = query.limit(limit) + + try: + result = await self.db.fetch_all(query) + # The value was the wrong type. This usually happens with UUIDs. 
+ except InvalidTextRepresentation as e: + raise InvalidFieldValue(message=e.diag.message_primary) + + return [dict(record) for record in result] + + async def create_records( + self, config: DatabaseModelConfig, records: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """Create new group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + records: List of records to persist + + Returns: + list of newly created records + + """ + if not records: + return [] + + async with self.transaction(): + result = await self.db.fetch_all( + insert(config.table).values(records).returning(config.table) + ) + + return [dict(record) for record in result] + + async def delete_records( + self, config: DatabaseModelConfig, where_expressions: List[BinaryExpression] + ) -> Optional[int]: + """Delete a group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + where_expressions: A list of BinaryExpressions for the table that will be + `and`ed together for the where clause of the backend query + + Returns: + number of records deleted + + """ + async with self.transaction(): + query = ( + config.table.delete() + .where(and_(*where_expressions)) + .returning(*config.primary_keys) + ) + try: + result = await self.db.fetch_all(query) + return len(result) + # The value was the wrong type. This usually happens with UUIDs. 
+ except InvalidTextRepresentation as e: + raise InvalidFieldValue(message=e.diag.message_primary) + + async def update_records( + self, + config: DatabaseModelConfig, + where_expressions: Optional[List[BinaryExpression]], + values: Dict[str, Any], + ) -> List[Dict[str, Any]]: + """Update a group of records + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. + where_expressions: A list of BinaryExpressions for the table that will be + `and`ed together for the where clause of the backend query + values: The map of key-values to update all records to that match the + where_expressions + + Returns: + the updated database records + + """ + async with self.transaction(): + query = ( + config.table.update() + .where(and_(*where_expressions)) + .values(values) + .returning(config.table) + ) + try: + result = await self.db.fetch_all(query) + # The value was the wrong type. This usually happens with UUIDs. + except InvalidTextRepresentation as e: + raise InvalidFieldValue(message=e.diag.message_primary) + + return [dict(record) for record in result] + + async def upsert( + self, + config: DatabaseModelConfig, + record: Dict[str, Any], + ) -> Dict[str, Any]: + """Upsert a single database record + + Args: + config: DatabaseModelConfig instance that contains references to a table and + columns that can be used to build queries suited to the backend. 
+ record: The record to update + + Returns: + the updated record + + """ + async with self.transaction(): + logger.debug("Upsert starting") + query = ( + insert(config.table) + .values(record) + .on_conflict_do_update( + index_elements=config.primary_key_names, set_=record + ) + .returning(config.table) + ) + updated_record = await self.db.fetch_one(query) + logger.debug("Upsert complete") + return dict(updated_record) diff --git a/pynocular/config.py b/pynocular/config.py deleted file mode 100644 index ab125c0..0000000 --- a/pynocular/config.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Configuration for engines and models""" -import os - -POOL_RECYCLE = int(os.environ.get("POOL_RECYCLE", 300)) -DB_POOL_MIN_SIZE = int(os.environ.get("DB_POOL_MIN_SIZE", 2)) -DB_POOL_MAX_SIZE = int(os.environ.get("DB_POOL_MAX_SIZE", 10)) diff --git a/pynocular/database_model.py b/pynocular/database_model.py index 2703751..f9a11a1 100644 --- a/pynocular/database_model.py +++ b/pynocular/database_model.py @@ -1,16 +1,12 @@ -"""Base Model class that implements CRUD methods for database entities based on Pydantic dataclasses""" -import asyncio +"""Contains DatabaseModel class""" + from datetime import datetime from enum import Enum, EnumMeta -import inspect -from typing import Any, Callable, Dict, Generator, List, Optional, Sequence, Set, Union +from typing import Any, Dict, List, Optional, Sequence, TYPE_CHECKING from uuid import UUID as stdlib_uuid -from aenum import Enum as AEnum, EnumMeta as AEnumMeta -from pydantic import BaseModel, PositiveFloat, PositiveInt -from pydantic.types import UUID4 +from pydantic import BaseModel, PositiveFloat, PositiveInt, UUID4 from sqlalchemy import ( - and_, Boolean, Column, Enum as SQLEnum, @@ -18,193 +14,65 @@ Integer, MetaData, Table, + text, TIMESTAMP, VARCHAR, ) -from sqlalchemy.dialects.postgresql import insert, JSONB, UUID as sqlalchemy_uuid +from sqlalchemy.dialects.postgresql import JSONB, UUID as sqlalchemy_uuid from sqlalchemy.schema import 
FetchedValue from sqlalchemy.sql.base import ImmutableColumnCollection from sqlalchemy.sql.elements import BinaryExpression, UnaryExpression -from pynocular.engines import DBEngine, DBInfo +from pynocular.backends.base import DatabaseModelConfig +from pynocular.backends.context import get_backend from pynocular.exceptions import ( DatabaseModelMisconfigured, DatabaseModelMissingField, DatabaseRecordNotFound, - InvalidFieldValue, InvalidMethodParameterization, - InvalidTextRepresentation, - NestedDatabaseModelNotResolved, ) -from pynocular.nested_database_model import NestedDatabaseModel - - -def is_valid_uuid(string: str) -> bool: - """Check if a string is a valid UUID - - Args: - string: the string to check - - Returns: - Whether or not the string is a well-formed UUIDv4 - - """ - try: - stdlib_uuid(string, version=4) - return True - except (TypeError, AttributeError, ValueError): - return False - - -class UUID_STR(str): - """A string that represents a UUID4 value""" - - @classmethod - def __get_validators__(cls) -> Generator: - """Get the validators for the given class""" - yield cls.validate - - @classmethod - def validate(cls, v: Any) -> str: - """Function to validate the value - - Args: - v: The value to validate - - """ - if isinstance(v, stdlib_uuid) or (isinstance(v, str) and is_valid_uuid(v)): - return str(v) - else: - raise ValueError("invalid UUID string") - +from pynocular.util import UUID_STR -def nested_model( - db_model_class: "DatabaseModel", reference_field: str = None -) -> Callable: - """Generate a NestedModel class with dynamic model references - Args: - db_model_class: The specific model class that will be nested. This will be a - subclass of `DatabaseModel` - reference_field: The name of the field on the database table that this nested - model references. 
+class DatabaseModel(BaseModel): + """DatabaseModel defines a Pydantic model that abstracts away backend storage + This allows us to use the same object for both database queries and HTTP requests. + Methods on the DatabaseModel call through to the active backend implementation. The + backend handle queries and storage. """ - class NestedModel: - """NestedModel type for NestedDatabaseModels""" - - reference_field_name = reference_field - - @classmethod - def __get_validators__(cls) -> Generator: - """Get the validators for the given class""" - yield cls.validate - - @classmethod - def validate(cls, v: Union[UUID_STR, "DatabaseModel"]) -> NestedDatabaseModel: - """Validate value and generate a nested database model""" - # If value is a uuid then create a NestedDatabaseModel, otherwise just - # Set the DatabaseModel as the value - if is_valid_uuid(v): - return NestedDatabaseModel(db_model_class, v) - else: - return NestedDatabaseModel(db_model_class, v.get_primary_id(), v) - - return NestedModel - - -def database_model(table_name: str, database_info: DBInfo) -> "DatabaseModel": - """Decorator that adds SQL functionality to Pydantic BaseModel objects - - Args: - table_name: Name of the table this model represents in the database - database_info: Database connection info for the database to connect to - - Raises: - DatabaseModelMisconfigured: Raised when class with this decorator is not a pydantic.BaseModel - subclass. We depend on the class implementing a some specific things and currently don't - support any other type of dataclass. 
- - """ - - def wrapped(cls): - if BaseModel not in inspect.getmro(cls): - raise DatabaseModelMisconfigured( - "Model is not subclass of pydantic.BaseModel" - ) - - cls.__bases__ += (DatabaseModel,) - cls.initialize_table(table_name, database_info) - - return cls + if TYPE_CHECKING: + # Set by _process_config + _config: DatabaseModelConfig - return wrapped + @staticmethod + def _process_config(cls, table_name: str) -> DatabaseModelConfig: + """Process configuration passed into the DatabaseModel subclass signature - -class DatabaseModel: - """Adds database functionality to a Pydantic BaseModel - - A DatabaseModel is a Pydantic based model along with a SQLAlchemy - table object. This allows us to use the same object for both - database queries and HTTP requests - - """ - - # Define metadata for the database connection on the class level so we don't - # have to recaluclate the table for each database call - _table: Table = None - _database_info: DBInfo = None - - # We may have times where we need a compound primary key. 
- # We store each one into this list and have our query functions - # handle using it - _primary_keys: List[Column] = None - - # Some fields are exclusively produced by the database server - # For all save operations, we need to get those values from the database - # These are the server_default and server_onupdate functions in SQLAlchemy - _db_managed_fields: List[str] = None - - # The following tables track which attributes on the model are nested model - # references - # Some nested model attributes may have different names than their actual db table; - # For example; on an App we may have an `org` attribute but the db field is - # `organzation_id` - - # In order to manage this we also need maps from attribute name to table_field_name - # and back - _nested_model_attributes: Set[str] = None - _nested_attr_table_field_map: Dict[str, str] = None - _nested_table_field_attr_map: Dict[str, str] = None - - # This can be used to access the table when defining where expressions - columns: ImmutableColumnCollection = None - - @classmethod - def initialize_table(cls, table_name: str, database_info: DBInfo) -> None: - """Returns a SQLAlchemy table definition to expose SQLAlchemy functions - - This method should cache the Table on the __table__ class property. - We don't want to have to recaluclate the table for every SQL call, - so it's desirable to cache this at the class level. + The primary job of this method is to generate a DatabaseModelConfig instance, + specifically a SQLAlchemy table definition for backend implementations to + leverage. 
Returns: - A Table object based on the Field properties defined from the Pydantic model + DatabaseModelConfig instance Raises: - DatabaseModelMisconfigured: When the class does not defined certain properties; - or cannot be converted to a Table + DatabaseModelMisconfigured: When the class does not define certain + properties or cannot be converted to a SQLAlchemy Table """ - cls._primary_keys = [] - cls._database_info = database_info - cls._db_managed_fields = [] - cls._nested_attr_table_field_map = {} - cls._nested_table_field_attr_map = {} - cls._nested_model_attributes = set() - - columns = [] + # We may have times where we need a compound primary key. + # We store each one into this list and have our query functions + # handle using it + primary_keys: List[Column] = [] + + # Some fields are exclusively produced by the database server + # For all save operations, we need to get those values from the database + # These are the server_default and server_onupdate functions in SQLAlchemy + db_managed_fields: List[str] = [] + + columns: List[Column] = [] for field in cls.__fields__.values(): name = field.name is_nullable = not field.required @@ -229,7 +97,7 @@ def initialize_table(cls, table_name: str, database_info: DBInfo) -> None: or field.type_.__name__ == "ConstrainedFloatValue" ): type = Float - elif field.type_.__class__ in (AEnumMeta, EnumMeta): + elif field.type_.__class__ == EnumMeta: type = SQLEnum( field.type_, values_callable=lambda obj: [e.value for e in obj] ) @@ -241,77 +109,121 @@ def initialize_table(cls, table_name: str, database_info: DBInfo) -> None: type = sqlalchemy_uuid() elif field.type_ is datetime: type = TIMESTAMP(timezone=True) - elif field.type_.__name__ == "NestedModel": - cls._nested_model_attributes.add(name) - # If the field name on the NestedModel type is not None, use that for the - # column name - if field.type_.reference_field_name is not None: - cls._nested_attr_table_field_map[ - name - ] = field.type_.reference_field_name - 
cls._nested_table_field_attr_map[ - field.type_.reference_field_name - ] = name - name = field.type_.reference_field_name - - # Assume all IDs are UUIDs for now - type = sqlalchemy_uuid() # TODO - how are people using this today? Is there a class we need to make or can we reuse one # elif field.type_ is bit: # type = Bit else: raise DatabaseModelMisconfigured(f"Unsupported type {field.type_}") - column = Column( - name, type, primary_key=is_primary_key, nullable=is_nullable - ) - + server_default = None if fetch_on_create: - column.server_default = FetchedValue() - cls._db_managed_fields.append(name) + if field.type_ in (UUID4, stdlib_uuid, UUID_STR): + server_default = text("uuid_generate_v4()") + else: + server_default = FetchedValue() + db_managed_fields.append(name) + server_onupdate = None if fetch_on_update: - column.server_onupdate = FetchedValue() - cls._db_managed_fields.append(name) + server_onupdate = FetchedValue() + db_managed_fields.append(name) + + column = Column( + name, + type, + primary_key=is_primary_key, + nullable=is_nullable, + server_default=server_default, + server_onupdate=server_onupdate, + ) if is_primary_key: - cls._primary_keys.append(column) + primary_keys.append(column) columns.append(column) - cls._table = Table(table_name, MetaData(), *columns) - cls.columns = cls._table.c + # Define metadata for the database connection on the class level so we don't + # have to recalculate the table for each database call + table = Table(table_name, MetaData(), *columns) + + return DatabaseModelConfig( + fields={**cls.__fields__}, + db_managed_fields=db_managed_fields, + primary_keys=primary_keys, + table=table, + ) + + def __init_subclass__(cls, table_name: str, **kwargs) -> None: + """Hook for processing class configuration when DatabaseModel is subclassed + + Args: + table_name: Name of the database table + + """ + super().__init_subclass__(**kwargs) + cls._config = DatabaseModel._process_config(cls, table_name) + + @classmethod + @property + 
def table(cls) -> Table: + """Returns SQLAlchemy table object for the model""" + return cls._config.table @classmethod - def get_table(cls) -> Table: - """Get the table object associated with this model + @property + def columns(cls) -> ImmutableColumnCollection: + """Reference to the model's table's column collection""" + return cls.table.c + + @classmethod + def from_dict(cls, _dict: Dict[str, Any]) -> "DatabaseModel": + """Instantiate a DatabaseModel object from a dict record + + Note: + This is the base implementation and is set up so classes that subclass this + one don't have to make this boilerplate if they don't need to + + Args: + _dict: The dictionary form of the DatabaseModel Returns: - The SQLALChemy table object for the model + The DatabaseModel object """ - return cls._table + return cls(**_dict) - @classmethod - async def get_with_refs(cls, *args: Any, **kwargs: Any) -> "DatabaseModel": - """Gets the DatabaseModel associated with any nested key references resolved + def to_dict( + self, serialize: bool = False, include_keys: Optional[Sequence] = None + ) -> Dict[str, Any]: + """Create a dict from the DatabaseModel object + + Note: + This implementation is only valid if __base_props__ is set for the instance Args: - args: The column id for the object's primary key - kwargs: The columns and ids that make up the object's composite primary key + serialize: A flag determining whether or not to serialize enum types into + strings + include_keys: Set of keys that should be included in the results. If not + provided or empty, all keys will be included. 
Returns: - A DatabaseModel object representing the record in the db if one exists + A dict of the DatabaseObject object + + Raises: + NotImplementedError: This function implementation is being used without + __base_props__ being set """ - obj = await cls.get(*args, **kwargs) - gatherables = [ - (getattr(obj, prop_name)).fetch() - for prop_name in cls._nested_model_attributes - ] - await asyncio.gather(*gatherables) + _dict = {} + for prop_name, prop_value in self.dict().items(): + if serialize: + if isinstance(prop_value, Enum): + prop_value = prop_value.name - return obj + if not include_keys or prop_name in include_keys: + _dict[prop_name] = prop_value + + return _dict @classmethod async def get(cls, *args: Any, **kwargs: Any) -> "DatabaseModel": @@ -333,28 +245,71 @@ async def get(cls, *args: Any, **kwargs: Any) -> "DatabaseModel": if ( (len(args) > 1) or (len(args) == 1 and len(kwargs) > 0) - or (len(args) == 1 and len(cls._primary_keys) > 1) + or (len(args) == 1 and len(cls._config.primary_keys) > 1) or (len(args) == 0 and len(kwargs) == 0) ): raise InvalidMethodParameterization("get", args=args, kwargs=kwargs) if len(args) == 1: - primary_key_dict = {cls._primary_keys[0].name: args[0]} + primary_key_dict = {cls._config.primary_keys[0].name: args[0]} else: primary_key_dict = kwargs original_primary_key_dict = primary_key_dict.copy() where_expressions = [] - for primary_key in cls._primary_keys: + for primary_key in cls._config.primary_keys: primary_key_value = primary_key_dict.pop(primary_key.name) where_expressions.append(primary_key == primary_key_value) records = await cls.select(where_expressions=where_expressions, limit=1) if len(records) == 0: - raise DatabaseRecordNotFound(cls._table.name, **original_primary_key_dict) + raise DatabaseRecordNotFound( + cls._config.table.name, **original_primary_key_dict + ) return records[0] + @classmethod + async def create(cls, **data) -> "DatabaseModel": + """Create a new instance of the this DatabaseModel and save it 
+ + Args: + kwargs: The parameters for the instance + + Returns: + The new DatabaseModel instance + + """ + new = cls(**data) + await new.save() + + return new + + def get_primary_id(self) -> Any: + """Standard interface for returning the id of a field + + This assumes that there is a single primary id, otherwise this returns `None` + + Returns: + The ID value for this DatabaseModel instance + + """ + if len(self._config.primary_keys) > 1: + return None + + return getattr(self, self._config.primary_keys[0].name) + + async def fetch(self) -> None: + """Gets the latest of the object from the database and updates itself""" + get_params = { + primary_key.name: getattr(self, primary_key.name) + for primary_key in self._config.primary_keys + } + new_self = await self.get(**get_params) + + for attr_name, new_attr_val in new_self.dict().items(): + setattr(self, attr_name, new_attr_val) + @classmethod async def get_list(cls, **kwargs: Any) -> List["DatabaseModel"]: """Fetches the DatabaseModel for based on the provided kwargs @@ -372,23 +327,21 @@ async def get_list(cls, **kwargs: Any) -> List["DatabaseModel"]: exist on the database table """ - where_clause_list = [] + where_expressions = [] for field_name, db_field_value in kwargs.items(): - db_field_name = cls._nested_attr_table_field_map.get(field_name, field_name) - try: - db_field = getattr(cls._table.c, db_field_name) + db_field = getattr(cls._config.table.c, field_name) except AttributeError: - raise DatabaseModelMissingField(cls.__name__, db_field_name) + raise DatabaseModelMissingField(cls.__name__, field_name) if isinstance(db_field_value, list): exp = db_field.in_(db_field_value) else: exp = db_field == db_field_value - where_clause_list.append(exp) + where_expressions.append(exp) - return await cls.select(where_expressions=where_clause_list) + return await cls.select(where_expressions=where_expressions) @classmethod async def select( @@ -412,41 +365,13 @@ async def select( DatabaseModelMisconfigured: The class 
is missing a database table """ - async with ( - await DBEngine.transaction(cls._database_info, is_conditional=True) - ) as conn: - query = cls._table.select() - if where_expressions is not None and len(where_expressions) > 0: - query = query.where(and_(*where_expressions)) - if order_by is not None and len(order_by) > 0: - query = query.order_by(*order_by) - if limit is not None and limit > 0: - query = query.limit(limit) - - try: - result = await conn.execute(query) - # The value was the wrong type. This usually happens with UUIDs. - except InvalidTextRepresentation as e: - raise InvalidFieldValue(message=e.diag.message_primary) - records = await result.fetchall() - - return [cls.from_dict(dict(record)) for record in records] - - @classmethod - async def create(cls, **data) -> "DatabaseModel": - """Create a new instance of the this DatabaseModel and save it - - Args: - kwargs: The parameters for the instance - - Returns: - The new DatabaseModel instance - - """ - new = cls(**data) - await new.save() - - return new + records = await get_backend().select( + cls._config, + where_expressions=where_expressions, + order_by=order_by, + limit=limit, + ) + return [cls.from_dict(record) for record in records] @classmethod async def create_list(cls, models: List["DatabaseModel"]) -> List["DatabaseModel"]: @@ -461,73 +386,70 @@ async def create_list(cls, models: List["DatabaseModel"]) -> List["DatabaseModel list of new database models that have been saved """ - if not models: - return [] - values = [] for model in models: dict_obj = model.to_dict() - for field in cls._db_managed_fields: - # Remove any fields that the database calculates + + # Remove any fields that the database calculates + for field in cls._config.db_managed_fields: del dict_obj[field] + + # Remove keys for primary keys that don't have a value. This indicates that + # the backend will generate new values. 
+ for field in cls._config.primary_keys: + if field.name in dict_obj and dict_obj[field.name] is None: + del dict_obj[field.name] + values.append(dict_obj) - async with ( - await DBEngine.transaction(cls._database_info, is_conditional=False) - ) as conn: - result = await conn.execute( - insert(cls._table).values(values).returning(cls._table) - ) - # Set db managed column information on the object - rows = await result.fetchall() - for row, model in zip(rows, models): - record_dict = dict(row) - for column in cls._db_managed_fields: - col_val = record_dict.get(column) - if col_val is not None: - setattr(model, column, col_val) + records = await get_backend().create_records(cls._config, values) + + # Set db managed column information on the object + for record, model in zip(records, models): + for column in cls._config.db_managed_fields: + col_val = record.get(column) + if col_val is not None: + setattr(model, column, col_val) + + for field in cls._config.primary_keys: + value = record.get(field.name) + if value is not None: + setattr(model, field.name, value) return models @classmethod - async def delete_records(cls, **kwargs: Any) -> None: + async def delete_records(cls, **kwargs: Any) -> Optional[int]: """Execute a DELETE on a DatabaseModel with the provided kwargs Args: kwargs: The filterable key/value pairs for the where clause. 
These will be `and`ed together + Returns: + number of records deleted (or None if the backend does not support) + Raises: DatabaseModelMisconfigured: The class is missing a database table DatabaseModelMissingField: One of the fields provided in the query does not exist on the database table """ - where_clause_list = [] + where_expressions = [] for field_name, db_field_value in kwargs.items(): - db_field_name = cls._nested_attr_table_field_map.get(field_name, field_name) - try: - db_field = getattr(cls._table.c, db_field_name) + db_field = getattr(cls._config.table.c, field_name) except AttributeError: - raise DatabaseModelMissingField(cls.__name__, db_field_name) + raise DatabaseModelMissingField(cls.__name__, field_name) if isinstance(db_field_value, list): exp = db_field.in_(db_field_value) else: exp = db_field == db_field_value - where_clause_list.append(exp) + where_expressions.append(exp) - async with ( - await DBEngine.transaction(cls._database_info, is_conditional=False) - ) as conn: - query = cls._table.delete().where(and_(*where_clause_list)) - try: - await conn.execute(query) - # The value was the wrong type. This usually happens with UUIDs. 
- except InvalidTextRepresentation as e: - raise InvalidFieldValue(message=e.diag.message_primary) + return await get_backend().delete_records(cls._config, where_expressions) @classmethod async def update_record(cls, **kwargs: Any) -> "DatabaseModel": @@ -545,19 +467,14 @@ async def update_record(cls, **kwargs: Any) -> "DatabaseModel": """ where_expressions = [] primary_key_dict = {} - for primary_key in cls._primary_keys: + for primary_key in cls._config.primary_keys: primary_key_value = kwargs.pop(primary_key.name) where_expressions.append(primary_key == primary_key_value) primary_key_dict[primary_key.name] = primary_key_value - modified_kwargs = {} - for field_name, value in kwargs.items(): - db_field_name = cls._nested_attr_table_field_map.get(field_name, field_name) - modified_kwargs[db_field_name] = value - - updated_records = await cls.update(where_expressions, modified_kwargs) + updated_records = await cls.update(where_expressions, kwargs) if len(updated_records) == 0: - raise DatabaseRecordNotFound(cls._table.name, **primary_key_dict) + raise DatabaseRecordNotFound(cls._config.table.name, **primary_key_dict) return updated_records[0] @classmethod @@ -579,184 +496,39 @@ async def update( DatabaseModelMisconfigured: The class is missing a database table """ - async with ( - await DBEngine.transaction(cls._database_info, is_conditional=False) - ) as conn: - query = ( - cls._table.update() - .where(and_(*where_expressions)) - .values(**values) - .returning(cls._table) + return [ + cls.from_dict(record) + for record in await get_backend().update_records( + cls._config, where_expressions=where_expressions, values=values ) - try: - results = await conn.execute(query) - # The value was the wrong type. This usually happens with UUIDs. 
- except InvalidTextRepresentation as e: - raise InvalidFieldValue(message=e.diag.message_primary) - - return [cls.from_dict(dict(record)) for record in await results.fetchall()] - - async def save(self, include_nested_models=False) -> None: - """Update the database record this object represents with its current state - - Args: - include_nested_models: If True, any nested models should get saved before - this object gets saved - - """ + ] + async def save(self) -> None: + """Update the database record this object represents with its current state""" dict_self = self.to_dict() - - primary_key_names = [primary_key.name for primary_key in self._primary_keys] - - for field in self._db_managed_fields: - if field in primary_key_names and dict_self[field] is not None: + for field in self._config.db_managed_fields: + if field in self._config.primary_key_names and dict_self[field] is not None: continue # Remove any fields that the database calculates del dict_self[field] - async with ( - await DBEngine.transaction(self._database_info, is_conditional=False) - ) as conn: - # If flag is set, first try to persist any nested models. 
This needs to - # happen inside of the transaction so if something fails everything gets - # rolled back - if include_nested_models: - for attr_name in self._nested_model_attributes: - try: - obj = getattr(self, attr_name) - if obj is not None: - await obj.save() - except NestedDatabaseModelNotResolved: - # If the object was never resolved than it already exists in the - # DB and the DB has the latest state - continue - - record = await conn.execute( - insert(self._table) - .values(dict_self) - .on_conflict_do_update(index_elements=primary_key_names, set_=dict_self) - .returning(self._table) - ) - - row = await record.fetchone() - - for field in self._db_managed_fields: - setattr(self, field, row[field]) - - def get_primary_id(self) -> Any: - """Standard interface for returning the id of a field - - This assumes that there is a single primary id, otherwise this returns `None` - - Returns: - The ID value for this DatabaseModel instance - - """ - if len(self._primary_keys) > 1: - return None - - return getattr(self, self._primary_keys[0].name) - - async def fetch(self, resolve_references: bool = False) -> None: - """Gets the latest of the object from the database and updates itself - - Args: - resolve_references: If True, resolve any nested key references - - """ - # Get the latest version of self - get_params = { - primary_key.name: getattr(self, primary_key.name) - for primary_key in self._primary_keys - } - if resolve_references: - new_self = await self.get_with_refs(**get_params) - else: - new_self = await self.get(**get_params) - - for attr_name, new_attr_val in new_self.dict().items(): - setattr(self, attr_name, new_attr_val) + record = await get_backend().upsert( + self._config, + dict_self, + ) + for field in self._config.db_managed_fields: + existing_value = getattr(self, field, None) + column: Column = self._config.table.c[field] + if ( + column.server_default is not None and existing_value is None + ) or column.server_default is None: + setattr(self, 
field, record[field]) async def delete(self) -> None: """Delete this record from the database""" - - async with ( - await DBEngine.transaction(self._database_info, is_conditional=False) - ) as conn: - where_expressions = [ - getattr(self._table.c, pkey.name) == getattr(self, pkey.name) - for pkey in self._primary_keys - ] - query = self._table.delete().where(and_(*where_expressions)) - try: - await conn.execute(query) - # The value was the wrong type. This usually happens with UUIDs. - except InvalidTextRepresentation as e: - raise InvalidFieldValue(message=e.diag.message_primary) - - @classmethod - def from_dict(cls, _dict: Dict[str, Any]) -> "DatabaseModel": - """Instantiate a DatabaseModel object from a dict record - - Note: - This is the base implementation and is set up so classes that subclass this - one don't have to make this boilerplate if they don't need to - - Args: - _dict: The dictionary form of the DatabaseModel - - Returns: - The DatabaseModel object - - """ - modified_dict = {} - for key, value in _dict.items(): - modified_key = cls._nested_table_field_attr_map.get(key, key) - modified_dict[modified_key] = value - return cls(**modified_dict) - - def to_dict( - self, serialize: bool = False, include_keys: Optional[Sequence] = None - ) -> Dict[str, Any]: - """Create a dict from the DatabaseModel object - - Note: - This implementation is only valid if __base_props__ is set for the instance - - Args: - serialize: A flag determining whether or not to serialize enum types into - strings - include_keys: Set of keys that should be included in the results. If not - provided or empty, all keys will be included. 
- - Returns: - A dict of the DatabaseObject object - - Raises: - NotImplementedError: This function implementation is being used without - __base_props__ being set - - """ - _dict = {} - for prop_name, prop_value in self.dict().items(): - if serialize: - if isinstance(prop_value, Enum): - prop_value = prop_value.name - elif isinstance(prop_value, AEnum): - prop_value = prop_value.value - - if prop_name in self._nested_model_attributes: - # self.dict() will serialize any BaseModels into a dict so fetch the - # actual object from self - temp_prop_value = getattr(self, prop_name) - prop_name = self._nested_attr_table_field_map.get(prop_name, prop_name) - # temp_prop_value can be `None` if the nested key is optional - if temp_prop_value is not None: - prop_value = temp_prop_value.get_primary_id() - - if not include_keys or prop_name in include_keys: - _dict[prop_name] = prop_value - - return _dict + where_expressions = [ + getattr(self._config.table.c, pkey.name) == getattr(self, pkey.name) + for pkey in self._config.primary_keys + ] + return await get_backend().delete_records(self._config, where_expressions) diff --git a/pynocular/db_util.py b/pynocular/db_util.py deleted file mode 100644 index 7ad1ebb..0000000 --- a/pynocular/db_util.py +++ /dev/null @@ -1,196 +0,0 @@ -"""Database utility functions""" - -import logging -import re - -from aiopg.sa.connection import SAConnection -import sqlalchemy as sa -from sqlalchemy.sql.ddl import CreateTable - -from pynocular.engines import DBEngine, DBInfo -from pynocular.exceptions import InvalidSqlIdentifierErr - -logger = logging.getLogger() - - -async def is_database_available(db_info: DBInfo) -> bool: - """Check if the database is available - - Args: - db_info: A database's connection information - - Returns: - true if the DB exists - - """ - engine = None - try: - engine = await DBEngine.get_engine(db_info) - await engine.acquire() - return True - except Exception: - return False - finally: - if engine: - engine.close() - - 
-async def create_new_database(connection_string: str, db_name: str) -> None: - """Create a new database database for testing - - Args: - connection_string: A connection string for the database - db_name: the name of the database to create - - """ - existing_db = DBInfo(connection_string) - conn = await (await DBEngine.get_engine(existing_db)).acquire() - # End existing commit - await conn.execute("commit") - # Create db - await conn.execute(f"drop database if exists {db_name}") - await conn.execute(f"create database {db_name}") - await conn.close() - - -async def create_table(db_info: DBInfo, table: sa.Table) -> None: - """Create table in database - - Args: - db_info: Information for the database to connect to - table: The table to create - - """ - engine = await DBEngine.get_engine(db_info) - conn = await engine.acquire() - await conn.execute(CreateTable(table)) - await conn.close() - - -async def drop_table(db_info: DBInfo, table: sa.Table) -> None: - """Drop table in database - - Args: - db_info: Information for the database to connect to - table: The table to create - - """ - engine = await DBEngine.get_engine(db_info) - conn = await engine.acquire() - await conn.execute(f"drop table if exists {table.name}") - await conn.close() - - -async def setup_datetime_trigger(conn: SAConnection) -> None: - """Set up created_at/updated_at datetime trigger - - Args: - conn: an async sqlalchemy connection - - """ - await conn.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";') - await conn.execute('CREATE EXTENSION IF NOT EXISTS "plpgsql";') - await conn.execute( - """ - CREATE OR REPLACE FUNCTION update_timestamp_columns() - RETURNS TRIGGER AS $$ - BEGIN - IF NEW.created_at IS NULL THEN - NEW.created_at = now(); - END IF; - - NEW.updated_at = now(); - RETURN NEW; - END; - $$ language 'plpgsql'; - """ - ) - - -async def add_datetime_trigger(conn: SAConnection, table: str) -> None: - """Helper method for adding created_at and updated_at datetime triggers on a table - - - 
Args: - conn: an async sqlalchemy connection - table: The name of the table to add an edit trigger for - - """ - await setup_datetime_trigger(conn) - await conn.execute( - """ - CREATE TRIGGER update_{table}_timestamps - BEFORE INSERT OR UPDATE ON {table} - FOR EACH ROW EXECUTE PROCEDURE update_timestamp_columns(); - """.format( - table=table - ) - ) - - -async def remove_datetime_trigger(conn: SAConnection, table: str) -> None: - """Helper method for removing datetime triggers on a table - - Args: - conn: an async sqlalchemy connection - table: The name of the table to remove a trigger for - - """ - await conn.execute( - "DROP TRIGGER IF EXISTS update_{table}_timestamps on {table}".format( - table=table - ) - ) - - -def get_cleaned_db_name( - name: str, - lowercase: bool = True, - remove_leading_numbers: bool = True, - replace_spaces_with_underscores: bool = True, - replace_dashes_with_underscores: bool = True, - remove_special_chars: bool = True, - limit: int = 128, -) -> str: - """Gets a name cleaned to adhere to sql naming conventions - - Args: - name: An uncleaned name (such as a table or column name) - lowercase: Whether all letters in the name should be lowercased - remove_leading_numbers: Whether leading numbers should be stripped - replace_spaces_with_underscores: Whether spaces should be replaced with underscores - replace_dashes_with_underscores: Whether dashes should be replaced with underscores - remove_special_chars: Whether any characters other than letters, numbers, and - underscores should be removed from the name - limit: the maximum allowed length of the name after cleaning. The default value - is the Athena/Glue column name length limit. 
- - Returns: - A cleaned name to be used in a relational database - - Raises: - :py:exc:`InvalidSqlIdentifierErr`: If the name is still invalid - after being cleaned - - """ - cleaned_name = name - - if lowercase: - cleaned_name = cleaned_name.lower() - - if remove_leading_numbers: - cleaned_name = cleaned_name.lstrip("0123456789") - - if replace_spaces_with_underscores: - cleaned_name = "_".join(cleaned_name.split(" ")) - - if replace_dashes_with_underscores: - cleaned_name = "_".join(cleaned_name.split("-")) - - if remove_special_chars: - cleaned_name = re.sub(r"[^a-zA-Z0-9_]*", "", cleaned_name) - - if len(cleaned_name) == 0 or len(cleaned_name) > limit: - raise InvalidSqlIdentifierErr(cleaned_name) - - return cleaned_name diff --git a/pynocular/engines.py b/pynocular/engines.py deleted file mode 100644 index f79a453..0000000 --- a/pynocular/engines.py +++ /dev/null @@ -1,253 +0,0 @@ -"""Functions for getting database engines and connections""" -import asyncio -from enum import Enum -from functools import wraps -import logging -from typing import Any, Callable, Dict, NamedTuple, Optional, Tuple, Union - -from aiopg import Connection as AIOPGConnection -from aiopg.sa import create_engine, Engine - -from pynocular.aiopg_transaction import ( - ConditionalTransaction, - transaction as Transaction, -) -from pynocular.config import POOL_RECYCLE - -logger = logging.getLogger(__name__) - -_engines: Dict[Tuple[str, str], Engine] = {} - - -async def get_aiopg_engine( - conn_str: str, - enable_hstore: bool = True, - force: bool = False, - application_name: str = None, - if_exists: bool = False, -) -> Optional[Engine]: - """Returns the aiopg SQLAlchemy connection engine for a given connection string. - - This function lazily creates the connection engine if it doesn't already - exist. Callers of this function shouldn't close the engine. It will be - closed automatically when the process exits. 
- - This function exists to keep a single engine (and thus a single connection - pool) per database, which prevents us from maxing out the number of - connections the database server will give us. - - We include the hash of the event loop in the cache key because otherwise, if the - event loop closes, the cached engine will raise an exception when it's used. - - Args: - conn_str: The connection string for the engine - enable_hstore: determines if the hstore should be enabled on the database. - Redshift requires this to be disabled. - force: Force the creation of the engine regardless of the cache - application_name: Arbitrary string that shows up in queries to the - ``pg_stat_activity`` view for tracking the source of database connections. - if_exists: Only return the engine if it already exists - - Returns: - aiopg engine for the connection string - - """ - global _engines - logger.debug("Attempting to get DB engine") - loop_hash = str(hash(asyncio.get_event_loop())) - cache_key = (loop_hash, conn_str) - engine = _engines.get(cache_key) - - if if_exists and engine is None: - return None - - if engine is None or force or engine.closed: - engine = await create_engine( - conn_str, - enable_hstore=enable_hstore, - application_name=application_name, - pool_recycle=POOL_RECYCLE, - ) - _engines[cache_key] = engine - logger.debug(f"DB engine created successfully: {engine}") - - logger.debug("DB engine retrieved") - return engine - - -class DatabaseType(Enum): - """Database type to differentiate engines and pools""" - - aiopg_engine = "aiopg_engine" - - -class DBInfo(NamedTuple): - """Data class for a database's connection information""" - - connection_string: str - enable_hstore: bool = True - engine_type: DatabaseType = DatabaseType.aiopg_engine - - -class DBEngine: - """Wrapper over database engine types""" - - @classmethod - async def _get_engine( - cls, - db_info: DBInfo, - force: bool = False, - application_name: str = None, - if_exists: bool = False, - ) -> 
Optional[Engine]: - """Get an async db engine depending on the database configuration. - - Args: - db_info: Information for making the database connection - force: Force the creation of the pool regardless of the cache - application_name: Arbitrary string that shows up in queries to the - ``pg_stat_activity`` view for tracking the source of database - connections. - if_exists: Only return the engine or pool if it already exists - - Returns: - database engine or pool - - Raises: - :py:exec:`ValueError` if the database type isn't supported - - """ - if db_info.engine_type == DatabaseType.aiopg_engine: - return await get_aiopg_engine( - db_info.connection_string, - enable_hstore=db_info.enable_hstore, - force=force, - application_name=application_name, - if_exists=if_exists, - ) - - raise ValueError(f"Unsupported database type: {db_info.engine_type}") - - @classmethod - async def get_engine( - cls, db_info: DBInfo, force: bool = False, application_name: str = None - ) -> Union[Engine]: - """Get a SQLAlchemy connection engine for a given database alias. - - See :py:func:`.get_engine` for more details. - - Args: - db_info: database connection information - force: Force the creation of the pool regardless of the cache - application_name: Arbitrary string that shows up in queries to the - ``pg_stat_activity`` view for tracking the source of database - connections. - - Returns: - database engine - - """ - return await cls._get_engine( - db_info, force=force, application_name=application_name - ) - - @classmethod - async def acquire(cls, db_info: DBInfo) -> Union[AIOPGConnection]: - """Acquire a SQLAlchemy connection for a given database alias. - - This is a convenience function that first gets/creates the engine or pool then - calls acquire. This returns a context manager. 
- - Args: - db_info: database connection information - - Returns: - context manager that yields the connection - - """ - engine = await cls._get_engine(db_info) - return engine.acquire() - - @classmethod - async def transaction( - cls, db_info: DBInfo, is_conditional: bool = False - ) -> Union[ConditionalTransaction, Transaction]: - """Acquire a SQLAlchemy transaction for a given database alias. - - This is a convenience function that first gets/creates the engine then calls - ConditionalTransaction. This returns a context manager. - - Args: - db_info: database connection information - is_conditional: If true, returns a conditional transaction. - - Returns: - Transaction or ConditionalTransaction for use as a context manager - - """ - if db_info.engine_type != DatabaseType.aiopg_engine: - raise ValueError( - f"Transaction does not support database type {db_info.engine_type}" - ) - engine = await cls._get_engine(db_info) - return ConditionalTransaction(engine) if is_conditional else Transaction(engine) - - @classmethod - def open_transaction(cls, db_info: DBInfo) -> Callable: - """Decorator that wraps the function call in a database transaction - - Args: - database_alias: The database alias to use for the transaction - - Returns: - The wrapped function call - - """ - - def parameterized_decorator(fn: Callable) -> Callable: - """Function that will create the wrapper function - - Args: - fn: The function to wrap - - Returns: - The wrapped function - - """ - - @wraps(fn) - async def wrapped_funct(*args: Any, **kwargs: Any) -> Any: - """The actual wrapper function - - Args: - args: The argument calls to the wrapped function - kwargs: The keyword args to the wrapped function - - Returns: - The result of the function - - """ - async with await DBEngine.transaction(db_info, is_conditional=False): - ret = await fn(*args, **kwargs) - return ret - - return wrapped_funct - - return parameterized_decorator - - @classmethod - async def close(cls, db_info: DBInfo) -> None: - 
"""Close existing database engines and pools - - Args: - db_info: database connection information - - """ - logger.info("Closing database engine") - pool_engine = await cls._get_engine(db_info, if_exists=True) - if pool_engine is None: - # The engine/pool doesn't exist so nothing to close - pass - else: - pool_engine.close() - await pool_engine.wait_closed() diff --git a/pynocular/evaluate_column_element.py b/pynocular/evaluate_column_element.py new file mode 100644 index 0000000..cbca97b --- /dev/null +++ b/pynocular/evaluate_column_element.py @@ -0,0 +1,239 @@ +"""Contains evaluate_column_element function for evaluating filter expressions""" + +import functools +from typing import Any, Dict, List + +from sqlalchemy import Column +from sqlalchemy.sql.elements import ( + AsBoolean, + BinaryExpression, + BindParameter, + BooleanClauseList, + ClauseList, + ColumnElement, + False_, + Grouping, + Null, + True_, +) +from sqlalchemy.sql.operators import in_op, is_, is_false, is_not + + +@functools.singledispatch +def evaluate_column_element( + column_element: ColumnElement, model: Dict[str, Any] +) -> Any: + """Evaluate a ColumnElement on a dictionary representing a database model + + This function can be overridden based on the type of ColumnElement to return + an element from the model, a static value, or the result of some operation (e.g. + addition). + + Args: + column_element: The element to evaluate. + model: The model to evaluate the column element on. Represented as a dictionary + where the keys are column names. + + """ + raise Exception(f"Cannot evaluate a {column_element} object.") + + +@evaluate_column_element.register(BooleanClauseList) +def _evaluate_boolean_clause_list( + column_element: ClauseList, model: Dict[str, Any] +) -> Any: + """Evaluates a boolean clause list and breaks it down into its sub column elements + + Args: + column_element: The BooleanClauseList in question. + model: The model of data this clause should be evaluated for. 
+ + Returns: + The result of the evaluation. + + """ + operator = column_element.operator + + return functools.reduce( + operator, + [ + evaluate_column_element(sub_element, model) + for sub_element in column_element.get_children() + ], + ) + + +@evaluate_column_element.register(ClauseList) +def _evaluate_clause_list(column_element: ClauseList, model: Dict[str, Any]) -> Any: + """Evaluates a clause list and breaks it down into its sub column elements + + Args: + column_element: The ClauseList in question. + model: The model of data this clause should be evaluated for. + + Returns: + The result of the evaluation. + + """ + operator = column_element.operator + + return operator( + *[ + evaluate_column_element(sub_element, model) + for sub_element in column_element.get_children() + ] + ) + + +@evaluate_column_element.register(BinaryExpression) +def _evaluate_binary_expression( + column_element: BinaryExpression, model: Dict[str, Any] +) -> Any: + """Evaluates the binary expression + + Args: + column_element: The binary expression to evaluate. + model: The model to evaluate the expression on. + + Returns: + The evaluation response dictated by the operator of the expression. + + """ + operator = column_element.operator + + # The sqlalchemy `in` operator does not work on evaluated columns, so we replace + # it with the standard `in` operator. + if operator == in_op: + operator = lambda x, y: x in y + + # The sqlalchemy `is` operator does not work on evaluated columns, so we replace it + # with the standard `is` operator. + if operator == is_: + operator = lambda x, y: x is y + + # The sqlalchemy `is_not` operator does not work on evaluated columns, so we replace + # it with the standard `!=` operator. 
+    if operator == is_not:
+        operator = lambda x, y: x != y
+
+    return operator(
+        evaluate_column_element(column_element.left, model),
+        evaluate_column_element(column_element.right, model),
+    )
+
+
+@evaluate_column_element.register(AsBoolean)
+def _evaluate_as_boolean(column_element: AsBoolean, model: Dict[str, Any]) -> Any:
+    """Evaluates a boolean
+
+    Args:
+        column_element: The boolean to evaluate.
+        model: The model to evaluate the expression on.
+
+    Returns:
+        The evaluation response dictated by the operator of the expression.
+
+    """
+    result = bool(evaluate_column_element(column_element.element, model))
+    if column_element.operator == is_false:
+        return not result
+    return result
+
+
+@evaluate_column_element.register(Column)
+def _evaluate_column(column_element: Column, model: Dict[str, Any]) -> Any:
+    """Evaluate the column based on the column element name
+
+    Args:
+        column_element: The column to evaluate.
+        model: The model dictionary.
+
+    Returns:
+        The value from the model of the attribute referenced by column_element.
+
+    """
+    return model.get(column_element.name)
+
+
+@evaluate_column_element.register(BindParameter)
+def _evaluate_bind_parameter(
+    column_element: BindParameter, model: Dict[str, Any]
+) -> Any:
+    """Evaluate the column_element's value
+
+    Args:
+        column_element: The column's bind parameter.
+        model: The model dictionary.
+
+    Returns:
+        The value of the column_element
+
+    """
+    return column_element.value
+
+
+@evaluate_column_element.register(True_)
+def _evaluate_true(column_element: True_, model: Dict[str, Any]) -> bool:
+    """Wrapper around evaluating True
+
+    Args:
+        column_element: The column to evaluate. This is just True
+        model: The model dictionary.
+
+    Returns:
+        True
+
+    """
+    # The boolean value True is its own SQLAlchemy element
+    return True
+
+
+@evaluate_column_element.register(False_)
+def _evaluate_false(column_element: False_, model: Dict[str, Any]) -> bool:
+    """Wrapper around evaluating False
+
+    Args:
+        column_element: The column to evaluate. This just returns False
+        model: The model dictionary.
+
+    Returns:
+        False
+
+    """
+    # The boolean value False is its own SQLAlchemy element
+    return False
+
+
+@evaluate_column_element.register(Grouping)
+def _evaluate_grouping(column_element: Grouping, model: Dict[str, Any]) -> List[Any]:
+    """Wrapper around evaluating a grouping
+
+    Args:
+        column_element: The grouping to evaluate.
+        model: The model dictionary.
+
+    Returns:
+        A list of values that are the result of evaluating each element in the
+        group.
+
+    """
+    return [
+        evaluate_column_element(clause, model)
+        for clause in column_element.element.clauses
+    ]
+
+
+@evaluate_column_element.register(Null)
+def _evaluate_null(column_element: Null, model: Dict[str, Any]) -> Any:
+    """Wrapper around evaluating null
+
+    Args:
+        column_element: The column element to evaluate. This is null
+        model: The model dictionary.
+ + Returns: + None + + """ + # The Null value is its own SQLAlchemy element + return None diff --git a/pynocular/exceptions.py b/pynocular/exceptions.py index 7a707ea..8bd315d 100644 --- a/pynocular/exceptions.py +++ b/pynocular/exceptions.py @@ -306,22 +306,3 @@ def __init__(self, identifier: str) -> None: def __str__(self) -> str: """Returns the message describing the exception""" return f"Invalid identifier {self.identifier}" - - -class NestedDatabaseModelNotResolved(BaseException): - """Indicates a property was accessed before the reference was resolved""" - - def __init__(self, model_cls: str, nested_model_id_value: Any) -> None: - """Initialize NestedDatabaseModelNotResolved - - Args: - model_cls: The class name of the model that was being referenecd - nested_model_id_value: The value of the unique id for this nested model - - """ - msg = ( - f"Object {model_cls} with id {nested_model_id_value} was not resolved." - f" Please call `fetch()` before trying to access properties of {model_cls}" - ) - - super().__init__(msg, msg) diff --git a/pynocular/nested_database_model.py b/pynocular/nested_database_model.py deleted file mode 100644 index a8f0c63..0000000 --- a/pynocular/nested_database_model.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Class that wraps nested DatabaseModels""" -from typing import Any, Callable - -from pynocular.exceptions import NestedDatabaseModelNotResolved - - -class NestedDatabaseModel: - """Class that wraps nested DatabaseModels""" - - def __init__( - self, - model_cls: Callable, - _id: Any, - model: "DatabaseModel" = None, # noqa - ) -> None: - """Init for NestedDatabaseModel - - Args: - model_cls: The class that the id relates to - _id: The id of the references - model: The model object if it is already loaded - - """ - self._model_cls = model_cls - self._model = model - # We can only support nested database models that are based off of a single - # unique identifier - self._primary_key_name = model_cls._primary_keys[0].name - setattr(self, 
self._primary_key_name, _id) - - def get_primary_id(self) -> Any: - """Standard interface for returning the id of a field - - Returns: - The ID value for the proxied DatabaseModel - - """ - return getattr(self, self._primary_key_name) - - async def fetch(self) -> None: - """Resolves the reference via the id set""" - if self._model is None: - self._model = await self._model_cls.get( - getattr(self, self._primary_key_name) - ) - - def __getattr__(self, attr_name: str) -> Any: - """Wrapper around getattr - - This will only get hit if the class doesn't have a reference to attr_name - - Args: - attr_name: The name of the attribute - - Returns: - The value of the attribute on the object - - """ - if self._model is None: - raise NestedDatabaseModelNotResolved(self._model_cls, self.get_primary_id()) - else: - return getattr(self._model, attr_name) - - def __eq__(self, other: Any) -> bool: - """Equality function - - Args: - other: The object to compare to - - Returns: - If the object is equal to this one - - """ - - if self._model is None: - return False - else: - return self._model == other diff --git a/pynocular/patch_models.py b/pynocular/patch_models.py deleted file mode 100644 index e9607ed..0000000 --- a/pynocular/patch_models.py +++ /dev/null @@ -1,497 +0,0 @@ -"""Context manager for mocking db calls for DatabaseModels during tests""" -from contextlib import contextmanager -import functools -from typing import Any, Dict, List, Optional -from unittest.mock import patch -from uuid import uuid4 - -from sqlalchemy import Column -from sqlalchemy.sql.elements import ( - AsBoolean, - BinaryExpression, - BindParameter, - BooleanClauseList, - ClauseList, - ColumnElement, - False_, - Grouping, - Null, - True_, - UnaryExpression, -) -from sqlalchemy.sql.operators import in_op, is_, is_false, is_not - -from pynocular.database_model import DatabaseModel - - -@contextmanager -def patch_database_model( - model_cls: DatabaseModel, - models: Optional[List[DatabaseModel]] = None, -) 
-> None: - """Patch a DatabaseModel class, seeding with a set of values - - Example: - with patch_database_model(Org, [Org(id="1", name="org 1"), ...]: - await Org.get_list(...) - - Args: - model_cls: A subclass of DatabaseModel that should be patched. - models: models that should be in the patched DB table. - - """ - models = list(models) if models is not None else [] - - def match(model: DatabaseModel, expression: BinaryExpression) -> bool: - """Function to match the value with the expected one in the expression - - Args: - model: The db model that represents a model in the "db". - expression: The expression object to compare to. - - Returns: - True if the expression operator is True. - - """ - return expression.operator( - model.get(expression.left.name), expression.right.value - ) - - async def select( - where_expressions: Optional[List[BinaryExpression]] = None, - order_by: Optional[List[UnaryExpression]] = None, - limit: Optional[int] = None, - ) -> List[DatabaseModel]: - """Mock select function for DatabaseModel - - Args: - where_expressions: The BinaryExpressions to use in the select where clause. - order_by: The order by expressions to be included in the select. This are - not supported for mocking at this time. - limit: The maximum number of objects to return. - - Returns: - List of DatabaseModels that match the parameters. - - """ - # This function currently does not support `order_by` parameter. - if where_expressions is None: - return models - - matched_models = [ - model - for model in models - if all( - _evaluate_column_element(expr, model.to_dict()) - for expr in where_expressions - ) - ] - - if limit is None: - matched_models[:limit] - - return matched_models - - async def create_list(models) -> List[DatabaseModel]: - """Mock `create_list` function for DatabaseModel - - Args: - models: List of DatabaseModels to persist. - - Returns: - The list of new DatabaseModels that have been saved. 
- - """ - # Iterate through the list of orm objs and call save(). - for obj in models: - await obj.save() - - return models - - async def save(model, include_nested_models=False) -> None: - """Mock `save` function for DatabaseModel - - Args: - model: The model to save. - include_nested_models: If True, any nested models should get saved before - this object gets saved. - - """ - # If include_nested_models is True, call save on all nested model attributes. - # This requires that the nested models are also patched. - if include_nested_models: - for attr_name in model._nested_model_attributes: - obj = getattr(model, attr_name) - if obj is not None: - await obj.save() - - primary_keys = model_cls._primary_keys - # Put uuids into any primary key that isn't set yet. - for primary_key in primary_keys: - val = getattr(model, primary_key.name) - if val is None: - setattr(model, primary_key.name, str(uuid4())) - - # Pull the primary keys out of the class and the values out of the provided - # database model. Then build a where_expression list to get the model matching those - # primary keys. - where_expressions = [ - primary_key == getattr(model, primary_key.name) - for primary_key in primary_keys - ] - selected_models = [ - model - for model in models - if all( - _evaluate_column_element(expr, model.to_dict()) - for expr in where_expressions - ) - ] - - if len(selected_models) == 0: - # Add a new model to the models since this model didn't exist before. - models.append(model) - else: - # Update the matching model. Since these are primary keys there should only - # ever be one model matching the given where_expressions. - matched_model = selected_models[0] - for attr, val in model.dict().items(): - setattr(matched_model, attr, val) - - async def update_record(**kwargs: Any) -> DatabaseModel: - """Mock `update_record` function for DatabaseModel - - Args: - kwargs: The values to update. - - Returns: - The updated DatabaseModel. 
- - """ - primary_keys = model_cls._primary_keys - - # Pull the primary keys out of the class and the values out of the provided - # kwargs. Then build a where_expression list to get the model matching those - # primary keys. - where_expressions = [ - primary_key == kwargs[primary_key.name] for primary_key in primary_keys - ] - selected_models = [ - model - for model in models - if all( - _evaluate_column_element(expr, model.to_dict()) - for expr in where_expressions - ) - ] - - # Update the matching model. Since these are primary keys there should only - # ever be one model matching the given where_expressions. - model = selected_models[0] - for attr, val in kwargs.items(): - setattr(model, attr, val) - return model - - async def update( - where_expressions: Optional[List[BinaryExpression]], values: Dict[str, Any] - ) -> List[DatabaseModel]: - """Mock `update_record` function for DatabaseModel - - Args: - where_expressions: A list of BinaryExpressions for the table that will be - `and`ed together for the where clause of the UPDATE - values: The field and values to update all records to that match the - where_expressions - - Returns: - The updated DatabaseModels. - - """ - models = await select(where_expressions) - for model in models: - for attr, val in values.items(): - setattr(model, attr, val) - return models - - async def delete(model) -> None: - """Mock `delete` function for DatabaseModel""" - primary_keys = model_cls._primary_keys - - # Pull the primary keys out of the class and the values out of the provided - # database model. Then build a where_expression list to get the model matching those - # primary keys. 
- where_expressions = [ - primary_key == getattr(model, primary_key.name) - for primary_key in primary_keys - ] - - # Remove any models that match the given where_expression - models[:] = [ - model - for model in models - if not all( - _evaluate_column_element(expr, model.to_dict()) - for expr in where_expressions - ) - ] - - async def delete_records(**kwargs: Any) -> None: - """Mock `delete_records` function for DatabaseModel - - Args: - kwargs: The values used to find the records that should be deleted - - """ - where_exp = [] - for key, value in kwargs.items(): - col = getattr(model_cls.columns, key) - if isinstance(value, list): - where_exp.append(col.in_(value)) - else: - where_exp.append(col == value) - - # Remove any models that match the given where_expression - models[:] = [ - model - for model in models - if not all( - _evaluate_column_element(expr, model.to_dict()) for expr in where_exp - ) - ] - - # Add the patches. Note that create functionality is patched indirectly though - # 'save' already, but add a spy on it anyway so we can test calls against it. - with patch.object(model_cls, "select", select), patch.object( - model_cls, "save", save - ), patch.object(model_cls, "update_record", update_record), patch.object( - model_cls, "update", update - ), patch.object( - model_cls, "create_list", create_list - ), patch.object( - model_cls, "create", wraps=model_cls.create - ), patch.object( - model_cls, "delete", delete - ), patch.object( - model_cls, "delete_records", delete_records - ): - yield - - -@functools.singledispatch -def _evaluate_column_element( - column_element: ColumnElement, model: Dict[str, Any] -) -> Any: - """Evaluate a ColumnElement on a dictionary representing a database model - - This function can be overridden based on the type of ColumnElement to return - an element from the model, a static value, or the result of some operation (e.g. - addition). - - Args: - column_element: The element to evaluate. 
- model: The model to evaluate the column element on. Represented as a dictionary - where the keys are column names. - - """ - raise Exception(f"Cannot evaluate a {column_element} object.") - - -@_evaluate_column_element.register(BooleanClauseList) -def _evaluate_boolean_clause_list( - column_element: ClauseList, model: Dict[str, Any] -) -> Any: - """Evaluates a boolean clause list and breaks it down into its sub column elements - - Args: - column_element: The BooleanClauseList in question. - model: The model of data this clause should be evaluated for. - - Returns: - The result of the evaluation. - - """ - operator = column_element.operator - - return functools.reduce( - operator, - [ - _evaluate_column_element(sub_element, model) - for sub_element in column_element.get_children() - ], - ) - - -@_evaluate_column_element.register(ClauseList) -def _evaluate_clause_list(column_element: ClauseList, model: Dict[str, Any]) -> Any: - """Evaluates a clause list and breaks it down into its sub column elements - - Args: - column_element: The ClauseList in question. - model: The model of data this clause should be evaluated for. - - Returns: - The result of the evaluation. - - """ - operator = column_element.operator - - return operator( - *[ - _evaluate_column_element(sub_element, model) - for sub_element in column_element.get_children() - ] - ) - - -@_evaluate_column_element.register(BinaryExpression) -def _evaluate_binary_expression( - column_element: BinaryExpression, model: Dict[str, Any] -) -> Any: - """Evaluates the binary expression - - Args: - column_element: The binary expression to evaluate. - model: The model to evaluate the expression on. - - Returns: - The evaluation response dictated by the operator of the expression. - - """ - operator = column_element.operator - - # The sqlalchemy `in` operator does not work on evaluated columns, so we replace - # it with the standard `in` operator. 
- if operator == in_op: - operator = lambda x, y: x in y - - # The sqlalchemy `is` operator does not work on evaluated columns, so we replace it - # with the standard `is` operator. - if operator == is_: - operator = lambda x, y: x is y - - # The sqlalchemy `is_not` operator does not work on evaluated columns, so we replace - # it with the standard `!=` operator. - if operator == is_not: - operator = lambda x, y: x != y - - return operator( - _evaluate_column_element(column_element.left, model), - _evaluate_column_element(column_element.right, model), - ) - - -@_evaluate_column_element.register(AsBoolean) -def _evaluate_as_boolean(column_element: AsBoolean, model: Dict[str, Any]) -> Any: - """Evaluates a boolean - - Args: - column_element: The boolean to evaluate. - model: The model to evaluate the expression on. - - Returns: - The evaluation response dictated by the operator of the expression. - - """ - result = bool(_evaluate_column_element(column_element.element, model)) - if column_element.operator == is_false: - return not result - return result - - -@_evaluate_column_element.register(Column) -def _evaluate_column(column_element: Column, model: Dict[str, Any]) -> Any: - """Evaluate the column based on the column element name - - Args: - column_element: The column to evaluate. - model: The model dictionary. - - Returns: - The value from the model of attribute referenced by column_element. - - """ - return model.get(column_element.name) - - -@_evaluate_column_element.register(BindParameter) -def _evaluate_bind_parameter( - column_element: BindParameter, model: Dict[str, Any] -) -> Any: - """Evaluate the column_elements value - - Args: - column_element: The column's bind parameter. - model: The model dictionary. 
- - Returns: - The value of the column_element - - """ - return column_element.value - - -@_evaluate_column_element.register(True_) -def _evaluate_true(column_element: True_, model: Dict[str, Any]) -> bool: - """Wrapper around evaluating True - - Args: - column_element: The column to evaluate. This is just True - model: The model dictionary. - - Returns: - True - - """ - # The boolean value True is its own SQLAlchemy element - return True - - -@_evaluate_column_element.register(False_) -def _evaluate_false(column_element: False_, model: Dict[str, Any]) -> bool: - """Wrapper around evaluating False - - Args: - column_element: The column to evaluate. This just returns False - model: The model dictionary. - - Returns: - False - - """ - # The boolean value False is its own SQLAlchemy element - return False - - -@_evaluate_column_element.register(Grouping) -def _evaluate_grouping(column_element: Grouping, model: Dict[str, Any]) -> List[Any]: - """Wrapper around evaluating a grouping - - Args: - column_element: The grouping to evaluate. - model: The model dictionary. - - Returns: - A list of of values that are the resulting of evaluating each element in the - group. - - """ - return [ - _evaluate_column_element(clause, model) - for clause in column_element.element.clauses - ] - - -@_evaluate_column_element.register(Null) -def _evaluate_null(column_element: Null, model: Dict[str, Any]) -> Any: - """Wrapper around evaluating null - - Args: - column_element: The column element to evaluate. This is null - model: The model dictionary. 
- - Returns: - None - - """ - # The Null value is its own SQLAlchemy element - return None diff --git a/pynocular/util.py b/pynocular/util.py new file mode 100644 index 0000000..0d04821 --- /dev/null +++ b/pynocular/util.py @@ -0,0 +1,287 @@ +"""Database utility functions""" + +from functools import wraps +import logging +import re +from typing import Any, Coroutine, Generator +from uuid import UUID as stdlib_uuid + +from databases.core import Database +import sqlalchemy as sa +from sqlalchemy.sql.ddl import CreateTable, DropTable + +from pynocular.backends.context import get_backend +from pynocular.exceptions import InvalidSqlIdentifierErr + +logger = logging.getLogger("pynocular") + + +def is_valid_uuid(string: str) -> bool: + """Check if a string is a valid UUID + + Args: + string: the string to check + + Returns: + Whether or not the string is a well-formed UUIDv4 + + """ + try: + stdlib_uuid(string, version=4) + return True + except (TypeError, AttributeError, ValueError): + return False + + +class UUID_STR(str): + """A string that represents a UUID4 value""" + + @classmethod + def __get_validators__(cls) -> Generator: + """Get the validators for the given class""" + yield cls.validate + + @classmethod + def validate(cls, v: Any) -> str: + """Function to validate the value + + Args: + v: The value to validate + + """ + if isinstance(v, stdlib_uuid) or (isinstance(v, str) and is_valid_uuid(v)): + return str(v) + else: + raise ValueError("invalid UUID string") + + +async def is_database_available(connection_string: str) -> bool: + """Check if the database is available + + Args: + connection_string: A connection string for the database + + Returns: + true if the DB exists + + """ + try: + async with Database(connection_string) as db: + await db.execute("SELECT 1") + return True + except Exception: + return False + + +async def create_new_database(connection_string: str, db_name: str) -> None: + """Create a new database database for testing + + Args: + 
connection_string: A connection string for the database + db_name: the name of the database to create + + """ + async with Database(connection_string) as db: + # End existing commit + await db.execute("COMMIT") + # Create db + await db.execute(f"DROP DATABASE IF EXISTS {db_name}") + await db.execute(f"CREATE DATABASE {db_name}") + + +async def create_table(db: Database, table: sa.Table) -> None: + """Create table in database + + Args: + db: an async database connection + table: The table to create + + """ + await db.execute(CreateTable(table)) + + +async def drop_table(db: Database, table: sa.Table) -> None: + """Drop table in database + + Args: + db: an async database connection + table: The table to create + + """ + logger.debug(f"Dropping table {table.name}") + await db.execute(DropTable(table, if_exists=True)) + logger.debug(f"Dropped table {table.name}") + + +async def setup_uuid(db: Database) -> None: + """Set up UUID support + + Args: + db: an async database connection + + """ + await db.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";') + + +async def setup_datetime_trigger(db: Database) -> None: + """Set up created_at/updated_at datetime trigger + + Args: + db: an async database connection + + """ + await db.execute('CREATE EXTENSION IF NOT EXISTS "plpgsql";') + await db.execute( + """ + CREATE OR REPLACE FUNCTION update_timestamp_columns() + RETURNS TRIGGER AS $$ + BEGIN + IF NEW.created_at IS NULL THEN + NEW.created_at = now(); + END IF; + + NEW.updated_at = now(); + RETURN NEW; + END; + $$ language 'plpgsql'; + """ + ) + + +async def add_datetime_trigger(db: Database, table: str) -> None: + """Helper method for adding created_at and updated_at datetime triggers on a table + + + Args: + db: an async database connection + table: The name of the table to add an edit trigger for + + """ + await setup_datetime_trigger(db) + await db.execute( + """ + CREATE TRIGGER update_{table}_timestamps + BEFORE INSERT OR UPDATE ON {table} + FOR EACH ROW EXECUTE 
PROCEDURE update_timestamp_columns(); + """.format( + table=table + ) + ) + + +async def remove_datetime_trigger(db: Database, table: str) -> None: + """Helper method for removing datetime triggers on a table + + Args: + db: an async database connection + table: The name of the table to remove a trigger for + + """ + await db.execute( + "DROP TRIGGER IF EXISTS update_{table}_timestamps on {table}".format( + table=table + ) + ) + + +def get_cleaned_db_name( + name: str, + lowercase: bool = True, + remove_leading_numbers: bool = True, + replace_spaces_with_underscores: bool = True, + replace_dashes_with_underscores: bool = True, + remove_special_chars: bool = True, + limit: int = 128, +) -> str: + """Gets a name cleaned to adhere to sql naming conventions + + Args: + name: An uncleaned name (such as a table or column name) + lowercase: Whether all letters in the name should be lowercased + remove_leading_numbers: Whether leading numbers should be stripped + replace_spaces_with_underscores: Whether spaces should be replaced with underscores + replace_dashes_with_underscores: Whether dashes should be replaced with underscores + remove_special_chars: Whether any characters other than letters, numbers, and + underscores should be removed from the name + limit: the maximum allowed length of the name after cleaning. The default value + is the Athena/Glue column name length limit. 
+ + Returns: + A cleaned name to be used in a relational database + + Raises: + :py:exc:`InvalidSqlIdentifierErr`: If the name is still invalid + after being cleaned + + """ + cleaned_name = name + + if lowercase: + cleaned_name = cleaned_name.lower() + + if remove_leading_numbers: + cleaned_name = cleaned_name.lstrip("0123456789") + + if replace_spaces_with_underscores: + cleaned_name = "_".join(cleaned_name.split(" ")) + + if replace_dashes_with_underscores: + cleaned_name = "_".join(cleaned_name.split("-")) + + if remove_special_chars: + cleaned_name = re.sub(r"[^a-zA-Z0-9_]*", "", cleaned_name) + + if len(cleaned_name) == 0 or len(cleaned_name) > limit: + raise InvalidSqlIdentifierErr(cleaned_name) + + return cleaned_name + + +async def gather(*coros: Coroutine, return_exceptions: bool = False) -> list[Any]: + """Helper function to run a collection of coroutines in sequence + + This should be used inside of database transaction instead of asyncio.gather to + avoid issues caused by multiple concurrent queries. + + See https://github.com/encode/databases/issues/125#issuecomment-511720013 + + Args: + return_exceptions: Flag that controls whether exceptions are returned in the + list instead of raised immediately. Defaults to False. 
+ + Returns: + list of results from executing the coroutines + + """ + results = [] + for coro in coros: + try: + result = await coro + results.append(result) + except Exception as e: + if return_exceptions: + results.append(e) + else: + raise + + return results + + +def transaction(f): + """Helper decorator to wrap a function in a database transaction + + Args: + f: Function to wrap + + Returns: + wrapped function that will execute in a transaction + + """ + + @wraps(f) + async def wrapper(*args, **kwargs): + """Wrapper function""" + async with get_backend().transaction(): + return await f(*args, **kwargs) + + return wrapper diff --git a/pyproject.toml b/pyproject.toml index 3298dcf..fd15afd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pynocular" -version = "0.19.0" +version = "2.0.0-rc.5" description = "Lightweight ORM that lets you query your database using Pydantic models and asyncio" authors = [ "RJ Santana ", @@ -13,18 +13,19 @@ homepage = "https://github.com/NarrativeScience/pynocular" repository = "https://github.com/NarrativeScience/pynocular" [tool.poetry.dependencies] -python = "^3.6.5" -aenum = "^3.1.0" -aiocontextvars = "^0.2.2" -aiopg = {extras = ["sa"], version = "^1.3.1"} +python = "^3.9" pydantic = "^1.6" +databases = {extras = ["postgresql"], version = "^0.5.5"} +psycopg2-binary = "^2.9.3" [tool.poetry.dev-dependencies] pre-commit = "^2.10.1" -pytest = "^6.2.2" +pytest = "^7.1.1" pytest-asyncio = "^0.15" -black = {version = "^22.3.0", allow-prereleases = true} +black = "^22.3.0" cruft = "^2.9.0" +pytest-lazy-fixture = "^0.6.3" +flake8 = "^4.0.1" [tool.cruft] skip = ["pyproject.toml", "pynocular", "tests", "README.md", ".circleci/config.yml"] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/functional/__init__.py b/tests/functional/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/functional/conftest.py 
b/tests/functional/conftest.py new file mode 100644 index 0000000..49d7eb4 --- /dev/null +++ b/tests/functional/conftest.py @@ -0,0 +1,60 @@ +"""Contains shared functional test fixtures""" + +import asyncio +import logging +import os + +from databases import Database +import pytest + +from pynocular.util import create_new_database, setup_uuid + +logger = logging.getLogger("pynocular") + + +@pytest.fixture(scope="session") +def event_loop(): + """Returns the event loop so we can define async, session-scoped fixtures""" + return asyncio.get_event_loop() + + +@pytest.fixture(scope="session") +async def postgres_database(): + """Fixture that manages a Postgres database fixture + + Yields: + postgres database + + """ + db_host = os.environ.get("DB_HOST", "localhost") + db_user_name = os.environ.get("DB_USER_NAME", os.environ.get("USER", "postgres")) + db_user_password = os.environ.get("DB_USER_PASSWORD", "") + test_db_name = os.environ.get("TEST_DB_NAME", "test_db") + + maintenance_connection_string = f"postgres://{db_user_name}:{db_user_password}@{db_host}:5432/postgres?sslmode=disable" + db_connection_string = f"postgresql://{db_user_name}:{db_user_password}@{db_host}:5432/{test_db_name}?sslmode=disable" + + try: + await create_new_database(maintenance_connection_string, test_db_name) + except Exception as e: + # If this fails, assume its already created + logger.info(str(e)) + + database = Database(db_connection_string, timeout=5, command_timeout=5) + await database.connect() + await setup_uuid(database) + try: + yield database + except Exception as e: + logger.info(str(e)) + finally: + logger.debug("Disconnecting") + await asyncio.wait_for(database.disconnect(), 2) + + try: + logger.debug(f"Dropping {test_db_name}") + async with Database(maintenance_connection_string) as db: + await db.execute(f"DROP DATABASE IF EXISTS {test_db_name}") + logger.debug(f"Dropped {test_db_name}") + except Exception as e: + logger.info(str(e)) diff --git 
a/tests/functional/test_database_model.py b/tests/functional/test_database_model.py index 84d0bc3..ef3ac11 100644 --- a/tests/functional/test_database_model.py +++ b/tests/functional/test_database_model.py @@ -1,49 +1,28 @@ """Tests for DatabaseModel abstract class""" -import asyncio from asyncio import gather, sleep from datetime import datetime -import os from typing import Optional from uuid import uuid4 -from pydantic import BaseModel, Field +from databases import Database +from pydantic import Field from pydantic.error_wrappers import ValidationError import pytest -from pynocular.database_model import database_model, UUID_STR -from pynocular.db_util import ( - add_datetime_trigger, - create_new_database, - create_table, - drop_table, +from pynocular import ( + DatabaseModel, + MemoryDatabaseModelBackend, + set_backend, + SQLDatabaseModelBackend, ) -from pynocular.engines import DBEngine, DBInfo from pynocular.exceptions import DatabaseModelMissingField, DatabaseRecordNotFound +from pynocular.util import add_datetime_trigger, create_table, drop_table, UUID_STR -db_user_password = str(os.environ.get("DB_USER_PASSWORD")) -# DB to initially connect to so we can create a new db -existing_connection_string = str( - os.environ.get( - "EXISTING_DB_CONNECTION_STRING", - f"postgresql://postgres:{db_user_password}@localhost:5432/postgres?sslmode=disable", - ) -) - -test_db_name = str(os.environ.get("TEST_DB_NAME", "test_db")) -test_connection_string = str( - os.environ.get( - "TEST_DB_CONNECTION_STRING", - f"postgresql://postgres:{db_user_password}@localhost:5432/{test_db_name}?sslmode=disable", - ) -) -testdb = DBInfo(test_connection_string) - -@database_model("organizations", testdb) -class Org(BaseModel): +class Org(DatabaseModel, table_name="organizations"): """A test database model""" - id: UUID_STR = Field(primary_key=True) + id: Optional[UUID_STR] = Field(primary_key=True, fetch_on_create=True) serial_id: Optional[int] name: str = Field(max_length=45) slug: str = 
Field(max_length=45) @@ -52,8 +31,7 @@ class Org(BaseModel): updated_at: Optional[datetime] = Field(fetch_on_update=True) -@database_model("topics", testdb) -class Topic(BaseModel): +class Topic(DatabaseModel, table_name="topics"): """A test class with a nullable JSONB field""" id: UUID_STR = Field(primary_key=True) @@ -63,45 +41,46 @@ class Topic(BaseModel): name: str = Field(max_length=45) -class TestDatabaseModel: - """Test suite for DatabaseModel object management""" +@pytest.fixture(scope="module") +async def postgres_backend(postgres_database: Database): + """Fixture that creates tables before yielding a Postgres backend - @classmethod - async def _setup_class(cls): - """Create the database and tables""" - try: - await create_new_database(existing_connection_string, test_db_name) - except Exception: - # If this fails, assume its already created - pass - - await create_table(testdb, Org.get_table()) - await create_table(testdb, Topic.get_table()) - conn = await (await DBEngine.get_engine(testdb)).acquire() - await add_datetime_trigger(conn, "organizations") - await conn.close() - - @classmethod - def setup_class(cls): - """Setup class function""" - loop = asyncio.get_event_loop() - loop.run_until_complete(cls._setup_class()) - - @classmethod - async def _teardown_class(cls): - """Drop database tables""" - await drop_table(testdb, Org._table) - await drop_table(testdb, Topic._table) - - @classmethod - def teardown_class(cls): - """Teardown class function""" - loop = asyncio.get_event_loop() - loop.run_until_complete(cls._teardown_class()) - - @pytest.mark.asyncio - async def test_select(self) -> None: - """Test that we can select the full set of DatabaseModels""" + Returns: + postgres backend + + """ + await create_table(postgres_database, Org.table) + await create_table(postgres_database, Topic.table) + await add_datetime_trigger(postgres_database, Org.table.name) + try: + yield SQLDatabaseModelBackend(postgres_database) + finally: + await 
drop_table(postgres_database, Topic.table) + await drop_table(postgres_database, Org.table) + + +@pytest.fixture() +async def memory_backend(): + """Fixture that yields an in-memory backend + + Returns: + in-memory backend + + """ + return MemoryDatabaseModelBackend() + + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_select(backend) -> None: + """Test that we can select the full set of DatabaseModels""" + with set_backend(backend): try: org = await Org.create( id=str(uuid4()), @@ -114,9 +93,18 @@ async def test_select(self) -> None: finally: await org.delete() - @pytest.mark.asyncio - async def test_get_list(self) -> None: - """Test that we can get_list and get a subset of DatabaseModels""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_get_list(backend) -> None: + """Test that we can get_list and get a subset of DatabaseModels""" + with set_backend(backend): try: org1 = await Org.create( id=str(uuid4()), name="orgus borgus", slug="orgus_borgus", serial_id=1 @@ -138,9 +126,18 @@ async def test_get_list(self) -> None: await org2.delete() await org3.delete() - @pytest.mark.asyncio - async def test_get_list__none_filter_value(self) -> None: - """Test that we can get_list based on a None filter value""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_get_list__none_filter_value(backend) -> None: + """Test that we can get_list based on a None filter value""" + with set_backend(backend): try: test_org = await Org.create( id=uuid4(), name="orgus borgus", slug="orgus_borgus", serial_id=None @@ -150,9 +147,18 @@ async def test_get_list__none_filter_value(self) -> None: finally: await 
test_org.delete() - @pytest.mark.asyncio - async def test_get_list__none_json_value(self) -> None: - """Test that we can get_list for a None value on a JSON field""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_get_list__none_json_value(backend) -> None: + """Test that we can get_list for a None value on a JSON field""" + with set_backend(backend): # The None value will be persisted as a SQL NULL value rather than a JSON-encoded # null value when the Topic is created, so the filter value None will work here try: @@ -168,9 +174,18 @@ async def test_get_list__none_json_value(self) -> None: finally: await base_topic.delete() - @pytest.mark.asyncio - async def test_create_new_record(self) -> None: - """Test that we can create a database record""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_create_new_record(backend) -> None: + """Test that we can create a database record""" + with set_backend(backend): org_id = str(uuid4()) serial_id = 100 try: @@ -183,9 +198,18 @@ async def test_create_new_record(self) -> None: # Make sure we delete org so we don't leak out of test await org.delete() - @pytest.mark.asyncio - async def test_create_list(self) -> None: - """Test that we can create a list of database records""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_create_list(backend) -> None: + """Test that we can create a list of database records""" + with set_backend(backend): try: initial_orgs = [ Org(id=str(uuid4()), name="fake org 1", slug="fake-slug-1"), @@ -199,15 +223,33 @@ async def test_create_list(self) -> None: finally: await gather(*[org.delete() for org in created_orgs]) - 
@pytest.mark.asyncio - async def test_create_list__empty(self) -> None: - """Should return empty list for input of empty list""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_create_list__empty(backend) -> None: + """Should return empty list for input of empty list""" + with set_backend(backend): created_orgs = await Org.create_list([]) assert created_orgs == [] - @pytest.mark.asyncio - async def test_update_new_record__save(self) -> None: - """Test that we can update a database record using `save`""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_update_new_record__save(backend) -> None: + """Test that we can update a database record using `save`""" + with set_backend(backend): org_id = str(uuid4()) serial_id = 101 @@ -227,9 +269,18 @@ async def test_update_new_record__save(self) -> None: # Make sure we delete org so we don't leak out of test await org.delete() - @pytest.mark.asyncio - async def test_update_new_record__update_record(self) -> None: - """Test that we can update a database record using `update_record`""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_update_new_record__update_record(backend) -> None: + """Test that we can update a database record using `update_record`""" + with set_backend(backend): org_id = str(uuid4()) serial_id = 100000 @@ -247,9 +298,18 @@ async def test_update_new_record__update_record(self) -> None: # Make sure we delete org so we don't leak out of test await org.delete() - @pytest.mark.asyncio - async def test_delete_new_record__delete(self) -> None: - """Test that we can delete a database record using `delete`""" + +@pytest.mark.parametrize( + 
"backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_delete_new_record__delete(backend) -> None: + """Test that we can delete a database record using `delete`""" + with set_backend(backend): org_id = str(uuid4()) serial_id = 102 @@ -267,9 +327,18 @@ async def test_delete_new_record__delete(self) -> None: with pytest.raises(DatabaseRecordNotFound): await Org.get(org_id) - @pytest.mark.asyncio - async def test_delete_new_record__delete_records(self) -> None: - """Test that we can delete a database record using `delete_records`""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_delete_new_record__delete_records(backend) -> None: + """Test that we can delete a database record using `delete_records`""" + with set_backend(backend): org_id = str(uuid4()) serial_id = 103 @@ -285,9 +354,18 @@ async def test_delete_new_record__delete_records(self) -> None: with pytest.raises(DatabaseRecordNotFound): await Org.get(org_id) - @pytest.mark.asyncio - async def test_delete_new_record__delete_records_multi_kwargs(self) -> None: - """Test that we can delete a database record using `delete_records` with multiple kwargs""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_delete_new_record__delete_records_multi_kwargs(backend) -> None: + """Test that we can delete a database record using `delete_records` with multiple kwargs""" + with set_backend(backend): org_id = str(uuid4()) serial_id = 104 @@ -303,26 +381,69 @@ async def test_delete_new_record__delete_records_multi_kwargs(self) -> None: with pytest.raises(DatabaseRecordNotFound): await Org.get(org_id) - @pytest.mark.asyncio - async def test_bad_org_object_creation(self) -> None: - """Test 
that we raise an Exception if the object is missing fields""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_delete_records__count(backend) -> None: + """Should delete records and return deleted count""" + with set_backend(backend): + for i in range(3): + await Org.create(id=str(uuid4()), serial_id=i, name=str(i), slug=str(i)) + + count = await Org.delete_records(name="2") + assert count == 1 + + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_bad_org_object_creation(backend) -> None: + """Test that we raise an Exception if the object is missing fields""" + with set_backend(backend): org_id = str(uuid4()) with pytest.raises(ValidationError): Org(**{"id": org_id}) - @pytest.mark.asyncio - async def test_raise_error_get_list_wrong_field(self) -> None: - """Test that we raise an exception if we query for a wrong field on the object""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_raise_error_get_list_wrong_field(backend) -> None: + """Test that we raise an exception if we query for a wrong field on the object""" + with set_backend(backend): with pytest.raises(DatabaseModelMissingField): await Org.get_list(table_id="Table1") - @pytest.mark.asyncio - async def test_setting_db_managed_columns(self) -> None: - """Test that db managed columns get automatically set on save""" - org = await Org.create( - id=str(uuid4()), serial_id=105, name="fake_org105", slug="fake_org105" - ) + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_setting_db_managed_columns(backend) -> None: + 
"""Test that db managed columns get automatically set on save""" + with set_backend(backend): + org = await Org.create(serial_id=105, name="fake_org105", slug="fake_org105") try: assert org.created_at is not None @@ -336,9 +457,18 @@ async def test_setting_db_managed_columns(self) -> None: finally: await org.delete() - @pytest.mark.asyncio - async def test_fetch(self) -> None: - """Test that we can fetch the latest state of a database record""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_fetch(backend) -> None: + """Test that we can fetch the latest state of a database record""" + with set_backend(backend): org_id = str(uuid4()) serial_id = 100 try: diff --git a/tests/functional/test_db_util.py b/tests/functional/test_db_util.py index c34200a..544b279 100644 --- a/tests/functional/test_db_util.py +++ b/tests/functional/test_db_util.py @@ -1,35 +1,21 @@ """Tests for the db_util module""" -import os +from databases import Database import pytest -from pynocular.db_util import is_database_available -from pynocular.engines import DBInfo +from pynocular.util import is_database_available -db_user_password = str(os.environ.get("DB_USER_PASSWORD")) -test_db_name = str(os.environ.get("TEST_DB_NAME", "test_db")) -test_connection_string = str( - os.environ.get( - "TEST_DB_CONNECTION_STRING", - f"postgresql://postgres:{db_user_password}@localhost:5432/{test_db_name}?sslmode=disable", - ) -) -test_db = DBInfo(test_connection_string) +@pytest.mark.asyncio +async def test_is_database_available(postgres_database: Database) -> None: + """Test successful database connection""" + available = await is_database_available(str(postgres_database.url)) + assert available is True -class TestDBUtil: - """Test cases for DB util functions""" - @pytest.mark.asyncio - async def test_is_database_available(self) -> None: - """Test successful database connection""" - 
available = await is_database_available(test_db) - assert available is True - - @pytest.mark.asyncio - async def test_is_database_not_available(self) -> None: - """Test db connection unavailable""" - invalid_connection_string = f"postgresql://postgres:{db_user_password}@localhost:5432/INVALID?sslmode=disable" - non_existing_db = DBInfo(invalid_connection_string) - available = await is_database_available(non_existing_db) - assert available is False +@pytest.mark.asyncio +async def test_is_database_not_available(postgres_database: Database) -> None: + """Test db connection unavailable""" + invalid_connection_string = str(postgres_database.url.replace(database="INVALID")) + available = await is_database_available(invalid_connection_string) + assert available is False diff --git a/tests/functional/test_nested_database_model.py b/tests/functional/test_nested_database_model.py deleted file mode 100644 index 48ad8b8..0000000 --- a/tests/functional/test_nested_database_model.py +++ /dev/null @@ -1,312 +0,0 @@ -"""Tests for DatabaseModel abstract class""" -import asyncio -from datetime import datetime -import os -from typing import Optional -from uuid import uuid4 - -from pydantic import BaseModel, Field -import pytest - -from pynocular.database_model import database_model, nested_model, UUID_STR -from pynocular.db_util import ( - add_datetime_trigger, - create_new_database, - create_table, - drop_table, -) -from pynocular.engines import DBEngine, DBInfo -from pynocular.exceptions import NestedDatabaseModelNotResolved - -db_user_password = str(os.environ.get("DB_USER_PASSWORD")) -# DB to initially connect to so we can create a new db -existing_connection_string = str( - os.environ.get( - "EXISTING_DB_CONNECTION_STRING", - f"postgresql://postgres:{db_user_password}@localhost:5432/postgres?sslmode=disable", - ) -) - -test_db_name = str(os.environ.get("TEST_DB_NAME", "test_db")) -test_connection_string = str( - os.environ.get( - "TEST_DB_CONNECTION_STRING", - 
f"postgresql://postgres:{db_user_password}@localhost:5432/{test_db_name}?sslmode=disable", - ) -) -testdb = DBInfo(test_connection_string) - - -@database_model("users", testdb) -class User(BaseModel): - """Model that represents the `users` table""" - - id: UUID_STR = Field(primary_key=True) - username: str = Field(max_length=100) - - -@database_model("organizations", testdb) -class Org(BaseModel): - """Model that represents the `organizations` table""" - - id: UUID_STR = Field(primary_key=True) - name: str = Field(max_length=45) - slug: str = Field(max_length=45) - tech_owner: Optional[ - nested_model(User, reference_field="tech_owner_id") # noqa F821 - ] - business_owner: Optional[ - nested_model(User, reference_field="business_owner_id") # noqa F821 - ] - - created_at: Optional[datetime] = Field(fetch_on_create=True) - updated_at: Optional[datetime] = Field(fetch_on_update=True) - - -@database_model("apps", testdb) -class App(BaseModel): - """Model that represents the `apps` table""" - - id: Optional[UUID_STR] = Field(primary_key=True, fetch_on_create=True) - name: str = Field(max_length=45) - org: nested_model(Org, reference_field="organization_id") # noqa F821 - slug: str = Field(max_length=45) - - -@database_model("topics", testdb) -class Topic(BaseModel): - """Model that represents the `topics` table""" - - id: UUID_STR = Field(primary_key=True) - app: nested_model(App, reference_field="app_id") # noqa F821 - name: str = Field(max_length=45) - - -class TestNestedDatabaseModel: - """Test suite for NestedDatabaseModel interactions""" - - @classmethod - async def _setup_class(cls): - """Create the database and tables""" - try: - await create_new_database(existing_connection_string, test_db_name) - except Exception: - # If this fails, assume its already created - pass - - await create_table(testdb, User._table) - await create_table(testdb, Org._table) - await create_table(testdb, Topic._table) - await create_table(testdb, App._table) - conn = await (await 
DBEngine.get_engine(testdb)).acquire() - await add_datetime_trigger(conn, "organizations") - await conn.close() - - @classmethod - def setup_class(cls): - """Setup class function""" - loop = asyncio.get_event_loop() - loop.run_until_complete(cls._setup_class()) - - @classmethod - async def _teardown_class(cls): - """Drop database tables""" - await drop_table(testdb, User._table) - await drop_table(testdb, Org._table) - await drop_table(testdb, Topic._table) - await drop_table(testdb, App._table) - - @classmethod - def teardown_class(cls): - """Teardown class function""" - loop = asyncio.get_event_loop() - loop.run_until_complete(cls._teardown_class()) - - @pytest.mark.asyncio - async def test_fetch(self) -> None: - """Test that we can resolve the reference for a foreign key""" - - try: - tech_owner = await User.create(id=str(uuid4()), username="owner1") - business_owner = await User.create(id=str(uuid4()), username="owner2") - org = await Org.create( - id=str(uuid4()), - name="fake org104", - slug="fake slug104", - tech_owner=tech_owner, - business_owner=business_owner, - ) - - org_get = await Org.get(org.id) - assert org_get.tech_owner.id == tech_owner.id - assert org_get.business_owner.id == business_owner.id - - # Error should be raised if we try to access a property before it is fetched - with pytest.raises(NestedDatabaseModelNotResolved): - org_get.tech_owner.username - - await org_get.tech_owner.fetch() - await org_get.business_owner.fetch() - assert org_get.tech_owner == tech_owner - assert org_get.business_owner == business_owner - finally: - await org.delete() - await tech_owner.delete() - await business_owner.delete() - - @pytest.mark.asyncio - async def test_swap_foreign_reference(self) -> None: - """Test that we can swap foreign key references""" - org_id = str(uuid4()) - - try: - org1 = await Org.create(id=org_id, name="fake org104", slug="fake slug104") - org2 = await Org.create( - id=str(uuid4()), - name="fake org105", - slug="fake slug105", - ) - - 
# Start with app pointing to the first org - app = await App.create( - id=str(uuid4()), - name="app name", - org=org1, - slug="app-slug", - ) - - # Confirm app is associated with org 1 - app_get = await App.get(app.id) - assert app_get.org.id == org1.id - - # Move app to org 2 - app_get.org = org2 - await app_get.save() - app_get = await App.get(app.id) - assert app_get.org.id == org2.id - await app_get.org.fetch() - assert app_get.org == org2 - finally: - await org1.delete() - await org2.delete() - await app.delete() - - @pytest.mark.asyncio - async def test_get_with_refs(self) -> None: - """Test that we can resolve foreign keys when we retrieve the record object""" - org_id = str(uuid4()) - - try: - org = await Org.create(id=org_id, name="fake org104", slug="fake slug104") - app = await App.create( - id=str(uuid4()), - name="app name", - org=org, - slug="app-slug", - ) - - app_get = await App.get_with_refs(app.id) - assert app_get.org == org - finally: - await org.delete() - await app.delete() - - @pytest.mark.asyncio - async def test_nested_foreign_references(self) -> None: - """Test that we can nest foreign key references and resolve them""" - org_id = str(uuid4()) - - try: - org = await Org.create(id=org_id, name="fake org104", slug="fake slug104") - app = await App.create( - id=str(uuid4()), - name="app name", - org=org, - slug="app-slug", - ) - - topic = await Topic.create(id=str(uuid4()), name="topic name", app=app) - - assert topic.app.id == app.id - assert topic.app == app - assert topic.app.org.id == org.id - assert topic.app.org == org - finally: - await org.delete() - await app.delete() - await topic.delete() - - @pytest.mark.asyncio - async def test_nested_save(self) -> None: - """Test that all the objects will persist if the proper flag is provided""" - - try: - tech_owner = User(id=str(uuid4()), username="owner1") - business_owner = User(id=str(uuid4()), username="owner2") - org = Org( - id=str(uuid4()), - name="fake org104", - slug="fake slug104", 
- business_owner=business_owner, - ) - - await org.save(include_nested_models=True) - - # Get the org and user that should have persisted - org_get = await Org.get(org.id) - user_get = await User.get(business_owner.id) - - assert org_get.business_owner.id == user_get.id - - # Now add the tech owner and save again. This time, org_get.business_owner is - # not resolved but it should still successfully persist everything - org_get.tech_owner = tech_owner - await org_get.save(include_nested_models=True) - - org_get = await Org.get(org_get.id) - user_get = await User.get(tech_owner.id) - - assert org_get.tech_owner.id == user_get.id - assert org_get.business_owner.id == business_owner.id - finally: - await org.delete() - await tech_owner.delete() - await business_owner.delete() - - @pytest.mark.asyncio - async def test_serialization(self) -> None: - """Test that we can handle nested models in serialization correctly""" - - try: - tech_owner = await User.create(id=str(uuid4()), username="owner1") - business_owner = await User.create(id=str(uuid4()), username="owner2") - org = await Org.create( - id=str(uuid4()), - name="fake org104", - slug="fake slug104", - tech_owner=tech_owner, - business_owner=business_owner, - ) - - expected_org_dict = { - "id": org.id, - "name": org.name, - "slug": org.slug, - "tech_owner_id": tech_owner.id, - "business_owner_id": business_owner.id, - "created_at": org.created_at, - "updated_at": org.updated_at, - } - - org_dict = org.to_dict() - assert org_dict == expected_org_dict - - # Confirm the serialization is the same regardless of if nested models are - # resolved - org = await Org.get(org.id) - org_dict = org.to_dict() - assert org_dict == expected_org_dict - finally: - await org.delete() - await tech_owner.delete() - await business_owner.delete() diff --git a/tests/functional/test_transactions.py b/tests/functional/test_transactions.py index cad35b5..20b5313 100644 --- a/tests/functional/test_transactions.py +++ 
b/tests/functional/test_transactions.py @@ -1,227 +1,262 @@ """Test that db transaction functionality works as expected""" -import asyncio -import os +import logging from uuid import uuid4 -from pydantic import BaseModel, Field +from databases import Database +from pydantic import Field import pytest -from pynocular.database_model import database_model, UUID_STR -from pynocular.db_util import create_new_database, create_table, drop_table -from pynocular.engines import DBEngine, DBInfo - -db_user_password = str(os.environ.get("DB_USER_PASSWORD")) -# DB to initially connect to so we can create a new db -existing_connection_string = str( - os.environ.get( - "EXISTING_DB_CONNECTION_STRING", - f"postgresql://postgres:{db_user_password}@localhost:5432/postgres?sslmode=disable", - ) -) +from pynocular.backends.context import get_backend, set_backend +from pynocular.backends.memory import MemoryDatabaseModelBackend +from pynocular.backends.sql import SQLDatabaseModelBackend +from pynocular.database_model import DatabaseModel, UUID_STR +from pynocular.util import create_table, drop_table, gather, transaction -test_db_name = str(os.environ.get("TEST_DB_NAME", "test_db")) -test_connection_string = str( - os.environ.get( - "TEST_DB_CONNECTION_STRING", - f"postgresql://postgres:{db_user_password}@localhost:5432/{test_db_name}?sslmode=disable", - ) -) -testdb = DBInfo(test_connection_string) +logger = logging.getLogger("pynocular") -@database_model("organizations", testdb) -class Org(BaseModel): +class Org(DatabaseModel, table_name="organizations"): """A test database model""" id: UUID_STR = Field(primary_key=True) name: str = Field(max_length=45) -class TestDatabaseTransactions: - """Test suite for testing transaction handling with DatabaseModels""" +@pytest.fixture() +async def postgres_backend(postgres_database: Database): + """Fixture that creates tables before yielding a Postgres backend - @classmethod - async def _setup_class(cls): - """Create the database and tables""" - 
try: - await create_new_database(existing_connection_string, test_db_name) - except Exception: - # If this fails, assume its already created - pass + Args: + postgres_database: Postgres database instance + + Returns: + postgres backend + + """ + await create_table(postgres_database, Org.table) + try: + yield SQLDatabaseModelBackend(postgres_database) + finally: + await drop_table(postgres_database, Org.table) - await create_table(testdb, Org._table) - conn = await (await DBEngine.get_engine(testdb)).acquire() - await conn.close() - - @classmethod - def setup_class(cls): - """Setup class function""" - loop = asyncio.get_event_loop() - loop.run_until_complete(cls._setup_class()) - - @classmethod - async def _teardown_class(cls): - """Drop database tables""" - await drop_table(testdb, Org._table) - - @classmethod - def teardown_class(cls): - """Teardown class function""" - loop = asyncio.get_event_loop() - loop.run_until_complete(cls._teardown_class()) - - @pytest.mark.asyncio - async def test_gathered_creates(self) -> None: - """Test that we can update the db multiple times in a gather under a single transaction""" + +@pytest.fixture() +async def memory_backend(): + """Fixture that yields an in-memory backend + + Returns: + in-memory backend + + """ + return MemoryDatabaseModelBackend() + + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_gathered_creates(backend) -> None: + """Test that we can update the db multiple times in a gather under a single transaction""" + with set_backend(backend): + async with get_backend().transaction(): + await gather( + Org.create(id=str(uuid4()), name="orgus borgus"), + Org.create(id=str(uuid4()), name="porgus orgus"), + ) + + all_orgs = await Org.select() + assert len(all_orgs) == 2 + + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + 
pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_gathered_updates_raise_error(backend) -> None: + """Test that an error in one update rolls back the other when gathered""" + with set_backend(backend): try: - async with await DBEngine.transaction(testdb, is_conditional=False): - await asyncio.gather( + async with get_backend().transaction(): + await gather( Org.create(id=str(uuid4()), name="orgus borgus"), - Org.create(id=str(uuid4()), name="porgus orgus"), + # The inputs aren't the right type which should throw an error + Org.create(id="blah", name=123), ) - all_orgs = await Org.select() - assert len(all_orgs) == 2 - finally: - await asyncio.gather(*[org.delete() for org in all_orgs]) - - @pytest.mark.asyncio - async def test_gathered_updates_raise_error(self) -> None: - """Test that an error in one update rolls back the other when gathered""" - try: - try: - async with await DBEngine.transaction(testdb, is_conditional=False): - await asyncio.gather( - Org.create(id=str(uuid4()), name="orgus borgus"), - # The inputs aren't the right type which should throw an error - Org.create(id="blah", name=123), - ) - except Exception: - pass - - all_orgs = await Org.select() - assert len(all_orgs) == 0 - finally: - await asyncio.gather(*[org.delete() for org in all_orgs]) - - @pytest.mark.asyncio - async def test_serial_updates(self) -> None: - """Test that we can update the db serially under a single transaction""" - try: - async with await DBEngine.transaction(testdb, is_conditional=False): - await Org.create(id=str(uuid4()), name="orgus borgus") - await Org.create(id=str(uuid4()), name="porgus orgus") + except Exception: + pass - all_orgs = await Org.select() - assert len(all_orgs) == 2 - finally: - await asyncio.gather(*[org.delete() for org in all_orgs]) + all_orgs = await Org.select() + assert len(all_orgs) == 0 - @pytest.mark.asyncio - async def test_serial_updates_raise_error(self) -> None: - """Test that an error in one update rolls 
back the other when run serially""" - try: - try: - async with await DBEngine.transaction(testdb, is_conditional=False): - await Org.create(id=str(uuid4()), name="orgus borgus") - await Org.create(id="blah", name=123) - except Exception: - pass - all_orgs = await Org.select() - assert len(all_orgs) == 0 - finally: - await asyncio.gather(*[org.delete() for org in all_orgs]) +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_serial_updates(backend) -> None: + """Test that we can update the db serially under a single transaction""" + with set_backend(backend): + async with get_backend().transaction(): + await Org.create(id=str(uuid4()), name="orgus borgus") + await Org.create(id=str(uuid4()), name="porgus orgus") + + all_orgs = await Org.select() + assert len(all_orgs) == 2 - @pytest.mark.asyncio - async def test_nested_updates(self) -> None: - """Test that we can perform nested update on the db under a single transaction""" + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_serial_updates_raise_error(backend) -> None: + """Test that an error in one update rolls back the other when run serially""" + with set_backend(backend): try: - async with await DBEngine.transaction(testdb, is_conditional=False): + async with get_backend().transaction(): await Org.create(id=str(uuid4()), name="orgus borgus") + await Org.create(id="blah", name=123) + except Exception: + pass + + all_orgs = await Org.select() + assert len(all_orgs) == 0 + + +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_nested_updates(backend) -> None: + """Test that we can perform nested update on the db under a single transaction""" + with 
set_backend(backend): + async with get_backend().transaction(): + await Org.create(id=str(uuid4()), name="orgus borgus") + + async with get_backend().transaction(): + await Org.create(id=str(uuid4()), name="porgus orgus") - async with await DBEngine.transaction(testdb, is_conditional=False): - await Org.create(id=str(uuid4()), name="porgus orgus") + all_orgs = await Org.select() + assert len(all_orgs) == 2 - all_orgs = await Org.select() - assert len(all_orgs) == 2 - finally: - await asyncio.gather(*[org.delete() for org in all_orgs]) - @pytest.mark.asyncio - async def test_nested_updates_raise_error(self) -> None: - """Test that an error in one update rolls back the other when it is nested""" +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_nested_updates_raise_error(backend) -> None: + """Test that an error in one update rolls back the other when it is nested""" + with set_backend(backend): try: - try: - async with await DBEngine.transaction(testdb, is_conditional=False): - await Org.create(id=str(uuid4()), name="orgus borgus") + async with get_backend().transaction(): + await Org.create(id=str(uuid4()), name="orgus borgus") - async with await DBEngine.transaction(testdb, is_conditional=False): - await Org.create(id="blah", name=123) + async with get_backend().transaction(): + await Org.create(id="blah", name=123) + + except Exception: + pass - except Exception: - pass + all_orgs = await Org.select() + assert len(all_orgs) == 0 - all_orgs = await Org.select() - assert len(all_orgs) == 0 - finally: - await asyncio.gather(*[org.delete() for org in all_orgs]) - @pytest.mark.asyncio - async def test_nested_conditional_updates_raise_error(self) -> None: - """Test that an error in one update rolls back the other even if its a conditional transaction""" +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + 
pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_nested_conditional_updates_raise_error(backend) -> None: + """Test that an error in one update rolls back the other even if its a conditional transaction""" + with set_backend(backend): try: - try: - async with await DBEngine.transaction(testdb, is_conditional=False): - await Org.create(id=str(uuid4()), name="orgus borgus") + async with get_backend().transaction(): + await Org.create(id=str(uuid4()), name="orgus borgus") - async with await DBEngine.transaction(testdb, is_conditional=True): - await Org.create(id="blah", name=123) + async with get_backend().transaction(): + await Org.create(id="blah", name=123) - except Exception: - pass + except Exception: + pass + + all_orgs = await Org.select() + assert len(all_orgs) == 0 - all_orgs = await Org.select() - assert len(all_orgs) == 0 - finally: - await asyncio.gather(*[org.delete() for org in all_orgs]) - @pytest.mark.asyncio - async def test_open_transaction_decorator(self) -> None: - """Test that the open_transaction decorator will execute everything in a transaction""" +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_open_transaction_decorator(backend) -> None: + """Test that the open_transaction decorator will execute everything in a transaction""" + with set_backend(backend): - @DBEngine.open_transaction(testdb) + @transaction async def write_than_raise_error(): await Org.create(id=str(uuid4()), name="orgus borgus") await Org.create(id=str(uuid4()), name="orgus porgus") try: - try: - await write_than_raise_error() - except Exception: - pass + await write_than_raise_error() + except Exception: + pass - all_orgs = await Org.select() - assert len(all_orgs) == 2 - finally: - await asyncio.gather(*[org.delete() for org in all_orgs]) + all_orgs = await Org.select() + assert len(all_orgs) == 2 - 
@pytest.mark.asyncio - async def test_open_transaction_decorator_rolls_back(self) -> None: - """Test that the open_transaction decorator will roll back everything in the function""" - @DBEngine.open_transaction(testdb) +@pytest.mark.parametrize( + "backend", + [ + pytest.lazy_fixture("postgres_backend"), + pytest.lazy_fixture("memory_backend"), + ], +) +@pytest.mark.asyncio +async def test_open_transaction_decorator_rolls_back(backend) -> None: + """Test that the open_transaction decorator will roll back everything in the function""" + with set_backend(backend): + + @transaction async def write_than_raise_error(): await Org.create(id=str(uuid4()), name="orgus borgus") # This create will fail and the decorator should roll back the top one await Org.create(id="blah", name=123) try: - try: - await write_than_raise_error() - except Exception: - pass - - all_orgs = await Org.select() - assert len(all_orgs) == 0 - finally: - await asyncio.gather(*[org.delete() for org in all_orgs]) + await write_than_raise_error() + except Exception: + pass + + all_orgs = await Org.select() + assert len(all_orgs) == 0 diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_evaluate_column_element.py b/tests/unit/test_evaluate_column_element.py new file mode 100644 index 0000000..404a2a9 --- /dev/null +++ b/tests/unit/test_evaluate_column_element.py @@ -0,0 +1,37 @@ +"""Contains unit tests for the evaluate_column_element module""" + +from pydantic import Field +from sqlalchemy import or_ + +from pynocular.database_model import DatabaseModel +from pynocular.evaluate_column_element import evaluate_column_element +from pynocular.util import UUID_STR + + +class Org(DatabaseModel, table_name="organizations"): + """Model that represents the `organizations` table""" + + id: UUID_STR = Field(primary_key=True) + name: str = Field(max_length=45) + slug: str = Field(max_length=45) + flag1: bool = Field(default=True) + 
flag2: bool = Field(default=True) + flag3: bool = Field(default=True) + + +def test_evaluate_column_element__neq(): + """Should handle the is_not operator""" + assert not evaluate_column_element(Org.columns.name != "foo", {"name": "foo"}) + + +def test_evaluate_column_element__n_ary_or(): + """Should handle an OR with multiple arguments""" + assert evaluate_column_element( + or_(Org.columns.flag1, Org.columns.flag2, Org.columns.flag3), + {"flag1": False, "flag2": False, "flag3": True}, + ) + + +def test_evaluate_column_element__not(): + """Should handle a NOT operator""" + assert not evaluate_column_element(~Org.columns.flag1, {"flag1": True}) diff --git a/tests/unit/test_patch_models.py b/tests/unit/test_patch_models.py deleted file mode 100644 index 55bddc7..0000000 --- a/tests/unit/test_patch_models.py +++ /dev/null @@ -1,189 +0,0 @@ -"""Tests for patch_database_model context manager""" -from typing import Optional -from uuid import uuid4 - -from pydantic import BaseModel, Field -import pytest -from sqlalchemy import or_ - -from pynocular.database_model import database_model, nested_model, UUID_STR -from pynocular.engines import DBInfo -from pynocular.patch_models import _evaluate_column_element, patch_database_model - -# With the `patch_database_model` we don't need a database connection -test_connection_string = "fake connection string" -testdb = DBInfo(test_connection_string) -name = "boo" - - -@database_model("users", testdb) -class User(BaseModel): - """Model that represents the `users` table""" - - id: UUID_STR = Field(primary_key=True) - username: str = Field(max_length=100) - - -@database_model("organizations", testdb) -class Org(BaseModel): - """Model that represents the `organizations` table""" - - id: UUID_STR = Field(primary_key=True) - name: str = Field(max_length=45) - slug: str = Field(max_length=45) - tech_owner: Optional[ - nested_model(User, reference_field="tech_owner_id") # noqa F821 - ] - business_owner: Optional[ - nested_model(User, 
reference_field="business_owner_id") # noqa F821 - ] - flag1: bool = Field(default=True) - flag2: bool = Field(default=True) - flag3: bool = Field(default=True) - - -class TestPatchDatabaseModel: - """Test class for patch_database_model""" - - @pytest.mark.asyncio - async def test_patch_database_model_without_models(self) -> None: - """Test that we can use `patch_database_model` without providing models""" - orgs = [ - Org(id=str(uuid4()), name="orgus borgus", slug="orgus_borgus"), - Org(id=str(uuid4()), name="orgus borgus2", slug="orgus_borgus"), - ] - - with patch_database_model(Org): - await Org.create_list(orgs) - # Also create one org through Org.create() - await Org.create( - id=str(uuid4()), name="nonorgus borgus", slug="orgus_borgus" - ) - all_orgs = await Org.select() - subset_orgs = await Org.get_list(name=orgs[0].name) - assert len(subset_orgs) <= len(all_orgs) - assert orgs[0] == subset_orgs[0] - - @pytest.mark.asyncio - async def test_patch_database_model_with_models(self) -> None: - """Test that we can use `patch_database_model` with models""" - orgs = [ - Org(id=str(uuid4()), name="orgus borgus", slug="orgus_borgus"), - Org(id=str(uuid4()), name="orgus borgus2", slug="orgus_borgus"), - Org(id=str(uuid4()), name="nonorgus borgus", slug="orgus_borgus"), - ] - - with patch_database_model(Org, models=orgs): - org = (await Org.get_list(name=orgs[0].name))[0] - org.name = "new test name" - await org.save() - org_get = await Org.get(org.id) - assert org_get.name == "new test name" - - @pytest.mark.asyncio - async def test_patch_database_model_with_nested_models(self) -> None: - """Test that we can use `patch_database_model` with nested models""" - users = [ - User(id=str(uuid4()), username="Bob"), - User(id=str(uuid4()), username="Sally"), - ] - orgs = [ - Org( - id=str(uuid4()), - name="orgus borgus", - slug="orgus_borgus", - tech_owner=users[0], - business_owner=users[1], - ), - Org(id=str(uuid4()), name="orgus borgus2", slug="orgus_borgus"), - 
Org(id=str(uuid4()), name="nonorgus borgus", slug="orgus_borgus"), - ] - - with patch_database_model(Org, models=orgs), patch_database_model( - User, models=users - ): - org = (await Org.get_list(name=orgs[0].name))[0] - org.name = "new test name" - users[0].username = "bberkley" - await org.save(include_nested_models=True) - org_get = await Org.get_with_refs(org.id) - assert org_get.name == "new test name" - assert org_get.tech_owner.username == "bberkley" - - @pytest.mark.asyncio - async def test_patch_database_model_with_delete(self) -> None: - """Test that we can use `delete` on a patched db model""" - orgs = [ - Org(id=str(uuid4()), name="orgus borgus", slug="orgus_borgus"), - Org(id=str(uuid4()), name="orgus borgus2", slug="orgus_borgus"), - Org(id=str(uuid4()), name="nonorgus borgus", slug="orgus_borgus"), - ] - - with patch_database_model(Org, models=orgs): - db_orgs = await Org.get_list() - assert len(db_orgs) == 3 - await orgs[0].delete() - db_orgs = await Org.get_list() - assert len(db_orgs) == 2 - - # Confirm the correct orgs are left - sorted_orgs = sorted(orgs[1:3], key=lambda x: x.id) - sorted_db_orgs = sorted(db_orgs, key=lambda x: x.id) - assert sorted_orgs == sorted_db_orgs - - @pytest.mark.asyncio - async def test_patch_database_model_with_delete_records(self) -> None: - """Test that we can use `delete_records` on a patched db model""" - orgs = [ - Org(id=str(uuid4()), name="orgus borgus", slug="orgus_borgus"), - Org(id=str(uuid4()), name="orgus borgus2", slug="orgus_borgus2"), - Org(id=str(uuid4()), name="nonorgus borgus", slug="nonorgus_borgus"), - ] - - with patch_database_model(Org, models=orgs): - db_orgs = await Org.get_list() - assert len(db_orgs) == 3 - await Org.delete_records(slug=["orgus_borgus2", "nonorgus_borgus"]) - db_orgs = await Org.get_list() - assert len(db_orgs) == 1 - - # Confirm the correct org is left - assert orgs[0] == db_orgs[0] - - @pytest.mark.asyncio - async def test_patch_database_model_with_update(self) -> None: - 
"""Test that we can use `update` to update multiple models""" - orgs = [ - Org(id=str(uuid4()), name="orgus borgus", slug="orgus_borgus"), - Org(id=str(uuid4()), name="orgus borgus", slug="orgus_borgus"), - Org(id=str(uuid4()), name="nonorgus borgus", slug="nonorgus_borgus"), - ] - - with patch_database_model(Org, models=orgs): - db_orgs = await Org.get_list() - assert len(db_orgs) == 3 - updated = await Org.update( - [Org.columns.name == "orgus borgus"], - values={"name": "foo", "slug": "bar"}, - ) - assert {org.id for org in updated} == {org.id for org in orgs[:2]} - assert all(org.name == "foo" and org.slug == "bar" for org in updated) - - -class TestEvaluateColumnElement: - """Test class for the _evaluate_column_element function""" - - def test_evaluate_column_element__neq(self) -> None: - """Should handle the is_not operator""" - assert not _evaluate_column_element(Org.columns.name != "foo", {"name": "foo"}) - - def test_evaluate_column_element__n_ary_or(self) -> None: - """Should handle an OR with multiple arguments""" - assert _evaluate_column_element( - or_(Org.columns.flag1, Org.columns.flag2, Org.columns.flag3), - {"flag1": False, "flag2": False, "flag3": True}, - ) - - def test_evaluate_column_element__not(self) -> None: - """Should handle a NOT operator""" - assert not _evaluate_column_element(~Org.columns.flag1, {"flag1": True}) diff --git a/tests/unit/test_task_context_connection.py b/tests/unit/test_task_context_connection.py deleted file mode 100644 index 6bbcb9d..0000000 --- a/tests/unit/test_task_context_connection.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Test for TaskContextConnection""" -from unittest.mock import Mock - -import pytest - -from pynocular.aiopg_transaction import LockedConnection, TaskContextConnection - - -@pytest.fixture() -def locked_connection(): - """Return a locked connection""" - return LockedConnection(Mock()) - - -@pytest.mark.asyncio() -async def test_task_context_connection_set_clear(locked_connection) -> None: - """Test 
that we can set and clear the connection""" - - context_conn = TaskContextConnection("key1") - context_conn.set(locked_connection) - test_conn = context_conn.get() - assert test_conn == locked_connection - - context_conn.clear() - # No connection should exist now - test_conn = context_conn.get() - assert test_conn is None - - -@pytest.mark.asyncio() -async def test_task_context_connection_shared(locked_connection) -> None: - """Test that we can share context across instances""" - - context_conn = TaskContextConnection("key1") - context_conn.set(locked_connection) - test_conn = context_conn.get() - assert test_conn == locked_connection - - # Create another instance that should share the connection - context_conn2 = TaskContextConnection("key1") - test_conn2 = context_conn2.get() - assert test_conn2 == locked_connection - - context_conn.clear() - # No connection should exist on either connection - test_conn = context_conn.get() - assert test_conn is None - test_conn2 = context_conn2.get() - assert test_conn2 is None