diff --git a/.circleci/commands/restore-test-cache.yml b/.circleci/commands/restore-test-cache.yml
new file mode 100644
index 0000000..cb11249
--- /dev/null
+++ b/.circleci/commands/restore-test-cache.yml
@@ -0,0 +1,11 @@
+description: Restores the build-test cache for pyenv, poetry, and pre-commit
+steps:
+  - restore_cache:
+      keys:
+        - v2-pyenv-{{ arch }}-pynocular-{{ checksum "poetry.lock" }}
+  - restore_cache:
+      keys:
+        - v2-poetry-{{ arch }}-pynocular-{{ checksum "poetry.lock" }}
+  - restore_cache:
+      keys:
+        - v2-precommit-{{ arch }}-pynocular-{{ checksum ".pre-commit-config.yaml" }}
diff --git a/.circleci/commands/save-test-cache.yml b/.circleci/commands/save-test-cache.yml
new file mode 100644
index 0000000..150d73d
--- /dev/null
+++ b/.circleci/commands/save-test-cache.yml
@@ -0,0 +1,14 @@
+description: Stores the build-test cache for pyenv, poetry, and pre-commit
+steps:
+  - save_cache:
+      key: v2-pyenv-{{ arch }}-pynocular-{{ checksum "poetry.lock" }}
+      paths:
+        - "~/.pyenv"
+  - save_cache:
+      key: v2-poetry-{{ arch }}-pynocular-{{ checksum "poetry.lock" }}
+      paths:
+        - "~/.cache/pypoetry"
+  - save_cache:
+      key: v2-precommit-{{ arch }}-pynocular-{{ checksum ".pre-commit-config.yaml" }}
+      paths:
+        - "~/.cache/pre-commit"
diff --git a/.circleci/commands/store-test-artifacts.yml b/.circleci/commands/store-test-artifacts.yml
new file mode 100644
index 0000000..c46b2a3
--- /dev/null
+++ b/.circleci/commands/store-test-artifacts.yml
@@ -0,0 +1,22 @@
+description: Store test artifacts in CCI workflow run
+parameters:
+  artifacts_path:
+    description: The absolute path to the artifacts stored as an env var
+    type: string
+  artifacts_storage_dir:
+    description: The directory in /tmp where we want to store the artifacts
+    type: string
+    default: << parameters.artifacts_path >>
+  export_test_results:
+    description: Whether or not to upload the artifacts as Test Summary metadata
+    type: boolean
+    default: false
+steps:
+  - store_artifacts:
+      path: << parameters.artifacts_path >>
+      destination: << parameters.artifacts_storage_dir >>
+  - when:
+      condition: << parameters.export_test_results >>
+      steps:
+        - store_test_results:
+            path: << parameters.artifacts_path >>
diff --git a/.circleci/config.yml b/.circleci/config.yml
index a34144b..1a183f8 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -213,10 +213,60 @@ commands:
             pip install poetry
             poetry config pypi-token.pypi "$POETRY_PYPI_TOKEN_PYPI"
           name: Configure Poetry
+  restore-test-cache:
+    description: Restores the build-test cache for pyenv, poetry, and pre-commit
+    steps:
+      - restore_cache:
+          keys:
+            - v2-pyenv-{{ arch }}-pynocular-{{ checksum "poetry.lock" }}
+      - restore_cache:
+          keys:
+            - v2-poetry-{{ arch }}-pynocular-{{ checksum "poetry.lock" }}
+      - restore_cache:
+          keys:
+            - v2-precommit-{{ arch }}-pynocular-{{ checksum ".pre-commit-config.yaml" }}
+  save-test-cache:
+    description: Stores the build-test cache for pyenv, poetry, and pre-commit
+    steps:
+      - save_cache:
+          key: v2-pyenv-{{ arch }}-pynocular-{{ checksum "poetry.lock" }}
+          paths:
+            - ~/.pyenv
+      - save_cache:
+          key: v2-poetry-{{ arch }}-pynocular-{{ checksum "poetry.lock" }}
+          paths:
+            - ~/.cache/pypoetry
+      - save_cache:
+          key: v2-precommit-{{ arch }}-pynocular-{{ checksum ".pre-commit-config.yaml" }}
+          paths:
+            - ~/.cache/pre-commit
+  store-test-artifacts:
+    description: Store test artifacts in CCI workflow run
+    parameters:
+      artifacts_path:
+        description: The absolute path to the artifacts stored as an env var
+        type: string
+      artifacts_storage_dir:
+        default: << parameters.artifacts_path >>
+        description: The directory in /tmp where we want to store the artifacts
+        type: string
+      export_test_results:
+        default: false
+        description: Whether or not to upload the artifacts as Test Summary metadata
+        type: boolean
+    steps:
+      - store_artifacts:
+          destination: << parameters.artifacts_storage_dir >>
+          path: << parameters.artifacts_path >>
+      - when:
+          condition: << parameters.export_test_results >>
+          steps:
+            - store_test_results:
+                path: << parameters.artifacts_path >>
 executors:
   default:
     docker:
-      - image: cimg/python:3.6
+      - image: cimg/python:3.9
     resource_class: small
   python-3_6-medium:
     docker:
@@ -291,6 +341,7 @@ jobs:
       - ghpr/build-prospective-branch
       - aws-cli/install
       - circleci-cli/install
+      - restore-test-cache
       - poetry-configure
       - bump-branch-version
       - run:
@@ -310,11 +361,19 @@
             --origin "origin/${CIRCLE_BRANCH}" \
             --show-diff-on-failure
           name: Run commit hooks
-      - run: poetry run pytest --junit-xml .junit/unit/results.xml
-      - store_test_results:
-          path: .junit
-      - ghpr/post-pr-comment:
-          comment: Tests failed!
+      - run:
+          command: |
+            mkdir -p test-results/unit
+            poetry run pytest --junit-xml test-results/unit/results.xml
+          name: Run tests
+      - store-test-artifacts:
+          artifacts_path: test-results
+          export_test_results: true
+      - save-test-cache
+      - ghpr/slack-pr-author:
+          color: '#fcaaa3'
+          get_slack_user_by: meseeks
+          message: ':ci-fail: Tests failed'
           when: on_fail
   poetry-publish:
     description: Publish a release of the project
@@ -328,6 +387,14 @@
       - run: poetry install
      - run: poetry build
      - run: poetry publish
+  send-slack-on-pr-success:
+    description: Send a Slack message to the PR author on PR workflow success.
+    executor: default
+    steps:
+      - ghpr/slack-pr-author:
+          color: '#4cb79c'
+          get_slack_user_by: meseeks
+          message: ':ci-success: PR tests have passed!'
 orbs:
   aws-cli: circleci/aws-cli@2.0.3
   circleci-cli: circleci/circleci-cli@0.1.9
diff --git a/.circleci/executors/default.yml b/.circleci/executors/default.yml
index 7bc2539..6287e71 100644
--- a/.circleci/executors/default.yml
+++ b/.circleci/executors/default.yml
@@ -1,3 +1,3 @@
 docker:
-  - image: cimg/python:3.6
+  - image: cimg/python:3.9
 resource_class: small
diff --git a/.circleci/jobs/build-test.yml b/.circleci/jobs/build-test.yml
index 8254356..ffb244e 100644
--- a/.circleci/jobs/build-test.yml
+++ b/.circleci/jobs/build-test.yml
@@ -13,6 +13,7 @@ steps:
   - ghpr/build-prospective-branch
   - aws-cli/install
   - circleci-cli/install
+  - restore-test-cache
   - poetry-configure
   - bump-branch-version
   - run:
@@ -32,9 +33,17 @@
         --source "origin/${GITHUB_PR_BASE_BRANCH}" \
         --origin "origin/${CIRCLE_BRANCH}" \
         --show-diff-on-failure
-  - run: poetry run pytest --junit-xml .junit/unit/results.xml
-  - store_test_results:
-      path: .junit
-  - ghpr/post-pr-comment:
-      comment: Tests failed!
+  - run:
+      name: Run tests
+      command: |
+        mkdir -p test-results/unit
+        poetry run pytest --junit-xml test-results/unit/results.xml
+  - store-test-artifacts:
+      artifacts_path: test-results
+      export_test_results: true
+  - save-test-cache
+  - ghpr/slack-pr-author:
       when: on_fail
+      message: ':ci-fail: Tests failed'
+      get_slack_user_by: meseeks
+      color: "#fcaaa3"
diff --git a/.circleci/jobs/send-slack-on-pr-success.yml b/.circleci/jobs/send-slack-on-pr-success.yml
new file mode 100644
index 0000000..fc4798b
--- /dev/null
+++ b/.circleci/jobs/send-slack-on-pr-success.yml
@@ -0,0 +1,7 @@
+description: Send a Slack message to the PR author on PR workflow success.
+executor: default
+steps:
+  - ghpr/slack-pr-author:
+      message: ':ci-success: PR tests have passed!'
+      get_slack_user_by: meseeks
+      color: "#4cb79c"
diff --git a/.cruft.json b/.cruft.json
index f984acf..e85acc5 100644
--- a/.cruft.json
+++ b/.cruft.json
@@ -1,6 +1,6 @@
 {
   "template": "https://github.com/NarrativeScience/cookiecutter-python-lib",
-  "commit": "c0344678b29ccb03a7775bed55a202a8453161ee",
+  "commit": "9e14f244710d27b6ddf3f6a88e107488b7d3208e",
   "context": {
     "cookiecutter": {
       "package_name": "pynocular",
diff --git a/.githooks/pre-commit-scripts/cruft-check.sh b/.githooks/pre-commit-scripts/cruft-check.sh
new file mode 100755
index 0000000..e0d5453
--- /dev/null
+++ b/.githooks/pre-commit-scripts/cruft-check.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Run the cruft check and if it fails prompt user to run the manual update
+cruft check
+if [[ $? -ne 0 ]]; then
+  echo "This project's cruft is not up to date."
+  echo "Please run 'cruft update' and follow the prompts to update this repository."
+fi
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ffaed3e..164f5df 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -49,21 +49,21 @@ repos:
         entry: isort
         language: python
         types: [file, python]
-        additional_dependencies: [isort==4.3.16]
+        additional_dependencies: [isort==5.9.3]

      - id: black
        name: Format Python (black)
        entry: black
        language: python
        types: [file, python]
-        additional_dependencies: [black==21.7b0]
+        additional_dependencies: [black==21.9b0]

      - id: pydocstyle
        name: Lint Python docstrings (pydocstyle)
        entry: pydocstyle
        language: python
        types: [file, python]
-        additional_dependencies: [pydocstyle==5.0.2]
+        additional_dependencies: [pydocstyle==6.1.1]
        exclude: >
          (?x)^(
            .*__init__.py$|
@@ -76,7 +76,7 @@
        language: python
        types: [file, python]
        additional_dependencies:
-          - flake8==3.7.9
+          - flake8==3.9.2
          - "flake8-import-order<0.19,>=0.18"
          - flake8-print>=3.1.4,<4

@@ -85,3 +85,11 @@
        entry: .githooks/pre-commit-scripts/circleci.sh
        language: script
        files: '^\.circleci/'
+
+      - id: cruft-check
+        name: Check project's Cruft (Cruft)
+        entry: .githooks/pre-commit-scripts/cruft-check.sh
+        language: python
+        additional_dependencies: [cruft==2.9.0]
+        always_run: true
+        stages: [push]
diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 0000000..46810b7
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1,2 @@
+# See: https://help.github.com/en/articles/about-code-owners
+* @ssantana-ns @jdrake
diff --git a/README.md b/README.md
index 6114529..97727a0 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# pynocular
+# Pynocular

 [![](https://img.shields.io/pypi/v/pynocular.svg)](https://pypi.org/pypi/pynocular/)
 [![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
@@ -27,7 +27,7 @@ Table of Contents:

 ## Installation

-pynocular requires Python 3.6 or above.
+Pynocular requires Python 3.6 or above.
 ```bash
 pip install pynocular
 ```
@@ -51,11 +51,15 @@ from pynocular.engines import DatabaseType, DBInfo
 # Example below shows how to connect to a locally-running Postgres database
 connection_string = f"postgresql://{db_user_name}:{db_user_password}@localhost:5432/{db_name}?sslmode=disable"
 )
-db_info = DBInfo(DatabaseType.aiopg_engine, connection_string)
+db_info = DBInfo(connection_string)
 ```

-Pynocular supports connecting to your database through two different asyncio engines; aiopg and asyncpgsa.
-You can pick which one you want to use by passing the correct `DatabaseType` enum value into `DBInfo`.
+Pynocular uses the asynchronous engine provided by aiopg to connect to your database. You can choose to use a
+different engine by providing a different engine_type value to `DBInfo`.
+```python
+db_info = DBInfo(connection_string, engine_type=DatabaseType.asyncpg_engine)
+```
+All other engine options are experimental and do not support all of the functionality Pynocular provides.

 #### Object Management

@@ -404,7 +408,7 @@ await create_table(db_info, Org._table)
 Pynocular comes with tooling to write unit tests against your DatabaseModels, giving
 you the ability to test your business logic without the extra work and latency
 involved in managing a database. All you have to do is use the `patch_database_model` context
-manager provided in pynocular.
+manager provided in Pynocular.

 ```python
 from pynocular.patch_models import patch_database_model
@@ -455,7 +459,19 @@ with patch_database_model(Org, models=orgs), patch_database_model(

 ## Development

-To develop pynocular, install dependencies and enable the pre-commit hook:
+To develop Pynocular, install dependencies and enable the pre-commit hook.
+
+The example below is using Python 3.9 but you can replace this with any supported version of Python.
+
+Install Python 3.9 and activate it in your shell.
+
+```bash
+sudo yum install libffi-devel # Needed for ctypes to install poetry
+pyenv install 3.9.7
+pyenv shell 3.9.7
+```
+
+Install dependencies and enable the pre-commit hook.

 ```bash
 pip install pre-commit poetry
@@ -463,7 +479,7 @@ poetry install
 pre-commit install
 ```

-To run tests:
+Run tests to confirm everything is installed correctly.

 ```bash
 poetry run pytest
diff --git a/pull_request_template.md b/pull_request_template.md
new file mode 100644
index 0000000..d124f71
--- /dev/null
+++ b/pull_request_template.md
@@ -0,0 +1,19 @@
+# Overview of changes
+*Describe the big picture of your changes here to communicate to the maintainers why we should accept this pull request.*
+
+*Please indicate the type of change: bug(fix), new feature, tests, config/infrastructure, docs, etc.*
+*Be sure to link to that issue and place it in the name of the PR.*
+
+## For software test
+*Describe any pertinent instructions to get this running or vulnerable test areas.*
+
+## Additional details
+*Additional notes, things that were changed, areas of concern, etc.*
+
+## Important Reminders
+- We like PRs to be less than 300 LOC
+- Run the pre-commit for both hook stages! (pre-commit and pre-push)
+- Add tests! Do not forget that we use pytest here at Narrative Science. 80% Code Coverage recommended.
+- Use proper mocks in tests. Do not write tests without mocks that test external libraries or external endpoints.
+- Add necessary documentation to README's
+- Notion page with our detailed [PR guidelines](https://www.notion.so/narsci/Code-Reviews-fc8109101b2144739a7d9cde1fe2248b)
diff --git a/pynocular/__init__.py b/pynocular/__init__.py
index 17bcd53..56169e6 100644
--- a/pynocular/__init__.py
+++ b/pynocular/__init__.py
@@ -1,5 +1,5 @@
 """Lightweight ORM that lets you query your database using Pydantic models and asyncio"""
-__version__ = "0.13.0"
+__version__ = "0.14.0"

 from pynocular.engines import DatabaseType, DBInfo

diff --git a/pynocular/db_util.py b/pynocular/db_util.py
index 06059ea..7ad1ebb 100644
--- a/pynocular/db_util.py
+++ b/pynocular/db_util.py
@@ -7,7 +7,7 @@
 import sqlalchemy as sa
 from sqlalchemy.sql.ddl import CreateTable

-from pynocular.engines import DatabaseType, DBEngine, DBInfo
+from pynocular.engines import DBEngine, DBInfo
 from pynocular.exceptions import InvalidSqlIdentifierErr

 logger = logging.getLogger()
@@ -43,7 +43,7 @@ async def create_new_database(connection_string: str, db_name: str) -> None:
         db_name: the name of the database to create

     """
-    existing_db = DBInfo(DatabaseType.aiopg_engine, connection_string)
+    existing_db = DBInfo(connection_string)
     conn = await (await DBEngine.get_engine(existing_db)).acquire()
     # End existing commit
     await conn.execute("commit")
diff --git a/pynocular/engines.py b/pynocular/engines.py
index 980b52b..8c93d12 100644
--- a/pynocular/engines.py
+++ b/pynocular/engines.py
@@ -218,9 +218,9 @@ class DatabaseType(Enum):
 class DBInfo(NamedTuple):
     """Data class for a database's connection information"""

-    engine_type: DatabaseType
     connection_string: str
     enable_hstore: bool = True
+    engine_type: DatabaseType = DatabaseType.aiopg_engine


 class DBEngine:
diff --git a/pyproject.toml b/pyproject.toml
index 01d6cc4..6d93017 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pynocular"
-version = "0.13.0"
+version = "0.14.0"
 description = "Lightweight ORM that lets you query your database using Pydantic models and asyncio"
 authors = [
     "RJ Santana ",
diff --git a/setup.cfg b/setup.cfg
index 1cc771f..005105e 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -8,7 +8,6 @@ combine_as_imports=true
 default_section=THIRDPARTY
 force_sort_within_sections=true
 include_trailing_comma=true
-known_standard_library=typing
 known_first_party=
     pynocular
 line_length=88
diff --git a/tests/functional/test_database_model.py b/tests/functional/test_database_model.py
index c02b1d1..8e2f8f0 100644
--- a/tests/functional/test_database_model.py
+++ b/tests/functional/test_database_model.py
@@ -17,7 +17,7 @@
     create_table,
     drop_table,
 )
-from pynocular.engines import DatabaseType, DBEngine, DBInfo
+from pynocular.engines import DBEngine, DBInfo
 from pynocular.exceptions import DatabaseModelMissingField, DatabaseRecordNotFound

 db_user_password = str(os.environ.get("DB_USER_PASSWORD"))
@@ -36,7 +36,7 @@
         f"postgresql://postgres:{db_user_password}@localhost:5432/{test_db_name}?sslmode=disable",
     )
 )
-testdb = DBInfo(DatabaseType.aiopg_engine, test_connection_string)
+testdb = DBInfo(test_connection_string)


 @database_model("organizations", testdb)
diff --git a/tests/functional/test_nested_database_model.py b/tests/functional/test_nested_database_model.py
index 9de62b2..48ad8b8 100644
--- a/tests/functional/test_nested_database_model.py
+++ b/tests/functional/test_nested_database_model.py
@@ -15,7 +15,7 @@
     create_table,
     drop_table,
 )
-from pynocular.engines import DatabaseType, DBEngine, DBInfo
+from pynocular.engines import DBEngine, DBInfo
 from pynocular.exceptions import NestedDatabaseModelNotResolved

 db_user_password = str(os.environ.get("DB_USER_PASSWORD"))
@@ -34,7 +34,7 @@
         f"postgresql://postgres:{db_user_password}@localhost:5432/{test_db_name}?sslmode=disable",
     )
 )
-testdb = DBInfo(DatabaseType.aiopg_engine, test_connection_string)
+testdb = DBInfo(test_connection_string)


 @database_model("users", testdb)
@@ -52,8 +52,12 @@ class Org(BaseModel):
     id: UUID_STR = Field(primary_key=True)
     name: str = Field(max_length=45)
     slug: str = Field(max_length=45)
-    tech_owner: Optional[nested_model(User, reference_field="tech_owner_id")]
-    business_owner: Optional[nested_model(User, reference_field="business_owner_id")]
+    tech_owner: Optional[
+        nested_model(User, reference_field="tech_owner_id")  # noqa F821
+    ]
+    business_owner: Optional[
+        nested_model(User, reference_field="business_owner_id")  # noqa F821
+    ]
     created_at: Optional[datetime] = Field(fetch_on_create=True)
     updated_at: Optional[datetime] = Field(fetch_on_update=True)

@@ -65,7 +69,7 @@ class App(BaseModel):

     id: Optional[UUID_STR] = Field(primary_key=True, fetch_on_create=True)
     name: str = Field(max_length=45)
-    org: nested_model(Org, reference_field="organization_id")
+    org: nested_model(Org, reference_field="organization_id")  # noqa F821
     slug: str = Field(max_length=45)


@@ -74,7 +78,7 @@ class Topic(BaseModel):
     """Model that represents the `topics` table"""

     id: UUID_STR = Field(primary_key=True)
-    app: nested_model(App, reference_field="app_id")
+    app: nested_model(App, reference_field="app_id")  # noqa F821
     name: str = Field(max_length=45)


diff --git a/tests/functional/test_transactions.py b/tests/functional/test_transactions.py
index 41c8a2f..68bed78 100644
--- a/tests/functional/test_transactions.py
+++ b/tests/functional/test_transactions.py
@@ -8,7 +8,7 @@

 from pynocular.database_model import database_model, UUID_STR
 from pynocular.db_util import create_new_database, create_table, drop_table
-from pynocular.engines import DatabaseType, DBEngine, DBInfo
+from pynocular.engines import DBEngine, DBInfo

 db_user_password = str(os.environ.get("DB_USER_PASSWORD"))
 # DB to initially connect to so we can create a new db
@@ -26,7 +26,7 @@
         f"postgresql://postgres:{db_user_password}@localhost:5432/{test_db_name}?sslmode=disable",
     )
 )
-testdb = DBInfo(DatabaseType.aiopg_engine, test_connection_string)
+testdb = DBInfo(test_connection_string)


 @database_model("organizations", testdb)
diff --git a/tests/unit/test_db_util.py b/tests/unit/test_db_util.py
index 984226b..c34200a 100644
--- a/tests/unit/test_db_util.py
+++ b/tests/unit/test_db_util.py
@@ -4,7 +4,7 @@
 import pytest

 from pynocular.db_util import is_database_available
-from pynocular.engines import DatabaseType, DBInfo
+from pynocular.engines import DBInfo

 db_user_password = str(os.environ.get("DB_USER_PASSWORD"))
 test_db_name = str(os.environ.get("TEST_DB_NAME", "test_db"))
@@ -14,7 +14,7 @@
         f"postgresql://postgres:{db_user_password}@localhost:5432/{test_db_name}?sslmode=disable",
     )
 )
-test_db = DBInfo(DatabaseType.aiopg_engine, test_connection_string)
+test_db = DBInfo(test_connection_string)


 class TestDBUtil:
@@ -30,6 +30,6 @@ async def test_is_database_available(self) -> None:
     async def test_is_database_not_available(self) -> None:
         """Test db connection unavailable"""
         invalid_connection_string = f"postgresql://postgres:{db_user_password}@localhost:5432/INVALID?sslmode=disable"
-        non_existing_db = DBInfo(DatabaseType.aiopg_engine, invalid_connection_string)
+        non_existing_db = DBInfo(invalid_connection_string)
         available = await is_database_available(non_existing_db)
         assert available is False
diff --git a/tests/unit/test_patch_models.py b/tests/unit/test_patch_models.py
index def8737..d34daf8 100644
--- a/tests/unit/test_patch_models.py
+++ b/tests/unit/test_patch_models.py
@@ -6,12 +6,13 @@
 import pytest

 from pynocular.database_model import database_model, nested_model, UUID_STR
-from pynocular.engines import DatabaseType, DBInfo
+from pynocular.engines import DBInfo
 from pynocular.patch_models import patch_database_model

 # With the `patch_database_model` we don't need a database connection
 test_connection_string = "fake connection string"
-testdb = DBInfo(DatabaseType.aiopg_engine, test_connection_string)
+testdb = DBInfo(test_connection_string)
+name = "boo"


 @database_model("users", testdb)
@@ -29,8 +30,12 @@ class Org(BaseModel):
     id: UUID_STR = Field(primary_key=True)
     name: str = Field(max_length=45)
     slug: str = Field(max_length=45)
-    tech_owner: Optional[nested_model(User, reference_field="tech_owner_id")]
-    business_owner: Optional[nested_model(User, reference_field="business_owner_id")]
+    tech_owner: Optional[
+        nested_model(User, reference_field="tech_owner_id")  # noqa F821
+    ]
+    business_owner: Optional[
+        nested_model(User, reference_field="business_owner_id")  # noqa F821
+    ]


 class TestPatchDatabaseModel:
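Note on the `DBInfo` change above: the `pynocular/engines.py` hunk gives `engine_type` a default of `DatabaseType.aiopg_engine` and moves it after the fields without defaults, which is why every call site in this diff now passes only the connection string. The snippet below is a minimal sketch of the resulting construction patterns, not code from the repository; the connection string is a placeholder.

```python
# Sketch of DBInfo construction after this change (placeholder connection string)
from pynocular.engines import DatabaseType, DBInfo

connection_string = "postgresql://postgres:password@localhost:5432/test_db?sslmode=disable"

# engine_type now falls back to DatabaseType.aiopg_engine
db_info = DBInfo(connection_string)

# Passing the engine explicitly still works, now as a keyword argument
explicit_db_info = DBInfo(connection_string, engine_type=DatabaseType.aiopg_engine)

assert db_info.engine_type is DatabaseType.aiopg_engine
assert db_info.enable_hstore is True
assert explicit_db_info == db_info
```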
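The README and `tests/unit/test_patch_models.py` hunks combine the single-argument `DBInfo` with `patch_database_model` so unit tests never need a running database. The sketch below shows how those pieces fit together; it is illustrative only, the field values are invented, and `Org.select()` stands in for whichever pynocular query method your code under test actually calls.

```python
# Hypothetical unit-test sketch: a fake DBInfo plus patch_database_model
from pydantic import BaseModel, Field

from pynocular.database_model import database_model, UUID_STR
from pynocular.engines import DBInfo
from pynocular.patch_models import patch_database_model

# With patch_database_model we don't need a real database connection
testdb = DBInfo("fake connection string")


@database_model("organizations", testdb)
class Org(BaseModel):
    """Model that represents the `organizations` table"""

    id: UUID_STR = Field(primary_key=True)
    name: str = Field(max_length=45)
    slug: str = Field(max_length=45)


# Assumes an async-aware pytest setup (e.g. pytest-asyncio)
async def test_org_queries_are_served_from_memory() -> None:
    """Queries inside the patch are answered from the supplied models"""
    orgs = [Org(id="59b73a1e-95f4-4d95-9b3a-0f8124ba9342", name="Acme", slug="acme")]
    with patch_database_model(Org, models=orgs):
        fetched = await Org.select()  # stand-in for the query your code performs
        assert [org.name for org in fetched] == ["Acme"]
```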