From 85acd72eca9aef1995cf3bf297b34caa8b33d7d2 Mon Sep 17 00:00:00 2001
From: Michael Chouinard <46358556+chouinar@users.noreply.github.com>
Date: Tue, 2 Apr 2024 14:36:18 -0400
Subject: [PATCH] [Issue #1270] Modify DB session logic to allow for multiple schemas (#1520)

## Summary
Fixes #1270

### Time to review: __10 mins__

## Changes proposed
Modify our SQLAlchemy logic to allow for multiple schemas to be set up. This includes:
* Setting the schema explicitly in a class that all SQLAlchemy models inherit from
* Setting up a schema translate map (only meaningfully needed for local tests) to allow for renaming a schema
* A new (LOCAL ONLY) script for creating the `api` schema

## Context for reviewers
**Non-locally this change does not actually change anything yet - locally it does, by making local development more similar to non-local.**

This does not actually set up any new schemas; every table we create still lives in a single schema, the `api` schema.

This change looks far larger than it actually is. Before, all of our tables had their schema set implicitly by the `DB_SCHEMA` environment variable. Locally this value was set to `public` and non-locally it was set to `api`. These changes make it so locally it also uses `api`. However, for that to work, the Alembic migrations need to name the `api` schema explicitly (in case we add more schemas later). There is a flag in the Alembic configuration that tells it to generate migrations with schemas, but we had that disabled. I enabled it so future migrations _just work_, and to make everything work locally, I manually fixed all of the past migrations to reference the `api` schema. Non-locally the schema was already `api`, so changing already-run migrations doesn't matter: they ran as if that value had been set all along.

## Additional information
This change requires you to run `make db-recreate` locally in order to use the updated schemas.

To test this, I manually ran the database migrations one step at a time, fixing any issues. I then ran the down migrations and verified that they correctly undid the up migrations. Finally, I ran a few of our local scripts to make sure everything still worked properly and didn't find any issues.
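## Appendix: sketches for reviewers
For reviewers who haven't seen the pattern: the per-model schema now comes from an abstract declarative base rather than from a connection-level `search_path`. A minimal sketch of the mechanism (the model and column here are illustrative; the real `ApiSchemaTable` lives in `api/src/db/models/base.py`):

```python
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class ApiSchemaTable(Base):
    # Abstract: no table is created for this class itself.
    __abstract__ = True

    # Every concrete subclass inherits this and lands in the "api" schema.
    __table_args__ = {"schema": "api"}


class Opportunity(ApiSchemaTable):
    __tablename__ = "opportunity"
    opportunity_id: Mapped[int] = mapped_column(primary_key=True)


# The table is schema-qualified without the model naming "api" anywhere.
assert Opportunity.__table__.fullname == "api.opportunity"
```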
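The schema translate map is a stock SQLAlchemy feature: schema names on compiled Core/ORM statements are rewritten at execution time, which is how the test suite points the same models at per-session schemas like `test_<uuid>_api`. A hedged sketch, assuming a local Postgres where both schemas exist (the URL and prefix below are made up for illustration):

```python
from sqlalchemy import Column, Integer, MetaData, Table, create_engine, select

metadata = MetaData()
# Stand-in for any model bound to the "api" schema.
opportunity = Table(
    "opportunity",
    metadata,
    Column("opportunity_id", Integer, primary_key=True),
    schema="api",
)

# Illustrative URL only; the real values come from PostgresDBConfig.
engine = create_engine("postgresql+psycopg://app:secret123@localhost:5432/app")

# The map rewrites schema-qualified names when compiled statements execute.
# It does not touch raw text() SQL, which is why the data-migration queries
# in this PR format the schema name into the SQL strings themselves.
test_engine = engine.execution_options(
    schema_translate_map={"api": "test_1234_api"}  # tests build the prefix from uuid4
)

with test_engine.connect() as conn:
    # Runs against test_1234_api.opportunity rather than api.opportunity.
    conn.execute(select(opportunity)).all()
```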
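On the Alembic side, two options in `env.py` do the work. Roughly (a paraphrase of the `context.configure` call in this PR, not a drop-in file; the real one also wires up object filtering and type comparison):

```python
from alembic import context


def run_migrations_online(connection, target_metadata):
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        # Autogenerate schema-qualified operations (schema="api", api.*
        # foreign keys) instead of relying on the connection's search_path.
        include_schemas=True,
        # Keep Alembic's own version table in the api schema as well.
        version_table_schema="api",
    )
    with context.begin_transaction():
        context.run_migrations()
```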
---------

Co-authored-by: nava-platform-bot
---
 api/Makefile                                  |  5 +-
 api/local.env                                 |  1 -
 api/pyproject.toml                            |  2 +
 api/src/adapters/db/__init__.py               |  3 +-
 .../adapters/db/clients/postgres_client.py    |  2 +-
 .../adapters/db/clients/postgres_config.py    | 12 +++-
 api/src/constants/schema.py                   |  5 ++
 api/src/data_migration/copy_oracle_data.py    | 21 ++++---
 .../data_migration/setup_foreign_tables.py    | 15 ++++-
 api/src/db/migrations/env.py                  |  8 ++-
 .../db/migrations/setup_local_postgres_db.py  | 27 ++++++++
 .../2023_10_18_basic_opportunity_table.py     |  3 +-
 ...rename_opportunity_table_prior_to_real_.py |  6 +-
 ...023_12_11_add_rest_of_opportunity_table.py | 45 +++++++++-----
 ..._29_add_topportunity_table_for_transfer.py | 20 ++++--
 ...24_02_02_add_opportunity_category_table.py | 31 +++++++---
 ...4_02_07_add_expanded_opportunity_models.py | 47 ++++++++------
 .../2024_02_12_create_dms_exceptions_table.py |  4 +-
 .../2024_02_21_remove_dms_exceptions_table.py |  4 +-
 .../2024_03_07_drop_tables_to_remake.py       | 57 ++++++++++-------
 .../2024_03_07_updates_for_summary_tables.py  | 61 ++++++++++++-------
 .../2024_03_12_add_indexes_for_search.py      | 31 +++++++++-
 api/src/db/models/base.py                     |  9 ++-
 api/src/db/models/lookup/lookup_table.py      |  4 +-
 api/src/db/models/opportunity_models.py       | 16 ++---
 .../db/models/transfer/topportunity_models.py |  4 +-
 api/tests/conftest.py                         | 16 ++++-
 api/tests/lib/db_testing.py                   | 22 ++++---
 .../db/clients/test_postgres_client.py        |  1 -
 .../test_postgres_type_decorators.py          |  6 +-
 .../data_migration/test_copy_oracle_data.py   | 38 ++++++------
 .../test_setup_foreign_tables.py              | 14 ++---
 .../models/lookup/test_sync_lookup_values.py  |  5 +-
 api/tests/src/db/test_migrations.py           | 28 +++++++--
 34 files changed, 399 insertions(+), 174 deletions(-)
 create mode 100644 api/src/constants/schema.py
 create mode 100644 api/src/db/migrations/setup_local_postgres_db.py

diff --git a/api/Makefile b/api/Makefile
index 019e7d20d..770190219 100644
--- a/api/Makefile
+++ b/api/Makefile
@@ -120,7 +120,7 @@ check: format-check lint db-check-migrations test
 
 # Docker starts the image for the DB but it's not quite
 # ready to accept connections so we add a brief wait script
-init-db: start-db db-migrate
+init-db: start-db setup-postgres-db db-migrate
 
 start-db:
 	docker-compose up --detach grants-db
@@ -176,6 +176,9 @@ create-erds: # Create ERD diagrams for our DB schema
 	$(PY_RUN_CMD) create-erds
 	mv bin/*.png ../documentation/api/database/erds
 
+setup-postgres-db: ## Does any initial setup necessary for our local database to work
+	$(PY_RUN_CMD) setup-postgres-db
+
 ##################################################
 # Testing
 
diff --git a/api/local.env b/api/local.env
index 9c0a19a87..6b2d90f21 100644
--- a/api/local.env
+++ b/api/local.env
@@ -51,7 +51,6 @@ POSTGRES_PASSWORD=secret123
 DB_HOST=grants-db
 DB_NAME=app
 DB_USER=app
-DB_SCHEMA=public
 DB_PASSWORD=secret123
 DB_SSL_MODE=allow
 
diff --git a/api/pyproject.toml b/api/pyproject.toml
index 9b17f07a5..d07faf55e 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -54,6 +54,8 @@ db-migrate-down = "src.db.migrations.run:down"
 db-migrate-down-all = "src.db.migrations.run:downall"
 db-seed-local = "tests.lib.seed_local_db:seed_local_db"
 create-erds = "bin.create_erds:main"
+setup-postgres-db = "src.db.migrations.setup_local_postgres_db:setup_local_postgres_db"
+
 
 [tool.black]
 line-length = 100
 
diff --git a/api/src/adapters/db/__init__.py b/api/src/adapters/db/__init__.py
index 591de8510..21f670be8 100644
--- a/api/src/adapters/db/__init__.py
+++ b/api/src/adapters/db/__init__.py
@@
-25,8 +25,9 @@ # Re-export for convenience from src.adapters.db.client import Connection, DBClient, Session from src.adapters.db.clients.postgres_client import PostgresDBClient +from src.adapters.db.clients.postgres_config import PostgresDBConfig # Do not import flask_db here, because this module is not dependent on any specific framework. # Code can choose to use this module on its own or with the flask_db module depending on needs. -__all__ = ["Connection", "DBClient", "Session", "PostgresDBClient"] +__all__ = ["Connection", "DBClient", "Session", "PostgresDBClient", "PostgresDBConfig"] diff --git a/api/src/adapters/db/clients/postgres_client.py b/api/src/adapters/db/clients/postgres_client.py index c70ceefa1..a054d4592 100644 --- a/api/src/adapters/db/clients/postgres_client.py +++ b/api/src/adapters/db/clients/postgres_client.py @@ -47,6 +47,7 @@ def get_conn() -> Any: "postgresql+psycopg://", pool=conn_pool, hide_parameters=db_config.hide_sql_parameter_logs, + execution_options={"schema_translate_map": db_config.get_schema_translate_map()}, # TODO: Don't think we need this as we aren't using JSON columns, but keeping for reference # json_serializer=lambda o: json.dumps(o, default=pydantic.json.pydantic_encoder), ) @@ -94,7 +95,6 @@ def get_connection_parameters(db_config: PostgresDBConfig) -> dict[str, Any]: user=db_config.username, password=password, port=db_config.port, - options=f"-c search_path={db_config.db_schema}", connect_timeout=10, sslmode=db_config.ssl_mode, **connect_args, diff --git a/api/src/adapters/db/clients/postgres_config.py b/api/src/adapters/db/clients/postgres_config.py index 41f4bebe3..57b3c83ee 100644 --- a/api/src/adapters/db/clients/postgres_config.py +++ b/api/src/adapters/db/clients/postgres_config.py @@ -3,6 +3,7 @@ from pydantic import Field +from src.constants.schema import Schemas from src.util.env_config import PydanticBaseEnvConfig logger = logging.getLogger(__name__) @@ -15,11 +16,19 @@ class PostgresDBConfig(PydanticBaseEnvConfig): name: str = Field(alias="DB_NAME") username: str = Field(alias="DB_USER") password: Optional[str] = Field(None, alias="DB_PASSWORD") - db_schema: str = Field("public", alias="DB_SCHEMA") port: int = Field(5432, alias="DB_PORT") hide_sql_parameter_logs: bool = Field(True, alias="HIDE_SQL_PARAMETER_LOGS") ssl_mode: str = Field("require", alias="DB_SSL_MODE") + schema_prefix_override: str | None = Field(None) + + def get_schema_translate_map(self) -> dict[str, str]: + prefix = "" + if self.schema_prefix_override is not None: + prefix = self.schema_prefix_override + + return {schema: f"{prefix}{schema}" for schema in Schemas} + def get_db_config() -> PostgresDBConfig: db_config = PostgresDBConfig() @@ -31,7 +40,6 @@ def get_db_config() -> PostgresDBConfig: "dbname": db_config.name, "username": db_config.username, "password": "***" if db_config.password is not None else None, - "db_schema": db_config.db_schema, "port": db_config.port, "hide_sql_parameter_logs": db_config.hide_sql_parameter_logs, }, diff --git a/api/src/constants/schema.py b/api/src/constants/schema.py new file mode 100644 index 000000000..21fb26d80 --- /dev/null +++ b/api/src/constants/schema.py @@ -0,0 +1,5 @@ +from enum import StrEnum + + +class Schemas(StrEnum): + API = "api" diff --git a/api/src/data_migration/copy_oracle_data.py b/api/src/data_migration/copy_oracle_data.py index 92578b214..2d597cb3c 100644 --- a/api/src/data_migration/copy_oracle_data.py +++ b/api/src/data_migration/copy_oracle_data.py @@ -4,6 +4,7 @@ import src.adapters.db as db import 
src.adapters.db.flask_db as flask_db +from src.constants.schema import Schemas from src.data_migration.data_migration_blueprint import data_migration_blueprint logger = logging.getLogger(__name__) @@ -20,10 +21,10 @@ class SqlCommands: ################################# OPPORTUNITY_DELETE_QUERY = """ - delete from transfer_topportunity + delete from {}.transfer_topportunity """ OPPORTUNITY_INSERT_QUERY = """ - insert into transfer_topportunity + insert into {}.transfer_topportunity select opportunity_id, oppnumber, @@ -40,7 +41,7 @@ class SqlCommands: last_upd_date, creator_id, created_date - from foreign_topportunity + from {}.foreign_topportunity where is_draft = 'N' """ @@ -54,7 +55,7 @@ def copy_oracle_data(db_session: db.Session) -> None: try: with db_session.begin(): - _run_copy_commands(db_session) + _run_copy_commands(db_session, Schemas.API) except Exception: logger.exception("Failed to run copy-oracle-data command") raise @@ -62,10 +63,12 @@ def copy_oracle_data(db_session: db.Session) -> None: logger.info("Successfully ran copy-oracle-data") -def _run_copy_commands(db_session: db.Session) -> None: +def _run_copy_commands(db_session: db.Session, api_schema: str) -> None: logger.info("Running copy commands for TOPPORTUNITY") - db_session.execute(text(SqlCommands.OPPORTUNITY_DELETE_QUERY)) - db_session.execute(text(SqlCommands.OPPORTUNITY_INSERT_QUERY)) - count = db_session.scalar(text("SELECT count(*) from transfer_topportunity")) - logger.info(f"Loaded {count} records into transfer_topportunity") + db_session.execute(text(SqlCommands.OPPORTUNITY_DELETE_QUERY.format(api_schema))) + db_session.execute(text(SqlCommands.OPPORTUNITY_INSERT_QUERY.format(api_schema, api_schema))) + count = db_session.scalar( + text(f"SELECT count(*) from {api_schema}.transfer_topportunity") # nosec + ) + logger.info(f"Loaded {count} records into {api_schema}.transfer_topportunity") diff --git a/api/src/data_migration/setup_foreign_tables.py b/api/src/data_migration/setup_foreign_tables.py index 03c62fca6..76304977f 100644 --- a/api/src/data_migration/setup_foreign_tables.py +++ b/api/src/data_migration/setup_foreign_tables.py @@ -6,6 +6,7 @@ import src.adapters.db as db import src.adapters.db.flask_db as flask_db +from src.constants.schema import Schemas from src.data_migration.data_migration_blueprint import data_migration_blueprint from src.util.env_config import PydanticBaseEnvConfig @@ -14,6 +15,7 @@ class ForeignTableConfig(PydanticBaseEnvConfig): is_local_foreign_table: bool = Field(False) + schema_name: str = Field(Schemas.API) @dataclass @@ -62,7 +64,7 @@ def setup_foreign_tables(db_session: db.Session) -> None: logger.info("Successfully ran setup-foreign-tables") -def build_sql(table_name: str, columns: list[Column], is_local: bool) -> str: +def build_sql(table_name: str, columns: list[Column], is_local: bool, schema_name: str) -> str: """ Build the SQL for creating a possibly foreign data table. If running with is_local, it instead creates a regular table. 
@@ -111,10 +113,17 @@ def build_sql(table_name: str, columns: list[Column], is_local: bool) -> str: # We don't want the config at the end if we're running locally so unset it create_command_suffix = "" - return f"{create_table_command} foreign_{table_name.lower()} ({','.join(column_sql_parts)}){create_command_suffix}" + return f"{create_table_command} {schema_name}.foreign_{table_name.lower()} ({','.join(column_sql_parts)}){create_command_suffix}" def _run_create_table_commands(db_session: db.Session, config: ForeignTableConfig) -> None: db_session.execute( - text(build_sql("TOPPORTUNITY", OPPORTUNITY_COLUMNS, config.is_local_foreign_table)) + text( + build_sql( + "TOPPORTUNITY", + OPPORTUNITY_COLUMNS, + config.is_local_foreign_table, + config.schema_name, + ) + ) ) diff --git a/api/src/db/migrations/env.py b/api/src/db/migrations/env.py index 1da37884a..3100d77b6 100644 --- a/api/src/db/migrations/env.py +++ b/api/src/db/migrations/env.py @@ -6,6 +6,7 @@ import src.adapters.db as db import src.logging +from src.constants.schema import Schemas from src.db.models import metadata from src.adapters.db.type_decorators.postgres_type_decorators import LookupColumn # isort:skip @@ -36,6 +37,10 @@ def include_object( reflected: bool, compare_to: Any, ) -> bool: + # We don't want alembic to try and drop its own table + if name == "alembic_version": + return False + if type_ == "schema" and getattr(object, "schema", None) is not None: return False if type_ == "table" and name is not None and name.startswith("foreign_"): @@ -69,10 +74,11 @@ def run_migrations_online() -> None: context.configure( connection=connection, target_metadata=target_metadata, - include_schemas=False, + include_schemas=True, include_object=include_object, compare_type=True, render_item=render_item, + version_table_schema=Schemas.API, ) with context.begin_transaction(): context.run_migrations() diff --git a/api/src/db/migrations/setup_local_postgres_db.py b/api/src/db/migrations/setup_local_postgres_db.py new file mode 100644 index 000000000..3a13d4431 --- /dev/null +++ b/api/src/db/migrations/setup_local_postgres_db.py @@ -0,0 +1,27 @@ +import logging + +from sqlalchemy import text + +import src.adapters.db as db +import src.logging +from src.adapters.db import PostgresDBClient +from src.constants.schema import Schemas +from src.util.local import error_if_not_local + +logger = logging.getLogger(__name__) + + +def setup_local_postgres_db() -> None: + with src.logging.init(__package__): + error_if_not_local() + + db_client = PostgresDBClient() + + with db_client.get_connection() as conn, conn.begin(): + for schema in Schemas: + _create_schema(conn, schema) + + +def _create_schema(conn: db.Connection, schema_name: str) -> None: + logger.info("Creating schema %s if it does not already exist", schema_name) + conn.execute(text(f"CREATE SCHEMA IF NOT EXISTS {schema_name}")) diff --git a/api/src/db/migrations/versions/2023_10_18_basic_opportunity_table.py b/api/src/db/migrations/versions/2023_10_18_basic_opportunity_table.py index 27650ebd2..00ea6bad8 100644 --- a/api/src/db/migrations/versions/2023_10_18_basic_opportunity_table.py +++ b/api/src/db/migrations/versions/2023_10_18_basic_opportunity_table.py @@ -38,11 +38,12 @@ def upgrade(): nullable=False, ), sa.PrimaryKeyConstraint("opportunity_id", name=op.f("topportunity_pkey")), + schema="api", ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("topportunity") + op.drop_table("topportunity", schema="api") # ### end Alembic commands ### diff --git a/api/src/db/migrations/versions/2023_11_27_rename_opportunity_table_prior_to_real_.py b/api/src/db/migrations/versions/2023_11_27_rename_opportunity_table_prior_to_real_.py index 4fd4f6d67..89e8e0200 100644 --- a/api/src/db/migrations/versions/2023_11_27_rename_opportunity_table_prior_to_real_.py +++ b/api/src/db/migrations/versions/2023_11_27_rename_opportunity_table_prior_to_real_.py @@ -39,8 +39,9 @@ def upgrade(): nullable=False, ), sa.PrimaryKeyConstraint("opportunity_id", name=op.f("opportunity_pkey")), + schema="api", ) - op.drop_table("topportunity") + op.drop_table("topportunity", schema="api") # ### end Alembic commands ### @@ -69,6 +70,7 @@ def downgrade(): nullable=False, ), sa.PrimaryKeyConstraint("opportunity_id", name="topportunity_pkey"), + schema="api", ) - op.drop_table("opportunity") + op.drop_table("opportunity", schema="api") # ### end Alembic commands ### diff --git a/api/src/db/migrations/versions/2023_12_11_add_rest_of_opportunity_table.py b/api/src/db/migrations/versions/2023_12_11_add_rest_of_opportunity_table.py index 1ba905da9..51a842c31 100644 --- a/api/src/db/migrations/versions/2023_12_11_add_rest_of_opportunity_table.py +++ b/api/src/db/migrations/versions/2023_12_11_add_rest_of_opportunity_table.py @@ -17,30 +17,45 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.add_column("opportunity", sa.Column("category_explanation", sa.Text(), nullable=True)) - op.add_column("opportunity", sa.Column("revision_number", sa.Integer(), nullable=True)) - op.add_column("opportunity", sa.Column("modified_comments", sa.Text(), nullable=True)) - op.add_column("opportunity", sa.Column("publisher_user_id", sa.Integer(), nullable=True)) - op.add_column("opportunity", sa.Column("publisher_profile_id", sa.Integer(), nullable=True)) - op.create_index(op.f("opportunity_category_idx"), "opportunity", ["category"], unique=False) - op.create_index(op.f("opportunity_is_draft_idx"), "opportunity", ["is_draft"], unique=False) + op.add_column( + "opportunity", sa.Column("category_explanation", sa.Text(), nullable=True), schema="api" + ) + op.add_column( + "opportunity", sa.Column("revision_number", sa.Integer(), nullable=True), schema="api" + ) + op.add_column( + "opportunity", sa.Column("modified_comments", sa.Text(), nullable=True), schema="api" + ) + op.add_column( + "opportunity", sa.Column("publisher_user_id", sa.Integer(), nullable=True), schema="api" + ) + op.add_column( + "opportunity", sa.Column("publisher_profile_id", sa.Integer(), nullable=True), schema="api" + ) + op.create_index( + op.f("opportunity_category_idx"), "opportunity", ["category"], unique=False, schema="api" + ) + op.create_index( + op.f("opportunity_is_draft_idx"), "opportunity", ["is_draft"], unique=False, schema="api" + ) op.create_index( op.f("opportunity_opportunity_title_idx"), "opportunity", ["opportunity_title"], unique=False, + schema="api", ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index(op.f("opportunity_opportunity_title_idx"), table_name="opportunity") - op.drop_index(op.f("opportunity_is_draft_idx"), table_name="opportunity") - op.drop_index(op.f("opportunity_category_idx"), table_name="opportunity") - op.drop_column("opportunity", "publisher_profile_id") - op.drop_column("opportunity", "publisher_user_id") - op.drop_column("opportunity", "modified_comments") - op.drop_column("opportunity", "revision_number") - op.drop_column("opportunity", "category_explanation") + op.drop_index(op.f("opportunity_opportunity_title_idx"), table_name="opportunity", schema="api") + op.drop_index(op.f("opportunity_is_draft_idx"), table_name="opportunity", schema="api") + op.drop_index(op.f("opportunity_category_idx"), table_name="opportunity", schema="api") + op.drop_column("opportunity", "publisher_profile_id", schema="api") + op.drop_column("opportunity", "publisher_user_id", schema="api") + op.drop_column("opportunity", "modified_comments", schema="api") + op.drop_column("opportunity", "revision_number", schema="api") + op.drop_column("opportunity", "category_explanation", schema="api") # ### end Alembic commands ### diff --git a/api/src/db/migrations/versions/2024_01_29_add_topportunity_table_for_transfer.py b/api/src/db/migrations/versions/2024_01_29_add_topportunity_table_for_transfer.py index 2ba1d2024..0968407c2 100644 --- a/api/src/db/migrations/versions/2024_01_29_add_topportunity_table_for_transfer.py +++ b/api/src/db/migrations/versions/2024_01_29_add_topportunity_table_for_transfer.py @@ -47,32 +47,44 @@ def upgrade(): nullable=False, ), sa.PrimaryKeyConstraint("opportunity_id", name=op.f("transfer_topportunity_pkey")), + schema="api", ) op.create_index( op.f("transfer_topportunity_is_draft_idx"), "transfer_topportunity", ["is_draft"], unique=False, + schema="api", ) op.create_index( op.f("transfer_topportunity_oppcategory_idx"), "transfer_topportunity", ["oppcategory"], unique=False, + schema="api", ) op.create_index( op.f("transfer_topportunity_opptitle_idx"), "transfer_topportunity", ["opptitle"], unique=False, + schema="api", ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index(op.f("transfer_topportunity_opptitle_idx"), table_name="transfer_topportunity") - op.drop_index(op.f("transfer_topportunity_oppcategory_idx"), table_name="transfer_topportunity") - op.drop_index(op.f("transfer_topportunity_is_draft_idx"), table_name="transfer_topportunity") - op.drop_table("transfer_topportunity") + op.drop_index( + op.f("transfer_topportunity_opptitle_idx"), table_name="transfer_topportunity", schema="api" + ) + op.drop_index( + op.f("transfer_topportunity_oppcategory_idx"), + table_name="transfer_topportunity", + schema="api", + ) + op.drop_index( + op.f("transfer_topportunity_is_draft_idx"), table_name="transfer_topportunity", schema="api" + ) + op.drop_table("transfer_topportunity", schema="api") # ### end Alembic commands ### diff --git a/api/src/db/migrations/versions/2024_02_02_add_opportunity_category_table.py b/api/src/db/migrations/versions/2024_02_02_add_opportunity_category_table.py index b7a4b2947..4c030078f 100644 --- a/api/src/db/migrations/versions/2024_02_02_add_opportunity_category_table.py +++ b/api/src/db/migrations/versions/2024_02_02_add_opportunity_category_table.py @@ -36,14 +36,20 @@ def upgrade(): sa.PrimaryKeyConstraint( "opportunity_category_id", name=op.f("lk_opportunity_category_pkey") ), + schema="api", ) - op.add_column("opportunity", sa.Column("opportunity_category_id", sa.Integer(), nullable=True)) - op.drop_index("opportunity_category_idx", table_name="opportunity") + op.add_column( + "opportunity", + sa.Column("opportunity_category_id", sa.Integer(), nullable=True), + schema="api", + ) + op.drop_index("opportunity_category_idx", table_name="opportunity", schema="api") op.create_index( op.f("opportunity_opportunity_category_id_idx"), "opportunity", ["opportunity_category_id"], unique=False, + schema="api", ) op.create_foreign_key( op.f("opportunity_opportunity_category_id_lk_opportunity_category_fkey"), @@ -51,23 +57,32 @@ def upgrade(): "lk_opportunity_category", ["opportunity_category_id"], ["opportunity_category_id"], + source_schema="api", + referent_schema="api", ) - op.drop_column("opportunity", "category") + op.drop_column("opportunity", "category", schema="api") # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.add_column( - "opportunity", sa.Column("category", sa.TEXT(), autoincrement=False, nullable=True) + "opportunity", + sa.Column("category", sa.TEXT(), autoincrement=False, nullable=True), + schema="api", ) op.drop_constraint( op.f("opportunity_opportunity_category_id_lk_opportunity_category_fkey"), "opportunity", type_="foreignkey", + schema="api", + ) + op.drop_index( + op.f("opportunity_opportunity_category_id_idx"), table_name="opportunity", schema="api" + ) + op.create_index( + "opportunity_category_idx", "opportunity", ["category"], unique=False, schema="api" ) - op.drop_index(op.f("opportunity_opportunity_category_id_idx"), table_name="opportunity") - op.create_index("opportunity_category_idx", "opportunity", ["category"], unique=False) - op.drop_column("opportunity", "opportunity_category_id") - op.drop_table("lk_opportunity_category") + op.drop_column("opportunity", "opportunity_category_id", schema="api") + op.drop_table("lk_opportunity_category", schema="api") # ### end Alembic commands ### diff --git a/api/src/db/migrations/versions/2024_02_07_add_expanded_opportunity_models.py b/api/src/db/migrations/versions/2024_02_07_add_expanded_opportunity_models.py index 022e05164..b7d6daed4 100644 --- a/api/src/db/migrations/versions/2024_02_07_add_expanded_opportunity_models.py +++ b/api/src/db/migrations/versions/2024_02_07_add_expanded_opportunity_models.py @@ -34,6 +34,7 @@ def upgrade(): nullable=False, ), sa.PrimaryKeyConstraint("applicant_type_id", name=op.f("lk_applicant_type_pkey")), + schema="api", ) op.create_table( "lk_funding_category", @@ -52,6 +53,7 @@ def upgrade(): nullable=False, ), sa.PrimaryKeyConstraint("funding_category_id", name=op.f("lk_funding_category_pkey")), + schema="api", ) op.create_table( "lk_funding_instrument", @@ -70,6 +72,7 @@ def upgrade(): nullable=False, ), sa.PrimaryKeyConstraint("funding_instrument_id", name=op.f("lk_funding_instrument_pkey")), + schema="api", ) op.create_table( "lk_opportunity_status", @@ -88,6 +91,7 @@ def upgrade(): nullable=False, ), sa.PrimaryKeyConstraint("opportunity_status_id", name=op.f("lk_opportunity_status_pkey")), + schema="api", ) op.create_table( "link_applicant_type_opportunity", @@ -109,17 +113,18 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["applicant_type_id"], - ["lk_applicant_type.applicant_type_id"], + ["api.lk_applicant_type.applicant_type_id"], name=op.f("link_applicant_type_opportunity_applicant_type_id_lk_applicant_type_fkey"), ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name=op.f("link_applicant_type_opportunity_opportunity_id_opportunity_fkey"), ), sa.PrimaryKeyConstraint( "opportunity_id", "applicant_type_id", name=op.f("link_applicant_type_opportunity_pkey") ), + schema="api", ) op.create_table( "link_funding_category_opportunity", @@ -141,14 +146,14 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["funding_category_id"], - ["lk_funding_category.funding_category_id"], + ["api.lk_funding_category.funding_category_id"], name=op.f( "link_funding_category_opportunity_funding_category_id_lk_funding_category_fkey" ), ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name=op.f("link_funding_category_opportunity_opportunity_id_opportunity_fkey"), ), sa.PrimaryKeyConstraint( @@ -156,6 +161,7 @@ def upgrade(): "funding_category_id", name=op.f("link_funding_category_opportunity_pkey"), ), + schema="api", ) op.create_table( "link_funding_instrument_opportunity", 
@@ -177,14 +183,14 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["funding_instrument_id"], - ["lk_funding_instrument.funding_instrument_id"], + ["api.lk_funding_instrument.funding_instrument_id"], name=op.f( "link_funding_instrument_opportunity_funding_instrument_id_lk_funding_instrument_fkey" ), ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name=op.f("link_funding_instrument_opportunity_opportunity_id_opportunity_fkey"), ), sa.PrimaryKeyConstraint( @@ -192,6 +198,7 @@ def upgrade(): "funding_instrument_id", name=op.f("link_funding_instrument_opportunity_pkey"), ), + schema="api", ) op.create_table( "opportunity_assistance_listing", @@ -215,18 +222,20 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name=op.f("opportunity_assistance_listing_opportunity_id_opportunity_fkey"), ), sa.PrimaryKeyConstraint( "opportunity_assistance_listing_id", name=op.f("opportunity_assistance_listing_pkey") ), + schema="api", ) op.create_index( op.f("opportunity_assistance_listing_opportunity_id_idx"), "opportunity_assistance_listing", ["opportunity_id"], unique=False, + schema="api", ) op.create_table( "opportunity_summary", @@ -274,32 +283,34 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name=op.f("opportunity_summary_opportunity_id_opportunity_fkey"), ), sa.ForeignKeyConstraint( ["opportunity_status_id"], - ["lk_opportunity_status.opportunity_status_id"], + ["api.lk_opportunity_status.opportunity_status_id"], name=op.f("opportunity_summary_opportunity_status_id_lk_opportunity_status_fkey"), ), sa.PrimaryKeyConstraint("opportunity_id", name=op.f("opportunity_summary_pkey")), + schema="api", ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.drop_table("opportunity_summary") + op.drop_table("opportunity_summary", schema="api") op.drop_index( op.f("opportunity_assistance_listing_opportunity_id_idx"), table_name="opportunity_assistance_listing", + schema="api", ) - op.drop_table("opportunity_assistance_listing") - op.drop_table("link_funding_instrument_opportunity") - op.drop_table("link_funding_category_opportunity") - op.drop_table("link_applicant_type_opportunity") - op.drop_table("lk_opportunity_status") - op.drop_table("lk_funding_instrument") - op.drop_table("lk_funding_category") - op.drop_table("lk_applicant_type") + op.drop_table("opportunity_assistance_listing", schema="api") + op.drop_table("link_funding_instrument_opportunity", schema="api") + op.drop_table("link_funding_category_opportunity", schema="api") + op.drop_table("link_applicant_type_opportunity", schema="api") + op.drop_table("lk_opportunity_status", schema="api") + op.drop_table("lk_funding_instrument", schema="api") + op.drop_table("lk_funding_category", schema="api") + op.drop_table("lk_applicant_type", schema="api") # ### end Alembic commands ### diff --git a/api/src/db/migrations/versions/2024_02_12_create_dms_exceptions_table.py b/api/src/db/migrations/versions/2024_02_12_create_dms_exceptions_table.py index 7cd667884..cd3fef4ed 100644 --- a/api/src/db/migrations/versions/2024_02_12_create_dms_exceptions_table.py +++ b/api/src/db/migrations/versions/2024_02_12_create_dms_exceptions_table.py @@ -18,7 +18,7 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.execute( """ - CREATE TABLE IF NOT EXISTS awsdms_apply_exceptions ( + CREATE TABLE IF NOT EXISTS api.awsdms_apply_exceptions ( ERROR_TIME timestamp NOT NULL, TASK_NAME varchar(128) NOT NULL, TABLE_OWNER varchar(128) NOT NULL, @@ -33,5 +33,5 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.execute("DROP TABLE IF EXISTS awsdms_apply_exceptions") + op.execute("DROP TABLE IF EXISTS api.awsdms_apply_exceptions") # ### end Alembic commands ### diff --git a/api/src/db/migrations/versions/2024_02_21_remove_dms_exceptions_table.py b/api/src/db/migrations/versions/2024_02_21_remove_dms_exceptions_table.py index 5a686d71e..8944bd287 100644 --- a/api/src/db/migrations/versions/2024_02_21_remove_dms_exceptions_table.py +++ b/api/src/db/migrations/versions/2024_02_21_remove_dms_exceptions_table.py @@ -16,7 +16,7 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.execute("DROP TABLE IF EXISTS awsdms_apply_exceptions") + op.execute("DROP TABLE IF EXISTS api.awsdms_apply_exceptions") # ### end Alembic commands ### @@ -24,7 +24,7 @@ def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.execute( """ - CREATE TABLE IF NOT EXISTS awsdms_apply_exceptions ( + CREATE TABLE IF NOT EXISTS api.awsdms_apply_exceptions ( ERROR_TIME timestamp NOT NULL, TASK_NAME varchar(128) NOT NULL, TABLE_OWNER varchar(128) NOT NULL, diff --git a/api/src/db/migrations/versions/2024_03_07_drop_tables_to_remake.py b/api/src/db/migrations/versions/2024_03_07_drop_tables_to_remake.py index 7efe946d9..19dd02d7f 100644 --- a/api/src/db/migrations/versions/2024_03_07_drop_tables_to_remake.py +++ b/api/src/db/migrations/versions/2024_03_07_drop_tables_to_remake.py @@ -18,20 +18,21 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("link_funding_instrument_opportunity") - op.drop_index("opportunity_is_draft_idx", table_name="opportunity") - op.drop_index("opportunity_opportunity_category_id_idx", table_name="opportunity") - op.drop_index("opportunity_opportunity_title_idx", table_name="opportunity") + op.drop_table("link_funding_instrument_opportunity", schema="api") + op.drop_index("opportunity_is_draft_idx", table_name="opportunity", schema="api") + op.drop_index("opportunity_opportunity_category_id_idx", table_name="opportunity", schema="api") + op.drop_index("opportunity_opportunity_title_idx", table_name="opportunity", schema="api") - op.drop_table("link_funding_category_opportunity") - op.drop_table("link_applicant_type_opportunity") + op.drop_table("link_funding_category_opportunity", schema="api") + op.drop_table("link_applicant_type_opportunity", schema="api") op.drop_index( "opportunity_assistance_listing_opportunity_id_idx", table_name="opportunity_assistance_listing", + schema="api", ) - op.drop_table("opportunity_assistance_listing") - op.drop_table("opportunity_summary") - op.drop_table("opportunity") + op.drop_table("opportunity_assistance_listing", schema="api") + op.drop_table("opportunity_summary", schema="api") + op.drop_table("opportunity", schema="api") # ### end Alembic commands ### @@ -72,22 +73,30 @@ def downgrade(): sa.Column("opportunity_category_id", sa.INTEGER(), autoincrement=False, nullable=True), sa.ForeignKeyConstraint( ["opportunity_category_id"], - ["lk_opportunity_category.opportunity_category_id"], + ["api.lk_opportunity_category.opportunity_category_id"], name="opportunity_opportunity_category_id_lk_opportunity_cate_c6e9", ), sa.PrimaryKeyConstraint("opportunity_id", name="opportunity_pkey"), postgresql_ignore_search_path=False, + schema="api", ) op.create_index( - "opportunity_opportunity_title_idx", "opportunity", ["opportunity_title"], unique=False + "opportunity_opportunity_title_idx", + "opportunity", + ["opportunity_title"], + unique=False, + schema="api", ) op.create_index( "opportunity_opportunity_category_id_idx", "opportunity", ["opportunity_category_id"], unique=False, + schema="api", + ) + op.create_index( + "opportunity_is_draft_idx", "opportunity", ["is_draft"], unique=False, schema="api" ) - op.create_index("opportunity_is_draft_idx", "opportunity", ["is_draft"], unique=False) op.create_table( "opportunity_summary", sa.Column("opportunity_id", sa.INTEGER(), autoincrement=False, nullable=False), @@ -142,15 +151,16 @@ def downgrade(): ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name="opportunity_summary_opportunity_id_opportunity_fkey", ), sa.ForeignKeyConstraint( ["opportunity_status_id"], - ["lk_opportunity_status.opportunity_status_id"], + ["api.lk_opportunity_status.opportunity_status_id"], name="opportunity_summary_opportunity_status_id_lk_opportunit_ea00", ), sa.PrimaryKeyConstraint("opportunity_id", name="opportunity_summary_pkey"), + schema="api", ) op.create_table( "opportunity_assistance_listing", @@ -178,18 +188,20 @@ def downgrade(): ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name="opportunity_assistance_listing_opportunity_id_opportunity_fkey", ), sa.PrimaryKeyConstraint( "opportunity_assistance_listing_id", name="opportunity_assistance_listing_pkey" ), + schema="api", ) op.create_index( "opportunity_assistance_listing_opportunity_id_idx", "opportunity_assistance_listing", 
["opportunity_id"], unique=False, + schema="api", ) op.create_table( "link_applicant_type_opportunity", @@ -213,17 +225,18 @@ def downgrade(): ), sa.ForeignKeyConstraint( ["applicant_type_id"], - ["lk_applicant_type.applicant_type_id"], + ["api.lk_applicant_type.applicant_type_id"], name="link_applicant_type_opportunity_applicant_type_id_lk_ap_7903", ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name="link_applicant_type_opportunity_opportunity_id_opportunity_fkey", ), sa.PrimaryKeyConstraint( "opportunity_id", "applicant_type_id", name="link_applicant_type_opportunity_pkey" ), + schema="api", ) op.create_table( "link_funding_category_opportunity", @@ -247,17 +260,18 @@ def downgrade(): ), sa.ForeignKeyConstraint( ["funding_category_id"], - ["lk_funding_category.funding_category_id"], + ["api.lk_funding_category.funding_category_id"], name="link_funding_category_opportunity_funding_category_id_l_4add", ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name="link_funding_category_opportunity_opportunity_id_opport_eb65", ), sa.PrimaryKeyConstraint( "opportunity_id", "funding_category_id", name="link_funding_category_opportunity_pkey" ), + schema="api", ) op.create_table( @@ -282,12 +296,12 @@ def downgrade(): ), sa.ForeignKeyConstraint( ["funding_instrument_id"], - ["lk_funding_instrument.funding_instrument_id"], + ["api.lk_funding_instrument.funding_instrument_id"], name="link_funding_instrument_opportunity_funding_instrument__68d6", ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name="link_funding_instrument_opportunity_opportunity_id_oppo_9420", ), sa.PrimaryKeyConstraint( @@ -295,5 +309,6 @@ def downgrade(): "funding_instrument_id", name="link_funding_instrument_opportunity_pkey", ), + schema="api", ) # ### end Alembic commands ### diff --git a/api/src/db/migrations/versions/2024_03_07_updates_for_summary_tables.py b/api/src/db/migrations/versions/2024_03_07_updates_for_summary_tables.py index fe4cd6bdc..cdbcd34e6 100644 --- a/api/src/db/migrations/versions/2024_03_07_updates_for_summary_tables.py +++ b/api/src/db/migrations/versions/2024_03_07_updates_for_summary_tables.py @@ -44,23 +44,28 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["opportunity_category_id"], - ["lk_opportunity_category.opportunity_category_id"], + ["api.lk_opportunity_category.opportunity_category_id"], name=op.f("opportunity_opportunity_category_id_lk_opportunity_category_fkey"), ), sa.PrimaryKeyConstraint("opportunity_id", name=op.f("opportunity_pkey")), + schema="api", + ) + op.create_index( + op.f("opportunity_is_draft_idx"), "opportunity", ["is_draft"], unique=False, schema="api" ) - op.create_index(op.f("opportunity_is_draft_idx"), "opportunity", ["is_draft"], unique=False) op.create_index( op.f("opportunity_opportunity_category_id_idx"), "opportunity", ["opportunity_category_id"], unique=False, + schema="api", ) op.create_index( op.f("opportunity_opportunity_title_idx"), "opportunity", ["opportunity_title"], unique=False, + schema="api", ) op.create_table( "opportunity_assistance_listing", @@ -84,18 +89,20 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name=op.f("opportunity_assistance_listing_opportunity_id_opportunity_fkey"), ), sa.PrimaryKeyConstraint( "opportunity_assistance_listing_id", 
name=op.f("opportunity_assistance_listing_pkey") ), + schema="api", ) op.create_index( op.f("opportunity_assistance_listing_opportunity_id_idx"), "opportunity_assistance_listing", ["opportunity_id"], unique=False, + schema="api", ) op.create_table( "opportunity_summary", @@ -151,10 +158,11 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name=op.f("opportunity_summary_opportunity_id_opportunity_fkey"), ), sa.PrimaryKeyConstraint("opportunity_summary_id", name=op.f("opportunity_summary_pkey")), + schema="api", ) op.create_table( "current_opportunity_summary", @@ -175,19 +183,19 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["opportunity_id"], - ["opportunity.opportunity_id"], + ["api.opportunity.opportunity_id"], name=op.f("current_opportunity_summary_opportunity_id_opportunity_fkey"), ), sa.ForeignKeyConstraint( ["opportunity_status_id"], - ["lk_opportunity_status.opportunity_status_id"], + ["api.lk_opportunity_status.opportunity_status_id"], name=op.f( "current_opportunity_summary_opportunity_status_id_lk_opportunity_status_fkey" ), ), sa.ForeignKeyConstraint( ["opportunity_summary_id"], - ["opportunity_summary.opportunity_summary_id"], + ["api.opportunity_summary.opportunity_summary_id"], name=op.f( "current_opportunity_summary_opportunity_summary_id_opportunity_summary_fkey" ), @@ -197,6 +205,7 @@ def upgrade(): "opportunity_summary_id", name=op.f("current_opportunity_summary_pkey"), ), + schema="api", ) op.create_table( "link_opportunity_summary_applicant_type", @@ -219,14 +228,14 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["applicant_type_id"], - ["lk_applicant_type.applicant_type_id"], + ["api.lk_applicant_type.applicant_type_id"], name=op.f( "link_opportunity_summary_applicant_type_applicant_type_id_lk_applicant_type_fkey" ), ), sa.ForeignKeyConstraint( ["opportunity_summary_id"], - ["opportunity_summary.opportunity_summary_id"], + ["api.opportunity_summary.opportunity_summary_id"], name=op.f( "link_opportunity_summary_applicant_type_opportunity_summary_id_opportunity_summary_fkey" ), @@ -236,6 +245,7 @@ def upgrade(): "applicant_type_id", name=op.f("link_opportunity_summary_applicant_type_pkey"), ), + schema="api", ) op.create_table( "link_opportunity_summary_funding_category", @@ -258,14 +268,14 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["funding_category_id"], - ["lk_funding_category.funding_category_id"], + ["api.lk_funding_category.funding_category_id"], name=op.f( "link_opportunity_summary_funding_category_funding_category_id_lk_funding_category_fkey" ), ), sa.ForeignKeyConstraint( ["opportunity_summary_id"], - ["opportunity_summary.opportunity_summary_id"], + ["api.opportunity_summary.opportunity_summary_id"], name=op.f( "link_opportunity_summary_funding_category_opportunity_summary_id_opportunity_summary_fkey" ), @@ -275,6 +285,7 @@ def upgrade(): "funding_category_id", name=op.f("link_opportunity_summary_funding_category_pkey"), ), + schema="api", ) op.create_table( "link_opportunity_summary_funding_instrument", @@ -297,14 +308,14 @@ def upgrade(): ), sa.ForeignKeyConstraint( ["funding_instrument_id"], - ["lk_funding_instrument.funding_instrument_id"], + ["api.lk_funding_instrument.funding_instrument_id"], name=op.f( "link_opportunity_summary_funding_instrument_funding_instrument_id_lk_funding_instrument_fkey" ), ), sa.ForeignKeyConstraint( ["opportunity_summary_id"], - ["opportunity_summary.opportunity_summary_id"], + ["api.opportunity_summary.opportunity_summary_id"], 
name=op.f( "link_opportunity_summary_funding_instrument_opportunity_summary_id_opportunity_summary_fkey" ), @@ -314,24 +325,28 @@ def upgrade(): "funding_instrument_id", name=op.f("link_opportunity_summary_funding_instrument_pkey"), ), + schema="api", ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.drop_table("link_opportunity_summary_funding_instrument") - op.drop_table("link_opportunity_summary_funding_category") - op.drop_table("link_opportunity_summary_applicant_type") - op.drop_table("current_opportunity_summary") - op.drop_table("opportunity_summary") + op.drop_table("link_opportunity_summary_funding_instrument", schema="api") + op.drop_table("link_opportunity_summary_funding_category", schema="api") + op.drop_table("link_opportunity_summary_applicant_type", schema="api") + op.drop_table("current_opportunity_summary", schema="api") + op.drop_table("opportunity_summary", schema="api") op.drop_index( op.f("opportunity_assistance_listing_opportunity_id_idx"), table_name="opportunity_assistance_listing", + schema="api", + ) + op.drop_table("opportunity_assistance_listing", schema="api") + op.drop_index(op.f("opportunity_opportunity_title_idx"), table_name="opportunity", schema="api") + op.drop_index( + op.f("opportunity_opportunity_category_id_idx"), table_name="opportunity", schema="api" ) - op.drop_table("opportunity_assistance_listing") - op.drop_index(op.f("opportunity_opportunity_title_idx"), table_name="opportunity") - op.drop_index(op.f("opportunity_opportunity_category_id_idx"), table_name="opportunity") - op.drop_index(op.f("opportunity_is_draft_idx"), table_name="opportunity") - op.drop_table("opportunity") + op.drop_index(op.f("opportunity_is_draft_idx"), table_name="opportunity", schema="api") + op.drop_table("opportunity", schema="api") # ### end Alembic commands ### diff --git a/api/src/db/migrations/versions/2024_03_12_add_indexes_for_search.py b/api/src/db/migrations/versions/2024_03_12_add_indexes_for_search.py index ca4ad5185..a8a6c7f12 100644 --- a/api/src/db/migrations/versions/2024_03_12_add_indexes_for_search.py +++ b/api/src/db/migrations/versions/2024_03_12_add_indexes_for_search.py @@ -21,103 +21,128 @@ def upgrade(): "current_opportunity_summary", ["opportunity_id"], unique=False, + schema="api", ) op.create_index( op.f("current_opportunity_summary_opportunity_status_id_idx"), "current_opportunity_summary", ["opportunity_status_id"], unique=False, + schema="api", ) op.create_index( op.f("current_opportunity_summary_opportunity_summary_id_idx"), "current_opportunity_summary", ["opportunity_summary_id"], unique=False, + schema="api", ) op.create_index( op.f("link_opportunity_summary_applicant_type_applicant_type_id_idx"), "link_opportunity_summary_applicant_type", ["applicant_type_id"], unique=False, + schema="api", ) op.create_index( op.f("link_opportunity_summary_applicant_type_opportunity_summary_id_idx"), "link_opportunity_summary_applicant_type", ["opportunity_summary_id"], unique=False, + schema="api", ) op.create_index( op.f("link_opportunity_summary_funding_category_funding_category_id_idx"), "link_opportunity_summary_funding_category", ["funding_category_id"], unique=False, + schema="api", ) op.create_index( op.f("link_opportunity_summary_funding_category_opportunity_summary_id_idx"), "link_opportunity_summary_funding_category", ["opportunity_summary_id"], unique=False, + schema="api", ) op.create_index( op.f("link_opportunity_summary_funding_instrument_funding_instrument_id_idx"), 
"link_opportunity_summary_funding_instrument", ["funding_instrument_id"], unique=False, + schema="api", ) op.create_index( op.f("link_opportunity_summary_funding_instrument_opportunity_summary_id_idx"), "link_opportunity_summary_funding_instrument", ["opportunity_summary_id"], unique=False, + schema="api", + ) + op.create_index( + op.f("opportunity_agency_idx"), "opportunity", ["agency"], unique=False, schema="api" ) - op.create_index(op.f("opportunity_agency_idx"), "opportunity", ["agency"], unique=False) op.create_index( op.f("opportunity_summary_opportunity_id_idx"), "opportunity_summary", ["opportunity_id"], unique=False, + schema="api", ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.drop_index(op.f("opportunity_summary_opportunity_id_idx"), table_name="opportunity_summary") - op.drop_index(op.f("opportunity_agency_idx"), table_name="opportunity") + op.drop_index( + op.f("opportunity_summary_opportunity_id_idx"), + table_name="opportunity_summary", + schema="api", + ) + op.drop_index(op.f("opportunity_agency_idx"), table_name="opportunity", schema="api") op.drop_index( op.f("link_opportunity_summary_funding_instrument_opportunity_summary_id_idx"), table_name="link_opportunity_summary_funding_instrument", + schema="api", ) op.drop_index( op.f("link_opportunity_summary_funding_instrument_funding_instrument_id_idx"), table_name="link_opportunity_summary_funding_instrument", + schema="api", ) op.drop_index( op.f("link_opportunity_summary_funding_category_opportunity_summary_id_idx"), table_name="link_opportunity_summary_funding_category", + schema="api", ) op.drop_index( op.f("link_opportunity_summary_funding_category_funding_category_id_idx"), table_name="link_opportunity_summary_funding_category", + schema="api", ) op.drop_index( op.f("link_opportunity_summary_applicant_type_opportunity_summary_id_idx"), table_name="link_opportunity_summary_applicant_type", + schema="api", ) op.drop_index( op.f("link_opportunity_summary_applicant_type_applicant_type_id_idx"), table_name="link_opportunity_summary_applicant_type", + schema="api", ) op.drop_index( op.f("current_opportunity_summary_opportunity_summary_id_idx"), table_name="current_opportunity_summary", + schema="api", ) op.drop_index( op.f("current_opportunity_summary_opportunity_status_id_idx"), table_name="current_opportunity_summary", + schema="api", ) op.drop_index( op.f("current_opportunity_summary_opportunity_id_idx"), table_name="current_opportunity_summary", + schema="api", ) # ### end Alembic commands ### diff --git a/api/src/db/models/base.py b/api/src/db/models/base.py index 5c6cc5382..ee739fb91 100644 --- a/api/src/db/models/base.py +++ b/api/src/db/models/base.py @@ -9,6 +9,7 @@ from sqlalchemy.orm import DeclarativeBase, Mapped, declarative_mixin, mapped_column from sqlalchemy.sql.functions import now as sqlnow +from src.constants.schema import Schemas from src.util import datetime_util # Override the default naming of constraints @@ -16,7 +17,7 @@ # https://stackoverflow.com/questions/4107915/postgresql-default-constraint-names/4108266#4108266 metadata = MetaData( naming_convention={ - "ix": "%(column_0_label)s_idx", + "ix": "%(table_name)s_%(column_0_name)s_idx", "uq": "%(table_name)s_%(column_0_name)s_uniq", "ck": "%(table_name)s_`%(constraint_name)s_check`", "fk": "%(table_name)s_%(column_0_name)s_%(referred_table_name)s_fkey", @@ -96,6 +97,12 @@ def __rich_repr__(self) -> Iterable[tuple[str, Any]]: return self._dict().items() +class ApiSchemaTable(Base): 
+ __abstract__ = True + + __table_args__ = {"schema": Schemas.API} + + @declarative_mixin class IdMixin: """Mixin to add a UUID id primary key column to a model diff --git a/api/src/db/models/lookup/lookup_table.py b/api/src/db/models/lookup/lookup_table.py index 4faeb806c..95f6429a4 100644 --- a/api/src/db/models/lookup/lookup_table.py +++ b/api/src/db/models/lookup/lookup_table.py @@ -1,12 +1,12 @@ from typing import Type, TypeVar -from src.db.models.base import Base +from src.db.models.base import ApiSchemaTable from src.db.models.lookup import Lookup L = TypeVar("L", bound="LookupTable") -class LookupTable(Base): +class LookupTable(ApiSchemaTable): __abstract__ = True @classmethod diff --git a/api/src/db/models/opportunity_models.py b/api/src/db/models/opportunity_models.py index 35ebb64f8..d322ff00e 100644 --- a/api/src/db/models/opportunity_models.py +++ b/api/src/db/models/opportunity_models.py @@ -11,7 +11,7 @@ OpportunityCategory, OpportunityStatus, ) -from src.db.models.base import Base, TimestampMixin +from src.db.models.base import ApiSchemaTable, TimestampMixin from src.db.models.lookup_models import ( LkApplicantType, LkFundingCategory, @@ -21,7 +21,7 @@ ) -class Opportunity(Base, TimestampMixin): +class Opportunity(ApiSchemaTable, TimestampMixin): __tablename__ = "opportunity" opportunity_id: Mapped[int] = mapped_column(BigInteger, primary_key=True) @@ -82,7 +82,7 @@ def opportunity_status(self) -> OpportunityStatus | None: return self.current_opportunity_summary.opportunity_status -class OpportunitySummary(Base, TimestampMixin): +class OpportunitySummary(ApiSchemaTable, TimestampMixin): __tablename__ = "opportunity_summary" opportunity_summary_id: Mapped[int] = mapped_column(BigInteger, primary_key=True) @@ -169,7 +169,7 @@ def applicant_types(self) -> set[ApplicantType]: return {a.applicant_type for a in self.link_applicant_types} -class OpportunityAssistanceListing(Base, TimestampMixin): +class OpportunityAssistanceListing(ApiSchemaTable, TimestampMixin): __tablename__ = "opportunity_assistance_listing" opportunity_assistance_listing_id: Mapped[int] = mapped_column(BigInteger, primary_key=True) @@ -186,7 +186,7 @@ class OpportunityAssistanceListing(Base, TimestampMixin): created_by: Mapped[str | None] -class LinkOpportunitySummaryFundingInstrument(Base, TimestampMixin): +class LinkOpportunitySummaryFundingInstrument(ApiSchemaTable, TimestampMixin): __tablename__ = "link_opportunity_summary_funding_instrument" opportunity_summary_id: Mapped[int] = mapped_column( @@ -211,7 +211,7 @@ class LinkOpportunitySummaryFundingInstrument(Base, TimestampMixin): created_by: Mapped[str | None] -class LinkOpportunitySummaryFundingCategory(Base, TimestampMixin): +class LinkOpportunitySummaryFundingCategory(ApiSchemaTable, TimestampMixin): __tablename__ = "link_opportunity_summary_funding_category" opportunity_summary_id: Mapped[int] = mapped_column( @@ -236,7 +236,7 @@ class LinkOpportunitySummaryFundingCategory(Base, TimestampMixin): created_by: Mapped[str | None] -class LinkOpportunitySummaryApplicantType(Base, TimestampMixin): +class LinkOpportunitySummaryApplicantType(ApiSchemaTable, TimestampMixin): __tablename__ = "link_opportunity_summary_applicant_type" opportunity_summary_id: Mapped[int] = mapped_column( @@ -261,7 +261,7 @@ class LinkOpportunitySummaryApplicantType(Base, TimestampMixin): created_by: Mapped[str | None] -class CurrentOpportunitySummary(Base, TimestampMixin): +class CurrentOpportunitySummary(ApiSchemaTable, TimestampMixin): __tablename__ = 
"current_opportunity_summary" opportunity_id: Mapped[int] = mapped_column( diff --git a/api/src/db/models/transfer/topportunity_models.py b/api/src/db/models/transfer/topportunity_models.py index 60f2e244a..83ef2735f 100644 --- a/api/src/db/models/transfer/topportunity_models.py +++ b/api/src/db/models/transfer/topportunity_models.py @@ -3,7 +3,7 @@ from sqlalchemy import VARCHAR, Integer from sqlalchemy.orm import Mapped, mapped_column -from src.db.models.base import Base, TimestampMixin +from src.db.models.base import ApiSchemaTable, TimestampMixin ########## # NOTES @@ -15,7 +15,7 @@ """ -class TransferTopportunity(Base, TimestampMixin): +class TransferTopportunity(ApiSchemaTable, TimestampMixin): __tablename__ = "transfer_topportunity" opportunity_id: Mapped[int] = mapped_column(Integer, primary_key=True) diff --git a/api/tests/conftest.py b/api/tests/conftest.py index 91ad2705d..20815aa86 100644 --- a/api/tests/conftest.py +++ b/api/tests/conftest.py @@ -1,4 +1,5 @@ import logging +import uuid import _pytest.monkeypatch import boto3 @@ -10,6 +11,7 @@ import src.adapters.db as db import src.app as app_entry import tests.src.db.models.factories as factories +from src.constants.schema import Schemas from src.db import models from src.db.models.lookup.sync_lookup_values import sync_lookup_values from src.db.models.opportunity_models import Opportunity @@ -84,7 +86,7 @@ def monkeypatch_module(): @pytest.fixture(scope="session") -def db_client(monkeypatch_session) -> db.DBClient: +def db_client(monkeypatch_session, db_schema_prefix) -> db.DBClient: """ Creates an isolated database for the test session. @@ -94,7 +96,7 @@ def db_client(monkeypatch_session) -> db.DBClient: after the test suite session completes. """ - with db_testing.create_isolated_db(monkeypatch_session) as db_client: + with db_testing.create_isolated_db(monkeypatch_session, db_schema_prefix) as db_client: with db_client.get_connection() as conn, conn.begin(): models.metadata.create_all(bind=conn) @@ -124,6 +126,16 @@ def enable_factory_create(monkeypatch, db_session) -> db.Session: return db_session +@pytest.fixture(scope="session") +def db_schema_prefix(): + return f"test_{uuid.uuid4().int}_" + + +@pytest.fixture +def test_api_schema(db_schema_prefix): + return f"{db_schema_prefix}{Schemas.API}" + + #################### # Test App & Client #################### diff --git a/api/tests/lib/db_testing.py b/api/tests/lib/db_testing.py index ccabdd7b4..9b0b0973c 100644 --- a/api/tests/lib/db_testing.py +++ b/api/tests/lib/db_testing.py @@ -1,7 +1,6 @@ """Helper functions for testing database code.""" import contextlib import logging -import uuid from sqlalchemy import text @@ -12,27 +11,34 @@ @contextlib.contextmanager -def create_isolated_db(monkeypatch) -> db.DBClient: +def create_isolated_db(monkeypatch, db_schema_prefix) -> db.DBClient: """ Creates a temporary PostgreSQL schema and creates a database engine that connects to that schema. Drops the schema after the context manager exits. """ - schema_name = f"test_schema_{uuid.uuid4().int}" - monkeypatch.setenv("DB_SCHEMA", schema_name) - monkeypatch.setenv("DB_CHECK_CONNECTION_ON_INIT", "False") # To improve test performance, don't check the database connection # when initializing the DB client. 
- db_client = db.PostgresDBClient() + monkeypatch.setenv("DB_CHECK_CONNECTION_ON_INIT", "False") + # We set the prefix override here so that when the API client creates a DB config, + # it also has the appropriate prefix value for the schema translate map + monkeypatch.setenv("SCHEMA_PREFIX_OVERRIDE", db_schema_prefix) + + db_config = db.PostgresDBConfig(schema_prefix_override=db_schema_prefix) + db_client = db.PostgresDBClient(db_config) + test_schemas = db_config.get_schema_translate_map().values() + with db_client.get_connection() as conn: - _create_schema(conn, schema_name) + for schema in test_schemas: + _create_schema(conn, schema) try: yield db_client finally: - _drop_schema(conn, schema_name) + for schema in test_schemas: + _drop_schema(conn, schema) def _create_schema(conn: db.Connection, schema_name: str): diff --git a/api/tests/src/adapters/db/clients/test_postgres_client.py b/api/tests/src/adapters/db/clients/test_postgres_client.py index 1678bd6c6..b6e56afa5 100644 --- a/api/tests/src/adapters/db/clients/test_postgres_client.py +++ b/api/tests/src/adapters/db/clients/test_postgres_client.py @@ -48,7 +48,6 @@ def test_get_connection_parameters(monkeypatch: pytest.MonkeyPatch): user=db_config.username, password=db_config.password, port=db_config.port, - options=f"-c search_path={db_config.db_schema}", connect_timeout=10, sslmode="require", ) diff --git a/api/tests/src/adapters/db/type_decorators/test_postgres_type_decorators.py b/api/tests/src/adapters/db/type_decorators/test_postgres_type_decorators.py index bcd56eb9f..325cf15ae 100644 --- a/api/tests/src/adapters/db/type_decorators/test_postgres_type_decorators.py +++ b/api/tests/src/adapters/db/type_decorators/test_postgres_type_decorators.py @@ -14,7 +14,9 @@ "category,db_value", [(OpportunityCategory.CONTINUATION, 3), (OpportunityCategory.EARMARK, 4), (None, None)], ) -def test_lookup_column_conversion(db_session, enable_factory_create, category, db_value): +def test_lookup_column_conversion( + db_session, enable_factory_create, category, db_value, test_api_schema +): # Verify column works with factories opportunity = OpportunityFactory.create(category=category) assert opportunity.category == category @@ -32,7 +34,7 @@ def test_lookup_column_conversion(db_session, enable_factory_create, category, d # Verify what we stored in the DB is the integer raw_db_value = db_session.execute( text( - f"select opportunity_category_id from {Opportunity.get_table_name()} where opportunity_id={opportunity.opportunity_id}" # nosec + f"select opportunity_category_id from {test_api_schema}.{Opportunity.get_table_name()} where opportunity_id={opportunity.opportunity_id}" # nosec ) ).scalar() assert raw_db_value == db_value diff --git a/api/tests/src/data_migration/test_copy_oracle_data.py b/api/tests/src/data_migration/test_copy_oracle_data.py index 21d4cd57d..51c92a254 100644 --- a/api/tests/src/data_migration/test_copy_oracle_data.py +++ b/api/tests/src/data_migration/test_copy_oracle_data.py @@ -10,15 +10,17 @@ @pytest.fixture(autouse=True) -def setup_foreign_tables(db_session): - _run_create_table_commands(db_session, ForeignTableConfig(is_local_foreign_table=True)) +def setup_foreign_tables(db_session, test_api_schema): + _run_create_table_commands( + db_session, ForeignTableConfig(is_local_foreign_table=True, schema_name=test_api_schema) + ) @pytest.fixture(autouse=True, scope="function") -def truncate_tables(db_session): +def truncate_tables(db_session, test_api_schema): # Automatically delete all the data in the relevant tables before tests -
db_session.execute(text("TRUNCATE TABLE foreign_topportunity")) - db_session.execute(text("TRUNCATE TABLE transfer_topportunity")) + db_session.execute(text(f"TRUNCATE TABLE {test_api_schema}.foreign_topportunity")) + db_session.execute(text(f"TRUNCATE TABLE {test_api_schema}.transfer_topportunity")) def convert_value_for_insert(value) -> str: @@ -35,7 +37,7 @@ def convert_value_for_insert(value) -> str: raise Exception("Type not configured for conversion") -def build_foreign_opportunity(db_session, opp_params: dict): +def build_foreign_opportunity(db_session, opp_params: dict, api_schema: str): opp = ForeignTopportunityFactory.build(**opp_params) columns = opp.keys() @@ -43,39 +45,41 @@ def build_foreign_opportunity(db_session, opp_params: dict): db_session.execute( text( - f"INSERT INTO foreign_topportunity ({','.join(columns)}) VALUES ({','.join(values)})" # nosec + f"INSERT INTO {api_schema}.foreign_topportunity ({','.join(columns)}) VALUES ({','.join(values)})" # nosec ) ) return opp -def test_copy_oracle_data_foreign_empty(db_session, enable_factory_create): +def test_copy_oracle_data_foreign_empty(db_session, enable_factory_create, test_api_schema): TransferTopportunityFactory.create_batch(size=5) # The foreign table is empty, so this just truncates the transfer table assert db_session.query(TransferTopportunity).count() == 5 - _run_copy_commands(db_session) + _run_copy_commands(db_session, test_api_schema) assert db_session.query(TransferTopportunity).count() == 0 -def test_copy_oracle_data(db_session, enable_factory_create): +def test_copy_oracle_data(db_session, enable_factory_create, test_api_schema): print(db_session.__class__.__name__) # Create some records initially in the table that we'll wipe TransferTopportunityFactory.create_batch(size=3) foreign_records = [ - build_foreign_opportunity(db_session, {}), - build_foreign_opportunity(db_session, {}), - build_foreign_opportunity(db_session, {}), - build_foreign_opportunity(db_session, {"oppnumber": "ABC-123-454-321-CBA"}), - build_foreign_opportunity(db_session, {"opportunity_id": 100}), + build_foreign_opportunity(db_session, {}, test_api_schema), + build_foreign_opportunity(db_session, {}, test_api_schema), + build_foreign_opportunity(db_session, {}, test_api_schema), + build_foreign_opportunity( + db_session, {"oppnumber": "ABC-123-454-321-CBA"}, test_api_schema + ), + build_foreign_opportunity(db_session, {"opportunity_id": 100}, test_api_schema), ] # The copy script won't fetch anything with is_draft not equaling "N" so add one - build_foreign_opportunity(db_session, {"is_draft": "Y"}) + build_foreign_opportunity(db_session, {"is_draft": "Y"}, test_api_schema) - _run_copy_commands(db_session) + _run_copy_commands(db_session, test_api_schema) copied_opportunities = db_session.query(TransferTopportunity).all() diff --git a/api/tests/src/data_migration/test_setup_foreign_tables.py b/api/tests/src/data_migration/test_setup_foreign_tables.py index 0a23b51e0..2f235e966 100644 --- a/api/tests/src/data_migration/test_setup_foreign_tables.py +++ b/api/tests/src/data_migration/test_setup_foreign_tables.py @@ -3,7 +3,7 @@ from src.data_migration.setup_foreign_tables import OPPORTUNITY_COLUMNS, Column, build_sql EXPECTED_LOCAL_OPPORTUNITY_SQL = ( - "CREATE TABLE IF NOT EXISTS foreign_topportunity " + "CREATE TABLE IF NOT EXISTS {}.foreign_topportunity " "(OPPORTUNITY_ID numeric(20) CONSTRAINT TOPPORTUNITY_pkey PRIMARY KEY NOT NULL," "OPPNUMBER character varying (40)," "REVISION_NUMBER numeric(20)," @@ -25,7 +25,7 @@ ) 
EXPECTED_NONLOCAL_OPPORTUNITY_SQL = ( - "CREATE FOREIGN TABLE IF NOT EXISTS foreign_topportunity " + "CREATE FOREIGN TABLE IF NOT EXISTS {}.foreign_topportunity " "(OPPORTUNITY_ID numeric(20) OPTIONS (key 'true') NOT NULL," "OPPNUMBER character varying (40)," "REVISION_NUMBER numeric(20)," @@ -53,12 +53,12 @@ Column("DESCRIPTION", "text"), ] EXPECTED_LOCAL_TEST_SQL = ( - "CREATE TABLE IF NOT EXISTS foreign_test_table " + "CREATE TABLE IF NOT EXISTS {}.foreign_test_table " "(ID integer CONSTRAINT TEST_TABLE_pkey PRIMARY KEY NOT NULL," "DESCRIPTION text)" ) EXPECTED_NONLOCAL_TEST_SQL = ( - "CREATE FOREIGN TABLE IF NOT EXISTS foreign_test_table " + "CREATE FOREIGN TABLE IF NOT EXISTS {}.foreign_test_table " "(ID integer OPTIONS (key 'true') NOT NULL," "DESCRIPTION text)" " SERVER grants OPTIONS (schema 'EGRANTSADMIN', table 'TEST_TABLE')" @@ -74,7 +74,7 @@ ("TOPPORTUNITY", OPPORTUNITY_COLUMNS, False, EXPECTED_NONLOCAL_OPPORTUNITY_SQL), ], ) -def test_build_sql(table_name, columns, is_local, expected_sql): - sql = build_sql(table_name, columns, is_local) +def test_build_sql(table_name, columns, is_local, expected_sql, test_api_schema): + sql = build_sql(table_name, columns, is_local, test_api_schema) - assert sql == expected_sql + assert sql == expected_sql.format(test_api_schema) diff --git a/api/tests/src/db/models/lookup/test_sync_lookup_values.py b/api/tests/src/db/models/lookup/test_sync_lookup_values.py index 6a92b6bb2..631ea81e6 100644 --- a/api/tests/src/db/models/lookup/test_sync_lookup_values.py +++ b/api/tests/src/db/models/lookup/test_sync_lookup_values.py @@ -1,4 +1,5 @@ import logging +import uuid from enum import StrEnum from typing import Type @@ -22,7 +23,9 @@ def schema_no_lookup(monkeypatch) -> db.PostgresDBClient: This is similar to what the db_client fixture does but does not create any tables in the schema. """ - with db_testing.create_isolated_db(monkeypatch) as db_client: + with db_testing.create_isolated_db( + monkeypatch, f"test_lookup_{uuid.uuid4().int}_" + ) as db_client: db_models.metadata.create_all(bind=db_client._engine) # Skipping the sync that normally occurs to do in tests below yield db_client diff --git a/api/tests/src/db/test_migrations.py b/api/tests/src/db/test_migrations.py index b6bf9678e..0f9b7d81f 100644 --- a/api/tests/src/db/test_migrations.py +++ b/api/tests/src/db/test_migrations.py @@ -1,4 +1,5 @@ import logging # noqa: B1 +import uuid import alembic.command as command import pytest @@ -20,7 +21,9 @@ def empty_schema(monkeypatch) -> db.DBClient: This is similar to what the db_client fixture does but does not create any tables in the schema. """ - with db_testing.create_isolated_db(monkeypatch) as db_client: + with db_testing.create_isolated_db( + monkeypatch, f"test_migrations_{uuid.uuid4().int}_" + ) as db_client: yield db_client @@ -47,11 +50,26 @@ def test_only_single_head_revision_in_migrations(): ) -def test_db_setup_via_alembic_migration(empty_schema, caplog: pytest.LogCaptureFixture): - caplog.set_level(logging.INFO) # noqa: B1 - command.upgrade(alembic_cfg, "head") - # Verify the migration ran by checking the logs +def test_db_setup_via_alembic_migration( + empty_schema, caplog: pytest.LogCaptureFixture, capsys: pytest.CaptureFixture +): + """ + All of our tests run using temporary DB schemas. However, the Alembic + migrations are generated with the schema hardcoded (e.g. "api"), and making Alembic + work in a test would require intercepting those function calls to swap in our + test schema.
While this is doable, we'd need to do it for more than a dozen + functions with varying signatures, which feels too brittle and complex + to be a valuable test. + """ + + caplog.set_level(logging.INFO) + # Tell Alembic to run all migrations in offline mode, generating the SQL for each revision instead of executing it + command.upgrade(alembic_cfg, "base:head", sql=True) + + # Verify that the upgrades ran and that at least one specific query is present + # Alembic writes the generated SQL to stdout, so capsys captures it. assert "Running upgrade" in caplog.text + assert "CREATE TABLE api.opportunity" in capsys.readouterr().out def test_db_init_with_migrations(empty_schema):