diff --git a/dbt/adapters/bigquery/dataset.py b/dbt/adapters/bigquery/dataset.py
index d25b69dc3..dc0b3e27c 100644
--- a/dbt/adapters/bigquery/dataset.py
+++ b/dbt/adapters/bigquery/dataset.py
@@ -46,14 +46,14 @@ def add_access_entry_to_dataset(dataset: Dataset, access_entry: AccessEntry) ->
 
 
-def delete_access_entry_to_dataset(dataset: Dataset, access_entry: AccessEntry) -> Dataset:
+def delete_access_entry_from_dataset(dataset: Dataset, access_entry: AccessEntry) -> Dataset:
     """Remove an access entry from a dataset, always use.
 
     Args:
         dataset (Dataset): the dataset to be updated
         access_entry (AccessEntry): the access entry to be removed from the dataset
     """
-    access_entries = list(dataset.access_entries)
+    access_entries = dataset.access_entries
     access_entries_id = [entity.entity_id for entity in access_entries]
     full_dataset_id = f"{dataset.project}.{dataset.dataset_id}"
diff --git a/dbt/adapters/bigquery/impl.py b/dbt/adapters/bigquery/impl.py
index 0af621cab..0c986a5d8 100644
--- a/dbt/adapters/bigquery/impl.py
+++ b/dbt/adapters/bigquery/impl.py
@@ -45,7 +45,7 @@
 from dbt.adapters.bigquery import BigQueryColumn, BigQueryConnectionManager
 from dbt.adapters.bigquery.column import get_nested_column_data_types
 from dbt.adapters.bigquery.connections import BigQueryAdapterResponse
-from dbt.adapters.bigquery.dataset import add_access_entry_to_dataset, is_access_entry_in_dataset, delete_access_entry_to_dataset
+from dbt.adapters.bigquery.dataset import add_access_entry_to_dataset, is_access_entry_in_dataset, delete_access_entry_from_dataset
 from dbt.adapters.bigquery.python_submissions import (
     ClusterDataprocHelper,
     ServerlessDataProcHelper,
@@ -840,17 +840,17 @@ def grant_access_to(self, entity, entity_type, role, grant_target_dict,full_refr
             logger.warning(f"Access entry {access_entry} " f"already exists in dataset")
             return
         else:
-            dataset = delete_access_entry_to_dataset(dataset,access_entry)
+            dataset = delete_access_entry_from_dataset(dataset, access_entry)
             dataset = client.update_dataset(
                 dataset,
                 ["access_entries"],
             )  # Make an API request.
             full_dataset_id = f"{dataset.project}.{dataset.dataset_id}"
-            print(f"Revoked dataset access for '{access_entry.entity_id}' to ' dataset '{full_dataset_id}.'")
+            logger.info(f"Revoked dataset access for '{access_entry.entity_id}' to dataset '{full_dataset_id}'")
         dataset = add_access_entry_to_dataset(dataset, access_entry)
         dataset = client.update_dataset(dataset, ["access_entries"])
         full_dataset_id = f"{dataset.project}.{dataset.dataset_id}"
-        print(f"allowed dataset access for '{access_entry.entity_id}' to ' dataset '{full_dataset_id}.'")
+        logger.info(f"Granted dataset access for '{access_entry.entity_id}' to dataset '{full_dataset_id}'")
 
 
     @available.parse_none
@@ -869,15 +869,15 @@ def remove_grant_access_to(self, entity, entity_type, role, grant_target_dict):
         dataset = client.get_dataset(dataset_ref)
         access_entry = AccessEntry(role, entity_type, entity)
         # only perform removing if access entry in dataset
+        full_dataset_id = f"{dataset.project}.{dataset.dataset_id}"
         if is_access_entry_in_dataset(dataset, access_entry):
-            dataset = delete_access_entry_to_dataset(dataset,access_entry)
+            dataset = delete_access_entry_from_dataset(dataset, access_entry)
             dataset = client.update_dataset(
                 dataset,
                 ["access_entries"],
             )  # Make an API request.
-            full_dataset_id = f"{dataset.project}.{dataset.dataset_id}"
-            print(f"Revoked dataset access for '{access_entry.entity_id}' to ' dataset '{full_dataset_id}.'")
+            logger.info(f"Revoked dataset access for '{access_entry.entity_id}' to dataset '{full_dataset_id}'")
         else:
             logger.warning(f"Access entry {access_entry} not in the dataset {full_dataset_id} no need to remove it")
diff --git a/dbt/include/bigquery/macros/materializations/view.sql b/dbt/include/bigquery/macros/materializations/view.sql
index eab2b58db..f783726e4 100644
--- a/dbt/include/bigquery/macros/materializations/view.sql
+++ b/dbt/include/bigquery/macros/materializations/view.sql
@@ -20,7 +20,7 @@
 
   {% if config.get('grant_access_to') %}
     {% for grant_target_dict in config.get('grant_access_to') %}
-      {% do adapter.grant_access_to(this, 'view', None, grant_target_dict,should_full_refresh()) %}
+      {% do adapter.grant_access_to(this, 'view', None, grant_target_dict, should_full_refresh()) %}
     {% endfor %}
   {% endif %}
 
diff --git a/tests/functional/adapter/test_grant_access_to.py b/tests/functional/adapter/test_grant_access_to.py
index 633cebe92..f62846178 100644
--- a/tests/functional/adapter/test_grant_access_to.py
+++ b/tests/functional/adapter/test_grant_access_to.py
@@ -86,6 +86,55 @@ def test_grant_access_succeeds(self, project, setup_grant_schema, teardown_grant
         assert len(results) == 2
 
 
+
+class TestAccessGrantSucceedsWithFullRefresh:
+    @pytest.fixture(scope="class")
+    def setup_grant_schema(
+        self,
+        project,
+        unique_schema,
+    ):
+        with project.adapter.connection_named("__test_grants"):
+            relation = project.adapter.Relation.create(
+                database=project.database,
+                schema=get_schema_name(unique_schema),
+                identifier="grant_access",
+            )
+            project.adapter.create_schema(relation)
+            yield relation
+
+    @pytest.fixture(scope="class")
+    def teardown_grant_schema(
+        self,
+        project,
+        unique_schema,
+    ):
+        yield
+        with project.adapter.connection_named("__test_grants"):
+            relation = project.adapter.Relation.create(
+                database=project.database, schema=get_schema_name(unique_schema)
+            )
+            project.adapter.drop_schema(relation)
+
+    @pytest.fixture(scope="class")
+    def models(self, unique_schema):
+        dataset = get_schema_name(unique_schema)
+        return {
+            "select_1.sql": select_1(dataset=dataset, materialized="view"),
+            "select_1_table.sql": select_1(dataset=dataset, materialized="table"),
+        }
+
+    def test_grant_access_succeeds(self, project, setup_grant_schema, teardown_grant_schema):
+        # Need to run twice to validate idempotency
+        results = run_dbt(["run"])
+        assert len(results) == 2
+        time.sleep(10)
+        results = run_dbt(["run", "--full-refresh"])
+        assert len(results) == 2
+
+
+
+
 class TestAccessGrantFails:
     @pytest.fixture(scope="class")
     def models(self):
diff --git a/tests/unit/test_dataset.py b/tests/unit/test_dataset.py
index e0f5d2f96..540f57acd 100644
--- a/tests/unit/test_dataset.py
+++ b/tests/unit/test_dataset.py
@@ -1,4 +1,4 @@
-from dbt.adapters.bigquery.dataset import add_access_entry_to_dataset, is_access_entry_in_dataset, delete_access_entry_to_dataset
+from dbt.adapters.bigquery.dataset import add_access_entry_to_dataset, is_access_entry_in_dataset, delete_access_entry_from_dataset
 from dbt.adapters.bigquery import BigQueryRelation
 
 from google.cloud.bigquery import Dataset, AccessEntry, DatasetReference
@@ -114,5 +114,5 @@ def test_delete_access_to_dataset_updates_dataset():
     access_entry = AccessEntry(None, "table", entity)
     dataset = add_access_entry_to_dataset(dataset, access_entry)
     assert is_access_entry_in_dataset(dataset, access_entry)
-    dataset = delete_access_entry_to_dataset(dataset, access_entry)
+    dataset = delete_access_entry_from_dataset(dataset, access_entry)
     assert not is_access_entry_in_dataset(dataset, access_entry)
\ No newline at end of file
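
Note (outside the patch): a minimal standalone sketch of the access-entry round-trip that add_access_entry_to_dataset and the renamed delete_access_entry_from_dataset wrap, written against the public google-cloud-bigquery client API only. The project, dataset, and view IDs are placeholders, and this illustrates the underlying client calls rather than the adapter's actual implementation.

# Illustrative only -- placeholder IDs, not part of dbt-bigquery.
from google.cloud import bigquery
from google.cloud.bigquery import AccessEntry

client = bigquery.Client()
dataset = client.get_dataset("my-project.my_dataset")  # placeholder dataset ID

# An authorized-view entry: role must be None when entity_type is "view".
entry = AccessEntry(
    role=None,
    entity_type="view",
    entity_id={"projectId": "my-project", "datasetId": "my_dataset", "tableId": "my_view"},
)

# Grant: append the entry and push the whole list back in one update call.
entries = list(dataset.access_entries)
entries.append(entry)
dataset.access_entries = entries
dataset = client.update_dataset(dataset, ["access_entries"])

# Revoke: filter the entry back out by entity_id and update again.
dataset.access_entries = [
    e for e in dataset.access_entries if e.entity_id != entry.entity_id
]
dataset = client.update_dataset(dataset, ["access_entries"])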