diff --git a/dabs/dabs_template/template/tmp/resources/sat_driver_job.yml.tmpl b/dabs/dabs_template/template/tmp/resources/sat_driver_job.yml.tmpl index 41b3a9c7..fcd371e5 100644 --- a/dabs/dabs_template/template/tmp/resources/sat_driver_job.yml.tmpl +++ b/dabs/dabs_template/template/tmp/resources/sat_driver_job.yml.tmpl @@ -17,6 +17,7 @@ resources: job_clusters: - job_cluster_key: job_cluster new_cluster: + data_security_mode: SINGLE_USER num_workers: 5 spark_version: {{.latest_lts}} runtime_engine: "PHOTON" diff --git a/dabs/dabs_template/template/tmp/resources/sat_initiliazer_job.yml.tmpl b/dabs/dabs_template/template/tmp/resources/sat_initiliazer_job.yml.tmpl index 248949a7..8eb77e9e 100644 --- a/dabs/dabs_template/template/tmp/resources/sat_initiliazer_job.yml.tmpl +++ b/dabs/dabs_template/template/tmp/resources/sat_initiliazer_job.yml.tmpl @@ -15,6 +15,7 @@ resources: job_clusters: - job_cluster_key: job_cluster new_cluster: + data_security_mode: SINGLE_USER num_workers: 5 spark_version: {{.latest_lts}} runtime_engine: "PHOTON" diff --git a/docs/setup.md b/docs/setup.md index b992f833..331be43e 100644 --- a/docs/setup.md +++ b/docs/setup.md @@ -77,9 +77,19 @@ You now have two jobs (SAT Initializer Notebook & SAT Driver Notebook). Run SAT ### 2. Access Databricks SQL Dashboards - > **Note:** You can also use Lakeview Dashboards to view the results. + > **Note:** You can use Lakeview Dashboards to view the results. + +The Dashboard is, by default, owned by the profile you used to set up SAT or the Service Principal. If you see errors running the dashboard, you are likely running into permissions issues. + 1. Go over to the dashboard and click on the "Share" button in the top right. + 2. Click on the cogwheel and select the option "Assign new owner" + 3. Assign yourself as the new owner of the dashboard. You can also assign this to someone who has access to the SAT catalog/schema and tables. + 4. 
Click on the "Published" option at the top to switch to the draft version of the dashboard. Click on the "Publish" button next to the share option. + 5. In the general settings section, you can choose one of two options: + Embed credentials (default): All viewers will run queries using the owner's credentials and compute. This may expose data to users who normally wouldn't have access. + Don't embed credentials: Each viewer will need access to this workspace, the associated data, and the compute to view this dashboard. We recommend using this option. + > **Note:** We are switching SAT to Lakeview Dashboard, but the classic dashboard is still available. In DBSQL find "SAT - Security Analysis Tool" dashboard to see the report. You can filter the dashboard by **SAT** tag. (The old classic legacy dashboard can be found in Workspace -> Home -> SAT_dashboard) diff --git a/terraform/common/jobs.tf b/terraform/common/jobs.tf index d4a67b9b..73d44899 100644 --- a/terraform/common/jobs.tf +++ b/terraform/common/jobs.tf @@ -3,10 +3,11 @@ resource "databricks_job" "initializer" { job_cluster { job_cluster_key = "job_cluster" new_cluster { - num_workers = 5 - spark_version = data.databricks_spark_version.latest_lts.id - node_type_id = data.databricks_node_type.smallest.id - runtime_engine = "PHOTON" + data_security_mode = "SINGLE_USER" + num_workers = 5 + spark_version = data.databricks_spark_version.latest_lts.id + node_type_id = data.databricks_node_type.smallest.id + runtime_engine = "PHOTON" dynamic "gcp_attributes" { for_each = var.gcp_impersonate_service_account == "" ? 
[] : [var.gcp_impersonate_service_account] content { @@ -17,7 +18,7 @@ resource "databricks_job" "initializer" { } task { - task_key = "Initializer" + task_key = "Initializer" job_cluster_key = "job_cluster" library { pypi { @@ -36,6 +37,7 @@ resource "databricks_job" "driver" { job_cluster { job_cluster_key = "job_cluster" new_cluster { + data_security_mode = "SINGLE_USER" num_workers = 5 spark_version = data.databricks_spark_version.latest_lts.id node_type_id = data.databricks_node_type.smallest.id