[Exporter] Add support for exporting Lakeview dashboards (#3779)
## Changes

This adds support for exporting the `databricks_dashboard` resource and its dependencies.

Current limitations:

- No support for incremental mode
- The list operation fails on large lists; it's unclear whether the problem is in the API or the Go SDK (see the sketch of the underlying call below)
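
The listing goes through the Go SDK's `Lakeview.ListAll` helper, the same call the exporter makes. A minimal standalone sketch of that call (assuming workspace authentication is picked up from the environment):

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/dashboards"
)

func main() {
	ctx := context.Background()
	w := databricks.Must(databricks.NewWorkspaceClient())
	// ListAll follows page tokens internally, requesting 100 dashboards per page;
	// this is the call that has been observed to fail on large lists.
	all, err := w.Lakeview.ListAll(ctx, dashboards.ListDashboardsRequest{PageSize: 100})
	if err != nil {
		panic(err)
	}
	for _, d := range all {
		fmt.Printf("%s: %s\n", d.DashboardId, d.DisplayName)
	}
}
```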

## Tests

- [x] `make test` run locally
- [x] relevant change in `docs/` folder
- [ ] covered with integration tests in `internal/acceptance`
- [ ] relevant acceptance tests are passing
- [x] using Go SDK
alexott authored Jul 20, 2024
1 parent 733c998 commit 0e32851
Showing 3 changed files with 173 additions and 0 deletions.
2 changes: 2 additions & 0 deletions docs/guides/experimental-exporter.md
@@ -112,6 +112,7 @@ Services are just logical groups of resources used for filtering and organization

* `access` - [databricks_permissions](../resources/permissions.md), [databricks_instance_profile](../resources/instance_profile.md), [databricks_ip_access_list](../resources/ip_access_list.md), [databricks_mws_permission_assignment](../resources/mws_permission_assignment.md) and [databricks_access_control_rule_set](../resources/access_control_rule_set.md).
* `compute` - **listing** [databricks_cluster](../resources/cluster.md).
* `dashboards` - **listing** [databricks_dashboard](../resources/dashboard.md).
* `directories` - **listing** [databricks_directory](../resources/directory.md). *Please note that directories aren't listed when running in incremental mode; only directories with updated notebooks will be emitted.*
* `dlt` - **listing** [databricks_pipeline](../resources/pipeline.md).
* `groups` - **listing** [databricks_group](../data-sources/group.md) with [membership](../resources/group_member.md) and [data access](../resources/group_instance_profile.md).
@@ -171,6 +172,7 @@ Exporter aims to generate HCL code for most of the resources within the Databricks
| [databricks_cluster](../resources/cluster.md) | Yes | No | Yes | No |
| [databricks_cluster_policy](../resources/cluster_policy.md) | Yes | No | Yes | No |
| [databricks_connection](../resources/connection.md) | Yes | Yes | Yes | No |
| [databricks_dashboard](../resources/dashboard.md) | Yes | No | Yes | No |
| [databricks_dbfs_file](../resources/dbfs_file.md) | Yes | No | Yes | No |
| [databricks_external_location](../resources/external_location.md) | Yes | Yes | Yes | No |
| [databricks_file](../resources/file.md) | Yes | No | Yes | No |
77 changes: 77 additions & 0 deletions exporter/exporter_test.go
@@ -15,6 +15,7 @@ import (
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute"
sdk_dashboards "github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/databricks/databricks-sdk-go/service/iam"
sdk_jobs "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/ml"
@@ -436,6 +437,13 @@ var emptyMetastoreList = qa.HTTPFixture{
ReuseRequest: true,
}

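// The exporter now queries the Lakeview listing endpoint on every run, so
// unrelated tests stub it with an empty response.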
var emptyLakeviewList = qa.HTTPFixture{
Method: "GET",
Resource: "/api/2.0/lakeview/dashboards?page_size=100",
Response: sdk_dashboards.ListDashboardsResponse{},
ReuseRequest: true,
}

func TestImportingUsersGroupsSecretScopes(t *testing.T) {
listSpFixtures := qa.ListServicePrincipalsFixtures([]iam.ServicePrincipal{
{
@@ -457,6 +465,7 @@ func TestImportingUsersGroupsSecretScopes(t *testing.T) {
qa.HTTPFixturesApply(t,
[]qa.HTTPFixture{
noCurrentMetastoreAttached,
emptyLakeviewList,
emptyMetastoreList,
meAdminFixture,
emptyRepos,
@@ -729,6 +738,7 @@ func TestImportingNoResourcesError(t *testing.T) {
},
},
noCurrentMetastoreAttached,
emptyLakeviewList,
emptyMetastoreList,
emptyRepos,
emptyExternalLocations,
@@ -2623,3 +2633,70 @@ func TestImportingRunJobTask(t *testing.T) {
}`))
})
}

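// TestImportingLakeviewDashboards exercises the full export flow for a Lakeview
// dashboard: listing, fetching one by ID, and generating both the HCL resource
// and the extracted .lvdash.json file.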
func TestImportingLakeviewDashboards(t *testing.T) {
qa.HTTPFixturesApply(t,
[]qa.HTTPFixture{
{
Method: "GET",
ReuseRequest: true,
Resource: "/api/2.0/preview/scim/v2/Me",
Response: scim.User{
Groups: []scim.ComplexValue{
{
Display: "admins",
},
},
UserName: "[email protected]",
},
},
noCurrentMetastoreAttached,
{
Method: "GET",
Resource: "/api/2.0/lakeview/dashboards?page_size=100",
Response: sdk_dashboards.ListDashboardsResponse{
Dashboards: []sdk_dashboards.Dashboard{
{
DashboardId: "9cb0c8f562624a1f",
DisplayName: "Dashboard1",
},
},
},
ReuseRequest: true,
},
{
Method: "GET",
Resource: "/api/2.0/lakeview/dashboards/9cb0c8f562624a1f?",
Response: sdk_dashboards.Dashboard{
DashboardId: "9cb0c8f562624a1f",
DisplayName: "Dashboard1",
ParentPath: "/",
Path: "/Dashboard1.lvdash.json",
SerializedDashboard: `{}`,
WarehouseId: "1234",
},
},
},
func(ctx context.Context, client *common.DatabricksClient) {
tmpDir := fmt.Sprintf("/tmp/tf-%s", qa.RandomName())
defer os.RemoveAll(tmpDir)

ic := newImportContext(client)
ic.Directory = tmpDir
ic.enableListing("dashboards")
ic.enableServices("dashboards")

err := ic.Run()
assert.NoError(t, err)

content, err := os.ReadFile(tmpDir + "/dashboards.tf")
assert.NoError(t, err)
contentStr := string(content)
assert.True(t, strings.Contains(contentStr, `resource "databricks_dashboard" "dashboard1_9cb0c8f562624a1f"`))
assert.True(t, strings.Contains(contentStr, `file_path = "${path.module}/dashboards/Dashboard1_9cb0c8f562624a1f.lvdash.json"`))
content, err = os.ReadFile(tmpDir + "/dashboards/Dashboard1_9cb0c8f562624a1f.lvdash.json")
assert.NoError(t, err)
contentStr = string(content)
assert.Equal(t, `{}`, contentStr)
})
}
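
The assertions above document the shape of the generated output: an HCL `databricks_dashboard` resource whose `file_path` points into a `dashboards/` subdirectory, plus the raw dashboard definition extracted to `Dashboard1_9cb0c8f562624a1f.lvdash.json`.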
94 changes: 94 additions & 0 deletions exporter/importables.go
@@ -16,6 +16,7 @@ import (
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/databricks/databricks-sdk-go/service/iam"
sdk_jobs "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/ml"
@@ -1109,6 +1110,7 @@ var resourcesMap map[string]importable = map[string]importable{
{Path: "sql_alert_id", Resource: "databricks_sql_alert"},
{Path: "sql_dashboard_id", Resource: "databricks_sql_dashboard"},
{Path: "sql_endpoint_id", Resource: "databricks_sql_endpoint"},
{Path: "dashboard_id", Resource: "databricks_dashboard"},
{Path: "registered_model_id", Resource: "databricks_mlflow_model"},
{Path: "experiment_id", Resource: "databricks_mlflow_experiment"},
{Path: "repo_id", Resource: "databricks_repo"},
@@ -3091,4 +3093,96 @@ var resourcesMap map[string]importable = map[string]importable{
{Resource: "databricks_group", Path: "principal_id"},
},
},
"databricks_dashboard": {
WorkspaceLevel: true,
Service: "dashboards",
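// List enumerates all Lakeview dashboards via the SDK and emits one resource
// per dashboard whose display name passes the configured name filter.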
List: func(ic *importContext) error {
dashboards, err := ic.workspaceClient.Lakeview.ListAll(ic.Context, dashboards.ListDashboardsRequest{PageSize: 100})
if err != nil {
return err
}
for i, d := range dashboards {
if !ic.MatchesName(d.DisplayName) {
continue
}
// TODO: add emit for incremental mode. Use already defined functions for emitting?
ic.Emit(&resource{
Resource: "databricks_dashboard",
ID: d.DashboardId,
})
if i%100 == 0 {
log.Printf("[INFO] Processed %d dashboards out of %d", i+1, len(dashboards))
}
}
return nil
},
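// Name builds the generated resource name from parent path, display name, and
// dashboard ID; e.g., "Dashboard1" with ID 9cb0c8f562624a1f under "/" becomes
// "Dashboard1_9cb0c8f562624a1f".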
Name: func(ic *importContext, d *schema.ResourceData) string {
s := d.Get("parent_path").(string)
if s != "" {
s = s[1:]
if s != "" {
s = s + "_"
}
}
dname := d.Get("display_name").(string)
if dname != "" {
s = s + dname
}
s = s + "_" + d.Id()
return nameNormalizationRegex.ReplaceAllString(s, "_")
},
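// Import extracts the serialized dashboard JSON into dashboards/<name>.lvdash.json,
// points file_path at that file, and emits the dashboard's dependencies:
// permissions, the parent directory, and the SQL warehouse.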
Import: func(ic *importContext, r *resource) error {
path := r.Data.Get("path").(string)
if strings.HasPrefix(path, "/Repos") {
ic.emitRepoByPath(path)
return nil
}
parts := strings.Split(path, "/")
plen := len(parts)
if idx := strings.Index(parts[plen-1], "."); idx != -1 {
parts[plen-1] = parts[plen-1][:idx] + "_" + r.ID + parts[plen-1][idx:]
} else {
parts[plen-1] = parts[plen-1] + "_" + r.ID
}
name := fileNameNormalizationRegex.ReplaceAllString(strings.Join(parts, "/")[1:], "_")
fileName, err := ic.saveFileIn("dashboards", name, []byte(r.Data.Get("serialized_dashboard").(string)))
if err != nil {
return err
}
r.Data.Set("file_path", fileName)
r.Data.Set("serialized_dashboard", "")

ic.emitPermissionsIfNotIgnored(r, "/dashboards/"+r.ID,
"dashboard_"+ic.Importables["databricks_dashboard"].Name(ic, r.Data))
parentPath := r.Data.Get("parent_path").(string)
if parentPath != "" && parentPath != "/" {
ic.Emit(&resource{
Resource: "databricks_directory",
ID: parentPath,
})
}
warehouseId := r.Data.Get("warehouse_id").(string)
if warehouseId != "" {
ic.Emit(&resource{
Resource: "databricks_sql_endpoint",
ID: warehouseId,
})
}

return nil
},
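// dashboard_change_detected and the md5 checksum only track drift between the
// local file and the workspace, so they are omitted from the generated HCL.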
ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
return pathString == "dashboard_change_detected" || shouldOmitMd5Field(ic, pathString, as, d)
},
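// Dashboards stored under /Repos aren't exported individually; Import above
// emits the containing databricks_repo instead.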
Ignore: func(ic *importContext, r *resource) bool {
return strings.HasPrefix(r.Data.Get("path").(string), "/Repos") || strings.HasPrefix(r.Data.Get("parent_path").(string), "/Repos")
},
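// Depends rewrites raw attribute values into references in the generated HCL:
// file_path becomes a ${path.module} file reference, while warehouse_id and
// parent_path become references to the emitted resources.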
Depends: []reference{
{Path: "file_path", File: true},
{Path: "warehouse_id", Resource: "databricks_sql_endpoint"},
{Path: "parent_path", Resource: "databricks_directory"},
{Path: "parent_path", Resource: "databricks_user", Match: "home"},
{Path: "parent_path", Resource: "databricks_service_principal"},
},
},
}
