Skip to content

Commit

Permalink
[Exporter] Refactoring: remove legacy code (#3864)
Browse files Browse the repository at this point in the history
## Changes
<!-- Summary of your changes that are easy to understand -->

Remove code that is no longer necessary.

## Tests
<!-- 
How is this tested? Please see the checklist below and also describe any
other relevant tests
-->

- [x] `make test` run locally
- [ ] relevant change in `docs/` folder
- [ ] covered with integration tests in `internal/acceptance`
- [ ] relevant acceptance tests are passing
- [ ] using Go SDK
  • Loading branch information
alexott authored Aug 8, 2024
1 parent 0f6278d commit 55db1a7
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 60 deletions.
57 changes: 1 addition & 56 deletions exporter/util.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,24 +34,6 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)

// Remove this once databricks_pipeline and databricks_job resources are migrated to Go SDK
// emitInitScriptsLegacy emits an importable resource for every cluster init
// script, dispatching on where the script is stored: DBFS scripts become
// databricks_dbfs_file resources, workspace scripts go through
// emitWorkspaceFileOrRepo, and volume scripts through emitIfVolumeFile.
// Remove this once databricks_pipeline and databricks_job resources are migrated to Go SDK
func (ic *importContext) emitInitScriptsLegacy(initScripts []clusters.InitScriptStorageInfo) {
	for i := range initScripts {
		script := &initScripts[i]
		if script.Dbfs != nil {
			ic.Emit(&resource{
				ID:       script.Dbfs.Destination,
				Resource: "databricks_dbfs_file",
			})
		}
		if script.Workspace != nil {
			ic.emitWorkspaceFileOrRepo(script.Workspace.Destination)
		}
		if script.Volumes != nil {
			ic.emitIfVolumeFile(script.Volumes.Destination)
		}
	}
}

func (ic *importContext) emitInitScripts(initScripts []compute.InitScriptInfo) {
for _, is := range initScripts {
if is.Dbfs != nil {
Expand Down Expand Up @@ -86,48 +68,11 @@ func (ic *importContext) emitFilesFromMap(m map[string]string) {
}
}

// Remove this when databricks_job resource is migrated
// Usage: ic.importCluster(job.NewCluster)
// importClusterLegacy emits every resource referenced by a legacy cluster
// spec: the AWS instance profile, regular and driver instance pools, the
// cluster policy, init scripts, secrets referenced from Spark conf and env
// vars, and the single-user principal. A nil cluster is a no-op.
// Remove this when databricks_job resource is migrated
// Usage: ic.importCluster(job.NewCluster)
func (ic *importContext) importClusterLegacy(c *clusters.Cluster) {
	if c == nil {
		return
	}
	if c.AwsAttributes != nil {
		ic.Emit(&resource{
			Resource: "databricks_instance_profile",
			ID:       c.AwsAttributes.InstanceProfileArn,
		})
	}
	// Regular and driver pools map to the same resource type; emitting the
	// pool means enable_elastic_disk is set to false and aws/gcp/azure
	// attributes are removed downstream.
	for _, poolID := range []string{c.InstancePoolID, c.DriverInstancePoolID} {
		if poolID == "" {
			continue
		}
		ic.Emit(&resource{
			Resource: "databricks_instance_pool",
			ID:       poolID,
		})
	}
	if c.PolicyID != "" {
		ic.Emit(&resource{
			Resource: "databricks_cluster_policy",
			ID:       c.PolicyID,
		})
	}
	ic.emitInitScriptsLegacy(c.InitScripts)
	ic.emitSecretsFromSecretsPathMap(c.SparkConf)
	ic.emitSecretsFromSecretsPathMap(c.SparkEnvVars)
	ic.emitUserOrServicePrincipal(c.SingleUserName)
}

func (ic *importContext) importCluster(c *compute.ClusterSpec) {
if c == nil {
return
}
if c.AwsAttributes != nil {
if c.AwsAttributes != nil && c.AwsAttributes.InstanceProfileArn != "" {
ic.Emit(&resource{
Resource: "databricks_instance_profile",
ID: c.AwsAttributes.InstanceProfileArn,
Expand Down
8 changes: 4 additions & 4 deletions exporter/util_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@ import (
"os"
"testing"

"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/iam"
tfcatalog "github.com/databricks/terraform-provider-databricks/catalog"
"github.com/databricks/terraform-provider-databricks/clusters"
"github.com/databricks/terraform-provider-databricks/common"
"github.com/databricks/terraform-provider-databricks/qa"
"github.com/databricks/terraform-provider-databricks/scim"
Expand All @@ -19,10 +19,10 @@ import (
func TestImportClusterEmitsInitScripts(t *testing.T) {
ic := importContextForTest()
ic.enableServices("storage")
ic.importClusterLegacy(&clusters.Cluster{
InitScripts: []clusters.InitScriptStorageInfo{
ic.importCluster(&compute.ClusterSpec{
InitScripts: []compute.InitScriptInfo{
{
Dbfs: &clusters.DbfsStorageInfo{
Dbfs: &compute.DbfsStorageInfo{
Destination: "/mnt/abc/test.sh",
},
},
Expand Down

0 comments on commit 55db1a7

Please sign in to comment.