From e0a06352f90ed6d08a5b39314fc8671e15b66679 Mon Sep 17 00:00:00 2001
From: Serge Smertin
Date: Wed, 29 Dec 2021 19:42:27 +0100
Subject: [PATCH 1/9] [exporter] interactive cli and notebooks

---
 common/client.go               |  2 +-
 exporter/command.go            | 29 ++++++++++++++-
 exporter/context.go            |  2 +-
 exporter/importables.go        | 65 ++++++++++++++++++++++++++++++++--
 exporter/ui.go                 | 34 ++++++++++++++++++
 exporter/util.go               | 14 +++++---
 workspace/resource_notebook.go | 10 ++++++
 7 files changed, 147 insertions(+), 9 deletions(-)
 create mode 100644 exporter/ui.go

diff --git a/common/client.go b/common/client.go
index 1dc17b710d..f015157639 100644
--- a/common/client.go
+++ b/common/client.go
@@ -358,7 +358,7 @@ func (c *DatabricksClient) configureWithDatabricksCfg(ctx context.Context) (func
 	_, err = os.Stat(configFile)
 	if os.IsNotExist(err) {
 		// early return for non-configured machines
-		log.Printf("[INFO] %s not found on current host", configFile)
+		log.Printf("[DEBUG] %s not found on current host", configFile)
 		return nil, nil
 	}
 	cfg, err := ini.Load(configFile)
diff --git a/exporter/command.go b/exporter/command.go
index f912d64cd5..0eed2f50af 100644
--- a/exporter/command.go
+++ b/exporter/command.go
@@ -2,6 +2,7 @@ package exporter

 import (
 	"flag"
+	"fmt"
 	"log"
 	"os"
 	"strings"
@@ -59,6 +60,8 @@ func Run(args ...string) error {
 	if err != nil {
 		return err
 	}
+	interactive := false
+	flags.BoolVar(&interactive, "interactive", true, "Interactive mode")
 	flags.StringVar(&ic.Directory, "directory", cwd,
 		"Directory to generate sources in. Defaults to current directory.")
 	flags.Int64Var(&ic.lastActiveDays, "last-active-days", 3650,
 	flags.BoolVar(&ic.debug, "debug", false, "Print extra debug information.")
 	flags.BoolVar(&ic.mounts, "mounts", false, "List DBFS mount points.")
 	flags.BoolVar(&ic.generateDeclaration, "generateProviderDeclaration", true,
-		"Generate Databricks provider declaration (for Terraform >= 0.13).")
+		"Generate Databricks provider declaration.")
 	services, listing := ic.allServicesAndListing()
 	flags.StringVar(&ic.services, "services", services,
 		"Comma-separated list of services to import. 
By default all services are imported.") @@ -87,6 +90,30 @@ func Run(args ...string) error { if err != nil { return err } + if interactive { + for c.Authenticate(ic.Context) != nil { + c.Host = askFor("🔑 Databricks Workspace URL:") + c.Token = askFor("🔑 Databricks Workspace PAT:") + } + ic.match = askFor("🔍 Match entity names (optional):") + listing := "" + for r, ir := range ic.Importables { + if ir.List == nil { + continue + } + if !askFlag(fmt.Sprintf("✅ Generate `%s` and related resources?", r)) { + continue + } + if len(listing) > 0 { + listing += "," + } + listing += ir.Service + if ir.Service == "mounts" { + ic.mounts = true + } + } + ic.listing = listing + } if len(prefix) > 0 { ic.prefix = prefix + "_" } diff --git a/exporter/context.go b/exporter/context.go index f16b69cab7..67a54c8ee5 100644 --- a/exporter/context.go +++ b/exporter/context.go @@ -220,7 +220,7 @@ func (ic *importContext) Run() error { } } if i%50 == 0 { - log.Printf("[INFO] Generated %d of %d resources", i, scopeSize) + log.Printf("[INFO] Generated %d of %d resources", i+1, scopeSize) } if r.Mode != "data" && ic.Resources[r.Resource].Importer != nil { // nolint diff --git a/exporter/importables.go b/exporter/importables.go index 72fef68b76..3dceaa5dc6 100644 --- a/exporter/importables.go +++ b/exporter/importables.go @@ -148,7 +148,7 @@ var resourcesMap map[string]importable = map[string]importable{ } // libraries installed with init scripts won't be exported. b := body.AppendNewBlock("resource", []string{r.Resource, r.Name}).Body() - relativeFile := fmt.Sprintf("${path.module}/files/%s", fileName) + relativeFile := fmt.Sprintf("${path.module}/%s", fileName) b.SetAttributeValue("path", cty.StringVal(strings.Replace(r.ID, "dbfs:", "", 1))) b.SetAttributeRaw("source", hclwrite.Tokens{ &hclwrite.Token{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}}, @@ -756,7 +756,7 @@ var resourcesMap map[string]importable = map[string]importable{ if err != nil { return err } - relativeFile := fmt.Sprintf("${path.module}/files/%s", fileName) + relativeFile := fmt.Sprintf("${path.module}/%s", fileName) b := body.AppendNewBlock("resource", []string{r.Resource, r.Name}).Body() b.SetAttributeValue("name", cty.StringVal(gis.Name)) b.SetAttributeValue("enabled", cty.BoolVal(gis.Enabled)) @@ -896,4 +896,65 @@ var resourcesMap map[string]importable = map[string]importable{ return nil }, }, + "databricks_notebook": { + Service: "notebooks", + Name: func(d *schema.ResourceData) string { + name := d.Get("path").(string) + if name == "" { + return d.Id() + } else { + name = strings.TrimPrefix(name, "/") + } + re := regexp.MustCompile(`[^0-9A-Za-z_]`) + return re.ReplaceAllString(name, "_") + }, + List: func(ic *importContext) error { + notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) + notebookList, err := notebooksAPI.List("/", true) + if err != nil { + return err + } + // TODO: emit permissions for notebook folders if non-default, + // as per-notebook permission entry would be a noise in the state + for offset, notebook := range notebookList { + ic.Emit(&resource{ + Resource: "databricks_notebook", + ID: notebook.Path, + }) + if offset%50 == 0 { + log.Printf("[INFO] Scanned %d of %d notebooks", + offset+1, len(notebookList)) + } + } + return nil + }, + Body: func(ic *importContext, body *hclwrite.Body, r *resource) error { + notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) + status, err := notebooksAPI.Read(r.ID) + if err != nil { + return err + } + contentB64, err := notebooksAPI.Export(r.ID, "SOURCE") + if 
err != nil { + return err + } + name := r.ID[1:] + status.Extension() // todo: replace non-alphanum+/ with _ + content, _ := base64.StdEncoding.DecodeString(contentB64) + fileName, err := ic.createFileIn("notebooks", name, []byte(content)) + log.Printf("Creating %s for %s", fileName, r) + if err != nil { + return err + } + // libraries installed with init scripts won't be exported. + b := body.AppendNewBlock("resource", []string{r.Resource, r.Name}).Body() + relativeFile := fmt.Sprintf("${path.module}/%s", fileName) + b.SetAttributeValue("path", cty.StringVal(r.ID)) + b.SetAttributeRaw("source", hclwrite.Tokens{ + &hclwrite.Token{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}}, + &hclwrite.Token{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(relativeFile)}, + &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}}, + }) + return nil + }, + }, } diff --git a/exporter/ui.go b/exporter/ui.go new file mode 100644 index 0000000000..62edafcc63 --- /dev/null +++ b/exporter/ui.go @@ -0,0 +1,34 @@ +package exporter + +import ( + "bufio" + "fmt" + "os" + "strings" +) + +var input = os.Stdin + +func askFor(prompt string) string { + var s string + r := bufio.NewReader(input) + for { + fmt.Fprint(os.Stdout, prompt+" ") + s, _ = r.ReadString('\n') + if s != "" { + break + } + } + return strings.TrimSpace(s) +} + +func askFlag(prompt string) bool { + res := askFor(fmt.Sprintf("%s [Y/n]", prompt)) + if res == "" { + return true + } + if strings.ToLower(res) == "y" { + return true + } + return false +} diff --git a/exporter/util.go b/exporter/util.go index df2eb1400b..3ef25d7466 100644 --- a/exporter/util.go +++ b/exporter/util.go @@ -5,6 +5,7 @@ import ( "fmt" "log" "os" + "path" "strings" "time" @@ -261,12 +262,16 @@ func (ic *importContext) importJobs(l jobs.JobList) { // returns created file name in "files" directory for the export and error if any func (ic *importContext) createFile(name string, content []byte) (string, error) { - err := os.MkdirAll(fmt.Sprintf("%s/files", ic.Directory), 0755) + return ic.createFileIn("files", name, content) +} + +func (ic *importContext) createFileIn(dir, name string, content []byte) (string, error) { + fileName := ic.prefix + name + localFileName := fmt.Sprintf("%s/%s/%s", ic.Directory, dir, fileName) + err := os.MkdirAll(path.Dir(localFileName), 0755) if err != nil && !os.IsExist(err) { return "", err } - fileName := ic.prefix + name - localFileName := fmt.Sprintf("%s/files/%s", ic.Directory, fileName) local, err := os.Create(localFileName) if err != nil { return "", err @@ -276,5 +281,6 @@ func (ic *importContext) createFile(name string, content []byte) (string, error) if err != nil { return "", err } - return fileName, nil + relativeName := strings.Replace(localFileName, ic.Directory+"/", "", 1) + return relativeName, nil } diff --git a/workspace/resource_notebook.go b/workspace/resource_notebook.go index 67b7f49ec3..5e6acef700 100644 --- a/workspace/resource_notebook.go +++ b/workspace/resource_notebook.go @@ -45,6 +45,15 @@ type ObjectStatus struct { Language string `json:"language,omitempty"` } +func (a ObjectStatus) Extension() string { + for ext, nlf := range extMap { + if nlf.Language == a.Language { + return ext + } + } + return "" +} + // ExportPath contains the base64 content of the notebook type ExportPath struct { Content string `json:"content,omitempty"` @@ -110,6 +119,7 @@ func (a NotebooksAPI) Export(path string, format string) (string, error) { Format: format, Path: path, }, ¬ebookContent) + // TODO: return decoded []byte return 
notebookContent.Content, err } From 576f9872a0216313fbed5d7d0230e3f82152ec36 Mon Sep 17 00:00:00 2001 From: Serge Smertin Date: Thu, 17 Feb 2022 13:11:03 +0100 Subject: [PATCH 2/9] connect notebooks with permissions and jobs --- exporter/importables.go | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/exporter/importables.go b/exporter/importables.go index 3dceaa5dc6..607e8150f7 100644 --- a/exporter/importables.go +++ b/exporter/importables.go @@ -276,6 +276,7 @@ var resourcesMap map[string]importable = map[string]importable{ {Path: "spark_python_task.python_file", Resource: "databricks_dbfs_file", Match: "dbfs_path"}, {Path: "spark_python_task.parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"}, {Path: "spark_jar_task.jar_uri", Resource: "databricks_dbfs_file", Match: "dbfs_path"}, + {Path: "notebook_task.notebook_path", Resource: "databricks_notebook"}, }, Import: func(ic *importContext, r *resource) error { var job jobs.JobSettings @@ -323,6 +324,12 @@ var resourcesMap map[string]importable = map[string]importable{ } } } + if job.NotebookTask != nil { + ic.Emit(&resource{ + Resource: "databricks_notebook", + ID: job.NotebookTask.NotebookPath, + }) + } return ic.importLibraries(r.Data, s) }, List: func(ic *importContext) error { @@ -914,9 +921,10 @@ var resourcesMap map[string]importable = map[string]importable{ if err != nil { return err } - // TODO: emit permissions for notebook folders if non-default, - // as per-notebook permission entry would be a noise in the state for offset, notebook := range notebookList { + if strings.HasPrefix("/Repos", notebook.Path) { + continue + } ic.Emit(&resource{ Resource: "databricks_notebook", ID: notebook.Path, @@ -928,6 +936,18 @@ var resourcesMap map[string]importable = map[string]importable{ } return nil }, + Import: func(ic *importContext, r *resource) error { + if ic.meAdmin { + // TODO: emit permissions for notebook folders if non-default, + // as per-notebook permission entry would be a noise in the state + ic.Emit(&resource{ + Resource: "databricks_permissions", + ID: fmt.Sprintf("/notebooks/%s", r.Data.Get("object_id")), + Name: "notebook_" + ic.Importables["databricks_notebook"].Name(r.Data), + }) + } + return nil + }, Body: func(ic *importContext, body *hclwrite.Body, r *resource) error { notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) status, err := notebooksAPI.Read(r.ID) From 18c455263caa6927efd865b34ccfc72ccf49655e Mon Sep 17 00:00:00 2001 From: Serge Smertin Date: Thu, 17 Feb 2022 19:03:21 +0100 Subject: [PATCH 3/9] Added missing test coverage --- CHANGELOG.md | 1 + exporter/command.go | 54 +++++++++++---------- exporter/command_test.go | 42 +++++++++++++++++ exporter/exporter_test.go | 15 +++++- exporter/importables.go | 18 ++----- exporter/importables_test.go | 68 +++++++++++++++++++++++++++ exporter/ui.go | 8 ++-- workspace/data_notebook_paths_test.go | 6 +-- workspace/resource_notebook.go | 4 +- 9 files changed, 167 insertions(+), 49 deletions(-) create mode 100644 exporter/command_test.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 3c9822ced8..e11e83a1a1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ * Added new `gcp_attributes` to `databricks_cluster` and `databricks_instance_pool` ([#1126](https://github.com/databrickslabs/terraform-provider-databricks/pull/1126)). 
* Added exporter functionality for `databricks_ip_access_list` and `databricks_workspace_conf` ([#1125](https://github.com/databrickslabs/terraform-provider-databricks/pull/1125)). * Added `graviton` selector for `databricks_node_type` and `databricks_spark_version` data sources ([#1127](https://github.com/databrickslabs/terraform-provider-databricks/pull/1127)). +* Added interactive mode to resource exporter ([#1010](https://github.com/databrickslabs/terraform-provider-databricks/pull/1010)). ## 0.4.9 diff --git a/exporter/command.go b/exporter/command.go index 0eed2f50af..61c97448fc 100644 --- a/exporter/command.go +++ b/exporter/command.go @@ -44,6 +44,31 @@ func (ic *importContext) allServicesAndListing() (string, string) { return services, listing } +func (ic *importContext) interactivePrompts() { + for ic.Client.Authenticate(ic.Context) != nil { + ic.Client.Host = askFor("🔑 Databricks Workspace URL:") + ic.Client.Token = askFor("🔑 Databricks Workspace PAT:") + } + ic.match = askFor("🔍 Match entity names (optional):") + listing := "" + for r, ir := range ic.Importables { + if ir.List == nil { + continue + } + if !askFlag(fmt.Sprintf("✅ Generate `%s` and related resources?", r)) { + continue + } + if len(listing) > 0 { + listing += "," + } + listing += ir.Service + if ir.Service == "mounts" { + ic.mounts = true + } + } + ic.listing = listing +} + // Run import according to flags func Run(args ...string) error { log.SetOutput(&logLevel) @@ -60,8 +85,8 @@ func Run(args ...string) error { if err != nil { return err } - interactive := false - flags.BoolVar(&interactive, "interactive", true, "Interactive mode") + var skipInteractive bool + flags.BoolVar(&skipInteractive, "skip-interactive", false, "Skip interactive mode") flags.StringVar(&ic.Directory, "directory", cwd, "Directory to generate sources in. 
Defaults to current directory.") flags.Int64Var(&ic.lastActiveDays, "last-active-days", 3650, @@ -90,29 +115,8 @@ func Run(args ...string) error { if err != nil { return err } - if interactive { - for c.Authenticate(ic.Context) != nil { - c.Host = askFor("🔑 Databricks Workspace URL:") - c.Token = askFor("🔑 Databricks Workspace PAT:") - } - ic.match = askFor("🔍 Match entity names (optional):") - listing := "" - for r, ir := range ic.Importables { - if ir.List == nil { - continue - } - if !askFlag(fmt.Sprintf("✅ Generate `%s` and related resources?", r)) { - continue - } - if len(listing) > 0 { - listing += "," - } - listing += ir.Service - if ir.Service == "mounts" { - ic.mounts = true - } - } - ic.listing = listing + if !skipInteractive { + ic.interactivePrompts() } if len(prefix) > 0 { ic.prefix = prefix + "_" diff --git a/exporter/command_test.go b/exporter/command_test.go new file mode 100644 index 0000000000..3d862bc331 --- /dev/null +++ b/exporter/command_test.go @@ -0,0 +1,42 @@ +package exporter + +import ( + "bytes" + "testing" + + "github.com/databrickslabs/terraform-provider-databricks/common" + "github.com/stretchr/testify/assert" +) + +type dummyReader string + +func (d dummyReader) Read(p []byte) (int, error) { + n := copy(p, []byte(d)) + return n, nil +} + +func TestInteractivePrompts(t *testing.T) { + cliInput = dummyReader("y\n") + cliOutput = &bytes.Buffer{} + ic := &importContext{ + Client: &common.DatabricksClient{}, + Importables: map[string]importable{ + "x": { + Service: "a", + List: func(_ *importContext) error { + return nil + }, + }, + "y": { + Service: "mounts", + List: func(_ *importContext) error { + return nil + }, + }, + }, + } + ic.interactivePrompts() + assert.Equal(t, "a,mounts", ic.listing) + assert.Equal(t, "y", ic.match) + assert.True(t, ic.mounts) +} diff --git a/exporter/exporter_test.go b/exporter/exporter_test.go index 965b52cc13..657a755aa5 100644 --- a/exporter/exporter_test.go +++ b/exporter/exporter_test.go @@ -22,6 +22,7 @@ import ( "github.com/databrickslabs/terraform-provider-databricks/repos" "github.com/databrickslabs/terraform-provider-databricks/scim" "github.com/databrickslabs/terraform-provider-databricks/secrets" + "github.com/databrickslabs/terraform-provider-databricks/workspace" "github.com/hashicorp/hcl/v2/hclwrite" "github.com/stretchr/testify/assert" @@ -223,6 +224,11 @@ func TestImportingUsersGroupsSecretScopes(t *testing.T) { meAdminFixture, repoListFixture, emptyGitCredentials, + { + Method: "GET", + Resource: "/api/2.0/workspace/list?path=%2F", + Response: workspace.ObjectList{}, + }, emptyIpAccessLIst, { Method: "GET", @@ -397,6 +403,11 @@ func TestImportingNoResourcesError(t *testing.T) { Resource: "/api/2.0/jobs/list", Response: jobs.JobList{}, }, + { + Method: "GET", + Resource: "/api/2.0/workspace/list?path=%2F", + Response: workspace.ObjectList{}, + }, { Method: "GET", Resource: "/api/2.0/clusters/list", @@ -784,10 +795,10 @@ func TestImportingJobs_JobList(t *testing.T) { } func TestImportingWithError(t *testing.T) { - err := Run("-directory", "/bin/sh", "-services", "groups,users") + err := Run("-directory", "/bin/sh", "-services", "groups,users", "-skip-interactive") assert.EqualError(t, err, "the path /bin/sh is not a directory") - err = Run("-directory", "/bin/abcd", "-services", "groups,users", "-prefix", "abc") + err = Run("-directory", "/bin/abcd", "-services", "groups,users", "-prefix", "abc", "-skip-interactive") assert.EqualError(t, err, "can't create directory /bin/abcd") } diff --git a/exporter/importables.go 
b/exporter/importables.go index 607e8150f7..bfa2590c5e 100644 --- a/exporter/importables.go +++ b/exporter/importables.go @@ -327,7 +327,7 @@ var resourcesMap map[string]importable = map[string]importable{ if job.NotebookTask != nil { ic.Emit(&resource{ Resource: "databricks_notebook", - ID: job.NotebookTask.NotebookPath, + ID: job.NotebookTask.NotebookPath, }) } return ic.importLibraries(r.Data, s) @@ -913,7 +913,7 @@ var resourcesMap map[string]importable = map[string]importable{ name = strings.TrimPrefix(name, "/") } re := regexp.MustCompile(`[^0-9A-Za-z_]`) - return re.ReplaceAllString(name, "_") + return strings.ToLower(re.ReplaceAllString(name, "_")) }, List: func(ic *importContext) error { notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) @@ -925,6 +925,8 @@ var resourcesMap map[string]importable = map[string]importable{ if strings.HasPrefix("/Repos", notebook.Path) { continue } + // TODO: emit permissions for notebook folders if non-default, + // as per-notebook permission entry would be a noise in the state ic.Emit(&resource{ Resource: "databricks_notebook", ID: notebook.Path, @@ -936,18 +938,6 @@ var resourcesMap map[string]importable = map[string]importable{ } return nil }, - Import: func(ic *importContext, r *resource) error { - if ic.meAdmin { - // TODO: emit permissions for notebook folders if non-default, - // as per-notebook permission entry would be a noise in the state - ic.Emit(&resource{ - Resource: "databricks_permissions", - ID: fmt.Sprintf("/notebooks/%s", r.Data.Get("object_id")), - Name: "notebook_" + ic.Importables["databricks_notebook"].Name(r.Data), - }) - } - return nil - }, Body: func(ic *importContext, body *hclwrite.Body, r *resource) error { notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) status, err := notebooksAPI.Read(r.ID) diff --git a/exporter/importables_test.go b/exporter/importables_test.go index 5c3bce6345..24e6d1d936 100644 --- a/exporter/importables_test.go +++ b/exporter/importables_test.go @@ -20,6 +20,7 @@ import ( "github.com/databrickslabs/terraform-provider-databricks/secrets" "github.com/databrickslabs/terraform-provider-databricks/storage" "github.com/databrickslabs/terraform-provider-databricks/workspace" + "github.com/hashicorp/hcl/v2/hclwrite" "github.com/stretchr/testify/assert" ) @@ -611,3 +612,70 @@ func TestRepoListFails(t *testing.T) { assert.EqualError(t, err, "nope") }) } + +func TestNotebookName(t *testing.T) { + d := workspace.ResourceNotebook().TestResourceData() + d.SetId("x") + assert.Equal(t, "x", resourcesMap["databricks_notebook"].Name(d)) + + d.Set("path", "/Foo/Bar/Baz") + assert.Equal(t, "foo_bar_baz", resourcesMap["databricks_notebook"].Name(d)) +} + +func TestNotebookList(t *testing.T) { + qa.HTTPFixturesApply(t, []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/workspace/list?path=%2F", + Response: workspace.ObjectList{ + Objects: []workspace.ObjectStatus{ + { + Path: "/Repos/Foo/Bar", + }, + { + Path: "/First/Second", + ObjectType: "NOTEBOOK", + }, + }, + }, + }, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=a", + Response: workspace.ObjectStatus{ + ObjectID: 123, + ObjectType: "NOTEBOOK", + Path: "a", + Language: "PYTHON", + }, + }, + { + Method: "GET", + Resource: "/api/2.0/workspace/export?format=SOURCE&path=a", + Response: workspace.ExportPath{ + Content: "YWJj", + }, + }, + }, func(ctx context.Context, client *common.DatabricksClient) { + ic := importContextForTest() + ic.Client = client + ic.Context = ctx + + err := 
resourcesMap["databricks_notebook"].List(ic) + assert.NoError(t, err) + assert.True(t, ic.testEmits["databricks_notebook[] (id: /First/Second)"]) + + ic.Directory = fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) + defer os.RemoveAll(ic.Directory) + + // dstFile := fmt.Sprintf("%s/files/_abc_900150983cd24fb0d6963f7d28e17f72", ic.Directory) + // err := os.MkdirAll(dstFile, 0755) + // assert.NoError(t, err) + f := hclwrite.NewEmptyFile() + err = resourcesMap["databricks_notebook"].Body(ic, f.Body(), &resource{ + ID: "a", + Data: workspace.ResourceNotebook().TestResourceData(), + }) + assert.NoError(t, err) + }) +} diff --git a/exporter/ui.go b/exporter/ui.go index 62edafcc63..b04a17ecda 100644 --- a/exporter/ui.go +++ b/exporter/ui.go @@ -3,17 +3,19 @@ package exporter import ( "bufio" "fmt" + "io" "os" "strings" ) -var input = os.Stdin +var cliInput io.Reader = os.Stdin +var cliOutput io.Writer = os.Stdout func askFor(prompt string) string { var s string - r := bufio.NewReader(input) + r := bufio.NewReader(cliInput) for { - fmt.Fprint(os.Stdout, prompt+" ") + fmt.Fprint(cliOutput, prompt+" ") s, _ = r.ReadString('\n') if s != "" { break diff --git a/workspace/data_notebook_paths_test.go b/workspace/data_notebook_paths_test.go index 2f08b958e6..24a558661f 100644 --- a/workspace/data_notebook_paths_test.go +++ b/workspace/data_notebook_paths_test.go @@ -12,7 +12,7 @@ func TestDataSourceNotebookPaths(t *testing.T) { { Method: "GET", Resource: "/api/2.0/workspace/list?path=%2Fa%2Fb%2Fc", - Response: objectList{ + Response: ObjectList{ Objects: []ObjectStatus{ { ObjectID: 987, @@ -25,7 +25,7 @@ func TestDataSourceNotebookPaths(t *testing.T) { { Method: "GET", Resource: "/api/2.0/workspace/list?path=%2Fa%2Fb%2Fc%2Fd", - Response: objectList{ + Response: ObjectList{ Objects: []ObjectStatus{ { ObjectID: 988, @@ -60,7 +60,7 @@ func TestDataSourceNotebookPaths_NoRecursive(t *testing.T) { { Method: "GET", Resource: "/api/2.0/workspace/list?path=%2Fa%2Fb%2Fc", - Response: objectList{ + Response: ObjectList{ Objects: []ObjectStatus{ { ObjectID: 988, diff --git a/workspace/resource_notebook.go b/workspace/resource_notebook.go index 5e6acef700..fdbca35abf 100644 --- a/workspace/resource_notebook.go +++ b/workspace/resource_notebook.go @@ -168,12 +168,12 @@ func (a NotebooksAPI) recursiveAddPaths(path string, pathList *[]ObjectStatus) e return err } -type objectList struct { +type ObjectList struct { Objects []ObjectStatus `json:"objects,omitempty"` } func (a NotebooksAPI) list(path string) ([]ObjectStatus, error) { - var notebookList objectList + var notebookList ObjectList err := a.client.Get(a.context, "/workspace/list", map[string]string{ "path": path, }, ¬ebookList) From 82c2c65093faea4a04b91f7122e1613b0ade7bc6 Mon Sep 17 00:00:00 2001 From: Serge Smertin Date: Thu, 17 Feb 2022 19:10:11 +0100 Subject: [PATCH 4/9] add missing context --- exporter/command_test.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/exporter/command_test.go b/exporter/command_test.go index 3d862bc331..74258d3f41 100644 --- a/exporter/command_test.go +++ b/exporter/command_test.go @@ -2,6 +2,7 @@ package exporter import ( "bytes" + "context" "testing" "github.com/databrickslabs/terraform-provider-databricks/common" @@ -20,6 +21,7 @@ func TestInteractivePrompts(t *testing.T) { cliOutput = &bytes.Buffer{} ic := &importContext{ Client: &common.DatabricksClient{}, + Context: context.Background(), Importables: map[string]importable{ "x": { Service: "a", From 69b08e69ec439ebaea3400e5f4eff3017d30eb61 Mon Sep 17 00:00:00 2001 From: 
Serge Smertin Date: Fri, 18 Feb 2022 12:43:06 +0100 Subject: [PATCH 5/9] more coverage --- exporter/context.go | 83 +++++++++++++++++++--------------- exporter/exporter_test.go | 34 +++++++------- exporter/importables.go | 34 +++++++------- exporter/importables_test.go | 38 ++++++++-------- exporter/model.go | 2 + workspace/resource_notebook.go | 9 ---- 6 files changed, 101 insertions(+), 99 deletions(-) diff --git a/exporter/context.go b/exporter/context.go index 67a54c8ee5..9b56d89c36 100644 --- a/exporter/context.go +++ b/exporter/context.go @@ -191,42 +191,7 @@ func (ic *importContext) Run() error { `) dcfile.Close() } - - sort.Sort(ic.Scope) - scopeSize := len(ic.Scope) - log.Printf("[INFO] Generating configuration for %d resources", scopeSize) - for i, r := range ic.Scope { - ir := ic.Importables[r.Resource] - f, ok := ic.Files[ir.Service] - if !ok { - f = hclwrite.NewEmptyFile() - ic.Files[ir.Service] = f - } - if ir.Ignore != nil && ir.Ignore(ic, r) { - continue - } - body := f.Body() - if ir.Body != nil { - err := ir.Body(ic, body, r) - if err != nil { - log.Printf("[ERROR] %s", err.Error()) - } - } else { - resourceBlock := body.AppendNewBlock("resource", []string{r.Resource, r.Name}) - err := ic.dataToHcl(ir, []string{}, ic.Resources[r.Resource], - r.Data, resourceBlock.Body()) - if err != nil { - log.Printf("[ERROR] %s", err.Error()) - } - } - if i%50 == 0 { - log.Printf("[INFO] Generated %d of %d resources", i+1, scopeSize) - } - if r.Mode != "data" && ic.Resources[r.Resource].Importer != nil { - // nolint - sh.WriteString(r.ImportCommand(ic) + "\n") - } - } + ic.generateHclForResources(sh) for service, f := range ic.Files { formatted := hclwrite.Format(f.Bytes()) // fix some formatting in a hacky way instead of writing 100 lines @@ -268,6 +233,44 @@ func (ic *importContext) Run() error { return nil } +func (ic *importContext) generateHclForResources(sh *os.File) { + sort.Sort(ic.Scope) + scopeSize := len(ic.Scope) + log.Printf("[INFO] Generating configuration for %d resources", scopeSize) + for i, r := range ic.Scope { + ir := ic.Importables[r.Resource] + f, ok := ic.Files[ir.Service] + if !ok { + f = hclwrite.NewEmptyFile() + ic.Files[ir.Service] = f + } + if ir.Ignore != nil && ir.Ignore(ic, r) { + continue + } + body := f.Body() + if ir.Body != nil { + err := ir.Body(ic, body, r) + if err != nil { + log.Printf("[ERROR] %s", err.Error()) + } + } else { + resourceBlock := body.AppendNewBlock("resource", []string{r.Resource, r.Name}) + err := ic.dataToHcl(ir, []string{}, ic.Resources[r.Resource], + r.Data, resourceBlock.Body()) + if err != nil { + log.Printf("[ERROR] %s", err.Error()) + } + } + if i%50 == 0 { + log.Printf("[INFO] Generated %d of %d resources", i+1, scopeSize) + } + if r.Mode != "data" && ic.Resources[r.Resource].Importer != nil && sh != nil { + // nolint + sh.WriteString(r.ImportCommand(ic) + "\n") + } + } +} + func (ic *importContext) MatchesName(n string) bool { if ic.match == "" { return true @@ -465,6 +468,14 @@ func (ic *importContext) reference(i importable, path []string, value string) hc if d.Path != match { continue } + if d.File { + relativeFile := fmt.Sprintf("${path.module}/%s", value) + return hclwrite.Tokens{ + &hclwrite.Token{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}}, + &hclwrite.Token{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(relativeFile)}, + &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}}, + } + } attr := "id" if d.Match != "" { attr = d.Match diff --git a/exporter/exporter_test.go b/exporter/exporter_test.go 
index 657a755aa5..87a5e61963 100644 --- a/exporter/exporter_test.go +++ b/exporter/exporter_test.go @@ -199,7 +199,7 @@ var meAdminFixture = qa.HTTPFixture{ }, } -var repoListFixture = qa.HTTPFixture{ +var emptyRepos = qa.HTTPFixture{ Method: "GET", ReuseRequest: true, Resource: "/api/2.0/repos?", @@ -218,17 +218,19 @@ var emptyIpAccessLIst = qa.HTTPFixture{ Response: map[string]interface{}{}, } +var emptyWorkspace = qa.HTTPFixture{ + Method: "GET", + Resource: "/api/2.0/workspace/list?path=%2F", + Response: workspace.ObjectList{}, +} + func TestImportingUsersGroupsSecretScopes(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ meAdminFixture, - repoListFixture, + emptyRepos, emptyGitCredentials, - { - Method: "GET", - Resource: "/api/2.0/workspace/list?path=%2F", - Response: workspace.ObjectList{}, - }, + emptyWorkspace, emptyIpAccessLIst, { Method: "GET", @@ -382,7 +384,7 @@ func TestImportingNoResourcesError(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ meAdminFixture, - repoListFixture, + emptyRepos, { Method: "GET", Resource: "/api/2.0/preview/scim/v2/Groups?", @@ -390,6 +392,7 @@ func TestImportingNoResourcesError(t *testing.T) { }, emptyGitCredentials, emptyIpAccessLIst, + emptyWorkspace, { Method: "GET", Resource: "/api/2.0/global-init-scripts", @@ -403,11 +406,6 @@ func TestImportingNoResourcesError(t *testing.T) { Resource: "/api/2.0/jobs/list", Response: jobs.JobList{}, }, - { - Method: "GET", - Resource: "/api/2.0/workspace/list?path=%2F", - Response: workspace.ObjectList{}, - }, { Method: "GET", Resource: "/api/2.0/clusters/list", @@ -440,7 +438,7 @@ func TestImportingClusters(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ meAdminFixture, - repoListFixture, + emptyRepos, { Method: "GET", Resource: "/api/2.0/preview/scim/v2/Groups?", @@ -589,7 +587,7 @@ func TestImportingJobs_JobList(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ meAdminFixture, - repoListFixture, + emptyRepos, { Method: "GET", Resource: "/api/2.0/jobs/list", @@ -806,7 +804,7 @@ func TestImportingSecrets(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ meAdminFixture, - repoListFixture, + emptyRepos, { Method: "GET", Resource: "/api/2.0/preview/scim/v2/Groups?", @@ -884,7 +882,7 @@ func TestImportingGlobalInitScripts(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ meAdminFixture, - repoListFixture, + emptyRepos, { Method: "GET", Resource: "/api/2.0/global-init-scripts", @@ -1105,7 +1103,7 @@ func TestImportingIPAccessLists(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ meAdminFixture, - repoListFixture, + emptyRepos, { Method: "GET", Resource: "/api/2.0/global-init-scripts", diff --git a/exporter/importables.go b/exporter/importables.go index bfa2590c5e..73d7b91f69 100644 --- a/exporter/importables.go +++ b/exporter/importables.go @@ -922,7 +922,7 @@ var resourcesMap map[string]importable = map[string]importable{ return err } for offset, notebook := range notebookList { - if strings.HasPrefix("/Repos", notebook.Path) { + if strings.HasPrefix(notebook.Path, "/Repos") { continue } // TODO: emit permissions for notebook folders if non-default, @@ -938,33 +938,31 @@ var resourcesMap map[string]importable = map[string]importable{ } return nil }, - Body: func(ic *importContext, body *hclwrite.Body, r *resource) error { + Import: func(ic *importContext, r *resource) error { notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) - status, err := notebooksAPI.Read(r.ID) - if err != nil { - return err - } contentB64, err := 
notebooksAPI.Export(r.ID, "SOURCE") if err != nil { return err } - name := r.ID[1:] + status.Extension() // todo: replace non-alphanum+/ with _ + language := r.Data.Get("language").(string) + ext := map[string]string{ + "SCALA": ".scala", + "PYTHON": ".py", + "SQL": ".sql", + "R": ".r", + } + name := r.ID[1:] + ext[language] // todo: replace non-alphanum+/ with _ content, _ := base64.StdEncoding.DecodeString(contentB64) fileName, err := ic.createFileIn("notebooks", name, []byte(content)) - log.Printf("Creating %s for %s", fileName, r) if err != nil { return err } - // libraries installed with init scripts won't be exported. - b := body.AppendNewBlock("resource", []string{r.Resource, r.Name}).Body() - relativeFile := fmt.Sprintf("${path.module}/%s", fileName) - b.SetAttributeValue("path", cty.StringVal(r.ID)) - b.SetAttributeRaw("source", hclwrite.Tokens{ - &hclwrite.Token{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}}, - &hclwrite.Token{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(relativeFile)}, - &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}}, - }) - return nil + log.Printf("Creating %s for %s", fileName, r) + r.Data.Set("source", fileName) + return r.Data.Set("language", "") + }, + Depends: []reference{ + {Path: "source", File: true}, }, }, } diff --git a/exporter/importables_test.go b/exporter/importables_test.go index 24e6d1d936..bc9f668989 100644 --- a/exporter/importables_test.go +++ b/exporter/importables_test.go @@ -9,6 +9,7 @@ import ( "github.com/databrickslabs/terraform-provider-databricks/clusters" "github.com/databrickslabs/terraform-provider-databricks/common" + "github.com/databrickslabs/terraform-provider-databricks/internal" "github.com/databrickslabs/terraform-provider-databricks/jobs" "github.com/databrickslabs/terraform-provider-databricks/permissions" "github.com/databrickslabs/terraform-provider-databricks/policies" @@ -29,6 +30,7 @@ func importContextForTest() *importContext { return &importContext{ Importables: resourcesMap, Resources: p.ResourcesMap, + Files: map[string]*hclwrite.File{}, testEmits: map[string]bool{}, } } @@ -622,7 +624,7 @@ func TestNotebookName(t *testing.T) { assert.Equal(t, "foo_bar_baz", resourcesMap["databricks_notebook"].Name(d)) } -func TestNotebookList(t *testing.T) { +func TestNotebookGeneration(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ { Method: "GET", @@ -630,7 +632,8 @@ func TestNotebookList(t *testing.T) { Response: workspace.ObjectList{ Objects: []workspace.ObjectStatus{ { - Path: "/Repos/Foo/Bar", + Path: "/Repos/Foo/Bar", + ObjectType: "NOTEBOOK", }, { Path: "/First/Second", @@ -641,41 +644,40 @@ func TestNotebookList(t *testing.T) { }, { Method: "GET", - Resource: "/api/2.0/workspace/get-status?path=a", + Resource: "/api/2.0/workspace/get-status?path=%2FFirst%2FSecond", Response: workspace.ObjectStatus{ ObjectID: 123, ObjectType: "NOTEBOOK", - Path: "a", + Path: "/First/Second", Language: "PYTHON", }, }, { Method: "GET", - Resource: "/api/2.0/workspace/export?format=SOURCE&path=a", + Resource: "/api/2.0/workspace/export?format=SOURCE&path=%2FFirst%2FSecond", Response: workspace.ExportPath{ Content: "YWJj", }, }, }, func(ctx context.Context, client *common.DatabricksClient) { ic := importContextForTest() + ic.Directory = fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) + defer os.RemoveAll(ic.Directory) + ic.Client = client ic.Context = ctx + ic.testEmits = nil + ic.importing = map[string]bool{} + ic.services = "notebooks" err := resourcesMap["databricks_notebook"].List(ic) assert.NoError(t, err) - 
assert.True(t, ic.testEmits["databricks_notebook[] (id: /First/Second)"]) - - ic.Directory = fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) - defer os.RemoveAll(ic.Directory) - // dstFile := fmt.Sprintf("%s/files/_abc_900150983cd24fb0d6963f7d28e17f72", ic.Directory) - // err := os.MkdirAll(dstFile, 0755) - // assert.NoError(t, err) - f := hclwrite.NewEmptyFile() - err = resourcesMap["databricks_notebook"].Body(ic, f.Body(), &resource{ - ID: "a", - Data: workspace.ResourceNotebook().TestResourceData(), - }) - assert.NoError(t, err) + ic.generateHclForResources(nil) + assert.Equal(t, internal.TrimLeadingWhitespace(` + resource "databricks_notebook" "first_second" { + source = "${path.module}/notebooks/First/Second.py" + path = "/First/Second" + }`), string(ic.Files["notebooks"].Bytes())) }) } diff --git a/exporter/model.go b/exporter/model.go index e0a8361118..5437a84e9d 100644 --- a/exporter/model.go +++ b/exporter/model.go @@ -54,6 +54,8 @@ type reference struct { Path string Resource string Match string + Variable bool + File bool } type resource struct { diff --git a/workspace/resource_notebook.go b/workspace/resource_notebook.go index fdbca35abf..885b0e8a54 100644 --- a/workspace/resource_notebook.go +++ b/workspace/resource_notebook.go @@ -45,15 +45,6 @@ type ObjectStatus struct { Language string `json:"language,omitempty"` } -func (a ObjectStatus) Extension() string { - for ext, nlf := range extMap { - if nlf.Language == a.Language { - return ext - } - } - return "" -} - // ExportPath contains the base64 content of the notebook type ExportPath struct { Content string `json:"content,omitempty"` From 068e9a6bc95f2f7e95e191f3d4a514b8ec68b976 Mon Sep 17 00:00:00 2001 From: Serge Smertin Date: Fri, 18 Feb 2022 14:41:10 +0100 Subject: [PATCH 6/9] remove generic custom body blocks --- exporter/context.go | 13 +- exporter/exporter_test.go | 9 -- exporter/importables.go | 82 ++++--------- exporter/importables_test.go | 224 +++++++++++++++++++++-------------- 4 files changed, 172 insertions(+), 156 deletions(-) diff --git a/exporter/context.go b/exporter/context.go index 9b56d89c36..23f841bc05 100644 --- a/exporter/context.go +++ b/exporter/context.go @@ -476,6 +476,9 @@ func (ic *importContext) reference(i importable, path []string, value string) hc &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}}, } } + if d.Variable { + return ic.variable(fmt.Sprintf("%s_%s", path[0], value), "") + } attr := "id" if d.Match != "" { attr = d.Match @@ -523,7 +526,15 @@ func (ic *importContext) dataToHcl(i importable, path []string, if as.Computed { continue } - raw, ok := d.GetOk(strings.Join(append(path, a), ".")) + pathString := strings.Join(append(path, a), ".") + raw, ok := d.GetOk(pathString) + for _, r := range i.Depends { + if r.Path == pathString && r.Variable { + // sensitive fields are moved to variable depends + raw = i.Name(d) + ok = true + } + } if !ok { continue } diff --git a/exporter/exporter_test.go b/exporter/exporter_test.go index 87a5e61963..a85b0f70b1 100644 --- a/exporter/exporter_test.go +++ b/exporter/exporter_test.go @@ -766,15 +766,6 @@ func TestImportingJobs_JobList(t *testing.T) { err := ic.Importables["databricks_job"].List(ic) assert.NoError(t, err) - for _, res := range ic.Scope { - if res.Resource != "databricks_dbfs_file" { - continue - } - err = ic.Importables["databricks_dbfs_file"].Body(ic, - hclwrite.NewEmptyFile().Body(), res) - assert.NoError(t, err) - } - for _, res := range ic.Scope { if res.Resource != "databricks_job" { continue diff --git 
a/exporter/importables.go b/exporter/importables.go index 73d7b91f69..8bb2dab998 100644 --- a/exporter/importables.go +++ b/exporter/importables.go @@ -6,7 +6,6 @@ import ( "encoding/json" "fmt" "log" - "path" "regexp" "strings" "time" @@ -21,7 +20,6 @@ import ( "github.com/databrickslabs/terraform-provider-databricks/workspace" "github.com/databrickslabs/terraform-provider-databricks/storage" - "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/hcl/v2/hclwrite" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -134,7 +132,7 @@ var resourcesMap map[string]importable = map[string]importable{ name := "_" + s[len(s)-1] + "_" + fileNameMd5 return name }, - Body: func(ic *importContext, body *hclwrite.Body, r *resource) error { + Import: func(ic *importContext, r *resource) error { dbfsAPI := storage.NewDbfsAPI(ic.Context, ic.Client) content, err := dbfsAPI.Read(r.ID) if err != nil { @@ -146,17 +144,12 @@ var resourcesMap map[string]importable = map[string]importable{ if err != nil { return err } - // libraries installed with init scripts won't be exported. - b := body.AppendNewBlock("resource", []string{r.Resource, r.Name}).Body() - relativeFile := fmt.Sprintf("${path.module}/%s", fileName) - b.SetAttributeValue("path", cty.StringVal(strings.Replace(r.ID, "dbfs:", "", 1))) - b.SetAttributeRaw("source", hclwrite.Tokens{ - &hclwrite.Token{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}}, - &hclwrite.Token{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(relativeFile)}, - &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}}, - }) + r.Data.Set("source", fileName) return nil }, + Depends: []reference{ + {Path: "source", File: true}, + }, }, "databricks_instance_pool": { Service: "compute", @@ -595,7 +588,8 @@ var resourcesMap map[string]importable = map[string]importable{ if scopes, err := ssAPI.List(); err == nil { for i, scope := range scopes { if !ic.MatchesName(scope.Name) { - log.Printf("[INFO] Secret scope %s doesn't match %s filter", scope.Name, ic.match) + log.Printf("[INFO] Secret scope %s doesn't match %s filter", + scope.Name, ic.match) continue } ic.Emit(&resource{ @@ -603,8 +597,7 @@ var resourcesMap map[string]importable = map[string]importable{ ID: scope.Name, Name: scope.Name, }) - log.Printf("[INFO] Imported %d of %d secret scopes", - i, len(scopes)) + log.Printf("[INFO] Imported %d of %d secret scopes", i, len(scopes)) } } return nil @@ -635,22 +628,15 @@ var resourcesMap map[string]importable = map[string]importable{ "databricks_secret": { Service: "secrets", Depends: []reference{ + {Path: "string_value", Variable: true}, {Path: "scope", Resource: "databricks_secret_scope"}, {Path: "string_value", Resource: "vault_generic_secret", Match: "data"}, {Path: "string_value", Resource: "aws_kms_secrets", Match: "plaintext"}, {Path: "string_value", Resource: "azurerm_key_vault_secret", Match: "value"}, {Path: "string_value", Resource: "aws_secretsmanager_secret_version", Match: "secret_string"}, }, - Body: func(ic *importContext, body *hclwrite.Body, r *resource) error { - b := body.AppendNewBlock("resource", []string{r.Resource, r.Name}).Body() - b.SetAttributeRaw("scope", ic.reference(ic.Importables[r.Resource], - []string{"scope"}, r.Data.Get("scope").(string))) - // secret data is exposed only within notebooks - b.SetAttributeRaw("string_value", ic.variable( - r.Name, fmt.Sprintf("Secret %s from %s scope", - r.Data.Get("key"), r.Data.Get("scope")))) - b.SetAttributeValue("key", 
cty.StringVal(r.Data.Get("key").(string))) - return nil + Name: func(d *schema.ResourceData) string { + return fmt.Sprintf("%s_%s", d.Get("scope"), d.Get("key")) }, }, "databricks_secret_acl": { @@ -749,7 +735,7 @@ var resourcesMap map[string]importable = map[string]importable{ } return nil }, - Body: func(ic *importContext, body *hclwrite.Body, r *resource) error { + Import: func(ic *importContext, r *resource) error { gis, err := workspace.NewGlobalInitScriptsAPI(ic.Context, ic.Client).Get(r.ID) if err != nil { return err @@ -758,21 +744,15 @@ var resourcesMap map[string]importable = map[string]importable{ if err != nil { return err } - fileName, err := ic.createFile(path.Base(r.Name), content) + fileName, err := ic.createFile(fmt.Sprintf("%s.sh", r.Name), content) log.Printf("Creating %s for %s", fileName, r) if err != nil { return err } - relativeFile := fmt.Sprintf("${path.module}/%s", fileName) - b := body.AppendNewBlock("resource", []string{r.Resource, r.Name}).Body() - b.SetAttributeValue("name", cty.StringVal(gis.Name)) - b.SetAttributeValue("enabled", cty.BoolVal(gis.Enabled)) - b.SetAttributeRaw("source", hclwrite.Tokens{ - &hclwrite.Token{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}}, - &hclwrite.Token{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(relativeFile)}, - &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}}, - }) - return nil + return r.Data.Set("source", fileName) + }, + Depends: []reference{ + {Path: "source", File: true}, }, }, "databricks_repo": { @@ -813,20 +793,6 @@ var resourcesMap map[string]importable = map[string]importable{ } return nil }, - Body: func(ic *importContext, body *hclwrite.Body, r *resource) error { - b := body.AppendNewBlock("resource", []string{r.Resource, r.Name}).Body() - b.SetAttributeValue("url", cty.StringVal(r.Data.Get("url").(string))) - b.SetAttributeValue("git_provider", cty.StringVal(r.Data.Get("git_provider").(string))) - t := r.Data.Get("branch").(string) - if t != "" { - b.SetAttributeValue("branch", cty.StringVal(t)) - } - t = r.Data.Get("path").(string) - if t != "" { - b.SetAttributeValue("path", cty.StringVal(t)) - } - return nil - }, }, "databricks_git_credential": { Service: "repos", @@ -847,12 +813,8 @@ var resourcesMap map[string]importable = map[string]importable{ } return nil }, - Body: func(ic *importContext, body *hclwrite.Body, r *resource) error { - b := body.AppendNewBlock("resource", []string{r.Resource, r.Name}).Body() - b.SetAttributeRaw("personal_access_token", ic.variable("git_token_"+r.Name, "Git token for "+r.Name)) - b.SetAttributeValue("git_provider", cty.StringVal(r.Data.Get("git_provider").(string))) - b.SetAttributeValue("git_username", cty.StringVal(r.Data.Get("git_username").(string))) - return nil + Depends: []reference{ + {Path: "personal_access_token", Variable: true}, }, }, "databricks_workspace_conf": { @@ -862,7 +824,11 @@ var resourcesMap map[string]importable = map[string]importable{ }, Import: func(ic *importContext, r *resource) error { wsConfAPI := workspace.NewWorkspaceConfAPI(ic.Context, ic.Client) - keys := map[string]interface{}{"enableIpAccessLists": false, "maxTokenLifetimeDays": 0, "enableTokensConfig": false} + keys := map[string]interface{}{ + "enableIpAccessLists": false, + "maxTokenLifetimeDays": 0, + "enableTokensConfig": false, + } err := wsConfAPI.Read(&keys) if err != nil { return err diff --git a/exporter/importables_test.go b/exporter/importables_test.go index bc9f668989..e27eb966de 100644 --- a/exporter/importables_test.go +++ b/exporter/importables_test.go 
@@ -169,79 +169,6 @@ func TestSecretScope(t *testing.T) { assert.Equal(t, "abc", name) } -func TestDbfsFileCornerCases_ReadFail(t *testing.T) { - qa.HTTPFixturesApply(t, []qa.HTTPFixture{ - { - Method: "GET", - Resource: "/api/2.0/dbfs/read?length=1000000&path=a", - Status: 404, - Response: common.NotFound("nope"), - }, - }, func(ctx context.Context, client *common.DatabricksClient) { - ic := importContextForTest() - ic.Client = client - ic.Context = ctx - err := resourcesMap["databricks_dbfs_file"].Body(ic, nil, &resource{ - ID: "a", - }) - assert.EqualError(t, err, "cannot read a: nope") - }) -} - -func TestDbfsFileCornerCases_WriteWrongDir(t *testing.T) { - qa.HTTPFixturesApply(t, []qa.HTTPFixture{ - { - Method: "GET", - Resource: "/api/2.0/dbfs/read?length=1000000&path=a", - Response: storage.ReadResponse{ - Data: "YWJj", - BytesRead: 3, - }, - }, - }, func(ctx context.Context, client *common.DatabricksClient) { - ic := importContextForTest() - ic.Client = client - ic.Context = ctx - err := resourcesMap["databricks_dbfs_file"].Body(ic, nil, &resource{ - ID: "a", - Data: storage.ResourceDBFSFile().TestResourceData(), - }) - assert.NotNil(t, err) // mustn't match direct OS error - }) -} - -func TestDbfsFileCornerCases_WriteFileExists(t *testing.T) { - qa.HTTPFixturesApply(t, []qa.HTTPFixture{ - { - Method: "GET", - Resource: "/api/2.0/dbfs/read?length=1000000&path=a", - Response: storage.ReadResponse{ - Data: "YWJj", - BytesRead: 3, - }, - }, - }, func(ctx context.Context, client *common.DatabricksClient) { - ic := importContextForTest() - ic.Client = client - ic.Context = ctx - ic.Directory = fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) - defer os.RemoveAll(ic.Directory) - - dstFile := fmt.Sprintf("%s/files/_abc_900150983cd24fb0d6963f7d28e17f72", ic.Directory) - err := os.MkdirAll(dstFile, 0755) - assert.NoError(t, err) - - d := storage.ResourceDBFSFile().TestResourceData() - d.SetId("abc") - - err = resourcesMap["databricks_dbfs_file"].Body(ic, nil, &resource{ - ID: "a", - Data: d, - }) - assert.Equal(t, err.Error(), fmt.Sprintf("open %s: is a directory", dstFile)) - }) -} - func TestInstancePoolNameFromID(t *testing.T) { d := pools.ResourceInstancePool().TestResourceData() d.SetId("a-b-c") @@ -551,7 +478,7 @@ func TestGlobalInitScriptsErrors(t *testing.T) { err := resourcesMap["databricks_global_init_script"].List(ic) assert.EqualError(t, err, "nope") - err = resourcesMap["databricks_global_init_script"].Body(ic, nil, &resource{ + err = resourcesMap["databricks_global_init_script"].Import(ic, &resource{ ID: "abc", }) assert.EqualError(t, err, "nope") @@ -580,12 +507,12 @@ func TestGlobalInitScriptsBodyErrors(t *testing.T) { ic := importContextForTest() ic.Client = client ic.Context = ctx - err := resourcesMap["databricks_global_init_script"].Body(ic, nil, &resource{ + err := resourcesMap["databricks_global_init_script"].Import(ic, &resource{ ID: "sad-emoji", }) assert.EqualError(t, err, "illegal base64 data at input byte 0") - err = resourcesMap["databricks_global_init_script"].Body(ic, nil, &resource{ + err = resourcesMap["databricks_global_init_script"].Import(ic, &resource{ ID: "second", }) assert.NotNil(t, err) // no exact match because of OS diffs @@ -624,8 +551,23 @@ func TestNotebookName(t *testing.T) { assert.Equal(t, "foo_bar_baz", resourcesMap["databricks_notebook"].Name(d)) } +func testGenerate(t *testing.T, fixtures []qa.HTTPFixture, services string, cb func(*importContext)) { + qa.HTTPFixturesApply(t, fixtures, func(ctx context.Context, client *common.DatabricksClient) { + 
ic := importContextForTest() + ic.Directory = fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) + defer os.RemoveAll(ic.Directory) + ic.Client = client + ic.Context = ctx + ic.testEmits = nil + ic.importing = map[string]bool{} + ic.variables = map[string]string{} + ic.services = services + cb(ic) + }) +} + func TestNotebookGeneration(t *testing.T) { - qa.HTTPFixturesApply(t, []qa.HTTPFixture{ + testGenerate(t, []qa.HTTPFixture{ { Method: "GET", Resource: "/api/2.0/workspace/list?path=%2F", @@ -659,17 +601,7 @@ func TestNotebookGeneration(t *testing.T) { Content: "YWJj", }, }, - }, func(ctx context.Context, client *common.DatabricksClient) { - ic := importContextForTest() - ic.Directory = fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) - defer os.RemoveAll(ic.Directory) - - ic.Client = client - ic.Context = ctx - ic.testEmits = nil - ic.importing = map[string]bool{} - ic.services = "notebooks" - + }, "notebooks", func(ic *importContext) { err := resourcesMap["databricks_notebook"].List(ic) assert.NoError(t, err) @@ -681,3 +613,119 @@ func TestNotebookGeneration(t *testing.T) { }`), string(ic.Files["notebooks"].Bytes())) }) } + +func TestGitCredentialGen(t *testing.T) { + testGenerate(t, []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/git-credentials/a", + Response: repos.GitCredentialResponse { + UserName: "me", + Provider: "github", + }, + }, + }, "repos", func(ic *importContext) { + ic.Emit(&resource{ + Resource: "databricks_git_credential", + ID: "a", + }) + + ic.generateHclForResources(nil) + assert.Equal(t, internal.TrimLeadingWhitespace(` + resource "databricks_git_credential" "github_me_a" { + personal_access_token = var.personal_access_token_github_me_a + git_username = "me" + git_provider = "github" + }`), string(ic.Files["repos"].Bytes())) + }) +} + +func TestGlobalInitScriptGen(t *testing.T) { + testGenerate(t, []qa.HTTPFixture{ + { + Method: "GET", + ReuseRequest: true, + Resource: "/api/2.0/global-init-scripts/a", + Response: workspace.GlobalInitScriptInfo { + Name: "b", + Enabled: true, + ContentBase64: "YWJj", + }, + }, + }, "workspace", func(ic *importContext) { + ic.Emit(&resource{ + Resource: "databricks_global_init_script", + ID: "a", + }) + + ic.generateHclForResources(nil) + assert.Equal(t, internal.TrimLeadingWhitespace(` + resource "databricks_global_init_script" "b" { + source = "${path.module}/files/b.sh" + name = "b" + enabled = true + }`), string(ic.Files["workspace"].Bytes())) + }) +} + +func TestSecretGen(t *testing.T) { + testGenerate(t, []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/secrets/list?scope=a", + + Response: secrets.SecretsList { + Secrets: []secrets.SecretMetadata{ + { + Key: "b", + }, + }, + }, + }, + }, "secrets", func(ic *importContext) { + ic.Emit(&resource{ + Resource: "databricks_secret", + ID: "a|||b", + }) + + ic.generateHclForResources(nil) + assert.Equal(t, internal.TrimLeadingWhitespace(` + resource "databricks_secret" "a_b" { + string_value = var.string_value_a_b + scope = "a" + key = "b" + }`), string(ic.Files["secrets"].Bytes())) + }) +} + +func TestDbfsFileGen(t *testing.T) { + testGenerate(t, []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/dbfs/get-status?path=a", + Response: storage.FileInfo { + Path: "a", + }, + }, + { + Method: "GET", + Resource: "/api/2.0/dbfs/read?length=1000000&path=a", + Response: storage.ReadResponse{ + Data: "YWJj", + BytesRead: 3, + }, + }, + }, "storage", func(ic *importContext) { + ic.Emit(&resource{ + Resource: "databricks_dbfs_file", + ID: "a", + }) + + 
ic.generateHclForResources(nil) + assert.Equal(t, internal.TrimLeadingWhitespace(` + resource "databricks_dbfs_file" "_a_0cc175b9c0f1b6a831c399e269772661" { + source = "${path.module}/files/_a_0cc175b9c0f1b6a831c399e269772661" + path = "a" + }`), string(ic.Files["storage"].Bytes())) + }) +} \ No newline at end of file From 8de53e26f5d6f8b24f04dd15b02768bab997e78f Mon Sep 17 00:00:00 2001 From: Serge Smertin Date: Fri, 18 Feb 2022 15:04:11 +0100 Subject: [PATCH 7/9] make fmt and remove resource namer --- exporter/command_test.go | 2 +- exporter/context.go | 22 +++++++++-------- exporter/importables.go | 27 ++++++++------------- exporter/importables_test.go | 46 +++++++++++++++--------------------- 4 files changed, 42 insertions(+), 55 deletions(-) diff --git a/exporter/command_test.go b/exporter/command_test.go index 74258d3f41..d5cee37d84 100644 --- a/exporter/command_test.go +++ b/exporter/command_test.go @@ -20,7 +20,7 @@ func TestInteractivePrompts(t *testing.T) { cliInput = dummyReader("y\n") cliOutput = &bytes.Buffer{} ic := &importContext{ - Client: &common.DatabricksClient{}, + Client: &common.DatabricksClient{}, Context: context.Background(), Importables: map[string]importable{ "x": { diff --git a/exporter/context.go b/exporter/context.go index 23f841bc05..c2a0deb2e1 100644 --- a/exporter/context.go +++ b/exporter/context.go @@ -82,6 +82,16 @@ type mount struct { ClusterID string } +var nameFixes = []regexFix{ + {regexp.MustCompile(`[0-9a-f]{8}[_-][0-9a-f]{4}[_-][0-9a-f]{4}` + + `[_-][0-9a-f]{4}[_-][0-9a-f]{12}[_-]`), ""}, + {regexp.MustCompile(`[_-][0-9]+[\._-][0-9]+[\._-].*\.([a-z0-9]{1,4})`), "_$1"}, + {regexp.MustCompile(`@.*$`), ""}, + {regexp.MustCompile(`[-\s\.\|]`), "_"}, + {regexp.MustCompile(`\W+`), ""}, + {regexp.MustCompile(`[_]{2,}`), "_"}, +} + func newImportContext(c *common.DatabricksClient) *importContext { p := provider.DatabricksProvider() p.TerraformVersion = "exporter" @@ -103,16 +113,8 @@ func newImportContext(c *common.DatabricksClient) *importContext { Files: map[string]*hclwrite.File{}, Scope: []*resource{}, importing: map[string]bool{}, - nameFixes: []regexFix{ - {regexp.MustCompile(`[0-9a-f]{8}[_-][0-9a-f]{4}[_-][0-9a-f]{4}` + - `[_-][0-9a-f]{4}[_-][0-9a-f]{12}[_-]`), ""}, - {regexp.MustCompile(`[_-][0-9]+[\._-][0-9]+[\._-].*\.([a-z0-9]{1,4})`), "_$1"}, - {regexp.MustCompile(`@.*$`), ""}, - {regexp.MustCompile(`[-\s\.\|]`), "_"}, - {regexp.MustCompile(`\W+`), ""}, - {regexp.MustCompile(`[_]{2,}`), "_"}, - }, - hclFixes: []regexFix{ // Be careful with that! it may break working code + nameFixes: nameFixes, + hclFixes: []regexFix{ // Be careful with that! 
it may break working code }, allUsers: []scim.User{}, variables: map[string]string{}, diff --git a/exporter/importables.go b/exporter/importables.go index 8bb2dab998..4d727e8168 100644 --- a/exporter/importables.go +++ b/exporter/importables.go @@ -588,7 +588,7 @@ var resourcesMap map[string]importable = map[string]importable{ if scopes, err := ssAPI.List(); err == nil { for i, scope := range scopes { if !ic.MatchesName(scope.Name) { - log.Printf("[INFO] Secret scope %s doesn't match %s filter", + log.Printf("[INFO] Secret scope %s doesn't match %s filter", scope.Name, ic.match) continue } @@ -718,8 +718,7 @@ var resourcesMap map[string]importable = map[string]importable{ if name == "" { return d.Id() } - re := regexp.MustCompile(`[^0-9A-Za-z_]`) - return re.ReplaceAllString(name, "_") + return name }, List: func(ic *importContext) error { globalInitScripts, err := workspace.NewGlobalInitScriptsAPI(ic.Context, ic.Client).List() @@ -761,11 +760,8 @@ var resourcesMap map[string]importable = map[string]importable{ name := d.Get("path").(string) if name == "" { return d.Id() - } else { - name = strings.TrimPrefix(name, "/") } - re := regexp.MustCompile(`[^0-9A-Za-z_]`) - return re.ReplaceAllString(name, "_") + return strings.TrimPrefix(name, "/") }, List: func(ic *importContext) error { repoList, err := repos.NewReposAPI(ic.Context, ic.Client).ListAll() @@ -825,9 +821,9 @@ var resourcesMap map[string]importable = map[string]importable{ Import: func(ic *importContext, r *resource) error { wsConfAPI := workspace.NewWorkspaceConfAPI(ic.Context, ic.Client) keys := map[string]interface{}{ - "enableIpAccessLists": false, - "maxTokenLifetimeDays": 0, - "enableTokensConfig": false, + "enableIpAccessLists": false, + "maxTokenLifetimeDays": 0, + "enableTokensConfig": false, } err := wsConfAPI.Read(&keys) if err != nil { @@ -875,11 +871,8 @@ var resourcesMap map[string]importable = map[string]importable{ name := d.Get("path").(string) if name == "" { return d.Id() - } else { - name = strings.TrimPrefix(name, "/") } - re := regexp.MustCompile(`[^0-9A-Za-z_]`) - return strings.ToLower(re.ReplaceAllString(name, "_")) + return name }, List: func(ic *importContext) error { notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) @@ -912,10 +905,10 @@ var resourcesMap map[string]importable = map[string]importable{ } language := r.Data.Get("language").(string) ext := map[string]string{ - "SCALA": ".scala", + "SCALA": ".scala", "PYTHON": ".py", - "SQL": ".sql", - "R": ".r", + "SQL": ".sql", + "R": ".r", } name := r.ID[1:] + ext[language] // todo: replace non-alphanum+/ with _ content, _ := base64.StdEncoding.DecodeString(contentB64) diff --git a/exporter/importables_test.go b/exporter/importables_test.go index e27eb966de..85d799b51e 100644 --- a/exporter/importables_test.go +++ b/exporter/importables_test.go @@ -30,8 +30,9 @@ func importContextForTest() *importContext { return &importContext{ Importables: resourcesMap, Resources: p.ResourcesMap, - Files: map[string]*hclwrite.File{}, + Files: map[string]*hclwrite.File{}, testEmits: map[string]bool{}, + nameFixes: nameFixes, } } @@ -542,15 +543,6 @@ func TestRepoListFails(t *testing.T) { }) } -func TestNotebookName(t *testing.T) { - d := workspace.ResourceNotebook().TestResourceData() - d.SetId("x") - assert.Equal(t, "x", resourcesMap["databricks_notebook"].Name(d)) - - d.Set("path", "/Foo/Bar/Baz") - assert.Equal(t, "foo_bar_baz", resourcesMap["databricks_notebook"].Name(d)) -} - func testGenerate(t *testing.T, fixtures []qa.HTTPFixture, services string, 
 	cb func(*importContext)) {
 	qa.HTTPFixturesApply(t, fixtures, func(ctx context.Context, client *common.DatabricksClient) {
 		ic := importContextForTest()
@@ -607,7 +599,7 @@ func TestNotebookGeneration(t *testing.T) {
 
 		ic.generateHclForResources(nil)
 		assert.Equal(t, internal.TrimLeadingWhitespace(`
-		resource "databricks_notebook" "first_second" {
+		resource "databricks_notebook" "firstsecond" {
 			source = "${path.module}/notebooks/First/Second.py"
 			path   = "/First/Second"
 		}`), string(ic.Files["notebooks"].Bytes()))
@@ -619,7 +611,7 @@ func TestGitCredentialGen(t *testing.T) {
 		{
 			Method:   "GET",
 			Resource: "/api/2.0/git-credentials/a",
-			Response: repos.GitCredentialResponse {
+			Response: repos.GitCredentialResponse{
 				UserName: "me",
 				Provider: "github",
 			},
@@ -627,7 +619,7 @@ func TestGitCredentialGen(t *testing.T) {
 	}, "repos", func(ic *importContext) {
 		ic.Emit(&resource{
 			Resource: "databricks_git_credential",
-			ID: "a",
+			ID:       "a",
 		})
 
 		ic.generateHclForResources(nil)
@@ -643,26 +635,26 @@ func TestGitCredentialGen(t *testing.T) {
 func TestGlobalInitScriptGen(t *testing.T) {
 	testGenerate(t, []qa.HTTPFixture{
 		{
-			Method: "GET",
+			Method:       "GET",
 			ReuseRequest: true,
-			Resource: "/api/2.0/global-init-scripts/a",
-			Response: workspace.GlobalInitScriptInfo {
-				Name: "b",
-				Enabled: true,
+			Resource:     "/api/2.0/global-init-scripts/a",
+			Response: workspace.GlobalInitScriptInfo{
+				Name:          "New: Importing ^ Things",
+				Enabled:       true,
 				ContentBase64: "YWJj",
 			},
 		},
 	}, "workspace", func(ic *importContext) {
 		ic.Emit(&resource{
 			Resource: "databricks_global_init_script",
-			ID: "a",
+			ID:       "a",
 		})
 
 		ic.generateHclForResources(nil)
 		assert.Equal(t, internal.TrimLeadingWhitespace(`
-		resource "databricks_global_init_script" "b" {
-			source = "${path.module}/files/b.sh"
-			name = "b"
+		resource "databricks_global_init_script" "new_importing_things" {
+			source = "${path.module}/files/new_importing_things.sh"
+			name = "New: Importing ^ Things"
 			enabled = true
 		}`), string(ic.Files["workspace"].Bytes()))
 	})
@@ -674,7 +666,7 @@ func TestSecretGen(t *testing.T) {
 			Method:   "GET",
 			Resource: "/api/2.0/secrets/list?scope=a",
-			Response: secrets.SecretsList {
+			Response: secrets.SecretsList{
 				Secrets: []secrets.SecretMetadata{
 					{
 						Key: "b",
@@ -685,7 +677,7 @@ func TestSecretGen(t *testing.T) {
 	}, "secrets", func(ic *importContext) {
 		ic.Emit(&resource{
 			Resource: "databricks_secret",
-			ID: "a|||b",
+			ID:       "a|||b",
 		})
 
 		ic.generateHclForResources(nil)
@@ -703,7 +695,7 @@ func TestDbfsFileGen(t *testing.T) {
 		{
 			Method:   "GET",
 			Resource: "/api/2.0/dbfs/get-status?path=a",
-			Response: storage.FileInfo {
+			Response: storage.FileInfo{
 				Path: "a",
 			},
 		},
@@ -718,7 +710,7 @@ func TestDbfsFileGen(t *testing.T) {
 	}, "storage", func(ic *importContext) {
 		ic.Emit(&resource{
 			Resource: "databricks_dbfs_file",
-			ID: "a",
+			ID:       "a",
 		})
 
 		ic.generateHclForResources(nil)
@@ -728,4 +720,4 @@ func TestDbfsFileGen(t *testing.T) {
 			path = "a"
 		}`), string(ic.Files["storage"].Bytes()))
 	})
-}
\ No newline at end of file
+}
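The hunks above drop the per-resource `regexp.MustCompile(`[^0-9A-Za-z_]`)` sanitisation from the individual `Name` functions and instead wire a shared `nameFixes` table into the import context (see the `importContextForTest` change). The table itself is not shown anywhere in this series, so the following is only a minimal sketch, under the assumption of a simple regex/replacement list, of how a centralised fixer could turn a raw name such as `New: Importing ^ Things` into the `new_importing_things` identifier that the updated `TestGlobalInitScriptGen` expects:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// regexFix is a hypothetical shape for one entry of a name-fix table;
// the exporter's real nameFixes list may look different.
type regexFix struct {
	regex       *regexp.Regexp
	replacement string
}

// Illustrative rules: collapse runs of non-alphanumeric characters into "_",
// then strip any leading or trailing underscores.
var exampleNameFixes = []regexFix{
	{regexp.MustCompile(`[^A-Za-z0-9]+`), "_"},
	{regexp.MustCompile(`^_+|_+$`), ""},
}

// normalizeResourceName applies every fix in order and lower-cases the result,
// turning an arbitrary display name into a stable Terraform identifier.
func normalizeResourceName(name string, fixes []regexFix) string {
	for _, f := range fixes {
		name = f.regex.ReplaceAllString(name, f.replacement)
	}
	return strings.ToLower(name)
}

func main() {
	fmt.Println(normalizeResourceName("New: Importing ^ Things", exampleNameFixes))
	// prints: new_importing_things
}
```

Keeping the fixes in one list means every importable gets consistent, predictable resource names instead of each `Name` function re-implementing its own sanitisation.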
From 1bdc4f1b6440af86f3e6c5f63d6ac3cc316ac0be Mon Sep 17 00:00:00 2001
From: Serge Smertin
Date: Fri, 18 Feb 2022 16:42:48 +0100
Subject: [PATCH 8/9] Add screencast

---
 docs/guides/experimental-exporter.md |  6 +++++-
 exporter/importables.go              | 23 -----------------------
 exporter/importables_test.go         | 26 --------------------------
 3 files changed, 5 insertions(+), 50 deletions(-)

diff --git a/docs/guides/experimental-exporter.md b/docs/guides/experimental-exporter.md
index 9dcdf82f22..1420b2b466 100644
--- a/docs/guides/experimental-exporter.md
+++ b/docs/guides/experimental-exporter.md
@@ -11,7 +11,11 @@ Generates `*.tf` files for Databricks resources as well as `import.sh` to run im
 
 ## Example Usage
 
-After downloading the [latest released binary](https://github.com/databrickslabs/terraform-provider-databricks/releases), unpack it and place it in the same folder. In fact, you may have already downloaded this binary - check `.terraform` folder of any state directory, where you've used `databricks` provider. It could also be in your plugin cache `~/.terraform.d/plugins/registry.terraform.io/databrickslabs/databricks/*/*/terraform-provider-databricks`.
+After downloading the [latest released binary](https://github.com/databrickslabs/terraform-provider-databricks/releases), unpack it and place it in the same folder. In fact, you may have already downloaded this binary - check the `.terraform` folder of any state directory where you've used the `databricks` provider. It could also be in your plugin cache `~/.terraform.d/plugins/registry.terraform.io/databrickslabs/databricks/*/*/terraform-provider-databricks`. Here's the tool in action:
+
+[![asciicast](https://asciinema.org/a/Rv8ZFJQpfrfp6ggWddjtyXaOy.svg)](https://asciinema.org/a/Rv8ZFJQpfrfp6ggWddjtyXaOy)
+
+The exporter can also be used in non-interactive mode:
 
 ```bash
 export DATABRICKS_HOST=...
diff --git a/exporter/importables.go b/exporter/importables.go
index 4d727e8168..c78ba46368 100644
--- a/exporter/importables.go
+++ b/exporter/importables.go
@@ -790,29 +790,6 @@ var resourcesMap map[string]importable = map[string]importable{
 			return nil
 		},
 	},
-	"databricks_git_credential": {
-		Service: "repos",
-		Name: func(d *schema.ResourceData) string {
-			return d.Get("git_provider").(string) + "_" + d.Get("git_username").(string) + "_" + d.Id()
-		},
-		List: func(ic *importContext) error {
-			creds, err := repos.NewGitCredentialsAPI(ic.Context, ic.Client).List()
-			if err != nil {
-				return err
-			}
-			for offset, cred := range creds {
-				ic.Emit(&resource{
-					Resource: "databricks_git_credential",
-					ID:       fmt.Sprintf("%d", cred.ID),
-				})
-				log.Printf("[INFO] Scanned %d of %d Git credentials", offset+1, len(creds))
-			}
-			return nil
-		},
-		Depends: []reference{
-			{Path: "personal_access_token", Variable: true},
-		},
-	},
 	"databricks_workspace_conf": {
 		Service: "workspace",
 		Name: func(d *schema.ResourceData) string {
diff --git a/exporter/importables_test.go b/exporter/importables_test.go
index 85d799b51e..3b7dc2cd2b 100644
--- a/exporter/importables_test.go
+++ b/exporter/importables_test.go
@@ -606,32 +606,6 @@ func TestNotebookGeneration(t *testing.T) {
 	})
 }
 
-func TestGitCredentialGen(t *testing.T) {
-	testGenerate(t, []qa.HTTPFixture{
-		{
-			Method:   "GET",
-			Resource: "/api/2.0/git-credentials/a",
-			Response: repos.GitCredentialResponse{
-				UserName: "me",
-				Provider: "github",
-			},
-		},
-	}, "repos", func(ic *importContext) {
-		ic.Emit(&resource{
-			Resource: "databricks_git_credential",
-			ID:       "a",
-		})
-
-		ic.generateHclForResources(nil)
-		assert.Equal(t, internal.TrimLeadingWhitespace(`
-		resource "databricks_git_credential" "github_me_a" {
-			personal_access_token = var.personal_access_token_github_me_a
-			git_username          = "me"
-			git_provider          = "github"
-		}`), string(ic.Files["repos"].Bytes()))
-	})
-}
-
 func TestGlobalInitScriptGen(t *testing.T) {
 	testGenerate(t, []qa.HTTPFixture{
 		{
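The guide change in the patch above documents two ways to run the exporter: interactively (shown in the asciinema screencast) and non-interactively, driven by environment variables such as `DATABRICKS_HOST`. As a rough, hypothetical illustration only - the exporter authenticates through the provider itself, and nothing below is part of this series - a CI wrapper might verify the environment before invoking the binary:

```go
package main

import (
	"fmt"
	"os"
)

// verifyExporterEnv is a hypothetical pre-flight helper for CI jobs: it only
// confirms that the variables a non-interactive export depends on are set.
func verifyExporterEnv() error {
	for _, name := range []string{"DATABRICKS_HOST", "DATABRICKS_TOKEN"} {
		if os.Getenv(name) == "" {
			return fmt.Errorf("%s must be set before running the exporter non-interactively", name)
		}
	}
	return nil
}

func main() {
	if err := verifyExporterEnv(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("environment looks complete; run the exporter binary next")
}
```

`DATABRICKS_TOKEN` is the usual companion variable for PAT-based authentication; adjust the list if you authenticate differently.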
From bf81f3c1f2f8a2b3cd7baf0d665097edf6343dc6 Mon Sep 17 00:00:00 2001
From: Serge Smertin
Date: Fri, 18 Feb 2022 16:46:40 +0100
Subject: [PATCH 9/9] Remove Git credential import tests

---
 exporter/exporter_test.go | 77 ---------------------------------------
 1 file changed, 77 deletions(-)

diff --git a/exporter/exporter_test.go b/exporter/exporter_test.go
index a85b0f70b1..dcb973c874 100644
--- a/exporter/exporter_test.go
+++ b/exporter/exporter_test.go
@@ -1002,83 +1002,6 @@ func TestImportingRepos(t *testing.T) {
 	})
 }
 
-func TestImportingGitCredentials(t *testing.T) {
-	provider := "gitHub"
-	user := "test"
-	resp := repos.GitCredentialResponse{
-		ID:       121232342,
-		Provider: provider,
-		UserName: user,
-	}
-	qa.HTTPFixturesApply(t,
-		[]qa.HTTPFixture{
-			meAdminFixture,
-			{
-				Method:   "GET",
-				Resource: "/api/2.0/repos?",
-				Response: repos.ReposListResponse{
-					Repos: []repos.ReposInformation{},
-				},
-			},
-			{
-				Method:   http.MethodGet,
-				Resource: "/api/2.0/git-credentials",
-				Response: repos.GitCredentialList{
-					Credentials: []repos.GitCredentialResponse{resp},
-				},
-			},
-			{
-				Method:   "GET",
-				Resource: fmt.Sprintf("/api/2.0/git-credentials/%d", resp.ID),
-				Response: resp,
-			},
-		},
-		func(ctx context.Context, client *common.DatabricksClient) {
-			tmpDir := fmt.Sprintf("/tmp/tf-%s", qa.RandomName())
-			defer os.RemoveAll(tmpDir)
-
-			ic := newImportContext(client)
-			ic.Directory = tmpDir
-			ic.listing = "repos"
-			ic.services = "repos"
-
-			err := ic.Run()
-			assert.NoError(t, err)
-		})
-}
-
-func TestImportingGitCredentials_Error(t *testing.T) {
-	qa.HTTPFixturesApply(t,
-		[]qa.HTTPFixture{
-			meAdminFixture,
-			{
-				Method:   "GET",
-				Resource: "/api/2.0/repos?",
-				Response: repos.ReposListResponse{
-					Repos: []repos.ReposInformation{},
-				},
-			},
-			{
-				Method:   http.MethodGet,
-				Resource: "/api/2.0/git-credentials",
-				Response: repos.GitCredentialList{},
-				Status:   404,
-			},
-		},
-		func(ctx context.Context, client *common.DatabricksClient) {
-			tmpDir := fmt.Sprintf("/tmp/tf-%s", qa.RandomName())
-			defer os.RemoveAll(tmpDir)
-
-			ic := newImportContext(client)
-			ic.Directory = tmpDir
-			ic.listing = "repos"
-			ic.services = "repos"
-
-			err := ic.Run()
-			assert.Error(t, err)
-		})
-}
-
 func TestImportingIPAccessLists(t *testing.T) {
 	resp := access.IpAccessListStatus{
 		ListID: "123",