[exporter] interactive cli and notebooks #1010

Merged: 9 commits, Feb 18, 2022
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -8,6 +8,7 @@
* Added new `gcp_attributes` to `databricks_cluster` and `databricks_instance_pool` ([#1126](https://github.com/databrickslabs/terraform-provider-databricks/pull/1126)).
* Added exporter functionality for `databricks_ip_access_list` and `databricks_workspace_conf` ([#1125](https://github.com/databrickslabs/terraform-provider-databricks/pull/1125)).
* Added `graviton` selector for `databricks_node_type` and `databricks_spark_version` data sources ([#1127](https://github.com/databrickslabs/terraform-provider-databricks/pull/1127)).
* Added interactive mode to resource exporter ([#1010](https://github.com/databrickslabs/terraform-provider-databricks/pull/1010)).

## 0.4.9

2 changes: 1 addition & 1 deletion common/client.go
@@ -358,7 +358,7 @@ func (c *DatabricksClient) configureWithDatabricksCfg(ctx context.Context) (func
_, err = os.Stat(configFile)
if os.IsNotExist(err) {
// early return for non-configured machines
log.Printf("[INFO] %s not found on current host", configFile)
log.Printf("[DEBUG] %s not found on current host", configFile)
return nil, nil
}
cfg, err := ini.Load(configFile)
6 changes: 5 additions & 1 deletion docs/guides/experimental-exporter.md
@@ -11,7 +11,11 @@ Generates `*.tf` files for Databricks resources as well as `import.sh` to run im

## Example Usage

After downloading the [latest released binary](https://github.com/databrickslabs/terraform-provider-databricks/releases), unpack it and place it in the same folder. You may already have this binary - check the `.terraform` folder of any state directory where you've used the `databricks` provider. It could also be in your plugin cache `~/.terraform.d/plugins/registry.terraform.io/databrickslabs/databricks/*/*/terraform-provider-databricks`. Here's the tool in action:

[![asciicast](https://asciinema.org/a/Rv8ZFJQpfrfp6ggWddjtyXaOy.svg)](https://asciinema.org/a/Rv8ZFJQpfrfp6ggWddjtyXaOy)

The exporter can also be used in non-interactive mode:

```bash
export DATABRICKS_HOST=...
```
33 changes: 32 additions & 1 deletion exporter/command.go
@@ -2,6 +2,7 @@ package exporter

import (
"flag"
"fmt"
"log"
"os"
"strings"
@@ -43,6 +44,31 @@ func (ic *importContext) allServicesAndListing() (string, string) {
return services, listing
}

func (ic *importContext) interactivePrompts() {
	for ic.Client.Authenticate(ic.Context) != nil {
		ic.Client.Host = askFor("🔑 Databricks Workspace URL:")
		ic.Client.Token = askFor("🔑 Databricks Workspace PAT:")
	}
	ic.match = askFor("🔍 Match entity names (optional):")
	listing := ""
	for r, ir := range ic.Importables {
		if ir.List == nil {
			continue
		}
		if !askFlag(fmt.Sprintf("✅ Generate `%s` and related resources?", r)) {
			continue
		}
		if len(listing) > 0 {
			listing += ","
		}
		listing += ir.Service
		if ir.Service == "mounts" {
			ic.mounts = true
		}
	}
	ic.listing = listing
}
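The `askFor` and `askFlag` helpers used above are not part of this hunk; they are defined elsewhere in the exporter package. A minimal sketch, assuming they read from the swappable `cliInput`/`cliOutput` streams that the test further below overrides (package name and exact prompt handling are illustrative):

```go
// Hypothetical sketch of the prompt helpers; only the idea of swappable
// cliInput/cliOutput streams is taken from this PR.
package prompt

import (
	"bufio"
	"fmt"
	"io"
	"os"
	"strings"
)

var (
	cliInput  io.Reader = os.Stdin
	cliOutput io.Writer = os.Stdout
)

// askFor prints a prompt and returns the trimmed line typed by the user.
func askFor(prompt string) string {
	fmt.Fprintf(cliOutput, "%s ", prompt)
	line, _ := bufio.NewReader(cliInput).ReadString('\n')
	return strings.TrimSpace(line)
}

// askFlag asks a yes/no question and treats "y" or "yes" as agreement.
func askFlag(prompt string) bool {
	answer := strings.ToLower(askFor(prompt))
	return answer == "y" || answer == "yes"
}
```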

// Run import according to flags
func Run(args ...string) error {
log.SetOutput(&logLevel)
@@ -59,14 +85,16 @@ func Run(args ...string) error {
if err != nil {
return err
}
var skipInteractive bool
flags.BoolVar(&skipInteractive, "skip-interactive", false, "Skip interactive mode")
flags.StringVar(&ic.Directory, "directory", cwd,
"Directory to generate sources in. Defaults to current directory.")
flags.Int64Var(&ic.lastActiveDays, "last-active-days", 3650,
"Items with older than activity specified won't be imported.")
flags.BoolVar(&ic.debug, "debug", false, "Print extra debug information.")
flags.BoolVar(&ic.mounts, "mounts", false, "List DBFS mount points.")
flags.BoolVar(&ic.generateDeclaration, "generateProviderDeclaration", true,
"Generate Databricks provider declaration (for Terraform >= 0.13).")
"Generate Databricks provider declaration.")
services, listing := ic.allServicesAndListing()
flags.StringVar(&ic.services, "services", services,
"Comma-separated list of services to import. By default all services are imported.")
@@ -87,6 +115,9 @@ func Run(args ...string) error {
if err != nil {
return err
}
if !skipInteractive {
ic.interactivePrompts()
}
if len(prefix) > 0 {
ic.prefix = prefix + "_"
}
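Because `Run` takes its flags as variadic string arguments (and presumably parses them in the collapsed remainder of the function), the exporter can also be driven programmatically once authentication is configured via environment variables (e.g. `DATABRICKS_HOST`, as in the docs snippet above). A minimal sketch; the target directory is illustrative, and `mounts` is used only because it appears as a service name in this PR:

```go
package main

import (
	"log"

	"github.com/databrickslabs/terraform-provider-databricks/exporter"
)

func main() {
	// Non-interactive export: skip prompts, list DBFS mounts, and write
	// the generated *.tf files into an illustrative target directory.
	err := exporter.Run(
		"-skip-interactive",
		"-mounts",
		"-services", "mounts",
		"-directory", "./databricks-export",
	)
	if err != nil {
		log.Fatal(err)
	}
}
```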
44 changes: 44 additions & 0 deletions exporter/command_test.go
@@ -0,0 +1,44 @@
package exporter

import (
	"bytes"
	"context"
	"testing"

	"github.com/databrickslabs/terraform-provider-databricks/common"
	"github.com/stretchr/testify/assert"
)

type dummyReader string

func (d dummyReader) Read(p []byte) (int, error) {
	n := copy(p, []byte(d))
	return n, nil
}

func TestInteractivePrompts(t *testing.T) {
	cliInput = dummyReader("y\n")
	cliOutput = &bytes.Buffer{}
	ic := &importContext{
		Client:  &common.DatabricksClient{},
		Context: context.Background(),
		Importables: map[string]importable{
			"x": {
				Service: "a",
				List: func(_ *importContext) error {
					return nil
				},
			},
			"y": {
				Service: "mounts",
				List: func(_ *importContext) error {
					return nil
				},
			},
		},
	}
	ic.interactivePrompts()
	assert.Equal(t, "a,mounts", ic.listing)
	assert.Equal(t, "y", ic.match)
	assert.True(t, ic.mounts)
}
118 changes: 71 additions & 47 deletions exporter/context.go
@@ -82,6 +82,16 @@ type mount struct {
ClusterID string
}

var nameFixes = []regexFix{
	{regexp.MustCompile(`[0-9a-f]{8}[_-][0-9a-f]{4}[_-][0-9a-f]{4}` +
		`[_-][0-9a-f]{4}[_-][0-9a-f]{12}[_-]`), ""},
	{regexp.MustCompile(`[_-][0-9]+[\._-][0-9]+[\._-].*\.([a-z0-9]{1,4})`), "_$1"},
	{regexp.MustCompile(`@.*$`), ""},
	{regexp.MustCompile(`[-\s\.\|]`), "_"},
	{regexp.MustCompile(`\W+`), ""},
	{regexp.MustCompile(`[_]{2,}`), "_"},
}
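For illustration, this is roughly how such a list of fixes normalizes a raw object name when applied in order. The `regexFix` field names and the sample input are assumptions for this standalone sketch, since the type itself is defined elsewhere in the package:

```go
package main

import (
	"fmt"
	"regexp"
)

// regexFix pairs a pattern with its replacement; field names are assumed
// for this sketch only.
type regexFix struct {
	Regex       *regexp.Regexp
	Replacement string
}

// normalizeName applies each fix in order, same as the exporter does for
// generated resource names.
func normalizeName(name string, fixes []regexFix) string {
	for _, f := range fixes {
		name = f.Regex.ReplaceAllString(name, f.Replacement)
	}
	return name
}

func main() {
	// A subset of the patterns above.
	fixes := []regexFix{
		{regexp.MustCompile(`@.*$`), ""},       // strip e-mail domains
		{regexp.MustCompile(`[-\s\.\|]`), "_"}, // separators -> underscores
		{regexp.MustCompile(`\W+`), ""},        // drop remaining non-word chars
		{regexp.MustCompile(`[_]{2,}`), "_"},   // collapse repeated underscores
	}
	// Prints "first_last".
	fmt.Println(normalizeName("first.last@example.com", fixes))
}
```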

func newImportContext(c *common.DatabricksClient) *importContext {
p := provider.DatabricksProvider()
p.TerraformVersion = "exporter"
@@ -103,16 +113,8 @@ func newImportContext(c *common.DatabricksClient) *importContext {
Files: map[string]*hclwrite.File{},
Scope: []*resource{},
importing: map[string]bool{},
nameFixes: []regexFix{
{regexp.MustCompile(`[0-9a-f]{8}[_-][0-9a-f]{4}[_-][0-9a-f]{4}` +
`[_-][0-9a-f]{4}[_-][0-9a-f]{12}[_-]`), ""},
{regexp.MustCompile(`[_-][0-9]+[\._-][0-9]+[\._-].*\.([a-z0-9]{1,4})`), "_$1"},
{regexp.MustCompile(`@.*$`), ""},
{regexp.MustCompile(`[-\s\.\|]`), "_"},
{regexp.MustCompile(`\W+`), ""},
{regexp.MustCompile(`[_]{2,}`), "_"},
},
hclFixes: []regexFix{ // Be careful with that! it may break working code
nameFixes: nameFixes,
hclFixes: []regexFix{ // Be careful with that! it may break working code
},
allUsers: []scim.User{},
variables: map[string]string{},
@@ -191,42 +193,7 @@ func (ic *importContext) Run() error {
`)
dcfile.Close()
}

sort.Sort(ic.Scope)
scopeSize := len(ic.Scope)
log.Printf("[INFO] Generating configuration for %d resources", scopeSize)
for i, r := range ic.Scope {
ir := ic.Importables[r.Resource]
f, ok := ic.Files[ir.Service]
if !ok {
f = hclwrite.NewEmptyFile()
ic.Files[ir.Service] = f
}
if ir.Ignore != nil && ir.Ignore(ic, r) {
continue
}
body := f.Body()
if ir.Body != nil {
err := ir.Body(ic, body, r)
if err != nil {
log.Printf("[ERROR] %s", err.Error())
}
} else {
resourceBlock := body.AppendNewBlock("resource", []string{r.Resource, r.Name})
err := ic.dataToHcl(ir, []string{}, ic.Resources[r.Resource],
r.Data, resourceBlock.Body())
if err != nil {
log.Printf("[ERROR] %s", err.Error())
}
}
if i%50 == 0 {
log.Printf("[INFO] Generated %d of %d resources", i, scopeSize)
}
if r.Mode != "data" && ic.Resources[r.Resource].Importer != nil {
// nolint
sh.WriteString(r.ImportCommand(ic) + "\n")
}
}
ic.generateHclForResources(sh)
for service, f := range ic.Files {
formatted := hclwrite.Format(f.Bytes())
// fix some formatting in a hacky way instead of writing 100 lines
@@ -268,6 +235,44 @@
return nil
}

func (ic *importContext) generateHclForResources(sh *os.File) {
	sort.Sort(ic.Scope)
	scopeSize := len(ic.Scope)
	log.Printf("[INFO] Generating configuration for %d resources", scopeSize)
	for i, r := range ic.Scope {
		ir := ic.Importables[r.Resource]
		f, ok := ic.Files[ir.Service]
		if !ok {
			f = hclwrite.NewEmptyFile()
			ic.Files[ir.Service] = f
		}
		if ir.Ignore != nil && ir.Ignore(ic, r) {
			continue
		}
		body := f.Body()
		if ir.Body != nil {
			err := ir.Body(ic, body, r)
			if err != nil {
				log.Printf("[ERROR] %s", err.Error())
			}
		} else {
			resourceBlock := body.AppendNewBlock("resource", []string{r.Resource, r.Name})
			err := ic.dataToHcl(ir, []string{}, ic.Resources[r.Resource],
				r.Data, resourceBlock.Body())
			if err != nil {
				log.Printf("[ERROR] %s", err.Error())
			}
		}
		if i%50 == 0 {
			log.Printf("[INFO] Generated %d of %d resources", i+1, scopeSize)
		}
		if r.Mode != "data" && ic.Resources[r.Resource].Importer != nil && sh != nil {
			// nolint
			sh.WriteString(r.ImportCommand(ic) + "\n")
		}
	}
}

func (ic *importContext) MatchesName(n string) bool {
if ic.match == "" {
return true
@@ -465,6 +470,17 @@ func (ic *importContext) reference(i importable, path []string, value string) hc
if d.Path != match {
continue
}
if d.File {
relativeFile := fmt.Sprintf("${path.module}/%s", value)
return hclwrite.Tokens{
&hclwrite.Token{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}},
&hclwrite.Token{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(relativeFile)},
&hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}},
}
}
if d.Variable {
return ic.variable(fmt.Sprintf("%s_%s", path[0], value), "")
}
attr := "id"
if d.Match != "" {
attr = d.Match
@@ -512,7 +528,15 @@ func (ic *importContext) dataToHcl(i importable, path []string,
if as.Computed {
continue
}
raw, ok := d.GetOk(strings.Join(append(path, a), "."))
pathString := strings.Join(append(path, a), ".")
raw, ok := d.GetOk(pathString)
for _, r := range i.Depends {
if r.Path == pathString && r.Variable {
// sensitive fields are moved to variable depends
raw = i.Name(d)
ok = true
}
}
if !ok {
continue
}
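As an aside on the `d.File` branch added to `reference` above: the three raw tokens build a quoted string literal such as `"${path.module}/notebook.py"`. A minimal standalone sketch (attribute and file names are illustrative) producing the same kind of output with `hclwrite`:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2/hclsyntax"
	"github.com/hashicorp/hcl/v2/hclwrite"
)

func main() {
	// Build the same quoted-literal tokens the exporter emits for
	// file-based dependencies ("${path.module}/<file>").
	relativeFile := "${path.module}/notebook.py" // illustrative file name
	tokens := hclwrite.Tokens{
		{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}},
		{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(relativeFile)},
		{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}},
	}
	f := hclwrite.NewEmptyFile()
	f.Body().SetAttributeRaw("source", tokens)
	// Prints: source = "${path.module}/notebook.py"
	fmt.Print(string(f.Bytes()))
}
```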