Skip to content

Commit

Permalink
Add support for release channels configuration for SQL Endpoints (dat…
Browse files Browse the repository at this point in the history
  • Loading branch information
alexott committed Feb 2, 2022
1 parent 8cb01b7 commit 254f36c
Show file tree
Hide file tree
Showing 4 changed files with 52 additions and 22 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
* Added support for `tf:suppress_diff` on primitive types ([#984](https://github.com/databrickslabs/terraform-provider-databricks/issues/984)).
* Fixed issue that arises when destroying `databricks_sql_global_config` with instance profile set ([#1076](https://github.com/databrickslabs/terraform-provider-databricks/issues/1076)).
* Added setting of SQL configuration parameters in `databricks_sql_global_config` ([#1080](https://github.com/databrickslabs/terraform-provider-databricks/pull/1080)).
* Added support for release channels in `databricks_sql_endpoint` configuration ([#1078](https://github.com/databrickslabs/terraform-provider-databricks/pull/1078)).

Updated dependency versions:

Expand Down
4 changes: 3 additions & 1 deletion docs/resources/sql_endpoint.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,9 @@ The following arguments are supported:
* `tags` - Databricks tags all endpoint resources with these tags.
* `spot_instance_policy` - The spot policy to use for allocating instances to clusters: `COST_OPTIMIZED` or `RELIABILITY_OPTIMIZED`. This field is optional. Default is `COST_OPTIMIZED`.
* `enable_photon` - Whether to enable [Photon](https://databricks.com/product/delta-engine). This field is optional and is enabled by default.

* `channel` block, consisting of following fields:
* `name` - Name of the Databricks SQL release channel. Possible values are: `CHANNEL_NAME_PREVIEW` and `CHANNEL_NAME_CURRENT`. Default is `CHANNEL_NAME_CURRENT`.

## Attribute Reference

In addition to all arguments above, the following attributes are exported:
Expand Down
42 changes: 21 additions & 21 deletions sqlanalytics/resource_sql_endpoint.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,28 +21,34 @@ var (

// SQLEndpoint describes a Databricks SQL endpoint as exchanged with the
// SQL endpoints REST API and mapped onto the Terraform resource schema
// via struct tags. `tf:"computed"` marks server-populated attributes,
// `tf:"default:..."` supplies schema defaults, and `tf:"suppress_diff"`
// ignores server-side normalization of the field.
//
// NOTE(review): the previous revision of this block carried two copies of
// the field list (pre- and post-change diff residue); only the current
// field set is kept here, which is what the struct must contain to compile.
type SQLEndpoint struct {
	ID                      string          `json:"id,omitempty" tf:"computed"`
	Name                    string          `json:"name"`
	ClusterSize             string          `json:"cluster_size"`
	AutoStopMinutes         int             `json:"auto_stop_mins,omitempty" tf:"default:120"`
	MinNumClusters          int             `json:"min_num_clusters,omitempty" tf:"default:1"`
	MaxNumClusters          int             `json:"max_num_clusters,omitempty" tf:"default:1"`
	NumClusters             int             `json:"num_clusters,omitempty" tf:"default:1"`
	EnablePhoton            bool            `json:"enable_photon,omitempty" tf:"default:true"`
	EnableServerlessCompute bool            `json:"enable_serverless_compute,omitempty"`
	InstanceProfileARN      string          `json:"instance_profile_arn,omitempty"`
	State                   string          `json:"state,omitempty" tf:"computed"`
	JdbcURL                 string          `json:"jdbc_url,omitempty" tf:"computed"`
	OdbcParams              *OdbcParams     `json:"odbc_params,omitempty" tf:"computed"`
	Tags                    *Tags           `json:"tags,omitempty" tf:"suppress_diff"`
	SpotInstancePolicy      string          `json:"spot_instance_policy,omitempty" tf:"default:COST_OPTIMIZED"`
	Channel                 *ReleaseChannel `json:"channel,omitempty" tf:"suppress_diff"`

	// The data source ID is not part of the endpoint API response.
	// We manually resolve it by retrieving the list of data sources
	// and matching this entity's endpoint ID.
	DataSourceID string `json:"data_source_id,omitempty" tf:"computed"`
}

// ReleaseChannel holds information about the DBSQL release channel an
// endpoint is pinned to. The `name` field is optional in API payloads
// (omitempty) and defaults to CHANNEL_NAME_CURRENT in the Terraform schema.
type ReleaseChannel struct {
	// Name of the release channel, e.g. CHANNEL_NAME_PREVIEW or
	// CHANNEL_NAME_CURRENT (the schema default).
	Name string `json:"name,omitempty" tf:"default:CHANNEL_NAME_CURRENT"`
}

// OdbcParams hold information required to submit SQL commands to the SQL endpoint using ODBC.
type OdbcParams struct {
Hostname string `json:"hostname,omitempty"`
Expand Down Expand Up @@ -185,16 +191,10 @@ func (a SQLEndpointsAPI) Delete(endpointID string) error {
func ResourceSQLEndpoint() *schema.Resource {
s := common.StructToSchema(SQLEndpoint{}, func(
m map[string]*schema.Schema) map[string]*schema.Schema {
m["auto_stop_mins"].Default = 120
m["cluster_size"].ValidateDiagFunc = validation.ToDiagFunc(
validation.StringInSlice(ClusterSizes, false))
m["max_num_clusters"].Default = 1
m["max_num_clusters"].ValidateDiagFunc = validation.ToDiagFunc(
validation.IntBetween(1, MaxNumClusters))
m["min_num_clusters"].Default = 1
m["num_clusters"].Default = 1
m["spot_instance_policy"].Default = "COST_OPTIMIZED"
m["enable_photon"].Default = true
return m
})
return common.Resource{
Expand Down
27 changes: 27 additions & 0 deletions sqlanalytics/resource_sql_endpoint_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -295,3 +295,30 @@ func TestSQLEnpointAPI(t *testing.T) {
require.NoError(t, err)
})
}

// TestResolveDataSourceIDError verifies that ResolveDataSourceID
// propagates an error when the data sources API call itself fails.
func TestResolveDataSourceIDError(t *testing.T) {
	fixtures := []qa.HTTPFixture{
		{
			Method:   "GET",
			Resource: "/api/2.0/preview/sql/data_sources",
			Response: map[string]interface{}{},
			Status:   404,
		},
	}
	qa.HTTPFixturesApply(t, fixtures, func(ctx context.Context, client *common.DatabricksClient) {
		_, err := NewSQLEndpointsAPI(ctx, client).ResolveDataSourceID("any")
		require.Error(t, err)
	})
}

// TestResolveDataSourceIDNotFound verifies that ResolveDataSourceID
// returns an error when the API responds successfully but no data
// source matches the requested endpoint ID.
func TestResolveDataSourceIDNotFound(t *testing.T) {
	fixtures := []qa.HTTPFixture{
		{
			Method:   "GET",
			Resource: "/api/2.0/preview/sql/data_sources",
			Response: []interface{}{},
		},
	}
	qa.HTTPFixturesApply(t, fixtures, func(ctx context.Context, client *common.DatabricksClient) {
		_, err := NewSQLEndpointsAPI(ctx, client).ResolveDataSourceID("any")
		require.Error(t, err)
	})
}

0 comments on commit 254f36c

Please sign in to comment.