diff --git a/CHANGELOG.md b/CHANGELOG.md index 4c3c4e12b7..75df907ecc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ * Added support for `tf:suppress_diff` on primitive types ([#984](https://github.com/databrickslabs/terraform-provider-databricks/issues/984)). * Fixed issue arises when destroying `databricks_sql_global_config` with instance profile set ([#1076](https://github.com/databrickslabs/terraform-provider-databricks/issues/1076)). * Added setting of SQL configuration parameters in `databricks_sql_global_config` ([#1080](https://github.com/databrickslabs/terraform-provider-databricks/pull/1080)). +* Added support for release channels in `databricks_sql_endpoint` configuration ([#1078](https://github.com/databrickslabs/terraform-provider-databricks/pull/1078)). Updated dependency versions: diff --git a/docs/resources/sql_endpoint.md b/docs/resources/sql_endpoint.md index 428251d710..b20d5de999 100644 --- a/docs/resources/sql_endpoint.md +++ b/docs/resources/sql_endpoint.md @@ -37,7 +37,9 @@ The following arguments are supported: * `tags` - Databricks tags all endpoint resources with these tags. * `spot_instance_policy` - The spot policy to use for allocating instances to clusters: `COST_OPTIMIZED` or `RELIABILITY_OPTIMIZED`. This field is optional. Default is `COST_OPTIMIZED`. * `enable_photon` - Whether to enable [Photon](https://databricks.com/product/delta-engine). This field is optional and is enabled by default. - +* `channel` block, consisting of the following fields: + * `name` - Name of the Databricks SQL release channel. Possible values are: `CHANNEL_NAME_PREVIEW` and `CHANNEL_NAME_CURRENT`. Default is `CHANNEL_NAME_CURRENT`. 
+ ## Attribute Reference In addition to all arguments above, the following attributes are exported: diff --git a/sqlanalytics/resource_sql_endpoint.go b/sqlanalytics/resource_sql_endpoint.go index a7157ebc43..2e668263a3 100644 --- a/sqlanalytics/resource_sql_endpoint.go +++ b/sqlanalytics/resource_sql_endpoint.go @@ -21,21 +21,22 @@ var ( // SQLEndpoint ... type SQLEndpoint struct { - ID string `json:"id,omitempty" tf:"computed"` - Name string `json:"name"` - ClusterSize string `json:"cluster_size"` - AutoStopMinutes int `json:"auto_stop_mins,omitempty"` - MinNumClusters int `json:"min_num_clusters,omitempty"` - MaxNumClusters int `json:"max_num_clusters,omitempty"` - NumClusters int `json:"num_clusters,omitempty"` - EnablePhoton bool `json:"enable_photon,omitempty"` - EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"` - InstanceProfileARN string `json:"instance_profile_arn,omitempty"` - State string `json:"state,omitempty" tf:"computed"` - JdbcURL string `json:"jdbc_url,omitempty" tf:"computed"` - OdbcParams *OdbcParams `json:"odbc_params,omitempty" tf:"computed"` - Tags *Tags `json:"tags,omitempty" tf:"suppress_diff"` - SpotInstancePolicy string `json:"spot_instance_policy,omitempty"` + ID string `json:"id,omitempty" tf:"computed"` + Name string `json:"name"` + ClusterSize string `json:"cluster_size"` + AutoStopMinutes int `json:"auto_stop_mins,omitempty" tf:"default:120"` + MinNumClusters int `json:"min_num_clusters,omitempty" tf:"default:1"` + MaxNumClusters int `json:"max_num_clusters,omitempty" tf:"default:1"` + NumClusters int `json:"num_clusters,omitempty" tf:"default:1"` + EnablePhoton bool `json:"enable_photon,omitempty" tf:"default:true"` + EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"` + InstanceProfileARN string `json:"instance_profile_arn,omitempty"` + State string `json:"state,omitempty" tf:"computed"` + JdbcURL string `json:"jdbc_url,omitempty" tf:"computed"` + OdbcParams *OdbcParams 
`json:"odbc_params,omitempty" tf:"computed"` + Tags *Tags `json:"tags,omitempty" tf:"suppress_diff"` + SpotInstancePolicy string `json:"spot_instance_policy,omitempty" tf:"default:COST_OPTIMIZED"` + Channel *ReleaseChannel `json:"channel,omitempty" tf:"suppress_diff"` // The data source ID is not part of the endpoint API response. // We manually resolve it by retrieving the list of data sources @@ -43,6 +44,11 @@ type SQLEndpoint struct { DataSourceID string `json:"data_source_id,omitempty" tf:"computed"` } +// ReleaseChannel holds information about DBSQL Release Channel +type ReleaseChannel struct { + Name string `json:"name,omitempty" tf:"default:CHANNEL_NAME_CURRENT"` +} + // OdbcParams hold information required to submit SQL commands to the SQL endpoint using ODBC. type OdbcParams struct { Hostname string `json:"hostname,omitempty"` @@ -185,16 +191,10 @@ func (a SQLEndpointsAPI) Delete(endpointID string) error { func ResourceSQLEndpoint() *schema.Resource { s := common.StructToSchema(SQLEndpoint{}, func( m map[string]*schema.Schema) map[string]*schema.Schema { - m["auto_stop_mins"].Default = 120 m["cluster_size"].ValidateDiagFunc = validation.ToDiagFunc( validation.StringInSlice(ClusterSizes, false)) - m["max_num_clusters"].Default = 1 m["max_num_clusters"].ValidateDiagFunc = validation.ToDiagFunc( validation.IntBetween(1, MaxNumClusters)) - m["min_num_clusters"].Default = 1 - m["num_clusters"].Default = 1 - m["spot_instance_policy"].Default = "COST_OPTIMIZED" - m["enable_photon"].Default = true return m }) return common.Resource{ diff --git a/sqlanalytics/resource_sql_endpoint_test.go b/sqlanalytics/resource_sql_endpoint_test.go index 606a1b2fdf..649b7e1103 100644 --- a/sqlanalytics/resource_sql_endpoint_test.go +++ b/sqlanalytics/resource_sql_endpoint_test.go @@ -295,3 +295,30 @@ func TestSQLEnpointAPI(t *testing.T) { require.NoError(t, err) }) } + +func TestResolveDataSourceIDError(t *testing.T) { + qa.HTTPFixturesApply(t, []qa.HTTPFixture{ + { + Method: 
"GET", + Resource: "/api/2.0/preview/sql/data_sources", + Response: map[string]interface{}{}, + Status: 404, + }, + }, func(ctx context.Context, client *common.DatabricksClient) { + _, err := NewSQLEndpointsAPI(ctx, client).ResolveDataSourceID("any") + require.Error(t, err) + }) +} + +func TestResolveDataSourceIDNotFound(t *testing.T) { + qa.HTTPFixturesApply(t, []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/preview/sql/data_sources", + Response: []interface{}{}, + }, + }, func(ctx context.Context, client *common.DatabricksClient) { + _, err := NewSQLEndpointsAPI(ctx, client).ResolveDataSourceID("any") + require.Error(t, err) + }) +}