diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 3644c08ee3..0cdacc4df9 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -16,4 +16,5 @@
 
 ### Internal Changes
 
+* Port `databricks_instance_pool` data source to plugin framework ([#5145](https://github.com/databricks/terraform-provider-databricks/pull/5145))
 * Caching group membership in `databricks_group_member` to improve performance ([#4581](https://github.com/databricks/terraform-provider-databricks/pull/4581)).
diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go
index 575f2507c4..222cc983b0 100644
--- a/internal/providers/pluginfw/pluginfw_rollout_utils.go
+++ b/internal/providers/pluginfw/pluginfw_rollout_utils.go
@@ -19,6 +19,7 @@ import (
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/dashboards"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/library"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/notificationdestinations"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/pools"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/qualitymonitor"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/registered_model"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/serving"
@@ -62,6 +63,7 @@ var pluginFwOnlyDataSources = append(
 	catalog.DataSourceFunctions,
 	dashboards.DataSourceDashboards,
 	notificationdestinations.DataSourceNotificationDestinations,
+	pools.DataSourceInstancePool,
 	registered_model.DataSourceRegisteredModel,
 	registered_model.DataSourceRegisteredModelVersions,
 	serving.DataSourceServingEndpoints,
diff --git a/internal/providers/pluginfw/products/pools/data_instance_pool.go b/internal/providers/pluginfw/products/pools/data_instance_pool.go
new file mode 100644
index 0000000000..7aacc9a6c2
--- /dev/null
+++ b/internal/providers/pluginfw/products/pools/data_instance_pool.go
@@ -0,0 +1,110 @@
+package pools
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+
+	"github.com/databricks/databricks-sdk-go"
+	"github.com/databricks/databricks-sdk-go/service/compute"
+	"github.com/databricks/terraform-provider-databricks/common"
+	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
+	"github.com/databricks/terraform-provider-databricks/internal/service/compute_tf"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+const dataSourceName = "instance_pool"
+
+func DataSourceInstancePool() datasource.DataSource {
+	return &InstancePoolDataSource{}
+}
+
+var _ datasource.DataSourceWithConfigure = &InstancePoolDataSource{}
+
+type InstancePoolDataSource struct {
+	Client *common.DatabricksClient
+}
+
+type InstancePoolInfo struct {
+	Id types.String `tfsdk:"id"`
+	Name types.String `tfsdk:"name"`
+	PoolInfo types.Object `tfsdk:"pool_info"`
+}
+
+func (InstancePoolInfo) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder {
+	attrs["id"] = attrs["id"].SetComputed()
+	attrs["name"] = attrs["name"].SetRequired()
+	attrs["pool_info"] = attrs["pool_info"].SetComputed()
+
+	return attrs
+}
+
+func (InstancePoolInfo) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
+	return map[string]reflect.Type{
+		"pool_info": reflect.TypeOf(compute_tf.InstancePoolAndStats{}),
+	}
+}
+
+func (d *InstancePoolDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName)
+}
+
+func (d *InstancePoolDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+	attrs, blocks := tfschema.DataSourceStructToSchemaMap(ctx, InstancePoolInfo{}, nil)
+	resp.Schema = schema.Schema{
+		Attributes: attrs,
+		Blocks: blocks,
+	}
+}
+
+func (d *InstancePoolDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+	if d.Client == nil {
+		d.Client = pluginfwcommon.ConfigureDataSource(req, resp)
+	}
+}
+
+func (d *InstancePoolDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceName)
+	w, diags := d.Client.GetWorkspaceClient()
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	var poolInfo InstancePoolInfo
+	resp.Diagnostics.Append(req.Config.Get(ctx, &poolInfo)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	poolName := poolInfo.Name.ValueString()
+	pool, err := d.getInstancePoolByName(ctx, w, poolName)
+	if err != nil {
+		resp.Diagnostics.AddError(fmt.Sprintf("failed to get instance pool '%s'", poolName), err.Error())
+		return
+	}
+
+	var tfPool compute_tf.InstancePoolAndStats
+	resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, pool, &tfPool)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	poolInfo.Id = types.StringValue(pool.InstancePoolId)
+	poolInfo.Name = types.StringValue(pool.InstancePoolName)
+	poolInfo.PoolInfo = tfPool.ToObjectValue(ctx)
+	resp.Diagnostics.Append(resp.State.Set(ctx, poolInfo)...)
+}
+
+func (d *InstancePoolDataSource) getInstancePoolByName(ctx context.Context, w *databricks.WorkspaceClient, poolName string) (*compute.InstancePoolAndStats, error) {
+	pool, err := w.InstancePools.GetByInstancePoolName(ctx, poolName)
+	if err != nil {
+		return nil, err
+	}
+	return pool, nil
+}
diff --git a/internal/providers/pluginfw/products/pools/data_instance_pool_acc_test.go b/internal/providers/pluginfw/products/pools/data_instance_pool_acc_test.go
new file mode 100644
index 0000000000..0e1ab9f973
--- /dev/null
+++ b/internal/providers/pluginfw/products/pools/data_instance_pool_acc_test.go
@@ -0,0 +1,30 @@
+package pools_test
+
+import (
+	"testing"
+
+	"github.com/databricks/terraform-provider-databricks/internal/acceptance"
+)
+
+func TestAccDataSourceInstancePool(t *testing.T) {
+	acceptance.WorkspaceLevel(t, acceptance.Step{
+		Template: `
+		data "databricks_node_type" "smallest" {
+			local_disk = true
+		}
+
+		resource "databricks_instance_pool" "this" {
+			instance_pool_name = "tf-pool-{var.RANDOM}"
+			min_idle_instances = 0
+			max_capacity = 10
+			node_type_id = data.databricks_node_type.smallest.id
+			idle_instance_autotermination_minutes = 10
+		}
+
+		data "databricks_instance_pool" "this" {
+			name = databricks_instance_pool.this.instance_pool_name
+			depends_on = [databricks_instance_pool.this]
+		}
+		`,
+	})
+}
diff --git a/internal/providers/pluginfw/products/pools/data_instance_pool_test.go b/internal/providers/pluginfw/products/pools/data_instance_pool_test.go
new file mode 100644
index 0000000000..446b826495
--- /dev/null
+++ b/internal/providers/pluginfw/products/pools/data_instance_pool_test.go
@@ -0,0 +1,24 @@
+package pools
+
+import (
+	"context"
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestInstancePoolDataSource_Metadata(t *testing.T) {
+	d := &InstancePoolDataSource{}
+	ctx := context.Background()
+
+	req := datasource.MetadataRequest{
+		ProviderTypeName: "databricks",
+	}
+	resp := &datasource.MetadataResponse{}
+
+	d.Metadata(ctx, req, resp)
+
+	// Verify the data source type name is set correctly
+	assert.Contains(t, resp.TypeName, "instance_pool")
+}
diff --git a/internal/providers/sdkv2/sdkv2.go b/internal/providers/sdkv2/sdkv2.go
index bda107328e..178bb3c515 100644
--- a/internal/providers/sdkv2/sdkv2.go
+++ b/internal/providers/sdkv2/sdkv2.go
@@ -127,7 +127,6 @@ func DatabricksProvider(opts ...SdkV2ProviderOption) *schema.Provider {
 		"databricks_external_location": catalog.DataSourceExternalLocation().ToResource(),
 		"databricks_external_locations": catalog.DataSourceExternalLocations().ToResource(),
 		"databricks_group": scim.DataSourceGroup().ToResource(),
-		"databricks_instance_pool": pools.DataSourceInstancePool().ToResource(),
 		"databricks_instance_profiles": aws.DataSourceInstanceProfiles().ToResource(),
 		"databricks_jobs": jobs.DataSourceJobs().ToResource(),
 		"databricks_job": jobs.DataSourceJob().ToResource(),
diff --git a/pools/data_instance_pool.go b/pools/data_instance_pool.go
deleted file mode 100644
index 1c733246d0..0000000000
--- a/pools/data_instance_pool.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package pools
-
-import (
-	"context"
-	"fmt"
-
-	"github.com/databricks/terraform-provider-databricks/common"
-	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
-)
-
-func getPool(poolsAPI InstancePoolsAPI, name string) (*InstancePoolAndStats, error) {
-	poolList, err := poolsAPI.List()
-	if err != nil {
-		return nil, err
-	}
-	for _, pool := range poolList.InstancePools {
-		if pool.InstancePoolName == name {
-			return &pool, nil
-		}
-	}
-
-	return nil, fmt.Errorf("instance pool '%s' doesn't exist", name)
-}
-
-// DataSourceInstancePool returns information about instance pool specified by name
-func DataSourceInstancePool() common.Resource {
-	type poolDetails struct {
-		Name string `json:"name"`
-		Attributes *InstancePoolAndStats `json:"pool_info,omitempty" tf:"computed"`
-	}
-	s := common.StructToSchema(poolDetails{}, nil)
-	return common.Resource{
-		Schema: s,
-		Read: func(ctx context.Context, d *schema.ResourceData, m *common.DatabricksClient) error {
-			name := d.Get("name").(string)
-			poolsAPI := NewInstancePoolsAPI(ctx, m)
-			pool, err := getPool(poolsAPI, name)
-			if err != nil {
-				return err
-			}
-			d.SetId(pool.InstancePoolID)
-			err = common.StructToData(poolDetails{Name: name, Attributes: pool}, s, d)
-			return err
-		},
-	}
-}
diff --git a/pools/data_instance_pool_test.go b/pools/data_instance_pool_test.go
deleted file mode 100644
index 4907feaf43..0000000000
--- a/pools/data_instance_pool_test.go
+++ /dev/null
@@ -1,90 +0,0 @@
-package pools
-
-import (
-	"context"
-	"testing"
-
-	"github.com/databricks/databricks-sdk-go/apierr"
-	"github.com/databricks/terraform-provider-databricks/common"
-	"github.com/databricks/terraform-provider-databricks/qa"
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
-)
-
-func TestDataSourceInstancePool(t *testing.T) {
-	d, err := qa.ResourceFixture{
-		Fixtures: []qa.HTTPFixture{
-			{
-				Method: "GET",
-				Resource: "/api/2.0/instance-pools/list",
-				Response: InstancePoolList{
-					InstancePools: []InstancePoolAndStats{
-						{
-							InstancePoolID: "abc",
-							InstancePoolName: "pool",
-							NodeTypeID: "node-type",
-						},
-					},
-				},
-			},
-		},
-		Read: true,
-		NonWritable: true,
-		Resource: DataSourceInstancePool(),
-		ID: ".",
-		State: map[string]any{
-			"name": "pool",
-		},
-	}.Apply(t)
-	require.NoError(t, err)
-	assert.Equal(t, "abc", d.Id())
-	assert.NotNil(t, d.Get("pool_info"))
-	assert.Equal(t, "node-type", d.Get("pool_info.0.node_type_id").(string))
-}
-
-func TestDataSourceInstancePoolsGetPool(t *testing.T) {
-	qa.HTTPFixturesApply(t, []qa.HTTPFixture{
-		{
-			Method: "GET",
-			Resource: "/api/2.0/instance-pools/list",
-			Status: 404,
-			Response: apierr.APIError{
-				Message: "searching_error",
-			},
-		},
-		{
-			Method: "GET",
-			Resource: "/api/2.0/instance-pools/list",
-			Response: InstancePoolList{},
-		},
-	}, func(ctx context.Context, client *common.DatabricksClient) {
-		poolsAPI := NewInstancePoolsAPI(ctx, client)
-
-		_, err := getPool(poolsAPI, "searching_error")
-		assert.EqualError(t, err, "searching_error")
-
-		_, err = getPool(poolsAPI, "unknown")
-		assert.EqualError(t, err, "instance pool 'unknown' doesn't exist")
-	})
-}
-
-func TestDataSourceInstancePool_NotFound(t *testing.T) {
-	qa.ResourceFixture{
-		Fixtures: []qa.HTTPFixture{
-			{
-				Method: "GET",
-				Resource: "/api/2.0/instance-pools/list",
-				Response: InstancePoolList{
-					InstancePools: []InstancePoolAndStats{},
-				},
-			},
-		},
-		Read: true,
-		NonWritable: true,
-		Resource: DataSourceInstancePool(),
-		ID: ".",
-		State: map[string]any{
-			"name": "Unknown",
-		},
-	}.ExpectError(t, "instance pool 'Unknown' doesn't exist")
-}