5 changes: 5 additions & 0 deletions .changes/unreleased/changed-20260210-082444.yaml
@@ -0,0 +1,5 @@
kind: changed
body: migrate `fabric_spark_environment_settings` resource/data-source to use GA APIs
time: 2026-02-10T08:24:44.10205937Z
custom:
Issue: "809"
17 changes: 13 additions & 4 deletions docs/data-sources/spark_environment_settings.md
@@ -5,7 +5,6 @@ subcategory: ""
description: |-
The Spark Environment Settings data-source allows you to retrieve details about a Fabric Spark Environment Settings https://learn.microsoft.com/fabric/data-engineering/environment-manage-compute.
-> This data-source supports Service Principal authentication.
~> This data-source is in preview. To access it, you must explicitly enable the preview mode in the provider level configuration.
---

# fabric_spark_environment_settings (Data Source)
@@ -14,8 +13,6 @@ The Spark Environment Settings data-source allows you to retrieve details about

-> This data-source supports Service Principal authentication.

~> This data-source is in **preview**. To access it, you must explicitly enable the `preview` mode in the provider level configuration.

## Example Usage

```terraform
@@ -49,7 +46,7 @@ data "fabric_spark_environment_settings" "example" {
- `id` (String) The Spark Environment Settings ID.
- `pool` (Attributes) Environment pool. (see [below for nested schema](#nestedatt--pool))
- `runtime_version` (String) [Runtime](https://review.learn.microsoft.com/fabric/data-engineering/runtime) version. Value must be one of : `1.1`, `1.2`, `1.3`.
- `spark_properties` (Map of String) A map of key/value pairs of Spark properties.
- `spark_properties` (Attributes List) A list of Spark properties. (see [below for nested schema](#nestedatt--spark_properties))

<a id="nestedatt--timeouts"></a>

@@ -78,3 +75,15 @@ Read-Only:
- `id` (String) The Pool ID.
- `name` (String) The Pool name. `Starter Pool` means using the starting pool.
- `type` (String) The Pool type. Value must be one of : `Capacity`, `Workspace`.

<a id="nestedatt--spark_properties"></a>

### Nested Schema for `spark_properties`

Read-Only:

- `key` (String) The Spark property key. The key:
  - must start with 'spark.'
  - cannot contain whitespace
  - dot '.' is allowed, but not at the start or end of the key.
- `value` (String) The Spark property value.
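
For example, a minimal sketch of reading these properties back (the IDs are placeholders, and the `workspace_id`/`environment_id` attribute names are assumed from the schema above):

```terraform
data "fabric_spark_environment_settings" "example" {
  workspace_id   = "00000000-0000-0000-0000-000000000000" # placeholder
  environment_id = "11111111-1111-1111-1111-111111111111" # placeholder
}

# Each list element exposes the read-only `key` and `value` attributes.
output "spark_property_keys" {
  value = [for p in data.fabric_spark_environment_settings.example.spark_properties : p.key]
}
```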
28 changes: 18 additions & 10 deletions docs/resources/spark_environment_settings.md
@@ -5,7 +5,6 @@ subcategory: ""
description: |-
The Spark Environment Settings resource allows you to manage a Fabric Spark Environment Settings https://learn.microsoft.com/fabric/data-engineering/environment-manage-compute.
-> This resource supports Service Principal authentication.
~> This resource is in preview. To access it, you must explicitly enable the preview mode in the provider level configuration.
---

# fabric_spark_environment_settings (Resource)
@@ -14,8 +13,6 @@ The Spark Environment Settings resource allows you to manage a Fabric [Spark Env

-> This resource supports Service Principal authentication.

~> This resource is in **preview**. To access it, you must explicitly enable the `preview` mode in the provider level configuration.

## Example Usage

```terraform
@@ -44,11 +41,13 @@ resource "fabric_spark_environment_settings" "example" {
*/
}

spark_properties = {
/*
spark_properties = [
{
/*
your settings here
*/
}
}
]
}
```

@@ -70,10 +69,7 @@ resource "fabric_spark_environment_settings" "example" {
- `executor_memory` (String) Spark executor memory. Value must be one of : `28g`, `56g`, `112g`, `224g`, `400g`.
- `pool` (Attributes) Environment pool. (see [below for nested schema](#nestedatt--pool))
- `runtime_version` (String) [Runtime](https://review.learn.microsoft.com/fabric/data-engineering/runtime) version. Value must be one of : `1.1`, `1.2`, `1.3`.
- `spark_properties` (Map of String) A map of key/value pairs of Spark properties. Key must satisfy all validations: Spark properties:
- must starts with 'spark.'
- cannot contains any white spaces
- dot '.' is allowed but not at the start or end of the property key.
- `spark_properties` (Attributes List) A list of Spark properties. (see [below for nested schema](#nestedatt--spark_properties))
- `timeouts` (Attributes) (see [below for nested schema](#nestedatt--timeouts))

### Read-Only
@@ -103,6 +99,18 @@ Read-Only:

- `id` (String) The Pool ID.

<a id="nestedatt--spark_properties"></a>

### Nested Schema for `spark_properties`

Required:

- `key` (String) The Spark property key. The key:
  - must start with 'spark.'
  - cannot contain whitespace
  - dot '.' is allowed, but not at the start or end of the key.
- `value` (String) The Spark property value.
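
As a concrete sketch of a valid entry (the property name is just an illustrative Spark setting; the IDs are placeholders):

```terraform
resource "fabric_spark_environment_settings" "example" {
  workspace_id   = "00000000-0000-0000-0000-000000000000" # placeholder
  environment_id = "11111111-1111-1111-1111-111111111111" # placeholder

  spark_properties = [
    {
      key   = "spark.sql.shuffle.partitions" # starts with "spark.", no whitespace
      value = "200"
    }
  ]
}
```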

<a id="nestedatt--timeouts"></a>

### Nested Schema for `timeouts`
@@ -23,9 +23,11 @@ resource "fabric_spark_environment_settings" "example" {
*/
}

spark_properties = {
/*
spark_properties = [
{
/*
your settings here
*/
}
}
]
}
2 changes: 1 addition & 1 deletion internal/services/sparkenvsettings/base.go
@@ -22,6 +22,6 @@ var ItemTypeInfo = tftypeinfo.TFTypeInfo{ //nolint:gochecknoglobals
Name: "Spark Environment Settings",
Type: "spark_environment_settings",
DocsURL: "https://learn.microsoft.com/fabric/data-engineering/environment-manage-compute",
IsPreview: true,
IsPreview: false,
IsSPNSupported: true,
}
@@ -104,22 +104,22 @@ func (d *dataSourceSparkEnvironmentSettings) Read(ctx context.Context, req datas
}

func (d *dataSourceSparkEnvironmentSettings) get(ctx context.Context, model *dataSourceSparkEnvironmentSettingsModel) diag.Diagnostics {
var respEntity fabenvironment.SparkComputePreview
var respEntity fabenvironment.SparkCompute

if model.PublicationStatus.ValueString() == SparkEnvironmentPublicationStatusPublished {
respGet, err := d.publishedClient.GetSparkComputePreview(ctx, model.WorkspaceID.ValueString(), model.EnvironmentID.ValueString(), true, nil)
respGet, err := d.publishedClient.GetSparkCompute(ctx, model.WorkspaceID.ValueString(), model.EnvironmentID.ValueString(), false, nil)
if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
return diags
}

respEntity = respGet.SparkComputePreview
respEntity = respGet.SparkCompute
} else {
respGet, err := d.stagingClient.GetSparkComputePreview(ctx, model.WorkspaceID.ValueString(), model.EnvironmentID.ValueString(), true, nil)
respGet, err := d.stagingClient.GetSparkCompute(ctx, model.WorkspaceID.ValueString(), model.EnvironmentID.ValueString(), false, nil)
if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
return diags
}

respEntity = respGet.SparkComputePreview
respEntity = respGet.SparkCompute
}

return model.set(ctx, respEntity)
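
A minimal sketch of how the staging branch above is reached from configuration (the `publication_status` attribute name and the `"Staging"` value are inferred from this code and should be treated as assumptions):

```terraform
data "fabric_spark_environment_settings" "staging" {
  workspace_id       = "00000000-0000-0000-0000-000000000000" # placeholder
  environment_id     = "11111111-1111-1111-1111-111111111111" # placeholder
  publication_status = "Staging" # any value other than "Published" routes to the staging client
}
```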
113 changes: 68 additions & 45 deletions internal/services/sparkenvsettings/models.go
@@ -5,7 +5,6 @@ package sparkenvsettings

import (
"context"
"encoding/json"

timeoutsd "github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
timeoutsr "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
@@ -33,56 +32,33 @@ type baseSparkEnvironmentSettingsModel struct {
ExecutorMemory types.String `tfsdk:"executor_memory"`
Pool supertypes.SingleNestedObjectValueOf[instancePoolPropertiesModel] `tfsdk:"pool"`
RuntimeVersion types.String `tfsdk:"runtime_version"`
SparkProperties supertypes.MapValueOf[types.String] `tfsdk:"spark_properties"`
SparkProperties supertypes.ListNestedObjectValueOf[sparkPropertyModel] `tfsdk:"spark_properties"`
}

func (to *baseSparkEnvironmentSettingsModel) set(ctx context.Context, from fabenvironment.SparkComputePreview) diag.Diagnostics {
var diags diag.Diagnostics

func (to *baseSparkEnvironmentSettingsModel) set(ctx context.Context, from fabenvironment.SparkCompute) diag.Diagnostics {
to.DriverCores = types.Int32PointerValue(from.DriverCores)
to.DriverMemory = types.StringPointerValue(from.DriverMemory)
to.DriverMemory = types.StringPointerValue((*string)(from.DriverMemory))
to.ExecutorCores = types.Int32PointerValue(from.ExecutorCores)
to.ExecutorMemory = types.StringPointerValue(from.ExecutorMemory)
to.ExecutorMemory = types.StringPointerValue((*string)(from.ExecutorMemory))
to.RuntimeVersion = types.StringPointerValue(from.RuntimeVersion)

var sparkProperties map[string]string

sparkPropertiesBytes, err := json.Marshal(from.SparkProperties)
if err != nil {
diags.AddError(
"failed to marshal Spark properties",
err.Error(),
)

return diags
}

err = json.Unmarshal(sparkPropertiesBytes, &sparkProperties)
if err != nil {
diags.AddError(
"failed to unmarshal Spark properties",
err.Error(),
)

return diags
}

sparkPropertiesMap := supertypes.NewMapValueOfNull[types.String](ctx)
sparkPropertiesList := supertypes.NewListNestedObjectValueOfNull[sparkPropertyModel](ctx)

if len(sparkProperties) > 0 {
sparkPropertiesTF := make(map[string]types.String)
if len(from.SparkProperties) > 0 {
slice := make([]*sparkPropertyModel, 0, len(from.SparkProperties))

for k, v := range sparkProperties {
sparkPropertiesTF[k] = types.StringValue(v)
for _, prop := range from.SparkProperties {
sparkPropModel := &sparkPropertyModel{}
sparkPropModel.set(prop)
slice = append(slice, sparkPropModel)
}

sparkPropertiesMap, diags = supertypes.NewMapValueOfMap(ctx, sparkPropertiesTF)
if diags.HasError() {
if diags := sparkPropertiesList.Set(ctx, slice); diags.HasError() {
return diags
}
}

to.SparkProperties = sparkPropertiesMap
to.SparkProperties = sparkPropertiesList

dynamicExecutorAllocation := supertypes.NewSingleNestedObjectValueOfNull[dynamicExecutorAllocationPropertiesModel](ctx)

@@ -134,7 +110,7 @@ type resourceSparkEnvironmentSettingsModel struct {
}

type requestUpdateSparkEnvironmentSettings struct {
fabenvironment.UpdateEnvironmentSparkComputeRequestPreview
fabenvironment.UpdateEnvironmentSparkComputeRequest
}

func (to *requestUpdateSparkEnvironmentSettings) set(ctx context.Context, from resourceSparkEnvironmentSettingsModel) diag.Diagnostics { //nolint:gocognit, gocyclo
@@ -143,15 +119,15 @@ func (to *requestUpdateSparkEnvironmentSettings) set(ctx context.Context, from r
}

if !from.DriverMemory.IsNull() && !from.DriverMemory.IsUnknown() {
to.DriverMemory = from.DriverMemory.ValueStringPointer()
to.DriverMemory = (*fabenvironment.CustomPoolMemory)(from.DriverMemory.ValueStringPointer())
}

if !from.ExecutorCores.IsNull() && !from.ExecutorCores.IsUnknown() {
to.ExecutorCores = from.ExecutorCores.ValueInt32Pointer()
}

if !from.ExecutorMemory.IsNull() && !from.ExecutorMemory.IsUnknown() {
to.ExecutorMemory = from.ExecutorMemory.ValueStringPointer()
to.ExecutorMemory = (*fabenvironment.CustomPoolMemory)(from.ExecutorMemory.ValueStringPointer())
}

if !from.RuntimeVersion.IsNull() && !from.RuntimeVersion.IsUnknown() {
@@ -214,15 +190,23 @@ func (to *requestUpdateSparkEnvironmentSettings) set(ctx context.Context, from r
return diags
}

sparkPropertiesMap := make(map[string]string)
sparkPropertiesSlice := make([]fabenvironment.SparkProperty, 0, len(sparkProperties))

for k, v := range sparkProperties {
if !v.IsNull() && !v.IsUnknown() {
sparkPropertiesMap[k] = v.ValueString()
for _, prop := range sparkProperties {
var reqProp fabenvironment.SparkProperty

if !prop.Key.IsNull() && !prop.Key.IsUnknown() {
reqProp.Key = prop.Key.ValueStringPointer()
}

if !prop.Value.IsNull() && !prop.Value.IsUnknown() {
reqProp.Value = prop.Value.ValueStringPointer()
}

sparkPropertiesSlice = append(sparkPropertiesSlice, reqProp)
}

to.SparkProperties = sparkPropertiesMap
to.SparkProperties = sparkPropertiesSlice
}

return nil
@@ -255,3 +239,42 @@ func (to *instancePoolPropertiesModel) set(from fabenvironment.InstancePool) {
to.Name = types.StringPointerValue(from.Name)
to.Type = types.StringPointerValue((*string)(from.Type))
}

type sparkPropertyModel struct {
Key types.String `tfsdk:"key"`
Value types.String `tfsdk:"value"`
}

func (to *sparkPropertyModel) set(from fabenvironment.SparkProperty) {
to.Key = types.StringPointerValue(from.Key)
to.Value = types.StringPointerValue(from.Value)
}

// diffSparkProperties merges planned spark properties with current ones,
// adding null-value entries for any current keys not present in the plan.
// This ensures the API deletes properties that were removed from config.
func diffSparkProperties(planned, current []fabenvironment.SparkProperty) []fabenvironment.SparkProperty {
plannedKeys := make(map[string]struct{})

for _, p := range planned {
if p.Key != nil {
plannedKeys[*p.Key] = struct{}{}
}
}

result := make([]fabenvironment.SparkProperty, 0, len(planned)+len(current))
result = append(result, planned...)

for _, c := range current {
if c.Key != nil {
if _, exists := plannedKeys[*c.Key]; !exists {
result = append(result, fabenvironment.SparkProperty{
Key: c.Key,
Value: nil,
})
}
}
}

return result
}
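
To illustrate the deletion semantics with a hypothetical configuration change (HCL sketch; names and values are placeholders):

```terraform
resource "fabric_spark_environment_settings" "example" {
  workspace_id   = "00000000-0000-0000-0000-000000000000" # placeholder
  environment_id = "11111111-1111-1111-1111-111111111111" # placeholder

  # Suppose the previous apply also contained:
  #   { key = "spark.dynamicAllocation.enabled", value = "false" }
  # With that entry removed here, diffSparkProperties merges the plan with the
  # current remote settings and sends the removed key with a null value, so the
  # API deletes the property instead of leaving it orphaned on the environment.
  spark_properties = [
    {
      key   = "spark.sql.shuffle.partitions"
      value = "200"
    }
  ]
}
```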