diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 4e8cc349..0c0821a7 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -101,7 +101,7 @@ jobs:
- name: Go Coverage
uses: gwatts/go-coverage-action@v1.3.0
with:
- coverage-threshold: 6.7 # current (2023.08.07) state, should be increased - ideally up to at least 50% - as soon as possible
+ coverage-threshold: 7.2 # current (2023.10.10) state
cover-pkg: ./...
ignore-pattern: |
/cdp-sdk-go/
diff --git a/docs/resources/datalake_gcp_datalake.md b/docs/resources/datalake_gcp_datalake.md
new file mode 100644
index 00000000..f0606172
--- /dev/null
+++ b/docs/resources/datalake_gcp_datalake.md
@@ -0,0 +1,154 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "cdp_datalake_gcp_datalake Resource - terraform-provider-cdp"
+subcategory: ""
+description: |-
+ A Data Lake is a service which provides a protective ring around the data stored in a cloud object store, including authentication, authorization, and governance support.
+---
+
+# cdp_datalake_gcp_datalake (Resource)
+
+A Data Lake is a service which provides a protective ring around the data stored in a cloud object store, including authentication, authorization, and governance support.
+
+
+
+
+## Schema
+
+### Required
+
+- `datalake_name` (String)
+- `environment_name` (String)
+- `instance_profile` (String)
+- `storage_location_base` (String)
+
+### Optional
+
+- `custom_instance_groups` (Attributes Set) (see [below for nested schema](#nestedatt--custom_instance_groups))
+- `enable_ranger_raz` (Boolean)
+- `image` (Attributes) (see [below for nested schema](#nestedatt--image))
+- `java_version` (Number)
+- `multi_az` (Boolean)
+- `polling_options` (Attributes) Polling related configuration options that could specify various values that will be used during CDP resource creation. (see [below for nested schema](#nestedatt--polling_options))
+- `recipes` (Attributes Set) (see [below for nested schema](#nestedatt--recipes))
+- `runtime` (String)
+- `scale` (String)
+- `tags` (Map of String)
+
+### Read-Only
+
+- `certificate_expiration_state` (String)
+- `cloudera_manager` (Attributes) (see [below for nested schema](#nestedatt--cloudera_manager))
+- `creation_date` (String)
+- `crn` (String)
+- `endpoints` (Attributes Set) (see [below for nested schema](#nestedatt--endpoints))
+- `environment_crn` (String)
+- `id` (String) The ID of this resource.
+- `instance_groups` (Attributes Set) (see [below for nested schema](#nestedatt--instance_groups))
+- `product_versions` (Attributes Set) (see [below for nested schema](#nestedatt--product_versions))
+- `status` (String)
+- `status_reason` (String)
+
+
+### Nested Schema for `custom_instance_groups`
+
+Required:
+
+- `name` (String)
+
+Optional:
+
+- `instance_type` (String)
+
+
+
+### Nested Schema for `image`
+
+Required:
+
+- `id` (String)
+
+Optional:
+
+- `catalog` (String)
+
+
+
+### Nested Schema for `polling_options`
+
+Optional:
+
+- `polling_timeout` (Number) Timeout value in minutes that specifies for how long should the polling go for resource creation/deletion.
+
+
+
+### Nested Schema for `recipes`
+
+Required:
+
+- `instance_group_name` (String)
+- `recipe_names` (Attributes Set) (see [below for nested schema](#nestedatt--recipes--recipe_names))
+
+
+### Nested Schema for `recipes.recipe_names`
+
+
+
+
+### Nested Schema for `cloudera_manager`
+
+Read-Only:
+
+- `cloudera_manager_repository_url` (String)
+- `cloudera_manager_server_url` (String)
+- `version` (String)
+
+
+
+### Nested Schema for `endpoints`
+
+Read-Only:
+
+- `display_name` (String)
+- `knox_service` (String)
+- `mode` (String)
+- `open` (Boolean)
+- `service_name` (String)
+- `service_url` (String)
+
+
+
+### Nested Schema for `instance_groups`
+
+Read-Only:
+
+- `instances` (Attributes Set) (see [below for nested schema](#nestedatt--instance_groups--instances))
+- `name` (String)
+
+
+### Nested Schema for `instance_groups.instances`
+
+Read-Only:
+
+- `discovery_fqdn` (String)
+- `id` (String)
+- `instance_group` (String)
+- `instance_status` (String)
+- `instance_type_val` (String)
+- `private_ip` (String)
+- `public_ip` (String)
+- `ssh_port` (Number)
+- `state` (String)
+- `status_reason` (String)
+
+
+
+
+### Nested Schema for `product_versions`
+
+Read-Only:
+
+- `name` (String)
+- `version` (String)
+
+
diff --git a/examples/resources/cdp_datalake_gcp_datalake/resource.tf b/examples/resources/cdp_datalake_gcp_datalake/resource.tf
new file mode 100644
index 00000000..2b3fbebe
--- /dev/null
+++ b/examples/resources/cdp_datalake_gcp_datalake/resource.tf
@@ -0,0 +1,34 @@
+// Copyright 2023 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+resource "cdp_datalake_gcp_datalake" "example" {
+ datalake_name = ""
+ environment_name = ""
+ cloud_provider_configuration = {
+ service_account_email = ""
+ storage_location = ""
+ }
+}
+
+output "name" {
+ value = cdp_datalake_gcp_datalake.example.datalake_name
+}
+
+output "environment" {
+ value = cdp_datalake_gcp_datalake.example.environment_name
+}
+
+output "service_account_email" {
+ value = cdp_datalake_gcp_datalake.example.cloud_provider_configuration.service_account_email
+}
+
+output "storage_location" {
+ value = cdp_datalake_gcp_datalake.example.cloud_provider_configuration.storage_location
+}
\ No newline at end of file
diff --git a/provider/provider.go b/provider/provider.go
index 0b94a31c..0ca28f3c 100644
--- a/provider/provider.go
+++ b/provider/provider.go
@@ -229,6 +229,7 @@ func (p *CdpProvider) Resources(_ context.Context) []func() resource.Resource {
environments.NewGcpCredentialResource,
datalake.NewAwsDatalakeResource,
datalake.NewAzureDatalakeResource,
+ datalake.NewGcpDatalakeResource,
iam.NewGroupResource,
datahub.NewAwsDatahubResource,
datahub.NewAzureDatahubResource,
diff --git a/resources/datalake/common_schema.go b/resources/datalake/common_schema.go
new file mode 100644
index 00000000..2319805c
--- /dev/null
+++ b/resources/datalake/common_schema.go
@@ -0,0 +1,129 @@
+// Copyright 2023 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package datalake
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+var generalAttributes = map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "polling_options": schema.SingleNestedAttribute{
+ MarkdownDescription: "Polling related configuration options that could specify various values that will be used during CDP resource creation.",
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "polling_timeout": schema.Int64Attribute{
+ MarkdownDescription: "Timeout value in minutes that specifies for how long should the polling go for resource creation/deletion.",
+ Default: int64default.StaticInt64(60),
+ Computed: true,
+ Optional: true,
+ },
+ },
+ },
+ "creation_date": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "crn": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "datalake_name": schema.StringAttribute{
+ Required: true,
+ },
+ "enable_ranger_raz": schema.BoolAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.Bool{
+ boolplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "environment_crn": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "environment_name": schema.StringAttribute{
+ Required: true,
+ },
+ "image": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "catalog": schema.StringAttribute{
+ Optional: true,
+ },
+ "id": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ "java_version": schema.Int64Attribute{
+ Optional: true,
+ },
+ "recipes": schema.SetNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "instance_group_name": schema.StringAttribute{
+ Required: true,
+ },
+ "recipe_names": schema.SetNestedAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ "runtime": schema.StringAttribute{
+ Optional: true,
+ },
+ "scale": schema.StringAttribute{
+ Computed: true,
+ Optional: true,
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "status_reason": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "multi_az": schema.BoolAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.Bool{
+ boolplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "tags": schema.MapAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ },
+}
diff --git a/resources/datalake/common_schema_test.go b/resources/datalake/common_schema_test.go
new file mode 100644
index 00000000..de7d73be
--- /dev/null
+++ b/resources/datalake/common_schema_test.go
@@ -0,0 +1,150 @@
+// Copyright 2023 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package datalake
+
+import (
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+)
+
+type TestCaseStructure struct {
+ name string
+ field string
+ computed bool
+ shouldBeRequired bool
+}
+
+var commonElementCaseSet = []TestCaseStructure{
+ {
+ name: "'id' field must exist",
+ field: "id",
+ computed: true,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'polling_options' should exist",
+ field: "polling_options",
+ computed: false,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'creation_date' should exist",
+ field: "creation_date",
+ computed: true,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'crn' should exist",
+ field: "crn",
+ computed: true,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'datalake_name' must exist",
+ field: "datalake_name",
+ computed: false,
+ shouldBeRequired: true,
+ },
+ {
+ name: "'enable_ranger_raz' should exist",
+ field: "enable_ranger_raz",
+ computed: true,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'environment_crn' should exist",
+ field: "environment_crn",
+ computed: true,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'environment_name' must exist",
+ field: "environment_name",
+ computed: false,
+ shouldBeRequired: true,
+ },
+ {
+ name: "'image' should exist",
+ field: "image",
+ computed: false,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'java_version' should exist",
+ field: "java_version",
+ computed: false,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'recipes' should exist",
+ field: "recipes",
+ computed: false,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'runtime' should exist",
+ field: "runtime",
+ computed: false,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'scale' should exist",
+ field: "scale",
+ computed: true,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'status' should exist",
+ field: "status",
+ computed: true,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'status_reason' should exist",
+ field: "status_reason",
+ computed: true,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'multi_az' should exist",
+ field: "multi_az",
+ computed: true,
+ shouldBeRequired: false,
+ },
+ {
+ name: "'tags' should exist",
+ field: "tags",
+ computed: false,
+ shouldBeRequired: false,
+ },
+}
+
+func TestRootElements(t *testing.T) {
+ SchemaContainsCommonElements(t, generalAttributes)
+}
+
+func SchemaContainsCommonElements(t *testing.T, providerSpecificSchema map[string]schema.Attribute) {
+ for _, test := range commonElementCaseSet {
+ t.Run(test.name, func(t *testing.T) {
+ if providerSpecificSchema[test.field] == nil {
+ t.Errorf("The following field does not exists, however it should: " + test.field)
+ t.FailNow()
+ }
+ if providerSpecificSchema[test.field].IsRequired() != test.shouldBeRequired {
+ t.Errorf("The '%s' filed's >required< property should be: %t", test.field, test.shouldBeRequired)
+ }
+ if providerSpecificSchema[test.field].IsComputed() != test.computed {
+ t.Errorf("The '%s' filed's >computed< property should be: %t", test.field, test.computed)
+ }
+ })
+ }
+}
diff --git a/resources/datalake/converter_gcp.go b/resources/datalake/converter_gcp.go
new file mode 100644
index 00000000..97ad6822
--- /dev/null
+++ b/resources/datalake/converter_gcp.go
@@ -0,0 +1,116 @@
+// Copyright 2023 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package datalake
+
+import (
+ "context"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+
+ "github.com/hashicorp/terraform-plugin-framework/types"
+
+ datalakemodels "github.com/cloudera/terraform-provider-cdp/cdp-sdk-go/gen/datalake/models"
+ "github.com/cloudera/terraform-provider-cdp/utils"
+)
+
+func datalakeDetailsToGcpDatalakeResourceModel(ctx context.Context, resp *datalakemodels.DatalakeDetails, model *gcpDatalakeResourceModel, pollingOptions *utils.PollingOptions, diags *diag.Diagnostics) {
+ model.ID = types.StringPointerValue(resp.Crn)
+ model.CreationDate = types.StringValue(resp.CreationDate.String())
+ model.Crn = types.StringPointerValue(resp.Crn)
+ model.DatalakeName = types.StringPointerValue(resp.DatalakeName)
+ model.EnableRangerRaz = types.BoolValue(resp.EnableRangerRaz)
+ model.PollingOptions = pollingOptions
+ endpoints := make([]*endpoint, len(resp.Endpoints.Endpoints))
+ for i, v := range resp.Endpoints.Endpoints {
+ endpoints[i] = &endpoint{
+ DisplayName: types.StringPointerValue(v.DisplayName),
+ KnoxService: types.StringPointerValue(v.KnoxService),
+ Mode: types.StringPointerValue(v.Mode),
+ Open: types.BoolPointerValue(v.Open),
+ ServiceName: types.StringPointerValue(v.ServiceName),
+ ServiceURL: types.StringPointerValue(v.ServiceURL),
+ }
+ }
+ model.EnvironmentCrn = types.StringValue(resp.EnvironmentCrn)
+ productVersions := make([]*productVersion, len(resp.ProductVersions))
+ for i, v := range resp.ProductVersions {
+ productVersions[i] = &productVersion{
+ Name: types.StringPointerValue(v.Name),
+ Version: types.StringPointerValue(v.Version),
+ }
+ }
+ model.Scale = types.StringValue(string(resp.Shape))
+ model.Status = types.StringValue(resp.Status)
+ model.StatusReason = types.StringValue(resp.StatusReason)
+}
+
+func toGcpDatalakeRequest(ctx context.Context, model *gcpDatalakeResourceModel) *datalakemodels.CreateGCPDatalakeRequest {
+ req := &datalakemodels.CreateGCPDatalakeRequest{}
+ if model.CloudProviderConfiguration != nil {
+ req.CloudProviderConfiguration = &datalakemodels.GCPConfigurationRequest{
+ ServiceAccountEmail: model.CloudProviderConfiguration.ServiceAccountEmail.ValueStringPointer(),
+ StorageLocation: model.CloudProviderConfiguration.StorageLocation.ValueStringPointer(),
+ }
+ }
+ req.CustomInstanceGroups = make([]*datalakemodels.SdxInstanceGroupRequest, len(model.CustomInstanceGroups))
+ for i, v := range model.CustomInstanceGroups {
+ req.CustomInstanceGroups[i] = &datalakemodels.SdxInstanceGroupRequest{
+ InstanceType: v.InstanceType.ValueString(),
+ Name: v.Name.ValueStringPointer(),
+ }
+ }
+ req.DatalakeName = model.DatalakeName.ValueStringPointer()
+ req.EnableRangerRaz = model.EnableRangerRaz.ValueBool()
+ req.EnvironmentName = model.EnvironmentName.ValueStringPointer()
+ if model.Image != nil {
+ req.Image = &datalakemodels.ImageRequest{
+ CatalogName: model.Image.CatalogName.ValueStringPointer(),
+ ID: model.Image.ID.ValueStringPointer(),
+ }
+ }
+ req.JavaVersion = int32(model.JavaVersion.ValueInt64())
+ req.Recipes = make([]*datalakemodels.InstanceGroupRecipeRequest, len(model.Recipes))
+ for i, v := range model.Recipes {
+ req.Recipes[i] = &datalakemodels.InstanceGroupRecipeRequest{
+ InstanceGroupName: v.InstanceGroupName.ValueStringPointer(),
+ RecipeNames: utils.FromSetValueToStringList(v.RecipeNames),
+ }
+ }
+ req.Runtime = model.Runtime.ValueString()
+ req.Scale = datalakemodels.DatalakeScaleType(model.Scale.ValueString())
+ if !model.Tags.IsNull() {
+ req.Tags = make([]*datalakemodels.DatalakeResourceGCPTagRequest, len(model.Tags.Elements()))
+ i := 0
+ for k, v := range model.Tags.Elements() {
+			val, dg := v.(basetypes.StringValuable).ToStringValue(ctx)
+			if !dg.HasError() {
+ req.Tags[i] = &datalakemodels.DatalakeResourceGCPTagRequest{
+ Key: &k,
+ Value: val.ValueStringPointer(),
+ }
+ }
+ i++
+ }
+ }
+ return req
+}
+
+func toGcpDatalakeResourceModel(resp *datalakemodels.CreateGCPDatalakeResponse, model *gcpDatalakeResourceModel) {
+ model.ID = types.StringPointerValue(resp.Datalake.DatalakeName)
+ model.CreationDate = types.StringValue(resp.Datalake.CreationDate.String())
+ model.Crn = types.StringPointerValue(resp.Datalake.Crn)
+ model.DatalakeName = types.StringPointerValue(resp.Datalake.DatalakeName)
+ model.EnableRangerRaz = types.BoolValue(resp.Datalake.EnableRangerRaz)
+ model.EnvironmentCrn = types.StringValue(resp.Datalake.EnvironmentCrn)
+ model.MultiAz = types.BoolValue(resp.Datalake.MultiAz)
+ model.Status = types.StringValue(resp.Datalake.Status)
+ model.StatusReason = types.StringValue(resp.Datalake.StatusReason)
+}
diff --git a/resources/datalake/converter_gcp_test.go b/resources/datalake/converter_gcp_test.go
new file mode 100644
index 00000000..3ddc6bf4
--- /dev/null
+++ b/resources/datalake/converter_gcp_test.go
@@ -0,0 +1,63 @@
+// Copyright 2023 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package datalake
+
+import (
+ "github.com/go-openapi/strfmt"
+ "testing"
+
+ datalakemodels "github.com/cloudera/terraform-provider-cdp/cdp-sdk-go/gen/datalake/models"
+)
+
+func TestToGcpDatalakeResourceModel(t *testing.T) {
+ dlCrn := "datalakeCrn"
+ name := "dlName"
+ creationDate := strfmt.NewDateTime()
+ input := &datalakemodels.CreateGCPDatalakeResponse{
+ Datalake: &datalakemodels.Datalake{
+ CertificateExpirationState: "someState",
+ CreationDate: creationDate,
+ Crn: &dlCrn,
+ DatalakeName: &name,
+ EnableRangerRaz: false,
+ EnvironmentCrn: "envCrn",
+ MultiAz: false,
+ Status: "some cool status",
+ StatusReason: "some more cole reason",
+ },
+ }
+ toModify := &gcpDatalakeResourceModel{}
+ toGcpDatalakeResourceModel(input, toModify)
+ if toModify.Crn.ValueString() != dlCrn {
+ t.Errorf("The CRN (%s) is not the expected: %s", toModify.Crn.ValueString(), dlCrn)
+ }
+ if toModify.DatalakeName.ValueString() != name {
+ t.Errorf("The Datalake name (%s) is not the expected: %s", toModify.DatalakeName.ValueString(), name)
+ }
+ if toModify.CreationDate.ValueString() != creationDate.String() {
+ t.Errorf("The creation date (%s) is not the expected: %s", toModify.CreationDate.ValueString(), creationDate.String())
+ }
+ if toModify.EnableRangerRaz.ValueBool() != input.Datalake.EnableRangerRaz {
+ t.Errorf("The EnableRangerRaz (%t) is not the expected: %t", toModify.EnableRangerRaz.ValueBool(), input.Datalake.EnableRangerRaz)
+ }
+ if toModify.EnvironmentCrn.ValueString() != input.Datalake.EnvironmentCrn {
+ t.Errorf("The CRN (%s) is not the expected: %s", toModify.EnvironmentCrn.ValueString(), input.Datalake.EnvironmentCrn)
+ }
+ if toModify.MultiAz.ValueBool() != input.Datalake.MultiAz {
+ t.Errorf("The MultiAz (%t) is not the expected: %t", toModify.MultiAz.ValueBool(), input.Datalake.MultiAz)
+ }
+ if toModify.Status.ValueString() != input.Datalake.Status {
+ t.Errorf("The Status (%s) is not the expected: %s", toModify.Status.ValueString(), input.Datalake.Status)
+ }
+ if toModify.StatusReason.ValueString() != input.Datalake.StatusReason {
+ t.Errorf("The StatusReason (%s) is not the expected: %s", toModify.StatusReason.ValueString(), input.Datalake.StatusReason)
+ }
+}
diff --git a/resources/datalake/model_gcp_datalake.go b/resources/datalake/model_gcp_datalake.go
new file mode 100644
index 00000000..47883d21
--- /dev/null
+++ b/resources/datalake/model_gcp_datalake.go
@@ -0,0 +1,74 @@
+// Copyright 2023 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package datalake
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework/types"
+
+ "github.com/cloudera/terraform-provider-cdp/utils"
+)
+
+type gcpDatalakeResourceModel struct {
+ ID types.String `tfsdk:"id"`
+
+ PollingOptions *utils.PollingOptions `tfsdk:"polling_options"`
+
+ CloudProviderConfiguration *gcpConfiguration `tfsdk:"cloud_provider_configuration"`
+
+ CreationDate types.String `tfsdk:"creation_date"`
+
+ Crn types.String `tfsdk:"crn"`
+
+ CustomInstanceGroups []*gcpDatalakeInstanceGroup `tfsdk:"custom_instance_groups"`
+
+ DatalakeName types.String `tfsdk:"datalake_name"`
+
+ EnableRangerRaz types.Bool `tfsdk:"enable_ranger_raz"`
+
+ EnvironmentCrn types.String `tfsdk:"environment_crn"`
+
+ EnvironmentName types.String `tfsdk:"environment_name"`
+
+ Image *gcpDatalakeImage `tfsdk:"image"`
+
+ JavaVersion types.Int64 `tfsdk:"java_version"`
+
+ MultiAz types.Bool `tfsdk:"multi_az"`
+
+ Recipes []*instanceGroupRecipe `tfsdk:"recipes"`
+
+ Runtime types.String `tfsdk:"runtime"`
+
+ Scale types.String `tfsdk:"scale"`
+
+ Status types.String `tfsdk:"status"`
+
+ StatusReason types.String `tfsdk:"status_reason"`
+
+ Tags types.Map `tfsdk:"tags"`
+}
+
+type gcpConfiguration struct {
+ ServiceAccountEmail types.String `tfsdk:"service_account_email"`
+ StorageLocation types.String `tfsdk:"storage_location"`
+}
+
+type gcpDatalakeInstanceGroup struct {
+ InstanceType types.String `tfsdk:"instance_type"`
+
+ Name types.String `tfsdk:"name"`
+}
+
+type gcpDatalakeImage struct {
+ CatalogName types.String `tfsdk:"catalog_name"`
+
+ ID types.String `tfsdk:"id"`
+}
diff --git a/resources/datalake/resource_gcp_datalake.go b/resources/datalake/resource_gcp_datalake.go
new file mode 100644
index 00000000..bbfc40f5
--- /dev/null
+++ b/resources/datalake/resource_gcp_datalake.go
@@ -0,0 +1,170 @@
+// Copyright 2023 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package datalake
+
+import (
+ "context"
+ "time"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+
+ "github.com/cloudera/terraform-provider-cdp/cdp-sdk-go/cdp"
+ "github.com/cloudera/terraform-provider-cdp/cdp-sdk-go/gen/datalake/client/operations"
+ datalakemodels "github.com/cloudera/terraform-provider-cdp/cdp-sdk-go/gen/datalake/models"
+ "github.com/cloudera/terraform-provider-cdp/utils"
+)
+
+var (
+ _ resource.Resource = &gcpDatalakeResource{}
+)
+
+type gcpDatalakeResource struct {
+ client *cdp.Client
+}
+
+func NewGcpDatalakeResource() resource.Resource {
+ return &gcpDatalakeResource{}
+}
+
+func (r *gcpDatalakeResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_datalake_gcp_datalake"
+}
+
+func (r *gcpDatalakeResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+ r.client = utils.GetCdpClientForResource(req, resp)
+}
+
+func (r *gcpDatalakeResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var state gcpDatalakeResourceModel
+ diags := req.Plan.Get(ctx, &state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ tflog.Error(ctx, "Got error while trying to set plan")
+ return
+ }
+
+ client := r.client.Datalake
+
+ params := operations.NewCreateGCPDatalakeParamsWithContext(ctx)
+ params.WithInput(toGcpDatalakeRequest(ctx, &state))
+ responseOk, err := client.Operations.CreateGCPDatalake(params)
+ if err != nil {
+ utils.AddDatalakeDiagnosticsError(err, &resp.Diagnostics, "create GCP Datalake")
+ return
+ }
+
+ datalakeResp := responseOk.Payload
+ toGcpDatalakeResourceModel(datalakeResp, &state)
+
+ diags = resp.State.Set(ctx, state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ if err := waitForDatalakeToBeRunning(ctx, state.DatalakeName.ValueString(), time.Hour, r.client.Datalake, state.PollingOptions); err != nil {
+ utils.AddDatalakeDiagnosticsError(err, &resp.Diagnostics, "create GCP Datalake")
+ return
+ }
+
+ descParams := operations.NewDescribeDatalakeParamsWithContext(ctx)
+ descParams.WithInput(&datalakemodels.DescribeDatalakeRequest{DatalakeName: state.DatalakeName.ValueStringPointer()})
+ descResponseOk, err := client.Operations.DescribeDatalake(descParams)
+ if err != nil {
+ utils.AddDatalakeDiagnosticsError(err, &resp.Diagnostics, "create GCP Datalake")
+ return
+ }
+
+ descDlResp := descResponseOk.Payload
+ datalakeDetailsToGcpDatalakeResourceModel(ctx, descDlResp.Datalake, &state, state.PollingOptions, &resp.Diagnostics)
+
+ diags = resp.State.Set(ctx, state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+}
+
+func (r *gcpDatalakeResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var state gcpDatalakeResourceModel
+ diags := req.State.Get(ctx, &state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ client := r.client.Datalake
+
+ params := operations.NewDescribeDatalakeParamsWithContext(ctx)
+ params.WithInput(&datalakemodels.DescribeDatalakeRequest{DatalakeName: state.DatalakeName.ValueStringPointer()})
+ responseOk, err := client.Operations.DescribeDatalake(params)
+ if err != nil {
+ if dlErr, ok := err.(*operations.DescribeDatalakeDefault); ok {
+ if cdp.IsDatalakeError(dlErr.GetPayload(), "NOT_FOUND", "") {
+ resp.Diagnostics.AddWarning("Resource not found on provider", "Data lake not found, removing from state.")
+ tflog.Warn(ctx, "Data lake not found, removing from state", map[string]interface{}{
+ "id": state.ID.ValueString(),
+ })
+ resp.State.RemoveResource(ctx)
+ return
+ }
+ }
+ utils.AddDatalakeDiagnosticsError(err, &resp.Diagnostics, "read GCP Datalake")
+ return
+ }
+
+ datalakeResp := responseOk.Payload
+ datalakeDetailsToGcpDatalakeResourceModel(ctx, datalakeResp.Datalake, &state, state.PollingOptions, &resp.Diagnostics)
+
+ diags = resp.State.Set(ctx, state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+}
+
+func (r *gcpDatalakeResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+}
+
+func (r *gcpDatalakeResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var state gcpDatalakeResourceModel
+ diags := req.State.Get(ctx, &state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ client := r.client.Datalake
+ params := operations.NewDeleteDatalakeParamsWithContext(ctx)
+ params.WithInput(&datalakemodels.DeleteDatalakeRequest{
+ DatalakeName: state.DatalakeName.ValueStringPointer(),
+ Force: false,
+ })
+ _, err := client.Operations.DeleteDatalake(params)
+ if err != nil {
+		if dlErr, ok := err.(*operations.DeleteDatalakeDefault); ok {
+ if cdp.IsDatalakeError(dlErr.GetPayload(), "NOT_FOUND", "") {
+ tflog.Info(ctx, "Data lake already deleted", map[string]interface{}{
+ "id": state.ID.ValueString(),
+ })
+ return
+ }
+ }
+ utils.AddDatalakeDiagnosticsError(err, &resp.Diagnostics, "delete GCP Datalake")
+ return
+ }
+
+ if err := waitForDatalakeToBeDeleted(ctx, state.DatalakeName.ValueString(), time.Hour, r.client.Datalake, state.PollingOptions); err != nil {
+ utils.AddDatalakeDiagnosticsError(err, &resp.Diagnostics, "delete GCP Datalake")
+ return
+ }
+}
diff --git a/resources/datalake/schema_gcp_datalake.go b/resources/datalake/schema_gcp_datalake.go
new file mode 100644
index 00000000..0b3e5ba3
--- /dev/null
+++ b/resources/datalake/schema_gcp_datalake.go
@@ -0,0 +1,54 @@
+// Copyright 2023 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package datalake
+
+import (
+ "context"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+
+ "github.com/cloudera/terraform-provider-cdp/utils"
+)
+
+func (r *gcpDatalakeResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ attr := map[string]schema.Attribute{}
+ utils.Append(attr, generalAttributes)
+ utils.Append(attr, map[string]schema.Attribute{
+ "custom_instance_groups": schema.SetNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "instance_type": schema.StringAttribute{
+ Optional: true,
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ "cloud_provider_configuration": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "service_account_email": schema.StringAttribute{
+ Required: true,
+ },
+ "storage_location": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ })
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "A Data Lake is a service which provides a protective ring around the data stored in a cloud object store, including authentication, authorization, and governance support.",
+ Attributes: attr,
+ }
+}
diff --git a/resources/datalake/schema_gcp_datalake_test.go b/resources/datalake/schema_gcp_datalake_test.go
new file mode 100644
index 00000000..33d766ef
--- /dev/null
+++ b/resources/datalake/schema_gcp_datalake_test.go
@@ -0,0 +1,64 @@
+// Copyright 2023 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package datalake
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "testing"
+)
+
+func TestCommonSchemaElementsExist(t *testing.T) {
+ SchemaContainsCommonElements(t, createFilledTestObject())
+}
+
+func TestGcpSpecificElements(t *testing.T) {
+ cases := []TestCaseStructure{
+ {
+ name: "'custom_instance_groups' should exist",
+ field: "custom_instance_groups",
+ computed: false,
+ shouldBeRequired: false,
+ },
+ {
+ name: "cloud_provider_configuration should exist",
+ field: "cloud_provider_configuration",
+ computed: false,
+ shouldBeRequired: true,
+ },
+ }
+
+ underTestAttributes := createFilledTestObject()
+
+ for _, test := range cases {
+ t.Run(test.name, func(t *testing.T) {
+ if underTestAttributes[test.field] == nil {
+ t.Errorf("The following field does not exists, however it should: " + test.field)
+ t.FailNow()
+ }
+ if underTestAttributes[test.field].IsRequired() != test.shouldBeRequired {
+ t.Errorf("The '%s' filed's >required< property should be: %t", test.field, test.shouldBeRequired)
+ }
+ if underTestAttributes[test.field].IsComputed() != test.computed {
+ t.Errorf("The '%s' filed's >computed< property should be: %t", test.field, test.computed)
+ }
+ })
+ }
+}
+
+func createFilledTestObject() map[string]schema.Attribute {
+	res := &gcpDatalakeResource{}
+
+ schemaResponse := &resource.SchemaResponse{}
+ res.Schema(nil, resource.SchemaRequest{}, schemaResponse)
+
+ return schemaResponse.Schema.Attributes
+}