From 242f792385de17ad2f3a8c998b8dcf5feb60a3c6 Mon Sep 17 00:00:00 2001 From: Rishi Anand Date: Thu, 22 Jun 2023 17:24:24 -0700 Subject: [PATCH] Modules edge native (#71) * edge-native modules changes * edge-native modules changes * Edge native module changes * edge-native modules changes * PLT-534: Edge native support release. --------- Co-authored-by: nikolay-spectro --- .../config/cluster/cluster-ehl-ams.yaml | 135 ++++++++++++++ examples/edge-native/mod_spectro_org.tf | 48 +++++ examples/edge-native/providers.tf | 29 +++ .../edge-native/terraform.template.tfvars | 4 + main.tf | 2 +- spectro-cluster-common.tf | 2 + spectro-cluster-edge-native.tf | 169 ++++++++++++++++++ 7 files changed, 388 insertions(+), 1 deletion(-) create mode 100644 examples/edge-native/config/cluster/cluster-ehl-ams.yaml create mode 100644 examples/edge-native/mod_spectro_org.tf create mode 100644 examples/edge-native/providers.tf create mode 100644 examples/edge-native/terraform.template.tfvars create mode 100644 spectro-cluster-edge-native.tf diff --git a/examples/edge-native/config/cluster/cluster-ehl-ams.yaml b/examples/edge-native/config/cluster/cluster-ehl-ams.yaml new file mode 100644 index 0000000..6c340bc --- /dev/null +++ b/examples/edge-native/config/cluster/cluster-ehl-ams.yaml @@ -0,0 +1,135 @@ +name: hospital-2 +cloudType: edge-native +skip_completion: true +tags: + - "skip_completion" +profiles: + infra: + name: opensuse-pxke-infra + context: tenant + version: 1.24.6-os-v15.4 + addons: + - name: org1-bootstrap-stable + version: 1.0.110 + - name: org1-security-services-bootstrap-stable + version: 1.0.5 + - name: org1-aca-bootstrap-stable + version: 1.0.13 + - name: org1-reloader-stable + version: 1.0.2 + - name: org1-iris-stable + version: 1.0.9 + - name: org1-logging-monitoring-services-stable + version: 1.0.83 + - name: org1-storage-monitoring-stable + version: 1.0.4 + - name: org1-metacontroller-stable + version: 1.0.8 + - name: org1-common-postgres-stable + version: 1.0.21 
+ - name: org1-security-postgres-stable + version: 1.0.22 + - name: org1-aca-crd-stable + version: 1.0.33 + - name: org1-logging-monitoring-aca-stable + version: 1.0.30 + - name: org1-security-system-gateway-stable + version: 1.0.60 + - name: org1-siteconfig-stable + version: 1.0.111 + packs: + - name: org2-site-config-aca-chart + version: 0.0.0-zd3a4f18 + override_type: params + params: + hw_pwd_enabled: true + ilo_enabled: true + dual_wan_enabled: true + registry: helm-blr-ees + - name: org1-security-certificate-management-stable + version: 1.0.55 + - name: org1-security-device-management-stable + version: 1.0.21 + - name: org1-system-health-stable + version: 1.0.69 + packs: + - name: org2-cpu-webcomponent + version: 0.1.0-z596c17c + override_type: params + params: + toggle_enabled: true + registry: helm-blr-ees + - name: org2-ram-webcomponent + version: 0.1.0-z8fec9e6 + override_type: params + params: + toggle_enabled: true + registry: helm-blr-ees + - name: org1-security-idam-stable + version: 1.0.106 + - name: org1-admin-console-stable + version: 1.0.89 + packs: + - name: eis-admin-console-dynamic-portal + version: 0.0.0-zf499b45 + override_type: params + params: + host_pwr_mgt_enabled: true + healthlink_enabled: false + registry: helm-blr-ees + - name: org1-security-api-gateway-stable + version: 1.0.65 + - name: org1-security-auditing-stable + version: 1.0.30 + - name: org1-imaging-device-manager-stable + version: 1.0.27 + - name: org1-aca-stable + version: 1.0.31 + packs: + - name: eis-ilo-aca-crd + version: 0.0.1-z0406cbc + override_type: params + params: + enabled: true + registry: helm-blr-ees + - name: org1-security-malware-management-stable + version: 1.0.41 + - name: org1-security-logging-monitoring-services-stable + version: 1.0.14 + - name: org1-security-admission-controller-stable + version: 1.0.33 + - name: org1-hardware-monitoring-stable + version: 1.0.11 + - name: org1-network-monitoring-stable + version: 1.0.6 + - name: 
org1-k8-cert-renewal-stable + version: 1.0.7 + - name: org1-licensingservice-stable + version: 1.0.34 + - name: org1-priority-scheduler-stable + version: 1.0.28 + - name: org1-rabbitmq-service-stable + version: 1.0.20 + - name: org1-backup-service-stable + version: 1.0.76 + packs: + - name: org2-minio + version: 0.0.1-z0ef7f65 + override_type: params + params: + mode: standalone + registry: helm-blr-ees + - name: org1-security-audit-viewer-stable + version: 1.0.21 + - name: org1-axone-profile + version: 1.26.13 +cloud_config: + ssh_key: "ssh-rsa AAqd spectrocloud2022" + vip: 10.184.225.107 + ntp_servers: ["pool.ntp.org"] +node_groups: + - name: master-pool + control_plane: true + control_plane_as_worker: true + count: 1 + host_uids: ["node-103", "node-104", "node-105"] \ No newline at end of file diff --git a/examples/edge-native/mod_spectro_org.tf b/examples/edge-native/mod_spectro_org.tf new file mode 100644 index 0000000..e045b81 --- /dev/null +++ b/examples/edge-native/mod_spectro_org.tf @@ -0,0 +1,48 @@ +locals { + accounts_params = { ACCOUNT_DEV_NAME = "dev-030", ACCOUNT_PROD_NAME = "prod-004" } + appliances_params = {} + bsl_params = { BSL_NAME = "qa-sharma" } + profile_params = { + SPECTRO_REPO_URL = "https://registry.spectrocloud.com", + REPO_URL = "593235963820.dkr.ecr.us-west-2.amazonaws.com", + OIDC_CLIENT_ID = "5ajs8pq0gatbgpjejld96fldrn", + OIDC_ISSUER_URL = "https://cognito-idp.us-east-1.amazonaws.com/us-east-1_ajvPoziaS", + RABBITMQ_PACK_VERSION = "8.15.2", + string = "$${string}", + ADDON_SPECTRO_REPO_URL = "https://addon-registry.spectrocloud.com", + } + projects_params = {} + clusters_params = {} +} + +module "SpectroOrg" { + source = "../../" + //source = "git::https://github.com/spectrocloud/terraform-spectrocloud-modules.git?ref=edge-native-changes" + + accounts = { + for k in fileset("config/account", "account-*.yaml") : + trimsuffix(k, ".yaml") => yamldecode(templatefile("config/account/${k}", local.accounts_params)) + } + + appliances = { + 
for k in fileset("config/appliance", "appliance-*.yaml") : + trimsuffix(k, ".yaml") => yamldecode(templatefile("config/appliance/${k}", local.appliances_params)) + } + + profiles = { + for k in fileset("config/profile", "profile-*.yaml") : + trimsuffix(k, ".yaml") => yamldecode(templatefile("config/profile/${k}", local.profile_params)) + } +} + + +module "SpectroProject" { + depends_on = [module.SpectroOrg] + source = "../../" + //source = "git::https://github.com/spectrocloud/terraform-spectrocloud-modules.git?ref=edge-native-changes" + + clusters = { + for k in fileset("config/cluster", "cluster-*.yaml") : + trimsuffix(k, ".yaml") => yamldecode(templatefile("config/cluster/${k}", local.clusters_params)) + } +} diff --git a/examples/edge-native/providers.tf b/examples/edge-native/providers.tf new file mode 100644 index 0000000..145a617 --- /dev/null +++ b/examples/edge-native/providers.tf @@ -0,0 +1,29 @@ +terraform { + required_providers { + + + spectrocloud = { + source = "spectrocloud/spectrocloud" + } + } +} + +variable "sc_host" { + description = "Spectro Cloud Endpoint" + default = "api.spectrocloud.com" +} + +variable "sc_api_key" { + description = "Spectro Cloud API key" +} + +variable "sc_project_name" { + description = "Spectro Cloud Project (e.g: Default)" + default = "Default" +} + +provider "spectrocloud" { + host = var.sc_host + api_key = var.sc_api_key + project_name = var.sc_project_name +} diff --git a/examples/edge-native/terraform.template.tfvars b/examples/edge-native/terraform.template.tfvars new file mode 100644 index 0000000..eeadb1b --- /dev/null +++ b/examples/edge-native/terraform.template.tfvars @@ -0,0 +1,4 @@ +# Spectro Cloud credentials +sc_host = "{enter Spectro Cloud API endpoint}" #e.g: api.spectrocloud.com (for SaaS) +sc_api_key = "{enter Spectro Cloud API Key}" #e.g: Q28GBs7ssdvNNkERWeWpqwSLfI1nnit6W +sc_project_name = "{enter Spectro Cloud project Name}" #e.g: Default \ No newline at end of file diff --git a/main.tf b/main.tf 
index 6240cf1..993cc5c 100644 --- a/main.tf +++ b/main.tf @@ -6,5 +6,5 @@ terraform { } } - required_version = "~> 1.4.4" + required_version = "> 1.4.4" } diff --git a/spectro-cluster-common.tf b/spectro-cluster-common.tf index be9991f..2dd8d32 100644 --- a/spectro-cluster-common.tf +++ b/spectro-cluster-common.tf @@ -17,6 +17,8 @@ locals { edge_vsphere_clusters = [for key in local.edge_vsphere_keys : lookup(local.cluster_map, key)] edge_keys = compact([for i, cluster in local.cluster_map : cluster.cloudType == "edge" ? i : ""]) edge_clusters = [for key in local.edge_keys : lookup(local.cluster_map, key)] + edge_native_keys = compact([for i, cluster in local.cluster_map : cluster.cloudType == "edge-native" ? i : ""]) + edge_native_clusters = [for key in local.edge_native_keys : lookup(local.cluster_map, key)] // all edge clusters (this is for appliance list) all_edge_clusters = setunion(local.libvirt_clusters) } diff --git a/spectro-cluster-edge-native.tf b/spectro-cluster-edge-native.tf new file mode 100644 index 0000000..9eb6d6a --- /dev/null +++ b/spectro-cluster-edge-native.tf @@ -0,0 +1,169 @@ +resource "spectrocloud_cluster_edge_native" "this" { + for_each = { for x in local.edge_native_clusters : x.name => x } + name = each.value.name + apply_setting = try(each.value.apply_setting, "") + tags = try(each.value.tags, []) + skip_completion = try(each.value.skip_completion, true) + + cloud_config { + ssh_key = try(each.value.cloud_config.ssh_key, "") + vip = try(each.value.cloud_config.vip, "") + ntp_servers = try(each.value.cloud_config.ntp_servers, []) + } + + dynamic "cluster_rbac_binding" { + for_each = try(each.value.cluster_rbac_binding, []) + content { + type = cluster_rbac_binding.value.type + namespace = try(cluster_rbac_binding.value.namespace, "") + + role = { + kind = cluster_rbac_binding.value.role.kind + name = cluster_rbac_binding.value.role.name + } + + dynamic "subjects" { + for_each = try(cluster_rbac_binding.value.subjects, []) + + content { + 
type = subjects.value.type + name = subjects.value.name + namespace = try(subjects.value.namespace, "") + } + } + } + } + + dynamic "namespaces" { + for_each = try(each.value.namespaces, []) + + content { + name = namespaces.value.name + resource_allocation = { + cpu_cores = try(namespaces.value.resource_allocation.cpu_cores, "") + memory_MiB = try(namespaces.value.resource_allocation.memory_MiB, "") + } + } + } + + #infra profile + cluster_profile { + id = (local.profile_map[format("%s%%%s%%%s", + each.value.profiles.infra.name, + try(each.value.profiles.infra.version, "1.0.0"), + try(each.value.profiles.infra.context, "project"))].id) + + dynamic "pack" { + for_each = try(each.value.profiles.infra.packs, []) + content { + name = pack.value.name + tag = try(pack.value.version, "") + registry_uid = try(local.all_registry_map[pack.value.registry][0], "") + type = (try(pack.value.is_manifest_pack, false)) ? "manifest" : "spectro" + values = "${(try(pack.value.is_manifest_pack, false)) ? + local.cluster-profile-pack-map[format("%s%%%s%%%s$%s",each.value.profiles.infra.name, try(each.value.profiles.infra.version, "1.0.0"), try(each.value.profiles.infra.context, "project"), pack.value.name)].values : + (pack.value.override_type == "values") ? + pack.value.values : + + (pack.value.override_type == "params" ? 
+ local.infra-pack-params-replaced[format("%s$%s%%%s%%%s$%s", each.value.name, each.value.profiles.infra.name, try(each.value.profiles.infra.version, "1.0.0"), try(each.value.profiles.infra.context, "project"), pack.value.name)] : + local.infra-pack-template-params-replaced[format("%s$%s%%%s%%%s$%s", each.value.name, each.value.profiles.infra.name, try(each.value.profiles.infra.version, "1.0.0"), try(each.value.profiles.infra.context, "project"), pack.value.name)]) + }" + + dynamic "manifest" { + for_each = try([local.infra_pack_manifests[format("%s$%s%%%s%%%s$%s", each.value.name, each.value.profiles.infra.name, try(each.value.profiles.infra.version, "1.0.0"), try(each.value.profiles.infra.context, "project"), pack.value.name)]], []) + content { + name = manifest.value.name + content = manifest.value.content + } + } + } + } + } + + dynamic "cluster_profile" { + for_each = try(each.value.profiles.addons, []) + + content { + id = (local.profile_map[format("%s%%%s%%%s", + cluster_profile.value.name, + try(cluster_profile.value.version, "1.0.0"), + try(cluster_profile.value.context, "project"))].id) + + dynamic "pack" { + for_each = try(cluster_profile.value.packs, []) + content { + name = pack.value.name + tag = try(pack.value.version, "") + registry_uid = try(local.all_registry_map[pack.value.registry][0], "") + type = (try(pack.value.is_manifest_pack, false)) ? "manifest" : "spectro" + values = "${(try(pack.value.is_manifest_pack, false)) ? + local.cluster-profile-pack-map[format("%s%%%s%%%s$%s", cluster_profile.value.name, try(cluster_profile.value.version, "1.0.0"), try(cluster_profile.value.context, "project"), pack.value.name)].values : + (pack.value.override_type == "values") ? + pack.value.values : + (pack.value.override_type == "params" ? 
+ local.addon_pack_params_replaced[format("%s$%s%%%s%%%s$%s", each.value.name, cluster_profile.value.name, try(cluster_profile.value.version, "1.0.0"), try(cluster_profile.value.context, "project"), pack.value.name)] : + local.addon_pack_template_params_replaced[format("%s$%s%%%s%%%s$%s", each.value.name, cluster_profile.value.name, try(cluster_profile.value.version, "1.0.0"), try(cluster_profile.value.context, "project"), pack.value.name)]) + }" + + dynamic "manifest" { + for_each = try(local.addon_pack_manifests[format("%s$%s%%%s%%%s$%s", each.value.name, cluster_profile.value.name, try(cluster_profile.value.version, "1.0.0"), try(cluster_profile.value.context, "project"), pack.value.name)], []) + content { + name = manifest.value.name + content = manifest.value.content + } + } + } + } + } + } + + dynamic "machine_pool" { + for_each = each.value.node_groups + content { + name = machine_pool.value.name + control_plane = try(machine_pool.value.control_plane, false) + control_plane_as_worker = try(machine_pool.value.control_plane_as_worker, false) + update_strategy = try(machine_pool.value.update_strategy, "RollingUpdateScaleOut") + + additional_labels = try(machine_pool.value.additional_labels, tomap({})) + host_uids = machine_pool.value.host_uids + + dynamic "taints" { + for_each = try(machine_pool.value.taints, []) + + content { + key = taints.value.key + value = taints.value.value + effect = taints.value.effect + } + } + } + } + + dynamic "backup_policy" { + for_each = try(tolist([each.value.backup_policy]), []) + content { + schedule = backup_policy.value.schedule + backup_location_id = local.bsl_map[backup_policy.value.backup_location] + prefix = backup_policy.value.prefix + expiry_in_hour = 7200 + include_disks = true + include_cluster_resources = true + } + } + + dynamic "scan_policy" { + for_each = try(tolist([each.value.scan_policy]), []) + content { + configuration_scan_schedule = scan_policy.value.configuration_scan_schedule + penetration_scan_schedule = 
scan_policy.value.penetration_scan_schedule + conformance_scan_schedule = scan_policy.value.conformance_scan_schedule + } + } + + timeouts { + create = try(each.value.timeouts.create, "60m") + delete = try(each.value.timeouts.delete, "60m") + } +}