@sowmyav27 Please have someone from QA test this backported bug fix for Rancher v2.6.11 using this test plan and rancher2 provider v2.0.0-rc4. Waiting for the CI to complete now... if you don't want this delayed, you can also test on rancher2 provider v2.0.0-rc3. It's not ideal to release on Fridays because HashiCorp is not active over the weekend, so I will most likely release sometime next Monday. Thank you for being patient as we hammer out the TF release processes. Reach out to me with any questions.
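For whoever picks this up: the main.tf below pins the provider source to terraform.local/local/rancher2, i.e. a locally built plugin rather than the public registry. One way to wire that up is a provider_installation override in the Terraform CLI config; this is only a minimal sketch, and the mirror path is an assumption, not something specified in this issue:

# ~/.terraformrc (sketch; point the mirror at wherever the rc4 build was placed,
# laid out in the host/namespace/type/version/os_arch structure Terraform expects)
provider_installation {
  filesystem_mirror {
    path    = "/home/<user>/.terraform.d/plugins"
    include = ["terraform.local/local/rancher2"]
  }
  direct {
    exclude = ["terraform.local/local/rancher2"]
  }
}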
With Docker on a single-node instance using Rancher v2.6.11.
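For reference, a single-node Rancher v2.6.11 instance like the one used here can be brought up with Docker roughly as follows (a sketch, not necessarily the exact command used for this verification; the port mappings are assumptions):

docker run -d --restart=unless-stopped -p 80:80 -p 443:443 --privileged rancher/rancher:v2.6.11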
Reproduced with rancher2 provider v1.25.0 on Rancher v2.6.11 (screenshot attached).

Verified with rancher2 provider v2.0.0-rc4 on Rancher v2.6.11 (screenshots attached).
Main.tf file used for testing:
terraform {
  required_providers {
    rancher2 = {
      source = "terraform.local/local/rancher2"
      version = "2.0.0-rc4"
    }
  }
}

provider "rancher2" {
  api_url = "<REDACTED>"
  token_key = "<REDACTED>"
  insecure = true
}

resource "rancher2_cloud_credential" "rancher2_cloud_credential" {
  name = "jkeslar-cc"
  amazonec2_credential_config {
    access_key = "<REDACTED>"
    secret_key = "<REDACTED>"
  }
}

resource "rancher2_machine_config_v2" "rancher2_machine_config_v2" {
  generate_name = "tf-rke2"
  amazonec2_config {
    ami = ""
    region = "<REDACTED>"
    security_group = ["<REDACTED>"]
    subnet_id = "<REDACTED>"
    vpc_id = "<REDACTED>"
    zone = "<REDACTED>"
  }
}

resource "rancher2_cluster_v2" "rancher2_cluster_v2" {
  name = "<REDACTED>"
  kubernetes_version = "<REDACTED>"
  enable_network_policy = false
  default_cluster_role_for_project_members = "user"
  rke_config {
    machine_pools {
      name = "pool1"
      cloud_credential_secret_name = rancher2_cloud_credential.rancher2_cloud_credential.id
      control_plane_role = false
      etcd_role = true
      worker_role = false
      quantity = 1
      machine_config {
        kind = rancher2_machine_config_v2.rancher2_machine_config_v2.kind
        name = rancher2_machine_config_v2.rancher2_machine_config_v2.name
      }
    }
    machine_pools {
      name = "pool2"
      cloud_credential_secret_name = rancher2_cloud_credential.rancher2_cloud_credential.id
      control_plane_role = true
      etcd_role = false
      worker_role = false
      quantity = 1
      machine_config {
        kind = rancher2_machine_config_v2.rancher2_machine_config_v2.kind
        name = rancher2_machine_config_v2.rancher2_machine_config_v2.name
      }
    }
    machine_pools {
      name = "pool3"
      cloud_credential_secret_name = rancher2_cloud_credential.rancher2_cloud_credential.id
      control_plane_role = false
      etcd_role = false
      worker_role = true
      quantity = 1
      machine_config {
        kind = rancher2_machine_config_v2.rancher2_machine_config_v2.kind
        name = rancher2_machine_config_v2.rancher2_machine_config_v2.name
      }
    }
  }
}
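For context on how the config above ties into the error quoted below: the failure surfaces during apply on affected provider versions (v1.25.0 in the reproduction above, and v1.22.1 per the original report), while with the rc4 build the same workflow should go through cleanly. A sketch of the standard workflow, nothing issue-specific:

terraform init
terraform apply   # fails with "Provider produced inconsistent final plan" on affected provider versions
terraform plan    # with the fixed provider, should report no changes after a successful apply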
When terraforming an RKE2 cluster I receive the following:

│ Error: Provider produced inconsistent final plan
│
│ When expanding the plan for rancher2_cluster_v2.test-cluster to include new values learned so far during apply, provider
│ "registry.terraform.io/rancher/rancher2" produced an invalid new value for
│ .rke_config[0].machine_pools[1].cloud_credential_secret_name: was cty.StringVal(""), but now
│ cty.StringVal("cattle-global-data:cc-trrz8").
│
│ This is a bug in the provider, which should be reported in the provider's own issue tracker.
This happens with version 1.22.1 of the provider, not with 1.21.0. /Anders.
SURE-5412 SURE-4866