Skip to content

fix(tke): [119871420] Lift the upper limit of tencentcloud_kubernetes_scale_worker #2850

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Sep 27, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .changelog/2850.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:enhancement
resource/tencentcloud_kubernetes_scale_worker: Lift the upper limit of 100
```
14 changes: 8 additions & 6 deletions tencentcloud/services/tke/resource_tc_kubernetes_scale_worker.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,15 @@ variable "scale_instance_type" {
default = "S2.LARGE16"
}

resource tencentcloud_kubernetes_scale_worker test_scale {
cluster_id = "cls-godovr32"
resource "tencentcloud_kubernetes_scale_worker" "example" {
cluster_id = "cls-godovr32"
desired_pod_num = 16

labels = {
"test1" = "test1",
"test2" = "test2",
}

worker_config {
count = 3
availability_zone = var.availability_zone
Expand Down Expand Up @@ -65,14 +67,14 @@ variable "scale_instance_type" {
default = "S2.LARGE16"
}

resource tencentcloud_kubernetes_scale_worker test_scale {
resource "tencentcloud_kubernetes_scale_worker" "example" {
cluster_id = "cls-godovr32"

extra_args = [
"root-dir=/var/lib/kubelet"
"root-dir=/var/lib/kubelet"
]

labels = {
labels = {
"test1" = "test1",
"test2" = "test2",
}
Expand Down Expand Up @@ -106,5 +108,5 @@ Import
TKE scale workers can be imported, e.g.

```
$ terraform import tencentcloud_kubernetes_scale_worker.test cls-xxx#ins-xxx
$ terraform import tencentcloud_kubernetes_scale_worker.example cls-mij6c2pq#ins-n6esjkdi,ins-9h3rdxt8,ins-qretqeas
```
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ import (
"strings"
"time"

"github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/ratelimit"

"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/errors"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
Expand Down Expand Up @@ -445,25 +447,47 @@ func resourceTencentCloudKubernetesScaleWorkerCreateOnStart(ctx context.Context)
waitRequest := tke.NewDescribeClusterInstancesRequest()
waitRequest.ClusterId = &clusterId
waitRequest.InstanceIds = helper.Strings(instanceIds)
waitRequest.Offset = helper.Int64(0)
waitRequest.Limit = helper.Int64(100)
err = resource.Retry(tccommon.ReadRetryTimeout*5, func() *resource.RetryError {
result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseTkeClient().DescribeClusterInstances(waitRequest)
if e != nil {
return tccommon.RetryError(e)
} else {
log.Printf("[DEBUG] api[%s] success, request body [%s], response body [%s]\n", waitRequest.GetAction(), waitRequest.ToJsonString(), result.ToJsonString())
var (
offset int64 = 0
limit int64 = 100
tmpInstanceSet []*tke.Instance
)

// get all instances
for {
waitRequest.Limit = &limit
waitRequest.Offset = &offset
ratelimit.Check(waitRequest.GetAction())
result, e := meta.(tccommon.ProviderMeta).GetAPIV3Conn().UseTkeClient().DescribeClusterInstances(waitRequest)
if e != nil {
return tccommon.RetryError(e)
} else {
log.Printf("[DEBUG] api[%s] success, request body [%s], response body [%s]\n", waitRequest.GetAction(), waitRequest.ToJsonString(), result.ToJsonString())
}

if result == nil || len(result.Response.InstanceSet) == 0 {
break
}

tmpInstanceSet = append(tmpInstanceSet, result.Response.InstanceSet...)

if len(result.Response.InstanceSet) < int(limit) {
break
}

offset += limit
}

// check instances status
tmpInstanceSet := result.Response.InstanceSet
if tmpInstanceSet == nil {
if len(tmpInstanceSet) == 0 {
return resource.NonRetryableError(fmt.Errorf("there is no instances in set"))
} else {
var (
stop int
flag bool
)

for _, v := range instanceIds {
for _, instance := range tmpInstanceSet {
if v == *instance.InstanceId {
Expand All @@ -472,7 +496,7 @@ func resourceTencentCloudKubernetesScaleWorkerCreateOnStart(ctx context.Context)
flag = true
} else if *instance.InstanceState == "failed" {
stop += 1
log.Printf("instance:%s status is failed.", v)
log.Printf("instance: %s status is failed.", v)
} else {
continue
}
Expand All @@ -485,8 +509,7 @@ func resourceTencentCloudKubernetesScaleWorkerCreateOnStart(ctx context.Context)
} else if stop == len(instanceIds) && !flag {
return resource.NonRetryableError(fmt.Errorf("The instances being created have all failed."))
} else {
e = fmt.Errorf("cluster instances is still initializing.")
return resource.RetryableError(e)
return resource.RetryableError(fmt.Errorf("cluster instances is still initializing."))
}
}
})
Expand Down
8 changes: 5 additions & 3 deletions website/docs/r/kubernetes_scale_worker.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,15 @@ variable "scale_instance_type" {
default = "S2.LARGE16"
}

resource tencentcloud_kubernetes_scale_worker test_scale {
resource "tencentcloud_kubernetes_scale_worker" "example" {
cluster_id = "cls-godovr32"
desired_pod_num = 16

labels = {
"test1" = "test1",
"test2" = "test2",
}

worker_config {
count = 3
availability_zone = var.availability_zone
Expand Down Expand Up @@ -76,7 +78,7 @@ variable "scale_instance_type" {
default = "S2.LARGE16"
}

resource tencentcloud_kubernetes_scale_worker test_scale {
resource "tencentcloud_kubernetes_scale_worker" "example" {
cluster_id = "cls-godovr32"

extra_args = [
Expand Down Expand Up @@ -206,6 +208,6 @@ In addition to all arguments above, the following attributes are exported:
TKE scale workers can be imported, e.g.

```
$ terraform import tencentcloud_kubernetes_scale_worker.test cls-xxx#ins-xxx
$ terraform import tencentcloud_kubernetes_scale_worker.example cls-mij6c2pq#ins-n6esjkdi,ins-9h3rdxt8,ins-qretqeas
```

Loading