add ckafka datasource #1827

Merged
merged 2 commits on May 26, 2023
19 changes: 19 additions & 0 deletions .changelog/1827.txt
@@ -0,0 +1,19 @@
```release-note:new-data-source
tencentcloud_ckafka_datahub_topic
```

```release-note:new-data-source
tencentcloud_ckafka_datahub_group_offsets
```

```release-note:new-data-source
tencentcloud_ckafka_datahub_task
```

```release-note:new-data-source
tencentcloud_ckafka_group
```

```release-note:new-data-source
tencentcloud_ckafka_group_offsets
```
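For context, here is a minimal, illustrative HCL sketch of the `tencentcloud_ckafka_datahub_group_offsets` data source introduced in this PR. The argument names (`name`, `group`, `search_word`, `result_output_file`) are taken from the schema added below; the values themselves are placeholders, not real Tencent Cloud resources.

```hcl
data "tencentcloud_ckafka_datahub_group_offsets" "example" {
  # Required: DataHub topic whose consumer-group offsets are queried (placeholder value).
  name = "example-datahub-topic"

  # Required: Kafka consumer group to inspect (placeholder value).
  group = "example-consumer-group"

  # Optional: fuzzy filter on topic name.
  search_word = "example"

  # Optional: write the query result to a local file.
  result_output_file = "group_offsets.json"
}
```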
204 changes: 204 additions & 0 deletions tencentcloud/data_source_tc_ckafka_datahub_group_offsets.go
@@ -0,0 +1,204 @@
/*
Use this data source to query detailed information of ckafka datahub_group_offsets

Example Usage

```hcl
data "tencentcloud_ckafka_datahub_group_offsets" "datahub_group_offsets" {
}
```
*/
package tencentcloud

import (
"context"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
ckafka "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/ckafka/v20190819"
"github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/internal/helper"
)

func dataSourceTencentCloudCkafkaDatahubGroupOffsets() *schema.Resource {
return &schema.Resource{
Read: dataSourceTencentCloudCkafkaDatahubGroupOffsetsRead,
Schema: map[string]*schema.Schema{
"name": {
Required: true,
Type: schema.TypeString,
Description: "topic name that the task subscribe.",
},

"group": {
Required: true,
Type: schema.TypeString,
Description: "Kafka consumer group.",
},

"search_word": {
Optional: true,
Type: schema.TypeString,
Description: "fuzzy match topicName.",
},

"topic_list": {
Type: schema.TypeList,
Computed: true,
Description: "The topic array, where each element is a json object.",
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"topic": {
Type: schema.TypeString,
Computed: true,
Description: "topic name.",
},
"partitions": {
Type: schema.TypeList,
Computed: true,
Description: "The topic partition array, where each element is a json object.",
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"partition": {
Type: schema.TypeInt,
Computed: true,
Description: "topic partitionId.",
},
"offset": {
Type: schema.TypeInt,
Computed: true,
Description: "consumer offset.",
},
"metadata": {
Type: schema.TypeString,
Computed: true,
Description: "Usually an empty string.",
},
"error_code": {
Type: schema.TypeInt,
Computed: true,
Description: "Error Code.",
},
"log_end_offset": {
Type: schema.TypeInt,
Computed: true,
Description: "partition Log End Offset.",
},
"lag": {
Type: schema.TypeInt,
Computed: true,
Description: "The number of unconsumed messages.",
},
},
},
},
},
},
},

"result_output_file": {
Type: schema.TypeString,
Optional: true,
Description: "Used to save results.",
},
},
}
}

func dataSourceTencentCloudCkafkaDatahubGroupOffsetsRead(d *schema.ResourceData, meta interface{}) error {
defer logElapsed("data_source.tencentcloud_ckafka_datahub_group_offsets.read")()
defer inconsistentCheck(d, meta)()

logId := getLogId(contextNil)

ctx := context.WithValue(context.TODO(), logIdKey, logId)

paramMap := make(map[string]interface{})
if v, ok := d.GetOk("name"); ok {
paramMap["name"] = helper.String(v.(string))
}

if v, ok := d.GetOk("group"); ok {
paramMap["group"] = helper.String(v.(string))
}

if v, ok := d.GetOk("search_word"); ok {
paramMap["search_word"] = helper.String(v.(string))
}

service := CkafkaService{client: meta.(*TencentCloudClient).apiV3Conn}

var result []*ckafka.GroupOffsetTopic

err := resource.Retry(readRetryTimeout, func() *resource.RetryError {
groupOffsetTopics, e := service.DescribeCkafkaDatahubGroupOffsetsByFilter(ctx, paramMap)
if e != nil {
return retryError(e)
}
result = groupOffsetTopics
return nil
})
if err != nil {
return err
}

ids := make([]string, 0, len(result))
topicList := make([]map[string]interface{}, 0, len(result))
for _, topic := range result {
topicMap := make(map[string]interface{})

if topic.Topic != nil {
topicMap["topic"] = topic.Topic
ids = append(ids, *topic.Topic)
}

if topic.Partitions != nil {
partitionsList := make([]map[string]interface{}, 0)
for _, partitions := range topic.Partitions {
partitionsMap := map[string]interface{}{}

if partitions.Partition != nil {
partitionsMap["partition"] = partitions.Partition
}

if partitions.Offset != nil {
partitionsMap["offset"] = partitions.Offset
}

if partitions.Metadata != nil {
partitionsMap["metadata"] = partitions.Metadata
}

if partitions.ErrorCode != nil {
partitionsMap["error_code"] = partitions.ErrorCode
}

if partitions.LogEndOffset != nil {
partitionsMap["log_end_offset"] = partitions.LogEndOffset
}

if partitions.Lag != nil {
partitionsMap["lag"] = partitions.Lag
}

partitionsList = append(partitionsList, partitionsMap)
}

topicMap["partitions"] = partitionsList
}

topicList = append(topicList, topicMap)
}

d.SetId(helper.DataResourceIdsHash(ids))
_ = d.Set("topic_list", topicList)

output, ok := d.GetOk("result_output_file")
if ok && output.(string) != "" {
if e := writeToFile(output.(string), topicList); e != nil {
return e
}
}
return nil
}
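As a rough sketch of how the computed `topic_list` attribute populated by the read function above can be consumed, assuming the data source block from the documentation example (attribute names are taken from the schema in this file):

```hcl
# Full topic/partition offset detail as returned by the data source.
output "datahub_group_offsets_topics" {
  value = data.tencentcloud_ckafka_datahub_group_offsets.datahub_group_offsets.topic_list
}

# One flat list of { topic, partition, lag } objects, handy for spotting consumer lag.
output "datahub_partition_lags" {
  value = flatten([
    for t in data.tencentcloud_ckafka_datahub_group_offsets.datahub_group_offsets.topic_list : [
      for p in t.partitions : {
        topic     = t.topic
        partition = p.partition
        lag       = p.lag
      }
    ]
  ])
}
```

The nested `for` expressions plus `flatten` keep the output shape independent of how many topics the group has offsets for.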
29 changes: 29 additions & 0 deletions tencentcloud/data_source_tc_ckafka_datahub_group_offsets_test.go
@@ -0,0 +1,29 @@
package tencentcloud

import (
"testing"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

func TestAccTencentCloudCkafkaDatahubGroupOffsetsDataSource_basic(t *testing.T) {
t.Parallel()
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheckCommon(t, ACCOUNT_TYPE_PREPAY) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
Config: testAccCkafkaDatahubGroupOffsetsDataSource,
Check: resource.ComposeTestCheckFunc(testAccCheckTencentCloudDataSourceID("data.tencentcloud_ckafka_datahub_group_offsets.datahub_group_offsets")),
},
},
})
}

const testAccCkafkaDatahubGroupOffsetsDataSource = `

data "tencentcloud_ckafka_datahub_group_offsets" "datahub_group_offsets" {
name = "1308726196-keep-topic"
group = "topic-lejrlafu-test"
}
`