feat(dms_flavors): import dms_flavors resource and add unit test and document
Zhukun-Huawei committed Oct 17, 2023
1 parent 02543dd commit 1bc0c88
Showing 4 changed files with 298 additions and 0 deletions.
143 changes: 143 additions & 0 deletions docs/data-sources/dms_kafka_flavors.md
@@ -0,0 +1,143 @@
---
subcategory: "Distributed Message Service (DMS)"
---

# flexibleengine_dms_kafka_flavors

Use this data source to get the list of available flavor details within FlexibleEngine.

## Example Usage

### Query the list of kafka flavors for the cluster type

```hcl
data "flexibleengine_dms_kafka_flavors" "test" {
type = "cluster"
}
```

### Query the kafka flavor details of a specified ID

```hcl
data "flexibleengine_dms_kafka_flavors" "test" {
flavor_id = "c6.2u4g.cluster"
}
```

### Query the list of kafka flavors available in the specified availability zones

```hcl
variable "az1" {}
variable "az2" {}
data "flexibleengine_dms_kafka_flavors" "test" {
availability_zones = [
var.az1,
var.az2,
]
}
```
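### Query the list of kafka flavors matching several filters

The filter arguments can also be combined. The following is only an illustrative sketch; the values
shown (for example **postPaid** and **dms.physical.storage.ultra.v2**) are taken from the argument
descriptions on this page and may need to be adjusted to the flavors actually offered in your region.

```hcl
data "flexibleengine_dms_kafka_flavors" "test" {
  type              = "cluster"
  arch_type         = "X86"
  charging_mode     = "postPaid"
  storage_spec_code = "dms.physical.storage.ultra.v2"
}
```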

## Argument Reference

* `region` - (Optional, String) Specifies the region in which to obtain the DMS Kafka flavors.
If omitted, the provider-level region will be used.

* `flavor_id` - (Optional, String) Specifies the DMS flavor ID, e.g. **c6.2u4g.cluster**.

* `storage_spec_code` - (Optional, String) Specifies the disk IO encoding.
+ **dms.physical.storage.high.v2**: Type of the disk that uses high I/O.
+ **dms.physical.storage.ultra.v2**: Type of the disk that uses ultra-high I/O.

* `type` - (Optional, String) Specifies the flavor type. The valid values are **single** and **cluster**.

* `arch_type` - (Optional, String) Specifies the type of CPU architecture, e.g. **X86**.

* `availability_zones` - (Optional, List) Specifies the list of availability zones with available resources.

* `charging_mode` - (Optional, String) Specifies the flavor billing mode.
The valid values are **prePaid** and **postPaid**.

## Attribute Reference

In addition to all arguments above, the following attributes are exported:

* `id` - The data source ID.

* `versions` - The supported flavor versions.

* `flavors` - The list of flavor details.
The [object](#dms_kafka_flavors) structure is documented below.

<a name="dms_kafka_flavors"></a>
The `flavors` block supports:

* `id` - The flavor ID.

* `type` - The flavor type.

* `vm_specification` - The underlying VM specification.

* `arch_types` - The list of supported CPU architectures.

* `charging_modes` - The list of supported billing modes.

* `ios` - The list of supported disk IO types.
The [object](#dms_kafka_flavor_ios) structure is documented below.

* `support_features` - The list of features supported by the current specification.
The [object](#dms_kafka_flavor_support_features) structure is documented below.

* `properties` - The properties of the current specification.
The [object](#dms_kafka_flavor_properties) structure is documented below.

<a name="dms_kafka_flavor_ios"></a>
The `ios` block supports:

* `storage_spec_code` - The disk IO encoding.

* `type` - The disk type.

* `availability_zones` - The list of availability zones with available resources.

* `unavailability_zones` - The list of availability zones in which the resources are unavailable.

<a name="dms_kafka_flavor_support_features"></a>
The `support_features` block supports:

* `name` - The function name, e.g. **connector_obs**.

* `properties` - The function property details.
The [object](#dms_kafka_flavor_support_feature_properties) structure is documented below.

<a name="dms_kafka_flavor_support_feature_properties"></a>
The `properties` block supports:

* `max_task` - The maximum number of tasks for the dump function.

* `min_task` - The minimum number of tasks for the dump function.

* `max_node` - The maximum number of nodes for the dump function.

* `min_node` - The minimum number of nodes for the dump function.

<a name="dms_kafka_flavor_properties"></a>
The `properties` block supports:

* `max_broker` - The maximum number of brokers.

* `min_broker` - The minimum number of brokers.

* `max_bandwidth_per_broker` - The maximum bandwidth per broker.

* `max_consumer_per_broker` - The maximum number of consumers per broker.

* `max_partition_per_broker` - The maximum number of partitions per broker.

* `max_tps_per_broker` - The maximum TPS per broker.

* `max_storage_per_node` - The maximum storage per node. The unit is GB.

* `min_storage_per_node` - The minimum storage per node. The unit is GB.

* `flavor_alias` - The flavor ID alias.
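
The following is a minimal sketch of how the nested blocks above can be referenced from outputs.
It assumes, as the acceptance test in this commit does with `ios[0]`, that the nested `ios` and
`properties` blocks are exported as single-element lists and are therefore indexed with `[0]`;
the output names are illustrative.

```hcl
data "flexibleengine_dms_kafka_flavors" "test" {
  type = "cluster"
}

# ID of the first flavor returned by the query.
output "first_flavor_id" {
  value = data.flexibleengine_dms_kafka_flavors.test.flavors[0].id
}

# Maximum number of brokers allowed by the first flavor.
output "first_flavor_max_broker" {
  value = data.flexibleengine_dms_kafka_flavors.test.flavors[0].properties[0].max_broker
}

# Availability zones in which the first flavor's first disk IO type is available.
output "first_flavor_availability_zones" {
  value = data.flexibleengine_dms_kafka_flavors.test.flavors[0].ios[0].availability_zones
}
```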
@@ -0,0 +1,74 @@
package acceptance

import (
"regexp"
"testing"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"

"github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/services/acceptance"
)

func TestAccKafkaFlavorsDataSource_basic(t *testing.T) {
dataSourceName := "data.flexibleengine_dms_kafka_flavors.test"
dc := acceptance.InitDataSourceCheck(dataSourceName)

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
ProviderFactories: TestAccProviderFactories,
Steps: []resource.TestStep{
{
Config: testAccKafkaFlavorsDataSource_basic,
Check: resource.ComposeTestCheckFunc(
dc.CheckResourceExists(),
resource.TestMatchResourceAttr(dataSourceName, "versions.#", regexp.MustCompile(`[1-9]\d*`)),
resource.TestMatchResourceAttr(dataSourceName, "flavors.#", regexp.MustCompile(`[1-9]\d*`)),
resource.TestCheckOutput("type_validation", "true"),
resource.TestCheckOutput("arch_types_validation", "true"),
resource.TestCheckOutput("charging_modes_validation", "true"),
resource.TestCheckOutput("storage_spec_code_validation", "true"),
resource.TestCheckOutput("availability_zones_validation", "true"),
),
},
},
})
}

const testAccKafkaFlavorsDataSource_basic = `
data "flexibleengine_dms_kafka_flavors" "baisc" {
type = "cluster"
}
data "flexibleengine_dms_kafka_flavors" "test" {
type = local.test_refer.type
arch_type = local.test_refer.arch_types[0]
charging_mode = local.test_refer.charging_modes[0]
storage_spec_code = local.test_refer.ios[0].storage_spec_code
availability_zones = local.test_refer.ios[0].availability_zones
}
locals {
test_refer = data.flexibleengine_dms_kafka_flavors.baisc.flavors[0]
test_results = data.flexibleengine_dms_kafka_flavors.test
}
output "type_validation" {
value = contains(local.test_results.flavors[*].type, local.test_refer.type)
}
output "arch_types_validation" {
value = !contains([for a in local.test_results.flavors[*].arch_types : contains(a, local.test_refer.arch_types[0])], false)
}
output "charging_modes_validation" {
value = !contains([for c in local.test_results.flavors[*].charging_modes : contains(c, local.test_refer.charging_modes[0])], false)
}
output "storage_spec_code_validation" {
value = !contains([for ios in local.test_results.flavors[*].ios : !contains([for io in ios : io.storage_spec_code == local.test_refer.ios[0].storage_spec_code], false)], false)
}
output "availability_zones_validation" {
value = !contains([for ios in local.test_results.flavors[*].ios : !contains([for io in ios : length(setintersection(io.availability_zones, local.test_refer.ios[0].availability_zones)) == length(local.test_refer.ios[0].availability_zones)], false)], false)
}
`
@@ -0,0 +1,79 @@
package acceptance

import (
"regexp"
"testing"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"

"github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/services/acceptance"
)

func TestAccRabbitMQFlavorsDataSource_basic(t *testing.T) {
dataSourceName := "data.flexibleengine_dms_rabbitmq_flavors.test"
dc := acceptance.InitDataSourceCheck(dataSourceName)

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
ProviderFactories: TestAccProviderFactories,
Steps: []resource.TestStep{
{
Config: testAccDatasourceDmsRabbitMQFlavors_basic(),
Check: resource.ComposeTestCheckFunc(
dc.CheckResourceExists(),
resource.TestMatchResourceAttr(dataSourceName, "versions.#", regexp.MustCompile(`[1-9]\d*`)),
resource.TestMatchResourceAttr(dataSourceName, "flavors.#", regexp.MustCompile(`[1-9]\d*`)),
resource.TestCheckOutput("type_validation", "true"),
resource.TestCheckOutput("arch_types_validation", "true"),
resource.TestCheckOutput("charging_modes_validation", "true"),
resource.TestCheckOutput("storage_spec_code_validation", "true"),
resource.TestCheckOutput("availability_zones_validation", "true"),
),
},
},
})
}

func testAccDatasourceDmsRabbitMQFlavors_basic() string {
return `
data "flexibleengine_dms_rabbitmq_flavors" "baisc" {
type = "cluster"
}
data "flexibleengine_dms_rabbitmq_flavors" "test" {
type = local.test_refer.type
arch_type = local.test_refer.arch_types[0]
charging_mode = local.test_refer.charging_modes[0]
storage_spec_code = local.test_refer.ios[0].storage_spec_code
availability_zones = local.test_refer.ios[0].availability_zones
}
locals {
test_refer = data.flexibleengine_dms_rabbitmq_flavors.baisc.flavors[0]
test_results = data.flexibleengine_dms_rabbitmq_flavors.test
}
output "type_validation" {
value = contains(local.test_results.flavors[*].type, local.test_refer.type)
}
output "arch_types_validation" {
value = alltrue([for a in local.test_results.flavors[*].arch_types : contains(a, local.test_refer.arch_types[0])])
}
output "charging_modes_validation" {
value = alltrue([for c in local.test_results.flavors[*].charging_modes : contains(c, local.test_refer.charging_modes[0])])
}
output "storage_spec_code_validation" {
value = alltrue([for ios in local.test_results.flavors[*].ios :
alltrue([for io in ios : io.storage_spec_code == local.test_refer.ios[0].storage_spec_code])])
}
output "availability_zones_validation" {
value = alltrue([for ios in local.test_results.flavors[*].ios :
alltrue([for io in ios : length(setintersection(io.availability_zones,
local.test_refer.ios[0].availability_zones)) == length(local.test_refer.ios[0].availability_zones)])])
}
`
}
2 changes: 2 additions & 0 deletions flexibleengine/provider.go
@@ -293,9 +293,11 @@ func Provider() *schema.Provider {
"flexibleengine_ddm_schemas": ddm.DataSourceDdmSchemas(),
"flexibleengine_ddm_accounts": ddm.DataSourceDdmAccounts(),

"flexibleengine_dms_kafka_flavors": dms.DataSourceKafkaFlavors(),
"flexibleengine_dms_kafka_instances": dms.DataSourceDmsKafkaInstances(),
"flexibleengine_dms_rocketmq_broker": dms.DataSourceDmsRocketMQBroker(),
"flexibleengine_dms_rocketmq_instances": dms.DataSourceDmsRocketMQInstances(),
"flexibleengine_dms_rabbitmq_flavors": dms.DataSourceRabbitMQFlavors(),

"flexibleengine_dws_flavors": dws.DataSourceDwsFlavors(),
"flexibleengine_elb_certificate": elb.DataSourceELBCertificateV3(),