build(deps): bump the main group across 1 directory with 4 updates (#1882)

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Murad Biashimov <[email protected]>
dependabot[bot] and byashimov authored Nov 1, 2024
1 parent b2dfb18 commit 157fde5
Showing 51 changed files with 296 additions and 220 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -10,6 +10,14 @@ nav_order: 1
## [MAJOR.MINOR.PATCH] - YYYY-MM-DD

- Add support for `autoscaler` service integration
- Add `aiven_opensearch` resource field `opensearch_user_config.azure_migration.include_aliases`: Whether to restore aliases alongside their associated indexes
- Add `aiven_opensearch` datasource field `opensearch_user_config.azure_migration.include_aliases`: Whether to restore aliases alongside their associated indexes
- Add `aiven_opensearch` resource field `opensearch_user_config.gcs_migration.include_aliases`: Whether to restore aliases alongside their associated indexes
- Add `aiven_opensearch` datasource field `opensearch_user_config.gcs_migration.include_aliases`: Whether to restore aliases alongside their associated indexes
- Add `aiven_opensearch` resource field `opensearch_user_config.s3_migration.include_aliases`: Whether to restore aliases alongside their associated indexes
- Add `aiven_opensearch` datasource field `opensearch_user_config.s3_migration.include_aliases`: Whether to restore aliases alongside their associated indexes
- Change `aiven_cassandra` resource field `cassandra_user_config.additional_backup_regions`: remove deprecation
- Change `aiven_cassandra` datasource field `cassandra_user_config.additional_backup_regions`: remove deprecation

## [4.28.0] - 2024-10-21

68 changes: 50 additions & 18 deletions changelog/differ.go
@@ -4,26 +4,28 @@ import (
"encoding/json"
"fmt"
"slices"
"sort"
"strings"

"github.com/ettle/strcase"
"github.com/google/go-cmp/cmp"
"github.com/samber/lo"
)

func diffItems(resourceType ResourceType, was, have *Item) (*Diff, error) {
func diffItems(resourceType RootType, was, have *Item) (*Diff, error) {
// Added or removed
if was == nil || have == nil {
action := ChangeTypeAdd
action := AddDiffAction
if have == nil {
action = ChangeTypeRemove
action = RemoveDiffAction
have = was
}

return &Diff{
Type: action,
ResourceType: resourceType,
Description: removeEnum(have.Description),
Item: have,
Action: action,
RootType: resourceType,
Description: removeEnum(have.Description),
Item: have,
}, nil
}

@@ -55,7 +57,7 @@ func diffItems(resourceType ResourceType, was, have *Item) (*Diff, error) {
case "deprecated":
entry = "remove deprecation"
if have.Deprecated != "" {
entry = fmt.Sprintf("deprecate: %s", have.Deprecated)
entry = fmt.Sprintf("deprecate: %s", strings.TrimRight(have.Deprecated, ". "))
}
case "beta":
entry = "marked as beta"
Expand All @@ -78,16 +80,16 @@ func diffItems(resourceType ResourceType, was, have *Item) (*Diff, error) {
}

return &Diff{
Type: ChangeTypeChange,
ResourceType: resourceType,
Description: strings.Join(entries, ", "),
Item: have,
Action: ChangeDiffAction,
RootType: resourceType,
Description: strings.Join(entries, ", "),
Item: have,
}, nil
}

func diffItemMaps(was, have ItemMap) ([]string, error) {
result := make([]string, 0)
kinds := []ResourceType{ResourceKind, DataSourceKind}
result := make([]*Diff, 0)
kinds := []RootType{ResourceRootType, DataSourceRootType}
for _, kind := range kinds {
wasItems := was[kind]
haveItems := have[kind]
@@ -105,6 +107,7 @@ func diffItemMaps(was, have ItemMap) ([]string, error) {
seen[k] = true

// When a resource is added or removed, its fields are skipped until the next resource
// Otherwise, all of its fields would appear as individual changes
if skipPrefix != "" && strings.HasPrefix(k, skipPrefix) {
continue
}
@@ -123,11 +126,32 @@ func diffItemMaps(was, have ItemMap) ([]string, error) {
}

if change != nil {
result = append(result, change.String())
result = append(result, change)
}
}
}
return result, nil

// Sorts changes by action, then by path; for the same path, resources come before data sources
sort.Slice(result, func(i, j int) bool {
a, b := result[i], result[j]
if a.Action != b.Action {
return a.Action < b.Action
}

if a.Item.Path != b.Item.Path {
return a.Item.Path < b.Item.Path
}

// Resource comes first, then datasource
return a.RootType > b.RootType
})

strs := make([]string, len(result))
for i, r := range result {
strs[i] = r.String()
}

return strs, nil
}

func toMap(item *Item) (map[string]any, error) {
@@ -145,7 +169,15 @@ func toMap(item *Item) (map[string]any, error) {
m["enum"] = findEnums(item.Description)
m["beta"] = hasBeta(item.Description)
m["type"] = strValueType(item.Type)
m["elemType"] = strValueType(item.ElemType)
delete(m, "description") // Not needed to compare descriptions
m["elementType"] = strValueType(item.ElementType)

// Not needed to compare descriptions
delete(m, "description")

// Turns "maxItems" into "max items" for human readability
for k, v := range m {
delete(m, k)
m[strcase.ToCase(k, strcase.LowerCase, ' ')] = v
}
return m, err
}
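
As a rough illustration (not part of this commit), the new key rewrite in `toMap` lowercases camelCase schema keys and joins the words with spaces, using the same `strcase.ToCase` call shape as above; this sketch assumes the library splits on word boundaries as the code comment describes:

```go
package main

import (
	"fmt"

	"github.com/ettle/strcase"
)

func main() {
	// Keys as they appear in an Item's JSON map before the rewrite.
	for _, k := range []string{"maxItems", "forceNew", "elementType"} {
		// Lower-case words joined by a space: "maxItems" -> "max items".
		fmt.Printf("%s -> %s\n", k, strcase.ToCase(k, strcase.LowerCase, ' '))
	}
}
```

This keeps generated changelog entries readable, per the "for human readability" comment in `toMap`.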
26 changes: 13 additions & 13 deletions changelog/differ_test.go
@@ -11,13 +11,13 @@ func TestCompare(t *testing.T) {
tests := []struct {
name string
expect string
kind ResourceType
kind RootType
old, new *Item
}{
{
name: "change enums",
expect: "Change resource `foo` field `bar`: enum ~~`bar`, `baz`~~ -> `foo`, `baz`",
kind: ResourceKind,
expect: "Change `foo` resource field `bar`: enum ~~`bar`, `baz`~~ -> `foo`, `baz`",
kind: ResourceRootType,
old: &Item{
Type: schema.TypeString,
Path: "foo.bar",
@@ -31,8 +31,8 @@ func TestCompare(t *testing.T) {
},
{
name: "add resource field",
expect: "Add resource `foo` field `bar`: Foo",
kind: ResourceKind,
expect: "Add `foo` resource field `bar`: Foo",
kind: ResourceRootType,
new: &Item{
Type: schema.TypeString,
Path: "foo.bar",
Expand All @@ -41,8 +41,8 @@ func TestCompare(t *testing.T) {
},
{
name: "remove resource field",
expect: "Remove resource `foo` field `bar`: Foo",
kind: ResourceKind,
expect: "Remove `foo` resource field `bar`: Foo",
kind: ResourceRootType,
old: &Item{
Type: schema.TypeString,
Path: "foo.bar",
@@ -51,8 +51,8 @@ func TestCompare(t *testing.T) {
},
{
name: "remove beta from the field",
expect: "Change resource `foo` field `bar`: no longer beta",
kind: ResourceKind,
expect: "Change `foo` resource field `bar`: no longer beta",
kind: ResourceRootType,
old: &Item{
Type: schema.TypeString,
Path: "foo.bar",
@@ -66,8 +66,8 @@ func TestCompare(t *testing.T) {
},
{
name: "add beta resource",
expect: "Add resource `foo` _(beta)_: does stuff, PROVIDER_AIVEN_ENABLE_BETA",
kind: ResourceKind,
expect: "Add `foo` resource _(beta)_: does stuff, PROVIDER_AIVEN_ENABLE_BETA",
kind: ResourceRootType,
new: &Item{
Type: schema.TypeString,
Path: "foo",
@@ -76,8 +76,8 @@ func TestCompare(t *testing.T) {
},
{
name: "change type",
expect: "Change resource `foo` field `bar`: type ~~`list`~~ -> `set`",
kind: ResourceKind,
expect: "Change `foo` resource field `bar`: type ~~`list`~~ -> `set`",
kind: ResourceRootType,
old: &Item{
Type: schema.TypeList,
Path: "foo.bar",
8 changes: 4 additions & 4 deletions changelog/main.go
@@ -155,9 +155,9 @@ func writeChangelog(_ string, entries []string) error {
func fromProvider(p *schema.Provider) (ItemMap, error) {
// Item names might clash between resources and data sources
// Splits into separate maps
sourceMaps := map[ResourceType]map[string]*schema.Resource{
ResourceKind: p.ResourcesMap,
DataSourceKind: p.DataSourcesMap,
sourceMaps := map[RootType]map[string]*schema.Resource{
ResourceRootType: p.ResourcesMap,
DataSourceRootType: p.DataSourcesMap,
}

items := make(ItemMap)
@@ -200,7 +200,7 @@ func walkSchema(name string, this *schema.Schema, parent *Item) []*Item {
// Properties
switch elem := this.Elem.(type) {
case *schema.Schema:
item.ElemType = elem.Type
item.ElementType = elem.Type
case *schema.Resource:
for k, child := range elem.Schema {
items = append(items, walkSchema(k, child, item)...)
41 changes: 22 additions & 19 deletions changelog/types.go
@@ -7,48 +7,51 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

type (
ResourceType string
DiffType string
)
type RootType string

const (
ResourceKind ResourceType = "resource"
DataSourceKind ResourceType = "datasource"
ResourceRootType RootType = "resource"
DataSourceRootType RootType = "datasource"
)

type DiffAction string

ChangeTypeAdd DiffType = "Add"
ChangeTypeRemove DiffType = "Remove"
ChangeTypeChange DiffType = "Change"
const (
AddDiffAction DiffAction = "Add"
RemoveDiffAction DiffAction = "Remove"
ChangeDiffAction DiffAction = "Change"
)

type ItemMap map[ResourceType]map[string]*Item
type ItemMap map[RootType]map[string]*Item

type Item struct {
Name string `json:"name"`
Path string `json:"path"`
Path string `json:"path"` // e.g. aiven_project.project
Name string `json:"name"` // e.g. project

// Terraform schema fields
Description string `json:"description"`
ForceNew bool `json:"forceNew"`
Optional bool `json:"optional"`
Sensitive bool `json:"sensitive"`
MaxItems int `json:"maxItems"`
Deprecated string `json:"deprecated"`
Type schema.ValueType `json:"type"`
ElemType schema.ValueType `json:"elemType"`
ElementType schema.ValueType `json:"elementType"`
}

type Diff struct {
Type DiffType
ResourceType ResourceType
Description string
Item *Item
Action DiffAction
RootType RootType
Description string
Item *Item
}

func (c *Diff) String() string {
// resource name + field name
path := strings.SplitN(c.Item.Path, ".", 2)

// e.g.: "Add resource `aiven_project`"
msg := fmt.Sprintf("%s %s `%s`", c.Type, c.ResourceType, path[0])
// e.g.: "Add `aiven_project` resource"
msg := fmt.Sprintf("%s `%s` %s", c.Action, path[0], c.RootType)

// e.g.: "field `project`"
if len(path) > 1 {
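
To show the reordered wording concretely, here is a small self-contained sketch (not from this commit) that mirrors the visible part of `Diff.String` and the updated expectations in `differ_test.go`; the `render` helper is hypothetical and only stands in for the method:

```go
package main

import (
	"fmt"
	"strings"
)

// render mimics the new word order: action, root name, then root type.
// The real logic lives in (*Diff).String; this free function is for illustration only.
func render(action, rootType, path, description string) string {
	// path is either "resource_name" or "resource_name.field_name", e.g. "foo.bar".
	parts := strings.SplitN(path, ".", 2)
	msg := fmt.Sprintf("%s `%s` %s", action, parts[0], rootType)
	if len(parts) > 1 {
		msg += fmt.Sprintf(" field `%s`", parts[1])
	}
	return msg + ": " + description
}

func main() {
	// Matches the updated test expectation in differ_test.go.
	fmt.Println(render("Add", "resource", "foo.bar", "Foo"))
	// Output: Add `foo` resource field `bar`: Foo
}
```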
2 changes: 1 addition & 1 deletion docs/data-sources/account_team_project.md
@@ -32,4 +32,4 @@ data "aiven_account_team_project" "account_team_project1" {
### Read-Only

- `id` (String) The ID of this resource.
- `team_type` (String) The Account team project type. The possible values are `admin`, `operator`, `developer`, `read_only`, `project:integrations:read`, `project:networking:read`, `project:permissions:read`, `service:logs:read`, `project:services:read` and `project:audit_logs:read`.
- `team_type` (String) The Account team project type. The possible values are `admin`, `operator`, `developer`, `read_only`, `project:integrations:read`, `project:integrations:write`, `project:networking:read`, `project:networking:write`, `project:permissions:read`, `service:configuration:write`, `services:maintenance`, `service:logs:read`, `project:services:read` and `project:audit_logs:read`.
3 changes: 3 additions & 0 deletions docs/data-sources/opensearch.md
@@ -128,6 +128,7 @@ Read-Only:
- `compress` (Boolean)
- `container` (String)
- `endpoint_suffix` (String)
- `include_aliases` (Boolean)
- `indices` (String)
- `key` (String)
- `restore_global_state` (Boolean)
@@ -145,6 +146,7 @@
- `chunk_size` (String)
- `compress` (Boolean)
- `credentials` (String)
- `include_aliases` (Boolean)
- `indices` (String)
- `restore_global_state` (Boolean)
- `snapshot_name` (String)
@@ -449,6 +451,7 @@ Read-Only:
- `chunk_size` (String)
- `compress` (Boolean)
- `endpoint` (String)
- `include_aliases` (Boolean)
- `indices` (String)
- `region` (String)
- `restore_global_state` (Boolean)
2 changes: 1 addition & 1 deletion docs/data-sources/project_user.md
@@ -31,4 +31,4 @@ data "aiven_project_user" "mytestuser" {

- `accepted` (Boolean) Whether the user has accepted the request to join the project. Users get an invite and become project members after accepting the invite.
- `id` (String) The ID of this resource.
- `member_type` (String) Project membership type. The possible values are `admin`, `developer`, `operator`, `project:audit_logs:read`, `project:integrations:read`, `project:networking:read`, `project:permissions:read`, `project:services:read`, `read_only` and `service:logs:read`.
- `member_type` (String) Project membership type. The possible values are `admin`, `developer`, `operator`, `project:audit_logs:read`, `project:integrations:read`, `project:integrations:write`, `project:networking:read`, `project:networking:write`, `project:permissions:read`, `project:services:read`, `read_only`, `service:configuration:write`, `service:logs:read` and `services:maintenance`.
2 changes: 1 addition & 1 deletion docs/resources/account_team_project.md
@@ -48,7 +48,7 @@ resource "aiven_account_team_project" "main" {
### Optional

- `project_name` (String) The name of an already existing project
- `team_type` (String) The Account team project type. The possible values are `admin`, `operator`, `developer`, `read_only`, `project:integrations:read`, `project:networking:read`, `project:permissions:read`, `service:logs:read`, `project:services:read` and `project:audit_logs:read`.
- `team_type` (String) The Account team project type. The possible values are `admin`, `operator`, `developer`, `read_only`, `project:integrations:read`, `project:integrations:write`, `project:networking:read`, `project:networking:write`, `project:permissions:read`, `service:configuration:write`, `services:maintenance`, `service:logs:read`, `project:services:read` and `project:audit_logs:read`.
- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts))

### Read-Only
2 changes: 1 addition & 1 deletion docs/resources/cassandra.md
@@ -86,7 +86,7 @@ Optional:

Optional:

- `additional_backup_regions` (List of String, Deprecated) Additional Cloud Regions for Backup Replication.
- `additional_backup_regions` (List of String) Additional Cloud Regions for Backup Replication.
- `backup_hour` (Number) The hour of day (in UTC) when backup for the service is started. New backup is only started if previous backup has already completed. Example: `3`.
- `backup_minute` (Number) The minute of an hour when backup for the service is started. New backup is only started if previous backup has already completed. Example: `30`.
- `cassandra` (Block List, Max: 1) Cassandra configuration values (see [below for nested schema](#nestedblock--cassandra_user_config--cassandra))
2 changes: 1 addition & 1 deletion docs/resources/dragonfly.md
@@ -87,7 +87,7 @@ Read-Only:
Optional:

- `cache_mode` (Boolean) Evict entries when getting close to maxmemory limit. Default: `false`.
- `dragonfly_persistence` (String) Enum: `off`, `rdb`, `dfs`. When persistence is `rdb` or `dfs`, Dragonfly does RDB or DFS dumps every 10 minutes. Dumps are done according to the backup schedule for backup purposes. When persistence is `off`, no RDB/DFS dumps or backups are done, so data can be lost at any moment if the service is restarted for any reason, or if the service is powered off. Also, the service can't be forked.
- `dragonfly_persistence` (String) Enum: `dfs`, `off`, `rdb`. When persistence is `rdb` or `dfs`, Dragonfly does RDB or DFS dumps every 10 minutes. Dumps are done according to the backup schedule for backup purposes. When persistence is `off`, no RDB/DFS dumps or backups are done, so data can be lost at any moment if the service is restarted for any reason, or if the service is powered off. Also, the service can't be forked.
- `dragonfly_ssl` (Boolean) Require SSL to access Dragonfly. Default: `true`.
- `ip_filter` (Set of String, Deprecated) Allow incoming connections from CIDR address block, e.g. `10.20.0.0/16`.
- `ip_filter_object` (Block Set, Max: 1024) Allow incoming connections from CIDR address block, e.g. `10.20.0.0/16` (see [below for nested schema](#nestedblock--dragonfly_user_config--ip_filter_object))