feat(release): CRD changelog generator
byashimov committed Nov 14, 2023
1 parent 17ad1e9 commit e204d19
Showing 7 changed files with 398 additions and 8 deletions.
35 changes: 35 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,41 @@

- Upgrade to Go 1.21
- Add a format for `string` type fields to the documentation
- Add `Clickhouse` field `userConfig.private_access.clickhouse_mysql`, type `boolean`: Allow clients
to connect to clickhouse_mysql with a DNS name that always resolves to the service's private IP addresses
- Add `Clickhouse` field `userConfig.privatelink_access.clickhouse_mysql`, type `boolean`: Enable clickhouse_mysql
- Add `Clickhouse` field `userConfig.public_access.clickhouse_mysql`, type `boolean`: Allow clients to
connect to clickhouse_mysql from the public internet for service nodes that are in a project VPC
or another type of private network
- Add `Grafana` field `userConfig.unified_alerting_enabled`, type `boolean`: Enable or disable Grafana
unified alerting functionality
- Add `Kafka` field `userConfig.kafka.sasl_oauthbearer_expected_issuer`, type `string`: Optional setting
for the broker to use to verify that the JWT was created by the expected issuer
- Add `Kafka` field `userConfig.kafka.sasl_oauthbearer_expected_audience`, type `string`: The (optional)
comma-delimited setting for the broker to use to verify that the JWT was issued for one of the
expected audiences
- Add `Kafka` field `userConfig.kafka.sasl_oauthbearer_jwks_endpoint_url`, type `string`: OIDC JWKS endpoint
URL. By setting this the SASL SSL OAuth2/OIDC authentication is enabled
- Add `Kafka` field `userConfig.kafka.sasl_oauthbearer_sub_claim_name`, type `string`: Name of the scope
from which to extract the subject claim from the JWT. Defaults to sub
- Add `Kafka` field `userConfig.aiven_kafka_topic_messages`, type `boolean`: Allow access to read Kafka
topic messages in the Aiven Console and REST API
- Change `Kafka` field `userConfig.kafka_version`: enum has: [`3.3`, `3.1`, `3.4`, `3.5`, `3.6`], was:
[`3.3`, `3.1`, `3.4`, `3.5`]
- Change `Kafka` field `userConfig.tiered_storage.local_cache.size`: deprecated
- Add `OpenSearch` field `userConfig.opensearch.indices_memory_min_index_buffer_size`, type `integer`:
Absolute value. Default is 48mb. Doesn't work without indices.memory.index_buffer_size
- Add `OpenSearch` field `userConfig.opensearch.indices_memory_max_index_buffer_size`, type `integer`:
Absolute value. Default is unbound. Doesn't work without indices.memory.index_buffer_size
- Change `OpenSearch` field `userConfig.opensearch.search_max_buckets`: maximum has `1000000`, was `65536`
- Change `OpenSearch` field `userConfig.opensearch.auth_failure_listeners.internal_authentication_backend_limiting.type`:
enum has: [`username`]
- Change `OpenSearch` field `userConfig.opensearch.auth_failure_listeners.internal_authentication_backend_limiting.authentication_backend`:
enum has: [`internal`]
- Change `OpenSearch` field `userConfig.opensearch.auth_failure_listeners.ip_rate_limiting.type`: enum
has: [`ip`]
- Change `ServiceIntegration` field `kafkaMirrormaker.kafka_mirrormaker.producer_max_request_size`: maximum
has `268435456`, was `67108864`

## v0.14.0 - 2023-09-21

3 changes: 2 additions & 1 deletion docs/docs/contributing/resource-generation.md
@@ -51,7 +51,8 @@ Here is how it goes in detail:
2. generates full spec reference out of the schema
3. creates a markdown file with spec and example (if exists)
4. Charts generator
updates CRDs, webhooks and cluster roles charts
updates CRDs, webhooks and cluster roles charts,
adds all changes to the changelog

[go-api-schemas]: https://github.com/aiven/go-api-schemas
[service-types]: https://api.aiven.io/doc/#tag/Service/operation/ListPublicServiceTypes
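
A minimal sketch of how the charts generator can drive this step (the enclosing function and the `operatorPath`/`crdCharts` variables are assumed to be in scope; `updateChangelog` and the callback it returns come from `generators/charts/changelog.go` below):

```go
// Snapshot the rendered CRD templates before regeneration...
finish, err := updateChangelog(operatorPath, crdCharts)
if err != nil {
	return err
}

// ...regenerate CRDs, webhooks and cluster role charts...

// ...then diff the old and new CRDs and append the entries to the
// unreleased section of CHANGELOG.md.
if err := finish(); err != nil {
	return err
}
```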
331 changes: 331 additions & 0 deletions generators/charts/changelog.go
@@ -0,0 +1,331 @@
package main

import (
"fmt"
"os"
"path"
"sort"
"strconv"
"strings"

"github.com/google/go-cmp/cmp"
"golang.org/x/exp/slices"
"gopkg.in/yaml.v3"
)

const lineWidth = 100

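// loadSchema unmarshals a rendered CRD manifest and returns the spec subtree
// of its first version's OpenAPI v3 schema, annotated with the CRD kind.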
func loadSchema(b []byte) (*schema, error) {
crd := new(crdType)
err := yaml.Unmarshal(b, crd)
if err != nil {
return nil, err
}

if len(crd.Spec.Versions) == 0 {
return nil, fmt.Errorf("empty schema for kind %s", crd.Spec.Names.Kind)
}

s := crd.Spec.Versions[0].Schema.OpenAPIV3Schema.Properties["spec"]
s.Kind = crd.Spec.Names.Kind
return s, nil
}

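// genChangelog diffs the old and new manifests of a single CRD and returns
// the resulting changelog entries in alphabetical order.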
func genChangelog(oldBytes, newBytes []byte) ([]string, error) {
oldSchema, err := loadSchema(oldBytes)
if err != nil {
return nil, err
}

newSchema, err := loadSchema(newBytes)
if err != nil {
return nil, err
}

changes := cmpSchema(newSchema.Kind, "", oldSchema, newSchema)
sort.Slice(changes, func(i, j int) bool {
return changes[i][0] < changes[j][0]
})

return changes, nil
}

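// cmpSchema recursively compares the property trees of two schemas and
// collects Add/Remove/Change entries for the given kind; parent carries the
// dotted path of the field currently being compared.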
func cmpSchema(kind, parent string, oldSpec, newSpec *schema) []string {
if cmp.Equal(oldSpec, newSpec) {
return nil
}

keys := make(map[string]bool)
for k := range oldSpec.Properties {
keys[k] = true
}
for k := range newSpec.Properties {
keys[k] = true
}

changes := make([]string, 0)
for k := range keys {
ov, oOk := oldSpec.Properties[k]
nv, nOk := newSpec.Properties[k]

fieldPath := k
if parent != "" {
fieldPath = fmt.Sprintf("%s.%s", parent, k)
}

switch {
case !nOk:
changes = append(changes, fmt.Sprintf("Remove `%s` field `%s`, type `%s`: %s", kind, fieldPath, ov.Type, shortDescription(ov.Description)))
case !oOk:
changes = append(changes, fmt.Sprintf("Add `%s` field `%s`, type `%s`: %s", kind, fieldPath, nv.Type, shortDescription(nv.Description)))
case !cmp.Equal(ov, nv):
switch ov.Type {
case "object":
changes = append(changes, cmpSchema(kind, fieldPath, ov, nv)...)
default:
diff := diffSchema(ov, nv)
if diff != "" {
changes = append(changes, fmt.Sprintf("Change `%s` field `%s`: %s", kind, fieldPath, diff))
}
}
}
}
return changes
}

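// diffSchema describes the first constraint that differs between two leaf
// schemas (enum, pattern, item/length bounds, minimum/maximum) or a newly
// added deprecation notice; it returns an empty string when nothing
// noteworthy changed.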
func diffSchema(old, new *schema) string {
if !cmp.Equal(old.Enum, new.Enum) {
s := fmt.Sprintf("enum has: [%s]", fmtSlice(new.Enum))
if old.Enum != nil {
s = fmt.Sprintf("%s, was: [%s]", s, fmtSlice(old.Enum))
}
return s
}
if !cmp.Equal(old.Pattern, new.Pattern) {
s := fmt.Sprintf("pattern has `%s`", new.Pattern)
if old.Pattern != "" {
s = fmt.Sprintf("%s, was `%s`", s, old.Pattern)
}
return s
}
// The item/length bounds are pointers; format them via fmtNumber so the
// value (not the pointer) is printed.
if !cmp.Equal(old.MinItems, new.MinItems) {
s := fmt.Sprintf("min_items has `%s`", fmtNumber(new.MinItems))
if old.MinItems != nil {
s = fmt.Sprintf("%s, was `%s`", s, fmtNumber(old.MinItems))
}
return s
}
if !cmp.Equal(old.MaxItems, new.MaxItems) {
s := fmt.Sprintf("max_items has `%s`", fmtNumber(new.MaxItems))
if old.MaxItems != nil {
s = fmt.Sprintf("%s, was `%s`", s, fmtNumber(old.MaxItems))
}
return s
}
if !cmp.Equal(old.MinLength, new.MinLength) {
s := fmt.Sprintf("min_length has `%s`", fmtNumber(new.MinLength))
if old.MinLength != nil {
s = fmt.Sprintf("%s, was `%s`", s, fmtNumber(old.MinLength))
}
return s
}
if !cmp.Equal(old.MaxLength, new.MaxLength) {
s := fmt.Sprintf("max_length has `%s`", fmtNumber(new.MaxLength))
if old.MaxLength != nil {
s = fmt.Sprintf("%s, was `%s`", s, fmtNumber(old.MaxLength))
}
return s
}
if !cmp.Equal(old.Minimum, new.Minimum) {
s := fmt.Sprintf("minimum has `%s`", fmtNumber(new.Minimum))
if old.Minimum != nil {
s = fmt.Sprintf("%s, was `%s`", s, fmtNumber(old.Minimum))
}
return s
}
if !cmp.Equal(old.Maximum, new.Maximum) {
s := fmt.Sprintf("maximum has `%s`", fmtNumber(new.Maximum))
if old.Maximum != nil {
s = fmt.Sprintf("%s, was `%s`", s, fmtNumber(old.Maximum))
}
return s
}
if !isDeprecated(old.Description) && isDeprecated(new.Description) {
return "deprecated"
}
return ""
}

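// crdType mirrors the subset of a CustomResourceDefinition manifest used
// here: the kind name and the OpenAPI v3 schema of each version.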
type crdType struct {
Spec struct {
Names struct {
Kind string `yaml:"kind"`
} `yaml:"names"`
Versions []struct {
Schema struct {
OpenAPIV3Schema *schema `yaml:"openAPIV3Schema"`
} `yaml:"schema"`
} `yaml:"versions"`
} `yaml:"spec"`
}

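// schema is a minimal OpenAPI v3 schema node; only the keywords relevant for
// changelog diffing are mapped.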
type schema struct {
Kind string `yaml:"-"`
Properties map[string]*schema `yaml:"properties"`
Type string `yaml:"type"`
Description string `yaml:"description"`
Enum []any `yaml:"enum"`
Pattern string `yaml:"pattern"`
// OpenAPI v3 schema keywords are camelCase in the CRD manifests.
MinItems *int `yaml:"minItems"`
MaxItems *int `yaml:"maxItems"`
MinLength *int `yaml:"minLength"`
MaxLength *int `yaml:"maxLength"`
Minimum *float64 `yaml:"minimum"`
Maximum *float64 `yaml:"maximum"`
}

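// fmtSlice renders a slice as a comma-separated list of backtick-quoted values.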
func fmtSlice(src []any) string {
result := make([]string, len(src))
for i, v := range src {
result[i] = fmt.Sprintf("`%v`", v)
}
return strings.Join(result, ", ")
}

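// fmtNumber renders an optional numeric constraint (*int or *float64),
// returning an empty string for nil or unsupported values.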
func fmtNumber(v any) string {
if d, ok := v.(*int); ok && d != nil {
return strconv.Itoa(*d)
}

f, ok := v.(*float64)
if ok && f != nil {
return strconv.FormatFloat(*f, 'f', -1, 64)
}
return ""
}

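// isDeprecated reports whether a field description marks the field as deprecated.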
func isDeprecated(s string) bool {
return strings.HasPrefix(strings.ToLower(s), "deprecated")
}

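// shortDescription keeps whole sentences from the start of a description
// until adding the next one would exceed lineWidth, then strips a trailing
// period.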
func shortDescription(s string) string {
chunks := strings.Split(s, ". ")
description := chunks[0]
for i := 1; i < len(chunks); i++ {
d := fmt.Sprintf("%s. %s", description, chunks[i])
if len(d) > lineWidth {
break
}
description = d
}
return strings.TrimSuffix(description, ".")
}

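// wrapLineAt soft-wraps src by replacing the first space at or after every n
// characters with sep; inputs shorter than roughly 1.1*n are returned
// unchanged.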
func wrapLineAt(src, sep string, n int) string {
if int(float64(n)*1.1) > len(src) {
return src
}

line := 1 // line number
for i := 0; i < len(src); {
// replace the first space at or after the target width with the separator
if i >= n*line && src[i] == ' ' {
src = fmt.Sprintf("%s%s%s", src[:i], sep, src[i+1:])
i += len(sep)
line++
continue
}
i++
}
return src
}

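// addChanges appends the generated entries, wrapped to lineWidth, to the
// first (unreleased) bullet list of the changelog body and returns the
// updated document.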
func addChanges(body []byte, changes []string) string {
lines := strings.Split(string(body), "\n")

i := 0
found := false
for ; i < len(lines); i++ {
if strings.HasPrefix(lines[i], "-") {
found = true
continue
}

if found && lines[i] == "" {
break
}
}

items := make([]string, 0)
items = append(items, lines[:i]...)
for _, s := range changes {
items = append(items, wrapLineAt("- "+s, "\n ", lineWidth))
}

items = append(items, lines[i:]...)
return strings.Join(items, "\n")
}

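// updateChangelog snapshots the rendered CRD templates under crdCharts and
// returns a callback that, after the charts have been regenerated, diffs the
// old and new CRDs and writes the collected changes into the operator's
// CHANGELOG.md.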
func updateChangelog(operatorPath, crdCharts string) (func() error, error) {
crdDir := path.Join(crdCharts, crdTemplates)
oldFiles, err := readFiles(crdDir)
if err != nil {
return nil, err
}

return func() error {
newFiles, err := readFiles(crdDir)
if err != nil {
return err
}

keys := make([]string, 0, len(newFiles))
for k := range newFiles {
keys = append(keys, k)
}
slices.Sort(keys)

changes := make([]string, 0)
for _, k := range keys {
kindChanges, err := genChangelog(oldFiles[k], newFiles[k])
if err != nil {
return err
}

changes = append(changes, kindChanges...)
}

changelogPath := path.Join(operatorPath, changelogFile)
changelogBody, err := os.ReadFile(changelogPath)
if err != nil {
return err
}

changelogUpdated := addChanges(changelogBody, changes)
return os.WriteFile(changelogPath, []byte(changelogUpdated), 0644)
}, nil
}

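// readFiles returns the contents of every regular file in the directory p,
// keyed by file name.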
func readFiles(p string) (map[string][]byte, error) {
files, err := os.ReadDir(p)
if err != nil {
return nil, err
}

result := make(map[string][]byte)
for _, file := range files {
if file.IsDir() {
continue
}
b, err := os.ReadFile(path.Join(p, file.Name()))
if err != nil {
return nil, err
}
result[file.Name()] = b
}

return result, nil
}
13 changes: 13 additions & 0 deletions generators/charts/changelog_test.go
@@ -0,0 +1,13 @@
package main

import (
"testing"

"github.com/stretchr/testify/assert"
)

func TestWrapLine(t *testing.T) {
src := "Add `Kafka.userConfig.kafka.sasl_oauthbearer_expected_audience`: The (optional) comma-delimited setting for the broker to use to verify that the JWT was issued for one of the expected audiences."
expect := "Add `Kafka.userConfig.kafka.sasl_oauthbearer_expected_audience`: The (optional) comma-delimited setting\n for the broker to use to verify that the JWT was issued for one of the expected audiences."
assert.Equal(t, expect, wrapLineAt(src, "\n ", 100))
}
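
// A further illustrative test (not part of this commit) pinning down how
// shortDescription keeps whole sentences up to lineWidth:
func TestShortDescription(t *testing.T) {
long := "Enable authentication. This is the first sentence kept. " +
"This second sentence pushes the text over the line width limit and is dropped."
assert.Equal(t, "Enable authentication. This is the first sentence kept", shortDescription(long))
}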