[kube-prometheus-stack] hack: fix etcd mixin (#3880)
* [kube-prometheus-stack] hack: fix etcd mixin

Signed-off-by: Jan-Otto Kröpke <[email protected]>

* [kube-prometheus-stack] hack: fix etcd mixin

Signed-off-by: Jan-Otto Kröpke <[email protected]>

* fix imports

Signed-off-by: Jan-Otto Kröpke <[email protected]>

---------

Signed-off-by: Jan-Otto Kröpke <[email protected]>
jkroepke authored Oct 12, 2023
1 parent 7ee69a6 commit bdfef97
Showing 8 changed files with 123 additions and 36 deletions.
1 change: 1 addition & 0 deletions charts/kube-prometheus-stack/.gitignore
@@ -6,3 +6,4 @@ charts/*
!charts/crds/
!charts/crds/**
Chart.lock
hack/*.git
2 changes: 1 addition & 1 deletion charts/kube-prometheus-stack/Chart.yaml
@@ -21,7 +21,7 @@ name: kube-prometheus-stack
sources:
- https://github.com/prometheus-community/helm-charts
- https://github.com/prometheus-operator/kube-prometheus
version: 51.5.3
version: 51.6.0
appVersion: v0.68.0
kubeVersion: ">=1.19.0-0"
home: https://github.com/prometheus-operator/kube-prometheus
65 changes: 53 additions & 12 deletions charts/kube-prometheus-stack/hack/sync_grafana_dashboards.py
@@ -1,9 +1,11 @@
#!/usr/bin/env python3
"""Fetch dashboards from provided urls into this chart."""
import json
import os
import re
import shutil
import subprocess
import textwrap
from os import makedirs, path

import _jsonnet
import requests
@@ -35,10 +37,12 @@ def new_representer(dumper, data):
'multicluster_key': '.Values.grafana.sidecar.dashboards.multicluster.global.enabled',
},
{
'source': 'https://raw.githubusercontent.com/etcd-io/etcd/main/contrib/mixin/mixin.libsonnet',
'git': 'https://github.com/etcd-io/etcd.git',
'source': 'contrib/mixin/mixin.libsonnet',
'destination': '../templates/grafana/dashboards-1.14',
'type': 'jsonnet_mixin',
'min_kubernetes': '1.14.0-0',
'type': 'jsonnet_mixin',
'mixin_vars': {'_config+': {}},
'multicluster_key': '(or .Values.grafana.sidecar.dashboards.multicluster.global.enabled .Values.grafana.sidecar.dashboards.multicluster.etcd.enabled)'
},
]
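
Reassembled from the added and unchanged lines above, the new etcd entry reads roughly as follows (key order approximate; the leftover raw.githubusercontent.com 'source' URL and one of the two 'type' lines are the removed originals):

    {
        'git': 'https://github.com/etcd-io/etcd.git',
        'source': 'contrib/mixin/mixin.libsonnet',
        'destination': '../templates/grafana/dashboards-1.14',
        'min_kubernetes': '1.14.0-0',
        'type': 'jsonnet_mixin',
        'mixin_vars': {'_config+': {}},
        'multicluster_key': '(or .Values.grafana.sidecar.dashboards.multicluster.global.enabled .Values.grafana.sidecar.dashboards.multicluster.etcd.enabled)'
    },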
@@ -124,6 +128,18 @@ def patch_json_set_timezone_as_variable(content):
return re.sub(r'"timezone"\s*:\s*"(?:\\.|[^\"])*"', '"timezone": "`}}{{ .Values.grafana.defaultDashboardsTimezone }}{{`"', content, flags=re.IGNORECASE)


def jsonnet_import_callback(base, rel):
if "github.com" in base:
base = os.getcwd() + '/vendor/' + base[base.find('github.com'):]
elif "github.com" in rel:
base = os.getcwd() + '/vendor/'

if os.path.isfile(base + rel):
return base + rel, open(base + rel).read().encode('utf-8')

raise RuntimeError('File not found')


def write_group_to_file(resource_name, content, url, destination, min_kubernetes, max_kubernetes, multicluster_key):
# initialize header
lines = header % {
@@ -148,7 +164,7 @@ def write_group_to_file(resource_name, content, url, destination, min_kubernetes
new_filename = "%s/%s" % (destination, filename)

# make sure directories to store the file exist
makedirs(destination, exist_ok=True)
os.makedirs(destination, exist_ok=True)

# recreate the file
with open(new_filename, 'w') as f:
@@ -161,12 +177,33 @@ def main():
init_yaml_styles()
# read the rules, create a new template file per group
for chart in charts:
print("Generating rules from %s" % chart['source'])
response = requests.get(chart['source'])
if response.status_code != 200:
print('Skipping the file, response code %s not equals 200' % response.status_code)
continue
raw_text = response.text
if 'git' in chart:
print("Clone %s" % chart['git'])
checkout_dir = os.path.basename(chart['git'])
shutil.rmtree(checkout_dir, ignore_errors=True)
subprocess.run(["git", "clone", chart['git'], "--branch", "main", "--single-branch", "--depth", "1", checkout_dir])
print("Generating rules from %s" % chart['source'])

mixin_file = os.path.basename(chart['source'])
mixin_dir = checkout_dir + '/' + os.path.dirname(chart['source']) + '/'
if os.path.exists(mixin_dir + "jsonnetfile.json"):
print("Running jsonnet-bundler, because jsonnetfile.json exists")
subprocess.run(["jb", "install"], cwd=mixin_dir)

mixin_vars = json.dumps(chart['mixin_vars'])

cwd = os.getcwd()
os.chdir(mixin_dir)
raw_text = '((import "%s") + %s)' % (mixin_file, mixin_vars)
source = mixin_file
else:
print("Generating rules from %s" % chart['source'])
response = requests.get(chart['source'])
if response.status_code != 200:
print('Skipping the file, response code %s not equals 200' % response.status_code)
continue
raw_text = response.text
source = chart['source']

if ('max_kubernetes' not in chart):
chart['max_kubernetes']="9.9.9-9"
@@ -178,15 +215,19 @@ def main():
for resource, content in group['data'].items():
write_group_to_file(resource.replace('.json', ''), content, chart['source'], chart['destination'], chart['min_kubernetes'], chart['max_kubernetes'], chart['multicluster_key'])
elif chart['type'] == 'jsonnet_mixin':
json_text = json.loads(_jsonnet.evaluate_snippet(chart['source'], raw_text + '.grafanaDashboards'))
json_text = json.loads(_jsonnet.evaluate_snippet(source, raw_text + '.grafanaDashboards', import_callback=jsonnet_import_callback))

if 'git' in chart:
os.chdir(cwd)
# is it already a dashboard structure or is it nested (etcd case)?
flat_structure = bool(json_text.get('annotations'))
if flat_structure:
resource = path.basename(chart['source']).replace('.json', '')
resource = os.path.basename(chart['source']).replace('.json', '')
write_group_to_file(resource, json.dumps(json_text, indent=4), chart['source'], chart['destination'], chart['min_kubernetes'], chart['max_kubernetes'], chart['multicluster_key'])
else:
for resource, content in json_text.items():
write_group_to_file(resource.replace('.json', ''), json.dumps(content, indent=4), chart['source'], chart['destination'], chart['min_kubernetes'], chart['max_kubernetes'], chart['multicluster_key'])

print("Finished")


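A minimal sketch of how the new import machinery is exercised, assuming the script has already cloned the mixin repository, run `jb install` in the mixin directory, and changed into it, as main() now does; the snippet string mirrors the one main() builds, and jsonnet_import_callback is the helper added above:

    import json

    import _jsonnet

    # Ordinary relative imports are looked up on disk as-is; "github.com/..."
    # imports are redirected by jsonnet_import_callback into ./vendor/, which
    # `jb install` populated a moment earlier.
    snippet = '((import "mixin.libsonnet") + {"_config+": {}}).grafanaDashboards'
    dashboards = json.loads(_jsonnet.evaluate_snippet(
        'mixin.libsonnet', snippet, import_callback=jsonnet_import_callback))
    for name in dashboards:
        print(name)  # one entry per generated dashboard JSON file
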
71 changes: 58 additions & 13 deletions charts/kube-prometheus-stack/hack/sync_prometheus_rules.py
@@ -1,9 +1,11 @@
#!/usr/bin/env python3
"""Fetch alerting and aggregation rules from provided urls into this chart."""
import json
import os
import re
import shutil
import subprocess
import textwrap
from os import makedirs

import _jsonnet
import requests
@@ -63,10 +65,12 @@ def new_representer(dumper, data):
'min_kubernetes': '1.14.0-0'
},
{
'source': 'https://raw.githubusercontent.com/etcd-io/etcd/main/contrib/mixin/mixin.libsonnet',
'git': 'https://github.com/etcd-io/etcd.git',
'source': 'contrib/mixin/mixin.libsonnet',
'destination': '../templates/prometheus/rules-1.14',
'min_kubernetes': '1.14.0-0',
'is_mixin': True
'is_mixin': True,
'mixin_vars': {'_config+': {}}
},
]

@@ -392,7 +396,7 @@ def write_group_to_file(group, url, destination, min_kubernetes, max_kubernetes)
new_filename = "%s/%s" % (destination, filename)

# make sure directories to store the file exist
makedirs(destination, exist_ok=True)
os.makedirs(destination, exist_ok=True)

# recreate the file
with open(new_filename, 'w') as f:
@@ -416,16 +420,45 @@ def main():
init_yaml_styles()
# read the rules, create a new template file per group
for chart in charts:
print("Generating rules from %s" % chart['source'])
response = requests.get(chart['source'])
if response.status_code != 200:
print('Skipping the file, response code %s not equals 200' % response.status_code)
continue
raw_text = response.text
if chart.get('is_mixin'):
alerts = json.loads(_jsonnet.evaluate_snippet(chart['source'], raw_text + '.prometheusAlerts'))
if 'git' in chart:
print("Clone %s" % chart['git'])
checkout_dir = os.path.basename(chart['git'])
shutil.rmtree(checkout_dir, ignore_errors=True)
subprocess.run(["git", "clone", chart['git'], "--branch", "main", "--single-branch", "--depth", "1", checkout_dir])
print("Generating rules from %s" % chart['source'])

if chart.get('is_mixin'):
mixin_file = os.path.basename(chart['source'])
mixin_dir = checkout_dir + '/' + os.path.dirname(chart['source']) + '/'
if os.path.exists(mixin_dir + "jsonnetfile.json"):
print("Running jsonnet-bundler, because jsonnetfile.json exists")
subprocess.run(["jb", "install"], cwd=mixin_dir)

mixin_vars = json.dumps(chart['mixin_vars'])

print("Generating rules from %s" % mixin_file)
print("Change cwd to %s" % checkout_dir + '/' + os.path.dirname(chart['source']))
cwd = os.getcwd()
os.chdir(mixin_dir)
alerts = json.loads(_jsonnet.evaluate_snippet(mixin_file, '((import "%s") + %s).prometheusAlerts' % (mixin_file, mixin_vars), import_callback=jsonnet_import_callback))
os.chdir(cwd)
else:
with open(checkout_dir + '/' + chart['source'], "r") as f:
raw_text = f.read()

alerts = yaml.full_load(raw_text)

else:
alerts = yaml.full_load(raw_text)
print("Generating rules from %s" % chart['source'])
response = requests.get(chart['source'])
if response.status_code != 200:
print('Skipping the file, response code %s not equals 200' % response.status_code)
continue
raw_text = response.text
if chart.get('is_mixin'):
alerts = json.loads(_jsonnet.evaluate_snippet(chart['source'], raw_text + '.prometheusAlerts'))
else:
alerts = yaml.full_load(raw_text)

if ('max_kubernetes' not in chart):
chart['max_kubernetes']="9.9.9-9"
@@ -441,5 +474,17 @@ def main():
print("Finished")


def jsonnet_import_callback(base, rel):
if "github.com" in base:
base = os.getcwd() + '/vendor/' + base[base.find('github.com'):]
elif "github.com" in rel:
base = os.getcwd() + '/vendor/'

if os.path.isfile(base + rel):
return base + rel, open(base + rel).read().encode('utf-8')

raise RuntimeError('File not found')


if __name__ == '__main__':
main()
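
Condensed, the new mixin path in main() amounts to the following sequence for the etcd entry (a sketch; directory names follow from the chart config above, and jsonnet_import_callback is the helper defined after main()):

    import json
    import os
    import shutil
    import subprocess

    import _jsonnet

    checkout_dir = 'etcd.git'  # os.path.basename of the 'git' URL
    shutil.rmtree(checkout_dir, ignore_errors=True)
    subprocess.run(["git", "clone", "https://github.com/etcd-io/etcd.git",
                    "--branch", "main", "--single-branch", "--depth", "1", checkout_dir])

    mixin_dir = checkout_dir + '/contrib/mixin/'
    subprocess.run(["jb", "install"], cwd=mixin_dir)  # vendor jsonnet dependencies

    cwd = os.getcwd()
    os.chdir(mixin_dir)
    snippet = '((import "mixin.libsonnet") + {"_config+": {}}).prometheusAlerts'
    alerts = json.loads(_jsonnet.evaluate_snippet(
        'mixin.libsonnet', snippet, import_callback=jsonnet_import_callback))
    os.chdir(cwd)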

Large diffs are not rendered by default.