serverless test use minimum privileges per case (#15264)
This commit splits the privileges per use case and removes ILM from the index template for the serverless tests.
kaisecheng authored Aug 31, 2023
1 parent 6b2fa20 commit 0286f67
Showing 13 changed files with 97 additions and 46 deletions.
81 changes: 66 additions & 15 deletions ci/serverless/README.md
@@ -25,15 +25,15 @@ The test cases against serverless Elasticsearch cover the following scenarios

#### Logstash

-Create Logstash API key for test setup/teardown and elastic_integration filter
+Plugin user
```
POST /_security/api_key
{
"name": "logstash_user",
"name": "plugin_user",
"expiration": "365d",
"role_descriptors": {
"logstash_user_role": {
"cluster": ["monitor", "manage_index_templates", "manage_logstash_pipelines", "cluster:admin/ingest/pipeline/get", "read_pipeline"],
"plugin_user_role": {
"cluster": ["manage_index_templates", "monitor"],
"indices": [
{
"names": [ "logstash", "logstash-*", "ecs-logstash", "ecs-logstash-*", "serverless*", "logs-*", "metrics-*", "synthetics-*", "traces-*" ],
@@ -45,6 +45,54 @@ POST /_security/api_key
}
```

Integration User
```
POST /_security/api_key
{
"name": "integration_user",
"expiration": "365d",
"role_descriptors": {
"integration_user_role": {
"cluster": ["manage_index_templates", "read_pipeline", "monitor"]
}
}
}
```

CPM User
```
POST /_security/api_key
{
"name": "cpm_user",
"expiration": "365d",
"role_descriptors": {
"cpm_user_role": {
"cluster": ["manage_logstash_pipelines", "monitor"]
}
}
}
```

Tester
```
POST /_security/api_key
{
"name": "tester_user",
"expiration": "365d",
"role_descriptors": {
"tester_user_role": {
"cluster": ["manage_index_templates", "manage_logstash_pipelines","manage_ingest_pipelines"],
"indices": [
{
"names": [ "logstash", "logstash-*", "ecs-logstash", "ecs-logstash-*", "serverless*", "logs-*", "metrics-*", "synthetics-*", "traces-*", "*test*" ],
"privileges": ["manage", "write", "create_index", "read", "view_index_metadata"]
}
]
}
}
}
```
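Each create-key response includes `id`, `api_key`, and a ready-made base64 `encoded` value, which map onto the two formats the vault fields below expect (`id:api_key` vs. base64 of `id:api_key`). A minimal sketch for deriving both, assuming `jq` is available and a superuser API key is in `$ES_ADMIN_KEY` (both names here are placeholders, not part of the test setup):

```bash
# Create a key (use the request bodies shown above) and capture the response.
resp=$(curl -s -X POST "$ES_ENDPOINT/_security/api_key" \
  -H "Authorization: ApiKey $ES_ADMIN_KEY" \
  -H 'Content-Type: application/json' \
  -d '{"name": "tester_user", "expiration": "365d"}')

# id:api_key form, e.g. for plugin_api_key / cpm_api_key / mb_api_key.
echo "$resp" | jq -r '"\(.id):\(.api_key)"'

# base64 form, e.g. for tester_api_key_encoded / integration_api_key_encoded.
echo "$resp" | jq -r '.encoded'
```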

#### MetricBeat
Grant metricbeat write permission.

@@ -72,16 +72,19 @@ POST /_security/api_key

The username, password, API key and hosts are stored in `secret/ci/elastic-logstash/serverless-test`.

-| Vault field             |                                  |
-|-------------------------|----------------------------------|
-| es_host                 | Elasticsearch endpoint with port |
-| es_superuser            | username of superuser            |
-| es_superuser_pw         | password of superuser            |
-| kb_host                 | Kibana endpoint with port        |
-| ls_role_api_key_encoded | base64 of api_key                |
-| ls_plugin_api_key       | id:api_key for Logstash plugins  |
-| mb_api_key              | id:api_key for beats             |
+| Vault field                 |                                                          |
+|-----------------------------|----------------------------------------------------------|
+| es_host                     | Elasticsearch endpoint with port                         |
+| es_superuser                | username of superuser                                    |
+| es_superuser_pw             | password of superuser                                    |
+| kb_host                     | Kibana endpoint with port                                |
+| mb_api_key                  | id:api_key for beats                                     |
+| plugin_api_key              | id:api_key for es-output/filter/input                    |
+| integration_api_key_encoded | base64 of api_key for elastic integration                |
+| tester_api_key_encoded      | base64 of api_key for the script to update testing data  |
+| cpm_api_key                 | id:api_key for central pipeline management               |


```bash
-vault write secret/ci/elastic-logstash/serverless-test es_host="REDACTED" es_superuser="REDACTED" es_superuser_pw="REDACTED" kb_host="REDACTED" ls_role_api_key_encoded="REDACTED" ls_plugin_api_key="REDACTED" mb_api_key="REDACTED"
+vault write secret/ci/elastic-logstash/serverless-test es_host="REDACTED" es_superuser="REDACTED" es_superuser_pw="REDACTED" kb_host="REDACTED" mb_api_key="REDACTED" plugin_api_key="REDACTED" integration_api_key_encoded="REDACTED" tester_api_key_encoded="REDACTED" cpm_api_key="REDACTED"
```
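To spot-check a stored field afterwards, something like the following should work, assuming an authenticated `vault` CLI:

```bash
vault read -field=plugin_api_key secret/ci/elastic-logstash/serverless-test
```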
10 changes: 7 additions & 3 deletions ci/serverless/common.sh
@@ -12,10 +12,14 @@ setup_vault() {
vault_path=secret/ci/elastic-logstash/serverless-test
set +x
export ES_ENDPOINT=$(vault read -field=es_host "${vault_path}")
export ES_USER=$(vault read -field=es_superuser "${vault_path}") # dlq test
export ES_PW=$(vault read -field=es_superuser_pw "${vault_path}")
export KB_ENDPOINT=$(vault read -field=kb_host "${vault_path}")
-export LS_ROLE_API_KEY_ENCODED=$(vault read -field=ls_role_api_key_encoded "${vault_path}")
-export LS_PLUGIN_API_KEY=$(vault read -field=ls_plugin_api_key "${vault_path}")
export MB_API_KEY=$(vault read -field=mb_api_key "${vault_path}")
+export PLUGIN_API_KEY=$(vault read -field=plugin_api_key "${vault_path}")
+export INTEGRATION_API_KEY_ENCODED=$(vault read -field=integration_api_key_encoded "${vault_path}")
+export TESTER_API_KEY_ENCODED=$(vault read -field=tester_api_key_encoded "${vault_path}")
+export CPM_API_KEY=$(vault read -field=cpm_api_key "${vault_path}")
set -x
}

@@ -24,7 +28,7 @@ build_logstash() {
}

index_test_data() {
-curl -X POST -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$ES_ENDPOINT/$INDEX_NAME/_bulk" -H 'Content-Type: application/json' --data-binary @"$CURRENT_DIR/test_data/book.json"
+curl -X POST -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/$INDEX_NAME/_bulk" -H 'Content-Type: application/json' --data-binary @"$CURRENT_DIR/test_data/book.json"
}
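The `--data-binary` flag is deliberate here: plain `-d` strips newlines when reading from a file, while `_bulk` requires newline-delimited action/source pairs. A hypothetical two-document body in the shape of `book.json` (the real file is not shown in this diff):

```bash
# Hypothetical NDJSON bulk body; each action line is followed by its document.
cat > /tmp/book_sample.json <<'EOF'
{"index":{}}
{"title":"Book One","author":"A. Author"}
{"index":{}}
{"title":"Book Two","author":"B. Author"}
EOF

curl -X POST -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/$INDEX_NAME/_bulk" \
  -H 'Content-Type: application/json' --data-binary @/tmp/book_sample.json
```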

# $1: check function
9 changes: 2 additions & 7 deletions ci/serverless/config/logstash.yml
@@ -1,9 +1,4 @@
xpack.management.enabled: true
xpack.management.pipeline.id: ["gen_es"]
-xpack.management.elasticsearch.api_key: ${LS_PLUGIN_API_KEY}
-xpack.management.elasticsearch.hosts: ["${ES_ENDPOINT}"]
-
-# Legacy monitoring is disabled.
-#xpack.monitoring.enabled: true
-#xpack.monitoring.elasticsearch.api_key: ${LS_PLUGIN_API_KEY}
-#xpack.monitoring.elasticsearch.hosts: ["${ES_ENDPOINT}"]
+xpack.management.elasticsearch.api_key: ${CPM_API_KEY}
+xpack.management.elasticsearch.hosts: ["${ES_ENDPOINT}"]
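With management enabled, Logstash ignores local pipeline configuration and instead polls Elasticsearch for the `gen_es` pipeline definition, authenticating with the CPM key created above.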
6 changes: 3 additions & 3 deletions ci/serverless/cpm_tests.sh
@@ -7,7 +7,7 @@ export PIPELINE_NAME='gen_es'

# update pipeline and check response code
index_pipeline() {
-RESP_CODE=$(curl -s -w "%{http_code}" -X PUT -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$ES_ENDPOINT/_logstash/pipeline/$1" -H 'Content-Type: application/json' -d "$2")
+RESP_CODE=$(curl -s -w "%{http_code}" -X PUT -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/_logstash/pipeline/$1" -H 'Content-Type: application/json' -d "$2")
if [[ $RESP_CODE -ge '400' ]]; then
echo "failed to update pipeline for Central Pipeline Management. Got $RESP_CODE from Elasticsearch"
exit 1
@@ -17,7 +17,7 @@ index_pipeline() {
# index pipeline to serverless ES
index_cpm_pipelines() {
index_pipeline "$PIPELINE_NAME" '{
"pipeline": "input { generator { count => 100 } } output { elasticsearch { hosts => \"${ES_ENDPOINT}\" api_key => \"${LS_PLUGIN_API_KEY}\" index=> \"${INDEX_NAME}\" } }",
"pipeline": "input { generator { count => 100 } } output { elasticsearch { hosts => \"${ES_ENDPOINT}\" api_key => \"${PLUGIN_API_KEY}\" index=> \"${INDEX_NAME}\" } }",
"last_modified": "2023-07-04T22:22:22.222Z",
"pipeline_metadata": { "version": "1"},
"username": "log.stash",
@@ -34,7 +34,7 @@ check_plugin() {
}

delete_pipeline() {
-curl -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" -X DELETE "$ES_ENDPOINT/_logstash/pipeline/$PIPELINE_NAME" -H 'Content-Type: application/json';
+curl -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" -X DELETE "$ES_ENDPOINT/_logstash/pipeline/$PIPELINE_NAME" -H 'Content-Type: application/json';
}
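For debugging, the stored document can be fetched back before Logstash polls it. A sketch using the same tester key, assuming `jq`:

```bash
# Print which pipeline ids are currently stored for central management.
curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" \
  "$ES_ENDPOINT/_logstash/pipeline/$PIPELINE_NAME" | jq 'keys'
```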

cpm_clean_up_and_get_result() {
6 changes: 3 additions & 3 deletions ci/serverless/elastic_integration_filter_tests.sh
@@ -4,11 +4,11 @@ set -ex
source ./$(dirname "$0")/common.sh

deploy_ingest_pipeline() {
-PIPELINE_RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$ES_ENDPOINT/_ingest/pipeline/integration-logstash_test.events-default" \
+PIPELINE_RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/_ingest/pipeline/integration-logstash_test.events-default" \
-H 'Content-Type: application/json' \
--data-binary @"$CURRENT_DIR/test_data/ingest_pipeline.json")

-TEMPLATE_RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$ES_ENDPOINT/_index_template/logs-serverless-default-template" \
+TEMPLATE_RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/_index_template/logs-serverless-default-template" \
-H 'Content-Type: application/json' \
--data-binary @"$CURRENT_DIR/test_data/index_template.json")

@@ -29,7 +29,7 @@ check_integration_filter() {
}

get_doc_msg_length() {
-curl -s -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$ES_ENDPOINT/logs-$INDEX_NAME.004-default/_search?size=1" | jq '.hits.hits[0]._source.message | length'
+curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/logs-$INDEX_NAME.004-default/_search?size=1" | jq '.hits.hits[0]._source.message | length'
}

# ensure no double run of ingest pipeline
4 changes: 2 additions & 2 deletions ci/serverless/es_output_tests.sh
@@ -9,11 +9,11 @@ check_named_index() {
}

get_data_stream_count() {
-curl -s -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$ES_ENDPOINT/logs-$INDEX_NAME.001-default/_count" | jq '.count'
+curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/logs-$INDEX_NAME.001-default/_count" | jq '.count // 0'
}
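The `// 0` is jq's alternative operator: before any document lands, the `_count` call can return an error body with no `.count` field, and the fallback keeps the shell comparison from seeing `null`. For example:

```bash
echo '{"error":{"type":"index_not_found_exception"}}' | jq '.count // 0'   # prints 0
echo '{"count":42}' | jq '.count // 0'                                     # prints 42
```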

compare_data_stream_count() {
-[[ $(get_data_stream_count) -ge "$INITIAL_DATA_STREAM_CNT" ]] && echo "0"
+[[ $(get_data_stream_count) -gt "$INITIAL_DATA_STREAM_CNT" ]] && echo "0"
}

check_data_stream_output() {
10 changes: 6 additions & 4 deletions ci/serverless/kibana_api_tests.sh
@@ -1,4 +1,6 @@
#!/usr/bin/env bash

+# This test always fails because the APIs are not ready and return "method [...] exists but is not available with the current configuration"
set -ex

source ./$(dirname "$0")/common.sh
@@ -7,7 +9,7 @@ export PIPELINE_NAME="stdin_stdout"
export EXIT_CODE="0"

create_pipeline() {
-RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME" \
+RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME" \
-H 'Content-Type: application/json' -H 'kbn-xsrf: logstash' \
--data-binary @"$CURRENT_DIR/test_data/$PIPELINE_NAME.json")

@@ -18,7 +20,7 @@ create_pipeline() {
}

get_pipeline() {
-RESP_BODY=$(curl -s -X GET -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME")
+RESP_BODY=$(curl -s -X GET -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME")
SOURCE_BODY=$(cat "$CURRENT_DIR/test_data/$PIPELINE_NAME.json")

RESP_PIPELINE_NAME=$(echo "$RESP_BODY" | jq -r '.id')
@@ -39,15 +41,15 @@ get_pipeline() {
}

list_pipeline() {
-RESP_BODY=$(curl -s -X GET -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipelines" | jq --arg name "$PIPELINE_NAME" '.pipelines[] | select(.id==$name)' )
+RESP_BODY=$(curl -s -X GET -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipelines" | jq --arg name "$PIPELINE_NAME" '.pipelines[] | select(.id==$name)' )
if [[ -z "$RESP_BODY" ]]; then
EXIT_CODE=$(( EXIT_CODE + 1 ))
echo "Fail to list pipeline."
fi
}

delete_pipeline() {
-RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X DELETE -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME" \
+RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X DELETE -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME" \
-H 'Content-Type: application/json' -H 'kbn-xsrf: logstash' \
--data-binary @"$CURRENT_DIR/test_data/$PIPELINE_NAME.json")

2 changes: 1 addition & 1 deletion ci/serverless/metricbeat_monitoring_tests.sh
@@ -40,7 +40,7 @@ stop_metricbeat() {
}

get_monitor_count() {
-curl -s -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$ES_ENDPOINT/$INDEX_NAME/_count" | jq '.count'
+curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/$INDEX_NAME/_count" | jq '.count // 0'
}

compare_monitor_count() {
4 changes: 2 additions & 2 deletions ci/serverless/pipeline/001_es-output.conf
@@ -13,13 +13,13 @@ output {
elasticsearch {
id => "named_index"
hosts => ["${ES_ENDPOINT}"]
-api_key => "${LS_PLUGIN_API_KEY}"
+api_key => "${PLUGIN_API_KEY}"
index => "${INDEX_NAME}"
}

elasticsearch {
id => "data_stream"
hosts => ["${ES_ENDPOINT}"]
-api_key => "${LS_PLUGIN_API_KEY}"
+api_key => "${PLUGIN_API_KEY}"
}
}
2 changes: 1 addition & 1 deletion ci/serverless/pipeline/002_es-filter.conf
@@ -7,7 +7,7 @@ input {
filter {
elasticsearch {
hosts => ["${ES_ENDPOINT}"]
-api_key => "${LS_PLUGIN_API_KEY}"
+api_key => "${PLUGIN_API_KEY}"
index => "${INDEX_NAME}"
query => "*"
add_field => {"check" => "good"}
2 changes: 1 addition & 1 deletion ci/serverless/pipeline/003_es-input.conf
@@ -1,7 +1,7 @@
input {
elasticsearch {
hosts => ["${ES_ENDPOINT}"]
-api_key => "${LS_PLUGIN_API_KEY}"
+api_key => "${PLUGIN_API_KEY}"
index => "${INDEX_NAME}"
size => 100
schedule => "*/10 * * * * *"
4 changes: 2 additions & 2 deletions ci/serverless/pipeline/004_integration-filter.conf
@@ -11,7 +11,7 @@ input {
filter {
elastic_integration {
hosts => "${ES_ENDPOINT}"
-api_key => "${LS_ROLE_API_KEY_ENCODED}"
+api_key => "${INTEGRATION_API_KEY_ENCODED}"
remove_field => ["_version"]
add_field => {"ingested" => "ok"}
}
@@ -28,6 +28,6 @@ output {
elasticsearch {
id => "data_stream"
hosts => ["${ES_ENDPOINT}"]
-api_key => "${LS_PLUGIN_API_KEY}"
+api_key => "${PLUGIN_API_KEY}"
}
}
3 changes: 1 addition & 2 deletions ci/serverless/test_data/index_template.json
@@ -4,8 +4,7 @@
"priority": 500,
"template": {
"settings": {
"index.default_pipeline": "integration-logstash_test.events-default",
"index.lifecycle.name": "logs"
"index.default_pipeline": "integration-logstash_test.events-default"
}
}
}
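Serverless Elasticsearch manages the data stream lifecycle itself and does not support ILM, hence the `index.lifecycle.name` setting is dropped from the template (the "removes ILM in template" part of the commit message).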
