# This workflow will install Python dependencies and run lint with a single version of Python
name: OpenSIEM
on:
  push:
    paths:
      - 'config/**'
      - 'build_scripts/**'
  pull_request:
    branches: [ master, develop, 1.0 ]
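# Pushes trigger this workflow only when files under config/ or build_scripts/
# change; pull requests against the listed branches always trigger it.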
jobs:
  syntax-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
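          # (E9: syntax/indentation errors such as E999; F63: invalid comparisons;
          # F7: misplaced statements such as break outside a loop; F82: undefined names)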
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: Set logstash variables to be used across steps
        run: |
          export LOGSTASH_VERSION=7.16.2
          export LOGSTASH_HOME="/usr/share/logstash"
          echo "LOGSTASH_VERSION=$LOGSTASH_VERSION" >> $GITHUB_ENV
          echo "LOGSTASH_HOME=$LOGSTASH_HOME" >> $GITHUB_ENV
      # - name: Cache logstash directory
      #   id: cache-logstash
      #   uses: actions/cache@v2
      #   with:
      #     path: ${LOGSTASH_HOME}
      #     key: ${LOGSTASH_VERSION}
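      # Note: `with:` inputs are not shell-expanded, so ${LOGSTASH_HOME} and
      # ${LOGSTASH_VERSION} would need the expression syntax, e.g.
      # ${{ env.LOGSTASH_HOME }}, which may be why the cached directory was unusable.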
      - name: Download logstash if not cached already
        # Unable to use the cached directory; setting up from scratch every time
        # for now. Will revisit later.
        # if: steps.cache-logstash.outputs.cache-hit != 'true'
        run: |
          sudo apt-get install -qqy --no-install-recommends ca-certificates curl
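          # Fetch the Logstash OSS tarball, verify its SHA-512 checksum and its GPG
          # signature against the Elastic signing key, then unpack it into
          # ${LOGSTASH_HOME}.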
          set -x \
          && mkdir ${LOGSTASH_HOME} \
          && export DOWNLOAD_URL=https://artifacts.elastic.co/downloads/logstash \
          && export LOGSTASH_BINARY=logstash-oss-${LOGSTASH_VERSION}-linux-x86_64.tar.gz \
          && cd /tmp \
          && curl -L -O "${DOWNLOAD_URL}/${LOGSTASH_BINARY}" \
          && curl -L -O "${DOWNLOAD_URL}/${LOGSTASH_BINARY}.sha512" \
          && sha512sum --check "${LOGSTASH_BINARY}.sha512"; \
          curl -L -O "${DOWNLOAD_URL}/${LOGSTASH_BINARY}.asc" \
          && export GNUPGHOME="$(mktemp -d)"; \
          curl -L -O https://artifacts.elastic.co/GPG-KEY-elasticsearch \
          && gpg -q --batch --import /tmp/GPG-KEY-elasticsearch \
          && gpg --batch --no-auto-key-retrieve --verify "${LOGSTASH_BINARY}.asc" "${LOGSTASH_BINARY}"; \
          \
          tar xzf ${LOGSTASH_BINARY} -C ${LOGSTASH_HOME} --strip-components=1; \
          \
          rm -rf "$GNUPGHOME" "${LOGSTASH_BINARY}" "${LOGSTASH_BINARY}.sha512" "${LOGSTASH_BINARY}.asc" || true;
          ${LOGSTASH_HOME}/bin/logstash-plugin install --no-verify \
          && ${LOGSTASH_HOME}/bin/logstash-plugin install \
            logstash-input-okta_system_log \
            logstash-filter-json_encode \
            logstash-filter-tld \
            logstash-output-opensearch
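          # Optional sanity check (not part of the original script): confirm the
          # plugins landed before moving on.
          # ${LOGSTASH_HOME}/bin/logstash-plugin list --verbose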
      - name: Generate pipelines
        env:
          # See README.md for explanation
          DEPLOY_ENV: test
          MY_INDEX: '1'
          # INSTANCE_COUNT=1 so that every config lands in a single pipeline and
          # the syntax check covers all of them
          INSTANCE_COUNT: '1'
          SUB_MY_IP: abc
          ELASTIC_USER: elastic_user
          ELASTIC_PASSWORD: elastic_pass
          ELASTIC_CONNECTION_STRING: '"127.0.0.1:9200", "127.0.0.2:9200"'
          KAFKA_CONNECTION_STRING: kafkahost:9000
          KAFKA_USER: kafka_uname
          KAFKA_PASSWORD: kafka_pwd
          RACK_ID: some_id
          S3_BUCKET_NAME: some_name
          LOGSTASH_API_SECRET: '{"azure_audit_conn" : "Endpoint=sb://dummy.com/;SharedAccessKeyName=dum;SharedAccessKey=key=;EntityPath=path", "azure_operational_conn" : "Endpoint=sb://dummy.com/;SharedAccessKeyName=dum;SharedAccessKey=key=;EntityPath=path", "azure_signin_conn" : "Endpoint=sb://dummy.com/;SharedAccessKeyName=dum;SharedAccessKey=key=;EntityPath=path", "azure_o365_conn" : "Endpoint=sb://dummy.com/;SharedAccessKeyName=dum;SharedAccessKey=key=;EntityPath=path", "azure_tcs_security_conn" : "Endpoint=sb://dummy.com/;SharedAccessKeyName=dum;SharedAccessKey=key=;EntityPath=path", "azure_o365_dlp_conn" : "Endpoint=sb://dummy.com/;SharedAccessKeyName=dum;SharedAccessKey=key=;EntityPath=path", "azure_audit_consumer" : "azure_audit_consumer", "azure_operational_consumer" : "azure_operational_consumer", "azure_signin_consumer" : "azure_signin_consumer", "azure_o365_consumer" : "azure_o365_consumer", "azure_tcs_security_consumer" : "azure_o365_consumer", "azure_o365_dlp_consumer" : "cg-production-operation", "azure_storage_conn" : "DefaultEndpointsProtocol=https;AccountName=dummyname;AccountKey=key;EndpointSuffix=core.windows.net", "azure_atp_consumer" : "azure_atp_consumer", "azure_atp_conn" : "Endpoint=sb://dummy.com/;SharedAccessKeyName=dum;SharedAccessKey=key=;EntityPath=path", "memcached_address" : "\"127.0.0.1\",\"127.0.0.2\"", "dns_server" : "\"127.0.0.1\",\"127.0.0.2\""}'
        run: |
          sudo mkdir -p /data
          sudo chmod -R 777 /data
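          # The build scripts read the dummy environment above to render the
          # Logstash settings and a single combined pipeline for the syntax check.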
          python build_scripts/generate_settings.py
          python build_scripts/generate_pipeline.py
      - name: Prepare for syntax check
        run: |
          cp -r config/* /usr/share/logstash/config/
          sudo mkdir -p /mnt/s3fs_geoip
          sudo chmod -R 777 /mnt/s3fs_geoip
          touch /mnt/s3fs_geoip/GeoLite2-City.mmdb
          touch /mnt/s3fs_geoip/GeoLitePrivate2-City.mmdb
          touch /usr/share/logstash/config/kafka_client_truststore.jks
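          # Empty placeholder files stand in for the GeoIP databases and the Kafka
          # truststore so that paths referenced by the configs exist during the check.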
      - name: Test configs for syntax errors
        run: |
          ${LOGSTASH_HOME}/bin/logstash --config.test_and_exit
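          # --config.test_and_exit validates the pipeline configuration and exits
          # instead of starting Logstash; a non-zero exit code fails this job.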